fix: update all workflows to latest MokoStandards v04.04 templates [skip ci]

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
2026-04-02 10:04:12 -05:00
parent 9b9bbf7d57
commit a0cc9020a4
11 changed files with 6809 additions and 33 deletions

65
.github/CODEOWNERS vendored
View File

@@ -1,37 +1,36 @@
# CODEOWNERS
# Copyright (C) 2026 Moko Consulting <hello@mokoconsulting.tech>
# SPDX-License-Identifier: GPL-3.0-or-later
#
# CODEOWNERS — require approval from jmiller-moko for protected paths
# Synced from MokoStandards. Do not edit manually.
#
# Changes to these paths require review from the listed owners before merge.
# Combined with branch protection (require PR reviews), this prevents
# unauthorized modifications to workflows, configs, and governance files.
#
# NOTE: When multiple patterns match a path, the LAST matching pattern wins.
# Entries are therefore ordered from most general (global default) to most
# protected (governed/synced paths owned by jmiller-moko), and the previous
# duplicate, mutually-overriding entries (e.g. two owners for
# /.github/workflows/) plus the stray ``` fence lines — which CODEOWNERS
# would treat as literal path patterns — have been removed.

# ── Global default ───────────────────────────────────────────────────────
* @mokoconsulting-tech/maintainers

# ── Documentation suite ──────────────────────────────────────────────────
/docs/ @mokoconsulting-tech/documentation
/docs/templates/ @mokoconsulting-tech/documentation
/docs/adr/ @mokoconsulting-tech/architecture
/docs/diagrams/ @mokoconsulting-tech/architecture
/docs/security/ @mokoconsulting-tech/security

# ── Engineering + code assets ────────────────────────────────────────────
/htdocs/ @mokoconsulting-tech/engineering
/src/ @mokoconsulting-tech/engineering
/api/ @mokoconsulting-tech/engineering

# ── GitHub configuration ─────────────────────────────────────────────────
/.github/ISSUE_TEMPLATE/ @jmiller-moko
/.github/CODEOWNERS @jmiller-moko
/.github/copilot.yml @jmiller-moko
/.github/copilot-instructions.md @jmiller-moko
/.github/CLAUDE.md @jmiller-moko
/.github/.mokostandards @jmiller-moko

# ── Workflows (synced from MokoStandards — must not be manually edited) ──
/.github/workflows/ @jmiller-moko

# ── Build and config files ───────────────────────────────────────────────
/composer.json @jmiller-moko
/phpstan.neon @jmiller-moko
/Makefile @jmiller-moko
/.ftp_ignore @jmiller-moko
/.gitignore @jmiller-moko
/.gitattributes @jmiller-moko
/.editorconfig @jmiller-moko

# ── Governance documents ─────────────────────────────────────────────────
/LICENSE @jmiller-moko
/CONTRIBUTING.md @jmiller-moko
/SECURITY.md @jmiller-moko
/GOVERNANCE.md @jmiller-moko
/CODE_OF_CONDUCT.md @jmiller-moko

102
.github/workflows/auto-dev-issue.yml vendored Normal file
View File

@@ -0,0 +1,102 @@
# Copyright (C) 2026 Moko Consulting <hello@mokoconsulting.tech>
#
# This file is part of a Moko Consulting project.
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
# FILE INFORMATION
# DEFGROUP: GitHub.Workflow
# INGROUP: MokoStandards.Automation
# REPO: https://github.com/mokoconsulting-tech/MokoStandards
# PATH: /templates/workflows/shared/auto-dev-issue.yml.template
# VERSION: 04.04.01
# BRIEF: Auto-create tracking issue when a dev/** or rc/** branch is pushed
# NOTE: Synced via bulk-repo-sync to .github/workflows/auto-dev-issue.yml in all governed repos.
name: Auto Dev Branch Issue

# The `create` event fires for both branches and tags; the job-level `if`
# below narrows it to dev/** and rc/** branches only.
on:
  create:

env:
  FORCE_JAVASCRIPT_ACTIONS_TO_NODE24: true

permissions:
  contents: read
  issues: write

jobs:
  create-issue:
    name: Create version tracking issue
    runs-on: ubuntu-latest
    if: >-
      github.event.ref_type == 'branch' &&
      (startsWith(github.event.ref, 'dev/') || startsWith(github.event.ref, 'rc/'))
    steps:
      - name: Create tracking issue
        env:
          GH_TOKEN: ${{ secrets.GH_TOKEN || github.token }}
        run: |
          BRANCH="${{ github.event.ref }}"
          REPO="${{ github.repository }}"
          ACTOR="${{ github.actor }}"
          NOW=$(date -u '+%Y-%m-%d %H:%M UTC')
          # Determine branch type, version, label, and title prefix from the
          # branch name prefix (rc/* → release candidate, else dev/*).
          if [[ "$BRANCH" == rc/* ]]; then
            VERSION="${BRANCH#rc/}"
            BRANCH_TYPE="Release Candidate"
            LABEL_TYPE="type: release"
            TITLE_PREFIX="rc"
          else
            VERSION="${BRANCH#dev/}"
            BRANCH_TYPE="Development"
            LABEL_TYPE="type: feature"
            TITLE_PREFIX="feat"
          fi
          TITLE="${TITLE_PREFIX}(${VERSION}): ${BRANCH_TYPE} tracking for ${BRANCH}"
          # Body lines sit flush with the block-scalar indent, so YAML already
          # delivers them unindented — no post-hoc `sed` dedent pass needed.
          BODY="## ${BRANCH_TYPE} Branch Created

          | Field | Value |
          |-------|-------|
          | **Branch** | \`${BRANCH}\` |
          | **Version** | \`${VERSION}\` |
          | **Type** | ${BRANCH_TYPE} |
          | **Created by** | @${ACTOR} |
          | **Created at** | ${NOW} |
          | **Repository** | \`${REPO}\` |

          ## Checklist

          - [ ] Feature development complete
          - [ ] Tests passing
          - [ ] README.md version bumped to \`${VERSION}\`
          - [ ] CHANGELOG.md updated
          - [ ] PR created targeting \`main\`
          - [ ] Code reviewed and approved
          - [ ] Merged to \`main\`

          ---
          *Auto-created by [auto-dev-issue.yml](.github/workflows/auto-dev-issue.yml) on branch creation.*"
          # Dedup: skip creation if an open issue for this version already
          # exists. NOTE(review): only the 5 most recent open issues are
          # inspected — confirm that window is wide enough for this org.
          EXISTING=$(gh api "repos/${REPO}/issues?state=open&per_page=5" \
            --jq ".[] | select(.title | startswith(\"${TITLE_PREFIX}(${VERSION})\")) | .number" 2>/dev/null | head -1)
          if [ -n "$EXISTING" ]; then
            echo " Issue #${EXISTING} already exists for ${VERSION}" >> "$GITHUB_STEP_SUMMARY"
          elif ISSUE_URL=$(gh issue create \
            --repo "$REPO" \
            --title "$TITLE" \
            --body "$BODY" \
            --label "${LABEL_TYPE},version" \
            --assignee "jmiller-moko" 2>&1); then
            echo "✅ Created tracking issue: ${ISSUE_URL}" >> "$GITHUB_STEP_SUMMARY"
          else
            # Previously the exit status was ignored and the error text was
            # reported as a "✅ Created" URL; surface the failure and fail
            # the step so a missing label/assignee is noticed.
            echo "❌ Failed to create tracking issue: ${ISSUE_URL}" >> "$GITHUB_STEP_SUMMARY"
            exit 1
          fi

439
.github/workflows/auto-release.yml vendored Normal file
View File

@@ -0,0 +1,439 @@
# Copyright (C) 2026 Moko Consulting <hello@mokoconsulting.tech>
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
# FILE INFORMATION
# DEFGROUP: GitHub.Workflow
# INGROUP: MokoStandards.Release
# REPO: https://github.com/mokoconsulting-tech/MokoStandards
# PATH: /templates/workflows/shared/auto-release.yml.template
# VERSION: 04.04.01
# BRIEF: Unified build & release pipeline — version branch, platform version, badges, tag, release
#
# ╔════════════════════════════════════════════════════════════════════════╗
# ║ BUILD & RELEASE PIPELINE ║
# ╠════════════════════════════════════════════════════════════════════════╣
# ║ ║
# ║ Triggers on push to main (skips bot commits + [skip ci]): ║
# ║ ║
# ║ Every push: ║
# ║ 1. Read version from README.md ║
# ║ 3. Set platform version (Dolibarr $this->version, Joomla <version>)║
# ║ 4. Update [VERSION: XX.YY.ZZ] badges in markdown files ║
# ║ 5. Write update.txt / update.xml ║
# ║ 6. Create git tag vXX.YY.ZZ ║
# ║ 7a. Patch: update existing GitHub Release for this minor ║
# ║ ║
# ║ Minor releases only (patch == 00): ║
# ║ 2. Create/update version/XX.YY branch (patches update in-place) ║
# ║ 7b. Create new GitHub Release ║
# ║ ║
# ╚════════════════════════════════════════════════════════════════════════╝
# NOTE: the non-sequential numbering above is intentional — steps 2 and 7b
# only run for minor (patch == 00) releases; patches reuse the existing
# version branch and release.
name: Build & Release
on:
push:
branches:
- main
- master
env:
FORCE_JAVASCRIPT_ACTIONS_TO_NODE24: true
permissions:
contents: write
jobs:
release:
name: Build & Release Pipeline
runs-on: ubuntu-latest
# Guard against self-triggering loops: this workflow pushes its own
# "[skip ci]" release commit as github-actions[bot].
if: >-
!contains(github.event.head_commit.message, '[skip ci]') &&
github.actor != 'github-actions[bot]'
steps:
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
token: ${{ secrets.GH_TOKEN || github.token }}
# Full history so existing tags/branches can be inspected and created.
fetch-depth: 0
- name: Setup MokoStandards tools
env:
GH_TOKEN: ${{ secrets.GH_TOKEN || github.token }}
COMPOSER_AUTH: '{"github-oauth":{"github.com":"${{ secrets.GH_TOKEN || github.token }}"}}'
run: |
# Clone the pinned standards branch; its PHP CLI tools drive the
# version-read / platform-version / release-notes steps below.
git clone --depth 1 --branch version/04.04 --quiet \
"https://x-access-token:${GH_TOKEN}@github.com/mokoconsulting-tech/MokoStandards.git" \
/tmp/mokostandards
cd /tmp/mokostandards
composer install --no-dev --no-interaction --quiet
# ── STEP 1: Read version ───────────────────────────────────────────
- name: "Step 1: Read version from README.md"
  id: version
  run: |
    # version_read.php extracts the version string from README.md; an
    # empty result means this repo opted out of automated releases.
    VERSION=$(php /tmp/mokostandards/api/cli/version_read.php --path . 2>/dev/null)
    if [ -z "$VERSION" ]; then
      echo "⏭️ No VERSION in README.md — skipping release"
      echo "skip=true" >> "$GITHUB_OUTPUT"
      exit 0
    fi
    # Fail loudly on malformed versions: previously a two-segment value
    # (e.g. "04.04") fell through with an empty PATCH and was silently
    # treated as a patch release.
    if ! echo "$VERSION" | grep -Eq '^[0-9]+\.[0-9]+\.[0-9]+$'; then
      echo "❌ VERSION '${VERSION}' is not in MAJOR.MINOR.PATCH form"
      exit 1
    fi
    # Derive major.minor for branch naming (patches update existing branch)
    MINOR=$(echo "$VERSION" | awk -F. '{printf "%s.%s", $1, $2}')
    PATCH=$(echo "$VERSION" | awk -F. '{print $3}')
    echo "version=$VERSION" >> "$GITHUB_OUTPUT"
    echo "tag=v${VERSION}" >> "$GITHUB_OUTPUT"
    echo "branch=version/${MINOR}" >> "$GITHUB_OUTPUT"
    echo "minor=$MINOR" >> "$GITHUB_OUTPUT"
    echo "skip=false" >> "$GITHUB_OUTPUT"
    # Patch "00" marks a minor release → full pipeline (branch + release).
    if [ "$PATCH" = "00" ]; then
      echo "is_minor=true" >> "$GITHUB_OUTPUT"
      echo "✅ Version: $VERSION (minor release — full pipeline)"
    else
      echo "is_minor=false" >> "$GITHUB_OUTPUT"
      echo "✅ Version: $VERSION (patch — platform version + badges only)"
    fi
- name: Check if already released
  if: steps.version.outputs.skip != 'true'
  id: check
  run: |
    TAG="${{ steps.version.outputs.tag }}"
    BRANCH="${{ steps.version.outputs.branch }}"
    TAG_EXISTS=false
    BRANCH_EXISTS=false
    # Verify against refs/tags/ explicitly: a bare `git rev-parse "$TAG"`
    # also resolves branches or abbreviated SHAs that happen to spell the
    # tag name, giving a false "already released".
    git rev-parse -q --verify "refs/tags/${TAG}" >/dev/null 2>&1 && TAG_EXISTS=true
    # --exit-code lets ls-remote itself report whether the exact remote
    # ref exists; the old `| grep -q "$BRANCH"` substring-matched any
    # branch whose name merely contained "$BRANCH".
    git ls-remote --exit-code --heads origin "refs/heads/${BRANCH}" >/dev/null 2>&1 && BRANCH_EXISTS=true
    echo "tag_exists=$TAG_EXISTS" >> "$GITHUB_OUTPUT"
    echo "branch_exists=$BRANCH_EXISTS" >> "$GITHUB_OUTPUT"
    # Only "already released" when BOTH the tag and the branch exist.
    if [ "$TAG_EXISTS" = "true" ] && [ "$BRANCH_EXISTS" = "true" ]; then
      echo "already_released=true" >> "$GITHUB_OUTPUT"
    else
      echo "already_released=false" >> "$GITHUB_OUTPUT"
    fi
# ── SANITY CHECKS ────────────────────────────────────────────────────
- name: "Sanity: Platform-specific validation"
if: >-
steps.version.outputs.skip != 'true' &&
steps.check.outputs.already_released != 'true'
run: |
# Informational gate only: findings go to the step summary but do NOT
# fail the job — the release proceeds even with errors (see last line).
VERSION="${{ steps.version.outputs.version }}"
PLATFORM=$(php /tmp/mokostandards/api/cli/platform_detect.php --path . 2>/dev/null)
ERRORS=0
echo "## 🔍 Pre-Release Sanity Checks" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "Platform: \`${PLATFORM}\`" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
# Common checks
if [ ! -f "LICENSE" ]; then
echo "❌ Missing LICENSE file" >> $GITHUB_STEP_SUMMARY
ERRORS=$((ERRORS+1))
else
echo "✅ LICENSE" >> $GITHUB_STEP_SUMMARY
fi
if [ ! -d "src" ]; then
echo "⚠️ No src/ directory" >> $GITHUB_STEP_SUMMARY
else
echo "✅ src/ directory" >> $GITHUB_STEP_SUMMARY
fi
# Dolibarr-specific checks
if [ "$PLATFORM" = "crm-module" ]; then
MOD_FILE=$(find src htdocs -path "*/core/modules/mod*.class.php" -print -quit 2>/dev/null)
if [ -z "$MOD_FILE" ]; then
echo "❌ No module descriptor (src/core/modules/mod*.class.php)" >> $GITHUB_STEP_SUMMARY
ERRORS=$((ERRORS+1))
else
echo "✅ Module descriptor: \`${MOD_FILE}\`" >> $GITHUB_STEP_SUMMARY
# Check module number
# grep -P (\K look-behind) is GNU grep — available on ubuntu runners.
NUMERO=$(grep -oP '\$this->numero\s*=\s*\K\d+' "$MOD_FILE" 2>/dev/null || echo "0")
if [ "$NUMERO" = "0" ] || [ -z "$NUMERO" ]; then
echo "❌ Module number (\$this->numero) is 0 or not set" >> $GITHUB_STEP_SUMMARY
ERRORS=$((ERRORS+1))
else
echo "✅ Module number: ${NUMERO}" >> $GITHUB_STEP_SUMMARY
fi
# Check url_last_version exists
if grep -q 'url_last_version' "$MOD_FILE" 2>/dev/null; then
echo "✅ url_last_version is set" >> $GITHUB_STEP_SUMMARY
else
echo "⚠️ url_last_version not set — update checks won't work" >> $GITHUB_STEP_SUMMARY
fi
fi
fi
# Joomla-specific checks
if [ "$PLATFORM" = "waas-component" ]; then
# First XML file (max depth 2) containing an <extension> element.
MANIFEST=$(find . -maxdepth 2 -name "*.xml" -exec grep -l '<extension' {} \; 2>/dev/null | head -1)
if [ -z "$MANIFEST" ]; then
echo "❌ No Joomla XML manifest found" >> $GITHUB_STEP_SUMMARY
ERRORS=$((ERRORS+1))
else
echo "✅ Manifest: \`${MANIFEST}\`" >> $GITHUB_STEP_SUMMARY
# Check extension type
TYPE=$(grep -oP '<extension[^>]+type="\K[^"]+' "$MANIFEST" 2>/dev/null)
echo "✅ Extension type: ${TYPE:-unknown}" >> $GITHUB_STEP_SUMMARY
fi
fi
echo "" >> $GITHUB_STEP_SUMMARY
# Errors are summarized but deliberately not fatal.
if [ "$ERRORS" -gt 0 ]; then
echo "**❌ ${ERRORS} error(s) — release may be incomplete**" >> $GITHUB_STEP_SUMMARY
else
echo "**✅ All sanity checks passed**" >> $GITHUB_STEP_SUMMARY
fi
# ── STEP 2: Create or update version/XX.YY branch ──────────────────
- name: "Step 2: Version branch"
  if: >-
    steps.version.outputs.skip != 'true' &&
    steps.check.outputs.already_released != 'true'
  run: |
    BRANCH="${{ steps.version.outputs.branch }}"
    IS_MINOR="${{ steps.version.outputs.is_minor }}"
    # Push HEAD to the version branch WITHOUT checking it out. The old
    # `git checkout -b "$BRANCH"` left the rest of the job running on the
    # freshly created version branch: the later bare `git push` in
    # "Commit release changes" then had no upstream to push to, and the
    # release commit/tag never landed back on main.
    git push origin HEAD:"$BRANCH" --force
    if [ "$IS_MINOR" = "true" ]; then
      echo "🌿 Created branch: ${BRANCH}" >> "$GITHUB_STEP_SUMMARY"
    else
      echo "📝 Updated branch: ${BRANCH} (patch)" >> "$GITHUB_STEP_SUMMARY"
    fi
# ── STEP 3: Set platform version ───────────────────────────────────
# Delegates to the MokoStandards CLI to write the version into the
# platform-native location (e.g. Dolibarr $this->version / Joomla <version>).
- name: "Step 3: Set platform version"
if: >-
steps.version.outputs.skip != 'true' &&
steps.check.outputs.already_released != 'true'
run: |
VERSION="${{ steps.version.outputs.version }}"
php /tmp/mokostandards/api/cli/version_set_platform.php \
--path . --version "$VERSION" --branch main
# ── STEP 4: Update version badges ──────────────────────────────────
- name: "Step 4: Update version badges"
if: >-
steps.version.outputs.skip != 'true' &&
steps.check.outputs.already_released != 'true'
run: |
VERSION="${{ steps.version.outputs.version }}"
# Rewrite every [VERSION: XX.YY.ZZ] badge in tracked markdown files.
# NOTE(review): the regex only matches two-digit components — confirm
# that matches the org versioning scheme before versions reach 100.
# The find|while pipeline breaks on filenames containing newlines;
# acceptable for repo-managed .md files.
find . -name "*.md" ! -path "./.git/*" ! -path "./vendor/*" | while read -r f; do
if grep -q '\[VERSION:' "$f" 2>/dev/null; then
sed -i "s/\[VERSION:[[:space:]]*[0-9]\{2\}\.[0-9]\{2\}\.[0-9]\{2\}\]/[VERSION: ${VERSION}]/" "$f"
fi
done
# ── STEP 5: Write update files (Dolibarr: update.txt / Joomla: update.xml)
- name: "Step 5: Write update files"
if: >-
steps.version.outputs.skip != 'true' &&
steps.check.outputs.already_released != 'true'
run: |
PLATFORM=$(php /tmp/mokostandards/api/cli/platform_detect.php --path . 2>/dev/null)
VERSION="${{ steps.version.outputs.version }}"
REPO="${{ github.repository }}"
# Dolibarr update channel: a bare version string, no trailing newline.
if [ "$PLATFORM" = "crm-module" ]; then
printf '%s' "$VERSION" > update.txt
echo "📦 update.txt: ${VERSION}" >> $GITHUB_STEP_SUMMARY
fi
if [ "$PLATFORM" = "waas-component" ]; then
# ── Parse extension metadata from XML manifest ──────────────
# NOTE(review): values are extracted with grep and interpolated into
# update.xml without XML-escaping — assumes names contain no &, <, >.
MANIFEST=$(find . -maxdepth 2 -name "*.xml" -exec grep -l '<extension' {} \; 2>/dev/null | head -1)
if [ -z "$MANIFEST" ]; then
echo "⚠️ No Joomla XML manifest found — skipping update.xml" >> $GITHUB_STEP_SUMMARY
else
EXT_NAME=$(grep -oP '<name>\K[^<]+' "$MANIFEST" 2>/dev/null | head -1 || echo "${{ github.event.repository.name }}")
EXT_TYPE=$(grep -oP '<extension[^>]+type="\K[^"]+' "$MANIFEST" 2>/dev/null || echo "component")
EXT_ELEMENT=$(grep -oP '<element>\K[^<]+' "$MANIFEST" 2>/dev/null | head -1 || echo "")
EXT_CLIENT=$(grep -oP '<extension[^>]+client="\K[^"]+' "$MANIFEST" 2>/dev/null || echo "")
EXT_FOLDER=$(grep -oP '<extension[^>]+group="\K[^"]+' "$MANIFEST" 2>/dev/null || echo "")
TARGET_PLATFORM=$(grep -oP '<targetplatform[^/]*/>' "$MANIFEST" 2>/dev/null | head -1 || echo "")
PHP_MINIMUM=$(grep -oP '<php_minimum>\K[^<]+' "$MANIFEST" 2>/dev/null | head -1 || echo "")
# Derive element from manifest filename if not in XML
if [ -z "$EXT_ELEMENT" ]; then
EXT_ELEMENT=$(basename "$MANIFEST" .xml)
fi
# Build client tag: plugins and frontend modules need <client>site</client>
CLIENT_TAG=""
if [ -n "$EXT_CLIENT" ]; then
CLIENT_TAG="<client>${EXT_CLIENT}</client>"
elif [ "$EXT_TYPE" = "module" ] || [ "$EXT_TYPE" = "plugin" ]; then
CLIENT_TAG="<client>site</client>"
fi
# Build folder tag for plugins (required for Joomla to match the update)
FOLDER_TAG=""
if [ -n "$EXT_FOLDER" ] && [ "$EXT_TYPE" = "plugin" ]; then
FOLDER_TAG="<folder>${EXT_FOLDER}</folder>"
fi
# Build targetplatform (fallback to Joomla 5.x if not in manifest)
if [ -z "$TARGET_PLATFORM" ]; then
# printf substitutes "/" to yield the self-closing tag:
# <targetplatform name="joomla" version="5.*" />
TARGET_PLATFORM=$(printf '<targetplatform name="joomla" version="5.*" %s>' "/")
fi
# Build php_minimum tag
PHP_TAG=""
if [ -n "$PHP_MINIMUM" ]; then
PHP_TAG="<php_minimum>${PHP_MINIMUM}</php_minimum>"
fi
DOWNLOAD_URL="https://github.com/${REPO}/releases/download/v${VERSION}/${EXT_ELEMENT}-${VERSION}.zip"
INFO_URL="https://github.com/${REPO}/releases/tag/v${VERSION}"
# ── Write update.xml (stable release) ───────────────────────
# Optional tags are emitted only when non-empty ([ -n ] && printf).
{
printf '%s\n' '<?xml version="1.0" encoding="utf-8"?>'
printf '%s\n' '<updates>'
printf '%s\n' ' <update>'
printf '%s\n' " <name>${EXT_NAME}</name>"
printf '%s\n' " <description>${EXT_NAME} update</description>"
printf '%s\n' " <element>${EXT_ELEMENT}</element>"
printf '%s\n' " <type>${EXT_TYPE}</type>"
printf '%s\n' " <version>${VERSION}</version>"
[ -n "$CLIENT_TAG" ] && printf '%s\n' " ${CLIENT_TAG}"
[ -n "$FOLDER_TAG" ] && printf '%s\n' " ${FOLDER_TAG}"
printf '%s\n' ' <tags>'
printf '%s\n' ' <tag>stable</tag>'
printf '%s\n' ' </tags>'
printf '%s\n' " <infourl title=\"${EXT_NAME}\">${INFO_URL}</infourl>"
printf '%s\n' ' <downloads>'
printf '%s\n' " <downloadurl type=\"full\" format=\"zip\">${DOWNLOAD_URL}</downloadurl>"
printf '%s\n' ' </downloads>'
printf '%s\n' " ${TARGET_PLATFORM}"
[ -n "$PHP_TAG" ] && printf '%s\n' " ${PHP_TAG}"
printf '%s\n' ' <maintainer>Moko Consulting</maintainer>'
printf '%s\n' ' <maintainerurl>https://mokoconsulting.tech</maintainerurl>'
printf '%s\n' ' </update>'
printf '%s\n' '</updates>'
} > update.xml
echo "📦 update.xml: ${VERSION} (stable) — ${EXT_TYPE}/${EXT_ELEMENT}" >> $GITHUB_STEP_SUMMARY
fi
fi
# ── Commit all changes ─────────────────────────────────────────────
- name: Commit release changes
  if: >-
    steps.version.outputs.skip != 'true' &&
    steps.check.outputs.already_released != 'true'
  run: |
    # `git status --porcelain` also reports UNTRACKED files. The old
    # `git diff --quiet && git diff --cached --quiet` check missed them,
    # so a freshly created update.txt / update.xml was reported as
    # "No changes to commit" and never committed.
    if [ -z "$(git status --porcelain)" ]; then
      echo " No changes to commit"
      exit 0
    fi
    VERSION="${{ steps.version.outputs.version }}"
    git config --local user.email "github-actions[bot]@users.noreply.github.com"
    git config --local user.name "github-actions[bot]"
    git add -A
    # [skip ci] keeps this push from re-triggering the pipeline.
    git commit -m "chore(release): build ${VERSION} [skip ci]" \
      --author="github-actions[bot] <github-actions[bot]@users.noreply.github.com>"
    # Explicit remote + refspec: a bare `git push` fails when the current
    # branch has no upstream configured.
    git push origin HEAD
# ── STEP 6: Create tag ─────────────────────────────────────────────
- name: "Step 6: Create git tag"
  if: >-
    steps.version.outputs.skip != 'true' &&
    steps.check.outputs.tag_exists != 'true'
  run: |
    TAG="${{ steps.version.outputs.tag }}"
    # Tags the release commit just pushed (current HEAD).
    git tag "$TAG"
    git push origin "$TAG"
    echo "🏷️ Tag: ${TAG}" >> "$GITHUB_STEP_SUMMARY"
# ── STEP 7: Create or update GitHub Release ──────────────────────────
# Minor releases (XX.YY.00) get a brand-new release; patches append their
# notes to the existing minor release instead.
- name: "Step 7: GitHub Release"
if: >-
steps.version.outputs.skip != 'true' &&
steps.check.outputs.tag_exists != 'true'
env:
GH_TOKEN: ${{ secrets.GH_TOKEN || github.token }}
run: |
VERSION="${{ steps.version.outputs.version }}"
TAG="${{ steps.version.outputs.tag }}"
BRANCH="${{ steps.version.outputs.branch }}"
IS_MINOR="${{ steps.version.outputs.is_minor }}"
# Derive the minor version base (XX.YY.00)
MINOR_BASE=$(echo "$VERSION" | sed 's/\.[0-9]*$/.00/')
MINOR_TAG="v${MINOR_BASE}"
# Generated notes, with a minimal fallback if the CLI yields nothing.
NOTES=$(php /tmp/mokostandards/api/cli/release_notes.php --path . --version "$VERSION" 2>/dev/null)
[ -z "$NOTES" ] && NOTES="Release ${VERSION}"
echo "$NOTES" > /tmp/release_notes.md
if [ "$IS_MINOR" = "true" ]; then
# Minor release: create new GitHub Release
gh release create "$TAG" \
--title "${VERSION}" \
--notes-file /tmp/release_notes.md \
--target "$BRANCH"
echo "🚀 Release created: ${VERSION}" >> $GITHUB_STEP_SUMMARY
else
# Patch release: update the existing minor release with new tag
# Find the latest release for this minor version
EXISTING=$(gh release view "$MINOR_TAG" --json tagName -q .tagName 2>/dev/null || true)
if [ -n "$EXISTING" ]; then
# Update existing release body with patch info
# NOTE(review): the patch section is appended on every run — if the
# same patch is rebuilt, its section is duplicated; verify idempotence.
CURRENT_NOTES=$(gh release view "$MINOR_TAG" --json body -q .body 2>/dev/null || true)
{
echo "$CURRENT_NOTES"
echo ""
echo "---"
echo "### Patch ${VERSION}"
echo ""
cat /tmp/release_notes.md
} > /tmp/updated_notes.md
gh release edit "$MINOR_TAG" \
--title "${MINOR_BASE} (latest: ${VERSION})" \
--notes-file /tmp/updated_notes.md
echo "📝 Release updated: ${MINOR_BASE} → patch ${VERSION}" >> $GITHUB_STEP_SUMMARY
else
# No existing minor release found — create one for this patch
gh release create "$TAG" \
--title "${VERSION}" \
--notes-file /tmp/release_notes.md
echo "🚀 Release created: ${VERSION} (no minor release found)" >> $GITHUB_STEP_SUMMARY
fi
fi
# ── Summary ────────────────────────────────────────────────────────
# Runs unconditionally (if: always()) so a summary lands even when an
# earlier step fails.
- name: Pipeline Summary
if: always()
run: |
VERSION="${{ steps.version.outputs.version }}"
if [ "${{ steps.version.outputs.skip }}" = "true" ]; then
echo "## ⏭️ Release Skipped" >> $GITHUB_STEP_SUMMARY
echo "No VERSION in README.md" >> $GITHUB_STEP_SUMMARY
elif [ "${{ steps.check.outputs.already_released }}" = "true" ]; then
echo "## Already Released — ${VERSION}" >> $GITHUB_STEP_SUMMARY
else
echo "" >> $GITHUB_STEP_SUMMARY
echo "## ✅ Build & Release Complete" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "| Step | Result |" >> $GITHUB_STEP_SUMMARY
echo "|------|--------|" >> $GITHUB_STEP_SUMMARY
echo "| Version | \`${VERSION}\` |" >> $GITHUB_STEP_SUMMARY
echo "| Branch | \`${{ steps.version.outputs.branch }}\` |" >> $GITHUB_STEP_SUMMARY
echo "| Tag | \`${{ steps.version.outputs.tag }}\` |" >> $GITHUB_STEP_SUMMARY
echo "| Release | [View](https://github.com/${{ github.repository }}/releases/tag/${{ steps.version.outputs.tag }}) |" >> $GITHUB_STEP_SUMMARY
fi

115
.github/workflows/codeql-analysis.yml vendored Normal file
View File

@@ -0,0 +1,115 @@
# Copyright (C) 2026 Moko Consulting <hello@mokoconsulting.tech>
#
# This file is part of a Moko Consulting project.
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
# FILE INFORMATION
# DEFGROUP: GitHub.Workflow.Template
# INGROUP: MokoStandards.Security
# REPO: https://github.com/mokoconsulting-tech/MokoStandards
# PATH: /templates/workflows/generic/codeql-analysis.yml.template
# VERSION: 04.04.01
# BRIEF: CodeQL security scanning workflow (generic — all repo types)
# NOTE: Deployed to .github/workflows/codeql-analysis.yml in governed repos.
# CodeQL does not support PHP directly; JavaScript scans JSON/YAML/shell.
# For PHP-specific security scanning see standards-compliance.yml.
name: CodeQL Security Scanning

on:
  push:
    branches:
      - main
      - dev/**
      - rc/**
      - version/**
  pull_request:
    branches:
      - main
      - dev/**
      - rc/**
  schedule:
    # Weekly on Monday at 06:00 UTC
    - cron: '0 6 * * 1'
  workflow_dispatch:

permissions:
  actions: read
  contents: read
  security-events: write
  pull-requests: read

jobs:
  analyze:
    name: Analyze (${{ matrix.language }})
    runs-on: ubuntu-latest
    timeout-minutes: 360
    strategy:
      fail-fast: false
      matrix:
        # CodeQL does not support PHP. Use 'javascript' to scan JSON, YAML,
        # and shell scripts. Add 'actions' to scan GitHub Actions workflows.
        language: ['javascript', 'actions']
    steps:
      - name: Checkout repository
        # Pinned to the same checkout v6 SHA used by the other MokoStandards
        # workflows in this sync (auto-release, deploy-demo); the previous
        # pin here lagged behind at v4.2.2.
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
      - name: Initialize CodeQL
        uses: github/codeql-action/init@v3
        with:
          languages: ${{ matrix.language }}
          queries: security-extended,security-and-quality
      - name: Autobuild
        # Effectively a no-op for interpreted languages; kept so the template
        # also works unchanged in repos with compiled code.
        uses: github/codeql-action/autobuild@v3
      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@v3
        with:
          category: "/language:${{ matrix.language }}"
          upload: true
          output: sarif-results
          wait-for-processing: true
      - name: Upload SARIF results
        if: always()
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.5.0
        with:
          name: codeql-results-${{ matrix.language }}
          path: sarif-results
          retention-days: 30
      - name: Step summary
        if: always()
        run: |
          echo "### 🔍 CodeQL — ${{ matrix.language }}" >> "$GITHUB_STEP_SUMMARY"
          echo "" >> "$GITHUB_STEP_SUMMARY"
          URL="https://github.com/${{ github.repository }}/security/code-scanning"
          echo "See the [Security tab]($URL) for findings." >> "$GITHUB_STEP_SUMMARY"
          echo "" >> "$GITHUB_STEP_SUMMARY"
          echo "| Severity | SLA |" >> "$GITHUB_STEP_SUMMARY"
          echo "|----------|-----|" >> "$GITHUB_STEP_SUMMARY"
          echo "| Critical | 7 days |" >> "$GITHUB_STEP_SUMMARY"
          echo "| High | 14 days |" >> "$GITHUB_STEP_SUMMARY"
          echo "| Medium | 30 days |" >> "$GITHUB_STEP_SUMMARY"
          echo "| Low | 60 days / next release |" >> "$GITHUB_STEP_SUMMARY"
  summary:
    name: Security Scan Summary
    runs-on: ubuntu-latest
    needs: analyze
    # always(): report even when a matrix leg fails.
    if: always()
    steps:
      - name: Summary
        run: |
          echo "### 🛡️ CodeQL Complete" >> "$GITHUB_STEP_SUMMARY"
          echo "" >> "$GITHUB_STEP_SUMMARY"
          echo "**Trigger:** ${{ github.event_name }}" >> "$GITHUB_STEP_SUMMARY"
          echo "**Branch:** ${{ github.ref_name }}" >> "$GITHUB_STEP_SUMMARY"
          SECURITY_URL="https://github.com/${{ github.repository }}/security"
          echo "" >> "$GITHUB_STEP_SUMMARY"
          echo "📊 [View all security alerts]($SECURITY_URL)" >> "$GITHUB_STEP_SUMMARY"

732
.github/workflows/deploy-demo.yml vendored Normal file
View File

@@ -0,0 +1,732 @@
# Copyright (C) 2026 Moko Consulting <hello@mokoconsulting.tech>
#
# This file is part of a Moko Consulting project.
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# FILE INFORMATION
# DEFGROUP: GitHub.Workflow
# INGROUP: MokoStandards.Deploy
# REPO: https://github.com/mokoconsulting-tech/MokoStandards
# PATH: /templates/workflows/shared/deploy-demo.yml.template
# VERSION: 04.04.01
# BRIEF: SFTP deployment workflow for demo server — synced to all governed repos
# NOTE: Synced via bulk-repo-sync to .github/workflows/deploy-demo.yml in all governed repos.
# Port is resolved in order: DEMO_FTP_PORT variable → :port suffix in DEMO_FTP_HOST → 22.
name: Deploy to Demo Server (SFTP)
# Deploys the contents of the src/ directory to the demo server via SFTP.
# Triggers on push/merge to main — deploys the production-ready build to the demo server.
#
# Required org-level variables: DEMO_FTP_HOST, DEMO_FTP_PATH, DEMO_FTP_USERNAME
# Optional org-level variable: DEMO_FTP_PORT (auto-detected from host or defaults to 22)
# Optional org/repo variable: DEMO_FTP_SUFFIX — when set, appended to DEMO_FTP_PATH to form the
# full remote destination: DEMO_FTP_PATH/DEMO_FTP_SUFFIX
# Ignore rules: Place a .ftp_ignore file in the repository root. Each non-empty,
# non-comment line is a regex pattern tested against the relative path
# of each file (e.g. "subdir/file.txt"). The .gitignore is also
# respected automatically.
# Required org-level secret: DEMO_FTP_KEY (preferred) or DEMO_FTP_PASSWORD
#
# Access control: only users with admin or maintain role on the repository may deploy.
on:
push:
branches:
- main
- master
paths:
- 'src/**'
- 'htdocs/**'
pull_request:
types: [opened, synchronize, reopened, closed]
branches:
- main
- master
paths:
- 'src/**'
- 'htdocs/**'
workflow_dispatch:
inputs:
clear_remote:
description: 'Delete all files inside the remote destination folder before uploading'
required: false
default: false
type: boolean
permissions:
contents: read
pull-requests: write
env:
FORCE_JAVASCRIPT_ACTIONS_TO_NODE24: true
jobs:
check-permission:
name: Verify Deployment Permission
runs-on: ubuntu-latest
steps:
- name: Check actor permission
env:
# Prefer the org-scoped GH_TOKEN secret (needed for the org membership
# fallback). Falls back to the built-in github.token so the collaborator
# endpoint still works even if GH_TOKEN is not configured.
GH_TOKEN: ${{ secrets.GH_TOKEN || github.token }}
run: |
ACTOR="${{ github.actor }}"
REPO="${{ github.repository }}"
ORG="${{ github.repository_owner }}"
METHOD=""
AUTHORIZED="false"
# Hardcoded authorized users — always allowed to deploy
AUTHORIZED_USERS="jmiller-moko github-actions[bot]"
for user in $AUTHORIZED_USERS; do
if [ "$ACTOR" = "$user" ]; then
AUTHORIZED="true"
METHOD="hardcoded allowlist"
PERMISSION="admin"
break
fi
done
# For other actors, check repo/org permissions via API
if [ "$AUTHORIZED" != "true" ]; then
PERMISSION=$(gh api "repos/${REPO}/collaborators/${ACTOR}/permission" \
--jq '.permission' 2>/dev/null)
METHOD="repo collaborator API"
if [ -z "$PERMISSION" ]; then
ORG_ROLE=$(gh api "orgs/${ORG}/memberships/${ACTOR}" \
--jq '.role' 2>/dev/null)
METHOD="org membership API"
if [ "$ORG_ROLE" = "owner" ]; then
PERMISSION="admin"
else
PERMISSION="none"
fi
fi
case "$PERMISSION" in
admin|maintain) AUTHORIZED="true" ;;
esac
fi
# Write detailed summary
{
echo "## 🔐 Deploy Authorization"
echo ""
echo "| Field | Value |"
echo "|-------|-------|"
echo "| **Actor** | \`${ACTOR}\` |"
echo "| **Repository** | \`${REPO}\` |"
echo "| **Permission** | \`${PERMISSION}\` |"
echo "| **Method** | ${METHOD} |"
echo "| **Authorized** | ${AUTHORIZED} |"
echo "| **Trigger** | \`${{ github.event_name }}\` |"
echo "| **Branch** | \`${{ github.ref_name }}\` |"
echo ""
} >> "$GITHUB_STEP_SUMMARY"
if [ "$AUTHORIZED" = "true" ]; then
echo "✅ ${ACTOR} authorized to deploy (${METHOD})" >> "$GITHUB_STEP_SUMMARY"
else
echo "❌ ${ACTOR} is NOT authorized to deploy." >> "$GITHUB_STEP_SUMMARY"
echo "" >> "$GITHUB_STEP_SUMMARY"
echo "Deployment requires one of:" >> "$GITHUB_STEP_SUMMARY"
echo "- Being in the hardcoded allowlist" >> "$GITHUB_STEP_SUMMARY"
echo "- Having \`admin\` or \`maintain\` role on the repository" >> "$GITHUB_STEP_SUMMARY"
exit 1
fi
deploy:
name: SFTP Deploy → Demo
runs-on: ubuntu-latest
needs: [check-permission]
if: >-
!startsWith(github.head_ref || github.ref_name, 'chore/') &&
(github.event_name == 'workflow_dispatch' ||
github.event_name == 'push' ||
(github.event_name == 'pull_request' &&
(github.event.action == 'opened' ||
github.event.action == 'synchronize' ||
github.event.action == 'reopened' ||
github.event.pull_request.merged == true)))
steps:
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- name: Resolve source directory
id: source
run: |
# Resolve source directory: src/ preferred, htdocs/ as fallback
if [ -d "src" ]; then
SRC="src"
elif [ -d "htdocs" ]; then
SRC="htdocs"
else
echo "⚠️ No src/ or htdocs/ directory found — skipping deployment"
echo "skip=true" >> "$GITHUB_OUTPUT"
exit 0
fi
COUNT=$(find "$SRC" -type f | wc -l)
echo "✅ Source: ${SRC}/ (${COUNT} file(s))"
echo "skip=false" >> "$GITHUB_OUTPUT"
echo "dir=${SRC}" >> "$GITHUB_OUTPUT"
- name: Preview files to deploy
if: steps.source.outputs.skip == 'false'
env:
SOURCE_DIR: ${{ steps.source.outputs.dir }}
run: |
# Dry-run listing of which files will be uploaded vs ignored. This mirrors
# the ignore logic deploy-sftp.php applies natively; it only feeds the
# step summary and never affects the actual transfer.
# ── Convert a gitignore-style glob line to an ERE pattern ──────────────
ftp_ignore_to_regex() {
local line="$1"
local anchored=false
# Strip inline comments and whitespace
line=$(printf '%s' "$line" | sed 's/[[:space:]]*#.*$//' | sed 's/^[[:space:]]*//;s/[[:space:]]*$//')
[ -z "$line" ] && return
# Skip negation patterns (not supported)
[[ "$line" == !* ]] && return
# Trailing slash = directory marker; strip it
line="${line%/}"
# Leading slash = anchored to root; strip it
if [[ "$line" == /* ]]; then
anchored=true
line="${line#/}"
fi
# Escape ERE special chars, then restore glob semantics.
# '**' is first parked in a \x01 placeholder so the single-star
# substitution cannot clobber it, then restored as '.*'.
local regex
regex=$(printf '%s' "$line" \
| sed 's/[.+^${}()|[\\]/\\&/g' \
| sed 's/\\\*\\\*/\x01/g' \
| sed 's/\\\*/[^\/]*/g' \
| sed 's/\x01/.*/g' \
| sed 's/\\\?/[^\/]/g')
if $anchored; then
printf '^%s(/|$)' "$regex"
else
printf '(^|/)%s(/|$)' "$regex"
fi
}
# ── Read .ftp_ignore (gitignore-style globs) ─────────────────────────
IGNORE_PATTERNS=()
IGNORE_SOURCES=()
if [ -f ".ftp_ignore" ]; then
while IFS= read -r line; do
[[ "$line" =~ ^[[:space:]]*$ || "$line" =~ ^[[:space:]]*# ]] && continue
regex=$(ftp_ignore_to_regex "$line")
[ -n "$regex" ] && IGNORE_PATTERNS+=("$regex") && IGNORE_SOURCES+=("$line")
done < ".ftp_ignore"
fi
# ── Walk the source dir and classify every file ──────────────────────
WILL_UPLOAD=()
IGNORED_FILES=()
while IFS= read -r -d '' file; do
rel="${file#${SOURCE_DIR}/}"
SKIP=false
for i in "${!IGNORE_PATTERNS[@]}"; do
if echo "$rel" | grep -qE "${IGNORE_PATTERNS[$i]}" 2>/dev/null; then
IGNORED_FILES+=("$rel | .ftp_ignore \`${IGNORE_SOURCES[$i]}\`")
SKIP=true; break
fi
done
$SKIP && continue
if [ -f ".gitignore" ]; then
# Check the repo-relative path ("src/foo.php"), not the SOURCE_DIR-relative
# one: git check-ignore resolves pathnames against the working directory
# (the repo root here), so testing "$rel" would match anchored .gitignore
# patterns against the wrong path.
git check-ignore -q "$file" 2>/dev/null && {
IGNORED_FILES+=("$rel | .gitignore")
continue
} || true
fi
WILL_UPLOAD+=("$rel")
done < <(find "$SOURCE_DIR" -type f -print0 | sort -z)
UPLOAD_COUNT="${#WILL_UPLOAD[@]}"
IGNORE_COUNT="${#IGNORED_FILES[@]}"
echo " ${UPLOAD_COUNT} file(s) will be uploaded, ${IGNORE_COUNT} ignored"
# ── Write deployment preview to step summary ──────────────────────────
{
echo "## 📋 Deployment Preview"
echo ""
echo "| Field | Value |"
echo "|---|---|"
echo "| Source | \`${SOURCE_DIR}/\` |"
echo "| Files to upload | **${UPLOAD_COUNT}** |"
echo "| Files ignored | **${IGNORE_COUNT}** |"
echo ""
if [ "${UPLOAD_COUNT}" -gt 0 ]; then
echo "### 📂 Files that will be uploaded"
echo '```'
printf '%s\n' "${WILL_UPLOAD[@]}"
echo '```'
echo ""
fi
if [ "${IGNORE_COUNT}" -gt 0 ]; then
echo "### ⏭️ Files excluded"
echo "| File | Reason |"
echo "|---|---|"
for entry in "${IGNORED_FILES[@]}"; do
f="${entry% | *}"; r="${entry##* | }"
echo "| \`${f}\` | ${r} |"
done
echo ""
fi
} >> "$GITHUB_STEP_SUMMARY"
- name: Resolve SFTP host and port
if: steps.source.outputs.skip == 'false'
id: conn
env:
HOST_RAW: ${{ vars.DEMO_FTP_HOST }}
PORT_VAR: ${{ vars.DEMO_FTP_PORT }}
run: |
# Resolve the SFTP endpoint. The port may come from a dedicated variable,
# from a host:port suffix, or fall back to the protocol default (22).
HOST="$HOST_RAW"
PORT="$PORT_VAR"
# Priority 1 — explicit DEMO_FTP_PORT variable
if [ -n "$PORT" ]; then
echo " Using explicit DEMO_FTP_PORT=${PORT}"
# Priority 2 — port embedded in DEMO_FTP_HOST (host:port)
elif [[ "$HOST" == *:* ]]; then
PORT="${HOST##*:}"
HOST="${HOST%:*}"
echo " Extracted port ${PORT} from DEMO_FTP_HOST"
# Priority 3 — SFTP default
else
PORT="22"
echo " No port specified — defaulting to SFTP port 22"
fi
echo "host=${HOST}" >> "$GITHUB_OUTPUT"
echo "port=${PORT}" >> "$GITHUB_OUTPUT"
echo "SFTP target: ${HOST}:${PORT}"
- name: Build remote path
if: steps.source.outputs.skip == 'false'
id: remote
env:
DEMO_FTP_PATH: ${{ vars.DEMO_FTP_PATH }}
DEMO_FTP_SUFFIX: ${{ vars.DEMO_FTP_SUFFIX }}
run: |
# Compose the remote destination as DEMO_FTP_PATH/DEMO_FTP_SUFFIX, then
# apply platform-specific guards before anything can upload to it.
BASE="$DEMO_FTP_PATH"
if [ -z "$BASE" ]; then
echo "❌ DEMO_FTP_PATH is not set."
echo " Configure it as an org-level variable (Settings → Variables) and"
echo " ensure this repository has been granted access to it."
exit 1
fi
# DEMO_FTP_SUFFIX is required — it identifies the remote subdirectory for this repo.
# Without it we cannot safely determine the deployment target.
if [ -z "$DEMO_FTP_SUFFIX" ]; then
echo "⏭️ DEMO_FTP_SUFFIX variable is not set — skipping deployment."
echo " Set DEMO_FTP_SUFFIX as a repo or org variable to enable deploy-demo."
echo "skip=true" >> "$GITHUB_OUTPUT"
echo "path=" >> "$GITHUB_OUTPUT"
exit 0
fi
REMOTE="${BASE%/}/${DEMO_FTP_SUFFIX#/}"
# ── Platform-specific path safety guards ──────────────────────────────
PLATFORM=""
MOKO_FILE=".github/.mokostandards"; [ ! -f "$MOKO_FILE" ] && MOKO_FILE=".mokostandards"; if [ -f "$MOKO_FILE" ]; then
# Extract the value after "platform:" and strip surrounding quotes.
# NOTE(review): if the file exists but has no "platform:" line, grep exits
# non-zero and the runner's default -e/pipefail shell fails the step —
# confirm .mokostandards always carries a platform key.
PLATFORM=$(grep -E '^platform:' "$MOKO_FILE" | sed 's/.*:[[:space:]]*//' | tr -d '"')
fi
if [ "$PLATFORM" = "crm-module" ]; then
# Dolibarr modules must deploy under htdocs/custom/ — guard against
# accidentally overwriting server root or unrelated directories.
if [[ "$REMOTE" != *custom* ]]; then
echo "❌ Safety check failed: Dolibarr (crm-module) remote path must contain 'custom'."
echo " Current path: ${REMOTE}"
echo " Set DEMO_FTP_SUFFIX to the module's htdocs/custom/ subdirectory."
exit 1
fi
fi
if [ "$PLATFORM" = "waas-component" ]; then
# Joomla extensions may only deploy to the server's tmp/ directory.
if [[ "$REMOTE" != *tmp* ]]; then
echo "❌ Safety check failed: Joomla (waas-component) remote path must contain 'tmp'."
echo " Current path: ${REMOTE}"
echo " Set DEMO_FTP_SUFFIX to a path under the server tmp/ directory."
exit 1
fi
fi
echo " Remote path: ${REMOTE}"
echo "path=${REMOTE}" >> "$GITHUB_OUTPUT"
- name: Detect SFTP authentication method
if: steps.source.outputs.skip == 'false' && steps.remote.outputs.skip != 'true'
id: auth
env:
# These env vars hold the actual secret values; the script below only
# tests whether each is non-empty to pick an auth strategy.
HAS_KEY: ${{ secrets.DEMO_FTP_KEY }}
HAS_PASSWORD: ${{ secrets.DEMO_FTP_PASSWORD }}
run: |
if [ -n "$HAS_KEY" ] && [ -n "$HAS_PASSWORD" ]; then
# Both set: key auth with password as passphrase; falls back to password-only if key fails
echo "method=key" >> "$GITHUB_OUTPUT"
echo "use_passphrase=true" >> "$GITHUB_OUTPUT"
echo "has_password=true" >> "$GITHUB_OUTPUT"
echo " Primary: SSH key + passphrase (DEMO_FTP_KEY / DEMO_FTP_PASSWORD)"
echo " Fallback: password-only auth if key authentication fails"
elif [ -n "$HAS_KEY" ]; then
# Key only: no passphrase, no password fallback
echo "method=key" >> "$GITHUB_OUTPUT"
echo "use_passphrase=false" >> "$GITHUB_OUTPUT"
echo "has_password=false" >> "$GITHUB_OUTPUT"
echo " Using SSH key authentication (DEMO_FTP_KEY, no passphrase, no fallback)"
elif [ -n "$HAS_PASSWORD" ]; then
# Password only: direct SFTP password auth
echo "method=password" >> "$GITHUB_OUTPUT"
echo "use_passphrase=false" >> "$GITHUB_OUTPUT"
echo "has_password=true" >> "$GITHUB_OUTPUT"
echo " Using password authentication (DEMO_FTP_PASSWORD)"
else
echo "❌ No SFTP credentials configured."
echo " Set DEMO_FTP_KEY (preferred) or DEMO_FTP_PASSWORD as an org-level secret."
exit 1
fi
- name: Setup PHP
if: steps.source.outputs.skip == 'false' && steps.remote.outputs.skip != 'true'
uses: shivammathur/setup-php@fcafdd6392932010c2bd5094439b8e33be2a8a09 # v2.37.0
with:
php-version: '8.1'
tools: composer
- name: Setup MokoStandards deploy tools
if: steps.source.outputs.skip == 'false' && steps.remote.outputs.skip != 'true'
env:
GH_TOKEN: ${{ secrets.GH_TOKEN || github.token }}
COMPOSER_AUTH: '{"github-oauth":{"github.com":"${{ secrets.GH_TOKEN || github.token }}"}}'
run: |
# Clone the pinned MokoStandards release branch and install its runtime
# dependencies (phpseclib, etc.) for the deploy scripts used below.
git clone --depth 1 --branch version/04.04 --quiet \
"https://x-access-token:${GH_TOKEN}@github.com/mokoconsulting-tech/MokoStandards.git" \
/tmp/mokostandards
cd /tmp/mokostandards
composer install --no-dev --no-interaction --quiet
- name: Clear remote destination folder (manual only)
if: >-
steps.source.outputs.skip == 'false' &&
steps.remote.outputs.skip != 'true' &&
inputs.clear_remote == true
env:
SFTP_HOST: ${{ steps.conn.outputs.host }}
SFTP_PORT: ${{ steps.conn.outputs.port }}
SFTP_USER: ${{ vars.DEMO_FTP_USERNAME }}
SFTP_KEY: ${{ secrets.DEMO_FTP_KEY }}
SFTP_PASSWORD: ${{ secrets.DEMO_FTP_PASSWORD }}
AUTH_METHOD: ${{ steps.auth.outputs.method }}
USE_PASSPHRASE: ${{ steps.auth.outputs.use_passphrase }}
HAS_PASSWORD: ${{ steps.auth.outputs.has_password }}
REMOTE_PATH: ${{ steps.remote.outputs.path }}
run: |
# Destructive step, gated on the manual clear_remote input. The heredoc
# delimiter is quoted ('PHPEOF') so the PHP source is written verbatim with
# no shell expansion; all inputs reach PHP via the env block above.
cat > /tmp/moko_clear.php << 'PHPEOF'
<?php
declare(strict_types=1);
require '/tmp/mokostandards/vendor/autoload.php';
use phpseclib3\Net\SFTP;
use phpseclib3\Crypt\PublicKeyLoader;
$host = (string) getenv('SFTP_HOST');
$port = (int) getenv('SFTP_PORT');
$username = (string) getenv('SFTP_USER');
$authMethod = (string) getenv('AUTH_METHOD');
$usePassphrase = getenv('USE_PASSPHRASE') === 'true';
$hasPassword = getenv('HAS_PASSWORD') === 'true';
$remotePath = rtrim((string) getenv('REMOTE_PATH'), '/');
echo "⚠️ Clearing remote folder: {$remotePath}\n";
$sftp = new SFTP($host, $port);
// ── Authentication ──────────────────────────────────────────────
// Mirrors the strategy chosen by the "Detect SFTP authentication
// method" step: key (optionally passphrase-protected) with an
// optional password fallback, or plain password auth.
if ($authMethod === 'key') {
$keyData = (string) getenv('SFTP_KEY');
$passphrase = $usePassphrase ? (string) getenv('SFTP_PASSWORD') : false;
$password = $hasPassword ? (string) getenv('SFTP_PASSWORD') : '';
$key = PublicKeyLoader::load($keyData, $passphrase);
if (!$sftp->login($username, $key)) {
if ($password !== '') {
echo "⚠️ Key auth failed — falling back to password\n";
if (!$sftp->login($username, $password)) {
fwrite(STDERR, "❌ Both key and password authentication failed\n");
exit(1);
}
echo "✅ Connected via password authentication (key fallback)\n";
} else {
fwrite(STDERR, "❌ Key authentication failed and no password fallback is available\n");
exit(1);
}
} else {
echo "✅ Connected via SSH key authentication\n";
}
} else {
if (!$sftp->login($username, (string) getenv('SFTP_PASSWORD'))) {
fwrite(STDERR, "❌ Password authentication failed\n");
exit(1);
}
echo "✅ Connected via password authentication\n";
}
// ── Recursive delete ────────────────────────────────────────────
// Depth-first removal of everything under $path; the path itself
// is removed implicitly only via the caller's later rmdir of subdirs,
// so the root stays in place (and is re-created below if missing).
function rmrf(SFTP $sftp, string $path): void
{
$entries = $sftp->nlist($path);
if ($entries === false) {
return; // path does not exist — nothing to clear
}
foreach ($entries as $name) {
if ($name === '.' || $name === '..') {
continue;
}
$entry = "{$path}/{$name}";
if ($sftp->is_dir($entry)) {
rmrf($sftp, $entry);
$sftp->rmdir($entry);
echo " 🗑️ Removed dir: {$entry}\n";
} else {
$sftp->delete($entry);
echo " 🗑️ Removed file: {$entry}\n";
}
}
}
// ── Create remote directory tree ────────────────────────────────
// mkdir is attempted for each path component; failures for
// already-existing components are intentionally ignored.
function sftpMakedirs(SFTP $sftp, string $path): void
{
// NOTE(review): both ternary branches are '' — a relative $path would be
// rebuilt as absolute ('/part/...'). Harmless if REMOTE_PATH is always
// absolute; confirm before relying on relative remote paths.
$parts = array_values(array_filter(explode('/', $path), fn(string $p) => $p !== ''));
$current = str_starts_with($path, '/') ? '' : '';
foreach ($parts as $part) {
$current .= '/' . $part;
$sftp->mkdir($current); // silently returns false if already exists
}
}
rmrf($sftp, $remotePath);
sftpMakedirs($sftp, $remotePath);
echo "✅ Remote folder ready: {$remotePath}\n";
PHPEOF
php /tmp/moko_clear.php
- name: Deploy via SFTP
if: steps.source.outputs.skip == 'false' && steps.remote.outputs.skip != 'true'
env:
SFTP_HOST: ${{ steps.conn.outputs.host }}
SFTP_PORT: ${{ steps.conn.outputs.port }}
SFTP_USER: ${{ vars.DEMO_FTP_USERNAME }}
SFTP_KEY: ${{ secrets.DEMO_FTP_KEY }}
SFTP_PASSWORD: ${{ secrets.DEMO_FTP_PASSWORD }}
AUTH_METHOD: ${{ steps.auth.outputs.method }}
USE_PASSPHRASE: ${{ steps.auth.outputs.use_passphrase }}
REMOTE_PATH: ${{ steps.remote.outputs.path }}
SOURCE_DIR: ${{ steps.source.outputs.dir }}
run: |
# ── Write SSH key to temp file (key auth only) ────────────────────────
if [ "$AUTH_METHOD" = "key" ]; then
printf '%s' "$SFTP_KEY" > /tmp/deploy_key
chmod 600 /tmp/deploy_key
fi
# ── Generate sftp-config.json safely via jq ───────────────────────────
# jq --arg handles quoting/escaping so credentials and paths cannot break
# the JSON. NOTE(review): --argjson requires a numeric port; a garbled
# host:port extraction would make jq fail here — confirm acceptable.
if [ "$AUTH_METHOD" = "key" ]; then
jq -n \
--arg host "$SFTP_HOST" \
--argjson port "${SFTP_PORT:-22}" \
--arg user "$SFTP_USER" \
--arg path "$REMOTE_PATH" \
--arg key "/tmp/deploy_key" \
'{host:$host, port:$port, user:$user, remote_path:$path, ssh_key_file:$key}' \
> /tmp/sftp-config.json
else
jq -n \
--arg host "$SFTP_HOST" \
--argjson port "${SFTP_PORT:-22}" \
--arg user "$SFTP_USER" \
--arg path "$REMOTE_PATH" \
--arg pass "$SFTP_PASSWORD" \
'{host:$host, port:$port, user:$user, remote_path:$path, password:$pass}' \
> /tmp/sftp-config.json
fi
# ── Write update files (demo = stable) ─────────────────────────────
# For Dolibarr (crm-module) a plain update.txt with the version string;
# for Joomla (waas-component) an update.xml built from the extension
# manifest so the demo server can act as an update site.
PLATFORM=$(php /tmp/mokostandards/api/cli/platform_detect.php --path . 2>/dev/null || true)
VERSION=$(php /tmp/mokostandards/api/cli/version_read.php --path . 2>/dev/null || echo "unknown")
REPO="${{ github.repository }}"
if [ "$PLATFORM" = "crm-module" ]; then
printf '%s' "$VERSION" > update.txt
fi
if [ "$PLATFORM" = "waas-component" ]; then
# First XML file (depth ≤ 2) containing an <extension> root is treated
# as the Joomla manifest; metadata below falls back to sane defaults.
MANIFEST=$(find . -maxdepth 2 -name "*.xml" -exec grep -l '<extension' {} \; 2>/dev/null | head -1 || true)
if [ -n "$MANIFEST" ]; then
EXT_NAME=$(grep -oP '<name>\K[^<]+' "$MANIFEST" 2>/dev/null | head -1 || echo "${{ github.event.repository.name }}")
EXT_TYPE=$(grep -oP '<extension[^>]+type="\K[^"]+' "$MANIFEST" 2>/dev/null || echo "component")
EXT_ELEMENT=$(grep -oP '<element>\K[^<]+' "$MANIFEST" 2>/dev/null | head -1 || basename "$MANIFEST" .xml)
EXT_CLIENT=$(grep -oP '<extension[^>]+client="\K[^"]+' "$MANIFEST" 2>/dev/null || echo "")
EXT_FOLDER=$(grep -oP '<extension[^>]+group="\K[^"]+' "$MANIFEST" 2>/dev/null || echo "")
TARGET_PLATFORM=$(grep -oP '<targetplatform[^/]*/' "$MANIFEST" 2>/dev/null | head -1 || true)
[ -n "$TARGET_PLATFORM" ] && TARGET_PLATFORM="${TARGET_PLATFORM}>"
[ -z "$TARGET_PLATFORM" ] && TARGET_PLATFORM=$(printf '<targetplatform name="joomla" version="5.*" %s>' "/")
CLIENT_TAG=""
if [ -n "$EXT_CLIENT" ]; then CLIENT_TAG="<client>${EXT_CLIENT}</client>"; elif [ "$EXT_TYPE" = "module" ] || [ "$EXT_TYPE" = "plugin" ]; then CLIENT_TAG="<client>site</client>"; fi
FOLDER_TAG=""
if [ -n "$EXT_FOLDER" ] && [ "$EXT_TYPE" = "plugin" ]; then FOLDER_TAG="<folder>${EXT_FOLDER}</folder>"; fi
DOWNLOAD_URL="https://github.com/${REPO}/releases/download/v${VERSION}/${EXT_ELEMENT}-${VERSION}.zip"
{
printf '%s\n' '<?xml version="1.0" encoding="utf-8"?>'
printf '%s\n' '<updates>'
printf '%s\n' ' <update>'
printf '%s\n' " <name>${EXT_NAME}</name>"
printf '%s\n' " <description>${EXT_NAME} update</description>"
printf '%s\n' " <element>${EXT_ELEMENT}</element>"
printf '%s\n' " <type>${EXT_TYPE}</type>"
printf '%s\n' " <version>${VERSION}</version>"
[ -n "$CLIENT_TAG" ] && printf '%s\n' " ${CLIENT_TAG}"
[ -n "$FOLDER_TAG" ] && printf '%s\n' " ${FOLDER_TAG}"
printf '%s\n' ' <tags>'
printf '%s\n' ' <tag>stable</tag>'
printf '%s\n' ' </tags>'
printf '%s\n' " <infourl title=\"${EXT_NAME}\">https://github.com/${REPO}</infourl>"
printf '%s\n' ' <downloads>'
printf '%s\n' " <downloadurl type=\"full\" format=\"zip\">${DOWNLOAD_URL}</downloadurl>"
printf '%s\n' ' </downloads>'
printf '%s\n' " ${TARGET_PLATFORM}"
printf '%s\n' ' <maintainer>Moko Consulting</maintainer>'
printf '%s\n' ' <maintainerurl>https://mokoconsulting.tech</maintainerurl>'
printf '%s\n' ' </update>'
printf '%s\n' '</updates>'
} > update.xml
fi
fi
# ── Run deploy-sftp.php from MokoStandards ────────────────────────────
DEPLOY_ARGS=(--path . --src-dir "$SOURCE_DIR" --config /tmp/sftp-config.json)
if [ "$USE_PASSPHRASE" = "true" ]; then
DEPLOY_ARGS+=(--key-passphrase "$SFTP_PASSWORD")
fi
php /tmp/mokostandards/api/deploy/deploy-sftp.php "${DEPLOY_ARGS[@]}"
# (deploy-sftp.php handles dotfile skipping and .ftp_ignore natively)
# Remove temp files that should never be left behind
rm -f /tmp/deploy_key /tmp/sftp-config.json
- name: Create or update failure issue
if: failure() && steps.remote.outputs.skip != 'true'
env:
GH_TOKEN: ${{ secrets.GH_TOKEN || github.token }}
# Context values are passed through env instead of being interpolated
# directly into the run body, so attacker-influenced strings (notably
# branch names) cannot inject shell syntax into this script.
REPO: ${{ github.repository }}
RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}
ACTOR: ${{ github.actor }}
BRANCH: ${{ github.ref_name }}
EVENT: ${{ github.event_name }}
run: |
NOW=$(date -u '+%Y-%m-%d %H:%M:%S UTC')
LABEL="deploy-failure"
TITLE="fix: Demo deployment failed — ${REPO}"
BODY="## Demo Deployment Failed
A deployment to the demo server failed and requires attention.
| Field | Value |
|-------|-------|
| **Repository** | \`${REPO}\` |
| **Branch** | \`${BRANCH}\` |
| **Trigger** | ${EVENT} |
| **Actor** | @${ACTOR} |
| **Failed at** | ${NOW} |
| **Run** | [View workflow run](${RUN_URL}) |
### Next steps
1. Review the [workflow run log](${RUN_URL}) for the specific error.
2. Fix the underlying issue (credentials, SFTP connectivity, permissions).
3. Re-trigger the deployment via **Actions → Deploy to Demo Server → Run workflow**.
---
*Auto-created by deploy-demo.yml — close this issue once the deployment is resolved.*"
# Ensure the label exists (idempotent — no-op if already present)
gh label create "$LABEL" \
--repo "$REPO" \
--color "CC0000" \
--description "Automated deploy failure tracking" \
--force 2>/dev/null || true
# Find the most recent deploy-failure issue in ANY state: it is reopened
# and updated in place so repeated failures do not spam new issues.
# '|| true' keeps a transient API error from masking the real deploy
# failure under the runner's default 'bash -e' shell.
EXISTING=$(gh api "repos/${REPO}/issues?labels=${LABEL}&state=all&per_page=1&sort=created&direction=desc" \
--jq '.[0].number' 2>/dev/null || true)
if [ -n "$EXISTING" ] && [ "$EXISTING" != "null" ]; then
gh api "repos/${REPO}/issues/${EXISTING}" \
-X PATCH \
-f title="$TITLE" \
-f body="$BODY" \
-f state="open" \
--silent
echo "📋 Failure issue #${EXISTING} updated/reopened: ${REPO}" >> "$GITHUB_STEP_SUMMARY"
else
gh issue create \
--repo "$REPO" \
--title "$TITLE" \
--body "$BODY" \
--label "$LABEL" \
--assignee "jmiller-moko" \
| tee -a "$GITHUB_STEP_SUMMARY"
fi
- name: Deployment summary
if: always()
run: |
# Always-run step: writes a success/failure/skip summary to the job page.
# All interpolated values here are workflow-controlled step outputs.
if [ "${{ steps.source.outputs.skip }}" == "true" ]; then
echo "### ⏭️ Deployment Skipped" >> "$GITHUB_STEP_SUMMARY"
echo "" >> "$GITHUB_STEP_SUMMARY"
echo "No \`src/\` directory found in this repository." >> "$GITHUB_STEP_SUMMARY"
elif [ "${{ job.status }}" == "success" ]; then
echo "" >> "$GITHUB_STEP_SUMMARY"
echo "### ✅ Demo Deployment Successful" >> "$GITHUB_STEP_SUMMARY"
echo "" >> "$GITHUB_STEP_SUMMARY"
echo "| Field | Value |" >> "$GITHUB_STEP_SUMMARY"
echo "|-------|-------|" >> "$GITHUB_STEP_SUMMARY"
echo "| Host | \`${{ steps.conn.outputs.host }}:${{ steps.conn.outputs.port }}\` |" >> "$GITHUB_STEP_SUMMARY"
echo "| Remote path | \`${{ steps.remote.outputs.path }}\` |" >> "$GITHUB_STEP_SUMMARY"
echo "| Source | \`src/\` |" >> "$GITHUB_STEP_SUMMARY"
echo "| Trigger | ${{ github.event_name }} |" >> "$GITHUB_STEP_SUMMARY"
echo "| Auth | ${{ steps.auth.outputs.method }} |" >> "$GITHUB_STEP_SUMMARY"
echo "| Clear remote | ${{ inputs.clear_remote || 'false' }} |" >> "$GITHUB_STEP_SUMMARY"
else
echo "### ❌ Demo Deployment Failed" >> "$GITHUB_STEP_SUMMARY"
echo "" >> "$GITHUB_STEP_SUMMARY"
echo "Check the job log above for error details." >> "$GITHUB_STEP_SUMMARY"
fi

757
.github/workflows/deploy-dev.yml vendored Normal file
View File

@@ -0,0 +1,757 @@
# Copyright (C) 2026 Moko Consulting <hello@mokoconsulting.tech>
#
# This file is part of a Moko Consulting project.
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# FILE INFORMATION
# DEFGROUP: GitHub.Workflow
# INGROUP: MokoStandards.Deploy
# REPO: https://github.com/mokoconsulting-tech/MokoStandards
# PATH: /templates/workflows/shared/deploy-dev.yml.template
# VERSION: 04.04.01
# BRIEF: SFTP deployment workflow for development server — synced to all governed repos
# NOTE: Synced via bulk-repo-sync to .github/workflows/deploy-dev.yml in all governed repos.
# Port is resolved in order: DEV_FTP_PORT variable → :port suffix in DEV_FTP_HOST → 22.
name: Deploy to Dev Server (SFTP)
# Deploys the contents of the src/ directory to the development server via SFTP.
# Triggers on every pull_request to development branches (so the dev server always
# reflects the latest PR state) and on push/merge to main branches.
#
# Required org-level variables: DEV_FTP_HOST, DEV_FTP_PATH, DEV_FTP_USERNAME
# Optional org-level variable: DEV_FTP_PORT (auto-detected from host or defaults to 22)
# Optional org/repo variable: DEV_FTP_SUFFIX — when set, appended to DEV_FTP_PATH to form the
# full remote destination: DEV_FTP_PATH/DEV_FTP_SUFFIX
# Ignore rules: Place a .ftp_ignore file in the repository root. Each non-empty,
# non-comment line is a regex pattern tested against the relative path
# of each file (e.g. "subdir/file.txt"). The .gitignore is also
# respected automatically.
# Required org-level secret: DEV_FTP_KEY (preferred) or DEV_FTP_PASSWORD
#
# Access control: only users with admin or maintain role on the repository may deploy.
on:
push:
branches:
- 'dev/**'
- 'rc/**'
- develop
- development
paths:
- 'src/**'
- 'htdocs/**'
pull_request:
types: [opened, synchronize, reopened, closed]
branches:
- 'dev/**'
- 'rc/**'
- develop
- development
paths:
- 'src/**'
- 'htdocs/**'
workflow_dispatch:
inputs:
clear_remote:
description: 'Delete all files inside the remote destination folder before uploading'
required: false
default: false
type: boolean
permissions:
contents: read
pull-requests: write
env:
FORCE_JAVASCRIPT_ACTIONS_TO_NODE24: true
jobs:
check-permission:
name: Verify Deployment Permission
runs-on: ubuntu-latest
steps:
- name: Check actor permission
env:
# Prefer the org-scoped GH_TOKEN secret (needed for the org membership
# fallback). Falls back to the built-in github.token so the collaborator
# endpoint still works even if GH_TOKEN is not configured.
GH_TOKEN: ${{ secrets.GH_TOKEN || github.token }}
# Context values are passed through env (not interpolated inline) so
# attacker-influenced strings such as branch names cannot inject shell
# syntax into this script.
ACTOR: ${{ github.actor }}
REPO: ${{ github.repository }}
ORG: ${{ github.repository_owner }}
EVENT_NAME: ${{ github.event_name }}
REF_NAME: ${{ github.ref_name }}
run: |
METHOD=""
AUTHORIZED="false"
# Hardcoded authorized users — always allowed to deploy
AUTHORIZED_USERS="jmiller-moko github-actions[bot]"
for user in $AUTHORIZED_USERS; do
if [ "$ACTOR" = "$user" ]; then
AUTHORIZED="true"
METHOD="hardcoded allowlist"
PERMISSION="admin"
break
fi
done
# For other actors, check repo/org permissions via API.
# '|| true' is required: the runner's default 'bash -e' shell would
# otherwise abort the whole step when the API call fails (e.g. the
# actor is not a collaborator), making the org-membership fallback
# below unreachable.
if [ "$AUTHORIZED" != "true" ]; then
PERMISSION=$(gh api "repos/${REPO}/collaborators/${ACTOR}/permission" \
--jq '.permission' 2>/dev/null || true)
METHOD="repo collaborator API"
if [ -z "$PERMISSION" ]; then
ORG_ROLE=$(gh api "orgs/${ORG}/memberships/${ACTOR}" \
--jq '.role' 2>/dev/null || true)
METHOD="org membership API"
if [ "$ORG_ROLE" = "owner" ]; then
PERMISSION="admin"
else
PERMISSION="none"
fi
fi
case "$PERMISSION" in
admin|maintain) AUTHORIZED="true" ;;
esac
fi
# Write detailed summary
{
echo "## 🔐 Deploy Authorization"
echo ""
echo "| Field | Value |"
echo "|-------|-------|"
echo "| **Actor** | \`${ACTOR}\` |"
echo "| **Repository** | \`${REPO}\` |"
echo "| **Permission** | \`${PERMISSION}\` |"
echo "| **Method** | ${METHOD} |"
echo "| **Authorized** | ${AUTHORIZED} |"
echo "| **Trigger** | \`${EVENT_NAME}\` |"
echo "| **Branch** | \`${REF_NAME}\` |"
echo ""
} >> "$GITHUB_STEP_SUMMARY"
if [ "$AUTHORIZED" = "true" ]; then
echo "✅ ${ACTOR} authorized to deploy (${METHOD})" >> "$GITHUB_STEP_SUMMARY"
else
echo "❌ ${ACTOR} is NOT authorized to deploy." >> "$GITHUB_STEP_SUMMARY"
echo "" >> "$GITHUB_STEP_SUMMARY"
echo "Deployment requires one of:" >> "$GITHUB_STEP_SUMMARY"
echo "- Being in the hardcoded allowlist" >> "$GITHUB_STEP_SUMMARY"
echo "- Having \`admin\` or \`maintain\` role on the repository" >> "$GITHUB_STEP_SUMMARY"
exit 1
fi
deploy:
name: SFTP Deploy → Dev
runs-on: ubuntu-latest
needs: [check-permission]
# Skip chore/* branches entirely; otherwise run for manual dispatch,
# pushes, and PR events that represent live code (opened / synchronize /
# reopened) or a completed merge.
if: >-
!startsWith(github.head_ref || github.ref_name, 'chore/') &&
(github.event_name == 'workflow_dispatch' ||
github.event_name == 'push' ||
(github.event_name == 'pull_request' &&
(github.event.action == 'opened' ||
github.event.action == 'synchronize' ||
github.event.action == 'reopened' ||
github.event.pull_request.merged == true)))
steps:
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- name: Resolve source directory
id: source
run: |
# Resolve source directory: src/ preferred, htdocs/ as fallback
if [ -d "src" ]; then
SRC="src"
elif [ -d "htdocs" ]; then
SRC="htdocs"
else
echo "⚠️ No src/ or htdocs/ directory found — skipping deployment"
echo "skip=true" >> "$GITHUB_OUTPUT"
exit 0
fi
COUNT=$(find "$SRC" -type f | wc -l)
echo "✅ Source: ${SRC}/ (${COUNT} file(s))"
echo "skip=false" >> "$GITHUB_OUTPUT"
echo "dir=${SRC}" >> "$GITHUB_OUTPUT"
- name: Preview files to deploy
if: steps.source.outputs.skip == 'false'
env:
SOURCE_DIR: ${{ steps.source.outputs.dir }}
run: |
# Dry-run listing of which files will be uploaded vs ignored. This mirrors
# the ignore logic deploy-sftp.php applies natively; it only feeds the
# step summary and never affects the actual transfer.
# ── Convert a gitignore-style glob line to an ERE pattern ──────────────
ftp_ignore_to_regex() {
local line="$1"
local anchored=false
# Strip inline comments and whitespace
line=$(printf '%s' "$line" | sed 's/[[:space:]]*#.*$//' | sed 's/^[[:space:]]*//;s/[[:space:]]*$//')
[ -z "$line" ] && return
# Skip negation patterns (not supported)
[[ "$line" == !* ]] && return
# Trailing slash = directory marker; strip it
line="${line%/}"
# Leading slash = anchored to root; strip it
if [[ "$line" == /* ]]; then
anchored=true
line="${line#/}"
fi
# Escape ERE special chars, then restore glob semantics.
# '**' is first parked in a \x01 placeholder so the single-star
# substitution cannot clobber it, then restored as '.*'.
local regex
regex=$(printf '%s' "$line" \
| sed 's/[.+^${}()|[\\]/\\&/g' \
| sed 's/\\\*\\\*/\x01/g' \
| sed 's/\\\*/[^\/]*/g' \
| sed 's/\x01/.*/g' \
| sed 's/\\\?/[^\/]/g')
if $anchored; then
printf '^%s(/|$)' "$regex"
else
printf '(^|/)%s(/|$)' "$regex"
fi
}
# ── Read .ftp_ignore (gitignore-style globs) ─────────────────────────
IGNORE_PATTERNS=()
IGNORE_SOURCES=()
if [ -f ".ftp_ignore" ]; then
while IFS= read -r line; do
[[ "$line" =~ ^[[:space:]]*$ || "$line" =~ ^[[:space:]]*# ]] && continue
regex=$(ftp_ignore_to_regex "$line")
[ -n "$regex" ] && IGNORE_PATTERNS+=("$regex") && IGNORE_SOURCES+=("$line")
done < ".ftp_ignore"
fi
# ── Walk the source dir and classify every file ──────────────────────
WILL_UPLOAD=()
IGNORED_FILES=()
while IFS= read -r -d '' file; do
rel="${file#${SOURCE_DIR}/}"
SKIP=false
for i in "${!IGNORE_PATTERNS[@]}"; do
if echo "$rel" | grep -qE "${IGNORE_PATTERNS[$i]}" 2>/dev/null; then
IGNORED_FILES+=("$rel | .ftp_ignore \`${IGNORE_SOURCES[$i]}\`")
SKIP=true; break
fi
done
$SKIP && continue
if [ -f ".gitignore" ]; then
# Check the repo-relative path ("src/foo.php"), not the SOURCE_DIR-relative
# one: git check-ignore resolves pathnames against the working directory
# (the repo root here), so testing "$rel" would match anchored .gitignore
# patterns against the wrong path.
git check-ignore -q "$file" 2>/dev/null && {
IGNORED_FILES+=("$rel | .gitignore")
continue
} || true
fi
WILL_UPLOAD+=("$rel")
done < <(find "$SOURCE_DIR" -type f -print0 | sort -z)
UPLOAD_COUNT="${#WILL_UPLOAD[@]}"
IGNORE_COUNT="${#IGNORED_FILES[@]}"
echo " ${UPLOAD_COUNT} file(s) will be uploaded, ${IGNORE_COUNT} ignored"
# ── Write deployment preview to step summary ──────────────────────────
{
echo "## 📋 Deployment Preview"
echo ""
echo "| Field | Value |"
echo "|---|---|"
echo "| Source | \`${SOURCE_DIR}/\` |"
echo "| Files to upload | **${UPLOAD_COUNT}** |"
echo "| Files ignored | **${IGNORE_COUNT}** |"
echo ""
if [ "${UPLOAD_COUNT}" -gt 0 ]; then
echo "### 📂 Files that will be uploaded"
echo '```'
printf '%s\n' "${WILL_UPLOAD[@]}"
echo '```'
echo ""
fi
if [ "${IGNORE_COUNT}" -gt 0 ]; then
echo "### ⏭️ Files excluded"
echo "| File | Reason |"
echo "|---|---|"
for entry in "${IGNORED_FILES[@]}"; do
f="${entry% | *}"; r="${entry##* | }"
echo "| \`${f}\` | ${r} |"
done
echo ""
fi
} >> "$GITHUB_STEP_SUMMARY"
- name: Resolve SFTP host and port
if: steps.source.outputs.skip == 'false'
id: conn
env:
HOST_RAW: ${{ vars.DEV_FTP_HOST }}
PORT_VAR: ${{ vars.DEV_FTP_PORT }}
run: |
# Resolve the SFTP endpoint. The port may come from a dedicated variable,
# from a host:port suffix, or fall back to the protocol default (22).
HOST="$HOST_RAW"
PORT="$PORT_VAR"
# Priority 1 — explicit DEV_FTP_PORT variable
if [ -n "$PORT" ]; then
echo " Using explicit DEV_FTP_PORT=${PORT}"
# Priority 2 — port embedded in DEV_FTP_HOST (host:port)
elif [[ "$HOST" == *:* ]]; then
PORT="${HOST##*:}"
HOST="${HOST%:*}"
echo " Extracted port ${PORT} from DEV_FTP_HOST"
# Priority 3 — SFTP default
else
PORT="22"
echo " No port specified — defaulting to SFTP port 22"
fi
echo "host=${HOST}" >> "$GITHUB_OUTPUT"
echo "port=${PORT}" >> "$GITHUB_OUTPUT"
echo "SFTP target: ${HOST}:${PORT}"
- name: Build remote path
if: steps.source.outputs.skip == 'false'
id: remote
env:
DEV_FTP_PATH: ${{ vars.DEV_FTP_PATH }}
DEV_FTP_SUFFIX: ${{ vars.DEV_FTP_SUFFIX }}
run: |
# Compose the remote destination as DEV_FTP_PATH/DEV_FTP_SUFFIX, then
# apply platform-specific guards before anything can upload to it.
BASE="$DEV_FTP_PATH"
if [ -z "$BASE" ]; then
echo "❌ DEV_FTP_PATH is not set."
echo " Configure it as an org-level variable (Settings → Variables) and"
echo " ensure this repository has been granted access to it."
exit 1
fi
# DEV_FTP_SUFFIX is required — it identifies the remote subdirectory for this repo.
# Without it we cannot safely determine the deployment target.
if [ -z "$DEV_FTP_SUFFIX" ]; then
echo "⏭️ DEV_FTP_SUFFIX variable is not set — skipping deployment."
echo " Set DEV_FTP_SUFFIX as a repo or org variable to enable deploy-dev."
echo "skip=true" >> "$GITHUB_OUTPUT"
echo "path=" >> "$GITHUB_OUTPUT"
exit 0
fi
REMOTE="${BASE%/}/${DEV_FTP_SUFFIX#/}"
# ── Platform-specific path safety guards ──────────────────────────────
PLATFORM=""
MOKO_FILE=".github/.mokostandards"; [ ! -f "$MOKO_FILE" ] && MOKO_FILE=".mokostandards"; if [ -f "$MOKO_FILE" ]; then
# Extract only the VALUE after "platform:" (quotes stripped), matching
# the deploy-demo workflow. The previous `grep -oP '^platform:.*'` kept
# the whole line including the "platform:" prefix, so PLATFORM never
# equalled "crm-module"/"waas-component" and the safety guards below
# were silently bypassed. '|| true' keeps a missing key from failing
# the step under the runner's default -e/pipefail shell.
PLATFORM=$(grep -E '^platform:' "$MOKO_FILE" | sed 's/.*:[[:space:]]*//' | tr -d '"' || true)
fi
if [ "$PLATFORM" = "crm-module" ]; then
# Dolibarr modules must deploy under htdocs/custom/ — guard against
# accidentally overwriting server root or unrelated directories.
if [[ "$REMOTE" != *custom* ]]; then
echo "❌ Safety check failed: Dolibarr (crm-module) remote path must contain 'custom'."
echo " Current path: ${REMOTE}"
echo " Set DEV_FTP_SUFFIX to the module's htdocs/custom/ subdirectory."
exit 1
fi
fi
if [ "$PLATFORM" = "waas-component" ]; then
# Joomla extensions may only deploy to the server's tmp/ directory.
if [[ "$REMOTE" != *tmp* ]]; then
echo "❌ Safety check failed: Joomla (waas-component) remote path must contain 'tmp'."
echo " Current path: ${REMOTE}"
echo " Set DEV_FTP_SUFFIX to a path under the server tmp/ directory."
exit 1
fi
fi
echo " Remote path: ${REMOTE}"
echo "path=${REMOTE}" >> "$GITHUB_OUTPUT"
- name: Detect SFTP authentication method
if: steps.source.outputs.skip == 'false' && steps.remote.outputs.skip != 'true'
id: auth
env:
# These env vars hold the actual secret values; the script below only
# tests whether each is non-empty to pick an auth strategy.
HAS_KEY: ${{ secrets.DEV_FTP_KEY }}
HAS_PASSWORD: ${{ secrets.DEV_FTP_PASSWORD }}
run: |
if [ -n "$HAS_KEY" ] && [ -n "$HAS_PASSWORD" ]; then
# Both set: key auth with password as passphrase; falls back to password-only if key fails
echo "method=key" >> "$GITHUB_OUTPUT"
echo "use_passphrase=true" >> "$GITHUB_OUTPUT"
echo "has_password=true" >> "$GITHUB_OUTPUT"
echo " Primary: SSH key + passphrase (DEV_FTP_KEY / DEV_FTP_PASSWORD)"
echo " Fallback: password-only auth if key authentication fails"
elif [ -n "$HAS_KEY" ]; then
# Key only: no passphrase, no password fallback
echo "method=key" >> "$GITHUB_OUTPUT"
echo "use_passphrase=false" >> "$GITHUB_OUTPUT"
echo "has_password=false" >> "$GITHUB_OUTPUT"
echo " Using SSH key authentication (DEV_FTP_KEY, no passphrase, no fallback)"
elif [ -n "$HAS_PASSWORD" ]; then
# Password only: direct SFTP password auth
echo "method=password" >> "$GITHUB_OUTPUT"
echo "use_passphrase=false" >> "$GITHUB_OUTPUT"
echo "has_password=true" >> "$GITHUB_OUTPUT"
echo " Using password authentication (DEV_FTP_PASSWORD)"
else
echo "❌ No SFTP credentials configured."
echo " Set DEV_FTP_KEY (preferred) or DEV_FTP_PASSWORD as an org-level secret."
exit 1
fi
- name: Setup PHP
if: steps.source.outputs.skip == 'false' && steps.remote.outputs.skip != 'true'
uses: shivammathur/setup-php@fcafdd6392932010c2bd5094439b8e33be2a8a09 # v2.37.0
with:
php-version: '8.1'
tools: composer
- name: Setup MokoStandards deploy tools
if: steps.source.outputs.skip == 'false' && steps.remote.outputs.skip != 'true'
env:
GH_TOKEN: ${{ secrets.GH_TOKEN || github.token }}
COMPOSER_AUTH: '{"github-oauth":{"github.com":"${{ secrets.GH_TOKEN || github.token }}"}}'
run: |
# Clone the pinned MokoStandards release branch and install its runtime
# dependencies (phpseclib, etc.) for the deploy scripts used below.
git clone --depth 1 --branch version/04.04 --quiet \
"https://x-access-token:${GH_TOKEN}@github.com/mokoconsulting-tech/MokoStandards.git" \
/tmp/mokostandards
cd /tmp/mokostandards
composer install --no-dev --no-interaction --quiet
- name: Clear remote destination folder (manual only)
if: >-
steps.source.outputs.skip == 'false' &&
steps.remote.outputs.skip != 'true' &&
inputs.clear_remote == true
env:
SFTP_HOST: ${{ steps.conn.outputs.host }}
SFTP_PORT: ${{ steps.conn.outputs.port }}
SFTP_USER: ${{ vars.DEV_FTP_USERNAME }}
SFTP_KEY: ${{ secrets.DEV_FTP_KEY }}
SFTP_PASSWORD: ${{ secrets.DEV_FTP_PASSWORD }}
AUTH_METHOD: ${{ steps.auth.outputs.method }}
USE_PASSPHRASE: ${{ steps.auth.outputs.use_passphrase }}
HAS_PASSWORD: ${{ steps.auth.outputs.has_password }}
REMOTE_PATH: ${{ steps.remote.outputs.path }}
run: |
cat > /tmp/moko_clear.php << 'PHPEOF'
<?php
declare(strict_types=1);
require '/tmp/mokostandards/vendor/autoload.php';
use phpseclib3\Net\SFTP;
use phpseclib3\Crypt\PublicKeyLoader;
$host = (string) getenv('SFTP_HOST');
$port = (int) getenv('SFTP_PORT');
$username = (string) getenv('SFTP_USER');
$authMethod = (string) getenv('AUTH_METHOD');
$usePassphrase = getenv('USE_PASSPHRASE') === 'true';
$hasPassword = getenv('HAS_PASSWORD') === 'true';
$remotePath = rtrim((string) getenv('REMOTE_PATH'), '/');
echo "⚠️ Clearing remote folder: {$remotePath}\n";
$sftp = new SFTP($host, $port);
// ── Authentication ──────────────────────────────────────────────
if ($authMethod === 'key') {
$keyData = (string) getenv('SFTP_KEY');
$passphrase = $usePassphrase ? (string) getenv('SFTP_PASSWORD') : false;
$password = $hasPassword ? (string) getenv('SFTP_PASSWORD') : '';
$key = PublicKeyLoader::load($keyData, $passphrase);
if (!$sftp->login($username, $key)) {
if ($password !== '') {
echo "⚠️ Key auth failed — falling back to password\n";
if (!$sftp->login($username, $password)) {
fwrite(STDERR, "❌ Both key and password authentication failed\n");
exit(1);
}
echo "✅ Connected via password authentication (key fallback)\n";
} else {
fwrite(STDERR, "❌ Key authentication failed and no password fallback is available\n");
exit(1);
}
} else {
echo "✅ Connected via SSH key authentication\n";
}
} else {
if (!$sftp->login($username, (string) getenv('SFTP_PASSWORD'))) {
fwrite(STDERR, "❌ Password authentication failed\n");
exit(1);
}
echo "✅ Connected via password authentication\n";
}
// ── Recursive delete ────────────────────────────────────────────
// Recursively delete everything under $path: files first, then the emptied
// directories (depth-first). $path itself is NOT removed.
function rmrf(SFTP $sftp, string $path): void
{
// nlist() returns false when $path does not exist.
$entries = $sftp->nlist($path);
if ($entries === false) {
return; // path does not exist — nothing to clear
}
foreach ($entries as $name) {
// nlist() includes the '.' and '..' pseudo-entries — skipping them
// prevents infinite recursion and accidental parent deletion.
if ($name === '.' || $name === '..') {
continue;
}
$entry = "{$path}/{$name}";
if ($sftp->is_dir($entry)) {
// Empty the subdirectory before rmdir(), which requires it empty.
rmrf($sftp, $entry);
$sftp->rmdir($entry);
echo " 🗑️ Removed dir: {$entry}\n";
} else {
$sftp->delete($entry);
echo " 🗑️ Removed file: {$entry}\n";
}
}
}
// ── Create remote directory tree ────────────────────────────────
// Create the remote directory tree one segment at a time (mkdir -p style).
// FIX: the original seeded $current via `str_starts_with($path, '/') ? '' : ''`
// — both branches were '', so a relative remote path was silently created
// from the filesystem root instead of the SFTP login directory. Absolute
// paths behave exactly as before.
function sftpMakedirs(SFTP $sftp, string $path): void
{
    $parts = array_values(array_filter(explode('/', $path), fn(string $p) => $p !== ''));
    // '/' prefix only for absolute input; relative paths stay relative.
    $prefix = str_starts_with($path, '/') ? '/' : '';
    $current = '';
    foreach ($parts as $part) {
        $current = ($current === '') ? $prefix . $part : $current . '/' . $part;
        $sftp->mkdir($current); // silently returns false if the dir already exists
    }
}
rmrf($sftp, $remotePath);
sftpMakedirs($sftp, $remotePath);
echo "✅ Remote folder ready: {$remotePath}\n";
PHPEOF
php /tmp/moko_clear.php
- name: Deploy via SFTP
if: steps.source.outputs.skip == 'false' && steps.remote.outputs.skip != 'true'
env:
SFTP_HOST: ${{ steps.conn.outputs.host }}
SFTP_PORT: ${{ steps.conn.outputs.port }}
SFTP_USER: ${{ vars.DEV_FTP_USERNAME }}
SFTP_KEY: ${{ secrets.DEV_FTP_KEY }}
SFTP_PASSWORD: ${{ secrets.DEV_FTP_PASSWORD }}
AUTH_METHOD: ${{ steps.auth.outputs.method }}
USE_PASSPHRASE: ${{ steps.auth.outputs.use_passphrase }}
REMOTE_PATH: ${{ steps.remote.outputs.path }}
SOURCE_DIR: ${{ steps.source.outputs.dir }}
run: |
# ── Write SSH key to temp file (key auth only) ────────────────────────
if [ "$AUTH_METHOD" = "key" ]; then
printf '%s' "$SFTP_KEY" > /tmp/deploy_key
chmod 600 /tmp/deploy_key
fi
# ── Generate sftp-config.json safely via jq ───────────────────────────
if [ "$AUTH_METHOD" = "key" ]; then
jq -n \
--arg host "$SFTP_HOST" \
--argjson port "${SFTP_PORT:-22}" \
--arg user "$SFTP_USER" \
--arg path "$REMOTE_PATH" \
--arg key "/tmp/deploy_key" \
'{host:$host, port:$port, user:$user, remote_path:$path, ssh_key_file:$key}' \
> /tmp/sftp-config.json
else
jq -n \
--arg host "$SFTP_HOST" \
--argjson port "${SFTP_PORT:-22}" \
--arg user "$SFTP_USER" \
--arg path "$REMOTE_PATH" \
--arg pass "$SFTP_PASSWORD" \
'{host:$host, port:$port, user:$user, remote_path:$path, password:$pass}' \
> /tmp/sftp-config.json
fi
# ── Run deploy-sftp.php from MokoStandards ────────────────────────────
DEPLOY_ARGS=(--path . --src-dir "$SOURCE_DIR" --config /tmp/sftp-config.json)
if [ "$USE_PASSPHRASE" = "true" ]; then
DEPLOY_ARGS+=(--key-passphrase "$SFTP_PASSWORD")
fi
# Set platform version to "development" before deploy (Dolibarr + Joomla)
php /tmp/mokostandards/api/cli/version_set_platform.php --path . --version development
# Write update files — dev/** = development, rc/** = rc
PLATFORM=$(php /tmp/mokostandards/api/cli/platform_detect.php --path . 2>/dev/null || true)
REPO="${{ github.repository }}"
BRANCH="${{ github.ref_name }}"
# Determine stability tag from branch prefix
STABILITY="development"
VERSION_LABEL="development"
if [[ "$BRANCH" == rc/* ]]; then
STABILITY="rc"
VERSION_LABEL=$(php /tmp/mokostandards/api/cli/version_read.php --path . 2>/dev/null || echo "${BRANCH#rc/}")-rc
fi
if [ "$PLATFORM" = "crm-module" ]; then
printf '%s' "$VERSION_LABEL" > update.txt
fi
if [ "$PLATFORM" = "waas-component" ]; then
MANIFEST=$(find . -maxdepth 2 -name "*.xml" -exec grep -l '<extension' {} \; 2>/dev/null | head -1 || true)
if [ -n "$MANIFEST" ]; then
EXT_NAME=$(grep -oP '<name>\K[^<]+' "$MANIFEST" 2>/dev/null | head -1 || echo "${{ github.event.repository.name }}")
EXT_TYPE=$(grep -oP '<extension[^>]+type="\K[^"]+' "$MANIFEST" 2>/dev/null || echo "component")
EXT_ELEMENT=$(grep -oP '<element>\K[^<]+' "$MANIFEST" 2>/dev/null | head -1 || basename "$MANIFEST" .xml)
EXT_CLIENT=$(grep -oP '<extension[^>]+client="\K[^"]+' "$MANIFEST" 2>/dev/null || echo "")
EXT_FOLDER=$(grep -oP '<extension[^>]+group="\K[^"]+' "$MANIFEST" 2>/dev/null || echo "")
TARGET_PLATFORM=$(grep -oP '<targetplatform[^/]*/' "$MANIFEST" 2>/dev/null | head -1 || true)
[ -n "$TARGET_PLATFORM" ] && TARGET_PLATFORM="${TARGET_PLATFORM}>"
[ -z "$TARGET_PLATFORM" ] && TARGET_PLATFORM=$(printf '<targetplatform name="joomla" version="5.*" %s>' "/")
CLIENT_TAG=""
if [ -n "$EXT_CLIENT" ]; then
CLIENT_TAG="<client>${EXT_CLIENT}</client>"
elif [ "$EXT_TYPE" = "module" ] || [ "$EXT_TYPE" = "plugin" ]; then
CLIENT_TAG="<client>site</client>"
fi
FOLDER_TAG=""
if [ -n "$EXT_FOLDER" ] && [ "$EXT_TYPE" = "plugin" ]; then
FOLDER_TAG="<folder>${EXT_FOLDER}</folder>"
fi
DOWNLOAD_URL="https://github.com/${REPO}/archive/refs/heads/${BRANCH}.zip"
{
printf '%s\n' '<?xml version="1.0" encoding="utf-8"?>'
printf '%s\n' '<updates>'
printf '%s\n' ' <update>'
printf '%s\n' " <name>${EXT_NAME}</name>"
printf '%s\n' " <description>${EXT_NAME} ${STABILITY} build</description>"
printf '%s\n' " <element>${EXT_ELEMENT}</element>"
printf '%s\n' " <type>${EXT_TYPE}</type>"
printf '%s\n' " <version>${VERSION_LABEL}</version>"
[ -n "$CLIENT_TAG" ] && printf '%s\n' " ${CLIENT_TAG}"
[ -n "$FOLDER_TAG" ] && printf '%s\n' " ${FOLDER_TAG}"
printf '%s\n' ' <tags>'
printf '%s\n' " <tag>${STABILITY}</tag>"
printf '%s\n' ' </tags>'
printf '%s\n' " <infourl title=\"${EXT_NAME}\">https://github.com/${REPO}/tree/${BRANCH}</infourl>"
printf '%s\n' ' <downloads>'
printf '%s\n' " <downloadurl type=\"full\" format=\"zip\">${DOWNLOAD_URL}</downloadurl>"
printf '%s\n' ' </downloads>'
printf '%s\n' " ${TARGET_PLATFORM}"
printf '%s\n' ' <maintainer>Moko Consulting</maintainer>'
printf '%s\n' ' <maintainerurl>https://mokoconsulting.tech</maintainerurl>'
printf '%s\n' ' </update>'
printf '%s\n' '</updates>'
} > update.xml
sed -i '/^[[:space:]]*$/d' update.xml
fi
fi
php /tmp/mokostandards/api/deploy/deploy-sftp.php "${DEPLOY_ARGS[@]}"
# (deploy-sftp.php handles dotfile skipping and .ftp_ignore natively)
# Remove temp files that should never be left behind
rm -f /tmp/deploy_key /tmp/sftp-config.json
- name: Create or update failure issue
if: failure()
env:
GH_TOKEN: ${{ secrets.GH_TOKEN || github.token }}
run: |
REPO="${{ github.repository }}"
RUN_URL="${{ github.server_url }}/${REPO}/actions/runs/${{ github.run_id }}"
ACTOR="${{ github.actor }}"
BRANCH="${{ github.ref_name }}"
EVENT="${{ github.event_name }}"
NOW=$(date -u '+%Y-%m-%d %H:%M:%S UTC')
LABEL="deploy-failure"
TITLE="fix: Dev deployment failed — ${REPO}"
BODY="## Dev Deployment Failed
A deployment to the dev server failed and requires attention.
| Field | Value |
|-------|-------|
| **Repository** | \`${REPO}\` |
| **Branch** | \`${BRANCH}\` |
| **Trigger** | ${EVENT} |
| **Actor** | @${ACTOR} |
| **Failed at** | ${NOW} |
| **Run** | [View workflow run](${RUN_URL}) |
### Next steps
1. Review the [workflow run log](${RUN_URL}) for the specific error.
2. Fix the underlying issue (credentials, SFTP connectivity, permissions).
3. Re-trigger the deployment via **Actions → Deploy to Dev Server → Run workflow**.
---
*Auto-created by deploy-dev.yml — close this issue once the deployment is resolved.*"
# Ensure the label exists (idempotent — no-op if already present)
gh label create "$LABEL" \
--repo "$REPO" \
--color "CC0000" \
--description "Automated deploy failure tracking" \
--force 2>/dev/null || true
# Look for an existing deploy-failure issue (any state — closed issues are
# reopened below via state="open").
# FIX: the Actions default shell runs with `set -e`, so a failing `gh api`
# inside $() would abort this failure handler; `|| true` degrades an API
# hiccup to "create a new issue" instead.
EXISTING=$(gh api "repos/${REPO}/issues?labels=${LABEL}&state=all&per_page=1&sort=created&direction=desc" \
--jq '.[0].number' 2>/dev/null || true)
if [ -n "$EXISTING" ] && [ "$EXISTING" != "null" ]; then
# PATCH both refreshes title/body and reopens the issue if it was closed.
gh api "repos/${REPO}/issues/${EXISTING}" \
-X PATCH \
-f title="$TITLE" \
-f body="$BODY" \
-f state="open" \
--silent
echo "📋 Failure issue #${EXISTING} updated/reopened: ${REPO}" >> "$GITHUB_STEP_SUMMARY"
else
gh issue create \
--repo "$REPO" \
--title "$TITLE" \
--body "$BODY" \
--label "$LABEL" \
--assignee "jmiller-moko" \
| tee -a "$GITHUB_STEP_SUMMARY"
fi
- name: Deployment summary
# Always runs — writes a skipped / success / failure summary to the job page.
if: always()
run: |
if [ "${{ steps.source.outputs.skip }}" == "true" ]; then
echo "### ⏭️ Deployment Skipped" >> "$GITHUB_STEP_SUMMARY"
echo "" >> "$GITHUB_STEP_SUMMARY"
# FIX: the resolver also accepts htdocs/, so the message now names both.
echo "No deployable source directory (\`src/\` or \`htdocs/\`) found in this repository." >> "$GITHUB_STEP_SUMMARY"
elif [ "${{ job.status }}" == "success" ]; then
echo "" >> "$GITHUB_STEP_SUMMARY"
echo "### ✅ Dev Deployment Successful" >> "$GITHUB_STEP_SUMMARY"
echo "" >> "$GITHUB_STEP_SUMMARY"
echo "| Field | Value |" >> "$GITHUB_STEP_SUMMARY"
echo "|-------|-------|" >> "$GITHUB_STEP_SUMMARY"
echo "| Host | \`${{ steps.conn.outputs.host }}:${{ steps.conn.outputs.port }}\` |" >> "$GITHUB_STEP_SUMMARY"
echo "| Remote path | \`${{ steps.remote.outputs.path }}\` |" >> "$GITHUB_STEP_SUMMARY"
# FIX: report the directory that was actually deployed (src/ or htdocs/)
# instead of the hardcoded "src/".
echo "| Source | \`${{ steps.source.outputs.dir }}/\` |" >> "$GITHUB_STEP_SUMMARY"
echo "| Trigger | ${{ github.event_name }} |" >> "$GITHUB_STEP_SUMMARY"
echo "| Auth | ${{ steps.auth.outputs.method }} |" >> "$GITHUB_STEP_SUMMARY"
echo "| Clear remote | ${{ inputs.clear_remote || 'false' }} |" >> "$GITHUB_STEP_SUMMARY"
else
echo "### ❌ Dev Deployment Failed" >> "$GITHUB_STEP_SUMMARY"
echo "" >> "$GITHUB_STEP_SUMMARY"
echo "Check the job log above for error details." >> "$GITHUB_STEP_SUMMARY"
fi

659
.github/workflows/deploy-rs.yml vendored Normal file
View File

@@ -0,0 +1,659 @@
# Copyright (C) 2026 Moko Consulting <hello@mokoconsulting.tech>
#
# This file is part of a Moko Consulting project.
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# FILE INFORMATION
# DEFGROUP: GitHub.Workflow
# INGROUP: MokoStandards.Deploy
# REPO: https://github.com/mokoconsulting-tech/MokoStandards
# PATH: /templates/workflows/shared/deploy-rs.yml.template
# VERSION: 04.04.01
# BRIEF: SFTP deployment workflow for release staging server — synced to all governed repos
# NOTE: Synced via bulk-repo-sync to .github/workflows/deploy-rs.yml in all governed repos.
# Port is resolved in order: RS_FTP_PORT variable → :port suffix in RS_FTP_HOST → 22.
name: Deploy to RS Server (SFTP)
# Deploys the contents of the src/ directory to the release staging server via SFTP.
# Triggers on push/merge to main — deploys the production-ready build to the release staging server.
#
# Required org-level variables: RS_FTP_HOST, RS_FTP_PATH, RS_FTP_USERNAME
# Optional org-level variable: RS_FTP_PORT (auto-detected from host or defaults to 22)
# Optional org/repo variable: RS_FTP_SUFFIX — when set, appended to RS_FTP_PATH to form the
# full remote destination: RS_FTP_PATH/RS_FTP_SUFFIX
# Ignore rules: Place a .ftp_ignore file in the repository root. Each non-empty,
# non-comment line is a regex pattern tested against the relative path
# of each file (e.g. "subdir/file.txt"). The .gitignore is also
# respected automatically.
# Required org-level secret: RS_FTP_KEY (preferred) or RS_FTP_PASSWORD
#
# Access control: only users with admin or maintain role on the repository may deploy.
on:
push:
branches:
- main
- master
paths:
- 'src/**'
- 'htdocs/**'
pull_request:
types: [opened, synchronize, reopened, closed]
branches:
- main
- master
paths:
- 'src/**'
- 'htdocs/**'
workflow_dispatch:
inputs:
clear_remote:
description: 'Delete all files inside the remote destination folder before uploading'
required: false
default: false
type: boolean
permissions:
contents: read
pull-requests: write
env:
FORCE_JAVASCRIPT_ACTIONS_TO_NODE24: true
jobs:
check-permission:
name: Verify Deployment Permission
runs-on: ubuntu-latest
steps:
- name: Check actor permission
env:
# Prefer the org-scoped GH_TOKEN secret (needed for the org membership
# fallback). Falls back to the built-in github.token so the collaborator
# endpoint still works even if GH_TOKEN is not configured.
GH_TOKEN: ${{ secrets.GH_TOKEN || github.token }}
run: |
ACTOR="${{ github.actor }}"
REPO="${{ github.repository }}"
ORG="${{ github.repository_owner }}"
METHOD=""
AUTHORIZED="false"
# Hardcoded authorized users — always allowed to deploy
AUTHORIZED_USERS="jmiller-moko github-actions[bot]"
for user in $AUTHORIZED_USERS; do
if [ "$ACTOR" = "$user" ]; then
AUTHORIZED="true"
METHOD="hardcoded allowlist"
PERMISSION="admin"
break
fi
done
# For other actors, check repo/org permissions via API.
# FIX: the Actions default shell runs with `set -e`; a failing `gh api`
# (e.g. 404 for a non-collaborator) inside $() aborted the step before
# the org-membership fallback and the summary below could run. `|| true`
# keeps the intended fallback path reachable.
if [ "$AUTHORIZED" != "true" ]; then
PERMISSION=$(gh api "repos/${REPO}/collaborators/${ACTOR}/permission" \
--jq '.permission' 2>/dev/null || true)
METHOD="repo collaborator API"
if [ -z "$PERMISSION" ]; then
# Collaborator lookup empty/failed — only org owners are implicitly allowed.
ORG_ROLE=$(gh api "orgs/${ORG}/memberships/${ACTOR}" \
--jq '.role' 2>/dev/null || true)
METHOD="org membership API"
if [ "$ORG_ROLE" = "owner" ]; then
PERMISSION="admin"
else
PERMISSION="none"
fi
fi
case "$PERMISSION" in
admin|maintain) AUTHORIZED="true" ;;
esac
fi
# Write detailed summary
{
echo "## 🔐 Deploy Authorization"
echo ""
echo "| Field | Value |"
echo "|-------|-------|"
echo "| **Actor** | \`${ACTOR}\` |"
echo "| **Repository** | \`${REPO}\` |"
echo "| **Permission** | \`${PERMISSION}\` |"
echo "| **Method** | ${METHOD} |"
echo "| **Authorized** | ${AUTHORIZED} |"
echo "| **Trigger** | \`${{ github.event_name }}\` |"
echo "| **Branch** | \`${{ github.ref_name }}\` |"
echo ""
} >> "$GITHUB_STEP_SUMMARY"
if [ "$AUTHORIZED" = "true" ]; then
echo "✅ ${ACTOR} authorized to deploy (${METHOD})" >> "$GITHUB_STEP_SUMMARY"
else
echo "❌ ${ACTOR} is NOT authorized to deploy." >> "$GITHUB_STEP_SUMMARY"
echo "" >> "$GITHUB_STEP_SUMMARY"
echo "Deployment requires one of:" >> "$GITHUB_STEP_SUMMARY"
echo "- Being in the hardcoded allowlist" >> "$GITHUB_STEP_SUMMARY"
echo "- Having \`admin\` or \`maintain\` role on the repository" >> "$GITHUB_STEP_SUMMARY"
exit 1
fi
deploy:
name: SFTP Deploy → RS
runs-on: ubuntu-latest
needs: [check-permission]
if: >-
!startsWith(github.head_ref || github.ref_name, 'chore/') &&
(github.event_name == 'workflow_dispatch' ||
github.event_name == 'push' ||
(github.event_name == 'pull_request' &&
(github.event.action == 'opened' ||
github.event.action == 'synchronize' ||
github.event.action == 'reopened' ||
github.event.pull_request.merged == true)))
steps:
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- name: Resolve source directory
id: source
run: |
# Pick the deployable tree: src/ takes priority, htdocs/ is the fallback.
SRC=""
for candidate in src htdocs; do
if [ -d "$candidate" ]; then
SRC="$candidate"
break
fi
done
if [ -z "$SRC" ]; then
echo "⚠️ No src/ or htdocs/ directory found — skipping deployment"
echo "skip=true" >> "$GITHUB_OUTPUT"
exit 0
fi
COUNT=$(find "$SRC" -type f | wc -l)
echo "✅ Source: ${SRC}/ (${COUNT} file(s))"
echo "skip=false" >> "$GITHUB_OUTPUT"
echo "dir=${SRC}" >> "$GITHUB_OUTPUT"
- name: Preview files to deploy
if: steps.source.outputs.skip == 'false'
env:
SOURCE_DIR: ${{ steps.source.outputs.dir }}
run: |
# ── Convert a gitignore-style glob line to an ERE pattern ──────────────
ftp_ignore_to_regex() {
local line="$1"
local anchored=false
# Strip inline comments and whitespace
line=$(printf '%s' "$line" | sed 's/[[:space:]]*#.*$//' | sed 's/^[[:space:]]*//;s/[[:space:]]*$//')
[ -z "$line" ] && return
# Skip negation patterns (not supported)
[[ "$line" == !* ]] && return
# Trailing slash = directory marker; strip it
line="${line%/}"
# Leading slash = anchored to root; strip it
if [[ "$line" == /* ]]; then
anchored=true
line="${line#/}"
fi
# Escape ERE special chars, then restore glob semantics
local regex
regex=$(printf '%s' "$line" \
| sed 's/[.+^${}()|[\\]/\\&/g' \
| sed 's/\\\*\\\*/\x01/g' \
| sed 's/\\\*/[^\/]*/g' \
| sed 's/\x01/.*/g' \
| sed 's/\\\?/[^\/]/g')
if $anchored; then
printf '^%s(/|$)' "$regex"
else
printf '(^|/)%s(/|$)' "$regex"
fi
}
# ── Read .ftp_ignore (gitignore-style globs) ─────────────────────────
IGNORE_PATTERNS=()
IGNORE_SOURCES=()
if [ -f ".ftp_ignore" ]; then
while IFS= read -r line; do
[[ "$line" =~ ^[[:space:]]*$ || "$line" =~ ^[[:space:]]*# ]] && continue
regex=$(ftp_ignore_to_regex "$line")
[ -n "$regex" ] && IGNORE_PATTERNS+=("$regex") && IGNORE_SOURCES+=("$line")
done < ".ftp_ignore"
fi
# ── Walk src/ and classify every file ────────────────────────────────
WILL_UPLOAD=()
IGNORED_FILES=()
while IFS= read -r -d '' file; do
rel="${file#${SOURCE_DIR}/}"
SKIP=false
for i in "${!IGNORE_PATTERNS[@]}"; do
if echo "$rel" | grep -qE "${IGNORE_PATTERNS[$i]}" 2>/dev/null; then
IGNORED_FILES+=("$rel | .ftp_ignore \`${IGNORE_SOURCES[$i]}\`")
SKIP=true; break
fi
done
$SKIP && continue
if [ -f ".gitignore" ]; then
git check-ignore -q "$rel" 2>/dev/null && {
IGNORED_FILES+=("$rel | .gitignore")
continue
} || true
fi
WILL_UPLOAD+=("$rel")
done < <(find "$SOURCE_DIR" -type f -print0 | sort -z)
UPLOAD_COUNT="${#WILL_UPLOAD[@]}"
IGNORE_COUNT="${#IGNORED_FILES[@]}"
echo " ${UPLOAD_COUNT} file(s) will be uploaded, ${IGNORE_COUNT} ignored"
# ── Write deployment preview to step summary ──────────────────────────
{
echo "## 📋 Deployment Preview"
echo ""
echo "| Field | Value |"
echo "|---|---|"
echo "| Source | \`${SOURCE_DIR}/\` |"
echo "| Files to upload | **${UPLOAD_COUNT}** |"
echo "| Files ignored | **${IGNORE_COUNT}** |"
echo ""
if [ "${UPLOAD_COUNT}" -gt 0 ]; then
echo "### 📂 Files that will be uploaded"
echo '```'
printf '%s\n' "${WILL_UPLOAD[@]}"
echo '```'
echo ""
fi
if [ "${IGNORE_COUNT}" -gt 0 ]; then
echo "### ⏭️ Files excluded"
echo "| File | Reason |"
echo "|---|---|"
for entry in "${IGNORED_FILES[@]}"; do
f="${entry% | *}"; r="${entry##* | }"
echo "| \`${f}\` | ${r} |"
done
echo ""
fi
} >> "$GITHUB_STEP_SUMMARY"
- name: Resolve SFTP host and port
# Resolves the SFTP endpoint: explicit port variable → host:port suffix → 22.
if: steps.source.outputs.skip == 'false'
id: conn
env:
HOST_RAW: ${{ vars.RS_FTP_HOST }}
PORT_VAR: ${{ vars.RS_FTP_PORT }}
run: |
HOST="$HOST_RAW"
PORT="$PORT_VAR"
# Priority 1 — explicit RS_FTP_PORT variable
if [ -n "$PORT" ]; then
echo " Using explicit RS_FTP_PORT=${PORT}"
# Priority 2 — port embedded in RS_FTP_HOST (host:port)
# NOTE(review): a bare IPv6 address also contains ':' and would be
# mis-split here — assumes hostname/IPv4 values; confirm before using IPv6.
elif [[ "$HOST" == *:* ]]; then
PORT="${HOST##*:}"
HOST="${HOST%:*}"
echo " Extracted port ${PORT} from RS_FTP_HOST"
# Priority 3 — SFTP default
else
PORT="22"
echo " No port specified — defaulting to SFTP port 22"
fi
echo "host=${HOST}" >> "$GITHUB_OUTPUT"
echo "port=${PORT}" >> "$GITHUB_OUTPUT"
echo "SFTP target: ${HOST}:${PORT}"
- name: Build remote path
# Joins RS_FTP_PATH and RS_FTP_SUFFIX into the remote destination.
# Fails hard when the base path is missing; soft-skips when the suffix is.
if: steps.source.outputs.skip == 'false'
id: remote
env:
RS_FTP_PATH: ${{ vars.RS_FTP_PATH }}
RS_FTP_SUFFIX: ${{ vars.RS_FTP_SUFFIX }}
run: |
BASE="$RS_FTP_PATH"
if [ -z "$BASE" ]; then
echo "❌ RS_FTP_PATH is not set."
echo " Configure it as an org-level variable (Settings → Variables) and"
echo " ensure this repository has been granted access to it."
exit 1
fi
# RS_FTP_SUFFIX is required — it identifies the remote subdirectory for this repo.
# Without it we cannot safely determine the deployment target.
if [ -z "$RS_FTP_SUFFIX" ]; then
echo "⏭️ RS_FTP_SUFFIX variable is not set — skipping deployment."
echo " Set RS_FTP_SUFFIX as a repo or org variable to enable deploy-rs."
echo "skip=true" >> "$GITHUB_OUTPUT"
echo "path=" >> "$GITHUB_OUTPUT"
exit 0
fi
# Join with exactly one '/' regardless of trailing/leading slashes.
REMOTE="${BASE%/}/${RS_FTP_SUFFIX#/}"
# ── Platform-specific path safety guards ──────────────────────────────
# PLATFORM is detected but deliberately unused on RS (see comment below);
# the detection is kept for parity with the other deploy templates.
PLATFORM=""
MOKO_FILE=".github/.mokostandards"; [ ! -f "$MOKO_FILE" ] && MOKO_FILE=".mokostandards"; if [ -f "$MOKO_FILE" ]; then
PLATFORM=$(grep -E '^platform:' "$MOKO_FILE" | sed 's/.*:[[:space:]]*//' | tr -d '"')
fi
# RS deployment: no path restrictions for any platform
echo " Remote path: ${REMOTE}"
echo "path=${REMOTE}" >> "$GITHUB_OUTPUT"
- name: Detect SFTP authentication method
if: steps.source.outputs.skip == 'false' && steps.remote.outputs.skip != 'true'
id: auth
env:
HAS_KEY: ${{ secrets.RS_FTP_KEY }}
HAS_PASSWORD: ${{ secrets.RS_FTP_PASSWORD }}
run: |
if [ -n "$HAS_KEY" ] && [ -n "$HAS_PASSWORD" ]; then
# Both set: key auth with password as passphrase; falls back to password-only if key fails
echo "method=key" >> "$GITHUB_OUTPUT"
echo "use_passphrase=true" >> "$GITHUB_OUTPUT"
echo "has_password=true" >> "$GITHUB_OUTPUT"
echo " Primary: SSH key + passphrase (RS_FTP_KEY / RS_FTP_PASSWORD)"
echo " Fallback: password-only auth if key authentication fails"
elif [ -n "$HAS_KEY" ]; then
# Key only: no passphrase, no password fallback
echo "method=key" >> "$GITHUB_OUTPUT"
echo "use_passphrase=false" >> "$GITHUB_OUTPUT"
echo "has_password=false" >> "$GITHUB_OUTPUT"
echo " Using SSH key authentication (RS_FTP_KEY, no passphrase, no fallback)"
elif [ -n "$HAS_PASSWORD" ]; then
# Password only: direct SFTP password auth
echo "method=password" >> "$GITHUB_OUTPUT"
echo "use_passphrase=false" >> "$GITHUB_OUTPUT"
echo "has_password=true" >> "$GITHUB_OUTPUT"
echo " Using password authentication (RS_FTP_PASSWORD)"
else
echo "❌ No SFTP credentials configured."
echo " Set RS_FTP_KEY (preferred) or RS_FTP_PASSWORD as an org-level secret."
exit 1
fi
- name: Setup PHP
if: steps.source.outputs.skip == 'false' && steps.remote.outputs.skip != 'true'
uses: shivammathur/setup-php@fcafdd6392932010c2bd5094439b8e33be2a8a09 # v2.37.0
with:
php-version: '8.1'
tools: composer
- name: Setup MokoStandards deploy tools
if: steps.source.outputs.skip == 'false' && steps.remote.outputs.skip != 'true'
env:
GH_TOKEN: ${{ secrets.GH_TOKEN || github.token }}
COMPOSER_AUTH: '{"github-oauth":{"github.com":"${{ secrets.GH_TOKEN || github.token }}"}}'
run: |
git clone --depth 1 --branch version/04.04 --quiet \
"https://x-access-token:${GH_TOKEN}@github.com/mokoconsulting-tech/MokoStandards.git" \
/tmp/mokostandards
cd /tmp/mokostandards
composer install --no-dev --no-interaction --quiet
- name: Clear remote destination folder (manual only)
if: >-
steps.source.outputs.skip == 'false' &&
steps.remote.outputs.skip != 'true' &&
inputs.clear_remote == true
env:
SFTP_HOST: ${{ steps.conn.outputs.host }}
SFTP_PORT: ${{ steps.conn.outputs.port }}
SFTP_USER: ${{ vars.RS_FTP_USERNAME }}
SFTP_KEY: ${{ secrets.RS_FTP_KEY }}
SFTP_PASSWORD: ${{ secrets.RS_FTP_PASSWORD }}
AUTH_METHOD: ${{ steps.auth.outputs.method }}
USE_PASSPHRASE: ${{ steps.auth.outputs.use_passphrase }}
HAS_PASSWORD: ${{ steps.auth.outputs.has_password }}
REMOTE_PATH: ${{ steps.remote.outputs.path }}
run: |
cat > /tmp/moko_clear.php << 'PHPEOF'
<?php
declare(strict_types=1);
require '/tmp/mokostandards/vendor/autoload.php';
use phpseclib3\Net\SFTP;
use phpseclib3\Crypt\PublicKeyLoader;
$host = (string) getenv('SFTP_HOST');
$port = (int) getenv('SFTP_PORT');
$username = (string) getenv('SFTP_USER');
$authMethod = (string) getenv('AUTH_METHOD');
$usePassphrase = getenv('USE_PASSPHRASE') === 'true';
$hasPassword = getenv('HAS_PASSWORD') === 'true';
$remotePath = rtrim((string) getenv('REMOTE_PATH'), '/');
echo "⚠️ Clearing remote folder: {$remotePath}\n";
$sftp = new SFTP($host, $port);
// ── Authentication ──────────────────────────────────────────────
if ($authMethod === 'key') {
$keyData = (string) getenv('SFTP_KEY');
$passphrase = $usePassphrase ? (string) getenv('SFTP_PASSWORD') : false;
$password = $hasPassword ? (string) getenv('SFTP_PASSWORD') : '';
$key = PublicKeyLoader::load($keyData, $passphrase);
if (!$sftp->login($username, $key)) {
if ($password !== '') {
echo "⚠️ Key auth failed — falling back to password\n";
if (!$sftp->login($username, $password)) {
fwrite(STDERR, "❌ Both key and password authentication failed\n");
exit(1);
}
echo "✅ Connected via password authentication (key fallback)\n";
} else {
fwrite(STDERR, "❌ Key authentication failed and no password fallback is available\n");
exit(1);
}
} else {
echo "✅ Connected via SSH key authentication\n";
}
} else {
if (!$sftp->login($username, (string) getenv('SFTP_PASSWORD'))) {
fwrite(STDERR, "❌ Password authentication failed\n");
exit(1);
}
echo "✅ Connected via password authentication\n";
}
// ── Recursive delete ────────────────────────────────────────────
// Recursively delete everything under $path: files first, then the emptied
// directories (depth-first). $path itself is NOT removed.
function rmrf(SFTP $sftp, string $path): void
{
// nlist() returns false when $path does not exist.
$entries = $sftp->nlist($path);
if ($entries === false) {
return; // path does not exist — nothing to clear
}
foreach ($entries as $name) {
// nlist() includes the '.' and '..' pseudo-entries — skipping them
// prevents infinite recursion and accidental parent deletion.
if ($name === '.' || $name === '..') {
continue;
}
$entry = "{$path}/{$name}";
if ($sftp->is_dir($entry)) {
// Empty the subdirectory before rmdir(), which requires it empty.
rmrf($sftp, $entry);
$sftp->rmdir($entry);
echo " 🗑️ Removed dir: {$entry}\n";
} else {
$sftp->delete($entry);
echo " 🗑️ Removed file: {$entry}\n";
}
}
}
// ── Create remote directory tree ────────────────────────────────
// Create the remote directory tree one segment at a time (mkdir -p style).
// FIX: the original seeded $current via `str_starts_with($path, '/') ? '' : ''`
// — both branches were '', so a relative remote path was silently created
// from the filesystem root instead of the SFTP login directory. Absolute
// paths behave exactly as before.
function sftpMakedirs(SFTP $sftp, string $path): void
{
    $parts = array_values(array_filter(explode('/', $path), fn(string $p) => $p !== ''));
    // '/' prefix only for absolute input; relative paths stay relative.
    $prefix = str_starts_with($path, '/') ? '/' : '';
    $current = '';
    foreach ($parts as $part) {
        $current = ($current === '') ? $prefix . $part : $current . '/' . $part;
        $sftp->mkdir($current); // silently returns false if the dir already exists
    }
}
rmrf($sftp, $remotePath);
sftpMakedirs($sftp, $remotePath);
echo "✅ Remote folder ready: {$remotePath}\n";
PHPEOF
php /tmp/moko_clear.php
- name: Deploy via SFTP
if: steps.source.outputs.skip == 'false' && steps.remote.outputs.skip != 'true'
env:
SFTP_HOST: ${{ steps.conn.outputs.host }}
SFTP_PORT: ${{ steps.conn.outputs.port }}
SFTP_USER: ${{ vars.RS_FTP_USERNAME }}
SFTP_KEY: ${{ secrets.RS_FTP_KEY }}
SFTP_PASSWORD: ${{ secrets.RS_FTP_PASSWORD }}
AUTH_METHOD: ${{ steps.auth.outputs.method }}
USE_PASSPHRASE: ${{ steps.auth.outputs.use_passphrase }}
REMOTE_PATH: ${{ steps.remote.outputs.path }}
SOURCE_DIR: ${{ steps.source.outputs.dir }}
run: |
# ── Write SSH key to temp file (key auth only) ────────────────────────
if [ "$AUTH_METHOD" = "key" ]; then
printf '%s' "$SFTP_KEY" > /tmp/deploy_key
chmod 600 /tmp/deploy_key
fi
# ── Generate sftp-config.json safely via jq ───────────────────────────
if [ "$AUTH_METHOD" = "key" ]; then
jq -n \
--arg host "$SFTP_HOST" \
--argjson port "${SFTP_PORT:-22}" \
--arg user "$SFTP_USER" \
--arg path "$REMOTE_PATH" \
--arg key "/tmp/deploy_key" \
'{host:$host, port:$port, user:$user, remote_path:$path, ssh_key_file:$key}' \
> /tmp/sftp-config.json
else
jq -n \
--arg host "$SFTP_HOST" \
--argjson port "${SFTP_PORT:-22}" \
--arg user "$SFTP_USER" \
--arg path "$REMOTE_PATH" \
--arg pass "$SFTP_PASSWORD" \
'{host:$host, port:$port, user:$user, remote_path:$path, password:$pass}' \
> /tmp/sftp-config.json
fi
# ── Run deploy-sftp.php from MokoStandards ────────────────────────────
DEPLOY_ARGS=(--path . --src-dir "$SOURCE_DIR" --config /tmp/sftp-config.json)
if [ "$USE_PASSPHRASE" = "true" ]; then
DEPLOY_ARGS+=(--key-passphrase "$SFTP_PASSWORD")
fi
php /tmp/mokostandards/api/deploy/deploy-sftp.php "${DEPLOY_ARGS[@]}"
# (deploy-sftp.php handles dotfile skipping and .ftp_ignore natively)
# Remove temp files that should never be left behind
rm -f /tmp/deploy_key /tmp/sftp-config.json
- name: Create or update failure issue
if: failure() && steps.remote.outputs.skip != 'true'
env:
GH_TOKEN: ${{ secrets.GH_TOKEN || github.token }}
run: |
REPO="${{ github.repository }}"
RUN_URL="${{ github.server_url }}/${REPO}/actions/runs/${{ github.run_id }}"
ACTOR="${{ github.actor }}"
BRANCH="${{ github.ref_name }}"
EVENT="${{ github.event_name }}"
NOW=$(date -u '+%Y-%m-%d %H:%M:%S UTC')
LABEL="deploy-failure"
TITLE="fix: RS deployment failed — ${REPO}"
BODY="## RS Deployment Failed
A deployment to the RS server failed and requires attention.
| Field | Value |
|-------|-------|
| **Repository** | \`${REPO}\` |
| **Branch** | \`${BRANCH}\` |
| **Trigger** | ${EVENT} |
| **Actor** | @${ACTOR} |
| **Failed at** | ${NOW} |
| **Run** | [View workflow run](${RUN_URL}) |
### Next steps
1. Review the [workflow run log](${RUN_URL}) for the specific error.
2. Fix the underlying issue (credentials, SFTP connectivity, permissions).
3. Re-trigger the deployment via **Actions → Deploy to RS Server → Run workflow**.
---
*Auto-created by deploy-rs.yml — close this issue once the deployment is resolved.*"
# Ensure the label exists (idempotent — no-op if already present)
gh label create "$LABEL" \
--repo "$REPO" \
--color "CC0000" \
--description "Automated deploy failure tracking" \
--force 2>/dev/null || true
# Look for an existing deploy-failure issue (any state — reopen if closed)
# FIX: the Actions default shell runs with `set -e`, so a failing `gh api`
# inside $() would abort this failure handler; `|| true` degrades an API
# hiccup to "create a new issue" instead.
EXISTING=$(gh api "repos/${REPO}/issues?labels=${LABEL}&state=all&per_page=1&sort=created&direction=desc" \
--jq '.[0].number' 2>/dev/null || true)
if [ -n "$EXISTING" ] && [ "$EXISTING" != "null" ]; then
# PATCH both refreshes title/body and reopens the issue if it was closed.
gh api "repos/${REPO}/issues/${EXISTING}" \
-X PATCH \
-f title="$TITLE" \
-f body="$BODY" \
-f state="open" \
--silent
echo "📋 Failure issue #${EXISTING} updated/reopened: ${REPO}" >> "$GITHUB_STEP_SUMMARY"
else
gh issue create \
--repo "$REPO" \
--title "$TITLE" \
--body "$BODY" \
--label "$LABEL" \
--assignee "jmiller-moko" \
| tee -a "$GITHUB_STEP_SUMMARY"
fi
- name: Deployment summary
# Always runs — writes a skipped / success / failure summary to the job page.
if: always()
run: |
if [ "${{ steps.source.outputs.skip }}" == "true" ]; then
echo "### ⏭️ Deployment Skipped" >> "$GITHUB_STEP_SUMMARY"
echo "" >> "$GITHUB_STEP_SUMMARY"
# FIX: the resolver also accepts htdocs/, so the message now names both.
echo "No deployable source directory (\`src/\` or \`htdocs/\`) found in this repository." >> "$GITHUB_STEP_SUMMARY"
elif [ "${{ job.status }}" == "success" ]; then
echo "" >> "$GITHUB_STEP_SUMMARY"
echo "### ✅ RS Deployment Successful" >> "$GITHUB_STEP_SUMMARY"
echo "" >> "$GITHUB_STEP_SUMMARY"
echo "| Field | Value |" >> "$GITHUB_STEP_SUMMARY"
echo "|-------|-------|" >> "$GITHUB_STEP_SUMMARY"
echo "| Host | \`${{ steps.conn.outputs.host }}:${{ steps.conn.outputs.port }}\` |" >> "$GITHUB_STEP_SUMMARY"
echo "| Remote path | \`${{ steps.remote.outputs.path }}\` |" >> "$GITHUB_STEP_SUMMARY"
# FIX: report the directory that was actually deployed (src/ or htdocs/)
# instead of the hardcoded "src/".
echo "| Source | \`${{ steps.source.outputs.dir }}/\` |" >> "$GITHUB_STEP_SUMMARY"
echo "| Trigger | ${{ github.event_name }} |" >> "$GITHUB_STEP_SUMMARY"
echo "| Auth | ${{ steps.auth.outputs.method }} |" >> "$GITHUB_STEP_SUMMARY"
echo "| Clear remote | ${{ inputs.clear_remote || 'false' }} |" >> "$GITHUB_STEP_SUMMARY"
else
echo "### ❌ RS Deployment Failed" >> "$GITHUB_STEP_SUMMARY"
echo "" >> "$GITHUB_STEP_SUMMARY"
echo "Check the job log above for error details." >> "$GITHUB_STEP_SUMMARY"
fi

View File

@@ -0,0 +1,758 @@
# Copyright (C) 2026 Moko Consulting <hello@mokoconsulting.tech>
#
# This file is part of a Moko Consulting project.
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# FILE INFORMATION
# DEFGROUP: GitHub.Workflow
# INGROUP: MokoStandards.Firewall
# REPO: https://github.com/mokoconsulting-tech/MokoStandards
# PATH: /templates/workflows/shared/enterprise-firewall-setup.yml.template
# VERSION: 04.04.01
# BRIEF: Enterprise firewall configuration — generates outbound allow-rules including SFTP deployment server
# NOTE: Reads DEV_FTP_HOST / DEV_FTP_PORT variables to include SFTP egress rules alongside HTTPS rules.
name: Enterprise Firewall Configuration
# This workflow provides firewall configuration guidance for enterprise-ready sites
# It generates firewall rules for allowing outbound access to trusted domains
# including license providers, documentation sources, package registries,
# and the SFTP deployment server (DEV_FTP_HOST / DEV_FTP_PORT).
#
# Runs automatically when:
# - Coding agent workflows are triggered (pull requests with copilot/ prefix)
# - Manual workflow dispatch for custom configurations
on:
  workflow_dispatch:
    inputs:
      firewall_type:
        description: 'Target firewall type'
        required: true
        type: choice
        options:
          - 'iptables'
          - 'ufw'
          - 'firewalld'
          - 'aws-security-group'
          - 'azure-nsg'
          - 'gcp-firewall'
          - 'cloudflare'
          - 'all'
        default: 'all'
      output_format:
        description: 'Output format'
        required: true
        type: choice
        options:
          - 'shell-script'
          - 'json'
          - 'yaml'
          - 'markdown'
          - 'all'
        default: 'markdown'
  # Auto-run when coding agent creates or updates PRs
  # NOTE(review): pull_request 'branches' filters the BASE branch of the PR,
  # not the head branch — for PRs opened *from* copilot/** heads into main
  # this may never match. Confirm intent (branches vs branches-ignore/head filter).
  pull_request:
    branches:
      - 'copilot/**'
      - 'agent/**'
    types: [opened, synchronize, reopened]
  # Auto-run on push to coding agent branches
  push:
    branches:
      - 'copilot/**'
      - 'agent/**'
# Least-privilege token: the job only reads repo contents and run metadata.
permissions:
  contents: read
  actions: read
jobs:
  generate-firewall-rules:
    name: Generate Firewall Rules
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repository
        uses: actions/checkout@v6
      - name: Set up Python
        uses: actions/setup-python@v6
        with:
          python-version: '3.11'
- name: Apply Firewall Rules to Runner (Auto-run only)
  if: github.event_name != 'workflow_dispatch'
  env:
    DEV_FTP_HOST: ${{ vars.DEV_FTP_HOST }}
    DEV_FTP_PORT: ${{ vars.DEV_FTP_PORT }}
  run: |
    echo "🔥 Applying firewall rules for coding agent environment..."
    echo ""
    echo "This step ensures the GitHub Actions runner can access trusted domains"
    echo "including license providers, package registries, and documentation sources."
    echo ""
    # Note: GitHub Actions runners are ephemeral and run in controlled environments
    # This step documents what domains are being accessed during the workflow
    # Actual firewall configuration is managed by GitHub
    #
    # FIX: the heredoc delimiter must be UNQUOTED so ${DEV_FTP_HOST:-...}
    # expands below. With the previous << 'EOF' the literal placeholder text
    # was written to the file instead of the configured SFTP host. The body
    # contains no other $ or backquote characters, so nothing else expands.
    cat > /tmp/trusted-domains.txt << EOF
    # Trusted domains for coding agent environment
    # License Providers
    www.gnu.org
    opensource.org
    choosealicense.com
    spdx.org
    creativecommons.org
    apache.org
    fsf.org
    # Documentation & Standards
    semver.org
    keepachangelog.com
    conventionalcommits.org
    # GitHub & Related
    github.com
    api.github.com
    docs.github.com
    raw.githubusercontent.com
    ghcr.io
    # Package Registries
    npmjs.com
    registry.npmjs.org
    pypi.org
    files.pythonhosted.org
    packagist.org
    repo.packagist.org
    rubygems.org
    # Platform-Specific
    joomla.org
    downloads.joomla.org
    docs.joomla.org
    php.net
    getcomposer.org
    dolibarr.org
    wiki.dolibarr.org
    docs.dolibarr.org
    # Moko Consulting
    mokoconsulting.tech
    # SFTP Deployment Server (DEV_FTP_HOST)
    ${DEV_FTP_HOST:-<not configured>}
    # Google Services
    drive.google.com
    docs.google.com
    sheets.google.com
    accounts.google.com
    storage.googleapis.com
    fonts.googleapis.com
    fonts.gstatic.com
    # GitHub Extended
    upload.github.com
    objects.githubusercontent.com
    user-images.githubusercontent.com
    codeload.github.com
    pkg.github.com
    # Developer Reference
    developer.mozilla.org
    stackoverflow.com
    git-scm.com
    # CDN & Infrastructure
    cdn.jsdelivr.net
    unpkg.com
    cdnjs.cloudflare.com
    img.shields.io
    # Container Registries
    hub.docker.com
    registry-1.docker.io
    # CI & Code Quality
    codecov.io
    sonarcloud.io
    # Terraform & Infrastructure
    registry.terraform.io
    releases.hashicorp.com
    checkpoint-api.hashicorp.com
    EOF
    echo "✓ Trusted domains documented for this runner"
    echo "✓ GitHub Actions runners have network access to these domains"
    echo ""
    # Smoke-test HTTPS connectivity to a few representative domains.
    echo "Testing connectivity to key domains..."
    for domain in "github.com" "www.gnu.org" "npmjs.com" "pypi.org"; do
      if curl -s --max-time 3 -o /dev/null -w "%{http_code}" "https://$domain" | grep -q "200\|301\|302"; then
        echo " ✓ $domain is accessible"
      else
        echo " ⚠️ $domain connectivity check failed (may be expected)"
      fi
    done
    # Test SFTP server connectivity (plain TCP port probe via /dev/tcp).
    SFTP_HOST="${DEV_FTP_HOST:-}"
    SFTP_PORT="${DEV_FTP_PORT:-22}"
    if [ -n "$SFTP_HOST" ]; then
      # Strip any embedded :port suffix
      SFTP_HOST="${SFTP_HOST%%:*}"
      echo ""
      echo "Testing SFTP deployment server connectivity..."
      if timeout 5 bash -c "echo >/dev/tcp/${SFTP_HOST}/${SFTP_PORT}" 2>/dev/null; then
        echo " ✓ SFTP server ${SFTP_HOST}:${SFTP_PORT} is reachable"
      else
        echo " ⚠️ SFTP server ${SFTP_HOST}:${SFTP_PORT} is not reachable from runner (firewall rule needed)"
      fi
    else
      echo ""
      echo " DEV_FTP_HOST not configured — skipping SFTP connectivity check"
    fi
- name: Generate Firewall Configuration
id: generate
env:
DEV_FTP_HOST: ${{ vars.DEV_FTP_HOST }}
DEV_FTP_PORT: ${{ vars.DEV_FTP_PORT }}
run: |
cat > generate_firewall_config.py << 'PYTHON_EOF'
#!/usr/bin/env python3
"""
Enterprise Firewall Configuration Generator
Generates firewall rules for enterprise-ready deployments allowing
access to trusted domains including license providers, documentation
sources, package registries, and platform-specific sites.
"""
import json
import os
import yaml
import sys
from typing import List, Dict
# SFTP deployment server coordinates, read from org-level CI variables.
# DEV_FTP_HOST may be "host" or "host:port"; DEV_FTP_PORT, when set,
# takes precedence over any port embedded in DEV_FTP_HOST.
_sftp_host_raw = os.environ.get("DEV_FTP_HOST", "").strip()
_sftp_port_env = os.environ.get("DEV_FTP_PORT", "").strip()
# partition() yields "" for the port part when no colon is present.
_sftp_host, _, _embedded_port = _sftp_host_raw.partition(":")
# Precedence: explicit DEV_FTP_PORT > port embedded in host > default 22.
# (The original defaulted the port to "22" *before* checking for an embedded
# port, which made the embedded-port branch unreachable dead code.)
_sftp_port = _sftp_port_env or _embedded_port or "22"
SFTP_HOST = _sftp_host
SFTP_PORT = int(_sftp_port) if _sftp_port.isdigit() else 22
# Trusted domains from .github/copilot.yml
# Keys are category identifiers (snake_case) used for grouping in generated
# rule files and documentation; values are lists of bare hostnames.
# All entries are assumed to be reached over HTTPS/443 except the SFTP
# deployment server injected below.
TRUSTED_DOMAINS = {
    "license_providers": [
        "www.gnu.org",
        "opensource.org",
        "choosealicense.com",
        "spdx.org",
        "creativecommons.org",
        "apache.org",
        "fsf.org",
    ],
    "documentation_standards": [
        "semver.org",
        "keepachangelog.com",
        "conventionalcommits.org",
    ],
    "github_related": [
        "github.com",
        "api.github.com",
        "docs.github.com",
        "raw.githubusercontent.com",
        "ghcr.io",
    ],
    "package_registries": [
        "npmjs.com",
        "registry.npmjs.org",
        "pypi.org",
        "files.pythonhosted.org",
        "packagist.org",
        "repo.packagist.org",
        "rubygems.org",
    ],
    "standards_organizations": [
        "json-schema.org",
        "w3.org",
        "ietf.org",
    ],
    "platform_specific": [
        "joomla.org",
        "downloads.joomla.org",
        "docs.joomla.org",
        "php.net",
        "getcomposer.org",
        "dolibarr.org",
        "wiki.dolibarr.org",
        "docs.dolibarr.org",
    ],
    "moko_consulting": [
        "mokoconsulting.tech",
    ],
    "google_services": [
        "drive.google.com",
        "docs.google.com",
        "sheets.google.com",
        "accounts.google.com",
        "storage.googleapis.com",
        "fonts.googleapis.com",
        "fonts.gstatic.com",
    ],
    "github_extended": [
        "upload.github.com",
        "objects.githubusercontent.com",
        "user-images.githubusercontent.com",
        "codeload.github.com",
        "pkg.github.com",
    ],
    "developer_reference": [
        "developer.mozilla.org",
        "stackoverflow.com",
        "git-scm.com",
    ],
    "cdn_and_infrastructure": [
        "cdn.jsdelivr.net",
        "unpkg.com",
        "cdnjs.cloudflare.com",
        "img.shields.io",
    ],
    "container_registries": [
        "hub.docker.com",
        "registry-1.docker.io",
    ],
    "ci_code_quality": [
        "codecov.io",
        "sonarcloud.io",
    ],
    "terraform_infrastructure": [
        "registry.terraform.io",
        "releases.hashicorp.com",
        "checkpoint-api.hashicorp.com",
    ],
}
# Inject SFTP deployment server as a separate category (port 22, not 443)
# so rule generators can exclude it from the HTTPS-only rule sets.
if SFTP_HOST:
    TRUSTED_DOMAINS["sftp_deployment_server"] = [SFTP_HOST]
    print(f" SFTP deployment server: {SFTP_HOST}:{SFTP_PORT}")
def generate_sftp_iptables_rules(host: str, port: int) -> str:
    """Render the iptables egress rule allowing SFTP traffic to host:port.

    Returns a two-line shell snippet (comment + iptables ACCEPT rule); the
    destination IP is resolved at apply time via ``dig``.
    """
    banner = f"# Allow SFTP to deployment server {host}:{port}\n"
    accept_rule = (
        f"iptables -A OUTPUT -p tcp -d $(dig +short {host} | head -1)"
        f" --dport {port} -j ACCEPT # SFTP deploy\n"
    )
    return banner + accept_rule
def generate_sftp_ufw_rules(host: str, port: int) -> str:
    """Render the UFW egress rule allowing SFTP traffic to host:port."""
    parts = [
        "# Allow SFTP to deployment server\n",
        f"ufw allow out to $(dig +short {host} | head -1)"
        f" port {port} proto tcp comment 'SFTP deploy to {host}'\n",
    ]
    return "".join(parts)
def generate_sftp_firewalld_rules(host: str, port: int) -> str:
    """Render the firewalld rich rule allowing SFTP traffic to host:port."""
    rich_rule = (
        f"rule family=ipv4 destination address=$(dig +short {host} | head -1)"
        f" port port={port} protocol=tcp accept"
    )
    return (
        "# Allow SFTP to deployment server\n"
        f"firewall-cmd --permanent --add-rich-rule='{rich_rule}' # SFTP deploy\n"
    )
def generate_iptables_rules(domains: List[str]) -> str:
    """Build an iptables script allowing outbound HTTPS to each trusted domain.

    One ACCEPT rule per domain (destination resolved via ``dig`` at apply
    time), followed by DNS allow rules. Lines are newline-joined with no
    trailing newline.
    """
    header = [
        "#!/bin/bash",
        "",
        "# Enterprise Firewall Rules - iptables",
        "",
        "# Allow outbound HTTPS to trusted domains",
        "",
    ]
    body = []
    for name in domains:
        body.append(f"# Allow {name}")
        body.append(f"iptables -A OUTPUT -p tcp -d $(dig +short {name} | head -1) --dport 443 -j ACCEPT")
        body.append("")
    footer = [
        "# Allow DNS lookups",
        "iptables -A OUTPUT -p udp --dport 53 -j ACCEPT",
        "iptables -A OUTPUT -p tcp --dport 53 -j ACCEPT",
    ]
    return "\n".join(header + body + footer)
def generate_ufw_rules(domains: List[str]) -> str:
    """Build a UFW script allowing outbound HTTPS to each trusted domain.

    One 'ufw allow out' rule per domain plus DNS (53 udp/tcp) allow rules.
    Lines are newline-joined with no trailing newline.
    """
    script = [
        "#!/bin/bash",
        "",
        "# Enterprise Firewall Rules - UFW",
        "",
        "# Allow outbound HTTPS to trusted domains",
        "",
    ]
    for entry in domains:
        script += [
            f"# Allow {entry}",
            f"ufw allow out to $(dig +short {entry} | head -1) port 443 proto tcp comment 'Allow {entry}'",
            "",
        ]
    script += [
        "# Allow DNS",
        "ufw allow out 53/udp comment 'Allow DNS UDP'",
        "ufw allow out 53/tcp comment 'Allow DNS TCP'",
    ]
    return "\n".join(script)
def generate_firewalld_rules(domains: List[str]) -> str:
    """Build a firewalld script adding an HTTPS rich rule per trusted domain.

    Ends with ``firewall-cmd --reload`` so the permanent rules take effect.
    Lines are newline-joined with no trailing newline.
    """
    script = [
        "#!/bin/bash",
        "",
        "# Enterprise Firewall Rules - firewalld",
        "",
        "# Add trusted domains to firewall",
        "",
    ]
    for entry in domains:
        script.append(f"# Allow {entry}")
        script.append(
            "firewall-cmd --permanent --add-rich-rule="
            f"'rule family=ipv4 destination address=$(dig +short {entry} | head -1) port port=443 protocol=tcp accept'"
        )
        script.append("")
    script.extend(["# Reload firewall", "firewall-cmd --reload"])
    return "\n".join(script)
def generate_aws_security_group(domains: List[str]) -> Dict:
    """Build an AWS Security Group egress rule set as a JSON-serialisable dict.

    One HTTPS (443/tcp) rule per domain plus a single DNS (53/udp) rule.
    CidrIp is left wide open as a placeholder; in practice each domain
    should be resolved to specific IPs.
    """
    egress = [
        {
            "Description": f"Allow HTTPS to {name}",
            "IpProtocol": "tcp",
            "FromPort": 443,
            "ToPort": 443,
            "CidrIp": "0.0.0.0/0",  # In practice, resolve to specific IPs
            "Tags": [{"Key": "Domain", "Value": name}],
        }
        for name in domains
    ]
    egress.append(
        {
            "Description": "Allow DNS",
            "IpProtocol": "udp",
            "FromPort": 53,
            "ToPort": 53,
            "CidrIp": "0.0.0.0/0",
        }
    )
    return {"SecurityGroupRules": {"Egress": egress}}
def generate_markdown_documentation(domains_by_category: Dict[str, List[str]]) -> str:
    """Render the firewall configuration guide as a Markdown document.

    Args:
        domains_by_category: mapping of snake_case category key to the list
            of trusted domain names in that category.

    Returns:
        The complete Markdown document as one newline-joined string.
    """
    md = ["# Enterprise Firewall Configuration Guide", ""]
    md.append("## Overview")
    md.append("")
    md.append("This document provides firewall configuration guidance for enterprise-ready deployments.")
    md.append("It lists trusted domains that should be whitelisted for outbound access to ensure")
    md.append("proper functionality of license validation, package management, and documentation access.")
    md.append("")
    md.append("## Trusted Domains by Category")
    md.append("")
    all_domains = []
    for category, domains in domains_by_category.items():
        category_name = category.replace("_", " ").title()
        md.append(f"### {category_name}")
        md.append("")
        md.append("| Domain | Purpose |")
        md.append("|--------|---------|")
        for domain in domains:
            all_domains.append(domain)
            purpose = get_domain_purpose(domain)
            md.append(f"| `{domain}` | {purpose} |")
        md.append("")
    # FIX: the examples below previously used all_domains[0] unconditionally,
    # which raised IndexError when domains_by_category was empty. Fall back
    # to a placeholder so an empty mapping still renders a valid document.
    example_domain = all_domains[0] if all_domains else "example.com"
    md.append("## Implementation Examples")
    md.append("")
    md.append("### iptables Example")
    md.append("")
    md.append("```bash")
    md.append("# Allow HTTPS to trusted domain")
    md.append(f"iptables -A OUTPUT -p tcp -d $(dig +short {example_domain}) --dport 443 -j ACCEPT")
    md.append("```")
    md.append("")
    md.append("### UFW Example")
    md.append("")
    md.append("```bash")
    md.append("# Allow HTTPS to trusted domain")
    md.append(f"ufw allow out to {example_domain} port 443 proto tcp")
    md.append("```")
    md.append("")
    md.append("### AWS Security Group Example")
    md.append("")
    md.append("```json")
    md.append("{")
    md.append('  "IpPermissions": [{')
    md.append('  "IpProtocol": "tcp",')
    md.append('  "FromPort": 443,')
    md.append('  "ToPort": 443,')
    md.append('  "IpRanges": [{"CidrIp": "0.0.0.0/0", "Description": "HTTPS to trusted domains"}]')
    md.append("  }]")
    md.append("}")
    md.append("```")
    md.append("")
    md.append("## Ports Required")
    md.append("")
    md.append("| Port | Protocol | Purpose |")
    md.append("|------|----------|---------|")
    md.append("| 443 | TCP | HTTPS (secure web access) |")
    md.append("| 80 | TCP | HTTP (redirects to HTTPS) |")
    md.append("| 53 | UDP/TCP | DNS resolution |")
    md.append("")
    md.append("## Security Considerations")
    md.append("")
    md.append("1. **DNS Resolution**: Ensure DNS queries are allowed (port 53 UDP/TCP)")
    md.append("2. **Certificate Validation**: HTTPS requires ability to reach certificate authorities")
    md.append("3. **Dynamic IPs**: Some domains use CDNs with dynamic IPs - consider using FQDNs in rules")
    md.append("4. **Regular Updates**: Review and update whitelist as services change")
    md.append("5. **Logging**: Enable logging for blocked connections to identify missing rules")
    md.append("")
    md.append("## Compliance Notes")
    md.append("")
    md.append("- All listed domains provide read-only access to public information")
    md.append("- License providers enable GPL compliance verification")
    md.append("- Package registries support dependency security scanning")
    md.append("- No authentication credentials are transmitted to these domains")
    md.append("")
    return "\n".join(md)
def get_domain_purpose(domain: str) -> str:
    """Map a domain name to a short human-readable description.

    Domains without a specific entry fall back to the generic label
    "Trusted resource".
    """
    fallback = "Trusted resource"
    known = {
        "www.gnu.org": "GNU licenses and documentation",
        "opensource.org": "Open Source Initiative resources",
        "choosealicense.com": "GitHub license selection tool",
        "spdx.org": "Software Package Data Exchange identifiers",
        "creativecommons.org": "Creative Commons licenses",
        "apache.org": "Apache Software Foundation licenses",
        "fsf.org": "Free Software Foundation resources",
        "semver.org": "Semantic versioning specification",
        "keepachangelog.com": "Changelog format standards",
        "conventionalcommits.org": "Commit message conventions",
        "github.com": "GitHub platform access",
        "api.github.com": "GitHub API access",
        "docs.github.com": "GitHub documentation",
        "raw.githubusercontent.com": "GitHub raw content access",
        "npmjs.com": "npm package registry",
        "pypi.org": "Python Package Index",
        "packagist.org": "PHP Composer package registry",
        "rubygems.org": "Ruby gems registry",
        "joomla.org": "Joomla CMS platform",
        "php.net": "PHP documentation and downloads",
        "dolibarr.org": "Dolibarr ERP/CRM platform",
    }
    return known.get(domain, fallback)
def main():
    """Generate firewall rule artifacts for the selected type/format.

    Writes rule scripts, domain lists, and Markdown docs into the current
    working directory; file names are fixed and picked up by the
    upload-artifact step.
    """
    # Use inputs if provided (manual dispatch), otherwise use defaults (auto-run)
    # NOTE(review): the ${{ ... }} expressions below are substituted into this
    # source by the Actions runner *before* Python executes, so a crafted
    # workflow_dispatch input could inject into this script. Consider passing
    # the inputs via environment variables instead — confirm before changing.
    firewall_type = "${{ github.event.inputs.firewall_type }}" or "all"
    output_format = "${{ github.event.inputs.output_format }}" or "markdown"
    print(f"Running in {'manual' if '${{ github.event.inputs.firewall_type }}' else 'automatic'} mode")
    print(f"Firewall type: {firewall_type}")
    print(f"Output format: {output_format}")
    print("")
    # Collect all domains
    all_domains = []
    for domains in TRUSTED_DOMAINS.values():
        all_domains.extend(domains)
    # Remove duplicates and sort
    all_domains = sorted(set(all_domains))
    print(f"Generating firewall rules for {len(all_domains)} trusted domains...")
    print("")
    # Exclude SFTP server from HTTPS rule generation (different port)
    https_domains = [d for d in all_domains if d != SFTP_HOST]
    # Generate based on firewall type; each branch appends the SFTP egress
    # rule (port SFTP_PORT, not 443) when a deployment host is configured.
    if firewall_type in ["iptables", "all"]:
        rules = generate_iptables_rules(https_domains)
        if SFTP_HOST:
            rules += "\n# ── SFTP Deployment Server ──────────────────────────────\n"
            rules += generate_sftp_iptables_rules(SFTP_HOST, SFTP_PORT)
        with open("firewall-rules-iptables.sh", "w") as f:
            f.write(rules)
        print("✓ Generated iptables rules: firewall-rules-iptables.sh")
    if firewall_type in ["ufw", "all"]:
        rules = generate_ufw_rules(https_domains)
        if SFTP_HOST:
            rules += "\n# ── SFTP Deployment Server ──────────────────────────────\n"
            rules += generate_sftp_ufw_rules(SFTP_HOST, SFTP_PORT)
        with open("firewall-rules-ufw.sh", "w") as f:
            f.write(rules)
        print("✓ Generated UFW rules: firewall-rules-ufw.sh")
    if firewall_type in ["firewalld", "all"]:
        rules = generate_firewalld_rules(https_domains)
        if SFTP_HOST:
            rules += "\n# ── SFTP Deployment Server ──────────────────────────────\n"
            rules += generate_sftp_firewalld_rules(SFTP_HOST, SFTP_PORT)
        with open("firewall-rules-firewalld.sh", "w") as f:
            f.write(rules)
        print("✓ Generated firewalld rules: firewall-rules-firewalld.sh")
    if firewall_type in ["aws-security-group", "all"]:
        # Note: the AWS rule set is built from all_domains (SFTP host included).
        rules = generate_aws_security_group(all_domains)
        with open("firewall-rules-aws-sg.json", "w") as f:
            json.dump(rules, f, indent=2)
        print("✓ Generated AWS Security Group rules: firewall-rules-aws-sg.json")
    if output_format in ["yaml", "all"]:
        with open("trusted-domains.yml", "w") as f:
            yaml.dump(TRUSTED_DOMAINS, f, default_flow_style=False)
        print("✓ Generated YAML domain list: trusted-domains.yml")
    if output_format in ["json", "all"]:
        with open("trusted-domains.json", "w") as f:
            json.dump(TRUSTED_DOMAINS, f, indent=2)
        print("✓ Generated JSON domain list: trusted-domains.json")
    if output_format in ["markdown", "all"]:
        md = generate_markdown_documentation(TRUSTED_DOMAINS)
        with open("FIREWALL_CONFIGURATION.md", "w") as f:
            f.write(md)
        print("✓ Generated documentation: FIREWALL_CONFIGURATION.md")
    # Final per-category summary for the workflow log.
    print("")
    print("Domain Categories:")
    for category, domains in TRUSTED_DOMAINS.items():
        print(f" - {category}: {len(domains)} domains")
    print("")
    print("Total unique domains: ", len(all_domains))
if __name__ == "__main__":
    main()
PYTHON_EOF
# Make the generated script executable, install its only third-party
# dependency (PyYAML, used for the trusted-domains.yml output), and run it.
chmod +x generate_firewall_config.py
pip install PyYAML
python3 generate_firewall_config.py
- name: Upload Firewall Configuration Artifacts
  uses: actions/upload-artifact@v6
  with:
    name: firewall-configurations
    # Globs cover every file the generator step may emit; which ones exist
    # depends on the selected firewall_type / output_format inputs.
    path: |
      firewall-rules-*.sh
      firewall-rules-*.json
      trusted-domains.*
      FIREWALL_CONFIGURATION.md
    retention-days: 90
- name: Display Summary
  run: |
    # NOTE(review): $GITHUB_STEP_SUMMARY is unquoted throughout this step;
    # works because the runner path has no spaces, but quoting would be safer.
    echo "## Firewall Configuration" >> $GITHUB_STEP_SUMMARY
    echo "" >> $GITHUB_STEP_SUMMARY
    # Manual dispatch vs automatic (coding-agent) run banner.
    if [ "${{ github.event_name }}" = "workflow_dispatch" ]; then
      echo "**Mode**: Manual Execution" >> $GITHUB_STEP_SUMMARY
      echo "" >> $GITHUB_STEP_SUMMARY
      echo "Firewall rules have been generated for enterprise-ready deployments." >> $GITHUB_STEP_SUMMARY
    else
      echo "**Mode**: Automatic Execution (Coding Agent Active)" >> $GITHUB_STEP_SUMMARY
      echo "" >> $GITHUB_STEP_SUMMARY
      echo "This workflow ran automatically because a coding agent (GitHub Copilot) is active." >> $GITHUB_STEP_SUMMARY
      echo "Firewall configuration has been validated for the coding agent environment." >> $GITHUB_STEP_SUMMARY
    fi
    echo "" >> $GITHUB_STEP_SUMMARY
    echo "### Files Generated" >> $GITHUB_STEP_SUMMARY
    echo "" >> $GITHUB_STEP_SUMMARY
    # List generated files (if any) as markdown bullets: "- name (size)".
    if ls firewall-rules-* trusted-domains.* FIREWALL_CONFIGURATION.md 2>/dev/null; then
      ls -lh firewall-rules-* trusted-domains.* FIREWALL_CONFIGURATION.md 2>/dev/null | awk '{print "- " $9 " (" $5 ")"}' >> $GITHUB_STEP_SUMMARY
    else
      echo "- Documentation generated" >> $GITHUB_STEP_SUMMARY
    fi
    echo "" >> $GITHUB_STEP_SUMMARY
    if [ "${{ github.event_name }}" = "workflow_dispatch" ]; then
      echo "### Download Artifacts" >> $GITHUB_STEP_SUMMARY
      echo "" >> $GITHUB_STEP_SUMMARY
      echo "Download the generated firewall configurations from the workflow artifacts." >> $GITHUB_STEP_SUMMARY
    else
      echo "### Trusted Domains Active" >> $GITHUB_STEP_SUMMARY
      echo "" >> $GITHUB_STEP_SUMMARY
      echo "The coding agent has access to:" >> $GITHUB_STEP_SUMMARY
      echo "- License providers (GPL, OSI, SPDX, Apache, etc.)" >> $GITHUB_STEP_SUMMARY
      echo "- Package registries (npm, PyPI, Packagist, RubyGems)" >> $GITHUB_STEP_SUMMARY
      echo "- Documentation sources (GitHub, Joomla, Dolibarr, PHP)" >> $GITHUB_STEP_SUMMARY
      echo "- Standards organizations (W3C, IETF, JSON Schema)" >> $GITHUB_STEP_SUMMARY
    fi
# Usage Instructions:
#
# This workflow runs in two modes:
#
# 1. AUTOMATIC MODE (Coding Agent):
# - Triggers when coding agent branches (copilot/**, agent/**) are pushed or PR'd
# - Validates firewall configuration for the coding agent environment
# - Documents accessible domains for compliance
# - Ensures license sources and package registries are available
#
# 2. MANUAL MODE (Enterprise Configuration):
# - Manually trigger from the Actions tab
# - Select desired firewall type and output format
# - Download generated artifacts
# - Apply firewall rules to your enterprise environment
#
# Configuration:
# - Trusted domains are sourced from .github/copilot.yml
# - Modify copilot.yml to add/remove trusted domains
# - Changes automatically propagate to firewall rules
#
# Important Notes:
# - Review generated rules before applying to production
# - Some domains may use CDNs with dynamic IPs
# - Consider using FQDN-based rules where supported
# - Test thoroughly in staging environment first
# - Monitor logs for blocked connections
# - Update rules as domains/services change

521
.github/workflows/repository-cleanup.yml vendored Normal file
View File

@@ -0,0 +1,521 @@
# Copyright (C) 2026 Moko Consulting <hello@mokoconsulting.tech>
#
# This file is part of a Moko Consulting project.
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
# FILE INFORMATION
# DEFGROUP: GitHub.Workflow
# INGROUP: MokoStandards.Maintenance
# REPO: https://github.com/mokoconsulting-tech/MokoStandards
# PATH: /templates/workflows/shared/repository-cleanup.yml.template
# VERSION: 04.04.01
# BRIEF: Recurring repository maintenance — labels, branches, workflows, logs, doc indexes
# NOTE: Synced via bulk-repo-sync to .github/workflows/repository-cleanup.yml in all governed repos.
# Runs on the 1st and 15th of each month at 6:00 AM UTC, and on manual dispatch.
name: Repository Cleanup
on:
  schedule:
    - cron: '0 6 1,15 * *'  # 06:00 UTC on the 1st and 15th of each month
  workflow_dispatch:
    inputs:
      # Destructive: only honored on manual dispatch (schedule forces false).
      reset_labels:
        description: 'Delete ALL existing labels and recreate the standard set'
        type: boolean
        default: false
      clean_branches:
        description: 'Delete old chore/sync-mokostandards-* branches'
        type: boolean
        default: true
      clean_workflows:
        description: 'Delete orphaned workflow runs (cancelled, stale)'
        type: boolean
        default: true
      clean_logs:
        description: 'Delete workflow run logs older than 30 days'
        type: boolean
        default: true
      fix_templates:
        description: 'Strip copyright comment blocks from issue templates'
        type: boolean
        default: true
      rebuild_indexes:
        description: 'Rebuild docs/ index files'
        type: boolean
        default: true
      # Destructive: only honored on manual dispatch (schedule forces false).
      delete_closed_issues:
        description: 'Delete issues that have been closed for more than 30 days'
        type: boolean
        default: false
env:
  FORCE_JAVASCRIPT_ACTIONS_TO_NODE24: true
# Write access needed: commits retired-workflow deletions, edits issues,
# and deletes workflow runs/logs.
permissions:
  contents: write
  issues: write
  actions: write
jobs:
  cleanup:
    name: Repository Maintenance
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repository
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
        with:
          token: ${{ secrets.GH_TOKEN || github.token }}
          fetch-depth: 0  # full history; later steps commit and push
      - name: Check actor permission
        env:
          GH_TOKEN: ${{ secrets.GH_TOKEN || github.token }}
        run: |
          ACTOR="${{ github.actor }}"
          # Schedule triggers use github-actions[bot]
          if [ "${{ github.event_name }}" = "schedule" ]; then
            echo "✅ Scheduled run — authorized"
            exit 0
          fi
          # Short allow-list of always-authorized actors.
          AUTHORIZED_USERS="jmiller-moko github-actions[bot]"
          for user in $AUTHORIZED_USERS; do
            if [ "$ACTOR" = "$user" ]; then
              echo "✅ ${ACTOR} authorized"
              exit 0
            fi
          done
          # Anyone else must have admin or maintain collaborator permission.
          PERMISSION=$(gh api "repos/${{ github.repository }}/collaborators/${ACTOR}/permission" \
            --jq '.permission' 2>/dev/null)
          case "$PERMISSION" in
            admin|maintain) echo "✅ ${ACTOR} has ${PERMISSION}" ;;
            *) echo "❌ Admin or maintain required"; exit 1 ;;
          esac
# ── Determine which tasks to run ─────────────────────────────────────
# On schedule: run all tasks with safe defaults (labels NOT reset)
# On dispatch: use input toggles
- name: Set task flags
  id: tasks
  run: |
    if [ "${{ github.event_name }}" = "schedule" ]; then
      # Scheduled runs enable every routine task; the destructive toggles
      # (reset_labels, delete_closed_issues) stay off.
      echo "reset_labels=false" >> $GITHUB_OUTPUT
      echo "clean_branches=true" >> $GITHUB_OUTPUT
      echo "clean_workflows=true" >> $GITHUB_OUTPUT
      echo "clean_logs=true" >> $GITHUB_OUTPUT
      echo "fix_templates=true" >> $GITHUB_OUTPUT
      echo "rebuild_indexes=true" >> $GITHUB_OUTPUT
      echo "delete_closed_issues=false" >> $GITHUB_OUTPUT
    else
      # Manual dispatch: forward each boolean input verbatim.
      echo "reset_labels=${{ inputs.reset_labels }}" >> $GITHUB_OUTPUT
      echo "clean_branches=${{ inputs.clean_branches }}" >> $GITHUB_OUTPUT
      echo "clean_workflows=${{ inputs.clean_workflows }}" >> $GITHUB_OUTPUT
      echo "clean_logs=${{ inputs.clean_logs }}" >> $GITHUB_OUTPUT
      echo "fix_templates=${{ inputs.fix_templates }}" >> $GITHUB_OUTPUT
      echo "rebuild_indexes=${{ inputs.rebuild_indexes }}" >> $GITHUB_OUTPUT
      echo "delete_closed_issues=${{ inputs.delete_closed_issues }}" >> $GITHUB_OUTPUT
    fi
# ── DELETE RETIRED WORKFLOWS (always runs) ────────────────────────────
- name: Delete retired workflow files
  run: |
    echo "## 🗑️ Retired Workflow Cleanup" >> $GITHUB_STEP_SUMMARY
    echo "" >> $GITHUB_STEP_SUMMARY
    # Files retired by MokoStandards (includes one stray update.json).
    RETIRED=(
      ".github/workflows/build.yml"
      ".github/workflows/code-quality.yml"
      ".github/workflows/release-cycle.yml"
      ".github/workflows/release-pipeline.yml"
      ".github/workflows/branch-cleanup.yml"
      ".github/workflows/auto-update-changelog.yml"
      ".github/workflows/enterprise-issue-manager.yml"
      ".github/workflows/flush-actions-cache.yml"
      ".github/workflows/mokostandards-script-runner.yml"
      ".github/workflows/unified-ci.yml"
      ".github/workflows/unified-platform-testing.yml"
      ".github/workflows/reusable-build.yml"
      ".github/workflows/reusable-ci-validation.yml"
      ".github/workflows/reusable-deploy.yml"
      ".github/workflows/reusable-php-quality.yml"
      ".github/workflows/reusable-platform-testing.yml"
      ".github/workflows/reusable-project-detector.yml"
      ".github/workflows/reusable-release.yml"
      ".github/workflows/reusable-script-executor.yml"
      ".github/workflows/rebuild-docs-indexes.yml"
      ".github/workflows/setup-project-v2.yml"
      ".github/workflows/sync-docs-to-project.yml"
      ".github/workflows/release.yml"
      ".github/workflows/sync-changelogs.yml"
      ".github/workflows/version_branch.yml"
      "update.json"
      ".github/workflows/auto-version-branch.yml"
      ".github/workflows/publish-to-mokodolibarr.yml"
      ".github/workflows/ci.yml"
    )
    DELETED=0
    for wf in "${RETIRED[@]}"; do
      if [ -f "$wf" ]; then
        # Prefer git rm so the deletion is staged; fall back to plain rm
        # for files that are present but untracked.
        git rm "$wf" 2>/dev/null || rm -f "$wf"
        echo " Deleted: \`$(basename $wf)\`" >> $GITHUB_STEP_SUMMARY
        DELETED=$((DELETED+1))
      fi
    done
    # Only commit/push when something was actually removed.
    if [ "$DELETED" -gt 0 ]; then
      git config --local user.email "github-actions[bot]@users.noreply.github.com"
      git config --local user.name "github-actions[bot]"
      git add -A
      git commit -m "chore: delete ${DELETED} retired workflow file(s) [skip ci]" \
        --author="github-actions[bot] <github-actions[bot]@users.noreply.github.com>"
      git push
      echo "✅ ${DELETED} retired workflow(s) deleted" >> $GITHUB_STEP_SUMMARY
    else
      echo "✅ No retired workflows found" >> $GITHUB_STEP_SUMMARY
    fi
# ── LABEL RESET ──────────────────────────────────────────────────────
- name: Reset labels to standard set
  if: steps.tasks.outputs.reset_labels == 'true'
  env:
    GH_TOKEN: ${{ secrets.GH_TOKEN || github.token }}
  run: |
    REPO="${{ github.repository }}"
    echo "## 🏷️ Label Reset" >> $GITHUB_STEP_SUMMARY
    echo "" >> $GITHUB_STEP_SUMMARY
    # Delete every existing label. URL-encode each name with jq instead of
    # interpolating "$label" into a python3 -c one-liner: a label containing
    # a single quote previously broke (or injected into) the Python snippet.
    gh api "repos/${REPO}/labels?per_page=100" --paginate --jq '.[].name' | while IFS= read -r label; do
      ENCODED=$(printf '%s' "$label" | jq -sRr '@uri')
      gh api -X DELETE "repos/${REPO}/labels/${ENCODED}" --silent 2>/dev/null || true
    done
    # Recreate the standard label set from the pipe-delimited list below
    # (name|color|description). Creation failures are tolerated so a rerun
    # after a partial reset stays idempotent.
    while IFS='|' read -r name color description; do
      [ -z "$name" ] && continue
      gh api "repos/${REPO}/labels" \
        -f name="$name" -f color="$color" -f description="$description" \
        --silent 2>/dev/null || true
    done << 'LABELS'
    joomla|7F52FF|Joomla extension or component
    dolibarr|FF6B6B|Dolibarr module or extension
    generic|808080|Generic project or library
    php|4F5D95|PHP code changes
    javascript|F7DF1E|JavaScript code changes
    typescript|3178C6|TypeScript code changes
    python|3776AB|Python code changes
    css|1572B6|CSS/styling changes
    html|E34F26|HTML template changes
    documentation|0075CA|Documentation changes
    ci-cd|000000|CI/CD pipeline changes
    docker|2496ED|Docker configuration changes
    tests|00FF00|Test suite changes
    security|FF0000|Security-related changes
    dependencies|0366D6|Dependency updates
    config|F9D0C4|Configuration file changes
    build|FFA500|Build system changes
    automation|8B4513|Automated processes or scripts
    mokostandards|B60205|MokoStandards compliance
    needs-review|FBCA04|Awaiting code review
    work-in-progress|D93F0B|Work in progress, not ready for merge
    breaking-change|D73A4A|Breaking API or functionality change
    priority: critical|B60205|Critical priority, must be addressed immediately
    priority: high|D93F0B|High priority
    priority: medium|FBCA04|Medium priority
    priority: low|0E8A16|Low priority
    type: bug|D73A4A|Something isn't working
    type: feature|A2EEEF|New feature or request
    type: enhancement|84B6EB|Enhancement to existing feature
    type: refactor|F9D0C4|Code refactoring
    type: chore|FEF2C0|Maintenance tasks
    type: version|0E8A16|Version-related change
    status: pending|FBCA04|Pending action or decision
    status: in-progress|0E8A16|Currently being worked on
    status: blocked|B60205|Blocked by another issue or dependency
    status: on-hold|D4C5F9|Temporarily on hold
    status: wontfix|FFFFFF|This will not be worked on
    size/xs|C5DEF5|Extra small change (1-10 lines)
    size/s|6FD1E2|Small change (11-30 lines)
    size/m|F9DD72|Medium change (31-100 lines)
    size/l|FFA07A|Large change (101-300 lines)
    size/xl|FF6B6B|Extra large change (301-1000 lines)
    size/xxl|B60205|Extremely large change (1000+ lines)
    health: excellent|0E8A16|Health score 90-100
    health: good|FBCA04|Health score 70-89
    health: fair|FFA500|Health score 50-69
    health: poor|FF6B6B|Health score below 50
    standards-update|B60205|MokoStandards sync update
    standards-drift|FBCA04|Repository drifted from MokoStandards
    sync-report|0075CA|Bulk sync run report
    sync-failure|D73A4A|Bulk sync failure requiring attention
    push-failure|D73A4A|File push failure requiring attention
    health-check|0E8A16|Repository health check results
    version-drift|FFA500|Version mismatch detected
    deploy-failure|CC0000|Automated deploy failure tracking
    template-validation-failure|D73A4A|Template workflow validation failure
    version|0E8A16|Version bump or release
    LABELS
    echo "✅ Standard labels created" >> $GITHUB_STEP_SUMMARY
# ── BRANCH CLEANUP ───────────────────────────────────────────────────
# Removes stale chore/sync-mokostandards-* branches (and closes their
# open PRs) that were superseded by the current sync branch.
- name: Delete old sync branches
  if: steps.tasks.outputs.clean_branches == 'true'
  env:
    GH_TOKEN: ${{ secrets.GH_TOKEN || github.token }}
  run: |
    REPO="${{ github.repository }}"
    CURRENT="chore/sync-mokostandards-v04.04"
    echo "## 🌿 Branch Cleanup" >> $GITHUB_STEP_SUMMARY
    echo "" >> $GITHUB_STEP_SUMMARY
    FOUND=false
    # Feed the loop via process substitution, NOT a pipeline: a pipeline
    # runs the while-loop in a subshell, so FOUND=true would be lost and
    # the check below would always report "no branches found".
    while read -r branch; do
      [ -n "$branch" ] || continue
      # Close any open PR from the stale branch before deleting the ref.
      while read -r pr; do
        [ -n "$pr" ] || continue
        gh pr close "$pr" --repo "$REPO" --comment "Superseded by \`${CURRENT}\`" 2>/dev/null || true
        echo "  Closed PR #${pr}" >> $GITHUB_STEP_SUMMARY
      done < <(gh pr list --repo "$REPO" --head "$branch" --state open --json number --jq '.[].number' 2>/dev/null)
      gh api -X DELETE "repos/${REPO}/git/refs/heads/${branch}" --silent 2>/dev/null || true
      echo "  Deleted: \`${branch}\`" >> $GITHUB_STEP_SUMMARY
      FOUND=true
    done < <(gh api "repos/${REPO}/branches?per_page=100" --jq '.[].name' \
      | grep "^chore/sync-mokostandards" | grep -v "^${CURRENT}$" || true)
    if [ "$FOUND" != "true" ]; then
      echo "✅ No old sync branches found" >> $GITHUB_STEP_SUMMARY
    fi
# ── WORKFLOW RUN CLEANUP ─────────────────────────────────────────────
# Deletes cancelled and stale Actions runs to reduce clutter.
- name: Clean up workflow runs
  if: steps.tasks.outputs.clean_workflows == 'true'
  env:
    GH_TOKEN: ${{ secrets.GH_TOKEN || github.token }}
  run: |
    REPO="${{ github.repository }}"
    echo "## 🔄 Workflow Run Cleanup" >> $GITHUB_STEP_SUMMARY
    echo "" >> $GITHUB_STEP_SUMMARY
    DELETED=0
    # Process substitution keeps DELETED in the parent shell; the old
    # pipeline-into-while form incremented it in a subshell, so the
    # count was silently discarded.
    for status in cancelled stale; do
      while read -r run_id; do
        [ -n "$run_id" ] || continue
        gh api -X DELETE "repos/${REPO}/actions/runs/${run_id}" --silent 2>/dev/null || true
        DELETED=$((DELETED+1))
      done < <(gh api "repos/${REPO}/actions/runs?status=${status}&per_page=100" \
        --jq '.workflow_runs[].id' 2>/dev/null)
    done
    echo "✅ Cleaned ${DELETED} cancelled/stale workflow run(s)" >> $GITHUB_STEP_SUMMARY
# ── LOG CLEANUP ──────────────────────────────────────────────────────
# Deletes logs for workflow runs created more than 30 days ago.
- name: Delete old workflow run logs
  if: steps.tasks.outputs.clean_logs == 'true'
  env:
    GH_TOKEN: ${{ secrets.GH_TOKEN || github.token }}
  run: |
    REPO="${{ github.repository }}"
    # GNU date first (ubuntu runners); BSD date fallback for portability.
    CUTOFF=$(date -u -d '30 days ago' +%Y-%m-%dT%H:%M:%SZ 2>/dev/null || date -u -v-30d +%Y-%m-%dT%H:%M:%SZ)
    echo "## 📋 Log Cleanup" >> $GITHUB_STEP_SUMMARY
    echo "" >> $GITHUB_STEP_SUMMARY
    echo "Deleting logs older than: ${CUTOFF}" >> $GITHUB_STEP_SUMMARY
    DELETED=0
    # Process substitution (not a pipeline) so DELETED survives the loop;
    # the previous pipeline form lost the count in a subshell.
    while read -r run_id; do
      [ -n "$run_id" ] || continue
      gh api -X DELETE "repos/${REPO}/actions/runs/${run_id}/logs" --silent 2>/dev/null || true
      DELETED=$((DELETED+1))
    done < <(gh api "repos/${REPO}/actions/runs?created=<${CUTOFF}&per_page=100" \
      --jq '.workflow_runs[].id' 2>/dev/null)
    echo "✅ Cleaned logs for ${DELETED} old workflow run(s)" >> $GITHUB_STEP_SUMMARY
# ── ISSUE TEMPLATE FIX ──────────────────────────────────────────────
# GitHub renders leading HTML comment blocks in issue templates; strip
# the copyright comment block (<!-- ... -->) from each template and
# commit the result.
- name: Strip copyright headers from issue templates
  if: steps.tasks.outputs.fix_templates == 'true'
  run: |
    echo "## 📋 Issue Template Cleanup" >> $GITHUB_STEP_SUMMARY
    echo "" >> $GITHUB_STEP_SUMMARY
    FIXED=0
    # Plain for-loop (no pipeline), so FIXED persists after the loop.
    for f in .github/ISSUE_TEMPLATE/*.md; do
      [ -f "$f" ] || continue
      if grep -q '^<!--$' "$f"; then
        # Delete everything from the opening <!-- line through -->.
        sed -i '/^<!--$/,/^-->$/d' "$f"
        # Quote the command substitution argument — unquoted $f would
        # word-split on paths containing spaces.
        echo "  Cleaned: \`$(basename "$f")\`" >> $GITHUB_STEP_SUMMARY
        FIXED=$((FIXED+1))
      fi
    done
    if [ "$FIXED" -gt 0 ]; then
      git config --local user.email "github-actions[bot]@users.noreply.github.com"
      git config --local user.name "github-actions[bot]"
      git add .github/ISSUE_TEMPLATE/
      git commit -m "fix: strip copyright comment blocks from issue templates [skip ci]" \
        --author="github-actions[bot] <github-actions[bot]@users.noreply.github.com>"
      git push
      echo "✅ ${FIXED} template(s) cleaned and committed" >> $GITHUB_STEP_SUMMARY
    else
      echo "✅ No templates need cleaning" >> $GITHUB_STEP_SUMMARY
    fi
# ── REBUILD DOC INDEXES ─────────────────────────────────────────────
# Regenerates an index.md in every docs/ subdirectory that contains
# markdown files, then commits the result if anything changed.
- name: Rebuild docs/ index files
  if: steps.tasks.outputs.rebuild_indexes == 'true'
  run: |
    echo "## 📚 Documentation Index Rebuild" >> $GITHUB_STEP_SUMMARY
    echo "" >> $GITHUB_STEP_SUMMARY
    if [ ! -d "docs" ]; then
      echo "⏭️ No docs/ directory — skipping" >> $GITHUB_STEP_SUMMARY
      exit 0
    fi
    UPDATED=0
    # Process substitution keeps UPDATED in the parent shell. The old
    # `find | while` pipeline incremented it in a subshell, so the
    # commit branch below NEVER ran and rebuilt indexes were never
    # pushed.
    while read -r dir; do
      INDEX="${dir}/index.md"
      FILES=$(find "$dir" -maxdepth 1 -name "*.md" ! -name "index.md" -printf "- [%f](./%f)\n" 2>/dev/null | sort)
      [ -n "$FILES" ] || continue
      # Single printf replaces the old heredoc + sed-dedent pair.
      printf '# %s\n\n## Documents\n\n%s\n\n---\n\n*Auto-generated by repository-cleanup workflow*\n' \
        "$(basename "$dir")" "$FILES" > "$INDEX"
      UPDATED=$((UPDATED+1))
    done < <(find docs -type d)
    if [ "$UPDATED" -gt 0 ]; then
      git config --local user.email "github-actions[bot]@users.noreply.github.com"
      git config --local user.name "github-actions[bot]"
      git add docs/
      # Only commit if the regenerated indexes actually differ.
      if ! git diff --cached --quiet; then
        git commit -m "docs: rebuild documentation indexes [skip ci]" \
          --author="github-actions[bot] <github-actions[bot]@users.noreply.github.com>"
        git push
        echo "✅ ${UPDATED} index file(s) rebuilt and committed" >> $GITHUB_STEP_SUMMARY
      else
        echo "✅ All indexes already up to date" >> $GITHUB_STEP_SUMMARY
      fi
    else
      echo "✅ No indexes to rebuild" >> $GITHUB_STEP_SUMMARY
    fi
# ── VERSION DRIFT DETECTION ──────────────────────────────────────────
# Compares the VERSION in each file's FILE INFORMATION header against
# the canonical version declared in README.md; report-only (no writes).
- name: Check for version drift
  run: |
    echo "## 📦 Version Drift Check" >> $GITHUB_STEP_SUMMARY
    echo "" >> $GITHUB_STEP_SUMMARY
    if [ ! -f "README.md" ]; then
      echo "⏭️ No README.md — skipping" >> $GITHUB_STEP_SUMMARY
      exit 0
    fi
    # README.md is the single source of truth; versions use NN.NN.NN.
    README_VERSION=$(grep -oP '^\s*VERSION:\s*\K[0-9]{2}\.[0-9]{2}\.[0-9]{2}' README.md 2>/dev/null | head -1)
    if [ -z "$README_VERSION" ]; then
      echo "⚠️ No VERSION found in README.md FILE INFORMATION block" >> $GITHUB_STEP_SUMMARY
      exit 0
    fi
    echo "**README version:** \`${README_VERSION}\`" >> $GITHUB_STEP_SUMMARY
    echo "" >> $GITHUB_STEP_SUMMARY
    DRIFT=0
    CHECKED=0
    # Check all files with FILE INFORMATION blocks
    # NOTE: process substitution (done < <(find …)) is deliberate — it
    # keeps DRIFT/CHECKED in the parent shell, unlike a pipeline.
    while IFS= read -r -d '' file; do
      # Optional "*" prefix accommodates versions inside comment blocks.
      FILE_VERSION=$(grep -oP '^\s*\*?\s*VERSION:\s*\K[0-9]{2}\.[0-9]{2}\.[0-9]{2}' "$file" 2>/dev/null | head -1)
      [ -z "$FILE_VERSION" ] && continue
      CHECKED=$((CHECKED+1))
      if [ "$FILE_VERSION" != "$README_VERSION" ]; then
        echo "  ⚠️ \`${file}\`: \`${FILE_VERSION}\` (expected \`${README_VERSION}\`)" >> $GITHUB_STEP_SUMMARY
        DRIFT=$((DRIFT+1))
      fi
    done < <(find . -maxdepth 4 -type f \( -name "*.php" -o -name "*.md" -o -name "*.yml" \) ! -path "./.git/*" ! -path "./vendor/*" ! -path "./node_modules/*" -print0 2>/dev/null)
    echo "" >> $GITHUB_STEP_SUMMARY
    if [ "$DRIFT" -gt 0 ]; then
      echo "⚠️ **${DRIFT}** file(s) out of ${CHECKED} have version drift" >> $GITHUB_STEP_SUMMARY
      echo "Run \`sync-version-on-merge\` workflow or update manually" >> $GITHUB_STEP_SUMMARY
    else
      echo "✅ All ${CHECKED} file(s) match README version \`${README_VERSION}\`" >> $GITHUB_STEP_SUMMARY
    fi
# ── PROTECT CUSTOM WORKFLOWS ────────────────────────────────────────
# Guarantees .github/workflows/custom/ exists so repo-specific
# workflows have a home that bulk sync and this cleanup never touch.
- name: Ensure custom workflow directory exists
  run: |
    echo "## 🔧 Custom Workflows" >> $GITHUB_STEP_SUMMARY
    echo "" >> $GITHUB_STEP_SUMMARY
    if [ ! -d ".github/workflows/custom" ]; then
      mkdir -p .github/workflows/custom
      # Quoted delimiter ('CWEOF') — heredoc body is taken literally,
      # no variable or command expansion inside.
      cat > .github/workflows/custom/README.md << 'CWEOF'
    # Custom Workflows
    Place repo-specific workflows here. Files in this directory are:
    - **Never overwritten** by MokoStandards bulk sync
    - **Never deleted** by the repository-cleanup workflow
    - Safe for custom CI, notifications, or repo-specific automation
    Synced workflows live in `.github/workflows/` (parent directory).
    CWEOF
      # Strip the leading indentation carried over from the YAML block.
      sed -i 's/^    //' .github/workflows/custom/README.md
      git config --local user.email "github-actions[bot]@users.noreply.github.com"
      git config --local user.name "github-actions[bot]"
      git add .github/workflows/custom/
      if ! git diff --cached --quiet; then
        git commit -m "chore: create .github/workflows/custom/ for repo-specific workflows [skip ci]" \
          --author="github-actions[bot] <github-actions[bot]@users.noreply.github.com>"
        git push
        echo "✅ Created \`.github/workflows/custom/\` directory" >> $GITHUB_STEP_SUMMARY
      fi
    else
      CUSTOM_COUNT=$(find .github/workflows/custom -name "*.yml" -o -name "*.yaml" 2>/dev/null | wc -l)
      echo "✅ Custom workflow directory exists (${CUSTOM_COUNT} workflow(s))" >> $GITHUB_STEP_SUMMARY
    fi
# ── DELETE CLOSED ISSUES ──────────────────────────────────────────────
# The REST API cannot delete issues, so this locks issues closed more
# than 30 days ago instead (lock_reason=resolved).
- name: Delete old closed issues
  if: steps.tasks.outputs.delete_closed_issues == 'true'
  env:
    GH_TOKEN: ${{ secrets.GH_TOKEN || github.token }}
  run: |
    REPO="${{ github.repository }}"
    # GNU date first (ubuntu runners); BSD date fallback for portability.
    CUTOFF=$(date -u -d '30 days ago' +%Y-%m-%dT%H:%M:%SZ 2>/dev/null || date -u -v-30d +%Y-%m-%dT%H:%M:%SZ)
    echo "## 🗑️ Closed Issue Cleanup" >> $GITHUB_STEP_SUMMARY
    echo "" >> $GITHUB_STEP_SUMMARY
    echo "Locking issues closed before: ${CUTOFF}" >> $GITHUB_STEP_SUMMARY
    DELETED=0
    # The /issues endpoint also returns pull requests — exclude them via
    # the .pull_request marker field. Process substitution keeps DELETED
    # in the parent shell; the previous pipeline form incremented it in a
    # subshell, so the check below always reported "no issues found" even
    # after locking some.
    while read -r num; do
      [ -n "$num" ] || continue
      gh api "repos/${REPO}/issues/${num}/lock" -X PUT -f lock_reason="resolved" --silent 2>/dev/null || true
      echo "  Locked issue #${num}" >> $GITHUB_STEP_SUMMARY
      DELETED=$((DELETED+1))
    done < <(gh api "repos/${REPO}/issues?state=closed&per_page=100&sort=updated&direction=asc" \
      --jq ".[] | select(.pull_request | not) | select(.closed_at != null and .closed_at < \"${CUTOFF}\") | .number" 2>/dev/null)
    if [ "$DELETED" -eq 0 ]; then
      echo "✅ No old closed issues found" >> $GITHUB_STEP_SUMMARY
    else
      echo "✅ Locked ${DELETED} old closed issue(s)" >> $GITHUB_STEP_SUMMARY
    fi
# Always-run footer: records who triggered this cleanup and how.
- name: Summary
  if: always()
  run: |
    echo "" >> $GITHUB_STEP_SUMMARY
    echo "---" >> $GITHUB_STEP_SUMMARY
    echo "*Run by @${{ github.actor }} — trigger: ${{ github.event_name }}*" >> $GITHUB_STEP_SUMMARY

2561
.github/workflows/standards-compliance.yml vendored Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,133 @@
# Copyright (C) 2026 Moko Consulting <hello@mokoconsulting.tech>
#
# This file is part of a Moko Consulting project.
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
# FILE INFORMATION
# DEFGROUP: GitHub.Workflow
# INGROUP: MokoStandards.Automation
# REPO: https://github.com/mokoconsulting-tech/MokoStandards
# PATH: /templates/workflows/shared/sync-version-on-merge.yml.template
# VERSION: 04.04.01
# BRIEF: Auto-bump patch version on every push to main and propagate to all file headers
# NOTE: Synced via bulk-repo-sync to .github/workflows/sync-version-on-merge.yml in all governed repos.
# README.md is the single source of truth for the repository version.
name: Sync Version from README

# Triggers: every push to the default branch auto-bumps/propagates the
# version; manual dispatch additionally supports a preview-only dry run.
on:
  push:
    branches:
      - main
      - master
  workflow_dispatch:
    inputs:
      dry_run:
        description: 'Dry run (preview only, no commit)'
        type: boolean
        default: false

# contents: write — commit version bumps back to the branch.
# issues: write — the sync tool is invoked with --create-issue below.
permissions:
  contents: write
  issues: write

env:
  FORCE_JAVASCRIPT_ACTIONS_TO_NODE24: true
jobs:
  sync-version:
    name: Propagate README version
    runs-on: ubuntu-latest
    steps:
      # fetch-depth: 0 — full history is required for the HEAD~1 diff in
      # the auto-bump step and for pushing commits back.
      - name: Checkout repository
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
        with:
          token: ${{ secrets.GH_TOKEN || github.token }}
          fetch-depth: 0

      - name: Set up PHP
        uses: shivammathur/setup-php@fcafdd6392932010c2bd5094439b8e33be2a8a09 # v2.37.0
        with:
          php-version: '8.1'
          tools: composer

      # Clones the pinned MokoStandards branch into /tmp and installs its
      # CLI tooling. The token is embedded in the clone URL so private
      # access works; COMPOSER_AUTH covers composer's GitHub fetches.
      - name: Setup MokoStandards tools
        env:
          GH_TOKEN: ${{ secrets.GH_TOKEN || github.token }}
          COMPOSER_AUTH: '{"github-oauth":{"github.com":"${{ secrets.GH_TOKEN || github.token }}"}}'
        run: |
          git clone --depth 1 --branch version/04.04 --quiet \
            "https://x-access-token:${GH_TOKEN}@github.com/mokoconsulting-tech/MokoStandards.git" \
            /tmp/mokostandards
          cd /tmp/mokostandards
          composer install --no-dev --no-interaction --quiet
# Auto-bump the patch component on ordinary pushes. The actor guard
# prevents an infinite loop (the bot's own push re-triggering a bump).
- name: Auto-bump patch version
  if: ${{ github.event_name == 'push' && github.actor != 'github-actions[bot]' }}
  run: |
    # If README.md was touched in this push, the version was changed
    # deliberately — don't stack an automatic bump on top of it.
    if git diff --name-only HEAD~1 HEAD 2>/dev/null | grep -q '^README\.md$'; then
      echo "README.md changed in this push — skipping auto-bump"
      exit 0
    fi
    # version_bump.php edits README.md in place; RESULT presumably holds
    # the old→new version string — confirm against MokoStandards CLI docs.
    # Any failure is treated as non-fatal (best-effort bump).
    RESULT=$(php /tmp/mokostandards/api/cli/version_bump.php --path .) || {
      echo "⚠️ Could not bump version — skipping"
      exit 0
    }
    echo "Auto-bumping patch: $RESULT"
    git config --local user.email "github-actions[bot]@users.noreply.github.com"
    git config --local user.name "github-actions[bot]"
    git add README.md
    git commit -m "chore(version): auto-bump patch ${RESULT} [skip ci]" \
      --author="github-actions[bot] <github-actions[bot]@users.noreply.github.com>"
    git push
# Read the canonical version from README.md (the single source of
# truth) and expose it — plus a skip flag — as step outputs.
- name: Extract version from README.md
  id: readme_version
  run: |
    # Fast-forward to pick up the auto-bump commit made earlier in this
    # run, if any; failure (nothing to pull) is fine.
    git pull --ff-only 2>/dev/null || true
    VERSION=$(php /tmp/mokostandards/api/cli/version_read.php --path . 2>/dev/null)
    if [ -z "$VERSION" ]; then
      echo "⚠️ No VERSION in README.md — skipping propagation"
      echo "skip=true" >> $GITHUB_OUTPUT
      exit 0
    fi
    echo "version=$VERSION" >> $GITHUB_OUTPUT
    echo "skip=false" >> $GITHUB_OUTPUT
    echo "✅ README.md version: $VERSION"
# Propagate the README version into badges and file headers across the
# repo. Skipped when no version was found or on a dry run. --create-issue
# lets the tool open a tracking issue (hence the issues:write permission).
- name: Run version sync
  if: ${{ steps.readme_version.outputs.skip != 'true' && inputs.dry_run != true }}
  run: |
    php /tmp/mokostandards/api/maintenance/update_version_from_readme.php \
      --path . \
      --create-issue \
      --repo "${{ github.repository }}"
  env:
    GH_TOKEN: ${{ secrets.GH_TOKEN || github.token }}
# Commit whatever the sync tool changed (badges, file headers).
- name: Commit updated files
  if: ${{ steps.readme_version.outputs.skip != 'true' && inputs.dry_run != true }}
  run: |
    git pull --ff-only 2>/dev/null || true
    # Detect changes with porcelain status: `git diff --quiet` only sees
    # modifications to tracked files, yet the `git add -A` below also
    # stages untracked files — a sync that only CREATED files would have
    # been reported as "no changes" and silently dropped.
    if [ -z "$(git status --porcelain)" ]; then
      echo " No version changes needed — already up to date"
      exit 0
    fi
    VERSION="${{ steps.readme_version.outputs.version }}"
    git config --local user.email "github-actions[bot]@users.noreply.github.com"
    git config --local user.name "github-actions[bot]"
    git add -A
    git commit -m "chore(version): sync badges and headers to ${VERSION} [skip ci]" \
      --author="github-actions[bot] <github-actions[bot]@users.noreply.github.com>"
    git push
- name: Summary
run: |
VERSION="${{ steps.readme_version.outputs.version }}"
echo "## 📦 Version Sync — ${VERSION}" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "**Source:** \`README.md\` FILE INFORMATION block" >> $GITHUB_STEP_SUMMARY
echo "**Version:** \`${VERSION}\`" >> $GITHUB_STEP_SUMMARY