From 6d40b1fc20e7744331e6cbaee63421b0c399373c Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Sun, 18 Jan 2026 01:31:25 +0000 Subject: [PATCH 1/6] Initial plan From 09a4b0b49066ad93cd2c65ed77179212608ee844 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Sun, 18 Jan 2026 01:36:58 +0000 Subject: [PATCH 2/6] Sync with MokoStandards: remove local scripts and reusable workflows Co-authored-by: jmiller-moko <230051081+jmiller-moko@users.noreply.github.com> --- .github/workflows/ci.yml | 2 +- .github/workflows/reusable-build.yml | 222 ---- .github/workflows/reusable-ci-validation.yml | 534 -------- .github/workflows/reusable-deploy.yml | 312 ----- .github/workflows/reusable-joomla-testing.yml | 356 ------ .github/workflows/reusable-php-quality.yml | 297 ----- .../workflows/reusable-project-detector.yml | 138 --- .github/workflows/reusable-release.yml | 397 ------ .../workflows/reusable-script-executor.yml | 210 ---- .github/workflows/standards-compliance.yml | 2 +- .github/workflows/version_branch.yml | 1074 ++++++----------- CONTRIBUTING.md | 6 +- README.md | 11 +- docs/README.md | 1 - scripts/ENTERPRISE.md | 583 --------- scripts/README.md | 75 -- scripts/lib/common.py | 452 ------- scripts/lib/extension_utils.py | 356 ------ scripts/lib/joomla_manifest.py | 430 ------- scripts/release/detect_platform.py | 98 -- scripts/release/package_extension.py | 327 ----- scripts/release/update_dates.sh | 69 -- scripts/run/scaffold_extension.py | 447 ------- scripts/run/validate_all.py | 181 --- scripts/validate/manifest.py | 168 --- scripts/validate/no_secrets.py | 212 ---- scripts/validate/paths.py | 169 --- scripts/validate/php_syntax.py | 218 ---- scripts/validate/tabs.py | 140 --- scripts/validate/workflows.py | 216 ---- scripts/validate/xml_wellformed.py | 206 ---- 31 files changed, 375 insertions(+), 7534 deletions(-) delete mode 100644 
.github/workflows/reusable-build.yml delete mode 100644 .github/workflows/reusable-ci-validation.yml delete mode 100644 .github/workflows/reusable-deploy.yml delete mode 100644 .github/workflows/reusable-joomla-testing.yml delete mode 100644 .github/workflows/reusable-php-quality.yml delete mode 100644 .github/workflows/reusable-project-detector.yml delete mode 100644 .github/workflows/reusable-release.yml delete mode 100644 .github/workflows/reusable-script-executor.yml delete mode 100644 scripts/ENTERPRISE.md delete mode 100644 scripts/README.md delete mode 100755 scripts/lib/common.py delete mode 100644 scripts/lib/extension_utils.py delete mode 100755 scripts/lib/joomla_manifest.py delete mode 100755 scripts/release/detect_platform.py delete mode 100755 scripts/release/package_extension.py delete mode 100755 scripts/release/update_dates.sh delete mode 100755 scripts/run/scaffold_extension.py delete mode 100755 scripts/run/validate_all.py delete mode 100755 scripts/validate/manifest.py delete mode 100755 scripts/validate/no_secrets.py delete mode 100755 scripts/validate/paths.py delete mode 100755 scripts/validate/php_syntax.py delete mode 100755 scripts/validate/tabs.py delete mode 100755 scripts/validate/workflows.py delete mode 100755 scripts/validate/xml_wellformed.py diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 071e619..ae22098 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -50,7 +50,7 @@ permissions: jobs: ci: name: Repository Validation Pipeline - uses: ./.github/workflows/reusable-ci-validation.yml + uses: mokoconsulting-tech/MokoStandards/.github/workflows/reusable-ci-validation.yml@main with: profile: full secrets: inherit diff --git a/.github/workflows/reusable-build.yml b/.github/workflows/reusable-build.yml deleted file mode 100644 index 42079e5..0000000 --- a/.github/workflows/reusable-build.yml +++ /dev/null @@ -1,222 +0,0 @@ -# Copyright (C) 2026 Moko Consulting -# -# This file is part of a Moko 
Consulting project. -# -# SPDX-License-Identifier: GPL-3.0-or-later -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . -# -# FILE INFORMATION -# DEFGROUP: GitHub.Workflow -# INGROUP: MokoStandards.Reusable -# REPO: https://github.com/mokoconsulting-tech/MokoStandards -# PATH: /.github/workflows/reusable-build.yml -# VERSION: 01.00.00 -# BRIEF: Reusable type-aware build workflow for Joomla, Dolibarr, and generic projects -# NOTE: Automatically detects project type and applies appropriate build steps - -name: Reusable Build - -on: - workflow_call: - inputs: - php-version: - description: 'PHP version to use for build' - required: false - type: string - default: '8.1' - node-version: - description: 'Node.js version to use for build' - required: false - type: string - default: '20.x' - working-directory: - description: 'Working directory for build' - required: false - type: string - default: '.' 
- upload-artifacts: - description: 'Upload build artifacts' - required: false - type: boolean - default: true - artifact-name: - description: 'Name for uploaded artifacts' - required: false - type: string - default: 'build-artifacts' - -permissions: - contents: read - -jobs: - detect: - name: Detect Project Type - uses: ./.github/workflows/reusable-project-detector.yml - with: - working-directory: ${{ inputs.working-directory }} - - build: - name: Build (${{ needs.detect.outputs.project-type }}) - runs-on: ubuntu-latest - needs: detect - - steps: - - name: Checkout repository - uses: actions/checkout@v6 - - - name: Setup PHP - if: needs.detect.outputs.has-php == 'true' - uses: shivammathur/setup-php@v2 - with: - php-version: ${{ inputs.php-version }} - extensions: mbstring, xml, zip, json - tools: composer:v2 - - - name: Setup Node.js - if: needs.detect.outputs.has-node == 'true' - uses: actions/setup-node@v6 - with: - node-version: ${{ inputs.node-version }} - - - name: Get Composer cache directory - if: needs.detect.outputs.has-php == 'true' - id: composer-cache - run: echo "dir=$(composer config cache-files-dir)" >> $GITHUB_OUTPUT - - - name: Cache Composer dependencies - if: needs.detect.outputs.has-php == 'true' - uses: actions/cache@v5 - with: - path: ${{ steps.composer-cache.outputs.dir }} - key: ${{ runner.os }}-composer-${{ hashFiles('**/composer.lock') }} - restore-keys: ${{ runner.os }}-composer- - - - name: Cache Node modules - if: needs.detect.outputs.has-node == 'true' - uses: actions/cache@v5 - with: - path: ~/.npm - key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }} - restore-keys: ${{ runner.os }}-node- - - - name: Install PHP dependencies - if: needs.detect.outputs.has-php == 'true' - working-directory: ${{ inputs.working-directory }} - run: | - if [ -f "composer.json" ]; then - composer install --prefer-dist --no-progress --no-interaction - echo "โœ… Composer dependencies installed" >> $GITHUB_STEP_SUMMARY - fi - - - name: 
Install Node dependencies - if: needs.detect.outputs.has-node == 'true' - working-directory: ${{ inputs.working-directory }} - run: | - if [ -f "package.json" ]; then - npm ci - echo "โœ… npm dependencies installed" >> $GITHUB_STEP_SUMMARY - fi - - - name: Build Joomla Extension - if: needs.detect.outputs.project-type == 'joomla' - working-directory: ${{ inputs.working-directory }} - run: | - echo "### ๐Ÿ—๏ธ Building Joomla Extension" >> $GITHUB_STEP_SUMMARY - echo "" >> $GITHUB_STEP_SUMMARY - echo "**Extension Type:** ${{ needs.detect.outputs.extension-type }}" >> $GITHUB_STEP_SUMMARY - - # Run npm build if package.json has build script - if [ -f "package.json" ] && grep -q '"build"' package.json; then - echo "Running npm build..." - npm run build - echo "- โœ… npm build completed" >> $GITHUB_STEP_SUMMARY - fi - - # Run composer scripts if available - if [ -f "composer.json" ] && grep -q '"build"' composer.json; then - echo "Running composer build..." - composer run-script build - echo "- โœ… composer build completed" >> $GITHUB_STEP_SUMMARY - fi - - echo "- โœ… Joomla extension build completed" >> $GITHUB_STEP_SUMMARY - - - name: Build Dolibarr Module - if: needs.detect.outputs.project-type == 'dolibarr' - working-directory: ${{ inputs.working-directory }} - run: | - echo "### ๐Ÿ—๏ธ Building Dolibarr Module" >> $GITHUB_STEP_SUMMARY - echo "" >> $GITHUB_STEP_SUMMARY - - # Run npm build if available - if [ -f "package.json" ] && grep -q '"build"' package.json; then - echo "Running npm build..." 
- npm run build - echo "- โœ… npm build completed" >> $GITHUB_STEP_SUMMARY - fi - - # Install Dolibarr-specific dependencies - if [ -f "composer.json" ]; then - composer install --no-dev --optimize-autoloader - echo "- โœ… Production dependencies installed" >> $GITHUB_STEP_SUMMARY - fi - - echo "- โœ… Dolibarr module build completed" >> $GITHUB_STEP_SUMMARY - - - name: Build Generic Project - if: needs.detect.outputs.project-type == 'generic' - working-directory: ${{ inputs.working-directory }} - run: | - echo "### ๐Ÿ—๏ธ Building Generic Project" >> $GITHUB_STEP_SUMMARY - echo "" >> $GITHUB_STEP_SUMMARY - - # Try various build methods - if [ -f "package.json" ] && grep -q '"build"' package.json; then - npm run build - echo "- โœ… npm build completed" >> $GITHUB_STEP_SUMMARY - fi - - if [ -f "Makefile" ]; then - make build 2>/dev/null || echo "- โ„น๏ธ Makefile build not available" >> $GITHUB_STEP_SUMMARY - fi - - echo "- โœ… Generic project build completed" >> $GITHUB_STEP_SUMMARY - - - name: Verify build output - working-directory: ${{ inputs.working-directory }} - run: | - echo "### ๐Ÿ“ฆ Build Output" >> $GITHUB_STEP_SUMMARY - echo "" >> $GITHUB_STEP_SUMMARY - - # Check for common build output directories - for dir in dist build public out; do - if [ -d "$dir" ]; then - echo "- โœ… Found build output: \`$dir/\`" >> $GITHUB_STEP_SUMMARY - du -sh "$dir" >> $GITHUB_STEP_SUMMARY - fi - done - - - name: Upload build artifacts - if: inputs.upload-artifacts - uses: actions/upload-artifact@v6 - with: - name: ${{ inputs.artifact-name }}-${{ needs.detect.outputs.project-type }} - path: | - ${{ inputs.working-directory }}/dist/ - ${{ inputs.working-directory }}/build/ - ${{ inputs.working-directory }}/public/ - ${{ inputs.working-directory }}/out/ - retention-days: 7 - if-no-files-found: ignore diff --git a/.github/workflows/reusable-ci-validation.yml b/.github/workflows/reusable-ci-validation.yml deleted file mode 100644 index b5796f0..0000000 --- 
a/.github/workflows/reusable-ci-validation.yml +++ /dev/null @@ -1,534 +0,0 @@ -# Copyright (C) 2026 Moko Consulting -# -# This file is part of a Moko Consulting project. -# -# SPDX-License-Identifier: GPL-3.0-or-later -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . -# -# FILE INFORMATION -# DEFGROUP: GitHub.Workflow -# INGROUP: MokoStandards.Reusable -# REPO: https://github.com/mokoconsulting-tech/MokoStandards -# PATH: /.github/workflows/reusable-ci-validation.yml -# VERSION: 01.00.00 -# BRIEF: Reusable CI validation workflow for repository standards enforcement -# NOTE: Supports multiple validation profiles (basic, full, strict) with configurable checks - -name: Reusable CI Validation - -on: - workflow_call: - inputs: - profile: - description: 'Validation profile (basic, full, strict)' - required: false - type: string - default: 'basic' - node-version: - description: 'Node.js version for frontend validation' - required: false - type: string - default: '20.x' - php-version: - description: 'PHP version for backend validation' - required: false - type: string - default: '8.1' - working-directory: - description: 'Working directory for validation' - required: false - type: string - default: '.' 
- validate-manifests: - description: 'Validate XML manifests (Joomla/Dolibarr)' - required: false - type: boolean - default: true - validate-changelogs: - description: 'Validate CHANGELOG.md format and structure' - required: false - type: boolean - default: true - validate-licenses: - description: 'Validate license headers in source files' - required: false - type: boolean - default: true - validate-security: - description: 'Check for secrets and security issues' - required: false - type: boolean - default: true - fail-on-warnings: - description: 'Fail the workflow on validation warnings' - required: false - type: boolean - default: false - -permissions: - contents: read - pull-requests: write - checks: write - -jobs: - setup: - name: Setup Validation Environment - runs-on: ubuntu-latest - outputs: - has-php: ${{ steps.detect.outputs.has-php }} - has-node: ${{ steps.detect.outputs.has-node }} - has-scripts: ${{ steps.detect.outputs.has-scripts }} - - steps: - - name: Checkout repository - uses: actions/checkout@v6 - with: - fetch-depth: 0 - - - name: Detect project components - id: detect - working-directory: ${{ inputs.working-directory }} - run: | - # Detect PHP files - if find . 
-name "*.php" -type f | head -1 | grep -q .; then - echo "has-php=true" >> $GITHUB_OUTPUT - echo "โœ… PHP files detected" >> $GITHUB_STEP_SUMMARY - else - echo "has-php=false" >> $GITHUB_OUTPUT - echo "โ„น๏ธ No PHP files detected" >> $GITHUB_STEP_SUMMARY - fi - - # Detect Node.js project - if [ -f "package.json" ]; then - echo "has-node=true" >> $GITHUB_OUTPUT - echo "โœ… Node.js project detected" >> $GITHUB_STEP_SUMMARY - else - echo "has-node=false" >> $GITHUB_OUTPUT - echo "โ„น๏ธ No Node.js project detected" >> $GITHUB_STEP_SUMMARY - fi - - # Detect validation scripts - if [ -d "scripts/validate" ] || [ -d ".github/scripts/validate" ]; then - echo "has-scripts=true" >> $GITHUB_OUTPUT - echo "โœ… Validation scripts found" >> $GITHUB_STEP_SUMMARY - else - echo "has-scripts=false" >> $GITHUB_OUTPUT - echo "โ„น๏ธ No validation scripts found" >> $GITHUB_STEP_SUMMARY - fi - - required-validations: - name: Required Validations - runs-on: ubuntu-latest - needs: setup - - steps: - - name: Checkout repository - uses: actions/checkout@v6 - with: - fetch-depth: 0 - - - name: Normalize line endings - run: git config --global core.autocrlf false - - - name: Setup PHP - if: needs.setup.outputs.has-php == 'true' - uses: shivammathur/setup-php@v2 - with: - php-version: ${{ inputs.php-version }} - extensions: mbstring, xml - coverage: none - - - name: Setup Node.js - if: needs.setup.outputs.has-node == 'true' - uses: actions/setup-node@v6 - with: - node-version: ${{ inputs.node-version }} - - - name: Make scripts executable - if: needs.setup.outputs.has-scripts == 'true' - working-directory: ${{ inputs.working-directory }} - run: | - if [ -d "scripts" ]; then - find scripts -name "*.sh" -type f -exec chmod +x {} \; - fi - if [ -d ".github/scripts" ]; then - find .github/scripts -name "*.sh" -type f -exec chmod +x {} \; - fi - - - name: Validate XML manifests - if: inputs.validate-manifests - working-directory: ${{ inputs.working-directory }} - run: | - echo "### ๐Ÿ“‹ Manifest 
Validation" >> $GITHUB_STEP_SUMMARY - echo "" >> $GITHUB_STEP_SUMMARY - - if [ -f "scripts/validate/manifest.sh" ]; then - echo "Running manifest validation script..." >> $GITHUB_STEP_SUMMARY - if ./scripts/validate/manifest.sh 2>&1 | tee -a $GITHUB_STEP_SUMMARY; then - echo "โœ… Manifest validation passed" >> $GITHUB_STEP_SUMMARY - else - echo "โŒ Manifest validation script failed" >> $GITHUB_STEP_SUMMARY - exit 1 - fi - elif [ -f ".github/scripts/validate/manifest.sh" ]; then - echo "Running manifest validation script..." >> $GITHUB_STEP_SUMMARY - if ./.github/scripts/validate/manifest.sh 2>&1 | tee -a $GITHUB_STEP_SUMMARY; then - echo "โœ… Manifest validation passed" >> $GITHUB_STEP_SUMMARY - else - echo "โŒ Manifest validation script failed" >> $GITHUB_STEP_SUMMARY - exit 1 - fi - elif command -v xmllint >/dev/null 2>&1; then - # Basic XML validation using xmllint - echo "Using xmllint for basic XML validation..." >> $GITHUB_STEP_SUMMARY - XML_FOUND=false - ERROR_FOUND=false - - while IFS= read -r file; do - XML_FOUND=true - if ! xmllint --noout "$file" 2>&1 | tee -a $GITHUB_STEP_SUMMARY; then - echo "โŒ Invalid XML: $file" >> $GITHUB_STEP_SUMMARY - ERROR_FOUND=true - fi - done < <(find . -name "*.xml" -type f ! -path "*/node_modules/*" ! 
-path "*/.git/*") - - if [ "$ERROR_FOUND" = true ]; then - echo "" >> $GITHUB_STEP_SUMMARY - echo "โŒ XML validation failed" >> $GITHUB_STEP_SUMMARY - exit 1 - elif [ "$XML_FOUND" = true ]; then - echo "" >> $GITHUB_STEP_SUMMARY - echo "โœ… Basic XML validation passed" >> $GITHUB_STEP_SUMMARY - else - echo "โ„น๏ธ No XML files found to validate" >> $GITHUB_STEP_SUMMARY - fi - else - echo "โ„น๏ธ No manifest validation script or xmllint available" >> $GITHUB_STEP_SUMMARY - echo "Skipping XML validation" >> $GITHUB_STEP_SUMMARY - fi - - - name: Validate XML well-formedness - working-directory: ${{ inputs.working-directory }} - run: | - if [ -f "scripts/validate/xml_wellformed.sh" ]; then - ./scripts/validate/xml_wellformed.sh - elif [ -f ".github/scripts/validate/xml_wellformed.sh" ]; then - ./.github/scripts/validate/xml_wellformed.sh - else - echo "โ„น๏ธ No XML well-formedness validation script found, skipping" - fi - - - name: Validate PHP syntax - if: needs.setup.outputs.has-php == 'true' - working-directory: ${{ inputs.working-directory }} - run: | - echo "### ๐Ÿ” PHP Syntax Validation" >> $GITHUB_STEP_SUMMARY - echo "" >> $GITHUB_STEP_SUMMARY - - if [ -f "scripts/validate/php_syntax.sh" ]; then - echo "Running PHP syntax validation script..." >> $GITHUB_STEP_SUMMARY - if ./scripts/validate/php_syntax.sh 2>&1 | tee -a $GITHUB_STEP_SUMMARY; then - echo "โœ… PHP syntax validation passed" >> $GITHUB_STEP_SUMMARY - else - echo "โŒ PHP syntax validation script failed" >> $GITHUB_STEP_SUMMARY - exit 1 - fi - elif [ -f ".github/scripts/validate/php_syntax.sh" ]; then - echo "Running PHP syntax validation script..." >> $GITHUB_STEP_SUMMARY - if ./.github/scripts/validate/php_syntax.sh 2>&1 | tee -a $GITHUB_STEP_SUMMARY; then - echo "โœ… PHP syntax validation passed" >> $GITHUB_STEP_SUMMARY - else - echo "โŒ PHP syntax validation script failed" >> $GITHUB_STEP_SUMMARY - exit 1 - fi - else - # Basic PHP syntax check - echo "Running basic PHP syntax check..." 
>> $GITHUB_STEP_SUMMARY - ERROR_FOUND=false - - while IFS= read -r file; do - if ! php -l "$file" 2>&1 | tee -a $GITHUB_STEP_SUMMARY; then - echo "โŒ Syntax error in: $file" >> $GITHUB_STEP_SUMMARY - ERROR_FOUND=true - fi - done < <(find . -name "*.php" -type f ! -path "*/vendor/*" ! -path "*/node_modules/*") - - if [ "$ERROR_FOUND" = true ]; then - echo "" >> $GITHUB_STEP_SUMMARY - echo "โŒ PHP syntax errors found" >> $GITHUB_STEP_SUMMARY - exit 1 - else - echo "" >> $GITHUB_STEP_SUMMARY - echo "โœ… PHP syntax validation passed" >> $GITHUB_STEP_SUMMARY - fi - fi - - optional-validations: - name: Optional Validations (${{ inputs.profile }}) - runs-on: ubuntu-latest - needs: setup - if: inputs.profile != 'basic' - - steps: - - name: Checkout repository - uses: actions/checkout@v6 - with: - fetch-depth: 0 - - - name: Setup PHP - if: needs.setup.outputs.has-php == 'true' - uses: shivammathur/setup-php@v2 - with: - php-version: ${{ inputs.php-version }} - extensions: mbstring, xml - coverage: none - - - name: Make scripts executable - if: needs.setup.outputs.has-scripts == 'true' - working-directory: ${{ inputs.working-directory }} - run: | - if [ -d "scripts" ]; then - find scripts -name "*.sh" -type f -exec chmod +x {} \; - fi - if [ -d ".github/scripts" ]; then - find .github/scripts -name "*.sh" -type f -exec chmod +x {} \; - fi - - - name: Validate changelog - if: inputs.validate-changelogs - continue-on-error: ${{ !inputs.fail-on-warnings }} - working-directory: ${{ inputs.working-directory }} - run: | - echo "### ๐Ÿ“ Changelog Validation" >> $GITHUB_STEP_SUMMARY - - if [ -f "scripts/validate/changelog.sh" ]; then - ./scripts/validate/changelog.sh - echo "โœ… Changelog validation passed" >> $GITHUB_STEP_SUMMARY - elif [ -f ".github/scripts/validate/changelog.sh" ]; then - ./.github/scripts/validate/changelog.sh - echo "โœ… Changelog validation passed" >> $GITHUB_STEP_SUMMARY - elif [ -f "CHANGELOG.md" ]; then - # Basic changelog validation - if grep -q "## \[" 
CHANGELOG.md; then - echo "โœ… Changelog appears well-formed" >> $GITHUB_STEP_SUMMARY - else - echo "โš ๏ธ Changelog may not follow standard format" >> $GITHUB_STEP_SUMMARY - [ "${{ inputs.fail-on-warnings }}" = "true" ] && exit 1 - fi - else - echo "โš ๏ธ No CHANGELOG.md found" >> $GITHUB_STEP_SUMMARY - fi - - - name: Validate license headers - if: inputs.validate-licenses - continue-on-error: ${{ !inputs.fail-on-warnings }} - working-directory: ${{ inputs.working-directory }} - run: | - echo "### โš–๏ธ License Header Validation" >> $GITHUB_STEP_SUMMARY - - if [ -f "scripts/validate/license_headers.sh" ]; then - ./scripts/validate/license_headers.sh - echo "โœ… License headers validated" >> $GITHUB_STEP_SUMMARY - elif [ -f ".github/scripts/validate/license_headers.sh" ]; then - ./.github/scripts/validate/license_headers.sh - echo "โœ… License headers validated" >> $GITHUB_STEP_SUMMARY - else - # Basic license header check - COUNT_FILE=$(mktemp) - find . \( -name "*.php" -o -name "*.js" -o -name "*.py" \) -type f -exec sh -c 'if ! 
head -20 "$1" | grep -qi "license\|copyright\|spdx"; then echo "1"; fi' _ {} \; > "$COUNT_FILE" - FILES_WITHOUT_LICENSE=$(wc -l < "$COUNT_FILE") - rm -f "$COUNT_FILE" - if [ "$FILES_WITHOUT_LICENSE" -eq 0 ]; then - echo "โœ… License headers appear present" >> $GITHUB_STEP_SUMMARY - else - echo "โš ๏ธ Some files may be missing license headers" >> $GITHUB_STEP_SUMMARY - [ "${{ inputs.fail-on-warnings }}" = "true" ] && exit 1 - fi - fi - - - name: Validate language structure - continue-on-error: true - working-directory: ${{ inputs.working-directory }} - run: | - if [ -f "scripts/validate/language_structure.sh" ]; then - ./scripts/validate/language_structure.sh - elif [ -f ".github/scripts/validate/language_structure.sh" ]; then - ./.github/scripts/validate/language_structure.sh - else - echo "โ„น๏ธ No language structure validation script found" - fi - - - name: Validate paths - continue-on-error: true - working-directory: ${{ inputs.working-directory }} - run: | - if [ -f "scripts/validate/paths.sh" ]; then - ./scripts/validate/paths.sh - elif [ -f ".github/scripts/validate/paths.sh" ]; then - ./.github/scripts/validate/paths.sh - else - echo "โ„น๏ธ No path validation script found" - fi - - - name: Validate tabs/whitespace - continue-on-error: true - working-directory: ${{ inputs.working-directory }} - run: | - if [ -f "scripts/validate/tabs.sh" ]; then - ./scripts/validate/tabs.sh - elif [ -f ".github/scripts/validate/tabs.sh" ]; then - ./.github/scripts/validate/tabs.sh - else - echo "โ„น๏ธ No tabs validation script found" - fi - - - name: Validate version alignment - continue-on-error: ${{ !inputs.fail-on-warnings }} - working-directory: ${{ inputs.working-directory }} - run: | - if [ -f "scripts/validate/version_alignment.sh" ]; then - ./scripts/validate/version_alignment.sh - elif [ -f ".github/scripts/validate/version_alignment.sh" ]; then - ./.github/scripts/validate/version_alignment.sh - else - echo "โ„น๏ธ No version alignment validation script found" - 
fi - - security-validations: - name: Security Validations - runs-on: ubuntu-latest - if: inputs.validate-security - - steps: - - name: Checkout repository - uses: actions/checkout@v6 - with: - fetch-depth: 0 - - - name: Make scripts executable - working-directory: ${{ inputs.working-directory }} - run: | - if [ -d "scripts" ]; then - find scripts -name "*.sh" -type f -exec chmod +x {} \; - fi - if [ -d ".github/scripts" ]; then - find .github/scripts -name "*.sh" -type f -exec chmod +x {} \; - fi - - - name: Check for secrets - working-directory: ${{ inputs.working-directory }} - run: | - echo "### ๐Ÿ”’ Security Validation" >> $GITHUB_STEP_SUMMARY - echo "" >> $GITHUB_STEP_SUMMARY - - if [ -f "scripts/validate/no_secrets.sh" ]; then - if ./scripts/validate/no_secrets.sh 2>&1 | tee -a $GITHUB_STEP_SUMMARY; then - echo "โœ… No secrets found" >> $GITHUB_STEP_SUMMARY - else - echo "โŒ Secret validation script failed" >> $GITHUB_STEP_SUMMARY - exit 1 - fi - elif [ -f ".github/scripts/validate/no_secrets.sh" ]; then - if ./.github/scripts/validate/no_secrets.sh 2>&1 | tee -a $GITHUB_STEP_SUMMARY; then - echo "โœ… No secrets found" >> $GITHUB_STEP_SUMMARY - else - echo "โŒ Secret validation script failed" >> $GITHUB_STEP_SUMMARY - exit 1 - fi - else - # Basic secrets check using find to properly exclude directories - PATTERNS=( - "password\s*=\s*['\"][^'\"]+['\"]" - "api[_-]?key\s*=\s*['\"][^'\"]+['\"]" - "secret\s*=\s*['\"][^'\"]+['\"]" - "token\s*=\s*['\"][^'\"]+['\"]" - "BEGIN RSA PRIVATE KEY" - "BEGIN PRIVATE KEY" - ) - - FOUND=0 - echo "Scanning for potential secrets..." >> $GITHUB_STEP_SUMMARY - - for pattern in "${PATTERNS[@]}"; do - # Use find to exclude directories and files, then grep the results - while IFS= read -r file; do - if [ -f "$file" ]; then - if grep -HnE "$pattern" "$file" 2>/dev/null; then - FOUND=1 - echo "โš ๏ธ Found pattern in: $file" >> $GITHUB_STEP_SUMMARY - fi - fi - done < <(find . -type f \ - ! -path "*/.git/*" \ - ! 
-path "*/node_modules/*" \ - ! -path "*/vendor/*" \ - ! -path "*/.github/*" \ - ! -path "*/docs/*" \ - ! -name "*.md" \ - 2>/dev/null) - done - - if [ $FOUND -eq 0 ]; then - echo "" >> $GITHUB_STEP_SUMMARY - echo "โœ… Basic security check passed - no secrets detected" >> $GITHUB_STEP_SUMMARY - else - echo "" >> $GITHUB_STEP_SUMMARY - echo "โŒ Potential secrets or credentials detected" >> $GITHUB_STEP_SUMMARY - echo "Please review the findings above and ensure they are test fixtures or documentation examples" >> $GITHUB_STEP_SUMMARY - exit 1 - fi - fi - - summary: - name: Validation Summary - runs-on: ubuntu-latest - needs: [required-validations, optional-validations, security-validations] - if: always() - - steps: - - name: Generate validation summary - run: | - echo "### ๐ŸŽฏ CI Validation Summary" >> $GITHUB_STEP_SUMMARY - echo "" >> $GITHUB_STEP_SUMMARY - echo "**Profile:** ${{ inputs.profile }}" >> $GITHUB_STEP_SUMMARY - echo "" >> $GITHUB_STEP_SUMMARY - echo "| Validation Stage | Status |" >> $GITHUB_STEP_SUMMARY - echo "|-----------------|--------|" >> $GITHUB_STEP_SUMMARY - echo "| Required Validations | ${{ needs.required-validations.result == 'success' && 'โœ… Passed' || 'โŒ Failed' }} |" >> $GITHUB_STEP_SUMMARY - echo "| Optional Validations | ${{ needs.optional-validations.result == 'success' && 'โœ… Passed' || needs.optional-validations.result == 'skipped' && 'โญ๏ธ Skipped' || 'โŒ Failed' }} |" >> $GITHUB_STEP_SUMMARY - echo "| Security Validations | ${{ needs.security-validations.result == 'success' && 'โœ… Passed' || needs.security-validations.result == 'skipped' && 'โญ๏ธ Skipped' || 'โŒ Failed' }} |" >> $GITHUB_STEP_SUMMARY - echo "" >> $GITHUB_STEP_SUMMARY - echo "**Repository:** $GITHUB_REPOSITORY" >> $GITHUB_STEP_SUMMARY - echo "**Branch:** $GITHUB_REF_NAME" >> $GITHUB_STEP_SUMMARY - echo "**Commit:** $GITHUB_SHA" >> $GITHUB_STEP_SUMMARY - - - name: Check validation results - run: | - if [ "${{ needs.required-validations.result }}" == 
"failure" ]; then - echo "โŒ Required validations failed" - exit 1 - fi - - if [ "${{ needs.security-validations.result }}" == "failure" ]; then - echo "โŒ Security validations failed" - exit 1 - fi - - if [ "${{ inputs.profile }}" == "strict" ] && [ "${{ needs.optional-validations.result }}" == "failure" ]; then - echo "โŒ Optional validations failed in strict mode" - exit 1 - fi - - echo "โœ… CI validation completed successfully" diff --git a/.github/workflows/reusable-deploy.yml b/.github/workflows/reusable-deploy.yml deleted file mode 100644 index e32e4fc..0000000 --- a/.github/workflows/reusable-deploy.yml +++ /dev/null @@ -1,312 +0,0 @@ -# Copyright (C) 2026 Moko Consulting -# -# This file is part of a Moko Consulting project. -# -# SPDX-License-Identifier: GPL-3.0-or-later -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . 
-# -# FILE INFORMATION -# DEFGROUP: GitHub.Workflow -# INGROUP: MokoStandards.Reusable -# REPO: https://github.com/mokoconsulting-tech/MokoStandards -# PATH: /.github/workflows/reusable-deploy.yml -# VERSION: 01.00.00 -# BRIEF: Reusable type-aware deployment workflow for staging and production -# NOTE: Supports Joomla, Dolibarr, and generic deployments with health checks - -name: Reusable Deploy - -on: - workflow_call: - inputs: - environment: - description: 'Target environment (staging, production)' - required: true - type: string - version: - description: 'Version to deploy (optional, uses latest if not specified)' - required: false - type: string - deployment-method: - description: 'Deployment method (rsync, ftp, ssh, kubernetes, custom)' - required: false - type: string - default: 'custom' - health-check-url: - description: 'URL to check after deployment' - required: false - type: string - health-check-timeout: - description: 'Health check timeout in seconds' - required: false - type: number - default: 300 - working-directory: - description: 'Working directory' - required: false - type: string - default: '.' 
- secrets: - DEPLOY_HOST: - description: 'Deployment host/server' - required: false - DEPLOY_USER: - description: 'Deployment user' - required: false - DEPLOY_KEY: - description: 'SSH private key or deployment credentials' - required: false - DEPLOY_PATH: - description: 'Deployment path on target server' - required: false - -permissions: - contents: read - deployments: write - -jobs: - detect: - name: Detect Project Type - uses: ./.github/workflows/reusable-project-detector.yml - with: - working-directory: ${{ inputs.working-directory }} - - prepare: - name: Prepare Deployment - runs-on: ubuntu-latest - needs: detect - outputs: - deployment-id: ${{ steps.create-deployment.outputs.deployment_id }} - version: ${{ steps.version.outputs.version }} - - steps: - - name: Checkout repository - uses: actions/checkout@v6 - with: - fetch-depth: 0 - - - name: Determine version - id: version - run: | - if [ -n "${{ inputs.version }}" ]; then - VERSION="${{ inputs.version }}" - else - # Use latest tag or commit SHA - VERSION=$(git describe --tags --always) - fi - echo "version=${VERSION}" >> $GITHUB_OUTPUT - echo "Deploying version: ${VERSION}" - - - name: Create deployment - id: create-deployment - uses: chrnorm/deployment-action@v2 - with: - token: ${{ secrets.GITHUB_TOKEN }} - environment: ${{ inputs.environment }} - description: "Deploy ${{ needs.detect.outputs.project-type }} v${{ steps.version.outputs.version }}" - - - name: Deployment info - run: | - echo "### ๐Ÿš€ Deployment Preparation" >> $GITHUB_STEP_SUMMARY - echo "" >> $GITHUB_STEP_SUMMARY - echo "**Environment:** ${{ inputs.environment }}" >> $GITHUB_STEP_SUMMARY - echo "**Version:** ${{ steps.version.outputs.version }}" >> $GITHUB_STEP_SUMMARY - echo "**Project Type:** ${{ needs.detect.outputs.project-type }}" >> $GITHUB_STEP_SUMMARY - echo "**Method:** ${{ inputs.deployment-method }}" >> $GITHUB_STEP_SUMMARY - - build: - name: Build for Deployment - needs: [detect, prepare] - uses: 
./.github/workflows/reusable-build.yml - with: - working-directory: ${{ inputs.working-directory }} - upload-artifacts: true - artifact-name: deployment-package - - deploy: - name: Deploy to ${{ inputs.environment }} - runs-on: ubuntu-latest - needs: [detect, prepare, build] - environment: - name: ${{ inputs.environment }} - url: ${{ inputs.health-check-url }} - - steps: - - name: Checkout repository - uses: actions/checkout@v6 - - - name: Download build artifacts - uses: actions/download-artifact@v7 - with: - name: deployment-package-${{ needs.detect.outputs.project-type }} - path: ./dist - - - name: Setup SSH key - if: inputs.deployment-method == 'ssh' || inputs.deployment-method == 'rsync' - run: | - mkdir -p ~/.ssh - echo "${{ secrets.DEPLOY_KEY }}" > ~/.ssh/deploy_key - chmod 600 ~/.ssh/deploy_key - ssh-keyscan -H "${{ secrets.DEPLOY_HOST }}" >> ~/.ssh/known_hosts - - - name: Deploy via rsync - if: inputs.deployment-method == 'rsync' - run: | - echo "Deploying via rsync to ${{ secrets.DEPLOY_HOST }}..." - - rsync -avz --delete \ - -e "ssh -i ~/.ssh/deploy_key -o StrictHostKeyChecking=no" \ - ./dist/ \ - "${{ secrets.DEPLOY_USER }}@${{ secrets.DEPLOY_HOST }}:${{ secrets.DEPLOY_PATH }}" - - echo "โœ… rsync deployment completed" >> $GITHUB_STEP_SUMMARY - - - name: Deploy via SSH - if: inputs.deployment-method == 'ssh' - run: | - echo "Deploying via SSH to ${{ secrets.DEPLOY_HOST }}..." - - # Create deployment package - tar -czf deployment.tar.gz -C ./dist . 
- - # Copy to server - scp -i ~/.ssh/deploy_key deployment.tar.gz \ - "${{ secrets.DEPLOY_USER }}@${{ secrets.DEPLOY_HOST }}:/tmp/" - - # Extract on server - ssh -i ~/.ssh/deploy_key "${{ secrets.DEPLOY_USER }}@${{ secrets.DEPLOY_HOST }}" << 'EOF' - cd ${{ secrets.DEPLOY_PATH }} - tar -xzf /tmp/deployment.tar.gz - rm /tmp/deployment.tar.gz - EOF - - echo "โœ… SSH deployment completed" >> $GITHUB_STEP_SUMMARY - - - name: Deploy Joomla Extension - if: needs.detect.outputs.project-type == 'joomla' && inputs.deployment-method == 'custom' - run: | - echo "### ๐Ÿ”ง Joomla Extension Deployment" >> $GITHUB_STEP_SUMMARY - echo "" >> $GITHUB_STEP_SUMMARY - - # Custom Joomla deployment logic - echo "โš ๏ธ Custom Joomla deployment logic required" >> $GITHUB_STEP_SUMMARY - echo "" >> $GITHUB_STEP_SUMMARY - echo "Typical steps:" >> $GITHUB_STEP_SUMMARY - echo "1. Upload extension package to Joomla server" >> $GITHUB_STEP_SUMMARY - echo "2. Install/update via Joomla Extension Manager API" >> $GITHUB_STEP_SUMMARY - echo "3. Clear Joomla cache" >> $GITHUB_STEP_SUMMARY - echo "4. Run database migrations if needed" >> $GITHUB_STEP_SUMMARY - - # Placeholder for actual deployment commands - echo "Add your Joomla-specific deployment commands here" - - - name: Deploy Dolibarr Module - if: needs.detect.outputs.project-type == 'dolibarr' && inputs.deployment-method == 'custom' - run: | - echo "### ๐Ÿ”ง Dolibarr Module Deployment" >> $GITHUB_STEP_SUMMARY - echo "" >> $GITHUB_STEP_SUMMARY - - # Custom Dolibarr deployment logic - echo "โš ๏ธ Custom Dolibarr deployment logic required" >> $GITHUB_STEP_SUMMARY - echo "" >> $GITHUB_STEP_SUMMARY - echo "Typical steps:" >> $GITHUB_STEP_SUMMARY - echo "1. Upload module to Dolibarr htdocs/custom directory" >> $GITHUB_STEP_SUMMARY - echo "2. Activate module via Dolibarr API or admin panel" >> $GITHUB_STEP_SUMMARY - echo "3. Run module setup hooks" >> $GITHUB_STEP_SUMMARY - echo "4. 
Clear Dolibarr cache" >> $GITHUB_STEP_SUMMARY - - # Placeholder for actual deployment commands - echo "Add your Dolibarr-specific deployment commands here" - - - name: Deploy Generic Application - if: needs.detect.outputs.project-type == 'generic' && inputs.deployment-method == 'custom' - run: | - echo "### ๐Ÿ”ง Generic Application Deployment" >> $GITHUB_STEP_SUMMARY - echo "" >> $GITHUB_STEP_SUMMARY - - echo "โš ๏ธ Custom deployment logic required" >> $GITHUB_STEP_SUMMARY - echo "Add your application-specific deployment commands" >> $GITHUB_STEP_SUMMARY - - - name: Health check - if: inputs.health-check-url != '' - run: | - echo "Running health check on ${{ inputs.health-check-url }}..." - - TIMEOUT=${{ inputs.health-check-timeout }} - ELAPSED=0 - INTERVAL=10 - - while [ $ELAPSED -lt $TIMEOUT ]; do - if curl -f -s -o /dev/null -w "%{http_code}" "${{ inputs.health-check-url }}" | grep -q "200"; then - echo "โœ… Health check passed" >> $GITHUB_STEP_SUMMARY - exit 0 - fi - echo "Health check attempt $((ELAPSED / INTERVAL + 1)) failed, retrying..." 
- sleep $INTERVAL - ELAPSED=$((ELAPSED + INTERVAL)) - done - - echo "โŒ Health check failed after ${TIMEOUT}s" >> $GITHUB_STEP_SUMMARY - exit 1 - - - name: Update deployment status (success) - if: success() - uses: chrnorm/deployment-status@v2 - with: - token: ${{ secrets.GITHUB_TOKEN }} - deployment-id: ${{ needs.prepare.outputs.deployment-id }} - state: success - environment-url: ${{ inputs.health-check-url }} - - - name: Deployment summary - if: success() - run: | - echo "### โœ… Deployment Successful" >> $GITHUB_STEP_SUMMARY - echo "" >> $GITHUB_STEP_SUMMARY - echo "**Environment:** ${{ inputs.environment }}" >> $GITHUB_STEP_SUMMARY - echo "**Version:** ${{ needs.prepare.outputs.version }}" >> $GITHUB_STEP_SUMMARY - echo "**Project Type:** ${{ needs.detect.outputs.project-type }}" >> $GITHUB_STEP_SUMMARY - echo "**Time:** $(date -u +"%Y-%m-%d %H:%M:%S UTC")" >> $GITHUB_STEP_SUMMARY - if [ -n "${{ inputs.health-check-url }}" ]; then - echo "**URL:** ${{ inputs.health-check-url }}" >> $GITHUB_STEP_SUMMARY - fi - - rollback: - name: Rollback on Failure - runs-on: ubuntu-latest - needs: [prepare, deploy] - if: failure() - - steps: - - name: Update deployment status (failure) - uses: chrnorm/deployment-status@v2 - with: - token: ${{ secrets.GITHUB_TOKEN }} - deployment-id: ${{ needs.prepare.outputs.deployment-id }} - state: failure - - - name: Rollback deployment - run: | - echo "### โŒ Deployment Failed - Initiating Rollback" >> $GITHUB_STEP_SUMMARY - echo "" >> $GITHUB_STEP_SUMMARY - echo "โš ๏ธ Rollback logic needs to be implemented" >> $GITHUB_STEP_SUMMARY - echo "" >> $GITHUB_STEP_SUMMARY - echo "Typical rollback steps:" >> $GITHUB_STEP_SUMMARY - echo "1. Restore previous version from backup" >> $GITHUB_STEP_SUMMARY - echo "2. Revert database migrations if applied" >> $GITHUB_STEP_SUMMARY - echo "3. Clear caches" >> $GITHUB_STEP_SUMMARY - echo "4. 
Verify health checks pass" >> $GITHUB_STEP_SUMMARY - - # Add your rollback commands here diff --git a/.github/workflows/reusable-joomla-testing.yml b/.github/workflows/reusable-joomla-testing.yml deleted file mode 100644 index b0129ff..0000000 --- a/.github/workflows/reusable-joomla-testing.yml +++ /dev/null @@ -1,356 +0,0 @@ -# Copyright (C) 2026 Moko Consulting -# -# This file is part of a Moko Consulting project. -# -# SPDX-License-Identifier: GPL-3.0-or-later -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . 
-# -# FILE INFORMATION -# DEFGROUP: GitHub.Workflow -# INGROUP: MokoStandards.Reusable -# REPO: https://github.com/mokoconsulting-tech/MokoStandards -# PATH: /.github/workflows/reusable-joomla-testing.yml -# VERSION: 01.00.00 -# BRIEF: Reusable Joomla testing workflow with matrix PHP/Joomla versions -# NOTE: Supports PHPUnit, integration tests, and code coverage - -name: Reusable Joomla Testing - -on: - workflow_call: - inputs: - php-versions: - description: 'JSON array of PHP versions to test' - required: false - type: string - default: '["7.4", "8.0", "8.1", "8.2"]' - joomla-versions: - description: 'JSON array of Joomla versions to test' - required: false - type: string - default: '["4.4", "5.0", "5.1"]' - coverage: - description: 'Enable code coverage reporting' - required: false - type: boolean - default: false - coverage-php-version: - description: 'PHP version to use for coverage reporting' - required: false - type: string - default: '8.1' - coverage-joomla-version: - description: 'Joomla version to use for coverage reporting' - required: false - type: string - default: '5.0' - working-directory: - description: 'Working directory for tests' - required: false - type: string - default: '.' 
- run-integration-tests: - description: 'Run integration tests with Joomla installation' - required: false - type: boolean - default: true - secrets: - CODECOV_TOKEN: - description: 'Codecov token for coverage uploads' - required: false - -permissions: - contents: read - pull-requests: write - checks: write - -jobs: - unit-tests: - name: PHPUnit (PHP ${{ matrix.php-version }}, Joomla ${{ matrix.joomla-version }}) - runs-on: ubuntu-latest - - strategy: - fail-fast: false - matrix: - php-version: ${{ fromJSON(inputs.php-versions) }} - joomla-version: ${{ fromJSON(inputs.joomla-versions) }} - exclude: - # PHP 7.4 not compatible with Joomla 5.x - - php-version: '7.4' - joomla-version: '5.0' - - php-version: '7.4' - joomla-version: '5.1' - - steps: - - name: Checkout repository - uses: actions/checkout@v6 - - - name: Setup PHP ${{ matrix.php-version }} - uses: shivammathur/setup-php@v2 - with: - php-version: ${{ matrix.php-version }} - extensions: mbstring, xml, mysqli, zip, gd, intl - coverage: ${{ inputs.coverage && matrix.php-version == inputs.coverage-php-version && matrix.joomla-version == inputs.coverage-joomla-version && 'xdebug' || 'none' }} - tools: composer:v2 - - - name: Get Composer cache directory - id: composer-cache - run: echo "dir=$(composer config cache-files-dir)" >> $GITHUB_OUTPUT - - - name: Cache Composer dependencies - uses: actions/cache@v5 - with: - path: ${{ steps.composer-cache.outputs.dir }} - key: ${{ runner.os }}-php-${{ matrix.php-version }}-composer-${{ hashFiles('**/composer.lock') }} - restore-keys: | - ${{ runner.os }}-php-${{ matrix.php-version }}-composer- - ${{ runner.os }}-php- - - - name: Validate composer.json - working-directory: ${{ inputs.working-directory }} - run: | - if [ -f "composer.json" ]; then - composer validate --strict - else - echo "No composer.json found, skipping validation" - fi - - - name: Install dependencies - working-directory: ${{ inputs.working-directory }} - run: | - if [ -f "composer.json" ]; then - 
composer install --prefer-dist --no-progress --no-interaction - else - echo "No composer.json found, skipping dependency installation" - fi - - - name: Setup Joomla test environment - working-directory: ${{ inputs.working-directory }} - run: | - echo "Setting up Joomla ${{ matrix.joomla-version }} test environment" - # Add Joomla-specific environment variables - echo "JOOMLA_VERSION=${{ matrix.joomla-version }}" >> $GITHUB_ENV - - - name: Run PHPUnit tests - working-directory: ${{ inputs.working-directory }} - run: | - if [ -f "vendor/bin/phpunit" ]; then - if [ "${{ inputs.coverage && matrix.php-version == inputs.coverage-php-version && matrix.joomla-version == inputs.coverage-joomla-version }}" == "true" ]; then - vendor/bin/phpunit --coverage-text --coverage-clover=coverage.xml - else - vendor/bin/phpunit - fi - elif [ -f "phpunit.xml" ] || [ -f "phpunit.xml.dist" ]; then - if [ "${{ inputs.coverage && matrix.php-version == inputs.coverage-php-version && matrix.joomla-version == inputs.coverage-joomla-version }}" == "true" ]; then - php vendor/phpunit/phpunit/phpunit --coverage-text --coverage-clover=coverage.xml - else - php vendor/phpunit/phpunit/phpunit - fi - else - echo "โš ๏ธ No PHPUnit configuration found, skipping tests" - exit 0 - fi - - - name: Upload coverage to Codecov - uses: codecov/codecov-action@v5 - if: inputs.coverage && matrix.php-version == inputs.coverage-php-version && matrix.joomla-version == inputs.coverage-joomla-version - with: - file: ${{ inputs.working-directory }}/coverage.xml - flags: unittests,php-${{ matrix.php-version }},joomla-${{ matrix.joomla-version }} - name: codecov-joomla-${{ matrix.php-version }}-${{ matrix.joomla-version }} - token: ${{ secrets.CODECOV_TOKEN }} - fail_ci_if_error: false - - integration-tests: - name: Integration (Joomla ${{ matrix.joomla-version }}) - runs-on: ubuntu-latest - if: inputs.run-integration-tests - - services: - mysql: - image: mysql:8.0 - env: - MYSQL_ROOT_PASSWORD: root - MYSQL_DATABASE: 
joomla_test - MYSQL_USER: joomla - MYSQL_PASSWORD: joomla - ports: - - 3306:3306 - options: >- - --health-cmd="mysqladmin ping --silent" - --health-interval=10s - --health-timeout=5s - --health-retries=5 - - strategy: - fail-fast: false - matrix: - joomla-version: ${{ fromJSON(inputs.joomla-versions) }} - - steps: - - name: Checkout repository - uses: actions/checkout@v6 - - - name: Setup PHP - uses: shivammathur/setup-php@v2 - with: - php-version: '8.1' - extensions: mbstring, xml, mysqli, zip, gd, intl, pdo_mysql - tools: composer:v2 - - - name: Get Composer cache directory - id: composer-cache - run: echo "dir=$(composer config cache-files-dir)" >> $GITHUB_OUTPUT - - - name: Cache Composer dependencies - uses: actions/cache@v5 - with: - path: ${{ steps.composer-cache.outputs.dir }} - key: ${{ runner.os }}-integration-composer-${{ hashFiles('**/composer.lock') }} - restore-keys: ${{ runner.os }}-integration-composer- - - - name: Install dependencies - working-directory: ${{ inputs.working-directory }} - run: | - if [ -f "composer.json" ]; then - composer install --prefer-dist --no-progress --no-interaction - fi - - - name: Download and setup Joomla ${{ matrix.joomla-version }} - run: | - echo "๐Ÿ“ฆ Setting up Joomla ${{ matrix.joomla-version }} for integration testing" - - # Create Joomla directory - mkdir -p /tmp/joomla - cd /tmp/joomla - - # Determine Joomla version to download - JOOMLA_VERSION="${{ matrix.joomla-version }}" - - # Download latest patch version for the specified minor version - if [[ "$JOOMLA_VERSION" == "4.4" ]]; then - DOWNLOAD_VERSION="4.4-Stable" - elif [[ "$JOOMLA_VERSION" == "5.0" ]]; then - DOWNLOAD_VERSION="5.0-Stable" - elif [[ "$JOOMLA_VERSION" == "5.1" ]]; then - DOWNLOAD_VERSION="5.1-Stable" - else - DOWNLOAD_VERSION="${JOOMLA_VERSION}-Stable" - fi - - echo "Downloading Joomla ${DOWNLOAD_VERSION}..." 
- curl -L -o joomla.zip "https://downloads.joomla.org/cms/joomla${JOOMLA_VERSION%%.*}/${DOWNLOAD_VERSION}" || \ - curl -L -o joomla.zip "https://github.com/joomla/joomla-cms/releases/download/${JOOMLA_VERSION}.0/Joomla_${JOOMLA_VERSION}.0-Stable-Full_Package.zip" || \ - echo "โš ๏ธ Could not download Joomla, integration tests may be limited" - - if [ -f joomla.zip ]; then - unzip -q joomla.zip - echo "โœ… Joomla extracted successfully" - fi - - - name: Configure Joomla - run: | - echo "โš™๏ธ Configuring Joomla for testing" - - if [ -d "/tmp/joomla" ]; then - cd /tmp/joomla - - # Create basic Joomla configuration - cat > configuration.php << 'EOF' - > $GITHUB_STEP_SUMMARY - echo "" >> $GITHUB_STEP_SUMMARY - echo "| Test Suite | Status |" >> $GITHUB_STEP_SUMMARY - echo "|------------|--------|" >> $GITHUB_STEP_SUMMARY - echo "| Unit Tests | ${{ needs.unit-tests.result == 'success' && 'โœ… Passed' || needs.unit-tests.result == 'skipped' && 'โญ๏ธ Skipped' || 'โŒ Failed' }} |" >> $GITHUB_STEP_SUMMARY - echo "| Integration Tests | ${{ needs.integration-tests.result == 'success' && 'โœ… Passed' || needs.integration-tests.result == 'skipped' && 'โญ๏ธ Skipped' || 'โŒ Failed' }} |" >> $GITHUB_STEP_SUMMARY - echo "" >> $GITHUB_STEP_SUMMARY - echo "**Repository:** $GITHUB_REPOSITORY" >> $GITHUB_STEP_SUMMARY - echo "**Branch:** $GITHUB_REF_NAME" >> $GITHUB_STEP_SUMMARY - echo "**Commit:** $GITHUB_SHA" >> $GITHUB_STEP_SUMMARY - - - name: Check test results - run: | - if [ "${{ needs.unit-tests.result }}" == "failure" ]; then - echo "โŒ Unit tests failed" - exit 1 - fi - - if [ "${{ needs.integration-tests.result }}" == "failure" ]; then - echo "โŒ Integration tests failed" - exit 1 - fi - - echo "โœ… All test suites passed or were skipped" diff --git a/.github/workflows/reusable-php-quality.yml b/.github/workflows/reusable-php-quality.yml deleted file mode 100644 index c484f21..0000000 --- a/.github/workflows/reusable-php-quality.yml +++ /dev/null @@ -1,297 +0,0 @@ -# 
Copyright (C) 2026 Moko Consulting -# -# This file is part of a Moko Consulting project. -# -# SPDX-License-Identifier: GPL-3.0-or-later -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . -# -# FILE INFORMATION -# DEFGROUP: GitHub.Workflow -# INGROUP: MokoStandards.Reusable -# REPO: https://github.com/mokoconsulting-tech/MokoStandards -# PATH: /.github/workflows/reusable-php-quality.yml -# VERSION: 01.00.00 -# BRIEF: Reusable PHP code quality analysis workflow -# NOTE: Supports PHPCS, PHPStan, Psalm with configurable PHP versions and tools - -name: Reusable PHP Quality - -on: - workflow_call: - inputs: - php-versions: - description: 'JSON array of PHP versions to test' - required: false - type: string - default: '["7.4", "8.0", "8.1", "8.2"]' - tools: - description: 'JSON array of quality tools to run (phpcs, phpstan, psalm)' - required: false - type: string - default: '["phpcs", "phpstan", "psalm"]' - working-directory: - description: 'Working directory for the quality checks' - required: false - type: string - default: '.' 
- phpcs-standard: - description: 'PHPCS coding standard to use' - required: false - type: string - default: 'PSR12' - phpstan-level: - description: 'PHPStan analysis level (0-9)' - required: false - type: string - default: '5' - psalm-level: - description: 'Psalm error level (1-8)' - required: false - type: string - default: '4' - fail-on-error: - description: 'Fail the workflow if quality checks find issues' - required: false - type: boolean - default: true - outputs: - quality-score: - description: 'Overall quality score percentage' - value: ${{ jobs.aggregate.outputs.score }} - -permissions: - contents: read - pull-requests: write - checks: write - -jobs: - phpcs: - name: PHP_CodeSniffer (PHP ${{ matrix.php-version }}) - runs-on: ubuntu-latest - if: contains(fromJSON(inputs.tools), 'phpcs') - - strategy: - fail-fast: false - matrix: - php-version: ${{ fromJSON(inputs.php-versions) }} - - steps: - - name: Checkout repository - uses: actions/checkout@v6 - - - name: Setup PHP ${{ matrix.php-version }} - uses: shivammathur/setup-php@v2 - with: - php-version: ${{ matrix.php-version }} - extensions: mbstring, xml - tools: composer:v2, phpcs - coverage: none - - - name: Get Composer cache directory - id: composer-cache - run: echo "dir=$(composer config cache-files-dir)" >> $GITHUB_OUTPUT - - - name: Cache Composer dependencies - uses: actions/cache@v5 - with: - path: ${{ steps.composer-cache.outputs.dir }} - key: ${{ runner.os }}-composer-${{ hashFiles('**/composer.lock') }} - restore-keys: ${{ runner.os }}-composer- - - - name: Install dependencies - working-directory: ${{ inputs.working-directory }} - run: | - if [ -f "composer.json" ]; then - composer install --prefer-dist --no-progress --no-interaction - fi - - - name: Run PHP_CodeSniffer - working-directory: ${{ inputs.working-directory }} - continue-on-error: ${{ !inputs.fail-on-error }} - run: | - if [ -f "phpcs.xml" ] || [ -f "phpcs.xml.dist" ]; then - phpcs --standard=phpcs.xml --report=summary 
--report-width=120 - elif [ -f "vendor/bin/phpcs" ]; then - vendor/bin/phpcs --standard=${{ inputs.phpcs-standard }} --report=summary --report-width=120 src/ - else - phpcs --standard=${{ inputs.phpcs-standard }} --report=summary --report-width=120 . || echo "No PHP files found or PHPCS configuration missing" - fi - - phpstan: - name: PHPStan (PHP ${{ matrix.php-version }}) - runs-on: ubuntu-latest - if: contains(fromJSON(inputs.tools), 'phpstan') - - strategy: - fail-fast: false - matrix: - php-version: ${{ fromJSON(inputs.php-versions) }} - - steps: - - name: Checkout repository - uses: actions/checkout@v6 - - - name: Setup PHP ${{ matrix.php-version }} - uses: shivammathur/setup-php@v2 - with: - php-version: ${{ matrix.php-version }} - extensions: mbstring, xml - tools: composer:v2, phpstan - coverage: none - - - name: Get Composer cache directory - id: composer-cache - run: echo "dir=$(composer config cache-files-dir)" >> $GITHUB_OUTPUT - - - name: Cache Composer dependencies - uses: actions/cache@v5 - with: - path: ${{ steps.composer-cache.outputs.dir }} - key: ${{ runner.os }}-composer-${{ hashFiles('**/composer.lock') }} - restore-keys: ${{ runner.os }}-composer- - - - name: Install dependencies - working-directory: ${{ inputs.working-directory }} - run: | - if [ -f "composer.json" ]; then - composer install --prefer-dist --no-progress --no-interaction - fi - - - name: Run PHPStan - working-directory: ${{ inputs.working-directory }} - continue-on-error: ${{ !inputs.fail-on-error }} - run: | - if [ -f "phpstan.neon" ] || [ -f "phpstan.neon.dist" ]; then - phpstan analyse --no-progress --error-format=github - elif [ -f "vendor/bin/phpstan" ]; then - vendor/bin/phpstan analyse src/ --level=${{ inputs.phpstan-level }} --no-progress --error-format=github - else - phpstan analyse . 
--level=${{ inputs.phpstan-level }} --no-progress --error-format=github || echo "No PHP files found or PHPStan configuration missing" - fi - - psalm: - name: Psalm (PHP ${{ matrix.php-version }}) - runs-on: ubuntu-latest - if: contains(fromJSON(inputs.tools), 'psalm') - - strategy: - fail-fast: false - matrix: - php-version: ${{ fromJSON(inputs.php-versions) }} - - steps: - - name: Checkout repository - uses: actions/checkout@v6 - - - name: Setup PHP ${{ matrix.php-version }} - uses: shivammathur/setup-php@v2 - with: - php-version: ${{ matrix.php-version }} - extensions: mbstring, xml - tools: composer:v2, psalm - coverage: none - - - name: Get Composer cache directory - id: composer-cache - run: echo "dir=$(composer config cache-files-dir)" >> $GITHUB_OUTPUT - - - name: Cache Composer dependencies - uses: actions/cache@v5 - with: - path: ${{ steps.composer-cache.outputs.dir }} - key: ${{ runner.os }}-composer-${{ hashFiles('**/composer.lock') }} - restore-keys: ${{ runner.os }}-composer- - - - name: Install dependencies - working-directory: ${{ inputs.working-directory }} - run: | - if [ -f "composer.json" ]; then - composer install --prefer-dist --no-progress --no-interaction - fi - - - name: Run Psalm - working-directory: ${{ inputs.working-directory }} - continue-on-error: ${{ !inputs.fail-on-error }} - run: | - if [ -f "psalm.xml" ] || [ -f "psalm.xml.dist" ]; then - psalm --no-progress --output-format=github --show-info=false - elif [ -f "vendor/bin/psalm" ]; then - # Initialize Psalm config if it doesn't exist - if [ ! -f "psalm.xml" ]; then - echo "Initializing Psalm configuration..." - if ! 
vendor/bin/psalm --init src/ ${{ inputs.psalm-level }}; then - echo "โš ๏ธ Psalm initialization failed, proceeding with defaults" - fi - fi - vendor/bin/psalm --no-progress --output-format=github --show-info=false - else - psalm --no-progress --output-format=github --show-info=false || echo "No PHP files found or Psalm configuration missing" - fi - - aggregate: - name: Quality Check Summary - runs-on: ubuntu-latest - needs: [phpcs, phpstan, psalm] - if: always() - outputs: - score: ${{ steps.calculate.outputs.score }} - - steps: - - name: Calculate quality score - id: calculate - run: | - # Count successful jobs - SUCCESS=0 - TOTAL=0 - - if [ "${{ needs.phpcs.result }}" != "skipped" ]; then - TOTAL=$((TOTAL + 1)) - [ "${{ needs.phpcs.result }}" == "success" ] && SUCCESS=$((SUCCESS + 1)) - fi - - if [ "${{ needs.phpstan.result }}" != "skipped" ]; then - TOTAL=$((TOTAL + 1)) - [ "${{ needs.phpstan.result }}" == "success" ] && SUCCESS=$((SUCCESS + 1)) - fi - - if [ "${{ needs.psalm.result }}" != "skipped" ]; then - TOTAL=$((TOTAL + 1)) - [ "${{ needs.psalm.result }}" == "success" ] && SUCCESS=$((SUCCESS + 1)) - fi - - # Calculate percentage - if [ $TOTAL -gt 0 ]; then - SCORE=$((SUCCESS * 100 / TOTAL)) - else - SCORE=100 - fi - - echo "score=$SCORE" >> $GITHUB_OUTPUT - echo "Quality Score: $SCORE%" >> $GITHUB_STEP_SUMMARY - echo "" >> $GITHUB_STEP_SUMMARY - echo "- PHPCS: ${{ needs.phpcs.result }}" >> $GITHUB_STEP_SUMMARY - echo "- PHPStan: ${{ needs.phpstan.result }}" >> $GITHUB_STEP_SUMMARY - echo "- Psalm: ${{ needs.psalm.result }}" >> $GITHUB_STEP_SUMMARY - - - name: Check overall status - if: inputs.fail-on-error - run: | - if [ "${{ needs.phpcs.result }}" == "failure" ] || \ - [ "${{ needs.phpstan.result }}" == "failure" ] || \ - [ "${{ needs.psalm.result }}" == "failure" ]; then - echo "โŒ Quality checks failed" - exit 1 - fi - echo "โœ… All quality checks passed" diff --git a/.github/workflows/reusable-project-detector.yml 
b/.github/workflows/reusable-project-detector.yml deleted file mode 100644 index 4d96129..0000000 --- a/.github/workflows/reusable-project-detector.yml +++ /dev/null @@ -1,138 +0,0 @@ -# Copyright (C) 2026 Moko Consulting -# -# This file is part of a Moko Consulting project. -# -# SPDX-License-Identifier: GPL-3.0-or-later -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . -# -# FILE INFORMATION -# DEFGROUP: GitHub.Workflow -# INGROUP: MokoStandards.Reusable -# REPO: https://github.com/mokoconsulting-tech/MokoStandards -# PATH: /.github/workflows/reusable-project-detector.yml -# VERSION: 01.00.00 -# BRIEF: Reusable workflow for detecting project type (Joomla, Dolibarr, Generic) -# NOTE: Provides project_type and extension_type outputs for downstream workflows - -name: Reusable Project Type Detection - -on: - workflow_call: - inputs: - working-directory: - description: 'Working directory for detection' - required: false - type: string - default: '.' 
- outputs: - project-type: - description: 'Detected project type (joomla, dolibarr, generic)' - value: ${{ jobs.detect.outputs.project_type }} - extension-type: - description: 'Detected extension type (component, module, plugin, etc.)' - value: ${{ jobs.detect.outputs.extension_type }} - has-php: - description: 'Whether project contains PHP files' - value: ${{ jobs.detect.outputs.has_php }} - has-node: - description: 'Whether project contains Node.js/package.json' - value: ${{ jobs.detect.outputs.has_node }} - -permissions: - contents: read - -jobs: - detect: - name: Detect Project Type - runs-on: ubuntu-latest - outputs: - project_type: ${{ steps.detect.outputs.project_type }} - extension_type: ${{ steps.detect.outputs.extension_type }} - has_php: ${{ steps.detect.outputs.has_php }} - has_node: ${{ steps.detect.outputs.has_node }} - - steps: - - name: Checkout repository - uses: actions/checkout@v6 - - - name: Detect project type and components - id: detect - working-directory: ${{ inputs.working-directory }} - run: | - echo "### ๐Ÿ” Project Detection" >> $GITHUB_STEP_SUMMARY - echo "" >> $GITHUB_STEP_SUMMARY - - # Detection priority: Joomla > Dolibarr > Generic - - # Check for Joomla indicators - if [ -f "joomla.xml" ] || \ - find . -maxdepth 2 \( -name "mod_*.xml" -o -name "plg_*.xml" -o -name "com_*.xml" -o -name "pkg_*.xml" -o -name "tpl_*.xml" \) 2>/dev/null | head -1 | grep -q .; then - echo "project_type=joomla" >> $GITHUB_OUTPUT - echo "**Project Type:** Joomla" >> $GITHUB_STEP_SUMMARY - - # Detect Joomla extension type - if [ -d "administrator/components" ] || [ -d "components" ]; then - echo "extension_type=component" >> $GITHUB_OUTPUT - echo "**Extension Type:** Component" >> $GITHUB_STEP_SUMMARY - elif find . -maxdepth 1 -name "mod_*.xml" 2>/dev/null | head -1 | grep -q .; then - echo "extension_type=module" >> $GITHUB_OUTPUT - echo "**Extension Type:** Module" >> $GITHUB_STEP_SUMMARY - elif find . 
-maxdepth 1 -name "plg_*.xml" 2>/dev/null | head -1 | grep -q .; then - echo "extension_type=plugin" >> $GITHUB_OUTPUT - echo "**Extension Type:** Plugin" >> $GITHUB_STEP_SUMMARY - elif find . -maxdepth 1 -name "pkg_*.xml" 2>/dev/null | head -1 | grep -q .; then - echo "extension_type=package" >> $GITHUB_OUTPUT - echo "**Extension Type:** Package" >> $GITHUB_STEP_SUMMARY - elif find . -maxdepth 1 -name "tpl_*.xml" 2>/dev/null | head -1 | grep -q .; then - echo "extension_type=template" >> $GITHUB_OUTPUT - echo "**Extension Type:** Template" >> $GITHUB_STEP_SUMMARY - else - echo "extension_type=component" >> $GITHUB_OUTPUT - echo "**Extension Type:** Component (default)" >> $GITHUB_STEP_SUMMARY - fi - - # Check for Dolibarr indicators - elif [ -d "htdocs" ] || [ -d "core/modules" ] || \ - ([ -f "composer.json" ] && grep -q "dolibarr" composer.json 2>/dev/null); then - echo "project_type=dolibarr" >> $GITHUB_OUTPUT - echo "extension_type=module" >> $GITHUB_OUTPUT - echo "**Project Type:** Dolibarr" >> $GITHUB_STEP_SUMMARY - echo "**Extension Type:** Module" >> $GITHUB_STEP_SUMMARY - - # Default to Generic - else - echo "project_type=generic" >> $GITHUB_OUTPUT - echo "extension_type=application" >> $GITHUB_OUTPUT - echo "**Project Type:** Generic" >> $GITHUB_STEP_SUMMARY - echo "**Extension Type:** Application" >> $GITHUB_STEP_SUMMARY - fi - - # Detect PHP presence - if find . 
-name "*.php" -type f 2>/dev/null | head -1 | grep -q .; then - echo "has_php=true" >> $GITHUB_OUTPUT - echo "- โœ… PHP files detected" >> $GITHUB_STEP_SUMMARY - else - echo "has_php=false" >> $GITHUB_OUTPUT - echo "- โ„น๏ธ No PHP files detected" >> $GITHUB_STEP_SUMMARY - fi - - # Detect Node.js presence - if [ -f "package.json" ]; then - echo "has_node=true" >> $GITHUB_OUTPUT - echo "- โœ… Node.js project detected (package.json)" >> $GITHUB_STEP_SUMMARY - else - echo "has_node=false" >> $GITHUB_OUTPUT - echo "- โ„น๏ธ No Node.js project detected" >> $GITHUB_STEP_SUMMARY - fi diff --git a/.github/workflows/reusable-release.yml b/.github/workflows/reusable-release.yml deleted file mode 100644 index 0595e97..0000000 --- a/.github/workflows/reusable-release.yml +++ /dev/null @@ -1,397 +0,0 @@ -# Copyright (C) 2026 Moko Consulting -# -# This file is part of a Moko Consulting project. -# -# SPDX-License-Identifier: GPL-3.0-or-later -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . 
-# -# FILE INFORMATION -# DEFGROUP: GitHub.Workflow -# INGROUP: MokoStandards.Reusable -# REPO: https://github.com/mokoconsulting-tech/MokoStandards -# PATH: /.github/workflows/reusable-release.yml -# VERSION: 01.00.00 -# BRIEF: Reusable type-aware release workflow for Joomla, Dolibarr, and generic projects -# NOTE: Creates releases with type-specific packaging and optional marketplace publishing - -name: Reusable Release - -on: - workflow_call: - inputs: - version: - description: 'Release version (semver format)' - required: true - type: string - prerelease: - description: 'Mark as pre-release' - required: false - type: boolean - default: false - draft: - description: 'Create as draft release' - required: false - type: boolean - default: false - php-version: - description: 'PHP version for build' - required: false - type: string - default: '8.1' - create-github-release: - description: 'Create GitHub release' - required: false - type: boolean - default: true - publish-to-marketplace: - description: 'Publish to marketplace (Joomla/Dolibarr)' - required: false - type: boolean - default: false - working-directory: - description: 'Working directory' - required: false - type: string - default: '.' 
- secrets: - MARKETPLACE_TOKEN: - description: 'Marketplace API token (JED/Dolistore)' - required: false - -permissions: - contents: write - -jobs: - detect: - name: Detect Project Type - uses: ./.github/workflows/reusable-project-detector.yml - with: - working-directory: ${{ inputs.working-directory }} - - build-package: - name: Build Release Package - runs-on: ubuntu-latest - needs: detect - outputs: - package-name: ${{ steps.package.outputs.name }} - - steps: - - name: Checkout repository - uses: actions/checkout@v6 - with: - fetch-depth: 0 - - - name: Setup PHP - if: needs.detect.outputs.has-php == 'true' - uses: shivammathur/setup-php@v2 - with: - php-version: ${{ inputs.php-version }} - extensions: mbstring, xml, zip - tools: composer:v2 - - - name: Setup Node.js - if: needs.detect.outputs.has-node == 'true' - uses: actions/setup-node@v6 - with: - node-version: '20.x' - - - name: Validate version format - run: | - VERSION="${{ inputs.version }}" - if ! echo "$VERSION" | grep -E '^[0-9]+\.[0-9]+\.[0-9]+(-[a-zA-Z0-9.-]+)?$'; then - echo "โŒ Invalid version format: $VERSION" - echo "Expected semver format: X.Y.Z or X.Y.Z-prerelease" - exit 1 - fi - echo "โœ… Version format valid: $VERSION" - - - name: Install dependencies - working-directory: ${{ inputs.working-directory }} - run: | - if [ -f "composer.json" ]; then - composer install --no-dev --optimize-autoloader --no-interaction - echo "โœ… Composer dependencies installed" >> $GITHUB_STEP_SUMMARY - fi - - if [ -f "package.json" ]; then - npm ci - if grep -q '"build"' package.json; then - npm run build - fi - echo "โœ… Node dependencies installed and built" >> $GITHUB_STEP_SUMMARY - fi - - - name: Update version in files - working-directory: ${{ inputs.working-directory }} - run: | - VERSION="${{ inputs.version }}" - - # Update version in XML manifests (Joomla/Dolibarr) - if [ "${{ needs.detect.outputs.project-type }}" == "joomla" ] || \ - [ "${{ needs.detect.outputs.project-type }}" == "dolibarr" ]; then - 
find . -name "*.xml" -type f -not -path "*/node_modules/*" -not -path "*/vendor/*" \ - -exec sed -i "s/[^<]*<\/version>/${VERSION}<\/version>/g" {} \; - echo "- โœ… Updated version in XML manifests" >> $GITHUB_STEP_SUMMARY - fi - - # Update version in package.json - if [ -f "package.json" ]; then - sed -i "s/\"version\": \"[^\"]*\"/\"version\": \"${VERSION}\"/g" package.json - echo "- โœ… Updated version in package.json" >> $GITHUB_STEP_SUMMARY - fi - - # Update version in composer.json - if [ -f "composer.json" ]; then - sed -i "s/\"version\": \"[^\"]*\"/\"version\": \"${VERSION}\"/g" composer.json - echo "- โœ… Updated version in composer.json" >> $GITHUB_STEP_SUMMARY - fi - - - name: Create Joomla package - if: needs.detect.outputs.project-type == 'joomla' - working-directory: ${{ inputs.working-directory }} - run: | - mkdir -p build/package - - # Copy files excluding development artifacts - rsync -av \ - --exclude='build' \ - --exclude='tests' \ - --exclude='.git*' \ - --exclude='composer.json' \ - --exclude='composer.lock' \ - --exclude='phpunit.xml*' \ - --exclude='phpcs.xml*' \ - --exclude='phpstan.neon*' \ - --exclude='psalm.xml*' \ - --exclude='node_modules' \ - --exclude='.github' \ - --exclude='package.json' \ - --exclude='package-lock.json' \ - . build/package/ - - # Determine extension name from manifest - MANIFEST=$(find . -maxdepth 1 -name "*.xml" -not -name "phpunit.xml*" -type f | head -1) - if [ -n "$MANIFEST" ]; then - EXT_NAME=$(basename "$MANIFEST" .xml) - else - EXT_NAME=$(basename "$GITHUB_REPOSITORY" | sed 's/^joomla-//') - fi - - # Create ZIP package - cd build/package - VERSION="${{ inputs.version }}" - PACKAGE_NAME="${EXT_NAME}-${VERSION}.zip" - zip -r "../${PACKAGE_NAME}" . - cd ../.. 
- - echo "PACKAGE_NAME=${PACKAGE_NAME}" >> $GITHUB_ENV - echo "โœ… Created Joomla package: ${PACKAGE_NAME}" >> $GITHUB_STEP_SUMMARY - - - name: Create Dolibarr package - if: needs.detect.outputs.project-type == 'dolibarr' - working-directory: ${{ inputs.working-directory }} - run: | - mkdir -p build/package - - # Copy module files - rsync -av \ - --exclude='build' \ - --exclude='tests' \ - --exclude='.git*' \ - --exclude='node_modules' \ - --exclude='.github' \ - . build/package/ - - # Determine module name - if [ -f "core/modules/modMyModule.class.php" ]; then - MODULE_NAME=$(grep -oP "class modMyModule extends DolibarrModules" core/modules/*.php | head -1 | sed 's/class mod//' | sed 's/ extends.*//') - else - MODULE_NAME=$(basename "$GITHUB_REPOSITORY" | sed 's/^dolibarr-//') - fi - - # Create ZIP package - cd build/package - VERSION="${{ inputs.version }}" - PACKAGE_NAME="${MODULE_NAME}-${VERSION}.zip" - zip -r "../${PACKAGE_NAME}" . - cd ../.. - - echo "PACKAGE_NAME=${PACKAGE_NAME}" >> $GITHUB_ENV - echo "โœ… Created Dolibarr package: ${PACKAGE_NAME}" >> $GITHUB_STEP_SUMMARY - - - name: Create Generic package - if: needs.detect.outputs.project-type == 'generic' - working-directory: ${{ inputs.working-directory }} - run: | - mkdir -p build/package - - # Copy relevant build artifacts - if [ -d "dist" ]; then - cp -r dist/* build/package/ - elif [ -d "build" ]; then - cp -r build/* build/package/ - else - # Copy all files excluding development artifacts - rsync -av \ - --exclude='build' \ - --exclude='tests' \ - --exclude='.git*' \ - --exclude='node_modules' \ - . build/package/ - fi - - # Create package - REPO_NAME=$(basename "$GITHUB_REPOSITORY") - VERSION="${{ inputs.version }}" - PACKAGE_NAME="${REPO_NAME}-${VERSION}.tar.gz" - - cd build - tar -czf "${PACKAGE_NAME}" package/ - cd .. 
- - echo "PACKAGE_NAME=${PACKAGE_NAME}" >> $GITHUB_ENV - echo "โœ… Created generic package: ${PACKAGE_NAME}" >> $GITHUB_STEP_SUMMARY - - - name: Generate checksums - working-directory: ${{ inputs.working-directory }} - run: | - cd build - PACKAGE="${PACKAGE_NAME}" - - if [ -f "$PACKAGE" ]; then - sha256sum "$PACKAGE" > "${PACKAGE}.sha256" - md5sum "$PACKAGE" > "${PACKAGE}.md5" - echo "โœ… Generated checksums" >> $GITHUB_STEP_SUMMARY - fi - - - name: Output package info - id: package - run: | - echo "name=${PACKAGE_NAME}" >> $GITHUB_OUTPUT - - - name: Upload release artifacts - uses: actions/upload-artifact@v6 - with: - name: release-package - path: | - ${{ inputs.working-directory }}/build/*.zip - ${{ inputs.working-directory }}/build/*.tar.gz - ${{ inputs.working-directory }}/build/*.sha256 - ${{ inputs.working-directory }}/build/*.md5 - retention-days: 30 - - create-release: - name: Create GitHub Release - runs-on: ubuntu-latest - needs: [detect, build-package] - if: inputs.create-github-release - - steps: - - name: Checkout repository - uses: actions/checkout@v6 - with: - fetch-depth: 0 - - - name: Download release artifacts - uses: actions/download-artifact@v7 - with: - name: release-package - path: ./artifacts - - - name: Extract changelog - id: changelog - run: | - VERSION="${{ inputs.version }}" - - if [ -f "CHANGELOG.md" ]; then - # Extract changelog for this version - awk "/## \[${VERSION}\]/,/## \[/{if(/## \[${VERSION}\]/)next;else if(/## \[/)exit;else print}" CHANGELOG.md > release_notes.md - - if [ ! -s release_notes.md ]; then - echo "## Release ${VERSION}" > release_notes.md - echo "" >> release_notes.md - echo "No specific changelog found for this version." >> release_notes.md - echo "Please refer to the full CHANGELOG.md for details." >> release_notes.md - fi - else - echo "## Release ${VERSION}" > release_notes.md - echo "" >> release_notes.md - echo "Release created from ${{ needs.detect.outputs.project-type }} project." 
>> release_notes.md - fi - - - name: Create GitHub Release - uses: softprops/action-gh-release@v2 - with: - tag_name: v${{ inputs.version }} - name: Release ${{ inputs.version }} - body_path: release_notes.md - draft: ${{ inputs.draft }} - prerelease: ${{ inputs.prerelease }} - files: | - artifacts/* - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - - name: Release summary - run: | - echo "### ๐Ÿš€ Release Created Successfully" >> $GITHUB_STEP_SUMMARY - echo "" >> $GITHUB_STEP_SUMMARY - echo "**Version:** ${{ inputs.version }}" >> $GITHUB_STEP_SUMMARY - echo "**Project Type:** ${{ needs.detect.outputs.project-type }}" >> $GITHUB_STEP_SUMMARY - echo "**Extension Type:** ${{ needs.detect.outputs.extension-type }}" >> $GITHUB_STEP_SUMMARY - echo "**Pre-release:** ${{ inputs.prerelease }}" >> $GITHUB_STEP_SUMMARY - echo "**Draft:** ${{ inputs.draft }}" >> $GITHUB_STEP_SUMMARY - echo "" >> $GITHUB_STEP_SUMMARY - echo "**Package:** ${{ needs.build-package.outputs.package-name }}" >> $GITHUB_STEP_SUMMARY - - publish-marketplace: - name: Publish to Marketplace - runs-on: ubuntu-latest - needs: [detect, build-package, create-release] - if: inputs.publish-to-marketplace && (needs.detect.outputs.project-type == 'joomla' || needs.detect.outputs.project-type == 'dolibarr') - - steps: - - name: Download release artifacts - uses: actions/download-artifact@v7 - with: - name: release-package - path: ./artifacts - - - name: Publish to marketplace - run: | - echo "### ๐ŸŒ Marketplace Publishing" >> $GITHUB_STEP_SUMMARY - echo "" >> $GITHUB_STEP_SUMMARY - - PROJECT_TYPE="${{ needs.detect.outputs.project-type }}" - - if [ "$PROJECT_TYPE" == "joomla" ]; then - echo "โš ๏ธ Joomla Extensions Directory (JED) publishing requires manual submission" >> $GITHUB_STEP_SUMMARY - echo "Package ready at: artifacts/${{ needs.build-package.outputs.package-name }}" >> $GITHUB_STEP_SUMMARY - echo "" >> $GITHUB_STEP_SUMMARY - echo "To publish to JED:" >> $GITHUB_STEP_SUMMARY - echo "1. 
Visit https://extensions.joomla.org/" >> $GITHUB_STEP_SUMMARY - echo "2. Login and submit the extension package" >> $GITHUB_STEP_SUMMARY - elif [ "$PROJECT_TYPE" == "dolibarr" ]; then - echo "โš ๏ธ Dolistore publishing requires manual submission" >> $GITHUB_STEP_SUMMARY - echo "Package ready at: artifacts/${{ needs.build-package.outputs.package-name }}" >> $GITHUB_STEP_SUMMARY - echo "" >> $GITHUB_STEP_SUMMARY - echo "To publish to Dolistore:" >> $GITHUB_STEP_SUMMARY - echo "1. Visit https://www.dolistore.com/" >> $GITHUB_STEP_SUMMARY - echo "2. Login and submit the module package" >> $GITHUB_STEP_SUMMARY - fi - - # Note: Automated marketplace publishing would require - # marketplace-specific API implementation here - # For now, we provide manual instructions diff --git a/.github/workflows/reusable-script-executor.yml b/.github/workflows/reusable-script-executor.yml deleted file mode 100644 index 897e9e5..0000000 --- a/.github/workflows/reusable-script-executor.yml +++ /dev/null @@ -1,210 +0,0 @@ -# Copyright (C) 2026 Moko Consulting -# -# This file is part of a Moko Consulting project. -# -# SPDX-License-Identifier: GPL-3.0-or-later -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . 
-# -# FILE INFORMATION -# DEFGROUP: GitHub.Workflows -# INGROUP: MokoStandards.Reusable -# REPO: https://github.com/mokoconsulting-tech/MokoStandards -# PATH: /.github/workflows/reusable-script-executor.yml -# VERSION: 01.00.00 -# BRIEF: Reusable workflow to execute MokoStandards scripts in any repository -# NOTE: Provides unified script execution with proper environment setup - -name: Execute MokoStandards Script - -on: - workflow_call: - inputs: - script_path: - description: 'Path to script relative to scripts/ directory (e.g., validate/no_secrets.py)' - required: true - type: string - script_args: - description: 'Arguments to pass to the script' - required: false - type: string - default: '' - python_version: - description: 'Python version to use' - required: false - type: string - default: '3.11' - install_dependencies: - description: 'Install Python dependencies (pyyaml, etc.)' - required: false - type: boolean - default: true - working_directory: - description: 'Working directory for script execution' - required: false - type: string - default: '.' 
- create_summary: - description: 'Create GitHub step summary' - required: false - type: boolean - default: true - outputs: - exit_code: - description: 'Script exit code' - value: ${{ jobs.execute-script.outputs.exit_code }} - script_output: - description: 'Script output (truncated to 1000 chars)' - value: ${{ jobs.execute-script.outputs.script_output }} - -jobs: - execute-script: - name: Execute ${{ inputs.script_path }} - runs-on: ubuntu-latest - - outputs: - exit_code: ${{ steps.run-script.outputs.exit_code }} - script_output: ${{ steps.run-script.outputs.script_output }} - - permissions: - contents: read - - steps: - - name: Checkout repository - uses: actions/checkout@v6 - - - name: Setup Python - if: endsWith(inputs.script_path, '.py') - uses: actions/setup-python@v6 - with: - python-version: ${{ inputs.python_version }} - - - name: Install Python dependencies - if: endsWith(inputs.script_path, '.py') && inputs.install_dependencies - run: | - python -m pip install --upgrade pip - pip install pyyaml - - # Install additional dependencies if requirements file exists - if [ -f "requirements.txt" ]; then - pip install -r requirements.txt - fi - - if [ "${{ inputs.create_summary }}" == "true" ]; then - echo "## ๐Ÿ“ฆ Dependencies Installed" >> $GITHUB_STEP_SUMMARY - echo "- Python ${{ inputs.python_version }}" >> $GITHUB_STEP_SUMMARY - echo "- PyYAML (for configuration)" >> $GITHUB_STEP_SUMMARY - fi - - - name: Setup Bash - if: endsWith(inputs.script_path, '.sh') - run: | - bash --version - - - name: Verify script exists - id: verify - run: | - SCRIPT_PATH="scripts/${{ inputs.script_path }}" - - if [ ! 
-f "$SCRIPT_PATH" ]; then - echo "โŒ Script not found: $SCRIPT_PATH" >> $GITHUB_STEP_SUMMARY - echo "" >> $GITHUB_STEP_SUMMARY - echo "Available scripts:" >> $GITHUB_STEP_SUMMARY - find scripts -name "*.py" -o -name "*.sh" | sort >> $GITHUB_STEP_SUMMARY - exit 1 - fi - - echo "script_full_path=$SCRIPT_PATH" >> $GITHUB_OUTPUT - - if [ "${{ inputs.create_summary }}" == "true" ]; then - echo "## โœ… Script Found" >> $GITHUB_STEP_SUMMARY - echo "**Path:** \`$SCRIPT_PATH\`" >> $GITHUB_STEP_SUMMARY - echo "**Type:** $(file -b $SCRIPT_PATH)" >> $GITHUB_STEP_SUMMARY - fi - - - name: Make script executable - run: | - chmod +x ${{ steps.verify.outputs.script_full_path }} - - - name: Run script - id: run-script - working-directory: ${{ inputs.working_directory }} - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: | - SCRIPT_PATH="${{ steps.verify.outputs.script_full_path }}" - SCRIPT_ARGS="${{ inputs.script_args }}" - - echo "## ๐Ÿš€ Executing Script" >> $GITHUB_STEP_SUMMARY - echo "" >> $GITHUB_STEP_SUMMARY - echo "**Script:** \`$SCRIPT_PATH\`" >> $GITHUB_STEP_SUMMARY - echo "**Arguments:** \`$SCRIPT_ARGS\`" >> $GITHUB_STEP_SUMMARY - echo "**Working Directory:** \`${{ inputs.working_directory }}\`" >> $GITHUB_STEP_SUMMARY - echo "" >> $GITHUB_STEP_SUMMARY - echo "### Output" >> $GITHUB_STEP_SUMMARY - echo "\`\`\`" >> $GITHUB_STEP_SUMMARY - - # Execute script and capture output - set +e - - if [[ "$SCRIPT_PATH" == *.py ]]; then - OUTPUT=$(python3 "$SCRIPT_PATH" $SCRIPT_ARGS 2>&1) - EXIT_CODE=$? - elif [[ "$SCRIPT_PATH" == *.sh ]]; then - OUTPUT=$(bash "$SCRIPT_PATH" $SCRIPT_ARGS 2>&1) - EXIT_CODE=$? - else - OUTPUT=$("$SCRIPT_PATH" $SCRIPT_ARGS 2>&1) - EXIT_CODE=$? 
- fi - - set -e - - # Save outputs - echo "exit_code=$EXIT_CODE" >> $GITHUB_OUTPUT - - # Truncate output for GitHub output (max 1000 chars) - OUTPUT_TRUNCATED="${OUTPUT:0:1000}" - echo "script_output<> $GITHUB_OUTPUT - echo "$OUTPUT_TRUNCATED" >> $GITHUB_OUTPUT - echo "EOF" >> $GITHUB_OUTPUT - - # Show full output in summary (with line limit) - echo "$OUTPUT" | head -n 100 >> $GITHUB_STEP_SUMMARY - echo "\`\`\`" >> $GITHUB_STEP_SUMMARY - echo "" >> $GITHUB_STEP_SUMMARY - - # Report exit code - if [ $EXIT_CODE -eq 0 ]; then - echo "### โœ… Script Completed Successfully" >> $GITHUB_STEP_SUMMARY - echo "**Exit Code:** $EXIT_CODE" >> $GITHUB_STEP_SUMMARY - else - echo "### โŒ Script Failed" >> $GITHUB_STEP_SUMMARY - echo "**Exit Code:** $EXIT_CODE" >> $GITHUB_STEP_SUMMARY - fi - - exit $EXIT_CODE - - - name: Upload script output - if: always() - uses: actions/upload-artifact@v6 - with: - name: script-output-${{ github.run_id }} - path: | - *.log - *.json - *.csv - retention-days: 7 - if-no-files-found: ignore diff --git a/.github/workflows/standards-compliance.yml b/.github/workflows/standards-compliance.yml index c18928a..c1cdacf 100644 --- a/.github/workflows/standards-compliance.yml +++ b/.github/workflows/standards-compliance.yml @@ -63,7 +63,7 @@ permissions: jobs: compliance: name: Standards Compliance Validation - uses: ./.github/workflows/reusable-ci-validation.yml + uses: mokoconsulting-tech/MokoStandards/.github/workflows/reusable-ci-validation.yml@main with: profile: ${{ inputs.profile || 'full' }} validate-manifests: true diff --git a/.github/workflows/version_branch.yml b/.github/workflows/version_branch.yml index 86a2067..eed8836 100644 --- a/.github/workflows/version_branch.yml +++ b/.github/workflows/version_branch.yml @@ -1,738 +1,402 @@ -name: Version Branch and Bump +# Copyright (C) 2026 Moko Consulting +# +# This file is part of a Moko Consulting project. 
+# +# SPDX-License-Identifier: GPL-3.0-or-later +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# +# FILE INFORMATION +# DEFGROUP: GitHub.WorkflowTemplate +# INGROUP: MokoStandards.Templates +# REPO: https://github.com/mokoconsulting-tech/MokoStandards +# PATH: /.github/workflow-templates/release-cycle.yml +# VERSION: 01.00.00 +# BRIEF: Release management workflow with automated release flow +# NOTE: Implements main โ†’ dev โ†’ rc โ†’ version โ†’ main cycle with semantic versioning + +name: Release Management on: - # Run monthly on the 1st at 03:00 UTC for automated version planning - schedule: - - cron: '0 3 1 * *' - - # Run on pull requests to version branches for validation - pull_request: - branches: - - version/** - - rc/** - - # Run on release creation for version tracking - release: - types: [published, created] - - # Allow manual triggering with full options workflow_dispatch: inputs: - new_version: - description: "New version in format NN.NN.NN (example 03.01.00)" + action: + description: 'Release action to perform' + required: true + type: choice + options: + - start-release + - create-rc + - finalize-release + - hotfix + version: + description: 'Version number (e.g., 1.2.3 for semantic versioning)' required: true type: string - version_text: - description: "Optional version label text (example: LTS, RC1, hotfix)" + release_notes: + description: 'Release notes or changelog summary (optional)' 
required: false - default: "" type: string - report_only: - description: "Report only mode (no branch creation, no file writes, report output only)" - required: false - default: "false" - type: choice - options: - - "true" - - "false" - commit_changes: - description: "Commit and push changes (forced to true when report_only=false)" - required: false - default: "true" - type: choice - options: - - "true" - - "false" - branch_prefix: - description: "Branch prefix for version (version/ for stable, rc/ for release candidate, dev/ for development)" - required: false - default: "dev/" - type: choice - options: - - "dev/" - - "rc/" - - "version/" - -concurrency: - group: ${{ github.workflow }}-${{ github.repository }}-${{ github.event.inputs.new_version }} - cancel-in-progress: false permissions: contents: write pull-requests: write -defaults: - run: - shell: bash - jobs: - version-bump: - name: Version branch and bump + validate-version: + name: Validate Version Format runs-on: ubuntu-latest - - env: - NEW_VERSION: ${{ github.event.inputs.new_version }} - VERSION_TEXT: ${{ github.event.inputs.version_text }} - REPORT_ONLY: ${{ github.event.inputs.report_only }} - COMMIT_CHANGES: ${{ github.event.inputs.commit_changes }} - BASE_BRANCH: ${{ github.ref_name }} - BRANCH_PREFIX: ${{ github.event.inputs.branch_prefix || 'dev/' }} - ERROR_LOG: /tmp/version_branch_errors.log - CI_HELPERS: /tmp/moko_ci_helpers.sh - REPORT_PATH: /tmp/version-bump-report.json - + outputs: + version: ${{ steps.validate.outputs.version }} + major: ${{ steps.validate.outputs.major }} + minor: ${{ steps.validate.outputs.minor }} + patch: ${{ steps.validate.outputs.patch }} + steps: - - name: Checkout repository - uses: actions/checkout@v6 + - name: Validate Semantic Version + id: validate + run: | + VERSION="${{ inputs.version }}" + + # Remove 'v' prefix if present + VERSION=${VERSION#v} + + # Validate semantic versioning format (MAJOR.MINOR.PATCH) + if ! 
echo "$VERSION" | grep -qE '^[0-9]+\.[0-9]+\.[0-9]+$'; then + echo "โŒ Invalid version format: $VERSION" >> $GITHUB_STEP_SUMMARY + echo "Expected format: MAJOR.MINOR.PATCH (e.g., 1.2.3)" >> $GITHUB_STEP_SUMMARY + exit 1 + fi + + # Extract version components + MAJOR=$(echo "$VERSION" | cut -d. -f1) + MINOR=$(echo "$VERSION" | cut -d. -f2) + PATCH=$(echo "$VERSION" | cut -d. -f3) + + echo "version=$VERSION" >> $GITHUB_OUTPUT + echo "major=$MAJOR" >> $GITHUB_OUTPUT + echo "minor=$MINOR" >> $GITHUB_OUTPUT + echo "patch=$PATCH" >> $GITHUB_OUTPUT + + echo "โœ… Valid semantic version: $VERSION" >> $GITHUB_STEP_SUMMARY + echo "- Major: $MAJOR" >> $GITHUB_STEP_SUMMARY + echo "- Minor: $MINOR" >> $GITHUB_STEP_SUMMARY + echo "- Patch: $PATCH" >> $GITHUB_STEP_SUMMARY + + start-release: + name: Start Release (main โ†’ dev) + runs-on: ubuntu-latest + needs: validate-version + if: inputs.action == 'start-release' + + steps: + - name: Checkout Repository + uses: actions/checkout@v4 with: fetch-depth: 0 - ref: ${{ github.ref_name }} - - - name: Init CI helpers + ref: main + + - name: Configure Git run: | - set -Eeuo pipefail - : > "$ERROR_LOG" - - cat > "$CI_HELPERS" <<'SH' - set -Eeuo pipefail - - moko_init() { - local step_name="${1:-step}" - export PS4='+ ['"${step_name}"':${BASH_SOURCE##*/}:${LINENO}] ' - set -x - trap "moko_on_err '${step_name}' \"\$LINENO\" \"\$BASH_COMMAND\"" ERR - } - - moko_on_err() { - local step_name="$1" - local line_no="$2" - local last_cmd="$3" - - echo "[FATAL] ${step_name} failed at line ${line_no}" >&2 - echo "[FATAL] Last command: ${last_cmd}" >&2 - - if [[ -n "${ERROR_LOG:-}" ]]; then - echo "$(date -u +%Y-%m-%dT%H:%M:%SZ) | ${step_name} | line ${line_no} | ${last_cmd}" >> "$ERROR_LOG" || true - fi - } - - moko_bool() { - local v="${1:-false}" - [[ "${v}" == "true" ]] - } - - moko_trim() { - local s="${1:-}" - s="${s#${s%%[![:space:]]*}}" - s="${s%${s##*[![:space:]]}}" - printf '%s' "$s" - } - SH - - chmod 0755 "$CI_HELPERS" - - - name: Validate 
inputs and policy locks - run: | - source "$CI_HELPERS" - moko_init "Validate inputs and policy locks" - - VERSION_TEXT="$(moko_trim "${VERSION_TEXT}")" - - echo "[INFO] Inputs received:" - echo " NEW_VERSION=${NEW_VERSION}" - echo " VERSION_TEXT=${VERSION_TEXT}" - echo " REPORT_ONLY=${REPORT_ONLY}" - echo " COMMIT_CHANGES=${COMMIT_CHANGES}" - echo " BASE_BRANCH=${BASE_BRANCH}" - echo " BRANCH_PREFIX=${BRANCH_PREFIX}" - - [[ -n "${NEW_VERSION}" ]] || { echo "[ERROR] new_version missing" >&2; exit 2; } - [[ "${NEW_VERSION}" =~ ^[0-9]{2}[.][0-9]{2}[.][0-9]{2}$ ]] || { echo "[ERROR] Invalid version format: ${NEW_VERSION}" >&2; exit 2; } - - # Validate BRANCH_PREFIX is one of the allowed values - case "${BRANCH_PREFIX}" in - "dev/"|"rc/"|"version/") - echo "[INFO] โœ“ Branch prefix '${BRANCH_PREFIX}' is valid" - ;; - *) - echo "[FATAL] BRANCH_PREFIX must be one of: dev/, rc/, version/ (got: '${BRANCH_PREFIX}')" >&2 - exit 2 - ;; - esac - - if ! moko_bool "${REPORT_ONLY}" && [[ "${COMMIT_CHANGES}" != "true" ]]; then - echo "[FATAL] commit_changes must be 'true' when report_only is 'false' to ensure the branch is auditable." >&2 - exit 2 - fi - - if [[ -n "${VERSION_TEXT}" ]]; then - if [[ ! "${VERSION_TEXT}" =~ ^[A-Za-z0-9._-]{1,32}$ ]]; then - echo "[FATAL] version_text must match ^[A-Za-z0-9._-]{1,32}$ when set." 
>&2 - exit 2 - fi - fi - - git ls-remote --exit-code --heads origin "${BASE_BRANCH}" >/dev/null 2>&1 || { - echo "[ERROR] Base branch does not exist on origin: ${BASE_BRANCH}" >&2 - echo "[INFO] Remote branches:" >&2 - git ls-remote --heads origin | awk '{sub("refs/heads/","",$2); print $2}' >&2 - exit 2 - } - - echo "VERSION_TEXT=${VERSION_TEXT}" >> "$GITHUB_ENV" - - - name: Sanity check workflow file (no literal tabs or control chars) - run: | - source "$CI_HELPERS" - moko_init "Sanity check workflow file" - - python3 - <<'PY' - from pathlib import Path - - target = Path('.github/workflows/version_branch.yml') - if not target.exists(): - raise SystemExit('[FATAL] Missing workflow file: .github/workflows/version_branch.yml') - - data = target.read_bytes() - - # Disallow literal tab (0x09) and other ASCII control characters except LF (0x0A) and CR (0x0D). - # Report line numbers without printing the raw characters. - - def byte_to_line(blob: bytes, idx: int) -> int: - return blob[:idx].count(b'\n') + 1 - - bad = [] - for i, b in enumerate(data): - if b == 0x09: - bad.append(('TAB', i, b)) - elif b < 0x20 and b not in (0x0A, 0x0D): - bad.append(('CTRL', i, b)) - - if bad: - print('[ERROR] Disallowed characters detected in workflow file:') - for kind, off, val in bad[:200]: - line_no = byte_to_line(data, off) - if kind == 'TAB': - print(f' line {line_no}: TAB_PRESENT') - else: - print(f' line {line_no}: CTRL_0x{val:02X}_PRESENT') - raise SystemExit(2) - - print('[INFO] Sanity check passed') - PY - - - name: Enterprise policy gate - run: | - source "$CI_HELPERS" - moko_init "Enterprise policy gate" - - required=( - "LICENSE" - "CONTRIBUTING.md" - "CODE_OF_CONDUCT.md" - "SECURITY.md" - "GOVERNANCE.md" - "CHANGELOG.md" - ) - - missing=0 - for f in "${required[@]}"; do - if [[ ! -f "${f}" ]]; then - echo "[ERROR] Missing required file: ${f}" >&2 - missing=1 - continue - fi - if [[ ! 
-s "${f}" ]]; then - echo "[ERROR] Required file is empty: ${f}" >&2 - missing=1 - continue - fi - done - - if [[ "${missing}" -ne 0 ]]; then - echo "[FATAL] Policy gate failed. Add missing governance artifacts before versioning." >&2 - exit 2 - fi - - echo "[INFO] Policy gate passed" - - - name: Branch namespace collision defense - run: | - source "$CI_HELPERS" - moko_init "Branch namespace collision defense" - - # Skip collision check for known static prefixes - case "${BRANCH_PREFIX}" in - "dev/"|"rc/"|"version/") - echo "[INFO] Skipping collision check for known prefix '${BRANCH_PREFIX}'" >&2 - exit 0 - ;; - esac - - PREFIX_TOP="${BRANCH_PREFIX%%/*}" - if git ls-remote --exit-code --heads origin "${PREFIX_TOP}" >/dev/null 2>&1; then - echo "[FATAL] Branch namespace collision detected: '${PREFIX_TOP}' exists on origin." >&2 - exit 2 - fi - - - name: Configure git identity - if: ${{ env.REPORT_ONLY != 'true' }} - run: | - source "$CI_HELPERS" - moko_init "Configure git identity" - git config user.name "github-actions[bot]" git config user.email "github-actions[bot]@users.noreply.github.com" - - - name: Create version branch (local) - if: ${{ env.REPORT_ONLY != 'true' }} + + - name: Create Development Branch run: | - source "$CI_HELPERS" - moko_init "Create version branch (local)" - - BRANCH_NAME="${BRANCH_PREFIX}${NEW_VERSION}" - echo "[INFO] Creating local branch: ${BRANCH_NAME} from origin/${BASE_BRANCH}" - - git fetch --all --tags --prune - - if git ls-remote --exit-code --heads origin "${BRANCH_NAME}" >/dev/null 2>&1; then - echo "[FATAL] Branch already exists on origin: ${BRANCH_NAME}" >&2 - exit 2 + VERSION="${{ needs.validate-version.outputs.version }}" + DEV_BRANCH="dev/$VERSION" + + # Check if dev branch already exists + if git ls-remote --heads origin "$DEV_BRANCH" | grep -q "$DEV_BRANCH"; then + echo "โš ๏ธ Development branch $DEV_BRANCH already exists" >> $GITHUB_STEP_SUMMARY + exit 1 + fi + + # Create and push development branch + git checkout -b 
"$DEV_BRANCH" + git push origin "$DEV_BRANCH" + + echo "โœ… Created development branch: $DEV_BRANCH" >> $GITHUB_STEP_SUMMARY + echo "Next steps:" >> $GITHUB_STEP_SUMMARY + echo "1. Make changes in $DEV_BRANCH" >> $GITHUB_STEP_SUMMARY + echo "2. Run 'create-rc' action when ready for testing" >> $GITHUB_STEP_SUMMARY + + - name: Update Version Files + run: | + VERSION="${{ needs.validate-version.outputs.version }}" + + # Update version in common files (customize for your project) + # Examples: + if [ -f "package.json" ]; then + npm version "$VERSION" --no-git-tag-version + git add package.json package-lock.json 2>/dev/null || true + echo "โœ… Updated package.json" >> $GITHUB_STEP_SUMMARY + fi + + if [ -f "composer.json" ]; then + # Update version in composer.json if it exists + if grep -q "\"version\":" composer.json; then + sed -i "s/\"version\": \".*\"/\"version\": \"$VERSION\"/" composer.json + git add composer.json + echo "โœ… Updated composer.json" >> $GITHUB_STEP_SUMMARY + fi + fi + + # Commit changes if any + if ! git diff --staged --quiet; then + git commit -m "chore: bump version to $VERSION" + git push origin "dev/$VERSION" + echo "โœ… Version files updated and committed" >> $GITHUB_STEP_SUMMARY fi - git checkout -B "${BRANCH_NAME}" "origin/${BASE_BRANCH}" - echo "BRANCH_NAME=${BRANCH_NAME}" >> "$GITHUB_ENV" - - - name: Validate version hierarchy (prevent creating in lower priority branches) - if: ${{ env.REPORT_ONLY != 'true' }} + create-rc: + name: Create Release Candidate (dev โ†’ rc) + runs-on: ubuntu-latest + needs: validate-version + if: inputs.action == 'create-rc' + + steps: + - name: Checkout Repository + uses: actions/checkout@v4 + with: + fetch-depth: 0 + ref: dev/${{ needs.validate-version.outputs.version }} + + - name: Configure Git run: | - source "$CI_HELPERS" - moko_init "Validate version hierarchy" - - # Version hierarchy (highest to lowest priority): - # 1. version/X.Y.Z - production/stable versions - # 2. 
rc/X.Y.Z - release candidate versions - # 3. dev/X.Y.Z - development versions - # - # Rule: If a version exists in a higher priority branch, - # do not allow creating it in a lower priority branch + git config user.name "github-actions[bot]" + git config user.email "github-actions[bot]@users.noreply.github.com" + + - name: Create Release Candidate Branch + run: | + VERSION="${{ needs.validate-version.outputs.version }}" + RC_BRANCH="rc/$VERSION" + DEV_BRANCH="dev/$VERSION" - echo "[INFO] Checking version hierarchy for: ${NEW_VERSION}" - echo "[INFO] Current branch prefix: ${BRANCH_PREFIX}" + # Check if rc branch already exists + if git ls-remote --heads origin "$RC_BRANCH" | grep -q "$RC_BRANCH"; then + echo "โš ๏ธ Release candidate branch $RC_BRANCH already exists" >> $GITHUB_STEP_SUMMARY + exit 1 + fi + + # Create RC branch from dev + git checkout -b "$RC_BRANCH" + git push origin "$RC_BRANCH" + + echo "โœ… Created release candidate branch: $RC_BRANCH" >> $GITHUB_STEP_SUMMARY + echo "Next steps:" >> $GITHUB_STEP_SUMMARY + echo "1. Test thoroughly in $RC_BRANCH" >> $GITHUB_STEP_SUMMARY + echo "2. Fix any issues found (commit to $RC_BRANCH)" >> $GITHUB_STEP_SUMMARY + echo "3. 
Run 'finalize-release' action when ready to release" >> $GITHUB_STEP_SUMMARY + + - name: Create Pre-release Tag + run: | + VERSION="${{ needs.validate-version.outputs.version }}" + TAG="v${VERSION}-rc" + + git tag -a "$TAG" -m "Release Candidate $VERSION" + git push origin "$TAG" + + echo "โœ… Created pre-release tag: $TAG" >> $GITHUB_STEP_SUMMARY - # Determine current priority level - case "${BRANCH_PREFIX}" in - "version/") - echo "[INFO] Creating stable version branch - no hierarchy checks needed" + finalize-release: + name: Finalize Release (rc โ†’ version โ†’ main) + runs-on: ubuntu-latest + needs: validate-version + if: inputs.action == 'finalize-release' + + steps: + - name: Checkout Repository + uses: actions/checkout@v4 + with: + fetch-depth: 0 + ref: rc/${{ needs.validate-version.outputs.version }} + + - name: Configure Git + run: | + git config user.name "github-actions[bot]" + git config user.email "github-actions[bot]@users.noreply.github.com" + + - name: Create Version Branch + run: | + VERSION="${{ needs.validate-version.outputs.version }}" + VERSION_BRANCH="version/$VERSION" + + # Create version branch (permanent record) + git checkout -b "$VERSION_BRANCH" + git push origin "$VERSION_BRANCH" + + echo "โœ… Created version branch: $VERSION_BRANCH" >> $GITHUB_STEP_SUMMARY + + - name: Merge to Main + run: | + VERSION="${{ needs.validate-version.outputs.version }}" + + # Merge to main + git checkout main + git pull origin main + git merge --no-ff "version/$VERSION" -m "Release version $VERSION" + git push origin main + + echo "โœ… Merged to main branch" >> $GITHUB_STEP_SUMMARY + + - name: Create Release Tag + run: | + VERSION="${{ needs.validate-version.outputs.version }}" + TAG="v$VERSION" + + git tag -a "$TAG" -m "Release $VERSION" + git push origin "$TAG" + + echo "โœ… Created release tag: $TAG" >> $GITHUB_STEP_SUMMARY + + - name: Generate Release Notes + id: release_notes + run: | + VERSION="${{ needs.validate-version.outputs.version }}" + + # 
Generate changelog from commits + NOTES="${{ inputs.release_notes }}" + + if [ -z "$NOTES" ]; then + # Auto-generate from git log if not provided + NOTES=$(git log --pretty=format:"- %s" "v${VERSION}-rc"..HEAD 2>/dev/null || echo "Initial release") + fi + + # Save to file for GitHub release + cat > release_notes.md </dev/null || echo "- Initial release") + EOF + + echo "โœ… Generated release notes" >> $GITHUB_STEP_SUMMARY + + - name: Create GitHub Release + uses: softprops/action-gh-release@v1 + with: + tag_name: v${{ needs.validate-version.outputs.version }} + name: Release ${{ needs.validate-version.outputs.version }} + body_path: release_notes.md + draft: false + prerelease: false + + - name: Cleanup Development Branches + run: | + VERSION="${{ needs.validate-version.outputs.version }}" + + # Optionally delete dev and rc branches after release + # Uncomment if you want automatic cleanup: + # git push origin --delete "dev/$VERSION" 2>/dev/null || true + # git push origin --delete "rc/$VERSION" 2>/dev/null || true + + echo "โ„น๏ธ Development branches retained for history" >> $GITHUB_STEP_SUMMARY + echo "To manually cleanup, run:" >> $GITHUB_STEP_SUMMARY + echo " git push origin --delete dev/$VERSION" >> $GITHUB_STEP_SUMMARY + echo " git push origin --delete rc/$VERSION" >> $GITHUB_STEP_SUMMARY + + hotfix: + name: Create Hotfix Branch + runs-on: ubuntu-latest + needs: validate-version + if: inputs.action == 'hotfix' + + steps: + - name: Checkout Repository + uses: actions/checkout@v4 + with: + fetch-depth: 0 + ref: main + + - name: Configure Git + run: | + git config user.name "github-actions[bot]" + git config user.email "github-actions[bot]@users.noreply.github.com" + + - name: Create Hotfix Branch + run: | + VERSION="${{ needs.validate-version.outputs.version }}" + HOTFIX_BRANCH="hotfix/$VERSION" + + # Create hotfix branch from main + git checkout -b "$HOTFIX_BRANCH" + git push origin "$HOTFIX_BRANCH" + + echo "โœ… Created hotfix branch: $HOTFIX_BRANCH" >> 
$GITHUB_STEP_SUMMARY + echo "Next steps:" >> $GITHUB_STEP_SUMMARY + echo "1. Apply hotfix changes to $HOTFIX_BRANCH" >> $GITHUB_STEP_SUMMARY + echo "2. Test thoroughly" >> $GITHUB_STEP_SUMMARY + echo "3. Create PR to merge back to main" >> $GITHUB_STEP_SUMMARY + echo "4. After merge, create release tag manually or re-run finalize-release" >> $GITHUB_STEP_SUMMARY + + summary: + name: Release Summary + runs-on: ubuntu-latest + needs: [validate-version, start-release, create-rc, finalize-release, hotfix] + if: always() + + steps: + - name: Generate Summary + run: | + echo "# Release Management Summary" >> $GITHUB_STEP_SUMMARY + echo "" >> $GITHUB_STEP_SUMMARY + echo "**Action**: ${{ inputs.action }}" >> $GITHUB_STEP_SUMMARY + echo "**Version**: ${{ needs.validate-version.outputs.version }}" >> $GITHUB_STEP_SUMMARY + echo "" >> $GITHUB_STEP_SUMMARY + + case "${{ inputs.action }}" in + start-release) + echo "## Release Started" >> $GITHUB_STEP_SUMMARY + echo "- Development branch created: dev/${{ needs.validate-version.outputs.version }}" >> $GITHUB_STEP_SUMMARY + echo "- Version files updated" >> $GITHUB_STEP_SUMMARY ;; - "rc/") - echo "[INFO] Creating RC branch - checking if version exists in stable" - if git ls-remote --exit-code --heads origin "version/${NEW_VERSION}" >/dev/null 2>&1; then - echo "[FATAL] Version ${NEW_VERSION} already exists in stable branch: version/${NEW_VERSION}" >&2 - echo "[FATAL] Cannot create RC version for a version that already exists in stable" >&2 - exit 2 - fi - echo "[INFO] โœ“ No conflict with stable versions" + create-rc) + echo "## Release Candidate Created" >> $GITHUB_STEP_SUMMARY + echo "- RC branch created: rc/${{ needs.validate-version.outputs.version }}" >> $GITHUB_STEP_SUMMARY + echo "- Pre-release tag created: v${{ needs.validate-version.outputs.version }}-rc" >> $GITHUB_STEP_SUMMARY ;; - "dev/") - echo "[INFO] Creating dev branch - checking if version exists in stable or RC" - - # Check stable versions - if git ls-remote 
--exit-code --heads origin "version/${NEW_VERSION}" >/dev/null 2>&1; then - echo "[FATAL] Version ${NEW_VERSION} already exists in stable branch: version/${NEW_VERSION}" >&2 - echo "[FATAL] Cannot create dev version for a version that already exists in stable" >&2 - exit 2 - fi - - # Check RC versions - if git ls-remote --exit-code --heads origin "rc/${NEW_VERSION}" >/dev/null 2>&1; then - echo "[FATAL] Version ${NEW_VERSION} already exists in RC branch: rc/${NEW_VERSION}" >&2 - echo "[FATAL] Cannot create dev version for a version that already exists in RC" >&2 - exit 2 - fi - - echo "[INFO] โœ“ No conflict with stable or RC versions" + finalize-release) + echo "## Release Finalized" >> $GITHUB_STEP_SUMMARY + echo "- Version branch created: version/${{ needs.validate-version.outputs.version }}" >> $GITHUB_STEP_SUMMARY + echo "- Merged to main" >> $GITHUB_STEP_SUMMARY + echo "- Release tag created: v${{ needs.validate-version.outputs.version }}" >> $GITHUB_STEP_SUMMARY + echo "- GitHub release published" >> $GITHUB_STEP_SUMMARY ;; - *) - echo "[WARN] Unknown branch prefix: ${BRANCH_PREFIX}, skipping hierarchy check" + hotfix) + echo "## Hotfix Branch Created" >> $GITHUB_STEP_SUMMARY + echo "- Hotfix branch created: hotfix/${{ needs.validate-version.outputs.version }}" >> $GITHUB_STEP_SUMMARY ;; esac - - name: Enforce update feed files absent (update.xml, updates.xml) - if: ${{ env.REPORT_ONLY != 'true' }} - run: | - source "$CI_HELPERS" - moko_init "Enforce update feed deletion" - - git rm -f --ignore-unmatch update.xml updates.xml || true - rm -f update.xml updates.xml || true - - if [[ -f update.xml || -f updates.xml ]]; then - echo "[FATAL] update feed files still present after deletion attempt." 
>&2 - ls -la update.xml updates.xml 2>/dev/null || true - exit 2 - fi - - - name: Preflight discovery (governed version markers outside .github) - run: | - source "$CI_HELPERS" - moko_init "Preflight discovery" - - COUNT=$(grep -RIn --exclude-dir=.git --exclude-dir=.github -i -E "VERSION[[:space:]]*:[[:space:]]*[0-9]{2}[.][0-9]{2}[.][0-9]{2}" . | wc -l || true) - COUNT2=$(grep -RIn --exclude-dir=.git --exclude-dir=.github " hits (repo wide): ${COUNT2}" - - if [[ "${COUNT}" -eq 0 && "${COUNT2}" -eq 0 ]]; then - echo "[FATAL] No governed version markers found outside .github" >&2 - exit 2 - fi - - - name: Bump versions and update manifest dates (targeted, excluding .github) - run: | - source "$CI_HELPERS" - moko_init "Version bump" - - python3 - <<'PY' - import json - import os - import re - from pathlib import Path - from collections import defaultdict - from datetime import datetime, timezone - - new_version = (os.environ.get("NEW_VERSION") or "").strip() - version_text = (os.environ.get("VERSION_TEXT") or "").strip() - report_only = (os.environ.get("REPORT_ONLY") or "").strip().lower() == "true" - report_path = (os.environ.get("REPORT_PATH") or "").strip() - - stamp = datetime.now(timezone.utc).strftime("%Y-%m-%d") - root = Path(".").resolve() - - # Use escape sequences only. Do not introduce literal tab characters. 
- header_re = re.compile(r"(?im)(VERSION[ \t]*:[ \t]*)([0-9]{2}[.][0-9]{2}[.][0-9]{2})") - manifest_marker_re = re.compile(r"(?is))([^<]*?)()") - xml_date_res = [ - re.compile(r"(?is)()([^<]*?)()"), - re.compile(r"(?is)()([^<]*?)()"), - re.compile(r"(?is)()([^<]*?)()"), - ] - - skip_ext = { - ".json", ".png", ".jpg", ".jpeg", ".gif", ".svg", ".ico", ".pdf", - ".zip", ".7z", ".tar", ".gz", ".woff", ".woff2", ".ttf", ".otf", - ".mp3", ".mp4", - } - skip_dirs = {".git", ".github", "node_modules", "vendor", ".venv", "dist", "build"} - - counters = defaultdict(int) - updated_files = [] - updated_manifests = [] - would_update_files = [] - would_update_manifests = [] - - exclude_root = {"update.xml", "updates.xml"} - - def should_skip(p: Path) -> bool: - if p.suffix.lower() in skip_ext: - counters["skipped_by_ext"] += 1 - return True - parts = {x.lower() for x in p.parts} - if any(d in parts for d in skip_dirs): - counters["skipped_by_dir"] += 1 - return True - return False - - for p in root.rglob("*"): - if not p.is_file(): - continue - if should_skip(p): - continue - - if p.parent == root and p.name.lower() in exclude_root: - counters["skipped_release_artifacts"] += 1 - continue - - try: - original = p.read_text(encoding="utf-8", errors="replace") - except Exception: - counters["skipped_read_error"] += 1 - continue - - text = original - - text, n1 = header_re.subn(lambda m: m.group(1) + new_version, text) - if n1: - counters["header_replacements"] += n1 - - is_manifest = (p.suffix.lower() == ".xml" and manifest_marker_re.search(original) is not None) - if is_manifest: - text, n2 = xml_version_re.subn(lambda m: m.group(1) + new_version + m.group(3), text) - if n2: - counters["xml_version_replacements"] += n2 - - for rx in xml_date_res: - text, n3 = rx.subn(lambda m: m.group(1) + stamp + m.group(3), text) - if n3: - counters["xml_date_replacements"] += n3 - - if text != original: - would_update_files.append(str(p)) - if is_manifest: - would_update_manifests.append(str(p)) 
- - if not report_only: - p.write_text(text, encoding="utf-8") - updated_files.append(str(p)) - if is_manifest: - updated_manifests.append(str(p)) - - report = { - "mode": "report_only" if report_only else "apply", - "new_version": new_version, - "version_text": version_text, - "stamp_utc": stamp, - "counters": dict(counters), - "updated_files": updated_files, - "updated_manifests": updated_manifests, - "would_update_files": would_update_files, - "would_update_manifests": would_update_manifests, - } - - Path(report_path).write_text(json.dumps(report, indent=2), encoding="utf-8") - - print("[INFO] Report written to:", report_path) - print("[INFO] Mode:", report["mode"]) - print("[INFO] Would update files:", len(would_update_files)) - print("[INFO] Would update manifests:", len(would_update_manifests)) - print("[INFO] Updated files:", len(updated_files)) - print("[INFO] Updated manifests:", len(updated_manifests)) - PY - - - name: Update CHANGELOG (move Unreleased into version entry) - run: | - source "$CI_HELPERS" - moko_init "Update CHANGELOG" - - if [[ ! -f "CHANGELOG.md" ]]; then - echo "[FATAL] CHANGELOG.md not found." 
>&2 - exit 2 - fi - - python3 - <<'PY' - import os - from datetime import datetime, timezone - from pathlib import Path - - new_version = (os.environ.get("NEW_VERSION") or "").strip() - if not new_version: - raise SystemExit("[FATAL] NEW_VERSION not set") - - stamp = datetime.now(timezone.utc).strftime("%Y-%m-%d") - p = Path("CHANGELOG.md") - lines = p.read_text(encoding="utf-8", errors="replace").splitlines(True) - - def is_h2(line: str) -> bool: - return line.lstrip().startswith("## ") - - def norm(line: str) -> str: - return line.strip().lower() - - def find_idx(predicate): - for i, ln in enumerate(lines): - if predicate(ln): - return i - return None - - unreleased_idx = find_idx(lambda ln: norm(ln) == "## [unreleased]") - version_idx = find_idx(lambda ln: ln.lstrip().startswith(f"## [{new_version}]")) - - def version_header() -> list[str]: - return ["\n", f"## [{new_version}] - {stamp}\n", "\n"] - - if unreleased_idx is None: - if version_idx is None: - insert_at = 0 - for i, ln in enumerate(lines): - if ln.lstrip().startswith("# "): - insert_at = i + 1 - while insert_at < len(lines) and lines[insert_at].strip() == "": - insert_at += 1 - break - entry = version_header() + ["- No changes recorded.\n", "\n"] - lines[insert_at:insert_at] = entry - p.write_text("".join(lines), encoding="utf-8") - raise SystemExit(0) - - u_start = unreleased_idx + 1 - u_end = len(lines) - for j in range(u_start, len(lines)): - if is_h2(lines[j]): - u_end = j - break - - unreleased_body = "".join(lines[u_start:u_end]).strip() - - if version_idx is None: - lines[u_end:u_end] = version_header() - - version_idx = find_idx(lambda ln: ln.lstrip().startswith(f"## [{new_version}]")) - if version_idx is None: - raise SystemExit("[FATAL] Failed to locate version header after insertion") - - if unreleased_body: - insert_at = version_idx + 1 - while insert_at < len(lines) and lines[insert_at].strip() == "": - insert_at += 1 - - moved = ["\n"] + [ln + "\n" for ln in unreleased_body.split("\n") 
if ln != ""] + ["\n"] - lines[insert_at:insert_at] = moved - - unreleased_idx = find_idx(lambda ln: norm(ln) == "## [unreleased]") - if unreleased_idx is not None: - u_start = unreleased_idx + 1 - u_end = len(lines) - for j in range(u_start, len(lines)): - if is_h2(lines[j]): - u_end = j - break - lines[u_start:u_end] = ["\n"] - - p.write_text("".join(lines), encoding="utf-8") - PY - - - name: Commit changes - if: ${{ env.REPORT_ONLY != 'true' }} - run: | - source "$CI_HELPERS" - moko_init "Commit changes" - - if [[ -z "$(git status --porcelain=v1)" ]]; then - echo "[INFO] No changes detected. Skipping commit." - exit 0 - fi - - git add -A - - MSG="chore(release): bump version to ${NEW_VERSION}" - if [[ -n "${VERSION_TEXT}" ]]; then - MSG="${MSG} (${VERSION_TEXT})" - fi - - git commit -m "${MSG}" - - - name: Push branch - if: ${{ env.REPORT_ONLY != 'true' }} - run: | - source "$CI_HELPERS" - moko_init "Push branch" - - if [[ -z "${BRANCH_NAME:-}" ]]; then - echo "[FATAL] BRANCH_NAME not set." >&2 - exit 2 - fi - - git push --set-upstream origin "${BRANCH_NAME}" - - - name: Create pull request - if: ${{ env.REPORT_ONLY != 'true' }} - env: - GH_TOKEN: ${{ github.token }} - run: | - source "$CI_HELPERS" - moko_init "Create pull request" - - if [[ -z "${BRANCH_NAME:-}" ]]; then - echo "[FATAL] BRANCH_NAME not set." >&2 - exit 2 - fi - - PR_TITLE="Version branch ${NEW_VERSION}" - if [[ -n "${VERSION_TEXT}" ]]; then - PR_TITLE="${PR_TITLE} (${VERSION_TEXT})" - fi - - cat > /tmp/pr_body.txt <<'PRBODY' - This pull request was automatically created by the version branch workflow. 
- PRBODY - - # Add dynamic content - { - echo "" - echo "Version: ${NEW_VERSION}" - echo "Version Text: ${VERSION_TEXT:-N/A}" - echo "Branch Prefix: ${BRANCH_PREFIX}" - echo "Base Branch: ${BASE_BRANCH}" - } >> /tmp/pr_body.txt - - echo "[INFO] Creating pull request from ${BRANCH_NAME} to ${BASE_BRANCH}" - - set +e - PR_OUTPUT=$(gh pr create \ - --base "${BASE_BRANCH}" \ - --head "${BRANCH_NAME}" \ - --title "${PR_TITLE}" \ - --body-file /tmp/pr_body.txt 2>&1) - PR_EXIT_CODE=$? - set -e - - if [[ ${PR_EXIT_CODE} -eq 0 ]]; then - echo "[INFO] Pull request created successfully" - echo "${PR_OUTPUT}" - else - echo "[WARN] Failed to create pull request (exit code: ${PR_EXIT_CODE})" >&2 - echo "[WARN] Output: ${PR_OUTPUT}" >&2 - - # Check for common error conditions with specific patterns - if echo "${PR_OUTPUT}" | grep -iqE "pull request.*already exists"; then - echo "[INFO] PR already exists, continuing..." >&2 - elif echo "${PR_OUTPUT}" | grep -iqE "no commits between.*branch"; then - echo "[INFO] No commits between branches, continuing..." 
>&2 - else - echo "[WARN] Unexpected error occurred, but continuing workflow" >&2 - fi - fi - - - name: Publish audit trail - if: always() - run: | - source "$CI_HELPERS" - moko_init "Publish audit trail" - - echo "# Version branch run" >> "$GITHUB_STEP_SUMMARY" - echo "" >> "$GITHUB_STEP_SUMMARY" - echo "- Repository: $GITHUB_REPOSITORY" >> "$GITHUB_STEP_SUMMARY" - echo "- Base branch: ${BASE_BRANCH}" >> "$GITHUB_STEP_SUMMARY" - echo "- Branch prefix: ${BRANCH_PREFIX}" >> "$GITHUB_STEP_SUMMARY" - echo "- New version: ${NEW_VERSION}" >> "$GITHUB_STEP_SUMMARY" - echo "- Version text: ${VERSION_TEXT}" >> "$GITHUB_STEP_SUMMARY" - echo "- Report only: ${REPORT_ONLY}" >> "$GITHUB_STEP_SUMMARY" - echo "- Commit changes: ${COMMIT_CHANGES}" >> "$GITHUB_STEP_SUMMARY" - echo "- New branch: ${BRANCH_NAME:-}" >> "$GITHUB_STEP_SUMMARY" - echo "" >> "$GITHUB_STEP_SUMMARY" - - echo "## Version bump report" >> "$GITHUB_STEP_SUMMARY" - echo "" >> "$GITHUB_STEP_SUMMARY" - - if [[ -f "${REPORT_PATH}" ]]; then - echo "\`\`\`json" >> "$GITHUB_STEP_SUMMARY" - head -c 12000 "${REPORT_PATH}" >> "$GITHUB_STEP_SUMMARY" || true - echo "" >> "$GITHUB_STEP_SUMMARY" - echo "\`\`\`" >> "$GITHUB_STEP_SUMMARY" - else - echo "Report file not found at: ${REPORT_PATH}" >> "$GITHUB_STEP_SUMMARY" - fi - - echo "" >> "$GITHUB_STEP_SUMMARY" - echo "## Error summary" >> "$GITHUB_STEP_SUMMARY" - echo "" >> "$GITHUB_STEP_SUMMARY" - - if [[ -f "$ERROR_LOG" && -s "$ERROR_LOG" ]]; then - echo "\`\`\`" >> "$GITHUB_STEP_SUMMARY" - tail -n 200 "$ERROR_LOG" >> "$GITHUB_STEP_SUMMARY" || true - echo "\`\`\`" >> "$GITHUB_STEP_SUMMARY" - else - echo "No errors recorded." 
>> "$GITHUB_STEP_SUMMARY" - fi +# RELEASE FLOW DIAGRAM: +# +# Normal Release: +# main โ†’ dev/X.Y.Z โ†’ rc/X.Y.Z โ†’ version/X.Y.Z โ†’ main (tagged vX.Y.Z) +# +# Hotfix: +# main โ†’ hotfix/X.Y.Z โ†’ main (tagged vX.Y.Z) +# +# SEMANTIC VERSIONING: +# - MAJOR version: incompatible API changes +# - MINOR version: backwards-compatible functionality +# - PATCH version: backwards-compatible bug fixes +# +# CUSTOMIZATION: +# - Modify version file updates in start-release job +# - Add build/test steps before creating releases +# - Customize release notes generation +# - Add notification steps (Slack, email, etc.) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 8aaa234..f262c46 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -61,9 +61,6 @@ cd moko-cassiopeia # Run development setup make dev-setup - -# Install Git hooks (optional but recommended) -./scripts/git/install-hooks.sh ``` See [docs/QUICK_START.md](./docs/QUICK_START.md) for detailed setup instructions. @@ -75,8 +72,7 @@ The repository provides several tools to streamline development: * **Makefile**: Common development tasks (`make help` to see all commands) * **Pre-commit Hooks**: Automatic local validation before commits * **VS Code Tasks**: Pre-configured tasks for common operations -* **Validation Scripts**: Located in `scripts/validate/` -* **CI/CD Workflows**: Automated testing and deployment +* **CI/CD Workflows**: Automated testing and deployment via MokoStandards Run `make validate-required` before submitting PRs to catch common issues early. diff --git a/README.md b/README.md index 0285f78..bab357a 100644 --- a/README.md +++ b/README.md @@ -150,9 +150,6 @@ make quality # Create distribution package make package - -# Install Git hooks (optional but recommended) -python3 ./scripts/git/install-hooks.py ``` **New to the project?** See [Quick Start Guide](./docs/QUICK_START.md) for a 5-minute walkthrough. 
@@ -162,23 +159,21 @@ python3 ./scripts/git/install-hooks.py - **[Quick Start Guide](./docs/QUICK_START.md)** - Get up and running in 5 minutes - **[Workflow Guide](./docs/WORKFLOW_GUIDE.md)** - Complete workflow reference with examples - **[Joomla Development Guide](./docs/JOOMLA_DEVELOPMENT.md)** - Testing, quality checks, and deployment -- **[Scripts Documentation](./scripts/README.md)** - Available automation scripts - **[Contributing Guide](./CONTRIBUTING.md)** - How to contribute ### Available Tools - **Makefile**: Run `make help` to see all available commands -- **Python Scripts**: All automation scripts are now Python-based for cross-platform compatibility - **Pre-commit Hooks**: Automatic validation before commits - **VS Code Tasks**: Pre-configured development tasks - **GitHub Actions**: Automated CI/CD pipelines ### Cross-Platform Support -All scripts are now written in Python for maximum cross-platform compatibility: +All automation is handled through the Makefile and GitHub Actions workflows for maximum cross-platform compatibility: - **Joomla Extension Support**: Full support for Joomla 4.x and 5.x templates, components, modules, and plugins -- **Dolibarr Module Support**: Automatic detection and packaging of Dolibarr modules -- **Platform Detection**: Scripts automatically detect whether you're working with Joomla or Dolibarr extensions +- **Platform Detection**: Workflows automatically detect whether you're working with Joomla extensions +- **MokoStandards Integration**: Uses reusable workflows from MokoStandards for consistency ### Joomla Development Workflows diff --git a/docs/README.md b/docs/README.md index 053e4ff..06dd1e1 100644 --- a/docs/README.md +++ b/docs/README.md @@ -69,7 +69,6 @@ moko-cassiopeia/ โ”œโ”€โ”€ src/ # Template source code โ”‚ โ”œโ”€โ”€ templates/ # Joomla template files โ”‚ โ””โ”€โ”€ media/ # Assets (CSS, JS, images) -โ”œโ”€โ”€ scripts/ # Build and automation scripts โ”œโ”€โ”€ tests/ # Automated tests โ””โ”€โ”€ .github/ # 
GitHub configuration and workflows ``` diff --git a/scripts/ENTERPRISE.md b/scripts/ENTERPRISE.md deleted file mode 100644 index a89e28a..0000000 --- a/scripts/ENTERPRISE.md +++ /dev/null @@ -1,583 +0,0 @@ - - -# Enterprise Standards for Scripts - -This document defines the enterprise-grade standards and best practices -implemented across all automation scripts in this repository. - -## Table of Contents - -- [Overview](#overview) -- [Core Principles](#core-principles) -- [Script Structure](#script-structure) -- [Error Handling](#error-handling) -- [Logging and Observability](#logging-and-observability) -- [Security Standards](#security-standards) -- [Dependency Management](#dependency-management) -- [Exit Codes](#exit-codes) -- [Documentation Requirements](#documentation-requirements) -- [Testing and Validation](#testing-and-validation) -- [Operational Considerations](#operational-considerations) - -## Overview - -All scripts in this repository follow enterprise-grade standards to ensure: -- **Reliability**: Predictable behavior in all environments -- **Security**: Protection against vulnerabilities and credential exposure -- **Observability**: Clear logging and error reporting -- **Maintainability**: Consistent patterns and documentation -- **Portability**: Cross-platform compatibility - -## Core Principles - -### 1. Fail Fast, Fail Clearly - -Scripts must fail immediately when encountering errors and provide clear, -actionable error messages. - -```bash -set -euo pipefail # Required at top of all bash scripts -``` - -- `-e`: Exit on first error -- `-u`: Exit on undefined variable reference -- `-o pipefail`: Propagate pipeline failures - -### 2. Zero Assumptions - -- Always validate inputs -- Check for required dependencies -- Verify file/directory existence before access -- Never assume environment state - -### 3. Idempotency Where Possible - -Scripts should be safe to run multiple times without causing harm or -inconsistency. - -### 4. 
Least Privilege - -Scripts should: -- Never require root unless absolutely necessary -- Use minimal file system permissions -- Validate before modifying files - -## Script Structure - -### Standard Header Template - -Every script must include: - -```bash -#!/usr/bin/env bash - -# ============================================================================ -# Copyright (C) 2025 Moko Consulting -# -# This file is part of a Moko Consulting project. -# -# SPDX-License-Identifier: GPL-3.0-or-later -# -# [Full license text...] -# ============================================================================ - -# ============================================================================ -# FILE INFORMATION -# ============================================================================ -# DEFGROUP: Script.Category -# INGROUP: Subcategory -# REPO: https://github.com/mokoconsulting-tech/moko-cassiopeia -# PATH: /scripts/path/to/script.sh -# VERSION: XX.XX.XX -# BRIEF: One-line description of script purpose -# NOTE: Additional context or usage notes -# ============================================================================ - -set -euo pipefail -``` - -### Usage Function - -User-facing scripts must provide a usage/help function: - -```bash -usage() { -cat <<-USAGE -Usage: $0 [OPTIONS] - -Description of what the script does. - -Options: - -h, --help Show this help message - -v, --verbose Enable verbose output - -Arguments: - ARG1 Description of first argument - ARG2 Description of second argument - -Examples: - $0 example_value - $0 -v example_value - -Exit codes: - 0 - Success - 1 - Error - 2 - Invalid arguments - -USAGE -exit 0 -} -``` - -### Argument Parsing - -```bash -# Parse arguments -if [ "${1:-}" = "-h" ] || [ "${1:-}" = "--help" ]; then - usage -fi - -[ $# -ge 1 ] || usage -``` - -### Library Sourcing - -```bash -SCRIPT_DIR="$(cd "$(dirname "$0")/.." && pwd)" -. 
"${SCRIPT_DIR}/lib/common.sh" - -# Check dependencies -check_dependencies python3 git -``` - -## Error Handling - -### Error Messages - -Error messages must be: -- **Clear**: Explain what went wrong -- **Actionable**: Tell user how to fix it -- **Contextual**: Include relevant details -- **Verbose**: Provide comprehensive information by default - -```bash -# Bad -die "Error" - -# Good - Verbose with context and solutions -die "Required file not found: ${CONFIG_FILE} - - Current directory: $(pwd) - Expected location: ./config/${CONFIG_FILE} - - To fix: - 1. Run setup script: ./scripts/setup.sh - 2. Or create the file manually: touch config/${CONFIG_FILE} - " -``` - -### Error Output - -- Always show full error output for failed operations -- Include line numbers and file paths -- Show error summaries with troubleshooting steps -- Provide installation guides for missing dependencies - -Example verbose error from validation: -``` -ERROR: PHP syntax validation failed -Files checked: 90 -Files with errors: 2 - -Failed files and errors: - File: src/test.php - Error: Parse error: syntax error, unexpected '}' in src/test.php on line 42 - - File: src/helper.php - Error: Parse error: syntax error, unexpected T_STRING in src/helper.php on line 15 - -To fix: Review and correct the syntax errors in the files listed above. -Run 'php -l ' on individual files for detailed error messages. -``` - -### Validation - -```bash -# Validate inputs -validate_version() { - local v="$1" - if ! printf '%s' "$v" | grep -Eq '^[0-9]+\.[0-9]+\.[0-9]+$'; then - die "Invalid version format: $v (expected X.Y.Z)" - fi -} - -# Check file existence -assert_file_exists "${MANIFEST}" || die "Manifest not found: ${MANIFEST}" - -# Verify directory -assert_dir_exists "${SRC_DIR}" || die "Source directory missing: ${SRC_DIR}" -``` - -## Logging and Observability - -### Logging Functions - -Use standard logging functions from `lib/common.sh`: - -```bash -log_info "Starting process..." 
# Informational messages -log_warn "Configuration missing" # Warnings (non-fatal) -log_error "Validation failed" # Errors (fatal) -die "Critical error occurred" # Fatal with exit -``` - -### Timestamps - -Include timestamps for audit trails: - -```bash -log_info "Start time: $(log_timestamp)" -# ... work ... -log_info "End time: $(log_timestamp)" -``` - -### Structured Output - -For machine-readable output, use JSON: - -```bash -printf '{"status":"ok","files_checked":%s}\n' "${count}" -``` - -### Progress Reporting - -For long-running operations: - -```bash -log_section "Phase 1: Validation" -log_step "Checking manifests..." -log_success "โœ“ Manifests valid" -log_kv "Files processed" "${count}" -``` - -## Security Standards - -### 1. No Hardcoded Secrets - -- Never commit credentials -- Use environment variables for sensitive data -- Validate against secret patterns - -### 2. Input Sanitization - -```bash -# Validate user input -if [[ "${input}" =~ [^a-zA-Z0-9._-] ]]; then - die "Invalid input: contains disallowed characters" -fi -``` - -### 3. File Operations - -```bash -# Use explicit paths -FILE="/full/path/to/file" - -# Avoid user-controlled paths without validation -# Validate before rm/mv operations -``` - -### 4. Command Injection Prevention - -```bash -# Use arrays for command arguments -args=("$file1" "$file2") -command "${args[@]}" - -# Quote all variables -grep "${pattern}" "${file}" -``` - -## Dependency Management - -### Required Dependencies Check - -```bash -# At script start -check_dependencies python3 git sed - -# Or inline -require_cmd xmllint || die "xmllint not available" -``` - -### Graceful Degradation - -When optional dependencies are missing: - -```bash -if ! 
command -v php >/dev/null 2>&1; then - log_warn "PHP not available, skipping syntax check" - exit 0 -fi -``` - -## Exit Codes - -Standard exit codes across all scripts: - -| Code | Meaning | Usage | -|------|---------|-------| -| 0 | Success | All operations completed successfully | -| 1 | Error | Fatal error occurred | -| 2 | Invalid arguments | Bad command-line arguments or usage | - -```bash -# Success -exit 0 - -# Fatal error -die "Error message" # Exits with code 1 - -# Invalid arguments -usage # Exits with code 0 (help shown) -# or -log_error "Invalid argument" -exit 2 -``` - -## Documentation Requirements - -### 1. Script Headers - -Must include: -- Copyright notice -- SPDX license identifier -- FILE INFORMATION section -- Version number -- Brief description - -### 2. Inline Comments - -Use comments for: -- Complex logic explanation -- Why decisions were made (not what code does) -- Security considerations -- Performance notes - -```bash -# Use git ls-files for performance vs. find -files=$(git ls-files '*.yml' '*.yaml') - -# NOTE: Binary detection prevents corrupting image files -if file --mime-type "$f" | grep -q '^application/'; then - continue -fi -``` - -### 3. 
README Documentation - -Update `scripts/README.md` when: -- Adding new scripts -- Changing script behavior -- Adding new library functions - -## Testing and Validation - -### Self-Testing - -Scripts should validate their own requirements: - -```bash -# Validate environment -[ -d "${SRC_DIR}" ] || die "Source directory not found" - -# Validate configuration -[ -n "${VERSION}" ] || die "VERSION must be set" -``` - -### Script Health Checking - -Use the script health checker to validate all scripts follow standards: - -```bash -./scripts/run/script_health.sh # Check all scripts -./scripts/run/script_health.sh -v # Verbose mode with details -``` - -The health checker validates: -- Copyright headers present -- SPDX license identifiers -- FILE INFORMATION sections -- Error handling (set -euo pipefail) -- Executable permissions - -### Integration Testing - -Run validation suite before commits: - -```bash -./scripts/run/validate_all.sh -``` - -### Smoke Testing - -Basic health checks: - -```bash -./scripts/run/smoke_test.sh -``` - -## Operational Considerations - -### 1. Timeout Handling - -For long-running operations: - -```bash -run_with_timeout 300 long_running_command -``` - -### 2. Cleanup - -Use traps for cleanup: - -```bash -cleanup() { - rm -f "${TEMP_FILE}" -} -trap cleanup EXIT -``` - -### 3. Lock Files - -For singleton operations: - -```bash -LOCK_FILE="/tmp/script.lock" -if [ -f "${LOCK_FILE}" ]; then - die "Script already running (lock file exists)" -fi -touch "${LOCK_FILE}" -trap "rm -f ${LOCK_FILE}" EXIT -``` - -### 4. Signal Handling - -```bash -handle_interrupt() { - log_warn "Interrupted by user" - cleanup - exit 130 -} -trap handle_interrupt INT TERM -``` - -### 5. 
Dry Run Mode - -For destructive operations: - -```bash -DRY_RUN="${DRY_RUN:-false}" - -if [ "${DRY_RUN}" = "true" ]; then - log_info "DRY RUN: Would execute: $command" -else - "$command" -fi -``` - -## CI/CD Integration - -### Environment Variables - -Scripts should respect: - -```bash -CI="${CI:-false}" # Running in CI -VERBOSE="${VERBOSE:-false}" # Verbose output -DEBUG="${DEBUG:-false}" # Debug mode -``` - -### CI-Specific Behavior - -```bash -if is_ci; then - # CI-specific settings - set -x # Echo commands for debugging -fi -``` - -### Job Summaries - -For GitHub Actions: - -```bash -if [ -n "${GITHUB_STEP_SUMMARY:-}" ]; then - echo "### Validation Results" >> "$GITHUB_STEP_SUMMARY" - echo "Status: PASSED" >> "$GITHUB_STEP_SUMMARY" -fi -``` - -## Review Checklist - -Before committing new or modified scripts: - -- [ ] Includes proper copyright header -- [ ] Uses `set -euo pipefail` -- [ ] Has usage/help function (if user-facing) -- [ ] Validates all inputs -- [ ] Checks dependencies with `check_dependencies` -- [ ] Uses structured logging (`log_info`, `log_error`, etc.) 
-- [ ] Includes timestamps for audit trails -- [ ] Returns appropriate exit codes (0=success, 1=error, 2=invalid args) -- [ ] Includes inline comments for complex logic -- [ ] Documented in scripts/README.md -- [ ] Tested locally -- [ ] Passes `./scripts/run/script_health.sh` -- [ ] Passes all validation checks (`./scripts/run/validate_all.sh`) -- [ ] Passes `shellcheck` (if available) - -Quick validation command: -```bash -# Run all checks -./scripts/run/script_health.sh && ./scripts/run/validate_all.sh -``` - -## Version History - -| Version | Date | Description | -| ------- | ---------- | ----------- | -| 01.00.00 | 2025-01-03 | Initial enterprise standards documentation | - -## Metadata - -- **Document:** scripts/ENTERPRISE.md -- **Repository:** https://github.com/mokoconsulting-tech/moko-cassiopeia -- **Version:** 01.00.00 -- **Status:** Active diff --git a/scripts/README.md b/scripts/README.md deleted file mode 100644 index a834e7b..0000000 --- a/scripts/README.md +++ /dev/null @@ -1,75 +0,0 @@ -# Scripts Documentation - -All automation scripts for the moko-cassiopeia project are written in Python for cross-platform compatibility and support both Joomla and Dolibarr extensions. 
- -## Quick Reference - -```bash -# Run all validations -make validate -python3 scripts/run/validate_all.py - -# Run specific validations -python3 scripts/validate/manifest.py -python3 scripts/validate/xml_wellformed.py - -# Create distribution package (auto-detects Joomla or Dolibarr) -make package -python3 scripts/release/package_extension.py dist -``` - -## Platform Support - -All scripts automatically detect and support: -- **Joomla Extensions**: Templates, Components, Modules, Plugins, Packages -- **Dolibarr Modules**: Automatic detection and packaging - -## Available Scripts - -### Validation Scripts (`scripts/validate/`) -- `manifest.py` - Validate extension manifests (Joomla/Dolibarr) -- `xml_wellformed.py` - Validate XML syntax -- `workflows.py` - Validate GitHub Actions workflows -- `tabs.py` - Check for tab characters in YAML -- `no_secrets.py` - Scan for secrets/credentials -- `paths.py` - Check for Windows-style paths -- `php_syntax.py` - Validate PHP syntax - -### Release Scripts (`scripts/release/`) -- `package_extension.py` - Create distributable ZIP packages - -### Run Scripts (`scripts/run/`) -- `validate_all.py` - Run all validation scripts -- `scaffold_extension.py` - Create new extension scaffolding - -### Library Scripts (`scripts/lib/`) -- `common.py` - Common utilities -- `joomla_manifest.py` - Joomla manifest parsing -- `dolibarr_manifest.py` - Dolibarr module parsing -- `extension_utils.py` - Unified extension detection - -## Requirements - -- Python 3.6+ -- Git -- PHP (for PHP syntax validation) - -## Migration from Shell Scripts - -All shell scripts have been converted to Python. 
Use Python equivalents: - -```bash -# Old (removed) # New -./scripts/validate/manifest.sh โ†’ python3 scripts/validate/manifest.py -./scripts/release/package.sh โ†’ python3 scripts/release/package_extension.py -``` - -For detailed documentation, see individual script help: -```bash -python3 scripts/validate/manifest.py --help -python3 scripts/release/package_extension.py --help -``` - -## License - -GPL-3.0-or-later - See [LICENSE](../LICENSE) diff --git a/scripts/lib/common.py b/scripts/lib/common.py deleted file mode 100755 index 17250c4..0000000 --- a/scripts/lib/common.py +++ /dev/null @@ -1,452 +0,0 @@ -#!/usr/bin/env python3 -""" -Common utilities for Moko-Cassiopeia scripts. - -Copyright (C) 2025 Moko Consulting - -This file is part of a Moko Consulting project. - -SPDX-License-Identifier: GPL-3.0-or-later - -This program is free software; you can redistribute it and/or modify -it under the terms of the GNU General Public License as published by -the Free Software Foundation; either version 3 of the License, or -(at your option) any later version. - -This program is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU General Public License for more details. - -You should have received a copy of the GNU General Public License -along with this program (./LICENSE.md). 
- -FILE INFORMATION -DEFGROUP: Script.Library -INGROUP: Common -REPO: https://github.com/mokoconsulting-tech/moko-cassiopeia -PATH: /scripts/lib/common.py -VERSION: 01.00.00 -BRIEF: Unified shared Python utilities for all CI and local scripts -""" - -import json -import os -import shutil -import sys -from pathlib import Path -from typing import Any, Dict, List, Optional, Union -import subprocess -import traceback - - -# ============================================================================ -# Environment and Detection -# ============================================================================ - -def is_ci() -> bool: - """Check if running in CI environment.""" - return os.environ.get("CI", "").lower() == "true" - - -def require_cmd(command: str) -> None: - """ - Ensure a required command is available. - - Args: - command: Command name to check - - Raises: - SystemExit: If command is not found - """ - if not shutil.which(command): - log_error(f"Required command not found: {command}") - sys.exit(1) - - -# ============================================================================ -# Logging -# ============================================================================ - -class Colors: - """ANSI color codes for terminal output.""" - RED = '\033[0;31m' - GREEN = '\033[0;32m' - YELLOW = '\033[1;33m' - BLUE = '\033[0;34m' - CYAN = '\033[0;36m' - BOLD = '\033[1m' - NC = '\033[0m' # No Color - - @classmethod - def enabled(cls) -> bool: - """Check if colors should be enabled.""" - return sys.stdout.isatty() and os.environ.get("NO_COLOR") is None - - -def log_info(message: str) -> None: - """Log informational message.""" - print(f"INFO: {message}") - - -def log_warn(message: str) -> None: - """Log warning message.""" - color = Colors.YELLOW if Colors.enabled() else "" - nc = Colors.NC if Colors.enabled() else "" - print(f"{color}WARN: {message}{nc}", file=sys.stderr) - - -def log_error(message: str) -> None: - """Log error message.""" - color = Colors.RED if 
Colors.enabled() else "" - nc = Colors.NC if Colors.enabled() else "" - print(f"{color}ERROR: {message}{nc}", file=sys.stderr) - - -def log_success(message: str) -> None: - """Log success message.""" - color = Colors.GREEN if Colors.enabled() else "" - nc = Colors.NC if Colors.enabled() else "" - print(f"{color}โœ“ {message}{nc}") - - -def log_step(message: str) -> None: - """Log a step in a process.""" - color = Colors.CYAN if Colors.enabled() else "" - nc = Colors.NC if Colors.enabled() else "" - print(f"{color}โžœ {message}{nc}") - - -def log_section(title: str) -> None: - """Log a section header.""" - print() - print("=" * 60) - print(title) - print("=" * 60) - - -def log_kv(key: str, value: str) -> None: - """Log a key-value pair.""" - print(f" {key}: {value}") - - -def die(message: str, exit_code: int = 1) -> None: - """ - Log error and exit. - - Args: - message: Error message - exit_code: Exit code (default: 1) - """ - log_error(message) - - if os.environ.get("VERBOSE_ERRORS", "true").lower() == "true": - print("", file=sys.stderr) - print("Stack trace:", file=sys.stderr) - traceback.print_stack(file=sys.stderr) - print("", file=sys.stderr) - print("Environment:", file=sys.stderr) - print(f" PWD: {os.getcwd()}", file=sys.stderr) - print(f" USER: {os.environ.get('USER', 'unknown')}", file=sys.stderr) - print(f" PYTHON: {sys.version}", file=sys.stderr) - print(f" CI: {is_ci()}", file=sys.stderr) - print("", file=sys.stderr) - - sys.exit(exit_code) - - -# ============================================================================ -# Validation Helpers -# ============================================================================ - -def assert_file_exists(path: Union[str, Path]) -> None: - """ - Assert that a file exists. 
- - Args: - path: Path to file - - Raises: - SystemExit: If file doesn't exist - """ - if not Path(path).is_file(): - die(f"Required file missing: {path}") - - -def assert_dir_exists(path: Union[str, Path]) -> None: - """ - Assert that a directory exists. - - Args: - path: Path to directory - - Raises: - SystemExit: If directory doesn't exist - """ - if not Path(path).is_dir(): - die(f"Required directory missing: {path}") - - -def assert_not_empty(value: Any, name: str) -> None: - """ - Assert that a value is not empty. - - Args: - value: Value to check - name: Name of the value for error message - - Raises: - SystemExit: If value is empty - """ - if not value: - die(f"Required value is empty: {name}") - - -# ============================================================================ -# JSON Utilities -# ============================================================================ - -def json_escape(text: str) -> str: - """ - Escape text for JSON. - - Args: - text: Text to escape - - Returns: - Escaped text - """ - return json.dumps(text)[1:-1] # Remove surrounding quotes - - -def json_output(data: Dict[str, Any], pretty: bool = False) -> None: - """ - Output data as JSON. - - Args: - data: Dictionary to output - pretty: Whether to pretty-print - """ - if pretty: - print(json.dumps(data, indent=2, sort_keys=True)) - else: - print(json.dumps(data, separators=(',', ':'))) - - -# ============================================================================ -# Path Utilities -# ============================================================================ - -def script_root() -> Path: - """ - Get the root scripts directory. - - Returns: - Path to scripts directory - """ - return Path(__file__).parent.parent - - -def repo_root() -> Path: - """ - Get the repository root directory. - - Returns: - Path to repository root - """ - return script_root().parent - - -def normalize_path(path: Union[str, Path]) -> str: - """ - Normalize a path (resolve, absolute, forward slashes). 
- - Args: - path: Path to normalize - - Returns: - Normalized path string - """ - return str(Path(path).resolve()).replace("\\", "/") - - -# ============================================================================ -# File Operations -# ============================================================================ - -def read_file(path: Union[str, Path], encoding: str = "utf-8") -> str: - """ - Read a file. - - Args: - path: Path to file - encoding: File encoding - - Returns: - File contents - """ - assert_file_exists(path) - return Path(path).read_text(encoding=encoding) - - -def write_file(path: Union[str, Path], content: str, encoding: str = "utf-8") -> None: - """ - Write a file. - - Args: - path: Path to file - content: Content to write - encoding: File encoding - """ - Path(path).write_text(content, encoding=encoding) - - -def ensure_dir(path: Union[str, Path]) -> None: - """ - Ensure a directory exists. - - Args: - path: Path to directory - """ - Path(path).mkdir(parents=True, exist_ok=True) - - -# ============================================================================ -# Command Execution -# ============================================================================ - -def run_command( - cmd: List[str], - capture_output: bool = True, - check: bool = True, - cwd: Optional[Union[str, Path]] = None, - env: Optional[Dict[str, str]] = None -) -> subprocess.CompletedProcess: - """ - Run a command. - - Args: - cmd: Command and arguments - capture_output: Whether to capture stdout/stderr - check: Whether to raise on non-zero exit - cwd: Working directory - env: Environment variables - - Returns: - CompletedProcess instance - """ - return subprocess.run( - cmd, - capture_output=capture_output, - text=True, - check=check, - cwd=cwd, - env=env - ) - - -def run_shell( - script: str, - capture_output: bool = True, - check: bool = True, - cwd: Optional[Union[str, Path]] = None -) -> subprocess.CompletedProcess: - """ - Run a shell script. 
- - Args: - script: Shell script - capture_output: Whether to capture stdout/stderr - check: Whether to raise on non-zero exit - cwd: Working directory - - Returns: - CompletedProcess instance - """ - return subprocess.run( - script, - shell=True, - capture_output=capture_output, - text=True, - check=check, - cwd=cwd - ) - - -# ============================================================================ -# Git Utilities -# ============================================================================ - -def git_root() -> Path: - """ - Get git repository root. - - Returns: - Path to git root - """ - result = run_command( - ["git", "rev-parse", "--show-toplevel"], - capture_output=True, - check=True - ) - return Path(result.stdout.strip()) - - -def git_status(porcelain: bool = True) -> str: - """ - Get git status. - - Args: - porcelain: Use porcelain format - - Returns: - Git status output - """ - cmd = ["git", "status"] - if porcelain: - cmd.append("--porcelain") - - result = run_command(cmd, capture_output=True, check=True) - return result.stdout - - -def git_branch() -> str: - """ - Get current git branch. 
- - Returns: - Branch name - """ - result = run_command( - ["git", "rev-parse", "--abbrev-ref", "HEAD"], - capture_output=True, - check=True - ) - return result.stdout.strip() - - -# ============================================================================ -# Main Entry Point (for testing) -# ============================================================================ - -def main() -> None: - """Test the common utilities.""" - log_section("Testing Common Utilities") - - log_info("This is an info message") - log_warn("This is a warning message") - log_success("This is a success message") - log_step("This is a step message") - - log_section("Environment") - log_kv("CI", str(is_ci())) - log_kv("Script Root", str(script_root())) - log_kv("Repo Root", str(repo_root())) - log_kv("Git Root", str(git_root())) - log_kv("Git Branch", git_branch()) - - log_section("Tests Passed") - - -if __name__ == "__main__": - main() diff --git a/scripts/lib/extension_utils.py b/scripts/lib/extension_utils.py deleted file mode 100644 index 9945f25..0000000 --- a/scripts/lib/extension_utils.py +++ /dev/null @@ -1,356 +0,0 @@ -#!/usr/bin/env python3 -""" -Extension utilities for Joomla and Dolibarr. - -Copyright (C) 2025 Moko Consulting - -This file is part of a Moko Consulting project. - -SPDX-License-Identifier: GPL-3.0-or-later - -This program is free software; you can redistribute it and/or modify -it under the terms of the GNU General Public License as published by -the Free Software Foundation; either version 3 of the License, or -(at your option) any later version. - -This program is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU General Public License for more details. - -You should have received a copy of the GNU General Public License -along with this program (./LICENSE.md). 
- -FILE INFORMATION -DEFGROUP: Script.Library -INGROUP: Extension.Utils -REPO: https://github.com/mokoconsulting-tech/moko-cassiopeia -PATH: /scripts/lib/extension_utils.py -VERSION: 01.00.00 -BRIEF: Platform-aware extension utilities for Joomla and Dolibarr -""" - -import re -import xml.etree.ElementTree as ET -from dataclasses import dataclass -from enum import Enum -from pathlib import Path -from typing import Optional, Union - - -class Platform(Enum): - """Supported extension platforms.""" - JOOMLA = "joomla" - DOLIBARR = "dolibarr" - UNKNOWN = "unknown" - - -@dataclass -class ExtensionInfo: - """Extension information.""" - platform: Platform - name: str - version: str - extension_type: str - manifest_path: Path - description: Optional[str] = None - author: Optional[str] = None - author_email: Optional[str] = None - license: Optional[str] = None - - -def detect_joomla_manifest(src_dir: Union[str, Path]) -> Optional[Path]: - """ - Detect Joomla manifest file. - - Args: - src_dir: Source directory - - Returns: - Path to manifest file or None - """ - src_path = Path(src_dir) - - # Common Joomla manifest locations and patterns - manifest_patterns = [ - "templateDetails.xml", - "pkg_*.xml", - "com_*.xml", - "mod_*.xml", - "plg_*.xml", - ] - - # Search in src_dir and subdirectories (max depth 4) - for pattern in manifest_patterns: - # Direct match - matches = list(src_path.glob(pattern)) - if matches: - return matches[0] - - # Search in subdirectories - matches = list(src_path.glob(f"*/{pattern}")) - if matches: - return matches[0] - - matches = list(src_path.glob(f"*/*/{pattern}")) - if matches: - return matches[0] - - # Fallback: search for any XML with Optional[Path]: - """ - Detect Dolibarr module descriptor file. 
- - Args: - src_dir: Source directory - - Returns: - Path to descriptor file or None - """ - src_path = Path(src_dir) - - # Dolibarr module descriptors follow pattern: core/modules/mod*.class.php - descriptor_patterns = [ - "core/modules/mod*.class.php", - "*/modules/mod*.class.php", - "mod*.class.php", - ] - - for pattern in descriptor_patterns: - matches = list(src_path.glob(pattern)) - if matches: - # Verify it's actually a Dolibarr module descriptor - # Look for extends DolibarrModules pattern - for match in matches: - try: - content = match.read_text(encoding="utf-8") - # Check for Dolibarr module inheritance pattern - if re.search(r'extends\s+DolibarrModules', content): - return match - except Exception: - continue - - return None - - -def parse_joomla_manifest(manifest_path: Path) -> Optional[ExtensionInfo]: - """ - Parse Joomla manifest XML. - - Args: - manifest_path: Path to manifest file - - Returns: - ExtensionInfo or None - """ - try: - tree = ET.parse(manifest_path) - root = tree.getroot() - - if root.tag != "extension": - return None - - # Get extension type - ext_type = root.get("type", "unknown") - - # Get name - name_elem = root.find("name") - name = name_elem.text if name_elem is not None else "unknown" - - # Get version - version_elem = root.find("version") - version = version_elem.text if version_elem is not None else "0.0.0" - - # Get description - desc_elem = root.find("description") - description = desc_elem.text if desc_elem is not None else None - - # Get author - author_elem = root.find("author") - author = author_elem.text if author_elem is not None else None - - # Get author email - author_email_elem = root.find("authorEmail") - author_email = author_email_elem.text if author_email_elem is not None else None - - # Get license - license_elem = root.find("license") - license_text = license_elem.text if license_elem is not None else None - - return ExtensionInfo( - platform=Platform.JOOMLA, - name=name, - version=version, - 
extension_type=ext_type, - manifest_path=manifest_path, - description=description, - author=author, - author_email=author_email, - license=license_text - ) - - except Exception: - return None - - -def parse_dolibarr_descriptor(descriptor_path: Path) -> Optional[ExtensionInfo]: - """ - Parse Dolibarr module descriptor PHP file. - - Args: - descriptor_path: Path to descriptor file - - Returns: - ExtensionInfo or None - """ - try: - content = descriptor_path.read_text(encoding="utf-8") - - # Extract module name from class that extends DolibarrModules - # Pattern: class ModMyModule extends DolibarrModules - class_match = re.search(r'class\s+(\w+)\s+extends\s+DolibarrModules', content) - if not class_match: - # Fallback: try to find any class definition - class_match = re.search(r'class\s+(\w+)', content) - - name = class_match.group(1) if class_match else "unknown" - - # Extract version - version_match = re.search(r'\$this->version\s*=\s*[\'"]([^\'"]+)[\'"]', content) - version = version_match.group(1) if version_match else "0.0.0" - - # Extract description - desc_match = re.search(r'\$this->description\s*=\s*[\'"]([^\'"]+)[\'"]', content) - description = desc_match.group(1) if desc_match else None - - # Extract author - author_match = re.search(r'\$this->editor_name\s*=\s*[\'"]([^\'"]+)[\'"]', content) - author = author_match.group(1) if author_match else None - - return ExtensionInfo( - platform=Platform.DOLIBARR, - name=name, - version=version, - extension_type="module", - manifest_path=descriptor_path, - description=description, - author=author, - author_email=None, - license=None - ) - - except Exception: - return None - - -def get_extension_info(src_dir: Union[str, Path]) -> Optional[ExtensionInfo]: - """ - Detect and parse extension information from source directory. - Supports both Joomla and Dolibarr platforms. 
- - Args: - src_dir: Source directory containing extension files - - Returns: - ExtensionInfo or None if not detected - """ - src_path = Path(src_dir) - - if not src_path.is_dir(): - return None - - # Try Joomla first - joomla_manifest = detect_joomla_manifest(src_path) - if joomla_manifest: - ext_info = parse_joomla_manifest(joomla_manifest) - if ext_info: - return ext_info - - # Try Dolibarr - dolibarr_descriptor = detect_dolibarr_manifest(src_path) - if dolibarr_descriptor: - ext_info = parse_dolibarr_descriptor(dolibarr_descriptor) - if ext_info: - return ext_info - - return None - - -def is_joomla_extension(src_dir: Union[str, Path]) -> bool: - """ - Check if directory contains a Joomla extension. - - Args: - src_dir: Source directory - - Returns: - True if Joomla extension detected - """ - ext_info = get_extension_info(src_dir) - return ext_info is not None and ext_info.platform == Platform.JOOMLA - - -def is_dolibarr_extension(src_dir: Union[str, Path]) -> bool: - """ - Check if directory contains a Dolibarr module. 
- - Args: - src_dir: Source directory - - Returns: - True if Dolibarr module detected - """ - ext_info = get_extension_info(src_dir) - return ext_info is not None and ext_info.platform == Platform.DOLIBARR - - -def main() -> None: - """Test the extension utilities.""" - import sys - sys.path.insert(0, str(Path(__file__).parent)) - import common - - common.log_section("Testing Extension Utilities") - - # Test with current directory's src - repo_root = common.repo_root() - src_dir = repo_root / "src" - - if not src_dir.is_dir(): - common.log_warn(f"Source directory not found: {src_dir}") - return - - ext_info = get_extension_info(src_dir) - - if ext_info: - common.log_success("Extension detected!") - common.log_kv("Platform", ext_info.platform.value.upper()) - common.log_kv("Name", ext_info.name) - common.log_kv("Version", ext_info.version) - common.log_kv("Type", ext_info.extension_type) - common.log_kv("Manifest", str(ext_info.manifest_path)) - if ext_info.description: - common.log_kv("Description", ext_info.description[:60] + "...") - if ext_info.author: - common.log_kv("Author", ext_info.author) - else: - common.log_error("No extension detected") - sys.exit(1) - - -if __name__ == "__main__": - main() diff --git a/scripts/lib/joomla_manifest.py b/scripts/lib/joomla_manifest.py deleted file mode 100755 index c7322d1..0000000 --- a/scripts/lib/joomla_manifest.py +++ /dev/null @@ -1,430 +0,0 @@ -#!/usr/bin/env python3 -""" -Joomla manifest parsing and validation utilities. - -Copyright (C) 2025 Moko Consulting - -This file is part of a Moko Consulting project. - -SPDX-License-Identifier: GPL-3.0-or-later - -This program is free software; you can redistribute it and/or modify -it under the terms of the GNU General Public License as published by -the Free Software Foundation; either version 3 of the License, or -(at your option) any later version. 
- -This program is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU General Public License for more details. - -You should have received a copy of the GNU General Public License -along with this program (./LICENSE.md). - -FILE INFORMATION -DEFGROUP: Script.Library -INGROUP: Joomla.Manifest -REPO: https://github.com/mokoconsulting-tech/moko-cassiopeia -PATH: /scripts/lib/joomla_manifest.py -VERSION: 01.00.00 -BRIEF: Joomla manifest parsing and validation utilities -""" - -import xml.etree.ElementTree as ET -from pathlib import Path -from typing import Dict, List, Optional, Tuple -from dataclasses import dataclass - -try: - from . import common -except ImportError: - import common - - -# ============================================================================ -# Joomla Extension Types -# ============================================================================ - -class ExtensionType: - """Joomla extension types.""" - COMPONENT = "component" - MODULE = "module" - PLUGIN = "plugin" - TEMPLATE = "template" - LIBRARY = "library" - PACKAGE = "package" - FILE = "file" - LANGUAGE = "language" - - ALL_TYPES = [ - COMPONENT, MODULE, PLUGIN, TEMPLATE, - LIBRARY, PACKAGE, FILE, LANGUAGE - ] - - -# ============================================================================ -# Manifest Data Class -# ============================================================================ - -@dataclass -class ManifestInfo: - """Information extracted from a Joomla manifest.""" - path: Path - extension_type: str - name: str - version: str - description: Optional[str] = None - author: Optional[str] = None - author_email: Optional[str] = None - author_url: Optional[str] = None - copyright: Optional[str] = None - license: Optional[str] = None - creation_date: Optional[str] = None - - def to_dict(self) -> Dict[str, str]: - """Convert to dictionary.""" - return { - 
"path": str(self.path), - "ext_type": self.extension_type, - "name": self.name, - "version": self.version, - "description": self.description or "", - "author": self.author or "", - "author_email": self.author_email or "", - "author_url": self.author_url or "", - "copyright": self.copyright or "", - "license": self.license or "", - "creation_date": self.creation_date or "" - } - - -# ============================================================================ -# Manifest Discovery -# ============================================================================ - -def find_manifest(src_dir: str = "src") -> Path: - """ - Find the primary Joomla manifest in the given directory. - - Args: - src_dir: Source directory to search - - Returns: - Path to manifest file - - Raises: - SystemExit: If no manifest found - """ - src_path = Path(src_dir) - - if not src_path.is_dir(): - common.die(f"Source directory missing: {src_dir}") - - # Template manifest (templateDetails.xml) - template_manifest = src_path / "templateDetails.xml" - if template_manifest.is_file(): - return template_manifest - - # Also check in templates subdirectory - templates_dir = src_path / "templates" - if templates_dir.is_dir(): - for template_file in templates_dir.glob("templateDetails.xml"): - return template_file - - # Package manifest (pkg_*.xml) - pkg_manifests = list(src_path.rglob("pkg_*.xml")) - if pkg_manifests: - return pkg_manifests[0] - - # Component manifest (com_*.xml) - com_manifests = list(src_path.rglob("com_*.xml")) - if com_manifests: - return com_manifests[0] - - # Module manifest (mod_*.xml) - mod_manifests = list(src_path.rglob("mod_*.xml")) - if mod_manifests: - return mod_manifests[0] - - # Plugin manifest (plg_*.xml) - plg_manifests = list(src_path.rglob("plg_*.xml")) - if plg_manifests: - return plg_manifests[0] - - # Fallback: any XML with List[Path]: - """ - Find all Joomla manifests in the given directory. 
- - Args: - src_dir: Source directory to search - - Returns: - List of manifest paths - """ - src_path = Path(src_dir) - - if not src_path.is_dir(): - return [] - - manifests = [] - - # Template manifests - manifests.extend(src_path.rglob("templateDetails.xml")) - - # Package manifests - manifests.extend(src_path.rglob("pkg_*.xml")) - - # Component manifests - manifests.extend(src_path.rglob("com_*.xml")) - - # Module manifests - manifests.extend(src_path.rglob("mod_*.xml")) - - # Plugin manifests - manifests.extend(src_path.rglob("plg_*.xml")) - - return manifests - - -# ============================================================================ -# Manifest Parsing -# ============================================================================ - -def parse_manifest(manifest_path: Path) -> ManifestInfo: - """ - Parse a Joomla manifest file. - - Args: - manifest_path: Path to manifest file - - Returns: - ManifestInfo object - - Raises: - SystemExit: If parsing fails - """ - if not manifest_path.is_file(): - common.die(f"Manifest not found: {manifest_path}") - - try: - tree = ET.parse(manifest_path) - root = tree.getroot() - - # Extract extension type - ext_type = root.attrib.get("type", "").strip().lower() - if not ext_type: - common.die(f"Manifest missing type attribute: {manifest_path}") - - # Extract name - name_elem = root.find("name") - if name_elem is None or not name_elem.text: - common.die(f"Manifest missing name element: {manifest_path}") - name = name_elem.text.strip() - - # Extract version - version_elem = root.find("version") - if version_elem is None or not version_elem.text: - common.die(f"Manifest missing version element: {manifest_path}") - version = version_elem.text.strip() - - # Extract optional fields - description = None - desc_elem = root.find("description") - if desc_elem is not None and desc_elem.text: - description = desc_elem.text.strip() - - author = None - author_elem = root.find("author") - if author_elem is not None and 
author_elem.text: - author = author_elem.text.strip() - - author_email = None - email_elem = root.find("authorEmail") - if email_elem is not None and email_elem.text: - author_email = email_elem.text.strip() - - author_url = None - url_elem = root.find("authorUrl") - if url_elem is not None and url_elem.text: - author_url = url_elem.text.strip() - - copyright_text = None - copyright_elem = root.find("copyright") - if copyright_elem is not None and copyright_elem.text: - copyright_text = copyright_elem.text.strip() - - license_text = None - license_elem = root.find("license") - if license_elem is not None and license_elem.text: - license_text = license_elem.text.strip() - - creation_date = None - date_elem = root.find("creationDate") - if date_elem is not None and date_elem.text: - creation_date = date_elem.text.strip() - - return ManifestInfo( - path=manifest_path, - extension_type=ext_type, - name=name, - version=version, - description=description, - author=author, - author_email=author_email, - author_url=author_url, - copyright=copyright_text, - license=license_text, - creation_date=creation_date - ) - - except ET.ParseError as e: - common.die(f"Failed to parse manifest {manifest_path}: {e}") - except Exception as e: - common.die(f"Error reading manifest {manifest_path}: {e}") - - -def get_manifest_version(manifest_path: Path) -> str: - """ - Extract version from manifest. - - Args: - manifest_path: Path to manifest file - - Returns: - Version string - """ - info = parse_manifest(manifest_path) - return info.version - - -def get_manifest_name(manifest_path: Path) -> str: - """ - Extract name from manifest. - - Args: - manifest_path: Path to manifest file - - Returns: - Name string - """ - info = parse_manifest(manifest_path) - return info.name - - -def get_manifest_type(manifest_path: Path) -> str: - """ - Extract extension type from manifest. 
- - Args: - manifest_path: Path to manifest file - - Returns: - Extension type string - """ - info = parse_manifest(manifest_path) - return info.extension_type - - -# ============================================================================ -# Manifest Validation -# ============================================================================ - -def validate_manifest(manifest_path: Path) -> Tuple[bool, List[str]]: - """ - Validate a Joomla manifest. - - Args: - manifest_path: Path to manifest file - - Returns: - Tuple of (is_valid, list_of_warnings) - """ - warnings = [] - - try: - info = parse_manifest(manifest_path) - - # Check for recommended fields - if not info.description: - warnings.append("Missing description element") - - if not info.author: - warnings.append("Missing author element") - - if not info.copyright: - warnings.append("Missing copyright element") - - if not info.license: - warnings.append("Missing license element") - - if not info.creation_date: - warnings.append("Missing creationDate element") - - # Validate extension type - if info.extension_type not in ExtensionType.ALL_TYPES: - warnings.append(f"Unknown extension type: {info.extension_type}") - - return (True, warnings) - - except SystemExit: - return (False, ["Failed to parse manifest"]) - - -# ============================================================================ -# Main Entry Point (for testing) -# ============================================================================ - -def main() -> None: - """Test the manifest utilities.""" - import sys - - common.log_section("Testing Joomla Manifest Utilities") - - src_dir = sys.argv[1] if len(sys.argv) > 1 else "src" - - try: - manifest = find_manifest(src_dir) - common.log_success(f"Found manifest: {manifest}") - - info = parse_manifest(manifest) - - common.log_section("Manifest Information") - common.log_kv("Type", info.extension_type) - common.log_kv("Name", info.name) - common.log_kv("Version", info.version) - - if info.description: 
- common.log_kv("Description", info.description[:60] + "..." if len(info.description) > 60 else info.description) - - if info.author: - common.log_kv("Author", info.author) - - is_valid, warnings = validate_manifest(manifest) - - if is_valid: - common.log_success("Manifest is valid") - if warnings: - common.log_warn(f"Warnings: {len(warnings)}") - for warning in warnings: - print(f" - {warning}") - else: - common.log_error("Manifest validation failed") - - except SystemExit as e: - sys.exit(e.code) - - -if __name__ == "__main__": - main() diff --git a/scripts/release/detect_platform.py b/scripts/release/detect_platform.py deleted file mode 100755 index 829e65f..0000000 --- a/scripts/release/detect_platform.py +++ /dev/null @@ -1,98 +0,0 @@ -#!/usr/bin/env python3 -""" -Detect extension platform and type. - -Copyright (C) 2025 Moko Consulting - -This file is part of a Moko Consulting project. - -SPDX-License-Identifier: GPL-3.0-or-later - -This program is free software; you can redistribute it and/or modify -it under the terms of the GNU General Public License as published by -the Free Software Foundation; either version 3 of the License, or -(at your option) any later version. - -This program is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU General Public License for more details. - -You should have received a copy of the GNU General Public License -along with this program (./LICENSE.md). 
- -FILE INFORMATION -DEFGROUP: Script.Release -INGROUP: Extension.Detection -REPO: https://github.com/mokoconsulting-tech/moko-cassiopeia -PATH: /scripts/release/detect_platform.py -VERSION: 01.00.00 -BRIEF: Detect extension platform and type for build workflow -USAGE: ./scripts/release/detect_platform.py [src_dir] -""" - -import argparse -import sys -from pathlib import Path - -# Add lib directory to path -sys.path.insert(0, str(Path(__file__).parent.parent / "lib")) - -try: - import extension_utils -except ImportError: - print("ERROR: Cannot import extension_utils library", file=sys.stderr) - sys.exit(1) - - -def main() -> int: - """Main entry point.""" - parser = argparse.ArgumentParser( - description="Detect extension platform and type", - formatter_class=argparse.RawDescriptionHelpFormatter - ) - - parser.add_argument( - "src_dir", - nargs="?", - default="src", - help="Source directory (default: src)" - ) - parser.add_argument( - "--format", - choices=["pipe", "json"], - default="pipe", - help="Output format (default: pipe)" - ) - - args = parser.parse_args() - - try: - ext_info = extension_utils.get_extension_info(args.src_dir) - - if not ext_info: - print(f"ERROR: No extension detected in {args.src_dir}", file=sys.stderr) - return 1 - - if args.format == "pipe": - # Format: platform|ext_type - print(f"{ext_info.platform.value}|{ext_info.extension_type}") - elif args.format == "json": - import json - data = { - "platform": ext_info.platform.value, - "extension_type": ext_info.extension_type, - "name": ext_info.name, - "version": ext_info.version - } - print(json.dumps(data)) - - return 0 - - except Exception as e: - print(f"ERROR: {e}", file=sys.stderr) - return 1 - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/scripts/release/package_extension.py b/scripts/release/package_extension.py deleted file mode 100755 index c7df5c3..0000000 --- a/scripts/release/package_extension.py +++ /dev/null @@ -1,327 +0,0 @@ -#!/usr/bin/env python3 -""" -Package 
Joomla extension as distributable ZIP. - -Copyright (C) 2025 Moko Consulting - -This file is part of a Moko Consulting project. - -SPDX-License-Identifier: GPL-3.0-or-later - -This program is free software; you can redistribute it and/or modify -it under the terms of the GNU General Public License as published by -the Free Software Foundation; either version 3 of the License, or -(at your option) any later version. - -This program is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU General Public License for more details. - -You should have received a copy of the GNU General Public License -along with this program (./LICENSE.md). - -FILE INFORMATION -DEFGROUP: Script.Release -INGROUP: Extension.Packaging -REPO: https://github.com/mokoconsulting-tech/moko-cassiopeia -PATH: /scripts/release/package_extension.py -VERSION: 01.00.00 -BRIEF: Package Joomla extension as distributable ZIP -USAGE: ./scripts/release/package_extension.py [output_dir] [version] -""" - -import argparse -import sys -import zipfile -from datetime import datetime -from pathlib import Path -from typing import Set - -# Add lib directory to path -sys.path.insert(0, str(Path(__file__).parent.parent / "lib")) - -try: - import common - import extension_utils -except ImportError: - print("ERROR: Cannot import required libraries", file=sys.stderr) - sys.exit(1) - - -# Exclusion patterns for packaging -EXCLUDE_PATTERNS = { - # Version control - ".git", ".gitignore", ".gitattributes", - # IDE - ".vscode", ".idea", "*.sublime-*", - # Development - "node_modules", "vendor", ".env", ".env.*", - # Documentation (optional, can be included) - # Build artifacts - "dist", "build", ".phpunit.cache", - # Development tool caches and artifacts - ".phpstan.cache", ".psalm", ".rector", - "phpmd-cache", ".php-cs-fixer.cache", ".phplint-cache", - # OS files - ".DS_Store", "Thumbs.db", - # Logs - 
"*.log", - # Tests - "tests", "test", "Tests", - # CI/CD - ".github", - # Scripts - "scripts", - # Docs (can be included if needed) - "docs", - # Config files - "composer.json", "composer.lock", - "package.json", "package-lock.json", - "phpunit.xml", "phpstan.neon", "phpcs.xml", - "codeception.yml", "psalm.xml", ".php-cs-fixer.php", - # Others - "README.md", "CHANGELOG.md", "CONTRIBUTING.md", - "CODE_OF_CONDUCT.md", "SECURITY.md", "GOVERNANCE.md", - "Makefile", -} - - -def should_exclude(path: Path, base_path: Path, exclude_patterns: Set[str]) -> bool: - """ - Check if a path should be excluded from packaging. - - Args: - path: Path to check - base_path: Base directory path - exclude_patterns: Set of exclusion patterns - - Returns: - True if should be excluded - """ - relative_path = path.relative_to(base_path) - - # Check each part of the path - for part in relative_path.parts: - if part in exclude_patterns: - return True - # Check wildcard patterns - for pattern in exclude_patterns: - if "*" in pattern: - import fnmatch - if fnmatch.fnmatch(part, pattern): - return True - - return False - - -def create_package( - src_dir: str, - output_dir: str, - version: str = None, - repo_name: str = None, - exclude_patterns: Set[str] = None -) -> Path: - """ - Create a distributable ZIP package for a Joomla or Dolibarr extension. 
- - Args: - src_dir: Source directory containing extension files - output_dir: Output directory for ZIP file - version: Version string (auto-detected if not provided) - repo_name: Repository name for ZIP file naming - exclude_patterns: Patterns to exclude from packaging - - Returns: - Path to created ZIP file - """ - src_path = Path(src_dir) - if not src_path.is_dir(): - common.die(f"Source directory not found: {src_dir}") - - # Detect extension platform and get info - ext_info = extension_utils.get_extension_info(src_dir) - - if not ext_info: - common.die(f"No Joomla or Dolibarr extension found in {src_dir}") - - # Determine version - if not version: - version = ext_info.version - - # Determine repo name - if not repo_name: - try: - repo_root = common.git_root() - repo_name = repo_root.name - except Exception: - repo_name = "extension" - - # Determine exclusion patterns - if exclude_patterns is None: - exclude_patterns = EXCLUDE_PATTERNS - - # Create output directory - output_path = Path(output_dir) - common.ensure_dir(output_path) - - # Generate ZIP filename - timestamp = datetime.now().strftime("%Y%m%d-%H%M%S") - platform_suffix = f"{ext_info.platform.value}-{ext_info.extension_type}" - zip_filename = f"{repo_name}-{version}-{platform_suffix}-{timestamp}.zip" - zip_path = output_path / zip_filename - - # Remove existing ZIP if present - if zip_path.exists(): - zip_path.unlink() - - common.log_section("Creating Extension Package") - common.log_kv("Platform", ext_info.platform.value.upper()) - common.log_kv("Extension", ext_info.name) - common.log_kv("Type", ext_info.extension_type) - common.log_kv("Version", version) - common.log_kv("Source", src_dir) - common.log_kv("Output", str(zip_path)) - print() - - # Create ZIP file - file_count = 0 - with zipfile.ZipFile(zip_path, 'w', zipfile.ZIP_DEFLATED) as zipf: - for item in src_path.rglob("*"): - if item.is_file(): - # Check if should be excluded - if should_exclude(item, src_path, exclude_patterns): - continue - - 
# Add to ZIP with relative path - arcname = item.relative_to(src_path) - zipf.write(item, arcname) - file_count += 1 - - if file_count % 10 == 0: - common.log_step(f"Added {file_count} files...") - - # Get ZIP file size - zip_size = zip_path.stat().st_size - zip_size_mb = zip_size / (1024 * 1024) - - print() - common.log_success(f"Package created: {zip_path.name}") - common.log_kv("Files", str(file_count)) - common.log_kv("Size", f"{zip_size_mb:.2f} MB") - - # Output JSON for machine consumption - result = { - "status": "ok", - "platform": ext_info.platform.value, - "extension": ext_info.name, - "ext_type": ext_info.extension_type, - "version": version, - "package": str(zip_path), - "files": file_count, - "size_bytes": zip_size - } - - print() - common.json_output(result) - - return zip_path - - -def main() -> int: - """Main entry point.""" - parser = argparse.ArgumentParser( - description="Package Joomla or Dolibarr extension as distributable ZIP", - formatter_class=argparse.RawDescriptionHelpFormatter, - epilog=""" -Examples: - # Package with auto-detected version - %(prog)s - - # Package to specific directory - %(prog)s dist - - # Package with specific version - %(prog)s dist 1.2.3 - - # Package with custom source - %(prog)s --src-dir my-extension dist 1.0.0 - -Supports both Joomla and Dolibarr extensions with automatic platform detection. 
-""" - ) - - parser.add_argument( - "output_dir", - nargs="?", - default="dist", - help="Output directory for ZIP file (default: dist)" - ) - parser.add_argument( - "version", - nargs="?", - help="Version string (default: auto-detected from manifest)" - ) - parser.add_argument( - "-s", "--src-dir", - default="src", - help="Source directory (default: src)" - ) - parser.add_argument( - "--repo-name", - help="Repository name for ZIP filename (default: auto-detected)" - ) - parser.add_argument( - "--include-docs", - action="store_true", - help="Include documentation files in package" - ) - parser.add_argument( - "--include-tests", - action="store_true", - help="Include test files in package" - ) - - args = parser.parse_args() - - try: - # Adjust exclusion patterns based on arguments - exclude_patterns = EXCLUDE_PATTERNS.copy() - if args.include_docs: - exclude_patterns.discard("docs") - exclude_patterns.discard("README.md") - exclude_patterns.discard("CHANGELOG.md") - if args.include_tests: - exclude_patterns.discard("tests") - exclude_patterns.discard("test") - exclude_patterns.discard("Tests") - - # Create package - zip_path = create_package( - src_dir=args.src_dir, - output_dir=args.output_dir, - version=args.version, - repo_name=args.repo_name, - exclude_patterns=exclude_patterns - ) - - result = { - "status": "success", - "zip_path": str(zip_path) - } - common.json_output(result) - - return 0 - - except Exception as e: - common.log_error(f"Packaging failed: {e}") - result = { - "status": "error", - "error": str(e) - } - common.json_output(result) - return 1 - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/scripts/release/update_dates.sh b/scripts/release/update_dates.sh deleted file mode 100755 index a623911..0000000 --- a/scripts/release/update_dates.sh +++ /dev/null @@ -1,69 +0,0 @@ -#!/usr/bin/env bash -# Copyright (C) 2025 Moko Consulting -# -# This file is part of a Moko Consulting project. 
-# -# SPDX-License-Identifier: GPL-3.0-or-later -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation; either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see https://www.gnu.org/licenses/ . -# -# FILE INFORMATION -# DEFGROUP: Release -# INGROUP: Moko-Cassiopeia -# PATH: scripts/release/update_dates.sh -# VERSION: 03.05.00 -# BRIEF: Normalize dates in release files - -set -euo pipefail - -# Accept date and version as arguments -TODAY="${1:-$(date +%Y-%m-%d)}" -VERSION="${2:-unknown}" - -# Validate date format (YYYY-MM-DD) -if ! [[ "${TODAY}" =~ ^[0-9]{4}-[0-9]{2}-[0-9]{2}$ ]]; then - echo "ERROR: Invalid date format '${TODAY}'. Expected YYYY-MM-DD format." - exit 1 -fi - -echo "Date normalization script running..." -echo "TODAY: ${TODAY}" -echo "VERSION: ${VERSION}" - -# Escape special regex characters in VERSION for safe use in grep and sed -# Escapes: ] \ / $ * . 
^ [ -VERSION_ESCAPED=$(printf '%s\n' "${VERSION}" | sed 's/[][\/$*.^]/\\&/g') - -# Update CHANGELOG.md - replace the date on the version heading line -if [ -f "CHANGELOG.md" ]; then - # Match lines like "## [03.05.00] 2026-01-04" and update the date - if grep -q "^## \[${VERSION_ESCAPED}\] " CHANGELOG.md; then - sed -i "s/^## \[${VERSION_ESCAPED}\] [0-9]\{4\}-[0-9]\{2\}-[0-9]\{2\}/## [${VERSION_ESCAPED}] ${TODAY}/" CHANGELOG.md - echo "โœ“ Updated CHANGELOG.md version [${VERSION}] date to ${TODAY}" - else - echo "โš  Warning: CHANGELOG.md does not contain version [${VERSION}] heading" - fi -else - echo "โš  Warning: CHANGELOG.md not found" -fi - -# Update src/templates/templateDetails.xml - replace the tag -if [ -f "src/templates/templateDetails.xml" ]; then - sed -i "s|[0-9]\{4\}-[0-9]\{2\}-[0-9]\{2\}|${TODAY}|" src/templates/templateDetails.xml - echo "โœ“ Updated src/templates/templateDetails.xml creationDate to ${TODAY}" -else - echo "โš  Warning: src/templates/templateDetails.xml not found" -fi - -echo "Date normalization complete." diff --git a/scripts/run/scaffold_extension.py b/scripts/run/scaffold_extension.py deleted file mode 100755 index 382ef03..0000000 --- a/scripts/run/scaffold_extension.py +++ /dev/null @@ -1,447 +0,0 @@ -#!/usr/bin/env python3 -""" -Create Joomla extension scaffolding. - -Copyright (C) 2025 Moko Consulting - -This file is part of a Moko Consulting project. - -SPDX-License-Identifier: GPL-3.0-or-later - -This program is free software; you can redistribute it and/or modify -it under the terms of the GNU General Public License as published by -the Free Software Foundation; either version 3 of the License, or -(at your option) any later version. - -This program is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU General Public License for more details. 
- -You should have received a copy of the GNU General Public License -along with this program (./LICENSE.md). - -FILE INFORMATION -DEFGROUP: Moko-Cassiopeia.Scripts -INGROUP: Scripts.Run -REPO: https://github.com/mokoconsulting-tech/moko-cassiopeia -PATH: /scripts/run/scaffold_extension.py -VERSION: 01.00.00 -BRIEF: Create scaffolding for different Joomla extension types -""" - -import argparse -import sys -from datetime import datetime -from pathlib import Path -from typing import Dict - -# Add lib directory to path -sys.path.insert(0, str(Path(__file__).parent.parent / "lib")) - -try: - import common -except ImportError: - print("ERROR: Cannot import required libraries", file=sys.stderr) - sys.exit(1) - - -# ============================================================================ -# Templates for Extension Scaffolding -# ============================================================================ - -def get_component_structure(name: str, description: str, author: str) -> Dict[str, str]: - """Get directory structure and files for a component.""" - safe_name = name.lower().replace(" ", "_") - com_name = f"com_{safe_name}" - - manifest = f""" - - {name} - {author} - {datetime.now().strftime("%Y-%m-%d")} - Copyright (C) {datetime.now().year} {author} - GPL-3.0-or-later - hello@example.com - https://example.com - 1.0.0 - {description} - - - src - - - - {name} - - services - sql - src - - - -""" - - return { - f"{com_name}.xml": manifest, - "site/src/.gitkeep": "", - "admin/services/provider.php": f" Dict[str, str]: - """Get directory structure and files for a module.""" - safe_name = name.lower().replace(" ", "_") - mod_name = f"mod_{safe_name}" - - manifest = f""" - - {name} - {author} - {datetime.now().strftime("%Y-%m-%d")} - Copyright (C) {datetime.now().year} {author} - GPL-3.0-or-later - hello@example.com - https://example.com - 1.0.0 - {description} - - - {mod_name}.php - {mod_name}.xml - tmpl - - -""" - - module_php = f"""get('layout', 'default')); -""" - - 
default_tmpl = f""" -
-

-
-""" - - return { - f"{mod_name}.xml": manifest, - f"{mod_name}.php": module_php, - "tmpl/default.php": default_tmpl, - } - - -def get_plugin_structure(name: str, description: str, author: str, group: str = "system") -> Dict[str, str]: - """Get directory structure and files for a plugin.""" - safe_name = name.lower().replace(" ", "_") - plg_name = f"{safe_name}" - - manifest = f""" - - plg_{group}_{safe_name} - {author} - {datetime.now().strftime("%Y-%m-%d")} - Copyright (C) {datetime.now().year} {author} - GPL-3.0-or-later - hello@example.com - https://example.com - 1.0.0 - {description} - - - {plg_name}.php - - -""" - - plugin_php = f""" Dict[str, str]: - """Get directory structure and files for a template.""" - safe_name = name.lower().replace(" ", "_") - - manifest = f""" - - {safe_name} - {datetime.now().strftime("%Y-%m-%d")} - {author} - hello@example.com - https://example.com - Copyright (C) {datetime.now().year} {author} - GPL-3.0-or-later - 1.0.0 - {description} - - - index.php - templateDetails.xml - css - js - images - - - - header - main - footer - - -""" - - index_php = f"""getDocument()->getWebAssetManager(); - -// Load template assets -$wa->useStyle('template.{safe_name}')->useScript('template.{safe_name}'); -?> - - - - - - - - -
- -
-
- -
-
- -
- - -""" - - return { - "templateDetails.xml": manifest, - "index.php": index_php, - "css/template.css": "/* Template styles */\n", - "js/template.js": "// Template JavaScript\n", - "images/.gitkeep": "", - } - - -def get_package_structure(name: str, description: str, author: str) -> Dict[str, str]: - """Get directory structure and files for a package.""" - safe_name = name.lower().replace(" ", "_") - pkg_name = f"pkg_{safe_name}" - - manifest = f""" - - {name} - {safe_name} - {author} - {datetime.now().strftime("%Y-%m-%d")} - Copyright (C) {datetime.now().year} {author} - GPL-3.0-or-later - hello@example.com - https://example.com - 1.0.0 - {description} - - - - - -""" - - return { - f"{pkg_name}.xml": manifest, - "packages/.gitkeep": "", - } - - -# ============================================================================ -# Scaffolding Functions -# ============================================================================ - -def create_extension( - ext_type: str, - name: str, - description: str, - author: str, - output_dir: str = "src", - **kwargs -) -> None: - """ - Create extension scaffolding. 
- - Args: - ext_type: Extension type (component, module, plugin, template, package) - name: Extension name - description: Extension description - author: Author name - output_dir: Output directory - **kwargs: Additional type-specific options - """ - output_path = Path(output_dir) - - # Get structure based on type - if ext_type == "component": - structure = get_component_structure(name, description, author) - elif ext_type == "module": - client = kwargs.get("client", "site") - structure = get_module_structure(name, description, author, client) - elif ext_type == "plugin": - group = kwargs.get("group", "system") - structure = get_plugin_structure(name, description, author, group) - elif ext_type == "template": - structure = get_template_structure(name, description, author) - elif ext_type == "package": - structure = get_package_structure(name, description, author) - else: - common.die(f"Unknown extension type: {ext_type}") - - # Create files - common.log_section(f"Creating {ext_type}: {name}") - - for file_path, content in structure.items(): - full_path = output_path / file_path - - # Create parent directories - full_path.parent.mkdir(parents=True, exist_ok=True) - - # Write file - full_path.write_text(content, encoding="utf-8") - common.log_success(f"Created: {file_path}") - - common.log_section("Scaffolding Complete") - common.log_info(f"Extension files created in: {output_path}") - common.log_info(f"Extension type: {ext_type}") - common.log_info(f"Extension name: {name}") - - -# ============================================================================ -# Command Line Interface -# ============================================================================ - -def main() -> None: - """Main entry point.""" - parser = argparse.ArgumentParser( - description="Create Joomla extension scaffolding", - formatter_class=argparse.RawDescriptionHelpFormatter, - epilog=""" -Examples: - # Create a component - %(prog)s component MyComponent "My Component Description" "John Doe" 
- - # Create a module - %(prog)s module MyModule "My Module Description" "John Doe" --client site - - # Create a plugin - %(prog)s plugin MyPlugin "My Plugin Description" "John Doe" --group system - - # Create a template - %(prog)s template mytheme "My Theme Description" "John Doe" - - # Create a package - %(prog)s package mypackage "My Package Description" "John Doe" -""" - ) - - parser.add_argument( - "type", - choices=["component", "module", "plugin", "template", "package"], - help="Extension type to create" - ) - parser.add_argument("name", help="Extension name") - parser.add_argument("description", help="Extension description") - parser.add_argument("author", help="Author name") - parser.add_argument( - "-o", "--output", - default="src", - help="Output directory (default: src)" - ) - parser.add_argument( - "--client", - choices=["site", "administrator"], - default="site", - help="Module client (site or administrator)" - ) - parser.add_argument( - "--group", - default="system", - help="Plugin group (system, content, user, etc.)" - ) - - args = parser.parse_args() - - try: - create_extension( - ext_type=args.type, - name=args.name, - description=args.description, - author=args.author, - output_dir=args.output, - client=args.client, - group=args.group - ) - except Exception as e: - common.log_error(f"Failed to create extension: {e}") - sys.exit(1) - - -if __name__ == "__main__": - main() diff --git a/scripts/run/validate_all.py b/scripts/run/validate_all.py deleted file mode 100755 index c43e6e6..0000000 --- a/scripts/run/validate_all.py +++ /dev/null @@ -1,181 +0,0 @@ -#!/usr/bin/env python3 -""" -Run all validation scripts. - -Copyright (C) 2025 Moko Consulting - -This file is part of a Moko Consulting project. 
- -SPDX-License-Identifier: GPL-3.0-or-later - -This program is free software; you can redistribute it and/or modify -it under the terms of the GNU General Public License as published by -the Free Software Foundation; either version 3 of the License, or -(at your option) any later version. - -This program is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU General Public License for more details. - -You should have received a copy of the GNU General Public License -along with this program (./LICENSE.md). - -FILE INFORMATION -DEFGROUP: Script.Run -INGROUP: Validation.Runner -REPO: https://github.com/mokoconsulting-tech/moko-cassiopeia -PATH: /scripts/run/validate_all.py -VERSION: 01.00.00 -BRIEF: Run all validation scripts -""" - -import subprocess -import sys -from pathlib import Path -from typing import Tuple - -# Add lib directory to path -sys.path.insert(0, str(Path(__file__).parent.parent / "lib")) - -try: - import common -except ImportError: - print("ERROR: Cannot import required libraries", file=sys.stderr) - sys.exit(1) - - -# Required validation scripts (must pass) -REQUIRED_SCRIPTS = [ - "scripts/validate/manifest.py", - "scripts/validate/xml_wellformed.py", - "scripts/validate/workflows.py", -] - -# Optional validation scripts (failures are warnings) -OPTIONAL_SCRIPTS = [ - "scripts/validate/changelog.py", - "scripts/validate/language_structure.py", - "scripts/validate/license_headers.py", - "scripts/validate/no_secrets.py", - "scripts/validate/paths.py", - "scripts/validate/php_syntax.py", - "scripts/validate/tabs.py", - "scripts/validate/version_alignment.py", - "scripts/validate/version_hierarchy.py", -] - - -def run_validation_script(script_path: str) -> Tuple[bool, str]: - """ - Run a validation script. 
- - Args: - script_path: Path to script - - Returns: - Tuple of (success, output) - """ - script = Path(script_path) - - if not script.exists(): - return (False, f"Script not found: {script_path}") - - try: - result = subprocess.run( - ["python3", str(script)], - capture_output=True, - text=True, - check=False - ) - - output = result.stdout + result.stderr - success = result.returncode == 0 - - return (success, output) - except Exception as e: - return (False, f"Error running script: {e}") - - -def main() -> int: - """Main entry point.""" - common.log_section("Running All Validations") - print() - - total_passed = 0 - total_failed = 0 - total_skipped = 0 - - # Run required scripts - common.log_info("=== Required Validations ===") - print() - - for script in REQUIRED_SCRIPTS: - script_name = Path(script).name - common.log_info(f"Running {script_name}...") - - success, output = run_validation_script(script) - - if success: - common.log_success(f"โœ“ {script_name} passed") - total_passed += 1 - else: - common.log_error(f"โœ— {script_name} FAILED") - if output: - print(output) - total_failed += 1 - print() - - # Run optional scripts - common.log_info("=== Optional Validations ===") - print() - - for script in OPTIONAL_SCRIPTS: - script_name = Path(script).name - - if not Path(script).exists(): - common.log_warn(f"โŠ˜ {script_name} not found (skipped)") - total_skipped += 1 - continue - - common.log_info(f"Running {script_name}...") - - success, output = run_validation_script(script) - - if success: - common.log_success(f"โœ“ {script_name} passed") - total_passed += 1 - else: - common.log_warn(f"โš  {script_name} failed (optional)") - if output: - print(output[:500]) # Limit output - total_failed += 1 - print() - - # Summary - common.log_section("Validation Summary") - common.log_kv("Total Passed", str(total_passed)) - common.log_kv("Total Failed", str(total_failed)) - common.log_kv("Total Skipped", str(total_skipped)) - print() - - # Check if any required validations 
failed - required_failed = sum( - 1 for script in REQUIRED_SCRIPTS - if Path(script).exists() and not run_validation_script(script)[0] - ) - - if required_failed > 0: - common.log_error(f"{required_failed} required validation(s) failed") - return 1 - - common.log_success("All required validations passed!") - - if total_failed > 0: - common.log_warn(f"{total_failed} optional validation(s) failed") - - return 0 - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/scripts/validate/manifest.py b/scripts/validate/manifest.py deleted file mode 100755 index c7a4351..0000000 --- a/scripts/validate/manifest.py +++ /dev/null @@ -1,168 +0,0 @@ -#!/usr/bin/env python3 -""" -Validate Joomla manifest files. - -Copyright (C) 2025 Moko Consulting - -This file is part of a Moko Consulting project. - -SPDX-License-Identifier: GPL-3.0-or-later - -This program is free software; you can redistribute it and/or modify -it under the terms of the GNU General Public License as published by -the Free Software Foundation; either version 3 of the License, or -(at your option) any later version. - -This program is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU General Public License for more details. - -You should have received a copy of the GNU General Public License -along with this program (./LICENSE.md). 
- -FILE INFORMATION -DEFGROUP: Moko-Cassiopeia.Scripts -INGROUP: Scripts.Validate -REPO: https://github.com/mokoconsulting-tech/moko-cassiopeia -PATH: /scripts/validate/manifest.py -VERSION: 01.00.00 -BRIEF: Validate Joomla extension manifest files -""" - -import argparse -import sys -from pathlib import Path - -# Add lib directory to path -sys.path.insert(0, str(Path(__file__).parent.parent / "lib")) - -try: - import common - import joomla_manifest -except ImportError: - print("ERROR: Cannot import required libraries", file=sys.stderr) - sys.exit(1) - - -def validate_manifest_file(manifest_path: Path, verbose: bool = False) -> bool: - """ - Validate a single manifest file. - - Args: - manifest_path: Path to manifest file - verbose: Show detailed output - - Returns: - True if valid, False otherwise - """ - try: - info = joomla_manifest.parse_manifest(manifest_path) - is_valid, warnings = joomla_manifest.validate_manifest(manifest_path) - - if verbose: - common.log_section(f"Manifest: {manifest_path}") - common.log_kv("Type", info.extension_type) - common.log_kv("Name", info.name) - common.log_kv("Version", info.version) - - if warnings: - common.log_warn(f"Warnings ({len(warnings)}):") - for warning in warnings: - print(f" - {warning}") - - # Output JSON for machine consumption - result = { - "status": "ok" if is_valid else "error", - "manifest": str(manifest_path), - "ext_type": info.extension_type, - "name": info.name, - "version": info.version, - "warnings": warnings - } - - if not verbose: - common.json_output(result) - - if is_valid: - if not verbose: - print(f"manifest: ok ({manifest_path})") - else: - common.log_success("Manifest is valid") - return True - else: - common.log_error(f"Manifest validation failed: {manifest_path}") - return False - - except SystemExit: - common.log_error(f"Failed to parse manifest: {manifest_path}") - return False - - -def main() -> int: - """Main entry point.""" - parser = argparse.ArgumentParser( - description="Validate Joomla 
extension manifest files", - formatter_class=argparse.RawDescriptionHelpFormatter - ) - - parser.add_argument( - "-s", "--src-dir", - default="src", - help="Source directory to search for manifests (default: src)" - ) - parser.add_argument( - "-v", "--verbose", - action="store_true", - help="Show detailed output" - ) - parser.add_argument( - "manifest", - nargs="?", - help="Specific manifest file to validate (optional)" - ) - - args = parser.parse_args() - - try: - if args.manifest: - # Validate specific manifest - manifest_path = Path(args.manifest) - if not manifest_path.is_file(): - common.die(f"Manifest file not found: {args.manifest}") - - success = validate_manifest_file(manifest_path, args.verbose) - return 0 if success else 1 - else: - # Find and validate all manifests in src directory - manifests = joomla_manifest.find_all_manifests(args.src_dir) - - if not manifests: - common.die(f"No manifest files found in {args.src_dir}") - - if args.verbose: - common.log_section("Validating Manifests") - common.log_info(f"Found {len(manifests)} manifest(s)") - print() - - all_valid = True - for manifest in manifests: - if not validate_manifest_file(manifest, args.verbose): - all_valid = False - - if args.verbose: - print() - if all_valid: - common.log_success(f"All {len(manifests)} manifest(s) are valid") - else: - common.log_error("Some manifests failed validation") - - return 0 if all_valid else 1 - - except Exception as e: - common.log_error(f"Validation failed: {e}") - return 1 - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/scripts/validate/no_secrets.py b/scripts/validate/no_secrets.py deleted file mode 100755 index 39e0f29..0000000 --- a/scripts/validate/no_secrets.py +++ /dev/null @@ -1,212 +0,0 @@ -#!/usr/bin/env python3 -""" -Scan for accidentally committed secrets and credentials. - -Copyright (C) 2025 Moko Consulting - -This file is part of a Moko Consulting project. 
# High-signal patterns only. Any match is a hard fail.
SECRET_PATTERNS = [
    # Private keys
    r'-----BEGIN (RSA|DSA|EC|OPENSSH) PRIVATE KEY-----',
    r'PuTTY-User-Key-File-',
    # AWS keys
    r'AKIA[0-9A-Z]{16}',
    r'ASIA[0-9A-Z]{16}',
    # GitHub tokens
    r'ghp_[A-Za-z0-9]{36}',
    r'gho_[A-Za-z0-9]{36}',
    r'github_pat_[A-Za-z0-9_]{20,}',
    # Slack tokens
    r'xox[baprs]-[0-9A-Za-z-]{10,48}',
    # Stripe keys
    r'sk_live_[0-9a-zA-Z]{20,}',
]

# Directories to exclude from scanning (third-party / generated content).
EXCLUDE_DIRS = {
    'vendor',
    'node_modules',
    'dist',
    'build',
    '.git',
}


def scan_file(filepath: Path, patterns: List[re.Pattern]) -> List[Dict[str, str]]:
    """
    Scan a single file for secret patterns.

    Args:
        filepath: Path to file to scan
        patterns: Compiled regex patterns to search for

    Returns:
        List of matches with file, line number, and truncated content.
        Each offending line is reported at most once, even when it matches
        several patterns (the old code emitted one duplicate per pattern).
    """
    hits: List[Dict[str, str]] = []
    try:
        with open(filepath, 'r', encoding='utf-8', errors='ignore') as f:
            for line_num, line in enumerate(f, 1):
                if any(pattern.search(line) for pattern in patterns):
                    hits.append({
                        'file': str(filepath),
                        'line': line_num,
                        'content': line.strip()[:100]  # cap leaked preview at 100 chars
                    })
    except Exception as e:
        common.log_warn(f"Could not read {filepath}: {e}")
    return hits


def scan_directory(src_dir: str, patterns: List[re.Pattern]) -> List[Dict[str, str]]:
    """
    Recursively scan a directory tree for secrets.

    Skips excluded directories (vendor, node_modules, ...) and files that
    look binary (NUL byte in the first KiB).

    Args:
        src_dir: Directory to scan
        patterns: Compiled regex patterns

    Returns:
        List of all matches across the tree
    """
    all_hits: List[Dict[str, str]] = []

    for item in Path(src_dir).rglob("*"):
        if not item.is_file():
            continue
        if any(excluded in item.parts for excluded in EXCLUDE_DIRS):
            continue

        # Binary heuristic: a NUL byte in the first KiB means skip.
        try:
            with open(item, 'rb') as f:
                if b'\x00' in f.read(1024):
                    continue
        except Exception:
            continue

        all_hits.extend(scan_file(item, patterns))

    return all_hits


def main() -> int:
    """CLI entry point: scan --src-dir and fail hard on any secret match."""
    parser = argparse.ArgumentParser(
        description="Scan for accidentally committed secrets and credentials"
    )
    parser.add_argument(
        "-s", "--src-dir",
        default=os.environ.get("SRC_DIR", "src"),
        help="Source directory to scan (default: src)"
    )
    args = parser.parse_args()

    if not Path(args.src_dir).is_dir():
        common.json_output({"status": "fail", "error": "src directory missing"})
        return 1

    compiled_patterns = [re.compile(pattern) for pattern in SECRET_PATTERNS]
    hits = scan_directory(args.src_dir, compiled_patterns)

    if not hits:
        common.json_output({"status": "ok", "src_dir": args.src_dir})
        print("no_secrets: ok")
        return 0

    # Remember the true total before truncating for output (the old code
    # truncated first and then reported the truncated length as the total).
    total = len(hits)
    hits = hits[:50]

    result = {
        "status": "fail",
        "error": "secret_pattern_detected",
        "hits": [{"hit": f"{h['file']}:{h['line']}: {h['content']}"} for h in hits]
    }
    print(json.dumps(result))

    # Human-readable report on stderr.
    print("\nERROR: Potential secrets detected!", file=sys.stderr)
    print(f"\nFound {total} potential secret(s):", file=sys.stderr)
    for hit in hits[:10]:  # show first 10 in detail
        print(f"  {hit['file']}:{hit['line']}", file=sys.stderr)
        print(f"    {hit['content']}", file=sys.stderr)
    if total > 10:
        print(f"  ... and {total - 10} more", file=sys.stderr)
    print("\nPlease remove any secrets and use environment variables or secret management instead.", file=sys.stderr)

    return 1


if __name__ == "__main__":
    sys.exit(main())
def get_tracked_files() -> List[str]:
    """
    Get the list of files tracked by git.

    Returns:
        List of file paths; empty list if git fails (e.g. not a repository).
    """
    try:
        result = common.run_command(
            ["git", "ls-files", "-z"],  # NUL-separated: safe for any filename
            capture_output=True,
            check=True
        )
        return [f for f in result.stdout.split('\0') if f.strip()]
    except subprocess.CalledProcessError:
        return []


# "application/..." MIME types that are actually text and must still be
# scanned. The previous blanket application/ test misclassified .json, .xml
# and .js files as binary and silently skipped them.
_TEXT_APPLICATION_TYPES = {
    "application/json",
    "application/xml",
    "application/javascript",
    "application/xhtml+xml",
    "application/x-sh",
}


def is_binary_file(filepath: str) -> bool:
    """
    Heuristically decide whether a file is binary.

    A file counts as binary when its guessed MIME type is a non-text
    application/audio/image/video type, or when its first KiB contains a
    NUL byte. Unreadable files are treated as binary so they are skipped.

    Args:
        filepath: Path to file

    Returns:
        True if likely binary
    """
    mime_type, _ = mimetypes.guess_type(filepath)
    if (mime_type
            and mime_type not in _TEXT_APPLICATION_TYPES
            and mime_type.startswith(('application/', 'audio/', 'image/', 'video/'))):
        return True

    try:
        with open(filepath, 'rb') as f:
            return b'\x00' in f.read(1024)
    except Exception:
        return True


def find_backslashes_in_file(filepath: str) -> List[Tuple[int, str]]:
    """
    Find lines containing backslashes in a file.

    Args:
        filepath: Path to file

    Returns:
        List of (line_number, line_content) tuples
    """
    offenders: List[Tuple[int, str]] = []
    try:
        with open(filepath, 'r', encoding='utf-8', errors='ignore') as f:
            for line_num, line in enumerate(f, 1):
                if '\\' in line:
                    offenders.append((line_num, line.rstrip()))
    except Exception as e:
        common.log_warn(f"Could not read {filepath}: {e}")
    return offenders


def main() -> int:
    """Scan all tracked text files; exit 2 if any contain backslashes."""
    tracked_files = get_tracked_files()
    if not tracked_files:
        print("No files to check")
        return 0

    hits: Dict[str, List[Tuple[int, str]]] = {}
    for filepath in tracked_files:
        if is_binary_file(filepath):
            continue
        backslashes = find_backslashes_in_file(filepath)
        if backslashes:
            hits[filepath] = backslashes

    if not hits:
        print("paths: ok")
        return 0

    print("ERROR: Windows-style path literals detected", file=sys.stderr)
    print("", file=sys.stderr)
    print(f"Found backslashes in {len(hits)} file(s):", file=sys.stderr)
    for filepath, lines in hits.items():
        print("", file=sys.stderr)
        print(f"  File: {filepath}", file=sys.stderr)
        print("  Lines with backslashes:", file=sys.stderr)
        for line_num, line_content in lines[:5]:  # cap detail at 5 lines/file
            print(f"    {line_num}: {line_content[:80]}", file=sys.stderr)
        if len(lines) > 5:
            print(f"    ... and {len(lines) - 5} more", file=sys.stderr)

    print("", file=sys.stderr)
    print("To fix:", file=sys.stderr)
    print("  1. Run: python3 scripts/fix/paths.py", file=sys.stderr)
    print("  2. Or manually replace backslashes (\\) with forward slashes (/)", file=sys.stderr)
    print("  3. Ensure paths use POSIX separators for cross-platform compatibility", file=sys.stderr)
    print("", file=sys.stderr)
    return 2


if __name__ == "__main__":
    sys.exit(main())
# Default directories skipped when searching for PHP files. The old code
# repeated this literal in three places; keep it in one.
DEFAULT_EXCLUDE_DIRS = ["vendor", "node_modules", ".git"]


def check_php_file(file_path: Path) -> Tuple[bool, str]:
    """
    Check PHP syntax of a single file with ``php -l``.

    Args:
        file_path: Path to PHP file

    Returns:
        Tuple of (is_valid, error_message); error_message is "" on success.
    """
    try:
        result = subprocess.run(
            ["php", "-l", str(file_path)],
            capture_output=True,
            text=True,
            timeout=10  # guard against a hung interpreter
        )
        if result.returncode == 0:
            return (True, "")
        # php -l reports lint errors on either stream depending on config.
        return (False, result.stderr or result.stdout)
    except subprocess.TimeoutExpired:
        return (False, "Syntax check timed out")
    except Exception as e:
        return (False, str(e))


def find_php_files(src_dir: str, exclude_dirs: List[str] = None) -> List[Path]:
    """
    Find all PHP files in a directory, skipping excluded subtrees.

    Args:
        src_dir: Directory to search
        exclude_dirs: Directories to exclude (default: DEFAULT_EXCLUDE_DIRS)

    Returns:
        Sorted list of PHP file paths; empty list if src_dir is missing.
    """
    if exclude_dirs is None:
        exclude_dirs = DEFAULT_EXCLUDE_DIRS

    src_path = Path(src_dir)
    if not src_path.is_dir():
        return []

    return sorted(
        php_file
        for php_file in src_path.rglob("*.php")
        if not any(excluded in php_file.parts for excluded in exclude_dirs)
    )


def main() -> int:
    """CLI entry point: lint the given files, or every PHP file under --src-dir."""
    parser = argparse.ArgumentParser(
        description="Validate PHP syntax in all PHP files",
        formatter_class=argparse.RawDescriptionHelpFormatter
    )
    parser.add_argument(
        "-s", "--src-dir",
        default="src",
        help="Source directory to search for PHP files (default: src)"
    )
    parser.add_argument(
        "-v", "--verbose",
        action="store_true",
        help="Show detailed output"
    )
    parser.add_argument(
        "--exclude",
        action="append",
        help="Directories to exclude (can be specified multiple times)"
    )
    parser.add_argument(
        "files",
        nargs="*",
        help="Specific files to check (optional)"
    )
    args = parser.parse_args()

    # The PHP interpreter must be on PATH for php -l to work at all.
    common.require_cmd("php")

    try:
        if args.files:
            php_files = [Path(f) for f in args.files]
            for f in php_files:
                if not f.is_file():
                    common.die(f"File not found: {f}")
        else:
            # NOTE: user-supplied --exclude replaces (not extends) the defaults.
            php_files = find_php_files(args.src_dir, args.exclude or DEFAULT_EXCLUDE_DIRS)
            if not php_files:
                common.die(f"No PHP files found in {args.src_dir}")

        if args.verbose:
            common.log_section("PHP Syntax Validation")
            common.log_info(f"Checking {len(php_files)} PHP file(s)")
            print()

        errors = []
        for php_file in php_files:
            is_valid, error_msg = check_php_file(php_file)
            if is_valid:
                if args.verbose:
                    common.log_success(f"OK: {php_file}")
            else:
                errors.append((php_file, error_msg))
                if args.verbose:
                    common.log_error(f"FAILED: {php_file}")
                    if error_msg:
                        print(f"  {error_msg}")

        if args.verbose:
            print()

        if errors:
            result = {
                "status": "error",
                "total": len(php_files),
                "passed": len(php_files) - len(errors),
                "failed": len(errors),
                "errors": [{"file": str(f), "error": e} for f, e in errors]
            }
            if not args.verbose:
                common.json_output(result)
            common.log_error(f"PHP syntax check failed: {len(errors)} error(s)")
            if not args.verbose:
                for file_path, error_msg in errors:
                    print(f"ERROR: {file_path}")
                    if error_msg:
                        print(f"  {error_msg}")
            return 1

        result = {
            "status": "ok",
            "total": len(php_files),
            "passed": len(php_files)
        }
        if not args.verbose:
            common.json_output(result)
            print(f"php_syntax: ok ({len(php_files)} file(s) checked)")
        else:
            common.log_success(f"All {len(php_files)} PHP file(s) are valid")
        return 0

    except Exception as e:
        common.log_error(f"Validation failed: {e}")
        return 1


if __name__ == "__main__":
    sys.exit(main())
def get_yaml_files() -> List[str]:
    """
    Get the list of YAML files tracked by git.

    Returns:
        List of YAML file paths; empty list if git fails.
    """
    try:
        result = common.run_command(
            ["git", "ls-files", "*.yml", "*.yaml"],
            capture_output=True,
            check=True
        )
        return [f.strip() for f in result.stdout.split('\n') if f.strip()]
    except subprocess.CalledProcessError:
        return []


def check_tabs_in_file(filepath: str) -> List[Tuple[int, str]]:
    """
    Check for tab characters in a file.

    Args:
        filepath: Path to file to check

    Returns:
        List of (line_number, line_content) tuples for lines containing tabs
    """
    offenders: List[Tuple[int, str]] = []
    try:
        with open(filepath, 'r', encoding='utf-8', errors='ignore') as f:
            for line_num, line in enumerate(f, 1):
                if '\t' in line:
                    offenders.append((line_num, line.rstrip()))
    except Exception as e:
        common.log_warn(f"Could not read {filepath}: {e}")
    return offenders


def main() -> int:
    """Scan all tracked YAML files; exit 2 if any contain tab characters."""
    yaml_files = get_yaml_files()
    if not yaml_files:
        print("No files to check")
        return 0

    # Per-file detail is printed as violations are found; only the offending
    # file names are kept for the summary. (The old code also accumulated a
    # full violations dict that was never read.)
    bad_files = []
    for filepath in yaml_files:
        tabs = check_tabs_in_file(filepath)
        if not tabs:
            continue
        bad_files.append(filepath)

        print(f"TAB found in {filepath}", file=sys.stderr)
        print("  Lines with tabs:", file=sys.stderr)
        for line_num, line_content in tabs[:5]:  # cap detail at 5 lines/file
            print(f"    {line_num}: {line_content[:80]}", file=sys.stderr)
        if len(tabs) > 5:
            print(f"    ... and {len(tabs) - 5} more", file=sys.stderr)
        print("", file=sys.stderr)

    if bad_files:
        print("", file=sys.stderr)
        print("ERROR: Tabs found in repository files", file=sys.stderr)
        print("", file=sys.stderr)
        print("YAML specification forbids tab characters.", file=sys.stderr)
        print(f"Found tabs in {len(bad_files)} file(s):", file=sys.stderr)
        for f in bad_files:
            print(f"  - {f}", file=sys.stderr)
        print("", file=sys.stderr)
        print("To fix:", file=sys.stderr)
        print("  1. Run: python3 scripts/fix/tabs.py", file=sys.stderr)
        print("  2. Or manually replace tabs with spaces in your editor", file=sys.stderr)
        print("  3. Configure your editor to use spaces (not tabs) for YAML files", file=sys.stderr)
        print("", file=sys.stderr)
        return 2

    print("tabs: ok")
    return 0


if __name__ == "__main__":
    sys.exit(main())
def validate_yaml_syntax(filepath: Path) -> bool:
    """
    Validate the YAML syntax of a workflow file.

    Args:
        filepath: Path to workflow file

    Returns:
        True if valid (or if PyYAML is unavailable — syntax check is skipped)
    """
    try:
        import yaml
    except ImportError:
        common.log_warn("PyYAML module not installed. Install with: pip3 install pyyaml")
        return True  # optional dependency: skip rather than fail

    try:
        with open(filepath, 'r', encoding='utf-8') as f:
            yaml.safe_load(f)
        print(f"โœ“ Valid YAML: {filepath.name}")
        return True
    except yaml.YAMLError as e:
        print(f"โœ— YAML Error in {filepath.name}: {e}", file=sys.stderr)
        return False
    except Exception as e:
        print(f"โœ— Error reading {filepath.name}: {e}", file=sys.stderr)
        return False


def check_no_tabs(filepath: Path) -> bool:
    """
    Check that the file contains no tab characters (forbidden in YAML).

    Args:
        filepath: Path to file

    Returns:
        True if no tabs found; False on tabs or read failure
    """
    try:
        with open(filepath, 'r', encoding='utf-8') as f:
            if '\t' in f.read():
                common.log_error(f"โœ— File contains tab characters: {filepath.name}")
                return False
    except Exception as e:
        common.log_warn(f"Could not read {filepath}: {e}")
        return False
    return True


def check_workflow_structure(filepath: Path) -> bool:
    """
    Check a workflow file for the required top-level keys.

    Uses line-anchored matching: the old substring test ('on:' in content)
    false-passed on any file containing e.g. 'version:', which ends in 'on:'.

    Args:
        filepath: Path to workflow file

    Returns:
        True if 'on:' and 'jobs:' are present ('name:' only warns)
    """
    import re  # local import keeps the module's dependency surface unchanged

    errors = 0
    try:
        with open(filepath, 'r', encoding='utf-8') as f:
            content = f.read()

        def has_top_level(key: str) -> bool:
            # Top-level keys start in column 0; YAML also allows a quoted key.
            return re.search(rf'^(?:"{key}"|{key}):', content, re.MULTILINE) is not None

        if not has_top_level('name'):
            common.log_warn(f"Missing 'name:' in {filepath.name}")
        if not has_top_level('on'):
            common.log_error(f"โœ— Missing 'on:' trigger in {filepath.name}")
            errors += 1
        if not has_top_level('jobs'):
            common.log_error(f"โœ— Missing 'jobs:' in {filepath.name}")
            errors += 1
    except Exception as e:
        common.log_error(f"Error reading {filepath}: {e}")
        return False

    return errors == 0


def validate_workflow_file(filepath: Path) -> bool:
    """
    Run all checks (YAML syntax, tabs, structure) against one workflow file.

    Args:
        filepath: Path to workflow file

    Returns:
        True if every check passed
    """
    common.log_info(f"Validating: {filepath.name}")

    checks = (
        validate_yaml_syntax(filepath),
        check_no_tabs(filepath),
        check_workflow_structure(filepath),
    )
    errors = sum(1 for passed in checks if not passed)

    if errors == 0:
        common.log_info(f"โœ“ {filepath.name} passed all checks")
        return True
    common.log_error(f"โœ— {filepath.name} failed {errors} check(s)")
    return False


def main() -> int:
    """Validate every workflow file under .github/workflows."""
    common.log_info("GitHub Actions Workflow Validation")
    common.log_info("===================================")
    print()

    workflows_dir = Path(".github/workflows")
    if not workflows_dir.is_dir():
        common.log_error(f"Workflows directory not found: {workflows_dir}")
        return 1

    workflow_files = []
    for pattern in ["*.yml", "*.yaml"]:
        workflow_files.extend(workflows_dir.glob(pattern))

    if not workflow_files:
        common.log_warn("No workflow files found")
        return 0

    passed = 0
    failed = 0
    for workflow in workflow_files:
        if validate_workflow_file(workflow):
            passed += 1
        else:
            failed += 1
        print()

    common.log_info("===================================")
    common.log_info("Summary:")
    common.log_info(f"  Total workflows: {len(workflow_files)}")
    common.log_info(f"  Passed: {passed}")
    common.log_info(f"  Failed: {failed}")
    common.log_info("===================================")

    if failed > 0:
        common.log_error("Workflow validation failed")
        return 1

    common.log_info("All workflows validated successfully")
    return 0


if __name__ == "__main__":
    sys.exit(main())
def check_xml_file(file_path: Path) -> Tuple[bool, str]:
    """
    Check whether an XML file is well-formed.

    Args:
        file_path: Path to XML file

    Returns:
        Tuple of (is_valid, error_message); error_message is "" on success.
    """
    try:
        ET.parse(file_path)
        return (True, "")
    except Exception as e:
        # Covers ET.ParseError (malformed XML) and I/O errors alike; the
        # message is all callers need.
        return (False, str(e))


def find_xml_files(src_dir: str, exclude_dirs: List[str] = None) -> List[Path]:
    """
    Find all XML files in a directory, skipping excluded subtrees.

    Args:
        src_dir: Directory to search
        exclude_dirs: Directories to exclude (default: vendor/node_modules/.git)

    Returns:
        Sorted list of XML file paths; empty list if src_dir is missing.
    """
    if exclude_dirs is None:
        exclude_dirs = ["vendor", "node_modules", ".git"]

    src_path = Path(src_dir)
    if not src_path.is_dir():
        return []

    return sorted(
        xml_file
        for xml_file in src_path.rglob("*.xml")
        if not any(excluded in xml_file.parts for excluded in exclude_dirs)
    )


def main() -> int:
    """CLI entry point: check the given files, or every XML file under --src-dir."""
    parser = argparse.ArgumentParser(
        description="Validate XML well-formedness in all XML files",
        formatter_class=argparse.RawDescriptionHelpFormatter
    )
    parser.add_argument(
        "-s", "--src-dir",
        default="src",
        help="Source directory to search for XML files (default: src)"
    )
    parser.add_argument(
        "-v", "--verbose",
        action="store_true",
        help="Show detailed output"
    )
    parser.add_argument(
        "--exclude",
        action="append",
        help="Directories to exclude (can be specified multiple times)"
    )
    parser.add_argument(
        "files",
        nargs="*",
        help="Specific files to check (optional)"
    )
    args = parser.parse_args()

    try:
        if args.files:
            xml_files = [Path(f) for f in args.files]
            for f in xml_files:
                if not f.is_file():
                    common.die(f"File not found: {f}")
        else:
            exclude_dirs = args.exclude or ["vendor", "node_modules", ".git"]
            xml_files = find_xml_files(args.src_dir, exclude_dirs)
            if not xml_files:
                common.die(f"No XML files found in {args.src_dir}")

        if args.verbose:
            common.log_section("XML Well-formedness Validation")
            common.log_info(f"Checking {len(xml_files)} XML file(s)")
            print()

        errors = []
        for xml_file in xml_files:
            is_valid, error_msg = check_xml_file(xml_file)
            if is_valid:
                if args.verbose:
                    common.log_success(f"OK: {xml_file}")
            else:
                errors.append((xml_file, error_msg))
                if args.verbose:
                    common.log_error(f"FAILED: {xml_file}")
                    if error_msg:
                        print(f"  {error_msg}")

        if args.verbose:
            print()

        if errors:
            result = {
                "status": "error",
                "src_dir": args.src_dir,
                "xml_count": len(xml_files),
                "passed": len(xml_files) - len(errors),
                "failed": len(errors),
                "errors": [{"file": str(f), "error": e} for f, e in errors]
            }
            if not args.verbose:
                common.json_output(result)
            common.log_error(f"XML validation failed: {len(errors)} error(s)")
            if not args.verbose:
                for file_path, error_msg in errors:
                    print(f"ERROR: {file_path}")
                    if error_msg:
                        print(f"  {error_msg}")
            return 1

        result = {
            "status": "ok",
            "src_dir": args.src_dir,
            "xml_count": len(xml_files)
        }
        if not args.verbose:
            common.json_output(result)
            print("xml_wellformed: ok")  # was a pointless placeholder-free f-string
        else:
            common.log_success(f"All {len(xml_files)} XML file(s) are well-formed")
        return 0

    except Exception as e:
        common.log_error(f"Validation failed: {e}")
        return 1


if __name__ == "__main__":
    sys.exit(main())
+### Custom Color Palettes + +Moko-Cassiopeia supports custom color schemes for both light and dark modes: + +- **Standard**: Default Joomla Cassiopeia colors +- **Alternative**: Alternative color palette +- **Custom**: Create your own custom colors by adding `colors_custom.css` files + +To use custom colors: +1. Create `src/media/css/colors/light/colors_custom.css` for light mode +2. Create `src/media/css/colors/dark/colors_custom.css` for dark mode +3. Define your CSS variables in these files (see existing `colors_standard.css` for reference) +4. Select "Custom" in template settings under **Variables & Palettes** + ### Font Awesome 7 - Fully integrated into Joomla's asset manager. diff --git a/docs/JOOMLA_DEVELOPMENT.md b/docs/JOOMLA_DEVELOPMENT.md index b42431b..4c14bb4 100644 --- a/docs/JOOMLA_DEVELOPMENT.md +++ b/docs/JOOMLA_DEVELOPMENT.md @@ -41,16 +41,7 @@ All requirements are automatically installed in CI/CD pipelines. Package the Joomla template as a distributable ZIP file: ```bash -./scripts/release/package_extension.sh [output_dir] [version] -``` - -**Parameters:** -- `output_dir` (optional): Output directory for the ZIP file (default: `dist`) -- `version` (optional): Version string to use (default: extracted from manifest) - -**Example:** -```bash -./scripts/release/package_extension.sh dist 3.5.0 +make package ``` This creates a ZIP file in the `dist` directory with all necessary template files, excluding development files. @@ -246,7 +237,7 @@ phpcs --config-set installed_paths ~/.composer/vendor/phpcompatibility/php-compa 3. 
Run checks: ```bash # PHP syntax check -./scripts/validate/php_syntax.sh +make validate-required # CodeSniffer phpcs --standard=phpcs.xml src/ @@ -266,7 +257,7 @@ Use the package script to create a distribution: ```bash # Create package -./scripts/release/package_extension.sh dist 3.5.0 +make package # Upload to server scp dist/moko-cassiopeia-3.5.0-template.zip user@server:/path/to/joomla/ diff --git a/docs/QUICK_START.md b/docs/QUICK_START.md index 93b467b..6b63364 100644 --- a/docs/QUICK_START.md +++ b/docs/QUICK_START.md @@ -34,13 +34,7 @@ composer global require "phpcompatibility/php-compatibility:^9.0" composer global require codeception/codeception ``` -### 3. Install Git Hooks (Optional but Recommended) - -```bash -./scripts/git/install-hooks.sh -``` - -### 4. Validate Everything Works +### 3. Validate Everything Works ```bash # Quick validation @@ -111,9 +105,6 @@ make phpcompat # Package with auto-detected version make package -# Or specify directory and version -./scripts/release/package_extension.sh dist 03.05.00 - # Check package contents ls -lh dist/ unzip -l dist/moko-cassiopeia-*.zip @@ -157,13 +148,6 @@ moko-cassiopeia/ โ”‚ โ”œโ”€โ”€ media/ # Assets (CSS, JS, images) โ”‚ โ”œโ”€โ”€ language/ # Language files โ”‚ โ””โ”€โ”€ administrator/ # Admin files -โ”œโ”€โ”€ scripts/ # Automation scripts -โ”‚ โ”œโ”€โ”€ validate/ # Validation scripts -โ”‚ โ”œโ”€โ”€ fix/ # Fix/update scripts -โ”‚ โ”œโ”€โ”€ release/ # Release scripts -โ”‚ โ”œโ”€โ”€ run/ # Execution scripts -โ”‚ โ”œโ”€โ”€ git/ # Git hooks -โ”‚ โ””โ”€โ”€ lib/ # Shared libraries โ”œโ”€โ”€ tests/ # Test suites โ”œโ”€โ”€ docs/ # Documentation โ”œโ”€โ”€ .github/workflows/ # CI/CD workflows @@ -177,7 +161,6 @@ moko-cassiopeia/ 1. **Read the Workflow Guide**: [docs/WORKFLOW_GUIDE.md](./WORKFLOW_GUIDE.md) 2. **Review Joomla Development**: [docs/JOOMLA_DEVELOPMENT.md](./JOOMLA_DEVELOPMENT.md) -3. 
**Check Scripts Documentation**: [scripts/README.md](../scripts/README.md) ### Creating Your First Feature @@ -234,10 +217,8 @@ Use the Release Pipeline workflow to promote between stages. ```bash make fix-permissions -# Or manually: -chmod +x scripts/**/*.sh ``` ### PHPStan/PHPCS Not Found ```bash @@ -246,29 +227,12 @@ make install composer global require "squizlabs/php_codesniffer:^3.0" phpstan/phpstan ``` -### Pre-commit Hook Fails - -```bash -# Run manually to see details -./scripts/git/pre-commit.sh - -# Quick mode (skip some checks) -./scripts/git/pre-commit.sh --quick - -# Skip quality checks -./scripts/git/pre-commit.sh --skip-quality - -# Bypass hook (not recommended) -git commit --no-verify -``` - ### CI Workflow Fails 1. Check the workflow logs in GitHub Actions -2. Run the same checks locally: +2. Run validation locally: ```bash - ./scripts/validate/manifest.sh - ./scripts/validate/php_syntax.sh + make validate-required make quality ``` @@ -326,7 +290,6 @@ make test ```bash # Setup make dev-setup # Initial setup -./scripts/git/install-hooks.sh # Install hooks # Development make validate-required # Quick validation @@ -342,7 +305,6 @@ make fix-permissions # Fix script permissions # Help make help # Show all commands -./scripts/run/validate_all.sh --help # Script help ``` --- diff --git a/docs/WORKFLOW_GUIDE.md b/docs/WORKFLOW_GUIDE.md index 5465a11..ab690a8 100644 --- a/docs/WORKFLOW_GUIDE.md +++ b/docs/WORKFLOW_GUIDE.md @@ -244,8 +244,7 @@ git checkout dev/X.Y.Z vim src/templates/index.php # 4. Validate locally -./scripts/validate/php_syntax.sh -./scripts/validate/manifest.sh +make validate-required # 5.
Commit and push git add -A @@ -257,23 +256,17 @@ git push origin dev/X.Y.Z ```bash # Run comprehensive validation suite -./scripts/run/validate_all.sh +make validate-required -# Run with verbose output -./scripts/run/validate_all.sh -v - -# Run smoke tests -./scripts/run/smoke_test.sh +# Run quality checks +make quality ``` ### Creating a Release Package ```bash # Package with auto-detected version -./scripts/release/package_extension.sh - -# Package with specific version -./scripts/release/package_extension.sh dist 03.05.00 +make package # Verify package contents unzip -l dist/moko-cassiopeia-*.zip @@ -283,21 +276,12 @@ unzip -l dist/moko-cassiopeia-*.zip ```bash # Via GitHub Actions (recommended) -# Actions โ†’ Create version branch - -# Or manually with scripts -./scripts/fix/versions.sh 03.05.00 +# Actions โ†’ Release Management workflow ``` ### Updating CHANGELOG -```bash -# Add new version entry -./scripts/release/update_changelog.sh 03.05.00 - -# Update release dates -./scripts/release/update_dates.sh 2025-01-15 03.05.00 -``` +Update CHANGELOG.md manually or via pull request following the existing format.
## Troubleshooting @@ -309,8 +295,8 @@ unzip -l dist/moko-cassiopeia-*.zip # Check specific file php -l src/templates/index.php -# Run validation script -./scripts/validate/php_syntax.sh +# Run validation +make validate-required ``` #### Manifest Validation Failed @@ -326,22 +312,12 @@ php -l src/templates/index.php #### Version Alignment Issues ```bash -# Check version in manifest matches CHANGELOG -./scripts/validate/version_alignment.sh - -# Fix versions -./scripts/fix/versions.sh 03.05.00 +# Check version consistency +make validate-required ``` ### Workflow Failures -#### "Permission denied" on scripts - -```bash -# Fix script permissions -chmod +x scripts/**/*.sh -``` - #### "Branch already exists" ```bash @@ -454,7 +430,6 @@ phpcs --standard=phpcs.xml --report=source src/ - [Main README](../README.md) - Project overview - [Joomla Development Guide](./JOOMLA_DEVELOPMENT.md) - Testing and quality -- [Scripts README](../scripts/README.md) - Script documentation - [CHANGELOG](../CHANGELOG.md) - Version history - [CONTRIBUTING](../CONTRIBUTING.md) - Contribution guidelines From 6464ed8d4325fa540370c6199ebb89d8f46cd572 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Sun, 18 Jan 2026 01:43:10 +0000 Subject: [PATCH 4/6] Fix remaining script references in documentation Co-authored-by: jmiller-moko <230051081+jmiller-moko@users.noreply.github.com> --- docs/WORKFLOW_GUIDE.md | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/docs/WORKFLOW_GUIDE.md b/docs/WORKFLOW_GUIDE.md index ab690a8..f538784 100644 --- a/docs/WORKFLOW_GUIDE.md +++ b/docs/WORKFLOW_GUIDE.md @@ -219,7 +219,7 @@ dev/X.Y.Z โ†’ rc/X.Y.Z โ†’ version/X.Y.Z โ†’ main **How to run:** 1. Go to Actions โ†’ Repo Health 2. Click "Run workflow" -3. Select profile (all/release/scripts/repo) +3. Select profile (all/release/repo) 4. 
Click "Run workflow" **Profiles:** @@ -302,11 +302,8 @@ make validate-required #### Manifest Validation Failed ```bash -# Validate manifest XML -./scripts/validate/manifest.sh - -# Check XML well-formedness -./scripts/validate/xml_wellformed.sh +# Validate manifest and XML files +make validate-required ``` #### Version Alignment Issues From 75d1590af73faf18ed131e2bfe407397f5a2f6b3 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Sun, 18 Jan 2026 01:49:34 +0000 Subject: [PATCH 5/6] Move social-media-demo.css from colors/ to global/ directory Co-authored-by: jmiller-moko <230051081+jmiller-moko@users.noreply.github.com> --- src/media/css/{colors => global}/social-media-demo.css | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename src/media/css/{colors => global}/social-media-demo.css (100%) diff --git a/src/media/css/colors/social-media-demo.css b/src/media/css/global/social-media-demo.css similarity index 100% rename from src/media/css/colors/social-media-demo.css rename to src/media/css/global/social-media-demo.css From 51238139c104ab9fb09fe741030107b7c945dc76 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Sun, 18 Jan 2026 01:53:24 +0000 Subject: [PATCH 6/6] Add social-media-demo.css to joomla.asset.json and index.php Co-authored-by: jmiller-moko <230051081+jmiller-moko@users.noreply.github.com> --- src/templates/index.php | 4 ++++ src/templates/joomla.asset.json | 5 +++++ 2 files changed, 9 insertions(+) diff --git a/src/templates/index.php b/src/templates/index.php index 3fbaf72..e4783c6 100644 --- a/src/templates/index.php +++ b/src/templates/index.php @@ -98,6 +98,10 @@ $wa->useStyle('template.user'); // css/user.css // Optional vendor CSS $wa->useStyle('vendor.bootstrap-toc'); +// Optional demo/showcase CSS (available for use, not loaded by default) +// To use: Add 'template.global.social-media-demo' to your article/module +// 
$wa->useStyle('template.global.social-media-demo'); + // Color theme (light + optional dark) $colorLightKey = strtolower(preg_replace('/[^a-z0-9_.-]/i', '', $params_LightColorName)); $colorDarkKey = strtolower(preg_replace('/[^a-z0-9_.-]/i', '', $params_DarkColorName)); diff --git a/src/templates/joomla.asset.json b/src/templates/joomla.asset.json index 782098c..ece81e1 100644 --- a/src/templates/joomla.asset.json +++ b/src/templates/joomla.asset.json @@ -235,6 +235,11 @@ "name": "vendor.fa7free.solid.min", "type": "style", "uri": "media/templates/site/moko-cassiopeia/vendor/fa7free/css/solid.min.css" + }, + { + "name": "template.global.social-media-demo", + "type": "style", + "uri": "media/templates/site/moko-cassiopeia/css/global/social-media-demo.css" } ] }