Sync with MokoStandards: Remove local scripts and workflows #57

Merged
Copilot merged 6 commits from copilot/sync-mokostandards-requirements into main 2026-01-18 01:56:31 +00:00
37 changed files with 420 additions and 7633 deletions

View File

@@ -50,7 +50,7 @@ permissions:
jobs:
ci:
name: Repository Validation Pipeline
uses: ./.github/workflows/reusable-ci-validation.yml
uses: mokoconsulting-tech/MokoStandards/.github/workflows/reusable-ci-validation.yml@main
with:
profile: full
secrets: inherit

View File

@@ -1,222 +0,0 @@
# Copyright (C) 2026 Moko Consulting <hello@mokoconsulting.tech>
#
# This file is part of a Moko Consulting project.
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# FILE INFORMATION
# DEFGROUP: GitHub.Workflow
# INGROUP: MokoStandards.Reusable
# REPO: https://github.com/mokoconsulting-tech/MokoStandards
# PATH: /.github/workflows/reusable-build.yml
# VERSION: 01.00.00
# BRIEF: Reusable type-aware build workflow for Joomla, Dolibarr, and generic projects
# NOTE: Automatically detects project type and applies appropriate build steps
name: Reusable Build
on:
  # Callable only from other workflows (workflow_call); never triggered
  # directly by repository events.
  workflow_call:
    inputs:
      php-version:
        description: 'PHP version to use for build'
        required: false
        type: string
        default: '8.1'
      node-version:
        description: 'Node.js version to use for build'
        required: false
        type: string
        default: '20.x'
      working-directory:
        description: 'Working directory for build'
        required: false
        type: string
        default: '.'
      upload-artifacts:
        description: 'Upload build artifacts'
        required: false
        type: boolean
        default: true
      artifact-name:
        description: 'Name for uploaded artifacts'
        required: false
        type: string
        default: 'build-artifacts'
# Read-only token: this workflow only builds sources and uploads artifacts.
permissions:
  contents: read
jobs:
  detect:
    name: Detect Project Type
    # Sibling reusable workflow; expected to expose outputs such as
    # project-type, extension-type, has-php and has-node, which gate the
    # build steps below. NOTE(review): its contract is not visible here —
    # confirm output names against reusable-project-detector.yml.
    uses: ./.github/workflows/reusable-project-detector.yml
    with:
      working-directory: ${{ inputs.working-directory }}
build:
name: Build (${{ needs.detect.outputs.project-type }})
runs-on: ubuntu-latest
needs: detect
steps:
- name: Checkout repository
uses: actions/checkout@v6
- name: Setup PHP
if: needs.detect.outputs.has-php == 'true'
uses: shivammathur/setup-php@v2
with:
php-version: ${{ inputs.php-version }}
extensions: mbstring, xml, zip, json
tools: composer:v2
- name: Setup Node.js
if: needs.detect.outputs.has-node == 'true'
uses: actions/setup-node@v6
with:
node-version: ${{ inputs.node-version }}
- name: Get Composer cache directory
if: needs.detect.outputs.has-php == 'true'
id: composer-cache
run: echo "dir=$(composer config cache-files-dir)" >> $GITHUB_OUTPUT
- name: Cache Composer dependencies
if: needs.detect.outputs.has-php == 'true'
uses: actions/cache@v5
with:
path: ${{ steps.composer-cache.outputs.dir }}
key: ${{ runner.os }}-composer-${{ hashFiles('**/composer.lock') }}
restore-keys: ${{ runner.os }}-composer-
- name: Cache Node modules
if: needs.detect.outputs.has-node == 'true'
uses: actions/cache@v5
with:
path: ~/.npm
key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
restore-keys: ${{ runner.os }}-node-
- name: Install PHP dependencies
if: needs.detect.outputs.has-php == 'true'
working-directory: ${{ inputs.working-directory }}
run: |
if [ -f "composer.json" ]; then
composer install --prefer-dist --no-progress --no-interaction
echo "✅ Composer dependencies installed" >> $GITHUB_STEP_SUMMARY
fi
- name: Install Node dependencies
if: needs.detect.outputs.has-node == 'true'
working-directory: ${{ inputs.working-directory }}
run: |
if [ -f "package.json" ]; then
npm ci
echo "✅ npm dependencies installed" >> $GITHUB_STEP_SUMMARY
fi
- name: Build Joomla Extension
if: needs.detect.outputs.project-type == 'joomla'
working-directory: ${{ inputs.working-directory }}
run: |
echo "### 🏗️ Building Joomla Extension" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "**Extension Type:** ${{ needs.detect.outputs.extension-type }}" >> $GITHUB_STEP_SUMMARY
# Run npm build if package.json has build script
if [ -f "package.json" ] && grep -q '"build"' package.json; then
echo "Running npm build..."
npm run build
echo "- ✅ npm build completed" >> $GITHUB_STEP_SUMMARY
fi
# Run composer scripts if available
if [ -f "composer.json" ] && grep -q '"build"' composer.json; then
echo "Running composer build..."
composer run-script build
echo "- ✅ composer build completed" >> $GITHUB_STEP_SUMMARY
fi
echo "- ✅ Joomla extension build completed" >> $GITHUB_STEP_SUMMARY
- name: Build Dolibarr Module
if: needs.detect.outputs.project-type == 'dolibarr'
working-directory: ${{ inputs.working-directory }}
run: |
echo "### 🏗️ Building Dolibarr Module" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
# Run npm build if available
if [ -f "package.json" ] && grep -q '"build"' package.json; then
echo "Running npm build..."
npm run build
echo "- ✅ npm build completed" >> $GITHUB_STEP_SUMMARY
fi
# Install Dolibarr-specific dependencies
if [ -f "composer.json" ]; then
composer install --no-dev --optimize-autoloader
echo "- ✅ Production dependencies installed" >> $GITHUB_STEP_SUMMARY
fi
echo "- ✅ Dolibarr module build completed" >> $GITHUB_STEP_SUMMARY
- name: Build Generic Project
if: needs.detect.outputs.project-type == 'generic'
working-directory: ${{ inputs.working-directory }}
run: |
echo "### 🏗️ Building Generic Project" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
# Try various build methods
if [ -f "package.json" ] && grep -q '"build"' package.json; then
npm run build
echo "- ✅ npm build completed" >> $GITHUB_STEP_SUMMARY
fi
if [ -f "Makefile" ]; then
make build 2>/dev/null || echo "- Makefile build not available" >> $GITHUB_STEP_SUMMARY
fi
echo "- ✅ Generic project build completed" >> $GITHUB_STEP_SUMMARY
- name: Verify build output
working-directory: ${{ inputs.working-directory }}
run: |
echo "### 📦 Build Output" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
# Check for common build output directories
for dir in dist build public out; do
if [ -d "$dir" ]; then
echo "- ✅ Found build output: \`$dir/\`" >> $GITHUB_STEP_SUMMARY
du -sh "$dir" >> $GITHUB_STEP_SUMMARY
fi
done
- name: Upload build artifacts
if: inputs.upload-artifacts
uses: actions/upload-artifact@v6
with:
name: ${{ inputs.artifact-name }}-${{ needs.detect.outputs.project-type }}
path: |
${{ inputs.working-directory }}/dist/
${{ inputs.working-directory }}/build/
${{ inputs.working-directory }}/public/
${{ inputs.working-directory }}/out/
retention-days: 7
if-no-files-found: ignore

View File

@@ -1,534 +0,0 @@
# Copyright (C) 2026 Moko Consulting <hello@mokoconsulting.tech>
#
# This file is part of a Moko Consulting project.
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# FILE INFORMATION
# DEFGROUP: GitHub.Workflow
# INGROUP: MokoStandards.Reusable
# REPO: https://github.com/mokoconsulting-tech/MokoStandards
# PATH: /.github/workflows/reusable-ci-validation.yml
# VERSION: 01.00.00
# BRIEF: Reusable CI validation workflow for repository standards enforcement
# NOTE: Supports multiple validation profiles (basic, full, strict) with configurable checks
name: Reusable CI Validation
on:
  # Callable only from other workflows (workflow_call).
  workflow_call:
    inputs:
      profile:
        description: 'Validation profile (basic, full, strict)'
        required: false
        type: string
        default: 'basic'
      node-version:
        description: 'Node.js version for frontend validation'
        required: false
        type: string
        default: '20.x'
      php-version:
        description: 'PHP version for backend validation'
        required: false
        type: string
        default: '8.1'
      working-directory:
        description: 'Working directory for validation'
        required: false
        type: string
        default: '.'
      validate-manifests:
        description: 'Validate XML manifests (Joomla/Dolibarr)'
        required: false
        type: boolean
        default: true
      validate-changelogs:
        description: 'Validate CHANGELOG.md format and structure'
        required: false
        type: boolean
        default: true
      validate-licenses:
        description: 'Validate license headers in source files'
        required: false
        type: boolean
        default: true
      validate-security:
        description: 'Check for secrets and security issues'
        required: false
        type: boolean
        default: true
      fail-on-warnings:
        description: 'Fail the workflow on validation warnings'
        required: false
        type: boolean
        default: false
# pull-requests/checks write access is requested for reporting; the jobs
# visible in this file only read repository contents.
permissions:
  contents: read
  pull-requests: write
  checks: write
jobs:
  # Probes the repository once and publishes feature flags (has-php,
  # has-node, has-scripts) consumed by the downstream validation jobs.
  setup:
    name: Setup Validation Environment
    runs-on: ubuntu-latest
    outputs:
      has-php: ${{ steps.detect.outputs.has-php }}
      has-node: ${{ steps.detect.outputs.has-node }}
      has-scripts: ${{ steps.detect.outputs.has-scripts }}
    steps:
      - name: Checkout repository
        uses: actions/checkout@v6
        with:
          fetch-depth: 0
      - name: Detect project components
        id: detect
        working-directory: ${{ inputs.working-directory }}
        run: |
          # Detect PHP files (head -1 stops the find after the first hit)
          if find . -name "*.php" -type f | head -1 | grep -q .; then
            echo "has-php=true" >> $GITHUB_OUTPUT
            echo "✅ PHP files detected" >> $GITHUB_STEP_SUMMARY
          else
            echo "has-php=false" >> $GITHUB_OUTPUT
            echo " No PHP files detected" >> $GITHUB_STEP_SUMMARY
          fi
          # Detect Node.js project
          if [ -f "package.json" ]; then
            echo "has-node=true" >> $GITHUB_OUTPUT
            echo "✅ Node.js project detected" >> $GITHUB_STEP_SUMMARY
          else
            echo "has-node=false" >> $GITHUB_OUTPUT
            echo " No Node.js project detected" >> $GITHUB_STEP_SUMMARY
          fi
          # Detect validation scripts in either supported location
          if [ -d "scripts/validate" ] || [ -d ".github/scripts/validate" ]; then
            echo "has-scripts=true" >> $GITHUB_OUTPUT
            echo "✅ Validation scripts found" >> $GITHUB_STEP_SUMMARY
          else
            echo "has-scripts=false" >> $GITHUB_OUTPUT
            echo " No validation scripts found" >> $GITHUB_STEP_SUMMARY
          fi
required-validations:
name: Required Validations
runs-on: ubuntu-latest
needs: setup
steps:
- name: Checkout repository
uses: actions/checkout@v6
with:
fetch-depth: 0
- name: Normalize line endings
run: git config --global core.autocrlf false
- name: Setup PHP
if: needs.setup.outputs.has-php == 'true'
uses: shivammathur/setup-php@v2
with:
php-version: ${{ inputs.php-version }}
extensions: mbstring, xml
coverage: none
- name: Setup Node.js
if: needs.setup.outputs.has-node == 'true'
uses: actions/setup-node@v6
with:
node-version: ${{ inputs.node-version }}
- name: Make scripts executable
if: needs.setup.outputs.has-scripts == 'true'
working-directory: ${{ inputs.working-directory }}
run: |
if [ -d "scripts" ]; then
find scripts -name "*.sh" -type f -exec chmod +x {} \;
fi
if [ -d ".github/scripts" ]; then
find .github/scripts -name "*.sh" -type f -exec chmod +x {} \;
fi
- name: Validate XML manifests
if: inputs.validate-manifests
working-directory: ${{ inputs.working-directory }}
run: |
echo "### 📋 Manifest Validation" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
if [ -f "scripts/validate/manifest.sh" ]; then
echo "Running manifest validation script..." >> $GITHUB_STEP_SUMMARY
if ./scripts/validate/manifest.sh 2>&1 | tee -a $GITHUB_STEP_SUMMARY; then
echo "✅ Manifest validation passed" >> $GITHUB_STEP_SUMMARY
else
echo "❌ Manifest validation script failed" >> $GITHUB_STEP_SUMMARY
exit 1
fi
elif [ -f ".github/scripts/validate/manifest.sh" ]; then
echo "Running manifest validation script..." >> $GITHUB_STEP_SUMMARY
if ./.github/scripts/validate/manifest.sh 2>&1 | tee -a $GITHUB_STEP_SUMMARY; then
echo "✅ Manifest validation passed" >> $GITHUB_STEP_SUMMARY
else
echo "❌ Manifest validation script failed" >> $GITHUB_STEP_SUMMARY
exit 1
fi
elif command -v xmllint >/dev/null 2>&1; then
# Basic XML validation using xmllint
echo "Using xmllint for basic XML validation..." >> $GITHUB_STEP_SUMMARY
XML_FOUND=false
ERROR_FOUND=false
while IFS= read -r file; do
XML_FOUND=true
if ! xmllint --noout "$file" 2>&1 | tee -a $GITHUB_STEP_SUMMARY; then
echo "❌ Invalid XML: $file" >> $GITHUB_STEP_SUMMARY
ERROR_FOUND=true
fi
done < <(find . -name "*.xml" -type f ! -path "*/node_modules/*" ! -path "*/.git/*")
if [ "$ERROR_FOUND" = true ]; then
echo "" >> $GITHUB_STEP_SUMMARY
echo "❌ XML validation failed" >> $GITHUB_STEP_SUMMARY
exit 1
elif [ "$XML_FOUND" = true ]; then
echo "" >> $GITHUB_STEP_SUMMARY
echo "✅ Basic XML validation passed" >> $GITHUB_STEP_SUMMARY
else
echo " No XML files found to validate" >> $GITHUB_STEP_SUMMARY
fi
else
echo " No manifest validation script or xmllint available" >> $GITHUB_STEP_SUMMARY
echo "Skipping XML validation" >> $GITHUB_STEP_SUMMARY
fi
- name: Validate XML well-formedness
working-directory: ${{ inputs.working-directory }}
run: |
if [ -f "scripts/validate/xml_wellformed.sh" ]; then
./scripts/validate/xml_wellformed.sh
elif [ -f ".github/scripts/validate/xml_wellformed.sh" ]; then
./.github/scripts/validate/xml_wellformed.sh
else
echo " No XML well-formedness validation script found, skipping"
fi
- name: Validate PHP syntax
if: needs.setup.outputs.has-php == 'true'
working-directory: ${{ inputs.working-directory }}
run: |
echo "### 🔍 PHP Syntax Validation" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
if [ -f "scripts/validate/php_syntax.sh" ]; then
echo "Running PHP syntax validation script..." >> $GITHUB_STEP_SUMMARY
if ./scripts/validate/php_syntax.sh 2>&1 | tee -a $GITHUB_STEP_SUMMARY; then
echo "✅ PHP syntax validation passed" >> $GITHUB_STEP_SUMMARY
else
echo "❌ PHP syntax validation script failed" >> $GITHUB_STEP_SUMMARY
exit 1
fi
elif [ -f ".github/scripts/validate/php_syntax.sh" ]; then
echo "Running PHP syntax validation script..." >> $GITHUB_STEP_SUMMARY
if ./.github/scripts/validate/php_syntax.sh 2>&1 | tee -a $GITHUB_STEP_SUMMARY; then
echo "✅ PHP syntax validation passed" >> $GITHUB_STEP_SUMMARY
else
echo "❌ PHP syntax validation script failed" >> $GITHUB_STEP_SUMMARY
exit 1
fi
else
# Basic PHP syntax check
echo "Running basic PHP syntax check..." >> $GITHUB_STEP_SUMMARY
ERROR_FOUND=false
while IFS= read -r file; do
if ! php -l "$file" 2>&1 | tee -a $GITHUB_STEP_SUMMARY; then
echo "❌ Syntax error in: $file" >> $GITHUB_STEP_SUMMARY
ERROR_FOUND=true
fi
done < <(find . -name "*.php" -type f ! -path "*/vendor/*" ! -path "*/node_modules/*")
if [ "$ERROR_FOUND" = true ]; then
echo "" >> $GITHUB_STEP_SUMMARY
echo "❌ PHP syntax errors found" >> $GITHUB_STEP_SUMMARY
exit 1
else
echo "" >> $GITHUB_STEP_SUMMARY
echo "✅ PHP syntax validation passed" >> $GITHUB_STEP_SUMMARY
fi
fi
optional-validations:
name: Optional Validations (${{ inputs.profile }})
runs-on: ubuntu-latest
needs: setup
if: inputs.profile != 'basic'
steps:
- name: Checkout repository
uses: actions/checkout@v6
with:
fetch-depth: 0
- name: Setup PHP
if: needs.setup.outputs.has-php == 'true'
uses: shivammathur/setup-php@v2
with:
php-version: ${{ inputs.php-version }}
extensions: mbstring, xml
coverage: none
- name: Make scripts executable
if: needs.setup.outputs.has-scripts == 'true'
working-directory: ${{ inputs.working-directory }}
run: |
if [ -d "scripts" ]; then
find scripts -name "*.sh" -type f -exec chmod +x {} \;
fi
if [ -d ".github/scripts" ]; then
find .github/scripts -name "*.sh" -type f -exec chmod +x {} \;
fi
- name: Validate changelog
if: inputs.validate-changelogs
continue-on-error: ${{ !inputs.fail-on-warnings }}
working-directory: ${{ inputs.working-directory }}
run: |
echo "### 📝 Changelog Validation" >> $GITHUB_STEP_SUMMARY
if [ -f "scripts/validate/changelog.sh" ]; then
./scripts/validate/changelog.sh
echo "✅ Changelog validation passed" >> $GITHUB_STEP_SUMMARY
elif [ -f ".github/scripts/validate/changelog.sh" ]; then
./.github/scripts/validate/changelog.sh
echo "✅ Changelog validation passed" >> $GITHUB_STEP_SUMMARY
elif [ -f "CHANGELOG.md" ]; then
# Basic changelog validation
if grep -q "## \[" CHANGELOG.md; then
echo "✅ Changelog appears well-formed" >> $GITHUB_STEP_SUMMARY
else
echo "⚠️ Changelog may not follow standard format" >> $GITHUB_STEP_SUMMARY
[ "${{ inputs.fail-on-warnings }}" = "true" ] && exit 1
fi
else
echo "⚠️ No CHANGELOG.md found" >> $GITHUB_STEP_SUMMARY
fi
- name: Validate license headers
if: inputs.validate-licenses
continue-on-error: ${{ !inputs.fail-on-warnings }}
working-directory: ${{ inputs.working-directory }}
run: |
echo "### ⚖️ License Header Validation" >> $GITHUB_STEP_SUMMARY
if [ -f "scripts/validate/license_headers.sh" ]; then
./scripts/validate/license_headers.sh
echo "✅ License headers validated" >> $GITHUB_STEP_SUMMARY
elif [ -f ".github/scripts/validate/license_headers.sh" ]; then
./.github/scripts/validate/license_headers.sh
echo "✅ License headers validated" >> $GITHUB_STEP_SUMMARY
else
# Basic license header check
COUNT_FILE=$(mktemp)
find . \( -name "*.php" -o -name "*.js" -o -name "*.py" \) -type f -exec sh -c 'if ! head -20 "$1" | grep -qi "license\|copyright\|spdx"; then echo "1"; fi' _ {} \; > "$COUNT_FILE"
FILES_WITHOUT_LICENSE=$(wc -l < "$COUNT_FILE")
rm -f "$COUNT_FILE"
if [ "$FILES_WITHOUT_LICENSE" -eq 0 ]; then
echo "✅ License headers appear present" >> $GITHUB_STEP_SUMMARY
else
echo "⚠️ Some files may be missing license headers" >> $GITHUB_STEP_SUMMARY
[ "${{ inputs.fail-on-warnings }}" = "true" ] && exit 1
fi
fi
- name: Validate language structure
continue-on-error: true
working-directory: ${{ inputs.working-directory }}
run: |
if [ -f "scripts/validate/language_structure.sh" ]; then
./scripts/validate/language_structure.sh
elif [ -f ".github/scripts/validate/language_structure.sh" ]; then
./.github/scripts/validate/language_structure.sh
else
echo " No language structure validation script found"
fi
- name: Validate paths
continue-on-error: true
working-directory: ${{ inputs.working-directory }}
run: |
if [ -f "scripts/validate/paths.sh" ]; then
./scripts/validate/paths.sh
elif [ -f ".github/scripts/validate/paths.sh" ]; then
./.github/scripts/validate/paths.sh
else
echo " No path validation script found"
fi
- name: Validate tabs/whitespace
continue-on-error: true
working-directory: ${{ inputs.working-directory }}
run: |
if [ -f "scripts/validate/tabs.sh" ]; then
./scripts/validate/tabs.sh
elif [ -f ".github/scripts/validate/tabs.sh" ]; then
./.github/scripts/validate/tabs.sh
else
echo " No tabs validation script found"
fi
- name: Validate version alignment
continue-on-error: ${{ !inputs.fail-on-warnings }}
working-directory: ${{ inputs.working-directory }}
run: |
if [ -f "scripts/validate/version_alignment.sh" ]; then
./scripts/validate/version_alignment.sh
elif [ -f ".github/scripts/validate/version_alignment.sh" ]; then
./.github/scripts/validate/version_alignment.sh
else
echo " No version alignment validation script found"
fi
security-validations:
name: Security Validations
runs-on: ubuntu-latest
if: inputs.validate-security
steps:
- name: Checkout repository
uses: actions/checkout@v6
with:
fetch-depth: 0
- name: Make scripts executable
working-directory: ${{ inputs.working-directory }}
run: |
if [ -d "scripts" ]; then
find scripts -name "*.sh" -type f -exec chmod +x {} \;
fi
if [ -d ".github/scripts" ]; then
find .github/scripts -name "*.sh" -type f -exec chmod +x {} \;
fi
- name: Check for secrets
working-directory: ${{ inputs.working-directory }}
run: |
echo "### 🔒 Security Validation" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
if [ -f "scripts/validate/no_secrets.sh" ]; then
if ./scripts/validate/no_secrets.sh 2>&1 | tee -a $GITHUB_STEP_SUMMARY; then
echo "✅ No secrets found" >> $GITHUB_STEP_SUMMARY
else
echo "❌ Secret validation script failed" >> $GITHUB_STEP_SUMMARY
exit 1
fi
elif [ -f ".github/scripts/validate/no_secrets.sh" ]; then
if ./.github/scripts/validate/no_secrets.sh 2>&1 | tee -a $GITHUB_STEP_SUMMARY; then
echo "✅ No secrets found" >> $GITHUB_STEP_SUMMARY
else
echo "❌ Secret validation script failed" >> $GITHUB_STEP_SUMMARY
exit 1
fi
else
# Basic secrets check using find to properly exclude directories
PATTERNS=(
"password\s*=\s*['\"][^'\"]+['\"]"
"api[_-]?key\s*=\s*['\"][^'\"]+['\"]"
"secret\s*=\s*['\"][^'\"]+['\"]"
"token\s*=\s*['\"][^'\"]+['\"]"
"BEGIN RSA PRIVATE KEY"
"BEGIN PRIVATE KEY"
)
FOUND=0
echo "Scanning for potential secrets..." >> $GITHUB_STEP_SUMMARY
for pattern in "${PATTERNS[@]}"; do
# Use find to exclude directories and files, then grep the results
while IFS= read -r file; do
if [ -f "$file" ]; then
if grep -HnE "$pattern" "$file" 2>/dev/null; then
FOUND=1
echo "⚠️ Found pattern in: $file" >> $GITHUB_STEP_SUMMARY
fi
fi
done < <(find . -type f \
! -path "*/.git/*" \
! -path "*/node_modules/*" \
! -path "*/vendor/*" \
! -path "*/.github/*" \
! -path "*/docs/*" \
! -name "*.md" \
2>/dev/null)
done
if [ $FOUND -eq 0 ]; then
echo "" >> $GITHUB_STEP_SUMMARY
echo "✅ Basic security check passed - no secrets detected" >> $GITHUB_STEP_SUMMARY
else
echo "" >> $GITHUB_STEP_SUMMARY
echo "❌ Potential secrets or credentials detected" >> $GITHUB_STEP_SUMMARY
echo "Please review the findings above and ensure they are test fixtures or documentation examples" >> $GITHUB_STEP_SUMMARY
exit 1
fi
fi
  # Aggregates the results of the three validation jobs into the step
  # summary and converts them into a single pass/fail signal. `if: always()`
  # ensures the table is written even when upstream jobs fail or are skipped.
  summary:
    name: Validation Summary
    runs-on: ubuntu-latest
    needs: [required-validations, optional-validations, security-validations]
    if: always()
    steps:
      - name: Generate validation summary
        run: |
          echo "### 🎯 CI Validation Summary" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "**Profile:** ${{ inputs.profile }}" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "| Validation Stage | Status |" >> $GITHUB_STEP_SUMMARY
          echo "|-----------------|--------|" >> $GITHUB_STEP_SUMMARY
          echo "| Required Validations | ${{ needs.required-validations.result == 'success' && '✅ Passed' || '❌ Failed' }} |" >> $GITHUB_STEP_SUMMARY
          echo "| Optional Validations | ${{ needs.optional-validations.result == 'success' && '✅ Passed' || needs.optional-validations.result == 'skipped' && '⏭️ Skipped' || '❌ Failed' }} |" >> $GITHUB_STEP_SUMMARY
          echo "| Security Validations | ${{ needs.security-validations.result == 'success' && '✅ Passed' || needs.security-validations.result == 'skipped' && '⏭️ Skipped' || '❌ Failed' }} |" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "**Repository:** $GITHUB_REPOSITORY" >> $GITHUB_STEP_SUMMARY
          echo "**Branch:** $GITHUB_REF_NAME" >> $GITHUB_STEP_SUMMARY
          echo "**Commit:** $GITHUB_SHA" >> $GITHUB_STEP_SUMMARY
      - name: Check validation results
        run: |
          # Required and security failures always fail the run; optional
          # failures only matter for the 'strict' profile.
          if [ "${{ needs.required-validations.result }}" == "failure" ]; then
            echo "❌ Required validations failed"
            exit 1
          fi
          if [ "${{ needs.security-validations.result }}" == "failure" ]; then
            echo "❌ Security validations failed"
            exit 1
          fi
          if [ "${{ inputs.profile }}" == "strict" ] && [ "${{ needs.optional-validations.result }}" == "failure" ]; then
            echo "❌ Optional validations failed in strict mode"
            exit 1
          fi
          echo "✅ CI validation completed successfully"

View File

@@ -1,312 +0,0 @@
# Copyright (C) 2026 Moko Consulting <hello@mokoconsulting.tech>
#
# This file is part of a Moko Consulting project.
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# FILE INFORMATION
# DEFGROUP: GitHub.Workflow
# INGROUP: MokoStandards.Reusable
# REPO: https://github.com/mokoconsulting-tech/MokoStandards
# PATH: /.github/workflows/reusable-deploy.yml
# VERSION: 01.00.00
# BRIEF: Reusable type-aware deployment workflow for staging and production
# NOTE: Supports Joomla, Dolibarr, and generic deployments with health checks
name: Reusable Deploy
on:
  # Callable only from other workflows (workflow_call).
  workflow_call:
    inputs:
      environment:
        description: 'Target environment (staging, production)'
        required: true
        type: string
      version:
        description: 'Version to deploy (optional, uses latest if not specified)'
        required: false
        type: string
      deployment-method:
        description: 'Deployment method (rsync, ftp, ssh, kubernetes, custom)'
        required: false
        type: string
        default: 'custom'
      health-check-url:
        description: 'URL to check after deployment'
        required: false
        type: string
      health-check-timeout:
        description: 'Health check timeout in seconds'
        required: false
        type: number
        default: 300
      working-directory:
        description: 'Working directory'
        required: false
        type: string
        default: '.'
    # Deployment credentials are passed in by the caller; all optional so
    # the 'custom' method can run without any of them.
    secrets:
      DEPLOY_HOST:
        description: 'Deployment host/server'
        required: false
      DEPLOY_USER:
        description: 'Deployment user'
        required: false
      DEPLOY_KEY:
        description: 'SSH private key or deployment credentials'
        required: false
      DEPLOY_PATH:
        description: 'Deployment path on target server'
        required: false
# deployments: write is needed to create/update GitHub Deployment records.
permissions:
  contents: read
  deployments: write
jobs:
  detect:
    name: Detect Project Type
    # Sibling reusable workflow; expected to output project-type
    # (joomla/dolibarr/generic) consumed by the deploy job.
    uses: ./.github/workflows/reusable-project-detector.yml
    with:
      working-directory: ${{ inputs.working-directory }}
  # Resolves the version to deploy and registers a GitHub Deployment record
  # whose id is handed to the deploy/rollback jobs for status updates.
  prepare:
    name: Prepare Deployment
    runs-on: ubuntu-latest
    needs: detect
    outputs:
      deployment-id: ${{ steps.create-deployment.outputs.deployment_id }}
      version: ${{ steps.version.outputs.version }}
    steps:
      - name: Checkout repository
        uses: actions/checkout@v6
        with:
          fetch-depth: 0  # full history so `git describe --tags` can see tags
      - name: Determine version
        id: version
        run: |
          # Explicit input wins; otherwise derive from git metadata.
          if [ -n "${{ inputs.version }}" ]; then
            VERSION="${{ inputs.version }}"
          else
            # Use latest tag or commit SHA
            VERSION=$(git describe --tags --always)
          fi
          echo "version=${VERSION}" >> $GITHUB_OUTPUT
          echo "Deploying version: ${VERSION}"
      - name: Create deployment
        id: create-deployment
        uses: chrnorm/deployment-action@v2
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          environment: ${{ inputs.environment }}
          description: "Deploy ${{ needs.detect.outputs.project-type }} v${{ steps.version.outputs.version }}"
      - name: Deployment info
        run: |
          echo "### 🚀 Deployment Preparation" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "**Environment:** ${{ inputs.environment }}" >> $GITHUB_STEP_SUMMARY
          echo "**Version:** ${{ steps.version.outputs.version }}" >> $GITHUB_STEP_SUMMARY
          echo "**Project Type:** ${{ needs.detect.outputs.project-type }}" >> $GITHUB_STEP_SUMMARY
          echo "**Method:** ${{ inputs.deployment-method }}" >> $GITHUB_STEP_SUMMARY
  # Produces the artifact bundle (named deployment-package-<project-type>)
  # that the deploy job downloads.
  build:
    name: Build for Deployment
    needs: [detect, prepare]
    uses: ./.github/workflows/reusable-build.yml
    with:
      working-directory: ${{ inputs.working-directory }}
      upload-artifacts: true
      artifact-name: deployment-package
  # Pushes the built artifacts to the target environment using the selected
  # method (rsync/ssh are implemented; 'custom' steps are placeholders),
  # then optionally polls a health-check URL and records deployment status.
  deploy:
    name: Deploy to ${{ inputs.environment }}
    runs-on: ubuntu-latest
    needs: [detect, prepare, build]
    environment:
      name: ${{ inputs.environment }}
      url: ${{ inputs.health-check-url }}
    steps:
      - name: Checkout repository
        uses: actions/checkout@v6
      - name: Download build artifacts
        uses: actions/download-artifact@v7
        with:
          # Must match the artifact name produced by reusable-build.yml
          # (artifact-name + "-" + project-type).
          name: deployment-package-${{ needs.detect.outputs.project-type }}
          path: ./dist
      - name: Setup SSH key
        if: inputs.deployment-method == 'ssh' || inputs.deployment-method == 'rsync'
        run: |
          mkdir -p ~/.ssh
          echo "${{ secrets.DEPLOY_KEY }}" > ~/.ssh/deploy_key
          chmod 600 ~/.ssh/deploy_key
          ssh-keyscan -H "${{ secrets.DEPLOY_HOST }}" >> ~/.ssh/known_hosts
      - name: Deploy via rsync
        if: inputs.deployment-method == 'rsync'
        run: |
          echo "Deploying via rsync to ${{ secrets.DEPLOY_HOST }}..."
          # --delete mirrors ./dist exactly, removing remote files that no
          # longer exist locally.
          rsync -avz --delete \
            -e "ssh -i ~/.ssh/deploy_key -o StrictHostKeyChecking=no" \
            ./dist/ \
            "${{ secrets.DEPLOY_USER }}@${{ secrets.DEPLOY_HOST }}:${{ secrets.DEPLOY_PATH }}"
          echo "✅ rsync deployment completed" >> $GITHUB_STEP_SUMMARY
      - name: Deploy via SSH
        if: inputs.deployment-method == 'ssh'
        run: |
          echo "Deploying via SSH to ${{ secrets.DEPLOY_HOST }}..."
          # Create deployment package
          tar -czf deployment.tar.gz -C ./dist .
          # Copy to server
          scp -i ~/.ssh/deploy_key deployment.tar.gz \
            "${{ secrets.DEPLOY_USER }}@${{ secrets.DEPLOY_HOST }}:/tmp/"
          # Extract on server. NOTE(review): quoting 'EOF' suppresses local
          # shell expansion, but ${{ secrets.DEPLOY_PATH }} is substituted by
          # Actions before the shell runs, so it still expands as intended.
          ssh -i ~/.ssh/deploy_key "${{ secrets.DEPLOY_USER }}@${{ secrets.DEPLOY_HOST }}" << 'EOF'
            cd ${{ secrets.DEPLOY_PATH }}
            tar -xzf /tmp/deployment.tar.gz
            rm /tmp/deployment.tar.gz
          EOF
          echo "✅ SSH deployment completed" >> $GITHUB_STEP_SUMMARY
      - name: Deploy Joomla Extension
        if: needs.detect.outputs.project-type == 'joomla' && inputs.deployment-method == 'custom'
        run: |
          echo "### 🔧 Joomla Extension Deployment" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          # Custom Joomla deployment logic (placeholder only — no deployment
          # is actually performed by this step)
          echo "⚠️ Custom Joomla deployment logic required" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "Typical steps:" >> $GITHUB_STEP_SUMMARY
          echo "1. Upload extension package to Joomla server" >> $GITHUB_STEP_SUMMARY
          echo "2. Install/update via Joomla Extension Manager API" >> $GITHUB_STEP_SUMMARY
          echo "3. Clear Joomla cache" >> $GITHUB_STEP_SUMMARY
          echo "4. Run database migrations if needed" >> $GITHUB_STEP_SUMMARY
          # Placeholder for actual deployment commands
          echo "Add your Joomla-specific deployment commands here"
      - name: Deploy Dolibarr Module
        if: needs.detect.outputs.project-type == 'dolibarr' && inputs.deployment-method == 'custom'
        run: |
          echo "### 🔧 Dolibarr Module Deployment" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          # Custom Dolibarr deployment logic (placeholder only)
          echo "⚠️ Custom Dolibarr deployment logic required" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "Typical steps:" >> $GITHUB_STEP_SUMMARY
          echo "1. Upload module to Dolibarr htdocs/custom directory" >> $GITHUB_STEP_SUMMARY
          echo "2. Activate module via Dolibarr API or admin panel" >> $GITHUB_STEP_SUMMARY
          echo "3. Run module setup hooks" >> $GITHUB_STEP_SUMMARY
          echo "4. Clear Dolibarr cache" >> $GITHUB_STEP_SUMMARY
          # Placeholder for actual deployment commands
          echo "Add your Dolibarr-specific deployment commands here"
      - name: Deploy Generic Application
        if: needs.detect.outputs.project-type == 'generic' && inputs.deployment-method == 'custom'
        run: |
          echo "### 🔧 Generic Application Deployment" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "⚠️ Custom deployment logic required" >> $GITHUB_STEP_SUMMARY
          echo "Add your application-specific deployment commands" >> $GITHUB_STEP_SUMMARY
      - name: Health check
        if: inputs.health-check-url != ''
        run: |
          echo "Running health check on ${{ inputs.health-check-url }}..."
          # Poll every INTERVAL seconds until the URL returns HTTP 200 or
          # the configured timeout elapses.
          TIMEOUT=${{ inputs.health-check-timeout }}
          ELAPSED=0
          INTERVAL=10
          while [ $ELAPSED -lt $TIMEOUT ]; do
            if curl -f -s -o /dev/null -w "%{http_code}" "${{ inputs.health-check-url }}" | grep -q "200"; then
              echo "✅ Health check passed" >> $GITHUB_STEP_SUMMARY
              exit 0
            fi
            echo "Health check attempt $((ELAPSED / INTERVAL + 1)) failed, retrying..."
            sleep $INTERVAL
            ELAPSED=$((ELAPSED + INTERVAL))
          done
          echo "❌ Health check failed after ${TIMEOUT}s" >> $GITHUB_STEP_SUMMARY
          exit 1
      - name: Update deployment status (success)
        if: success()
        uses: chrnorm/deployment-status@v2
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          deployment-id: ${{ needs.prepare.outputs.deployment-id }}
          state: success
          environment-url: ${{ inputs.health-check-url }}
      - name: Deployment summary
        if: success()
        run: |
          echo "### ✅ Deployment Successful" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "**Environment:** ${{ inputs.environment }}" >> $GITHUB_STEP_SUMMARY
          echo "**Version:** ${{ needs.prepare.outputs.version }}" >> $GITHUB_STEP_SUMMARY
          echo "**Project Type:** ${{ needs.detect.outputs.project-type }}" >> $GITHUB_STEP_SUMMARY
          echo "**Time:** $(date -u +"%Y-%m-%d %H:%M:%S UTC")" >> $GITHUB_STEP_SUMMARY
          if [ -n "${{ inputs.health-check-url }}" ]; then
            echo "**URL:** ${{ inputs.health-check-url }}" >> $GITHUB_STEP_SUMMARY
          fi
  # Runs only when an upstream job fails: marks the GitHub Deployment record
  # as failed and emits rollback guidance (actual rollback commands are a
  # placeholder to be filled in per project).
  rollback:
    name: Rollback on Failure
    runs-on: ubuntu-latest
    needs: [prepare, deploy]
    if: failure()
    steps:
      - name: Update deployment status (failure)
        uses: chrnorm/deployment-status@v2
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          deployment-id: ${{ needs.prepare.outputs.deployment-id }}
          state: failure
      - name: Rollback deployment
        run: |
          echo "### ❌ Deployment Failed - Initiating Rollback" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "⚠️ Rollback logic needs to be implemented" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "Typical rollback steps:" >> $GITHUB_STEP_SUMMARY
          echo "1. Restore previous version from backup" >> $GITHUB_STEP_SUMMARY
          echo "2. Revert database migrations if applied" >> $GITHUB_STEP_SUMMARY
          echo "3. Clear caches" >> $GITHUB_STEP_SUMMARY
          echo "4. Verify health checks pass" >> $GITHUB_STEP_SUMMARY
          # Add your rollback commands here
View File

@@ -1,356 +0,0 @@
# Copyright (C) 2026 Moko Consulting <hello@mokoconsulting.tech>
#
# This file is part of a Moko Consulting project.
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# FILE INFORMATION
# DEFGROUP: GitHub.Workflow
# INGROUP: MokoStandards.Reusable
# REPO: https://github.com/mokoconsulting-tech/MokoStandards
# PATH: /.github/workflows/reusable-joomla-testing.yml
# VERSION: 01.00.00
# BRIEF: Reusable Joomla testing workflow with matrix PHP/Joomla versions
# NOTE: Supports PHPUnit, integration tests, and code coverage
name: Reusable Joomla Testing
# Callable-only workflow (workflow_call): consumed by other repos/workflows.
on:
workflow_call:
inputs:
# JSON-encoded arrays because workflow_call inputs cannot be lists;
# consumers decode them with fromJSON() in the matrix definitions below.
php-versions:
description: 'JSON array of PHP versions to test'
required: false
type: string
default: '["7.4", "8.0", "8.1", "8.2"]'
joomla-versions:
description: 'JSON array of Joomla versions to test'
required: false
type: string
default: '["4.4", "5.0", "5.1"]'
coverage:
description: 'Enable code coverage reporting'
required: false
type: boolean
default: false
# Coverage is collected on exactly one (php, joomla) matrix cell to avoid
# duplicate Codecov uploads; these two inputs pick that cell.
coverage-php-version:
description: 'PHP version to use for coverage reporting'
required: false
type: string
default: '8.1'
coverage-joomla-version:
description: 'Joomla version to use for coverage reporting'
required: false
type: string
default: '5.0'
working-directory:
description: 'Working directory for tests'
required: false
type: string
default: '.'
run-integration-tests:
description: 'Run integration tests with Joomla installation'
required: false
type: boolean
default: true
secrets:
CODECOV_TOKEN:
description: 'Codecov token for coverage uploads'
required: false
permissions:
contents: read
pull-requests: write
checks: write
jobs:
# Matrix PHPUnit run over every (php-version, joomla-version) pair, minus
# explicitly excluded incompatible combinations.
unit-tests:
name: PHPUnit (PHP ${{ matrix.php-version }}, Joomla ${{ matrix.joomla-version }})
runs-on: ubuntu-latest
strategy:
# Keep all cells running even if one fails, so the summary shows full results.
fail-fast: false
matrix:
php-version: ${{ fromJSON(inputs.php-versions) }}
joomla-version: ${{ fromJSON(inputs.joomla-versions) }}
exclude:
# PHP 7.4 not compatible with Joomla 5.x
- php-version: '7.4'
joomla-version: '5.0'
- php-version: '7.4'
joomla-version: '5.1'
steps:
- name: Checkout repository
uses: actions/checkout@v6
- name: Setup PHP ${{ matrix.php-version }}
uses: shivammathur/setup-php@v2
with:
php-version: ${{ matrix.php-version }}
extensions: mbstring, xml, mysqli, zip, gd, intl
# Xdebug only on the single designated coverage cell; 'none' elsewhere
# keeps the other matrix cells fast.
coverage: ${{ inputs.coverage && matrix.php-version == inputs.coverage-php-version && matrix.joomla-version == inputs.coverage-joomla-version && 'xdebug' || 'none' }}
tools: composer:v2
- name: Get Composer cache directory
id: composer-cache
run: echo "dir=$(composer config cache-files-dir)" >> $GITHUB_OUTPUT
- name: Cache Composer dependencies
uses: actions/cache@v5
with:
path: ${{ steps.composer-cache.outputs.dir }}
key: ${{ runner.os }}-php-${{ matrix.php-version }}-composer-${{ hashFiles('**/composer.lock') }}
restore-keys: |
${{ runner.os }}-php-${{ matrix.php-version }}-composer-
${{ runner.os }}-php-
- name: Validate composer.json
working-directory: ${{ inputs.working-directory }}
run: |
if [ -f "composer.json" ]; then
composer validate --strict
else
echo "No composer.json found, skipping validation"
fi
- name: Install dependencies
working-directory: ${{ inputs.working-directory }}
run: |
if [ -f "composer.json" ]; then
composer install --prefer-dist --no-progress --no-interaction
else
echo "No composer.json found, skipping dependency installation"
fi
# NOTE(review): currently only exports JOOMLA_VERSION to the environment;
# no Joomla install happens here.
- name: Setup Joomla test environment
working-directory: ${{ inputs.working-directory }}
run: |
echo "Setting up Joomla ${{ matrix.joomla-version }} test environment"
# Add Joomla-specific environment variables
echo "JOOMLA_VERSION=${{ matrix.joomla-version }}" >> $GITHUB_ENV
# Runs PHPUnit via vendor binary if present, falls back to the composer
# package path, and exits 0 when no PHPUnit configuration exists at all.
- name: Run PHPUnit tests
working-directory: ${{ inputs.working-directory }}
run: |
if [ -f "vendor/bin/phpunit" ]; then
if [ "${{ inputs.coverage && matrix.php-version == inputs.coverage-php-version && matrix.joomla-version == inputs.coverage-joomla-version }}" == "true" ]; then
vendor/bin/phpunit --coverage-text --coverage-clover=coverage.xml
else
vendor/bin/phpunit
fi
elif [ -f "phpunit.xml" ] || [ -f "phpunit.xml.dist" ]; then
if [ "${{ inputs.coverage && matrix.php-version == inputs.coverage-php-version && matrix.joomla-version == inputs.coverage-joomla-version }}" == "true" ]; then
php vendor/phpunit/phpunit/phpunit --coverage-text --coverage-clover=coverage.xml
else
php vendor/phpunit/phpunit/phpunit
fi
else
echo "⚠️ No PHPUnit configuration found, skipping tests"
exit 0
fi
# Upload is gated to the single coverage matrix cell; fail_ci_if_error=false
# keeps Codecov outages from failing CI.
- name: Upload coverage to Codecov
uses: codecov/codecov-action@v5
if: inputs.coverage && matrix.php-version == inputs.coverage-php-version && matrix.joomla-version == inputs.coverage-joomla-version
with:
file: ${{ inputs.working-directory }}/coverage.xml
flags: unittests,php-${{ matrix.php-version }},joomla-${{ matrix.joomla-version }}
name: codecov-joomla-${{ matrix.php-version }}-${{ matrix.joomla-version }}
token: ${{ secrets.CODECOV_TOKEN }}
fail_ci_if_error: false
# Integration test job: boots a MySQL service container, downloads a Joomla
# release into /tmp/joomla, writes a minimal configuration.php, and runs the
# repository's tests/Integration suite. Best-effort throughout: download,
# configure, and install steps degrade to warnings instead of failing.
integration-tests:
name: Integration (Joomla ${{ matrix.joomla-version }})
runs-on: ubuntu-latest
if: inputs.run-integration-tests
services:
mysql:
image: mysql:8.0
env:
MYSQL_ROOT_PASSWORD: root
MYSQL_DATABASE: joomla_test
MYSQL_USER: joomla
MYSQL_PASSWORD: joomla
ports:
- 3306:3306
options: >-
--health-cmd="mysqladmin ping --silent"
--health-interval=10s
--health-timeout=5s
--health-retries=5
strategy:
fail-fast: false
matrix:
joomla-version: ${{ fromJSON(inputs.joomla-versions) }}
steps:
- name: Checkout repository
uses: actions/checkout@v6
# Single fixed PHP version here (unlike unit-tests) — integration runs on 8.1.
- name: Setup PHP
uses: shivammathur/setup-php@v2
with:
php-version: '8.1'
extensions: mbstring, xml, mysqli, zip, gd, intl, pdo_mysql
tools: composer:v2
- name: Get Composer cache directory
id: composer-cache
run: echo "dir=$(composer config cache-files-dir)" >> $GITHUB_OUTPUT
- name: Cache Composer dependencies
uses: actions/cache@v5
with:
path: ${{ steps.composer-cache.outputs.dir }}
key: ${{ runner.os }}-integration-composer-${{ hashFiles('**/composer.lock') }}
restore-keys: ${{ runner.os }}-integration-composer-
- name: Install dependencies
working-directory: ${{ inputs.working-directory }}
run: |
if [ -f "composer.json" ]; then
composer install --prefer-dist --no-progress --no-interaction
fi
# NOTE(review): the downloads.joomla.org URL pattern here is assumed valid —
# confirm it still resolves; the GitHub releases URL is the fallback, and a
# final `echo` swallows total failure so later steps must tolerate a missing
# /tmp/joomla/joomla.zip.
- name: Download and setup Joomla ${{ matrix.joomla-version }}
run: |
echo "📦 Setting up Joomla ${{ matrix.joomla-version }} for integration testing"
# Create Joomla directory
mkdir -p /tmp/joomla
cd /tmp/joomla
# Determine Joomla version to download
JOOMLA_VERSION="${{ matrix.joomla-version }}"
# Download latest patch version for the specified minor version
if [[ "$JOOMLA_VERSION" == "4.4" ]]; then
DOWNLOAD_VERSION="4.4-Stable"
elif [[ "$JOOMLA_VERSION" == "5.0" ]]; then
DOWNLOAD_VERSION="5.0-Stable"
elif [[ "$JOOMLA_VERSION" == "5.1" ]]; then
DOWNLOAD_VERSION="5.1-Stable"
else
DOWNLOAD_VERSION="${JOOMLA_VERSION}-Stable"
fi
echo "Downloading Joomla ${DOWNLOAD_VERSION}..."
curl -L -o joomla.zip "https://downloads.joomla.org/cms/joomla${JOOMLA_VERSION%%.*}/${DOWNLOAD_VERSION}" || \
curl -L -o joomla.zip "https://github.com/joomla/joomla-cms/releases/download/${JOOMLA_VERSION}.0/Joomla_${JOOMLA_VERSION}.0-Stable-Full_Package.zip" || \
echo "⚠️ Could not download Joomla, integration tests may be limited"
if [ -f joomla.zip ]; then
unzip -q joomla.zip
echo "✅ Joomla extracted successfully"
fi
# Writes a minimal configuration.php matching the MySQL service credentials
# above (host 127.0.0.1 reaches the service via the mapped port).
- name: Configure Joomla
run: |
echo "⚙️ Configuring Joomla for testing"
if [ -d "/tmp/joomla" ]; then
cd /tmp/joomla
# Create basic Joomla configuration
cat > configuration.php << 'EOF'
<?php
class JConfig {
public $dbtype = 'mysqli';
public $host = '127.0.0.1';
public $user = 'joomla';
public $password = 'joomla';
public $db = 'joomla_test';
public $dbprefix = 'jos_';
public $secret = 'test-secret';
public $debug = true;
public $error_reporting = 'maximum';
}
EOF
echo "✅ Joomla configuration created"
else
echo "⚠️ Joomla directory not found, skipping configuration"
fi
# NOTE(review): placeholder only — no extension files are actually copied.
- name: Install extension into Joomla
working-directory: ${{ inputs.working-directory }}
run: |
echo "📦 Installing extension into Joomla"
if [ -d "/tmp/joomla" ]; then
# Copy extension files to Joomla
# This is a placeholder - actual implementation depends on extension type
echo "Extension installation logic would go here"
echo "Extension type detection and installation steps"
else
echo "⚠️ Skipping extension installation - Joomla not available"
fi
# Tries the named "Integration" test suite first, then the directory path;
# only fails the job when both invocations fail.
- name: Run integration tests
working-directory: ${{ inputs.working-directory }}
run: |
if [ -d "tests/Integration" ] && [ -f "vendor/bin/phpunit" ]; then
echo "🧪 Running integration tests"
# Try test suite first, then directory-based as fallback
if vendor/bin/phpunit --testsuite Integration; then
echo "✅ Integration tests passed (test suite)"
elif vendor/bin/phpunit tests/Integration/; then
echo "✅ Integration tests passed (directory)"
else
echo "❌ Integration tests failed"
exit 1
fi
else
echo " No integration tests found or PHPUnit not available"
echo "Looked for: tests/Integration/ directory"
fi
# Aggregation job: always runs (if: always()) so it can report skipped/failed
# upstream jobs, then re-fails the workflow if either suite failed.
summary:
name: Testing Summary
runs-on: ubuntu-latest
needs: [unit-tests, integration-tests]
if: always()
steps:
- name: Generate test summary
run: |
echo "### 🧪 Joomla Testing Summary" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "| Test Suite | Status |" >> $GITHUB_STEP_SUMMARY
echo "|------------|--------|" >> $GITHUB_STEP_SUMMARY
echo "| Unit Tests | ${{ needs.unit-tests.result == 'success' && '✅ Passed' || needs.unit-tests.result == 'skipped' && '⏭️ Skipped' || '❌ Failed' }} |" >> $GITHUB_STEP_SUMMARY
echo "| Integration Tests | ${{ needs.integration-tests.result == 'success' && '✅ Passed' || needs.integration-tests.result == 'skipped' && '⏭️ Skipped' || '❌ Failed' }} |" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "**Repository:** $GITHUB_REPOSITORY" >> $GITHUB_STEP_SUMMARY
echo "**Branch:** $GITHUB_REF_NAME" >> $GITHUB_STEP_SUMMARY
echo "**Commit:** $GITHUB_SHA" >> $GITHUB_STEP_SUMMARY
# Because this job runs with if: always(), an upstream failure does not fail
# it automatically — this step converts upstream failures into a job failure.
- name: Check test results
run: |
if [ "${{ needs.unit-tests.result }}" == "failure" ]; then
echo "❌ Unit tests failed"
exit 1
fi
if [ "${{ needs.integration-tests.result }}" == "failure" ]; then
echo "❌ Integration tests failed"
exit 1
fi
echo "✅ All test suites passed or were skipped"

View File

@@ -1,297 +0,0 @@
# Copyright (C) 2026 Moko Consulting <hello@mokoconsulting.tech>
#
# This file is part of a Moko Consulting project.
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# FILE INFORMATION
# DEFGROUP: GitHub.Workflow
# INGROUP: MokoStandards.Reusable
# REPO: https://github.com/mokoconsulting-tech/MokoStandards
# PATH: /.github/workflows/reusable-php-quality.yml
# VERSION: 01.00.00
# BRIEF: Reusable PHP code quality analysis workflow
# NOTE: Supports PHPCS, PHPStan, Psalm with configurable PHP versions and tools
name: Reusable PHP Quality
# Callable-only quality workflow; the `tools` input selects which of the
# phpcs/phpstan/psalm jobs actually run (each job has a contains() guard).
on:
workflow_call:
inputs:
php-versions:
description: 'JSON array of PHP versions to test'
required: false
type: string
default: '["7.4", "8.0", "8.1", "8.2"]'
tools:
description: 'JSON array of quality tools to run (phpcs, phpstan, psalm)'
required: false
type: string
default: '["phpcs", "phpstan", "psalm"]'
working-directory:
description: 'Working directory for the quality checks'
required: false
type: string
default: '.'
phpcs-standard:
description: 'PHPCS coding standard to use'
required: false
type: string
default: 'PSR12'
phpstan-level:
description: 'PHPStan analysis level (0-9)'
required: false
type: string
default: '5'
psalm-level:
description: 'Psalm error level (1-8)'
required: false
type: string
default: '4'
# When false, tool steps run with continue-on-error so findings are reported
# without failing the workflow.
fail-on-error:
description: 'Fail the workflow if quality checks find issues'
required: false
type: boolean
default: true
outputs:
quality-score:
description: 'Overall quality score percentage'
value: ${{ jobs.aggregate.outputs.score }}
permissions:
contents: read
pull-requests: write
checks: write
jobs:
# PHP_CodeSniffer job: runs once per PHP version from the matrix.
# Tool resolution order: project config file (phpcs.xml / phpcs.xml.dist,
# auto-discovered by phpcs) -> project-local vendor/bin/phpcs against src/
# -> globally installed phpcs against the whole tree (best-effort).
phpcs:
  name: PHP_CodeSniffer (PHP ${{ matrix.php-version }})
  runs-on: ubuntu-latest
  # Skip the entire job unless 'phpcs' was requested via the tools input.
  if: contains(fromJSON(inputs.tools), 'phpcs')
  strategy:
    fail-fast: false
    matrix:
      php-version: ${{ fromJSON(inputs.php-versions) }}
  steps:
    - name: Checkout repository
      uses: actions/checkout@v6
    - name: Setup PHP ${{ matrix.php-version }}
      uses: shivammathur/setup-php@v2
      with:
        php-version: ${{ matrix.php-version }}
        extensions: mbstring, xml
        tools: composer:v2, phpcs
        coverage: none
    - name: Get Composer cache directory
      id: composer-cache
      run: echo "dir=$(composer config cache-files-dir)" >> $GITHUB_OUTPUT
    - name: Cache Composer dependencies
      uses: actions/cache@v5
      with:
        path: ${{ steps.composer-cache.outputs.dir }}
        key: ${{ runner.os }}-composer-${{ hashFiles('**/composer.lock') }}
        restore-keys: ${{ runner.os }}-composer-
    - name: Install dependencies
      working-directory: ${{ inputs.working-directory }}
      run: |
        if [ -f "composer.json" ]; then
          composer install --prefer-dist --no-progress --no-interaction
        fi
    # continue-on-error mirrors the fail-on-error input: findings become
    # non-fatal when the caller sets fail-on-error=false.
    - name: Run PHP_CodeSniffer
      working-directory: ${{ inputs.working-directory }}
      continue-on-error: ${{ !inputs.fail-on-error }}
      run: |
        if [ -f "phpcs.xml" ] || [ -f "phpcs.xml.dist" ]; then
          # Let phpcs auto-discover phpcs.xml or phpcs.xml.dist. Hard-coding
          # --standard=phpcs.xml breaks when only the .dist file exists.
          phpcs --report=summary --report-width=120
        elif [ -f "vendor/bin/phpcs" ]; then
          vendor/bin/phpcs --standard=${{ inputs.phpcs-standard }} --report=summary --report-width=120 src/
        else
          phpcs --standard=${{ inputs.phpcs-standard }} --report=summary --report-width=120 . || echo "No PHP files found or PHPCS configuration missing"
        fi
# PHPStan job: per-PHP-version static analysis. Resolution order: project
# config (phpstan.neon / phpstan.neon.dist, auto-discovered) -> project-local
# vendor binary against src/ at the input level -> global phpstan best-effort.
phpstan:
name: PHPStan (PHP ${{ matrix.php-version }})
runs-on: ubuntu-latest
if: contains(fromJSON(inputs.tools), 'phpstan')
strategy:
fail-fast: false
matrix:
php-version: ${{ fromJSON(inputs.php-versions) }}
steps:
- name: Checkout repository
uses: actions/checkout@v6
- name: Setup PHP ${{ matrix.php-version }}
uses: shivammathur/setup-php@v2
with:
php-version: ${{ matrix.php-version }}
extensions: mbstring, xml
tools: composer:v2, phpstan
coverage: none
- name: Get Composer cache directory
id: composer-cache
run: echo "dir=$(composer config cache-files-dir)" >> $GITHUB_OUTPUT
- name: Cache Composer dependencies
uses: actions/cache@v5
with:
path: ${{ steps.composer-cache.outputs.dir }}
key: ${{ runner.os }}-composer-${{ hashFiles('**/composer.lock') }}
restore-keys: ${{ runner.os }}-composer-
- name: Install dependencies
working-directory: ${{ inputs.working-directory }}
run: |
if [ -f "composer.json" ]; then
composer install --prefer-dist --no-progress --no-interaction
fi
# error-format=github emits workflow annotations on the PR diff.
- name: Run PHPStan
working-directory: ${{ inputs.working-directory }}
continue-on-error: ${{ !inputs.fail-on-error }}
run: |
if [ -f "phpstan.neon" ] || [ -f "phpstan.neon.dist" ]; then
phpstan analyse --no-progress --error-format=github
elif [ -f "vendor/bin/phpstan" ]; then
vendor/bin/phpstan analyse src/ --level=${{ inputs.phpstan-level }} --no-progress --error-format=github
else
phpstan analyse . --level=${{ inputs.phpstan-level }} --no-progress --error-format=github || echo "No PHP files found or PHPStan configuration missing"
fi
# Psalm job: per-PHP-version static analysis, same structure as phpstan above.
psalm:
name: Psalm (PHP ${{ matrix.php-version }})
runs-on: ubuntu-latest
if: contains(fromJSON(inputs.tools), 'psalm')
strategy:
fail-fast: false
matrix:
php-version: ${{ fromJSON(inputs.php-versions) }}
steps:
- name: Checkout repository
uses: actions/checkout@v6
- name: Setup PHP ${{ matrix.php-version }}
uses: shivammathur/setup-php@v2
with:
php-version: ${{ matrix.php-version }}
extensions: mbstring, xml
tools: composer:v2, psalm
coverage: none
- name: Get Composer cache directory
id: composer-cache
run: echo "dir=$(composer config cache-files-dir)" >> $GITHUB_OUTPUT
- name: Cache Composer dependencies
uses: actions/cache@v5
with:
path: ${{ steps.composer-cache.outputs.dir }}
key: ${{ runner.os }}-composer-${{ hashFiles('**/composer.lock') }}
restore-keys: ${{ runner.os }}-composer-
- name: Install dependencies
working-directory: ${{ inputs.working-directory }}
run: |
if [ -f "composer.json" ]; then
composer install --prefer-dist --no-progress --no-interaction
fi
# If no psalm.xml exists but the vendor binary does, a config is generated
# on the fly with `psalm --init src/ <level>` before analysing.
- name: Run Psalm
working-directory: ${{ inputs.working-directory }}
continue-on-error: ${{ !inputs.fail-on-error }}
run: |
if [ -f "psalm.xml" ] || [ -f "psalm.xml.dist" ]; then
psalm --no-progress --output-format=github --show-info=false
elif [ -f "vendor/bin/psalm" ]; then
# Initialize Psalm config if it doesn't exist
if [ ! -f "psalm.xml" ]; then
echo "Initializing Psalm configuration..."
if ! vendor/bin/psalm --init src/ ${{ inputs.psalm-level }}; then
echo "⚠️ Psalm initialization failed, proceeding with defaults"
fi
fi
vendor/bin/psalm --no-progress --output-format=github --show-info=false
else
psalm --no-progress --output-format=github --show-info=false || echo "No PHP files found or Psalm configuration missing"
fi
# Summary job: computes a percentage score over the non-skipped tool jobs and
# exposes it as the workflow's quality-score output. Runs with if: always()
# so skipped/failed tools are still counted.
aggregate:
name: Quality Check Summary
runs-on: ubuntu-latest
needs: [phpcs, phpstan, psalm]
if: always()
outputs:
score: ${{ steps.calculate.outputs.score }}
steps:
- name: Calculate quality score
id: calculate
run: |
# Count successful jobs
SUCCESS=0
TOTAL=0
if [ "${{ needs.phpcs.result }}" != "skipped" ]; then
TOTAL=$((TOTAL + 1))
[ "${{ needs.phpcs.result }}" == "success" ] && SUCCESS=$((SUCCESS + 1))
fi
if [ "${{ needs.phpstan.result }}" != "skipped" ]; then
TOTAL=$((TOTAL + 1))
[ "${{ needs.phpstan.result }}" == "success" ] && SUCCESS=$((SUCCESS + 1))
fi
if [ "${{ needs.psalm.result }}" != "skipped" ]; then
TOTAL=$((TOTAL + 1))
[ "${{ needs.psalm.result }}" == "success" ] && SUCCESS=$((SUCCESS + 1))
fi
# Calculate percentage
if [ $TOTAL -gt 0 ]; then
SCORE=$((SUCCESS * 100 / TOTAL))
else
SCORE=100
fi
echo "score=$SCORE" >> $GITHUB_OUTPUT
echo "Quality Score: $SCORE%" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "- PHPCS: ${{ needs.phpcs.result }}" >> $GITHUB_STEP_SUMMARY
echo "- PHPStan: ${{ needs.phpstan.result }}" >> $GITHUB_STEP_SUMMARY
echo "- Psalm: ${{ needs.psalm.result }}" >> $GITHUB_STEP_SUMMARY
# Re-fails the workflow on any tool failure, but only when the caller asked
# for hard failures (fail-on-error input).
- name: Check overall status
if: inputs.fail-on-error
run: |
if [ "${{ needs.phpcs.result }}" == "failure" ] || \
[ "${{ needs.phpstan.result }}" == "failure" ] || \
[ "${{ needs.psalm.result }}" == "failure" ]; then
echo "❌ Quality checks failed"
exit 1
fi
echo "✅ All quality checks passed"

View File

@@ -1,138 +0,0 @@
# Copyright (C) 2026 Moko Consulting <hello@mokoconsulting.tech>
#
# This file is part of a Moko Consulting project.
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# FILE INFORMATION
# DEFGROUP: GitHub.Workflow
# INGROUP: MokoStandards.Reusable
# REPO: https://github.com/mokoconsulting-tech/MokoStandards
# PATH: /.github/workflows/reusable-project-detector.yml
# VERSION: 01.00.00
# BRIEF: Reusable workflow for detecting project type (Joomla, Dolibarr, Generic)
# NOTE: Provides project_type and extension_type outputs for downstream workflows
name: Reusable Project Type Detection
# Callable-only workflow exposing four detection outputs consumed by the
# build/release/deploy workflows (project-type, extension-type, has-php, has-node).
on:
workflow_call:
inputs:
working-directory:
description: 'Working directory for detection'
required: false
type: string
default: '.'
outputs:
project-type:
description: 'Detected project type (joomla, dolibarr, generic)'
value: ${{ jobs.detect.outputs.project_type }}
extension-type:
description: 'Detected extension type (component, module, plugin, etc.)'
value: ${{ jobs.detect.outputs.extension_type }}
has-php:
description: 'Whether project contains PHP files'
value: ${{ jobs.detect.outputs.has_php }}
has-node:
description: 'Whether project contains Node.js/package.json'
value: ${{ jobs.detect.outputs.has_node }}
permissions:
contents: read
detect:
name: Detect Project Type
runs-on: ubuntu-latest
outputs:
project_type: ${{ steps.detect.outputs.project_type }}
extension_type: ${{ steps.detect.outputs.extension_type }}
has_php: ${{ steps.detect.outputs.has_php }}
has_node: ${{ steps.detect.outputs.has_node }}
steps:
- name: Checkout repository
uses: actions/checkout@v6
- name: Detect project type and components
id: detect
working-directory: ${{ inputs.working-directory }}
run: |
echo "### 🔍 Project Detection" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
# Detection priority: Joomla > Dolibarr > Generic
# Check for Joomla indicators
if [ -f "joomla.xml" ] || \
find . -maxdepth 2 \( -name "mod_*.xml" -o -name "plg_*.xml" -o -name "com_*.xml" -o -name "pkg_*.xml" -o -name "tpl_*.xml" \) 2>/dev/null | head -1 | grep -q .; then
echo "project_type=joomla" >> $GITHUB_OUTPUT
echo "**Project Type:** Joomla" >> $GITHUB_STEP_SUMMARY
# Detect Joomla extension type
if [ -d "administrator/components" ] || [ -d "components" ]; then
echo "extension_type=component" >> $GITHUB_OUTPUT
echo "**Extension Type:** Component" >> $GITHUB_STEP_SUMMARY
elif find . -maxdepth 1 -name "mod_*.xml" 2>/dev/null | head -1 | grep -q .; then
echo "extension_type=module" >> $GITHUB_OUTPUT
echo "**Extension Type:** Module" >> $GITHUB_STEP_SUMMARY
elif find . -maxdepth 1 -name "plg_*.xml" 2>/dev/null | head -1 | grep -q .; then
echo "extension_type=plugin" >> $GITHUB_OUTPUT
echo "**Extension Type:** Plugin" >> $GITHUB_STEP_SUMMARY
elif find . -maxdepth 1 -name "pkg_*.xml" 2>/dev/null | head -1 | grep -q .; then
echo "extension_type=package" >> $GITHUB_OUTPUT
echo "**Extension Type:** Package" >> $GITHUB_STEP_SUMMARY
elif find . -maxdepth 1 -name "tpl_*.xml" 2>/dev/null | head -1 | grep -q .; then
echo "extension_type=template" >> $GITHUB_OUTPUT
echo "**Extension Type:** Template" >> $GITHUB_STEP_SUMMARY
else
echo "extension_type=component" >> $GITHUB_OUTPUT
echo "**Extension Type:** Component (default)" >> $GITHUB_STEP_SUMMARY
fi
# Check for Dolibarr indicators
elif [ -d "htdocs" ] || [ -d "core/modules" ] || \
([ -f "composer.json" ] && grep -q "dolibarr" composer.json 2>/dev/null); then
echo "project_type=dolibarr" >> $GITHUB_OUTPUT
echo "extension_type=module" >> $GITHUB_OUTPUT
echo "**Project Type:** Dolibarr" >> $GITHUB_STEP_SUMMARY
echo "**Extension Type:** Module" >> $GITHUB_STEP_SUMMARY
# Default to Generic
else
echo "project_type=generic" >> $GITHUB_OUTPUT
echo "extension_type=application" >> $GITHUB_OUTPUT
echo "**Project Type:** Generic" >> $GITHUB_STEP_SUMMARY
echo "**Extension Type:** Application" >> $GITHUB_STEP_SUMMARY
fi
# Detect PHP presence
if find . -name "*.php" -type f 2>/dev/null | head -1 | grep -q .; then
echo "has_php=true" >> $GITHUB_OUTPUT
echo "- ✅ PHP files detected" >> $GITHUB_STEP_SUMMARY
else
echo "has_php=false" >> $GITHUB_OUTPUT
echo "- No PHP files detected" >> $GITHUB_STEP_SUMMARY
fi
# Detect Node.js presence
if [ -f "package.json" ]; then
echo "has_node=true" >> $GITHUB_OUTPUT
echo "- ✅ Node.js project detected (package.json)" >> $GITHUB_STEP_SUMMARY
else
echo "has_node=false" >> $GITHUB_OUTPUT
echo "- No Node.js project detected" >> $GITHUB_STEP_SUMMARY
fi

View File

@@ -1,397 +0,0 @@
# Copyright (C) 2026 Moko Consulting <hello@mokoconsulting.tech>
#
# This file is part of a Moko Consulting project.
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# FILE INFORMATION
# DEFGROUP: GitHub.Workflow
# INGROUP: MokoStandards.Reusable
# REPO: https://github.com/mokoconsulting-tech/MokoStandards
# PATH: /.github/workflows/reusable-release.yml
# VERSION: 01.00.00
# BRIEF: Reusable type-aware release workflow for Joomla, Dolibarr, and generic projects
# NOTE: Creates releases with type-specific packaging and optional marketplace publishing
name: Reusable Release
# Callable-only release workflow: detect project type, build a type-specific
# package, optionally create a GitHub release and surface marketplace steps.
on:
workflow_call:
inputs:
version:
description: 'Release version (semver format)'
required: true
type: string
prerelease:
description: 'Mark as pre-release'
required: false
type: boolean
default: false
draft:
description: 'Create as draft release'
required: false
type: boolean
default: false
php-version:
description: 'PHP version for build'
required: false
type: string
default: '8.1'
create-github-release:
description: 'Create GitHub release'
required: false
type: boolean
default: true
publish-to-marketplace:
description: 'Publish to marketplace (Joomla/Dolibarr)'
required: false
type: boolean
default: false
working-directory:
description: 'Working directory'
required: false
type: string
default: '.'
secrets:
# NOTE(review): declared but not referenced by any job below — marketplace
# publishing is currently manual-instructions only.
MARKETPLACE_TOKEN:
description: 'Marketplace API token (JED/Dolistore)'
required: false
# contents: write is required by softprops/action-gh-release.
permissions:
contents: write
jobs:
# Delegates project-type detection to the sibling reusable workflow.
# NOTE(review): the relative `./` uses path resolves only when this workflow
# runs from the MokoStandards repository itself; external callers reference
# it as mokoconsulting-tech/MokoStandards/...@main — confirm intended scope.
detect:
name: Detect Project Type
uses: ./.github/workflows/reusable-project-detector.yml
with:
working-directory: ${{ inputs.working-directory }}
# Builds the release artifact: validates the semver input, installs deps,
# stamps the version into manifests, then packages per detected project type
# (Joomla zip / Dolibarr zip / generic tar.gz) and emits checksums.
build-package:
name: Build Release Package
runs-on: ubuntu-latest
needs: detect
outputs:
package-name: ${{ steps.package.outputs.name }}
steps:
- name: Checkout repository
uses: actions/checkout@v6
with:
fetch-depth: 0
- name: Setup PHP
if: needs.detect.outputs.has-php == 'true'
uses: shivammathur/setup-php@v2
with:
php-version: ${{ inputs.php-version }}
extensions: mbstring, xml, zip
tools: composer:v2
- name: Setup Node.js
if: needs.detect.outputs.has-node == 'true'
uses: actions/setup-node@v6
with:
node-version: '20.x'
# Rejects anything that is not X.Y.Z or X.Y.Z-prerelease.
- name: Validate version format
run: |
VERSION="${{ inputs.version }}"
if ! echo "$VERSION" | grep -E '^[0-9]+\.[0-9]+\.[0-9]+(-[a-zA-Z0-9.-]+)?$'; then
echo "❌ Invalid version format: $VERSION"
echo "Expected semver format: X.Y.Z or X.Y.Z-prerelease"
exit 1
fi
echo "✅ Version format valid: $VERSION"
- name: Install dependencies
working-directory: ${{ inputs.working-directory }}
run: |
if [ -f "composer.json" ]; then
composer install --no-dev --optimize-autoloader --no-interaction
echo "✅ Composer dependencies installed" >> $GITHUB_STEP_SUMMARY
fi
if [ -f "package.json" ]; then
npm ci
if grep -q '"build"' package.json; then
npm run build
fi
echo "✅ Node dependencies installed and built" >> $GITHUB_STEP_SUMMARY
fi
# NOTE(review): the blanket sed over every <version> tag and every
# "version" key (including nested ones in package.json/composer.json) can
# overwrite unrelated version fields — verify against target manifests.
- name: Update version in files
working-directory: ${{ inputs.working-directory }}
run: |
VERSION="${{ inputs.version }}"
# Update version in XML manifests (Joomla/Dolibarr)
if [ "${{ needs.detect.outputs.project-type }}" == "joomla" ] || \
[ "${{ needs.detect.outputs.project-type }}" == "dolibarr" ]; then
find . -name "*.xml" -type f -not -path "*/node_modules/*" -not -path "*/vendor/*" \
-exec sed -i "s/<version>[^<]*<\/version>/<version>${VERSION}<\/version>/g" {} \;
echo "- ✅ Updated version in XML manifests" >> $GITHUB_STEP_SUMMARY
fi
# Update version in package.json
if [ -f "package.json" ]; then
sed -i "s/\"version\": \"[^\"]*\"/\"version\": \"${VERSION}\"/g" package.json
echo "- ✅ Updated version in package.json" >> $GITHUB_STEP_SUMMARY
fi
# Update version in composer.json
if [ -f "composer.json" ]; then
sed -i "s/\"version\": \"[^\"]*\"/\"version\": \"${VERSION}\"/g" composer.json
echo "- ✅ Updated version in composer.json" >> $GITHUB_STEP_SUMMARY
fi
# Joomla packaging: dev artifacts excluded; extension name taken from the
# first top-level XML manifest, else derived from the repo name.
- name: Create Joomla package
if: needs.detect.outputs.project-type == 'joomla'
working-directory: ${{ inputs.working-directory }}
run: |
mkdir -p build/package
# Copy files excluding development artifacts
rsync -av \
--exclude='build' \
--exclude='tests' \
--exclude='.git*' \
--exclude='composer.json' \
--exclude='composer.lock' \
--exclude='phpunit.xml*' \
--exclude='phpcs.xml*' \
--exclude='phpstan.neon*' \
--exclude='psalm.xml*' \
--exclude='node_modules' \
--exclude='.github' \
--exclude='package.json' \
--exclude='package-lock.json' \
. build/package/
# Determine extension name from manifest
MANIFEST=$(find . -maxdepth 1 -name "*.xml" -not -name "phpunit.xml*" -type f | head -1)
if [ -n "$MANIFEST" ]; then
EXT_NAME=$(basename "$MANIFEST" .xml)
else
EXT_NAME=$(basename "$GITHUB_REPOSITORY" | sed 's/^joomla-//')
fi
# Create ZIP package
cd build/package
VERSION="${{ inputs.version }}"
PACKAGE_NAME="${EXT_NAME}-${VERSION}.zip"
zip -r "../${PACKAGE_NAME}" .
cd ../..
echo "PACKAGE_NAME=${PACKAGE_NAME}" >> $GITHUB_ENV
echo "✅ Created Joomla package: ${PACKAGE_NAME}" >> $GITHUB_STEP_SUMMARY
# NOTE(review): the grep targets the literal class name "modMyModule" — a
# template placeholder; real modules fall through to the repo-name fallback.
# Confirm whether detection should scan core/modules/mod*.class.php instead.
- name: Create Dolibarr package
if: needs.detect.outputs.project-type == 'dolibarr'
working-directory: ${{ inputs.working-directory }}
run: |
mkdir -p build/package
# Copy module files
rsync -av \
--exclude='build' \
--exclude='tests' \
--exclude='.git*' \
--exclude='node_modules' \
--exclude='.github' \
. build/package/
# Determine module name
if [ -f "core/modules/modMyModule.class.php" ]; then
MODULE_NAME=$(grep -oP "class modMyModule extends DolibarrModules" core/modules/*.php | head -1 | sed 's/class mod//' | sed 's/ extends.*//')
else
MODULE_NAME=$(basename "$GITHUB_REPOSITORY" | sed 's/^dolibarr-//')
fi
# Create ZIP package
cd build/package
VERSION="${{ inputs.version }}"
PACKAGE_NAME="${MODULE_NAME}-${VERSION}.zip"
zip -r "../${PACKAGE_NAME}" .
cd ../..
echo "PACKAGE_NAME=${PACKAGE_NAME}" >> $GITHUB_ENV
echo "✅ Created Dolibarr package: ${PACKAGE_NAME}" >> $GITHUB_STEP_SUMMARY
# Generic packaging: prefers dist/, then build/, then a filtered full copy.
- name: Create Generic package
if: needs.detect.outputs.project-type == 'generic'
working-directory: ${{ inputs.working-directory }}
run: |
mkdir -p build/package
# Copy relevant build artifacts
if [ -d "dist" ]; then
cp -r dist/* build/package/
elif [ -d "build" ]; then
cp -r build/* build/package/
else
# Copy all files excluding development artifacts
rsync -av \
--exclude='build' \
--exclude='tests' \
--exclude='.git*' \
--exclude='node_modules' \
. build/package/
fi
# Create package
REPO_NAME=$(basename "$GITHUB_REPOSITORY")
VERSION="${{ inputs.version }}"
PACKAGE_NAME="${REPO_NAME}-${VERSION}.tar.gz"
cd build
tar -czf "${PACKAGE_NAME}" package/
cd ..
echo "PACKAGE_NAME=${PACKAGE_NAME}" >> $GITHUB_ENV
echo "✅ Created generic package: ${PACKAGE_NAME}" >> $GITHUB_STEP_SUMMARY
# PACKAGE_NAME is carried between steps via $GITHUB_ENV set above.
- name: Generate checksums
working-directory: ${{ inputs.working-directory }}
run: |
cd build
PACKAGE="${PACKAGE_NAME}"
if [ -f "$PACKAGE" ]; then
sha256sum "$PACKAGE" > "${PACKAGE}.sha256"
md5sum "$PACKAGE" > "${PACKAGE}.md5"
echo "✅ Generated checksums" >> $GITHUB_STEP_SUMMARY
fi
- name: Output package info
id: package
run: |
echo "name=${PACKAGE_NAME}" >> $GITHUB_OUTPUT
- name: Upload release artifacts
uses: actions/upload-artifact@v6
with:
name: release-package
path: |
${{ inputs.working-directory }}/build/*.zip
${{ inputs.working-directory }}/build/*.tar.gz
${{ inputs.working-directory }}/build/*.sha256
${{ inputs.working-directory }}/build/*.md5
retention-days: 30
# Creates the GitHub release from the built artifacts, with release notes
# extracted from CHANGELOG.md when a matching "## [X.Y.Z]" section exists.
create-release:
name: Create GitHub Release
runs-on: ubuntu-latest
needs: [detect, build-package]
if: inputs.create-github-release
steps:
- name: Checkout repository
uses: actions/checkout@v6
with:
fetch-depth: 0
- name: Download release artifacts
uses: actions/download-artifact@v7
with:
name: release-package
path: ./artifacts
# NOTE(review): the version is interpolated into the awk regex without
# escaping, so dots in X.Y.Z match any character — in practice only
# near-miss versions could collide, but worth confirming.
- name: Extract changelog
id: changelog
run: |
VERSION="${{ inputs.version }}"
if [ -f "CHANGELOG.md" ]; then
# Extract changelog for this version
awk "/## \[${VERSION}\]/,/## \[/{if(/## \[${VERSION}\]/)next;else if(/## \[/)exit;else print}" CHANGELOG.md > release_notes.md
if [ ! -s release_notes.md ]; then
echo "## Release ${VERSION}" > release_notes.md
echo "" >> release_notes.md
echo "No specific changelog found for this version." >> release_notes.md
echo "Please refer to the full CHANGELOG.md for details." >> release_notes.md
fi
else
echo "## Release ${VERSION}" > release_notes.md
echo "" >> release_notes.md
echo "Release created from ${{ needs.detect.outputs.project-type }} project." >> release_notes.md
fi
- name: Create GitHub Release
uses: softprops/action-gh-release@v2
with:
tag_name: v${{ inputs.version }}
name: Release ${{ inputs.version }}
body_path: release_notes.md
draft: ${{ inputs.draft }}
prerelease: ${{ inputs.prerelease }}
files: |
artifacts/*
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Release summary
run: |
echo "### 🚀 Release Created Successfully" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "**Version:** ${{ inputs.version }}" >> $GITHUB_STEP_SUMMARY
echo "**Project Type:** ${{ needs.detect.outputs.project-type }}" >> $GITHUB_STEP_SUMMARY
echo "**Extension Type:** ${{ needs.detect.outputs.extension-type }}" >> $GITHUB_STEP_SUMMARY
echo "**Pre-release:** ${{ inputs.prerelease }}" >> $GITHUB_STEP_SUMMARY
echo "**Draft:** ${{ inputs.draft }}" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "**Package:** ${{ needs.build-package.outputs.package-name }}" >> $GITHUB_STEP_SUMMARY
# Marketplace step: currently only prints manual submission instructions for
# JED/Dolistore; no API call is made (MARKETPLACE_TOKEN is unused here).
publish-marketplace:
name: Publish to Marketplace
runs-on: ubuntu-latest
needs: [detect, build-package, create-release]
if: inputs.publish-to-marketplace && (needs.detect.outputs.project-type == 'joomla' || needs.detect.outputs.project-type == 'dolibarr')
steps:
- name: Download release artifacts
uses: actions/download-artifact@v7
with:
name: release-package
path: ./artifacts
- name: Publish to marketplace
run: |
echo "### 🌐 Marketplace Publishing" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
PROJECT_TYPE="${{ needs.detect.outputs.project-type }}"
if [ "$PROJECT_TYPE" == "joomla" ]; then
echo "⚠️ Joomla Extensions Directory (JED) publishing requires manual submission" >> $GITHUB_STEP_SUMMARY
echo "Package ready at: artifacts/${{ needs.build-package.outputs.package-name }}" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "To publish to JED:" >> $GITHUB_STEP_SUMMARY
echo "1. Visit https://extensions.joomla.org/" >> $GITHUB_STEP_SUMMARY
echo "2. Login and submit the extension package" >> $GITHUB_STEP_SUMMARY
elif [ "$PROJECT_TYPE" == "dolibarr" ]; then
echo "⚠️ Dolistore publishing requires manual submission" >> $GITHUB_STEP_SUMMARY
echo "Package ready at: artifacts/${{ needs.build-package.outputs.package-name }}" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "To publish to Dolistore:" >> $GITHUB_STEP_SUMMARY
echo "1. Visit https://www.dolistore.com/" >> $GITHUB_STEP_SUMMARY
echo "2. Login and submit the module package" >> $GITHUB_STEP_SUMMARY
fi
# Note: Automated marketplace publishing would require
# marketplace-specific API implementation here
# For now, we provide manual instructions

View File

@@ -1,210 +0,0 @@
# Copyright (C) 2026 Moko Consulting <hello@mokoconsulting.tech>
#
# This file is part of a Moko Consulting project.
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# FILE INFORMATION
# DEFGROUP: GitHub.Workflows
# INGROUP: MokoStandards.Reusable
# REPO: https://github.com/mokoconsulting-tech/MokoStandards
# PATH: /.github/workflows/reusable-script-executor.yml
# VERSION: 01.00.01
# BRIEF: Reusable workflow to execute MokoStandards scripts in any repository
# NOTE: Provides unified script execution with proper environment setup.
#       All caller-supplied values are passed to shell steps via `env`
#       (never interpolated into `run:` bodies) to prevent script injection.
---
name: Execute MokoStandards Script

on:
  workflow_call:
    inputs:
      script_path:
        description: 'Path to script relative to scripts/ directory (e.g., validate/no_secrets.py)'
        required: true
        type: string
      script_args:
        description: 'Arguments to pass to the script'
        required: false
        type: string
        default: ''
      python_version:
        description: 'Python version to use'
        required: false
        type: string
        default: '3.11'
      install_dependencies:
        description: 'Install Python dependencies (pyyaml, etc.)'
        required: false
        type: boolean
        default: true
      working_directory:
        description: 'Working directory for script execution'
        required: false
        type: string
        default: '.'
      create_summary:
        description: 'Create GitHub step summary'
        required: false
        type: boolean
        default: true
    outputs:
      exit_code:
        description: 'Script exit code'
        value: ${{ jobs.execute-script.outputs.exit_code }}
      script_output:
        description: 'Script output (truncated to 1000 chars)'
        value: ${{ jobs.execute-script.outputs.script_output }}

jobs:
  execute-script:
    name: Execute ${{ inputs.script_path }}
    runs-on: ubuntu-latest
    outputs:
      exit_code: ${{ steps.run-script.outputs.exit_code }}
      script_output: ${{ steps.run-script.outputs.script_output }}
    permissions:
      contents: read
    steps:
      - name: Checkout repository
        uses: actions/checkout@v6

      - name: Setup Python
        if: endsWith(inputs.script_path, '.py')
        uses: actions/setup-python@v6
        with:
          python-version: ${{ inputs.python_version }}

      - name: Install Python dependencies
        if: endsWith(inputs.script_path, '.py') && inputs.install_dependencies
        env:
          CREATE_SUMMARY: ${{ inputs.create_summary }}
          PYTHON_VERSION: ${{ inputs.python_version }}
        run: |
          python -m pip install --upgrade pip
          pip install pyyaml
          # Install additional dependencies if a requirements file exists
          if [ -f "requirements.txt" ]; then
            pip install -r requirements.txt
          fi
          if [ "$CREATE_SUMMARY" = "true" ]; then
            echo "## 📦 Dependencies Installed" >> "$GITHUB_STEP_SUMMARY"
            echo "- Python $PYTHON_VERSION" >> "$GITHUB_STEP_SUMMARY"
            echo "- PyYAML (for configuration)" >> "$GITHUB_STEP_SUMMARY"
          fi

      - name: Setup Bash
        if: endsWith(inputs.script_path, '.sh')
        run: |
          bash --version

      - name: Verify script exists
        id: verify
        env:
          SCRIPT_REL_PATH: ${{ inputs.script_path }}
          CREATE_SUMMARY: ${{ inputs.create_summary }}
        run: |
          SCRIPT_PATH="scripts/${SCRIPT_REL_PATH}"
          if [ ! -f "$SCRIPT_PATH" ]; then
            echo "❌ Script not found: $SCRIPT_PATH" >> "$GITHUB_STEP_SUMMARY"
            echo "" >> "$GITHUB_STEP_SUMMARY"
            echo "Available scripts:" >> "$GITHUB_STEP_SUMMARY"
            find scripts -name "*.py" -o -name "*.sh" | sort >> "$GITHUB_STEP_SUMMARY"
            exit 1
          fi
          echo "script_full_path=$SCRIPT_PATH" >> "$GITHUB_OUTPUT"
          if [ "$CREATE_SUMMARY" = "true" ]; then
            echo "## ✅ Script Found" >> "$GITHUB_STEP_SUMMARY"
            echo "**Path:** \`$SCRIPT_PATH\`" >> "$GITHUB_STEP_SUMMARY"
            echo "**Type:** $(file -b "$SCRIPT_PATH")" >> "$GITHUB_STEP_SUMMARY"
          fi

      - name: Make script executable
        env:
          SCRIPT_PATH: ${{ steps.verify.outputs.script_full_path }}
        run: |
          chmod +x "$SCRIPT_PATH"

      - name: Run script
        id: run-script
        # NOTE: working-directory changes the cwd, while the verified script
        # path is relative to the repository root; callers using a non-default
        # working_directory must account for this (preserved from v01.00.00).
        working-directory: ${{ inputs.working_directory }}
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          SCRIPT_PATH: ${{ steps.verify.outputs.script_full_path }}
          SCRIPT_ARGS: ${{ inputs.script_args }}
          WORKING_DIRECTORY: ${{ inputs.working_directory }}
        run: |
          echo "## 🚀 Executing Script" >> "$GITHUB_STEP_SUMMARY"
          echo "" >> "$GITHUB_STEP_SUMMARY"
          echo "**Script:** \`$SCRIPT_PATH\`" >> "$GITHUB_STEP_SUMMARY"
          echo "**Arguments:** \`$SCRIPT_ARGS\`" >> "$GITHUB_STEP_SUMMARY"
          echo "**Working Directory:** \`$WORKING_DIRECTORY\`" >> "$GITHUB_STEP_SUMMARY"
          echo "" >> "$GITHUB_STEP_SUMMARY"
          echo "### Output" >> "$GITHUB_STEP_SUMMARY"
          echo '```' >> "$GITHUB_STEP_SUMMARY"
          # Execute the script and capture combined stdout/stderr.
          # NOTE: $SCRIPT_ARGS is intentionally unquoted so that multiple
          # space-separated arguments are split into separate words.
          set +e
          if [[ "$SCRIPT_PATH" == *.py ]]; then
            OUTPUT=$(python3 "$SCRIPT_PATH" $SCRIPT_ARGS 2>&1)
            EXIT_CODE=$?
          elif [[ "$SCRIPT_PATH" == *.sh ]]; then
            OUTPUT=$(bash "$SCRIPT_PATH" $SCRIPT_ARGS 2>&1)
            EXIT_CODE=$?
          else
            OUTPUT=$("$SCRIPT_PATH" $SCRIPT_ARGS 2>&1)
            EXIT_CODE=$?
          fi
          set -e
          # Save outputs
          echo "exit_code=$EXIT_CODE" >> "$GITHUB_OUTPUT"
          # Truncate output for the workflow output (max 1000 chars).
          # A randomized heredoc delimiter avoids collisions if the script
          # output itself contains a line reading "EOF".
          OUTPUT_TRUNCATED="${OUTPUT:0:1000}"
          DELIM="SCRIPT_OUTPUT_EOF_${RANDOM}"
          {
            echo "script_output<<${DELIM}"
            echo "$OUTPUT_TRUNCATED"
            echo "${DELIM}"
          } >> "$GITHUB_OUTPUT"
          # Show full output in the summary, limited to 100 lines.
          echo "$OUTPUT" | head -n 100 >> "$GITHUB_STEP_SUMMARY"
          echo '```' >> "$GITHUB_STEP_SUMMARY"
          echo "" >> "$GITHUB_STEP_SUMMARY"
          # Report exit code
          if [ "$EXIT_CODE" -eq 0 ]; then
            echo "### ✅ Script Completed Successfully" >> "$GITHUB_STEP_SUMMARY"
          else
            echo "### ❌ Script Failed" >> "$GITHUB_STEP_SUMMARY"
          fi
          echo "**Exit Code:** $EXIT_CODE" >> "$GITHUB_STEP_SUMMARY"
          exit "$EXIT_CODE"

      - name: Upload script output
        if: always()
        uses: actions/upload-artifact@v6
        with:
          name: script-output-${{ github.run_id }}
          path: |
            *.log
            *.json
            *.csv
          retention-days: 7
          if-no-files-found: ignore

View File

@@ -63,7 +63,7 @@ permissions:
jobs:
compliance:
name: Standards Compliance Validation
uses: ./.github/workflows/reusable-ci-validation.yml
uses: mokoconsulting-tech/MokoStandards/.github/workflows/reusable-ci-validation.yml@main
with:
profile: ${{ inputs.profile || 'full' }}
validate-manifests: true

File diff suppressed because it is too large Load Diff

View File

@@ -61,9 +61,6 @@ cd moko-cassiopeia
# Run development setup
make dev-setup
# Install Git hooks (optional but recommended)
./scripts/git/install-hooks.sh
```
See [docs/QUICK_START.md](./docs/QUICK_START.md) for detailed setup instructions.
@@ -75,8 +72,7 @@ The repository provides several tools to streamline development:
* **Makefile**: Common development tasks (`make help` to see all commands)
* **Pre-commit Hooks**: Automatic local validation before commits
* **VS Code Tasks**: Pre-configured tasks for common operations
* **Validation Scripts**: Located in `scripts/validate/`
* **CI/CD Workflows**: Automated testing and deployment
* **CI/CD Workflows**: Automated testing and deployment via MokoStandards
Run `make validate-required` before submitting PRs to catch common issues early.

View File

@@ -103,6 +103,20 @@ If upgrading from a prior version, Joomla will safely overwrite files
- **Color Scheme**: Toggle light/dark defaults.
- **Analytics/GTM**: Enable/disable optional expansions.
### Custom Color Palettes
Moko-Cassiopeia supports custom color schemes for both light and dark modes:
- **Standard**: Default Joomla Cassiopeia colors
- **Alternative**: Alternative color palette
- **Custom**: Create your own custom colors by adding `colors_custom.css` files
To use custom colors:
1. Create `src/media/css/colors/light/colors_custom.css` for light mode
2. Create `src/media/css/colors/dark/colors_custom.css` for dark mode
3. Define your CSS variables in these files (see existing `colors_standard.css` for reference)
4. Select "Custom" in template settings under **Variables & Palettes**
### Font Awesome 7
- Fully integrated into Joomla's asset manager.
@@ -150,9 +164,6 @@ make quality
# Create distribution package
make package
# Install Git hooks (optional but recommended)
python3 ./scripts/git/install-hooks.py
```
**New to the project?** See [Quick Start Guide](./docs/QUICK_START.md) for a 5-minute walkthrough.
@@ -162,23 +173,21 @@ python3 ./scripts/git/install-hooks.py
- **[Quick Start Guide](./docs/QUICK_START.md)** - Get up and running in 5 minutes
- **[Workflow Guide](./docs/WORKFLOW_GUIDE.md)** - Complete workflow reference with examples
- **[Joomla Development Guide](./docs/JOOMLA_DEVELOPMENT.md)** - Testing, quality checks, and deployment
- **[Scripts Documentation](./scripts/README.md)** - Available automation scripts
- **[Contributing Guide](./CONTRIBUTING.md)** - How to contribute
### Available Tools
- **Makefile**: Run `make help` to see all available commands
- **Python Scripts**: All automation scripts are now Python-based for cross-platform compatibility
- **Pre-commit Hooks**: Automatic validation before commits
- **VS Code Tasks**: Pre-configured development tasks
- **GitHub Actions**: Automated CI/CD pipelines
### Cross-Platform Support
All scripts are now written in Python for maximum cross-platform compatibility:
All automation is handled through the Makefile and GitHub Actions workflows for maximum cross-platform compatibility:
- **Joomla Extension Support**: Full support for Joomla 4.x and 5.x templates, components, modules, and plugins
- **Dolibarr Module Support**: Automatic detection and packaging of Dolibarr modules
- **Platform Detection**: Scripts automatically detect whether you're working with Joomla or Dolibarr extensions
- **Platform Detection**: Workflows automatically detect whether you're working with Joomla extensions
- **MokoStandards Integration**: Uses reusable workflows from MokoStandards for consistency
### Joomla Development Workflows

View File

@@ -41,16 +41,7 @@ All requirements are automatically installed in CI/CD pipelines.
Package the Joomla template as a distributable ZIP file:
```bash
./scripts/release/package_extension.sh [output_dir] [version]
```
**Parameters:**
- `output_dir` (optional): Output directory for the ZIP file (default: `dist`)
- `version` (optional): Version string to use (default: extracted from manifest)
**Example:**
```bash
./scripts/release/package_extension.sh dist 3.5.0
make package
```
This creates a ZIP file in the `dist` directory with all necessary template files, excluding development files.
@@ -246,7 +237,7 @@ phpcs --config-set installed_paths ~/.composer/vendor/phpcompatibility/php-compa
3. Run checks:
```bash
# PHP syntax check
./scripts/validate/php_syntax.sh
make validate-required
# CodeSniffer
phpcs --standard=phpcs.xml src/
@@ -266,7 +257,7 @@ Use the package script to create a distribution:
```bash
# Create package
./scripts/release/package_extension.sh dist 3.5.0
make package
# Upload to server
scp dist/moko-cassiopeia-3.5.0-template.zip user@server:/path/to/joomla/

View File

@@ -34,13 +34,7 @@ composer global require "phpcompatibility/php-compatibility:^9.0"
composer global require codeception/codeception
```
### 3. Install Git Hooks (Optional but Recommended)
```bash
./scripts/git/install-hooks.sh
```
### 4. Validate Everything Works
### 3. Validate Everything Works
```bash
# Quick validation
@@ -111,9 +105,6 @@ make phpcompat
# Package with auto-detected version
make package
# Or specify directory and version
./scripts/release/package_extension.sh dist 03.05.00
# Check package contents
ls -lh dist/
unzip -l dist/moko-cassiopeia-*.zip
@@ -157,13 +148,6 @@ moko-cassiopeia/
│ ├── media/ # Assets (CSS, JS, images)
│ ├── language/ # Language files
│ └── administrator/ # Admin files
├── scripts/ # Automation scripts
│ ├── validate/ # Validation scripts
│ ├── fix/ # Fix/update scripts
│ ├── release/ # Release scripts
│ ├── run/ # Execution scripts
│ ├── git/ # Git hooks
│ └── lib/ # Shared libraries
├── tests/ # Test suites
├── docs/ # Documentation
├── .github/workflows/ # CI/CD workflows
@@ -177,7 +161,6 @@ moko-cassiopeia/
1. **Read the Workflow Guide**: [docs/WORKFLOW_GUIDE.md](./WORKFLOW_GUIDE.md)
2. **Review Joomla Development**: [docs/JOOMLA_DEVELOPMENT.md](./JOOMLA_DEVELOPMENT.md)
3. **Check Scripts Documentation**: [scripts/README.md](../scripts/README.md)
### Creating Your First Feature
@@ -234,10 +217,6 @@ Use the Release Pipeline workflow to promote between stages.
```bash
make fix-permissions
# Or manually:
chmod +x scripts/**/*.sh
```
### PHPStan/PHPCS Not Found
```bash
@@ -246,29 +225,12 @@ make install
composer global require "squizlabs/php_codesniffer:^3.0" phpstan/phpstan
```
### Pre-commit Hook Fails
```bash
# Run manually to see details
./scripts/git/pre-commit.sh
# Quick mode (skip some checks)
./scripts/git/pre-commit.sh --quick
# Skip quality checks
./scripts/git/pre-commit.sh --skip-quality
# Bypass hook (not recommended)
git commit --no-verify
```
### CI Workflow Fails
1. Check the workflow logs in GitHub Actions
2. Run the same checks locally:
2. Run validation locally:
```bash
./scripts/validate/manifest.sh
./scripts/validate/php_syntax.sh
make validate-required
make quality
```
@@ -326,7 +288,6 @@ make test
```bash
# Setup
make dev-setup # Initial setup
./scripts/git/install-hooks.sh # Install hooks
# Development
make validate-required # Quick validation
@@ -342,7 +303,6 @@ make fix-permissions # Fix script permissions
# Help
make help # Show all commands
./scripts/run/validate_all.sh --help # Script help
```
---

View File

@@ -69,7 +69,6 @@ moko-cassiopeia/
├── src/ # Template source code
│ ├── templates/ # Joomla template files
│ └── media/ # Assets (CSS, JS, images)
├── scripts/ # Build and automation scripts
├── tests/ # Automated tests
└── .github/ # GitHub configuration and workflows
```

View File

@@ -219,7 +219,7 @@ dev/X.Y.Z → rc/X.Y.Z → version/X.Y.Z → main
**How to run:**
1. Go to Actions → Repo Health
2. Click "Run workflow"
3. Select profile (all/release/scripts/repo)
3. Select profile (all/release/repo)
4. Click "Run workflow"
**Profiles:**
@@ -244,8 +244,7 @@ git checkout dev/X.Y.Z
vim src/templates/index.php
# 4. Validate locally
./scripts/validate/php_syntax.sh
./scripts/validate/manifest.sh
make validate-required
# 5. Commit and push
git add -A
@@ -257,23 +256,19 @@ git push origin dev/X.Y.Z
```bash
# Run comprehensive validation suite
./scripts/run/validate_all.sh
make validate-required
# Run with verbose output
./scripts/run/validate_all.sh -v
# Run smoke tests
./scripts/run/smoke_test.sh
# Run quality checks
make quality
```
### Creating a Release Package
```bash
# Package with auto-detected version
./scripts/release/package_extension.sh
# Package with specific version
./scripts/release/package_extension.sh dist 03.05.00
```bash
# Package with auto-detected version
make package
# Verify package contents
unzip -l dist/moko-cassiopeia-*.zip
@@ -283,21 +278,12 @@ unzip -l dist/moko-cassiopeia-*.zip
```bash
# Via GitHub Actions (recommended)
# Actions → Create version branch
# Or manually with scripts
./scripts/fix/versions.sh 03.05.00
# Actions → Release Management workflow
```
### Updating CHANGELOG
```bash
# Add new version entry
./scripts/release/update_changelog.sh 03.05.00
# Update release dates
./scripts/release/update_dates.sh 2025-01-15 03.05.00
```
Update CHANGELOG.md manually or via pull request following the existing format.
## Troubleshooting
@@ -309,39 +295,26 @@ unzip -l dist/moko-cassiopeia-*.zip
# Check specific file
php -l src/templates/index.php
# Run validation script
./scripts/validate/php_syntax.sh
# Run validation
make validate-required
```
#### Manifest Validation Failed
```bash
# Validate manifest XML
./scripts/validate/manifest.sh
# Check XML well-formedness
./scripts/validate/xml_wellformed.sh
# Validate manifest and XML files
make validate-required
```
#### Version Alignment Issues
```bash
# Check version in manifest matches CHANGELOG
./scripts/validate/version_alignment.sh
# Fix versions
./scripts/fix/versions.sh 03.05.00
# Check version consistency
make validate-required
```
### Workflow Failures
#### "Permission denied" on scripts
```bash
# Fix script permissions
chmod +x scripts/**/*.sh
```
#### "Branch already exists"
```bash
@@ -454,7 +427,6 @@ phpcs --standard=phpcs.xml --report=source src/
- [Main README](../README.md) - Project overview
- [Joomla Development Guide](./JOOMLA_DEVELOPMENT.md) - Testing and quality
- [Scripts README](../scripts/README.md) - Script documentation
- [CHANGELOG](../CHANGELOG.md) - Version history
- [CONTRIBUTING](../CONTRIBUTING.md) - Contribution guidelines

View File

@@ -1,583 +0,0 @@
<!-- Copyright (C) 2025 Moko Consulting <hello@mokoconsulting.tech>
This file is part of a Moko Consulting project.
SPDX-License-Identifier: GPL-3.0-or-later
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see https://www.gnu.org/licenses/ .
FILE INFORMATION
DEFGROUP: Moko-Cassiopeia.Documentation
INGROUP: Scripts.Documentation
REPO: https://github.com/mokoconsulting-tech/moko-cassiopeia
FILE: ./scripts/ENTERPRISE.md
VERSION: 01.00.00
BRIEF: Enterprise-grade scripting standards and best practices
-->
# Enterprise Standards for Scripts
This document defines the enterprise-grade standards and best practices
implemented across all automation scripts in this repository.
## Table of Contents
- [Overview](#overview)
- [Core Principles](#core-principles)
- [Script Structure](#script-structure)
- [Error Handling](#error-handling)
- [Logging and Observability](#logging-and-observability)
- [Security Standards](#security-standards)
- [Dependency Management](#dependency-management)
- [Exit Codes](#exit-codes)
- [Documentation Requirements](#documentation-requirements)
- [Testing and Validation](#testing-and-validation)
- [Operational Considerations](#operational-considerations)
## Overview
All scripts in this repository follow enterprise-grade standards to ensure:
- **Reliability**: Predictable behavior in all environments
- **Security**: Protection against vulnerabilities and credential exposure
- **Observability**: Clear logging and error reporting
- **Maintainability**: Consistent patterns and documentation
- **Portability**: Cross-platform compatibility
## Core Principles
### 1. Fail Fast, Fail Clearly
Scripts must fail immediately when encountering errors and provide clear,
actionable error messages.
```bash
set -euo pipefail # Required at top of all bash scripts
```
- `-e`: Exit on first error
- `-u`: Exit on undefined variable reference
- `-o pipefail`: Propagate pipeline failures
### 2. Zero Assumptions
- Always validate inputs
- Check for required dependencies
- Verify file/directory existence before access
- Never assume environment state
### 3. Idempotency Where Possible
Scripts should be safe to run multiple times without causing harm or
inconsistency.
### 4. Least Privilege
Scripts should:
- Never require root unless absolutely necessary
- Use minimal file system permissions
- Validate before modifying files
## Script Structure
### Standard Header Template
Every script must include:
```bash
#!/usr/bin/env bash
# ============================================================================
# Copyright (C) 2025 Moko Consulting <hello@mokoconsulting.tech>
#
# This file is part of a Moko Consulting project.
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
# [Full license text...]
# ============================================================================
# ============================================================================
# FILE INFORMATION
# ============================================================================
# DEFGROUP: Script.Category
# INGROUP: Subcategory
# REPO: https://github.com/mokoconsulting-tech/moko-cassiopeia
# PATH: /scripts/path/to/script.sh
# VERSION: XX.XX.XX
# BRIEF: One-line description of script purpose
# NOTE: Additional context or usage notes
# ============================================================================
set -euo pipefail
```
### Usage Function
User-facing scripts must provide a usage/help function:
```bash
usage() {
cat <<-USAGE
Usage: $0 [OPTIONS] <ARGS>
Description of what the script does.
Options:
-h, --help Show this help message
-v, --verbose Enable verbose output
Arguments:
ARG1 Description of first argument
ARG2 Description of second argument
Examples:
$0 example_value
$0 -v example_value
Exit codes:
0 - Success
1 - Error
2 - Invalid arguments
USAGE
exit 0
}
```
### Argument Parsing
```bash
# Parse arguments
if [ "${1:-}" = "-h" ] || [ "${1:-}" = "--help" ]; then
usage
fi
[ $# -ge 1 ] || usage
```
### Library Sourcing
```bash
SCRIPT_DIR="$(cd "$(dirname "$0")/.." && pwd)"
. "${SCRIPT_DIR}/lib/common.sh"
# Check dependencies
check_dependencies python3 git
```
## Error Handling
### Error Messages
Error messages must be:
- **Clear**: Explain what went wrong
- **Actionable**: Tell user how to fix it
- **Contextual**: Include relevant details
- **Verbose**: Provide comprehensive information by default
```bash
# Bad
die "Error"
# Good - Verbose with context and solutions
die "Required file not found: ${CONFIG_FILE}
Current directory: $(pwd)
Expected location: ./config/${CONFIG_FILE}
To fix:
1. Run setup script: ./scripts/setup.sh
2. Or create the file manually: touch config/${CONFIG_FILE}
"
```
### Error Output
- Always show full error output for failed operations
- Include line numbers and file paths
- Show error summaries with troubleshooting steps
- Provide installation guides for missing dependencies
Example verbose error from validation:
```
ERROR: PHP syntax validation failed
Files checked: 90
Files with errors: 2
Failed files and errors:
File: src/test.php
Error: Parse error: syntax error, unexpected '}' in src/test.php on line 42
File: src/helper.php
Error: Parse error: syntax error, unexpected T_STRING in src/helper.php on line 15
To fix: Review and correct the syntax errors in the files listed above.
Run 'php -l <filename>' on individual files for detailed error messages.
```
### Validation
```bash
# Validate inputs
validate_version() {
local v="$1"
if ! printf '%s' "$v" | grep -Eq '^[0-9]+\.[0-9]+\.[0-9]+$'; then
die "Invalid version format: $v (expected X.Y.Z)"
fi
}
# Check file existence
assert_file_exists "${MANIFEST}" || die "Manifest not found: ${MANIFEST}"
# Verify directory
assert_dir_exists "${SRC_DIR}" || die "Source directory missing: ${SRC_DIR}"
```
## Logging and Observability
### Logging Functions
Use standard logging functions from `lib/common.sh`:
```bash
log_info "Starting process..." # Informational messages
log_warn "Configuration missing" # Warnings (non-fatal)
log_error "Validation failed" # Errors (fatal)
die "Critical error occurred" # Fatal with exit
```
### Timestamps
Include timestamps for audit trails:
```bash
log_info "Start time: $(log_timestamp)"
# ... work ...
log_info "End time: $(log_timestamp)"
```
### Structured Output
For machine-readable output, use JSON:
```bash
printf '{"status":"ok","files_checked":%s}\n' "${count}"
```
### Progress Reporting
For long-running operations:
```bash
log_section "Phase 1: Validation"
log_step "Checking manifests..."
log_success "✓ Manifests valid"
log_kv "Files processed" "${count}"
```
## Security Standards
### 1. No Hardcoded Secrets
- Never commit credentials
- Use environment variables for sensitive data
- Validate against secret patterns
### 2. Input Sanitization
```bash
# Validate user input
if [[ "${input}" =~ [^a-zA-Z0-9._-] ]]; then
die "Invalid input: contains disallowed characters"
fi
```
### 3. File Operations
```bash
# Use explicit paths
FILE="/full/path/to/file"
# Avoid user-controlled paths without validation
# Validate before rm/mv operations
```
### 4. Command Injection Prevention
```bash
# Use arrays for command arguments
args=("$file1" "$file2")
command "${args[@]}"
# Quote all variables
grep "${pattern}" "${file}"
```
## Dependency Management
### Required Dependencies Check
```bash
# At script start
check_dependencies python3 git sed
# Or inline
require_cmd xmllint || die "xmllint not available"
```
### Graceful Degradation
When optional dependencies are missing:
```bash
if ! command -v php >/dev/null 2>&1; then
log_warn "PHP not available, skipping syntax check"
exit 0
fi
```
## Exit Codes
Standard exit codes across all scripts:
| Code | Meaning | Usage |
|------|---------|-------|
| 0 | Success | All operations completed successfully |
| 1 | Error | Fatal error occurred |
| 2 | Invalid arguments | Bad command-line arguments or usage |
```bash
# Success
exit 0
# Fatal error
die "Error message" # Exits with code 1
# Invalid arguments
usage # Exits with code 0 (help shown)
# or
log_error "Invalid argument"
exit 2
```
## Documentation Requirements
### 1. Script Headers
Must include:
- Copyright notice
- SPDX license identifier
- FILE INFORMATION section
- Version number
- Brief description
### 2. Inline Comments
Use comments for:
- Complex logic explanation
- Why decisions were made (not what code does)
- Security considerations
- Performance notes
```bash
# Use git ls-files for performance vs. find
files=$(git ls-files '*.yml' '*.yaml')
# NOTE: Binary detection prevents corrupting image files
if file --mime-type "$f" | grep -q '^application/'; then
continue
fi
```
### 3. README Documentation
Update `scripts/README.md` when:
- Adding new scripts
- Changing script behavior
- Adding new library functions
## Testing and Validation
### Self-Testing
Scripts should validate their own requirements:
```bash
# Validate environment
[ -d "${SRC_DIR}" ] || die "Source directory not found"
# Validate configuration
[ -n "${VERSION}" ] || die "VERSION must be set"
```
### Script Health Checking
Use the script health checker to validate all scripts follow standards:
```bash
./scripts/run/script_health.sh # Check all scripts
./scripts/run/script_health.sh -v # Verbose mode with details
```
The health checker validates:
- Copyright headers present
- SPDX license identifiers
- FILE INFORMATION sections
- Error handling (set -euo pipefail)
- Executable permissions
### Integration Testing
Run validation suite before commits:
```bash
./scripts/run/validate_all.sh
```
### Smoke Testing
Basic health checks:
```bash
./scripts/run/smoke_test.sh
```
## Operational Considerations
### 1. Timeout Handling
For long-running operations:
```bash
run_with_timeout 300 long_running_command
```
### 2. Cleanup
Use traps for cleanup:
```bash
cleanup() {
rm -f "${TEMP_FILE}"
}
trap cleanup EXIT
```
### 3. Lock Files
For singleton operations:
```bash
LOCK_FILE="/tmp/script.lock"
if [ -f "${LOCK_FILE}" ]; then
die "Script already running (lock file exists)"
fi
touch "${LOCK_FILE}"
trap "rm -f ${LOCK_FILE}" EXIT
```
### 4. Signal Handling
```bash
handle_interrupt() {
log_warn "Interrupted by user"
cleanup
exit 130
}
trap handle_interrupt INT TERM
```
### 5. Dry Run Mode
For destructive operations:
```bash
DRY_RUN="${DRY_RUN:-false}"
if [ "${DRY_RUN}" = "true" ]; then
log_info "DRY RUN: Would execute: $command"
else
"$command"
fi
```
## CI/CD Integration
### Environment Variables
Scripts should respect:
```bash
CI="${CI:-false}" # Running in CI
VERBOSE="${VERBOSE:-false}" # Verbose output
DEBUG="${DEBUG:-false}" # Debug mode
```
### CI-Specific Behavior
```bash
if is_ci; then
# CI-specific settings
set -x # Echo commands for debugging
fi
```
### Job Summaries
For GitHub Actions:
```bash
if [ -n "${GITHUB_STEP_SUMMARY:-}" ]; then
echo "### Validation Results" >> "$GITHUB_STEP_SUMMARY"
echo "Status: PASSED" >> "$GITHUB_STEP_SUMMARY"
fi
```
## Review Checklist
Before committing new or modified scripts:
- [ ] Includes proper copyright header
- [ ] Uses `set -euo pipefail`
- [ ] Has usage/help function (if user-facing)
- [ ] Validates all inputs
- [ ] Checks dependencies with `check_dependencies`
- [ ] Uses structured logging (`log_info`, `log_error`, etc.)
- [ ] Includes timestamps for audit trails
- [ ] Returns appropriate exit codes (0=success, 1=error, 2=invalid args)
- [ ] Includes inline comments for complex logic
- [ ] Documented in scripts/README.md
- [ ] Tested locally
- [ ] Passes `./scripts/run/script_health.sh`
- [ ] Passes all validation checks (`./scripts/run/validate_all.sh`)
- [ ] Passes `shellcheck` (if available)
Quick validation command:
```bash
# Run all checks
./scripts/run/script_health.sh && ./scripts/run/validate_all.sh
```
## Version History
| Version | Date | Description |
| ------- | ---------- | ----------- |
| 01.00.00 | 2025-01-03 | Initial enterprise standards documentation |
## Metadata
- **Document:** scripts/ENTERPRISE.md
- **Repository:** https://github.com/mokoconsulting-tech/moko-cassiopeia
- **Version:** 01.00.00
- **Status:** Active

View File

@@ -1,75 +0,0 @@
# Scripts Documentation
All automation scripts for the moko-cassiopeia project are written in Python for cross-platform compatibility and support both Joomla and Dolibarr extensions.
## Quick Reference
```bash
# Run all validations
make validate
python3 scripts/run/validate_all.py
# Run specific validations
python3 scripts/validate/manifest.py
python3 scripts/validate/xml_wellformed.py
# Create distribution package (auto-detects Joomla or Dolibarr)
make package
python3 scripts/release/package_extension.py dist
```
## Platform Support
All scripts automatically detect and support:
- **Joomla Extensions**: Templates, Components, Modules, Plugins, Packages
- **Dolibarr Modules**: Automatic detection and packaging
## Available Scripts
### Validation Scripts (`scripts/validate/`)
- `manifest.py` - Validate extension manifests (Joomla/Dolibarr)
- `xml_wellformed.py` - Validate XML syntax
- `workflows.py` - Validate GitHub Actions workflows
- `tabs.py` - Check for tab characters in YAML
- `no_secrets.py` - Scan for secrets/credentials
- `paths.py` - Check for Windows-style paths
- `php_syntax.py` - Validate PHP syntax
### Release Scripts (`scripts/release/`)
- `package_extension.py` - Create distributable ZIP packages
### Run Scripts (`scripts/run/`)
- `validate_all.py` - Run all validation scripts
- `scaffold_extension.py` - Create new extension scaffolding
### Library Scripts (`scripts/lib/`)
- `common.py` - Common utilities
- `joomla_manifest.py` - Joomla manifest parsing
- `dolibarr_manifest.py` - Dolibarr module parsing
- `extension_utils.py` - Unified extension detection
## Requirements
- Python 3.6+
- Git
- PHP (for PHP syntax validation)
## Migration from Shell Scripts
All shell scripts have been converted to Python. Use Python equivalents:
```bash
# Old (removed) # New
./scripts/validate/manifest.sh → python3 scripts/validate/manifest.py
./scripts/release/package.sh → python3 scripts/release/package_extension.py
```
For detailed documentation, see individual script help:
```bash
python3 scripts/validate/manifest.py --help
python3 scripts/release/package_extension.py --help
```
## License
GPL-3.0-or-later - See [LICENSE](../LICENSE)

View File

@@ -1,452 +0,0 @@
#!/usr/bin/env python3
"""
Common utilities for Moko-Cassiopeia scripts.
Copyright (C) 2025 Moko Consulting <hello@mokoconsulting.tech>
This file is part of a Moko Consulting project.
SPDX-License-Identifier: GPL-3.0-or-later
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program (./LICENSE.md).
FILE INFORMATION
DEFGROUP: Script.Library
INGROUP: Common
REPO: https://github.com/mokoconsulting-tech/moko-cassiopeia
PATH: /scripts/lib/common.py
VERSION: 01.00.00
BRIEF: Unified shared Python utilities for all CI and local scripts
"""
import json
import os
import shutil
import sys
from pathlib import Path
from typing import Any, Dict, List, Optional, Union
import subprocess
import traceback
# ============================================================================
# Environment and Detection
# ============================================================================
def is_ci() -> bool:
    """Return True when the CI environment variable equals "true" (case-insensitive)."""
    ci_flag = os.environ.get("CI", "")
    return ci_flag.lower() == "true"


def require_cmd(command: str) -> None:
    """
    Verify that ``command`` is resolvable on PATH.

    Args:
        command: Executable name to look up.

    Raises:
        SystemExit: When the executable cannot be found.
    """
    located = shutil.which(command)
    if located is None:
        log_error(f"Required command not found: {command}")
        sys.exit(1)
# ============================================================================
# Logging
# ============================================================================
class Colors:
    """ANSI escape sequences for colored terminal output."""
    RED = '\033[0;31m'
    GREEN = '\033[0;32m'
    YELLOW = '\033[1;33m'
    BLUE = '\033[0;34m'
    CYAN = '\033[0;36m'
    BOLD = '\033[1m'
    NC = '\033[0m'  # reset / no color
    @classmethod
    def enabled(cls) -> bool:
        """Colors are on only when stdout is a TTY and NO_COLOR is unset.

        NOTE(review): the TTY check looks at stdout even for messages
        that are written to stderr — confirm that is intentional.
        """
        if os.environ.get("NO_COLOR") is not None:
            return False
        return sys.stdout.isatty()
def _paint(text: str, color: str) -> str:
    """Wrap ``text`` in ``color`` codes when coloring is enabled."""
    if Colors.enabled():
        return f"{color}{text}{Colors.NC}"
    return text
def log_info(message: str) -> None:
    """Print a plain informational line to stdout."""
    print(f"INFO: {message}")
def log_warn(message: str) -> None:
    """Print a warning line to stderr (yellow on a TTY)."""
    print(_paint(f"WARN: {message}", Colors.YELLOW), file=sys.stderr)
def log_error(message: str) -> None:
    """Print an error line to stderr (red on a TTY)."""
    print(_paint(f"ERROR: {message}", Colors.RED), file=sys.stderr)
def log_success(message: str) -> None:
    """Print a success line to stdout (green on a TTY)."""
    print(_paint(message, Colors.GREEN))
def log_step(message: str) -> None:
    """Print a progress-step line to stdout (cyan on a TTY)."""
    print(_paint(message, Colors.CYAN))
def log_section(title: str) -> None:
    """Print a banner-style section header to stdout."""
    bar = "=" * 60
    print()
    print(bar)
    print(title)
    print(bar)
def log_kv(key: str, value: str) -> None:
    """Print an indented key/value line to stdout."""
    print(f"  {key}: {value}")
def die(message: str, exit_code: int = 1) -> None:
    """
    Log an error message and terminate the process.

    Args:
        message: Error message, printed via log_error to stderr.
        exit_code: Process exit code (default: 1).

    Raises:
        SystemExit: Always, with ``exit_code``.
    """
    log_error(message)
    # VERBOSE_ERRORS defaults to "true": dump a stack trace plus a short
    # environment summary to stderr to ease debugging (e.g. in CI logs).
    if os.environ.get("VERBOSE_ERRORS", "true").lower() == "true":
        print("", file=sys.stderr)
        print("Stack trace:", file=sys.stderr)
        traceback.print_stack(file=sys.stderr)
        print("", file=sys.stderr)
        print("Environment:", file=sys.stderr)
        print(f"  PWD: {os.getcwd()}", file=sys.stderr)
        # USER may be unset (e.g. in containers); fall back to 'unknown'.
        print(f"  USER: {os.environ.get('USER', 'unknown')}", file=sys.stderr)
        print(f"  PYTHON: {sys.version}", file=sys.stderr)
        print(f"  CI: {is_ci()}", file=sys.stderr)
        print("", file=sys.stderr)
    sys.exit(exit_code)
# ============================================================================
# Validation Helpers
# ============================================================================
def assert_file_exists(path: Union[str, Path]) -> None:
    """
    Exit via die() unless ``path`` names an existing regular file.

    Args:
        path: Path to check.

    Raises:
        SystemExit: When the file does not exist.
    """
    if Path(path).is_file():
        return
    die(f"Required file missing: {path}")
def assert_dir_exists(path: Union[str, Path]) -> None:
    """
    Exit via die() unless ``path`` names an existing directory.

    Args:
        path: Path to check.

    Raises:
        SystemExit: When the directory does not exist.
    """
    if Path(path).is_dir():
        return
    die(f"Required directory missing: {path}")
def assert_not_empty(value: Any, name: str) -> None:
    """
    Exit via die() when ``value`` is falsy (None, "", 0, empty collection).

    Args:
        value: Value to check for truthiness.
        name: Human-readable name used in the error message.

    Raises:
        SystemExit: When the value is falsy.
    """
    if value:
        return
    die(f"Required value is empty: {name}")
# ============================================================================
# JSON Utilities
# ============================================================================
def json_escape(text: str) -> str:
    """
    Escape a string for embedding inside a JSON document.

    Returns the JSON-encoded form of ``text`` with the surrounding
    double quotes stripped off.
    """
    encoded = json.dumps(text)
    return encoded[1:-1]
def json_output(data: Dict[str, Any], pretty: bool = False) -> None:
    """
    Print ``data`` as JSON on stdout.

    Args:
        data: Dictionary to serialize.
        pretty: When True, indent with 2 spaces and sort keys;
            otherwise emit the most compact representation.
    """
    if pretty:
        rendered = json.dumps(data, indent=2, sort_keys=True)
    else:
        rendered = json.dumps(data, separators=(',', ':'))
    print(rendered)
# ============================================================================
# Path Utilities
# ============================================================================
def script_root() -> Path:
    """Return the scripts/ root directory (this module lives in scripts/lib/)."""
    here = Path(__file__)
    return here.parent.parent
def repo_root() -> Path:
    """Return the repository root, i.e. the parent of the scripts/ directory."""
    return script_root().parent
def normalize_path(path: Union[str, Path]) -> str:
    """
    Normalize a path to an absolute, resolved, forward-slash string.

    Args:
        path: Path to normalize.

    Returns:
        Normalized path string (backslashes replaced for Windows).
    """
    resolved = Path(path).resolve()
    return str(resolved).replace("\\", "/")
# ============================================================================
# File Operations
# ============================================================================
def read_file(path: Union[str, Path], encoding: str = "utf-8") -> str:
    """
    Return the full contents of a text file.

    Exits (via assert_file_exists/die) when the file is missing.

    Args:
        path: Path to the file.
        encoding: Text encoding (default UTF-8).
    """
    assert_file_exists(path)
    return Path(path).read_text(encoding=encoding)
def write_file(path: Union[str, Path], content: str, encoding: str = "utf-8") -> None:
    """
    Write ``content`` to ``path``, replacing any existing file.

    Args:
        path: Destination file path.
        content: Text to write.
        encoding: Text encoding (default UTF-8).
    """
    target = Path(path)
    target.write_text(content, encoding=encoding)
def ensure_dir(path: Union[str, Path]) -> None:
    """Create ``path`` (and missing parents); existing directories are fine."""
    directory = Path(path)
    directory.mkdir(parents=True, exist_ok=True)
# ============================================================================
# Command Execution
# ============================================================================
def run_command(
    cmd: List[str],
    capture_output: bool = True,
    check: bool = True,
    cwd: Optional[Union[str, Path]] = None,
    env: Optional[Dict[str, str]] = None
) -> subprocess.CompletedProcess:
    """
    Execute a command given as an argument vector.

    Args:
        cmd: Command and arguments.
        capture_output: Capture stdout/stderr as text.
        check: Raise CalledProcessError on non-zero exit.
        cwd: Working directory, or None for the current one.
        env: Environment mapping, or None to inherit.

    Returns:
        The subprocess.CompletedProcess for the run.
    """
    options = {
        "capture_output": capture_output,
        "text": True,
        "check": check,
        "cwd": cwd,
        "env": env,
    }
    return subprocess.run(cmd, **options)
def run_shell(
    script: str,
    capture_output: bool = True,
    check: bool = True,
    cwd: Optional[Union[str, Path]] = None
) -> subprocess.CompletedProcess:
    """
    Execute a shell command line via the system shell.

    Args:
        script: Shell command string.
        capture_output: Capture stdout/stderr as text.
        check: Raise CalledProcessError on non-zero exit.
        cwd: Working directory, or None for the current one.

    Returns:
        The subprocess.CompletedProcess for the run.
    """
    options = {
        "shell": True,
        "capture_output": capture_output,
        "text": True,
        "check": check,
        "cwd": cwd,
    }
    return subprocess.run(script, **options)
# ============================================================================
# Git Utilities
# ============================================================================
def git_root() -> Path:
    """
    Return the top-level directory of the enclosing git repository.

    Raises:
        subprocess.CalledProcessError: When not inside a git work tree.
    """
    completed = run_command(
        ["git", "rev-parse", "--show-toplevel"],
        capture_output=True,
        check=True
    )
    return Path(completed.stdout.strip())
def git_status(porcelain: bool = True) -> str:
    """
    Return `git status` output.

    Args:
        porcelain: Use the machine-readable --porcelain format.
    """
    args = ["git", "status"]
    if porcelain:
        args.append("--porcelain")
    return run_command(args, capture_output=True, check=True).stdout
def git_branch() -> str:
    """Return the short name of the currently checked-out git branch."""
    completed = run_command(
        ["git", "rev-parse", "--abbrev-ref", "HEAD"],
        capture_output=True,
        check=True
    )
    return completed.stdout.strip()
# ============================================================================
# Main Entry Point (for testing)
# ============================================================================
def main() -> None:
    """Smoke-test the common utilities by exercising each logger and helper.

    NOTE(review): git_root()/git_branch() run `git` with check=True, so this
    raises when executed outside a git checkout — confirm that is acceptable
    for the intended manual usage.
    """
    log_section("Testing Common Utilities")
    # One call per log level to visually verify formatting/colors.
    log_info("This is an info message")
    log_warn("This is a warning message")
    log_success("This is a success message")
    log_step("This is a step message")
    log_section("Environment")
    log_kv("CI", str(is_ci()))
    log_kv("Script Root", str(script_root()))
    log_kv("Repo Root", str(repo_root()))
    log_kv("Git Root", str(git_root()))
    log_kv("Git Branch", git_branch())
    log_section("Tests Passed")

View File

@@ -1,356 +0,0 @@
#!/usr/bin/env python3
"""
Extension utilities for Joomla and Dolibarr.
Copyright (C) 2025 Moko Consulting <hello@mokoconsulting.tech>
This file is part of a Moko Consulting project.
SPDX-License-Identifier: GPL-3.0-or-later
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program (./LICENSE.md).
FILE INFORMATION
DEFGROUP: Script.Library
INGROUP: Extension.Utils
REPO: https://github.com/mokoconsulting-tech/moko-cassiopeia
PATH: /scripts/lib/extension_utils.py
VERSION: 01.00.00
BRIEF: Platform-aware extension utilities for Joomla and Dolibarr
"""
import re
import xml.etree.ElementTree as ET
from dataclasses import dataclass
from enum import Enum
from pathlib import Path
from typing import Optional, Union
class Platform(Enum):
    """Extension platforms this tooling can detect and parse."""
    JOOMLA = "joomla"
    DOLIBARR = "dolibarr"
    UNKNOWN = "unknown"
@dataclass
class ExtensionInfo:
    """Metadata describing a detected extension.

    The first five fields are always populated by the parsers; the
    optional fields are filled only when the manifest/descriptor
    provides them (Dolibarr descriptors never set author_email/license).
    """
    platform: Platform          # which platform the manifest belongs to
    name: str                   # extension/module name
    version: str                # version string from the manifest
    extension_type: str         # e.g. "template", "component", "module"
    manifest_path: Path         # path to the parsed manifest/descriptor
    description: Optional[str] = None
    author: Optional[str] = None
    author_email: Optional[str] = None
    license: Optional[str] = None
def detect_joomla_manifest(src_dir: Union[str, Path]) -> Optional[Path]:
    """
    Locate a Joomla manifest XML inside ``src_dir``.

    Well-known filename patterns are tried first at the top level, then
    one and two directory levels down. As a last resort, any parseable
    XML whose root element is <extension> is accepted.

    Args:
        src_dir: Directory to search.

    Returns:
        Path of the first manifest found, or None.
    """
    root_dir = Path(src_dir)
    known_patterns = (
        "templateDetails.xml",
        "pkg_*.xml",
        "com_*.xml",
        "mod_*.xml",
        "plg_*.xml",
    )
    # Depth-limited search: same directory, then one and two levels down.
    for pattern in known_patterns:
        for depth_prefix in ("", "*/", "*/*/"):
            hits = list(root_dir.glob(depth_prefix + pattern))
            if hits:
                return hits[0]
    # Fallback: any XML file whose document root is <extension>.
    for xml_file in root_dir.rglob("*.xml"):
        if xml_file.name.startswith("."):
            continue
        try:
            document_root = ET.parse(xml_file).getroot()
        except Exception:
            continue
        if document_root.tag == "extension":
            return xml_file
    return None
def detect_dolibarr_manifest(src_dir: Union[str, Path]) -> Optional[Path]:
    """
    Locate a Dolibarr module descriptor under ``src_dir``.

    A descriptor is a ``mod*.class.php`` file whose contents declare a
    class extending DolibarrModules.

    Args:
        src_dir: Directory to search.

    Returns:
        Path of the first matching descriptor, or None.
    """
    root_dir = Path(src_dir)
    search_patterns = (
        "core/modules/mod*.class.php",  # canonical Dolibarr layout
        "*/modules/mod*.class.php",
        "mod*.class.php",
    )
    inherits_dolibarr = re.compile(r'extends\s+DolibarrModules')
    for pattern in search_patterns:
        for candidate in root_dir.glob(pattern):
            try:
                source = candidate.read_text(encoding="utf-8")
            except Exception:
                continue
            # Only accept files that actually define a Dolibarr module class.
            if inherits_dolibarr.search(source):
                return candidate
    return None
def parse_joomla_manifest(manifest_path: Path) -> Optional[ExtensionInfo]:
    """
    Build ExtensionInfo from a Joomla manifest XML file.

    Args:
        manifest_path: Path to the manifest file.

    Returns:
        ExtensionInfo, or None when the file is not parseable XML or
        its document root is not <extension>.
    """
    try:
        root = ET.parse(manifest_path).getroot()
        if root.tag != "extension":
            return None

        def child_text(tag: str, default: Optional[str] = None) -> Optional[str]:
            # Text of the first <tag> child, or the given default when
            # the element is absent (an empty element still yields None).
            elem = root.find(tag)
            return elem.text if elem is not None else default

        return ExtensionInfo(
            platform=Platform.JOOMLA,
            name=child_text("name", "unknown"),
            version=child_text("version", "0.0.0"),
            extension_type=root.get("type", "unknown"),
            manifest_path=manifest_path,
            description=child_text("description"),
            author=child_text("author"),
            author_email=child_text("authorEmail"),
            license=child_text("license"),
        )
    except Exception:
        return None
def parse_dolibarr_descriptor(descriptor_path: Path) -> Optional[ExtensionInfo]:
    """
    Build ExtensionInfo from a Dolibarr module descriptor (PHP source).

    Values are scraped with regular expressions, mirroring the fields a
    descriptor assigns on ``$this``.

    Args:
        descriptor_path: Path to the mod*.class.php file.

    Returns:
        ExtensionInfo, or None when the file cannot be read/parsed.
    """
    try:
        source = descriptor_path.read_text(encoding="utf-8")

        def grab(pattern: str) -> Optional[str]:
            # First capture group of the first match, else None.
            found = re.search(pattern, source)
            return found.group(1) if found else None

        # The class extending DolibarrModules names the module; fall back
        # to the first class declaration of any kind.
        name = (grab(r'class\s+(\w+)\s+extends\s+DolibarrModules')
                or grab(r'class\s+(\w+)')
                or "unknown")
        version = grab(r'\$this->version\s*=\s*[\'"]([^\'"]+)[\'"]') or "0.0.0"
        description = grab(r'\$this->description\s*=\s*[\'"]([^\'"]+)[\'"]')
        author = grab(r'\$this->editor_name\s*=\s*[\'"]([^\'"]+)[\'"]')
        return ExtensionInfo(
            platform=Platform.DOLIBARR,
            name=name,
            version=version,
            extension_type="module",
            manifest_path=descriptor_path,
            description=description,
            author=author,
            author_email=None,
            license=None,
        )
    except Exception:
        return None
def get_extension_info(src_dir: Union[str, Path]) -> Optional[ExtensionInfo]:
    """
    Detect and parse extension metadata from a source directory.

    Joomla detection is attempted first, then Dolibarr.

    Args:
        src_dir: Source directory containing extension files.

    Returns:
        ExtensionInfo, or None when nothing is detected.
    """
    base = Path(src_dir)
    if not base.is_dir():
        return None
    # (detector, parser) pairs, tried in priority order.
    pipelines = (
        (detect_joomla_manifest, parse_joomla_manifest),
        (detect_dolibarr_manifest, parse_dolibarr_descriptor),
    )
    for detect, parse in pipelines:
        manifest = detect(base)
        if manifest is None:
            continue
        info = parse(manifest)
        if info is not None:
            return info
    return None
def is_joomla_extension(src_dir: Union[str, Path]) -> bool:
    """
    Report whether ``src_dir`` holds a detectable Joomla extension.

    Args:
        src_dir: Source directory to inspect.
    """
    info = get_extension_info(src_dir)
    return bool(info and info.platform == Platform.JOOMLA)
def is_dolibarr_extension(src_dir: Union[str, Path]) -> bool:
    """
    Report whether ``src_dir`` holds a detectable Dolibarr module.

    Args:
        src_dir: Source directory to inspect.
    """
    info = get_extension_info(src_dir)
    return bool(info and info.platform == Platform.DOLIBARR)
def main() -> None:
    """Smoke-test extension detection against this repository's src/ directory.

    Exits with status 1 when src/ exists but no extension is detected;
    returns quietly when src/ is missing.
    """
    import sys
    # Make the sibling common.py importable when run as a script.
    sys.path.insert(0, str(Path(__file__).parent))
    import common
    common.log_section("Testing Extension Utilities")
    # Test with current directory's src
    repo_root = common.repo_root()
    src_dir = repo_root / "src"
    if not src_dir.is_dir():
        common.log_warn(f"Source directory not found: {src_dir}")
        return
    ext_info = get_extension_info(src_dir)
    if ext_info:
        common.log_success("Extension detected!")
        common.log_kv("Platform", ext_info.platform.value.upper())
        common.log_kv("Name", ext_info.name)
        common.log_kv("Version", ext_info.version)
        common.log_kv("Type", ext_info.extension_type)
        common.log_kv("Manifest", str(ext_info.manifest_path))
        if ext_info.description:
            # Truncate long descriptions for readable console output.
            common.log_kv("Description", ext_info.description[:60] + "...")
        if ext_info.author:
            common.log_kv("Author", ext_info.author)
    else:
        common.log_error("No extension detected")
        sys.exit(1)

View File

@@ -1,430 +0,0 @@
#!/usr/bin/env python3
"""
Joomla manifest parsing and validation utilities.
Copyright (C) 2025 Moko Consulting <hello@mokoconsulting.tech>
This file is part of a Moko Consulting project.
SPDX-License-Identifier: GPL-3.0-or-later
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program (./LICENSE.md).
FILE INFORMATION
DEFGROUP: Script.Library
INGROUP: Joomla.Manifest
REPO: https://github.com/mokoconsulting-tech/moko-cassiopeia
PATH: /scripts/lib/joomla_manifest.py
VERSION: 01.00.00
BRIEF: Joomla manifest parsing and validation utilities
"""
import xml.etree.ElementTree as ET
from pathlib import Path
from typing import Dict, List, Optional, Tuple
from dataclasses import dataclass
try:
from . import common
except ImportError:
import common
# ============================================================================
# Joomla Extension Types
# ============================================================================
class ExtensionType:
    """String constants for the Joomla extension types accepted in manifests.

    ALL_TYPES is the canonical list used by validate_manifest() to flag
    unknown type attributes.
    """
    COMPONENT = "component"
    MODULE = "module"
    PLUGIN = "plugin"
    TEMPLATE = "template"
    LIBRARY = "library"
    PACKAGE = "package"
    FILE = "file"
    LANGUAGE = "language"
    # Every recognized type, for membership checks.
    ALL_TYPES = [
        COMPONENT, MODULE, PLUGIN, TEMPLATE,
        LIBRARY, PACKAGE, FILE, LANGUAGE
    ]
# ============================================================================
# Manifest Data Class
# ============================================================================
@dataclass
class ManifestInfo:
    """Information extracted from a Joomla manifest XML file."""
    path: Path                            # manifest file location
    extension_type: str                   # "type" attribute of <extension>
    name: str                             # <name> element text
    version: str                          # <version> element text
    description: Optional[str] = None
    author: Optional[str] = None
    author_email: Optional[str] = None
    author_url: Optional[str] = None
    copyright: Optional[str] = None
    license: Optional[str] = None
    creation_date: Optional[str] = None
    def to_dict(self) -> Dict[str, str]:
        """Flatten to a plain string mapping; missing optionals become ""."""
        def text(value: Optional[str]) -> str:
            return value or ""
        return {
            "path": str(self.path),
            "ext_type": self.extension_type,
            "name": self.name,
            "version": self.version,
            "description": text(self.description),
            "author": text(self.author),
            "author_email": text(self.author_email),
            "author_url": text(self.author_url),
            "copyright": text(self.copyright),
            "license": text(self.license),
            "creation_date": text(self.creation_date)
        }
# ============================================================================
# Manifest Discovery
# ============================================================================
def find_manifest(src_dir: str = "src") -> Path:
    """
    Locate the primary Joomla manifest beneath ``src_dir``.

    Search order: templateDetails.xml at the top level, then directly
    inside templates/, then (recursively) pkg_*.xml, com_*.xml,
    mod_*.xml, plg_*.xml, and finally any XML containing an
    "<extension" tag.

    Args:
        src_dir: Source directory to search.

    Returns:
        Path to the first manifest found.

    Raises:
        SystemExit: When the directory is missing or no manifest exists.
    """
    base = Path(src_dir)
    if not base.is_dir():
        common.die(f"Source directory missing: {src_dir}")
    # Template manifest at the top level.
    top_level = base / "templateDetails.xml"
    if top_level.is_file():
        return top_level
    # Template manifest directly inside templates/.
    templates = base / "templates"
    if templates.is_dir():
        for candidate in templates.glob("templateDetails.xml"):
            return candidate
    # Package, component, module, then plugin manifests (recursive).
    for pattern in ("pkg_*.xml", "com_*.xml", "mod_*.xml", "plg_*.xml"):
        found = list(base.rglob(pattern))
        if found:
            return found[0]
    # Last resort: any readable XML mentioning an <extension element.
    for xml_file in base.rglob("*.xml"):
        try:
            if "<extension" in xml_file.read_text(encoding="utf-8"):
                return xml_file
        except Exception:
            continue
    common.die(f"No Joomla manifest XML found under {src_dir}")
def find_all_manifests(src_dir: str = "src") -> List[Path]:
    """
    Collect every Joomla manifest under ``src_dir``.

    Args:
        src_dir: Source directory to search.

    Returns:
        List of manifest paths, in pattern order (templates, packages,
        components, modules, plugins). Empty when the directory is
        missing. A file matching several patterns may appear twice.
    """
    base = Path(src_dir)
    if not base.is_dir():
        return []
    results: List[Path] = []
    for pattern in (
        "templateDetails.xml",  # template manifests
        "pkg_*.xml",            # package manifests
        "com_*.xml",            # component manifests
        "mod_*.xml",            # module manifests
        "plg_*.xml",            # plugin manifests
    ):
        results.extend(base.rglob(pattern))
    return results
# ============================================================================
# Manifest Parsing
# ============================================================================
def parse_manifest(manifest_path: Path) -> ManifestInfo:
    """
    Parse a Joomla manifest file into a ManifestInfo.

    The type attribute, <name> and <version> elements are required and
    trigger a die() (SystemExit) when absent; the remaining elements
    are optional and default to None.

    Args:
        manifest_path: Path to manifest file.

    Returns:
        ManifestInfo object.

    Raises:
        SystemExit: If the file is missing, malformed, or lacks a
            required field. (SystemExit is a BaseException, so the
            die() calls inside the try block are NOT swallowed by the
            except clauses below.)
    """
    if not manifest_path.is_file():
        common.die(f"Manifest not found: {manifest_path}")
    try:
        tree = ET.parse(manifest_path)
        root = tree.getroot()
        # Extract extension type (required attribute on the root element)
        ext_type = root.attrib.get("type", "").strip().lower()
        if not ext_type:
            common.die(f"Manifest missing type attribute: {manifest_path}")
        # Extract name (required element)
        name_elem = root.find("name")
        if name_elem is None or not name_elem.text:
            common.die(f"Manifest missing name element: {manifest_path}")
        name = name_elem.text.strip()
        # Extract version (required element)
        version_elem = root.find("version")
        if version_elem is None or not version_elem.text:
            common.die(f"Manifest missing version element: {manifest_path}")
        version = version_elem.text.strip()
        # Extract optional fields: each stays None when absent or empty.
        description = None
        desc_elem = root.find("description")
        if desc_elem is not None and desc_elem.text:
            description = desc_elem.text.strip()
        author = None
        author_elem = root.find("author")
        if author_elem is not None and author_elem.text:
            author = author_elem.text.strip()
        author_email = None
        email_elem = root.find("authorEmail")
        if email_elem is not None and email_elem.text:
            author_email = email_elem.text.strip()
        author_url = None
        url_elem = root.find("authorUrl")
        if url_elem is not None and url_elem.text:
            author_url = url_elem.text.strip()
        copyright_text = None
        copyright_elem = root.find("copyright")
        if copyright_elem is not None and copyright_elem.text:
            copyright_text = copyright_elem.text.strip()
        license_text = None
        license_elem = root.find("license")
        if license_elem is not None and license_elem.text:
            license_text = license_elem.text.strip()
        creation_date = None
        date_elem = root.find("creationDate")
        if date_elem is not None and date_elem.text:
            creation_date = date_elem.text.strip()
        return ManifestInfo(
            path=manifest_path,
            extension_type=ext_type,
            name=name,
            version=version,
            description=description,
            author=author,
            author_email=author_email,
            author_url=author_url,
            copyright=copyright_text,
            license=license_text,
            creation_date=creation_date
        )
    # Malformed XML gets a specific message; any other read error falls
    # through to the generic handler. Both exit via die().
    except ET.ParseError as e:
        common.die(f"Failed to parse manifest {manifest_path}: {e}")
    except Exception as e:
        common.die(f"Error reading manifest {manifest_path}: {e}")
def get_manifest_version(manifest_path: Path) -> str:
    """Parse ``manifest_path`` and return its version string.

    Raises:
        SystemExit: Propagated from parse_manifest on failure.
    """
    return parse_manifest(manifest_path).version
def get_manifest_name(manifest_path: Path) -> str:
    """Parse ``manifest_path`` and return its extension name.

    Raises:
        SystemExit: Propagated from parse_manifest on failure.
    """
    return parse_manifest(manifest_path).name
def get_manifest_type(manifest_path: Path) -> str:
    """Parse ``manifest_path`` and return its extension type.

    Raises:
        SystemExit: Propagated from parse_manifest on failure.
    """
    return parse_manifest(manifest_path).extension_type
# ============================================================================
# Manifest Validation
# ============================================================================
def validate_manifest(manifest_path: Path) -> Tuple[bool, List[str]]:
    """
    Validate a Joomla manifest and collect soft warnings.

    Args:
        manifest_path: Path to the manifest file.

    Returns:
        (True, warnings) when the manifest parses; each missing
        recommended field contributes one warning. (False, [message])
        when parse_manifest exits with SystemExit.
    """
    try:
        info = parse_manifest(manifest_path)
    except SystemExit:
        return (False, ["Failed to parse manifest"])
    warnings: List[str] = []
    # Recommended (non-fatal) fields and their warning messages.
    recommended = (
        (info.description, "Missing description element"),
        (info.author, "Missing author element"),
        (info.copyright, "Missing copyright element"),
        (info.license, "Missing license element"),
        (info.creation_date, "Missing creationDate element"),
    )
    for value, warning in recommended:
        if not value:
            warnings.append(warning)
    if info.extension_type not in ExtensionType.ALL_TYPES:
        warnings.append(f"Unknown extension type: {info.extension_type}")
    return (True, warnings)
# ============================================================================
# Main Entry Point (for testing)
# ============================================================================
def main() -> None:
    """Manual smoke-test: find, parse, and validate a manifest.

    Takes an optional source directory as argv[1] (default "src") and
    prints the parsed fields plus any validation warnings.
    """
    import sys
    common.log_section("Testing Joomla Manifest Utilities")
    src_dir = sys.argv[1] if len(sys.argv) > 1 else "src"
    try:
        manifest = find_manifest(src_dir)
        common.log_success(f"Found manifest: {manifest}")
        info = parse_manifest(manifest)
        common.log_section("Manifest Information")
        common.log_kv("Type", info.extension_type)
        common.log_kv("Name", info.name)
        common.log_kv("Version", info.version)
        if info.description:
            # Truncate long descriptions for readable console output.
            common.log_kv("Description", info.description[:60] + "..." if len(info.description) > 60 else info.description)
        if info.author:
            common.log_kv("Author", info.author)
        is_valid, warnings = validate_manifest(manifest)
        if is_valid:
            common.log_success("Manifest is valid")
            if warnings:
                common.log_warn(f"Warnings: {len(warnings)}")
                for warning in warnings:
                    print(f"  - {warning}")
        else:
            common.log_error("Manifest validation failed")
    # find_manifest/parse_manifest exit via die(); re-raise with the
    # same exit code so callers see the original status.
    except SystemExit as e:
        sys.exit(e.code)

View File

@@ -1,98 +0,0 @@
#!/usr/bin/env python3
"""
Detect extension platform and type.
Copyright (C) 2025 Moko Consulting <hello@mokoconsulting.tech>
This file is part of a Moko Consulting project.
SPDX-License-Identifier: GPL-3.0-or-later
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program (./LICENSE.md).
FILE INFORMATION
DEFGROUP: Script.Release
INGROUP: Extension.Detection
REPO: https://github.com/mokoconsulting-tech/moko-cassiopeia
PATH: /scripts/release/detect_platform.py
VERSION: 01.00.00
BRIEF: Detect extension platform and type for build workflow
USAGE: ./scripts/release/detect_platform.py [src_dir]
"""
import argparse
import sys
from pathlib import Path
# Add lib directory to path
sys.path.insert(0, str(Path(__file__).parent.parent / "lib"))
try:
import extension_utils
except ImportError:
print("ERROR: Cannot import extension_utils library", file=sys.stderr)
sys.exit(1)
def main() -> int:
    """
    CLI entry point: detect the extension in a source directory and
    report its platform and type.

    Output is either ``platform|ext_type`` (pipe format, the default)
    or a small JSON object (--format json).

    Returns:
        0 on success, 1 when nothing is detected or an error occurs.
    """
    parser = argparse.ArgumentParser(
        description="Detect extension platform and type",
        formatter_class=argparse.RawDescriptionHelpFormatter
    )
    parser.add_argument(
        "src_dir",
        nargs="?",
        default="src",
        help="Source directory (default: src)"
    )
    parser.add_argument(
        "--format",
        choices=["pipe", "json"],
        default="pipe",
        help="Output format (default: pipe)"
    )
    args = parser.parse_args()
    try:
        ext_info = extension_utils.get_extension_info(args.src_dir)
        if not ext_info:
            print(f"ERROR: No extension detected in {args.src_dir}", file=sys.stderr)
            return 1
        if args.format == "json":
            import json
            payload = {
                "platform": ext_info.platform.value,
                "extension_type": ext_info.extension_type,
                "name": ext_info.name,
                "version": ext_info.version
            }
            print(json.dumps(payload))
        else:
            # pipe format: platform|ext_type
            print(f"{ext_info.platform.value}|{ext_info.extension_type}")
        return 0
    except Exception as e:
        print(f"ERROR: {e}", file=sys.stderr)
        return 1

View File

@@ -1,327 +0,0 @@
#!/usr/bin/env python3
"""
Package Joomla extension as distributable ZIP.
Copyright (C) 2025 Moko Consulting <hello@mokoconsulting.tech>
This file is part of a Moko Consulting project.
SPDX-License-Identifier: GPL-3.0-or-later
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program (./LICENSE.md).
FILE INFORMATION
DEFGROUP: Script.Release
INGROUP: Extension.Packaging
REPO: https://github.com/mokoconsulting-tech/moko-cassiopeia
PATH: /scripts/release/package_extension.py
VERSION: 01.00.00
BRIEF: Package Joomla extension as distributable ZIP
USAGE: ./scripts/release/package_extension.py [output_dir] [version]
"""
import argparse
import sys
import zipfile
from datetime import datetime
from pathlib import Path
from typing import Set
# Add lib directory to path
sys.path.insert(0, str(Path(__file__).parent.parent / "lib"))
try:
import common
import extension_utils
except ImportError:
print("ERROR: Cannot import required libraries", file=sys.stderr)
sys.exit(1)
# Exclusion patterns for packaging
# Names and fnmatch-style wildcard patterns excluded from packaged ZIPs.
# Matching is per path component (see should_exclude): an entry excludes
# any file or directory whose name equals it, or matches it when the
# entry contains "*".
EXCLUDE_PATTERNS = {
    # Version control
    ".git", ".gitignore", ".gitattributes",
    # IDE
    ".vscode", ".idea", "*.sublime-*",
    # Development
    "node_modules", "vendor", ".env", ".env.*",
    # Documentation (optional, can be included)
    # Build artifacts
    "dist", "build", ".phpunit.cache",
    # Development tool caches and artifacts
    ".phpstan.cache", ".psalm", ".rector",
    "phpmd-cache", ".php-cs-fixer.cache", ".phplint-cache",
    # OS files
    ".DS_Store", "Thumbs.db",
    # Logs
    "*.log",
    # Tests
    "tests", "test", "Tests",
    # CI/CD
    ".github",
    # Scripts
    "scripts",
    # Docs (can be included if needed)
    "docs",
    # Config files
    "composer.json", "composer.lock",
    "package.json", "package-lock.json",
    "phpunit.xml", "phpstan.neon", "phpcs.xml",
    "codeception.yml", "psalm.xml", ".php-cs-fixer.php",
    # Others
    "README.md", "CHANGELOG.md", "CONTRIBUTING.md",
    "CODE_OF_CONDUCT.md", "SECURITY.md", "GOVERNANCE.md",
    "Makefile",
}
def should_exclude(path: Path, base_path: Path, exclude_patterns: Set[str]) -> bool:
    """
    Check if a path should be excluded from packaging.

    A path is excluded when any component of its path relative to
    ``base_path`` either equals an exclusion entry exactly or matches a
    wildcard entry (fnmatch-style, e.g. ``*.log``).

    Args:
        path: Path to check (must be located under ``base_path``).
        base_path: Base directory the package is built from.
        exclude_patterns: Set of exact names and fnmatch wildcard patterns.

    Returns:
        True if the path should be excluded.
    """
    import fnmatch  # hoisted out of the loop; previously re-imported per component

    # Split the pattern set once instead of re-scanning all patterns (and
    # re-running the import machinery) for every path component.
    wildcard_patterns = [p for p in exclude_patterns if "*" in p]

    for part in path.relative_to(base_path).parts:
        if part in exclude_patterns:
            return True
        if any(fnmatch.fnmatch(part, pattern) for pattern in wildcard_patterns):
            return True
    return False
def create_package(
    src_dir: str,
    output_dir: str,
    version: str = None,
    repo_name: str = None,
    exclude_patterns: Set[str] = None
) -> Path:
    """
    Create a distributable ZIP package for a Joomla or Dolibarr extension.

    Walks ``src_dir`` recursively, skips anything matching the exclusion
    patterns, and writes a timestamped ZIP into ``output_dir``. Progress
    lines and a JSON summary are printed to stdout.

    Args:
        src_dir: Source directory containing extension files.
        output_dir: Output directory for ZIP file (created if missing).
        version: Version string (auto-detected from the manifest if not provided).
        repo_name: Repository name for ZIP file naming (auto-detected from
            the git root if not provided; falls back to "extension").
        exclude_patterns: Patterns to exclude from packaging; defaults to
            the module-level EXCLUDE_PATTERNS set.

    Returns:
        Path to created ZIP file.
    """
    src_path = Path(src_dir)
    if not src_path.is_dir():
        common.die(f"Source directory not found: {src_dir}")
    # Detect extension platform (Joomla or Dolibarr) and read manifest info.
    ext_info = extension_utils.get_extension_info(src_dir)
    if not ext_info:
        common.die(f"No Joomla or Dolibarr extension found in {src_dir}")
    # Fall back to the manifest version when none was passed in.
    if not version:
        version = ext_info.version
    # Derive the archive name from the git repo root; "extension" if outside git.
    if not repo_name:
        try:
            repo_root = common.git_root()
            repo_name = repo_root.name
        except Exception:
            repo_name = "extension"
    # Determine exclusion patterns
    if exclude_patterns is None:
        exclude_patterns = EXCLUDE_PATTERNS
    # Create output directory
    output_path = Path(output_dir)
    common.ensure_dir(output_path)
    # Filename shape: <repo>-<version>-<platform>-<type>-<timestamp>.zip
    timestamp = datetime.now().strftime("%Y%m%d-%H%M%S")
    platform_suffix = f"{ext_info.platform.value}-{ext_info.extension_type}"
    zip_filename = f"{repo_name}-{version}-{platform_suffix}-{timestamp}.zip"
    zip_path = output_path / zip_filename
    # Remove a stale archive with the same name, if present.
    if zip_path.exists():
        zip_path.unlink()
    common.log_section("Creating Extension Package")
    common.log_kv("Platform", ext_info.platform.value.upper())
    common.log_kv("Extension", ext_info.name)
    common.log_kv("Type", ext_info.extension_type)
    common.log_kv("Version", version)
    common.log_kv("Source", src_dir)
    common.log_kv("Output", str(zip_path))
    print()
    # Create ZIP file
    file_count = 0
    with zipfile.ZipFile(zip_path, 'w', zipfile.ZIP_DEFLATED) as zipf:
        for item in src_path.rglob("*"):
            if item.is_file():
                # Skip anything matching the exclusion patterns.
                if should_exclude(item, src_path, exclude_patterns):
                    continue
                # Store entries relative to the source root so the archive
                # unpacks without the local directory prefix.
                arcname = item.relative_to(src_path)
                zipf.write(item, arcname)
                file_count += 1
                # Lightweight progress indicator for large extensions.
                if file_count % 10 == 0:
                    common.log_step(f"Added {file_count} files...")
    # Get ZIP file size
    zip_size = zip_path.stat().st_size
    zip_size_mb = zip_size / (1024 * 1024)
    print()
    common.log_success(f"Package created: {zip_path.name}")
    common.log_kv("Files", str(file_count))
    common.log_kv("Size", f"{zip_size_mb:.2f} MB")
    # Output JSON for machine consumption (e.g. CI pipelines)
    result = {
        "status": "ok",
        "platform": ext_info.platform.value,
        "extension": ext_info.name,
        "ext_type": ext_info.extension_type,
        "version": version,
        "package": str(zip_path),
        "files": file_count,
        "size_bytes": zip_size
    }
    print()
    common.json_output(result)
    return zip_path
def main() -> int:
    """Parse CLI arguments and build the extension package.

    Returns:
        0 on success; 1 on failure (a JSON error object is printed first).
    """
    parser = argparse.ArgumentParser(
        description="Package Joomla or Dolibarr extension as distributable ZIP",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog="""
Examples:
# Package with auto-detected version
%(prog)s
# Package to specific directory
%(prog)s dist
# Package with specific version
%(prog)s dist 1.2.3
# Package with custom source
%(prog)s --src-dir my-extension dist 1.0.0
Supports both Joomla and Dolibarr extensions with automatic platform detection.
"""
    )
    parser.add_argument(
        "output_dir",
        nargs="?",
        default="dist",
        help="Output directory for ZIP file (default: dist)"
    )
    parser.add_argument(
        "version",
        nargs="?",
        help="Version string (default: auto-detected from manifest)"
    )
    parser.add_argument(
        "-s", "--src-dir",
        default="src",
        help="Source directory (default: src)"
    )
    parser.add_argument(
        "--repo-name",
        help="Repository name for ZIP filename (default: auto-detected)"
    )
    parser.add_argument(
        "--include-docs",
        action="store_true",
        help="Include documentation files in package"
    )
    parser.add_argument(
        "--include-tests",
        action="store_true",
        help="Include test files in package"
    )
    args = parser.parse_args()
    try:
        # Start from the default exclusions and carve out the categories
        # the user explicitly asked to keep.
        exclude_patterns = EXCLUDE_PATTERNS.copy()
        if args.include_docs:
            exclude_patterns.discard("docs")
            exclude_patterns.discard("README.md")
            exclude_patterns.discard("CHANGELOG.md")
        if args.include_tests:
            exclude_patterns.discard("tests")
            exclude_patterns.discard("test")
            exclude_patterns.discard("Tests")
        # Create package
        zip_path = create_package(
            src_dir=args.src_dir,
            output_dir=args.output_dir,
            version=args.version,
            repo_name=args.repo_name,
            exclude_patterns=exclude_patterns
        )
        result = {
            "status": "success",
            "zip_path": str(zip_path)
        }
        common.json_output(result)
        return 0
    except Exception as e:
        # Report the failure both as a log line and as machine-readable JSON.
        common.log_error(f"Packaging failed: {e}")
        result = {
            "status": "error",
            "error": str(e)
        }
        common.json_output(result)
        return 1
if __name__ == "__main__":
sys.exit(main())

View File

@@ -1,69 +0,0 @@
#!/usr/bin/env bash
# Copyright (C) 2025 Moko Consulting <hello@mokoconsulting.tech>
#
# This file is part of a Moko Consulting project.
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see https://www.gnu.org/licenses/ .
#
# FILE INFORMATION
# DEFGROUP: Release
# INGROUP: Moko-Cassiopeia
# PATH: scripts/release/update_dates.sh
# VERSION: 03.05.00
# BRIEF: Normalize dates in release files
set -euo pipefail
# Usage: update_dates.sh [YYYY-MM-DD] [VERSION]
# Accept date and version as arguments
TODAY="${1:-$(date +%Y-%m-%d)}"
VERSION="${2:-unknown}"
# Validate date format (YYYY-MM-DD)
if ! [[ "${TODAY}" =~ ^[0-9]{4}-[0-9]{2}-[0-9]{2}$ ]]; then
	echo "ERROR: Invalid date format '${TODAY}'. Expected YYYY-MM-DD format."
	exit 1
fi
echo "Date normalization script running..."
echo "TODAY: ${TODAY}"
echo "VERSION: ${VERSION}"
# Escape regex metacharacters in VERSION for the *pattern* side of grep/sed.
# Escapes: ] \ / $ * . ^ [
VERSION_ESCAPED=$(printf '%s\n' "${VERSION}" | sed 's/[][\/$*.^]/\\&/g')
# Separately escape characters that are special on the *replacement* side of
# sed (& re-inserts the match, \ starts an escape, / is the delimiter).
# Reusing VERSION_ESCAPED in the replacement would write spurious backslashes
# (or expand the match via &) into the edited file.
VERSION_REPL=$(printf '%s\n' "${VERSION}" | sed 's/[\/&\\]/\\&/g')
# Update CHANGELOG.md - replace the date on the version heading line
if [ -f "CHANGELOG.md" ]; then
	# Match lines like "## [03.05.00] 2026-01-04" and update the date
	if grep -q "^## \[${VERSION_ESCAPED}\] " CHANGELOG.md; then
		sed -i "s/^## \[${VERSION_ESCAPED}\] [0-9]\{4\}-[0-9]\{2\}-[0-9]\{2\}/## [${VERSION_REPL}] ${TODAY}/" CHANGELOG.md
		echo "✓ Updated CHANGELOG.md version [${VERSION}] date to ${TODAY}"
	else
		echo "⚠ Warning: CHANGELOG.md does not contain version [${VERSION}] heading"
	fi
else
	echo "⚠ Warning: CHANGELOG.md not found"
fi
# Update src/templates/templateDetails.xml - replace the <creationDate> tag
if [ -f "src/templates/templateDetails.xml" ]; then
	sed -i "s|<creationDate>[0-9]\{4\}-[0-9]\{2\}-[0-9]\{2\}</creationDate>|<creationDate>${TODAY}</creationDate>|" src/templates/templateDetails.xml
	echo "✓ Updated src/templates/templateDetails.xml creationDate to ${TODAY}"
else
	echo "⚠ Warning: src/templates/templateDetails.xml not found"
fi
echo "Date normalization complete."

View File

@@ -1,447 +0,0 @@
#!/usr/bin/env python3
"""
Create Joomla extension scaffolding.
Copyright (C) 2025 Moko Consulting <hello@mokoconsulting.tech>
This file is part of a Moko Consulting project.
SPDX-License-Identifier: GPL-3.0-or-later
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program (./LICENSE.md).
FILE INFORMATION
DEFGROUP: Moko-Cassiopeia.Scripts
INGROUP: Scripts.Run
REPO: https://github.com/mokoconsulting-tech/moko-cassiopeia
PATH: /scripts/run/scaffold_extension.py
VERSION: 01.00.00
BRIEF: Create scaffolding for different Joomla extension types
"""
import argparse
import sys
from datetime import datetime
from pathlib import Path
from typing import Dict
# Add lib directory to path
sys.path.insert(0, str(Path(__file__).parent.parent / "lib"))
try:
import common
except ImportError:
print("ERROR: Cannot import required libraries", file=sys.stderr)
sys.exit(1)
# ============================================================================
# Templates for Extension Scaffolding
# ============================================================================
def get_component_structure(name: str, description: str, author: str) -> Dict[str, str]:
    """Return the file map (relative path -> content) for a Joomla component.

    Args:
        name: Human-readable extension name.
        description: Description placed in the manifest.
        author: Author name used in manifest and copyright lines.

    Returns:
        Mapping of relative file paths to their initial file contents.
    """
    # Joomla naming convention: component folders/manifests use a "com_" prefix.
    safe_name = name.lower().replace(" ", "_")
    com_name = f"com_{safe_name}"
    manifest = f"""<?xml version="1.0" encoding="utf-8"?>
<extension type="component" version="4.0" method="upgrade">
<name>{name}</name>
<author>{author}</author>
<creationDate>{datetime.now().strftime("%Y-%m-%d")}</creationDate>
<copyright>Copyright (C) {datetime.now().year} {author}</copyright>
<license>GPL-3.0-or-later</license>
<authorEmail>hello@example.com</authorEmail>
<authorUrl>https://example.com</authorUrl>
<version>1.0.0</version>
<description>{description}</description>
<files folder="site">
<folder>src</folder>
</files>
<administration>
<menu>{name}</menu>
<files folder="admin">
<folder>services</folder>
<folder>sql</folder>
<folder>src</folder>
</files>
</administration>
</extension>
"""
    # .gitkeep placeholders keep the otherwise-empty directories in git.
    return {
        f"{com_name}.xml": manifest,
        "site/src/.gitkeep": "",
        "admin/services/provider.php": f"<?php\n// Service provider for {name}\n",
        "admin/sql/install.mysql.utf8.sql": "-- Installation SQL\n",
        "admin/sql/uninstall.mysql.utf8.sql": "-- Uninstallation SQL\n",
        "admin/src/.gitkeep": "",
    }
def get_module_structure(name: str, description: str, author: str, client: str = "site") -> Dict[str, str]:
    """Return the file map (relative path -> content) for a Joomla module.

    Args:
        name: Human-readable extension name.
        description: Description placed in the manifest.
        author: Author name used in manifest and copyright lines.
        client: Target client, "site" or "administrator".

    Returns:
        Mapping of relative file paths to their initial file contents.
    """
    # Joomla naming convention: modules use a "mod_" prefix.
    safe_name = name.lower().replace(" ", "_")
    mod_name = f"mod_{safe_name}"
    manifest = f"""<?xml version="1.0" encoding="utf-8"?>
<extension type="module" version="4.0" client="{client}" method="upgrade">
<name>{name}</name>
<author>{author}</author>
<creationDate>{datetime.now().strftime("%Y-%m-%d")}</creationDate>
<copyright>Copyright (C) {datetime.now().year} {author}</copyright>
<license>GPL-3.0-or-later</license>
<authorEmail>hello@example.com</authorEmail>
<authorUrl>https://example.com</authorUrl>
<version>1.0.0</version>
<description>{description}</description>
<files>
<filename module="{mod_name}">{mod_name}.php</filename>
<filename>{mod_name}.xml</filename>
<folder>tmpl</folder>
</files>
</extension>
"""
    module_php = f"""<?php
/**
* @package {name}
* @copyright Copyright (C) {datetime.now().year} {author}
* @license GPL-3.0-or-later
*/
defined('_JEXEC') or die;
// Module logic here
require JModuleHelper::getLayoutPath('mod_{safe_name}', $params->get('layout', 'default'));
"""
    default_tmpl = f"""<?php
/**
* @package {name}
* @copyright Copyright (C) {datetime.now().year} {author}
* @license GPL-3.0-or-later
*/
defined('_JEXEC') or die;
?>
<div class="{mod_name}">
<p><?php echo JText::_('MOD_{safe_name.upper()}_DESCRIPTION'); ?></p>
</div>
"""
    return {
        f"{mod_name}.xml": manifest,
        f"{mod_name}.php": module_php,
        "tmpl/default.php": default_tmpl,
    }
def get_plugin_structure(name: str, description: str, author: str, group: str = "system") -> Dict[str, str]:
    """Return the file map (relative path -> content) for a Joomla plugin.

    Args:
        name: Human-readable extension name.
        description: Description placed in the manifest.
        author: Author name used in manifest and copyright lines.
        group: Plugin group (system, content, user, ...); becomes part of
            the manifest name plg_<group>_<name>.

    Returns:
        Mapping of relative file paths to their initial file contents.
    """
    safe_name = name.lower().replace(" ", "_")
    plg_name = f"{safe_name}"
    manifest = f"""<?xml version="1.0" encoding="utf-8"?>
<extension type="plugin" version="4.0" group="{group}" method="upgrade">
<name>plg_{group}_{safe_name}</name>
<author>{author}</author>
<creationDate>{datetime.now().strftime("%Y-%m-%d")}</creationDate>
<copyright>Copyright (C) {datetime.now().year} {author}</copyright>
<license>GPL-3.0-or-later</license>
<authorEmail>hello@example.com</authorEmail>
<authorUrl>https://example.com</authorUrl>
<version>1.0.0</version>
<description>{description}</description>
<files>
<filename plugin="{plg_name}">{plg_name}.php</filename>
</files>
</extension>
"""
    plugin_php = f"""<?php
/**
* @package {name}
* @copyright Copyright (C) {datetime.now().year} {author}
* @license GPL-3.0-or-later
*/
defined('_JEXEC') or die;
use Joomla\\CMS\\Plugin\\CMSPlugin;
class Plg{group.capitalize()}{plg_name.capitalize()} extends CMSPlugin
{{
protected $autoloadLanguage = true;
public function onContentPrepare($context, &$article, &$params, $limitstart = 0)
{{
// Plugin logic here
}}
}}
"""
    return {
        f"plg_{group}_{safe_name}.xml": manifest,
        f"{plg_name}.php": plugin_php,
    }
def get_template_structure(name: str, description: str, author: str) -> Dict[str, str]:
    """Return the file map (relative path -> content) for a Joomla site template.

    Args:
        name: Human-readable extension name.
        description: Description placed in the manifest.
        author: Author name used in manifest and copyright lines.

    Returns:
        Mapping of relative file paths to their initial file contents.
    """
    safe_name = name.lower().replace(" ", "_")
    manifest = f"""<?xml version="1.0" encoding="utf-8"?>
<extension type="template" version="4.0" client="site" method="upgrade">
<name>{safe_name}</name>
<creationDate>{datetime.now().strftime("%Y-%m-%d")}</creationDate>
<author>{author}</author>
<authorEmail>hello@example.com</authorEmail>
<authorUrl>https://example.com</authorUrl>
<copyright>Copyright (C) {datetime.now().year} {author}</copyright>
<license>GPL-3.0-or-later</license>
<version>1.0.0</version>
<description>{description}</description>
<files>
<filename>index.php</filename>
<filename>templateDetails.xml</filename>
<folder>css</folder>
<folder>js</folder>
<folder>images</folder>
</files>
<positions>
<position>header</position>
<position>main</position>
<position>footer</position>
</positions>
</extension>
"""
    index_php = f"""<?php
/**
* @package {name}
* @copyright Copyright (C) {datetime.now().year} {author}
* @license GPL-3.0-or-later
*/
defined('_JEXEC') or die;
use Joomla\\CMS\\Factory;
use Joomla\\CMS\\HTML\\HTMLHelper;
use Joomla\\CMS\\Uri\\Uri;
$app = Factory::getApplication();
$wa = $app->getDocument()->getWebAssetManager();
// Load template assets
$wa->useStyle('template.{safe_name}')->useScript('template.{safe_name}');
?>
<!DOCTYPE html>
<html lang="<?php echo $this->language; ?>" dir="<?php echo $this->direction; ?>">
<head>
<jdoc:include type="metas" />
<jdoc:include type="styles" />
<jdoc:include type="scripts" />
</head>
<body>
<header>
<jdoc:include type="modules" name="header" style="html5" />
</header>
<main>
<jdoc:include type="component" />
</main>
<footer>
<jdoc:include type="modules" name="footer" style="html5" />
</footer>
</body>
</html>
"""
    # .gitkeep keeps the empty images directory in git.
    return {
        "templateDetails.xml": manifest,
        "index.php": index_php,
        "css/template.css": "/* Template styles */\n",
        "js/template.js": "// Template JavaScript\n",
        "images/.gitkeep": "",
    }
def get_package_structure(name: str, description: str, author: str) -> Dict[str, str]:
    """Return the file map (relative path -> content) for a Joomla package.

    A package bundles several extensions; the generated ``packages``
    directory is left empty for sub-extension ZIPs to be added later.

    Args:
        name: Human-readable extension name.
        description: Description placed in the manifest.
        author: Author name used in manifest and copyright lines.

    Returns:
        Mapping of relative file paths to their initial file contents.
    """
    # Joomla naming convention: packages use a "pkg_" prefix.
    safe_name = name.lower().replace(" ", "_")
    pkg_name = f"pkg_{safe_name}"
    manifest = f"""<?xml version="1.0" encoding="utf-8"?>
<extension type="package" version="4.0" method="upgrade">
<name>{name}</name>
<packagename>{safe_name}</packagename>
<author>{author}</author>
<creationDate>{datetime.now().strftime("%Y-%m-%d")}</creationDate>
<copyright>Copyright (C) {datetime.now().year} {author}</copyright>
<license>GPL-3.0-or-later</license>
<authorEmail>hello@example.com</authorEmail>
<authorUrl>https://example.com</authorUrl>
<version>1.0.0</version>
<description>{description}</description>
<files folder="packages">
<!-- Add extension packages here -->
</files>
</extension>
"""
    return {
        f"{pkg_name}.xml": manifest,
        "packages/.gitkeep": "",
    }
# ============================================================================
# Scaffolding Functions
# ============================================================================
def create_extension(
    ext_type: str,
    name: str,
    description: str,
    author: str,
    output_dir: str = "src",
    **kwargs
) -> None:
    """
    Generate scaffolding files for a Joomla extension on disk.

    Args:
        ext_type: Extension type (component, module, plugin, template, package).
        name: Extension name.
        description: Extension description.
        author: Author name.
        output_dir: Directory the generated files are written under.
        **kwargs: Type-specific options: ``client`` for modules,
            ``group`` for plugins.
    """
    destination = Path(output_dir)

    # Dispatch table mapping each extension type to its structure builder.
    builders = {
        "component": lambda: get_component_structure(name, description, author),
        "module": lambda: get_module_structure(
            name, description, author, kwargs.get("client", "site")
        ),
        "plugin": lambda: get_plugin_structure(
            name, description, author, kwargs.get("group", "system")
        ),
        "template": lambda: get_template_structure(name, description, author),
        "package": lambda: get_package_structure(name, description, author),
    }
    builder = builders.get(ext_type)
    if builder is None:
        common.die(f"Unknown extension type: {ext_type}")
    structure = builder()

    # Write every generated file, creating parent directories on demand.
    common.log_section(f"Creating {ext_type}: {name}")
    for relative_name, content in structure.items():
        target = destination / relative_name
        target.parent.mkdir(parents=True, exist_ok=True)
        target.write_text(content, encoding="utf-8")
        common.log_success(f"Created: {relative_name}")

    common.log_section("Scaffolding Complete")
    common.log_info(f"Extension files created in: {destination}")
    common.log_info(f"Extension type: {ext_type}")
    common.log_info(f"Extension name: {name}")
# ============================================================================
# Command Line Interface
# ============================================================================
def main() -> None:
    """Parse CLI arguments and create the requested extension scaffolding.

    Exits with status 1 (via sys.exit) if scaffolding fails.
    """
    parser = argparse.ArgumentParser(
        description="Create Joomla extension scaffolding",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog="""
Examples:
# Create a component
%(prog)s component MyComponent "My Component Description" "John Doe"
# Create a module
%(prog)s module MyModule "My Module Description" "John Doe" --client site
# Create a plugin
%(prog)s plugin MyPlugin "My Plugin Description" "John Doe" --group system
# Create a template
%(prog)s template mytheme "My Theme Description" "John Doe"
# Create a package
%(prog)s package mypackage "My Package Description" "John Doe"
"""
    )
    parser.add_argument(
        "type",
        choices=["component", "module", "plugin", "template", "package"],
        help="Extension type to create"
    )
    parser.add_argument("name", help="Extension name")
    parser.add_argument("description", help="Extension description")
    parser.add_argument("author", help="Author name")
    parser.add_argument(
        "-o", "--output",
        default="src",
        help="Output directory (default: src)"
    )
    # --client only affects modules; --group only affects plugins.
    parser.add_argument(
        "--client",
        choices=["site", "administrator"],
        default="site",
        help="Module client (site or administrator)"
    )
    parser.add_argument(
        "--group",
        default="system",
        help="Plugin group (system, content, user, etc.)"
    )
    args = parser.parse_args()
    try:
        create_extension(
            ext_type=args.type,
            name=args.name,
            description=args.description,
            author=args.author,
            output_dir=args.output,
            client=args.client,
            group=args.group
        )
    except Exception as e:
        common.log_error(f"Failed to create extension: {e}")
        sys.exit(1)
if __name__ == "__main__":
main()

View File

@@ -1,181 +0,0 @@
#!/usr/bin/env python3
"""
Run all validation scripts.
Copyright (C) 2025 Moko Consulting <hello@mokoconsulting.tech>
This file is part of a Moko Consulting project.
SPDX-License-Identifier: GPL-3.0-or-later
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program (./LICENSE.md).
FILE INFORMATION
DEFGROUP: Script.Run
INGROUP: Validation.Runner
REPO: https://github.com/mokoconsulting-tech/moko-cassiopeia
PATH: /scripts/run/validate_all.py
VERSION: 01.00.00
BRIEF: Run all validation scripts
"""
import subprocess
import sys
from pathlib import Path
from typing import Tuple
# Add lib directory to path
sys.path.insert(0, str(Path(__file__).parent.parent / "lib"))
try:
import common
except ImportError:
print("ERROR: Cannot import required libraries", file=sys.stderr)
sys.exit(1)
# Required validation scripts: a failure in any of these fails the run.
REQUIRED_SCRIPTS = [
    "scripts/validate/manifest.py",
    "scripts/validate/xml_wellformed.py",
    "scripts/validate/workflows.py",
]
# Optional validation scripts: failures are reported as warnings only,
# and missing scripts are skipped.
OPTIONAL_SCRIPTS = [
    "scripts/validate/changelog.py",
    "scripts/validate/language_structure.py",
    "scripts/validate/license_headers.py",
    "scripts/validate/no_secrets.py",
    "scripts/validate/paths.py",
    "scripts/validate/php_syntax.py",
    "scripts/validate/tabs.py",
    "scripts/validate/version_alignment.py",
    "scripts/validate/version_hierarchy.py",
]
def run_validation_script(script_path: str) -> Tuple[bool, str]:
    """
    Run a validation script as a subprocess.

    Args:
        script_path: Path to the script to execute.

    Returns:
        Tuple of (success, combined stdout + stderr output). A missing
        script or launch failure is reported as (False, message) rather
        than raising.
    """
    script = Path(script_path)
    if not script.exists():
        return (False, f"Script not found: {script_path}")
    try:
        # Use the interpreter running this script instead of a hard-coded
        # "python3": works inside virtualenvs and on platforms (e.g.
        # Windows) where no "python3" launcher exists on PATH.
        result = subprocess.run(
            [sys.executable, str(script)],
            capture_output=True,
            text=True,
            check=False
        )
        output = result.stdout + result.stderr
        return (result.returncode == 0, output)
    except Exception as e:
        return (False, f"Error running script: {e}")
def main() -> int:
    """
    Run all required and optional validation scripts and summarize results.

    Required scripts must pass for the run to succeed; optional script
    failures are reported as warnings only.

    Returns:
        0 when every required validation passed, 1 otherwise.
    """
    common.log_section("Running All Validations")
    print()
    total_passed = 0
    total_failed = 0
    total_skipped = 0
    # Track required failures during the first pass. The previous
    # implementation re-executed every required script a second time just
    # to compute the exit code (doubling runtime, with a risk of a
    # different result on the re-run) and excluded missing required
    # scripts from that count, so a missing required script exited 0.
    required_failed = 0
    # Run required scripts
    common.log_info("=== Required Validations ===")
    print()
    for script in REQUIRED_SCRIPTS:
        script_name = Path(script).name
        common.log_info(f"Running {script_name}...")
        success, output = run_validation_script(script)
        if success:
            common.log_success(f"{script_name} passed")
            total_passed += 1
        else:
            common.log_error(f"{script_name} FAILED")
            if output:
                print(output)
            total_failed += 1
            required_failed += 1
        print()
    # Run optional scripts (missing ones are skipped, failures are warnings)
    common.log_info("=== Optional Validations ===")
    print()
    for script in OPTIONAL_SCRIPTS:
        script_name = Path(script).name
        if not Path(script).exists():
            common.log_warn(f"{script_name} not found (skipped)")
            total_skipped += 1
            continue
        common.log_info(f"Running {script_name}...")
        success, output = run_validation_script(script)
        if success:
            common.log_success(f"{script_name} passed")
            total_passed += 1
        else:
            common.log_warn(f"{script_name} failed (optional)")
            if output:
                print(output[:500])  # Limit output
            total_failed += 1
        print()
    # Summary
    common.log_section("Validation Summary")
    common.log_kv("Total Passed", str(total_passed))
    common.log_kv("Total Failed", str(total_failed))
    common.log_kv("Total Skipped", str(total_skipped))
    print()
    if required_failed > 0:
        common.log_error(f"{required_failed} required validation(s) failed")
        return 1
    common.log_success("All required validations passed!")
    if total_failed > 0:
        common.log_warn(f"{total_failed} optional validation(s) failed")
    return 0
if __name__ == "__main__":
sys.exit(main())

View File

@@ -1,168 +0,0 @@
#!/usr/bin/env python3
"""
Validate Joomla manifest files.
Copyright (C) 2025 Moko Consulting <hello@mokoconsulting.tech>
This file is part of a Moko Consulting project.
SPDX-License-Identifier: GPL-3.0-or-later
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program (./LICENSE.md).
FILE INFORMATION
DEFGROUP: Moko-Cassiopeia.Scripts
INGROUP: Scripts.Validate
REPO: https://github.com/mokoconsulting-tech/moko-cassiopeia
PATH: /scripts/validate/manifest.py
VERSION: 01.00.00
BRIEF: Validate Joomla extension manifest files
"""
import argparse
import sys
from pathlib import Path
# Add lib directory to path
sys.path.insert(0, str(Path(__file__).parent.parent / "lib"))
try:
import common
import joomla_manifest
except ImportError:
print("ERROR: Cannot import required libraries", file=sys.stderr)
sys.exit(1)
def validate_manifest_file(manifest_path: Path, verbose: bool = False) -> bool:
    """
    Validate a single Joomla manifest file.

    Parses the manifest, runs validation, and reports either a JSON
    object (machine mode, default) or human-readable sections (verbose).

    Args:
        manifest_path: Path to manifest file.
        verbose: Show detailed human-readable output instead of JSON.

    Returns:
        True if valid, False otherwise.
    """
    try:
        info = joomla_manifest.parse_manifest(manifest_path)
        is_valid, warnings = joomla_manifest.validate_manifest(manifest_path)
        if verbose:
            common.log_section(f"Manifest: {manifest_path}")
            common.log_kv("Type", info.extension_type)
            common.log_kv("Name", info.name)
            common.log_kv("Version", info.version)
            if warnings:
                common.log_warn(f"Warnings ({len(warnings)}):")
                for warning in warnings:
                    print(f" - {warning}")
        # Output JSON for machine consumption (non-verbose mode only)
        result = {
            "status": "ok" if is_valid else "error",
            "manifest": str(manifest_path),
            "ext_type": info.extension_type,
            "name": info.name,
            "version": info.version,
            "warnings": warnings
        }
        if not verbose:
            common.json_output(result)
        if is_valid:
            if not verbose:
                print(f"manifest: ok ({manifest_path})")
            else:
                common.log_success("Manifest is valid")
            return True
        else:
            common.log_error(f"Manifest validation failed: {manifest_path}")
            return False
    except SystemExit:
        # parse_manifest aborts via SystemExit on a malformed manifest;
        # convert that into a per-file failure so sibling manifests in a
        # batch run still get checked.
        common.log_error(f"Failed to parse manifest: {manifest_path}")
        return False
def main() -> int:
    """Validate one manifest (if given) or every manifest under the src dir.

    Returns:
        0 when all validated manifests are valid, 1 otherwise.
    """
    parser = argparse.ArgumentParser(
        description="Validate Joomla extension manifest files",
        formatter_class=argparse.RawDescriptionHelpFormatter
    )
    parser.add_argument(
        "-s", "--src-dir",
        default="src",
        help="Source directory to search for manifests (default: src)"
    )
    parser.add_argument(
        "-v", "--verbose",
        action="store_true",
        help="Show detailed output"
    )
    parser.add_argument(
        "manifest",
        nargs="?",
        help="Specific manifest file to validate (optional)"
    )
    args = parser.parse_args()
    try:
        if args.manifest:
            # Validate the single manifest given on the command line.
            manifest_path = Path(args.manifest)
            if not manifest_path.is_file():
                common.die(f"Manifest file not found: {args.manifest}")
            success = validate_manifest_file(manifest_path, args.verbose)
            return 0 if success else 1
        else:
            # Find and validate all manifests in the source directory;
            # keep going after a failure so every problem is reported.
            manifests = joomla_manifest.find_all_manifests(args.src_dir)
            if not manifests:
                common.die(f"No manifest files found in {args.src_dir}")
            if args.verbose:
                common.log_section("Validating Manifests")
                common.log_info(f"Found {len(manifests)} manifest(s)")
                print()
            all_valid = True
            for manifest in manifests:
                if not validate_manifest_file(manifest, args.verbose):
                    all_valid = False
                if args.verbose:
                    print()
            if all_valid:
                common.log_success(f"All {len(manifests)} manifest(s) are valid")
            else:
                common.log_error("Some manifests failed validation")
            return 0 if all_valid else 1
    except Exception as e:
        common.log_error(f"Validation failed: {e}")
        return 1
if __name__ == "__main__":
sys.exit(main())

View File

@@ -1,212 +0,0 @@
#!/usr/bin/env python3
"""
Scan for accidentally committed secrets and credentials.
Copyright (C) 2025 Moko Consulting <hello@mokoconsulting.tech>
This file is part of a Moko Consulting project.
SPDX-License-Identifier: GPL-3.0-or-later
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program (./LICENSE.md).
FILE INFORMATION
DEFGROUP: Script.Validate
INGROUP: Security
REPO: https://github.com/mokoconsulting-tech/moko-cassiopeia
PATH: /scripts/validate/no_secrets.py
VERSION: 01.00.00
BRIEF: Scan for accidentally committed secrets and credentials
NOTE: High-signal pattern detection to prevent credential exposure
"""
import argparse
import json
import os
import re
import sys
from pathlib import Path
from typing import List, Dict
# Add lib directory to path
sys.path.insert(0, str(Path(__file__).parent.parent / "lib"))
try:
import common
except ImportError:
print("ERROR: Cannot import required libraries", file=sys.stderr)
sys.exit(1)
# High-signal patterns only. Any match is a hard fail.
# Each entry is a regex string; they are compiled once in main().
SECRET_PATTERNS = [
    # Private keys
    r'-----BEGIN (RSA|DSA|EC|OPENSSH) PRIVATE KEY-----',
    r'PuTTY-User-Key-File-',
    # AWS keys
    r'AKIA[0-9A-Z]{16}',
    r'ASIA[0-9A-Z]{16}',
    # GitHub tokens
    r'ghp_[A-Za-z0-9]{36}',
    r'gho_[A-Za-z0-9]{36}',
    r'github_pat_[A-Za-z0-9_]{20,}',
    # Slack tokens
    r'xox[baprs]-[0-9A-Za-z-]{10,48}',
    # Stripe keys
    r'sk_live_[0-9a-zA-Z]{20,}',
]
# Directory names excluded from scanning anywhere in a path
# (third-party/vendored code and build output, not project sources).
EXCLUDE_DIRS = {
    'vendor',
    'node_modules',
    'dist',
    'build',
    '.git',
}
def scan_file(filepath: Path, patterns: List[re.Pattern]) -> List[Dict[str, str]]:
    """
    Scan one text file line-by-line for secret patterns.

    Args:
        filepath: Path to file to scan.
        patterns: Compiled regex patterns to search for.

    Returns:
        One entry per (line, pattern) match, each a dict with the file
        path, 1-based line number, and a content snippet capped at 100
        characters. Unreadable files yield an empty list (with a warning).
    """
    findings = []
    try:
        with open(filepath, 'r', encoding='utf-8', errors='ignore') as handle:
            for number, text in enumerate(handle, 1):
                snippet = text.strip()[:100]  # cap snippet at 100 chars
                findings.extend(
                    {'file': str(filepath), 'line': number, 'content': snippet}
                    for pattern in patterns
                    if pattern.search(text)
                )
    except Exception as exc:
        common.log_warn(f"Could not read {filepath}: {exc}")
    return findings
def scan_directory(src_dir: str, patterns: List[re.Pattern]) -> List[Dict[str, str]]:
    """
    Recursively scan a directory tree for secret patterns.

    Skips files located under any EXCLUDE_DIRS directory and files that
    look binary (NUL byte within the first 1 KiB).

    Args:
        src_dir: Directory to scan.
        patterns: Compiled regex patterns.

    Returns:
        List of all matches across all scanned files.
    """
    src_path = Path(src_dir)
    all_hits = []
    for item in src_path.rglob("*"):
        # Skip directories
        if not item.is_file():
            continue
        # Skip anything inside an excluded directory (vendor, .git, ...)
        if any(excluded in item.parts for excluded in EXCLUDE_DIRS):
            continue
        # Binary-file heuristic: a NUL byte in the first 1 KiB means skip.
        try:
            with open(item, 'rb') as f:
                chunk = f.read(1024)
                if b'\x00' in chunk:  # Contains null bytes = likely binary
                    continue
        except Exception:
            # Unreadable file (permissions, transient) — skip silently.
            continue
        # Scan the file
        hits = scan_file(item, patterns)
        all_hits.extend(hits)
    return all_hits
def main() -> int:
    """Main entry point for the secrets scan.

    Parses CLI args, scans the source tree for secret patterns, and
    emits a machine-readable JSON result; on failure it additionally
    prints a human-readable summary to stderr.

    Returns:
        0 when no secrets are found; 1 when the source directory is
        missing or potential secrets are detected.
    """
    parser = argparse.ArgumentParser(
        description="Scan for accidentally committed secrets and credentials"
    )
    parser.add_argument(
        "-s", "--src-dir",
        default=os.environ.get("SRC_DIR", "src"),
        help="Source directory to scan (default: src)"
    )
    args = parser.parse_args()
    # Check if source directory exists
    if not Path(args.src_dir).is_dir():
        result = {
            "status": "fail",
            "error": "src directory missing"
        }
        common.json_output(result)
        return 1
    # Compile patterns
    compiled_patterns = [re.compile(pattern) for pattern in SECRET_PATTERNS]
    # Scan directory
    hits = scan_directory(args.src_dir, compiled_patterns)
    if hits:
        # Limit to first 50 hits
        hits = hits[:50]
        result = {
            "status": "fail",
            "error": "secret_pattern_detected",
            "hits": [{"hit": f"{h['file']}:{h['line']}: {h['content']}"} for h in hits]
        }
        # JSON goes to stdout for machine consumption...
        print(json.dumps(result))
        # Also print human-readable output
        print("\nERROR: Potential secrets detected!", file=sys.stderr)
        print(f"\nFound {len(hits)} potential secret(s):", file=sys.stderr)
        for hit in hits[:10]:  # Show first 10 in detail
            print(f" {hit['file']}:{hit['line']}", file=sys.stderr)
            print(f" {hit['content']}", file=sys.stderr)
        if len(hits) > 10:
            print(f" ... and {len(hits) - 10} more", file=sys.stderr)
        print("\nPlease remove any secrets and use environment variables or secret management instead.", file=sys.stderr)
        return 1
    result = {
        "status": "ok",
        "src_dir": args.src_dir
    }
    common.json_output(result)
    print("no_secrets: ok")
    return 0
if __name__ == "__main__":
    sys.exit(main())

View File

@@ -1,169 +0,0 @@
#!/usr/bin/env python3
"""
Detect Windows-style path separators (backslashes).
Copyright (C) 2025 Moko Consulting <hello@mokoconsulting.tech>
This file is part of a Moko Consulting project.
SPDX-License-Identifier: GPL-3.0-or-later
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program (./LICENSE.md).
FILE INFORMATION
DEFGROUP: Script.Validate
INGROUP: Path.Normalization
REPO: https://github.com/mokoconsulting-tech/moko-cassiopeia
PATH: /scripts/validate/paths.py
VERSION: 01.00.00
BRIEF: Detect Windows-style path separators (backslashes)
NOTE: Ensures cross-platform path compatibility
"""
import mimetypes
import subprocess
import sys
from pathlib import Path
from typing import List, Tuple, Dict
# Add lib directory to path
sys.path.insert(0, str(Path(__file__).parent.parent / "lib"))
try:
import common
except ImportError:
print("ERROR: Cannot import required libraries", file=sys.stderr)
sys.exit(1)
def get_tracked_files() -> List[str]:
    """
    List every file path tracked by git in the current repository.

    Returns:
        Tracked paths, or an empty list if the git command fails.
    """
    try:
        proc = common.run_command(
            ["git", "ls-files", "-z"],
            capture_output=True,
            check=True
        )
    except subprocess.CalledProcessError:
        return []
    # Output is NUL-delimited; drop empty trailing entries.
    return [entry for entry in proc.stdout.split('\0') if entry.strip()]
def is_binary_file(filepath: str) -> bool:
    """
    Heuristically decide whether a file is binary.

    A file is treated as binary when its guessed MIME type belongs to a
    known non-text family, when its first KiB contains a null byte, or
    when it cannot be read at all.

    Args:
        filepath: Path to file

    Returns:
        True if likely binary
    """
    guessed, _ = mimetypes.guess_type(filepath)
    if guessed and guessed.startswith(('application/', 'audio/', 'image/', 'video/')):
        return True
    try:
        with open(filepath, 'rb') as handle:
            head = handle.read(1024)
    except Exception:
        # Unreadable files are conservatively treated as binary.
        return True
    return b'\x00' in head
def find_backslashes_in_file(filepath: str) -> List[Tuple[int, str]]:
    """
    Collect every line of a file that contains a backslash.

    Args:
        filepath: Path to file

    Returns:
        (line_number, line_content) pairs, 1-based, with trailing
        whitespace stripped from the content. An unreadable file logs
        a warning and yields no pairs.
    """
    found: List[Tuple[int, str]] = []
    try:
        with open(filepath, 'r', encoding='utf-8', errors='ignore') as handle:
            for number, text in enumerate(handle, 1):
                if '\\' in text:
                    found.append((number, text.rstrip()))
    except Exception as e:
        common.log_warn(f"Could not read {filepath}: {e}")
    return found
def main() -> int:
    """Main entry point for the backslash-path check.

    Scans every git-tracked text file for backslash characters and
    reports offenders with remediation hints.

    Returns:
        0 when no backslashes are found (or nothing is tracked);
        2 when Windows-style path literals are detected.
    """
    tracked_files = get_tracked_files()
    if not tracked_files:
        print("No files to check")
        return 0
    hits: Dict[str, List[Tuple[int, str]]] = {}
    for filepath in tracked_files:
        # Skip binary files
        if is_binary_file(filepath):
            continue
        # Find backslashes
        backslashes = find_backslashes_in_file(filepath)
        if backslashes:
            hits[filepath] = backslashes
    if hits:
        print("ERROR: Windows-style path literals detected", file=sys.stderr)
        print("", file=sys.stderr)
        print(f"Found backslashes in {len(hits)} file(s):", file=sys.stderr)
        for filepath, lines in hits.items():
            print("", file=sys.stderr)
            print(f" File: {filepath}", file=sys.stderr)
            print(" Lines with backslashes:", file=sys.stderr)
            # Show first 5 lines
            for line_num, line_content in lines[:5]:
                print(f" {line_num}: {line_content[:80]}", file=sys.stderr)
            if len(lines) > 5:
                print(f" ... and {len(lines) - 5} more", file=sys.stderr)
        print("", file=sys.stderr)
        print("To fix:", file=sys.stderr)
        print(" 1. Run: python3 scripts/fix/paths.py", file=sys.stderr)
        print(" 2. Or manually replace backslashes (\\) with forward slashes (/)", file=sys.stderr)
        print(" 3. Ensure paths use POSIX separators for cross-platform compatibility", file=sys.stderr)
        print("", file=sys.stderr)
        # Exit code 2 distinguishes "violations found" from hard errors.
        return 2
    print("paths: ok")
    return 0
if __name__ == "__main__":
    sys.exit(main())

View File

@@ -1,218 +0,0 @@
#!/usr/bin/env python3
"""
Validate PHP syntax in files.
Copyright (C) 2025 Moko Consulting <hello@mokoconsulting.tech>
This file is part of a Moko Consulting project.
SPDX-License-Identifier: GPL-3.0-or-later
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program (./LICENSE.md).
FILE INFORMATION
DEFGROUP: Moko-Cassiopeia.Scripts
INGROUP: Scripts.Validate
REPO: https://github.com/mokoconsulting-tech/moko-cassiopeia
PATH: /scripts/validate/php_syntax.py
VERSION: 01.00.00
BRIEF: Validate PHP syntax in all PHP files
"""
import argparse
import subprocess
import sys
from pathlib import Path
from typing import List, Tuple
# Add lib directory to path
sys.path.insert(0, str(Path(__file__).parent.parent / "lib"))
try:
import common
except ImportError:
print("ERROR: Cannot import required libraries", file=sys.stderr)
sys.exit(1)
def check_php_file(file_path: Path) -> Tuple[bool, str]:
    """
    Lint a single PHP file with `php -l`.

    Args:
        file_path: Path to PHP file

    Returns:
        (True, "") when the syntax check passes; otherwise
        (False, message) where message is the linter output, a timeout
        notice, or the error raised while invoking PHP.
    """
    try:
        proc = subprocess.run(
            ["php", "-l", str(file_path)],
            capture_output=True,
            text=True,
            timeout=10  # guard against a hung interpreter
        )
    except subprocess.TimeoutExpired:
        return (False, "Syntax check timed out")
    except Exception as e:
        return (False, str(e))
    if proc.returncode == 0:
        return (True, "")
    # php reports errors on stderr, but fall back to stdout just in case.
    return (False, proc.stderr or proc.stdout)
def find_php_files(src_dir: str, exclude_dirs: List[str] = None) -> List[Path]:
    """
    Recursively collect PHP files under a directory.

    Args:
        src_dir: Directory to search
        exclude_dirs: Directory names to skip anywhere in the path
            (defaults to vendor, node_modules, .git)

    Returns:
        Sorted list of PHP file paths; empty when src_dir is not a
        directory.
    """
    skip = exclude_dirs if exclude_dirs is not None else ["vendor", "node_modules", ".git"]
    root = Path(src_dir)
    if not root.is_dir():
        return []
    return sorted(
        candidate
        for candidate in root.rglob("*.php")
        if not any(name in candidate.parts for name in skip)
    )
def main() -> int:
    """Main entry point for the PHP syntax check.

    Parses CLI args, locates PHP files (or uses those given on the
    command line), lints each with `php -l`, and reports results as
    JSON or verbose human-readable output.

    Returns:
        0 when every file passes; 1 on syntax errors or any failure.
    """
    parser = argparse.ArgumentParser(
        description="Validate PHP syntax in all PHP files",
        formatter_class=argparse.RawDescriptionHelpFormatter
    )
    parser.add_argument(
        "-s", "--src-dir",
        default="src",
        help="Source directory to search for PHP files (default: src)"
    )
    parser.add_argument(
        "-v", "--verbose",
        action="store_true",
        help="Show detailed output"
    )
    parser.add_argument(
        "--exclude",
        action="append",
        help="Directories to exclude (can be specified multiple times)"
    )
    parser.add_argument(
        "files",
        nargs="*",
        help="Specific files to check (optional)"
    )
    args = parser.parse_args()
    # Check if PHP is available
    common.require_cmd("php")
    try:
        # Determine which files to check
        if args.files:
            php_files = [Path(f) for f in args.files]
            for f in php_files:
                if not f.is_file():
                    common.die(f"File not found: {f}")
        else:
            exclude_dirs = args.exclude or ["vendor", "node_modules", ".git"]
            php_files = find_php_files(args.src_dir, exclude_dirs)
        if not php_files:
            common.die(f"No PHP files found in {args.src_dir}")
        if args.verbose:
            common.log_section("PHP Syntax Validation")
            common.log_info(f"Checking {len(php_files)} PHP file(s)")
            print()
        errors = []
        for php_file in php_files:
            is_valid, error_msg = check_php_file(php_file)
            if is_valid:
                if args.verbose:
                    common.log_success(f"OK: {php_file}")
            else:
                errors.append((php_file, error_msg))
                if args.verbose:
                    common.log_error(f"FAILED: {php_file}")
                    if error_msg:
                        print(f" {error_msg}")
        # Output results
        if args.verbose:
            print()
        if errors:
            result = {
                "status": "error",
                "total": len(php_files),
                "passed": len(php_files) - len(errors),
                "failed": len(errors),
                "errors": [{"file": str(f), "error": e} for f, e in errors]
            }
            # Non-verbose mode emits JSON first, then plain error lines.
            if not args.verbose:
                common.json_output(result)
            common.log_error(f"PHP syntax check failed: {len(errors)} error(s)")
            if not args.verbose:
                for file_path, error_msg in errors:
                    print(f"ERROR: {file_path}")
                    if error_msg:
                        print(f" {error_msg}")
            return 1
        else:
            result = {
                "status": "ok",
                "total": len(php_files),
                "passed": len(php_files)
            }
            if not args.verbose:
                common.json_output(result)
                print(f"php_syntax: ok ({len(php_files)} file(s) checked)")
            else:
                common.log_success(f"All {len(php_files)} PHP file(s) are valid")
        return 0
    except Exception as e:
        common.log_error(f"Validation failed: {e}")
        return 1
if __name__ == "__main__":
    sys.exit(main())

View File

@@ -1,140 +0,0 @@
#!/usr/bin/env python3
"""
Detect TAB characters in YAML files where they are not allowed.
Copyright (C) 2025 Moko Consulting <hello@mokoconsulting.tech>
This file is part of a Moko Consulting project.
SPDX-License-Identifier: GPL-3.0-or-later
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program (./LICENSE.md).
FILE INFORMATION
DEFGROUP: Script.Validate
INGROUP: Code.Quality
REPO: https://github.com/mokoconsulting-tech/moko-cassiopeia
PATH: /scripts/validate/tabs.py
VERSION: 01.00.00
BRIEF: Detect TAB characters in YAML files where they are not allowed
NOTE: YAML specification forbids tab characters
"""
import subprocess
import sys
from pathlib import Path
from typing import List, Tuple
# Add lib directory to path
sys.path.insert(0, str(Path(__file__).parent.parent / "lib"))
try:
import common
except ImportError:
print("ERROR: Cannot import required libraries", file=sys.stderr)
sys.exit(1)
def get_yaml_files() -> List[str]:
    """
    List git-tracked YAML files (*.yml / *.yaml).

    Returns:
        File paths, or an empty list if the git command fails.
    """
    try:
        proc = common.run_command(
            ["git", "ls-files", "*.yml", "*.yaml"],
            capture_output=True,
            check=True
        )
    except subprocess.CalledProcessError:
        return []
    # One path per line; drop blank entries.
    return [name.strip() for name in proc.stdout.split('\n') if name.strip()]
def check_tabs_in_file(filepath: str) -> List[Tuple[int, str]]:
    """
    Find every line in a file that contains a TAB character.

    Args:
        filepath: Path to file to check

    Returns:
        (line_number, line_content) pairs, 1-based, with trailing
        whitespace stripped from the content. An unreadable file logs
        a warning and yields no pairs.
    """
    offenders: List[Tuple[int, str]] = []
    try:
        with open(filepath, 'r', encoding='utf-8', errors='ignore') as handle:
            for number, text in enumerate(handle, 1):
                if '\t' in text:
                    offenders.append((number, text.rstrip()))
    except Exception as e:
        common.log_warn(f"Could not read {filepath}: {e}")
    return offenders
def main() -> int:
    """Main entry point for the YAML tab check.

    Checks every git-tracked YAML file for TAB characters (forbidden by
    the YAML specification) and prints offenders with remediation hints.

    Returns:
        0 when clean (or nothing to check); 2 when tabs are found.
    """
    yaml_files = get_yaml_files()
    if not yaml_files:
        print("No files to check")
        return 0
    bad_files = []
    # NOTE(review): all_violations is populated but never read afterwards —
    # candidate for removal or for inclusion in the summary output.
    all_violations = {}
    for filepath in yaml_files:
        tabs = check_tabs_in_file(filepath)
        if tabs:
            bad_files.append(filepath)
            all_violations[filepath] = tabs
            print(f"TAB found in {filepath}", file=sys.stderr)
            print(" Lines with tabs:", file=sys.stderr)
            # Show first 5 lines with tabs
            for line_num, line_content in tabs[:5]:
                print(f" {line_num}: {line_content[:80]}", file=sys.stderr)
            if len(tabs) > 5:
                print(f" ... and {len(tabs) - 5} more", file=sys.stderr)
            print("", file=sys.stderr)
    if bad_files:
        print("", file=sys.stderr)
        print("ERROR: Tabs found in repository files", file=sys.stderr)
        print("", file=sys.stderr)
        print("YAML specification forbids tab characters.", file=sys.stderr)
        print(f"Found tabs in {len(bad_files)} file(s):", file=sys.stderr)
        for f in bad_files:
            print(f" - {f}", file=sys.stderr)
        print("", file=sys.stderr)
        print("To fix:", file=sys.stderr)
        print(" 1. Run: python3 scripts/fix/tabs.py", file=sys.stderr)
        print(" 2. Or manually replace tabs with spaces in your editor", file=sys.stderr)
        print(" 3. Configure your editor to use spaces (not tabs) for YAML files", file=sys.stderr)
        print("", file=sys.stderr)
        # Exit code 2 distinguishes "violations found" from hard errors.
        return 2
    print("tabs: ok")
    return 0
if __name__ == "__main__":
    sys.exit(main())

View File

@@ -1,216 +0,0 @@
#!/usr/bin/env python3
"""
Validate GitHub Actions workflow files.
Copyright (C) 2025 Moko Consulting <hello@mokoconsulting.tech>
This file is part of a Moko Consulting project.
SPDX-License-Identifier: GPL-3.0-or-later
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program (./LICENSE.md).
FILE INFORMATION
DEFGROUP: Script.Validate
INGROUP: CI.Validation
REPO: https://github.com/mokoconsulting-tech/moko-cassiopeia
PATH: /scripts/validate/workflows.py
VERSION: 01.00.00
BRIEF: Validate GitHub Actions workflow files
NOTE: Checks YAML syntax, structure, and best practices
"""
import sys
from pathlib import Path
# Add lib directory to path
sys.path.insert(0, str(Path(__file__).parent.parent / "lib"))
try:
import common
except ImportError:
print("ERROR: Cannot import required libraries", file=sys.stderr)
sys.exit(1)
def validate_yaml_syntax(filepath: Path) -> bool:
    """
    Parse a workflow file with PyYAML to confirm it is valid YAML.

    Args:
        filepath: Path to workflow file

    Returns:
        True when the file parses — or when PyYAML is unavailable, in
        which case the check is skipped with a warning; False on a YAML
        error or any read failure.
    """
    try:
        import yaml
    except ImportError:
        common.log_warn("PyYAML module not installed. Install with: pip3 install pyyaml")
        return True  # Skip validation if yaml not available
    try:
        with open(filepath, 'r', encoding='utf-8') as f:
            yaml.safe_load(f)
    except yaml.YAMLError as e:
        print(f"✗ YAML Error in {filepath.name}: {e}", file=sys.stderr)
        return False
    except Exception as e:
        print(f"✗ Error reading {filepath.name}: {e}", file=sys.stderr)
        return False
    print(f"✓ Valid YAML: {filepath.name}")
    return True
def check_no_tabs(filepath: Path) -> bool:
    """
    Verify that a file contains no TAB characters.

    Args:
        filepath: Path to file

    Returns:
        True when no tabs are present; False when a tab is found or the
        file cannot be read.
    """
    try:
        with open(filepath, 'r', encoding='utf-8') as f:
            text = f.read()
        if '\t' in text:
            common.log_error(f"✗ File contains tab characters: {filepath.name}")
            return False
    except Exception as e:
        common.log_warn(f"Could not read {filepath}: {e}")
        return False
    return True
def check_workflow_structure(filepath: Path) -> bool:
    """
    Check a workflow file for the required top-level keys.

    Bug fix: the original substring tests ('on:' in content) matched
    keys that merely end with the text — e.g. "version:" contains
    "on:" — so files missing a real trigger could pass. Keys are now
    matched only at the start of a line.

    Args:
        filepath: Path to workflow file

    Returns:
        True when 'on:' and 'jobs:' are present ('name:' only warns);
        False on a read error or a missing required key.
    """
    try:
        with open(filepath, 'r', encoding='utf-8') as f:
            lines = f.read().splitlines()
    except Exception as e:
        common.log_error(f"Error reading {filepath}: {e}")
        return False

    def has_key(key: str) -> bool:
        # A top-level mapping key starts at column 0 of its own line.
        # NOTE(review): quoted forms like "on": are not recognized — confirm
        # whether any workflows in the repo use them.
        return any(line.startswith(key) for line in lines)

    errors = 0
    if not has_key('name:'):
        common.log_warn(f"Missing 'name:' in {filepath.name}")
    if not has_key('on:'):
        common.log_error(f"✗ Missing 'on:' trigger in {filepath.name}")
        errors += 1
    if not has_key('jobs:'):
        common.log_error(f"✗ Missing 'jobs:' in {filepath.name}")
        errors += 1
    return errors == 0
def validate_workflow_file(filepath: Path) -> bool:
    """
    Validate a single workflow file.

    Runs three independent checks — YAML syntax, absence of tabs, and
    required structural keys — and logs a per-file verdict.

    Args:
        filepath: Path to workflow file

    Returns:
        True if all checks pass
    """
    common.log_info(f"Validating: {filepath.name}")
    errors = 0
    # Check YAML syntax
    if not validate_yaml_syntax(filepath):
        errors += 1
    # Check for tabs
    if not check_no_tabs(filepath):
        errors += 1
    # Check structure
    if not check_workflow_structure(filepath):
        errors += 1
    if errors == 0:
        common.log_info(f"{filepath.name} passed all checks")
        return True
    else:
        common.log_error(f"{filepath.name} failed {errors} check(s)")
        return False
def main() -> int:
    """Main entry point for workflow validation.

    Validates every *.yml / *.yaml file under .github/workflows and
    prints a pass/fail summary.

    Returns:
        0 when all workflows pass (or none exist); 1 when the
        workflows directory is missing or any workflow fails.
    """
    common.log_info("GitHub Actions Workflow Validation")
    common.log_info("===================================")
    print()
    workflows_dir = Path(".github/workflows")
    if not workflows_dir.is_dir():
        common.log_error(f"Workflows directory not found: {workflows_dir}")
        return 1
    # Find all workflow files
    workflow_files = []
    for pattern in ["*.yml", "*.yaml"]:
        workflow_files.extend(workflows_dir.glob(pattern))
    if not workflow_files:
        common.log_warn("No workflow files found")
        return 0
    total = len(workflow_files)
    passed = 0
    failed = 0
    for workflow in workflow_files:
        if validate_workflow_file(workflow):
            passed += 1
        else:
            failed += 1
    print()
    common.log_info("===================================")
    common.log_info("Summary:")
    common.log_info(f" Total workflows: {total}")
    common.log_info(f" Passed: {passed}")
    common.log_info(f" Failed: {failed}")
    common.log_info("===================================")
    if failed > 0:
        common.log_error("Workflow validation failed")
        return 1
    common.log_info("All workflows validated successfully")
    return 0
if __name__ == "__main__":
    sys.exit(main())

View File

@@ -1,206 +0,0 @@
#!/usr/bin/env python3
"""
Validate XML well-formedness.
Copyright (C) 2025 Moko Consulting <hello@mokoconsulting.tech>
This file is part of a Moko Consulting project.
SPDX-License-Identifier: GPL-3.0-or-later
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program (./LICENSE.md).
FILE INFORMATION
DEFGROUP: Moko-Cassiopeia.Scripts
INGROUP: Scripts.Validate
REPO: https://github.com/mokoconsulting-tech/moko-cassiopeia
PATH: /scripts/validate/xml_wellformed.py
VERSION: 01.00.00
BRIEF: Validate XML well-formedness in all XML files
"""
import argparse
import sys
import xml.etree.ElementTree as ET
from pathlib import Path
from typing import List, Tuple
# Add lib directory to path
sys.path.insert(0, str(Path(__file__).parent.parent / "lib"))
try:
import common
except ImportError:
print("ERROR: Cannot import required libraries", file=sys.stderr)
sys.exit(1)
def check_xml_file(file_path: Path) -> Tuple[bool, str]:
    """
    Check whether an XML file is well-formed.

    Args:
        file_path: Path to XML file

    Returns:
        (True, "") when the file parses; (False, error_message) on a
        parse error or any other failure (e.g. missing file).
    """
    try:
        ET.parse(file_path)
    except ET.ParseError as e:
        return (False, str(e))
    except Exception as e:
        return (False, str(e))
    return (True, "")
def find_xml_files(src_dir: str, exclude_dirs: List[str] = None) -> List[Path]:
    """
    Recursively collect XML files under a directory.

    Args:
        src_dir: Directory to search
        exclude_dirs: Directory names to skip anywhere in the path
            (defaults to vendor, node_modules, .git)

    Returns:
        Sorted list of XML file paths; empty when src_dir is not a
        directory.
    """
    skip = exclude_dirs if exclude_dirs is not None else ["vendor", "node_modules", ".git"]
    root = Path(src_dir)
    if not root.is_dir():
        return []
    return sorted(
        candidate
        for candidate in root.rglob("*.xml")
        if not any(name in candidate.parts for name in skip)
    )
def main() -> int:
    """Main entry point for the XML well-formedness check.

    Parses CLI args, locates XML files (or uses those given on the
    command line), checks each for well-formedness, and reports the
    results as JSON or verbose human-readable output.

    Fix: the success message used a placeholder-free f-string
    (print(f"xml_wellformed: ok")); now a plain string literal.

    Returns:
        0 when every file is well-formed; 1 on parse errors or any
        other failure.
    """
    parser = argparse.ArgumentParser(
        description="Validate XML well-formedness in all XML files",
        formatter_class=argparse.RawDescriptionHelpFormatter
    )
    parser.add_argument(
        "-s", "--src-dir",
        default="src",
        help="Source directory to search for XML files (default: src)"
    )
    parser.add_argument(
        "-v", "--verbose",
        action="store_true",
        help="Show detailed output"
    )
    parser.add_argument(
        "--exclude",
        action="append",
        help="Directories to exclude (can be specified multiple times)"
    )
    parser.add_argument(
        "files",
        nargs="*",
        help="Specific files to check (optional)"
    )
    args = parser.parse_args()
    try:
        # Determine which files to check
        if args.files:
            xml_files = [Path(f) for f in args.files]
            for f in xml_files:
                if not f.is_file():
                    common.die(f"File not found: {f}")
        else:
            exclude_dirs = args.exclude or ["vendor", "node_modules", ".git"]
            xml_files = find_xml_files(args.src_dir, exclude_dirs)
        if not xml_files:
            common.die(f"No XML files found in {args.src_dir}")
        if args.verbose:
            common.log_section("XML Well-formedness Validation")
            common.log_info(f"Checking {len(xml_files)} XML file(s)")
            print()
        errors = []
        for xml_file in xml_files:
            is_valid, error_msg = check_xml_file(xml_file)
            if is_valid:
                if args.verbose:
                    common.log_success(f"OK: {xml_file}")
            else:
                errors.append((xml_file, error_msg))
                if args.verbose:
                    common.log_error(f"FAILED: {xml_file}")
                    if error_msg:
                        print(f" {error_msg}")
        # Output results
        if args.verbose:
            print()
        if errors:
            result = {
                "status": "error",
                "src_dir": args.src_dir,
                "xml_count": len(xml_files),
                "passed": len(xml_files) - len(errors),
                "failed": len(errors),
                "errors": [{"file": str(f), "error": e} for f, e in errors]
            }
            # Non-verbose mode emits JSON first, then plain error lines.
            if not args.verbose:
                common.json_output(result)
            common.log_error(f"XML validation failed: {len(errors)} error(s)")
            if not args.verbose:
                for file_path, error_msg in errors:
                    print(f"ERROR: {file_path}")
                    if error_msg:
                        print(f" {error_msg}")
            return 1
        else:
            result = {
                "status": "ok",
                "src_dir": args.src_dir,
                "xml_count": len(xml_files)
            }
            if not args.verbose:
                common.json_output(result)
                print("xml_wellformed: ok")
            else:
                common.log_success(f"All {len(xml_files)} XML file(s) are well-formed")
        return 0
    except Exception as e:
        common.log_error(f"Validation failed: {e}")
        return 1
if __name__ == "__main__":
    sys.exit(main())

View File

@@ -98,6 +98,10 @@ $wa->useStyle('template.user'); // css/user.css
// Optional vendor CSS
$wa->useStyle('vendor.bootstrap-toc');
// Optional demo/showcase CSS (available for use, not loaded by default)
// To use: Add 'template.global.social-media-demo' to your article/module
// $wa->useStyle('template.global.social-media-demo');
// Color theme (light + optional dark)
$colorLightKey = strtolower(preg_replace('/[^a-z0-9_.-]/i', '', $params_LightColorName));
$colorDarkKey = strtolower(preg_replace('/[^a-z0-9_.-]/i', '', $params_DarkColorName));

View File

@@ -235,6 +235,11 @@
"name": "vendor.fa7free.solid.min",
"type": "style",
"uri": "media/templates/site/moko-cassiopeia/vendor/fa7free/css/solid.min.css"
},
{
"name": "template.global.social-media-demo",
"type": "style",
"uri": "media/templates/site/moko-cassiopeia/css/global/social-media-demo.css"
}
]
}