Delete workflows/, scripts/ directories

Co-authored-by: jmiller-moko <230051081+jmiller-moko@users.noreply.github.com>
This commit is contained in:
copilot-swe-agent[bot]
2026-02-08 07:13:49 +00:00
parent 948a5a2bde
commit 6da543f208
13 changed files with 0 additions and 4749 deletions

View File

@@ -1,413 +0,0 @@
# Copyright (C) 2026 Moko Consulting <hello@mokoconsulting.tech>
#
# This file is part of a Moko Consulting project.
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# FILE INFORMATION
# DEFGROUP: GitHub.Workflow
# INGROUP: MokoStandards.CI
# REPO: https://github.com/mokoconsulting-tech/MokoStandards
# PATH: /templates/workflows/unified-ci.yml.template
# VERSION: 02.00.00
# BRIEF: Unified Continuous Integration for all platforms (Joomla, Dolibarr, Generic)
# NOTE: Auto-detects platform and languages, runs appropriate validation
name: Unified CI

# NOTE: generic YAML 1.1 parsers read the bare "on" key as boolean true;
# GitHub's own loader handles it, so no quoting is needed here.
on:
  push:
    branches:
      - main
      - "dev/**"
      - "rc/**"
      - "version/**"
  pull_request:
    branches:
      - main
      - "dev/**"
      - "rc/**"
      - "version/**"
  workflow_dispatch:

permissions:
  contents: read

jobs:
  # Single detection pass over the checked-out tree; every other job keys
  # off these outputs so the filesystem-probing logic lives in one place.
  detect:
    name: Detect Platform & Languages
    runs-on: ubuntu-latest
    outputs:
      platform: ${{ steps.platform.outputs.type }}
      has_nodejs: ${{ steps.languages.outputs.has_nodejs }}
      has_python: ${{ steps.languages.outputs.has_python }}
      has_php: ${{ steps.languages.outputs.has_php }}
      has_go: ${{ steps.languages.outputs.has_go }}
      has_ruby: ${{ steps.languages.outputs.has_ruby }}
      has_rust: ${{ steps.languages.outputs.has_rust }}
    steps:
      - name: Checkout repository
        uses: actions/checkout@v6
      - name: Detect platform type
        id: platform
        run: |
          # Detect Joomla (extension manifest naming conventions)
          if [ -f "joomla.xml" ] || find . -maxdepth 2 \( -name "mod_*.xml" -o -name "plg_*.xml" -o -name "com_*.xml" -o -name "pkg_*.xml" -o -name "tpl_*.xml" \) 2>/dev/null | head -1 | grep -q .; then
            echo "type=joomla" >> $GITHUB_OUTPUT
            echo "✓ Detected: Joomla extension" >> $GITHUB_STEP_SUMMARY
          # Detect Dolibarr (module layout or composer dependency)
          elif [ -d "htdocs" ] || [ -d "core/modules" ] || ([ -f "composer.json" ] && grep -q "dolibarr" composer.json 2>/dev/null); then
            echo "type=dolibarr" >> $GITHUB_OUTPUT
            echo "✓ Detected: Dolibarr module" >> $GITHUB_STEP_SUMMARY
          else
            echo "type=generic" >> $GITHUB_OUTPUT
            echo "✓ Detected: Generic project" >> $GITHUB_STEP_SUMMARY
          fi
      - name: Detect languages
        id: languages
        run: |
          # Node.js
          if [ -f "package.json" ]; then
            echo "has_nodejs=true" >> $GITHUB_OUTPUT
            echo "- Node.js: ✓" >> $GITHUB_STEP_SUMMARY
          else
            echo "has_nodejs=false" >> $GITHUB_OUTPUT
          fi
          # Python
          if [ -f "requirements.txt" ] || [ -f "pyproject.toml" ] || [ -f "setup.py" ]; then
            echo "has_python=true" >> $GITHUB_OUTPUT
            echo "- Python: ✓" >> $GITHUB_STEP_SUMMARY
          else
            echo "has_python=false" >> $GITHUB_OUTPUT
          fi
          # PHP
          if [ -f "composer.json" ] || find . -maxdepth 2 -name "*.php" 2>/dev/null | head -1 | grep -q .; then
            echo "has_php=true" >> $GITHUB_OUTPUT
            echo "- PHP: ✓" >> $GITHUB_STEP_SUMMARY
          else
            echo "has_php=false" >> $GITHUB_OUTPUT
          fi
          # Go
          if [ -f "go.mod" ]; then
            echo "has_go=true" >> $GITHUB_OUTPUT
            echo "- Go: ✓" >> $GITHUB_STEP_SUMMARY
          else
            echo "has_go=false" >> $GITHUB_OUTPUT
          fi
          # Ruby
          if [ -f "Gemfile" ]; then
            echo "has_ruby=true" >> $GITHUB_OUTPUT
            echo "- Ruby: ✓" >> $GITHUB_STEP_SUMMARY
          else
            echo "has_ruby=false" >> $GITHUB_OUTPUT
          fi
          # Rust
          if [ -f "Cargo.toml" ]; then
            echo "has_rust=true" >> $GITHUB_OUTPUT
            echo "- Rust: ✓" >> $GITHUB_STEP_SUMMARY
          else
            echo "has_rust=false" >> $GITHUB_OUTPUT
          fi

  joomla-validation:
    name: Joomla Validation
    runs-on: ubuntu-latest
    needs: detect
    if: needs.detect.outputs.platform == 'joomla'
    steps:
      - name: Checkout repository
        uses: actions/checkout@v6
      - name: Setup PHP
        uses: shivammathur/setup-php@v2
        with:
          php-version: '8.2'
          extensions: mbstring, xml
      - name: Validate XML manifests
        run: |
          echo "Validating Joomla manifests..."
          for manifest in $(find . -maxdepth 2 -name "*.xml" -not -path "*/vendor/*" -not -name "phpunit.xml*" -not -name "phpcs.xml*"); do
            echo "Checking $manifest"
            xmllint --noout "$manifest" || exit 1
          done
          echo "✓ All manifests valid" >> $GITHUB_STEP_SUMMARY
      - name: Validate PHP syntax
        run: |
          # FIX: the previous `find -exec php -l {} \; | grep -v "No syntax errors"`
          # inverted the result: grep -v exits 1 when every file is clean (failing
          # the step), while find discards php -l's exit status (passing on real
          # syntax errors). xargs propagates any php -l failure to the step.
          echo "Checking PHP syntax..."
          find . -name "*.php" -not -path "*/vendor/*" -print0 | xargs -0 -r -n1 php -l
          echo "✓ PHP syntax valid" >> $GITHUB_STEP_SUMMARY
      - name: Check Joomla standards
        run: |
          if [ -f "scripts/validate/manifest.sh" ]; then
            bash scripts/validate/manifest.sh
          fi

  dolibarr-validation:
    name: Dolibarr Validation
    runs-on: ubuntu-latest
    needs: detect
    if: needs.detect.outputs.platform == 'dolibarr'
    steps:
      - name: Checkout repository
        uses: actions/checkout@v6
      - name: Setup PHP
        uses: shivammathur/setup-php@v2
        with:
          php-version: '8.2'
          extensions: mbstring, xml, mysqli
      - name: Validate module structure
        run: |
          echo "Validating Dolibarr module structure..."
          if [ ! -d "core" ] && [ ! -d "htdocs" ]; then
            echo "⚠️ Warning: Neither 'core' nor 'htdocs' directory found" >> $GITHUB_STEP_SUMMARY
          else
            echo "✓ Module structure valid" >> $GITHUB_STEP_SUMMARY
          fi
      - name: Validate PHP syntax
        run: |
          # FIX: same inverted grep pipeline as the Joomla job; see note there.
          echo "Checking PHP syntax..."
          find . -name "*.php" -not -path "*/vendor/*" -print0 | xargs -0 -r -n1 php -l
          echo "✓ PHP syntax valid" >> $GITHUB_STEP_SUMMARY
      - name: Check Dolibarr compatibility
        run: |
          if [ -f "scripts/validate/dolibarr_validate.sh" ]; then
            bash scripts/validate/dolibarr_validate.sh
          fi

  nodejs-ci:
    name: Node.js CI
    runs-on: ubuntu-latest
    needs: detect
    if: needs.detect.outputs.has_nodejs == 'true'
    steps:
      - name: Checkout repository
        uses: actions/checkout@v6
      - name: Setup Node.js
        uses: actions/setup-node@v6
        with:
          node-version: '20'
          cache: 'npm'
      - name: Install dependencies
        run: npm ci
      - name: Run linter
        # FIX: the previous condition `contains(fromJSON('["lint"]'), 'lint')`
        # was a tautology (it never inspected package.json scripts);
        # --if-present skips cleanly when no lint script is defined.
        run: npm run lint --if-present
        continue-on-error: true
      - name: Run tests
        # FIX: same tautological condition removed; --if-present skips
        # repositories that define no test script instead of failing.
        run: npm test --if-present

  python-ci:
    name: Python CI
    runs-on: ubuntu-latest
    needs: detect
    if: needs.detect.outputs.has_python == 'true'
    steps:
      - name: Checkout repository
        uses: actions/checkout@v6
      - name: Setup Python
        uses: actions/setup-python@v6
        with:
          python-version: '3.11'
          cache: 'pip'
      - name: Install dependencies
        run: |
          if [ -f "requirements.txt" ]; then
            pip install -r requirements.txt
          fi
          if [ -f "pyproject.toml" ]; then
            pip install .
          fi
      - name: Run linter
        run: |
          # Lint only when the project opts in via a .pylintrc; failures are
          # advisory (|| true) and do not fail the job.
          if [ -f ".pylintrc" ]; then
            pip install pylint
            pylint **/*.py || true
          fi
      - name: Run tests
        run: |
          if [ -f "pytest.ini" ] || [ -d "tests" ]; then
            pip install pytest
            pytest
          fi

  php-ci:
    name: PHP CI
    runs-on: ubuntu-latest
    needs: detect
    if: needs.detect.outputs.has_php == 'true'
    steps:
      - name: Checkout repository
        uses: actions/checkout@v6
      - name: Setup PHP
        uses: shivammathur/setup-php@v2
        with:
          php-version: '8.2'
          extensions: mbstring, xml
          tools: composer:v2
      - name: Validate composer.json
        run: composer validate --strict
        if: hashFiles('composer.json') != ''
      - name: Install dependencies
        run: composer install --prefer-dist --no-progress
        if: hashFiles('composer.json') != ''
      - name: PHP Syntax Check
        run: |
          # FIX: `find -exec php -l {} \;` always exits 0 because find discards
          # the exit status of -exec'd commands, so syntax errors never failed
          # the step. xargs exits non-zero if any php -l invocation fails.
          find . -name "*.php" -not -path "*/vendor/*" -print0 | xargs -0 -r -n1 php -l

  go-ci:
    name: Go CI
    runs-on: ubuntu-latest
    needs: detect
    if: needs.detect.outputs.has_go == 'true'
    steps:
      - name: Checkout repository
        uses: actions/checkout@v6
      - name: Setup Go
        uses: actions/setup-go@v6
        with:
          go-version: '1.21'
          cache: true
      - name: Build
        run: go build -v ./...
      - name: Test
        run: go test -v ./...

  ruby-ci:
    name: Ruby CI
    runs-on: ubuntu-latest
    needs: detect
    if: needs.detect.outputs.has_ruby == 'true'
    steps:
      - name: Checkout repository
        uses: actions/checkout@v6
      - name: Setup Ruby
        uses: ruby/setup-ruby@v1
        with:
          ruby-version: '3.2'
          bundler-cache: true
      - name: Install dependencies
        run: bundle install
      - name: Run tests
        run: bundle exec rake test

  rust-ci:
    name: Rust CI
    runs-on: ubuntu-latest
    needs: detect
    if: needs.detect.outputs.has_rust == 'true'
    steps:
      - name: Checkout repository
        uses: actions/checkout@v6
      - name: Setup Rust
        # FIX: actions-rs/toolchain is archived and unmaintained;
        # dtolnay/rust-toolchain@stable is the maintained drop-in for
        # installing a stable toolchain.
        uses: dtolnay/rust-toolchain@stable
      - name: Build
        run: cargo build --verbose
      - name: Test
        run: cargo test --verbose

  # Always runs (if: always()) so a summary is produced even when upstream
  # jobs were skipped or failed; skipped jobs report result "skipped".
  summary:
    name: CI Summary
    runs-on: ubuntu-latest
    needs: [detect, joomla-validation, dolibarr-validation, nodejs-ci, python-ci, php-ci, go-ci, ruby-ci, rust-ci]
    if: always()
    steps:
      - name: Generate CI summary
        run: |
          echo "# Unified CI Summary" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "**Platform**: ${{ needs.detect.outputs.platform }}" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "## Validation Results" >> $GITHUB_STEP_SUMMARY
          if [ "${{ needs.detect.outputs.platform }}" = "joomla" ]; then
            echo "- Joomla Validation: ${{ needs.joomla-validation.result }}" >> $GITHUB_STEP_SUMMARY
          elif [ "${{ needs.detect.outputs.platform }}" = "dolibarr" ]; then
            echo "- Dolibarr Validation: ${{ needs.dolibarr-validation.result }}" >> $GITHUB_STEP_SUMMARY
          fi
          if [ "${{ needs.detect.outputs.has_nodejs }}" = "true" ]; then
            echo "- Node.js CI: ${{ needs.nodejs-ci.result }}" >> $GITHUB_STEP_SUMMARY
          fi
          if [ "${{ needs.detect.outputs.has_python }}" = "true" ]; then
            echo "- Python CI: ${{ needs.python-ci.result }}" >> $GITHUB_STEP_SUMMARY
          fi
          if [ "${{ needs.detect.outputs.has_php }}" = "true" ]; then
            echo "- PHP CI: ${{ needs.php-ci.result }}" >> $GITHUB_STEP_SUMMARY
          fi
          if [ "${{ needs.detect.outputs.has_go }}" = "true" ]; then
            echo "- Go CI: ${{ needs.go-ci.result }}" >> $GITHUB_STEP_SUMMARY
          fi
          if [ "${{ needs.detect.outputs.has_ruby }}" = "true" ]; then
            echo "- Ruby CI: ${{ needs.ruby-ci.result }}" >> $GITHUB_STEP_SUMMARY
          fi
          if [ "${{ needs.detect.outputs.has_rust }}" = "true" ]; then
            echo "- Rust CI: ${{ needs.rust-ci.result }}" >> $GITHUB_STEP_SUMMARY
          fi

View File

@@ -1,202 +0,0 @@
# Copyright (C) 2026 Moko Consulting <hello@mokoconsulting.tech>
#
# This file is part of a Moko Consulting project.
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# FILE INFORMATION
# DEFGROUP: GitHub.Workflow
# INGROUP: MokoStandards.Release
# REPO: https://github.com/mokoconsulting-tech/MokoStandards
# PATH: /templates/workflows/joomla/release.yml
# VERSION: 01.00.00
# BRIEF: Automated release workflow for Joomla extensions
# NOTE: Creates release packages and publishes to GitHub Releases
name: Create Release

on:
  push:
    tags:
      - 'v*.*.*'
  workflow_dispatch:
    inputs:
      version:
        description: 'Release version (e.g., 1.0.0)'
        required: true
        type: string
      prerelease:
        description: 'Mark as pre-release'
        required: false
        type: boolean
        default: false

permissions:
  contents: write

jobs:
  # Builds the distributable ZIP (plus checksums) and publishes it as a
  # workflow artifact for the release job to consume.
  build:
    name: Build Release Package
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repository
        uses: actions/checkout@v6
        with:
          fetch-depth: 0
      - name: Setup PHP
        uses: shivammathur/setup-php@v2
        with:
          php-version: '8.1'
          extensions: mbstring, xml, zip
          tools: composer:v2
      - name: Get version
        id: version
        run: |
          # Manual dispatch supplies the version as an input; tag pushes
          # derive it by stripping the leading "refs/tags/v" prefix.
          if [ "${{ github.event_name }}" == "workflow_dispatch" ]; then
            VERSION="${{ inputs.version }}"
          else
            VERSION=${GITHUB_REF#refs/tags/v}
          fi
          echo "version=${VERSION}" >> $GITHUB_OUTPUT
          echo "Building version: ${VERSION}"
      - name: Install dependencies
        run: |
          if [ -f "composer.json" ]; then
            composer install --no-dev --optimize-autoloader
          fi
      - name: Update version in manifest
        run: |
          VERSION="${{ steps.version.outputs.version }}"
          # FIX: the previous blanket `find . -name "*.xml"` also rewrote
          # <version> tags inside vendored packages and tool configs
          # (phpunit.xml, phpcs.xml) — the same files the CI workflow
          # deliberately excludes. Restrict the rewrite to real manifests.
          find . -name "*.xml" -type f \
            -not -path "*/vendor/*" \
            -not -name "phpunit.xml*" \
            -not -name "phpcs.xml*" \
            -exec sed -i "s/<version>.*<\/version>/<version>${VERSION}<\/version>/g" {} \;
      - name: Create package structure
        run: |
          mkdir -p build/package
          # Copy extension files, excluding development-only artifacts.
          rsync -av --exclude='build' \
            --exclude='tests' \
            --exclude='.git*' \
            --exclude='composer.json' \
            --exclude='composer.lock' \
            --exclude='phpunit.xml*' \
            --exclude='phpcs.xml*' \
            --exclude='phpstan.neon*' \
            --exclude='psalm.xml*' \
            --exclude='node_modules' \
            --exclude='.github' \
            . build/package/
      - name: Create ZIP package
        run: |
          cd build/package
          # Derive a bare extension name from the repository name by stripping
          # conventional Joomla prefixes (joomla-/mod_/com_/plg_).
          EXTENSION_NAME=$(basename $GITHUB_REPOSITORY | sed 's/^joomla-//' | sed 's/^mod_//' | sed 's/^com_//' | sed 's/^plg_//')
          VERSION="${{ steps.version.outputs.version }}"
          ZIP_NAME="${EXTENSION_NAME}-${VERSION}.zip"
          zip -r "../${ZIP_NAME}" .
          cd ../..
          # Persist for the checksum step below.
          echo "ZIP_NAME=${ZIP_NAME}" >> $GITHUB_ENV
          echo "Created package: ${ZIP_NAME}"
      - name: Generate checksums
        run: |
          cd build
          sha256sum "${ZIP_NAME}" > "${ZIP_NAME}.sha256"
          md5sum "${ZIP_NAME}" > "${ZIP_NAME}.md5"
      - name: Upload build artifacts
        uses: actions/upload-artifact@v6
        with:
          name: release-package
          path: |
            build/*.zip
            build/*.sha256
            build/*.md5

  # Downloads the built package and publishes a GitHub Release with
  # changelog-derived notes.
  release:
    name: Create GitHub Release
    runs-on: ubuntu-latest
    needs: build
    steps:
      - name: Checkout repository
        uses: actions/checkout@v6
      - name: Download build artifacts
        # FIX: pinned to the same major as upload-artifact above; mismatched
        # upload/download artifact-action majors are mutually incompatible.
        uses: actions/download-artifact@v6
        with:
          name: release-package
          path: ./artifacts
      - name: Get version
        id: version
        run: |
          if [ "${{ github.event_name }}" == "workflow_dispatch" ]; then
            VERSION="${{ inputs.version }}"
          else
            VERSION=${GITHUB_REF#refs/tags/v}
          fi
          echo "version=${VERSION}" >> $GITHUB_OUTPUT
      - name: Extract changelog
        id: changelog
        run: |
          # NOTE(review): this awk extraction assumes Keep-a-Changelog style
          # "## [x.y.z]" headings; other formats fall through to the generic
          # message below — confirm against the repository's CHANGELOG format.
          if [ -f "CHANGELOG.md" ]; then
            VERSION="${{ steps.version.outputs.version }}"
            awk "/## \[${VERSION}\]/,/## \[/{if(/## \[${VERSION}\]/)print;else if(/## \[/)exit;else print}" CHANGELOG.md > release_notes.md
            if [ ! -s release_notes.md ]; then
              echo "No specific changelog found for version ${VERSION}" > release_notes.md
              echo "" >> release_notes.md
              echo "Please refer to the full CHANGELOG.md for details." >> release_notes.md
            fi
          else
            echo "Release version ${{ steps.version.outputs.version }}" > release_notes.md
          fi
      - name: Create Release
        uses: softprops/action-gh-release@v2
        with:
          tag_name: v${{ steps.version.outputs.version }}
          name: Release ${{ steps.version.outputs.version }}
          body_path: release_notes.md
          draft: false
          # inputs.prerelease is empty on tag pushes; "|| false" normalizes it.
          prerelease: ${{ inputs.prerelease || false }}
          files: |
            artifacts/*.zip
            artifacts/*.sha256
            artifacts/*.md5
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      - name: Release summary
        run: |
          echo "### Release Created Successfully" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "- Version: ${{ steps.version.outputs.version }}" >> $GITHUB_STEP_SUMMARY
          echo "- Repository: $GITHUB_REPOSITORY" >> $GITHUB_STEP_SUMMARY
          echo "- Tag: v${{ steps.version.outputs.version }}" >> $GITHUB_STEP_SUMMARY
          echo "- Pre-release: ${{ inputs.prerelease || false }}" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "Package files:" >> $GITHUB_STEP_SUMMARY
          ls -lh artifacts/ >> $GITHUB_STEP_SUMMARY

View File

@@ -1,295 +0,0 @@
# Copyright (C) 2026 Moko Consulting <hello@mokoconsulting.tech>
# SPDX-License-Identifier: GPL-3.0-or-later
# FILE INFORMATION
# DEFGROUP: GitHub.Workflow
# INGROUP: MokoStandards.Reusable
# REPO: https://github.com/mokoconsulting-tech/MokoStandards
# PATH: /.github/workflows/reusable-deploy.yml
# VERSION: 01.00.00
# BRIEF: Reusable type-aware deployment workflow for staging and production
# NOTE: Supports Joomla, Dolibarr, and generic deployments with health checks
name: Reusable Deploy

# Callable (workflow_call) deployment pipeline: detect project type,
# register a GitHub deployment, build, deploy via the selected method,
# health-check, and roll back bookkeeping on failure.
on:
  workflow_call:
    inputs:
      environment:
        description: 'Target environment (staging, production)'
        required: true
        type: string
      version:
        description: 'Version to deploy (optional, uses latest if not specified)'
        required: false
        type: string
      deployment-method:
        description: 'Deployment method (rsync, ftp, ssh, kubernetes, custom)'
        required: false
        type: string
        default: 'custom'
      health-check-url:
        description: 'URL to check after deployment'
        required: false
        type: string
      health-check-timeout:
        description: 'Health check timeout in seconds'
        required: false
        type: number
        default: 300
      working-directory:
        description: 'Working directory'
        required: false
        type: string
        default: '.'
    secrets:
      DEPLOY_HOST:
        description: 'Deployment host/server'
        required: false
      DEPLOY_USER:
        description: 'Deployment user'
        required: false
      DEPLOY_KEY:
        description: 'SSH private key or deployment credentials'
        required: false
      DEPLOY_PATH:
        description: 'Deployment path on target server'
        required: false

permissions:
  contents: read
  deployments: write

jobs:
  # Delegates project-type detection to a sibling reusable workflow; its
  # "project-type" output drives the method-specific deploy steps below.
  detect:
    name: Detect Project Type
    uses: ./.github/workflows/reusable-project-detector.yml
    with:
      working-directory: ${{ inputs.working-directory }}

  # Resolves the version to deploy and registers a GitHub deployment record
  # whose id is later updated with success/failure status.
  prepare:
    name: Prepare Deployment
    runs-on: ubuntu-latest
    needs: detect
    outputs:
      deployment-id: ${{ steps.create-deployment.outputs.deployment_id }}
      version: ${{ steps.version.outputs.version }}
    steps:
      - name: Checkout repository
        uses: actions/checkout@v6
        with:
          fetch-depth: 0
      # Uses the explicit version input when given; otherwise falls back to
      # `git describe` (needs fetch-depth: 0 above to see tags).
      - name: Determine version
        id: version
        run: |
          if [ -n "${{ inputs.version }}" ]; then
            VERSION="${{ inputs.version }}"
          else
            # Use latest tag or commit SHA
            VERSION=$(git describe --tags --always)
          fi
          echo "version=${VERSION}" >> $GITHUB_OUTPUT
          echo "Deploying version: ${VERSION}"
      - name: Create deployment
        id: create-deployment
        uses: chrnorm/deployment-action@v2
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          environment: ${{ inputs.environment }}
          description: "Deploy ${{ needs.detect.outputs.project-type }} v${{ steps.version.outputs.version }}"
      - name: Deployment info
        run: |
          echo "### 🚀 Deployment Preparation" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "**Environment:** ${{ inputs.environment }}" >> $GITHUB_STEP_SUMMARY
          echo "**Version:** ${{ steps.version.outputs.version }}" >> $GITHUB_STEP_SUMMARY
          echo "**Project Type:** ${{ needs.detect.outputs.project-type }}" >> $GITHUB_STEP_SUMMARY
          echo "**Method:** ${{ inputs.deployment-method }}" >> $GITHUB_STEP_SUMMARY

  # Produces the deployable artifact via the shared build workflow; the
  # deploy job downloads it by the name pattern below.
  build:
    name: Build for Deployment
    needs: [detect, prepare]
    uses: ./.github/workflows/reusable-build.yml
    with:
      working-directory: ${{ inputs.working-directory }}
      upload-artifacts: true
      artifact-name: deployment-package

  deploy:
    name: Deploy to ${{ inputs.environment }}
    runs-on: ubuntu-latest
    needs: [detect, prepare, build]
    # Binding to a GitHub environment enables per-environment protection
    # rules and shows health-check-url as the environment URL.
    environment:
      name: ${{ inputs.environment }}
      url: ${{ inputs.health-check-url }}
    steps:
      - name: Checkout repository
        uses: actions/checkout@v6
      # NOTE(review): artifact name appends the project type; presumably
      # reusable-build.yml uploads under "<artifact-name>-<project-type>" —
      # confirm against that workflow's upload step.
      - name: Download build artifacts
        uses: actions/download-artifact@v7.0.0
        with:
          name: deployment-package-${{ needs.detect.outputs.project-type }}
          path: ./dist
      # NOTE(review): secrets are interpolated directly into run: scripts
      # throughout this job; consider routing them via env: to avoid shell
      # interpretation of secret contents — confirm before hardening.
      - name: Setup SSH key
        if: inputs.deployment-method == 'ssh' || inputs.deployment-method == 'rsync'
        run: |
          mkdir -p ~/.ssh
          echo "${{ secrets.DEPLOY_KEY }}" > ~/.ssh/deploy_key
          chmod 600 ~/.ssh/deploy_key
          ssh-keyscan -H "${{ secrets.DEPLOY_HOST }}" >> ~/.ssh/known_hosts
      # rsync --delete makes the remote path an exact mirror of ./dist.
      - name: Deploy via rsync
        if: inputs.deployment-method == 'rsync'
        run: |
          echo "Deploying via rsync to ${{ secrets.DEPLOY_HOST }}..."
          rsync -avz --delete \
            -e "ssh -i ~/.ssh/deploy_key -o StrictHostKeyChecking=no" \
            ./dist/ \
            "${{ secrets.DEPLOY_USER }}@${{ secrets.DEPLOY_HOST }}:${{ secrets.DEPLOY_PATH }}"
          echo "✅ rsync deployment completed" >> $GITHUB_STEP_SUMMARY
      # Tarball-over-scp variant; the quoted 'EOF' heredoc still receives
      # the ${{ }} expansion because GitHub substitutes expressions before
      # the shell ever sees the script.
      - name: Deploy via SSH
        if: inputs.deployment-method == 'ssh'
        run: |
          echo "Deploying via SSH to ${{ secrets.DEPLOY_HOST }}..."
          # Create deployment package
          tar -czf deployment.tar.gz -C ./dist .
          # Copy to server
          scp -i ~/.ssh/deploy_key deployment.tar.gz \
            "${{ secrets.DEPLOY_USER }}@${{ secrets.DEPLOY_HOST }}:/tmp/"
          # Extract on server
          ssh -i ~/.ssh/deploy_key "${{ secrets.DEPLOY_USER }}@${{ secrets.DEPLOY_HOST }}" << 'EOF'
          cd ${{ secrets.DEPLOY_PATH }}
          tar -xzf /tmp/deployment.tar.gz
          rm /tmp/deployment.tar.gz
          EOF
          echo "✅ SSH deployment completed" >> $GITHUB_STEP_SUMMARY
      # The three "custom" steps below are documented placeholders: they only
      # write guidance to the step summary and perform no deployment.
      - name: Deploy Joomla Extension
        if: needs.detect.outputs.project-type == 'joomla' && inputs.deployment-method == 'custom'
        run: |
          echo "### 🔧 Joomla Extension Deployment" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          # Custom Joomla deployment logic
          echo "⚠️ Custom Joomla deployment logic required" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "Typical steps:" >> $GITHUB_STEP_SUMMARY
          echo "1. Upload extension package to Joomla server" >> $GITHUB_STEP_SUMMARY
          echo "2. Install/update via Joomla Extension Manager API" >> $GITHUB_STEP_SUMMARY
          echo "3. Clear Joomla cache" >> $GITHUB_STEP_SUMMARY
          echo "4. Run database migrations if needed" >> $GITHUB_STEP_SUMMARY
          # Placeholder for actual deployment commands
          echo "Add your Joomla-specific deployment commands here"
      - name: Deploy Dolibarr Module
        if: needs.detect.outputs.project-type == 'dolibarr' && inputs.deployment-method == 'custom'
        run: |
          echo "### 🔧 Dolibarr Module Deployment" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          # Custom Dolibarr deployment logic
          echo "⚠️ Custom Dolibarr deployment logic required" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "Typical steps:" >> $GITHUB_STEP_SUMMARY
          echo "1. Upload module to Dolibarr htdocs/custom directory" >> $GITHUB_STEP_SUMMARY
          echo "2. Activate module via Dolibarr API or admin panel" >> $GITHUB_STEP_SUMMARY
          echo "3. Run module setup hooks" >> $GITHUB_STEP_SUMMARY
          echo "4. Clear Dolibarr cache" >> $GITHUB_STEP_SUMMARY
          # Placeholder for actual deployment commands
          echo "Add your Dolibarr-specific deployment commands here"
      - name: Deploy Generic Application
        if: needs.detect.outputs.project-type == 'generic' && inputs.deployment-method == 'custom'
        run: |
          echo "### 🔧 Generic Application Deployment" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "⚠️ Custom deployment logic required" >> $GITHUB_STEP_SUMMARY
          echo "Add your application-specific deployment commands" >> $GITHUB_STEP_SUMMARY
      # Polls every 10s until the URL answers HTTP 200 or the configured
      # timeout elapses; failure here fails the job and triggers rollback.
      - name: Health check
        if: inputs.health-check-url != ''
        run: |
          echo "Running health check on ${{ inputs.health-check-url }}..."
          TIMEOUT=${{ inputs.health-check-timeout }}
          ELAPSED=0
          INTERVAL=10
          while [ $ELAPSED -lt $TIMEOUT ]; do
            if curl -f -s -o /dev/null -w "%{http_code}" "${{ inputs.health-check-url }}" | grep -q "200"; then
              echo "✅ Health check passed" >> $GITHUB_STEP_SUMMARY
              exit 0
            fi
            echo "Health check attempt $((ELAPSED / INTERVAL + 1)) failed, retrying..."
            sleep $INTERVAL
            ELAPSED=$((ELAPSED + INTERVAL))
          done
          echo "❌ Health check failed after ${TIMEOUT}s" >> $GITHUB_STEP_SUMMARY
          exit 1
      - name: Update deployment status (success)
        if: success()
        uses: chrnorm/deployment-status@v2
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          deployment-id: ${{ needs.prepare.outputs.deployment-id }}
          state: success
          environment-url: ${{ inputs.health-check-url }}
      - name: Deployment summary
        if: success()
        run: |
          echo "### ✅ Deployment Successful" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "**Environment:** ${{ inputs.environment }}" >> $GITHUB_STEP_SUMMARY
          echo "**Version:** ${{ needs.prepare.outputs.version }}" >> $GITHUB_STEP_SUMMARY
          echo "**Project Type:** ${{ needs.detect.outputs.project-type }}" >> $GITHUB_STEP_SUMMARY
          echo "**Time:** $(date -u +"%Y-%m-%d %H:%M:%S UTC")" >> $GITHUB_STEP_SUMMARY
          if [ -n "${{ inputs.health-check-url }}" ]; then
            echo "**URL:** ${{ inputs.health-check-url }}" >> $GITHUB_STEP_SUMMARY
          fi

  # Runs only when something upstream failed; marks the GitHub deployment
  # as failed and emits rollback guidance (actual rollback is a placeholder).
  rollback:
    name: Rollback on Failure
    runs-on: ubuntu-latest
    needs: [prepare, deploy]
    if: failure()
    steps:
      - name: Update deployment status (failure)
        uses: chrnorm/deployment-status@v2
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          deployment-id: ${{ needs.prepare.outputs.deployment-id }}
          state: failure
      - name: Rollback deployment
        run: |
          echo "### ❌ Deployment Failed - Initiating Rollback" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "⚠️ Rollback logic needs to be implemented" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "Typical rollback steps:" >> $GITHUB_STEP_SUMMARY
          echo "1. Restore previous version from backup" >> $GITHUB_STEP_SUMMARY
          echo "2. Revert database migrations if applied" >> $GITHUB_STEP_SUMMARY
          echo "3. Clear caches" >> $GITHUB_STEP_SUMMARY
          echo "4. Verify health checks pass" >> $GITHUB_STEP_SUMMARY
          # Add your rollback commands here

View File

@@ -1,193 +0,0 @@
# Copyright (C) 2026 Moko Consulting <hello@mokoconsulting.tech>
# SPDX-License-Identifier: GPL-3.0-or-later
# FILE INFORMATION
# DEFGROUP: GitHub.Workflows
# INGROUP: MokoStandards.Reusable
# REPO: https://github.com/mokoconsulting-tech/MokoStandards
# PATH: /.github/workflows/reusable-script-executor.yml
# VERSION: 01.00.00
# BRIEF: Reusable workflow to execute MokoStandards scripts in any repository
# NOTE: Provides unified script execution with proper environment setup
name: Execute MokoStandards Script

on:
  workflow_call:
    inputs:
      script_path:
        description: 'Path to script relative to scripts/ directory (e.g., validate/no_secrets.py)'
        required: true
        type: string
      script_args:
        description: 'Arguments to pass to the script'
        required: false
        type: string
        default: ''
      python_version:
        description: 'Python version to use'
        required: false
        type: string
        default: '3.11'
      install_dependencies:
        description: 'Install Python dependencies (pyyaml, etc.)'
        required: false
        type: boolean
        default: true
      working_directory:
        description: 'Working directory for script execution'
        required: false
        type: string
        default: '.'
      create_summary:
        description: 'Create GitHub step summary'
        required: false
        type: boolean
        default: true
    outputs:
      exit_code:
        description: 'Script exit code'
        value: ${{ jobs.execute-script.outputs.exit_code }}
      script_output:
        description: 'Script output (truncated to 1000 chars)'
        value: ${{ jobs.execute-script.outputs.script_output }}

jobs:
  execute-script:
    name: Execute ${{ inputs.script_path }}
    runs-on: ubuntu-latest
    outputs:
      exit_code: ${{ steps.run-script.outputs.exit_code }}
      script_output: ${{ steps.run-script.outputs.script_output }}
    permissions:
      contents: read
    # SECURITY FIX: caller-controlled inputs were previously interpolated
    # straight into run: scripts via ${{ inputs.* }}, allowing shell
    # injection from a malicious caller. All inputs now reach the shell
    # through env vars, which are passed as data rather than code.
    env:
      SCRIPT_REL_PATH: ${{ inputs.script_path }}
      SCRIPT_ARGS: ${{ inputs.script_args }}
      CREATE_SUMMARY: ${{ inputs.create_summary }}
    steps:
      - name: Checkout repository
        uses: actions/checkout@v6
      - name: Setup Python
        if: endsWith(inputs.script_path, '.py')
        uses: actions/setup-python@v6
        with:
          python-version: ${{ inputs.python_version }}
      - name: Install Python dependencies
        if: endsWith(inputs.script_path, '.py') && inputs.install_dependencies
        env:
          PY_VERSION: ${{ inputs.python_version }}
        run: |
          python -m pip install --upgrade pip
          pip install pyyaml
          # Install additional dependencies if requirements file exists
          if [ -f "requirements.txt" ]; then
            pip install -r requirements.txt
          fi
          if [ "$CREATE_SUMMARY" == "true" ]; then
            echo "## 📦 Dependencies Installed" >> $GITHUB_STEP_SUMMARY
            echo "- Python $PY_VERSION" >> $GITHUB_STEP_SUMMARY
            echo "- PyYAML (for configuration)" >> $GITHUB_STEP_SUMMARY
          fi
      - name: Setup Bash
        if: endsWith(inputs.script_path, '.sh')
        run: |
          bash --version
      - name: Verify script exists
        id: verify
        run: |
          SCRIPT_PATH="scripts/${SCRIPT_REL_PATH}"
          if [ ! -f "$SCRIPT_PATH" ]; then
            echo "❌ Script not found: $SCRIPT_PATH" >> $GITHUB_STEP_SUMMARY
            echo "" >> $GITHUB_STEP_SUMMARY
            echo "Available scripts:" >> $GITHUB_STEP_SUMMARY
            find scripts -name "*.py" -o -name "*.sh" | sort >> $GITHUB_STEP_SUMMARY
            exit 1
          fi
          # FIX: emit an absolute path so the run step (which honors
          # inputs.working_directory) still resolves the script when the
          # working directory is not the repository root.
          echo "script_full_path=${GITHUB_WORKSPACE}/${SCRIPT_PATH}" >> $GITHUB_OUTPUT
          if [ "$CREATE_SUMMARY" == "true" ]; then
            echo "## ✅ Script Found" >> $GITHUB_STEP_SUMMARY
            echo "**Path:** \`$SCRIPT_PATH\`" >> $GITHUB_STEP_SUMMARY
            echo "**Type:** $(file -b "$SCRIPT_PATH")" >> $GITHUB_STEP_SUMMARY
          fi
      - name: Make script executable
        env:
          SCRIPT_FULL_PATH: ${{ steps.verify.outputs.script_full_path }}
        run: |
          chmod +x "$SCRIPT_FULL_PATH"
      - name: Run script
        id: run-script
        working-directory: ${{ inputs.working_directory }}
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          SCRIPT_FULL_PATH: ${{ steps.verify.outputs.script_full_path }}
          WORK_DIR: ${{ inputs.working_directory }}
        run: |
          echo "## 🚀 Executing Script" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "**Script:** \`$SCRIPT_FULL_PATH\`" >> $GITHUB_STEP_SUMMARY
          echo "**Arguments:** \`$SCRIPT_ARGS\`" >> $GITHUB_STEP_SUMMARY
          echo "**Working Directory:** \`$WORK_DIR\`" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "### Output" >> $GITHUB_STEP_SUMMARY
          echo "\`\`\`" >> $GITHUB_STEP_SUMMARY
          # $SCRIPT_ARGS is intentionally unquoted so whitespace-separated
          # arguments word-split into separate argv entries, as before.
          set +e
          if [[ "$SCRIPT_FULL_PATH" == *.py ]]; then
            OUTPUT=$(python3 "$SCRIPT_FULL_PATH" $SCRIPT_ARGS 2>&1)
            EXIT_CODE=$?
          elif [[ "$SCRIPT_FULL_PATH" == *.sh ]]; then
            OUTPUT=$(bash "$SCRIPT_FULL_PATH" $SCRIPT_ARGS 2>&1)
            EXIT_CODE=$?
          else
            OUTPUT=$("$SCRIPT_FULL_PATH" $SCRIPT_ARGS 2>&1)
            EXIT_CODE=$?
          fi
          set -e
          # Save outputs for the workflow_call outputs mapping.
          echo "exit_code=$EXIT_CODE" >> $GITHUB_OUTPUT
          # Truncate output for GitHub output (max 1000 chars)
          OUTPUT_TRUNCATED="${OUTPUT:0:1000}"
          echo "script_output<<EOF" >> $GITHUB_OUTPUT
          echo "$OUTPUT_TRUNCATED" >> $GITHUB_OUTPUT
          echo "EOF" >> $GITHUB_OUTPUT
          # Show full output in summary (with line limit)
          echo "$OUTPUT" | head -n 100 >> $GITHUB_STEP_SUMMARY
          echo "\`\`\`" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          # Report exit code and propagate it so the job fails on failure.
          if [ $EXIT_CODE -eq 0 ]; then
            echo "### ✅ Script Completed Successfully" >> $GITHUB_STEP_SUMMARY
            echo "**Exit Code:** $EXIT_CODE" >> $GITHUB_STEP_SUMMARY
          else
            echo "### ❌ Script Failed" >> $GITHUB_STEP_SUMMARY
            echo "**Exit Code:** $EXIT_CODE" >> $GITHUB_STEP_SUMMARY
          fi
          exit $EXIT_CODE
      - name: Upload script output
        if: always()
        uses: actions/upload-artifact@v6
        with:
          name: script-output-${{ github.run_id }}
          path: |
            *.log
            *.json
            *.csv
          retention-days: 7
          if-no-files-found: ignore

View File

@@ -1,362 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<repository-structure xmlns="http://mokoconsulting.com/schemas/repository-structure"
version="1.0"
schema-version="1.0">
<metadata>
<name>MokoCRM Module</name>
<description>Standard repository structure for MokoCRM (Dolibarr) modules</description>
<repository-type>crm-module</repository-type>
<platform>mokocrm</platform>
<last-updated>2026-01-07T00:00:00Z</last-updated>
<maintainer>Moko Consulting</maintainer>
</metadata>
<structure>
<!-- Root level files -->
<root-files>
<file extension="md">
<name>README.md</name>
<description>Developer-focused documentation for contributors and maintainers</description>
<required>true</required>
<audience>developer</audience>
<stub-content><![CDATA[# {MODULE_NAME}
## For Developers
This README is for developers contributing to this module.
### Development Setup
1. Clone this repository
2. Install dependencies: `make install-dev`
3. Run tests: `make test`
### Building
```bash
make build
```
### Testing
```bash
make test
make lint
```
### Contributing
See CONTRIBUTING.md for contribution guidelines.
## For End Users
End user documentation is available in `src/README.md` after installation.
## License
See LICENSE file for details.
]]></stub-content>
</file>
<file extension="md">
<name>CONTRIBUTING.md</name>
<description>Contribution guidelines</description>
<required>true</required>
<audience>contributor</audience>
</file>
<file extension="md">
<name>ROADMAP.md</name>
<description>Project roadmap with version goals and milestones</description>
<required>false</required>
<audience>general</audience>
</file>
<file extension="">
<name>LICENSE</name>
<description>License file (GPL-3.0-or-later) - Default for Dolibarr/CRM modules</description>
<required>true</required>
<audience>general</audience>
<template>templates/licenses/GPL-3.0</template>
<license-type>GPL-3.0-or-later</license-type>
</file>
<file extension="md">
<name>CHANGELOG.md</name>
<description>Version history and changes</description>
<required>true</required>
<audience>general</audience>
</file>
<file>
<name>Makefile</name>
<description>Build automation using MokoStandards templates</description>
<required>true</required>
<always-overwrite>true</always-overwrite>
<audience>developer</audience>
<source>
<path>templates/makefiles</path>
<filename>Makefile.dolibarr.template</filename>
<type>template</type>
</source>
<destination>
<path>.</path>
<filename>Makefile</filename>
<create-path>false</create-path>
</destination>
<template>templates/makefiles/Makefile.dolibarr.template</template>
</file>
<file extension="editorconfig">
<name>.editorconfig</name>
<description>Editor configuration for consistent coding style</description>
<required>true</required>
<audience>developer</audience>
</file>
<file extension="gitignore">
<name>.gitignore</name>
<description>Git ignore patterns - preserved during sync operations</description>
<required>true</required>
<always-overwrite>false</always-overwrite>
<audience>developer</audience>
</file>
<file extension="gitattributes">
<name>.gitattributes</name>
<description>Git attributes configuration</description>
<required>true</required>
<audience>developer</audience>
</file>
</root-files>
<!-- Directory structure -->
<directories>
<!-- Source directory -->
<directory path="src">
<name>src</name>
<description>Module source code for deployment</description>
<required>true</required>
<purpose>Contains the actual module code that gets deployed to Dolibarr</purpose>
<files>
<file extension="md">
<name>README.md</name>
<description>End-user documentation deployed with the module</description>
<required>true</required>
<audience>end-user</audience>
<stub-content><![CDATA[# {MODULE_NAME}
## For End Users
This module provides {MODULE_DESCRIPTION}.
### Installation
1. Navigate to Home → Setup → Modules/Applications
2. Find "{MODULE_NAME}" in the list
3. Click "Activate"
### Configuration
After activation, configure the module:
1. Go to Home → Setup → Modules/Applications
2. Click on the module settings icon
3. Configure as needed
### Usage
{USAGE_INSTRUCTIONS}
### Support
For support, contact: {SUPPORT_EMAIL}
## Version
Current version: {VERSION}
See CHANGELOG.md for version history.
]]></stub-content>
</file>
<file extension="php">
<name>core/modules/mod{ModuleName}.class.php</name>
<description>Main module descriptor file</description>
<required>true</required>
<audience>developer</audience>
</file>
</files>
<subdirectories>
<directory path="src/core">
<name>core</name>
<description>Core module files</description>
<required>true</required>
</directory>
<directory path="src/langs">
<name>langs</name>
<description>Language translation files</description>
<required>true</required>
</directory>
<directory path="src/sql">
<name>sql</name>
<description>Database schema files</description>
<requirement-status>suggested</requirement-status>
</directory>
<directory path="src/css">
<name>css</name>
<description>Stylesheets</description>
<requirement-status>suggested</requirement-status>
</directory>
<directory path="src/js">
<name>js</name>
<description>JavaScript files</description>
<requirement-status>suggested</requirement-status>
</directory>
<directory path="src/class">
<name>class</name>
<description>PHP class files</description>
<requirement-status>suggested</requirement-status>
</directory>
<directory path="src/lib">
<name>lib</name>
<description>Library files</description>
<requirement-status>suggested</requirement-status>
</directory>
</subdirectories>
</directory>
<!-- Documentation directory -->
<directory path="docs">
<name>docs</name>
<description>Developer and technical documentation</description>
<required>true</required>
<purpose>Contains technical documentation, API docs, architecture diagrams</purpose>
<files>
<file extension="md">
<name>index.md</name>
<description>Documentation index</description>
<required>true</required>
</file>
</files>
</directory>
<!-- Scripts directory -->
<directory path="scripts">
<name>scripts</name>
<description>Build and maintenance scripts</description>
<required>true</required>
<purpose>Contains scripts for building, testing, and deploying</purpose>
<files>
<file extension="md">
<name>index.md</name>
<description>Scripts documentation</description>
<requirement-status>required</requirement-status>
</file>
<file extension="sh">
<name>build_package.sh</name>
<description>Package building script for Dolibarr module</description>
<requirement-status>suggested</requirement-status>
<template>templates/scripts/release/package_dolibarr.sh</template>
</file>
<file extension="sh">
<name>validate_module.sh</name>
<description>Module validation script</description>
<requirement-status>suggested</requirement-status>
<template>templates/scripts/validate/dolibarr_module.sh</template>
</file>
<file extension="xml">
<name>MokoStandards.override.xml</name>
<description>MokoStandards sync override configuration</description>
<requirement-status>optional</requirement-status>
<always-overwrite>false</always-overwrite>
</file>
</files>
</directory>
<!-- Tests directory -->
<directory path="tests">
<name>tests</name>
<description>Test files</description>
<required>true</required>
<purpose>Contains unit tests, integration tests, and test fixtures</purpose>
<subdirectories>
<directory path="tests/unit">
<name>unit</name>
<description>Unit tests</description>
<required>true</required>
</directory>
<directory path="tests/integration">
<name>integration</name>
<description>Integration tests</description>
<requirement-status>suggested</requirement-status>
</directory>
</subdirectories>
</directory>
<!-- Templates directory -->
<directory path="templates">
<name>templates</name>
<description>Template files for code generation</description>
<requirement-status>suggested</requirement-status>
<purpose>Contains templates used by build scripts</purpose>
</directory>
<!-- .github directory -->
<directory path=".github">
<name>.github</name>
<description>GitHub-specific configuration</description>
<requirement-status>suggested</requirement-status>
<purpose>Contains GitHub Actions workflows, issue templates, etc.</purpose>
<subdirectories>
<directory path=".github/workflows">
<name>workflows</name>
<description>GitHub Actions workflows</description>
<requirement-status>required</requirement-status>
<files>
<file extension="yml">
<name>ci-dolibarr.yml</name>
<description>Dolibarr-specific CI workflow</description>
<requirement-status>required</requirement-status>
<always-overwrite>true</always-overwrite>
<template>templates/workflows/dolibarr/ci-dolibarr.yml.template</template>
</file>
<file extension="yml">
<name>codeql-analysis.yml</name>
<description>CodeQL security analysis workflow</description>
<requirement-status>required</requirement-status>
<always-overwrite>true</always-overwrite>
<template>templates/workflows/generic/codeql-analysis.yml.template</template>
</file>
<file extension="yml">
<name>standards-compliance.yml</name>
<description>MokoStandards compliance validation</description>
<requirement-status>required</requirement-status>
<always-overwrite>true</always-overwrite>
<template>.github/workflows/standards-compliance.yml</template>
</file>
</files>
</directory>
</subdirectories>
</directory>
</directories>
</structure>
</repository-structure>

View File

@@ -1,595 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<repository-structure xmlns="http://mokoconsulting.com/schemas/repository-structure"
version="1.0"
schema-version="1.0">
<metadata>
<name>Default Repository Structure</name>
<description>Default repository structure applicable to all repository types with minimal requirements</description>
<repository-type>library</repository-type>
<platform>multi-platform</platform>
<last-updated>2026-01-16T00:00:00Z</last-updated>
<maintainer>Moko Consulting</maintainer>
</metadata>
<structure>
<!-- Root level files -->
<root-files>
<file extension="md">
<name>README.md</name>
<description>Project overview and documentation</description>
<requirement-status>required</requirement-status>
<audience>general</audience>
<source>
<path>templates/docs/required</path>
<filename>template-README.md</filename>
<type>template</type>
</source>
<destination>
<path>.</path>
<filename>README.md</filename>
<create-path>false</create-path>
</destination>
<template>templates/docs/required/template-README.md</template>
</file>
<file extension="">
<name>LICENSE</name>
<description>License file (GPL-3.0-or-later)</description>
<requirement-status>required</requirement-status>
<audience>general</audience>
<source>
<path>templates/licenses</path>
<filename>GPL-3.0</filename>
<type>template</type>
</source>
<destination>
<path>.</path>
<filename>LICENSE</filename>
<create-path>false</create-path>
</destination>
<template>templates/licenses/GPL-3.0</template>
</file>
<file extension="md">
<name>CHANGELOG.md</name>
<description>Version history and changes</description>
<requirement-status>required</requirement-status>
<audience>general</audience>
<source>
<path>templates/docs/required</path>
<filename>template-CHANGELOG.md</filename>
<type>template</type>
</source>
<destination>
<path>.</path>
<filename>CHANGELOG.md</filename>
<create-path>false</create-path>
</destination>
<template>templates/docs/required/template-CHANGELOG.md</template>
</file>
<file extension="md">
<name>CONTRIBUTING.md</name>
<description>Contribution guidelines</description>
<requirement-status>required</requirement-status>
<audience>contributor</audience>
<source>
<path>templates/docs/required</path>
<filename>template-CONTRIBUTING.md</filename>
<type>template</type>
</source>
<destination>
<path>.</path>
<filename>CONTRIBUTING.md</filename>
<create-path>false</create-path>
</destination>
<template>templates/docs/required/template-CONTRIBUTING.md</template>
</file>
<file extension="md">
<name>SECURITY.md</name>
<description>Security policy and vulnerability reporting</description>
<requirement-status>required</requirement-status>
<audience>general</audience>
<source>
<path>templates/docs/required</path>
<filename>template-SECURITY.md</filename>
<type>template</type>
</source>
<destination>
<path>.</path>
<filename>SECURITY.md</filename>
<create-path>false</create-path>
</destination>
<template>templates/docs/required/template-SECURITY.md</template>
</file>
<file extension="md">
<name>CODE_OF_CONDUCT.md</name>
<description>Community code of conduct</description>
<requirement-status>required</requirement-status>
<always-overwrite>true</always-overwrite>
<audience>contributor</audience>
<source>
<path>templates/docs/extra</path>
<filename>template-CODE_OF_CONDUCT.md</filename>
<type>template</type>
</source>
<destination>
<path>.</path>
<filename>CODE_OF_CONDUCT.md</filename>
<create-path>false</create-path>
</destination>
<template>templates/docs/extra/template-CODE_OF_CONDUCT.md</template>
</file>
<file extension="md">
<name>ROADMAP.md</name>
<description>Project roadmap with version goals and milestones</description>
<requirement-status>suggested</requirement-status>
<audience>general</audience>
<source>
<path>templates/docs/extra</path>
<filename>template-ROADMAP.md</filename>
<type>template</type>
</source>
<destination>
<path>.</path>
<filename>ROADMAP.md</filename>
<create-path>false</create-path>
</destination>
<template>templates/docs/extra/template-ROADMAP.md</template>
</file>
<file extension="gitignore">
<name>.gitignore</name>
<description>Git ignore patterns</description>
<requirement-status>required</requirement-status>
<always-overwrite>false</always-overwrite>
<audience>developer</audience>
</file>
<file extension="gitattributes">
<name>.gitattributes</name>
<description>Git attributes configuration</description>
<requirement-status>required</requirement-status>
<audience>developer</audience>
</file>
<file extension="editorconfig">
<name>.editorconfig</name>
<description>Editor configuration for consistent coding style</description>
<requirement-status>required</requirement-status>
<always-overwrite>false</always-overwrite>
<audience>developer</audience>
</file>
<file>
<name>Makefile</name>
<description>Build automation</description>
<requirement-status>required</requirement-status>
<always-overwrite>true</always-overwrite>
<audience>developer</audience>
<source>
<path>templates/makefiles</path>
<filename>Makefile.generic.template</filename>
<type>template</type>
</source>
<destination>
<path>.</path>
<filename>Makefile</filename>
<create-path>false</create-path>
</destination>
<template>templates/makefiles/Makefile.generic.template</template>
</file>
</root-files>
<!-- Directory structure -->
<directories>
<!-- Documentation directory -->
<directory path="docs">
<name>docs</name>
<description>Documentation directory</description>
<requirement-status>required</requirement-status>
<purpose>Contains comprehensive project documentation</purpose>
<files>
<file extension="md">
<name>index.md</name>
<description>Documentation index</description>
<requirement-status>suggested</requirement-status>
<template>templates/docs/index.md</template>
</file>
<file extension="md">
<name>INSTALLATION.md</name>
<description>Installation and setup instructions</description>
<requirement-status>required</requirement-status>
<audience>general</audience>
<source>
<path>templates/docs/required</path>
<filename>template-INSTALLATION.md</filename>
<type>template</type>
</source>
<destination>
<path>docs</path>
<filename>INSTALLATION.md</filename>
<create-path>true</create-path>
</destination>
<template>templates/docs/required/template-INSTALLATION.md</template>
</file>
<file extension="md">
<name>API.md</name>
<description>API documentation</description>
<requirement-status>suggested</requirement-status>
</file>
<file extension="md">
<name>ARCHITECTURE.md</name>
<description>Architecture documentation</description>
<requirement-status>suggested</requirement-status>
</file>
</files>
</directory>
<!-- Scripts directory -->
<directory path="scripts">
<name>scripts</name>
<description>Build and automation scripts</description>
<requirement-status>required</requirement-status>
<purpose>Contains scripts for building, testing, and deploying</purpose>
<files>
<file extension="sh">
<name>validate_structure.sh</name>
<description>Repository structure validation script</description>
<requirement-status>suggested</requirement-status>
<template>templates/scripts/validate/structure.sh</template>
</file>
<file extension="xml">
<name>MokoStandards.override.xml</name>
<description>MokoStandards sync override configuration</description>
<requirement-status>optional</requirement-status>
<always-overwrite>false</always-overwrite>
</file>
</files>
</directory>
<!-- Source directory -->
<directory path="src">
<name>src</name>
<description>Source code directory</description>
<requirement-status>required</requirement-status>
<purpose>Contains application source code</purpose>
</directory>
<!-- Tests directory -->
<directory path="tests">
<name>tests</name>
<description>Test files</description>
<requirement-status>suggested</requirement-status>
<purpose>Contains unit tests, integration tests, and test fixtures</purpose>
<subdirectories>
<directory path="tests/unit">
<name>unit</name>
<description>Unit tests</description>
<requirement-status>suggested</requirement-status>
</directory>
<directory path="tests/integration">
<name>integration</name>
<description>Integration tests</description>
<requirement-status>optional</requirement-status>
</directory>
</subdirectories>
</directory>
<!-- .github directory -->
<directory path=".github">
<name>.github</name>
<description>GitHub-specific configuration</description>
<requirement-status>required</requirement-status>
<purpose>Contains GitHub Actions workflows, issue templates, etc.</purpose>
<subdirectories>
<directory path=".github/workflows">
<name>workflows</name>
<description>GitHub Actions workflows</description>
<requirement-status>required</requirement-status>
<files>
<file extension="yml">
<name>ci.yml</name>
<description>Continuous integration workflow</description>
<requirement-status>required</requirement-status>
<always-overwrite>true</always-overwrite>
<source>
<path>templates/workflows/generic</path>
<filename>ci.yml.template</filename>
<type>template</type>
</source>
<destination>
<path>.github/workflows</path>
<filename>ci.yml</filename>
<create-path>true</create-path>
</destination>
<template>templates/workflows/generic/ci.yml.template</template>
</file>
<file extension="yml">
<name>test.yml</name>
<description>Comprehensive testing workflow</description>
<requirement-status>optional</requirement-status>
<always-overwrite>true</always-overwrite>
<source>
<path>templates/workflows/generic</path>
<filename>test.yml.template</filename>
<type>template</type>
</source>
<destination>
<path>.github/workflows</path>
<filename>test.yml</filename>
<create-path>true</create-path>
</destination>
<template>templates/workflows/generic/test.yml.template</template>
</file>
<file extension="yml">
<name>code-quality.yml</name>
<description>Code quality and linting workflow</description>
<requirement-status>required</requirement-status>
<always-overwrite>true</always-overwrite>
<source>
<path>templates/workflows/generic</path>
<filename>code-quality.yml.template</filename>
<type>template</type>
</source>
<destination>
<path>.github/workflows</path>
<filename>code-quality.yml</filename>
<create-path>true</create-path>
</destination>
<template>templates/workflows/generic/code-quality.yml.template</template>
</file>
<file extension="yml">
<name>codeql-analysis.yml</name>
<description>CodeQL security analysis workflow</description>
<requirement-status>required</requirement-status>
<always-overwrite>true</always-overwrite>
<source>
<path>templates/workflows/generic</path>
<filename>codeql-analysis.yml.template</filename>
<type>template</type>
</source>
<destination>
<path>.github/workflows</path>
<filename>codeql-analysis.yml</filename>
<create-path>true</create-path>
</destination>
<template>templates/workflows/generic/codeql-analysis.yml.template</template>
</file>
<file extension="yml">
<name>deploy.yml</name>
<description>Deployment workflow</description>
<requirement-status>optional</requirement-status>
<always-overwrite>true</always-overwrite>
<source>
<path>templates/workflows/generic</path>
<filename>deploy.yml.template</filename>
<type>template</type>
</source>
<destination>
<path>.github/workflows</path>
<filename>deploy.yml</filename>
<create-path>true</create-path>
</destination>
<template>templates/workflows/generic/deploy.yml.template</template>
</file>
<file extension="yml">
<name>repo-health.yml</name>
<description>Repository health monitoring</description>
<requirement-status>required</requirement-status>
<always-overwrite>true</always-overwrite>
<source>
<path>templates/workflows/generic</path>
<filename>repo_health.yml.template</filename>
<type>template</type>
</source>
<destination>
<path>.github/workflows</path>
<filename>repo-health.yml</filename>
<create-path>true</create-path>
</destination>
<template>templates/workflows/generic/repo_health.yml.template</template>
</file>
<file extension="yml">
<name>release-cycle.yml</name>
<description>Release management workflow with automated release flow</description>
<requirement-status>required</requirement-status>
<always-overwrite>true</always-overwrite>
<source>
<path>.github/workflows</path>
<filename>release-cycle.yml</filename>
<type>copy</type>
</source>
<destination>
<path>.github/workflows</path>
<filename>release-cycle.yml</filename>
<create-path>true</create-path>
</destination>
<template>.github/workflows/release-cycle.yml</template>
</file>
<file extension="yml">
<name>standards-compliance.yml</name>
<description>MokoStandards compliance validation</description>
<requirement-status>required</requirement-status>
<always-overwrite>true</always-overwrite>
<source>
<path>.github/workflows</path>
<filename>standards-compliance.yml</filename>
<type>copy</type>
</source>
<destination>
<path>.github/workflows</path>
<filename>standards-compliance.yml</filename>
<create-path>true</create-path>
</destination>
<template>.github/workflows/standards-compliance.yml</template>
</file>
</files>
</directory>
</subdirectories>
</directory>
<!-- Not-allowed directories (generated/build artifacts) -->
<directory path="node_modules">
<name>node_modules</name>
<description>Node.js dependencies (generated)</description>
<requirement-status>not-allowed</requirement-status>
<purpose>Generated directory that should not be committed</purpose>
</directory>
<directory path="vendor">
<name>vendor</name>
<description>PHP dependencies (generated)</description>
<requirement-status>not-allowed</requirement-status>
<purpose>Generated directory that should not be committed</purpose>
</directory>
<directory path="build">
<name>build</name>
<description>Build artifacts (generated)</description>
<requirement-status>not-allowed</requirement-status>
<purpose>Generated directory that should not be committed</purpose>
</directory>
<directory path="dist">
<name>dist</name>
<description>Distribution files (generated)</description>
<requirement-status>not-allowed</requirement-status>
<purpose>Generated directory that should not be committed</purpose>
</directory>
</directories>
<!-- Repository Requirements -->
<repository-requirements>
<!-- Required Secrets -->
<secrets>
<secret>
<name>GITHUB_TOKEN</name>
<description>GitHub API token (automatically provided)</description>
<required>true</required>
<scope>repository</scope>
<used-in>GitHub Actions workflows</used-in>
</secret>
<secret>
<name>CODECOV_TOKEN</name>
<description>Codecov upload token for code coverage reporting</description>
<required>false</required>
<scope>repository</scope>
<used-in>CI workflow code coverage step</used-in>
</secret>
</secrets>
<!-- Required Variables -->
<variables>
<variable>
<name>NODE_VERSION</name>
<description>Node.js version for CI/CD</description>
<default-value>18</default-value>
<required>false</required>
<scope>repository</scope>
</variable>
<variable>
<name>PYTHON_VERSION</name>
<description>Python version for CI/CD</description>
<default-value>3.9</default-value>
<required>false</required>
<scope>repository</scope>
</variable>
</variables>
<!-- Branch Protections -->
<branch-protections>
<branch-protection>
<branch-pattern>main</branch-pattern>
<require-pull-request>true</require-pull-request>
<required-approvals>1</required-approvals>
<require-code-owner-review>false</require-code-owner-review>
<dismiss-stale-reviews>true</dismiss-stale-reviews>
<require-status-checks>true</require-status-checks>
<required-status-checks>
<check>ci</check>
<check>code-quality</check>
</required-status-checks>
<enforce-admins>false</enforce-admins>
<restrict-pushes>true</restrict-pushes>
</branch-protection>
<branch-protection>
<branch-pattern>master</branch-pattern>
<require-pull-request>true</require-pull-request>
<required-approvals>1</required-approvals>
<require-code-owner-review>false</require-code-owner-review>
<dismiss-stale-reviews>true</dismiss-stale-reviews>
<require-status-checks>true</require-status-checks>
<required-status-checks>
<check>ci</check>
</required-status-checks>
<enforce-admins>false</enforce-admins>
<restrict-pushes>true</restrict-pushes>
</branch-protection>
</branch-protections>
<!-- Repository Settings -->
<repository-settings>
<has-issues>true</has-issues>
<has-projects>true</has-projects>
<has-wiki>false</has-wiki>
<has-discussions>false</has-discussions>
<allow-merge-commit>true</allow-merge-commit>
<allow-squash-merge>true</allow-squash-merge>
<allow-rebase-merge>false</allow-rebase-merge>
<delete-branch-on-merge>true</delete-branch-on-merge>
<allow-auto-merge>false</allow-auto-merge>
</repository-settings>
<!-- Required Labels -->
<labels>
<label>
<name>bug</name>
<color>d73a4a</color>
<description>Something isn't working</description>
</label>
<label>
<name>enhancement</name>
<color>a2eeef</color>
<description>New feature or request</description>
</label>
<label>
<name>documentation</name>
<color>0075ca</color>
<description>Improvements or additions to documentation</description>
</label>
<label>
<name>security</name>
<color>ee0701</color>
<description>Security vulnerability or concern</description>
</label>
</labels>
</repository-requirements>
</structure>
</repository-structure>

View File

@@ -1,389 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<repository-structure xmlns="http://mokoconsulting.com/schemas/repository-structure"
version="1.0"
schema-version="1.0">
<metadata>
<name>MokoWaaS Component</name>
<description>Standard repository structure for MokoWaaS (Joomla) components</description>
<repository-type>waas-component</repository-type>
<platform>mokowaas</platform>
<last-updated>2026-01-15T00:00:00Z</last-updated>
<maintainer>Moko Consulting</maintainer>
</metadata>
<structure>
<!-- Root level files -->
<root-files>
<file extension="md">
<name>README.md</name>
<description>Developer-focused documentation for contributors and maintainers</description>
<required>true</required>
<audience>developer</audience>
</file>
<file extension="">
<name>LICENSE</name>
<description>License file (GPL-3.0-or-later) - Default for Joomla/WaaS components</description>
<required>true</required>
<audience>general</audience>
<template>templates/licenses/GPL-3.0</template>
<license-type>GPL-3.0-or-later</license-type>
</file>
<file extension="md">
<name>CHANGELOG.md</name>
<description>Version history and changes</description>
<required>true</required>
<audience>general</audience>
</file>
<file extension="md">
<name>SECURITY.md</name>
<description>Security policy and vulnerability reporting</description>
<required>true</required>
<audience>general</audience>
</file>
<file extension="md">
<name>CODE_OF_CONDUCT.md</name>
<description>Community code of conduct</description>
<required>true</required>
<always-overwrite>true</always-overwrite>
<audience>contributor</audience>
</file>
<file extension="md">
<name>ROADMAP.md</name>
<description>Project roadmap with version goals and milestones</description>
<required>false</required>
<audience>general</audience>
</file>
<file extension="md">
<name>CONTRIBUTING.md</name>
<description>Contribution guidelines</description>
<required>true</required>
<audience>contributor</audience>
</file>
<file>
<name>Makefile</name>
<description>Build automation using MokoStandards templates</description>
<required>true</required>
<always-overwrite>true</always-overwrite>
<audience>developer</audience>
<source>
<path>templates/makefiles</path>
<filename>Makefile.joomla.template</filename>
<type>template</type>
</source>
<destination>
<path>.</path>
<filename>Makefile</filename>
<create-path>false</create-path>
</destination>
<template>templates/makefiles/Makefile.joomla.template</template>
</file>
<file extension="gitignore">
<name>.gitignore</name>
<description>Git ignore patterns for Joomla development - preserved during sync operations</description>
<required>true</required>
<always-overwrite>false</always-overwrite>
<audience>developer</audience>
<template>templates/configs/.gitignore.joomla</template>
<validation-rules>
<rule>
<type>content-pattern</type>
<description>Must contain sftp-config pattern to ignore SFTP sync configuration files</description>
<pattern>sftp-config</pattern>
<severity>error</severity>
</rule>
<rule>
<type>content-pattern</type>
<description>Must contain user.css pattern to ignore custom user CSS overrides</description>
<pattern>user\.css</pattern>
<severity>error</severity>
</rule>
<rule>
<type>content-pattern</type>
<description>Must contain user.js pattern to ignore custom user JavaScript overrides</description>
<pattern>user\.js</pattern>
<severity>error</severity>
</rule>
<rule>
<type>content-pattern</type>
<description>Must contain modulebuilder.txt pattern to ignore Joomla Module Builder artifacts</description>
<pattern>modulebuilder\.txt</pattern>
<severity>error</severity>
</rule>
<rule>
<type>content-pattern</type>
<description>Must contain colors_custom.css pattern to ignore custom color scheme overrides</description>
<pattern>colors_custom\.css</pattern>
<severity>error</severity>
</rule>
</validation-rules>
</file>
<file extension="gitattributes">
<name>.gitattributes</name>
<description>Git attributes configuration</description>
<required>true</required>
<audience>developer</audience>
</file>
<file extension="editorconfig">
<name>.editorconfig</name>
<description>Editor configuration for consistent coding style - preserved during sync</description>
<required>true</required>
<always-overwrite>false</always-overwrite>
<audience>developer</audience>
</file>
</root-files>
<!-- Directory structure -->
<directories>
<!-- Source directory for site (frontend) -->
<directory path="site">
<name>site</name>
<description>Component frontend (site) code</description>
<required>true</required>
<purpose>Contains frontend component code deployed to site</purpose>
<files>
<file extension="php">
<name>controller.php</name>
<description>Main site controller</description>
<required>true</required>
<audience>developer</audience>
</file>
<file extension="xml">
<name>manifest.xml</name>
<description>Component manifest for site</description>
<required>true</required>
<audience>developer</audience>
</file>
</files>
<subdirectories>
<directory path="site/controllers">
<name>controllers</name>
<description>Site controllers</description>
<requirement-status>suggested</requirement-status>
</directory>
<directory path="site/models">
<name>models</name>
<description>Site models</description>
<requirement-status>suggested</requirement-status>
</directory>
<directory path="site/views">
<name>views</name>
<description>Site views</description>
<required>true</required>
</directory>
</subdirectories>
</directory>
<!-- Admin directory -->
<directory path="admin">
<name>admin</name>
<description>Component backend (admin) code</description>
<required>true</required>
<purpose>Contains backend component code for administrator</purpose>
<files>
<file extension="php">
<name>controller.php</name>
<description>Main admin controller</description>
<required>true</required>
<audience>developer</audience>
</file>
</files>
<subdirectories>
<directory path="admin/controllers">
<name>controllers</name>
<description>Admin controllers</description>
<requirement-status>suggested</requirement-status>
</directory>
<directory path="admin/models">
<name>models</name>
<description>Admin models</description>
<requirement-status>suggested</requirement-status>
</directory>
<directory path="admin/views">
<name>views</name>
<description>Admin views</description>
<required>true</required>
</directory>
<directory path="admin/sql">
<name>sql</name>
<description>Database schema files</description>
<requirement-status>suggested</requirement-status>
</directory>
</subdirectories>
</directory>
<!-- Media directory -->
<directory path="media">
<name>media</name>
<description>Media files (CSS, JS, images)</description>
<requirement-status>suggested</requirement-status>
<purpose>Contains static assets</purpose>
<subdirectories>
<directory path="media/css">
<name>css</name>
<description>Stylesheets</description>
<requirement-status>suggested</requirement-status>
</directory>
<directory path="media/js">
<name>js</name>
<description>JavaScript files</description>
<requirement-status>suggested</requirement-status>
</directory>
<directory path="media/images">
<name>images</name>
<description>Image files</description>
<requirement-status>suggested</requirement-status>
</directory>
</subdirectories>
</directory>
<!-- Language directory -->
<directory path="language">
<name>language</name>
<description>Language translation files</description>
<required>true</required>
<purpose>Contains language INI files</purpose>
</directory>
<!-- Documentation directory -->
<directory path="docs">
<name>docs</name>
<description>Developer and technical documentation</description>
<required>true</required>
<purpose>Contains technical documentation, API docs, architecture diagrams</purpose>
<files>
<file extension="md">
<name>index.md</name>
<description>Documentation index</description>
<required>true</required>
</file>
</files>
</directory>
<!-- Scripts directory -->
<directory path="scripts">
<name>scripts</name>
<description>Build and maintenance scripts</description>
<required>true</required>
<purpose>Contains scripts for building, testing, and deploying</purpose>
<files>
<file extension="md">
<name>index.md</name>
<description>Scripts documentation</description>
<requirement-status>required</requirement-status>
</file>
<file extension="sh">
<name>build_package.sh</name>
<description>Package building script for Joomla component</description>
<requirement-status>suggested</requirement-status>
<template>templates/scripts/release/package_joomla.sh</template>
</file>
<file extension="sh">
<name>validate_manifest.sh</name>
<description>Manifest validation script</description>
<requirement-status>suggested</requirement-status>
<template>templates/scripts/validate/manifest.sh</template>
</file>
<file extension="xml">
<name>MokoStandards.override.xml</name>
<description>MokoStandards sync override configuration - preserved during sync</description>
<requirement-status>suggested</requirement-status>
<always-overwrite>false</always-overwrite>
<audience>developer</audience>
</file>
</files>
</directory>
<!-- Tests directory -->
<directory path="tests">
<name>tests</name>
<description>Test files</description>
<required>true</required>
<purpose>Contains unit tests, integration tests, and test fixtures</purpose>
<subdirectories>
<directory path="tests/unit">
<name>unit</name>
<description>Unit tests</description>
<required>true</required>
</directory>
<directory path="tests/integration">
<name>integration</name>
<description>Integration tests</description>
<requirement-status>suggested</requirement-status>
</directory>
</subdirectories>
</directory>
<!-- .github directory -->
<directory path=".github">
<name>.github</name>
<description>GitHub-specific configuration</description>
<requirement-status>suggested</requirement-status>
<purpose>Contains GitHub Actions workflows, issue templates, etc.</purpose>
<subdirectories>
<directory path=".github/workflows">
<name>workflows</name>
<description>GitHub Actions workflows</description>
<requirement-status>required</requirement-status>
<files>
<file extension="yml">
<name>ci-joomla.yml</name>
<description>Joomla-specific CI workflow</description>
<requirement-status>required</requirement-status>
<always-overwrite>true</always-overwrite>
<template>templates/workflows/joomla/ci-joomla.yml.template</template>
</file>
<file extension="yml">
<name>codeql-analysis.yml</name>
<description>CodeQL security analysis workflow</description>
<requirement-status>required</requirement-status>
<always-overwrite>true</always-overwrite>
<template>templates/workflows/generic/codeql-analysis.yml.template</template>
</file>
<file extension="yml">
<name>standards-compliance.yml</name>
<description>MokoStandards compliance validation</description>
<requirement-status>required</requirement-status>
<always-overwrite>true</always-overwrite>
<template>.github/workflows/standards-compliance.yml</template>
</file>
</files>
</directory>
</subdirectories>
</directory>
</directories>
</structure>
</repository-structure>

View File

@@ -1,451 +0,0 @@
#!/usr/bin/env python3
"""
Copyright (C) 2025 Moko Consulting <hello@mokoconsulting.tech>
This file is part of a Moko Consulting project.
SPDX-LICENSE-IDENTIFIER: GPL-3.0-or-later
This program is free software; you can redistribute it and/or modify it under the terms
of the GNU General Public License as published by the Free Software Foundation; either
version 3 of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License (./LICENSE).
# FILE INFORMATION
DEFGROUP: MokoStandards
INGROUP: MokoStandards.Scripts
REPO: https://github.com/mokoconsulting-tech/MokoStandards/
VERSION: 05.00.00
PATH: ./scripts/release_version.py
BRIEF: Script to release a version by moving UNRELEASED items to versioned section
NOTE: Updates CHANGELOG.md and optionally updates VERSION in files
"""
import argparse
import json
import re
import subprocess
import sys
from datetime import datetime
from pathlib import Path
from typing import List, Optional, Tuple
class VersionReleaser:
    """Manages version releases in CHANGELOG.md and updates VERSION in files.

    Responsibilities:
      * move the ``## [Unreleased]`` section of a Keep-a-Changelog file into a
        new ``## [X.Y.Z] - DATE`` section,
      * keep the ``# CHANGELOG - Name (VERSION: X.Y.Z)`` H1 header in sync,
      * optionally rewrite ``VERSION:`` headers across repository files, and
      * optionally create a GitHub release via the ``gh`` CLI.
    """

    # Heading patterns (Keep a Changelog format).
    UNRELEASED_PATTERN = r"## \[Unreleased\]"  # Standard Keep a Changelog format
    VERSION_PATTERN = r"## \[(\d+\.\d+\.\d+)\]"
    # "VERSION: X.Y.Z" header embedded in arbitrary repository files.
    VERSION_HEADER_PATTERN = r"VERSION:\s*(\d+\.\d+\.\d+)"
    # H1 format: "# CHANGELOG - RepoName (VERSION: X.Y.Z)"
    CHANGELOG_H1_PATTERN = r"^# CHANGELOG - .+ \(VERSION: (\d+\.\d+\.\d+)\)"

    def __init__(self, changelog_path: Path, repo_root: Path):
        """
        Initialize the version releaser.

        Args:
            changelog_path: Path to CHANGELOG.md file
            repo_root: Path to repository root
        """
        self.changelog_path = changelog_path
        self.repo_root = repo_root
        self.lines: List[str] = []

    def read_changelog(self) -> bool:
        """Read the changelog into ``self.lines``; return True on success."""
        try:
            with open(self.changelog_path, "r", encoding="utf-8") as f:
                self.lines = f.readlines()
            return True
        except FileNotFoundError:
            print(f"Error: CHANGELOG.md not found at {self.changelog_path}", file=sys.stderr)
            return False
        except Exception as e:
            print(f"Error reading CHANGELOG.md: {e}", file=sys.stderr)
            return False

    def write_changelog(self) -> bool:
        """Write ``self.lines`` back to the changelog file; return True on success."""
        try:
            with open(self.changelog_path, "w", encoding="utf-8") as f:
                f.writelines(self.lines)
            return True
        except Exception as e:
            print(f"Error writing CHANGELOG.md: {e}", file=sys.stderr)
            return False

    def find_unreleased_section(self) -> Optional[int]:
        """Return the line index of the UNRELEASED heading, or None if absent."""
        for i, line in enumerate(self.lines):
            if re.match(self.UNRELEASED_PATTERN, line):
                return i
        return None

    def find_next_version_section(self, start_index: int) -> Optional[int]:
        """Return the index of the first version heading after ``start_index``, or None."""
        for i in range(start_index + 1, len(self.lines)):
            if re.match(self.VERSION_PATTERN, self.lines[i]):
                return i
        return None

    def has_unreleased_content(self, unreleased_index: int, next_version_index: Optional[int]) -> bool:
        """Check if the UNRELEASED section has any content.

        Blank lines and heading lines (anything starting with "##", which
        includes "###" category headings) do not count as content.
        """
        end_index = next_version_index if next_version_index else len(self.lines)
        for i in range(unreleased_index + 1, end_index):
            line = self.lines[i].strip()
            if line and not line.startswith("##"):
                return True
        return False

    def validate_version(self, version: str) -> bool:
        """Validate version format (XX.YY.ZZ, exactly two digits per component)."""
        pattern = r"^\d{2}\.\d{2}\.\d{2}$"
        return bool(re.match(pattern, version))

    def release_version(self, version: str, date: Optional[str] = None) -> bool:
        """
        Move UNRELEASED content to a new version section.

        Args:
            version: Version number (XX.YY.ZZ format)
            date: Release date (YYYY-MM-DD format), defaults to today

        Returns:
            True if successful, False otherwise
        """
        if not self.validate_version(version):
            print(f"Error: Invalid version format '{version}'. Must be XX.YY.ZZ (e.g., 05.01.00)",
                  file=sys.stderr)
            return False
        if date is None:
            date = datetime.now().strftime("%Y-%m-%d")

        unreleased_index = self.find_unreleased_section()
        if unreleased_index is None:
            print("Error: UNRELEASED section not found in CHANGELOG.md", file=sys.stderr)
            return False
        next_version_index = self.find_next_version_section(unreleased_index)

        # Refuse to release when there is nothing under UNRELEASED.
        if not self.has_unreleased_content(unreleased_index, next_version_index):
            print("Warning: UNRELEASED section is empty. Nothing to release.", file=sys.stderr)
            return False

        # Capture, then remove, the content between UNRELEASED and the next version.
        if next_version_index:
            unreleased_content = self.lines[unreleased_index + 1:next_version_index]
            del self.lines[unreleased_index + 1:next_version_index]
        else:
            unreleased_content = self.lines[unreleased_index + 1:]
            del self.lines[unreleased_index + 1:]

        # Build the new version section and insert it right after the
        # UNRELEASED heading (reversed insert preserves the intended order).
        new_version_lines = [
            "\n",
            f"## [{version}] - {date}\n"
        ]
        new_version_lines.extend(unreleased_content)
        insert_index = unreleased_index + 1
        for line in reversed(new_version_lines):
            self.lines.insert(insert_index, line)

        # Keep the H1 header version in sync with the released version.
        self.update_changelog_h1_version(version)
        return True

    def update_changelog_h1_version(self, version: str) -> bool:
        """
        Update the version in the H1 header of CHANGELOG.
        Format: # CHANGELOG - RepoName (VERSION: X.Y.Z)

        Args:
            version: New version number

        Returns:
            True if updated, False otherwise
        """
        for i, line in enumerate(self.lines):
            if re.match(self.CHANGELOG_H1_PATTERN, line):
                # Re-match to extract the repository name from the current H1.
                match = re.match(r"^# CHANGELOG - (.+) \(VERSION: \d+\.\d+\.\d+\)", line)
                if match:
                    repo_name = match.group(1)
                    self.lines[i] = f"# CHANGELOG - {repo_name} (VERSION: {version})\n"
                    return True
        return False

    def update_file_versions(self, version: str, dry_run: bool = False) -> List[Path]:
        """
        Update VERSION headers in all matching files in the repository.

        Args:
            version: New version number
            dry_run: If True, don't actually update files

        Returns:
            Repo-relative paths of files that were (or would be) updated.
        """
        updated_files = []
        # Only text-like file types are candidates for a VERSION header.
        patterns = ["**/*.md", "**/*.py", "**/*.txt", "**/*.yml", "**/*.yaml"]
        files_to_check = []
        for pattern in patterns:
            files_to_check.extend(self.repo_root.glob(pattern))

        # Vendored / generated directories are never rewritten.
        skip_dirs = [".git", "node_modules", "vendor", "__pycache__", ".venv"]
        for file_path in files_to_check:
            if any(skip_dir in file_path.parts for skip_dir in skip_dirs):
                continue
            try:
                with open(file_path, "r", encoding="utf-8") as f:
                    content = f.read()
                if re.search(self.VERSION_HEADER_PATTERN, content):
                    new_content = re.sub(
                        self.VERSION_HEADER_PATTERN,
                        f"VERSION: {version}",
                        content
                    )
                    if new_content != content:
                        if not dry_run:
                            with open(file_path, "w", encoding="utf-8") as f:
                                f.write(new_content)
                        updated_files.append(file_path.relative_to(self.repo_root))
            except (UnicodeDecodeError, PermissionError):
                # Skip binary files or files we can't read.
                continue
            except Exception as e:
                print(f"Warning: Error processing {file_path}: {e}", file=sys.stderr)
                continue
        return updated_files

    def extract_release_notes(self, version: str) -> Optional[str]:
        """
        Extract release notes for a specific version from CHANGELOG.

        Args:
            version: Version number to extract notes for

        Returns:
            Release notes content or None if not found
        """
        version_pattern = rf"## \[{re.escape(version)}\]"
        notes_lines = []
        in_version = False
        for line in self.lines:
            if re.match(version_pattern, line):
                in_version = True
                continue
            elif in_version:
                # The next version heading ends this version's section.
                if line.startswith("## ["):
                    break
                notes_lines.append(line)
        if notes_lines:
            return "".join(notes_lines).strip()
        return None

    def create_github_release(self, version: str, dry_run: bool = False) -> bool:
        """
        Create a GitHub release (tag ``v<version>``) using the ``gh`` CLI.

        Args:
            version: Version number
            dry_run: If True, don't actually create release

        Returns:
            True if successful, False if gh is unavailable or the call fails.
        """
        # Probe for the gh CLI before attempting anything.
        try:
            subprocess.run(["gh", "--version"], capture_output=True, check=True)
        except (subprocess.CalledProcessError, FileNotFoundError):
            print("Warning: gh CLI not found. Skipping GitHub release creation.", file=sys.stderr)
            print("Install gh CLI: https://cli.github.com/", file=sys.stderr)
            return False

        # Use the changelog section as the release body, with a fallback.
        release_notes = self.extract_release_notes(version)
        if not release_notes:
            print(f"Warning: Could not extract release notes for version {version}", file=sys.stderr)
            release_notes = f"Release {version}"

        tag_name = f"v{version}"
        title = f"Release {version}"
        if dry_run:
            print("\n[DRY RUN] Would create GitHub release:")
            print(f" Tag: {tag_name}")
            print(f" Title: {title}")
            print(f" Notes:\n{release_notes[:200]}...")
            return True
        try:
            cmd = [
                "gh", "release", "create", tag_name,
                "--title", title,
                "--notes", release_notes
            ]
            result = subprocess.run(cmd, capture_output=True, text=True, check=True)
            print(f"\nSuccessfully created GitHub release: {tag_name}")
            print(f"Release URL: {result.stdout.strip()}")
            return True
        except subprocess.CalledProcessError as e:
            print(f"Error creating GitHub release: {e.stderr}", file=sys.stderr)
            return False
def main() -> int:
    """Main entry point for the version release script.

    Returns:
        Process exit code (0 on success, 1 on failure).
    """
    parser = argparse.ArgumentParser(
        description="Release a version by moving UNRELEASED items to versioned section",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog="""
Examples:
  # Release version 05.01.00 with today's date
  %(prog)s --version 05.01.00

  # Release version with specific date
  %(prog)s --version 05.01.00 --date 2026-01-15

  # Release and update VERSION in all files
  %(prog)s --version 05.01.00 --update-files

  # Release, update files, and create GitHub release
  %(prog)s --version 05.01.00 --update-files --create-release

  # Dry run to see what would be updated
  %(prog)s --version 05.01.00 --update-files --create-release --dry-run

Version format: XX.YY.ZZ (e.g., 05.01.00)
"""
    )
    parser.add_argument(
        "--version",
        type=str,
        required=True,
        help="Version number in XX.YY.ZZ format (e.g., 05.01.00)"
    )
    parser.add_argument(
        "--date",
        type=str,
        help="Release date in YYYY-MM-DD format (defaults to today)"
    )
    parser.add_argument(
        "--changelog",
        type=Path,
        default=Path("CHANGELOG.md"),
        help="Path to CHANGELOG.md file (default: ./CHANGELOG.md)"
    )
    parser.add_argument(
        "--update-files",
        action="store_true",
        help="Update VERSION header in all repository files"
    )
    parser.add_argument(
        "--create-release",
        action="store_true",
        help="Create a GitHub release using gh CLI"
    )
    parser.add_argument(
        "--dry-run",
        action="store_true",
        help="Show what would be done without making changes"
    )
    args = parser.parse_args()

    # Walk upward from the CWD looking for a .git directory; fall back to the
    # CWD when none is found before the filesystem root.
    # NOTE(review): the filesystem root itself is never checked for .git;
    # presumably acceptable — confirm if repos at / must be supported.
    current_dir = Path.cwd()
    repo_root = current_dir
    while repo_root.parent != repo_root:
        if (repo_root / ".git").exists():
            break
        repo_root = repo_root.parent
    else:
        repo_root = current_dir

    # Resolve the changelog path relative to the repository root.
    if not args.changelog.is_absolute():
        changelog_path = repo_root / args.changelog
    else:
        changelog_path = args.changelog

    releaser = VersionReleaser(changelog_path, repo_root)
    if not releaser.read_changelog():
        return 1

    # Release the version (or just report in dry-run mode).
    if args.dry_run:
        print(f"[DRY RUN] Would release version {args.version}")
    else:
        if releaser.release_version(args.version, args.date):
            if releaser.write_changelog():
                print(f"Successfully released version {args.version} in CHANGELOG.md")
            else:
                return 1
        else:
            return 1

    # Optionally propagate the VERSION header across repository files.
    if args.update_files:
        updated_files = releaser.update_file_versions(args.version, args.dry_run)
        if updated_files:
            if args.dry_run:
                print(f"\n[DRY RUN] Would update VERSION in {len(updated_files)} files:")
            else:
                print(f"\nUpdated VERSION to {args.version} in {len(updated_files)} files:")
            for file_path in sorted(updated_files):
                print(f" - {file_path}")
        else:
            print("\nNo files with VERSION headers found to update.")

    # Optionally create the GitHub release; failure here is non-fatal.
    if args.create_release:
        if not releaser.create_github_release(args.version, args.dry_run):
            print("\nNote: GitHub release creation failed or was skipped.", file=sys.stderr)
    return 0


if __name__ == "__main__":
    sys.exit(main())

View File

@@ -1,319 +0,0 @@
#!/usr/bin/env python3
"""
Copyright (C) 2025 Moko Consulting <hello@mokoconsulting.tech>
This file is part of a Moko Consulting project.
SPDX-LICENSE-IDENTIFIER: GPL-3.0-or-later
This program is free software; you can redistribute it and/or modify it under the terms
of the GNU General Public License as published by the Free Software Foundation; either
version 3 of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License (./LICENSE).
# FILE INFORMATION
DEFGROUP: MokoStandards
INGROUP: MokoStandards.Scripts
REPO: https://github.com/mokoconsulting-tech/MokoStandards/
VERSION: 05.00.00
PATH: ./scripts/update_changelog.py
BRIEF: Script to update CHANGELOG.md with entries to UNRELEASED section
NOTE: Follows Keep a Changelog format, supports Added/Changed/Deprecated/Removed/Fixed/Security
"""
import argparse
import os
import re
import sys
from datetime import datetime
from pathlib import Path
from typing import List, Optional
class ChangelogUpdater:
    """Updates CHANGELOG.md following Keep a Changelog format.

    Entries are appended under category headings ("### Added", "### Fixed",
    ...) inside the "## [Unreleased]" section, creating the category heading
    when it does not exist yet.
    """

    VALID_CATEGORIES = ["Added", "Changed", "Deprecated", "Removed", "Fixed", "Security"]
    UNRELEASED_PATTERN = r"## \[Unreleased\]"  # Standard Keep a Changelog format

    def __init__(self, changelog_path: Path):
        """
        Initialize the changelog updater.

        Args:
            changelog_path: Path to CHANGELOG.md file
        """
        self.changelog_path = changelog_path
        self.lines: List[str] = []

    def read_changelog(self) -> bool:
        """
        Read the changelog file into ``self.lines``.

        Returns:
            True if successful, False otherwise
        """
        try:
            with open(self.changelog_path, "r", encoding="utf-8") as f:
                self.lines = f.readlines()
            return True
        except FileNotFoundError:
            print(f"Error: CHANGELOG.md not found at {self.changelog_path}", file=sys.stderr)
            return False
        except Exception as e:
            print(f"Error reading CHANGELOG.md: {e}", file=sys.stderr)
            return False

    def find_unreleased_section(self) -> Optional[int]:
        """
        Find the UNRELEASED section in the changelog.

        Returns:
            Line index of UNRELEASED section, or None if not found
        """
        for i, line in enumerate(self.lines):
            if re.match(self.UNRELEASED_PATTERN, line):
                return i
        return None

    def find_next_version_section(self, start_index: int) -> Optional[int]:
        """
        Find the next version section after UNRELEASED.

        Args:
            start_index: Index to start searching from

        Returns:
            Line index of next version section, or None if not found
        """
        version_pattern = r"## \[\d+\.\d+\.\d+\]"
        for i in range(start_index + 1, len(self.lines)):
            if re.match(version_pattern, self.lines[i]):
                return i
        return None

    def get_category_index(self, unreleased_index: int, next_version_index: Optional[int],
                           category: str) -> Optional[int]:
        """
        Find the index of a specific category within the UNRELEASED section.

        Args:
            unreleased_index: Index of UNRELEASED heading
            next_version_index: Index of next version section (or None)
            category: Category name (e.g., "Added", "Changed")

        Returns:
            Line index of category heading, or None if not found
        """
        end_index = next_version_index if next_version_index else len(self.lines)
        # NOTE(review): re.match is a prefix match, so "### Added" would also
        # match a hypothetical "### AddedX" heading; harmless for the fixed
        # category set but worth confirming if headings ever vary.
        category_pattern = rf"### {category}"
        for i in range(unreleased_index + 1, end_index):
            if re.match(category_pattern, self.lines[i]):
                return i
        return None

    def add_entry(self, category: str, entry: str, subcategory: Optional[str] = None) -> bool:
        """
        Add an entry to the UNRELEASED section.

        Args:
            category: Category (Added/Changed/Deprecated/Removed/Fixed/Security)
            entry: Entry text to add
            subcategory: Optional subcategory/subheading

        Returns:
            True if successful, False otherwise
        """
        if category not in self.VALID_CATEGORIES:
            print(f"Error: Invalid category '{category}'. Must be one of: {', '.join(self.VALID_CATEGORIES)}",
                  file=sys.stderr)
            return False

        unreleased_index = self.find_unreleased_section()
        if unreleased_index is None:
            print("Error: UNRELEASED section not found in CHANGELOG.md", file=sys.stderr)
            return False
        next_version_index = self.find_next_version_section(unreleased_index)
        category_index = self.get_category_index(unreleased_index, next_version_index, category)

        # Format the entry; subcategory entries get a bold prefix.
        if subcategory:
            formatted_entry = f" - **{subcategory}**: {entry}\n"
        else:
            formatted_entry = f"- {entry}\n"

        if category_index is None:
            # Category doesn't exist yet: create it right after the
            # UNRELEASED heading (skipping any intervening blank lines).
            insert_index = unreleased_index + 1
            while insert_index < len(self.lines) and self.lines[insert_index].strip() == "":
                insert_index += 1
            self.lines.insert(insert_index, f"### {category}\n")
            self.lines.insert(insert_index + 1, formatted_entry)
            self.lines.insert(insert_index + 2, "\n")
        else:
            # Category exists: append after its last entry.
            insert_index = category_index + 1
            while insert_index < len(self.lines):
                line = self.lines[insert_index]
                # Stop if we hit another category or version section.
                if line.startswith("###") or line.startswith("##"):
                    break
                # Stop at a blank line that directly precedes the next section.
                if line.strip() == "" and insert_index + 1 < len(self.lines):
                    next_line = self.lines[insert_index + 1]
                    if next_line.startswith("###") or next_line.startswith("##"):
                        break
                insert_index += 1
            # Back up over trailing blank lines so the entry joins the list.
            while insert_index > category_index + 1 and self.lines[insert_index - 1].strip() == "":
                insert_index -= 1
            self.lines.insert(insert_index, formatted_entry)
        return True

    def write_changelog(self) -> bool:
        """
        Write the updated changelog back to file.

        Returns:
            True if successful, False otherwise
        """
        try:
            with open(self.changelog_path, "w", encoding="utf-8") as f:
                f.writelines(self.lines)
            return True
        except Exception as e:
            print(f"Error writing CHANGELOG.md: {e}", file=sys.stderr)
            return False

    def display_unreleased(self) -> None:
        """Print the current UNRELEASED section to stdout."""
        unreleased_index = self.find_unreleased_section()
        if unreleased_index is None:
            print("UNRELEASED section not found")
            return
        next_version_index = self.find_next_version_section(unreleased_index)
        end_index = next_version_index if next_version_index else len(self.lines)
        print("Current UNRELEASED section:")
        print("=" * 60)
        for i in range(unreleased_index, end_index):
            print(self.lines[i], end="")
        print("=" * 60)
def main() -> int:
    """
    Main entry point for the changelog updater script.

    Returns:
        Exit code (0 for success, non-zero for error)
    """
    parser = argparse.ArgumentParser(
        description="Update CHANGELOG.md with entries to UNRELEASED section",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog="""
Examples:
  # Add a simple entry
  %(prog)s --category Added --entry "New feature X"

  # Add an entry with subcategory
  %(prog)s --category Changed --entry "Updated API endpoints" --subcategory "API"

  # Display current UNRELEASED section
  %(prog)s --show

Categories: Added, Changed, Deprecated, Removed, Fixed, Security
"""
    )
    parser.add_argument(
        "--changelog",
        type=Path,
        default=Path("CHANGELOG.md"),
        help="Path to CHANGELOG.md file (default: ./CHANGELOG.md)"
    )
    parser.add_argument(
        "--category",
        choices=ChangelogUpdater.VALID_CATEGORIES,
        help="Category for the entry"
    )
    parser.add_argument(
        "--entry",
        type=str,
        help="Entry text to add to the changelog"
    )
    parser.add_argument(
        "--subcategory",
        type=str,
        help="Optional subcategory/subheading for the entry"
    )
    parser.add_argument(
        "--show",
        action="store_true",
        help="Display the current UNRELEASED section"
    )
    args = parser.parse_args()

    # Resolve a relative changelog path against the repository root, found by
    # walking upward from the CWD until a .git directory appears.
    if not args.changelog.is_absolute():
        current_dir = Path.cwd()
        repo_root = current_dir
        while repo_root.parent != repo_root:
            if (repo_root / ".git").exists():
                break
            repo_root = repo_root.parent
        else:
            # No .git found anywhere above: fall back to the CWD.
            repo_root = current_dir
        changelog_path = repo_root / args.changelog
    else:
        changelog_path = args.changelog

    updater = ChangelogUpdater(changelog_path)
    if not updater.read_changelog():
        return 1

    if args.show:
        updater.display_unreleased()
        return 0

    # Adding an entry requires both a category and the entry text.
    if not args.category or not args.entry:
        parser.error("--category and --entry are required (or use --show)")

    if updater.add_entry(args.category, args.entry, args.subcategory):
        if updater.write_changelog():
            print(f"Successfully added entry to UNRELEASED section: [{args.category}] {args.entry}")
            return 0
        else:
            return 1
    else:
        return 1


if __name__ == "__main__":
    sys.exit(main())

View File

@@ -1,332 +0,0 @@
#!/usr/bin/env python3
"""
Copyright (C) 2026 Moko Consulting <hello@mokoconsulting.tech>
This file is part of a Moko Consulting project.
SPDX-License-Identifier: GPL-3.0-or-later
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
FILE INFORMATION
DEFGROUP: MokoStandards.Scripts
INGROUP: MokoStandards.Validation
REPO: https://github.com/mokoconsulting-tech/MokoStandards
PATH: /scripts/validate_file_headers.py
VERSION: 05.00.00
BRIEF: Validate copyright headers and file information in repository files
"""
import os
import sys
from pathlib import Path
from typing import List, Tuple, Dict
# Map of file extensions that must carry a license/info header, keyed by
# extension with the language name as the value.
HEADER_REQUIRED_EXTENSIONS = {
    '.py': 'python',
    '.php': 'php',
    '.md': 'markdown',
    '.yml': 'yaml',
    '.yaml': 'yaml',
    '.sh': 'shell',
    '.js': 'javascript',
    '.ts': 'typescript',
    '.css': 'css',
}

# File names that never require a header (lockfiles, dot-configs, LICENSE).
EXEMPT_FILES = {
    'package.json',
    'package-lock.json',
    'composer.json',
    'composer.lock',
    'Gemfile.lock',
    '.gitignore',
    '.gitattributes',
    '.editorconfig',
    'LICENSE',
}

# Marker substrings whose presence near the top of a file flags it as
# auto-generated (and therefore exempt from header checks).
GENERATED_PATTERNS = [
    'DO NOT EDIT',
    'AUTO-GENERATED',
    'AUTOGENERATED',
    'Generated by',
]

# Substrings every copyright header must contain.
REQUIRED_HEADER_PATTERNS = [
    'Copyright (C)',
    'Moko Consulting',
    'GPL-3.0-or-later',
]

# Substrings every FILE INFORMATION block must contain.
REQUIRED_FILE_INFO_PATTERNS = [
    'FILE INFORMATION',
    'DEFGROUP:',
    'REPO:',
    'PATH:',
    'VERSION:',
    'BRIEF:',
]

# Section headings required in markdown documents.
REQUIRED_MARKDOWN_METADATA = [
    '## Metadata',
    '## Revision History',
]
def is_exempt_file(filepath: Path) -> bool:
    """Check if file is exempt from header requirements.

    A file is exempt when its name is in EXEMPT_FILES, or it lives under a
    vendored dependency tree or the .git directory.
    """
    if filepath.name in EXEMPT_FILES:
        return True
    # Vendored third-party code is not ours to annotate.
    if 'vendor' in filepath.parts or 'node_modules' in filepath.parts:
        return True
    # Git internals.
    if '.git' in filepath.parts:
        return True
    return False
def is_generated_file(content: str) -> bool:
    """Check if file appears to be auto-generated.

    Only the first 1000 characters are scanned for GENERATED_PATTERNS markers.
    """
    first_lines = content[:1000]
    return any(pattern in first_lines for pattern in GENERATED_PATTERNS)
def check_copyright_header(content: str, filepath: Path) -> Tuple[bool, List[str]]:
    """Check if file has proper copyright header.

    Scans the first 2000 characters for every REQUIRED_HEADER_PATTERNS entry.

    Returns:
        (ok, issues) — ok is True when no patterns are missing; filepath is
        currently unused but kept for interface symmetry with the other checks.
    """
    issues = []
    first_section = content[:2000]
    for pattern in REQUIRED_HEADER_PATTERNS:
        if pattern not in first_section:
            issues.append(f"Missing required pattern: {pattern}")
    return len(issues) == 0, issues
def check_file_information(content: str, filepath: Path) -> Tuple[bool, List[str]]:
    """Check if file has proper file information block.

    Scans the first 2000 characters for every REQUIRED_FILE_INFO_PATTERNS
    entry (FILE INFORMATION, DEFGROUP:, REPO:, PATH:, VERSION:, BRIEF:).

    Returns:
        (ok, issues) — ok is True when no patterns are missing.
    """
    issues = []
    first_section = content[:2000]
    for pattern in REQUIRED_FILE_INFO_PATTERNS:
        if pattern not in first_section:
            issues.append(f"Missing required file info: {pattern}")
    return len(issues) == 0, issues
def check_markdown_metadata(content: str, filepath: Path) -> Tuple[bool, List[str]]:
    """Check if markdown file has metadata and revision history sections.

    Unlike the header checks, the whole document is searched, since these
    sections may appear anywhere in the file.

    Returns:
        (ok, issues) — ok is True when no sections are missing.
    """
    issues = []
    for pattern in REQUIRED_MARKDOWN_METADATA:
        if pattern not in content:
            issues.append(f"Missing required section: {pattern}")
    return len(issues) == 0, issues
def validate_file(filepath: Path) -> Dict[str, object]:
    """Validate a single file.

    Returns a result dict with keys: path, valid, issues, exempt, generated.
    Exempt and generated files are returned immediately with valid=True.

    Note: the original annotation was ``Dict[str, any]`` — the builtin
    ``any`` function used as a type parameter; corrected to ``object``.
    """
    result = {
        'path': str(filepath),
        'valid': True,
        'issues': [],
        'exempt': False,
        'generated': False,
    }

    # Exempt by name/location.
    if is_exempt_file(filepath):
        result['exempt'] = True
        return result
    # Only extensions in HEADER_REQUIRED_EXTENSIONS are validated.
    if filepath.suffix not in HEADER_REQUIRED_EXTENSIONS:
        result['exempt'] = True
        return result

    # Read file content; unreadable files are reported as invalid.
    try:
        with open(filepath, 'r', encoding='utf-8') as f:
            content = f.read()
    except Exception as e:
        result['valid'] = False
        result['issues'].append(f"Error reading file: {e}")
        return result

    # Auto-generated files are skipped, not validated.
    if is_generated_file(content):
        result['generated'] = True
        return result

    # Copyright header check.
    valid, issues = check_copyright_header(content, filepath)
    if not valid:
        result['valid'] = False
        result['issues'].extend(issues)

    # FILE INFORMATION block check.
    valid, issues = check_file_information(content, filepath)
    if not valid:
        result['valid'] = False
        result['issues'].extend(issues)

    # Markdown files additionally need metadata/revision-history sections.
    if filepath.suffix == '.md':
        valid, issues = check_markdown_metadata(content, filepath)
        if not valid:
            result['valid'] = False
            result['issues'].extend(issues)

    return result
def validate_repository(repo_path: Path) -> Dict[str, object]:
    """Validate all files in repository.

    Walks every regular file under ``repo_path`` and aggregates per-file
    results from validate_file() into counters; 'validated' counts only
    non-exempt, non-generated files.

    Note: the original annotation was ``Dict[str, any]`` — the builtin
    ``any`` function used as a type parameter; corrected to ``object``.
    """
    results = {
        'total': 0,
        'validated': 0,
        'valid': 0,
        'invalid': 0,
        'exempt': 0,
        'generated': 0,
        'files': [],
    }
    for filepath in repo_path.rglob('*'):
        if not filepath.is_file():
            continue
        results['total'] += 1
        result = validate_file(filepath)
        results['files'].append(result)
        if result['exempt']:
            results['exempt'] += 1
        elif result['generated']:
            results['generated'] += 1
        else:
            results['validated'] += 1
            if result['valid']:
                results['valid'] += 1
            else:
                results['invalid'] += 1
    return results
def print_report(results: Dict[str, any], verbose: bool = False):
    """Render the validation summary to stdout.

    Args:
        results: Aggregate produced by validate_repository().
        verbose: When True, also list every file with a valid header.

    Returns:
        True when no invalid headers were found, False otherwise.
    """
    heavy_rule = "=" * 70
    light_rule = "-" * 70
    print(heavy_rule)
    print("FILE HEADER VALIDATION REPORT")
    print(heavy_rule)
    print()
    print(f"Total files found: {results['total']}")
    print(f"Files validated: {results['validated']}")
    print(f"Valid headers: {results['valid']}")
    print(f"Invalid headers: {results['invalid']}")
    print(f"Exempt files: {results['exempt']}")
    print(f"Generated files: {results['generated']}")
    print()
    has_failures = results['invalid'] > 0
    if has_failures:
        print("FILES WITH ISSUES:")
        print(light_rule)
        for entry in results['files']:
            # Equivalent to: not valid and not exempt and not generated.
            if entry['valid'] or entry['exempt'] or entry['generated']:
                continue
            print(f"\n{entry['path']}")
            for issue in entry['issues']:
                print(f"{issue}")
        print()
    if verbose and results['valid'] > 0:
        print("\nVALID FILES:")
        print(light_rule)
        for entry in results['files']:
            if entry['valid']:
                print(f"{entry['path']}")
        print()
    print(heavy_rule)
    if not has_failures:
        print("✓ All validated files have proper headers")
        return True
    compliance_rate = (results['valid'] / results['validated'] * 100) if results['validated'] > 0 else 0
    print(f"Compliance Rate: {compliance_rate:.1f}%")
    print()
    print("ACTION REQUIRED: Fix files with missing or invalid headers")
    return False
def main():
    """CLI entry point: parse arguments, validate the tree, print the report.

    Exit status is 0 unless --fail-on-invalid is given and invalid headers
    were found, in which case it is 1. A nonexistent --path exits 1.
    """
    import argparse
    parser = argparse.ArgumentParser(
        description='Validate copyright headers and file information'
    )
    parser.add_argument(
        '--path',
        default='.',
        help='Path to repository (default: current directory)'
    )
    parser.add_argument(
        '--verbose',
        action='store_true',
        help='Show all files including valid ones'
    )
    parser.add_argument(
        '--fail-on-invalid',
        action='store_true',
        help='Exit with error code if invalid headers found'
    )
    args = parser.parse_args()
    repo_path = Path(args.path).resolve()
    if not repo_path.exists():
        print(f"Error: Path does not exist: {repo_path}", file=sys.stderr)
        sys.exit(1)
    print(f"Validating files in: {repo_path}")
    print()
    results = validate_repository(repo_path)
    success = print_report(results, args.verbose)
    if args.fail_on_invalid and not success:
        sys.exit(1)
    # BUG FIX: was "sys.exit(0 if success else 0)" — a misleading conditional
    # whose branches were both 0. Without --fail-on-invalid the intent is to
    # exit 0 regardless of the validation outcome, so say so plainly.
    sys.exit(0)
# Entry-point guard: run the CLI only when executed as a script.
if __name__ == "__main__":
    main()

View File

@@ -1,533 +0,0 @@
#!/usr/bin/env python3
"""Auto-Detect Repository Platform v03.00.00 - Critical Validator Infrastructure.
This script automatically detects repository platform types with confidence scoring
and provides JSON/CLI output for automation workflows.
Platform detection capabilities:
- Joomla/WaaS components (manifest patterns, version detection)
- Dolibarr/CRM modules (module.php, core/ structure)
- Generic repositories (fallback with confidence scoring)
Usage:
python3 auto_detect_platform.py [--repo-path PATH] [--json] [--verbose] [--cache]
Examples:
# Auto-detect current repository with JSON output
python3 auto_detect_platform.py --json
# Detect specific repository with caching
python3 auto_detect_platform.py --repo-path /path/to/repo --cache --verbose
# JSON output for CI/CD automation
python3 auto_detect_platform.py --json | jq '.platform_type'
Exit codes:
0: Success (platform detected successfully)
1: Detection failed (no platform could be determined)
2: Configuration error (invalid arguments or paths)
"""
import argparse
import hashlib
import json
import os
import pickle
import sys
import xml.etree.ElementTree as ET
from dataclasses import dataclass, asdict
from enum import Enum
from pathlib import Path
from typing import Dict, List, Optional, Tuple
# Version
__version__ = "03.00.00"
class PlatformType(Enum):
    """Repository platform types enumeration."""
    # String values are the stable identifiers emitted in JSON output
    # (DetectionResult.to_dict serializes the .value).
    JOOMLA = "joomla"
    DOLIBARR = "dolibarr"
    GENERIC = "generic"
@dataclass
class DetectionResult:
    """Platform detection result with confidence scoring.

    Attributes:
        platform_type: Detected platform type enum value.
        confidence: Confidence score from 0-100.
        indicators: List of detection indicators found.
        metadata: Additional platform-specific metadata.
    """
    platform_type: PlatformType
    confidence: int
    indicators: List[str]
    metadata: Dict[str, str]

    def to_dict(self) -> Dict[str, any]:
        """Serialize to a plain dict, collapsing the enum to its string value.

        Returns:
            Dictionary representation with platform_type as string value.
        """
        payload = dict(
            platform_type=self.platform_type.value,
            confidence=self.confidence,
            indicators=self.indicators,
            metadata=self.metadata,
        )
        return payload
class DetectionCache:
    """Simple file-based cache for platform detection results.

    Results are stored as pickle files keyed by a SHA-256 hash of the
    repository path, so repeated scans of the same tree can be skipped.
    """

    def __init__(self, cache_dir: Optional[Path] = None) -> None:
        """Initialize detection cache.

        Args:
            cache_dir: Directory for cache files. Defaults to ~/.cache/mokostudios.
        """
        self.cache_dir = cache_dir if cache_dir is not None else (
            Path.home() / ".cache" / "mokostudios" / "platform_detection"
        )
        self.cache_dir.mkdir(parents=True, exist_ok=True)

    def _get_cache_key(self, repo_path: Path) -> str:
        """Hash the repository path into a stable cache key (SHA-256 hex)."""
        return hashlib.sha256(str(repo_path).encode()).hexdigest()

    def _entry_path(self, repo_path: Path) -> Path:
        """Resolve the pickle file that backs the given repository's entry."""
        return self.cache_dir / f"{self._get_cache_key(repo_path)}.pkl"

    def get(self, repo_path: Path) -> Optional[DetectionResult]:
        """Return the cached result for repo_path, or None when absent.

        A corrupt or unreadable entry is treated the same as a cache miss.
        """
        entry = self._entry_path(repo_path)
        if not entry.exists():
            return None
        try:
            with open(entry, 'rb') as handle:
                return pickle.load(handle)
        except (pickle.PickleError, OSError, EOFError):
            return None

    def set(self, repo_path: Path, result: DetectionResult) -> None:
        """Persist a detection result; write failures are silently ignored."""
        try:
            with open(self._entry_path(repo_path), 'wb') as handle:
                pickle.dump(result, handle)
        except (pickle.PickleError, OSError):
            pass

    def clear(self) -> None:
        """Clear all cached detection results."""
        for stale in self.cache_dir.glob("*.pkl"):
            try:
                stale.unlink()
            except OSError:
                pass
class PlatformDetector:
    """Detects repository platform type with enhanced detection algorithms.

    Provides platform detection for Joomla, Dolibarr, and generic repositories
    with confidence scoring and detailed indicators. Detection is ordered:
    Joomla, then Dolibarr, then a generic fallback; the first platform to
    reach a confidence of 50 wins.
    """
    def __init__(self, repo_path: Path, use_cache: bool = False) -> None:
        """Initialize platform detector.

        Args:
            repo_path: Path to repository to analyze.
            use_cache: Enable caching for performance optimization.

        Raises:
            ValueError: If the resolved repository path does not exist.
        """
        self.repo_path = Path(repo_path).resolve()
        self.use_cache = use_cache
        self.cache = DetectionCache() if use_cache else None
        if not self.repo_path.exists():
            raise ValueError(f"Repository path does not exist: {self.repo_path}")
    def detect(self) -> DetectionResult:
        """Detect repository platform type.

        Executes platform-specific detection methods in order:
        1. Joomla detection (manifest patterns, directory structure)
        2. Dolibarr detection (module.php, core/ structure)
        3. Generic fallback (confidence-based scoring)

        Returns:
            DetectionResult with platform type and confidence score.
        """
        if self.use_cache and self.cache:
            cached_result = self.cache.get(self.repo_path)
            if cached_result:
                return cached_result
        joomla_result = self._detect_joomla()
        if joomla_result.confidence >= 50:
            if self.use_cache and self.cache:
                self.cache.set(self.repo_path, joomla_result)
            return joomla_result
        dolibarr_result = self._detect_dolibarr()
        if dolibarr_result.confidence >= 50:
            if self.use_cache and self.cache:
                self.cache.set(self.repo_path, dolibarr_result)
            return dolibarr_result
        generic_result = self._detect_generic()
        if self.use_cache and self.cache:
            self.cache.set(self.repo_path, generic_result)
        return generic_result
    def _detect_joomla(self) -> DetectionResult:
        """Detect Joomla component with enhanced manifest pattern matching.

        Detection criteria:
        - XML manifest files with <extension> or <install> root tags
        - Extension type attribute (component, module, plugin, etc.)
        - Joomla version tags in manifest
        - Directory structure (site/, admin/, administrator/)
        - Language directories (language/en-GB/)

        Returns:
            DetectionResult for Joomla platform with confidence score.
        """
        confidence = 0
        indicators: List[str] = []
        metadata: Dict[str, str] = {}
        # FIX: removed the unused `manifest_patterns` local; the glob
        # pattern below is used directly.
        skip_dirs = {".git", "vendor", "node_modules", ".github"}
        for xml_file in self.repo_path.glob("**/*.xml"):
            if any(skip_dir in xml_file.parts for skip_dir in skip_dirs):
                continue
            try:
                tree = ET.parse(xml_file)
                root = tree.getroot()
                if root.tag in ["extension", "install"]:
                    ext_type = root.get("type", "")
                    if ext_type in ["component", "module", "plugin", "library", "template", "file"]:
                        confidence += 50
                        rel_path = xml_file.relative_to(self.repo_path)
                        indicators.append(f"Joomla manifest: {rel_path} (type={ext_type})")
                        metadata["manifest_file"] = str(rel_path)
                        metadata["extension_type"] = ext_type
                        version_elem = root.find("version")
                        if version_elem is not None and version_elem.text:
                            confidence += 10
                            metadata["version"] = version_elem.text.strip()
                            indicators.append(f"Joomla version tag: {version_elem.text.strip()}")
                        name_elem = root.find("name")
                        if name_elem is not None and name_elem.text:
                            metadata["extension_name"] = name_elem.text.strip()
                        # Only the first matching manifest is scored.
                        break
            except (ET.ParseError, OSError):
                continue
        joomla_dirs = ["site", "admin", "administrator"]
        for dir_name in joomla_dirs:
            if (self.repo_path / dir_name).is_dir():
                confidence += 15
                indicators.append(f"Joomla directory structure: {dir_name}/")
        if (self.repo_path / "language" / "en-GB").exists():
            confidence += 10
            indicators.append("Joomla language directory: language/en-GB/")
        media_dir = self.repo_path / "media"
        if media_dir.is_dir() and list(media_dir.glob("**/*.css")):
            confidence += 5
            indicators.append("Joomla media directory with assets")
        confidence = min(confidence, 100)
        return DetectionResult(
            platform_type=PlatformType.JOOMLA,
            confidence=confidence,
            indicators=indicators,
            metadata=metadata
        )
    def _detect_dolibarr(self) -> DetectionResult:
        """Detect Dolibarr module with enhanced structure analysis.

        Detection criteria:
        - Module descriptor files (mod*.class.php)
        - DolibarrModules class extension patterns
        - core/modules/ directory structure
        - SQL migration files in sql/
        - Class and lib directories

        Returns:
            DetectionResult for Dolibarr platform with confidence score.
        """
        import re  # local import, hoisted from inside the scan loop
        confidence = 0
        indicators: List[str] = []
        metadata: Dict[str, str] = {}
        descriptor_patterns = ["**/mod*.class.php", "**/core/modules/**/*.php"]
        skip_dirs = {".git", "vendor", "node_modules"}
        descriptor_found = False
        for pattern in descriptor_patterns:
            for php_file in self.repo_path.glob(pattern):
                if any(skip_dir in php_file.parts for skip_dir in skip_dirs):
                    continue
                try:
                    content = php_file.read_text(encoding="utf-8", errors="ignore")
                    dolibarr_patterns = [
                        "extends DolibarrModules",
                        "class mod",
                        "$this->numero",
                        "$this->rights_class",
                        "DolibarrModules",
                        "dol_include_once"
                    ]
                    pattern_matches = sum(1 for p in dolibarr_patterns if p in content)
                    if pattern_matches >= 3:
                        confidence += 60
                        rel_path = php_file.relative_to(self.repo_path)
                        indicators.append(f"Dolibarr module descriptor: {rel_path}")
                        metadata["descriptor_file"] = str(rel_path)
                        if "class mod" in content:
                            match = re.search(r'class\s+(mod\w+)', content)
                            if match:
                                metadata["module_class"] = match.group(1)
                        descriptor_found = True
                        break
                except (OSError, UnicodeDecodeError):
                    continue
            if descriptor_found:
                # BUG FIX: previously only the inner loop was broken, so the
                # second glob pattern could match another descriptor and add
                # +60 confidence a second time (and overwrite metadata).
                break
        dolibarr_dirs = ["core/modules", "sql", "class", "lib", "langs"]
        for dir_name in dolibarr_dirs:
            dir_path = self.repo_path / dir_name
            if dir_path.exists():
                confidence += 8
                indicators.append(f"Dolibarr directory structure: {dir_name}/")
        sql_dir = self.repo_path / "sql"
        if sql_dir.is_dir():
            sql_files = list(sql_dir.glob("*.sql"))
            if sql_files:
                confidence += 10
                indicators.append(f"Dolibarr SQL files: {len(sql_files)} migration scripts")
                metadata["sql_files_count"] = str(len(sql_files))
        confidence = min(confidence, 100)
        return DetectionResult(
            platform_type=PlatformType.DOLIBARR,
            confidence=confidence,
            indicators=indicators,
            metadata=metadata
        )
    def _detect_generic(self) -> DetectionResult:
        """Fallback detection for generic repositories with confidence scoring.

        Provides baseline detection when no specific platform is identified.
        Confidence score based on standard repository structure indicators.

        Returns:
            DetectionResult for generic platform with confidence score.
        """
        confidence = 50
        indicators: List[str] = ["No platform-specific markers found"]
        metadata: Dict[str, str] = {
            "checked_platforms": "Joomla, Dolibarr",
            "detection_reason": "Generic repository fallback"
        }
        standard_files = ["README.md", "LICENSE", ".gitignore", "composer.json", "package.json"]
        found_files = []
        for file_name in standard_files:
            if (self.repo_path / file_name).exists():
                found_files.append(file_name)
                confidence += 5
        if found_files:
            indicators.append(f"Standard repository files: {', '.join(found_files)}")
        standard_dirs = ["src", "tests", "docs", ".github"]
        found_dirs = []
        for dir_name in standard_dirs:
            if (self.repo_path / dir_name).is_dir():
                found_dirs.append(dir_name)
                confidence += 3
        if found_dirs:
            indicators.append(f"Standard directory structure: {', '.join(found_dirs)}")
        confidence = min(confidence, 100)
        return DetectionResult(
            platform_type=PlatformType.GENERIC,
            confidence=confidence,
            indicators=indicators,
            metadata=metadata
        )
def main() -> int:
    """Main entry point for platform detection CLI.

    Parses CLI flags, runs PlatformDetector against the target repository,
    and renders the result either as JSON (for automation) or as a
    formatted human-readable report.

    Returns:
        Exit code: 0 for success, 1 for detection failure, 2 for config error.
    """
    parser = argparse.ArgumentParser(
        description=f"Auto-detect repository platform v{__version__}",
        epilog="For more information, see docs/scripts/validate/"
    )
    parser.add_argument(
        "--repo-path",
        type=str,
        default=".",
        help="Path to repository to analyze (default: current directory)"
    )
    parser.add_argument(
        "--json",
        action="store_true",
        help="Output results in JSON format for automation"
    )
    parser.add_argument(
        "--verbose",
        action="store_true",
        help="Enable verbose output with detailed indicators"
    )
    parser.add_argument(
        "--cache",
        action="store_true",
        help="Enable caching for performance (stores results in ~/.cache/mokostudios)"
    )
    parser.add_argument(
        "--clear-cache",
        action="store_true",
        help="Clear detection cache and exit"
    )
    parser.add_argument(
        "--version",
        action="version",
        version=f"%(prog)s {__version__}"
    )
    args = parser.parse_args()
    # --clear-cache is a standalone maintenance action: wipe and exit 0.
    if args.clear_cache:
        cache = DetectionCache()
        cache.clear()
        if not args.json:
            print("✓ Detection cache cleared")
        return 0
    try:
        repo_path = Path(args.repo_path).resolve()
        if not repo_path.exists():
            if args.json:
                print(json.dumps({"error": "Repository path does not exist", "path": str(repo_path)}))
            else:
                print(f"✗ Error: Repository path does not exist: {repo_path}", file=sys.stderr)
            return 2
        detector = PlatformDetector(repo_path, use_cache=args.cache)
        result = detector.detect()
        if args.json:
            # Machine-readable output: detection result plus provenance fields.
            output = result.to_dict()
            output["repo_path"] = str(repo_path)
            output["version"] = __version__
            print(json.dumps(output, indent=2))
        else:
            print("=" * 70)
            print(f"Platform Auto-Detection v{__version__}")
            print("=" * 70)
            print()
            print(f"📁 Repository: {repo_path}")
            print(f"🔍 Platform: {result.platform_type.value.upper()}")
            print(f"📊 Confidence: {result.confidence}%")
            print()
            if args.verbose and result.indicators:
                print("Detection Indicators:")
                for indicator in result.indicators:
                    print(f"{indicator}")
                print()
            if args.verbose and result.metadata:
                print("Metadata:")
                for key, value in result.metadata.items():
                    print(f"  {key}: {value}")
                print()
            if args.cache:
                print("💾 Result cached for future runs")
                print()
            print("=" * 70)
        return 0
    except ValueError as e:
        # PlatformDetector raises ValueError for invalid repository paths.
        if args.json:
            print(json.dumps({"error": str(e)}))
        else:
            print(f"✗ Error: {e}", file=sys.stderr)
        return 2
    except Exception as e:
        # Last-resort guard so the CLI always returns a defined exit code.
        if args.json:
            print(json.dumps({"error": f"Unexpected error: {str(e)}"}))
        else:
            print(f"✗ Unexpected error: {e}", file=sys.stderr)
        return 1
# Entry-point guard: propagate main()'s return value as the process exit code.
if __name__ == "__main__":
    sys.exit(main())

View File

@@ -1,258 +0,0 @@
#!/usr/bin/env python3
"""
Copyright (C) 2026 Moko Consulting <hello@mokoconsulting.tech>
This file is part of a Moko Consulting project.
SPDX-License-Identifier: GPL-3.0-or-later
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
FILE INFORMATION
DEFGROUP: MokoStandards.Scripts.Validate
INGROUP: MokoStandards
REPO: https://github.com/mokoconsulting-tech/MokoStandards
PATH: /scripts/validate/validate_codeql_config.py
VERSION: 01.00.00
BRIEF: Validates CodeQL workflow language configuration matches repository contents
"""
import argparse
import sys
from pathlib import Path
from typing import Dict, List, Set, Tuple
try:
import yaml
except ImportError:
print("Error: PyYAML is required. Install with: pip install pyyaml", file=sys.stderr)
sys.exit(1)
# Language to file extension mapping.
# Extensions are disjoint across languages, so each file maps to at most
# one language when classified by suffix.
LANGUAGE_EXTENSIONS = {
    'python': {'.py'},
    'javascript': {'.js', '.jsx', '.ts', '.tsx', '.mjs', '.cjs'},
    'php': {'.php'},
    'java': {'.java'},
    'go': {'.go'},
    'ruby': {'.rb'},
    'cpp': {'.cpp', '.cc', '.cxx', '.c', '.h', '.hpp'},
    'csharp': {'.cs'},
}
def detect_languages_in_repo(repo_path: Path, exclude_dirs: Set[str] = None) -> Dict[str, int]:
    """
    Detect programming languages present in the repository by scanning file extensions.

    PERF FIX: walks the tree exactly once and classifies each file by its
    suffix, instead of re-walking the entire repository with rglob() for
    every known extension (previously one full scan per extension).

    Args:
        repo_path: Path to the repository root
        exclude_dirs: Set of directory names to exclude from scanning

    Returns:
        Dictionary mapping language names to file counts (languages with
        zero matching files are omitted)
    """
    if exclude_dirs is None:
        exclude_dirs = {'.git', 'vendor', 'node_modules', '.venv', 'venv', '__pycache__'}
    # Invert the mapping once for O(1) suffix lookup.
    extension_to_language = {
        ext: language
        for language, extensions in LANGUAGE_EXTENSIONS.items()
        for ext in extensions
    }
    language_counts = {}
    for file_path in repo_path.rglob('*'):
        # Skip anything inside an excluded directory, at any depth.
        if any(excluded in file_path.parts for excluded in exclude_dirs):
            continue
        if not file_path.is_file():
            continue
        language = extension_to_language.get(file_path.suffix)
        if language is not None:
            language_counts[language] = language_counts.get(language, 0) + 1
    return language_counts
def parse_codeql_workflow(workflow_path: Path) -> Tuple[List[str], bool]:
    """
    Parse CodeQL workflow file and extract configured languages.

    Scans every job's strategy.matrix.language entry and returns the first
    non-empty language list found.

    Args:
        workflow_path: Path to the CodeQL workflow YAML file

    Returns:
        Tuple of (list of configured languages, whether parsing succeeded)
    """
    try:
        # FIX: explicit UTF-8 — workflow files may contain non-ASCII text and
        # the platform's default locale encoding is not guaranteed to read it.
        with open(workflow_path, 'r', encoding='utf-8') as f:
            workflow = yaml.safe_load(f)
        # Navigate to the matrix.language configuration
        jobs = workflow.get('jobs', {})
        for job_name, job_config in jobs.items():
            strategy = job_config.get('strategy', {})
            matrix = strategy.get('matrix', {})
            languages = matrix.get('language', [])
            if languages:
                return languages, True
        return [], False
    except Exception as e:
        # Broad catch is deliberate: any parse/IO problem is reported and
        # signalled to the caller as a failed parse rather than a crash.
        print(f"Error parsing workflow: {e}", file=sys.stderr)
        return [], False
def validate_codeql_config(repo_path: Path, workflow_path: Path) -> Tuple[bool, List[str], List[str]]:
    """
    Validate that CodeQL workflow languages match repository contents.

    Args:
        repo_path: Path to the repository root
        workflow_path: Path to the CodeQL workflow file

    Returns:
        Tuple of (is_valid, list of errors, list of warnings)
    """
    errors: List[str] = []
    warnings: List[str] = []
    # Missing workflow file is an immediate, unrecoverable error.
    if not workflow_path.exists():
        errors.append(f"CodeQL workflow not found at: {workflow_path}")
        return False, errors, warnings
    detected_languages = detect_languages_in_repo(repo_path)
    if not detected_languages:
        warnings.append("No supported programming languages detected in repository")
        return True, errors, warnings
    configured_languages, parse_success = parse_codeql_workflow(workflow_path)
    if not parse_success:
        errors.append("Could not find language configuration in CodeQL workflow")
        return False, errors, warnings
    if not configured_languages:
        errors.append("No languages configured in CodeQL workflow matrix")
        return False, errors, warnings
    detected_set = set(detected_languages)
    configured_set = set(configured_languages)
    # Configured-but-absent languages break CodeQL analysis: hard errors.
    for lang in configured_set - detected_set:
        errors.append(
            f"Language '{lang}' is configured in CodeQL but no {lang.upper()} files found in repository. "
            f"This will cause CodeQL analysis to fail."
        )
    # Present-but-unconfigured languages merely miss scanning: warnings.
    for lang in detected_set - configured_set:
        warnings.append(
            f"Language '{lang}' has {detected_languages[lang]} files in repository but is not configured in CodeQL workflow. "
            f"Consider adding it for security scanning."
        )
    return not errors, errors, warnings
def main():
    """Main entry point for the validation script.

    Returns:
        0 when the configuration is valid (and warning-free in --strict
        mode); 1 when validation fails or strict mode upgrades warnings.
    """
    parser = argparse.ArgumentParser(
        description='Validate CodeQL workflow language configuration against repository contents'
    )
    parser.add_argument(
        '--repo-path',
        type=Path,
        default=Path('.'),
        help='Path to repository root (default: current directory)'
    )
    parser.add_argument(
        '--workflow-path',
        type=Path,
        help='Path to CodeQL workflow file (default: .github/workflows/codeql-analysis.yml)'
    )
    parser.add_argument(
        '--strict',
        action='store_true',
        help='Treat warnings as errors'
    )
    args = parser.parse_args()
    repo_path = args.repo_path.resolve()
    workflow_path = args.workflow_path
    if workflow_path is None:
        workflow_path = repo_path / '.github' / 'workflows' / 'codeql-analysis.yml'
    else:
        workflow_path = workflow_path.resolve()
    # FIX: removed a pointless f-string prefix on a literal with no placeholders.
    print("Validating CodeQL configuration...")
    print(f"Repository: {repo_path}")
    print(f"Workflow: {workflow_path}")
    print()
    # Detect languages first for informational purposes
    detected_languages = detect_languages_in_repo(repo_path)
    if detected_languages:
        print("Detected languages in repository:")
        for lang, count in sorted(detected_languages.items()):
            print(f" - {lang}: {count} files")
        print()
    # Validate configuration
    is_valid, errors, warnings = validate_codeql_config(repo_path, workflow_path)
    # Print results
    if errors:
        print("❌ ERRORS:")
        for error in errors:
            print(f" - {error}")
        print()
    if warnings:
        print("⚠️ WARNINGS:")
        for warning in warnings:
            print(f" - {warning}")
        print()
    if is_valid and not warnings:
        print("✅ CodeQL configuration is valid and matches repository contents")
        return 0
    elif is_valid:
        print("✅ CodeQL configuration is valid (with warnings)")
        if args.strict:
            print("❌ Strict mode enabled: treating warnings as errors")
            return 1
        return 0
    else:
        print("❌ CodeQL configuration validation failed")
        return 1
# Entry-point guard: propagate main()'s return value as the process exit code.
if __name__ == '__main__':
    sys.exit(main())

View File

@@ -1,407 +0,0 @@
#!/usr/bin/env python3
"""
Repository Structure Validator (XML/JSON Support)
Validates repository structure against XML or JSON schema definitions.
Checks for required files, directories, validates naming conventions, and enforces
requirement statuses (required, suggested, optional, not-allowed).
Supports both XML and JSON schema formats for maximum flexibility.
Usage:
python3 validate_structure_v2.py [--schema SCHEMA_FILE] [--format xml|json|auto] [--repo-path PATH]
Examples:
# Auto-detect format from file extension
python3 validate_structure_v2.py --schema scripts/definitions/default-repository.xml
python3 validate_structure_v2.py --schema scripts/definitions/default-repository.json
# Explicit format specification
python3 validate_structure_v2.py --schema my-schema.txt --format json --repo-path /path/to/repo
Exit codes:
0: Success (all validations passed)
1: Validation errors found (required items missing or not-allowed items present)
2: Validation warnings (suggested items missing)
3: Configuration error (invalid schema, missing files, etc.)
"""
import sys
import os
import argparse
import xml.etree.ElementTree as ET
import json
from pathlib import Path
from typing import List, Dict, Tuple, Optional, Any
from dataclasses import dataclass
from enum import Enum
class Severity(Enum):
    """Validation severity levels"""
    # ERROR blocks acceptance, WARNING is advisory, INFO records presence.
    ERROR = "error"
    WARNING = "warning"
    INFO = "info"
class RequirementStatus(Enum):
    """Requirement status levels"""
    # Values mirror the schema's requirement-status strings exactly,
    # including the hyphenated "not-allowed".
    REQUIRED = "required"
    SUGGESTED = "suggested"
    OPTIONAL = "optional"
    NOT_ALLOWED = "not-allowed"
@dataclass
class ValidationResult:
    """Result of a validation check"""
    # How serious the finding is (error/warning/info).
    severity: Severity
    # Human-readable description of the finding.
    message: str
    # Path the finding refers to (typically relative to the repo root).
    path: str
    # Schema requirement status that triggered the finding, if any.
    requirement_status: Optional[RequirementStatus] = None
    # Optional classifier for the rule that produced the result.
    rule_type: Optional[str] = None
class RepositoryStructureValidator:
"""Validates repository structure against XML or JSON definition"""
    def __init__(self, schema_path: str, repo_path: str = ".", schema_format: str = "auto"):
        """
        Initialize validator

        Args:
            schema_path: Path to schema definition (XML or JSON)
            repo_path: Path to repository to validate (default: current directory)
            schema_format: Format of schema file ('xml', 'json', or 'auto' for auto-detection)
        """
        self.schema_path = schema_path
        self.repo_path = Path(repo_path).resolve()
        self.results: List[ValidationResult] = []
        self.schema_format = schema_format
        self.structure_data = None
        # Determine format
        if self.schema_format == "auto":
            self.schema_format = self._detect_format()
        # Load schema
        # NOTE(review): exiting the process from a constructor makes this
        # class awkward to reuse as a library; kept as-is because the CLI
        # relies on exit code 3 signalling a configuration error.
        try:
            if self.schema_format == "xml":
                self._load_xml_schema()
            elif self.schema_format == "json":
                self._load_json_schema()
            else:
                raise ValueError(f"Unsupported schema format: {self.schema_format}")
        except Exception as e:
            print(f"Error loading schema: {e}", file=sys.stderr)
            sys.exit(3)
def _detect_format(self) -> str:
"""Auto-detect schema format from file extension"""
ext = Path(self.schema_path).suffix.lower()
if ext == ".json":
return "json"
elif ext in [".xml", ""]:
return "xml"
else:
# Try to detect from content
try:
with open(self.schema_path, 'r') as f:
content = f.read().strip()
if content.startswith('{') or content.startswith('['):
return "json"
elif content.startswith('<?xml') or content.startswith('<'):
return "xml"
except Exception:
pass
# Unable to detect format
raise ValueError(f"Unable to detect schema format for {self.schema_path}")
    def _load_xml_schema(self):
        """Load XML schema

        Parses the schema with ElementTree and converts it to the shared
        dict shape (same as JSON schemas) via _parse_xml_to_dict(). All
        schema elements live under the repository-structure namespace.
        """
        self.tree = ET.parse(self.schema_path)
        self.root = self.tree.getroot()
        self.namespace = {'rs': 'http://mokoconsulting.com/schemas/repository-structure'}
        self.structure_data = self._parse_xml_to_dict()
def _load_json_schema(self):
"""Load JSON schema"""
with open(self.schema_path, 'r') as f:
self.structure_data = json.load(f)
def _parse_xml_to_dict(self) -> Dict[str, Any]:
"""Convert XML structure to dictionary format for unified processing"""
structure = {}
# Parse metadata
metadata_elem = self.root.find('rs:metadata', self.namespace)
if metadata_elem is not None:
structure['metadata'] = {
'name': self._get_element_text(metadata_elem, 'name'),
'description': self._get_element_text(metadata_elem, 'description'),
'repositoryType': self._get_element_text(metadata_elem, 'repository-type'),
'platform': self._get_element_text(metadata_elem, 'platform'),
}
# Parse structure
structure_elem = self.root.find('rs:structure', self.namespace)
if structure_elem is not None:
structure['structure'] = {}
# Parse root files
root_files_elem = structure_elem.find('rs:root-files', self.namespace)
if root_files_elem is not None:
structure['structure']['rootFiles'] = []
for file_elem in root_files_elem.findall('rs:file', self.namespace):
structure['structure']['rootFiles'].append(self._parse_xml_file(file_elem))
# Parse directories
directories_elem = structure_elem.find('rs:directories', self.namespace)
if directories_elem is not None:
structure['structure']['directories'] = []
for dir_elem in directories_elem.findall('rs:directory', self.namespace):
structure['structure']['directories'].append(self._parse_xml_directory(dir_elem))
return structure
def _parse_xml_file(self, file_elem) -> Dict[str, Any]:
"""Parse XML file element to dictionary"""
file_data = {
'name': self._get_element_text(file_elem, 'name'),
'description': self._get_element_text(file_elem, 'description'),
'requirementStatus': self._get_element_text(file_elem, 'requirement-status', 'required'),
'audience': self._get_element_text(file_elem, 'audience'),
'template': self._get_element_text(file_elem, 'template'),
}
# Handle extension attribute
if 'extension' in file_elem.attrib:
file_data['extension'] = file_elem.attrib['extension']
return {k: v for k, v in file_data.items() if v is not None}
def _parse_xml_directory(self, dir_elem) -> Dict[str, Any]:
"""Parse XML directory element to dictionary"""
dir_data = {
'name': self._get_element_text(dir_elem, 'name'),
'path': dir_elem.attrib.get('path'),
'description': self._get_element_text(dir_elem, 'description'),
'requirementStatus': self._get_element_text(dir_elem, 'requirement-status', 'required'),
'purpose': self._get_element_text(dir_elem, 'purpose'),
}
# Parse files within directory
files_elem = dir_elem.find('rs:files', self.namespace)
if files_elem is not None:
dir_data['files'] = []
for file_elem in files_elem.findall('rs:file', self.namespace):
dir_data['files'].append(self._parse_xml_file(file_elem))
# Parse subdirectories
subdirs_elem = dir_elem.find('rs:subdirectories', self.namespace)
if subdirs_elem is not None:
dir_data['subdirectories'] = []
for subdir_elem in subdirs_elem.findall('rs:directory', self.namespace):
dir_data['subdirectories'].append(self._parse_xml_directory(subdir_elem))
return {k: v for k, v in dir_data.items() if v is not None}
def _get_element_text(self, parent, tag_name, default=None):
"""Get text content of XML element"""
if self.schema_format == "xml":
elem = parent.find(f'rs:{tag_name}', self.namespace)
return elem.text if elem is not None else default
return default
def validate(self) -> List[ValidationResult]:
"""
Run all validation checks
Returns:
List of validation results
"""
self.results = []
print(f"Validating repository: {self.repo_path}")
print(f"Against schema: {self.schema_path} (format: {self.schema_format})")
print("-" * 80)
# Validate root files
if 'structure' in self.structure_data and 'rootFiles' in self.structure_data['structure']:
for file_def in self.structure_data['structure']['rootFiles']:
self._validate_file(file_def, self.repo_path)
# Validate directories
if 'structure' in self.structure_data and 'directories' in self.structure_data['structure']:
for dir_def in self.structure_data['structure']['directories']:
self._validate_directory(dir_def, self.repo_path)
return self.results
def _validate_file(self, file_def: Dict[str, Any], parent_path: Path):
"""Validate a file requirement"""
file_name = file_def.get('name')
requirement_status = RequirementStatus(file_def.get('requirementStatus', 'required'))
file_path = parent_path / file_name
exists = file_path.exists() and file_path.is_file()
if requirement_status == RequirementStatus.REQUIRED and not exists:
self.results.append(ValidationResult(
severity=Severity.ERROR,
message=f"Required file missing: {file_name}",
path=str(file_path.relative_to(self.repo_path)),
requirement_status=requirement_status
))
elif requirement_status == RequirementStatus.SUGGESTED and not exists:
self.results.append(ValidationResult(
severity=Severity.WARNING,
message=f"Suggested file missing: {file_name}",
path=str(file_path.relative_to(self.repo_path)),
requirement_status=requirement_status
))
elif requirement_status == RequirementStatus.NOT_ALLOWED and exists:
self.results.append(ValidationResult(
severity=Severity.ERROR,
message=f"Not-allowed file present: {file_name} (should not be committed)",
path=str(file_path.relative_to(self.repo_path)),
requirement_status=requirement_status
))
elif exists:
self.results.append(ValidationResult(
severity=Severity.INFO,
message=f"File present: {file_name}",
path=str(file_path.relative_to(self.repo_path)),
requirement_status=requirement_status
))
def _validate_directory(self, dir_def: Dict[str, Any], parent_path: Path):
    """Validate a directory requirement, then recurse into its files and subdirectories.

    Args:
        dir_def: Schema entry with a 'name' key, optional 'path' (repo-relative),
            optional 'requirementStatus' (defaults to 'required'), and optional
            'files' / 'subdirectories' lists to validate within the directory.
        parent_path: Directory that contains this one (the repo root for
            top-level calls; the parent directory during recursion).

    BUG FIX: the original resolved every directory against self.repo_path and
    ignored parent_path entirely, so nested subdirectory definitions that carry
    only a 'name' (no explicit 'path') were checked at the repository root
    instead of under their parent. Name-only definitions now resolve against
    parent_path; an explicit 'path' is still treated as repo-relative, which
    preserves the previous behavior for every definition that supplies one.
    """
    dir_name = dir_def.get('name')
    explicit_path = dir_def.get('path')
    requirement_status = RequirementStatus(dir_def.get('requirementStatus', 'required'))
    if explicit_path is not None:
        # Backward compatible: explicit paths are repo-relative.
        dir_path = self.repo_path / explicit_path
        report_path = explicit_path
    else:
        # Name-only definitions nest under the directory being validated.
        dir_path = parent_path / dir_name
        try:
            report_path = str(dir_path.relative_to(self.repo_path))
        except ValueError:
            report_path = str(dir_path)
    exists = dir_path.exists() and dir_path.is_dir()
    if requirement_status == RequirementStatus.REQUIRED and not exists:
        self.results.append(ValidationResult(
            severity=Severity.ERROR,
            message=f"Required directory missing: {dir_name}",
            path=report_path,
            requirement_status=requirement_status
        ))
        return  # Skip validating contents if directory doesn't exist
    elif requirement_status == RequirementStatus.SUGGESTED and not exists:
        self.results.append(ValidationResult(
            severity=Severity.WARNING,
            message=f"Suggested directory missing: {dir_name}",
            path=report_path,
            requirement_status=requirement_status
        ))
        return
    elif requirement_status == RequirementStatus.NOT_ALLOWED and exists:
        self.results.append(ValidationResult(
            severity=Severity.ERROR,
            message=f"Not-allowed directory present: {dir_name} (should not be committed)",
            path=report_path,
            requirement_status=requirement_status
        ))
        return
    elif exists:
        self.results.append(ValidationResult(
            severity=Severity.INFO,
            message=f"Directory present: {dir_name}",
            path=report_path,
            requirement_status=requirement_status
        ))
    # Validate files within directory
    if exists and 'files' in dir_def:
        for file_def in dir_def['files']:
            self._validate_file(file_def, dir_path)
    # Validate subdirectories (recursion passes this directory as the parent)
    if exists and 'subdirectories' in dir_def:
        for subdir_def in dir_def['subdirectories']:
            self._validate_directory(subdir_def, dir_path)
def print_results(self):
    """Print a formatted report of all validation results.

    Groups self.results by severity, prints an errors section, a warnings
    section, and an info summary, then an overall count line.

    Returns:
        Tuple of (error_count, warning_count).
    """
    # Bucket results by severity in a single pass.
    errors, warnings, infos = [], [], []
    for result in self.results:
        if result.severity == Severity.ERROR:
            errors.append(result)
        elif result.severity == Severity.WARNING:
            warnings.append(result)
        elif result.severity == Severity.INFO:
            infos.append(result)

    banner = "=" * 80
    print("\n" + banner)
    print("VALIDATION RESULTS")
    print(banner)
    if errors:
        print(f"\n❌ ERRORS ({len(errors)}):")
        for result in errors:
            print(f"  {result.path}: {result.message}")
    if warnings:
        print(f"\n⚠️  WARNINGS ({len(warnings)}):")
        for result in warnings:
            print(f"  {result.path}: {result.message}")
    if infos:
        print(f"\n✓ INFO ({len(infos)} items validated successfully)")
    print("\n" + banner)
    print(f"Summary: {len(errors)} errors, {len(warnings)} warnings, {len(infos)} info")
    print(banner)
    return len(errors), len(warnings)
def main():
    """CLI entry point: parse arguments, validate the repository, and exit.

    Exit codes:
        0 - validation passed with no errors or warnings
        1 - one or more errors found
        2 - warnings only
    """
    parser = argparse.ArgumentParser(
        description='Validate repository structure against XML or JSON schema',
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog=__doc__
    )
    parser.add_argument(
        '--schema',
        default='scripts/definitions/default-repository.xml',
        help='Path to schema file (XML or JSON). Default: scripts/definitions/default-repository.xml'
    )
    parser.add_argument(
        '--format',
        choices=['xml', 'json', 'auto'],
        default='auto',
        help='Schema format (xml, json, or auto-detect). Default: auto'
    )
    parser.add_argument(
        '--repo-path',
        default='.',
        help='Path to repository to validate. Default: current directory'
    )
    args = parser.parse_args()

    # Build the validator from CLI options and run the checks.
    validator = RepositoryStructureValidator(
        schema_path=args.schema,
        repo_path=args.repo_path,
        schema_format=args.format
    )
    validator.validate()
    errors, warnings = validator.print_results()

    # Map the outcome onto the documented exit codes.
    if errors > 0:
        exit_code = 1
    elif warnings > 0:
        exit_code = 2
    else:
        exit_code = 0
    sys.exit(exit_code)


if __name__ == '__main__':
    main()