Scripts/Validation Update

This commit is contained in:
2026-01-03 12:50:10 -06:00
parent 8401568fc0
commit c9ac58d342
18 changed files with 1035 additions and 264 deletions

View File

@@ -23,7 +23,7 @@
# INGROUP: GitHub.Actions.ContinuousIntegration
# REPO: https://github.com/mokoconsulting-tech/MokoStandards
# PATH: /.github/workflows/ci.yml
# VERSION: 03.05.00
# VERSION: 01.00.00
# BRIEF: Continuous integration governance workflow for standards enforcement.
# NOTE: Runs on every push. Auto-normalizes YAML tabs to two spaces before validation.
# ============================================================================
@@ -35,8 +35,8 @@ on:
pull_request:
workflow_dispatch:
inputs:
auto_fix_tabs:
description: "Run scripts/fix_tabs.sh before validation (does not commit changes)"
auto_fix_tabs:
description: "Run scripts/fix/tabs.sh before validation (does not commit changes)"
required: false
default: false
type: boolean
@@ -65,7 +65,7 @@ jobs:
fetch-depth: 0
- name: Auto-fix YAML tabs when YAML changes detected
if: ${{ github.event_name != 'workflow_dispatch' || inputs.auto_fix_tabs }}
if: ${{ github.event_name != 'workflow_dispatch' || inputs.auto_fix_tabs }}
run: |
set -euo pipefail
@@ -96,50 +96,50 @@ jobs:
fi
if [ -n "$CHANGED_YAML" ]; then
echo "YAML changes detected. Running fix_tabs.sh"
if [ -x "./scripts/fix_tabs.sh" ]; then
./scripts/fix_tabs.sh
echo "YAML changes detected. Running fix/tabs.sh"
if [ -x "./scripts/fix/tabs.sh" ]; then
./scripts/fix/tabs.sh
else
echo "fix_tabs.sh not present, skipping"
echo "fix/tabs.sh not present, skipping"
fi
else
echo "No YAML changes detected. Skipping fix_tabs.sh"
echo "No YAML changes detected. Skipping fix/tabs.sh"
fi
- name: Validate YAML tabs usage
run: |
set -euo pipefail
if [ -x "./scripts/validate_tabs.sh" ]; then
./scripts/validate_tabs.sh
if [ -x "./scripts/validate/tabs.sh" ]; then
./scripts/validate/tabs.sh
else
echo "validate_tabs.sh not present, skipping"
echo "validate/tabs.sh not present, skipping"
fi
- name: Validate file paths
run: |
set -euo pipefail
if [ -x "./scripts/validate_paths.sh" ]; then
./scripts/validate_paths.sh
if [ -x "./scripts/validate/paths.sh" ]; then
./scripts/validate/paths.sh
else
echo "validate_paths.sh not present, skipping"
echo "validate/paths.sh not present, skipping"
fi
- name: Validate CHANGELOG governance
run: |
set -euo pipefail
if [ -x "./scripts/validate_changelog.sh" ]; then
./scripts/validate_changelog.sh
if [ -x "./scripts/validate/changelog.sh" ]; then
./scripts/validate/changelog.sh
else
echo "validate_changelog.sh not present, skipping"
echo "validate/changelog.sh not present, skipping"
fi
- name: Validate Joomla manifests
run: |
set -euo pipefail
if [ -x "./scripts/validate_manifest.sh" ]; then
./scripts/validate_manifest.sh
if [ -x "./scripts/validate/manifest.sh" ]; then
./scripts/validate/manifest.sh
else
echo "validate_manifest.sh not present, skipping"
echo "validate/manifest.sh not present, skipping"
fi
- name: Continuous integration completion

View File

@@ -23,7 +23,7 @@
# INGROUP: MokoStandards.Release
# REPO: https://github.com/mokoconsulting-tech/MokoStandards
# PATH: /.github/workflows/release_pipeline.yml
# VERSION: 03.05.00
# VERSION: 01.00.00
# BRIEF: Enterprise release pipeline enforcing dev to rc to version to main. Creates prerelease when rc is created. Creates full release when version is created and promotes to main while retaining the version branch.
# NOTE:
# ============================================================================
@@ -199,10 +199,10 @@ jobs:
echo " \"repository\": \"${GITHUB_REPOSITORY}\","
echo " \"workflow\": \"${GITHUB_WORKFLOW}\","
echo " \"job\": \"${GITHUB_JOB}\","
echo " \"run_id\": ${GITHUB_RUN_ID},"
echo " \"run_number\": ${GITHUB_RUN_NUMBER},"
echo " \"run_attempt\": ${GITHUB_RUN_ATTEMPT},"
echo " \"run_url\": \"${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}\","
echo " \"run_id\": ${GITHUB_RUN_ID},"
echo " \"run_number\": ${GITHUB_RUN_NUMBER},"
echo " \"run_attempt\": ${GITHUB_RUN_ATTEMPT},"
echo " \"run_url\": \"${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}\","
echo " \"actor\": \"${GITHUB_ACTOR}\","
echo " \"actor_permission\": \"${{ steps.auth.outputs.permission }}\","
echo " \"sha\": \"${GITHUB_SHA}\","
@@ -232,10 +232,10 @@ jobs:
printf '"repository":"%s",' "${GITHUB_REPOSITORY}"
printf '"workflow":"%s",' "${GITHUB_WORKFLOW}"
printf '"job":"%s",' "${GITHUB_JOB}"
printf '"run_id":%s,' "${GITHUB_RUN_ID}"
printf '"run_number":%s,' "${GITHUB_RUN_NUMBER}"
printf '"run_attempt":%s,' "${GITHUB_RUN_ATTEMPT}"
printf '"run_url":"%s",' "${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}"
printf '"run_id":%s,' "${GITHUB_RUN_ID}"
printf '"run_number":%s,' "${GITHUB_RUN_NUMBER}"
printf '"run_attempt":%s,' "${GITHUB_RUN_ATTEMPT}"
printf '"run_url":"%s",' "${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}"
printf '"actor":"%s",' "${GITHUB_ACTOR}"
printf '"event":"%s",' "${GITHUB_EVENT_NAME}"
printf '"ref_name":"%s",' "${GITHUB_REF_NAME}"
@@ -697,7 +697,7 @@ jobs:
{
echo "### Build report"
echo "```json"
echo "{\"repository\":\"${GITHUB_REPOSITORY}\",\"workflow\":\"${GITHUB_WORKFLOW}\",\"job\":\"${GITHUB_JOB}\",\"run_id\":${GITHUB_RUN_ID},\"run_number\":${GITHUB_RUN_NUMBER},\"run_attempt\":${GITHUB_RUN_ATTEMPT},\"run_url\":\"${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}\",\"actor\":\"${GITHUB_ACTOR}\",\"sha\":\"${GITHUB_SHA}\",\"archive_policy\":\"src_only\",\"manifest\":\"${MANIFEST}\",\"extension_type\":\"${EXT_TYPE}\",\"zip\":\"${DIST_DIR}/${ZIP}\",\"zip_bytes\":${ZIP_BYTES}}"
echo "{\"repository\":\"${GITHUB_REPOSITORY}\",\"workflow\":\"${GITHUB_WORKFLOW}\",\"job\":\"${GITHUB_JOB}\",\"run_id\":${GITHUB_RUN_ID},\"run_number\":${GITHUB_RUN_NUMBER},\"run_attempt\":${GITHUB_RUN_ATTEMPT},\"run_url\":\"${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}\",\"actor\":\"${GITHUB_ACTOR}\",\"sha\":\"${GITHUB_SHA}\",\"archive_policy\":\"src_only\",\"manifest\":\"${MANIFEST}\",\"extension_type\":\"${EXT_TYPE}\",\"zip\":\"${DIST_DIR}/${ZIP}\",\"zip_bytes\":${ZIP_BYTES}}"
echo "```"
} >> "${GITHUB_STEP_SUMMARY}"

View File

@@ -10,7 +10,7 @@
# INGROUP: MokoStandards.Validation
# REPO: https://github.com/mokoconsulting-tech/MokoStandards
# PATH: /.github/workflows/repo_health.yml
# VERSION: 03.05.00
# VERSION: 01.00.00
# BRIEF: Enforces repository guardrails by validating release configuration, scripts governance, tooling availability, and core repository health artifacts.
# NOTE: Field is user-managed.
# ============================================================================

View File

@@ -20,7 +20,7 @@
# INGROUP: GitHub.Versioning.Branching
# REPO: https://github.com/mokoconsulting-tech/MokoStandards
# PATH: /.github/workflows/version_branch.yml
# VERSION: 03.05.00
# VERSION: 01.00.00
# BRIEF: Create a dev/<version> branch and align versions across governed files
# NOTE: Enterprise gates: required artifacts, namespace defense, deterministic reporting, control character guard
@@ -256,9 +256,9 @@ jobs:
source "$CI_HELPERS"
moko_init "Branch namespace collision defense"
PREFIX_TOP="${BRANCH_PREFIX%%/*}"
if git ls-remote --exit-code --heads origin "${PREFIX_TOP}" >/dev/null 2>&1; then
echo "[FATAL] Branch namespace collision detected: '${PREFIX_TOP}' exists on origin." >&2
PREFIX_TOP="${BRANCH_PREFIX%%/*}"
if git ls-remote --exit-code --heads origin "${PREFIX_TOP}" >/dev/null 2>&1; then
echo "[FATAL] Branch namespace collision detected: '${PREFIX_TOP}' exists on origin." >&2
exit 2
fi

View File

@@ -21,8 +21,8 @@
# DEFGROUP: MokoStandards
# INGROUP: Generic.Script
# REPO: https://github.com/mokoconsulting-tech/MokoStandards
# PATH: /scripts/fix_paths.sh
# VERSION: 03.05.00
# PATH: /scripts/fix/paths.sh
# VERSION: 01.00.00
# BRIEF: Replace Windows-style path separators with POSIX separators in text files.#
# Purpose:
# - Normalize path separators in text files to forward slashes (/).
@@ -31,7 +31,7 @@
# - Preserves file contents aside from path separator normalization.
#
# Usage:
# ./scripts/fix_paths.sh
# ./scripts/fix/paths.sh
# =============================================================================
set -euo pipefail

View File

@@ -23,8 +23,8 @@
# DEFGROUP: MokoStandards
# INGROUP: GitHub.Actions.Utilities
# REPO: https://github.com/mokoconsulting-tech/MokoStandards
# PATH: /scripts/fix_tabs.sh
# VERSION: 03.05.00
# PATH: /scripts/fix/tabs.sh
# VERSION: 01.00.00
# BRIEF: Utility script to replace tab characters with two spaces in YAML files.
# NOTE: Intended for local developer use. Not executed automatically in CI.
# ============================================================================
@@ -35,8 +35,8 @@ log() {
printf '%s\n' "$*"
}
log "[fix_tabs] Scope: *.yml, *.yaml"
log "[fix_tabs] Action: replace tab characters with two spaces"
log "[fix/tabs] Scope: *.yml, *.yaml"
log "[fix/tabs] Action: replace tab characters with two spaces"
changed=0
@@ -48,7 +48,7 @@ else
fi
if [ -z "${files}" ]; then
log "[fix_tabs] No YAML files found. Nothing to fix."
log "[fix/tabs] No YAML files found. Nothing to fix."
exit 0
fi
@@ -57,17 +57,17 @@ while IFS= read -r f; do
[ -f "$f" ] || continue
if LC_ALL=C grep -q $'\t' -- "$f"; then
log "[fix_tabs] Fixing tabs in: $f"
log "[fix/tabs] Fixing tabs in: $f"
# Replace literal tab characters with exactly two spaces
sed -i 's/\t/ /g' "$f"
changed=1
else
log "[fix_tabs] Clean: $f"
log "[fix/tabs] Clean: $f"
fi
done <<< "${files}"
if [ "$changed" -eq 1 ]; then
log "[fix_tabs] Completed with modifications"
log "[fix/tabs] Completed with modifications"
else
log "[fix_tabs] No changes required"
log "[fix/tabs] No changes required"
fi

View File

@@ -23,7 +23,7 @@
# INGROUP: Generic.Script
# REPO: https://github.com/mokoconsulting-tech/MokoStandards
# PATH: /scripts/update_changelog.sh
# VERSION: 03.05.00
# VERSION: 01.00.00
# BRIEF: Insert a versioned CHANGELOG.md entry immediately after the main Changelog heading
# Purpose:
# - Apply the MokoWaaS-Brand CHANGELOG template entry for a given version.
@@ -55,7 +55,7 @@ require_cmd() {
command -v "$1" >/dev/null 2>&1 || die "Missing required command: $1"
}
validate_version() {
validate_version() {
local v="$1"
[[ "$v" =~ ^[0-9]{2}\.[0-9]{2}\.[0-9]{2}$ ]] || die "Invalid version '$v'. Expected NN.NN.NN (example 03.01.00)."
}
@@ -68,7 +68,7 @@ main() {
[[ $# -eq 1 ]] || die "Usage: $0 <VERSION>"
local version="$1"
validate_version "$version"
validate_version "$version"
[[ -f "$CHANGELOG_FILE" ]] || die "Missing $CHANGELOG_FILE in repo root."

View File

@@ -24,7 +24,7 @@
# INGROUP: Date.Normalization
# REPO: https://github.com/mokoconsulting-tech/MokoStandards
# PATH: /scripts/release/update_dates.sh
# VERSION: 03.05.00
# VERSION: 01.00.00
# BRIEF: Normalize release dates across manifests and CHANGELOG using a single authoritative UTC date.
# NOTE: Repo-controlled script only. CI-fatal on malformed inputs. Outputs a JSON report to stdout.

View File

@@ -1,66 +1,213 @@
#!/usr/bin/env bash
#
# ============================================================================
# Copyright (C) 2025 Moko Consulting <hello@mokoconsulting.tech>
#
# This file is part of a Moko Consulting project.
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
# FILE INFORMATION
# DEFGROUP: MokoStandards
# INGROUP: Tooling.Changelog
# FILE: verify_changelog.sh
# BRIEF: Validate CHANGELOG.md governance rules for CI enforcement
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# PURPOSE:
# Validate that CHANGELOG.md contains only released, properly ordered entries and complies with MokoStandards governance rules.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# FILE INFORMATION
# DEFGROUP: Scripts.Validate
# INGROUP: MokoStandards.Release
# REPO: https://github.com/mokoconsulting-tech/MokoStandards
# PATH: /scripts/validate/changelog.sh
# VERSION: 01.00.00
# BRIEF: Validates CHANGELOG.md structure and confirms a release section exists for the current version inferred from branch, tag, or env.
# NOTE:
# ============================================================================
set -euo pipefail
CHANGELOG="CHANGELOG.md"
json_escape() {
python3 - <<'PY' "$1"
import json,sys
print(json.dumps(sys.argv[1]))
PY
}
if [ ! -f "$CHANGELOG" ]; then
echo "ERROR: CHANGELOG.md not found at repository root" >&2
exit 1
fail() {
local msg="$1"
local extra="${2:-}"
if [ -n "${extra}" ]; then
printf '{"status":"fail","error":%s,%s}\n' "$(json_escape "${msg}")" "${extra}"
else
printf '{"status":"fail","error":%s}\n' "$(json_escape "${msg}")"
fi
exit 1
}
ok() {
local extra="${1:-}"
if [ -n "${extra}" ]; then
printf '{"status":"ok",%s}\n' "${extra}"
else
printf '{"status":"ok"}\n'
fi
}
# Version resolution order:
# 1) explicit env: RELEASE_VERSION or VERSION
# 2) branch name (GITHUB_REF_NAME): rc/x.y.z or version/x.y.z or dev/x.y.z
# 3) tag name (GITHUB_REF_NAME): vX.Y.Z or vX.Y.Z-rc
# 4) git describe tag fallback
VERSION_IN="${RELEASE_VERSION:-${VERSION:-}}"
ref_name="${GITHUB_REF_NAME:-}"
infer_from_ref() {
local r="$1"
if printf '%s' "${r}" | grep -Eq '^(dev|rc|version)/[0-9]+\.[0-9]+\.[0-9]+$'; then
printf '%s' "${r#*/}"
return 0
fi
if printf '%s' "${r}" | grep -Eq '^v[0-9]+\.[0-9]+\.[0-9]+(-rc)?$'; then
r="${r#v}"
r="${r%-rc}"
printf '%s' "${r}"
return 0
fi
return 1
}
VERSION_RESOLVED=""
if [ -n "${VERSION_IN}" ]; then
if ! printf '%s' "${VERSION_IN}" | grep -Eq '^[0-9]+\.[0-9]+\.[0-9]+$'; then
fail "Invalid version format in env" "\"version\":$(json_escape "${VERSION_IN}")"
fi
VERSION_RESOLVED="${VERSION_IN}"
else
if [ -n "${ref_name}" ]; then
if v="$(infer_from_ref "${ref_name}" 2>/dev/null)"; then
VERSION_RESOLVED="${v}"
fi
fi
if [ -z "${VERSION_RESOLVED}" ]; then
tag="$(git describe --tags --abbrev=0 2>/dev/null || true)"
if [ -n "${tag}" ]; then
if v="$(infer_from_ref "${tag}" 2>/dev/null)"; then
VERSION_RESOLVED="${v}"
fi
fi
fi
fi
CONTENT="$(cat "$CHANGELOG")"
if echo "$CONTENT" | grep -Eiq '^##[[:space:]]*\[?TODO\]?'; then
echo "ERROR: TODO section detected in CHANGELOG.md." >&2
echo "CHANGELOG.md must contain released versions only." >&2
echo "Move all TODO items to TODO.md and remove the section from CHANGELOG.md." >&2
exit 1
if [ -z "${VERSION_RESOLVED}" ]; then
fail "Unable to infer version (set RELEASE_VERSION or VERSION, or use a versioned branch/tag)" "\"ref_name\":$(json_escape "${ref_name:-}" )"
fi
if echo "$CONTENT" | grep -Eiq 'UNRELEASED'; then
echo "ERROR: UNRELEASED placeholder detected in CHANGELOG.md." >&2
exit 1
if [ ! -f "CHANGELOG.md" ]; then
fail "CHANGELOG.md missing"
fi
for token in "TBD" "TO BE DETERMINED" "PLACEHOLDER"; do
if echo "$CONTENT" | grep -Eiq "$token"; then
echo "ERROR: Unresolved placeholder detected: $token" >&2
exit 1
fi
done
mapfile -t versions < <(
grep -E '^## \[[0-9]+\.[0-9]+\.[0-9]+\] [0-9]{4}-[0-9]{2}-[0-9]{2}$' "$CHANGELOG" \
| sed -E 's/^## \[([0-9]+\.[0-9]+\.[0-9]+)\].*/\1/'
)
if [ "${#versions[@]}" -eq 0 ]; then
echo "ERROR: No valid version headings found in CHANGELOG.md" >&2
exit 1
if [ ! -s "CHANGELOG.md" ]; then
fail "CHANGELOG.md is empty"
fi
sorted_versions="$(printf '%s\n' "${versions[@]}" | sort -Vr)"
# Core structural checks
# - Must contain at least one H2 heading with a bracketed version
# - Must contain an Unreleased section
# - Must contain a section for the resolved version
if [ "$(printf '%s\n' "${versions[@]}")" != "$sorted_versions" ]; then
echo "ERROR: Versions are not ordered from newest to oldest" >&2
exit 1
unreleased_ok=false
if grep -Eq '^## \[Unreleased\]' CHANGELOG.md; then
unreleased_ok=true
fi
echo "CHANGELOG.md validation passed"
exit 0
if [ "${unreleased_ok}" != "true" ]; then
fail "CHANGELOG.md missing '## [Unreleased]' section"
fi
if ! grep -Eq '^## \[[0-9]+\.[0-9]+\.[0-9]+\]' CHANGELOG.md; then
fail "CHANGELOG.md has no version sections (expected headings like: ## [x.y.z])"
fi
# Version section existence
if ! grep -Fq "## [${VERSION_RESOLVED}]" CHANGELOG.md; then
fail "CHANGELOG.md missing version section" "\"version\":$(json_escape "${VERSION_RESOLVED}")"
fi
# Optional quality checks (warnings only)
warnings=()
# Expect a date on the same line as the version heading, like: ## [x.y.z] YYYY-MM-DD
if ! grep -Eq "^## \[${VERSION_RESOLVED}\] [0-9]{4}-[0-9]{2}-[0-9]{2}$" CHANGELOG.md; then
warnings+=("version_heading_date_missing_or_nonstandard")
fi
# Minimal section content: require at least one non-empty line between this version heading and the next heading.
python3 - <<'PY' "${VERSION_RESOLVED}" || true
import re,sys
ver = sys.argv[1]
text = open('CHANGELOG.md','r',encoding='utf-8').read().splitlines()
start = None
for i,line in enumerate(text):
if line.startswith(f"## [{ver}]"):
start = i
break
if start is None:
sys.exit(0)
end = len(text)
for j in range(start+1,len(text)):
if text[j].startswith('## ['):
end = j
break
block = [ln for ln in text[start+1:end] if ln.strip()]
# block contains at least one meaningful line (excluding blank)
if len(block) == 0:
print('WARN: version_section_empty')
PY
if grep -Fq 'WARN: version_section_empty' <(python3 - <<'PY' "${VERSION_RESOLVED}" 2>/dev/null || true
import sys
ver = sys.argv[1]
lines = open('CHANGELOG.md','r',encoding='utf-8').read().splitlines()
start = None
for i,l in enumerate(lines):
if l.startswith(f"## [{ver}]"):
start=i
break
if start is None:
sys.exit(0)
end=len(lines)
for j in range(start+1,len(lines)):
if lines[j].startswith('## ['):
end=j
break
block=[ln for ln in lines[start+1:end] if ln.strip()]
if len(block)==0:
print('WARN: version_section_empty')
PY
); then
warnings+=("version_section_empty")
fi
# Emit machine-readable report
if [ "${#warnings[@]}" -gt 0 ]; then
# Build JSON array safely
warn_json="["
sep=""
for w in "${warnings[@]}"; do
warn_json+="${sep}$(json_escape "${w}")"
sep=",";
done
warn_json+="]"
ok "\"version\":$(json_escape "${VERSION_RESOLVED}"),\"ref_name\":$(json_escape "${ref_name:-}"),\"warnings\":${warn_json}"
else
ok "\"version\":$(json_escape "${VERSION_RESOLVED}"),\"ref_name\":$(json_escape "${ref_name:-}"),\"warnings\":[]"
fi
printf '%s\n' "changelog: ok (version=${VERSION_RESOLVED})"

View File

@@ -0,0 +1,114 @@
# ============================================================================
# Copyright (C) 2025 Moko Consulting <hello@mokoconsulting.tech>
#
# This file is part of a Moko Consulting project.
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# FILE INFORMATION
# DEFGROUP: Scripts.Validate
# INGROUP: MokoStandards.Release
# REPO: https://github.com/mokoconsulting-tech/MokoStandards
# PATH: /scripts/validate/language_structure.sh
# VERSION: 01.00.00
# BRIEF: Validates Joomla language structure under src/language and enforces folder and INI naming conventions.
# NOTE:
# ============================================================================
set -euo pipefail
SRC_DIR="${SRC_DIR:-src}"
LANG_ROOT="${LANG_ROOT:-${SRC_DIR}/language}"
json_escape() {
python3 -c 'import json,sys; print(json.dumps(sys.argv[1]))' "$1"
}
[ -d "${SRC_DIR}" ] || {
printf '{"status":"fail","error":%s}\n' "$(json_escape "src directory missing")"
exit 1
}
python3 - <<'PY' "${LANG_ROOT}"
import json
import sys
import re
from pathlib import Path
lang_root = Path(sys.argv[1])
# Language directory is optional for some extension types
if not lang_root.exists():
print(json.dumps({"status":"ok","lang_root":str(lang_root),"languages":[],"warnings":["language_root_missing"]}, ensure_ascii=False))
sys.exit(0)
if not lang_root.is_dir():
print(json.dumps({"status":"fail","error":"language_root_not_directory","lang_root":str(lang_root)}, ensure_ascii=False))
sys.exit(1)
lang_dirs = sorted([p for p in lang_root.iterdir() if p.is_dir()])
# Joomla language tags: en-GB, fr-FR, etc.
pattern = re.compile(r'^[a-z]{2}-[A-Z]{2}$')
invalid = [p.name for p in lang_dirs if not pattern.match(p.name)]
warnings = []
# Soft expectation: en-GB exists if any language directories exist
if lang_dirs and not (lang_root / 'en-GB').exists():
warnings.append('en-GB_missing')
# Validate INI naming
missing_ini = []
nonmatching_ini = []
for d in lang_dirs:
ini_files = [p for p in d.glob('*.ini') if p.is_file()]
if not ini_files:
missing_ini.append(d.name)
continue
for ini in ini_files:
if not (ini.name.startswith(d.name + '.') or ini.name == f"{d.name}.ini"):
nonmatching_ini.append(str(ini))
result = {
"status": "ok",
"lang_root": str(lang_root),
"languages": [d.name for d in lang_dirs],
"warnings": warnings,
}
# Hard failures
if invalid:
result.update({"status":"fail","error":"invalid_language_tag_dir","invalid":invalid})
print(json.dumps(result, ensure_ascii=False))
sys.exit(1)
if nonmatching_ini:
result.update({"status":"fail","error":"ini_name_mismatch","nonmatching_ini":nonmatching_ini[:50]})
print(json.dumps(result, ensure_ascii=False))
sys.exit(1)
if missing_ini:
result.update({"status":"fail","error":"missing_ini_files","missing_ini":missing_ini})
print(json.dumps(result, ensure_ascii=False))
sys.exit(1)
print(json.dumps(result, ensure_ascii=False))
PY
echo "language_structure: ok"

View File

@@ -0,0 +1,91 @@
# ============================================================================
# Copyright (C) 2025 Moko Consulting <hello@mokoconsulting.tech>
#
# This file is part of a Moko Consulting project.
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# FILE INFORMATION
# DEFGROUP: Scripts.Validate
# INGROUP: MokoStandards.Release
# REPO: https://github.com/mokoconsulting-tech/MokoStandards
# PATH: /scripts/validate/license_headers.sh
# VERSION: 01.00.00
# BRIEF: Validates SPDX license identifier presence near the top of source files under src.
# NOTE:
# ============================================================================
set -euo pipefail
SRC_DIR="${SRC_DIR:-src}"
json_escape() {
python3 -c 'import json,sys; print(json.dumps(sys.argv[1]))' "$1"
}
[ -d "${SRC_DIR}" ] || {
printf '{"status":"fail","error":%s}\n' "$(json_escape "src directory missing")"
exit 1
}
python3 - <<'PY' "${SRC_DIR}"
import json
import sys
from pathlib import Path
src = Path(sys.argv[1])
exts = {'.php','.js','.css','.sh','.yml','.yaml','.xml'}
exclude_dirs = {'vendor','node_modules','dist','.git','build','tmp'}
missing = []
scanned = 0
for p in src.rglob('*'):
if not p.is_file():
continue
if any(part in exclude_dirs for part in p.parts):
continue
if p.suffix.lower() not in exts:
continue
try:
data = p.read_bytes()[:2048]
except Exception:
continue
if b'\x00' in data:
continue
scanned += 1
head = data.decode('utf-8', errors='replace')
if 'SPDX-License-Identifier:' not in head:
missing.append(str(p))
if missing:
print(json.dumps({
"status":"fail",
"error":"missing_spdx_identifier",
"scanned":scanned,
"missing_count":len(missing),
"missing":missing[:200]
}, ensure_ascii=False))
sys.exit(1)
print(json.dumps({"status":"ok","scanned":scanned,"missing_count":0}, ensure_ascii=False))
PY
echo "license_headers: ok"

View File

@@ -19,92 +19,191 @@
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# FILE INFORMATION
# DEFGROUP: Shell.Script
# INGROUP: MokoStandards.Validation
# DEFGROUP: Scripts.Validate
# INGROUP: MokoStandards.Release
# REPO: https://github.com/mokoconsulting-tech/MokoStandards
# PATH: /scripts/validate_manifest.sh
# VERSION: 03.05.00
# BRIEF: Validate Joomla extension manifest governance before packaging.
# NOTE: Enforces: manifest discovery, extension type presence, version and creationDate presence, XML wellformedness when xmllint is available.
# PATH: /scripts/validate/manifest.sh
# VERSION: 01.00.00
# BRIEF: Validates presence and basic structure of a Joomla manifest under src. Enforces exactly one primary manifest candidate and required fields by extension type.
# NOTE:
# ============================================================================
set -euo pipefail
# Purpose:
# - Locate the primary Joomla manifest under /src.
# - Validate that it contains a <extension ... type="..."> root.
# - Validate required fields exist: <version>, <creationDate>, <name>.
# - Validate XML is wellformed when xmllint is available.
#
# Usage:
# ./scripts/validate_manifest.sh
log_json() {
# shellcheck disable=SC2059
printf '%s\n' "$1"
}
log() { printf '%s\n' "$*"; }
fail() {
local msg="$1"
echo "ERROR: ${msg}" >&2
log "ERROR: $*" >&2
exit 1
}
[ -d "src" ] || fail "src directory missing"
SRC_DIR="${SRC_DIR:-src}"
# Discovery priority order.
manifest=""
if [ -f "src/templateDetails.xml" ]; then
manifest="src/templateDetails.xml"
elif find src -maxdepth 4 -type f -name 'templateDetails.xml' | head -n 1 | grep -q .; then
manifest="$(find src -maxdepth 4 -type f -name 'templateDetails.xml' | head -n 1)"
elif find src -maxdepth 4 -type f -name 'pkg_*.xml' | head -n 1 | grep -q .; then
manifest="$(find src -maxdepth 4 -type f -name 'pkg_*.xml' | head -n 1)"
elif find src -maxdepth 4 -type f -name 'com_*.xml' | head -n 1 | grep -q .; then
manifest="$(find src -maxdepth 4 -type f -name 'com_*.xml' | head -n 1)"
elif find src -maxdepth 4 -type f -name 'mod_*.xml' | head -n 1 | grep -q .; then
manifest="$(find src -maxdepth 4 -type f -name 'mod_*.xml' | head -n 1)"
elif find src -maxdepth 6 -type f -name 'plg_*.xml' | head -n 1 | grep -q .; then
manifest="$(find src -maxdepth 6 -type f -name 'plg_*.xml' | head -n 1)"
else
manifest="$(grep -Rsl --include='*.xml' '<extension' src | head -n 1 || true)"
if [ ! -d "${SRC_DIR}" ]; then
fail "${SRC_DIR} directory missing"
fi
[ -n "${manifest}" ] || fail "No Joomla manifest XML found under src"
[ -f "${manifest}" ] || fail "Manifest not found on disk: ${manifest}"
# Candidate discovery policy: prefer explicit known names, otherwise fall back to extension-root manifests.
# Goal: choose ONE manifest deterministically.
manifest_candidates=()
# Validate root tag presence.
if ! grep -Eq '<extension[^>]*>' "${manifest}"; then
fail "Manifest does not contain <extension ...> root: ${manifest}"
# Template
if [ -f "${SRC_DIR}/templateDetails.xml" ]; then
manifest_candidates+=("${SRC_DIR}/templateDetails.xml")
fi
ext_type="$(grep -Eo 'type="[^"]+"' "${manifest}" | head -n 1 | cut -d '"' -f2 || true)"
[ -n "${ext_type}" ] || fail "Manifest missing required attribute type= on <extension>: ${manifest}"
# Package
while IFS= read -r f; do
[ -n "${f}" ] && manifest_candidates+=("${f}")
done < <(find "${SRC_DIR}" -maxdepth 4 -type f -name 'pkg_*.xml' 2>/dev/null | sort || true)
# Required fields checks.
name_val="$(grep -Eo '<name>[^<]+' "${manifest}" | head -n 1 | sed 's/<name>//' || true)"
version_val="$(grep -Eo '<version>[^<]+' "${manifest}" | head -n 1 | sed 's/<version>//' || true)"
date_val="$(grep -Eo '<creationDate>[^<]+' "${manifest}" | head -n 1 | sed 's/<creationDate>//' || true)"
# Component
while IFS= read -r f; do
[ -n "${f}" ] && manifest_candidates+=("${f}")
done < <(find "${SRC_DIR}" -maxdepth 4 -type f -name 'com_*.xml' 2>/dev/null | sort || true)
[ -n "${name_val}" ] || fail "Manifest missing <name>: ${manifest}"
[ -n "${version_val}" ] || fail "Manifest missing <version>: ${manifest}"
[ -n "${date_val}" ] || fail "Manifest missing <creationDate>: ${manifest}"
# Module
while IFS= read -r f; do
[ -n "${f}" ] && manifest_candidates+=("${f}")
done < <(find "${SRC_DIR}" -maxdepth 4 -type f -name 'mod_*.xml' 2>/dev/null | sort || true)
# Basic version format guardrail (00.00.00 style).
if ! printf '%s' "${version_val}" | grep -Eq '^[0-9]+\.[0-9]+\.[0-9]+$'; then
fail "Manifest <version> is not semantic (x.y.z): ${version_val}"
# Plugin
while IFS= read -r f; do
[ -n "${f}" ] && manifest_candidates+=("${f}")
done < <(find "${SRC_DIR}" -maxdepth 6 -type f -name 'plg_*.xml' 2>/dev/null | sort || true)
# Fallback: any XML containing <extension ...>
if [ "${#manifest_candidates[@]}" -eq 0 ]; then
while IFS= read -r f; do
[ -n "${f}" ] && manifest_candidates+=("${f}")
done < <(grep -Rsl --include='*.xml' '<extension' "${SRC_DIR}" 2>/dev/null | sort || true)
fi
# Basic date format guardrail (YYYY-MM-DD).
if ! printf '%s' "${date_val}" | grep -Eq '^[0-9]{4}-[0-9]{2}-[0-9]{2}$'; then
fail "Manifest <creationDate> is not YYYY-MM-DD: ${date_val}"
if [ "${#manifest_candidates[@]}" -eq 0 ]; then
fail "No Joomla manifest XML found under ${SRC_DIR}"
fi
# XML wellformedness when available.
if command -v xmllint >/dev/null 2>&1; then
xmllint --noout "${manifest}" || fail "xmllint reported invalid XML: ${manifest}"
else
echo "WARN: xmllint not available, skipping strict wellformedness check" >&2
# De-duplicate while preserving order.
unique_candidates=()
for c in "${manifest_candidates[@]}"; do
seen=false
for u in "${unique_candidates[@]}"; do
if [ "${u}" = "${c}" ]; then
seen=true
break
fi
done
if [ "${seen}" = "false" ]; then
unique_candidates+=("${c}")
fi
done
manifest_candidates=("${unique_candidates[@]}")
# Enforce single primary manifest.
if [ "${#manifest_candidates[@]}" -gt 1 ]; then
{
log "ERROR: Multiple manifest candidates detected. Resolve to exactly one primary manifest." >&2
log "Candidates:" >&2
for c in "${manifest_candidates[@]}"; do
log "- ${c}" >&2
done
}
exit 1
fi
log_json "{\"status\":\"ok\",\"manifest\":\"${manifest}\",\"type\":\"${ext_type}\",\"name\":\"${name_val}\",\"version\":\"${version_val}\",\"creationDate\":\"${date_val}\"}"
MANIFEST="${manifest_candidates[0]}"
if [ ! -s "${MANIFEST}" ]; then
fail "Manifest is empty: ${MANIFEST}"
fi
# Parse with python for portability (xmllint is not guaranteed on runners).
# The heredoc is quoted ('PY') so the Python source is passed verbatim.
python3 - <<'PY' "${MANIFEST}" || exit 1
import sys
import json
import xml.etree.ElementTree as ET
from pathlib import Path

manifest_path = Path(sys.argv[1])


def fail(msg, **ctx):
    """Emit a machine-readable failure payload on stdout and exit non-zero."""
    payload = {"status": "fail", "error": msg, **ctx}
    print(json.dumps(payload, ensure_ascii=False))
    sys.exit(1)


try:
    tree = ET.parse(manifest_path)
    root = tree.getroot()
except Exception as e:
    fail("XML parse failed", manifest=str(manifest_path), detail=str(e))

# Joomla manifests use a plain (non-namespaced) <extension> root element.
if root.tag != "extension":
    fail("Root element must be <extension>", manifest=str(manifest_path), root=str(root.tag))

ext_type = (root.attrib.get("type") or "").strip().lower() or "unknown"
allowed_types = {"template", "component", "module", "plugin", "package", "library", "file", "files"}

# Minimal required fields across most extension types.
name_el = root.find("name")
version_el = root.find("version")
name = (name_el.text or "").strip() if name_el is not None else ""
version = (version_el.text or "").strip() if version_el is not None else ""

missing = []
if not name:
    missing.append("name")
if not version:
    missing.append("version")

if ext_type not in allowed_types and ext_type != "unknown":
    fail("Unsupported extension type", manifest=str(manifest_path), ext_type=ext_type)

# Type-specific expectations.
warnings = []
if ext_type == "plugin":
    group = (root.attrib.get("group") or "").strip()
    if not group:
        missing.append("plugin.group")
    files_el = root.find("files")
    if files_el is None:
        missing.append("files")
elif ext_type in {"component", "module", "template"}:
    files_el = root.find("files")
    if files_el is None:
        missing.append("files")
elif ext_type == "package":
    files_el = root.find("files")
    if files_el is None:
        missing.append("files")
    else:
        # Package should reference at least one child manifest.
        file_nodes = files_el.findall("file")
        if not file_nodes:
            warnings.append("package.files has no <file> entries")

# Optional but commonly expected install method attribute.
method = (root.attrib.get("method") or "").strip().lower()
if method and method not in {"upgrade", "install"}:
    warnings.append(f"unexpected extension method={method}")

# Provide a stable, machine-readable report.
if missing:
    fail("Missing required fields", manifest=str(manifest_path), ext_type=ext_type, missing=missing, warnings=warnings)

print(json.dumps({
    "status": "ok",
    "manifest": str(manifest_path),
    "ext_type": ext_type,
    "name": name,
    "version": version,
    "warnings": warnings,
}, ensure_ascii=False))
PY

# Human-friendly summary (kept short for CI logs).
log "manifest: ok (${MANIFEST})"

View File

@@ -0,0 +1,81 @@
# ============================================================================
# Copyright (C) 2025 Moko Consulting <hello@mokoconsulting.tech>
#
# This file is part of a Moko Consulting project.
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# FILE INFORMATION
# DEFGROUP: Scripts.Validate
# INGROUP: MokoStandards.Release
# REPO: https://github.com/mokoconsulting-tech/MokoStandards
# PATH: /scripts/validate/no_secrets.sh
# VERSION: 01.00.00
# BRIEF: Detects high-confidence secret and credential patterns under src.
# NOTE:
# ============================================================================
set -euo pipefail

# Directory to scan; callers may override via the SRC_DIR env var.
SRC_DIR="${SRC_DIR:-src}"

# json_escape STRING: write STRING to stdout as a JSON-quoted literal.
json_escape() {
  python3 -c 'import json, sys; sys.stdout.write(json.dumps(sys.argv[1]) + "\n")' "$1"
}
[ -d "${SRC_DIR}" ] || {
printf '{"status":"fail","error":%s}
' "$(json_escape "src directory missing")"
exit 1
}
# High-signal patterns only. Any match is a hard fail.
patterns=(
'-----BEGIN (RSA|DSA|EC|OPENSSH) PRIVATE KEY-----'
'PuTTY-User-Key-File-'
'AKIA[0-9A-Z]{16}'
'ASIA[0-9A-Z]{16}'
'ghp_[A-Za-z0-9]{36}'
'gho_[A-Za-z0-9]{36}'
'github_pat_[A-Za-z0-9_]{20,}'
'xox[baprs]-[0-9A-Za-z-]{10,48}'
'sk_live_[0-9a-zA-Z]{20,}'
)
regex="$(IFS='|'; echo "${patterns[*]}")"
set +e
hits=$(grep -RInE --exclude-dir=vendor --exclude-dir=node_modules --exclude-dir=dist "${regex}" "${SRC_DIR}" 2>/dev/null)
set -e
if [ -n "${hits}" ]; then
{
echo '{"status":"fail","error":"secret_pattern_detected","hits":['
echo "${hits}" | head -n 50 | python3 - <<'PY'
import json,sys
lines=[l.rstrip('
') for l in sys.stdin.readlines() if l.strip()]
print("
".join([json.dumps({"hit":l})+"," for l in lines]).rstrip(','))
PY
echo ']}'
}
exit 1
fi
printf '{"status":"ok","src_dir":%s}
' "$(json_escape "${SRC_DIR}")"
echo "no_secrets: ok"

View File

@@ -1,5 +1,4 @@
#!/usr/bin/env bash
# ============================================================================
# Copyright (C) 2025 Moko Consulting <hello@mokoconsulting.tech>
#
# This file is part of a Moko Consulting project.
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# FILE INFORMATION
# DEFGROUP: Scripts.Validate
# INGROUP: MokoStandards.Release
# REPO: https://github.com/mokoconsulting-tech/MokoStandards
# PATH: /scripts/validate/paths.sh
# VERSION: 01.00.00
# BRIEF: Detects Windows-style path literals in source content under src.
# NOTE: Reconstructed from a merge that interleaved the old repo-wide scanner
#       (ROOT_DIR/EXCLUDES/MATCHES) with the new src-scoped JSON reporter.
# ============================================================================

set -euo pipefail

# Directory to scan; callers may override via the SRC_DIR env var.
SRC_DIR="${SRC_DIR:-src}"

# json_escape STRING: print STRING as a JSON-quoted literal on stdout.
json_escape() {
  python3 -c 'import json,sys; print(json.dumps(sys.argv[1]))' "$1"
}

[ -d "${SRC_DIR}" ] || {
  printf '{"status":"fail","error":%s}\n' "$(json_escape "src directory missing")"
  exit 1
}

# Target patterns:
# - drive letter paths like C:\foo\bar
# - any literal backslash (covers escaped separators in string literals)
regex='[A-Za-z]:\\|\\'

# grep exits 1 on "no match"; suspend -e around it so that is not fatal.
set +e
hits=$(grep -RInE --exclude-dir=vendor --exclude-dir=node_modules --exclude-dir=dist "${regex}" "${SRC_DIR}" 2>/dev/null)
set -e

if [ -n "${hits}" ]; then
  {
    echo '{"status":"fail","error":"windows_path_literal_detected","hits":['
    # Cap the report at 50 hits and JSON-encode each matching line.
    printf '%s\n' "${hits}" | head -n 50 | python3 - <<'PY'
import json
import sys

lines = [l.rstrip('\n') for l in sys.stdin if l.strip()]
print(",\n".join(json.dumps({"hit": l}) for l in lines))
PY
    echo ']}'
  }
  exit 1
fi

printf '{"status":"ok","src_dir":%s}\n' "$(json_escape "${SRC_DIR}")"
echo "paths: ok"

View File

@@ -0,0 +1,70 @@
# ============================================================================
# Copyright (C) 2025 Moko Consulting <hello@mokoconsulting.tech>
#
# This file is part of a Moko Consulting project.
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# FILE INFORMATION
# DEFGROUP: Scripts.Validate
# INGROUP: MokoStandards.Release
# REPO: https://github.com/mokoconsulting-tech/MokoStandards
# PATH: /scripts/validate/php_syntax.sh
# VERSION: 01.00.00
# BRIEF: Runs PHP lint over all PHP files under src. If PHP is unavailable, returns ok with a warning payload.
# NOTE:
# ============================================================================
set -euo pipefail

# Root of the tree to lint; callers may override via the SRC_DIR env var.
SRC_DIR="${SRC_DIR:-src}"

# json_escape STRING: emit STRING as a JSON string literal on stdout.
json_escape() {
  local raw="$1"
  python3 -c 'import json, sys; print(json.dumps(sys.argv[1]))' "${raw}"
}
# Fail fast with a JSON payload when the source tree is absent.
[ -d "${SRC_DIR}" ] || {
  printf '{"status":"fail","error":%s}\n' "$(json_escape "src directory missing")"
  exit 1
}

# PHP is optional on runners: degrade to a warning instead of failing the build.
if ! command -v php >/dev/null 2>&1; then
  printf '{"status":"ok","warning":"php_not_available","src_dir":%s}\n' "$(json_escape "${SRC_DIR}")"
  echo "php_syntax: ok (php not available)"
  exit 0
fi

failed=0
checked=0
bad_files=()
# NUL-delimited walk so paths with spaces or newlines survive; process
# substitution (not a pipe) keeps the counters in the current shell.
while IFS= read -r -d '' f; do
  checked=$((checked+1))
  # Suppress the "No syntax errors" stdout noise; php -l still reports
  # parse errors on stderr, which stays visible in the CI log.
  if ! php -l "$f" >/dev/null; then
    failed=1
    bad_files+=("$f")
  fi
done < <(find "${SRC_DIR}" -type f -name '*.php' -print0)

if [ "${failed}" -ne 0 ]; then
  # FIX: previously only a boolean flag survived; name the offending files
  # (capped at 25) so the failure is actionable.
  printf '{"status":"fail","error":"php_lint_failed","files_checked":%s,"failed_files":[' "${checked}"
  sep=''
  for f in "${bad_files[@]:0:25}"; do
    printf '%s%s' "${sep}" "$(json_escape "$f")"
    sep=','
  done
  printf ']}\n'
  exit 1
fi

printf '{"status":"ok","files_checked":%s}\n' "${checked}"
echo "php_syntax: ok"

View File

@@ -1,4 +1,3 @@
#!/usr/bin/env bash
# ============================================================================
# Copyright (C) 2025 Moko Consulting <hello@mokoconsulting.tech>
#
@@ -20,69 +19,47 @@
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# FILE INFORMATION
# DEFGROUP: MokoStandards
# INGROUP: GitHub.Actions.CI
# DEFGROUP: Scripts.Validate
# INGROUP: MokoStandards.Release
# REPO: https://github.com/mokoconsulting-tech/MokoStandards
# PATH: /scripts/tabs.sh
# VERSION: 03.05.00
# BRIEF: CI validator that blocks tab characters in YAML files and enforces two-space indentation policy.
# NOTE: YAML is indentation sensitive; tabs are noncompliant. This validator fails the job when any tab is detected.
# PATH: /scripts/validate/tabs.sh
# VERSION: 01.00.00
# BRIEF: Detects tab characters in text files under src and fails if any are present.
# NOTE:
# ============================================================================
set -euo pipefail
log() {
printf '%s\n' "$*"
SRC_DIR="${SRC_DIR:-src}"
json_escape() {
python3 -c 'import json,sys; print(json.dumps(sys.argv[1]))' "$1"
}
fail=0
[ -d "${SRC_DIR}" ] || {
printf '{"status":"fail","error":%s}
' "$(json_escape "src directory missing")"
exit 1
}
log "[tabs] Scope: *.yml, *.yaml"
log "[tabs] Policy: tab characters are noncompliant; replace with two spaces"
python3 - <<'PY' "${SRC_DIR}"
import json
import sys
from pathlib import Path
# Find YAML files tracked in git first. If not in a git repo, fall back to filesystem search.
yaml_files=""
if command -v git >/dev/null 2>&1 && git rev-parse --is-inside-work-tree >/dev/null 2>&1; then
yaml_files="$(git ls-files '*.yml' '*.yaml' 2>/dev/null || true)"
else
yaml_files="$(find . -type f \( -name '*.yml' -o -name '*.yaml' \) -print 2>/dev/null || true)"
fi
src = Path(sys.argv[1])
exclude_dirs = {'vendor','node_modules','dist','.git','build','tmp'}
if [ -z "${yaml_files}" ]; then
log "[tabs] No YAML files found. Status: PASS"
exit 0
fi
hits = []
scanned = 0
log "[tabs] YAML files discovered: $(printf '%s\n' "${yaml_files}" | wc -l | tr -d ' ')"
while IFS= read -r f; do
[ -n "$f" ] || continue
# Skip deleted paths in edge cases
[ -f "$f" ] || continue
# Detect literal tab characters and report with line numbers.
if LC_ALL=C grep -n $'\t' -- "$f" >/dev/null 2>&1; then
log "[tabs] FAIL: tab detected in: $f"
# Emit an actionable audit trail: line number plus the exact line content.
# Use sed to avoid grep prefix repetition and keep the output deterministic.
LC_ALL=C grep -n $'\t' -- "$f" | while IFS= read -r hit; do
log " ${hit}"
done
log "[tabs] Remediation: replace each tab with exactly two spaces"
log "[tabs] Example: sed -i 's/\\t/ /g' \"$f\""
fail=1
else
log "[tabs] PASS: $f"
fi
done <<< "${yaml_files}"
if [ "$fail" -ne 0 ]; then
log "[tabs] Status: FAIL"
exit 1
fi
log "[tabs] Status: PASS"
for p in src.rglob('*'):
if not p.is_file():
continue
if any(part in exclude_dirs for part in p.parts):
continue
try:
data = p.read_bytes()
except Exception:
continue
if b'

View File

@@ -0,0 +1,115 @@
# ============================================================================
# Copyright (C) 2025 Moko Consulting <hello@mokoconsulting.tech>
#
# This file is part of a Moko Consulting project.
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# FILE INFORMATION
# DEFGROUP: Scripts.Validate
# INGROUP: MokoStandards.Release
# REPO: https://github.com/mokoconsulting-tech/MokoStandards
# PATH: /scripts/validate/version_alignment.sh
# VERSION: 01.00.00
# BRIEF: Validates alignment between inferred version, CHANGELOG.md section, and manifest <version> value.
# NOTE:
# ============================================================================
set -euo pipefail

# Scan root; callers may override via the SRC_DIR env var.
SRC_DIR="${SRC_DIR:-src}"

# json_escape STRING: print STRING as a JSON-quoted literal.
# FIX: the previous one-line `python3 - <<'PY' ...; PY; }` form is invalid
# shell -- a heredoc body cannot share the command's line, so the script text
# was never fed to python. Use python3 -c instead.
json_escape() {
  python3 -c 'import json, sys; print(json.dumps(sys.argv[1]))' "$1"
}
# fail MESSAGE [EXTRA_JSON]: print a JSON failure payload and exit 1.
# EXTRA_JSON, when given, must be a pre-rendered `"key":value` fragment.
fail() {
  local message="$1"; shift || true
  local extra_fields="${1:-}"
  local payload
  payload="{\"status\":\"fail\",\"error\":$(json_escape "${message}")"
  if [ -n "${extra_fields}" ]; then
    payload="${payload},${extra_fields}"
  fi
  printf '%s\n' "${payload}}"
  exit 1
}
[ -d "${SRC_DIR}" ] || fail "src directory missing" "\"src_dir\":$(json_escape "${SRC_DIR}")"
# infer_version_from_ref REF: map a branch/tag name onto a semver string.
# Accepts dev/X.Y.Z, rc/X.Y.Z, version/X.Y.Z, vX.Y.Z and vX.Y.Z-rc.
# Prints X.Y.Z on success; returns 1 when REF matches no known scheme.
infer_version_from_ref() {
  local ref="$1"
  if [[ "${ref}" =~ ^(dev|rc|version)/[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
    printf '%s' "${ref#*/}"
    return 0
  fi
  if [[ "${ref}" =~ ^v[0-9]+\.[0-9]+\.[0-9]+(-rc)?$ ]]; then
    ref="${ref#v}"
    printf '%s' "${ref%-rc}"
    return 0
  fi
  return 1
}
# Resolve the release version: explicit env overrides win over ref-derived values.
VERSION_RESOLVED="${RELEASE_VERSION:-${VERSION:-}}"
if [ -z "${VERSION_RESOLVED}" ]; then
  # Fall back to the CI ref name (branch or tag) when no env override exists.
  if [ -n "${GITHUB_REF_NAME:-}" ]; then
    VERSION_RESOLVED="$(infer_version_from_ref "${GITHUB_REF_NAME}" 2>/dev/null || true)"
  fi
fi
if [ -z "${VERSION_RESOLVED}" ]; then
  # Last resort: derive from the most recent reachable git tag, if any.
  tag="$(git describe --tags --abbrev=0 2>/dev/null || true)"
  if [ -n "${tag}" ]; then
    VERSION_RESOLVED="$(infer_version_from_ref "${tag}" 2>/dev/null || true)"
  fi
fi
[ -n "${VERSION_RESOLVED}" ] || fail "Unable to infer version" "\"ref_name\":$(json_escape "${GITHUB_REF_NAME:-}")"
# Enforce strict X.Y.Z shape before comparing against other sources.
echo "${VERSION_RESOLVED}" | grep -Eq '^[0-9]+\.[0-9]+\.[0-9]+$' || fail "Invalid version format" "\"version\":$(json_escape "${VERSION_RESOLVED}")"
# The changelog must contain a "## [X.Y.Z]" section for the release version.
[ -f CHANGELOG.md ] || fail "CHANGELOG.md missing"
if ! grep -Fq "## [${VERSION_RESOLVED}]" CHANGELOG.md; then
  fail "CHANGELOG.md missing version section" "\"version\":$(json_escape "${VERSION_RESOLVED}")"
fi
# Locate the Joomla manifest: prefer templateDetails.xml at the src root,
# otherwise take the first (sorted) conventional manifest filename found.
MANIFEST=""
if [ -f "${SRC_DIR}/templateDetails.xml" ]; then
  MANIFEST="${SRC_DIR}/templateDetails.xml"
else
  MANIFEST="$(find "${SRC_DIR}" -maxdepth 6 -type f \( -name 'templateDetails.xml' -o -name 'pkg_*.xml' -o -name 'com_*.xml' -o -name 'mod_*.xml' -o -name 'plg_*.xml' \) 2>/dev/null | sort | head -n 1 || true)"
fi
[ -n "${MANIFEST}" ] || fail "Manifest not found under src" "\"src_dir\":$(json_escape "${SRC_DIR}")"
# Extract <version> from the manifest with Python (xmllint not guaranteed).
manifest_version="$(python3 - <<'PY' "${MANIFEST}"
import sys
import xml.etree.ElementTree as ET
p=sys.argv[1]
root=ET.parse(p).getroot()
ver=root.findtext('version') or ''
print(ver.strip())
PY
)"
[ -n "${manifest_version}" ] || fail "Manifest missing <version>" "\"manifest\":$(json_escape "${MANIFEST}")"
# The manifest must agree exactly with the resolved release version.
if [ "${manifest_version}" != "${VERSION_RESOLVED}" ]; then
  fail "Version mismatch" "\"version\":$(json_escape "${VERSION_RESOLVED}"),\"manifest\":$(json_escape "${MANIFEST}"),\"manifest_version\":$(json_escape "${manifest_version}")"
fi
printf '{"status":"ok","version":%s,"manifest":%s}
' "$(json_escape "${VERSION_RESOLVED}")" "$(json_escape "${MANIFEST}")"
echo "version_alignment: ok"

View File

@@ -0,0 +1,85 @@
# ============================================================================
# Copyright (C) 2025 Moko Consulting <hello@mokoconsulting.tech>
#
# This file is part of a Moko Consulting project.
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# FILE INFORMATION
# DEFGROUP: Scripts.Validate
# INGROUP: MokoStandards.Release
# REPO: https://github.com/mokoconsulting-tech/MokoStandards
# PATH: /scripts/validate/xml_wellformed.sh
# VERSION: 01.00.00
# BRIEF: Validates that all XML files under src are well-formed using Python XML parsing.
# NOTE:
# ============================================================================
set -euo pipefail

# Scan root; callers may override via the SRC_DIR env var.
SRC_DIR="${SRC_DIR:-src}"

# json_escape STRING: print STRING as a JSON-quoted literal.
# FIX: the previous one-line `python3 - <<'PY' ...; PY; }` form is invalid
# shell (a heredoc body cannot share the command's line); use python3 -c.
json_escape() {
  python3 -c 'import json, sys; print(json.dumps(sys.argv[1]))' "$1"
}

# emit_ok [EXTRA_JSON]: print a JSON success payload, optionally merging a
# pre-rendered `"key":value` fragment.
emit_ok() {
  local extra="${1:-}"
  if [ -n "${extra}" ]; then
    printf '{"status":"ok",%s}\n' "${extra}"
  else
    printf '{"status":"ok"}\n'
  fi
}

# emit_fail MESSAGE [EXTRA_JSON]: print a JSON failure payload. Does not exit;
# callers decide whether to terminate.
emit_fail() {
  local msg="$1"
  local extra="${2:-}"
  if [ -n "${extra}" ]; then
    printf '{"status":"fail","error":%s,%s}\n' "$(json_escape "${msg}")" "${extra}"
  else
    printf '{"status":"fail","error":%s}\n' "$(json_escape "${msg}")"
  fi
}
# Refuse to run when the scan root is missing.
[ -d "${SRC_DIR}" ] || { emit_fail "src directory missing" "\"src_dir\":$(json_escape "${SRC_DIR}")"; exit 1; }

# Walk every *.xml under src and attempt a parse; collect failures.
# A non-zero python exit aborts the script via `set -e`.
python3 - <<'PY' "${SRC_DIR}"
import json
import sys
from pathlib import Path
import xml.etree.ElementTree as ET

src = Path(sys.argv[1])
xml_files = sorted(p for p in src.rglob('*.xml') if p.is_file())

bad = []
for p in xml_files:
    try:
        ET.parse(p)
    except Exception as e:
        bad.append({"path": str(p), "error": str(e)})

if bad:
    # Cap the detailed report at 25 entries to keep CI logs readable.
    print(json.dumps({"status": "fail", "error": "XML parse failed", "bad_count": len(bad), "bad": bad[:25]}, ensure_ascii=False))
    sys.exit(1)

print(json.dumps({"status": "ok", "src_dir": str(src), "xml_count": len(xml_files)}, ensure_ascii=False))
PY

echo "xml_wellformed: ok"