chore: add .github/workflows/repo_health.yml from MokoStandards
This commit is contained in:
730
.github/workflows/repo_health.yml
vendored
Normal file
730
.github/workflows/repo_health.yml
vendored
Normal file
@@ -0,0 +1,730 @@
|
||||
# ============================================================================
# Copyright (C) 2025 Moko Consulting <hello@mokoconsulting.tech>
#
# This file is part of a Moko Consulting project.
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
# FILE INFORMATION
# DEFGROUP: GitHub.Workflow
# INGROUP: MokoStandards.Validation
# REPO: https://github.com/mokoconsulting-tech/MokoStandards
# PATH: /.github/workflows/repo_health.yml
# VERSION: 04.01.00
# BRIEF: Enforces repository guardrails by validating release configuration, scripts governance, tooling availability, and core repository health artifacts.
# NOTE: Field is user-managed.
# ============================================================================
|
||||
|
||||
---
name: Repo Health

# One run per ref; a newer push cancels the in-flight run for the same ref.
concurrency:
  group: repo-health-${{ github.repository }}-${{ github.ref }}
  cancel-in-progress: true

defaults:
  run:
    shell: bash

on:
  workflow_dispatch:
    inputs:
      profile:
        # NOTE(review): the original description was truncated in the source
        # after "al"; the final sentence is reconstructed — confirm against
        # the canonical MokoStandards copy.
        description: >-
          Which configuration profile to validate. release checks SFTP
          variables used by release pipeline. scripts checks baseline script
          prerequisites. repo runs repository health only. all runs every
          check.
        required: true
        default: all
        type: choice
        options:
          - all
          - release
          - scripts
          - repo
  pull_request:
    paths:
      - .github/workflows/**
      - scripts/**
      - docs/**
      - dev/**
  push:
    branches:
      - main
    paths:
      - .github/workflows/**
      - scripts/**
      - docs/**
      - dev/**

permissions:
  contents: read

env:
  # Release policy - Repository Variables Only
  RELEASE_REQUIRED_REPO_VARS: RS_FTP_PATH_SUFFIX
  RELEASE_OPTIONAL_REPO_VARS: DEV_FTP_SUFFIX

  # Scripts governance policy
  # Note: directories listed without a trailing slash.
  # Explicit empty string: a bare `key:` is YAML null, not "", and the shell
  # consumers below expect a (possibly empty) string to split on commas.
  SCRIPTS_REQUIRED_DIRS: ""
  SCRIPTS_ALLOWED_DIRS: scripts,scripts/fix,scripts/lib,scripts/release,scripts/run,scripts/validate

  # Repo health policy
  # Files are listed as-is; directories must end with a trailing slash.
  REPO_REQUIRED_ARTIFACTS: README.md,LICENSE,CHANGELOG.md,CONTRIBUTING.md,CODE_OF_CONDUCT.md,.github/workflows/,src/
  REPO_OPTIONAL_FILES: SECURITY.md,GOVERNANCE.md,.editorconfig,.gitattributes,.gitignore,README.md,docs/
  REPO_DISALLOWED_DIRS: ""
  REPO_DISALLOWED_FILES: TODO.md,todo.md

  # Extended checks toggles (string on purpose: env values are strings)
  EXTENDED_CHECKS: "true"

  # File / directory variables (moved to top-level env)
  DOCS_INDEX: docs/docs-index.md
  SCRIPT_DIR: scripts
  WORKFLOWS_DIR: .github/workflows
  SHELLCHECK_PATTERN: '*.sh'
  SPDX_FILE_GLOBS: '*.sh,*.php,*.js,*.ts,*.css,*.xml,*.yml,*.yaml'
|
||||
|
||||
jobs:
  # Gate job: every other job `needs` this one and only runs when the actor
  # is an admin/maintainer or on the hardcoded allowlist.
  access_check:
    name: Access control
    runs-on: ubuntu-latest
    timeout-minutes: 10
    permissions:
      contents: read

    outputs:
      allowed: ${{ steps.perm.outputs.allowed }}
      permission: ${{ steps.perm.outputs.permission }}

    steps:
      - name: Check actor permission (admin only)
        id: perm
        uses: actions/github-script@v7
        with:
          # FIX: fall back to the workflow token when the GH_TOKEN secret is
          # not configured; otherwise the collaborator-permission API call
          # runs unauthenticated and every actor is denied.
          github-token: ${{ secrets.GH_TOKEN || github.token }}
          script: |
            const actor = context.actor;
            let permission = "unknown";
            let allowed = false;
            let method = "";

            // Hardcoded authorized users — always allowed
            const authorizedUsers = ["jmiller-moko", "github-actions[bot]"];
            if (authorizedUsers.includes(actor)) {
              allowed = true;
              permission = "admin";
              method = "hardcoded allowlist";
            } else {
              // Check via API for other actors
              try {
                const res = await github.rest.repos.getCollaboratorPermissionLevel({
                  owner: context.repo.owner,
                  repo: context.repo.repo,
                  username: actor,
                });
                permission = (res?.data?.permission || "unknown").toLowerCase();
                allowed = permission === "admin" || permission === "maintain";
                method = "repo collaborator API";
              } catch (error) {
                core.warning(`Could not fetch permissions for '${actor}': ${error.message}`);
                permission = "unknown";
                allowed = false;
                method = "API error";
              }
            }

            core.setOutput("permission", permission);
            core.setOutput("allowed", allowed ? "true" : "false");

            const lines = [
              "## 🔐 Access Authorization",
              "",
              "| Field | Value |",
              "|-------|-------|",
              `| **Actor** | \`${actor}\` |`,
              `| **Repository** | \`${context.repo.owner}/${context.repo.repo}\` |`,
              `| **Permission** | \`${permission}\` |`,
              `| **Method** | ${method} |`,
              `| **Authorized** | ${allowed} |`,
              `| **Trigger** | \`${context.eventName}\` |`,
              `| **Branch** | \`${context.ref.replace('refs/heads/', '')}\` |`,
              "",
              allowed
                ? `✅ ${actor} authorized (${method})`
                : `❌ ${actor} is NOT authorized. Requires admin or maintain role, or be in the hardcoded allowlist.`,
            ];

            await core.summary.addRaw(lines.join("\n")).write();

      - name: Deny execution when not permitted
        if: ${{ steps.perm.outputs.allowed != 'true' }}
        run: |
          set -euo pipefail
          printf '%s\n' 'ERROR: Access denied. Admin permission required.' >> "${GITHUB_STEP_SUMMARY}"
          exit 1
|
||||
|
||||
release_config:
|
||||
name: Release configuration
|
||||
needs: access_check
|
||||
if: ${{ needs.access_check.outputs.allowed == 'true' }}
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 20
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Guardrails release vars
|
||||
env:
|
||||
PROFILE_RAW: ${{ github.event.inputs.profile }}
|
||||
RS_FTP_PATH_SUFFIX: ${{ vars.RS_FTP_PATH_SUFFIX }}
|
||||
DEV_FTP_SUFFIX: ${{ vars.DEV_FTP_SUFFIX }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
profile="${PROFILE_RAW:-all}"
|
||||
case "${profile}" in
|
||||
all|release|scripts|repo) ;;
|
||||
*)
|
||||
printf '%s\n' "ERROR: Unknown profile: ${profile}" >> "${GITHUB_STEP_SUMMARY}"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
|
||||
if [ "${profile}" = 'scripts' ] || [ "${profile}" = 'repo' ]; then
|
||||
{
|
||||
printf '%s\n' '### Release configuration (Repository Variables)'
|
||||
printf '%s\n' "Profile: ${profile}"
|
||||
printf '%s\n' 'Status: SKIPPED'
|
||||
printf '%s\n' 'Reason: profile excludes release validation'
|
||||
printf '\n'
|
||||
} >> "${GITHUB_STEP_SUMMARY}"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
IFS=',' read -r -a required <<< "${RELEASE_REQUIRED_REPO_VARS}"
|
||||
IFS=',' read -r -a optional <<< "${RELEASE_OPTIONAL_REPO_VARS}"
|
||||
|
||||
missing=()
|
||||
missing_optional=()
|
||||
|
||||
for k in "${required[@]}"; do
|
||||
v="${!k:-}"
|
||||
[ -z "${v}" ] && missing+=("${k}")
|
||||
done
|
||||
|
||||
for k in "${optional[@]}"; do
|
||||
v="${!k:-}"
|
||||
[ -z "${v}" ] && missing_optional+=("${k}")
|
||||
done
|
||||
|
||||
{
|
||||
printf '%s\n' '### Release configuration (Repository Variables)'
|
||||
printf '%s\n' "Profile: ${profile}"
|
||||
printf '%s\n' '| Variable | Status |'
|
||||
printf '%s\n' '|---|---|'
|
||||
printf '%s\n' "| RS_FTP_PATH_SUFFIX | ${RS_FTP_PATH_SUFFIX:-NOT SET} |"
|
||||
printf '%s\n' "| DEV_FTP_SUFFIX | ${DEV_FTP_SUFFIX:-NOT SET} |"
|
||||
printf '\n'
|
||||
} >> "${GITHUB_STEP_SUMMARY}"
|
||||
|
||||
if [ "${#missing_optional[@]}" -gt 0 ]; then
|
||||
{
|
||||
printf '%s\n' '### Missing optional repository variables'
|
||||
for m in "${missing_optional[@]}"; do printf '%s\n' "- ${m}"; done
|
||||
printf '\n'
|
||||
} >> "${GITHUB_STEP_SUMMARY}"
|
||||
fi
|
||||
|
||||
if [ "${#missing[@]}" -gt 0 ]; then
|
||||
{
|
||||
printf '%s\n' '### Missing required repository variables'
|
||||
for m in "${missing[@]}"; do printf '%s\n' "- ${m}"; done
|
||||
printf '%s\n' 'ERROR: Guardrails failed. Missing required repository variables.'
|
||||
} >> "${GITHUB_STEP_SUMMARY}"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
{
|
||||
printf '%s\n' '### Repository variables validation result'
|
||||
printf '%s\n' 'Status: OK'
|
||||
printf '%s\n' 'All required repository variables present.'
|
||||
printf '%s\n' ''
|
||||
printf '%s\n' '**Note**: Organization secrets (RS_FTP_HOST, RS_FTP_USER, etc.) are validated at deployment time, not in repository health checks.'
|
||||
printf '\n'
|
||||
} >> "${GITHUB_STEP_SUMMARY}"
|
||||
|
||||
scripts_governance:
|
||||
name: Scripts governance
|
||||
needs: access_check
|
||||
if: ${{ needs.access_check.outputs.allowed == 'true' }}
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 15
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Scripts folder checks
|
||||
env:
|
||||
PROFILE_RAW: ${{ github.event.inputs.profile }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
profile="${PROFILE_RAW:-all}"
|
||||
case "${profile}" in
|
||||
all|release|scripts|repo) ;;
|
||||
*)
|
||||
printf '%s\n' "ERROR: Unknown profile: ${profile}" >> "${GITHUB_STEP_SUMMARY}"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
|
||||
if [ "${profile}" = 'release' ] || [ "${profile}" = 'repo' ]; then
|
||||
{
|
||||
printf '%s\n' '### Scripts governance'
|
||||
printf '%s\n' "Profile: ${profile}"
|
||||
printf '%s\n' 'Status: SKIPPED'
|
||||
printf '%s\n' 'Reason: profile excludes scripts governance'
|
||||
printf '\n'
|
||||
} >> "${GITHUB_STEP_SUMMARY}"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
if [ ! -d "${SCRIPT_DIR}" ]; then
|
||||
{
|
||||
printf '%s\n' '### Scripts governance'
|
||||
printf '%s\n' 'Status: OK (advisory)'
|
||||
printf '%s\n' 'scripts/ directory not present. No scripts governance enforced.'
|
||||
printf '\n'
|
||||
} >> "${GITHUB_STEP_SUMMARY}"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
IFS=',' read -r -a required_dirs <<< "${SCRIPTS_REQUIRED_DIRS}"
|
||||
IFS=',' read -r -a allowed_dirs <<< "${SCRIPTS_ALLOWED_DIRS}"
|
||||
|
||||
missing_dirs=()
|
||||
unapproved_dirs=()
|
||||
|
||||
for d in "${required_dirs[@]}"; do
|
||||
req="${d%/}"
|
||||
[ ! -d "${req}" ] && missing_dirs+=("${req}/")
|
||||
done
|
||||
|
||||
while IFS= read -r d; do
|
||||
allowed=false
|
||||
for a in "${allowed_dirs[@]}"; do
|
||||
a_norm="${a%/}"
|
||||
[ "${d%/}" = "${a_norm}" ] && allowed=true
|
||||
done
|
||||
[ "${allowed}" = false ] && unapproved_dirs+=("${d%/}/")
|
||||
done < <(find "${SCRIPT_DIR}" -maxdepth 1 -mindepth 1 -type d 2>/dev/null | sed 's#^\./##')
|
||||
|
||||
{
|
||||
printf '%s\n' '### Scripts governance'
|
||||
printf '%s\n' "Profile: ${profile}"
|
||||
printf '%s\n' '| Area | Status | Notes |'
|
||||
printf '%s\n' '|---|---|---|'
|
||||
|
||||
if [ "${#missing_dirs[@]}" -gt 0 ]; then
|
||||
printf '%s\n' '| Required directories | Warning | Missing required subfolders |'
|
||||
else
|
||||
printf '%s\n' '| Required directories | OK | All required subfolders present |'
|
||||
fi
|
||||
|
||||
if [ "${#unapproved_dirs[@]}" -gt 0 ]; then
|
||||
printf '%s\n' '| Directory policy | Warning | Unapproved directories detected |'
|
||||
else
|
||||
printf '%s\n' '| Directory policy | OK | No unapproved directories |'
|
||||
fi
|
||||
|
||||
printf '%s\n' '| Enforcement mode | Advisory | scripts folder is optional |'
|
||||
printf '\n'
|
||||
|
||||
if [ "${#missing_dirs[@]}" -gt 0 ]; then
|
||||
printf '%s\n' 'Missing required script directories:'
|
||||
for m in "${missing_dirs[@]}"; do printf '%s\n' "- ${m}"; done
|
||||
printf '\n'
|
||||
else
|
||||
printf '%s\n' 'Missing required script directories: none.'
|
||||
printf '\n'
|
||||
fi
|
||||
|
||||
if [ "${#unapproved_dirs[@]}" -gt 0 ]; then
|
||||
printf '%s\n' 'Unapproved script directories detected:'
|
||||
for m in "${unapproved_dirs[@]}"; do printf '%s\n' "- ${m}"; done
|
||||
printf '\n'
|
||||
else
|
||||
printf '%s\n' 'Unapproved script directories detected: none.'
|
||||
printf '\n'
|
||||
fi
|
||||
|
||||
printf '%s\n' 'Scripts governance completed in advisory mode.'
|
||||
printf '\n'
|
||||
} >> "${GITHUB_STEP_SUMMARY}"
|
||||
|
||||
repo_health:
|
||||
name: Repository health
|
||||
needs: access_check
|
||||
if: ${{ needs.access_check.outputs.allowed == 'true' }}
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 20
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Repository health checks
|
||||
env:
|
||||
PROFILE_RAW: ${{ github.event.inputs.profile }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
profile="${PROFILE_RAW:-all}"
|
||||
case "${profile}" in
|
||||
all|release|scripts|repo) ;;
|
||||
*)
|
||||
printf '%s\n' "ERROR: Unknown profile: ${profile}" >> "${GITHUB_STEP_SUMMARY}"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
|
||||
if [ "${profile}" = 'release' ] || [ "${profile}" = 'scripts' ]; then
|
||||
{
|
||||
printf '%s\n' '### Repository health'
|
||||
printf '%s\n' "Profile: ${profile}"
|
||||
printf '%s\n' 'Status: SKIPPED'
|
||||
printf '%s\n' 'Reason: profile excludes repository health'
|
||||
printf '\n'
|
||||
} >> "${GITHUB_STEP_SUMMARY}"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
IFS=',' read -r -a required_artifacts <<< "${REPO_REQUIRED_ARTIFACTS}"
|
||||
IFS=',' read -r -a optional_files <<< "${REPO_OPTIONAL_FILES}"
|
||||
IFS=',' read -r -a disallowed_dirs <<< "${REPO_DISALLOWED_DIRS}"
|
||||
IFS=',' read -r -a disallowed_files <<< "${REPO_DISALLOWED_FILES}"
|
||||
|
||||
missing_required=()
|
||||
missing_optional=()
|
||||
|
||||
for item in "${required_artifacts[@]}"; do
|
||||
if printf '%s' "${item}" | grep -q '/$'; then
|
||||
d="${item%/}"
|
||||
[ ! -d "${d}" ] && missing_required+=("${item}")
|
||||
else
|
||||
[ ! -f "${item}" ] && missing_required+=("${item}")
|
||||
fi
|
||||
done
|
||||
|
||||
# Optional entries: handle files and directories (trailing slash indicates dir)
|
||||
for f in "${optional_files[@]}"; do
|
||||
if printf '%s' "${f}" | grep -q '/$'; then
|
||||
d="${f%/}"
|
||||
[ ! -d "${d}" ] && missing_optional+=("${f}")
|
||||
else
|
||||
[ ! -f "${f}" ] && missing_optional+=("${f}")
|
||||
fi
|
||||
done
|
||||
|
||||
for d in "${disallowed_dirs[@]}"; do
|
||||
d_norm="${d%/}"
|
||||
[ -d "${d_norm}" ] && missing_required+=("${d_norm}/ (disallowed)")
|
||||
done
|
||||
|
||||
for f in "${disallowed_files[@]}"; do
|
||||
[ -f "${f}" ] && missing_required+=("${f} (disallowed)")
|
||||
done
|
||||
|
||||
git fetch origin --prune
|
||||
|
||||
dev_paths=()
|
||||
dev_branches=()
|
||||
|
||||
# Look for remote branches matching origin/dev*.
|
||||
# A plain origin/dev is considered invalid; we require dev/<something> branches.
|
||||
while IFS= read -r b; do
|
||||
name="${b#origin/}"
|
||||
if [ "${name}" = 'dev' ]; then
|
||||
dev_branches+=("${name}")
|
||||
else
|
||||
dev_paths+=("${name}")
|
||||
fi
|
||||
done < <(git branch -r --list 'origin/dev*' | sed 's/^ *//')
|
||||
|
||||
# If there are no dev/* branches, fail the guardrail.
|
||||
if [ "${#dev_paths[@]}" -eq 0 ]; then
|
||||
missing_required+=("dev/* branch (e.g. dev/01.00.00)")
|
||||
fi
|
||||
|
||||
# If a plain dev branch exists (origin/dev), flag it as invalid.
|
||||
if [ "${#dev_branches[@]}" -gt 0 ]; then
|
||||
missing_required+=("invalid branch dev (must be dev/<version>)")
|
||||
fi
|
||||
|
||||
content_warnings=()
|
||||
|
||||
if [ -f 'CHANGELOG.md' ] && ! grep -Eq '^# Changelog' CHANGELOG.md; then
|
||||
content_warnings+=("CHANGELOG.md missing '# Changelog' header")
|
||||
fi
|
||||
|
||||
if [ -f 'CHANGELOG.md' ] && grep -Eq '^[# ]*Unreleased' CHANGELOG.md; then
|
||||
content_warnings+=("CHANGELOG.md contains Unreleased section (review release readiness)")
|
||||
fi
|
||||
|
||||
if [ -f 'LICENSE' ] && ! grep -qiE 'GNU GENERAL PUBLIC LICENSE|GPL' LICENSE; then
|
||||
content_warnings+=("LICENSE does not look like a GPL text")
|
||||
fi
|
||||
|
||||
if [ -f 'README.md' ] && ! grep -qiE 'moko|Moko' README.md; then
|
||||
content_warnings+=("README.md missing expected brand keyword")
|
||||
fi
|
||||
|
||||
export PROFILE_RAW="${profile}"
|
||||
export MISSING_REQUIRED="$(printf '%s\n' "${missing_required[@]:-}")"
|
||||
export MISSING_OPTIONAL="$(printf '%s\n' "${missing_optional[@]:-}")"
|
||||
export CONTENT_WARNINGS="$(printf '%s\n' "${content_warnings[@]:-}")"
|
||||
|
||||
report_json="$(python3 - <<'PY'
|
||||
import json
|
||||
import os
|
||||
|
||||
profile = os.environ.get('PROFILE_RAW') or 'all'
|
||||
|
||||
missing_required = os.environ.get('MISSING_REQUIRED', '').splitlines() if os.environ.get('MISSING_REQUIRED') else []
|
||||
missing_optional = os.environ.get('MISSING_OPTIONAL', '').splitlines() if os.environ.get('MISSING_OPTIONAL') else []
|
||||
content_warnings = os.environ.get('CONTENT_WARNINGS', '').splitlines() if os.environ.get('CONTENT_WARNINGS') else []
|
||||
|
||||
out = {
|
||||
'profile': profile,
|
||||
'missing_required': [x for x in missing_required if x],
|
||||
'missing_optional': [x for x in missing_optional if x],
|
||||
'content_warnings': [x for x in content_warnings if x],
|
||||
}
|
||||
|
||||
print(json.dumps(out, indent=2))
|
||||
PY
|
||||
)"
|
||||
|
||||
{
|
||||
printf '%s\n' '### Repository health'
|
||||
printf '%s\n' "Profile: ${profile}"
|
||||
printf '%s\n' '| Metric | Value |'
|
||||
printf '%s\n' '|---|---|'
|
||||
printf '%s\n' "| Missing required | ${#missing_required[@]} |"
|
||||
printf '%s\n' "| Missing optional | ${#missing_optional[@]} |"
|
||||
printf '%s\n' "| Content warnings | ${#content_warnings[@]} |"
|
||||
printf '\n'
|
||||
|
||||
printf '%s\n' '### Guardrails report (JSON)'
|
||||
printf '%s\n' '```json'
|
||||
printf '%s\n' "${report_json}"
|
||||
printf '%s\n' '```'
|
||||
printf '\n'
|
||||
} >> "${GITHUB_STEP_SUMMARY}"
|
||||
|
||||
if [ "${#missing_required[@]}" -gt 0 ]; then
|
||||
{
|
||||
printf '%s\n' '### Missing required repo artifacts'
|
||||
for m in "${missing_required[@]}"; do printf '%s\n' "- ${m}"; done
|
||||
printf '%s\n' 'ERROR: Guardrails failed. Missing required repository artifacts.'
|
||||
printf '\n'
|
||||
} >> "${GITHUB_STEP_SUMMARY}"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ "${#missing_optional[@]}" -gt 0 ]; then
|
||||
{
|
||||
printf '%s\n' '### Missing optional repo artifacts'
|
||||
for m in "${missing_optional[@]}"; do printf '%s\n' "- ${m}"; done
|
||||
printf '\n'
|
||||
} >> "${GITHUB_STEP_SUMMARY}"
|
||||
fi
|
||||
|
||||
if [ "${#content_warnings[@]}" -gt 0 ]; then
|
||||
{
|
||||
printf '%s\n' '### Repo content warnings'
|
||||
for m in "${content_warnings[@]}"; do printf '%s\n' "- ${m}"; done
|
||||
printf '\n'
|
||||
} >> "${GITHUB_STEP_SUMMARY}"
|
||||
fi
|
||||
|
||||
extended_enabled="${EXTENDED_CHECKS:-true}"
|
||||
extended_findings=()
|
||||
|
||||
if [ "${extended_enabled}" = 'true' ]; then
|
||||
# CODEOWNERS presence
|
||||
if [ -f '.github/CODEOWNERS' ] || [ -f 'CODEOWNERS' ] || [ -f 'docs/CODEOWNERS' ]; then
|
||||
:
|
||||
else
|
||||
extended_findings+=("CODEOWNERS not found (.github/CODEOWNERS preferred)")
|
||||
fi
|
||||
|
||||
# Workflow pinning advisory: flag uses @main/@master
|
||||
if ls "${WORKFLOWS_DIR}"/*.yml >/dev/null 2>&1 || ls "${WORKFLOWS_DIR}"/*.yaml >/dev/null 2>&1; then
|
||||
bad_refs="$(grep -RIn --include='*.yml' --include='*.yaml' -E '^[[:space:]]*uses:[[:space:]]*[^#]+@(main|master)\b' "${WORKFLOWS_DIR}" 2>/dev/null || true)"
|
||||
if [ -n "${bad_refs}" ]; then
|
||||
extended_findings+=("Workflows reference actions @main/@master (pin versions): see log excerpt")
|
||||
{
|
||||
printf '%s\n' '### Workflow pinning advisory'
|
||||
printf '%s\n' 'Found uses: entries pinned to main/master:'
|
||||
printf '%s\n' '```'
|
||||
printf '%s\n' "${bad_refs}"
|
||||
printf '%s\n' '```'
|
||||
printf '\n'
|
||||
} >> "${GITHUB_STEP_SUMMARY}"
|
||||
fi
|
||||
fi
|
||||
|
||||
# Docs index link integrity (docs/docs-index.md)
|
||||
if [ -f "${DOCS_INDEX}" ]; then
|
||||
missing_links="$(python3 - <<'PY'
|
||||
import os
|
||||
import re
|
||||
|
||||
idx = os.environ.get('DOCS_INDEX', 'docs/docs-index.md')
|
||||
base = os.getcwd()
|
||||
|
||||
bad = []
|
||||
pat = re.compile(r'\[[^\]]+\]\(([^)]+)\)')
|
||||
|
||||
with open(idx, 'r', encoding='utf-8') as f:
|
||||
for line in f:
|
||||
for m in pat.findall(line):
|
||||
link = m.strip()
|
||||
if link.startswith('http://') or link.startswith('https://') or link.startswith('#') or link.startswith('mailto:'):
|
||||
continue
|
||||
if link.startswith('/'):
|
||||
rel = link.lstrip('/')
|
||||
else:
|
||||
rel = os.path.normpath(os.path.join(os.path.dirname(idx), link))
|
||||
rel = rel.split('#', 1)[0]
|
||||
rel = rel.split('?', 1)[0]
|
||||
if not rel:
|
||||
continue
|
||||
p = os.path.join(base, rel)
|
||||
if not os.path.exists(p):
|
||||
bad.append(rel)
|
||||
|
||||
print('\n'.join(sorted(set(bad))))
|
||||
PY
|
||||
)"
|
||||
if [ -n "${missing_links}" ]; then
|
||||
extended_findings+=("docs/docs-index.md contains broken relative links")
|
||||
{
|
||||
printf '%s\n' '### Docs index link integrity'
|
||||
printf '%s\n' 'Broken relative links:'
|
||||
while IFS= read -r l; do [ -n "${l}" ] && printf '%s\n' "- ${l}"; done <<< "${missing_links}"
|
||||
printf '\n'
|
||||
} >> "${GITHUB_STEP_SUMMARY}"
|
||||
fi
|
||||
fi
|
||||
|
||||
# ShellCheck advisory
|
||||
if [ -d "${SCRIPT_DIR}" ]; then
|
||||
if ! command -v shellcheck >/dev/null 2>&1; then
|
||||
sudo apt-get update -qq
|
||||
sudo apt-get install -y shellcheck >/dev/null
|
||||
fi
|
||||
|
||||
sc_out=''
|
||||
while IFS= read -r shf; do
|
||||
[ -z "${shf}" ] && continue
|
||||
out_one="$(shellcheck -S warning -x "${shf}" 2>/dev/null || true)"
|
||||
if [ -n "${out_one}" ]; then
|
||||
sc_out="${sc_out}${out_one}\n"
|
||||
fi
|
||||
done < <(find "${SCRIPT_DIR}" -type f -name "${SHELLCHECK_PATTERN}" 2>/dev/null | sort)
|
||||
|
||||
if [ -n "${sc_out}" ]; then
|
||||
extended_findings+=("ShellCheck warnings detected (advisory)")
|
||||
sc_head="$(printf '%s' "${sc_out}" | head -n 200)"
|
||||
{
|
||||
printf '%s\n' '### ShellCheck (advisory)'
|
||||
printf '%s\n' '```'
|
||||
printf '%s\n' "${sc_head}"
|
||||
printf '%s\n' '```'
|
||||
printf '\n'
|
||||
} >> "${GITHUB_STEP_SUMMARY}"
|
||||
fi
|
||||
fi
|
||||
|
||||
# SPDX header advisory for common source types
|
||||
spdx_missing=()
|
||||
IFS=',' read -r -a spdx_globs <<< "${SPDX_FILE_GLOBS}"
|
||||
spdx_args=()
|
||||
for g in "${spdx_globs[@]}"; do spdx_args+=("${g}"); done
|
||||
|
||||
while IFS= read -r f; do
|
||||
[ -z "${f}" ] && continue
|
||||
if ! head -n 40 "${f}" | grep -q 'SPDX-License-Identifier:'; then
|
||||
spdx_missing+=("${f}")
|
||||
fi
|
||||
done < <(git ls-files "${spdx_args[@]}" 2>/dev/null || true)
|
||||
|
||||
if [ "${#spdx_missing[@]}" -gt 0 ]; then
|
||||
extended_findings+=("SPDX header missing in some tracked files (advisory)")
|
||||
{
|
||||
printf '%s\n' '### SPDX header advisory'
|
||||
printf '%s\n' 'Files missing SPDX-License-Identifier (first 40 lines scan):'
|
||||
for f in "${spdx_missing[@]}"; do printf '%s\n' "- ${f}"; done
|
||||
printf '\n'
|
||||
} >> "${GITHUB_STEP_SUMMARY}"
|
||||
fi
|
||||
|
||||
# Git hygiene advisory: branches older than 180 days (remote)
|
||||
stale_cutoff_days=180
|
||||
stale_branches="$(git for-each-ref --format='%(refname:short) %(committerdate:unix)' refs/remotes/origin 2>/dev/null | awk -v now="$(date +%s)" -v days="${stale_cutoff_days}" '{if (now-$2 [...]
|
||||
if [ -n "${stale_branches}" ]; then
|
||||
extended_findings+=("Stale remote branches detected (advisory)")
|
||||
{
|
||||
printf '%s\n' '### Git hygiene advisory'
|
||||
printf '%s\n' "Branches with last commit older than ${stale_cutoff_days} days (sample up to 50):"
|
||||
while IFS= read -r b; do [ -n "${b}" ] && printf '%s\n' "- ${b}"; done <<< "${stale_branches}"
|
||||
printf '\n'
|
||||
} >> "${GITHUB_STEP_SUMMARY}"
|
||||
fi
|
||||
fi
|
||||
|
||||
{
|
||||
printf '%s\n' '### Guardrails coverage matrix'
|
||||
printf '%s\n' '| Domain | Status | Notes |'
|
||||
printf '%s\n' '|---|---|---|'
|
||||
printf '%s\n' '| Access control | OK | Admin-only execution gate |'
|
||||
printf '%s\n' '| Release variables | OK | Repository variables validation |'
|
||||
printf '%s\n' '| Scripts governance | OK | Directory policy and advisory reporting |'
|
||||
printf '%s\n' '| Repo required artifacts | OK | Required, optional, disallowed enforcement |'
|
||||
printf '%s\n' '| Repo content heuristics | OK | Brand, license, changelog structure |'
|
||||
if [ "${extended_enabled}" = 'true' ]; then
|
||||
if [ "${#extended_findings[@]}" -gt 0 ]; then
|
||||
printf '%s\n' '| Extended checks | Warning | See extended findings below |'
|
||||
else
|
||||
printf '%s\n' '| Extended checks | OK | No findings |'
|
||||
fi
|
||||
else
|
||||
printf '%s\n' '| Extended checks | SKIPPED | EXTENDED_CHECKS disabled |'
|
||||
fi
|
||||
printf '\n'
|
||||
} >> "${GITHUB_STEP_SUMMARY}"
|
||||
|
||||
if [ "${extended_enabled}" = 'true' ] && [ "${#extended_findings[@]}" -gt 0 ]; then
|
||||
{
|
||||
printf '%s\n' '### Extended findings (advisory)'
|
||||
for f in "${extended_findings[@]}"; do printf '%s\n' "- ${f}"; done
|
||||
printf '\n'
|
||||
} >> "${GITHUB_STEP_SUMMARY}"
|
||||
fi
|
||||
|
||||
printf '%s\n' 'Repository health guardrails passed.' >> "${GITHUB_STEP_SUMMARY}"
|
||||
Reference in New Issue
Block a user