Update version_branch.yml
This commit is contained in:
245
.github/workflows/version_branch.yml
vendored
245
.github/workflows/version_branch.yml
vendored
@@ -182,39 +182,40 @@ jobs:
|
|||||||
moko_init "Sanity check workflow file"
|
moko_init "Sanity check workflow file"
|
||||||
|
|
||||||
python3 - <<'PY'
|
python3 - <<'PY'
|
||||||
from pathlib import Path

# Path of the workflow file this heredoc script sanity-checks.
WORKFLOW = Path('.github/workflows/version_branch.yml')


def byte_to_line(blob: bytes, idx: int) -> int:
    """Return the 1-based line number containing byte offset *idx* in *blob*."""
    # Count newlines strictly before the offset; use the \n escape sequence,
    # never a literal newline inside the bytes literal (that is a syntax error).
    return blob[:idx].count(b'\n') + 1


def scan_disallowed(data: bytes) -> list:
    """Scan *data* for disallowed bytes.

    Disallow literal tab (0x09) and other ASCII control characters except
    LF (0x0A) and CR (0x0D). Returns (kind, offset, byte_value) tuples.
    """
    bad = []
    for i, b in enumerate(data):
        if b == 0x09:
            bad.append(('TAB', i, b))
        elif b < 0x20 and b not in (0x0A, 0x0D):
            bad.append(('CTRL', i, b))
    return bad


def main() -> None:
    """Fail the job if the workflow file is missing or contains control bytes."""
    if not WORKFLOW.exists():
        raise SystemExit('[FATAL] Missing workflow file: .github/workflows/version_branch.yml')
    data = WORKFLOW.read_bytes()
    bad = scan_disallowed(data)
    if bad:
        print('[ERROR] Disallowed characters detected in workflow file:')
        # Report line numbers without printing the raw characters; cap output at 200.
        for kind, off, val in bad[:200]:
            line_no = byte_to_line(data, off)
            if kind == 'TAB':
                print(f' line {line_no}: TAB_PRESENT')
            else:
                print(f' line {line_no}: CTRL_0x{val:02X}_PRESENT')
        raise SystemExit(2)
    print('[INFO] Sanity check passed')


if __name__ == '__main__':
    main()
PY
|
PY
|
||||||
|
|
||||||
- name: Enterprise policy gate
|
- name: Enterprise policy gate
|
||||||
run: |
|
run: |
|
||||||
@@ -327,121 +328,121 @@ PY
|
|||||||
moko_init "Version bump"
|
moko_init "Version bump"
|
||||||
|
|
||||||
python3 - <<'PY'
|
python3 - <<'PY'
|
||||||
import json
import os
import re
from pathlib import Path
from collections import defaultdict
from datetime import datetime, timezone

# Use escape sequences only. Do not introduce literal tab characters.
# NOTE: \t inside a raw-string regex is an escape sequence, not a literal tab,
# so it is allowed here; dropping it would stop matching tab-padded markup.
header_re = re.compile(r"(?im)(VERSION[ \t]*:[ \t]*)([0-9]{2}[.][0-9]{2}[.][0-9]{2})")
# \b prevents longer tag names (e.g. <extensionPoint>) from being treated as manifests.
manifest_marker_re = re.compile(r"(?is)<extension\b")
xml_version_re = re.compile(r"(?is)(<version[ \t]*>)([^<]*?)(</version[ \t]*>)")
xml_date_res = [
    re.compile(r"(?is)(<creationDate[ \t]*>)([^<]*?)(</creationDate[ \t]*>)"),
    re.compile(r"(?is)(<date[ \t]*>)([^<]*?)(</date[ \t]*>)"),
    re.compile(r"(?is)(<releaseDate[ \t]*>)([^<]*?)(</releaseDate[ \t]*>)"),
]

# Binary / generated formats that never carry a version header.
skip_ext = {
    ".json", ".png", ".jpg", ".jpeg", ".gif", ".svg", ".ico", ".pdf",
    ".zip", ".7z", ".tar", ".gz", ".woff", ".woff2", ".ttf", ".otf",
    ".mp3", ".mp4",
}
skip_dirs = {".git", ".github", "node_modules", "vendor", ".venv", "dist", "build"}
# Release artifacts at the repository root that must keep their published version.
exclude_root = {"update.xml", "updates.xml"}


def transform_text(original: str, new_version: str, stamp: str, is_manifest: bool, counters) -> str:
    """Apply version/date substitutions to *original* and tally them in *counters*.

    The VERSION header is rewritten in every text file; <version> and date tags
    are only rewritten when *is_manifest* is true. Returns the transformed text.
    """
    text, n1 = header_re.subn(lambda m: m.group(1) + new_version, original)
    if n1:
        counters["header_replacements"] += n1
    if is_manifest:
        text, n2 = xml_version_re.subn(lambda m: m.group(1) + new_version + m.group(3), text)
        if n2:
            counters["xml_version_replacements"] += n2
        for rx in xml_date_res:
            text, n3 = rx.subn(lambda m: m.group(1) + stamp + m.group(3), text)
            if n3:
                counters["xml_date_replacements"] += n3
    return text


def main() -> None:
    """Walk the repository, bump version strings, and write a JSON report.

    Controlled by env vars: NEW_VERSION, VERSION_TEXT, REPORT_ONLY, REPORT_PATH.
    In report-only mode no file is rewritten; the report lists would-be changes.
    """
    new_version = (os.environ.get("NEW_VERSION") or "").strip()
    version_text = (os.environ.get("VERSION_TEXT") or "").strip()
    report_only = (os.environ.get("REPORT_ONLY") or "").strip().lower() == "true"
    report_path = (os.environ.get("REPORT_PATH") or "").strip()

    stamp = datetime.now(timezone.utc).strftime("%Y-%m-%d")
    root = Path(".").resolve()

    counters = defaultdict(int)
    updated_files = []
    updated_manifests = []
    would_update_files = []
    would_update_manifests = []

    def should_skip(p: Path) -> bool:
        # Skip binary-ish extensions and anything under an excluded directory.
        if p.suffix.lower() in skip_ext:
            counters["skipped_by_ext"] += 1
            return True
        parts = {x.lower() for x in p.parts}
        if any(d in parts for d in skip_dirs):
            counters["skipped_by_dir"] += 1
            return True
        return False

    for p in root.rglob("*"):
        if not p.is_file():
            continue
        if should_skip(p):
            continue
        if p.parent == root and p.name.lower() in exclude_root:
            counters["skipped_release_artifacts"] += 1
            continue

        try:
            original = p.read_text(encoding="utf-8", errors="replace")
        except Exception:
            # Unreadable file: record and move on rather than failing the run.
            counters["skipped_read_error"] += 1
            continue

        # Manifest detection uses the untransformed content.
        is_manifest = (p.suffix.lower() == ".xml" and manifest_marker_re.search(original) is not None)
        text = transform_text(original, new_version, stamp, is_manifest, counters)

        if text != original:
            would_update_files.append(str(p))
            if is_manifest:
                would_update_manifests.append(str(p))
            if not report_only:
                p.write_text(text, encoding="utf-8")
                updated_files.append(str(p))
                if is_manifest:
                    updated_manifests.append(str(p))

    report = {
        "mode": "report_only" if report_only else "apply",
        "new_version": new_version,
        "version_text": version_text,
        "stamp_utc": stamp,
        "counters": dict(counters),
        "updated_files": updated_files,
        "updated_manifests": updated_manifests,
        "would_update_files": would_update_files,
        "would_update_manifests": would_update_manifests,
    }

    Path(report_path).write_text(json.dumps(report, indent=2), encoding="utf-8")

    print("[INFO] Report written to:", report_path)
    print("[INFO] Mode:", report["mode"])
    print("[INFO] Would update files:", len(would_update_files))
    print("[INFO] Would update manifests:", len(would_update_manifests))
    print("[INFO] Updated files:", len(updated_files))
    print("[INFO] Updated manifests:", len(updated_manifests))


if __name__ == "__main__":
    main()
PY
|
PY
|
||||||
|
|
||||||
- name: Commit changes
|
- name: Commit changes
|
||||||
if: ${{ env.REPORT_ONLY != 'true' }}
|
if: ${{ env.REPORT_ONLY != 'true' }}
|
||||||
|
|||||||
Reference in New Issue
Block a user