Merge pull request #54 from mokoconsulting-tech/dev/security-update-2025-01-17

Unused import: a module is imported (using the import statement) but the imported module is never used anywhere in the code.
This commit was merged in pull request #54.
This commit is contained in:
2026-01-17 17:32:48 -06:00
committed by GitHub
5 changed files with 1063 additions and 1063 deletions

View File

@@ -32,210 +32,208 @@ USAGE: ./scripts/release/package_extension.py [output_dir] [version]
"""
import argparse
import os
import shutil
import sys
import zipfile
from datetime import datetime
from pathlib import Path
from typing import Set

# Add lib directory to path so the sibling helper modules are importable.
sys.path.insert(0, str(Path(__file__).parent.parent / "lib"))

try:
    import common
    import extension_utils
except ImportError:
    print("ERROR: Cannot import required libraries", file=sys.stderr)
    sys.exit(1)
# Exclusion patterns for packaging: any path component matching one of these
# (exactly, or via shell-style wildcard) is omitted from the distributable ZIP.
EXCLUDE_PATTERNS = {
    # Version control
    ".git", ".gitignore", ".gitattributes",
    # IDE
    ".vscode", ".idea", "*.sublime-*",
    # Development
    "node_modules", "vendor", ".env", ".env.*",
    # Build artifacts
    "dist", "build", ".phpunit.cache",
    # Development tool caches and artifacts
    ".phpstan.cache", ".psalm", ".rector",
    "phpmd-cache", ".php-cs-fixer.cache", ".phplint-cache",
    # OS files
    ".DS_Store", "Thumbs.db",
    # Logs
    "*.log",
    # Tests
    "tests", "test", "Tests",
    # CI/CD
    ".github",
    # Scripts
    "scripts",
    # Docs (can be included if needed)
    "docs",
    # Config files
    "composer.json", "composer.lock",
    "package.json", "package-lock.json",
    "phpunit.xml", "phpstan.neon", "phpcs.xml",
    "codeception.yml", "psalm.xml", ".php-cs-fixer.php",
    # Others
    "README.md", "CHANGELOG.md", "CONTRIBUTING.md",
    "CODE_OF_CONDUCT.md", "SECURITY.md", "GOVERNANCE.md",
    "Makefile",
}
def should_exclude(path: Path, base_path: Path, exclude_patterns: Set[str]) -> bool:
    """
    Check if a path should be excluded from packaging.

    Args:
        path: Path to check
        base_path: Base directory path
        exclude_patterns: Set of exclusion patterns

    Returns:
        True if should be excluded
    """
    # Local import keeps the module's top-level dependencies minimal; hoisted
    # out of the loop so it is not re-executed per path component.
    import fnmatch

    relative_path = path.relative_to(base_path)

    # A path is excluded if ANY component of its relative path matches a
    # pattern, either exactly or via a shell-style wildcard (e.g. "*.log").
    for part in relative_path.parts:
        if part in exclude_patterns:
            return True
        for pattern in exclude_patterns:
            if "*" in pattern and fnmatch.fnmatch(part, pattern):
                return True
    return False
def create_package(
    src_dir: str,
    output_dir: str,
    version: str = None,
    repo_name: str = None,
    exclude_patterns: Set[str] = None
) -> Path:
    """
    Create a distributable ZIP package for a Joomla or Dolibarr extension.

    Args:
        src_dir: Source directory containing extension files
        output_dir: Output directory for ZIP file
        version: Version string (auto-detected if not provided)
        repo_name: Repository name for ZIP file naming
        exclude_patterns: Patterns to exclude from packaging

    Returns:
        Path to created ZIP file
    """
    src_path = Path(src_dir)
    if not src_path.is_dir():
        common.die(f"Source directory not found: {src_dir}")

    # Detect extension platform and get info
    ext_info = extension_utils.get_extension_info(src_dir)
    if not ext_info:
        common.die(f"No Joomla or Dolibarr extension found in {src_dir}")

    # Determine version (fall back to the manifest-declared version)
    if not version:
        version = ext_info.version

    # Determine repo name (fall back to the git repository directory name)
    if not repo_name:
        try:
            repo_root = common.git_root()
            repo_name = repo_root.name
        except Exception:
            repo_name = "extension"

    # Determine exclusion patterns
    if exclude_patterns is None:
        exclude_patterns = EXCLUDE_PATTERNS

    # Create output directory
    output_path = Path(output_dir)
    common.ensure_dir(output_path)

    # Generate ZIP filename; the timestamp keeps successive builds distinct
    timestamp = datetime.now().strftime("%Y%m%d-%H%M%S")
    platform_suffix = f"{ext_info.platform.value}-{ext_info.extension_type}"
    zip_filename = f"{repo_name}-{version}-{platform_suffix}-{timestamp}.zip"
    zip_path = output_path / zip_filename

    # Remove existing ZIP if present
    if zip_path.exists():
        zip_path.unlink()

    common.log_section("Creating Extension Package")
    common.log_kv("Platform", ext_info.platform.value.upper())
    common.log_kv("Extension", ext_info.name)
    common.log_kv("Type", ext_info.extension_type)
    common.log_kv("Version", version)
    common.log_kv("Source", src_dir)
    common.log_kv("Output", str(zip_path))
    print()

    # Create ZIP file
    file_count = 0
    with zipfile.ZipFile(zip_path, 'w', zipfile.ZIP_DEFLATED) as zipf:
        for item in src_path.rglob("*"):
            if item.is_file():
                # Skip anything matched by the exclusion patterns
                if should_exclude(item, src_path, exclude_patterns):
                    continue
                # Add to ZIP with path relative to the source root
                arcname = item.relative_to(src_path)
                zipf.write(item, arcname)
                file_count += 1
                if file_count % 10 == 0:
                    common.log_step(f"Added {file_count} files...")

    # Get ZIP file size
    zip_size = zip_path.stat().st_size
    zip_size_mb = zip_size / (1024 * 1024)

    print()
    common.log_success(f"Package created: {zip_path.name}")
    common.log_kv("Files", str(file_count))
    common.log_kv("Size", f"{zip_size_mb:.2f} MB")

    # Output JSON for machine consumption
    result = {
        "status": "ok",
        "platform": ext_info.platform.value,
        "extension": ext_info.name,
        "ext_type": ext_info.extension_type,
        "version": version,
        "package": str(zip_path),
        "files": file_count,
        "size_bytes": zip_size
    }
    print()
    common.json_output(result)

    return zip_path
def main() -> int:
"""Main entry point."""
parser = argparse.ArgumentParser(
description="Package Joomla or Dolibarr extension as distributable ZIP",
formatter_class=argparse.RawDescriptionHelpFormatter,
epilog="""
"""Main entry point."""
parser = argparse.ArgumentParser(
description="Package Joomla or Dolibarr extension as distributable ZIP",
formatter_class=argparse.RawDescriptionHelpFormatter,
epilog="""
Examples:
# Package with auto-detected version
%(prog)s
@@ -251,73 +249,79 @@ Examples:
Supports both Joomla and Dolibarr extensions with automatic platform detection.
"""
)
parser.add_argument(
"output_dir",
nargs="?",
default="dist",
help="Output directory for ZIP file (default: dist)"
)
parser.add_argument(
"version",
nargs="?",
help="Version string (default: auto-detected from manifest)"
)
parser.add_argument(
"-s", "--src-dir",
default="src",
help="Source directory (default: src)"
)
parser.add_argument(
"--repo-name",
help="Repository name for ZIP filename (default: auto-detected)"
)
parser.add_argument(
"--include-docs",
action="store_true",
help="Include documentation files in package"
)
parser.add_argument(
"--include-tests",
action="store_true",
help="Include test files in package"
)
args = parser.parse_args()
try:
# Adjust exclusion patterns based on arguments
exclude_patterns = EXCLUDE_PATTERNS.copy()
if args.include_docs:
exclude_patterns.discard("docs")
exclude_patterns.discard("README.md")
exclude_patterns.discard("CHANGELOG.md")
if args.include_tests:
exclude_patterns.discard("tests")
exclude_patterns.discard("test")
exclude_patterns.discard("Tests")
# Create package
zip_path = create_package(
src_dir=args.src_dir,
output_dir=args.output_dir,
version=args.version,
repo_name=args.repo_name,
exclude_patterns=exclude_patterns
)
return 0
except Exception as e:
common.log_error(f"Packaging failed: {e}")
result = {
"status": "error",
"error": str(e)
}
common.json_output(result)
return 1
)
parser.add_argument(
"output_dir",
nargs="?",
default="dist",
help="Output directory for ZIP file (default: dist)"
)
parser.add_argument(
"version",
nargs="?",
help="Version string (default: auto-detected from manifest)"
)
parser.add_argument(
"-s", "--src-dir",
default="src",
help="Source directory (default: src)"
)
parser.add_argument(
"--repo-name",
help="Repository name for ZIP filename (default: auto-detected)"
)
parser.add_argument(
"--include-docs",
action="store_true",
help="Include documentation files in package"
)
parser.add_argument(
"--include-tests",
action="store_true",
help="Include test files in package"
)
args = parser.parse_args()
try:
# Adjust exclusion patterns based on arguments
exclude_patterns = EXCLUDE_PATTERNS.copy()
if args.include_docs:
exclude_patterns.discard("docs")
exclude_patterns.discard("README.md")
exclude_patterns.discard("CHANGELOG.md")
if args.include_tests:
exclude_patterns.discard("tests")
exclude_patterns.discard("test")
exclude_patterns.discard("Tests")
# Create package
zip_path = create_package(
src_dir=args.src_dir,
output_dir=args.output_dir,
version=args.version,
repo_name=args.repo_name,
exclude_patterns=exclude_patterns
)
result = {
"status": "success",
"zip_path": str(zip_path)
}
common.json_output(result)
return 0
except Exception as e:
common.log_error(f"Packaging failed: {e}")
result = {
"status": "error",
"error": str(e)
}
common.json_output(result)
return 1
# Script entry point: propagate main()'s integer status as the process exit code.
if __name__ == "__main__":
    sys.exit(main())

View File

@@ -40,11 +40,10 @@ from typing import Dict
# Add lib directory to path so the shared helper module is importable.
sys.path.insert(0, str(Path(__file__).parent.parent / "lib"))

try:
    import common
except ImportError:
    print("ERROR: Cannot import required libraries", file=sys.stderr)
    sys.exit(1)
# ============================================================================
@@ -52,73 +51,73 @@ except ImportError:
# ============================================================================
def get_component_structure(name: str, description: str, author: str) -> Dict[str, str]:
    """Get directory structure and files for a component.

    Args:
        name: Human-readable extension name
        description: Extension description for the manifest
        author: Author name for the manifest

    Returns:
        Mapping of relative file path -> file content for the scaffold.
    """
    safe_name = name.lower().replace(" ", "_")
    com_name = f"com_{safe_name}"

    manifest = f"""<?xml version="1.0" encoding="utf-8"?>
<extension type="component" version="4.0" method="upgrade">
    <name>{name}</name>
    <author>{author}</author>
    <creationDate>{datetime.now().strftime("%Y-%m-%d")}</creationDate>
    <copyright>Copyright (C) {datetime.now().year} {author}</copyright>
    <license>GPL-3.0-or-later</license>
    <authorEmail>hello@example.com</authorEmail>
    <authorUrl>https://example.com</authorUrl>
    <version>1.0.0</version>
    <description>{description}</description>
    <files folder="site">
        <folder>src</folder>
    </files>
    <administration>
        <menu>{name}</menu>
        <files folder="admin">
            <folder>services</folder>
            <folder>sql</folder>
            <folder>src</folder>
        </files>
    </administration>
</extension>
"""
    return {
        f"{com_name}.xml": manifest,
        "site/src/.gitkeep": "",
        "admin/services/provider.php": f"<?php\n// Service provider for {name}\n",
        "admin/sql/install.mysql.utf8.sql": "-- Installation SQL\n",
        "admin/sql/uninstall.mysql.utf8.sql": "-- Uninstallation SQL\n",
        "admin/src/.gitkeep": "",
    }
def get_module_structure(name: str, description: str, author: str, client: str = "site") -> Dict[str, str]:
"""Get directory structure and files for a module."""
safe_name = name.lower().replace(" ", "_")
mod_name = f"mod_{safe_name}"
manifest = f"""<?xml version="1.0" encoding="utf-8"?>
"""Get directory structure and files for a module."""
safe_name = name.lower().replace(" ", "_")
mod_name = f"mod_{safe_name}"
manifest = f"""<?xml version="1.0" encoding="utf-8"?>
<extension type="module" version="4.0" client="{client}" method="upgrade">
<name>{name}</name>
<author>{author}</author>
<creationDate>{datetime.now().strftime("%Y-%m-%d")}</creationDate>
<copyright>Copyright (C) {datetime.now().year} {author}</copyright>
<license>GPL-3.0-or-later</license>
<authorEmail>hello@example.com</authorEmail>
<authorUrl>https://example.com</authorUrl>
<version>1.0.0</version>
<description>{description}</description>
<name>{name}</name>
<author>{author}</author>
<creationDate>{datetime.now().strftime("%Y-%m-%d")}</creationDate>
<copyright>Copyright (C) {datetime.now().year} {author}</copyright>
<license>GPL-3.0-or-later</license>
<authorEmail>hello@example.com</authorEmail>
<authorUrl>https://example.com</authorUrl>
<version>1.0.0</version>
<description>{description}</description>
<files>
<filename module="{mod_name}">{mod_name}.php</filename>
<filename>{mod_name}.xml</filename>
<folder>tmpl</folder>
</files>
<files>
<filename module="{mod_name}">{mod_name}.php</filename>
<filename>{mod_name}.xml</filename>
<folder>tmpl</folder>
</files>
</extension>
"""
module_php = f"""<?php
module_php = f"""<?php
/**
* @package {name}
* @copyright Copyright (C) {datetime.now().year} {author}
@@ -130,8 +129,8 @@ defined('_JEXEC') or die;
// Module logic here
require JModuleHelper::getLayoutPath('mod_{safe_name}', $params->get('layout', 'default'));
"""
default_tmpl = f"""<?php
default_tmpl = f"""<?php
/**
* @package {name}
* @copyright Copyright (C) {datetime.now().year} {author}
@@ -141,41 +140,41 @@ require JModuleHelper::getLayoutPath('mod_{safe_name}', $params->get('layout', '
defined('_JEXEC') or die;
?>
<div class="{mod_name}">
<p><?php echo JText::_('MOD_{safe_name.upper()}_DESCRIPTION'); ?></p>
<p><?php echo JText::_('MOD_{safe_name.upper()}_DESCRIPTION'); ?></p>
</div>
"""
return {
f"{mod_name}.xml": manifest,
f"{mod_name}.php": module_php,
"tmpl/default.php": default_tmpl,
}
return {
f"{mod_name}.xml": manifest,
f"{mod_name}.php": module_php,
"tmpl/default.php": default_tmpl,
}
def get_plugin_structure(name: str, description: str, author: str, group: str = "system") -> Dict[str, str]:
"""Get directory structure and files for a plugin."""
safe_name = name.lower().replace(" ", "_")
plg_name = f"{safe_name}"
manifest = f"""<?xml version="1.0" encoding="utf-8"?>
"""Get directory structure and files for a plugin."""
safe_name = name.lower().replace(" ", "_")
plg_name = f"{safe_name}"
manifest = f"""<?xml version="1.0" encoding="utf-8"?>
<extension type="plugin" version="4.0" group="{group}" method="upgrade">
<name>plg_{group}_{safe_name}</name>
<author>{author}</author>
<creationDate>{datetime.now().strftime("%Y-%m-%d")}</creationDate>
<copyright>Copyright (C) {datetime.now().year} {author}</copyright>
<license>GPL-3.0-or-later</license>
<authorEmail>hello@example.com</authorEmail>
<authorUrl>https://example.com</authorUrl>
<version>1.0.0</version>
<description>{description}</description>
<name>plg_{group}_{safe_name}</name>
<author>{author}</author>
<creationDate>{datetime.now().strftime("%Y-%m-%d")}</creationDate>
<copyright>Copyright (C) {datetime.now().year} {author}</copyright>
<license>GPL-3.0-or-later</license>
<authorEmail>hello@example.com</authorEmail>
<authorUrl>https://example.com</authorUrl>
<version>1.0.0</version>
<description>{description}</description>
<files>
<filename plugin="{plg_name}">{plg_name}.php</filename>
</files>
<files>
<filename plugin="{plg_name}">{plg_name}.php</filename>
</files>
</extension>
"""
plugin_php = f"""<?php
plugin_php = f"""<?php
/**
* @package {name}
* @copyright Copyright (C) {datetime.now().year} {author}
@@ -188,54 +187,54 @@ use Joomla\\CMS\\Plugin\\CMSPlugin;
class Plg{group.capitalize()}{plg_name.capitalize()} extends CMSPlugin
{{
protected $autoloadLanguage = true;
protected $autoloadLanguage = true;
public function onContentPrepare($context, &$article, &$params, $limitstart = 0)
{{
// Plugin logic here
}}
public function onContentPrepare($context, &$article, &$params, $limitstart = 0)
{{
// Plugin logic here
}}
}}
"""
return {
f"plg_{group}_{safe_name}.xml": manifest,
f"{plg_name}.php": plugin_php,
}
return {
f"plg_{group}_{safe_name}.xml": manifest,
f"{plg_name}.php": plugin_php,
}
def get_template_structure(name: str, description: str, author: str) -> Dict[str, str]:
"""Get directory structure and files for a template."""
safe_name = name.lower().replace(" ", "_")
manifest = f"""<?xml version="1.0" encoding="utf-8"?>
"""Get directory structure and files for a template."""
safe_name = name.lower().replace(" ", "_")
manifest = f"""<?xml version="1.0" encoding="utf-8"?>
<extension type="template" version="4.0" client="site" method="upgrade">
<name>{safe_name}</name>
<creationDate>{datetime.now().strftime("%Y-%m-%d")}</creationDate>
<author>{author}</author>
<authorEmail>hello@example.com</authorEmail>
<authorUrl>https://example.com</authorUrl>
<copyright>Copyright (C) {datetime.now().year} {author}</copyright>
<license>GPL-3.0-or-later</license>
<version>1.0.0</version>
<description>{description}</description>
<name>{safe_name}</name>
<creationDate>{datetime.now().strftime("%Y-%m-%d")}</creationDate>
<author>{author}</author>
<authorEmail>hello@example.com</authorEmail>
<authorUrl>https://example.com</authorUrl>
<copyright>Copyright (C) {datetime.now().year} {author}</copyright>
<license>GPL-3.0-or-later</license>
<version>1.0.0</version>
<description>{description}</description>
<files>
<filename>index.php</filename>
<filename>templateDetails.xml</filename>
<folder>css</folder>
<folder>js</folder>
<folder>images</folder>
</files>
<files>
<filename>index.php</filename>
<filename>templateDetails.xml</filename>
<folder>css</folder>
<folder>js</folder>
<folder>images</folder>
</files>
<positions>
<position>header</position>
<position>main</position>
<position>footer</position>
</positions>
<positions>
<position>header</position>
<position>main</position>
<position>footer</position>
</positions>
</extension>
"""
index_php = f"""<?php
index_php = f"""<?php
/**
* @package {name}
* @copyright Copyright (C) {datetime.now().year} {author}
@@ -257,61 +256,61 @@ $wa->useStyle('template.{safe_name}')->useScript('template.{safe_name}');
<!DOCTYPE html>
<html lang="<?php echo $this->language; ?>" dir="<?php echo $this->direction; ?>">
<head>
<jdoc:include type="metas" />
<jdoc:include type="styles" />
<jdoc:include type="scripts" />
<jdoc:include type="metas" />
<jdoc:include type="styles" />
<jdoc:include type="scripts" />
</head>
<body>
<header>
<jdoc:include type="modules" name="header" style="html5" />
</header>
<main>
<jdoc:include type="component" />
</main>
<footer>
<jdoc:include type="modules" name="footer" style="html5" />
</footer>
<header>
<jdoc:include type="modules" name="header" style="html5" />
</header>
<main>
<jdoc:include type="component" />
</main>
<footer>
<jdoc:include type="modules" name="footer" style="html5" />
</footer>
</body>
</html>
"""
return {
"templateDetails.xml": manifest,
"index.php": index_php,
"css/template.css": "/* Template styles */\n",
"js/template.js": "// Template JavaScript\n",
"images/.gitkeep": "",
}
return {
"templateDetails.xml": manifest,
"index.php": index_php,
"css/template.css": "/* Template styles */\n",
"js/template.js": "// Template JavaScript\n",
"images/.gitkeep": "",
}
def get_package_structure(name: str, description: str, author: str) -> Dict[str, str]:
    """Get directory structure and files for a package.

    Args:
        name: Human-readable extension name
        description: Extension description for the manifest
        author: Author name for the manifest

    Returns:
        Mapping of relative file path -> file content for the scaffold.
    """
    safe_name = name.lower().replace(" ", "_")
    pkg_name = f"pkg_{safe_name}"

    manifest = f"""<?xml version="1.0" encoding="utf-8"?>
<extension type="package" version="4.0" method="upgrade">
    <name>{name}</name>
    <packagename>{safe_name}</packagename>
    <author>{author}</author>
    <creationDate>{datetime.now().strftime("%Y-%m-%d")}</creationDate>
    <copyright>Copyright (C) {datetime.now().year} {author}</copyright>
    <license>GPL-3.0-or-later</license>
    <authorEmail>hello@example.com</authorEmail>
    <authorUrl>https://example.com</authorUrl>
    <version>1.0.0</version>
    <description>{description}</description>
    <files folder="packages">
        <!-- Add extension packages here -->
    </files>
</extension>
"""
    return {
        f"{pkg_name}.xml": manifest,
        "packages/.gitkeep": "",
    }
# ============================================================================
@@ -319,59 +318,59 @@ def get_package_structure(name: str, description: str, author: str) -> Dict[str,
# ============================================================================
def create_extension(
    ext_type: str,
    name: str,
    description: str,
    author: str,
    output_dir: str = "src",
    **kwargs
) -> None:
    """
    Create extension scaffolding.

    Args:
        ext_type: Extension type (component, module, plugin, template, package)
        name: Extension name
        description: Extension description
        author: Author name
        output_dir: Output directory
        **kwargs: Additional type-specific options
            (``client`` for modules, ``group`` for plugins)
    """
    output_path = Path(output_dir)

    # Dispatch to the structure builder for the requested type
    if ext_type == "component":
        structure = get_component_structure(name, description, author)
    elif ext_type == "module":
        client = kwargs.get("client", "site")
        structure = get_module_structure(name, description, author, client)
    elif ext_type == "plugin":
        group = kwargs.get("group", "system")
        structure = get_plugin_structure(name, description, author, group)
    elif ext_type == "template":
        structure = get_template_structure(name, description, author)
    elif ext_type == "package":
        structure = get_package_structure(name, description, author)
    else:
        common.die(f"Unknown extension type: {ext_type}")

    # Create files
    common.log_section(f"Creating {ext_type}: {name}")
    for file_path, content in structure.items():
        full_path = output_path / file_path
        # Create parent directories
        full_path.parent.mkdir(parents=True, exist_ok=True)
        # Write file
        full_path.write_text(content, encoding="utf-8")
        common.log_success(f"Created: {file_path}")

    common.log_section("Scaffolding Complete")
    common.log_info(f"Extension files created in: {output_path}")
    common.log_info(f"Extension type: {ext_type}")
    common.log_info(f"Extension name: {name}")
# ============================================================================
@@ -379,11 +378,11 @@ def create_extension(
# ============================================================================
def main() -> None:
"""Main entry point."""
parser = argparse.ArgumentParser(
description="Create Joomla extension scaffolding",
formatter_class=argparse.RawDescriptionHelpFormatter,
epilog="""
"""Main entry point."""
parser = argparse.ArgumentParser(
description="Create Joomla extension scaffolding",
formatter_class=argparse.RawDescriptionHelpFormatter,
epilog="""
Examples:
# Create a component
%(prog)s component MyComponent "My Component Description" "John Doe"
@@ -400,49 +399,49 @@ Examples:
# Create a package
%(prog)s package mypackage "My Package Description" "John Doe"
"""
)
parser.add_argument(
"type",
choices=["component", "module", "plugin", "template", "package"],
help="Extension type to create"
)
parser.add_argument("name", help="Extension name")
parser.add_argument("description", help="Extension description")
parser.add_argument("author", help="Author name")
parser.add_argument(
"-o", "--output",
default="src",
help="Output directory (default: src)"
)
parser.add_argument(
"--client",
choices=["site", "administrator"],
default="site",
help="Module client (site or administrator)"
)
parser.add_argument(
"--group",
default="system",
help="Plugin group (system, content, user, etc.)"
)
args = parser.parse_args()
try:
create_extension(
ext_type=args.type,
name=args.name,
description=args.description,
author=args.author,
output_dir=args.output,
client=args.client,
group=args.group
)
except Exception as e:
common.log_error(f"Failed to create extension: {e}")
sys.exit(1)
)
parser.add_argument(
"type",
choices=["component", "module", "plugin", "template", "package"],
help="Extension type to create"
)
parser.add_argument("name", help="Extension name")
parser.add_argument("description", help="Extension description")
parser.add_argument("author", help="Author name")
parser.add_argument(
"-o", "--output",
default="src",
help="Output directory (default: src)"
)
parser.add_argument(
"--client",
choices=["site", "administrator"],
default="site",
help="Module client (site or administrator)"
)
parser.add_argument(
"--group",
default="system",
help="Plugin group (system, content, user, etc.)"
)
args = parser.parse_args()
try:
create_extension(
ext_type=args.type,
name=args.name,
description=args.description,
author=args.author,
output_dir=args.output,
client=args.client,
group=args.group
)
except Exception as e:
common.log_error(f"Failed to create extension: {e}")
sys.exit(1)
# Script entry point.
if __name__ == "__main__":
    main()

View File

@@ -33,149 +33,149 @@ BRIEF: Run all validation scripts
import subprocess
import sys
from pathlib import Path
from typing import Tuple

# Add lib directory to path so the shared helper module is importable.
sys.path.insert(0, str(Path(__file__).parent.parent / "lib"))

try:
    import common
except ImportError:
    print("ERROR: Cannot import required libraries", file=sys.stderr)
    sys.exit(1)
# Required validation scripts (must pass)
REQUIRED_SCRIPTS = [
    "scripts/validate/manifest.py",
    "scripts/validate/xml_wellformed.py",
    "scripts/validate/workflows.py",
]
# Optional validation scripts (failures are warnings)
OPTIONAL_SCRIPTS = [
    "scripts/validate/changelog.py",
    "scripts/validate/language_structure.py",
    "scripts/validate/license_headers.py",
    "scripts/validate/no_secrets.py",
    "scripts/validate/paths.py",
    "scripts/validate/php_syntax.py",
    "scripts/validate/tabs.py",
    "scripts/validate/version_alignment.py",
    "scripts/validate/version_hierarchy.py",
]
def run_validation_script(script_path: str) -> Tuple[bool, str]:
    """
    Run a single validation script in a subprocess.

    Args:
        script_path: Path to the validation script (relative or absolute).

    Returns:
        Tuple of (success, combined stdout+stderr output). A missing
        script or a failure to launch the interpreter yields
        (False, <error message>).
    """
    script = Path(script_path)
    if not script.exists():
        return (False, f"Script not found: {script_path}")
    try:
        # check=False: a non-zero exit code is reported through the returned
        # flag rather than raised as CalledProcessError.
        result = subprocess.run(
            ["python3", str(script)],
            capture_output=True,
            text=True,
            check=False
        )
        output = result.stdout + result.stderr
        success = result.returncode == 0
        return (success, output)
    except Exception as e:
        # Any launch failure (missing interpreter, OSError) counts as a
        # failed run rather than crashing the whole runner.
        return (False, f"Error running script: {e}")
def main() -> int:
    """Run all required and optional validation scripts and summarize results.

    Required-script failures make the run fail (exit 1); optional-script
    failures only produce warnings.

    Returns:
        0 if all required validations pass, 1 otherwise.
    """
    common.log_section("Running All Validations")
    print()
    total_passed = 0
    total_failed = 0
    total_skipped = 0
    # Fix: track required failures during the first pass instead of
    # re-running every required script a second time at the end. This also
    # counts a *missing* required script as a failure (previously a missing
    # required script was logged as FAILED but did not fail the run).
    required_failed = 0
    # Run required scripts
    common.log_info("=== Required Validations ===")
    print()
    for script in REQUIRED_SCRIPTS:
        script_name = Path(script).name
        common.log_info(f"Running {script_name}...")
        success, output = run_validation_script(script)
        if success:
            common.log_success(f"{script_name} passed")
            total_passed += 1
        else:
            common.log_error(f"{script_name} FAILED")
            if output:
                print(output)
            total_failed += 1
            required_failed += 1
        print()
    # Run optional scripts
    common.log_info("=== Optional Validations ===")
    print()
    for script in OPTIONAL_SCRIPTS:
        script_name = Path(script).name
        if not Path(script).exists():
            common.log_warn(f"{script_name} not found (skipped)")
            total_skipped += 1
            continue
        common.log_info(f"Running {script_name}...")
        success, output = run_validation_script(script)
        if success:
            common.log_success(f"{script_name} passed")
            total_passed += 1
        else:
            common.log_warn(f"{script_name} failed (optional)")
            if output:
                print(output[:500])  # Limit noisy optional output
            total_failed += 1
        print()
    # Summary
    common.log_section("Validation Summary")
    common.log_kv("Total Passed", str(total_passed))
    common.log_kv("Total Failed", str(total_failed))
    common.log_kv("Total Skipped", str(total_skipped))
    print()
    if required_failed > 0:
        common.log_error(f"{required_failed} required validation(s) failed")
        return 1
    common.log_success("All required validations passed!")
    if total_failed > 0:
        common.log_warn(f"{total_failed} optional validation(s) failed")
    return 0
if __name__ == "__main__":
    # Fix: the duplicated second sys.exit(main()) ran unconditionally on import.
    sys.exit(main())

View File

@@ -33,185 +33,184 @@ NOTE: Checks YAML syntax, structure, and best practices
import sys
from pathlib import Path
from typing import List, Tuple
# Add lib directory to path
sys.path.insert(0, str(Path(__file__).parent.parent / "lib"))
try:
import common
import common
except ImportError:
print("ERROR: Cannot import required libraries", file=sys.stderr)
sys.exit(1)
print("ERROR: Cannot import required libraries", file=sys.stderr)
sys.exit(1)
def validate_yaml_syntax(filepath: Path) -> bool:
    """
    Validate the YAML syntax of a workflow file.

    Args:
        filepath: Path to workflow file.

    Returns:
        True if the file parses as YAML. Also returns True when PyYAML is
        not installed (syntax checking is skipped with a warning).
    """
    try:
        # Imported lazily so the rest of the checks still run without PyYAML.
        import yaml
    except ImportError:
        common.log_warn("PyYAML module not installed. Install with: pip3 install pyyaml")
        return True  # Skip validation if yaml not available
    try:
        with open(filepath, 'r', encoding='utf-8') as f:
            yaml.safe_load(f)
        print(f"✓ Valid YAML: {filepath.name}")
        return True
    except yaml.YAMLError as e:
        print(f"✗ YAML Error in {filepath.name}: {e}", file=sys.stderr)
        return False
    except Exception as e:
        # Unreadable file (permissions, encoding) also fails the check.
        print(f"✗ Error reading {filepath.name}: {e}", file=sys.stderr)
        return False
def check_no_tabs(filepath: Path) -> bool:
    """
    Check that a file contains no tab characters.

    GitHub Actions workflow YAML must be indented with spaces only.

    Args:
        filepath: Path to file.

    Returns:
        True if the file is readable and contains no tabs.
    """
    try:
        with open(filepath, 'r', encoding='utf-8') as f:
            content = f.read()
        if '\t' in content:
            common.log_error(f"✗ File contains tab characters: {filepath.name}")
            return False
    except Exception as e:
        # An unreadable file is treated as a failed check.
        common.log_warn(f"Could not read {filepath}: {e}")
        return False
    return True
def check_workflow_structure(filepath: Path) -> bool:
    """
    Check a workflow file for the required top-level keys.

    Missing 'on:' or 'jobs:' is an error; missing 'name:' only warns
    (it is recommended but not required by GitHub Actions).

    Args:
        filepath: Path to workflow file.

    Returns:
        True if 'on:' and 'jobs:' are both present and the file is readable.
    """
    errors = 0
    try:
        with open(filepath, 'r', encoding='utf-8') as f:
            content = f.read()
        # NOTE: this is a substring heuristic, not a YAML-aware check; the
        # startswith() clauses are kept for parity with the original logic.
        if 'name:' not in content and not content.startswith('name:'):
            common.log_warn(f"Missing 'name:' in {filepath.name}")
        if 'on:' not in content and not content.startswith('on:'):
            common.log_error(f"✗ Missing 'on:' trigger in {filepath.name}")
            errors += 1
        if 'jobs:' not in content and not content.startswith('jobs:'):
            common.log_error(f"✗ Missing 'jobs:' in {filepath.name}")
            errors += 1
    except Exception as e:
        common.log_error(f"Error reading {filepath}: {e}")
        return False
    return errors == 0
def validate_workflow_file(filepath: Path) -> bool:
    """
    Run every check (YAML syntax, tabs, structure) against one workflow file.

    Args:
        filepath: Path to workflow file.

    Returns:
        True only if all three checks pass.
    """
    common.log_info(f"Validating: {filepath.name}")
    errors = 0
    # Each check logs its own diagnostics; here we only tally failures.
    if not validate_yaml_syntax(filepath):
        errors += 1
    if not check_no_tabs(filepath):
        errors += 1
    if not check_workflow_structure(filepath):
        errors += 1
    if errors == 0:
        common.log_info(f"{filepath.name} passed all checks")
        return True
    common.log_error(f"{filepath.name} failed {errors} check(s)")
    return False
def main() -> int:
    """Validate every GitHub Actions workflow under .github/workflows.

    Returns:
        0 if all workflows pass (or none exist), 1 if the directory is
        missing or any workflow fails.
    """
    common.log_info("GitHub Actions Workflow Validation")
    common.log_info("===================================")
    print()
    workflows_dir = Path(".github/workflows")
    if not workflows_dir.is_dir():
        common.log_error(f"Workflows directory not found: {workflows_dir}")
        return 1
    # Collect both .yml and .yaml workflow files.
    workflow_files = []
    for pattern in ["*.yml", "*.yaml"]:
        workflow_files.extend(workflows_dir.glob(pattern))
    if not workflow_files:
        # An empty directory is not an error.
        common.log_warn("No workflow files found")
        return 0
    total = len(workflow_files)
    passed = 0
    failed = 0
    for workflow in workflow_files:
        if validate_workflow_file(workflow):
            passed += 1
        else:
            failed += 1
    print()
    common.log_info("===================================")
    common.log_info("Summary:")
    common.log_info(f"  Total workflows: {total}")
    common.log_info(f"  Passed: {passed}")
    common.log_info(f"  Failed: {failed}")
    common.log_info("===================================")
    if failed > 0:
        common.log_error("Workflow validation failed")
        return 1
    common.log_info("All workflows validated successfully")
    return 0
if __name__ == "__main__":
    # Fix: the duplicated second sys.exit(main()) ran unconditionally on import.
    sys.exit(main())

View File

@@ -19,322 +19,320 @@
BRIEF: Safe, configurable Google Tag Manager loader for Moko-Cassiopeia.
PATH: ./media/templates/site/moko-cassiopeia/js/gtm.js
NOTE: Place the <noscript> fallback iframe in your HTML template (index.php). A JS file
      cannot provide a true no-JS fallback by definition.
VARIABLES:
	- window.MOKO_GTM_ID (string) // Optional global GTM container ID (e.g., "GTM-XXXXXXX")
	- window.MOKO_GTM_OPTIONS (object) // Optional global options (see JSDoc below)
	- data- attributes on the script tag or <html>/<body>:
	  data-gtm-id, data-data-layer, data-debug, data-ignore-dnt,
	  data-env-auth, data-env-preview, data-block-on-dev
*/
/* global window, document, navigator */
(() => {
	"use strict";

	/**
	 * @typedef {Object} MokoGtmOptions
	 * @property {string} [id] GTM container ID (e.g., "GTM-XXXXXXX")
	 * @property {string} [dataLayerName] Custom dataLayer name (default: "dataLayer")
	 * @property {boolean} [debug] Log debug messages to console (default: false)
	 * @property {boolean} [ignoreDNT] Ignore Do Not Track and always load (default: false)
	 * @property {boolean} [blockOnDev] Block loading on localhost/*.test/127.0.0.1 (default: true)
	 * @property {string} [envAuth] GTM Environment auth string (optional)
	 * @property {string} [envPreview] GTM Environment preview name (optional)
	 * @property {Record<string,'granted'|'denied'>} [consentDefault]
	 *   Default Consent Mode v2 map. Keys like:
	 *   analytics_storage, ad_storage, ad_user_data, ad_personalization, functionality_storage, security_storage
	 *   (default: {analytics_storage:'granted', functionality_storage:'granted', security_storage:'granted'})
	 * @property {() => (Record<string, any>|void)} [pageVars]
	 *   Function returning extra page variables to push on init (optional)
	 */

	const PKG = "moko-gtm";
	const PREFIX = `[${PKG}]`;
	const WIN = window;

	// Public API placeholder (attached to window at the end)
	/** @type {{
	 *   init: (opts?: Partial<MokoGtmOptions>) => void,
	 *   setConsent: (updates: Record<string,'granted'|'denied'>) => void,
	 *   push: (...args:any[]) => void,
	 *   isLoaded: () => boolean,
	 *   config: () => Required<MokoGtmOptions>
	 * }} */
	const API = {};

	// ---- Utilities ---------------------------------------------------------

	// True for common local development hostnames.
	const isDevHost = () => {
		const h = WIN.location && WIN.location.hostname || "";
		return (
			h === "localhost" ||
			h === "127.0.0.1" ||
			h.endsWith(".local") ||
			h.endsWith(".test")
		);
	};

	const dntEnabled = () => {
		// Different browsers expose DNT differently; treat "1" or "yes" as enabled.
		const n = navigator;
		const v = (n.doNotTrack || n.msDoNotTrack || (n.navigator && n.navigator.doNotTrack) || "").toString().toLowerCase();
		return v === "1" || v === "yes";
	};

	const getCurrentScript = () => {
		// document.currentScript is best; fallback to last <script> whose src ends with /gtm.js
		const cs = document.currentScript;
		if (cs) return cs;
		const scripts = Array.from(document.getElementsByTagName("script"));
		return scripts.reverse().find(s => (s.getAttribute("src") || "").includes("/gtm.js")) || null;
	};

	const readDatasetCascade = (name) => {
		// Check <script>, <html>, <body>, then <meta name="moko:gtm-<name>">
		const script = getCurrentScript();
		const html = document.documentElement;
		const body = document.body;
		const meta = document.querySelector(`meta[name="moko:gtm-${name}"]`);
		return (
			(script && script.dataset && script.dataset[name]) ||
			(html && html.dataset && html.dataset[name]) ||
			(body && body.dataset && body.dataset[name]) ||
			(meta && meta.getAttribute("content")) ||
			null
		);
	};

	const parseBool = (v, fallback = false) => {
		if (v == null) return fallback;
		const s = String(v).trim().toLowerCase();
		if (["1","true","yes","y","on"].includes(s)) return true;
		if (["0","false","no","n","off"].includes(s)) return false;
		return fallback;
	};

	const debugLog = (...args) => {
		if (STATE.debug) {
			try { console.info(PREFIX, ...args); } catch (_) {}
		}
	};

	// ---- Configuration & State --------------------------------------------

	/** @type {Required<MokoGtmOptions>} */
	const STATE = {
		id: "",
		dataLayerName: "dataLayer",
		debug: false,
		ignoreDNT: false,
		blockOnDev: true,
		envAuth: "",
		envPreview: "",
		consentDefault: {
			analytics_storage: "granted",
			functionality_storage: "granted",
			security_storage: "granted",
			// The following default to "denied" unless the site explicitly opts-in:
			ad_storage: "denied",
			ad_user_data: "denied",
			ad_personalization: "denied",
		},
		pageVars: () => ({})
	};

	// Shallow merge; plain-object values are merged one level deep.
	const mergeOptions = (base, extra = {}) => {
		const out = {...base};
		for (const k in extra) {
			if (!Object.prototype.hasOwnProperty.call(extra, k)) continue;
			const v = extra[k];
			if (v && typeof v === "object" && !Array.isArray(v)) {
				out[k] = {...(out[k] || {}), ...v};
			} else if (v !== undefined) {
				out[k] = v;
			}
		}
		return out;
	};

	const detectOptions = () => {
		// 1) Global window options
		/** @type {Partial<MokoGtmOptions>} */
		const globalOpts = (WIN.MOKO_GTM_OPTIONS && typeof WIN.MOKO_GTM_OPTIONS === "object") ? WIN.MOKO_GTM_OPTIONS : {};

		// 2) Dataset / meta
		const idFromData = readDatasetCascade("id") || WIN.MOKO_GTM_ID || "";
		const dlFromData = readDatasetCascade("dataLayer") || "";
		const dbgFromData = readDatasetCascade("debug");
		const dntFromData = readDatasetCascade("ignoreDnt");
		const devFromData = readDatasetCascade("blockOnDev");
		const authFromData = readDatasetCascade("envAuth") || "";
		const prevFromData = readDatasetCascade("envPreview") || "";

		// 3) Combine (dataset/meta wins over window globals)
		/** @type {Partial<MokoGtmOptions>} */
		const detected = {
			id: idFromData || globalOpts.id || "",
			dataLayerName: dlFromData || globalOpts.dataLayerName || undefined,
			debug: parseBool(dbgFromData, !!globalOpts.debug),
			ignoreDNT: parseBool(dntFromData, !!globalOpts.ignoreDNT),
			blockOnDev: parseBool(devFromData, (globalOpts.blockOnDev ?? true)),
			envAuth: authFromData || globalOpts.envAuth || "",
			envPreview: prevFromData || globalOpts.envPreview || "",
			consentDefault: globalOpts.consentDefault || undefined,
			pageVars: typeof globalOpts.pageVars === "function" ? globalOpts.pageVars : undefined
		};

		return detected;
	};

	// ---- dataLayer / gtag helpers -----------------------------------------

	const ensureDataLayer = () => {
		const l = STATE.dataLayerName;
		WIN[l] = WIN[l] || [];
		return WIN[l];
	};

	/** gtag wrapper backed by dataLayer. */
	const gtag = (...args) => {
		const dl = ensureDataLayer();
		// FIX: arrow functions have no `arguments` binding — the original
		// `arguments.length` threw a ReferenceError. Use the rest parameter.
		dl.push(args.length > 1 ? args : args[0]);
		debugLog("gtag push:", args);
	};

	API.push = (...args) => gtag(...args);

	API.setConsent = (updates) => {
		gtag("consent", "update", updates || {});
	};

	API.isLoaded = () => {
		const hasScript = !!document.querySelector('script[src*="googletagmanager.com/gtm.js"]');
		return hasScript;
	};

	API.config = () => ({...STATE});

	// ---- Loader ------------------------------------------------------------

	// Build the optional GTM environment query-string suffix.
	const buildEnvQuery = () => {
		const qp = [];
		if (STATE.envAuth) qp.push(`gtm_auth=${encodeURIComponent(STATE.envAuth)}`);
		if (STATE.envPreview) qp.push(`gtm_preview=${encodeURIComponent(STATE.envPreview)}`, "gtm_cookies_win=x");
		return qp.length ? `&${qp.join("&")}` : "";
	};

	const injectScript = () => {
		if (!STATE.id) {
			debugLog("GTM ID missing; aborting load.");
			return;
		}
		if (API.isLoaded()) {
			debugLog("GTM already loaded; skipping duplicate injection.");
			return;
		}

		// Standard GTM bootstrap timing event
		const dl = ensureDataLayer();
		dl.push({ "gtm.start": new Date().getTime(), event: "gtm.js" });

		const f = document.getElementsByTagName("script")[0];
		const j = document.createElement("script");
		j.async = true;
		j.src = `https://www.googletagmanager.com/gtm.js?id=${encodeURIComponent(STATE.id)}${STATE.dataLayerName !== "dataLayer" ? `&l=${encodeURIComponent(STATE.dataLayerName)}` : ""}${buildEnvQuery()}`;
		if (f && f.parentNode) {
			f.parentNode.insertBefore(j, f);
		} else {
			// No <script> in the document yet (e.g. injected very early).
			(document.head || document.documentElement).appendChild(j);
		}
		debugLog("Injected GTM script:", j.src);
	};

	const applyDefaultConsent = () => {
		// Consent Mode v2 default
		gtag("consent", "default", STATE.consentDefault);
		debugLog("Applied default consent:", STATE.consentDefault);
	};

	const pushInitialVars = () => {
		// Minimal page vars; allow site to add more via pageVars()
		const vars = {
			event: "moko.page_init",
			page_title: document.title || "",
			page_language: (document.documentElement && document.documentElement.lang) || "",
			...(typeof STATE.pageVars === "function" ? (STATE.pageVars() || {}) : {})
		};
		gtag(vars);
	};

	const shouldLoad = () => {
		if (!STATE.ignoreDNT && dntEnabled()) {
			debugLog("DNT is enabled; blocking GTM load (set ignoreDNT=true to override).");
			return false;
		}
		if (STATE.blockOnDev && isDevHost()) {
			debugLog("Development host detected; blocking GTM load (set blockOnDev=false to override).");
			return false;
		}
		return true;
	};

	// ---- Public init -------------------------------------------------------

	API.init = (opts = {}) => {
		// Merge: defaults <- detected <- passed opts
		const detected = detectOptions();
		const merged = mergeOptions(STATE, mergeOptions(detected, opts));

		// Commit back to STATE
		Object.assign(STATE, merged);

		debugLog("Config:", STATE);

		// Prepare dataLayer/gtag and consent
		ensureDataLayer();
		applyDefaultConsent();
		pushInitialVars();

		// Load GTM if allowed
		if (shouldLoad()) {
			injectScript();
		} else {
			debugLog("GTM load prevented by configuration or environment.");
		}
	};

	// ---- Auto-init on DOMContentLoaded (safe even if deferred) -------------

	const autoInit = () => {
		// Only auto-init if we have some ID from globals/datasets.
		const detected = detectOptions();
		const hasId = !!(detected.id || WIN.MOKO_GTM_ID);
		if (hasId) {
			API.init(); // use detected/global defaults
		} else {
			debugLog("No GTM ID detected; awaiting manual init via window.mokoGTM.init({ id: 'GTM-XXXXXXX' }).");
		}
	};

	if (document.readyState === "complete" || document.readyState === "interactive") {
		// Defer to ensure <body> exists for any late consumers.
		setTimeout(autoInit, 0);
	} else {
		document.addEventListener("DOMContentLoaded", autoInit, { once: true });
	}

	// Expose API
	WIN.mokoGTM = API;

	// Helpful console hint (only if debug true after detection)
	try {
		const detected = detectOptions();
		if (parseBool(detected.debug, false)) {
			STATE.debug = true;
			debugLog("Ready. You can call window.mokoGTM.init({ id: 'GTM-XXXXXXX' }).");
		}
	} catch (_) {}
})();