diff --git a/scripts/release/package_extension.py b/scripts/release/package_extension.py
index 69f075b..99a4b55 100755
--- a/scripts/release/package_extension.py
+++ b/scripts/release/package_extension.py
@@ -32,210 +32,208 @@ USAGE: ./scripts/release/package_extension.py [output_dir] [version]
"""
import argparse
-import os
-import shutil
import sys
import zipfile
from datetime import datetime
from pathlib import Path
-from typing import List, Set
+from typing import Set
# Add lib directory to path
sys.path.insert(0, str(Path(__file__).parent.parent / "lib"))
try:
- import common
- import extension_utils
+ import common
+ import extension_utils
except ImportError:
- print("ERROR: Cannot import required libraries", file=sys.stderr)
- sys.exit(1)
+ print("ERROR: Cannot import required libraries", file=sys.stderr)
+ sys.exit(1)
# Exclusion patterns for packaging
EXCLUDE_PATTERNS = {
- # Version control
- ".git", ".gitignore", ".gitattributes",
- # IDE
- ".vscode", ".idea", "*.sublime-*",
- # Development
- "node_modules", "vendor", ".env", ".env.*",
- # Documentation (optional, can be included)
- # Build artifacts
- "dist", "build", ".phpunit.cache",
- # Development tool caches and artifacts
- ".phpstan.cache", ".psalm", ".rector",
- "phpmd-cache", ".php-cs-fixer.cache", ".phplint-cache",
- # OS files
- ".DS_Store", "Thumbs.db",
- # Logs
- "*.log",
- # Tests
- "tests", "test", "Tests",
- # CI/CD
- ".github",
- # Scripts
- "scripts",
- # Docs (can be included if needed)
- "docs",
- # Config files
- "composer.json", "composer.lock",
- "package.json", "package-lock.json",
- "phpunit.xml", "phpstan.neon", "phpcs.xml",
- "codeception.yml", "psalm.xml", ".php-cs-fixer.php",
- # Others
- "README.md", "CHANGELOG.md", "CONTRIBUTING.md",
- "CODE_OF_CONDUCT.md", "SECURITY.md", "GOVERNANCE.md",
- "Makefile",
+ # Version control
+ ".git", ".gitignore", ".gitattributes",
+ # IDE
+ ".vscode", ".idea", "*.sublime-*",
+ # Development
+ "node_modules", "vendor", ".env", ".env.*",
+ # Documentation (optional, can be included)
+ # Build artifacts
+ "dist", "build", ".phpunit.cache",
+ # Development tool caches and artifacts
+ ".phpstan.cache", ".psalm", ".rector",
+ "phpmd-cache", ".php-cs-fixer.cache", ".phplint-cache",
+ # OS files
+ ".DS_Store", "Thumbs.db",
+ # Logs
+ "*.log",
+ # Tests
+ "tests", "test", "Tests",
+ # CI/CD
+ ".github",
+ # Scripts
+ "scripts",
+ # Docs (can be included if needed)
+ "docs",
+ # Config files
+ "composer.json", "composer.lock",
+ "package.json", "package-lock.json",
+ "phpunit.xml", "phpstan.neon", "phpcs.xml",
+ "codeception.yml", "psalm.xml", ".php-cs-fixer.php",
+ # Others
+ "README.md", "CHANGELOG.md", "CONTRIBUTING.md",
+ "CODE_OF_CONDUCT.md", "SECURITY.md", "GOVERNANCE.md",
+ "Makefile",
}
def should_exclude(path: Path, base_path: Path, exclude_patterns: Set[str]) -> bool:
- """
- Check if a path should be excluded from packaging.
-
- Args:
- path: Path to check
- base_path: Base directory path
- exclude_patterns: Set of exclusion patterns
-
- Returns:
- True if should be excluded
- """
- relative_path = path.relative_to(base_path)
-
- # Check each part of the path
- for part in relative_path.parts:
- if part in exclude_patterns:
- return True
- # Check wildcard patterns
- for pattern in exclude_patterns:
- if "*" in pattern:
- import fnmatch
- if fnmatch.fnmatch(part, pattern):
- return True
-
- return False
+ """
+ Check if a path should be excluded from packaging.
+
+ Args:
+ path: Path to check
+ base_path: Base directory path
+ exclude_patterns: Set of exclusion patterns
+
+ Returns:
+ True if should be excluded
+ """
+ relative_path = path.relative_to(base_path)
+
+ # Check each part of the path
+ for part in relative_path.parts:
+ if part in exclude_patterns:
+ return True
+ # Check wildcard patterns
+ for pattern in exclude_patterns:
+ if "*" in pattern:
+                import fnmatch  # NOTE(review): hoist to module scope; this local import re-runs for every wildcard check
+ if fnmatch.fnmatch(part, pattern):
+ return True
+
+ return False
def create_package(
- src_dir: str,
- output_dir: str,
- version: str = None,
- repo_name: str = None,
- exclude_patterns: Set[str] = None
+ src_dir: str,
+ output_dir: str,
+ version: str = None,
+ repo_name: str = None,
+ exclude_patterns: Set[str] = None
) -> Path:
- """
- Create a distributable ZIP package for a Joomla or Dolibarr extension.
-
- Args:
- src_dir: Source directory containing extension files
- output_dir: Output directory for ZIP file
- version: Version string (auto-detected if not provided)
- repo_name: Repository name for ZIP file naming
- exclude_patterns: Patterns to exclude from packaging
-
- Returns:
- Path to created ZIP file
- """
- src_path = Path(src_dir)
- if not src_path.is_dir():
- common.die(f"Source directory not found: {src_dir}")
-
- # Detect extension platform and get info
- ext_info = extension_utils.get_extension_info(src_dir)
-
- if not ext_info:
- common.die(f"No Joomla or Dolibarr extension found in {src_dir}")
-
- # Determine version
- if not version:
- version = ext_info.version
-
- # Determine repo name
- if not repo_name:
- try:
- repo_root = common.git_root()
- repo_name = repo_root.name
- except Exception:
- repo_name = "extension"
-
- # Determine exclusion patterns
- if exclude_patterns is None:
- exclude_patterns = EXCLUDE_PATTERNS
-
- # Create output directory
- output_path = Path(output_dir)
- common.ensure_dir(output_path)
-
- # Generate ZIP filename
- timestamp = datetime.now().strftime("%Y%m%d-%H%M%S")
- platform_suffix = f"{ext_info.platform.value}-{ext_info.extension_type}"
- zip_filename = f"{repo_name}-{version}-{platform_suffix}.zip"
- zip_path = output_path / zip_filename
-
- # Remove existing ZIP if present
- if zip_path.exists():
- zip_path.unlink()
-
- common.log_section("Creating Extension Package")
- common.log_kv("Platform", ext_info.platform.value.upper())
- common.log_kv("Extension", ext_info.name)
- common.log_kv("Type", ext_info.extension_type)
- common.log_kv("Version", version)
- common.log_kv("Source", src_dir)
- common.log_kv("Output", str(zip_path))
- print()
-
- # Create ZIP file
- file_count = 0
- with zipfile.ZipFile(zip_path, 'w', zipfile.ZIP_DEFLATED) as zipf:
- for item in src_path.rglob("*"):
- if item.is_file():
- # Check if should be excluded
- if should_exclude(item, src_path, exclude_patterns):
- continue
-
- # Add to ZIP with relative path
- arcname = item.relative_to(src_path)
- zipf.write(item, arcname)
- file_count += 1
-
- if file_count % 10 == 0:
- common.log_step(f"Added {file_count} files...")
-
- # Get ZIP file size
- zip_size = zip_path.stat().st_size
- zip_size_mb = zip_size / (1024 * 1024)
-
- print()
- common.log_success(f"Package created: {zip_path.name}")
- common.log_kv("Files", str(file_count))
- common.log_kv("Size", f"{zip_size_mb:.2f} MB")
-
- # Output JSON for machine consumption
- result = {
- "status": "ok",
- "platform": ext_info.platform.value,
- "extension": ext_info.name,
- "ext_type": ext_info.extension_type,
- "version": version,
- "package": str(zip_path),
- "files": file_count,
- "size_bytes": zip_size
- }
-
- print()
- common.json_output(result)
-
- return zip_path
+ """
+ Create a distributable ZIP package for a Joomla or Dolibarr extension.
+
+ Args:
+ src_dir: Source directory containing extension files
+ output_dir: Output directory for ZIP file
+ version: Version string (auto-detected if not provided)
+ repo_name: Repository name for ZIP file naming
+ exclude_patterns: Patterns to exclude from packaging
+
+ Returns:
+ Path to created ZIP file
+ """
+ src_path = Path(src_dir)
+ if not src_path.is_dir():
+ common.die(f"Source directory not found: {src_dir}")
+
+ # Detect extension platform and get info
+ ext_info = extension_utils.get_extension_info(src_dir)
+
+ if not ext_info:
+ common.die(f"No Joomla or Dolibarr extension found in {src_dir}")
+
+ # Determine version
+ if not version:
+ version = ext_info.version
+
+ # Determine repo name
+ if not repo_name:
+ try:
+ repo_root = common.git_root()
+ repo_name = repo_root.name
+ except Exception:
+ repo_name = "extension"
+
+ # Determine exclusion patterns
+ if exclude_patterns is None:
+ exclude_patterns = EXCLUDE_PATTERNS
+
+ # Create output directory
+ output_path = Path(output_dir)
+ common.ensure_dir(output_path)
+
+ # Generate ZIP filename
+ timestamp = datetime.now().strftime("%Y%m%d-%H%M%S")
+ platform_suffix = f"{ext_info.platform.value}-{ext_info.extension_type}"
+ zip_filename = f"{repo_name}-{version}-{platform_suffix}-{timestamp}.zip"
+ zip_path = output_path / zip_filename
+
+ # Remove existing ZIP if present
+ if zip_path.exists():
+ zip_path.unlink()
+
+ common.log_section("Creating Extension Package")
+ common.log_kv("Platform", ext_info.platform.value.upper())
+ common.log_kv("Extension", ext_info.name)
+ common.log_kv("Type", ext_info.extension_type)
+ common.log_kv("Version", version)
+ common.log_kv("Source", src_dir)
+ common.log_kv("Output", str(zip_path))
+ print()
+
+ # Create ZIP file
+ file_count = 0
+ with zipfile.ZipFile(zip_path, 'w', zipfile.ZIP_DEFLATED) as zipf:
+ for item in src_path.rglob("*"):
+ if item.is_file():
+ # Check if should be excluded
+ if should_exclude(item, src_path, exclude_patterns):
+ continue
+
+ # Add to ZIP with relative path
+ arcname = item.relative_to(src_path)
+ zipf.write(item, arcname)
+ file_count += 1
+
+ if file_count % 10 == 0:
+ common.log_step(f"Added {file_count} files...")
+
+ # Get ZIP file size
+ zip_size = zip_path.stat().st_size
+ zip_size_mb = zip_size / (1024 * 1024)
+
+ print()
+ common.log_success(f"Package created: {zip_path.name}")
+ common.log_kv("Files", str(file_count))
+ common.log_kv("Size", f"{zip_size_mb:.2f} MB")
+
+ # Output JSON for machine consumption
+ result = {
+ "status": "ok",
+ "platform": ext_info.platform.value,
+ "extension": ext_info.name,
+ "ext_type": ext_info.extension_type,
+ "version": version,
+ "package": str(zip_path),
+ "files": file_count,
+ "size_bytes": zip_size
+ }
+
+ print()
+ common.json_output(result)
+
+ return zip_path
def main() -> int:
- """Main entry point."""
- parser = argparse.ArgumentParser(
- description="Package Joomla or Dolibarr extension as distributable ZIP",
- formatter_class=argparse.RawDescriptionHelpFormatter,
- epilog="""
+ """Main entry point."""
+ parser = argparse.ArgumentParser(
+ description="Package Joomla or Dolibarr extension as distributable ZIP",
+ formatter_class=argparse.RawDescriptionHelpFormatter,
+ epilog="""
Examples:
# Package with auto-detected version
%(prog)s
@@ -251,73 +249,79 @@ Examples:
Supports both Joomla and Dolibarr extensions with automatic platform detection.
"""
- )
-
- parser.add_argument(
- "output_dir",
- nargs="?",
- default="dist",
- help="Output directory for ZIP file (default: dist)"
- )
- parser.add_argument(
- "version",
- nargs="?",
- help="Version string (default: auto-detected from manifest)"
- )
- parser.add_argument(
- "-s", "--src-dir",
- default="src",
- help="Source directory (default: src)"
- )
- parser.add_argument(
- "--repo-name",
- help="Repository name for ZIP filename (default: auto-detected)"
- )
- parser.add_argument(
- "--include-docs",
- action="store_true",
- help="Include documentation files in package"
- )
- parser.add_argument(
- "--include-tests",
- action="store_true",
- help="Include test files in package"
- )
-
- args = parser.parse_args()
-
- try:
- # Adjust exclusion patterns based on arguments
- exclude_patterns = EXCLUDE_PATTERNS.copy()
- if args.include_docs:
- exclude_patterns.discard("docs")
- exclude_patterns.discard("README.md")
- exclude_patterns.discard("CHANGELOG.md")
- if args.include_tests:
- exclude_patterns.discard("tests")
- exclude_patterns.discard("test")
- exclude_patterns.discard("Tests")
-
- # Create package
- zip_path = create_package(
- src_dir=args.src_dir,
- output_dir=args.output_dir,
- version=args.version,
- repo_name=args.repo_name,
- exclude_patterns=exclude_patterns
- )
-
- return 0
-
- except Exception as e:
- common.log_error(f"Packaging failed: {e}")
- result = {
- "status": "error",
- "error": str(e)
- }
- common.json_output(result)
- return 1
+ )
+
+ parser.add_argument(
+ "output_dir",
+ nargs="?",
+ default="dist",
+ help="Output directory for ZIP file (default: dist)"
+ )
+ parser.add_argument(
+ "version",
+ nargs="?",
+ help="Version string (default: auto-detected from manifest)"
+ )
+ parser.add_argument(
+ "-s", "--src-dir",
+ default="src",
+ help="Source directory (default: src)"
+ )
+ parser.add_argument(
+ "--repo-name",
+ help="Repository name for ZIP filename (default: auto-detected)"
+ )
+ parser.add_argument(
+ "--include-docs",
+ action="store_true",
+ help="Include documentation files in package"
+ )
+ parser.add_argument(
+ "--include-tests",
+ action="store_true",
+ help="Include test files in package"
+ )
+
+ args = parser.parse_args()
+
+ try:
+ # Adjust exclusion patterns based on arguments
+ exclude_patterns = EXCLUDE_PATTERNS.copy()
+ if args.include_docs:
+ exclude_patterns.discard("docs")
+ exclude_patterns.discard("README.md")
+ exclude_patterns.discard("CHANGELOG.md")
+ if args.include_tests:
+ exclude_patterns.discard("tests")
+ exclude_patterns.discard("test")
+ exclude_patterns.discard("Tests")
+
+ # Create package
+ zip_path = create_package(
+ src_dir=args.src_dir,
+ output_dir=args.output_dir,
+ version=args.version,
+ repo_name=args.repo_name,
+ exclude_patterns=exclude_patterns
+ )
+
+    # NOTE(review): create_package() already emits the canonical JSON
+    # result ("status": "ok", platform, version, package, files, ...).
+    # Emitting a second, differently-shaped JSON document here would hand
+    # machine consumers two conflicting payloads ("ok" vs "success"), so
+    # the duplicate output is dropped; zip_path confirms success above.
+
+ return 0
+
+ except Exception as e:
+ common.log_error(f"Packaging failed: {e}")
+ result = {
+ "status": "error",
+ "error": str(e)
+ }
+ common.json_output(result)
+ return 1
if __name__ == "__main__":
- sys.exit(main())
+ sys.exit(main())
diff --git a/scripts/run/scaffold_extension.py b/scripts/run/scaffold_extension.py
index 19306f6..382ef03 100755
--- a/scripts/run/scaffold_extension.py
+++ b/scripts/run/scaffold_extension.py
@@ -40,11 +40,10 @@ from typing import Dict
sys.path.insert(0, str(Path(__file__).parent.parent / "lib"))
try:
- import common
- import joomla_manifest
+ import common
except ImportError:
- print("ERROR: Cannot import required libraries", file=sys.stderr)
- sys.exit(1)
+ print("ERROR: Cannot import required libraries", file=sys.stderr)
+ sys.exit(1)
# ============================================================================
@@ -52,73 +51,73 @@ except ImportError:
# ============================================================================
def get_component_structure(name: str, description: str, author: str) -> Dict[str, str]:
- """Get directory structure and files for a component."""
- safe_name = name.lower().replace(" ", "_")
- com_name = f"com_{safe_name}"
-
- manifest = f"""
+ """Get directory structure and files for a component."""
+ safe_name = name.lower().replace(" ", "_")
+ com_name = f"com_{safe_name}"
+
+ manifest = f"""
- {name}
- {author}
- {datetime.now().strftime("%Y-%m-%d")}
- Copyright (C) {datetime.now().year} {author}
- GPL-3.0-or-later
- hello@example.com
- https://example.com
- 1.0.0
- {description}
+ {name}
+ {author}
+ {datetime.now().strftime("%Y-%m-%d")}
+ Copyright (C) {datetime.now().year} {author}
+ GPL-3.0-or-later
+ hello@example.com
+ https://example.com
+ 1.0.0
+ {description}
-
- src
-
+
+ src
+
-
-
-
- services
- sql
- src
-
-
+
+
+
+ services
+ sql
+ src
+
+
"""
-
- return {
- f"{com_name}.xml": manifest,
- "site/src/.gitkeep": "",
- "admin/services/provider.php": f" Dict[str, str]:
- """Get directory structure and files for a module."""
- safe_name = name.lower().replace(" ", "_")
- mod_name = f"mod_{safe_name}"
-
- manifest = f"""
+ """Get directory structure and files for a module."""
+ safe_name = name.lower().replace(" ", "_")
+ mod_name = f"mod_{safe_name}"
+
+ manifest = f"""
- {name}
- {author}
- {datetime.now().strftime("%Y-%m-%d")}
- Copyright (C) {datetime.now().year} {author}
- GPL-3.0-or-later
- hello@example.com
- https://example.com
- 1.0.0
- {description}
+ {name}
+ {author}
+ {datetime.now().strftime("%Y-%m-%d")}
+ Copyright (C) {datetime.now().year} {author}
+ GPL-3.0-or-later
+ hello@example.com
+ https://example.com
+ 1.0.0
+ {description}
-
- {mod_name}.php
- {mod_name}.xml
- tmpl
-
+
+ {mod_name}.php
+ {mod_name}.xml
+ tmpl
+
"""
-
- module_php = f"""get('layout', 'default'));
"""
-
- default_tmpl = f"""get('layout', '
defined('_JEXEC') or die;
?>
"""
-
- return {
- f"{mod_name}.xml": manifest,
- f"{mod_name}.php": module_php,
- "tmpl/default.php": default_tmpl,
- }
+
+ return {
+ f"{mod_name}.xml": manifest,
+ f"{mod_name}.php": module_php,
+ "tmpl/default.php": default_tmpl,
+ }
def get_plugin_structure(name: str, description: str, author: str, group: str = "system") -> Dict[str, str]:
- """Get directory structure and files for a plugin."""
- safe_name = name.lower().replace(" ", "_")
- plg_name = f"{safe_name}"
-
- manifest = f"""
+ """Get directory structure and files for a plugin."""
+ safe_name = name.lower().replace(" ", "_")
+ plg_name = f"{safe_name}"
+
+ manifest = f"""
- plg_{group}_{safe_name}
- {author}
- {datetime.now().strftime("%Y-%m-%d")}
- Copyright (C) {datetime.now().year} {author}
- GPL-3.0-or-later
- hello@example.com
- https://example.com
- 1.0.0
- {description}
+ plg_{group}_{safe_name}
+ {author}
+ {datetime.now().strftime("%Y-%m-%d")}
+ Copyright (C) {datetime.now().year} {author}
+ GPL-3.0-or-later
+ hello@example.com
+ https://example.com
+ 1.0.0
+ {description}
-
- {plg_name}.php
-
+
+ {plg_name}.php
+
"""
-
- plugin_php = f""" Dict[str, str]:
- """Get directory structure and files for a template."""
- safe_name = name.lower().replace(" ", "_")
-
- manifest = f"""
+ """Get directory structure and files for a template."""
+ safe_name = name.lower().replace(" ", "_")
+
+ manifest = f"""
- {safe_name}
- {datetime.now().strftime("%Y-%m-%d")}
- {author}
- hello@example.com
- https://example.com
- Copyright (C) {datetime.now().year} {author}
- GPL-3.0-or-later
- 1.0.0
- {description}
+ {safe_name}
+ {datetime.now().strftime("%Y-%m-%d")}
+ {author}
+ hello@example.com
+ https://example.com
+ Copyright (C) {datetime.now().year} {author}
+ GPL-3.0-or-later
+ 1.0.0
+ {description}
-
- index.php
- templateDetails.xml
- css
- js
- images
-
+
+ index.php
+ templateDetails.xml
+ css
+ js
+ images
+
-
- header
- main
- footer
-
+
+ header
+ main
+ footer
+
"""
-
- index_php = f"""useStyle('template.{safe_name}')->useScript('template.{safe_name}');
-
-
-
+
+
+
-
-
-
-
-
+
+
+
+
+
"""
-
- return {
- "templateDetails.xml": manifest,
- "index.php": index_php,
- "css/template.css": "/* Template styles */\n",
- "js/template.js": "// Template JavaScript\n",
- "images/.gitkeep": "",
- }
+
+ return {
+ "templateDetails.xml": manifest,
+ "index.php": index_php,
+ "css/template.css": "/* Template styles */\n",
+ "js/template.js": "// Template JavaScript\n",
+ "images/.gitkeep": "",
+ }
def get_package_structure(name: str, description: str, author: str) -> Dict[str, str]:
- """Get directory structure and files for a package."""
- safe_name = name.lower().replace(" ", "_")
- pkg_name = f"pkg_{safe_name}"
-
- manifest = f"""
+ """Get directory structure and files for a package."""
+ safe_name = name.lower().replace(" ", "_")
+ pkg_name = f"pkg_{safe_name}"
+
+ manifest = f"""
- {name}
- {safe_name}
- {author}
- {datetime.now().strftime("%Y-%m-%d")}
- Copyright (C) {datetime.now().year} {author}
- GPL-3.0-or-later
- hello@example.com
- https://example.com
- 1.0.0
- {description}
+ {name}
+ {safe_name}
+ {author}
+ {datetime.now().strftime("%Y-%m-%d")}
+ Copyright (C) {datetime.now().year} {author}
+ GPL-3.0-or-later
+ hello@example.com
+ https://example.com
+ 1.0.0
+ {description}
-
-
-
+
+
+
"""
-
- return {
- f"{pkg_name}.xml": manifest,
- "packages/.gitkeep": "",
- }
+
+ return {
+ f"{pkg_name}.xml": manifest,
+ "packages/.gitkeep": "",
+ }
# ============================================================================
@@ -319,59 +318,59 @@ def get_package_structure(name: str, description: str, author: str) -> Dict[str,
# ============================================================================
def create_extension(
- ext_type: str,
- name: str,
- description: str,
- author: str,
- output_dir: str = "src",
- **kwargs
+ ext_type: str,
+ name: str,
+ description: str,
+ author: str,
+ output_dir: str = "src",
+ **kwargs
) -> None:
- """
- Create extension scaffolding.
-
- Args:
- ext_type: Extension type (component, module, plugin, template, package)
- name: Extension name
- description: Extension description
- author: Author name
- output_dir: Output directory
- **kwargs: Additional type-specific options
- """
- output_path = Path(output_dir)
-
- # Get structure based on type
- if ext_type == "component":
- structure = get_component_structure(name, description, author)
- elif ext_type == "module":
- client = kwargs.get("client", "site")
- structure = get_module_structure(name, description, author, client)
- elif ext_type == "plugin":
- group = kwargs.get("group", "system")
- structure = get_plugin_structure(name, description, author, group)
- elif ext_type == "template":
- structure = get_template_structure(name, description, author)
- elif ext_type == "package":
- structure = get_package_structure(name, description, author)
- else:
- common.die(f"Unknown extension type: {ext_type}")
-
- # Create files
- common.log_section(f"Creating {ext_type}: {name}")
-
- for file_path, content in structure.items():
- full_path = output_path / file_path
-
- # Create parent directories
- full_path.parent.mkdir(parents=True, exist_ok=True)
-
- # Write file
- full_path.write_text(content, encoding="utf-8")
- common.log_success(f"Created: {file_path}")
-
- common.log_section("Scaffolding Complete")
- common.log_info(f"Extension files created in: {output_path}")
- common.log_info(f"Extension type: {ext_type}")
- common.log_info(f"Extension name: {name}")
+ """
+ Create extension scaffolding.
+
+ Args:
+ ext_type: Extension type (component, module, plugin, template, package)
+ name: Extension name
+ description: Extension description
+ author: Author name
+ output_dir: Output directory
+ **kwargs: Additional type-specific options
+ """
+ output_path = Path(output_dir)
+
+ # Get structure based on type
+ if ext_type == "component":
+ structure = get_component_structure(name, description, author)
+ elif ext_type == "module":
+ client = kwargs.get("client", "site")
+ structure = get_module_structure(name, description, author, client)
+ elif ext_type == "plugin":
+ group = kwargs.get("group", "system")
+ structure = get_plugin_structure(name, description, author, group)
+ elif ext_type == "template":
+ structure = get_template_structure(name, description, author)
+ elif ext_type == "package":
+ structure = get_package_structure(name, description, author)
+ else:
+ common.die(f"Unknown extension type: {ext_type}")
+
+ # Create files
+ common.log_section(f"Creating {ext_type}: {name}")
+
+ for file_path, content in structure.items():
+ full_path = output_path / file_path
+
+ # Create parent directories
+ full_path.parent.mkdir(parents=True, exist_ok=True)
+
+ # Write file
+ full_path.write_text(content, encoding="utf-8")
+ common.log_success(f"Created: {file_path}")
+
+ common.log_section("Scaffolding Complete")
+ common.log_info(f"Extension files created in: {output_path}")
+ common.log_info(f"Extension type: {ext_type}")
+ common.log_info(f"Extension name: {name}")
# ============================================================================
@@ -379,11 +378,11 @@ def create_extension(
# ============================================================================
def main() -> None:
- """Main entry point."""
- parser = argparse.ArgumentParser(
- description="Create Joomla extension scaffolding",
- formatter_class=argparse.RawDescriptionHelpFormatter,
- epilog="""
+ """Main entry point."""
+ parser = argparse.ArgumentParser(
+ description="Create Joomla extension scaffolding",
+ formatter_class=argparse.RawDescriptionHelpFormatter,
+ epilog="""
Examples:
# Create a component
%(prog)s component MyComponent "My Component Description" "John Doe"
@@ -400,49 +399,49 @@ Examples:
# Create a package
%(prog)s package mypackage "My Package Description" "John Doe"
"""
- )
-
- parser.add_argument(
- "type",
- choices=["component", "module", "plugin", "template", "package"],
- help="Extension type to create"
- )
- parser.add_argument("name", help="Extension name")
- parser.add_argument("description", help="Extension description")
- parser.add_argument("author", help="Author name")
- parser.add_argument(
- "-o", "--output",
- default="src",
- help="Output directory (default: src)"
- )
- parser.add_argument(
- "--client",
- choices=["site", "administrator"],
- default="site",
- help="Module client (site or administrator)"
- )
- parser.add_argument(
- "--group",
- default="system",
- help="Plugin group (system, content, user, etc.)"
- )
-
- args = parser.parse_args()
-
- try:
- create_extension(
- ext_type=args.type,
- name=args.name,
- description=args.description,
- author=args.author,
- output_dir=args.output,
- client=args.client,
- group=args.group
- )
- except Exception as e:
- common.log_error(f"Failed to create extension: {e}")
- sys.exit(1)
+ )
+
+ parser.add_argument(
+ "type",
+ choices=["component", "module", "plugin", "template", "package"],
+ help="Extension type to create"
+ )
+ parser.add_argument("name", help="Extension name")
+ parser.add_argument("description", help="Extension description")
+ parser.add_argument("author", help="Author name")
+ parser.add_argument(
+ "-o", "--output",
+ default="src",
+ help="Output directory (default: src)"
+ )
+ parser.add_argument(
+ "--client",
+ choices=["site", "administrator"],
+ default="site",
+ help="Module client (site or administrator)"
+ )
+ parser.add_argument(
+ "--group",
+ default="system",
+ help="Plugin group (system, content, user, etc.)"
+ )
+
+ args = parser.parse_args()
+
+ try:
+ create_extension(
+ ext_type=args.type,
+ name=args.name,
+ description=args.description,
+ author=args.author,
+ output_dir=args.output,
+ client=args.client,
+ group=args.group
+ )
+ except Exception as e:
+ common.log_error(f"Failed to create extension: {e}")
+ sys.exit(1)
if __name__ == "__main__":
- main()
+ main()
diff --git a/scripts/run/validate_all.py b/scripts/run/validate_all.py
index 79657bb..c43e6e6 100755
--- a/scripts/run/validate_all.py
+++ b/scripts/run/validate_all.py
@@ -33,149 +33,149 @@ BRIEF: Run all validation scripts
import subprocess
import sys
from pathlib import Path
-from typing import List, Tuple
+from typing import Tuple
# Add lib directory to path
sys.path.insert(0, str(Path(__file__).parent.parent / "lib"))
try:
- import common
+ import common
except ImportError:
- print("ERROR: Cannot import required libraries", file=sys.stderr)
- sys.exit(1)
+ print("ERROR: Cannot import required libraries", file=sys.stderr)
+ sys.exit(1)
# Required validation scripts (must pass)
REQUIRED_SCRIPTS = [
- "scripts/validate/manifest.py",
- "scripts/validate/xml_wellformed.py",
- "scripts/validate/workflows.py",
+ "scripts/validate/manifest.py",
+ "scripts/validate/xml_wellformed.py",
+ "scripts/validate/workflows.py",
]
# Optional validation scripts (failures are warnings)
OPTIONAL_SCRIPTS = [
- "scripts/validate/changelog.py",
- "scripts/validate/language_structure.py",
- "scripts/validate/license_headers.py",
- "scripts/validate/no_secrets.py",
- "scripts/validate/paths.py",
- "scripts/validate/php_syntax.py",
- "scripts/validate/tabs.py",
- "scripts/validate/version_alignment.py",
- "scripts/validate/version_hierarchy.py",
+ "scripts/validate/changelog.py",
+ "scripts/validate/language_structure.py",
+ "scripts/validate/license_headers.py",
+ "scripts/validate/no_secrets.py",
+ "scripts/validate/paths.py",
+ "scripts/validate/php_syntax.py",
+ "scripts/validate/tabs.py",
+ "scripts/validate/version_alignment.py",
+ "scripts/validate/version_hierarchy.py",
]
def run_validation_script(script_path: str) -> Tuple[bool, str]:
- """
- Run a validation script.
-
- Args:
- script_path: Path to script
-
- Returns:
- Tuple of (success, output)
- """
- script = Path(script_path)
-
- if not script.exists():
- return (False, f"Script not found: {script_path}")
-
- try:
- result = subprocess.run(
- ["python3", str(script)],
- capture_output=True,
- text=True,
- check=False
- )
-
- output = result.stdout + result.stderr
- success = result.returncode == 0
-
- return (success, output)
- except Exception as e:
- return (False, f"Error running script: {e}")
+ """
+ Run a validation script.
+
+ Args:
+ script_path: Path to script
+
+ Returns:
+ Tuple of (success, output)
+ """
+ script = Path(script_path)
+
+ if not script.exists():
+ return (False, f"Script not found: {script_path}")
+
+ try:
+ result = subprocess.run(
+ ["python3", str(script)],
+ capture_output=True,
+ text=True,
+ check=False
+ )
+
+ output = result.stdout + result.stderr
+ success = result.returncode == 0
+
+ return (success, output)
+ except Exception as e:
+ return (False, f"Error running script: {e}")
def main() -> int:
- """Main entry point."""
- common.log_section("Running All Validations")
- print()
-
- total_passed = 0
- total_failed = 0
- total_skipped = 0
-
- # Run required scripts
- common.log_info("=== Required Validations ===")
- print()
-
- for script in REQUIRED_SCRIPTS:
- script_name = Path(script).name
- common.log_info(f"Running {script_name}...")
-
- success, output = run_validation_script(script)
-
- if success:
- common.log_success(f"✓ {script_name} passed")
- total_passed += 1
- else:
- common.log_error(f"✗ {script_name} FAILED")
- if output:
- print(output)
- total_failed += 1
- print()
-
- # Run optional scripts
- common.log_info("=== Optional Validations ===")
- print()
-
- for script in OPTIONAL_SCRIPTS:
- script_name = Path(script).name
-
- if not Path(script).exists():
- common.log_warn(f"⊘ {script_name} not found (skipped)")
- total_skipped += 1
- continue
-
- common.log_info(f"Running {script_name}...")
-
- success, output = run_validation_script(script)
-
- if success:
- common.log_success(f"✓ {script_name} passed")
- total_passed += 1
- else:
- common.log_warn(f"⚠ {script_name} failed (optional)")
- if output:
- print(output[:500]) # Limit output
- total_failed += 1
- print()
-
- # Summary
- common.log_section("Validation Summary")
- common.log_kv("Total Passed", str(total_passed))
- common.log_kv("Total Failed", str(total_failed))
- common.log_kv("Total Skipped", str(total_skipped))
- print()
-
- # Check if any required validations failed
- required_failed = sum(
- 1 for script in REQUIRED_SCRIPTS
- if Path(script).exists() and not run_validation_script(script)[0]
- )
-
- if required_failed > 0:
- common.log_error(f"{required_failed} required validation(s) failed")
- return 1
-
- common.log_success("All required validations passed!")
-
- if total_failed > 0:
- common.log_warn(f"{total_failed} optional validation(s) failed")
-
- return 0
+ """Main entry point."""
+ common.log_section("Running All Validations")
+ print()
+
+ total_passed = 0
+ total_failed = 0
+ total_skipped = 0
+
+ # Run required scripts
+ common.log_info("=== Required Validations ===")
+ print()
+
+ for script in REQUIRED_SCRIPTS:
+ script_name = Path(script).name
+ common.log_info(f"Running {script_name}...")
+
+ success, output = run_validation_script(script)
+
+ if success:
+ common.log_success(f"✓ {script_name} passed")
+ total_passed += 1
+ else:
+ common.log_error(f"✗ {script_name} FAILED")
+ if output:
+ print(output)
+ total_failed += 1
+ print()
+
+ # Run optional scripts
+ common.log_info("=== Optional Validations ===")
+ print()
+
+ for script in OPTIONAL_SCRIPTS:
+ script_name = Path(script).name
+
+ if not Path(script).exists():
+ common.log_warn(f"⊘ {script_name} not found (skipped)")
+ total_skipped += 1
+ continue
+
+ common.log_info(f"Running {script_name}...")
+
+ success, output = run_validation_script(script)
+
+ if success:
+ common.log_success(f"✓ {script_name} passed")
+ total_passed += 1
+ else:
+ common.log_warn(f"⚠ {script_name} failed (optional)")
+ if output:
+ print(output[:500]) # Limit output
+ total_failed += 1
+ print()
+
+ # Summary
+ common.log_section("Validation Summary")
+ common.log_kv("Total Passed", str(total_passed))
+ common.log_kv("Total Failed", str(total_failed))
+ common.log_kv("Total Skipped", str(total_skipped))
+ print()
+
+    # Check if any required validations failed.  NOTE(review): this sum re-runs every required script a second time and skips missing scripts (which the first pass counted as failures); prefer tracking required failures in the first loop.
+ required_failed = sum(
+ 1 for script in REQUIRED_SCRIPTS
+ if Path(script).exists() and not run_validation_script(script)[0]
+ )
+
+ if required_failed > 0:
+ common.log_error(f"{required_failed} required validation(s) failed")
+ return 1
+
+ common.log_success("All required validations passed!")
+
+ if total_failed > 0:
+ common.log_warn(f"{total_failed} optional validation(s) failed")
+
+ return 0
if __name__ == "__main__":
- sys.exit(main())
+ sys.exit(main())
diff --git a/scripts/validate/workflows.py b/scripts/validate/workflows.py
index 9473a9f..99a9a83 100755
--- a/scripts/validate/workflows.py
+++ b/scripts/validate/workflows.py
@@ -33,185 +33,184 @@ NOTE: Checks YAML syntax, structure, and best practices
import sys
from pathlib import Path
-from typing import List, Tuple
# Add lib directory to path
sys.path.insert(0, str(Path(__file__).parent.parent / "lib"))
try:
- import common
+ import common
except ImportError:
- print("ERROR: Cannot import required libraries", file=sys.stderr)
- sys.exit(1)
+ print("ERROR: Cannot import required libraries", file=sys.stderr)
+ sys.exit(1)
def validate_yaml_syntax(filepath: Path) -> bool:
- """
- Validate YAML syntax of a workflow file.
-
- Args:
- filepath: Path to workflow file
-
- Returns:
- True if valid
- """
- try:
- import yaml
- except ImportError:
- common.log_warn("PyYAML module not installed. Install with: pip3 install pyyaml")
- return True # Skip validation if yaml not available
-
- try:
- with open(filepath, 'r', encoding='utf-8') as f:
- yaml.safe_load(f)
- print(f"✓ Valid YAML: {filepath.name}")
- return True
- except yaml.YAMLError as e:
- print(f"✗ YAML Error in {filepath.name}: {e}", file=sys.stderr)
- return False
- except Exception as e:
- print(f"✗ Error reading {filepath.name}: {e}", file=sys.stderr)
- return False
+ """
+ Validate YAML syntax of a workflow file.
+
+ Args:
+ filepath: Path to workflow file
+
+ Returns:
+ True if valid
+ """
+ try:
+ import yaml
+ except ImportError:
+ common.log_warn("PyYAML module not installed. Install with: pip3 install pyyaml")
+ return True # Skip validation if yaml not available
+
+ try:
+ with open(filepath, 'r', encoding='utf-8') as f:
+ yaml.safe_load(f)
+ print(f"✓ Valid YAML: {filepath.name}")
+ return True
+ except yaml.YAMLError as e:
+ print(f"✗ YAML Error in {filepath.name}: {e}", file=sys.stderr)
+ return False
+ except Exception as e:
+ print(f"✗ Error reading {filepath.name}: {e}", file=sys.stderr)
+ return False
def check_no_tabs(filepath: Path) -> bool:
- """
- Check that file contains no tab characters.
-
- Args:
- filepath: Path to file
-
- Returns:
- True if no tabs found
- """
- try:
- with open(filepath, 'r', encoding='utf-8') as f:
- content = f.read()
- if '\t' in content:
- common.log_error(f"✗ File contains tab characters: {filepath.name}")
- return False
- except Exception as e:
- common.log_warn(f"Could not read {filepath}: {e}")
- return False
-
- return True
+ """
+ Check that file contains no tab characters.
+
+ Args:
+ filepath: Path to file
+
+ Returns:
+ True if no tabs found
+ """
+ try:
+ with open(filepath, 'r', encoding='utf-8') as f:
+ content = f.read()
+ if '\t' in content:
+ common.log_error(f"✗ File contains tab characters: {filepath.name}")
+ return False
+ except Exception as e:
+ common.log_warn(f"Could not read {filepath}: {e}")
+ return False
+
+ return True
def check_workflow_structure(filepath: Path) -> bool:
- """
- Check workflow file structure for required keys.
-
- Args:
- filepath: Path to workflow file
-
- Returns:
- True if structure is valid
- """
- errors = 0
-
- try:
- with open(filepath, 'r', encoding='utf-8') as f:
- content = f.read()
-
- # Check for required top-level keys
- if 'name:' not in content and not content.startswith('name:'):
- common.log_warn(f"Missing 'name:' in {filepath.name}")
-
- if 'on:' not in content and not content.startswith('on:'):
- common.log_error(f"✗ Missing 'on:' trigger in {filepath.name}")
- errors += 1
-
- if 'jobs:' not in content and not content.startswith('jobs:'):
- common.log_error(f"✗ Missing 'jobs:' in {filepath.name}")
- errors += 1
-
- except Exception as e:
- common.log_error(f"Error reading {filepath}: {e}")
- return False
-
- return errors == 0
+ """
+ Check workflow file structure for required keys.
+
+ Args:
+ filepath: Path to workflow file
+
+ Returns:
+ True if structure is valid
+ """
+ errors = 0
+
+ try:
+ with open(filepath, 'r', encoding='utf-8') as f:
+ content = f.read()
+
+ # Check for required top-level keys
+ if 'name:' not in content and not content.startswith('name:'):
+ common.log_warn(f"Missing 'name:' in {filepath.name}")
+
+ if 'on:' not in content and not content.startswith('on:'):
+ common.log_error(f"✗ Missing 'on:' trigger in {filepath.name}")
+ errors += 1
+
+ if 'jobs:' not in content and not content.startswith('jobs:'):
+ common.log_error(f"✗ Missing 'jobs:' in {filepath.name}")
+ errors += 1
+
+ except Exception as e:
+ common.log_error(f"Error reading {filepath}: {e}")
+ return False
+
+ return errors == 0
def validate_workflow_file(filepath: Path) -> bool:
- """
- Validate a single workflow file.
-
- Args:
- filepath: Path to workflow file
-
- Returns:
- True if valid
- """
- common.log_info(f"Validating: {filepath.name}")
-
- errors = 0
-
- # Check YAML syntax
- if not validate_yaml_syntax(filepath):
- errors += 1
-
- # Check for tabs
- if not check_no_tabs(filepath):
- errors += 1
-
- # Check structure
- if not check_workflow_structure(filepath):
- errors += 1
-
- if errors == 0:
- common.log_info(f"✓ {filepath.name} passed all checks")
- return True
- else:
- common.log_error(f"✗ {filepath.name} failed {errors} check(s)")
- return False
+ """
+ Validate a single workflow file.
+
+ Args:
+ filepath: Path to workflow file
+
+ Returns:
+ True if valid
+ """
+ common.log_info(f"Validating: {filepath.name}")
+
+ errors = 0
+
+ # Check YAML syntax
+ if not validate_yaml_syntax(filepath):
+ errors += 1
+
+ # Check for tabs
+ if not check_no_tabs(filepath):
+ errors += 1
+
+ # Check structure
+ if not check_workflow_structure(filepath):
+ errors += 1
+
+ if errors == 0:
+ common.log_info(f"✓ {filepath.name} passed all checks")
+ return True
+ else:
+ common.log_error(f"✗ {filepath.name} failed {errors} check(s)")
+ return False
def main() -> int:
- """Main entry point."""
- common.log_info("GitHub Actions Workflow Validation")
- common.log_info("===================================")
- print()
-
- workflows_dir = Path(".github/workflows")
-
- if not workflows_dir.is_dir():
- common.log_error(f"Workflows directory not found: {workflows_dir}")
- return 1
-
- # Find all workflow files
- workflow_files = []
- for pattern in ["*.yml", "*.yaml"]:
- workflow_files.extend(workflows_dir.glob(pattern))
-
- if not workflow_files:
- common.log_warn("No workflow files found")
- return 0
-
- total = len(workflow_files)
- passed = 0
- failed = 0
-
- for workflow in workflow_files:
- if validate_workflow_file(workflow):
- passed += 1
- else:
- failed += 1
- print()
-
- common.log_info("===================================")
- common.log_info("Summary:")
- common.log_info(f" Total workflows: {total}")
- common.log_info(f" Passed: {passed}")
- common.log_info(f" Failed: {failed}")
- common.log_info("===================================")
-
- if failed > 0:
- common.log_error("Workflow validation failed")
- return 1
-
- common.log_info("All workflows validated successfully")
- return 0
+ """Main entry point."""
+ common.log_info("GitHub Actions Workflow Validation")
+ common.log_info("===================================")
+ print()
+
+ workflows_dir = Path(".github/workflows")
+
+ if not workflows_dir.is_dir():
+ common.log_error(f"Workflows directory not found: {workflows_dir}")
+ return 1
+
+ # Find all workflow files
+ workflow_files = []
+ for pattern in ["*.yml", "*.yaml"]:
+ workflow_files.extend(workflows_dir.glob(pattern))
+
+ if not workflow_files:
+ common.log_warn("No workflow files found")
+ return 0
+
+ total = len(workflow_files)
+ passed = 0
+ failed = 0
+
+ for workflow in workflow_files:
+ if validate_workflow_file(workflow):
+ passed += 1
+ else:
+ failed += 1
+ print()
+
+ common.log_info("===================================")
+ common.log_info("Summary:")
+ common.log_info(f" Total workflows: {total}")
+ common.log_info(f" Passed: {passed}")
+ common.log_info(f" Failed: {failed}")
+ common.log_info("===================================")
+
+ if failed > 0:
+ common.log_error("Workflow validation failed")
+ return 1
+
+ common.log_info("All workflows validated successfully")
+ return 0
if __name__ == "__main__":
- sys.exit(main())
+ sys.exit(main())
diff --git a/src/media/js/gtm.js b/src/media/js/gtm.js
index e490d11..5eff752 100644
--- a/src/media/js/gtm.js
+++ b/src/media/js/gtm.js
@@ -19,322 +19,320 @@
BRIEF: Safe, configurable Google Tag Manager loader for Moko-Cassiopeia.
PATH: ./media/templates/site/moko-cassiopeia/js/gtm.js
NOTE: Place the