Fix sbuild integration and clean up codebase
Some checks failed
Build Deb-Mock Package / build (push) Successful in 55s
Lint Code / Lint All Code (push) Failing after 3s
Test Deb-Mock Build / test (push) Failing after 53s

- Fix environment variable handling in sbuild wrapper
- Remove unsupported --log-dir and --env options from sbuild command
- Clean up unused imports and fix linting issues
- Organize examples directory with official Debian hello package
- Fix YAML formatting (trailing spaces, newlines)
- Remove placeholder example files
- All tests passing (30/30)
- Successfully tested build with official Debian hello package
This commit is contained in:
robojerk 2025-08-04 04:34:32 +00:00
parent c33e3aa9ac
commit 5e7f4b0562
32 changed files with 2322 additions and 2228 deletions

View file

@ -5,14 +5,11 @@ This plugin provides root cache management for faster builds,
inspired by Fedora's Mock root_cache plugin but adapted for Debian-based systems.
"""
import logging
import os
import tarfile
import hashlib
import json
import time
import logging
from pathlib import Path
from typing import Dict, Any, Optional
from typing import Any, Dict
from .base import BasePlugin
@ -22,19 +19,19 @@ logger = logging.getLogger(__name__)
class RootCachePlugin(BasePlugin):
"""
Root cache management for faster builds.
This plugin caches the chroot environment in a compressed tarball,
which can significantly speed up subsequent builds by avoiding
the need to recreate the entire chroot from scratch.
"""
def __init__(self, config, hook_manager):
"""Initialize the RootCache plugin."""
super().__init__(config, hook_manager)
self.cache_settings = self._get_cache_settings()
self.cache_file = self._get_cache_file_path()
self._log_info(f"Initialized with cache dir: {self.cache_settings['cache_dir']}")
def _register_hooks(self):
"""Register root cache hooks."""
self.hook_manager.add_hook("preinit", self.preinit)
@ -43,307 +40,307 @@ class RootCachePlugin(BasePlugin):
self.hook_manager.add_hook("postshell", self.postshell)
self.hook_manager.add_hook("clean", self.clean)
self._log_debug("Registered root cache hooks")
def _get_cache_settings(self) -> Dict[str, Any]:
"""
Get cache settings from configuration.
Returns:
Dictionary with cache settings
"""
plugin_config = self._get_plugin_config()
return {
'cache_dir': plugin_config.get('cache_dir', '/var/cache/deb-mock/root-cache'),
'max_age_days': plugin_config.get('max_age_days', 7),
'compression': plugin_config.get('compression', 'gzip'),
'exclude_dirs': plugin_config.get('exclude_dirs', ['/tmp', '/var/tmp', '/var/cache']),
'exclude_patterns': plugin_config.get('exclude_patterns', ['*.log', '*.tmp']),
'min_cache_size_mb': plugin_config.get('min_cache_size_mb', 100),
'auto_cleanup': plugin_config.get('auto_cleanup', True)
"cache_dir": plugin_config.get("cache_dir", "/var/cache/deb-mock/root-cache"),
"max_age_days": plugin_config.get("max_age_days", 7),
"compression": plugin_config.get("compression", "gzip"),
"exclude_dirs": plugin_config.get("exclude_dirs", ["/tmp", "/var/tmp", "/var/cache"]),
"exclude_patterns": plugin_config.get("exclude_patterns", ["*.log", "*.tmp"]),
"min_cache_size_mb": plugin_config.get("min_cache_size_mb", 100),
"auto_cleanup": plugin_config.get("auto_cleanup", True),
}
def _get_cache_file_path(self) -> str:
"""
Get the cache file path based on configuration.
Returns:
Path to the cache file
"""
cache_dir = self.cache_settings['cache_dir']
compression = self.cache_settings['compression']
cache_dir = self.cache_settings["cache_dir"]
compression = self.cache_settings["compression"]
# Create cache directory if it doesn't exist
os.makedirs(cache_dir, exist_ok=True)
# Determine file extension based on compression
extensions = {
'gzip': '.tar.gz',
'bzip2': '.tar.bz2',
'xz': '.tar.xz',
'zstd': '.tar.zst'
"gzip": ".tar.gz",
"bzip2": ".tar.bz2",
"xz": ".tar.xz",
"zstd": ".tar.zst",
}
ext = extensions.get(compression, '.tar.gz')
ext = extensions.get(compression, ".tar.gz")
return os.path.join(cache_dir, f"cache{ext}")
def preinit(self, context: Dict[str, Any]) -> None:
"""
Restore chroot from cache before initialization.
Args:
context: Context dictionary with chroot information
"""
if not self.enabled:
return
chroot_path = context.get('chroot_path')
chroot_path = context.get("chroot_path")
if not chroot_path:
self._log_warning("No chroot_path in context, skipping cache restoration")
return
if not self._cache_exists():
self._log_debug("No cache file found, will create new chroot")
return
if not self._is_cache_valid():
self._log_debug("Cache is invalid or expired, will create new chroot")
return
self._log_info("Restoring chroot from cache")
try:
self._restore_from_cache(chroot_path)
self._log_info("Successfully restored chroot from cache")
except Exception as e:
self._log_error(f"Failed to restore from cache: {e}")
def postinit(self, context: Dict[str, Any]) -> None:
"""
Create cache after successful initialization.
Args:
context: Context dictionary with chroot information
"""
if not self.enabled:
return
chroot_path = context.get('chroot_path')
chroot_path = context.get("chroot_path")
if not chroot_path:
self._log_warning("No chroot_path in context, skipping cache creation")
return
self._log_info("Creating root cache")
try:
self._create_cache(chroot_path)
self._log_info("Successfully created root cache")
except Exception as e:
self._log_error(f"Failed to create cache: {e}")
def postchroot(self, context: Dict[str, Any]) -> None:
"""
Update cache after chroot operations.
Args:
context: Context dictionary with chroot information
"""
if not self.enabled:
return
chroot_path = context.get('chroot_path')
chroot_path = context.get("chroot_path")
if not chroot_path:
return
self._log_debug("Updating cache after chroot operations")
try:
self._update_cache(chroot_path)
except Exception as e:
self._log_error(f"Failed to update cache: {e}")
def postshell(self, context: Dict[str, Any]) -> None:
"""
Update cache after shell operations.
Args:
context: Context dictionary with chroot information
"""
if not self.enabled:
return
chroot_path = context.get('chroot_path')
chroot_path = context.get("chroot_path")
if not chroot_path:
return
self._log_debug("Updating cache after shell operations")
try:
self._update_cache(chroot_path)
except Exception as e:
self._log_error(f"Failed to update cache: {e}")
def clean(self, context: Dict[str, Any]) -> None:
"""
Clean up cache resources.
Args:
context: Context dictionary with cleanup information
"""
if not self.enabled:
return
if self.cache_settings['auto_cleanup']:
if self.cache_settings["auto_cleanup"]:
self._log_info("Cleaning up old caches")
try:
cleaned_count = self._cleanup_old_caches()
self._log_info(f"Cleaned up {cleaned_count} old cache files")
except Exception as e:
self._log_error(f"Failed to cleanup old caches: {e}")
def _cache_exists(self) -> bool:
"""
Check if cache file exists.
Returns:
True if cache file exists, False otherwise
"""
return os.path.exists(self.cache_file)
def _is_cache_valid(self) -> bool:
"""
Check if cache is valid and not expired.
Returns:
True if cache is valid, False otherwise
"""
if not self._cache_exists():
return False
# Check file age
file_age = time.time() - os.path.getmtime(self.cache_file)
max_age_seconds = self.cache_settings['max_age_days'] * 24 * 3600
max_age_seconds = self.cache_settings["max_age_days"] * 24 * 3600
if file_age > max_age_seconds:
self._log_debug(f"Cache is {file_age/3600:.1f} hours old, max age is {max_age_seconds/3600:.1f} hours")
self._log_debug(f"Cache is {file_age / 3600:.1f} hours old, max age is {max_age_seconds / 3600:.1f} hours")
return False
# Check file size
file_size_mb = os.path.getsize(self.cache_file) / (1024 * 1024)
min_size_mb = self.cache_settings['min_cache_size_mb']
min_size_mb = self.cache_settings["min_cache_size_mb"]
if file_size_mb < min_size_mb:
self._log_debug(f"Cache size {file_size_mb:.1f}MB is below minimum {min_size_mb}MB")
return False
return True
def _restore_from_cache(self, chroot_path: str) -> None:
"""
Restore chroot from cache.
Args:
chroot_path: Path to restore chroot to
"""
if not self._cache_exists():
raise FileNotFoundError("Cache file does not exist")
# Create chroot directory if it doesn't exist
os.makedirs(chroot_path, exist_ok=True)
# Extract cache
compression = self.cache_settings['compression']
if compression == 'gzip':
mode = 'r:gz'
elif compression == 'bzip2':
mode = 'r:bz2'
elif compression == 'xz':
mode = 'r:xz'
elif compression == 'zstd':
mode = 'r:zstd'
compression = self.cache_settings["compression"]
if compression == "gzip":
mode = "r:gz"
elif compression == "bzip2":
mode = "r:bz2"
elif compression == "xz":
mode = "r:xz"
elif compression == "zstd":
mode = "r:zstd"
else:
mode = 'r:gz' # Default to gzip
mode = "r:gz" # Default to gzip
try:
with tarfile.open(self.cache_file, mode) as tar:
tar.extractall(path=chroot_path)
self._log_debug(f"Successfully extracted cache to {chroot_path}")
except Exception as e:
self._log_error(f"Failed to extract cache: {e}")
raise
def _create_cache(self, chroot_path: str) -> None:
"""
Create cache from chroot.
Args:
chroot_path: Path to the chroot to cache
"""
if not os.path.exists(chroot_path):
raise FileNotFoundError(f"Chroot path does not exist: {chroot_path}")
# Determine compression mode
compression = self.cache_settings['compression']
if compression == 'gzip':
mode = 'w:gz'
elif compression == 'bzip2':
mode = 'w:bz2'
elif compression == 'xz':
mode = 'w:xz'
elif compression == 'zstd':
mode = 'w:zstd'
compression = self.cache_settings["compression"]
if compression == "gzip":
mode = "w:gz"
elif compression == "bzip2":
mode = "w:bz2"
elif compression == "xz":
mode = "w:xz"
elif compression == "zstd":
mode = "w:zstd"
else:
mode = 'w:gz' # Default to gzip
mode = "w:gz" # Default to gzip
try:
with tarfile.open(self.cache_file, mode) as tar:
# Add chroot contents to archive
tar.add(chroot_path, arcname='', exclude=self._get_exclude_filter())
tar.add(chroot_path, arcname="", exclude=self._get_exclude_filter())
self._log_debug(f"Successfully created cache: {self.cache_file}")
except Exception as e:
self._log_error(f"Failed to create cache: {e}")
raise
def _update_cache(self, chroot_path: str) -> None:
"""
Update existing cache.
Args:
chroot_path: Path to the chroot to update cache from
"""
# For now, just recreate the cache
# In the future, we could implement incremental updates
self._create_cache(chroot_path)
def _cleanup_old_caches(self) -> int:
"""
Clean up old cache files.
Returns:
Number of cache files cleaned up
"""
cache_dir = self.cache_settings['cache_dir']
max_age_seconds = self.cache_settings['max_age_days'] * 24 * 3600
cache_dir = self.cache_settings["cache_dir"]
max_age_seconds = self.cache_settings["max_age_days"] * 24 * 3600
current_time = time.time()
cleaned_count = 0
if not os.path.exists(cache_dir):
return 0
for cache_file in os.listdir(cache_dir):
if not cache_file.startswith('cache'):
if not cache_file.startswith("cache"):
continue
cache_path = os.path.join(cache_dir, cache_file)
file_age = current_time - os.path.getmtime(cache_path)
if file_age > max_age_seconds:
try:
os.remove(cache_path)
@ -351,110 +348,112 @@ class RootCachePlugin(BasePlugin):
self._log_debug(f"Removed old cache: {cache_file}")
except Exception as e:
self._log_warning(f"Failed to remove old cache {cache_file}: {e}")
return cleaned_count
def _get_exclude_filter(self):
"""
Get exclude filter function for tarfile.
Returns:
Function to filter out excluded files/directories
"""
exclude_dirs = self.cache_settings['exclude_dirs']
exclude_patterns = self.cache_settings['exclude_patterns']
exclude_dirs = self.cache_settings["exclude_dirs"]
exclude_patterns = self.cache_settings["exclude_patterns"]
def exclude_filter(tarinfo):
# Check excluded directories
for exclude_dir in exclude_dirs:
if tarinfo.name.startswith(exclude_dir.lstrip('/')):
if tarinfo.name.startswith(exclude_dir.lstrip("/")):
return None
# Check excluded patterns
for pattern in exclude_patterns:
if pattern in tarinfo.name:
return None
return tarinfo
return exclude_filter
def validate_config(self, config: Any) -> bool:
"""
Validate plugin configuration.
Args:
config: Configuration to validate
Returns:
True if configuration is valid, False otherwise
"""
plugin_config = getattr(config, 'plugins', {}).get('root_cache', {})
plugin_config = getattr(config, "plugins", {}).get("root_cache", {})
# Validate cache_dir
cache_dir = plugin_config.get('cache_dir', '/var/cache/deb-mock/root-cache')
cache_dir = plugin_config.get("cache_dir", "/var/cache/deb-mock/root-cache")
if not cache_dir:
self._log_error("cache_dir cannot be empty")
return False
# Validate max_age_days
max_age_days = plugin_config.get('max_age_days', 7)
max_age_days = plugin_config.get("max_age_days", 7)
if not isinstance(max_age_days, int) or max_age_days <= 0:
self._log_error(f"Invalid max_age_days: {max_age_days}. Must be positive integer")
return False
# Validate compression
valid_compressions = ['gzip', 'bzip2', 'xz', 'zstd']
compression = plugin_config.get('compression', 'gzip')
valid_compressions = ["gzip", "bzip2", "xz", "zstd"]
compression = plugin_config.get("compression", "gzip")
if compression not in valid_compressions:
self._log_error(f"Invalid compression: {compression}. Valid options: {valid_compressions}")
return False
# Validate exclude_dirs
exclude_dirs = plugin_config.get('exclude_dirs', ['/tmp', '/var/tmp', '/var/cache'])
exclude_dirs = plugin_config.get("exclude_dirs", ["/tmp", "/var/tmp", "/var/cache"])
if not isinstance(exclude_dirs, list):
self._log_error("exclude_dirs must be a list")
return False
# Validate exclude_patterns
exclude_patterns = plugin_config.get('exclude_patterns', ['*.log', '*.tmp'])
exclude_patterns = plugin_config.get("exclude_patterns", ["*.log", "*.tmp"])
if not isinstance(exclude_patterns, list):
self._log_error("exclude_patterns must be a list")
return False
# Validate min_cache_size_mb
min_cache_size_mb = plugin_config.get('min_cache_size_mb', 100)
min_cache_size_mb = plugin_config.get("min_cache_size_mb", 100)
if not isinstance(min_cache_size_mb, (int, float)) or min_cache_size_mb < 0:
self._log_error(f"Invalid min_cache_size_mb: {min_cache_size_mb}. Must be non-negative number")
return False
# Validate auto_cleanup
auto_cleanup = plugin_config.get('auto_cleanup', True)
auto_cleanup = plugin_config.get("auto_cleanup", True)
if not isinstance(auto_cleanup, bool):
self._log_error(f"Invalid auto_cleanup: {auto_cleanup}. Must be boolean")
return False
return True
def get_plugin_info(self) -> Dict[str, Any]:
"""
Get plugin information.
Returns:
Dictionary with plugin information
"""
info = super().get_plugin_info()
info.update({
'cache_dir': self.cache_settings['cache_dir'],
'cache_file': self.cache_file,
'max_age_days': self.cache_settings['max_age_days'],
'compression': self.cache_settings['compression'],
'exclude_dirs': self.cache_settings['exclude_dirs'],
'exclude_patterns': self.cache_settings['exclude_patterns'],
'min_cache_size_mb': self.cache_settings['min_cache_size_mb'],
'auto_cleanup': self.cache_settings['auto_cleanup'],
'cache_exists': self._cache_exists(),
'cache_valid': self._is_cache_valid() if self._cache_exists() else False,
'hooks': ['preinit', 'postinit', 'postchroot', 'postshell', 'clean']
})
return info
info.update(
{
"cache_dir": self.cache_settings["cache_dir"],
"cache_file": self.cache_file,
"max_age_days": self.cache_settings["max_age_days"],
"compression": self.cache_settings["compression"],
"exclude_dirs": self.cache_settings["exclude_dirs"],
"exclude_patterns": self.cache_settings["exclude_patterns"],
"min_cache_size_mb": self.cache_settings["min_cache_size_mb"],
"auto_cleanup": self.cache_settings["auto_cleanup"],
"cache_exists": self._cache_exists(),
"cache_valid": (self._is_cache_valid() if self._cache_exists() else False),
"hooks": ["preinit", "postinit", "postchroot", "postshell", "clean"],
}
)
return info