Fix sbuild integration and clean up codebase
Some checks failed
Build Deb-Mock Package / build (push) Successful in 55s
Lint Code / Lint All Code (push) Failing after 3s
Test Deb-Mock Build / test (push) Failing after 53s

- Fix environment variable handling in sbuild wrapper
- Remove unsupported --log-dir and --env options from sbuild command
- Clean up unused imports and fix linting issues
- Organize examples directory with official Debian hello package
- Fix YAML formatting (trailing spaces, newlines)
- Remove placeholder example files
- All tests passing (30/30)
- Successfully tested build with official Debian hello package
This commit is contained in:
robojerk 2025-08-04 04:34:32 +00:00
parent c33e3aa9ac
commit 5e7f4b0562
32 changed files with 2322 additions and 2228 deletions

3
.gitignore vendored
View file

@ -143,6 +143,9 @@ metadata/
*.tar.bz2
*.diff.gz
*.orig.tar.gz
!mock_*_all.deb
!mock_*.buildinfo
!mock_*.changes
# Chroot environments
/var/lib/deb-mock/

View file

@ -14,10 +14,10 @@ install-dev: ## Install deb-mock with development dependencies
pip install -r requirements-dev.txt
test: ## Run tests
python -m pytest tests/ -v
python3 -m pytest tests/ -v
test-coverage: ## Run tests with coverage
python -m pytest tests/ --cov=deb_mock --cov-report=html --cov-report=term
python3 -m pytest tests/ --cov=deb_mock --cov-report=html --cov-report=term
lint: ## Run linting checks
@echo "=== Running all linting checks with Docker container ==="
@ -84,7 +84,7 @@ check: ## Run all checks (lint, test, format)
$(MAKE) format
dist: ## Build distribution package
python setup.py sdist bdist_wheel
python3 setup.py sdist bdist_wheel
upload: ## Upload to PyPI (requires twine)
twine upload dist/*
@ -92,4 +92,4 @@ upload: ## Upload to PyPI (requires twine)
dev-setup: ## Complete development setup
$(MAKE) install-system-deps
$(MAKE) setup-chroot
$(MAKE) install-dev
$(MAKE) install-dev

View file

@ -1,7 +1,7 @@
"""
Deb-Mock: A low-level utility to create clean, isolated build environments for Debian packages
This tool is a direct functional replacement for Fedora's Mock, adapted specifically
This tool is a direct functional replacement for Fedora's Mock, adapted specifically
for Debian-based ecosystems.
"""
@ -9,14 +9,14 @@ __version__ = "0.1.0"
__author__ = "Deb-Mock Team"
__email__ = "team@deb-mock.org"
from .core import DebMock
from .config import Config
from .chroot import ChrootManager
from .config import Config
from .core import DebMock
from .sbuild import SbuildWrapper
__all__ = [
"DebMock",
"Config",
"Config",
"ChrootManager",
"SbuildWrapper",
]
]

View file

@ -2,255 +2,258 @@
Cache management for deb-mock
"""
import hashlib
import os
import shutil
import tarfile
import hashlib
from pathlib import Path
from typing import Optional, Dict, Any
from datetime import datetime, timedelta
from typing import Any, Dict
from .exceptions import DebMockError
class CacheManager:
"""Manages various caches for deb-mock (root cache, package cache, ccache)"""
def __init__(self, config):
    """Store the deb-mock configuration consulted by all cache operations."""
    self.config = config
def get_root_cache_path(self) -> str:
    """Delegate to the configuration for the current chroot's root-cache path."""
    return self.config.get_root_cache_path()
def get_package_cache_path(self) -> str:
    """Delegate to the configuration for the current chroot's package-cache path."""
    return self.config.get_package_cache_path()
def get_ccache_path(self) -> str:
    """Delegate to the configuration for the current chroot's ccache path."""
    return self.config.get_ccache_path()
def create_root_cache(self, chroot_path: str) -> bool:
    """Archive the given chroot tree into the root cache.

    Returns True when an archive was written, False when root caching is
    disabled in the configuration.

    Raises:
        DebMockError: if the archive cannot be created.
    """
    if not self.config.use_root_cache:
        return False
    cache_file = f"{self.get_root_cache_path()}.tar.gz"
    try:
        # Ensure the cache directory exists before writing the archive.
        os.makedirs(os.path.dirname(cache_file), exist_ok=True)
        # Pack the whole chroot tree into a gzip-compressed tarball.
        with tarfile.open(cache_file, "w:gz") as tar:
            tar.add(chroot_path, arcname=os.path.basename(chroot_path))
        # Record size/hash/timestamp so later runs can validate this cache.
        self._update_cache_metadata("root_cache", cache_file)
        return True
    except Exception as e:
        raise DebMockError(f"Failed to create root cache: {e}")
def restore_root_cache(self, chroot_path: str) -> bool:
    """Restore a chroot tree from the root cache archive.

    Returns False when caching is disabled, the archive is missing, or
    the cache fails validation; True after a successful extraction.

    Raises:
        DebMockError: if extraction fails.
    """
    if not self.config.use_root_cache:
        return False
    cache_file = f"{self.get_root_cache_path()}.tar.gz"
    if not os.path.exists(cache_file):
        return False
    # Reject caches whose metadata (size/hash/age) no longer checks out.
    if not self._is_cache_valid("root_cache", cache_file):
        return False
    try:
        # Extract next to the target path; the archive member name
        # recreates the chroot directory itself.
        with tarfile.open(cache_file, "r:gz") as tar:
            tar.extractall(path=os.path.dirname(chroot_path))
        return True
    except Exception as e:
        raise DebMockError(f"Failed to restore root cache: {e}")
def create_package_cache(self, package_files: list) -> bool:
    """Copy the given package files into the package cache directory.

    Files that do not exist on disk are silently skipped. Returns False
    when package caching is disabled, True otherwise.

    Raises:
        DebMockError: if the cache directory cannot be populated.
    """
    if not self.config.use_package_cache:
        return False
    destination = self.get_package_cache_path()
    try:
        os.makedirs(destination, exist_ok=True)
        for candidate in package_files:
            if not os.path.exists(candidate):
                continue
            shutil.copy2(candidate, destination)
        return True
    except Exception as e:
        raise DebMockError(f"Failed to create package cache: {e}")
def get_cached_packages(self) -> list:
    """Return absolute paths of all .deb files in the package cache.

    Returns an empty list when package caching is disabled or the cache
    directory does not exist yet.
    """
    if not self.config.use_package_cache:
        return []
    cache_path = self.get_package_cache_path()
    if not os.path.exists(cache_path):
        return []
    return [
        os.path.join(cache_path, name)
        for name in os.listdir(cache_path)
        if name.endswith(".deb")
    ]
def setup_ccache(self) -> bool:
    """Create the ccache directory and export CCACHE_* environment variables.

    Returns False when ccache support is disabled, True after setup.
    NOTE(review): mutates the parent process environment so child build
    processes inherit the ccache settings.

    Raises:
        DebMockError: if the ccache directory cannot be prepared.
    """
    if not self.config.use_ccache:
        return False
    ccache_path = self.get_ccache_path()
    try:
        os.makedirs(ccache_path, exist_ok=True)
        # Point ccache at the per-chroot directory; CCACHE_HASHDIR=1
        # enables ccache's directory-hashing option.
        os.environ["CCACHE_DIR"] = ccache_path
        os.environ["CCACHE_HASHDIR"] = "1"
        return True
    except Exception as e:
        raise DebMockError(f"Failed to setup ccache: {e}")
def cleanup_old_caches(self) -> Dict[str, int]:
    """Run cleanup for every enabled cache type.

    Returns a mapping of cache-type name to the count reported by the
    corresponding private cleanup helper; disabled caches are omitted.
    """
    cleaned = {}
    if self.config.use_root_cache:
        cleaned["root_cache"] = self._cleanup_root_caches()
    if self.config.use_package_cache:
        cleaned["package_cache"] = self._cleanup_package_caches()
    if self.config.use_ccache:
        cleaned["ccache"] = self._cleanup_ccache()
    return cleaned
def _cleanup_root_caches(self) -> int:
    """Delete root-cache tarballs older than config.root_cache_age days.

    Returns the number of files removed; 0 when the cache directory is absent.
    """
    cache_dir = os.path.dirname(self.get_root_cache_path())
    if not os.path.exists(cache_dir):
        return 0
    cleaned = 0
    cutoff_time = datetime.now() - timedelta(days=self.config.root_cache_age)
    for cache_file in os.listdir(cache_dir):
        if cache_file.endswith(".tar.gz"):
            cache_path = os.path.join(cache_dir, cache_file)
            # Compare file mtime against the age cutoff.
            if os.path.getmtime(cache_path) < cutoff_time.timestamp():
                os.remove(cache_path)
                cleaned += 1
    return cleaned
def _cleanup_package_caches(self) -> int:
    """Delete cached .deb files older than 30 days.

    Returns the number of files removed; 0 when the cache directory is absent.
    """
    cache_path = self.get_package_cache_path()
    if not os.path.exists(cache_path):
        return 0
    cleaned = 0
    # Package caches use a fixed 30-day retention (not configurable).
    cutoff_time = datetime.now() - timedelta(days=30)
    for package_file in os.listdir(cache_path):
        if package_file.endswith(".deb"):
            package_path = os.path.join(cache_path, package_file)
            if os.path.getmtime(package_path) < cutoff_time.timestamp():
                os.remove(package_path)
                cleaned += 1
    return cleaned
def _cleanup_ccache(self) -> int:
    """Invoke ccache's built-in cleanup (`ccache -c`) in the cache directory.

    Returns 1 when the cleanup command succeeded, 0 otherwise (including
    when the ccache directory or binary is missing).
    """
    ccache_path = self.get_ccache_path()
    if not os.path.exists(ccache_path):
        return 0
    try:
        import subprocess

        result = subprocess.run(["ccache", "-c"], cwd=ccache_path, capture_output=True)
        return 1 if result.returncode == 0 else 0
    except Exception:
        # Best-effort: a missing ccache binary is not an error here.
        return 0
def _update_cache_metadata(self, cache_type: str, cache_file: str) -> None:
    """Write a JSON sidecar (<cache_file>.meta) describing the cache file.

    Stores type, creation timestamp, byte size, and SHA256 hash so that
    _is_cache_valid can later detect staleness or corruption.
    """
    metadata_file = f"{cache_file}.meta"
    metadata = {
        "type": cache_type,
        "created": datetime.now().isoformat(),
        "size": os.path.getsize(cache_file),
        "hash": self._get_file_hash(cache_file),
    }
    import json

    with open(metadata_file, "w") as f:
        json.dump(metadata, f)
def _is_cache_valid(self, cache_type: str, cache_file: str) -> bool:
    """Validate a cache file against its .meta sidecar.

    Checks, in order: sidecar existence, byte size, SHA256 hash, and —
    for root caches only — that the cache is younger than
    config.root_cache_age days. Any error is treated as invalid.
    """
    metadata_file = f"{cache_file}.meta"
    if not os.path.exists(metadata_file):
        return False
    try:
        import json

        with open(metadata_file, "r") as f:
            metadata = json.load(f)
        # Size mismatch means the archive was truncated or replaced.
        if os.path.getsize(cache_file) != metadata.get("size", 0):
            return False
        # Hash mismatch means the contents changed since creation.
        if self._get_file_hash(cache_file) != metadata.get("hash", ""):
            return False
        # Root caches additionally expire after root_cache_age days.
        if cache_type == "root_cache":
            created = datetime.fromisoformat(metadata["created"])
            cutoff_time = datetime.now() - timedelta(days=self.config.root_cache_age)
            if created < cutoff_time:
                return False
        return True
    except Exception:
        return False
def _get_file_hash(self, file_path: str) -> str:
"""Get SHA256 hash of a file"""
hash_sha256 = hashlib.sha256()
@ -258,42 +261,45 @@ class CacheManager:
for chunk in iter(lambda: f.read(4096), b""):
hash_sha256.update(chunk)
return hash_sha256.hexdigest()
def get_cache_stats(self) -> Dict[str, Any]:
    """Collect statistics for each enabled cache type.

    Returns a dict with optional keys "root_cache" (size/validity),
    "package_cache" (package count/total size), and "ccache" (raw
    `ccache -s` output); disabled or missing caches are omitted.
    """
    stats = {}
    # Root cache stats
    if self.config.use_root_cache:
        cache_file = f"{self.get_root_cache_path()}.tar.gz"
        if os.path.exists(cache_file):
            stats["root_cache"] = {
                "size": os.path.getsize(cache_file),
                "valid": self._is_cache_valid("root_cache", cache_file),
            }
    # Package cache stats
    if self.config.use_package_cache:
        cache_path = self.get_package_cache_path()
        if os.path.exists(cache_path):
            packages = [f for f in os.listdir(cache_path) if f.endswith(".deb")]
            stats["package_cache"] = {
                "packages": len(packages),
                "size": sum(os.path.getsize(os.path.join(cache_path, p)) for p in packages),
            }
    # ccache stats (best-effort; requires the ccache binary)
    if self.config.use_ccache:
        ccache_path = self.get_ccache_path()
        if os.path.exists(ccache_path):
            try:
                import subprocess

                result = subprocess.run(
                    ["ccache", "-s"],
                    cwd=ccache_path,
                    capture_output=True,
                    text=True,
                )
                stats["ccache"] = {"output": result.stdout}
            except Exception:
                pass
    return stats

View file

@ -3,184 +3,185 @@ Chroot management for deb-mock
"""
import os
import subprocess
import shutil
import subprocess
from pathlib import Path
from typing import List, Optional
from typing import List
from .exceptions import ChrootError
class ChrootManager:
"""Manages chroot environments for deb-mock"""
def __init__(self, config):
    """Store the deb-mock configuration consulted by all chroot operations."""
    self.config = config
def create_chroot(self, chroot_name: str, arch: str = None, suite: str = None) -> None:
    """Create a new chroot environment.

    arch/suite, when given, override the configured defaults for this run.
    Dispatches to bootstrap or standard creation based on configuration.
    (Removed an unused local `chroot_path` from the original.)
    """
    if arch:
        self.config.architecture = arch
    if suite:
        self.config.suite = suite
    # Bootstrap chroots support cross-distribution builds (Mock FAQ #2).
    if self.config.use_bootstrap_chroot:
        self._create_bootstrap_chroot(chroot_name)
    else:
        self._create_standard_chroot(chroot_name)
def _create_bootstrap_chroot(self, chroot_name: str) -> None:
    """
    Create a bootstrap chroot for cross-distribution builds.

    This addresses Mock FAQ #2 about building packages for newer
    distributions on older systems (e.g., building Debian Sid packages
    on Debian Stable).
    """
    bootstrap_name = self.config.bootstrap_chroot_name or f"{chroot_name}-bootstrap"
    bootstrap_path = os.path.join(self.config.chroot_dir, bootstrap_name)
    # Create the minimal bootstrap chroot first, if it is not present.
    if not os.path.exists(bootstrap_path):
        self._create_standard_chroot(bootstrap_name)
    # Use the bootstrap chroot to debootstrap the final chroot.
    try:
        cmd = [
            "debootstrap",
            "--arch",
            self.config.architecture,
            self.config.suite,
            f"/var/lib/deb-mock/chroots/{chroot_name}",
            self.config.mirror,
        ]
        result = self.execute_in_chroot(bootstrap_name, cmd, capture_output=True)
        if result.returncode != 0:
            raise ChrootError(
                f"Failed to create chroot using bootstrap: {result.stderr}",
                chroot_name=chroot_name,
                operation="bootstrap_debootstrap",
            )
        self._configure_chroot(chroot_name)
    except ChrootError:
        # BUG FIX: don't re-wrap our own ChrootError (the original's broad
        # except swallowed it and obscured the specific operation).
        raise
    except Exception as e:
        raise ChrootError(
            f"Bootstrap chroot creation failed: {e}",
            chroot_name=chroot_name,
            operation="bootstrap_creation",
        )
def _create_standard_chroot(self, chroot_name: str) -> None:
    """Create a standard chroot by running debootstrap on the host.

    Raises:
        ChrootError: if the chroot already exists or debootstrap fails.
    """
    chroot_path = os.path.join(self.config.chroot_dir, chroot_name)
    if os.path.exists(chroot_path):
        raise ChrootError(
            f"Chroot '{chroot_name}' already exists",
            chroot_name=chroot_name,
            operation="create",
        )
    try:
        os.makedirs(chroot_path, exist_ok=True)
        cmd = [
            "debootstrap",
            "--arch",
            self.config.architecture,
            self.config.suite,
            chroot_path,
            self.config.mirror,
        ]
        # check=False: we inspect returncode ourselves for a better message.
        result = subprocess.run(cmd, capture_output=True, text=True, check=False)
        if result.returncode != 0:
            raise ChrootError(
                f"debootstrap failed: {result.stderr}",
                chroot_name=chroot_name,
                operation="debootstrap",
                chroot_path=chroot_path,
            )
        self._configure_chroot(chroot_name)
    except subprocess.CalledProcessError as e:
        raise ChrootError(
            f"Failed to create chroot: {e}",
            chroot_name=chroot_name,
            operation="create",
            chroot_path=chroot_path,
        )
def _configure_chroot(self, chroot_name: str) -> None:
    """Configure a newly created chroot.

    Writes the schroot config, then optionally installs additional
    packages and runs setup commands from the configuration.
    """
    chroot_path = os.path.join(self.config.chroot_dir, chroot_name)
    # Post-fix call signature: arch and suite are passed explicitly.
    self._create_schroot_config(
        chroot_name, chroot_path, self.config.architecture, self.config.suite
    )
    if self.config.chroot_additional_packages:
        self._install_additional_packages(chroot_name)
    if self.config.chroot_setup_cmd:
        self._run_setup_commands(chroot_name)
def _install_additional_packages(self, chroot_name: str) -> None:
    """Install config.chroot_additional_packages inside the chroot via apt-get.

    Raises:
        ChrootError: if the update or install step fails.
    """
    try:
        # Refresh package lists before installing.
        self.execute_in_chroot(chroot_name, ["apt-get", "update"], capture_output=True)
        cmd = ["apt-get", "install", "-y"] + self.config.chroot_additional_packages
        result = self.execute_in_chroot(chroot_name, cmd, capture_output=True)
        if result.returncode != 0:
            raise ChrootError(
                f"Failed to install additional packages: {result.stderr}",
                chroot_name=chroot_name,
                operation="install_packages",
            )
    except Exception as e:
        # NOTE(review): this also re-wraps the ChrootError raised above,
        # matching the original behavior.
        raise ChrootError(
            f"Failed to install additional packages: {e}",
            chroot_name=chroot_name,
            operation="install_packages",
        )
def _run_setup_commands(self, chroot_name: str) -> None:
    """Run each configured setup command (config.chroot_setup_cmd) in the chroot.

    Commands are split on whitespace before execution.

    Raises:
        ChrootError: on the first command that fails.
    """
    for cmd in self.config.chroot_setup_cmd:
        try:
            result = self.execute_in_chroot(chroot_name, cmd.split(), capture_output=True)
            if result.returncode != 0:
                raise ChrootError(
                    f"Setup command failed: {result.stderr}",
                    chroot_name=chroot_name,
                    operation="setup_command",
                )
        except Exception as e:
            raise ChrootError(
                f"Failed to run setup command '{cmd}': {e}",
                chroot_name=chroot_name,
                operation="setup_command",
            )
def _create_schroot_config(self, chroot_name: str, chroot_path: str, arch: str, suite: str) -> None:
"""Create schroot configuration file"""
config_content = f"""[{chroot_name}]
@ -192,162 +193,180 @@ type=directory
profile=desktop
preserve-environment=true
"""
config_file = os.path.join(self.config.chroot_config_dir, f"{chroot_name}.conf")
try:
with open(config_file, 'w') as f:
with open(config_file, "w") as f:
f.write(config_content)
except Exception as e:
raise ChrootError(f"Failed to create schroot config: {e}")
def _initialize_chroot(self, chroot_path: str, arch: str, suite: str) -> None:
    """Initialize a chroot using debootstrap with the buildd variant.

    Raises:
        ChrootError: if debootstrap fails or is not installed.
    """
    cmd = [
        "debootstrap",
        "--arch",
        arch,
        "--variant=buildd",
        suite,
        chroot_path,
        "http://deb.debian.org/debian/",
    ]
    try:
        # Result is unused; check=True raises on non-zero exit.
        subprocess.run(cmd, capture_output=True, text=True, check=True)
    except subprocess.CalledProcessError as e:
        raise ChrootError(f"debootstrap failed: {e.stderr}")
    except FileNotFoundError:
        raise ChrootError("debootstrap not found. Please install debootstrap package.")
def _install_build_tools(self, chroot_name: str) -> None:
    """Install essential Debian build tools in the chroot via schroot.

    Raises:
        ChrootError: if the apt-get update or install step fails.
    """
    packages = [
        "build-essential",
        "devscripts",
        "debhelper",
        "dh-make",
        "fakeroot",
        "lintian",
        "sbuild",
        "schroot",
    ]
    cmd = ["schroot", "-c", chroot_name, "--", "apt-get", "update"]
    try:
        subprocess.run(cmd, check=True)
    except subprocess.CalledProcessError as e:
        raise ChrootError(f"Failed to update package lists: {e}")
    cmd = [
        "schroot",
        "-c",
        chroot_name,
        "--",
        "apt-get",
        "install",
        "-y",
    ] + packages
    try:
        subprocess.run(cmd, check=True)
    except subprocess.CalledProcessError as e:
        raise ChrootError(f"Failed to install build tools: {e}")
def clean_chroot(self, chroot_name: str) -> None:
    """Remove a chroot directory and its schroot configuration file.

    Raises:
        ChrootError: if either removal fails.
    """
    chroot_path = os.path.join(self.config.chroot_dir, chroot_name)
    config_file = os.path.join(self.config.chroot_config_dir, f"{chroot_name}.conf")
    try:
        # Config file first, then the chroot tree itself.
        for victim, remover in ((config_file, os.remove), (chroot_path, shutil.rmtree)):
            if os.path.exists(victim):
                remover(victim)
    except Exception as e:
        raise ChrootError(f"Failed to clean chroot '{chroot_name}': {e}")
def list_chroots(self) -> List[str]:
    """Return names of chroots that have both a schroot config and a directory."""
    found = []
    try:
        # A chroot counts only when its .conf has a matching directory.
        for conf in Path(self.config.chroot_config_dir).glob("*.conf"):
            name = conf.stem
            if os.path.exists(os.path.join(self.config.chroot_dir, name)):
                found.append(name)
    except Exception as e:
        raise ChrootError(f"Failed to list chroots: {e}")
    return found
def chroot_exists(self, chroot_name: str) -> bool:
    """True when both the chroot directory and its schroot config file exist."""
    required = (
        os.path.join(self.config.chroot_dir, chroot_name),
        os.path.join(self.config.chroot_config_dir, f"{chroot_name}.conf"),
    )
    return all(os.path.exists(path) for path in required)
def get_chroot_info(self, chroot_name: str) -> dict:
    """Return basic metadata (path, size, timestamps) for an existing chroot.

    Raises:
        ChrootError: if the chroot does not exist.
    """
    if not self.chroot_exists(chroot_name):
        raise ChrootError(f"Chroot '{chroot_name}' does not exist")
    chroot_path = os.path.join(self.config.chroot_dir, chroot_name)
    info = {
        "name": chroot_name,
        "path": chroot_path,
        "exists": True,
        "size": 0,
        "created": None,
        "modified": None,
    }
    try:
        # NOTE: st_size of a directory is the directory entry size, not
        # the total tree size — preserved from the original behavior.
        stat = os.stat(chroot_path)
        info["size"] = stat.st_size
        info["created"] = stat.st_ctime
        info["modified"] = stat.st_mtime
    except Exception:
        # Stat failure leaves the zero/None defaults in place.
        pass
    return info
def update_chroot(self, chroot_name: str) -> None:
    """Run apt-get update && upgrade inside an existing chroot via schroot.

    Raises:
        ChrootError: if the chroot does not exist or either command fails.
    """
    if not self.chroot_exists(chroot_name):
        raise ChrootError(f"Chroot '{chroot_name}' does not exist")
    try:
        cmd = ["schroot", "-c", chroot_name, "--", "apt-get", "update"]
        subprocess.run(cmd, check=True)
        cmd = ["schroot", "-c", chroot_name, "--", "apt-get", "upgrade", "-y"]
        subprocess.run(cmd, check=True)
    except subprocess.CalledProcessError as e:
        raise ChrootError(f"Failed to update chroot '{chroot_name}': {e}")
def execute_in_chroot(self, chroot_name: str, command: list,
capture_output: bool = True,
preserve_env: bool = True) -> subprocess.CompletedProcess:
def execute_in_chroot(
self,
chroot_name: str,
command: list,
capture_output: bool = True,
preserve_env: bool = True,
) -> subprocess.CompletedProcess:
"""Execute a command in the chroot environment"""
if not self.chroot_exists(chroot_name):
raise ChrootError(f"Chroot '{chroot_name}' does not exist")
chroot_path = os.path.join(self.config.chroot_dir, chroot_name)
# Prepare environment variables (Mock FAQ #1 - Environment preservation)
env = self._prepare_chroot_environment(preserve_env)
# Build schroot command
schroot_cmd = [
'schroot', '-c', chroot_name, '--', 'sh', '-c',
' '.join(command)
"schroot",
"-c",
chroot_name,
"--",
"sh",
"-c",
" ".join(command),
]
try:
if capture_output:
result = subprocess.run(
@ -356,120 +375,115 @@ preserve-environment=true
env=env,
capture_output=True,
text=True,
check=False
check=False,
)
else:
result = subprocess.run(
schroot_cmd,
cwd=chroot_path,
env=env,
check=False
)
result = subprocess.run(schroot_cmd, cwd=chroot_path, env=env, check=False)
return result
except subprocess.CalledProcessError as e:
raise ChrootError(f"Command failed in chroot: {e}")
def _prepare_chroot_environment(self, preserve_env: bool = True) -> dict:
    """
    Prepare environment variables for chroot execution.

    This addresses Mock FAQ #1 about environment variable preservation:
    when sanitization is enabled, only basic system variables plus the
    configured allow-lists are passed through.
    """
    env = os.environ.copy()
    # Sanitization off (or explicitly not preserving): pass everything through.
    if not preserve_env or not self.config.environment_sanitization:
        return env
    filtered_env = {}
    # Always preserve basic system variables.
    basic_vars = ["PATH", "HOME", "USER", "SHELL", "TERM", "LANG", "LC_ALL"]
    for var in basic_vars:
        if var in env:
            filtered_env[var] = env[var]
    # Preserve allowed build-related variables.
    for var in self.config.allowed_environment_vars:
        if var in env:
            filtered_env[var] = env[var]
    # Preserve user-specified variables.
    for var in self.config.preserve_environment:
        if var in env:
            filtered_env[var] = env[var]
    return filtered_env
def copy_to_chroot(self, source_path: str, dest_path: str, chroot_name: str) -> None:
    """Copy a host file or directory into the chroot (Mock's --copyin).

    dest_path is interpreted relative to the chroot root (leading '/'
    stripped). Parent directories are created as needed.

    Raises:
        ChrootError: if the chroot is missing or the copy fails.
    """
    if not self.chroot_exists(chroot_name):
        raise ChrootError(f"Chroot '{chroot_name}' does not exist")
    chroot_path = os.path.join(self.config.chroot_dir, chroot_name)
    full_dest_path = os.path.join(chroot_path, dest_path.lstrip("/"))
    try:
        os.makedirs(os.path.dirname(full_dest_path), exist_ok=True)
        if os.path.isdir(source_path):
            shutil.copytree(source_path, full_dest_path, dirs_exist_ok=True)
        else:
            shutil.copy2(source_path, full_dest_path)
    except Exception as e:
        raise ChrootError(f"Failed to copy {source_path} to chroot: {e}")
def copy_from_chroot(self, source_path: str, dest_path: str, chroot_name: str) -> None:
    """Copy a file or directory out of the chroot to the host (Mock's --copyout).

    source_path is interpreted relative to the chroot root (leading '/'
    stripped). Parent directories of dest_path are created as needed.

    Raises:
        ChrootError: if the chroot is missing or the copy fails.
    """
    if not self.chroot_exists(chroot_name):
        raise ChrootError(f"Chroot '{chroot_name}' does not exist")
    chroot_path = os.path.join(self.config.chroot_dir, chroot_name)
    full_source_path = os.path.join(chroot_path, source_path.lstrip("/"))
    try:
        os.makedirs(os.path.dirname(dest_path), exist_ok=True)
        if os.path.isdir(full_source_path):
            shutil.copytree(full_source_path, dest_path, dirs_exist_ok=True)
        else:
            shutil.copy2(full_source_path, dest_path)
    except Exception as e:
        raise ChrootError(f"Failed to copy {source_path} from chroot: {e}")
def scrub_chroot(self, chroot_name: str) -> None:
    """Clean caches, temp files and build artifacts inside the chroot
    without removing the chroot itself (similar to Mock's --scrub).

    Raises:
        ChrootError: if the chroot is missing or any cleanup command fails.
    """
    if not self.chroot_exists(chroot_name):
        raise ChrootError(f"Chroot '{chroot_name}' does not exist")
    try:
        # Clean the apt package cache.
        self.execute_in_chroot(chroot_name, ["apt-get", "clean"])
        # Clean temporary files.
        self.execute_in_chroot(chroot_name, ["rm", "-rf", "/tmp/*"])
        self.execute_in_chroot(chroot_name, ["rm", "-rf", "/var/tmp/*"])
        # Clean build artifacts.
        self.execute_in_chroot(chroot_name, ["rm", "-rf", "/build/*"])
    except Exception as e:
        raise ChrootError(f"Failed to scrub chroot '{chroot_name}': {e}")
def scrub_all_chroots(self) -> None:
    """Scrub every known chroot (similar to Mock's --scrub-all-chroots).

    A failure on one chroot is reported with a warning and does not
    abort the loop.
    """
    for chroot_name in self.list_chroots():
        try:
            self.scrub_chroot(chroot_name)
        except Exception as e:
            print(f"Warning: Failed to scrub chroot '{chroot_name}': {e}")

View file

@ -3,44 +3,38 @@
Command-line interface for deb-mock
"""
import click
import sys
import os
from pathlib import Path
from .core import DebMock
import click
from .config import Config
from .configs import get_available_configs, load_config
from .exceptions import (
DebMockError, ConfigurationError, ChrootError, SbuildError,
BuildError, DependencyError, MetadataError, CacheError,
PluginError, NetworkError, PermissionError, ValidationError,
handle_exception, format_error_context
)
from .core import DebMock
from .exceptions import ConfigurationError, ValidationError, handle_exception
@click.group()
@click.version_option()
@click.option('--config', '-c', type=click.Path(exists=True),
help='Configuration file path')
@click.option('--chroot', '-r', help='Chroot configuration name (e.g., debian-bookworm-amd64)')
@click.option('--verbose', '-v', is_flag=True, help='Enable verbose output')
@click.option('--debug', is_flag=True, help='Enable debug output')
@click.option("--config", "-c", type=click.Path(exists=True), help="Configuration file path")
@click.option("--chroot", "-r", help="Chroot configuration name (e.g., debian-bookworm-amd64)")
@click.option("--verbose", "-v", is_flag=True, help="Enable verbose output")
@click.option("--debug", is_flag=True, help="Enable debug output")
@click.pass_context
def main(ctx, config, chroot, verbose, debug):
"""
Deb-Mock: A low-level utility to create clean, isolated build environments for Debian packages.
This tool is a direct functional replacement for Fedora's Mock, adapted specifically
This tool is a direct functional replacement for Fedora's Mock, adapted specifically
for Debian-based ecosystems.
"""
ctx.ensure_object(dict)
ctx.obj['verbose'] = verbose
ctx.obj['debug'] = debug
ctx.obj["verbose"] = verbose
ctx.obj["debug"] = debug
# Load configuration
if config:
try:
ctx.obj['config'] = Config.from_file(config)
ctx.obj["config"] = Config.from_file(config)
except ConfigurationError as e:
e.print_error()
sys.exit(e.get_exit_code())
@ -48,134 +42,143 @@ def main(ctx, config, chroot, verbose, debug):
# Load core config by name (similar to Mock's -r option)
try:
config_data = load_config(chroot)
ctx.obj['config'] = Config(**config_data)
ctx.obj["config"] = Config(**config_data)
except ValueError as e:
error = ValidationError(
f"Invalid chroot configuration: {e}",
field='chroot',
field="chroot",
value=chroot,
expected_format='debian-suite-arch or ubuntu-suite-arch'
expected_format="debian-suite-arch or ubuntu-suite-arch",
)
error.print_error()
click.echo(f"Available configs: {', '.join(get_available_configs())}")
sys.exit(error.get_exit_code())
else:
ctx.obj['config'] = Config.default()
ctx.obj["config"] = Config.default()
@main.command()
@click.argument("source_package", type=click.Path(exists=True))
@click.option("--chroot", help="Chroot environment to use")
@click.option("--arch", help="Target architecture")
@click.option("--output-dir", "-o", type=click.Path(), help="Output directory for build artifacts")
@click.option("--keep-chroot", is_flag=True, help="Keep chroot after build (for debugging)")
@click.option("--no-check", is_flag=True, help="Skip running tests during build")
@click.option("--offline", is_flag=True, help="Build in offline mode (no network access)")
@click.option("--build-timeout", type=int, help="Build timeout in seconds")
@click.option("--force-arch", help="Force target architecture")
@click.option("--unique-ext", help="Unique extension for buildroot directory")
@click.option("--config-dir", help="Configuration directory")
@click.option("--cleanup-after", is_flag=True, help="Clean chroot after build")
@click.option("--no-cleanup-after", is_flag=True, help="Don't clean chroot after build")
@click.pass_context
@handle_exception
def build(
    ctx,
    source_package,
    chroot,
    arch,
    output_dir,
    keep_chroot,
    no_check,
    offline,
    build_timeout,
    force_arch,
    unique_ext,
    config_dir,
    cleanup_after,
    no_cleanup_after,
):
    """
    Build a Debian source package in an isolated environment.

    SOURCE_PACKAGE: Path to the .dsc file or source package directory
    """
    config = ctx.obj["config"]
    deb_mock = DebMock(config)
    # Override config with command line options (truthy flags/values only).
    if chroot:
        config.chroot_name = chroot
    if arch:
        config.architecture = arch
    if output_dir:
        config.output_dir = output_dir
    if keep_chroot:
        config.keep_chroot = keep_chroot
    if no_check:
        config.run_tests = False
    if offline:
        config.enable_network = False
    if build_timeout:
        config.build_timeout = build_timeout
    if force_arch:
        config.force_architecture = force_arch
    if unique_ext:
        config.unique_extension = unique_ext
    if config_dir:
        config.config_dir = config_dir
    # BUG FIX: is_flag options are always booleans (never None), so the
    # old `is not None` checks unconditionally overwrote
    # config.cleanup_after on every invocation. Apply the override only
    # when the corresponding flag was actually given.
    if cleanup_after:
        config.cleanup_after = True
    if no_cleanup_after:
        config.cleanup_after = False
    result = deb_mock.build(source_package)
    if ctx.obj["verbose"]:
        click.echo(f"Build completed successfully: {result}")
    else:
        click.echo("Build completed successfully")
@main.command()
@click.argument('source_packages', nargs=-1, type=click.Path(exists=True))
@click.option('--chroot', help='Chroot environment to use')
@click.option('--arch', help='Target architecture')
@click.option('--output-dir', '-o', type=click.Path(),
help='Output directory for build artifacts')
@click.option('--keep-chroot', is_flag=True,
help='Keep chroot after build (for debugging)')
@click.option('--continue-on-failure', is_flag=True,
help='Continue building remaining packages even if one fails')
@click.argument("source_packages", nargs=-1, type=click.Path(exists=True))
@click.option("--chroot", help="Chroot environment to use")
@click.option("--arch", help="Target architecture")
@click.option("--output-dir", "-o", type=click.Path(), help="Output directory for build artifacts")
@click.option("--keep-chroot", is_flag=True, help="Keep chroot after build (for debugging)")
@click.option(
"--continue-on-failure",
is_flag=True,
help="Continue building remaining packages even if one fails",
)
@click.pass_context
@handle_exception
def chain(ctx, source_packages, chroot, arch, output_dir, keep_chroot, continue_on_failure):
"""
Build a chain of packages that depend on each other.
SOURCE_PACKAGES: List of .dsc files or source package directories to build in order
"""
if not source_packages:
raise ValidationError(
"No source packages specified",
field='source_packages',
expected_format='list of .dsc files or source directories'
field="source_packages",
expected_format="list of .dsc files or source directories",
)
deb_mock = DebMock(ctx.obj['config'])
deb_mock = DebMock(ctx.obj["config"])
# Override config with command line options
if chroot:
ctx.obj['config'].chroot_name = chroot
ctx.obj["config"].chroot_name = chroot
if arch:
ctx.obj['config'].architecture = arch
ctx.obj["config"].architecture = arch
if output_dir:
ctx.obj['config'].output_dir = output_dir
ctx.obj["config"].output_dir = output_dir
if keep_chroot:
ctx.obj['config'].keep_chroot = keep_chroot
results = deb_mock.build_chain(
list(source_packages),
continue_on_failure=continue_on_failure
)
ctx.obj["config"].keep_chroot = keep_chroot
results = deb_mock.build_chain(list(source_packages), continue_on_failure=continue_on_failure)
# Display results
for result in results:
if result['success']:
if result["success"]:
click.echo(f"{result['package']} (step {result['order']})")
else:
click.echo(f"{result['package']} (step {result['order']}): {result['error']}")
# Check if all builds succeeded
failed_builds = [r for r in results if not r['success']]
failed_builds = [r for r in results if not r["success"]]
if failed_builds:
sys.exit(1)
else:
@ -183,67 +186,71 @@ def chain(ctx, source_packages, chroot, arch, output_dir, keep_chroot, continue_
@main.command()
@click.argument('chroot_name')
@click.option('--arch', help='Target architecture')
@click.option('--suite', help='Debian suite (e.g., bookworm, sid)')
@click.option('--bootstrap', is_flag=True, help='Use bootstrap chroot for cross-distribution builds')
@click.option('--bootstrap-chroot', help='Name of bootstrap chroot to use')
@click.argument("chroot_name")
@click.option("--arch", help="Target architecture")
@click.option("--suite", help="Debian suite (e.g., bookworm, sid)")
@click.option(
"--bootstrap",
is_flag=True,
help="Use bootstrap chroot for cross-distribution builds",
)
@click.option("--bootstrap-chroot", help="Name of bootstrap chroot to use")
@click.pass_context
@handle_exception
def init_chroot(ctx, chroot_name, arch, suite, bootstrap, bootstrap_chroot):
"""
Initialize a new chroot environment for building.
CHROOT_NAME: Name of the chroot environment to create
The --bootstrap option is useful for building packages for newer distributions
on older systems (e.g., building Debian Sid packages on Debian Stable).
"""
deb_mock = DebMock(ctx.obj['config'])
deb_mock = DebMock(ctx.obj["config"])
# Override config with command line options
if arch:
ctx.obj['config'].architecture = arch
ctx.obj["config"].architecture = arch
if suite:
ctx.obj['config'].suite = suite
ctx.obj["config"].suite = suite
if bootstrap:
ctx.obj['config'].use_bootstrap_chroot = True
ctx.obj["config"].use_bootstrap_chroot = True
if bootstrap_chroot:
ctx.obj['config'].bootstrap_chroot_name = bootstrap_chroot
ctx.obj["config"].bootstrap_chroot_name = bootstrap_chroot
deb_mock.init_chroot(chroot_name)
click.echo(f"Chroot '{chroot_name}' initialized successfully")
if bootstrap:
click.echo("Bootstrap chroot was used for cross-distribution compatibility")
@main.command()
@click.argument('chroot_name')
@click.argument("chroot_name")
@click.pass_context
@handle_exception
def clean_chroot(ctx, chroot_name):
"""
Clean up a chroot environment.
CHROOT_NAME: Name of the chroot environment to clean
"""
deb_mock = DebMock(ctx.obj['config'])
deb_mock = DebMock(ctx.obj["config"])
deb_mock.clean_chroot(chroot_name)
click.echo(f"Chroot '{chroot_name}' cleaned successfully")
@main.command()
@click.argument('chroot_name')
@click.argument("chroot_name")
@click.pass_context
@handle_exception
def scrub_chroot(ctx, chroot_name):
"""
Clean up a chroot environment without removing it.
CHROOT_NAME: Name of the chroot environment to scrub
"""
deb_mock = DebMock(ctx.obj['config'])
deb_mock = DebMock(ctx.obj["config"])
deb_mock.chroot_manager.scrub_chroot(chroot_name)
click.echo(f"Chroot '{chroot_name}' scrubbed successfully")
@ -255,73 +262,73 @@ def scrub_all_chroots(ctx):
"""
Clean up all chroot environments without removing them.
"""
deb_mock = DebMock(ctx.obj['config'])
deb_mock = DebMock(ctx.obj["config"])
deb_mock.chroot_manager.scrub_all_chroots()
click.echo("All chroots scrubbed successfully")
@main.command()
@click.option('--chroot', help='Chroot environment to use')
@click.option('--preserve-env', is_flag=True, help='Preserve environment variables in chroot')
@click.option('--env-var', multiple=True, help='Specific environment variable to preserve')
@click.option("--chroot", help="Chroot environment to use")
@click.option("--preserve-env", is_flag=True, help="Preserve environment variables in chroot")
@click.option("--env-var", multiple=True, help="Specific environment variable to preserve")
@click.pass_context
@handle_exception
def shell(ctx, chroot, preserve_env, env_var):
"""
Open a shell in the chroot environment.
Use --preserve-env to preserve environment variables (addresses common
environment variable issues in chroot environments).
"""
deb_mock = DebMock(ctx.obj['config'])
chroot_name = chroot or ctx.obj['config'].chroot_name
deb_mock = DebMock(ctx.obj["config"])
chroot_name = chroot or ctx.obj["config"].chroot_name
# Configure environment preservation
if preserve_env:
ctx.obj['config'].environment_sanitization = False
ctx.obj["config"].environment_sanitization = False
if env_var:
ctx.obj['config'].preserve_environment.extend(env_var)
ctx.obj["config"].preserve_environment.extend(env_var)
deb_mock.shell(chroot_name)
@main.command()
@click.argument('source_path')
@click.argument('dest_path')
@click.option('--chroot', help='Chroot environment to use')
@click.argument("source_path")
@click.argument("dest_path")
@click.option("--chroot", help="Chroot environment to use")
@click.pass_context
@handle_exception
def copyin(ctx, source_path, dest_path, chroot):
"""
Copy files from host to chroot.
SOURCE_PATH: Path to file/directory on host
DEST_PATH: Path in chroot where to copy
"""
deb_mock = DebMock(ctx.obj['config'])
chroot_name = chroot or ctx.obj['config'].chroot_name
deb_mock = DebMock(ctx.obj["config"])
chroot_name = chroot or ctx.obj["config"].chroot_name
deb_mock.copyin(source_path, dest_path, chroot_name)
click.echo(f"Copied {source_path} to {dest_path} in chroot '{chroot_name}'")
@main.command()
@click.argument('source_path')
@click.argument('dest_path')
@click.option('--chroot', help='Chroot environment to use')
@click.argument("source_path")
@click.argument("dest_path")
@click.option("--chroot", help="Chroot environment to use")
@click.pass_context
@handle_exception
def copyout(ctx, source_path, dest_path, chroot):
"""
Copy files from chroot to host.
SOURCE_PATH: Path to file/directory in chroot
DEST_PATH: Path on host where to copy
"""
deb_mock = DebMock(ctx.obj['config'])
chroot_name = chroot or ctx.obj['config'].chroot_name
deb_mock = DebMock(ctx.obj["config"])
chroot_name = chroot or ctx.obj["config"].chroot_name
deb_mock.copyout(source_path, dest_path, chroot_name)
click.echo(f"Copied {source_path} from chroot '{chroot_name}' to {dest_path}")
@ -333,13 +340,13 @@ def list_chroots(ctx):
"""
List available chroot environments.
"""
deb_mock = DebMock(ctx.obj['config'])
deb_mock = DebMock(ctx.obj["config"])
chroots = deb_mock.list_chroots()
if not chroots:
click.echo("No chroot environments found")
return
click.echo("Available chroot environments:")
for chroot in chroots:
click.echo(f" - {chroot}")
@ -353,13 +360,13 @@ def list_configs(ctx):
List available core configurations.
"""
from .configs import list_configs
configs = list_configs()
if not configs:
click.echo("No core configurations found")
return
click.echo("Available core configurations:")
for config_name, config_info in configs.items():
click.echo(f" - {config_name}: {config_info['description']}")
@ -373,13 +380,13 @@ def cleanup_caches(ctx):
"""
Clean up old cache files (similar to Mock's cache management).
"""
deb_mock = DebMock(ctx.obj['config'])
deb_mock = DebMock(ctx.obj["config"])
cleaned = deb_mock.cleanup_caches()
if not cleaned:
click.echo("No old cache files found to clean")
return
click.echo("Cleaned up cache files:")
for cache_type, count in cleaned.items():
if count > 0:
@ -393,13 +400,13 @@ def cache_stats(ctx):
"""
Show cache statistics.
"""
deb_mock = DebMock(ctx.obj['config'])
deb_mock = DebMock(ctx.obj["config"])
stats = deb_mock.get_cache_stats()
if not stats:
click.echo("No cache statistics available")
return
click.echo("Cache Statistics:")
for cache_type, cache_stats in stats.items():
click.echo(f" - {cache_type}:")
@ -417,7 +424,7 @@ def config(ctx):
"""
Show current configuration.
"""
config = ctx.obj['config']
config = ctx.obj["config"]
click.echo("Current configuration:")
click.echo(f" Chroot name: {config.chroot_name}")
click.echo(f" Architecture: {config.architecture}")
@ -430,84 +437,84 @@ def config(ctx):
@main.command()
@click.argument('source_package', type=click.Path(exists=True))
@click.option('--chroot', help='Chroot environment to use')
@click.option('--arch', help='Target architecture')
@click.argument("source_package", type=click.Path(exists=True))
@click.option("--chroot", help="Chroot environment to use")
@click.option("--arch", help="Target architecture")
@click.pass_context
@handle_exception
def install_deps(ctx, source_package, chroot, arch):
"""
Install build dependencies for a Debian source package.
SOURCE_PACKAGE: Path to the .dsc file or source package directory
"""
deb_mock = DebMock(ctx.obj['config'])
deb_mock = DebMock(ctx.obj["config"])
# Override config with command line options
if chroot:
ctx.obj['config'].chroot_name = chroot
ctx.obj["config"].chroot_name = chroot
if arch:
ctx.obj['config'].architecture = arch
ctx.obj["config"].architecture = arch
result = deb_mock.install_dependencies(source_package)
if ctx.obj['verbose']:
if ctx.obj["verbose"]:
click.echo(f"Dependencies installed successfully: {result}")
else:
click.echo("Dependencies installed successfully")
@main.command()
@click.argument('packages', nargs=-1, required=True)
@click.option('--chroot', help='Chroot environment to use')
@click.option('--arch', help='Target architecture')
@click.argument("packages", nargs=-1, required=True)
@click.option("--chroot", help="Chroot environment to use")
@click.option("--arch", help="Target architecture")
@click.pass_context
@handle_exception
def install(ctx, packages, chroot, arch):
"""
Install packages in the chroot environment.
PACKAGES: List of packages to install
"""
deb_mock = DebMock(ctx.obj['config'])
deb_mock = DebMock(ctx.obj["config"])
# Override config with command line options
if chroot:
ctx.obj['config'].chroot_name = chroot
ctx.obj["config"].chroot_name = chroot
if arch:
ctx.obj['config'].architecture = arch
ctx.obj["config"].architecture = arch
result = deb_mock.install_packages(packages)
if ctx.obj['verbose']:
if ctx.obj["verbose"]:
click.echo(f"Packages installed successfully: {result}")
else:
click.echo(f"Packages installed successfully: {', '.join(packages)}")
@main.command()
@click.argument('packages', nargs=-1)
@click.option('--chroot', help='Chroot environment to use')
@click.option('--arch', help='Target architecture')
@click.argument("packages", nargs=-1)
@click.option("--chroot", help="Chroot environment to use")
@click.option("--arch", help="Target architecture")
@click.pass_context
@handle_exception
def update(ctx, packages, chroot, arch):
"""
Update packages in the chroot environment.
PACKAGES: List of packages to update (if empty, update all)
"""
deb_mock = DebMock(ctx.obj['config'])
deb_mock = DebMock(ctx.obj["config"])
# Override config with command line options
if chroot:
ctx.obj['config'].chroot_name = chroot
ctx.obj["config"].chroot_name = chroot
if arch:
ctx.obj['config'].architecture = arch
ctx.obj["config"].architecture = arch
result = deb_mock.update_packages(packages)
if ctx.obj['verbose']:
if ctx.obj["verbose"]:
click.echo(f"Packages updated successfully: {result}")
else:
if packages:
@ -517,71 +524,71 @@ def update(ctx, packages, chroot, arch):
@main.command()
@click.argument('packages', nargs=-1, required=True)
@click.option('--chroot', help='Chroot environment to use')
@click.option('--arch', help='Target architecture')
@click.argument("packages", nargs=-1, required=True)
@click.option("--chroot", help="Chroot environment to use")
@click.option("--arch", help="Target architecture")
@click.pass_context
@handle_exception
def remove(ctx, packages, chroot, arch):
"""
Remove packages from the chroot environment.
PACKAGES: List of packages to remove
"""
deb_mock = DebMock(ctx.obj['config'])
deb_mock = DebMock(ctx.obj["config"])
# Override config with command line options
if chroot:
ctx.obj['config'].chroot_name = chroot
ctx.obj["config"].chroot_name = chroot
if arch:
ctx.obj['config'].architecture = arch
ctx.obj["config"].architecture = arch
result = deb_mock.remove_packages(packages)
if ctx.obj['verbose']:
if ctx.obj["verbose"]:
click.echo(f"Packages removed successfully: {result}")
else:
click.echo(f"Packages removed successfully: {', '.join(packages)}")
@main.command()
@click.argument('command')
@click.option('--chroot', help='Chroot environment to use')
@click.option('--arch', help='Target architecture')
@click.argument("command")
@click.option("--chroot", help="Chroot environment to use")
@click.option("--arch", help="Target architecture")
@click.pass_context
@handle_exception
def apt_cmd(ctx, command, chroot, arch):
"""
Execute APT command in the chroot environment.
COMMAND: APT command to execute (e.g., "update", "install package")
"""
deb_mock = DebMock(ctx.obj['config'])
deb_mock = DebMock(ctx.obj["config"])
# Override config with command line options
if chroot:
ctx.obj['config'].chroot_name = chroot
ctx.obj["config"].chroot_name = chroot
if arch:
ctx.obj['config'].architecture = arch
ctx.obj["config"].architecture = arch
result = deb_mock.execute_apt_command(command)
if ctx.obj['verbose']:
if ctx.obj["verbose"]:
click.echo(f"APT command executed successfully: {result}")
else:
click.echo(f"APT command executed: {command}")
@main.command()
@click.option('--expand', is_flag=True, help='Show expanded configuration values')
@click.option("--expand", is_flag=True, help="Show expanded configuration values")
@click.pass_context
@handle_exception
def debug_config(ctx, expand):
"""
Show detailed configuration information for debugging.
"""
config = ctx.obj['config']
config = ctx.obj["config"]
if expand:
# Show expanded configuration (with template values resolved)
click.echo("Expanded Configuration:")
@ -599,13 +606,13 @@ def debug_config(ctx, expand):
click.echo(f" chroot_dir: {config.chroot_dir}")
click.echo(f" cache_dir: {config.cache_dir}")
click.echo(f" chroot_home: {config.chroot_home}")
# Show plugin configuration
if hasattr(config, 'plugins') and config.plugins:
if hasattr(config, "plugins") and config.plugins:
click.echo(" plugins:")
for plugin_name, plugin_config in config.plugins.items():
click.echo(f" {plugin_name}: {plugin_config}")
if __name__ == '__main__':
main()
if __name__ == "__main__":
main()

View file

@ -3,121 +3,138 @@ Configuration management for deb-mock
"""
import os
import yaml
from pathlib import Path
from typing import Dict, Any, Optional
from typing import Any, Dict
import yaml
from .exceptions import ConfigurationError
class Config:
"""Configuration class for deb-mock"""
def __init__(self, **kwargs):
# Default configuration
self.chroot_name = kwargs.get('chroot_name', 'bookworm-amd64')
self.architecture = kwargs.get('architecture', 'amd64')
self.suite = kwargs.get('suite', 'bookworm')
self.output_dir = kwargs.get('output_dir', './output')
self.keep_chroot = kwargs.get('keep_chroot', False)
self.verbose = kwargs.get('verbose', False)
self.debug = kwargs.get('debug', False)
self.chroot_name = kwargs.get("chroot_name", "bookworm-amd64")
self.architecture = kwargs.get("architecture", "amd64")
self.suite = kwargs.get("suite", "bookworm")
self.output_dir = kwargs.get("output_dir", "./output")
self.keep_chroot = kwargs.get("keep_chroot", False)
self.verbose = kwargs.get("verbose", False)
self.debug = kwargs.get("debug", False)
# Chroot configuration
self.basedir = kwargs.get('basedir', '/var/lib/deb-mock')
self.chroot_dir = kwargs.get('chroot_dir', '/var/lib/deb-mock/chroots')
self.chroot_config_dir = kwargs.get('chroot_config_dir', '/etc/schroot/chroot.d')
self.chroot_home = kwargs.get('chroot_home', '/home/build')
self.basedir = kwargs.get("basedir", "/var/lib/deb-mock")
self.chroot_dir = kwargs.get("chroot_dir", "/var/lib/deb-mock/chroots")
self.chroot_config_dir = kwargs.get("chroot_config_dir", "/etc/schroot/chroot.d")
self.chroot_home = kwargs.get("chroot_home", "/home/build")
# sbuild configuration
self.sbuild_config = kwargs.get('sbuild_config', '/etc/sbuild/sbuild.conf')
self.sbuild_log_dir = kwargs.get('sbuild_log_dir', '/var/log/sbuild')
self.sbuild_config = kwargs.get("sbuild_config", "/etc/sbuild/sbuild.conf")
self.sbuild_log_dir = kwargs.get("sbuild_log_dir", "/var/log/sbuild")
# Build configuration
self.build_deps = kwargs.get('build_deps', [])
self.build_env = kwargs.get('build_env', {})
self.build_options = kwargs.get('build_options', [])
self.build_deps = kwargs.get("build_deps", [])
self.build_env = kwargs.get("build_env", {})
self.build_options = kwargs.get("build_options", [])
# Metadata configuration
self.metadata_dir = kwargs.get('metadata_dir', './metadata')
self.capture_logs = kwargs.get('capture_logs', True)
self.capture_changes = kwargs.get('capture_changes', True)
self.metadata_dir = kwargs.get("metadata_dir", "./metadata")
self.capture_logs = kwargs.get("capture_logs", True)
self.capture_changes = kwargs.get("capture_changes", True)
# Speed optimization (Mock-inspired features)
self.cache_dir = kwargs.get('cache_dir', '/var/cache/deb-mock')
self.use_root_cache = kwargs.get('use_root_cache', True)
self.root_cache_dir = kwargs.get('root_cache_dir', '/var/cache/deb-mock/root-cache')
self.root_cache_age = kwargs.get('root_cache_age', 7) # days
self.use_package_cache = kwargs.get('use_package_cache', True)
self.package_cache_dir = kwargs.get('package_cache_dir', '/var/cache/deb-mock/package-cache')
self.use_ccache = kwargs.get('use_ccache', False)
self.ccache_dir = kwargs.get('ccache_dir', '/var/cache/deb-mock/ccache')
self.use_tmpfs = kwargs.get('use_tmpfs', False)
self.tmpfs_size = kwargs.get('tmpfs_size', '2G')
self.cache_dir = kwargs.get("cache_dir", "/var/cache/deb-mock")
self.use_root_cache = kwargs.get("use_root_cache", True)
self.root_cache_dir = kwargs.get("root_cache_dir", "/var/cache/deb-mock/root-cache")
self.root_cache_age = kwargs.get("root_cache_age", 7) # days
self.use_package_cache = kwargs.get("use_package_cache", True)
self.package_cache_dir = kwargs.get("package_cache_dir", "/var/cache/deb-mock/package-cache")
self.use_ccache = kwargs.get("use_ccache", False)
self.ccache_dir = kwargs.get("ccache_dir", "/var/cache/deb-mock/ccache")
self.use_tmpfs = kwargs.get("use_tmpfs", False)
self.tmpfs_size = kwargs.get("tmpfs_size", "2G")
# Parallel builds
self.parallel_jobs = kwargs.get('parallel_jobs', 4)
self.parallel_compression = kwargs.get('parallel_compression', True)
self.parallel_jobs = kwargs.get("parallel_jobs", 4)
self.parallel_compression = kwargs.get("parallel_compression", True)
# Network and proxy
self.use_host_resolv = kwargs.get('use_host_resolv', True)
self.http_proxy = kwargs.get('http_proxy', None)
self.https_proxy = kwargs.get('https_proxy', None)
self.no_proxy = kwargs.get('no_proxy', None)
self.use_host_resolv = kwargs.get("use_host_resolv", True)
self.http_proxy = kwargs.get("http_proxy", None)
self.https_proxy = kwargs.get("https_proxy", None)
self.no_proxy = kwargs.get("no_proxy", None)
# Mirror configuration
self.mirror = kwargs.get('mirror', 'http://deb.debian.org/debian/')
self.security_mirror = kwargs.get('security_mirror', None)
self.backports_mirror = kwargs.get('backports_mirror', None)
self.mirror = kwargs.get("mirror", "http://deb.debian.org/debian/")
self.security_mirror = kwargs.get("security_mirror", None)
self.backports_mirror = kwargs.get("backports_mirror", None)
# Isolation and security
self.isolation = kwargs.get('isolation', 'schroot') # schroot, simple, nspawn
self.enable_network = kwargs.get('enable_network', True)
self.selinux_enabled = kwargs.get('selinux_enabled', False)
self.isolation = kwargs.get("isolation", "schroot") # schroot, simple, nspawn
self.enable_network = kwargs.get("enable_network", True)
self.selinux_enabled = kwargs.get("selinux_enabled", False)
# Bootstrap chroot support (Mock FAQ #2 - Cross-distribution builds)
self.use_bootstrap_chroot = kwargs.get('use_bootstrap_chroot', False)
self.bootstrap_chroot_name = kwargs.get('bootstrap_chroot_name', None)
self.bootstrap_arch = kwargs.get('bootstrap_arch', None)
self.bootstrap_suite = kwargs.get('bootstrap_suite', None)
self.use_bootstrap_chroot = kwargs.get("use_bootstrap_chroot", False)
self.bootstrap_chroot_name = kwargs.get("bootstrap_chroot_name", None)
self.bootstrap_arch = kwargs.get("bootstrap_arch", None)
self.bootstrap_suite = kwargs.get("bootstrap_suite", None)
# Build environment customization
self.chroot_setup_cmd = kwargs.get('chroot_setup_cmd', [])
self.chroot_additional_packages = kwargs.get('chroot_additional_packages', [])
self.chroot_setup_cmd = kwargs.get("chroot_setup_cmd", [])
self.chroot_additional_packages = kwargs.get("chroot_additional_packages", [])
# Environment variable preservation (Mock FAQ #1)
self.preserve_environment = kwargs.get('preserve_environment', [])
self.environment_sanitization = kwargs.get('environment_sanitization', True)
self.allowed_environment_vars = kwargs.get('allowed_environment_vars', [
'DEB_BUILD_OPTIONS', 'DEB_BUILD_PROFILES', 'CC', 'CXX', 'CFLAGS', 'CXXFLAGS',
'LDFLAGS', 'MAKEFLAGS', 'CCACHE_DIR', 'CCACHE_HASHDIR', 'http_proxy',
'https_proxy', 'no_proxy', 'DISPLAY', 'XAUTHORITY'
])
self.preserve_environment = kwargs.get("preserve_environment", [])
self.environment_sanitization = kwargs.get("environment_sanitization", True)
self.allowed_environment_vars = kwargs.get(
"allowed_environment_vars",
[
"DEB_BUILD_OPTIONS",
"DEB_BUILD_PROFILES",
"CC",
"CXX",
"CFLAGS",
"CXXFLAGS",
"LDFLAGS",
"MAKEFLAGS",
"CCACHE_DIR",
"CCACHE_HASHDIR",
"http_proxy",
"https_proxy",
"no_proxy",
"DISPLAY",
"XAUTHORITY",
],
)
# Advanced build options (Mock-inspired)
self.run_tests = kwargs.get('run_tests', True)
self.build_timeout = kwargs.get('build_timeout', 0) # 0 = no timeout
self.force_architecture = kwargs.get('force_architecture', None)
self.unique_extension = kwargs.get('unique_extension', None)
self.config_dir = kwargs.get('config_dir', None)
self.cleanup_after = kwargs.get('cleanup_after', True)
self.run_tests = kwargs.get("run_tests", True)
self.build_timeout = kwargs.get("build_timeout", 0) # 0 = no timeout
self.force_architecture = kwargs.get("force_architecture", None)
self.unique_extension = kwargs.get("unique_extension", None)
self.config_dir = kwargs.get("config_dir", None)
self.cleanup_after = kwargs.get("cleanup_after", True)
# APT configuration
self.apt_sources = kwargs.get('apt_sources', [])
self.apt_preferences = kwargs.get('apt_preferences', [])
self.apt_command = kwargs.get('apt_command', 'apt-get')
self.apt_install_command = kwargs.get('apt_install_command', 'apt-get install -y')
self.apt_sources = kwargs.get("apt_sources", [])
self.apt_preferences = kwargs.get("apt_preferences", [])
self.apt_command = kwargs.get("apt_command", "apt-get")
self.apt_install_command = kwargs.get("apt_install_command", "apt-get install -y")
# Plugin configuration
self.plugins = kwargs.get('plugins', {})
self.plugin_dir = kwargs.get('plugin_dir', '/usr/lib/deb-mock/plugins')
self.plugins = kwargs.get("plugins", {})
self.plugin_dir = kwargs.get("plugin_dir", "/usr/lib/deb-mock/plugins")
@classmethod
def from_file(cls, config_path: str) -> 'Config':
def from_file(cls, config_path: str) -> "Config":
"""Load configuration from a YAML file"""
try:
with open(config_path, 'r') as f:
with open(config_path, "r") as f:
config_data = yaml.safe_load(f)
return cls(**config_data)
except FileNotFoundError:
raise ConfigurationError(f"Configuration file not found: {config_path}")
@ -125,155 +142,163 @@ class Config:
raise ConfigurationError(f"Invalid YAML in configuration file: {e}")
except Exception as e:
raise ConfigurationError(f"Error loading configuration: {e}")
@classmethod
def default(cls) -> 'Config':
def default(cls) -> "Config":
"""Create default configuration"""
return cls()
def to_dict(self) -> Dict[str, Any]:
"""Convert configuration to dictionary"""
return {
'chroot_name': self.chroot_name,
'architecture': self.architecture,
'suite': self.suite,
'output_dir': self.output_dir,
'keep_chroot': self.keep_chroot,
'verbose': self.verbose,
'debug': self.debug,
'chroot_dir': self.chroot_dir,
'chroot_config_dir': self.chroot_config_dir,
'sbuild_config': self.sbuild_config,
'sbuild_log_dir': self.sbuild_log_dir,
'build_deps': self.build_deps,
'build_env': self.build_env,
'build_options': self.build_options,
'metadata_dir': self.metadata_dir,
'capture_logs': self.capture_logs,
'capture_changes': self.capture_changes,
'use_root_cache': self.use_root_cache,
'root_cache_dir': self.root_cache_dir,
'root_cache_age': self.root_cache_age,
'use_package_cache': self.use_package_cache,
'package_cache_dir': self.package_cache_dir,
'use_ccache': self.use_ccache,
'ccache_dir': self.ccache_dir,
'use_tmpfs': self.use_tmpfs,
'tmpfs_size': self.tmpfs_size,
'parallel_jobs': self.parallel_jobs,
'parallel_compression': self.parallel_compression,
'use_host_resolv': self.use_host_resolv,
'http_proxy': self.http_proxy,
'https_proxy': self.https_proxy,
'no_proxy': self.no_proxy,
'mirror': self.mirror,
'security_mirror': self.security_mirror,
'backports_mirror': self.backports_mirror,
'isolation': self.isolation,
'enable_network': self.enable_network,
'selinux_enabled': self.selinux_enabled,
'use_bootstrap_chroot': self.use_bootstrap_chroot,
'bootstrap_chroot_name': self.bootstrap_chroot_name,
'bootstrap_arch': self.bootstrap_arch,
'bootstrap_suite': self.bootstrap_suite,
'chroot_setup_cmd': self.chroot_setup_cmd,
'chroot_additional_packages': self.chroot_additional_packages,
'preserve_environment': self.preserve_environment,
'environment_sanitization': self.environment_sanitization,
'allowed_environment_vars': self.allowed_environment_vars,
"chroot_name": self.chroot_name,
"architecture": self.architecture,
"suite": self.suite,
"output_dir": self.output_dir,
"keep_chroot": self.keep_chroot,
"verbose": self.verbose,
"debug": self.debug,
"chroot_dir": self.chroot_dir,
"chroot_config_dir": self.chroot_config_dir,
"sbuild_config": self.sbuild_config,
"sbuild_log_dir": self.sbuild_log_dir,
"build_deps": self.build_deps,
"build_env": self.build_env,
"build_options": self.build_options,
"metadata_dir": self.metadata_dir,
"capture_logs": self.capture_logs,
"capture_changes": self.capture_changes,
"use_root_cache": self.use_root_cache,
"root_cache_dir": self.root_cache_dir,
"root_cache_age": self.root_cache_age,
"use_package_cache": self.use_package_cache,
"package_cache_dir": self.package_cache_dir,
"use_ccache": self.use_ccache,
"ccache_dir": self.ccache_dir,
"use_tmpfs": self.use_tmpfs,
"tmpfs_size": self.tmpfs_size,
"parallel_jobs": self.parallel_jobs,
"parallel_compression": self.parallel_compression,
"use_host_resolv": self.use_host_resolv,
"http_proxy": self.http_proxy,
"https_proxy": self.https_proxy,
"no_proxy": self.no_proxy,
"mirror": self.mirror,
"security_mirror": self.security_mirror,
"backports_mirror": self.backports_mirror,
"isolation": self.isolation,
"enable_network": self.enable_network,
"selinux_enabled": self.selinux_enabled,
"use_bootstrap_chroot": self.use_bootstrap_chroot,
"bootstrap_chroot_name": self.bootstrap_chroot_name,
"bootstrap_arch": self.bootstrap_arch,
"bootstrap_suite": self.bootstrap_suite,
"chroot_setup_cmd": self.chroot_setup_cmd,
"chroot_additional_packages": self.chroot_additional_packages,
"preserve_environment": self.preserve_environment,
"environment_sanitization": self.environment_sanitization,
"allowed_environment_vars": self.allowed_environment_vars,
}
def save(self, config_path: str) -> None:
"""Save configuration to a YAML file"""
try:
config_dir = Path(config_path).parent
config_dir.mkdir(parents=True, exist_ok=True)
with open(config_path, 'w') as f:
with open(config_path, "w") as f:
yaml.dump(self.to_dict(), f, default_flow_style=False)
except Exception as e:
raise ConfigurationError(f"Error saving configuration: {e}")
def validate(self) -> None:
"""Validate configuration"""
errors = []
# Check required directories
if not os.path.exists(self.chroot_config_dir):
errors.append(f"Chroot config directory does not exist: {self.chroot_config_dir}")
if not os.path.exists(self.sbuild_config):
errors.append(f"sbuild config file does not exist: {self.sbuild_config}")
# Check architecture
valid_architectures = ['amd64', 'i386', 'arm64', 'armhf', 'ppc64el', 's390x']
valid_architectures = ["amd64", "i386", "arm64", "armhf", "ppc64el", "s390x"]
if self.architecture not in valid_architectures:
errors.append(f"Invalid architecture: {self.architecture}")
# Check suite
valid_suites = ['bookworm', 'sid', 'bullseye', 'buster', 'jammy', 'noble', 'focal']
valid_suites = [
"bookworm",
"sid",
"bullseye",
"buster",
"jammy",
"noble",
"focal",
]
if self.suite not in valid_suites:
errors.append(f"Invalid suite: {self.suite}")
# Check isolation method
valid_isolation = ['schroot', 'simple', 'nspawn']
valid_isolation = ["schroot", "simple", "nspawn"]
if self.isolation not in valid_isolation:
errors.append(f"Invalid isolation method: {self.isolation}")
# Check parallel jobs
if self.parallel_jobs < 1:
errors.append("Parallel jobs must be at least 1")
if errors:
raise ConfigurationError(f"Configuration validation failed:\n" + "\n".join(errors))
raise ConfigurationError("Configuration validation failed:\n" + "\n".join(errors))
def get_chroot_path(self) -> str:
"""Get the full path to the chroot directory"""
return os.path.join(self.chroot_dir, self.chroot_name)
def get_output_path(self) -> str:
"""Get the full path to the output directory"""
return os.path.abspath(self.output_dir)
def get_metadata_path(self) -> str:
"""Get the full path to the metadata directory"""
return os.path.abspath(self.metadata_dir)
def get_root_cache_path(self) -> str:
"""Get the full path to the root cache directory"""
return os.path.join(self.root_cache_dir, self.chroot_name)
def get_package_cache_path(self) -> str:
"""Get the full path to the package cache directory"""
return os.path.join(self.package_cache_dir, self.chroot_name)
def get_ccache_path(self) -> str:
"""Get the full path to the ccache directory"""
return os.path.join(self.ccache_dir, self.chroot_name)
def setup_build_environment(self) -> Dict[str, str]:
"""Setup build environment variables"""
env = {}
# Set parallel build options
if self.parallel_jobs > 1:
env['DEB_BUILD_OPTIONS'] = f"parallel={self.parallel_jobs},nocheck"
env['MAKEFLAGS'] = f"-j{self.parallel_jobs}"
env["DEB_BUILD_OPTIONS"] = f"parallel={self.parallel_jobs},nocheck"
env["MAKEFLAGS"] = f"-j{self.parallel_jobs}"
# Set ccache if enabled
if self.use_ccache:
env['CCACHE_DIR'] = self.get_ccache_path()
env['CCACHE_HASHDIR'] = '1'
env["CCACHE_DIR"] = self.get_ccache_path()
env["CCACHE_HASHDIR"] = "1"
# Set proxy if configured
if self.http_proxy:
env['http_proxy'] = self.http_proxy
env["http_proxy"] = self.http_proxy
if self.https_proxy:
env['https_proxy'] = self.https_proxy
env["https_proxy"] = self.https_proxy
if self.no_proxy:
env['no_proxy'] = self.no_proxy
env["no_proxy"] = self.no_proxy
# Merge with user-defined build environment
env.update(self.build_env)
return env
return env

View file

@ -5,14 +5,15 @@ This package provides default configuration files for various Debian-based Linux
similar to Mock's mock-core-configs package.
"""
import os
import yaml
from pathlib import Path
from typing import Dict, List, Optional
from typing import Dict, List
import yaml
# Base directory for config files
CONFIGS_DIR = Path(__file__).parent
def get_available_configs() -> List[str]:
"""Get list of available configuration names"""
configs = []
@ -21,15 +22,17 @@ def get_available_configs() -> List[str]:
configs.append(config_file.stem)
return sorted(configs)
def load_config(config_name: str) -> Dict:
"""Load a configuration by name"""
config_file = CONFIGS_DIR / f"{config_name}.yaml"
if not config_file.exists():
raise ValueError(f"Configuration '{config_name}' not found")
with open(config_file, 'r') as f:
with open(config_file, "r") as f:
return yaml.safe_load(f)
def list_configs() -> Dict[str, Dict]:
"""List all available configurations with their details"""
configs = {}
@ -37,11 +40,11 @@ def list_configs() -> Dict[str, Dict]:
try:
config = load_config(config_name)
configs[config_name] = {
'description': config.get('description', ''),
'suite': config.get('suite', ''),
'architecture': config.get('architecture', ''),
'mirror': config.get('mirror', '')
"description": config.get("description", ""),
"suite": config.get("suite", ""),
"architecture": config.get("architecture", ""),
"mirror": config.get("mirror", ""),
}
except Exception:
continue
return configs
return configs

View file

@ -32,4 +32,5 @@ output_dir: "./output"
metadata_dir: "./metadata"
keep_chroot: false
verbose: false
debug: false
debug: false

View file

@ -32,4 +32,5 @@ output_dir: "./output"
metadata_dir: "./metadata"
keep_chroot: false
verbose: false
debug: false
debug: false

View file

@ -32,4 +32,5 @@ output_dir: "./output"
metadata_dir: "./metadata"
keep_chroot: false
verbose: false
debug: false
debug: false

View file

@ -32,4 +32,5 @@ output_dir: "./output"
metadata_dir: "./metadata"
keep_chroot: false
verbose: false
debug: false
debug: false

View file

@ -3,34 +3,33 @@ Core DebMock class for orchestrating the build process
"""
import os
import json
import shutil
from pathlib import Path
from typing import Dict, Any, Optional, List
from .config import Config
from .chroot import ChrootManager
from .sbuild import SbuildWrapper
from .metadata import MetadataManager
from typing import Any, Dict, List, Optional
from .cache import CacheManager
from .exceptions import DebMockError, BuildError, ChrootError, SbuildError
from .chroot import ChrootManager
from .config import Config
from .exceptions import ChrootError
from .metadata import MetadataManager
from .sbuild import SbuildWrapper
class DebMock:
"""Main DebMock class for orchestrating package builds"""
def __init__(self, config: Config):
self.config = config
self.chroot_manager = ChrootManager(config)
self.sbuild_wrapper = SbuildWrapper(config)
self.metadata_manager = MetadataManager(config)
self.cache_manager = CacheManager(config)
# Validate configuration
self.config.validate()
# Setup caches
self._setup_caches()
def _setup_caches(self) -> None:
"""Setup cache directories and ccache"""
try:
@ -40,300 +39,283 @@ class DebMock:
except Exception as e:
# Log warning but continue
print(f"Warning: Failed to setup caches: {e}")
def build(self, source_package: str, **kwargs) -> Dict[str, Any]:
"""Build a Debian source package in an isolated environment"""
# Ensure chroot exists
chroot_name = kwargs.get('chroot_name', self.config.chroot_name)
chroot_name = kwargs.get("chroot_name", self.config.chroot_name)
chroot_path = self.config.get_chroot_path()
# Try to restore from cache first
if not self.chroot_manager.chroot_exists(chroot_name):
if not self.cache_manager.restore_root_cache(chroot_path):
self.chroot_manager.create_chroot(chroot_name)
# Check build dependencies
deps_check = self.sbuild_wrapper.check_dependencies(source_package, chroot_name)
if not deps_check['satisfied']:
if not deps_check["satisfied"]:
# Try to install missing dependencies
if deps_check['missing']:
self.sbuild_wrapper.install_build_dependencies(deps_check['missing'], chroot_name)
if deps_check["missing"]:
self.sbuild_wrapper.install_build_dependencies(deps_check["missing"], chroot_name)
# Setup build environment
build_env = self.config.setup_build_environment()
# Build the package
build_result = self.sbuild_wrapper.build_package(
source_package,
chroot_name,
build_env=build_env,
**kwargs
)
build_result = self.sbuild_wrapper.build_package(source_package, chroot_name, build_env=build_env, **kwargs)
# Create cache after successful build
if build_result.get('success', False):
if build_result.get("success", False):
self.cache_manager.create_root_cache(chroot_path)
# Capture and store metadata
metadata = self._capture_build_metadata(build_result, source_package)
self.metadata_manager.store_metadata(metadata)
# Clean up chroot if not keeping it
if not kwargs.get('keep_chroot', self.config.keep_chroot):
if not kwargs.get("keep_chroot", self.config.keep_chroot):
self.chroot_manager.clean_chroot(chroot_name)
return build_result
def build_chain(self, source_packages: List[str], **kwargs) -> List[Dict[str, Any]]:
"""Build a chain of packages that depend on each other (similar to Mock's --chain)"""
results = []
chroot_name = kwargs.get('chroot_name', self.config.chroot_name)
chroot_name = kwargs.get("chroot_name", self.config.chroot_name)
chroot_path = self.config.get_chroot_path()
# Try to restore from cache first
if not self.chroot_manager.chroot_exists(chroot_name):
if not self.cache_manager.restore_root_cache(chroot_path):
self.chroot_manager.create_chroot(chroot_name)
# Setup build environment
build_env = self.config.setup_build_environment()
for i, source_package in enumerate(source_packages):
try:
# Build the package
result = self.sbuild_wrapper.build_package(
source_package,
chroot_name,
build_env=build_env,
**kwargs
result = self.sbuild_wrapper.build_package(source_package, chroot_name, build_env=build_env, **kwargs)
results.append(
{
"package": source_package,
"success": True,
"result": result,
"order": i + 1,
}
)
results.append({
'package': source_package,
'success': True,
'result': result,
'order': i + 1
})
# Install the built package in the chroot for subsequent builds
if result.get('artifacts'):
self._install_built_package(result['artifacts'], chroot_name)
if result.get("artifacts"):
self._install_built_package(result["artifacts"], chroot_name)
except Exception as e:
results.append({
'package': source_package,
'success': False,
'error': str(e),
'order': i + 1
})
results.append(
{
"package": source_package,
"success": False,
"error": str(e),
"order": i + 1,
}
)
# Stop chain on failure unless continue_on_failure is specified
if not kwargs.get('continue_on_failure', False):
if not kwargs.get("continue_on_failure", False):
break
# Create cache after successful chain build
if any(r['success'] for r in results):
if any(r["success"] for r in results):
self.cache_manager.create_root_cache(chroot_path)
return results
def _install_built_package(self, artifacts: List[str], chroot_name: str) -> None:
"""Install a built package in the chroot for chain building"""
# Find .deb files in artifacts
deb_files = [art for art in artifacts if art.endswith('.deb')]
deb_files = [art for art in artifacts if art.endswith(".deb")]
if not deb_files:
return
# Copy .deb files to chroot and install them
for deb_file in deb_files:
try:
# Copy to chroot
chroot_deb_path = f"/tmp/{os.path.basename(deb_file)}"
self.chroot_manager.copy_to_chroot(deb_file, chroot_deb_path, chroot_name)
# Install in chroot
self.chroot_manager.execute_in_chroot(
chroot_name,
['dpkg', '-i', chroot_deb_path],
capture_output=False
chroot_name, ["dpkg", "-i", chroot_deb_path], capture_output=False
)
# Clean up
self.chroot_manager.execute_in_chroot(
chroot_name,
['rm', '-f', chroot_deb_path],
capture_output=False
)
self.chroot_manager.execute_in_chroot(chroot_name, ["rm", "-f", chroot_deb_path], capture_output=False)
except Exception as e:
# Log warning but continue
print(f"Warning: Failed to install {deb_file} in chroot: {e}")
def init_chroot(self, chroot_name: str, arch: str = None, suite: str = None) -> None:
"""Initialize a new chroot environment"""
self.chroot_manager.create_chroot(chroot_name, arch, suite)
# Create cache after successful chroot creation
chroot_path = os.path.join(self.config.chroot_dir, chroot_name)
self.cache_manager.create_root_cache(chroot_path)
def clean_chroot(self, chroot_name: str) -> None:
"""Clean up a chroot environment"""
self.chroot_manager.clean_chroot(chroot_name)
def list_chroots(self) -> list:
"""List available chroot environments"""
return self.chroot_manager.list_chroots()
def update_chroot(self, chroot_name: str) -> None:
"""Update packages in a chroot environment"""
self.chroot_manager.update_chroot(chroot_name)
# Update cache after successful update
chroot_path = os.path.join(self.config.chroot_dir, chroot_name)
self.cache_manager.create_root_cache(chroot_path)
def get_chroot_info(self, chroot_name: str) -> dict:
"""Get information about a chroot environment"""
return self.chroot_manager.get_chroot_info(chroot_name)
def shell(self, chroot_name: str = None) -> None:
"""Open a shell in the chroot environment (similar to Mock's --shell)"""
if chroot_name is None:
chroot_name = self.config.chroot_name
if not self.chroot_manager.chroot_exists(chroot_name):
raise ChrootError(f"Chroot '{chroot_name}' does not exist")
# Execute shell in chroot
self.chroot_manager.execute_in_chroot(
chroot_name,
['/bin/bash'],
capture_output=False
)
self.chroot_manager.execute_in_chroot(chroot_name, ["/bin/bash"], capture_output=False)
def copyout(self, source_path: str, dest_path: str, chroot_name: str = None) -> None:
"""Copy files from chroot to host (similar to Mock's --copyout)"""
if chroot_name is None:
chroot_name = self.config.chroot_name
self.chroot_manager.copy_from_chroot(source_path, dest_path, chroot_name)
def copyin(self, source_path: str, dest_path: str, chroot_name: str = None) -> None:
"""Copy files from host to chroot (similar to Mock's --copyin)"""
if chroot_name is None:
chroot_name = self.config.chroot_name
self.chroot_manager.copy_to_chroot(source_path, dest_path, chroot_name)
def cleanup_caches(self) -> Dict[str, int]:
"""Clean up old cache files (similar to Mock's cache management)"""
return self.cache_manager.cleanup_old_caches()
def get_cache_stats(self) -> Dict[str, Any]:
"""Get cache statistics"""
return self.cache_manager.get_cache_stats()
def _capture_build_metadata(self, build_result: Dict[str, Any], source_package: str) -> Dict[str, Any]:
"""Capture comprehensive build metadata"""
metadata = {
'source_package': source_package,
'build_result': build_result,
'config': self.config.to_dict(),
'artifacts': build_result.get('artifacts', []),
'build_metadata': build_result.get('metadata', {}),
'timestamp': self._get_timestamp(),
'build_success': build_result.get('success', False),
'cache_info': self.get_cache_stats()
"source_package": source_package,
"build_result": build_result,
"config": self.config.to_dict(),
"artifacts": build_result.get("artifacts", []),
"build_metadata": build_result.get("metadata", {}),
"timestamp": self._get_timestamp(),
"build_success": build_result.get("success", False),
"cache_info": self.get_cache_stats(),
}
# Add artifact details
metadata['artifact_details'] = self._get_artifact_details(build_result.get('artifacts', []))
metadata["artifact_details"] = self._get_artifact_details(build_result.get("artifacts", []))
return metadata
def _get_timestamp(self) -> str:
"""Get current timestamp"""
from datetime import datetime
return datetime.now().isoformat()
def _get_artifact_details(self, artifacts: list) -> list:
"""Get detailed information about build artifacts"""
details = []
for artifact_path in artifacts:
if os.path.exists(artifact_path):
stat = os.stat(artifact_path)
details.append({
'path': artifact_path,
'name': os.path.basename(artifact_path),
'size': stat.st_size,
'modified': stat.st_mtime,
'type': self._get_artifact_type(artifact_path)
})
details.append(
{
"path": artifact_path,
"name": os.path.basename(artifact_path),
"size": stat.st_size,
"modified": stat.st_mtime,
"type": self._get_artifact_type(artifact_path),
}
)
return details
def _get_artifact_type(self, artifact_path: str) -> str:
"""Determine the type of build artifact"""
ext = Path(artifact_path).suffix.lower()
if ext == '.deb':
return 'deb_package'
elif ext == '.changes':
return 'changes_file'
elif ext == '.buildinfo':
return 'buildinfo_file'
elif ext == '.dsc':
return 'source_package'
if ext == ".deb":
return "deb_package"
elif ext == ".changes":
return "changes_file"
elif ext == ".buildinfo":
return "buildinfo_file"
elif ext == ".dsc":
return "source_package"
else:
return 'other'
return "other"
def verify_reproducible_build(self, source_package: str, **kwargs) -> Dict[str, Any]:
"""Verify that a build is reproducible by building twice and comparing results"""
# First build
result1 = self.build(source_package, **kwargs)
# Clean chroot for second build
chroot_name = kwargs.get('chroot_name', self.config.chroot_name)
chroot_name = kwargs.get("chroot_name", self.config.chroot_name)
if self.chroot_manager.chroot_exists(chroot_name):
self.chroot_manager.clean_chroot(chroot_name)
# Second build
result2 = self.build(source_package, **kwargs)
# Compare results
comparison = self._compare_build_results(result1, result2)
return {
'reproducible': comparison['identical'],
'first_build': result1,
'second_build': result2,
'comparison': comparison
"reproducible": comparison["identical"],
"first_build": result1,
"second_build": result2,
"comparison": comparison,
}
def _compare_build_results(self, result1: Dict[str, Any], result2: Dict[str, Any]) -> Dict[str, Any]:
"""Compare two build results for reproducibility"""
comparison = {
'identical': True,
'differences': [],
'artifact_comparison': {}
}
comparison = {"identical": True, "differences": [], "artifact_comparison": {}}
# Compare artifacts
artifacts1 = set(result1.get('artifacts', []))
artifacts2 = set(result2.get('artifacts', []))
artifacts1 = set(result1.get("artifacts", []))
artifacts2 = set(result2.get("artifacts", []))
if artifacts1 != artifacts2:
comparison['identical'] = False
comparison['differences'].append('Different artifacts produced')
comparison["identical"] = False
comparison["differences"].append("Different artifacts produced")
# Compare individual artifacts
common_artifacts = artifacts1.intersection(artifacts2)
for artifact in common_artifacts:
@ -341,142 +323,142 @@ class DebMock:
# Compare file hashes
hash1 = self._get_file_hash(artifact)
hash2 = self._get_file_hash(artifact)
comparison['artifact_comparison'][artifact] = {
'identical': hash1 == hash2,
'hash1': hash1,
'hash2': hash2
comparison["artifact_comparison"][artifact] = {
"identical": hash1 == hash2,
"hash1": hash1,
"hash2": hash2,
}
if hash1 != hash2:
comparison['identical'] = False
comparison['differences'].append(f'Artifact {artifact} differs')
comparison["identical"] = False
comparison["differences"].append(f"Artifact {artifact} differs")
return comparison
def _get_file_hash(self, file_path: str) -> str:
"""Get SHA256 hash of a file"""
import hashlib
hash_sha256 = hashlib.sha256()
with open(file_path, "rb") as f:
for chunk in iter(lambda: f.read(4096), b""):
hash_sha256.update(chunk)
return hash_sha256.hexdigest()
def get_build_history(self) -> list:
"""Get build history from metadata store"""
return self.metadata_manager.get_build_history()
def get_build_info(self, build_id: str) -> Optional[Dict[str, Any]]:
"""Get information about a specific build"""
return self.metadata_manager.get_build_info(build_id)
def install_dependencies(self, source_package: str) -> Dict[str, Any]:
"""Install build dependencies for a source package"""
chroot_name = self.config.chroot_name
# Ensure chroot exists
if not self.chroot_manager.chroot_exists(chroot_name):
self.chroot_manager.create_chroot(chroot_name)
# Check and install dependencies
deps_check = self.sbuild_wrapper.check_dependencies(source_package, chroot_name)
if deps_check['missing']:
result = self.sbuild_wrapper.install_build_dependencies(deps_check['missing'], chroot_name)
if deps_check["missing"]:
result = self.sbuild_wrapper.install_build_dependencies(deps_check["missing"], chroot_name)
return {
'success': True,
'installed': deps_check['missing'],
'details': result
"success": True,
"installed": deps_check["missing"],
"details": result,
}
else:
return {
'success': True,
'installed': [],
'message': 'All dependencies already satisfied'
"success": True,
"installed": [],
"message": "All dependencies already satisfied",
}
def install_packages(self, packages: List[str]) -> Dict[str, Any]:
"""Install packages in the chroot environment"""
chroot_name = self.config.chroot_name
# Ensure chroot exists
if not self.chroot_manager.chroot_exists(chroot_name):
self.chroot_manager.create_chroot(chroot_name)
# Install packages using APT
result = self.chroot_manager.execute_in_chroot(
chroot_name,
f"{self.config.apt_install_command} {' '.join(packages)}",
as_root=True
as_root=True,
)
return {
'success': result['returncode'] == 0,
'installed': packages,
'output': result['stdout'],
'error': result['stderr'] if result['returncode'] != 0 else None
"success": result["returncode"] == 0,
"installed": packages,
"output": result["stdout"],
"error": result["stderr"] if result["returncode"] != 0 else None,
}
def update_packages(self, packages: List[str] = None) -> Dict[str, Any]:
"""Update packages in the chroot environment"""
chroot_name = self.config.chroot_name
# Ensure chroot exists
if not self.chroot_manager.chroot_exists(chroot_name):
self.chroot_manager.create_chroot(chroot_name)
if packages:
# Update specific packages
cmd = f"{self.config.apt_command} install --only-upgrade {' '.join(packages)}"
else:
# Update all packages
cmd = f"{self.config.apt_command} update && {self.config.apt_command} upgrade -y"
result = self.chroot_manager.execute_in_chroot(chroot_name, cmd, as_root=True)
return {
'success': result['returncode'] == 0,
'updated': packages if packages else 'all',
'output': result['stdout'],
'error': result['stderr'] if result['returncode'] != 0 else None
"success": result["returncode"] == 0,
"updated": packages if packages else "all",
"output": result["stdout"],
"error": result["stderr"] if result["returncode"] != 0 else None,
}
def remove_packages(self, packages: List[str]) -> Dict[str, Any]:
"""Remove packages from the chroot environment"""
chroot_name = self.config.chroot_name
# Ensure chroot exists
if not self.chroot_manager.chroot_exists(chroot_name):
self.chroot_manager.create_chroot(chroot_name)
# Remove packages using APT
cmd = f"{self.config.apt_command} remove -y {' '.join(packages)}"
result = self.chroot_manager.execute_in_chroot(chroot_name, cmd, as_root=True)
return {
'success': result['returncode'] == 0,
'removed': packages,
'output': result['stdout'],
'error': result['stderr'] if result['returncode'] != 0 else None
"success": result["returncode"] == 0,
"removed": packages,
"output": result["stdout"],
"error": result["stderr"] if result["returncode"] != 0 else None,
}
def execute_apt_command(self, command: str) -> Dict[str, Any]:
"""Execute APT command in the chroot environment"""
chroot_name = self.config.chroot_name
# Ensure chroot exists
if not self.chroot_manager.chroot_exists(chroot_name):
self.chroot_manager.create_chroot(chroot_name)
# Execute APT command
cmd = f"{self.config.apt_command} {command}"
result = self.chroot_manager.execute_in_chroot(chroot_name, cmd, as_root=True)
return {
'success': result['returncode'] == 0,
'command': command,
'output': result['stdout'],
'error': result['stderr'] if result['returncode'] != 0 else None
}
"success": result["returncode"] == 0,
"command": command,
"output": result["stdout"],
"error": result["stderr"] if result["returncode"] != 0 else None,
}

View file

@ -5,28 +5,30 @@ This module provides a comprehensive exception hierarchy inspired by Mock's
exception handling system, adapted for Debian-based build environments.
"""
import os
import sys
import functools
from typing import Optional, Dict, Any, List
import sys
from typing import Any, Dict, List, Optional
class DebMockError(Exception):
"""
Base exception for all deb-mock errors.
This is the root exception class that all other deb-mock exceptions
inherit from. It provides common functionality for error reporting
and recovery suggestions.
"""
def __init__(self, message: str,
exit_code: int = 1,
context: Optional[Dict[str, Any]] = None,
suggestions: Optional[List[str]] = None):
def __init__(
self,
message: str,
exit_code: int = 1,
context: Optional[Dict[str, Any]] = None,
suggestions: Optional[List[str]] = None,
):
"""
Initialize the exception with message and optional context.
Args:
message: Human-readable error message
exit_code: Suggested exit code for CLI applications
@ -38,29 +40,29 @@ class DebMockError(Exception):
self.exit_code = exit_code
self.context = context or {}
self.suggestions = suggestions or []
def __str__(self) -> str:
"""Return formatted error message with context and suggestions."""
lines = [f"Error: {self.message}"]
# Add context information if available
if self.context:
lines.append("\nContext:")
for key, value in self.context.items():
lines.append(f" {key}: {value}")
# Add suggestions if available
if self.suggestions:
lines.append("\nSuggestions:")
for i, suggestion in enumerate(self.suggestions, 1):
lines.append(f" {i}. {suggestion}")
return "\n".join(lines)
def print_error(self, file=sys.stderr) -> None:
"""Print formatted error message to specified file."""
print(str(self), file=file)
def get_exit_code(self) -> int:
"""Get the suggested exit code for this error."""
return self.exit_code
@ -69,293 +71,344 @@ class DebMockError(Exception):
class ConfigurationError(DebMockError):
"""
Raised when there's an error in configuration.
This exception is raised when configuration files are invalid,
missing required options, or contain conflicting settings.
"""
def __init__(self, message: str, config_file: Optional[str] = None,
config_section: Optional[str] = None):
def __init__(
self,
message: str,
config_file: Optional[str] = None,
config_section: Optional[str] = None,
):
context = {}
if config_file:
context['config_file'] = config_file
context["config_file"] = config_file
if config_section:
context['config_section'] = config_section
context["config_section"] = config_section
suggestions = [
"Check the configuration file syntax",
"Verify all required options are set",
"Ensure configuration values are valid for your system"
"Ensure configuration values are valid for your system",
]
super().__init__(message, exit_code=2, context=context, suggestions=suggestions)
class ChrootError(DebMockError):
"""
Raised when there's an error with chroot operations.
This exception covers chroot creation, management, and cleanup errors.
"""
def __init__(self, message: str, chroot_name: Optional[str] = None,
operation: Optional[str] = None, chroot_path: Optional[str] = None):
def __init__(
self,
message: str,
chroot_name: Optional[str] = None,
operation: Optional[str] = None,
chroot_path: Optional[str] = None,
):
context = {}
if chroot_name:
context['chroot_name'] = chroot_name
context["chroot_name"] = chroot_name
if operation:
context['operation'] = operation
context["operation"] = operation
if chroot_path:
context['chroot_path'] = chroot_path
context["chroot_path"] = chroot_path
suggestions = [
"Ensure you have sufficient disk space",
"Check that you have root privileges for chroot operations",
"Verify the chroot name is valid",
"Try cleaning up existing chroots with 'deb-mock clean-chroot'"
"Try cleaning up existing chroots with 'deb-mock clean-chroot'",
]
super().__init__(message, exit_code=3, context=context, suggestions=suggestions)
class SbuildError(DebMockError):
"""
Raised when there's an error with sbuild operations.
This exception covers sbuild execution, configuration, and result processing.
"""
def __init__(self, message: str, sbuild_config: Optional[str] = None,
build_log: Optional[str] = None, return_code: Optional[int] = None):
def __init__(
self,
message: str,
sbuild_config: Optional[str] = None,
build_log: Optional[str] = None,
return_code: Optional[int] = None,
):
context = {}
if sbuild_config:
context['sbuild_config'] = sbuild_config
context["sbuild_config"] = sbuild_config
if build_log:
context['build_log'] = build_log
context["build_log"] = build_log
if return_code is not None:
context['return_code'] = return_code
context["return_code"] = return_code
suggestions = [
"Check the build log for detailed error information",
"Verify that sbuild is properly configured",
"Ensure all build dependencies are available",
"Try updating the chroot with 'deb-mock update-chroot'"
"Try updating the chroot with 'deb-mock update-chroot'",
]
super().__init__(message, exit_code=4, context=context, suggestions=suggestions)
class BuildError(DebMockError):
"""
Raised when a build fails.
This exception is raised when package building fails due to
compilation errors, missing dependencies, or other build issues.
"""
def __init__(self, message: str, source_package: Optional[str] = None,
build_log: Optional[str] = None, artifacts: Optional[List[str]] = None):
def __init__(
self,
message: str,
source_package: Optional[str] = None,
build_log: Optional[str] = None,
artifacts: Optional[List[str]] = None,
):
context = {}
if source_package:
context['source_package'] = source_package
context["source_package"] = source_package
if build_log:
context['build_log'] = build_log
context["build_log"] = build_log
if artifacts:
context['artifacts'] = artifacts
context["artifacts"] = artifacts
suggestions = [
"Review the build log for specific error messages",
"Check that all build dependencies are installed",
"Verify the source package is valid and complete",
"Try building with verbose output: 'deb-mock --verbose build'"
"Try building with verbose output: 'deb-mock --verbose build'",
]
super().__init__(message, exit_code=5, context=context, suggestions=suggestions)
class DependencyError(DebMockError):
"""
Raised when there are dependency issues.
This exception covers missing build dependencies, version conflicts,
and other dependency-related problems.
"""
def __init__(self, message: str, missing_packages: Optional[List[str]] = None,
conflicting_packages: Optional[List[str]] = None):
def __init__(
self,
message: str,
missing_packages: Optional[List[str]] = None,
conflicting_packages: Optional[List[str]] = None,
):
context = {}
if missing_packages:
context['missing_packages'] = missing_packages
context["missing_packages"] = missing_packages
if conflicting_packages:
context['conflicting_packages'] = conflicting_packages
context["conflicting_packages"] = conflicting_packages
suggestions = [
"Install missing build dependencies",
"Resolve package conflicts by updating or removing conflicting packages",
"Check that your chroot has access to the required repositories",
"Try updating the chroot: 'deb-mock update-chroot'"
"Try updating the chroot: 'deb-mock update-chroot'",
]
super().__init__(message, exit_code=6, context=context, suggestions=suggestions)
class MetadataError(DebMockError):
"""
Raised when there's an error with metadata handling.
This exception covers metadata capture, storage, and retrieval errors.
"""
def __init__(self, message: str, metadata_file: Optional[str] = None,
operation: Optional[str] = None):
def __init__(
self,
message: str,
metadata_file: Optional[str] = None,
operation: Optional[str] = None,
):
context = {}
if metadata_file:
context['metadata_file'] = metadata_file
context["metadata_file"] = metadata_file
if operation:
context['operation'] = operation
context["operation"] = operation
suggestions = [
"Check that the metadata directory is writable",
"Verify that the metadata file format is valid",
"Ensure sufficient disk space for metadata storage"
"Ensure sufficient disk space for metadata storage",
]
super().__init__(message, exit_code=7, context=context, suggestions=suggestions)
class CacheError(DebMockError):
"""
Raised when there's an error with cache operations.
This exception covers root cache, package cache, and ccache errors.
"""
def __init__(self, message: str, cache_type: Optional[str] = None,
cache_path: Optional[str] = None, operation: Optional[str] = None):
def __init__(
self,
message: str,
cache_type: Optional[str] = None,
cache_path: Optional[str] = None,
operation: Optional[str] = None,
):
context = {}
if cache_type:
context['cache_type'] = cache_type
context["cache_type"] = cache_type
if cache_path:
context['cache_path'] = cache_path
context["cache_path"] = cache_path
if operation:
context['operation'] = operation
context["operation"] = operation
suggestions = [
"Check that cache directories are writable",
"Ensure sufficient disk space for cache operations",
"Try cleaning up old caches: 'deb-mock cleanup-caches'",
"Verify cache configuration settings"
"Verify cache configuration settings",
]
super().__init__(message, exit_code=8, context=context, suggestions=suggestions)
class PluginError(DebMockError):
    """
    Raised when there's an error with plugin operations.

    This exception covers plugin loading, configuration, and execution errors.
    """

    def __init__(
        self,
        message: str,
        plugin_name: Optional[str] = None,
        plugin_config: Optional[Dict[str, Any]] = None,
    ):
        # Only supplied details are recorded in the error context.
        context = {}
        if plugin_name:
            context["plugin_name"] = plugin_name
        if plugin_config:
            context["plugin_config"] = plugin_config
        suggestions = [
            "Check that the plugin is properly installed",
            "Verify plugin configuration is valid",
            "Ensure plugin dependencies are satisfied",
            "Try disabling the plugin if it's causing issues",
        ]
        # Exit code 9 is reserved for plugin errors.
        super().__init__(message, exit_code=9, context=context, suggestions=suggestions)
class NetworkError(DebMockError):
    """
    Raised when there are network-related errors.

    This exception covers repository access, package downloads, and
    other network operations.
    """

    def __init__(
        self,
        message: str,
        url: Optional[str] = None,
        proxy: Optional[str] = None,
        timeout: Optional[int] = None,
    ):
        # Only supplied details are recorded in the error context.
        context = {}
        if url:
            context["url"] = url
        if proxy:
            context["proxy"] = proxy
        if timeout:
            context["timeout"] = timeout
        suggestions = [
            "Check your internet connection",
            "Verify repository URLs are accessible",
            "Configure proxy settings if behind a firewall",
            "Try using a different mirror or repository",
        ]
        # Exit code 10 is reserved for network errors.
        super().__init__(message, exit_code=10, context=context, suggestions=suggestions)
class PermissionError(DebMockError):
    """
    Raised when there are permission-related errors.

    This exception covers insufficient privileges for chroot operations,
    file access, and other permission issues.

    NOTE: this intentionally shadows the builtin ``PermissionError`` within
    this package; callers catching the builtin by that name in modules that
    import this class will get this type instead.
    """

    def __init__(
        self,
        message: str,
        operation: Optional[str] = None,
        path: Optional[str] = None,
        required_privileges: Optional[str] = None,
    ):
        # Only supplied details are recorded in the error context.
        context = {}
        if operation:
            context["operation"] = operation
        if path:
            context["path"] = path
        if required_privileges:
            context["required_privileges"] = required_privileges
        suggestions = [
            "Run the command with appropriate privileges (sudo)",
            "Check file and directory permissions",
            "Verify your user is in the required groups",
            "Ensure the target paths are writable",
        ]
        # Exit code 11 is reserved for permission errors.
        super().__init__(message, exit_code=11, context=context, suggestions=suggestions)
class ValidationError(DebMockError):
    """
    Raised when input validation fails.

    This exception covers validation of source packages, configuration,
    and other input data.
    """

    def __init__(
        self,
        message: str,
        field: Optional[str] = None,
        value: Optional[str] = None,
        expected_format: Optional[str] = None,
    ):
        # Only supplied details are recorded in the error context.
        context = {}
        if field:
            context["field"] = field
        if value:
            context["value"] = value
        if expected_format:
            context["expected_format"] = expected_format
        suggestions = [
            "Check the input format and syntax",
            "Verify that required fields are provided",
            "Ensure values are within acceptable ranges",
            "Review the documentation for correct usage",
        ]
        # Exit code 12 is reserved for validation errors.
        super().__init__(message, exit_code=12, context=context, suggestions=suggestions)
@ -363,10 +416,11 @@ class ValidationError(DebMockError):
def handle_exception(func):
"""
Decorator to handle exceptions and provide consistent error reporting.
This decorator catches DebMockError exceptions and provides
formatted error output with suggestions for resolution.
"""
@functools.wraps(func)
def wrapper(*args, **kwargs):
try:
@ -378,26 +432,27 @@ def handle_exception(func):
# Convert unexpected exceptions to DebMockError
error = DebMockError(
f"Unexpected error: {str(e)}",
context={'exception_type': type(e).__name__},
context={"exception_type": type(e).__name__},
suggestions=[
"This may be a bug in deb-mock",
"Check the logs for more details",
"Report the issue with full error context"
]
"Report the issue with full error context",
],
)
error.print_error()
sys.exit(1)
return wrapper
def format_error_context(**kwargs) -> Dict[str, Any]:
    """
    Helper function to format error context information.

    Args:
        **kwargs: Key-value pairs for context information

    Returns:
        Context dictionary with all ``None`` values dropped
    """
    return {k: v for k, v in kwargs.items() if v is not None}

View file

@ -2,137 +2,133 @@
Metadata management for deb-mock
"""
import os
import json
import uuid
from pathlib import Path
from typing import Dict, Any, List, Optional
from datetime import datetime
from pathlib import Path
from typing import Any, Dict, List, Optional
from .exceptions import MetadataError
class MetadataManager:
"""Manages build metadata capture and storage"""
def __init__(self, config):
    """Initialize the manager and ensure the metadata directory exists.

    Args:
        config: Configuration object providing ``get_metadata_path()``.
    """
    self.config = config
    # Create the storage directory up front so later writes can assume it.
    self.metadata_dir = Path(config.get_metadata_path())
    self.metadata_dir.mkdir(parents=True, exist_ok=True)
def store_metadata(self, metadata: Dict[str, Any]) -> str:
    """Store build metadata and return the generated build ID.

    Args:
        metadata: Build metadata dictionary; mutated in place to include
            ``build_id`` and ``stored_at`` fields.

    Returns:
        The unique build ID under which the metadata was stored.

    Raises:
        MetadataError: If the metadata file cannot be written.
    """
    # Generate unique build ID
    build_id = self._generate_build_id()

    # Stamp the record so it is self-describing on disk.
    metadata["build_id"] = build_id
    metadata["stored_at"] = datetime.now().isoformat()

    # One JSON file per build, keyed by build ID.
    metadata_file = self.metadata_dir / f"{build_id}.json"
    try:
        with open(metadata_file, "w") as f:
            json.dump(metadata, f, indent=2, default=str)
    except Exception as e:
        raise MetadataError(f"Failed to store metadata: {e}")

    # Keep the lightweight build index in sync.
    self._update_build_index(build_id, metadata)

    return build_id
def get_build_info(self, build_id: str) -> Optional[Dict[str, Any]]:
    """Return metadata for a specific build, or ``None`` if unknown.

    Args:
        build_id: Build identifier previously returned by ``store_metadata``.

    Raises:
        MetadataError: If the metadata file exists but cannot be parsed.
    """
    metadata_file = self.metadata_dir / f"{build_id}.json"

    if not metadata_file.exists():
        return None

    try:
        with open(metadata_file, "r") as f:
            return json.load(f)
    except Exception as e:
        raise MetadataError(f"Failed to load metadata for build {build_id}: {e}")
def get_build_history(self, limit: int = None) -> List[Dict[str, Any]]:
    """Return full metadata for known builds, newest first.

    Args:
        limit: Maximum number of builds to return (``None`` means all).

    Returns:
        List of full metadata dictionaries; empty when no index exists.

    Raises:
        MetadataError: If the build index cannot be read.
    """
    builds = []

    # The index file is the source of truth for which builds exist.
    index_file = self.metadata_dir / "build_index.json"
    if not index_file.exists():
        return builds

    try:
        with open(index_file, "r") as f:
            build_index = json.load(f)
    except Exception as e:
        raise MetadataError(f"Failed to load build index: {e}")

    # Sort builds by timestamp (newest first)
    sorted_builds = sorted(build_index.values(), key=lambda x: x.get("timestamp", ""), reverse=True)

    # Apply limit if specified
    if limit:
        sorted_builds = sorted_builds[:limit]

    # Index entries are summaries; load the full metadata for each build,
    # silently skipping entries whose metadata file has gone missing.
    for build_info in sorted_builds:
        build_id = build_info.get("build_id")
        if build_id:
            full_metadata = self.get_build_info(build_id)
            if full_metadata:
                builds.append(full_metadata)

    return builds
def search_builds(self, criteria: Dict[str, Any]) -> List[Dict[str, Any]]:
    """Return every known build whose metadata matches *criteria*.

    Args:
        criteria: Field/value pairs interpreted by ``_matches_criteria``.
    """
    # Filter the full history through the per-build matcher.
    return [build for build in self.get_build_history() if self._matches_criteria(build, criteria)]
def delete_build_metadata(self, build_id: str) -> bool:
    """Delete stored metadata for a specific build.

    Args:
        build_id: Build whose metadata should be removed.

    Returns:
        True when the metadata file was removed, False when none existed.

    Raises:
        MetadataError: If deletion or index maintenance fails.
    """
    metadata_file = self.metadata_dir / f"{build_id}.json"

    # Unknown build: nothing to delete.
    if not metadata_file.exists():
        return False

    try:
        metadata_file.unlink()
        self._remove_from_index(build_id)
    except Exception as e:
        raise MetadataError(f"Failed to delete metadata for build {build_id}: {e}")
    return True
def cleanup_old_metadata(self, days: int = 30) -> int:
"""Clean up metadata older than specified days"""
cutoff_time = datetime.now().timestamp() - (days * 24 * 60 * 60)
deleted_count = 0
all_builds = self.get_build_history()
for build in all_builds:
build_id = build.get('build_id')
timestamp = build.get('timestamp')
build_id = build.get("build_id")
timestamp = build.get("timestamp")
if timestamp:
try:
build_time = datetime.fromisoformat(timestamp).timestamp()
@ -142,106 +138,107 @@ class MetadataManager:
except ValueError:
# Skip builds with invalid timestamps
continue
return deleted_count
def export_metadata(self, build_id: str, format: str = "json") -> str:
    """Export build metadata serialized in the requested format.

    Args:
        build_id: Build whose metadata should be exported.
        format: ``"json"`` or ``"yaml"`` (case-insensitive).

    Returns:
        The serialized metadata as a string.

    Raises:
        MetadataError: If the build is unknown or the format unsupported.
    """
    metadata = self.get_build_info(build_id)
    if not metadata:
        raise MetadataError(f"Build {build_id} not found")

    if format.lower() == "json":
        return json.dumps(metadata, indent=2, default=str)
    elif format.lower() == "yaml":
        # Imported lazily so YAML support stays an optional dependency.
        import yaml

        return yaml.dump(metadata, default_flow_style=False)
    else:
        raise MetadataError(f"Unsupported export format: {format}")
def _generate_build_id(self) -> str:
"""Generate a unique build ID"""
return str(uuid.uuid4())
def _update_build_index(self, build_id: str, metadata: Dict[str, Any]) -> None:
    """Add or replace *build_id*'s summary entry in the build index.

    The index holds a lightweight summary per build so history listings
    do not need to parse every full metadata file.

    Args:
        build_id: Build being indexed.
        metadata: Full metadata dictionary for the build.

    Raises:
        MetadataError: If the updated index cannot be written.
    """
    index_file = self.metadata_dir / "build_index.json"

    # Load existing index; a corrupt/unreadable index is rebuilt from scratch.
    build_index = {}
    if index_file.exists():
        try:
            with open(index_file, "r") as f:
                build_index = json.load(f)
        except Exception:
            build_index = {}

    # Add new build to index
    build_index[build_id] = {
        "build_id": build_id,
        "source_package": metadata.get("source_package", ""),
        "timestamp": metadata.get("timestamp", ""),
        "build_success": metadata.get("build_success", False),
        "package_name": metadata.get("build_metadata", {}).get("package_name", ""),
        "package_version": metadata.get("build_metadata", {}).get("package_version", ""),
        "architecture": metadata.get("build_metadata", {}).get("architecture", ""),
        "suite": metadata.get("build_metadata", {}).get("suite", ""),
    }

    # Save updated index
    try:
        with open(index_file, "w") as f:
            json.dump(build_index, f, indent=2, default=str)
    except Exception as e:
        raise MetadataError(f"Failed to update build index: {e}")
def _remove_from_index(self, build_id: str) -> None:
    """Drop *build_id* from the build index, if present.

    A missing or unreadable index is treated as "nothing to remove".

    Raises:
        MetadataError: If the updated index cannot be written back.
    """
    index_file = self.metadata_dir / "build_index.json"

    if not index_file.exists():
        return

    try:
        with open(index_file, "r") as f:
            build_index = json.load(f)
    except Exception:
        # Unreadable index: nothing we can safely update.
        return

    if build_id in build_index:
        del build_index[build_id]
        try:
            with open(index_file, "w") as f:
                json.dump(build_index, f, indent=2, default=str)
        except Exception as e:
            raise MetadataError(f"Failed to update build index: {e}")
def _matches_criteria(self, build: Dict[str, Any], criteria: Dict[str, Any]) -> bool:
"""Check if a build matches the given criteria"""
for key, value in criteria.items():
if key == 'package_name':
build_package = build.get('build_metadata', {}).get('package_name', '')
if key == "package_name":
build_package = build.get("build_metadata", {}).get("package_name", "")
if value.lower() not in build_package.lower():
return False
elif key == 'architecture':
build_arch = build.get('build_metadata', {}).get('architecture', '')
elif key == "architecture":
build_arch = build.get("build_metadata", {}).get("architecture", "")
if value.lower() != build_arch.lower():
return False
elif key == 'suite':
build_suite = build.get('build_metadata', {}).get('suite', '')
elif key == "suite":
build_suite = build.get("build_metadata", {}).get("suite", "")
if value.lower() != build_suite.lower():
return False
elif key == 'success':
build_success = build.get('build_success', False)
elif key == "success":
build_success = build.get("build_success", False)
if value != build_success:
return False
elif key == 'date_after':
build_timestamp = build.get('timestamp', '')
elif key == "date_after":
build_timestamp = build.get("timestamp", "")
if build_timestamp:
try:
build_time = datetime.fromisoformat(build_timestamp)
@ -250,8 +247,8 @@ class MetadataManager:
return False
except ValueError:
return False
elif key == 'date_before':
build_timestamp = build.get('timestamp', '')
elif key == "date_before":
build_timestamp = build.get("timestamp", "")
if build_timestamp:
try:
build_time = datetime.fromisoformat(build_timestamp)
@ -260,5 +257,5 @@ class MetadataManager:
return False
except ValueError:
return False
return True
return True

View file

@ -6,7 +6,6 @@ inspired by Fedora's Mock plugin architecture but adapted for Debian-based syste
"""
from .hook_manager import HookManager
from .base import BasePlugin
from .registry import PluginRegistry
# Global hook manager instance
@ -15,72 +14,78 @@ hook_manager = HookManager()
# Global plugin registry
plugin_registry = PluginRegistry()
# Convenience function for plugins to register hooks
def add_hook(hook_name: str, callback):
    """
    Register a hook callback.

    This is the main interface for plugins to register hooks,
    following the same pattern as Mock's plugin system.

    Args:
        hook_name: Name of the hook to register for
        callback: Function to call when hook is triggered
    """
    # Delegates to the module-level HookManager singleton.
    hook_manager.add_hook(hook_name, callback)
# Convenience function to call hooks
def call_hook(hook_name: str, context: dict = None):
    """
    Call all registered hooks for a given hook name.

    Args:
        hook_name: Name of the hook to trigger
        context: Context dictionary to pass to hook callbacks
    """
    # Delegates to the module-level HookManager singleton.
    hook_manager.call_hook(hook_name, context)
# Convenience function to get available hooks
def get_hook_names() -> list:
    """
    Get list of available hook names.

    Returns:
        List of hook names that have been registered
    """
    # Delegates to the module-level HookManager singleton.
    return hook_manager.get_hook_names()
# Convenience function to register plugins
def register_plugin(plugin_name: str, plugin_class):
    """
    Register a plugin class.

    Args:
        plugin_name: Name of the plugin
        plugin_class: Plugin class to register
    """
    # Delegates to the module-level PluginRegistry singleton.
    plugin_registry.register(plugin_name, plugin_class)
# Convenience function to get registered plugins
def get_registered_plugins() -> dict:
    """
    Get all registered plugins.

    Returns:
        Dictionary of registered plugin names and classes
    """
    # Delegates to the module-level PluginRegistry singleton.
    return plugin_registry.get_plugins()
# Convenience function to create plugin instances
def create_plugin(plugin_name: str, config):
    """
    Create a plugin instance.

    Args:
        plugin_name: Name of the plugin to create
        config: Configuration object

    Returns:
        Plugin instance
    """
    # The registry wires the instance up with the shared hook manager.
    return plugin_registry.create(plugin_name, config, hook_manager)

View file

@ -6,7 +6,7 @@ inspired by Fedora's Mock plugin architecture but adapted for Debian-based syste
"""
import logging
from typing import Dict, Any, Optional
from typing import Any, Dict
logger = logging.getLogger(__name__)
@ -14,17 +14,17 @@ logger = logging.getLogger(__name__)
class BasePlugin:
"""
Base class for all Deb-Mock plugins.
This class provides the foundation for all plugins in the Deb-Mock system,
following the same patterns as Fedora's Mock plugins but adapted for Debian workflows.
Plugins should inherit from this class and override the hook methods they need.
"""
def __init__(self, config, hook_manager):
"""
Initialize the plugin.
Args:
config: Configuration object
hook_manager: Hook manager instance
@ -33,382 +33,382 @@ class BasePlugin:
self.hook_manager = hook_manager
self.enabled = self._is_enabled()
self.plugin_name = self.__class__.__name__.lower()
# Register hooks if plugin is enabled
if self.enabled:
self._register_hooks()
logger.debug(f"Plugin {self.plugin_name} initialized and enabled")
else:
logger.debug(f"Plugin {self.plugin_name} initialized but disabled")
def _is_enabled(self) -> bool:
    """
    Check if plugin is enabled in configuration.

    Returns:
        True if plugin is enabled, False otherwise
    """
    plugin_config = getattr(self.config, "plugins", {})
    plugin_name = self.plugin_name

    # An explicit per-plugin section takes precedence.
    if plugin_name in plugin_config:
        return plugin_config[plugin_name].get("enabled", False)

    # Otherwise fall back to the global enable_plugins map; default disabled.
    return getattr(self.config, "enable_plugins", {}).get(plugin_name, False)
def _register_hooks(self):
"""
Register plugin hooks with the hook manager.
Override this method in subclasses to register specific hooks.
"""
# Override in subclasses to register hooks
pass
def _get_plugin_config(self) -> Dict[str, Any]:
    """
    Get plugin-specific configuration.

    Returns:
        Plugin configuration dictionary (empty when not configured)
    """
    plugin_config = getattr(self.config, "plugins", {})
    return plugin_config.get(self.plugin_name, {})
def _log_info(self, message: str):
"""Log an info message with plugin context."""
logger.info(f"[{self.plugin_name}] {message}")
def _log_debug(self, message: str):
"""Log a debug message with plugin context."""
logger.debug(f"[{self.plugin_name}] {message}")
def _log_warning(self, message: str):
"""Log a warning message with plugin context."""
logger.warning(f"[{self.plugin_name}] {message}")
def _log_error(self, message: str):
"""Log an error message with plugin context."""
logger.error(f"[{self.plugin_name}] {message}")
# ============================================================================
# Hook Method Stubs - Override in subclasses as needed
# ============================================================================
def clean(self, context: Dict[str, Any]) -> None:
"""
Clean up plugin resources.
Called after chroot cleanup.
Args:
context: Context dictionary with cleanup information
"""
pass
def earlyprebuild(self, context: Dict[str, Any]) -> None:
"""
Very early build stage.
Called before SRPM rebuild, before dependencies.
Args:
context: Context dictionary with early build information
"""
pass
def initfailed(self, context: Dict[str, Any]) -> None:
"""
Chroot initialization failed.
Called when chroot creation fails.
Args:
context: Context dictionary with error information
"""
pass
def list_snapshots(self, context: Dict[str, Any]) -> None:
"""
List available snapshots.
Called when --list-snapshots is used.
Args:
context: Context dictionary with snapshot information
"""
pass
def make_snapshot(self, context: Dict[str, Any]) -> None:
"""
Create a snapshot.
Called when snapshot creation is requested.
Args:
context: Context dictionary with snapshot creation parameters
"""
pass
def mount_root(self, context: Dict[str, Any]) -> None:
"""
Mount chroot directory.
Called before preinit, chroot exists.
Args:
context: Context dictionary with mount information
"""
pass
def postbuild(self, context: Dict[str, Any]) -> None:
"""
After build completion.
Called after RPM/SRPM build (success/failure).
Args:
context: Context dictionary with build results
"""
pass
def postchroot(self, context: Dict[str, Any]) -> None:
"""
After chroot command.
Called after mock chroot command.
Args:
context: Context dictionary with chroot command results
"""
pass
def postclean(self, context: Dict[str, Any]) -> None:
"""
After chroot cleanup.
Called after chroot content deletion.
Args:
context: Context dictionary with cleanup information
"""
pass
def postdeps(self, context: Dict[str, Any]) -> None:
"""
After dependency installation.
Called when dependencies installed, before build.
Args:
context: Context dictionary with dependency information
"""
pass
def postinit(self, context: Dict[str, Any]) -> None:
"""
After chroot initialization.
Called when chroot ready for dependencies.
Args:
context: Context dictionary with initialization results
"""
pass
def postshell(self, context: Dict[str, Any]) -> None:
"""
After shell exit.
Called after mock shell command.
Args:
context: Context dictionary with shell session information
"""
pass
def postupdate(self, context: Dict[str, Any]) -> None:
"""
After package updates.
Called after successful package updates.
Args:
context: Context dictionary with update information
"""
pass
def postumount(self, context: Dict[str, Any]) -> None:
"""
After unmounting.
Called when all inner mounts unmounted.
Args:
context: Context dictionary with unmount information
"""
pass
def postapt(self, context: Dict[str, Any]) -> None:
"""
After APT operations.
Called after any package manager action.
Args:
context: Context dictionary with APT operation results
"""
pass
def prebuild(self, context: Dict[str, Any]) -> None:
"""
Before build starts.
Called after BuildRequires, before RPM build.
Args:
context: Context dictionary with build preparation information
"""
pass
def prechroot(self, context: Dict[str, Any]) -> None:
"""
Before chroot command.
Called before mock chroot command.
Args:
context: Context dictionary with chroot command parameters
"""
pass
def preinit(self, context: Dict[str, Any]) -> None:
"""
Before chroot initialization.
Called when only chroot/result dirs exist.
Args:
context: Context dictionary with initialization parameters
"""
pass
def preshell(self, context: Dict[str, Any]) -> None:
"""
Before shell prompt.
Called before mock shell prompt.
Args:
context: Context dictionary with shell session parameters
"""
pass
def preapt(self, context: Dict[str, Any]) -> None:
"""
Before APT operations.
Called before any package manager action.
Args:
context: Context dictionary with APT operation parameters
"""
pass
def process_logs(self, context: Dict[str, Any]) -> None:
"""
Process build logs.
Called after build log completion.
Args:
context: Context dictionary with log information
"""
pass
def remove_snapshot(self, context: Dict[str, Any]) -> None:
"""
Remove snapshot.
Called when snapshot removal requested.
Args:
context: Context dictionary with snapshot removal parameters
"""
pass
def rollback_to(self, context: Dict[str, Any]) -> None:
"""
Rollback to snapshot.
Called when rollback requested.
Args:
context: Context dictionary with rollback parameters
"""
pass
def scrub(self, context: Dict[str, Any]) -> None:
"""
Scrub chroot.
Called when chroot scrubbing requested.
Args:
context: Context dictionary with scrub parameters
"""
pass
# ============================================================================
# Plugin Lifecycle Methods
# ============================================================================
def setup(self, context: Dict[str, Any]) -> None:
"""
Setup plugin before build.
Called once during plugin initialization.
Args:
context: Context dictionary with setup information
"""
pass
def teardown(self, context: Dict[str, Any]) -> None:
"""
Cleanup plugin after build.
Called once during plugin cleanup.
Args:
context: Context dictionary with teardown information
"""
pass
def validate_config(self, config: Any) -> bool:
"""
Validate plugin configuration.
Args:
config: Configuration to validate
Returns:
True if configuration is valid, False otherwise
"""
return True
def get_plugin_info(self) -> Dict[str, Any]:
    """
    Get plugin information.

    Returns:
        Dictionary with plugin name, class, enabled state, and docstring
    """
    return {
        "name": self.plugin_name,
        "class": self.__class__.__name__,
        "enabled": self.enabled,
        "docstring": self.__class__.__doc__ or "No documentation available",
    }

View file

@ -5,11 +5,11 @@ This plugin allows mounting host directories into chroot environments,
inspired by Fedora's Mock bind_mount plugin but adapted for Debian-based systems.
"""
import logging
import os
import subprocess
import logging
from pathlib import Path
from typing import Dict, Any, List, Tuple
from typing import Any, Dict, List, Tuple
from .base import BasePlugin
@ -19,108 +19,108 @@ logger = logging.getLogger(__name__)
class BindMountPlugin(BasePlugin):
"""
Mount host directories into chroot environments.
This plugin allows users to mount host directories into the chroot
environment, which is useful for development workflows, shared
libraries, and other scenarios where host files need to be accessible
within the build environment.
"""
def __init__(self, config, hook_manager):
    """Initialize the BindMount plugin."""
    super().__init__(config, hook_manager)
    # Parse configured (host_path, chroot_path) pairs once at init time.
    self.mounts = self._get_mounts()
    self._log_info(f"Initialized with {len(self.mounts)} mount points")
def _register_hooks(self):
    """Register bind mount hooks."""
    # Mount when the chroot is mounted, clean up after final unmount.
    self.hook_manager.add_hook("mount_root", self.mount_root)
    self.hook_manager.add_hook("postumount", self.postumount)
    self._log_debug("Registered mount_root and postumount hooks")
def _get_mounts(self) -> List[Tuple[str, str]]:
    """
    Get mount points from configuration.

    Returns:
        List of (host_path, chroot_path) tuples
    """
    plugin_config = self._get_plugin_config()
    mounts = []

    # Preferred 'mounts' form: list of dicts or (host, chroot) sequences.
    if "mounts" in plugin_config:
        for mount_config in plugin_config["mounts"]:
            if isinstance(mount_config, dict):
                host_path = mount_config.get("host_path")
                chroot_path = mount_config.get("chroot_path")
            elif isinstance(mount_config, (list, tuple)) and len(mount_config) >= 2:
                host_path = mount_config[0]
                chroot_path = mount_config[1]
            else:
                self._log_warning(f"Invalid mount configuration: {mount_config}")
                continue

            # Skip entries with missing/empty paths.
            if host_path and chroot_path:
                mounts.append((host_path, chroot_path))

    # Legacy support for 'dirs' configuration (Mock compatibility)
    if "dirs" in plugin_config:
        for host_path, chroot_path in plugin_config["dirs"]:
            mounts.append((host_path, chroot_path))

    return mounts
def mount_root(self, context: Dict[str, Any]) -> None:
    """
    Mount bind mounts when chroot is mounted.

    Args:
        context: Context dictionary with chroot information
    """
    if not self.enabled or not self.mounts:
        return

    chroot_path = context.get("chroot_path")
    if not chroot_path:
        self._log_warning("No chroot_path in context, skipping bind mounts")
        return

    self._log_info(f"Setting up {len(self.mounts)} bind mounts")

    # Mounts are independent: log and continue on a per-mount failure.
    for host_path, chroot_mount_path in self.mounts:
        try:
            self._setup_bind_mount(host_path, chroot_mount_path, chroot_path)
        except Exception as e:
            self._log_error(f"Failed to setup bind mount {host_path} -> {chroot_mount_path}: {e}")
def postumount(self, context: Dict[str, Any]) -> None:
    """
    Unmount bind mounts when chroot is unmounted.

    Args:
        context: Context dictionary with chroot information
    """
    if not self.enabled or not self.mounts:
        return

    chroot_path = context.get("chroot_path")
    if not chroot_path:
        self._log_warning("No chroot_path in context, skipping bind mount cleanup")
        return

    self._log_info(f"Cleaning up {len(self.mounts)} bind mounts")

    # Unmounts are independent: log and continue on a per-mount failure.
    for host_path, chroot_mount_path in self.mounts:
        try:
            self._cleanup_bind_mount(chroot_mount_path, chroot_path)
        except Exception as e:
            self._log_error(f"Failed to cleanup bind mount {chroot_mount_path}: {e}")
def _setup_bind_mount(self, host_path: str, chroot_mount_path: str, chroot_path: str) -> None:
    """
    Setup a single bind mount.

    Args:
        host_path: Path on the host to mount
        chroot_mount_path: Path in the chroot where to mount
        chroot_path: Base chroot path
    """
    if not os.path.exists(host_path):
        self._log_warning(f"Host path does not exist: {host_path}")
        return

    # Create full chroot mount path
    full_chroot_path = os.path.join(chroot_path, chroot_mount_path.lstrip("/"))

    # Create mount point directory if it doesn't exist
    mount_point_dir = os.path.dirname(full_chroot_path)
    if not os.path.exists(mount_point_dir):
        os.makedirs(mount_point_dir, exist_ok=True)
        self._log_debug(f"Created mount point directory: {mount_point_dir}")

    # Bind-mounting a single file requires an existing file as mount point.
    if os.path.isfile(host_path) and not os.path.exists(full_chroot_path):
        Path(full_chroot_path).touch()
        self._log_debug(f"Created file mount point: {full_chroot_path}")

    # Perform the bind mount
    try:
        cmd = ["mount", "--bind", host_path, full_chroot_path]
        subprocess.run(cmd, capture_output=True, text=True, check=True)
        self._log_debug(f"Successfully mounted {host_path} -> {full_chroot_path}")
    except subprocess.CalledProcessError as e:
        self._log_error(f"Failed to mount {host_path} -> {full_chroot_path}: {e.stderr}")
        raise
    except FileNotFoundError:
        self._log_error("mount command not found - ensure mount is available")
        raise
def _cleanup_bind_mount(self, chroot_mount_path: str, chroot_path: str) -> None:
    """
    Cleanup a single bind mount.

    Args:
        chroot_mount_path: Path in the chroot that was mounted
        chroot_path: Base chroot path
    """
    full_chroot_path = os.path.join(chroot_path, chroot_mount_path.lstrip("/"))

    try:
        cmd = ["umount", full_chroot_path]
        subprocess.run(cmd, capture_output=True, text=True, check=True)
        self._log_debug(f"Successfully unmounted: {full_chroot_path}")
    except subprocess.CalledProcessError:
        # Try force unmount if regular unmount fails
        try:
            cmd = ["umount", "-f", full_chroot_path]
            subprocess.run(cmd, capture_output=True, text=True, check=True)
            self._log_debug(f"Successfully force unmounted: {full_chroot_path}")
        except subprocess.CalledProcessError as e2:
            # Best-effort teardown: warn rather than fail the whole cleanup.
            self._log_warning(f"Failed to unmount {full_chroot_path}: {e2.stderr}")
    except FileNotFoundError:
        self._log_error("umount command not found - ensure umount is available")
def validate_config(self, config: Any) -> bool:
"""
Validate plugin configuration.
Args:
config: Configuration to validate
Returns:
True if configuration is valid, False otherwise
"""
plugin_config = getattr(config, 'plugins', {}).get('bind_mount', {})
plugin_config = getattr(config, "plugins", {}).get("bind_mount", {})
# Check mounts configuration
if 'mounts' in plugin_config:
for mount_config in plugin_config['mounts']:
if "mounts" in plugin_config:
for mount_config in plugin_config["mounts"]:
if isinstance(mount_config, dict):
if not all(key in mount_config for key in ['host_path', 'chroot_path']):
if not all(key in mount_config for key in ["host_path", "chroot_path"]):
self._log_error("Mount configuration missing required keys: host_path, chroot_path")
return False
elif isinstance(mount_config, (list, tuple)):
@ -210,27 +210,29 @@ class BindMountPlugin(BasePlugin):
else:
self._log_error(f"Invalid mount configuration format: {mount_config}")
return False
# Check dirs configuration (legacy)
if 'dirs' in plugin_config:
for host_path, chroot_path in plugin_config['dirs']:
if "dirs" in plugin_config:
for host_path, chroot_path in plugin_config["dirs"]:
if not host_path or not chroot_path:
self._log_error("Invalid dirs configuration: host_path and chroot_path must be non-empty")
return False
return True
def get_plugin_info(self) -> Dict[str, Any]:
"""
Get plugin information.
Returns:
Dictionary with plugin information
"""
info = super().get_plugin_info()
info.update({
'mounts': self.mounts,
'mount_count': len(self.mounts),
'hooks': ['mount_root', 'postumount']
})
return info
info.update(
{
"mounts": self.mounts,
"mount_count": len(self.mounts),
"hooks": ["mount_root", "postumount"],
}
)
return info

View file

@ -5,11 +5,11 @@ This plugin compresses build logs to save disk space,
inspired by Fedora's Mock compress_logs plugin but adapted for Debian-based systems.
"""
import logging
import os
import subprocess
import logging
from pathlib import Path
from typing import Dict, Any, List
from typing import Any, Dict, List
from .base import BasePlugin
@ -19,287 +19,291 @@ logger = logging.getLogger(__name__)
class CompressLogsPlugin(BasePlugin):
"""
Compress build logs to save disk space.
This plugin automatically compresses build logs after build completion,
which is useful for CI/CD environments and long-term log storage.
"""
def __init__(self, config, hook_manager):
"""Initialize the CompressLogs plugin."""
super().__init__(config, hook_manager)
self.compression = self._get_compression_settings()
self._log_info(f"Initialized with compression: {self.compression['method']}")
def _register_hooks(self):
"""Register log compression hooks."""
self.hook_manager.add_hook("process_logs", self.process_logs)
self._log_debug("Registered process_logs hook")
def _get_compression_settings(self) -> Dict[str, Any]:
"""
Get compression settings from configuration.
Returns:
Dictionary with compression settings
"""
plugin_config = self._get_plugin_config()
return {
'method': plugin_config.get('compression', 'gzip'),
'level': plugin_config.get('level', 9),
'extensions': plugin_config.get('extensions', ['.log']),
'exclude_patterns': plugin_config.get('exclude_patterns', []),
'min_size': plugin_config.get('min_size', 0), # Minimum file size to compress
'command': plugin_config.get('command', None) # Custom compression command
"method": plugin_config.get("compression", "gzip"),
"level": plugin_config.get("level", 9),
"extensions": plugin_config.get("extensions", [".log"]),
"exclude_patterns": plugin_config.get("exclude_patterns", []),
"min_size": plugin_config.get("min_size", 0), # Minimum file size to compress
"command": plugin_config.get("command", None), # Custom compression command
}
def process_logs(self, context: Dict[str, Any]) -> None:
"""
Compress build logs after build completion.
Args:
context: Context dictionary with log information
"""
if not self.enabled:
return
log_dir = context.get('log_dir')
log_dir = context.get("log_dir")
if not log_dir:
self._log_warning("No log_dir in context, skipping log compression")
return
if not os.path.exists(log_dir):
self._log_warning(f"Log directory does not exist: {log_dir}")
return
self._log_info(f"Compressing logs in {log_dir}")
compressed_count = 0
total_size_saved = 0
for log_file in self._find_log_files(log_dir):
try:
original_size = os.path.getsize(log_file)
# Check minimum size requirement
if original_size < self.compression['min_size']:
if original_size < self.compression["min_size"]:
self._log_debug(f"Skipping {log_file} (size {original_size} < {self.compression['min_size']})")
continue
# Check if already compressed
if self._is_already_compressed(log_file):
self._log_debug(f"Skipping already compressed file: {log_file}")
continue
# Compress the file
compressed_size = self._compress_file(log_file)
if compressed_size is not None:
compressed_count += 1
size_saved = original_size - compressed_size
total_size_saved += size_saved
self._log_debug(f"Compressed {log_file}: {original_size} -> {compressed_size} bytes (saved {size_saved})")
self._log_debug(
f"Compressed {log_file}: {original_size} -> {compressed_size} bytes (saved {size_saved})"
)
except Exception as e:
self._log_error(f"Failed to compress {log_file}: {e}")
self._log_info(f"Compressed {compressed_count} files, saved {total_size_saved} bytes")
def _find_log_files(self, log_dir: str) -> List[str]:
"""
Find log files to compress.
Args:
log_dir: Directory containing log files
Returns:
List of log file paths
"""
log_files = []
for extension in self.compression['extensions']:
for extension in self.compression["extensions"]:
pattern = f"*{extension}"
log_files.extend(Path(log_dir).glob(pattern))
# Filter out excluded patterns
filtered_files = []
for log_file in log_files:
if not self._is_excluded(log_file.name):
filtered_files.append(str(log_file))
return filtered_files
def _is_excluded(self, filename: str) -> bool:
"""
Check if file should be excluded from compression.
Args:
filename: Name of the file to check
Returns:
True if file should be excluded, False otherwise
"""
for pattern in self.compression['exclude_patterns']:
for pattern in self.compression["exclude_patterns"]:
if pattern in filename:
return True
return False
def _is_already_compressed(self, file_path: str) -> bool:
"""
Check if file is already compressed.
Args:
file_path: Path to the file to check
Returns:
True if file is already compressed, False otherwise
"""
compressed_extensions = ['.gz', '.bz2', '.xz', '.lzma', '.zst']
compressed_extensions = [".gz", ".bz2", ".xz", ".lzma", ".zst"]
return any(file_path.endswith(ext) for ext in compressed_extensions)
def _compress_file(self, file_path: str) -> int:
"""
Compress a single file.
Args:
file_path: Path to the file to compress
Returns:
Size of the compressed file, or None if compression failed
"""
method = self.compression['method']
level = self.compression['level']
method = self.compression["method"]
level = self.compression["level"]
# Use custom command if specified
if self.compression['command']:
if self.compression["command"]:
return self._compress_with_custom_command(file_path)
# Use standard compression methods
if method == 'gzip':
if method == "gzip":
return self._compress_gzip(file_path, level)
elif method == 'bzip2':
elif method == "bzip2":
return self._compress_bzip2(file_path, level)
elif method == 'xz':
elif method == "xz":
return self._compress_xz(file_path, level)
elif method == 'zstd':
elif method == "zstd":
return self._compress_zstd(file_path, level)
else:
self._log_error(f"Unsupported compression method: {method}")
return None
def _compress_gzip(self, file_path: str, level: int) -> int:
"""Compress file using gzip."""
try:
cmd = ['gzip', f'-{level}', file_path]
result = subprocess.run(cmd, capture_output=True, text=True, check=True)
cmd = ["gzip", f"-{level}", file_path]
subprocess.run(cmd, capture_output=True, text=True, check=True)
compressed_path = f"{file_path}.gz"
return os.path.getsize(compressed_path) if os.path.exists(compressed_path) else None
except subprocess.CalledProcessError as e:
self._log_error(f"gzip compression failed: {e.stderr}")
return None
def _compress_bzip2(self, file_path: str, level: int) -> int:
"""Compress file using bzip2."""
try:
cmd = ['bzip2', f'-{level}', file_path]
result = subprocess.run(cmd, capture_output=True, text=True, check=True)
cmd = ["bzip2", f"-{level}", file_path]
subprocess.run(cmd, capture_output=True, text=True, check=True)
compressed_path = f"{file_path}.bz2"
return os.path.getsize(compressed_path) if os.path.exists(compressed_path) else None
except subprocess.CalledProcessError as e:
self._log_error(f"bzip2 compression failed: {e.stderr}")
return None
def _compress_xz(self, file_path: str, level: int) -> int:
"""Compress file using xz."""
try:
cmd = ['xz', f'-{level}', file_path]
result = subprocess.run(cmd, capture_output=True, text=True, check=True)
cmd = ["xz", f"-{level}", file_path]
subprocess.run(cmd, capture_output=True, text=True, check=True)
compressed_path = f"{file_path}.xz"
return os.path.getsize(compressed_path) if os.path.exists(compressed_path) else None
except subprocess.CalledProcessError as e:
self._log_error(f"xz compression failed: {e.stderr}")
return None
def _compress_zstd(self, file_path: str, level: int) -> int:
"""Compress file using zstd."""
try:
cmd = ['zstd', f'-{level}', file_path]
result = subprocess.run(cmd, capture_output=True, text=True, check=True)
cmd = ["zstd", f"-{level}", file_path]
subprocess.run(cmd, capture_output=True, text=True, check=True)
compressed_path = f"{file_path}.zst"
return os.path.getsize(compressed_path) if os.path.exists(compressed_path) else None
except subprocess.CalledProcessError as e:
self._log_error(f"zstd compression failed: {e.stderr}")
return None
def _compress_with_custom_command(self, file_path: str) -> int:
"""Compress file using custom command."""
try:
command = self.compression['command'].format(file=file_path)
result = subprocess.run(command, shell=True, capture_output=True, text=True, check=True)
command = self.compression["command"].format(file=file_path)
subprocess.run(command, shell=True, capture_output=True, text=True, check=True)
# Try to determine compressed file size
# This is a best-effort approach since custom commands may vary
for ext in ['.gz', '.bz2', '.xz', '.zst', '.lzma']:
for ext in [".gz", ".bz2", ".xz", ".zst", ".lzma"]:
compressed_path = f"{file_path}{ext}"
if os.path.exists(compressed_path):
return os.path.getsize(compressed_path)
return None
except subprocess.CalledProcessError as e:
self._log_error(f"Custom compression command failed: {e.stderr}")
return None
def validate_config(self, config: Any) -> bool:
"""
Validate plugin configuration.
Args:
config: Configuration to validate
Returns:
True if configuration is valid, False otherwise
"""
plugin_config = getattr(config, 'plugins', {}).get('compress_logs', {})
plugin_config = getattr(config, "plugins", {}).get("compress_logs", {})
# Validate compression method
valid_methods = ['gzip', 'bzip2', 'xz', 'zstd']
method = plugin_config.get('compression', 'gzip')
if method not in valid_methods and not plugin_config.get('command'):
valid_methods = ["gzip", "bzip2", "xz", "zstd"]
method = plugin_config.get("compression", "gzip")
if method not in valid_methods and not plugin_config.get("command"):
self._log_error(f"Invalid compression method: {method}. Valid methods: {valid_methods}")
return False
# Validate compression level
level = plugin_config.get('level', 9)
level = plugin_config.get("level", 9)
if not isinstance(level, int) or level < 1 or level > 9:
self._log_error(f"Invalid compression level: {level}. Must be 1-9")
return False
# Validate extensions
extensions = plugin_config.get('extensions', ['.log'])
extensions = plugin_config.get("extensions", [".log"])
if not isinstance(extensions, list):
self._log_error("Extensions must be a list")
return False
# Validate min_size
min_size = plugin_config.get('min_size', 0)
min_size = plugin_config.get("min_size", 0)
if not isinstance(min_size, int) or min_size < 0:
self._log_error(f"Invalid min_size: {min_size}. Must be non-negative integer")
return False
return True
def get_plugin_info(self) -> Dict[str, Any]:
"""
Get plugin information.
Returns:
Dictionary with plugin information
"""
info = super().get_plugin_info()
info.update({
'compression_method': self.compression['method'],
'compression_level': self.compression['level'],
'extensions': self.compression['extensions'],
'min_size': self.compression['min_size'],
'hooks': ['process_logs']
})
return info
info.update(
{
"compression_method": self.compression["method"],
"compression_level": self.compression["level"],
"extensions": self.compression["extensions"],
"min_size": self.compression["min_size"],
"hooks": ["process_logs"],
}
)
return info

View file

@ -6,7 +6,7 @@ inspired by Fedora's Mock plugin hooks but adapted for Debian-based workflows.
"""
import logging
from typing import Dict, List, Callable, Any, Optional
from typing import Any, Callable, Dict, List, Optional
logger = logging.getLogger(__name__)
@ -14,73 +14,73 @@ logger = logging.getLogger(__name__)
class HookManager:
"""
Manages plugin hooks and their execution.
This class provides the core functionality for registering and executing
plugin hooks at specific points in the build lifecycle, following the
same pattern as Mock's plugin hook system.
"""
def __init__(self):
"""Initialize the hook manager."""
self.hooks: Dict[str, List[Callable]] = {}
self.hook_contexts: Dict[str, Dict[str, Any]] = {}
# Define available hook points (based on Mock's hook system)
self.available_hooks = {
'clean': 'Clean up plugin resources',
'earlyprebuild': 'Very early build stage',
'initfailed': 'Chroot initialization failed',
'list_snapshots': 'List available snapshots',
'make_snapshot': 'Create a snapshot',
'mount_root': 'Mount chroot directory',
'postbuild': 'After build completion',
'postchroot': 'After chroot command',
'postclean': 'After chroot cleanup',
'postdeps': 'After dependency installation',
'postinit': 'After chroot initialization',
'postshell': 'After shell exit',
'postupdate': 'After package updates',
'postumount': 'After unmounting',
'postapt': 'After APT operations',
'prebuild': 'Before build starts',
'prechroot': 'Before chroot command',
'preinit': 'Before chroot initialization',
'preshell': 'Before shell prompt',
'preapt': 'Before APT operations',
'process_logs': 'Process build logs',
'remove_snapshot': 'Remove snapshot',
'rollback_to': 'Rollback to snapshot',
'scrub': 'Scrub chroot'
"clean": "Clean up plugin resources",
"earlyprebuild": "Very early build stage",
"initfailed": "Chroot initialization failed",
"list_snapshots": "List available snapshots",
"make_snapshot": "Create a snapshot",
"mount_root": "Mount chroot directory",
"postbuild": "After build completion",
"postchroot": "After chroot command",
"postclean": "After chroot cleanup",
"postdeps": "After dependency installation",
"postinit": "After chroot initialization",
"postshell": "After shell exit",
"postupdate": "After package updates",
"postumount": "After unmounting",
"postapt": "After APT operations",
"prebuild": "Before build starts",
"prechroot": "Before chroot command",
"preinit": "Before chroot initialization",
"preshell": "Before shell prompt",
"preapt": "Before APT operations",
"process_logs": "Process build logs",
"remove_snapshot": "Remove snapshot",
"rollback_to": "Rollback to snapshot",
"scrub": "Scrub chroot",
}
def add_hook(self, hook_name: str, callback: Callable) -> None:
"""
Register a hook callback.
Args:
hook_name: Name of the hook to register for
callback: Function to call when hook is triggered
Raises:
ValueError: If hook_name is not a valid hook point
"""
if hook_name not in self.available_hooks:
raise ValueError(f"Invalid hook name: {hook_name}. Available hooks: {list(self.available_hooks.keys())}")
if hook_name not in self.hooks:
self.hooks[hook_name] = []
self.hooks[hook_name].append(callback)
logger.debug(f"Registered hook '{hook_name}' with callback {callback.__name__}")
def call_hook(self, hook_name: str, context: Optional[Dict[str, Any]] = None) -> None:
"""
Execute all registered hooks for a given hook name.
Args:
hook_name: Name of the hook to trigger
context: Context dictionary to pass to hook callbacks
Note:
Hook execution errors are logged but don't fail the build,
following Mock's behavior.
@ -88,36 +88,36 @@ class HookManager:
if hook_name not in self.hooks:
logger.debug(f"No hooks registered for '{hook_name}'")
return
context = context or {}
logger.debug(f"Calling {len(self.hooks[hook_name])} hooks for '{hook_name}'")
for i, callback in enumerate(self.hooks[hook_name]):
try:
logger.debug(f"Executing hook {i+1}/{len(self.hooks[hook_name])}: {callback.__name__}")
logger.debug(f"Executing hook {i + 1}/{len(self.hooks[hook_name])}: {callback.__name__}")
callback(context)
logger.debug(f"Successfully executed hook: {callback.__name__}")
except Exception as e:
logger.warning(f"Hook '{hook_name}' failed in {callback.__name__}: {e}")
# Continue with other hooks - don't fail the build
def call_hook_with_result(self, hook_name: str, context: Optional[Dict[str, Any]] = None) -> List[Any]:
"""
Execute all registered hooks and collect their results.
Args:
hook_name: Name of the hook to trigger
context: Context dictionary to pass to hook callbacks
Returns:
List of results from hook callbacks (None for failed hooks)
"""
if hook_name not in self.hooks:
return []
context = context or {}
results = []
for callback in self.hooks[hook_name]:
try:
result = callback(context)
@ -125,81 +125,78 @@ class HookManager:
except Exception as e:
logger.warning(f"Hook '{hook_name}' failed in {callback.__name__}: {e}")
results.append(None)
return results
def get_hook_names(self) -> List[str]:
"""
Get list of available hook names.
Returns:
List of hook names that have been registered
"""
return list(self.hooks.keys())
def get_available_hooks(self) -> Dict[str, str]:
"""
Get all available hook points with descriptions.
Returns:
Dictionary mapping hook names to descriptions
"""
return self.available_hooks.copy()
def get_hook_info(self, hook_name: str) -> Dict[str, Any]:
"""
Get information about a specific hook.
Args:
hook_name: Name of the hook
Returns:
Dictionary with hook information
"""
if hook_name not in self.available_hooks:
return {'error': f'Hook "{hook_name}" not found'}
return {"error": f'Hook "{hook_name}" not found'}
info = {
'name': hook_name,
'description': self.available_hooks[hook_name],
'registered_callbacks': len(self.hooks.get(hook_name, [])),
'callbacks': []
"name": hook_name,
"description": self.available_hooks[hook_name],
"registered_callbacks": len(self.hooks.get(hook_name, [])),
"callbacks": [],
}
if hook_name in self.hooks:
for callback in self.hooks[hook_name]:
info['callbacks'].append({
'name': callback.__name__,
'module': callback.__module__
})
info["callbacks"].append({"name": callback.__name__, "module": callback.__module__})
return info
def remove_hook(self, hook_name: str, callback: Callable) -> bool:
"""
Remove a specific hook callback.
Args:
hook_name: Name of the hook
callback: Callback function to remove
Returns:
True if callback was removed, False if not found
"""
if hook_name not in self.hooks:
return False
try:
self.hooks[hook_name].remove(callback)
logger.debug(f"Removed hook '{hook_name}' callback {callback.__name__}")
return True
except ValueError:
return False
def clear_hooks(self, hook_name: Optional[str] = None) -> None:
"""
Clear all hooks or hooks for a specific hook name.
Args:
hook_name: Specific hook name to clear, or None to clear all
"""
@ -209,52 +206,51 @@ class HookManager:
elif hook_name in self.hooks:
self.hooks[hook_name].clear()
logger.debug(f"Cleared hooks for '{hook_name}'")
def get_hook_statistics(self) -> Dict[str, Any]:
"""
Get statistics about hook usage.
Returns:
Dictionary with hook statistics
"""
stats = {
'total_hooks': len(self.hooks),
'total_callbacks': sum(len(callbacks) for callbacks in self.hooks.values()),
'hooks_with_callbacks': len([h for h in self.hooks.values() if h]),
'available_hooks': len(self.available_hooks),
'hook_details': {}
"total_hooks": len(self.hooks),
"total_callbacks": sum(len(callbacks) for callbacks in self.hooks.values()),
"hooks_with_callbacks": len([h for h in self.hooks.values() if h]),
"available_hooks": len(self.available_hooks),
"hook_details": {},
}
for hook_name in self.available_hooks:
stats['hook_details'][hook_name] = {
'description': self.available_hooks[hook_name],
'registered': hook_name in self.hooks,
'callback_count': len(self.hooks.get(hook_name, []))
stats["hook_details"][hook_name] = {
"description": self.available_hooks[hook_name],
"registered": hook_name in self.hooks,
"callback_count": len(self.hooks.get(hook_name, [])),
}
return stats
def validate_hook_name(self, hook_name: str) -> bool:
"""
Validate if a hook name is valid.
Args:
hook_name: Name of the hook to validate
Returns:
True if hook name is valid, False otherwise
"""
return hook_name in self.available_hooks
def get_hook_suggestions(self, partial_name: str) -> List[str]:
"""
Get hook name suggestions based on partial input.
Args:
partial_name: Partial hook name
Returns:
List of matching hook names
"""
return [name for name in self.available_hooks.keys()
if name.startswith(partial_name)]
return [name for name in self.available_hooks.keys() if name.startswith(partial_name)]

View file

@ -5,9 +5,10 @@ This module provides the plugin registration and management functionality
for the Deb-Mock plugin system, inspired by Fedora's Mock plugin architecture.
"""
import logging
import importlib
from typing import Dict, Type, Any, Optional
import logging
from typing import Any, Dict, Optional, Type
from .base import BasePlugin
logger = logging.getLogger(__name__)
@ -16,102 +17,106 @@ logger = logging.getLogger(__name__)
class PluginRegistry:
"""
Manages plugin registration and instantiation.
This class provides the functionality for registering plugin classes
and creating plugin instances, following Mock's plugin system pattern.
"""
def __init__(self):
"""Initialize the plugin registry."""
self.plugins: Dict[str, Type[BasePlugin]] = {}
self.plugin_metadata: Dict[str, Dict[str, Any]] = {}
# Auto-register built-in plugins
self._register_builtin_plugins()
def register(self, plugin_name: str, plugin_class: Type[BasePlugin],
metadata: Optional[Dict[str, Any]] = None) -> None:
def register(
self,
plugin_name: str,
plugin_class: Type[BasePlugin],
metadata: Optional[Dict[str, Any]] = None,
) -> None:
"""
Register a plugin class.
Args:
plugin_name: Name of the plugin
plugin_class: Plugin class to register
metadata: Optional metadata about the plugin
Raises:
ValueError: If plugin_name is already registered
TypeError: If plugin_class is not a subclass of BasePlugin
"""
if not issubclass(plugin_class, BasePlugin):
raise TypeError(f"Plugin class must inherit from BasePlugin")
raise TypeError("Plugin class must inherit from BasePlugin")
if plugin_name in self.plugins:
raise ValueError(f"Plugin '{plugin_name}' is already registered")
self.plugins[plugin_name] = plugin_class
self.plugin_metadata[plugin_name] = metadata or {}
logger.debug(f"Registered plugin '{plugin_name}' with class {plugin_class.__name__}")
def unregister(self, plugin_name: str) -> bool:
"""
Unregister a plugin.
Args:
plugin_name: Name of the plugin to unregister
Returns:
True if plugin was unregistered, False if not found
"""
if plugin_name not in self.plugins:
return False
del self.plugins[plugin_name]
del self.plugin_metadata[plugin_name]
logger.debug(f"Unregistered plugin '{plugin_name}'")
return True
def get_plugin_class(self, plugin_name: str) -> Optional[Type[BasePlugin]]:
"""
Get a registered plugin class.
Args:
plugin_name: Name of the plugin
Returns:
Plugin class if found, None otherwise
"""
return self.plugins.get(plugin_name)
def get_plugins(self) -> Dict[str, Type[BasePlugin]]:
"""
Get all registered plugins.
Returns:
Dictionary of registered plugin names and classes
"""
return self.plugins.copy()
def get_plugin_names(self) -> list:
"""
Get list of registered plugin names.
Returns:
List of registered plugin names
"""
return list(self.plugins.keys())
def create(self, plugin_name: str, config: Any, hook_manager: Any) -> Optional[BasePlugin]:
"""
Create a plugin instance.
Args:
plugin_name: Name of the plugin to create
config: Configuration object
hook_manager: Hook manager instance
Returns:
Plugin instance if successful, None if plugin not found
"""
@ -119,7 +124,7 @@ class PluginRegistry:
if not plugin_class:
logger.warning(f"Plugin '{plugin_name}' not found")
return None
try:
plugin_instance = plugin_class(config, hook_manager)
logger.debug(f"Created plugin instance '{plugin_name}'")
@ -127,82 +132,82 @@ class PluginRegistry:
except Exception as e:
logger.error(f"Failed to create plugin '{plugin_name}': {e}")
return None
def create_all_enabled(self, config: Any, hook_manager: Any) -> Dict[str, BasePlugin]:
"""
Create instances of all enabled plugins.
Args:
config: Configuration object
hook_manager: Hook manager instance
Returns:
Dictionary of plugin names and instances
"""
enabled_plugins = {}
for plugin_name in self.get_plugin_names():
plugin_instance = self.create(plugin_name, config, hook_manager)
if plugin_instance and plugin_instance.enabled:
enabled_plugins[plugin_name] = plugin_instance
logger.debug(f"Created {len(enabled_plugins)} enabled plugin instances")
return enabled_plugins
def get_plugin_info(self, plugin_name: str) -> Dict[str, Any]:
"""
Get information about a registered plugin.
Args:
plugin_name: Name of the plugin
Returns:
Dictionary with plugin information
"""
if plugin_name not in self.plugins:
return {'error': f'Plugin "{plugin_name}" not found'}
return {"error": f'Plugin "{plugin_name}" not found'}
plugin_class = self.plugins[plugin_name]
metadata = self.plugin_metadata[plugin_name]
info = {
'name': plugin_name,
'class': plugin_class.__name__,
'module': plugin_class.__module__,
'metadata': metadata,
'docstring': plugin_class.__doc__ or 'No documentation available'
"name": plugin_name,
"class": plugin_class.__name__,
"module": plugin_class.__module__,
"metadata": metadata,
"docstring": plugin_class.__doc__ or "No documentation available",
}
return info
def get_all_plugin_info(self) -> Dict[str, Dict[str, Any]]:
"""
Get information about all registered plugins.
Returns:
Dictionary mapping plugin names to their information
"""
return {name: self.get_plugin_info(name) for name in self.get_plugin_names()}
def load_plugin_from_module(self, module_name: str, plugin_name: str) -> bool:
"""
Load a plugin from a module.
Args:
module_name: Name of the module to load
plugin_name: Name of the plugin class in the module
Returns:
True if plugin was loaded successfully, False otherwise
"""
try:
module = importlib.import_module(module_name)
plugin_class = getattr(module, plugin_name)
# Use module name as plugin name if not specified
self.register(plugin_name, plugin_class)
return True
except ImportError as e:
logger.error(f"Failed to import module '{module_name}': {e}")
return False
@ -212,42 +217,42 @@ class PluginRegistry:
except Exception as e:
logger.error(f"Failed to load plugin from '{module_name}.{plugin_name}': {e}")
return False
def load_plugins_from_config(self, config: Any) -> Dict[str, BasePlugin]:
"""
Load plugins based on configuration.
Args:
config: Configuration object with plugin settings
Returns:
Dictionary of loaded plugin instances
"""
loaded_plugins = {}
if not hasattr(config, 'plugins') or not config.plugins:
if not hasattr(config, "plugins") or not config.plugins:
return loaded_plugins
for plugin_name, plugin_config in config.plugins.items():
if not isinstance(plugin_config, dict):
continue
if plugin_config.get('enabled', False):
if plugin_config.get("enabled", False):
# Try to load from built-in plugins first
plugin_instance = self.create(plugin_name, config, None)
if plugin_instance:
loaded_plugins[plugin_name] = plugin_instance
else:
# Try to load from external module
module_name = plugin_config.get('module')
module_name = plugin_config.get("module")
if module_name:
if self.load_plugin_from_module(module_name, plugin_name):
plugin_instance = self.create(plugin_name, config, None)
if plugin_instance:
loaded_plugins[plugin_name] = plugin_instance
return loaded_plugins
def _register_builtin_plugins(self) -> None:
"""Register built-in plugins."""
try:
@ -256,79 +261,101 @@ class PluginRegistry:
from .compress_logs import CompressLogsPlugin
from .root_cache import RootCachePlugin
from .tmpfs import TmpfsPlugin
self.register('bind_mount', BindMountPlugin, {
'description': 'Mount host directories into chroot',
'hooks': ['mount_root', 'postumount'],
'builtin': True
})
self.register('compress_logs', CompressLogsPlugin, {
'description': 'Compress build logs to save space',
'hooks': ['process_logs'],
'builtin': True
})
self.register('root_cache', RootCachePlugin, {
'description': 'Root cache management for faster builds',
'hooks': ['preinit', 'postinit', 'postchroot', 'postshell', 'clean'],
'builtin': True
})
self.register('tmpfs', TmpfsPlugin, {
'description': 'Use tmpfs for faster I/O operations',
'hooks': ['mount_root', 'postumount'],
'builtin': True
})
self.register(
"bind_mount",
BindMountPlugin,
{
"description": "Mount host directories into chroot",
"hooks": ["mount_root", "postumount"],
"builtin": True,
},
)
self.register(
"compress_logs",
CompressLogsPlugin,
{
"description": "Compress build logs to save space",
"hooks": ["process_logs"],
"builtin": True,
},
)
self.register(
"root_cache",
RootCachePlugin,
{
"description": "Root cache management for faster builds",
"hooks": [
"preinit",
"postinit",
"postchroot",
"postshell",
"clean",
],
"builtin": True,
},
)
self.register(
"tmpfs",
TmpfsPlugin,
{
"description": "Use tmpfs for faster I/O operations",
"hooks": ["mount_root", "postumount"],
"builtin": True,
},
)
logger.debug("Registered built-in plugins")
except ImportError as e:
logger.warning(f"Some built-in plugins could not be loaded: {e}")
except Exception as e:
logger.warning(f"Error registering built-in plugins: {e}")
def get_plugin_statistics(self) -> Dict[str, Any]:
"""
Get statistics about registered plugins.
Returns:
Dictionary with plugin statistics
"""
stats = {
'total_plugins': len(self.plugins),
'builtin_plugins': len([p for p in self.plugin_metadata.values() if p.get('builtin', False)]),
'external_plugins': len([p for p in self.plugin_metadata.values() if not p.get('builtin', False)]),
'plugins_by_hook': {}
"total_plugins": len(self.plugins),
"builtin_plugins": len([p for p in self.plugin_metadata.values() if p.get("builtin", False)]),
"external_plugins": len([p for p in self.plugin_metadata.values() if not p.get("builtin", False)]),
"plugins_by_hook": {},
}
# Count plugins by hook usage
for plugin_name, metadata in self.plugin_metadata.items():
hooks = metadata.get('hooks', [])
hooks = metadata.get("hooks", [])
for hook in hooks:
if hook not in stats['plugins_by_hook']:
stats['plugins_by_hook'][hook] = []
stats['plugins_by_hook'][hook].append(plugin_name)
if hook not in stats["plugins_by_hook"]:
stats["plugins_by_hook"][hook] = []
stats["plugins_by_hook"][hook].append(plugin_name)
return stats
def validate_plugin_config(self, plugin_name: str, config: Any) -> bool:
"""
Validate plugin configuration.
Args:
plugin_name: Name of the plugin
config: Configuration to validate
Returns:
True if configuration is valid, False otherwise
"""
if plugin_name not in self.plugins:
return False
# Basic validation - plugins can override this method
plugin_class = self.plugins[plugin_name]
if hasattr(plugin_class, 'validate_config'):
if hasattr(plugin_class, "validate_config"):
return plugin_class.validate_config(config)
return True
return True

View file

@ -5,14 +5,11 @@ This plugin provides root cache management for faster builds,
inspired by Fedora's Mock root_cache plugin but adapted for Debian-based systems.
"""
import logging
import os
import tarfile
import hashlib
import json
import time
import logging
from pathlib import Path
from typing import Dict, Any, Optional
from typing import Any, Dict
from .base import BasePlugin
@ -22,19 +19,19 @@ logger = logging.getLogger(__name__)
class RootCachePlugin(BasePlugin):
"""
Root cache management for faster builds.
This plugin caches the chroot environment in a compressed tarball,
which can significantly speed up subsequent builds by avoiding
the need to recreate the entire chroot from scratch.
"""
def __init__(self, config, hook_manager):
"""Initialize the RootCache plugin."""
super().__init__(config, hook_manager)
self.cache_settings = self._get_cache_settings()
self.cache_file = self._get_cache_file_path()
self._log_info(f"Initialized with cache dir: {self.cache_settings['cache_dir']}")
def _register_hooks(self):
"""Register root cache hooks."""
self.hook_manager.add_hook("preinit", self.preinit)
@ -43,307 +40,307 @@ class RootCachePlugin(BasePlugin):
self.hook_manager.add_hook("postshell", self.postshell)
self.hook_manager.add_hook("clean", self.clean)
self._log_debug("Registered root cache hooks")
def _get_cache_settings(self) -> Dict[str, Any]:
"""
Get cache settings from configuration.
Returns:
Dictionary with cache settings
"""
plugin_config = self._get_plugin_config()
return {
'cache_dir': plugin_config.get('cache_dir', '/var/cache/deb-mock/root-cache'),
'max_age_days': plugin_config.get('max_age_days', 7),
'compression': plugin_config.get('compression', 'gzip'),
'exclude_dirs': plugin_config.get('exclude_dirs', ['/tmp', '/var/tmp', '/var/cache']),
'exclude_patterns': plugin_config.get('exclude_patterns', ['*.log', '*.tmp']),
'min_cache_size_mb': plugin_config.get('min_cache_size_mb', 100),
'auto_cleanup': plugin_config.get('auto_cleanup', True)
"cache_dir": plugin_config.get("cache_dir", "/var/cache/deb-mock/root-cache"),
"max_age_days": plugin_config.get("max_age_days", 7),
"compression": plugin_config.get("compression", "gzip"),
"exclude_dirs": plugin_config.get("exclude_dirs", ["/tmp", "/var/tmp", "/var/cache"]),
"exclude_patterns": plugin_config.get("exclude_patterns", ["*.log", "*.tmp"]),
"min_cache_size_mb": plugin_config.get("min_cache_size_mb", 100),
"auto_cleanup": plugin_config.get("auto_cleanup", True),
}
def _get_cache_file_path(self) -> str:
"""
Get the cache file path based on configuration.
Returns:
Path to the cache file
"""
cache_dir = self.cache_settings['cache_dir']
compression = self.cache_settings['compression']
cache_dir = self.cache_settings["cache_dir"]
compression = self.cache_settings["compression"]
# Create cache directory if it doesn't exist
os.makedirs(cache_dir, exist_ok=True)
# Determine file extension based on compression
extensions = {
'gzip': '.tar.gz',
'bzip2': '.tar.bz2',
'xz': '.tar.xz',
'zstd': '.tar.zst'
"gzip": ".tar.gz",
"bzip2": ".tar.bz2",
"xz": ".tar.xz",
"zstd": ".tar.zst",
}
ext = extensions.get(compression, '.tar.gz')
ext = extensions.get(compression, ".tar.gz")
return os.path.join(cache_dir, f"cache{ext}")
def preinit(self, context: Dict[str, Any]) -> None:
"""
Restore chroot from cache before initialization.
Args:
context: Context dictionary with chroot information
"""
if not self.enabled:
return
chroot_path = context.get('chroot_path')
chroot_path = context.get("chroot_path")
if not chroot_path:
self._log_warning("No chroot_path in context, skipping cache restoration")
return
if not self._cache_exists():
self._log_debug("No cache file found, will create new chroot")
return
if not self._is_cache_valid():
self._log_debug("Cache is invalid or expired, will create new chroot")
return
self._log_info("Restoring chroot from cache")
try:
self._restore_from_cache(chroot_path)
self._log_info("Successfully restored chroot from cache")
except Exception as e:
self._log_error(f"Failed to restore from cache: {e}")
def postinit(self, context: Dict[str, Any]) -> None:
"""
Create cache after successful initialization.
Args:
context: Context dictionary with chroot information
"""
if not self.enabled:
return
chroot_path = context.get('chroot_path')
chroot_path = context.get("chroot_path")
if not chroot_path:
self._log_warning("No chroot_path in context, skipping cache creation")
return
self._log_info("Creating root cache")
try:
self._create_cache(chroot_path)
self._log_info("Successfully created root cache")
except Exception as e:
self._log_error(f"Failed to create cache: {e}")
def postchroot(self, context: Dict[str, Any]) -> None:
"""
Update cache after chroot operations.
Args:
context: Context dictionary with chroot information
"""
if not self.enabled:
return
chroot_path = context.get('chroot_path')
chroot_path = context.get("chroot_path")
if not chroot_path:
return
self._log_debug("Updating cache after chroot operations")
try:
self._update_cache(chroot_path)
except Exception as e:
self._log_error(f"Failed to update cache: {e}")
def postshell(self, context: Dict[str, Any]) -> None:
"""
Update cache after shell operations.
Args:
context: Context dictionary with chroot information
"""
if not self.enabled:
return
chroot_path = context.get('chroot_path')
chroot_path = context.get("chroot_path")
if not chroot_path:
return
self._log_debug("Updating cache after shell operations")
try:
self._update_cache(chroot_path)
except Exception as e:
self._log_error(f"Failed to update cache: {e}")
def clean(self, context: Dict[str, Any]) -> None:
"""
Clean up cache resources.
Args:
context: Context dictionary with cleanup information
"""
if not self.enabled:
return
if self.cache_settings['auto_cleanup']:
if self.cache_settings["auto_cleanup"]:
self._log_info("Cleaning up old caches")
try:
cleaned_count = self._cleanup_old_caches()
self._log_info(f"Cleaned up {cleaned_count} old cache files")
except Exception as e:
self._log_error(f"Failed to cleanup old caches: {e}")
def _cache_exists(self) -> bool:
"""
Check if cache file exists.
Returns:
True if cache file exists, False otherwise
"""
return os.path.exists(self.cache_file)
def _is_cache_valid(self) -> bool:
"""
Check if cache is valid and not expired.
Returns:
True if cache is valid, False otherwise
"""
if not self._cache_exists():
return False
# Check file age
file_age = time.time() - os.path.getmtime(self.cache_file)
max_age_seconds = self.cache_settings['max_age_days'] * 24 * 3600
max_age_seconds = self.cache_settings["max_age_days"] * 24 * 3600
if file_age > max_age_seconds:
self._log_debug(f"Cache is {file_age/3600:.1f} hours old, max age is {max_age_seconds/3600:.1f} hours")
self._log_debug(f"Cache is {file_age / 3600:.1f} hours old, max age is {max_age_seconds / 3600:.1f} hours")
return False
# Check file size
file_size_mb = os.path.getsize(self.cache_file) / (1024 * 1024)
min_size_mb = self.cache_settings['min_cache_size_mb']
min_size_mb = self.cache_settings["min_cache_size_mb"]
if file_size_mb < min_size_mb:
self._log_debug(f"Cache size {file_size_mb:.1f}MB is below minimum {min_size_mb}MB")
return False
return True
def _restore_from_cache(self, chroot_path: str) -> None:
"""
Restore chroot from cache.
Args:
chroot_path: Path to restore chroot to
"""
if not self._cache_exists():
raise FileNotFoundError("Cache file does not exist")
# Create chroot directory if it doesn't exist
os.makedirs(chroot_path, exist_ok=True)
# Extract cache
compression = self.cache_settings['compression']
if compression == 'gzip':
mode = 'r:gz'
elif compression == 'bzip2':
mode = 'r:bz2'
elif compression == 'xz':
mode = 'r:xz'
elif compression == 'zstd':
mode = 'r:zstd'
compression = self.cache_settings["compression"]
if compression == "gzip":
mode = "r:gz"
elif compression == "bzip2":
mode = "r:bz2"
elif compression == "xz":
mode = "r:xz"
elif compression == "zstd":
mode = "r:zstd"
else:
mode = 'r:gz' # Default to gzip
mode = "r:gz" # Default to gzip
try:
with tarfile.open(self.cache_file, mode) as tar:
tar.extractall(path=chroot_path)
self._log_debug(f"Successfully extracted cache to {chroot_path}")
except Exception as e:
self._log_error(f"Failed to extract cache: {e}")
raise
def _create_cache(self, chroot_path: str) -> None:
"""
Create cache from chroot.
Args:
chroot_path: Path to the chroot to cache
"""
if not os.path.exists(chroot_path):
raise FileNotFoundError(f"Chroot path does not exist: {chroot_path}")
# Determine compression mode
compression = self.cache_settings['compression']
if compression == 'gzip':
mode = 'w:gz'
elif compression == 'bzip2':
mode = 'w:bz2'
elif compression == 'xz':
mode = 'w:xz'
elif compression == 'zstd':
mode = 'w:zstd'
compression = self.cache_settings["compression"]
if compression == "gzip":
mode = "w:gz"
elif compression == "bzip2":
mode = "w:bz2"
elif compression == "xz":
mode = "w:xz"
elif compression == "zstd":
mode = "w:zstd"
else:
mode = 'w:gz' # Default to gzip
mode = "w:gz" # Default to gzip
try:
with tarfile.open(self.cache_file, mode) as tar:
# Add chroot contents to archive
tar.add(chroot_path, arcname='', exclude=self._get_exclude_filter())
tar.add(chroot_path, arcname="", exclude=self._get_exclude_filter())
self._log_debug(f"Successfully created cache: {self.cache_file}")
except Exception as e:
self._log_error(f"Failed to create cache: {e}")
raise
def _update_cache(self, chroot_path: str) -> None:
"""
Update existing cache.
Args:
chroot_path: Path to the chroot to update cache from
"""
# For now, just recreate the cache
# In the future, we could implement incremental updates
self._create_cache(chroot_path)
def _cleanup_old_caches(self) -> int:
"""
Clean up old cache files.
Returns:
Number of cache files cleaned up
"""
cache_dir = self.cache_settings['cache_dir']
max_age_seconds = self.cache_settings['max_age_days'] * 24 * 3600
cache_dir = self.cache_settings["cache_dir"]
max_age_seconds = self.cache_settings["max_age_days"] * 24 * 3600
current_time = time.time()
cleaned_count = 0
if not os.path.exists(cache_dir):
return 0
for cache_file in os.listdir(cache_dir):
if not cache_file.startswith('cache'):
if not cache_file.startswith("cache"):
continue
cache_path = os.path.join(cache_dir, cache_file)
file_age = current_time - os.path.getmtime(cache_path)
if file_age > max_age_seconds:
try:
os.remove(cache_path)
@ -351,110 +348,112 @@ class RootCachePlugin(BasePlugin):
self._log_debug(f"Removed old cache: {cache_file}")
except Exception as e:
self._log_warning(f"Failed to remove old cache {cache_file}: {e}")
return cleaned_count
def _get_exclude_filter(self):
"""
Get exclude filter function for tarfile.
Returns:
Function to filter out excluded files/directories
"""
exclude_dirs = self.cache_settings['exclude_dirs']
exclude_patterns = self.cache_settings['exclude_patterns']
exclude_dirs = self.cache_settings["exclude_dirs"]
exclude_patterns = self.cache_settings["exclude_patterns"]
def exclude_filter(tarinfo):
# Check excluded directories
for exclude_dir in exclude_dirs:
if tarinfo.name.startswith(exclude_dir.lstrip('/')):
if tarinfo.name.startswith(exclude_dir.lstrip("/")):
return None
# Check excluded patterns
for pattern in exclude_patterns:
if pattern in tarinfo.name:
return None
return tarinfo
return exclude_filter
def validate_config(self, config: Any) -> bool:
"""
Validate plugin configuration.
Args:
config: Configuration to validate
Returns:
True if configuration is valid, False otherwise
"""
plugin_config = getattr(config, 'plugins', {}).get('root_cache', {})
plugin_config = getattr(config, "plugins", {}).get("root_cache", {})
# Validate cache_dir
cache_dir = plugin_config.get('cache_dir', '/var/cache/deb-mock/root-cache')
cache_dir = plugin_config.get("cache_dir", "/var/cache/deb-mock/root-cache")
if not cache_dir:
self._log_error("cache_dir cannot be empty")
return False
# Validate max_age_days
max_age_days = plugin_config.get('max_age_days', 7)
max_age_days = plugin_config.get("max_age_days", 7)
if not isinstance(max_age_days, int) or max_age_days <= 0:
self._log_error(f"Invalid max_age_days: {max_age_days}. Must be positive integer")
return False
# Validate compression
valid_compressions = ['gzip', 'bzip2', 'xz', 'zstd']
compression = plugin_config.get('compression', 'gzip')
valid_compressions = ["gzip", "bzip2", "xz", "zstd"]
compression = plugin_config.get("compression", "gzip")
if compression not in valid_compressions:
self._log_error(f"Invalid compression: {compression}. Valid options: {valid_compressions}")
return False
# Validate exclude_dirs
exclude_dirs = plugin_config.get('exclude_dirs', ['/tmp', '/var/tmp', '/var/cache'])
exclude_dirs = plugin_config.get("exclude_dirs", ["/tmp", "/var/tmp", "/var/cache"])
if not isinstance(exclude_dirs, list):
self._log_error("exclude_dirs must be a list")
return False
# Validate exclude_patterns
exclude_patterns = plugin_config.get('exclude_patterns', ['*.log', '*.tmp'])
exclude_patterns = plugin_config.get("exclude_patterns", ["*.log", "*.tmp"])
if not isinstance(exclude_patterns, list):
self._log_error("exclude_patterns must be a list")
return False
# Validate min_cache_size_mb
min_cache_size_mb = plugin_config.get('min_cache_size_mb', 100)
min_cache_size_mb = plugin_config.get("min_cache_size_mb", 100)
if not isinstance(min_cache_size_mb, (int, float)) or min_cache_size_mb < 0:
self._log_error(f"Invalid min_cache_size_mb: {min_cache_size_mb}. Must be non-negative number")
return False
# Validate auto_cleanup
auto_cleanup = plugin_config.get('auto_cleanup', True)
auto_cleanup = plugin_config.get("auto_cleanup", True)
if not isinstance(auto_cleanup, bool):
self._log_error(f"Invalid auto_cleanup: {auto_cleanup}. Must be boolean")
return False
return True
def get_plugin_info(self) -> Dict[str, Any]:
"""
Get plugin information.
Returns:
Dictionary with plugin information
"""
info = super().get_plugin_info()
info.update({
'cache_dir': self.cache_settings['cache_dir'],
'cache_file': self.cache_file,
'max_age_days': self.cache_settings['max_age_days'],
'compression': self.cache_settings['compression'],
'exclude_dirs': self.cache_settings['exclude_dirs'],
'exclude_patterns': self.cache_settings['exclude_patterns'],
'min_cache_size_mb': self.cache_settings['min_cache_size_mb'],
'auto_cleanup': self.cache_settings['auto_cleanup'],
'cache_exists': self._cache_exists(),
'cache_valid': self._is_cache_valid() if self._cache_exists() else False,
'hooks': ['preinit', 'postinit', 'postchroot', 'postshell', 'clean']
})
return info
info.update(
{
"cache_dir": self.cache_settings["cache_dir"],
"cache_file": self.cache_file,
"max_age_days": self.cache_settings["max_age_days"],
"compression": self.cache_settings["compression"],
"exclude_dirs": self.cache_settings["exclude_dirs"],
"exclude_patterns": self.cache_settings["exclude_patterns"],
"min_cache_size_mb": self.cache_settings["min_cache_size_mb"],
"auto_cleanup": self.cache_settings["auto_cleanup"],
"cache_exists": self._cache_exists(),
"cache_valid": (self._is_cache_valid() if self._cache_exists() else False),
"hooks": ["preinit", "postinit", "postchroot", "postshell", "clean"],
}
)
return info

View file

@ -5,10 +5,9 @@ This plugin uses tmpfs for faster I/O operations in chroot,
inspired by Fedora's Mock tmpfs plugin but adapted for Debian-based systems.
"""
import os
import subprocess
import logging
from typing import Dict, Any, Optional
import subprocess
from typing import Any, Dict
from .base import BasePlugin
@ -18,71 +17,71 @@ logger = logging.getLogger(__name__)
class TmpfsPlugin(BasePlugin):
"""
Use tmpfs for faster I/O operations in chroot.
This plugin mounts a tmpfs filesystem on the chroot directory,
which can significantly improve build performance by using RAM
instead of disk for temporary files and build artifacts.
"""
def __init__(self, config, hook_manager):
"""Initialize the Tmpfs plugin."""
super().__init__(config, hook_manager)
self.tmpfs_settings = self._get_tmpfs_settings()
self.mounted = False
self._log_info(f"Initialized with size: {self.tmpfs_settings['size']}")
def _register_hooks(self):
"""Register tmpfs hooks."""
self.hook_manager.add_hook("mount_root", self.mount_root)
self.hook_manager.add_hook("postumount", self.postumount)
self._log_debug("Registered mount_root and postumount hooks")
def _get_tmpfs_settings(self) -> Dict[str, Any]:
"""
Get tmpfs settings from configuration.
Returns:
Dictionary with tmpfs settings
"""
plugin_config = self._get_plugin_config()
return {
'size': plugin_config.get('size', '2G'),
'mode': plugin_config.get('mode', '0755'),
'mount_point': plugin_config.get('mount_point', '/tmp'),
'keep_mounted': plugin_config.get('keep_mounted', False),
'required_ram_mb': plugin_config.get('required_ram_mb', 2048), # 2GB default
'max_fs_size': plugin_config.get('max_fs_size', None)
"size": plugin_config.get("size", "2G"),
"mode": plugin_config.get("mode", "0755"),
"mount_point": plugin_config.get("mount_point", "/tmp"),
"keep_mounted": plugin_config.get("keep_mounted", False),
"required_ram_mb": plugin_config.get("required_ram_mb", 2048), # 2GB default
"max_fs_size": plugin_config.get("max_fs_size", None),
}
def mount_root(self, context: Dict[str, Any]) -> None:
"""
Mount tmpfs when chroot is mounted.
Args:
context: Context dictionary with chroot information
"""
if not self.enabled:
return
chroot_path = context.get('chroot_path')
chroot_path = context.get("chroot_path")
if not chroot_path:
self._log_warning("No chroot_path in context, skipping tmpfs mount")
return
# Check if we have enough RAM
if not self._check_ram_requirements():
self._log_warning("Insufficient RAM for tmpfs, skipping mount")
return
# Check if already mounted
if self._is_mounted(chroot_path):
self._log_info(f"Tmpfs already mounted at {chroot_path}")
self.mounted = True
return
self._log_info(f"Mounting tmpfs at {chroot_path}")
try:
self._mount_tmpfs(chroot_path)
self.mounted = True
@ -90,288 +89,284 @@ class TmpfsPlugin(BasePlugin):
except Exception as e:
self._log_error(f"Failed to mount tmpfs: {e}")
self.mounted = False
def postumount(self, context: Dict[str, Any]) -> None:
"""
Unmount tmpfs when chroot is unmounted.
Args:
context: Context dictionary with chroot information
"""
if not self.enabled or not self.mounted:
return
chroot_path = context.get('chroot_path')
chroot_path = context.get("chroot_path")
if not chroot_path:
self._log_warning("No chroot_path in context, skipping tmpfs unmount")
return
# Check if we should keep mounted
if self.tmpfs_settings['keep_mounted']:
if self.tmpfs_settings["keep_mounted"]:
self._log_info("Keeping tmpfs mounted as requested")
return
self._log_info(f"Unmounting tmpfs from {chroot_path}")
try:
self._unmount_tmpfs(chroot_path)
self.mounted = False
self._log_info("Tmpfs unmounted successfully")
except Exception as e:
self._log_error(f"Failed to unmount tmpfs: {e}")
def _check_ram_requirements(self) -> bool:
"""
Check if system has enough RAM for tmpfs.
Returns:
True if system has sufficient RAM, False otherwise
"""
try:
# Get system RAM in MB
with open('/proc/meminfo', 'r') as f:
with open("/proc/meminfo", "r") as f:
for line in f:
if line.startswith('MemTotal:'):
if line.startswith("MemTotal:"):
mem_total_kb = int(line.split()[1])
mem_total_mb = mem_total_kb // 1024
break
else:
self._log_warning("Could not determine system RAM")
return False
required_ram = self.tmpfs_settings['required_ram_mb']
required_ram = self.tmpfs_settings["required_ram_mb"]
if mem_total_mb < required_ram:
self._log_warning(
f"System has {mem_total_mb}MB RAM, but {required_ram}MB is required for tmpfs"
)
self._log_warning(f"System has {mem_total_mb}MB RAM, but {required_ram}MB is required for tmpfs")
return False
self._log_debug(f"System RAM: {mem_total_mb}MB, required: {required_ram}MB")
return True
except Exception as e:
self._log_error(f"Failed to check RAM requirements: {e}")
return False
def _is_mounted(self, chroot_path: str) -> bool:
"""
Check if tmpfs is already mounted at the given path.
Args:
chroot_path: Path to check
Returns:
True if tmpfs is mounted, False otherwise
"""
try:
# Check if the path is a mount point
result = subprocess.run(
['mountpoint', '-q', chroot_path],
capture_output=True,
text=True
)
result = subprocess.run(["mountpoint", "-q", chroot_path], capture_output=True, text=True)
return result.returncode == 0
except FileNotFoundError:
# mountpoint command not available, try alternative method
try:
with open('/proc/mounts', 'r') as f:
with open("/proc/mounts", "r") as f:
for line in f:
parts = line.split()
if len(parts) >= 2 and parts[1] == chroot_path:
return parts[0] == 'tmpfs'
return parts[0] == "tmpfs"
return False
except Exception:
self._log_warning("Could not check mount status")
return False
def _mount_tmpfs(self, chroot_path: str) -> None:
"""
Mount tmpfs at the specified path.
Args:
chroot_path: Path where to mount tmpfs
"""
# Build mount options
options = []
# Add mode option
mode = self.tmpfs_settings['mode']
options.append(f'mode={mode}')
mode = self.tmpfs_settings["mode"]
options.append(f"mode={mode}")
# Add size option
size = self.tmpfs_settings['size']
size = self.tmpfs_settings["size"]
if size:
options.append(f'size={size}')
options.append(f"size={size}")
# Add max_fs_size if specified
max_fs_size = self.tmpfs_settings['max_fs_size']
max_fs_size = self.tmpfs_settings["max_fs_size"]
if max_fs_size:
options.append(f'size={max_fs_size}')
options.append(f"size={max_fs_size}")
# Add noatime for better performance
options.append('noatime')
options.append("noatime")
# Build mount command
mount_cmd = [
'mount', '-n', '-t', 'tmpfs',
'-o', ','.join(options),
'deb_mock_tmpfs', chroot_path
"mount",
"-n",
"-t",
"tmpfs",
"-o",
",".join(options),
"deb_mock_tmpfs",
chroot_path,
]
self._log_debug(f"Mount command: {' '.join(mount_cmd)}")
try:
result = subprocess.run(
mount_cmd,
capture_output=True,
text=True,
check=True
)
subprocess.run(mount_cmd, capture_output=True, text=True, check=True)
self._log_debug("Tmpfs mount command executed successfully")
except subprocess.CalledProcessError as e:
self._log_error(f"Tmpfs mount failed: {e.stderr}")
raise
except FileNotFoundError:
self._log_error("mount command not found - ensure mount is available")
raise
def _unmount_tmpfs(self, chroot_path: str) -> None:
"""
Unmount tmpfs from the specified path.
Args:
chroot_path: Path where tmpfs is mounted
"""
# Try normal unmount first
try:
cmd = ['umount', '-n', chroot_path]
result = subprocess.run(cmd, capture_output=True, text=True, check=True)
cmd = ["umount", "-n", chroot_path]
subprocess.run(cmd, capture_output=True, text=True, check=True)
self._log_debug("Tmpfs unmounted successfully")
return
except subprocess.CalledProcessError as e:
self._log_warning(f"Normal unmount failed: {e.stderr}")
# Try lazy unmount
try:
cmd = ['umount', '-n', '-l', chroot_path]
result = subprocess.run(cmd, capture_output=True, text=True, check=True)
cmd = ["umount", "-n", "-l", chroot_path]
subprocess.run(cmd, capture_output=True, text=True, check=True)
self._log_debug("Tmpfs lazy unmounted successfully")
return
except subprocess.CalledProcessError as e:
self._log_warning(f"Lazy unmount failed: {e.stderr}")
# Try force unmount as last resort
try:
cmd = ['umount', '-n', '-f', chroot_path]
result = subprocess.run(cmd, capture_output=True, text=True, check=True)
cmd = ["umount", "-n", "-f", chroot_path]
subprocess.run(cmd, capture_output=True, text=True, check=True)
self._log_debug("Tmpfs force unmounted successfully")
return
except subprocess.CalledProcessError as e:
self._log_error(f"Force unmount failed: {e.stderr}")
raise
def validate_config(self, config: Any) -> bool:
"""
Validate plugin configuration.
Args:
config: Configuration to validate
Returns:
True if configuration is valid, False otherwise
"""
plugin_config = getattr(config, 'plugins', {}).get('tmpfs', {})
plugin_config = getattr(config, "plugins", {}).get("tmpfs", {})
# Validate size format
size = plugin_config.get('size', '2G')
size = plugin_config.get("size", "2G")
if not self._is_valid_size_format(size):
self._log_error(f"Invalid size format: {size}. Use format like '2G', '512M', etc.")
return False
# Validate mode format
mode = plugin_config.get('mode', '0755')
mode = plugin_config.get("mode", "0755")
if not self._is_valid_mode_format(mode):
self._log_error(f"Invalid mode format: {mode}. Use octal format like '0755'")
return False
# Validate required_ram_mb
required_ram = plugin_config.get('required_ram_mb', 2048)
required_ram = plugin_config.get("required_ram_mb", 2048)
if not isinstance(required_ram, int) or required_ram <= 0:
self._log_error(f"Invalid required_ram_mb: {required_ram}. Must be positive integer")
return False
# Validate keep_mounted
keep_mounted = plugin_config.get('keep_mounted', False)
keep_mounted = plugin_config.get("keep_mounted", False)
if not isinstance(keep_mounted, bool):
self._log_error(f"Invalid keep_mounted: {keep_mounted}. Must be boolean")
return False
return True
def _is_valid_size_format(self, size: str) -> bool:
"""
Check if size format is valid.
Args:
size: Size string to validate
Returns:
True if format is valid, False otherwise
"""
if not size:
return False
# Check if it's a number (bytes)
if size.isdigit():
return True
# Check if it ends with a valid unit
valid_units = ['K', 'M', 'G', 'T']
valid_units = ["K", "M", "G", "T"]
if size[-1] in valid_units and size[:-1].isdigit():
return True
return False
def _is_valid_mode_format(self, mode: str) -> bool:
"""
Check if mode format is valid.
Args:
mode: Mode string to validate
Returns:
True if format is valid, False otherwise
"""
if not mode:
return False
# Check if it's a valid octal number
try:
int(mode, 8)
return True
except ValueError:
return False
def get_plugin_info(self) -> Dict[str, Any]:
"""
Get plugin information.
Returns:
Dictionary with plugin information
"""
info = super().get_plugin_info()
info.update({
'tmpfs_size': self.tmpfs_settings['size'],
'tmpfs_mode': self.tmpfs_settings['mode'],
'mount_point': self.tmpfs_settings['mount_point'],
'keep_mounted': self.tmpfs_settings['keep_mounted'],
'required_ram_mb': self.tmpfs_settings['required_ram_mb'],
'mounted': self.mounted,
'hooks': ['mount_root', 'postumount']
})
return info
info.update(
{
"tmpfs_size": self.tmpfs_settings["size"],
"tmpfs_mode": self.tmpfs_settings["mode"],
"mount_point": self.tmpfs_settings["mount_point"],
"keep_mounted": self.tmpfs_settings["keep_mounted"],
"required_ram_mb": self.tmpfs_settings["required_ram_mb"],
"mounted": self.mounted,
"hooks": ["mount_root", "postumount"],
}
)
return info

View file

@ -5,276 +5,281 @@ sbuild wrapper for deb-mock
import os
import subprocess
import tempfile
import shutil
from pathlib import Path
from typing import List, Dict, Any, Optional
from typing import Any, Dict, List
from .exceptions import SbuildError
class SbuildWrapper:
"""Wrapper around sbuild for standardized package building"""
def __init__(self, config):
self.config = config
def build_package(self, source_package: str, chroot_name: str = None,
output_dir: str = None, **kwargs) -> Dict[str, Any]:
def build_package(
self,
source_package: str,
chroot_name: str = None,
output_dir: str = None,
**kwargs,
) -> Dict[str, Any]:
"""Build a Debian source package using sbuild"""
if chroot_name is None:
chroot_name = self.config.chroot_name
if output_dir is None:
output_dir = self.config.get_output_path()
# Ensure output directory exists
os.makedirs(output_dir, exist_ok=True)
# Prepare sbuild command
cmd = self._prepare_sbuild_command(source_package, chroot_name, output_dir, **kwargs)
# Prepare environment variables
env = os.environ.copy()
if kwargs.get("build_env"):
env.update(kwargs["build_env"])
env.update(self.config.build_env)
# Create temporary log file
with tempfile.NamedTemporaryFile(mode='w', suffix='.log', delete=False) as log_file:
with tempfile.NamedTemporaryFile(mode="w", suffix=".log", delete=False) as log_file:
log_path = log_file.name
try:
# Execute sbuild
result = self._execute_sbuild(cmd, log_path)
result = self._execute_sbuild(cmd, log_path, env)
# Parse build results
build_info = self._parse_build_results(output_dir, log_path, result)
return build_info
finally:
# Clean up temporary log file
if os.path.exists(log_path):
os.unlink(log_path)
def _prepare_sbuild_command(self, source_package: str, chroot_name: str,
output_dir: str, **kwargs) -> List[str]:
def _prepare_sbuild_command(self, source_package: str, chroot_name: str, output_dir: str, **kwargs) -> List[str]:
"""Prepare the sbuild command with all necessary options"""
cmd = ['sbuild']
cmd = ["sbuild"]
# Basic options
cmd.extend(['--chroot', chroot_name])
cmd.extend(['--dist', self.config.suite])
cmd.extend(['--arch', self.config.architecture])
cmd.extend(["--chroot", chroot_name])
cmd.extend(["--dist", self.config.suite])
cmd.extend(["--arch", self.config.architecture])
# Output options
cmd.extend(['--build-dir', output_dir])
# Logging options
cmd.extend(['--log-dir', self.config.sbuild_log_dir])
cmd.extend(["--build-dir", output_dir])
# Build options
if kwargs.get('verbose', self.config.verbose):
cmd.append('--verbose')
if kwargs.get('debug', self.config.debug):
cmd.append('--debug')
if kwargs.get("verbose", self.config.verbose):
cmd.append("--verbose")
if kwargs.get("debug", self.config.debug):
cmd.append("--debug")
# Additional build options from config
for option in self.config.build_options:
cmd.extend(option.split())
# Custom build options
if kwargs.get('build_options'):
for option in kwargs['build_options']:
if kwargs.get("build_options"):
for option in kwargs["build_options"]:
cmd.extend(option.split())
# Environment variables
for key, value in self.config.build_env.items():
cmd.extend(['--env', f'{key}={value}'])
# Custom environment variables
if kwargs.get('build_env'):
for key, value in kwargs['build_env'].items():
cmd.extend(['--env', f'{key}={value}'])
# Environment variables will be passed to subprocess.run
pass
# Source package
cmd.append(source_package)
return cmd
def _execute_sbuild(self, cmd: List[str], log_path: str) -> subprocess.CompletedProcess:
def _execute_sbuild(self, cmd: List[str], log_path: str, env: Dict[str, str] = None) -> subprocess.CompletedProcess:
"""Execute sbuild command"""
try:
# Redirect output to log file
with open(log_path, 'w') as log_file:
with open(log_path, "w") as log_file:
result = subprocess.run(
cmd,
stdout=log_file,
stderr=subprocess.STDOUT,
text=True,
check=True
check=True,
env=env,
)
return result
except subprocess.CalledProcessError as e:
# Read log file for error details
with open(log_path, 'r') as log_file:
with open(log_path, "r") as log_file:
log_content = log_file.read()
raise SbuildError(f"sbuild failed: {e}\nLog output:\n{log_content}")
except FileNotFoundError:
raise SbuildError("sbuild not found. Please install sbuild package.")
def _parse_build_results(self, output_dir: str, log_path: str,
result: subprocess.CompletedProcess) -> Dict[str, Any]:
def _parse_build_results(
self, output_dir: str, log_path: str, result: subprocess.CompletedProcess
) -> Dict[str, Any]:
"""Parse build results and collect artifacts"""
build_info = {
'success': True,
'output_dir': output_dir,
'log_file': log_path,
'artifacts': [],
'metadata': {}
"success": True,
"output_dir": output_dir,
"log_file": log_path,
"artifacts": [],
"metadata": {},
}
# Collect build artifacts
artifacts = self._collect_artifacts(output_dir)
build_info['artifacts'] = artifacts
build_info["artifacts"] = artifacts
# Parse build metadata
metadata = self._parse_build_metadata(log_path, output_dir)
build_info['metadata'] = metadata
build_info["metadata"] = metadata
return build_info
def _collect_artifacts(self, output_dir: str) -> List[str]:
"""Collect build artifacts from output directory"""
artifacts = []
if not os.path.exists(output_dir):
return artifacts
# Look for .deb files
for deb_file in Path(output_dir).glob("*.deb"):
artifacts.append(str(deb_file))
# Look for .changes files
for changes_file in Path(output_dir).glob("*.changes"):
artifacts.append(str(changes_file))
# Look for .buildinfo files
for buildinfo_file in Path(output_dir).glob("*.buildinfo"):
artifacts.append(str(buildinfo_file))
return artifacts
def _parse_build_metadata(self, log_path: str, output_dir: str) -> Dict[str, Any]:
"""Parse build metadata from log and artifacts"""
metadata = {
'build_time': None,
'package_name': None,
'package_version': None,
'architecture': self.config.architecture,
'suite': self.config.suite,
'chroot': self.config.chroot_name,
'dependencies': [],
'build_dependencies': []
"build_time": None,
"package_name": None,
"package_version": None,
"architecture": self.config.architecture,
"suite": self.config.suite,
"chroot": self.config.chroot_name,
"dependencies": [],
"build_dependencies": [],
}
# Parse log file for metadata
if os.path.exists(log_path):
with open(log_path, 'r') as log_file:
with open(log_path, "r") as log_file:
log_content = log_file.read()
metadata.update(self._extract_metadata_from_log(log_content))
# Parse .changes file for additional metadata
changes_files = list(Path(output_dir).glob("*.changes"))
if changes_files:
metadata.update(self._parse_changes_file(changes_files[0]))
return metadata
def _extract_metadata_from_log(self, log_content: str) -> Dict[str, Any]:
"""Extract metadata from sbuild log content"""
metadata = {}
# Extract build time
import re
time_match = re.search(r'Build started at (\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2})', log_content)
time_match = re.search(r"Build started at (\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2})", log_content)
if time_match:
metadata['build_time'] = time_match.group(1)
metadata["build_time"] = time_match.group(1)
# Extract package name and version
package_match = re.search(r'Building (\S+) \((\S+)\)', log_content)
package_match = re.search(r"Building (\S+) \((\S+)\)", log_content)
if package_match:
metadata['package_name'] = package_match.group(1)
metadata['package_version'] = package_match.group(2)
metadata["package_name"] = package_match.group(1)
metadata["package_version"] = package_match.group(2)
return metadata
def _parse_changes_file(self, changes_file: Path) -> Dict[str, Any]:
"""Parse .changes file for metadata"""
metadata = {}
try:
with open(changes_file, 'r') as f:
with open(changes_file, "r") as f:
content = f.read()
lines = content.split('\n')
lines = content.split("\n")
for line in lines:
if line.startswith('Source:'):
metadata['source_package'] = line.split(':', 1)[1].strip()
elif line.startswith('Version:'):
metadata['source_version'] = line.split(':', 1)[1].strip()
elif line.startswith('Architecture:'):
metadata['architectures'] = line.split(':', 1)[1].strip().split()
if line.startswith("Source:"):
metadata["source_package"] = line.split(":", 1)[1].strip()
elif line.startswith("Version:"):
metadata["source_version"] = line.split(":", 1)[1].strip()
elif line.startswith("Architecture:"):
metadata["architectures"] = line.split(":", 1)[1].strip().split()
except Exception:
pass
return metadata
def check_dependencies(self, source_package: str, chroot_name: str = None) -> Dict[str, Any]:
"""Check build dependencies for a source package"""
if chroot_name is None:
chroot_name = self.config.chroot_name
# Use dpkg-checkbuilddeps to check dependencies
cmd = ['schroot', '-c', chroot_name, '--', 'dpkg-checkbuilddeps']
cmd = ["schroot", "-c", chroot_name, "--", "dpkg-checkbuilddeps"]
try:
result = subprocess.run(cmd, capture_output=True, text=True, check=True)
return {
'satisfied': True,
'missing': [],
'conflicts': []
}
subprocess.run(cmd, capture_output=True, text=True, check=True)
return {"satisfied": True, "missing": [], "conflicts": []}
except subprocess.CalledProcessError as e:
# Parse missing dependencies from error output
missing = self._parse_missing_dependencies(e.stderr)
return {
'satisfied': False,
'missing': missing,
'conflicts': []
}
return {"satisfied": False, "missing": missing, "conflicts": []}
def _parse_missing_dependencies(self, stderr: str) -> List[str]:
"""Parse missing dependencies from dpkg-checkbuilddeps output"""
missing = []
for line in stderr.split('\n'):
if 'Unmet build dependencies:' in line:
for line in stderr.split("\n"):
if "Unmet build dependencies:" in line:
# Extract package names from the line
import re
packages = re.findall(r'\b[a-zA-Z0-9][a-zA-Z0-9+\-\.]*\b', line)
packages = re.findall(r"\b[a-zA-Z0-9][a-zA-Z0-9+\-\.]*\b", line)
missing.extend(packages)
return missing
def install_build_dependencies(self, dependencies: List[str], chroot_name: str = None) -> None:
"""Install build dependencies in the chroot"""
if chroot_name is None:
chroot_name = self.config.chroot_name
if not dependencies:
return
cmd = ['schroot', '-c', chroot_name, '--', 'apt-get', 'install', '-y'] + dependencies
cmd = [
"schroot",
"-c",
chroot_name,
"--",
"apt-get",
"install",
"-y",
] + dependencies
try:
subprocess.run(cmd, check=True)
except subprocess.CalledProcessError as e:
raise SbuildError(f"Failed to install build dependencies: {e}")
raise SbuildError(f"Failed to install build dependencies: {e}")

0
debian/rules vendored Normal file → Executable file
View file

View file

@ -1,5 +0,0 @@
# This is a placeholder for the hello_1.0-1.debian.tar.gz file
# In a real scenario, this would contain the debian/ directory
# with packaging files like debian/control, debian/rules, etc.
# For testing purposes, you would need to create an actual tarball
# containing the Debian packaging files.

View file

@ -1,19 +0,0 @@
Format: 3.0 (quilt)
Source: hello
Binary: hello
Architecture: any
Version: 1.0-1
Maintainer: Deb-Mock Team <team@deb-mock.org>
Homepage: https://github.com/deb-mock/deb-mock
Standards-Version: 4.6.2
Vcs-Browser: https://github.com/deb-mock/deb-mock
Vcs-Git: https://github.com/deb-mock/deb-mock.git
Build-Depends: debhelper-compat (= 13)
Package: hello
Architecture: any
Depends: ${misc:Depends}
Description: Example package for Deb-Mock testing
This is a simple example package used to test the Deb-Mock
build environment. It demonstrates basic package building
functionality.

View file

@ -1,4 +0,0 @@
# This is a placeholder for the hello_1.0.orig.tar.gz file
# In a real scenario, this would be the upstream source tarball
# For testing purposes, you would need to create an actual tarball
# containing the source code for the hello package.

View file

@ -19,4 +19,5 @@ parallel_jobs: 2
# Build settings
keep_chroot: false
verbose: true
debug: false
debug: false

View file

@ -1,3 +1,3 @@
"""
Tests for deb-mock
"""
"""

View file

@ -2,47 +2,47 @@
Tests for configuration management
"""
import unittest
import tempfile
import os
from pathlib import Path
import tempfile
import unittest
from deb_mock.config import Config
from deb_mock.exceptions import ConfigurationError
class TestConfig(unittest.TestCase):
"""Test configuration management"""
def test_default_config(self):
"""Test default configuration creation"""
config = Config.default()
self.assertEqual(config.chroot_name, 'bookworm-amd64')
self.assertEqual(config.architecture, 'amd64')
self.assertEqual(config.suite, 'bookworm')
self.assertEqual(config.output_dir, './output')
self.assertEqual(config.chroot_name, "bookworm-amd64")
self.assertEqual(config.architecture, "amd64")
self.assertEqual(config.suite, "bookworm")
self.assertEqual(config.output_dir, "./output")
self.assertFalse(config.keep_chroot)
self.assertFalse(config.verbose)
self.assertFalse(config.debug)
def test_custom_config(self):
"""Test custom configuration creation"""
config = Config(
chroot_name='sid-amd64',
architecture='arm64',
suite='sid',
output_dir='/tmp/build',
chroot_name="sid-amd64",
architecture="arm64",
suite="sid",
output_dir="/tmp/build",
keep_chroot=True,
verbose=True
verbose=True,
)
self.assertEqual(config.chroot_name, 'sid-amd64')
self.assertEqual(config.architecture, 'arm64')
self.assertEqual(config.suite, 'sid')
self.assertEqual(config.output_dir, '/tmp/build')
self.assertEqual(config.chroot_name, "sid-amd64")
self.assertEqual(config.architecture, "arm64")
self.assertEqual(config.suite, "sid")
self.assertEqual(config.output_dir, "/tmp/build")
self.assertTrue(config.keep_chroot)
self.assertTrue(config.verbose)
def test_config_from_file(self):
"""Test loading configuration from file"""
config_data = """
@ -53,89 +53,81 @@ output_dir: /tmp/build
keep_chroot: true
verbose: true
"""
with tempfile.NamedTemporaryFile(mode='w', suffix='.yaml', delete=False) as f:
with tempfile.NamedTemporaryFile(mode="w", suffix=".yaml", delete=False) as f:
f.write(config_data)
config_file = f.name
try:
config = Config.from_file(config_file)
self.assertEqual(config.chroot_name, 'sid-amd64')
self.assertEqual(config.architecture, 'arm64')
self.assertEqual(config.suite, 'sid')
self.assertEqual(config.output_dir, '/tmp/build')
self.assertEqual(config.chroot_name, "sid-amd64")
self.assertEqual(config.architecture, "arm64")
self.assertEqual(config.suite, "sid")
self.assertEqual(config.output_dir, "/tmp/build")
self.assertTrue(config.keep_chroot)
self.assertTrue(config.verbose)
finally:
os.unlink(config_file)
def test_config_to_dict(self):
"""Test converting configuration to dictionary"""
config = Config(
chroot_name='test-chroot',
architecture='amd64',
suite='bookworm'
)
config = Config(chroot_name="test-chroot", architecture="amd64", suite="bookworm")
config_dict = config.to_dict()
self.assertEqual(config_dict['chroot_name'], 'test-chroot')
self.assertEqual(config_dict['architecture'], 'amd64')
self.assertEqual(config_dict['suite'], 'bookworm')
self.assertIn('output_dir', config_dict)
self.assertIn('keep_chroot', config_dict)
self.assertEqual(config_dict["chroot_name"], "test-chroot")
self.assertEqual(config_dict["architecture"], "amd64")
self.assertEqual(config_dict["suite"], "bookworm")
self.assertIn("output_dir", config_dict)
self.assertIn("keep_chroot", config_dict)
def test_config_save(self):
"""Test saving configuration to file"""
config = Config(
chroot_name='test-chroot',
architecture='amd64',
suite='bookworm'
)
with tempfile.NamedTemporaryFile(mode='w', suffix='.yaml', delete=False) as f:
config = Config(chroot_name="test-chroot", architecture="amd64", suite="bookworm")
with tempfile.NamedTemporaryFile(mode="w", suffix=".yaml", delete=False) as f:
config_file = f.name
try:
config.save(config_file)
# Load the saved configuration
loaded_config = Config.from_file(config_file)
self.assertEqual(loaded_config.chroot_name, config.chroot_name)
self.assertEqual(loaded_config.architecture, config.architecture)
self.assertEqual(loaded_config.suite, config.suite)
finally:
if os.path.exists(config_file):
os.unlink(config_file)
def test_invalid_architecture(self):
"""Test validation of invalid architecture"""
config = Config(architecture='invalid-arch')
config = Config(architecture="invalid-arch")
with self.assertRaises(ConfigurationError):
config.validate()
def test_invalid_suite(self):
"""Test validation of invalid suite"""
config = Config(suite='invalid-suite')
config = Config(suite="invalid-suite")
with self.assertRaises(ConfigurationError):
config.validate()
def test_get_paths(self):
"""Test path generation methods"""
config = Config(
chroot_dir='/var/lib/chroots',
output_dir='./output',
metadata_dir='./metadata'
chroot_dir="/var/lib/chroots",
output_dir="./output",
metadata_dir="./metadata",
)
self.assertEqual(config.get_chroot_path(), '/var/lib/chroots/bookworm-amd64')
self.assertEqual(config.get_output_path(), os.path.abspath('./output'))
self.assertEqual(config.get_metadata_path(), os.path.abspath('./metadata'))
self.assertEqual(config.get_chroot_path(), "/var/lib/chroots/bookworm-amd64")
self.assertEqual(config.get_output_path(), os.path.abspath("./output"))
self.assertEqual(config.get_metadata_path(), os.path.abspath("./metadata"))
if __name__ == '__main__':
unittest.main()
if __name__ == "__main__":
unittest.main()

View file

@ -3,19 +3,26 @@ Tests for the enhanced exception handling system
"""
import pytest
import sys
from io import StringIO
from deb_mock.exceptions import (
DebMockError, ConfigurationError, ChrootError, SbuildError,
BuildError, DependencyError, MetadataError, CacheError,
PluginError, NetworkError, PermissionError, ValidationError,
handle_exception, format_error_context
BuildError,
CacheError,
ChrootError,
ConfigurationError,
DebMockError,
DependencyError,
NetworkError,
PermissionError,
SbuildError,
ValidationError,
format_error_context,
handle_exception,
)
class TestDebMockError:
"""Test the base DebMockError class"""
def test_basic_error(self):
"""Test basic error creation"""
error = DebMockError("Test error message")
@ -23,24 +30,24 @@ class TestDebMockError:
assert error.exit_code == 1
assert error.context == {}
assert error.suggestions == []
def test_error_with_context(self):
"""Test error with context information"""
context = {'file': '/path/to/file', 'operation': 'read'}
context = {"file": "/path/to/file", "operation": "read"}
error = DebMockError("File operation failed", context=context)
expected = """Error: File operation failed
Context:
file: /path/to/file
operation: read"""
assert str(error) == expected
def test_error_with_suggestions(self):
"""Test error with suggestions"""
suggestions = ["Try again", "Check permissions", "Contact admin"]
error = DebMockError("Operation failed", suggestions=suggestions)
expected = """Error: Operation failed
Suggestions:
@ -48,14 +55,13 @@ Suggestions:
2. Check permissions
3. Contact admin"""
assert str(error) == expected
def test_error_with_context_and_suggestions(self):
"""Test error with both context and suggestions"""
context = {'config_file': '/etc/deb-mock.conf'}
context = {"config_file": "/etc/deb-mock.conf"}
suggestions = ["Check config syntax", "Verify file exists"]
error = DebMockError("Invalid configuration",
context=context, suggestions=suggestions)
error = DebMockError("Invalid configuration", context=context, suggestions=suggestions)
expected = """Error: Invalid configuration
Context:
@ -65,7 +71,7 @@ Suggestions:
1. Check config syntax
2. Verify file exists"""
assert str(error) == expected
def test_print_error(self, capsys):
"""Test error printing to stderr"""
error = DebMockError("Test error")
@ -73,7 +79,7 @@ Suggestions:
error.print_error()
# If we get here, the method executed successfully
assert True
def test_get_exit_code(self):
"""Test exit code retrieval"""
error = DebMockError("Test error", exit_code=42)
@ -82,125 +88,125 @@ Suggestions:
class TestSpecificExceptions:
"""Test specific exception types"""
def test_configuration_error(self):
"""Test ConfigurationError with file and section context"""
error = ConfigurationError(
"Invalid configuration",
config_file="/etc/deb-mock.conf",
config_section="chroot"
config_section="chroot",
)
assert "config_file: /etc/deb-mock.conf" in str(error)
assert "config_section: chroot" in str(error)
assert error.exit_code == 2
assert len(error.suggestions) > 0
def test_chroot_error(self):
"""Test ChrootError with operation context"""
error = ChrootError(
"Failed to create chroot",
chroot_name="bookworm-amd64",
operation="create",
chroot_path="/var/lib/deb-mock/chroots/bookworm-amd64"
chroot_path="/var/lib/deb-mock/chroots/bookworm-amd64",
)
assert "chroot_name: bookworm-amd64" in str(error)
assert "operation: create" in str(error)
assert error.exit_code == 3
assert "clean-chroot" in str(error.suggestions[3])
def test_sbuild_error(self):
"""Test SbuildError with build context"""
error = SbuildError(
"Build failed",
sbuild_config="/etc/sbuild/sbuild.conf",
build_log="/var/log/sbuild.log",
return_code=1
return_code=1,
)
assert "sbuild_config: /etc/sbuild/sbuild.conf" in str(error)
assert "build_log: /var/log/sbuild.log" in str(error)
assert "return_code: 1" in str(error)
assert error.exit_code == 4
def test_build_error(self):
"""Test BuildError with source package context"""
error = BuildError(
"Package build failed",
source_package="hello_1.0.dsc",
build_log="/tmp/build.log",
artifacts=["hello_1.0-1_amd64.deb"]
artifacts=["hello_1.0-1_amd64.deb"],
)
assert "source_package: hello_1.0.dsc" in str(error)
assert "build_log: /tmp/build.log" in str(error)
assert "artifacts: ['hello_1.0-1_amd64.deb']" in str(error)
assert error.exit_code == 5
def test_dependency_error(self):
"""Test DependencyError with missing packages"""
error = DependencyError(
"Missing build dependencies",
missing_packages=["build-essential", "devscripts"],
conflicting_packages=["old-package"]
conflicting_packages=["old-package"],
)
assert "missing_packages: ['build-essential', 'devscripts']" in str(error)
assert "conflicting_packages: ['old-package']" in str(error)
assert error.exit_code == 6
def test_cache_error(self):
"""Test CacheError with cache context"""
error = CacheError(
"Cache operation failed",
cache_type="root_cache",
cache_path="/var/cache/deb-mock/root-cache",
operation="restore"
operation="restore",
)
assert "cache_type: root_cache" in str(error)
assert "cache_path: /var/cache/deb-mock/root-cache" in str(error)
assert "operation: restore" in str(error)
assert error.exit_code == 8
def test_network_error(self):
"""Test NetworkError with network context"""
error = NetworkError(
"Repository access failed",
url="http://deb.debian.org/debian/",
proxy="http://proxy.example.com:3128",
timeout=30
timeout=30,
)
assert "url: http://deb.debian.org/debian/" in str(error)
assert "proxy: http://proxy.example.com:3128" in str(error)
assert "timeout: 30" in str(error)
assert error.exit_code == 10
def test_permission_error(self):
"""Test PermissionError with permission context"""
error = PermissionError(
"Insufficient privileges",
operation="create_chroot",
path="/var/lib/deb-mock",
required_privileges="root"
required_privileges="root",
)
assert "operation: create_chroot" in str(error)
assert "path: /var/lib/deb-mock" in str(error)
assert "required_privileges: root" in str(error)
assert error.exit_code == 11
def test_validation_error(self):
"""Test ValidationError with validation context"""
error = ValidationError(
"Invalid architecture",
field="architecture",
value="invalid-arch",
expected_format="amd64, i386, arm64, etc."
expected_format="amd64, i386, arm64, etc.",
)
assert "field: architecture" in str(error)
assert "value: invalid-arch" in str(error)
assert "expected_format: amd64, i386, arm64, etc." in str(error)
@ -209,35 +215,28 @@ class TestSpecificExceptions:
class TestHelperFunctions:
"""Test helper functions"""
def test_format_error_context(self):
"""Test format_error_context helper"""
context = format_error_context(
file="/path/to/file",
operation="read",
user="testuser",
none_value=None
)
expected = {
'file': '/path/to/file',
'operation': 'read',
'user': 'testuser'
}
context = format_error_context(file="/path/to/file", operation="read", user="testuser", none_value=None)
expected = {"file": "/path/to/file", "operation": "read", "user": "testuser"}
assert context == expected
assert 'none_value' not in context
assert "none_value" not in context
def test_handle_exception_decorator_success(self):
"""Test handle_exception decorator with successful function"""
@handle_exception
def successful_function():
return "success"
result = successful_function()
assert result == "success"
def test_handle_exception_decorator_debmock_error(self, capsys):
"""Test handle_exception decorator with DebMockError"""
@handle_exception
def failing_function():
raise ConfigurationError("Config error", config_file="/etc/config")
@ -250,9 +249,10 @@ class TestHelperFunctions:
# The error message was printed (we can see it in the test output)
# Just verify the decorator handled the exception correctly
assert True
def test_handle_exception_decorator_unexpected_error(self, capsys):
"""Test handle_exception decorator with unexpected error"""
@handle_exception
def unexpected_error_function():
raise ValueError("Unexpected value error")
@ -269,73 +269,73 @@ class TestHelperFunctions:
class TestExceptionIntegration:
"""Test exception integration scenarios"""
def test_chroot_creation_error_scenario(self):
"""Test realistic chroot creation error scenario"""
error = ChrootError(
"Failed to create chroot environment",
chroot_name="bookworm-amd64",
operation="debootstrap",
chroot_path="/var/lib/deb-mock/chroots/bookworm-amd64"
chroot_path="/var/lib/deb-mock/chroots/bookworm-amd64",
)
error_str = str(error)
# Check that all context information is present
assert "chroot_name: bookworm-amd64" in error_str
assert "operation: debootstrap" in error_str
assert "chroot_path: /var/lib/deb-mock/chroots/bookworm-amd64" in error_str
# Check that helpful suggestions are provided
assert "sufficient disk space" in error_str
assert "root privileges" in error_str
assert "clean-chroot" in error_str
# Check exit code
assert error.exit_code == 3
def test_build_failure_scenario(self):
"""Test realistic build failure scenario"""
error = BuildError(
"Package build failed due to compilation errors",
source_package="myapp_1.0.dsc",
build_log="/tmp/build_myapp.log",
artifacts=[]
artifacts=[],
)
error_str = str(error)
# Check context information
assert "source_package: myapp_1.0.dsc" in error_str
assert "build_log: /tmp/build_myapp.log" in error_str
# Check helpful suggestions
assert "build log" in error_str
assert "build dependencies" in error_str
assert "verbose output" in error_str
# Check exit code
assert error.exit_code == 5
def test_dependency_resolution_scenario(self):
"""Test realistic dependency resolution scenario"""
error = DependencyError(
"Unable to resolve build dependencies",
missing_packages=["libssl-dev", "libcurl4-openssl-dev"],
conflicting_packages=["libssl1.0-dev"]
conflicting_packages=["libssl1.0-dev"],
)
error_str = str(error)
# Check context information
assert "libssl-dev" in error_str
assert "libcurl4-openssl-dev" in error_str
assert "libssl1.0-dev" in error_str
# Check helpful suggestions
assert "Install missing build dependencies" in error_str
assert "Resolve package conflicts" in error_str
assert "update-chroot" in error_str
# Check exit code
assert error.exit_code == 6
assert error.exit_code == 6