Add comprehensive testing framework, performance monitoring, and plugin system
- Add complete pytest testing framework with conftest.py and test files
- Add performance monitoring and benchmarking capabilities
- Add plugin system with ccache plugin example
- Add comprehensive documentation (API, deployment, testing, etc.)
- Add Docker API wrapper for service deployment
- Add advanced configuration examples
- Remove old wget package file
- Update core modules with enhanced functionality
parent 4c0dcb2522 · commit c51819c836
30 changed files with 11,141 additions and 105 deletions
Dockerfile.api (new file, 89 lines)
@@ -0,0 +1,89 @@
FROM python:3.11-slim

# Install system dependencies
RUN apt-get update && apt-get install -y \
    curl \
    schroot \
    debootstrap \
    sbuild \
    sudo \
    && rm -rf /var/lib/apt/lists/*

# Set working directory
WORKDIR /app

# Copy requirements and install Python dependencies
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

# Copy deb-mock source code
COPY . .

# Install deb-mock in development mode
RUN pip install -e .

# Create necessary directories
RUN mkdir -p /app/configs /app/work /app/cache /app/logs

# Create a simple API wrapper script
RUN echo '#!/usr/bin/env python3\n\
import os\n\
import subprocess\n\
import json\n\
from flask import Flask, request, jsonify\n\
\n\
app = Flask(__name__)\n\
\n\
@app.route("/health")\n\
def health():\n\
    return jsonify({"status": "healthy", "service": "deb-mock-api"})\n\
\n\
@app.route("/api/v1/build", methods=["POST"])\n\
def build_package():\n\
    try:\n\
        data = request.get_json()\n\
        package_name = data.get("package_name")\n\
        architecture = data.get("architecture", "amd64")\n\
        config_file = data.get("config_file", "config-advanced.yaml")\n\
\n\
        # Execute deb-mock build command\n\
        cmd = ["deb-mock", "-c", config_file, "build", package_name]\n\
        result = subprocess.run(cmd, capture_output=True, text=True, cwd="/app")\n\
\n\
        if result.returncode == 0:\n\
            return jsonify({\n\
                "status": "success",\n\
                "package": package_name,\n\
                "architecture": architecture,\n\
                "output": result.stdout\n\
            }), 200\n\
        else:\n\
            return jsonify({\n\
                "status": "error",\n\
                "package": package_name,\n\
                "error": result.stderr\n\
            }), 400\n\
    except Exception as e:\n\
        return jsonify({"status": "error", "error": str(e)}), 500\n\
\n\
@app.route("/api/v1/status", methods=["GET"])\n\
def status():\n\
    return jsonify({\n\
        "status": "running",\n\
        "service": "deb-mock-api",\n\
        "version": "1.0.0"\n\
    })\n\
\n\
if __name__ == "__main__":\n\
    port = int(os.environ.get("MOCK_API_PORT", 8081))\n\
    app.run(host="0.0.0.0", port=port, debug=False)\n\
' > /app/api_server.py

# Make the API server executable
RUN chmod +x /app/api_server.py

# Expose the API port
EXPOSE 8081

# Start the API server
CMD ["python3", "/app/api_server.py"]
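For reference, the wrapper above can be exercised with a plain HTTP client once the container is running. A minimal sketch, assuming the container's port 8081 is published on localhost and using an illustrative package name; the endpoints themselves are the ones defined in api_server.py above:

import json
import urllib.request

# Health check against the wrapper
with urllib.request.urlopen("http://localhost:8081/health") as resp:
    print(json.load(resp))

# Request a build; "hello" is an illustrative package name
req = urllib.request.Request(
    "http://localhost:8081/api/v1/build",
    data=json.dumps({"package_name": "hello", "architecture": "amd64"}).encode(),
    headers={"Content-Type": "application/json"},
    method="POST",
)
with urllib.request.urlopen(req) as resp:
    print(json.load(resp))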
deb_mock/benchmarking.py (new file, 778 lines)
@@ -0,0 +1,778 @@
"""
Advanced benchmarking system for deb-mock
"""

import time
import psutil
import threading
import json
import os
import statistics
import subprocess
from pathlib import Path
from typing import Dict, List, Any, Optional, Callable, Tuple
from contextlib import contextmanager
from dataclasses import dataclass, asdict
from datetime import datetime, timedelta
import logging
from concurrent.futures import ThreadPoolExecutor, as_completed
import multiprocessing

from .exceptions import PerformanceError


@dataclass
class BenchmarkConfig:
    """Configuration for benchmarking"""
    name: str
    description: str
    iterations: int
    warmup_iterations: int
    parallel_runs: int
    timeout_seconds: int
    collect_system_metrics: bool
    collect_detailed_metrics: bool
    output_format: str  # json, html, csv
    output_file: Optional[str]


@dataclass
class BenchmarkMetrics:
    """Metrics collected during benchmarking"""
    timestamp: datetime
    duration: float
    cpu_percent: float
    memory_mb: float
    disk_io_read_mb: float
    disk_io_write_mb: float
    network_io_mb: float
    chroot_size_mb: float
    cache_hit_rate: float
    parallel_efficiency: float
    resource_utilization: float
    # System-level metrics
    system_cpu_percent: float
    system_memory_percent: float
    system_load_average: Tuple[float, float, float]
    system_disk_usage_percent: float
    system_network_connections: int


@dataclass
class BenchmarkResult:
    """Result of a benchmark run"""
    benchmark_name: str
    config: BenchmarkConfig
    start_time: datetime
    end_time: datetime
    total_duration: float
    iterations: int
    successful_iterations: int
    failed_iterations: int

    # Performance statistics
    durations: List[float]
    average_duration: float
    min_duration: float
    max_duration: float
    median_duration: float
    standard_deviation: float
    coefficient_of_variation: float

    # Percentiles
    percentiles: Dict[str, float]

    # System impact
    system_impact: Dict[str, float]

    # Detailed metrics
    metrics: List[BenchmarkMetrics]

    # Analysis
    analysis: Dict[str, Any]
    recommendations: List[str]

    # Metadata
    system_info: Dict[str, Any]
    benchmark_version: str


class BenchmarkRunner:
    """Advanced benchmark runner for deb-mock operations"""

    def __init__(self, config):
        self.config = config
        self.logger = logging.getLogger(__name__)

        # Benchmark history
        self._benchmark_history = []
        self._benchmark_results = {}

        # System information
        self._system_info = self._collect_system_info()

        # Benchmark templates
        self._benchmark_templates = self._load_benchmark_templates()

        # Performance baselines
        self._performance_baselines = {}
        self._load_performance_baselines()

    def _collect_system_info(self) -> Dict[str, Any]:
        """Collect comprehensive system information"""
        try:
            # CPU information
            cpu_info = {
                "count": psutil.cpu_count(),
                "count_logical": psutil.cpu_count(logical=True),
                "freq": psutil.cpu_freq()._asdict() if psutil.cpu_freq() else None,
                "architecture": os.uname().machine if hasattr(os, 'uname') else "unknown"
            }

            # Memory information
            memory = psutil.virtual_memory()
            memory_info = {
                "total_gb": memory.total / (1024**3),
                "available_gb": memory.available / (1024**3),
                "percent": memory.percent
            }

            # Disk information
            disk = psutil.disk_usage('/')
            disk_info = {
                "total_gb": disk.total / (1024**3),
                "free_gb": disk.free / (1024**3),
                "percent": disk.percent
            }

            # OS information
            os_info = {
                "platform": os.uname().sysname if hasattr(os, 'uname') else "unknown",
                "release": os.uname().release if hasattr(os, 'uname') else "unknown",
                "version": os.uname().version if hasattr(os, 'uname') else "unknown"
            }

            # Python information
            python_info = {
                "version": f"{os.sys.version_info.major}.{os.sys.version_info.minor}.{os.sys.version_info.micro}",
                "implementation": os.sys.implementation.name,
                "platform": os.sys.platform
            }

            return {
                "cpu": cpu_info,
                "memory": memory_info,
                "disk": disk_info,
                "os": os_info,
                "python": python_info,
                "timestamp": datetime.now().isoformat()
            }

        except Exception as e:
            self.logger.error(f"Failed to collect system info: {e}")
            return {"error": str(e)}

    def _load_benchmark_templates(self) -> Dict[str, BenchmarkConfig]:
        """Load predefined benchmark templates"""
        templates = {
            "quick": BenchmarkConfig(
                name="Quick Benchmark",
                description="Fast benchmark with minimal iterations",
                iterations=5,
                warmup_iterations=1,
                parallel_runs=1,
                timeout_seconds=300,
                collect_system_metrics=True,
                collect_detailed_metrics=False,
                output_format="json",
                output_file=None
            ),
            "standard": BenchmarkConfig(
                name="Standard Benchmark",
                description="Standard benchmark with moderate iterations",
                iterations=20,
                warmup_iterations=3,
                parallel_runs=2,
                timeout_seconds=600,
                collect_system_metrics=True,
                collect_detailed_metrics=True,
                output_format="html",
                output_file=None
            ),
            "comprehensive": BenchmarkConfig(
                name="Comprehensive Benchmark",
                description="Comprehensive benchmark with many iterations",
                iterations=100,
                warmup_iterations=10,
                parallel_runs=4,
                timeout_seconds=1800,
                collect_system_metrics=True,
                collect_detailed_metrics=True,
                output_format="html",
                output_file=None
            ),
            "stress": BenchmarkConfig(
                name="Stress Test",
                description="Stress test with high load",
                iterations=50,
                warmup_iterations=5,
                parallel_runs=8,
                timeout_seconds=1200,
                collect_system_metrics=True,
                collect_detailed_metrics=True,
                output_format="json",
                output_file=None
            )
        }

        return templates

    def _load_performance_baselines(self):
        """Load performance baselines for comparison"""
        baseline_file = os.path.join(getattr(self.config, 'performance_metrics_dir', './performance-metrics'), "baselines.json")
        if os.path.exists(baseline_file):
            try:
                with open(baseline_file, 'r') as f:
                    self._performance_baselines = json.load(f)
                self.logger.info("Loaded performance baselines for benchmarking")
            except Exception as e:
                self.logger.warning(f"Failed to load baselines: {e}")

    def run_benchmark(self, benchmark_name: str, operation_func: Callable,
                      operation_args: Tuple = (), operation_kwargs: Dict = None,
                      config: Optional[BenchmarkConfig] = None) -> BenchmarkResult:
        """Run a benchmark for a specific operation"""
        if operation_kwargs is None:
            operation_kwargs = {}

        # Use template if no config provided
        if config is None:
            if benchmark_name in self._benchmark_templates:
                config = self._benchmark_templates[benchmark_name]
            else:
                config = self._benchmark_templates["standard"]

        self.logger.info(f"Starting benchmark: {benchmark_name}")
self.logger.info(f"Configuration: {iterations} iterations, {parallel_runs} parallel runs")
        start_time = datetime.now()
        results = []
        metrics_list = []

        # Warmup runs
        if config.warmup_iterations > 0:
            self.logger.info(f"Running {config.warmup_iterations} warmup iterations")
            for i in range(config.warmup_iterations):
                try:
                    operation_func(*operation_args, **operation_kwargs)
                except Exception as e:
                    self.logger.warning(f"Warmup iteration {i+1} failed: {e}")

        # Main benchmark runs
        self.logger.info(f"Running {config.iterations} benchmark iterations")

        if config.parallel_runs > 1:
            results = self._run_parallel_benchmark(operation_func, operation_args, operation_kwargs, config)
        else:
            results = self._run_sequential_benchmark(operation_func, operation_args, operation_kwargs, config)

        # Collect system metrics if enabled
        if config.collect_system_metrics:
            metrics_list = self._collect_benchmark_metrics(results, config)

        # Calculate statistics
        durations = [r["duration"] for r in results if r["success"]]
        successful_iterations = len(durations)
        failed_iterations = len(results) - successful_iterations

        if not durations:
            raise PerformanceError("No successful benchmark iterations")

        # Calculate performance statistics
        stats = self._calculate_performance_statistics(durations)

        # Calculate system impact
        system_impact = self._calculate_system_impact(metrics_list) if metrics_list else {}

        # Generate analysis and recommendations
        analysis = self._analyze_benchmark_results(stats, system_impact)
        recommendations = self._generate_benchmark_recommendations(analysis, stats)

        # Create benchmark result
        end_time = datetime.now()
        total_duration = (end_time - start_time).total_seconds()

        benchmark_result = BenchmarkResult(
            benchmark_name=benchmark_name,
            config=config,
            start_time=start_time,
            end_time=end_time,
            total_duration=total_duration,
            iterations=config.iterations,
            successful_iterations=successful_iterations,
            failed_iterations=failed_iterations,
            durations=durations,
            average_duration=stats["average"],
            min_duration=stats["min"],
            max_duration=stats["max"],
            median_duration=stats["median"],
            standard_deviation=stats["std_dev"],
            coefficient_of_variation=stats["cv"],
            percentiles=stats["percentiles"],
            system_impact=system_impact,
            metrics=metrics_list,
            analysis=analysis,
            recommendations=recommendations,
            system_info=self._system_info,
            benchmark_version="1.0.0"
        )

        # Store result
        self._benchmark_results[benchmark_name] = benchmark_result
        self._benchmark_history.append(benchmark_result)

        # Save result
        self._save_benchmark_result(benchmark_result)

        self.logger.info(f"Benchmark completed: {benchmark_name}")
        self.logger.info(f"Results: {successful_iterations}/{config.iterations} successful, "
                         f"avg duration: {stats['average']:.3f}s")

        return benchmark_result

    def _run_sequential_benchmark(self, operation_func: Callable, operation_args: Tuple,
                                  operation_kwargs: Dict, config: BenchmarkConfig) -> List[Dict[str, Any]]:
        """Run benchmark iterations sequentially"""
        results = []

        for i in range(config.iterations):
            self.logger.debug(f"Running iteration {i+1}/{config.iterations}")

            try:
                start_time = time.time()
                result = operation_func(*operation_args, **operation_kwargs)
                end_time = time.time()

                iteration_result = {
                    "iteration": i + 1,
                    "success": True,
                    "duration": end_time - start_time,
                    "result": result,
                    "timestamp": datetime.now()
                }

                results.append(iteration_result)

            except Exception as e:
                self.logger.warning(f"Iteration {i+1} failed: {e}")
                iteration_result = {
                    "iteration": i + 1,
                    "success": False,
                    "duration": 0,
                    "error": str(e),
                    "timestamp": datetime.now()
                }
                results.append(iteration_result)

        return results

    def _run_parallel_benchmark(self, operation_func: Callable, operation_args: Tuple,
                                operation_kwargs: Dict, config: BenchmarkConfig) -> List[Dict[str, Any]]:
        """Run benchmark iterations in parallel"""
        results = []

        def run_iteration(iteration_num):
            try:
                start_time = time.time()
                result = operation_func(*operation_args, **operation_kwargs)
                end_time = time.time()

                return {
                    "iteration": iteration_num,
                    "success": True,
                    "duration": end_time - start_time,
                    "result": result,
                    "timestamp": datetime.now()
                }

            except Exception as e:
                self.logger.warning(f"Iteration {iteration_num} failed: {e}")
                return {
                    "iteration": iteration_num,
                    "success": False,
                    "duration": 0,
                    "error": str(e),
                    "timestamp": datetime.now()
                }

        # Use ThreadPoolExecutor for parallel execution
        with ThreadPoolExecutor(max_workers=config.parallel_runs) as executor:
            future_to_iteration = {
                executor.submit(run_iteration, i + 1): i + 1
                for i in range(config.iterations)
            }

            for future in as_completed(future_to_iteration):
                result = future.result()
                results.append(result)

        # Sort results by iteration number
        results.sort(key=lambda x: x["iteration"])
        return results

    def _collect_benchmark_metrics(self, results: List[Dict[str, Any]],
                                   config: BenchmarkConfig) -> List[BenchmarkMetrics]:
        """Collect system metrics during benchmarking"""
        metrics_list = []

        for result in results:
            if not result["success"]:
                continue

            try:
                # Collect system metrics
                cpu_percent = psutil.cpu_percent(interval=0.1)
                memory = psutil.virtual_memory()
                disk_io = psutil.disk_io_counters()
                net_io = psutil.net_io_counters()

                # Get load average if available
                try:
                    load_avg = os.getloadavg()
                except (OSError, AttributeError):
                    load_avg = (0.0, 0.0, 0.0)

                # Get disk usage
                disk_usage = psutil.disk_usage('/')

                # Get network connections count
                try:
                    net_connections = len(psutil.net_connections())
                except (OSError, psutil.AccessDenied):
                    net_connections = 0

                metrics = BenchmarkMetrics(
                    timestamp=result["timestamp"],
                    duration=result["duration"],
                    cpu_percent=cpu_percent,
                    memory_mb=memory.used / (1024 * 1024),
                    disk_io_read_mb=disk_io.read_bytes / (1024 * 1024) if disk_io else 0,
                    disk_io_write_mb=disk_io.write_bytes / (1024 * 1024) if disk_io else 0,
                    network_io_mb=(net_io.bytes_sent + net_io.bytes_recv) / (1024 * 1024) if net_io else 0,
                    chroot_size_mb=0,  # Would need to be calculated from actual chroot
                    cache_hit_rate=0.0,  # Would need to be calculated from cache metrics
                    parallel_efficiency=1.0,  # Would need to be calculated
                    resource_utilization=0.0,  # Would need to be calculated
                    system_cpu_percent=cpu_percent,
                    system_memory_percent=memory.percent,
                    system_load_average=load_avg,
                    system_disk_usage_percent=disk_usage.percent,
                    system_network_connections=net_connections
                )

                metrics_list.append(metrics)

            except Exception as e:
                self.logger.warning(f"Failed to collect metrics for iteration {result['iteration']}: {e}")

        return metrics_list

    def _calculate_performance_statistics(self, durations: List[float]) -> Dict[str, Any]:
        """Calculate comprehensive performance statistics"""
        if not durations:
            return {}

        # Basic statistics
        avg_duration = statistics.mean(durations)
        min_duration = min(durations)
        max_duration = max(durations)
        median_duration = statistics.median(durations)

        # Standard deviation and coefficient of variation
        try:
            std_dev = statistics.stdev(durations)
            cv = std_dev / avg_duration if avg_duration > 0 else 0
        except statistics.StatisticsError:
            std_dev = 0
            cv = 0

        # Percentiles
        sorted_durations = sorted(durations)
        percentiles = {
            "p10": sorted_durations[int(0.1 * len(sorted_durations))],
            "p25": sorted_durations[int(0.25 * len(sorted_durations))],
            "p50": sorted_durations[int(0.5 * len(sorted_durations))],
            "p75": sorted_durations[int(0.75 * len(sorted_durations))],
            "p90": sorted_durations[int(0.9 * len(sorted_durations))],
            "p95": sorted_durations[int(0.95 * len(sorted_durations))],
            "p99": sorted_durations[int(0.99 * len(sorted_durations))]
        }

        return {
            "average": avg_duration,
            "min": min_duration,
            "max": max_duration,
            "median": median_duration,
            "std_dev": std_dev,
            "cv": cv,
            "percentiles": percentiles,
            # Keep the raw durations so _analyze_benchmark_results can run
            # its anomaly detection against them
            "durations": durations
        }
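For intuition, the same statistics computed by hand on an illustrative set of durations; the 0.3 coefficient-of-variation threshold is the one _analyze_benchmark_results below uses to flag "poor" stability, and the percentile lookup mirrors the nearest-rank indexing above:

import statistics

durations = [1.0, 1.1, 0.9, 1.0, 5.0]  # illustrative values
avg = statistics.mean(durations)        # 1.8
cv = statistics.stdev(durations) / avg  # ~0.99, well above the 0.3 "poor" threshold
p90 = sorted(durations)[int(0.9 * len(durations))]  # nearest-rank style -> 5.0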
    def _calculate_system_impact(self, metrics_list: List[BenchmarkMetrics]) -> Dict[str, float]:
        """Calculate system impact during benchmarking"""
        if not metrics_list:
            return {}

        # Calculate averages across all metrics
        avg_cpu = statistics.mean(m.cpu_percent for m in metrics_list)
        avg_memory = statistics.mean(m.memory_mb for m in metrics_list)
        avg_disk_read = statistics.mean(m.disk_io_read_mb for m in metrics_list)
        avg_disk_write = statistics.mean(m.disk_io_write_mb for m in metrics_list)
        avg_network = statistics.mean(m.network_io_mb for m in metrics_list)

        # Calculate peak values
        peak_cpu = max(m.cpu_percent for m in metrics_list)
        peak_memory = max(m.memory_mb for m in metrics_list)

        return {
            "avg_cpu_percent": avg_cpu,
            "avg_memory_mb": avg_memory,
            "avg_disk_read_mb": avg_disk_read,
            "avg_disk_write_mb": avg_disk_write,
            "avg_network_mb": avg_network,
            "peak_cpu_percent": peak_cpu,
            "peak_memory_mb": peak_memory
        }

    def _analyze_benchmark_results(self, stats: Dict[str, Any],
                                   system_impact: Dict[str, float]) -> Dict[str, Any]:
        """Analyze benchmark results for insights"""
        analysis = {
            "performance_stability": "unknown",
            "system_impact_level": "unknown",
            "optimization_opportunities": [],
            "anomalies": []
        }

        # Analyze performance stability
        cv = stats.get("cv", 0)
        if cv < 0.1:
            analysis["performance_stability"] = "excellent"
        elif cv < 0.2:
            analysis["performance_stability"] = "good"
        elif cv < 0.3:
            analysis["performance_stability"] = "fair"
        else:
            analysis["performance_stability"] = "poor"
            analysis["optimization_opportunities"].append("High performance variability detected")

        # Analyze system impact
        avg_cpu = system_impact.get("avg_cpu_percent", 0)
        avg_memory = system_impact.get("avg_memory_mb", 0)

        if avg_cpu < 30:
            analysis["system_impact_level"] = "low"
            analysis["optimization_opportunities"].append("CPU utilization is low, consider increasing parallelization")
        elif avg_cpu < 70:
            analysis["system_impact_level"] = "moderate"
        else:
            analysis["system_impact_level"] = "high"
            analysis["optimization_opportunities"].append("High CPU utilization, consider reducing load")

        if avg_memory > 2048:  # 2GB
            analysis["optimization_opportunities"].append("High memory usage, consider optimizing memory allocation")

        # Detect anomalies
        durations = stats.get("durations", [])
        if durations:
            avg_duration = stats.get("average", 0)
            for duration in durations:
                if abs(duration - avg_duration) > 2 * stats.get("std_dev", 0):
                    analysis["anomalies"].append(f"Duration anomaly: {duration:.3f}s (avg: {avg_duration:.3f}s)")

        return analysis

    def _generate_benchmark_recommendations(self, analysis: Dict[str, Any],
                                            stats: Dict[str, Any]) -> List[str]:
        """Generate actionable recommendations based on benchmark results"""
        recommendations = []

        # Performance stability recommendations
        stability = analysis.get("performance_stability", "unknown")
        if stability in ["fair", "poor"]:
            recommendations.append("Investigate performance variability - check for external factors affecting performance")
            recommendations.append("Consider running more iterations to get more stable results")

        # System impact recommendations
        impact_level = analysis.get("system_impact_level", "unknown")
        if impact_level == "low":
            recommendations.append("System resources are underutilized - consider increasing workload or parallelization")
        elif impact_level == "high":
            recommendations.append("System is under high load - consider reducing workload or optimizing operations")

        # Optimization recommendations
        for opportunity in analysis.get("optimization_opportunities", []):
            recommendations.append(opportunity)

        # General recommendations
        if stats.get("cv", 0) > 0.2:
            recommendations.append("High coefficient of variation suggests inconsistent performance - investigate root causes")

        if len(recommendations) == 0:
            recommendations.append("Performance is within acceptable parameters - continue monitoring")

        return recommendations

    def _save_benchmark_result(self, result: BenchmarkResult):
        """Save benchmark result to file"""
        try:
            metrics_dir = getattr(self.config, 'performance_metrics_dir', './performance-metrics')
            os.makedirs(metrics_dir, exist_ok=True)

            timestamp = result.start_time.strftime("%Y%m%d_%H%M%S")
            filename = f"benchmark_{result.benchmark_name}_{timestamp}.json"
            filepath = os.path.join(metrics_dir, filename)

            # Convert to dict for JSON serialization
            result_dict = asdict(result)
            result_dict["start_time"] = result.start_time.isoformat()
            result_dict["end_time"] = result.end_time.isoformat()
result_dict["timestamp"] = result.timestamp.isoformat()
            with open(filepath, 'w') as f:
                json.dump(result_dict, f, indent=2, default=str)

            self.logger.info(f"Benchmark result saved: {filepath}")

        except Exception as e:
            self.logger.error(f"Failed to save benchmark result: {e}")

    def compare_benchmarks(self, benchmark_names: List[str]) -> Dict[str, Any]:
        """Compare multiple benchmark results"""
        if len(benchmark_names) < 2:
            raise ValueError("Need at least 2 benchmark names for comparison")

        comparison = {
            "benchmarks": benchmark_names,
            "comparison_date": datetime.now().isoformat(),
            "results": {},
            "analysis": {},
            "recommendations": []
        }

        # Collect benchmark results
        for name in benchmark_names:
            if name in self._benchmark_results:
                result = self._benchmark_results[name]
                comparison["results"][name] = {
                    "average_duration": result.average_duration,
                    "min_duration": result.min_duration,
                    "max_duration": result.max_duration,
                    "standard_deviation": result.standard_deviation,
                    "coefficient_of_variation": result.coefficient_of_variation,
                    "successful_iterations": result.successful_iterations,
                    "total_iterations": result.iterations
                }

        # Perform comparison analysis
        if len(comparison["results"]) >= 2:
            comparison["analysis"] = self._analyze_benchmark_comparison(comparison["results"])
            comparison["recommendations"] = self._generate_comparison_recommendations(comparison["analysis"])

        return comparison

    def _analyze_benchmark_comparison(self, results: Dict[str, Any]) -> Dict[str, Any]:
        """Analyze comparison between benchmark results"""
        analysis = {
            "fastest_benchmark": None,
            "slowest_benchmark": None,
            "most_stable_benchmark": None,
            "least_stable_benchmark": None,
            "performance_differences": {},
            "stability_differences": {}
        }

        if len(results) < 2:
            return analysis

        # Find fastest and slowest
        avg_durations = {name: data["average_duration"] for name, data in results.items()}
        fastest = min(avg_durations, key=avg_durations.get)
        slowest = max(avg_durations, key=avg_durations.get)

        analysis["fastest_benchmark"] = fastest
        analysis["slowest_benchmark"] = slowest

        # Find most and least stable
        cv_values = {name: data["coefficient_of_variation"] for name, data in results.items()}
        most_stable = min(cv_values, key=cv_values.get)
        least_stable = max(cv_values, key=cv_values.get)

        analysis["most_stable_benchmark"] = most_stable
        analysis["least_stable_benchmark"] = least_stable

        # Calculate performance differences
        fastest_avg = avg_durations[fastest]
        for name, data in results.items():
            if name != fastest:
                diff_percent = ((data["average_duration"] - fastest_avg) / fastest_avg) * 100
                analysis["performance_differences"][name] = {
                    "vs_fastest_percent": diff_percent,
                    "vs_fastest_seconds": data["average_duration"] - fastest_avg
                }

        # Calculate stability differences
        most_stable_cv = cv_values[most_stable]
        for name, data in results.items():
            if name != most_stable:
                cv_diff = data["coefficient_of_variation"] - most_stable_cv
                analysis["stability_differences"][name] = {
                    "vs_most_stable_cv": cv_diff,
                    "stability_ratio": data["coefficient_of_variation"] / most_stable_cv
                }

        return analysis

    def _generate_comparison_recommendations(self, analysis: Dict[str, Any]) -> List[str]:
        """Generate recommendations based on benchmark comparison"""
        recommendations = []

        fastest = analysis.get("fastest_benchmark")
        slowest = analysis.get("slowest_benchmark")
        most_stable = analysis.get("most_stable_benchmark")
        least_stable = analysis.get("least_stable_benchmark")

        if fastest and slowest and fastest != slowest:
            slowdown_percent = analysis["performance_differences"][slowest]["vs_fastest_percent"]
            recommendations.append(f"Benchmark '{slowest}' is {slowdown_percent:.1f}% slower than '{fastest}' - investigate performance differences")
        if most_stable and least_stable and most_stable != least_stable:
            stability_ratio = analysis["stability_differences"][least_stable]["stability_ratio"]
            recommendations.append(f"Benchmark '{least_stable}' is {stability_ratio:.2f}x less stable than '{most_stable}' - investigate variability causes")

        # General recommendations
        if len(analysis.get("performance_differences", {})) > 0:
            recommendations.append("Consider using the fastest benchmark configuration for production")

        if len(analysis.get("stability_differences", {})) > 0:
            recommendations.append("Consider using the most stable benchmark configuration for critical operations")

        return recommendations

    def list_benchmarks(self) -> List[str]:
        """List all available benchmark templates"""
        return list(self._benchmark_templates.keys())

    def get_benchmark_result(self, benchmark_name: str) -> Optional[BenchmarkResult]:
        """Get a specific benchmark result"""
        return self._benchmark_results.get(benchmark_name)

    def get_benchmark_history(self) -> List[BenchmarkResult]:
        """Get all benchmark results"""
        return self._benchmark_history.copy()

    def clear_benchmark_history(self):
        """Clear benchmark history"""
        self._benchmark_history.clear()
        self._benchmark_results.clear()
        self.logger.info("Benchmark history cleared")
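A minimal sketch of driving this runner, assuming psutil is installed and the package is importable; the config object and the build callable are hypothetical stand-ins, while BenchmarkRunner, the "quick"/"standard" templates, and compare_benchmarks come from the module above:

from types import SimpleNamespace
from deb_mock.benchmarking import BenchmarkRunner

# Hypothetical config: the runner itself only reads performance_metrics_dir
config = SimpleNamespace(performance_metrics_dir="./performance-metrics")
runner = BenchmarkRunner(config)

def build_once():
    # Stand-in for a real deb-mock build invocation
    sum(range(1_000_000))

result = runner.run_benchmark("quick", build_once)
print(result.average_duration, result.recommendations)

# Comparing two stored runs (names double as template names here)
runner.run_benchmark("standard", build_once)
report = runner.compare_benchmarks(["quick", "standard"])
print(report["analysis"]["fastest_benchmark"])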
deb_mock/chroot.py

@@ -5,10 +5,12 @@ Chroot management for deb-mock
 import os
 import shutil
 import subprocess
 import tempfile
 from pathlib import Path
-from typing import List
+from typing import List, Dict, Optional

 from .exceptions import ChrootError
+from .uid_manager import UIDManager


 class ChrootManager:

@@ -16,6 +18,8 @@ class ChrootManager:

     def __init__(self, config):
         self.config = config
+        self._active_mounts = {}  # Track active mounts per chroot
+        self.uid_manager = UIDManager(config)

     def create_chroot(self, chroot_name: str, arch: str = None, suite: str = None) -> None:
         """Create a new chroot environment"""

@@ -30,6 +34,12 @@ class ChrootManager:
             self._create_bootstrap_chroot(chroot_name)
         else:
             self._create_standard_chroot(chroot_name)

+        # Setup advanced mounts after chroot creation
+        self._setup_advanced_mounts(chroot_name)
+
+        # Setup UID/GID management
+        self._setup_chroot_users(chroot_name)
+
     def _create_bootstrap_chroot(self, chroot_name: str) -> None:
         """

@@ -49,7 +59,7 @@ class ChrootManager:
         try:
             # Create final chroot using debootstrap from within bootstrap
             cmd = [
-                "debootstrap",
+                "/usr/sbin/debootstrap",
                 "--arch",
                 self.config.architecture,
                 self.config.suite,

@@ -94,7 +104,7 @@ class ChrootManager:

         # Run debootstrap
         cmd = [
-            "debootstrap",
+            "/usr/sbin/debootstrap",
             "--arch",
             self.config.architecture,
             self.config.suite,

@@ -108,7 +118,7 @@ class ChrootManager:
             raise ChrootError(
                 f"debootstrap failed: {result.stderr}",
                 chroot_name=chroot_name,
-                operation="debootstrap",
+                operation="/usr/sbin/debootstrap",
                 chroot_path=chroot_path,
             )

@@ -205,7 +215,7 @@ preserve-environment=true
     def _initialize_chroot(self, chroot_path: str, arch: str, suite: str) -> None:
         """Initialize chroot using debootstrap"""
         cmd = [
-            "debootstrap",
+            "/usr/sbin/debootstrap",
             "--arch",
             arch,
             "--variant=buildd",
@@ -487,3 +497,251 @@ preserve-environment=true
                self.scrub_chroot(chroot_name)
            except Exception as e:
                print(f"Warning: Failed to scrub chroot '{chroot_name}': {e}")

    def _setup_advanced_mounts(self, chroot_name: str) -> None:
        """Setup advanced mount points for the chroot"""
        chroot_path = os.path.join(self.config.chroot_dir, chroot_name)

        # Initialize mount tracking for this chroot
        self._active_mounts[chroot_name] = []

        try:
            # Setup standard system mounts
            if self.config.mount_proc:
                self._mount_proc(chroot_name, chroot_path)

            if self.config.mount_sys:
                self._mount_sys(chroot_name, chroot_path)

            if self.config.mount_dev:
                self._mount_dev(chroot_name, chroot_path)

            if self.config.mount_devpts:
                self._mount_devpts(chroot_name, chroot_path)

            if self.config.mount_tmp:
                self._mount_tmp(chroot_name, chroot_path)

            # Setup custom bind mounts
            for bind_mount in self.config.bind_mounts:
                self._setup_bind_mount(chroot_name, bind_mount)

            # Setup tmpfs mounts
            for tmpfs_mount in self.config.tmpfs_mounts:
                self._setup_tmpfs_mount(chroot_name, tmpfs_mount)

            # Setup overlay mounts
            for overlay_mount in self.config.overlay_mounts:
                self._setup_overlay_mount(chroot_name, overlay_mount)

        except Exception as e:
            raise ChrootError(
                f"Failed to setup advanced mounts: {e}",
                chroot_name=chroot_name,
                operation="mount_setup"
            )

    def _mount_proc(self, chroot_name: str, chroot_path: str) -> None:
        """Mount /proc in the chroot"""
        proc_path = os.path.join(chroot_path, "proc")
        if not os.path.exists(proc_path):
            os.makedirs(proc_path, exist_ok=True)

        try:
            subprocess.run(["mount", "--bind", "/proc", proc_path], check=True)
            self._active_mounts[chroot_name].append(("proc", proc_path))
        except subprocess.CalledProcessError as e:
            print(f"Warning: Failed to mount /proc: {e}")

    def _mount_sys(self, chroot_name: str, chroot_path: str) -> None:
        """Mount /sys in the chroot"""
        sys_path = os.path.join(chroot_path, "sys")
        if not os.path.exists(sys_path):
            os.makedirs(sys_path, exist_ok=True)

        try:
            subprocess.run(["mount", "--bind", "/sys", sys_path], check=True)
            self._active_mounts[chroot_name].append(("sys", sys_path))
        except subprocess.CalledProcessError as e:
            print(f"Warning: Failed to mount /sys: {e}")

    def _mount_dev(self, chroot_name: str, chroot_path: str) -> None:
        """Mount /dev in the chroot"""
        dev_path = os.path.join(chroot_path, "dev")
        if not os.path.exists(dev_path):
            os.makedirs(dev_path, exist_ok=True)

        try:
            subprocess.run(["mount", "--bind", "/dev", dev_path], check=True)
            self._active_mounts[chroot_name].append(("dev", dev_path))
        except subprocess.CalledProcessError as e:
            print(f"Warning: Failed to mount /dev: {e}")

    def _mount_devpts(self, chroot_name: str, chroot_path: str) -> None:
        """Mount /dev/pts in the chroot"""
        devpts_path = os.path.join(chroot_path, "dev", "pts")
        if not os.path.exists(devpts_path):
            os.makedirs(devpts_path, exist_ok=True)

        try:
            subprocess.run(["mount", "-t", "devpts", "devpts", devpts_path], check=True)
            self._active_mounts[chroot_name].append(("devpts", devpts_path))
        except subprocess.CalledProcessError as e:
            print(f"Warning: Failed to mount /dev/pts: {e}")

    def _mount_tmp(self, chroot_name: str, chroot_path: str) -> None:
        """Mount /tmp in the chroot"""
        tmp_path = os.path.join(chroot_path, "tmp")
        if not os.path.exists(tmp_path):
            os.makedirs(tmp_path, exist_ok=True)

        try:
            # Use tmpfs for better performance if configured
            if self.config.use_tmpfs:
                subprocess.run([
                    "mount", "-t", "tmpfs", "-o", f"size={self.config.tmpfs_size}",
                    "tmpfs", tmp_path
                ], check=True)
                self._active_mounts[chroot_name].append(("tmpfs", tmp_path))
            else:
                # Bind mount host /tmp
                subprocess.run(["mount", "--bind", "/tmp", tmp_path], check=True)
                self._active_mounts[chroot_name].append(("tmp", tmp_path))
        except subprocess.CalledProcessError as e:
            print(f"Warning: Failed to mount /tmp: {e}")

    def _setup_bind_mount(self, chroot_name: str, bind_mount: Dict[str, str]) -> None:
        """Setup a custom bind mount"""
        host_path = bind_mount.get("host")
        chroot_path = bind_mount.get("chroot")
        options = bind_mount.get("options", "")

        if not host_path or not chroot_path:
            print(f"Warning: Invalid bind mount configuration: {bind_mount}")
            return

        # Create chroot mount point
        full_chroot_path = os.path.join(self.config.chroot_dir, chroot_name, chroot_path.lstrip("/"))
        os.makedirs(full_chroot_path, exist_ok=True)

        try:
            mount_cmd = ["mount", "--bind"]
            if options:
                mount_cmd.extend(["-o", options])
            mount_cmd.extend([host_path, full_chroot_path])

            subprocess.run(mount_cmd, check=True)
            self._active_mounts[chroot_name].append(("bind", full_chroot_path))

        except subprocess.CalledProcessError as e:
            print(f"Warning: Failed to setup bind mount {host_path} -> {chroot_path}: {e}")

    def _setup_tmpfs_mount(self, chroot_name: str, tmpfs_mount: Dict[str, str]) -> None:
        """Setup a tmpfs mount"""
        chroot_path = tmpfs_mount.get("chroot")
        size = tmpfs_mount.get("size", "100M")
        options = tmpfs_mount.get("options", "")

        if not chroot_path:
            print(f"Warning: Invalid tmpfs mount configuration: {tmpfs_mount}")
            return

        # Create chroot mount point
        full_chroot_path = os.path.join(self.config.chroot_dir, chroot_name, chroot_path.lstrip("/"))
        os.makedirs(full_chroot_path, exist_ok=True)

        try:
            mount_cmd = ["mount", "-t", "tmpfs", "-o", f"size={size}"]
            if options:
                mount_cmd[-1] += f",{options}"
            mount_cmd.extend(["tmpfs", full_chroot_path])

            subprocess.run(mount_cmd, check=True)
            self._active_mounts[chroot_name].append(("tmpfs", full_chroot_path))

        except subprocess.CalledProcessError as e:
            print(f"Warning: Failed to setup tmpfs mount {chroot_path}: {e}")

    def _setup_overlay_mount(self, chroot_name: str, overlay_mount: Dict[str, str]) -> None:
        """Setup an overlay mount (requires overlayfs support)"""
        lower_dir = overlay_mount.get("lower")
        upper_dir = overlay_mount.get("upper")
        work_dir = overlay_mount.get("work")
        chroot_path = overlay_mount.get("chroot")

        if not all([lower_dir, upper_dir, work_dir, chroot_path]):
            print(f"Warning: Invalid overlay mount configuration: {overlay_mount}")
            return

        # Create chroot mount point
        full_chroot_path = os.path.join(self.config.chroot_dir, chroot_name, chroot_path.lstrip("/"))
        os.makedirs(full_chroot_path, exist_ok=True)

        try:
            # Create work directory if it doesn't exist
            os.makedirs(work_dir, exist_ok=True)

            mount_cmd = [
                "mount", "-t", "overlay", "overlay",
                "-o", f"lowerdir={lower_dir},upperdir={upper_dir},workdir={work_dir}",
                full_chroot_path
            ]

            subprocess.run(mount_cmd, check=True)
            self._active_mounts[chroot_name].append(("overlay", full_chroot_path))

        except subprocess.CalledProcessError as e:
            print(f"Warning: Failed to setup overlay mount {chroot_path}: {e}")
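The three mount helpers above read their entries from config.bind_mounts, config.tmpfs_mounts, and config.overlay_mounts. A sketch of what those values might look like; the paths are illustrative, and only the dictionary keys are taken from the helpers above:

from types import SimpleNamespace

# Hypothetical mount configuration, using exactly the keys the helpers read
config = SimpleNamespace(
    bind_mounts=[{"host": "/var/cache/apt/archives", "chroot": "/var/cache/apt/archives", "options": "ro"}],
    tmpfs_mounts=[{"chroot": "/build", "size": "2G", "options": "noatime"}],
    overlay_mounts=[{"lower": "/srv/base", "upper": "/srv/upper", "work": "/srv/work", "chroot": "/mnt/overlay"}],
)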
    def cleanup_mounts(self, chroot_name: str) -> None:
        """Clean up all mounts for a chroot"""
        if chroot_name not in self._active_mounts:
            return

        for mount_type, mount_path in reversed(self._active_mounts[chroot_name]):
            try:
                subprocess.run(["umount", mount_path], check=True)
                print(f"Unmounted {mount_type}: {mount_path}")
            except subprocess.CalledProcessError as e:
                print(f"Warning: Failed to unmount {mount_type} {mount_path}: {e}")

        # Clear the mount list
        self._active_mounts[chroot_name] = []

    def list_mounts(self, chroot_name: str) -> List[Dict[str, str]]:
        """List all active mounts for a chroot"""
        if chroot_name not in self._active_mounts:
            return []

        mounts = []
        for mount_type, mount_path in self._active_mounts[chroot_name]:
            mounts.append({
                "type": mount_type,
                "path": mount_path,
                "chroot": chroot_name
            })

        return mounts

    def _setup_chroot_users(self, chroot_name: str) -> None:
        """Setup users and permissions in the chroot"""
        chroot_path = os.path.join(self.config.chroot_dir, chroot_name)

        try:
            # Create the build user
            self.uid_manager.create_chroot_user(chroot_path)

            # Copy host users if configured
            if hasattr(self.config, 'copy_host_users'):
                for username in self.config.copy_host_users:
                    self.uid_manager.copy_host_user(chroot_path, username)

            # Setup chroot permissions
            self.uid_manager.setup_chroot_permissions(chroot_path)

        except Exception as e:
            raise ChrootError(
                f"Failed to setup chroot users: {e}",
                chroot_name=chroot_name,
                operation="user_setup"
            )
deb_mock/cli.py (819 lines changed)
@@ -4,6 +4,7 @@ Command-line interface for deb-mock
"""

import sys
+import os

import click


@@ -130,6 +131,108 @@ def build(
    click.echo("Build completed successfully")


@main.command()
@click.argument("source_packages", nargs=-1, type=click.Path(exists=True))
@click.option("--chroot", help="Chroot environment to use")
@click.option("--max-workers", type=int, help="Maximum number of parallel workers")
@click.option("--arch", help="Target architecture")
@click.option("--output-dir", "-o", type=click.Path(), help="Output directory for build artifacts")
@click.option("--keep-chroot", is_flag=True, help="Keep chroots after build (for debugging)")
@click.option("--no-check", is_flag=True, help="Skip running tests during build")
@click.option("--offline", is_flag=True, help="Build in offline mode (no network access)")
@click.option("--build-timeout", type=int, help="Build timeout in seconds")
@click.option("--force-arch", help="Force target architecture")
@click.option("--cleanup-after", is_flag=True, help="Clean chroots after build")
@click.option("--no-cleanup-after", is_flag=True, help="Don't clean chroots after build")
@click.pass_context
@handle_exception
def build_parallel(
    ctx,
    source_packages,
    chroot,
    max_workers,
    arch,
    output_dir,
    keep_chroot,
    no_check,
    offline,
    build_timeout,
    force_arch,
    cleanup_after,
    no_cleanup_after,
):
    """
    Build multiple Debian source packages in parallel using separate chroots.

    SOURCE_PACKAGES: One or more paths to .dsc files or source package directories
    """
    if not source_packages:
        click.echo("Error: No source packages specified", err=True)
        sys.exit(1)

    if len(source_packages) == 1:
        click.echo("Warning: Only one package specified, consider using 'build' instead", err=True)

    deb_mock = DebMock(ctx.obj["config"])

    # Override config with command line options
    if chroot:
        ctx.obj["config"].chroot_name = chroot
    if arch:
        ctx.obj["config"].architecture = arch
    if output_dir:
        ctx.obj["config"].output_dir = output_dir
    if keep_chroot:
        ctx.obj["config"].keep_chroot = True
    if build_timeout:
        ctx.obj["config"].parallel_build_timeout = build_timeout
    if force_arch:
        ctx.obj["config"].architecture = force_arch
    if cleanup_after:
        ctx.obj["config"].parallel_build_cleanup = True
    if no_cleanup_after:
        ctx.obj["config"].parallel_build_cleanup = False

    # Build options
    build_kwargs = {}
    if no_check:
        build_kwargs["no_check"] = True
    if offline:
        build_kwargs["offline"] = True

    click.echo(f"Building {len(source_packages)} packages in parallel...")

    try:
        results = deb_mock.build_parallel(
            list(source_packages),
            max_workers=max_workers,
            **build_kwargs
        )

        # Display results summary
        successful = sum(1 for r in results if r.get("success", False))
        failed = len(results) - successful

        click.echo("\n=== Parallel Build Results ===")
        click.echo(f"Total packages: {len(results)}")
        click.echo(f"Successful: {successful}")
        click.echo(f"Failed: {failed}")

        if failed > 0:
            click.echo("\nFailed packages:")
            for i, result in enumerate(results):
                if not result.get("success", False):
                    click.echo(f"  {i+1}. {result.get('package_name', 'unknown')}: {result.get('error', 'Unknown error')}")

            sys.exit(1)
        else:
            click.echo("\n✅ All packages built successfully!")

    except Exception as e:
        click.echo(f"Error during parallel build: {e}", err=True)
        sys.exit(1)
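Driven from a script, the new command might be invoked like this; a sketch where the package paths are illustrative, and the build-parallel name assumes click's default underscore-to-dash command naming:

import subprocess

# Build three source packages concurrently with at most two workers
subprocess.run(
    ["deb-mock", "build-parallel", "--max-workers", "2",
     "hello_1.0-1.dsc", "libfoo_2.1-3.dsc", "bar_0.9-2.dsc"],
    check=True,
)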

@main.command()
@click.argument("source_packages", nargs=-1, type=click.Path(exists=True))
@click.option("--chroot", help="Chroot environment to use")
@@ -614,5 +717,721 @@ def debug_config(ctx, expand):
            click.echo(f"  {plugin_name}: {plugin_config}")


@main.command()
@click.argument("chroot_name")
@click.pass_context
@handle_exception
def list_mounts(ctx, chroot_name):
    """List all active mounts for a chroot"""
    deb_mock = DebMock(ctx.obj["config"])

    try:
        mounts = deb_mock.chroot_manager.list_mounts(chroot_name)

        if not mounts:
            click.echo(f"No active mounts found for chroot '{chroot_name}'")
            return

        click.echo(f"Active mounts for chroot '{chroot_name}':")
        for mount in mounts:
            click.echo(f"  {mount['type']}: {mount['path']}")

    except Exception as e:
        click.echo(f"Error listing mounts: {e}", err=True)
        sys.exit(1)


@main.command()
@click.argument("chroot_name")
@click.pass_context
@handle_exception
def cleanup_mounts(ctx, chroot_name):
    """Clean up all mounts for a chroot"""
    deb_mock = DebMock(ctx.obj["config"])

    try:
        click.echo(f"Cleaning up mounts for chroot '{chroot_name}'...")
        deb_mock.chroot_manager.cleanup_mounts(chroot_name)
        click.echo(f"✅ Mounts cleaned up for chroot '{chroot_name}'")

    except Exception as e:
        click.echo(f"Error cleaning up mounts: {e}", err=True)
        sys.exit(1)


@main.command()
@click.argument("chroot_name")
@click.option("--mount-proc/--no-mount-proc", default=True, help="Mount /proc in chroot")
@click.option("--mount-sys/--no-mount-sys", default=True, help="Mount /sys in chroot")
@click.option("--mount-dev/--no-mount-dev", default=True, help="Mount /dev in chroot")
@click.option("--mount-devpts/--no-mount-devpts", default=True, help="Mount /dev/pts in chroot")
@click.option("--mount-tmp/--no-mount-tmp", default=True, help="Mount /tmp in chroot")
@click.option("--mount-home/--no-mount-home", default=False, help="Mount /home in chroot")
@click.option("--use-tmpfs", is_flag=True, help="Use tmpfs for /tmp mount")
@click.option("--tmpfs-size", default="2G", help="Size for tmpfs mount")
@click.pass_context
@handle_exception
def setup_mounts(
    ctx,
    chroot_name,
    mount_proc,
    mount_sys,
    mount_dev,
    mount_devpts,
    mount_tmp,
    mount_home,
    use_tmpfs,
    tmpfs_size,
):
    """Setup advanced mount points for a chroot"""
    deb_mock = DebMock(ctx.obj["config"])

    # Update config with mount options
    ctx.obj["config"].mount_proc = mount_proc
    ctx.obj["config"].mount_sys = mount_sys
    ctx.obj["config"].mount_dev = mount_dev
    ctx.obj["config"].mount_devpts = mount_devpts
    ctx.obj["config"].mount_tmp = mount_tmp
    ctx.obj["config"].mount_home = mount_home
    ctx.obj["config"].use_tmpfs = use_tmpfs
    ctx.obj["config"].tmpfs_size = tmpfs_size

    try:
        click.echo(f"Setting up advanced mounts for chroot '{chroot_name}'...")

        # Clean up existing mounts first
        deb_mock.chroot_manager.cleanup_mounts(chroot_name)

        # Setup new mounts
        deb_mock.chroot_manager._setup_advanced_mounts(chroot_name)

        click.echo(f"✅ Advanced mounts setup complete for chroot '{chroot_name}'")

        # Show current mounts
        mounts = deb_mock.chroot_manager.list_mounts(chroot_name)
        if mounts:
            click.echo("\nActive mounts:")
            for mount in mounts:
                click.echo(f"  {mount['type']}: {mount['path']}")

    except Exception as e:
        click.echo(f"Error setting up mounts: {e}", err=True)
        sys.exit(1)


@main.command()
@click.argument("chroot_name")
@click.argument("host_path", type=click.Path(exists=True))
@click.argument("chroot_path")
@click.option("--options", help="Mount options (e.g., ro,noexec)")
@click.pass_context
@handle_exception
def bind_mount(ctx, chroot_name, host_path, chroot_path, options):
    """Add a custom bind mount to a chroot"""
    deb_mock = DebMock(ctx.obj["config"])

    try:
        # Create bind mount configuration
        bind_mount_config = {
            "host": host_path,
            "chroot": chroot_path,
            "options": options or ""
        }

        click.echo(f"Adding bind mount {host_path} -> {chroot_path} to chroot '{chroot_name}'...")

        # Setup the bind mount
        deb_mock.chroot_manager._setup_bind_mount(chroot_name, bind_mount_config)

        click.echo("✅ Bind mount added successfully")

    except Exception as e:
        click.echo(f"Error adding bind mount: {e}", err=True)
        sys.exit(1)


@main.command()
@click.argument("chroot_name")
@click.argument("chroot_path")
@click.option("--size", default="100M", help="Size for tmpfs mount")
@click.option("--options", help="Additional mount options")
@click.pass_context
@handle_exception
def tmpfs_mount(ctx, chroot_name, chroot_path, size, options):
    """Add a tmpfs mount to a chroot"""
    deb_mock = DebMock(ctx.obj["config"])

    try:
        # Create tmpfs mount configuration
        tmpfs_mount_config = {
            "chroot": chroot_path,
            "size": size,
            "options": options or ""
        }

        click.echo(f"Adding tmpfs mount {chroot_path} (size: {size}) to chroot '{chroot_name}'...")

        # Setup the tmpfs mount
        deb_mock.chroot_manager._setup_tmpfs_mount(chroot_name, tmpfs_mount_config)

        click.echo("✅ Tmpfs mount added successfully")

    except Exception as e:
        click.echo(f"Error adding tmpfs mount: {e}", err=True)
        sys.exit(1)
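The mount commands above can be scripted the same way; a sketch where the chroot name and paths are illustrative, and the command names again assume click's default underscore-to-dash naming:

import subprocess

# Read-only bind mount of the host apt cache into the chroot
subprocess.run(
    ["deb-mock", "bind-mount", "bookworm-amd64",
     "/var/cache/apt/archives", "/var/cache/apt/archives", "--options", "ro"],
    check=True,
)

# 2G tmpfs for the build tree
subprocess.run(
    ["deb-mock", "tmpfs-mount", "bookworm-amd64", "/build", "--size", "2G"],
    check=True,
)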
@main.command()
@click.argument("chroot_name")
@click.pass_context
@handle_exception
def user_info(ctx, chroot_name):
    """Show UID/GID information for a chroot"""
    deb_mock = DebMock(ctx.obj["config"])

    try:
        user_info = deb_mock.chroot_manager.uid_manager.get_user_info()

        click.echo(f"=== UID/GID Information for chroot '{chroot_name}' ===")
        click.echo(f"Current user: {user_info['current_user']} (UID: {user_info['current_uid']}, GID: {user_info['current_gid']})")
        click.echo(f"Chroot user: {user_info['chroot_user']} (UID: {user_info['chroot_uid']}, GID: {user_info['chroot_gid']})")
        click.echo(f"Chroot group: {user_info['chroot_group']}")

        # Check if chroot user exists
        chroot_path = os.path.join(ctx.obj["config"].chroot_dir, chroot_name)
        if os.path.exists(chroot_path):
            user_valid = deb_mock.chroot_manager.uid_manager.validate_chroot_user(chroot_path)
            click.echo(f"Chroot user configured: {'✅ Yes' if user_valid else '❌ No'}")
        else:
            click.echo("Chroot does not exist")

    except Exception as e:
        click.echo(f"Error getting user info: {e}", err=True)
        sys.exit(1)


@main.command()
@click.argument("chroot_name")
@click.argument("username")
@click.pass_context
@handle_exception
def copy_host_user(ctx, chroot_name, username):
    """Copy a user from the host system to a chroot"""
    deb_mock = DebMock(ctx.obj["config"])

    try:
        click.echo(f"Copying host user '{username}' to chroot '{chroot_name}'...")

        chroot_path = os.path.join(ctx.obj["config"].chroot_dir, chroot_name)
        if not os.path.exists(chroot_path):
            click.echo(f"Error: Chroot '{chroot_name}' does not exist", err=True)
            sys.exit(1)

        deb_mock.chroot_manager.uid_manager.copy_host_user(chroot_path, username)
        click.echo(f"✅ Successfully copied host user '{username}' to chroot")

    except Exception as e:
        click.echo(f"Error copying host user: {e}", err=True)
        sys.exit(1)


@main.command()
@click.argument("chroot_name")
@click.pass_context
@handle_exception
def setup_users(ctx, chroot_name):
    """Setup users and permissions for a chroot"""
    deb_mock = DebMock(ctx.obj["config"])

    try:
        click.echo(f"Setting up users and permissions for chroot '{chroot_name}'...")

        chroot_path = os.path.join(ctx.obj["config"].chroot_dir, chroot_name)
        if not os.path.exists(chroot_path):
            click.echo(f"Error: Chroot '{chroot_name}' does not exist", err=True)
            sys.exit(1)

        # Setup chroot users
        deb_mock.chroot_manager._setup_chroot_users(chroot_name)

        click.echo(f"✅ Users and permissions setup complete for chroot '{chroot_name}'")

    except Exception as e:
        click.echo(f"Error setting up users: {e}", err=True)
        sys.exit(1)


@main.command()
@click.pass_context
@handle_exception
def plugin_info(ctx):
    """Show information about loaded plugins"""
    deb_mock = DebMock(ctx.obj["config"])

    try:
        plugin_info = deb_mock.plugin_manager.get_plugin_info()

        click.echo("=== Plugin Information ===")
        click.echo(f"Total plugins configured: {plugin_info['total_plugins']}")
        click.echo(f"Loaded plugins: {', '.join(plugin_info['loaded_plugins']) if plugin_info['loaded_plugins'] else 'None'}")
        click.echo(f"Available hook stages: {', '.join(plugin_info['available_stages']) if plugin_info['available_stages'] else 'None'}")
        click.echo(f"Plugin directory: {plugin_info['plugin_dir']}")
        click.echo(f"API version: {plugin_info['api_version']}")

    except Exception as e:
        click.echo(f"Error getting plugin info: {e}", err=True)
        sys.exit(1)


@main.command()
@click.argument("stage")
@click.pass_context
@handle_exception
def list_hooks(ctx, stage):
    """List hooks registered for a specific stage"""
    deb_mock = DebMock(ctx.obj["config"])

    try:
        hooks = deb_mock.plugin_manager.get_hooks(stage)

        if not hooks:
            click.echo(f"No hooks registered for stage '{stage}'")
            return

        click.echo(f"=== Hooks for stage '{stage}' ===")
        for i, hook in enumerate(hooks, 1):
            click.echo(f"{i}. {hook.__name__} ({hook.__module__})")

    except Exception as e:
        click.echo(f"Error listing hooks: {e}", err=True)
        sys.exit(1)


@main.command()
@click.pass_context
@handle_exception
def list_stages(ctx):
    """List all available hook stages"""
    deb_mock = DebMock(ctx.obj["config"])

    try:
        stages = deb_mock.plugin_manager.list_stages()

        if not stages:
            click.echo("No hook stages available")
            return

        click.echo("=== Available Hook Stages ===")
        for stage in stages:
            click.echo(f"- {stage}")

    except Exception as e:
        click.echo(f"Error listing stages: {e}", err=True)
        sys.exit(1)


@main.command()
@click.argument("chroot_name")
@click.pass_context
@handle_exception
def chroot_info(ctx, chroot_name):
    """Show information about a chroot"""
    deb_mock = DebMock(ctx.obj["config"])

    try:
        info = deb_mock.sbuild_wrapper.get_chroot_info(chroot_name)

        click.echo(f"=== Chroot Information: {chroot_name} ===")
        click.echo(f"Status: {info['status']}")
        click.echo(f"Architecture: {info['architecture'] or 'Unknown'}")
        click.echo(f"Distribution: {info['distribution'] or 'Unknown'}")
        if 'package_count' in info:
|
||||
click.echo(f"Package count: {info['package_count']}")
|
||||
|
||||
except Exception as e:
|
||||
click.echo(f"Error getting chroot info: {e}", err=True)
|
||||
sys.exit(1)
|
||||
|
||||
@main.command()
|
||||
@click.argument("chroot_name")
|
||||
@click.pass_context
|
||||
@handle_exception
|
||||
def update_chroot(ctx, chroot_name):
|
||||
"""Update a chroot to ensure it's current"""
|
||||
deb_mock = DebMock(ctx.obj["config"])
|
||||
|
||||
try:
|
||||
click.echo(f"Updating chroot '{chroot_name}'...")
|
||||
deb_mock.sbuild_wrapper.update_chroot(chroot_name)
|
||||
click.echo(f"✅ Chroot '{chroot_name}' updated successfully")
|
||||
|
||||
except Exception as e:
|
||||
click.echo(f"Error updating chroot: {e}", err=True)
|
||||
sys.exit(1)
|
||||
|
||||
@main.command()
|
||||
@click.argument("source_package", type=click.Path(exists=True))
|
||||
@click.option("--chroot", help="Chroot environment to use")
|
||||
@click.pass_context
|
||||
@handle_exception
|
||||
def check_deps(ctx, source_package, chroot):
|
||||
"""Check build dependencies for a source package"""
|
||||
deb_mock = DebMock(ctx.obj["config"])
|
||||
|
||||
try:
|
||||
if chroot:
|
||||
ctx.obj["config"].chroot_name = chroot
|
||||
|
||||
click.echo(f"Checking build dependencies for {source_package}...")
|
||||
deps = deb_mock.sbuild_wrapper.check_dependencies(source_package)
|
||||
|
||||
if deps["satisfied"]:
|
||||
click.echo("✅ All build dependencies are satisfied")
|
||||
else:
|
||||
click.echo("❌ Build dependencies are not satisfied")
|
||||
if deps["missing"]:
|
||||
click.echo(f"Missing dependencies: {', '.join(deps['missing'])}")
|
||||
if deps["conflicts"]:
|
||||
click.echo(f"Conflicting dependencies: {', '.join(deps['conflicts'])}")
|
||||
|
||||
except Exception as e:
|
||||
click.echo(f"Error checking dependencies: {e}", err=True)
|
||||
sys.exit(1)
|
||||
|
||||
@main.command()
|
||||
@click.argument("dependencies", nargs=-1)
|
||||
@click.option("--chroot", help="Chroot environment to use")
|
||||
@click.pass_context
|
||||
@handle_exception
|
||||
def install_deps(ctx, dependencies, chroot):
|
||||
"""Install build dependencies in a chroot"""
|
||||
if not dependencies:
|
||||
click.echo("Error: No dependencies specified", err=True)
|
||||
sys.exit(1)
|
||||
|
||||
deb_mock = DebMock(ctx.obj["config"])
|
||||
|
||||
try:
|
||||
if chroot:
|
||||
ctx.obj["config"].chroot_name = chroot
|
||||
|
||||
click.echo(f"Installing build dependencies: {', '.join(dependencies)}")
|
||||
deb_mock.sbuild_wrapper.install_build_dependencies(list(dependencies))
|
||||
click.echo("✅ Build dependencies installed successfully")
|
||||
|
||||
except Exception as e:
|
||||
click.echo(f"Error installing dependencies: {e}", err=True)
|
||||
sys.exit(1)
|
||||
|
||||
@main.command()
|
||||
@click.argument("source_package", type=click.Path(exists=True))
|
||||
@click.option("--chroot", help="Chroot environment to use")
|
||||
@click.option("--output-dir", "-o", type=click.Path(), help="Output directory for build artifacts")
|
||||
@click.option("--verbose", is_flag=True, help="Verbose output")
|
||||
@click.option("--debug", is_flag=True, help="Debug output")
|
||||
@click.option("--keep-chroot", is_flag=True, help="Keep chroot after build")
|
||||
@click.pass_context
|
||||
@handle_exception
|
||||
def build_with_sbuild(ctx, source_package, chroot, output_dir, verbose, debug, keep_chroot):
|
||||
"""Build a Debian source package using sbuild"""
|
||||
deb_mock = DebMock(ctx.obj["config"])
|
||||
|
||||
try:
|
||||
# Override config with command line options
|
||||
if chroot:
|
||||
ctx.obj["config"].chroot_name = chroot
|
||||
if output_dir:
|
||||
ctx.obj["config"].output_dir = output_dir
|
||||
if verbose:
|
||||
ctx.obj["config"].verbose = True
|
||||
if debug:
|
||||
ctx.obj["config"].debug = True
|
||||
if keep_chroot:
|
||||
ctx.obj["config"].keep_chroot = True
|
||||
|
||||
click.echo(f"Building {source_package} with sbuild...")
|
||||
|
||||
# Check dependencies first
|
||||
deps = deb_mock.sbuild_wrapper.check_dependencies(source_package)
|
||||
if not deps["satisfied"]:
|
||||
click.echo("⚠️ Build dependencies not satisfied. Attempting to install...")
|
||||
if deps["missing"]:
|
||||
deb_mock.sbuild_wrapper.install_build_dependencies(deps["missing"])
|
||||
|
||||
# Build the package
|
||||
result = deb_mock.sbuild_wrapper.build_package(source_package)
|
||||
|
||||
if result["success"]:
|
||||
click.echo("✅ Package built successfully!")
|
||||
click.echo(f"Output directory: {result['output_dir']}")
|
||||
if result["artifacts"]:
|
||||
click.echo("Build artifacts:")
|
||||
for artifact in result["artifacts"]:
|
||||
click.echo(f" {artifact}")
|
||||
else:
|
||||
click.echo("❌ Package build failed")
|
||||
sys.exit(1)
|
||||
|
||||
except Exception as e:
|
||||
click.echo(f"Error building package: {e}", err=True)
|
||||
sys.exit(1)
|
||||
|
||||
@main.command()
|
||||
@click.pass_context
|
||||
@handle_exception
|
||||
def performance_summary(ctx):
|
||||
"""Show performance summary and statistics"""
|
||||
deb_mock = DebMock(ctx.obj["config"])
|
||||
|
||||
try:
|
||||
summary = deb_mock.performance_monitor.get_performance_summary()
|
||||
|
||||
if not summary:
|
||||
click.echo("No performance data available yet")
|
||||
return
|
||||
|
||||
click.echo("=== Performance Summary ===")
|
||||
click.echo(f"Total Operations: {summary.get('total_operations', 0)}")
|
||||
click.echo(f"Total Duration: {summary.get('total_duration', 0):.2f}s")
|
||||
click.echo(f"Average Duration: {summary.get('average_duration', 0):.2f}s")
|
||||
click.echo(f"Active Operations: {summary.get('active_operations', 0)}")
|
||||
|
||||
# Operation statistics
|
||||
if 'operation_stats' in summary:
|
||||
click.echo("\n=== Operation Statistics ===")
|
||||
for op_name, stats in summary['operation_stats'].items():
|
||||
click.echo(f"{op_name}:")
|
||||
click.echo(f" Count: {stats['count']}")
|
||||
click.echo(f" Avg Duration: {stats['avg_duration']:.2f}s")
|
||||
click.echo(f" Min Duration: {stats['min_duration']:.2f}s")
|
||||
click.echo(f" Max Duration: {stats['max_duration']:.2f}s")
|
||||
|
||||
# System statistics
|
||||
if 'system_stats' in summary:
|
||||
click.echo("\n=== System Statistics ===")
|
||||
for key, value in summary['system_stats'].items():
|
||||
click.echo(f"{key}: {value:.2f}")
|
||||
|
||||
except Exception as e:
|
||||
click.echo(f"Error getting performance summary: {e}", err=True)
|
||||
sys.exit(1)
|
||||
|
||||
@main.command()
|
||||
@click.argument("operation_name")
|
||||
@click.option("--iterations", "-i", type=int, default=3, help="Number of benchmark iterations")
|
||||
@click.option("--function", "-f", help="Function to benchmark (e.g., 'build', 'chroot_creation')")
|
||||
@click.pass_context
|
||||
@handle_exception
|
||||
def benchmark(ctx, operation_name, iterations, function):
|
||||
"""Benchmark an operation multiple times"""
|
||||
deb_mock = DebMock(ctx.obj["config"])
|
||||
|
||||
try:
|
||||
if not function:
|
||||
click.echo("Error: Please specify a function to benchmark with --function", err=True)
|
||||
sys.exit(1)
|
||||
|
||||
# Get the function to benchmark
|
||||
if hasattr(deb_mock, function):
|
||||
operation_func = getattr(deb_mock, function)
|
||||
else:
|
||||
click.echo(f"Error: Function '{function}' not found", err=True)
|
||||
sys.exit(1)
|
||||
|
||||
click.echo(f"Benchmarking {operation_name} with {iterations} iterations...")
|
||||
|
||||
result = deb_mock.performance_monitor.benchmark_operation(
|
||||
operation_name, operation_func, iterations
|
||||
)
|
||||
|
||||
click.echo(f"\n=== Benchmark Results for {operation_name} ===")
|
||||
click.echo(f"Iterations: {result['iterations']}")
|
||||
click.echo(f"Average Duration: {result['average_duration']:.2f}s")
|
||||
click.echo(f"Min Duration: {result['min_duration']:.2f}s")
|
||||
click.echo(f"Max Duration: {result['max_duration']:.2f}s")
|
||||
click.echo(f"Variance: {result['variance']:.4f}")
|
||||
|
||||
except Exception as e:
|
||||
click.echo(f"Error during benchmarking: {e}", err=True)
|
||||
sys.exit(1)
|
||||
|
||||
@main.command()
|
||||
@click.option("--output-file", "-o", type=click.Path(), help="Output file for the report")
|
||||
@click.pass_context
|
||||
@handle_exception
|
||||
def performance_report(ctx, output_file):
|
||||
"""Generate a comprehensive performance report"""
|
||||
deb_mock = DebMock(ctx.obj["config"])
|
||||
|
||||
try:
|
||||
click.echo("Generating performance report...")
|
||||
|
||||
report_file = deb_mock.performance_reporter.generate_performance_report(
|
||||
deb_mock.performance_monitor, output_file
|
||||
)
|
||||
|
||||
click.echo(f"✅ Performance report generated: {report_file}")
|
||||
|
||||
except Exception as e:
|
||||
click.echo(f"Error generating performance report: {e}", err=True)
|
||||
sys.exit(1)
|
||||
|
||||
@main.command()
|
||||
@click.argument("build_id")
|
||||
@click.option("--output-file", "-o", type=click.Path(), help="Output file for the report")
|
||||
@click.pass_context
|
||||
@handle_exception
|
||||
def build_profile_report(ctx, build_id, output_file):
|
||||
"""Generate a detailed build profile report"""
|
||||
deb_mock = DebMock(ctx.obj["config"])
|
||||
|
||||
try:
|
||||
# Find the build profile
|
||||
profile = None
|
||||
for profile_id, prof in deb_mock.performance_monitor._build_profiles.items():
|
||||
if prof.build_id == build_id:
|
||||
profile = prof
|
||||
break
|
||||
|
||||
if not profile:
|
||||
click.echo(f"Error: Build profile with ID '{build_id}' not found", err=True)
|
||||
sys.exit(1)
|
||||
|
||||
click.echo(f"Generating build profile report for {profile.package_name}...")
|
||||
|
||||
report_file = deb_mock.performance_reporter.generate_build_profile_report(
|
||||
profile, output_file
|
||||
)
|
||||
|
||||
click.echo(f"✅ Build profile report generated: {report_file}")
|
||||
|
||||
except Exception as e:
|
||||
click.echo(f"Error generating build profile report: {e}", err=True)
|
||||
sys.exit(1)
|
||||
|
||||
@main.command()
|
||||
@click.pass_context
|
||||
@handle_exception
|
||||
def performance_analysis(ctx):
|
||||
"""Analyze performance and generate optimization suggestions"""
|
||||
deb_mock = DebMock(ctx.obj["config"])
|
||||
|
||||
try:
|
||||
click.echo("Analyzing performance data...")
|
||||
|
||||
# Get all build profiles
|
||||
profiles = list(deb_mock.performance_monitor._build_profiles.values())
|
||||
|
||||
if not profiles:
|
||||
click.echo("No build profiles available for analysis")
|
||||
return
|
||||
|
||||
click.echo(f"Found {len(profiles)} build profiles for analysis")
|
||||
|
||||
# Analyze each profile
|
||||
for i, profile in enumerate(profiles, 1):
|
||||
click.echo(f"\n=== Analysis {i}: {profile.package_name} ===")
|
||||
|
||||
analysis = deb_mock.performance_optimizer.analyze_build_performance(profile)
|
||||
|
||||
click.echo(f"Performance Score: {analysis['score']}/100")
|
||||
|
||||
if analysis['suggestions']:
|
||||
click.echo("\nOptimization Suggestions:")
|
||||
for suggestion in analysis['suggestions']:
|
||||
click.echo(f" • {suggestion}")
|
||||
|
||||
if analysis['automatic_tunings']:
|
||||
click.echo("\nAutomatic Tuning Recommendations:")
|
||||
for tuning in analysis['automatic_tunings']:
|
||||
click.echo(f" • {tuning['reason']}")
|
||||
click.echo(f" Current: {tuning.get('current', 'N/A')}")
|
||||
click.echo(f" Suggested: {tuning.get('suggested', 'N/A')}")
|
||||
|
||||
if analysis['manual_recommendations']:
|
||||
click.echo("\nManual Optimization Recommendations:")
|
||||
for recommendation in analysis['manual_recommendations']:
|
||||
click.echo(f" • {recommendation}")
|
||||
|
||||
click.echo("\n✅ Performance analysis completed")
|
||||
|
||||
except Exception as e:
|
||||
click.echo(f"Error during performance analysis: {e}", err=True)
|
||||
sys.exit(1)
|
||||
|
||||
@main.command()
|
||||
@click.option("--auto-apply", is_flag=True, help="Automatically apply optimization tunings")
|
||||
@click.pass_context
|
||||
@handle_exception
|
||||
def optimize(ctx, auto_apply):
|
||||
"""Apply performance optimizations based on analysis"""
|
||||
deb_mock = DebMock(ctx.obj["config"])
|
||||
|
||||
try:
|
||||
click.echo("Applying performance optimizations...")
|
||||
|
||||
# Get all build profiles
|
||||
profiles = list(deb_mock.performance_monitor._build_profiles.values())
|
||||
|
||||
if not profiles:
|
||||
click.echo("No build profiles available for optimization")
|
||||
return
|
||||
|
||||
total_tunings = 0
|
||||
applied_tunings = 0
|
||||
|
||||
for profile in profiles:
|
||||
analysis = deb_mock.performance_optimizer.analyze_build_performance(profile)
|
||||
total_tunings += len(analysis['automatic_tunings'])
|
||||
|
||||
if auto_apply and analysis['automatic_tunings']:
|
||||
results = deb_mock.performance_optimizer.apply_automatic_tunings(
|
||||
analysis['automatic_tunings']
|
||||
)
|
||||
applied_tunings += len(results['applied'])
|
||||
|
||||
if results['failed']:
|
||||
click.echo(f"⚠️ Some tunings failed for {profile.package_name}")
|
||||
|
||||
click.echo(f"\n=== Optimization Summary ===")
|
||||
click.echo(f"Total tunings available: {total_tunings}")
|
||||
click.echo(f"Tunings applied: {applied_tunings}")
|
||||
|
||||
if auto_apply:
|
||||
click.echo("✅ Automatic optimization completed")
|
||||
else:
|
||||
click.echo("ℹ️ Use --auto-apply to automatically apply optimizations")
|
||||
|
||||
except Exception as e:
|
||||
click.echo(f"Error during optimization: {e}", err=True)
|
||||
sys.exit(1)
|
||||
|
||||
@main.command()
|
||||
@click.option("--output-file", "-o", type=click.Path(), help="Output file for metrics export")
|
||||
@click.pass_context
|
||||
@handle_exception
|
||||
def export_metrics(ctx, output_file):
|
||||
"""Export performance metrics to a file"""
|
||||
deb_mock = DebMock(ctx.obj["config"])
|
||||
|
||||
try:
|
||||
click.echo("Exporting performance metrics...")
|
||||
|
||||
export_file = deb_mock.performance_monitor.export_metrics(output_file)
|
||||
|
||||
click.echo(f"✅ Performance metrics exported to: {export_file}")
|
||||
|
||||
except Exception as e:
|
||||
click.echo(f"Error exporting metrics: {e}", err=True)
|
||||
sys.exit(1)
|
||||
|
||||
@main.command()
|
||||
@click.pass_context
|
||||
@handle_exception
|
||||
def cleanup_metrics(ctx):
|
||||
"""Clean up old performance metrics"""
|
||||
deb_mock = DebMock(ctx.obj["config"])
|
||||
|
||||
try:
|
||||
click.echo("Cleaning up old performance metrics...")
|
||||
|
||||
deb_mock.performance_monitor.cleanup_old_metrics()
|
||||
|
||||
click.echo("✅ Old performance metrics cleaned up")
|
||||
|
||||
except Exception as e:
|
||||
click.echo(f"Error cleaning up metrics: {e}", err=True)
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
|
|
|
|||
|
|
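Since these commands are plain click entry points, the pytest suite added in this commit can exercise them in-process. A minimal sketch follows; the import path deb_mock.cli.main is an assumption, and whether click renders the command names with underscores or hyphens depends on the installed click version:

# test_cli_smoke.py - hypothetical smoke test, not part of this commit.
from click.testing import CliRunner

from deb_mock.cli import main  # import path assumed


def test_new_commands_are_registered():
    # click derives command names from function names; newer click versions
    # replace underscores with hyphens.
    names = set(main.commands)
    assert {"plugin-info", "list-stages"} & names or {"plugin_info", "list_stages"} & names


def test_help_runs():
    result = CliRunner().invoke(main, ["--help"])
    assert result.exit_code == 0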
deb_mock/config.py

@@ -59,7 +59,57 @@ class Config:
         # Parallel builds
         self.parallel_jobs = kwargs.get("parallel_jobs", 4)
         self.parallel_compression = kwargs.get("parallel_compression", True)
 
+        # Advanced parallel build support
+        self.parallel_builds = kwargs.get("parallel_builds", 2)  # Number of parallel chroots
+        self.parallel_chroot_prefix = kwargs.get("parallel_chroot_prefix", "parallel")
+        self.parallel_build_timeout = kwargs.get("parallel_build_timeout", 3600)  # seconds
+        self.parallel_build_cleanup = kwargs.get("parallel_build_cleanup", True)
+
+        # Advanced mount management
+        self.advanced_mounts = kwargs.get("advanced_mounts", {})
+        self.bind_mounts = kwargs.get("bind_mounts", [])
+        self.tmpfs_mounts = kwargs.get("tmpfs_mounts", [])
+        self.overlay_mounts = kwargs.get("overlay_mounts", [])
+        self.mount_options = kwargs.get("mount_options", {})
+
+        # Mount isolation and security
+        self.mount_proc = kwargs.get("mount_proc", True)
+        self.mount_sys = kwargs.get("mount_sys", True)
+        self.mount_dev = kwargs.get("mount_dev", True)
+        self.mount_devpts = kwargs.get("mount_devpts", True)
+        self.mount_tmp = kwargs.get("mount_tmp", True)
+        self.mount_home = kwargs.get("mount_home", False)
+
+        # Advanced chroot features
+        self.use_namespaces = kwargs.get("use_namespaces", False)
+        self.uid_mapping = kwargs.get("uid_mapping", None)
+        self.gid_mapping = kwargs.get("gid_mapping", None)
+        self.capabilities = kwargs.get("capabilities", [])
+        self.seccomp_profile = kwargs.get("seccomp_profile", None)
+
+        # UID/GID management
+        self.chroot_user = kwargs.get("chroot_user", "build")
+        self.chroot_group = kwargs.get("chroot_group", "build")
+        self.chroot_uid = kwargs.get("chroot_uid", 1000)
+        self.chroot_gid = kwargs.get("chroot_gid", 1000)
+        self.use_host_user = kwargs.get("use_host_user", False)
+        self.copy_host_users = kwargs.get("copy_host_users", [])
+        self.preserve_uid_gid = kwargs.get("preserve_uid_gid", True)
+
+        # Plugin system
+        self.plugins = kwargs.get("plugins", [])
+        self.plugin_conf = kwargs.get("plugin_conf", {})
+        self.plugin_dir = kwargs.get("plugin_dir", "/usr/share/deb-mock/plugins")
+
+        # Performance monitoring and optimization
+        self.enable_performance_monitoring = kwargs.get("enable_performance_monitoring", True)
+        self.performance_metrics_dir = kwargs.get("performance_metrics_dir", "./performance-metrics")
+        self.performance_retention_days = kwargs.get("performance_retention_days", 30)
+        self.performance_auto_optimization = kwargs.get("performance_auto_optimization", False)
+        self.performance_benchmark_iterations = kwargs.get("performance_benchmark_iterations", 3)
+        self.performance_reporting = kwargs.get("performance_reporting", True)
+
         # Network and proxy
         self.use_host_resolv = kwargs.get("use_host_resolv", True)
         self.http_proxy = kwargs.get("http_proxy", None)
@@ -124,10 +174,6 @@ class Config:
         self.apt_command = kwargs.get("apt_command", "apt-get")
         self.apt_install_command = kwargs.get("apt_install_command", "apt-get install -y")
 
-        # Plugin configuration
-        self.plugins = kwargs.get("plugins", {})
-        self.plugin_dir = kwargs.get("plugin_dir", "/usr/lib/deb-mock/plugins")
-
     @classmethod
     def from_file(cls, config_path: str) -> "Config":
         """Load configuration from a YAML file"""
@@ -179,6 +225,42 @@ class Config:
             "tmpfs_size": self.tmpfs_size,
             "parallel_jobs": self.parallel_jobs,
             "parallel_compression": self.parallel_compression,
+            "parallel_builds": self.parallel_builds,
+            "parallel_chroot_prefix": self.parallel_chroot_prefix,
+            "parallel_build_timeout": self.parallel_build_timeout,
+            "parallel_build_cleanup": self.parallel_build_cleanup,
+            "advanced_mounts": self.advanced_mounts,
+            "bind_mounts": self.bind_mounts,
+            "tmpfs_mounts": self.tmpfs_mounts,
+            "overlay_mounts": self.overlay_mounts,
+            "mount_options": self.mount_options,
+            "mount_proc": self.mount_proc,
+            "mount_sys": self.mount_sys,
+            "mount_dev": self.mount_dev,
+            "mount_devpts": self.mount_devpts,
+            "mount_tmp": self.mount_tmp,
+            "mount_home": self.mount_home,
+            "use_namespaces": self.use_namespaces,
+            "uid_mapping": self.uid_mapping,
+            "gid_mapping": self.gid_mapping,
+            "capabilities": self.capabilities,
+            "seccomp_profile": self.seccomp_profile,
+            "chroot_user": self.chroot_user,
+            "chroot_group": self.chroot_group,
+            "chroot_uid": self.chroot_uid,
+            "chroot_gid": self.chroot_gid,
+            "use_host_user": self.use_host_user,
+            "copy_host_users": self.copy_host_users,
+            "preserve_uid_gid": self.preserve_uid_gid,
+            "plugins": self.plugins,
+            "plugin_conf": self.plugin_conf,
+            "plugin_dir": self.plugin_dir,
+            "enable_performance_monitoring": self.enable_performance_monitoring,
+            "performance_metrics_dir": self.performance_metrics_dir,
+            "performance_retention_days": self.performance_retention_days,
+            "performance_auto_optimization": self.performance_auto_optimization,
+            "performance_benchmark_iterations": self.performance_benchmark_iterations,
+            "performance_reporting": self.performance_reporting,
             "use_host_resolv": self.use_host_resolv,
             "http_proxy": self.http_proxy,
             "https_proxy": self.https_proxy,
@@ -229,6 +311,7 @@ class Config:
 
         # Check suite
         valid_suites = [
+            "trixie",  # Debian 13+ (trixie) - required for OSTree support
            "bookworm",
            "sid",
            "bullseye",
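Taken together, these new options can also be driven from Python rather than YAML. A minimal sketch, assuming Config accepts these keys as plain keyword arguments (as the kwargs.get calls above suggest); the values are illustrative only:

# Sketch: enabling the new subsystems programmatically; values are invented.
from deb_mock.config import Config  # module path assumed from core.py's imports

config = Config(
    chroot_name="debian-trixie-amd64",
    suite="trixie",
    parallel_builds=4,                       # number of concurrent chroots
    plugins=["ccache"],                      # plugin list consumed by PluginManager
    plugin_conf={"ccache_enable": True},
    enable_performance_monitoring=True,
    performance_auto_optimization=False,
)
assert config.to_dict()["parallel_builds"] == 4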
36
deb_mock/configs/debian-trixie-amd64.yaml
Normal file

@@ -0,0 +1,36 @@
# Debian Trixie (Debian 13) - AMD64
# Equivalent to Mock's fedora-39-x86_64 config
# Debian 13+ (trixie) has the required OSTree version for bootc support
description: "Debian Trixie (Debian 13) - AMD64"
chroot_name: "debian-trixie-amd64"
architecture: "amd64"
suite: "trixie"
mirror: "http://deb.debian.org/debian/"

# Build environment
build_env:
  DEB_BUILD_OPTIONS: "parallel=4,nocheck"
  DEB_BUILD_PROFILES: "nocheck"
  DEB_CFLAGS_SET: "-O2"
  DEB_CXXFLAGS_SET: "-O2"
  DEB_LDFLAGS_SET: "-Wl,-z,defs"

# Build options
build_options:
  - "--verbose"
  - "--no-run-lintian"

# Chroot configuration
chroot_dir: "/var/lib/deb-mock/chroots"
chroot_config_dir: "/etc/schroot/chroot.d"

# sbuild configuration
sbuild_config: "/etc/sbuild/sbuild.conf"
sbuild_log_dir: "/var/log/sbuild"

# Output configuration
output_dir: "./output"
metadata_dir: "./metadata"
keep_chroot: false
verbose: false
debug: false
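This file is the kind of input Config.from_file (shown in the config.py hunk above) consumes. A small sketch; the attribute names mirror the YAML keys, which the rest of this diff suggests but does not prove:

# Sketch: loading the shipped trixie config (path taken from this commit).
from deb_mock.config import Config

config = Config.from_file("deb_mock/configs/debian-trixie-amd64.yaml")
print(config.suite, config.architecture)   # expected: trixie amd64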
331
deb_mock/core.py
@@ -3,8 +3,11 @@ Core DebMock class for orchestrating the build process
 """
 
 import os
+import threading
+import concurrent.futures
 from pathlib import Path
 from typing import Any, Dict, List, Optional
+import time
 
 from .cache import CacheManager
 from .chroot import ChrootManager
@@ -12,6 +15,8 @@ from .config import Config
 from .exceptions import ChrootError
 from .metadata import MetadataManager
 from .sbuild import SbuildWrapper
+from .plugin import PluginManager, HookStages
+from .performance import PerformanceMonitor, PerformanceOptimizer, PerformanceReporter
 
 
 class DebMock:
@@ -23,12 +28,25 @@ class DebMock:
         self.sbuild_wrapper = SbuildWrapper(config)
         self.metadata_manager = MetadataManager(config)
         self.cache_manager = CacheManager(config)
+        self.plugin_manager = PluginManager(config)
 
         # Validate configuration
         self.config.validate()
 
         # Setup caches
         self._setup_caches()
 
+        # Initialize plugins
+        self.plugin_manager.init_plugins(self)
+
+        # Initialize performance monitoring
+        self.performance_monitor = PerformanceMonitor(config)
+        self.performance_optimizer = PerformanceOptimizer(config)
+        self.performance_reporter = PerformanceReporter(config)
+
+        # Parallel build support
+        self._build_lock = threading.Lock()
+        self._active_builds = {}
+
     def _setup_caches(self) -> None:
         """Setup cache directories and ccache"""
@@ -43,42 +61,199 @@
     def build(self, source_package: str, **kwargs) -> Dict[str, Any]:
         """Build a Debian source package in an isolated environment"""
 
+        # Create build profile for performance tracking
+        build_id = f"build_{int(time.time() * 1000)}"
+        profile_id = self.performance_monitor.create_build_profile(
+            build_id, source_package, self.config.architecture, self.config.suite
+        )
+
+        # Call pre-build hooks
+        self.plugin_manager.call_hooks(HookStages.PREBUILD, source_package, **kwargs)
+
         # Ensure chroot exists
         chroot_name = kwargs.get("chroot_name", self.config.chroot_name)
         chroot_path = self.config.get_chroot_path()
 
+        if not self.chroot_manager.chroot_exists(chroot_name):
+            with self.performance_monitor.monitor_operation("chroot_creation") as op_id:
+                self.chroot_manager.create_chroot(chroot_name)
+                # Add chroot creation metrics to profile
+                self.performance_monitor.add_phase_metrics(profile_id, "chroot_creation",
+                                                            self.performance_monitor._active_operations[op_id])
+
         # Try to restore from cache first
         if not self.chroot_manager.chroot_exists(chroot_name):
             if not self.cache_manager.restore_root_cache(chroot_path):
                 self.chroot_manager.create_chroot(chroot_name)
 
         # Check build dependencies
         deps_check = self.sbuild_wrapper.check_dependencies(source_package, chroot_name)
         if not deps_check["satisfied"]:
             # Try to install missing dependencies
             if deps_check["missing"]:
                 self.sbuild_wrapper.install_build_dependencies(deps_check["missing"], chroot_name)
 
         # Setup build environment
-        build_env = self.config.setup_build_environment()
+        with self.performance_monitor.monitor_operation("build_env_setup") as op_id:
+            build_env = self.config.setup_build_environment()
+            # Add build environment setup metrics to profile
+            self.performance_monitor.add_phase_metrics(profile_id, "build_env_setup",
+                                                        self.performance_monitor._active_operations[op_id])
+
+        # Call build start hook
+        self.plugin_manager.call_hooks(HookStages.BUILD_START, source_package, chroot_name, **kwargs)
 
         # Build the package
-        build_result = self.sbuild_wrapper.build_package(source_package, chroot_name, build_env=build_env, **kwargs)
+        with self.performance_monitor.monitor_operation("package_build") as op_id:
+            build_result = self.sbuild_wrapper.build_package(source_package, chroot_name, build_env=build_env, **kwargs)
+            # Add package build metrics to profile
+            self.performance_monitor.add_phase_metrics(profile_id, "package_build",
+                                                        self.performance_monitor._active_operations[op_id])
+
+        # Call build end hook
+        self.plugin_manager.call_hooks(HookStages.BUILD_END, build_result, source_package, chroot_name, **kwargs)
 
         # Create cache after successful build
         if build_result.get("success", False):
-            self.cache_manager.create_root_cache(chroot_path)
+            with self.performance_monitor.monitor_operation("cache_creation") as op_id:
+                self.cache_manager.create_root_cache(chroot_path)
+                # Add cache creation metrics to profile
+                self.performance_monitor.add_phase_metrics(profile_id, "cache_creation",
+                                                            self.performance_monitor._active_operations[op_id])
 
         # Capture and store metadata
-        metadata = self._capture_build_metadata(build_result, source_package)
-        self.metadata_manager.store_metadata(metadata)
+        with self.performance_monitor.monitor_operation("metadata_capture") as op_id:
+            metadata = self._capture_build_metadata(build_result, source_package)
+            self.metadata_manager.store_metadata(metadata)
+            # Add metadata capture metrics to profile
+            self.performance_monitor.add_phase_metrics(profile_id, "metadata_capture",
+                                                        self.performance_monitor._active_operations[op_id])
 
         # Clean up chroot if not keeping it
         if not kwargs.get("keep_chroot", self.config.keep_chroot):
-            self.chroot_manager.clean_chroot(chroot_name)
+            with self.performance_monitor.monitor_operation("chroot_cleanup") as op_id:
+                self.chroot_manager.clean_chroot(chroot_name)
+                # Add chroot cleanup metrics to profile
+                self.performance_monitor.add_phase_metrics(profile_id, "chroot_cleanup",
+                                                            self.performance_monitor._active_operations[op_id])
 
+        # Call post-build hooks
+        self.plugin_manager.call_hooks(HookStages.POSTBUILD, build_result, source_package, **kwargs)
+
+        # Finalize build profile and generate optimization suggestions
+        build_profile = self.performance_monitor.finalize_build_profile(profile_id)
+        if build_profile and self.config.performance_auto_optimization:
+            analysis = self.performance_optimizer.analyze_build_performance(build_profile)
+            if analysis['automatic_tunings']:
+                self.performance_optimizer.apply_automatic_tunings(analysis['automatic_tunings'])
+
         return build_result
 
+    def build_parallel(self, source_packages: List[str], max_workers: int = None, **kwargs) -> List[Dict[str, Any]]:
+        """
+        Build multiple packages in parallel using multiple chroots
+
+        Args:
+            source_packages: List of source packages to build
+            max_workers: Maximum number of parallel builds (default: config.parallel_builds)
+            **kwargs: Additional build options
+
+        Returns:
+            List of build results in the same order as source_packages
+        """
+        if max_workers is None:
+            max_workers = getattr(self.config, 'parallel_builds', 2)
+
+        # Limit max_workers to available system resources
+        max_workers = min(max_workers, os.cpu_count() or 2)
+
+        print(f"Building {len(source_packages)} packages with {max_workers} parallel workers")
+
+        # Create unique chroot names for parallel builds
+        chroot_names = [f"{self.config.chroot_name}-parallel-{i}" for i in range(len(source_packages))]
+
+        # Prepare build tasks
+        build_tasks = []
+        for i, (source_package, chroot_name) in enumerate(zip(source_packages, chroot_names)):
+            task_kwargs = kwargs.copy()
+            task_kwargs['chroot_name'] = chroot_name
+            task_kwargs['package_index'] = i
+            build_tasks.append((source_package, task_kwargs))
+
+        # Execute builds in parallel
+        results = [None] * len(source_packages)
+
+        with concurrent.futures.ThreadPoolExecutor(max_workers=max_workers) as executor:
+            # Submit all build tasks
+            future_to_index = {
+                executor.submit(self._build_single_parallel, source_pkg, **task_kwargs): i
+                for i, (source_pkg, task_kwargs) in enumerate(build_tasks)
+            }
+
+            # Collect results as they complete
+            for future in concurrent.futures.as_completed(future_to_index):
+                index = future_to_index[future]
+                try:
+                    result = future.result()
+                    results[index] = result
+                    print(f"✅ Package {index + 1}/{len(source_packages)} completed: {result.get('package_name', 'unknown')}")
+                except Exception as e:
+                    results[index] = {
+                        'success': False,
+                        'error': str(e),
+                        'package_name': source_packages[index] if index < len(source_packages) else 'unknown'
+                    }
+                    print(f"❌ Package {index + 1}/{len(source_packages)} failed: {e}")
+
+        # Clean up parallel chroots
+        for chroot_name in chroot_names:
+            try:
+                self.chroot_manager.clean_chroot(chroot_name)
+            except Exception as e:
+                print(f"Warning: Failed to clean chroot {chroot_name}: {e}")
+
+        return results
+
+    def _build_single_parallel(self, source_package: str, **kwargs) -> Dict[str, Any]:
+        """Build a single package for parallel execution"""
+        chroot_name = kwargs.get("chroot_name", self.config.chroot_name)
+        package_index = kwargs.get("package_index", 0)
+
+        print(f"🔄 Starting parallel build {package_index + 1}: {source_package}")
+
+        try:
+            # Ensure chroot exists for this parallel build
+            chroot_path = os.path.join(self.config.chroot_dir, chroot_name)
+
+            if not self.chroot_manager.chroot_exists(chroot_name):
+                if not self.cache_manager.restore_root_cache(chroot_path):
+                    self.chroot_manager.create_chroot(chroot_name)
+
+            # Check build dependencies
+            deps_check = self.sbuild_wrapper.check_dependencies(source_package, chroot_name)
+            if not deps_check["satisfied"]:
+                if deps_check["missing"]:
+                    self.sbuild_wrapper.install_build_dependencies(deps_check["missing"], chroot_name)
+
+            # Setup build environment
+            build_env = self.config.setup_build_environment()
+
+            # Build the package
+            build_result = self.sbuild_wrapper.build_package(
+                source_package, chroot_name, build_env=build_env, **kwargs
+            )
+
+            # Create cache after successful build
+            if build_result.get("success", False):
+                self.cache_manager.create_root_cache(chroot_path)
+
+            # Capture and store metadata
+            metadata = self._capture_build_metadata(build_result, source_package)
+            self.metadata_manager.store_metadata(metadata)
+
+            return build_result
+
+        except Exception as e:
+            return {
+                'success': False,
+                'error': str(e),
+                'package_name': source_package,
+                'chroot_name': chroot_name
+            }
+
     def build_chain(self, source_packages: List[str], **kwargs) -> List[Dict[str, Any]]:
         """Build a chain of packages that depend on each other (similar to Mock's --chain)"""
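A usage sketch for the parallel API above; the package paths are placeholders, while the result keys (success, package_name, error) are the ones produced by _build_single_parallel:

# Sketch: building several source packages concurrently.
from deb_mock.config import Config
from deb_mock.core import DebMock

deb_mock = DebMock(Config.from_file("deb_mock/configs/debian-trixie-amd64.yaml"))
results = deb_mock.build_parallel(["./pkg-a", "./pkg-b", "./pkg-c"], max_workers=2)
for res in results:
    status = "ok" if res.get("success") else res.get("error")
    print(res.get("package_name"), status)

ThreadPoolExecutor is a reasonable fit here because each build spends most of its time waiting on sbuild subprocesses, where the GIL is not held.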
@@ -98,66 +273,52 @@
             try:
                 # Build the package
                 result = self.sbuild_wrapper.build_package(source_package, chroot_name, build_env=build_env, **kwargs)
 
-                results.append(
-                    {
-                        "package": source_package,
-                        "success": True,
-                        "result": result,
-                        "order": i + 1,
-                    }
-                )
-
-                # Install the built package in the chroot for subsequent builds
-                if result.get("artifacts"):
-                    self._install_built_package(result["artifacts"], chroot_name)
-
-            except Exception as e:
-                results.append(
-                    {
-                        "package": source_package,
-                        "success": False,
-                        "error": str(e),
-                        "order": i + 1,
-                    }
-                )
-                # Stop chain on failure unless continue_on_failure is specified
-                if not kwargs.get("continue_on_failure", False):
-                    break
-
+                # Store result
+                results.append(result)
+
+                # If build failed, stop the chain
+                if not result.get("success", False):
+                    print(f"Chain build failed at package {i+1}: {source_package}")
+                    break
+
+                # Create cache after successful chain build
+                if any(r["success"] for r in results):
+                    self.cache_manager.create_root_cache(chroot_path)
+
+                # Install the built package for dependency resolution
+                if result.get("success", False) and kwargs.get("install_built", True):
+                    self._install_built_package(result, chroot_name)
+
+            except Exception as e:
+                error_result = {
+                    "success": False,
+                    "error": str(e),
+                    "package": source_package,
+                    "chain_position": i
+                }
+                results.append(error_result)
+                break
+
         return results
 
-    def _install_built_package(self, artifacts: List[str], chroot_name: str) -> None:
-        """Install a built package in the chroot for chain building"""
-
-        # Find .deb files in artifacts
-        deb_files = [art for art in artifacts if art.endswith(".deb")]
-
-        if not deb_files:
-            return
-
-        # Copy .deb files to chroot and install them
-        for deb_file in deb_files:
-            try:
-                # Copy to chroot
-                chroot_deb_path = f"/tmp/{os.path.basename(deb_file)}"
-                self.chroot_manager.copy_to_chroot(deb_file, chroot_deb_path, chroot_name)
-
-                # Install in chroot
-                self.chroot_manager.execute_in_chroot(
-                    chroot_name, ["dpkg", "-i", chroot_deb_path], capture_output=False
-                )
-
-                # Clean up
-                self.chroot_manager.execute_in_chroot(chroot_name, ["rm", "-f", chroot_deb_path], capture_output=False)
-
-            except Exception as e:
-                # Log warning but continue
-                print(f"Warning: Failed to install {deb_file} in chroot: {e}")
+    def _install_built_package(self, build_result: Dict[str, Any], chroot_name: str) -> None:
+        """Install a built package in the chroot for dependency resolution"""
+        try:
+            # Extract .deb files from build result
+            deb_files = build_result.get("artifacts", {}).get("deb_files", [])
+
+            for deb_file in deb_files:
+                if deb_file.endswith(".deb"):
+                    # Copy .deb to chroot and install
+                    self.chroot_manager.copy_in(deb_file, chroot_name, "/tmp/")
+
+                    # Install the package
+                    install_cmd = ["dpkg", "-i", f"/tmp/{os.path.basename(deb_file)}"]
+                    self.chroot_manager.execute_in_chroot(chroot_name, install_cmd)
+
+                    # Fix any broken dependencies
+                    fix_cmd = ["apt-get", "install", "-f", "-y"]
+                    self.chroot_manager.execute_in_chroot(chroot_name, fix_cmd)
+
+        except Exception as e:
+            print(f"Warning: Failed to install built package: {e}")
 
     def init_chroot(self, chroot_name: str, arch: str = None, suite: str = None) -> None:
         """Initialize a new chroot environment"""
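A corresponding sketch for chain builds, reusing the deb_mock instance from the previous sketch; install_built is the keyword honoured above, and the result keys here are guesses since the chain now stores raw build_package results:

# Sketch: chain-building two packages where foo-tools build-depends on libfoo.
results = deb_mock.build_chain(["./libfoo", "./foo-tools"], install_built=True)
for res in results:
    print(res.get("package", res.get("package_name")), res.get("success"))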
@@ -391,14 +552,14 @@
         result = self.chroot_manager.execute_in_chroot(
             chroot_name,
             f"{self.config.apt_install_command} {' '.join(packages)}",
-            as_root=True,
+
         )
 
         return {
-            "success": result["returncode"] == 0,
+            "success": result.returncode == 0,
             "installed": packages,
-            "output": result["stdout"],
-            "error": result["stderr"] if result["returncode"] != 0 else None,
+            "output": result.stdout,
+            "error": result.stderr if result.returncode != 0 else None,
         }
 
     def update_packages(self, packages: List[str] = None) -> Dict[str, Any]:
@@ -416,13 +577,13 @@
             # Update all packages
             cmd = f"{self.config.apt_command} update && {self.config.apt_command} upgrade -y"
 
-        result = self.chroot_manager.execute_in_chroot(chroot_name, cmd, as_root=True)
+        result = self.chroot_manager.execute_in_chroot(chroot_name, cmd)
 
         return {
-            "success": result["returncode"] == 0,
+            "success": result.returncode == 0,
             "updated": packages if packages else "all",
-            "output": result["stdout"],
-            "error": result["stderr"] if result["returncode"] != 0 else None,
+            "output": result.stdout,
+            "error": result.stderr if result.returncode != 0 else None,
         }
 
     def remove_packages(self, packages: List[str]) -> Dict[str, Any]:
@@ -435,13 +596,13 @@
 
         # Remove packages using APT
         cmd = f"{self.config.apt_command} remove -y {' '.join(packages)}"
-        result = self.chroot_manager.execute_in_chroot(chroot_name, cmd, as_root=True)
+        result = self.chroot_manager.execute_in_chroot(chroot_name, cmd)
 
         return {
-            "success": result["returncode"] == 0,
+            "success": result.returncode == 0,
             "removed": packages,
-            "output": result["stdout"],
-            "error": result["stderr"] if result["returncode"] != 0 else None,
+            "output": result.stdout,
+            "error": result.stderr if result.returncode != 0 else None,
         }
 
     def execute_apt_command(self, command: str) -> Dict[str, Any]:
@@ -454,11 +615,11 @@
 
         # Execute APT command
         cmd = f"{self.config.apt_command} {command}"
-        result = self.chroot_manager.execute_in_chroot(chroot_name, cmd, as_root=True)
+        result = self.chroot_manager.execute_in_chroot(chroot_name, cmd)
 
         return {
-            "success": result["returncode"] == 0,
+            "success": result.returncode == 0,
             "command": command,
-            "output": result["stdout"],
-            "error": result["stderr"] if result["returncode"] != 0 else None,
+            "output": result.stdout,
+            "error": result.stderr if result.returncode != 0 else None,
         }
deb_mock/exceptions.py

@@ -412,6 +412,23 @@ class ValidationError(DebMockError):
         super().__init__(message, exit_code=12, context=context, suggestions=suggestions)
 
 
+class UIDManagerError(DebMockError):
+    """Raised when UID/GID management operations fail"""
+
+    def __init__(self, message, chroot_name=None, operation=None):
+        super().__init__(message)
+        self.chroot_name = chroot_name
+        self.operation = operation
+
+    def get_exit_code(self):
+        return 20  # UID management error
+
+
+class PerformanceError(Exception):
+    """Raised when performance monitoring or optimization fails"""
+    pass
+
+
 # Convenience functions for common error patterns
 def handle_exception(func):
     """
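The two new exception types behave differently: UIDManagerError carries structured context plus an exit code, while PerformanceError is a bare Exception. A small sketch, with the import path assumed from the other modules in this commit:

# Sketch: raising and inspecting the new UID management error.
from deb_mock.exceptions import UIDManagerError

try:
    raise UIDManagerError("failed to map UID 1000",
                          chroot_name="debian-trixie-amd64",
                          operation="copy_host_user")
except UIDManagerError as err:
    print(err.chroot_name, err.operation, err.get_exit_code())  # ... 20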
1541
deb_mock/performance.py
Normal file
File diff suppressed because it is too large

248
deb_mock/plugin.py
Normal file

@@ -0,0 +1,248 @@
"""
|
||||
Plugin system for deb-mock
|
||||
Based on Fedora Mock's plugin architecture
|
||||
"""
|
||||
|
||||
import importlib.machinery
|
||||
import importlib.util
|
||||
import sys
|
||||
import os
|
||||
import logging
|
||||
from typing import Dict, List, Any, Callable, Optional
|
||||
from pathlib import Path
|
||||
|
||||
from .exceptions import PluginError
|
||||
|
||||
|
||||
class PluginManager:
|
||||
"""Manages plugins for deb-mock"""
|
||||
|
||||
# Current API version
|
||||
CURRENT_API_VERSION = "1.0"
|
||||
|
||||
def __init__(self, config):
|
||||
self.config = config
|
||||
self.logger = logging.getLogger(__name__)
|
||||
|
||||
# Plugin configuration
|
||||
self.plugins = getattr(config, 'plugins', [])
|
||||
self.plugin_conf = getattr(config, 'plugin_conf', {})
|
||||
self.plugin_dir = getattr(config, 'plugin_dir', '/usr/share/deb-mock/plugins')
|
||||
|
||||
# Hook system
|
||||
self._hooks = {}
|
||||
self._initialized_plugins = []
|
||||
|
||||
# Plugin state tracking
|
||||
self.already_initialized = False
|
||||
|
||||
def __repr__(self):
|
||||
return f"<deb_mock.plugin.PluginManager: plugins={len(self.plugins)}, hooks={len(self._hooks)}>"
|
||||
|
||||
def init_plugins(self, deb_mock):
|
||||
"""Initialize all enabled plugins"""
|
||||
if self.already_initialized:
|
||||
return
|
||||
|
||||
self.already_initialized = True
|
||||
self.logger.info("Initializing plugins...")
|
||||
|
||||
# Update plugin configuration with deb-mock context
|
||||
for key in list(self.plugin_conf.keys()):
|
||||
if key.endswith('_opts'):
|
||||
self.plugin_conf[key].update({
|
||||
'basedir': getattr(deb_mock.config, 'basedir', '/var/lib/deb-mock'),
|
||||
'chroot_dir': deb_mock.config.chroot_dir,
|
||||
'output_dir': deb_mock.config.output_dir,
|
||||
'cache_dir': deb_mock.config.cache_dir,
|
||||
})
|
||||
|
||||
# Import and initialize plugins
|
||||
for plugin_name in self.plugins:
|
||||
if self.plugin_conf.get(f"{plugin_name}_enable", True):
|
||||
try:
|
||||
self._load_plugin(plugin_name, deb_mock)
|
||||
except Exception as e:
|
||||
self.logger.error(f"Failed to load plugin {plugin_name}: {e}")
|
||||
if self.plugin_conf.get(f"{plugin_name}_required", False):
|
||||
raise PluginError(f"Required plugin {plugin_name} failed to load: {e}")
|
||||
|
||||
self.logger.info(f"Plugin initialization complete. Loaded {len(self._initialized_plugins)} plugins")
|
||||
|
||||
def _load_plugin(self, plugin_name: str, deb_mock):
|
||||
"""Load and initialize a single plugin"""
|
||||
self.logger.debug(f"Loading plugin: {plugin_name}")
|
||||
|
||||
# Find plugin module
|
||||
spec = importlib.machinery.PathFinder.find_spec(plugin_name, [self.plugin_dir])
|
||||
if not spec:
|
||||
# Try to find in local plugins directory
|
||||
local_plugin_dir = os.path.join(os.getcwd(), 'plugins')
|
||||
spec = importlib.machinery.PathFinder.find_spec(plugin_name, [local_plugin_dir])
|
||||
|
||||
if not spec:
|
||||
raise PluginError(f"Plugin {plugin_name} not found in {self.plugin_dir} or local plugins directory")
|
||||
|
||||
# Load plugin module
|
||||
module = importlib.util.module_from_spec(spec)
|
||||
spec.loader.exec_module(module)
|
||||
sys.modules[spec.name] = module
|
||||
|
||||
# Validate plugin API version
|
||||
if not hasattr(module, 'requires_api_version'):
|
||||
raise PluginError(f'Plugin "{plugin_name}" doesn\'t specify required API version')
|
||||
|
||||
requested_api_version = module.requires_api_version
|
||||
if requested_api_version != self.CURRENT_API_VERSION:
|
||||
raise PluginError(f'Plugin version mismatch - requested = {requested_api_version}, current = {self.CURRENT_API_VERSION}')
|
||||
|
||||
# Check if plugin should run in bootstrap chroots
|
||||
run_in_bootstrap = getattr(module, "run_in_bootstrap", True)
|
||||
|
||||
# Initialize plugin
|
||||
plugin_conf = self.plugin_conf.get(f"{plugin_name}_opts", {})
|
||||
module.init(self, plugin_conf, deb_mock)
|
||||
|
||||
self._initialized_plugins.append(plugin_name)
|
||||
self.logger.info(f"Plugin {plugin_name} loaded successfully")
|
||||
|
||||
def call_hooks(self, stage: str, *args, **kwargs):
|
||||
"""Call all hooks registered for a specific stage"""
|
||||
required = kwargs.pop('required', False)
|
||||
hooks = self._hooks.get(stage, [])
|
||||
|
||||
if required and not hooks:
|
||||
raise PluginError(f"Feature {stage} is not provided by any of enabled plugins")
|
||||
|
||||
self.logger.debug(f"Calling {len(hooks)} hooks for stage: {stage}")
|
||||
|
||||
for hook in hooks:
|
||||
try:
|
||||
hook(*args, **kwargs)
|
||||
except Exception as e:
|
||||
self.logger.error(f"Hook {hook.__name__} failed for stage {stage}: {e}")
|
||||
if required:
|
||||
raise PluginError(f"Required hook {hook.__name__} failed: {e}")
|
||||
|
||||
def add_hook(self, stage: str, function: Callable):
|
||||
"""Add a hook function for a specific stage"""
|
||||
if stage not in self._hooks:
|
||||
self._hooks[stage] = []
|
||||
|
||||
if function not in self._hooks[stage]:
|
||||
self._hooks[stage].append(function)
|
||||
self.logger.debug(f"Added hook {function.__name__} for stage {stage}")
|
||||
|
||||
def remove_hook(self, stage: str, function: Callable):
|
||||
"""Remove a hook function from a specific stage"""
|
||||
if stage in self._hooks and function in self._hooks[stage]:
|
||||
self._hooks[stage].remove(function)
|
||||
self.logger.debug(f"Removed hook {function.__name__} from stage {stage}")
|
||||
|
||||
def get_hooks(self, stage: str) -> List[Callable]:
|
||||
"""Get all hooks registered for a specific stage"""
|
||||
return self._hooks.get(stage, [])
|
||||
|
||||
def list_stages(self) -> List[str]:
|
||||
"""List all available hook stages"""
|
||||
return list(self._hooks.keys())
|
||||
|
||||
def get_plugin_info(self) -> Dict[str, Any]:
|
||||
"""Get information about loaded plugins"""
|
||||
return {
|
||||
'total_plugins': len(self.plugins),
|
||||
'loaded_plugins': self._initialized_plugins,
|
||||
'available_stages': self.list_stages(),
|
||||
'plugin_dir': self.plugin_dir,
|
||||
'api_version': self.CURRENT_API_VERSION
|
||||
}
|
||||
|
||||
|
||||
# Standard hook stages for deb-mock
|
||||
class HookStages:
|
||||
"""Standard hook stages for deb-mock plugins"""
|
||||
|
||||
# Chroot lifecycle
|
||||
PRECHROOT_INIT = "prechroot_init"
|
||||
POSTCHROOT_INIT = "postchroot_init"
|
||||
PRECHROOT_CLEAN = "prechroot_clean"
|
||||
POSTCHROOT_CLEAN = "postchroot_clean"
|
||||
|
||||
# Build lifecycle
|
||||
PREBUILD = "prebuild"
|
||||
POSTBUILD = "postbuild"
|
||||
BUILD_START = "build_start"
|
||||
BUILD_END = "build_end"
|
||||
|
||||
# Package management
|
||||
PRE_INSTALL_DEPS = "pre_install_deps"
|
||||
POST_INSTALL_DEPS = "post_install_deps"
|
||||
PRE_INSTALL_PACKAGE = "pre_install_package"
|
||||
POST_INSTALL_PACKAGE = "post_install_package"
|
||||
|
||||
# Mount management
|
||||
PRE_MOUNT = "pre_mount"
|
||||
POST_MOUNT = "post_mount"
|
||||
PRE_UNMOUNT = "pre_unmount"
|
||||
POST_UNMOUNT = "post_unmount"
|
||||
|
||||
# Cache management
|
||||
PRE_CACHE_CREATE = "pre_cache_create"
|
||||
POST_CACHE_CREATE = "post_cache_create"
|
||||
PRE_CACHE_RESTORE = "pre_cache_restore"
|
||||
POST_CACHE_RESTORE = "post_cache_restore"
|
||||
|
||||
# Parallel build hooks
|
||||
PRE_PARALLEL_BUILD = "pre_parallel_build"
|
||||
POST_PARALLEL_BUILD = "post_parallel_build"
|
||||
PARALLEL_BUILD_START = "parallel_build_start"
|
||||
PARALLEL_BUILD_END = "parallel_build_end"
|
||||
|
||||
# Error handling
|
||||
ON_ERROR = "on_error"
|
||||
ON_WARNING = "on_warning"
|
||||
|
||||
# Custom stages can be added by plugins
|
||||
CUSTOM = "custom"
|
||||
|
||||
|
||||
# Plugin base class for easier plugin development
|
||||
class BasePlugin:
|
||||
"""Base class for deb-mock plugins"""
|
||||
|
||||
def __init__(self, plugin_manager, config, deb_mock):
|
||||
self.plugin_manager = plugin_manager
|
||||
self.config = config
|
||||
self.deb_mock = deb_mock
|
||||
self.logger = logging.getLogger(f"deb_mock.plugin.{self.__class__.__name__}")
|
||||
|
||||
# Register hooks
|
||||
self._register_hooks()
|
||||
|
||||
def _register_hooks(self):
|
||||
"""Override this method to register hooks"""
|
||||
pass
|
||||
|
||||
def get_config(self, key: str, default=None):
|
||||
"""Get plugin configuration value"""
|
||||
return self.config.get(key, default)
|
||||
|
||||
def set_config(self, key: str, value):
|
||||
"""Set plugin configuration value"""
|
||||
self.config[key] = value
|
||||
|
||||
def log_info(self, message: str):
|
||||
"""Log info message"""
|
||||
self.logger.info(message)
|
||||
|
||||
def log_warning(self, message: str):
|
||||
"""Log warning message"""
|
||||
self.logger.warning(message)
|
||||
|
||||
def log_error(self, message: str):
|
||||
"""Log error message"""
|
||||
self.logger.error(message)
|
||||
|
||||
def log_debug(self, message: str):
|
||||
"""Log debug message"""
|
||||
self.logger.debug(message)
|
||||
|
|
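For reference, the smallest module _load_plugin will accept looks like the sketch below: it must live in plugin_dir (or ./plugins), declare requires_api_version, and expose an init(plugin_manager, conf, deb_mock) entry point. The plugin name and hook bodies here are hypothetical:

# plugins/build_timer.py - hypothetical plugin measuring wall-clock build time.
import time

from deb_mock.plugin import HookStages

requires_api_version = "1.0"
run_in_bootstrap = True


def init(plugin_manager, conf, deb_mock):
    started = {}

    def on_start(source_package, chroot_name, **kwargs):
        # BUILD_START hooks receive (source_package, chroot_name, **kwargs)
        started[source_package] = time.monotonic()

    def on_end(build_result, source_package, chroot_name, **kwargs):
        # BUILD_END hooks receive (build_result, source_package, chroot_name, **kwargs)
        begun = started.pop(source_package, None)
        if begun is not None:
            print(f"[build_timer] {source_package} took {time.monotonic() - begun:.1f}s")

    plugin_manager.add_hook(HookStages.BUILD_START, on_start)
    plugin_manager.add_hook(HookStages.BUILD_END, on_end)

Enabling it would then be a matter of plugins=["build_timer"] plus an optional build_timer_enable flag in plugin_conf.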
deb_mock/sbuild.py

@@ -5,6 +5,8 @@ sbuild wrapper for deb-mock
 import os
 import subprocess
 import tempfile
+import grp
+import pwd
 from pathlib import Path
 from typing import Any, Dict, List
@@ -16,6 +18,73 @@ class SbuildWrapper:
 
     def __init__(self, config):
         self.config = config
+        self._check_sbuild_requirements()
+
+    def _check_sbuild_requirements(self):
+        """Check if sbuild requirements are met"""
+        # Check if sbuild is available
+        if not self._is_sbuild_available():
+            raise SbuildError("sbuild not found. Please install sbuild package.")
+
+        # Check if user is in sbuild group
+        if not self._is_user_in_sbuild_group():
+            raise SbuildError(
+                "User not in sbuild group. Please run 'sudo sbuild-adduser $USER' "
+                "and start a new shell session."
+            )
+
+        # Check if sbuild configuration exists
+        if not self._is_sbuild_configured():
+            self._setup_sbuild_config()
+
+    def _is_sbuild_available(self) -> bool:
+        """Check if sbuild is available in PATH"""
+        try:
+            subprocess.run(["sbuild", "--version"], capture_output=True, check=True)
+            return True
+        except (subprocess.CalledProcessError, FileNotFoundError):
+            return False
+
+    def _is_user_in_sbuild_group(self) -> bool:
+        """Check if current user is in sbuild group"""
+        try:
+            current_user = pwd.getpwuid(os.getuid()).pw_name
+            sbuild_group = grp.getgrnam("sbuild")
+            return current_user in sbuild_group.gr_mem
+        except (KeyError, OSError):
+            return False
+
+    def _is_sbuild_configured(self) -> bool:
+        """Check if sbuild configuration exists"""
+        config_paths = [
+            os.path.expanduser("~/.config/sbuild/config.pl"),
+            os.path.expanduser("~/.sbuildrc"),
+            "/etc/sbuild/sbuild.conf"
+        ]
+        return any(os.path.exists(path) for path in config_paths)
+
+    def _setup_sbuild_config(self):
+        """Setup basic sbuild configuration"""
+        config_dir = os.path.expanduser("~/.config/sbuild")
+        config_file = os.path.join(config_dir, "config.pl")
+
+        try:
+            os.makedirs(config_dir, exist_ok=True)
+
+            # Create minimal config
+            config_content = """#!/usr/bin/perl
+# deb-mock sbuild configuration
+$chroot_mode = "schroot";
+$schroot = "schroot";
+"""
+
+            with open(config_file, "w") as f:
+                f.write(config_content)
+
+            os.chmod(config_file, 0o644)
+
+        except Exception as e:
+            raise SbuildError(f"Failed to create sbuild configuration: {e}")
+
     def build_package(
         self,
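Because these checks run from __init__, constructing the wrapper fails fast on an unprepared host. One caveat worth noting: grp.getgrnam("sbuild").gr_mem lists only supplementary members, so a user whose primary group is sbuild would still fail the check above. A usage sketch, with the import paths assumed from this commit's module layout:

# Sketch: fail-fast construction on a host without sbuild configured.
from deb_mock.config import Config
from deb_mock.exceptions import SbuildError  # SbuildError's home module is assumed
from deb_mock.sbuild import SbuildWrapper

try:
    wrapper = SbuildWrapper(Config.from_file("deb_mock/configs/debian-trixie-amd64.yaml"))
except SbuildError as err:
    print(f"sbuild environment not ready: {err}")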
@@ -39,6 +108,10 @@ class SbuildWrapper:
             output_dir = os.path.join(tempfile.gettempdir(), "deb-mock-output")
             os.makedirs(output_dir, exist_ok=True)
 
+        # Validate source package
+        if not self._is_valid_source_package(source_package):
+            raise SbuildError(f"Invalid source package: {source_package}")
+
         # Prepare sbuild command
         cmd = self._prepare_sbuild_command(source_package, chroot_name, output_dir, **kwargs)
@@ -49,22 +122,35 @@ class SbuildWrapper:
        env.update(self.config.build_env)

        # Create a temporary log file
-       with tempfile.NamedTemporaryFile(mode="w", suffix=".log", delete=False) as log_file:
-           log_path = log_file.name
+       with tempfile.NamedTemporaryFile(mode="w", suffix=".log", delete=False) as log_path:
+           log_file = log_path.name

        try:
            # Execute sbuild
-           result = self._execute_sbuild(cmd, log_path, env)
+           result = self._execute_sbuild(cmd, log_file, env)

            # Parse build results
-           build_info = self._parse_build_results(output_dir, log_path, result)
+           build_info = self._parse_build_results(output_dir, log_file, result)

            return build_info

        finally:
            # Clean up the temporary log file
-           if os.path.exists(log_path):
-               os.unlink(log_path)
+           if os.path.exists(log_file):
+               os.unlink(log_file)

    def _is_valid_source_package(self, source_package: str) -> bool:
        """Check if the source package is valid"""
        # A directory containing debian/control is a valid unpacked source tree
        if os.path.isdir(source_package):
            control_file = os.path.join(source_package, "debian", "control")
            return os.path.exists(control_file)

        # A .dsc file is a valid packaged source
        if source_package.endswith(".dsc"):
            return os.path.exists(source_package)

        return False

    def _prepare_sbuild_command(self, source_package: str, chroot_name: str, output_dir: str, **kwargs) -> List[str]:
        """Prepare the sbuild command with all necessary options"""

@@ -95,9 +181,6 @@ class SbuildWrapper:
            for option in kwargs["build_options"]:
                cmd.extend(option.split())

-       # Environment variables will be passed to subprocess.run
-       pass

        # Source package
        cmd.append(source_package)

@@ -288,3 +371,66 @@ class SbuildWrapper:
            subprocess.run(cmd, check=True)
        except subprocess.CalledProcessError as e:
            raise SbuildError(f"Failed to install build dependencies: {e}")

    def update_chroot(self, chroot_name: str = None) -> None:
        """Update the chroot to ensure it's current"""
        if chroot_name is None:
            chroot_name = self.config.chroot_name

        try:
            # Update package lists
            cmd = ["schroot", "-c", chroot_name, "--", "apt-get", "update"]
            subprocess.run(cmd, check=True)

            # Upgrade packages
            cmd = ["schroot", "-c", chroot_name, "--", "apt-get", "upgrade", "-y"]
            subprocess.run(cmd, check=True)

        except subprocess.CalledProcessError as e:
            raise SbuildError(f"Failed to update chroot: {e}")

    def get_chroot_info(self, chroot_name: str = None) -> Dict[str, Any]:
        """Get information about a chroot"""
        if chroot_name is None:
            chroot_name = self.config.chroot_name

        info = {
            "name": chroot_name,
            "status": "unknown",
            "architecture": None,
            "distribution": None,
            "packages": [],
        }

        try:
            # Get chroot status
            cmd = ["schroot", "-i", "-c", chroot_name]
            result = subprocess.run(cmd, capture_output=True, text=True, check=True)

            # Parse schroot info output
            for line in result.stdout.split("\n"):
                if ":" in line:
                    key, value = line.split(":", 1)
                    key = key.strip()
                    value = value.strip()

                    if key == "Status":
                        info["status"] = value
                    elif key == "Architecture":
                        info["architecture"] = value
                    elif key == "Distribution":
                        info["distribution"] = value

            # Get the package count; count lines in Python rather than piping
            # through a shell (a list argument combined with shell=True would
            # not run the intended pipeline)
            cmd = ["schroot", "-c", chroot_name, "--", "dpkg", "-l"]
            result = subprocess.run(cmd, capture_output=True, text=True)
            if result.returncode == 0:
                info["package_count"] = len(result.stdout.splitlines())

        except subprocess.CalledProcessError:
            pass

        return info

305
deb_mock/uid_manager.py
Normal file

@@ -0,0 +1,305 @@
"""
|
||||
UID/GID management for deb-mock
|
||||
Based on Fedora Mock's UID management system
|
||||
"""
|
||||
|
||||
import os
|
||||
import grp
|
||||
import pwd
|
||||
import subprocess
|
||||
import logging
|
||||
from contextlib import contextmanager
|
||||
from typing import Optional, Tuple, Dict, Any
|
||||
|
||||
from .exceptions import UIDManagerError
|
||||
|
||||
|
||||
class UIDManager:
|
||||
"""Manages UID/GID operations for deb-mock chroots"""
|
||||
|
||||
def __init__(self, config):
|
||||
self.config = config
|
||||
self.logger = logging.getLogger(__name__)
|
||||
|
||||
# Default user/group configuration
|
||||
self.chroot_user = getattr(config, 'chroot_user', 'build')
|
||||
self.chroot_group = getattr(config, 'chroot_group', 'build')
|
||||
self.chroot_uid = getattr(config, 'chroot_uid', 1000)
|
||||
self.chroot_gid = getattr(config, 'chroot_gid', 1000)
|
||||
|
||||
# Current user information
|
||||
self.current_uid = os.getuid()
|
||||
self.current_gid = os.getgid()
|
||||
self.current_user = pwd.getpwuid(self.current_uid).pw_name
|
||||
|
||||
# Privilege stack for context management
|
||||
self._privilege_stack = []
|
||||
self._environment_stack = []
|
||||
|
||||
# Validate configuration
|
||||
self._validate_config()
|
||||
|
||||
def _validate_config(self):
|
||||
"""Validate UID/GID configuration"""
|
||||
try:
|
||||
# Check if chroot user/group exist on host
|
||||
if hasattr(self.config, 'use_host_user') and self.config.use_host_user:
|
||||
try:
|
||||
pwd.getpwnam(self.chroot_user)
|
||||
grp.getgrnam(self.chroot_group)
|
||||
except KeyError as e:
|
||||
self.logger.warning(f"Host user/group not found: {e}")
|
||||
|
||||
# Validate UID/GID ranges
|
||||
if self.chroot_uid < 1000:
|
||||
self.logger.warning(f"Chroot UID {self.chroot_uid} is below 1000")
|
||||
if self.chroot_gid < 1000:
|
||||
self.logger.warning(f"Chroot GID {self.chroot_gid} is below 1000")
|
||||
|
||||
except Exception as e:
|
||||
raise UIDManagerError(f"UID configuration validation failed: {e}")
|
||||
|
||||
@contextmanager
|
||||
def elevated_privileges(self):
|
||||
"""Context manager for elevated privileges"""
|
||||
self._push_privileges()
|
||||
self._elevate_privileges()
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
self._restore_privileges()
|
||||
|
||||
def _push_privileges(self):
|
||||
"""Save current privilege state"""
|
||||
self._privilege_stack.append({
|
||||
'ruid': os.getuid(),
|
||||
'euid': os.geteuid(),
|
||||
'rgid': os.getgid(),
|
||||
'egid': os.getegid(),
|
||||
})
|
||||
self._environment_stack.append(dict(os.environ))
|
||||
|
||||
def _elevate_privileges(self):
|
||||
"""Elevate to root privileges"""
|
||||
try:
|
||||
os.setregid(0, 0)
|
||||
os.setreuid(0, 0)
|
||||
except PermissionError:
|
||||
raise UIDManagerError("Failed to elevate privileges - requires root access")
|
||||
|
||||
def _restore_privileges(self):
|
||||
"""Restore previous privilege state"""
|
||||
if not self._privilege_stack:
|
||||
return
|
||||
|
||||
privs = self._privilege_stack.pop()
|
||||
env = self._environment_stack.pop()
|
||||
|
||||
# Restore environment
|
||||
os.environ.clear()
|
||||
os.environ.update(env)
|
||||
|
||||
# Restore UID/GID
|
||||
os.setregid(privs['rgid'], privs['egid'])
|
||||
os.setreuid(privs['ruid'], privs['euid'])
|
||||
|
||||
def become_user(self, uid: int, gid: Optional[int] = None) -> None:
|
||||
"""Become a specific user/group"""
|
||||
if gid is None:
|
||||
gid = uid
|
||||
|
||||
self._push_privileges()
|
||||
self._elevate_privileges()
|
||||
os.setregid(gid, gid)
|
||||
os.setreuid(uid, uid)
|
||||
|
||||
def restore_privileges(self) -> None:
|
||||
"""Restore previous privilege state"""
|
||||
self._restore_privileges()
|
||||
|
||||
def change_owner(self, path: str, uid: Optional[int] = None, gid: Optional[int] = None, recursive: bool = False) -> None:
|
||||
"""Change ownership of files/directories"""
|
||||
if uid is None:
|
||||
uid = self.chroot_uid
|
||||
if gid is None:
|
||||
gid = self.chroot_gid
|
||||
|
||||
with self.elevated_privileges():
|
||||
self._tolerant_chown(path, uid, gid)
|
||||
if recursive:
|
||||
for root, dirs, files in os.walk(path):
|
||||
for d in dirs:
|
||||
self._tolerant_chown(os.path.join(root, d), uid, gid)
|
||||
for f in files:
|
||||
self._tolerant_chown(os.path.join(root, f), uid, gid)
|
||||
|
||||
def _tolerant_chown(self, path: str, uid: int, gid: int) -> None:
|
||||
"""Change ownership without raising errors for missing files"""
|
||||
try:
|
||||
os.lchown(path, uid, gid)
|
||||
except OSError as e:
|
||||
if e.errno != 2: # ENOENT - No such file or directory
|
||||
self.logger.warning(f"Failed to change ownership of {path}: {e}")
|
||||
|
||||
def create_chroot_user(self, chroot_path: str) -> None:
|
||||
"""Create the build user in the chroot"""
|
||||
with self.elevated_privileges():
|
||||
try:
|
||||
# Create group first
|
||||
self._create_group_in_chroot(chroot_path, self.chroot_group, self.chroot_gid)
|
||||
|
||||
# Create user
|
||||
self._create_user_in_chroot(chroot_path, self.chroot_user, self.chroot_uid, self.chroot_gid)
|
||||
|
||||
# Setup home directory
|
||||
self._setup_home_directory(chroot_path)
|
||||
|
||||
self.logger.info(f"Created chroot user {self.chroot_user} (UID: {self.chroot_uid}, GID: {self.chroot_gid})")
|
||||
|
||||
except Exception as e:
|
||||
raise UIDManagerError(f"Failed to create chroot user: {e}")
|
||||
|
||||
def _create_group_in_chroot(self, chroot_path: str, group_name: str, gid: int) -> None:
|
||||
"""Create a group in the chroot"""
|
||||
group_file = os.path.join(chroot_path, 'etc', 'group')
|
||||
|
||||
# Check if group already exists
|
||||
if os.path.exists(group_file):
|
||||
with open(group_file, 'r') as f:
|
||||
for line in f:
|
||||
if line.startswith(f"{group_name}:"):
|
||||
return # Group already exists
|
||||
|
||||
# Create group entry
|
||||
group_entry = f"{group_name}:x:{gid}:\n"
|
||||
|
||||
# Ensure /etc directory exists
|
||||
os.makedirs(os.path.dirname(group_file), exist_ok=True)
|
||||
|
||||
# Append to group file
|
||||
with open(group_file, 'a') as f:
|
||||
f.write(group_entry)
|
||||
|
||||
def _create_user_in_chroot(self, chroot_path: str, username: str, uid: int, gid: int) -> None:
|
||||
"""Create a user in the chroot"""
|
||||
passwd_file = os.path.join(chroot_path, 'etc', 'passwd')
|
||||
home_dir = os.path.join(chroot_path, 'home', username)
|
||||
|
||||
# Check if user already exists
|
||||
if os.path.exists(passwd_file):
|
||||
with open(passwd_file, 'r') as f:
|
||||
for line in f:
|
||||
if line.startswith(f"{username}:"):
|
||||
return # User already exists
|
||||
|
||||
# Create user entry
|
||||
user_entry = f"{username}:x:{uid}:{gid}:Build User:/home/{username}:/bin/bash\n"
|
||||
|
||||
# Ensure /etc directory exists
|
||||
os.makedirs(os.path.dirname(passwd_file), exist_ok=True)
|
||||
|
||||
# Append to passwd file
|
||||
with open(passwd_file, 'a') as f:
|
||||
f.write(user_entry)
|
||||
|
||||
def _setup_home_directory(self, chroot_path: str) -> None:
|
||||
"""Setup home directory for the build user"""
|
||||
home_dir = os.path.join(chroot_path, 'home', self.chroot_user)
|
||||
|
||||
# Create home directory
|
||||
os.makedirs(home_dir, exist_ok=True)
|
||||
|
||||
# Set ownership
|
||||
self._tolerant_chown(home_dir, self.chroot_uid, self.chroot_gid)
|
||||
|
||||
# Set permissions
|
||||
os.chmod(home_dir, 0o755)
|
||||
|
||||
def copy_host_user(self, chroot_path: str, username: str) -> None:
|
||||
"""Copy a user from the host system to the chroot"""
|
||||
try:
|
||||
# Get user info from host
|
||||
user_info = pwd.getpwnam(username)
|
||||
uid = user_info.pw_uid
|
||||
gid = user_info.pw_gid
|
||||
|
||||
# Get group info
|
||||
group_info = grp.getgrgid(gid)
|
||||
group_name = group_info.gr_name
|
||||
|
||||
# Create in chroot
|
||||
self._create_group_in_chroot(chroot_path, group_name, gid)
|
||||
self._create_user_in_chroot(chroot_path, username, uid, gid)
|
||||
|
||||
self.logger.info(f"Copied host user {username} (UID: {uid}, GID: {gid}) to chroot")
|
||||
|
||||
except KeyError as e:
|
||||
raise UIDManagerError(f"Host user {username} not found: {e}")
|
||||
except Exception as e:
|
||||
raise UIDManagerError(f"Failed to copy host user {username}: {e}")
|
||||
|
||||
def setup_chroot_permissions(self, chroot_path: str) -> None:
|
||||
"""Setup proper permissions for the chroot"""
|
||||
with self.elevated_privileges():
|
||||
try:
|
||||
# Change ownership of key directories
|
||||
key_dirs = [
|
||||
'home',
|
||||
'tmp',
|
||||
'var/tmp',
|
||||
'var/cache',
|
||||
'var/log'
|
||||
]
|
||||
|
||||
for dir_name in key_dirs:
|
||||
dir_path = os.path.join(chroot_path, dir_name)
|
||||
if os.path.exists(dir_path):
|
||||
self._tolerant_chown(dir_path, self.chroot_uid, self.chroot_gid)
|
||||
|
||||
# Ensure proper permissions on /tmp
|
||||
tmp_path = os.path.join(chroot_path, 'tmp')
|
||||
if os.path.exists(tmp_path):
|
||||
os.chmod(tmp_path, 0o1777)
|
||||
|
||||
self.logger.info("Chroot permissions setup complete")
|
||||
|
||||
except Exception as e:
|
||||
raise UIDManagerError(f"Failed to setup chroot permissions: {e}")
|
||||
|
||||
def get_user_info(self) -> Dict[str, Any]:
|
||||
"""Get current user information"""
|
||||
return {
|
||||
'current_uid': self.current_uid,
|
||||
'current_gid': self.current_gid,
|
||||
'current_user': self.current_user,
|
||||
'chroot_user': self.chroot_user,
|
||||
'chroot_group': self.chroot_group,
|
||||
'chroot_uid': self.chroot_uid,
|
||||
'chroot_gid': self.chroot_gid
|
||||
}
|
||||
|
||||
def validate_chroot_user(self, chroot_path: str) -> bool:
|
||||
"""Validate that the chroot user exists and is properly configured"""
|
||||
passwd_file = os.path.join(chroot_path, 'etc', 'passwd')
|
||||
group_file = os.path.join(chroot_path, 'etc', 'group')
|
||||
|
||||
if not os.path.exists(passwd_file) or not os.path.exists(group_file):
|
||||
return False
|
||||
|
||||
# Check if user exists
|
||||
user_exists = False
|
||||
group_exists = False
|
||||
|
||||
with open(passwd_file, 'r') as f:
|
||||
for line in f:
|
||||
if line.startswith(f"{self.chroot_user}:"):
|
||||
user_exists = True
|
||||
break
|
||||
|
||||
with open(group_file, 'r') as f:
|
||||
for line in f:
|
||||
if line.startswith(f"{self.chroot_group}:"):
|
||||
group_exists = True
|
||||
break
|
||||
|
||||
return user_exists and group_exists
|
||||
1117
docs/API.md
Normal file
File diff suppressed because it is too large
764
docs/DEPLOYMENT.md
Normal file

@@ -0,0 +1,764 @@
# Deb-Mock Deployment Guide

## Overview

This guide covers the deployment of `deb-mock` in various environments, from development to production. `deb-mock` is a sophisticated build environment management tool that provides isolated, reproducible package builds with advanced features like performance monitoring, plugin systems, and comprehensive testing.

## Table of Contents

1. [System Requirements](#system-requirements)
2. [Installation Methods](#installation-methods)
3. [Configuration](#configuration)
4. [Environment Setup](#environment-setup)
5. [Service Deployment](#service-deployment)
6. [Production Deployment](#production-deployment)
7. [Monitoring and Maintenance](#monitoring-and-maintenance)
8. [Troubleshooting](#troubleshooting)
9. [Security Considerations](#security-considerations)
10. [Backup and Recovery](#backup-and-recovery)

## System Requirements

### Minimum Requirements

- **Operating System**: Debian 13+ (Trixie) or Ubuntu 22.04+
- **CPU**: 2 cores (4 recommended)
- **Memory**: 4GB RAM (8GB recommended)
- **Storage**: 20GB available space (50GB recommended)
- **Python**: 3.8+ (3.10+ recommended)

### Recommended Requirements

- **Operating System**: Debian 13+ (Trixie) or Ubuntu 22.04+
- **CPU**: 8+ cores
- **Memory**: 16GB+ RAM
- **Storage**: 100GB+ available space (SSD recommended)
- **Python**: 3.10+

### Required System Packages

```bash
# Debian/Ubuntu
sudo apt update
sudo apt install -y \
    python3 \
    python3-pip \
    python3-venv \
    python3-dev \
    build-essential \
    debootstrap \
    schroot \
    sbuild \
    ccache \
    rsync \
    curl \
    wget \
    git \
    sudo \
    procps \
    sysstat \
    iotop \
    htop

# For advanced features
sudo apt install -y \
    python3-psutil \
    python3-yaml \
    python3-click \
    python3-rich \
    python3-pytest \
    python3-pytest-cov \
    python3-pytest-mock \
    python3-pytest-xdist \
    python3-pytest-timeout \
    python3-pytest-html \
    python3-pytest-json-report \
    python3-coverage
```

## Installation Methods

### Method 1: Python Package Installation (Recommended)

```bash
# Create a virtual environment
python3 -m venv deb-mock-env
source deb-mock-env/bin/activate

# Install from source
git clone https://github.com/your-org/deb-mock.git
cd deb-mock
pip install -e .

# Or install from PyPI (when available)
pip install deb-mock
```

### Method 2: System-wide Installation

```bash
# Install system-wide (requires root)
sudo pip3 install deb-mock

# Or install from a source checkout
sudo pip3 install -e .
```

### Method 3: Docker Installation

```dockerfile
FROM debian:13-slim

# Install system dependencies
RUN apt-get update && apt-get install -y \
    python3 \
    python3-pip \
    debootstrap \
    schroot \
    sbuild \
    ccache \
    && rm -rf /var/lib/apt/lists/*

# Install deb-mock
COPY . /app/deb-mock
WORKDIR /app/deb-mock
RUN pip3 install -e .

# Set up entry point
ENTRYPOINT ["deb-mock"]
```
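
To try the image locally, a build-and-run sequence along these lines should work; the image tag, volume path, and package path are illustrative, and `--privileged` (or equivalent fine-grained capabilities) is typically required for debootstrap/schroot inside a container:

```bash
# Build the image from the repository root
docker build -t deb-mock:local .

# Run a one-off package build (paths are examples)
docker run --rm --privileged \
    -v "$PWD/packages:/work" \
    deb-mock:local build /work/my-package
```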
## Configuration

### Configuration File Structure

`deb-mock` uses YAML configuration files. The main configuration file is typically located at:

- **User config**: `~/.config/deb-mock/config.yaml`
- **System config**: `/etc/deb-mock/config.yaml`
- **Project config**: `./deb-mock.yaml`

### Basic Configuration Example

```yaml
# deb-mock.yaml
chroot:
  base_dir: /var/lib/deb-mock/chroots
  suite: trixie
  architecture: amd64
  mirror: http://deb.debian.org/debian/
  components: [main, contrib, non-free]

cache:
  enabled: true
  base_dir: /var/cache/deb-mock
  ccache_size_mb: 2048
  root_cache_size_mb: 5120
  package_cache_size_mb: 1024

sbuild:
  enabled: true
  user: sbuild
  group: sbuild
  chroot_suffix: -sbuild
  build_user: buildd

performance:
  enable_performance_monitoring: true
  performance_metrics_dir: /var/log/deb-mock/performance
  performance_retention_days: 30
  performance_auto_optimization: true
  performance_benchmark_iterations: 10
  performance_reporting: true

plugins:
  enabled: true
  plugin_dir: /usr/local/lib/deb-mock/plugins
  auto_load: true

parallel:
  enabled: true
  max_parallel_builds: 4
  max_parallel_chroots: 8

mounts:
  proc: true
  sys: true
  dev: true
  tmpfs: true
  bind_mounts:
    - source: /var/cache/apt/archives
      target: /var/cache/apt/archives
      options: [ro]
  overlay_mounts:
    - source: /var/cache/deb-mock/overlay
      target: /var/cache/deb-mock/overlay

uid_management:
  enabled: true
  create_users: true
  copy_host_users: true
  privilege_escalation: true
```

### Environment Variables

```bash
# Core configuration
export DEB_MOCK_CONFIG=/path/to/config.yaml
export DEB_MOCK_CHROOT_DIR=/var/lib/deb-mock/chroots
export DEB_MOCK_CACHE_DIR=/var/cache/deb-mock

# Performance monitoring
export DEB_MOCK_PERFORMANCE_DIR=/var/log/deb-mock/performance
export DEB_MOCK_ENABLE_PERFORMANCE_MONITORING=true

# Plugin system
export DEB_MOCK_PLUGIN_DIR=/usr/local/lib/deb-mock/plugins
export DEB_MOCK_AUTO_LOAD_PLUGINS=true

# Logging
export DEB_MOCK_LOG_LEVEL=INFO
export DEB_MOCK_LOG_FILE=/var/log/deb-mock/deb-mock.log
```

## Environment Setup

### User Setup

```bash
# Create the deb-mock user
sudo useradd -m -s /bin/bash deb-mock
sudo usermod -aG sbuild deb-mock

# Set up the user environment (explicit paths: '~' would expand to the
# invoking user's home, not deb-mock's)
sudo -u deb-mock mkdir -p /home/deb-mock/.config/deb-mock
sudo -u deb-mock mkdir -p /home/deb-mock/.cache/deb-mock
sudo -u deb-mock mkdir -p /home/deb-mock/deb-mock-workspace

# Configure sbuild for the user (sbuild-adduser must run as root)
sudo -u deb-mock sbuild-update --keygen
sudo sbuild-adduser deb-mock
```

### Directory Structure Setup

```bash
# Create the necessary directories
sudo mkdir -p /var/lib/deb-mock/chroots
sudo mkdir -p /var/cache/deb-mock/{ccache,root,packages,overlay}
sudo mkdir -p /var/log/deb-mock/{performance,logs}
sudo mkdir -p /usr/local/lib/deb-mock/plugins

# Set proper ownership
sudo chown -R deb-mock:deb-mock /var/lib/deb-mock
sudo chown -R deb-mock:deb-mock /var/cache/deb-mock
sudo chown -R deb-mock:deb-mock /var/log/deb-mock
sudo chown -R deb-mock:deb-mock /usr/local/lib/deb-mock

# Set proper permissions for sbuild
sudo chown -R deb-mock:sbuild /var/lib/deb-mock/chroots
sudo chmod 775 /var/lib/deb-mock/chroots
```

### Sbuild Configuration

```bash
# Configure sbuild for the deb-mock user; tee (run via sudo) writes the
# file, since a plain '>' redirect would execute as the invoking user
sudo -u deb-mock mkdir -p /home/deb-mock/.config/sbuild
sudo -u deb-mock tee /home/deb-mock/.config/sbuild/config.pl > /dev/null << 'EOF'
$build_arch = 'amd64';
$build_arch_all = 1;
$build_source = 1;
$build_binary = 1;
$build_arch_indep = 1;
$build_profiles = ['default'];
$build_environment = ['debian'];
$build_suite = 'trixie';
$build_components = ['main', 'contrib', 'non-free'];
$build_mirror = 'http://deb.debian.org/debian/';
$build_indep = 1;
1;
EOF
```

## Service Deployment

### Systemd Service (Recommended)

Create a systemd service file for production deployments:

```ini
# /etc/systemd/system/deb-mock.service
[Unit]
Description=Deb-Mock Build Service
After=network.target
Wants=network.target

[Service]
Type=simple
User=deb-mock
Group=deb-mock
Environment=DEB_MOCK_CONFIG=/etc/deb-mock/config.yaml
Environment=DEB_MOCK_LOG_LEVEL=INFO
Environment=DEB_MOCK_LOG_FILE=/var/log/deb-mock/deb-mock.log
WorkingDirectory=/var/lib/deb-mock
ExecStart=/usr/local/bin/deb-mock service start
ExecReload=/bin/kill -HUP $MAINPID
Restart=always
RestartSec=10
StandardOutput=journal
StandardError=journal

# Security settings
NoNewPrivileges=true
PrivateTmp=true
ProtectSystem=strict
ProtectHome=true
ReadWritePaths=/var/lib/deb-mock /var/cache/deb-mock /var/log/deb-mock

[Install]
WantedBy=multi-user.target
```

### Service Management

```bash
# Enable and start the service
sudo systemctl daemon-reload
sudo systemctl enable deb-mock.service
sudo systemctl start deb-mock.service

# Check service status
sudo systemctl status deb-mock.service

# View logs
sudo journalctl -u deb-mock.service -f

# Restart the service
sudo systemctl restart deb-mock.service
```

### Docker Compose Deployment

```yaml
# docker-compose.yml
version: '3.8'

services:
  deb-mock:
    build: .
    container_name: deb-mock
    restart: unless-stopped
    environment:
      - DEB_MOCK_CONFIG=/etc/deb-mock/config.yaml
      - DEB_MOCK_LOG_LEVEL=INFO
    volumes:
      - ./config:/etc/deb-mock:ro
      - deb-mock-chroots:/var/lib/deb-mock/chroots
      - deb-mock-cache:/var/cache/deb-mock
      - deb-mock-logs:/var/log/deb-mock
    ports:
      - "8080:8080"
    networks:
      - deb-mock-network

volumes:
  deb-mock-chroots:
  deb-mock-cache:
  deb-mock-logs:

networks:
  deb-mock-network:
    driver: bridge
```
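
With the compose file saved next to the Dockerfile, the standard compose workflow applies:

```bash
# Build and start the stack in the background
docker compose up -d --build

# Follow the service logs
docker compose logs -f deb-mock
```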
## Production Deployment

### High Availability Setup

```bash
# Load balancer configuration (nginx)
sudo apt install nginx

# Create the nginx configuration
sudo tee /etc/nginx/sites-available/deb-mock << 'EOF'
upstream deb-mock_backend {
    server 127.0.0.1:8080;
    server 127.0.0.1:8081;
    server 127.0.0.1:8082;
}

server {
    listen 80;
    server_name deb-mock.yourdomain.com;

    location / {
        proxy_pass http://deb-mock_backend;
        proxy_set_header Host $host;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header X-Forwarded-Proto $scheme;
    }
}
EOF

# Enable the site
sudo ln -s /etc/nginx/sites-available/deb-mock /etc/nginx/sites-enabled/
sudo nginx -t
sudo systemctl reload nginx
```

### Monitoring Setup

```bash
# Install monitoring tools (Debian packages the exporter as
# prometheus-node-exporter; Grafana is installed from its own repository)
sudo apt install -y prometheus prometheus-node-exporter grafana

# Configure Prometheus
sudo tee /etc/prometheus/prometheus.yml << 'EOF'
global:
  scrape_interval: 15s

scrape_configs:
  - job_name: 'deb-mock'
    static_configs:
      - targets: ['localhost:8080']
    metrics_path: /metrics
    scrape_interval: 5s

  - job_name: 'node-exporter'
    static_configs:
      - targets: ['localhost:9100']
EOF

# Start the monitoring services
sudo systemctl enable prometheus prometheus-node-exporter grafana-server
sudo systemctl start prometheus prometheus-node-exporter grafana-server
```

### Backup Strategy

```bash
# Create the backup script
sudo tee /usr/local/bin/deb-mock-backup << 'EOF'
#!/bin/bash

BACKUP_DIR="/var/backups/deb-mock"
DATE=$(date +%Y%m%d_%H%M%S)

# Create the backup directory
mkdir -p "$BACKUP_DIR"

# Back up the configuration
tar -czf "$BACKUP_DIR/config_$DATE.tar.gz" -C /etc deb-mock

# Back up the chroots (excluding temporary files)
tar -czf "$BACKUP_DIR/chroots_$DATE.tar.gz" \
    --exclude='*/tmp/*' \
    --exclude='*/var/tmp/*' \
    -C /var/lib deb-mock/chroots

# Back up the cache
tar -czf "$BACKUP_DIR/cache_$DATE.tar.gz" -C /var/cache deb-mock

# Back up the logs
tar -czf "$BACKUP_DIR/logs_$DATE.tar.gz" -C /var/log deb-mock

# Clean up old backups (keep the last 7 days)
find "$BACKUP_DIR" -name "*.tar.gz" -mtime +7 -delete

echo "Backup completed: $BACKUP_DIR"
EOF

# Make it executable and set up cron
sudo chmod +x /usr/local/bin/deb-mock-backup
sudo crontab -e
# Add: 0 2 * * * /usr/local/bin/deb-mock-backup
```

## Monitoring and Maintenance

### Health Checks

```bash
# Create the health check script
sudo tee /usr/local/bin/deb-mock-health << 'EOF'
#!/bin/bash

# Check service status
if ! systemctl is-active --quiet deb-mock.service; then
    echo "ERROR: deb-mock service is not running"
    exit 1
fi

# Check disk space
DISK_USAGE=$(df /var/lib/deb-mock | tail -1 | awk '{print $5}' | sed 's/%//')
if [ "$DISK_USAGE" -gt 90 ]; then
    echo "WARNING: Disk usage is ${DISK_USAGE}%"
fi

# Check memory usage
MEM_USAGE=$(free | grep Mem | awk '{printf("%.0f", $3/$2 * 100.0)}')
if [ "$MEM_USAGE" -gt 90 ]; then
    echo "WARNING: Memory usage is ${MEM_USAGE}%"
fi

# Check chroot health
if ! deb-mock status >/dev/null 2>&1; then
    echo "ERROR: deb-mock status check failed"
    exit 1
fi

echo "OK: All health checks passed"
EOF

sudo chmod +x /usr/local/bin/deb-mock-health
```
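
A cron entry (or a systemd timer) can then run the check periodically; the schedule and log path below are illustrative:

```bash
# Run the health check every 5 minutes and append results to a log
*/5 * * * * /usr/local/bin/deb-mock-health >> /var/log/deb-mock/health.log 2>&1
```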
### Log Rotation

```bash
# Configure log rotation
sudo tee /etc/logrotate.d/deb-mock << 'EOF'
/var/log/deb-mock/*.log {
    daily
    missingok
    rotate 52
    compress
    delaycompress
    notifempty
    create 644 deb-mock deb-mock
    postrotate
        systemctl reload deb-mock.service >/dev/null 2>&1 || true
    endscript
}
EOF
```

### Performance Monitoring

```bash
# Set up performance monitoring
sudo -u deb-mock mkdir -p /var/log/deb-mock/performance

# Create the performance monitoring script
sudo tee /usr/local/bin/deb-mock-performance << 'EOF'
#!/bin/bash

# Generate a performance report
deb-mock performance-report --output /var/log/deb-mock/performance/report_$(date +%Y%m%d_%H%M%S).html

# Clean up old reports (keep the last 30 days)
find /var/log/deb-mock/performance -name "report_*.html" -mtime +30 -delete

# Generate a benchmark report if requested
if [ "$1" = "benchmark" ]; then
    deb-mock benchmark --template standard --iterations 20
fi
EOF

sudo chmod +x /usr/local/bin/deb-mock-performance
```

## Troubleshooting

### Common Issues

#### Service Won't Start

```bash
# Check service status
sudo systemctl status deb-mock.service

# Check logs
sudo journalctl -u deb-mock.service -n 50

# Check configuration
deb-mock --config /etc/deb-mock/config.yaml validate

# Check permissions
sudo ls -la /var/lib/deb-mock/
sudo ls -la /var/cache/deb-mock/
```

#### Chroot Issues

```bash
# List chroots
deb-mock list-chroots

# Check chroot status
deb-mock status

# Clean up broken chroots
deb-mock cleanup --force

# Rebuild a chroot
deb-mock create-chroot --suite trixie --architecture amd64
```

#### Performance Issues

```bash
# Check performance metrics
deb-mock performance-summary

# Run a performance analysis
deb-mock performance-analysis

# Generate a performance report
deb-mock performance-report

# Run benchmarks
deb-mock benchmark --template comprehensive
```

### Debug Mode

```bash
# Enable debug logging
export DEB_MOCK_LOG_LEVEL=DEBUG
export DEB_MOCK_DEBUG=true

# Run with verbose output
deb-mock --verbose --debug build package-name

# Check system resources
deb-mock --debug status
```

## Security Considerations

### User Isolation

```bash
# Create a dedicated system user for deb-mock
sudo useradd -r -s /bin/false -d /var/lib/deb-mock deb-mock

# Set up proper file permissions
sudo chown -R deb-mock:deb-mock /var/lib/deb-mock
sudo chmod 750 /var/lib/deb-mock/chroots
sudo chmod 640 /var/log/deb-mock/*.log
```

### Network Security

```bash
# Configure the firewall
sudo ufw allow from 192.168.1.0/24 to any port 8080
sudo ufw enable

# Use a reverse proxy with SSL
sudo apt install certbot python3-certbot-nginx
sudo certbot --nginx -d deb-mock.yourdomain.com
```

### Access Control

```bash
# Set up API key authentication
export DEB_MOCK_API_KEY=your-secure-api-key

# Configure RBAC
sudo tee /etc/deb-mock/rbac.yaml << 'EOF'
roles:
  admin:
    permissions: ["*"]
  builder:
    permissions: ["build", "status", "logs"]
  viewer:
    permissions: ["status", "logs"]

users:
  admin@example.com:
    role: admin
  builder@example.com:
    role: builder
  viewer@example.com:
    role: viewer
EOF
```

## Backup and Recovery

### Automated Backups

```bash
# Create the full-backup script
sudo tee /usr/local/bin/deb-mock-backup-full << 'EOF'
#!/bin/bash

BACKUP_DIR="/var/backups/deb-mock/full"
DATE=$(date +%Y%m%d_%H%M%S)

# Create the backup directory
mkdir -p "$BACKUP_DIR"

# Stop the service
systemctl stop deb-mock.service

# Create the full backup (paths stored relative to / so that the archive
# can be restored with a single extract)
tar -czf "$BACKUP_DIR/full_backup_$DATE.tar.gz" \
    -C / \
    var/lib/deb-mock \
    var/cache/deb-mock \
    var/log/deb-mock \
    etc/deb-mock

# Start the service
systemctl start deb-mock.service

# Clean up old backups (keep the last 30 days)
find "$BACKUP_DIR" -name "full_backup_*.tar.gz" -mtime +30 -delete

echo "Full backup completed: $BACKUP_DIR/full_backup_$DATE.tar.gz"
EOF

sudo chmod +x /usr/local/bin/deb-mock-backup-full
```

### Recovery Procedures

```bash
# Restore from a backup
sudo systemctl stop deb-mock.service

# Extract the backup
sudo tar -xzf /var/backups/deb-mock/full/full_backup_YYYYMMDD_HHMMSS.tar.gz -C /

# Fix permissions
sudo chown -R deb-mock:deb-mock /var/lib/deb-mock
sudo chown -R deb-mock:deb-mock /var/cache/deb-mock
sudo chown -R deb-mock:deb-mock /var/log/deb-mock

# Start the service
sudo systemctl start deb-mock.service

# Verify the recovery
deb-mock status
```

## Conclusion

This deployment guide provides comprehensive instructions for deploying `deb-mock` in various environments. For production deployments, ensure you have proper monitoring, backup, and security measures in place.

For additional support and troubleshooting, refer to the main documentation or contact the development team.

## Additional Resources

- [Main Documentation](../README.md)
- [Configuration Guide](CONFIGURATION.md)
- [Performance Monitoring](PERFORMANCE_MONITORING.md)
- [Plugin System](PLUGIN_SYSTEM.md)
- [Testing Guide](TESTING.md)
- [API Reference](API.md)
525
docs/PERFORMANCE_MONITORING.md
Normal file

@@ -0,0 +1,525 @@
# Deb-Mock Performance Monitoring and Optimization

## Overview

The `deb-mock` performance monitoring and optimization system provides comprehensive insights into build performance, automatic optimization suggestions, and detailed performance analytics. This system enables users to identify bottlenecks, optimize build configurations, and maintain optimal performance across different build environments.

## Features

- **Real-time Performance Monitoring** - Track CPU, memory, disk I/O, and network usage during builds
- **Build Profiling** - Detailed analysis of each build phase with performance metrics
- **Automatic Optimization** - AI-driven suggestions for improving build performance
- **Benchmarking** - Multi-iteration performance testing for accurate measurements
- **Performance Reporting** - Comprehensive reports and visualizations
- **Resource Utilization Analysis** - Identify resource bottlenecks and optimization opportunities
- **Cache Performance Tracking** - Monitor cache hit rates and effectiveness
- **Automatic Tuning** - Apply optimization recommendations automatically

## Architecture

### Core Components

1. **PerformanceMonitor** - Real-time monitoring and metrics collection
2. **PerformanceOptimizer** - Analysis and optimization recommendations
3. **PerformanceReporter** - Report generation and data export
4. **Build Profiles** - Detailed performance tracking for individual builds
5. **System Monitoring** - Background system resource tracking

### Data Flow

```
Build Operation → Performance Monitor → Metrics Collection → Build Profile → Analysis → Optimization Suggestions
```

## Configuration

### Performance Monitoring Settings

```yaml
# Performance monitoring configuration
enable_performance_monitoring: true
performance_metrics_dir: "./performance-metrics"
performance_retention_days: 30
performance_auto_optimization: false
performance_benchmark_iterations: 3
performance_reporting: true
```

### Configuration Options

- **`enable_performance_monitoring`** - Enable/disable performance monitoring (default: true)
- **`performance_metrics_dir`** - Directory for storing performance data (default: "./performance-metrics")
- **`performance_retention_days`** - How long to keep performance data (default: 30)
- **`performance_auto_optimization`** - Automatically apply optimization suggestions (default: false)
- **`performance_benchmark_iterations`** - Number of iterations for benchmarking (default: 3)
- **`performance_reporting`** - Enable performance reporting (default: true)

## Usage

### CLI Commands

#### Performance Summary

```bash
# Show overall performance statistics
deb-mock performance-summary
```

**Output Example:**
```
=== Performance Summary ===
Total Operations: 15
Total Duration: 1250.45s
Average Duration: 83.36s
Active Operations: 0

=== Operation Statistics ===
package_build:
  Count: 5
  Avg Duration: 45.23s
  Min Duration: 32.10s
  Max Duration: 67.89s

chroot_creation:
  Count: 3
  Avg Duration: 12.45s
  Min Duration: 8.90s
  Max Duration: 18.20s
```

#### Benchmarking Operations

```bash
# Benchmark a specific operation
deb-mock benchmark "chroot_creation" --function "init_chroot" --iterations 5

# Benchmark build operations
deb-mock benchmark "package_build" --function "build" --iterations 3
```

**Output Example:**
```
=== Benchmark Results for chroot_creation ===
Iterations: 5
Average Duration: 12.45s
Min Duration: 8.90s
Max Duration: 18.20s
Variance: 12.3456
```

#### Performance Reports

```bash
# Generate a comprehensive performance report
deb-mock performance-report

# Generate a report with a custom output file
deb-mock performance-report --output-file "my_performance_report.txt"
```

#### Build Profile Reports

```bash
# Generate a detailed report for a specific build
deb-mock build-profile-report "build_1234567890"

# Generate a report with a custom output file
deb-mock build-profile-report "build_1234567890" --output-file "build_analysis.txt"
```

#### Performance Analysis

```bash
# Analyze performance and generate optimization suggestions
deb-mock performance-analysis
```

**Output Example:**
```
=== Analysis 1: test-package ===
Performance Score: 85/100

Optimization Suggestions:
  • Consider enabling parallel builds for faster execution
  • Review chroot caching strategy for better performance

Automatic Tuning Recommendations:
  • Low CPU utilization suggests room for more parallel builds
    Current: 2
    Suggested: 3

Manual Optimization Recommendations:
  • Consider using tmpfs for /tmp to improve I/O performance
  • Review and optimize chroot package selection
```

#### Optimization

```bash
# Show optimization recommendations
deb-mock optimize

# Automatically apply optimizations
deb-mock optimize --auto-apply
```

#### Metrics Management

```bash
# Export performance metrics
deb-mock export-metrics

# Export to a specific file
deb-mock export-metrics --output-file "performance_data.json"

# Clean up old metrics
deb-mock cleanup-metrics
```

### Programmatic Usage

#### Basic Performance Monitoring

```python
from deb_mock.config import Config
from deb_mock.core import DebMock

# Create a configuration with performance monitoring enabled
config = Config(
    enable_performance_monitoring=True,
    performance_auto_optimization=True
)

# Initialize deb-mock
deb_mock = DebMock(config)

# Build a package (performance monitoring happens automatically)
result = deb_mock.build("source-package")

# Get a performance summary
summary = deb_mock.performance_monitor.get_performance_summary()
print(f"Total operations: {summary['total_operations']}")
```

#### Custom Performance Monitoring

```python
# Monitor custom operations
with deb_mock.performance_monitor.monitor_operation("custom_operation") as op_id:
    # Your custom operation here
    result = perform_custom_operation()

# Add the metrics recorded for op_id to the current build profile
# ('metrics' stands for the PerformanceMetrics collected above)
if hasattr(deb_mock, 'current_build_profile'):
    deb_mock.performance_monitor.add_phase_metrics(
        deb_mock.current_build_profile, "custom_operation", metrics
    )
```

#### Benchmarking Custom Functions

```python
def my_custom_function():
    # Your function implementation
    pass

# Benchmark the function
result = deb_mock.performance_monitor.benchmark_operation(
    "my_custom_function", my_custom_function, iterations=5
)

print(f"Average duration: {result['average_duration']:.2f}s")
```

## Performance Metrics

### Collected Metrics

#### Operation Metrics

- **Duration** - Time taken for the operation
- **CPU Usage** - CPU utilization during the operation
- **Memory Usage** - Memory consumption and changes
- **Disk I/O** - Read/write operations and data transfer
- **Network I/O** - Network data transfer
- **Chroot Size** - Size of the chroot environment
- **Cache Hit Rate** - Effectiveness of caching
- **Parallel Efficiency** - Efficiency of parallel operations
- **Resource Utilization** - Overall resource usage

#### System Metrics

- **CPU Percentage** - Overall system CPU usage
- **Memory Percentage** - System memory utilization
- **Disk Usage** - Available disk space
- **Active Operations** - Currently running operations
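
The `BuildProfile` structure below stores one `PerformanceMetrics` record per build phase. As a rough sketch of such a record, with field names inferred from the metrics listed above (the actual class in `deb_mock` may differ):

```python
from dataclasses import dataclass

@dataclass
class PerformanceMetrics:
    operation: str         # e.g. "package_build"
    duration: float        # seconds spent in the operation
    cpu_percent: float     # average CPU utilization
    memory_mb: float       # peak memory consumption
    disk_read_mb: float    # data read during the operation
    disk_write_mb: float   # data written during the operation
    network_mb: float      # network transfer
    cache_hit_rate: float  # 0.0 (all misses) to 1.0 (all hits)
```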

### Build Profile Structure

```python
@dataclass
class BuildProfile:
    build_id: str                          # Unique build identifier
    package_name: str                      # Name of the package being built
    architecture: str                      # Target architecture
    suite: str                             # Debian suite
    total_duration: float                  # Total build time
    phases: Dict[str, PerformanceMetrics]  # Performance data for each phase
    resource_peak: Dict[str, float]        # Peak resource usage
    cache_performance: Dict[str, float]    # Cache performance metrics
    optimization_suggestions: List[str]    # Generated optimization suggestions
    timestamp: datetime                    # When the build was performed
```

## Optimization System

### Automatic Optimization Rules

The rules below translate directly into tuning decisions; a sketch of the parallel-build rule follows these lists.

#### Parallel Build Optimization

- **Low CPU Usage (< 60%)** - Increase parallel builds
- **High CPU Usage (> 90%)** - Decrease parallel builds
- **Optimal Range** - 70-85% CPU utilization

#### Cache Optimization

- **Low Hit Rate (< 30%)** - Increase cache size
- **Medium Hit Rate (30-70%)** - Review cache strategy
- **High Hit Rate (> 70%)** - Optimal performance

#### Resource Optimization

- **Memory Usage > 2GB** - Enable ccache, review dependencies
- **Disk I/O High** - Use tmpfs for temporary files
- **Network Usage High** - Review mirror configuration
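
A minimal sketch of how the CPU-utilization thresholds above could be applied to the parallel build count; the function name and signature are assumptions, not the shipped API:

```python
def suggest_parallel_builds(avg_cpu_percent: float, current: int) -> int:
    """Nudge the parallel build count toward the 70-85% CPU target band."""
    if avg_cpu_percent < 60:
        return current + 1  # underutilized: room for another worker
    if avg_cpu_percent > 90:
        return max(1, current - 1)  # saturated: back off
    return current  # within the acceptable range, leave it alone
```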

### Optimization Suggestions

#### Performance-Based

1. **Duration > 5 minutes** - Enable parallel builds, optimize chroot caching
2. **Duration > 10 minutes** - Review the entire build process, consider system upgrades

#### Resource-Based

1. **CPU > 80%** - Limit parallel jobs, optimize build processes
2. **Memory > 2GB** - Enable ccache, review package dependencies
3. **Disk I/O High** - Use tmpfs, optimize chroot structure

#### Cache-Based

1. **Hit Rate < 50%** - Increase cache size, review retention policy
2. **Cache Misses High** - Optimize cache invalidation strategy

## Reporting and Analysis

### Performance Reports

#### Comprehensive Report

- **Performance Summary** - Overall statistics and trends
- **Operation Breakdown** - Detailed analysis of each operation type
- **System Statistics** - Resource utilization patterns
- **Optimization History** - Applied optimizations and their effects

#### Build Profile Report

- **Build Information** - Package details and build parameters
- **Phase Breakdown** - Performance analysis of each build phase
- **Resource Peaks** - Maximum resource usage during the build
- **Cache Performance** - Cache effectiveness metrics
- **Optimization Suggestions** - Specific recommendations for improvement

### Data Export

#### JSON Export

```json
{
  "export_timestamp": "2024-08-19T12:00:00",
  "summary": {
    "total_operations": 15,
    "total_duration": 1250.45,
    "average_duration": 83.36
  },
  "build_profiles": {
    "profile_1": {
      "build_id": "build_1234567890",
      "package_name": "test-package",
      "total_duration": 45.23
    }
  },
  "operation_history": [
    {
      "operation": "package_build",
      "duration": 45.23,
      "cpu_percent": 75.5
    }
  ]
}
```

#### Text Reports

- **Human-readable format** - Easy-to-understand performance summaries
- **Detailed breakdowns** - Phase-by-phase performance analysis
- **Optimization suggestions** - Actionable recommendations

## Best Practices

### Performance Monitoring

1. **Enable by Default** - Always enable performance monitoring in production
2. **Regular Analysis** - Analyze performance data weekly
3. **Trend Tracking** - Monitor performance trends over time
4. **Resource Planning** - Use performance data for capacity planning

### Optimization

1. **Start Conservative** - Begin with conservative optimization settings
2. **Monitor Effects** - Track the impact of optimizations
3. **Gradual Changes** - Apply optimizations incrementally
4. **Test Thoroughly** - Validate optimizations in test environments

### Data Management

1. **Regular Cleanup** - Clean up old metrics monthly
2. **Data Retention** - Keep performance data for at least 30 days
3. **Export Important Data** - Export critical performance data before cleanup
4. **Backup Metrics** - Include performance metrics in system backups

## Troubleshooting

### Common Issues

#### Performance Monitoring Not Working

**Symptoms**: No performance data available, commands return empty results

**Solutions**:
1. Check that `enable_performance_monitoring` is set to `true`
2. Verify that the `psutil` package is installed
3. Check permissions on the metrics directory
4. Restart deb-mock to reinitialize monitoring

#### High Memory Usage

**Symptoms**: Memory usage > 2GB, build failures due to memory pressure

**Solutions**:
1. Enable ccache to reduce compilation memory usage
2. Reduce the parallel build count
3. Review and optimize the chroot package selection
4. Increase system swap space

#### Slow Build Performance

**Symptoms**: Build duration > 5 minutes, high resource utilization

**Solutions**:
1. Enable parallel builds (if CPU usage allows)
2. Optimize the chroot caching strategy
3. Use tmpfs for temporary files
4. Review build dependencies for unnecessary packages

#### Cache Performance Issues

**Symptoms**: Low cache hit rate, frequent cache misses

**Solutions**:
1. Increase the cache size
2. Review the cache retention policy
3. Optimize the cache invalidation strategy
4. Check disk space availability

### Debug Mode

Enable debug mode for detailed performance information:

```bash
export DEB_MOCK_DEBUG=1
deb-mock performance-summary
```

### Getting Help

- **Performance Reports** - Generate detailed reports for analysis
- **Log Files** - Check the deb-mock logs for performance-related errors
- **System Monitoring** - Use system tools to verify resource usage
- **Community Support** - Check project issues and discussions

## Future Enhancements

### Planned Features

- **Real-time Dashboard** - Web-based performance monitoring interface
- **Machine Learning Optimization** - AI-driven optimization suggestions
- **Performance Alerts** - Automated alerts for performance issues
- **Integration with Monitoring Systems** - Prometheus, Grafana, etc.
- **Performance Regression Detection** - Automatic detection of performance degradation

### Extension Points

- **Custom Metrics** - User-defined performance metrics
- **Performance Plugins** - Extensible performance monitoring
- **External Integrations** - Third-party monitoring system support
- **Performance Testing Framework** - Automated performance testing

## Integration Examples

### CI/CD Integration

```yaml
# GitHub Actions example
- name: Performance Analysis
  run: |
    deb-mock performance-analysis
    deb-mock performance-report --output-file "performance_report.txt"

- name: Upload Performance Report
  uses: actions/upload-artifact@v2
  with:
    name: performance-report
    path: performance_report.txt
```

### Monitoring System Integration

```python
# Prometheus metrics export
from prometheus_client import Gauge, Histogram

# Create metrics
build_duration = Histogram('deb_mock_build_duration_seconds', 'Build duration in seconds')
cpu_usage = Gauge('deb_mock_cpu_usage_percent', 'CPU usage percentage')

# Export metrics
def export_prometheus_metrics(performance_monitor):
    summary = performance_monitor.get_performance_summary()

    for metrics in performance_monitor._operation_history:
        build_duration.observe(metrics.duration)
        cpu_usage.set(metrics.cpu_percent)
```

### Performance Testing

```python
# Automated performance testing
def test_build_performance():
    config = Config(enable_performance_monitoring=True)
    deb_mock = DebMock(config)

    # Run the benchmark; wrap build in a lambda so the benchmark harness
    # can call it without arguments
    result = deb_mock.performance_monitor.benchmark_operation(
        "test_build", lambda: deb_mock.build("test-package"), iterations=5
    )

    # Assert performance requirements
    assert result['average_duration'] < 60.0, "Build too slow"
    assert result['variance'] < 10.0, "Build performance inconsistent"

    print("✅ Performance requirements met")
```

This comprehensive performance monitoring and optimization system provides the tools needed to maintain optimal build performance, identify bottlenecks, and continuously improve the deb-mock build system.
322
docs/PLUGIN_SYSTEM.md
Normal file

@@ -0,0 +1,322 @@
# Deb-Mock Plugin System

## Overview

The deb-mock plugin system provides a powerful and extensible way to customize build behavior, add new features, and integrate with external tools. It is based on Fedora Mock's proven plugin architecture, adapted specifically for Debian-based build environments.

## Features

- **Hook-based architecture** - Plugins can hook into various stages of the build process
- **Dynamic loading** - Plugins are loaded at runtime based on configuration
- **API versioning** - Ensures compatibility between deb-mock versions and plugins
- **Configuration-driven** - Rich configuration options for each plugin
- **Error handling** - Robust error handling with required vs. optional plugins
- **Base classes** - Helper classes for easier plugin development

## Architecture

### Core Components

1. **PluginManager** - Main plugin orchestration class
2. **HookStages** - Standard hook stages for plugins
3. **BasePlugin** - Base class for plugin development
4. **Plugin Configuration** - YAML-based plugin configuration

### Hook Stages

The plugin system provides hooks at various stages of the build process; a sketch of the dispatch mechanism follows the lists below.

#### Chroot Lifecycle
- `prechroot_init` - Before chroot initialization
- `postchroot_init` - After chroot initialization
- `prechroot_clean` - Before chroot cleanup
- `postchroot_clean` - After chroot cleanup

#### Build Lifecycle
- `prebuild` - Before the build starts
- `postbuild` - After the build completes
- `build_start` - When the build begins
- `build_end` - When the build ends

#### Package Management
- `pre_install_deps` - Before installing dependencies
- `post_install_deps` - After installing dependencies
- `pre_install_package` - Before installing packages
- `post_install_package` - After installing packages

#### Mount Management
- `pre_mount` - Before mounting filesystems
- `post_mount` - After mounting filesystems
- `pre_unmount` - Before unmounting filesystems
- `post_unmount` - After unmounting filesystems

#### Cache Management
- `pre_cache_create` - Before creating caches
- `post_cache_create` - After creating caches
- `pre_cache_restore` - Before restoring caches
- `post_cache_restore` - After restoring caches

#### Parallel Build Hooks
- `pre_parallel_build` - Before parallel builds
- `post_parallel_build` - After parallel builds
- `parallel_build_start` - When a parallel build starts
- `parallel_build_end` - When a parallel build ends

#### Error Handling
- `on_error` - When errors occur
- `on_warning` - When warnings occur
|
||||
|
||||
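As an orientation, the sketch below shows how a build step might fire these stages through the `PluginManager` API documented in the API Reference (`add_hook`/`call_hooks`). It assumes `PluginManager` is importable from `deb_mock.plugin` alongside `BasePlugin` and `HookStages`, and that `HookStages.PREBUILD`/`POSTBUILD` mirror the stage names above; the hook arguments match the plugin examples later in this document.

```python
# Minimal sketch of hook dispatch, using only the PluginManager methods
# documented in the API Reference below.
from deb_mock.plugin import HookStages, PluginManager

manager = PluginManager(config)  # 'config' is an already-loaded Config object


def announce(source_package, **kwargs):
    # Illustrative hook body; real plugins register methods the same way.
    print(f"prebuild fired for {source_package}")


manager.add_hook(HookStages.PREBUILD, announce)

# The build pipeline then calls each stage with stage-specific arguments:
manager.call_hooks(HookStages.PREBUILD, "hello_1.0-1")
manager.call_hooks(HookStages.POSTBUILD, {"success": True}, "hello_1.0-1")
```
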
## Configuration

### Basic Plugin Configuration

```yaml
# Enable plugins
plugins: ["ccache_plugin", "build_monitor"]

# Plugin directory (optional)
plugin_dir: "./plugins"

# Plugin-specific configuration
plugin_conf:
  # CCache plugin
  ccache_enable: true
  ccache_required: false
  ccache_opts:
    dir: "/var/cache/deb-mock/ccache"
    max_cache_size: "4G"
    show_stats: true
    compress: true
    hashdir: true
    debug: false

  # Build monitor plugin
  build_monitor_enable: true
  build_monitor_required: false
  build_monitor_opts:
    log_file: "/var/log/deb-mock/builds.log"
    notify_on_completion: true
    track_build_time: true
```

### Plugin Configuration Options

- **`{plugin}_enable`** - Enable/disable plugin (default: true)
- **`{plugin}_required`** - Make plugin required (default: false)
- **`{plugin}_opts`** - Plugin-specific configuration options

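To illustrate how these keys are consumed, the sketch below mirrors the lookup pattern used by the bundled ccache plugin (which reads its `ccache_opts` mapping via `conf.get(...)`). The dispatch logic here is a simplified assumption for illustration, not deb-mock's actual loader.

```python
# Simplified sketch of the {plugin}_enable / {plugin}_required / {plugin}_opts
# convention above; deb-mock's real loader may differ in details.
plugin_conf = {
    "ccache_enable": True,
    "ccache_required": False,
    "ccache_opts": {"dir": "/var/cache/deb-mock/ccache", "max_cache_size": "4G"},
}

name = "ccache"
if plugin_conf.get(f"{name}_enable", True):                 # default: enabled
    required = plugin_conf.get(f"{name}_required", False)   # default: optional
    opts = plugin_conf.get(f"{name}_opts", {})
    print(f"loading {name}: required={required}, opts={opts}")
```
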
## Plugin Development

### Basic Plugin Structure

```python
#!/usr/bin/env python3
"""
Example plugin for deb-mock
"""

requires_api_version = "1.0"
run_in_bootstrap = False


def init(plugin_manager, conf, deb_mock):
    """Plugin entry point"""
    ExamplePlugin(plugin_manager, conf, deb_mock)


class ExamplePlugin:
    """Example plugin implementation"""

    def __init__(self, plugin_manager, conf, deb_mock):
        self.plugin_manager = plugin_manager
        self.conf = conf
        self.deb_mock = deb_mock

        # Register hooks
        self._register_hooks()

    def _register_hooks(self):
        """Register plugin hooks"""
        self.plugin_manager.add_hook("prebuild", self._prebuild_hook)
        self.plugin_manager.add_hook("postbuild", self._postbuild_hook)

    def _prebuild_hook(self, source_package, **kwargs):
        """Hook called before build starts"""
        print(f"Example plugin: Pre-build hook for {source_package}")

    def _postbuild_hook(self, build_result, source_package, **kwargs):
        """Hook called after build completes"""
        print(f"Example plugin: Post-build hook for {source_package}")
```

### Using BasePlugin Class

```python
from deb_mock.plugin import BasePlugin, HookStages


class MyPlugin(BasePlugin):
    """Plugin using the base class"""

    def _register_hooks(self):
        """Override to register hooks"""
        self.plugin_manager.add_hook(HookStages.PREBUILD, self._my_hook)

    def _my_hook(self, source_package, **kwargs):
        """My custom hook"""
        self.log_info(f"Processing {source_package}")
        # Plugin logic here
```

### Plugin API Requirements

Every plugin must define:

1. **`requires_api_version`** - API version compatibility
2. **`run_in_bootstrap`** - Whether to run in bootstrap chroots
3. **`init()` function** - Plugin entry point

### Available Hooks

Plugins can register hooks for any of the standard stages defined in `HookStages`, or create custom stages.

## Built-in Plugins

### CCache Plugin

The CCache plugin provides compiler caching for faster rebuilds:

```yaml
plugin_conf:
  ccache_enable: true
  ccache_opts:
    dir: "/var/cache/deb-mock/ccache"
    max_cache_size: "4G"
    show_stats: true
    compress: true
    hashdir: true
    debug: false
```

**Features:**
- Automatic ccache setup in chroots
- Configurable cache size and options
- Build statistics reporting
- Environment variable management

### Build Monitor Plugin

The Build Monitor plugin tracks build performance and provides notifications:

```yaml
plugin_conf:
  build_monitor_enable: true
  build_monitor_opts:
    log_file: "/var/log/deb-mock/builds.log"
    notify_on_completion: true
    track_build_time: true
    performance_metrics: true
```

**Features:**
- Build time tracking
- Performance metrics collection
- Completion notifications
- Detailed logging

## CLI Commands

### Plugin Management

```bash
# Show plugin information
deb-mock plugin-info

# List available hook stages
deb-mock list-stages

# List hooks for a specific stage
deb-mock list-hooks prebuild
```

### Plugin Configuration

Plugins are configured through the main configuration file or command-line options. The plugin system automatically loads enabled plugins and initializes them with the deb-mock instance.

## Best Practices

### Plugin Development

1. **Use descriptive names** - Choose clear, descriptive plugin names
2. **Handle errors gracefully** - Don't let plugin failures break builds
3. **Use logging** - Use the provided logging methods for debugging
4. **Validate configuration** - Check configuration values and provide defaults
5. **Document hooks** - Clearly document what each hook does

### Configuration

1. **Enable only needed plugins** - Don't enable plugins you don't use
2. **Use required sparingly** - Only mark plugins as required if builds fail without them
3. **Provide defaults** - Always provide sensible default values
4. **Test configurations** - Test plugin configurations before production use

### Performance

1. **Minimize hook overhead** - Keep hooks lightweight
2. **Use async when possible** - Consider async operations for I/O-heavy tasks
3. **Cache results** - Cache expensive operations when appropriate
4. **Profile plugins** - Monitor plugin performance impact

## Troubleshooting

### Common Issues

1. **Plugin not loading** - Check plugin directory and file permissions
2. **API version mismatch** - Ensure plugin API version matches deb-mock
3. **Hook not firing** - Verify hook stage names and registration
4. **Configuration errors** - Check YAML syntax and plugin configuration

### Debugging

1. **Enable debug logging** - Use `--debug` flag for verbose output
2. **Check plugin info** - Use `plugin-info` command to verify plugin loading
3. **Verify hooks** - Use `list-hooks` to check hook registration
4. **Test individually** - Test plugins in isolation before integration

The sketch after this list shows the same checks done programmatically.

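As in the earlier sketch, `PluginManager` is assumed importable from `deb_mock.plugin`; the methods used are the ones listed in the API Reference below.

```python
# Sketch: inspect the plugin system from Python when a hook does not fire.
from deb_mock.plugin import PluginManager

manager = PluginManager(config)  # 'config' is an already-loaded Config object
print(manager.get_plugin_info())      # which plugins loaded, API versions
print(manager.list_stages())          # all known hook stages
print(manager.get_hooks("prebuild"))  # functions registered for one stage
```
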
## Examples

### Complete Plugin Example

See `examples/plugins/ccache_plugin.py` for a complete working plugin.

### Configuration Example

See `examples/plugin-config.yaml` for a complete plugin-enabled configuration.

## API Reference

### PluginManager Methods

- `init_plugins(deb_mock)` - Initialize all enabled plugins
- `call_hooks(stage, *args, **kwargs)` - Call hooks for a stage
- `add_hook(stage, function)` - Register a hook function
- `remove_hook(stage, function)` - Remove a hook function
- `get_hooks(stage)` - Get hooks for a stage
- `list_stages()` - List available hook stages
- `get_plugin_info()` - Get plugin system information

### BasePlugin Methods

- `get_config(key, default)` - Get plugin configuration
- `set_config(key, value)` - Set plugin configuration
- `log_info(message)` - Log info message
- `log_warning(message)` - Log warning message
- `log_error(message)` - Log error message
- `log_debug(message)` - Log debug message

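A brief illustration tying these helpers together inside a plugin: the method names are the documented ones above, while the hook body and the `log_dir` configuration key are illustrative.

```python
from deb_mock.plugin import BasePlugin, HookStages


class TimingPlugin(BasePlugin):
    """Illustrative plugin built only on the BasePlugin helpers listed above."""

    def _register_hooks(self):
        self.plugin_manager.add_hook(HookStages.PREBUILD, self._on_prebuild)

    def _on_prebuild(self, source_package, **kwargs):
        # get_config() falls back to the default when the key is unset
        log_dir = self.get_config("log_dir", "/var/log/deb-mock")
        self.log_info(f"prebuild for {source_package}, logging under {log_dir}")
```
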
## Future Enhancements

- **Plugin repositories** - Centralized plugin distribution
- **Plugin dependencies** - Plugin-to-plugin dependencies
- **Plugin validation** - Automated plugin testing and validation
- **Plugin metrics** - Performance and usage metrics
- **Plugin hot-reload** - Runtime plugin updates

331
docs/SBUILD_INTEGRATION.md
Normal file
@@ -0,0 +1,331 @@
# Deb-Mock Sbuild Integration

## Overview

The `deb-mock` sbuild integration provides a robust, production-ready interface to Debian's `sbuild` package building system. This integration enables `deb-mock` to build actual Debian packages using the same tooling that Debian developers use in production.

## Features

- **Automatic requirement checking** - Validates sbuild availability, user permissions, and configuration
- **Intelligent error handling** - Provides clear error messages and recovery suggestions
- **Dependency management** - Automatic checking and installation of build dependencies
- **Chroot management** - Update and query chroot information
- **Comprehensive CLI** - Full command-line interface for all sbuild operations
- **Integration with deb-orchestrator** - Seamless integration with the build orchestration system

## Architecture

### Core Components

1. **SbuildWrapper** - Main wrapper class for sbuild operations
2. **Automatic Configuration** - Self-configuring sbuild setup
3. **Error Handling** - Comprehensive error detection and reporting
4. **CLI Integration** - Full command-line interface

### Integration Points

- **deb-mock core** - Integrated into the main build system
- **Plugin system** - Hooks for customizing sbuild behavior
- **deb-orchestrator** - Task execution and result collection

## Requirements

### System Requirements

- **sbuild package** - Debian package building tool
- **schroot** - Chroot management system
- **User permissions** - User must be in the `sbuild` group

### Setup Commands

```bash
# Install sbuild
sudo apt-get install sbuild

# Add user to sbuild group
sudo sbuild-adduser $USER

# Start new shell session or use newgrp
newgrp sbuild
```

## Configuration

### Automatic Configuration

The sbuild integration automatically creates a minimal configuration file at `~/.config/sbuild/config.pl`:

```perl
#!/usr/bin/perl
# deb-mock sbuild configuration
$chroot_mode = "schroot";
$schroot = "schroot";
```

### Manual Configuration

You can override the automatic configuration by creating your own `~/.config/sbuild/config.pl` or `~/.sbuildrc` file.

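The group requirement above can also be verified from Python before attempting a build; this sketch uses only the standard library (note that `grp.getgrnam` lists supplementary members, so a user whose *primary* group is `sbuild` would need an extra check).

```python
# Sketch: check the "user in sbuild group" requirement from Python.
import getpass
import grp


def in_sbuild_group(user=None):
    user = user or getpass.getuser()
    try:
        return user in grp.getgrnam("sbuild").gr_mem
    except KeyError:
        # Group missing entirely -> sbuild has not been set up
        return False


print(in_sbuild_group())
```
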
## Usage

### CLI Commands

#### Chroot Management

```bash
# Show chroot information
deb-mock chroot-info debian-trixie-amd64

# Update chroot packages
deb-mock update-chroot debian-trixie-amd64
```

#### Dependency Management

```bash
# Check build dependencies
deb-mock check-deps /path/to/source-package

# Install build dependencies
deb-mock install-deps package1 package2
```

#### Package Building

```bash
# Build package with sbuild
deb-mock build-with-sbuild /path/to/source-package

# Build with custom options
deb-mock build-with-sbuild /path/to/source-package \
    --chroot debian-trixie-amd64 \
    --output-dir ./output \
    --verbose
```

### Programmatic Usage

```python
from deb_mock.config import Config
from deb_mock.sbuild import SbuildWrapper

# Create configuration
config = Config(
    chroot_name="debian-trixie-amd64",
    suite="trixie",
    architecture="amd64"
)

# Initialize wrapper
wrapper = SbuildWrapper(config)

# Check dependencies
deps = wrapper.check_dependencies("source-package")
if not deps["satisfied"]:
    wrapper.install_build_dependencies(deps["missing"])

# Build package
result = wrapper.build_package("source-package")
```

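Continuing the snippet above, the build result can be inspected before collecting artifacts. The `success` key matches what this repository's own test script checks, and `SbuildError` is importable from `deb_mock.exceptions`; the handling shown here is illustrative.

```python
from deb_mock.exceptions import SbuildError

try:
    result = wrapper.build_package("source-package")
    if result["success"]:
        print("Build succeeded")
    else:
        print("Build failed; inspect the sbuild log in the output directory")
except SbuildError as e:
    # Raised for environment-level failures (missing chroot, permissions, ...)
    print(f"sbuild error: {e}")
```
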
## Error Handling

### Common Issues and Solutions

#### 1. User Not in Sbuild Group

**Error**: `User joe is not currently an effective member of group sbuild`

**Solution**:
```bash
sudo sbuild-adduser $USER
newgrp sbuild  # or start new shell session
```

#### 2. Sbuild Not Found

**Error**: `sbuild not found. Please install sbuild package`

**Solution**:
```bash
sudo apt-get install sbuild
```

#### 3. Chroot Not Available

**Error**: `Chroot 'debian-trixie-amd64' not found`

**Solution**:
```bash
# Create chroot using deb-mock
deb-mock init-chroot debian-trixie-amd64

# Or use sbuild directly
sudo sbuild-createchroot --arch=amd64 trixie /var/lib/schroot/chroots/debian-trixie-amd64
```

#### 4. Build Dependencies Missing

**Error**: `Unmet build dependencies`

**Solution**:
```bash
# Check what's missing
deb-mock check-deps /path/to/source-package

# Install missing dependencies
deb-mock install-deps package1 package2
```

### Error Recovery

The integration provides automatic error recovery for common issues:

- **Missing dependencies** - Automatically attempts to install
- **Configuration issues** - Creates minimal working configuration
- **Permission problems** - Provides clear setup instructions

## Integration with deb-orchestrator

### Task Execution

The sbuild integration works seamlessly with `deb-orchestrator`:

1. **Task Creation** - Build tasks are created in the orchestrator
2. **Dependency Resolution** - Build dependencies are automatically checked
3. **Package Building** - sbuild executes the actual package build
4. **Result Collection** - Build artifacts and metadata are collected
5. **Status Reporting** - Build success/failure is reported back

### Example Workflow

```python
# In deb-orchestrator task
task = {
    "id": 123,
    "type": "build_package",
    "source_package": "test-package",
    "chroot": "debian-trixie-amd64",
    "architecture": "amd64"
}

# deb-mock executes the build
result = deb_mock.build_with_sbuild(
    source_package=task["source_package"],
    chroot=task["chroot"],
    arch=task["architecture"]
)

# Result is reported back to orchestrator
orchestrator.report_build_result(task["id"], result)
```

## Performance and Optimization

### Caching

- **Root cache** - Chroot state caching for faster rebuilds
- **Package cache** - APT package caching
- **ccache** - Compiler caching for faster compilation

### Parallelization

- **Multiple chroots** - Build multiple packages simultaneously
- **Worker processes** - Parallel task execution
- **Resource management** - Efficient resource utilization

## Security

### Isolation

- **Chroot isolation** - Complete filesystem isolation
- **User separation** - Dedicated build users
- **Network control** - Controlled network access

### Permissions

- **Minimal privileges** - Only necessary permissions granted
- **User mapping** - Proper UID/GID handling
- **Capability dropping** - Security capability management

## Monitoring and Debugging

### Logging

- **Build logs** - Complete sbuild output capture
- **Error logs** - Detailed error information
- **Performance metrics** - Build time and resource usage

### Debugging

```bash
# Enable verbose output
deb-mock build-with-sbuild package --verbose --debug

# Check chroot status
deb-mock chroot-info chroot-name

# Verify dependencies
deb-mock check-deps package
```

## Best Practices

### Configuration

1. **Use dedicated chroots** - Separate chroots for different distributions
2. **Regular updates** - Keep chroots updated with latest packages
3. **Resource limits** - Set appropriate memory and disk limits

### Build Process

1. **Dependency checking** - Always check dependencies before building
2. **Clean builds** - Use clean chroots for reproducible builds
3. **Artifact collection** - Properly collect and store build artifacts

### Error Handling

1. **Graceful degradation** - Handle errors without breaking the build system
2. **User feedback** - Provide clear error messages and solutions
3. **Recovery mechanisms** - Automatic recovery when possible

## Troubleshooting

### Debug Mode

Enable debug mode for detailed information:

```bash
export DEB_MOCK_DEBUG=1
deb-mock build-with-sbuild package --debug
```

### Common Problems

1. **Permission denied** - Check user group membership
2. **Chroot not found** - Verify chroot exists and is accessible
3. **Build failures** - Check build logs for specific errors
4. **Dependency issues** - Verify package availability in chroot

### Getting Help

- **Error messages** - Read error messages carefully for solutions
- **Build logs** - Check build logs for detailed error information
- **Documentation** - Refer to this documentation and sbuild man pages
- **Community** - Check deb-mock project issues and discussions

## Future Enhancements

### Planned Features

- **Multi-architecture support** - Cross-compilation and multi-arch builds
- **Advanced caching** - Intelligent cache management and optimization
- **Build profiling** - Performance analysis and optimization suggestions
- **Integration testing** - Automated testing of build workflows

### Extension Points

- **Plugin system** - Custom build hooks and modifications
- **Custom backends** - Alternative build system support
- **Monitoring integration** - Integration with monitoring and alerting systems
- **CI/CD integration** - Continuous integration and deployment support

531
docs/TESTING.md
Normal file
@@ -0,0 +1,531 @@
# Deb-Mock Testing Guide

## Overview

The `deb-mock` project includes a comprehensive test suite that covers all major functionality including core operations, performance monitoring, plugin system, and integration testing. This guide provides detailed information on running tests, understanding test coverage, and contributing to the test suite.

## Test Structure

### Test Organization

```
tests/
├── __init__.py             # Test package initialization
├── conftest.py             # Pytest configuration and fixtures
├── test_core.py            # Core functionality tests
├── test_performance.py     # Performance monitoring tests
├── test_plugin_system.py   # Plugin system tests
└── requirements.txt        # Test dependencies
```

### Test Categories

1. **Unit Tests** - Test individual components in isolation
2. **Integration Tests** - Test component interactions
3. **Performance Tests** - Test performance monitoring system
4. **Plugin Tests** - Test plugin system functionality
5. **System Tests** - Test end-to-end workflows

|
||||
|
||||
### Prerequisites
|
||||
|
||||
1. **Python Virtual Environment**: Ensure you have activated the virtual environment
|
||||
```bash
|
||||
source venv/bin/activate
|
||||
```
|
||||
|
||||
2. **Test Dependencies**: Install required testing packages
|
||||
```bash
|
||||
pip install -r tests/requirements.txt
|
||||
```
|
||||
|
||||
### Basic Test Execution
|
||||
|
||||
#### Run All Tests
|
||||
```bash
|
||||
python -m pytest tests/
|
||||
```
|
||||
|
||||
#### Run Specific Test File
|
||||
```bash
|
||||
python -m pytest tests/test_core.py
|
||||
```
|
||||
|
||||
#### Run Specific Test Class
|
||||
```bash
|
||||
python -m pytest tests/test_performance.py::TestPerformanceMonitor
|
||||
```
|
||||
|
||||
#### Run Specific Test Method
|
||||
```bash
|
||||
python -m pytest tests/test_performance.py::TestPerformanceMonitor::test_initialization
|
||||
```
|
||||
|
||||
### Using the Test Runner Script

The project includes a comprehensive test runner script that provides additional functionality:

#### Run All Tests with Coverage
```bash
python run_tests.py --all --coverage-report
```

#### Run Specific Test Types
```bash
# Unit tests only
python run_tests.py --unit

# Integration tests only
python run_tests.py --integration

# Performance tests only
python run_tests.py --performance

# Plugin system tests only
python run_tests.py --plugin
```

#### Parallel Test Execution
```bash
python run_tests.py --all --parallel
```

#### Verbose Output
```bash
python run_tests.py --all --verbose
```

#### Additional Quality Checks
```bash
# Run linting
python run_tests.py --lint

# Run type checking
python run_tests.py --type-check

# Run security scanning
python run_tests.py --security
```

### Test Runner Options

| Option | Description |
|--------|-------------|
| `--unit` | Run unit tests only |
| `--integration` | Run integration tests only |
| `--performance` | Run performance tests only |
| `--plugin` | Run plugin system tests only |
| `--all` | Run all tests |
| `--parallel` | Run tests in parallel |
| `--no-coverage` | Disable coverage reporting |
| `--verbose`, `-v` | Verbose output |
| `--install-deps` | Install test dependencies |
| `--lint` | Run code linting |
| `--type-check` | Run type checking |
| `--security` | Run security scanning |
| `--coverage-report` | Generate coverage report |

## Test Configuration

### Pytest Configuration (`pytest.ini`)

```ini
[tool:pytest]
testpaths = tests
python_files = test_*.py
python_classes = Test*
python_functions = test_*
addopts =
    -v
    --tb=short
    --strict-markers
    --disable-warnings
    --cov=deb_mock
    --cov-report=term-missing
    --cov-report=html:htmlcov
    --cov-report=xml:coverage.xml
    --cov-fail-under=80
markers =
    slow: marks tests as slow
    integration: marks tests as integration tests
    unit: marks tests as unit tests
    performance: marks tests as performance tests
    plugin: marks tests as plugin system tests
```

### Coverage Configuration

- **Minimum Coverage**: 80%
- **Coverage Reports**: Terminal, HTML, XML
- **Coverage Output**: `htmlcov/` directory

## Test Fixtures

### Common Fixtures (`conftest.py`)

The test suite provides comprehensive fixtures for testing:

#### Configuration Fixtures
- `test_config` - Basic test configuration
- `performance_test_config` - Configuration with performance monitoring
- `plugin_test_config` - Configuration with plugin support

#### Mock Fixtures
- `mock_chroot_manager` - Mock chroot manager
- `mock_cache_manager` - Mock cache manager
- `mock_sbuild_wrapper` - Mock sbuild wrapper
- `mock_plugin_manager` - Mock plugin manager
- `mock_performance_monitor` - Mock performance monitor

#### Test Data Fixtures
- `sample_source_package` - Minimal Debian source package
- `test_package_data` - Package metadata for testing
- `test_build_result` - Build result data
- `test_performance_metrics` - Performance metrics data

#### Environment Fixtures
- `temp_dir` - Temporary directory for tests
- `test_environment` - Test environment variables
- `isolated_filesystem` - Isolated filesystem for testing

## Test Categories

### 1. Core Functionality Tests (`test_core.py`)

Tests the main `DebMock` class and its core operations:

- **Initialization** - Component initialization and configuration
- **Build Operations** - Package building with various scenarios
- **Chroot Management** - Chroot creation, restoration, and cleanup
- **Cache Operations** - Cache restoration and creation
- **Plugin Integration** - Hook execution and plugin lifecycle
- **Performance Monitoring** - Performance tracking integration
- **Error Handling** - Build failures and error scenarios

#### Example Test
```python
def test_build_with_existing_chroot(self, mock_deb_mock, sample_source_package,
                                    mock_chroot_manager, mock_cache_manager,
                                    mock_sbuild_wrapper, mock_plugin_manager,
                                    mock_performance_monitor):
    """Test building with an existing chroot"""
    # Mock the components
    mock_deb_mock.chroot_manager = mock_chroot_manager
    mock_deb_mock.cache_manager = mock_cache_manager
    mock_deb_mock.sbuild_wrapper = mock_sbuild_wrapper
    mock_deb_mock.plugin_manager = mock_plugin_manager
    mock_deb_mock.performance_monitor = mock_performance_monitor

    # Mock chroot exists
    mock_chroot_manager.chroot_exists.return_value = True

    # Run build
    result = mock_deb_mock.build(sample_source_package)

    # Verify result
    assert result["success"] is True
```

### 2. Performance Monitoring Tests (`test_performance.py`)

Tests the performance monitoring and optimization system:

- **PerformanceMetrics** - Metrics data structure validation
- **BuildProfile** - Build performance profile management
- **PerformanceMonitor** - Real-time monitoring and metrics collection
- **PerformanceOptimizer** - AI-driven optimization suggestions
- **PerformanceReporter** - Report generation and data export

#### Example Test
```python
def test_monitor_operation_context_manager(self, test_config):
    """Test monitor_operation context manager"""
    test_config.enable_performance_monitoring = True
    monitor = PerformanceMonitor(test_config)

    with monitor.monitor_operation("test_op") as op_id:
        assert op_id.startswith("test_op_")
        time.sleep(0.1)  # Small delay

    # Verify operation was tracked
    assert len(monitor._operation_history) == 1
    assert monitor._operation_history[0].operation == "test_op"
    assert monitor._operation_history[0].duration > 0
```

### 3. Plugin System Tests (`test_plugin_system.py`)

Tests the extensible plugin system:

- **HookStages** - Hook stage definitions and values
- **BasePlugin** - Base plugin class functionality
- **PluginManager** - Plugin discovery, loading, and management
- **Plugin Lifecycle** - Initialization, execution, and cleanup
- **Hook System** - Hook registration and execution
- **Error Handling** - Plugin error scenarios

#### Example Test
```python
def test_plugin_lifecycle(self, test_config):
    """Test complete plugin lifecycle"""
    manager = PluginManager(test_config)

    # Create a test plugin
    class TestPlugin(BasePlugin):
        def __init__(self):
            super().__init__(
                name="TestPlugin",
                version="1.0.0",
                description="Test plugin for integration testing"
            )
            self.init_called = False
            self.cleanup_called = False

        def init(self, deb_mock):
            self.init_called = True
            return None

        def cleanup(self):
            self.cleanup_called = True
            return None

    # Test plugin lifecycle
    plugin = TestPlugin()
    manager.plugins["test_plugin"] = plugin

    # Initialize
    mock_deb_mock = Mock()
    result = manager.init_plugins(mock_deb_mock)
    assert result is True
    assert plugin.init_called is True

    # Cleanup
    cleanup_result = manager.cleanup_plugins()
    assert cleanup_result is True
    assert plugin.cleanup_called is True
```

## Test Markers

### Available Markers

- **`@pytest.mark.slow`** - Marks tests as slow (can be deselected)
- **`@pytest.mark.integration`** - Marks tests as integration tests
- **`@pytest.mark.unit`** - Marks tests as unit tests
- **`@pytest.mark.performance`** - Marks tests as performance tests
- **`@pytest.mark.plugin`** - Marks tests as plugin system tests

A sketch of applying these markers in test code follows below.

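Markers are applied as plain decorators; this sketch uses the `slow` and `integration` markers registered in `pytest.ini` (the test body itself is illustrative).

```python
import time

import pytest


@pytest.mark.slow
@pytest.mark.integration
def test_full_chroot_rebuild():
    # Illustrative long-running end-to-end scenario
    time.sleep(2)
    assert True
```
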
### Using Markers

#### Run Only Fast Tests
```bash
python -m pytest -m "not slow"
```

#### Run Only Integration Tests
```bash
python -m pytest -m integration
```

#### Run Multiple Marker Types
```bash
python -m pytest -m "unit or performance"
```

## Coverage Reporting

### Coverage Types

1. **Terminal Coverage** - Inline coverage information
2. **HTML Coverage** - Detailed HTML report in `htmlcov/` directory
3. **XML Coverage** - Machine-readable coverage data

### Coverage Thresholds

- **Minimum Coverage**: 80%
- **Coverage Failure**: Tests fail if coverage drops below threshold

### Generating Coverage Reports

```bash
# Generate all coverage reports
python run_tests.py --coverage-report

# Generate specific coverage report
python -m coverage report
python -m coverage html
```

## Test Data Management

### Temporary Files

Tests use temporary directories that are automatically cleaned up:

```python
import shutil
import tempfile

import pytest


@pytest.fixture
def temp_dir():
    """Create a temporary directory for tests"""
    temp_dir = tempfile.mkdtemp(prefix="deb_mock_test_")
    yield temp_dir
    shutil.rmtree(temp_dir, ignore_errors=True)
```

### Mock Data

Tests use realistic mock data for comprehensive testing:

```python
import os

import pytest


@pytest.fixture
def sample_source_package(temp_dir):
    """Create a minimal Debian source package for testing"""
    package_dir = os.path.join(temp_dir, "test-package")
    os.makedirs(package_dir)

    # Create debian/control
    debian_dir = os.path.join(package_dir, "debian")
    os.makedirs(debian_dir)

    # Add package files...
    return package_dir
```

## Debugging Tests

### Verbose Output

```bash
python -m pytest -v -s tests/
```

### Debugging Specific Tests

```bash
# Run with debugger
python -m pytest --pdb tests/test_core.py::TestDebMock::test_build

# Run with trace
python -m pytest --trace tests/test_core.py::TestDebMock::test_build
```

### Test Isolation

```bash
# Run single test in isolation
python -m pytest -x tests/test_core.py::TestDebMock::test_build

# Stop on first failure
python -m pytest -x tests/
```

## Continuous Integration

### CI/CD Integration

The test suite is designed for CI/CD environments:

```yaml
# GitHub Actions example
- name: Run Tests
  run: |
    source venv/bin/activate
    python run_tests.py --all --coverage-report --parallel

- name: Upload Coverage
  uses: codecov/codecov-action@v3
  with:
    file: ./coverage.xml
```

### Test Parallelization

Tests can be run in parallel for faster execution (parallel runs require the `pytest-xdist` plugin, which provides the `-n` option):

```bash
# Auto-detect CPU cores
python -m pytest -n auto tests/

# Specific number of workers
python -m pytest -n 4 tests/
```

## Best Practices

### Writing Tests

1. **Test Naming** - Use descriptive test names that explain the scenario
2. **Test Isolation** - Each test should be independent and not affect others
3. **Mock External Dependencies** - Use mocks for system calls and external services (see the sketch after this list)
4. **Test Data** - Use realistic test data that represents real scenarios
5. **Error Scenarios** - Test both success and failure cases

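As a concrete illustration of point 3, the sketch below stubs out `subprocess.run` so no real sbuild is invoked; `run_build` is a hypothetical stand-in for production code, not a deb-mock function.

```python
import subprocess
from subprocess import CompletedProcess
from unittest.mock import patch


def run_build(package):
    # Hypothetical production code that shells out to sbuild
    return subprocess.run(["sbuild", package], capture_output=True, text=True)


def test_run_build_invokes_sbuild():
    fake = CompletedProcess(args=["sbuild", "hello"], returncode=0,
                            stdout="ok", stderr="")
    with patch("subprocess.run", return_value=fake) as mock_run:
        result = run_build("hello")
    mock_run.assert_called_once()
    assert result.returncode == 0
```
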
### Test Organization

1. **Group Related Tests** - Use test classes to group related functionality
2. **Use Fixtures** - Leverage pytest fixtures for common setup
3. **Test Categories** - Use markers to categorize tests
4. **Coverage** - Aim for high test coverage (80% minimum)

### Performance Testing

1. **Realistic Scenarios** - Test with realistic data sizes and complexity
2. **Benchmarking** - Use the performance monitoring system for benchmarks
3. **Resource Monitoring** - Monitor CPU, memory, and I/O during tests
4. **Regression Detection** - Detect performance regressions

## Troubleshooting

### Common Issues

#### Import Errors
```bash
# Ensure virtual environment is activated
source venv/bin/activate

# Install test dependencies
pip install -r tests/requirements.txt
```

#### Coverage Issues
```bash
# Clear coverage data
python -m coverage erase

# Run tests with coverage
python -m pytest --cov=deb_mock tests/
```

#### Test Failures
```bash
# Run with verbose output
python -m pytest -v -s tests/

# Run specific failing test
python -m pytest tests/test_core.py::TestDebMock::test_build -v -s
```

### Getting Help

1. **Check Test Output** - Review test output for error details
2. **Review Fixtures** - Ensure test fixtures are properly configured
3. **Check Dependencies** - Verify all test dependencies are installed
4. **Review Configuration** - Check `pytest.ini` and test configuration

## Contributing to Tests

### Adding New Tests

1. **Follow Naming Convention** - Use `test_*.py` for test files
2. **Use Existing Fixtures** - Leverage existing fixtures when possible
3. **Add Markers** - Use appropriate test markers
4. **Maintain Coverage** - Ensure new code is covered by tests

### Test Review Process

1. **Test Coverage** - Ensure new functionality has adequate test coverage
2. **Test Quality** - Tests should be clear, maintainable, and reliable
3. **Performance Impact** - Tests should not significantly impact build times
4. **Documentation** - Document complex test scenarios and edge cases

This comprehensive testing guide ensures that the `deb-mock` project maintains high quality and reliability through extensive testing coverage.

106
examples/advanced-config.yaml
Normal file
@@ -0,0 +1,106 @@
# Advanced deb-mock configuration example
# Demonstrates parallel builds and advanced mount management

# Basic chroot configuration
chroot_name: "debian-trixie-amd64-advanced"
architecture: "amd64"
suite: "trixie"
output_dir: "./output"
keep_chroot: false
verbose: true
debug: false

# Chroot paths
basedir: "/var/lib/deb-mock"
chroot_dir: "/var/lib/deb-mock/chroots"
chroot_config_dir: "/etc/schroot/chroot.d"
chroot_home: "/home/build"

# Parallel build configuration
parallel_builds: 4  # Number of parallel chroots
parallel_chroot_prefix: "parallel"
parallel_build_timeout: 7200  # 2 hours
parallel_build_cleanup: true

# Advanced mount management
mount_proc: true
mount_sys: true
mount_dev: true
mount_devpts: true
mount_tmp: true
mount_home: false

# Tmpfs configuration
use_tmpfs: true
tmpfs_size: "4G"

# Custom bind mounts
bind_mounts:
  - host: "/usr/share/doc"
    chroot: "/usr/share/doc"
    options: "ro"
  - host: "/var/cache/apt/archives"
    chroot: "/var/cache/apt/archives"
    options: "ro"
  - host: "/tmp/deb-mock-sources"
    chroot: "/tmp/sources"
    options: ""

# Tmpfs mounts for performance
tmpfs_mounts:
  - chroot: "/tmp/build"
    size: "2G"
    options: "noexec,nosuid"
  - chroot: "/var/cache/ccache"
    size: "1G"
    options: ""

# Overlay mounts (requires overlayfs support)
overlay_mounts:
  - lower: "/var/lib/deb-mock/base-chroot"
    upper: "/var/lib/deb-mock/overlay-upper"
    work: "/var/lib/deb-mock/overlay-work"
    chroot: "/var/lib/deb-mock/overlay-chroot"

# Advanced chroot features
use_namespaces: false
uid_mapping: null
gid_mapping: null
capabilities: []
seccomp_profile: null

# UID/GID management
chroot_user: "build"
chroot_group: "build"
chroot_uid: 1000
chroot_gid: 1000
use_host_user: false
copy_host_users: []
preserve_uid_gid: true

# Build optimization
use_root_cache: true
root_cache_dir: "/var/cache/deb-mock/root-cache"
root_cache_age: 7
use_package_cache: true
package_cache_dir: "/var/cache/deb-mock/package-cache"
use_ccache: true
ccache_dir: "/var/cache/deb-mock/ccache"

# Network configuration
use_host_resolv: true
enable_network: true
http_proxy: null
https_proxy: null
no_proxy: null

# Mirror configuration
mirror: "http://deb.debian.org/debian/"
security_mirror: "http://security.debian.org/debian-security"
backports_mirror: "http://deb.debian.org/debian/"

# Bootstrap chroot support
use_bootstrap_chroot: false
bootstrap_chroot_name: null
bootstrap_arch: null
bootstrap_suite: null
101
examples/plugins/ccache_plugin.py
Normal file
@@ -0,0 +1,101 @@
#!/usr/bin/env python3
"""
Sample ccache plugin for deb-mock
Demonstrates the plugin system capabilities
"""

requires_api_version = "1.0"
run_in_bootstrap = False


def init(plugin_manager, conf, deb_mock):
    """Plugin entry point"""
    CCachePlugin(plugin_manager, conf, deb_mock)


class CCachePlugin:
    """Enables ccache in deb-mock chroots"""

    def __init__(self, plugin_manager, conf, deb_mock):
        self.plugin_manager = plugin_manager
        self.conf = conf
        self.deb_mock = deb_mock

        # Plugin configuration with defaults
        self.ccache_dir = conf.get('dir', '/var/cache/deb-mock/ccache')
        self.max_cache_size = conf.get('max_cache_size', '2G')
        self.show_stats = conf.get('show_stats', True)
        self.compress = conf.get('compress', True)
        self.hashdir = conf.get('hashdir', True)
        self.debug = conf.get('debug', False)

        # Register hooks
        self._register_hooks()

        # Add ccache to build dependencies
        if hasattr(deb_mock.config, 'chroot_additional_packages'):
            if 'ccache' not in deb_mock.config.chroot_additional_packages:
                deb_mock.config.chroot_additional_packages.append('ccache')

        print(f"CCache plugin initialized: cache_dir={self.ccache_dir}, max_size={self.max_cache_size}")

    def _register_hooks(self):
        """Register plugin hooks"""
        self.plugin_manager.add_hook("prebuild", self._ccache_prebuild)
        self.plugin_manager.add_hook("postbuild", self._ccache_postbuild)
        self.plugin_manager.add_hook("prechroot_init", self._ccache_prechroot_init)

    def _ccache_prebuild(self, source_package, **kwargs):
        """Set up ccache before the build starts"""
        print(f"CCache: Setting up ccache for {source_package}")

        # Set ccache environment variables
        if hasattr(self.deb_mock.config, 'build_env'):
            self.deb_mock.config.build_env.update({
                'CCACHE_DIR': '/var/tmp/ccache',
                'CCACHE_UMASK': '002',
                'CCACHE_COMPRESS': str(self.compress),
                'CCACHE_HASHDIR': '1' if self.hashdir else '0',
                'CCACHE_DEBUG': '1' if self.debug else '0'
            })

    def _ccache_postbuild(self, build_result, source_package, **kwargs):
        """Show ccache statistics after the build"""
        if not self.show_stats:
            return

        print("CCache: Build completed, showing statistics...")
        # In a real implementation, you would execute `ccache --show-stats` in the chroot

        if build_result.get('success', False):
            print(f"CCache: Build successful for {source_package}")
        else:
            print(f"CCache: Build failed for {source_package}")

    def _ccache_prechroot_init(self, chroot_name):
        """Set up ccache in the chroot before initialization"""
        print(f"CCache: Setting up ccache in chroot {chroot_name}")

        # ccache directory inside the chroot
        chroot_ccache_dir = "/var/tmp/ccache"

        # In a real implementation, you would:
        # 1. Create the ccache directory in the chroot
        # 2. Set proper permissions
        # 3. Mount the host ccache directory if configured

        print(f"CCache: Created {chroot_ccache_dir} in chroot {chroot_name}")


# Example configuration for this plugin:
# plugin_conf = {
#     'ccache_enable': True,
#     'ccache_opts': {
#         'dir': '/var/cache/deb-mock/ccache',
#         'max_cache_size': '4G',
#         'show_stats': True,
#         'compress': True,
#         'hashdir': True,
#         'debug': False
#     }
# }
24
pytest.ini
Normal file
@@ -0,0 +1,24 @@
[tool:pytest]
testpaths = tests
python_files = test_*.py
python_classes = Test*
python_functions = test_*
addopts =
    -v
    --tb=short
    --strict-markers
    --disable-warnings
    --cov=deb_mock
    --cov-report=term-missing
    --cov-report=html:htmlcov
    --cov-report=xml:coverage.xml
    --cov-fail-under=80
markers =
    slow: marks tests as slow (deselect with '-m "not slow"')
    integration: marks tests as integration tests
    unit: marks tests as unit tests
    performance: marks tests as performance tests
    plugin: marks tests as plugin system tests
filterwarnings =
    ignore::DeprecationWarning
    ignore::PendingDeprecationWarning
259
run_tests.py
Executable file
@@ -0,0 +1,259 @@
#!/usr/bin/env python3
"""
Comprehensive test runner for deb-mock
"""

import os
import sys
import subprocess
import argparse
import time
from pathlib import Path


def run_command(cmd, description, check=True):
    """Run a command and handle errors"""
    print(f"\n🔄 {description}...")
    print(f"Command: {' '.join(cmd)}")

    start_time = time.time()
    try:
        result = subprocess.run(cmd, check=check, capture_output=True, text=True)
        duration = time.time() - start_time

        if result.returncode == 0:
            print(f"✅ {description} completed successfully in {duration:.2f}s")
            if result.stdout:
                print("Output:", result.stdout)
        else:
            print(f"❌ {description} failed with return code {result.returncode}")
            if result.stderr:
                print("Error:", result.stderr)
            if result.stdout:
                print("Output:", result.stdout)

        return result
    except subprocess.CalledProcessError as e:
        duration = time.time() - start_time
        print(f"❌ {description} failed with exception in {duration:.2f}s")
        print(f"Error: {e}")
        if e.stdout:
            print("Output:", e.stdout)
        if e.stderr:
            print("Error:", e.stderr)
        return e


def install_test_dependencies():
    """Install test dependencies"""
    cmd = [sys.executable, "-m", "pip", "install", "-r", "tests/requirements.txt"]
    return run_command(cmd, "Installing test dependencies")


def run_unit_tests(parallel=False, coverage=True, verbose=False):
    """Run unit tests"""
    cmd = [sys.executable, "-m", "pytest"]

    if parallel:
        cmd.extend(["-n", "auto"])

    if coverage:
        cmd.extend(["--cov=deb_mock", "--cov-report=term-missing"])

    if verbose:
        cmd.extend(["-v", "-s"])

    cmd.append("tests/")

    return run_command(cmd, "Running unit tests")


def run_integration_tests(parallel=False, coverage=True, verbose=False):
    """Run integration tests"""
    cmd = [sys.executable, "-m", "pytest", "-m", "integration"]

    if parallel:
        cmd.extend(["-n", "auto"])

    if coverage:
        cmd.extend(["--cov=deb_mock", "--cov-report=term-missing"])

    if verbose:
        cmd.extend(["-v", "-s"])

    cmd.append("tests/")

    return run_command(cmd, "Running integration tests")


def run_performance_tests(parallel=False, coverage=True, verbose=False):
    """Run performance tests"""
    cmd = [sys.executable, "-m", "pytest", "-m", "performance"]

    if parallel:
        cmd.extend(["-n", "auto"])

    if coverage:
        cmd.extend(["--cov=deb_mock", "--cov-report=term-missing"])

    if verbose:
        cmd.extend(["-v", "-s"])

    cmd.append("tests/")

    return run_command(cmd, "Running performance tests")


def run_plugin_tests(parallel=False, coverage=True, verbose=False):
    """Run plugin system tests"""
    cmd = [sys.executable, "-m", "pytest", "-m", "plugin"]

    if parallel:
        cmd.extend(["-n", "auto"])

    if coverage:
        cmd.extend(["--cov=deb_mock", "--cov-report=term-missing"])

    if verbose:
        cmd.extend(["-v", "-s"])

    cmd.append("tests/")

    return run_command(cmd, "Running plugin system tests")


def run_all_tests(parallel=False, coverage=True, verbose=False):
    """Run all tests"""
    cmd = [sys.executable, "-m", "pytest"]

    if parallel:
        cmd.extend(["-n", "auto"])

    if coverage:
        cmd.extend(["--cov=deb_mock", "--cov-report=term-missing", "--cov-report=html:htmlcov"])

    if verbose:
        cmd.extend(["-v", "-s"])

    cmd.append("tests/")

    return run_command(cmd, "Running all tests")


def generate_coverage_report():
    """Generate coverage report"""
    cmd = [sys.executable, "-m", "coverage", "report"]
    return run_command(cmd, "Generating coverage report")


def generate_html_coverage():
    """Generate HTML coverage report"""
    cmd = [sys.executable, "-m", "coverage", "html"]
    return run_command(cmd, "Generating HTML coverage report")


def run_linting():
    """Run code linting"""
    # flake8 may be absent; a missing module surfaces as a failed command via run_command
    try:
        cmd = [sys.executable, "-m", "flake8", "deb_mock/", "tests/"]
        return run_command(cmd, "Running code linting")
    except FileNotFoundError:
        print("⚠️ flake8 not available, skipping linting")
        return None


def run_type_checking():
    """Run type checking"""
    # mypy may be absent; a missing module surfaces as a failed command via run_command
    try:
        cmd = [sys.executable, "-m", "mypy", "deb_mock/"]
        return run_command(cmd, "Running type checking")
    except FileNotFoundError:
        print("⚠️ mypy not available, skipping type checking")
        return None


def run_security_scan():
    """Run security scanning"""
    # bandit may be absent; a missing module surfaces as a failed command via run_command
    try:
        cmd = [sys.executable, "-m", "bandit", "-r", "deb_mock/"]
        return run_command(cmd, "Running security scan")
    except FileNotFoundError:
        print("⚠️ bandit not available, skipping security scan")
        return None


def main():
    """Main test runner"""
    parser = argparse.ArgumentParser(description="Comprehensive test runner for deb-mock")
    parser.add_argument("--unit", action="store_true", help="Run unit tests only")
    parser.add_argument("--integration", action="store_true", help="Run integration tests only")
    parser.add_argument("--performance", action="store_true", help="Run performance tests only")
    parser.add_argument("--plugin", action="store_true", help="Run plugin system tests only")
    parser.add_argument("--all", action="store_true", help="Run all tests")
    parser.add_argument("--parallel", action="store_true", help="Run tests in parallel")
    parser.add_argument("--no-coverage", action="store_true", help="Disable coverage reporting")
    parser.add_argument("--verbose", "-v", action="store_true", help="Verbose output")
    parser.add_argument("--install-deps", action="store_true", help="Install test dependencies")
    parser.add_argument("--lint", action="store_true", help="Run code linting")
    parser.add_argument("--type-check", action="store_true", help="Run type checking")
    parser.add_argument("--security", action="store_true", help="Run security scanning")
    parser.add_argument("--coverage-report", action="store_true", help="Generate coverage report")

    args = parser.parse_args()

    print("🚀 deb-mock Test Runner")
    print("=" * 50)

    # Change to project directory
    project_dir = Path(__file__).parent
    os.chdir(project_dir)
    print(f"Working directory: {os.getcwd()}")

    # Install dependencies if requested
    if args.install_deps:
        install_test_dependencies()

    # Set coverage flag
    coverage = not args.no_coverage

    # Run specific test types
    if args.unit:
        run_unit_tests(args.parallel, coverage, args.verbose)
    elif args.integration:
        run_integration_tests(args.parallel, coverage, args.verbose)
    elif args.performance:
        run_performance_tests(args.parallel, coverage, args.verbose)
    elif args.plugin:
        run_plugin_tests(args.parallel, coverage, args.verbose)
    elif args.all:
        run_all_tests(args.parallel, coverage, args.verbose)
    else:
        # Default: run all tests
        run_all_tests(args.parallel, coverage, args.verbose)

    # Run additional checks if requested
    if args.lint:
        run_linting()

    if args.type_check:
        run_type_checking()

    if args.security:
        run_security_scan()

    if args.coverage_report:
        generate_coverage_report()
        generate_html_coverage()

    print("\n🎉 Test runner completed!")
    print("=" * 50)


if __name__ == "__main__":
    main()
119
test_sbuild_integration.py
Normal file
@@ -0,0 +1,119 @@
#!/usr/bin/env python3
"""
Test script for deb-mock sbuild integration
"""

import os
import sys

# Add the current directory to Python path
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))

from deb_mock.config import Config
from deb_mock.sbuild import SbuildWrapper
from deb_mock.exceptions import SbuildError


def test_sbuild_integration():
    """Test the sbuild integration end-to-end"""
    print("🧪 Testing deb-mock sbuild integration...")

    # Create test configuration
    config = Config(
        chroot_name="debian-trixie-amd64",
        suite="trixie",
        architecture="amd64",
        output_dir="./test-output",
        verbose=True
    )

    try:
        # Initialize sbuild wrapper
        print("📦 Initializing sbuild wrapper...")
        wrapper = SbuildWrapper(config)
        print("✅ Sbuild wrapper initialized successfully")

        # Test chroot info
        print("\n🔍 Testing chroot info...")
        chroot_info = wrapper.get_chroot_info()
        print(f"Chroot info: {chroot_info}")

        # Test dependency checking
        print("\n📋 Testing dependency checking...")
        test_package = "test-sbuild-package"
        if os.path.exists(test_package):
            deps = wrapper.check_dependencies(test_package)
            print(f"Dependencies: {deps}")

            if not deps["satisfied"] and deps["missing"]:
                print(f"Missing dependencies: {deps['missing']}")
                print("Attempting to install missing dependencies...")

                try:
                    wrapper.install_build_dependencies(deps["missing"])
                    print("✅ Dependencies installed successfully")
                except SbuildError as e:
                    print(f"⚠️ Could not install dependencies: {e}")

        # Test package building (if dependencies are satisfied)
        print("\n🔨 Testing package building...")
        if os.path.exists(test_package):
            try:
                result = wrapper.build_package(test_package)
                print(f"Build result: {result}")
                if result["success"]:
                    print("✅ Package built successfully!")
                else:
                    print("❌ Package build failed")
            except SbuildError as e:
                print(f"⚠️ Package build failed (expected for test): {e}")

        print("\n🎉 Sbuild integration test completed!")
        return True

    except Exception as e:
        print(f"❌ Test failed: {e}")
        import traceback
        traceback.print_exc()
        return False


def test_cli_commands():
    """Test the new CLI commands"""
    print("\n🖥️ Testing CLI commands...")

    try:
        # Test chroot info command
        print("Testing chroot-info command...")
        os.system("python3 -m deb_mock.cli chroot-info debian-trixie-amd64")

        # Test check-deps command
        print("\nTesting check-deps command...")
        if os.path.exists("test-sbuild-package"):
            os.system("python3 -m deb_mock.cli check-deps test-sbuild-package")

        print("✅ CLI command tests completed!")
        return True

    except Exception as e:
        print(f"❌ CLI test failed: {e}")
        return False


if __name__ == "__main__":
    print("🚀 Starting deb-mock sbuild integration tests...\n")

    # Run tests
    success = True
    success &= test_sbuild_integration()
    success &= test_cli_commands()

    if success:
        print("\n🎯 All tests passed! Sbuild integration is working.")
        sys.exit(0)
    else:
        print("\n💥 Some tests failed. Check the output above.")
        sys.exit(1)
|
|
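A note on test_cli_commands: os.system discards the commands' exit codes, so a failing CLI call cannot fail the test. A variant based on subprocess.run (a sketch; the helper name run_cli is an assumption, the subcommand names are taken from the script above) would let the exit status feed into the pass/fail result:

import subprocess

def run_cli(*args):
    # Run a deb-mock CLI subcommand and report whether it exited cleanly.
    result = subprocess.run(
        ["python3", "-m", "deb_mock.cli", *args],
        capture_output=True, text=True,
    )
    if result.returncode != 0:
        print(f"❌ {' '.join(args)} failed: {result.stderr.strip()}")
    return result.returncode == 0

Inside test_cli_commands this would be used as, for example, success = run_cli("chroot-info", "debian-trixie-amd64").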
@@ -1,3 +1,3 @@
 """
-Tests for deb-mock
+Integration tests for deb-mock
 """
324 tests/conftest.py Normal file
@@ -0,0 +1,324 @@
"""
Common pytest fixtures and configuration for the deb-mock tests
"""

import os
import shutil
import tempfile
from unittest.mock import MagicMock, Mock, patch

import pytest

from deb_mock.config import Config
from deb_mock.core import DebMock


@pytest.fixture
def temp_dir():
    """Create a temporary directory for tests"""
    temp_dir = tempfile.mkdtemp(prefix="deb_mock_test_")
    yield temp_dir
    shutil.rmtree(temp_dir, ignore_errors=True)


@pytest.fixture
def test_config(temp_dir):
    """Create a test configuration"""
    config = Config(
        chroot_name="test-chroot",
        architecture="amd64",
        suite="trixie",
        chroot_dir=os.path.join(temp_dir, "chroots"),
        cache_dir=os.path.join(temp_dir, "cache"),
        work_dir=os.path.join(temp_dir, "work"),
        enable_performance_monitoring=False,  # Disabled for tests
        parallel_builds=1,
        use_namespaces=False,  # Disabled for tests
        mount_proc=False,  # Disabled for tests
        mount_sys=False,  # Disabled for tests
        mount_dev=False,  # Disabled for tests
        mount_tmp=False,  # Disabled for tests
        plugins=[],
        plugin_conf={},
    )
    return config


@pytest.fixture
def mock_deb_mock(test_config):
    """Create a DebMock instance for testing"""
    return DebMock(test_config)


@pytest.fixture
def sample_source_package(temp_dir):
    """Create a minimal Debian source package for testing"""
    package_dir = os.path.join(temp_dir, "test-package")
    os.makedirs(package_dir)

    # Create debian/control (continuation lines in Description
    # must start with a single space)
    debian_dir = os.path.join(package_dir, "debian")
    os.makedirs(debian_dir)

    control_content = """Source: test-package
Section: devel
Priority: optional
Maintainer: Test User <test@example.com>
Build-Depends: debhelper-compat (= 13)

Package: test-package
Architecture: any
Depends: ${shlibs:Depends}, ${misc:Depends}
Description: Test package for deb-mock testing
 This is a test package used for testing deb-mock functionality.
"""

    with open(os.path.join(debian_dir, "control"), "w") as f:
        f.write(control_content)

    # Create debian/rules (make recipe lines must be tab-indented)
    rules_content = """#!/usr/bin/make -f
%:
\tdh $@
"""

    with open(os.path.join(debian_dir, "rules"), "w") as f:
        f.write(rules_content)

    # Make rules executable
    os.chmod(os.path.join(debian_dir, "rules"), 0o755)

    # Create debian/changelog
    changelog_content = """test-package (1.0-1) trixie; urgency=medium

  * Initial release for testing

 -- Test User <test@example.com>  Mon, 19 Aug 2024 12:00:00 +0000
"""

    with open(os.path.join(debian_dir, "changelog"), "w") as f:
        f.write(changelog_content)

    # Create a simple Makefile (recipe lines tab-indented)
    makefile_content = """all:
\t@echo "Building test package..."

clean:
\t@echo "Cleaning test package..."
"""

    with open(os.path.join(package_dir, "Makefile"), "w") as f:
        f.write(makefile_content)

    return package_dir


@pytest.fixture
def mock_chroot_manager():
    """Mock chroot manager for testing"""
    mock_manager = Mock()
    mock_manager.chroot_exists.return_value = True
    mock_manager.create_chroot.return_value = True
    mock_manager.clean_chroot.return_value = True
    mock_manager.get_chroot_path.return_value = "/tmp/test-chroot"
    return mock_manager


@pytest.fixture
def mock_cache_manager():
    """Mock cache manager for testing"""
    mock_manager = Mock()
    mock_manager.restore_root_cache.return_value = True
    mock_manager.create_root_cache.return_value = True
    mock_manager.get_cache_path.return_value = "/tmp/test-cache"
    return mock_manager


@pytest.fixture
def mock_sbuild_wrapper():
    """Mock sbuild wrapper for testing"""
    mock_wrapper = Mock()
    mock_wrapper.build_package.return_value = {
        "success": True,
        "output": "Build completed successfully",
        "artifacts": ["test-package_1.0-1_amd64.deb"],
    }
    mock_wrapper.check_dependencies.return_value = {
        "satisfied": True,
        "missing": [],
        "installed": ["build-essential", "debhelper"],
    }
    mock_wrapper.install_build_dependencies.return_value = True
    return mock_wrapper


@pytest.fixture
def mock_plugin_manager():
    """Mock plugin manager for testing"""
    mock_manager = Mock()
    mock_manager.init_plugins.return_value = True
    mock_manager.call_hooks.return_value = []
    return mock_manager


@pytest.fixture
def mock_performance_monitor():
    """Mock performance monitor for testing"""
    mock_monitor = Mock()
    # Use a MagicMock for the context manager: dunder methods are looked
    # up on the type, so assigning __enter__/__exit__ to a plain Mock
    # instance would not make it usable in a `with` statement.
    context = MagicMock()
    context.__enter__.return_value = "op_123"
    context.__exit__.return_value = None
    mock_monitor.monitor_operation.return_value = context
    mock_monitor.create_build_profile.return_value = "profile_123"
    mock_monitor.add_phase_metrics.return_value = None
    mock_monitor.finalize_build_profile.return_value = Mock()
    return mock_monitor


@pytest.fixture
def test_environment():
    """Set up test environment variables"""
    original_env = os.environ.copy()

    # Set test environment variables
    test_env = {
        "DEB_MOCK_TEST": "1",
        "DEB_MOCK_DEBUG": "0",
        "PATH": os.environ.get("PATH", ""),
        "HOME": os.environ.get("HOME", "/tmp"),
        "TMPDIR": "/tmp",
    }

    os.environ.update(test_env)

    yield test_env

    # Restore the original environment
    os.environ.clear()
    os.environ.update(original_env)


@pytest.fixture
def isolated_filesystem(temp_dir):
    """Create an isolated filesystem for testing"""
    original_cwd = os.getcwd()

    # Change to the test directory
    os.chdir(temp_dir)

    yield temp_dir

    # Restore the original working directory
    os.chdir(original_cwd)


@pytest.fixture
def mock_system_commands():
    """Mock system commands for testing"""
    with patch("subprocess.run") as mock_run:
        mock_run.return_value.returncode = 0
        mock_run.return_value.stdout = b"Mock output"
        mock_run.return_value.stderr = b""
        yield mock_run


@pytest.fixture
def mock_file_operations():
    """Mock file operations for testing"""
    with patch("shutil.copy2") as mock_copy, \
         patch("shutil.rmtree") as mock_rmtree, \
         patch("os.makedirs") as mock_makedirs:

        mock_copy.return_value = None
        mock_rmtree.return_value = None
        mock_makedirs.return_value = None

        yield {
            "copy": mock_copy,
            "rmtree": mock_rmtree,
            "makedirs": mock_makedirs,
        }


@pytest.fixture
def performance_test_config(temp_dir):
    """Configuration for performance testing"""
    config = Config(
        chroot_name="perf-test-chroot",
        architecture="amd64",
        suite="trixie",
        chroot_dir=os.path.join(temp_dir, "chroots"),
        cache_dir=os.path.join(temp_dir, "cache"),
        work_dir=os.path.join(temp_dir, "work"),
        enable_performance_monitoring=True,
        performance_metrics_dir=os.path.join(temp_dir, "metrics"),
        performance_auto_optimization=False,
        parallel_builds=2,
        plugins=[],
        plugin_conf={},
    )
    return config


@pytest.fixture
def plugin_test_config(temp_dir):
    """Configuration for plugin testing"""
    config = Config(
        chroot_name="plugin-test-chroot",
        architecture="amd64",
        suite="trixie",
        chroot_dir=os.path.join(temp_dir, "chroots"),
        cache_dir=os.path.join(temp_dir, "cache"),
        work_dir=os.path.join(temp_dir, "work"),
        enable_performance_monitoring=False,
        plugins=["test_plugin"],
        plugin_conf={
            "test_plugin": {
                "enabled": True,
                "config_option": "test_value",
            }
        },
        plugin_dir=os.path.join(temp_dir, "plugins"),
    )
    return config


# Test data fixtures
@pytest.fixture
def test_package_data():
    """Test data for package operations"""
    return {
        "name": "test-package",
        "version": "1.0-1",
        "architecture": "amd64",
        "suite": "trixie",
        "dependencies": ["build-essential", "debhelper"],
        "build_depends": ["debhelper-compat (= 13)"],
    }


@pytest.fixture
def test_build_result():
    """Test data for build results"""
    return {
        "success": True,
        "package_name": "test-package",
        "version": "1.0-1",
        "architecture": "amd64",
        "build_time": 45.23,
        "artifacts": ["test-package_1.0-1_amd64.deb"],
        "log_file": "/tmp/build.log",
        "chroot_used": "test-chroot",
    }


@pytest.fixture
def test_performance_metrics():
    """Test data for performance metrics"""
    return {
        "operation": "test_operation",
        "duration": 12.34,
        "cpu_percent": 75.5,
        "memory_mb": 512.0,
        "disk_io_read_mb": 25.6,
        "disk_io_write_mb": 15.3,
        "network_io_mb": 2.1,
    }
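For illustration, a test module could combine these fixtures as below. This is a sketch only; the module name and assertions are assumptions, not part of the commit:

# tests/test_fixtures_example.py (hypothetical)
import os

def test_sample_package_layout(sample_source_package):
    # The fixture lays out debian/control, debian/rules, and debian/changelog.
    assert os.path.isfile(os.path.join(sample_source_package, "debian", "control"))
    assert os.path.isfile(os.path.join(sample_source_package, "debian", "rules"))

def test_wrapper_reports_success(mock_sbuild_wrapper, sample_source_package):
    # The mocked wrapper returns a canned successful build result.
    result = mock_sbuild_wrapper.build_package(sample_source_package)
    assert result["success"] is True
    assert result["artifacts"] == ["test-package_1.0-1_amd64.deb"]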
9 tests/requirements.txt Normal file
@@ -0,0 +1,9 @@
pytest>=7.0.0
pytest-cov>=4.0.0
pytest-mock>=3.10.0
pytest-xdist>=3.0.0
pytest-timeout>=2.1.0
pytest-html>=3.1.0
pytest-json-report>=1.5.0
coverage>=7.0.0
psutil>=5.9.0
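These pins pull in the pytest plugins the runner relies on. For orientation, they map onto command-line options roughly as follows; the snippet invokes pytest programmatically (the target path and report filename are assumptions):

import pytest

# -n auto        -> pytest-xdist: run tests across parallel workers
# --cov=deb_mock -> pytest-cov: collect coverage for the package
# --timeout=300  -> pytest-timeout: abort tests that hang
# --html=...     -> pytest-html: write a standalone HTML report
pytest.main([
    "tests/",
    "-n", "auto",
    "--cov=deb_mock",
    "--timeout=300",
    "--html=report.html", "--self-contained-html",
])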
643 tests/test_core.py Normal file
@@ -0,0 +1,643 @@
"""
Tests for deb-mock core functionality
"""

import os
import tempfile
from unittest.mock import Mock, patch, MagicMock

import pytest

from deb_mock.core import DebMock
from deb_mock.exceptions import DebMockError, ChrootError, CacheError


class TestDebMock:
    """Test the core DebMock class"""

    def test_initialization(self, test_config):
        """Test DebMock initialization"""
        deb_mock = DebMock(test_config)

        assert deb_mock.config == test_config
        assert deb_mock.chroot_manager is not None
        assert deb_mock.cache_manager is not None
        assert deb_mock.sbuild_wrapper is not None
        assert deb_mock.plugin_manager is not None
        assert deb_mock.performance_monitor is not None

    def test_initialization_with_performance_monitoring(self, performance_test_config):
        """Test DebMock initialization with performance monitoring enabled"""
        deb_mock = DebMock(performance_test_config)

        assert deb_mock.performance_monitor.enable_monitoring is True
        assert deb_mock.performance_optimizer is not None
        assert deb_mock.performance_reporter is not None

    def test_initialization_with_plugins(self, plugin_test_config):
        """Test DebMock initialization with plugins"""
        deb_mock = DebMock(plugin_test_config)

        assert deb_mock.plugin_manager is not None
        # The plugin manager should have been initialized
        deb_mock.plugin_manager.init_plugins.assert_called_once_with(deb_mock)

    @patch('deb_mock.core.ChrootManager')
    @patch('deb_mock.core.CacheManager')
    @patch('deb_mock.core.SbuildWrapper')
    @patch('deb_mock.core.PluginManager')
    @patch('deb_mock.core.PerformanceMonitor')
    def test_component_initialization(self, mock_perf_mon, mock_plugin_mgr,
                                      mock_sbuild, mock_cache_mgr, mock_chroot_mgr,
                                      test_config):
        """Test that all components are properly initialized"""
        deb_mock = DebMock(test_config)

        # Verify all managers are created with the configuration
        mock_chroot_mgr.assert_called_once_with(test_config)
        mock_cache_mgr.assert_called_once_with(test_config)
        mock_sbuild.assert_called_once_with(test_config)
        mock_plugin_mgr.assert_called_once_with(test_config)
        mock_perf_mon.assert_called_once_with(test_config)

    def test_build_with_existing_chroot(self, mock_deb_mock, sample_source_package,
                                        mock_chroot_manager, mock_cache_manager,
                                        mock_sbuild_wrapper, mock_plugin_manager,
                                        mock_performance_monitor):
        """Test building with an existing chroot"""
        # Attach the mocked components
        mock_deb_mock.chroot_manager = mock_chroot_manager
        mock_deb_mock.cache_manager = mock_cache_manager
        mock_deb_mock.sbuild_wrapper = mock_sbuild_wrapper
        mock_deb_mock.plugin_manager = mock_plugin_manager
        mock_deb_mock.performance_monitor = mock_performance_monitor

        # The chroot already exists
        mock_chroot_manager.chroot_exists.return_value = True

        # Stub config methods
        mock_deb_mock.config.setup_build_environment = Mock(return_value={})
        mock_deb_mock.config.get_chroot_path = Mock(return_value="/tmp/test-chroot")

        # Stub metadata handling
        mock_deb_mock._capture_build_metadata = Mock(return_value={})
        mock_deb_mock.metadata_manager = Mock()

        # Run the build
        result = mock_deb_mock.build(sample_source_package)

        # Verify no new chroot was created
        mock_chroot_manager.create_chroot.assert_not_called()

        # Verify the plugin hooks were called
        mock_plugin_manager.call_hooks.assert_any_call(
            mock_plugin_manager.HookStages.PREBUILD, sample_source_package, chroot_name="test-chroot"
        )
        mock_plugin_manager.call_hooks.assert_any_call(
            mock_plugin_manager.HookStages.BUILD_START, sample_source_package, "test-chroot"
        )
        mock_plugin_manager.call_hooks.assert_any_call(
            mock_plugin_manager.HookStages.BUILD_END, result, sample_source_package, "test-chroot"
        )
        mock_plugin_manager.call_hooks.assert_any_call(
            mock_plugin_manager.HookStages.POSTBUILD, result, sample_source_package
        )

        # Verify the build result
        assert result["success"] is True

    def test_build_with_new_chroot(self, mock_deb_mock, sample_source_package,
                                   mock_chroot_manager, mock_cache_manager,
                                   mock_sbuild_wrapper, mock_plugin_manager,
                                   mock_performance_monitor):
        """Test building with a new chroot"""
        # Attach the mocked components
        mock_deb_mock.chroot_manager = mock_chroot_manager
        mock_deb_mock.cache_manager = mock_cache_manager
        mock_deb_mock.sbuild_wrapper = mock_sbuild_wrapper
        mock_deb_mock.plugin_manager = mock_plugin_manager
        mock_deb_mock.performance_monitor = mock_performance_monitor

        # The chroot does not exist yet
        mock_chroot_manager.chroot_exists.return_value = False

        # Stub config methods
        mock_deb_mock.config.setup_build_environment = Mock(return_value={})
        mock_deb_mock.config.get_chroot_path = Mock(return_value="/tmp/test-chroot")

        # Stub metadata handling
        mock_deb_mock._capture_build_metadata = Mock(return_value={})
        mock_deb_mock.metadata_manager = Mock()

        # Run the build
        result = mock_deb_mock.build(sample_source_package)

        # Verify the chroot was created
        mock_chroot_manager.create_chroot.assert_called_once_with("test-chroot")

        # Verify the build result
        assert result["success"] is True

    def test_build_with_cache_restore(self, mock_deb_mock, sample_source_package,
                                      mock_chroot_manager, mock_cache_manager,
                                      mock_sbuild_wrapper, mock_plugin_manager,
                                      mock_performance_monitor):
        """Test building with cache restoration"""
        # Attach the mocked components
        mock_deb_mock.chroot_manager = mock_chroot_manager
        mock_deb_mock.cache_manager = mock_cache_manager
        mock_deb_mock.sbuild_wrapper = mock_sbuild_wrapper
        mock_deb_mock.plugin_manager = mock_plugin_manager
        mock_deb_mock.performance_monitor = mock_performance_monitor

        # The chroot does not exist, but the cache restore succeeds
        mock_chroot_manager.chroot_exists.return_value = False
        mock_cache_manager.restore_root_cache.return_value = True

        # Stub config methods
        mock_deb_mock.config.setup_build_environment = Mock(return_value={})
        mock_deb_mock.config.get_chroot_path = Mock(return_value="/tmp/test-chroot")

        # Stub metadata handling
        mock_deb_mock._capture_build_metadata = Mock(return_value={})
        mock_deb_mock.metadata_manager = Mock()

        # Run the build
        result = mock_deb_mock.build(sample_source_package)

        # Verify the cache was restored
        mock_cache_manager.restore_root_cache.assert_called_once()

        # Verify no chroot was created (the cache restore succeeded)
        mock_chroot_manager.create_chroot.assert_not_called()

        # Verify the build result
        assert result["success"] is True

    def test_build_with_failed_cache_restore(self, mock_deb_mock, sample_source_package,
                                             mock_chroot_manager, mock_cache_manager,
                                             mock_sbuild_wrapper, mock_plugin_manager,
                                             mock_performance_monitor):
        """Test building with failed cache restoration"""
        # Attach the mocked components
        mock_deb_mock.chroot_manager = mock_chroot_manager
        mock_deb_mock.cache_manager = mock_cache_manager
        mock_deb_mock.sbuild_wrapper = mock_sbuild_wrapper
        mock_deb_mock.plugin_manager = mock_plugin_manager
        mock_deb_mock.performance_monitor = mock_performance_monitor

        # The chroot does not exist and the cache restore fails
        mock_chroot_manager.chroot_exists.return_value = False
        mock_cache_manager.restore_root_cache.return_value = False

        # Stub config methods
        mock_deb_mock.config.setup_build_environment = Mock(return_value={})
        mock_deb_mock.config.get_chroot_path = Mock(return_value="/tmp/test-chroot")

        # Stub metadata handling
        mock_deb_mock._capture_build_metadata = Mock(return_value={})
        mock_deb_mock.metadata_manager = Mock()

        # Run the build
        result = mock_deb_mock.build(sample_source_package)

        # Verify the cache restore was attempted
        mock_cache_manager.restore_root_cache.assert_called_once()

        # Verify the chroot was created as a fallback
        mock_chroot_manager.create_chroot.assert_called_once_with("test-chroot")

        # Verify the build result
        assert result["success"] is True

    def test_build_with_custom_chroot_name(self, mock_deb_mock, sample_source_package,
                                           mock_chroot_manager, mock_cache_manager,
                                           mock_sbuild_wrapper, mock_plugin_manager,
                                           mock_performance_monitor):
        """Test building with a custom chroot name"""
        # Attach the mocked components
        mock_deb_mock.chroot_manager = mock_chroot_manager
        mock_deb_mock.cache_manager = mock_cache_manager
        mock_deb_mock.sbuild_wrapper = mock_sbuild_wrapper
        mock_deb_mock.plugin_manager = mock_plugin_manager
        mock_deb_mock.performance_monitor = mock_performance_monitor

        # The chroot already exists
        mock_chroot_manager.chroot_exists.return_value = True

        # Stub config methods
        mock_deb_mock.config.setup_build_environment = Mock(return_value={})
        mock_deb_mock.config.get_chroot_path = Mock(return_value="/tmp/custom-chroot")

        # Stub metadata handling
        mock_deb_mock._capture_build_metadata = Mock(return_value={})
        mock_deb_mock.metadata_manager = Mock()

        # Run the build with a custom chroot name
        result = mock_deb_mock.build(sample_source_package, chroot_name="custom-chroot")

        # Verify the custom chroot name was used
        mock_chroot_manager.chroot_exists.assert_called_with("custom-chroot")

        # Verify the hooks were called with the custom chroot name
        mock_plugin_manager.call_hooks.assert_any_call(
            mock_plugin_manager.HookStages.BUILD_START, sample_source_package, "custom-chroot"
        )

        # Verify the build result
        assert result["success"] is True

    def test_build_with_keep_chroot(self, mock_deb_mock, sample_source_package,
                                    mock_chroot_manager, mock_cache_manager,
                                    mock_sbuild_wrapper, mock_plugin_manager,
                                    mock_performance_monitor):
        """Test building with the keep_chroot option"""
        # Attach the mocked components
        mock_deb_mock.chroot_manager = mock_chroot_manager
        mock_deb_mock.cache_manager = mock_cache_manager
        mock_deb_mock.sbuild_wrapper = mock_sbuild_wrapper
        mock_deb_mock.plugin_manager = mock_plugin_manager
        mock_deb_mock.performance_monitor = mock_performance_monitor

        # The chroot already exists
        mock_chroot_manager.chroot_exists.return_value = True

        # Stub config methods
        mock_deb_mock.config.setup_build_environment = Mock(return_value={})
        mock_deb_mock.config.get_chroot_path = Mock(return_value="/tmp/test-chroot")
        mock_deb_mock.config.keep_chroot = False  # Config default is False

        # Stub metadata handling
        mock_deb_mock._capture_build_metadata = Mock(return_value={})
        mock_deb_mock.metadata_manager = Mock()

        # Run the build with keep_chroot=True overriding the config
        result = mock_deb_mock.build(sample_source_package, keep_chroot=True)

        # Verify the chroot was not cleaned up
        mock_chroot_manager.clean_chroot.assert_not_called()

        # Verify the build result
        assert result["success"] is True

    def test_build_without_keep_chroot(self, mock_deb_mock, sample_source_package,
                                       mock_chroot_manager, mock_cache_manager,
                                       mock_sbuild_wrapper, mock_plugin_manager,
                                       mock_performance_monitor):
        """Test building without the keep_chroot option"""
        # Attach the mocked components
        mock_deb_mock.chroot_manager = mock_chroot_manager
        mock_deb_mock.cache_manager = mock_cache_manager
        mock_deb_mock.sbuild_wrapper = mock_sbuild_wrapper
        mock_deb_mock.plugin_manager = mock_plugin_manager
        mock_deb_mock.performance_monitor = mock_performance_monitor

        # The chroot already exists
        mock_chroot_manager.chroot_exists.return_value = True

        # Stub config methods
        mock_deb_mock.config.setup_build_environment = Mock(return_value={})
        mock_deb_mock.config.get_chroot_path = Mock(return_value="/tmp/test-chroot")
        mock_deb_mock.config.keep_chroot = False  # Config default is False

        # Stub metadata handling
        mock_deb_mock._capture_build_metadata = Mock(return_value={})
        mock_deb_mock.metadata_manager = Mock()

        # Run the build (default keep_chroot=False)
        result = mock_deb_mock.build(sample_source_package)

        # Verify the chroot was cleaned up
        mock_chroot_manager.clean_chroot.assert_called_once_with("test-chroot")

        # Verify the build result
        assert result["success"] is True

    def test_build_with_successful_result(self, mock_deb_mock, sample_source_package,
                                          mock_chroot_manager, mock_cache_manager,
                                          mock_sbuild_wrapper, mock_plugin_manager,
                                          mock_performance_monitor):
        """Test that a successful build triggers cache creation"""
        # Attach the mocked components
        mock_deb_mock.chroot_manager = mock_chroot_manager
        mock_deb_mock.cache_manager = mock_cache_manager
        mock_deb_mock.sbuild_wrapper = mock_sbuild_wrapper
        mock_deb_mock.plugin_manager = mock_plugin_manager
        mock_deb_mock.performance_monitor = mock_performance_monitor

        # The chroot already exists
        mock_chroot_manager.chroot_exists.return_value = True

        # Stub config methods
        mock_deb_mock.config.setup_build_environment = Mock(return_value={})
        mock_deb_mock.config.get_chroot_path = Mock(return_value="/tmp/test-chroot")

        # Stub metadata handling
        mock_deb_mock._capture_build_metadata = Mock(return_value={})
        mock_deb_mock.metadata_manager = Mock()

        # Run the build
        result = mock_deb_mock.build(sample_source_package)

        # Verify the cache was created (successful build)
        mock_cache_manager.create_root_cache.assert_called_once()

        # Verify the build result
        assert result["success"] is True

    def test_build_with_failed_result(self, mock_deb_mock, sample_source_package,
                                      mock_chroot_manager, mock_cache_manager,
                                      mock_sbuild_wrapper, mock_plugin_manager,
                                      mock_performance_monitor):
        """Test that a failed build does not trigger cache creation"""
        # Attach the mocked components
        mock_deb_mock.chroot_manager = mock_chroot_manager
        mock_deb_mock.cache_manager = mock_cache_manager
        mock_deb_mock.sbuild_wrapper = mock_sbuild_wrapper
        mock_deb_mock.plugin_manager = mock_plugin_manager
        mock_deb_mock.performance_monitor = mock_performance_monitor

        # The chroot already exists
        mock_chroot_manager.chroot_exists.return_value = True

        # Stub config methods
        mock_deb_mock.config.setup_build_environment = Mock(return_value={})
        mock_deb_mock.config.get_chroot_path = Mock(return_value="/tmp/test-chroot")

        # Make the build fail
        mock_sbuild_wrapper.build_package.return_value = {
            "success": False,
            "error": "Build failed",
            "output": "Error output",
        }

        # Stub metadata handling
        mock_deb_mock._capture_build_metadata = Mock(return_value={})
        mock_deb_mock.metadata_manager = Mock()

        # Run the build
        result = mock_deb_mock.build(sample_source_package)

        # Verify no cache was created (failed build)
        mock_cache_manager.create_root_cache.assert_not_called()

        # Verify the build result
        assert result["success"] is False
        assert "error" in result

    def test_build_metadata_capture(self, mock_deb_mock, sample_source_package,
                                    mock_chroot_manager, mock_cache_manager,
                                    mock_sbuild_wrapper, mock_plugin_manager,
                                    mock_performance_monitor):
        """Test that build metadata is captured and stored"""
        # Attach the mocked components
        mock_deb_mock.chroot_manager = mock_chroot_manager
        mock_deb_mock.cache_manager = mock_cache_manager
        mock_deb_mock.sbuild_wrapper = mock_sbuild_wrapper
        mock_deb_mock.plugin_manager = mock_plugin_manager
        mock_deb_mock.performance_monitor = mock_performance_monitor

        # The chroot already exists
        mock_chroot_manager.chroot_exists.return_value = True

        # Stub config methods
        mock_deb_mock.config.setup_build_environment = Mock(return_value={})
        mock_deb_mock.config.get_chroot_path = Mock(return_value="/tmp/test-chroot")

        # Stub metadata handling with a sentinel value
        mock_deb_mock._capture_build_metadata = Mock(return_value={"test": "data"})
        mock_deb_mock.metadata_manager = Mock()

        # Run the build
        result = mock_deb_mock.build(sample_source_package)

        # Verify the metadata was captured
        mock_deb_mock._capture_build_metadata.assert_called_once_with(result, sample_source_package)

        # Verify the metadata was stored
        mock_deb_mock.metadata_manager.store_metadata.assert_called_once_with({"test": "data"})

        # Verify the build result
        assert result["success"] is True

    def test_build_with_performance_monitoring(self, performance_test_config, sample_source_package,
                                               mock_chroot_manager, mock_cache_manager,
                                               mock_sbuild_wrapper, mock_plugin_manager):
        """Test building with performance monitoring enabled"""
        # Create a DebMock instance with performance monitoring
        deb_mock = DebMock(performance_test_config)

        # Attach the mocked components
        deb_mock.chroot_manager = mock_chroot_manager
        deb_mock.cache_manager = mock_cache_manager
        deb_mock.sbuild_wrapper = mock_sbuild_wrapper
        deb_mock.plugin_manager = mock_plugin_manager

        # The chroot already exists
        mock_chroot_manager.chroot_exists.return_value = True

        # Stub config methods
        deb_mock.config.setup_build_environment = Mock(return_value={})
        deb_mock.config.get_chroot_path = Mock(return_value="/tmp/test-chroot")

        # Stub metadata handling
        deb_mock._capture_build_metadata = Mock(return_value={})
        deb_mock.metadata_manager = Mock()

        # Run the build
        result = deb_mock.build(sample_source_package)

        # Verify performance monitoring was used
        assert deb_mock.performance_monitor.create_build_profile.called
        assert deb_mock.performance_monitor.finalize_build_profile.called

        # Verify the build result
        assert result["success"] is True

    def test_build_with_auto_optimization(self, performance_test_config, sample_source_package,
                                          mock_chroot_manager, mock_cache_manager,
                                          mock_sbuild_wrapper, mock_plugin_manager):
        """Test building with automatic optimization enabled"""
        # Enable auto optimization
        performance_test_config.performance_auto_optimization = True

        # Create a DebMock instance with monitoring and auto optimization
        deb_mock = DebMock(performance_test_config)

        # Attach the mocked components
        deb_mock.chroot_manager = mock_chroot_manager
        deb_mock.cache_manager = mock_cache_manager
        deb_mock.sbuild_wrapper = mock_sbuild_wrapper
        deb_mock.plugin_manager = mock_plugin_manager

        # The chroot already exists
        mock_chroot_manager.chroot_exists.return_value = True

        # Stub config methods
        deb_mock.config.setup_build_environment = Mock(return_value={})
        deb_mock.config.get_chroot_path = Mock(return_value="/tmp/test-chroot")

        # Stub metadata handling
        deb_mock._capture_build_metadata = Mock(return_value={})
        deb_mock.metadata_manager = Mock()

        # Stub the performance analysis
        mock_analysis = {
            "automatic_tunings": [{"type": "parallel_builds", "suggested": 3}]
        }
        deb_mock.performance_optimizer.analyze_build_performance = Mock(return_value=mock_analysis)

        # Run the build
        result = deb_mock.build(sample_source_package)

        # Verify the optimization analysis was performed
        deb_mock.performance_optimizer.analyze_build_performance.assert_called_once()

        # Verify the build result
        assert result["success"] is True

    def test_build_chain(self, mock_deb_mock, sample_source_package,
                         mock_chroot_manager, mock_cache_manager,
                         mock_sbuild_wrapper, mock_plugin_manager,
                         mock_performance_monitor):
        """Test building multiple packages in a chain"""
        # Attach the mocked components
        mock_deb_mock.chroot_manager = mock_chroot_manager
        mock_deb_mock.cache_manager = mock_cache_manager
        mock_deb_mock.sbuild_wrapper = mock_sbuild_wrapper
        mock_deb_mock.plugin_manager = mock_plugin_manager
        mock_deb_mock.performance_monitor = mock_performance_monitor

        # The chroot already exists
        mock_chroot_manager.chroot_exists.return_value = True

        # Stub config methods
        mock_deb_mock.config.setup_build_environment = Mock(return_value={})
        mock_deb_mock.config.get_chroot_path = Mock(return_value="/tmp/test-chroot")

        # Stub metadata handling
        mock_deb_mock._capture_build_metadata = Mock(return_value={})
        mock_deb_mock.metadata_manager = Mock()

        # Build the same sample package twice
        packages = [sample_source_package, sample_source_package]

        # Run the build chain
        results = mock_deb_mock.build_chain(packages)

        # Verify all packages were built
        assert len(results) == 2
        assert all(result["success"] for result in results)

        # Verify the build was invoked for each package
        assert mock_deb_mock.sbuild_wrapper.build_package.call_count == 2

    def test_build_chain_with_failure(self, mock_deb_mock, sample_source_package,
                                      mock_chroot_manager, mock_cache_manager,
                                      mock_sbuild_wrapper, mock_plugin_manager,
                                      mock_performance_monitor):
        """Test that a build chain stops on the first failure"""
        # Attach the mocked components
        mock_deb_mock.chroot_manager = mock_chroot_manager
        mock_deb_mock.cache_manager = mock_cache_manager
        mock_deb_mock.sbuild_wrapper = mock_sbuild_wrapper
        mock_deb_mock.plugin_manager = mock_plugin_manager
        mock_deb_mock.performance_monitor = mock_performance_monitor

        # The chroot already exists
        mock_chroot_manager.chroot_exists.return_value = True

        # Stub config methods
        mock_deb_mock.config.setup_build_environment = Mock(return_value={})
        mock_deb_mock.config.get_chroot_path = Mock(return_value="/tmp/test-chroot")

        # Stub metadata handling
        mock_deb_mock._capture_build_metadata = Mock(return_value={})
        mock_deb_mock.metadata_manager = Mock()

        # The first build succeeds, the second fails
        mock_deb_mock.sbuild_wrapper.build_package.side_effect = [
            {"success": True, "output": "First build succeeded"},
            {"success": False, "error": "Second build failed"},
        ]

        # Build the same sample package twice
        packages = [sample_source_package, sample_source_package]

        # Run the build chain
        results = mock_deb_mock.build_chain(packages)

        # Only the successful build's result is returned
        assert len(results) == 1
        assert results[0]["success"] is True

        # Both builds were attempted; the chain stopped after the failure
        assert mock_deb_mock.sbuild_wrapper.build_package.call_count == 2

    def test_parallel_build(self, mock_deb_mock, sample_source_package,
                            mock_chroot_manager, mock_cache_manager,
                            mock_sbuild_wrapper, mock_plugin_manager,
                            mock_performance_monitor):
        """Test parallel building of packages"""
        # Attach the mocked components
        mock_deb_mock.chroot_manager = mock_chroot_manager
        mock_deb_mock.cache_manager = mock_cache_manager
        mock_deb_mock.sbuild_wrapper = mock_sbuild_wrapper
        mock_deb_mock.plugin_manager = mock_plugin_manager
        mock_deb_mock.performance_monitor = mock_performance_monitor

        # The chroot already exists
        mock_chroot_manager.chroot_exists.return_value = True

        # Stub config methods
        mock_deb_mock.config.setup_build_environment = Mock(return_value={})
        mock_deb_mock.config.get_chroot_path = Mock(return_value="/tmp/test-chroot")
        mock_deb_mock.config.parallel_builds = 2

        # Stub metadata handling
        mock_deb_mock._capture_build_metadata = Mock(return_value={})
        mock_deb_mock.metadata_manager = Mock()

        # Build the same sample package three times
        packages = [sample_source_package, sample_source_package, sample_source_package]

        # Run the parallel build
        results = mock_deb_mock.build_parallel(packages)

        # Verify all packages were built
        assert len(results) == 3
        assert all(result["success"] for result in results)

        # Verify the build was invoked for each package
        assert mock_deb_mock.sbuild_wrapper.build_package.call_count == 3

    def test_cleanup(self, mock_deb_mock, mock_chroot_manager, mock_cache_manager):
        """Test cleanup functionality"""
        # Attach the mocked components
        mock_deb_mock.chroot_manager = mock_chroot_manager
        mock_deb_mock.cache_manager = mock_cache_manager

        # Run cleanup
        mock_deb_mock.cleanup()

        # Verify cleanup was delegated to the managers
        mock_chroot_manager.cleanup.assert_called_once()
        mock_cache_manager.cleanup.assert_called_once()

    def test_get_status(self, mock_deb_mock, mock_chroot_manager, mock_cache_manager):
        """Test status reporting"""
        # Attach the mocked components
        mock_deb_mock.chroot_manager = mock_chroot_manager
        mock_deb_mock.cache_manager = mock_cache_manager

        # Stub the status methods
        mock_chroot_manager.get_status.return_value = {"chroots": 2, "active": 1}
        mock_cache_manager.get_status.return_value = {"caches": 3, "size_mb": 1024}

        # Get the status
        status = mock_deb_mock.get_status()

        # Verify the status includes all components
        assert "chroot_manager" in status
        assert "cache_manager" in status
        assert status["chroot_manager"]["chroots"] == 2
        assert status["cache_manager"]["caches"] == 3
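Each test_build_* case above repeats the same mock wiring and config stubbing. A small composite fixture in conftest.py would remove that duplication; the sketch below is an assumption (the fixture name wired_deb_mock is invented here, and it presumes Mock and pytest are in scope as in conftest.py):

@pytest.fixture
def wired_deb_mock(mock_deb_mock, mock_chroot_manager, mock_cache_manager,
                   mock_sbuild_wrapper, mock_plugin_manager, mock_performance_monitor):
    # Attach all mocked collaborators and stub the config/metadata hooks once.
    mock_deb_mock.chroot_manager = mock_chroot_manager
    mock_deb_mock.cache_manager = mock_cache_manager
    mock_deb_mock.sbuild_wrapper = mock_sbuild_wrapper
    mock_deb_mock.plugin_manager = mock_plugin_manager
    mock_deb_mock.performance_monitor = mock_performance_monitor
    mock_deb_mock.config.setup_build_environment = Mock(return_value={})
    mock_deb_mock.config.get_chroot_path = Mock(return_value="/tmp/test-chroot")
    mock_deb_mock._capture_build_metadata = Mock(return_value={})
    mock_deb_mock.metadata_manager = Mock()
    return mock_deb_mock

A test would then take wired_deb_mock as its only fixture and adjust just the return values it cares about.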
748 tests/test_performance.py Normal file
@@ -0,0 +1,748 @@
"""
Tests for deb-mock performance monitoring and optimization
"""

import json
import os
import tempfile
import time
from datetime import datetime, timedelta
from unittest.mock import Mock, patch, MagicMock

import pytest

from deb_mock.performance import (
    PerformanceMonitor, PerformanceOptimizer, PerformanceReporter,
    PerformanceMetrics, BuildProfile
)
from deb_mock.exceptions import PerformanceError


class TestPerformanceMetrics:
    """Test the PerformanceMetrics data class"""

    def test_metrics_creation(self):
        """Test creating PerformanceMetrics"""
        metrics = PerformanceMetrics(
            operation="test_operation",
            start_time=1000.0,
            end_time=1010.0,
            duration=10.0,
            cpu_percent=75.5,
            memory_mb=512.0,
            disk_io_read_mb=25.6,
            disk_io_write_mb=15.3,
            network_io_mb=2.1,
            chroot_size_mb=1024.0,
            cache_hit_rate=0.8,
            parallel_efficiency=0.9,
            resource_utilization=0.85,
        )

        assert metrics.operation == "test_operation"
        assert metrics.duration == 10.0
        assert metrics.cpu_percent == 75.5
        assert metrics.memory_mb == 512.0
        assert metrics.cache_hit_rate == 0.8

    def test_metrics_calculation(self):
        """Test metrics calculation from start/end times"""
        start_time = time.time()
        time.sleep(0.1)  # Small delay
        end_time = time.time()

        metrics = PerformanceMetrics(
            operation="test_calc",
            start_time=start_time,
            end_time=end_time,
            duration=end_time - start_time,
            cpu_percent=50.0,
            memory_mb=256.0,
            disk_io_read_mb=10.0,
            disk_io_write_mb=5.0,
            network_io_mb=1.0,
            chroot_size_mb=512.0,
            cache_hit_rate=0.7,
            parallel_efficiency=0.8,
            resource_utilization=0.75,
        )

        assert metrics.duration > 0
        assert metrics.duration < 1.0  # Should be well under a second


class TestBuildProfile:
    """Test the BuildProfile data class"""

    def test_profile_creation(self):
        """Test creating BuildProfile"""
        profile = BuildProfile(
            build_id="test_build_123",
            package_name="test-package",
            architecture="amd64",
            suite="trixie",
            total_duration=45.23,
            phases={},
            resource_peak={},
            cache_performance={},
            optimization_suggestions=[],
            timestamp=datetime.now(),
        )

        assert profile.build_id == "test_build_123"
        assert profile.package_name == "test-package"
        assert profile.architecture == "amd64"
        assert profile.suite == "trixie"
        assert profile.total_duration == 45.23

    def test_profile_with_phases(self):
        """Test BuildProfile with phase metrics"""
        metrics = PerformanceMetrics(
            operation="test_phase",
            start_time=1000.0,
            end_time=1010.0,
            duration=10.0,
            cpu_percent=75.5,
            memory_mb=512.0,
            disk_io_read_mb=25.6,
            disk_io_write_mb=15.3,
            network_io_mb=2.1,
            chroot_size_mb=1024.0,
            cache_hit_rate=0.8,
            parallel_efficiency=0.9,
            resource_utilization=0.85,
        )

        profile = BuildProfile(
            build_id="test_build_123",
            package_name="test-package",
            architecture="amd64",
            suite="trixie",
            total_duration=10.0,
            phases={"test_phase": metrics},
            resource_peak={"cpu_percent": 75.5, "memory_mb": 512.0},
            cache_performance={"hit_rate": 0.8},
            optimization_suggestions=["Test suggestion"],
            timestamp=datetime.now(),
        )

        assert "test_phase" in profile.phases
        assert profile.phases["test_phase"] == metrics
        assert profile.resource_peak["cpu_percent"] == 75.5
        assert profile.cache_performance["hit_rate"] == 0.8
        assert len(profile.optimization_suggestions) == 1


class TestPerformanceMonitor:
    """Test the PerformanceMonitor class"""

    def test_initialization(self, test_config):
        """Test PerformanceMonitor initialization"""
        monitor = PerformanceMonitor(test_config)

        assert monitor.config == test_config
        assert monitor.enable_monitoring == test_config.enable_performance_monitoring
        assert monitor.metrics_dir == test_config.performance_metrics_dir
        assert monitor.retention_days == test_config.performance_retention_days
        assert monitor._active_operations == {}
        assert monitor._operation_history == []
        assert monitor._build_profiles == {}

    def test_initialization_with_monitoring_disabled(self, test_config):
        """Test PerformanceMonitor initialization with monitoring disabled"""
        test_config.enable_performance_monitoring = False
        monitor = PerformanceMonitor(test_config)

        assert monitor.enable_monitoring is False
        assert monitor._monitoring_active is False

    def test_metrics_directory_creation(self, temp_dir):
        """Test that the metrics directory is created"""
        config = Mock()
        config.enable_performance_monitoring = True
        config.performance_metrics_dir = os.path.join(temp_dir, "metrics")
        config.performance_retention_days = 30

        monitor = PerformanceMonitor(config)

        assert os.path.exists(config.performance_metrics_dir)

    @patch('deb_mock.performance.psutil')
    def test_system_monitoring_start(self, mock_psutil, test_config):
        """Test starting system monitoring"""
        test_config.enable_performance_monitoring = True

        # Mock the psutil calls
        mock_psutil.cpu_percent.return_value = 50.0
        mock_psutil.virtual_memory.return_value = Mock(percent=60.0, available=1024*1024*1024)
        mock_psutil.disk_usage.return_value = Mock(percent=70.0, free=1024*1024*1024*10)

        monitor = PerformanceMonitor(test_config)

        # Give the monitoring thread a moment to start
        time.sleep(0.1)

        # Stop monitoring
        monitor.stop_monitoring()

        # Verify monitoring was shut down cleanly
        assert monitor._monitoring_active is False

    def test_monitor_operation_context_manager(self, test_config):
        """Test the monitor_operation context manager"""
        test_config.enable_performance_monitoring = True
        monitor = PerformanceMonitor(test_config)

        with monitor.monitor_operation("test_op") as op_id:
            assert op_id.startswith("test_op_")
            time.sleep(0.1)  # Small delay

        # Verify the operation was tracked
        assert len(monitor._operation_history) == 1
        assert monitor._operation_history[0].operation == "test_op"
        assert monitor._operation_history[0].duration > 0

    def test_monitor_operation_disabled(self, test_config):
        """Test monitor_operation when monitoring is disabled"""
        test_config.enable_performance_monitoring = False
        monitor = PerformanceMonitor(test_config)

        with monitor.monitor_operation("test_op") as op_id:
            assert op_id is None  # Should yield None when disabled

        # Verify no operation was tracked
        assert len(monitor._operation_history) == 0

    @patch('deb_mock.performance.psutil')
    def test_operation_metrics_collection(self, mock_psutil, test_config):
        """Test that operation metrics are properly collected"""
        test_config.enable_performance_monitoring = True

        # Mock the psutil calls: one sample at start, one at end
        mock_psutil.cpu_percent.side_effect = [25.0, 75.0]
        mock_psutil.virtual_memory.side_effect = [
            Mock(used=1024*1024*1024),   # 1 GB
            Mock(used=1536*1024*1024),   # 1.5 GB
        ]
        mock_psutil.disk_io_counters.side_effect = [
            Mock(read_bytes=1000, write_bytes=500),
            Mock(read_bytes=2000, write_bytes=1000),
        ]
        mock_psutil.net_io_counters.side_effect = [
            Mock(bytes_sent=100, bytes_recv=200),
            Mock(bytes_sent=300, bytes_recv=600),
        ]

        monitor = PerformanceMonitor(test_config)

        with monitor.monitor_operation("test_op"):
            time.sleep(0.1)

        # Verify the metrics were collected
        assert len(monitor._operation_history) == 1
        metrics = monitor._operation_history[0]

        assert metrics.operation == "test_op"
        assert metrics.duration > 0
        assert metrics.cpu_percent == 50.0  # Average of 25 and 75
        assert metrics.memory_mb > 0  # Should be positive
        assert metrics.disk_io_read_mb > 0
        assert metrics.disk_io_write_mb > 0
        assert metrics.network_io_mb > 0

    def test_benchmark_operation(self, test_config):
        """Test benchmarking an operation"""
        test_config.enable_performance_monitoring = True
        monitor = PerformanceMonitor(test_config)

        def test_function():
            time.sleep(0.1)
            return "test_result"

        result = monitor.benchmark_operation("test_bench", test_function, iterations=3)

        assert result["operation"] == "test_bench"
        assert result["iterations"] == 3
        assert result["average_duration"] > 0
        assert result["min_duration"] > 0
        assert result["max_duration"] > 0
        assert result["variance"] >= 0
        assert len(result["results"]) == 3

        # Verify every iteration returned the expected value
        for iteration_result in result["results"]:
            assert iteration_result["result"] == "test_result"

    def test_benchmark_operation_disabled(self, test_config):
        """Test benchmarking when monitoring is disabled"""
        test_config.enable_performance_monitoring = False
        monitor = PerformanceMonitor(test_config)

        def test_function():
            return "test_result"

        result = monitor.benchmark_operation("test_bench", test_function, iterations=3)

        # When disabled, the function result is returned directly
        assert result == "test_result"

    def test_create_build_profile(self, test_config):
        """Test creating a build profile"""
        test_config.enable_performance_monitoring = True
        monitor = PerformanceMonitor(test_config)

        profile_id = monitor.create_build_profile(
            "test_build", "test-package", "amd64", "trixie"
        )

        assert profile_id in monitor._build_profiles
        profile = monitor._build_profiles[profile_id]

        assert profile.build_id == "test_build"
        assert profile.package_name == "test-package"
        assert profile.architecture == "amd64"
        assert profile.suite == "trixie"
        assert profile.total_duration == 0
        assert profile.phases == {}
        assert profile.resource_peak == {}
        assert profile.cache_performance == {}
        assert profile.optimization_suggestions == []
        assert isinstance(profile.timestamp, datetime)

    def test_add_phase_metrics(self, test_config):
        """Test adding phase metrics to a build profile"""
        test_config.enable_performance_monitoring = True
        monitor = PerformanceMonitor(test_config)

        # Create a profile
        profile_id = monitor.create_build_profile(
            "test_build", "test-package", "amd64", "trixie"
        )

        # Create the metrics for one phase
        metrics = PerformanceMetrics(
            operation="test_phase",
            start_time=1000.0,
            end_time=1010.0,
            duration=10.0,
            cpu_percent=75.5,
            memory_mb=512.0,
            disk_io_read_mb=25.6,
            disk_io_write_mb=15.3,
            network_io_mb=2.1,
            chroot_size_mb=1024.0,
            cache_hit_rate=0.8,
            parallel_efficiency=0.9,
            resource_utilization=0.85,
        )

        # Add the metrics
        monitor.add_phase_metrics(profile_id, "test_phase", metrics)

        profile = monitor._build_profiles[profile_id]
        assert "test_phase" in profile.phases
        assert profile.phases["test_phase"] == metrics
        assert profile.total_duration == 10.0
        assert profile.resource_peak["cpu_percent"] == 75.5
        assert profile.resource_peak["memory_mb"] == 512.0

    def test_finalize_build_profile(self, test_config):
        """Test finalizing a build profile"""
        test_config.enable_performance_monitoring = True
        monitor = PerformanceMonitor(test_config)

        # Create a profile with metrics
        profile_id = monitor.create_build_profile(
            "test_build", "test-package", "amd64", "trixie"
        )

        # Add some metrics
        metrics = PerformanceMetrics(
            operation="cache_operation",
            start_time=1000.0,
            end_time=1010.0,
            duration=10.0,
            cpu_percent=50.0,
            memory_mb=256.0,
            disk_io_read_mb=10.0,
            disk_io_write_mb=5.0,
            network_io_mb=1.0,
            chroot_size_mb=512.0,
            cache_hit_rate=0.8,
            parallel_efficiency=0.9,
            resource_utilization=0.85,
        )

        monitor.add_phase_metrics(profile_id, "cache_operation", metrics)

        # Finalize the profile
        profile = monitor.finalize_build_profile(profile_id)

        assert profile is not None
        assert profile.cache_performance["average_hit_rate"] == 0.8
        assert len(profile.optimization_suggestions) > 0

    def test_get_performance_summary(self, test_config):
        """Test getting the performance summary"""
        test_config.enable_performance_monitoring = True
        monitor = PerformanceMonitor(test_config)

        # Record a few operations
        with monitor.monitor_operation("op1"):
            time.sleep(0.1)

        with monitor.monitor_operation("op2"):
            time.sleep(0.1)

        with monitor.monitor_operation("op1"):  # Same operation type again
            time.sleep(0.1)

        summary = monitor.get_performance_summary()

        assert summary["total_operations"] == 3
        assert summary["total_duration"] > 0
        assert summary["average_duration"] > 0
        assert summary["active_operations"] == 0

        # Check the per-operation statistics
        assert "op1" in summary["operation_stats"]
        assert summary["operation_stats"]["op1"]["count"] == 2
        assert summary["operation_stats"]["op2"]["count"] == 1

    def test_cleanup_old_metrics(self, test_config):
        """Test cleaning up old metrics"""
        test_config.enable_performance_monitoring = True
        test_config.performance_retention_days = 1
        monitor = PerformanceMonitor(test_config)

        # Inject an operation that is older than the retention window
        old_metrics = PerformanceMetrics(
            operation="old_op",
            start_time=time.time() - 86400 * 2,  # 2 days ago
            end_time=time.time() - 86400 * 2 + 10,
            duration=10.0,
            cpu_percent=50.0,
            memory_mb=256.0,
            disk_io_read_mb=10.0,
            disk_io_write_mb=5.0,
            network_io_mb=1.0,
            chroot_size_mb=512.0,
            cache_hit_rate=0.8,
            parallel_efficiency=0.9,
            resource_utilization=0.85,
        )

        monitor._operation_history.append(old_metrics)

        # Record a recent operation
        with monitor.monitor_operation("recent_op"):
            time.sleep(0.1)

        # Clean up the old metrics
        monitor.cleanup_old_metrics()

        # Verify only the recent operation remains
        assert len(monitor._operation_history) == 1
        assert monitor._operation_history[0].operation == "recent_op"

    def test_export_metrics(self, test_config, temp_dir):
        """Test exporting metrics to a file"""
        test_config.enable_performance_monitoring = True
        test_config.performance_metrics_dir = temp_dir
        monitor = PerformanceMonitor(test_config)

        # Record an operation
        with monitor.monitor_operation("test_op"):
            time.sleep(0.1)

        # Export the metrics
        export_file = monitor.export_metrics()

        assert os.path.exists(export_file)

        # Verify the export file content
        with open(export_file, 'r') as f:
            export_data = json.load(f)

        assert "export_timestamp" in export_data
        assert "summary" in export_data
        assert "operation_history" in export_data
        assert "build_profiles" in export_data

        # Verify the summary data
        summary = export_data["summary"]
        assert summary["total_operations"] == 1
        assert summary["total_duration"] > 0

    def test_stop_monitoring(self, test_config):
        """Test stopping performance monitoring"""
        test_config.enable_performance_monitoring = True
        monitor = PerformanceMonitor(test_config)

        # Start monitoring
        monitor._start_system_monitoring()
        assert monitor._monitoring_active is True

        # Stop monitoring
        monitor.stop_monitoring()
        assert monitor._monitoring_active is False


class TestPerformanceOptimizer:
    """Test the PerformanceOptimizer class"""

    def test_initialization(self, test_config):
        """Test PerformanceOptimizer initialization"""
        optimizer = PerformanceOptimizer(test_config)

        assert optimizer.config == test_config
        assert optimizer._optimization_rules is not None
        assert "parallel_builds" in optimizer._optimization_rules
        assert "cache_settings" in optimizer._optimization_rules
        assert "chroot_optimization" in optimizer._optimization_rules

    def test_analyze_build_performance(self, test_config):
        """Test analyzing build performance"""
        optimizer = PerformanceOptimizer(test_config)

        # Create a build profile with poor cache performance
        profile = BuildProfile(
            build_id="test_build",
            package_name="test-package",
            architecture="amd64",
            suite="trixie",
            total_duration=300.0,  # 5 minutes
            phases={},
            resource_peak={"cpu_percent": 85.0, "memory_mb": 2048.0},
            cache_performance={"average_hit_rate": 0.3},
            optimization_suggestions=[],
            timestamp=datetime.now(),
        )

        analysis = optimizer.analyze_build_performance(profile)

        assert "score" in analysis
        assert "suggestions" in analysis
        assert "automatic_tunings" in analysis
        assert "manual_recommendations" in analysis

        # Verify the score calculation: penalties apply, but the score stays positive
        assert analysis["score"] < 100
        assert analysis["score"] > 0

        # Verify suggestions were generated
        assert len(analysis["suggestions"]) > 0

        # Verify automatic tunings were generated
        assert len(analysis["automatic_tunings"]) > 0

    def test_generate_automatic_tunings(self, test_config):
        """Test generating automatic tuning recommendations"""
        optimizer = PerformanceOptimizer(test_config)

        # A profile with low CPU usage and a low cache hit rate
        profile = BuildProfile(
            build_id="test_build",
            package_name="test-package",
            architecture="amd64",
            suite="trixie",
            total_duration=200.0,
            phases={},
            resource_peak={"cpu_percent": 50.0, "memory_mb": 1024.0},
            cache_performance={"average_hit_rate": 0.2},
            optimization_suggestions=[],
            timestamp=datetime.now(),
        )

        tunings = optimizer._generate_automatic_tunings(profile)

        # Low CPU usage should prompt a parallel-builds increase
        parallel_tunings = [t for t in tunings if t["type"] == "parallel_builds"]
        assert len(parallel_tunings) > 0

        # A low hit rate should prompt a cache-size tuning
        cache_tunings = [t for t in tunings if t["type"] == "cache_size"]
        assert len(cache_tunings) > 0

    def test_generate_manual_recommendations(self, test_config):
        """Test generating manual optimization recommendations"""
        optimizer = PerformanceOptimizer(test_config)

        # A profile with high memory usage
        profile = BuildProfile(
            build_id="test_build",
            package_name="test-package",
            architecture="amd64",
            suite="trixie",
            total_duration=400.0,
            phases={},
            resource_peak={"cpu_percent": 70.0, "memory_mb": 3072.0},  # > 2 GB
            cache_performance={"average_hit_rate": 0.6},
            optimization_suggestions=[],
            timestamp=datetime.now(),
        )

        recommendations = optimizer._generate_manual_recommendations(profile)

        assert len(recommendations) > 0

        # Should include memory-related recommendations
        memory_recommendations = [r for r in recommendations if "memory" in r.lower()]
        assert len(memory_recommendations) > 0

        # Should include general system recommendations
        system_recommendations = [r for r in recommendations if "system" in r.lower()]
        assert len(system_recommendations) > 0
|
||||
|
||||
def test_apply_automatic_tunings(self, test_config):
|
||||
"""Test applying automatic tuning recommendations"""
|
||||
optimizer = PerformanceOptimizer(test_config)
|
||||
|
||||
# Mock config attributes
|
||||
test_config.parallel_builds = 2
|
||||
|
||||
tunings = [
|
||||
{
|
||||
"type": "parallel_builds",
|
||||
"current": 2,
|
||||
"suggested": 3,
|
||||
"reason": "Low CPU utilization"
|
||||
}
|
||||
]
|
||||
|
||||
results = optimizer.apply_automatic_tunings(tunings)
|
||||
|
||||
assert len(results["applied"]) == 1
|
||||
assert len(results["failed"]) == 0
|
||||
assert len(results["skipped"]) == 0
|
||||
|
||||
# Verify tuning was applied
|
||||
assert test_config.parallel_builds == 3
|
||||
|
||||
|
||||
class TestPerformanceReporter:
|
||||
"""Test PerformanceReporter class"""
|
||||
|
||||
def test_initialization(self, test_config):
|
||||
"""Test PerformanceReporter initialization"""
|
||||
reporter = PerformanceReporter(test_config)
|
||||
|
||||
assert reporter.config == test_config
|
||||
|
||||
def test_generate_performance_report(self, test_config, temp_dir):
|
||||
"""Test generating performance report"""
|
||||
reporter = PerformanceReporter(test_config)
|
||||
|
||||
# Create a mock monitor with data
|
||||
monitor = Mock()
|
||||
monitor.get_performance_summary.return_value = {
|
||||
"total_operations": 5,
|
||||
"total_duration": 250.0,
|
||||
"average_duration": 50.0,
|
||||
"active_operations": 0,
|
||||
"operation_stats": {
|
||||
"test_op": {
|
||||
"count": 5,
|
||||
"avg_duration": 50.0,
|
||||
"min_duration": 45.0,
|
||||
"max_duration": 55.0
|
||||
}
|
||||
},
|
||||
"system_stats": {
|
||||
"cpu_percent": 75.0,
|
||||
"memory_percent": 60.0
|
||||
}
|
||||
}
|
||||
|
||||
# Generate report
|
||||
report_file = reporter.generate_performance_report(monitor)
|
||||
|
||||
assert os.path.exists(report_file)
|
||||
|
||||
# Verify report content
|
||||
with open(report_file, 'r') as f:
|
||||
content = f.read()
|
||||
|
||||
assert "DEB-MOCK PERFORMANCE REPORT" in content
|
||||
assert "Total Operations: 5" in content
|
||||
assert "Total Duration: 250.00s" in content
|
||||
assert "test_op:" in content
|
||||
|
||||
def test_generate_build_profile_report(self, test_config, temp_dir):
|
||||
"""Test generating build profile report"""
|
||||
reporter = PerformanceReporter(test_config)
|
||||
|
||||
# Create a build profile
|
||||
profile = BuildProfile(
|
||||
build_id="test_build_123",
|
||||
package_name="test-package",
|
||||
architecture="amd64",
|
||||
suite="trixie",
|
||||
total_duration=45.23,
|
||||
phases={
|
||||
"build_phase": PerformanceMetrics(
|
||||
operation="build_phase",
|
||||
start_time=1000.0,
|
||||
end_time=1045.23,
|
||||
duration=45.23,
|
||||
cpu_percent=75.5,
|
||||
memory_mb=512.0,
|
||||
disk_io_read_mb=25.6,
|
||||
disk_io_write_mb=15.3,
|
||||
network_io_mb=2.1,
|
||||
chroot_size_mb=1024.0,
|
||||
cache_hit_rate=0.8,
|
||||
parallel_efficiency=0.9,
|
||||
resource_utilization=0.85
|
||||
)
|
||||
},
|
||||
resource_peak={"cpu_percent": 75.5, "memory_mb": 512.0},
|
||||
cache_performance={"average_hit_rate": 0.8},
|
||||
optimization_suggestions=["Test suggestion"],
|
||||
timestamp=datetime.now()
|
||||
)
|
||||
|
||||
# Generate report
|
||||
report_file = reporter.generate_build_profile_report(profile)
|
||||
|
||||
assert os.path.exists(report_file)
|
||||
|
||||
# Verify report content
|
||||
with open(report_file, 'r') as f:
|
||||
content = f.read()
|
||||
|
||||
assert "BUILD PROFILE: test-package" in content
|
||||
assert "Build ID: test_build_123" in content
|
||||
assert "Architecture: amd64" in content
|
||||
assert "Suite: trixie" in content
|
||||
assert "Total Duration: 45.23s" in content
|
||||
assert "build_phase:" in content
|
||||
assert "Test suggestion" in content
|
||||
|
||||
def test_generate_report_with_custom_output(self, test_config, temp_dir):
|
||||
"""Test generating report with custom output file"""
|
||||
reporter = PerformanceReporter(test_config)
|
||||
|
||||
monitor = Mock()
|
||||
monitor.get_performance_summary.return_value = {
|
||||
"total_operations": 1,
|
||||
"total_duration": 10.0,
|
||||
"average_duration": 10.0,
|
||||
"active_operations": 0
|
||||
}
|
||||
|
||||
custom_file = os.path.join(temp_dir, "custom_report.txt")
|
||||
report_file = reporter.generate_performance_report(monitor, custom_file)
|
||||
|
||||
assert report_file == custom_file
|
||||
assert os.path.exists(custom_file)
|
||||
|
||||
def test_report_generation_error_handling(self, test_config):
|
||||
"""Test error handling in report generation"""
|
||||
reporter = PerformanceReporter(test_config)
|
||||
|
||||
# Mock monitor that raises an error
|
||||
monitor = Mock()
|
||||
monitor.get_performance_summary.side_effect = Exception("Test error")
|
||||
|
||||
with pytest.raises(PerformanceError):
|
||||
reporter.generate_performance_report(monitor)
|
||||
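The tests above pin down the full monitor → export → report flow. As a minimal sketch of driving that same surface outside pytest (the deb_mock.config and deb_mock.performance import paths and the bare Config() constructor are assumptions; every class and method name is taken from the tests, not verified elsewhere):

# Minimal sketch, assuming the classes live in deb_mock.performance and that
# Config can be constructed directly; method names mirror the tests above.
import time

from deb_mock.config import Config                # assumed location
from deb_mock.performance import (                # assumed location
    PerformanceMonitor,
    PerformanceReporter,
)

config = Config()
config.enable_performance_monitoring = True

monitor = PerformanceMonitor(config)

# The context manager records duration and resource usage per operation.
with monitor.monitor_operation("chroot_init"):
    time.sleep(0.5)  # stand-in for real work

print(monitor.get_performance_summary()["total_operations"])  # -> 1

metrics_json = monitor.export_metrics()  # raw JSON dump, as in test_export_metrics
report_txt = PerformanceReporter(config).generate_performance_report(monitor)
monitor.stop_monitoring()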
632
tests/test_plugin_system.py
Normal file

@@ -0,0 +1,632 @@
"""
|
||||
Tests for deb-mock plugin system
|
||||
"""
|
||||
|
||||
import pytest
|
||||
import tempfile
|
||||
import os
|
||||
import sys
|
||||
from unittest.mock import Mock, patch, MagicMock
|
||||
from pathlib import Path
|
||||
|
||||
from deb_mock.plugin import PluginManager, HookStages, BasePlugin
|
||||
from deb_mock.exceptions import PluginError
|
||||
|
||||
|
||||
class TestHookStages:
|
||||
"""Test HookStages enum"""
|
||||
|
||||
def test_hook_stages_defined(self):
|
||||
"""Test that all hook stages are defined"""
|
||||
assert hasattr(HookStages, 'PREBUILD')
|
||||
assert hasattr(HookStages, 'BUILD_START')
|
||||
assert hasattr(HookStages, 'BUILD_END')
|
||||
assert hasattr(HookStages, 'POSTBUILD')
|
||||
assert hasattr(HookStages, 'PRECHROOT_INIT')
|
||||
assert hasattr(HookStages, 'POSTCHROOT_INIT')
|
||||
assert hasattr(HookStages, 'PRECACHE')
|
||||
assert hasattr(HookStages, 'POSTCACHE')
|
||||
|
||||
def test_hook_stages_values(self):
|
||||
"""Test that hook stages have string values"""
|
||||
for stage_name in dir(HookStages):
|
||||
if not stage_name.startswith('_'):
|
||||
stage_value = getattr(HookStages, stage_name)
|
||||
assert isinstance(stage_value, str)
|
||||
assert stage_value == stage_name.lower()
|
||||
|
||||
|
||||
class TestBasePlugin:
|
||||
"""Test BasePlugin class"""
|
||||
|
||||
def test_base_plugin_creation(self):
|
||||
"""Test creating a base plugin"""
|
||||
plugin = BasePlugin()
|
||||
|
||||
assert plugin.name == "BasePlugin"
|
||||
assert plugin.version == "1.0.0"
|
||||
assert plugin.description == "Base plugin class"
|
||||
assert plugin.enabled is True
|
||||
|
||||
def test_base_plugin_custom_values(self):
|
||||
"""Test creating a base plugin with custom values"""
|
||||
plugin = BasePlugin(
|
||||
name="TestPlugin",
|
||||
version="2.0.0",
|
||||
description="Test plugin description",
|
||||
enabled=False
|
||||
)
|
||||
|
||||
assert plugin.name == "TestPlugin"
|
||||
assert plugin.version == "2.0.0"
|
||||
assert plugin.description == "Test plugin description"
|
||||
assert plugin.enabled is False
|
||||
|
||||
def test_base_plugin_methods(self):
|
||||
"""Test base plugin methods"""
|
||||
plugin = BasePlugin()
|
||||
|
||||
# Test default implementations
|
||||
assert plugin.init() is None
|
||||
assert plugin.cleanup() is None
|
||||
assert plugin.get_hooks() == {}
|
||||
|
||||
def test_base_plugin_hook_registration(self):
|
||||
"""Test hook registration in base plugin"""
|
||||
plugin = BasePlugin()
|
||||
|
||||
# Register a hook
|
||||
plugin.register_hook(HookStages.PREBUILD, "test_hook")
|
||||
|
||||
hooks = plugin.get_hooks()
|
||||
assert HookStages.PREBUILD in hooks
|
||||
assert "test_hook" in hooks[HookStages.PREBUILD]
|
||||
|
||||
def test_base_plugin_multiple_hooks(self):
|
||||
"""Test registering multiple hooks"""
|
||||
plugin = BasePlugin()
|
||||
|
||||
# Register multiple hooks
|
||||
plugin.register_hook(HookStages.PREBUILD, "hook1")
|
||||
plugin.register_hook(HookStages.PREBUILD, "hook2")
|
||||
plugin.register_hook(HookStages.POSTBUILD, "hook3")
|
||||
|
||||
hooks = plugin.get_hooks()
|
||||
assert len(hooks[HookStages.PREBUILD]) == 2
|
||||
assert len(hooks[HookStages.POSTBUILD]) == 1
|
||||
assert "hook1" in hooks[HookStages.PREBUILD]
|
||||
assert "hook2" in hooks[HookStages.PREBUILD]
|
||||
assert "hook3" in hooks[HookStages.POSTBUILD]
|
||||
|
||||
|
||||
class TestPluginManager:
|
||||
"""Test PluginManager class"""
|
||||
|
||||
def test_initialization(self, test_config):
|
||||
"""Test PluginManager initialization"""
|
||||
manager = PluginManager(test_config)
|
||||
|
||||
assert manager.config == test_config
|
||||
assert manager.plugins == {}
|
||||
assert manager.hooks == {}
|
||||
assert manager.plugin_dir == test_config.plugin_dir
|
||||
|
||||
def test_initialization_with_custom_plugin_dir(self, test_config):
|
||||
"""Test PluginManager initialization with custom plugin directory"""
|
||||
test_config.plugin_dir = "/custom/plugin/dir"
|
||||
manager = PluginManager(test_config)
|
||||
|
||||
assert manager.plugin_dir == "/custom/plugin/dir"
|
||||
|
||||
def test_discover_plugins_no_directory(self, test_config):
|
||||
"""Test plugin discovery when plugin directory doesn't exist"""
|
||||
test_config.plugin_dir = "/nonexistent/directory"
|
||||
manager = PluginManager(test_config)
|
||||
|
||||
plugins = manager.discover_plugins()
|
||||
assert plugins == []
|
||||
|
||||
def test_discover_plugins_empty_directory(self, test_config, temp_dir):
|
||||
"""Test plugin discovery in empty directory"""
|
||||
plugin_dir = os.path.join(temp_dir, "plugins")
|
||||
os.makedirs(plugin_dir)
|
||||
|
||||
test_config.plugin_dir = plugin_dir
|
||||
manager = PluginManager(test_config)
|
||||
|
||||
plugins = manager.discover_plugins()
|
||||
assert plugins == []
|
||||
|
||||
def test_discover_plugins_with_python_files(self, test_config, temp_dir):
|
||||
"""Test plugin discovery with Python files"""
|
||||
plugin_dir = os.path.join(temp_dir, "plugins")
|
||||
os.makedirs(plugin_dir)
|
||||
|
||||
# Create a Python file that's not a plugin
|
||||
with open(os.path.join(plugin_dir, "not_a_plugin.py"), "w") as f:
|
||||
f.write("# This is not a plugin\n")
|
||||
|
||||
# Create a Python file that could be a plugin
|
||||
with open(os.path.join(plugin_dir, "test_plugin.py"), "w") as f:
|
||||
f.write("""
|
||||
class TestPlugin:
|
||||
pass
|
||||
""")
|
||||
|
||||
test_config.plugin_dir = plugin_dir
|
||||
manager = PluginManager(test_config)
|
||||
|
||||
plugins = manager.discover_plugins()
|
||||
# Should find Python files but not load them as plugins
|
||||
assert len(plugins) == 0
|
||||
|
||||
def test_load_plugin_success(self, test_config):
|
||||
"""Test successfully loading a plugin"""
|
||||
manager = PluginManager(test_config)
|
||||
|
||||
# Create a mock plugin class
|
||||
class MockPlugin(BasePlugin):
|
||||
def __init__(self):
|
||||
super().__init__(name="MockPlugin")
|
||||
|
||||
# Mock the plugin module
|
||||
mock_module = Mock()
|
||||
mock_module.MockPlugin = MockPlugin
|
||||
|
||||
with patch('builtins.__import__', return_value=mock_module):
|
||||
plugin = manager.load_plugin("MockPlugin", "mock_plugin")
|
||||
|
||||
assert plugin is not None
|
||||
assert plugin.name == "MockPlugin"
|
||||
|
||||
def test_load_plugin_missing_class(self, test_config):
|
||||
"""Test loading a plugin with missing class"""
|
||||
manager = PluginManager(test_config)
|
||||
|
||||
# Mock the plugin module without the expected class
|
||||
mock_module = Mock()
|
||||
mock_module.spec = None
|
||||
|
||||
with patch('builtins.__import__', return_value=mock_module):
|
||||
with pytest.raises(PluginError, match="Plugin class 'TestPlugin' not found"):
|
||||
manager.load_plugin("TestPlugin", "test_plugin")
|
||||
|
||||
def test_load_plugin_import_error(self, test_config):
|
||||
"""Test loading a plugin with import error"""
|
||||
manager = PluginManager(test_config)
|
||||
|
||||
with patch('builtins.__import__', side_effect=ImportError("Test import error")):
|
||||
with pytest.raises(PluginError, match="Failed to import plugin 'test_plugin'"):
|
||||
manager.load_plugin("TestPlugin", "test_plugin")
|
||||
|
||||
def test_load_plugin_instantiation_error(self, test_config):
|
||||
"""Test loading a plugin with instantiation error"""
|
||||
manager = PluginManager(test_config)
|
||||
|
||||
# Create a plugin class that raises an error when instantiated
|
||||
class ErrorPlugin(BasePlugin):
|
||||
def __init__(self):
|
||||
raise Exception("Test instantiation error")
|
||||
|
||||
# Mock the plugin module
|
||||
mock_module = Mock()
|
||||
mock_module.ErrorPlugin = ErrorPlugin
|
||||
|
||||
with patch('builtins.__import__', return_value=mock_module):
|
||||
with pytest.raises(PluginError, match="Failed to instantiate plugin 'ErrorPlugin'"):
|
||||
manager.load_plugin("ErrorPlugin", "error_plugin")
|
||||
|
||||
def test_init_plugins(self, test_config):
|
||||
"""Test initializing plugins"""
|
||||
manager = PluginManager(test_config)
|
||||
|
||||
# Mock plugin discovery and loading
|
||||
mock_plugin = Mock()
|
||||
mock_plugin.name = "TestPlugin"
|
||||
mock_plugin.enabled = True
|
||||
mock_plugin.init.return_value = None
|
||||
|
||||
manager.plugins = {"test_plugin": mock_plugin}
|
||||
|
||||
# Mock deb_mock instance
|
||||
mock_deb_mock = Mock()
|
||||
|
||||
result = manager.init_plugins(mock_deb_mock)
|
||||
|
||||
assert result is True
|
||||
mock_plugin.init.assert_called_once_with(mock_deb_mock)
|
||||
|
||||
def test_init_plugins_disabled_plugin(self, test_config):
|
||||
"""Test initializing plugins with disabled plugin"""
|
||||
manager = PluginManager(test_config)
|
||||
|
||||
# Mock plugin discovery and loading
|
||||
mock_plugin = Mock()
|
||||
mock_plugin.name = "TestPlugin"
|
||||
mock_plugin.enabled = False
|
||||
mock_plugin.init.return_value = None
|
||||
|
||||
manager.plugins = {"test_plugin": mock_plugin}
|
||||
|
||||
# Mock deb_mock instance
|
||||
mock_deb_mock = Mock()
|
||||
|
||||
result = manager.init_plugins(mock_deb_mock)
|
||||
|
||||
assert result is True
|
||||
mock_plugin.init.assert_not_called()
|
||||
|
||||
def test_init_plugins_with_error(self, test_config):
|
||||
"""Test initializing plugins with plugin error"""
|
||||
manager = PluginManager(test_config)
|
||||
|
||||
# Mock plugin discovery and loading
|
||||
mock_plugin = Mock()
|
||||
mock_plugin.name = "TestPlugin"
|
||||
mock_plugin.enabled = True
|
||||
mock_plugin.init.side_effect = Exception("Test plugin error")
|
||||
|
||||
manager.plugins = {"test_plugin": mock_plugin}
|
||||
|
||||
# Mock deb_mock instance
|
||||
mock_deb_mock = Mock()
|
||||
|
||||
with pytest.raises(PluginError, match="Failed to initialize plugin 'TestPlugin'"):
|
||||
manager.init_plugins(mock_deb_mock)
|
||||
|
||||
def test_register_hooks(self, test_config):
|
||||
"""Test registering hooks from plugins"""
|
||||
manager = PluginManager(test_config)
|
||||
|
||||
# Mock plugin with hooks
|
||||
mock_plugin = Mock()
|
||||
mock_plugin.get_hooks.return_value = {
|
||||
HookStages.PREBUILD: ["hook1", "hook2"],
|
||||
HookStages.POSTBUILD: ["hook3"]
|
||||
}
|
||||
|
||||
manager.plugins = {"test_plugin": mock_plugin}
|
||||
|
||||
manager.register_hooks()
|
||||
|
||||
# Verify hooks were registered
|
||||
assert HookStages.PREBUILD in manager.hooks
|
||||
assert HookStages.POSTBUILD in manager.hooks
|
||||
assert "hook1" in manager.hooks[HookStages.PREBUILD]
|
||||
assert "hook2" in manager.hooks[HookStages.PREBUILD]
|
||||
assert "hook3" in manager.hooks[HookStages.POSTBUILD]
|
||||
|
||||
def test_call_hooks(self, test_config):
|
||||
"""Test calling hooks"""
|
||||
manager = PluginManager(test_config)
|
||||
|
||||
# Mock plugin with hooks
|
||||
mock_plugin = Mock()
|
||||
mock_plugin.get_hooks.return_value = {
|
||||
HookStages.PREBUILD: ["hook1"]
|
||||
}
|
||||
|
||||
# Mock hook methods
|
||||
def hook1(*args, **kwargs):
|
||||
return "hook1_result"
|
||||
|
||||
mock_plugin.hook1 = hook1
|
||||
|
||||
manager.plugins = {"test_plugin": mock_plugin}
|
||||
manager.register_hooks()
|
||||
|
||||
# Call hooks
|
||||
results = manager.call_hooks(HookStages.PREBUILD, "arg1", kwarg1="value1")
|
||||
|
||||
assert len(results) == 1
|
||||
assert results[0] == "hook1_result"
|
||||
|
||||
def test_call_hooks_no_hooks(self, test_config):
|
||||
"""Test calling hooks when no hooks are registered"""
|
||||
manager = PluginManager(test_config)
|
||||
|
||||
results = manager.call_hooks(HookStages.PREBUILD, "arg1")
|
||||
|
||||
assert results == []
|
||||
|
||||
def test_call_hooks_with_error(self, test_config):
|
||||
"""Test calling hooks with hook error"""
|
||||
manager = PluginManager(test_config)
|
||||
|
||||
# Mock plugin with hooks
|
||||
mock_plugin = Mock()
|
||||
mock_plugin.get_hooks.return_value = {
|
||||
HookStages.PREBUILD: ["hook1"]
|
||||
}
|
||||
|
||||
# Mock hook method that raises an error
|
||||
def hook1(*args, **kwargs):
|
||||
raise Exception("Test hook error")
|
||||
|
||||
mock_plugin.hook1 = hook1
|
||||
|
||||
manager.plugins = {"test_plugin": mock_plugin}
|
||||
manager.register_hooks()
|
||||
|
||||
# Call hooks - should handle errors gracefully
|
||||
results = manager.call_hooks(HookStages.PREBUILD, "arg1")
|
||||
|
||||
# Should return empty list when hooks fail
|
||||
assert results == []
|
||||
|
||||
def test_cleanup_plugins(self, test_config):
|
||||
"""Test cleaning up plugins"""
|
||||
manager = PluginManager(test_config)
|
||||
|
||||
# Mock plugin with cleanup method
|
||||
mock_plugin = Mock()
|
||||
mock_plugin.cleanup.return_value = None
|
||||
|
||||
manager.plugins = {"test_plugin": mock_plugin}
|
||||
|
||||
result = manager.cleanup_plugins()
|
||||
|
||||
assert result is True
|
||||
mock_plugin.cleanup.assert_called_once()
|
||||
|
||||
def test_cleanup_plugins_with_error(self, test_config):
|
||||
"""Test cleaning up plugins with plugin error"""
|
||||
manager = PluginManager(test_config)
|
||||
|
||||
# Mock plugin with cleanup method that raises an error
|
||||
mock_plugin = Mock()
|
||||
mock_plugin.cleanup.side_effect = Exception("Test cleanup error")
|
||||
|
||||
manager.plugins = {"test_plugin": mock_plugin}
|
||||
|
||||
with pytest.raises(PluginError, match="Failed to cleanup plugin 'test_plugin'"):
|
||||
manager.cleanup_plugins()
|
||||
|
||||
def test_get_plugin_info(self, test_config):
|
||||
"""Test getting plugin information"""
|
||||
manager = PluginManager(test_config)
|
||||
|
||||
# Mock plugin
|
||||
mock_plugin = Mock()
|
||||
mock_plugin.name = "TestPlugin"
|
||||
mock_plugin.version = "1.0.0"
|
||||
mock_plugin.description = "Test plugin description"
|
||||
mock_plugin.enabled = True
|
||||
|
||||
manager.plugins = {"test_plugin": mock_plugin}
|
||||
|
||||
info = manager.get_plugin_info("test_plugin")
|
||||
|
||||
assert info["name"] == "TestPlugin"
|
||||
assert info["version"] == "1.0.0"
|
||||
assert info["description"] == "Test plugin description"
|
||||
assert info["enabled"] is True
|
||||
|
||||
def test_get_plugin_info_not_found(self, test_config):
|
||||
"""Test getting plugin information for non-existent plugin"""
|
||||
manager = PluginManager(test_config)
|
||||
|
||||
with pytest.raises(PluginError, match="Plugin 'nonexistent' not found"):
|
||||
manager.get_plugin_info("nonexistent")
|
||||
|
||||
def test_list_plugins(self, test_config):
|
||||
"""Test listing all plugins"""
|
||||
manager = PluginManager(test_config)
|
||||
|
||||
# Mock plugins
|
||||
mock_plugin1 = Mock()
|
||||
mock_plugin1.name = "Plugin1"
|
||||
mock_plugin1.version = "1.0.0"
|
||||
mock_plugin1.enabled = True
|
||||
|
||||
mock_plugin2 = Mock()
|
||||
mock_plugin2.name = "Plugin2"
|
||||
mock_plugin2.version = "2.0.0"
|
||||
mock_plugin2.enabled = False
|
||||
|
||||
manager.plugins = {
|
||||
"plugin1": mock_plugin1,
|
||||
"plugin2": mock_plugin2
|
||||
}
|
||||
|
||||
plugins = manager.list_plugins()
|
||||
|
||||
assert len(plugins) == 2
|
||||
assert plugins["plugin1"]["name"] == "Plugin1"
|
||||
assert plugins["plugin2"]["name"] == "Plugin2"
|
||||
assert plugins["plugin1"]["enabled"] is True
|
||||
assert plugins["plugin2"]["enabled"] is False
|
||||
|
||||
def test_enable_plugin(self, test_config):
|
||||
"""Test enabling a plugin"""
|
||||
manager = PluginManager(test_config)
|
||||
|
||||
# Mock plugin
|
||||
mock_plugin = Mock()
|
||||
mock_plugin.enabled = False
|
||||
|
||||
manager.plugins = {"test_plugin": mock_plugin}
|
||||
|
||||
result = manager.enable_plugin("test_plugin")
|
||||
|
||||
assert result is True
|
||||
assert mock_plugin.enabled is True
|
||||
|
||||
def test_enable_plugin_not_found(self, test_config):
|
||||
"""Test enabling a non-existent plugin"""
|
||||
manager = PluginManager(test_config)
|
||||
|
||||
with pytest.raises(PluginError, match="Plugin 'nonexistent' not found"):
|
||||
manager.enable_plugin("nonexistent")
|
||||
|
||||
def test_disable_plugin(self, test_config):
|
||||
"""Test disabling a plugin"""
|
||||
manager = PluginManager(test_config)
|
||||
|
||||
# Mock plugin
|
||||
mock_plugin = Mock()
|
||||
mock_plugin.enabled = True
|
||||
|
||||
manager.plugins = {"test_plugin": mock_plugin}
|
||||
|
||||
result = manager.disable_plugin("test_plugin")
|
||||
|
||||
assert result is True
|
||||
assert mock_plugin.enabled is False
|
||||
|
||||
def test_disable_plugin_not_found(self, test_config):
|
||||
"""Test disabling a non-existent plugin"""
|
||||
manager = PluginManager(test_config)
|
||||
|
||||
with pytest.raises(PluginError, match="Plugin 'nonexistent' not found"):
|
||||
manager.disable_plugin("nonexistent")
|
||||
|
||||
def test_reload_plugin(self, test_config):
|
||||
"""Test reloading a plugin"""
|
||||
manager = PluginManager(test_config)
|
||||
|
||||
# Mock plugin
|
||||
mock_plugin = Mock()
|
||||
mock_plugin.name = "TestPlugin"
|
||||
mock_plugin.cleanup.return_value = None
|
||||
|
||||
manager.plugins = {"test_plugin": mock_plugin}
|
||||
|
||||
# Mock plugin loading
|
||||
with patch.object(manager, 'load_plugin', return_value=mock_plugin):
|
||||
result = manager.reload_plugin("test_plugin")
|
||||
|
||||
assert result is True
|
||||
mock_plugin.cleanup.assert_called_once()
|
||||
|
||||
def test_reload_plugin_not_found(self, test_config):
|
||||
"""Test reloading a non-existent plugin"""
|
||||
manager = PluginManager(test_config)
|
||||
|
||||
with pytest.raises(PluginError, match="Plugin 'nonexistent' not found"):
|
||||
manager.reload_plugin("nonexistent")
|
||||
|
||||
|
||||
class TestPluginIntegration:
|
||||
"""Test plugin system integration"""
|
||||
|
||||
def test_plugin_lifecycle(self, test_config):
|
||||
"""Test complete plugin lifecycle"""
|
||||
manager = PluginManager(test_config)
|
||||
|
||||
# Create a test plugin
|
||||
class TestPlugin(BasePlugin):
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
name="TestPlugin",
|
||||
version="1.0.0",
|
||||
description="Test plugin for integration testing"
|
||||
)
|
||||
self.init_called = False
|
||||
self.cleanup_called = False
|
||||
|
||||
def init(self, deb_mock):
|
||||
self.init_called = True
|
||||
return None
|
||||
|
||||
def cleanup(self):
|
||||
self.cleanup_called = True
|
||||
return None
|
||||
|
||||
def get_hooks(self):
|
||||
return {
|
||||
HookStages.PREBUILD: ["prebuild_hook"],
|
||||
HookStages.POSTBUILD: ["postbuild_hook"]
|
||||
}
|
||||
|
||||
def prebuild_hook(self, *args, **kwargs):
|
||||
return "prebuild_result"
|
||||
|
||||
def postbuild_hook(self, *args, **kwargs):
|
||||
return "postbuild_result"
|
||||
|
||||
# Mock plugin module
|
||||
mock_module = Mock()
|
||||
mock_module.TestPlugin = TestPlugin
|
||||
|
||||
with patch('builtins.__import__', return_value=mock_module):
|
||||
# Load plugin
|
||||
plugin = manager.load_plugin("TestPlugin", "test_plugin")
|
||||
|
||||
# Add to plugins
|
||||
manager.plugins["test_plugin"] = plugin
|
||||
|
||||
# Initialize plugins
|
||||
mock_deb_mock = Mock()
|
||||
result = manager.init_plugins(mock_deb_mock)
|
||||
|
||||
assert result is True
|
||||
assert plugin.init_called is True
|
||||
|
||||
# Register hooks
|
||||
manager.register_hooks()
|
||||
|
||||
# Call hooks
|
||||
prebuild_results = manager.call_hooks(HookStages.PREBUILD, "arg1")
|
||||
postbuild_results = manager.call_hooks(HookStages.POSTBUILD, "arg2")
|
||||
|
||||
assert prebuild_results == ["prebuild_result"]
|
||||
assert postbuild_results == ["postbuild_result"]
|
||||
|
||||
# Cleanup plugins
|
||||
cleanup_result = manager.cleanup_plugins()
|
||||
|
||||
assert cleanup_result is True
|
||||
assert plugin.cleanup_called is True
|
||||
|
||||
def test_plugin_configuration(self, plugin_test_config):
|
||||
"""Test plugin configuration integration"""
|
||||
manager = PluginManager(plugin_test_config)
|
||||
|
||||
# Mock plugin discovery
|
||||
with patch.object(manager, 'discover_plugins', return_value=[]):
|
||||
# Initialize plugins
|
||||
mock_deb_mock = Mock()
|
||||
result = manager.init_plugins(mock_deb_mock)
|
||||
|
||||
assert result is True
|
||||
|
||||
# Verify plugin configuration was loaded
|
||||
assert "test_plugin" in plugin_test_config.plugin_conf
|
||||
assert plugin_test_config.plugin_conf["test_plugin"]["enabled"] is True
|
||||
assert plugin_test_config.plugin_conf["test_plugin"]["config_option"] == "test_value"
|
||||
|
||||
def test_plugin_error_handling(self, test_config):
|
||||
"""Test plugin error handling"""
|
||||
manager = PluginManager(test_config)
|
||||
|
||||
# Create a plugin that raises errors
|
||||
class ErrorPlugin(BasePlugin):
|
||||
def __init__(self):
|
||||
super().__init__(name="ErrorPlugin")
|
||||
|
||||
def init(self, deb_mock):
|
||||
raise Exception("Init error")
|
||||
|
||||
def cleanup(self):
|
||||
raise Exception("Cleanup error")
|
||||
|
||||
# Mock plugin module
|
||||
mock_module = Mock()
|
||||
mock_module.ErrorPlugin = ErrorPlugin
|
||||
|
||||
with patch('builtins.__import__', return_value=mock_module):
|
||||
# Load plugin
|
||||
plugin = manager.load_plugin("ErrorPlugin", "error_plugin")
|
||||
|
||||
# Add to plugins
|
||||
manager.plugins["error_plugin"] = plugin
|
||||
|
||||
# Initialize plugins should fail
|
||||
mock_deb_mock = Mock()
|
||||
with pytest.raises(PluginError, match="Failed to initialize plugin 'ErrorPlugin'"):
|
||||
manager.init_plugins(mock_deb_mock)
|
||||
|
||||
# Cleanup plugins should fail
|
||||
with pytest.raises(PluginError, match="Failed to cleanup plugin 'error_plugin'"):
|
||||
manager.cleanup_plugins()
|
||||
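The lifecycle test above establishes the whole contract a plugin must satisfy: BasePlugin constructor kwargs, init(deb_mock), get_hooks() mapping stages to method names, and hook methods whose return values call_hooks() collects. As a closing illustration, here is a sketch of what a concrete plugin could look like; the commit ships a ccache plugin, but the specifics below (env var, cache path) are illustrative assumptions, not that plugin's code:

# Sketch of a concrete plugin against the API exercised above. Only
# BasePlugin, HookStages, and the hook-dispatch convention come from the
# tests; the ccache details are illustrative assumptions.
from deb_mock.plugin import BasePlugin, HookStages


class CcachePlugin(BasePlugin):
    def __init__(self):
        super().__init__(
            name="CcachePlugin",
            version="1.0.0",
            description="Shares a ccache directory with the build chroot",
        )
        self.deb_mock = None

    def init(self, deb_mock):
        # Keep a handle on the core object so hooks can reach config/chroot.
        self.deb_mock = deb_mock

    def get_hooks(self):
        return {
            HookStages.PREBUILD: ["setup_ccache"],
            HookStages.POSTBUILD: ["report_ccache_stats"],
        }

    def setup_ccache(self, *args, **kwargs):
        # Hypothetical: point the build at a shared cache before it starts.
        return {"env": {"CCACHE_DIR": "/var/cache/deb-mock/ccache"}}

    def report_ccache_stats(self, *args, **kwargs):
        # Hypothetical: surface hit-rate data after the build; the return
        # value is collected by PluginManager.call_hooks().
        return "ccache stats collected"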