"""
Tests for deb-mock performance monitoring and optimization
"""

import pytest
import time
import tempfile
import os
import json
from unittest.mock import Mock, patch, MagicMock
from datetime import datetime, timedelta

from deb_mock.performance import (
    PerformanceMonitor, PerformanceOptimizer, PerformanceReporter,
    PerformanceMetrics, BuildProfile
)
from deb_mock.exceptions import PerformanceError
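
# These tests rely on the shared `test_config` and `temp_dir` fixtures, which are
# expected to come from the suite's conftest.py. The fixture bodies are not shown
# in this module; a minimal sketch of what they might look like (an assumption for
# illustration, not the canonical conftest.py implementation) is:
#
#     @pytest.fixture
#     def temp_dir(tmp_path):
#         return str(tmp_path)
#
#     @pytest.fixture
#     def test_config(temp_dir):
#         config = Mock()
#         config.enable_performance_monitoring = True
#         config.performance_metrics_dir = os.path.join(temp_dir, "metrics")
#         config.performance_retention_days = 30
#         return config
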
class TestPerformanceMetrics:
    """Test PerformanceMetrics data class"""

    def test_metrics_creation(self):
        """Test creating PerformanceMetrics"""
        metrics = PerformanceMetrics(
            operation="test_operation",
            start_time=1000.0,
            end_time=1010.0,
            duration=10.0,
            cpu_percent=75.5,
            memory_mb=512.0,
            disk_io_read_mb=25.6,
            disk_io_write_mb=15.3,
            network_io_mb=2.1,
            chroot_size_mb=1024.0,
            cache_hit_rate=0.8,
            parallel_efficiency=0.9,
            resource_utilization=0.85
        )

        assert metrics.operation == "test_operation"
        assert metrics.duration == 10.0
        assert metrics.cpu_percent == 75.5
        assert metrics.memory_mb == 512.0
        assert metrics.cache_hit_rate == 0.8

    def test_metrics_calculation(self):
        """Test metrics calculation from start/end times"""
        start_time = time.time()
        time.sleep(0.1)  # Small delay
        end_time = time.time()

        metrics = PerformanceMetrics(
            operation="test_calc",
            start_time=start_time,
            end_time=end_time,
            duration=end_time - start_time,
            cpu_percent=50.0,
            memory_mb=256.0,
            disk_io_read_mb=10.0,
            disk_io_write_mb=5.0,
            network_io_mb=1.0,
            chroot_size_mb=512.0,
            cache_hit_rate=0.7,
            parallel_efficiency=0.8,
            resource_utilization=0.75
        )

        assert metrics.duration > 0
        assert metrics.duration < 1.0  # Should be small

class TestBuildProfile:
    """Test BuildProfile data class"""

    def test_profile_creation(self):
        """Test creating BuildProfile"""
        profile = BuildProfile(
            build_id="test_build_123",
            package_name="test-package",
            architecture="amd64",
            suite="trixie",
            total_duration=45.23,
            phases={},
            resource_peak={},
            cache_performance={},
            optimization_suggestions=[],
            timestamp=datetime.now()
        )

        assert profile.build_id == "test_build_123"
        assert profile.package_name == "test-package"
        assert profile.architecture == "amd64"
        assert profile.suite == "trixie"
        assert profile.total_duration == 45.23

    def test_profile_with_phases(self):
        """Test BuildProfile with phase metrics"""
        metrics = PerformanceMetrics(
            operation="test_phase",
            start_time=1000.0,
            end_time=1010.0,
            duration=10.0,
            cpu_percent=75.5,
            memory_mb=512.0,
            disk_io_read_mb=25.6,
            disk_io_write_mb=15.3,
            network_io_mb=2.1,
            chroot_size_mb=1024.0,
            cache_hit_rate=0.8,
            parallel_efficiency=0.9,
            resource_utilization=0.85
        )

        profile = BuildProfile(
            build_id="test_build_123",
            package_name="test-package",
            architecture="amd64",
            suite="trixie",
            total_duration=10.0,
            phases={"test_phase": metrics},
            resource_peak={"cpu_percent": 75.5, "memory_mb": 512.0},
            cache_performance={"hit_rate": 0.8},
            optimization_suggestions=["Test suggestion"],
            timestamp=datetime.now()
        )

        assert "test_phase" in profile.phases
        assert profile.phases["test_phase"] == metrics
        assert profile.resource_peak["cpu_percent"] == 75.5
        assert profile.cache_performance["hit_rate"] == 0.8
        assert len(profile.optimization_suggestions) == 1

class TestPerformanceMonitor:
    """Test PerformanceMonitor class"""

    def test_initialization(self, test_config):
        """Test PerformanceMonitor initialization"""
        monitor = PerformanceMonitor(test_config)

        assert monitor.config == test_config
        assert monitor.enable_monitoring == test_config.enable_performance_monitoring
        assert monitor.metrics_dir == test_config.performance_metrics_dir
        assert monitor.retention_days == test_config.performance_retention_days
        assert monitor._active_operations == {}
        assert monitor._operation_history == []
        assert monitor._build_profiles == {}

    def test_initialization_with_monitoring_disabled(self, test_config):
        """Test PerformanceMonitor initialization with monitoring disabled"""
        test_config.enable_performance_monitoring = False
        monitor = PerformanceMonitor(test_config)

        assert monitor.enable_monitoring is False
        assert monitor._monitoring_active is False

    def test_metrics_directory_creation(self, temp_dir):
        """Test that metrics directory is created"""
        config = Mock()
        config.enable_performance_monitoring = True
        config.performance_metrics_dir = os.path.join(temp_dir, "metrics")
        config.performance_retention_days = 30

        monitor = PerformanceMonitor(config)

        assert os.path.exists(config.performance_metrics_dir)

    @patch('deb_mock.performance.psutil')
    def test_system_monitoring_start(self, mock_psutil, test_config):
        """Test starting system monitoring"""
        test_config.enable_performance_monitoring = True

        # Mock psutil methods
        mock_psutil.cpu_percent.return_value = 50.0
        mock_psutil.virtual_memory.return_value = Mock(percent=60.0, available=1024*1024*1024)
        mock_psutil.disk_usage.return_value = Mock(percent=70.0, free=1024*1024*1024*10)

        monitor = PerformanceMonitor(test_config)

        # Give background monitoring a moment to start
        time.sleep(0.1)

        # Stop monitoring
        monitor.stop_monitoring()

        # Verify monitoring was stopped cleanly
        assert monitor._monitoring_active is False

    def test_monitor_operation_context_manager(self, test_config):
        """Test monitor_operation context manager"""
        test_config.enable_performance_monitoring = True
        monitor = PerformanceMonitor(test_config)

        with monitor.monitor_operation("test_op") as op_id:
            assert op_id.startswith("test_op_")
            time.sleep(0.1)  # Small delay

        # Verify operation was tracked
        assert len(monitor._operation_history) == 1
        assert monitor._operation_history[0].operation == "test_op"
        assert monitor._operation_history[0].duration > 0

    def test_monitor_operation_disabled(self, test_config):
        """Test monitor_operation when monitoring is disabled"""
        test_config.enable_performance_monitoring = False
        monitor = PerformanceMonitor(test_config)

        with monitor.monitor_operation("test_op") as op_id:
            assert op_id is None  # Should yield None when disabled

        # Verify no operation was tracked
        assert len(monitor._operation_history) == 0

    @patch('deb_mock.performance.psutil')
    def test_operation_metrics_collection(self, mock_psutil, test_config):
        """Test that operation metrics are properly collected"""
        test_config.enable_performance_monitoring = True

        # Mock psutil methods
        mock_psutil.cpu_percent.side_effect = [25.0, 75.0]
        mock_psutil.virtual_memory.side_effect = [
            Mock(used=1024*1024*1024),  # 1GB
            Mock(used=1536*1024*1024)  # 1.5GB
        ]
        mock_psutil.disk_io_counters.side_effect = [
            Mock(read_bytes=1000, write_bytes=500),
            Mock(read_bytes=2000, write_bytes=1000)
        ]
        mock_psutil.net_io_counters.side_effect = [
            Mock(bytes_sent=100, bytes_recv=200),
            Mock(bytes_sent=300, bytes_recv=600)
        ]

        monitor = PerformanceMonitor(test_config)

        with monitor.monitor_operation("test_op") as op_id:
            time.sleep(0.1)

        # Verify metrics were collected
        assert len(monitor._operation_history) == 1
        metrics = monitor._operation_history[0]

        assert metrics.operation == "test_op"
        assert metrics.duration > 0
        assert metrics.cpu_percent == 50.0  # Average of 25 and 75
        assert metrics.memory_mb > 0  # Should be positive
        assert metrics.disk_io_read_mb > 0
        assert metrics.disk_io_write_mb > 0
        assert metrics.network_io_mb > 0

    def test_benchmark_operation(self, test_config):
        """Test benchmarking an operation"""
        test_config.enable_performance_monitoring = True
        monitor = PerformanceMonitor(test_config)

        def test_function():
            time.sleep(0.1)
            return "test_result"

        result = monitor.benchmark_operation("test_bench", test_function, iterations=3)

        assert result["operation"] == "test_bench"
        assert result["iterations"] == 3
        assert result["average_duration"] > 0
        assert result["min_duration"] > 0
        assert result["max_duration"] > 0
        assert result["variance"] >= 0
        assert len(result["results"]) == 3

        # Verify all iterations returned the expected result
        for iteration_result in result["results"]:
            assert iteration_result["result"] == "test_result"

    def test_benchmark_operation_disabled(self, test_config):
        """Test benchmarking when monitoring is disabled"""
        test_config.enable_performance_monitoring = False
        monitor = PerformanceMonitor(test_config)

        def test_function():
            return "test_result"

        result = monitor.benchmark_operation("test_bench", test_function, iterations=3)

        # Should return the function result directly when disabled
        assert result == "test_result"

    def test_create_build_profile(self, test_config):
        """Test creating a build profile"""
        test_config.enable_performance_monitoring = True
        monitor = PerformanceMonitor(test_config)

        profile_id = monitor.create_build_profile(
            "test_build", "test-package", "amd64", "trixie"
        )

        assert profile_id in monitor._build_profiles
        profile = monitor._build_profiles[profile_id]

        assert profile.build_id == "test_build"
        assert profile.package_name == "test-package"
        assert profile.architecture == "amd64"
        assert profile.suite == "trixie"
        assert profile.total_duration == 0
        assert profile.phases == {}
        assert profile.resource_peak == {}
        assert profile.cache_performance == {}
        assert profile.optimization_suggestions == []
        assert isinstance(profile.timestamp, datetime)

    def test_add_phase_metrics(self, test_config):
        """Test adding phase metrics to a build profile"""
        test_config.enable_performance_monitoring = True
        monitor = PerformanceMonitor(test_config)

        # Create profile
        profile_id = monitor.create_build_profile(
            "test_build", "test-package", "amd64", "trixie"
        )

        # Create metrics
        metrics = PerformanceMetrics(
            operation="test_phase",
            start_time=1000.0,
            end_time=1010.0,
            duration=10.0,
            cpu_percent=75.5,
            memory_mb=512.0,
            disk_io_read_mb=25.6,
            disk_io_write_mb=15.3,
            network_io_mb=2.1,
            chroot_size_mb=1024.0,
            cache_hit_rate=0.8,
            parallel_efficiency=0.9,
            resource_utilization=0.85
        )

        # Add metrics
        monitor.add_phase_metrics(profile_id, "test_phase", metrics)

        profile = monitor._build_profiles[profile_id]
        assert "test_phase" in profile.phases
        assert profile.phases["test_phase"] == metrics
        assert profile.total_duration == 10.0
        assert profile.resource_peak["cpu_percent"] == 75.5
        assert profile.resource_peak["memory_mb"] == 512.0

    def test_finalize_build_profile(self, test_config):
        """Test finalizing a build profile"""
        test_config.enable_performance_monitoring = True
        monitor = PerformanceMonitor(test_config)

        # Create profile with metrics
        profile_id = monitor.create_build_profile(
            "test_build", "test-package", "amd64", "trixie"
        )

        # Add some metrics
        metrics = PerformanceMetrics(
            operation="cache_operation",
            start_time=1000.0,
            end_time=1010.0,
            duration=10.0,
            cpu_percent=50.0,
            memory_mb=256.0,
            disk_io_read_mb=10.0,
            disk_io_write_mb=5.0,
            network_io_mb=1.0,
            chroot_size_mb=512.0,
            cache_hit_rate=0.8,
            parallel_efficiency=0.9,
            resource_utilization=0.85
        )

        monitor.add_phase_metrics(profile_id, "cache_operation", metrics)

        # Finalize profile
        profile = monitor.finalize_build_profile(profile_id)

        assert profile is not None
        assert profile.cache_performance["average_hit_rate"] == 0.8
        assert len(profile.optimization_suggestions) > 0

    def test_get_performance_summary(self, test_config):
        """Test getting performance summary"""
        test_config.enable_performance_monitoring = True
        monitor = PerformanceMonitor(test_config)

        # Add some operations
        with monitor.monitor_operation("op1"):
            time.sleep(0.1)

        with monitor.monitor_operation("op2"):
            time.sleep(0.1)

        with monitor.monitor_operation("op1"):  # Same operation type
            time.sleep(0.1)

        summary = monitor.get_performance_summary()

        assert summary["total_operations"] == 3
        assert summary["total_duration"] > 0
        assert summary["average_duration"] > 0
        assert summary["active_operations"] == 0

        # Check operation stats
        assert "op1" in summary["operation_stats"]
        assert summary["operation_stats"]["op1"]["count"] == 2
        assert summary["operation_stats"]["op2"]["count"] == 1

    def test_cleanup_old_metrics(self, test_config):
        """Test cleaning up old metrics"""
        test_config.enable_performance_monitoring = True
        test_config.performance_retention_days = 1
        monitor = PerformanceMonitor(test_config)

        # Add an old operation that falls outside the retention window
        old_metrics = PerformanceMetrics(
            operation="old_op",
            start_time=time.time() - 86400 * 2,  # 2 days ago
            end_time=time.time() - 86400 * 2 + 10,
            duration=10.0,
            cpu_percent=50.0,
            memory_mb=256.0,
            disk_io_read_mb=10.0,
            disk_io_write_mb=5.0,
            network_io_mb=1.0,
            chroot_size_mb=512.0,
            cache_hit_rate=0.8,
            parallel_efficiency=0.9,
            resource_utilization=0.85
        )

        monitor._operation_history.append(old_metrics)

        # Add a recent operation
        with monitor.monitor_operation("recent_op"):
            time.sleep(0.1)

        # Clean up old metrics
        monitor.cleanup_old_metrics()

        # Verify old metrics were removed
        assert len(monitor._operation_history) == 1
        assert monitor._operation_history[0].operation == "recent_op"

    def test_export_metrics(self, test_config, temp_dir):
        """Test exporting metrics to file"""
        test_config.enable_performance_monitoring = True
        test_config.performance_metrics_dir = temp_dir
        monitor = PerformanceMonitor(test_config)

        # Add some operations
        with monitor.monitor_operation("test_op"):
            time.sleep(0.1)

        # Export metrics
        export_file = monitor.export_metrics()

        assert os.path.exists(export_file)

        # Verify export file content
        with open(export_file, 'r') as f:
            export_data = json.load(f)

        assert "export_timestamp" in export_data
        assert "summary" in export_data
        assert "operation_history" in export_data
        assert "build_profiles" in export_data

        # Verify summary data
        summary = export_data["summary"]
        assert summary["total_operations"] == 1
        assert summary["total_duration"] > 0

    def test_stop_monitoring(self, test_config):
        """Test stopping performance monitoring"""
        test_config.enable_performance_monitoring = True
        monitor = PerformanceMonitor(test_config)

        # Start monitoring
        monitor._start_system_monitoring()
        assert monitor._monitoring_active is True

        # Stop monitoring
        monitor.stop_monitoring()
        assert monitor._monitoring_active is False

class TestPerformanceOptimizer:
    """Test PerformanceOptimizer class"""

    def test_initialization(self, test_config):
        """Test PerformanceOptimizer initialization"""
        optimizer = PerformanceOptimizer(test_config)

        assert optimizer.config == test_config
        assert optimizer._optimization_rules is not None
        assert "parallel_builds" in optimizer._optimization_rules
        assert "cache_settings" in optimizer._optimization_rules
        assert "chroot_optimization" in optimizer._optimization_rules

    def test_analyze_build_performance(self, test_config):
        """Test analyzing build performance"""
        optimizer = PerformanceOptimizer(test_config)

        # Create a build profile
        profile = BuildProfile(
            build_id="test_build",
            package_name="test-package",
            architecture="amd64",
            suite="trixie",
            total_duration=300.0,  # 5 minutes
            phases={},
            resource_peak={"cpu_percent": 85.0, "memory_mb": 2048.0},
            cache_performance={"average_hit_rate": 0.3},
            optimization_suggestions=[],
            timestamp=datetime.now()
        )

        analysis = optimizer.analyze_build_performance(profile)

        assert "score" in analysis
        assert "suggestions" in analysis
        assert "automatic_tunings" in analysis
        assert "manual_recommendations" in analysis

        # Verify score calculation
        assert analysis["score"] < 100  # Should have penalties
        assert analysis["score"] > 0

        # Verify suggestions were generated
        assert len(analysis["suggestions"]) > 0

        # Verify automatic tunings were generated
        assert len(analysis["automatic_tunings"]) > 0

    def test_generate_automatic_tunings(self, test_config):
        """Test generating automatic tuning recommendations"""
        optimizer = PerformanceOptimizer(test_config)

        # Test profile with low CPU usage
        profile = BuildProfile(
            build_id="test_build",
            package_name="test-package",
            architecture="amd64",
            suite="trixie",
            total_duration=200.0,
            phases={},
            resource_peak={"cpu_percent": 50.0, "memory_mb": 1024.0},
            cache_performance={"average_hit_rate": 0.2},
            optimization_suggestions=[],
            timestamp=datetime.now()
        )

        tunings = optimizer._generate_automatic_tunings(profile)

        # Should suggest increasing parallel builds for low CPU usage
        parallel_tunings = [t for t in tunings if t["type"] == "parallel_builds"]
        assert len(parallel_tunings) > 0

        # Should suggest cache optimization for low hit rate
        cache_tunings = [t for t in tunings if t["type"] == "cache_size"]
        assert len(cache_tunings) > 0

    def test_generate_manual_recommendations(self, test_config):
        """Test generating manual optimization recommendations"""
        optimizer = PerformanceOptimizer(test_config)

        # Test profile with high memory usage
        profile = BuildProfile(
            build_id="test_build",
            package_name="test-package",
            architecture="amd64",
            suite="trixie",
            total_duration=400.0,
            phases={},
            resource_peak={"cpu_percent": 70.0, "memory_mb": 3072.0},  # > 2GB
            cache_performance={"average_hit_rate": 0.6},
            optimization_suggestions=[],
            timestamp=datetime.now()
        )

        recommendations = optimizer._generate_manual_recommendations(profile)

        assert len(recommendations) > 0

        # Should include memory-related recommendations
        memory_recommendations = [r for r in recommendations if "memory" in r.lower()]
        assert len(memory_recommendations) > 0

        # Should include general system recommendations
        system_recommendations = [r for r in recommendations if "system" in r.lower()]
        assert len(system_recommendations) > 0

    def test_apply_automatic_tunings(self, test_config):
        """Test applying automatic tuning recommendations"""
        optimizer = PerformanceOptimizer(test_config)

        # Mock config attributes
        test_config.parallel_builds = 2

        tunings = [
            {
                "type": "parallel_builds",
                "current": 2,
                "suggested": 3,
                "reason": "Low CPU utilization"
            }
        ]

        results = optimizer.apply_automatic_tunings(tunings)

        assert len(results["applied"]) == 1
        assert len(results["failed"]) == 0
        assert len(results["skipped"]) == 0

        # Verify tuning was applied
        assert test_config.parallel_builds == 3
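
# The tests above exercise the monitor -> profile -> optimizer flow piecewise. A
# rough sketch of how the pieces appear to fit together, based solely on the calls
# made in this module (not taken from deb-mock documentation; names such as
# "build-1", "hello", and phase_metrics are illustrative):
#
#     monitor = PerformanceMonitor(config)
#     profile_id = monitor.create_build_profile("build-1", "hello", "amd64", "trixie")
#     monitor.add_phase_metrics(profile_id, "build_phase", phase_metrics)  # a PerformanceMetrics
#     profile = monitor.finalize_build_profile(profile_id)
#
#     optimizer = PerformanceOptimizer(config)
#     analysis = optimizer.analyze_build_performance(profile)
#     optimizer.apply_automatic_tunings(analysis["automatic_tunings"])
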
class TestPerformanceReporter:
    """Test PerformanceReporter class"""

    def test_initialization(self, test_config):
        """Test PerformanceReporter initialization"""
        reporter = PerformanceReporter(test_config)

        assert reporter.config == test_config

    def test_generate_performance_report(self, test_config, temp_dir):
        """Test generating performance report"""
        reporter = PerformanceReporter(test_config)

        # Create a mock monitor with data
        monitor = Mock()
        monitor.get_performance_summary.return_value = {
            "total_operations": 5,
            "total_duration": 250.0,
            "average_duration": 50.0,
            "active_operations": 0,
            "operation_stats": {
                "test_op": {
                    "count": 5,
                    "avg_duration": 50.0,
                    "min_duration": 45.0,
                    "max_duration": 55.0
                }
            },
            "system_stats": {
                "cpu_percent": 75.0,
                "memory_percent": 60.0
            }
        }

        # Generate report
        report_file = reporter.generate_performance_report(monitor)

        assert os.path.exists(report_file)

        # Verify report content
        with open(report_file, 'r') as f:
            content = f.read()

        assert "DEB-MOCK PERFORMANCE REPORT" in content
        assert "Total Operations: 5" in content
        assert "Total Duration: 250.00s" in content
        assert "test_op:" in content

    def test_generate_build_profile_report(self, test_config, temp_dir):
        """Test generating build profile report"""
        reporter = PerformanceReporter(test_config)

        # Create a build profile
        profile = BuildProfile(
            build_id="test_build_123",
            package_name="test-package",
            architecture="amd64",
            suite="trixie",
            total_duration=45.23,
            phases={
                "build_phase": PerformanceMetrics(
                    operation="build_phase",
                    start_time=1000.0,
                    end_time=1045.23,
                    duration=45.23,
                    cpu_percent=75.5,
                    memory_mb=512.0,
                    disk_io_read_mb=25.6,
                    disk_io_write_mb=15.3,
                    network_io_mb=2.1,
                    chroot_size_mb=1024.0,
                    cache_hit_rate=0.8,
                    parallel_efficiency=0.9,
                    resource_utilization=0.85
                )
            },
            resource_peak={"cpu_percent": 75.5, "memory_mb": 512.0},
            cache_performance={"average_hit_rate": 0.8},
            optimization_suggestions=["Test suggestion"],
            timestamp=datetime.now()
        )

        # Generate report
        report_file = reporter.generate_build_profile_report(profile)

        assert os.path.exists(report_file)

        # Verify report content
        with open(report_file, 'r') as f:
            content = f.read()

        assert "BUILD PROFILE: test-package" in content
        assert "Build ID: test_build_123" in content
        assert "Architecture: amd64" in content
        assert "Suite: trixie" in content
        assert "Total Duration: 45.23s" in content
        assert "build_phase:" in content
        assert "Test suggestion" in content

    def test_generate_report_with_custom_output(self, test_config, temp_dir):
        """Test generating report with custom output file"""
        reporter = PerformanceReporter(test_config)

        monitor = Mock()
        monitor.get_performance_summary.return_value = {
            "total_operations": 1,
            "total_duration": 10.0,
            "average_duration": 10.0,
            "active_operations": 0
        }

        custom_file = os.path.join(temp_dir, "custom_report.txt")
        report_file = reporter.generate_performance_report(monitor, custom_file)

        assert report_file == custom_file
        assert os.path.exists(custom_file)

    def test_report_generation_error_handling(self, test_config):
        """Test error handling in report generation"""
        reporter = PerformanceReporter(test_config)

        # Mock monitor that raises an error
        monitor = Mock()
        monitor.get_performance_summary.side_effect = Exception("Test error")

        with pytest.raises(PerformanceError):
            reporter.generate_performance_report(monitor)
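

# Optional convenience entry point so this module can be run directly; this is an
# assumption for local use, not part of the project's documented pytest workflow,
# which would normally invoke these tests via `pytest`.
if __name__ == "__main__":
    raise SystemExit(pytest.main([__file__, "-v"]))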