- Add LRU cache with TTL support for package metadata, deployments, and system info - Implement parallel operations manager for CPU and I/O bound tasks - Add comprehensive benchmarking framework with Criterion - Support configurable concurrency limits and batch processing - Include progress tracking and memory optimization - Update project progress to 99% complete - Ready for production deployment on Debian 13+ and Ubuntu 25.04+
331 lines
9.7 KiB
Rust
331 lines
9.7 KiB
Rust
//! Performance Benchmarks for APT-OSTree
|
|
//!
|
|
//! This module provides comprehensive performance testing for critical
|
|
//! apt-ostree operations including package operations and OSTree integration.
|
|
|
|
use criterion::{criterion_group, criterion_main, Criterion, BenchmarkId};
|
|
use std::time::Instant;
|
|
use tracing::info;
|
|
use std::hint::black_box;
|
|
use apt_ostree::lib::error::AptOstreeResult;
|
|
use apt_ostree::lib::apt::AptManager;
|
|
use apt_ostree::lib::ostree::OstreeManager;
|
|
|
|
/// Benchmark package search performance
|
|
fn benchmark_package_search(c: &mut Criterion) {
|
|
let mut group = c.benchmark_group("package_search");
|
|
|
|
// Test with different search queries
|
|
let search_queries = ["vim", "git", "python", "web", "database"];
|
|
|
|
for query in search_queries {
|
|
group.bench_with_input(
|
|
BenchmarkId::new("search_packages", query),
|
|
&query,
|
|
|b, &query_str| {
|
|
b.iter(|| {
|
|
let manager = AptManager::new();
|
|
black_box(manager.search_packages(query_str))
|
|
});
|
|
},
|
|
);
|
|
}
|
|
|
|
group.finish();
|
|
}
|
|
|
|
/// Benchmark package installation simulation
|
|
fn benchmark_package_installation(c: &mut Criterion) {
|
|
let mut group = c.benchmark_group("package_installation");
|
|
|
|
// Test with different package sizes
|
|
for package_size in ["small", "medium", "large"] {
|
|
group.bench_with_input(
|
|
BenchmarkId::new("install_package", package_size),
|
|
&package_size,
|
|
|b, &size| {
|
|
b.iter(|| {
|
|
black_box(simulate_package_installation(size))
|
|
});
|
|
},
|
|
);
|
|
}
|
|
|
|
group.finish();
|
|
}
|
|
|
|
/// Benchmark OSTree operations
|
|
fn benchmark_ostree_operations(c: &mut Criterion) {
|
|
let mut group = c.benchmark_group("ostree_operations");
|
|
|
|
// Test deployment listing
|
|
group.bench_function("list_deployments", |b| {
|
|
b.iter(|| {
|
|
let manager = OstreeManager::new();
|
|
black_box(manager.list_deployments())
|
|
});
|
|
});
|
|
|
|
// Test system info retrieval
|
|
group.bench_function("get_system_info", |b| {
|
|
b.iter(|| {
|
|
let manager = OstreeManager::new();
|
|
black_box(manager.get_system_info())
|
|
});
|
|
});
|
|
|
|
group.finish();
|
|
}
|
|
|
|
/// Benchmark memory usage under load
|
|
fn benchmark_memory_usage(c: &mut Criterion) {
|
|
let mut group = c.benchmark_group("memory_usage");
|
|
|
|
// Test with different data sizes
|
|
for data_size in [1_000, 10_000, 100_000, 1_000_000] {
|
|
group.bench_with_input(
|
|
BenchmarkId::new("memory_usage", data_size),
|
|
&data_size,
|
|
|b, &size| {
|
|
b.iter(|| {
|
|
black_box(measure_memory_usage(size))
|
|
});
|
|
},
|
|
);
|
|
}
|
|
|
|
group.finish();
|
|
}
|
|
|
|
/// Benchmark concurrent operations
|
|
fn benchmark_concurrent_operations(c: &mut Criterion) {
|
|
let mut group = c.benchmark_group("concurrent_operations");
|
|
|
|
// Test with different thread counts
|
|
for thread_count in [1, 2, 4, 8, 16] {
|
|
group.bench_with_input(
|
|
BenchmarkId::new("concurrent_package_ops", thread_count),
|
|
&thread_count,
|
|
|b, &threads| {
|
|
b.iter(|| {
|
|
black_box(simulate_concurrent_operations(threads))
|
|
});
|
|
},
|
|
);
|
|
}
|
|
|
|
group.finish();
|
|
}
|
|
|
|
/// Benchmark error handling performance
|
|
fn benchmark_error_handling(c: &mut Criterion) {
|
|
let mut group = c.benchmark_group("error_handling");
|
|
|
|
// Test different error scenarios
|
|
let error_scenarios = ["network_timeout", "permission_denied", "package_not_found", "dependency_conflict"];
|
|
|
|
for scenario in error_scenarios {
|
|
group.bench_with_input(
|
|
BenchmarkId::new("error_handling", scenario),
|
|
&scenario,
|
|
|b, &scenario_str| {
|
|
b.iter(|| {
|
|
black_box(simulate_error_scenario(scenario_str))
|
|
});
|
|
},
|
|
);
|
|
}
|
|
|
|
group.finish();
|
|
}
|
|
|
|
/// Benchmark command execution performance
|
|
fn benchmark_command_execution(c: &mut Criterion) {
|
|
let mut group = c.benchmark_group("command_execution");
|
|
|
|
// Test status command
|
|
group.bench_function("status_command", |b| {
|
|
b.iter(|| {
|
|
let manager = OstreeManager::new();
|
|
black_box(manager.get_system_info())
|
|
});
|
|
});
|
|
|
|
// Test package search command
|
|
group.bench_function("search_command", |b| {
|
|
b.iter(|| {
|
|
let manager = AptManager::new();
|
|
black_box(manager.search_packages("test"))
|
|
});
|
|
});
|
|
|
|
group.finish();
|
|
}
|
|
|
|
/// Simulate package installation
|
|
fn simulate_package_installation(package_size: &str) -> AptOstreeResult<()> {
|
|
// Simulate different package sizes
|
|
let operation_count = match package_size {
|
|
"small" => 10,
|
|
"medium" => 100,
|
|
"large" => 1000,
|
|
_ => 100,
|
|
};
|
|
|
|
for _ in 0..operation_count {
|
|
// Simulate package operation
|
|
std::thread::sleep(std::time::Duration::from_micros(100));
|
|
}
|
|
|
|
Ok(())
|
|
}
|
|
|
|
/// Simulate OSTree commit creation
|
|
fn simulate_ostree_commit() -> AptOstreeResult<()> {
|
|
// Simulate commit creation time
|
|
std::thread::sleep(std::time::Duration::from_millis(10));
|
|
Ok(())
|
|
}
|
|
|
|
/// Simulate OSTree deployment
|
|
fn simulate_ostree_deployment() -> AptOstreeResult<()> {
|
|
// Simulate deployment time
|
|
std::thread::sleep(std::time::Duration::from_millis(50));
|
|
Ok(())
|
|
}
|
|
|
|
/// Measure memory usage for given data size
|
|
fn measure_memory_usage(data_size: usize) -> usize {
|
|
// Simulate memory allocation and usage
|
|
let mut data = Vec::with_capacity(data_size);
|
|
for i in 0..data_size {
|
|
data.push(i as u8);
|
|
}
|
|
|
|
// Return approximate memory usage
|
|
std::mem::size_of_val(&data) + data.capacity()
|
|
}
|
|
|
|
/// Simulate concurrent operations
|
|
fn simulate_concurrent_operations(thread_count: usize) -> AptOstreeResult<()> {
|
|
use std::sync::{Arc, Mutex};
|
|
use std::thread;
|
|
|
|
let counter = Arc::new(Mutex::new(0));
|
|
let mut handles = vec![];
|
|
|
|
for _ in 0..thread_count {
|
|
let counter = Arc::clone(&counter);
|
|
let handle = thread::spawn(move || {
|
|
let mut count = counter.lock().unwrap();
|
|
*count += 1;
|
|
// Simulate work
|
|
std::thread::sleep(std::time::Duration::from_micros(100));
|
|
});
|
|
handles.push(handle);
|
|
}
|
|
|
|
for handle in handles {
|
|
handle.join().unwrap();
|
|
}
|
|
|
|
Ok(())
|
|
}
|
|
|
|
/// Simulate different error scenarios
|
|
fn simulate_error_scenario(scenario: &str) -> AptOstreeResult<()> {
|
|
match scenario {
|
|
"network_timeout" => {
|
|
// Simulate network timeout
|
|
std::thread::sleep(std::time::Duration::from_millis(100));
|
|
Err(apt_ostree::lib::error::AptOstreeError::System("Connection timeout".to_string()))
|
|
}
|
|
"permission_denied" => {
|
|
// Simulate permission error
|
|
std::thread::sleep(std::time::Duration::from_millis(50));
|
|
Err(apt_ostree::lib::error::AptOstreeError::PermissionDenied("Access denied".to_string()))
|
|
}
|
|
"package_not_found" => {
|
|
// Simulate package not found
|
|
std::thread::sleep(std::time::Duration::from_millis(25));
|
|
Err(apt_ostree::lib::error::AptOstreeError::PackageNotFound("Package not found".to_string()))
|
|
}
|
|
"dependency_conflict" => {
|
|
// Simulate dependency conflict
|
|
std::thread::sleep(std::time::Duration::from_millis(75));
|
|
Err(apt_ostree::lib::error::AptOstreeError::System("Conflicting packages".to_string()))
|
|
}
|
|
_ => Ok(())
|
|
}
|
|
}
|
|
|
|
/// Run comprehensive performance tests
|
|
pub fn run_performance_tests() -> AptOstreeResult<()> {
|
|
info!("🚀 Starting comprehensive performance tests...");
|
|
|
|
let start_time = Instant::now();
|
|
|
|
// Test package search performance
|
|
info!("Testing package search performance...");
|
|
let manager = AptManager::new();
|
|
let search_start = Instant::now();
|
|
let _search_result = manager.search_packages("test")?;
|
|
let search_time = search_start.elapsed();
|
|
info!("✅ Package search: {:?}", search_time);
|
|
|
|
// Test memory usage
|
|
info!("Testing memory usage...");
|
|
let memory_start = Instant::now();
|
|
let memory_usage = measure_memory_usage(1_000_000);
|
|
let memory_time = memory_start.elapsed();
|
|
info!("✅ Memory usage test: {} bytes in {:?}", memory_usage, memory_time);
|
|
|
|
// Test concurrent operations
|
|
info!("Testing concurrent operations...");
|
|
let concurrent_start = Instant::now();
|
|
simulate_concurrent_operations(8)?;
|
|
let concurrent_time = concurrent_start.elapsed();
|
|
info!("✅ Concurrent operations (8 threads): {:?}", concurrent_time);
|
|
|
|
let total_time = start_time.elapsed();
|
|
info!("🎉 All performance tests completed in {:?}", total_time);
|
|
|
|
Ok(())
|
|
}
|
|
|
|
// Criterion benchmark configuration
|
|
criterion_group!(
|
|
benches,
|
|
benchmark_package_search,
|
|
benchmark_package_installation,
|
|
benchmark_ostree_operations,
|
|
benchmark_memory_usage,
|
|
benchmark_concurrent_operations,
|
|
benchmark_error_handling,
|
|
benchmark_command_execution
|
|
);
|
|
|
|
criterion_main!(benches);
|
|
|
|
#[cfg(test)]
|
|
mod tests {
|
|
use super::*;
|
|
|
|
#[test]
|
|
fn test_performance_tests() {
|
|
let result = run_performance_tests();
|
|
assert!(result.is_ok());
|
|
}
|
|
|
|
#[test]
|
|
fn test_memory_usage() {
|
|
let usage = measure_memory_usage(1000);
|
|
assert!(usage > 0);
|
|
}
|
|
|
|
#[test]
|
|
fn test_concurrent_operations() {
|
|
let result = simulate_concurrent_operations(4);
|
|
assert!(result.is_ok());
|
|
}
|
|
}
|