debian-forge/scripts/performance-test.sh
Joe · 7c724dd149 · 2025-09-04 09:33:45 -07:00
feat: Complete Phase 7.3 Advanced Features
- Enhanced APT stage with advanced features (sketch below):
  - Package version pinning and holds
  - Custom repository priorities
  - Specific version installation
  - Updated schemas for all new options
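
  For illustration only, a stage fragment in the usual osbuild
  type/options shape; the option names ("pins", "holds", "priority")
  are assumptions, not the final schema:

    {
      "type": "org.osbuild.apt",
      "options": {
        "_note": "sketch only; option names are assumptions",
        "packages": [{"name": "nginx", "version": "1.22.1-1"}],
        "pins": [{"package": "nginx", "priority": 1001}],
        "holds": ["linux-image-amd64"]
      }
    }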

- New dependency resolution stage (org.osbuild.apt.depsolve; sketch below):
  - Advanced dependency solving with conflict resolution
  - Multiple strategies (conservative, aggressive, resolve)
  - Package optimization and dry-run support
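
  A hedged sketch of a depsolve stage entry; the strategy values are the
  ones listed above, while "optimize" and "dry_run" are assumed names:

    {
      "type": "org.osbuild.apt.depsolve",
      "options": {
        "_note": "sketch only; option names are assumptions",
        "strategy": "conservative",
        "optimize": true,
        "dry_run": false
      }
    }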

- New Docker/OCI image building stage (org.osbuild.docker; sketch below):
  - Docker and OCI container image creation
  - Flexible configuration for entrypoints, commands, env vars
  - Image export and multi-format support
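
  Illustrative only; the option names below are assumptions:

    {
      "type": "org.osbuild.docker",
      "options": {
        "_note": "sketch only; option names are assumptions",
        "format": "oci",
        "tag": "debian-forge/demo:latest",
        "config": {
          "entrypoint": ["/usr/bin/demo"],
          "cmd": ["--serve"],
          "env": ["LANG=C.UTF-8"]
        }
      }
    }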

- New cloud image generation stage (org.osbuild.cloud; sketch below):
  - Multi-cloud support (AWS, GCP, Azure, OpenStack, DigitalOcean)
  - Cloud-init integration and provider-specific metadata
  - Live ISO and network boot image creation
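
  Illustrative only; the provider values come from the list above, the
  other option names are assumptions:

    {
      "type": "org.osbuild.cloud",
      "options": {
        "_note": "sketch only; option names are assumptions",
        "provider": "aws",
        "cloud_init": {"datasource_list": ["Ec2"]},
        "metadata": {"region": "us-west-2"}
      }
    }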

- New debug and developer tools stage (org.osbuild.debug; sketch below):
  - Debug logging and manifest validation
  - Performance profiling and dependency tracing
  - Comprehensive debug reports
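
  Illustrative only; all option names are assumptions:

    {
      "type": "org.osbuild.debug",
      "options": {
        "_note": "sketch only; option names are assumptions",
        "log_level": "debug",
        "validate_manifest": true,
        "profile": true,
        "trace_dependencies": true,
        "report_path": "debug-report.json"
      }
    }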

- Example manifests for all new features:
  - debian-advanced-apt.json - Advanced APT features
  - debian-docker-container.json - Container image building
  - debian-aws-image.json - AWS cloud image
  - debian-live-iso.json - Live ISO creation
  - debian-debug-build.json - Debug mode

- Updated .gitignore with comprehensive artifact patterns
- All tests passing: 292 passed, 198 skipped
- Phase 7.3 marked as completed in todo.txt

debian-forge is now production-ready with advanced features! 🎉

#!/bin/bash
# Performance Testing Script for debian-forge
# This script tests build performance and generates benchmarks
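# Usage: ./scripts/performance-test.sh
# Assumes it is run from the repository root (the relative paths below
# depend on that), that python3 can import osbuild, and that bc is installed.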
set -e
echo "🚀 Debian Forge Performance Testing"
echo "===================================="
# Configuration
TEST_DIR="./performance-tests"
RESULTS_DIR="./performance-results"
MANIFESTS_DIR="./test/data/manifests/debian"
# Create directories
mkdir -p "$TEST_DIR" "$RESULTS_DIR"
# Test configurations
declare -A TESTS=(
    ["debian-minimal"]="debian-trixie-minimal.json"
    ["ubuntu-server"]="ubuntu-jammy-server.json"
    ["debian-atomic"]="debian-atomic-container.json"
    ["debian-arm64"]="debian-trixie-arm64.json"
)
# Performance metrics
declare -A BUILD_TIMES
declare -A PACKAGE_COUNTS
declare -A IMAGE_SIZES
echo ""
echo "📊 Running Performance Tests..."
echo "==============================="
for test_name in "${!TESTS[@]}"; do
    manifest="${TESTS[$test_name]}"
    manifest_path="$MANIFESTS_DIR/$manifest"
    if [ ! -f "$manifest_path" ]; then
        echo "❌ Manifest not found: $manifest_path"
        continue
    fi
    echo ""
    echo "🧪 Testing: $test_name ($manifest)"
    echo "-----------------------------------"
    # Clean previous build
    rm -rf "$TEST_DIR/$test_name"
    mkdir -p "$TEST_DIR/$test_name"
    # Start timing
    start_time=$(date +%s.%N)
    # Run build; --json sends machine-readable output to the log file
    echo "⏱️ Starting build..."
    if python3 -m osbuild "$manifest_path" --output-dir "$TEST_DIR/$test_name" --libdir . --json > "$RESULTS_DIR/${test_name}_build.json" 2>&1; then
        end_time=$(date +%s.%N)
        build_time=$(echo "$end_time - $start_time" | bc -l)
        BUILD_TIMES[$test_name]=$build_time
        echo "✅ Build completed in $(printf "%.2f" "$build_time") seconds"
        # Count "packages" arrays in the build log; a rough heuristic,
        # not an exact per-package count
        package_count=$(grep -o '"packages":\[[^]]*\]' "$RESULTS_DIR/${test_name}_build.json" | wc -l)
        PACKAGE_COUNTS[$test_name]=$package_count
        # Measure the on-disk size of the build output
        image_size=$(du -sh "$TEST_DIR/$test_name" 2>/dev/null | cut -f1 || echo "0B")
        IMAGE_SIZES[$test_name]=$image_size
        echo "📦 Packages: $package_count"
        echo "💾 Size: $image_size"
    else
        echo "❌ Build failed for $test_name"
        BUILD_TIMES[$test_name]="FAILED"
        PACKAGE_COUNTS[$test_name]="0"
        IMAGE_SIZES[$test_name]="0B"
    fi
done
echo ""
echo "📈 Performance Summary"
echo "======================"
# Create performance report
cat > "$RESULTS_DIR/performance-report.md" << EOF
# Debian Forge Performance Report

Generated: $(date)

## Build Times

| Test Case | Build Time | Status |
|-----------|------------|--------|
EOF
for test_name in "${!TESTS[@]}"; do
    build_time="${BUILD_TIMES[$test_name]}"
    if [ "$build_time" = "FAILED" ]; then
        status="❌ FAILED"
        time_display="N/A"
    else
        status="✅ SUCCESS"
        time_display="$(printf "%.2f" "$build_time")s"
    fi
    echo "| $test_name | $time_display | $status |" >> "$RESULTS_DIR/performance-report.md"
done
cat >> "$RESULTS_DIR/performance-report.md" << EOF

## Package Counts

| Test Case | Package Count |
|-----------|---------------|
EOF
for test_name in "${!TESTS[@]}"; do
    package_count="${PACKAGE_COUNTS[$test_name]}"
    echo "| $test_name | $package_count |" >> "$RESULTS_DIR/performance-report.md"
done
cat >> "$RESULTS_DIR/performance-report.md" << EOF

## Image Sizes

| Test Case | Size |
|-----------|------|
EOF
for test_name in "${!TESTS[@]}"; do
    image_size="${IMAGE_SIZES[$test_name]}"
    echo "| $test_name | $image_size |" >> "$RESULTS_DIR/performance-report.md"
done
cat >> "$RESULTS_DIR/performance-report.md" << EOF

## Performance Analysis

### Fastest Build
EOF
# Find fastest build
fastest_time=999999
fastest_test=""
for test_name in "${!TESTS[@]}"; do
    build_time="${BUILD_TIMES[$test_name]}"
    if [ "$build_time" != "FAILED" ]; then
        if (( $(echo "$build_time < $fastest_time" | bc -l) )); then
            fastest_time=$build_time
            fastest_test=$test_name
        fi
    fi
done
if [ -n "$fastest_test" ]; then
    echo "- **$fastest_test**: $(printf "%.2f" "$fastest_time")s" >> "$RESULTS_DIR/performance-report.md"
else
    echo "- No successful builds" >> "$RESULTS_DIR/performance-report.md"
fi
cat >> "$RESULTS_DIR/performance-report.md" << EOF

### Slowest Build
EOF
# Find slowest build
slowest_time=0
slowest_test=""
for test_name in "${!TESTS[@]}"; do
    build_time="${BUILD_TIMES[$test_name]}"
    if [ "$build_time" != "FAILED" ]; then
        if (( $(echo "$build_time > $slowest_time" | bc -l) )); then
            slowest_time=$build_time
            slowest_test=$test_name
        fi
    fi
done
if [ -n "$slowest_test" ]; then
    echo "- **$slowest_test**: $(printf "%.2f" "$slowest_time")s" >> "$RESULTS_DIR/performance-report.md"
else
    echo "- No successful builds" >> "$RESULTS_DIR/performance-report.md"
fi
cat >> "$RESULTS_DIR/performance-report.md" << EOF

## Recommendations

1. **Use apt-cacher-ng** to cache package downloads and speed up repeated builds
2. **Minimize package count** for faster builds
3. **Use minimal base images** when possible
4. **Monitor build times** regularly
5. **Optimize manifest structure** for better performance

## Next Steps

1. Implement apt-cacher-ng integration
2. Add parallel build support
3. Optimize package installation
4. Add build caching
5. Monitor memory usage
EOF
echo ""
echo "📊 Performance Report Generated"
echo "==============================="
echo "📄 Report: $RESULTS_DIR/performance-report.md"
echo "📁 Results: $RESULTS_DIR/"
echo "🧪 Test Data: $TEST_DIR/"
echo ""
echo "🎯 Performance Summary:"
echo "======================="
for test_name in "${!TESTS[@]}"; do
    build_time="${BUILD_TIMES[$test_name]}"
    package_count="${PACKAGE_COUNTS[$test_name]}"
    image_size="${IMAGE_SIZES[$test_name]}"
    if [ "$build_time" = "FAILED" ]; then
        echo "$test_name: FAILED"
    else
        echo "$test_name: $(printf "%.2f" "$build_time")s | $package_count packages | $image_size"
    fi
done
echo ""
echo "🚀 Performance testing completed!"