#!/usr/bin/env python3
"""
Test Composer Orchestration with Debian Forge

This script tests the integration between OSBuild Composer and our
Debian Forge build orchestration system.
"""
import json
import os
import sys
from pathlib import Path
def test_blueprint_loading(blueprint_dir="blueprints"):
    """Test loading and validation of blueprint files.

    Args:
        blueprint_dir: Directory that holds the blueprint JSON files.
            Defaults to "blueprints" so existing callers are unaffected.

    Returns:
        True if every expected blueprint file exists, parses as JSON and
        contains the required top-level fields; False otherwise.
    """
    print("Testing blueprint loading...")

    blueprint_dir = Path(blueprint_dir)
    if not blueprint_dir.exists():
        print(" ❌ Blueprint directory not found")
        return False

    blueprints = ["debian-atomic-base.json", "debian-atomic-workstation.json", "debian-atomic-server.json"]

    for blueprint_file in blueprints:
        blueprint_path = blueprint_dir / blueprint_file
        if not blueprint_path.exists():
            print(f" ❌ Blueprint file not found: {blueprint_file}")
            return False

        try:
            # Explicit encoding: blueprint descriptions may contain
            # non-ASCII text, and the platform default is not guaranteed.
            with open(blueprint_path, 'r', encoding='utf-8') as f:
                blueprint = json.load(f)

            # Validate basic structure
            required_fields = ["name", "description", "version", "packages"]
            for field in required_fields:
                if field not in blueprint:
                    print(f" ❌ {blueprint_file} missing field: {field}")
                    return False

            print(f" ✅ {blueprint_file} loaded and validated")

        except json.JSONDecodeError as e:
            print(f" ❌ {blueprint_file} invalid JSON: {e}")
            return False
        except Exception as e:
            # Best-effort catch-all so one unreadable file fails the test
            # instead of crashing the whole test run.
            print(f" ❌ {blueprint_file} error: {e}")
            return False

    return True
def test_pipeline_generation():
    """Test OSBuild pipeline generation from blueprints"""
    print("\nTesting pipeline generation...")

    apt_proxy = "http://192.168.1.101:3142"

    # Reference pipeline for the base blueprint, with each stage built
    # separately for readability before assembly into the document.
    debootstrap_stage = {
        "type": "org.osbuild.debootstrap",
        "options": {
            "suite": "bookworm",
            "mirror": "http://deb.debian.org/debian",
            "arch": "amd64",
            "variant": "minbase",
            "apt_proxy": apt_proxy
        }
    }
    apt_stage = {
        "type": "org.osbuild.apt",
        "options": {
            "packages": ["systemd", "systemd-sysv", "dbus", "udev", "ostree", "linux-image-amd64"],
            "recommends": False,
            "update": True,
            "apt_proxy": apt_proxy
        }
    }
    commit_stage = {
        "type": "org.osbuild.ostree.commit",
        "options": {
            "repo": "debian-atomic",
            "branch": "debian/bookworm",
            "subject": "Debian Bookworm atomic system",
            "body": "Debian Bookworm minbase system with systemd and OSTree"
        }
    }
    base_pipeline = {
        "version": "2",
        "pipelines": [
            {
                "name": "build",
                "runner": "org.osbuild.linux",
                "stages": [debootstrap_stage, apt_stage, commit_stage]
            }
        ]
    }

    # Validate pipeline structure (guard clauses, checked in order).
    if "version" not in base_pipeline:
        print(" ❌ Pipeline missing version")
        return False

    if "pipelines" not in base_pipeline:
        print(" ❌ Pipeline missing pipelines array")
        return False

    if not base_pipeline["pipelines"]:
        print(" ❌ Pipeline array is empty")
        return False

    build_pipeline = base_pipeline["pipelines"][0]
    if "stages" not in build_pipeline:
        print(" ❌ Build pipeline missing stages")
        return False

    # Validate that every required stage type is present.
    expected_stages = ["org.osbuild.debootstrap", "org.osbuild.apt", "org.osbuild.ostree.commit"]
    actual_stages = [stage["type"] for stage in build_pipeline["stages"]]

    for wanted in expected_stages:
        if wanted not in actual_stages:
            print(f" ❌ Missing expected stage: {wanted}")
            return False

    print(" ✅ Pipeline generation is valid")
    return True
def test_build_orchestration_integration():
    """Test integration with our build orchestration system"""
    print("\nTesting build orchestration integration...")

    # The orchestration components must exist in the current directory.
    required_modules = (
        "build_orchestrator.py",
        "artifact_manager.py",
        "build_environment.py",
        "osbuild_integration.py",
    )

    for module_path in required_modules:
        if not os.path.exists(module_path):
            print(f" ❌ Build orchestration file not found: {module_path}")
            return False

    # A representative build request, validated for its mandatory keys.
    build_request = {
        "blueprint": "debian-atomic-base",
        "target": "qcow2",
        "architecture": "amd64",
        "compose_type": "debian-atomic",
        "priority": "normal"
    }

    for field in ("blueprint", "target", "architecture"):
        if field not in build_request:
            print(f" ❌ Build request missing field: {field}")
            return False

    print(" ✅ Build orchestration integration is valid")
    return True
def test_composer_api_integration():
    """Test composer API integration patterns"""
    print("\nTesting composer API integration...")

    # Every endpoint must live under the versioned API prefix.
    api_endpoints = {
        "blueprints": "/api/v1/blueprints",
        "compose": "/api/v1/compose",
        "status": "/api/v1/compose/status",
        "logs": "/api/v1/compose/logs",
        "upload": "/api/v1/upload"
    }

    for name, route in api_endpoints.items():
        if not route.startswith("/api/v1/"):
            print(f" ❌ Invalid API path for {name}: {route}")
            return False

    # Each operation must map to a standard HTTP verb.
    http_methods = {
        "submit_blueprint": "POST",
        "get_blueprint": "GET",
        "update_blueprint": "PUT",
        "delete_blueprint": "DELETE",
        "start_compose": "POST",
        "get_compose_status": "GET",
        "cancel_compose": "DELETE"
    }

    allowed_verbs = {"GET", "POST", "PUT", "DELETE"}
    for operation, verb in http_methods.items():
        if verb not in allowed_verbs:
            print(f" ❌ Invalid HTTP method for {operation}: {verb}")
            return False

    print(" ✅ Composer API integration is valid")
    return True
def test_debian_specific_features():
    """Test Debian-specific composer features"""
    print("\nTesting Debian-specific features...")

    # Package categories: each value must be a list of package-name strings.
    debian_packages = {
        "base_system": ["systemd", "systemd-sysv", "dbus", "udev"],
        "desktop_environment": ["gnome-shell", "gnome-session", "gdm3"],
        "server_services": ["nginx", "postgresql", "redis-server"],
        "development_tools": ["build-essential", "git", "python3"],
        "security_tools": ["fail2ban", "unattended-upgrades"]
    }

    for category, pkg_list in debian_packages.items():
        if not isinstance(pkg_list, list):
            print(f" ❌ {category} packages must be a list")
            return False

        for pkg in pkg_list:
            if not isinstance(pkg, str):
                print(f" ❌ Package name must be string: {pkg}")
                return False

    # Repository configuration: every mirror must be an HTTP(S) URL.
    debian_repos = {
        "main": "http://deb.debian.org/debian",
        "security": "http://security.debian.org/debian-security",
        "updates": "http://deb.debian.org/debian"
    }

    for repo_name, repo_url in debian_repos.items():
        if not repo_url.startswith("http"):
            print(f" ❌ Invalid repository URL for {repo_name}: {repo_url}")
            return False

    print(" ✅ Debian-specific features are valid")
    return True
def test_end_to_end_workflow():
    """Test end-to-end Debian atomic build workflow"""
    print("\nTesting end-to-end workflow...")

    # The complete workflow, in execution order.
    workflow = [
        "blueprint_submission",
        "pipeline_generation",
        "build_execution",
        "ostree_composition",
        "image_generation",
        "deployment_preparation"
    ]
    known_steps = set(workflow)

    # Each step's prerequisites must themselves be workflow steps.
    workflow_deps = {
        "blueprint_submission": [],
        "pipeline_generation": ["blueprint_submission"],
        "build_execution": ["pipeline_generation"],
        "ostree_composition": ["build_execution"],
        "image_generation": ["ostree_composition"],
        "deployment_preparation": ["image_generation"]
    }

    for step, prereqs in workflow_deps.items():
        if step not in known_steps:
            print(f" ❌ Workflow step not found: {step}")
            return False

        for dep in prereqs:
            if dep not in known_steps:
                print(f" ❌ Workflow dependency not found: {dep}")
                return False

    # Every step needs a meaningful (>= 10 character) description.
    workflow_validation = {
        "blueprint_submission": "User submits blueprint via composer API",
        "pipeline_generation": "Composer generates OSBuild pipeline from blueprint",
        "build_execution": "Our build orchestrator executes the pipeline",
        "ostree_composition": "Debian stages create atomic filesystem",
        "image_generation": "Output formats (ISO, QCOW2, RAW) generated",
        "deployment_preparation": "OSTree commits available for deployment"
    }

    for step, description in workflow_validation.items():
        if not description or len(description) < 10:
            print(f" ❌ Workflow step {step} missing description")
            return False

    print(" ✅ End-to-end workflow is valid")
    return True
def main():
    """Main test function"""
    print("Composer Orchestration Test for Debian Forge")
    print("=" * 60)

    # (name, callable) pairs, run in order; each returns True on pass.
    tests = [
        ("Blueprint Loading", test_blueprint_loading),
        ("Pipeline Generation", test_pipeline_generation),
        ("Build Orchestration Integration", test_build_orchestration_integration),
        ("Composer API Integration", test_composer_api_integration),
        ("Debian-Specific Features", test_debian_specific_features),
        ("End-to-End Workflow", test_end_to_end_workflow)
    ]

    results = []
    for test_name, test_func in tests:
        try:
            results.append((test_name, test_func()))
        except Exception as e:
            # An unexpected exception counts as a failure, not a crash.
            print(f" ❌ {test_name} test failed with exception: {e}")
            results.append((test_name, False))

    # Summary
    print("\n" + "=" * 60)
    print("TEST SUMMARY")
    print("=" * 60)

    passed = sum(1 for _, ok in results if ok)
    total = len(results)

    for test_name, ok in results:
        status = "✅ PASS" if ok else "❌ FAIL"
        print(f"{test_name}: {status}")

    print(f"\nOverall: {passed}/{total} tests passed")

    if passed == total:
        print("🎉 All tests passed! Composer orchestration is ready for production.")
        return 0

    print("⚠️ Some tests failed. Please review the issues above.")
    return 1
if __name__ == '__main__':
    # Propagate main()'s status code (0 = all passed, 1 = failures)
    # as the process exit code so CI can detect test failures.
    sys.exit(main())