#!/usr/bin/env python3
"""
Debian Package Dependency Resolver for Debian Forge

This module provides Debian package dependency resolution for OSBuild Composer,
handling package dependencies, conflicts, and installation order.
"""

import json
from typing import Dict, List, Optional, Any, Set, Tuple
from dataclasses import dataclass
from datetime import datetime


@dataclass
class PackageInfo:
    """Represents package information and dependencies"""
    name: str
    version: str
    architecture: str
    depends: List[str]
    recommends: List[str]
    suggests: List[str]
    conflicts: List[str]
    breaks: List[str]
    replaces: List[str]
    provides: List[str]
    essential: bool = False
    priority: str = "optional"


@dataclass
class DependencyResolution:
    """Represents the result of dependency resolution"""
    packages: List[str]
    install_order: List[str]
    conflicts: List[str]
    missing: List[str]
    circular_deps: List[str]


class DebianPackageResolver:
    """Resolves Debian package dependencies for composer builds"""

    def __init__(self, repository_manager=None):
        self.repository_manager = repository_manager
        self.package_cache = {}
        self.dependency_graph = {}
        self.conflict_cache = {}

    def resolve_package_dependencies(self, packages: List[str], suite: str = "bookworm",
                                     architecture: str = "amd64",
                                     include_recommends: bool = False) -> DependencyResolution:
        """Resolve dependencies for a list of packages"""
        try:
            # Build dependency graph
            self._build_dependency_graph(packages, suite, architecture)

            # Check for conflicts
            conflicts = self._check_conflicts(packages)

            # Resolve dependencies
            resolved_packages, install_order, missing, circular_deps = self._resolve_dependencies(
                packages, include_recommends
            )

            return DependencyResolution(
                packages=list(resolved_packages),
                install_order=install_order,
                conflicts=conflicts,
                missing=missing,
                circular_deps=circular_deps
            )

        except Exception as e:
            print(f"Dependency resolution failed: {e}")
            # On failure, report every requested package as missing
            return DependencyResolution([], [], [], packages, [])

    def _build_dependency_graph(self, packages: List[str], suite: str, architecture: str):
        """Build dependency graph for packages"""
        self.dependency_graph = {}

        for package in packages:
            if package not in self.dependency_graph:
                self.dependency_graph[package] = {
                    'deps': set(),
                    'reverse_deps': set(),
                    'visited': False,
                    'installing': False
                }

            # Get package dependencies
            deps = self._get_package_dependencies(package, suite, architecture)
            self.dependency_graph[package]['deps'] = deps

            # Add reverse dependencies
            for dep in deps:
                if dep not in self.dependency_graph:
                    self.dependency_graph[dep] = {
                        'deps': set(),
                        'reverse_deps': set(),
                        'visited': False,
                        'installing': False
                    }
                self.dependency_graph[dep]['reverse_deps'].add(package)

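    # Note: only the direct dependencies of the requested packages are expanded
    # here; dependencies of dependencies enter the graph as leaf nodes.  A full
    # resolver would recurse (or iterate) until the dependency closure is complete.
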
    def _get_package_dependencies(self, package: str, suite: str, architecture: str) -> Set[str]:
        """Get dependencies for a specific package"""
        # This would typically query the Debian repository
        # For now, return common dependencies based on package type

        common_deps = {
            'systemd': {'libsystemd0', 'libc6'},
            'systemd-sysv': {'systemd'},
            'dbus': {'libdbus-1-3', 'libc6'},
            'udev': {'libudev1', 'libc6'},
            'ostree': {'libostree-1-1', 'libc6', 'libglib2.0-0'},
            'linux-image-amd64': {'linux-image-6.1.0-13-amd64', 'linux-firmware'},
            'openssh-server': {'openssh-client', 'libc6', 'libssl3'},
            'nginx': {'libc6', 'libssl3', 'libpcre3'},
            'postgresql': {'libc6', 'libssl3', 'libpq5'}
        }

        if package in common_deps:
            return common_deps[package]

        # Return minimal dependencies for unknown packages
        return {'libc6'}

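    # A minimal sketch of how a real lookup could work, assuming `apt-cache`
    # is available on the build host (kept as a comment: the output parsing
    # below is an untested assumption, not part of this module's behaviour):
    #
    #     out = subprocess.run(['apt-cache', 'depends', package],
    #                          capture_output=True, text=True, check=True).stdout
    #     return {line.split(':', 1)[1].strip()
    #             for line in out.splitlines()
    #             if line.strip().startswith('Depends:')}
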
    def _check_conflicts(self, packages: List[str]) -> List[str]:
        """Check for package conflicts"""
        conflicts = []

        # Common conflicts
        conflict_pairs = [
            ('systemd', 'sysvinit-core'),
            ('systemd-sysv', 'sysvinit-core'),
            ('lightdm', 'gdm3'),
            ('nginx', 'apache2'),
            ('postgresql', 'mysql-server')
        ]

        for pkg1, pkg2 in conflict_pairs:
            if pkg1 in packages and pkg2 in packages:
                conflicts.append(f"{pkg1} conflicts with {pkg2}")

        return conflicts

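    # The hard-coded pairs above stand in for the Conflicts:/Breaks: fields
    # that a real resolver would read from each package's control metadata.
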
    def _resolve_dependencies(self, packages: List[str], include_recommends: bool) -> Tuple[Set[str], List[str], List[str], List[str]]:
        """Resolve dependencies using topological sort"""
        resolved = set()
        install_order = []
        missing = []
        circular_deps = []

        # Reset visited flags
        for pkg in self.dependency_graph:
            self.dependency_graph[pkg]['visited'] = False
            self.dependency_graph[pkg]['installing'] = False

        # Process each package
        for package in packages:
            if package not in resolved:
                try:
                    self._visit_package(package, resolved, install_order, missing, circular_deps)
                except Exception:
                    missing.append(package)

        return resolved, install_order, missing, circular_deps

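    # `include_recommends` is accepted for interface compatibility but has no
    # effect yet: the mock dependency table above does not distinguish
    # Depends from Recommends.
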
    def _visit_package(self, package: str, resolved: Set[str], install_order: List[str],
                       missing: List[str], circular_deps: List[str]):
        """Visit a package for dependency resolution (DFS)"""
        if package not in self.dependency_graph:
            missing.append(package)
            return

        node = self.dependency_graph[package]

        if node['installing']:
            # Already on the current DFS path: dependency cycle
            circular_deps.append(package)
            return

        if node['visited']:
            return

        node['installing'] = True

        # Process dependencies first
        for dep in node['deps']:
            if dep not in resolved:
                self._visit_package(dep, resolved, install_order, missing, circular_deps)

        node['installing'] = False
        node['visited'] = True

        # Post-order append: dependencies land in install_order before the package
        resolved.add(package)
        install_order.append(package)

    def generate_apt_install_command(self, packages: List[str],
                                     include_recommends: bool = False,
                                     allow_unauthenticated: bool = False) -> List[str]:
        """Generate apt install command for resolved packages"""
        cmd = ['apt-get', '-y']

        if not include_recommends:
            cmd.append('--no-install-recommends')

        if allow_unauthenticated:
            cmd.append('--allow-unauthenticated')

        cmd.extend(['install'] + packages)
        return cmd

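    # Example (illustrative):
    #     generate_apt_install_command(['nginx'])
    #     -> ['apt-get', '-y', '--no-install-recommends', 'install', 'nginx']
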
    def generate_debootstrap_command(self, suite: str, mirror: str,
                                     components: Optional[List[str]] = None,
                                     variant: str = "minbase") -> List[str]:
        """Generate debootstrap command for base system"""
        if components is None:
            components = ["main"]

        cmd = [
            'debootstrap',
            '--arch=amd64',
            f'--variant={variant}',
            '--components=' + ','.join(components),
            suite,
            '/target',
            mirror
        ]

        return cmd

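    # Example (illustrative mirror URL):
    #     generate_debootstrap_command('bookworm', 'http://deb.debian.org/debian')
    #     -> ['debootstrap', '--arch=amd64', '--variant=minbase', '--components=main',
    #         'bookworm', '/target', 'http://deb.debian.org/debian']
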
    def validate_package_list(self, packages: List[str], suite: str = "bookworm") -> Dict[str, Any]:
        """Validate a list of packages for a specific suite"""
        validation_result = {
            'valid': True,
            'errors': [],
            'warnings': [],
            'suggestions': []
        }

        # Check for empty package list
        if not packages:
            validation_result['valid'] = False
            validation_result['errors'].append("Package list is empty")
            return validation_result

        # Check for duplicate packages
        duplicates = [pkg for pkg in set(packages) if packages.count(pkg) > 1]
        if duplicates:
            validation_result['warnings'].append(f"Duplicate packages: {duplicates}")

        # Check for essential packages
        essential_packages = ['systemd', 'systemd-sysv', 'dbus', 'udev']
        missing_essential = [pkg for pkg in essential_packages if pkg not in packages]
        if missing_essential:
            validation_result['suggestions'].append(f"Consider adding essential packages: {missing_essential}")

        # Check for conflicting packages
        conflicts = self._check_conflicts(packages)
        if conflicts:
            validation_result['valid'] = False
            validation_result['errors'].extend(conflicts)

        return validation_result

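    # Example result shape, e.g. for ['systemd', 'systemd-sysv'] (illustrative):
    #     {'valid': True, 'errors': [], 'warnings': [],
    #      'suggestions': ["Consider adding essential packages: ['dbus', 'udev']"]}
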
    def get_package_metadata(self, package: str, suite: str = "bookworm",
                             architecture: str = "amd64") -> Optional[PackageInfo]:
        """Get metadata for a specific package"""
        # This would typically query the Debian repository
        # For now, return mock data

        mock_packages = {
            'systemd': PackageInfo(
                name='systemd',
                version='252.19-1',
                architecture='amd64',
                depends=['libsystemd0', 'libc6'],
                recommends=['systemd-sysv'],
                suggests=['systemd-container', 'systemd-resolved'],
                conflicts=['sysvinit-core'],
                breaks=[],
                replaces=[],
                provides=['systemd-sysv'],
                essential=True,
                priority='important'
            ),
            'ostree': PackageInfo(
                name='ostree',
                version='2023.8-1',
                architecture='amd64',
                depends=['libostree-1-1', 'libc6', 'libglib2.0-0'],
                recommends=[],
                suggests=['ostree-tools'],
                conflicts=[],
                breaks=[],
                replaces=[],
                provides=[],
                essential=False,
                priority='optional'
            )
        }

        return mock_packages.get(package)

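    # A real implementation might build PackageInfo from `apt-cache show <pkg>`
    # output (sketch only, assuming single-line Debian control fields):
    #
    #     raw = subprocess.run(['apt-cache', 'show', package],
    #                          capture_output=True, text=True).stdout
    #     fields = dict(line.split(': ', 1) for line in raw.splitlines()
    #                   if ': ' in line and not line.startswith(' '))
    #     # e.g. fields.get('Version'), fields.get('Depends'), fields.get('Priority')
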
    def export_dependency_graph(self, output_path: str) -> bool:
        """Export dependency graph to file"""
        try:
            graph_data = {
                'packages': {},
                'dependencies': {},
                'exported_at': str(datetime.now())
            }

            for package, node in self.dependency_graph.items():
                graph_data['packages'][package] = {
                    'deps': list(node['deps']),
                    'reverse_deps': list(node['reverse_deps'])
                }

            with open(output_path, 'w') as f:
                json.dump(graph_data, f, indent=2)

            return True

        except Exception as e:
            print(f"Failed to export dependency graph: {e}")
            return False

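    # Exported JSON shape (illustrative values):
    #     {"packages": {"systemd": {"deps": ["libc6", "libsystemd0"],
    #                               "reverse_deps": []}},
    #      "dependencies": {},
    #      "exported_at": "2024-01-01 12:00:00.000000"}

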
def main():
    """Example usage of Debian package resolver"""
    print("Debian Package Resolver Example")

    # Create resolver
    resolver = DebianPackageResolver()

    # Test package resolution
    packages = ['systemd', 'ostree', 'openssh-server']

    print(f"\nResolving dependencies for: {packages}")
    resolution = resolver.resolve_package_dependencies(packages)

    print(f"Resolved packages: {len(resolution.packages)}")
    print(f"Install order: {resolution.install_order[:5]}...")
    print(f"Conflicts: {resolution.conflicts}")
    print(f"Missing: {resolution.missing}")

    # Test validation
    validation = resolver.validate_package_list(packages)
    print(f"\nValidation: {'Valid' if validation['valid'] else 'Invalid'}")
    if validation['errors']:
        print(f"Errors: {validation['errors']}")


if __name__ == '__main__':
    main()