docs: centralize and rationalize apt-ostree.py documentation; overhaul configuration management with robust env var mapping and validation
Commit 6883cadf4d (parent 20db68a97f)
13 changed files with 1770 additions and 36 deletions

debug_config.py (new file, 72 lines)
@@ -0,0 +1,72 @@
#!/usr/bin/env python3
"""
Debug script for configuration environment variable handling
"""

import os
import sys
from pathlib import Path

# Add the apt-ostree module to path
sys.path.insert(0, str(Path(__file__).parent / "src/apt-ostree.py/python"))

from utils.config import ConfigManager


def debug_environment_variables():
    """Debug environment variable handling"""

    print("=== Debug Environment Variables ===\n")

    # Set test environment variables
    os.environ['APT_OSTREE_DAEMON__LOGGING__LEVEL'] = 'DEBUG'
    os.environ['APT_OSTREE_DAEMON__CONCURRENCY__MAX_WORKERS'] = '8'
    os.environ['APT_OSTREE_DAEMON__LOGGING__COMPRESSION'] = 'false'
    os.environ['APT_OSTREE_PERFORMANCE__CACHE_TTL'] = '7200'

    print("Environment variables set:")
    for key, value in os.environ.items():
        if key.startswith('APT_OSTREE_'):
            print(f"  {key} = {value}")

    # Create config manager
    config_manager = ConfigManager("/tmp/debug-config.yaml")

    print(f"\nConfig manager env_prefix: {config_manager.env_prefix}")

    # Load config
    config = config_manager.load_config()

    if config:
        print("\nConfiguration loaded successfully")
        print(f"Log level: {config_manager.get('daemon.logging.level')}")
        print(f"Max workers: {config_manager.get('daemon.concurrency.max_workers')}")
        print(f"Compression: {config_manager.get('daemon.logging.compression')}")
        print(f"Cache TTL: {config_manager.get('performance.cache_ttl')}")

        # Check if environment variables were applied
        expected_values = {
            'daemon.logging.level': 'DEBUG',
            'daemon.concurrency.max_workers': 8,
            'daemon.logging.compression': False,
            'performance.cache_ttl': 7200
        }

        print("\nChecking expected values:")
        for key, expected in expected_values.items():
            actual = config_manager.get(key)
            status = "✅" if actual == expected else "❌"
            print(f"  {status} {key}: expected {expected}, got {actual}")
    else:
        print("❌ Failed to load configuration")

    # Clean up
    for key in [
            'APT_OSTREE_DAEMON__LOGGING__LEVEL',
            'APT_OSTREE_DAEMON__CONCURRENCY__MAX_WORKERS',
            'APT_OSTREE_DAEMON__LOGGING__COMPRESSION',
            'APT_OSTREE_PERFORMANCE__CACHE_TTL']:
        if key in os.environ:
            del os.environ[key]


if __name__ == "__main__":
    debug_environment_variables()
src/apt-ostree.py/docs/CONFIGURATION_ENHANCEMENT.md (new file, 497 lines)
@@ -0,0 +1,497 @@
|
||||||
|
# Enhanced Configuration Management
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
|
||||||
|
The apt-ostree daemon now includes a comprehensive configuration management system with schema-based validation, typed defaults, and environment variable integration. Configuration can come from built-in defaults, a YAML file, or environment overrides, and it is validated before use.
|
||||||
|
|
||||||
|
## Features
|
||||||
|
|
||||||
|
### 🔍 **Schema-Based Validation**
|
||||||
|
- Comprehensive configuration schema with type checking
|
||||||
|
- Pattern validation for strings (D-Bus names, paths, etc.)
|
||||||
|
- Range validation for numeric values
|
||||||
|
- Enum validation for predefined values
|
||||||
|
- Nested object validation
|
||||||
|
|
||||||
|
### 🌍 **Environment Variable Integration**
|
||||||
|
- Automatic environment variable override
|
||||||
|
- Type conversion (string → bool/int/float)
|
||||||
|
- Prefix-based filtering (`APT_OSTREE_`)
|
||||||
|
- Dot notation mapping
|
||||||
|
|
||||||
|
### 📄 **YAML Configuration Files**
|
||||||
|
- Human-readable YAML format
|
||||||
|
- Hierarchical configuration structure
|
||||||
|
- Default value merging
|
||||||
|
- Configuration file validation
|
||||||
|
|
||||||
|
### 🔧 **Advanced Features**
|
||||||
|
- JSON schema export
|
||||||
|
- Validation error reporting
|
||||||
|
- Configuration reloading
|
||||||
|
- Backward compatibility
|
||||||
|
|
||||||
|
## Configuration Structure
|
||||||
|
|
||||||
|
### Top-Level Sections
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
daemon: # Daemon-specific configuration
|
||||||
|
sysroot: # System root and OSTree configuration
|
||||||
|
shell_integration: # Shell script integration settings
|
||||||
|
hardware_detection: # Hardware detection features
|
||||||
|
dkms: # DKMS (Dynamic Kernel Module Support)
|
||||||
|
security: # Security and authorization settings
|
||||||
|
performance: # Performance optimization settings
|
||||||
|
experimental: # Experimental features
|
||||||
|
```
|
||||||
|
|
||||||
|
### Detailed Configuration Schema
|
||||||
|
|
||||||
|
#### Daemon Configuration
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
daemon:
|
||||||
|
dbus:
|
||||||
|
bus_name: "org.debian.aptostree1" # D-Bus bus name
|
||||||
|
object_path: "/org/debian/aptostree1" # D-Bus object path
|
||||||
|
|
||||||
|
concurrency:
|
||||||
|
max_workers: 3 # Max concurrent workers (1-32)
|
||||||
|
transaction_timeout: 300 # Transaction timeout in seconds (30-3600)
|
||||||
|
|
||||||
|
logging:
|
||||||
|
level: "INFO" # Log level (DEBUG, INFO, WARNING, ERROR, CRITICAL)
|
||||||
|
format: "json" # Log format (json, text)
|
||||||
|
file: "/var/log/apt-ostree/daemon.log" # Log file path
|
||||||
|
max_size: "100MB" # Max log file size (e.g., 50MB, 1GB)
|
||||||
|
max_files: 5 # Max log files to keep (1-100)
|
||||||
|
rotation_strategy: "size" # Rotation strategy (size, time, hybrid)
|
||||||
|
rotation_interval: 1 # Rotation interval (1-365)
|
||||||
|
rotation_unit: "D" # Rotation unit (D=days, H=hours, M=minutes)
|
||||||
|
compression: true # Enable log compression
|
||||||
|
correlation_id: true # Enable correlation IDs
|
||||||
|
performance_monitoring: true # Enable performance monitoring
|
||||||
|
cleanup_old_logs: true # Enable automatic log cleanup
|
||||||
|
cleanup_days: 30 # Days to keep logs (1-365)
|
||||||
|
include_hostname: true # Include hostname in logs
|
||||||
|
include_version: true # Include version in logs
|
||||||
|
|
||||||
|
auto_update_policy: "none" # Auto update policy (none, check, download, install)
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Sysroot Configuration
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
sysroot:
|
||||||
|
path: "/" # System root path
|
||||||
|
repo_path: "/var/lib/ostree/repo" # OSTree repository path
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Shell Integration Configuration
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
shell_integration:
|
||||||
|
script_path: "/usr/local/bin/apt-layer.sh" # Shell script path
|
||||||
|
timeout:
|
||||||
|
install: 300 # Install timeout in seconds (30-3600)
|
||||||
|
remove: 300 # Remove timeout in seconds (30-3600)
|
||||||
|
composefs: 600 # ComposeFS timeout in seconds (60-7200)
|
||||||
|
dkms: 1800 # DKMS timeout in seconds (300-7200)
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Hardware Detection Configuration
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
hardware_detection:
|
||||||
|
auto_configure: true # Enable automatic hardware configuration
|
||||||
|
gpu_detection: true # Enable GPU detection
|
||||||
|
cpu_detection: true # Enable CPU detection
|
||||||
|
motherboard_detection: true # Enable motherboard detection
|
||||||
|
```
|
||||||
|
|
||||||
|
#### DKMS Configuration
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
dkms:
|
||||||
|
enabled: true # Enable DKMS support
|
||||||
|
auto_rebuild: true # Enable automatic DKMS rebuild
|
||||||
|
build_timeout: 3600 # DKMS build timeout in seconds (300-7200)
|
||||||
|
kernel_hooks: true # Enable kernel hooks
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Security Configuration
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
security:
|
||||||
|
polkit_required: true # Require PolicyKit authorization
|
||||||
|
apparmor_profile: "/etc/apparmor.d/apt-ostree" # AppArmor profile path
|
||||||
|
selinux_context: "system_u:system_r:apt_ostree_t:s0" # SELinux context
|
||||||
|
privilege_separation: true # Enable privilege separation
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Performance Configuration
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
performance:
|
||||||
|
cache_enabled: true # Enable caching
|
||||||
|
cache_ttl: 3600 # Cache TTL in seconds (60-86400)
|
||||||
|
parallel_operations: true # Enable parallel operations
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Experimental Configuration
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
experimental:
|
||||||
|
composefs: false # Enable ComposeFS (experimental)
|
||||||
|
hardware_detection: false # Enable hardware detection (experimental)
|
||||||
|
```
|
||||||
|
|
||||||
|
## Environment Variables
|
||||||
|
|
||||||
|
The configuration system supports environment variable overrides using the `APT_OSTREE_` prefix. Nesting levels are separated by double underscores (`__`), single underscores inside a key name (for example `MAX_WORKERS`) are preserved, and values are automatically converted to the appropriate type before being merged with the configuration.
|
||||||
|
|
||||||
|
### Environment Variable Format
|
||||||
|
|
||||||
|
```
|
||||||
|
APT_OSTREE_<SECTION>__<SUBSECTION>__<KEY>
|
||||||
|
```
|
||||||
|
|
||||||
|
### Examples
|
||||||
|
|
||||||
|
```bash
# Set log level
export APT_OSTREE_DAEMON__LOGGING__LEVEL=DEBUG

# Set max workers
export APT_OSTREE_DAEMON__CONCURRENCY__MAX_WORKERS=8

# Disable compression
export APT_OSTREE_DAEMON__LOGGING__COMPRESSION=false

# Set cache TTL
export APT_OSTREE_PERFORMANCE__CACHE_TTL=7200

# Enable experimental features
export APT_OSTREE_EXPERIMENTAL__COMPOSEFS=true
```
|
||||||
|
|
||||||
|
### Type Conversion
|
||||||
|
|
||||||
|
- **Boolean**: `true`, `false` (case-insensitive)
|
||||||
|
- **Integer**: Numeric strings (e.g., `"8"`, `"3600"`)
|
||||||
|
- **Float**: Decimal strings (e.g., `"3.14"`)
|
||||||
|
- **String**: All other values
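
For illustration, here is a minimal sketch of that conversion logic, mirroring the behaviour implemented in `utils/config.py` (the `coerce_env_value` helper name is ours and not part of the API):

```python
def coerce_env_value(value: str):
    """Convert an environment variable string to bool/int/float, else keep the string."""
    if value.lower() in ('true', 'false'):
        return value.lower() == 'true'
    if value.isdigit():
        return int(value)
    if value.replace('.', '', 1).isdigit() and value.count('.') < 2:
        return float(value)
    return value

assert coerce_env_value('false') is False
assert coerce_env_value('8') == 8
assert coerce_env_value('3.14') == 3.14
assert coerce_env_value('100MB') == '100MB'
```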
|
||||||
|
|
||||||
|
## Usage Examples
|
||||||
|
|
||||||
|
### Basic Configuration Loading
|
||||||
|
|
||||||
|
```python
|
||||||
|
from utils.config import ConfigManager
|
||||||
|
|
||||||
|
# Load configuration with defaults
|
||||||
|
config_manager = ConfigManager("/etc/apt-ostree/config.yaml")
|
||||||
|
config = config_manager.load_config()
|
||||||
|
|
||||||
|
if config:
|
||||||
|
print("Configuration loaded successfully")
|
||||||
|
else:
|
||||||
|
print("Configuration validation failed")
|
||||||
|
```
|
||||||
|
|
||||||
|
### Configuration Validation
|
||||||
|
|
||||||
|
```python
|
||||||
|
from utils.config import ConfigManager
|
||||||
|
|
||||||
|
config_manager = ConfigManager()
|
||||||
|
|
||||||
|
# Validate configuration
|
||||||
|
if config_manager.validate():
|
||||||
|
print("Configuration is valid")
|
||||||
|
else:
|
||||||
|
errors = config_manager.get_validation_errors()
|
||||||
|
for error in errors:
|
||||||
|
print(f"Error: {error.field} - {error.message}")
|
||||||
|
```
|
||||||
|
|
||||||
|
### Environment Variable Integration
|
||||||
|
|
||||||
|
```python
|
||||||
|
import os
|
||||||
|
from utils.config import ConfigManager
|
||||||
|
|
||||||
|
# Set environment variables
|
||||||
|
os.environ['APT_OSTREE_DAEMON__LOGGING__LEVEL'] = 'DEBUG'
|
||||||
|
os.environ['APT_OSTREE_DAEMON__CONCURRENCY__MAX_WORKERS'] = '8'
|
||||||
|
|
||||||
|
# Load configuration (environment variables will be applied)
|
||||||
|
config_manager = ConfigManager()
|
||||||
|
config = config_manager.load_config()
|
||||||
|
|
||||||
|
# Environment variables override file configuration
|
||||||
|
print(f"Log level: {config_manager.get('daemon.logging.level')}") # DEBUG
|
||||||
|
print(f"Max workers: {config_manager.get('daemon.concurrency.max_workers')}") # 8
|
||||||
|
```
|
||||||
|
|
||||||
|
### Configuration Modification
|
||||||
|
|
||||||
|
```python
|
||||||
|
from utils.config import ConfigManager
|
||||||
|
|
||||||
|
config_manager = ConfigManager("/etc/apt-ostree/config.yaml")
|
||||||
|
config_manager.load_config()
|
||||||
|
|
||||||
|
# Modify configuration
|
||||||
|
config_manager.set('daemon.logging.level', 'WARNING')
|
||||||
|
config_manager.set('performance.cache_enabled', False)
|
||||||
|
|
||||||
|
# Save changes
|
||||||
|
if config_manager.save():
|
||||||
|
print("Configuration saved successfully")
|
||||||
|
else:
|
||||||
|
print("Failed to save configuration")
|
||||||
|
```
|
||||||
|
|
||||||
|
### Schema Export
|
||||||
|
|
||||||
|
```python
|
||||||
|
from utils.config import ConfigManager
|
||||||
|
|
||||||
|
config_manager = ConfigManager()
|
||||||
|
|
||||||
|
# Export JSON schema
|
||||||
|
if config_manager.export_schema("/tmp/config-schema.json"):
|
||||||
|
print("Schema exported successfully")
|
||||||
|
else:
|
||||||
|
print("Failed to export schema")
|
||||||
|
```
|
||||||
|
|
||||||
|
## Validation Rules
|
||||||
|
|
||||||
|
### Type Validation
|
||||||
|
- **String**: Must be string type
|
||||||
|
- **Integer**: Must be integer type
|
||||||
|
- **Boolean**: Must be boolean type
|
||||||
|
- **Dictionary**: Must be dictionary type
|
||||||
|
|
||||||
|
### Pattern Validation
|
||||||
|
- **D-Bus Bus Name**: `^[a-zA-Z][a-zA-Z0-9_]*(\.[a-zA-Z][a-zA-Z0-9_]*)*$`
|
||||||
|
- **D-Bus Object Path**: `^/[a-zA-Z][a-zA-Z0-9_]*(\/[a-zA-Z][a-zA-Z0-9_]*)*$`
|
||||||
|
- **Log File Size**: `^\d+[KMGT]?B$`
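
These patterns can be exercised directly with Python's `re` module; the sample values below are illustrative only:

```python
import re

patterns = {
    'daemon.dbus.bus_name':    r'^[a-zA-Z][a-zA-Z0-9_]*(\.[a-zA-Z][a-zA-Z0-9_]*)*$',
    'daemon.dbus.object_path': r'^/[a-zA-Z][a-zA-Z0-9_]*(\/[a-zA-Z][a-zA-Z0-9_]*)*$',
    'daemon.logging.max_size': r'^\d+[KMGT]?B$',
}

samples = {
    'daemon.dbus.bus_name': 'org.debian.aptostree1',      # valid
    'daemon.dbus.object_path': '/org/debian/aptostree1',  # valid
    'daemon.logging.max_size': '100 MB',                  # invalid: whitespace not allowed
}

for field, value in samples.items():
    ok = re.match(patterns[field], value) is not None
    print(f"{field}: {value!r} -> {'match' if ok else 'pattern mismatch'}")
```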
|
||||||
|
|
||||||
|
### Range Validation
|
||||||
|
- **max_workers**: 1-32
|
||||||
|
- **transaction_timeout**: 30-3600 seconds
|
||||||
|
- **max_files**: 1-100
|
||||||
|
- **rotation_interval**: 1-365
|
||||||
|
- **cleanup_days**: 1-365
|
||||||
|
- **cache_ttl**: 60-86400 seconds
|
||||||
|
|
||||||
|
### Enum Validation
|
||||||
|
- **Log Level**: DEBUG, INFO, WARNING, ERROR, CRITICAL
|
||||||
|
- **Log Format**: json, text
|
||||||
|
- **Rotation Strategy**: size, time, hybrid
|
||||||
|
- **Rotation Unit**: D, H, M
|
||||||
|
- **Auto Update Policy**: none, check, download, install
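
The log level, rotation strategy, and update policy lists come from enumerations defined in `utils/config.py`, so the allowed values can also be inspected programmatically (log format and rotation unit are plain allowed-value lists rather than enums):

```python
from utils.config import LogLevel, RotationStrategy, UpdatePolicy

print([level.value for level in LogLevel])   # ['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL']
print([s.value for s in RotationStrategy])   # ['size', 'time', 'hybrid']
print([p.value for p in UpdatePolicy])       # ['none', 'check', 'download', 'install']
```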
|
||||||
|
|
||||||
|
## Error Handling
|
||||||
|
|
||||||
|
### Validation Errors
|
||||||
|
|
||||||
|
The configuration system provides detailed error reporting:
|
||||||
|
|
||||||
|
```python
|
||||||
|
from utils.config import ConfigManager
|
||||||
|
|
||||||
|
config_manager = ConfigManager()
|
||||||
|
config = config_manager.load_config()
|
||||||
|
|
||||||
|
if not config:
|
||||||
|
errors = config_manager.get_validation_errors()
|
||||||
|
warnings = config_manager.get_validation_warnings()
|
||||||
|
|
||||||
|
print("Validation Errors:")
|
||||||
|
for error in errors:
|
||||||
|
print(f" {error.field}: {error.message}")
|
||||||
|
if error.value is not None:
|
||||||
|
print(f" Value: {error.value}")
|
||||||
|
|
||||||
|
print("Validation Warnings:")
|
||||||
|
for warning in warnings:
|
||||||
|
print(f" {warning.field}: {warning.message}")
|
||||||
|
```
|
||||||
|
|
||||||
|
### Common Error Types
|
||||||
|
|
||||||
|
1. **Missing Required Fields**: Required configuration fields are missing
|
||||||
|
2. **Type Mismatch**: Value type doesn't match expected type
|
||||||
|
3. **Pattern Mismatch**: String value doesn't match required pattern
|
||||||
|
4. **Value Out of Range**: Numeric value outside allowed range
|
||||||
|
5. **Invalid Enum Value**: Value not in allowed enum list
|
||||||
|
|
||||||
|
## Best Practices
|
||||||
|
|
||||||
|
### Configuration File Organization
|
||||||
|
|
||||||
|
1. **Use Hierarchical Structure**: Organize related settings in logical groups
|
||||||
|
2. **Provide Defaults**: Always provide sensible default values
|
||||||
|
3. **Document Settings**: Include descriptions for complex settings
|
||||||
|
4. **Validate Early**: Validate configuration at startup
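
For example, a daemon entry point can fail fast when validation does not pass. This sketch only uses the documented `ConfigManager` API; the exit handling is illustrative rather than prescribed:

```python
import sys
from utils.config import ConfigManager

config_manager = ConfigManager("/etc/apt-ostree/config.yaml")
if config_manager.load_config() is None:
    # load_config() returns None when validation fails; report and stop early
    print(config_manager.format_validation_report(), file=sys.stderr)
    sys.exit(1)
```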
|
||||||
|
|
||||||
|
### Environment Variable Usage
|
||||||
|
|
||||||
|
1. **Use Consistent Naming**: Follow the `APT_OSTREE_` prefix convention
|
||||||
|
2. **Override Selectively**: Only override necessary settings
|
||||||
|
3. **Use Appropriate Types**: Ensure environment variable values match expected types
|
||||||
|
4. **Document Overrides**: Document environment variable usage
|
||||||
|
|
||||||
|
### Security Considerations
|
||||||
|
|
||||||
|
1. **File Permissions**: Ensure configuration files have appropriate permissions
|
||||||
|
2. **Sensitive Data**: Don't store sensitive data in configuration files
|
||||||
|
3. **Validation**: Always validate configuration before use
|
||||||
|
4. **Environment Variables**: Be cautious with environment variable overrides
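
As one way to act on the file-permission point, a deployment script could verify that the configuration file is not group- or world-accessible. This is only a sketch; the `0o600` policy is an assumption, not something the daemon enforces:

```python
import os
import stat

config_path = "/etc/apt-ostree/config.yaml"
mode = stat.S_IMODE(os.stat(config_path).st_mode)
if mode & (stat.S_IRWXG | stat.S_IRWXO):
    print(f"Warning: {config_path} is group/world accessible (mode {oct(mode)})")
    os.chmod(config_path, 0o600)  # restrict access to the owning user
```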
|
||||||
|
|
||||||
|
## Migration Guide
|
||||||
|
|
||||||
|
### From Basic Configuration
|
||||||
|
|
||||||
|
The enhanced configuration system is backward compatible with existing configuration files. No migration is required for basic usage.
|
||||||
|
|
||||||
|
### New Features
|
||||||
|
|
||||||
|
To take advantage of new features:
|
||||||
|
|
||||||
|
1. **Add Validation**: Use the validation methods to ensure configuration integrity
|
||||||
|
2. **Environment Variables**: Use environment variables for dynamic configuration
|
||||||
|
3. **Schema Export**: Export schemas for documentation and tooling
|
||||||
|
4. **Error Handling**: Implement proper error handling for validation failures
|
||||||
|
|
||||||
|
## Testing
|
||||||
|
|
||||||
|
### Running Configuration Tests
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Run the comprehensive test suite
|
||||||
|
python3 test_enhanced_config.py
|
||||||
|
```
|
||||||
|
|
||||||
|
### Test Coverage
|
||||||
|
|
||||||
|
The test suite covers:
|
||||||
|
|
||||||
|
- Basic configuration loading
|
||||||
|
- Configuration validation
|
||||||
|
- Environment variable integration
|
||||||
|
- File operations (load/save)
|
||||||
|
- Schema export
|
||||||
|
- Validation error reporting
|
||||||
|
- Configuration getters
|
||||||
|
|
||||||
|
### Manual Testing
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Test with custom configuration
|
||||||
|
export APT_OSTREE_DAEMON__LOGGING__LEVEL=DEBUG
|
||||||
|
python3 -c "
|
||||||
|
from utils.config import ConfigManager
|
||||||
|
cm = ConfigManager()
|
||||||
|
config = cm.load_config()
|
||||||
|
print(f'Log level: {cm.get(\"daemon.logging.level\")}')
|
||||||
|
"
|
||||||
|
```
|
||||||
|
|
||||||
|
## Troubleshooting
|
||||||
|
|
||||||
|
### Common Issues
|
||||||
|
|
||||||
|
1. **Configuration Not Loading**
|
||||||
|
- Check file permissions
|
||||||
|
- Verify YAML syntax
|
||||||
|
- Check for validation errors
|
||||||
|
|
||||||
|
2. **Environment Variables Not Applied**
|
||||||
|
- Verify prefix is `APT_OSTREE_`
|
||||||
|
- Check variable naming (double underscores `__` separate nesting levels; single underscores stay inside key names)
|
||||||
|
- Ensure proper type conversion
|
||||||
|
|
||||||
|
3. **Validation Errors**
|
||||||
|
- Review error messages for specific issues
|
||||||
|
- Check value ranges and patterns
|
||||||
|
- Verify required fields are present
|
||||||
|
|
||||||
|
4. **Schema Export Failures**
|
||||||
|
- Check file permissions for output directory
|
||||||
|
- Verify JSON serialization compatibility
|
||||||
|
- Review schema structure
|
||||||
|
|
||||||
|
### Debug Mode
|
||||||
|
|
||||||
|
Enable debug logging to troubleshoot configuration issues:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
export APT_OSTREE_DAEMON__LOGGING__LEVEL=DEBUG
|
||||||
|
```
|
||||||
|
|
||||||
|
## API Reference
|
||||||
|
|
||||||
|
### ConfigManager Class
|
||||||
|
|
||||||
|
#### Methods
|
||||||
|
|
||||||
|
- `load_config()`: Load and validate configuration
|
||||||
|
- `reload()`: Reload configuration from file
|
||||||
|
- `get(key, default=None)`: Get configuration value
|
||||||
|
- `set(key, value)`: Set configuration value
|
||||||
|
- `save()`: Save configuration to file
|
||||||
|
- `validate()`: Validate current configuration
|
||||||
|
- `export_schema(output_path)`: Export JSON schema
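
Taken together, a typical round trip with these methods looks like the following sketch (paths reuse the examples above):

```python
from utils.config import ConfigManager

cm = ConfigManager("/etc/apt-ostree/config.yaml")
if cm.load_config():
    cm.set('daemon.logging.level', 'WARNING')    # adjust a value
    if cm.validate() and cm.save():              # re-validate, then persist
        cm.export_schema("/tmp/config-schema.json")
```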
|
||||||
|
|
||||||
|
#### Properties
|
||||||
|
|
||||||
|
- `config`: Current configuration dictionary
|
||||||
|
- `config_path`: Configuration file path
|
||||||
|
- `validator`: Configuration validator instance
|
||||||
|
|
||||||
|
### ConfigValidator Class
|
||||||
|
|
||||||
|
#### Methods
|
||||||
|
|
||||||
|
- `validate_config(config, path="")`: Validate configuration
|
||||||
|
- `get_errors()`: Get validation errors
|
||||||
|
- `get_warnings()`: Get validation warnings
|
||||||
|
- `format_errors()`: Format errors as string
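
The validator can also be driven directly, as the test suite (`test_enhanced_config.py`) does. A minimal sketch, reusing the configuration held by a `ConfigManager`:

```python
from utils.config import ConfigManager, ConfigValidator

cm = ConfigManager("/tmp/example-config.yaml")
config = cm.load_config() or cm.config   # fall back to the in-memory defaults on failure

validator = ConfigValidator()
if validator.validate_config(config):
    print("Configuration is valid")
else:
    print(validator.format_errors())     # or iterate validator.get_errors()
```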
|
||||||
|
|
||||||
|
### ValidationError Class
|
||||||
|
|
||||||
|
#### Properties
|
||||||
|
|
||||||
|
- `field`: Field path with error
|
||||||
|
- `message`: Error message
|
||||||
|
- `value`: Invalid value (if any)
|
||||||
|
- `severity`: Error severity (error, warning, info)
|
||||||
|
|
||||||
|
## Future Enhancements
|
||||||
|
|
||||||
|
### Planned Features
|
||||||
|
|
||||||
|
1. **Configuration Templates**: Predefined configuration templates
|
||||||
|
2. **Dynamic Validation**: Runtime validation rules
|
||||||
|
3. **Configuration Migration**: Automatic configuration migration
|
||||||
|
4. **Configuration Backup**: Automatic configuration backup
|
||||||
|
5. **Web Interface**: Web-based configuration editor
|
||||||
|
|
||||||
|
### Extension Points
|
||||||
|
|
||||||
|
1. **Custom Validators**: User-defined validation functions
|
||||||
|
2. **Configuration Providers**: Multiple configuration sources
|
||||||
|
3. **Configuration Encryption**: Encrypted configuration storage
|
||||||
|
4. **Configuration Versioning**: Configuration version management
|
||||||
|
|
||||||
|
## Conclusion
|
||||||
|
|
||||||
|
The enhanced configuration management system provides a robust, flexible, and secure way to configure the apt-ostree daemon. With comprehensive validation, environment variable support, and detailed error reporting, it ensures reliable configuration handling in production environments.
|
||||||
|
|
||||||
|
For more information, see the test suite (`test_enhanced_config.py`) and the implementation in `utils/config.py`.
|
||||||
|
|
@@ -1,5 +1,30 @@
|
||||||
# apt-ostree Daemon Implementation
|
# apt-ostree Daemon Implementation
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Documentation Index / Table of Contents
|
||||||
|
|
||||||
|
- [Overview & Quick Start](#overview)
|
||||||
|
- [D-Bus Interface](#d-bus-interface)
|
||||||
|
- [Implementation Details](#implementation-details)
|
||||||
|
- [Development](#development)
|
||||||
|
- [Deployment](#deployment)
|
||||||
|
- [Troubleshooting](#troubleshooting)
|
||||||
|
|
||||||
|
### Core Documentation
|
||||||
|
|
||||||
|
- [CHANGELOG.md](./CHANGELOG.md): Project changelog and release history
|
||||||
|
- [CONFIGURATION_ENHANCEMENT.md](./CONFIGURATION_ENHANCEMENT.md): YAML configuration, schema validation, and environment variable integration
|
||||||
|
- [LOGGING_ENHANCEMENT.md](./LOGGING_ENHANCEMENT.md): Logging system design, rotation, and integration
|
||||||
|
- [SYSTEMD_USAGE.md](./SYSTEMD_USAGE.md): Systemd service integration, best practices, and troubleshooting
|
||||||
|
- [D-BUS.md](./D-BUS.md): D-Bus interface, method/property/signal documentation, and policy
|
||||||
|
- [daemon-notes.md](./daemon-notes.md): In-depth design notes, architecture, and technical commentary
|
||||||
|
- [IMPLEMENTATION_PLAN.md](./IMPLEMENTATION_PLAN.md): Implementation planning, roadmap, and task breakdown
|
||||||
|
- [LANGUAGE_CHOICE.md](./LANGUAGE_CHOICE.md): Rationale for language selection and future plans
|
||||||
|
- [README_RPM_OSTREE_COMPATIBILITY.md](./README_RPM_OSTREE_COMPATIBILITY.md): Compatibility notes and comparison with rpm-ostree
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
## Overview
|
## Overview
|
||||||
|
|
||||||
This directory contains the implementation of the apt-ostree daemon (`apt-ostree.py`), which provides the backend services for the apt-ostree package management system. The daemon implements a D-Bus interface for client communication and manages atomic transactions for package operations.
|
This directory contains the implementation of the apt-ostree daemon (`apt-ostree.py`), which provides the backend services for the apt-ostree package management system. The daemon implements a D-Bus interface for client communication and manages atomic transactions for package operations.
|
||||||
|
|
@@ -1,14 +1,572 @@
|
||||||
"""
|
"""
|
||||||
Configuration management
|
Enhanced configuration management with validation
|
||||||
"""
|
"""
|
||||||
|
|
||||||
import yaml
|
import yaml
|
||||||
import os
|
import os
|
||||||
import logging
|
import logging
|
||||||
from typing import Any, Optional, Dict
|
import re
|
||||||
|
import json
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Any, Optional, Dict, List, Union, Callable
|
||||||
|
from dataclasses import dataclass, field
|
||||||
|
from enum import Enum
|
||||||
|
|
||||||
|
class LogLevel(str, Enum):
|
||||||
|
"""Log level enumeration"""
|
||||||
|
DEBUG = "DEBUG"
|
||||||
|
INFO = "INFO"
|
||||||
|
WARNING = "WARNING"
|
||||||
|
ERROR = "ERROR"
|
||||||
|
CRITICAL = "CRITICAL"
|
||||||
|
|
||||||
|
class RotationStrategy(str, Enum):
|
||||||
|
"""Log rotation strategy enumeration"""
|
||||||
|
SIZE = "size"
|
||||||
|
TIME = "time"
|
||||||
|
HYBRID = "hybrid"
|
||||||
|
|
||||||
|
class UpdatePolicy(str, Enum):
|
||||||
|
"""Auto update policy enumeration"""
|
||||||
|
NONE = "none"
|
||||||
|
CHECK = "check"
|
||||||
|
DOWNLOAD = "download"
|
||||||
|
INSTALL = "install"
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class ValidationError:
|
||||||
|
"""Configuration validation error"""
|
||||||
|
field: str
|
||||||
|
message: str
|
||||||
|
value: Any = None
|
||||||
|
severity: str = "error" # error, warning, info
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class ConfigSchema:
|
||||||
|
"""Configuration schema definition"""
|
||||||
|
type: str
|
||||||
|
required: bool = False
|
||||||
|
default: Any = None
|
||||||
|
validator: Optional[Callable] = None
|
||||||
|
description: str = ""
|
||||||
|
allowed_values: Optional[List[Any]] = None
|
||||||
|
min_value: Optional[Union[int, float]] = None
|
||||||
|
max_value: Optional[Union[int, float]] = None
|
||||||
|
pattern: Optional[str] = None
|
||||||
|
nested_schema: Optional[Dict[str, 'ConfigSchema']] = None
|
||||||
|
|
||||||
|
class ConfigValidator:
|
||||||
|
"""Configuration validator with schema support"""
|
||||||
|
|
||||||
|
def __init__(self):
|
||||||
|
self.errors: List[ValidationError] = []
|
||||||
|
self.warnings: List[ValidationError] = []
|
||||||
|
self.schema = self._build_schema()
|
||||||
|
|
||||||
|
def _build_schema(self) -> Dict[str, ConfigSchema]:
|
||||||
|
"""Build configuration schema"""
|
||||||
|
return {
|
||||||
|
'daemon': ConfigSchema(
|
||||||
|
type='dict',
|
||||||
|
required=True,
|
||||||
|
nested_schema={
|
||||||
|
'dbus': ConfigSchema(
|
||||||
|
type='dict',
|
||||||
|
required=True,
|
||||||
|
nested_schema={
|
||||||
|
'bus_name': ConfigSchema(
|
||||||
|
type='str',
|
||||||
|
required=True,
|
||||||
|
pattern=r'^[a-zA-Z][a-zA-Z0-9_]*(\.[a-zA-Z][a-zA-Z0-9_]*)*$',
|
||||||
|
description="D-Bus bus name (e.g., org.debian.aptostree1)"
|
||||||
|
),
|
||||||
|
'object_path': ConfigSchema(
|
||||||
|
type='str',
|
||||||
|
required=True,
|
||||||
|
pattern=r'^/[a-zA-Z][a-zA-Z0-9_]*(\/[a-zA-Z][a-zA-Z0-9_]*)*$',
|
||||||
|
description="D-Bus object path (e.g., /org/debian/aptostree1)"
|
||||||
|
)
|
||||||
|
}
|
||||||
|
),
|
||||||
|
'concurrency': ConfigSchema(
|
||||||
|
type='dict',
|
||||||
|
required=True,
|
||||||
|
nested_schema={
|
||||||
|
'max_workers': ConfigSchema(
|
||||||
|
type='int',
|
||||||
|
required=True,
|
||||||
|
min_value=1,
|
||||||
|
max_value=32,
|
||||||
|
default=3,
|
||||||
|
description="Maximum number of concurrent workers"
|
||||||
|
),
|
||||||
|
'transaction_timeout': ConfigSchema(
|
||||||
|
type='int',
|
||||||
|
required=True,
|
||||||
|
min_value=30,
|
||||||
|
max_value=3600,
|
||||||
|
default=300,
|
||||||
|
description="Transaction timeout in seconds"
|
||||||
|
)
|
||||||
|
}
|
||||||
|
),
|
||||||
|
'logging': ConfigSchema(
|
||||||
|
type='dict',
|
||||||
|
required=True,
|
||||||
|
nested_schema={
|
||||||
|
'level': ConfigSchema(
|
||||||
|
type='str',
|
||||||
|
required=True,
|
||||||
|
allowed_values=[level.value for level in LogLevel],
|
||||||
|
default='INFO',
|
||||||
|
description="Log level"
|
||||||
|
),
|
||||||
|
'format': ConfigSchema(
|
||||||
|
type='str',
|
||||||
|
required=True,
|
||||||
|
allowed_values=['json', 'text'],
|
||||||
|
default='json',
|
||||||
|
description="Log format"
|
||||||
|
),
|
||||||
|
'file': ConfigSchema(
|
||||||
|
type='str',
|
||||||
|
required=True,
|
||||||
|
default='/var/log/apt-ostree/daemon.log',
|
||||||
|
description="Log file path"
|
||||||
|
),
|
||||||
|
'max_size': ConfigSchema(
|
||||||
|
type='str',
|
||||||
|
required=True,
|
||||||
|
pattern=r'^\d+[KMGT]?B$',
|
||||||
|
default='100MB',
|
||||||
|
description="Maximum log file size (e.g., 100MB, 1GB)"
|
||||||
|
),
|
||||||
|
'max_files': ConfigSchema(
|
||||||
|
type='int',
|
||||||
|
required=True,
|
||||||
|
min_value=1,
|
||||||
|
max_value=100,
|
||||||
|
default=5,
|
||||||
|
description="Maximum number of log files to keep"
|
||||||
|
),
|
||||||
|
'rotation_strategy': ConfigSchema(
|
||||||
|
type='str',
|
||||||
|
required=True,
|
||||||
|
allowed_values=[strategy.value for strategy in RotationStrategy],
|
||||||
|
default='size',
|
||||||
|
description="Log rotation strategy"
|
||||||
|
),
|
||||||
|
'rotation_interval': ConfigSchema(
|
||||||
|
type='int',
|
||||||
|
required=True,
|
||||||
|
min_value=1,
|
||||||
|
max_value=365,
|
||||||
|
default=1,
|
||||||
|
description="Rotation interval"
|
||||||
|
),
|
||||||
|
'rotation_unit': ConfigSchema(
|
||||||
|
type='str',
|
||||||
|
required=True,
|
||||||
|
allowed_values=['D', 'H', 'M'],
|
||||||
|
default='D',
|
||||||
|
description="Rotation unit (D=days, H=hours, M=minutes)"
|
||||||
|
),
|
||||||
|
'compression': ConfigSchema(
|
||||||
|
type='bool',
|
||||||
|
required=True,
|
||||||
|
default=True,
|
||||||
|
description="Enable log compression"
|
||||||
|
),
|
||||||
|
'correlation_id': ConfigSchema(
|
||||||
|
type='bool',
|
||||||
|
required=True,
|
||||||
|
default=True,
|
||||||
|
description="Enable correlation IDs"
|
||||||
|
),
|
||||||
|
'performance_monitoring': ConfigSchema(
|
||||||
|
type='bool',
|
||||||
|
required=True,
|
||||||
|
default=True,
|
||||||
|
description="Enable performance monitoring"
|
||||||
|
),
|
||||||
|
'cleanup_old_logs': ConfigSchema(
|
||||||
|
type='bool',
|
||||||
|
required=True,
|
||||||
|
default=True,
|
||||||
|
description="Enable automatic log cleanup"
|
||||||
|
),
|
||||||
|
'cleanup_days': ConfigSchema(
|
||||||
|
type='int',
|
||||||
|
required=True,
|
||||||
|
min_value=1,
|
||||||
|
max_value=365,
|
||||||
|
default=30,
|
||||||
|
description="Days to keep logs"
|
||||||
|
),
|
||||||
|
'include_hostname': ConfigSchema(
|
||||||
|
type='bool',
|
||||||
|
required=True,
|
||||||
|
default=True,
|
||||||
|
description="Include hostname in logs"
|
||||||
|
),
|
||||||
|
'include_version': ConfigSchema(
|
||||||
|
type='bool',
|
||||||
|
required=True,
|
||||||
|
default=True,
|
||||||
|
description="Include version in logs"
|
||||||
|
)
|
||||||
|
}
|
||||||
|
),
|
||||||
|
'auto_update_policy': ConfigSchema(
|
||||||
|
type='str',
|
||||||
|
required=True,
|
||||||
|
allowed_values=[policy.value for policy in UpdatePolicy],
|
||||||
|
default='none',
|
||||||
|
description="Automatic update policy"
|
||||||
|
)
|
||||||
|
}
|
||||||
|
),
|
||||||
|
'sysroot': ConfigSchema(
|
||||||
|
type='dict',
|
||||||
|
required=True,
|
||||||
|
nested_schema={
|
||||||
|
'path': ConfigSchema(
|
||||||
|
type='str',
|
||||||
|
required=True,
|
||||||
|
default='/',
|
||||||
|
description="System root path"
|
||||||
|
),
|
||||||
|
'repo_path': ConfigSchema(
|
||||||
|
type='str',
|
||||||
|
required=True,
|
||||||
|
default='/var/lib/ostree/repo',
|
||||||
|
description="OSTree repository path"
|
||||||
|
)
|
||||||
|
}
|
||||||
|
),
|
||||||
|
'shell_integration': ConfigSchema(
|
||||||
|
type='dict',
|
||||||
|
required=True,
|
||||||
|
nested_schema={
|
||||||
|
'script_path': ConfigSchema(
|
||||||
|
type='str',
|
||||||
|
required=True,
|
||||||
|
default='/usr/local/bin/apt-layer.sh',
|
||||||
|
description="Shell script path"
|
||||||
|
),
|
||||||
|
'timeout': ConfigSchema(
|
||||||
|
type='dict',
|
||||||
|
required=True,
|
||||||
|
nested_schema={
|
||||||
|
'install': ConfigSchema(
|
||||||
|
type='int',
|
||||||
|
required=True,
|
||||||
|
min_value=30,
|
||||||
|
max_value=3600,
|
||||||
|
default=300,
|
||||||
|
description="Install timeout in seconds"
|
||||||
|
),
|
||||||
|
'remove': ConfigSchema(
|
||||||
|
type='int',
|
||||||
|
required=True,
|
||||||
|
min_value=30,
|
||||||
|
max_value=3600,
|
||||||
|
default=300,
|
||||||
|
description="Remove timeout in seconds"
|
||||||
|
),
|
||||||
|
'composefs': ConfigSchema(
|
||||||
|
type='int',
|
||||||
|
required=True,
|
||||||
|
min_value=60,
|
||||||
|
max_value=7200,
|
||||||
|
default=600,
|
||||||
|
description="ComposeFS timeout in seconds"
|
||||||
|
),
|
||||||
|
'dkms': ConfigSchema(
|
||||||
|
type='int',
|
||||||
|
required=True,
|
||||||
|
min_value=300,
|
||||||
|
max_value=7200,
|
||||||
|
default=1800,
|
||||||
|
description="DKMS timeout in seconds"
|
||||||
|
)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
}
|
||||||
|
),
|
||||||
|
'hardware_detection': ConfigSchema(
|
||||||
|
type='dict',
|
||||||
|
required=True,
|
||||||
|
nested_schema={
|
||||||
|
'auto_configure': ConfigSchema(
|
||||||
|
type='bool',
|
||||||
|
required=True,
|
||||||
|
default=True,
|
||||||
|
description="Enable automatic hardware configuration"
|
||||||
|
),
|
||||||
|
'gpu_detection': ConfigSchema(
|
||||||
|
type='bool',
|
||||||
|
required=True,
|
||||||
|
default=True,
|
||||||
|
description="Enable GPU detection"
|
||||||
|
),
|
||||||
|
'cpu_detection': ConfigSchema(
|
||||||
|
type='bool',
|
||||||
|
required=True,
|
||||||
|
default=True,
|
||||||
|
description="Enable CPU detection"
|
||||||
|
),
|
||||||
|
'motherboard_detection': ConfigSchema(
|
||||||
|
type='bool',
|
||||||
|
required=True,
|
||||||
|
default=True,
|
||||||
|
description="Enable motherboard detection"
|
||||||
|
)
|
||||||
|
}
|
||||||
|
),
|
||||||
|
'dkms': ConfigSchema(
|
||||||
|
type='dict',
|
||||||
|
required=True,
|
||||||
|
nested_schema={
|
||||||
|
'enabled': ConfigSchema(
|
||||||
|
type='bool',
|
||||||
|
required=True,
|
||||||
|
default=True,
|
||||||
|
description="Enable DKMS support"
|
||||||
|
),
|
||||||
|
'auto_rebuild': ConfigSchema(
|
||||||
|
type='bool',
|
||||||
|
required=True,
|
||||||
|
default=True,
|
||||||
|
description="Enable automatic DKMS rebuild"
|
||||||
|
),
|
||||||
|
'build_timeout': ConfigSchema(
|
||||||
|
type='int',
|
||||||
|
required=True,
|
||||||
|
min_value=300,
|
||||||
|
max_value=7200,
|
||||||
|
default=3600,
|
||||||
|
description="DKMS build timeout in seconds"
|
||||||
|
),
|
||||||
|
'kernel_hooks': ConfigSchema(
|
||||||
|
type='bool',
|
||||||
|
required=True,
|
||||||
|
default=True,
|
||||||
|
description="Enable kernel hooks"
|
||||||
|
)
|
||||||
|
}
|
||||||
|
),
|
||||||
|
'security': ConfigSchema(
|
||||||
|
type='dict',
|
||||||
|
required=True,
|
||||||
|
nested_schema={
|
||||||
|
'polkit_required': ConfigSchema(
|
||||||
|
type='bool',
|
||||||
|
required=True,
|
||||||
|
default=True,
|
||||||
|
description="Require PolicyKit authorization"
|
||||||
|
),
|
||||||
|
'apparmor_profile': ConfigSchema(
|
||||||
|
type='str',
|
||||||
|
required=True,
|
||||||
|
default='/etc/apparmor.d/apt-ostree',
|
||||||
|
description="AppArmor profile path"
|
||||||
|
),
|
||||||
|
'selinux_context': ConfigSchema(
|
||||||
|
type='str',
|
||||||
|
required=True,
|
||||||
|
default='system_u:system_r:apt_ostree_t:s0',
|
||||||
|
description="SELinux context"
|
||||||
|
),
|
||||||
|
'privilege_separation': ConfigSchema(
|
||||||
|
type='bool',
|
||||||
|
required=True,
|
||||||
|
default=True,
|
||||||
|
description="Enable privilege separation"
|
||||||
|
)
|
||||||
|
}
|
||||||
|
),
|
||||||
|
'performance': ConfigSchema(
|
||||||
|
type='dict',
|
||||||
|
required=True,
|
||||||
|
nested_schema={
|
||||||
|
'cache_enabled': ConfigSchema(
|
||||||
|
type='bool',
|
||||||
|
required=True,
|
||||||
|
default=True,
|
||||||
|
description="Enable caching"
|
||||||
|
),
|
||||||
|
'cache_ttl': ConfigSchema(
|
||||||
|
type='int',
|
||||||
|
required=True,
|
||||||
|
min_value=60,
|
||||||
|
max_value=86400,
|
||||||
|
default=3600,
|
||||||
|
description="Cache TTL in seconds"
|
||||||
|
),
|
||||||
|
'parallel_operations': ConfigSchema(
|
||||||
|
type='bool',
|
||||||
|
required=True,
|
||||||
|
default=True,
|
||||||
|
description="Enable parallel operations"
|
||||||
|
)
|
||||||
|
}
|
||||||
|
),
|
||||||
|
'experimental': ConfigSchema(
|
||||||
|
type='dict',
|
||||||
|
required=True,
|
||||||
|
nested_schema={
|
||||||
|
'composefs': ConfigSchema(
|
||||||
|
type='bool',
|
||||||
|
required=True,
|
||||||
|
default=False,
|
||||||
|
description="Enable ComposeFS (experimental)"
|
||||||
|
),
|
||||||
|
'hardware_detection': ConfigSchema(
|
||||||
|
type='bool',
|
||||||
|
required=True,
|
||||||
|
default=False,
|
||||||
|
description="Enable hardware detection (experimental)"
|
||||||
|
)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
def validate_config(self, config: Dict[str, Any], path: str = "") -> bool:
|
||||||
|
"""Validate configuration against schema"""
|
||||||
|
self.errors.clear()
|
||||||
|
self.warnings.clear()
|
||||||
|
|
||||||
|
self._validate_dict(config, self.schema, path)
|
||||||
|
|
||||||
|
return len(self.errors) == 0
|
||||||
|
|
||||||
|
def _validate_dict(self, data: Dict[str, Any], schema: Dict[str, ConfigSchema], path: str):
|
||||||
|
"""Validate dictionary against schema"""
|
||||||
|
for key, field_schema in schema.items():
|
||||||
|
field_path = f"{path}.{key}" if path else key
|
||||||
|
|
||||||
|
if key not in data:
|
||||||
|
if field_schema.required:
|
||||||
|
self.errors.append(ValidationError(
|
||||||
|
field=field_path,
|
||||||
|
message=f"Required field '{key}' is missing",
|
||||||
|
severity="error"
|
||||||
|
))
|
||||||
|
elif field_schema.default is not None:
|
||||||
|
# Add default value only if not already set
|
||||||
|
data[key] = field_schema.default
|
||||||
|
continue
|
||||||
|
|
||||||
|
value = data[key]
|
||||||
|
self._validate_value(value, field_schema, field_path)
|
||||||
|
|
||||||
|
def _validate_value(self, value: Any, schema: ConfigSchema, path: str):
|
||||||
|
"""Validate a single value against schema"""
|
||||||
|
# Type validation
|
||||||
|
if schema.type == 'str' and not isinstance(value, str):
|
||||||
|
self.errors.append(ValidationError(
|
||||||
|
field=path,
|
||||||
|
message=f"Expected string, got {type(value).__name__}",
|
||||||
|
value=value
|
||||||
|
))
|
||||||
|
return
|
||||||
|
elif schema.type == 'int' and not isinstance(value, int):
|
||||||
|
self.errors.append(ValidationError(
|
||||||
|
field=path,
|
||||||
|
message=f"Expected integer, got {type(value).__name__}",
|
||||||
|
value=value
|
||||||
|
))
|
||||||
|
return
|
||||||
|
elif schema.type == 'bool' and not isinstance(value, bool):
|
||||||
|
self.errors.append(ValidationError(
|
||||||
|
field=path,
|
||||||
|
message=f"Expected boolean, got {type(value).__name__}",
|
||||||
|
value=value
|
||||||
|
))
|
||||||
|
return
|
||||||
|
elif schema.type == 'dict' and not isinstance(value, dict):
|
||||||
|
self.errors.append(ValidationError(
|
||||||
|
field=path,
|
||||||
|
message=f"Expected dictionary, got {type(value).__name__}",
|
||||||
|
value=value
|
||||||
|
))
|
||||||
|
return
|
||||||
|
|
||||||
|
# Value validation
|
||||||
|
if schema.type == 'str':
|
||||||
|
if schema.pattern and not re.match(schema.pattern, value):
|
||||||
|
self.errors.append(ValidationError(
|
||||||
|
field=path,
|
||||||
|
message=f"Value does not match pattern: {schema.pattern}",
|
||||||
|
value=value
|
||||||
|
))
|
||||||
|
|
||||||
|
if schema.allowed_values and value not in schema.allowed_values:
|
||||||
|
self.errors.append(ValidationError(
|
||||||
|
field=path,
|
||||||
|
message=f"Value must be one of: {schema.allowed_values}",
|
||||||
|
value=value
|
||||||
|
))
|
||||||
|
|
||||||
|
elif schema.type == 'int':
|
||||||
|
if schema.min_value is not None and value < schema.min_value:
|
||||||
|
self.errors.append(ValidationError(
|
||||||
|
field=path,
|
||||||
|
message=f"Value must be >= {schema.min_value}",
|
||||||
|
value=value
|
||||||
|
))
|
||||||
|
|
||||||
|
if schema.max_value is not None and value > schema.max_value:
|
||||||
|
self.errors.append(ValidationError(
|
||||||
|
field=path,
|
||||||
|
message=f"Value must be <= {schema.max_value}",
|
||||||
|
value=value
|
||||||
|
))
|
||||||
|
|
||||||
|
# Nested validation
|
||||||
|
if schema.type == 'dict' and schema.nested_schema:
|
||||||
|
self._validate_dict(value, schema.nested_schema, path)
|
||||||
|
|
||||||
|
# Custom validator
|
||||||
|
if schema.validator:
|
||||||
|
try:
|
||||||
|
if not schema.validator(value):
|
||||||
|
self.errors.append(ValidationError(
|
||||||
|
field=path,
|
||||||
|
message="Custom validation failed",
|
||||||
|
value=value
|
||||||
|
))
|
||||||
|
except Exception as e:
|
||||||
|
self.errors.append(ValidationError(
|
||||||
|
field=path,
|
||||||
|
message=f"Custom validation error: {e}",
|
||||||
|
value=value
|
||||||
|
))
|
||||||
|
|
||||||
|
def get_errors(self) -> List[ValidationError]:
|
||||||
|
"""Get validation errors"""
|
||||||
|
return self.errors
|
||||||
|
|
||||||
|
def get_warnings(self) -> List[ValidationError]:
|
||||||
|
"""Get validation warnings"""
|
||||||
|
return self.warnings
|
||||||
|
|
||||||
|
def format_errors(self) -> str:
|
||||||
|
"""Format validation errors as string"""
|
||||||
|
if not self.errors:
|
||||||
|
return "No validation errors"
|
||||||
|
|
||||||
|
lines = ["Configuration validation errors:"]
|
||||||
|
for error in self.errors:
|
||||||
|
lines.append(f" {error.field}: {error.message}")
|
||||||
|
if error.value is not None:
|
||||||
|
lines.append(f" Value: {error.value}")
|
||||||
|
|
||||||
|
return "\n".join(lines)
|
||||||
|
|
||||||
class ConfigManager:
|
class ConfigManager:
|
||||||
"""Configuration management for apt-ostree daemon"""
|
"""Enhanced configuration management for apt-ostree daemon"""
|
||||||
|
|
||||||
DEFAULT_CONFIG = {
|
DEFAULT_CONFIG = {
|
||||||
'daemon': {
|
'daemon': {
|
||||||
|
|
@@ -26,9 +584,9 @@ class ConfigManager:
|
||||||
'file': '/var/log/apt-ostree/daemon.log',
|
'file': '/var/log/apt-ostree/daemon.log',
|
||||||
'max_size': '100MB',
|
'max_size': '100MB',
|
||||||
'max_files': 5,
|
'max_files': 5,
|
||||||
'rotation_strategy': 'size', # 'size', 'time', 'hybrid'
|
'rotation_strategy': 'size',
|
||||||
'rotation_interval': 1, # For time-based rotation
|
'rotation_interval': 1,
|
||||||
'rotation_unit': 'D', # D=days, H=hours, M=minutes
|
'rotation_unit': 'D',
|
||||||
'compression': True,
|
'compression': True,
|
||||||
'correlation_id': True,
|
'correlation_id': True,
|
||||||
'performance_monitoring': True,
|
'performance_monitoring': True,
|
||||||
|
|
@@ -85,13 +643,16 @@ class ConfigManager:
|
||||||
self.config_path = config_path
|
self.config_path = config_path
|
||||||
self.config = {}
|
self.config = {}
|
||||||
self.logger = logging.getLogger('config')
|
self.logger = logging.getLogger('config')
|
||||||
|
self.validator = ConfigValidator()
|
||||||
|
self.env_prefix = "APT_OSTREE_"
|
||||||
|
|
||||||
# Load default configuration
|
# Load default configuration
|
||||||
self._load_defaults()
|
self._load_defaults()
|
||||||
|
|
||||||
def load_config(self) -> Optional[Dict[str, Any]]:
|
def load_config(self) -> Optional[Dict[str, Any]]:
|
||||||
"""Load configuration from file"""
|
"""Load configuration from file with validation"""
|
||||||
try:
|
try:
|
||||||
|
# Load from file if exists
|
||||||
if os.path.exists(self.config_path):
|
if os.path.exists(self.config_path):
|
||||||
with open(self.config_path, 'r') as f:
|
with open(self.config_path, 'r') as f:
|
||||||
user_config = yaml.safe_load(f)
|
user_config = yaml.safe_load(f)
|
||||||
|
|
@@ -100,12 +661,63 @@ class ConfigManager:
|
||||||
else:
|
else:
|
||||||
self.logger.info(f"Configuration file not found, using defaults")
|
self.logger.info(f"Configuration file not found, using defaults")
|
||||||
|
|
||||||
|
# Apply environment variables
|
||||||
|
self._apply_environment_variables()
|
||||||
|
|
||||||
|
# Validate configuration
|
||||||
|
if not self.validator.validate_config(self.config):
|
||||||
|
self.logger.error("Configuration validation failed:")
|
||||||
|
self.logger.error(self.validator.format_errors())
|
||||||
|
return None
|
||||||
|
|
||||||
|
# Log warnings
|
||||||
|
warnings = self.validator.get_warnings()
|
||||||
|
if warnings:
|
||||||
|
self.logger.warning("Configuration warnings:")
|
||||||
|
for warning in warnings:
|
||||||
|
self.logger.warning(f" {warning.field}: {warning.message}")
|
||||||
|
|
||||||
return self.config
|
return self.config
|
||||||
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
self.logger.error(f"Failed to load configuration: {e}")
|
self.logger.error(f"Failed to load configuration: {e}")
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
def _apply_environment_variables(self):
|
||||||
|
"""
|
||||||
|
Apply environment variables to configuration using double underscore (__) for nesting.
|
||||||
|
Example:
|
||||||
|
APT_OSTREE_DAEMON__CONCURRENCY__MAX_WORKERS=8
|
||||||
|
-> config['daemon']['concurrency']['max_workers'] = 8
|
||||||
|
Single underscores in leaf keys are preserved.
|
||||||
|
"""
|
||||||
|
for key, value in os.environ.items():
|
||||||
|
if key.startswith(self.env_prefix):
|
||||||
|
# Remove prefix
|
||||||
|
stripped_key = key[len(self.env_prefix):]
|
||||||
|
# Split on double underscores for nesting
|
||||||
|
parts = stripped_key.split('__')
|
||||||
|
# Lowercase all parts
|
||||||
|
parts = [p.lower() for p in parts]
|
||||||
|
# Traverse config dict to the correct nesting
|
||||||
|
current = self.config
|
||||||
|
for part in parts[:-1]:
|
||||||
|
if part not in current or not isinstance(current[part], dict):
|
||||||
|
current[part] = {}
|
||||||
|
current = current[part]
|
||||||
|
leaf_key = parts[-1]
|
||||||
|
# Type conversion
|
||||||
|
if isinstance(value, str):
|
||||||
|
if value.lower() in ('true', 'false'):
|
||||||
|
value = value.lower() == 'true'
|
||||||
|
elif value.isdigit():
|
||||||
|
value = int(value)
|
||||||
|
elif value.replace('.', '', 1).isdigit() and value.count('.') < 2:
|
||||||
|
value = float(value)
|
||||||
|
# Set the value
|
||||||
|
self.logger.debug(f"Applying env var: {key} -> {'.'.join(parts)} = {value}")
|
||||||
|
current[leaf_key] = value
|
||||||
|
|
||||||
def reload(self) -> bool:
|
def reload(self) -> bool:
|
||||||
"""Reload configuration from file"""
|
"""Reload configuration from file"""
|
||||||
try:
|
try:
|
||||||
|
|
@@ -163,6 +775,12 @@ class ConfigManager:
|
||||||
def save(self) -> bool:
|
def save(self) -> bool:
|
||||||
"""Save configuration to file"""
|
"""Save configuration to file"""
|
||||||
try:
|
try:
|
||||||
|
# Validate before saving
|
||||||
|
if not self.validator.validate_config(self.config):
|
||||||
|
self.logger.error("Cannot save invalid configuration:")
|
||||||
|
self.logger.error(self.validator.format_errors())
|
||||||
|
return False
|
||||||
|
|
||||||
# Ensure directory exists
|
# Ensure directory exists
|
||||||
os.makedirs(os.path.dirname(self.config_path), exist_ok=True)
|
os.makedirs(os.path.dirname(self.config_path), exist_ok=True)
|
||||||
|
|
||||||
|
|
@@ -179,40 +797,73 @@ class ConfigManager:
|
||||||
|
|
||||||
def validate(self) -> bool:
|
def validate(self) -> bool:
|
||||||
"""Validate configuration"""
|
"""Validate configuration"""
|
||||||
|
return self.validator.validate_config(self.config)
|
||||||
|
|
||||||
|
def get_validation_errors(self) -> List[ValidationError]:
|
||||||
|
"""Get configuration validation errors"""
|
||||||
|
return self.validator.get_errors()
|
||||||
|
|
||||||
|
def get_validation_warnings(self) -> List[ValidationError]:
|
||||||
|
"""Get configuration validation warnings"""
|
||||||
|
return self.validator.get_warnings()
|
||||||
|
|
||||||
|
def format_validation_report(self) -> str:
|
||||||
|
"""Format validation report as string"""
|
||||||
|
return self.validator.format_errors()
|
||||||
|
|
||||||
|
def export_schema(self, output_path: str) -> bool:
|
||||||
|
"""Export configuration schema to JSON"""
|
||||||
try:
|
try:
|
||||||
# Check required fields
|
schema = self._build_json_schema()
|
||||||
required_fields = [
|
with open(output_path, 'w') as f:
|
||||||
'daemon.dbus.bus_name',
|
json.dump(schema, f, indent=2)
|
||||||
'daemon.dbus.object_path',
|
|
||||||
'sysroot.path',
|
|
||||||
'sysroot.repo_path'
|
|
||||||
]
|
|
||||||
|
|
||||||
for field in required_fields:
|
|
||||||
if self.get(field) is None:
|
|
||||||
self.logger.error(f"Missing required configuration field: {field}")
|
|
||||||
return False
|
|
||||||
|
|
||||||
# Validate values
|
|
||||||
if not isinstance(self.get('daemon.concurrency.max_workers'), int):
|
|
||||||
self.logger.error("daemon.concurrency.max_workers must be an integer")
|
|
||||||
return False
|
|
||||||
|
|
||||||
if not isinstance(self.get('daemon.concurrency.transaction_timeout'), int):
|
|
||||||
self.logger.error("daemon.concurrency.transaction_timeout must be an integer")
|
|
||||||
return False
|
|
||||||
|
|
||||||
# Validate paths
|
|
||||||
sysroot_path = self.get('sysroot.path')
|
|
||||||
if not os.path.exists(sysroot_path):
|
|
||||||
self.logger.warning(f"Sysroot path does not exist: {sysroot_path}")
|
|
||||||
|
|
||||||
return True
|
return True
|
||||||
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
self.logger.error(f"Configuration validation failed: {e}")
|
self.logger.error(f"Failed to export schema: {e}")
|
||||||
return False
|
return False
|
||||||
|
|
||||||
|
def _build_json_schema(self) -> Dict[str, Any]:
|
||||||
|
"""Build JSON schema from configuration schema"""
|
||||||
|
def schema_to_json(schema: Dict[str, ConfigSchema]) -> Dict[str, Any]:
|
||||||
|
json_schema = {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {},
|
||||||
|
"required": []
|
||||||
|
}
|
||||||
|
|
||||||
|
for key, field_schema in schema.items():
|
||||||
|
prop = {"type": field_schema.type}
|
||||||
|
|
||||||
|
if field_schema.description:
|
||||||
|
prop["description"] = field_schema.description
|
||||||
|
|
||||||
|
if field_schema.default is not None:
|
||||||
|
prop["default"] = field_schema.default
|
||||||
|
|
||||||
|
if field_schema.allowed_values:
|
||||||
|
prop["enum"] = field_schema.allowed_values
|
||||||
|
|
||||||
|
if field_schema.min_value is not None:
|
||||||
|
prop["minimum"] = field_schema.min_value
|
||||||
|
|
||||||
|
if field_schema.max_value is not None:
|
||||||
|
prop["maximum"] = field_schema.max_value
|
||||||
|
|
||||||
|
if field_schema.pattern:
|
||||||
|
prop["pattern"] = field_schema.pattern
|
||||||
|
|
||||||
|
if field_schema.nested_schema:
|
||||||
|
prop.update(schema_to_json(field_schema.nested_schema))
|
||||||
|
|
||||||
|
json_schema["properties"][key] = prop
|
||||||
|
|
||||||
|
if field_schema.required:
|
||||||
|
json_schema["required"].append(key)
|
||||||
|
|
||||||
|
return json_schema
|
||||||
|
|
||||||
|
return schema_to_json(self.validator.schema)
|
||||||
|
|
||||||
def _load_defaults(self):
|
def _load_defaults(self):
|
||||||
"""Load default configuration values"""
|
"""Load default configuration values"""
|
||||||
self.config = self.DEFAULT_CONFIG.copy()
|
self.config = self.DEFAULT_CONFIG.copy()
|
||||||
|
|
@@ -229,6 +880,7 @@ class ConfigManager:
|
||||||
|
|
||||||
return result
|
return result
|
||||||
|
|
||||||
|
# Configuration getters (unchanged for backward compatibility)
|
||||||
def get_dbus_config(self) -> Dict[str, Any]:
|
def get_dbus_config(self) -> Dict[str, Any]:
|
||||||
"""Get D-Bus configuration"""
|
"""Get D-Bus configuration"""
|
||||||
return {
|
return {
|
||||||
|
|
|
||||||
test_enhanced_config.py (new file, 488 lines)
@@ -0,0 +1,488 @@
|
||||||
|
#!/usr/bin/env python3
|
||||||
|
"""
|
||||||
|
Test script for enhanced configuration management
|
||||||
|
"""
|
||||||
|
|
||||||
|
import os
|
||||||
|
import tempfile
|
||||||
|
import yaml
|
||||||
|
import json
|
||||||
|
import sys
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
# Add the apt-ostree module to path
|
||||||
|
sys.path.insert(0, str(Path(__file__).parent / "src/apt-ostree.py/python"))
|
||||||
|
|
||||||
|
from utils.config import ConfigManager, ConfigValidator, ValidationError
|
||||||
|
|
||||||
|
def test_basic_configuration():
|
||||||
|
"""Test basic configuration loading and validation"""
|
||||||
|
|
||||||
|
print("=== Testing Basic Configuration ===\n")
|
||||||
|
|
||||||
|
# Create test configuration
|
||||||
|
config_manager = ConfigManager("/tmp/test-config.yaml")
|
||||||
|
|
||||||
|
# Test default configuration
|
||||||
|
config = config_manager.load_config()
|
||||||
|
if config:
|
||||||
|
print("✅ Default configuration loaded successfully")
|
||||||
|
print(f" D-Bus bus name: {config_manager.get('daemon.dbus.bus_name')}")
|
||||||
|
print(f" Log level: {config_manager.get('daemon.logging.level')}")
|
||||||
|
print(f" Max workers: {config_manager.get('daemon.concurrency.max_workers')}")
|
||||||
|
else:
|
||||||
|
print("❌ Failed to load default configuration")
|
||||||
|
return False
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
|
def test_configuration_validation():
|
||||||
|
"""Test configuration validation"""
|
||||||
|
|
||||||
|
print("\n=== Testing Configuration Validation ===\n")
|
||||||
|
|
||||||
|
# Test valid configuration
|
||||||
|
valid_config = {
|
||||||
|
'daemon': {
|
||||||
|
'dbus': {
|
||||||
|
'bus_name': 'org.debian.aptostree1',
|
||||||
|
'object_path': '/org/debian/aptostree1'
|
||||||
|
},
|
||||||
|
'concurrency': {
|
||||||
|
'max_workers': 5,
|
||||||
|
'transaction_timeout': 600
|
||||||
|
},
|
||||||
|
'logging': {
|
||||||
|
'level': 'DEBUG',
|
||||||
|
'format': 'json',
|
||||||
|
'file': '/var/log/apt-ostree/test.log',
|
||||||
|
'max_size': '50MB',
|
||||||
|
'max_files': 3,
|
||||||
|
'rotation_strategy': 'size',
|
||||||
|
'rotation_interval': 1,
|
||||||
|
'rotation_unit': 'D',
|
||||||
|
'compression': True,
|
||||||
|
'correlation_id': True,
|
||||||
|
'performance_monitoring': True,
|
||||||
|
'cleanup_old_logs': True,
|
||||||
|
'cleanup_days': 7,
|
||||||
|
'include_hostname': True,
|
||||||
|
'include_version': True
|
||||||
|
},
|
||||||
|
'auto_update_policy': 'check'
|
||||||
|
},
|
||||||
|
'sysroot': {
|
||||||
|
'path': '/',
|
||||||
|
'repo_path': '/var/lib/ostree/repo'
|
||||||
|
},
|
||||||
|
'shell_integration': {
|
||||||
|
'script_path': '/usr/local/bin/apt-layer.sh',
|
||||||
|
'timeout': {
|
||||||
|
'install': 300,
|
||||||
|
'remove': 300,
|
||||||
|
'composefs': 600,
|
||||||
|
'dkms': 1800
|
||||||
|
}
|
||||||
|
},
|
||||||
|
'hardware_detection': {
|
||||||
|
'auto_configure': True,
|
||||||
|
'gpu_detection': True,
|
||||||
|
'cpu_detection': True,
|
||||||
|
'motherboard_detection': True
|
||||||
|
},
|
||||||
|
'dkms': {
|
||||||
|
'enabled': True,
|
||||||
|
'auto_rebuild': True,
|
||||||
|
'build_timeout': 3600,
|
||||||
|
'kernel_hooks': True
|
||||||
|
},
|
||||||
|
'security': {
|
||||||
|
'polkit_required': True,
|
||||||
|
'apparmor_profile': '/etc/apparmor.d/apt-ostree',
|
||||||
|
'selinux_context': 'system_u:system_r:apt_ostree_t:s0',
|
||||||
|
'privilege_separation': True
|
||||||
|
},
|
||||||
|
'performance': {
|
||||||
|
'cache_enabled': True,
|
||||||
|
'cache_ttl': 3600,
|
||||||
|
'parallel_operations': True
|
||||||
|
},
|
||||||
|
'experimental': {
|
||||||
|
'composefs': False,
|
||||||
|
'hardware_detection': False
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
validator = ConfigValidator()
|
||||||
|
is_valid = validator.validate_config(valid_config)
|
||||||
|
|
||||||
|
if is_valid:
|
||||||
|
print("✅ Valid configuration passed validation")
|
||||||
|
else:
|
||||||
|
print("❌ Valid configuration failed validation:")
|
||||||
|
print(validator.format_errors())
|
||||||
|
return False
|
||||||
|
|
||||||
|
# Test invalid configurations
|
||||||
|
print("\nTesting invalid configurations:")
|
||||||
|
|
||||||
|
# Test invalid log level
|
||||||
|
invalid_config = valid_config.copy()
|
||||||
|
invalid_config['daemon']['logging']['level'] = 'INVALID_LEVEL'
|
||||||
|
|
||||||
|
is_valid = validator.validate_config(invalid_config)
|
||||||
|
if not is_valid:
|
||||||
|
print("✅ Correctly rejected invalid log level")
|
||||||
|
else:
|
||||||
|
print("❌ Should have rejected invalid log level")
|
||||||
|
return False
|
||||||
|
|
||||||
|
# Test invalid max_workers
|
||||||
|
invalid_config = valid_config.copy()
|
||||||
|
invalid_config['daemon']['concurrency']['max_workers'] = 100 # Too high
|
||||||
|
|
||||||
|
is_valid = validator.validate_config(invalid_config)
|
||||||
|
if not is_valid:
|
||||||
|
print("✅ Correctly rejected invalid max_workers")
|
||||||
|
else:
|
||||||
|
print("❌ Should have rejected invalid max_workers")
|
||||||
|
return False
|
||||||
|
|
||||||
|
# Test invalid D-Bus bus name
|
||||||
|
invalid_config = valid_config.copy()
|
||||||
|
invalid_config['daemon']['dbus']['bus_name'] = 'invalid-bus-name'
|
||||||
|
|
||||||
|
is_valid = validator.validate_config(invalid_config)
|
||||||
|
if not is_valid:
|
||||||
|
print("✅ Correctly rejected invalid D-Bus bus name")
|
||||||
|
else:
|
||||||
|
print("❌ Should have rejected invalid D-Bus bus name")
|
||||||
|
return False
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
|
def test_environment_variables():
|
||||||
|
"""Test environment variable integration"""
|
||||||
|
|
||||||
|
print("\n=== Testing Environment Variables ===\n")
|
||||||
|
|
||||||
|
# Set test environment variables (double underscore for nesting)
|
||||||
|
os.environ['APT_OSTREE_DAEMON__LOGGING__LEVEL'] = 'DEBUG'
|
||||||
|
os.environ['APT_OSTREE_DAEMON__CONCURRENCY__MAX_WORKERS'] = '8'
|
||||||
|
os.environ['APT_OSTREE_DAEMON__LOGGING__COMPRESSION'] = 'false'
|
||||||
|
os.environ['APT_OSTREE_PERFORMANCE__CACHE_TTL'] = '7200'
|
||||||
|
|
||||||
|
# Create config manager and load config
|
||||||
|
config_manager = ConfigManager("/tmp/test-env-config.yaml")
|
||||||
|
config = config_manager.load_config()
|
||||||
|
|
||||||
|
if config:
|
||||||
|
print("✅ Configuration loaded with environment variables")
|
||||||
|
print(f" Log level from env: {config_manager.get('daemon.logging.level')}")
|
||||||
|
print(f" Max workers from env: {config_manager.get('daemon.concurrency.max_workers')}")
|
||||||
|
print(f" Compression from env: {config_manager.get('daemon.logging.compression')}")
|
||||||
|
print(f" Cache TTL from env: {config_manager.get('performance.cache_ttl')}")
|
||||||
|
|
||||||
|
# Verify environment variables were applied
|
||||||
|
if (config_manager.get('daemon.logging.level') == 'DEBUG' and
|
||||||
|
config_manager.get('daemon.concurrency.max_workers') == 8 and
|
||||||
|
config_manager.get('daemon.logging.compression') == False and
|
||||||
|
config_manager.get('performance.cache_ttl') == 7200):
|
||||||
|
print("✅ Environment variables correctly applied")
|
||||||
|
else:
|
||||||
|
print("❌ Environment variables not applied correctly")
|
||||||
|
print(f" Expected: DEBUG, got: {config_manager.get('daemon.logging.level')}")
|
||||||
|
print(f" Expected: 8, got: {config_manager.get('daemon.concurrency.max_workers')}")
|
||||||
|
print(f" Expected: False, got: {config_manager.get('daemon.logging.compression')}")
|
||||||
|
print(f" Expected: 7200, got: {config_manager.get('performance.cache_ttl')}")
|
||||||
|
return False
|
||||||
|
else:
|
||||||
|
print("❌ Failed to load configuration with environment variables")
|
||||||
|
return False
|
||||||
|
|
||||||
|
# Clean up environment variables
|
||||||
|
for key in [
|
||||||
|
'APT_OSTREE_DAEMON__LOGGING__LEVEL',
|
||||||
|
'APT_OSTREE_DAEMON__CONCURRENCY__MAX_WORKERS',
|
||||||
|
'APT_OSTREE_DAEMON__LOGGING__COMPRESSION',
|
||||||
|
'APT_OSTREE_PERFORMANCE__CACHE_TTL']:
|
||||||
|
if key in os.environ:
|
||||||
|
del os.environ[key]
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
|
def test_configuration_file_operations():
|
||||||
|
"""Test configuration file operations"""
|
||||||
|
|
||||||
|
print("\n=== Testing Configuration File Operations ===\n")
|
||||||
|
|
||||||
|
# Create temporary config file
|
||||||
|
with tempfile.NamedTemporaryFile(mode='w', suffix='.yaml', delete=False) as f:
|
||||||
|
test_config = {
|
||||||
|
'daemon': {
|
||||||
|
'logging': {
|
||||||
|
'level': 'WARNING',
|
||||||
|
'file': '/tmp/test.log'
|
||||||
|
},
|
||||||
|
'concurrency': {
|
||||||
|
'max_workers': 4
|
||||||
|
}
|
||||||
|
},
|
||||||
|
'experimental': {
|
||||||
|
'composefs': True
|
||||||
|
}
|
||||||
|
}
|
||||||
|
yaml.dump(test_config, f)
|
||||||
|
config_path = f.name
|
||||||
|
|
||||||
|
try:
|
||||||
|
# Load configuration from file
|
||||||
|
config_manager = ConfigManager(config_path)
|
||||||
|
config = config_manager.load_config()
|
||||||
|
|
||||||
|
if config:
|
||||||
|
print("✅ Configuration loaded from file")
|
||||||
|
print(f" Log level: {config_manager.get('daemon.logging.level')}")
|
||||||
|
print(f" Max workers: {config_manager.get('daemon.concurrency.max_workers')}")
|
||||||
|
print(f" ComposeFS experimental: {config_manager.get('experimental.composefs')}")
|
||||||
|
|
||||||
|
# Test that file values override defaults
|
||||||
|
if (config_manager.get('daemon.logging.level') == 'WARNING' and
|
||||||
|
config_manager.get('daemon.concurrency.max_workers') == 4 and
|
||||||
|
config_manager.get('experimental.composefs') == True):
|
||||||
|
print("✅ File values correctly override defaults")
|
||||||
|
else:
|
||||||
|
print("❌ File values not applied correctly")
|
||||||
|
print(f" Expected: WARNING, got: {config_manager.get('daemon.logging.level')}")
|
||||||
|
print(f" Expected: 4, got: {config_manager.get('daemon.concurrency.max_workers')}")
|
||||||
|
print(f" Expected: True, got: {config_manager.get('experimental.composefs')}")
|
||||||
|
return False
|
||||||
|
else:
|
||||||
|
print("❌ Failed to load configuration from file")
|
||||||
|
return False
|
||||||
|
|
||||||
|
# Test saving configuration
|
||||||
|
config_manager.set('daemon.logging.level', 'ERROR')
|
||||||
|
config_manager.set('performance.cache_enabled', False)
|
||||||
|
|
||||||
|
if config_manager.save():
|
||||||
|
print("✅ Configuration saved successfully")
|
||||||
|
|
||||||
|
# Reload and verify changes
|
||||||
|
new_config_manager = ConfigManager(config_path)
|
||||||
|
new_config = new_config_manager.load_config()
|
||||||
|
|
||||||
|
if (new_config_manager.get('daemon.logging.level') == 'ERROR' and
|
||||||
|
new_config_manager.get('performance.cache_enabled') == False):
|
||||||
|
print("✅ Configuration changes persisted correctly")
|
||||||
|
else:
|
||||||
|
print("❌ Configuration changes not persisted")
|
||||||
|
print(f" Expected: ERROR, got: {new_config_manager.get('daemon.logging.level')}")
|
||||||
|
print(f" Expected: False, got: {new_config_manager.get('performance.cache_enabled')}")
|
||||||
|
return False
|
||||||
|
else:
|
||||||
|
print("❌ Failed to save configuration")
|
||||||
|
return False
|
||||||
|
|
||||||
|
finally:
|
||||||
|
# Clean up
|
||||||
|
if os.path.exists(config_path):
|
||||||
|
os.unlink(config_path)
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
|
def test_schema_export():
|
||||||
|
"""Test schema export functionality"""
|
||||||
|
|
||||||
|
print("\n=== Testing Schema Export ===\n")
|
||||||
|
|
||||||
|
config_manager = ConfigManager()
|
||||||
|
|
||||||
|
# Export schema to temporary file
|
||||||
|
with tempfile.NamedTemporaryFile(mode='w', suffix='.json', delete=False) as f:
|
||||||
|
schema_path = f.name
|
||||||
|
|
||||||
|
try:
|
||||||
|
if config_manager.export_schema(schema_path):
|
||||||
|
print("✅ Schema exported successfully")
|
||||||
|
|
||||||
|
# Read and validate schema
|
||||||
|
with open(schema_path, 'r') as f:
|
||||||
|
schema = json.load(f)
|
||||||
|
|
||||||
|
# Check schema structure
|
||||||
|
if ('type' in schema and
|
||||||
|
'properties' in schema and
|
||||||
|
'daemon' in schema['properties']):
|
||||||
|
print("✅ Schema structure is valid")
|
||||||
|
print(f" Schema has {len(schema['properties'])} top-level properties")
|
||||||
|
|
||||||
|
# Check some specific properties
|
||||||
|
daemon_props = schema['properties']['daemon']['properties']
|
||||||
|
if 'logging' in daemon_props and 'dbus' in daemon_props:
|
||||||
|
print("✅ Schema includes expected nested properties")
|
||||||
|
else:
|
||||||
|
print("❌ Schema missing expected properties")
|
||||||
|
return False
|
||||||
|
else:
|
||||||
|
print("❌ Schema structure is invalid")
|
||||||
|
return False
|
||||||
|
else:
|
||||||
|
print("❌ Failed to export schema")
|
||||||
|
return False
|
||||||
|
|
||||||
|
finally:
|
||||||
|
# Clean up
|
||||||
|
if os.path.exists(schema_path):
|
||||||
|
os.unlink(schema_path)
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
|
def test_validation_errors():
|
||||||
|
"""Test detailed validation error reporting"""
|
||||||
|
|
||||||
|
print("\n=== Testing Validation Error Reporting ===\n")
|
||||||
|
|
||||||
|
# Create configuration with multiple errors
|
||||||
|
invalid_config = {
|
||||||
|
'daemon': {
|
||||||
|
'dbus': {
|
||||||
|
'bus_name': 'invalid-bus-name', # Invalid pattern
|
||||||
|
'object_path': '/invalid/path' # Invalid pattern
|
||||||
|
},
|
||||||
|
'concurrency': {
|
||||||
|
'max_workers': 100, # Too high
|
||||||
|
'transaction_timeout': 10 # Too low
|
||||||
|
},
|
||||||
|
'logging': {
|
||||||
|
'level': 'INVALID_LEVEL', # Invalid enum
|
||||||
|
'max_size': 'invalid-size', # Invalid pattern
|
||||||
|
'max_files': -1 # Too low
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
validator = ConfigValidator()
|
||||||
|
is_valid = validator.validate_config(invalid_config)
|
||||||
|
|
||||||
|
if not is_valid:
|
||||||
|
print("✅ Correctly detected validation errors")
|
||||||
|
errors = validator.get_errors()
|
||||||
|
print(f" Found {len(errors)} validation errors:")
|
||||||
|
|
||||||
|
for error in errors:
|
||||||
|
print(f" {error.field}: {error.message}")
|
||||||
|
if error.value is not None:
|
||||||
|
print(f" Value: {error.value}")
|
||||||
|
|
||||||
|
# Check specific errors
|
||||||
|
error_fields = [error.field for error in errors]
|
||||||
|
expected_errors = [
|
||||||
|
'daemon.dbus.bus_name',
|
||||||
|
'daemon.concurrency.max_workers',
|
||||||
|
'daemon.concurrency.transaction_timeout',
|
||||||
|
'daemon.logging.level',
|
||||||
|
'daemon.logging.max_size',
|
||||||
|
'daemon.logging.max_files'
|
||||||
|
]
|
||||||
|
|
||||||
|
for expected in expected_errors:
|
||||||
|
if expected in error_fields:
|
||||||
|
print(f" ✅ Expected error found: {expected}")
|
||||||
|
else:
|
||||||
|
print(f" ❌ Missing expected error: {expected}")
|
||||||
|
return False
|
||||||
|
else:
|
||||||
|
print("❌ Should have detected validation errors")
|
||||||
|
return False
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
|
def test_configuration_getters():
|
||||||
|
"""Test configuration getter methods"""
|
||||||
|
|
||||||
|
print("\n=== Testing Configuration Getters ===\n")
|
||||||
|
|
||||||
|
config_manager = ConfigManager()
|
||||||
|
config = config_manager.load_config()
|
||||||
|
|
||||||
|
if not config:
|
||||||
|
print("❌ Failed to load configuration")
|
||||||
|
return False
|
||||||
|
|
||||||
|
# Test D-Bus config getter
|
||||||
|
dbus_config = config_manager.get_dbus_config()
|
||||||
|
if 'bus_name' in dbus_config and 'object_path' in dbus_config:
|
||||||
|
print("✅ D-Bus config getter works")
|
||||||
|
else:
|
||||||
|
print("❌ D-Bus config getter failed")
|
||||||
|
return False
|
||||||
|
|
||||||
|
# Test logging config getter
|
||||||
|
logging_config = config_manager.get_logging_config()
|
||||||
|
if 'level' in logging_config and 'format' in logging_config:
|
||||||
|
print("✅ Logging config getter works")
|
||||||
|
else:
|
||||||
|
print("❌ Logging config getter failed")
|
||||||
|
return False
|
||||||
|
|
||||||
|
# Test concurrency config getter
|
||||||
|
concurrency_config = config_manager.get_concurrency_config()
|
||||||
|
if 'max_workers' in concurrency_config and 'transaction_timeout' in concurrency_config:
|
||||||
|
print("✅ Concurrency config getter works")
|
||||||
|
else:
|
||||||
|
print("❌ Concurrency config getter failed")
|
||||||
|
return False
|
||||||
|
|
||||||
|
# Test sysroot config getter
|
||||||
|
sysroot_config = config_manager.get_sysroot_config()
|
||||||
|
if 'path' in sysroot_config and 'repo_path' in sysroot_config:
|
||||||
|
print("✅ Sysroot config getter works")
|
||||||
|
else:
|
||||||
|
print("❌ Sysroot config getter failed")
|
||||||
|
return False
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
|
def main():
|
||||||
|
"""Run all configuration tests"""
|
||||||
|
|
||||||
|
print("🧪 Enhanced Configuration Management Test Suite")
|
||||||
|
print("=" * 50)
|
||||||
|
|
||||||
|
tests = [
|
||||||
|
("Basic Configuration", test_basic_configuration),
|
||||||
|
("Configuration Validation", test_configuration_validation),
|
||||||
|
("Environment Variables", test_environment_variables),
|
||||||
|
("File Operations", test_configuration_file_operations),
|
||||||
|
("Schema Export", test_schema_export),
|
||||||
|
("Validation Errors", test_validation_errors),
|
||||||
|
("Configuration Getters", test_configuration_getters)
|
||||||
|
]
|
||||||
|
|
||||||
|
passed = 0
|
||||||
|
total = len(tests)
|
||||||
|
|
||||||
|
for test_name, test_func in tests:
|
||||||
|
try:
|
||||||
|
if test_func():
|
||||||
|
passed += 1
|
||||||
|
print(f"✅ {test_name}: PASSED")
|
||||||
|
else:
|
||||||
|
print(f"❌ {test_name}: FAILED")
|
||||||
|
except Exception as e:
|
||||||
|
print(f"❌ {test_name}: ERROR - {e}")
|
||||||
|
|
||||||
|
print("\n" + "=" * 50)
|
||||||
|
print(f"Test Results: {passed}/{total} tests passed")
|
||||||
|
|
||||||
|
if passed == total:
|
||||||
|
print("🎉 All configuration tests passed!")
|
||||||
|
return True
|
||||||
|
else:
|
||||||
|
print("⚠️ Some configuration tests failed")
|
||||||
|
return False
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
success = main()
|
||||||
|
sys.exit(0 if success else 1)
|
||||||
Loading…
Add table
Add a link
Reference in a new issue