feat(experimental): Build multiple recipes in parallel (#182)

The `build` subcommand can now accept any number of recipe files and
will build them all in parallel. Along with this new ability, I've added
a way to easily distinguish which part of the build log belongs to which
recipe. Check out the `docker_build` action of this PR for an example.


![gif](https://gitlab.com/wunker-bunker/wunker-os/-/raw/main/bluebuild.gif)
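
For a rough sense of how the parallel fan-out works under the `multi-recipe` feature, here is a minimal sketch using `rayon`'s `par_iter` over the recipe paths; `build_recipe` is a hypothetical stand-in for the real per-recipe pipeline (generate the Containerfile, then build/tag/push):

```rust
use std::path::PathBuf;

use anyhow::Result;
use rayon::prelude::*;

// Hypothetical stand-in for the real per-recipe work
// (GenerateCommand + the build/tag/push driver calls).
fn build_recipe(recipe: &PathBuf) -> Result<()> {
    println!("building {}", recipe.display());
    Ok(())
}

fn main() -> Result<()> {
    let recipe_paths = vec![
        PathBuf::from("recipes/recipe.yml"),
        PathBuf::from("recipes/recipe-39.yml"),
    ];

    // Each recipe is built on its own rayon worker thread;
    // try_for_each stops at the first error.
    recipe_paths
        .par_iter()
        .try_for_each(|recipe| build_recipe(recipe))?;

    Ok(())
}
```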

## Tasks

- [x] Make the build log follow the same pattern as normal logs to keep things consistent
- [x] Update color ranges based on @xynydev's feedback
- [x] Deal with ANSI control characters in log output
- [x] Add [`indicatif`](https://crates.io/crates/indicatif) to make logs look nicer (see the sketch after this list)
- [x] Add ability to print logs to a file
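
As a rough illustration of the per-build log prefixing, assuming `indicatif` 0.17: each line of a child process's output is re-printed with a fixed header so interleaved parallel builds stay attributable. The `echo` child and the `g.i/b/cli:latest` prefix are placeholders, not the real driver code:

```rust
use std::{
    io::{BufRead, BufReader},
    process::{Command, Stdio},
    time::Duration,
};

use indicatif::ProgressBar;

fn main() -> std::io::Result<()> {
    // Placeholder prefix; the real code shortens and colors the image ref.
    let prefix = "g.i/b/cli:latest =>";

    let spinner = ProgressBar::new_spinner().with_message("Building Image");
    spinner.enable_steady_tick(Duration::from_millis(100));

    // Stream a child process's stdout and tag every line with the prefix.
    let mut child = Command::new("echo")
        .arg("hello from the build")
        .stdout(Stdio::piped())
        .spawn()?;

    let reader = BufReader::new(child.stdout.take().expect("stdout is piped"));
    for line in reader.lines() {
        // println() through the spinner keeps the spinner pinned at the bottom.
        spinner.println(format!("{prefix} {}", line?));
    }

    let status = child.wait()?;
    spinner.finish_and_clear();
    println!("exit status: {status}");
    Ok(())
}
```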
Gerald Pinder 2024-06-07 17:52:26 -04:00, committed by GitHub
parent 18e48a34a4, commit 4ca98c1c2a
No known key found for this signature in database (GPG key ID: B5690EEEBB952194)
24 changed files with 1449 additions and 500 deletions

View file

@ -9,6 +9,8 @@ on:
env:
FORCE_COLOR: 1
CLICOLOR_FORCE: 1
RUST_LOG_STYLE: always
jobs:
arm64-prebuild:
@ -203,7 +205,7 @@ jobs:
- name: Install bluebuild
run: |
cargo install --path . --debug
cargo install --path . --debug --all-features
- name: Expose GitHub Runtime
uses: crazy-max/ghaction-github-runtime@v3
@ -218,7 +220,7 @@ jobs:
cd integration-tests/test-repo
bluebuild template -vv | tee Containerfile
grep -q 'ARG IMAGE_REGISTRY=ghcr.io/blue-build' Containerfile || exit 1
bluebuild build --push -vv
bluebuild build --push -vv recipes/recipe.yml recipes/recipe-39.yml
docker-build-external-login:
timeout-minutes: 60
@ -259,7 +261,7 @@ jobs:
- name: Install bluebuild
run: |
cargo install --path . --debug
cargo install --path . --debug --all-features
- name: Expose GitHub Runtime
uses: crazy-max/ghaction-github-runtime@v3
@ -273,7 +275,7 @@ jobs:
cd integration-tests/test-repo
bluebuild template -vv | tee Containerfile
grep -q 'ARG IMAGE_REGISTRY=ghcr.io/blue-build' Containerfile || exit 1
bluebuild build --push -vv
bluebuild build --push -vv recipes/recipe.yml recipes/recipe-39.yml
podman-build:
timeout-minutes: 60
@ -314,7 +316,7 @@ jobs:
- name: Install bluebuild
run: |
cargo install --path . --debug
cargo install --path . --debug --all-features
- name: Run Build
env:
@ -325,7 +327,7 @@ jobs:
cd integration-tests/test-repo
bluebuild template -vv | tee Containerfile
grep -q 'ARG IMAGE_REGISTRY=ghcr.io/blue-build' Containerfile || exit 1
bluebuild build -B podman --push -vv
bluebuild build -B podman --push -vv recipes/recipe.yml recipes/recipe-39.yml
buildah-build:
timeout-minutes: 60
@ -366,7 +368,7 @@ jobs:
- name: Install bluebuild
run: |
cargo install --path . --debug
cargo install --path . --debug --all-features
- name: Run Build
env:
@ -377,4 +379,4 @@ jobs:
cd integration-tests/test-repo
bluebuild template -vv | tee Containerfile
grep -q 'ARG IMAGE_REGISTRY=ghcr.io/blue-build' Containerfile || exit 1
bluebuild build -B buildah --push -vv
bluebuild build -B buildah --push -vv recipes/recipe.yml recipes/recipe-39.yml

View file

@ -12,6 +12,8 @@ on:
env:
FORCE_COLOR: 1
CLICOLOR_FORCE: 1
RUST_LOG_STYLE: always
jobs:
arm64-prebuild:
@ -201,7 +203,7 @@ jobs:
- name: Install bluebuild
run: |
cargo install --path . --debug
cargo install --path . --debug --all-features
- name: Expose GitHub Runtime
uses: crazy-max/ghaction-github-runtime@v3
@ -216,7 +218,7 @@ jobs:
cd integration-tests/test-repo
bluebuild template -vv | tee Containerfile
grep -q 'ARG IMAGE_REGISTRY=ghcr.io/blue-build' Containerfile || exit 1
bluebuild build --push -vv
bluebuild build --push -vv recipes/recipe.yml recipes/recipe-39.yml
docker-build-external-login:
timeout-minutes: 60
@ -257,7 +259,7 @@ jobs:
- name: Install bluebuild
run: |
cargo install --path . --debug
cargo install --path . --debug --all-features
- name: Expose GitHub Runtime
uses: crazy-max/ghaction-github-runtime@v3
@ -271,7 +273,7 @@ jobs:
cd integration-tests/test-repo
bluebuild template -vv | tee Containerfile
grep -q 'ARG IMAGE_REGISTRY=ghcr.io/blue-build' Containerfile || exit 1
bluebuild build --push -vv
bluebuild build --push -vv recipes/recipe.yml recipes/recipe-39.yml
podman-build:
timeout-minutes: 60
@ -312,7 +314,7 @@ jobs:
- name: Install bluebuild
run: |
cargo install --path . --debug
cargo install --path . --debug --all-features
- name: Run Build
env:
@ -323,7 +325,7 @@ jobs:
cd integration-tests/test-repo
bluebuild template -vv | tee Containerfile
grep -q 'ARG IMAGE_REGISTRY=ghcr.io/blue-build' Containerfile || exit 1
bluebuild build -B podman --push -vv
bluebuild build -B podman --push -vv recipes/recipe.yml recipes/recipe-39.yml
buildah-build:
timeout-minutes: 60
@ -364,7 +366,7 @@ jobs:
- name: Install bluebuild
run: |
cargo install --path . --debug
cargo install --path . --debug --all-features
- name: Run Build
env:
@ -375,4 +377,4 @@ jobs:
cd integration-tests/test-repo
bluebuild template -vv | tee Containerfile
grep -q 'ARG IMAGE_REGISTRY=ghcr.io/blue-build' Containerfile || exit 1
bluebuild build -B buildah --push -vv
bluebuild build -B buildah --push -vv recipes/recipe.yml recipes/recipe-39.yml

View file

@ -1,5 +1,5 @@
[hooks]
pre-commit = "cargo fmt --check && cargo test --all-features && cargo clippy --all-features -- -D warnings"
pre-commit = "cargo fmt --check && cargo test && cargo test --all-features && cargo clippy -- -D warnings && cargo clippy --all-features -- -D warnings"
[logging]
verbose = true

Cargo.lock (generated): 851 changes

File diff suppressed because it is too large

View file

@ -14,10 +14,12 @@ anyhow = "1"
chrono = "0.4"
clap = "4"
colored = "2"
env_logger = "0.11"
format_serde_error = "0.3"
indexmap = { version = "2", features = ["serde"] }
indicatif = { version = "0.17", features = ["improved_unicode"] }
indicatif-log-bridge = "0.2"
log = "0.4"
once_cell = "1"
serde = { version = "1", features = ["derive"] }
serde_json = "1"
serde_yaml = "0.9"
@ -61,9 +63,9 @@ clap_complete = "4"
clap_complete_nushell = "4"
fuzzy-matcher = "0.3"
lenient_semver = "0.4"
once_cell = "1"
open = "5"
os_info = "3"
rayon = { version = "1.10.0", optional = true }
requestty = { version = "0.5", features = ["macros", "termion"] }
semver = { version = "1", features = ["serde"] }
shadow-rs = "0.26"
@ -76,9 +78,10 @@ anyhow.workspace = true
chrono.workspace = true
clap = { workspace = true, features = ["derive", "cargo", "unicode", "env"] }
colored.workspace = true
env_logger.workspace = true
indexmap.workspace = true
indicatif.workspace = true
log.workspace = true
once_cell.workspace = true
serde.workspace = true
serde_json.workspace = true
serde_yaml.workspace = true
@ -89,6 +92,7 @@ uuid.workspace = true
default = []
stages = ["blue-build-recipe/stages"]
copy = ["blue-build-recipe/copy"]
multi-recipe = ["rayon", "indicatif/rayon"]
switch = []
[dev-dependencies]

View file

@ -8,8 +8,8 @@ print_version_json() {
main() {
if [[ "$1" == "version" && "$2" == "--json" ]]; then
print_version_json
elif [[ "$1" == "build" && "$6" == *"cli_test.tar.gz" ]]; then
tarpath=$(echo "$6" | awk -F ':' '{print $2}')
elif [[ "$1" == "build" && "$7" == *"cli_test.tar.gz" ]]; then
tarpath=$(echo "$7" | awk -F ':' '{print $2}')
echo "Exporting image to a tarball (JK JUST A MOCK!)"
echo "${tarpath}"
touch $tarpath

View file

@ -8,8 +8,8 @@ print_version_json() {
main() {
if [[ "$1" == "version" && "$2" == "-f" && "$3" == "json" ]]; then
print_version_json
elif [[ "$1" == "build" && "$6" == *"cli_test.tar.gz" ]]; then
tarpath=$(echo "$6" | awk -F ':' '{print $2}')
elif [[ "$1" == "build" && "$7" == *"cli_test.tar.gz" ]]; then
tarpath=$(echo "$7" | awk -F ':' '{print $2}')
echo "Exporting image to a tarball (JK JUST A MOCK!)"
echo "${tarpath}"
touch $tarpath

View file

@ -0,0 +1,38 @@
name: cli/test
description: This is my personal OS image.
base-image: ghcr.io/ublue-os/silverblue-main
alt-tags:
- gts
- stable
image-version: 39
modules:
- from-file: akmods.yml
- from-file: flatpaks.yml
- type: files
files:
- usr: /usr
- type: script
scripts:
- example.sh
- type: rpm-ostree
repos:
- https://copr.fedorainfracloud.org/coprs/atim/starship/repo/fedora-%OS_VERSION%/atim-starship-fedora-%OS_VERSION%.repo
install:
- micro
- starship
remove:
- firefox
- firefox-langpacks
- type: signing
- type: test-module
- type: containerfile
containerfiles:
- labels
snippets:
- RUN echo "This is a snippet" && ostree container commit

View file

@ -1,7 +1,4 @@
name: cli/test
alt-tags:
- gts
- stable
description: This is my personal OS image.
base-image: ghcr.io/ublue-os/silverblue-main
image-version: 40

View file

@ -198,7 +198,7 @@ impl<'a> Recipe<'a> {
/// # Errors
/// Errors when a yaml file cannot be deserialized,
/// or a linked module yaml file does not exist.
pub fn parse<P: AsRef<Path>>(path: &P) -> Result<Self> {
pub fn parse<P: AsRef<Path>>(path: P) -> Result<Self> {
trace!("Recipe::parse({})", path.as_ref().display());
let file_path = if Path::new(path.as_ref()).is_absolute() {

View file

@ -1,17 +1,15 @@
use blue_build::commands::{BlueBuildArgs, BlueBuildCommand, CommandArgs};
use blue_build_utils::logging;
use blue_build_utils::logging::Logger;
use clap::Parser;
use log::LevelFilter;
fn main() {
let args = BlueBuildArgs::parse();
let log_level = args.verbosity.log_level_filter();
env_logger::builder()
Logger::new()
.filter_level(args.verbosity.log_level_filter())
.filter_module("hyper::proto", LevelFilter::Info)
.format(logging::format_log(log_level))
.filter_modules([("hyper::proto", LevelFilter::Info)])
.log_out_dir(args.log_out.clone())
.init();
log::trace!("Parsed arguments: {args:#?}");

View file

@ -1,3 +1,5 @@
use std::path::PathBuf;
use log::error;
use clap::{command, crate_authors, Args, Parser, Subcommand};
@ -50,6 +52,10 @@ pub struct BlueBuildArgs {
#[command(subcommand)]
pub command: CommandArgs,
/// The directory to output build logs.
#[arg(long)]
pub log_out: Option<PathBuf>,
#[clap(flatten)]
pub verbosity: Verbosity<InfoLevel>,
}

View file

@ -6,13 +6,16 @@ use std::{
use anyhow::{bail, Context, Result};
use blue_build_recipe::Recipe;
use blue_build_utils::constants::{
ARCHIVE_SUFFIX, BB_PASSWORD, BB_REGISTRY, BB_REGISTRY_NAMESPACE, BB_USERNAME, BUILD_ID_LABEL,
CI_DEFAULT_BRANCH, CI_PROJECT_NAME, CI_PROJECT_NAMESPACE, CI_PROJECT_URL, CI_REGISTRY,
CI_SERVER_HOST, CI_SERVER_PROTOCOL, CONFIG_PATH, CONTAINER_FILE, COSIGN_PRIVATE_KEY,
COSIGN_PRIV_PATH, COSIGN_PUB_PATH, GITHUB_REPOSITORY_OWNER, GITHUB_TOKEN,
GITHUB_TOKEN_ISSUER_URL, GITHUB_WORKFLOW_REF, GITIGNORE_PATH, LABELED_ERROR_MESSAGE,
NO_LABEL_ERROR_MESSAGE, RECIPE_FILE, RECIPE_PATH, SIGSTORE_ID_TOKEN,
use blue_build_utils::{
constants::{
ARCHIVE_SUFFIX, BB_PASSWORD, BB_REGISTRY, BB_REGISTRY_NAMESPACE, BB_USERNAME,
BUILD_ID_LABEL, CI_DEFAULT_BRANCH, CI_PROJECT_NAME, CI_PROJECT_NAMESPACE, CI_PROJECT_URL,
CI_REGISTRY, CI_SERVER_HOST, CI_SERVER_PROTOCOL, CONFIG_PATH, CONTAINER_FILE,
COSIGN_PRIVATE_KEY, COSIGN_PRIV_PATH, COSIGN_PUB_PATH, GITHUB_REPOSITORY_OWNER,
GITHUB_TOKEN, GITHUB_TOKEN_ISSUER_URL, GITHUB_WORKFLOW_REF, GITIGNORE_PATH,
LABELED_ERROR_MESSAGE, NO_LABEL_ERROR_MESSAGE, RECIPE_FILE, RECIPE_PATH, SIGSTORE_ID_TOKEN,
},
generate_containerfile_path,
};
use clap::Args;
use colored::Colorize;
@ -35,6 +38,13 @@ use super::{BlueBuildCommand, DriverArgs};
pub struct BuildCommand {
/// The recipe file to build an image
#[arg()]
#[cfg(feature = "multi-recipe")]
#[builder(default, setter(into, strip_option))]
recipe: Option<Vec<PathBuf>>,
/// The recipe file to build an image
#[arg()]
#[cfg(not(feature = "multi-recipe"))]
#[builder(default, setter(into, strip_option))]
recipe: Option<PathBuf>,
@ -126,6 +136,8 @@ impl BlueBuildCommand for BuildCommand {
.build()
.init()?;
self.update_gitignore()?;
if self.push && self.archive.is_some() {
bail!("You cannot use '--archive' and '--push' at the same time");
}
@ -133,96 +145,129 @@ impl BlueBuildCommand for BuildCommand {
if self.push {
blue_build_utils::check_command_exists("cosign")?;
self.check_cosign_files()?;
Self::login()?;
}
Self::login()?;
// Check if the Containerfile exists
// - If doesn't => *Build*
// - If it does:
// - check entry in .gitignore
// -> If it is => *Build*
// -> If isn't:
// - check if it has the BlueBuild tag (LABEL)
// -> If it does => *Ask* to add to .gitignore and remove from git
// -> If it doesn't => *Ask* to continue and override the file
let container_file_path = Path::new(CONTAINER_FILE);
if !self.force && container_file_path.exists() {
let gitignore = fs::read_to_string(GITIGNORE_PATH)
.context(format!("Failed to read {GITIGNORE_PATH}"))?;
let is_ignored = gitignore
.lines()
.any(|line: &str| line.contains(CONTAINER_FILE));
if !is_ignored {
let containerfile = fs::read_to_string(container_file_path)
.context(format!("Failed to read {}", container_file_path.display()))?;
let has_label = containerfile.lines().any(|line| {
let label = format!("LABEL {BUILD_ID_LABEL}");
line.to_string().trim().starts_with(&label)
});
let question = requestty::Question::confirm("build")
.message(
if has_label {
LABELED_ERROR_MESSAGE
} else {
NO_LABEL_ERROR_MESSAGE
}
.bright_yellow()
.to_string(),
)
.default(true)
.build();
if let Ok(answer) = requestty::prompt_one(question) {
if answer.as_bool().unwrap_or(false) {
blue_build_utils::append_to_file(
&GITIGNORE_PATH,
&format!("/{CONTAINER_FILE}"),
)?;
}
#[cfg(feature = "multi-recipe")]
{
use rayon::prelude::*;
let recipe_paths = self.recipe.clone().map_or_else(|| {
let legacy_path = Path::new(CONFIG_PATH);
let recipe_path = Path::new(RECIPE_PATH);
if recipe_path.exists() && recipe_path.is_dir() {
vec![recipe_path.join(RECIPE_FILE)]
} else {
warn!("Use of {CONFIG_PATH} for recipes is deprecated, please move your recipe files into {RECIPE_PATH}");
vec![legacy_path.join(RECIPE_FILE)]
}
}
},
|recipes| {
let mut same = std::collections::HashSet::new();
recipes.into_iter().filter(|recipe| same.insert(recipe.clone())).collect()
});
recipe_paths.par_iter().try_for_each(|recipe| {
GenerateCommand::builder()
.output(generate_containerfile_path(recipe)?)
.recipe(recipe)
.drivers(DriverArgs::builder().squash(self.drivers.squash).build())
.build()
.try_run()
})?;
self.start(&recipe_paths)
}
let recipe_path = self.recipe.clone().unwrap_or_else(|| {
let legacy_path = Path::new(CONFIG_PATH);
let recipe_path = Path::new(RECIPE_PATH);
if recipe_path.exists() && recipe_path.is_dir() {
recipe_path.join(RECIPE_FILE)
} else {
warn!("Use of {CONFIG_PATH} for recipes is deprecated, please move your recipe files into {RECIPE_PATH}");
legacy_path.join(RECIPE_FILE)
}
});
#[cfg(not(feature = "multi-recipe"))]
{
let recipe_path = self.recipe.clone().unwrap_or_else(|| {
let legacy_path = Path::new(CONFIG_PATH);
let recipe_path = Path::new(RECIPE_PATH);
if recipe_path.exists() && recipe_path.is_dir() {
recipe_path.join(RECIPE_FILE)
} else {
warn!("Use of {CONFIG_PATH} for recipes is deprecated, please move your recipe files into {RECIPE_PATH}");
legacy_path.join(RECIPE_FILE)
}
});
GenerateCommand::builder()
.recipe(&recipe_path)
.output(PathBuf::from("Containerfile"))
.build()
.try_run()?;
GenerateCommand::builder()
.output(generate_containerfile_path(&recipe_path)?)
.recipe(&recipe_path)
.drivers(DriverArgs::builder().squash(self.drivers.squash).build())
.build()
.try_run()?;
info!("Building image for recipe at {}", recipe_path.display());
self.start(&recipe_path)
self.start(&recipe_path)
}
}
}
impl BuildCommand {
fn start(&self, recipe_path: &Path) -> Result<()> {
#[cfg(feature = "multi-recipe")]
fn start(&self, recipe_paths: &[PathBuf]) -> Result<()> {
use rayon::prelude::*;
trace!("BuildCommand::build_image()");
let recipe = Recipe::parse(&recipe_path)?;
recipe_paths
.par_iter()
.try_for_each(|recipe_path| -> Result<()> {
let recipe = Recipe::parse(recipe_path)?;
let os_version = Driver::get_os_version(&recipe)?;
let containerfile = generate_containerfile_path(recipe_path)?;
let tags = recipe.generate_tags(os_version);
let image_name = self.generate_full_image_name(&recipe)?;
let opts = if let Some(archive_dir) = self.archive.as_ref() {
BuildTagPushOpts::builder()
.containerfile(&containerfile)
.archive_path(format!(
"{}/{}.{ARCHIVE_SUFFIX}",
archive_dir.to_string_lossy().trim_end_matches('/'),
recipe.name.to_lowercase().replace('/', "_"),
))
.squash(self.drivers.squash)
.build()
} else {
BuildTagPushOpts::builder()
.image(&image_name)
.containerfile(&containerfile)
.tags(tags.iter().map(String::as_str).collect::<Vec<_>>())
.push(self.push)
.no_retry_push(self.no_retry_push)
.retry_count(self.retry_count)
.compression(self.compression_format)
.squash(self.drivers.squash)
.build()
};
Driver::get_build_driver().build_tag_push(&opts)?;
if self.push && !self.no_sign {
sign_images(&image_name, tags.first().map(String::as_str))?;
}
Ok(())
})?;
info!("Build complete!");
Ok(())
}
#[cfg(not(feature = "multi-recipe"))]
fn start(&self, recipe_path: &Path) -> Result<()> {
trace!("BuildCommand::start()");
let recipe = Recipe::parse(recipe_path)?;
let os_version = Driver::get_os_version(&recipe)?;
let containerfile = generate_containerfile_path(recipe_path)?;
let tags = recipe.generate_tags(os_version);
let image_name = self.generate_full_image_name(&recipe)?;
let opts = if let Some(archive_dir) = self.archive.as_ref() {
BuildTagPushOpts::builder()
.containerfile(&containerfile)
.archive_path(format!(
"{}/{}.{ARCHIVE_SUFFIX}",
archive_dir.to_string_lossy().trim_end_matches('/'),
@ -233,6 +278,7 @@ impl BuildCommand {
} else {
BuildTagPushOpts::builder()
.image(&image_name)
.containerfile(&containerfile)
.tags(tags.iter().map(String::as_str).collect::<Vec<_>>())
.push(self.push)
.no_retry_push(self.no_retry_push)
@ -249,7 +295,6 @@ impl BuildCommand {
}
info!("Build complete!");
Ok(())
}
@ -348,6 +393,75 @@ impl BuildCommand {
Ok(image_name)
}
fn update_gitignore(&self) -> Result<()> {
// Check if the Containerfile exists
// - If doesn't => *Build*
// - If it does:
// - check entry in .gitignore
// -> If it is => *Build*
// -> If isn't:
// - check if it has the BlueBuild tag (LABEL)
// -> If it does => *Ask* to add to .gitignore and remove from git
// -> If it doesn't => *Ask* to continue and override the file
let container_file_path = Path::new(CONTAINER_FILE);
let label = format!("LABEL {BUILD_ID_LABEL}");
if !self.force && container_file_path.exists() {
let to_ignore_lines = [format!("/{CONTAINER_FILE}"), format!("/{CONTAINER_FILE}.*")];
let gitignore = fs::read_to_string(GITIGNORE_PATH)
.context(format!("Failed to read {GITIGNORE_PATH}"))?;
let mut edited_gitignore = gitignore.clone();
to_ignore_lines
.iter()
.filter(|to_ignore| {
!gitignore
.lines()
.any(|line| line.trim() == to_ignore.trim())
})
.try_for_each(|to_ignore| -> Result<()> {
let containerfile = fs::read_to_string(container_file_path)
.context(format!("Failed to read {}", container_file_path.display()))?;
let has_label = containerfile
.lines()
.any(|line| line.to_string().trim().starts_with(&label));
let question = requestty::Question::confirm("build")
.message(
if has_label {
LABELED_ERROR_MESSAGE
} else {
NO_LABEL_ERROR_MESSAGE
}
.bright_yellow()
.to_string(),
)
.default(true)
.build();
if let Ok(answer) = requestty::prompt_one(question) {
if answer.as_bool().unwrap_or(false) {
if !edited_gitignore.ends_with('\n') {
edited_gitignore.push('\n');
}
edited_gitignore.push_str(to_ignore);
edited_gitignore.push('\n');
}
}
Ok(())
})?;
if edited_gitignore != gitignore {
fs::write(GITIGNORE_PATH, edited_gitignore.as_str())?;
}
}
Ok(())
}
/// Checks the cosign private/public key pair to ensure they match.
///
/// # Errors

View file

@ -56,8 +56,15 @@ impl BlueBuildCommand for UpgradeCommand {
let recipe = Recipe::parse(&self.common.recipe)?;
let mut build = BuildCommand::builder()
.recipe(self.common.recipe.clone())
let build = BuildCommand::builder();
#[cfg(feature = "multi-recipe")]
let build = build.recipe(vec![self.common.recipe.clone()]);
#[cfg(not(feature = "multi-recipe"))]
let build = build.recipe(self.common.recipe.clone());
let mut build = build
.archive(LOCAL_BUILD)
.drivers(self.common.drivers)
.force(self.common.force)
@ -108,8 +115,15 @@ impl BlueBuildCommand for RebaseCommand {
let recipe = Recipe::parse(&self.common.recipe)?;
let mut build = BuildCommand::builder()
.recipe(self.common.recipe.clone())
let build = BuildCommand::builder();
#[cfg(feature = "multi-recipe")]
let build = build.recipe(vec![self.common.recipe.clone()]);
#[cfg(not(feature = "multi-recipe"))]
let build = build.recipe(self.common.recipe.clone());
let mut build = build
.archive(LOCAL_BUILD)
.drivers(self.common.drivers)
.force(self.common.force)

View file

@ -1,15 +1,18 @@
use std::{
path::{Path, PathBuf},
process::Command,
time::Duration,
};
use anyhow::{bail, Result};
use blue_build_recipe::Recipe;
use blue_build_utils::constants::{
ARCHIVE_SUFFIX, LOCAL_BUILD, OCI_ARCHIVE, OSTREE_UNVERIFIED_IMAGE,
use blue_build_utils::{
constants::{ARCHIVE_SUFFIX, LOCAL_BUILD, OCI_ARCHIVE, OSTREE_UNVERIFIED_IMAGE},
logging::CommandLogging,
};
use clap::Args;
use colored::Colorize;
use indicatif::ProgressBar;
use log::{debug, trace, warn};
use tempdir::TempDir;
use typed_builder::TypedBuilder;
@ -65,7 +68,7 @@ impl BlueBuildCommand for SwitchCommand {
trace!("{tempdir:?}");
BuildCommand::builder()
.recipe(self.recipe.clone())
.recipe([self.recipe.clone()])
.archive(tempdir.path())
.force(self.force)
.build()
@ -124,6 +127,7 @@ impl SwitchCommand {
"{OSTREE_UNVERIFIED_IMAGE}:{OCI_ARCHIVE}:{path}",
path = archive_path.display()
);
let mut command = Command::new("rpm-ostree");
command.arg("rebase").arg(&image_ref);
@ -137,7 +141,10 @@ impl SwitchCommand {
);
command
}
.status()?;
.status_image_ref_progress(
format!("{}", archive_path.display()),
"Switching to new image",
)?;
if !status.success() {
bail!("Failed to switch to new image!");
@ -152,9 +159,15 @@ impl SwitchCommand {
to.display()
);
let progress = ProgressBar::new_spinner();
progress.enable_steady_tick(Duration::from_millis(100));
progress.set_message(format!("Moving image archive to {}...", to.display()));
trace!("sudo mv {} {}", from.display(), to.display());
let status = Command::new("sudo").arg("mv").args([from, to]).status()?;
progress.finish_and_clear();
if !status.success() {
bail!(
"Failed to move archive from {from} to {to}",
@ -193,12 +206,18 @@ impl SwitchCommand {
if !files.is_empty() {
let files = files.join(" ");
let progress = ProgressBar::new_spinner();
progress.enable_steady_tick(Duration::from_millis(100));
progress.set_message("Removing old image archive files...");
trace!("sudo rm -f {files}");
let status = Command::new("sudo")
.args(["rm", "-f"])
.arg(files)
.status()?;
progress.finish_and_clear();
if !status.success() {
bail!("Failed to clean out archives in {LOCAL_BUILD}");
}

View file

@ -165,6 +165,7 @@ pub trait BuildDriver: Sync + Send {
let build_opts = BuildOpts::builder()
.image(&full_image)
.containerfile(opts.containerfile.as_ref())
.squash(opts.squash)
.build();

View file

@ -1,6 +1,7 @@
use std::process::Command;
use anyhow::{bail, Result};
use blue_build_utils::logging::CommandLogging;
use log::{error, info, trace};
use semver::Version;
use serde::Deserialize;
@ -47,17 +48,21 @@ impl BuildDriver for BuildahDriver {
trace!("BuildahDriver::build({opts:#?})");
trace!(
"buildah build --pull=true --layers={} -t {}",
"buildah build --pull=true --layers={} -f {} -t {}",
!opts.squash,
opts.containerfile.display(),
opts.image,
);
let status = Command::new("buildah")
let mut command = Command::new("buildah");
command
.arg("build")
.arg("--pull=true")
.arg(format!("--layers={}", !opts.squash))
.arg("-f")
.arg(opts.containerfile.as_ref())
.arg("-t")
.arg(opts.image.as_ref())
.status()?;
.arg(opts.image.as_ref());
let status = command.status_image_ref_progress(&opts.image, "Building Image")?;
if status.success() {
info!("Successfully built {}", opts.image);
@ -89,14 +94,15 @@ impl BuildDriver for BuildahDriver {
trace!("BuildahDriver::push({opts:#?})");
trace!("buildah push {}", opts.image);
let status = Command::new("buildah")
let mut command = Command::new("buildah");
command
.arg("push")
.arg(format!(
"--compression-format={}",
opts.compression_type.unwrap_or_default()
))
.arg(opts.image.as_ref())
.status()?;
.arg(opts.image.as_ref());
let status = command.status_image_ref_progress(&opts.image, "Pushing Image")?;
if status.success() {
info!("Successfully pushed {}!", opts.image);

View file

@ -1,13 +1,11 @@
use std::{
env,
process::{Command, Stdio},
sync::Mutex,
};
use std::{env, process::Command, sync::Mutex, time::Duration};
use anyhow::{anyhow, bail, Result};
use blue_build_utils::constants::{
BB_BUILDKIT_CACHE_GHA, CONTAINER_FILE, DOCKER_HOST, SKOPEO_IMAGE,
use blue_build_utils::{
constants::{BB_BUILDKIT_CACHE_GHA, CONTAINER_FILE, DOCKER_HOST, SKOPEO_IMAGE},
logging::{CommandLogging, Logger},
};
use indicatif::{ProgressBar, ProgressStyle};
use log::{info, trace, warn};
use once_cell::sync::Lazy;
use semver::Version;
@ -76,12 +74,12 @@ impl DockerDriver {
.arg("--name=bluebuild")
.output()?;
if create_out.status.success() {
*lock = true;
} else {
if !create_out.status.success() {
bail!("{}", String::from_utf8_lossy(&create_out.stderr));
}
}
*lock = true;
drop(lock);
Ok(())
}
@ -119,7 +117,7 @@ impl BuildDriver for DockerDriver {
.arg("-t")
.arg(opts.image.as_ref())
.arg("-f")
.arg(CONTAINER_FILE)
.arg(opts.containerfile.as_ref())
.arg(".")
.status()?;
@ -211,12 +209,15 @@ impl BuildDriver for DockerDriver {
command.arg("--builder=bluebuild");
}
trace!("build --progress=plain --pull -f {CONTAINER_FILE}",);
trace!(
"build --progress=plain --pull -f {}",
opts.containerfile.display()
);
command
.arg("build")
.arg("--pull")
.arg("-f")
.arg(CONTAINER_FILE);
.arg(opts.containerfile.as_ref());
// https://github.com/moby/buildkit?tab=readme-ov-file#github-actions-cache-experimental
if env::var(BB_BUILDKIT_CACHE_GHA).map_or_else(|_| false, |e| e == "true") {
@ -228,18 +229,25 @@ impl BuildDriver for DockerDriver {
.arg("type=gha");
}
let mut final_image = String::new();
match (opts.image.as_ref(), opts.archive_path.as_ref()) {
(Some(image), None) => {
if opts.tags.is_empty() {
final_image.push_str(image);
trace!("-t {image}");
command.arg("-t").arg(image.as_ref());
} else {
for tag in opts.tags.as_ref() {
final_image
.push_str(format!("{image}:{}", opts.tags.first().unwrap_or(&"")).as_str());
opts.tags.iter().for_each(|tag| {
let full_image = format!("{image}:{tag}");
trace!("-t {full_image}");
command.arg("-t").arg(full_image);
}
});
}
if opts.push {
@ -254,6 +262,8 @@ impl BuildDriver for DockerDriver {
}
}
(None, Some(archive_path)) => {
final_image.push_str(archive_path);
trace!("--output type=oci,dest={archive_path}");
command
.arg("--output")
@ -266,14 +276,17 @@ impl BuildDriver for DockerDriver {
trace!(".");
command.arg(".");
if command.status()?.success() {
if command
.status_image_ref_progress(&final_image, "Building Image")?
.success()
{
if opts.push {
info!("Successfully built and pushed image");
info!("Successfully built and pushed image {}", final_image);
} else {
info!("Successfully built image");
info!("Successfully built image {}", final_image);
}
} else {
bail!("Failed to build image");
bail!("Failed to build image {}", final_image);
}
Ok(())
}
@ -288,6 +301,13 @@ impl InspectDriver for DockerDriver {
|tag| format!("docker://{}:{tag}", opts.image),
);
let progress = Logger::multi_progress().add(
ProgressBar::new_spinner()
.with_style(ProgressStyle::default_spinner())
.with_message(format!("Inspecting metadata for {url}")),
);
progress.enable_steady_tick(Duration::from_millis(100));
trace!("docker run {SKOPEO_IMAGE} inspect {url}");
let output = Command::new("docker")
.arg("run")
@ -295,9 +315,11 @@ impl InspectDriver for DockerDriver {
.arg(SKOPEO_IMAGE)
.arg("inspect")
.arg(&url)
.stderr(Stdio::inherit())
.output()?;
progress.finish();
Logger::multi_progress().remove(&progress);
if output.status.success() {
info!("Successfully inspected image {url}!");
} else {

View file

@ -1,4 +1,4 @@
use std::borrow::Cow;
use std::{borrow::Cow, path::Path};
use typed_builder::TypedBuilder;
@ -12,6 +12,9 @@ pub struct BuildOpts<'a> {
#[builder(default)]
pub squash: bool,
#[builder(setter(into))]
pub containerfile: Cow<'a, Path>,
}
#[derive(Debug, Clone, TypedBuilder)]
@ -33,6 +36,7 @@ pub struct PushOpts<'a> {
}
/// Options for building, tagging, and pushing images.
#[allow(clippy::struct_excessive_bools)]
#[derive(Debug, Clone, TypedBuilder)]
pub struct BuildTagPushOpts<'a> {
/// The base image name.
@ -47,6 +51,10 @@ pub struct BuildTagPushOpts<'a> {
#[builder(default, setter(into, strip_option))]
pub archive_path: Option<Cow<'a, str>>,
/// The path to the Containerfile to build.
#[builder(setter(into))]
pub containerfile: Cow<'a, Path>,
/// The list of tags for the image being built.
#[builder(default, setter(into))]
pub tags: Cow<'a, [&'a str]>,
@ -65,9 +73,11 @@ pub struct BuildTagPushOpts<'a> {
#[builder(default = 1)]
pub retry_count: u8,
/// The compression type to use when pushing.
#[builder(default)]
pub compression: CompressionType,
/// Run all steps in a single layer.
#[builder(default)]
pub squash: bool,
}

View file

@ -1,7 +1,11 @@
use std::process::{Command, Stdio};
use std::{process::Command, time::Duration};
use anyhow::{bail, Result};
use blue_build_utils::constants::SKOPEO_IMAGE;
use blue_build_utils::{
constants::SKOPEO_IMAGE,
logging::{CommandLogging, Logger},
};
use indicatif::{ProgressBar, ProgressStyle};
use log::{debug, error, info, trace};
use semver::Version;
use serde::Deserialize;
@ -57,18 +61,22 @@ impl BuildDriver for PodmanDriver {
trace!("PodmanDriver::build({opts:#?})");
trace!(
"podman build --pull=true --layers={} . -t {}",
"podman build --pull=true --layers={} -f {} -t {} .",
!opts.squash,
opts.containerfile.display(),
opts.image,
);
let status = Command::new("podman")
let mut command = Command::new("podman");
command
.arg("build")
.arg("--pull=true")
.arg(format!("--layers={}", !opts.squash))
.arg(".")
.arg("-f")
.arg(opts.containerfile.as_ref())
.arg("-t")
.arg(opts.image.as_ref())
.status()?;
.arg(".");
let status = command.status_image_ref_progress(&opts.image, "Building Image")?;
if status.success() {
info!("Successfully built {}", opts.image);
@ -100,14 +108,15 @@ impl BuildDriver for PodmanDriver {
trace!("PodmanDriver::push({opts:#?})");
trace!("podman push {}", opts.image);
let status = Command::new("podman")
let mut command = Command::new("podman");
command
.arg("push")
.arg(format!(
"--compression-format={}",
opts.compression_type.unwrap_or_default()
))
.arg(opts.image.as_ref())
.status()?;
.arg(opts.image.as_ref());
let status = command.status_image_ref_progress(&opts.image, "Pushing Image")?;
if status.success() {
info!("Successfully pushed {}!", opts.image);
@ -154,6 +163,13 @@ impl InspectDriver for PodmanDriver {
|tag| format!("docker://{}:{tag}", opts.image),
);
let progress = Logger::multi_progress().add(
ProgressBar::new_spinner()
.with_style(ProgressStyle::default_spinner())
.with_message(format!("Inspecting metadata for {url}")),
);
progress.enable_steady_tick(Duration::from_millis(100));
trace!("podman run {SKOPEO_IMAGE} inspect {url}");
let output = Command::new("podman")
.arg("run")
@ -161,9 +177,11 @@ impl InspectDriver for PodmanDriver {
.arg(SKOPEO_IMAGE)
.arg("inspect")
.arg(&url)
.stderr(Stdio::inherit())
.output()?;
progress.finish();
Logger::multi_progress().remove(&progress);
if output.status.success() {
debug!("Successfully inspected image {url}!");
} else {

View file

@ -1,6 +1,11 @@
use std::process::{Command, Stdio};
use std::{
process::{Command, Stdio},
time::Duration,
};
use anyhow::{bail, Result};
use blue_build_utils::logging::Logger;
use indicatif::{ProgressBar, ProgressStyle};
use log::{debug, trace};
use crate::image_metadata::ImageMetadata;
@ -19,6 +24,13 @@ impl InspectDriver for SkopeoDriver {
|tag| format!("docker://{}:{tag}", opts.image),
);
let progress = Logger::multi_progress().add(
ProgressBar::new_spinner()
.with_style(ProgressStyle::default_spinner())
.with_message(format!("Inspecting metadata for {url}")),
);
progress.enable_steady_tick(Duration::from_millis(100));
trace!("skopeo inspect {url}");
let output = Command::new("skopeo")
.arg("inspect")
@ -26,6 +38,9 @@ impl InspectDriver for SkopeoDriver {
.stderr(Stdio::inherit())
.output()?;
progress.finish();
Logger::multi_progress().remove(&progress);
if output.status.success() {
debug!("Successfully inspected image {url}!");
} else {

View file

@ -10,7 +10,13 @@ license.workspace = true
[dependencies]
atty = "0.2"
base64 = "0.22.1"
blake2 = "0.10.6"
directories = "5"
rand = "0.8.5"
log4rs = { version = "1.3.0", features = ["background_rotation"] }
nu-ansi-term = { version = "0.50.0", features = ["gnu_legacy"] }
os_pipe = { version = "1", features = ["io_safety"] }
process_control = { version = "4", features = ["crossbeam-channel"] }
syntect = "5"
which = "6"
@ -19,12 +25,15 @@ anyhow.workspace = true
chrono.workspace = true
clap = { workspace = true, features = ["derive"] }
colored.workspace = true
env_logger.workspace = true
format_serde_error.workspace = true
indicatif.workspace = true
indicatif-log-bridge.workspace = true
log.workspace = true
once_cell.workspace = true
serde.workspace = true
serde_yaml.workspace = true
serde_json.workspace = true
typed-builder.workspace = true
[build-dependencies]
syntect = "5.2.0"

View file

@ -3,12 +3,25 @@ pub mod constants;
pub mod logging;
pub mod syntax_highlighting;
use std::{ffi::OsStr, io::Write, path::PathBuf, process::Command, thread, time::Duration};
use std::{
os::unix::ffi::OsStrExt,
path::{Path, PathBuf},
process::Command,
thread,
time::Duration,
};
use anyhow::{anyhow, Result};
use base64::prelude::*;
use blake2::{
digest::{Update, VariableOutput},
Blake2bVar,
};
use format_serde_error::SerdeError;
use log::trace;
use crate::constants::CONTAINER_FILE;
pub use command_output::*;
/// Checks for the existence of a given command.
@ -34,23 +47,6 @@ pub fn check_command_exists(command: &str) -> Result<()> {
}
}
/// Appends a string to a file.
///
/// # Errors
/// Will error if it fails to append to a file.
pub fn append_to_file<T: Into<PathBuf> + AsRef<OsStr>>(file_path: &T, content: &str) -> Result<()> {
let file_path: PathBuf = file_path.into();
trace!("append_to_file({}, {content})", file_path.display());
let mut file = std::fs::OpenOptions::new()
.append(true)
.create(true)
.open(file_path)?;
writeln!(file, "\n{content}")?;
Ok(())
}
/// Creates a serde error for displaying the file
/// and where the error occurred.
pub fn serde_yaml_err(contents: &str) -> impl Fn(serde_yaml::Error) -> SerdeError + '_ {
@ -93,3 +89,22 @@ where
pub fn home_dir() -> Option<PathBuf> {
directories::BaseDirs::new().map(|base_dirs| base_dirs.home_dir().to_path_buf())
}
/// Generates a 1-1 related Containerfile to a recipe.
/// The file is in the format of `Containerfile.{path_hash}`.
///
/// # Errors
/// Will error if unable to create a hash of the path.
pub fn generate_containerfile_path<T: AsRef<Path>>(path: T) -> Result<PathBuf> {
const HASH_SIZE: usize = 8;
let mut buf = [0u8; HASH_SIZE];
let mut hasher = Blake2bVar::new(HASH_SIZE)?;
hasher.update(path.as_ref().as_os_str().as_bytes());
hasher.finalize_variable(&mut buf)?;
Ok(PathBuf::from(format!(
"{CONTAINER_FILE}.{}",
BASE64_URL_SAFE_NO_PAD.encode(buf)
)))
}

View file

@ -1,60 +1,406 @@
use std::io::{self, Write};
use std::{
env,
fs::OpenOptions,
io::{BufRead, BufReader, Result, Write as IoWrite},
path::{Path, PathBuf},
process::{Command, ExitStatus},
sync::{Arc, Mutex},
thread,
time::Duration,
};
use chrono::Local;
use colored::{ColoredString, Colorize};
use env_logger::fmt::Formatter;
use log::{Level, LevelFilter, Record};
use colored::{control::ShouldColorize, ColoredString, Colorize};
use indicatif::{MultiProgress, ProgressBar};
use indicatif_log_bridge::LogWrapper;
use log::{warn, Level, LevelFilter, Record};
use log4rs::{
append::{
console::ConsoleAppender,
rolling_file::{
policy::compound::{
roll::fixed_window::FixedWindowRoller, trigger::size::SizeTrigger, CompoundPolicy,
},
RollingFileAppender,
},
},
config::{Appender, Root},
encode::{pattern::PatternEncoder, Encode, Write},
Config, Logger as L4RSLogger,
};
use nu_ansi_term::Color;
use once_cell::sync::Lazy;
use rand::Rng;
use typed_builder::TypedBuilder;
fn colored_level(level: Level) -> ColoredString {
match level {
Level::Error => Level::Error.as_str().bright_red(),
Level::Warn => Level::Warn.as_str().yellow(),
Level::Info => Level::Info.as_str().bright_green(),
Level::Debug => Level::Debug.as_str().blue(),
Level::Trace => Level::Trace.as_str().bright_cyan(),
}
static MULTI_PROGRESS: Lazy<MultiProgress> = Lazy::new(MultiProgress::new);
static LOG_DIR: Lazy<Mutex<PathBuf>> = Lazy::new(|| Mutex::new(PathBuf::new()));
#[derive(Debug, Clone)]
pub struct Logger {
modules: Vec<(String, LevelFilter)>,
level: LevelFilter,
log_dir: Option<PathBuf>,
}
/// Given a `LevelFilter`, returns the function
/// used to format logs. The more verbose the log level,
/// the more info is displayed in each log header.
pub fn format_log(
log_level: LevelFilter,
) -> impl Fn(&mut Formatter, &Record) -> io::Result<()> + Sync + Send {
move |buf: &mut Formatter, record: &Record| match log_level {
LevelFilter::Error | LevelFilter::Warn | LevelFilter::Info => {
writeln!(
buf,
"{:width$} => {}",
colored_level(record.level()),
record.args(),
width = 5,
impl Logger {
const TRIGGER_FILE_SIZE: u64 = 10 * 1024;
const ARCHIVE_FILENAME_PATTERN: &'static str = "bluebuild-log.{}.log";
const LOG_FILENAME: &'static str = "bluebuild-log.log";
const LOG_FILE_COUNT: u32 = 4;
#[must_use]
pub fn new() -> Self {
Self::default()
}
pub fn filter_modules<I, S>(&mut self, filter_modules: I) -> &mut Self
where
I: IntoIterator<Item = (S, LevelFilter)>,
S: AsRef<str>,
{
self.modules = filter_modules
.into_iter()
.map(|(module, level)| (module.as_ref().to_string(), level))
.collect::<Vec<_>>();
self
}
pub fn filter_level(&mut self, filter_level: LevelFilter) -> &mut Self {
self.level = filter_level;
self
}
pub fn log_out_dir<P>(&mut self, path: Option<P>) -> &mut Self
where
P: AsRef<Path>,
{
self.log_dir = path.map(|p| p.as_ref().to_path_buf());
self
}
/// Initializes logging for the application.
///
/// # Panics
/// Will panic if logging is unable to be initialized.
pub fn init(&mut self) {
let home = env::var("HOME").expect("$HOME should be defined");
let log_dir = self.log_dir.as_ref().map_or_else(
|| Path::new(home.as_str()).join(".local/share/bluebuild"),
Clone::clone,
);
let mut lock = LOG_DIR.lock().expect("Should lock LOG_DIR");
lock.clone_from(&log_dir);
drop(lock);
let log_out_path = log_dir.join(Self::LOG_FILENAME);
let log_archive_pattern =
format!("{}/{}", log_dir.display(), Self::ARCHIVE_FILENAME_PATTERN);
let stderr = ConsoleAppender::builder()
.encoder(Box::new(
CustomPatternEncoder::builder()
.filter_modules(self.modules.clone())
.build(),
))
.target(log4rs::append::console::Target::Stderr)
.tty_only(true)
.build();
let file = RollingFileAppender::builder()
.encoder(Box::new(PatternEncoder::new("{d} - {l} - {m}{n}")))
.build(
log_out_path,
Box::new(CompoundPolicy::new(
Box::new(SizeTrigger::new(Self::TRIGGER_FILE_SIZE)),
Box::new(
FixedWindowRoller::builder()
.build(&log_archive_pattern, Self::LOG_FILE_COUNT)
.expect("Roller should be created"),
),
)),
)
}
LevelFilter::Debug => writeln!(
buf,
"[{} {:>width$}] => {}",
Local::now().format("%H:%M:%S"),
colored_level(record.level()),
record.args(),
width = 5,
),
LevelFilter::Trace => writeln!(
buf,
"[{} {:width$} {}:{}] => {}",
Local::now().format("%H:%M:%S"),
colored_level(record.level()),
record
.module_path()
.map_or_else(|| "", |p| p)
.bright_yellow(),
record
.line()
.map_or_else(String::new, |l| l.to_string())
.bright_green(),
record.args(),
width = 5,
),
LevelFilter::Off => Ok(()),
.expect("Must be able to create log FileAppender");
let config = Config::builder()
.appender(Appender::builder().build("stderr", Box::new(stderr)))
.appender(Appender::builder().build("file", Box::new(file)))
.build(
Root::builder()
.appender("stderr")
.appender("file")
.build(self.level),
)
.expect("Logger config should build");
let logger = L4RSLogger::new(config);
LogWrapper::new(MULTI_PROGRESS.clone(), logger)
.try_init()
.expect("LogWrapper should initialize");
}
pub fn multi_progress() -> MultiProgress {
MULTI_PROGRESS.clone()
}
}
impl Default for Logger {
fn default() -> Self {
Self {
modules: vec![],
level: LevelFilter::Info,
log_dir: None,
}
}
}
trait ColoredLevel {
fn colored(&self) -> ColoredString;
}
impl ColoredLevel for Level {
fn colored(&self) -> ColoredString {
match self {
Self::Error => Self::Error.as_str().red(),
Self::Warn => Self::Warn.as_str().yellow(),
Self::Info => Self::Info.as_str().green(),
Self::Debug => Self::Debug.as_str().blue(),
Self::Trace => Self::Trace.as_str().cyan(),
}
}
}
pub trait CommandLogging {
/// Prints each line of stdout/stderr with an image ref string
/// and a progress spinner. This helps to keep track of every
/// build running in parallel.
///
/// # Errors
/// Will error if there was an issue executing the process.
fn status_image_ref_progress<T, U>(self, image_ref: T, message: U) -> Result<ExitStatus>
where
T: AsRef<str>,
U: AsRef<str>;
}
impl CommandLogging for Command {
fn status_image_ref_progress<T, U>(mut self, image_ref: T, message: U) -> Result<ExitStatus>
where
T: AsRef<str>,
U: AsRef<str>,
{
let ansi_color = gen_random_ansi_color();
let name = color_str(&image_ref, ansi_color);
let short_name = color_str(shorten_name(&image_ref), ansi_color);
let log_prefix = Arc::new(log_header(short_name));
let (reader, writer) = os_pipe::pipe()?;
self.stdout(writer.try_clone()?).stderr(writer);
let progress = Logger::multi_progress()
.add(ProgressBar::new_spinner().with_message(format!("{} {name}", message.as_ref())));
progress.enable_steady_tick(Duration::from_millis(100));
let mut child = self.spawn()?;
// We drop the `Command` to prevent blocking on writer
// https://docs.rs/os_pipe/latest/os_pipe/#examples
drop(self);
let reader = BufReader::new(reader);
let log_file_path = {
let lock = LOG_DIR.lock().expect("Should lock LOG_DIR");
lock.join(format!(
"{}.log",
image_ref.as_ref().replace(['/', ':', '.'], "_")
))
};
let log_file = OpenOptions::new()
.create(true)
.append(true)
.open(log_file_path.as_path())?;
thread::spawn(move || {
let mp = Logger::multi_progress();
reader.lines().for_each(|line| {
if let Ok(l) = line {
let text = format!("{log_prefix} {l}");
if mp.is_hidden() {
eprintln!("{text}");
} else {
mp.println(text).unwrap();
}
if let Err(e) = writeln!(&log_file, "{l}") {
warn!(
"Failed to write to log for build {}: {e:?}",
log_file_path.display()
);
}
}
});
});
let status = child.wait()?;
progress.finish();
Logger::multi_progress().remove(&progress);
Ok(status)
}
}
#[derive(Debug, TypedBuilder)]
struct CustomPatternEncoder {
#[builder(default, setter(into))]
filter_modules: Vec<(String, LevelFilter)>,
}
impl Encode for CustomPatternEncoder {
fn encode(&self, w: &mut dyn Write, record: &Record) -> anyhow::Result<()> {
if record.module_path().is_some_and(|mp| {
self.filter_modules
.iter()
.any(|(module, level)| mp.contains(module) && *level <= record.level())
}) {
Ok(())
} else {
match log::max_level() {
LevelFilter::Error | LevelFilter::Warn | LevelFilter::Info => Ok(writeln!(
w,
"{prefix} {args}",
prefix = log_header(format!(
"{level:width$}",
level = record.level().colored(),
width = 5,
)),
args = record.args(),
)?),
LevelFilter::Debug => Ok(writeln!(
w,
"{prefix} {args}",
prefix = log_header(format!(
"{level:>width$}",
level = record.level().colored(),
width = 5,
)),
args = record.args(),
)?),
LevelFilter::Trace => Ok(writeln!(
w,
"{prefix} {args}",
prefix = log_header(format!(
"{level:width$} {module}:{line}",
level = record.level().colored(),
width = 5,
module = record
.module_path()
.map_or_else(|| "", |p| p)
.bright_yellow(),
line = record
.line()
.map_or_else(String::new, |l| l.to_string())
.bright_green(),
)),
args = record.args(),
)?),
LevelFilter::Off => Ok(()),
}
}
}
}
/// Used to keep the style of logs consistent between
/// normal log use and command output.
fn log_header<T: AsRef<str>>(text: T) -> String {
let text = text.as_ref();
match log::max_level() {
LevelFilter::Error | LevelFilter::Warn | LevelFilter::Info => {
format!("{text} {sep}", sep = "=>".bold())
}
LevelFilter::Debug | LevelFilter::Trace => format!(
"[{time} {text}] {sep}",
time = Local::now().format("%H:%M:%S"),
sep = "=>".bold(),
),
LevelFilter::Off => String::new(),
}
}
/// Shortens the image name so that it won't take up the
/// entire width of the terminal. This is a similar format
/// to what Earthly does in their terminal output for long
/// images on their log prefix output.
///
/// # Examples
/// `ghcr.io/blue-build/cli:latest` -> `g.i/b/cli:latest`
/// `registry.gitlab.com/some/namespace/image:latest` -> `r.g.c/s/n/image:latest`
#[must_use]
fn shorten_name<T>(text: T) -> String
where
T: AsRef<str>,
{
let text = text.as_ref();
// Split the reference by colon to separate the tag or digest
let mut parts = text.split(':');
let path = match parts.next() {
None => return text.to_string(),
Some(path) => path,
};
let tag = parts.next();
// Split the path by slash to work on each part
let path_parts: Vec<&str> = path.split('/').collect();
// Shorten each part except the last one to their initial letters
let shortened_parts: Vec<String> = path_parts
.iter()
.enumerate()
.map(|(i, part)| {
if i < path_parts.len() - 1 {
// Split on '.' and shorten each section
part.split('.')
.filter_map(|p| p.chars().next())
.map(|c| c.to_string())
.collect::<Vec<String>>()
.join(".")
} else {
(*part).into() // Keep the last part as it is
}
})
.collect();
// Rejoin the parts with '/'
let joined_path = shortened_parts.join("/");
// If there was a tag, append it back with ':', otherwise just return the path
match tag {
Some(t) => format!("{joined_path}:{t}"),
None => joined_path,
}
}
fn gen_random_ansi_color() -> u8 {
// ANSI extended color range
// https://www.ditig.com/publications/256-colors-cheat-sheet
const LOW_END: u8 = 21; // Blue1 #0000ff rgb(0,0,255) hsl(240,100%,50%)
const HIGH_END: u8 = 230; // Cornsilk1 #ffffd7 rgb(255,255,215) hsl(60,100%,92%)
rand::thread_rng().gen_range(LOW_END..=HIGH_END)
}
fn color_str<T>(text: T, ansi_color: u8) -> String
where
T: AsRef<str>,
{
if ShouldColorize::from_env().should_colorize() {
Color::Fixed(ansi_color)
.paint(text.as_ref().to_string())
.to_string()
} else {
text.as_ref().to_string()
}
}