chore: Remove builtin-podman code

This was not being worked on and was becoming too much trouble to maintain for our current goals.
Gerald Pinder 2024-03-23 15:54:15 -04:00
parent aa1de26ad9
commit da628db1ee
8 changed files with 37 additions and 2858 deletions


@@ -1,5 +1,5 @@
 [language-server.rust-analyzer.config]
-cargo.features = ["nightly"]
+cargo.features = []
 
 [language-server.rust-analyzer.config.check]
 command = "clippy"

Cargo.lock (generated): 2464 changed lines
File diff suppressed because it is too large.


@@ -66,16 +66,6 @@ shadow-rs = { version = "0.26" }
 urlencoding = "2.1.3"
 users = "0.11.0"
 
-# Optional Dependencies
-futures-util = { version = "0.3", optional = true }
-podman-api = { version = "0.10.0", optional = true }
-signal-hook = { version = "0.3.17", optional = true }
-signal-hook-tokio = { version = "0.3.1", features = [
-    "futures-v0_3",
-], optional = true }
-sigstore = { version = "0.8.0", optional = true }
-tokio = { version = "1", features = ["full"], optional = true }
-
 # Workspace dependencies
 anyhow.workspace = true
 log.workspace = true
@@ -87,15 +77,6 @@ uuid.workspace = true
 
 [features]
 default = []
-nightly = ["builtin-podman"]
-builtin-podman = [
-    "podman-api",
-    "tokio",
-    "futures-util",
-    "signal-hook-tokio",
-    "signal-hook",
-]
-tls = ["podman-api/tls", "builtin-podman"]
 
 [dev-dependencies]
 rusty-hook = "0.11.2"
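With the builtin-podman, nightly, and tls features gone, [features] collapses to default = []. For context, these flags gated whole code paths at compile time (see the #[cfg(feature = ...)] lines removed in the Rust hunks below). A minimal, self-contained sketch of that gating pattern, using a hypothetical crate that declares builtin-podman = [] in its own [features] table:

// main.rs of a hypothetical crate that declares `builtin-podman = []`
// under [features]. Items behind #[cfg(feature = ...)] are compiled out
// entirely when the feature is off, which is what makes this removal safe.
#[cfg(feature = "builtin-podman")]
fn build_via_podman_socket() {
    println!("building over the podman API socket");
}

fn main() {
    #[cfg(feature = "builtin-podman")]
    build_via_podman_socket();

    #[cfg(not(feature = "builtin-podman"))]
    println!("feature disabled: falling back to the CLI drivers");
}

Built with plain `cargo run`, the fallback branch runs; with `cargo run --features builtin-podman`, the gated function exists and is called.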


@@ -8,41 +8,28 @@ ARG --global IMAGE=ghcr.io/blue-build/cli
 
 all:
     BUILD +build
     BUILD ./integration-tests+all
-    BUILD +nightly
 
 build:
-    ARG NIGHTLY=false
-    BUILD +lint --NIGHTLY=$NIGHTLY
-    BUILD +test --NIGHTLY=$NIGHTLY
-    BUILD +blue-build-cli --NIGHTLY=$NIGHTLY
-    BUILD +blue-build-cli-alpine --NIGHTLY=$NIGHTLY
-    BUILD +installer --NIGHTLY=$NIGHTLY
-
-nightly:
-    BUILD +build --NIGHTLY=true
-    BUILD ./integration-tests+all --NIGHTLY=true
+    BUILD +lint
+    BUILD +test
+    BUILD +blue-build-cli
+    BUILD +blue-build-cli-alpine
+    BUILD +installer
 
 lint:
     FROM +common
-    ARG NIGHTLY=false
-    DO cargo+LINT --NIGHTLY=$NIGHTLY
+    DO cargo+LINT
 
 test:
     FROM +common
-    ARG NIGHTLY=false
-    DO cargo+TEST --NIGHTLY=$NIGHTLY
+    DO cargo+TEST
 
 install:
     FROM +common
-    ARG NIGHTLY=false
     ARG --required BUILD_TARGET
-    DO cargo+BUILD_RELEASE --BUILD_TARGET=$BUILD_TARGET --NIGHTLY=$NIGHTLY
+    DO cargo+BUILD_RELEASE --BUILD_TARGET=$BUILD_TARGET
 
     SAVE ARTIFACT target/$BUILD_TARGET/release/bluebuild
@@ -60,9 +47,8 @@ common:
 
 blue-build-cli:
     FROM registry.fedoraproject.org/fedora-toolbox
-    ARG NIGHTLY=false
-    BUILD +install --BUILD_TARGET="x86_64-unknown-linux-gnu" --NIGHTLY=$NIGHTLY
+    BUILD +install --BUILD_TARGET="x86_64-unknown-linux-gnu"
 
     RUN dnf -y install dnf-plugins-core \
         && dnf config-manager --add-repo https://download.docker.com/linux/fedora/docker-ce.repo \
@@ -79,7 +65,7 @@ blue-build-cli:
     COPY +cosign/cosign /usr/bin/cosign
-    COPY (+install/bluebuild --BUILD_TARGET="x86_64-unknown-linux-gnu" --NIGHTLY=$NIGHTLY) /usr/bin/bluebuild
+    COPY (+install/bluebuild --BUILD_TARGET="x86_64-unknown-linux-gnu") /usr/bin/bluebuild
 
     ARG TAG
     ARG LATEST=false
@@ -88,18 +74,17 @@ blue-build-cli:
 
     WORKDIR /bluebuild
     ENTRYPOINT ["bluebuild"]
-    DO cargo+SAVE_IMAGE --IMAGE=$IMAGE --TAG=$TAG --LATEST=$LATEST --NIGHTLY=$NIGHTLY
+    DO cargo+SAVE_IMAGE --IMAGE=$IMAGE --TAG=$TAG --LATEST=$LATEST
 
 blue-build-cli-alpine:
     FROM alpine
-    ARG NIGHTLY=false
-    BUILD +install --BUILD_TARGET="x86_64-unknown-linux-musl" --NIGHTLY=$NIGHTLY
+    BUILD +install --BUILD_TARGET="x86_64-unknown-linux-musl"
 
     RUN apk update && apk add buildah podman skopeo fuse-overlayfs
 
     COPY +cosign/cosign /usr/bin/cosign
-    COPY (+install/bluebuild --BUILD_TARGET="x86_64-unknown-linux-musl" --NIGHTLY=$NIGHTLY) /usr/bin/bluebuild
+    COPY (+install/bluebuild --BUILD_TARGET="x86_64-unknown-linux-musl") /usr/bin/bluebuild
 
     ARG TAG
     ARG LATEST=false
@@ -108,20 +93,19 @@ blue-build-cli-alpine:
 
     WORKDIR /bluebuild
     ENTRYPOINT ["bluebuild"]
-    DO cargo+SAVE_IMAGE --IMAGE=$IMAGE --TAG=$TAG --LATEST=$LATEST --NIGHTLY=$NIGHTLY --ALPINE=true
+    DO cargo+SAVE_IMAGE --IMAGE=$IMAGE --TAG=$TAG --LATEST=$LATEST --ALPINE=true
 
 installer:
     FROM alpine
-    ARG NIGHTLY=false
-    COPY (+install/bluebuild --BUILD_TARGET="x86_64-unknown-linux-musl" --NIGHTLY=$NIGHTLY) /out/bluebuild
+    COPY (+install/bluebuild --BUILD_TARGET="x86_64-unknown-linux-musl") /out/bluebuild
     COPY install.sh /install.sh
 
     CMD ["cat", "/install.sh"]
 
     ARG TAG
     ARG LATEST=false
-    DO cargo+SAVE_IMAGE --IMAGE=$IMAGE --TAG=$TAG --LATEST=$LATEST --NIGHTLY=$NIGHTLY --INSTALLER=true
+    DO cargo+SAVE_IMAGE --IMAGE=$IMAGE --TAG=$TAG --LATEST=$LATEST --INSTALLER=true
 
 cosign:
     FROM gcr.io/projectsigstore/cosign


@@ -2,72 +2,62 @@ VERSION 0.8
 PROJECT blue-build/cli
 
 all:
-    ARG NIGHTLY=false
-    BUILD +test-image --NIGHTLY=$NIGHTLY
-    BUILD +test-secureblue --NIGHTLY=$NIGHTLY
-    BUILD +build --NIGHTLY=$NIGHTLY
-    BUILD +rebase --NIGHTLY=$NIGHTLY
-    BUILD +upgrade --NIGHTLY=$NIGHTLY
+    BUILD +test-image
+    BUILD +test-secureblue
+    BUILD +build
+    BUILD +rebase
+    BUILD +upgrade
 
 test-image:
-    ARG NIGHTLY=false
-    FROM +build-template --NIGHTLY=$NIGHTLY --src=template-containerfile
+    FROM +build-template --src=template-containerfile
 
     WORKDIR /tmp/test
     COPY ./test-scripts/*.sh ./
 
     DO +RUN_TESTS
 
 test-secureblue:
-    ARG NIGHTLY=false
-    FROM +build-template --NIGHTLY=$NIGHTLY --src=template-secureblue
+    FROM +build-template --src=template-secureblue
 
     WORKDIR /tmp/test
     COPY ./test-scripts/secureblue/*.sh ./
 
     DO +RUN_TESTS
 
 build-template:
-    ARG NIGHTLY=false
     ARG --required src
 
     FROM DOCKERFILE \
         -f +$src/test/Containerfile \
-        +$src/test/* --NIGHTLY=$NIGHTLY
+        +$src/test/*
 
 template-containerfile:
-    ARG NIGHTLY=false
-    FROM +test-base --NIGHTLY=$NIGHTLY
+    FROM +test-base
 
     RUN bluebuild -vv template config/recipe.yml | tee Containerfile
 
     SAVE ARTIFACT /test
 
 template-secureblue:
-    ARG NIGHTLY=false
-    FROM +secureblue-base --NIGHTLY=$NIGHTLY
+    FROM +secureblue-base
 
     RUN bluebuild -vv template -o Containerfile config/recipes/general/recipe-silverblue-nvidia.yml
 
     SAVE ARTIFACT /test
 
 build:
-    ARG NIGHTLY=false
-    FROM +test-base --NIGHTLY=$NIGHTLY
+    FROM +test-base
 
     RUN --privileged bluebuild -vv build config/recipe.yml
 
 rebase:
-    ARG NIGHTLY=false
-    FROM +test-base --NIGHTLY=$NIGHTLY
+    FROM +test-base
 
     RUN --privileged bluebuild -vv rebase config/recipe.yml
 
 upgrade:
-    ARG NIGHTLY=false
-    FROM +test-base --NIGHTLY=$NIGHTLY
+    FROM +test-base
 
     RUN mkdir -p /etc/bluebuild && touch /etc/bluebuild/cli_test.tar.gz
 
     RUN --privileged bluebuild -vv upgrade config/recipe.yml
 
 secureblue-base:
-    ARG NIGHTLY=false
-    FROM +test-base --NIGHTLY=$NIGHTLY
+    FROM +test-base
 
     RUN rm -fr /test
 
     GIT CLONE https://github.com/secureblue/secureblue.git /test
@@ -75,9 +65,7 @@ secureblue-base:
 
     DO +GEN_KEYPAIR
 
 test-base:
-    ARG NIGHTLY=false
-    FROM ../+blue-build-cli-alpine --NIGHTLY=$NIGHTLY
+    FROM ../+blue-build-cli-alpine
 
     COPY ./mock-scripts/ /usr/bin/


@@ -90,55 +90,6 @@ pub struct BuildCommand {
     #[arg(short = 'P', long)]
     #[builder(default, setter(into, strip_option))]
     password: Option<String>,
-
-    /// The connection string used to connect
-    /// to a remote podman socket.
-    #[cfg(feature = "tls")]
-    #[arg(short, long)]
-    #[builder(default, setter(into, strip_option))]
-    connection: Option<String>,
-
-    /// The path to the `cert.pem`, `key.pem`,
-    /// and `ca.pem` files needed to connect to
-    /// a remote podman build socket.
-    #[cfg(feature = "tls")]
-    #[arg(long)]
-    #[builder(default, setter(into, strip_option))]
-    tls_path: Option<PathBuf>,
-
-    /// Whether to sign the image.
-    #[cfg(feature = "sigstore")]
-    #[arg(short, long)]
-    #[builder(default)]
-    sign: bool,
-
-    /// Path to the public key used to sign the image.
-    ///
-    /// If the contents of the key are in an environment
-    /// variable, you can use `env://` to specify which
-    /// variable to read from.
-    ///
-    /// For example:
-    ///
-    /// bluebuild build --public-key env://PUBLIC_KEY ...
-    #[cfg(feature = "sigstore")]
-    #[arg(long)]
-    #[builder(default, setter(into, strip_option))]
-    public_key: Option<String>,
-
-    /// Path to the private key used to sign the image.
-    ///
-    /// If the contents of the key are in an environment
-    /// variable, you can use `env://` to specify which
-    /// variable to read from.
-    ///
-    /// For example:
-    ///
-    /// bluebuild build --private-key env://PRIVATE_KEY ...
-    #[cfg(feature = "sigstore")]
-    #[arg(long)]
-    #[builder(default, setter(into, strip_option))]
-    private_key: Option<String>,
 }
 
 impl BlueBuildCommand for BuildCommand {
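The removed fields follow the crate's usual clap-derive pattern: optional flags whose fields only exist when their feature is compiled in. A minimal sketch of that flag shape on its own (assumes clap 4 with the derive feature; this stripped-down BuildCommand is an illustrative stand-in, not the real struct):

use clap::Parser;

/// Stand-in for BuildCommand showing the shape of the removed flags.
#[derive(Parser, Debug)]
struct BuildCommand {
    /// Connection string for a remote build socket.
    #[arg(short, long)]
    connection: Option<String>,
}

fn main() {
    // e.g. `cargo run -- --connection unix:///run/podman/podman.sock`
    let cmd = BuildCommand::parse();
    println!("{cmd:?}");
}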


@@ -6,33 +6,19 @@
 use std::{
     collections::{hash_map::Entry, HashMap},
     env,
-    path::PathBuf,
     process,
     sync::{Arc, Mutex},
 };
 
 use anyhow::{anyhow, bail, Result};
 use blue_build_recipe::Recipe;
-use blue_build_utils::constants::{
-    IMAGE_VERSION_LABEL, RUN_PODMAN_SOCK, VAR_RUN_PODMAN_PODMAN_SOCK, VAR_RUN_PODMAN_SOCK,
-    XDG_RUNTIME_DIR,
-};
+use blue_build_utils::constants::IMAGE_VERSION_LABEL;
 use log::{debug, error, info, trace};
 use once_cell::sync::Lazy;
 use semver::{Version, VersionReq};
 use typed_builder::TypedBuilder;
 use uuid::Uuid;
 
-#[cfg(feature = "podman-api")]
-use podman_api::Podman;
-
-#[cfg(feature = "tokio")]
-use tokio::runtime::Runtime;
-
-#[cfg(feature = "builtin-podman")]
-use podman_api_driver::PodmanApiDriver;
-
 use crate::{credentials, image_metadata::ImageMetadata};
 
 use self::{
use self::{
@@ -43,8 +29,6 @@ use self::{
 
 mod buildah_driver;
 mod docker_driver;
 pub mod opts;
-#[cfg(feature = "builtin-podman")]
-mod podman_api_driver;
 mod podman_driver;
 mod skopeo_driver;
@@ -322,64 +306,17 @@ impl Driver<'_> {
         trace!("Driver::determine_build_driver()");
 
         let driver: Arc<dyn BuildDriver> = match (
-            env::var(XDG_RUNTIME_DIR),
-            PathBuf::from(RUN_PODMAN_SOCK),
-            PathBuf::from(VAR_RUN_PODMAN_PODMAN_SOCK),
-            PathBuf::from(VAR_RUN_PODMAN_SOCK),
             blue_build_utils::check_command_exists("docker"),
             blue_build_utils::check_command_exists("podman"),
             blue_build_utils::check_command_exists("buildah"),
         ) {
-            #[cfg(feature = "builtin-podman")]
-            (Ok(xdg_runtime), _, _, _, _, _, _)
-                if PathBuf::from(format!("{xdg_runtime}/podman/podman.sock")).exists() =>
-            {
-                Arc::new(
-                    PodmanApiDriver::builder()
-                        .client(
-                            Podman::unix(PathBuf::from(format!(
-                                "{xdg_runtime}/podman/podman.sock"
-                            )))
-                            .into(),
-                        )
-                        .rt(Runtime::new()?)
-                        .build(),
-                )
-            }
-            #[cfg(feature = "builtin-podman")]
-            (_, run_podman_podman_sock, _, _, _, _, _) if run_podman_podman_sock.exists() => {
-                Arc::new(
-                    PodmanApiDriver::builder()
-                        .client(Podman::unix(run_podman_podman_sock).into())
-                        .rt(Runtime::new()?)
-                        .build(),
-                )
-            }
-            #[cfg(feature = "builtin-podman")]
-            (_, _, var_run_podman_podman_sock, _, _, _, _)
-                if var_run_podman_podman_sock.exists() =>
-            {
-                Arc::new(
-                    PodmanApiDriver::builder()
-                        .client(Podman::unix(var_run_podman_podman_sock).into())
-                        .rt(Runtime::new()?)
-                        .build(),
-                )
-            }
-            #[cfg(feature = "builtin-podman")]
-            (_, _, _, var_run_podman_sock, _, _, _) if var_run_podman_sock.exists() => Arc::new(
-                PodmanApiDriver::builder()
-                    .client(Podman::unix(var_run_podman_sock).into())
-                    .rt(Runtime::new()?)
-                    .build(),
-            ),
-            (_, _, _, _, Ok(_docker), _, _) if DockerDriver::is_supported_version() => {
+            (Ok(_docker), _, _) if DockerDriver::is_supported_version() => {
                 Arc::new(DockerDriver)
             }
-            (_, _, _, _, _, Ok(_podman), _) if PodmanDriver::is_supported_version() => {
+            (_, Ok(_podman), _) if PodmanDriver::is_supported_version() => {
                 Arc::new(PodmanDriver)
             }
-            (_, _, _, _, _, _, Ok(_buildah)) if BuildahDriver::is_supported_version() => {
+            (_, _, Ok(_buildah)) if BuildahDriver::is_supported_version() => {
                 Arc::new(BuildahDriver)
             }
             _ => bail!(
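Pieced together from the added lines above, driver selection now reduces to probing the three CLIs in priority order. A self-contained sketch of that logic, with a stand-in for check_command_exists and without the version checks (the real function and the *Driver types live in this module):

use std::process::Command;

// Stand-in for blue_build_utils::check_command_exists: succeed if the
// binary can be invoked at all.
fn check_command_exists(cmd: &str) -> Result<(), String> {
    Command::new(cmd)
        .arg("--version")
        .output()
        .map(|_| ())
        .map_err(|e| format!("{cmd} not found: {e}"))
}

fn main() {
    // Same three-way match as the new code above: first available wins.
    let driver = match (
        check_command_exists("docker"),
        check_command_exists("podman"),
        check_command_exists("buildah"),
    ) {
        (Ok(()), _, _) => "docker",
        (_, Ok(()), _) => "podman",
        (_, _, Ok(())) => "buildah",
        _ => {
            eprintln!("no supported build driver found");
            return;
        }
    };
    println!("selected driver: {driver}");
}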


@@ -1,200 +0,0 @@
-use anyhow::Context;
-use anyhow::{bail, Result};
-use blue_build_utils::constants::BUILD_ID_LABEL;
-use futures_util::StreamExt;
-use log::{debug, error};
-use log::{info, trace};
-use podman_api::{
-    opts::{
-        ContainerListOpts, ContainerPruneFilter, ContainerPruneOpts, ImageBuildOpts,
-        ImagePruneFilter, ImagePruneOpts, ImagePushOpts, ImageTagOpts, RegistryAuth,
-    },
-    Podman,
-};
-use signal_hook::consts::{SIGHUP, SIGINT, SIGQUIT, SIGTERM};
-use signal_hook_tokio::Signals;
-use std::sync::Arc;
-use tokio::{
-    runtime::Runtime,
-    sync::oneshot::{self, Sender},
-    time::{self, Duration},
-};
-use typed_builder::TypedBuilder;
-
-use crate::drivers::BUILD_ID;
-
-use super::{credentials, BuildDriver};
-
-#[derive(Debug, TypedBuilder)]
-pub struct PodmanApiDriver {
-    client: Arc<Podman>,
-    rt: Runtime,
-}
-
-impl BuildDriver for PodmanApiDriver {
-    fn build(&self, image: &str) -> Result<()> {
-        trace!("PodmanApiStrategy::build({image})");
-        self.rt.block_on(async {
-            trace!("Setting up signal listeners");
-            let signals = Signals::new([SIGTERM, SIGINT, SIGQUIT])?;
-            let handle = signals.handle();
-            let (kill_tx, mut kill_rx) = oneshot::channel::<()>();
-            let signals_task = tokio::spawn(handle_signals(signals, kill_tx, self.client.clone()));
-
-            // Get podman ready to build
-            let opts = ImageBuildOpts::builder(".")
-                .tag(image)
-                .dockerfile("Containerfile")
-                .remove(true)
-                .layers(true)
-                .labels([(BUILD_ID_LABEL, BUILD_ID.to_string())])
-                .pull(true)
-                .build();
-            trace!("Build options: {opts:#?}");
-
-            info!("Building image {image}");
-            match self.client.images().build(&opts) {
-                Ok(mut build_stream) => loop {
-                    tokio::select! {
-                        Some(chunk) = build_stream.next() => {
-                            match chunk {
-                                Ok(chunk) => chunk
-                                    .stream
-                                    .trim()
-                                    .lines()
-                                    .map(str::trim)
-                                    .filter(|line| !line.is_empty())
-                                    .for_each(|line| info!("{line}")),
-                                Err(e) => bail!("{e}"),
-                            }
-                        },
-                        _ = &mut kill_rx => {
-                            break;
-                        },
-                        else => {
-                            break;
-                        }
-                    }
-                },
-                Err(e) => bail!("{e}"),
-            };
-
-            handle.close();
-            signals_task.await?;
-            Ok(())
-        })
-    }
-
-    fn tag(&self, src_image: &str, image_name: &str, tag: &str) -> Result<()> {
-        trace!("PodmanApiStrategy::tag({src_image}, {image_name}, {tag})");
-        let first_image = self.client.images().get(src_image);
-        self.rt.block_on(async {
-            first_image
-                .tag(&ImageTagOpts::builder().repo(image_name).tag(tag).build())
-                .await
-                .context("Failed to tag image")?;
-            debug!("Tagged image {image_name}:{tag}");
-            Ok(())
-        })
-    }
-
-    fn push(&self, image: &str) -> Result<()> {
-        trace!("PodmanApiStrategy::push({image})");
-        let (username, password, registry) =
-            credentials::get().map(|c| (&c.username, &c.password, &c.registry))?;
-        trace!("Retrieved creds for user {username} on registry {registry}");
-        self.rt.block_on(async {
-            let new_image = self.client.images().get(image);
-
-            info!("Pushing {image}");
-            match new_image
-                .push(
-                    &ImagePushOpts::builder()
-                        .tls_verify(true)
-                        .auth(
-                            RegistryAuth::builder()
-                                .username(username)
-                                .password(password)
-                                .server_address(registry)
-                                .build(),
-                        )
-                        .build(),
-                )
-                .await
-            {
-                Ok(_) => info!("Pushed {image} successfully!"),
-                Err(e) => bail!("Failed to push image: {e}"),
-            };
-            Ok(())
-        })
-    }
-
-    fn login(&self) -> Result<()> {
-        trace!("PodmanApiStrategy::login()");
-        debug!("No login step for Socket based building, skipping...");
-        Ok(())
-    }
-}
-
-async fn handle_signals(mut signals: Signals, kill: Sender<()>, client: Arc<Podman>) {
-    use std::process;
-    trace!("handle_signals(signals, {client:#?})");
-    while let Some(signal) = signals.next().await {
-        match signal {
-            SIGHUP => (),
-            SIGINT => {
-                kill.send(()).unwrap();
-                info!("Received SIGINT, cleaning up build...");
-                time::sleep(Duration::from_secs(1)).await;
-                let containers = match client
-                    .containers()
-                    .list(&ContainerListOpts::builder().sync(true).all(true).build())
-                    .await
-                {
-                    Ok(list) => list,
-                    Err(e) => {
-                        error!("{e}");
-                        process::exit(1);
-                    }
-                };
-                trace!("{containers:#?}");
-
-                // Prune containers from this build
-                let container_prune_opts = ContainerPruneOpts::builder()
-                    .filter([ContainerPruneFilter::LabelKeyVal(
-                        BUILD_ID_LABEL.to_string(),
-                        BUILD_ID.to_string(),
-                    )])
-                    .build();
-                if let Err(e) = client.containers().prune(&container_prune_opts).await {
-                    error!("{e}");
-                    process::exit(1);
-                }
-                debug!("Pruned containers");
-
-                // Prune images from this build
-                let image_prune_opts = ImagePruneOpts::builder()
-                    .filter([ImagePruneFilter::LabelKeyVal(
-                        BUILD_ID_LABEL.to_string(),
-                        BUILD_ID.to_string(),
-                    )])
-                    .build();
-                if let Err(e) = client.images().prune(&image_prune_opts).await {
-                    error!("{e}");
-                    process::exit(1);
-                }
-                debug!("Pruned images");
-                process::exit(2);
-            }
-            _ => unreachable!(),
-        }
-    }
-}
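Although the driver is gone, its build() shows a reusable shape: drive a stream of build output inside tokio::select! while racing a oneshot kill channel fed by a signal handler. A minimal self-contained sketch of just that pattern (assumes only the tokio and futures-util crates, as the deleted code did):

use futures_util::{stream, StreamExt};
use tokio::sync::oneshot;

#[tokio::main]
async fn main() {
    let (kill_tx, mut kill_rx) = oneshot::channel::<()>();
    let mut build_stream = stream::iter(["step 1", "step 2", "step 3"]);

    // In the deleted driver a spawned signal task owned the sender and
    // fired it on SIGINT; holding it here keeps the kill branch pending
    // so the stream runs to completion.
    let _kill_tx = kill_tx;

    loop {
        tokio::select! {
            Some(line) = build_stream.next() => println!("{line}"),
            _ = &mut kill_rx => break, // cancellation requested
            else => break,             // stream exhausted
        }
    }
}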