internal/cloud/gcp: delete unused internal API

Delete all internal `cloud/gcp` API related to importing virtual images
into GCP using the Cloud Build API. This API is no longer needed.

Signed-off-by: Tomas Hozza <thozza@redhat.com>
Authored by Tomas Hozza on 2022-02-17 16:55:53 +01:00; committed by Tom Gundersen
parent 13844edcdb
commit 82a0bfc46d
7 changed files with 0 additions and 1076 deletions


@@ -83,17 +83,6 @@ func cleanupGCP(testID string, wg *sync.WaitGroup) {
}
}
// Try to clean up cache objects left in storage by the image import job
log.Println("[GCP] 🧹 Cleaning up cache objects from storage after image " +
"import. This should fail if the test succeeded.")
cacheObjects, errs := g.StorageImageImportCleanup(ctx, GCPImage)
for _, err = range errs {
log.Printf("[GCP] Error: %v", err)
}
for _, cacheObject := range cacheObjects {
log.Printf("[GCP] 🧹 Deleted image import job file %s", cacheObject)
}
// Try to find the potentially uploaded Storage objects using custom metadata
objects, err := g.StorageListObjectsByMetadata(ctx, GCPBucket, map[string]string{gcp.MetadataKeyImageName: GCPImage})
if err != nil {

go.mod

@@ -48,6 +48,5 @@ require (
golang.org/x/sys v0.0.0-20220114195835-da31bd327af9
google.golang.org/api v0.63.0
google.golang.org/genproto v0.0.0-20211221195035-429b39de9b1c
google.golang.org/protobuf v1.27.1
gopkg.in/ini.v1 v1.66.4
)


@@ -1,230 +0,0 @@
package gcp
import (
"context"
"fmt"
"io"
"regexp"
"strings"
"time"
cloudbuild "cloud.google.com/go/cloudbuild/apiv1"
"cloud.google.com/go/storage"
"google.golang.org/api/iterator"
"google.golang.org/api/option"
cloudbuildpb "google.golang.org/genproto/googleapis/devtools/cloudbuild/v1"
)
// Structure with resources created by the Build job
// Intended only for internal use
type cloudbuildBuildResources struct {
zone string
computeInstances []string
computeDisks []string
storageCacheDir struct {
bucket string
dir string
}
}
// CloudbuildBuildLog fetches the log for the provided Build ID and returns it as a string
//
// Uses:
// - Storage API
// - Cloud Build API
func (g *GCP) CloudbuildBuildLog(ctx context.Context, buildID string) (string, error) {
cloudbuildClient, err := cloudbuild.NewClient(ctx, option.WithCredentials(g.creds))
if err != nil {
return "", fmt.Errorf("failed to get Cloud Build client: %v", err)
}
defer cloudbuildClient.Close()
storageClient, err := storage.NewClient(ctx, option.WithCredentials(g.creds))
if err != nil {
return "", fmt.Errorf("failed to get Storage client: %v", err)
}
defer storageClient.Close()
getBuildReq := &cloudbuildpb.GetBuildRequest{
ProjectId: g.creds.ProjectID,
Id: buildID,
}
imageBuild, err := cloudbuildClient.GetBuild(ctx, getBuildReq)
if err != nil {
return "", fmt.Errorf("failed to get the build info: %v", err)
}
// Determine the log file's Bucket and Object name
// Logs_bucket example: "gs://550072179371.cloudbuild-logs.googleusercontent.com"
// Logs file names will be of the format `${logs_bucket}/log-${build_id}.txt`
logBucket := imageBuild.LogsBucket
logBucket = strings.TrimPrefix(logBucket, "gs://")
// logBucket may contain a directory in its name if set to a custom value
var logObjectDir string
if strings.Contains(logBucket, "/") {
ss := strings.SplitN(logBucket, "/", 2)
logBucket = ss[0]
logObjectDir = fmt.Sprintf("%s/", ss[1])
}
logObject := fmt.Sprintf("%slog-%s.txt", logObjectDir, buildID)
// Read the log
logBuilder := new(strings.Builder)
rd, err := storageClient.Bucket(logBucket).Object(logObject).NewReader(ctx)
if err != nil {
return "", fmt.Errorf("failed to create a new Reader for object '%s/%s': %v", logBucket, logObject, err)
}
_, err = io.Copy(logBuilder, rd)
if err != nil {
return "", fmt.Errorf("reading data from object '%s/%s' failed: %v", logBucket, logObject, err)
}
return logBuilder.String(), nil
}
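
For reference, a minimal sketch of how this removed helper could be called. The wrapper function is hypothetical and the imports (context, fmt) plus an initialized *gcp.GCP value are assumptions, not part of the original code:

func printBuildLog(ctx context.Context, g *gcp.GCP, buildID string) error {
    // Fetch the complete Cloud Build log for the given build ID.
    buildLog, err := g.CloudbuildBuildLog(ctx, buildID)
    if err != nil {
        return fmt.Errorf("failed to fetch the Cloud Build log: %v", err)
    }
    fmt.Print(buildLog)
    return nil
}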
// CloudbuildBuildCleanup parses the logs for the specified Build job and tries to clean up all resources
// which were created as part of the job. It returns a list of strings with all resources that were deleted
// as a result of calling this method.
//
// Uses:
// - Storage API
// - Cloud Build API
// - Compute Engine API (indirectly)
func (g *GCP) CloudbuildBuildCleanup(ctx context.Context, buildID string) ([]string, error) {
var deletedResources []string
storageClient, err := storage.NewClient(ctx, option.WithCredentials(g.creds))
if err != nil {
return deletedResources, fmt.Errorf("failed to get Storage client: %v", err)
}
defer storageClient.Close()
buildLog, err := g.CloudbuildBuildLog(ctx, buildID)
if err != nil {
return deletedResources, fmt.Errorf("failed to get log for build ID '%s': %v", buildID, err)
}
resources, err := cloudbuildResourcesFromBuildLog(buildLog)
if err != nil {
return deletedResources, fmt.Errorf("extracting created resources from build log failed: %v", err)
}
// Delete all Compute Engine instances
for _, instance := range resources.computeInstances {
err = g.ComputeInstanceDelete(ctx, resources.zone, instance)
if err == nil {
deletedResources = append(deletedResources, fmt.Sprintf("instance: %s (%s)", instance, resources.zone))
}
}
// Deleting an instance takes some time in practice, and deleting a disk while it is still attached to an instance will fail.
// Iterate over the list of instances and wait until they are all deleted.
for _, instance := range resources.computeInstances {
for {
instanceInfo, err := g.ComputeInstanceGet(ctx, resources.zone, instance)
// Getting the instance information failed, so assume it has been deleted.
if err != nil {
break
}
// Prevent an unlikely infinite loop of waiting on deletion of an instance which can't be deleted.
if instanceInfo.GetDeletionProtection() {
break
}
time.Sleep(1 * time.Second)
}
}
// Delete all Compute Engine Disks
for _, disk := range resources.computeDisks {
err = g.ComputeDiskDelete(ctx, resources.zone, disk)
if err == nil {
deletedResources = append(deletedResources, fmt.Sprintf("disk: %s (%s)", disk, resources.zone))
}
}
// Delete all Storage cache files
bucket := storageClient.Bucket(resources.storageCacheDir.bucket)
objects := bucket.Objects(ctx, &storage.Query{Prefix: resources.storageCacheDir.dir})
for {
objAttrs, err := objects.Next()
if err == iterator.Done || err == storage.ErrBucketNotExist {
break
}
if err != nil {
// Do not return, just continue with the next object
continue
}
object := storageClient.Bucket(objAttrs.Bucket).Object(objAttrs.Name)
if err = object.Delete(ctx); err == nil {
deletedResources = append(deletedResources, fmt.Sprintf("storage object: %s/%s", objAttrs.Bucket, objAttrs.Name))
}
}
return deletedResources, nil
}
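
A sketch of the usage pattern the doc comment above describes: reaping leftovers after a canceled or failed build. The wrapper name is hypothetical; imports (context, log) are assumed:

func reapBuildLeftovers(g *gcp.GCP, buildID string) {
    // Use a fresh context: the import's own context may already be canceled.
    deleted, err := g.CloudbuildBuildCleanup(context.Background(), buildID)
    if err != nil {
        log.Printf("cleanup of build %s failed: %v", buildID, err)
    }
    for _, resource := range deleted {
        log.Printf("deleted leftover resource: %s", resource)
    }
}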
// cloudbuildResourcesFromBuildLog parses the provided Cloud Build log for any
// resources that were created by the job as part of its work. The list of extracted
// resources is returned as a cloudbuildBuildResources struct instance.
func cloudbuildResourcesFromBuildLog(buildLog string) (*cloudbuildBuildResources, error) {
var resources cloudbuildBuildResources
// extract the used zone
// [inflate]: 2021-02-17T12:42:10Z Workflow Zone: europe-west1-b
zoneRe, err := regexp.Compile(`(?m)^.+Workflow Zone: (?P<zone>.+)$`)
if err != nil {
return &resources, err
}
zoneMatch := zoneRe.FindStringSubmatch(buildLog)
if zoneMatch != nil {
resources.zone = zoneMatch[1]
}
// extract Storage cache directory
// [inflate]: 2021-03-12T13:13:10Z Workflow GCSPath: gs://ascendant-braid-303513-daisy-bkt-us-central1/gce-image-import-2021-03-12T13:13:08Z-btgtd
cacheDirRe, err := regexp.Compile(`(?m)^.+Workflow GCSPath: gs://(?P<bucket>.+)/(?P<dir>.+)$`)
if err != nil {
return &resources, err
}
cacheDirMatch := cacheDirRe.FindStringSubmatch(buildLog)
if cacheDirMatch != nil {
resources.storageCacheDir.bucket = cacheDirMatch[1]
resources.storageCacheDir.dir = cacheDirMatch[2]
}
// extract Compute disks
// [inflate.setup-disks]: 2021-03-12T13:13:11Z CreateDisks: Creating disk "disk-importer-inflate-7366y".
// [inflate.setup-disks]: 2021-03-12T13:13:11Z CreateDisks: Creating disk "disk-inflate-scratch-7366y".
// [inflate.setup-disks]: 2021-03-12T13:13:11Z CreateDisks: Creating disk "disk-btgtd".
// [shadow-disk-checksum.create-disks]: 2021-03-12T17:29:54Z CreateDisks: Creating disk "disk-shadow-disk-checksum-shadow-disk-checksum-r3qxv".
disksRe, err := regexp.Compile(`(?m)^.+CreateDisks: Creating disk "(?P<disk>.+)".*$`)
if err != nil {
return &resources, err
}
disksMatches := disksRe.FindAllStringSubmatch(buildLog, -1)
for _, disksMatch := range disksMatches {
diskName := disksMatch[1]
if diskName != "" {
resources.computeDisks = append(resources.computeDisks, diskName)
}
}
// extract Compute instances
// [inflate.import-virtual-disk]: 2021-03-12T13:13:12Z CreateInstances: Creating instance "inst-importer-inflate-7366y".
// [shadow-disk-checksum.create-instance]: 2021-03-12T17:29:55Z CreateInstances: Creating instance "inst-shadow-disk-checksum-shadow-disk-checksum-r3qxv".
instancesRe, err := regexp.Compile(`(?m)^.+CreateInstances: Creating instance "(?P<instance>.+)".*$`)
if err != nil {
return &resources, err
}
instancesMatches := instancesRe.FindAllStringSubmatch(buildLog, -1)
for _, instanceMatch := range instancesMatches {
instanceName := instanceMatch[1]
if instanceName != "" {
resources.computeInstances = append(resources.computeInstances, instanceName)
}
}
return &resources, nil
}
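
A small sketch exercising the parser on a two-line log, built from the sample lines quoted in the comments above (assumed to live in the gcp package next to the parser, with fmt imported):

func demoParseBuildLog() {
    buildLog := `[inflate]: 2021-02-17T12:42:10Z Workflow Zone: europe-west1-b
[inflate.setup-disks]: 2021-03-12T13:13:11Z CreateDisks: Creating disk "disk-btgtd".`
    resources, err := cloudbuildResourcesFromBuildLog(buildLog)
    if err != nil {
        panic(err)
    }
    fmt.Println(resources.zone)         // europe-west1-b
    fmt.Println(resources.computeDisks) // [disk-btgtd]
}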


@@ -1,482 +0,0 @@
package gcp
import (
"fmt"
"testing"
"github.com/stretchr/testify/require"
)
func TestCloudbuildResourcesFromBuildLog(t *testing.T) {
testCases := []struct {
buildLog string
resources cloudbuildBuildResources
}{
{
buildLog: `2021/03/15 18:07:56 starting build "dba8bd1a-79b7-4060-a99d-c334760cba18"
FETCHSOURCE
BUILD
Pulling image: gcr.io/compute-image-tools/gce_vm_image_import:release
release: Pulling from compute-image-tools/gce_vm_image_import
0b8bbb5f50a4: Pulling fs layer
7efaa022ad36: Pulling fs layer
e5303db5f8f9: Pulling fs layer
688d304ec274: Pulling fs layer
e969b3a22ab3: Pulling fs layer
cd7b8272632b: Pulling fs layer
2175d0ddd745: Pulling fs layer
69fbd73b475e: Pulling fs layer
7a5922a992b2: Pulling fs layer
688d304ec274: Waiting
e969b3a22ab3: Waiting
cd7b8272632b: Waiting
2175d0ddd745: Waiting
69fbd73b475e: Waiting
7a5922a992b2: Waiting
e5303db5f8f9: Verifying Checksum
e5303db5f8f9: Download complete
688d304ec274: Verifying Checksum
688d304ec274: Download complete
e969b3a22ab3: Verifying Checksum
e969b3a22ab3: Download complete
0b8bbb5f50a4: Verifying Checksum
0b8bbb5f50a4: Download complete
cd7b8272632b: Verifying Checksum
cd7b8272632b: Download complete
69fbd73b475e: Verifying Checksum
69fbd73b475e: Download complete
7efaa022ad36: Verifying Checksum
7efaa022ad36: Download complete
7a5922a992b2: Verifying Checksum
7a5922a992b2: Download complete
2175d0ddd745: Verifying Checksum
2175d0ddd745: Download complete
0b8bbb5f50a4: Pull complete
7efaa022ad36: Pull complete
e5303db5f8f9: Pull complete
688d304ec274: Pull complete
e969b3a22ab3: Pull complete
cd7b8272632b: Pull complete
2175d0ddd745: Pull complete
69fbd73b475e: Pull complete
7a5922a992b2: Pull complete
Digest: sha256:d39e2c0e6a7113d989d292536e9d14e927de838cb21a24c61eb7d44fef1fa51d
Status: Downloaded newer image for gcr.io/compute-image-tools/gce_vm_image_import:release
gcr.io/compute-image-tools/gce_vm_image_import:release
[import-image]: 2021-03-12T17:29:05Z Creating Google Compute Engine disk from gs://images-bkt-us/random-object-1234
[inflate]: 2021-03-12T17:29:05Z Validating workflow
[inflate]: 2021-03-12T17:29:05Z Validating step "setup-disks"
[inflate]: 2021-03-12T17:29:06Z Validating step "import-virtual-disk"
[inflate]: 2021-03-12T17:29:06Z Validating step "wait-for-signal"
[inflate]: 2021-03-12T17:29:06Z Validating step "cleanup"
[inflate]: 2021-03-12T17:29:06Z Validation Complete
[inflate]: 2021-03-12T17:29:06Z Workflow Project: ascendant-braid-303513
[inflate]: 2021-03-12T17:29:06Z Workflow Zone: us-central1-c
[inflate]: 2021-03-12T17:29:06Z Workflow GCSPath: gs://ascendant-braid-303513-daisy-bkt-us-central1/gce-image-import-2021-03-12T17:29:03Z-qllpn
[inflate]: 2021-03-12T17:29:06Z Daisy scratch path: https://console.cloud.google.com/storage/browser/ascendant-braid-303513-daisy-bkt-us-central1/gce-image-import-2021-03-12T17:29:03Z-qllpn/daisy-inflate-20210312-17:29:05-1wghm
[inflate]: 2021-03-12T17:29:06Z Uploading sources
[inflate]: 2021-03-12T17:29:07Z Running workflow
[inflate]: 2021-03-12T17:29:07Z Running step "setup-disks" (CreateDisks)
[inflate.setup-disks]: 2021-03-12T17:29:07Z CreateDisks: Creating disk "disk-importer-inflate-1wghm".
[inflate.setup-disks]: 2021-03-12T17:29:07Z CreateDisks: Creating disk "disk-qllpn".
[inflate.setup-disks]: 2021-03-12T17:29:07Z CreateDisks: Creating disk "disk-inflate-scratch-1wghm".
[inflate]: 2021-03-12T17:29:08Z Step "setup-disks" (CreateDisks) successfully finished.
[inflate]: 2021-03-12T17:29:08Z Running step "import-virtual-disk" (CreateInstances)
[inflate.import-virtual-disk]: 2021-03-12T17:29:08Z CreateInstances: Creating instance "inst-importer-inflate-1wghm".
[inflate]: 2021-03-12T17:29:18Z Step "import-virtual-disk" (CreateInstances) successfully finished.
[inflate.import-virtual-disk]: 2021-03-12T17:29:18Z CreateInstances: Streaming instance "inst-importer-inflate-1wghm" serial port 1 output to https://storage.cloud.google.com/ascendant-braid-303513-daisy-bkt-us-central1/gce-image-import-2021-03-12T17:29:03Z-qllpn/daisy-inflate-20210312-17:29:05-1wghm/logs/inst-importer-inflate-1wghm-serial-port1.log
[inflate]: 2021-03-12T17:29:18Z Running step "wait-for-signal" (WaitForInstancesSignal)
[inflate.wait-for-signal]: 2021-03-12T17:29:18Z WaitForInstancesSignal: Instance "inst-importer-inflate-1wghm": watching serial port 1, SuccessMatch: "ImportSuccess:", FailureMatch: ["ImportFailed:" "WARNING Failed to download metadata script" "Failed to download GCS path" "Worker instance terminated"] (this is not an error), StatusMatch: "Import:".
[inflate.wait-for-signal]: 2021-03-12T17:29:28Z WaitForInstancesSignal: Instance "inst-importer-inflate-1wghm": StatusMatch found: "Import: Ensuring disk-inflate-scratch-1wghm has capacity of 3 GB in projects/550072179371/zones/us-central1-c."
[inflate.wait-for-signal]: 2021-03-12T17:29:28Z WaitForInstancesSignal: Instance "inst-importer-inflate-1wghm": StatusMatch found: "Import: /dev/sdb is attached and ready."
[inflate.wait-for-signal]: 2021-03-12T17:29:48Z WaitForInstancesSignal: Instance "inst-importer-inflate-1wghm": StatusMatch found: "Import: Copied image from gs://images-bkt-us/random-object-1234 to /daisy-scratch/random-object-1234:"
[inflate.wait-for-signal]: 2021-03-12T17:29:48Z WaitForInstancesSignal: Instance "inst-importer-inflate-1wghm": StatusMatch found: "Import: Importing /daisy-scratch/random-object-1234 of size 2GB to disk-qllpn in projects/550072179371/zones/us-central1-c."
[inflate.wait-for-signal]: 2021-03-12T17:29:48Z WaitForInstancesSignal: Instance "inst-importer-inflate-1wghm": StatusMatch found: "Import: <serial-output key:'target-size-gb' value:'2'>"
[inflate.wait-for-signal]: 2021-03-12T17:29:48Z WaitForInstancesSignal: Instance "inst-importer-inflate-1wghm": StatusMatch found: "Import: <serial-output key:'source-size-gb' value:'2'>"
[inflate.wait-for-signal]: 2021-03-12T17:29:48Z WaitForInstancesSignal: Instance "inst-importer-inflate-1wghm": StatusMatch found: "Import: <serial-output key:'import-file-format' value:'vmdk'>"
[inflate.wait-for-signal]: 2021-03-12T17:29:48Z WaitForInstancesSignal: Instance "inst-importer-inflate-1wghm": StatusMatch found: "Import: Ensuring disk-qllpn has capacity of 2 GB in projects/550072179371/zones/us-central1-c."
[inflate.wait-for-signal]: 2021-03-12T17:29:48Z WaitForInstancesSignal: Instance "inst-importer-inflate-1wghm": StatusMatch found: "Import: /dev/sdc is attached and ready."
[debug]: 2021-03-12T17:29:52Z Started checksum calculation.
[shadow-disk-checksum]: 2021-03-12T17:29:53Z Validating workflow
[shadow-disk-checksum]: 2021-03-12T17:29:53Z Validating step "create-disks"
[shadow-disk-checksum]: 2021-03-12T17:29:53Z Validating step "create-instance"
[shadow-disk-checksum]: 2021-03-12T17:29:54Z Validating step "wait-for-checksum"
[shadow-disk-checksum]: 2021-03-12T17:29:54Z Validation Complete
[shadow-disk-checksum]: 2021-03-12T17:29:54Z Workflow Project: ascendant-braid-303513
[shadow-disk-checksum]: 2021-03-12T17:29:54Z Workflow Zone: us-central1-c
[shadow-disk-checksum]: 2021-03-12T17:29:54Z Workflow GCSPath: gs://ascendant-braid-303513-daisy-bkt-us-central1/gce-image-import-2021-03-12T17:29:03Z-qllpn
[shadow-disk-checksum]: 2021-03-12T17:29:54Z Daisy scratch path: https://console.cloud.google.com/storage/browser/ascendant-braid-303513-daisy-bkt-us-central1/gce-image-import-2021-03-12T17:29:03Z-qllpn/daisy-shadow-disk-checksum-20210312-17:29:53-r3qxv
[shadow-disk-checksum]: 2021-03-12T17:29:54Z Uploading sources
[shadow-disk-checksum]: 2021-03-12T17:29:54Z Running workflow
[shadow-disk-checksum]: 2021-03-12T17:29:54Z Running step "create-disks" (CreateDisks)
[shadow-disk-checksum.create-disks]: 2021-03-12T17:29:54Z CreateDisks: Creating disk "disk-shadow-disk-checksum-shadow-disk-checksum-r3qxv".
[shadow-disk-checksum]: 2021-03-12T17:29:55Z Step "create-disks" (CreateDisks) successfully finished.
[shadow-disk-checksum]: 2021-03-12T17:29:55Z Running step "create-instance" (CreateInstances)
[shadow-disk-checksum.create-instance]: 2021-03-12T17:29:55Z CreateInstances: Creating instance "inst-shadow-disk-checksum-shadow-disk-checksum-r3qxv".
[shadow-disk-checksum]: 2021-03-12T17:30:03Z Error running workflow: step "create-instance" run error: operation failed &{ClientOperationId: CreationTimestamp: Description: EndTime:2021-03-12T09:30:02.764-08:00 Error:0xc000454460 HttpErrorMessage:FORBIDDEN HttpErrorStatusCode:403 Id:4817697984863411195 InsertTime:2021-03-12T09:29:56.910-08:00 Kind:compute#operation Name:operation-1615570195674-5bd5a3f9f770d-10b61845-7bf8f2ab OperationType:insert Progress:100 Region: SelfLink:https://www.googleapis.com/compute/v1/projects/ascendant-braid-303513/zones/us-central1-c/operations/operation-1615570195674-5bd5a3f9f770d-10b61845-7bf8f2ab StartTime:2021-03-12T09:29:56.913-08:00 Status:DONE StatusMessage: TargetId:2023576705161370619 TargetLink:https://www.googleapis.com/compute/v1/projects/ascendant-braid-303513/zones/us-central1-c/instances/inst-shadow-disk-checksum-shadow-disk-checksum-r3qxv User:550072179371@cloudbuild.gserviceaccount.com Warnings:[] Zone:https://www.googleapis.com/compute/v1/projects/ascendant-braid-303513/zones/us-central1-c ServerResponse:{HTTPStatusCode:200 Header:map[Cache-Control:[private] Content-Type:[application/json; charset=UTF-8] Date:[Fri, 12 Mar 2021 17:30:03 GMT] Server:[ESF] Vary:[Origin X-Origin Referer] X-Content-Type-Options:[nosniff] X-Frame-Options:[SAMEORIGIN] X-Xss-Protection:[0]]} ForceSendFields:[] NullFields:[]}:
Code: QUOTA_EXCEEDED
Message: Quota 'CPUS' exceeded. Limit: 24.0 in region us-central1.
[shadow-disk-checksum]: 2021-03-12T17:30:03Z Workflow "shadow-disk-checksum" cleaning up (this may take up to 2 minutes).
[shadow-disk-checksum]: 2021-03-12T17:30:03Z Workflow "shadow-disk-checksum" finished cleanup.
[inflate.wait-for-signal]: 2021-03-12T17:30:38Z WaitForInstancesSignal: Instance "inst-importer-inflate-1wghm": StatusMatch found: "Import: <serial-output key:'disk-checksum' value:'5ed32cb4d9d9dd17cba6da1b3903edaa --d29c6650c73d602034fea42869a545ae --75a1e608e6f1c50758f4fee5a7d8e3d0 --75a1e608e6f1c50758f4fee5a7d8e3d0 -'>"
[inflate.wait-for-signal]: 2021-03-12T17:30:38Z WaitForInstancesSignal: Instance "inst-importer-inflate-1wghm": SuccessMatch found "ImportSuccess: Finished import."
[inflate]: 2021-03-12T17:30:38Z Step "wait-for-signal" (WaitForInstancesSignal) successfully finished.
[inflate]: 2021-03-12T17:30:38Z Running step "cleanup" (DeleteResources)
[inflate.cleanup]: 2021-03-12T17:30:38Z DeleteResources: Deleting instance "inst-importer".
[inflate]: 2021-03-12T17:31:05Z Step "cleanup" (DeleteResources) successfully finished.
[inflate]: 2021-03-12T17:31:05Z Serial-output value -> disk-checksum:5ed32cb4d9d9dd17cba6da1b3903edaa --d29c6650c73d602034fea42869a545ae --75a1e608e6f1c50758f4fee5a7d8e3d0 --75a1e608e6f1c50758f4fee5a7d8e3d0 -
[inflate]: 2021-03-12T17:31:05Z Serial-output value -> target-size-gb:2
[inflate]: 2021-03-12T17:31:05Z Serial-output value -> source-size-gb:2
[inflate]: 2021-03-12T17:31:05Z Serial-output value -> import-file-format:vmdk
[inflate]: 2021-03-12T17:31:05Z Workflow "inflate" cleaning up (this may take up to 2 minutes).
[inflate]: 2021-03-12T17:31:07Z Workflow "inflate" finished cleanup.
[import-image]: 2021-03-12T17:31:07Z Finished creating Google Compute Engine disk
[import-image]: 2021-03-12T17:31:07Z Creating image "image-dea5f2fb8b6e3826653cfc02ef648c8f8a3be5cb0501aeb75dcd6147"
PUSH
DONE
`,
resources: cloudbuildBuildResources{
zone: "us-central1-c",
computeInstances: []string{
"inst-importer-inflate-1wghm",
"inst-shadow-disk-checksum-shadow-disk-checksum-r3qxv",
},
computeDisks: []string{
"disk-qllpn",
"disk-importer-inflate-1wghm",
"disk-inflate-scratch-1wghm",
"disk-shadow-disk-checksum-shadow-disk-checksum-r3qxv",
},
storageCacheDir: struct {
bucket string
dir string
}{
bucket: "ascendant-braid-303513-daisy-bkt-us-central1",
dir: "gce-image-import-2021-03-12T17:29:03Z-qllpn",
},
},
},
{
buildLog: `starting build "cbf2e886-a81f-4761-9d39-0a03c3579996"
FETCHSOURCE
BUILD
Pulling image: gcr.io/compute-image-tools/gce_vm_image_import:release
release: Pulling from compute-image-tools/gce_vm_image_import
0b8bbb5f50a4: Pulling fs layer
7efaa022ad36: Pulling fs layer
e5303db5f8f9: Pulling fs layer
688d304ec274: Pulling fs layer
e969b3a22ab3: Pulling fs layer
cd7b8272632b: Pulling fs layer
2175d0ddd745: Pulling fs layer
69fbd73b475e: Pulling fs layer
7a5922a992b2: Pulling fs layer
688d304ec274: Waiting
e969b3a22ab3: Waiting
cd7b8272632b: Waiting
2175d0ddd745: Waiting
69fbd73b475e: Waiting
7a5922a992b2: Waiting
e5303db5f8f9: Verifying Checksum
e5303db5f8f9: Download complete
688d304ec274: Download complete
e969b3a22ab3: Verifying Checksum
e969b3a22ab3: Download complete
0b8bbb5f50a4: Verifying Checksum
0b8bbb5f50a4: Download complete
cd7b8272632b: Verifying Checksum
cd7b8272632b: Download complete
69fbd73b475e: Verifying Checksum
69fbd73b475e: Download complete
7a5922a992b2: Verifying Checksum
7a5922a992b2: Download complete
7efaa022ad36: Verifying Checksum
7efaa022ad36: Download complete
2175d0ddd745: Verifying Checksum
2175d0ddd745: Download complete
0b8bbb5f50a4: Pull complete
7efaa022ad36: Pull complete
e5303db5f8f9: Pull complete
688d304ec274: Pull complete
e969b3a22ab3: Pull complete
cd7b8272632b: Pull complete
2175d0ddd745: Pull complete
69fbd73b475e: Pull complete
7a5922a992b2: Pull complete
Digest: sha256:d39e2c0e6a7113d989d292536e9d14e927de838cb21a24c61eb7d44fef1fa51d
Status: Downloaded newer image for gcr.io/compute-image-tools/gce_vm_image_import:release
gcr.io/compute-image-tools/gce_vm_image_import:release
[import-image]: 2021-03-12T16:52:00Z Creating Google Compute Engine disk from gs://images-bkt-us/random-object-1234
[inflate]: 2021-03-12T16:52:01Z Validating workflow
[inflate]: 2021-03-12T16:52:01Z Validating step "setup-disks"
[inflate]: 2021-03-12T16:52:01Z Validating step "import-virtual-disk"
[inflate]: 2021-03-12T16:52:01Z Validating step "wait-for-signal"
[inflate]: 2021-03-12T16:52:01Z Validating step "cleanup"
[inflate]: 2021-03-12T16:52:01Z Validation Complete
[inflate]: 2021-03-12T16:52:01Z Workflow Project: ascendant-braid-303513
[inflate]: 2021-03-12T16:52:01Z Workflow Zone: us-central1-c
[inflate]: 2021-03-12T16:52:01Z Workflow GCSPath: gs://ascendant-braid-303513-daisy-bkt-us-central1/gce-image-import-2021-03-12T16:51:59Z-74mbx
[inflate]: 2021-03-12T16:52:01Z Daisy scratch path: https://console.cloud.google.com/storage/browser/ascendant-braid-303513-daisy-bkt-us-central1/gce-image-import-2021-03-12T16:51:59Z-74mbx/daisy-inflate-20210312-16:52:01-trs8w
[inflate]: 2021-03-12T16:52:01Z Uploading sources
[inflate]: 2021-03-12T16:52:01Z Running workflow
[inflate]: 2021-03-12T16:52:01Z Running step "setup-disks" (CreateDisks)
[inflate.setup-disks]: 2021-03-12T16:52:01Z CreateDisks: Creating disk "disk-74mbx".
[inflate.setup-disks]: 2021-03-12T16:52:01Z CreateDisks: Creating disk "disk-importer-inflate-trs8w".
[inflate.setup-disks]: 2021-03-12T16:52:01Z CreateDisks: Creating disk "disk-inflate-scratch-trs8w".
[inflate]: 2021-03-12T16:52:03Z Step "setup-disks" (CreateDisks) successfully finished.
[inflate]: 2021-03-12T16:52:03Z Running step "import-virtual-disk" (CreateInstances)
[inflate.import-virtual-disk]: 2021-03-12T16:52:03Z CreateInstances: Creating instance "inst-importer-inflate-trs8w".
[inflate]: 2021-03-12T16:52:17Z Step "import-virtual-disk" (CreateInstances) successfully finished.
[inflate.import-virtual-disk]: 2021-03-12T16:52:17Z CreateInstances: Streaming instance "inst-importer-inflate-trs8w" serial port 1 output to https://storage.cloud.google.com/ascendant-braid-303513-daisy-bkt-us-central1/gce-image-import-2021-03-12T16:51:59Z-74mbx/daisy-inflate-20210312-16:52:01-trs8w/logs/inst-importer-inflate-trs8w-serial-port1.log
[inflate]: 2021-03-12T16:52:17Z Running step "wait-for-signal" (WaitForInstancesSignal)
[inflate.wait-for-signal]: 2021-03-12T16:52:17Z WaitForInstancesSignal: Instance "inst-importer-inflate-trs8w": watching serial port 1, SuccessMatch: "ImportSuccess:", FailureMatch: ["ImportFailed:" "WARNING Failed to download metadata script" "Failed to download GCS path" "Worker instance terminated"] (this is not an error), StatusMatch: "Import:".
[inflate.wait-for-signal]: 2021-03-12T16:52:28Z WaitForInstancesSignal: Instance "inst-importer-inflate-trs8w": StatusMatch found: "Import: Ensuring disk-inflate-scratch-trs8w has capacity of 3 GB in projects/550072179371/zones/us-central1-c."
[inflate.wait-for-signal]: 2021-03-12T16:52:28Z WaitForInstancesSignal: Instance "inst-importer-inflate-trs8w": StatusMatch found: "Import: /dev/sdb is attached and ready."
[debug]: 2021-03-12T16:52:38Z Started checksum calculation.
[shadow-disk-checksum]: 2021-03-12T16:52:38Z Validating workflow
[shadow-disk-checksum]: 2021-03-12T16:52:38Z Validating step "create-disks"
CANCELLED
ERROR: context canceled`,
resources: cloudbuildBuildResources{
zone: "us-central1-c",
computeInstances: []string{
"inst-importer-inflate-trs8w",
},
computeDisks: []string{
"disk-74mbx",
"disk-importer-inflate-trs8w",
"disk-inflate-scratch-trs8w",
},
storageCacheDir: struct {
bucket string
dir string
}{
bucket: "ascendant-braid-303513-daisy-bkt-us-central1",
dir: "gce-image-import-2021-03-12T16:51:59Z-74mbx",
},
},
},
{
buildLog: `starting build "4f351d2a-5c07-4555-8319-ee7a8e514da1"
FETCHSOURCE
BUILD
Pulling image: gcr.io/compute-image-tools/gce_vm_image_import:release
release: Pulling from compute-image-tools/gce_vm_image_import
2e1eb53387e5: Pulling fs layer
95cc589e8a63: Pulling fs layer
3b6aa88d2880: Pulling fs layer
4cf16dbc31d9: Pulling fs layer
45c52af3bf12: Pulling fs layer
ae477f711ff8: Pulling fs layer
abf85f8ba3ed: Pulling fs layer
a06a66dd712b: Pulling fs layer
e6263716b8ac: Pulling fs layer
4cf16dbc31d9: Waiting
45c52af3bf12: Waiting
ae477f711ff8: Waiting
abf85f8ba3ed: Waiting
a06a66dd712b: Waiting
e6263716b8ac: Waiting
3b6aa88d2880: Verifying Checksum
3b6aa88d2880: Download complete
4cf16dbc31d9: Verifying Checksum
4cf16dbc31d9: Download complete
45c52af3bf12: Verifying Checksum
45c52af3bf12: Download complete
ae477f711ff8: Verifying Checksum
ae477f711ff8: Download complete
95cc589e8a63: Verifying Checksum
95cc589e8a63: Download complete
a06a66dd712b: Download complete
e6263716b8ac: Verifying Checksum
e6263716b8ac: Download complete
2e1eb53387e5: Verifying Checksum
2e1eb53387e5: Download complete
abf85f8ba3ed: Verifying Checksum
abf85f8ba3ed: Download complete
2e1eb53387e5: Pull complete
95cc589e8a63: Pull complete
3b6aa88d2880: Pull complete
4cf16dbc31d9: Pull complete
45c52af3bf12: Pull complete
ae477f711ff8: Pull complete
abf85f8ba3ed: Pull complete
a06a66dd712b: Pull complete
e6263716b8ac: Pull complete
Digest: sha256:90ccc6b8c1239f14690ade311b2a85c6e75931f903c614b4556c1024d54783b5
Status: Downloaded newer image for gcr.io/compute-image-tools/gce_vm_image_import:release
gcr.io/compute-image-tools/gce_vm_image_import:release
[import-image]: 2021-02-17T12:42:09Z Creating Google Compute Engine disk from gs://thozza-images/f32-image.vhd
[inflate]: 2021-02-17T12:42:09Z Validating workflow
[inflate]: 2021-02-17T12:42:09Z Validating step "setup-disks"
[inflate]: 2021-02-17T12:42:10Z Validating step "import-virtual-disk"
[inflate]: 2021-02-17T12:42:10Z Validating step "wait-for-signal"
[inflate]: 2021-02-17T12:42:10Z Validating step "cleanup"
[inflate]: 2021-02-17T12:42:10Z Validation Complete
[inflate]: 2021-02-17T12:42:10Z Workflow Project: ascendant-braid-303513
[inflate]: 2021-02-17T12:42:10Z Workflow Zone: europe-west1-b
[inflate]: 2021-02-17T12:42:10Z Workflow GCSPath: gs://ascendant-braid-303513-daisy-bkt-eu/gce-image-import-2021-02-17T12:42:05Z-lz55d
[inflate]: 2021-02-17T12:42:10Z Daisy scratch path: https://console.cloud.google.com/storage/browser/ascendant-braid-303513-daisy-bkt-eu/gce-image-import-2021-02-17T12:42:05Z-lz55d/daisy-inflate-20210217-12:42:09-p57zp
[inflate]: 2021-02-17T12:42:10Z Uploading sources
[inflate]: 2021-02-17T12:42:11Z Running workflow
[inflate]: 2021-02-17T12:42:11Z Running step "setup-disks" (CreateDisks)
[inflate.setup-disks]: 2021-02-17T12:42:11Z CreateDisks: Creating disk "disk-lz55d".
[inflate.setup-disks]: 2021-02-17T12:42:11Z CreateDisks: Creating disk "disk-importer-inflate-p57zp".
[inflate.setup-disks]: 2021-02-17T12:42:11Z CreateDisks: Creating disk "disk-inflate-scratch-p57zp".
[inflate]: 2021-02-17T12:42:13Z Step "setup-disks" (CreateDisks) successfully finished.
[inflate]: 2021-02-17T12:42:13Z Running step "import-virtual-disk" (CreateInstances)
[inflate.import-virtual-disk]: 2021-02-17T12:42:13Z CreateInstances: Creating instance "inst-importer-inflate-p57zp".
[inflate.import-virtual-disk]: 2021-02-17T12:42:23Z CreateInstances: Streaming instance "inst-importer-inflate-p57zp" serial port 1 output to https://storage.cloud.google.com/ascendant-braid-303513-daisy-bkt-eu/gce-image-import-2021-02-17T12:42:05Z-lz55d/daisy-inflate-20210217-12:42:09-p57zp/logs/inst-importer-inflate-p57zp-serial-port1.log
[inflate]: 2021-02-17T12:42:23Z Step "import-virtual-disk" (CreateInstances) successfully finished.
[inflate]: 2021-02-17T12:42:23Z Running step "wait-for-signal" (WaitForInstancesSignal)
[inflate.wait-for-signal]: 2021-02-17T12:42:23Z WaitForInstancesSignal: Instance "inst-importer-inflate-p57zp": watching serial port 1, SuccessMatch: "ImportSuccess:", FailureMatch: ["ImportFailed:" "WARNING Failed to download metadata script" "Failed to download GCS path" "Worker instance terminated"] (this is not an error), StatusMatch: "Import:".
[inflate.wait-for-signal]: 2021-02-17T12:42:34Z WaitForInstancesSignal: Instance "inst-importer-inflate-p57zp": StatusMatch found: "Import: Ensuring disk-inflate-scratch-p57zp has capacity of 6 GB in projects/550072179371/zones/europe-west1-b."
[inflate.wait-for-signal]: 2021-02-17T12:42:34Z WaitForInstancesSignal: Instance "inst-importer-inflate-p57zp": StatusMatch found: "Import: /dev/sdb is attached and ready."
[inflate.wait-for-signal]: 2021-02-17T12:43:43Z WaitForInstancesSignal: Instance "inst-importer-inflate-p57zp": StatusMatch found: "Import: Copied image from gs://thozza-images/f32-image.vhd to /daisy-scratch/f32-image.vhd:"
[inflate.wait-for-signal]: 2021-02-17T12:43:43Z WaitForInstancesSignal: Instance "inst-importer-inflate-p57zp": StatusMatch found: "Import: Importing /daisy-scratch/f32-image.vhd of size 5GB to disk-lz55d in projects/550072179371/zones/europe-west1-b."
[inflate.wait-for-signal]: 2021-02-17T12:43:43Z WaitForInstancesSignal: Instance "inst-importer-inflate-p57zp": StatusMatch found: "Import: <serial-output key:'target-size-gb' value:'5'>"
[inflate.wait-for-signal]: 2021-02-17T12:43:43Z WaitForInstancesSignal: Instance "inst-importer-inflate-p57zp": StatusMatch found: "Import: <serial-output key:'source-size-gb' value:'5'>"
[inflate.wait-for-signal]: 2021-02-17T12:43:43Z WaitForInstancesSignal: Instance "inst-importer-inflate-p57zp": StatusMatch found: "Import: <serial-output key:'import-file-format' value:'raw'>"
[inflate.wait-for-signal]: 2021-02-17T12:43:43Z WaitForInstancesSignal: Instance "inst-importer-inflate-p57zp": StatusMatch found: "Import: Ensuring disk-lz55d has capacity of 5 GB in projects/550072179371/zones/europe-west1-b."
[inflate.wait-for-signal]: 2021-02-17T12:43:43Z WaitForInstancesSignal: Instance "inst-importer-inflate-p57zp": StatusMatch found: "Import: /dev/sdc is attached and ready."
[inflate.wait-for-signal]: 2021-02-17T12:44:13Z WaitForInstancesSignal: Instance "inst-importer-inflate-p57zp": StatusMatch found: "Import: <serial-output key:'disk-checksum' value:'a6e310ede3ecdc9de88cd402b4709b6a --20765c2763c49ae251ce0e7762499abf --75a1e608e6f1c50758f4fee5a7d8e3d0 --75a1e608e6f1c50758f4fee5a7d8e3d0 -'>"
[inflate.wait-for-signal]: 2021-02-17T12:44:13Z WaitForInstancesSignal: Instance "inst-importer-inflate-p57zp": SuccessMatch found "ImportSuccess: Finished import."
[inflate]: 2021-02-17T12:44:13Z Step "wait-for-signal" (WaitForInstancesSignal) successfully finished.
[inflate]: 2021-02-17T12:44:13Z Running step "cleanup" (DeleteResources)
[inflate.cleanup]: 2021-02-17T12:44:13Z DeleteResources: Deleting instance "inst-importer".
[inflate]: 2021-02-17T12:44:32Z Step "cleanup" (DeleteResources) successfully finished.
[inflate]: 2021-02-17T12:44:32Z Serial-output value -> target-size-gb:5
[inflate]: 2021-02-17T12:44:32Z Serial-output value -> source-size-gb:5
[inflate]: 2021-02-17T12:44:32Z Serial-output value -> import-file-format:raw
[inflate]: 2021-02-17T12:44:32Z Serial-output value -> disk-checksum:a6e310ede3ecdc9de88cd402b4709b6a --20765c2763c49ae251ce0e7762499abf --75a1e608e6f1c50758f4fee5a7d8e3d0 --75a1e608e6f1c50758f4fee5a7d8e3d0 -
[inflate]: 2021-02-17T12:44:32Z Workflow "inflate" cleaning up (this may take up to 2 minutes).
[inflate]: 2021-02-17T12:44:38Z Workflow "inflate" finished cleanup.
[import-image]: 2021-02-17T12:44:38Z Finished creating Google Compute Engine disk
[import-image]: 2021-02-17T12:44:38Z Inspecting disk for OS and bootloader
[inspect]: 2021-02-17T12:44:38Z Validating workflow
[inspect]: 2021-02-17T12:44:38Z Validating step "run-inspection"
[inspect]: 2021-02-17T12:44:39Z Validating step "wait-for-signal"
[inspect]: 2021-02-17T12:44:39Z Validating step "cleanup"
[inspect]: 2021-02-17T12:44:39Z Validation Complete
[inspect]: 2021-02-17T12:44:39Z Workflow Project: ascendant-braid-303513
[inspect]: 2021-02-17T12:44:39Z Workflow Zone: europe-west1-b
[inspect]: 2021-02-17T12:44:39Z Workflow GCSPath: gs://ascendant-braid-303513-daisy-bkt-eu/gce-image-import-2021-02-17T12:42:05Z-lz55d
[inspect]: 2021-02-17T12:44:39Z Daisy scratch path: https://console.cloud.google.com/storage/browser/ascendant-braid-303513-daisy-bkt-eu/gce-image-import-2021-02-17T12:42:05Z-lz55d/daisy-inspect-20210217-12:44:38-t6wt4
[inspect]: 2021-02-17T12:44:39Z Uploading sources
[inspect]: 2021-02-17T12:44:42Z Running workflow
[inspect]: 2021-02-17T12:44:42Z Running step "run-inspection" (CreateInstances)
[inspect.run-inspection]: 2021-02-17T12:44:42Z CreateInstances: Creating instance "run-inspection-inspect-t6wt4".
[inspect]: 2021-02-17T12:44:51Z Step "run-inspection" (CreateInstances) successfully finished.
[inspect.run-inspection]: 2021-02-17T12:44:51Z CreateInstances: Streaming instance "run-inspection-inspect-t6wt4" serial port 1 output to https://storage.cloud.google.com/ascendant-braid-303513-daisy-bkt-eu/gce-image-import-2021-02-17T12:42:05Z-lz55d/daisy-inspect-20210217-12:44:38-t6wt4/logs/run-inspection-inspect-t6wt4-serial-port1.log
[inspect]: 2021-02-17T12:44:51Z Running step "wait-for-signal" (WaitForInstancesSignal)
[inspect.wait-for-signal]: 2021-02-17T12:44:51Z WaitForInstancesSignal: Instance "run-inspection-inspect-t6wt4": watching serial port 1, SuccessMatch: "Success:", FailureMatch: ["Failed:" "WARNING Failed to download metadata script" "Failed to download GCS path"] (this is not an error), StatusMatch: "Status:".
[inspect.wait-for-signal]: 2021-02-17T12:45:51Z WaitForInstancesSignal: Instance "run-inspection-inspect-t6wt4": StatusMatch found: "Status: <serial-output key:'inspect_pb' value:'CgkaAjMyKAIwoB84AQ=='>"
[inspect.wait-for-signal]: 2021-02-17T12:45:51Z WaitForInstancesSignal: Instance "run-inspection-inspect-t6wt4": SuccessMatch found "Success: Done!"
[inspect]: 2021-02-17T12:45:51Z Step "wait-for-signal" (WaitForInstancesSignal) successfully finished.
[inspect]: 2021-02-17T12:45:51Z Running step "cleanup" (DeleteResources)
[inspect.cleanup]: 2021-02-17T12:45:51Z DeleteResources: Deleting instance "run-inspection".
[inspect]: 2021-02-17T12:46:07Z Step "cleanup" (DeleteResources) successfully finished.
[inspect]: 2021-02-17T12:46:07Z Serial-output value -> inspect_pb:CgkaAjMyKAIwoB84AQ==
[inspect]: 2021-02-17T12:46:07Z Workflow "inspect" cleaning up (this may take up to 2 minutes).
[inspect]: 2021-02-17T12:46:11Z Workflow "inspect" finished cleanup.
[debug]: 2021-02-17T12:46:11Z Detection results: os_release:{major_version:"32" architecture:X64 distro_id:FEDORA} os_count:1
[import-image]: 2021-02-17T12:46:11Z Inspection result=os_release:{distro:"fedora" major_version:"32" architecture:X64 distro_id:FEDORA} elapsed_time_ms:93747 os_count:1
[import-image]: 2021-02-17T12:46:13Z Could not detect operating system. Please re-import with the operating system specified. For more information, see https://cloud.google.com/compute/docs/import/importing-virtual-disks#bootable
ERROR
ERROR: build step 0 "gcr.io/compute-image-tools/gce_vm_image_import:release" failed: step exited with non-zero status: 1`,
resources: cloudbuildBuildResources{
zone: "europe-west1-b",
computeInstances: []string{
"inst-importer-inflate-p57zp",
"run-inspection-inspect-t6wt4",
},
computeDisks: []string{
"disk-lz55d",
"disk-importer-inflate-p57zp",
"disk-inflate-scratch-p57zp",
},
storageCacheDir: struct {
bucket string
dir string
}{
bucket: "ascendant-braid-303513-daisy-bkt-eu",
dir: "gce-image-import-2021-02-17T12:42:05Z-lz55d",
},
},
},
{
buildLog: `starting build "21eb22bb-b92e-41f7-972d-35e75dae2a2c"
FETCHSOURCE
BUILD
Pulling image: gcr.io/compute-image-tools/gce_vm_image_import:release
release: Pulling from compute-image-tools/gce_vm_image_import
a352db2f02b6: Pulling fs layer
8e0ed4351c49: Pulling fs layer
7ef2d30124da: Pulling fs layer
7558c9498dac: Pulling fs layer
ac1adc2a272f: Pulling fs layer
1bfe08dab915: Pulling fs layer
d7a35a584f97: Pulling fs layer
14ef19cde991: Pulling fs layer
e3a2159de935: Pulling fs layer
7558c9498dac: Waiting
ac1adc2a272f: Waiting
1bfe08dab915: Waiting
d7a35a584f97: Waiting
14ef19cde991: Waiting
e3a2159de935: Waiting
7ef2d30124da: Verifying Checksum
7ef2d30124da: Download complete
7558c9498dac: Verifying Checksum
7558c9498dac: Download complete
ac1adc2a272f: Verifying Checksum
ac1adc2a272f: Download complete
a352db2f02b6: Verifying Checksum
a352db2f02b6: Download complete
8e0ed4351c49: Verifying Checksum
8e0ed4351c49: Download complete
14ef19cde991: Verifying Checksum
14ef19cde991: Download complete
1bfe08dab915: Verifying Checksum
1bfe08dab915: Download complete
e3a2159de935: Verifying Checksum
e3a2159de935: Download complete
d7a35a584f97: Verifying Checksum
d7a35a584f97: Download complete
a352db2f02b6: Pull complete
8e0ed4351c49: Pull complete
7ef2d30124da: Pull complete
7558c9498dac: Pull complete
ac1adc2a272f: Pull complete
1bfe08dab915: Pull complete
d7a35a584f97: Pull complete
14ef19cde991: Pull complete
e3a2159de935: Pull complete
Digest: sha256:63ab233c04139087154f27797efbebc9e55302f465ffb56e2dce34c2b5bf5d8a
Status: Downloaded newer image for gcr.io/compute-image-tools/gce_vm_image_import:release
gcr.io/compute-image-tools/gce_vm_image_import:release
[import-image]: 2021-04-27T08:08:40Z The resource 'image-8c27cb332db33890146b290a3989198d829ae456dabf96e5a4461147' already exists. Please pick an image name that isn't already used.
ERROR
ERROR: build step 0 "gcr.io/compute-image-tools/gce_vm_image_import:release" failed: step exited with non-zero status: 1`,
resources: cloudbuildBuildResources{},
},
}
for i, tc := range testCases {
t.Run(fmt.Sprintf("log #%d", i), func(t *testing.T) {
resources, err := cloudbuildResourcesFromBuildLog(tc.buildLog)
require.NoError(t, err)
require.NotNil(t, resources)
require.Equal(t, resources.zone, tc.resources.zone)
require.ElementsMatch(t, resources.computeDisks, tc.resources.computeDisks)
require.ElementsMatch(t, resources.computeInstances, tc.resources.computeInstances)
require.Equal(t, resources.storageCacheDir.bucket, tc.resources.storageCacheDir.bucket)
require.Equal(t, resources.storageCacheDir.dir, tc.resources.storageCacheDir.dir)
})
}
}


@@ -2,20 +2,13 @@ package gcp
import (
"context"
"crypto/rand"
"fmt"
"math/big"
"strings"
"time"
cloudbuild "cloud.google.com/go/cloudbuild/apiv1"
compute "cloud.google.com/go/compute/apiv1"
"github.com/osbuild/osbuild-composer/internal/common"
"google.golang.org/api/iterator"
"google.golang.org/api/option"
computepb "google.golang.org/genproto/googleapis/cloud/compute/v1"
cloudbuildpb "google.golang.org/genproto/googleapis/devtools/cloudbuild/v1"
"google.golang.org/protobuf/types/known/durationpb"
)
// Guest OS Features for RHEL8 images
@@ -55,171 +48,6 @@ func GuestOsFeaturesByDistro(distroName string) []*computepb.GuestOsFeature {
}
}
// ComputeImageImport imports a previously uploaded image by submitting a Cloud Build API
// job. The job builds an image in Compute Engine from an image uploaded to
// Storage.
//
// The Build job usually creates a number of cache files in Storage.
// This method does not do any cleanup, regardless of whether the image import succeeds or fails.
//
// To delete the Storage object (image) used for the image import, use StorageObjectDelete().
//
// To delete all potentially left over resources after the Build job, use CloudbuildBuildCleanup().
// This is especially important in case the image import is cancelled via the passed Context.
// Cancelling the build leaves behind all resources that it created - instances and disks.
// Therefore if you don't clean up the resources, they'll continue running and costing you money.
//
// bucket - Google storage bucket name with the uploaded image
// object - Google storage object name of the uploaded image
// imageName - Desired image name after the import. This must be unique within the whole project.
// os - Specifies the OS type used when installing GCP guest tools.
// If empty (""), then the image is imported without the installation of GCP guest tools.
// Valid values are: centos-7, centos-8, debian-8, debian-9, opensuse-15, rhel-6,
// rhel-6-byol, rhel-7, rhel-7-byol, rhel-8, rhel-8-byol, sles-12,
// sles-12-byol, sles-15, sles-15-byol, sles-sap-12, sles-sap-12-byol,
// sles-sap-15, sles-sap-15-byol, ubuntu-1404, ubuntu-1604, ubuntu-1804,
// ubuntu-2004, windows-10-x64-byol, windows-10-x86-byol,
// windows-2008r2, windows-2008r2-byol, windows-2012, windows-2012-byol,
// windows-2012r2, windows-2012r2-byol, windows-2016, windows-2016-byol,
// windows-2019, windows-2019-byol, windows-7-x64-byol,
// windows-7-x86-byol, windows-8-x64-byol, windows-8-x86-byol
// region - A valid region where the resulting image should be located. If empty,
// the multi-region location closest to the source is chosen automatically.
// See: https://cloud.google.com/storage/docs/locations
//
// Uses:
// - Cloud Build API
func (g *GCP) ComputeImageImport(ctx context.Context, bucket, object, imageName, os, region string) (*cloudbuildpb.Build, error) {
cloudbuildClient, err := cloudbuild.NewClient(ctx, option.WithCredentials(g.creds))
if err != nil {
return nil, fmt.Errorf("failed to get Cloud Build client: %v", err)
}
defer cloudbuildClient.Close()
buildStepArgs := []string{
fmt.Sprintf("-source_file=gs://%s/%s", bucket, object),
fmt.Sprintf("-image_name=%s", imageName),
"-timeout=7000s",
"-client_id=api",
}
if region != "" {
buildStepArgs = append(buildStepArgs, fmt.Sprintf("-storage_location=%s", region))
// Set the region to be used by the daisy workflow when creating resources
// If not specified, the workflow seems to always default to us-central1.
// The Region passed as the argument is a Google Storage Region, which can be a multi or dual region.
// Multi and Dual Regions don't work with the GCE API, therefore we need to get the list of GCE Regions
// that they map to. If the passed Region is not a multi or dual Region, then the returned slice contains
// only the Region passed as an argument.
gceRegions, err := g.storageRegionToComputeRegions(ctx, region)
if err != nil {
return nil, fmt.Errorf("failed to translate Google Storage Region to GCE Region: %v", err)
}
// Pick a random GCE Region to be used by the image import workflow
gceRegionIndex, err := rand.Int(rand.Reader, big.NewInt(int64(len(gceRegions))))
if err != nil {
return nil, fmt.Errorf("failed to pick random GCE Region: %v", err)
}
// The expectation is that Google won't have more regions listed for multi/dual
// regions than what can potentially fit into int32.
gceRegion := gceRegions[int(gceRegionIndex.Int64())]
availableZones, err := g.ComputeZonesInRegion(ctx, gceRegion)
if err != nil {
return nil, fmt.Errorf("failed to get available GCE Zones within Region '%s': %v", region, err)
}
// Pick random zone from the list
gceZoneIndex, err := rand.Int(rand.Reader, big.NewInt(int64(len(availableZones))))
if err != nil {
return nil, fmt.Errorf("failed to pick random GCE Zone: %v", err)
}
// The expectation is that Google won't have more zones in a region than can fit into an int32
zone := availableZones[int(gceZoneIndex.Int64())]
buildStepArgs = append(buildStepArgs, fmt.Sprintf("-zone=%s", zone))
}
if os != "" {
buildStepArgs = append(buildStepArgs, fmt.Sprintf("-os=%s", os))
} else {
// This effectively marks the image as non-bootable for the import process,
// but it has no effect on the later use or booting in Compute Engine other
// than the GCP guest tools not being installed.
buildStepArgs = append(buildStepArgs, "-data_disk")
}
imageBuild := &cloudbuildpb.Build{
Steps: []*cloudbuildpb.BuildStep{{
Name: "gcr.io/compute-image-tools/gce_vm_image_import:release",
Args: buildStepArgs,
}},
Tags: []string{
"gce-daisy",
"gce-daisy-image-import",
},
Timeout: durationpb.New(time.Second * 7200),
}
createBuildReq := &cloudbuildpb.CreateBuildRequest{
ProjectId: g.GetProjectID(),
Build: imageBuild,
}
resp, err := cloudbuildClient.CreateBuild(ctx, createBuildReq)
if err != nil {
return nil, fmt.Errorf("failed to create image import build job: %v", err)
}
// Get the returned Build struct
buildOpMetadata := &cloudbuildpb.BuildOperationMetadata{}
if err := resp.Metadata.UnmarshalTo(buildOpMetadata); err != nil {
return nil, err
}
imageBuild = buildOpMetadata.Build
getBuildReq := &cloudbuildpb.GetBuildRequest{
ProjectId: imageBuild.ProjectId,
Id: imageBuild.Id,
}
// Wait for the build to finish
for {
select {
case <-time.After(30 * time.Second):
// Just check the build status below
case <-ctx.Done():
// cancel the build
cancelBuildReq := &cloudbuildpb.CancelBuildRequest{
ProjectId: imageBuild.ProjectId,
Id: imageBuild.Id,
}
// since the provided ctx has been canceled, create a new one to cancel the build
ctx = context.Background()
// Cancelling the build leaves behind all resources that it created (instances and disks)
imageBuild, err = cloudbuildClient.CancelBuild(ctx, cancelBuildReq)
if err != nil {
return imageBuild, fmt.Errorf("failed to cancel the image import build job: %v", err)
}
}
imageBuild, err = cloudbuildClient.GetBuild(ctx, getBuildReq)
if err != nil {
return imageBuild, fmt.Errorf("failed to get the build info: %v", err)
}
// The build finished
if imageBuild.Status != cloudbuildpb.Build_WORKING && imageBuild.Status != cloudbuildpb.Build_QUEUED {
break
}
}
if imageBuild.Status != cloudbuildpb.Build_SUCCESS {
return imageBuild, fmt.Errorf("image import didn't finish successfully: %s", imageBuild.Status)
}
return imageBuild, nil
}
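
A sketch (not from the original code) tying the two removed helpers together as the doc comment advises. The wrapper, the hardcoded "rhel-8" os value, and the "us" region are illustrative assumptions; imports (context, log) are assumed:

func importWithCleanup(ctx context.Context, g *gcp.GCP, bucket, object, imageName string) error {
    imageBuild, err := g.ComputeImageImport(ctx, bucket, object, imageName, "rhel-8", "us")
    if err != nil && imageBuild != nil {
        // The build failed or was canceled; try to reap instances, disks and cache files.
        if deleted, cleanupErr := g.CloudbuildBuildCleanup(context.Background(), imageBuild.Id); cleanupErr == nil {
            log.Printf("cleaned up leftover resources: %v", deleted)
        }
    }
    return err
}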
// ComputeImageInsert imports a previously uploaded archive with raw image into Compute Engine.
//
// The image must be RAW image named 'disk.raw' inside a gzip-ed tarball.
@@ -507,84 +335,6 @@ func (g *GCP) ComputeDiskDelete(ctx context.Context, zone, disk string) error {
return err
}
// storageRegionToComputeRegions translates a Google Storage Region to GCE Regions.
// This is useful mainly for multi and dual Storage Regions. For each valid multi
// or dual Region name, a slice with relevant GCE Regions is returned. If the
// Region provided as an argument is not a multi or dual Region, a slice with the
// provided argument as the only item is returned.
//
// In general, Storage Regions correspond to the Compute Engine Regions. However,
// Storage also allows Multi and Dual Regions, which must be mapped to GCE Regions,
// since these cannot be used in GCE API calls.
//
// Uses:
// - Compute Engine API
func (g *GCP) storageRegionToComputeRegions(ctx context.Context, region string) ([]string, error) {
regionLower := strings.ToLower(region)
// Handle Dual-Regions
// https://cloud.google.com/storage/docs/locations#location-dr
if regionLower == "asia1" {
return []string{"asia-northeast1", "asia-northeast2"}, nil
} else if regionLower == "eur4" {
return []string{"europe-north1", "europe-west4"}, nil
} else if regionLower == "nam4" {
return []string{"us-central1", "us-east1"}, nil
}
// Handle Regular Region
if regionLower != "asia" && regionLower != "eu" && regionLower != "us" {
// Just return a slice with the region, which we got as an argument
return []string{regionLower}, nil
}
// Handle Multi-Regions
// https://cloud.google.com/storage/docs/locations#location-mr
regionsClient, err := compute.NewRegionsRESTClient(ctx, option.WithCredentials(g.creds))
if err != nil {
return nil, fmt.Errorf("failed to get Compute Engine Regions client: %v", err)
}
defer regionsClient.Close()
req := &computepb.ListRegionsRequest{
Project: g.GetProjectID(),
}
regionIterator := regionsClient.List(ctx, req)
regionsMap := make(map[string][]string)
for {
regionObj, err := regionIterator.Next()
if err == iterator.Done {
break
}
if err != nil {
return nil, fmt.Errorf("encountered an error while iterating over Compute Engine Regions: %v", err)
}
regionPrefix := strings.Split(regionObj.GetName(), "-")[0]
regionsMap[regionPrefix] = append(regionsMap[regionPrefix], regionObj.GetName())
}
switch regionLower {
case "asia", "us":
return regionsMap[regionLower], nil
case "eu":
var euRegions []string
for _, euRegion := range regionsMap["europe"] {
// "europe-west2" (London) and "europe-west6" (Zurich) are excluded
// see https://cloud.google.com/storage/docs/locations#location-mr
if euRegion != "europe-west2" && euRegion != "europe-west6" {
euRegions = append(euRegions, euRegion)
}
}
return euRegions, nil
default:
// This case should never happen, since the "default" case is handled above by
// if regionLower != "asia" && regionLower != "eu" && regionLower != "us"
return nil, fmt.Errorf("failed to translate Google Storage Region '%s' to Compute Engine Region", regionLower)
}
}
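
To illustrate the mapping this helper implements, a short sketch; g and ctx are assumed to be an initialized *GCP and a context:

regions, _ := g.storageRegionToComputeRegions(ctx, "nam4")
// dual Region, hardcoded mapping: []string{"us-central1", "us-east1"}

regions, _ = g.storageRegionToComputeRegions(ctx, "europe-west1")
// not a multi/dual Region, so it comes back unchanged: []string{"europe-west1"}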
// ComputeZonesInRegion returns list of zones within the given GCE Region, which are "UP".
//
// Uses:


@@ -8,14 +8,10 @@ import (
"fmt"
"io"
"os"
"regexp"
"strings"
compute "cloud.google.com/go/compute/apiv1"
"cloud.google.com/go/storage"
"google.golang.org/api/iterator"
"google.golang.org/api/option"
computepb "google.golang.org/genproto/googleapis/cloud/compute/v1"
)
const (
@@ -103,103 +99,6 @@ func (g *GCP) StorageObjectDelete(ctx context.Context, bucket, object string) error {
return nil
}
// StorageImageImportCleanup deletes all objects created as part of an Image
// import into Compute Engine and the related Build Job. The method returns a list
// of deleted Storage objects, as well as a list of errors that occurred during
// the cleanup. The method tries to clean up as much as possible, therefore
// it does not return on non-fatal errors.
//
// The Build job stores a copy of the to-be-imported image in a region specific
// bucket, along with the Build job logs and some cache files.
//
// Uses:
// - Compute Engine API
// - Storage API
func (g *GCP) StorageImageImportCleanup(ctx context.Context, imageName string) ([]string, []error) {
var deletedObjects []string
var errors []error
storageClient, err := storage.NewClient(ctx, option.WithCredentials(g.creds))
if err != nil {
errors = append(errors, fmt.Errorf("failed to get Storage client: %v", err))
return deletedObjects, errors
}
defer storageClient.Close()
imagesClient, err := compute.NewImagesRESTClient(ctx, option.WithCredentials(g.creds))
if err != nil {
errors = append(errors, fmt.Errorf("failed to get Compute Engine Images client: %v", err))
return deletedObjects, errors
}
defer imagesClient.Close()
// Clean up the cache bucket
req := &computepb.GetImageRequest{
Project: g.GetProjectID(),
Image: imageName,
}
image, err := imagesClient.Get(ctx, req)
if err != nil {
// Without the image, we cannot determine which objects to delete, just return
errors = append(errors, fmt.Errorf("failed to get image: %v", err))
return deletedObjects, errors
}
// Determine the regular expression to match files related to the specific Image Import
// e.g. "https://www.googleapis.com/compute/v1/projects/ascendant-braid-303513/zones/europe-west1-b/disks/disk-d7tr4"
// e.g. "https://www.googleapis.com/compute/v1/projects/ascendant-braid-303513/zones/europe-west1-b/disks/disk-l7s2w-1"
// Only the part between "disk-" and a possible "-<num>" suffix (or the end of the name) is needed
ss := strings.Split(image.GetSourceDisk(), "/")
srcDiskName := ss[len(ss)-1]
ss = strings.Split(srcDiskName, "-")
if len(ss) < 2 {
errors = append(errors, fmt.Errorf("unexpected source disk name '%s', can not clean up storage", srcDiskName))
return deletedObjects, errors
}
srcDiskSuffix := ss[1]
// e.g. "gce-image-import-2021-02-05T17:27:40Z-2xhp5/daisy-import-image-20210205-17:27:43-s6l0l/logs/daisy.log"
reStr := fmt.Sprintf("gce-image-import-.+-%s", srcDiskSuffix)
cacheFilesRe := regexp.MustCompile(reStr)
buckets := storageClient.Buckets(ctx, g.creds.ProjectID)
for {
bkt, err := buckets.Next()
if err == iterator.Done {
break
}
if err != nil {
errors = append(errors, fmt.Errorf("failure while iterating over storage buckets: %v", err))
return deletedObjects, errors
}
// Check all buckets created by the Image Import Build jobs
// These are named e.g. "<project_id>-daisy-bkt-eu" - "ascendant-braid-303513-daisy-bkt-eu"
if strings.HasPrefix(bkt.Name, fmt.Sprintf("%s-daisy-bkt", g.creds.ProjectID)) {
objects := storageClient.Bucket(bkt.Name).Objects(ctx, nil)
for {
obj, err := objects.Next()
if err == iterator.Done {
break
}
if err != nil {
// Do not return, just log, to clean up as much as possible!
errors = append(errors, fmt.Errorf("failure while iterating over bucket objects: %v", err))
break
}
if cacheFilesRe.FindString(obj.Name) != "" {
o := storageClient.Bucket(bkt.Name).Object(obj.Name)
if err = o.Delete(ctx); err != nil {
// Do not return, just log, to clean up as much as possible!
errors = append(errors, fmt.Errorf("failed to delete storage object: %v", err))
}
deletedObjects = append(deletedObjects, fmt.Sprintf("%s/%s", bkt.Name, obj.Name))
}
}
}
}
return deletedObjects, errors
}
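
A sketch mirroring the caller removed in the first hunk of this commit; the image name is hypothetical, g must be an initialized *GCP, and the log import is assumed:

cacheObjects, errs := g.StorageImageImportCleanup(ctx, "my-image")
for _, err := range errs {
    log.Printf("cleanup error: %v", err)
}
for _, cacheObject := range cacheObjects {
    log.Printf("deleted image import cache object: %s", cacheObject)
}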
// StorageListObjectsByMetadata searches specified Storage bucket for objects matching the provided
// metadata. The provided metadata is used for filtering the bucket's content. Therefore if the provided
// metadata is nil, then all objects present in the bucket will be returned.

vendor/modules.txt

@@ -689,7 +689,6 @@ google.golang.org/grpc/xds/internal/xdsclient/load
google.golang.org/grpc/xds/internal/xdsclient/v2
google.golang.org/grpc/xds/internal/xdsclient/v3
# google.golang.org/protobuf v1.27.1
## explicit
google.golang.org/protobuf/encoding/protojson
google.golang.org/protobuf/encoding/prototext
google.golang.org/protobuf/encoding/protowire