github.com/GoogleContainerTools/skaffold@v1.39.18/pkg/skaffold/build/gcb/cloud_build.go

/*
Copyright 2019 The Skaffold Authors

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package gcb

import (
	"context"
	"encoding/json"
	"errors"
	"fmt"
	"io"
	"net/http"
	"strings"
	"time"

	cstorage "cloud.google.com/go/storage"
	"github.com/google/uuid"
	"google.golang.org/api/cloudbuild/v1"
	"google.golang.org/api/googleapi"
	"google.golang.org/api/iterator"
	"k8s.io/apimachinery/pkg/util/wait"

	"github.com/GoogleContainerTools/skaffold/pkg/skaffold/build"
	"github.com/GoogleContainerTools/skaffold/pkg/skaffold/constants"
	"github.com/GoogleContainerTools/skaffold/pkg/skaffold/docker"
	sErrors "github.com/GoogleContainerTools/skaffold/pkg/skaffold/errors"
	"github.com/GoogleContainerTools/skaffold/pkg/skaffold/gcp"
	"github.com/GoogleContainerTools/skaffold/pkg/skaffold/instrumentation"
	"github.com/GoogleContainerTools/skaffold/pkg/skaffold/output"
	"github.com/GoogleContainerTools/skaffold/pkg/skaffold/output/log"
	"github.com/GoogleContainerTools/skaffold/pkg/skaffold/platform"
	"github.com/GoogleContainerTools/skaffold/pkg/skaffold/schema/latest"
	"github.com/GoogleContainerTools/skaffold/pkg/skaffold/sources"
	"github.com/GoogleContainerTools/skaffold/pkg/skaffold/util"
	"github.com/GoogleContainerTools/skaffold/proto/v1"
)

// Build returns an ArtifactBuilder that builds an artifact with Google Cloud Build.
func (b *Builder) Build(ctx context.Context, out io.Writer, artifact *latest.Artifact) build.ArtifactBuilder {
	instrumentation.AddAttributesToCurrentSpanFromContext(ctx, map[string]string{
		"BuildType": "gcb",
		"Context":   instrumentation.PII(artifact.Workspace),
	})
	builder := build.WithLogFile(b.buildArtifactWithCloudBuild, b.muted)
	return builder
}

func (b *Builder) PreBuild(_ context.Context, _ io.Writer) error {
	return nil
}

func (b *Builder) PostBuild(_ context.Context, _ io.Writer) error {
	return nil
}

func (b *Builder) Concurrency() *int {
	return util.IntPtr(b.GoogleCloudBuild.Concurrency)
}

func (b *Builder) buildArtifactWithCloudBuild(ctx context.Context, out io.Writer, artifact *latest.Artifact, tag string, platform platform.Matcher) (string, error) {
	instrumentation.AddAttributesToCurrentSpanFromContext(ctx, map[string]string{
		"Destination": instrumentation.PII(tag),
	})
	// TODO: [#4922] Implement required artifact resolution from the `artifactStore`
	cbclient, err := cloudbuild.NewService(ctx, gcp.ClientOptions(ctx)...)
	if err != nil {
		return "", sErrors.NewErrorWithStatusCode(&proto.ActionableErr{
			ErrCode: proto.StatusCode_BUILD_GET_CLOUD_BUILD_CLIENT_ERR,
			Message: fmt.Sprintf("getting cloudbuild client: %s", err),
		})
	}

	c, err := cstorage.NewClient(ctx, gcp.ClientOptions(ctx)...)
	if err != nil {
		return "", sErrors.NewErrorWithStatusCode(&proto.ActionableErr{
			ErrCode: proto.StatusCode_BUILD_GET_CLOUD_STORAGE_CLIENT_ERR,
			Message: fmt.Sprintf("getting cloud storage client: %s", err),
		})
	}
	defer c.Close()

	projectID := b.ProjectID
	if projectID == "" {
		guessedProjectID, err := gcp.ExtractProjectID(tag)
		if err != nil {
			return "", sErrors.NewErrorWithStatusCode(&proto.ActionableErr{
				ErrCode: proto.StatusCode_BUILD_GCB_EXTRACT_PROJECT_ID,
				Message: fmt.Sprintf("extracting projectID from image name: %s", err),
			})
		}

		projectID = guessedProjectID
	}
	log.Entry(ctx).Debugf("project id set to %s", projectID)

	cbBucket := fmt.Sprintf("%s%s", projectID, constants.GCSBucketSuffix)
	buildObject := fmt.Sprintf("source/%s-%s.tar.gz", projectID, uuid.New().String())

	if err := b.createBucketIfNotExists(ctx, c, projectID, cbBucket); err != nil {
		return "", sErrors.NewErrorWithStatusCode(&proto.ActionableErr{
			ErrCode: proto.StatusCode_BUILD_GCB_CREATE_BUCKET_ERR,
			Message: fmt.Sprintf("creating bucket if not exists: %s", err),
		})
	}
	if err := b.checkBucketProjectCorrect(ctx, c, projectID, cbBucket); err != nil {
		return "", sErrors.NewErrorWithStatusCode(&proto.ActionableErr{
			ErrCode: proto.StatusCode_BUILD_GCB_GET_GCS_BUCKET_ERR,
			Message: fmt.Sprintf("checking bucket is in correct project: %s", err),
		})
	}

	dependencies, err := b.sourceDependencies.SingleArtifactDependencies(ctx, artifact)
	if err != nil {
		return "", sErrors.NewErrorWithStatusCode(&proto.ActionableErr{
			ErrCode: proto.StatusCode_BUILD_GCB_GET_DEPENDENCY_ERR,
			Message: fmt.Sprintf("getting dependencies for %q: %s", artifact.ImageName, err),
		})
	}

	output.Default.Fprintf(out, "Pushing code to gs://%s/%s\n", cbBucket, buildObject)

	// Upload entire workspace for Jib projects to fix multi-module bug
	// https://github.com/GoogleContainerTools/skaffold/issues/3477
	// TODO: Avoid duplication (every Jib artifact will upload the entire workspace)
	if artifact.JibArtifact != nil {
		deps, err := jibAddWorkspaceToDependencies(artifact.Workspace, dependencies)
		if err != nil {
			return "", sErrors.NewErrorWithStatusCode(&proto.ActionableErr{
				ErrCode: proto.StatusCode_BUILD_GCB_JIB_DEPENDENCY_ERR,
				Message: fmt.Sprintf("walking workspace for Jib projects: %s", err),
			})
		}
		dependencies = deps
	}

	if err := sources.UploadToGCS(ctx, c, artifact, cbBucket, buildObject, dependencies); err != nil {
		return "", sErrors.NewErrorWithStatusCode(&proto.ActionableErr{
			ErrCode: proto.StatusCode_BUILD_GCB_UPLOAD_TO_GCS_ERR,
			Message: fmt.Sprintf("uploading source archive: %s", err),
		})
	}

	buildSpec, err := b.buildSpec(ctx, artifact, tag, platform, cbBucket, buildObject)
	if err != nil {
		return "", sErrors.NewErrorWithStatusCode(&proto.ActionableErr{
			ErrCode: proto.StatusCode_BUILD_GCB_GENERATE_BUILD_DESCRIPTOR_ERR,
			Message: fmt.Sprintf("could not create build description: %s", err),
		})
	}
	remoteID, getBuildFunc, err := b.createCloudBuild(ctx, cbclient, projectID, buildSpec)
	if err != nil {
		return "", err
	}
	logsObject := fmt.Sprintf("log-%s.txt", remoteID)
	output.Default.Fprintf(out, "Logs are available at \nhttps://console.cloud.google.com/m/cloudstorage/b/%s/o/%s\n", cbBucket, logsObject)

	var digest string
	offset := int64(0)
watch:
	for {
		var cb *cloudbuild.Build
		var errE error
		log.Entry(ctx).Debugf("current offset %d", offset)
		backoff := NewStatusBackoff()
		if waitErr := wait.Poll(backoff.Duration, RetryTimeout, func() (bool, error) {
			step := backoff.Step()
			log.Entry(ctx).Debugf("backing off for %s", step)
			time.Sleep(step)
			cb, errE = getBuildFunc()
			if errE == nil {
				return true, nil
			}
			// Error code 429 means quota exceeded, see https://github.com/googleapis/googleapis/blob/master/google/rpc/code.proto
			if apiErr, ok := errE.(*googleapi.Error); ok && apiErr.Code == 429 {
				// If we hit the rate limit, continue to retry.
				return false, nil
			}
			return false, errE
		}); waitErr != nil {
			return "", sErrors.NewErrorWithStatusCode(&proto.ActionableErr{
				ErrCode: proto.StatusCode_BUILD_GCB_GET_BUILD_STATUS_ERR,
				Message: fmt.Sprintf("error getting build status: %s", waitErr),
			})
		}
		if errE != nil {
			return "", sErrors.NewErrorWithStatusCode(&proto.ActionableErr{
				ErrCode: proto.StatusCode_BUILD_GCB_GET_BUILD_STATUS_ERR,
				Message: fmt.Sprintf("error getting build status: %s", errE),
			})
		}
		if cb == nil {
			return "", sErrors.NewErrorWithStatusCode(&proto.ActionableErr{
				ErrCode: proto.StatusCode_BUILD_GCB_GET_BUILD_STATUS_ERR,
				Message: "error getting build status",
			})
		}

		r, err := b.getLogs(ctx, c, offset, cbBucket, logsObject)
		if err != nil {
			return "", sErrors.NewErrorWithStatusCode(&proto.ActionableErr{
				ErrCode: proto.StatusCode_BUILD_GCB_GET_BUILD_LOG_ERR,
				Message: fmt.Sprintf("error getting logs: %s", err),
			})
		}
		if r != nil {
			written, err := io.Copy(out, r)
			if err != nil {
				return "", sErrors.NewErrorWithStatusCode(&proto.ActionableErr{
					ErrCode: proto.StatusCode_BUILD_GCB_COPY_BUILD_LOG_ERR,
					Message: fmt.Sprintf("error copying logs to stdout: %s", err),
				})
			}
			offset += written
			r.Close()
		}
		switch cb.Status {
		case StatusQueued, StatusWorking, StatusUnknown:
		case StatusSuccess:
			digest, err = b.getDigest(cb, tag, platform)
			if err != nil {
				return "", sErrors.NewErrorWithStatusCode(&proto.ActionableErr{
					ErrCode: proto.StatusCode_BUILD_GCB_GET_BUILT_IMAGE_ERR,
					Message: fmt.Sprintf("error getting image id from finished build: %s", err),
				})
			}
			break watch
		case StatusFailure:
			return "", sErrors.NewErrorWithStatusCode(&proto.ActionableErr{
				ErrCode: proto.StatusCode_BUILD_GCB_BUILD_FAILED,
				Message: fmt.Sprintf("cloud build failed: %s", cb.Status),
			})
		case StatusInternalError:
			return "", sErrors.NewErrorWithStatusCode(&proto.ActionableErr{
				ErrCode: proto.StatusCode_BUILD_GCB_BUILD_INTERNAL_ERR,
				Message: fmt.Sprintf("cloud build failed due to internal error: %s", cb.Status),
			})
		case StatusTimeout:
			return "", sErrors.NewErrorWithStatusCode(&proto.ActionableErr{
				ErrCode: proto.StatusCode_BUILD_GCB_BUILD_TIMEOUT,
				Message: fmt.Sprintf("cloud build timed out: %s", cb.Status),
			})
		case StatusCancelled:
			return "", sErrors.NewErrorWithStatusCode(&proto.ActionableErr{
				ErrCode: proto.StatusCode_BUILD_GCB_BUILD_CANCELLED,
				Message: fmt.Sprintf("cloud build cancelled: %s", cb.Status),
			})
		default:
			return "", sErrors.NewErrorWithStatusCode(&proto.ActionableErr{
				ErrCode: proto.StatusCode_BUILD_GCB_BUILD_UNKNOWN_STATUS,
				Message: fmt.Sprintf("cloud build status unknown: %s", cb.Status),
			})
		}

		time.Sleep(RetryDelay)
	}

	if err := c.Bucket(cbBucket).Object(buildObject).Delete(ctx); err != nil {
		log.Entry(ctx).Warnf("Unable to delete source archive after build: %q: %v", buildObject, err)
	} else {
		log.Entry(ctx).Infof("Deleted source archive %s", buildObject)
	}

	return build.TagWithDigest(tag, digest), nil
}

func getBuildID(op *cloudbuild.Operation) (string, error) {
	if op.Metadata == nil {
		return "", errors.New("missing Metadata in operation")
	}
	var buildMeta cloudbuild.BuildOperationMetadata
	if err := json.Unmarshal([]byte(op.Metadata), &buildMeta); err != nil {
		return "", err
	}
	if buildMeta.Build == nil {
		return "", errors.New("missing Build in operation metadata")
	}
	return buildMeta.Build.Id, nil
}

func (b *Builder) getDigest(cb *cloudbuild.Build, defaultToTag string, platforms platform.Matcher) (string, error) {
	if cb.Results != nil && len(cb.Results.Images) == 1 {
		return cb.Results.Images[0].Digest, nil
	}

	// The build steps pushed the image directly, as happens with Jib builds.
	// Retrieve the digest for that tag.
	// TODO(dgageot): I don't think GCB can push to an insecure registry.
	return docker.RemoteDigest(defaultToTag, b.cfg, platforms.Platforms)
}

func (b *Builder) getLogs(ctx context.Context, c *cstorage.Client, offset int64, bucket, objectName string) (io.ReadCloser, error) {
	r, err := c.Bucket(bucket).Object(objectName).NewRangeReader(ctx, offset, -1)
	if err != nil {
		if gerr, ok := err.(*googleapi.Error); ok {
			switch gerr.Code {
			// 404: logs not written yet; 416: no new content since the last read;
			// 429: rate limited; 503: temporarily unavailable. All are retried on the next poll.
			case 404, 416, 429, 503:
				log.Entry(ctx).Debugf("Status Code: %d, %s", gerr.Code, gerr.Body)
				return nil, nil
			}
		}
		if err == cstorage.ErrObjectNotExist {
			log.Entry(ctx).Debugf("Logs for %s %s not uploaded yet...", bucket, objectName)
			return nil, nil
		}
		return nil, fmt.Errorf("unknown error: %w", err)
	}
	return r, nil
}

func (b *Builder) checkBucketProjectCorrect(ctx context.Context, c *cstorage.Client, projectID, bucket string) error {
	it := c.Buckets(ctx, projectID)
	// Set the prefix to the bucket we're looking for, so the iterator only returns that bucket
	// and buckets sharing the prefix, which we filter further below.
	it.Prefix = bucket
	for {
		attrs, err := it.Next()
		if err == iterator.Done {
			return fmt.Errorf("bucket not found: %w", err)
		}
		if err != nil {
			return fmt.Errorf("iterating over buckets: %w", err)
		}
		// Since we can't filter on the exact bucket name, only a prefix, check for equality here.
		if attrs.Name == bucket {
			return nil
		}
	}
}

func (b *Builder) createBucketIfNotExists(ctx context.Context, c *cstorage.Client, projectID, bucket string) error {
	_, err := c.Bucket(bucket).Attrs(ctx)
	if err == nil {
		// Bucket exists
		return nil
	}

	if err != cstorage.ErrBucketNotExist {
		return fmt.Errorf("getting bucket %q: %w", bucket, err)
	}

	err = c.Bucket(bucket).Create(ctx, projectID, &cstorage.BucketAttrs{
		Name: bucket,
	})
	if e, ok := err.(*googleapi.Error); ok {
		if e.Code == http.StatusConflict {
			// 409 errors are ok: there could have been a race condition or eventual consistency.
			log.Entry(ctx).Debug("Not creating bucket, got a 409 error indicating it already exists.")
			return nil
		}
	}

	if err != nil {
		return err
	}
	log.Entry(ctx).Debugf("Created bucket %s in %s", bucket, projectID)
	return nil
}

func (b *Builder) createCloudBuild(ctx context.Context, cbclient *cloudbuild.Service, projectID string, buildSpec cloudbuild.Build) (string, func(opts ...googleapi.CallOption) (*cloudbuild.Build, error), error) {
	var op *cloudbuild.Operation
	var err error
	if b.WorkerPool == "" && b.Region == "" {
		op, err = cbclient.Projects.Builds.Create(projectID, &buildSpec).Context(ctx).Do()
		if err != nil {
			return "", nil, sErrors.NewErrorWithStatusCode(&proto.ActionableErr{
				ErrCode: proto.StatusCode_BUILD_GCB_CREATE_BUILD_ERR,
				Message: fmt.Sprintf("error creating build: %s", err),
			})
		}
		remoteID, errB := getBuildID(op)
		if errB != nil {
			return "", nil, sErrors.NewErrorWithStatusCode(&proto.ActionableErr{
				ErrCode: proto.StatusCode_BUILD_GCB_GET_BUILD_ID_ERR,
				Message: errB.Error(),
			})
		}
		return remoteID, cbclient.Projects.Builds.Get(projectID, remoteID).Do, nil
	}

	var location string

	if b.Region != "" {
		location = fmt.Sprintf("projects/%s/locations/%s", projectID, b.Region)
	}
	if b.WorkerPool != "" {
		location = strings.Split(b.WorkerPool, "/workerPools/")[0]
	}
	log.Entry(ctx).Debugf("location: %s", location)
	// location should match the format "projects/{project}/locations/{location}"
	op, err = cbclient.Projects.Locations.Builds.Create(location, &buildSpec).Context(ctx).Do()
	if err != nil {
		return "", nil, sErrors.NewErrorWithStatusCode(&proto.ActionableErr{
			ErrCode: proto.StatusCode_BUILD_GCB_CREATE_BUILD_ERR,
			Message: fmt.Sprintf("error creating build: %s", err),
		})
	}
	remoteID, err := getBuildID(op)
	if err != nil {
		return "", nil, sErrors.NewErrorWithStatusCode(&proto.ActionableErr{
			ErrCode: proto.StatusCode_BUILD_GCB_GET_BUILD_ID_ERR,
			Message: err.Error(),
		})
	}
	// build id should match the format "projects/{project}/locations/{location}/builds/{buildID}"
	buildID := fmt.Sprintf("%s/builds/%s", location, remoteID)
	return remoteID, cbclient.Projects.Locations.Builds.Get(buildID).Do, nil
}
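
A minimal, self-contained sketch of the regional path handling in createCloudBuild above, using hypothetical project, pool, and build ID values that are not part of the upstream file: the parent location is everything before "/workerPools/" in the worker pool resource name, and a finished regional build is then addressed as "{location}/builds/{buildID}".

package main

import (
	"fmt"
	"strings"
)

func main() {
	// Hypothetical worker pool resource name, as it would be set in GoogleCloudBuild.WorkerPool.
	workerPool := "projects/my-project/locations/us-central1/workerPools/my-pool"

	// Everything before "/workerPools/" is the parent used when creating a regional build.
	location := strings.Split(workerPool, "/workerPools/")[0]
	fmt.Println(location) // projects/my-project/locations/us-central1

	// The build is then fetched by its full resource name.
	remoteID := "0a1b2c3d" // hypothetical build ID returned by getBuildID
	fmt.Printf("%s/builds/%s\n", location, remoteID)
}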