github.com/kubeshop/testkube@v1.17.23/cmd/tcl/testworkflow-toolkit/commands/artifacts.go

// Copyright 2024 Testkube.
//
// Licensed as a Testkube Pro file under the Testkube Community
// License (the "License"); you may not use this file except in compliance with
// the License. You may obtain a copy of the License at
//
//	https://github.com/kubeshop/testkube/blob/main/licenses/TCL.txt

package commands

import (
	"fmt"
	"io/fs"
	"net/http"
	"os"
	"path/filepath"
	"strings"
	"time"

	"github.com/minio/minio-go/v7"
	"github.com/spf13/cobra"

	"github.com/kubeshop/testkube/cmd/tcl/testworkflow-toolkit/artifacts"
	"github.com/kubeshop/testkube/cmd/tcl/testworkflow-toolkit/env"
	"github.com/kubeshop/testkube/pkg/ui"
)

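// directAddGzipEncoding marks a directly uploaded object as a gzip archive.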
var directAddGzipEncoding = artifacts.WithMinioOptionsEnhancer(func(options *minio.PutObjectOptions, path string, size int64) {
	options.ContentType = "application/gzip"
	options.ContentEncoding = "gzip"
})

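// directDisableMultipart forces a single-part (non-multipart) upload.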
var directDisableMultipart = artifacts.WithMinioOptionsEnhancer(func(options *minio.PutObjectOptions, path string, size int64) {
	options.DisableMultipart = true
})

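// directDetectMimetype sets the Content-Type based on the file path when it is not already set.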
var directDetectMimetype = artifacts.WithMinioOptionsEnhancer(func(options *minio.PutObjectOptions, path string, size int64) {
	if options.ContentType == "" {
		options.ContentType = artifacts.DetectMimetype(path)
	}
})

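// directUnpack adds MinIO snowball metadata, so the uploaded archive is automatically
// extracted under the "<workflow name>/<execution id>" prefix.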
var directUnpack = artifacts.WithMinioOptionsEnhancer(func(options *minio.PutObjectOptions, path string, size int64) {
	options.UserMetadata = map[string]string{
		"X-Amz-Meta-Snowball-Auto-Extract": "true",
		"X-Amz-Meta-Minio-Snowball-Prefix": env.WorkflowName() + "/" + env.ExecutionId(),
	}
})

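// cloudAddGzipEncoding marks an object uploaded through the Cloud API as a gzip archive.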
var cloudAddGzipEncoding = artifacts.WithRequestEnhancerCloud(func(req *http.Request, path string, size int64) {
	req.Header.Set("Content-Type", "application/gzip")
	req.Header.Set("Content-Encoding", "gzip")
})

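// cloudUnpack requests automatic extraction of the uploaded archive on the storage side.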
var cloudUnpack = artifacts.WithRequestEnhancerCloud(func(req *http.Request, path string, size int64) {
	req.Header.Set("X-Amz-Meta-Snowball-Auto-Extract", "true")
})

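// cloudDetectMimetype sets the Content-Type based on the file path when it is not already set,
// and adds a gzip Content-Encoding for detected gzip files.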
var cloudDetectMimetype = artifacts.WithRequestEnhancerCloud(func(req *http.Request, path string, size int64) {
	if req.Header.Get("Content-Type") == "" {
		contentType := artifacts.DetectMimetype(path)
		if contentType != "" {
			req.Header.Set("Content-Type", contentType)
		}
		if contentType == "application/gzip" && req.Header.Get("Content-Encoding") == "" {
			req.Header.Set("Content-Encoding", "gzip")
		}
	}
})

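// NewArtifactsCmd creates the "artifacts" sub-command, which collects the files matching
// the provided path patterns (limited to the mounted volumes) and uploads them either
// directly to the object storage or through the Cloud API, optionally packed into a
// single .tar.gz archive.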
func NewArtifactsCmd() *cobra.Command {
	var (
		mounts            []string
		id                string
		compress          string
		compressCachePath string
		unpack            bool
	)

	cmd := &cobra.Command{
		Use:   "artifacts <paths...>",
		Short: "Save workflow artifacts",
		Args:  cobra.MinimumNArgs(1),

		Run: func(cmd *cobra.Command, paths []string) {
			root, _ := os.Getwd()
			walker, err := artifacts.CreateWalker(paths, mounts, root)
			ui.ExitOnError("building a walker", err)

			if len(walker.Patterns()) == 0 || len(walker.SearchPaths()) == 0 {
				ui.Failf("error: did not find any valid path pattern in the mounted directories")
			}

			fmt.Printf("Root: %s\nPatterns:\n", ui.LightCyan(walker.Root()))
			for _, p := range walker.Patterns() {
				fmt.Printf("- %s\n", ui.LightMagenta(p))
			}
			fmt.Printf("\n")

			// Configure uploader
			var processor artifacts.Processor
			var uploader artifacts.Uploader

			// Sanitize archive name
			compress = strings.Trim(filepath.ToSlash(filepath.Clean(compress)), "/.")
			if compress != "" {
				compressLower := strings.ToLower(compress)
				if strings.HasSuffix(compressLower, ".tar") {
					compress += ".gz"
				} else if !strings.HasSuffix(compressLower, ".tgz") && !strings.HasSuffix(compressLower, ".tar.gz") {
					compress += ".tar.gz"
				}
			}

			// Choose the processor (direct files vs tar.gz archive) and the uploader (Cloud vs direct storage)
			if env.CloudEnabled() {
				if compress != "" {
					processor = artifacts.NewTarCachedProcessor(compress, compressCachePath)
					opts := []artifacts.CloudUploaderOpt{cloudAddGzipEncoding}
					if unpack {
						opts = append(opts, cloudUnpack)
					}
					uploader = artifacts.NewCloudUploader(opts...)
				} else {
					processor = artifacts.NewDirectProcessor()
					uploader = artifacts.NewCloudUploader(artifacts.WithParallelismCloud(30), cloudDetectMimetype)
				}
			} else if compress != "" && unpack {
				processor = artifacts.NewTarCachedProcessor(compress, compressCachePath)
				uploader = artifacts.NewDirectUploader(directAddGzipEncoding, directDisableMultipart, directUnpack)
			} else if compress != "" && compressCachePath != "" {
				processor = artifacts.NewTarCachedProcessor(compress, compressCachePath)
				uploader = artifacts.NewDirectUploader(directAddGzipEncoding, directDisableMultipart)
			} else if compress != "" {
				processor = artifacts.NewTarProcessor(compress)
				uploader = artifacts.NewDirectUploader(directAddGzipEncoding)
			} else {
				processor = artifacts.NewDirectProcessor()
				uploader = artifacts.NewDirectUploader(artifacts.WithParallelism(30), directDetectMimetype)
			}

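			// Combine the selected processor and uploader into a single artifacts handler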
			handler := artifacts.NewHandler(uploader, processor)

			err = handler.Start()
			ui.ExitOnError("initializing uploader", err)

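			// Walk the resolved patterns from the filesystem root; unreadable files are reported as warnings and skipped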
			started := time.Now()
			err = walker.Walk(os.DirFS("/"), func(path string, file fs.File, err error) error {
				if err != nil {
					fmt.Printf("Warning: '%s' has been ignored, as there was a problem reading it: %s\n", path, err.Error())
					return nil
				}

				stat, err := file.Stat()
				if err != nil {
					fmt.Printf("Warning: '%s' has been ignored, as there was a problem reading it: %s\n", path, err.Error())
					return nil
				}
				return handler.Add(path, file, stat)
			})
			ui.ExitOnError("reading the file system", err)
			err = handler.End()

			// TODO: Emit information about artifacts
			ui.ExitOnError("finishing upload", err)
			fmt.Printf("Took %s.\n", time.Since(started).Truncate(time.Millisecond))
		},
	}

	cmd.Flags().StringSliceVarP(&mounts, "mount", "m", nil, "mounted volumes for limiting paths")
	cmd.Flags().StringVar(&id, "id", "", "execution ID")
	cmd.Flags().StringVar(&compress, "compress", "", "tgz name if the artifacts should be compressed")
	cmd.Flags().BoolVar(&unpack, "unpack", false, "minio only: unpack the file if compressed")
	cmd.Flags().StringVar(&compressCachePath, "compress-cache", "", "local cache path for passing compressed archive through")

	return cmd
}