github.com/kubeshop/testkube@v1.17.23/pkg/executor/scraper/factory/factory.go

package factory

import (
	"context"
	"time"

	cloudevents "github.com/cloudevents/sdk-go/v2"
	"github.com/pkg/errors"

	"github.com/kubeshop/testkube/pkg/agent"
	"github.com/kubeshop/testkube/pkg/cloud"
	cloudscraper "github.com/kubeshop/testkube/pkg/cloud/data/artifact"
	cloudexecutor "github.com/kubeshop/testkube/pkg/cloud/data/executor"
	"github.com/kubeshop/testkube/pkg/envs"
	"github.com/kubeshop/testkube/pkg/executor/output"
	"github.com/kubeshop/testkube/pkg/executor/scraper"
	"github.com/kubeshop/testkube/pkg/filesystem"
	"github.com/kubeshop/testkube/pkg/log"
	"github.com/kubeshop/testkube/pkg/ui"
)

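// ExtractorType identifies the extractor implementation used to collect artifact files from the filesystem.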
type ExtractorType string
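// UploaderType identifies the uploader implementation used to store scraped artifacts.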
type UploaderType string

const (
	// RecursiveFilesystemExtractor scrapes files from the filesystem one by one.
	RecursiveFilesystemExtractor ExtractorType = "RecursiveFilesystemExtractor"
	// ArchiveFilesystemExtractor packs the scraped files into a single archive first.
	ArchiveFilesystemExtractor ExtractorType = "ArchiveFilesystemExtractor"
	// MinIOUploader uploads artifacts directly to MinIO (or S3-compatible) storage.
	MinIOUploader UploaderType = "MinIOUploader"
	// CloudUploader uploads artifacts through the Testkube Pro Agent API.
	CloudUploader UploaderType = "CloudUploader"
)

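// TryGetScrapper builds a scraper based on the given environment parameters.
// It returns (nil, nil) when scraping is disabled, so callers must check the
// returned scraper for nil before using it:
//
//	s, err := factory.TryGetScrapper(ctx, params)
//	if err != nil {
//		return err
//	}
//	if s == nil {
//		// scraping disabled, nothing to do
//	}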
func TryGetScrapper(ctx context.Context, params envs.Params) (scraper.Scraper, error) {
	if params.ScrapperEnabled {
		uploader := MinIOUploader
		if params.ProMode {
			uploader = CloudUploader
		}
		extractor := RecursiveFilesystemExtractor
		if params.CompressArtifacts {
			extractor = ArchiveFilesystemExtractor
		}

		s, err := GetScraper(ctx, params, extractor, uploader)
		if err != nil {
			return nil, errors.Wrap(err, "error creating scraper")
		}
		return s, nil
	}

	return nil, nil
}

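// GetScraper wires the requested extractor and uploader into an
// ExtractLoadScraper. Unknown extractor or uploader types are reported as
// errors.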
func GetScraper(ctx context.Context, params envs.Params, extractorType ExtractorType, uploaderType UploaderType) (scraper.Scraper, error) {
	var extractor scraper.Extractor
	switch extractorType {
	case RecursiveFilesystemExtractor:
		extractor = scraper.NewRecursiveFilesystemExtractor(filesystem.NewOSFileSystem())
	case ArchiveFilesystemExtractor:
		var opts []scraper.ArchiveFilesystemExtractorOpts
		if params.ProMode {
			opts = append(opts, scraper.GenerateTarballMetaFile())
		}
		extractor = scraper.NewArchiveFilesystemExtractor(filesystem.NewOSFileSystem(), opts...)
	default:
		return nil, errors.Errorf("unknown extractor type: %s", extractorType)
	}

	var err error
	var loader scraper.Uploader
	switch uploaderType {
	case MinIOUploader:
		loader, err = getMinIOUploader(params)
		if err != nil {
			return nil, errors.Wrap(err, "error creating minio uploader")
		}
	case CloudUploader:
		loader, err = getRemoteStorageUploader(ctx, params)
		if err != nil {
			return nil, errors.Wrap(err, "error creating remote storage uploader")
		}
	default:
		return nil, errors.Errorf("unknown uploader type: %s", uploaderType)
	}

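	// The CDEvents client is optional: if it cannot be created, the error is
	// logged and scraping proceeds without emitting CDEvents.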
	var cdeventsClient cloudevents.Client
	if params.CDEventsTarget != "" {
		cdeventsClient, err = cloudevents.NewClientHTTP(cloudevents.WithTarget(params.CDEventsTarget))
		if err != nil {
			log.DefaultLogger.Warnf("failed to create cloud event client: %v", err)
		}
	}

	return scraper.NewExtractLoadScraper(extractor, loader, cdeventsClient, params.ClusterID, params.DashboardURI), nil
}

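// getRemoteStorageUploader dials the Testkube Pro Agent API over gRPC, bounded
// by params.ProConnectionTimeoutSec, and returns an uploader that ships
// artifacts through that connection.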
func getRemoteStorageUploader(ctx context.Context, params envs.Params) (*cloudscraper.CloudUploader, error) {
	// Bound the blocking connection to the control plane with a timeout.
	ctxTimeout, cancel := context.WithTimeout(ctx, time.Duration(params.ProConnectionTimeoutSec)*time.Second)
	defer cancel()

	output.PrintLogf(
		"%s Uploading artifacts using Remote Storage Uploader (timeout: %ds, agentInsecure: %v, agentSkipVerify: %v, url: %s, scraperSkipVerify: %v)",
		ui.IconCheckMark, params.ProConnectionTimeoutSec, params.ProAPITLSInsecure, params.ProAPISkipVerify, params.ProAPIURL, params.SkipVerify)
	grpcConn, err := agent.NewGRPCConnection(
		ctxTimeout,
		params.ProAPITLSInsecure,
		params.ProAPISkipVerify,
		params.ProAPIURL,
		params.ProAPICertFile,
		params.ProAPIKeyFile,
		params.ProAPICAFile,
		log.DefaultLogger,
	)
	if err != nil {
		return nil, err
	}
	output.PrintLogf("%s Connected to Agent API", ui.IconCheckMark)

	grpcClient := cloud.NewTestKubeCloudAPIClient(grpcConn)
	cloudExecutor := cloudexecutor.NewCloudGRPCExecutor(grpcClient, grpcConn, params.ProAPIKey)
	return cloudscraper.NewCloudUploader(cloudExecutor, params.SkipVerify), nil
}

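// getMinIOUploader returns an uploader that writes artifacts directly to the
// configured MinIO (S3-compatible) endpoint.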
func getMinIOUploader(params envs.Params) (*scraper.MinIOUploader, error) {
	output.PrintLogf("%s Uploading artifacts using MinIO Uploader", ui.IconCheckMark)
	return scraper.NewMinIOUploader(
		params.Endpoint,
		params.AccessKeyID,
		params.SecretAccessKey,
		params.Region,
		params.Token,
		params.Bucket,
		params.Ssl,
		params.SkipVerify,
		params.CertFile,
		params.KeyFile,
		params.CAFile,
	)
}