github.com/NVIDIA/aistore@v1.3.23-0.20240517131212-7df6609be51d/ais/test/scripted_cli_test.go

// Package integration_test.
/*
 * Copyright (c) 2023-2024, NVIDIA CORPORATION. All rights reserved.
 */
package integration_test

import (
	"os"
	"os/exec"
	"testing"

	"github.com/NVIDIA/aistore/api/apc"
	"github.com/NVIDIA/aistore/cmn"
	"github.com/NVIDIA/aistore/cmn/cos"
	"github.com/NVIDIA/aistore/tools"
	"github.com/NVIDIA/aistore/tools/tassert"
	"github.com/NVIDIA/aistore/tools/tlog"
	"github.com/NVIDIA/aistore/tools/trand"
)

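// TestGetWarmValidateS3UsingScript runs ./scripts/s3-get-validate.sh against the configured
// cloud (s3) bucket; the script relies on s3cmd, hence the AWS-only restriction below.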
func TestGetWarmValidateS3UsingScript(t *testing.T) {
	tools.CheckSkip(t, &tools.SkipTestArgs{
		CloudBck: true,
		Bck:      cliBck,
	})
	// note additional limitation
	normp, _ := cmn.NormalizeProvider(cliBck.Provider)
	if normp != apc.AWS {
		t.Skipf("skipping %s - the test uses s3cmd (command line tool) and requires an s3 bucket (see \"prerequisites\")", t.Name())
	}

	var (
		bucketName = cliBck.Cname("")
		cmd        = exec.Command("./scripts/s3-get-validate.sh", "--bucket", bucketName)
	)
	tlog.Logf("Running '%s'\n", cmd.String())
	out, err := cmd.CombinedOutput()
	if len(out) > 0 {
		tlog.Logln(string(out))
	}
	tassert.CheckFatal(t, err)
}

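// TestGetWarmValidateRemaisUsingScript runs ./scripts/remais-get-validate.sh against either the
// configured remote-ais bucket or a randomly named bucket in the remote cluster's namespace.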
func TestGetWarmValidateRemaisUsingScript(t *testing.T) {
	tools.CheckSkip(t, &tools.SkipTestArgs{RequiresRemoteCluster: true})

	bck := cliBck
	if bck.IsRemoteAIS() {
		tlog.Logf("using existing %s ...\n", bck.Cname(""))
	} else {
		bck = cmn.Bck{
			Name:     trand.String(10),
			Provider: apc.AIS,
			Ns:       cmn.Ns{UUID: tools.RemoteCluster.Alias},
		}
		tlog.Logf("using temp bucket %s ...\n", bck.Cname(""))
	}

	var (
		bucketName = bck.Cname("")
		cmd        = exec.Command("./scripts/remais-get-validate.sh", "--bucket", bucketName)
	)
	tlog.Logf("Running '%s'\n", cmd.String())
	out, err := cmd.CombinedOutput()
	if len(out) > 0 {
		tlog.Logln(string(out))
	}
	tassert.CheckFatal(t, err)
}

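// TestPrefetchLatestS3UsingScript runs ./scripts/s3-prefetch-latest-prefix.sh against the
// configured cloud (s3) bucket; like the warm-GET test above, it requires s3cmd.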
func TestPrefetchLatestS3UsingScript(t *testing.T) {
	tools.CheckSkip(t, &tools.SkipTestArgs{
		CloudBck: true,
		Bck:      cliBck,
	})
	// note additional limitation
	normp, _ := cmn.NormalizeProvider(cliBck.Provider)
	if normp != apc.AWS {
		t.Skipf("skipping %s - the test uses s3cmd (command line tool) and requires an s3 bucket (see \"prerequisites\")", t.Name())
	}

	var (
		bucketName = cliBck.Cname("")
		cmd        = exec.Command("./scripts/s3-prefetch-latest-prefix.sh", "--bucket", bucketName)
	)
	tlog.Logf("Running '%s'\n", cmd.String())
	out, err := cmd.CombinedOutput()
	if len(out) > 0 {
		tlog.Logln(string(out))
	}
	tassert.CheckFatal(t, err)
}

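// TestPrefetchLatestRemaisUsingScript runs ./scripts/remais-prefetch-latest.sh against either the
// configured remote-ais bucket or a randomly named bucket in the remote cluster's namespace.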
func TestPrefetchLatestRemaisUsingScript(t *testing.T) {
	tools.CheckSkip(t, &tools.SkipTestArgs{RequiresRemoteCluster: true})

	bck := cliBck
	if bck.IsRemoteAIS() {
		tlog.Logf("using existing %s ...\n", bck.Cname(""))
	} else {
		bck = cmn.Bck{
			Name:     trand.String(10),
			Provider: apc.AIS,
			Ns:       cmn.Ns{UUID: tools.RemoteCluster.Alias},
		}
		tlog.Logf("using temp bucket %s ...\n", bck.Cname(""))
	}

	var (
		bucketName = bck.Cname("")
		cmd        = exec.Command("./scripts/remais-prefetch-latest.sh", "--bucket", bucketName)
	)
	tlog.Logf("Running '%s'\n", cmd.String())
	out, err := cmd.CombinedOutput()
	if len(out) > 0 {
		tlog.Logln(string(out))
	}
	tassert.CheckFatal(t, err)
}

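// TestCopySyncWithOutOfBandUsingRemaisScript runs ./scripts/cp-sync-remais-out-of-band.sh
// against the configured bucket (long test; requires a remote cluster).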
func TestCopySyncWithOutOfBandUsingRemaisScript(t *testing.T) {
	tools.CheckSkip(t, &tools.SkipTestArgs{
		Long:                  true,
		RequiresRemoteCluster: true,
	})

	bck := cliBck
	var (
		bucketName = bck.Cname("")
		cmd        = exec.Command("./scripts/cp-sync-remais-out-of-band.sh", "--bucket", bucketName)
	)
	tlog.Logf("Running '%s'\n", cmd.String())
	out, err := cmd.CombinedOutput()
	if len(out) > 0 {
		tlog.Logln(string(out))
	}
	tassert.CheckFatal(t, err)
}

// NOTE: not running against an actual remote s3 bucket (that could take hours);
// instead, using aistore's S3 API with a temp `ais://` bucket and two additional workarounds:
// 1. MD5 checksum
// 2. "apc.S3Scheme+apc.BckProviderSeparator+bck.Name" (below)
func TestMultipartUploadLargeFilesScript(t *testing.T) {
	tools.CheckSkip(t, &tools.SkipTestArgs{
		Long: true,
	})

	tempdir, err := os.MkdirTemp("", "s3-mpt")
	tassert.CheckFatal(t, err)
	t.Cleanup(func() {
		_ = os.RemoveAll(tempdir)
	})

	bck := cmn.Bck{Name: trand.String(10), Provider: apc.AIS}

	// 1. set MD5 to satisfy `s3cmd` (for details, see docs/s3cmd.md & docs/s3compat.md)
	bprops := &cmn.BpropsToSet{
		Cksum: &cmn.CksumConfToSet{Type: apc.Ptr(cos.ChecksumMD5)},
	}
	tools.CreateBucket(t, proxyURL, bck, bprops, true /*cleanup*/)

	// 2. substitute "ais://" with "s3://" to circumvent s3cmd failing with "not a recognized URI"
	cmd := exec.Command("./scripts/s3-mpt-large-files.sh", tempdir, apc.S3Scheme+apc.BckProviderSeparator+bck.Name,
		"1",    // number of iterations
		"true", // generate large files
		"1",    // number of large files
	)

	tlog.Logf("Running '%s' (this may take a while...)\n", cmd.String())
	out, err := cmd.CombinedOutput()
	if len(out) > 0 {
		tlog.Logln(string(out))
	}
	tassert.CheckFatal(t, err)
}

// remais-blob-download.sh
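// Creates a temporary bucket in the remote cluster and runs the script with its
// default sizes, chunk size, and number of workers (listed below).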
func TestRemaisBlobDownloadScript(t *testing.T) {
	tools.CheckSkip(t, &tools.SkipTestArgs{
		RequiresRemoteCluster: true,
		Long:                  true,
	})
	bck := cmn.Bck{
		Name:     trand.String(10),
		Ns:       cmn.Ns{UUID: tools.RemoteCluster.Alias},
		Provider: apc.AIS,
	}
	tools.CreateBucket(t, proxyURL, bck, nil, true /*cleanup*/)
	name := bck.Cname("")

	// use remais-blob-download.sh defaults for everything except bucket name (e.g.):
	// "--minsize", "1MB",
	// "--maxsize", "10MB",
	// "--totalsize", "100MB",
	// "--chunksize", "500K",
	// "--numworkers", "5"
	cmd := exec.Command("./scripts/remais-blob-download.sh", "--bucket", name)

	tlog.Logf("Running '%s' (this may take a while...)\n", cmd.String())
	out, err := cmd.CombinedOutput()
	if len(out) > 0 {
		tlog.Logln(string(out))
	}
	tassert.CheckFatal(t, err)
}

// get-archregx-wdskey.sh
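// Runs the script with no arguments and only checks that it exits successfully.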
func TestGetArchregxWdskeyScript(t *testing.T) {
	cmd := exec.Command("./scripts/get-archregx-wdskey.sh")

	out, err := cmd.CombinedOutput()
	if len(out) > 0 {
		tlog.Logln(string(out))
	}
	tassert.CheckFatal(t, err)
}