github.com/artpar/rclone@v1.67.3/backend/chunker/chunker_internal_test.go (about)

     1  package chunker
     2  
     3  import (
     4  	"bytes"
     5  	"context"
     6  	"flag"
     7  	"fmt"
     8  	"io"
     9  	"path"
    10  	"regexp"
    11  	"strings"
    12  	"testing"
    13  
    14  	"github.com/artpar/rclone/fs"
    15  	"github.com/artpar/rclone/fs/config/configmap"
    16  	"github.com/artpar/rclone/fs/fspath"
    17  	"github.com/artpar/rclone/fs/hash"
    18  	"github.com/artpar/rclone/fs/object"
    19  	"github.com/artpar/rclone/fs/operations"
    20  	"github.com/artpar/rclone/fstest"
    21  	"github.com/artpar/rclone/fstest/fstests"
    22  	"github.com/artpar/rclone/lib/random"
    23  	"github.com/stretchr/testify/assert"
    24  	"github.com/stretchr/testify/require"
    25  )
    26  
// Command line flags
var (
	// UploadKilobytes sets the size (in KiB) for the large-upload test;
	// the test is skipped when left at the default of 0.
	UploadKilobytes = flag.Int("upload-kilobytes", 0, "Upload size in Kilobytes, set this to test large uploads")
)
    31  
    32  // test that chunking does not break large uploads
    33  func testPutLarge(t *testing.T, f *Fs, kilobytes int) {
    34  	t.Run(fmt.Sprintf("PutLarge%dk", kilobytes), func(t *testing.T) {
    35  		fstests.TestPutLarge(context.Background(), t, f, &fstest.Item{
    36  			ModTime: fstest.Time("2001-02-03T04:05:06.499999999Z"),
    37  			Path:    fmt.Sprintf("chunker-upload-%dk", kilobytes),
    38  			Size:    int64(kilobytes) * int64(fs.Kibi),
    39  		})
    40  	})
    41  }
    42  
    43  type settings map[string]interface{}
    44  
    45  func deriveFs(ctx context.Context, t *testing.T, f fs.Fs, path string, opts settings) fs.Fs {
    46  	fsName := strings.Split(f.Name(), "{")[0] // strip off hash
    47  	configMap := configmap.Simple{}
    48  	for key, val := range opts {
    49  		configMap[key] = fmt.Sprintf("%v", val)
    50  	}
    51  	rpath := fspath.JoinRootPath(f.Root(), path)
    52  	remote := fmt.Sprintf("%s,%s:%s", fsName, configMap.String(), rpath)
    53  	fixFs, err := fs.NewFs(ctx, remote)
    54  	require.NoError(t, err)
    55  	return fixFs
    56  }
    57  
    58  var mtime1 = fstest.Time("2001-02-03T04:05:06.499999999Z")
    59  
    60  func testPutFile(ctx context.Context, t *testing.T, f fs.Fs, name, contents, message string, check bool) fs.Object {
    61  	item := fstest.Item{Path: name, ModTime: mtime1}
    62  	obj := fstests.PutTestContents(ctx, t, f, &item, contents, check)
    63  	assert.NotNil(t, obj, message)
    64  	return obj
    65  }
    66  
// testChunkNameFormat exercises the chunk name machinery: it checks that
// name-format patterns compile into the expected sprintf formats and
// regexp, and that makeChunkName/parseChunkName agree on a large table of
// valid, temporary, control, and deliberately malformed chunk names.
func testChunkNameFormat(t *testing.T, f *Fs) {
	saveOpt := f.opt
	defer func() {
		// restore original settings (f is pointer, f.opt is struct)
		f.opt = saveOpt
		_ = f.setChunkNameFormat(f.opt.NameFormat)
	}()

	// assertFormat sets the pattern and checks the derived data format,
	// control format and compiled name regexp against expectations.
	assertFormat := func(pattern, wantDataFormat, wantCtrlFormat, wantNameRegexp string) {
		err := f.setChunkNameFormat(pattern)
		assert.NoError(t, err)
		assert.Equal(t, wantDataFormat, f.dataNameFmt)
		assert.Equal(t, wantCtrlFormat, f.ctrlNameFmt)
		assert.Equal(t, wantNameRegexp, f.nameRegexp.String())
	}

	// assertFormatValid asserts that the pattern is accepted.
	assertFormatValid := func(pattern string) {
		err := f.setChunkNameFormat(pattern)
		assert.NoError(t, err)
	}

	// assertFormatInvalid asserts that the pattern is rejected.
	assertFormatInvalid := func(pattern string) {
		err := f.setChunkNameFormat(pattern)
		assert.Error(t, err)
	}

	// assertMakeName checks that makeChunkName does not panic and, if it
	// produced a name, that the name matches the expectation.
	assertMakeName := func(wantChunkName, mainName string, chunkNo int, ctrlType, xactID string) {
		gotChunkName := ""
		assert.NotPanics(t, func() {
			gotChunkName = f.makeChunkName(mainName, chunkNo, ctrlType, xactID)
		}, "makeChunkName(%q,%d,%q,%q) must not panic", mainName, chunkNo, ctrlType, xactID)
		if gotChunkName != "" {
			assert.Equal(t, wantChunkName, gotChunkName)
		}
	}

	// assertMakeNamePanics checks that invalid arguments make makeChunkName panic.
	assertMakeNamePanics := func(mainName string, chunkNo int, ctrlType, xactID string) {
		assert.Panics(t, func() {
			_ = f.makeChunkName(mainName, chunkNo, ctrlType, xactID)
		}, "makeChunkName(%q,%d,%q,%q) should panic", mainName, chunkNo, ctrlType, xactID)
	}

	// assertParseName checks all four components returned by parseChunkName.
	assertParseName := func(fileName, wantMainName string, wantChunkNo int, wantCtrlType, wantXactID string) {
		gotMainName, gotChunkNo, gotCtrlType, gotXactID := f.parseChunkName(fileName)
		assert.Equal(t, wantMainName, gotMainName)
		assert.Equal(t, wantChunkNo, gotChunkNo)
		assert.Equal(t, wantCtrlType, gotCtrlType)
		assert.Equal(t, wantXactID, gotXactID)
	}

	const newFormatSupported = false // support for patterns not starting with base name (*)

	// valid formats
	assertFormat(`*.rclone_chunk.###`, `%s.rclone_chunk.%03d`, `%s.rclone_chunk._%s`, `^(.+?)\.rclone_chunk\.(?:([0-9]{3,})|_([a-z][a-z0-9]{2,6}))(?:_([0-9a-z]{4,9})|\.\.tmp_([0-9]{10,13}))?$`)
	assertFormat(`*.rclone_chunk.#`, `%s.rclone_chunk.%d`, `%s.rclone_chunk._%s`, `^(.+?)\.rclone_chunk\.(?:([0-9]+)|_([a-z][a-z0-9]{2,6}))(?:_([0-9a-z]{4,9})|\.\.tmp_([0-9]{10,13}))?$`)
	assertFormat(`*_chunk_#####`, `%s_chunk_%05d`, `%s_chunk__%s`, `^(.+?)_chunk_(?:([0-9]{5,})|_([a-z][a-z0-9]{2,6}))(?:_([0-9a-z]{4,9})|\.\.tmp_([0-9]{10,13}))?$`)
	assertFormat(`*-chunk-#`, `%s-chunk-%d`, `%s-chunk-_%s`, `^(.+?)-chunk-(?:([0-9]+)|_([a-z][a-z0-9]{2,6}))(?:_([0-9a-z]{4,9})|\.\.tmp_([0-9]{10,13}))?$`)
	assertFormat(`*-chunk-#-%^$()[]{}.+-!?:\`, `%s-chunk-%d-%%^$()[]{}.+-!?:\`, `%s-chunk-_%s-%%^$()[]{}.+-!?:\`, `^(.+?)-chunk-(?:([0-9]+)|_([a-z][a-z0-9]{2,6}))-%\^\$\(\)\[\]\{\}\.\+-!\?:\\(?:_([0-9a-z]{4,9})|\.\.tmp_([0-9]{10,13}))?$`)
	if newFormatSupported {
		assertFormat(`_*-chunk-##,`, `_%s-chunk-%02d,`, `_%s-chunk-_%s,`, `^_(.+?)-chunk-(?:([0-9]{2,})|_([a-z][a-z0-9]{2,6})),(?:_([0-9a-z]{4,9})|\.\.tmp_([0-9]{10,13}))?$`)
	}

	// invalid formats
	assertFormatInvalid(`chunk-#`)
	assertFormatInvalid(`*-chunk`)
	assertFormatInvalid(`*-*-chunk-#`)
	assertFormatInvalid(`*-chunk-#-#`)
	assertFormatInvalid(`#-chunk-*`)
	assertFormatInvalid(`*/#`)

	assertFormatValid(`*#`)
	assertFormatInvalid(`**#`)
	assertFormatInvalid(`#*`)
	assertFormatInvalid(``)
	assertFormatInvalid(`-`)

	// quick tests
	if newFormatSupported {
		assertFormat(`part_*_#`, `part_%s_%d`, `part_%s__%s`, `^part_(.+?)_(?:([0-9]+)|_([a-z][a-z0-9]{2,6}))(?:_([0-9][0-9a-z]{3,8})\.\.tmp_([0-9]{10,13}))?$`)
		f.opt.StartFrom = 1

		assertMakeName(`part_fish_1`, "fish", 0, "", "")
		assertParseName(`part_fish_43`, "fish", 42, "", "")
		assertMakeName(`part_fish__locks`, "fish", -2, "locks", "")
		assertParseName(`part_fish__locks`, "fish", -1, "locks", "")
		assertMakeName(`part_fish__x2y`, "fish", -2, "x2y", "")
		assertParseName(`part_fish__x2y`, "fish", -1, "x2y", "")
		assertMakeName(`part_fish_3_0004`, "fish", 2, "", "4")
		assertParseName(`part_fish_4_0005`, "fish", 3, "", "0005")
		assertMakeName(`part_fish__blkinfo_jj5fvo3wr`, "fish", -3, "blkinfo", "jj5fvo3wr")
		assertParseName(`part_fish__blkinfo_zz9fvo3wr`, "fish", -1, "blkinfo", "zz9fvo3wr")

		// old-style temporary suffix (parse only)
		assertParseName(`part_fish_4..tmp_0000000011`, "fish", 3, "", "000b")
		assertParseName(`part_fish__blkinfo_jj5fvo3wr`, "fish", -1, "blkinfo", "jj5fvo3wr")
	}

	// prepare format for long tests
	assertFormat(`*.chunk.###`, `%s.chunk.%03d`, `%s.chunk._%s`, `^(.+?)\.chunk\.(?:([0-9]{3,})|_([a-z][a-z0-9]{2,6}))(?:_([0-9a-z]{4,9})|\.\.tmp_([0-9]{10,13}))?$`)
	f.opt.StartFrom = 2

	// valid data chunks
	assertMakeName(`fish.chunk.003`, "fish", 1, "", "")
	assertParseName(`fish.chunk.003`, "fish", 1, "", "")
	assertMakeName(`fish.chunk.021`, "fish", 19, "", "")
	assertParseName(`fish.chunk.021`, "fish", 19, "", "")

	// valid temporary data chunks
	assertMakeName(`fish.chunk.011_4321`, "fish", 9, "", "4321")
	assertParseName(`fish.chunk.011_4321`, "fish", 9, "", "4321")
	assertMakeName(`fish.chunk.011_00bc`, "fish", 9, "", "00bc")
	assertParseName(`fish.chunk.011_00bc`, "fish", 9, "", "00bc")
	assertMakeName(`fish.chunk.1916_5jjfvo3wr`, "fish", 1914, "", "5jjfvo3wr")
	assertParseName(`fish.chunk.1916_5jjfvo3wr`, "fish", 1914, "", "5jjfvo3wr")
	assertMakeName(`fish.chunk.1917_zz9fvo3wr`, "fish", 1915, "", "zz9fvo3wr")
	assertParseName(`fish.chunk.1917_zz9fvo3wr`, "fish", 1915, "", "zz9fvo3wr")

	// valid temporary data chunks (old temporary suffix, only parse)
	assertParseName(`fish.chunk.004..tmp_0000000047`, "fish", 2, "", "001b")
	assertParseName(`fish.chunk.323..tmp_9994567890123`, "fish", 321, "", "3jjfvo3wr")

	// parsing invalid data chunk names
	assertParseName(`fish.chunk.3`, "", -1, "", "")
	assertParseName(`fish.chunk.001`, "", -1, "", "")
	assertParseName(`fish.chunk.21`, "", -1, "", "")
	assertParseName(`fish.chunk.-21`, "", -1, "", "")

	assertParseName(`fish.chunk.004abcd`, "", -1, "", "")        // missing underscore delimiter
	assertParseName(`fish.chunk.004__1234`, "", -1, "", "")      // extra underscore delimiter
	assertParseName(`fish.chunk.004_123`, "", -1, "", "")        // too short temporary suffix
	assertParseName(`fish.chunk.004_1234567890`, "", -1, "", "") // too long temporary suffix
	assertParseName(`fish.chunk.004_-1234`, "", -1, "", "")      // temporary suffix must be positive
	assertParseName(`fish.chunk.004_123E`, "", -1, "", "")       // uppercase not allowed
	assertParseName(`fish.chunk.004_12.3`, "", -1, "", "")       // punctuation not allowed

	// parsing invalid data chunk names (old temporary suffix)
	assertParseName(`fish.chunk.004.tmp_0000000021`, "", -1, "", "")
	assertParseName(`fish.chunk.003..tmp_123456789`, "", -1, "", "")
	assertParseName(`fish.chunk.003..tmp_012345678901234567890123456789`, "", -1, "", "")
	assertParseName(`fish.chunk.323..tmp_12345678901234`, "", -1, "", "")
	assertParseName(`fish.chunk.003..tmp_-1`, "", -1, "", "")

	// valid control chunks
	assertMakeName(`fish.chunk._info`, "fish", -1, "info", "")
	assertMakeName(`fish.chunk._locks`, "fish", -2, "locks", "")
	assertMakeName(`fish.chunk._blkinfo`, "fish", -3, "blkinfo", "")
	assertMakeName(`fish.chunk._x2y`, "fish", -4, "x2y", "")

	assertParseName(`fish.chunk._info`, "fish", -1, "info", "")
	assertParseName(`fish.chunk._locks`, "fish", -1, "locks", "")
	assertParseName(`fish.chunk._blkinfo`, "fish", -1, "blkinfo", "")
	assertParseName(`fish.chunk._x2y`, "fish", -1, "x2y", "")

	// valid temporary control chunks
	assertMakeName(`fish.chunk._info_0001`, "fish", -1, "info", "1")
	assertMakeName(`fish.chunk._locks_4321`, "fish", -2, "locks", "4321")
	assertMakeName(`fish.chunk._uploads_abcd`, "fish", -3, "uploads", "abcd")
	assertMakeName(`fish.chunk._blkinfo_xyzabcdef`, "fish", -4, "blkinfo", "xyzabcdef")
	assertMakeName(`fish.chunk._x2y_1aaa`, "fish", -5, "x2y", "1aaa")

	assertParseName(`fish.chunk._info_0001`, "fish", -1, "info", "0001")
	assertParseName(`fish.chunk._locks_4321`, "fish", -1, "locks", "4321")
	assertParseName(`fish.chunk._uploads_9abc`, "fish", -1, "uploads", "9abc")
	assertParseName(`fish.chunk._blkinfo_xyzabcdef`, "fish", -1, "blkinfo", "xyzabcdef")
	assertParseName(`fish.chunk._x2y_1aaa`, "fish", -1, "x2y", "1aaa")

	// valid temporary control chunks (old temporary suffix, parse only)
	assertParseName(`fish.chunk._info..tmp_0000000047`, "fish", -1, "info", "001b")
	assertParseName(`fish.chunk._locks..tmp_0000054321`, "fish", -1, "locks", "15wx")
	assertParseName(`fish.chunk._uploads..tmp_0000000000`, "fish", -1, "uploads", "0000")
	assertParseName(`fish.chunk._blkinfo..tmp_9994567890123`, "fish", -1, "blkinfo", "3jjfvo3wr")
	assertParseName(`fish.chunk._x2y..tmp_0000000000`, "fish", -1, "x2y", "0000")

	// parsing invalid control chunk names
	assertParseName(`fish.chunk.metadata`, "", -1, "", "") // must be prepended by underscore
	assertParseName(`fish.chunk.info`, "", -1, "", "")
	assertParseName(`fish.chunk.locks`, "", -1, "", "")
	assertParseName(`fish.chunk.uploads`, "", -1, "", "")

	assertParseName(`fish.chunk._os`, "", -1, "", "")        // too short
	assertParseName(`fish.chunk._metadata`, "", -1, "", "")  // too long
	assertParseName(`fish.chunk._blockinfo`, "", -1, "", "") // way too long
	assertParseName(`fish.chunk._4me`, "", -1, "", "")       // cannot start with digit
	assertParseName(`fish.chunk._567`, "", -1, "", "")       // cannot be all digits
	assertParseName(`fish.chunk._me_ta`, "", -1, "", "")     // punctuation not allowed
	assertParseName(`fish.chunk._in-fo`, "", -1, "", "")
	assertParseName(`fish.chunk._.bin`, "", -1, "", "")
	assertParseName(`fish.chunk._.2xy`, "", -1, "", "")

	// parsing invalid temporary control chunks
	assertParseName(`fish.chunk._blkinfo1234`, "", -1, "", "")     // missing underscore delimiter
	assertParseName(`fish.chunk._info__1234`, "", -1, "", "")      // extra underscore delimiter
	assertParseName(`fish.chunk._info_123`, "", -1, "", "")        // too short temporary suffix
	assertParseName(`fish.chunk._info_1234567890`, "", -1, "", "") // too long temporary suffix
	assertParseName(`fish.chunk._info_-1234`, "", -1, "", "")      // temporary suffix must be positive
	assertParseName(`fish.chunk._info_123E`, "", -1, "", "")       // uppercase not allowed
	assertParseName(`fish.chunk._info_12.3`, "", -1, "", "")       // punctuation not allowed

	assertParseName(`fish.chunk._locks..tmp_123456789`, "", -1, "", "")
	assertParseName(`fish.chunk._meta..tmp_-1`, "", -1, "", "")
	assertParseName(`fish.chunk._blockinfo..tmp_012345678901234567890123456789`, "", -1, "", "")

	// short control chunk names: 3 letters ok, 1-2 letters not allowed
	assertMakeName(`fish.chunk._ext`, "fish", -1, "ext", "")
	assertParseName(`fish.chunk._int`, "fish", -1, "int", "")

	assertMakeNamePanics("fish", -1, "in", "")
	assertMakeNamePanics("fish", -1, "up", "4")
	assertMakeNamePanics("fish", -1, "x", "")
	assertMakeNamePanics("fish", -1, "c", "1z")

	assertMakeName(`fish.chunk._ext_0000`, "fish", -1, "ext", "0")
	assertMakeName(`fish.chunk._ext_0026`, "fish", -1, "ext", "26")
	assertMakeName(`fish.chunk._int_0abc`, "fish", -1, "int", "abc")
	assertMakeName(`fish.chunk._int_9xyz`, "fish", -1, "int", "9xyz")
	assertMakeName(`fish.chunk._out_jj5fvo3wr`, "fish", -1, "out", "jj5fvo3wr")
	assertMakeName(`fish.chunk._out_jj5fvo3wr`, "fish", -1, "out", "jj5fvo3wr")

	assertParseName(`fish.chunk._ext_0000`, "fish", -1, "ext", "0000")
	assertParseName(`fish.chunk._ext_0026`, "fish", -1, "ext", "0026")
	assertParseName(`fish.chunk._int_0abc`, "fish", -1, "int", "0abc")
	assertParseName(`fish.chunk._int_9xyz`, "fish", -1, "int", "9xyz")
	assertParseName(`fish.chunk._out_jj5fvo3wr`, "fish", -1, "out", "jj5fvo3wr")
	assertParseName(`fish.chunk._out_jj5fvo3wr`, "fish", -1, "out", "jj5fvo3wr")

	// base file name can sometimes look like a valid chunk name
	assertParseName(`fish.chunk.003.chunk.004`, "fish.chunk.003", 2, "", "")
	assertParseName(`fish.chunk.003.chunk._info`, "fish.chunk.003", -1, "info", "")
	assertParseName(`fish.chunk.003.chunk._Meta`, "", -1, "", "")

	assertParseName(`fish.chunk._info.chunk.004`, "fish.chunk._info", 2, "", "")
	assertParseName(`fish.chunk._info.chunk._info`, "fish.chunk._info", -1, "info", "")
	assertParseName(`fish.chunk._info.chunk._info.chunk._Meta`, "", -1, "", "")

	// base file name looking like a valid chunk name (old temporary suffix)
	assertParseName(`fish.chunk.003.chunk.005..tmp_0000000022`, "fish.chunk.003", 3, "", "000m")
	assertParseName(`fish.chunk.003.chunk._x..tmp_0000054321`, "", -1, "", "")
	assertParseName(`fish.chunk._info.chunk.005..tmp_0000000023`, "fish.chunk._info", 3, "", "000n")
	assertParseName(`fish.chunk._info.chunk._info.chunk._x..tmp_0000054321`, "", -1, "", "")

	assertParseName(`fish.chunk.003.chunk._blkinfo..tmp_9994567890123`, "fish.chunk.003", -1, "blkinfo", "3jjfvo3wr")
	assertParseName(`fish.chunk._info.chunk._blkinfo..tmp_9994567890123`, "fish.chunk._info", -1, "blkinfo", "3jjfvo3wr")

	assertParseName(`fish.chunk.004..tmp_0000000021.chunk.004`, "fish.chunk.004..tmp_0000000021", 2, "", "")
	assertParseName(`fish.chunk.004..tmp_0000000021.chunk.005..tmp_0000000025`, "fish.chunk.004..tmp_0000000021", 3, "", "000p")
	assertParseName(`fish.chunk.004..tmp_0000000021.chunk._info`, "fish.chunk.004..tmp_0000000021", -1, "info", "")
	assertParseName(`fish.chunk.004..tmp_0000000021.chunk._blkinfo..tmp_9994567890123`, "fish.chunk.004..tmp_0000000021", -1, "blkinfo", "3jjfvo3wr")
	assertParseName(`fish.chunk.004..tmp_0000000021.chunk._Meta`, "", -1, "", "")
	assertParseName(`fish.chunk.004..tmp_0000000021.chunk._x..tmp_0000054321`, "", -1, "", "")

	assertParseName(`fish.chunk._blkinfo..tmp_9994567890123.chunk.004`, "fish.chunk._blkinfo..tmp_9994567890123", 2, "", "")
	assertParseName(`fish.chunk._blkinfo..tmp_9994567890123.chunk.005..tmp_0000000026`, "fish.chunk._blkinfo..tmp_9994567890123", 3, "", "000q")
	assertParseName(`fish.chunk._blkinfo..tmp_9994567890123.chunk._info`, "fish.chunk._blkinfo..tmp_9994567890123", -1, "info", "")
	assertParseName(`fish.chunk._blkinfo..tmp_9994567890123.chunk._blkinfo..tmp_9994567890123`, "fish.chunk._blkinfo..tmp_9994567890123", -1, "blkinfo", "3jjfvo3wr")
	assertParseName(`fish.chunk._blkinfo..tmp_9994567890123.chunk._info.chunk._Meta`, "", -1, "", "")
	assertParseName(`fish.chunk._blkinfo..tmp_9994567890123.chunk._info.chunk._x..tmp_0000054321`, "", -1, "", "")

	assertParseName(`fish.chunk._blkinfo..tmp_1234567890123456789.chunk.004`, "fish.chunk._blkinfo..tmp_1234567890123456789", 2, "", "")
	assertParseName(`fish.chunk._blkinfo..tmp_1234567890123456789.chunk.005..tmp_0000000022`, "fish.chunk._blkinfo..tmp_1234567890123456789", 3, "", "000m")
	assertParseName(`fish.chunk._blkinfo..tmp_1234567890123456789.chunk._info`, "fish.chunk._blkinfo..tmp_1234567890123456789", -1, "info", "")
	assertParseName(`fish.chunk._blkinfo..tmp_1234567890123456789.chunk._blkinfo..tmp_9994567890123`, "fish.chunk._blkinfo..tmp_1234567890123456789", -1, "blkinfo", "3jjfvo3wr")
	assertParseName(`fish.chunk._blkinfo..tmp_1234567890123456789.chunk._info.chunk._Meta`, "", -1, "", "")
	assertParseName(`fish.chunk._blkinfo..tmp_1234567890123456789.chunk._info.chunk._x..tmp_0000054321`, "", -1, "", "")

	// attempts to make invalid chunk names
	assertMakeNamePanics("fish", -1, "", "")          // neither data nor control
	assertMakeNamePanics("fish", 0, "info", "")       // both data and control
	assertMakeNamePanics("fish", -1, "metadata", "")  // control type too long
	assertMakeNamePanics("fish", -1, "blockinfo", "") // control type way too long
	assertMakeNamePanics("fish", -1, "2xy", "")       // first digit not allowed
	assertMakeNamePanics("fish", -1, "123", "")       // all digits not allowed
	assertMakeNamePanics("fish", -1, "Meta", "")      // only lower case letters allowed
	assertMakeNamePanics("fish", -1, "in-fo", "")     // punctuation not allowed
	assertMakeNamePanics("fish", -1, "_info", "")
	assertMakeNamePanics("fish", -1, "info_", "")
	assertMakeNamePanics("fish", -2, ".bind", "")
	assertMakeNamePanics("fish", -2, "bind.", "")

	assertMakeNamePanics("fish", -1, "", "1")          // neither data nor control
	assertMakeNamePanics("fish", 0, "info", "23")      // both data and control
	assertMakeNamePanics("fish", -1, "metadata", "45") // control type too long
	assertMakeNamePanics("fish", -1, "blockinfo", "7") // control type way too long
	assertMakeNamePanics("fish", -1, "2xy", "abc")     // first digit not allowed
	assertMakeNamePanics("fish", -1, "123", "def")     // all digits not allowed
	assertMakeNamePanics("fish", -1, "Meta", "mnk")    // only lower case letters allowed
	assertMakeNamePanics("fish", -1, "in-fo", "xyz")   // punctuation not allowed
	assertMakeNamePanics("fish", -1, "_info", "5678")
	assertMakeNamePanics("fish", -1, "info_", "999")
	assertMakeNamePanics("fish", -2, ".bind", "0")
	assertMakeNamePanics("fish", -2, "bind.", "0")

	assertMakeNamePanics("fish", 0, "", "1234567890") // temporary suffix too long
	assertMakeNamePanics("fish", 0, "", "123F4")      // uppercase not allowed
	assertMakeNamePanics("fish", 0, "", "123.")       // punctuation not allowed
	assertMakeNamePanics("fish", 0, "", "_123")
}
   364  
// testSmallFileInternals checks the internal representation of files that
// fit in a single chunk: depending on the meta format and hashing mode the
// object may or may not be composite and may or may not carry a meta object.
func testSmallFileInternals(t *testing.T, f *Fs) {
	const dir = "small"
	ctx := context.Background()
	saveOpt := f.opt
	defer func() {
		f.opt.FailHard = false
		_ = operations.Purge(ctx, f.base, dir)
		f.opt = saveOpt
	}()
	f.opt.FailHard = false

	modTime := fstest.Time("2001-02-03T04:05:06.499999999Z")

	// checkSmallFileInternals verifies the chunker Object structure for the
	// three possible configurations (no meta, hash-all, default).
	checkSmallFileInternals := func(obj fs.Object) {
		assert.NotNil(t, obj)
		o, ok := obj.(*Object)
		assert.True(t, ok)
		assert.NotNil(t, o)
		if o == nil {
			return
		}
		switch {
		case !f.useMeta:
			// If meta format is "none", non-chunked file (even empty)
			// internally is a single chunk without meta object.
			assert.Nil(t, o.main)
			assert.True(t, o.isComposite()) // sorry, sometimes a name is misleading
			assert.Equal(t, 1, len(o.chunks))
		case f.hashAll:
			// Consistent hashing forces meta object on small files too
			assert.NotNil(t, o.main)
			assert.True(t, o.isComposite())
			assert.Equal(t, 1, len(o.chunks))
		default:
			// normally non-chunked file is kept in the Object's main field
			assert.NotNil(t, o.main)
			assert.False(t, o.isComposite())
			assert.Equal(t, 0, len(o.chunks))
		}
	}

	// checkContents reads the object back and compares size and data.
	checkContents := func(obj fs.Object, contents string) {
		assert.NotNil(t, obj)
		assert.Equal(t, int64(len(contents)), obj.Size())

		r, err := obj.Open(ctx)
		assert.NoError(t, err)
		assert.NotNil(t, r)
		if r == nil {
			return
		}
		data, err := io.ReadAll(r)
		assert.NoError(t, err)
		assert.Equal(t, contents, string(data))
		_ = r.Close()
	}

	// checkHashsum asserts a non-empty hash when consistent hashing is on;
	// it is a no-op for other configurations.
	checkHashsum := func(obj fs.Object) {
		var ht hash.Type
		switch {
		case !f.hashAll:
			return
		case f.useMD5:
			ht = hash.MD5
		case f.useSHA1:
			ht = hash.SHA1
		default:
			return
		}
		// even empty files must have hashsum in consistent mode
		sum, err := obj.Hash(ctx, ht)
		assert.NoError(t, err)
		assert.NotEqual(t, sum, "")
	}

	// checkSmallFile uploads a file, validates it both as returned by Put
	// and as re-read via NewObject, then removes it.
	checkSmallFile := func(name, contents string) {
		filename := path.Join(dir, name)
		item := fstest.Item{Path: filename, ModTime: modTime}
		put := fstests.PutTestContents(ctx, t, f, &item, contents, false)
		assert.NotNil(t, put)
		checkSmallFileInternals(put)
		checkContents(put, contents)
		checkHashsum(put)

		// objects returned by Put and NewObject must have similar structure
		obj, err := f.NewObject(ctx, filename)
		assert.NoError(t, err)
		assert.NotNil(t, obj)
		checkSmallFileInternals(obj)
		checkContents(obj, contents)
		checkHashsum(obj)

		_ = obj.Remove(ctx)
		_ = put.Remove(ctx) // for good
	}

	checkSmallFile("emptyfile", "")
	checkSmallFile("smallfile", "Ok")
}
   464  
// testPreventCorruption verifies that in strict (FailHard) mode the chunker
// refuses operations that would overwrite, move onto, or delete the chunks
// of an existing composite file, while still allowing reads of chunk data.
func testPreventCorruption(t *testing.T, f *Fs) {
	if f.opt.ChunkSize > 50 {
		t.Skip("this test requires small chunks")
	}
	const dir = "corrupted"
	ctx := context.Background()
	saveOpt := f.opt
	defer func() {
		f.opt.FailHard = false
		_ = operations.Purge(ctx, f.base, dir)
		f.opt = saveOpt
	}()
	f.opt.FailHard = true

	contents := random.String(250)
	modTime := fstest.Time("2001-02-03T04:05:06.499999999Z")
	const overlapMessage = "chunk overlap"

	// assertOverlapError expects the chunker's "chunk overlap" error.
	assertOverlapError := func(err error) {
		assert.Error(t, err)
		if err != nil {
			assert.Contains(t, err.Error(), overlapMessage)
		}
	}

	// newFile uploads a fresh multi-chunk file under dir.
	newFile := func(name string) fs.Object {
		item := fstest.Item{Path: path.Join(dir, name), ModTime: modTime}
		obj := fstests.PutTestContents(ctx, t, f, &item, contents, true)
		require.NotNil(t, obj)
		return obj
	}
	billyObj := newFile("billy")
	billyTxn := billyObj.(*Object).xactID
	// norename mode keeps the transaction ID in the final chunk names
	if f.useNoRename {
		require.True(t, billyTxn != "")
	} else {
		require.True(t, billyTxn == "")
	}

	billyChunkName := func(chunkNo int) string {
		return f.makeChunkName(billyObj.Remote(), chunkNo, "", billyTxn)
	}

	err := f.Mkdir(ctx, billyChunkName(1))
	assertOverlapError(err)

	_, err = f.Move(ctx, newFile("silly1"), billyChunkName(2))
	assert.Error(t, err)
	assert.True(t, err == fs.ErrorCantMove || (err != nil && strings.Contains(err.Error(), overlapMessage)))

	_, err = f.Copy(ctx, newFile("silly2"), billyChunkName(3))
	assert.Error(t, err)
	assert.True(t, err == fs.ErrorCantCopy || (err != nil && strings.Contains(err.Error(), overlapMessage)))

	// accessing chunks in strict mode is prohibited
	f.opt.FailHard = true
	billyChunk4Name := billyChunkName(4)
	// the chunk is visible on the underlying remote...
	_, err = f.base.NewObject(ctx, billyChunk4Name)
	require.NoError(t, err)
	// ...but the chunker itself refuses to expose it
	_, err = f.NewObject(ctx, billyChunk4Name)
	assertOverlapError(err)

	f.opt.FailHard = false
	billyChunk4, err := f.NewObject(ctx, billyChunk4Name)
	assert.NoError(t, err)
	require.NotNil(t, billyChunk4)

	f.opt.FailHard = true
	_, err = f.Put(ctx, bytes.NewBufferString(contents), billyChunk4)
	assertOverlapError(err)

	// you can freely read chunks (if you have an object)
	r, err := billyChunk4.Open(ctx)
	assert.NoError(t, err)
	var chunkContents []byte
	assert.NotPanics(t, func() {
		chunkContents, err = io.ReadAll(r)
		_ = r.Close()
	})
	assert.NoError(t, err)
	assert.NotEqual(t, contents, string(chunkContents))

	// but you can't change them
	err = billyChunk4.Update(ctx, bytes.NewBufferString(contents), newFile("silly3"))
	assertOverlapError(err)

	// Remove isn't special, you can't corrupt files even if you have an object
	err = billyChunk4.Remove(ctx)
	assertOverlapError(err)

	// recreate billy in case it was anyhow corrupted
	willyObj := newFile("willy")
	willyTxn := willyObj.(*Object).xactID
	willyChunkName := f.makeChunkName(willyObj.Remote(), 1, "", willyTxn)
	f.opt.FailHard = false
	willyChunk, err := f.NewObject(ctx, willyChunkName)
	f.opt.FailHard = true
	assert.NoError(t, err)
	require.NotNil(t, willyChunk)

	_, err = operations.Copy(ctx, f, willyChunk, willyChunkName, newFile("silly4"))
	assertOverlapError(err)

	// operations.Move will return error when chunker's Move refused
	// to corrupt target file, but reverts to copy/delete method
	// still trying to delete target chunk. Chunker must come to rescue.
	_, err = operations.Move(ctx, f, willyChunk, willyChunkName, newFile("silly5"))
	assertOverlapError(err)
	// the target chunk must remain readable after the failed move
	r, err = willyChunk.Open(ctx)
	assert.NoError(t, err)
	assert.NotPanics(t, func() {
		_, err = io.ReadAll(r)
		_ = r.Close()
	})
	assert.NoError(t, err)
}
   581  
// testChunkNumberOverflow plants a chunk with an absurdly high chunk number
// next to a real file and checks that listing/lookup degrade gracefully in
// lenient mode and fail loudly in strict (FailHard) mode.
func testChunkNumberOverflow(t *testing.T, f *Fs) {
	if f.opt.ChunkSize > 50 {
		t.Skip("this test requires small chunks")
	}
	const dir = "wreaked"
	const wreakNumber = 10200300
	ctx := context.Background()
	saveOpt := f.opt
	defer func() {
		f.opt.FailHard = false
		_ = operations.Purge(ctx, f.base, dir)
		f.opt = saveOpt
	}()

	modTime := fstest.Time("2001-02-03T04:05:06.499999999Z")
	contents := random.String(100)

	// newFile uploads to the given fs (chunker or its base remote) and
	// captures the transaction ID when the result is a chunker Object.
	newFile := func(f fs.Fs, name string) (obj fs.Object, filename string, txnID string) {
		filename = path.Join(dir, name)
		item := fstest.Item{Path: filename, ModTime: modTime}
		obj = fstests.PutTestContents(ctx, t, f, &item, contents, true)
		require.NotNil(t, obj)
		if chunkObj, isChunkObj := obj.(*Object); isChunkObj {
			txnID = chunkObj.xactID
		}
		return
	}

	f.opt.FailHard = false
	file, fileName, fileTxn := newFile(f, "wreaker")
	// plant the rogue chunk directly on the base remote
	wreak, _, _ := newFile(f.base, f.makeChunkName("wreaker", wreakNumber, "", fileTxn))

	f.opt.FailHard = false
	fstest.CheckListingWithRoot(t, f, dir, nil, nil, f.Precision())
	_, err := f.NewObject(ctx, fileName)
	assert.Error(t, err)

	f.opt.FailHard = true
	_, err = f.List(ctx, dir)
	assert.Error(t, err)
	_, err = f.NewObject(ctx, fileName)
	assert.Error(t, err)

	f.opt.FailHard = false
	_ = wreak.Remove(ctx)
	_ = file.Remove(ctx)
}
   629  
   630  func testMetadataInput(t *testing.T, f *Fs) {
   631  	const minChunkForTest = 50
   632  	if f.opt.ChunkSize < minChunkForTest {
   633  		t.Skip("this test requires chunks that fit metadata")
   634  	}
   635  
   636  	const dir = "usermeta"
   637  	ctx := context.Background()
   638  	saveOpt := f.opt
   639  	defer func() {
   640  		f.opt.FailHard = false
   641  		_ = operations.Purge(ctx, f.base, dir)
   642  		f.opt = saveOpt
   643  	}()
   644  	f.opt.FailHard = false
   645  
   646  	runSubtest := func(contents, name string) {
   647  		description := fmt.Sprintf("file with %s metadata", name)
   648  		filename := path.Join(dir, name)
   649  		require.True(t, len(contents) > 2 && len(contents) < minChunkForTest, description+" test data is correct")
   650  
   651  		part := testPutFile(ctx, t, f.base, f.makeChunkName(filename, 0, "", ""), "oops", "", true)
   652  		_ = testPutFile(ctx, t, f, filename, contents, "upload "+description, false)
   653  
   654  		obj, err := f.NewObject(ctx, filename)
   655  		assert.NoError(t, err, "access "+description)
   656  		assert.NotNil(t, obj)
   657  		assert.Equal(t, int64(len(contents)), obj.Size(), "size "+description)
   658  
   659  		o, ok := obj.(*Object)
   660  		assert.NotNil(t, ok)
   661  		if o != nil {
   662  			assert.True(t, o.isComposite() && len(o.chunks) == 1, description+" is forced composite")
   663  			o = nil
   664  		}
   665  
   666  		defer func() {
   667  			_ = obj.Remove(ctx)
   668  			_ = part.Remove(ctx)
   669  		}()
   670  
   671  		r, err := obj.Open(ctx)
   672  		assert.NoError(t, err, "open "+description)
   673  		assert.NotNil(t, r, "open stream of "+description)
   674  		if err == nil && r != nil {
   675  			data, err := io.ReadAll(r)
   676  			assert.NoError(t, err, "read all of "+description)
   677  			assert.Equal(t, contents, string(data), description+" contents is ok")
   678  			_ = r.Close()
   679  		}
   680  	}
   681  
   682  	metaData, err := marshalSimpleJSON(ctx, 3, 1, "", "", "")
   683  	require.NoError(t, err)
   684  	todaysMeta := string(metaData)
   685  	runSubtest(todaysMeta, "today")
   686  
   687  	pastMeta := regexp.MustCompile(`"ver":[0-9]+`).ReplaceAllLiteralString(todaysMeta, `"ver":1`)
   688  	pastMeta = regexp.MustCompile(`"size":[0-9]+`).ReplaceAllLiteralString(pastMeta, `"size":0`)
   689  	runSubtest(pastMeta, "past")
   690  
   691  	futureMeta := regexp.MustCompile(`"ver":[0-9]+`).ReplaceAllLiteralString(todaysMeta, `"ver":999`)
   692  	futureMeta = regexp.MustCompile(`"nchunks":[0-9]+`).ReplaceAllLiteralString(futureMeta, `"nchunks":0,"x":"y"`)
   693  	runSubtest(futureMeta, "future")
   694  }
   695  
   696  // Test that chunker refuses to change on objects with future/unknown metadata
   697  func testFutureProof(t *testing.T, f *Fs) {
   698  	if !f.useMeta {
   699  		t.Skip("this test requires metadata support")
   700  	}
   701  
   702  	saveOpt := f.opt
   703  	ctx := context.Background()
   704  	f.opt.FailHard = true
   705  	const dir = "future"
   706  	const file = dir + "/test"
   707  	defer func() {
   708  		f.opt.FailHard = false
   709  		_ = operations.Purge(ctx, f.base, dir)
   710  		f.opt = saveOpt
   711  	}()
   712  
   713  	modTime := fstest.Time("2001-02-03T04:05:06.499999999Z")
   714  	putPart := func(name string, part int, data, msg string) {
   715  		if part > 0 {
   716  			name = f.makeChunkName(name, part-1, "", "")
   717  		}
   718  		item := fstest.Item{Path: name, ModTime: modTime}
   719  		obj := fstests.PutTestContents(ctx, t, f.base, &item, data, true)
   720  		assert.NotNil(t, obj, msg)
   721  	}
   722  
   723  	// simulate chunked object from future
   724  	meta := `{"ver":999,"nchunks":3,"size":9,"garbage":"litter","sha1":"0707f2970043f9f7c22029482db27733deaec029"}`
   725  	putPart(file, 0, meta, "metaobject")
   726  	putPart(file, 1, "abc", "chunk1")
   727  	putPart(file, 2, "def", "chunk2")
   728  	putPart(file, 3, "ghi", "chunk3")
   729  
   730  	// List should succeed
   731  	ls, err := f.List(ctx, dir)
   732  	assert.NoError(t, err)
   733  	assert.Equal(t, 1, len(ls))
   734  	assert.Equal(t, int64(9), ls[0].Size())
   735  
   736  	// NewObject should succeed
   737  	obj, err := f.NewObject(ctx, file)
   738  	assert.NoError(t, err)
   739  	assert.Equal(t, file, obj.Remote())
   740  	assert.Equal(t, int64(9), obj.Size())
   741  
   742  	// Hash must fail
   743  	_, err = obj.Hash(ctx, hash.SHA1)
   744  	assert.Equal(t, ErrMetaUnknown, err)
   745  
   746  	// Move must fail
   747  	mobj, err := operations.Move(ctx, f, nil, file+"2", obj)
   748  	assert.Nil(t, mobj)
   749  	assert.Error(t, err)
   750  	if err != nil {
   751  		assert.Contains(t, err.Error(), "please upgrade rclone")
   752  	}
   753  
   754  	// Put must fail
   755  	oi := object.NewStaticObjectInfo(file, modTime, 3, true, nil, nil)
   756  	buf := bytes.NewBufferString("abc")
   757  	_, err = f.Put(ctx, buf, oi)
   758  	assert.Error(t, err)
   759  
   760  	// Rcat must fail
   761  	in := io.NopCloser(bytes.NewBufferString("abc"))
   762  	robj, err := operations.Rcat(ctx, f, file, in, modTime, nil)
   763  	assert.Nil(t, robj)
   764  	assert.NotNil(t, err)
   765  	if err != nil {
   766  		assert.Contains(t, err.Error(), "please upgrade rclone")
   767  	}
   768  }
   769  
   770  // The newer method of doing transactions without renaming should still be able to correctly process chunks that were created with renaming
   771  // If you attempt to do the inverse, however, the data chunks will be ignored causing commands to perform incorrectly
   772  func testBackwardsCompatibility(t *testing.T, f *Fs) {
   773  	if !f.useMeta {
   774  		t.Skip("Can't do norename transactions without metadata")
   775  	}
   776  	const dir = "backcomp"
   777  	ctx := context.Background()
   778  	saveOpt := f.opt
   779  	saveUseNoRename := f.useNoRename
   780  	defer func() {
   781  		f.opt.FailHard = false
   782  		_ = operations.Purge(ctx, f.base, dir)
   783  		f.opt = saveOpt
   784  		f.useNoRename = saveUseNoRename
   785  	}()
   786  	f.opt.ChunkSize = fs.SizeSuffix(10)
   787  
   788  	modTime := fstest.Time("2001-02-03T04:05:06.499999999Z")
   789  	contents := random.String(250)
   790  	newFile := func(f fs.Fs, name string) (fs.Object, string) {
   791  		filename := path.Join(dir, name)
   792  		item := fstest.Item{Path: filename, ModTime: modTime}
   793  		obj := fstests.PutTestContents(ctx, t, f, &item, contents, true)
   794  		require.NotNil(t, obj)
   795  		return obj, filename
   796  	}
   797  
   798  	f.opt.FailHard = false
   799  	f.useNoRename = false
   800  	file, fileName := newFile(f, "renamefile")
   801  
   802  	f.opt.FailHard = false
   803  	item := fstest.NewItem(fileName, contents, modTime)
   804  
   805  	var items []fstest.Item
   806  	items = append(items, item)
   807  
   808  	f.useNoRename = true
   809  	fstest.CheckListingWithRoot(t, f, dir, items, nil, f.Precision())
   810  	_, err := f.NewObject(ctx, fileName)
   811  	assert.NoError(t, err)
   812  
   813  	f.opt.FailHard = true
   814  	_, err = f.List(ctx, dir)
   815  	assert.NoError(t, err)
   816  
   817  	f.opt.FailHard = false
   818  	_ = file.Remove(ctx)
   819  }
   820  
   821  func testChunkerServerSideMove(t *testing.T, f *Fs) {
   822  	if !f.useMeta {
   823  		t.Skip("Can't test norename transactions without metadata")
   824  	}
   825  
   826  	ctx := context.Background()
   827  	const dir = "servermovetest"
   828  	subRemote := fmt.Sprintf("%s:%s/%s", f.Name(), f.Root(), dir)
   829  
   830  	subFs1, err := fs.NewFs(ctx, subRemote+"/subdir1")
   831  	assert.NoError(t, err)
   832  	fs1, isChunkerFs := subFs1.(*Fs)
   833  	assert.True(t, isChunkerFs)
   834  	fs1.useNoRename = false
   835  	fs1.opt.ChunkSize = fs.SizeSuffix(3)
   836  
   837  	subFs2, err := fs.NewFs(ctx, subRemote+"/subdir2")
   838  	assert.NoError(t, err)
   839  	fs2, isChunkerFs := subFs2.(*Fs)
   840  	assert.True(t, isChunkerFs)
   841  	fs2.useNoRename = true
   842  	fs2.opt.ChunkSize = fs.SizeSuffix(3)
   843  
   844  	modTime := fstest.Time("2001-02-03T04:05:06.499999999Z")
   845  	item := fstest.Item{Path: "movefile", ModTime: modTime}
   846  	contents := "abcdef"
   847  	file := fstests.PutTestContents(ctx, t, fs1, &item, contents, true)
   848  
   849  	dstOverwritten, _ := fs2.NewObject(ctx, "movefile")
   850  	dstFile, err := operations.Move(ctx, fs2, dstOverwritten, "movefile", file)
   851  	assert.NoError(t, err)
   852  	assert.Equal(t, int64(len(contents)), dstFile.Size())
   853  
   854  	r, err := dstFile.Open(ctx)
   855  	assert.NoError(t, err)
   856  	assert.NotNil(t, r)
   857  	data, err := io.ReadAll(r)
   858  	assert.NoError(t, err)
   859  	assert.Equal(t, contents, string(data))
   860  	_ = r.Close()
   861  	_ = operations.Purge(ctx, f.base, dir)
   862  }
   863  
   864  // Test that md5all creates metadata even for small files
   865  func testMD5AllSlow(t *testing.T, f *Fs) {
   866  	ctx := context.Background()
   867  	fsResult := deriveFs(ctx, t, f, "md5all", settings{
   868  		"chunk_size":   "1P",
   869  		"name_format":  "*.#",
   870  		"hash_type":    "md5all",
   871  		"transactions": "rename",
   872  		"meta_format":  "simplejson",
   873  	})
   874  	chunkFs, ok := fsResult.(*Fs)
   875  	require.True(t, ok, "fs must be a chunker remote")
   876  	baseFs := chunkFs.base
   877  	if !baseFs.Features().SlowHash {
   878  		t.Skipf("this test needs a base fs with slow hash, e.g. local")
   879  	}
   880  
   881  	assert.True(t, chunkFs.useMD5, "must use md5")
   882  	assert.True(t, chunkFs.hashAll, "must hash all files")
   883  
   884  	_ = testPutFile(ctx, t, chunkFs, "file", "-", "error", true)
   885  	obj, err := chunkFs.NewObject(ctx, "file")
   886  	require.NoError(t, err)
   887  	sum, err := obj.Hash(ctx, hash.MD5)
   888  	assert.NoError(t, err)
   889  	assert.Equal(t, "336d5ebc5436534e61d16e63ddfca327", sum)
   890  
   891  	list, err := baseFs.List(ctx, "")
   892  	require.NoError(t, err)
   893  	assert.Equal(t, 2, len(list))
   894  	_, err = baseFs.NewObject(ctx, "file")
   895  	assert.NoError(t, err, "metadata must be created")
   896  	_, err = baseFs.NewObject(ctx, "file.1")
   897  	assert.NoError(t, err, "first chunk must be created")
   898  
   899  	require.NoError(t, operations.Purge(ctx, baseFs, ""))
   900  }
   901  
   902  // InternalTest dispatches all internal tests
   903  func (f *Fs) InternalTest(t *testing.T) {
   904  	t.Run("PutLarge", func(t *testing.T) {
   905  		if *UploadKilobytes <= 0 {
   906  			t.Skip("-upload-kilobytes is not set")
   907  		}
   908  		testPutLarge(t, f, *UploadKilobytes)
   909  	})
   910  	t.Run("ChunkNameFormat", func(t *testing.T) {
   911  		testChunkNameFormat(t, f)
   912  	})
   913  	t.Run("SmallFileInternals", func(t *testing.T) {
   914  		testSmallFileInternals(t, f)
   915  	})
   916  	t.Run("PreventCorruption", func(t *testing.T) {
   917  		testPreventCorruption(t, f)
   918  	})
   919  	t.Run("ChunkNumberOverflow", func(t *testing.T) {
   920  		testChunkNumberOverflow(t, f)
   921  	})
   922  	t.Run("MetadataInput", func(t *testing.T) {
   923  		testMetadataInput(t, f)
   924  	})
   925  	t.Run("FutureProof", func(t *testing.T) {
   926  		testFutureProof(t, f)
   927  	})
   928  	t.Run("BackwardsCompatibility", func(t *testing.T) {
   929  		testBackwardsCompatibility(t, f)
   930  	})
   931  	t.Run("ChunkerServerSideMove", func(t *testing.T) {
   932  		testChunkerServerSideMove(t, f)
   933  	})
   934  	t.Run("MD5AllSlow", func(t *testing.T) {
   935  		testMD5AllSlow(t, f)
   936  	})
   937  }
   938  
   939  var _ fstests.InternalTester = (*Fs)(nil)