github.com/10XDev/rclone@v1.52.3-0.20200626220027-16af9ab76b2a/fs/operations/operations_test.go (about) 1 // Integration tests - test rclone by doing real transactions to a 2 // storage provider to and from the local disk. 3 // 4 // By default it will use a local fs, however you can provide a 5 // -remote option to use a different remote. The test_all.go script 6 // is a wrapper to call this for all the test remotes. 7 // 8 // FIXME not safe for concurrent running of tests until fs.Config is 9 // no longer a global 10 // 11 // NB When writing tests 12 // 13 // Make sure every series of writes to the remote has a 14 // fstest.CheckItems() before use. This make sure the directory 15 // listing is now consistent and stops cascading errors. 16 // 17 // Call accounting.GlobalStats().ResetCounters() before every fs.Sync() as it 18 // uses the error count internally. 19 20 package operations_test 21 22 import ( 23 "bytes" 24 "context" 25 "errors" 26 "fmt" 27 "io" 28 "io/ioutil" 29 "log" 30 "net/http" 31 "net/http/httptest" 32 "os" 33 "regexp" 34 "strings" 35 "testing" 36 "time" 37 38 _ "github.com/rclone/rclone/backend/all" // import all backends 39 "github.com/rclone/rclone/fs" 40 "github.com/rclone/rclone/fs/accounting" 41 "github.com/rclone/rclone/fs/filter" 42 "github.com/rclone/rclone/fs/fserrors" 43 "github.com/rclone/rclone/fs/fshttp" 44 "github.com/rclone/rclone/fs/hash" 45 "github.com/rclone/rclone/fs/operations" 46 "github.com/rclone/rclone/fstest" 47 "github.com/rclone/rclone/lib/random" 48 "github.com/rclone/rclone/lib/readers" 49 "github.com/stretchr/testify/assert" 50 "github.com/stretchr/testify/require" 51 ) 52 53 // Some times used in the tests 54 var ( 55 t1 = fstest.Time("2001-02-03T04:05:06.499999999Z") 56 t2 = fstest.Time("2011-12-25T12:59:59.123456789Z") 57 t3 = fstest.Time("2011-12-30T12:59:59.000000000Z") 58 ) 59 60 // TestMain drives the tests 61 func TestMain(m *testing.M) { 62 fstest.TestMain(m) 63 } 64 65 func TestMkdir(t *testing.T) { 66 r := fstest.NewRun(t) 67 defer r.Finalise() 68 69 err := operations.Mkdir(context.Background(), r.Fremote, "") 70 require.NoError(t, err) 71 fstest.CheckListing(t, r.Fremote, []fstest.Item{}) 72 73 err = operations.Mkdir(context.Background(), r.Fremote, "") 74 require.NoError(t, err) 75 } 76 77 func TestLsd(t *testing.T) { 78 r := fstest.NewRun(t) 79 defer r.Finalise() 80 file1 := r.WriteObject(context.Background(), "sub dir/hello world", "hello world", t1) 81 82 fstest.CheckItems(t, r.Fremote, file1) 83 84 var buf bytes.Buffer 85 err := operations.ListDir(context.Background(), r.Fremote, &buf) 86 require.NoError(t, err) 87 res := buf.String() 88 assert.Contains(t, res, "sub dir\n") 89 } 90 91 func TestLs(t *testing.T) { 92 r := fstest.NewRun(t) 93 defer r.Finalise() 94 file1 := r.WriteBoth(context.Background(), "potato2", "------------------------------------------------------------", t1) 95 file2 := r.WriteBoth(context.Background(), "empty space", "-", t2) 96 97 fstest.CheckItems(t, r.Fremote, file1, file2) 98 99 var buf bytes.Buffer 100 err := operations.List(context.Background(), r.Fremote, &buf) 101 require.NoError(t, err) 102 res := buf.String() 103 assert.Contains(t, res, " 1 empty space\n") 104 assert.Contains(t, res, " 60 potato2\n") 105 } 106 107 func TestLsWithFilesFrom(t *testing.T) { 108 r := fstest.NewRun(t) 109 defer r.Finalise() 110 file1 := r.WriteBoth(context.Background(), "potato2", "------------------------------------------------------------", t1) 111 file2 := r.WriteBoth(context.Background(), "empty space", "-", t2) 112 113 fstest.CheckItems(t, r.Fremote, file1, file2) 114 115 // Set the --files-from equivalent 116 f, err := filter.NewFilter(nil) 117 require.NoError(t, err) 118 require.NoError(t, f.AddFile("potato2")) 119 require.NoError(t, f.AddFile("notfound")) 120 121 // Monkey patch the active filter 122 oldFilter := filter.Active 123 filter.Active = f 124 defer func() { 125 filter.Active = oldFilter 126 }() 127 128 var buf bytes.Buffer 129 err = operations.List(context.Background(), r.Fremote, &buf) 130 require.NoError(t, err) 131 assert.Equal(t, " 60 potato2\n", buf.String()) 132 133 // Now try with --no-traverse 134 oldNoTraverse := fs.Config.NoTraverse 135 fs.Config.NoTraverse = true 136 defer func() { 137 fs.Config.NoTraverse = oldNoTraverse 138 }() 139 140 buf.Reset() 141 err = operations.List(context.Background(), r.Fremote, &buf) 142 require.NoError(t, err) 143 assert.Equal(t, " 60 potato2\n", buf.String()) 144 } 145 146 func TestLsLong(t *testing.T) { 147 r := fstest.NewRun(t) 148 defer r.Finalise() 149 file1 := r.WriteBoth(context.Background(), "potato2", "------------------------------------------------------------", t1) 150 file2 := r.WriteBoth(context.Background(), "empty space", "-", t2) 151 152 fstest.CheckItems(t, r.Fremote, file1, file2) 153 154 var buf bytes.Buffer 155 err := operations.ListLong(context.Background(), r.Fremote, &buf) 156 require.NoError(t, err) 157 res := buf.String() 158 lines := strings.Split(strings.Trim(res, "\n"), "\n") 159 assert.Equal(t, 2, len(lines)) 160 161 timeFormat := "2006-01-02 15:04:05.000000000" 162 precision := r.Fremote.Precision() 163 location := time.Now().Location() 164 checkTime := func(m, filename string, expected time.Time) { 165 modTime, err := time.ParseInLocation(timeFormat, m, location) // parse as localtime 166 if err != nil { 167 t.Errorf("Error parsing %q: %v", m, err) 168 } else { 169 fstest.AssertTimeEqualWithPrecision(t, filename, expected, modTime, precision) 170 } 171 } 172 173 m1 := regexp.MustCompile(`(?m)^ 1 (\d{4}-\d\d-\d\d \d\d:\d\d:\d\d\.\d{9}) empty space$`) 174 if ms := m1.FindStringSubmatch(res); ms == nil { 175 t.Errorf("empty space missing: %q", res) 176 } else { 177 checkTime(ms[1], "empty space", t2.Local()) 178 } 179 180 m2 := regexp.MustCompile(`(?m)^ 60 (\d{4}-\d\d-\d\d \d\d:\d\d:\d\d\.\d{9}) potato2$`) 181 if ms := m2.FindStringSubmatch(res); ms == nil { 182 t.Errorf("potato2 missing: %q", res) 183 } else { 184 checkTime(ms[1], "potato2", t1.Local()) 185 } 186 } 187 188 func TestHashSums(t *testing.T) { 189 r := fstest.NewRun(t) 190 defer r.Finalise() 191 file1 := r.WriteBoth(context.Background(), "potato2", "------------------------------------------------------------", t1) 192 file2 := r.WriteBoth(context.Background(), "empty space", "-", t2) 193 194 fstest.CheckItems(t, r.Fremote, file1, file2) 195 196 // MD5 Sum 197 198 var buf bytes.Buffer 199 err := operations.Md5sum(context.Background(), r.Fremote, &buf) 200 require.NoError(t, err) 201 res := buf.String() 202 if !strings.Contains(res, "336d5ebc5436534e61d16e63ddfca327 empty space\n") && 203 !strings.Contains(res, " UNSUPPORTED empty space\n") && 204 !strings.Contains(res, " empty space\n") { 205 t.Errorf("empty space missing: %q", res) 206 } 207 if !strings.Contains(res, "d6548b156ea68a4e003e786df99eee76 potato2\n") && 208 !strings.Contains(res, " UNSUPPORTED potato2\n") && 209 !strings.Contains(res, " potato2\n") { 210 t.Errorf("potato2 missing: %q", res) 211 } 212 213 // SHA1 Sum 214 215 buf.Reset() 216 err = operations.Sha1sum(context.Background(), r.Fremote, &buf) 217 require.NoError(t, err) 218 res = buf.String() 219 if !strings.Contains(res, "3bc15c8aae3e4124dd409035f32ea2fd6835efc9 empty space\n") && 220 !strings.Contains(res, " UNSUPPORTED empty space\n") && 221 !strings.Contains(res, " empty space\n") { 222 t.Errorf("empty space missing: %q", res) 223 } 224 if !strings.Contains(res, "9dc7f7d3279715991a22853f5981df582b7f9f6d potato2\n") && 225 !strings.Contains(res, " UNSUPPORTED potato2\n") && 226 !strings.Contains(res, " potato2\n") { 227 t.Errorf("potato2 missing: %q", res) 228 } 229 230 // QuickXorHash Sum 231 232 buf.Reset() 233 var ht hash.Type 234 err = ht.Set("QuickXorHash") 235 require.NoError(t, err) 236 err = operations.HashLister(context.Background(), ht, r.Fremote, &buf) 237 require.NoError(t, err) 238 res = buf.String() 239 if !strings.Contains(res, "2d00000000000000000000000100000000000000 empty space\n") && 240 !strings.Contains(res, " UNSUPPORTED empty space\n") && 241 !strings.Contains(res, " empty space\n") { 242 t.Errorf("empty space missing: %q", res) 243 } 244 if !strings.Contains(res, "4001dad296b6b4a52d6d694b67dad296b6b4a52d potato2\n") && 245 !strings.Contains(res, " UNSUPPORTED potato2\n") && 246 !strings.Contains(res, " potato2\n") { 247 t.Errorf("potato2 missing: %q", res) 248 } 249 250 // QuickXorHash Sum with Base64 Encoded 251 252 buf.Reset() 253 err = operations.HashListerBase64(context.Background(), ht, r.Fremote, &buf) 254 require.NoError(t, err) 255 res = buf.String() 256 if !strings.Contains(res, "LQAAAAAAAAAAAAAAAQAAAAAAAAA= empty space\n") && 257 !strings.Contains(res, " UNSUPPORTED empty space\n") && 258 !strings.Contains(res, " empty space\n") { 259 t.Errorf("empty space missing: %q", res) 260 } 261 if !strings.Contains(res, "QAHa0pa2tKUtbWlLZ9rSlra0pS0= potato2\n") && 262 !strings.Contains(res, " UNSUPPORTED potato2\n") && 263 !strings.Contains(res, " potato2\n") { 264 t.Errorf("potato2 missing: %q", res) 265 } 266 } 267 268 func TestSuffixName(t *testing.T) { 269 origSuffix, origKeepExt := fs.Config.Suffix, fs.Config.SuffixKeepExtension 270 defer func() { 271 fs.Config.Suffix, fs.Config.SuffixKeepExtension = origSuffix, origKeepExt 272 }() 273 for _, test := range []struct { 274 remote string 275 suffix string 276 keepExt bool 277 want string 278 }{ 279 {"test.txt", "", false, "test.txt"}, 280 {"test.txt", "", true, "test.txt"}, 281 {"test.txt", "-suffix", false, "test.txt-suffix"}, 282 {"test.txt", "-suffix", true, "test-suffix.txt"}, 283 {"test.txt.csv", "-suffix", false, "test.txt.csv-suffix"}, 284 {"test.txt.csv", "-suffix", true, "test.txt-suffix.csv"}, 285 {"test", "-suffix", false, "test-suffix"}, 286 {"test", "-suffix", true, "test-suffix"}, 287 } { 288 fs.Config.Suffix = test.suffix 289 fs.Config.SuffixKeepExtension = test.keepExt 290 got := operations.SuffixName(test.remote) 291 assert.Equal(t, test.want, got, fmt.Sprintf("%+v", test)) 292 } 293 } 294 295 func TestCount(t *testing.T) { 296 r := fstest.NewRun(t) 297 defer r.Finalise() 298 file1 := r.WriteBoth(context.Background(), "potato2", "------------------------------------------------------------", t1) 299 file2 := r.WriteBoth(context.Background(), "empty space", "-", t2) 300 file3 := r.WriteBoth(context.Background(), "sub dir/potato3", "hello", t2) 301 302 fstest.CheckItems(t, r.Fremote, file1, file2, file3) 303 304 // Check the MaxDepth too 305 fs.Config.MaxDepth = 1 306 defer func() { fs.Config.MaxDepth = -1 }() 307 308 objects, size, err := operations.Count(context.Background(), r.Fremote) 309 require.NoError(t, err) 310 assert.Equal(t, int64(2), objects) 311 assert.Equal(t, int64(61), size) 312 } 313 314 func TestDelete(t *testing.T) { 315 r := fstest.NewRun(t) 316 defer r.Finalise() 317 file1 := r.WriteObject(context.Background(), "small", "1234567890", t2) // 10 bytes 318 file2 := r.WriteObject(context.Background(), "medium", "------------------------------------------------------------", t1) // 60 bytes 319 file3 := r.WriteObject(context.Background(), "large", "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA", t1) // 100 bytes 320 fstest.CheckItems(t, r.Fremote, file1, file2, file3) 321 322 filter.Active.Opt.MaxSize = 60 323 defer func() { 324 filter.Active.Opt.MaxSize = -1 325 }() 326 327 err := operations.Delete(context.Background(), r.Fremote) 328 require.NoError(t, err) 329 fstest.CheckItems(t, r.Fremote, file3) 330 } 331 332 func testCheck(t *testing.T, checkFunction func(ctx context.Context, fdst, fsrc fs.Fs, oneway bool) error) { 333 r := fstest.NewRun(t) 334 defer r.Finalise() 335 336 check := func(i int, wantErrors int64, wantChecks int64, oneway bool) { 337 fs.Debugf(r.Fremote, "%d: Starting check test", i) 338 accounting.GlobalStats().ResetCounters() 339 var buf bytes.Buffer 340 log.SetOutput(&buf) 341 defer func() { 342 log.SetOutput(os.Stderr) 343 }() 344 err := checkFunction(context.Background(), r.Fremote, r.Flocal, oneway) 345 gotErrors := accounting.GlobalStats().GetErrors() 346 gotChecks := accounting.GlobalStats().GetChecks() 347 if wantErrors == 0 && err != nil { 348 t.Errorf("%d: Got error when not expecting one: %v", i, err) 349 } 350 if wantErrors != 0 && err == nil { 351 t.Errorf("%d: No error when expecting one", i) 352 } 353 if wantErrors != gotErrors { 354 t.Errorf("%d: Expecting %d errors but got %d", i, wantErrors, gotErrors) 355 } 356 if gotChecks > 0 && !strings.Contains(buf.String(), "matching files") { 357 t.Errorf("%d: Total files matching line missing", i) 358 } 359 if wantChecks != gotChecks { 360 t.Errorf("%d: Expecting %d total matching files but got %d", i, wantChecks, gotChecks) 361 } 362 fs.Debugf(r.Fremote, "%d: Ending check test", i) 363 } 364 365 file1 := r.WriteBoth(context.Background(), "rutabaga", "is tasty", t3) 366 fstest.CheckItems(t, r.Fremote, file1) 367 fstest.CheckItems(t, r.Flocal, file1) 368 check(1, 0, 1, false) 369 370 file2 := r.WriteFile("potato2", "------------------------------------------------------------", t1) 371 fstest.CheckItems(t, r.Flocal, file1, file2) 372 check(2, 1, 1, false) 373 374 file3 := r.WriteObject(context.Background(), "empty space", "-", t2) 375 fstest.CheckItems(t, r.Fremote, file1, file3) 376 check(3, 2, 1, false) 377 378 file2r := file2 379 if fs.Config.SizeOnly { 380 file2r = r.WriteObject(context.Background(), "potato2", "--Some-Differences-But-Size-Only-Is-Enabled-----------------", t1) 381 } else { 382 r.WriteObject(context.Background(), "potato2", "------------------------------------------------------------", t1) 383 } 384 fstest.CheckItems(t, r.Fremote, file1, file2r, file3) 385 check(4, 1, 2, false) 386 387 r.WriteFile("empty space", "-", t2) 388 fstest.CheckItems(t, r.Flocal, file1, file2, file3) 389 check(5, 0, 3, false) 390 391 file4 := r.WriteObject(context.Background(), "remotepotato", "------------------------------------------------------------", t1) 392 fstest.CheckItems(t, r.Fremote, file1, file2r, file3, file4) 393 check(6, 1, 3, false) 394 check(7, 0, 3, true) 395 } 396 397 func TestCheck(t *testing.T) { 398 testCheck(t, operations.Check) 399 } 400 401 func TestCheckFsError(t *testing.T) { 402 dstFs, err := fs.NewFs("non-existent") 403 if err != nil { 404 t.Fatal(err) 405 } 406 srcFs, err := fs.NewFs("non-existent") 407 if err != nil { 408 t.Fatal(err) 409 } 410 err = operations.Check(context.Background(), dstFs, srcFs, false) 411 require.Error(t, err) 412 } 413 414 func TestCheckDownload(t *testing.T) { 415 testCheck(t, operations.CheckDownload) 416 } 417 418 func TestCheckSizeOnly(t *testing.T) { 419 fs.Config.SizeOnly = true 420 defer func() { fs.Config.SizeOnly = false }() 421 TestCheck(t) 422 } 423 424 func TestCat(t *testing.T) { 425 r := fstest.NewRun(t) 426 defer r.Finalise() 427 file1 := r.WriteBoth(context.Background(), "file1", "ABCDEFGHIJ", t1) 428 file2 := r.WriteBoth(context.Background(), "file2", "012345678", t2) 429 430 fstest.CheckItems(t, r.Fremote, file1, file2) 431 432 for _, test := range []struct { 433 offset int64 434 count int64 435 a string 436 b string 437 }{ 438 {0, -1, "ABCDEFGHIJ", "012345678"}, 439 {0, 5, "ABCDE", "01234"}, 440 {-3, -1, "HIJ", "678"}, 441 {1, 3, "BCD", "123"}, 442 } { 443 var buf bytes.Buffer 444 err := operations.Cat(context.Background(), r.Fremote, &buf, test.offset, test.count) 445 require.NoError(t, err) 446 res := buf.String() 447 448 if res != test.a+test.b && res != test.b+test.a { 449 t.Errorf("Incorrect output from Cat(%d,%d): %q", test.offset, test.count, res) 450 } 451 } 452 } 453 454 func TestPurge(t *testing.T) { 455 r := fstest.NewRunIndividual(t) // make new container (azureblob has delayed mkdir after rmdir) 456 defer r.Finalise() 457 r.Mkdir(context.Background(), r.Fremote) 458 459 // Make some files and dirs 460 r.ForceMkdir(context.Background(), r.Fremote) 461 file1 := r.WriteObject(context.Background(), "A1/B1/C1/one", "aaa", t1) 462 //..and dirs we expect to delete 463 require.NoError(t, operations.Mkdir(context.Background(), r.Fremote, "A2")) 464 require.NoError(t, operations.Mkdir(context.Background(), r.Fremote, "A1/B2")) 465 require.NoError(t, operations.Mkdir(context.Background(), r.Fremote, "A1/B2/C2")) 466 require.NoError(t, operations.Mkdir(context.Background(), r.Fremote, "A1/B1/C3")) 467 require.NoError(t, operations.Mkdir(context.Background(), r.Fremote, "A3")) 468 require.NoError(t, operations.Mkdir(context.Background(), r.Fremote, "A3/B3")) 469 require.NoError(t, operations.Mkdir(context.Background(), r.Fremote, "A3/B3/C4")) 470 //..and one more file at the end 471 file2 := r.WriteObject(context.Background(), "A1/two", "bbb", t2) 472 473 fstest.CheckListingWithPrecision( 474 t, 475 r.Fremote, 476 []fstest.Item{ 477 file1, file2, 478 }, 479 []string{ 480 "A1", 481 "A1/B1", 482 "A1/B1/C1", 483 "A2", 484 "A1/B2", 485 "A1/B2/C2", 486 "A1/B1/C3", 487 "A3", 488 "A3/B3", 489 "A3/B3/C4", 490 }, 491 fs.GetModifyWindow(r.Fremote), 492 ) 493 494 require.NoError(t, operations.Purge(context.Background(), r.Fremote, "A1/B1")) 495 496 fstest.CheckListingWithPrecision( 497 t, 498 r.Fremote, 499 []fstest.Item{ 500 file2, 501 }, 502 []string{ 503 "A1", 504 "A2", 505 "A1/B2", 506 "A1/B2/C2", 507 "A3", 508 "A3/B3", 509 "A3/B3/C4", 510 }, 511 fs.GetModifyWindow(r.Fremote), 512 ) 513 514 require.NoError(t, operations.Purge(context.Background(), r.Fremote, "")) 515 516 fstest.CheckListingWithPrecision( 517 t, 518 r.Fremote, 519 []fstest.Item{}, 520 []string{}, 521 fs.GetModifyWindow(r.Fremote), 522 ) 523 524 } 525 526 func TestRmdirsNoLeaveRoot(t *testing.T) { 527 r := fstest.NewRun(t) 528 defer r.Finalise() 529 r.Mkdir(context.Background(), r.Fremote) 530 531 // Make some files and dirs we expect to keep 532 r.ForceMkdir(context.Background(), r.Fremote) 533 file1 := r.WriteObject(context.Background(), "A1/B1/C1/one", "aaa", t1) 534 //..and dirs we expect to delete 535 require.NoError(t, operations.Mkdir(context.Background(), r.Fremote, "A2")) 536 require.NoError(t, operations.Mkdir(context.Background(), r.Fremote, "A1/B2")) 537 require.NoError(t, operations.Mkdir(context.Background(), r.Fremote, "A1/B2/C2")) 538 require.NoError(t, operations.Mkdir(context.Background(), r.Fremote, "A1/B1/C3")) 539 require.NoError(t, operations.Mkdir(context.Background(), r.Fremote, "A3")) 540 require.NoError(t, operations.Mkdir(context.Background(), r.Fremote, "A3/B3")) 541 require.NoError(t, operations.Mkdir(context.Background(), r.Fremote, "A3/B3/C4")) 542 //..and one more file at the end 543 file2 := r.WriteObject(context.Background(), "A1/two", "bbb", t2) 544 545 fstest.CheckListingWithPrecision( 546 t, 547 r.Fremote, 548 []fstest.Item{ 549 file1, file2, 550 }, 551 []string{ 552 "A1", 553 "A1/B1", 554 "A1/B1/C1", 555 "A2", 556 "A1/B2", 557 "A1/B2/C2", 558 "A1/B1/C3", 559 "A3", 560 "A3/B3", 561 "A3/B3/C4", 562 }, 563 fs.GetModifyWindow(r.Fremote), 564 ) 565 566 require.NoError(t, operations.Rmdirs(context.Background(), r.Fremote, "A3/B3/C4", false)) 567 568 fstest.CheckListingWithPrecision( 569 t, 570 r.Fremote, 571 []fstest.Item{ 572 file1, file2, 573 }, 574 []string{ 575 "A1", 576 "A1/B1", 577 "A1/B1/C1", 578 "A2", 579 "A1/B2", 580 "A1/B2/C2", 581 "A1/B1/C3", 582 "A3", 583 "A3/B3", 584 }, 585 fs.GetModifyWindow(r.Fremote), 586 ) 587 588 require.NoError(t, operations.Rmdirs(context.Background(), r.Fremote, "", false)) 589 590 fstest.CheckListingWithPrecision( 591 t, 592 r.Fremote, 593 []fstest.Item{ 594 file1, file2, 595 }, 596 []string{ 597 "A1", 598 "A1/B1", 599 "A1/B1/C1", 600 }, 601 fs.GetModifyWindow(r.Fremote), 602 ) 603 604 } 605 606 func TestRmdirsLeaveRoot(t *testing.T) { 607 r := fstest.NewRun(t) 608 defer r.Finalise() 609 r.Mkdir(context.Background(), r.Fremote) 610 611 r.ForceMkdir(context.Background(), r.Fremote) 612 613 require.NoError(t, operations.Mkdir(context.Background(), r.Fremote, "A1")) 614 require.NoError(t, operations.Mkdir(context.Background(), r.Fremote, "A1/B1")) 615 require.NoError(t, operations.Mkdir(context.Background(), r.Fremote, "A1/B1/C1")) 616 617 fstest.CheckListingWithPrecision( 618 t, 619 r.Fremote, 620 []fstest.Item{}, 621 []string{ 622 "A1", 623 "A1/B1", 624 "A1/B1/C1", 625 }, 626 fs.GetModifyWindow(r.Fremote), 627 ) 628 629 require.NoError(t, operations.Rmdirs(context.Background(), r.Fremote, "A1", true)) 630 631 fstest.CheckListingWithPrecision( 632 t, 633 r.Fremote, 634 []fstest.Item{}, 635 []string{ 636 "A1", 637 }, 638 fs.GetModifyWindow(r.Fremote), 639 ) 640 } 641 642 func TestCopyURL(t *testing.T) { 643 r := fstest.NewRun(t) 644 defer r.Finalise() 645 646 contents := "file contents\n" 647 file1 := r.WriteFile("file1", contents, t1) 648 file2 := r.WriteFile("file2", contents, t1) 649 r.Mkdir(context.Background(), r.Fremote) 650 fstest.CheckItems(t, r.Fremote) 651 652 // check when reading from regular HTTP server 653 status := 0 654 handler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { 655 if status != 0 { 656 http.Error(w, "an error ocurred", status) 657 } 658 _, err := w.Write([]byte(contents)) 659 assert.NoError(t, err) 660 }) 661 ts := httptest.NewServer(handler) 662 defer ts.Close() 663 664 o, err := operations.CopyURL(context.Background(), r.Fremote, "file1", ts.URL, false, false) 665 require.NoError(t, err) 666 assert.Equal(t, int64(len(contents)), o.Size()) 667 668 fstest.CheckListingWithPrecision(t, r.Fremote, []fstest.Item{file1}, nil, fs.ModTimeNotSupported) 669 670 // Check file clobbering 671 o, err = operations.CopyURL(context.Background(), r.Fremote, "file1", ts.URL, false, true) 672 require.Error(t, err) 673 674 // Check auto file naming 675 status = 0 676 urlFileName := "filename.txt" 677 o, err = operations.CopyURL(context.Background(), r.Fremote, "", ts.URL+"/"+urlFileName, true, false) 678 require.NoError(t, err) 679 assert.Equal(t, int64(len(contents)), o.Size()) 680 assert.Equal(t, urlFileName, o.Remote()) 681 682 // Check auto file naming when url without file name 683 o, err = operations.CopyURL(context.Background(), r.Fremote, "file1", ts.URL, true, false) 684 require.Error(t, err) 685 686 // Check an error is returned for a 404 687 status = http.StatusNotFound 688 o, err = operations.CopyURL(context.Background(), r.Fremote, "file1", ts.URL, false, false) 689 require.Error(t, err) 690 assert.Contains(t, err.Error(), "Not Found") 691 assert.Nil(t, o) 692 status = 0 693 694 // check when reading from unverified HTTPS server 695 fs.Config.InsecureSkipVerify = true 696 fshttp.ResetTransport() 697 defer func() { 698 fs.Config.InsecureSkipVerify = false 699 fshttp.ResetTransport() 700 }() 701 tss := httptest.NewTLSServer(handler) 702 defer tss.Close() 703 704 o, err = operations.CopyURL(context.Background(), r.Fremote, "file2", tss.URL, false, false) 705 require.NoError(t, err) 706 assert.Equal(t, int64(len(contents)), o.Size()) 707 fstest.CheckListingWithPrecision(t, r.Fremote, []fstest.Item{file1, file2, fstest.NewItem(urlFileName, contents, t1)}, nil, fs.ModTimeNotSupported) 708 } 709 710 func TestCopyURLToWriter(t *testing.T) { 711 contents := "file contents\n" 712 713 // check when reading from regular HTTP server 714 status := 0 715 handler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { 716 if status != 0 { 717 http.Error(w, "an error ocurred", status) 718 return 719 } 720 _, err := w.Write([]byte(contents)) 721 assert.NoError(t, err) 722 }) 723 ts := httptest.NewServer(handler) 724 defer ts.Close() 725 726 // test normal fetch 727 var buf bytes.Buffer 728 err := operations.CopyURLToWriter(context.Background(), ts.URL, &buf) 729 require.NoError(t, err) 730 assert.Equal(t, contents, buf.String()) 731 732 // test fetch with error 733 status = http.StatusNotFound 734 buf.Reset() 735 err = operations.CopyURLToWriter(context.Background(), ts.URL, &buf) 736 require.Error(t, err) 737 assert.Contains(t, err.Error(), "Not Found") 738 assert.Equal(t, 0, len(buf.String())) 739 } 740 741 func TestMoveFile(t *testing.T) { 742 r := fstest.NewRun(t) 743 defer r.Finalise() 744 745 file1 := r.WriteFile("file1", "file1 contents", t1) 746 fstest.CheckItems(t, r.Flocal, file1) 747 748 file2 := file1 749 file2.Path = "sub/file2" 750 751 err := operations.MoveFile(context.Background(), r.Fremote, r.Flocal, file2.Path, file1.Path) 752 require.NoError(t, err) 753 fstest.CheckItems(t, r.Flocal) 754 fstest.CheckItems(t, r.Fremote, file2) 755 756 r.WriteFile("file1", "file1 contents", t1) 757 fstest.CheckItems(t, r.Flocal, file1) 758 759 err = operations.MoveFile(context.Background(), r.Fremote, r.Flocal, file2.Path, file1.Path) 760 require.NoError(t, err) 761 fstest.CheckItems(t, r.Flocal) 762 fstest.CheckItems(t, r.Fremote, file2) 763 764 err = operations.MoveFile(context.Background(), r.Fremote, r.Fremote, file2.Path, file2.Path) 765 require.NoError(t, err) 766 fstest.CheckItems(t, r.Flocal) 767 fstest.CheckItems(t, r.Fremote, file2) 768 } 769 770 func TestCaseInsensitiveMoveFile(t *testing.T) { 771 r := fstest.NewRun(t) 772 defer r.Finalise() 773 if !r.Fremote.Features().CaseInsensitive { 774 return 775 } 776 777 file1 := r.WriteFile("file1", "file1 contents", t1) 778 fstest.CheckItems(t, r.Flocal, file1) 779 780 file2 := file1 781 file2.Path = "sub/file2" 782 783 err := operations.MoveFile(context.Background(), r.Fremote, r.Flocal, file2.Path, file1.Path) 784 require.NoError(t, err) 785 fstest.CheckItems(t, r.Flocal) 786 fstest.CheckItems(t, r.Fremote, file2) 787 788 r.WriteFile("file1", "file1 contents", t1) 789 fstest.CheckItems(t, r.Flocal, file1) 790 791 err = operations.MoveFile(context.Background(), r.Fremote, r.Flocal, file2.Path, file1.Path) 792 require.NoError(t, err) 793 fstest.CheckItems(t, r.Flocal) 794 fstest.CheckItems(t, r.Fremote, file2) 795 796 file2Capitalized := file2 797 file2Capitalized.Path = "sub/File2" 798 799 err = operations.MoveFile(context.Background(), r.Fremote, r.Fremote, file2Capitalized.Path, file2.Path) 800 require.NoError(t, err) 801 fstest.CheckItems(t, r.Flocal) 802 fstest.CheckItems(t, r.Fremote, file2Capitalized) 803 } 804 805 func TestMoveFileBackupDir(t *testing.T) { 806 r := fstest.NewRun(t) 807 defer r.Finalise() 808 if !operations.CanServerSideMove(r.Fremote) { 809 t.Skip("Skipping test as remote does not support server side move or copy") 810 } 811 812 oldBackupDir := fs.Config.BackupDir 813 fs.Config.BackupDir = r.FremoteName + "/backup" 814 defer func() { 815 fs.Config.BackupDir = oldBackupDir 816 }() 817 818 file1 := r.WriteFile("dst/file1", "file1 contents", t1) 819 fstest.CheckItems(t, r.Flocal, file1) 820 821 file1old := r.WriteObject(context.Background(), "dst/file1", "file1 contents old", t1) 822 fstest.CheckItems(t, r.Fremote, file1old) 823 824 err := operations.MoveFile(context.Background(), r.Fremote, r.Flocal, file1.Path, file1.Path) 825 require.NoError(t, err) 826 fstest.CheckItems(t, r.Flocal) 827 file1old.Path = "backup/dst/file1" 828 fstest.CheckItems(t, r.Fremote, file1old, file1) 829 } 830 831 func TestCopyFile(t *testing.T) { 832 r := fstest.NewRun(t) 833 defer r.Finalise() 834 835 file1 := r.WriteFile("file1", "file1 contents", t1) 836 fstest.CheckItems(t, r.Flocal, file1) 837 838 file2 := file1 839 file2.Path = "sub/file2" 840 841 err := operations.CopyFile(context.Background(), r.Fremote, r.Flocal, file2.Path, file1.Path) 842 require.NoError(t, err) 843 fstest.CheckItems(t, r.Flocal, file1) 844 fstest.CheckItems(t, r.Fremote, file2) 845 846 err = operations.CopyFile(context.Background(), r.Fremote, r.Flocal, file2.Path, file1.Path) 847 require.NoError(t, err) 848 fstest.CheckItems(t, r.Flocal, file1) 849 fstest.CheckItems(t, r.Fremote, file2) 850 851 err = operations.CopyFile(context.Background(), r.Fremote, r.Fremote, file2.Path, file2.Path) 852 require.NoError(t, err) 853 fstest.CheckItems(t, r.Flocal, file1) 854 fstest.CheckItems(t, r.Fremote, file2) 855 } 856 857 func TestCopyFileBackupDir(t *testing.T) { 858 r := fstest.NewRun(t) 859 defer r.Finalise() 860 if !operations.CanServerSideMove(r.Fremote) { 861 t.Skip("Skipping test as remote does not support server side move or copy") 862 } 863 864 oldBackupDir := fs.Config.BackupDir 865 fs.Config.BackupDir = r.FremoteName + "/backup" 866 defer func() { 867 fs.Config.BackupDir = oldBackupDir 868 }() 869 870 file1 := r.WriteFile("dst/file1", "file1 contents", t1) 871 fstest.CheckItems(t, r.Flocal, file1) 872 873 file1old := r.WriteObject(context.Background(), "dst/file1", "file1 contents old", t1) 874 fstest.CheckItems(t, r.Fremote, file1old) 875 876 err := operations.CopyFile(context.Background(), r.Fremote, r.Flocal, file1.Path, file1.Path) 877 require.NoError(t, err) 878 fstest.CheckItems(t, r.Flocal, file1) 879 file1old.Path = "backup/dst/file1" 880 fstest.CheckItems(t, r.Fremote, file1old, file1) 881 } 882 883 // Test with CompareDest set 884 func TestCopyFileCompareDest(t *testing.T) { 885 r := fstest.NewRun(t) 886 defer r.Finalise() 887 888 fs.Config.CompareDest = r.FremoteName + "/CompareDest" 889 defer func() { 890 fs.Config.CompareDest = "" 891 }() 892 fdst, err := fs.NewFs(r.FremoteName + "/dst") 893 require.NoError(t, err) 894 895 // check empty dest, empty compare 896 file1 := r.WriteFile("one", "one", t1) 897 fstest.CheckItems(t, r.Flocal, file1) 898 899 err = operations.CopyFile(context.Background(), fdst, r.Flocal, file1.Path, file1.Path) 900 require.NoError(t, err) 901 902 file1dst := file1 903 file1dst.Path = "dst/one" 904 905 fstest.CheckItems(t, r.Fremote, file1dst) 906 907 // check old dest, empty compare 908 file1b := r.WriteFile("one", "onet2", t2) 909 fstest.CheckItems(t, r.Fremote, file1dst) 910 fstest.CheckItems(t, r.Flocal, file1b) 911 912 err = operations.CopyFile(context.Background(), fdst, r.Flocal, file1b.Path, file1b.Path) 913 require.NoError(t, err) 914 915 file1bdst := file1b 916 file1bdst.Path = "dst/one" 917 918 fstest.CheckItems(t, r.Fremote, file1bdst) 919 920 // check old dest, new compare 921 file3 := r.WriteObject(context.Background(), "dst/one", "one", t1) 922 file2 := r.WriteObject(context.Background(), "CompareDest/one", "onet2", t2) 923 file1c := r.WriteFile("one", "onet2", t2) 924 fstest.CheckItems(t, r.Fremote, file2, file3) 925 fstest.CheckItems(t, r.Flocal, file1c) 926 927 err = operations.CopyFile(context.Background(), fdst, r.Flocal, file1c.Path, file1c.Path) 928 require.NoError(t, err) 929 930 fstest.CheckItems(t, r.Fremote, file2, file3) 931 932 // check empty dest, new compare 933 file4 := r.WriteObject(context.Background(), "CompareDest/two", "two", t2) 934 file5 := r.WriteFile("two", "two", t2) 935 fstest.CheckItems(t, r.Fremote, file2, file3, file4) 936 fstest.CheckItems(t, r.Flocal, file1c, file5) 937 938 err = operations.CopyFile(context.Background(), fdst, r.Flocal, file5.Path, file5.Path) 939 require.NoError(t, err) 940 941 fstest.CheckItems(t, r.Fremote, file2, file3, file4) 942 943 // check new dest, new compare 944 err = operations.CopyFile(context.Background(), fdst, r.Flocal, file5.Path, file5.Path) 945 require.NoError(t, err) 946 947 fstest.CheckItems(t, r.Fremote, file2, file3, file4) 948 949 // check empty dest, old compare 950 file5b := r.WriteFile("two", "twot3", t3) 951 fstest.CheckItems(t, r.Fremote, file2, file3, file4) 952 fstest.CheckItems(t, r.Flocal, file1c, file5b) 953 954 err = operations.CopyFile(context.Background(), fdst, r.Flocal, file5b.Path, file5b.Path) 955 require.NoError(t, err) 956 957 file5bdst := file5b 958 file5bdst.Path = "dst/two" 959 960 fstest.CheckItems(t, r.Fremote, file2, file3, file4, file5bdst) 961 } 962 963 // Test with CopyDest set 964 func TestCopyFileCopyDest(t *testing.T) { 965 r := fstest.NewRun(t) 966 defer r.Finalise() 967 968 if r.Fremote.Features().Copy == nil { 969 t.Skip("Skipping test as remote does not support server side copy") 970 } 971 972 fs.Config.CopyDest = r.FremoteName + "/CopyDest" 973 defer func() { 974 fs.Config.CopyDest = "" 975 }() 976 977 fdst, err := fs.NewFs(r.FremoteName + "/dst") 978 require.NoError(t, err) 979 980 // check empty dest, empty copy 981 file1 := r.WriteFile("one", "one", t1) 982 fstest.CheckItems(t, r.Flocal, file1) 983 984 err = operations.CopyFile(context.Background(), fdst, r.Flocal, file1.Path, file1.Path) 985 require.NoError(t, err) 986 987 file1dst := file1 988 file1dst.Path = "dst/one" 989 990 fstest.CheckItems(t, r.Fremote, file1dst) 991 992 // check old dest, empty copy 993 file1b := r.WriteFile("one", "onet2", t2) 994 fstest.CheckItems(t, r.Fremote, file1dst) 995 fstest.CheckItems(t, r.Flocal, file1b) 996 997 err = operations.CopyFile(context.Background(), fdst, r.Flocal, file1b.Path, file1b.Path) 998 require.NoError(t, err) 999 1000 file1bdst := file1b 1001 file1bdst.Path = "dst/one" 1002 1003 fstest.CheckItems(t, r.Fremote, file1bdst) 1004 1005 // check old dest, new copy, backup-dir 1006 1007 fs.Config.BackupDir = r.FremoteName + "/BackupDir" 1008 1009 file3 := r.WriteObject(context.Background(), "dst/one", "one", t1) 1010 file2 := r.WriteObject(context.Background(), "CopyDest/one", "onet2", t2) 1011 file1c := r.WriteFile("one", "onet2", t2) 1012 fstest.CheckItems(t, r.Fremote, file2, file3) 1013 fstest.CheckItems(t, r.Flocal, file1c) 1014 1015 err = operations.CopyFile(context.Background(), fdst, r.Flocal, file1c.Path, file1c.Path) 1016 require.NoError(t, err) 1017 1018 file2dst := file2 1019 file2dst.Path = "dst/one" 1020 file3.Path = "BackupDir/one" 1021 1022 fstest.CheckItems(t, r.Fremote, file2, file2dst, file3) 1023 fs.Config.BackupDir = "" 1024 1025 // check empty dest, new copy 1026 file4 := r.WriteObject(context.Background(), "CopyDest/two", "two", t2) 1027 file5 := r.WriteFile("two", "two", t2) 1028 fstest.CheckItems(t, r.Fremote, file2, file2dst, file3, file4) 1029 fstest.CheckItems(t, r.Flocal, file1c, file5) 1030 1031 err = operations.CopyFile(context.Background(), fdst, r.Flocal, file5.Path, file5.Path) 1032 require.NoError(t, err) 1033 1034 file4dst := file4 1035 file4dst.Path = "dst/two" 1036 1037 fstest.CheckItems(t, r.Fremote, file2, file2dst, file3, file4, file4dst) 1038 1039 // check new dest, new copy 1040 err = operations.CopyFile(context.Background(), fdst, r.Flocal, file5.Path, file5.Path) 1041 require.NoError(t, err) 1042 1043 fstest.CheckItems(t, r.Fremote, file2, file2dst, file3, file4, file4dst) 1044 1045 // check empty dest, old copy 1046 file6 := r.WriteObject(context.Background(), "CopyDest/three", "three", t2) 1047 file7 := r.WriteFile("three", "threet3", t3) 1048 fstest.CheckItems(t, r.Fremote, file2, file2dst, file3, file4, file4dst, file6) 1049 fstest.CheckItems(t, r.Flocal, file1c, file5, file7) 1050 1051 err = operations.CopyFile(context.Background(), fdst, r.Flocal, file7.Path, file7.Path) 1052 require.NoError(t, err) 1053 1054 file7dst := file7 1055 file7dst.Path = "dst/three" 1056 1057 fstest.CheckItems(t, r.Fremote, file2, file2dst, file3, file4, file4dst, file6, file7dst) 1058 } 1059 1060 // testFsInfo is for unit testing fs.Info 1061 type testFsInfo struct { 1062 name string 1063 root string 1064 stringVal string 1065 precision time.Duration 1066 hashes hash.Set 1067 features fs.Features 1068 } 1069 1070 // Name of the remote (as passed into NewFs) 1071 func (i *testFsInfo) Name() string { return i.name } 1072 1073 // Root of the remote (as passed into NewFs) 1074 func (i *testFsInfo) Root() string { return i.root } 1075 1076 // String returns a description of the FS 1077 func (i *testFsInfo) String() string { return i.stringVal } 1078 1079 // Precision of the ModTimes in this Fs 1080 func (i *testFsInfo) Precision() time.Duration { return i.precision } 1081 1082 // Returns the supported hash types of the filesystem 1083 func (i *testFsInfo) Hashes() hash.Set { return i.hashes } 1084 1085 // Returns the supported hash types of the filesystem 1086 func (i *testFsInfo) Features() *fs.Features { return &i.features } 1087 1088 func TestSameConfig(t *testing.T) { 1089 a := &testFsInfo{name: "name", root: "root"} 1090 for _, test := range []struct { 1091 name string 1092 root string 1093 expected bool 1094 }{ 1095 {"name", "root", true}, 1096 {"name", "rooty", true}, 1097 {"namey", "root", false}, 1098 {"namey", "roott", false}, 1099 } { 1100 b := &testFsInfo{name: test.name, root: test.root} 1101 actual := operations.SameConfig(a, b) 1102 assert.Equal(t, test.expected, actual) 1103 actual = operations.SameConfig(b, a) 1104 assert.Equal(t, test.expected, actual) 1105 } 1106 } 1107 1108 func TestSame(t *testing.T) { 1109 a := &testFsInfo{name: "name", root: "root"} 1110 for _, test := range []struct { 1111 name string 1112 root string 1113 expected bool 1114 }{ 1115 {"name", "root", true}, 1116 {"name", "rooty", false}, 1117 {"namey", "root", false}, 1118 {"namey", "roott", false}, 1119 } { 1120 b := &testFsInfo{name: test.name, root: test.root} 1121 actual := operations.Same(a, b) 1122 assert.Equal(t, test.expected, actual) 1123 actual = operations.Same(b, a) 1124 assert.Equal(t, test.expected, actual) 1125 } 1126 } 1127 1128 func TestOverlapping(t *testing.T) { 1129 a := &testFsInfo{name: "name", root: "root"} 1130 slash := string(os.PathSeparator) // native path separator 1131 for _, test := range []struct { 1132 name string 1133 root string 1134 expected bool 1135 }{ 1136 {"name", "root", true}, 1137 {"namey", "root", false}, 1138 {"name", "rooty", false}, 1139 {"namey", "rooty", false}, 1140 {"name", "roo", false}, 1141 {"name", "root/toot", true}, 1142 {"name", "root/toot/", true}, 1143 {"name", "root" + slash + "toot", true}, 1144 {"name", "root" + slash + "toot" + slash, true}, 1145 {"name", "", true}, 1146 {"name", "/", true}, 1147 } { 1148 b := &testFsInfo{name: test.name, root: test.root} 1149 what := fmt.Sprintf("(%q,%q) vs (%q,%q)", a.name, a.root, b.name, b.root) 1150 actual := operations.Overlapping(a, b) 1151 assert.Equal(t, test.expected, actual, what) 1152 actual = operations.Overlapping(b, a) 1153 assert.Equal(t, test.expected, actual, what) 1154 } 1155 } 1156 1157 func TestCheckEqualReaders(t *testing.T) { 1158 b65a := make([]byte, 65*1024) 1159 b65b := make([]byte, 65*1024) 1160 b65b[len(b65b)-1] = 1 1161 b66 := make([]byte, 66*1024) 1162 1163 differ, err := operations.CheckEqualReaders(bytes.NewBuffer(b65a), bytes.NewBuffer(b65a)) 1164 assert.NoError(t, err) 1165 assert.Equal(t, differ, false) 1166 1167 differ, err = operations.CheckEqualReaders(bytes.NewBuffer(b65a), bytes.NewBuffer(b65b)) 1168 assert.NoError(t, err) 1169 assert.Equal(t, differ, true) 1170 1171 differ, err = operations.CheckEqualReaders(bytes.NewBuffer(b65a), bytes.NewBuffer(b66)) 1172 assert.NoError(t, err) 1173 assert.Equal(t, differ, true) 1174 1175 differ, err = operations.CheckEqualReaders(bytes.NewBuffer(b66), bytes.NewBuffer(b65a)) 1176 assert.NoError(t, err) 1177 assert.Equal(t, differ, true) 1178 1179 myErr := errors.New("sentinel") 1180 wrap := func(b []byte) io.Reader { 1181 r := bytes.NewBuffer(b) 1182 e := readers.ErrorReader{Err: myErr} 1183 return io.MultiReader(r, e) 1184 } 1185 1186 differ, err = operations.CheckEqualReaders(wrap(b65a), bytes.NewBuffer(b65a)) 1187 assert.Equal(t, myErr, err) 1188 assert.Equal(t, differ, true) 1189 1190 differ, err = operations.CheckEqualReaders(wrap(b65a), bytes.NewBuffer(b65b)) 1191 assert.Equal(t, myErr, err) 1192 assert.Equal(t, differ, true) 1193 1194 differ, err = operations.CheckEqualReaders(wrap(b65a), bytes.NewBuffer(b66)) 1195 assert.Equal(t, myErr, err) 1196 assert.Equal(t, differ, true) 1197 1198 differ, err = operations.CheckEqualReaders(wrap(b66), bytes.NewBuffer(b65a)) 1199 assert.Equal(t, myErr, err) 1200 assert.Equal(t, differ, true) 1201 1202 differ, err = operations.CheckEqualReaders(bytes.NewBuffer(b65a), wrap(b65a)) 1203 assert.Equal(t, myErr, err) 1204 assert.Equal(t, differ, true) 1205 1206 differ, err = operations.CheckEqualReaders(bytes.NewBuffer(b65a), wrap(b65b)) 1207 assert.Equal(t, myErr, err) 1208 assert.Equal(t, differ, true) 1209 1210 differ, err = operations.CheckEqualReaders(bytes.NewBuffer(b65a), wrap(b66)) 1211 assert.Equal(t, myErr, err) 1212 assert.Equal(t, differ, true) 1213 1214 differ, err = operations.CheckEqualReaders(bytes.NewBuffer(b66), wrap(b65a)) 1215 assert.Equal(t, myErr, err) 1216 assert.Equal(t, differ, true) 1217 } 1218 1219 func TestListFormat(t *testing.T) { 1220 item0 := &operations.ListJSONItem{ 1221 Path: "a", 1222 Name: "a", 1223 Encrypted: "encryptedFileName", 1224 Size: 1, 1225 MimeType: "application/octet-stream", 1226 ModTime: operations.Timestamp{ 1227 When: t1, 1228 Format: "2006-01-02T15:04:05.000000000Z07:00"}, 1229 IsDir: false, 1230 Hashes: map[string]string{ 1231 "MD5": "0cc175b9c0f1b6a831c399e269772661", 1232 "SHA-1": "86f7e437faa5a7fce15d1ddcb9eaeaea377667b8", 1233 "DropboxHash": "bf5d3affb73efd2ec6c36ad3112dd933efed63c4e1cbffcfa88e2759c144f2d8", 1234 "QuickXorHash": "6100000000000000000000000100000000000000"}, 1235 ID: "fileID", 1236 OrigID: "fileOrigID", 1237 } 1238 1239 item1 := &operations.ListJSONItem{ 1240 Path: "subdir", 1241 Name: "subdir", 1242 Encrypted: "encryptedDirName", 1243 Size: -1, 1244 MimeType: "inode/directory", 1245 ModTime: operations.Timestamp{ 1246 When: t2, 1247 Format: "2006-01-02T15:04:05.000000000Z07:00"}, 1248 IsDir: true, 1249 Hashes: map[string]string(nil), 1250 ID: "dirID", 1251 OrigID: "dirOrigID", 1252 } 1253 1254 var list operations.ListFormat 1255 list.AddPath() 1256 list.SetDirSlash(false) 1257 assert.Equal(t, "subdir", list.Format(item1)) 1258 1259 list.SetDirSlash(true) 1260 assert.Equal(t, "subdir/", list.Format(item1)) 1261 1262 list.SetOutput(nil) 1263 assert.Equal(t, "", list.Format(item1)) 1264 1265 list.AppendOutput(func(item *operations.ListJSONItem) string { return "a" }) 1266 list.AppendOutput(func(item *operations.ListJSONItem) string { return "b" }) 1267 assert.Equal(t, "ab", list.Format(item1)) 1268 list.SetSeparator(":::") 1269 assert.Equal(t, "a:::b", list.Format(item1)) 1270 1271 list.SetOutput(nil) 1272 list.AddModTime() 1273 assert.Equal(t, t1.Local().Format("2006-01-02 15:04:05"), list.Format(item0)) 1274 1275 list.SetOutput(nil) 1276 list.SetSeparator("|") 1277 list.AddID() 1278 list.AddOrigID() 1279 assert.Equal(t, "fileID|fileOrigID", list.Format(item0)) 1280 assert.Equal(t, "dirID|dirOrigID", list.Format(item1)) 1281 1282 list.SetOutput(nil) 1283 list.AddMimeType() 1284 assert.Contains(t, list.Format(item0), "/") 1285 assert.Equal(t, "inode/directory", list.Format(item1)) 1286 1287 list.SetOutput(nil) 1288 list.AddPath() 1289 list.SetAbsolute(true) 1290 assert.Equal(t, "/a", list.Format(item0)) 1291 list.SetAbsolute(false) 1292 assert.Equal(t, "a", list.Format(item0)) 1293 1294 list.SetOutput(nil) 1295 list.AddSize() 1296 assert.Equal(t, "1", list.Format(item0)) 1297 1298 list.AddPath() 1299 list.AddModTime() 1300 list.SetDirSlash(true) 1301 list.SetSeparator("__SEP__") 1302 assert.Equal(t, "1__SEP__a__SEP__"+t1.Local().Format("2006-01-02 15:04:05"), list.Format(item0)) 1303 assert.Equal(t, "-1__SEP__subdir/__SEP__"+t2.Local().Format("2006-01-02 15:04:05"), list.Format(item1)) 1304 1305 for _, test := range []struct { 1306 ht hash.Type 1307 want string 1308 }{ 1309 {hash.MD5, "0cc175b9c0f1b6a831c399e269772661"}, 1310 {hash.SHA1, "86f7e437faa5a7fce15d1ddcb9eaeaea377667b8"}, 1311 } { 1312 list.SetOutput(nil) 1313 list.AddHash(test.ht) 1314 assert.Equal(t, test.want, list.Format(item0)) 1315 } 1316 1317 list.SetOutput(nil) 1318 list.SetSeparator("|") 1319 list.SetCSV(true) 1320 list.AddSize() 1321 list.AddPath() 1322 list.AddModTime() 1323 list.SetDirSlash(true) 1324 assert.Equal(t, "1|a|"+t1.Local().Format("2006-01-02 15:04:05"), list.Format(item0)) 1325 assert.Equal(t, "-1|subdir/|"+t2.Local().Format("2006-01-02 15:04:05"), list.Format(item1)) 1326 1327 list.SetOutput(nil) 1328 list.SetSeparator("|") 1329 list.AddPath() 1330 list.AddEncrypted() 1331 assert.Equal(t, "a|encryptedFileName", list.Format(item0)) 1332 assert.Equal(t, "subdir/|encryptedDirName/", list.Format(item1)) 1333 1334 } 1335 1336 func TestDirMove(t *testing.T) { 1337 r := fstest.NewRun(t) 1338 defer r.Finalise() 1339 1340 r.Mkdir(context.Background(), r.Fremote) 1341 1342 // Make some files and dirs 1343 r.ForceMkdir(context.Background(), r.Fremote) 1344 files := []fstest.Item{ 1345 r.WriteObject(context.Background(), "A1/one", "one", t1), 1346 r.WriteObject(context.Background(), "A1/two", "two", t2), 1347 r.WriteObject(context.Background(), "A1/B1/three", "three", t3), 1348 r.WriteObject(context.Background(), "A1/B1/C1/four", "four", t1), 1349 r.WriteObject(context.Background(), "A1/B1/C2/five", "five", t2), 1350 } 1351 require.NoError(t, operations.Mkdir(context.Background(), r.Fremote, "A1/B2")) 1352 require.NoError(t, operations.Mkdir(context.Background(), r.Fremote, "A1/B1/C3")) 1353 1354 fstest.CheckListingWithPrecision( 1355 t, 1356 r.Fremote, 1357 files, 1358 []string{ 1359 "A1", 1360 "A1/B1", 1361 "A1/B2", 1362 "A1/B1/C1", 1363 "A1/B1/C2", 1364 "A1/B1/C3", 1365 }, 1366 fs.GetModifyWindow(r.Fremote), 1367 ) 1368 1369 require.NoError(t, operations.DirMove(context.Background(), r.Fremote, "A1", "A2")) 1370 1371 for i := range files { 1372 files[i].Path = strings.Replace(files[i].Path, "A1/", "A2/", -1) 1373 } 1374 1375 fstest.CheckListingWithPrecision( 1376 t, 1377 r.Fremote, 1378 files, 1379 []string{ 1380 "A2", 1381 "A2/B1", 1382 "A2/B2", 1383 "A2/B1/C1", 1384 "A2/B1/C2", 1385 "A2/B1/C3", 1386 }, 1387 fs.GetModifyWindow(r.Fremote), 1388 ) 1389 1390 // Disable DirMove 1391 features := r.Fremote.Features() 1392 oldDirMove := features.DirMove 1393 features.DirMove = nil 1394 defer func() { 1395 features.DirMove = oldDirMove 1396 }() 1397 1398 require.NoError(t, operations.DirMove(context.Background(), r.Fremote, "A2", "A3")) 1399 1400 for i := range files { 1401 files[i].Path = strings.Replace(files[i].Path, "A2/", "A3/", -1) 1402 } 1403 1404 fstest.CheckListingWithPrecision( 1405 t, 1406 r.Fremote, 1407 files, 1408 []string{ 1409 "A3", 1410 "A3/B1", 1411 "A3/B2", 1412 "A3/B1/C1", 1413 "A3/B1/C2", 1414 "A3/B1/C3", 1415 }, 1416 fs.GetModifyWindow(r.Fremote), 1417 ) 1418 1419 } 1420 1421 func TestGetFsInfo(t *testing.T) { 1422 r := fstest.NewRun(t) 1423 defer r.Finalise() 1424 1425 f := r.Fremote 1426 info := operations.GetFsInfo(f) 1427 assert.Equal(t, f.Name(), info.Name) 1428 assert.Equal(t, f.Root(), info.Root) 1429 assert.Equal(t, f.String(), info.String) 1430 assert.Equal(t, f.Precision(), info.Precision) 1431 hashSet := hash.NewHashSet() 1432 for _, hashName := range info.Hashes { 1433 var ht hash.Type 1434 require.NoError(t, ht.Set(hashName)) 1435 hashSet.Add(ht) 1436 } 1437 assert.Equal(t, f.Hashes(), hashSet) 1438 assert.Equal(t, f.Features().Enabled(), info.Features) 1439 } 1440 1441 func TestRcat(t *testing.T) { 1442 check := func(withChecksum, ignoreChecksum bool) { 1443 checksumBefore, ignoreChecksumBefore := fs.Config.CheckSum, fs.Config.IgnoreChecksum 1444 fs.Config.CheckSum, fs.Config.IgnoreChecksum = withChecksum, ignoreChecksum 1445 defer func() { 1446 fs.Config.CheckSum, fs.Config.IgnoreChecksum = checksumBefore, ignoreChecksumBefore 1447 }() 1448 1449 var prefix string 1450 if withChecksum { 1451 prefix = "with_checksum_" 1452 } else { 1453 prefix = "no_checksum_" 1454 } 1455 if ignoreChecksum { 1456 prefix = "ignore_checksum_" 1457 } 1458 1459 r := fstest.NewRun(t) 1460 defer r.Finalise() 1461 1462 if *fstest.SizeLimit > 0 && int64(fs.Config.StreamingUploadCutoff) > *fstest.SizeLimit { 1463 savedCutoff := fs.Config.StreamingUploadCutoff 1464 defer func() { 1465 fs.Config.StreamingUploadCutoff = savedCutoff 1466 }() 1467 fs.Config.StreamingUploadCutoff = fs.SizeSuffix(*fstest.SizeLimit) 1468 t.Logf("Adjust StreamingUploadCutoff to size limit %s (was %s)", fs.Config.StreamingUploadCutoff, savedCutoff) 1469 } 1470 1471 fstest.CheckListing(t, r.Fremote, []fstest.Item{}) 1472 1473 data1 := "this is some really nice test data" 1474 path1 := prefix + "small_file_from_pipe" 1475 1476 data2 := string(make([]byte, fs.Config.StreamingUploadCutoff+1)) 1477 path2 := prefix + "big_file_from_pipe" 1478 1479 in := ioutil.NopCloser(strings.NewReader(data1)) 1480 _, err := operations.Rcat(context.Background(), r.Fremote, path1, in, t1) 1481 require.NoError(t, err) 1482 1483 in = ioutil.NopCloser(strings.NewReader(data2)) 1484 _, err = operations.Rcat(context.Background(), r.Fremote, path2, in, t2) 1485 require.NoError(t, err) 1486 1487 file1 := fstest.NewItem(path1, data1, t1) 1488 file2 := fstest.NewItem(path2, data2, t2) 1489 fstest.CheckItems(t, r.Fremote, file1, file2) 1490 } 1491 1492 for i := 0; i < 4; i++ { 1493 withChecksum := (i & 1) != 0 1494 ignoreChecksum := (i & 2) != 0 1495 t.Run(fmt.Sprintf("withChecksum=%v,ignoreChecksum=%v", withChecksum, ignoreChecksum), func(t *testing.T) { 1496 check(withChecksum, ignoreChecksum) 1497 }) 1498 } 1499 } 1500 1501 func TestRcatSize(t *testing.T) { 1502 r := fstest.NewRun(t) 1503 defer r.Finalise() 1504 1505 const body = "------------------------------------------------------------" 1506 file1 := r.WriteFile("potato1", body, t1) 1507 file2 := r.WriteFile("potato2", body, t2) 1508 // Test with known length 1509 bodyReader := ioutil.NopCloser(strings.NewReader(body)) 1510 obj, err := operations.RcatSize(context.Background(), r.Fremote, file1.Path, bodyReader, int64(len(body)), file1.ModTime) 1511 require.NoError(t, err) 1512 assert.Equal(t, int64(len(body)), obj.Size()) 1513 assert.Equal(t, file1.Path, obj.Remote()) 1514 1515 // Test with unknown length 1516 bodyReader = ioutil.NopCloser(strings.NewReader(body)) // reset Reader 1517 ioutil.NopCloser(strings.NewReader(body)) 1518 obj, err = operations.RcatSize(context.Background(), r.Fremote, file2.Path, bodyReader, -1, file2.ModTime) 1519 require.NoError(t, err) 1520 assert.Equal(t, int64(len(body)), obj.Size()) 1521 assert.Equal(t, file2.Path, obj.Remote()) 1522 1523 // Check files exist 1524 fstest.CheckItems(t, r.Fremote, file1, file2) 1525 } 1526 1527 func TestCopyFileMaxTransfer(t *testing.T) { 1528 r := fstest.NewRun(t) 1529 defer r.Finalise() 1530 old := fs.Config.MaxTransfer 1531 oldMode := fs.Config.CutoffMode 1532 1533 defer func() { 1534 fs.Config.MaxTransfer = old 1535 fs.Config.CutoffMode = oldMode 1536 accounting.Stats(context.Background()).ResetCounters() 1537 }() 1538 1539 ctx := context.Background() 1540 1541 const sizeCutoff = 2048 1542 file1 := r.WriteFile("TestCopyFileMaxTransfer/file1", "file1 contents", t1) 1543 file2 := r.WriteFile("TestCopyFileMaxTransfer/file2", "file2 contents"+random.String(sizeCutoff), t2) 1544 file3 := r.WriteFile("TestCopyFileMaxTransfer/file3", "file3 contents"+random.String(sizeCutoff), t2) 1545 file4 := r.WriteFile("TestCopyFileMaxTransfer/file4", "file4 contents"+random.String(sizeCutoff), t2) 1546 1547 // Cutoff mode: Hard 1548 fs.Config.MaxTransfer = sizeCutoff 1549 fs.Config.CutoffMode = fs.CutoffModeHard 1550 1551 // file1: Show a small file gets transferred OK 1552 accounting.Stats(ctx).ResetCounters() 1553 err := operations.CopyFile(ctx, r.Fremote, r.Flocal, file1.Path, file1.Path) 1554 require.NoError(t, err) 1555 fstest.CheckItems(t, r.Flocal, file1, file2, file3, file4) 1556 fstest.CheckItems(t, r.Fremote, file1) 1557 1558 // file2: show a large file does not get transferred 1559 accounting.Stats(ctx).ResetCounters() 1560 err = operations.CopyFile(ctx, r.Fremote, r.Flocal, file2.Path, file2.Path) 1561 require.NotNil(t, err, "Did not get expected max transfer limit error") 1562 assert.Contains(t, err.Error(), "Max transfer limit reached") 1563 assert.True(t, fserrors.IsFatalError(err)) 1564 fstest.CheckItems(t, r.Flocal, file1, file2, file3, file4) 1565 fstest.CheckItems(t, r.Fremote, file1) 1566 1567 // Cutoff mode: Cautious 1568 fs.Config.CutoffMode = fs.CutoffModeCautious 1569 1570 // file3: show a large file does not get transferred 1571 accounting.Stats(ctx).ResetCounters() 1572 err = operations.CopyFile(ctx, r.Fremote, r.Flocal, file3.Path, file3.Path) 1573 require.NotNil(t, err) 1574 assert.Contains(t, err.Error(), "Max transfer limit reached") 1575 assert.True(t, fserrors.IsFatalError(err)) 1576 fstest.CheckItems(t, r.Flocal, file1, file2, file3, file4) 1577 fstest.CheckItems(t, r.Fremote, file1) 1578 1579 if strings.HasPrefix(r.Fremote.Name(), "TestChunker") { 1580 t.Log("skipping remainder of test for chunker as it involves multiple transfers") 1581 return 1582 } 1583 1584 // Cutoff mode: Soft 1585 fs.Config.CutoffMode = fs.CutoffModeSoft 1586 1587 // file4: show a large file does get transferred this time 1588 accounting.Stats(ctx).ResetCounters() 1589 err = operations.CopyFile(ctx, r.Fremote, r.Flocal, file4.Path, file4.Path) 1590 require.NoError(t, err) 1591 fstest.CheckItems(t, r.Flocal, file1, file2, file3, file4) 1592 fstest.CheckItems(t, r.Fremote, file1, file4) 1593 }