github.com/rclone/rclone@v1.66.1-0.20240517100346-7b89735ae726/fs/operations/operations_test.go (about) 1 // Integration tests - test rclone by doing real transactions to a 2 // storage provider to and from the local disk. 3 // 4 // By default it will use a local fs, however you can provide a 5 // -remote option to use a different remote. The test_all.go script 6 // is a wrapper to call this for all the test remotes. 7 // 8 // FIXME not safe for concurrent running of tests until fs.Config is 9 // no longer a global 10 // 11 // NB When writing tests 12 // 13 // Make sure every series of writes to the remote has a 14 // fstest.CheckItems() before use. This make sure the directory 15 // listing is now consistent and stops cascading errors. 16 // 17 // Call accounting.GlobalStats().ResetCounters() before every fs.Sync() as it 18 // uses the error count internally. 19 20 package operations_test 21 22 import ( 23 "bytes" 24 "context" 25 "errors" 26 "fmt" 27 "io" 28 "net/http" 29 "net/http/httptest" 30 "os" 31 "regexp" 32 "strings" 33 "testing" 34 "time" 35 36 _ "github.com/rclone/rclone/backend/all" // import all backends 37 "github.com/rclone/rclone/fs" 38 "github.com/rclone/rclone/fs/accounting" 39 "github.com/rclone/rclone/fs/filter" 40 "github.com/rclone/rclone/fs/fshttp" 41 "github.com/rclone/rclone/fs/hash" 42 "github.com/rclone/rclone/fs/operations" 43 "github.com/rclone/rclone/fstest" 44 "github.com/rclone/rclone/fstest/fstests" 45 "github.com/stretchr/testify/assert" 46 "github.com/stretchr/testify/require" 47 "golang.org/x/text/cases" 48 "golang.org/x/text/language" 49 ) 50 51 // Some times used in the tests 52 var ( 53 t1 = fstest.Time("2001-02-03T04:05:06.499999999Z") 54 t2 = fstest.Time("2011-12-25T12:59:59.123456789Z") 55 t3 = fstest.Time("2011-12-30T12:59:59.000000000Z") 56 ) 57 58 // TestMain drives the tests 59 func TestMain(m *testing.M) { 60 fstest.TestMain(m) 61 } 62 63 func TestMkdir(t *testing.T) { 64 ctx := context.Background() 65 r := 
fstest.NewRun(t) 66 67 err := operations.Mkdir(ctx, r.Fremote, "") 68 require.NoError(t, err) 69 fstest.CheckListing(t, r.Fremote, []fstest.Item{}) 70 71 err = operations.Mkdir(ctx, r.Fremote, "") 72 require.NoError(t, err) 73 } 74 75 func TestLsd(t *testing.T) { 76 ctx := context.Background() 77 r := fstest.NewRun(t) 78 file1 := r.WriteObject(ctx, "sub dir/hello world", "hello world", t1) 79 80 r.CheckRemoteItems(t, file1) 81 82 var buf bytes.Buffer 83 err := operations.ListDir(ctx, r.Fremote, &buf) 84 require.NoError(t, err) 85 res := buf.String() 86 assert.Contains(t, res, "sub dir\n") 87 } 88 89 func TestLs(t *testing.T) { 90 ctx := context.Background() 91 r := fstest.NewRun(t) 92 file1 := r.WriteBoth(ctx, "potato2", "------------------------------------------------------------", t1) 93 file2 := r.WriteBoth(ctx, "empty space", "-", t2) 94 95 r.CheckRemoteItems(t, file1, file2) 96 97 var buf bytes.Buffer 98 err := operations.List(ctx, r.Fremote, &buf) 99 require.NoError(t, err) 100 res := buf.String() 101 assert.Contains(t, res, " 1 empty space\n") 102 assert.Contains(t, res, " 60 potato2\n") 103 } 104 105 func TestLsWithFilesFrom(t *testing.T) { 106 ctx := context.Background() 107 ctx, ci := fs.AddConfig(ctx) 108 r := fstest.NewRun(t) 109 file1 := r.WriteBoth(ctx, "potato2", "------------------------------------------------------------", t1) 110 file2 := r.WriteBoth(ctx, "empty space", "-", t2) 111 112 r.CheckRemoteItems(t, file1, file2) 113 114 // Set the --files-from equivalent 115 f, err := filter.NewFilter(nil) 116 require.NoError(t, err) 117 require.NoError(t, f.AddFile("potato2")) 118 require.NoError(t, f.AddFile("notfound")) 119 120 // Change the active filter 121 ctx = filter.ReplaceConfig(ctx, f) 122 123 var buf bytes.Buffer 124 err = operations.List(ctx, r.Fremote, &buf) 125 require.NoError(t, err) 126 assert.Equal(t, " 60 potato2\n", buf.String()) 127 128 // Now try with --no-traverse 129 ci.NoTraverse = true 130 131 buf.Reset() 132 err = 
operations.List(ctx, r.Fremote, &buf) 133 require.NoError(t, err) 134 assert.Equal(t, " 60 potato2\n", buf.String()) 135 } 136 137 func TestLsLong(t *testing.T) { 138 ctx := context.Background() 139 r := fstest.NewRun(t) 140 file1 := r.WriteBoth(ctx, "potato2", "------------------------------------------------------------", t1) 141 file2 := r.WriteBoth(ctx, "empty space", "-", t2) 142 143 r.CheckRemoteItems(t, file1, file2) 144 145 var buf bytes.Buffer 146 err := operations.ListLong(ctx, r.Fremote, &buf) 147 require.NoError(t, err) 148 res := buf.String() 149 lines := strings.Split(strings.Trim(res, "\n"), "\n") 150 assert.Equal(t, 2, len(lines)) 151 152 timeFormat := "2006-01-02 15:04:05.000000000" 153 precision := r.Fremote.Precision() 154 location := time.Now().Location() 155 checkTime := func(m, filename string, expected time.Time) { 156 modTime, err := time.ParseInLocation(timeFormat, m, location) // parse as localtime 157 if err != nil { 158 t.Errorf("Error parsing %q: %v", m, err) 159 } else { 160 fstest.AssertTimeEqualWithPrecision(t, filename, expected, modTime, precision) 161 } 162 } 163 164 m1 := regexp.MustCompile(`(?m)^ 1 (\d{4}-\d\d-\d\d \d\d:\d\d:\d\d\.\d{9}) empty space$`) 165 if ms := m1.FindStringSubmatch(res); ms == nil { 166 t.Errorf("empty space missing: %q", res) 167 } else { 168 checkTime(ms[1], "empty space", t2.Local()) 169 } 170 171 m2 := regexp.MustCompile(`(?m)^ 60 (\d{4}-\d\d-\d\d \d\d:\d\d:\d\d\.\d{9}) potato2$`) 172 if ms := m2.FindStringSubmatch(res); ms == nil { 173 t.Errorf("potato2 missing: %q", res) 174 } else { 175 checkTime(ms[1], "potato2", t1.Local()) 176 } 177 } 178 179 func TestHashSums(t *testing.T) { 180 ctx := context.Background() 181 r := fstest.NewRun(t) 182 file1 := r.WriteBoth(ctx, "potato2", "------------------------------------------------------------", t1) 183 file2 := r.WriteBoth(ctx, "empty space", "-", t2) 184 185 r.CheckRemoteItems(t, file1, file2) 186 187 hashes := r.Fremote.Hashes() 188 189 var quickXorHash 
hash.Type 190 err := quickXorHash.Set("QuickXorHash") 191 require.NoError(t, err) 192 193 for _, test := range []struct { 194 name string 195 download bool 196 base64 bool 197 ht hash.Type 198 want []string 199 }{ 200 { 201 ht: hash.MD5, 202 want: []string{ 203 "336d5ebc5436534e61d16e63ddfca327 empty space\n", 204 "d6548b156ea68a4e003e786df99eee76 potato2\n", 205 }, 206 }, 207 { 208 ht: hash.MD5, 209 download: true, 210 want: []string{ 211 "336d5ebc5436534e61d16e63ddfca327 empty space\n", 212 "d6548b156ea68a4e003e786df99eee76 potato2\n", 213 }, 214 }, 215 { 216 ht: hash.SHA1, 217 want: []string{ 218 "3bc15c8aae3e4124dd409035f32ea2fd6835efc9 empty space\n", 219 "9dc7f7d3279715991a22853f5981df582b7f9f6d potato2\n", 220 }, 221 }, 222 { 223 ht: hash.SHA1, 224 download: true, 225 want: []string{ 226 "3bc15c8aae3e4124dd409035f32ea2fd6835efc9 empty space\n", 227 "9dc7f7d3279715991a22853f5981df582b7f9f6d potato2\n", 228 }, 229 }, 230 { 231 ht: quickXorHash, 232 want: []string{ 233 "2d00000000000000000000000100000000000000 empty space\n", 234 "4001dad296b6b4a52d6d694b67dad296b6b4a52d potato2\n", 235 }, 236 }, 237 { 238 ht: quickXorHash, 239 download: true, 240 want: []string{ 241 "2d00000000000000000000000100000000000000 empty space\n", 242 "4001dad296b6b4a52d6d694b67dad296b6b4a52d potato2\n", 243 }, 244 }, 245 { 246 ht: quickXorHash, 247 base64: true, 248 want: []string{ 249 "LQAAAAAAAAAAAAAAAQAAAAAAAAA= empty space\n", 250 "QAHa0pa2tKUtbWlLZ9rSlra0pS0= potato2\n", 251 }, 252 }, 253 { 254 ht: quickXorHash, 255 base64: true, 256 download: true, 257 want: []string{ 258 "LQAAAAAAAAAAAAAAAQAAAAAAAAA= empty space\n", 259 "QAHa0pa2tKUtbWlLZ9rSlra0pS0= potato2\n", 260 }, 261 }, 262 } { 263 if !hashes.Contains(test.ht) { 264 continue 265 } 266 name := cases.Title(language.Und, cases.NoLower).String(test.ht.String()) 267 if test.download { 268 name += "Download" 269 } 270 if test.base64 { 271 name += "Base64" 272 } 273 t.Run(name, func(t *testing.T) { 274 var buf bytes.Buffer 275 
err := operations.HashLister(ctx, test.ht, test.base64, test.download, r.Fremote, &buf) 276 require.NoError(t, err) 277 res := buf.String() 278 for _, line := range test.want { 279 assert.Contains(t, res, line) 280 } 281 }) 282 } 283 } 284 285 func TestHashSumsWithErrors(t *testing.T) { 286 ctx := context.Background() 287 memFs, err := fs.NewFs(ctx, ":memory:") 288 require.NoError(t, err) 289 290 // Make a test file 291 content := "-" 292 item1 := fstest.NewItem("file1", content, t1) 293 _ = fstests.PutTestContents(ctx, t, memFs, &item1, content, true) 294 295 // MemoryFS supports MD5 296 buf := &bytes.Buffer{} 297 err = operations.HashLister(ctx, hash.MD5, false, false, memFs, buf) 298 require.NoError(t, err) 299 assert.Contains(t, buf.String(), "336d5ebc5436534e61d16e63ddfca327 file1\n") 300 301 // MemoryFS can't do SHA1, but UNSUPPORTED must not appear in the output 302 buf.Reset() 303 err = operations.HashLister(ctx, hash.SHA1, false, false, memFs, buf) 304 require.NoError(t, err) 305 assert.NotContains(t, buf.String(), " UNSUPPORTED ") 306 307 // ERROR must not appear in the output either 308 assert.NotContains(t, buf.String(), " ERROR ") 309 // TODO mock an unreadable file 310 } 311 312 func TestHashStream(t *testing.T) { 313 reader := strings.NewReader("") 314 in := io.NopCloser(reader) 315 out := &bytes.Buffer{} 316 for _, test := range []struct { 317 input string 318 ht hash.Type 319 wantHex string 320 wantBase64 string 321 }{ 322 { 323 input: "", 324 ht: hash.MD5, 325 wantHex: "d41d8cd98f00b204e9800998ecf8427e -\n", 326 wantBase64: "1B2M2Y8AsgTpgAmY7PhCfg== -\n", 327 }, 328 { 329 input: "", 330 ht: hash.SHA1, 331 wantHex: "da39a3ee5e6b4b0d3255bfef95601890afd80709 -\n", 332 wantBase64: "2jmj7l5rSw0yVb_vlWAYkK_YBwk= -\n", 333 }, 334 { 335 input: "Hello world!", 336 ht: hash.MD5, 337 wantHex: "86fb269d190d2c85f6e0468ceca42a20 -\n", 338 wantBase64: "hvsmnRkNLIX24EaM7KQqIA== -\n", 339 }, 340 { 341 input: "Hello world!", 342 ht: hash.SHA1, 343 wantHex: 
"d3486ae9136e7856bc42212385ea797094475802 -\n", 344 wantBase64: "00hq6RNueFa8QiEjhep5cJRHWAI= -\n", 345 }, 346 } { 347 reader.Reset(test.input) 348 require.NoError(t, operations.HashSumStream(test.ht, false, in, out)) 349 assert.Equal(t, test.wantHex, out.String()) 350 _, _ = reader.Seek(0, io.SeekStart) 351 out.Reset() 352 require.NoError(t, operations.HashSumStream(test.ht, true, in, out)) 353 assert.Equal(t, test.wantBase64, out.String()) 354 out.Reset() 355 } 356 } 357 358 func TestSuffixName(t *testing.T) { 359 ctx := context.Background() 360 ctx, ci := fs.AddConfig(ctx) 361 for _, test := range []struct { 362 remote string 363 suffix string 364 keepExt bool 365 want string 366 }{ 367 {"test.txt", "", false, "test.txt"}, 368 {"test.txt", "", true, "test.txt"}, 369 {"test.txt", "-suffix", false, "test.txt-suffix"}, 370 {"test.txt", "-suffix", true, "test-suffix.txt"}, 371 {"test.txt.csv", "-suffix", false, "test.txt.csv-suffix"}, 372 {"test.txt.csv", "-suffix", true, "test-suffix.txt.csv"}, 373 {"test", "-suffix", false, "test-suffix"}, 374 {"test", "-suffix", true, "test-suffix"}, 375 {"test.html", "-suffix", true, "test-suffix.html"}, 376 {"test.html.txt", "-suffix", true, "test-suffix.html.txt"}, 377 {"test.csv.html.txt", "-suffix", true, "test-suffix.csv.html.txt"}, 378 {"test.badext.csv.html.txt", "-suffix", true, "test.badext-suffix.csv.html.txt"}, 379 {"test.badext", "-suffix", true, "test-suffix.badext"}, 380 } { 381 ci.Suffix = test.suffix 382 ci.SuffixKeepExtension = test.keepExt 383 got := operations.SuffixName(ctx, test.remote) 384 assert.Equal(t, test.want, got, fmt.Sprintf("%+v", test)) 385 } 386 } 387 388 func TestCount(t *testing.T) { 389 ctx := context.Background() 390 ctx, ci := fs.AddConfig(ctx) 391 r := fstest.NewRun(t) 392 file1 := r.WriteBoth(ctx, "potato2", "------------------------------------------------------------", t1) 393 file2 := r.WriteBoth(ctx, "empty space", "-", t2) 394 file3 := r.WriteBoth(ctx, "sub dir/potato3", "hello", t2) 
395 396 r.CheckRemoteItems(t, file1, file2, file3) 397 398 // Check the MaxDepth too 399 ci.MaxDepth = 1 400 401 objects, size, sizeless, err := operations.Count(ctx, r.Fremote) 402 require.NoError(t, err) 403 assert.Equal(t, int64(2), objects) 404 assert.Equal(t, int64(61), size) 405 assert.Equal(t, int64(0), sizeless) 406 } 407 408 func TestDelete(t *testing.T) { 409 ctx := context.Background() 410 fi, err := filter.NewFilter(nil) 411 require.NoError(t, err) 412 fi.Opt.MaxSize = 60 413 ctx = filter.ReplaceConfig(ctx, fi) 414 r := fstest.NewRun(t) 415 file1 := r.WriteObject(ctx, "small", "1234567890", t2) // 10 bytes 416 file2 := r.WriteObject(ctx, "medium", "------------------------------------------------------------", t1) // 60 bytes 417 file3 := r.WriteObject(ctx, "large", "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA", t1) // 100 bytes 418 r.CheckRemoteItems(t, file1, file2, file3) 419 420 err = operations.Delete(ctx, r.Fremote) 421 require.NoError(t, err) 422 r.CheckRemoteItems(t, file3) 423 } 424 425 func isChunker(f fs.Fs) bool { 426 return strings.HasPrefix(f.Name(), "TestChunker") 427 } 428 429 func skipIfChunker(t *testing.T, f fs.Fs) { 430 if isChunker(f) { 431 t.Skip("Skipping test on chunker backend") 432 } 433 } 434 435 func TestMaxDelete(t *testing.T) { 436 ctx := context.Background() 437 ctx, ci := fs.AddConfig(ctx) 438 r := fstest.NewRun(t) 439 accounting.GlobalStats().ResetCounters() 440 ci.MaxDelete = 2 441 defer r.Finalise() 442 skipIfChunker(t, r.Fremote) // chunker does copy/delete on s3 443 file1 := r.WriteObject(ctx, "small", "1234567890", t2) // 10 bytes 444 file2 := r.WriteObject(ctx, "medium", "------------------------------------------------------------", t1) // 60 bytes 445 file3 := r.WriteObject(ctx, "large", "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA", t1) // 100 bytes 446 r.CheckRemoteItems(t, file1, file2, file3) 447 
err := operations.Delete(ctx, r.Fremote) 448 449 require.Error(t, err) 450 objects, _, _, err := operations.Count(ctx, r.Fremote) 451 require.NoError(t, err) 452 assert.Equal(t, int64(1), objects) 453 } 454 455 // TestMaxDeleteSizeLargeFile one of the files is larger than allowed 456 func TestMaxDeleteSizeLargeFile(t *testing.T) { 457 ctx := context.Background() 458 ctx, ci := fs.AddConfig(ctx) 459 r := fstest.NewRun(t) 460 accounting.GlobalStats().ResetCounters() 461 ci.MaxDeleteSize = 70 462 defer r.Finalise() 463 skipIfChunker(t, r.Fremote) // chunker does copy/delete on s3 464 file1 := r.WriteObject(ctx, "small", "1234567890", t2) // 10 bytes 465 file2 := r.WriteObject(ctx, "medium", "------------------------------------------------------------", t1) // 60 bytes 466 file3 := r.WriteObject(ctx, "large", "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA", t1) // 100 bytes 467 r.CheckRemoteItems(t, file1, file2, file3) 468 469 err := operations.Delete(ctx, r.Fremote) 470 require.Error(t, err) 471 r.CheckRemoteItems(t, file3) 472 } 473 474 func TestMaxDeleteSize(t *testing.T) { 475 ctx := context.Background() 476 ctx, ci := fs.AddConfig(ctx) 477 r := fstest.NewRun(t) 478 accounting.GlobalStats().ResetCounters() 479 ci.MaxDeleteSize = 160 480 defer r.Finalise() 481 skipIfChunker(t, r.Fremote) // chunker does copy/delete on s3 482 file1 := r.WriteObject(ctx, "small", "1234567890", t2) // 10 bytes 483 file2 := r.WriteObject(ctx, "medium", "------------------------------------------------------------", t1) // 60 bytes 484 file3 := r.WriteObject(ctx, "large", "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA", t1) // 100 bytes 485 r.CheckRemoteItems(t, file1, file2, file3) 486 487 err := operations.Delete(ctx, r.Fremote) 488 require.Error(t, err) 489 objects, _, _, err := operations.Count(ctx, r.Fremote) 490 require.NoError(t, err) 491 assert.Equal(t, int64(1), 
objects) // 10 or 100 bytes 492 } 493 494 func TestRetry(t *testing.T) { 495 ctx := context.Background() 496 497 var i int 498 var err error 499 fn := func() error { 500 i-- 501 if i <= 0 { 502 return nil 503 } 504 return err 505 } 506 507 i, err = 3, io.EOF 508 assert.Equal(t, nil, operations.Retry(ctx, nil, 5, fn)) 509 assert.Equal(t, 0, i) 510 511 i, err = 10, io.EOF 512 assert.Equal(t, io.EOF, operations.Retry(ctx, nil, 5, fn)) 513 assert.Equal(t, 5, i) 514 515 i, err = 10, fs.ErrorObjectNotFound 516 assert.Equal(t, fs.ErrorObjectNotFound, operations.Retry(ctx, nil, 5, fn)) 517 assert.Equal(t, 9, i) 518 519 } 520 521 func TestCat(t *testing.T) { 522 ctx := context.Background() 523 r := fstest.NewRun(t) 524 file1 := r.WriteBoth(ctx, "file1", "ABCDEFGHIJ", t1) 525 file2 := r.WriteBoth(ctx, "file2", "012345678", t2) 526 527 r.CheckRemoteItems(t, file1, file2) 528 529 for _, test := range []struct { 530 offset int64 531 count int64 532 separator string 533 a string 534 b string 535 }{ 536 {0, -1, "", "ABCDEFGHIJ", "012345678"}, 537 {0, 5, "", "ABCDE", "01234"}, 538 {-3, -1, "", "HIJ", "678"}, 539 {1, 3, "", "BCD", "123"}, 540 {0, -1, "\n", "ABCDEFGHIJ", "012345678"}, 541 } { 542 var buf bytes.Buffer 543 err := operations.Cat(ctx, r.Fremote, &buf, test.offset, test.count, []byte(test.separator)) 544 require.NoError(t, err) 545 res := buf.String() 546 547 if res != test.a+test.separator+test.b+test.separator && res != test.b+test.separator+test.a+test.separator { 548 t.Errorf("Incorrect output from Cat(%d,%d,%s): %q", test.offset, test.count, test.separator, res) 549 } 550 } 551 } 552 553 func TestPurge(t *testing.T) { 554 ctx := context.Background() 555 r := fstest.NewRunIndividual(t) // make new container (azureblob has delayed mkdir after rmdir) 556 r.Mkdir(ctx, r.Fremote) 557 558 // Make some files and dirs 559 r.ForceMkdir(ctx, r.Fremote) 560 file1 := r.WriteObject(ctx, "A1/B1/C1/one", "aaa", t1) 561 //..and dirs we expect to delete 562 require.NoError(t, 
operations.Mkdir(ctx, r.Fremote, "A2")) 563 require.NoError(t, operations.Mkdir(ctx, r.Fremote, "A1/B2")) 564 require.NoError(t, operations.Mkdir(ctx, r.Fremote, "A1/B2/C2")) 565 require.NoError(t, operations.Mkdir(ctx, r.Fremote, "A1/B1/C3")) 566 require.NoError(t, operations.Mkdir(ctx, r.Fremote, "A3")) 567 require.NoError(t, operations.Mkdir(ctx, r.Fremote, "A3/B3")) 568 require.NoError(t, operations.Mkdir(ctx, r.Fremote, "A3/B3/C4")) 569 //..and one more file at the end 570 file2 := r.WriteObject(ctx, "A1/two", "bbb", t2) 571 572 fstest.CheckListingWithPrecision( 573 t, 574 r.Fremote, 575 []fstest.Item{ 576 file1, file2, 577 }, 578 []string{ 579 "A1", 580 "A1/B1", 581 "A1/B1/C1", 582 "A2", 583 "A1/B2", 584 "A1/B2/C2", 585 "A1/B1/C3", 586 "A3", 587 "A3/B3", 588 "A3/B3/C4", 589 }, 590 fs.GetModifyWindow(ctx, r.Fremote), 591 ) 592 593 require.NoError(t, operations.Purge(ctx, r.Fremote, "A1/B1")) 594 595 fstest.CheckListingWithPrecision( 596 t, 597 r.Fremote, 598 []fstest.Item{ 599 file2, 600 }, 601 []string{ 602 "A1", 603 "A2", 604 "A1/B2", 605 "A1/B2/C2", 606 "A3", 607 "A3/B3", 608 "A3/B3/C4", 609 }, 610 fs.GetModifyWindow(ctx, r.Fremote), 611 ) 612 613 require.NoError(t, operations.Purge(ctx, r.Fremote, "")) 614 615 fstest.CheckListingWithPrecision( 616 t, 617 r.Fremote, 618 []fstest.Item{}, 619 []string{}, 620 fs.GetModifyWindow(ctx, r.Fremote), 621 ) 622 623 } 624 625 func TestRmdirsNoLeaveRoot(t *testing.T) { 626 ctx := context.Background() 627 r := fstest.NewRun(t) 628 r.Mkdir(ctx, r.Fremote) 629 630 // Make some files and dirs we expect to keep 631 r.ForceMkdir(ctx, r.Fremote) 632 file1 := r.WriteObject(ctx, "A1/B1/C1/one", "aaa", t1) 633 //..and dirs we expect to delete 634 require.NoError(t, operations.Mkdir(ctx, r.Fremote, "A2")) 635 require.NoError(t, operations.Mkdir(ctx, r.Fremote, "A1/B2")) 636 require.NoError(t, operations.Mkdir(ctx, r.Fremote, "A1/B2/C2")) 637 require.NoError(t, operations.Mkdir(ctx, r.Fremote, "A1/B1/C3")) 638 require.NoError(t, 
operations.Mkdir(ctx, r.Fremote, "A3")) 639 require.NoError(t, operations.Mkdir(ctx, r.Fremote, "A3/B3")) 640 require.NoError(t, operations.Mkdir(ctx, r.Fremote, "A3/B3/C4")) 641 //..and one more file at the end 642 file2 := r.WriteObject(ctx, "A1/two", "bbb", t2) 643 644 fstest.CheckListingWithPrecision( 645 t, 646 r.Fremote, 647 []fstest.Item{ 648 file1, file2, 649 }, 650 []string{ 651 "A1", 652 "A1/B1", 653 "A1/B1/C1", 654 "A2", 655 "A1/B2", 656 "A1/B2/C2", 657 "A1/B1/C3", 658 "A3", 659 "A3/B3", 660 "A3/B3/C4", 661 }, 662 fs.GetModifyWindow(ctx, r.Fremote), 663 ) 664 665 require.NoError(t, operations.Rmdirs(ctx, r.Fremote, "A3/B3/C4", false)) 666 667 fstest.CheckListingWithPrecision( 668 t, 669 r.Fremote, 670 []fstest.Item{ 671 file1, file2, 672 }, 673 []string{ 674 "A1", 675 "A1/B1", 676 "A1/B1/C1", 677 "A2", 678 "A1/B2", 679 "A1/B2/C2", 680 "A1/B1/C3", 681 "A3", 682 "A3/B3", 683 }, 684 fs.GetModifyWindow(ctx, r.Fremote), 685 ) 686 687 require.NoError(t, operations.Rmdirs(ctx, r.Fremote, "", false)) 688 689 fstest.CheckListingWithPrecision( 690 t, 691 r.Fremote, 692 []fstest.Item{ 693 file1, file2, 694 }, 695 []string{ 696 "A1", 697 "A1/B1", 698 "A1/B1/C1", 699 }, 700 fs.GetModifyWindow(ctx, r.Fremote), 701 ) 702 703 // Delete the files so we can remove everything including the root 704 for _, file := range []fstest.Item{file1, file2} { 705 o, err := r.Fremote.NewObject(ctx, file.Path) 706 require.NoError(t, err) 707 require.NoError(t, o.Remove(ctx)) 708 } 709 710 require.NoError(t, operations.Rmdirs(ctx, r.Fremote, "", false)) 711 712 fstest.CheckListingWithPrecision( 713 t, 714 r.Fremote, 715 []fstest.Item{}, 716 []string{}, 717 fs.GetModifyWindow(ctx, r.Fremote), 718 ) 719 } 720 721 func TestRmdirsLeaveRoot(t *testing.T) { 722 ctx := context.Background() 723 r := fstest.NewRun(t) 724 r.Mkdir(ctx, r.Fremote) 725 726 r.ForceMkdir(ctx, r.Fremote) 727 728 require.NoError(t, operations.Mkdir(ctx, r.Fremote, "A1")) 729 require.NoError(t, operations.Mkdir(ctx, 
r.Fremote, "A1/B1")) 730 require.NoError(t, operations.Mkdir(ctx, r.Fremote, "A1/B1/C1")) 731 732 fstest.CheckListingWithPrecision( 733 t, 734 r.Fremote, 735 []fstest.Item{}, 736 []string{ 737 "A1", 738 "A1/B1", 739 "A1/B1/C1", 740 }, 741 fs.GetModifyWindow(ctx, r.Fremote), 742 ) 743 744 require.NoError(t, operations.Rmdirs(ctx, r.Fremote, "A1", true)) 745 746 fstest.CheckListingWithPrecision( 747 t, 748 r.Fremote, 749 []fstest.Item{}, 750 []string{ 751 "A1", 752 }, 753 fs.GetModifyWindow(ctx, r.Fremote), 754 ) 755 } 756 757 func TestRmdirsWithFilter(t *testing.T) { 758 ctx := context.Background() 759 ctx, fi := filter.AddConfig(ctx) 760 require.NoError(t, fi.AddRule("+ /A1/B1/**")) 761 require.NoError(t, fi.AddRule("- *")) 762 r := fstest.NewRun(t) 763 r.Mkdir(ctx, r.Fremote) 764 765 r.ForceMkdir(ctx, r.Fremote) 766 767 require.NoError(t, operations.Mkdir(ctx, r.Fremote, "A1")) 768 require.NoError(t, operations.Mkdir(ctx, r.Fremote, "A1/B1")) 769 require.NoError(t, operations.Mkdir(ctx, r.Fremote, "A1/B1/C1")) 770 771 fstest.CheckListingWithPrecision( 772 t, 773 r.Fremote, 774 []fstest.Item{}, 775 []string{ 776 "A1", 777 "A1/B1", 778 "A1/B1/C1", 779 }, 780 fs.GetModifyWindow(ctx, r.Fremote), 781 ) 782 783 require.NoError(t, operations.Rmdirs(ctx, r.Fremote, "", false)) 784 785 fstest.CheckListingWithPrecision( 786 t, 787 r.Fremote, 788 []fstest.Item{}, 789 []string{ 790 "A1", 791 }, 792 fs.GetModifyWindow(ctx, r.Fremote), 793 ) 794 } 795 796 func TestCopyURL(t *testing.T) { 797 ctx := context.Background() 798 ctx, ci := fs.AddConfig(ctx) 799 r := fstest.NewRun(t) 800 801 contents := "file contents\n" 802 file1 := r.WriteFile("file1", contents, t1) 803 file2 := r.WriteFile("file2", contents, t1) 804 r.Mkdir(ctx, r.Fremote) 805 r.CheckRemoteItems(t) 806 807 // check when reading from regular HTTP server 808 status := 0 809 nameHeader := false 810 headerFilename := "headerfilename.txt" 811 handler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { 812 
if status != 0 { 813 http.Error(w, "an error occurred", status) 814 } 815 if nameHeader { 816 w.Header().Set("Content-Disposition", `attachment; filename="folder\`+headerFilename+`"`) 817 } 818 _, err := w.Write([]byte(contents)) 819 assert.NoError(t, err) 820 }) 821 ts := httptest.NewServer(handler) 822 defer ts.Close() 823 824 o, err := operations.CopyURL(ctx, r.Fremote, "file1", ts.URL, false, false, false) 825 require.NoError(t, err) 826 assert.Equal(t, int64(len(contents)), o.Size()) 827 828 fstest.CheckListingWithPrecision(t, r.Fremote, []fstest.Item{file1}, nil, fs.ModTimeNotSupported) 829 830 // Check file clobbering 831 _, err = operations.CopyURL(ctx, r.Fremote, "file1", ts.URL, false, false, true) 832 require.Error(t, err) 833 834 // Check auto file naming 835 status = 0 836 urlFileName := "filename.txt" 837 o, err = operations.CopyURL(ctx, r.Fremote, "", ts.URL+"/"+urlFileName, true, false, false) 838 require.NoError(t, err) 839 assert.Equal(t, int64(len(contents)), o.Size()) 840 assert.Equal(t, urlFileName, o.Remote()) 841 842 // Check header file naming 843 nameHeader = true 844 o, err = operations.CopyURL(ctx, r.Fremote, "", ts.URL, true, true, false) 845 require.NoError(t, err) 846 assert.Equal(t, int64(len(contents)), o.Size()) 847 assert.Equal(t, headerFilename, o.Remote()) 848 849 // Check auto file naming when url without file name 850 _, err = operations.CopyURL(ctx, r.Fremote, "file1", ts.URL, true, false, false) 851 require.Error(t, err) 852 853 // Check header file naming without header set 854 nameHeader = false 855 _, err = operations.CopyURL(ctx, r.Fremote, "file1", ts.URL, true, true, false) 856 require.Error(t, err) 857 858 // Check an error is returned for a 404 859 status = http.StatusNotFound 860 o, err = operations.CopyURL(ctx, r.Fremote, "file1", ts.URL, false, false, false) 861 require.Error(t, err) 862 assert.Contains(t, err.Error(), "Not Found") 863 assert.Nil(t, o) 864 status = 0 865 866 // check when reading from unverified 
HTTPS server 867 ci.InsecureSkipVerify = true 868 fshttp.ResetTransport() 869 defer fshttp.ResetTransport() 870 tss := httptest.NewTLSServer(handler) 871 defer tss.Close() 872 873 o, err = operations.CopyURL(ctx, r.Fremote, "file2", tss.URL, false, false, false) 874 require.NoError(t, err) 875 assert.Equal(t, int64(len(contents)), o.Size()) 876 fstest.CheckListingWithPrecision(t, r.Fremote, []fstest.Item{file1, file2, fstest.NewItem(urlFileName, contents, t1), fstest.NewItem(headerFilename, contents, t1)}, nil, fs.ModTimeNotSupported) 877 } 878 879 func TestCopyURLToWriter(t *testing.T) { 880 ctx := context.Background() 881 contents := "file contents\n" 882 883 // check when reading from regular HTTP server 884 status := 0 885 handler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { 886 if status != 0 { 887 http.Error(w, "an error occurred", status) 888 return 889 } 890 _, err := w.Write([]byte(contents)) 891 assert.NoError(t, err) 892 }) 893 ts := httptest.NewServer(handler) 894 defer ts.Close() 895 896 // test normal fetch 897 var buf bytes.Buffer 898 err := operations.CopyURLToWriter(ctx, ts.URL, &buf) 899 require.NoError(t, err) 900 assert.Equal(t, contents, buf.String()) 901 902 // test fetch with error 903 status = http.StatusNotFound 904 buf.Reset() 905 err = operations.CopyURLToWriter(ctx, ts.URL, &buf) 906 require.Error(t, err) 907 assert.Contains(t, err.Error(), "Not Found") 908 assert.Equal(t, 0, len(buf.String())) 909 } 910 911 func TestMoveFile(t *testing.T) { 912 ctx := context.Background() 913 r := fstest.NewRun(t) 914 915 file1 := r.WriteFile("file1", "file1 contents", t1) 916 r.CheckLocalItems(t, file1) 917 918 file2 := file1 919 file2.Path = "sub/file2" 920 921 err := operations.MoveFile(ctx, r.Fremote, r.Flocal, file2.Path, file1.Path) 922 require.NoError(t, err) 923 r.CheckLocalItems(t) 924 r.CheckRemoteItems(t, file2) 925 926 r.WriteFile("file1", "file1 contents", t1) 927 r.CheckLocalItems(t, file1) 928 929 err = 
operations.MoveFile(ctx, r.Fremote, r.Flocal, file2.Path, file1.Path) 930 require.NoError(t, err) 931 r.CheckLocalItems(t) 932 r.CheckRemoteItems(t, file2) 933 934 err = operations.MoveFile(ctx, r.Fremote, r.Fremote, file2.Path, file2.Path) 935 require.NoError(t, err) 936 r.CheckLocalItems(t) 937 r.CheckRemoteItems(t, file2) 938 } 939 940 func TestMoveFileWithIgnoreExisting(t *testing.T) { 941 ctx := context.Background() 942 ctx, ci := fs.AddConfig(ctx) 943 r := fstest.NewRun(t) 944 file1 := r.WriteFile("file1", "file1 contents", t1) 945 r.CheckLocalItems(t, file1) 946 947 ci.IgnoreExisting = true 948 949 err := operations.MoveFile(ctx, r.Fremote, r.Flocal, file1.Path, file1.Path) 950 require.NoError(t, err) 951 r.CheckLocalItems(t) 952 r.CheckRemoteItems(t, file1) 953 954 // Recreate file with updated content 955 file1b := r.WriteFile("file1", "file1 modified", t2) 956 r.CheckLocalItems(t, file1b) 957 958 // Ensure modified file did not transfer and was not deleted 959 err = operations.MoveFile(ctx, r.Fremote, r.Flocal, file1.Path, file1b.Path) 960 require.NoError(t, err) 961 r.CheckLocalItems(t, file1b) 962 r.CheckRemoteItems(t, file1) 963 } 964 965 func TestCaseInsensitiveMoveFile(t *testing.T) { 966 ctx := context.Background() 967 r := fstest.NewRun(t) 968 if !r.Fremote.Features().CaseInsensitive { 969 return 970 } 971 972 file1 := r.WriteFile("file1", "file1 contents", t1) 973 r.CheckLocalItems(t, file1) 974 975 file2 := file1 976 file2.Path = "sub/file2" 977 978 err := operations.MoveFile(ctx, r.Fremote, r.Flocal, file2.Path, file1.Path) 979 require.NoError(t, err) 980 r.CheckLocalItems(t) 981 r.CheckRemoteItems(t, file2) 982 983 r.WriteFile("file1", "file1 contents", t1) 984 r.CheckLocalItems(t, file1) 985 986 err = operations.MoveFile(ctx, r.Fremote, r.Flocal, file2.Path, file1.Path) 987 require.NoError(t, err) 988 r.CheckLocalItems(t) 989 r.CheckRemoteItems(t, file2) 990 991 file2Capitalized := file2 992 file2Capitalized.Path = "sub/File2" 993 994 err = 
operations.MoveFile(ctx, r.Fremote, r.Fremote, file2Capitalized.Path, file2.Path) 995 require.NoError(t, err) 996 r.CheckLocalItems(t) 997 r.CheckRemoteItems(t, file2Capitalized) 998 } 999 1000 func TestCaseInsensitiveMoveFileDryRun(t *testing.T) { 1001 ctx := context.Background() 1002 ctx, ci := fs.AddConfig(ctx) 1003 r := fstest.NewRun(t) 1004 if !r.Fremote.Features().CaseInsensitive { 1005 return 1006 } 1007 1008 file1 := r.WriteObject(ctx, "hello", "world", t1) 1009 r.CheckRemoteItems(t, file1) 1010 1011 ci.DryRun = true 1012 err := operations.MoveFile(ctx, r.Fremote, r.Fremote, "HELLO", file1.Path) 1013 require.NoError(t, err) 1014 r.CheckRemoteItems(t, file1) 1015 } 1016 1017 func TestMoveFileBackupDir(t *testing.T) { 1018 ctx := context.Background() 1019 ctx, ci := fs.AddConfig(ctx) 1020 r := fstest.NewRun(t) 1021 if !operations.CanServerSideMove(r.Fremote) { 1022 t.Skip("Skipping test as remote does not support server-side move or copy") 1023 } 1024 1025 ci.BackupDir = r.FremoteName + "/backup" 1026 1027 file1 := r.WriteFile("dst/file1", "file1 contents", t1) 1028 r.CheckLocalItems(t, file1) 1029 1030 file1old := r.WriteObject(ctx, "dst/file1", "file1 contents old", t1) 1031 r.CheckRemoteItems(t, file1old) 1032 1033 err := operations.MoveFile(ctx, r.Fremote, r.Flocal, file1.Path, file1.Path) 1034 require.NoError(t, err) 1035 r.CheckLocalItems(t) 1036 file1old.Path = "backup/dst/file1" 1037 r.CheckRemoteItems(t, file1old, file1) 1038 } 1039 1040 // testFsInfo is for unit testing fs.Info 1041 type testFsInfo struct { 1042 name string 1043 root string 1044 stringVal string 1045 precision time.Duration 1046 hashes hash.Set 1047 features fs.Features 1048 } 1049 1050 // Name of the remote (as passed into NewFs) 1051 func (i *testFsInfo) Name() string { return i.name } 1052 1053 // Root of the remote (as passed into NewFs) 1054 func (i *testFsInfo) Root() string { return i.root } 1055 1056 // String returns a description of the FS 1057 func (i *testFsInfo) 
String() string { return i.stringVal }

// Precision of the ModTimes in this Fs
func (i *testFsInfo) Precision() time.Duration { return i.precision }

// Returns the supported hash types of the filesystem
func (i *testFsInfo) Hashes() hash.Set { return i.hashes }

// Returns the optional features of this Fs
func (i *testFsInfo) Features() *fs.Features { return &i.features }

func TestSameConfig(t *testing.T) {
	a := &testFsInfo{name: "name", root: "root"}
	for _, test := range []struct {
		name     string
		root     string
		expected bool
	}{
		{"name", "root", true},
		{"name", "rooty", true},
		{"namey", "root", false},
		{"namey", "roott", false},
	} {
		b := &testFsInfo{name: test.name, root: test.root}
		actual := operations.SameConfig(a, b)
		assert.Equal(t, test.expected, actual)
		actual = operations.SameConfig(b, a)
		assert.Equal(t, test.expected, actual)
	}
}

func TestSame(t *testing.T) {
	a := &testFsInfo{name: "name", root: "root"}
	for _, test := range []struct {
		name     string
		root     string
		expected bool
	}{
		{"name", "root", true},
		{"name", "rooty", false},
		{"namey", "root", false},
		{"namey", "roott", false},
	} {
		b := &testFsInfo{name: test.name, root: test.root}
		actual := operations.Same(a, b)
		assert.Equal(t, test.expected, actual)
		actual = operations.Same(b, a)
		assert.Equal(t, test.expected, actual)
	}
}

// testFs is for unit testing fs.Fs
type testFs struct {
	testFsInfo
}

func (i *testFs) List(ctx context.Context, dir string) (entries fs.DirEntries, err error) {
	return nil, nil
}

func (i *testFs) NewObject(ctx context.Context, remote string) (fs.Object, error) { return nil, nil }

func (i *testFs) Put(ctx context.Context, in io.Reader, src fs.ObjectInfo, options ...fs.OpenOption)
(fs.Object, error) { 1120 return nil, nil 1121 } 1122 1123 func (i *testFs) Mkdir(ctx context.Context, dir string) error { return nil } 1124 1125 func (i *testFs) Rmdir(ctx context.Context, dir string) error { return nil } 1126 1127 // copied from TestOverlapping because the behavior of OverlappingFilterCheck should be identical to Overlapping 1128 // when no filters are set 1129 func TestOverlappingFilterCheckWithoutFilter(t *testing.T) { 1130 ctx := context.Background() 1131 src := &testFs{testFsInfo{name: "name", root: "root"}} 1132 slash := string(os.PathSeparator) // native path separator 1133 for _, test := range []struct { 1134 name string 1135 root string 1136 expected bool 1137 }{ 1138 {"name", "root", true}, 1139 {"name", "/root", true}, 1140 {"namey", "root", false}, 1141 {"name", "rooty", false}, 1142 {"namey", "rooty", false}, 1143 {"name", "roo", false}, 1144 {"name", "root/toot", true}, 1145 {"name", "root/toot/", true}, 1146 {"name", "root" + slash + "toot", true}, 1147 {"name", "root" + slash + "toot" + slash, true}, 1148 {"name", "", true}, 1149 {"name", "/", true}, 1150 } { 1151 dst := &testFs{testFsInfo{name: test.name, root: test.root}} 1152 what := fmt.Sprintf("(%q,%q) vs (%q,%q)", src.name, src.root, dst.name, dst.root) 1153 actual := operations.OverlappingFilterCheck(ctx, src, dst) 1154 assert.Equal(t, test.expected, actual, what) 1155 actual = operations.OverlappingFilterCheck(ctx, dst, src) 1156 assert.Equal(t, test.expected, actual, what) 1157 } 1158 } 1159 1160 func TestOverlappingFilterCheckWithFilter(t *testing.T) { 1161 ctx := context.Background() 1162 fi, err := filter.NewFilter(nil) 1163 require.NoError(t, err) 1164 require.NoError(t, fi.Add(false, "/exclude/")) 1165 require.NoError(t, fi.Add(false, "/Exclude2/")) 1166 require.NoError(t, fi.Add(true, "*")) 1167 ctx = filter.ReplaceConfig(ctx, fi) 1168 1169 src := &testFs{testFsInfo{name: "name", root: "root"}} 1170 src.features.CaseInsensitive = true 1171 slash := 
string(os.PathSeparator) // native path separator 1172 for _, test := range []struct { 1173 name string 1174 root string 1175 expected bool 1176 }{ 1177 {"name", "root", true}, 1178 {"name", "ROOT", true}, // case insensitive is set 1179 {"name", "/root", true}, 1180 {"name", "root/", true}, 1181 {"name", "root" + slash, true}, 1182 {"name", "root/exclude", false}, 1183 {"name", "root/Exclude2", false}, 1184 {"name", "root/include", true}, 1185 {"name", "root/exclude/", false}, 1186 {"name", "root/Exclude2/", false}, 1187 {"name", "root/exclude/sub", false}, 1188 {"name", "root/Exclude2/sub", false}, 1189 {"name", "/root/exclude/", false}, 1190 {"name", "root" + slash + "exclude", false}, 1191 {"name", "root" + slash + "exclude" + slash, false}, 1192 {"namey", "root/include", false}, 1193 {"namey", "root/include/", false}, 1194 {"namey", "root" + slash + "include", false}, 1195 {"namey", "root" + slash + "include" + slash, false}, 1196 } { 1197 dst := &testFs{testFsInfo{name: test.name, root: test.root}} 1198 dst.features.CaseInsensitive = true 1199 what := fmt.Sprintf("(%q,%q) vs (%q,%q)", src.name, src.root, dst.name, dst.root) 1200 actual := operations.OverlappingFilterCheck(ctx, dst, src) 1201 assert.Equal(t, test.expected, actual, what) 1202 actual = operations.OverlappingFilterCheck(ctx, src, dst) 1203 assert.Equal(t, test.expected, actual, what) 1204 } 1205 } 1206 1207 func TestListFormat(t *testing.T) { 1208 item0 := &operations.ListJSONItem{ 1209 Path: "a", 1210 Name: "a", 1211 Encrypted: "encryptedFileName", 1212 Size: 1, 1213 MimeType: "application/octet-stream", 1214 ModTime: operations.Timestamp{ 1215 When: t1, 1216 Format: "2006-01-02T15:04:05.000000000Z07:00"}, 1217 IsDir: false, 1218 Hashes: map[string]string{ 1219 "md5": "0cc175b9c0f1b6a831c399e269772661", 1220 "sha1": "86f7e437faa5a7fce15d1ddcb9eaeaea377667b8", 1221 "dropbox": "bf5d3affb73efd2ec6c36ad3112dd933efed63c4e1cbffcfa88e2759c144f2d8", 1222 "quickxor": 
"6100000000000000000000000100000000000000"}, 1223 ID: "fileID", 1224 OrigID: "fileOrigID", 1225 } 1226 1227 item1 := &operations.ListJSONItem{ 1228 Path: "subdir", 1229 Name: "subdir", 1230 Encrypted: "encryptedDirName", 1231 Size: -1, 1232 MimeType: "inode/directory", 1233 ModTime: operations.Timestamp{ 1234 When: t2, 1235 Format: "2006-01-02T15:04:05.000000000Z07:00"}, 1236 IsDir: true, 1237 Hashes: map[string]string(nil), 1238 ID: "dirID", 1239 OrigID: "dirOrigID", 1240 } 1241 1242 var list operations.ListFormat 1243 list.AddPath() 1244 list.SetDirSlash(false) 1245 assert.Equal(t, "subdir", list.Format(item1)) 1246 1247 list.SetDirSlash(true) 1248 assert.Equal(t, "subdir/", list.Format(item1)) 1249 1250 list.SetOutput(nil) 1251 assert.Equal(t, "", list.Format(item1)) 1252 1253 list.AppendOutput(func(item *operations.ListJSONItem) string { return "a" }) 1254 list.AppendOutput(func(item *operations.ListJSONItem) string { return "b" }) 1255 assert.Equal(t, "ab", list.Format(item1)) 1256 list.SetSeparator(":::") 1257 assert.Equal(t, "a:::b", list.Format(item1)) 1258 1259 list.SetOutput(nil) 1260 list.AddModTime("") 1261 assert.Equal(t, t1.Local().Format("2006-01-02 15:04:05"), list.Format(item0)) 1262 1263 list.SetOutput(nil) 1264 list.SetSeparator("|") 1265 list.AddID() 1266 list.AddOrigID() 1267 assert.Equal(t, "fileID|fileOrigID", list.Format(item0)) 1268 assert.Equal(t, "dirID|dirOrigID", list.Format(item1)) 1269 1270 list.SetOutput(nil) 1271 list.AddMimeType() 1272 assert.Contains(t, list.Format(item0), "/") 1273 assert.Equal(t, "inode/directory", list.Format(item1)) 1274 1275 list.SetOutput(nil) 1276 list.AddMetadata() 1277 assert.Equal(t, "{}", list.Format(item0)) 1278 assert.Equal(t, "{}", list.Format(item1)) 1279 1280 list.SetOutput(nil) 1281 list.AddPath() 1282 list.SetAbsolute(true) 1283 assert.Equal(t, "/a", list.Format(item0)) 1284 list.SetAbsolute(false) 1285 assert.Equal(t, "a", list.Format(item0)) 1286 1287 list.SetOutput(nil) 1288 list.AddSize() 
1289 assert.Equal(t, "1", list.Format(item0)) 1290 1291 list.AddPath() 1292 list.AddModTime("") 1293 list.SetDirSlash(true) 1294 list.SetSeparator("__SEP__") 1295 assert.Equal(t, "1__SEP__a__SEP__"+t1.Local().Format("2006-01-02 15:04:05"), list.Format(item0)) 1296 assert.Equal(t, "-1__SEP__subdir/__SEP__"+t2.Local().Format("2006-01-02 15:04:05"), list.Format(item1)) 1297 1298 for _, test := range []struct { 1299 ht hash.Type 1300 want string 1301 }{ 1302 {hash.MD5, "0cc175b9c0f1b6a831c399e269772661"}, 1303 {hash.SHA1, "86f7e437faa5a7fce15d1ddcb9eaeaea377667b8"}, 1304 } { 1305 list.SetOutput(nil) 1306 list.AddHash(test.ht) 1307 assert.Equal(t, test.want, list.Format(item0)) 1308 } 1309 1310 list.SetOutput(nil) 1311 list.SetSeparator("|") 1312 list.SetCSV(true) 1313 list.AddSize() 1314 list.AddPath() 1315 list.AddModTime("") 1316 list.SetDirSlash(true) 1317 assert.Equal(t, "1|a|"+t1.Local().Format("2006-01-02 15:04:05"), list.Format(item0)) 1318 assert.Equal(t, "-1|subdir/|"+t2.Local().Format("2006-01-02 15:04:05"), list.Format(item1)) 1319 1320 list.SetOutput(nil) 1321 list.SetSeparator("|") 1322 list.AddPath() 1323 list.AddEncrypted() 1324 assert.Equal(t, "a|encryptedFileName", list.Format(item0)) 1325 assert.Equal(t, "subdir/|encryptedDirName/", list.Format(item1)) 1326 1327 } 1328 1329 func TestDirMove(t *testing.T) { 1330 ctx := context.Background() 1331 r := fstest.NewRun(t) 1332 1333 r.Mkdir(ctx, r.Fremote) 1334 1335 // Make some files and dirs 1336 r.ForceMkdir(ctx, r.Fremote) 1337 files := []fstest.Item{ 1338 r.WriteObject(ctx, "A1/one", "one", t1), 1339 r.WriteObject(ctx, "A1/two", "two", t2), 1340 r.WriteObject(ctx, "A1/B1/three", "three", t3), 1341 r.WriteObject(ctx, "A1/B1/C1/four", "four", t1), 1342 r.WriteObject(ctx, "A1/B1/C2/five", "five", t2), 1343 } 1344 require.NoError(t, operations.Mkdir(ctx, r.Fremote, "A1/B2")) 1345 require.NoError(t, operations.Mkdir(ctx, r.Fremote, "A1/B1/C3")) 1346 1347 fstest.CheckListingWithPrecision( 1348 t, 1349 
r.Fremote, 1350 files, 1351 []string{ 1352 "A1", 1353 "A1/B1", 1354 "A1/B2", 1355 "A1/B1/C1", 1356 "A1/B1/C2", 1357 "A1/B1/C3", 1358 }, 1359 fs.GetModifyWindow(ctx, r.Fremote), 1360 ) 1361 1362 require.NoError(t, operations.DirMove(ctx, r.Fremote, "A1", "A2")) 1363 1364 for i := range files { 1365 files[i].Path = strings.ReplaceAll(files[i].Path, "A1/", "A2/") 1366 } 1367 1368 fstest.CheckListingWithPrecision( 1369 t, 1370 r.Fremote, 1371 files, 1372 []string{ 1373 "A2", 1374 "A2/B1", 1375 "A2/B2", 1376 "A2/B1/C1", 1377 "A2/B1/C2", 1378 "A2/B1/C3", 1379 }, 1380 fs.GetModifyWindow(ctx, r.Fremote), 1381 ) 1382 1383 // Disable DirMove 1384 features := r.Fremote.Features() 1385 features.DirMove = nil 1386 1387 require.NoError(t, operations.DirMove(ctx, r.Fremote, "A2", "A3")) 1388 1389 for i := range files { 1390 files[i].Path = strings.ReplaceAll(files[i].Path, "A2/", "A3/") 1391 } 1392 1393 fstest.CheckListingWithPrecision( 1394 t, 1395 r.Fremote, 1396 files, 1397 []string{ 1398 "A3", 1399 "A3/B1", 1400 "A3/B2", 1401 "A3/B1/C1", 1402 "A3/B1/C2", 1403 "A3/B1/C3", 1404 }, 1405 fs.GetModifyWindow(ctx, r.Fremote), 1406 ) 1407 1408 // Try with a DirMove method that exists but returns fs.ErrorCantDirMove (ex. 
combine moving across upstreams) 1409 // Should fall back to manual move (copy + delete) 1410 1411 features.DirMove = func(ctx context.Context, src fs.Fs, srcRemote string, dstRemote string) error { 1412 return fs.ErrorCantDirMove 1413 } 1414 1415 assert.NoError(t, operations.DirMove(ctx, r.Fremote, "A3", "A4")) 1416 1417 for i := range files { 1418 files[i].Path = strings.ReplaceAll(files[i].Path, "A3/", "A4/") 1419 } 1420 1421 fstest.CheckListingWithPrecision( 1422 t, 1423 r.Fremote, 1424 files, 1425 []string{ 1426 "A4", 1427 "A4/B1", 1428 "A4/B2", 1429 "A4/B1/C1", 1430 "A4/B1/C2", 1431 "A4/B1/C3", 1432 }, 1433 fs.GetModifyWindow(ctx, r.Fremote), 1434 ) 1435 } 1436 1437 func TestGetFsInfo(t *testing.T) { 1438 r := fstest.NewRun(t) 1439 1440 f := r.Fremote 1441 info := operations.GetFsInfo(f) 1442 assert.Equal(t, f.Name(), info.Name) 1443 assert.Equal(t, f.Root(), info.Root) 1444 assert.Equal(t, f.String(), info.String) 1445 assert.Equal(t, f.Precision(), info.Precision) 1446 hashSet := hash.NewHashSet() 1447 for _, hashName := range info.Hashes { 1448 var ht hash.Type 1449 require.NoError(t, ht.Set(hashName)) 1450 hashSet.Add(ht) 1451 } 1452 assert.Equal(t, f.Hashes(), hashSet) 1453 assert.Equal(t, f.Features().Enabled(), info.Features) 1454 } 1455 1456 func TestRcat(t *testing.T) { 1457 ctx := context.Background() 1458 ctx, ci := fs.AddConfig(ctx) 1459 check := func(t *testing.T, withChecksum, ignoreChecksum bool) { 1460 ci.CheckSum, ci.IgnoreChecksum = withChecksum, ignoreChecksum 1461 1462 var prefix string 1463 if withChecksum { 1464 prefix = "with_checksum_" 1465 } else { 1466 prefix = "no_checksum_" 1467 } 1468 if ignoreChecksum { 1469 prefix = "ignore_checksum_" 1470 } 1471 1472 r := fstest.NewRun(t) 1473 1474 if *fstest.SizeLimit > 0 && int64(ci.StreamingUploadCutoff) > *fstest.SizeLimit { 1475 savedCutoff := ci.StreamingUploadCutoff 1476 ci.StreamingUploadCutoff = fs.SizeSuffix(*fstest.SizeLimit) 1477 t.Logf("Adjust StreamingUploadCutoff to size limit %s 
(was %s)", ci.StreamingUploadCutoff, savedCutoff) 1478 } 1479 1480 fstest.CheckListing(t, r.Fremote, []fstest.Item{}) 1481 1482 data1 := "this is some really nice test data" 1483 path1 := prefix + "small_file_from_pipe" 1484 1485 data2 := string(make([]byte, ci.StreamingUploadCutoff+1)) 1486 path2 := prefix + "big_file_from_pipe" 1487 1488 in := io.NopCloser(strings.NewReader(data1)) 1489 _, err := operations.Rcat(ctx, r.Fremote, path1, in, t1, nil) 1490 require.NoError(t, err) 1491 1492 in = io.NopCloser(strings.NewReader(data2)) 1493 _, err = operations.Rcat(ctx, r.Fremote, path2, in, t2, nil) 1494 require.NoError(t, err) 1495 1496 file1 := fstest.NewItem(path1, data1, t1) 1497 file2 := fstest.NewItem(path2, data2, t2) 1498 r.CheckRemoteItems(t, file1, file2) 1499 } 1500 1501 for i := 0; i < 4; i++ { 1502 withChecksum := (i & 1) != 0 1503 ignoreChecksum := (i & 2) != 0 1504 t.Run(fmt.Sprintf("withChecksum=%v,ignoreChecksum=%v", withChecksum, ignoreChecksum), func(t *testing.T) { 1505 check(t, withChecksum, ignoreChecksum) 1506 }) 1507 } 1508 } 1509 1510 func TestRcatMetadata(t *testing.T) { 1511 r := fstest.NewRun(t) 1512 1513 if !r.Fremote.Features().UserMetadata { 1514 t.Skip("Skipping as destination doesn't support user metadata") 1515 } 1516 1517 test := func(disableUploadCutoff bool) { 1518 ctx := context.Background() 1519 ctx, ci := fs.AddConfig(ctx) 1520 ci.Metadata = true 1521 data := "this is some really nice test data with metadata" 1522 path := "rcat_metadata" 1523 1524 meta := fs.Metadata{ 1525 "key": "value", 1526 "sausage": "potato", 1527 } 1528 1529 if disableUploadCutoff { 1530 ci.StreamingUploadCutoff = 0 1531 data += " uploadCutoff=0" 1532 path += "_uploadcutoff0" 1533 } 1534 1535 fstest.CheckListing(t, r.Fremote, []fstest.Item{}) 1536 1537 in := io.NopCloser(strings.NewReader(data)) 1538 _, err := operations.Rcat(ctx, r.Fremote, path, in, t1, meta) 1539 require.NoError(t, err) 1540 1541 file := fstest.NewItem(path, data, t1) 1542 
r.CheckRemoteItems(t, file) 1543 1544 o, err := r.Fremote.NewObject(ctx, path) 1545 require.NoError(t, err) 1546 gotMeta, err := fs.GetMetadata(ctx, o) 1547 require.NoError(t, err) 1548 // Check the specific user data we set is set 1549 // Likely there will be other values 1550 assert.Equal(t, "value", gotMeta["key"]) 1551 assert.Equal(t, "potato", gotMeta["sausage"]) 1552 1553 // Delete the test file 1554 require.NoError(t, o.Remove(ctx)) 1555 } 1556 1557 t.Run("Normal", func(t *testing.T) { 1558 test(false) 1559 }) 1560 t.Run("ViaDisk", func(t *testing.T) { 1561 test(true) 1562 }) 1563 } 1564 1565 func TestRcatSize(t *testing.T) { 1566 ctx := context.Background() 1567 r := fstest.NewRun(t) 1568 1569 const body = "------------------------------------------------------------" 1570 file1 := r.WriteFile("potato1", body, t1) 1571 file2 := r.WriteFile("potato2", body, t2) 1572 // Test with known length 1573 bodyReader := io.NopCloser(strings.NewReader(body)) 1574 obj, err := operations.RcatSize(ctx, r.Fremote, file1.Path, bodyReader, int64(len(body)), file1.ModTime, nil) 1575 require.NoError(t, err) 1576 assert.Equal(t, int64(len(body)), obj.Size()) 1577 assert.Equal(t, file1.Path, obj.Remote()) 1578 1579 // Test with unknown length 1580 bodyReader = io.NopCloser(strings.NewReader(body)) // reset Reader 1581 io.NopCloser(strings.NewReader(body)) 1582 obj, err = operations.RcatSize(ctx, r.Fremote, file2.Path, bodyReader, -1, file2.ModTime, nil) 1583 require.NoError(t, err) 1584 assert.Equal(t, int64(len(body)), obj.Size()) 1585 assert.Equal(t, file2.Path, obj.Remote()) 1586 1587 // Check files exist 1588 r.CheckRemoteItems(t, file1, file2) 1589 } 1590 1591 func TestRcatSizeMetadata(t *testing.T) { 1592 r := fstest.NewRun(t) 1593 1594 if !r.Fremote.Features().UserMetadata { 1595 t.Skip("Skipping as destination doesn't support user metadata") 1596 } 1597 1598 ctx := context.Background() 1599 ctx, ci := fs.AddConfig(ctx) 1600 ci.Metadata = true 1601 1602 meta := 
fs.Metadata{ 1603 "key": "value", 1604 "sausage": "potato", 1605 } 1606 1607 const body = "------------------------------------------------------------" 1608 file1 := r.WriteFile("potato1", body, t1) 1609 file2 := r.WriteFile("potato2", body, t2) 1610 1611 // Test with known length 1612 bodyReader := io.NopCloser(strings.NewReader(body)) 1613 obj, err := operations.RcatSize(ctx, r.Fremote, file1.Path, bodyReader, int64(len(body)), file1.ModTime, meta) 1614 require.NoError(t, err) 1615 assert.Equal(t, int64(len(body)), obj.Size()) 1616 assert.Equal(t, file1.Path, obj.Remote()) 1617 1618 // Test with unknown length 1619 bodyReader = io.NopCloser(strings.NewReader(body)) // reset Reader 1620 io.NopCloser(strings.NewReader(body)) 1621 obj, err = operations.RcatSize(ctx, r.Fremote, file2.Path, bodyReader, -1, file2.ModTime, meta) 1622 require.NoError(t, err) 1623 assert.Equal(t, int64(len(body)), obj.Size()) 1624 assert.Equal(t, file2.Path, obj.Remote()) 1625 1626 // Check files exist 1627 r.CheckRemoteItems(t, file1, file2) 1628 1629 // Check metadata OK 1630 for _, path := range []string{file1.Path, file2.Path} { 1631 o, err := r.Fremote.NewObject(ctx, path) 1632 require.NoError(t, err) 1633 gotMeta, err := fs.GetMetadata(ctx, o) 1634 require.NoError(t, err) 1635 // Check the specific user data we set is set 1636 // Likely there will be other values 1637 assert.Equal(t, "value", gotMeta["key"]) 1638 assert.Equal(t, "potato", gotMeta["sausage"]) 1639 } 1640 } 1641 1642 func TestTouchDir(t *testing.T) { 1643 ctx := context.Background() 1644 r := fstest.NewRun(t) 1645 1646 if r.Fremote.Precision() == fs.ModTimeNotSupported { 1647 t.Skip("Skipping test as remote does not support modtime") 1648 } 1649 1650 file1 := r.WriteBoth(ctx, "potato2", "------------------------------------------------------------", t1) 1651 file2 := r.WriteBoth(ctx, "empty space", "-", t2) 1652 file3 := r.WriteBoth(ctx, "sub dir/potato3", "hello", t2) 1653 r.CheckRemoteItems(t, file1, file2, file3) 
1654 1655 accounting.GlobalStats().ResetCounters() 1656 timeValue := time.Date(2010, 9, 8, 7, 6, 5, 4, time.UTC) 1657 err := operations.TouchDir(ctx, r.Fremote, "", timeValue, true) 1658 require.NoError(t, err) 1659 if accounting.Stats(ctx).GetErrors() != 0 { 1660 err = accounting.Stats(ctx).GetLastError() 1661 require.True(t, errors.Is(err, fs.ErrorCantSetModTime) || errors.Is(err, fs.ErrorCantSetModTimeWithoutDelete)) 1662 } else { 1663 file1.ModTime = timeValue 1664 file2.ModTime = timeValue 1665 file3.ModTime = timeValue 1666 r.CheckRemoteItems(t, file1, file2, file3) 1667 } 1668 } 1669 1670 var testMetadata = fs.Metadata{ 1671 // System metadata supported by all backends 1672 "mtime": t1.Format(time.RFC3339Nano), 1673 // User metadata 1674 "potato": "jersey", 1675 } 1676 1677 func TestMkdirMetadata(t *testing.T) { 1678 const name = "dir with metadata" 1679 ctx := context.Background() 1680 ctx, ci := fs.AddConfig(ctx) 1681 ci.Metadata = true 1682 r := fstest.NewRun(t) 1683 features := r.Fremote.Features() 1684 if features.MkdirMetadata == nil { 1685 t.Skip("Skipping test as remote does not support MkdirMetadata") 1686 } 1687 1688 newDst, err := operations.MkdirMetadata(ctx, r.Fremote, name, testMetadata) 1689 require.NoError(t, err) 1690 require.NotNil(t, newDst) 1691 1692 require.True(t, features.ReadDirMetadata, "Expecting ReadDirMetadata to be supported if MkdirMetadata is supported") 1693 1694 // Check the returned directory and one read from the listing 1695 fstest.CheckEntryMetadata(ctx, t, r.Fremote, newDst, testMetadata) 1696 fstest.CheckEntryMetadata(ctx, t, r.Fremote, fstest.NewDirectory(ctx, t, r.Fremote, name), testMetadata) 1697 } 1698 1699 func TestMkdirModTime(t *testing.T) { 1700 const name = "directory with modtime" 1701 ctx := context.Background() 1702 r := fstest.NewRun(t) 1703 if r.Fremote.Features().DirSetModTime == nil && r.Fremote.Features().MkdirMetadata == nil { 1704 t.Skip("Skipping test as remote does not support DirSetModTime or 
MkdirMetadata") 1705 } 1706 newDst, err := operations.MkdirModTime(ctx, r.Fremote, name, t2) 1707 require.NoError(t, err) 1708 1709 // Check the returned directory and one read from the listing 1710 // newDst may be nil here depending on how the modtime was set 1711 if newDst != nil { 1712 fstest.CheckDirModTime(ctx, t, r.Fremote, newDst, t2) 1713 } 1714 fstest.CheckDirModTime(ctx, t, r.Fremote, fstest.NewDirectory(ctx, t, r.Fremote, name), t2) 1715 } 1716 1717 func TestCopyDirMetadata(t *testing.T) { 1718 const nameNonExistent = "non existent directory" 1719 const nameExistent = "existing directory" 1720 ctx := context.Background() 1721 ctx, ci := fs.AddConfig(ctx) 1722 ci.Metadata = true 1723 r := fstest.NewRun(t) 1724 if !r.Fremote.Features().WriteDirMetadata && r.Fremote.Features().MkdirMetadata == nil { 1725 t.Skip("Skipping test as remote does not support WriteDirMetadata or MkdirMetadata") 1726 } 1727 1728 // Create a source local directory with metadata 1729 newSrc, err := operations.MkdirMetadata(ctx, r.Flocal, "dir with metadata to be copied", testMetadata) 1730 require.NoError(t, err) 1731 require.NotNil(t, newSrc) 1732 1733 // First try with the directory not existing 1734 newDst, err := operations.CopyDirMetadata(ctx, r.Fremote, nil, nameNonExistent, newSrc) 1735 require.NoError(t, err) 1736 require.NotNil(t, newDst) 1737 1738 // Check the returned directory and one read from the listing 1739 fstest.CheckEntryMetadata(ctx, t, r.Fremote, newDst, testMetadata) 1740 fstest.CheckEntryMetadata(ctx, t, r.Fremote, fstest.NewDirectory(ctx, t, r.Fremote, nameNonExistent), testMetadata) 1741 1742 // Then try with the directory existing 1743 require.NoError(t, r.Fremote.Rmdir(ctx, nameNonExistent)) 1744 require.NoError(t, r.Fremote.Mkdir(ctx, nameExistent)) 1745 existingDir := fstest.NewDirectory(ctx, t, r.Fremote, nameExistent) 1746 1747 newDst, err = operations.CopyDirMetadata(ctx, r.Fremote, existingDir, "SHOULD BE IGNORED", newSrc) 1748 require.NoError(t, 
err) 1749 require.NotNil(t, newDst) 1750 1751 // Check the returned directory and one read from the listing 1752 fstest.CheckEntryMetadata(ctx, t, r.Fremote, newDst, testMetadata) 1753 fstest.CheckEntryMetadata(ctx, t, r.Fremote, fstest.NewDirectory(ctx, t, r.Fremote, nameExistent), testMetadata) 1754 } 1755 1756 func TestSetDirModTime(t *testing.T) { 1757 const name = "set modtime on existing directory" 1758 ctx, ci := fs.AddConfig(context.Background()) 1759 r := fstest.NewRun(t) 1760 if r.Fremote.Features().DirSetModTime == nil && !r.Fremote.Features().WriteDirSetModTime { 1761 t.Skip("Skipping test as remote does not support DirSetModTime or WriteDirSetModTime") 1762 } 1763 1764 // Check that we obey --no-update-dir-modtime - this should return nil, nil 1765 ci.NoUpdateDirModTime = true 1766 newDst, err := operations.SetDirModTime(ctx, r.Fremote, nil, "set modtime on non existent directory", t2) 1767 require.NoError(t, err) 1768 require.Nil(t, newDst) 1769 ci.NoUpdateDirModTime = false 1770 1771 // First try with the directory not existing - should return an error 1772 newDst, err = operations.SetDirModTime(ctx, r.Fremote, nil, "set modtime on non existent directory", t2) 1773 require.Error(t, err) 1774 require.Nil(t, newDst) 1775 1776 // Then try with the directory existing 1777 require.NoError(t, r.Fremote.Mkdir(ctx, name)) 1778 existingDir := fstest.NewDirectory(ctx, t, r.Fremote, name) 1779 1780 newDst, err = operations.SetDirModTime(ctx, r.Fremote, existingDir, "SHOULD BE IGNORED", t2) 1781 require.NoError(t, err) 1782 require.NotNil(t, newDst) 1783 1784 // Check the returned directory and one read from the listing 1785 // The modtime will only be correct on newDst if it had a SetModTime method 1786 if _, ok := newDst.(fs.SetModTimer); ok { 1787 fstest.CheckDirModTime(ctx, t, r.Fremote, newDst, t2) 1788 } 1789 fstest.CheckDirModTime(ctx, t, r.Fremote, fstest.NewDirectory(ctx, t, r.Fremote, name), t2) 1790 1791 // Now wrap the directory to make the 
SetModTime method return fs.ErrorNotImplemented and check that it falls back correctly 1792 wrappedDir := fs.NewDirWrapper(existingDir.Remote(), fs.NewDir(existingDir.Remote(), existingDir.ModTime(ctx))) 1793 newDst, err = operations.SetDirModTime(ctx, r.Fremote, wrappedDir, "SHOULD BE IGNORED", t1) 1794 require.NoError(t, err) 1795 require.NotNil(t, newDst) 1796 fstest.CheckDirModTime(ctx, t, r.Fremote, fstest.NewDirectory(ctx, t, r.Fremote, name), t1) 1797 } 1798 1799 func TestDirsEqual(t *testing.T) { 1800 ctx := context.Background() 1801 ctx, ci := fs.AddConfig(ctx) 1802 ci.Metadata = true 1803 r := fstest.NewRun(t) 1804 if !r.Fremote.Features().WriteDirMetadata && r.Fremote.Features().MkdirMetadata == nil { 1805 t.Skip("Skipping test as remote does not support WriteDirMetadata or MkdirMetadata") 1806 } 1807 1808 opt := operations.DirsEqualOpt{ 1809 ModifyWindow: fs.GetModifyWindow(ctx, r.Flocal, r.Fremote), 1810 SetDirModtime: true, 1811 SetDirMetadata: true, 1812 } 1813 1814 // Create a source local directory with metadata 1815 src, err := operations.MkdirMetadata(ctx, r.Flocal, "dir with metadata to be copied", testMetadata) 1816 require.NoError(t, err) 1817 require.NotNil(t, src) 1818 1819 // try with nil dst -- should be false 1820 equal := operations.DirsEqual(ctx, src, nil, opt) 1821 assert.False(t, equal) 1822 1823 // make a dest with an equal modtime 1824 dst, err := operations.MkdirModTime(ctx, r.Fremote, "dst", src.ModTime(ctx)) 1825 require.NoError(t, err) 1826 1827 // try with equal modtimes -- should be true 1828 equal = operations.DirsEqual(ctx, src, dst, opt) 1829 assert.True(t, equal) 1830 1831 // try with unequal modtimes -- should be false 1832 dst, err = operations.SetDirModTime(ctx, r.Fremote, dst, "", t2) 1833 require.NoError(t, err) 1834 equal = operations.DirsEqual(ctx, src, dst, opt) 1835 assert.False(t, equal) 1836 1837 // try with unequal modtimes that are within modify window -- should be true 1838 halfWindow := opt.ModifyWindow / 2 
1839 dst, err = operations.SetDirModTime(ctx, r.Fremote, dst, "", src.ModTime(ctx).Add(halfWindow)) 1840 require.NoError(t, err) 1841 equal = operations.DirsEqual(ctx, src, dst, opt) 1842 assert.True(t, equal) 1843 1844 // test ignoretimes -- should be false 1845 ci.IgnoreTimes = true 1846 equal = operations.DirsEqual(ctx, src, dst, opt) 1847 assert.False(t, equal) 1848 1849 // test immutable -- should be true 1850 ci.IgnoreTimes = false 1851 ci.Immutable = true 1852 dst, err = operations.SetDirModTime(ctx, r.Fremote, dst, "", t3) 1853 require.NoError(t, err) 1854 equal = operations.DirsEqual(ctx, src, dst, opt) 1855 assert.True(t, equal) 1856 1857 // test dst newer than src with --update -- should be true 1858 ci.Immutable = false 1859 ci.UpdateOlder = true 1860 equal = operations.DirsEqual(ctx, src, dst, opt) 1861 assert.True(t, equal) 1862 1863 // test no SetDirModtime or SetDirMetadata -- should be true 1864 ci.UpdateOlder = false 1865 opt.SetDirMetadata, opt.SetDirModtime = false, false 1866 equal = operations.DirsEqual(ctx, src, dst, opt) 1867 assert.True(t, equal) 1868 }