sigs.k8s.io/release-sdk@v0.11.1-0.20240417074027-8061fb5e4952/object/objectfakes/fake_store.go

/*
Copyright The Kubernetes Authors.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

// Code generated by counterfeiter. DO NOT EDIT.
package objectfakes

import (
	"sync"

	"sigs.k8s.io/release-sdk/object"
)

type FakeStore struct {
	CopyBucketToBucketStub func(string, string) error
	copyBucketToBucketMutex sync.RWMutex
	copyBucketToBucketArgsForCall []struct {
		arg1 string
		arg2 string
	}
	copyBucketToBucketReturns struct {
		result1 error
	}
	copyBucketToBucketReturnsOnCall map[int]struct {
		result1 error
	}
	CopyToLocalStub func(string, string) error
	copyToLocalMutex sync.RWMutex
	copyToLocalArgsForCall []struct {
		arg1 string
		arg2 string
	}
	copyToLocalReturns struct {
		result1 error
	}
	copyToLocalReturnsOnCall map[int]struct {
		result1 error
	}
	CopyToRemoteStub func(string, string) error
	copyToRemoteMutex sync.RWMutex
	copyToRemoteArgsForCall []struct {
		arg1 string
		arg2 string
	}
	copyToRemoteReturns struct {
		result1 error
	}
	copyToRemoteReturnsOnCall map[int]struct {
		result1 error
	}
	GetMarkerPathStub func(string, string, bool) (string, error)
	getMarkerPathMutex sync.RWMutex
	getMarkerPathArgsForCall []struct {
		arg1 string
		arg2 string
		arg3 bool
	}
	getMarkerPathReturns struct {
		result1 string
		result2 error
	}
	getMarkerPathReturnsOnCall map[int]struct {
		result1 string
		result2 error
	}
	GetReleasePathStub func(string, string, string, bool) (string, error)
	getReleasePathMutex sync.RWMutex
	getReleasePathArgsForCall []struct {
		arg1 string
		arg2 string
		arg3 string
		arg4 bool
	}
	getReleasePathReturns struct {
		result1 string
		result2 error
	}
	getReleasePathReturnsOnCall map[int]struct {
		result1 string
		result2 error
	}
	IsPathNormalizedStub func(string) bool
	isPathNormalizedMutex sync.RWMutex
	isPathNormalizedArgsForCall []struct {
		arg1 string
	}
	isPathNormalizedReturns struct {
		result1 bool
	}
	isPathNormalizedReturnsOnCall map[int]struct {
		result1 bool
	}
	NormalizePathStub func(...string) (string, error)
	normalizePathMutex sync.RWMutex
	normalizePathArgsForCall []struct {
		arg1 []string
	}
	normalizePathReturns struct {
		result1 string
		result2 error
	}
	normalizePathReturnsOnCall map[int]struct {
		result1 string
		result2 error
	}
	PathExistsStub func(string) (bool, error)
	pathExistsMutex sync.RWMutex
	pathExistsArgsForCall []struct {
		arg1 string
	}
	pathExistsReturns struct {
		result1 bool
		result2 error
	}
	pathExistsReturnsOnCall map[int]struct {
		result1 bool
		result2 error
	}
	RsyncRecursiveStub func(string, string) error
	rsyncRecursiveMutex sync.RWMutex
	rsyncRecursiveArgsForCall []struct {
		arg1 string
		arg2 string
	}
	rsyncRecursiveReturns struct {
		result1 error
	}
	rsyncRecursiveReturnsOnCall map[int]struct {
		result1 error
	}
	SetOptionsStub func(...object.OptFn)
	setOptionsMutex sync.RWMutex
	setOptionsArgsForCall []struct {
		arg1 []object.OptFn
	}
	invocations map[string][][]interface{}
	invocationsMutex sync.RWMutex
}

func (fake *FakeStore) CopyBucketToBucket(arg1 string, arg2 string) error {
	fake.copyBucketToBucketMutex.Lock()
	ret, specificReturn := fake.copyBucketToBucketReturnsOnCall[len(fake.copyBucketToBucketArgsForCall)]
	fake.copyBucketToBucketArgsForCall = append(fake.copyBucketToBucketArgsForCall, struct {
		arg1 string
		arg2 string
	}{arg1, arg2})
	stub := fake.CopyBucketToBucketStub
	fakeReturns := fake.copyBucketToBucketReturns
	fake.recordInvocation("CopyBucketToBucket", []interface{}{arg1, arg2})
	fake.copyBucketToBucketMutex.Unlock()
	if stub != nil {
		return stub(arg1, arg2)
	}
	if specificReturn {
		return ret.result1
	}
	return fakeReturns.result1
}

func (fake *FakeStore) CopyBucketToBucketCallCount() int {
	fake.copyBucketToBucketMutex.RLock()
	defer fake.copyBucketToBucketMutex.RUnlock()
	return len(fake.copyBucketToBucketArgsForCall)
}

func (fake *FakeStore) CopyBucketToBucketCalls(stub func(string, string) error) {
	fake.copyBucketToBucketMutex.Lock()
	defer fake.copyBucketToBucketMutex.Unlock()
	fake.CopyBucketToBucketStub = stub
}

func (fake *FakeStore) CopyBucketToBucketArgsForCall(i int) (string, string) {
	fake.copyBucketToBucketMutex.RLock()
	defer fake.copyBucketToBucketMutex.RUnlock()
	argsForCall := fake.copyBucketToBucketArgsForCall[i]
	return argsForCall.arg1, argsForCall.arg2
}

func (fake *FakeStore) CopyBucketToBucketReturns(result1 error) {
	fake.copyBucketToBucketMutex.Lock()
	defer fake.copyBucketToBucketMutex.Unlock()
	fake.CopyBucketToBucketStub = nil
	fake.copyBucketToBucketReturns = struct {
		result1 error
	}{result1}
}

func (fake *FakeStore) CopyBucketToBucketReturnsOnCall(i int, result1 error) {
	fake.copyBucketToBucketMutex.Lock()
	defer fake.copyBucketToBucketMutex.Unlock()
	fake.CopyBucketToBucketStub = nil
	if fake.copyBucketToBucketReturnsOnCall == nil {
		fake.copyBucketToBucketReturnsOnCall = make(map[int]struct {
			result1 error
		})
	}
	fake.copyBucketToBucketReturnsOnCall[i] = struct {
		result1 error
	}{result1}
}

func (fake *FakeStore) CopyToLocal(arg1 string, arg2 string) error {
	fake.copyToLocalMutex.Lock()
	ret, specificReturn := fake.copyToLocalReturnsOnCall[len(fake.copyToLocalArgsForCall)]
	fake.copyToLocalArgsForCall = append(fake.copyToLocalArgsForCall, struct {
		arg1 string
		arg2 string
	}{arg1, arg2})
	stub := fake.CopyToLocalStub
	fakeReturns := fake.copyToLocalReturns
	fake.recordInvocation("CopyToLocal", []interface{}{arg1, arg2})
	fake.copyToLocalMutex.Unlock()
	if stub != nil {
		return stub(arg1, arg2)
	}
	if specificReturn {
		return ret.result1
	}
	return fakeReturns.result1
}

func (fake *FakeStore) CopyToLocalCallCount() int {
	fake.copyToLocalMutex.RLock()
	defer fake.copyToLocalMutex.RUnlock()
	return len(fake.copyToLocalArgsForCall)
}

func (fake *FakeStore) CopyToLocalCalls(stub func(string, string) error) {
	fake.copyToLocalMutex.Lock()
	defer fake.copyToLocalMutex.Unlock()
	fake.CopyToLocalStub = stub
}

func (fake *FakeStore) CopyToLocalArgsForCall(i int) (string, string) {
	fake.copyToLocalMutex.RLock()
	defer fake.copyToLocalMutex.RUnlock()
	argsForCall := fake.copyToLocalArgsForCall[i]
	return argsForCall.arg1, argsForCall.arg2
}

func (fake *FakeStore) CopyToLocalReturns(result1 error) {
	fake.copyToLocalMutex.Lock()
	defer fake.copyToLocalMutex.Unlock()
	fake.CopyToLocalStub = nil
	fake.copyToLocalReturns = struct {
		result1 error
	}{result1}
}

func (fake *FakeStore) CopyToLocalReturnsOnCall(i int, result1 error) {
	fake.copyToLocalMutex.Lock()
	defer fake.copyToLocalMutex.Unlock()
	fake.CopyToLocalStub = nil
	if fake.copyToLocalReturnsOnCall == nil {
		fake.copyToLocalReturnsOnCall = make(map[int]struct {
			result1 error
		})
	}
	fake.copyToLocalReturnsOnCall[i] = struct {
		result1 error
	}{result1}
}

func (fake *FakeStore) CopyToRemote(arg1 string, arg2 string) error {
	fake.copyToRemoteMutex.Lock()
	ret, specificReturn := fake.copyToRemoteReturnsOnCall[len(fake.copyToRemoteArgsForCall)]
	fake.copyToRemoteArgsForCall = append(fake.copyToRemoteArgsForCall, struct {
		arg1 string
		arg2 string
	}{arg1, arg2})
	stub := fake.CopyToRemoteStub
	fakeReturns := fake.copyToRemoteReturns
	fake.recordInvocation("CopyToRemote", []interface{}{arg1, arg2})
	fake.copyToRemoteMutex.Unlock()
	if stub != nil {
		return stub(arg1, arg2)
	}
	if specificReturn {
		return ret.result1
	}
	return fakeReturns.result1
}

func (fake *FakeStore) CopyToRemoteCallCount() int {
	fake.copyToRemoteMutex.RLock()
	defer fake.copyToRemoteMutex.RUnlock()
	return len(fake.copyToRemoteArgsForCall)
}

func (fake *FakeStore) CopyToRemoteCalls(stub func(string, string) error) {
	fake.copyToRemoteMutex.Lock()
	defer fake.copyToRemoteMutex.Unlock()
	fake.CopyToRemoteStub = stub
}

func (fake *FakeStore) CopyToRemoteArgsForCall(i int) (string, string) {
	fake.copyToRemoteMutex.RLock()
	defer fake.copyToRemoteMutex.RUnlock()
	argsForCall := fake.copyToRemoteArgsForCall[i]
	return argsForCall.arg1, argsForCall.arg2
}

func (fake *FakeStore) CopyToRemoteReturns(result1 error) {
	fake.copyToRemoteMutex.Lock()
	defer fake.copyToRemoteMutex.Unlock()
	fake.CopyToRemoteStub = nil
	fake.copyToRemoteReturns = struct {
		result1 error
	}{result1}
}

func (fake *FakeStore) CopyToRemoteReturnsOnCall(i int, result1 error) {
	fake.copyToRemoteMutex.Lock()
	defer fake.copyToRemoteMutex.Unlock()
	fake.CopyToRemoteStub = nil
	if fake.copyToRemoteReturnsOnCall == nil {
		fake.copyToRemoteReturnsOnCall = make(map[int]struct {
			result1 error
		})
	}
	fake.copyToRemoteReturnsOnCall[i] = struct {
		result1 error
	}{result1}
}

func (fake *FakeStore) GetMarkerPath(arg1 string, arg2 string, arg3 bool) (string, error) {
	fake.getMarkerPathMutex.Lock()
	ret, specificReturn := fake.getMarkerPathReturnsOnCall[len(fake.getMarkerPathArgsForCall)]
	fake.getMarkerPathArgsForCall = append(fake.getMarkerPathArgsForCall, struct {
		arg1 string
		arg2 string
		arg3 bool
	}{arg1, arg2, arg3})
	stub := fake.GetMarkerPathStub
	fakeReturns := fake.getMarkerPathReturns
	fake.recordInvocation("GetMarkerPath", []interface{}{arg1, arg2, arg3})
	fake.getMarkerPathMutex.Unlock()
	if stub != nil {
		return stub(arg1, arg2, arg3)
	}
	if specificReturn {
		return ret.result1, ret.result2
	}
	return fakeReturns.result1, fakeReturns.result2
}

func (fake *FakeStore) GetMarkerPathCallCount() int {
	fake.getMarkerPathMutex.RLock()
	defer fake.getMarkerPathMutex.RUnlock()
	return len(fake.getMarkerPathArgsForCall)
}

func (fake *FakeStore) GetMarkerPathCalls(stub func(string, string, bool) (string, error)) {
	fake.getMarkerPathMutex.Lock()
	defer fake.getMarkerPathMutex.Unlock()
	fake.GetMarkerPathStub = stub
}

func (fake *FakeStore) GetMarkerPathArgsForCall(i int) (string, string, bool) {
	fake.getMarkerPathMutex.RLock()
	defer fake.getMarkerPathMutex.RUnlock()
	argsForCall := fake.getMarkerPathArgsForCall[i]
	return argsForCall.arg1, argsForCall.arg2, argsForCall.arg3
}

func (fake *FakeStore) GetMarkerPathReturns(result1 string, result2 error) {
	fake.getMarkerPathMutex.Lock()
	defer fake.getMarkerPathMutex.Unlock()
	fake.GetMarkerPathStub = nil
	fake.getMarkerPathReturns = struct {
		result1 string
		result2 error
	}{result1, result2}
}

func (fake *FakeStore) GetMarkerPathReturnsOnCall(i int, result1 string, result2 error) {
	fake.getMarkerPathMutex.Lock()
	defer fake.getMarkerPathMutex.Unlock()
	fake.GetMarkerPathStub = nil
	if fake.getMarkerPathReturnsOnCall == nil {
		fake.getMarkerPathReturnsOnCall = make(map[int]struct {
			result1 string
			result2 error
		})
	}
	fake.getMarkerPathReturnsOnCall[i] = struct {
		result1 string
		result2 error
	}{result1, result2}
}

func (fake *FakeStore) GetReleasePath(arg1 string, arg2 string, arg3 string, arg4 bool) (string, error) {
	fake.getReleasePathMutex.Lock()
	ret, specificReturn := fake.getReleasePathReturnsOnCall[len(fake.getReleasePathArgsForCall)]
	fake.getReleasePathArgsForCall = append(fake.getReleasePathArgsForCall, struct {
		arg1 string
		arg2 string
		arg3 string
		arg4 bool
	}{arg1, arg2, arg3, arg4})
	stub := fake.GetReleasePathStub
	fakeReturns := fake.getReleasePathReturns
	fake.recordInvocation("GetReleasePath", []interface{}{arg1, arg2, arg3, arg4})
	fake.getReleasePathMutex.Unlock()
	if stub != nil {
		return stub(arg1, arg2, arg3, arg4)
	}
	if specificReturn {
		return ret.result1, ret.result2
	}
	return fakeReturns.result1, fakeReturns.result2
}

func (fake *FakeStore) GetReleasePathCallCount() int {
	fake.getReleasePathMutex.RLock()
	defer fake.getReleasePathMutex.RUnlock()
	return len(fake.getReleasePathArgsForCall)
}

func (fake *FakeStore) GetReleasePathCalls(stub func(string, string, string, bool) (string, error)) {
	fake.getReleasePathMutex.Lock()
	defer fake.getReleasePathMutex.Unlock()
	fake.GetReleasePathStub = stub
}

func (fake *FakeStore) GetReleasePathArgsForCall(i int) (string, string, string, bool) {
	fake.getReleasePathMutex.RLock()
	defer fake.getReleasePathMutex.RUnlock()
	argsForCall := fake.getReleasePathArgsForCall[i]
	return argsForCall.arg1, argsForCall.arg2, argsForCall.arg3, argsForCall.arg4
}

func (fake *FakeStore) GetReleasePathReturns(result1 string, result2 error) {
	fake.getReleasePathMutex.Lock()
	defer fake.getReleasePathMutex.Unlock()
	fake.GetReleasePathStub = nil
	fake.getReleasePathReturns = struct {
		result1 string
		result2 error
	}{result1, result2}
}

func (fake *FakeStore) GetReleasePathReturnsOnCall(i int, result1 string, result2 error) {
	fake.getReleasePathMutex.Lock()
	defer fake.getReleasePathMutex.Unlock()
	fake.GetReleasePathStub = nil
	if fake.getReleasePathReturnsOnCall == nil {
		fake.getReleasePathReturnsOnCall = make(map[int]struct {
			result1 string
			result2 error
		})
	}
	fake.getReleasePathReturnsOnCall[i] = struct {
		result1 string
		result2 error
	}{result1, result2}
}

func (fake *FakeStore) IsPathNormalized(arg1 string) bool {
	fake.isPathNormalizedMutex.Lock()
	ret, specificReturn := fake.isPathNormalizedReturnsOnCall[len(fake.isPathNormalizedArgsForCall)]
	fake.isPathNormalizedArgsForCall = append(fake.isPathNormalizedArgsForCall, struct {
		arg1 string
	}{arg1})
	stub := fake.IsPathNormalizedStub
	fakeReturns := fake.isPathNormalizedReturns
	fake.recordInvocation("IsPathNormalized", []interface{}{arg1})
	fake.isPathNormalizedMutex.Unlock()
	if stub != nil {
		return stub(arg1)
	}
	if specificReturn {
		return ret.result1
	}
	return fakeReturns.result1
}

func (fake *FakeStore) IsPathNormalizedCallCount() int {
	fake.isPathNormalizedMutex.RLock()
	defer fake.isPathNormalizedMutex.RUnlock()
	return len(fake.isPathNormalizedArgsForCall)
}

func (fake *FakeStore) IsPathNormalizedCalls(stub func(string) bool) {
	fake.isPathNormalizedMutex.Lock()
	defer fake.isPathNormalizedMutex.Unlock()
	fake.IsPathNormalizedStub = stub
}

func (fake *FakeStore) IsPathNormalizedArgsForCall(i int) string {
	fake.isPathNormalizedMutex.RLock()
	defer fake.isPathNormalizedMutex.RUnlock()
	argsForCall := fake.isPathNormalizedArgsForCall[i]
	return argsForCall.arg1
}

func (fake *FakeStore) IsPathNormalizedReturns(result1 bool) {
	fake.isPathNormalizedMutex.Lock()
	defer fake.isPathNormalizedMutex.Unlock()
	fake.IsPathNormalizedStub = nil
	fake.isPathNormalizedReturns = struct {
		result1 bool
	}{result1}
}

func (fake *FakeStore) IsPathNormalizedReturnsOnCall(i int, result1 bool) {
	fake.isPathNormalizedMutex.Lock()
	defer fake.isPathNormalizedMutex.Unlock()
	fake.IsPathNormalizedStub = nil
	if fake.isPathNormalizedReturnsOnCall == nil {
		fake.isPathNormalizedReturnsOnCall = make(map[int]struct {
			result1 bool
		})
	}
	fake.isPathNormalizedReturnsOnCall[i] = struct {
		result1 bool
	}{result1}
}

func (fake *FakeStore) NormalizePath(arg1 ...string) (string, error) {
	fake.normalizePathMutex.Lock()
	ret, specificReturn := fake.normalizePathReturnsOnCall[len(fake.normalizePathArgsForCall)]
	fake.normalizePathArgsForCall = append(fake.normalizePathArgsForCall, struct {
		arg1 []string
	}{arg1})
	stub := fake.NormalizePathStub
	fakeReturns := fake.normalizePathReturns
	fake.recordInvocation("NormalizePath", []interface{}{arg1})
	fake.normalizePathMutex.Unlock()
	if stub != nil {
		return stub(arg1...)
	}
	if specificReturn {
		return ret.result1, ret.result2
	}
	return fakeReturns.result1, fakeReturns.result2
}

func (fake *FakeStore) NormalizePathCallCount() int {
	fake.normalizePathMutex.RLock()
	defer fake.normalizePathMutex.RUnlock()
	return len(fake.normalizePathArgsForCall)
}

func (fake *FakeStore) NormalizePathCalls(stub func(...string) (string, error)) {
	fake.normalizePathMutex.Lock()
	defer fake.normalizePathMutex.Unlock()
	fake.NormalizePathStub = stub
}

func (fake *FakeStore) NormalizePathArgsForCall(i int) []string {
	fake.normalizePathMutex.RLock()
	defer fake.normalizePathMutex.RUnlock()
	argsForCall := fake.normalizePathArgsForCall[i]
	return argsForCall.arg1
}

func (fake *FakeStore) NormalizePathReturns(result1 string, result2 error) {
	fake.normalizePathMutex.Lock()
	defer fake.normalizePathMutex.Unlock()
	fake.NormalizePathStub = nil
	fake.normalizePathReturns = struct {
		result1 string
		result2 error
	}{result1, result2}
}

func (fake *FakeStore) NormalizePathReturnsOnCall(i int, result1 string, result2 error) {
	fake.normalizePathMutex.Lock()
	defer fake.normalizePathMutex.Unlock()
	fake.NormalizePathStub = nil
	if fake.normalizePathReturnsOnCall == nil {
		fake.normalizePathReturnsOnCall = make(map[int]struct {
			result1 string
			result2 error
		})
	}
	fake.normalizePathReturnsOnCall[i] = struct {
		result1 string
		result2 error
	}{result1, result2}
}

func (fake *FakeStore) PathExists(arg1 string) (bool, error) {
	fake.pathExistsMutex.Lock()
	ret, specificReturn := fake.pathExistsReturnsOnCall[len(fake.pathExistsArgsForCall)]
	fake.pathExistsArgsForCall = append(fake.pathExistsArgsForCall, struct {
		arg1 string
	}{arg1})
	stub := fake.PathExistsStub
	fakeReturns := fake.pathExistsReturns
	fake.recordInvocation("PathExists", []interface{}{arg1})
	fake.pathExistsMutex.Unlock()
	if stub != nil {
		return stub(arg1)
	}
	if specificReturn {
		return ret.result1, ret.result2
	}
	return fakeReturns.result1, fakeReturns.result2
}

func (fake *FakeStore) PathExistsCallCount() int {
	fake.pathExistsMutex.RLock()
	defer fake.pathExistsMutex.RUnlock()
	return len(fake.pathExistsArgsForCall)
}

func (fake *FakeStore) PathExistsCalls(stub func(string) (bool, error)) {
	fake.pathExistsMutex.Lock()
	defer fake.pathExistsMutex.Unlock()
	fake.PathExistsStub = stub
}

func (fake *FakeStore) PathExistsArgsForCall(i int) string {
	fake.pathExistsMutex.RLock()
	defer fake.pathExistsMutex.RUnlock()
	argsForCall := fake.pathExistsArgsForCall[i]
	return argsForCall.arg1
}

func (fake *FakeStore) PathExistsReturns(result1 bool, result2 error) {
	fake.pathExistsMutex.Lock()
	defer fake.pathExistsMutex.Unlock()
	fake.PathExistsStub = nil
	fake.pathExistsReturns = struct {
		result1 bool
		result2 error
	}{result1, result2}
}

func (fake *FakeStore) PathExistsReturnsOnCall(i int, result1 bool, result2 error) {
	fake.pathExistsMutex.Lock()
	defer fake.pathExistsMutex.Unlock()
	fake.PathExistsStub = nil
	if fake.pathExistsReturnsOnCall == nil {
		fake.pathExistsReturnsOnCall = make(map[int]struct {
			result1 bool
			result2 error
		})
	}
	fake.pathExistsReturnsOnCall[i] = struct {
		result1 bool
		result2 error
	}{result1, result2}
}

func (fake *FakeStore) RsyncRecursive(arg1 string, arg2 string) error {
	fake.rsyncRecursiveMutex.Lock()
	ret, specificReturn := fake.rsyncRecursiveReturnsOnCall[len(fake.rsyncRecursiveArgsForCall)]
	fake.rsyncRecursiveArgsForCall = append(fake.rsyncRecursiveArgsForCall, struct {
		arg1 string
		arg2 string
	}{arg1, arg2})
	stub := fake.RsyncRecursiveStub
	fakeReturns := fake.rsyncRecursiveReturns
	fake.recordInvocation("RsyncRecursive", []interface{}{arg1, arg2})
	fake.rsyncRecursiveMutex.Unlock()
	if stub != nil {
		return stub(arg1, arg2)
	}
	if specificReturn {
		return ret.result1
	}
	return fakeReturns.result1
}

func (fake *FakeStore) RsyncRecursiveCallCount() int {
	fake.rsyncRecursiveMutex.RLock()
	defer fake.rsyncRecursiveMutex.RUnlock()
	return len(fake.rsyncRecursiveArgsForCall)
}

func (fake *FakeStore) RsyncRecursiveCalls(stub func(string, string) error) {
	fake.rsyncRecursiveMutex.Lock()
	defer fake.rsyncRecursiveMutex.Unlock()
	fake.RsyncRecursiveStub = stub
}

func (fake *FakeStore) RsyncRecursiveArgsForCall(i int) (string, string) {
	fake.rsyncRecursiveMutex.RLock()
	defer fake.rsyncRecursiveMutex.RUnlock()
	argsForCall := fake.rsyncRecursiveArgsForCall[i]
	return argsForCall.arg1, argsForCall.arg2
}

func (fake *FakeStore) RsyncRecursiveReturns(result1 error) {
	fake.rsyncRecursiveMutex.Lock()
	defer fake.rsyncRecursiveMutex.Unlock()
	fake.RsyncRecursiveStub = nil
	fake.rsyncRecursiveReturns = struct {
		result1 error
	}{result1}
}

func (fake *FakeStore) RsyncRecursiveReturnsOnCall(i int, result1 error) {
	fake.rsyncRecursiveMutex.Lock()
	defer fake.rsyncRecursiveMutex.Unlock()
	fake.RsyncRecursiveStub = nil
	if fake.rsyncRecursiveReturnsOnCall == nil {
		fake.rsyncRecursiveReturnsOnCall = make(map[int]struct {
			result1 error
		})
	}
	fake.rsyncRecursiveReturnsOnCall[i] = struct {
		result1 error
	}{result1}
}

func (fake *FakeStore) SetOptions(arg1 ...object.OptFn) {
	fake.setOptionsMutex.Lock()
	fake.setOptionsArgsForCall = append(fake.setOptionsArgsForCall, struct {
		arg1 []object.OptFn
	}{arg1})
	stub := fake.SetOptionsStub
	fake.recordInvocation("SetOptions", []interface{}{arg1})
	fake.setOptionsMutex.Unlock()
	if stub != nil {
		fake.SetOptionsStub(arg1...)
	}
}

func (fake *FakeStore) SetOptionsCallCount() int {
	fake.setOptionsMutex.RLock()
	defer fake.setOptionsMutex.RUnlock()
	return len(fake.setOptionsArgsForCall)
}

func (fake *FakeStore) SetOptionsCalls(stub func(...object.OptFn)) {
	fake.setOptionsMutex.Lock()
	defer fake.setOptionsMutex.Unlock()
	fake.SetOptionsStub = stub
}

func (fake *FakeStore) SetOptionsArgsForCall(i int) []object.OptFn {
	fake.setOptionsMutex.RLock()
	defer fake.setOptionsMutex.RUnlock()
	argsForCall := fake.setOptionsArgsForCall[i]
	return argsForCall.arg1
}

func (fake *FakeStore) Invocations() map[string][][]interface{} {
	fake.invocationsMutex.RLock()
	defer fake.invocationsMutex.RUnlock()
	fake.copyBucketToBucketMutex.RLock()
	defer fake.copyBucketToBucketMutex.RUnlock()
	fake.copyToLocalMutex.RLock()
	defer fake.copyToLocalMutex.RUnlock()
	fake.copyToRemoteMutex.RLock()
	defer fake.copyToRemoteMutex.RUnlock()
	fake.getMarkerPathMutex.RLock()
	defer fake.getMarkerPathMutex.RUnlock()
	fake.getReleasePathMutex.RLock()
	defer fake.getReleasePathMutex.RUnlock()
	fake.isPathNormalizedMutex.RLock()
	defer fake.isPathNormalizedMutex.RUnlock()
	fake.normalizePathMutex.RLock()
	defer fake.normalizePathMutex.RUnlock()
	fake.pathExistsMutex.RLock()
	defer fake.pathExistsMutex.RUnlock()
	fake.rsyncRecursiveMutex.RLock()
	defer fake.rsyncRecursiveMutex.RUnlock()
	fake.setOptionsMutex.RLock()
	defer fake.setOptionsMutex.RUnlock()
	copiedInvocations := map[string][][]interface{}{}
	for key, value := range fake.invocations {
		copiedInvocations[key] = value
	}
	return copiedInvocations
}

func (fake *FakeStore) recordInvocation(key string, args []interface{}) {
	fake.invocationsMutex.Lock()
	defer fake.invocationsMutex.Unlock()
	if fake.invocations == nil {
		fake.invocations = map[string][][]interface{}{}
	}
	if fake.invocations[key] == nil {
		fake.invocations[key] = [][]interface{}{}
	}
	fake.invocations[key] = append(fake.invocations[key], args)
}

var _ object.Store = new(FakeStore)
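
// Illustrative usage (not part of the generated code above): a minimal sketch of
// how a test in another package might configure and inspect this fake. It relies
// only on the exported FakeStore methods defined in this file; the test name and
// the error message are made up for the example, and such a test would live in a
// separate _test.go file rather than in this generated source.
//
//	func TestStoreCopyToLocal(t *testing.T) {
//		fake := &objectfakes.FakeStore{}
//
//		// Default return value for every call, plus a specific
//		// return value for the second call (call index 1).
//		fake.CopyToLocalReturns(nil)
//		fake.CopyToLocalReturnsOnCall(1, errors.New("copy failed"))
//
//		first := fake.CopyToLocal("gs://bucket/object", "/tmp/object")  // nil
//		second := fake.CopyToLocal("gs://bucket/object", "/tmp/object") // "copy failed"
//
//		if first != nil || second == nil {
//			t.Fatalf("unexpected results: %v, %v", first, second)
//		}
//		if fake.CopyToLocalCallCount() != 2 {
//			t.Fatalf("expected 2 calls, got %d", fake.CopyToLocalCallCount())
//		}
//		if src, dst := fake.CopyToLocalArgsForCall(0); src != "gs://bucket/object" || dst != "/tmp/object" {
//			t.Fatalf("unexpected arguments: %s, %s", src, dst)
//		}
//	}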