github.com/mailgun/holster/v4@v4.20.0/retry/retry_test.go

package retry_test

import (
	"context"
	"fmt"
	"sync"
	"testing"
	"time"

	"github.com/mailgun/holster/v4/errors"
	"github.com/mailgun/holster/v4/retry"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)

var errCause = errors.New("cause of error")

// TestUntilInterval retries every 10ms until the 200ms context deadline cancels the retry loop.
func TestUntilInterval(t *testing.T) {
	ctx, cancel := context.WithTimeout(context.Background(), time.Millisecond*200)
	defer cancel()
	err := retry.Until(ctx, retry.Interval(time.Millisecond*10), func(ctx context.Context, att int) error {
		return errCause
	})

	require.Error(t, err)
	assert.True(t, errors.Is(err, &retry.Err{}))

	// Inspect the error
	var retryErr *retry.Err
	assert.True(t, errors.As(err, &retryErr))
	assert.GreaterOrEqual(t, retryErr.Attempts, 18)
	assert.LessOrEqual(t, retryErr.Attempts, 20)
	assert.Equal(t, retry.Cancelled, retryErr.Reason)

	// Cause() works as expected
	cause := errors.Cause(err)
	assert.Equal(t, errCause, cause)
}

// TestUntilNoError returns immediately when the callback succeeds.
func TestUntilNoError(t *testing.T) {
	ctx, cancel := context.WithTimeout(context.Background(), time.Millisecond*200)
	defer cancel()
	err := retry.Until(ctx, retry.Interval(time.Millisecond*10), func(ctx context.Context, att int) error {
		return nil
	})

	require.NoError(t, err)
	assert.False(t, errors.Is(err, &retry.Err{}))
}

// TestUntilAttempts stops once the configured number of attempts is exhausted.
func TestUntilAttempts(t *testing.T) {
	ctx := context.Background()
	err := retry.Until(ctx, retry.Attempts(10, time.Millisecond*10), func(ctx context.Context, att int) error {
		return fmt.Errorf("failed attempt '%d'", att)
	})

	require.Error(t, err)
	assert.True(t, errors.Is(err, &retry.Err{}))
	assert.Equal(t, "on attempt '10'; attempts exhausted: failed attempt '10'", err.Error())
}

// TestUntilStopped halts retries as soon as the callback wraps its error with retry.Stop().
func TestUntilStopped(t *testing.T) {
	ctx := context.Background()
	err := retry.Until(ctx, retry.Attempts(10, time.Millisecond*10), func(ctx context.Context, att int) error {
		return retry.Stop(fmt.Errorf("failed attempt '%d'", att))
	})
	require.Error(t, err)
	// Inspect the error
	var retryErr *retry.Err
	assert.True(t, errors.As(err, &retryErr))
	assert.Equal(t, 1, retryErr.Attempts)
	assert.Equal(t, retry.Stopped, retryErr.Reason)
	assert.Equal(t, "on attempt '1'; retry stopped: failed attempt '1'", err.Error())
}

// TestUntilExponential exhausts the attempt limit of an exponential backoff.
func TestUntilExponential(t *testing.T) {
	ctx := context.Background()
	backOff := &retry.ExponentialBackOff{
		Min:      time.Millisecond,
		Max:      time.Millisecond * 100,
		Factor:   2,
		Attempts: 10,
	}

	err := retry.Until(ctx, backOff, func(ctx context.Context, att int) error {
		return fmt.Errorf("failed attempt '%d'", att)
	})

	require.Error(t, err)
	assert.True(t, errors.Is(err, &retry.Err{}))
	assert.Equal(t, "on attempt '11'; attempts exhausted: failed attempt '11'", err.Error())
}

// TestUntilExponentialCancelled relies on the context deadline to stop a backoff that has no attempt limit.
func TestUntilExponentialCancelled(t *testing.T) {
	ctx, cancel := context.WithTimeout(context.Background(), time.Millisecond*100)
	defer cancel()
	backOff := &retry.ExponentialBackOff{
		Min:    time.Millisecond,
		Max:    time.Millisecond * 100,
		Factor: 2,
	}

	err := retry.Until(ctx, backOff, func(ctx context.Context, att int) error {
		return fmt.Errorf("failed attempt '%d'", att)
	})

	require.Error(t, err)
	assert.True(t, errors.Is(err, &retry.Err{}))
	assert.Equal(t, "on attempt '6'; context cancelled: failed attempt '6'", err.Error())
}
// TestAsync verifies that RetryAsync de-duplicates retries by key and exposes their errors.
func TestAsync(t *testing.T) {
	ctx := context.Background()
	async := retry.NewRetryAsync()
	a1 := async.Async("one", ctx, retry.Attempts(10, time.Millisecond*10), func(ctx context.Context, i int) error { return errCause })
	a2 := async.Async("two", ctx, retry.Attempts(10, time.Millisecond*10), func(ctx context.Context, i int) error { return errCause })
	a3 := async.Async("thr", ctx, retry.Attempts(10, time.Millisecond*10), func(ctx context.Context, i int) error { return errCause })

	// Creates the async retry
	f1 := async.Async("for", ctx, retry.Attempts(10, time.Millisecond*100), func(ctx context.Context, i int) error { return errCause })
	// Returns a handle to the currently running async retry
	f2 := async.Async("for", ctx, retry.Attempts(10, time.Millisecond*100), func(ctx context.Context, i int) error { return errCause })

	// They are the same
	assert.Equal(t, f1, f2)
	// Should contain the error for our inspection
	assert.Equal(t, errCause, f2.Err)
	// Should report that the retry is still running
	assert.Equal(t, true, f2.Retrying)

	// Retries are all still running
	time.Sleep(time.Millisecond * 10)
	assert.Equal(t, 4, async.Len())

	// We can inspect the errors for all running async retries
	errs := async.Errs()
	require.NotNil(t, errs)
	for _, e := range errs {
		assert.Equal(t, e, errCause)
	}

	// Wait for all the async retries to exhaust their timeouts
	async.Wait()

	require.Equal(t, errCause, a1.Err)
	require.Equal(t, errCause, a2.Err)
	require.Equal(t, errCause, a3.Err)
	require.Equal(t, errCause, f1.Err)
	require.Equal(t, errCause, f2.Err)
}

// TestBackoffRace shares a single ExponentialBackOff across goroutines; run with -race to detect data races.
func TestBackoffRace(t *testing.T) {
	ctx, cancel := context.WithTimeout(context.Background(), time.Millisecond*100)
	defer cancel()
	backOff := &retry.ExponentialBackOff{
		Min:    time.Millisecond,
		Max:    time.Millisecond * 100,
		Factor: 2,
	}

	var wg sync.WaitGroup
	for i := 0; i < 10; i++ {
		wg.Add(1)
		go func() {
			defer wg.Done()
			_ = retry.Until(ctx, backOff, func(ctx context.Context, att int) error {
				t.Logf("Attempts: %d", backOff.NumRetries())
				return fmt.Errorf("failed attempt '%d'", att)
			})
		}()
	}
	wg.Wait()
}

// TestBackOffNew checks that New() produces a backoff equal to the original configuration.
func TestBackOffNew(t *testing.T) {
	backOff := &retry.ExponentialBackOff{
		Min:    time.Millisecond,
		Max:    time.Millisecond * 100,
		Factor: 2,
	}
	bo := backOff.New()
	assert.Equal(t, bo, backOff)
}