path: root/vendor/github.com/minio/minio-go/v7/retry.go
Diffstat (limited to 'vendor/github.com/minio/minio-go/v7/retry.go')
-rw-r--r--  vendor/github.com/minio/minio-go/v7/retry.go  27
1 file changed, 15 insertions, 12 deletions
diff --git a/vendor/github.com/minio/minio-go/v7/retry.go b/vendor/github.com/minio/minio-go/v7/retry.go
index ed954017c..b83d1b2e5 100644
--- a/vendor/github.com/minio/minio-go/v7/retry.go
+++ b/vendor/github.com/minio/minio-go/v7/retry.go
@@ -21,6 +21,8 @@ import (
 	"context"
 	"crypto/x509"
 	"errors"
+	"iter"
+	"math"
 	"net/http"
 	"net/url"
 	"time"
@@ -45,9 +47,7 @@ var DefaultRetryCap = time.Second
 
 // newRetryTimer creates a timer with exponentially increasing
 // delays until the maximum retry attempts are reached.
-func (c *Client) newRetryTimer(ctx context.Context, maxRetry int, baseSleep, maxSleep time.Duration, jitter float64) <-chan int {
-	attemptCh := make(chan int)
-
+func (c *Client) newRetryTimer(ctx context.Context, maxRetry int, baseSleep, maxSleep time.Duration, jitter float64) iter.Seq[int] {
 	// computes the exponential backoff duration according to
 	// https://www.awsarchitectureblog.com/2015/03/backoff.html
 	exponentialBackoffWait := func(attempt int) time.Duration {
@@ -64,18 +64,22 @@ func (c *Client) newRetryTimer(ctx context.Context, maxRetry int, baseSleep, max
 		if sleep > maxSleep {
 			sleep = maxSleep
 		}
-		if jitter != NoJitter {
+		if math.Abs(jitter-NoJitter) > 1e-9 {
 			sleep -= time.Duration(c.random.Float64() * float64(sleep) * jitter)
 		}
 		return sleep
 	}
 
-	go func() {
-		defer close(attemptCh)
-		for i := 0; i < maxRetry; i++ {
-			select {
-			case attemptCh <- i + 1:
-			case <-ctx.Done():
+	return func(yield func(int) bool) {
+		// if context is already canceled, skip yield
+		select {
+		case <-ctx.Done():
+			return
+		default:
+		}
+
+		for i := range maxRetry {
+			if !yield(i) {
 				return
 			}
 
@@ -85,8 +89,7 @@ func (c *Client) newRetryTimer(ctx context.Context, maxRetry int, baseSleep, max
 				return
 			}
 		}
-	}()
-	return attemptCh
+	}
 }
 
 // List of AWS S3 error codes which are retryable.
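
For context on the API change above: returning an iter.Seq[int] (a Go 1.23 range-over-function iterator) lets the retry loop be consumed with a plain for ... range statement instead of reading attempt numbers from a goroutine-fed channel that had to be drained or canceled. Below is a minimal, self-contained sketch of that consumption pattern; retryTimer here is a hypothetical simplified stand-in, not the unexported Client.newRetryTimer from the diff.

package main

import (
	"context"
	"fmt"
	"iter"
	"time"
)

// retryTimer is a hypothetical, simplified stand-in for the iterator returned
// by newRetryTimer in the diff above: it yields attempt indices, sleeps a fixed
// delay between attempts, and stops early when the context is canceled or the
// caller breaks out of the loop.
func retryTimer(ctx context.Context, maxRetry int, sleep time.Duration) iter.Seq[int] {
	return func(yield func(int) bool) {
		for i := range maxRetry {
			if !yield(i) {
				return // caller stopped ranging (e.g. the request succeeded)
			}
			select {
			case <-time.After(sleep):
			case <-ctx.Done():
				return
			}
		}
	}
}

func main() {
	ctx := context.Background()
	// Range-over-function consumption: no channel to drain, no goroutine to leak.
	for attempt := range retryTimer(ctx, 3, 10*time.Millisecond) {
		fmt.Println("attempt", attempt)
		// A real caller would issue the request here and break on success.
	}
}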