Merge pull request #104 from infosiftr/retry-504
Add 504 to the list of responses we retry on
tianon authored Jan 16, 2025
2 parents e7b1446 + 0560d0f commit a947401
Showing 1 changed file with 3 additions and 2 deletions.
5 changes: 3 additions & 2 deletions registry/rate-limits.go
@@ -2,6 +2,7 @@ package registry
 
 import (
 	"net/http"
+	"slices"
 	"time"
 
 	"golang.org/x/time/rate"
@@ -24,7 +25,7 @@ func (d *rateLimitedRetryingRoundTripper) RoundTrip(req *http.Request) (*http.Re
 		// cap request retries at once per second
 		requestRetryLimiter = rate.NewLimiter(rate.Every(time.Second), 1)
 
-		// if we see 3x (503 or 502 or 500) during retry, we should bail
+		// if we see 50x three times during retry, we should bail
 		maxTry50X = 3
 
 		ctx = req.Context()
@@ -54,7 +55,7 @@ func (d *rateLimitedRetryingRoundTripper) RoundTrip(req *http.Request) (*http.Re
 		}
 
 		// certain status codes should result in a few auto-retries (especially with the automatic retry delay this injects), but up to a limit so we don't contribute to the "thundering herd" too much in a serious outage
-		if (res.StatusCode == 503 || res.StatusCode == 502 || res.StatusCode == 500) && maxTry50X > 1 {
+		if maxTry50X > 1 && slices.Contains([]int{500, 502, 503, 504}, res.StatusCode) {
 			maxTry50X--
 			doRetry = true
 			// no need to eat up the rate limiter tokens as we do for 429 because this is not a rate limiting error (and we have the "requestRetryLimiter" that separately limits our retries of *this* request)
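For reference, a minimal, self-contained sketch of the retry decision this change enables. The retryable50X slice and shouldRetry50X helper are hypothetical names introduced here for illustration only (they are not part of registry/rate-limits.go), but the check mirrors the new maxTry50X > 1 && slices.Contains(...) condition:

package main

import (
	"fmt"
	"slices"
)

// retryable50X lists the server-error statuses the commit retries on: 500, 502, 503, and now 504.
var retryable50X = []int{500, 502, 503, 504}

// shouldRetry50X is a hypothetical helper: retry while the retry budget is not yet
// exhausted and the response status is one of the retryable 50x codes.
func shouldRetry50X(statusCode, remaining int) bool {
	return remaining > 1 && slices.Contains(retryable50X, statusCode)
}

func main() {
	maxTry50X := 3 // same starting budget as the diff above
	for _, code := range []int{504, 503, 429, 200} {
		retry := shouldRetry50X(code, maxTry50X)
		fmt.Printf("status %d, budget %d: retry? %v\n", code, maxTry50X, retry)
		if retry {
			maxTry50X-- // spend one retry from the budget, as the real loop does
		}
	}
}

Keeping the retryable codes in one slice and testing membership with slices.Contains (available since Go 1.21) makes future additions like this one a single-element change rather than yet another || clause.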