Use regular retry loop when aborted attempts are exhausted
pbthif committed Feb 2, 2024
1 parent d37f4c4 commit 8502344
Showing 2 changed files with 10 additions and 6 deletions.
First changed file:

@@ -150,7 +150,9 @@ public extension HTTPOperationsClient {
  let shouldRetryOnError = retryOnError(error)

  // For requests that fail for transient connection issues (StreamClosed, remoteConnectionClosed)
- // don't consume retry attempts and don't use expotential backoff
+ // don't consume retry attempts and don't use exponential backoff.
+ // If aborted attempts are exhausted, we'll treat the aborted attempt just like any other retriable
+ // error, by consuming a retry attempt and applying exponential backoff.
  if self.abortedAttemptsRemaining > 0 && treatAsAbortedAttempt(cause: error.cause) {
  logger.debug(
  "Request aborted with error: \(error). Retrying in \(self.waitOnAbortedAttemptMs) ms.")
@@ -160,8 +162,8 @@ public extension HTTPOperationsClient {
  try await Task.sleep(nanoseconds: UInt64(self.waitOnAbortedAttemptMs) * millisecondsToNanoSeconds)

  return try await self.executeWithOutput()
- // if there are retries remaining (and haven't exhausted aborted attempts) and we should retry on this error
- } else if self.abortedAttemptsRemaining > 0 && self.retriesRemaining > 0 && shouldRetryOnError {
+ // if there are retries remaining (and we've exhausted aborted attempts) and we should retry on this error
+ } else if self.retriesRemaining > 0 && shouldRetryOnError {
  // determine the required interval
  let retryInterval = Int(retryConfiguration.getRetryInterval(retriesRemaining: retriesRemaining))

Second changed file:

@@ -125,7 +125,9 @@ public extension HTTPOperationsClient {
  let shouldRetryOnError = retryOnError(error)

  // For requests that fail for transient connection issues (StreamClosed, remoteConnectionClosed)
- // don't consume retry attempts and don't use expotential backoff
+ // don't consume retry attempts and don't use exponential backoff.
+ // If aborted attempts are exhausted, we'll treat the aborted attempt just like any other retriable
+ // error, by consuming a retry attempt and applying exponential backoff.
  if self.abortedAttemptsRemaining > 0 && treatAsAbortedAttempt(cause: error.cause) {
  logger.debug(
  "Request aborted with error: \(error). Retrying in \(self.waitOnAbortedAttemptMs) ms.")
@@ -137,8 +139,8 @@ public extension HTTPOperationsClient {
  try await self.executeWithoutOutput()

  return
- // if there are retries remaining (and haven't exhausted aborted attempts) and we should retry on this error
- } else if self.abortedAttemptsRemaining > 0 && self.retriesRemaining > 0 && shouldRetryOnError {
+ // if there are retries remaining (and we've exhausted aborted attempts) and we should retry on this error
+ } else if self.retriesRemaining > 0 && shouldRetryOnError {
  // determine the required interval
  let retryInterval = Int(retryConfiguration.getRetryInterval(retriesRemaining: retriesRemaining))

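Taken together, the two hunks change only the second branch of the retry decision: an error that looks like an aborted attempt still gets the fixed short wait while abortedAttemptsRemaining > 0, and once that budget is gone it now falls through to the regular retry branch instead of failing outright. Below is a minimal standalone sketch of that flow, not the client's actual implementation. The names abortedAttemptsRemaining, retriesRemaining, waitOnAbortedAttemptMs, retryOnError, treatAsAbortedAttempt, and millisecondsToNanoSeconds come from the diff; the function shape, the closure parameters, and retryIntervalMs (standing in for retryConfiguration.getRetryInterval(retriesRemaining:)) are assumptions made for illustration.

let millisecondsToNanoSeconds: UInt64 = 1_000_000

// Illustrative sketch only; see the note above for which names are assumptions.
func executeWithRetries<Output>(
    abortedAttemptsRemaining: Int,
    retriesRemaining: Int,
    waitOnAbortedAttemptMs: Int,
    retryOnError: (Error) -> Bool,
    treatAsAbortedAttempt: (Error) -> Bool,   // the real client inspects error.cause here
    retryIntervalMs: (Int) -> Int,            // stands in for retryConfiguration.getRetryInterval(retriesRemaining:)
    attempt: () async throws -> Output
) async throws -> Output {
    var abortedAttemptsRemaining = abortedAttemptsRemaining
    var retriesRemaining = retriesRemaining

    while true {
        do {
            return try await attempt()
        } catch {
            let shouldRetryOnError = retryOnError(error)

            if abortedAttemptsRemaining > 0 && treatAsAbortedAttempt(error) {
                // Transient connection issue (e.g. StreamClosed): wait a fixed short interval,
                // consume only the aborted-attempt budget, leave retriesRemaining untouched.
                abortedAttemptsRemaining -= 1
                try await Task.sleep(nanoseconds: UInt64(waitOnAbortedAttemptMs) * millisecondsToNanoSeconds)
            } else if retriesRemaining > 0 && shouldRetryOnError {
                // After this commit, an aborted attempt whose budget is exhausted falls through
                // to here and is treated like any other retriable error: consume a retry attempt
                // and apply the configured backoff interval.
                let interval = retryIntervalMs(retriesRemaining)
                retriesRemaining -= 1
                try await Task.sleep(nanoseconds: UInt64(interval) * millisecondsToNanoSeconds)
            } else {
                throw error
            }
        }
    }
}

The two budgets stay separate by design: transient connection drops are retried after a fixed short wait and don't burn retry attempts, while retriable failures (including aborted attempts once their budget is spent) consume the retry count and wait for the configured backoff interval, presumably so the client doesn't hammer a struggling endpoint.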
