/// <summary>
/// Uploads each queue in <paramref name="queueList"/> to S3 under <paramref name="prefix"/>.
/// On <see cref="HttpStatusCode.ServiceUnavailable"/> the failed queue is appended to
/// <paramref name="retryQueueList"/> (for the caller to retry) and the loop waits for an
/// exponential-backoff delay before continuing, up to a fixed retry limit. Any other
/// S3 error — or exceeding the retry limit — is rethrown to the caller.
/// </summary>
/// <param name="bucketName">Target S3 bucket.</param>
/// <param name="prefix">Key prefix for the uploaded objects.</param>
/// <param name="uploadCallback">Progress callback forwarded to the concurrent uploader.</param>
/// <param name="exponentialBackoff">Backoff policy providing the wait time before each retry.</param>
/// <param name="queueList">Queues of file-hash statuses to upload.</param>
/// <param name="retryQueueList">Receives queues that failed with 503 and should be retried.</param>
/// <returns>
/// <c>success</c> is true only if no queue raised a ServiceUnavailable error;
/// <c>exception</c> carries the last such error (null on full success).
/// </returns>
private async Task<(bool success, AmazonS3Exception exception)> TryRetryableFileUploadAsyncCore(string bucketName, string prefix, Action<UploadProgressArgs> uploadCallback, ExponentialBackoff exponentialBackoff, List<Queue<S3FileHashStatus>> queueList, List<Queue<S3FileHashStatus>> retryQueueList)
{
    // How many times uploading from queueList may be retried before giving up.
    var retryLimit = 5;
    var currentRetry = 0;
    AmazonS3Exception exception = null;
    foreach (var queue in queueList)
    {
        try
        {
            await ConcurretFileUploadAsync(bucketName, queue, prefix, uploadCallback);
            Log($"Partial Complete : Upload to S3. ({queue.Count})");
        }
        catch (AmazonS3Exception ex)
        {
            exception = ex;
            switch (ex.StatusCode)
            {
                case HttpStatusCode.ServiceUnavailable:
                {
                    // Put error queue into retry queue list so the caller can retry it.
                    retryQueueList.Add(queue);

                    // Re-throw when retry limit exceeded.
                    // NOTE: `throw;` (not `throw ex;`) preserves the original stack trace.
                    if (currentRetry >= retryLimit)
                    {
                        Error($"Error : Exceeded retry count limit ({currentRetry}/{retryLimit}). Stop execution.");
                        throw;
                    }

                    // Request rejected because of "Too many Requests"? Wait for exponential backoff.
                    // Sample Error :
                    // (Status Code : 502) Unhandled Exception: Amazon.S3.AmazonS3Exception: Please reduce your request rate. --->Amazon.Runtime.Internal.HttpErrorResponseException: Exception of type 'Amazon.Runtime.Internal.HttpErrorResponseException' was thrown.
                    var waitTime = exponentialBackoff.GetNextDelay();
                    Warn($"Warning : Exception happen during upload, re-queue to last then wait {waitTime.TotalSeconds}sec for next retry. Exception count in Queue List ({currentRetry}/{retryLimit}). {ex.GetType().FullName}, {ex.Message}, {ex.StackTrace}");

                    // Adjust next retry timing : wait for exponential backoff.
                    await Task.Delay(waitTime);

                    // Increment retry count, then move on to the next queue.
                    currentRetry++;
                    continue;
                }
                default:
                    // Non-retryable status: propagate with original stack trace intact.
                    throw;
            }
        }
    }
    return ((exception == null), exception);
}