/// <summary>
/// Polls all flat container base URLs until the provided ID and version are available.
/// </summary>
/// <param name="id">The package ID.</param>
/// <param name="version">The package version.</param>
/// <param name="logger">The logger.</param>
/// <returns>Returns a task that completes when the package is available or the timeout has occurred.</returns>
public Task WaitForPackageAsync(string id, string version, ITestOutputHelper logger)
{
    // Retry for connection problem, timeout, or HTTP 5XX. We are okay with retrying on 5XX in this case because
    // the origin server is blob storage. If they are having some internal server error, there's not much we can
    // do.
    return RetryUtility.ExecuteWithRetry(
        async () =>
        {
            var flatContainerUrls = await _v3IndexClient.GetFlatContainerBaseUrlsAsync(logger);

            Assert.True(flatContainerUrls.Count > 0, "At least one flat container base URL must be configured.");

            logger.WriteLine(
                $"Waiting for package {id} {version} to be available on flat container base URLs:" +
                Environment.NewLine +
                string.Join(Environment.NewLine, flatContainerUrls.Select(u => $" - {u}")));

            // Poll every base URL in parallel; the overall task completes when all have the package.
            var pollTasks = flatContainerUrls
                .Select(baseUrl => PollAsync(baseUrl, id, version, logger))
                .ToList();

            await Task.WhenAll(pollTasks);
        },
        ex =>
        {
            // Transient network failures are always worth another attempt.
            if (ex.HasTypeOrInnerType<SocketException>() ||
                ex.HasTypeOrInnerType<TaskCanceledException>())
            {
                return true;
            }

            // Blob storage 5XX responses are outside our control, so retry those as well.
            return ex.HasTypeOrInnerType<HttpRequestMessageException>(out var httpEx) &&
                (httpEx.StatusCode == HttpStatusCode.InternalServerError ||
                 httpEx.StatusCode == HttpStatusCode.BadGateway ||
                 httpEx.StatusCode == HttpStatusCode.ServiceUnavailable ||
                 httpEx.StatusCode == HttpStatusCode.GatewayTimeout);
        },
        logger: logger);
}
/// <summary>
/// Runs the provided <paramref name="retrieve"/> delegate against every flat container base URL
/// for the given package ID, version, and content type, returning one result per base URL.
/// </summary>
/// <typeparam name="TResult">The type of content each retrieval produces.</typeparam>
/// <param name="id">The package ID.</param>
/// <param name="version">The package version.</param>
/// <param name="fileType">The flat container content type to retrieve.</param>
/// <param name="logger">The logger.</param>
/// <param name="retrieve">Delegate that fetches the content from a single base URL.</param>
/// <returns>A task producing the result from each configured flat container base URL.</returns>
public Task<TResult[]> TryAndGetFileContent<TResult>(
    string id,
    string version,
    FlatContainerContentType fileType,
    ITestOutputHelper logger,
    Func<string, string, string, FlatContainerContentType, ITestOutputHelper, Task<TResult>> retrieve)
{
    // Retry for connection problem, timeout, or HTTP 5XX since the origin server is blob storage.
    return RetryUtility.ExecuteWithRetry(
        async () =>
        {
            var flatContainerUrls = await _v3IndexClient.GetFlatContainerBaseUrlsAsync(logger);

            Assert.True(flatContainerUrls.Count > 0, "At least one flat container base URL must be configured.");

            // Fetch from every base URL in parallel.
            var retrieveTasks = flatContainerUrls
                .Select(baseUrl => retrieve(baseUrl, id, version, fileType, logger))
                .ToList();

            return await Task.WhenAll(retrieveTasks);
        },
        ex =>
        {
            // Transient network failures are always worth another attempt.
            if (ex.HasTypeOrInnerType<SocketException>() ||
                ex.HasTypeOrInnerType<TaskCanceledException>())
            {
                return true;
            }

            // Blob storage 5XX responses are outside our control, so retry those as well.
            return ex.HasTypeOrInnerType<HttpRequestMessageException>(out var httpEx) &&
                (httpEx.StatusCode == HttpStatusCode.InternalServerError ||
                 httpEx.StatusCode == HttpStatusCode.BadGateway ||
                 httpEx.StatusCode == HttpStatusCode.ServiceUnavailable ||
                 httpEx.StatusCode == HttpStatusCode.GatewayTimeout);
        },
        logger: logger);
}
// Verifies that ExecuteWithRetry keeps retrying through failed attempts and
// returns the result once an attempt finally succeeds.
public async Task ReturnsResultAfterSomeRetries()
{
    var attempts = 0;

    // Fails on the first three attempts, then succeeds with 23 on the fourth.
    async Task<int> FailThreeTimes()
    {
        attempts++;
        if (attempts > 3)
        {
            return 23;
        }

        await Task.Yield();
        throw new InvalidOperationException("Bad! " + attempts);
    }

    var result = await RetryUtility.ExecuteWithRetry(
        () => Task.Run(FailThreeTimes),
        ex => true,
        maxAttempts: 5,
        sleepDuration: TimeSpan.Zero,
        logger: _output);

    // Three failures plus the successful fourth attempt.
    Assert.Equal(4, attempts);
    Assert.Equal(23, result);
}
/// <summary>
/// Fetches and deserializes the V3 index document from the configured URL.
/// Only DNS name resolution failures are retried; any other failure surfaces immediately.
/// </summary>
/// <param name="logger">The logger.</param>
/// <returns>The deserialized V3 index.</returns>
private async Task<V3Index> GetV3IndexAsync(ITestOutputHelper logger)
{
    return await RetryUtility.ExecuteWithRetry(
        () => _httpClient.GetJsonAsync<V3Index>(
            _testSettings.V3IndexUrl,
            allowNotFound: false,
            logResponseBody: false,
            logger: logger),
        ex => ex.HasTypeOrInnerType<WebException>(out var webEx) &&
              webEx.Status == WebExceptionStatus.NameResolutionFailure,
        logger: logger);
}
/// <summary>
/// Polls every registration base URL (SemVer 2.0.0 or original, per <paramref name="semVer2"/>)
/// until <paramref name="isComplete"/> is satisfied for the given package ID and version.
/// </summary>
/// <param name="id">The package ID.</param>
/// <param name="version">The package version.</param>
/// <param name="semVer2">Whether to poll the SemVer 2.0.0 registration hives.</param>
/// <param name="isComplete">Predicate evaluated against each catalog entry to decide completion.</param>
/// <param name="startingMessage">Message logged before polling begins.</param>
/// <param name="successMessageFormat">Format string logged on success.</param>
/// <param name="failureMessageFormat">Format string logged on failure.</param>
/// <param name="logger">The logger.</param>
/// <returns>A task that completes when all base URLs satisfy the predicate or the poll gives up.</returns>
private Task PollAsync(
    string id,
    string version,
    bool semVer2,
    Func<CatalogEntry, bool> isComplete,
    string startingMessage,
    string successMessageFormat,
    string failureMessageFormat,
    ITestOutputHelper logger)
{
    // Retry for connection problem, timeout, or HTTP 5XX. We are okay with retrying on 5XX in this case because
    // the origin server is blob storage. If they are having some internal server error, there's not much we can
    // do.
    return RetryUtility.ExecuteWithRetry(
        async () =>
        {
            // The SemVer 2.0.0 hives live behind a different set of base URLs.
            var registrationUrls = semVer2
                ? await _v3IndexClient.GetSemVer2RegistrationBaseUrlsAsync(logger)
                : await _v3IndexClient.GetRegistrationBaseUrlsAsync(logger);

            Assert.True(registrationUrls.Count > 0, "At least one registration base URL must be configured.");

            logger.WriteLine(
                startingMessage +
                Environment.NewLine +
                string.Join(Environment.NewLine, registrationUrls.Select(u => $" - {u}")));

            // Poll every registration base URL in parallel.
            var pollTasks = registrationUrls
                .Select(registrationUrl => PollAsync(
                    registrationUrl,
                    id,
                    version,
                    isComplete,
                    successMessageFormat,
                    failureMessageFormat,
                    logger))
                .ToList();

            await Task.WhenAll(pollTasks);
        },
        ex =>
        {
            // Transient network failures are always worth another attempt.
            if (ex.HasTypeOrInnerType<SocketException>() ||
                ex.HasTypeOrInnerType<TaskCanceledException>())
            {
                return true;
            }

            // Blob storage 5XX responses are outside our control, so retry those as well.
            return ex.HasTypeOrInnerType<HttpRequestMessageException>(out var httpEx) &&
                (httpEx.StatusCode == HttpStatusCode.InternalServerError ||
                 httpEx.StatusCode == HttpStatusCode.BadGateway ||
                 httpEx.StatusCode == HttpStatusCode.ServiceUnavailable ||
                 httpEx.StatusCode == HttpStatusCode.GatewayTimeout);
        },
        logger: logger);
}
// Verifies that ExecuteWithRetry makes exactly one attempt when the delegate succeeds.
public async Task DoesNotRetryIfNoExceptionIsThrown()
{
    var attempts = 0;

    // Succeeds immediately with 23, recording each invocation.
    Task<int> Execute()
    {
        attempts++;
        return Task.FromResult(23);
    }

    var result = await RetryUtility.ExecuteWithRetry(
        () => Task.Run(Execute),
        ex => true,
        _output);

    // A successful first attempt should never be repeated.
    Assert.Equal(1, attempts);
    Assert.Equal(23, result);
}
/// <summary>
/// Polls the V2 search endpoint of every search service instance until
/// <paramref name="isComplete"/> is satisfied for the given package ID and version.
/// </summary>
/// <param name="id">The package ID.</param>
/// <param name="version">The package version.</param>
/// <param name="isComplete">Predicate evaluated against each V2 search response to decide completion.</param>
/// <param name="startingMessage">Message logged before polling begins.</param>
/// <param name="successMessageFormat">Format string logged on success.</param>
/// <param name="failureMessageFormat">Format string logged on failure.</param>
/// <param name="logger">The logger.</param>
/// <returns>A task that completes when all search URLs satisfy the predicate or the poll gives up.</returns>
private Task PollAsync(
    string id,
    string version,
    Func<V2SearchResponse, bool> isComplete,
    string startingMessage,
    string successMessageFormat,
    string failureMessageFormat,
    ITestOutputHelper logger)
{
    // We perform the retry at this level so that we can re-fetch the list of search service instances from
    // Azure Management API. This list can change during scale-up or scale-down events.
    return RetryUtility.ExecuteWithRetry(
        async () =>
        {
            var searchServices = await GetSearchServicesAsync(logger);
            var searchUrls = searchServices
                .SelectMany(GetSearchUrlsForPolling)
                .ToList();

            Assert.True(searchUrls.Count > 0, "At least one search base URL must be configured.");

            logger.WriteLine(
                startingMessage +
                Environment.NewLine +
                string.Join(Environment.NewLine, searchUrls.Select(u => $" - {u}")));

            // Poll every search instance in parallel.
            var pollTasks = searchUrls
                .Select(searchUrl => PollAsync(
                    searchUrl,
                    id,
                    version,
                    isComplete,
                    successMessageFormat,
                    failureMessageFormat,
                    logger))
                .ToList();

            await Task.WhenAll(pollTasks);
        },
        ex =>
        {
            // Transient network failures are always worth another attempt.
            if (ex.HasTypeOrInnerType<SocketException>() ||
                ex.HasTypeOrInnerType<TaskCanceledException>())
            {
                return true;
            }

            // DNS resolution can fail while instances churn; retry that as well.
            return ex.HasTypeOrInnerType<WebException>(out var webEx) &&
                webEx.Status == WebExceptionStatus.NameResolutionFailure;
        },
        logger: logger);
}
/// <summary>
/// Gets the cloud service properties by delegating to the inner client, retrying
/// only <see cref="AzureManagementException"/> failures with the configured sleep duration.
/// </summary>
/// <param name="subscription">The Azure subscription.</param>
/// <param name="resourceGroup">The resource group name.</param>
/// <param name="name">The cloud service name.</param>
/// <param name="slot">The deployment slot.</param>
/// <param name="logger">The logger.</param>
/// <param name="token">The cancellation token passed through to the inner client.</param>
/// <returns>A task producing the cloud service properties.</returns>
public Task<string> GetCloudServicePropertiesAsync(
    string subscription,
    string resourceGroup,
    string name,
    string slot,
    ITestOutputHelper logger,
    CancellationToken token)
{
    Func<Task<string>> getProperties = () => _inner.GetCloudServicePropertiesAsync(
        subscription,
        resourceGroup,
        name,
        slot,
        token);

    return RetryUtility.ExecuteWithRetry(
        getProperties,
        ex => ex is AzureManagementException,
        maxAttempts: RetryUtility.DefaultMaxAttempts,
        sleepDuration: _sleepDuration,
        logger: logger);
}
// Verifies that ExecuteWithRetry does not retry when the shouldRetry predicate
// rejects the thrown exception.
//
// Fix: the original declared a shouldRetry predicate (ex is InvalidOperationException)
// but never passed it — it hard-coded `ex => true` and asserted 5 attempts, which
// exercised the max-attempts path (already covered by TriesUpToMaxAttemptsTimes)
// rather than the no-retry path this test is named for.
public async Task DoesNotRetryIfShouldNotRetry()
{
    var attempts = 0;
    // Only InvalidOperationException is retryable; the delegate throws ApplicationException.
    Func<Exception, bool> shouldRetry = ex => ex is InvalidOperationException;
    Func<Task<int>> executeAsync = () => Task.Run<int>(async () =>
    {
        attempts++;
        await Task.Yield();
        throw new ApplicationException("Bad!");
    });

    var actualEx = await Assert.ThrowsAsync<ApplicationException>(() => RetryUtility.ExecuteWithRetry(
        executeAsync,
        shouldRetry,
        maxAttempts: 5,
        sleepDuration: TimeSpan.Zero,
        logger: _output));

    // The predicate rejects ApplicationException, so exactly one attempt is made.
    Assert.Equal(1, attempts);
    Assert.Equal("Bad!", actualEx.Message);
}
/// <summary>
/// Polls each of the given indexed files in parallel, retrying the whole batch
/// on transient network failures.
/// </summary>
/// <param name="indexedFiles">The set of indexed file identifiers to poll.</param>
/// <param name="startingMessage">Message logged before polling begins.</param>
/// <param name="successMessageFormat">Format string logged on success.</param>
/// <param name="failureMessageFormat">Format string logged on failure.</param>
/// <param name="logger">The logger.</param>
/// <returns>A task that completes when every file has been polled.</returns>
private Task PollAsync(
    HashSet<string> indexedFiles,
    string startingMessage,
    string successMessageFormat,
    string failureMessageFormat,
    ITestOutputHelper logger)
{
    return RetryUtility.ExecuteWithRetry(
        async () =>
        {
            logger.WriteLine(startingMessage);

            // Poll every indexed file in parallel.
            var pollTasks = indexedFiles
                .Select(indexedFile => PollAsync(indexedFile, successMessageFormat, failureMessageFormat, logger))
                .ToList();

            await Task.WhenAll(pollTasks);
        },
        // Only connection problems and timeouts are worth another attempt here.
        ex => ex.HasTypeOrInnerType<SocketException>() ||
              ex.HasTypeOrInnerType<TaskCanceledException>(),
        logger: logger);
}
// Verifies that ExecuteWithRetry sleeps for the configured duration between attempts.
public async Task SleepsSleepDurationBetweenAttempts()
{
    // 5 attempts with a 100ms sleep between them => at least 4 sleeps => 400ms minimum elapsed.
    var minimum = TimeSpan.FromMilliseconds(400);

    // Fails on every attempt so that all sleeps are exercised.
    async Task<int> AlwaysThrow()
    {
        await Task.Yield();
        throw new InvalidOperationException("Bad!");
    }

    var timer = Stopwatch.StartNew();
    var actualEx = await Assert.ThrowsAsync<InvalidOperationException>(() => RetryUtility.ExecuteWithRetry(
        () => Task.Run<int>(AlwaysThrow),
        ex => true,
        maxAttempts: 5,
        sleepDuration: TimeSpan.FromMilliseconds(100),
        logger: _output));
    timer.Stop();

    Assert.True(
        timer.Elapsed >= minimum,
        $"Elapsed was {timer.Elapsed}. Should be greater than or equal to {minimum}.");
    Assert.Equal("Bad!", actualEx.Message);
}
// Verifies that ExecuteWithRetry stops after maxAttempts and surfaces the last exception.
public async Task TriesUpToMaxAttemptsTimes()
{
    var attempts = 0;

    // Fails on every attempt, tagging the exception message with the attempt number.
    async Task<int> AlwaysThrow()
    {
        attempts++;
        await Task.Yield();
        throw new InvalidOperationException("Bad! " + attempts);
    }

    var actualEx = await Assert.ThrowsAsync<InvalidOperationException>(() => RetryUtility.ExecuteWithRetry(
        () => Task.Run<int>(AlwaysThrow),
        ex => true,
        maxAttempts: 5,
        sleepDuration: TimeSpan.Zero,
        logger: _output));

    // Exactly maxAttempts invocations, and the final attempt's exception is rethrown.
    Assert.Equal(5, attempts);
    Assert.Equal("Bad! 5", actualEx.Message);
}