public SqlServerTransientFaultRetryPolicyFactory(
    SqlServerDataStoreConfiguration sqlServerDataStoreConfiguration,
    IPollyRetryLoggerFactory pollyRetryLoggerFactory)
{
    EnsureArg.IsNotNull(sqlServerDataStoreConfiguration, nameof(sqlServerDataStoreConfiguration));
    EnsureArg.IsNotNull(pollyRetryLoggerFactory, nameof(pollyRetryLoggerFactory));

    SqlServerTransientFaultRetryPolicyConfiguration transientFaultRetryPolicyConfiguration =
        sqlServerDataStoreConfiguration.TransientFaultRetryPolicy;

    IEnumerable<TimeSpan> sleepDurations = Backoff.ExponentialBackoff(
        transientFaultRetryPolicyConfiguration.InitialDelay,
        transientFaultRetryPolicyConfiguration.RetryCount,
        transientFaultRetryPolicyConfiguration.Factor,
        transientFaultRetryPolicyConfiguration.FastFirst);

    PolicyBuilder policyBuilder = Policy
        .Handle<SqlException>(sqlException => sqlException.IsTransient())
        .Or<TimeoutException>();

    Action<Exception, TimeSpan, int, Context> onRetryLogger = pollyRetryLoggerFactory.Create();

    _retryPolicy = policyBuilder.WaitAndRetryAsync(
        sleepDurations,
        onRetry: onRetryLogger);
}
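A minimal usage sketch for the policy built above. The factory accessor and the SQL delegate are assumptions for illustration only; the original snippet does not show how the policy is consumed.

// Hypothetical consumer of the policy produced by the factory above.
// Create() and ExecuteNonQueryAsync(...) are placeholders, not part of the original code.
IAsyncPolicy retryPolicy = retryPolicyFactory.Create();

await retryPolicy.ExecuteAsync(
    async cancellationToken => await sqlCommandWrapper.ExecuteNonQueryAsync(cancellationToken),
    CancellationToken.None);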
public void Backoff_WithFastFirstEqualToTrue_ResultIsZero()
{
    // Arrange
    var initialDelay = TimeSpan.FromMilliseconds(1);
    const int retryCount = 3;
    const double factor = 2;
    const bool fastFirst = true;

    // Act
    IEnumerable<TimeSpan> result = Backoff.ExponentialBackoff(initialDelay, retryCount, factor, fastFirst);

    // Assert
    result.Should().NotBeNull();
    result.Should().HaveCount(retryCount);

    bool first = true;
    foreach (TimeSpan timeSpan in result)
    {
        if (first)
        {
            timeSpan.Should().Be(TimeSpan.FromMilliseconds(0));
            first = false;
        }
        else
        {
            timeSpan.Should().BeGreaterOrEqualTo(initialDelay);
        }
    }
}
private Policy<T> BuildPolicy<T>()
{
    var delay = Backoff.ExponentialBackoff(
        TimeSpan.FromMilliseconds(_backOffInitialInterval),
        _maxAttempts - 1,
        _backOffMultiplier,
        true);

    var retryPolicy = Policy<T>
        .HandleInner<Exception>(e => _retryableExceptions.Classify(e))
        .WaitAndRetry(delay, (delegateResult, time, count, context) => OnRetry(delegateResult, time, count, context));

    var fallbackPolicy = Policy<T>
        .Handle<Exception>()
        .Fallback<T>(
            (delegateResult, context, token) =>
            {
                var retryContext = GetRetryContext(context);
                retryContext.LastException = delegateResult.Exception;
                var callback = retryContext.GetAttribute(RECOVERY_CALLBACK_KEY) as IRecoveryCallback;
                var result = default(T);

                if (callback != null)
                {
                    result = (T)callback.Recover(retryContext);
                    retryContext.SetAttribute(RECOVERED, true);
                    retryContext.SetAttribute(RECOVERED_RESULT, result);
                }
                else if (delegateResult.Exception != null)
                {
                    throw delegateResult.Exception;
                }

                return result;
            },
            (ex, context) => { });

    return fallbackPolicy.Wrap(retryPolicy);
}
/// <summary>
/// Builds a <see cref="SqlStrategy"/> with a policy for retrying
/// actions on transaction failures.
/// </summary>
/// <param name="sqlStrategy">The SQL strategy builder.</param>
/// <param name="exceptionHandlingStrategy">
/// The exception handling strategy used to determine which exceptions
/// should be retried.
/// </param>
/// <param name="sqlStrategyConfiguration">
/// An <see cref="SqlStrategyOptions"/> containing configuration parameters.
/// </param>
/// <returns>The same <see cref="SqlStrategyBuilder"/> instance, for chaining.</returns>
public static SqlStrategyBuilder Retry(
    this SqlStrategyBuilder sqlStrategy,
    IExceptionHandlingStrategy exceptionHandlingStrategy,
    SqlStrategyOptions sqlStrategyConfiguration)
{
    var backoff = Backoff.ExponentialBackoff(TimeSpan.FromSeconds(2), sqlStrategyConfiguration.RetryCount());

    sqlStrategy.Policies.Add(
        Policy.Handle<SqlException>(exceptionHandlingStrategy.ShouldHandle)
            .WaitAndRetry(backoff, SqlStrategyLoggingDelegates.OnRetry)
            .WithPolicyKey(SqlServerPolicyKeys.TransactionPolicy));

    sqlStrategy.Policies.Add(
        Policy.Handle<SqlException>(exceptionHandlingStrategy.ShouldHandle)
            .WaitAndRetryAsync(backoff, SqlStrategyLoggingDelegates.OnRetryAsync)
            .WithPolicyKey(SqlServerPolicyKeys.TransactionPolicyAsync));

    return sqlStrategy;
}
private static ProcessResult ExecuteWithRetry(ProcessStartInfo info, Func<ProcessStartInfo, ProcessResult> executor)
{
    ProcessResult processResult = Policy
        .HandleResult<ProcessResult>(result => result.Process.ExitCode != 0)
        .WaitAndRetry(
            Backoff.ExponentialBackoff(TimeSpan.FromSeconds(1), RetryHelper.MaxRetries, RetryHelper.WaitFactor),
            RetryHelper.GetOnRetryDelegate<ProcessResult>(RetryHelper.MaxRetries, loggerService))
        .Execute(() => executor(info));

    return processResult;
}
internal static IEnumerable<TimeSpan> Exponential(int retryCount, TimeSpan initialDelay)
{
    if (retryCount < 0)
    {
        throw new ArgumentOutOfRangeException(nameof(retryCount), retryCount, "should be >= 0");
    }

    if (initialDelay < TimeSpan.Zero)
    {
        throw new ArgumentOutOfRangeException(nameof(initialDelay), initialDelay, "should be >= 0ms");
    }

    return Backoff.ExponentialBackoff(initialDelay, retryCount);
}
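A minimal sketch of consuming the helper above, assuming it lives on a class named Delay and guards a placeholder CallRemoteService() operation (both names are assumptions for illustration):

// Hypothetical caller; Delay and CallRemoteService are placeholders, not part of the original snippet.
IEnumerable<TimeSpan> sleepDurations = Delay.Exponential(3, TimeSpan.FromSeconds(1));

Policy
    .Handle<HttpRequestException>()
    .WaitAndRetry(sleepDurations)
    .Execute(() => CallRemoteService());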
public static SleepDurationProvider Exponential(int retryCount, TimeSpan initialDelay)
{
    if (retryCount < 0)
    {
        throw new ArgumentOutOfRangeException(nameof(retryCount), retryCount, "should be >= 0");
    }

    if (initialDelay < TimeSpan.Zero)
    {
        throw new ArgumentOutOfRangeException(nameof(initialDelay), initialDelay, "should be >= 0ms");
    }

    return new SleepDurationProvider(retryCount, Backoff.ExponentialBackoff(initialDelay, retryCount));
}
public void Backoff_WithInitialDelayLessThanZero_ThrowsException()
{
    // Arrange
    var initialDelay = new TimeSpan(-1);
    const int retryCount = 3;
    const double factor = 2;
    const bool fastFirst = false;

    // Act
    Action act = () => Backoff.ExponentialBackoff(initialDelay, retryCount, factor, fastFirst);

    // Assert
    act.Should().Throw<ArgumentOutOfRangeException>()
        .And.ParamName.Should().Be("initialDelay");
}
public void ExponentialBackoff_CannotBeZero()
{
    var backoffAction = Backoff.ExponentialBackoff(TimeSpan.FromSeconds(2), 5);
    var initialDateTime = DateTime.Now;
    InMemoryQueueItem item = new InMemoryQueueItem();

    // Iterate 5 * 2^5 times; Math.Pow is used because C#'s ^ operator is XOR, not exponentiation.
    for (int i = 0; i < 5 * (int)Math.Pow(2, 5); i++)
    {
        item.HandleAfter = initialDateTime;
        backoffAction(item);

        Assert.IsTrue(item.HandleAfter.Subtract(initialDateTime).TotalSeconds > 1);
        Assert.IsTrue(item.HandleAfter.Subtract(initialDateTime).TotalSeconds < Math.Pow(2, 5));
    }
}
public void Backoff_WithFactorLessThanOne_ThrowsException()
{
    // Arrange
    var initialDelay = TimeSpan.FromMilliseconds(1);
    const int retryCount = 3;
    const double factor = 0.99;
    const bool fastFirst = false;

    // Act
    Action act = () => Backoff.ExponentialBackoff(initialDelay, retryCount, factor, fastFirst);

    // Assert
    act.Should().Throw<ArgumentOutOfRangeException>()
        .And.ParamName.Should().Be("factor");
}
public void Backoff_WithRetryEqualToZero_ResultIsEmpty()
{
    // Arrange
    var initialDelay = TimeSpan.FromMilliseconds(1);
    const int retryCount = 0;
    const double factor = 2;
    const bool fastFirst = false;

    // Act
    IEnumerable<TimeSpan> result = Backoff.ExponentialBackoff(initialDelay, retryCount, factor, fastFirst);

    // Assert
    result.Should().NotBeNull();
    result.Should().BeEmpty();
}
/// <summary>
/// This will create an exponentially increasing retry delay of 100, 200, 400, 800, 1600ms.
/// The default exponential growth factor is 2.0. However, we can provide our own.
/// </summary>
/// <typeparam name="T">The type of response.</typeparam>
/// <param name="option">The retry option.</param>
/// <param name="context">Request context.</param>
/// <param name="action">Action to execute.</param>
/// <param name="predicate">Handle result predicate.</param>
/// <param name="onRetry">Handle custom on retries.</param>
/// <returns>A <see cref="Task"/> representing the asynchronous operation.</returns>
/// <remarks>
/// Snippet:
/// Formula = initial delay * 2^attempt (attempt starting at 0)
/// </remarks>
public static Task<T> ExponentialBackoff<T>(
    RetryOption option,
    Context context,
    Func<Context, Task<T>> action,
    Func<T, bool> predicate,
    Action<DelegateResult<T>, TimeSpan, Context> onRetry)
{
    if (option is null)
    {
        throw new ArgumentNullException(nameof(option));
    }

    var delay = Backoff.ExponentialBackoff(TimeSpan.FromMilliseconds(option.MinDelayIsMs), retryCount: option.MaxRetry);

    return HandleRetry(context, action, predicate, onRetry, delay);
}
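A standalone sketch that simply prints the sequence Backoff.ExponentialBackoff produces for a 100 ms initial delay and five retries, matching the 100, 200, 400, 800, 1600 ms progression described in the summary above:

// Delay progression with the default factor of 2.0 and fastFirst = false (Polly.Contrib.WaitAndRetry).
IEnumerable<TimeSpan> delays = Backoff.ExponentialBackoff(TimeSpan.FromMilliseconds(100), retryCount: 5);

foreach (TimeSpan delay in delays)
{
    Console.WriteLine(delay.TotalMilliseconds); // 100, 200, 400, 800, 1600
}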
public void Backoff_ResultIsCorrect()
{
    // Arrange
    var initialDelay = TimeSpan.FromMilliseconds(10);
    const int retryCount = 5;
    const double factor = 2;
    const bool fastFirst = false;

    // Act
    IEnumerable<TimeSpan> result = Backoff.ExponentialBackoff(initialDelay, retryCount, factor, fastFirst);

    // Assert
    result.Should().NotBeNull();
    result.Should().HaveCount(retryCount);
    result.Select(t => t.TotalMilliseconds).Should().BeEquivalentTo(new double[] { 10, 20, 40, 80, 160 });
}
private static void SetupEventsWithQueue()
{
    // If a handler fails we can do something else
    emailService.SuccessRate = 25;

    // Save incoming messages to the queue
    EventBus.Subscribe(queueService.Enque<UserRegistered>());

    EventBus.Subscribe(
        // We expect UserRegistered events from EventQueue
        emailService.WhenQueued()
            // Once received, retry the event 6 times with exponential delays
            .RetryQueued(6, Backoff.ExponentialBackoff(TimeSpan.FromSeconds(10)))
            .WhenRetryQueueFailed((item, e) => Output.Error("Queued handler failed after 6 attempts")));

    // In this sample we are using a timer to invoke the queue.
    // For web apps, it is more reliable to trigger execution via URL.
    Timer t = new Timer(100);
    t.Elapsed += timer_Elapsed;
    t.Start();
}
/// <summary>
/// Generates sleep durations in an exponential manner.
/// </summary>
/// <param name="sqlStrategyOptions">
/// The <see cref="SqlStrategyOptions"/> instance.
/// </param>
/// <returns>
/// The configured value if it is not <see langword="null"/>; otherwise,
/// the default value.
/// </returns>
public static IEnumerable<TimeSpan> ExponentialBackoff(this SqlStrategyOptions sqlStrategyOptions)
{
    return Backoff.ExponentialBackoff(TimeSpan.FromSeconds(2), sqlStrategyOptions.RetryCount());
}
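A minimal sketch of feeding this extension into a Polly policy; options is assumed to be an SqlStrategyOptions instance and RunQuery() is a placeholder delegate, neither of which appears in the original snippet:

// Hypothetical usage of the ExponentialBackoff extension above.
Policy
    .Handle<SqlException>()
    .WaitAndRetry(options.ExponentialBackoff())
    .Execute(() => RunQuery());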
/// <summary>
/// Calls iDRAC for the latest temperature.
/// </summary>
/// <remarks>
/// Ensure that the Regex setting to retrieve the temp(s) has been
/// updated for your particular system. Mine is set for an R620 system.
/// The default values provided in this project are meant to parse an
/// output like the below. The inline comments will reference this
/// as an example:
/// Inlet Temp   | 04h | ok | 7.1 | 20 degrees C
/// Exhaust Temp | 01h | ok | 7.1 | 25 degrees C
/// Temp         | 0Eh | ok | 3.1 | 30 degrees C
/// Temp         | 0Fh | ok | 3.2 | 31 degrees C
/// </remarks>
/// <returns>A <see cref="Task"/> representing the asynchronous operation.</returns>
private async Task CheckLatestTemperature(CancellationToken cancellationToken)
{
    // Get the output string like the one in <remarks> above. Using Polly to handle if/when
    // the result is empty, which can happen from time to time.
    int retryCount = _settings.PollyRetryOnFailureCount;

    IEnumerable<TimeSpan> delay = Backoff.ExponentialBackoff(
        TimeSpan.FromMilliseconds(_settings.PollyInitialDelayInMillis),
        retryCount,
        _settings.PollyDelayIncreaseFactor);

    PolicyResult<string> policyExecutionResult = await Policy
        .HandleResult<string>(string.IsNullOrWhiteSpace)
        .WaitAndRetryAsync(
            delay,
            (_, span, iteration, _) =>
            {
                LogWarning(
                    "Temperature check command returned empty result. " +
                    "Trying next of {retries} attempt(s) after {span} delay.",
                    retryCount - iteration + 1,
                    span);
            })
        .ExecuteAndCaptureAsync(
            async token => await ExecuteIpmiToolCommand(CHECK_TEMPERATURE_CONTROL_COMMAND, token),
            cancellationToken);

    if (policyExecutionResult.Outcome == OutcomeType.Failure)
    {
        LogError(
            policyExecutionResult.FinalException,
            "Error fetching temperature after {retries} attempts!",
            retryCount);
        return;
    }

    string result = policyExecutionResult.Result;

    // Using the default of (?<=0Eh|0Fh).+(\\d{2}) will return all 2-digit numbers in lines
    // containing "0Eh" or "0Fh" -- in the above example, 30 and 31 -- as captured groups.
    MatchCollection matches = Regex.Matches(result, _settings.RegexToRetrieveTemp, RegexOptions.Multiline);

    if (!matches.Any())
    {
        return;
    }

    // For each matched line, grab the last capture group (the 2-digit
    // temp) and attempt to convert it to an integer. Find the max
    // int of all the matched lines and return it.
    int maxCpuTemp = matches
        .Select(x => int.TryParse(x.Groups.Values.LastOrDefault()?.Value, out int temp) ? temp : 0)
        .Max();

    PushTemperature(maxCpuTemp);
    _lastRecordedTemp = maxCpuTemp;
}
private static PolicyRegistry GetDefaultRegistry(IComponentContext componentContext)
{
    var console = componentContext.Resolve<IConsole>();

    var policyRegistry = new PolicyRegistry
    {
        { RetryPolicyKey.NoRetry.ToString(), Policy.NoOpAsync() },
        {
            RetryPolicyKey.BasicRetryOnRpc.ToString(),
            Policy
                .Handle<RpcException>()
                .RetryAsync(MaxRetries, (exception, retryAttempt, context) =>
                {
                    console.Out.WriteLine($"Operation: {context.OperationKey}; Attempt {retryAttempt - 1} failed: {exception.Message}. Retrying.");
                    return Task.CompletedTask;
                })
        },
        {
            RetryPolicyKey.RetryOnRpcWithExponentialBackoff.ToString(),
            Policy
                .Handle<RpcException>()
                .WaitAndRetryAsync(
                    Backoff.ExponentialBackoff(TimeSpan.FromSeconds(1), MaxRetries),
                    (exception, timeSpan, retryAttempt, context) =>
                    {
                        console.Out.WriteLine(
                            $"Operation: {context.OperationKey}; TimeSpan: {timeSpan.ToString()}. Attempt {retryAttempt - 1} failed: {exception.Message}. Retrying.");
                        return Task.CompletedTask;
                    })
        },
        {
            RetryPolicyKey.RetryOnRpcWithJitter.ToString(),
            Policy
                .Handle<RpcException>()
                .WaitAndRetryAsync(
                    MaxRetries,
                    retryAttempt =>
                    {
                        var backoffSpans = Backoff
                            .DecorrelatedJitterBackoffV2(TimeSpan.FromSeconds(1), MaxRetries)
                            .ToList();
                        return backoffSpans[retryAttempt - 1];
                    },
                    (exception, timeSpan, retryAttempt, context) =>
                    {
                        console.Out.WriteLine(
                            $"Operation: {context.OperationKey}; TimeSpan: {timeSpan.ToString()}. Attempt {retryAttempt - 1} failed: {exception.Message}. Retrying.");
                        return Task.CompletedTask;
                    })
        },
        {
            CachePolicyKey.InMemoryCache.ToString(),
            Policy
                .CacheAsync(
                    componentContext.Resolve<IAsyncCacheProvider>(),
                    TimeSpan.FromMinutes(5),
                    (policyContext, cacheKey) => console.WriteLine($"Operation {policyContext.OperationKey}: Cache get {cacheKey}"),
                    (policyContext, cacheKey) => console.WriteLine($"Operation {policyContext.OperationKey}: Cache miss {cacheKey}"),
                    (policyContext, cacheKey) => console.WriteLine($"Operation {policyContext.OperationKey}: Cache put {cacheKey}"),
                    (policyContext, cacheKey, exception) => console.WriteLine($"Operation {policyContext.OperationKey}: Cache get error {cacheKey}; {exception}"),
                    (policyContext, cacheKey, exception) => console.WriteLine($"Operation {policyContext.OperationKey}: Cache put error {cacheKey}; {exception}"))
        },
        { CachePolicyKey.NoCache.ToString(), Policy.NoOpAsync() },
        { TimeoutPolicyKey.NoTimeout.ToString(), Policy.NoOpAsync() },
        {
            TimeoutPolicyKey.DefaultPessimisticTimeout.ToString(),
            Policy.TimeoutAsync(TimeSpan.FromMilliseconds(500), TimeoutStrategy.Pessimistic, (context, span, task) =>
            {
                // Do not await, otherwise the policy is useless.
                task.ContinueWith(t =>
                {
                    // ContinueWith is important: the abandoned task may still be executing when the caller times out.
                    if (t.IsFaulted)
                    {
                        console.Out.WriteLine(
                            $"Operation {context.OperationKey}: execution timed out after {span.TotalSeconds} seconds, eventually terminated with: {t.Exception.Message}.");
                    }
                    else if (t.IsCanceled)
                    {
                        // (If the executed delegates do not honour cancellation, this IsCanceled branch may never be hit.
                        // It can be good practice however to include it, in case a Policy configured with TimeoutStrategy.Pessimistic
                        // is used to execute a delegate honouring cancellation.)
                        console.Out.WriteLine(
                            $"Operation {context.OperationKey}: execution timed out after {span.TotalSeconds} seconds, task cancelled.");
                    }
                    else
                    {
                        // Extra logic (if desired) for tasks which complete, despite the caller having 'walked away' earlier due to timeout.
                        console.Out.WriteLine(
                            $"Operation {context.OperationKey}: execution timed out after {span.TotalSeconds} seconds, task completed.");
                    }

                    // Additionally, clean up any resources ...
                });

                console.Out.WriteLine($"Operation {context.OperationKey} timed out.");
                return Task.CompletedTask;
            })
        },
        {
            TimeoutPolicyKey.DefaultOptimisticTimeout.ToString(),
            Policy.TimeoutAsync(TimeSpan.FromMilliseconds(500), TimeoutStrategy.Optimistic, (context, span, abandonedTask) =>
            {
                console.Out.WriteLine($"Operation: {context.OperationKey}, timeout after {span}. ");

                abandonedTask.ContinueWith(t =>
                {
                    if (t.IsFaulted)
                    {
                        console.Out.WriteLine(
                            $"Operation {context.OperationKey}: execution timed out after {span.TotalSeconds} seconds, eventually terminated with: {t.Exception.Message}.");
                    }
                    else if (t.IsCanceled)
                    {
                        console.Out.WriteLine(
                            $"Operation {context.OperationKey}: execution timed out after {span.TotalSeconds} seconds, task cancelled.");
                    }
                    else
                    {
                        console.Out.WriteLine(
                            $"Operation {context.OperationKey}: execution timed out after {span.TotalSeconds} seconds, task completed.");
                    }
                });

                return Task.CompletedTask;
            })
        },
        { CircuitBreakerPolicyKey.NoBreaker.ToString(), Policy.NoOpAsync() },
        {
            CircuitBreakerPolicyKey.DefaultCircuitBreaker.ToString(),
            Policy
                .Handle<RpcException>()
                .CircuitBreakerAsync(
                    2,
                    TimeSpan.FromSeconds(2),
                    (exception, span) => { console.WriteLine($"Circuit broken. Span: {span}; Exception: {exception.Message};"); },
                    () => { console.WriteLine("Circuit reset."); },
                    () => { console.WriteLine("Circuit half open."); })
        },
    };

    return policyRegistry;
}
public static AsyncRetryPolicy GetExponentialHandlerRetryPolicy()
{
    var delay = Backoff.ExponentialBackoff(TimeSpan.FromMilliseconds(100), retryCount: 5, fastFirst: true);

    return Policy.Handle<Exception>().WaitAndRetryAsync(delay);
}
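A minimal sketch of consuming the policy above; ProcessMessageAsync() is a hypothetical placeholder for the operation being retried:

// Hypothetical consumer; ProcessMessageAsync is a placeholder, not part of the original snippet.
AsyncRetryPolicy retryPolicy = GetExponentialHandlerRetryPolicy();

await retryPolicy.ExecuteAsync(() => ProcessMessageAsync());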