private static Func<Task> SendCommand(int run)
{
    return async () =>
    {
        var r = new Random();
        while (_run == run)
        {
            try
            {
                for (int i = 0; i < r.Next(1, 10); i++)
                {
                    var payload = BitConverter.GetBytes(r.Next(0, 100000))
                        .Concat(BitConverter.GetBytes(r.Next(0, 100000))).ToArray();
                    await _cluster.ApplyCommandAsync(new StateMachineCommandRequest() { Command = payload });
                }

                TheTrace.TraceInformation("Sent commands");
            }
            catch (Exception e)
            {
                TheTrace.TraceError(e.ToString());
            }

            await Task.Delay(r.Next(0, 50));
        }
    };
}
public async Task<IEnumerable<DynamicTableEntity>> QueryAsync(ShardKeyArrived shardKeyArrived)
{
    //var account = CloudStorageAccount.Parse(shardKeyArrived.Source.ConnectionString);
    CloudStorageAccount account = null;
    if (!String.IsNullOrWhiteSpace(shardKeyArrived.Source.AccountSasKey))
    {
        // Create new storage credentials using the SAS token.
        var accountSas = new StorageCredentials(shardKeyArrived.Source.AccountSasKey);

        // Use these credentials and the account name to create the storage account; a table client is created from it below.
        try
        {
            account = new CloudStorageAccount(accountSas, shardKeyArrived.Source.AccountName, endpointSuffix: "", useHttps: true);
        }
        catch (Exception ex)
        {
            TheTrace.TraceError(ex.ToString());
        }
    }
    else
    {
        account = CloudStorageAccount.Parse(shardKeyArrived.Source.ConnectionString);
    }

    var client = account.CreateCloudTableClient();
    var table = client.GetTableReference(shardKeyArrived.Source.DynamicProperties["TableName"].ToString());
    return await table.ExecuteQueryAsync(new TableQuery().Where(
        TableQuery.GenerateFilterCondition("PartitionKey", "eq", shardKeyArrived.ShardKey))).ConfigureAwait(false);
}
private async Task DoScheduleAsync(PeckSource source)
{
    var oldTimestamp = source.Timestamp;
    try
    {
        await _eventQueueOperator.PushAsync(new Event(new PeckSourceScheduled() { Source = source })
        {
            QueueName = QueueName.FromTopicName("PeckSourceScheduled").ToString()
        });
        source.LastOffset = DateTimeOffset.UtcNow;
        await _table.ExecuteAsync(TableOperation.InsertOrMerge(source));
    }
    catch (Exception e)
    {
        source.LastOffset = oldTimestamp;
        TheTrace.TraceError("Error scheduling source {0} : {1}", source.Name, e);
        try
        {
            _table.Execute(TableOperation.InsertOrMerge(source));
        }
        catch (Exception ex)
        {
            TheTrace.TraceError("Error saving error (!) in table. Source {0} : {1}", source.Name, ex);
        }
    }
}
private async Task KeepExtendingLeaseAsync(Func<Task> extendLeaseAsync, TimeSpan howLong, CancellationToken cancellationToken, string resource)
{
    await EnsureExists();
    var thisLong = new TimeSpan(2 * howLong.Ticks / 3); // RATM: how long? This long. What you reap is what you sow!
    await Task.Delay(thisLong, cancellationToken);
    while (true)
    {
        try
        {
            if (cancellationToken.IsCancellationRequested)
            {
                break;
            }

            await extendLeaseAsync();
            TheTrace.TraceInformation("Extended the lifetime of the lease for {0}...", resource);
            await Task.Delay(thisLong, cancellationToken);
        }
        catch (Exception exception)
        {
            if (!cancellationToken.IsCancellationRequested) // if cancellation was requested, this would just be the cancellation - that is OK
            {
                TheTrace.TraceError(exception.ToString());
            }

            break;
        }
    }
}
private async Task PushbatchAsync()
{
    if (_stringBuilder.Length == 0)
    {
        return;
    }

    try
    {
        var responseMessage = await _httpClient.PostAsync(_esUrl + "_bulk",
            new StringContent(_stringBuilder.ToString(), Encoding.UTF8, "application/json"));
        _stringBuilder.Clear();
        _numberOfRecords = 0;
        responseMessage.EnsureSuccessStatusCode();
        TheTrace.TraceInformation("ConveyorBelt_Pusher: Pushing to {0}", _esUrl);
    }
    catch (Exception e)
    {
        TheTrace.TraceError(e.ToString());
        throw;
    }
}
public async Task KeepExtendingLeaseAsync(Event message, TimeSpan howLong, CancellationToken cancellationToken)
{
    await Task.Delay(new TimeSpan(2 * howLong.Ticks / 3), cancellationToken);
    while (true)
    {
        try
        {
            if (cancellationToken.IsCancellationRequested)
            {
                break;
            }

            var underlyingMessage = (BrokeredMessage)message.UnderlyingMessage;
            await underlyingMessage.RenewLockAsync();
            await Task.Delay(new TimeSpan(2 * howLong.Ticks / 3), cancellationToken);
        }
        catch (Exception exception)
        {
            TheTrace.TraceError(exception.ToString());
            break;
        }
    }
}
private async Task<bool> Process(CancellationToken cancellationToken)
{
    var result = await _queueOperator.NextAsync(new QueueName(_actorDescriptor.SourceQueueName));
    var cancellationTokenSource = new CancellationTokenSource();
    if (result.IsSuccessful)
    {
        TheTrace.TraceInformation("Received a message. Id: {0} Queue: {1} ", result.PollingResult.Id, _actorDescriptor.SourceQueueName);
        var actor = (IProcessorActor)_serviceLocator.GetService(_actorDescriptor.ActorType);
        try
        {
            // this is NOT supposed to be awaited upon!!
            _queueOperator.KeepExtendingLeaseAsync(result.PollingResult, TimeSpan.FromSeconds(30),
                cancellationTokenSource.Token).SafeObserve();

            // Would have been great to make this a fixed-memory-footprint real iterable rather than a list,
            // but the enumerable has to be turned into a list further on anyway.
            var events = (await actor.ProcessAsync(result.PollingResult)).ToArray();

            var groups = events.GroupBy(x => x.QueueName);
            foreach (var gr in groups)
            {
                await _queueOperator.PushBatchAsync(gr);
                TryDisposeMessages(gr);
            }

            cancellationTokenSource.Cancel();
            await _queueOperator.CommitAsync(result.PollingResult);
            TheTrace.TraceInformation("Processing succeeded. Id: {0} Queue: {1} ", result.PollingResult.Id, _actorDescriptor.SourceQueueName);
        }
        catch (Exception exception)
        {
            TheTrace.TraceInformation("Processing failed. Id: {0} Queue: {1} ", result.PollingResult.Id, _actorDescriptor.SourceQueueName);
            TheTrace.TraceError(exception.ToString());
            cancellationTokenSource.Cancel();
            _queueOperator.AbandonAsync(result.PollingResult).SafeObserve().Wait();
        }
        finally
        {
            if (result.IsSuccessful)
            {
                TryDisposeMessage(result.PollingResult);
            }

            _serviceLocator.ReleaseService(actor);
        }
    }

    return result.IsSuccessful;
}
private async Task<bool> Process(CancellationToken cancellationToken)
{
    var result = await _queueOperator.NextAsync(new QueueName(_actorDescriptor.SourceQueueName));
    var cancellationTokenSource = new CancellationTokenSource();
    if (result.IsSuccessful)
    {
        TheTrace.TraceInformation("Received a message. Id: {0} Queue: {1} ", result.PollingResult.Id, _actorDescriptor.SourceQueueName);
        var actor = (IProcessorActor)_serviceLocator.GetService(_actorDescriptor.ActorType);
        try
        {
            _queueOperator.KeepExtendingLeaseAsync(result.PollingResult, TimeSpan.FromSeconds(30),
                cancellationTokenSource.Token).SafeObserve();
            var events = (await actor.ProcessAsync(result.PollingResult)).ToArray();
            cancellationTokenSource.Cancel();
            await _queueOperator.CommitAsync(result.PollingResult);
            var groups = events.GroupBy(x => x.QueueName);
            foreach (var gr in groups)
            {
                await _queueOperator.PushBatchAsync(gr);
                TryDisposeMessages(gr);
            }

            TheTrace.TraceInformation("Processing succeeded. Id: {0} Queue: {1} ", result.PollingResult.Id, _actorDescriptor.SourceQueueName);
        }
        catch (Exception exception)
        {
            TheTrace.TraceInformation("Processing failed. Id: {0} Queue: {1} ", result.PollingResult.Id, _actorDescriptor.SourceQueueName);
            TheTrace.TraceError(exception.ToString());
            cancellationTokenSource.Cancel();
            _queueOperator.AbandonAsync(result.PollingResult).SafeObserve().Wait();
        }
        finally
        {
            if (result.IsSuccessful)
            {
                TryDisposeMessage(result.PollingResult);
            }

            _serviceLocator.ReleaseService(actor);
        }
    }

    return result.IsSuccessful;
}
private async Task<bool> Process(CancellationToken cancellationToken)
{
    var result = await _queueOperator.NextAsync(new QueueName(_actorDescriptor.SourceQueueName));
    var cancellationTokenSource = new CancellationTokenSource();
    if (result.IsSuccessful)
    {
        TheTrace.TraceInformation("Received a message. Id: {0} Queue: {1} ", result.PollingResult.Id, _actorDescriptor.SourceQueueName);
        var actor = (IProcessorActor)_serviceLocator.GetService(_actorDescriptor.ActorType);
        try
        {
            // this is NOT supposed to be awaited upon!!
            if (!_queueOperator.IsEventDriven)
            {
                _queueOperator.KeepExtendingLeaseAsync(result.PollingResult, TimeSpan.FromSeconds(30),
                    cancellationTokenSource.Token).SafeObserve();
            }

            await ProcessEvent(actor, result.PollingResult, _queueOperator);

            if (!_queueOperator.IsEventDriven)
            {
                await _queueOperator.CommitAsync(result.PollingResult);
            }

            TheTrace.TraceInformation("Processing succeeded. Id: {0} Queue: {1} ", result.PollingResult.Id, _actorDescriptor.SourceQueueName);
        }
        catch (Exception exception)
        {
            TheTrace.TraceInformation("Processing failed. Id: {0} Queue: {1} ", result.PollingResult.Id, _actorDescriptor.SourceQueueName);
            TheTrace.TraceError(exception.ToString());
            cancellationTokenSource.Cancel();
            _queueOperator.AbandonAsync(result.PollingResult).SafeObserve().Wait();
        }
        finally
        {
            if (result.IsSuccessful)
            {
                TryDisposeMessage(result.PollingResult);
            }

            _serviceLocator.ReleaseService(actor);
        }
    }

    return result.IsSuccessful;
}
private async Task<CloudTable> GetTable()
{
    if (!await _table.ExistsAsync())
    {
        try
        {
            await _table.CreateAsync();
        }
        catch (Exception exception)
        {
            TheTrace.TraceError(exception.ToString());
        }
    }

    return _table;
}
private void TruncateLogUpToIndex(long index)
{
    using (var tx = _env.BeginTransaction())
    {
        try
        {
            tx.DeleteUpToValue(_logDb, LogKey, index);
            tx.Commit();
        }
        catch
        {
            TheTrace.TraceError($"TruncateLogUpToIndex failed for index {index} when last index was {this.LastIndex}");
            tx.Abort();
            throw;
        }
    }
}
public async Task ProcessEventsAsync(PartitionContext context, IEnumerable<EventData> messages)
{
    try
    {
        var lazyEnumerable = messages.SelectMany(ev => _parser.Parse(ev.GetBodyStream, null, _source));
        await _elasticsearchBatchPusher.PushAll(lazyEnumerable, _source);
        if (_timer.Elapsed > _checkpointInterval)
        {
            _timer.Restart();
            await context.CheckpointAsync();
        }
    }
    catch (Exception e)
    {
        TheTrace.TraceError(e.ToString());
    }
}
/// <summary>
/// Custom Source Properties:
/// 1- Parser
/// 2- EventHubName
/// 3- StorageConnectionString
/// </summary>
/// <param name="pusher"></param>
/// <param name="source"></param>
public EventHubConsumer(NestBatchPusher pusher, DiagnosticsSourceSummary source)
{
    this._pusher = pusher;
    this.Source = source;
    _parser = FactoryHelper.Create<IParser>(source.DynamicProperties["Parser"].ToString());
    _eventProcessorHost = new EventProcessorHost(
        "ConveyorBelt",
        source.DynamicProperties["EventHubName"].ToString(),
        EventHubConsumerGroup.DefaultGroupName,
        source.ConnectionString,
        source.DynamicProperties["StorageConnectionString"].ToString());

    var options = new EventProcessorOptions();
    options.ExceptionReceived += (sender, e) => { TheTrace.TraceError(e.Exception.ToString()); };
    _eventProcessorHost.RegisterEventProcessorFactoryAsync(this, options).Wait();
}
public static T WrapException<T>(this Func<T> func, T exceptionResult, Action<Exception> handler = null)
{
    try
    {
        return func();
    }
    catch (Exception exception)
    {
        if (handler == null)
        {
            TheTrace.TraceError(exception.ToString());
        }
        else
        {
            handler(exception);
        }

        return exceptionResult;
    }
}
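// For reference: a minimal usage sketch of the WrapException extension above (a sketch, not part of the
// original source). FetchShardCount and the fallback value of 1 are hypothetical; only the WrapException
// signature and TheTrace come from the snippets in this listing.
private static int GetShardCountSafely()
{
    Func<int> fetchShardCount = () => FetchShardCount(); // hypothetical call that may throw

    // Falls back to 1 and, since no handler is supplied, traces the exception via TheTrace.TraceError.
    return fetchShardCount.WrapException(1);

    // Alternatively, pass a handler to take over the tracing:
    // fetchShardCount.WrapException(1, ex => TheTrace.TraceWarning(ex.ToString()));
}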
private async Task<CloudTable> GetTable(string tableName)
{
    var account = CloudStorageAccount.Parse(_connectionString);
    var client = account.CreateCloudTableClient();
    var table = client.GetTableReference(tableName);
    if (!table.Exists())
    {
        try
        {
            await table.CreateAsync();
        }
        catch (Exception exception)
        {
            TheTrace.TraceError(exception.ToString());
        }
    }

    return table;
}
private void Sisyphus()
{
    while (!_cancel.IsCancellationRequested)
    {
        try
        {
            var job = _q.Take(_cancel.Token);

            // yes, running async work synchronously because we cannot leave these jobs to the threadpool.
            // They are too important for that; we cannot afford to face thread exhaustion.
            job.DoAsync(_cancel.Token).ConfigureAwait(false).GetAwaiter().GetResult();
            TheTrace.TraceInformation($"Job in the worker {this.Name} finished. Current QueueLength is {QueueDepth}.");
        }
        catch (OperationCanceledException)
        {
            // cancellation was requested - OK
        }
        catch (Exception e)
        {
            TheTrace.TraceError($"Job errored: {e}");
        }
    }
}
public async Task ScheduleSourcesAsync()
{
    if (!await ShouldScheduleAsync())
    {
        return;
    }

    var seconds = Convert.ToInt32(_configurationValueProvider.GetValue(ConfigurationKeys.ClusterLockDurationSeconds));
    var sources = _sourceConfiguration.GetSources();
    foreach (var source in sources)
    {
        try
        {
            var lockToken = new LockToken(source.ToTypeKey());
            if (!(await _lockStore.TryLockAsync(lockToken, tries: 0, aquireTimeoutMilliseconds: 100,
                timeoutMilliseconds: seconds * 1000))) // if tries < 1, BeeHive bumps it up to 1
            {
                TheTrace.TraceInformation("I could NOT be master for {0}", source.ToTypeKey());
                continue;
            }

            var resultSource = await TryScheduleSourceAsync(source);
            if (resultSource != null)
            {
                _sourceConfiguration.UpdateSource(resultSource);
                TheTrace.TraceInformation("MasterScheduler - Updated {0}", resultSource.ToTypeKey());
            }

            await _lockStore.ReleaseLockAsync(lockToken);
        }
        catch (Exception e)
        {
            TheTrace.TraceError(e.ToString());
        }
    }
}
public T GetProperty<T>(string name)
{
    if (_entity.Properties.ContainsKey(name))
    {
        try
        {
            if (typeof(T) == typeof(DateTimeOffset))
            {
                return (T)(object)new DateTimeOffset((DateTime)_entity.Properties[name].PropertyAsObject);
            }

            return (T)_entity.Properties[name].PropertyAsObject;
        }
        catch (Exception)
        {
            TheTrace.TraceError("Failed to convert {0} to {1}", name, typeof(T).Name);
            throw;
        }
    }
    else
    {
        return default(T);
    }
}
static Task ExceptionReceivedHandler(ExceptionReceivedEventArgs args)
{
    TheTrace.TraceError("Boy, this should never have happened since we have exception handling in the Factory Actor: {0}", args.Exception);
    return Task.FromResult(false);
}
private async Task PushbatchAsync()
{
    if (_batch.Count == 0)
    {
        return;
    }

    try
    {
        int retry = 0;
        List<int> statuses = null;
        do
        {
            var responseMessage = await _httpClient.PostAsync(_esUrl + "_bulk",
                new StringContent(_batch.ToString(), Encoding.UTF8, "application/json"));
            var content = responseMessage.Content == null
                ? ""
                : (await responseMessage.Content.ReadAsStringAsync());

            if (!responseMessage.IsSuccessStatusCode)
            {
                throw new ApplicationException(string.Format("Unsuccessful ES bulk: {0} - {1}", responseMessage.StatusCode, content));
            }

            dynamic j = JObject.Parse(content);
            if (j == null || j.items == null)
            {
                throw new ApplicationException(string.Format("Unsuccessful ES bulk - items null: {0}", content));
            }

            var items = (JArray)j.items;
            statuses = items.Children<JObject>().Select(x => x.Properties().First().Value["status"].Value<int>()).ToList();
            if (statuses.Any(y => y < 200 || (y > 299 && y != 429)))
            {
                TheTrace.TraceWarning("LOOK!! We had some errors from ES bulk at retry {1}: {0}", content, retry);
            }

            if (statuses.Any(y => y == 429))
            {
                var timeSpan = _interval.Next();
                TheTrace.TraceWarning("LOOK!! Got 429 -> backing off for {0} seconds", timeSpan.TotalSeconds);
                Thread.Sleep(timeSpan);
            }
            else
            {
                _interval.Reset();
            }

            TheTrace.TraceInformation("ConveyorBelt_Pusher: Pushing {1} records to {0} [retry: {2}]", _esUrl, _batch.Count, retry);
        } while (_batch.Prune(statuses) > 0 && retry++ < 3);

        if (_batch.Count > 0)
        {
            TheTrace.TraceWarning("WARNING!!! Some residual documents could not be inserted even after retries: {0}", _batch.Count);
            _batch.Clear();
        }
    }
    catch (Exception e)
    {
        TheTrace.TraceError(e.ToString());
        throw;
    }
}
public async Task ScheduleSourcesAsync()
{
    var sources = _sourceConfiguration.GetSources();
    foreach (var source in sources)
    {
        try
        {
            TheTrace.TraceInformation("MasterScheduler - Scheduling {0}", source.ToTypeKey());
            if (!source.IsActive.HasValue || !source.IsActive.Value)
            {
                TheTrace.TraceInformation("MasterScheduler - NOT active: {0}", source.ToTypeKey());
                continue;
            }

            await SetupMappingsAsync(source);
            TheTrace.TraceInformation("MasterScheduler - Finished Mapping setup: {0}", source.ToTypeKey());

            if (!source.LastScheduled.HasValue)
            {
                source.LastScheduled = DateTimeOffset.UtcNow.AddYears(-1);
            }

            // skip if it has been scheduled recently
            if (source.LastScheduled.Value.AddMinutes(source.SchedulingFrequencyMinutes.Value) > DateTimeOffset.UtcNow)
            {
                TheTrace.TraceInformation("MasterScheduler - Nothing to do with {0}. LastScheduled in Future {1}",
                    source.ToTypeKey(), source.LastScheduled.Value);
                continue;
            }

            var schedulerType = Assembly.GetExecutingAssembly().GetType(source.SchedulerType) ??
                                Type.GetType(source.SchedulerType);
            if (schedulerType == null)
            {
                source.ErrorMessage = "Could not find SchedulerType: " + source.SchedulerType;
            }

            var scheduler = (ISourceScheduler)_locator.GetService(schedulerType);
            var result = await scheduler.TryScheduleAsync(source);
            TheTrace.TraceInformation("MasterScheduler - Got result for TryScheduleAsync in {0}. Success => {1}",
                source.ToTypeKey(), result.Item1);
            if (result.Item2)
            {
                await _eventQueueOperator.PushBatchAsync(result.Item1);
            }

            source.ErrorMessage = string.Empty;
            TheTrace.TraceInformation("MasterScheduler - Finished Scheduling {0}", source.ToTypeKey());
            source.LastScheduled = DateTimeOffset.UtcNow;
        }
        catch (Exception e)
        {
            TheTrace.TraceError(e.ToString());
            source.ErrorMessage = e.ToString();
        }

        _sourceConfiguration.UpdateSource(source);
        TheTrace.TraceInformation("MasterScheduler - Updated {0}", source.ToTypeKey());
    }
}
private async Task<DiagnosticsSource> TryScheduleSourceAsync(DiagnosticsSource source)
{
    try
    {
        source = _sourceConfiguration.RefreshSource(source);
        TheTrace.TraceInformation("MasterScheduler - Scheduling {0}", source.ToTypeKey());
        if (!source.IsActive.HasValue || !source.IsActive.Value)
        {
            TheTrace.TraceInformation("MasterScheduler - NOT active: {0}", source.ToTypeKey());
            return null;
        }

        var createMappings = _configurationValueProvider.GetValue(ConfigurationKeys.EsCreateMappings);
        if (Convert.ToBoolean(createMappings))
        {
            await SetupMappingsAsync(source);
        }

        if (!source.LastScheduled.HasValue)
        {
            source.LastScheduled = DateTimeOffset.UtcNow.AddDays(-1);
        }

        // skip if it has been scheduled recently
        if (source.LastScheduled.Value.AddMinutes(source.SchedulingFrequencyMinutes.Value) > DateTimeOffset.UtcNow)
        {
            TheTrace.TraceInformation("MasterScheduler - Nothing to do with {0}. LastScheduled in Future {1}",
                source.ToTypeKey(), source.LastScheduled.Value);
            return null;
        }

        _telemetryProvider.WriteTelemetry(
            "MasterScheduler duration since last scheduled",
            (long)(DateTime.UtcNow - source.LastScheduled).Value.TotalMilliseconds,
            source.ToTypeKey());

        var schedulerType = Assembly.GetExecutingAssembly().GetType(source.SchedulerType) ??
                            Type.GetType(source.SchedulerType);
        if (schedulerType == null)
        {
            source.ErrorMessage = "Could not find SchedulerType: " + source.SchedulerType;
        }
        else
        {
            await _scheduleDurationInstrumentor.InstrumentAsync(async () =>
            {
                var scheduler = (ISourceScheduler)_locator.GetService(schedulerType);
                var result = await scheduler.TryScheduleAsync(source);
                source.LastScheduled = DateTimeOffset.UtcNow;
                TheTrace.TraceInformation("MasterScheduler - Got result for TryScheduleAsync in {0}. Success => {1}",
                    source.ToTypeKey(), result.Item1);
                if (result.Item2)
                {
                    await _eventQueueOperator.PushBatchAsync(result.Item1);
                }

                source.ErrorMessage = string.Empty;
                TheTrace.TraceInformation("MasterScheduler - Finished Scheduling {0}", source.ToTypeKey());
            }, source.ToTypeKey());
        }

        return source;
    }
    catch (Exception e)
    {
        TheTrace.TraceError(e.ToString());
        source.ErrorMessage = e.ToString();
        return source;
    }
}
public void TraceErrorDoesNotThrowExceptionWithMissingParams()
{
    TheTrace.TraceError("chappi {1} {0}", "hapachap");
}

public void TraceErrorDoesNotThrowExceptionWithNoParams()
{
    TheTrace.TraceError("chappi {1} {0}");
}

public void TraceErrorDoesNotThrowException()
{
    TheTrace.TraceError("chappi {1} {0}", "hapachap", 7979);
}