/// <summary>
/// Handles a batch of events by accumulating each event's payload into the running total.
/// </summary>
/// <param name="batch">The batch of events to process.</param>
/// <param name="sequence">The sequence number of the first event in the batch.</param>
public void OnBatch(EventBatch<XEvent> batch, long sequence)
{
    foreach (var evt in batch)
    {
        Sum += evt.Data;
    }
}
/// <summary>
/// Forwards every event in the batch to the configured callback.
/// </summary>
/// <param name="batch">The batch of events to dispatch.</param>
/// <param name="sequence">The sequence number of the first event in the batch.</param>
public void OnBatch(EventBatch<T> batch, long sequence)
{
    foreach (var evt in batch)
    {
        OnEventAction.Invoke(evt);
    }
}
/// <summary>
/// Advances the stream to the next available batch of events.
/// First publishes consumption of the current batch by moving the consumer
/// sequence forward, then waits — honouring cancellation — until at least one
/// newly published event is available, and exposes it via <c>Current</c>.
/// </summary>
/// <returns><c>true</c> once a new batch is available; never returns <c>false</c> — the loop only exits via cancellation.</returns>
public async ValueTask<bool> MoveNextAsync()
{
    // Mark the previously returned batch as consumed before waiting for more.
    _sequence.SetValue(_sequence.Value + Current.Length);
    while (true)
    {
        var currentSequence = _sequence.Value;
        var nextSequence = currentSequence + 1;
        _linkedTokenSource.Token.ThrowIfCancellationRequested();
        var waitResult = await _asyncEventStream._waitStrategy.WaitForAsync(nextSequence, _asyncEventStream._dependentSequences, _linkedTokenSource.Token).ConfigureAwait(false);
        // Spurious wake-up: the wait strategy returned before our target
        // sequence was reached — wait again.
        if (waitResult.UnsafeAvailableSequence < nextSequence)
        {
            continue;
        }
        // The "unsafe" available sequence must be narrowed to the highest
        // sequence the sequencer has actually confirmed as published.
        var availableSequence = _asyncEventStream._sequencer.GetHighestPublishedSequence(nextSequence, waitResult.UnsafeAvailableSequence);
        if (availableSequence >= nextSequence)
        {
            Current = _asyncEventStream._dataProvider.GetBatch(nextSequence, availableSequence);
            return (true);
        }
    }
}
/// <summary>
/// Returns the next batch of events in the stream, waiting indefinitely if
/// a new batch is not immediately available.
/// </summary>
/// <exception cref="System.IO.IOException">see <see cref="Poll()"/></exception>
/// <exception cref="Org.Apache.Hadoop.Hdfs.Inotify.MissingEventsException">see <see cref="Poll()"/></exception>
/// <exception cref="System.Exception">if the calling thread is interrupted</exception>
public virtual EventBatch Take()
{
    TraceScope scope = Trace.StartSpan("inotifyTake", traceSampler);
    EventBatch next = null;
    try
    {
        int nextWaitMin = InitialWaitMs;
        for (next = Poll(); next == null; next = Poll())
        {
            // Sleep for a random period between nextWaitMin and nextWaitMin * 2
            // to avoid stampedes at the NN if there are multiple clients.
            int sleepTime = nextWaitMin + rng.Next(nextWaitMin);
            Log.Debug("take(): poll() returned null, sleeping for {} ms", sleepTime);
            Sharpen.Thread.Sleep(sleepTime);
            // Double the back-off each round, capped so the maximum sleep is 2 minutes.
            nextWaitMin = Math.Min(60000, nextWaitMin * 2);
        }
    }
    finally
    {
        scope.Close();
    }
    return next;
}
/// <summary>
/// Iterates the batch as a span and mutates each event's value in place.
/// </summary>
/// <param name="batch">The batch whose events are updated.</param>
/// <param name="sequence">The sequence number of the first event in the batch.</param>
public void UseEventAsSpan(EventBatch<Event> batch, long sequence)
{
    var events = batch.AsSpan();
    foreach (var evt in events)
    {
        evt.Value = 42;
    }
}
// Expected because the old active will be unable to flush the
// end-of-segment op since it is fenced.
/// <summary>
/// Verifies that a timed Poll picks up an event that is published while the
/// poll is waiting: a background task creates a directory one second after
/// the poll starts, and the poll (with a generous 5-second timeout) must
/// return that single create event.
/// </summary>
/// <exception cref="System.IO.IOException"/>
/// <exception cref="System.Exception"/>
/// <exception cref="Org.Apache.Hadoop.Hdfs.Inotify.MissingEventsException"/>
public virtual void TestReadEventsWithTimeout()
{
    Configuration conf = new HdfsConfiguration();
    MiniQJMHACluster cluster = new MiniQJMHACluster.Builder(conf).Build();
    try
    {
        cluster.GetDfsCluster().WaitActive();
        cluster.GetDfsCluster().TransitionToActive(0);
        DFSClient client = new DFSClient(cluster.GetDfsCluster().GetNameNode(0).GetNameNodeAddress(), conf);
        DFSInotifyEventInputStream eis = client.GetInotifyEventStream();
        // Schedule the directory creation to happen 1 second from now, i.e.
        // while the Poll below is already blocked waiting.
        ScheduledExecutorService ex = Executors.NewSingleThreadScheduledExecutor();
        ex.Schedule(new _Runnable_463(client), 1, TimeUnit.Seconds);
        // test will fail
        // A very generous wait period -- the edit will definitely have been
        // processed by the time this is up.
        EventBatch batch = eis.Poll(5, TimeUnit.Seconds);
        NUnit.Framework.Assert.IsNotNull(batch);
        NUnit.Framework.Assert.AreEqual(1, batch.GetEvents().Length);
        NUnit.Framework.Assert.IsTrue(batch.GetEvents()[0].GetEventType() == Event.EventType.Create);
        NUnit.Framework.Assert.AreEqual("/dir", ((Event.CreateEvent)batch.GetEvents()[0]).GetPath());
    }
    finally
    {
        cluster.Shutdown();
    }
}
/// <summary>
/// Stores a batch to the stream. Will protect stream integrity
/// with regards to version.
///
/// TODO: We need to return some richer model so that the outer scope can
/// distinguish between a version exception and other exceptions. This is
/// needed so that the outer scope can load the new events, apply them to
/// state and retry the cmd.
/// </summary>
/// <param name="databaseId">Identifier of the database holding the stream.</param>
/// <param name="batch">The batch to append; its <c>StreamKey</c> selects the target stream.</param>
/// <returns>A successful result, or a failure result carrying the exception message.</returns>
public async Task<Result<bool>> StoreBatchAsync(string databaseId, EventBatch batch)
{
    // Since the streams only have insert permissions, the version of a stream
    // increases in a deterministic manner, so an event's sequence number can
    // be used to derive the version number to supply to the network when
    // mutating the MD.
    // NOTE(review): the expected version is derived from the FIRST event in
    // the batch, not the last — confirm that is the intended protocol.
    try
    {
        var (streamVersion, mdEntryVersion) = await GetStreamVersionAsync(databaseId, batch.StreamKey);
        if (streamVersion == -1)
        {
            // Stream does not exist yet; create it seeded with this batch.
            await CreateNewStreamAsync(databaseId, batch);
        }
        else
        {
            // Optimistic concurrency check: the batch must append directly
            // after the current head of the stream.
            var expectedVersion = batch.Body.First().MetaData.SequenceNumber - 1;
            if (streamVersion != expectedVersion)
            {
                throw new InvalidOperationException($"Concurrency exception! Expected stream version {expectedVersion}, but found {streamVersion}.");
            }
            return (await StoreToExistingStream(databaseId, batch, mdEntryVersion)); // todo: distinguish MD version exception result from other errors
        }
        return (Result.OK(true));
    }
    catch (Exception ex)
    {
        return (Result.Fail<bool>(ex.Message));
    }
}
/// <summary>
/// Parses a string to a <see cref="EventBatch{TEvent}"/> from either a <see cref="CloudEvent"/> or <see cref="EventGridEvent"/> implementation.
/// </summary>
/// <param name="rawJsonBody">Raw JSON body</param>
/// <param name="sessionId">Session id for event grid message</param>
public static EventBatch<Event> Parse(string rawJsonBody, string sessionId)
{
    Guard.NotNullOrWhitespace(rawJsonBody, nameof(rawJsonBody));
    Guard.NotNullOrWhitespace(sessionId, nameof(sessionId));

    var events = new List<Event>();
    foreach (var eventObject in JArray.Parse(rawJsonBody).Children<JObject>())
    {
        var rawEvent = eventObject.ToString();
        // CloudEvents payloads are detected via their version property;
        // anything else is treated as a native Event Grid event.
        if (eventObject.ContainsKey("cloudEventsVersion"))
        {
            var formatter = new JsonEventFormatter();
            var cloudEvent = formatter.DecodeStructuredEvent(Encoding.UTF8.GetBytes(rawEvent));
            events.Add(cloudEvent);
        }
        else
        {
            var gridEvent = JsonConvert.DeserializeObject<EventGridEvent>(rawEvent, JsonSerializerSettings);
            events.Add(gridEvent);
        }
    }

    return new EventBatch<Event>(sessionId, events);
}
/// <summary>
/// Loads a raw JSON payload into an abstracted event with a specific <paramref name="sessionId"/>.
/// </summary>
/// <param name="rawJson">The raw JSON payload, representing an event that can be handled by EventGrid.</param>
/// <param name="sessionId">The reference ID for this event parsing session.</param>
public static EventBatch<Event> Parse(string rawJson, string sessionId)
{
    Guard.NotNullOrWhitespace(rawJson, nameof(rawJson), "Cannot parse a blank raw JSON payload to an abstracted event");
    Guard.NotNullOrWhitespace(sessionId, nameof(sessionId), "Cannot parse a raw JSON payload with a blank session ID");

    JToken parsed = JToken.Parse(rawJson);
    switch (parsed.Type)
    {
        case JTokenType.Array:
            // A JSON array: parse every object element into an abstracted event.
            List<Event> eventsFromArray = parsed.Children<JObject>()
                                                .Select(ParseJObject)
                                                .ToList();
            return new EventBatch<Event>(sessionId, eventsFromArray);

        case JTokenType.Object:
            // A single JSON object: wrap the parsed event in a one-element batch.
            Event singleEvent = ParseJObject((JObject)parsed);
            return new EventBatch<Event>(sessionId, new List<Event> { singleEvent });

        default:
            throw new InvalidOperationException(
                "Couldn't find a correct JSON structure (array or object) to parse the EventGridEvent/CloudEvents from");
    }
}
/// <summary>
/// Commits the aggregate's uncommitted events to the repository as one batch.
/// </summary>
/// <returns><c>true</c> when the batch was saved and the events cleared; <c>false</c> when the repository rejected the save.</returns>
/// <exception cref="InvalidOperationException">When called before executing, or when there is nothing left to commit.</exception>
public async Task<bool> CommitAsync()
{
    if (_instance == null)
    {
        throw new InvalidOperationException("Cannot commit before executing!");
    }

    var uncommitted = _instance.GetUncommittedEvents();
    if (uncommitted.Count == 0)
    {
        throw new InvalidOperationException("Already committed.");
    }

    // Package every uncommitted event into its storage representation,
    // stamping each with the command's correlation and causation IDs.
    var eventData = new List<EventData>();
    foreach (var e in uncommitted)
    {
        eventData.Add(new EventData(
            e.Payload,
            _cmd.CorrelationId,
            _cmd.Id,
            e.EventClrType,
            e.Id,
            e.Name,
            e.SequenceNumber,
            e.TimeStamp));
    }

    var batch = new EventBatch(_instance.StreamKey, _cmd.Id, eventData);
    if (!await _repo.Save(batch))
    {
        return false;
    }

    _instance.ClearUncommittedEvents();
    return true;
}
/// <summary>
/// Reports a failure while processing a batch and rethrows it wrapped for the caller.
/// NOTE(review): ApplicationException is discouraged by the Framework Design
/// Guidelines, but changing the type would break callers that catch it.
/// </summary>
/// <param name="ex">The exception raised by the event handler.</param>
/// <param name="sequence">The sequence number of the first event in the failing batch.</param>
/// <param name="batch">The batch that was being processed when the exception occurred.</param>
public void HandleEventException(Exception ex, long sequence, EventBatch<T> batch)
{
    var details = $"Exception processing sequence {sequence} for batch of {batch.Length} events, first event {batch[0]}: {ex}";
    Console.WriteLine(details);
    throw new ApplicationException(details, ex);
}
/// <summary>
/// Loads a raw JSON payload into an abstracted event.
/// </summary>
/// <param name="rawJson">The raw JSON payload, representing an event that can be handled by EventGrid.</param>
public static EventBatch<Event> Parse(string rawJson)
{
    Guard.NotNullOrWhitespace(rawJson, nameof(rawJson), "Cannot parse a blank raw JSON payload to an abstracted event");

    // Delegate to the session-aware overload with a freshly generated session ID.
    string generatedSessionId = Guid.NewGuid().ToString();
    return Parse(rawJson, sessionId: generatedSessionId);
}
/// <summary>
/// Signals the countdown latch once per event in the batch; the event
/// payloads themselves are not used.
/// </summary>
/// <param name="batch">The batch whose size drives the number of signals.</param>
/// <param name="sequence">The sequence number of the first event in the batch.</param>
public ValueTask OnBatch(EventBatch<T> batch, long sequence)
{
    var span = batch.AsSpan();
    for (var i = 0; i < span.Length; i++)
    {
        _countDownLatch.Signal();
    }

    return new ValueTask();
}
/// <summary>
/// Republishes every event's value onto the downstream ring buffer.
/// </summary>
/// <param name="batch">The batch of events to forward.</param>
/// <param name="sequence">The sequence number of the first event in the batch.</param>
public void OnBatch(EventBatch<PerfEvent> batch, long sequence)
{
    foreach (var evt in batch)
    {
        // Claim a slot, copy the payload, then publish the claimed sequence.
        var claimed = _buffer.Next();
        _buffer[claimed].Value = evt.Value;
        _buffer.Publish(claimed);
    }
}
/// <summary>
/// Busy-polls the inotify stream until it yields a non-null batch.
/// Intended for tests where the event is known to arrive shortly.
/// </summary>
/// <param name="eis">The inotify event stream to poll.</param>
/// <returns>The first non-null batch returned by <c>Poll()</c>.</returns>
/// <exception cref="System.IO.IOException"/>
/// <exception cref="Org.Apache.Hadoop.Hdfs.Inotify.MissingEventsException"/>
public static EventBatch WaitForNextEvents(DFSInotifyEventInputStream eis)
{
    EventBatch batch = eis.Poll();
    while (batch == null)
    {
        batch = eis.Poll();
    }
    return batch;
}
/// <summary>
/// Slicing a batch with out-of-range start/length arguments must throw.
/// </summary>
public void ShouldThrowOnInvalidSliceArguments(int start, int length)
{
    // Arrange
    var events = new[] { new TestEvent(), new TestEvent(), new TestEvent() };
    var batch = new EventBatch<TestEvent>(events, 0, 3);

    // Act & Assert
    Assert.Throws<ArgumentOutOfRangeException>(() => batch.Slice(start, length));
}
/// <summary>
/// Loads a raw JSON payload into an abstracted event.
/// </summary>
/// <param name="rawJson">The raw JSON payload, representing an event that can be handled by EventGrid.</param>
public static EventBatch<Event> Parse(byte[] rawJson)
{
    Guard.NotNull(rawJson, nameof(rawJson), "Cannot parse a 'null' series of bytes raw JSON payload to an abstracted event");
    Guard.NotLessThanOrEqualTo(rawJson.Length, threshold: 0, "Cannot parse a series of bytes of a length <= 0 to an abstracted event");

    // Delegate to the session-aware overload with a freshly generated session ID.
    string generatedSessionId = Guid.NewGuid().ToString();
    return Parse(rawJson, sessionId: generatedSessionId);
}
/// <summary>
/// Dispatches each event in the batch to the configured callback, then yields
/// so the handler completes asynchronously.
/// </summary>
/// <param name="batch">The batch of events to dispatch.</param>
/// <param name="sequence">The sequence number of the first event in the batch.</param>
public async ValueTask OnBatch(EventBatch<T> batch, long sequence)
{
    foreach (var evt in batch)
    {
        OnEventAction.Invoke(evt);
    }

    await Task.Yield();
}
/// <summary>
/// A valid BlobCreated sample must parse into a single Event Grid event whose
/// envelope and storage payload match the sample's known values.
/// </summary>
public void ParseImplicit_ValidBlobCreatedEvent_ShouldSucceed()
{
    // Arrange: known values baked into EventSamples.BlobCreateEvent.
    const string topic = "/subscriptions/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx";
    const string subject = "/blobServices/default/containers/event-container/blobs/finnishjpeg";
    const string eventType = "Microsoft.Storage.BlobCreated";
    const string id = "5647b67c-b01e-002d-6a47-bc01ac063360";
    const string dataVersion = "1";
    const string metadataVersion = "1";
    const string api = "PutBlockList";
    const string clientRequestId = "5c24a322-35c9-4b46-8ef5-245a81af7037";
    const string requestId = "5647b67c-b01e-002d-6a47-bc01ac000000";
    const string eTag = "0x8D58A5F0C6722F9";
    const string contentType = "image/jpeg";
    const int contentLength = 29342;
    const string blobType = "BlockBlob";
    const string url = "https://sample.blob.core.windows.net/event-container/finnish.jpeg";
    const string sequencer = "00000000000000000000000000000094000000000017d503";
    const string batchId = "69cd1576-e430-4aff-8153-570934a1f6e1";
    string rawEvent = EventSamples.BlobCreateEvent;
    var eventTime = DateTimeOffset.Parse("2018-03-15T10:25:17.7535274Z");

    // Act
    EventBatch<Event> eventGridMessage = EventParser.Parse(rawEvent);

    // Assert: exactly one event with the expected envelope values.
    Assert.NotNull(eventGridMessage);
    Assert.NotNull(eventGridMessage.Events);
    Event eventGridEvent = Assert.Single(eventGridMessage.Events);
    Assert.NotNull(eventGridEvent);
    Assert.Equal(topic, eventGridEvent.Topic);
    Assert.Equal(subject, eventGridEvent.Subject);
    Assert.Equal(eventType, eventGridEvent.EventType);
    Assert.Equal(eventTime, eventGridEvent.EventTime);
    Assert.Equal(id, eventGridEvent.Id);
    Assert.Equal(dataVersion, eventGridEvent.DataVersion);
    Assert.Equal(metadataVersion, eventGridEvent.MetadataVersion);

    // Assert: the storage payload round-trips with the expected values.
    var eventPayload = eventGridEvent.GetPayload<StorageBlobCreatedEventData>();
    Assert.NotNull(eventPayload);
    Assert.Equal(api, eventPayload.Api);
    Assert.Equal(clientRequestId, eventPayload.ClientRequestId);
    Assert.Equal(requestId, eventPayload.RequestId);
    Assert.Equal(eTag, eventPayload.ETag);
    Assert.Equal(contentType, eventPayload.ContentType);
    Assert.Equal(contentLength, eventPayload.ContentLength);
    Assert.Equal(blobType, eventPayload.BlobType);
    Assert.Equal(url, eventPayload.Url);
    Assert.Equal(sequencer, eventPayload.Sequencer);
    Assert.NotNull(eventPayload.StorageDiagnostics);
    var storageDiagnostics = Assert.IsType<JObject>(eventPayload.StorageDiagnostics);
    Assert.Equal(batchId, storageDiagnostics["batchId"]);
}
/// <summary>
/// Loads a raw JSON payload into an abstracted event with a specific <paramref name="sessionId"/>.
/// </summary>
/// <param name="rawJson">The raw JSON payload, representing an event that can be handled by EventGrid.</param>
/// <param name="sessionId">The reference ID for this event parsing session.</param>
public static EventBatch<Event> Parse(byte[] rawJson, string sessionId)
{
    Guard.NotNull(rawJson, nameof(rawJson), "Cannot parse a 'null' series of bytes raw JSON payload to an abstracted event");
    Guard.NotLessThanOrEqualTo(rawJson.Length, threshold: 0, "Cannot parse a series of bytes of a length <= 0 to an abstracted event");

    // Decode the UTF-8 payload and reuse the string-based parsing overload.
    string decodedJson = Encoding.UTF8.GetString(rawJson);
    return Parse(decodedJson, sessionId);
}
/// <summary>
/// Appends a batch to the in-memory store, keyed by its event type.
/// </summary>
/// <param name="batch">The batch to store; must not be null.</param>
/// <returns>A completed task — the store is synchronous.</returns>
public Task Save<T>(EventBatch<T> batch)
{
    Check.NotNull(batch, nameof(batch));

    // The backing list is shared across callers, so guard the mutation.
    lock (_store)
    {
        _store.Add(new StorageItem(typeof(T), batch));
    }

    return Task.CompletedTask;
}
/// <summary>
/// A valid BlobCreated CloudEvents sample must parse into a single CloudEvent
/// whose envelope and storage payload match the sample's known values.
/// </summary>
public void ParseEventExplicit_ValidStorageBlobCreatedCloudEvent_ShouldSucceed()
{
    // Arrange: known values baked into EventSamples.AzureBlobStorageCreatedCloudEvent.
    const CloudEventsSpecVersion cloudEventsVersion = CloudEventsSpecVersion.V0_1;
    const string eventType = "Microsoft.Storage.BlobCreated",
                 source = "/subscriptions/{subscription-id}/resourceGroups/{resource-group}/providers/Microsoft.Storage/storageAccounts/{storage-account}#blobServices/default/containers/{storage-container}/blobs/{new-file}",
                 eventId = "173d9985-401e-0075-2497-de268c06ff25",
                 eventTime = "2018-04-28T02:18:47.1281675Z";
    const string api = "PutBlockList",
                 clientRequestId = "6d79dbfb-0e37-4fc4-981f-442c9ca65760",
                 requestId = "831e1650-001e-001b-66ab-eeb76e000000",
                 etag = "0x8D4BCC2E4835CD0",
                 contentType = "application/octet-stream",
                 blobType = "BlockBlob",
                 url = "https://oc2d2817345i60006.blob.core.windows.net/oc2d2817345i200097container/oc2d2817345i20002296blob",
                 sequencer = "00000000000004420000000000028963",
                 batchId = "b68529f3-68cd-4744-baa4-3c0498ec19f0";
    const long contentLength = 524_288;
    string rawEvent = EventSamples.AzureBlobStorageCreatedCloudEvent;

    // Act
    EventBatch<Event> eventBatch = EventParser.Parse(rawEvent);

    // Assert: exactly one CloudEvent with the expected envelope values.
    Assert.NotNull(eventBatch);
    Assert.NotNull(eventBatch.Events);
    CloudEvent cloudEvent = Assert.Single(eventBatch.Events);
    Assert.NotNull(cloudEvent);
    Assert.Equal(cloudEventsVersion, cloudEvent.SpecVersion);
    Assert.Equal(eventType, cloudEvent.Type);
    Assert.Equal(source, cloudEvent.Source.OriginalString);
    Assert.Equal(eventId, cloudEvent.Id);
    Assert.Equal(eventTime, cloudEvent.Time.GetValueOrDefault().ToString("O"));

    // Assert: the storage payload round-trips with the expected values.
    var eventPayload = cloudEvent.GetPayload<StorageBlobCreatedEventData>();
    Assert.NotNull(eventPayload);
    Assert.Equal(api, eventPayload.Api);
    Assert.Equal(clientRequestId, eventPayload.ClientRequestId);
    Assert.Equal(requestId, eventPayload.RequestId);
    Assert.Equal(etag, eventPayload.ETag);
    Assert.Equal(contentType, eventPayload.ContentType);
    Assert.Equal(contentLength, eventPayload.ContentLength);
    Assert.Equal(blobType, eventPayload.BlobType);
    Assert.Equal(url, eventPayload.Url);
    Assert.Equal(sequencer, eventPayload.Sequencer);
    Assert.NotNull(eventPayload.StorageDiagnostics);
    var storageDiagnostics = Assert.IsType<JObject>(eventPayload.StorageDiagnostics);
    Assert.Equal(batchId, storageDiagnostics["batchId"]);
}
[TestMethod]
// Debug this method to be able to see debug output etc.
// NOTE(review): this is a crash-reproduction harness, not a real unit test —
// the infinite loop and empty catch blocks are deliberate (see inline notes).
public async Task WriteData()
{
    try
    {
        await InitApp();
        var version = -1;
        var dbId = GetRandomString(15);
        await _db.CreateDbAsync(dbId);
        // here we create a random db
        var streamKey = $"{dbId}@{0}";
        while (true)
        {
            try
            {
                var evt = new RaisedEvent(new NoteAdded(0, "someNote"))
                // create some data, in form of an event
                {
                    SequenceNumber = ++version
                    // protocol way of managing concurrent write to the stream
                };
                var events = new List<RaisedEvent> { evt };
                var data = events.Select(e => new EventData(
                    e.Payload,
                    Guid.NewGuid(),
                    Guid.NewGuid(),
                    e.EventClrType,
                    e.Id,
                    e.Name,
                    e.SequenceNumber,
                    e.TimeStamp))
                    .ToList();
                // protocol way of how to serialize and package the event data
                var batch = new EventBatch(streamKey, Guid.NewGuid(), data);
                // another protocol way of packaging the data
                var res = await _db.StoreBatchAsync(dbId, batch);
                // store the data to the db
                if (res.Error)
                {
                }
                Debug.WriteLine(version);
                // so we expect to reach ~22-30 entries before sudden crash. Sometimes more, sometimes less.
                //await Task.Delay(1000);
            }
            catch (Exception ex)
            {
            }
            // you can put breakpoint here, however, the big problem (and mystery) is that these do not catch anything, program just dies OR a NullReferenceException is reported in logs; "occurred in Unknown Module".
        }
    }
    catch (Exception ex)
    {
    }
    // you can put breakpoint here, however, the big problem (and mystery) is that these do not catch anything, program just dies OR a NullReferenceException is reported in logs; "occurred in Unknown Module".
}
/// <summary>
/// Accumulates each event's payload into the running total, and triggers
/// shutdown once the final batch of the ring buffer has been consumed.
/// </summary>
/// <param name="batch">The batch of events to process.</param>
/// <param name="sequence">The sequence number of the first event in the batch.</param>
public void OnBatch(EventBatch<XEvent> batch, long sequence)
{
    foreach (var evt in batch)
    {
        Sum += evt.Data;
    }

    // The last event of the run has sequence _ringBufferSize - 1, so the
    // batch ending exactly at _ringBufferSize means we are done.
    var endOfBatch = sequence + batch.Length;
    if (endOfBatch == _ringBufferSize)
    {
        _shutdown.Invoke();
    }
}
/// <summary>
/// Enumerating a batch that starts at offset 1 must yield exactly the
/// underlying array minus its first element.
/// </summary>
public void ShouldConvertBatchToArrayFromEnumerable()
{
    // Arrange
    var events = new[] { new TestEvent(), new TestEvent(), new TestEvent() };
    var batch = new EventBatch<TestEvent>(events, 1, 2);

    // Act
    var copy = batch.AsEnumerable().ToArray();

    // Assert
    Assert.AreEqual(events.Skip(1).ToArray(), copy);
}
/// <summary>
/// Slicing a batch must yield the same elements as slicing its span directly.
/// </summary>
public void ShouldGetSlice(int start, int length)
{
    // Arrange
    var events = new[] { new TestEvent(), new TestEvent(), new TestEvent() };
    var batch = new EventBatch<TestEvent>(events, 0, 3);

    // Act
    var slice = batch.Slice(start, length);

    // Assert
    var expected = batch.AsSpan().Slice(start, length).ToArray();
    Assert.AreEqual(expected, slice.ToArray());
}
/// <summary>
/// A parsed BlobCreated CloudEvent must convert to an EventGridEvent whose
/// envelope and storage payload match the sample's known values.
/// </summary>
public void ParseExplicitAsEventGridEvent_ValidStorageBlobCreatedCloudEvent_ShouldSucceed()
{
    // Arrange: known values baked into EventSamples.AzureBlobStorageCreatedCloudEvent.
    const string eventType = "Microsoft.Storage.BlobCreated",
                 eventId = "173d9985-401e-0075-2497-de268c06ff25",
                 eventTime = "2018-04-28T02:18:47.1281675";
    const string api = "PutBlockList",
                 clientRequestId = "6d79dbfb-0e37-4fc4-981f-442c9ca65760",
                 requestId = "831e1650-001e-001b-66ab-eeb76e000000",
                 etag = "0x8D4BCC2E4835CD0",
                 contentType = "application/octet-stream",
                 blobType = "BlockBlob",
                 url = "https://oc2d2817345i60006.blob.core.windows.net/oc2d2817345i200097container/oc2d2817345i20002296blob",
                 sequencer = "00000000000004420000000000028963",
                 batchId = "b68529f3-68cd-4744-baa4-3c0498ec19f0";
    const long contentLength = 524_288;
    string rawEvent = EventSamples.AzureBlobStorageCreatedCloudEvent;
    EventBatch<Event> eventBatch = EventParser.Parse(rawEvent);
    Assert.NotNull(eventBatch);
    Event @event = Assert.Single(eventBatch.Events);
    Assert.NotNull(@event);

    // Act
    EventGridEvent eventGridEvent = @event.AsEventGridEvent();

    // Assert: envelope survives the conversion.
    Assert.NotNull(eventGridEvent);
    Assert.Equal(eventType, eventGridEvent.EventType);
    Assert.Equal(eventId, eventGridEvent.Id);
    Assert.Equal(eventTime, eventGridEvent.EventTime.ToString("O"));

    // Assert: the storage payload survives the conversion.
    var eventPayload = eventGridEvent.GetPayload<StorageBlobCreatedEventData>();
    Assert.NotNull(eventPayload);
    Assert.Equal(api, eventPayload.Api);
    Assert.Equal(clientRequestId, eventPayload.ClientRequestId);
    Assert.Equal(requestId, eventPayload.RequestId);
    Assert.Equal(etag, eventPayload.ETag);
    Assert.Equal(contentType, eventPayload.ContentType);
    Assert.Equal(contentLength, eventPayload.ContentLength);
    Assert.Equal(blobType, eventPayload.BlobType);
    Assert.Equal(url, eventPayload.Url);
    Assert.Equal(sequencer, eventPayload.Sequencer);
    Assert.NotNull(eventPayload.StorageDiagnostics);
    var storageDiagnostics = Assert.IsType<JObject>(eventPayload.StorageDiagnostics);
    Assert.Equal(batchId, storageDiagnostics["batchId"]);
}
/// <summary>
/// Echoes each event's counter back onto the output ring buffer.
/// </summary>
/// <param name="batch">The batch of ping events to answer.</param>
/// <param name="sequence">The sequence number of the first event in the batch.</param>
public ValueTask OnBatch(EventBatch<PingPongEvent> batch, long sequence)
{
    foreach (var evt in batch)
    {
        // Claim a slot, copy the counter, then publish the claimed sequence.
        var claimed = _buffer.Next();
        var outgoing = _buffer[claimed];
        outgoing.Counter = evt.Counter;
        _buffer.Publish(claimed);
    }

    return ValueTask.CompletedTask;
}
/// <summary>
/// Retrieves the raw message the consumer host captured for the given event
/// ID, parses it, and asserts it contains exactly one event.
/// </summary>
/// <param name="eventId">The ID of the event to look up on the consumer host.</param>
/// <returns>The single parsed event.</returns>
private Event GetReceivedEvent(string eventId)
{
    string rawReceivedEvent = _serviceBusEventConsumerHost.GetReceivedEvent(eventId);

    EventBatch<Event> parsedBatch = EventParser.Parse(rawReceivedEvent);
    Assert.NotNull(parsedBatch);
    Assert.NotNull(parsedBatch.Events);

    Event singleEvent = Assert.Single(parsedBatch.Events);
    Assert.NotNull(singleEvent);
    return singleEvent;
}
/// <summary>
/// Create LogEvent instance: converts a set of user events into a single
/// batched log payload for the event endpoint.
/// </summary>
/// <param name="userEvents">The UserEvent array</param>
/// <param name="logger">The ILogger entity</param>
/// <returns>LogEvent instance, or null when no event produced a visitor.</returns>
public static LogEvent CreateLogEvent(UserEvent[] userEvents, ILogger logger)
{
    EventBatch.Builder builder = new EventBatch.Builder();
    List<Visitor> visitors = new List<Visitor>(userEvents.Count());
    foreach (UserEvent userEvent in userEvents)
    {
        if (userEvent is ImpressionEvent)
        {
            visitors.Add(CreateVisitor((ImpressionEvent)userEvent));
        }
        else if (userEvent is ConversionEvent)
        {
            visitors.Add(CreateVisitor((ConversionEvent)userEvent, logger));
        }
        else
        {
            //TODO: Need to log a message, invalid UserEvent added in a list.
            continue;
        }
        // NOTE(review): the batch-level fields are (re)assigned once per valid
        // event, so the LAST valid event's context wins — presumably all
        // events in a batch share the same context; confirm with callers.
        var userContext = userEvent.Context;
        builder
            .WithClientName(userContext.ClientName)
            .WithClientVersion(userContext.ClientVersion)
            .WithAccountId(userContext.AccountId)
            .WithAnonymizeIP(userContext.AnonymizeIP)
            .WithProjectID(userContext.ProjectId)
            .WithRevision(userContext.Revision)
            .WithEnrichDecisions(true);
    }
    // No recognizable events means nothing to send.
    if (visitors.Count == 0)
    {
        return (null);
    }
    builder.WithVisitors(visitors.ToArray());
    EventBatch eventBatch = builder.Build();
    // The endpoint expects a dictionary payload, so round-trip the batch
    // through a JObject.
    var eventBatchDictionary = JObject.FromObject(eventBatch).ToObject<Dictionary<string, object>>();
    return (new LogEvent(EVENT_ENDPOINT, eventBatchDictionary, "POST", headers: new Dictionary<string, string>
    {
        { "Content-Type", "application/json" }
    }));
}