/// <summary>
/// Verifies that a gRPC AI input can be stopped and then restarted on the same port,
/// and that batches written after the restart reach the newly registered callback.
/// Note: the input's internal stats reset on restart, so BatchesReceived is 1 again
/// after the second write while the external counter reaches 2.
/// </summary>
public async Task GrpcAiInputTests_StopsAndRestarts()
{
    // ARRANGE
    int receivedCount = 0;
    TelemetryBatch lastBatch = null;

    int port = Common.GetPort();
    var input = new GrpcAiInput("localhost", port);

    input.Start(telemetryBatch =>
    {
        receivedCount++;
        lastBatch = telemetryBatch;
    });
    Assert.IsTrue(SpinWait.SpinUntil(() => input.IsRunning, GrpcAiInputTests.DefaultTimeout));

    var writer = new GrpcWriter(true, port);

    var batch = new TelemetryBatch();
    batch.Items.Add(new Telemetry() { Event = new Event() { Name = "Event1" } });
    await writer.Write(batch).ConfigureAwait(false);

    // the first batch must arrive before we stop the input
    Common.AssertIsTrueEventually(
        () => input.GetStats().BatchesReceived == 1 && receivedCount == 1 &&
              lastBatch.Items.Single().Event.Name == "Event1",
        GrpcAiInputTests.DefaultTimeout);

    // ACT
    input.Stop();

    // while stopped, counters must not move
    Common.AssertIsTrueEventually(
        () => !input.IsRunning && input.GetStats().BatchesReceived == 1 && receivedCount == 1 &&
              lastBatch.Items.Single().Event.Name == "Event1",
        GrpcAiInputTests.DefaultTimeout);

    input.Start(telemetryBatch =>
    {
        receivedCount++;
        lastBatch = telemetryBatch;
    });
    Assert.IsTrue(SpinWait.SpinUntil(() => input.IsRunning, GrpcAiInputTests.DefaultTimeout));

    // a fresh client connection is required after the restart
    writer = new GrpcWriter(true, port);
    batch.Items.Single().Event.Name = "Event2";
    await writer.Write(batch).ConfigureAwait(false);

    // ASSERT
    Common.AssertIsTrueEventually(
        () => input.IsRunning && input.GetStats().BatchesReceived == 1 && receivedCount == 2 &&
              lastBatch.Items.Single().Event.Name == "Event2",
        GrpcAiInputTests.DefaultTimeout);
}
/// <summary>
/// Verifies that the named-pipe input accepts a client connection, receives one
/// length-prefixed serialized batch, surfaces it through the callback, and shuts down cleanly.
/// </summary>
public async Task NamedPipeInputTests_ReceivesData()
{
    // ARRANGE
    int receivedCount = 0;
    TelemetryBatch lastBatch = null;

    var input = new NamedPipeInput();

    input.Start((telemetryBatch) =>
    {
        receivedCount++;
        lastBatch = telemetryBatch;
    });
    Assert.IsTrue(SpinWait.SpinUntil(() => input.IsRunning, NamedPipeInputTests.DefaultTimeout));

    var pipeWriter = new PipeWriter(NamedPipeInputTests.DefaultTimeout);
    await pipeWriter.Start().ConfigureAwait(false);

    // ACT
    var batch = new TelemetryBatch();
    batch.Items.Add(new Telemetry() { Event = new Event() { Name = "Event1" } });

    using (var ms = new MemoryStream())
    {
        // wire format: length prefix followed by the protobuf-serialized batch
        batch.WriteTo(ms);
        await ms.FlushAsync().ConfigureAwait(false);

        await pipeWriter.Write(Common.EncodeLengthPrefix(batch.CalculateSize())).ConfigureAwait(false);
        await pipeWriter.Write(ms.ToArray()).ConfigureAwait(false);
    }

    // ASSERT
    Common.AssertIsTrueEventually(
        () => input.GetStats().ConnectionCount == 1 && input.GetStats().BatchesReceived == 1 &&
              receivedCount == 1 && lastBatch.Items.Single().Event.Name == "Event1",
        NamedPipeInputTests.DefaultTimeout);

    pipeWriter.Stop();
    input.Stop();
    Assert.IsTrue(SpinWait.SpinUntil(() => !input.IsRunning, NamedPipeInputTests.DefaultTimeout));
}
/// <summary>
/// Writes a telemetry batch to the AI gRPC request stream.
/// </summary>
/// <param name="batch">The batch to send.</param>
/// <exception cref="InvalidOperationException">
/// Thrown when this writer was not created in AI mode, or when the gRPC write fails
/// (the original failure is attached as <see cref="Exception.InnerException"/>).
/// </exception>
public async Task Write(TelemetryBatch batch)
{
    if (!this.aiMode)
    {
        throw new InvalidOperationException("Incorrect mode");
    }

    try
    {
        await this.aiStreamingCall.RequestStream.WriteAsync(batch).ConfigureAwait(false);
    }
    catch (System.Exception e)
    {
        // BUG FIX: pass the caught exception as the inner exception instead of only
        // flattening it into the message; this preserves the original stack trace
        // and exception type for callers and debuggers.
        throw new InvalidOperationException(
            FormattableString.Invariant($"Error sending a message via gRpc. {e.ToString()}"),
            e);
    }
}
/// <summary>
/// Verifies that the gRPC AI input handles 1000 concurrent client connections,
/// each writing one batch, without losing any of them.
/// </summary>
public async Task GrpcAiInputTests_ReceivesDataFromMultipleClients()
{
    // ARRANGE
    int batchesReceived = 0;
    TelemetryBatch receivedBatch = null;

    int port = Common.GetPort();
    var input = new GrpcAiInput("localhost", port);

    input.Start(
        (telemetryBatch, callContext) =>
        {
            // callback runs concurrently for multiple clients, hence Interlocked
            Interlocked.Increment(ref batchesReceived);
            receivedBatch = telemetryBatch;
        },
        null);
    Assert.IsTrue(SpinWait.SpinUntil(() => input.IsRunning, GrpcAiInputTests.DefaultTimeout));

    // ACT
    TelemetryBatch batch = new TelemetryBatch();
    batch.Items.Add(new Telemetry() { Event = new Event() { Name = "Event1" } });

    // BUG FIX: Parallel.For with an async lambda compiles to async-void delegates
    // which Parallel.For neither awaits nor observes for exceptions — the loop
    // returned before the writes completed and any write failure was lost.
    // Task.WhenAll actually awaits every concurrent write.
    await Task.WhenAll(
        Enumerable.Range(0, 1000).Select(async i =>
        {
            var grpcWriter = new GrpcWriter(true, port);
            await grpcWriter.Write(batch).ConfigureAwait(false);
        })).ConfigureAwait(false);

    // ASSERT
    Common.AssertIsTrueEventually(
        () => input.GetStats().BatchesReceived == 1000 && batchesReceived == 1000,
        GrpcAiInputTests.DefaultTimeout);

    input.Stop();
    Assert.IsTrue(SpinWait.SpinUntil(() => !input.IsRunning, GrpcAiInputTests.DefaultTimeout));
}
/// <summary>
/// Verifies that an exception thrown by the user-supplied processing handler is
/// contained by the input (logged, counted as a failed batch) and does not stop
/// the input from processing subsequent batches.
/// </summary>
public async Task GrpcAiInputTests_HandlesExceptionsInProcessingHandler()
{
    // ARRANGE
    int port = Common.GetPort();
    var input = new GrpcAiInput("localhost", port);

    // handler always fails
    input.Start(telemetryBatch => throw new InvalidOperationException());
    Assert.IsTrue(SpinWait.SpinUntil(() => input.IsRunning, GrpcAiInputTests.DefaultTimeout));

    var writer = new GrpcWriter(true, port);

    var batch = new TelemetryBatch();
    batch.Items.Add(new Telemetry() { Event = new Event() { Name = "Event1" } });

    // ACT
    await writer.Write(batch).ConfigureAwait(false);

    // ASSERT
    // must have handled the exception by logging it
    // should still be able to process items
    Common.AssertIsTrueEventually(
        () => input.IsRunning && input.GetStats().BatchesReceived == 0 &&
              input.GetStats().BatchesFailed == 1,
        GrpcAiInputTests.DefaultTimeout);

    await writer.Write(batch).ConfigureAwait(false);

    Common.AssertIsTrueEventually(
        () => input.IsRunning && input.GetStats().BatchesReceived == 0 &&
              input.GetStats().BatchesFailed == 2,
        GrpcAiInputTests.DefaultTimeout);
}
/// <summary>
/// Processes an incoming telemetry batch for AI channel: converts each item to the
/// SDK API shape and forwards it to the telemetry client. Items that fail to convert
/// or track are logged and skipped; processing always continues.
/// </summary>
/// <remarks>This method may be called from multiple threads concurrently.</remarks>
/// <param name="batch">Batch of AI telemetry items received over gRPC.</param>
/// <param name="callContext">gRPC server call context for the sending client.</param>
private void OnAiBatchReceived(TelemetryBatch batch, ServerCallContext callContext)
{
    try
    {
        // BUG FIX: this trace describes the whole batch (item count, first item's ikey)
        // but was emitted once per item inside the loop below, producing N identical
        // trace lines per batch. Log it once, guarded so First() cannot throw on an
        // empty batch.
        if (batch.Items.Count > 0)
        {
            Diagnostics.LogTrace($"AI message received: {batch.Items.Count} items, first item: {batch.Items.First().InstrumentationKey}");
        }

        // send incoming telemetry items to the telemetryClient
        foreach (Telemetry telemetry in batch.Items)
        {
            ITelemetry convertedTelemetry = null;

            try
            {
                switch (telemetry.DataCase)
                {
                    case Telemetry.DataOneofCase.Event:
                        convertedTelemetry = AiTelemetryConverter.ConvertEventToSdkApi(telemetry);
                        break;
                    case Telemetry.DataOneofCase.Message:
                        convertedTelemetry = AiTelemetryConverter.ConvertTraceToSdkApi(telemetry);
                        break;
                    case Telemetry.DataOneofCase.Metric:
                        convertedTelemetry = AiTelemetryConverter.ConvertMetricToSdkApi(telemetry);
                        break;
                    case Telemetry.DataOneofCase.Exception:
                        convertedTelemetry = AiTelemetryConverter.ConvertExceptionToSdkApi(telemetry);
                        break;
                    case Telemetry.DataOneofCase.Dependency:
                        convertedTelemetry = AiTelemetryConverter.ConvertDependencyToSdkApi(telemetry);
                        break;
                    case Telemetry.DataOneofCase.Availability:
                        convertedTelemetry = AiTelemetryConverter.ConvertAvailabilityToSdkApi(telemetry);
                        break;
                    case Telemetry.DataOneofCase.PageView:
                        convertedTelemetry = AiTelemetryConverter.ConvertPageViewToSdkApi(telemetry);
                        break;
                    case Telemetry.DataOneofCase.Request:
                        convertedTelemetry = AiTelemetryConverter.ConvertRequestToSdkApi(telemetry);
                        break;
                    case Telemetry.DataOneofCase.None:
                        throw new ArgumentException(
                            FormattableString.Invariant($"Empty AI telemetry item encountered"));
                    default:
                        throw new ArgumentException(
                            FormattableString.Invariant($"Unknown AI telemetry item type encountered"));
                }
            }
            catch (Exception e)
            {
                // an unexpected issue during conversion
                // log and carry on
                Diagnostics.LogError(
                    FormattableString.Invariant(
                        $"Could not convert an incoming AI telemetry item. {e.ToString()}"));
            }

            try
            {
                if (convertedTelemetry != null)
                {
                    this.telemetryClient.Track(convertedTelemetry);
                }
            }
            catch (Exception e)
            {
                // an unexpected issue while tracking an item
                // log and carry on
                Diagnostics.LogError(
                    FormattableString.Invariant(
                        $"Could not track an incoming AI telemetry item. {e.ToString()}"));
            }
        }
    }
    catch (Exception e)
    {
        // an unexpected issue while processing the batch
        // log and carry on
        Diagnostics.LogError(
            FormattableString.Invariant(
                $"Could not process an incoming AI telemetry batch. {e.ToString()}"));
    }
}
/// <summary>
/// End-to-end: runs the Library against an in-memory stub telemetry client, writes a
/// batch containing one item of each AI telemetry type over gRPC, and verifies every
/// item arrives converted to the corresponding SDK telemetry type, in order.
/// </summary>
public async Task LibraryTests_LibraryProcessesAiBatchesCorrectly()
{
    // ARRANGE
    var telemetryClient = Common.SetupStubTelemetryClient(out var sentItems);

    int portAI = Common.GetPort();
    int portOC = Common.GetPort();

    var config = $@"<?xml version=""1.0"" encoding=""utf-8"" ?>
<LocalForwarderConfiguration>
  <Inputs>
    <ApplicationInsightsInput Enabled=""true"">
      <Host>0.0.0.0</Host>
      <Port>{portAI}</Port>
    </ApplicationInsightsInput>
    <OpenCensusInput Enabled=""true"">
      <Host>0.0.0.0</Host>
      <Port>{portOC}</Port>
    </OpenCensusInput>
  </Inputs>
  <OpenCensusToApplicationInsights>
    <InstrumentationKey>ikey1</InstrumentationKey>
  </OpenCensusToApplicationInsights>
  <ApplicationInsights>
    <LiveMetricsStreamInstrumentationKey>[SPECIFY LIVE METRICS STREAM INSTRUMENTATION KEY HERE]</LiveMetricsStreamInstrumentationKey>
  </ApplicationInsights>
</LocalForwarderConfiguration>
";

    // one item of every supported AI telemetry type
    var telemetryBatch = new TelemetryBatch();
    telemetryBatch.Items.Add(new Telemetry() { Event = new Event() { Name = "Event1" } });
    telemetryBatch.Items.Add(new Telemetry() { Message = new Message() { Message_ = "Message1" } });
    telemetryBatch.Items.Add(new Telemetry() { Metric = new LocalForwarder.Library.Inputs.Contracts.Metric() { Metrics = { new DataPoint() { Name = "Metric1", Value = 1 } } } });
    telemetryBatch.Items.Add(new Telemetry() { Exception = new LocalForwarder.Library.Inputs.Contracts.Exception() { ProblemId = "Exception1", Exceptions = { new ExceptionDetails() { Message = "Exception1" } } } });
    telemetryBatch.Items.Add(new Telemetry() { Dependency = new Dependency() { Name = "Dependency1" } });
    telemetryBatch.Items.Add(new Telemetry() { Availability = new Availability() { Name = "Availability1" } });
    telemetryBatch.Items.Add(new Telemetry() { PageView = new PageView() { Id = "PageView1" } });
    telemetryBatch.Items.Add(new Telemetry() { Request = new Request() { Name = "Request1" } });

    var lib = new Library(config, telemetryClient);
    lib.Run();

    // ACT
    var writer = new GrpcWriter(true, portAI);
    await writer.Write(telemetryBatch).ConfigureAwait(false);

    // ASSERT
    Common.AssertIsTrueEventually(() => sentItems.Count == 8);

    lib.Stop();

    // items must arrive in input order, each converted to the matching SDK type
    Assert.AreEqual("Event1", (sentItems.Skip(0).First() as EventTelemetry).Name);
    Assert.AreEqual("Message1", (sentItems.Skip(1).First() as TraceTelemetry).Message);
    Assert.AreEqual("Metric1", (sentItems.Skip(2).First() as MetricTelemetry).Name);
    Assert.AreEqual(1, (sentItems.Skip(2).First() as MetricTelemetry).Value);
    Assert.AreEqual("Exception1", (sentItems.Skip(3).First() as ExceptionTelemetry).ProblemId);
    Assert.AreEqual("Exception1", (sentItems.Skip(3).First() as ExceptionTelemetry).ExceptionDetailsInfoList.Single().Message);
    Assert.AreEqual("Dependency1", (sentItems.Skip(4).First() as DependencyTelemetry).Name);
    Assert.AreEqual("Availability1", (sentItems.Skip(5).First() as AvailabilityTelemetry).Name);
    Assert.AreEqual("PageView1", (sentItems.Skip(6).First() as PageViewTelemetry).Id);
    Assert.AreEqual("Request1", (sentItems.Skip(7).First() as RequestTelemetry).Name);
}
/// <summary>
/// Verifies that the Library periodically logs input statistics: first the zero-state
/// line, then the line reflecting one received batch per input — and that no line
/// reporting two received batches ever appears (each batch must be counted once).
/// </summary>
public async Task LibraryTests_LibraryLogsInputStatsCorrectly()
{
    // ARRANGE
    var telemetryClient = Common.SetupStubTelemetryClient(out var sentItems);

    int portAI = Common.GetPort();
    int portOC = Common.GetPort();

    var config = $@"<?xml version=""1.0"" encoding=""utf-8"" ?>
<LocalForwarderConfiguration>
  <Inputs>
    <ApplicationInsightsInput Enabled=""true"">
      <Host>0.0.0.0</Host>
      <Port>{portAI}</Port>
    </ApplicationInsightsInput>
    <OpenCensusInput Enabled=""true"">
      <Host>0.0.0.0</Host>
      <Port>{portOC}</Port>
    </OpenCensusInput>
  </Inputs>
  <OpenCensusToApplicationInsights>
    <InstrumentationKey>ikey1</InstrumentationKey>
  </OpenCensusToApplicationInsights>
  <ApplicationInsights>
    <LiveMetricsStreamInstrumentationKey>[SPECIFY LIVE METRICS STREAM INSTRUMENTATION KEY HERE]</LiveMetricsStreamInstrumentationKey>
    <LiveMetricsStreamAuthenticationApiKey></LiveMetricsStreamAuthenticationApiKey>
    <AdaptiveSampling Enabled=""true"">
      <MaxEventsPerSecond>%APPINSIGHTS_ADAPTIVESAMPLINGEVENTSLIMIT%</MaxEventsPerSecond>
      <!--Telemetry items other than events are counted together-->
      <MaxOtherItemsPerSecond>%APPINSIGHTS_ADAPTIVESAMPLINGNONEVENTSLIMIT%</MaxOtherItemsPerSecond>
    </AdaptiveSampling>
  </ApplicationInsights>
</LocalForwarderConfiguration>
";

    // one AI batch with one item of each type (8 items total)
    var telemetryBatchAI = new TelemetryBatch();
    telemetryBatchAI.Items.Add(new Telemetry() { Event = new Event() { Name = "Event1" } });
    telemetryBatchAI.Items.Add(new Telemetry() { Message = new Message() { Message_ = "Message1" } });
    telemetryBatchAI.Items.Add(new Telemetry() { Metric = new LocalForwarder.Library.Inputs.Contracts.Metric() { Metrics = { new DataPoint() { Name = "Metric1", Value = 1 } } } });
    telemetryBatchAI.Items.Add(new Telemetry() { Exception = new LocalForwarder.Library.Inputs.Contracts.Exception() { ProblemId = "Exception1", Exceptions = { new ExceptionDetails() { Message = "Exception1" } } } });
    telemetryBatchAI.Items.Add(new Telemetry() { Dependency = new Dependency() { Name = "Dependency1" } });
    telemetryBatchAI.Items.Add(new Telemetry() { Availability = new Availability() { Name = "Availability1" } });
    telemetryBatchAI.Items.Add(new Telemetry() { PageView = new PageView() { Id = "PageView1" } });
    telemetryBatchAI.Items.Add(new Telemetry() { Request = new Request() { Name = "Request1" } });

    // one OpenCensus batch with two spans
    var telemetryBatchOC = new ExportTraceServiceRequest();
    telemetryBatchOC.Spans.Add(new Span() { Name = new TruncatableString() { Value = "Span1" }, Kind = Span.Types.SpanKind.Server });
    telemetryBatchOC.Spans.Add(new Span() { Name = new TruncatableString() { Value = "Span2" }, Kind = Span.Types.SpanKind.Client });

    // redirect logging to a new file
    Diagnostics.Flush(TimeSpan.FromSeconds(5));
    string logFileName = Common.SwitchLoggerToDifferentFile();

    var lib = new Library(config, telemetryClient, TimeSpan.FromMilliseconds(10));
    lib.Run();

    // ACT
    var writer = new GrpcWriter(true, portAI);
    await writer.Write(telemetryBatchAI).ConfigureAwait(false);

    writer = new GrpcWriter(false, portOC);
    await writer.Write(telemetryBatchOC).ConfigureAwait(false);

    // ASSERT
    Common.AssertIsTrueEventually(() => sentItems.Count == 10);

    // allow a few stats-logging periods to elapse before stopping
    await Task.Delay(TimeSpan.FromSeconds(1)).ConfigureAwait(false);

    lib.Stop();

    Diagnostics.Flush(TimeSpan.FromSeconds(5));

    // close the file
    Common.SwitchLoggerToDifferentFile();

    string logs = await File.ReadAllTextAsync(logFileName).ConfigureAwait(false);

    Assert.IsTrue(logs.Contains("|INFO|AI input: [ConnectionCount: 0, BatchesReceived: 0, BatchesFailed: 0, ConfigsReceived: 0, ConfigsFailed: 0]"));
    Assert.IsTrue(logs.Contains("|INFO|OpenCensus input: [ConnectionCount: 0, BatchesReceived: 0, BatchesFailed: 0, ConfigsReceived: 0, ConfigsFailed: 0]"));

    Assert.IsTrue(logs.Contains("|INFO|AI input: [ConnectionCount: 0, BatchesReceived: 1, BatchesFailed: 0, ConfigsReceived: 0, ConfigsFailed: 0]"));
    Assert.IsTrue(logs.Contains("|INFO|OpenCensus input: [ConnectionCount: 0, BatchesReceived: 1, BatchesFailed: 0, ConfigsReceived: 0, ConfigsFailed: 0]"));

    // BUG FIX: the probe strings previously read "BatchesFailed," (missing ": 0"),
    // which can never occur in the stats format, so these IsFalse assertions passed
    // vacuously. Use the real format so they actually guard against double-counting.
    Assert.IsFalse(logs.Contains("|INFO|AI input: [ConnectionCount: 0, BatchesReceived: 2, BatchesFailed: 0, ConfigsReceived: 0, ConfigsFailed: 0]"));
    Assert.IsFalse(logs.Contains("|INFO|OpenCensus input: [ConnectionCount: 0, BatchesReceived: 2, BatchesFailed: 0, ConfigsReceived: 0, ConfigsFailed: 0]"));
}