public async Task UsesCustomPropertyNames()
{
    try
    {
        await new HttpClient().GetStringAsync("http://i.do.not.exist");
    }
    catch (Exception e)
    {
        var timestamp = new DateTimeOffset(2013, 05, 28, 22, 10, 20, 666, TimeSpan.FromHours(10));
        var messageTemplate = "{Song}++";
        var template = new MessageTemplateParser().Parse(messageTemplate);
        using (var sink = new ElasticsearchSink(_options))
        {
            var properties = new List<LogEventProperty>
            {
                new LogEventProperty("Song", new ScalarValue("New Macabre")),
                new LogEventProperty("Complex", new ScalarValue(new { A = 1, B = 2 }))
            };
            var logEvent = new LogEvent(timestamp, LogEventLevel.Information, e, template, properties);
            sink.Emit(logEvent);
            logEvent = new LogEvent(timestamp.AddDays(2), LogEventLevel.Information, e, template, properties);
            sink.Emit(logEvent);
        }

        _seenHttpPosts.Should().NotBeEmpty().And.HaveCount(1);
        var json = _seenHttpPosts.First();
        var bulkJsonPieces = json.Split(new[] { '\n' }, StringSplitOptions.RemoveEmptyEntries);
        bulkJsonPieces.Should().HaveCount(4);
        bulkJsonPieces[0].Should().Contain(@"""_index"":""logstash-2013.05.28");
        bulkJsonPieces[1].Should().Contain("New Macabre");
        bulkJsonPieces[1].Should().NotContain("Properties\"");
        bulkJsonPieces[1].Should().Contain("fields\":{");
        bulkJsonPieces[1].Should().Contain("@timestamp");
        bulkJsonPieces[2].Should().Contain(@"""_index"":""logstash-2013.05.30");
    }
}
public async Task UsesCustomPropertyNames()
{
    try
    {
        await this.ThrowAsync();
    }
    catch (Exception e)
    {
        var timestamp = new DateTimeOffset(2013, 05, 28, 22, 10, 20, 666, TimeSpan.FromHours(10));
        var messageTemplate = "{Song}++";
        var template = new MessageTemplateParser().Parse(messageTemplate);
        using (var sink = new ElasticsearchSink(_options))
        {
            var properties = new List<LogEventProperty>
            {
                new LogEventProperty("Song", new ScalarValue("New Macabre")),
                new LogEventProperty("Complex", new ScalarValue(new { A = 1, B = 2 }))
            };
            var logEvent = new LogEvent(timestamp, LogEventLevel.Information, e, template, properties);

            // one off
            sink.Emit(logEvent);
            sink.Emit(logEvent);

            logEvent = new LogEvent(timestamp.AddDays(2), LogEventLevel.Information, e, template, properties);
            sink.Emit(logEvent);
        }

        var bulkJsonPieces = this.AssertSeenHttpPosts(_seenHttpPosts, 4);
        bulkJsonPieces[0].Should().Contain(@"""_index"":""logstash-2013.05.28");
        bulkJsonPieces[1].Should().Contain("New Macabre");
        bulkJsonPieces[1].Should().NotContain("Properties\"");
        bulkJsonPieces[1].Should().Contain("fields\":{");
        bulkJsonPieces[1].Should().Contain("@timestamp");
        bulkJsonPieces[2].Should().Contain(@"""_index"":""logstash-2013.05.30");
    }
}
public void IndexDecider_EndsUpInTheOutput()
{
    // DO NOTE that you can't send objects as scalar values through Logger.*("{Scalar}", {});
    var timestamp = new DateTimeOffset(2013, 05, 28, 22, 10, 20, 666, TimeSpan.FromHours(10));
    const string messageTemplate = "{Song}++ @{Complex}";
    var template = new MessageTemplateParser().Parse(messageTemplate);
    _options.IndexDecider = (l, utcTime) =>
        string.Format("logstash-{1}-{0:yyyy.MM.dd}", utcTime, l.Level.ToString().ToLowerInvariant());
    using (var sink = new ElasticsearchSink(_options))
    {
        var properties = new List<LogEventProperty>
        {
            new LogEventProperty("Song", new ScalarValue("New Macabre"))
        };
        var e = new LogEvent(timestamp, LogEventLevel.Information, null, template, properties);
        sink.Emit(e);

        var exception = new ArgumentException("parameter");
        properties = new List<LogEventProperty>
        {
            new LogEventProperty("Song", new ScalarValue("Old Macabre")),
            new LogEventProperty("Complex", new ScalarValue(new { A = 1, B = 2 }))
        };
        e = new LogEvent(timestamp.AddYears(-2), LogEventLevel.Fatal, exception, template, properties);
        sink.Emit(e);
    }

    _seenHttpPosts.Should().NotBeEmpty().And.HaveCount(1);
    var json = _seenHttpPosts.First();
    var bulkJsonPieces = json.Split(new[] { '\n' }, StringSplitOptions.RemoveEmptyEntries);
    bulkJsonPieces.Should().HaveCount(4);
    bulkJsonPieces[0].Should().Contain(@"""_index"":""logstash-information-2013.05.28");
    bulkJsonPieces[1].Should().Contain("New Macabre");
    bulkJsonPieces[2].Should().Contain(@"""_index"":""logstash-fatal-2011.05.28");
    bulkJsonPieces[3].Should().Contain("Old Macabre");

    // Serilog by default simply .ToString()'s unknown objects
    bulkJsonPieces[3].Should().Contain("Complex\":\"{");
}
public void DefaultBulkActionV8()
{
    _options.IndexFormat = "logs";
    _options.TypeName = null;
    _options.PipelineName = null;
    using (var sink = new ElasticsearchSink(_options))
    {
        sink.Emit(ADummyLogEvent());
        sink.Emit(ADummyLogEvent());
    }

    var bulkJsonPieces = this.AssertSeenHttpPosts(_seenHttpPosts, 2, 1);
    const string expectedAction = @"{""index"":{""_index"":""logs""}}";
    bulkJsonPieces[0].Should().Be(expectedAction);
}
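// For reference: the sink POSTs newline-delimited JSON to the _bulk endpoint, one
// action line per event followed by that event's document. A sketch of the payload
// this test inspects (the document bodies are illustrative, not the sink's exact output):
//
//   {"index":{"_index":"logs"}}
//   {"@timestamp":"2013-05-28T22:10:20.666+10:00","level":"Information", ...}
//   {"index":{"_index":"logs"}}
//   {"@timestamp":"2013-05-28T22:10:20.666+10:00","level":"Information", ...}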
public void BulkActionV7OverrideTypeName()
{
    _options.IndexFormat = "logs";
    _options.TypeName = "logevent"; // This is the default value when creating the sink via configuration
    _options.AutoRegisterTemplateVersion = AutoRegisterTemplateVersion.ESv7;
    _options.PipelineName = null;
    using (var sink = new ElasticsearchSink(_options))
    {
        sink.Emit(ADummyLogEvent());
        sink.Emit(ADummyLogEvent());
    }

    var bulkJsonPieces = this.AssertSeenHttpPosts(_seenHttpPosts, 2, 1);
    const string expectedAction = @"{""index"":{""_type"":""_doc"",""_index"":""logs""}}";
    bulkJsonPieces[0].Should().Be(expectedAction);
}
public void PipelineAction()
{
    _options.IndexFormat = "logs-my-stream";
    _options.TypeName = "_doc";
    _options.PipelineName = "my-pipeline";
    _options.BatchAction = ElasticOpType.Index;
    using (var sink = new ElasticsearchSink(_options))
    {
        sink.Emit(ADummyLogEvent());
        sink.Emit(ADummyLogEvent());
    }

    var bulkJsonPieces = this.AssertSeenHttpPosts(_seenHttpPosts, 2, 1);
    const string expectedAction = @"{""index"":{""_type"":""_doc"",""_index"":""logs-my-stream"",""pipeline"":""my-pipeline""}}";
    bulkJsonPieces[0].Should().Be(expectedAction);
}
public void BulkActionDataStreams()
{
    _options.IndexFormat = "logs-my-stream";
    _options.TypeName = null;
    _options.PipelineName = null;
    _options.BatchAction = ElasticOpType.Create;
    using (var sink = new ElasticsearchSink(_options))
    {
        sink.Emit(ADummyLogEvent());
        sink.Emit(ADummyLogEvent());
    }

    var bulkJsonPieces = this.AssertSeenHttpPosts(_seenHttpPosts, 2, 1);
    const string expectedAction = @"{""create"":{""_index"":""logs-my-stream""}}";
    bulkJsonPieces[0].Should().Be(expectedAction);
}
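// A minimal configuration sketch for shipping to a data stream from application code,
// assuming the standard WriteTo.Elasticsearch extension; the node URI is illustrative.
// Data streams only accept the "create" op type, hence BatchAction below.
var logger = new LoggerConfiguration()
    .WriteTo.Elasticsearch(new ElasticsearchSinkOptions(new Uri("http://localhost:9200"))
    {
        IndexFormat = "logs-my-stream",
        TypeName = null,
        BatchAction = ElasticOpType.Create
    })
    .CreateLogger();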
public void UpperCasedIndex_And_TypeName_EndsUpInTheOutput()
{
    // DO NOTE that you can't send objects as scalar values through Logger.*("{Scalar}", {});
    var timestamp = new DateTimeOffset(2013, 05, 28, 22, 10, 20, 666, TimeSpan.FromHours(10));
    const string messageTemplate = "{Song}++ @{Complex}";
    var template = new MessageTemplateParser().Parse(messageTemplate);
    _options.TypeName = "custom-event-type";
    _options.IndexFormat = "Event-Index-{0:yyyy.MM.dd}";
    using (var sink = new ElasticsearchSink(_options))
    {
        var properties = new List<LogEventProperty>
        {
            new LogEventProperty("Song", new ScalarValue("New Macabre"))
        };
        var e = new LogEvent(timestamp, LogEventLevel.Information, null, template, properties);

        // one off
        sink.Emit(e);
        sink.Emit(e);

        var exception = new ArgumentException("parameter");
        properties = new List<LogEventProperty>
        {
            new LogEventProperty("Song", new ScalarValue("Old Macabre")),
            new LogEventProperty("Complex", new ScalarValue(new { A = 1, B = 2 }))
        };
        e = new LogEvent(timestamp.AddYears(-2), LogEventLevel.Fatal, exception, template, properties);
        sink.Emit(e);
    }

    var bulkJsonPieces = this.AssertSeenHttpPosts(_seenHttpPosts, 4);
    bulkJsonPieces.Should().HaveCount(4);
    bulkJsonPieces[0].Should().Contain(@"""_index"":""event-index-2013.05.28");
    bulkJsonPieces[0].Should().Contain(@"""_type"":""custom-event-type");
    bulkJsonPieces[1].Should().Contain("New Macabre");
    bulkJsonPieces[2].Should().Contain(@"""_index"":""event-index-2011.05.28");
    bulkJsonPieces[2].Should().Contain(@"""_type"":""custom-event-type");
    bulkJsonPieces[3].Should().Contain("Old Macabre");
    bulkJsonPieces[3].Should().Contain("Complex\":{");
}
public async Task WhenPassingASerializer_ShouldExpandToJson()
{
    try
    {
        await new HttpClient().GetStringAsync("http://i.do.not.exist");
    }
    catch (Exception e)
    {
        var timestamp = new DateTimeOffset(2013, 05, 28, 22, 10, 20, 666, TimeSpan.FromHours(10));
        var messageTemplate = "{Song}++";
        var template = new MessageTemplateParser().Parse(messageTemplate);
        _options.Serializer = new ElasticsearchJsonNetSerializer();
        using (var sink = new ElasticsearchSink(_options))
        {
            var properties = new List<LogEventProperty>
            {
                new LogEventProperty("Song", new ScalarValue("New Macabre")),
                new LogEventProperty("Complex", new ScalarValue(new { A = 1, B = 2 }))
            };
            var logEvent = new LogEvent(timestamp, LogEventLevel.Information, null, template, properties);
            sink.Emit(logEvent);
            logEvent = new LogEvent(timestamp.AddDays(2), LogEventLevel.Information, e, template, properties);
            sink.Emit(logEvent);
        }

        _seenHttpPosts.Should().NotBeEmpty().And.HaveCount(1);
        var json = _seenHttpPosts.First();
        var bulkJsonPieces = json.Split(new[] { '\n' }, StringSplitOptions.RemoveEmptyEntries);
        bulkJsonPieces.Should().HaveCount(4);
        bulkJsonPieces[0].Should().Contain(@"""_index"":""logstash-2013.05.28");
        bulkJsonPieces[1].Should().Contain("New Macabre");
        bulkJsonPieces[1].Should().NotContain("Properties\"");
        bulkJsonPieces[2].Should().Contain(@"""_index"":""logstash-2013.05.30");

        // Since we pass a serializer, objects should be serialized as JSON objects
        // rather than via their ToString() implementation.
        // DO NOTE that you can't send objects as scalar values through Logger.*("{Scalar}", {});
        bulkJsonPieces[3].Should().Contain("Complex\":{");
        bulkJsonPieces[3].Should().Contain("exceptions\":[{");
    }
}
public void CustomIndex_And_TypeName_EndsUpInTheOutput()
{
    // DO NOTE that you can't send objects as scalar values through Logger.*("{Scalar}", {});
    var timestamp = new DateTimeOffset(2013, 05, 28, 22, 10, 20, 666, TimeSpan.FromHours(10));
    const string messageTemplate = "{Song}++ @{Complex}";
    var template = new MessageTemplateParser().Parse(messageTemplate);
    _options.TypeName = "custom-event-type";
    _options.IndexFormat = "event-index-{0:yyyy.MM.dd}";
    using (var sink = new ElasticsearchSink(_options))
    {
        var properties = new List<LogEventProperty>
        {
            new LogEventProperty("Song", new ScalarValue("New Macabre"))
        };
        var e = new LogEvent(timestamp, LogEventLevel.Information, null, template, properties);
        sink.Emit(e);

        var exception = new ArgumentException("parameter");
        properties = new List<LogEventProperty>
        {
            new LogEventProperty("Song", new ScalarValue("Old Macabre")),
            new LogEventProperty("Complex", new ScalarValue(new { A = 1, B = 2 }))
        };
        e = new LogEvent(timestamp.AddYears(-2), LogEventLevel.Fatal, exception, template, properties);
        sink.Emit(e);
    }

    _seenHttpPosts.Should().NotBeEmpty().And.HaveCount(1);
    var json = _seenHttpPosts.First();
    var bulkJsonPieces = json.Split(new[] { '\n' }, StringSplitOptions.RemoveEmptyEntries);
    bulkJsonPieces.Should().HaveCount(4);
    bulkJsonPieces[0].Should().Contain(@"""_index"":""event-index-2013.05.28");
    bulkJsonPieces[0].Should().Contain(@"""_type"":""custom-event-type");
    bulkJsonPieces[1].Should().Contain("New Macabre");
    bulkJsonPieces[2].Should().Contain(@"""_index"":""event-index-2011.05.28");
    bulkJsonPieces[2].Should().Contain(@"""_type"":""custom-event-type");
    bulkJsonPieces[3].Should().Contain("Old Macabre");

    // Serilog by default simply .ToString()'s unknown objects
    bulkJsonPieces[3].Should().Contain("Complex\":\"{");
}
public async Task WhenPassingASerializer_ShouldExpandToJson()
{
    try
    {
        await new HttpClient().GetStringAsync("http://i.do.not.exist");
    }
    catch (Exception e)
    {
        var timestamp = new DateTimeOffset(2013, 05, 28, 22, 10, 20, 666, TimeSpan.FromHours(10));
        var messageTemplate = "{Song}++";
        var template = new MessageTemplateParser().Parse(messageTemplate);
        _options.Serializer = new ElasticsearchJsonNetSerializer();
        using (var sink = new ElasticsearchSink(_options))
        {
            var properties = new List<LogEventProperty>
            {
                new LogEventProperty("Song", new ScalarValue("New Macabre")),
                new LogEventProperty("Complex", new ScalarValue(new { A = 1, B = 2 }))
            };
            var logEvent = new LogEvent(timestamp, LogEventLevel.Information, null, template, properties);
            sink.Emit(logEvent);
            logEvent = new LogEvent(timestamp.AddDays(2), LogEventLevel.Information, e, template, properties);
            sink.Emit(logEvent);
        }

        _seenHttpPosts.Should().NotBeEmpty().And.HaveCount(1);
        var json = _seenHttpPosts.First();
        var bulkJsonPieces = json.Split(new[] { '\n' }, StringSplitOptions.RemoveEmptyEntries);
        bulkJsonPieces.Should().HaveCount(4);
        bulkJsonPieces[0].Should().Contain(@"""_index"":""logstash-2013.05.28");
        bulkJsonPieces[1].Should().Contain("New Macabre");
        bulkJsonPieces[1].Should().NotContain("Properties\"");
        bulkJsonPieces[2].Should().Contain(@"""_index"":""logstash-2013.05.30");

        // Since we pass a serializer, objects should be serialized as JSON objects
        // rather than via their ToString() implementation.
        // DO NOTE that you can't send objects as scalar values through Logger.*("{Scalar}", {});
        bulkJsonPieces[3].Should().Contain("Complex\":{");

        // Since we are passing an ISerializer, the exception should be logged as an object and not a string.
        bulkJsonPieces[3].Should().Contain("exception\":{");
    }
}
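// A minimal sketch of attaching the serializer outside the test harness, assuming the
// standard WriteTo.Elasticsearch extension; the node URI is illustrative.
var logger = new LoggerConfiguration()
    .WriteTo.Elasticsearch(new ElasticsearchSinkOptions(new Uri("http://localhost:9200"))
    {
        // With a serializer attached, structured properties and exceptions are written
        // as real JSON objects instead of ToString()'d strings.
        Serializer = new ElasticsearchJsonNetSerializer()
    })
    .CreateLogger();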
public async Task WhenPassingASerializer_ShouldExpandToJson()
{
    try
    {
        await this.ThrowAsync();
    }
    catch (Exception e)
    {
        var timestamp = new DateTimeOffset(2013, 05, 28, 22, 10, 20, 666, TimeSpan.FromHours(10));
        var messageTemplate = "{Song}++";
        var template = new MessageTemplateParser().Parse(messageTemplate);
        using (var sink = new ElasticsearchSink(_options))
        {
            var properties = new List<LogEventProperty>
            {
                new LogEventProperty("Song", new ScalarValue("New Macabre")),
                new LogEventProperty("Complex", new ScalarValue(new { A = 1, B = 2 }))
            };
            var logEvent = new LogEvent(timestamp, LogEventLevel.Information, null, template, properties);

            // one off
            sink.Emit(logEvent);
            sink.Emit(logEvent);

            logEvent = new LogEvent(timestamp.AddDays(2), LogEventLevel.Information, e, template, properties);
            sink.Emit(logEvent);
        }

        var bulkJsonPieces = this.AssertSeenHttpPosts(_seenHttpPosts, 4);
        bulkJsonPieces[0].Should().Contain(@"""_index"":""logstash-2013.05.28");
        bulkJsonPieces[1].Should().Contain("New Macabre");
        bulkJsonPieces[1].Should().NotContain("Properties\"");
        bulkJsonPieces[2].Should().Contain(@"""_index"":""logstash-2013.05.30");

        // Since we pass a serializer, objects should be serialized as JSON objects
        // rather than via their ToString() implementation.
        // DO NOTE that you can't send objects as scalar values through Logger.*("{Scalar}", {});
        bulkJsonPieces[3].Should().Contain("Complex\":{");
        bulkJsonPieces[3].Should().Contain("exceptions\":[{");
    }
}
public void WhenLoggingAnEvent_OutputsValidJson()
{
    const string expectedMessage = "test";
    using (var sink = new ElasticsearchSink(_options))
    {
        sink.Emit(LogEventWithMessage(expectedMessage));
    }

    var eventWritten = AssertAndGetJsonEvents().First();
    eventWritten.Level.Should().Be(LogEventLevel.Warning);
    eventWritten.Message.Should().Be(expectedMessage);
}
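// The surrounding exception tests rely on a LogEventWithMessage helper that is not
// shown in this section. A plausible reconstruction (hypothetical; the Warning level
// is an assumption chosen to match the Level assertion in the test above):
private static LogEvent LogEventWithMessage(string message, Exception exception = null)
{
    var template = new MessageTemplateParser().Parse(message);
    return new LogEvent(
        DateTimeOffset.Now,
        LogEventLevel.Warning,
        exception,
        template,
        Enumerable.Empty<LogEventProperty>());
}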
public void IndexDecider_EndsUpInTheOutput()
{
    // DO NOTE that you can't send objects as scalar values through Logger.*("{Scalar}", {});
    var timestamp = new DateTimeOffset(2013, 05, 28, 22, 10, 20, 666, TimeSpan.FromHours(10));
    const string messageTemplate = "{Song}++ @{Complex}";
    var template = new MessageTemplateParser().Parse(messageTemplate);
    _options.IndexDecider = (l, utcTime) =>
        string.Format("logstash-{1}-{0:yyyy.MM.dd}", utcTime, l.Level.ToString().ToLowerInvariant());
    using (var sink = new ElasticsearchSink(_options))
    {
        var properties = new List<LogEventProperty>
        {
            new LogEventProperty("Song", new ScalarValue("New Macabre"))
        };
        var e = new LogEvent(timestamp, LogEventLevel.Information, null, template, properties);

        // one off
        sink.Emit(e);
        sink.Emit(e);

        var exception = new ArgumentException("parameter");
        properties = new List<LogEventProperty>
        {
            new LogEventProperty("Song", new ScalarValue("Old Macabre")),
            new LogEventProperty("Complex", new ScalarValue(new { A = 1, B = 2 }))
        };
        e = new LogEvent(timestamp.AddYears(-2), LogEventLevel.Fatal, exception, template, properties);
        sink.Emit(e);
    }

    var bulkJsonPieces = this.AssertSeenHttpPosts(_seenHttpPosts, 4);
    bulkJsonPieces[0].Should().Contain(@"""_index"":""logstash-information-2013.05.28");
    bulkJsonPieces[1].Should().Contain("New Macabre");
    bulkJsonPieces[2].Should().Contain(@"""_index"":""logstash-fatal-2011.05.28");
    bulkJsonPieces[3].Should().Contain("Old Macabre");

    // Unlike the older variant of this test above, the complex value is expected here
    // as a JSON object rather than a ToString()'d string.
    bulkJsonPieces[3].Should().Contain("Complex\":{");
}
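// A minimal sketch of wiring an IndexDecider in application code; the node URI is
// illustrative. The decider receives the event and its UTC timestamp and returns
// the target index name, e.g. "logstash-fatal-2011.05.28".
var options = new ElasticsearchSinkOptions(new Uri("http://localhost:9200"))
{
    IndexDecider = (logEvent, utcTime) =>
        string.Format("logstash-{0}-{1:yyyy.MM.dd}", logEvent.Level.ToString().ToLowerInvariant(), utcTime)
};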
public async Task WhenPassingASerializer_ShouldExpandToJson()
{
    try
    {
        await new HttpClient().GetStringAsync("http://i.do.not.exist");
    }
    catch (Exception e)
    {
        var timestamp = new DateTimeOffset(2013, 05, 28, 22, 10, 20, 666, TimeSpan.FromHours(10));
        var messageTemplate = "{Song}++";
        var template = new MessageTemplateParser().Parse(messageTemplate);
        using (var sink = new ElasticsearchSink(_options))
        {
            var properties = new List<LogEventProperty>
            {
                new LogEventProperty("Song", new ScalarValue("New Macabre")),
                new LogEventProperty("Complex", new ScalarValue(new { A = 1, B = 2 }))
            };
            var logEvent = new LogEvent(timestamp, LogEventLevel.Information, null, template, properties);
            sink.Emit(logEvent);
            logEvent = new LogEvent(timestamp.AddDays(2), LogEventLevel.Information, e, template, properties);
            sink.Emit(logEvent);
        }

        _seenHttpPosts.Should().NotBeEmpty().And.HaveCount(1);
        var json = _seenHttpPosts.First();
        var bulkJsonPieces = json.Split(new[] { '\n' }, StringSplitOptions.RemoveEmptyEntries);
        bulkJsonPieces.Should().HaveCount(4);
        bulkJsonPieces[0].Should().Contain(@"""_index"":""logstash-2013.05.28");
        bulkJsonPieces[1].Should().Contain("New Macabre");
        bulkJsonPieces[1].Should().NotContain("Properties\"");
        bulkJsonPieces[2].Should().Contain(@"""_index"":""logstash-2013.05.30");

        // We have no serializer associated with the sink, so we expect the forced ToString() of scalar values
        bulkJsonPieces[3].Should().Contain("Complex\":\"{");
        bulkJsonPieces[3].Should().Contain("exception\":\"System.Net.Http.HttpRequestException: An error");
    }
}
public void WhenLogging_ExceptionWithInner_ExceptionShouldIncludeInnerExceptions()
{
    var inner = new InvalidOperationException();
    var exception = new Exception("outer", inner);
    using (var sink = new ElasticsearchSink(_options))
    {
        sink.Emit(LogEventWithMessage("test", exception));
    }

    var eventWritten = AssertAndGetJsonEvents().First();
    var exceptionInfo = eventWritten.Exception;
    exceptionInfo.InnerException.Should().NotBeNull();
}
public void WhenLogging_WithException_ExceptionShouldBeRenderedInExceptionField()
{
    const string expectedExceptionMessage = "test exception";
    using (var sink = new ElasticsearchSink(_options))
    {
        sink.Emit(LogEventWithMessage("test", new ApplicationException(expectedExceptionMessage)));
    }

    var eventWritten = AssertAndGetJsonEvents().First();
    var exceptionInfo = eventWritten.Exception;
    exceptionInfo.Should().NotBeNull();
    exceptionInfo.Message.Should().Be(expectedExceptionMessage);
    exceptionInfo.ClassName.Should().Be("System.ApplicationException");
}
public void WhenLogging_WithException_ExceptionShouldBeRenderedInExceptionField()
{
    const string expectedExceptionMessage = "test exception";
    using (var sink = new ElasticsearchSink(_options))
    {
        sink.Emit(LogEventWithMessage("test", new Exception(expectedExceptionMessage)));
    }

    var eventWritten = AssertAndGetJsonEvents().First();
    var exceptionInfo = eventWritten.Exception;
    exceptionInfo.Should().NotBeNull();
    exceptionInfo.Message.Should().Be(expectedExceptionMessage);
#if !DOTNETCORE
    exceptionInfo.ClassName.Should().Be("System.Exception");
#endif
}