public async Task UsesCustomPropertyNames()
{
    try
    {
        await this.ThrowAsync();
    }
    catch (Exception e)
    {
        var timestamp = new DateTimeOffset(2013, 05, 28, 22, 10, 20, 666, TimeSpan.FromHours(10));
        var messageTemplate = "{Song}++";
        var template = new MessageTemplateParser().Parse(messageTemplate);
        using (var sink = new ElasticsearchSink(_options))
        {
            var properties = new List<LogEventProperty>
            {
                new LogEventProperty("Song", new ScalarValue("New Macabre")),
                new LogEventProperty("Complex", new ScalarValue(new { A = 1, B = 2 }))
            };
            var logEvent = new LogEvent(timestamp, LogEventLevel.Information, e, template, properties);
            //one off
            sink.Emit(logEvent);
            sink.Emit(logEvent);
            logEvent = new LogEvent(timestamp.AddDays(2), LogEventLevel.Information, e, template, properties);
            sink.Emit(logEvent);
        }

        var bulkJsonPieces = this.AssertSeenHttpPosts(_seenHttpPosts, 4);
        bulkJsonPieces[0].Should().Contain(@"""_index"":""logstash-2013.05.28");
        bulkJsonPieces[1].Should().Contain("New Macabre");
        bulkJsonPieces[1].Should().NotContain("Properties\"");
        bulkJsonPieces[1].Should().Contain("fields\":{");
        bulkJsonPieces[1].Should().Contain("@timestamp");
        bulkJsonPieces[2].Should().Contain(@"""_index"":""logstash-2013.05.30");
    }
}
public async Task UsesCustomPropertyNames()
{
    try
    {
        await new HttpClient().GetStringAsync("http://i.do.not.exist");
    }
    catch (Exception e)
    {
        var timestamp = new DateTimeOffset(2013, 05, 28, 22, 10, 20, 666, TimeSpan.FromHours(10));
        var messageTemplate = "{Song}++";
        var template = new MessageTemplateParser().Parse(messageTemplate);
        using (var sink = new ElasticsearchSink(_options))
        {
            var properties = new List<LogEventProperty>
            {
                new LogEventProperty("Song", new ScalarValue("New Macabre")),
                new LogEventProperty("Complex", new ScalarValue(new { A = 1, B = 2 }))
            };
            var logEvent = new LogEvent(timestamp, LogEventLevel.Information, e, template, properties);
            sink.Emit(logEvent);
            logEvent = new LogEvent(timestamp.AddDays(2), LogEventLevel.Information, e, template, properties);
            sink.Emit(logEvent);
        }

        _seenHttpPosts.Should().NotBeEmpty().And.HaveCount(1);
        var json = _seenHttpPosts.First();
        var bulkJsonPieces = json.Split(new[] { '\n' }, StringSplitOptions.RemoveEmptyEntries);
        bulkJsonPieces.Should().HaveCount(4);
        bulkJsonPieces[0].Should().Contain(@"""_index"":""logstash-2013.05.28");
        bulkJsonPieces[1].Should().Contain("New Macabre");
        bulkJsonPieces[1].Should().NotContain("Properties\"");
        bulkJsonPieces[1].Should().Contain("fields\":{");
        bulkJsonPieces[1].Should().Contain("@timestamp");
        bulkJsonPieces[2].Should().Contain(@"""_index"":""logstash-2013.05.30");
    }
}
public void when_400_error_is_returned_then_batch_fails_and_logs_exception_without_timeout()
{
    var mockHttpListener = new MockHttpListener();
    using (var collectErrorsListener = new MockEventListener())
    {
        collectErrorsListener.EnableEvents(SemanticLoggingEventSource.Log, EventLevel.Error, Keywords.All);
        var endpoint = mockHttpListener.Start(new MockHttpListenerResponse()
        {
            ResponseCode = 400,
            ContentType = "application/json",
            Content = "{ \"error\": \"InvalidIndexNameException[[log,stash] Invalid index name [log,stash], must not contain the following characters [\\\\, /, *, ?, \\\", <, >, |, , ,]]\",\"status\": 400}"
        });
        var sink = new ElasticsearchSink("instance", endpoint, "slabtest", "etw", true, TimeSpan.FromSeconds(1), 100, 800, TimeSpan.FromMinutes(1));
        sink.OnNext(EventEntryTestHelper.Create());
        var flushCompleteInTime = sink.FlushAsync().Wait(TimeSpan.FromSeconds(45));
        mockHttpListener.Stop();

        // Make sure the exception is logged
        Assert.IsTrue(collectErrorsListener.WrittenEntries.First().Payload.Single(m => m.ToString().Contains("InvalidIndexNameException")) != null);
        Assert.IsTrue(flushCompleteInTime);
    }
}
/// <summary>
/// Adds a sink that writes log events as documents to an Elasticsearch index.
/// This works great with the Kibana web interface when using the default settings.
/// Make sure to add a template to Elasticsearch like the one found here:
/// https://gist.github.com/mivano/9688328
/// </summary>
/// <param name="loggerSinkConfiguration">The logger sink configuration.</param>
/// <param name="options">Provides options specific to the Elasticsearch sink</param>
/// <returns>The logger configuration, to allow further configuration.</returns>
public static LoggerConfiguration Elasticsearch(this LoggerSinkConfiguration loggerSinkConfiguration, ElasticsearchSinkOptions options)
{
    options = options ?? new ElasticsearchSinkOptions(new[] { new Uri("http://localhost:9200") });

    var sink = new ElasticsearchSink(options);
    return loggerSinkConfiguration.Sink(sink, options.MinimumLogEventLevel ?? LevelAlias.Minimum);
}
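// A minimal usage sketch for the extension above; the node URI and index format
// below are illustrative assumptions, not values taken from this code base.
var log = new LoggerConfiguration()
    .WriteTo.Elasticsearch(new ElasticsearchSinkOptions(new Uri("http://localhost:9200"))
    {
        IndexFormat = "myapp-{0:yyyy.MM.dd}"
    })
    .CreateLogger();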
public void IndexDecider_EndsUpInTheOutput()
{
    // DO NOTE that you can't send objects as scalar values through Logger.*("{Scalar}", {});
    var timestamp = new DateTimeOffset(2013, 05, 28, 22, 10, 20, 666, TimeSpan.FromHours(10));
    const string messageTemplate = "{Song}++ @{Complex}";
    var template = new MessageTemplateParser().Parse(messageTemplate);
    _options.IndexDecider = (l, utcTime) => string.Format("logstash-{1}-{0:yyyy.MM.dd}", utcTime, l.Level.ToString().ToLowerInvariant());
    using (var sink = new ElasticsearchSink(_options))
    {
        var properties = new List<LogEventProperty>
        {
            new LogEventProperty("Song", new ScalarValue("New Macabre"))
        };
        var e = new LogEvent(timestamp, LogEventLevel.Information, null, template, properties);
        sink.Emit(e);

        var exception = new ArgumentException("parameter");
        properties = new List<LogEventProperty>
        {
            new LogEventProperty("Song", new ScalarValue("Old Macabre")),
            new LogEventProperty("Complex", new ScalarValue(new { A = 1, B = 2 }))
        };
        e = new LogEvent(timestamp.AddYears(-2), LogEventLevel.Fatal, exception, template, properties);
        sink.Emit(e);
    }

    _seenHttpPosts.Should().NotBeEmpty().And.HaveCount(1);
    var json = _seenHttpPosts.First();
    var bulkJsonPieces = json.Split(new[] { '\n' }, StringSplitOptions.RemoveEmptyEntries);
    bulkJsonPieces.Should().HaveCount(4);
    bulkJsonPieces[0].Should().Contain(@"""_index"":""logstash-information-2013.05.28");
    bulkJsonPieces[1].Should().Contain("New Macabre");
    bulkJsonPieces[2].Should().Contain(@"""_index"":""logstash-fatal-2011.05.28");
    bulkJsonPieces[3].Should().Contain("Old Macabre");

    // Serilog by default simply .ToString()'s unknown objects
    bulkJsonPieces[3].Should().Contain("Complex\":\"{");
}
protected override void Arrange()
{
    base.Arrange();

    this.sink = new ElasticsearchSink("instance", this.elasticsearchUrl, TestIndex, "etw", true, TimeSpan.FromSeconds(1), 100, 3000, TimeSpan.FromMinutes(1));
    this.msgPropValues = new[] { "1", "2", "3" };
}
private InvalidResult GetInvalidPayloadAsync(DynamicResponse baseResult, List<string> payload, out List<string> cleanPayload)
{
    int i = 0;
    cleanPayload = new List<string>();
    var items = baseResult.Body["items"];
    if (items == null)
    {
        return null;
    }

    List<string> badPayload = new List<string>();
    bool hasErrors = false;
    foreach (dynamic item in items)
    {
        var itemIndex = item?[ElasticsearchSink.BulkAction(_elasticOpType)];
        long? status = itemIndex?["status"];
        i++;
        if (!status.HasValue || status < 300)
        {
            continue;
        }

        hasErrors = true;
        var id = itemIndex?["_id"];
        var error = itemIndex?["error"];
        var errorString = $"type: {error?["type"] ?? "Unknown"}, reason: {error?["reason"] ?? "Unknown"}";
        if (int.TryParse(id.Split('_')[0], out int index))
        {
            SelfLog.WriteLine("Received failed ElasticSearch shipping result {0}: {1}. Failed payload : {2}.", status, errorString, payload.ElementAt(index * 2 + 1));
            badPayload.Add(payload.ElementAt(index * 2));
            badPayload.Add(payload.ElementAt(index * 2 + 1));
            if (_cleanPayload != null)
            {
                cleanPayload.Add(payload.ElementAt(index * 2));
                cleanPayload.Add(_cleanPayload(payload.ElementAt(index * 2 + 1), status, errorString));
            }
        }
        else
        {
            SelfLog.WriteLine($"Received failed ElasticSearch shipping result {status}: {errorString}.");
        }
    }

    if (!hasErrors)
    {
        return null;
    }

    return new InvalidResult()
    {
        StatusCode = baseResult.HttpStatusCode ?? 500,
        Content = baseResult.ToString(),
        BadPayLoad = String.Join(Environment.NewLine, badPayload)
    };
}
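// For reference, a sketch of the _bulk response shape this method walks (values hypothetical):
// { "items": [ { "index": { "_id": "0_<guid>", "status": 400,
//                           "error": { "type": "mapper_parsing_exception", "reason": "..." } } } ] }
// Each item's "_id" is prefixed with the payload position (see AddToPayLoad below), which is
// how a failing document is matched back to its action/source line pair in the payload list.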
public void WhenLoggingAnEvent_OutputsValidJson()
{
    const string expectedMessage = "test";
    using (var sink = new ElasticsearchSink(_options))
    {
        sink.Emit(LogEventWithMessage(expectedMessage));
    }

    var eventWritten = AssertAndGetJsonEvents().First();
    eventWritten.Level.Should().Be(LogEventLevel.Warning);
    eventWritten.Message.Should().Be(expectedMessage);
}
public void WhenLogging_ExceptionWithInner_ExceptionShouldIncludeInnerExceptions()
{
    var inner = new InvalidOperationException();
    var exception = new Exception("outer", inner);
    using (var sink = new ElasticsearchSink(_options))
    {
        sink.Emit(LogEventWithMessage("test", exception));
    }

    var eventWritten = AssertAndGetJsonEvents().First();
    var exceptionInfo = eventWritten.Exception;
    exceptionInfo.InnerException.Should().NotBeNull();
}
/// <summary>
/// Appends the bulk action line and the serialized event to the payload.
/// </summary>
/// <param name="nextLine">The serialized log event to add to the payload.</param>
protected override void AddToPayLoad(string nextLine)
{
    var indexName = _getIndexForEvent(nextLine, _date);
    var action = ElasticsearchSink.CreateElasticAction(
        opType: _elasticOpType,
        indexName: indexName,
        pipelineName: _pipelineName,
        id: _count + "_" + Guid.NewGuid(),
        mappingType: _typeName);
    var actionJson = LowLevelRequestResponseSerializer.Instance.SerializeToString(action);
    _payload.Add(actionJson);
    _payload.Add(nextLine);
    _count++;
}
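// For reference, a sketch of the newline-delimited pair this produces (values hypothetical):
// {"index":{"_index":"logstash-2013.05.28","_id":"0_2f1b..."}}
// {"@timestamp":"2013-05-28T22:10:20.666+10:00","level":"Information","message":"..."}
// Elasticsearch's _bulk endpoint expects exactly this action-then-source line layout.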
public void WhenLogging_WithException_ExceptionShouldBeRenderedInExceptionField()
{
    const string expectedExceptionMessage = "test exception";
    using (var sink = new ElasticsearchSink(_options))
    {
        sink.Emit(LogEventWithMessage("test", new ApplicationException(expectedExceptionMessage)));
    }

    var eventWritten = AssertAndGetJsonEvents().First();
    var exceptionInfo = eventWritten.Exception;
    exceptionInfo.Should().NotBeNull();
    exceptionInfo.Message.Should().Be(expectedExceptionMessage);
    exceptionInfo.ClassName.Should().Be("System.ApplicationException");
}
public void DefaultBulkActionV8()
{
    _options.IndexFormat = "logs";
    _options.TypeName = null;
    _options.PipelineName = null;
    using (var sink = new ElasticsearchSink(_options))
    {
        sink.Emit(ADummyLogEvent());
        sink.Emit(ADummyLogEvent());
    }

    var bulkJsonPieces = this.AssertSeenHttpPosts(_seenHttpPosts, 2, 1);
    const string expectedAction = @"{""index"":{""_index"":""logs""}}";
    bulkJsonPieces[0].Should().Be(expectedAction);
}
/// <summary>
/// Subscribes to an <see cref="IObservable{EventEntry}" /> using a <see cref="ElasticsearchSink" />.
/// </summary>
/// <param name="eventStream">The event stream. Typically this is an instance of <see cref="ObservableEventListener" />.</param>
/// <param name="instanceName">The name of the instance originating the entries.</param>
/// <param name="connectionString">The endpoint address for the Elasticsearch Service.</param>
/// <param name="index">Index name prefix formatted as index-{0:yyyy.MM.dd}</param>
/// <param name="type">The Elasticsearch entry type</param>
/// <param name="flattenPayload">Flatten the payload collection when serializing event entries</param>
/// <param name="bufferingInterval">The buffering interval between each batch publishing. Default value is <see cref="Buffering.DefaultBufferingInterval" />.</param>
/// <param name="onCompletedTimeout">Defines a timeout interval for flushing the entries after an <see cref="ElasticsearchSink.OnCompleted" /> call is received and before disposing the sink.</param>
/// <param name="bufferingCount">Buffering count to send entries to Elasticsearch. Default value is <see cref="Buffering.DefaultBufferingCount" /></param>
/// <param name="maxBufferSize">The maximum number of entries that can be buffered while it's sending to Elasticsearch before the sink starts dropping entries.
/// This means that if the timeout period elapses, some event entries will be dropped and not sent to the store. Normally, calling <see cref="IDisposable.Dispose" /> on
/// the <see cref="System.Diagnostics.Tracing.EventListener" /> will block until all the entries are flushed or the interval elapses.
/// If <see langword="null" /> is specified, then the call will block indefinitely until the flush operation finishes.</param>
/// <returns>
/// A subscription to the sink that can be disposed to unsubscribe the sink and dispose it, or to get access to the sink instance.
/// </returns>
public static SinkSubscription<ElasticsearchSink> LogToElasticsearch(this IObservable<EventEntry> eventStream, string instanceName, string connectionString, string index, string type, bool flattenPayload = true, TimeSpan? bufferingInterval = null, TimeSpan? onCompletedTimeout = null, int bufferingCount = Buffering.DefaultBufferingCount, int maxBufferSize = Buffering.DefaultMaxBufferSize)
{
    var sink = new ElasticsearchSink(instanceName, connectionString, index, type, flattenPayload, bufferingInterval ?? Buffering.DefaultBufferingInterval, bufferingCount, maxBufferSize, onCompletedTimeout ?? Timeout.InfiniteTimeSpan);
    var subscription = eventStream.Subscribe(sink);
    return new SinkSubscription<ElasticsearchSink>(subscription, sink);
}
public void WhenLogging_WithException_ExceptionShouldBeRenderedInExceptionField()
{
    const string expectedExceptionMessage = "test exception";
    using (var sink = new ElasticsearchSink(_options))
    {
        sink.Emit(LogEventWithMessage("test", new Exception(expectedExceptionMessage)));
    }

    var eventWritten = AssertAndGetJsonEvents().First();
    var exceptionInfo = eventWritten.Exception;
    exceptionInfo.Should().NotBeNull();
    exceptionInfo.Message.Should().Be(expectedExceptionMessage);
#if !DOTNETCORE
    exceptionInfo.ClassName.Should().Be("System.Exception");
#endif
}
public void BulkActionV7OverrideTypeName()
{
    _options.IndexFormat = "logs";
    _options.TypeName = "logevent"; // This is the default value when creating the sink via configuration
    _options.AutoRegisterTemplateVersion = AutoRegisterTemplateVersion.ESv7;
    _options.PipelineName = null;
    using (var sink = new ElasticsearchSink(_options))
    {
        sink.Emit(ADummyLogEvent());
        sink.Emit(ADummyLogEvent());
    }

    var bulkJsonPieces = this.AssertSeenHttpPosts(_seenHttpPosts, 2, 1);
    const string expectedAction = @"{""index"":{""_type"":""_doc"",""_index"":""logs""}}";
    bulkJsonPieces[0].Should().Be(expectedAction);
}
/// <summary>
/// Subscribes to an <see cref="IObservable{EventEntry}" /> using a <see cref="ElasticsearchSink" />.
/// </summary>
/// <param name="eventStream">The event stream. Typically this is an instance of <see cref="ObservableEventListener" />.</param>
/// <param name="instanceName">The name of the instance originating the entries.</param>
/// <param name="connectionString">The endpoint address for the Elasticsearch Service.</param>
/// <param name="index">Index name prefix formatted as index-{0:yyyy.MM.dd}</param>
/// <param name="type">The Elasticsearch entry type</param>
/// <param name="flattenPayload">Flatten the payload collection when serializing event entries</param>
/// <param name="bufferingInterval">The buffering interval between each batch publishing. Default value is <see cref="Buffering.DefaultBufferingInterval" />.</param>
/// <param name="onCompletedTimeout">Defines a timeout interval for flushing the entries after an <see cref="ElasticsearchSink.OnCompleted" /> call is received and before disposing the sink.</param>
/// <param name="bufferingCount">Buffering count to send entries to Elasticsearch. Default value is <see cref="Buffering.DefaultBufferingCount" /></param>
/// <param name="maxBufferSize">The maximum number of entries that can be buffered while it's sending to Elasticsearch before the sink starts dropping entries.
/// This means that if the timeout period elapses, some event entries will be dropped and not sent to the store. Normally, calling <see cref="IDisposable.Dispose" /> on
/// the <see cref="System.Diagnostics.Tracing.EventListener" /> will block until all the entries are flushed or the interval elapses.
/// If <see langword="null" /> is specified, then the call will block indefinitely until the flush operation finishes.</param>
/// <param name="globalContextExtension">A dictionary of user defined keys and values to be attached to each log.</param>
/// <returns>
/// A subscription to the sink that can be disposed to unsubscribe the sink and dispose it, or to get access to the sink instance.
/// </returns>
public static SinkSubscription<ElasticsearchSink> LogToElasticsearch(this IObservable<EventEntry> eventStream, string instanceName, string connectionString, string index, string type, bool flattenPayload = true, TimeSpan? bufferingInterval = null, TimeSpan? onCompletedTimeout = null, int bufferingCount = Buffering.DefaultBufferingCount, int maxBufferSize = Buffering.DefaultMaxBufferSize, Dictionary<string, string> globalContextExtension = null)
{
    var sink = new ElasticsearchSink(instanceName, connectionString, index, type, flattenPayload, bufferingInterval ?? Buffering.DefaultBufferingInterval, bufferingCount, maxBufferSize, onCompletedTimeout ?? Timeout.InfiniteTimeSpan, JsonConvert.SerializeObject(globalContextExtension));
    var subscription = eventStream.Subscribe(sink);
    return new SinkSubscription<ElasticsearchSink>(subscription, sink);
}
public void BulkActionDataStreams()
{
    _options.IndexFormat = "logs-my-stream";
    _options.TypeName = null;
    _options.PipelineName = null;
    _options.BatchAction = ElasticOpType.Create;
    using (var sink = new ElasticsearchSink(_options))
    {
        sink.Emit(ADummyLogEvent());
        sink.Emit(ADummyLogEvent());
    }

    var bulkJsonPieces = this.AssertSeenHttpPosts(_seenHttpPosts, 2, 1);
    const string expectedAction = @"{""create"":{""_index"":""logs-my-stream""}}";
    bulkJsonPieces[0].Should().Be(expectedAction);
}
public void PipelineAction()
{
    _options.IndexFormat = "logs-my-stream";
    _options.TypeName = "_doc";
    _options.PipelineName = "my-pipeline";
    _options.BatchAction = ElasticOpType.Index;
    using (var sink = new ElasticsearchSink(_options))
    {
        sink.Emit(ADummyLogEvent());
        sink.Emit(ADummyLogEvent());
    }

    var bulkJsonPieces = this.AssertSeenHttpPosts(_seenHttpPosts, 2, 1);
    const string expectedAction = @"{""index"":{""_type"":""_doc"",""_index"":""logs-my-stream"",""pipeline"":""my-pipeline""}}";
    bulkJsonPieces[0].Should().Be(expectedAction);
}
/// <summary>
/// Subscribes to an <see cref="IObservable{EventEntry}" /> using a <see cref="ElasticsearchSink" />.
/// </summary>
/// <param name="eventStream">The event stream. Typically this is an instance of <see cref="ObservableEventListener" />.</param>
/// <param name="instanceName">The name of the instance originating the entries.</param>
/// <param name="connectionString">The endpoint address for the Elasticsearch Service.</param>
/// <param name="index">Index name prefix formatted as index-{0:yyyy.MM.dd}</param>
/// <param name="type">The Elasticsearch entry type</param>
/// <param name="flattenPayload">Flatten the payload collection when serializing event entries</param>
/// <param name="bufferingInterval">The buffering interval between each batch publishing. Default value is <see cref="Buffering.DefaultBufferingInterval" />.</param>
/// <param name="onCompletedTimeout">Defines a timeout interval for flushing the entries after an <see cref="ElasticsearchSink.OnCompleted" /> call is received and before disposing the sink.</param>
/// <param name="bufferingCount">Buffering count to send entries to Elasticsearch. Default value is <see cref="Buffering.DefaultBufferingCount" /></param>
/// <param name="maxBufferSize">The maximum number of entries that can be buffered while it's sending to Elasticsearch before the sink starts dropping entries.
/// This means that if the timeout period elapses, some event entries will be dropped and not sent to the store. Normally, calling <see cref="IDisposable.Dispose" /> on
/// the <see cref="System.Diagnostics.Tracing.EventListener" /> will block until all the entries are flushed or the interval elapses.
/// If <see langword="null" /> is specified, then the call will block indefinitely until the flush operation finishes.</param>
/// <param name="userName">The username to authenticate with Elasticsearch using Basic HTTP authentication.</param>
/// <param name="password">The password to authenticate with Elasticsearch using Basic HTTP authentication.</param>
/// <param name="globalContextExtension">A dictionary of user defined keys and values to be attached to each log.</param>
/// <returns>
/// A subscription to the sink that can be disposed to unsubscribe the sink and dispose it, or to get access to the sink instance.
/// </returns>
public static SinkSubscription<ElasticsearchSink> LogToElasticsearch(this IObservable<EventEntry> eventStream, string instanceName, string connectionString, string index, string type, bool flattenPayload = true, TimeSpan? bufferingInterval = null, TimeSpan? onCompletedTimeout = null, int bufferingCount = Buffering.DefaultBufferingCount, int maxBufferSize = Buffering.DefaultMaxBufferSize, string userName = null, string password = null, Dictionary<string, string> globalContextExtension = null)
{
    var sink = new ElasticsearchSink(instanceName, connectionString, index, type, flattenPayload, bufferingInterval ?? Buffering.DefaultBufferingInterval, bufferingCount, maxBufferSize, onCompletedTimeout ?? Timeout.InfiniteTimeSpan, userName, password, JsonConvert.SerializeObject(globalContextExtension));
    var subscription = eventStream.Subscribe(sink);
    return new SinkSubscription<ElasticsearchSink>(subscription, sink);
}
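// A usage sketch for the subscription extensions above, assuming the Semantic Logging
// Application Block's ObservableEventListener; the endpoint, index, type, and event
// source names are illustrative assumptions.
var listener = new ObservableEventListener();
listener.EnableEvents(MyEventSource.Log, EventLevel.Informational, Keywords.All); // MyEventSource is hypothetical
var subscription = listener.LogToElasticsearch("my-instance", "http://localhost:9200", "slabtest", "etw");
// Disposing the subscription flushes buffered entries and disposes the sink.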
public void CustomIndex_And_TypeName_EndsUpInTheOutput()
{
    // DO NOTE that you can't send objects as scalar values through Logger.*("{Scalar}", {});
    var timestamp = new DateTimeOffset(2013, 05, 28, 22, 10, 20, 666, TimeSpan.FromHours(10));
    const string messageTemplate = "{Song}++ @{Complex}";
    var template = new MessageTemplateParser().Parse(messageTemplate);
    _options.TypeName = "custom-event-type";
    _options.IndexFormat = "event-index-{0:yyyy.MM.dd}";
    using (var sink = new ElasticsearchSink(_options))
    {
        var properties = new List<LogEventProperty>
        {
            new LogEventProperty("Song", new ScalarValue("New Macabre"))
        };
        var e = new LogEvent(timestamp, LogEventLevel.Information, null, template, properties);
        sink.Emit(e);

        var exception = new ArgumentException("parameter");
        properties = new List<LogEventProperty>
        {
            new LogEventProperty("Song", new ScalarValue("Old Macabre")),
            new LogEventProperty("Complex", new ScalarValue(new { A = 1, B = 2 }))
        };
        e = new LogEvent(timestamp.AddYears(-2), LogEventLevel.Fatal, exception, template, properties);
        sink.Emit(e);
    }

    _seenHttpPosts.Should().NotBeEmpty().And.HaveCount(1);
    var json = _seenHttpPosts.First();
    var bulkJsonPieces = json.Split(new[] { '\n' }, StringSplitOptions.RemoveEmptyEntries);
    bulkJsonPieces.Should().HaveCount(4);
    bulkJsonPieces[0].Should().Contain(@"""_index"":""event-index-2013.05.28");
    bulkJsonPieces[0].Should().Contain(@"""_type"":""custom-event-type");
    bulkJsonPieces[1].Should().Contain("New Macabre");
    bulkJsonPieces[2].Should().Contain(@"""_index"":""event-index-2011.05.28");
    bulkJsonPieces[2].Should().Contain(@"""_type"":""custom-event-type");
    bulkJsonPieces[3].Should().Contain("Old Macabre");

    // Serilog by default simply .ToString()'s unknown objects
    bulkJsonPieces[3].Should().Contain("Complex\":\"{");
}
public void UpperCasedIndex_And_TypeName_EndsUpInTheOutput()
{
    // DO NOTE that you can't send objects as scalar values through Logger.*("{Scalar}", {});
    var timestamp = new DateTimeOffset(2013, 05, 28, 22, 10, 20, 666, TimeSpan.FromHours(10));
    const string messageTemplate = "{Song}++ @{Complex}";
    var template = new MessageTemplateParser().Parse(messageTemplate);
    _options.TypeName = "custom-event-type";
    _options.IndexFormat = "Event-Index-{0:yyyy.MM.dd}";
    using (var sink = new ElasticsearchSink(_options))
    {
        var properties = new List<LogEventProperty>
        {
            new LogEventProperty("Song", new ScalarValue("New Macabre"))
        };
        var e = new LogEvent(timestamp, LogEventLevel.Information, null, template, properties);
        //one off
        sink.Emit(e);
        sink.Emit(e);

        var exception = new ArgumentException("parameter");
        properties = new List<LogEventProperty>
        {
            new LogEventProperty("Song", new ScalarValue("Old Macabre")),
            new LogEventProperty("Complex", new ScalarValue(new { A = 1, B = 2 }))
        };
        e = new LogEvent(timestamp.AddYears(-2), LogEventLevel.Fatal, exception, template, properties);
        sink.Emit(e);
    }

    var bulkJsonPieces = this.AssertSeenHttpPosts(_seenHttpPosts, 4);
    bulkJsonPieces.Should().HaveCount(4);
    bulkJsonPieces[0].Should().Contain(@"""_index"":""event-index-2013.05.28");
    bulkJsonPieces[0].Should().Contain(@"""_type"":""custom-event-type");
    bulkJsonPieces[1].Should().Contain("New Macabre");
    bulkJsonPieces[2].Should().Contain(@"""_index"":""event-index-2011.05.28");
    bulkJsonPieces[2].Should().Contain(@"""_type"":""custom-event-type");
    bulkJsonPieces[3].Should().Contain("Old Macabre");
    bulkJsonPieces[3].Should().Contain("Complex\":{");
}
public async Task WhenPassingASerializer_ShouldExpandToJson()
{
    try
    {
        await new HttpClient().GetStringAsync("http://i.do.not.exist");
    }
    catch (Exception e)
    {
        var timestamp = new DateTimeOffset(2013, 05, 28, 22, 10, 20, 666, TimeSpan.FromHours(10));
        var messageTemplate = "{Song}++";
        var template = new MessageTemplateParser().Parse(messageTemplate);
        _options.Serializer = new ElasticsearchJsonNetSerializer();
        using (var sink = new ElasticsearchSink(_options))
        {
            var properties = new List<LogEventProperty>
            {
                new LogEventProperty("Song", new ScalarValue("New Macabre")),
                new LogEventProperty("Complex", new ScalarValue(new { A = 1, B = 2 }))
            };
            var logEvent = new LogEvent(timestamp, LogEventLevel.Information, null, template, properties);
            sink.Emit(logEvent);
            logEvent = new LogEvent(timestamp.AddDays(2), LogEventLevel.Information, e, template, properties);
            sink.Emit(logEvent);
        }

        _seenHttpPosts.Should().NotBeEmpty().And.HaveCount(1);
        var json = _seenHttpPosts.First();
        var bulkJsonPieces = json.Split(new[] { '\n' }, StringSplitOptions.RemoveEmptyEntries);
        bulkJsonPieces.Should().HaveCount(4);
        bulkJsonPieces[0].Should().Contain(@"""_index"":""logstash-2013.05.28");
        bulkJsonPieces[1].Should().Contain("New Macabre");
        bulkJsonPieces[1].Should().NotContain("Properties\"");
        bulkJsonPieces[2].Should().Contain(@"""_index"":""logstash-2013.05.30");

        // Since we pass a serializer, objects should serialize as JSON objects
        // rather than via their ToString() implementation.
        // DO NOTE that you can't send objects as scalar values through Logger.*("{Scalar}", {});
        bulkJsonPieces[3].Should().Contain("Complex\":{");
        bulkJsonPieces[3].Should().Contain("exceptions\":[{");
    }
}
public async Task WhenPassingASerializer_ShouldExpandToJson()
{
    try
    {
        await new HttpClient().GetStringAsync("http://i.do.not.exist");
    }
    catch (Exception e)
    {
        var timestamp = new DateTimeOffset(2013, 05, 28, 22, 10, 20, 666, TimeSpan.FromHours(10));
        var messageTemplate = "{Song}++";
        var template = new MessageTemplateParser().Parse(messageTemplate);
        _options.Serializer = new ElasticsearchJsonNetSerializer();
        using (var sink = new ElasticsearchSink(_options))
        {
            var properties = new List<LogEventProperty>
            {
                new LogEventProperty("Song", new ScalarValue("New Macabre")),
                new LogEventProperty("Complex", new ScalarValue(new { A = 1, B = 2 }))
            };
            var logEvent = new LogEvent(timestamp, LogEventLevel.Information, null, template, properties);
            sink.Emit(logEvent);
            logEvent = new LogEvent(timestamp.AddDays(2), LogEventLevel.Information, e, template, properties);
            sink.Emit(logEvent);
        }

        _seenHttpPosts.Should().NotBeEmpty().And.HaveCount(1);
        var json = _seenHttpPosts.First();
        var bulkJsonPieces = json.Split(new[] { '\n' }, StringSplitOptions.RemoveEmptyEntries);
        bulkJsonPieces.Should().HaveCount(4);
        bulkJsonPieces[0].Should().Contain(@"""_index"":""logstash-2013.05.28");
        bulkJsonPieces[1].Should().Contain("New Macabre");
        bulkJsonPieces[1].Should().NotContain("Properties\"");
        bulkJsonPieces[2].Should().Contain(@"""_index"":""logstash-2013.05.30");

        // Since we pass a serializer, objects should serialize as JSON objects
        // rather than via their ToString() implementation.
        // DO NOTE that you can't send objects as scalar values through Logger.*("{Scalar}", {});
        bulkJsonPieces[3].Should().Contain("Complex\":{");

        // Since we are passing an ISerializer, the exception should be logged as an object and not a string
        bulkJsonPieces[3].Should().Contain("exception\":{");
    }
}
public void IndexDecider_EndsUpInTheOutput()
{
    // DO NOTE that you can't send objects as scalar values through Logger.*("{Scalar}", {});
    var timestamp = new DateTimeOffset(2013, 05, 28, 22, 10, 20, 666, TimeSpan.FromHours(10));
    const string messageTemplate = "{Song}++ @{Complex}";
    var template = new MessageTemplateParser().Parse(messageTemplate);
    _options.IndexDecider = (l, utcTime) => string.Format("logstash-{1}-{0:yyyy.MM.dd}", utcTime, l.Level.ToString().ToLowerInvariant());
    using (var sink = new ElasticsearchSink(_options))
    {
        var properties = new List<LogEventProperty>
        {
            new LogEventProperty("Song", new ScalarValue("New Macabre"))
        };
        var e = new LogEvent(timestamp, LogEventLevel.Information, null, template, properties);
        //one off
        sink.Emit(e);
        sink.Emit(e);

        var exception = new ArgumentException("parameter");
        properties = new List<LogEventProperty>
        {
            new LogEventProperty("Song", new ScalarValue("Old Macabre")),
            new LogEventProperty("Complex", new ScalarValue(new { A = 1, B = 2 }))
        };
        e = new LogEvent(timestamp.AddYears(-2), LogEventLevel.Fatal, exception, template, properties);
        sink.Emit(e);
    }

    var bulkJsonPieces = this.AssertSeenHttpPosts(_seenHttpPosts, 4);
    bulkJsonPieces[0].Should().Contain(@"""_index"":""logstash-information-2013.05.28");
    bulkJsonPieces[1].Should().Contain("New Macabre");
    bulkJsonPieces[2].Should().Contain(@"""_index"":""logstash-fatal-2011.05.28");
    bulkJsonPieces[3].Should().Contain("Old Macabre");

    // Serilog by default simply .ToString()'s unknown objects
    bulkJsonPieces[3].Should().Contain("Complex\":{");
}
public async Task WhenPassingASerializer_ShouldExpandToJson()
{
    try
    {
        await this.ThrowAsync();
    }
    catch (Exception e)
    {
        var timestamp = new DateTimeOffset(2013, 05, 28, 22, 10, 20, 666, TimeSpan.FromHours(10));
        var messageTemplate = "{Song}++";
        var template = new MessageTemplateParser().Parse(messageTemplate);
        using (var sink = new ElasticsearchSink(_options))
        {
            var properties = new List<LogEventProperty>
            {
                new LogEventProperty("Song", new ScalarValue("New Macabre")),
                new LogEventProperty("Complex", new ScalarValue(new { A = 1, B = 2 }))
            };
            var logEvent = new LogEvent(timestamp, LogEventLevel.Information, null, template, properties);
            //one off
            sink.Emit(logEvent);
            sink.Emit(logEvent);
            logEvent = new LogEvent(timestamp.AddDays(2), LogEventLevel.Information, e, template, properties);
            sink.Emit(logEvent);
        }

        var bulkJsonPieces = this.AssertSeenHttpPosts(_seenHttpPosts, 4);
        bulkJsonPieces[0].Should().Contain(@"""_index"":""logstash-2013.05.28");
        bulkJsonPieces[1].Should().Contain("New Macabre");
        bulkJsonPieces[1].Should().NotContain("Properties\"");
        bulkJsonPieces[2].Should().Contain(@"""_index"":""logstash-2013.05.30");

        // Since we pass a serializer, objects should serialize as JSON objects
        // rather than via their ToString() implementation.
        // DO NOTE that you can't send objects as scalar values through Logger.*("{Scalar}", {});
        bulkJsonPieces[3].Should().Contain("Complex\":{");
        bulkJsonPieces[3].Should().Contain("exceptions\":[{");
    }
}
public async Task WhenPassingASerializer_ShouldExpandToJson()
{
    try
    {
        await new HttpClient().GetStringAsync("http://i.do.not.exist");
    }
    catch (Exception e)
    {
        var timestamp = new DateTimeOffset(2013, 05, 28, 22, 10, 20, 666, TimeSpan.FromHours(10));
        var messageTemplate = "{Song}++";
        var template = new MessageTemplateParser().Parse(messageTemplate);
        using (var sink = new ElasticsearchSink(_options))
        {
            var properties = new List<LogEventProperty>
            {
                new LogEventProperty("Song", new ScalarValue("New Macabre")),
                new LogEventProperty("Complex", new ScalarValue(new { A = 1, B = 2 }))
            };
            var logEvent = new LogEvent(timestamp, LogEventLevel.Information, null, template, properties);
            sink.Emit(logEvent);
            logEvent = new LogEvent(timestamp.AddDays(2), LogEventLevel.Information, e, template, properties);
            sink.Emit(logEvent);
        }

        _seenHttpPosts.Should().NotBeEmpty().And.HaveCount(1);
        var json = _seenHttpPosts.First();
        var bulkJsonPieces = json.Split(new[] { '\n' }, StringSplitOptions.RemoveEmptyEntries);
        bulkJsonPieces.Should().HaveCount(4);
        bulkJsonPieces[0].Should().Contain(@"""_index"":""logstash-2013.05.28");
        bulkJsonPieces[1].Should().Contain("New Macabre");
        bulkJsonPieces[1].Should().NotContain("Properties\"");
        bulkJsonPieces[2].Should().Contain(@"""_index"":""logstash-2013.05.30");

        // We have no serializer associated with the sink, so we expect the forced ToString() of scalar values
        bulkJsonPieces[3].Should().Contain("Complex\":\"{");
        bulkJsonPieces[3].Should().Contain("exception\":\"System.Net.Http.HttpRequestException: An error");
    }
}
public ElasticSearchEventService(ElasticsearchSinkOptions options, ILogEventMapper mapper = null)
{
    var sink = new ElasticsearchSink(options);
    _emitter = new Emitter(sink, mapper);
}
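// A minimal construction sketch for the service above, based only on the constructor
// signature shown; the node URI is an illustrative assumption.
var service = new ElasticSearchEventService(new ElasticsearchSinkOptions(new Uri("http://localhost:9200")));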