internal DemoStack(Construct scope, string id, IStackProps? props = null) : base(scope, id, props)
{
    // For this demo we wire a 'passthrough' (/dev/null-style) lambda to
    // Kinesis: the lambda is invoked but discards the logs. See the code of
    // 'CreateDevNullLogShipper' to see how simple a log shipper is to write;
    // a real deployment would instead forward to something like loggly,
    // splunk, logz.io, etc.
    var shipper = LogShipper.CreateDevNullLogShipper(this);

    // Build the aggregator and hook it up to our shipper lambda.
    new LogAggregator(this, "Aggregator", new LogAggregatorProps(
        // The log shipper to connect to.
        shipper,

        // In this demo, only lambdas whose names start with 'Demo' have
        // their logs forwarded to the Kinesis stream and on to the
        // 'DevNull' log shipper.
        logGroupsPrefix: "/aws/lambda/Demo",

        // Once logs have been forwarded to an external system (loggly,
        // splunk, logz.io, ...) there is no value in keeping them in
        // CloudWatch for long — the default 'infinite' retention quickly
        // becomes costly. So every lambda matching 'logGroupsPrefix' gets
        // its CloudWatch retention trimmed to 5 days.
        cloudWatchLogRetentionInDays: 5,

        // A batch size of 1 gives the fastest feedback for the demo: the
        // log shipper is invoked as soon as a single message is available
        // in the Kinesis stream.
        kinesisBatchSize: 1

        // NOTE:
        //
        // A few more parameters are available; see the code of
        // 'LogAggregatorProps' for details.
    ));
}
protected override async Task<int> Run()
{
    try
    {
        // Turn each user-supplied key/value pair into an enricher that
        // stamps the value onto every ingested event.
        var enrichers = new List<ILogEventEnricher>();
        foreach (var property in _properties.Properties)
        {
            enrichers.Add(new ScalarPropertyEnricher(property.Key, property.Value));
        }

        // Compile the optional filter expression; `@Level` is rewritten to
        // the surrogate property name before compilation.
        Func<LogEvent, bool> filter = null;
        if (_filter != null)
        {
            var compiled = FilterLanguage.CreateFilter(
                _filter.Replace("@Level", SurrogateLevelProperty.PropertyName));
            filter = evt => true.Equals(compiled(evt));
        }

        // Read from the named input file when one was supplied, otherwise
        // from standard input. FileShare.ReadWrite lets us open a file that
        // another process is still writing to.
        using (var fileReader = _fileInputFeature.InputFilename == null
            ? null
            : new StreamReader(File.Open(
                _fileInputFeature.InputFilename, FileMode.Open, FileAccess.Read, FileShare.ReadWrite)))
        {
            var source = fileReader ?? Console.In;
            var reader = _json
                ? (ILogEventReader)new JsonLogEventReader(source)
                : new PlainTextLogEventReader(source, _pattern);

            return await LogShipper.ShipEvents(
                _connectionFactory.Connect(_connection),
                reader,
                enrichers,
                _invalidDataHandlingFeature.InvalidDataHandling,
                _sendFailureHandlingFeature.SendFailureHandling,
                filter);
        }
    }
    catch (Exception ex)
    {
        Log.Error(ex, "Ingestion failed: {ErrorMessage}", ex.Message);
        return 1;
    }
}
/// <summary>
/// Runs the Roastery sample application against an in-memory buffering sink
/// while a background task ships the buffered events to Seq.
/// </summary>
/// <param name="connection">The Seq connection events are shipped to.</param>
/// <param name="apiKey">API key passed to the shipper.</param>
/// <param name="batchSize">Batch size used when shipping events.</param>
/// <param name="echoToStdout">When true, events are also echoed to the console.</param>
public static async Task RunAsync(SeqConnection connection, string apiKey, int batchSize, bool echoToStdout)
{
    var buffer = new BufferingSink();
    var logger = new LoggerConfiguration()
        .MinimumLevel.Debug()
        .Enrich.FromLogContext()
        .Enrich.WithProperty("Origin", "seqcli sample ingest")
        .WriteTo.Conditional(_ => echoToStdout, wt => wt.Console())
        .WriteTo.Sink(buffer)
        .CreateLogger();

    // Start draining the buffer concurrently while the sample app produces events.
    var ship = Task.Run(() => LogShipper.ShipEvents(connection, apiKey, buffer, InvalidDataHandling.Fail, SendFailureHandling.Continue, batchSize));

    try
    {
        await Roastery.Program.Main(logger);
    }
    finally
    {
        // Disposing the logger flushes it and completes the buffer, which is
        // what lets the shipping task drain the remaining events and finish.
        // The previous version combined `using var` with an explicit
        // Dispose(), disposing the logger twice; a single finally-scoped
        // Dispose() keeps the exception path covered without the redundancy.
        logger.Dispose();
    }

    await ship;
}
/// <summary>
/// Wraps the given buffer and shipper, rejecting null arguments.
/// </summary>
/// <param name="logBuffer">Buffer to expose via <c>Buffer</c>; must not be null.</param>
/// <param name="logShipper">Shipper to expose via <c>Shipper</c>; must not be null.</param>
public ActiveLogBuffer(LogBuffer logBuffer, LogShipper logShipper)
{
    if (logBuffer == null) throw new ArgumentNullException(nameof(logBuffer));
    if (logShipper == null) throw new ArgumentNullException(nameof(logShipper));

    Buffer = logBuffer;
    Shipper = logShipper;
}
protected override async Task<int> Run()
{
    try
    {
        // Stamp user-supplied properties (and the optional level override)
        // onto every ingested event.
        var enrichers = new List<ILogEventEnricher>();
        foreach (var (name, value) in _properties.Properties)
        {
            enrichers.Add(new ScalarPropertyEnricher(name, value));
        }

        if (_level != null)
        {
            enrichers.Add(new ScalarPropertyEnricher(SurrogateLevelProperty.PropertyName, _level));
        }

        // Compile the optional filter expression; `@Level` is rewritten to
        // the surrogate property name before compilation.
        Func<LogEvent, bool> filter = null;
        if (_filter != null)
        {
            var compiled = FilterLanguage.CreateFilter(
                _filter.Replace("@Level", SurrogateLevelProperty.PropertyName));
            filter = evt => true.Equals(compiled(evt));
        }

        var connection = _connectionFactory.Connect(_connection);
        var (_, apiKey) = _connectionFactory.GetConnectionDetails(_connection);
        var batchSize = _batchSize.Value;

        // Ship each input in turn, stopping at the first non-zero exit code.
        foreach (var input in _fileInputFeature.OpenInputs())
        {
            using (input)
            {
                var reader = _json
                    ? (ILogEventReader)new JsonLogEventReader(input)
                    : new PlainTextLogEventReader(input, _pattern);

                reader = new EnrichingReader(reader, enrichers);

                if (_message != null)
                {
                    reader = new StaticMessageTemplateReader(reader, _message);
                }

                var exitCode = await LogShipper.ShipEvents(
                    connection,
                    apiKey,
                    reader,
                    _invalidDataHandlingFeature.InvalidDataHandling,
                    _sendFailureHandlingFeature.SendFailureHandling,
                    batchSize,
                    filter);

                if (exitCode != 0)
                {
                    return exitCode;
                }
            }
        }

        return 0;
    }
    catch (Exception ex)
    {
        Log.Error(ex, "Ingestion failed: {ErrorMessage}", ex.Message);
        return 1;
    }
}