// Streams the logs for module <paramref name="id"/> from the raw byte stream to the
// callback, applying the configured log-level/regex filters, the content-type mapping,
// and optional gzip compression, by building and running an Akka streaming graph.
public async Task ProcessLogsStream(string id, Stream stream, ModuleLogOptions logOptions, Func<ArraySegment<byte>, Task> callback)
{
    // Parse the raw byte stream into ModuleLogMessageData items for this module id.
    GraphBuilder graphBuilder = GraphBuilder.CreateParsingGraphBuilder(stream, b => this.logMessageParser.Parse(b, id));

    // Optional filters: drop messages whose level doesn't match, or whose text
    // doesn't match the configured regex. ForEach runs only when the Option is set.
    logOptions.Filter.LogLevel.ForEach(l => graphBuilder.AddFilter(m => m.LogLevel == l));
    logOptions.Filter.Regex.ForEach(r => graphBuilder.AddFilter(m => r.IsMatch(m.Text)));

    // Adapter: the streaming graph expects a Task<bool> consumer; always signal
    // "keep going" (true) after forwarding the frame to the caller's callback.
    async Task<bool> ConsumerCallback(ArraySegment<byte> a)
    {
        await callback(a);
        return(true);
    }

    // Text output forwards the raw frame bytes; any other content type serializes
    // the parsed message object instead.
    ArraySegment<byte> BasicMapper(ModuleLogMessageData l)
        => logOptions.ContentType == LogsContentType.Text
            ? new ArraySegment<byte>(l.FullFrame.ToArray())
            : new ArraySegment<byte>(l.ToBytes());

    var mappers = new List<Func<ArraySegment<byte>, ArraySegment<byte>>>();
    if (logOptions.ContentEncoding == LogsContentEncoding.Gzip)
    {
        // NOTE(review): m.Array compresses the segment's whole underlying buffer and
        // ignores m.Offset/m.Count — safe only if upstream segments always span the
        // full array. TODO confirm; otherwise m.ToArray() would be needed.
        mappers.Add(m => new ArraySegment<byte>(Compression.CompressToGzip(m.Array)));
    }

    IRunnableGraph<Task> graph = graphBuilder.GetStreamingGraph(
        ConsumerCallback,
        BasicMapper,
        mappers);

    await graph.Run(this.materializer);
}
// Windows the incoming frames by max count / max duration, concatenates each
// window's underlying buffers into one array, and gzip-compresses the result so
// that each emitted segment is a single compressed batch.
static Source<ArraySegment<byte>, NotUsed> GroupingGzipMapper(Source<ArraySegment<byte>, NotUsed> s, LogsOutputGroupingConfig outputGroupingConfig)
{
    Source<ArraySegment<byte>, NotUsed> batched = s
        .GroupedWithin(outputGroupingConfig.MaxFrames, outputGroupingConfig.MaxDuration)
        .Select(
            frames =>
            {
                byte[] joined = frames.Select(frame => frame.Array).ToList().Combine();
                return new ArraySegment<byte>(joined);
            });

    return batched.Select(batch => new ArraySegment<byte>(Compression.CompressToGzip(batch.Array)));
}
// Serializes the given metrics, gzip-compresses the payload, and publishes it as
// a single message through the module client.
// Throws OperationCanceledException if the token is already cancelled, and
// ArgumentNullException (via Preconditions) if metrics is null.
public async Task PublishAsync(IEnumerable<Metric> metrics, CancellationToken cancellationToken)
{
    Preconditions.CheckNotNull(metrics, nameof(metrics));

    // Fix: the token was previously accepted but never observed — honor a
    // pre-cancelled token before doing any serialization or network work.
    cancellationToken.ThrowIfCancellationRequested();

    byte[] data = MetricsSerializer.MetricsToBytes(metrics).ToArray();
    byte[] compressedData = Compression.CompressToGzip(data);

    // TODO: add check for too big of a message
    if (compressedData.Length > 0)
    {
        Message message = new Message(compressedData);
        await this.moduleClient.SendEventAsync(message);
    }
}
// Round-trips a UTF-8 payload through gzip compression/decompression and checks
// that compression shrinks the payload and decompression restores it exactly.
public static void CompressionRoundtripTest()
{
    // Arrange
    byte[] payload = Encoding.UTF8.GetBytes(TestCompressionString);

    // Act
    byte[] compressedBytes = Compression.CompressToGzip(payload);

    // Assert
    Assert.NotNull(compressedBytes);
    Assert.True(payload.Length > compressedBytes.Length);

    // Act
    byte[] decompressedBytes = Compression.DecompressFromGzip(compressedBytes);

    // Assert
    // Fix: xUnit's Assert.Equal takes (expected, actual) — the original passed the
    // actual value first, which inverts the expected/actual labels on failure.
    Assert.NotNull(decompressedBytes);
    Assert.Equal(payload.Length, decompressedBytes.Length);
    Assert.Equal(payload, decompressedBytes);
    string decompressedPayload = Encoding.UTF8.GetString(decompressedBytes);
    Assert.Equal(TestCompressionString, decompressedPayload);
}
// Verifies that a gzip/text logs request for a single module returns the
// compressed bytes in LogsResponse.PayloadBytes (and leaves the string Payload
// unset), using mocked runtime-info and logs providers.
public async Task GetTextGzipLogsTest()
{
    // Arrange: filter with tail=100, since=1501000, loglevel=3, regex "ERR".
    var filter = new ModuleLogFilter(Option.Some(100), Option.Some(1501000), Option.Some(3), Option.Some("ERR"));
    LogsContentEncoding contentEncoding = LogsContentEncoding.Gzip;
    LogsContentType contentType = LogsContentType.Text;

    // Request payload: the <filter> placeholder is substituted with the JSON form
    // of the filter above. Note "items" here is a single object, not an array.
    string payload = @"{ ""schemaVersion"": ""1.0"", ""items"": { ""id"": ""m1"", ""filter"": <filter> }, ""encoding"": ""gzip"", ""contentType"": ""text"" }"
        .Replace("<filter>", filter.ToJson());

    // Three running modules are reported, but only m1 is requested/asserted on.
    string mod1 = "m1";
    string mod2 = "m2";
    string mod3 = "m3";
    var moduleRuntimeInfoList = new List<ModuleRuntimeInfo>
    {
        new ModuleRuntimeInfo(mod1, "docker", ModuleStatus.Running, string.Empty, 0, Option.None<DateTime>(), Option.None<DateTime>()),
        new ModuleRuntimeInfo(mod2, "docker", ModuleStatus.Running, string.Empty, 0, Option.None<DateTime>(), Option.None<DateTime>()),
        new ModuleRuntimeInfo(mod3, "docker", ModuleStatus.Running, string.Empty, 0, Option.None<DateTime>(), Option.None<DateTime>())
    };

    var runtimeInfoProvider = new Mock<IRuntimeInfoProvider>();
    runtimeInfoProvider.Setup(r => r.GetModules(It.IsAny<CancellationToken>()))
        .ReturnsAsync(moduleRuntimeInfoList);

    // Logs provider is set up strictly for m1 with these exact options; VerifyAll
    // below fails the test if the handler calls it with anything else.
    var logsProvider = new Mock<ILogsProvider>();
    var module1LogOptions = new ModuleLogOptions(contentEncoding, contentType, filter, LogOutputFraming.None, Option.None<LogsOutputGroupingConfig>(), false);
    string mod1Logs = new[]
    {
        "Log line 1\n",
        "Log line 2\n",
        "Log line 3\n"
    }.Join(string.Empty);
    // Pre-compressed bytes returned by the mock; the handler should pass them
    // through unchanged into PayloadBytes.
    byte[] mod1LogBytes = Compression.CompressToGzip(mod1Logs.ToBytes());
    logsProvider.Setup(l => l.GetLogs(mod1, module1LogOptions, It.IsAny<CancellationToken>()))
        .ReturnsAsync(mod1LogBytes);

    // Act
    var logsRequestHandler = new LogsRequestHandler(logsProvider.Object, runtimeInfoProvider.Object);
    Option<string> response = await logsRequestHandler.HandleRequest(Option.Maybe(payload), CancellationToken.None);

    // Assert
    Assert.True(response.HasValue);
    logsProvider.VerifyAll();
    runtimeInfoProvider.VerifyAll();
    var logsResponseList = response.OrDefault().FromJson<List<LogsResponse>>();
    Assert.NotNull(logsResponseList);
    Assert.Single(logsResponseList);
    LogsResponse logsResponse = logsResponseList[0];
    Assert.Equal(mod1, logsResponse.Id);
    // Gzip encoding means binary payload: bytes are set, the string form is not.
    Assert.False(logsResponse.Payload.HasValue);
    Assert.True(logsResponse.PayloadBytes.HasValue);
    Assert.Equal(mod1LogBytes, logsResponse.PayloadBytes.OrDefault());
}
// Windows incoming frames by max count / max duration and emits one
// gzip-compressed segment per window.
static Source<ArraySegment<byte>, AkkaNet.NotUsed> GroupingGzipMapper(Source<ArraySegment<byte>, AkkaNet.NotUsed> s, LogsOutputGroupingConfig outputGroupingConfig)
{
    return s
        .GroupedWithin(outputGroupingConfig.MaxFrames, outputGroupingConfig.MaxDuration)
        .Select(
            window =>
            {
                // Flatten the window's segments (honoring each segment's
                // offset/count) into one contiguous buffer, then compress it.
                byte[] concatenated = window.SelectMany(segment => segment).ToArray();
                return new ArraySegment<byte>(Compression.CompressToGzip(concatenated));
            });
}
// Gzip-compresses each frame independently — no batching or windowing.
static Source<ArraySegment<byte>, AkkaNet.NotUsed> NonGroupingGzipMapper(Source<ArraySegment<byte>, AkkaNet.NotUsed> s)
{
    return s.Select(
        frame =>
        {
            byte[] compressed = Compression.CompressToGzip(frame.ToArray());
            return new ArraySegment<byte>(compressed);
        });
}
// Applies the requested content encoding: gzip-compresses the bytes when gzip
// is requested, otherwise returns the input unchanged.
static byte[] ProcessByContentEncoding(byte[] bytes, LogsContentEncoding contentEncoding)
{
    if (contentEncoding == LogsContentEncoding.Gzip)
    {
        return Compression.CompressToGzip(bytes);
    }

    return bytes;
}