Example #1
        public bool ProcessLogs()
        {
            LogConsumer.OnProgress -= LogConsumer_OnProgress;
            LogConsumer.OnProgress += LogConsumer_OnProgress;

            bool done = false;
            var  logs = LogSource.GetLogSources();

            if (logs.Length == 0)
            {
                OnLogsProcessingProgress(this.GetType().Name, "No logs to process since no LogSources were retrieved.", LogLevel.Verbose);
            }
            for (int k = 0; k < logs.Length; k++)
            {
                LogReader.Open(logs[k]);
                OnLogsProcessingProgress(this.GetType().Name, string.Format("Started processing log source {0} ({1}/{2}).", LogReader.SourceName, k + 1, logs.Length), LogLevel.Verbose);
                while (!done && LogReader.HasMoreLines)
                {
                    string line = LogReader.ReadLine();
                    done = LogConsumer.ConsumeLogEntry(line, LogReader.LineNumber, LogReader.SourceName);
                }
                LogReader.Close();
                if (done)
                {
                    OnLogsProcessingProgress(this.GetType().Name, string.Format("LogConsumer is DONE. Terminated processing in log source {0} ({1}/{2}).", logs[k], k + 1, logs.Length), LogLevel.Verbose);
                    break;
                }
                else
                {
                    OnLogsProcessingProgress(this.GetType().Name, string.Format("LogConsumer is NOT DONE after processing log source {0} ({1}/{2}).", logs[k], k + 1, logs.Length), LogLevel.Verbose);
                }
            }
            return done;
        }
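ProcessLogs above only depends on the consumer through two members: the OnProgress event it re-subscribes to, and ConsumeLogEntry, whose true return value stops any further reading. Below is a minimal sketch of a consumer with that shape; the class name, the OnProgress delegate signature, the parameter types and the marker-based logic are assumptions, only the ConsumeLogEntry call shape is taken from the call site above.

        // Hypothetical consumer: reports true once a marker string has been found,
        // which makes ProcessLogs stop after the current log source.
        public class MarkerLogConsumer
        {
            // Signature assumed from OnLogsProcessingProgress(senderName, message, level).
            public event Action<string, string, LogLevel> OnProgress;

            readonly string _marker;

            public MarkerLogConsumer(string marker) => _marker = marker;

            public bool ConsumeLogEntry(string line, int lineNumber, string sourceName)
            {
                if (line != null && line.Contains(_marker))
                {
                    OnProgress?.Invoke(nameof(MarkerLogConsumer),
                                       $"Marker found in {sourceName} at line {lineNumber}.",
                                       LogLevel.Verbose);
                    return true; // Done: no more lines or sources need to be read.
                }
                return false;
            }
        }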
Example #2
        public void ckmon_files_and_text_files_are_produced()
        {
            var secret    = Guid.NewGuid().ToString();
            var binSecret = Encoding.UTF8.GetBytes(secret);

            TestHelper.Monitor.Info($"This will appear in ckmon, text files and the console: {secret}");
            GrandOutput.Default.Should().NotBeNull();
            GrandOutput.Default.Dispose();
            Directory.EnumerateFiles(TestHelper.LogFolder, "*.log", SearchOption.AllDirectories)
                     .Select(f => File.ReadAllText(f))
                     .Count(text => text.Contains(secret))
                     .Should().Be(1);
            // ckmon files are now gzipped by default.
            int count = 0;

            foreach (var fName in Directory.EnumerateFiles(TestHelper.LogFolder, "*.ckmon", SearchOption.AllDirectories))
            {
                using (var input = LogReader.Open(fName))
                {
                    while (input.MoveNext())
                    {
                        if (input.Current.LogType != LogEntryType.CloseGroup &&
                            input.Current.Text.Contains(secret))
                        {
                            ++count;
                        }
                    }
                }
            }
            count.Should().Be(1);
            //
            TestHelper.WithWeakAssemblyResolver(() => TestHelper.Monitor.Info("From WeakAssemblyResolver."));
            TestHelper.Monitor.Info($"From WeakAssemblyResolver: {TestHelper.WithWeakAssemblyResolver( () => 3 )}");
        }
Example #3
        public void CompressedReadWriteTests()
        {
            TestHelper.CleanupTestFolder();
            string          directoryPath = Path.Combine(TestHelper.TestFolder, "GzipCKMonWriterClientTest");
            ActivityMonitor m             = new ActivityMonitor();
            var             client        = new CKMonWriterClient(directoryPath, 20000, LogFilter.Undefined, true);

            m.Output.RegisterClient(client);
            using (m.OpenWarn().Send("Group test"))
            {
                m.Info().Send("Line test");
            }
            // This closes the client: the file is then compressed asynchronously
            // on a thread from the ThreadPool.
            Assert.That(client.IsOpened);
            m.Output.UnregisterClient(client);
            string    ckmonPath = TestHelper.WaitForCkmonFilesInDirectory(directoryPath, 1)[0];
            LogReader r         = LogReader.Open(ckmonPath);

            r.MoveNext();
            Assert.That(r.Current.LogType, Is.EqualTo(LogEntryType.OpenGroup));
            Assert.That(r.Current.Text, Is.EqualTo("Group test"));
            r.MoveNext();
            Assert.That(r.Current.LogType, Is.EqualTo(LogEntryType.Line));
            Assert.That(r.Current.Text, Is.EqualTo("Line test"));
            r.MoveNext();
            Assert.That(r.Current.LogType, Is.EqualTo(LogEntryType.CloseGroup));
            bool hasRemainingEntries = r.MoveNext();

            Assert.That(hasRemainingEntries, Is.False);
        }
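The OpenGroup / Line / CloseGroup sequence asserted above is the general shape of a .ckmon stream, so a reader typically tracks the group depth while iterating. Below is a minimal sketch built only on the members already used in these tests (LogReader.Open, MoveNext, Current.LogType, Current.Text); the helper name and the indentation scheme are mine.

        // Hypothetical helper: dump a .ckmon file as indented text, one level per open group.
        static void DumpCkmon(string ckmonPath)
        {
            using (LogReader r = LogReader.Open(ckmonPath))
            {
                int depth = 0;
                while (r.MoveNext())
                {
                    if (r.Current.LogType == LogEntryType.CloseGroup) --depth;
                    Console.WriteLine(new string(' ', 2 * Math.Max(depth, 0)) + (r.Current.Text ?? "(close group)"));
                    if (r.Current.LogType == LogEntryType.OpenGroup) ++depth;
                }
            }
        }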
Example #4
        public void BinaryGzip_reconfiguration()
        {
            string folder = TestHelper.PrepareLogFolder(nameof(BinaryGzip_reconfiguration));
            var    h      = new Handlers.BinaryFileConfiguration()
            {
                Path = folder + @"\FirstPath",
                UseGzipCompression = false
            };
            var c = new GrandOutputConfiguration().AddHandler(h);

            var m = new ActivityMonitor(applyAutoConfigurations: false);

            using (GrandOutput g = new GrandOutput(c))
            {
                g.EnsureGrandOutputClient(m);

                m.Trace("No Compression.");
                // We wait so that the log above is handled by the current configuration before reconfiguring:
                // by design there is no causality/ordering between log emission and sink reconfiguration.
                Thread.Sleep(100);

                h.UseGzipCompression = true;
                g.ApplyConfiguration(c, true);
                m.Trace("With Compression.");
                Thread.Sleep(100);

                h.Path = folder + @"\SecondPath";
                g.ApplyConfiguration(c, true);
                m.Trace("With Compression (in second folder).");
                Thread.Sleep(100);

                h.UseGzipCompression = false;
                g.ApplyConfiguration(c, true);
                m.Trace("No Compression (in second folder).");
            }
            // First file is NOT compressed, the second one is.
            var fileNamesFirst = Directory.EnumerateFiles(folder + @"\FirstPath").ToList();

            fileNamesFirst.Should().BeInAscendingOrder().And.HaveCount(2).And.NotContain(s => s.EndsWith(".tmp"), "Temporary files have been closed.");
            File.ReadAllText(fileNamesFirst[0]).Should().Contain("No Compression.");
            File.ReadAllText(fileNamesFirst[1]).Should().NotContain("With Compression.", "Cannot read it in clear text since it is compressed...");
            using (var reader = LogReader.Open(fileNamesFirst[1]))
            {
                reader.MoveNext().Should().BeTrue();
                reader.Current.Text.Should().Be("With Compression.");
            }
            // First file is compressed, not the second one.
            var fileNamesSecond = Directory.EnumerateFiles(folder + @"\SecondPath").ToList();

            fileNamesSecond.Should().BeInAscendingOrder().And.HaveCount(2).And.NotContain(s => s.EndsWith(".tmp"), "Temporary files have been closed.");
            File.ReadAllText(fileNamesSecond[0]).Should().NotContain("With Compression (in second folder).", "The first file is compressed...");
            // We restrict the log entries to the ones from our monitor: this filters out the logs from the DispatcherSink.
            using (var reader = LogReader.Open(fileNamesSecond[0], filter: new LogReader.MulticastFilter(m)))
            {
                reader.MoveNext().Should().BeTrue();
                reader.Current.Text.Should().Be("With Compression (in second folder).");
            }
            File.ReadAllText(fileNamesSecond[1]).Should().Contain("No Compression (in second folder).");
        }
Example #5
 public IndexCkmon(LuceneConfiguration configuration, string pathCkmon)
 {
     using (var indexer = new LuceneIndexer(configuration))
     using (LogReader reader = LogReader.Open(pathCkmon))
     {
         // Index every multicast entry of the .ckmon file.
         while (reader.MoveNext())
         {
             indexer.IndexLog(reader.CurrentMulticast, configuration.Directory);
         }
     }
 }
Example #6
        public void LevelDbSearchLogTest()
        {
            // https://github.com/google/leveldb/blob/master/doc/log_format.md

            LogReader logReader = new LogReader(new FileInfo(@"TestWorld\000047.log"));

            logReader.Open();
            MemCache memCache = new MemCache();

            memCache.Load(logReader);

            var result = memCache.Get(new byte[] { 0xeb, 0xff, 0xff, 0xff, 0xf3, 0xff, 0xff, 0xff, 0x31 });

            Assert.IsTrue(ReadOnlySpan<byte>.Empty != result.Data);
            Assert.AreEqual(new byte[] { 0xA, 0x00, 0x00, 0x02, 0x05 }, result.Data.Slice(0, 5).ToArray());
        }
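The log_format.md document linked above describes the framing this test exercises: the file is a sequence of 32 KiB blocks, and each record inside a block starts with a 7-byte header (CRC32C checksum, little-endian payload length, record type). The snippet below only illustrates that header layout, it is not part of the LogReader used in the test; the method name is mine.

        // Sketch of the LevelDB log record header (see the linked log_format.md):
        // 4-byte CRC32C, 2-byte little-endian payload length, 1 type byte
        // (1 = FULL, 2 = FIRST, 3 = MIDDLE, 4 = LAST).
        static (uint Crc, ushort Length, byte Type) ReadRecordHeader(BinaryReader reader)
        {
            uint crc = reader.ReadUInt32();      // checksum of the type byte and payload
            ushort length = reader.ReadUInt16(); // number of payload bytes that follow
            byte type = reader.ReadByte();       // FULL record or a FIRST/MIDDLE/LAST fragment
            return (crc, length, type);
        }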
Example #7
 private static void CopyTo(MulticastLogEntryTextBuilder b, string filePath)
 {
     using (LogReader r = LogReader.Open(filePath, 0L, null))
     {
         var o = Console.Out;
         {
             int i = 0;
             while (r.MoveNext())
             {
                 b.AppendEntry(r.CurrentMulticast);
                 if (i++ == 20)
                 {
                     o.Write(b.Builder.ToString());
                     b.Builder.Clear();
                     i = 0;
                 }
             }
             o.Write(b.Builder.ToString());
         }
     }
 }
Example #8
        public void LevelDbWriteUserDataTest()
        {
            // Plan

            var operations = new KeyValuePair<byte[], MemCache.ResultCacheEntry>[3];

            for (int i = 0; i < 3; i++)
            {
                byte[] key   = TestUtils.FillArrayWithRandomBytes(20);
                var    entry = new MemCache.ResultCacheEntry();
                entry.ResultState = ResultState.Exist;
                entry.Sequence    = 10;
                entry.Data        = TestUtils.FillArrayWithRandomBytes(32768);          // 32 KB is the max size for a block, not that it matters for this
                operations[i]     = new KeyValuePair<byte[], MemCache.ResultCacheEntry>(key, entry);
            }

            MemCache memCache = new MemCache();

            // Do

            ReadOnlySpan<byte> result = memCache.EncodeBatch(operations);

            // Check

            SpanReader reader = new SpanReader(result);

            Assert.AreEqual(10, reader.ReadInt64(), "Sequence number");
            Assert.AreEqual(3, reader.ReadInt32(), "Operations count");

            for (int i = 0; i < 3; i++)
            {
                var expectedKey  = operations[i].Key;
                var expectedData = operations[i].Value.Data;

                Assert.AreEqual(1, reader.ReadByte(), "Operations type PUT");
                var keyLen = reader.ReadVarLong();

                Assert.AreEqual(expectedKey.Length, keyLen, "Key len");
                Assert.AreEqual(expectedKey, reader.Read(keyLen).ToArray(), "Key");

                var dataLen = reader.ReadVarLong();
                Assert.AreEqual(expectedData.Length, dataLen, "Data len");
                Assert.AreEqual(expectedData, reader.Read(dataLen).ToArray(), "Data");
            }

            // test encoding complete blocks

            var       stream = new MemoryStream();
            LogWriter writer = new LogWriter(stream);

            writer.WriteData(result);
            Assert.Less(0, stream.Length);
            stream.Position = 0;

            // Roundtrip test by making sure I can read blocks I've encoded myself.

            LogReader logReader = new LogReader(stream);

            logReader.Open();

            MemCache memCache2 = new MemCache();

            memCache2.Load(logReader);

            var cache = memCache2._resultCache;

            Assert.AreEqual(3, cache.Count);

            int j = 0;

            foreach (var entry in cache)
            {
                var expectedKey  = operations[j].Key;
                var expectedData = operations[j].Value.Data;

                Assert.AreEqual(ResultState.Exist, entry.Value.ResultState, "Value exists");

                Assert.AreEqual(expectedKey.Length, entry.Key.Length, "Key len");
                Assert.AreEqual(expectedKey, entry.Key, "Key");

                Assert.AreEqual(expectedData.Length, entry.Value.Data.Length, "Data len");
                Assert.AreEqual(expectedData, entry.Value.Data, "Data");
                j++;
            }
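The assertions above spell out the write-batch layout being round-tripped: an 8-byte sequence number, a 4-byte operation count, then for each PUT a type byte followed by a varint-prefixed key and a varint-prefixed value. Below is a stand-alone decoding sketch under exactly those assumptions, without the project's SpanReader; the method name and the local varint reader are mine, and only PUT records are handled.

        // Hypothetical decoder for the batch layout checked above (PUT records only).
        static List<(byte[] Key, byte[] Value)> DecodeBatch(byte[] batch)
        {
            var result = new List<(byte[], byte[])>();
            int pos = 12; // skip past the header read just below
            ulong ReadVarInt()
            {
                ulong value = 0; int shift = 0; byte b;
                do { b = batch[pos++]; value |= (ulong)(b & 0x7F) << shift; shift += 7; } while ((b & 0x80) != 0);
                return value;
            }
            long sequence = BitConverter.ToInt64(batch, 0); // 8-byte sequence number
            int count = BitConverter.ToInt32(batch, 8);     // 4-byte operation count
            for (int i = 0; i < count; i++)
            {
                byte type = batch[pos++];                   // 1 = PUT
                if (type != 1) break;                       // this sketch only handles PUT
                var key = new byte[(int)ReadVarInt()];
                Array.Copy(batch, pos, key, 0, key.Length); pos += key.Length;
                var value = new byte[(int)ReadVarInt()];
                Array.Copy(batch, pos, value, 0, value.Length); pos += value.Length;
                result.Add((key, value));
            }
            return result;
        }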
        }