Code Example #1
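        /// <summary>
        /// Entry point for a full parsing run. Gathers the pre-filtered input files,
        /// processes each one, dumps any non-standard lines to a "[{ParserName}-FAILED]-"
        /// file when the parser throws, and finishes the run summary when more than one
        /// file has been processed.
        /// </summary>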
        public virtual void Run(string inputFile = null)
        {
            var cfg = Current;

            if (inputFile != null)
            {
                cfg.InputFile = inputFile;
            }

            var parser = cfg.Parser;

            BeginRunSummary(parser.DateTimeFormat);
            var runSummary = RunSummary;

            // pre-filtering of the input files happens inside GatherInputFiles
            var inputFiles = GatherInputFiles();

            if (!inputFiles.Any())
            {
                InvokeReportProgress("No log files found in input folder or none passed pre-filtering.", 100);
                return;
            }

            // NOTE: the gathered files are ordered by LastWriteTime;
            // reverse them so that the output files preserve that order
            foreach (var fileInfo in inputFiles.Reverse())
            {
                try
                {
                    Run(fileInfo);
                }
                catch (ParserException)
                {
                    if (parser.NonStandardLines.Any())
                    {
                        // record the non-standard entries in the run summary and dump them to a dedicated file
                        runSummary.NonStandardEntries += parser.NonStandardLines.Count;

                        var filePath   = fileInfo.FullName;
                        var outputPath = FileProcessor.GetOutputFilePath(filePath, cfg.InputFolder, cfg.OutputFolder, $"[{cfg.ParserName}-FAILED]-");
                        if (FileProcessor.WriteFile(outputPath, parser.NonStandardLines, cfg.OverwriteFiles))
                        {
                            runSummary.FilesWritten++;
                        }
                    }

                    throw;
                }
            }

            if (inputFiles.Length > 1)
            {
                // write the run summary only when
                // more than one file has been processed
                EndRunSummary();
            }
        }
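GatherInputFiles is called above but its body is not part of this example. A minimal sketch of what it might look like, assuming the configuration exposes the input folder, that the search pattern is "*.log", and that pre-filtering is a simple predicate (PassesPreFilter is a hypothetical name), could be:

        // A sketch only; the real GatherInputFiles is not shown in this example.
        // The "*.log" pattern, the PassesPreFilter predicate and the newest-first
        // ordering are assumptions based on the comments in Run above.
        protected virtual FileInfo[] GatherInputFiles()
        {
            var cfg = Current;

            return new DirectoryInfo(cfg.InputFolder)
                .EnumerateFiles("*.log", SearchOption.AllDirectories)
                .Where(PassesPreFilter)                     // hypothetical pre-filtering hook
                .OrderByDescending(f => f.LastWriteTime)    // ordering direction is an assumption
                .ToArray();
        }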
Code Example #2
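        /// <summary>
        /// Writes additional output files for each configured split dimension
        /// (thread, identity, log level). An empty array in the configuration
        /// means "write a file for every value of that key".
        /// </summary>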
        protected void Split(string filePath, LogEntry[] filteredEntries, Summary currentSummary)
        {
            var cfg        = Current;
            var parser     = cfg.Parser;
            var runSummary = RunSummary;

            if (cfg.SplitByThreads != null)
            {
                // an empty SplitByThreads array means "write a file for every thread"
                var splitByAllThreads = cfg.SplitByThreads.Length == 0;

                var groupsByKey = filteredEntries.GroupBy(entry => entry.Thread);
                foreach (var groupedEntries in groupsByKey)
                {
                    // when specific threads are configured, skip every other thread
                    if (!splitByAllThreads && !cfg.SplitByThreads.Contains(groupedEntries.Key))
                    {
                        continue;
                    }

                    var currentThreadOutputPath = FileProcessor.GetOutputFilePath(filePath, cfg.InputFolder, cfg.OutputFolder, $"[THREAD#{groupedEntries.Key}]-");
                    if (FileProcessor.WriteFile(currentThreadOutputPath, parser.ToLines(groupedEntries), cfg.OverwriteFiles))
                    {
                        runSummary.FilesWritten++;
                        currentSummary.FilesWritten++;
                    }

                    InvokeReportProgress($"\rTHREAD#{groupedEntries.Key}: {groupedEntries.Count()}", -1);
                }

                InvokeReportProgress(string.Empty);
            }

            if (cfg.SplitByIdentities != null)
            {
                // an empty SplitByIdentities array means "write a file for every identity"
                var splitByAllIdentities = cfg.SplitByIdentities.Length == 0;

                var groupsByKey = filteredEntries.GroupBy(entry => entry.Identity);
                foreach (var groupedEntries in groupsByKey)
                {
                    // when specific identities are configured, skip every other identity
                    if (!splitByAllIdentities && !cfg.SplitByIdentities.Contains(groupedEntries.Key))
                    {
                        continue;
                    }

                    var currentIdentityOutputPath = FileProcessor.GetOutputFilePath(filePath, cfg.InputFolder, cfg.OutputFolder, $"[IDENTITY#{groupedEntries.Key}]-");
                    if (FileProcessor.WriteFile(currentIdentityOutputPath, parser.ToLines(groupedEntries), cfg.OverwriteFiles))
                    {
                        runSummary.FilesWritten++;
                        currentSummary.FilesWritten++;
                    }

                    InvokeReportProgress($"\rIDENTITY#{groupedEntries.Key}: {groupedEntries.Count()}", -1);
                }

                InvokeReportProgress(string.Empty);
            }

            if (cfg.SplitByLogLevels != null)
            {
                // an empty SplitByLogLevels array means "write a file for every log level"
                var splitByAllLogLevels = cfg.SplitByLogLevels.Length == 0;

                var groupsByKey = filteredEntries.GroupBy(entry => entry.Level);
                foreach (var groupedEntries in groupsByKey)
                {
                    // when specific log levels are configured, skip every other level
                    if (!splitByAllLogLevels && !cfg.SplitByLogLevels.Contains(groupedEntries.Key))
                    {
                        continue;
                    }

                    var currentLogLevelOutputPath = FileProcessor.GetOutputFilePath(filePath, cfg.InputFolder, cfg.OutputFolder, $"[LEVEL#{groupedEntries.Key}]-");
                    if (FileProcessor.WriteFile(currentLogLevelOutputPath, parser.ToLines(groupedEntries), cfg.OverwriteFiles))
                    {
                        runSummary.FilesWritten++;
                        currentSummary.FilesWritten++;
                    }

                    InvokeReportProgress($"\rLEVEL#{groupedEntries.Key}: {groupedEntries.Count()}", -1);
                }

                InvokeReportProgress(string.Empty);
            }
        }
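The three blocks in Split differ only in the key selector, the configured key array, and the file-name tag. As a refactoring sketch (SplitByKey is not part of the original code, and it assumes the key type of each dimension matches the element type of the corresponding configuration array), the shared pattern could be factored out like this:

        // Refactoring sketch only; not part of the original code.
        private void SplitByKey<TKey>(string filePath, LogEntry[] filteredEntries, Summary currentSummary,
                                      TKey[] configuredKeys, Func<LogEntry, TKey> keySelector, string tag)
        {
            if (configuredKeys == null)
            {
                return;
            }

            var cfg        = Current;
            var parser     = cfg.Parser;
            var runSummary = RunSummary;

            // an empty array means "write a file for every value of the key"
            var splitByAll = configuredKeys.Length == 0;

            foreach (var groupedEntries in filteredEntries.GroupBy(keySelector))
            {
                if (!splitByAll && !configuredKeys.Contains(groupedEntries.Key))
                {
                    continue;
                }

                var outputPath = FileProcessor.GetOutputFilePath(filePath, cfg.InputFolder, cfg.OutputFolder, $"[{tag}#{groupedEntries.Key}]-");
                if (FileProcessor.WriteFile(outputPath, parser.ToLines(groupedEntries), cfg.OverwriteFiles))
                {
                    runSummary.FilesWritten++;
                    currentSummary.FilesWritten++;
                }

                InvokeReportProgress($"\r{tag}#{groupedEntries.Key}: {groupedEntries.Count()}", -1);
            }

            InvokeReportProgress(string.Empty);
        }

        // Split(...) would then reduce to three calls:
        //
        //     SplitByKey(filePath, filteredEntries, currentSummary, cfg.SplitByThreads,    e => e.Thread,   "THREAD");
        //     SplitByKey(filePath, filteredEntries, currentSummary, cfg.SplitByIdentities, e => e.Identity, "IDENTITY");
        //     SplitByKey(filePath, filteredEntries, currentSummary, cfg.SplitByLogLevels,  e => e.Level,    "LEVEL");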
Code Example #3
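        /// <summary>
        /// Processes a single log file: reads and parses its lines, dumps any
        /// non-standard lines, filters the constructed entries, writes the split
        /// and filtered outputs (plus dedicated filter files and, optionally, a
        /// copy of the original), and finishes with a read-only per-file summary
        /// in JSON form.
        /// </summary>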
        protected virtual void Run(FileInfo logFileInput)
        {
            var cfg        = Current;
            var parser     = cfg.Parser;
            var filters    = cfg.Filters;
            var runSummary = RunSummary;

            var filePath       = logFileInput.FullName;
            var currentSummary = parser.BeginSummary();

            InvokeReportProgress($"Reading file '{filePath}'...");

            void ProgressCallback(int percent)
            {
                InvokeReportProgress("Processing...", percent);
            }

            var logLines = FileProcessor.ReadLogLines(filePath, ProgressCallback, out var linesRead, parser.Expression);

            InvokeReportProgress("Done!        ", 100);
            InvokeReportProgress(Environment.NewLine);

            if (!string.IsNullOrEmpty(cfg.FilePrefix))
            {
                // when reparsing, strip the configured prefix from the original file name
                // so that this parser run can apply its own prefix to the output files
                filePath = FileProcessor.ExtractFileName(filePath, cfg.FilePrefix);
            }

            var currentOutputPath          = FileProcessor.GetOutputFilePath(filePath, cfg.InputFolder, cfg.OutputFolder, cfg.FilePrefix);
            var currentDirectoryOutputPath = FileProcessor.GetFileDirectory(currentOutputPath);

            currentSummary.CopyConfiguration(cfg, filePath, currentDirectoryOutputPath);

            runSummary.FilesRead++;
            currentSummary.FilesRead++;
            runSummary.LinesRead    += (ulong)linesRead;
            currentSummary.LinesRead = (ulong)linesRead;
            runSummary.LogsRead     += (ulong)logLines.Length;
            currentSummary.LogsRead  = (ulong)logLines.Length;

            InvokeReportProgress($"Lines: {linesRead}, Logs: {logLines.Length}, Constructing entries...");

            var logEntries = parser.ToLogEntry(logLines).ToArray();

            runSummary.NonStandardEntries    += parser.NonStandardLines.Count;
            currentSummary.NonStandardEntries = parser.NonStandardLines.Count;

            if (parser.NonStandardLines.Any())
            {
                // write the failed (non-standard) lines and continue the run; reaching this point means the error threshold has not been exceeded
                var outputPath = FileProcessor.GetOutputFilePath(filePath, cfg.InputFolder, cfg.OutputFolder, $"[{cfg.ParserName}-FAILED]-");
                if (FileProcessor.WriteFile(outputPath, parser.NonStandardLines, cfg.OverwriteFiles))
                {
                    runSummary.FilesWritten++;
                    currentSummary.FilesWritten++;
                }
            }

            runSummary.EntriesConstructed    += (ulong)logEntries.Length;
            currentSummary.EntriesConstructed = (ulong)logEntries.Length;

            InvokeReportProgress($"Logs: {logLines.Length}, Constructed: {logEntries.Length}, Filtering file...");

            var filteredEntries = parser.FilterLogEntries(logEntries, ProgressCallback);

            InvokeReportProgress("Done!        ", 100);
            InvokeReportProgress(Environment.NewLine);

            runSummary.FilteredEntries    += (ulong)filteredEntries.Length;
            currentSummary.FilteredEntries = (ulong)filteredEntries.Length;

            InvokeReportProgress($"Entries: {logEntries.Length}, Filtered: {filteredEntries.Length}, Writing files...");

            if (filteredEntries.Any())
            {
                Split(filePath, filteredEntries, currentSummary);

                // write filtered file
                var filteredLines = parser.ToLines(filteredEntries).ToArray();

                var filteredEntriesOutputPath = FileProcessor.GetOutputFilePath(filePath, cfg.InputFolder, cfg.OutputFolder, "filtered");
                if (FileProcessor.WriteFile(filteredEntriesOutputPath, filteredLines, cfg.OverwriteFiles))
                {
                    runSummary.FilesWritten++;
                    currentSummary.FilesWritten++;

                    runSummary.LinesWritten    += (ulong)filteredLines.Length;
                    currentSummary.LinesWritten = (ulong)filteredLines.Length;
                }

                InvokeReportProgress($"FILTERED: {filteredEntries.Length}");
            }
            else
            {
                // TODO: report this at WARN level
                InvokeReportProgress("No entries remained after filtering this file.");
            }

            // write entries accumulated in the filters
            var dedicatedFilterLogs = filters.Where(f => f.Type == FilterType.WriteToFile || f.Type == FilterType.IncludeAndWriteToFile);

            foreach (var dedicatedFilter in dedicatedFilterLogs)
            {
                // do not write empty files
                if (!dedicatedFilter.Entries.Any())
                {
                    continue;
                }

                // write one file for each custom filter that requires output
                var dedicatedFilterOutputPath = FileProcessor.GetOutputFilePath(filePath, cfg.InputFolder, cfg.OutputFolder, dedicatedFilter.Name);
                //dedicatedFilter.FileName = dedicatedFilterOutputPath; // TODO: Figure out what's the idea here?
                if (FileProcessor.WriteFile(dedicatedFilterOutputPath, parser.ToLines(dedicatedFilter.Entries), cfg.OverwriteFiles))
                {
                    runSummary.FilesWritten++;
                    currentSummary.FilesWritten++;
                }
            }

            if (cfg.CopyOriginal)
            {
                InvokeReportProgress("Writing original file...");

                // write the original file to output folder
                var originalOutputFilePath = FileProcessor.GetOutputFilePath(filePath, cfg.InputFolder, cfg.OutputFolder, "original");
                if (FileProcessor.WriteOriginalFile(sourcePath: filePath, destinationPath: originalOutputFilePath, lines: logLines, overwrite: cfg.OverwriteFiles))
                {
                    runSummary.FilesWritten++;
                    currentSummary.FilesWritten++;
                }
            }

            // end current summary
            parser.EndSummary();

            // write current summary
            var currentSummaryOutputPath = FileProcessor.GetSummaryFilePath(filePath, cfg.InputFolder, cfg.OutputFolder);

            if (FileProcessor.WriteFile(currentSummaryOutputPath, currentSummary.ToJson(), cfg.OverwriteFiles))
            {
                FileProcessor.SetReadonly(currentSummaryOutputPath);
                runSummary.FilesWritten++;
            }

            AggregateRunSummaryCounters(currentSummary);

            // NOTE: the double blank console line is intentional
            InvokeReportProgress("Done!" + Environment.NewLine);
        }
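FileProcessor.GetOutputFilePath is used throughout these examples, but its implementation is not shown. Judging from the call sites, it presumably maps a file under the input folder to the matching location under the output folder and prepends a prefix to the file name. A rough sketch under that assumption (Path.GetRelativePath requires .NET Core / modern .NET) might be:

        // Sketch of the behaviour implied by the call sites above;
        // the real FileProcessor.GetOutputFilePath may well differ.
        public static string GetOutputFilePath(string filePath, string inputFolder, string outputFolder, string prefix)
        {
            // mirror the file's location relative to the input folder under the output folder
            var relativeDirectory = Path.GetDirectoryName(Path.GetRelativePath(inputFolder, filePath)) ?? string.Empty;
            var fileName          = Path.GetFileName(filePath);

            return Path.Combine(outputFolder, relativeDirectory, prefix + fileName);
        }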