/// <summary>
/// Splits a single logfile into a number of partitions.
/// </summary>
/// <returns>List containing all logfiles which were produced from the original source file.</returns>
public IList<LogFileContext> PartitionFile()
{
    if (finishedPartitioning)
    {
        throw new InvalidOperationException("Cannot invoke partitioning more than once on a single file!");
    }

    IList<LogFileContext> partitions = new List<LogFileContext>();

    // Files at or below the partition threshold don't need splitting; hand back the original as-is.
    if (file.FileSize <= partitionSizeBytes)
    {
        partitions.Add(file);
        return partitions;
    }

    // Consume the source line-by-line, emitting one partition per pass until the reader is exhausted.
    using (var lineIterator = File.ReadLines(file.FilePath).GetEnumerator())
    {
        while (!encounteredEndOfFile)
        {
            LogFileContext partition = WritePartition(lineIterator, partitions.Count + 1);
            partitions.Add(partition);
        }
    }

    // Destroy the original file now that we're finished with it.
    File.Delete(file.FilePath);

    finishedPartitioning = true;
    return partitions;
}
/// <summary>
/// Parse the given log file.
/// </summary>
/// <param name="logFile">Context describing the log file to parse.</param>
/// <returns>Count of documents that were successfully parsed and written.</returns>
public long Parse(LogFileContext logFile)
{
    long processedDocumentCount = 0;

    try
    {
        using (var reader = new StreamReader(new FileStream(logFile.FilePath, FileMode.Open, FileAccess.Read, FileShare.Read)))
        {
            while (!parser.FinishedParsing)
            {
                // Parse a document.
                JObject document = parser.ParseLogDocument(reader);
                if (document != null)
                {
                    DocumentWriteResult result = writer.Write(document);
                    if (!result.IsSuccessful)
                    {
                        Log.WarnFormat("Failed to write document parsed from file '{0}': {1}", logFile, result.ErrorMessage);
                    }
                    else
                    {
                        processedDocumentCount++;
                    }
                }
            }
        }
    }
    finally
    {
        // BUGFIX: shut the writer down even if parsing throws, so buffered documents
        // and writer resources are not leaked on the failure path.
        writer.Shutdown();
    }

    return processedDocumentCount;
}
/// <summary>
/// Builds the supplemental metadata fields to attach to documents parsed from the given file.
/// </summary>
/// <param name="fileContext">Context describing the log file being processed.</param>
/// <returns>Dictionary containing the "worker" id gleaned from the file's path.</returns>
public IDictionary<string, object> GetAdditionalFileMetadata(LogFileContext fileContext)
{
    var metadata = new Dictionary<string, object>();
    metadata.Add("worker", GetWorkerId(fileContext));
    return metadata;
}
/// <summary>
/// Process a single log file.
/// </summary>
/// <param name="fileContext">Context describing the log file to process.</param>
private void ProcessFile(LogFileContext fileContext)
{
    try
    {
        Log.InfoFormat("Processing {0}.. ({1})", fileContext, fileContext.FileSize.ToPrettySize());
        var parseTimer = logsharkRequest.RunContext.CreateTimer("Parse File", fileContext.ToString());

        // Attempt to process the file; register a failure if we don't yield at least one document for a file
        // with at least one byte of content.
        var fileProcessor = new MongoInsertionFileProcessor(fileContext, logsharkRequest);
        bool processedSuccessfully = fileProcessor.ProcessFile();
        if (fileContext.FileSize > 0 && !processedSuccessfully)
        {
            Log.WarnFormat("Failed to parse any log events from {0}!", fileContext);
            logsharkRequest.RunContext.RegisterParseFailure(fileContext.ToString());
        }

        parseTimer.Stop();
        Log.InfoFormat("Completed processing of {0} ({1}) [{2}]", fileContext, fileContext.FileSize.ToPrettySize(), parseTimer.Elapsed.Print());
    }
    catch (Exception ex)
    {
        // BUGFIX: the original logged only ex.Message, discarding which file failed and the
        // stack trace. Include the file context and stash the trace at debug level.
        Log.ErrorFormat("Failed to process file '{0}': {1}", fileContext, ex.Message);
        Log.Debug(ex.StackTrace);
    }

    // Cleanup runs on both the success and failure paths.
    Cleanup(fileContext);
}
/// <summary>
/// Parse the given log file.
/// </summary>
/// <param name="logFile">Context describing the log file to parse.</param>
/// <returns>Count of documents that were successfully parsed.</returns>
public long Parse(LogFileContext logFile)
{
    long processedDocumentCount = 0;

    try
    {
        using (var reader = new StreamReader(new FileStream(logFile.FilePath, FileMode.Open, FileAccess.Read, FileShare.Read)))
        {
            while (!parser.FinishedParsing)
            {
                // Parse a document.
                JObject document = parser.ParseLogDocument(reader);
                if (document != null)
                {
                    DocumentWriteResult result = writer.Write(document);
                    switch (result.Result)
                    {
                        case DocumentWriteResultType.Failure:
                            Log.WarnFormat("Failed to write document parsed from file '{0}': {1}", logFile, result.ErrorMessage);
                            break;

                        case DocumentWriteResultType.SuccessWithWarning:
                            Log.WarnFormat($"Document from file '{logFile}' processed with warning: {result.ErrorMessage}");
                            break;
                    }

                    // Counts documents that were parsed, regardless of write outcome (matches the
                    // documented contract of "successfully parsed").
                    processedDocumentCount++;
                }
            }
        }
    }
    finally
    {
        // BUGFIX: shut the writer down even if parsing throws, so buffered documents
        // and writer resources are not leaked on the failure path.
        writer.Shutdown();
    }

    return processedDocumentCount;
}
/// <summary>
/// Create an instance of the correct parser type for a given log file.
/// </summary>
/// <param name="fileName">The logfile to be parsed.</param>
/// <returns>Parser that can parse the log.</returns>
public IParser GetParser(string fileName)
{
    var fileContext = new LogFileContext(fileName, rootLogLocation);
    return GetParserBuilder(fileName).GetParser(fileContext);
}
/// <summary>
/// Builds the supplemental metadata fields to attach to documents parsed from the given file.
/// </summary>
/// <param name="fileContext">Context describing the log file being processed (unused; Desktop has no workers).</param>
/// <returns>Dictionary with a constant "worker" field of "0".</returns>
public IDictionary<string, object> GetAdditionalFileMetadata(LogFileContext fileContext)
{
    // Compatibility shim: Desktop logsets have no worker nodes, but certain Server
    // plugins expect the "worker" field to be present, so we always report worker "0".
    var metadata = new Dictionary<string, object>();
    metadata["worker"] = "0";
    return metadata;
}
/// <summary>
/// Builds the supplemental metadata fields to attach to documents parsed from the given file.
/// </summary>
/// <param name="fileContext">Context describing the log file being processed.</param>
/// <returns>Dictionary whose "worker" field carries the hostname for this file.</returns>
public IDictionary<string, object> GetAdditionalFileMetadata(LogFileContext fileContext)
{
    // Store the hostname in the "worker" field to maintain compatibility with Server "classic" logsets.
    var metadata = new Dictionary<string, object>();
    metadata.Add("worker", GetHostname(fileContext));
    return metadata;
}
/// <summary>
/// Initializes a processor that parses the given log file and inserts the resulting documents into Mongo.
/// </summary>
/// <param name="logFile">Context describing the log file to process.</param>
/// <param name="request">The Logshark request this processing run belongs to; supplies the Mongo connection and run options.</param>
/// <param name="parserFactory">Factory used to resolve the parser appropriate for this log file.</param>
public MongoInsertionFileProcessor(LogFileContext logFile, LogsharkRequest request, IParserFactory parserFactory)
{
    this.logFile = logFile;
    parser = parserFactory.GetParser(logFile);
    mongoDatabase = request.Configuration.MongoConnectionInfo.GetDatabase(request.RunContext.MongoDatabaseName);
    ignoreDebugLogs = request.IgnoreDebugLogs;
    // Tracking collections for asynchronous insertion work; populated during processing.
    inFlightInsertions = new List <Thread>();
    insertionQueue = new List <BsonDocument>();
}
/// <summary>
/// Given a log file path, attempt to glean a worker index from it.
/// </summary>
/// <param name="fileContext">Context whose path is searched for a "workerN" parent directory.</param>
/// <returns>Id of worker node, or "0" when no worker directory appears in the path.</returns>
private static string GetWorkerId(LogFileContext fileContext)
{
    var workerIndex = ParserUtil.GetParentLogDirs(fileContext.FilePath, fileContext.RootLogDirectory)
        // BUGFIX: use an ordinal comparison — the parameterless StartsWith is culture-sensitive
        // (CA1310) and can misbehave under some locales for what is a literal prefix check.
        .Where(parent => parent.StartsWith("worker", StringComparison.Ordinal))
        .Select(name => name.Replace("worker", ""))
        .DefaultIfEmpty("0")
        .First();

    return workerIndex;
}
/// <summary>
/// Indicates whether a given file qualifies for partitioning.
/// </summary>
/// <param name="file">Context describing the candidate file.</param>
/// <param name="maxFileSizeBytes">Size threshold, in bytes, above which files are split.</param>
/// <returns>True when the file exceeds the threshold and a single-line parser is available for it.</returns>
protected virtual bool IsPartitionableFile(LogFileContext file, long maxFileSizeBytes)
{
    // Files within the size limit never need partitioning.
    if (file.FileSize <= maxFileSizeBytes)
    {
        return false;
    }

    // Multi-line log formats (or files with no parser at all) cannot be split safely on line boundaries.
    IParser parser = parserFactory.GetParser(file.FilePath);
    if (parser == null || parser.IsMultiLineLogType)
    {
        return false;
    }

    return true;
}
/// <summary>
/// Handles any resource cleanup associated with processing this file.
/// </summary>
/// <param name="fileContext">Context describing the processed file to remove from disk.</param>
private void Cleanup(LogFileContext fileContext)
{
    // Now that we've processed the file, we can delete it.
    try
    {
        File.Delete(fileContext.FilePath);
    }
    catch (Exception ex)
    {
        // Log & swallow exception; cleanup is a nice-to-have, not a need-to-have.
        Log.DebugFormat("Failed to remove processed file '{0}': {1}", fileContext.FilePath, ex.Message);
    }
}
/// <summary>
/// Partitions a single file into multiple pieces.
/// </summary>
/// <param name="fileToPartition">Context describing the file to split.</param>
/// <param name="maxFileSizeBytes">Maximum size, in bytes, of each resulting partition.</param>
/// <returns>The partitions produced from the original file.</returns>
protected virtual IEnumerable<LogFileContext> PartitionFile(LogFileContext fileToPartition, long maxFileSizeBytes)
{
    string timerDetail = String.Format("{0}/{1}", fileToPartition.FileLocationRelativeToRoot, fileToPartition.FileName);
    using (var timer = new LogsharkTimer("Partition File", timerDetail, GlobalEventTimingData.Add))
    {
        Log.InfoFormat("Partitioning file {0}.. ({1})", fileToPartition.FileName, fileToPartition.FileSize.ToPrettySize());

        IList<LogFileContext> partitions = new FilePartitioner(fileToPartition, maxFileSizeBytes).PartitionFile();

        Log.InfoFormat("Finished partitioning file {0} ({1}) [{2}]", fileToPartition.FileName, fileToPartition.FileSize.ToPrettySize(), timer.Elapsed.Print());
        return partitions;
    }
}
/// <summary>
/// Verifies that partitioning the named sample log with a 1 MB partition size yields exactly five partitions.
/// </summary>
/// <param name="logfileName">Name of the test resource file to partition.</param>
public void PartitionFile(string logfileName)
{
    const long partitionSizeBytes = 1024 * 1024;

    string logPath = TestDataHelper.GetResourcePath(logfileName);
    string rootLogDirectory = TestDataHelper.GetDataDirectory();
    var context = new LogFileContext(logPath, rootLogDirectory);

    var partitioner = new FilePartitioner(context, partitionSizeBytes);
    var partitions = partitioner.PartitionFile();

    partitions.Count.Should().Be(5);
}
/// <summary>
/// Handles any resource cleanup associated with processing this file.
/// </summary>
/// <param name="fileContext">Context describing the processed file to remove from disk.</param>
/// <returns>True if the processed file was deleted; false if deletion failed.</returns>
protected virtual bool Cleanup(LogFileContext fileContext)
{
    bool deleted;

    // The file has been fully processed, so the local copy can be removed.
    try
    {
        File.Delete(fileContext.FilePath);
        deleted = true;
    }
    catch (Exception ex)
    {
        // Log & swallow exception; cleanup is a nice-to-have, not a need-to-have.
        Log.DebugFormat($"Failed to remove processed file '{fileContext.FilePath}': {ex.Message}");
        deleted = false;
    }

    return deleted;
}
/// <summary>
/// Retrieves the correct parser for a given log file.
/// </summary>
/// <param name="logFileContext">Context about the log file to retrieve a parser for.</param>
/// <returns>Parser object that supports the specified log, or null when no known file pattern matches.</returns>
public virtual IParser GetParser(LogFileContext logFileContext)
{
    // Check to see if this file is in our map of known file types that we have parsers for.
    foreach (var fileMapping in FileMap)
    {
        // PERF: the static Regex.IsMatch API serves compiled patterns from the framework's
        // internal cache instead of constructing a new Regex per key on every call, and
        // iterating KeyValuePairs avoids the second dictionary lookup the original did.
        if (Regex.IsMatch(logFileContext.FileName, fileMapping.Key))
        {
            // New up parser.
            return Activator.CreateInstance(fileMapping.Value, logFileContext) as IParser;
        }
    }

    // Didn't find a match in the fileMap dictionary.
    return null;
}
/// <summary>
/// Process a single log file.
/// </summary>
/// <param name="file">Context describing the log file to process.</param>
/// <param name="parserFactory">Factory used to resolve a parser for the file.</param>
/// <param name="logsetHash">Hash identifying the logset this file belongs to.</param>
/// <returns>True when parsing succeeded (or the file was empty); false on parser lookup failure, zero yield, or exception.</returns>
protected bool ProcessFile(LogFileContext file, IParserFactory parserFactory, string logsetHash)
{
    try
    {
        Log.InfoFormat("Processing {0}.. ({1})", file, file.FileSize.ToPrettySize());

        using (var parseTimer = new LogsharkTimer("Parse File", file.ToString(), GlobalEventTimingData.Add))
        {
            IParser parser = parserFactory.GetParser(file);
            if (parser == null)
            {
                Log.ErrorFormat("Failed to locate a parser for file '{0}'. Skipping this file..", file.FilePath);
                return false;
            }

            IDocumentWriter writer = GetDocumentWriter(file, parser.CollectionSchema.CollectionName, logsetHash);

            // Attempt to process the file; register a failure if we don't yield at least one document for a file
            // with at least one byte of content.
            long documentCount = new LogFileParser(parser, writer).Parse(file);
            if (file.FileSize > 0 && documentCount == 0)
            {
                Log.WarnFormat("Failed to parse any log events from {0}!", file);
                return false;
            }

            Log.InfoFormat("Completed processing of {0} ({1}) [{2}]", file, file.FileSize.ToPrettySize(), parseTimer.Elapsed.Print());
            return true;
        }
    }
    catch (Exception ex)
    {
        Log.Error(String.Format("Failed to process file '{0}': {1}", file, ex.Message));
        Log.Debug(ex.StackTrace);
        return false;
    }
    finally
    {
        // Cleanup runs on every exit path, including the exceptional ones.
        Cleanup(file);
    }
}
/// <summary>
/// Initializes a new <see cref="FileStoreParser"/> bound to the given log file.
/// </summary>
/// <param name="fileContext">Context describing the log file this parser will consume.</param>
public FileStoreParser(LogFileContext fileContext) : base(fileContext) { }
/// <summary>
/// Initializes a new <see cref="DataServerCppParser"/> bound to the given log file.
/// </summary>
/// <param name="fileContext">Context describing the log file this parser will consume.</param>
public DataServerCppParser(LogFileContext fileContext) : base(fileContext) { }
/// <summary>
/// Initializes a new <see cref="SearchServerParser"/> bound to the given log file.
/// </summary>
/// <param name="fileContext">Context describing the log file this parser will consume.</param>
public SearchServerParser(LogFileContext fileContext) : base(fileContext) { }
/// <summary>
/// Initializes a new <see cref="SearchServerLocalhostParser"/> bound to the given log file.
/// </summary>
/// <param name="fileContext">Context describing the log file this parser will consume.</param>
public SearchServerLocalhostParser(LogFileContext fileContext) : base(fileContext) { }
/// <summary>
/// Initializes base parser state for derived YAML parsers from the given log file context.
/// </summary>
/// <param name="fileContext">Context describing the log file the derived parser will consume.</param>
protected AbstractYamlParser(LogFileContext fileContext) : base(fileContext) { }
/// <summary>
/// Initializes a new <see cref="VizqlServerCppParser"/> bound to the given log file.
/// </summary>
/// <param name="fileContext">Context describing the log file this parser will consume.</param>
public VizqlServerCppParser(LogFileContext fileContext) : base(fileContext) { }
/// <summary>
/// Initializes a new <see cref="VizportalJavaParser"/> bound to the given log file.
/// </summary>
/// <param name="fileContext">Context describing the log file this parser will consume.</param>
public VizportalJavaParser(LogFileContext fileContext) : base(fileContext) { }
/// <summary>
/// Initializes a new <see cref="PostgresLegacyParser"/> bound to the given log file.
/// </summary>
/// <param name="fileContext">Context describing the log file this parser will consume.</param>
public PostgresLegacyParser(LogFileContext fileContext) : base(fileContext) { }
/// <summary>
/// Initializes a new <see cref="BackgrounderCppParser"/> bound to the given log file.
/// </summary>
/// <param name="fileContext">Context describing the log file this parser will consume.</param>
public BackgrounderCppParser(LogFileContext fileContext) : base(fileContext) { }
/// <summary>
/// Initializes a new <see cref="PostgresHostConfigParser"/> bound to the given log file.
/// </summary>
/// <param name="fileContext">Context describing the log file this parser will consume.</param>
public PostgresHostConfigParser(LogFileContext fileContext) : base(fileContext) { }
/// <summary>
/// Initializes a new <see cref="ProtocolServerParser"/> bound to the given log file.
/// </summary>
/// <param name="fileContext">Context describing the log file this parser will consume.</param>
public ProtocolServerParser(LogFileContext fileContext) : base(fileContext) { }
/// <summary>
/// Initializes a new <see cref="TabAdminControllerCppParser"/> bound to the given log file.
/// </summary>
/// <param name="fileContext">Context describing the log file this parser will consume.</param>
public TabAdminControllerCppParser(LogFileContext fileContext) : base(fileContext) { }
/// <summary>
/// Initializes base parser state for derived regex-based parsers from the given log file context.
/// </summary>
/// <param name="fileContext">Context describing the log file the derived parser will consume.</param>
protected AbstractRegexParser(LogFileContext fileContext) : base(fileContext) { }