/// <summary>
/// Processes an entire directory of log files.
/// </summary>
/// <param name="request">The parsing request describing the target logset, its hash, and which collections to parse.</param>
/// <returns>The result of parsing the logset.</returns>
public LogsetParsingResult ParseLogset(LogsetParsingRequest request)
{
    Log.InfoFormat("Processing log directory '{0}'..", request.Target);

    LogsetParsingResult result;
    using (var parseTimer = new LogsharkTimer("Parsed Files", request.LogsetHash, GlobalEventTimingData.Add))
    {
        // Winnow the logset down to the queue of files the requested collections actually need.
        var preprocessor = new LogsetPreprocessor(tuningOptions);
        Queue<LogFileContext> filesToParse = preprocessor.Preprocess(request.Target, request.ArtifactProcessor, request.CollectionsToParse);

        Initialize(request);
        using (GetProcessingWrapper(request))
        {
            result = ProcessFiles(filesToParse, request.ArtifactProcessor.GetParserFactory(request.Target), request.LogsetHash);
        }

        Log.InfoFormat("Finished processing log directory '{0}'! [{1}]", request.Target, parseTimer.Elapsed.Print());
    }

    Finalize(request, result);

    // Sanity-check that parsing actually yielded data before handing the result back.
    var validator = GetValidator();
    validator.ValidateDataExists(request.LogsetHash);

    return result;
}
/// <summary>
/// Copies the target logset to a local destination directory.
/// </summary>
/// <param name="target">Path to the logset to copy; must exist.</param>
/// <param name="destination">Local directory to copy into.</param>
/// <returns>The path of the local copy.</returns>
/// <exception cref="ArgumentException">Thrown when the target path does not exist.</exception>
/// <exception cref="LogsetCopyException">Thrown when the copy fails for any reason other than insufficient disk space.</exception>
public string CopyLogset(string target, string destination)
{
    // Guard clause: fail fast on a nonexistent source path.
    if (!PathHelper.IsPathToExistingResource(target))
    {
        throw new ArgumentException(String.Format("Failed to copy target '{0}': path does not exist!", target));
    }

    try
    {
        using (var copyTimer = new LogsharkTimer("Copy Logset Locally", GlobalEventTimingData.Add))
        {
            string localCopyPath = CopyTarget(target, destination);
            Log.InfoFormat("Finished copying logset. [{0}]", copyTimer.Elapsed.Print());
            return localCopyPath;
        }
    }
    catch (InsufficientDiskSpaceException)
    {
        // Surface disk-space failures unwrapped so callers can handle them distinctly.
        throw;
    }
    catch (Exception ex)
    {
        throw new LogsetCopyException(String.Format("Failed to copy target '{0}' to local temp directory: {1}", target, ex.Message), ex);
    }
}
/// <summary>
/// Extracts the target logset.
/// </summary>
/// <param name="target">The logset archive or directory to extract from.</param>
/// <param name="destination">The directory files are unpacked into.</param>
/// <returns>The root path where files were extracted.</returns>
/// <exception cref="InvalidLogsetException">Thrown when the archive cannot be read.</exception>
/// <exception cref="ExtractionException">Thrown when extraction fails for any other reason.</exception>
public ExtractionResult Extract(string target, string destination)
{
    // Unpack files.
    try
    {
        using (var unpackTimer = new LogsharkTimer("Unpack Archives", GlobalEventTimingData.Add))
        {
            ICollection<string> archives = GetArchivesToUnpack(target, destination);
            var unpackResults = UnpackArchives(archives, destination, PathHelper.IsDirectory(target));

            // Only log size statistics when something was actually unpacked.
            if (unpackResults.Any())
            {
                long inputSize = DiskSpaceHelper.GetSize(target);
                long extractedSize = DiskSpaceHelper.GetDirectorySize(destination);
                Log.InfoFormat("Finished extracting required files from logset! Unpacked {0} out of {1}. [{2}]",
                               extractedSize.ToPrettySize(), inputSize.ToPrettySize(), unpackTimer.Elapsed.Print());
            }

            return new ExtractionResult(destination, unpackResults);
        }
    }
    catch (ZipException ex)
    {
        throw new InvalidLogsetException(String.Format("Cannot read logset archive: {0}", ex.Message));
    }
    catch (InsufficientDiskSpaceException)
    {
        // Propagate disk-space failures unwrapped so callers can react specifically.
        throw;
    }
    catch (Exception ex)
    {
        throw new ExtractionException(ex.Message, ex);
    }
}
/// <summary>
/// Partitions a single file into multiple pieces.
/// </summary>
/// <param name="fileToPartition">The file to split up.</param>
/// <param name="maxFileSizeBytes">Maximum size, in bytes, of each resulting piece.</param>
/// <returns>The contexts of the resulting file partitions.</returns>
protected virtual IEnumerable<LogFileContext> PartitionFile(LogFileContext fileToPartition, long maxFileSizeBytes)
{
    // Timer key is the file's root-relative location so per-file timings stay distinguishable.
    string timerKey = String.Format("{0}/{1}", fileToPartition.FileLocationRelativeToRoot, fileToPartition.FileName);
    using (var timer = new LogsharkTimer("Partition File", timerKey, GlobalEventTimingData.Add))
    {
        Log.InfoFormat("Partitioning file {0}.. ({1})", fileToPartition.FileName, fileToPartition.FileSize.ToPrettySize());

        var filePartitioner = new FilePartitioner(fileToPartition, maxFileSizeBytes);
        IList<LogFileContext> pieces = filePartitioner.PartitionFile();

        Log.InfoFormat("Finished partitioning file {0} ({1}) [{2}]",
                       fileToPartition.FileName, fileToPartition.FileSize.ToPrettySize(), timer.Elapsed.Print());
        return pieces;
    }
}
/// <summary>
/// Orchestrates a Logshark run from end to end.
/// </summary>
/// <param name="request">The user's processing request.</param>
/// <param name="metadataWriter">The metadata writer responsible for writing information about the state of the run.</param>
/// <returns>Run context containing the run outcome and details of what happened during the run.</returns>
private LogsharkRunContext ExecuteLogsharkRun(LogsharkRequest request, ILogsharkRunMetadataWriter metadataWriter)
{
    using (var runTimer = new LogsharkTimer("Logshark Run", request.Target, GlobalEventTimingData.Add))
    {
        var run = new LogsharkRunContext(request);
        try
        {
            Log.InfoFormat("Preparing logset target '{0}' for processing..", request.Target);

            // Phases execute strictly in order; each StartPhase call records the
            // transition via the metadata writer before the phase's work begins.
            StartPhase(ProcessingPhase.Initializing, run, metadataWriter);
            run.InitializationResult = InitializeRun(request);
            // InitializeRun did not throw, so the logset is considered valid from here on.
            run.IsValidLogset = true;

            StartPhase(ProcessingPhase.Parsing, run, metadataWriter);
            run.ParsingResult = ProcessLogset(request, run.InitializationResult);

            StartPhase(ProcessingPhase.ExecutingPlugins, run, metadataWriter);
            run.PluginExecutionResult = ExecutePlugins(request, run.InitializationResult);

            run.SetRunSuccessful();
            return run;
        }
        catch (Exception ex)
        {
            // Record the failure on the run context, then rethrow so the caller
            // still sees the original exception (stack trace preserved by bare throw).
            run.SetRunFailed(ex);
            throw;
        }
        finally
        {
            // Runs on both success and failure: mark completion, tear down, and
            // emit the run summary (if any) before the timer is disposed.
            StartPhase(ProcessingPhase.Complete, run, metadataWriter);
            TearDown(run);
            Log.InfoFormat("Logshark run complete! [{0}]", runTimer.Elapsed.Print());
            string runSummary = run.BuildRunSummary();
            if (!String.IsNullOrWhiteSpace(runSummary))
            {
                Log.Info(runSummary);
            }
        }
    }
}
/// <summary>
/// Process a single log file.
/// </summary>
/// <param name="file">The log file to parse.</param>
/// <param name="parserFactory">Factory used to locate a parser appropriate for the file.</param>
/// <param name="logsetHash">Hash identifying the logset this file belongs to; used to route parsed documents.</param>
/// <returns>True if the file was parsed successfully; false if no parser was found, no documents were produced for a non-empty file, or an exception occurred.</returns>
protected bool ProcessFile(LogFileContext file, IParserFactory parserFactory, string logsetHash)
{
    try
    {
        Log.InfoFormat("Processing {0}.. ({1})", file, file.FileSize.ToPrettySize());
        using (var parseTimer = new LogsharkTimer("Parse File", file.ToString(), GlobalEventTimingData.Add))
        {
            IParser parser = parserFactory.GetParser(file);
            if (parser == null)
            {
                Log.ErrorFormat("Failed to locate a parser for file '{0}'. Skipping this file..", file.FilePath);
                return false;
            }

            IDocumentWriter writer = GetDocumentWriter(file, parser.CollectionSchema.CollectionName, logsetHash);

            // Attempt to process the file; register a failure if we don't yield at least one document for a file
            // with at least one byte of content.
            var fileProcessor = new LogFileParser(parser, writer);
            long documentsSuccessfullyParsed = fileProcessor.Parse(file);
            if (file.FileSize > 0 && documentsSuccessfullyParsed == 0)
            {
                Log.WarnFormat("Failed to parse any log events from {0}!", file);
                return false;
            }

            Log.InfoFormat("Completed processing of {0} ({1}) [{2}]", file, file.FileSize.ToPrettySize(), parseTimer.Elapsed.Print());
            return true;
        }
    }
    catch (Exception ex)
    {
        // Fixed: use ErrorFormat for consistency with the *Format logging idiom used
        // throughout this class, instead of manually composing via String.Format.
        // The emitted message text is unchanged.
        Log.ErrorFormat("Failed to process file '{0}': {1}", file, ex.Message);
        Log.Debug(ex.StackTrace);
        return false;
    }
    finally
    {
        // Always release per-file resources, regardless of outcome.
        Cleanup(file);
    }
}
/// <summary>
/// Executes a single plugin.
/// </summary>
/// <param name="pluginType">The type of the plugin to execute.</param>
/// <param name="pluginExecutionRequest">Plugin execution options.</param>
/// <param name="previousPluginResponses">The set of plugin responses associated with the current run. Used for plugin chaining.</param>
/// <returns>Response containing state about the success/failure of the plugin's execution.</returns>
protected IPluginResponse ExecutePlugin(Type pluginType, PluginExecutionRequest pluginExecutionRequest, IEnumerable<IPluginResponse> previousPluginResponses)
{
    string pluginName = pluginType.Name;

    // Setup plugin for execution.
    IPluginRequest pluginRequest = CreatePluginRequest(pluginType, pluginExecutionRequest);
    // NOTE(review): unlike other LogsharkTimer call sites in this file, this timer is not
    // wrapped in a using block; it is only Stop()ped in the finally below. Confirm that
    // Stop() (rather than Dispose()) is what records the timing event.
    var pluginTimer = new LogsharkTimer("Executed Plugin", pluginName, GlobalEventTimingData.Add);

    // Execute plugin. The response is pre-created so the failure handlers below always
    // have an object to record errors on, even if initialization itself throws.
    IPluginResponse pluginResponse = new PluginResponse(pluginName);
    try
    {
        string outputDatabaseName = GetOutputDatabaseName(pluginName, pluginRequest, pluginExecutionRequest);
        var plugin = InitializePlugin(pluginType, pluginRequest, pluginExecutionRequest.MongoDatabaseName, outputDatabaseName, previousPluginResponses);
        Log.InfoFormat("Execution of {0} plugin started at {1}..", pluginName, DateTime.Now.ToString("h:mm tt", CultureInfo.InvariantCulture));
        pluginResponse = plugin.Execute();

        // Flush any workbooks, if this was a workbook creation plugin.
        if (plugin is IWorkbookCreationPlugin)
        {
            IEnumerable<string> workbookFilePaths = WriteWorkbooksToDisk(pluginRequest.OutputDirectory, plugin as IWorkbookCreationPlugin, pluginResponse, outputDatabaseName);
            pluginResponse.WorkbooksOutput.AddRange(workbookFilePaths);
        }

        // Publish any associated workbooks, if requested.
        if (pluginExecutionRequest.PublishingOptions != null && pluginExecutionRequest.PublishingOptions.PublishWorkbooks)
        {
            var restApiRequestor = new RestApiRequestor(tableauConnectionInfo.Uri, tableauConnectionInfo.Username, tableauConnectionInfo.Password, tableauConnectionInfo.Site);
            var workbookPublisher = new WorkbookPublisher(tableauConnectionInfo, postgresConnectionInfo, pluginExecutionRequest.PublishingOptions, restApiRequestor);
            ICollection<PublishedWorkbookResult> workbooksPublished = workbookPublisher.PublishWorkbooks(pluginResponse);
            pluginResponse.WorkbooksPublished.AddRange(workbooksPublished);
        }
    }
    catch (PluginInitializationException ex)
    {
        string errorMessage = String.Format("Failed to initialize {0} plugin: {1}", pluginName, ex.Message);
        HandlePluginExecutionFailure(pluginResponse, errorMessage, ex);
    }
    catch (PublishingException ex)
    {
        string errorMessage = String.Format("Failed to publish workbooks: {0}", ex.Message);
        HandlePluginExecutionFailure(pluginResponse, errorMessage, ex);
    }
    catch (Exception ex)
    {
        // Catch-all: a plugin failure is recorded on the response rather than
        // propagated, so one failed plugin does not abort the rest of the run.
        string errorMessage = String.Format("Encountered uncaught exception while executing plugin '{0}': {1}", pluginName, ex.GetFlattenedMessage());
        HandlePluginExecutionFailure(pluginResponse, errorMessage, ex);
    }
    finally
    {
        // Record run time and outcome whether the plugin succeeded or failed.
        pluginTimer.Stop();
        pluginResponse.PluginRunTime = pluginTimer.Elapsed;
        LogExecutionOutcome(pluginResponse);
    }

    return pluginResponse;
}