/// <summary>
/// Starts a recurring heartbeat that writes processing liveness metadata for the given database.
/// </summary>
/// <param name="metadataWriter">Writer used by the heartbeat callback to persist metadata.</param>
/// <param name="databaseName">Name of the database the heartbeat is written for; passed as the timer state.</param>
public MongoProcessingHeartbeatTimer(MongoLogProcessingMetadataWriter metadataWriter, string databaseName)
{
    this.metadataWriter = metadataWriter;

    // Interval constant is in seconds; the Timer overload used here takes milliseconds.
    long heartbeatPeriodMs = 1000 * MongoProcessingHeartbeatInterval;

    // dueTime 0 fires the first heartbeat immediately, then every heartbeatPeriodMs thereafter.
    timer = new Timer(WriteHeartbeat, databaseName, dueTime: 0, period: heartbeatPeriodMs);
}
/// <summary>
/// Determines the processing state of a remote logset by inspecting its metadata in MongoDB.
/// </summary>
/// <param name="logsetHash">Hash identifying the logset to check.</param>
/// <param name="requiredCollections">Collections that must be present for the logset to be considered valid.</param>
/// <returns>
/// NonExistent if no remote data exists; Indeterminable if metadata retrieval fails;
/// Corrupt if metadata is missing/malformed or an in-flight run's heartbeat has expired;
/// InFlight if processing is still underway; Incomplete if required collections are missing;
/// otherwise Valid.
/// </returns>
public LogsetProcessingStatus GetStatus(string logsetHash, IEnumerable<string> requiredCollections)
{
    LogProcessingMetadata logsetMetadata;
    try
    {
        if (!RemoteLogsetHasData(logsetHash))
        {
            return new LogsetProcessingStatus(ProcessedLogsetState.NonExistent);
        }

        var metadataReader = new MongoLogProcessingMetadataWriter(mongoConnectionInfo);
        logsetMetadata = metadataReader.Read(logsetHash);
    }
    catch (Exception ex)
    {
        // Any failure talking to MongoDB means we cannot know the state; report Indeterminable.
        Log.ErrorFormat("Unable to retrieve logset metadata from MongoDB: {0}", ex.Message);
        return new LogsetProcessingStatus(ProcessedLogsetState.Indeterminable);
    }

    // Missing or malformed metadata is treated as a corrupt logset.
    bool metadataIsUsable = logsetMetadata != null &&
                            logsetMetadata.CollectionsParsed != null &&
                            !String.IsNullOrWhiteSpace(logsetMetadata.LogsetType);
    if (!metadataIsUsable)
    {
        return new LogsetProcessingStatus(ProcessedLogsetState.Corrupt);
    }

    // A run that hasn't finished is either still in flight or abandoned (expired heartbeat).
    if (!logsetMetadata.ProcessedSuccessfully)
    {
        return IsHeartbeatExpired(logsetMetadata)
            ? new LogsetProcessingStatus(ProcessedLogsetState.Corrupt)
            : new LogsetProcessingStatus(ProcessedLogsetState.InFlight, logsetMetadata.ProcessedSize);
    }

    // Completed run: verify every required collection was parsed into the remote logset.
    var missingCollections = requiredCollections.Except(logsetMetadata.CollectionsParsed).ToHashSet();
    if (missingCollections.Any())
    {
        Log.DebugFormat("Remote {0} logset does not contain required collections: {1}",
                        logsetMetadata.LogsetType, String.Join(", ", missingCollections));
        return new LogsetProcessingStatus(ProcessedLogsetState.Incomplete, logsetMetadata.ProcessedSize);
    }

    return new LogsetProcessingStatus(ProcessedLogsetState.Valid, logsetMetadata.ProcessedSize);
}
/// <summary>
/// Initializes a run against an existing processed logset identified by a logset hash.
/// </summary>
/// <param name="request">Initialization request; its target must be of type <see cref="LogsetTarget.Hash"/>.</param>
/// <returns>A result describing the artifact processor, parsed collections, and plugins to execute.</returns>
/// <exception cref="ArgumentException">Thrown if the request target is not a logset hash.</exception>
/// <exception cref="InvalidTargetHashException">Thrown if no logset metadata exists for the given hash.</exception>
public RunInitializationResult Initialize(RunInitializationRequest request)
{
    if (request.Target.Type != LogsetTarget.Hash)
    {
        // Fix: use nameof(request) instead of the magic string "request" so the
        // paramName stays correct under rename refactorings.
        throw new ArgumentException("Request target must be a logset hash!", nameof(request));
    }

    var metadataReader = new MongoLogProcessingMetadataWriter(mongoConnectionInfo);
    LogProcessingMetadata logsetMetadata = metadataReader.Read(request.Target);
    if (logsetMetadata == null)
    {
        throw new InvalidTargetHashException(String.Format("No logset exists that matches logset hash '{0}'. Aborting..", request.Target));
    }

    // Resolve the artifact processor that originally produced this logset.
    var artifactProcessorLoader = new HashArtifactProcessorLoader();
    IArtifactProcessor artifactProcessor = artifactProcessorLoader.LoadArtifactProcessor(logsetMetadata.ArtifactProcessorType);

    var pluginLoader = new PluginLoader(request.ArtifactProcessorOptions);
    ISet<Type> pluginsToExecute = pluginLoader.LoadPlugins(request.RequestedPlugins, artifactProcessor);

    // NOTE(review): request.Target is passed for both the first and fourth arguments —
    // confirm against RunInitializationResult's signature that this duplication is intended.
    return new RunInitializationResult(request.Target, artifactProcessor, logsetMetadata.CollectionsParsed, request.Target, pluginsToExecute);
}
/// <summary>
/// Creates a logset parser backed by MongoDB, wiring up a metadata writer for the same connection.
/// </summary>
/// <param name="mongoConnectionInfo">Connection details for the target MongoDB instance.</param>
/// <param name="tuningOptions">Tuning options forwarded to the base parser.</param>
public MongoLogsetParser(MongoConnectionInfo mongoConnectionInfo, LogsharkTuningOptions tuningOptions)
    : base(tuningOptions)
{
    // Both the parser and its metadata writer share the same connection info.
    metadataWriter = new MongoLogProcessingMetadataWriter(mongoConnectionInfo);
    this.mongoConnectionInfo = mongoConnectionInfo;
}