Example 1
        /// <summary>
        /// Creates all of the MongoDB collections that this logset requires.
        /// </summary>
        private void CreateMongoDbCollections()
        {
            var collections = new Dictionary<string, HashSet<string>>();

            ISet<IParser> parsers = parserFactory.GetAllParsers();

            // Stuff collection names & indexes into the dictionary, deduping in the process.
            foreach (var parser in parsers)
            {
                var collectionName = parser.CollectionSchema.CollectionName.ToLowerInvariant();
                IList<string> indexes = parser.CollectionSchema.Indexes;

                if (!collections.ContainsKey(collectionName))
                {
                    if (LogsetDependencyHelper.IsCollectionRequiredForRequest(collectionName, logsharkRequest))
                    {
                        collections.Add(collectionName, new HashSet<string>());
                    }
                }

                // Add indexes for any collection we are going to create.
                if (collections.ContainsKey(collectionName))
                {
                    foreach (var index in indexes)
                    {
                        collections[collectionName].Add(index);
                    }
                }
            }

            // Create the collections & indexes described by the dictionary.
            foreach (var collection in collections)
            {
                var collectionName = collection.Key;
                ISet<string> indexes = collection.Value;

                var dbCollection = database.GetCollection<BsonDocument>(collectionName);
                logsharkRequest.RunContext.CollectionsGenerated.Add(collectionName);

                foreach (var index in indexes)
                {
                    var indexKeysBuilder = new IndexKeysDefinitionBuilder<BsonDocument>();
                    var indexOptions = new CreateIndexOptions { Sparse = false };
                    dbCollection.Indexes.CreateOne(indexKeysBuilder.Ascending(index), indexOptions);
                }

                // If we are working against a sharded Mongo cluster, we need to explicitly shard each collection.
                MongoConnectionInfo mongoConnectionInfo = logsharkRequest.Configuration.MongoConnectionInfo;
                if (mongoConnectionInfo.ConnectionType == MongoConnectionType.ShardedCluster)
                {
                    MongoAdminUtil.EnableShardingOnCollectionIfNotEnabled(mongoConnectionInfo.GetClient(), logsharkRequest.RunContext.MongoDatabaseName, collectionName);
                }
            }
        }
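
The heart of CreateMongoDbCollections is the dedup step: collection names and indexes reported by many parsers are folded into a Dictionary<string, HashSet<string>> so each collection is created once and each index is only defined once. Below is a minimal, self-contained sketch of that step, using made-up collection and index names in place of the real IParser/CollectionSchema objects.

using System;
using System.Collections.Generic;

public static class CollectionIndexDedupSketch
{
    public static void Main()
    {
        // (collectionName, indexName) pairs as several parsers might report them,
        // including a duplicate coming from a second parser.
        var reported = new[]
        {
            ("backgrounder", "worker"),
            ("backgrounder", "ts"),
            ("backgrounder", "worker"),
            ("vizqlserver", "sess")
        };

        var collections = new Dictionary<string, HashSet<string>>();
        foreach (var (collectionName, index) in reported)
        {
            if (!collections.ContainsKey(collectionName))
            {
                collections.Add(collectionName, new HashSet<string>());
            }

            // HashSet membership handles the dedup.
            collections[collectionName].Add(index);
        }

        foreach (var entry in collections)
        {
            Console.WriteLine($"{entry.Key}: {string.Join(", ", entry.Value)}");
        }
    }
}
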
        /// <summary>
        /// Retrieves the list of files in the target directory which are required to process this request.
        /// </summary>
        protected static IEnumerable<string> GetRequiredFilesInDirectory(LogsharkRequest request)
        {
            IEnumerable<string> allFiles = Directory.GetFiles(request.Target, "*", SearchOption.AllDirectories);
            var whiteListPattern = LogsharkConstants.EXTRACTION_FILE_WHITELIST;
            var whiteListedFiles = allFiles.Where(file => whiteListPattern.IsMatch(Path.GetFileName(file)));

            return whiteListedFiles.Where(file => LogsetDependencyHelper.IsLogfileRequiredForRequest(file, request.Target, request));
        }
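
GetRequiredFilesInDirectory filters the directory listing twice: first against the extraction whitelist regex, then against the per-request dependency check. The sketch below shows just the whitelist pass; the pattern is a stand-in, since the real LogsharkConstants.EXTRACTION_FILE_WHITELIST value does not appear in this example.

using System;
using System.IO;
using System.Linq;
using System.Text.RegularExpressions;

public static class WhitelistFilterSketch
{
    public static void Main()
    {
        // Stand-in whitelist: keep .log, .txt and .yml files.
        var whiteListPattern = new Regex(@"\.(log|txt|yml)$", RegexOptions.IgnoreCase);

        var allFiles = new[]
        {
            "logs/vizqlserver/vizql-0.log",
            "logs/config/workgroup.yml",
            "logs/data/extract.hyper"
        };

        // Match on the file name only, mirroring the Path.GetFileName call above.
        var whiteListedFiles = allFiles.Where(file => whiteListPattern.IsMatch(Path.GetFileName(file)));

        foreach (var file in whiteListedFiles)
        {
            Console.WriteLine(file);
        }
    }
}
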
Example 3
        public static LogsetStatus GetStatus(LogsharkRequest request)
        {
            LogsetMetadata logsetMetadata;
            LogsetType logsetType;

            try
            {
                LogsetMetadataReader metadataReader = new LogsetMetadataReader(request);

                if (!RemoteLogsetHasData(request))
                {
                    return LogsetStatus.NonExistent;
                }

                logsetMetadata = metadataReader.GetMetadata();
                logsetType = metadataReader.GetLogsetType();
            }
            catch (Exception ex)
            {
                Log.ErrorFormat("Unable to retrieve logset metadata from MongoDB: {0}", ex.Message);
                return LogsetStatus.Indeterminable;
            }

            // Lack of metadata is treated as a corrupt state.
            if (logsetMetadata == null || logsetMetadata.CollectionsParsed == null || logsetType == LogsetType.Unknown)
            {
                return LogsetStatus.Corrupt;
            }

            if (!logsetMetadata.ProcessedSuccessfully)
            {
                return logsetMetadata.IsHeartbeatExpired() ? LogsetStatus.Corrupt : LogsetStatus.InFlight;
            }

            // Check to make sure the remote logset has all of the collections we need.
            var missingCollections = LogsetDependencyHelper.GetMissingRequiredCollections(request, logsetType, logsetMetadata.CollectionsParsed);

            if (missingCollections.Count > 0)
            {
                Log.DebugFormat("Remote {0} logset does not contain required collections: {1}", logsetType, String.Join(", ", missingCollections));
                return LogsetStatus.Incomplete;
            }

            return LogsetStatus.Valid;
        }
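
GetStatus checks conditions in a fixed order: logset existence, readable metadata, complete metadata, processing state (with the heartbeat deciding between Corrupt and InFlight), and finally missing collections. The sketch below restates that decision order as a small pure function; the boolean parameters are illustrative stand-ins for the metadata reader and dependency helper calls.

using System;

public enum LogsetStatus { NonExistent, Indeterminable, Corrupt, InFlight, Incomplete, Valid }

public static class LogsetStatusSketch
{
    // Mirrors the order of checks in GetStatus, with plain inputs instead of MongoDB lookups.
    public static LogsetStatus Classify(
        bool hasData, bool metadataReadable, bool metadataComplete,
        bool processedSuccessfully, bool heartbeatExpired, int missingCollectionCount)
    {
        if (!hasData) return LogsetStatus.NonExistent;
        if (!metadataReadable) return LogsetStatus.Indeterminable;
        if (!metadataComplete) return LogsetStatus.Corrupt;
        if (!processedSuccessfully) return heartbeatExpired ? LogsetStatus.Corrupt : LogsetStatus.InFlight;
        if (missingCollectionCount > 0) return LogsetStatus.Incomplete;
        return LogsetStatus.Valid;
    }

    public static void Main()
    {
        // An unfinished run with a fresh heartbeat is still in flight.
        Console.WriteLine(Classify(true, true, true, false, false, 0));

        // A finished run that lacks one required collection is incomplete.
        Console.WriteLine(Classify(true, true, true, true, false, 1));
    }
}
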
Example 4
        protected override bool QualifiesForExtraction(ZipEntry zipEntry, string destinationDirectory)
        {
            // Disqualify any zip entry whose name contains an illegal path character; valid Tableau log files never do.
            if (ContainsInvalidPathCharacters(zipEntry.Name))
            {
                return false;
            }

            // If we don't actually need this file, don't unzip it.
            string outputFile = Path.Combine(destinationDirectory, zipEntry.Name);

            if (!LogsetDependencyHelper.IsLogfileRequiredForRequest(outputFile, destinationDirectory, request))
            {
                return false;
            }

            return base.QualifiesForExtraction(zipEntry, destinationDirectory);
        }
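
ContainsInvalidPathCharacters is not shown in this example, so the sketch below is only one plausible way such a screen might be implemented, using the framework's Path.GetInvalidPathChars; it is illustrative, not the actual Logshark implementation.

using System;
using System.IO;

public static class PathCharacterCheckSketch
{
    // True if the zip entry name contains a character that can never appear in a valid path.
    public static bool ContainsInvalidPathCharacters(string entryName)
    {
        return entryName.IndexOfAny(Path.GetInvalidPathChars()) >= 0;
    }

    public static void Main()
    {
        Console.WriteLine(ContainsInvalidPathCharacters("logs/vizqlserver/vizql-0.log")); // False
        Console.WriteLine(ContainsInvalidPathCharacters("logs/bad\0name.log"));           // True
    }
}
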
Example 5
        /// <summary>
        /// Loads all of the logs required for this request.
        /// </summary>
        /// <returns>Log contexts for all logs required for request.</returns>
        public IEnumerable<LogFileContext> LoadRequiredLogs()
        {
            var logsToProcess = new List<LogFileContext>();

            // Filter down to only supported files.
            var supportedFiles = GetSupportedFiles(request.RunContext.RootLogDirectory);

            // Filter supported files to keep only what we need to populate the required collections.
            foreach (var supportedFile in supportedFiles)
            {
                var parser = parserFactory.GetParser(supportedFile.FullName);
                string collectionName = parser.CollectionSchema.CollectionName.ToLowerInvariant();

                if (LogsetDependencyHelper.IsCollectionRequiredForRequest(collectionName, request))
                {
                    logsToProcess.Add(new LogFileContext(supportedFile.FullName, request.RunContext.RootLogDirectory));
                }
            }

            return logsToProcess;
        }
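
LoadRequiredLogs maps each supported file to its parser's collection name and keeps only the files whose collection the current request actually needs. The sketch below shows that filter with a hypothetical filename-to-collection mapping and a hard-coded set of required collections standing in for the parser factory and LogsetDependencyHelper.

using System;
using System.Collections.Generic;
using System.Linq;

public static class RequiredLogFilterSketch
{
    public static void Main()
    {
        // Hypothetical mapping from log file to the collection its parser populates.
        var collectionForFile = new Dictionary<string, string>
        {
            { "backgrounder-0.log", "backgrounder" },
            { "vizqlserver-0.log", "vizqlserver" },
            { "httpd-access.log", "httpd" }
        };

        // Collections needed by the plugins selected for this run (illustrative).
        var requiredCollections = new HashSet<string> { "backgrounder", "httpd" };

        var logsToProcess = collectionForFile
            .Where(kvp => requiredCollections.Contains(kvp.Value))
            .Select(kvp => kvp.Key)
            .ToList();

        // Prints only the files whose collections are required.
        Console.WriteLine(string.Join(", ", logsToProcess));
    }
}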