/// <summary>
/// Main entry point to start analysis; handles setting up rules, directory enumeration
/// file type detection and handoff
/// Pre: All Configure Methods have been called already and we are ready to SCAN
/// </summary>
/// <returns>ExitCode.NoMatches or ExitCode.MatchesFound as int</returns>
public int Run()
{
    WriteOnce.SafeLog("AnalyzeCommand::Run", LogLevel.Trace);
    DateTime start = DateTime.Now;
    WriteOnce.Operation(ErrMsg.FormatString(ErrMsg.ID.CMD_RUNNING, "Analyze"));

    _appProfile.MetaData.TotalFiles = _srcfileList.Count();//updated for zipped files later

    // Iterate through all files and process against rules
    foreach (string filename in _srcfileList)
    {
        var fileExtension = new FileInfo(filename).Extension;
        if (COMPRESSED_EXTENSIONS.Any(fileExtension.Contains))
        {
            UnZipAndProcess(filename); //determine if file is a compressed item to unpackage for processing
        }
        else
        {
            ProcessAsFile(filename);
        }
    }

    WriteOnce.General("\r" + ErrMsg.FormatString(ErrMsg.ID.ANALYZE_FILES_PROCESSED_PCNT, 100));
    WriteOnce.Operation(ErrMsg.GetString(ErrMsg.ID.CMD_PREPARING_REPORT));

    //Prepare report results
    _appProfile.MetaData.LastUpdated = LastUpdated.ToString();
    _appProfile.DateScanned = DateScanned.ToString();
    _appProfile.PrepareReport();

    //FIX: compute elapsed time directly instead of negating a reversed subtraction
    TimeSpan timeSpan = DateTime.Now - start;
    WriteOnce.SafeLog(String.Format("Processing time: seconds:{0}", timeSpan.TotalSeconds), LogLevel.Trace);
    FlushAll();

    //wrapup result status
    if (_appProfile.MetaData.TotalFiles == _appProfile.MetaData.FilesSkipped)
    {
        WriteOnce.Error(ErrMsg.GetString(ErrMsg.ID.ANALYZE_NOSUPPORTED_FILETYPES));
    }
    else if (_appProfile.MatchList.Count == 0)
    {
        WriteOnce.Error(ErrMsg.GetString(ErrMsg.ID.ANALYZE_NOPATTERNS));
    }
    else
    {
        WriteOnce.Operation(ErrMsg.FormatString(ErrMsg.ID.CMD_COMPLETED, "Analyze"));
    }

    //html report size warning
    //FIX: FileInfo.Length throws FileNotFoundException when the file does not exist;
    //guard on existence before reading the size (report may not have been written here)
    FileInfo htmlReport = new FileInfo("output.html");
    if (_arg_fileFormat == "html" && htmlReport.Exists && htmlReport.Length > MAX_HTML_REPORT_FILE_SIZE)
    {
        WriteOnce.Info(ErrMsg.GetString(ErrMsg.ID.ANALYZE_REPORTSIZE_WARN));
    }

    return (_appProfile.MatchList.Count() == 0 ? (int)ExitCode.NoMatches : (int)ExitCode.MatchesFound);
}
/// <summary>
/// Main entry point to start analysis; handles setting up rules, directory enumeration
/// file type detection and handoff
/// Pre: All Configure Methods have been called already and we are ready to SCAN
/// </summary>
/// <returns>ExitCode.NoMatches or ExitCode.MatchesFound as int</returns>
public int Run()
{
    WriteOnce.SafeLog("AnalyzeCommand::Run", LogLevel.Trace);
    DateTime scanStart = DateTime.Now;
    WriteOnce.Operation(ErrMsg.FormatString(ErrMsg.ID.CMD_RUNNING, "Analyze"));

    _appProfile.MetaData.TotalFiles = _srcfileList.Count();//updated for zipped files later

    // Route each source file either to decompression or direct scan
    foreach (string srcFilePath in _srcfileList)
    {
        ArchiveFileType fileType = MiniMagic.DetectFileType(srcFilePath);
        if (fileType != ArchiveFileType.UNKNOWN)//recognized compressed package
        {
            UnZipAndProcess(srcFilePath, fileType);
        }
        else
        {
            ProcessAsFile(srcFilePath);
        }
    }

    WriteOnce.General("\r" + ErrMsg.FormatString(ErrMsg.ID.ANALYZE_FILES_PROCESSED_PCNT, 100));
    WriteOnce.Operation(ErrMsg.GetString(ErrMsg.ID.CMD_PREPARING_REPORT));

    //Prepare report results
    _appProfile.MetaData.LastUpdated = LastUpdated.ToString();
    _appProfile.DateScanned = DateScanned.ToString();
    _appProfile.PrepareReport();

    double elapsedSeconds = (DateTime.Now - scanStart).TotalSeconds;
    WriteOnce.SafeLog(String.Format("Processing time: seconds:{0}", elapsedSeconds), LogLevel.Trace);
    FlushAll();

    //wrapup result status
    if (_appProfile.MetaData.TotalFiles == _appProfile.MetaData.FilesSkipped)
    {
        WriteOnce.Error(ErrMsg.GetString(ErrMsg.ID.ANALYZE_NOSUPPORTED_FILETYPES));
    }
    else if (_appProfile.MatchList.Count == 0)
    {
        WriteOnce.Error(ErrMsg.GetString(ErrMsg.ID.ANALYZE_NOPATTERNS));
    }
    else
    {
        WriteOnce.Operation(ErrMsg.FormatString(ErrMsg.ID.CMD_COMPLETED, "Analyze"));
        if (!_arg_autoBrowserOpen)
        {
            WriteOnce.Any(ErrMsg.FormatString(ErrMsg.ID.ANALYZE_OUTPUT_FILE, "output.html"));
        }
    }

    return _appProfile.MatchList.Count() == 0
        ? (int)ExitCode.NoMatches
        : (int)ExitCode.MatchesFound;
}
/// <summary>
/// Expands a compressed file and scans each contained entry in memory against the rules.
/// Entries that are oversized or match the exclusion list are counted as skipped.
/// </summary>
/// <param name="filename">path to the compressed file</param>
/// <param name="archiveFileType">detected archive type (currently unused here — TODO confirm)</param>
void UnZipAndProcess(string filename, ArchiveFileType archiveFileType)
{
    //zip itself may be too huge for timely processing
    if (new FileInfo(filename).Length > WARN_ZIP_FILE_SIZE)
    {
        WriteOnce.General(ErrMsg.FormatString(ErrMsg.ID.ANALYZE_COMPRESSED_FILESIZE_WARN));
    }
    else
    {
        WriteOnce.General(ErrMsg.FormatString(ErrMsg.ID.ANALYZE_COMPRESSED_PROCESSING));
    }

    try
    {
        //FIX: materialize once; the original enumerated the lazy result three times
        //(two Count() calls plus foreach), which can repeat the extraction work
        List<FileEntry> files = Extractor.ExtractFile(filename).ToList();
        if (files.Count > 0)
        {
            _appProfile.MetaData.TotalFiles += files.Count;//additive in case additional child zip files processed
            _appProfile.MetaData.PackageTypes.Add(ErrMsg.GetString(ErrMsg.ID.ANALYZE_COMPRESSED_FILETYPE));

            foreach (FileEntry file in files)
            {
                //too large to process in a timely manner
                if (file.Content.Length > MAX_FILESIZE)
                {
                    WriteOnce.SafeLog(ErrMsg.FormatString(ErrMsg.ID.ANALYZE_FILESIZE_SKIPPED, file.FullPath), LogLevel.Warn);
                    _appProfile.MetaData.FilesSkipped++;
                    continue;
                }

                //dup check vs Run() for zip contents; exclude sample, test or similar files by default or as specified in exclusion list
                if (!_arg_allowSampleFiles && _fileExclusionList.Any(v => file.FullPath.ToLower().Contains(v)))
                {
                    WriteOnce.SafeLog("Part of excluded list: " + file.FullPath, LogLevel.Trace);
                    WriteOnce.SafeLog(ErrMsg.FormatString(ErrMsg.ID.ANALYZE_FILESIZE_SKIPPED, file.FullPath), LogLevel.Trace);
                    _appProfile.MetaData.FilesSkipped++;
                    continue;
                }

                WriteOnce.Log.Trace("processing zip file entry: " + file.FullPath);
                byte[] streamByteArray = file.Content.ToArray();
                ProcessInMemory(file.FullPath, Encoding.UTF8.GetString(streamByteArray, 0, streamByteArray.Length), true);
            }
        }
        else
        {
            //NOTE: this throw is caught by the catch below and re-wrapped as a generic Exception
            throw new OpException(ErrMsg.FormatString(ErrMsg.ID.ANALYZE_COMPRESSED_ERROR, filename));
        }
    }
    catch (Exception e)
    {
        string errmsg = ErrMsg.FormatString(ErrMsg.ID.ANALYZE_COMPRESSED_ERROR, filename);
        WriteOnce.Error(errmsg);
        //FIX: preserve the original exception as InnerException instead of discarding it;
        //message text kept identical for callers that parse it
        throw new Exception(errmsg + e.Message + "\n" + e.StackTrace, e);
    }
}
/// <summary>
/// Expands a compressed file and scans each contained entry in memory against the rules.
/// The archive itself is skipped entirely if its path matches the exclusion list.
/// </summary>
/// <param name="filename">path to the compressed file</param>
/// <param name="archiveFileType">detected archive type (not read in this body — TODO confirm intended)</param>
void UnZipAndProcess(string filename, ArchiveFileType archiveFileType)
{
    // zip itself may be in excluded list i.e. sample, test or similar unless ignore filter requested
    // NOTE(review): unlike the per-entry checks elsewhere, this does NOT consult
    // _arg_allowSampleFiles before applying the exclusion list — confirm intended
    if (_fileExclusionList.Any(v => filename.ToLower().Contains(v)))
    {
        WriteOnce.SafeLog(ErrMsg.FormatString(ErrMsg.ID.ANALYZE_EXCLUDED_TYPE_SKIPPED, filename), LogLevel.Warn);
        _appProfile.MetaData.FilesSkipped++;
        return;
    }

    //zip itself may be too huge for timely processing; warn but continue either way
    if (new FileInfo(filename).Length > WARN_ZIP_FILE_SIZE)
    {
        WriteOnce.General(ErrMsg.FormatString(ErrMsg.ID.ANALYZE_COMPRESSED_FILESIZE_WARN));
    }
    else
    {
        WriteOnce.General(ErrMsg.FormatString(ErrMsg.ID.ANALYZE_COMPRESSED_PROCESSING));
    }

    // record the archive's timestamp and package type before extraction is attempted
    LastUpdated = File.GetLastWriteTime(filename);
    _appProfile.MetaData.PackageTypes.Add(ErrMsg.GetString(ErrMsg.ID.ANALYZE_COMPRESSED_FILETYPE));

    try
    {
        IEnumerable<FileEntry> files = Extractor.ExtractFile(filename);
        if (files.Count() > 0)
        {
            _appProfile.MetaData.TotalFiles += files.Count();//additive in case additional child zip files processed
            foreach (FileEntry file in files)
            {
                //check uncompressed file passes standard checks (size, exclusion, supported language)
                LanguageInfo languageInfo = new LanguageInfo();
                if (FileChecksPassed(file.FullPath, ref languageInfo, file.Content.Length))
                {
                    // decode entry bytes as UTF-8 text for rule analysis
                    byte[] streamByteArray = file.Content.ToArray();
                    ProcessInMemory(file.FullPath, Encoding.UTF8.GetString(streamByteArray, 0, streamByteArray.Length), languageInfo);
                }
            }
        }
        else
        {
            WriteOnce.SafeLog(string.Format("Decompression found no files in {0}", filename), LogLevel.Warn);//zero results can be valid
        }
    }
    catch (Exception e)
    {
        // any extraction/decoding failure is surfaced to the console and rethrown wrapped
        string errmsg = ErrMsg.FormatString(ErrMsg.ID.ANALYZE_COMPRESSED_ERROR, filename);
        WriteOnce.Error(errmsg);
        throw new Exception(errmsg + e.Message + "\n" + e.StackTrace);
    }
}
/// <summary>
/// Opens the given url or file path in the system browser using a per-OS launch strategy.
/// Launch failures are reported to the console rather than thrown.
/// </summary>
/// <param name="url">url or local file path to open</param>
public static void OpenBrowser(string url)
{
    if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
    {
        try
        {
            //FIX: '&' is a command separator to cmd.exe; escape it so urls containing
            //query strings are passed to 'start' intact
            Process.Start(new ProcessStartInfo("cmd", $"/c start {url.Replace("&", "^&")}"));
            WriteOnce.General(ErrMsg.GetString(ErrMsg.ID.BROWSER_START_SUCCESS));
        }
        catch (Exception)
        {
            WriteOnce.General(ErrMsg.GetString(ErrMsg.ID.BROWSER_START_FAIL));
        }
    }
    else if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
    {
        //NOTE(review): BROWSER is used only as an opt-in signal; the launch still goes
        //through xdg-open rather than the BROWSER value itself — confirm intended
        if (!string.IsNullOrEmpty(Environment.GetEnvironmentVariable("BROWSER")))
        {
            try
            {
                Process.Start("xdg-open", url);
                WriteOnce.General(ErrMsg.GetString(ErrMsg.ID.BROWSER_START_SUCCESS));
            }
            catch (Exception)
            {
                WriteOnce.SafeLog("Unable to open browser using BROWSER environment var", NLog.LogLevel.Error);
            }
        }
        else
        {
            WriteOnce.General(ErrMsg.GetString(ErrMsg.ID.BROWSER_ENVIRONMENT_VAR));
        }
    }
    else if (RuntimeInformation.IsOSPlatform(OSPlatform.OSX))
    {
        try
        {
            Process.Start("open", url);
            WriteOnce.General(ErrMsg.GetString(ErrMsg.ID.BROWSER_START_SUCCESS));
        }
        catch (Exception)
        {
            WriteOnce.General(ErrMsg.GetString(ErrMsg.ID.BROWSER_START_FAIL));
        }
    }
}
/// <summary>
/// Expands a compressed file and runs each contained entry through the standard
/// file checks before in-memory rule analysis.
/// </summary>
/// <param name="filename">path to the compressed file</param>
/// <param name="archiveFileType">detected archive type (not read in this body)</param>
void UnZipAndProcess(string filename, ArchiveFileType archiveFileType)
{
    //zip itself may be too huge for timely processing; pick the matching console message
    bool oversizedArchive = new FileInfo(filename).Length > WARN_ZIP_FILE_SIZE;
    WriteOnce.General(ErrMsg.FormatString(oversizedArchive
        ? ErrMsg.ID.ANALYZE_COMPRESSED_FILESIZE_WARN
        : ErrMsg.ID.ANALYZE_COMPRESSED_PROCESSING));

    LastUpdated = File.GetLastWriteTime(filename);
    _appProfile.MetaData.PackageTypes.Add(ErrMsg.GetString(ErrMsg.ID.ANALYZE_COMPRESSED_FILETYPE));

    try
    {
        IEnumerable<FileEntry> extracted = Extractor.ExtractFile(filename);
        if (extracted.Count() > 0)
        {
            _appProfile.MetaData.TotalFiles += extracted.Count();//additive in case additional child zip files processed
            foreach (FileEntry entry in extracted)
            {
                //check for supported language; skip entries that fail standard checks
                LanguageInfo entryLanguage = new LanguageInfo();
                if (!FileChecksPassed(entry.FullPath, ref entryLanguage, entry.Content.Length))
                {
                    continue;
                }

                byte[] contentBytes = entry.Content.ToArray();
                ProcessInMemory(entry.FullPath, Encoding.UTF8.GetString(contentBytes, 0, contentBytes.Length), entryLanguage);
            }
        }
        else
        {
            throw new OpException(ErrMsg.FormatString(ErrMsg.ID.ANALYZE_COMPRESSED_ERROR, filename));
        }
    }
    catch (Exception e)
    {
        string errorText = ErrMsg.FormatString(ErrMsg.ID.ANALYZE_COMPRESSED_ERROR, filename);
        WriteOnce.Error(errorText);
        throw new Exception(errorText + e.Message + "\n" + e.StackTrace);
    }
}
/// <summary>
/// Main WORKHORSE for analyzing file; called from file based or decompression functions
/// </summary>
/// <param name="filePath">path the text came from; used for logging, exclusion checks and metadata</param>
/// <param name="fileText">full text content to scan against the rules</param>
void ProcessInMemory(string filePath, string fileText)
{
    #region quickvalidation

    //too large to process in a timely manner
    if (fileText.Length > MAX_FILESIZE)
    {
        WriteOnce.SafeLog("File too large: " + filePath, LogLevel.Trace);
        WriteOnce.SafeLog(ErrMsg.FormatString(ErrMsg.ID.ANALYZE_FILESIZE_SKIPPED, filePath), LogLevel.Error);
        _appProfile.MetaData.FilesSkipped++;
        return;
    }

    //exclude sample, test or similar files by default or as specified in exclusion list
    //FIX: compare case-insensitively (ToLower) so exclusions match regardless of path
    //casing, consistent with the equivalent checks in the other scan paths
    if (!_arg_allowSampleFiles && _fileExclusionList.Any(v => filePath.ToLower().Contains(v)))
    {
        WriteOnce.SafeLog("Part of excluded list: " + filePath, LogLevel.Trace);
        WriteOnce.SafeLog(ErrMsg.FormatString(ErrMsg.ID.ANALYZE_FILESIZE_SKIPPED, filePath), LogLevel.Error);
        _appProfile.MetaData.FilesSkipped++;
        return;
    }

    //determine the source language so the right rules are applied
    string language = Language.FromFileName(filePath);

    // Skip files written in unknown language
    if (string.IsNullOrEmpty(language))
    {
        //FIX: removed dead assignment (language was reassigned right before returning)
        WriteOnce.SafeLog("Language not found for file: " + filePath, LogLevel.Trace);
        _appProfile.MetaData.FilesSkipped++;
        return;
    }
    else
    {
        WriteOnce.SafeLog("Preparing to process file: " + filePath, LogLevel.Trace);
    }

    #endregion

    #region minorRollupTrackingAndProgress

    _appProfile.MetaData.FilesAnalyzed++;
    _appProfile.MetaData.AddLanguage(language);
    _appProfile.MetaData.FileExtensions.Add(Path.GetExtension(filePath).Replace('.', ' ').TrimStart());
    LastUpdated = File.GetLastWriteTime(filePath);

    int totalFilesReviewed = _appProfile.MetaData.FilesAnalyzed + _appProfile.MetaData.FilesSkipped;
    int percentCompleted = (int)((float)totalFilesReviewed / (float)_appProfile.MetaData.TotalFiles * 100);
    WriteOnce.General("\r" + ErrMsg.FormatString(ErrMsg.ID.ANALYZE_FILES_PROCESSED_PCNT, percentCompleted), false);

    #endregion

    //process file against rules
    Issue[] matches = _rulesProcessor.Analyze(fileText, language);

    //if any matches found for this file...
    if (matches.Count() > 0)
    {
        _appProfile.MetaData.FilesAffected++;
        _appProfile.MetaData.TotalMatchesCount += matches.Count();
        HashSet<string> uniqueTagsControl = new HashSet<string>();

        //PERF: loop-invariant; construct once instead of once per match
        var tagPatternRegex = new Regex("Dependency.SourceInclude", RegexOptions.IgnoreCase);

        // Iterate through each match issue
        foreach (Issue match in matches)
        {
            WriteOnce.SafeLog(string.Format("Processing pattern matches for ruleId {0}, ruleName {1} file {2}", match.Rule.Id, match.Rule.Name, filePath), LogLevel.Trace);

            //maintain a list of unique tags; multi-purpose but primarily for filtering -u option
            //note: dupTagFound ends up reflecting only the LAST tag's uniqueness
            bool dupTagFound = false;
            foreach (string t in match.Rule.Tags)
            {
                dupTagFound = !uniqueTagsControl.Add(t);
            }

            //save all unique dependendencies even if Dependency tag pattern is not-unique
            String textMatch;
            if (match.Rule.Tags.Any(v => tagPatternRegex.IsMatch(v)))
            {
                textMatch = ExtractDependency(fileText, match.Boundary.Index, match.PatternMatch, language);
            }
            else
            {
                textMatch = ExtractTextSample(fileText, match.Boundary.Index, match.Boundary.Length);
            }

            //wrap rule issue result to add metadata
            MatchRecord record = new MatchRecord()
            {
                Filename = filePath,
                Language = language,
                Filesize = fileText.Length,
                TextSample = textMatch,
                Excerpt = ExtractExcerpt(fileText, match.StartLocation.Line),
                Issue = match
            };

            //preserve issue level characteristics as rolled up meta data of interest
            bool valid = _appProfile.MetaData.AddStandardProperties(record);

            //bail after extracting any dependency unique items IF user requested
            if (_arg_outputUniqueTagsOnly && dupTagFound)
            {
                continue;
            }
            else if (valid)
            {
                _appProfile.MatchList.Add(record);
            }
        }
    }
    else
    {
        WriteOnce.SafeLog("No pattern matches detected for file: " + filePath, LogLevel.Trace);
    }
}
/// <summary>
/// Main WORKHORSE for analyzing file; called from file based or decompression functions
/// </summary>
/// <param name="filePath">path the text came from; used for logging and match metadata</param>
/// <param name="fileText">full text content to scan against the rules</param>
/// <param name="languageInfo">language already resolved by the caller's file checks</param>
void ProcessInMemory(string filePath, string fileText, LanguageInfo languageInfo)
{
    #region minorRollupTrackingAndProgress

    WriteOnce.SafeLog("Preparing to process file: " + filePath, LogLevel.Trace);

    _appProfile.MetaData.FilesAnalyzed++;

    // progress is reported as analyzed+skipped over the (possibly growing) total
    int totalFilesReviewed = _appProfile.MetaData.FilesAnalyzed + _appProfile.MetaData.FilesSkipped;
    int percentCompleted = (int)((float)totalFilesReviewed / (float)_appProfile.MetaData.TotalFiles * 100);
    //earlier issue now resolved so app handles mixed zipped/zipped and unzipped/zipped directories but catch all for non-critical UI
    if (percentCompleted > 100)
    {
        percentCompleted = 100;
    }

    WriteOnce.General("\r" + ErrMsg.FormatString(ErrMsg.ID.ANALYZE_FILES_PROCESSED_PCNT, percentCompleted), false);

    #endregion

    //process file against rules
    Issue[] matches = _rulesProcessor.Analyze(fileText, languageInfo);

    //if any matches found for this file...
    if (matches.Count() > 0)
    {
        _appProfile.MetaData.FilesAffected++;
        _appProfile.MetaData.TotalMatchesCount += matches.Count();

        // Iterate through each match issue
        foreach (Issue match in matches)
        {
            WriteOnce.SafeLog(string.Format("Processing pattern matches for ruleId {0}, ruleName {1} file {2}", match.Rule.Id, match.Rule.Name, filePath), LogLevel.Trace);

            //maintain a list of unique tags; multi-purpose but primarily for filtering -d option
            //note: dupTagFound ends up reflecting only the LAST tag's uniqueness — TODO confirm intended
            bool dupTagFound = false;
            foreach (string t in match.Rule.Tags)
            {
                dupTagFound = !_uniqueTagsControl.Add(t);
            }

            //save all unique dependencies even if Dependency tag pattern is not-unique
            var tagPatternRegex = new Regex("Dependency.SourceInclude", RegexOptions.IgnoreCase);
            String textMatch;
            if (match.Rule.Tags.Any(v => tagPatternRegex.IsMatch(v)))
            {
                // dependency matches get a specialized extraction of the referenced name
                textMatch = ExtractDependency(fileText, match.Boundary.Index, match.PatternMatch, languageInfo.Name);
            }
            else
            {
                textMatch = ExtractTextSample(fileText, match.Boundary.Index, match.Boundary.Length);
            }

            //wrap rule issue result to add metadata
            MatchRecord record = new MatchRecord()
            {
                Filename = filePath,
                Language = languageInfo,
                Filesize = fileText.Length,
                TextSample = textMatch,
                Excerpt = ExtractExcerpt(fileText, match.StartLocation.Line),
                Issue = match
            };

            //preserve issue level characteristics as rolled up meta data of interest
            bool addAsFeatureMatch = _appProfile.MetaData.AddStandardProperties(ref record);

            //bail after extracting any dependency unique items IF user requested
            if (_arg_outputUniqueTagsOnly && dupTagFound)
            {
                continue;
            }
            else if (addAsFeatureMatch)
            {
                _appProfile.MatchList.Add(record);
            }
        }
    }
    else
    {
        WriteOnce.SafeLog("No pattern matches detected for file: " + filePath, LogLevel.Trace);
    }
}
/// <summary>
/// Main WORKHORSE for analyzing file; called from file based or decompression functions
/// </summary>
/// <param name="filePath">path the text came from; used for validation, logging and metadata</param>
/// <param name="fileText">full text content to scan against the rules</param>
void ProcessInMemory(string filePath, string fileText)
{
    #region quickvalidation

    //too large to process in a timely manner
    if (fileText.Length > MAX_FILESIZE)
    {
        WriteOnce.SafeLog(ErrMsg.FormatString(ErrMsg.ID.ANALYZE_FILESIZE_SKIPPED, filePath), LogLevel.Warn);
        _appProfile.MetaData.FilesSkipped++;
        return;
    }

    //exclude sample, test or similar files by default or as specified in exclusion list
    if (!_arg_allowSampleFiles && _fileExclusionList.Any(v => filePath.ToLower().Contains(v)))
    {
        WriteOnce.SafeLog("Part of excluded list: " + filePath, LogLevel.Trace);
        WriteOnce.SafeLog(ErrMsg.FormatString(ErrMsg.ID.ANALYZE_FILESIZE_SKIPPED, filePath), LogLevel.Trace);
        _appProfile.MetaData.FilesSkipped++;
        return;
    }

    //check for supported language
    LanguageInfo languageInfo = new LanguageInfo();
    // Skip files written in unknown language
    if (!Language.FromFileName(filePath, ref languageInfo))
    {
        WriteOnce.SafeLog("Language not found for file: " + filePath, LogLevel.Trace);
        _appProfile.MetaData.FilesSkipped++;
        return;
    }
    else
    {
        WriteOnce.SafeLog("Preparing to process file: " + filePath, LogLevel.Trace);
    }

    #endregion

    #region minorRollupTrackingAndProgress

    _appProfile.MetaData.FilesAnalyzed++;
    _appProfile.MetaData.AddLanguage(languageInfo.Name);
    // store extension without the leading dot
    _appProfile.MetaData.FileExtensions.Add(Path.GetExtension(filePath).Replace('.', ' ').TrimStart());
    LastUpdated = File.GetLastWriteTime(filePath);

    int totalFilesReviewed = _appProfile.MetaData.FilesAnalyzed + _appProfile.MetaData.FilesSkipped;
    int percentCompleted = (int)((float)totalFilesReviewed / (float)_appProfile.MetaData.TotalFiles * 100);
    //reported: if a zip contains more zip files in it the total count may be off -complex.  ~workaround: freeze UI
    if (percentCompleted > 100)
    {
        percentCompleted = 100;
    }
    WriteOnce.General("\r" + ErrMsg.FormatString(ErrMsg.ID.ANALYZE_FILES_PROCESSED_PCNT, percentCompleted), false);

    #endregion

    //process file against rules
    Issue[] matches = _rulesProcessor.Analyze(fileText, languageInfo.Name);

    //if any matches found for this file...
    if (matches.Count() > 0)
    {
        _appProfile.MetaData.FilesAffected++;
        _appProfile.MetaData.TotalMatchesCount += matches.Count();

        // Iterate through each match issue
        foreach (Issue match in matches)
        {
            WriteOnce.SafeLog(string.Format("Processing pattern matches for ruleId {0}, ruleName {1} file {2}", match.Rule.Id, match.Rule.Name, filePath), LogLevel.Trace);

            //do not accept features from build type files (only metadata) to avoid false positives that are not part of the executable program
            if (languageInfo.Type == LanguageInfo.LangFileType.Build && match.Rule.Tags.Any(v => !v.Contains("Metadata")))
            {
                continue;
            }

            //maintain a list of unique tags; multi-purpose but primarily for filtering -d option
            //note: dupTagFound ends up reflecting only the LAST tag's uniqueness — TODO confirm intended
            bool dupTagFound = false;
            foreach (string t in match.Rule.Tags)
            {
                dupTagFound = !_uniqueTagsControl.Add(t);
            }

            //save all unique dependencies even if Dependency tag pattern is not-unique
            var tagPatternRegex = new Regex("Dependency.SourceInclude", RegexOptions.IgnoreCase);
            String textMatch;
            if (match.Rule.Tags.Any(v => tagPatternRegex.IsMatch(v)))
            {
                // dependency matches get a specialized extraction of the referenced name
                textMatch = ExtractDependency(fileText, match.Boundary.Index, match.PatternMatch, languageInfo.Name);
            }
            else
            {
                textMatch = ExtractTextSample(fileText, match.Boundary.Index, match.Boundary.Length);
            }

            //wrap rule issue result to add metadata
            MatchRecord record = new MatchRecord()
            {
                Filename = filePath,
                Language = languageInfo,
                Filesize = fileText.Length,
                TextSample = textMatch,
                Excerpt = ExtractExcerpt(fileText, match.StartLocation.Line),
                Issue = match
            };

            //preserve issue level characteristics as rolled up meta data of interest
            bool addAsFeatureMatch = _appProfile.MetaData.AddStandardProperties(record);

            //bail after extracting any dependency unique items IF user requested
            if (_arg_outputUniqueTagsOnly && dupTagFound)
            {
                continue;
            }
            else if (addAsFeatureMatch)
            {
                _appProfile.MatchList.Add(record);
            }
        }
    }
    else
    {
        WriteOnce.SafeLog("No pattern matches detected for file: " + filePath, LogLevel.Trace);
    }
}
/// <summary>
/// Main entry point to start analysis; handles setting up rules, directory enumeration
/// file type detection and handoff
/// Pre: All Configure Methods have been called already and we are ready to SCAN
/// </summary>
/// <returns>ExitCode.NoMatches or ExitCode.MatchesFound as int</returns>
public int Run()
{
    WriteOnce.SafeLog("AnalyzeCommand::Run", LogLevel.Trace);
    DateTime scanStart = DateTime.Now;
    WriteOnce.Operation(ErrMsg.FormatString(ErrMsg.ID.CMD_RUNNING, "Analyze"));

    _appProfile.MetaData.TotalFiles = _srcfileList.Count();//updated for zipped files later

    // Route each source file: skip excluded paths, decompress archives, scan the rest
    foreach (string srcFile in _srcfileList)
    {
        //exclude sample, test or similar files by default or as specified in exclusion list
        bool excluded = !_arg_allowSampleFiles && _fileExclusionList.Any(v => srcFile.ToLower().Contains(v));
        if (excluded)
        {
            WriteOnce.SafeLog("Part of excluded list: " + srcFile, LogLevel.Trace);
            WriteOnce.SafeLog(ErrMsg.FormatString(ErrMsg.ID.ANALYZE_FILESIZE_SKIPPED, srcFile), LogLevel.Trace);
            _appProfile.MetaData.FilesSkipped++;
            continue;
        }

        ArchiveFileType detectedType = MiniMagic.DetectFileType(srcFile);
        if (detectedType != ArchiveFileType.UNKNOWN)
        {
            UnZipAndProcess(srcFile, detectedType);
        }
        else
        {
            ProcessAsFile(srcFile);
        }
    }

    WriteOnce.General("\r" + ErrMsg.FormatString(ErrMsg.ID.ANALYZE_FILES_PROCESSED_PCNT, 100));
    WriteOnce.Operation(ErrMsg.GetString(ErrMsg.ID.CMD_PREPARING_REPORT));

    //Prepare report results
    _appProfile.MetaData.LastUpdated = LastUpdated.ToString();
    _appProfile.DateScanned = DateScanned.ToString();
    _appProfile.PrepareReport();

    double elapsedSeconds = (DateTime.Now - scanStart).TotalSeconds;
    WriteOnce.SafeLog(String.Format("Processing time: seconds:{0}", elapsedSeconds), LogLevel.Trace);
    FlushAll();

    //wrapup result status
    if (_appProfile.MetaData.TotalFiles == _appProfile.MetaData.FilesSkipped)
    {
        WriteOnce.Error(ErrMsg.GetString(ErrMsg.ID.ANALYZE_NOSUPPORTED_FILETYPES));
    }
    else if (_appProfile.MatchList.Count == 0)
    {
        WriteOnce.Error(ErrMsg.GetString(ErrMsg.ID.ANALYZE_NOPATTERNS));
    }
    else
    {
        WriteOnce.Operation(ErrMsg.FormatString(ErrMsg.ID.CMD_COMPLETED, "Analyze"));
    }

    return _appProfile.MatchList.Count() == 0
        ? (int)ExitCode.NoMatches
        : (int)ExitCode.MatchesFound;
}
/// <summary>
/// Runs the Analyze command against both sources with quiet settings, then compares
/// the resulting tag sets in both directions and reports equality/inequality per the
/// requested test type.
/// </summary>
/// <returns>ExitCode.TestPassed or ExitCode.TestFailed as int</returns>
public int Run()
{
    WriteOnce.Operation(ErrMsg.FormatString(ErrMsg.ID.CMD_RUNNING, "Tagdiff"));

    //setup output
    //NOTE(review): outputWriter is never explicitly disposed here; presumably
    //WriteOnce.FlushAll() handles it — confirm
    TextWriter outputWriter;
    if (!string.IsNullOrEmpty(_arg_outputFile))
    {
        outputWriter = File.CreateText(_arg_outputFile);
        outputWriter.WriteLine(Program.GetVersionString());
        WriteOnce.Writer = outputWriter;
    }

    if (_arg_src1 == _arg_src2)
    {
        throw new OpException(ErrMsg.GetString(ErrMsg.ID.TAGDIFF_SAME_FILE_ARG));
    }
    else if (string.IsNullOrEmpty(_arg_src1) || string.IsNullOrEmpty(_arg_src2))
    {
        throw new OpException(ErrMsg.GetString(ErrMsg.ID.CMD_INVALID_ARG_VALUE));
    }

    #region setup analyze calls

    //save to quiet analyze cmd
    WriteOnce.ConsoleVerbosity saveVerbosity = WriteOnce.Verbosity;

    //NOTE(review): temp files are only deleted on the success path below; an exception
    //in between leaves them behind — consider try/finally in a follow-up
    string tmp1 = Path.GetTempFileName();
    string tmp2 = Path.GetTempFileName();

    AnalyzeCommand.ExitCode result1 = AnalyzeCommand.ExitCode.CriticalError;
    AnalyzeCommand.ExitCode result2 = AnalyzeCommand.ExitCode.CriticalError;

    try
    {
        AnalyzeCommand cmd1 = new AnalyzeCommand(new AnalyzeCommandOptions
        {
            SourcePath = _arg_src1,
            OutputFilePath = tmp1,
            OutputFileFormat = "json",
            CustomRulesPath = _arg_rulesPath,
            IgnoreDefaultRules = _arg_ignoreDefault,
            SimpleTagsOnly = true,
            AllowDupTags = false,
            FilePathExclusions = "sample,example,test,docs,.vs,.git",
            ConsoleVerbosityLevel = "None"
        });
        AnalyzeCommand cmd2 = new AnalyzeCommand(new AnalyzeCommandOptions
        {
            SourcePath = _arg_src2,
            OutputFilePath = tmp2,
            OutputFileFormat = "json",
            CustomRulesPath = _arg_rulesPath,
            IgnoreDefaultRules = _arg_ignoreDefault,
            SimpleTagsOnly = true,
            AllowDupTags = false,
            FilePathExclusions = "sample,example,test,docs,.vs,.git",
            ConsoleVerbosityLevel = "None"
        });

        result1 = (AnalyzeCommand.ExitCode)cmd1.Run();
        result2 = (AnalyzeCommand.ExitCode)cmd2.Run();
    }
    catch (Exception)
    {
        //restore then rethrow
        //FIX: was `throw e;` which resets the stack trace; `throw;` preserves it
        WriteOnce.Verbosity = saveVerbosity;
        throw;
    }

    //restore
    WriteOnce.Verbosity = saveVerbosity;

    #endregion

    bool successResult;
    bool equal1 = true;
    bool equal2 = true;

    //process results for each analyze call before comparing results
    if (result1 == AnalyzeCommand.ExitCode.CriticalError)
    {
        throw new OpException(ErrMsg.FormatString(ErrMsg.ID.CMD_CRITICAL_FILE_ERR, _arg_src1));
    }
    else if (result2 == AnalyzeCommand.ExitCode.CriticalError)
    {
        throw new OpException(ErrMsg.FormatString(ErrMsg.ID.CMD_CRITICAL_FILE_ERR, _arg_src2));
    }
    else if (result1 == AnalyzeCommand.ExitCode.NoMatches || result2 == AnalyzeCommand.ExitCode.NoMatches)
    {
        throw new OpException(ErrMsg.GetString(ErrMsg.ID.TAGDIFF_NO_TAGS_FOUND));
    }
    else //compare tag results; assumed (result1&2 == AnalyzeCommand.ExitCode.MatchesFound)
    {
        string file1TagsJson = File.ReadAllText(tmp1);
        string file2TagsJson = File.ReadAllText(tmp2);
        var file1Tags = JsonConvert.DeserializeObject<TagsFile[]>(file1TagsJson).First();
        var file2Tags = JsonConvert.DeserializeObject<TagsFile[]>(file2TagsJson).First();

        //can't simply compare counts as content may differ; must compare both in directions in two passes a->b; b->a
        //first pass
        WriteOnce.General(ErrMsg.FormatString(ErrMsg.ID.TAGDIFF_RESULTS_GAP, Path.GetFileName(_arg_src1), Path.GetFileName(_arg_src2)), true, WriteOnce.ConsoleVerbosity.High);
        equal1 = CompareTags(file1Tags.Tags, file2Tags.Tags);

        //reverse order for second pass
        WriteOnce.General(ErrMsg.FormatString(ErrMsg.ID.TAGDIFF_RESULTS_GAP, Path.GetFileName(_arg_src2), Path.GetFileName(_arg_src1)), true, WriteOnce.ConsoleVerbosity.High);
        equal2 = CompareTags(file2Tags.Tags, file1Tags.Tags);

        //final results
        bool resultsDiffer = !(equal1 && equal2);
        if (_arg_tagTestType == TagTestType.Inequality && resultsDiffer)
        {
            successResult = true;
        }
        else if (_arg_tagTestType == TagTestType.Equality && !resultsDiffer)
        {
            successResult = true;
        }
        else
        {
            successResult = false;
        }

        WriteOnce.General(ErrMsg.GetString(ErrMsg.ID.TAGDIFF_RESULTS_DIFFER), false);
        WriteOnce.Result(resultsDiffer.ToString());
    }

    //cleanup
    try
    {
        File.Delete(tmp1);
        File.Delete(tmp2);
    }
    catch
    {
        //no action needed;
    }

    WriteOnce.FlushAll();
    WriteOnce.Operation(ErrMsg.FormatString(ErrMsg.ID.CMD_COMPLETED, "Tagdiff"));

    return (successResult ? (int)ExitCode.TestPassed : (int)ExitCode.TestFailed);
}
/// <summary>
/// Runs the Analyze command quietly against the source, then tests the resulting tag
/// set against the loaded ruleset per the requested test type (RulesPresent/RulesNotPresent).
/// </summary>
/// <returns>ExitCode.NoDiff or ExitCode.DiffFound as int; CriticalError when no matches were produced</returns>
public int Run()
{
    WriteOnce.Operation(ErrMsg.FormatString(ErrMsg.ID.CMD_RUNNING, "tagtest"));

    //init based on true or false present argument value
    bool testSuccess = true;

    //one file vs ruleset
    string tmp1 = Path.GetTempFileName();

    WriteOnce.ConsoleVerbosity saveVerbosity = WriteOnce.Verbosity;
    AnalyzeCommand.ExitCode result = AnalyzeCommand.ExitCode.CriticalError;

    //setup analyze call with silent option
    #region analyzesetup

    try
    {
        AnalyzeCommand cmd1 = new AnalyzeCommand(new AnalyzeCommandOptions
        {
            SourcePath = _arg_srcPath,
            OutputFilePath = tmp1,
            OutputFileFormat = "json",
            CustomRulesPath = _arg_customRulesPath,
            IgnoreDefaultRules = _arg_ignoreDefaultRules,
            SimpleTagsOnly = true,
            AllowDupTags = false,
            ConsoleVerbosityLevel = "None"
        });

        //quiet analysis commands
        result = (AnalyzeCommand.ExitCode)cmd1.Run();
    }
    catch (Exception)
    {
        //FIX: was `throw e;` which resets the stack trace; `throw;` preserves it
        WriteOnce.Verbosity = saveVerbosity;
        throw;
    }

    //restore
    WriteOnce.Verbosity = saveVerbosity;

    if (result == AnalyzeCommand.ExitCode.CriticalError)
    {
        throw new OpException(ErrMsg.GetString(ErrMsg.ID.CMD_CRITICAL_FILE_ERR));
    }
    else if (result == AnalyzeCommand.ExitCode.NoMatches)
    {
        //results
        WriteOnce.General(ErrMsg.FormatString(ErrMsg.ID.TAGTEST_RESULTS_TEST_TYPE, _arg_tagTestType.ToString()), false, WriteOnce.ConsoleVerbosity.Low);
        if (_arg_tagTestType == TagTestType.RulesPresent)
        {
            WriteOnce.Any(ErrMsg.GetString(ErrMsg.ID.TAGTEST_RESULTS_FAIL), true, ConsoleColor.Red, WriteOnce.ConsoleVerbosity.Low);
        }
        else
        {
            WriteOnce.Any(ErrMsg.GetString(ErrMsg.ID.TAGTEST_RESULTS_SUCCESS), true, ConsoleColor.Green, WriteOnce.ConsoleVerbosity.Low);
        }

        WriteOnce.FlushAll();
        WriteOnce.Operation(ErrMsg.FormatString(ErrMsg.ID.CMD_COMPLETED, "Tagtest"));

        //NOTE(review): this path returns CriticalError even when the test outcome above
        //was reported as SUCCESS — confirm intended (also tmp1 is not deleted here)
        return ((int)ExitCode.CriticalError);
    }

    #endregion

    //assumed (result == AnalyzeCommand.ExitCode.MatchesFound)
    string file1TagsJson = File.ReadAllText(tmp1);
    var file1TagsObj = JsonConvert.DeserializeObject<TagsFile[]>(file1TagsJson);
    var file1Tags = file1TagsObj.First(); // here we have a single FileList object
    File.Delete(tmp1);

    foreach (Rule r in _rulesSet)
    {
        //supports both directions by generalizing
        string[] testList1 = _arg_tagTestType == TagTestType.RulesNotPresent ? r.Tags : file1Tags.Tags;
        string[] testList2 = _arg_tagTestType == TagTestType.RulesNotPresent ? file1Tags.Tags : r.Tags;

        foreach (string t in testList2)
        {
            if (TagTest(testList1, t))
            {
                WriteOnce.Result(ErrMsg.FormatString(ErrMsg.ID.TAGTEST_RESULTS_TAGS_FOUND, t), true, WriteOnce.ConsoleVerbosity.High);
            }
            else
            {
                testSuccess = false;
                WriteOnce.Result(ErrMsg.FormatString(ErrMsg.ID.TAGTEST_RESULTS_TAGS_MISSING, t), true, WriteOnce.ConsoleVerbosity.High);
            }
        }
    }

    //results
    WriteOnce.General(ErrMsg.FormatString(ErrMsg.ID.TAGTEST_RESULTS_TEST_TYPE, _arg_tagTestType.ToString()), false, WriteOnce.ConsoleVerbosity.Low);
    if (testSuccess)
    {
        WriteOnce.Any(ErrMsg.GetString(ErrMsg.ID.TAGTEST_RESULTS_SUCCESS), true, ConsoleColor.Green, WriteOnce.ConsoleVerbosity.Low);
    }
    else
    {
        WriteOnce.Any(ErrMsg.GetString(ErrMsg.ID.TAGTEST_RESULTS_FAIL), true, ConsoleColor.Red, WriteOnce.ConsoleVerbosity.Low);
    }

    WriteOnce.FlushAll();
    WriteOnce.Operation(ErrMsg.FormatString(ErrMsg.ID.CMD_COMPLETED, "Tagtest"));

    return (testSuccess ? (int)ExitCode.NoDiff : (int)ExitCode.DiffFound);
}