/// <summary>
/// Writes a CSV-style summary line (id, name, description, tags, applicable languages)
/// for each rule; emitted only at high console verbosity
/// </summary>
/// <param name="rules">set of rules to report on</param>
void WritePartialRuleDetails(RuleSet rules)
{
    WriteOnce.Result("RuleId,Rulename,RuleDesc,Tags,AppliesToLanguage", true, WriteOnce.ConsoleVerbosity.High); //option to write out partial rule data

    foreach (Rule r in rules)
    {
        //string.Join avoids the out-of-range Remove() crash the manual concatenation
        //had when a rule declared zero tags
        string tags = string.Join(",", r.Tags);

        string languages = "Not-specified-so-all";
        if (r.AppliesTo != null && r.AppliesTo.Length > 0)
        {
            languages = string.Join(",", r.AppliesTo);
        }

        WriteOnce.Result(string.Format("{0},{1},{2},{3},{4}", r.Id, r.Name, r.Description, tags, languages), true, WriteOnce.ConsoleVerbosity.High);
    }
}
/// <summary>
/// Expects user to supply all that apply; parses the comma-separated confidence
/// filter argument into the _arg_confidence flags, defaulting to High|Medium
/// when no filter was supplied
/// </summary>
/// <exception cref="OpException">thrown when a value is not a valid Confidence name</exception>
void ConfigConfidenceFilters()
{
    WriteOnce.SafeLog("AnalyzeCommand::ConfigConfidenceFilters", LogLevel.Trace);

    //parse and verify confidence values
    if (String.IsNullOrEmpty(_arg_confidenceFilters))
    {
        _arg_confidence = Confidence.High | Confidence.Medium; //excludes low by default
    }
    else
    {
        string[] confidences = _arg_confidenceFilters.Split(',');
        foreach (string confidence in confidences)
        {
            Confidence single;
            if (Enum.TryParse(confidence, true, out single))
            {
                _arg_confidence |= single;
            }
            else
            {
                //bug fix: report the offending value rather than the placeholder "x"
                throw new OpException(ErrMsg.FormatString(ErrMsg.ID.CMD_INVALID_ARG_VALUE, confidence));
            }
        }
    }
}
/// <summary>
/// Simple validation on source path provided for scanning and preparation;
/// populates _srcfileList from a directory (recursive) or a single file
/// </summary>
/// <exception cref="OpException">thrown when the path is invalid or yields no files</exception>
void ConfigSourcetoScan()
{
    WriteOnce.SafeLog("AnalyzeCommand::ConfigSourcetoScan", LogLevel.Trace);

    if (Directory.Exists(_arg_sourcePath))
    {
        try
        {
            _srcfileList = Directory.EnumerateFiles(_arg_sourcePath, "*.*", SearchOption.AllDirectories);
            if (_srcfileList.Count() == 0)
            {
                throw new OpException(ErrMsg.FormatString(ErrMsg.ID.CMD_INVALID_FILE_OR_DIR, _arg_sourcePath));
            }
        }
        catch (Exception)
        {
            throw new OpException(ErrMsg.FormatString(ErrMsg.ID.CMD_INVALID_FILE_OR_DIR, _arg_sourcePath));
        }
    }
    else if (File.Exists(_arg_sourcePath)) //not a directory but make one for single flow
    {
        //syntax fix: the statement terminator belongs inside the block, not between '}' and 'else'
        _srcfileList = new List<string>() { _arg_sourcePath };
    }
    else
    {
        throw new OpException(ErrMsg.FormatString(ErrMsg.ID.CMD_INVALID_FILE_OR_DIR, _arg_sourcePath));
    }
}
/// <summary>
/// Main entry point to start analysis; handles setting up rules, directory enumeration
/// file type detection and handoff
/// Pre: All Configure Methods have been called already and we are ready to SCAN
/// </summary>
/// <returns>ExitCode.NoMatches or ExitCode.MatchesFound</returns>
public int Run()
{
    WriteOnce.SafeLog("AnalyzeCommand::Run", LogLevel.Trace);
    DateTime start = DateTime.Now;
    WriteOnce.Operation(ErrMsg.FormatString(ErrMsg.ID.CMD_RUNNING, "Analyze"));

    _appProfile.MetaData.TotalFiles = _srcfileList.Count();//updated for zipped files later

    // Iterate through all files and process against rules
    foreach (string filename in _srcfileList)
    {
        var fileExtension = new FileInfo(filename).Extension;
        if (COMPRESSED_EXTENSIONS.Any(fileExtension.Contains))
        {
            UnZipAndProcess(filename); //determine if file is a compressed item to unpackage for processing
        }
        else
        {
            ProcessAsFile(filename);
        }
    }

    WriteOnce.General("\r" + ErrMsg.FormatString(ErrMsg.ID.ANALYZE_FILES_PROCESSED_PCNT, 100));
    WriteOnce.Operation(ErrMsg.GetString(ErrMsg.ID.CMD_PREPARING_REPORT));

    //Prepare report results
    _appProfile.MetaData.LastUpdated = LastUpdated.ToString();
    _appProfile.DateScanned = DateScanned.ToString();
    _appProfile.PrepareReport();

    //bug fix: compute elapsed time forward rather than (start - now) * -1
    TimeSpan timeSpan = DateTime.Now - start;
    WriteOnce.SafeLog(String.Format("Processing time: seconds:{0}", timeSpan.TotalSeconds), LogLevel.Trace);
    FlushAll();

    //wrapup result status
    if (_appProfile.MetaData.TotalFiles == _appProfile.MetaData.FilesSkipped)
    {
        WriteOnce.Error(ErrMsg.GetString(ErrMsg.ID.ANALYZE_NOSUPPORTED_FILETYPES));
    }
    else if (_appProfile.MatchList.Count == 0)
    {
        WriteOnce.Error(ErrMsg.GetString(ErrMsg.ID.ANALYZE_NOPATTERNS));
    }
    else
    {
        WriteOnce.Operation(ErrMsg.FormatString(ErrMsg.ID.CMD_COMPLETED, "Analyze"));
    }

    //html report size warning; guard Exists to avoid FileNotFoundException from FileInfo.Length
    if (_arg_fileFormat == "html" && File.Exists("output.html") && new FileInfo("output.html").Length > MAX_HTML_REPORT_FILE_SIZE)
    {
        WriteOnce.Info(ErrMsg.GetString(ErrMsg.ID.ANALYZE_REPORTSIZE_WARN));
    }

    return (_appProfile.MatchList.Count() == 0 ? (int)ExitCode.NoMatches : (int)ExitCode.MatchesFound);
}
/// <summary>
/// Main entry point to start analysis; handles setting up rules, directory enumeration
/// file type detection (via MiniMagic) and handoff
/// Pre: All Configure Methods have been called already and we are ready to SCAN
/// </summary>
/// <returns>ExitCode.NoMatches or ExitCode.MatchesFound</returns>
public int Run()
{
    WriteOnce.SafeLog("AnalyzeCommand::Run", LogLevel.Trace);
    DateTime start = DateTime.Now;
    WriteOnce.Operation(ErrMsg.FormatString(ErrMsg.ID.CMD_RUNNING, "Analyze"));

    _appProfile.MetaData.TotalFiles = _srcfileList.Count();//updated for zipped files later

    // Iterate through all files and process against rules
    foreach (string filename in _srcfileList)
    {
        ArchiveFileType archiveFileType = MiniMagic.DetectFileType(filename);
        if (archiveFileType == ArchiveFileType.UNKNOWN)//not a known zipped file type
        {
            ProcessAsFile(filename);
        }
        else
        {
            UnZipAndProcess(filename, archiveFileType);
        }
    }

    WriteOnce.General("\r" + ErrMsg.FormatString(ErrMsg.ID.ANALYZE_FILES_PROCESSED_PCNT, 100));
    WriteOnce.Operation(ErrMsg.GetString(ErrMsg.ID.CMD_PREPARING_REPORT));

    //Prepare report results
    _appProfile.MetaData.LastUpdated = LastUpdated.ToString();
    _appProfile.DateScanned = DateScanned.ToString();
    _appProfile.PrepareReport();

    //bug fix: compute elapsed time forward rather than (start - now) * -1
    TimeSpan timeSpan = DateTime.Now - start;
    WriteOnce.SafeLog(String.Format("Processing time: seconds:{0}", timeSpan.TotalSeconds), LogLevel.Trace);
    FlushAll();

    //wrapup result status
    if (_appProfile.MetaData.TotalFiles == _appProfile.MetaData.FilesSkipped)
    {
        WriteOnce.Error(ErrMsg.GetString(ErrMsg.ID.ANALYZE_NOSUPPORTED_FILETYPES));
    }
    else if (_appProfile.MatchList.Count == 0)
    {
        WriteOnce.Error(ErrMsg.GetString(ErrMsg.ID.ANALYZE_NOPATTERNS));
    }
    else
    {
        WriteOnce.Operation(ErrMsg.FormatString(ErrMsg.ID.CMD_COMPLETED, "Analyze"));
        if (!_arg_autoBrowserOpen)
        {
            WriteOnce.Any(ErrMsg.FormatString(ErrMsg.ID.ANALYZE_OUTPUT_FILE, "output.html"));
        }
    }

    return (_appProfile.MatchList.Count() == 0 ? (int)ExitCode.NoMatches : (int)ExitCode.MatchesFound);
}
/// <summary>
/// Extracts a compressed file and runs each contained entry through in-memory analysis,
/// applying the same size and exclusion checks used for loose files
/// </summary>
/// <param name="filename">path of the archive on disk</param>
/// <param name="archiveFileType">detected archive type (currently unused here; Extractor sniffs the file itself)</param>
void UnZipAndProcess(string filename, ArchiveFileType archiveFileType)
{
    //zip itself may be too huge for timely processing
    if (new FileInfo(filename).Length > WARN_ZIP_FILE_SIZE)
    {
        WriteOnce.General(ErrMsg.FormatString(ErrMsg.ID.ANALYZE_COMPRESSED_FILESIZE_WARN));
    }
    else
    {
        WriteOnce.General(ErrMsg.FormatString(ErrMsg.ID.ANALYZE_COMPRESSED_PROCESSING));
    }

    try
    {
        //bug fix: materialize once; calling Count() repeatedly on the lazy enumerable
        //could re-run the extraction for every enumeration
        List<FileEntry> files = Extractor.ExtractFile(filename).ToList();
        if (files.Count > 0)
        {
            _appProfile.MetaData.TotalFiles += files.Count;//additive in case additional child zip files processed
            _appProfile.MetaData.PackageTypes.Add(ErrMsg.GetString(ErrMsg.ID.ANALYZE_COMPRESSED_FILETYPE));

            foreach (FileEntry file in files)
            {
                if (file.Content.Length > MAX_FILESIZE)
                {
                    WriteOnce.SafeLog(ErrMsg.FormatString(ErrMsg.ID.ANALYZE_FILESIZE_SKIPPED, file.FullPath), LogLevel.Warn);
                    _appProfile.MetaData.FilesSkipped++;
                    continue;
                }

                //dup check vs Run() for zip contents; exclude sample, test or similar files by default or as specified in exclusion list
                if (!_arg_allowSampleFiles && _fileExclusionList.Any(v => file.FullPath.ToLower().Contains(v)))
                {
                    WriteOnce.SafeLog("Part of excluded list: " + file.FullPath, LogLevel.Trace);
                    WriteOnce.SafeLog(ErrMsg.FormatString(ErrMsg.ID.ANALYZE_FILESIZE_SKIPPED, file.FullPath), LogLevel.Trace);
                    _appProfile.MetaData.FilesSkipped++;
                    continue;
                }

                WriteOnce.Log.Trace("processing zip file entry: " + file.FullPath);
                byte[] streamByteArray = file.Content.ToArray();
                ProcessInMemory(file.FullPath, Encoding.UTF8.GetString(streamByteArray, 0, streamByteArray.Length), true);
            }
        }
        else
        {
            throw new OpException(ErrMsg.FormatString(ErrMsg.ID.ANALYZE_COMPRESSED_ERROR, filename));
        }
    }
    catch (Exception e)
    {
        string errmsg = ErrMsg.FormatString(ErrMsg.ID.ANALYZE_COMPRESSED_ERROR, filename);
        WriteOnce.Error(errmsg);
        throw new Exception(errmsg + e.Message + "\n" + e.StackTrace);
    }
}
/// <summary>
/// Extracts a compressed file and runs each contained entry through standard file
/// checks and in-memory analysis; skips the archive itself if it is on the exclusion list
/// </summary>
/// <param name="filename">path of the archive on disk</param>
/// <param name="archiveFileType">detected archive type (currently unused here; Extractor sniffs the file itself)</param>
void UnZipAndProcess(string filename, ArchiveFileType archiveFileType)
{
    // zip itself may be in excluded list i.e. sample, test or similar unless ignore filter requested
    if (_fileExclusionList.Any(v => filename.ToLower().Contains(v)))
    {
        WriteOnce.SafeLog(ErrMsg.FormatString(ErrMsg.ID.ANALYZE_EXCLUDED_TYPE_SKIPPED, filename), LogLevel.Warn);
        _appProfile.MetaData.FilesSkipped++;
        return;
    }

    //zip itself may be too huge for timely processing
    if (new FileInfo(filename).Length > WARN_ZIP_FILE_SIZE)
    {
        WriteOnce.General(ErrMsg.FormatString(ErrMsg.ID.ANALYZE_COMPRESSED_FILESIZE_WARN));
    }
    else
    {
        WriteOnce.General(ErrMsg.FormatString(ErrMsg.ID.ANALYZE_COMPRESSED_PROCESSING));
    }

    LastUpdated = File.GetLastWriteTime(filename);
    _appProfile.MetaData.PackageTypes.Add(ErrMsg.GetString(ErrMsg.ID.ANALYZE_COMPRESSED_FILETYPE));

    try
    {
        //bug fix: materialize once; calling Count() repeatedly on the lazy enumerable
        //could re-run the extraction for every enumeration
        List<FileEntry> files = Extractor.ExtractFile(filename).ToList();
        if (files.Count > 0)
        {
            _appProfile.MetaData.TotalFiles += files.Count;//additive in case additional child zip files processed

            foreach (FileEntry file in files)
            {
                //check uncompressed file passes standard checks
                LanguageInfo languageInfo = new LanguageInfo();
                if (FileChecksPassed(file.FullPath, ref languageInfo, file.Content.Length))
                {
                    byte[] streamByteArray = file.Content.ToArray();
                    ProcessInMemory(file.FullPath, Encoding.UTF8.GetString(streamByteArray, 0, streamByteArray.Length), languageInfo);
                }
            }
        }
        else
        {
            WriteOnce.SafeLog(string.Format("Decompression found no files in {0}", filename), LogLevel.Warn);//zero results can be valid
        }
    }
    catch (Exception e)
    {
        string errmsg = ErrMsg.FormatString(ErrMsg.ID.ANALYZE_COMPRESSED_ERROR, filename);
        WriteOnce.Error(errmsg);
        throw new Exception(errmsg + e.Message + "\n" + e.StackTrace);
    }
}
/// <summary>
/// Program entry point which defines command verbs and options to running;
/// maps parsed arguments to the matching command handler and reports runtime errors
/// </summary>
/// <param name="args">raw command line arguments</param>
/// <returns>command result code, or -1 when a runtime error prevented completion</returns>
static int Main(string[] args)
{
    int exitCode = -1;
    WriteOnce.Verbosity = WriteOnce.ConsoleVerbosity.Medium;

    try
    {
        WriteOnce.Info(GetVersionString());

        //each verb maps directly to its command runner; unparsable args yield 1
        exitCode = Parser.Default.ParseArguments<AnalyzeCommandOptions,
            TagDiffCommandOptions,
            TagTestCommandOptions,
            ExportTagsCommandOptions,
            VerifyRulesCommandOptions>(args)
            .MapResult(
                (AnalyzeCommandOptions opts) => RunAnalyzeCommand(opts),
                (TagDiffCommandOptions opts) => RunTagDiffCommand(opts),
                (TagTestCommandOptions opts) => RunTagTestCommand(opts),
                (ExportTagsCommandOptions opts) => RunExportTagsCommand(opts),
                (VerifyRulesCommandOptions opts) => RunVerifyRulesCommand(opts),
                errs => 1
            );
    }
    catch (OpException e)
    {
        //known/expected failure type: report with its message when the logger is up
        if (Logger != null)
        {
            WriteOnce.Error(ErrMsg.FormatString(ErrMsg.ID.RUNTIME_ERROR_NAMED, e.Message));
            Logger.Error($"Runtime error: {e.StackTrace}");
        }
        else
        {
            WriteOnce.Error(ErrMsg.FormatString(ErrMsg.ID.RUNTIME_ERROR_PRELOG, e.Message));
        }
    }
    catch (Exception e)
    {
        //unexpected failure: generic message to console, details to log when available
        if (Logger != null)
        {
            WriteOnce.Error(ErrMsg.FormatString(ErrMsg.ID.RUNTIME_ERROR_UNNAMED));
            Logger.Error($"Runtime error: {e.StackTrace}");
        }
        else
        {
            WriteOnce.Error(ErrMsg.FormatString(ErrMsg.ID.RUNTIME_ERROR_PRELOG, e.Message));
        }
    }

    return exitCode;
}
/// <summary>
/// Simple but keeps calling code consistent; returns the trimmed substring at the
/// given boundary, or an empty string when the boundary is out of range
/// </summary>
/// <param name="fileText">full file text being sampled</param>
/// <param name="index">start index of the sample</param>
/// <param name="length">length of the sample</param>
/// <returns>trimmed sample text, or "" on indexing failure</returns>
string ExtractTextSample(string fileText, int index, int length)
{
    try
    {
        return fileText.Substring(index, length).Trim();
    }
    catch (Exception)
    {
        //control the error description and continue; error in rules engine possible
        WriteOnce.SafeLog("Unexpected indexing issue in ExtractTextSample. Process continued", LogLevel.Error);
        return "";
    }
}
/// <summary>
/// Opens the given URL in the default browser using the platform-appropriate launcher
/// (cmd start / xdg-open / open); on Linux the BROWSER environment variable must be set
/// </summary>
/// <param name="url">URL or file path to open</param>
public static void OpenBrowser(string url)
{
    if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
    {
        try
        {
            //bug fix: escape '&' for cmd.exe or everything after it runs as a separate command
            Process.Start(new ProcessStartInfo("cmd", $"/c start {url.Replace("&", "^&")}"));
            WriteOnce.General(ErrMsg.GetString(ErrMsg.ID.BROWSER_START_SUCCESS));
        }
        catch (Exception)
        {
            WriteOnce.General(ErrMsg.GetString(ErrMsg.ID.BROWSER_START_FAIL));
        }
    }
    else if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
    {
        //require BROWSER to be set as a signal that a graphical browser is available
        if (!string.IsNullOrEmpty(Environment.GetEnvironmentVariable("BROWSER")))
        {
            try
            {
                Process.Start("xdg-open", url);
                WriteOnce.General(ErrMsg.GetString(ErrMsg.ID.BROWSER_START_SUCCESS));
            }
            catch (Exception)
            {
                WriteOnce.SafeLog("Unable to open browser using BROWSER environment var", NLog.LogLevel.Error);
            }
        }
        else
        {
            WriteOnce.General(ErrMsg.GetString(ErrMsg.ID.BROWSER_ENVIRONMENT_VAR));
        }
    }
    else if (RuntimeInformation.IsOSPlatform(OSPlatform.OSX))
    {
        try
        {
            Process.Start("open", url);
            WriteOnce.General(ErrMsg.GetString(ErrMsg.ID.BROWSER_START_SUCCESS));
        }
        catch (Exception)
        {
            WriteOnce.General(ErrMsg.GetString(ErrMsg.ID.BROWSER_START_FAIL));
        }
    }
}
/// <summary>
/// Extracts a compressed file and runs each contained entry through standard file
/// checks and in-memory analysis; throws when extraction yields no files
/// </summary>
/// <param name="filename">path of the archive on disk</param>
/// <param name="archiveFileType">detected archive type (currently unused here; Extractor sniffs the file itself)</param>
void UnZipAndProcess(string filename, ArchiveFileType archiveFileType)
{
    //zip itself may be too huge for timely processing
    if (new FileInfo(filename).Length > WARN_ZIP_FILE_SIZE)
    {
        WriteOnce.General(ErrMsg.FormatString(ErrMsg.ID.ANALYZE_COMPRESSED_FILESIZE_WARN));
    }
    else
    {
        WriteOnce.General(ErrMsg.FormatString(ErrMsg.ID.ANALYZE_COMPRESSED_PROCESSING));
    }

    LastUpdated = File.GetLastWriteTime(filename);
    _appProfile.MetaData.PackageTypes.Add(ErrMsg.GetString(ErrMsg.ID.ANALYZE_COMPRESSED_FILETYPE));

    try
    {
        //bug fix: materialize once; calling Count() repeatedly on the lazy enumerable
        //could re-run the extraction for every enumeration
        List<FileEntry> files = Extractor.ExtractFile(filename).ToList();
        if (files.Count > 0)
        {
            _appProfile.MetaData.TotalFiles += files.Count;//additive in case additional child zip files processed

            foreach (FileEntry file in files)
            {
                //check for supported language
                LanguageInfo languageInfo = new LanguageInfo();
                if (FileChecksPassed(file.FullPath, ref languageInfo, file.Content.Length))
                {
                    byte[] streamByteArray = file.Content.ToArray();
                    ProcessInMemory(file.FullPath, Encoding.UTF8.GetString(streamByteArray, 0, streamByteArray.Length), languageInfo);
                }
            }
        }
        else
        {
            throw new OpException(ErrMsg.FormatString(ErrMsg.ID.ANALYZE_COMPRESSED_ERROR, filename));
        }
    }
    catch (Exception e)
    {
        string errmsg = ErrMsg.FormatString(ErrMsg.ID.ANALYZE_COMPRESSED_ERROR, filename);
        WriteOnce.Error(errmsg);
        throw new Exception(errmsg + e.Message + "\n" + e.StackTrace);
    }
}
/// <summary>
/// Streams a .tar.gz archive, analyzing each entry in memory; skips directories,
/// oversized entries and entries with ignored mime types
/// </summary>
/// <param name="filename">path of the .tar.gz file on disk</param>
void ProcessTarGzFile(string filename)
{
    WriteOnce.SafeLog(string.Format("Analyzing .tar.gz file: [{0}]", filename), LogLevel.Trace);
    _appProfile.MetaData.TotalFiles = GetTarGzFileCount(filename);

    using (var fileStream = new FileStream(filename, FileMode.Open, FileAccess.Read))
    using (var gzipStream = new GZipInputStream(fileStream))
    using (var memoryStream = new MemoryStream())
    {
        var tarStream = new TarInputStream(gzipStream);
        TarEntry tarEntry;
        while ((tarEntry = tarStream.GetNextEntry()) != null)
        {
            if (tarEntry.IsDirectory)
            {
                continue;
            }

            //bug fix: size check moved BEFORE copying so oversized entries are not buffered,
            //the tar stream is no longer closed mid-iteration (subsequent GetNextEntry would
            //have read a closed stream), and 'continue' can no longer skip the stream reset,
            //which previously left stale bytes in memoryStream for the next entry
            if (tarEntry.Size > MAX_FILESIZE)
            {
                _appProfile.MetaData.FilesSkipped++;
                WriteOnce.SafeLog(string.Format("{0} in {1} is too large. File skipped", tarEntry.Name, filename), LogLevel.Error);
                continue;
            }

            tarStream.CopyEntryContents(memoryStream);

            var mimeType = MimeTypeMap.GetMimeType(Path.GetExtension(tarEntry.Name));
            //bug fix: FileInfo.Extension includes the leading dot so "ts" never matched;
            //NOTE(review): this inspects the ARCHIVE's extension, not the entry's — confirm intent
            if (IgnoreMimeRegex.IsMatch(mimeType) && new FileInfo(filename).Extension != ".ts")
            {
                _appProfile.MetaData.FilesSkipped++;
                WriteOnce.SafeLog(string.Format("Ignoring tar entry [{0}]", tarEntry.Name), LogLevel.Error);
            }
            else
            {
                //file name may contain slashes; remove prior to call
                byte[] streamByteArray = memoryStream.ToArray();
                ProcessInMemory(Path.GetFileName(tarEntry.Name), Encoding.UTF8.GetString(streamByteArray, 0, streamByteArray.Length));
            }

            memoryStream.SetLength(0); // Clear out the stream
        }

        tarStream.Close();
    }
}
/// <summary>
/// Iterates a .zip archive, analyzing each entry in memory; skips directories,
/// oversized entries and entries with ignored mime types
/// </summary>
/// <param name="filename">path of the .zip file on disk</param>
void ProcessZipFile(string filename)
{
    WriteOnce.SafeLog(string.Format("Analyzing .zip file: [{0}])", filename), LogLevel.Trace);

    ZipFile zipFile;
    using (var fileStream = new FileStream(filename, FileMode.Open, FileAccess.Read))
    using (var memoryStream = new MemoryStream())
    {
        zipFile = new ZipFile(fileStream);
        _appProfile.MetaData.TotalFiles = (int)zipFile.Count;
        byte[] buffer = new byte[4096]; //hoisted: one copy buffer reused across entries

        foreach (ZipEntry zipEntry in zipFile)
        {
            if (zipEntry.IsDirectory)
            {
                continue;
            }

            //bug fix: oversized entries are skipped WITHOUT closing the zip file;
            //the previous Close() + continue meant later GetInputStream calls hit a closed archive
            if (zipEntry.Size > MAX_FILESIZE)
            {
                _appProfile.MetaData.FilesSkipped++;
                WriteOnce.SafeLog(string.Format("{0} in {1} is too large. File skipped", zipEntry.Name, filename), LogLevel.Error);
                continue;
            }

            var zipStream = zipFile.GetInputStream(zipEntry);
            StreamUtils.Copy(zipStream, memoryStream, buffer);

            var mimeType = MimeTypeMap.GetMimeType(Path.GetExtension(zipEntry.Name));
            //bug fix: FileInfo.Extension includes the leading dot so "ts" never matched;
            //NOTE(review): this inspects the ARCHIVE's extension, not the entry's — confirm intent
            if (IgnoreMimeRegex.IsMatch(mimeType) && new FileInfo(filename).Extension != ".ts")
            {
                _appProfile.MetaData.FilesSkipped++;
                WriteOnce.SafeLog(string.Format("Ignoring zip entry [{0}]", zipEntry.Name), LogLevel.Error);
            }
            else
            {
                byte[] streamByteArray = memoryStream.ToArray();
                ProcessInMemory(Path.GetFileName(zipEntry.Name), Encoding.UTF8.GetString(streamByteArray, 0, streamByteArray.Length));
            }

            memoryStream.SetLength(0); // Clear out the stream
        }

        zipFile.Close();
    }
}
/// <summary>
/// Sets output type, format and output stream; file output gets a version header,
/// otherwise results go to the console
/// </summary>
void ConfigOutput()
{
    WriteOnce.SafeLog("AnalyzeCommand::ConfigOutput", LogLevel.Trace);

    //Set output type, format and outstream
    _outputWriter = WriterFactory.GetWriter(_arg_fileFormat ?? "text", (string.IsNullOrEmpty(_arg_outputFile)) ? null : "text", _arg_outputTextFormat);
    if (!string.IsNullOrEmpty(_arg_outputFile))
    {
        if (_arg_fileFormat != "html")
        {
            _outputWriter.TextWriter = File.CreateText(_arg_outputFile);//not needed if html output since application controlled
        }

        //bug fix: null-conditional guard — for html the TextWriter above is never assigned
        //and an unconditional WriteLine would throw NullReferenceException
        _outputWriter.TextWriter?.WriteLine(Program.GetVersionString());
    }
    else
    {
        _outputWriter.TextWriter = Console.Out;
    }
}
/// <summary>
/// Writes the application profile to the active output writer and, for non-html
/// formats, flushes and closes it, then reports the output file location or a newline
/// </summary>
public void FlushAll()
{
    if (_outputWriter == null)
    {
        return;
    }

    _outputWriter.WriteApp(_appProfile);

    //html format manages its own (already closed) files, so nothing further to flush
    if (_outputWriter.TextWriter == null || _arg_fileFormat == "html")
    {
        return;
    }

    _outputWriter.FlushAndClose();
    _outputWriter = null;

    if (!String.IsNullOrEmpty(_arg_outputFile))
    {
        WriteOnce.Any(ErrMsg.FormatString(ErrMsg.ID.ANALYZE_OUTPUT_FILE, _arg_outputFile));
    }
    else
    {
        WriteOnce.NewLine();
    }
}
/// <summary>
/// Common validation called by ProcessAsFile and UnzipAndProcess to ensure same order and checks made:
/// known language, not on the exclusion list, and within the size limit
/// </summary>
/// <param name="filePath">path (or archive entry path) being checked</param>
/// <param name="languageInfo">receives the detected language on success</param>
/// <param name="fileLength">should be > zero if called from unzip method; otherwise read from disk</param>
/// <returns>true when the file should be analyzed</returns>
bool FileChecksPassed(string filePath, ref LanguageInfo languageInfo, long fileLength = 0)
{
    _appProfile.MetaData.FileExtensions.Add(Path.GetExtension(filePath).Replace('.', ' ').TrimStart());

    // 1. Skip files written in unknown language
    bool languageKnown = Language.FromFileName(filePath, ref languageInfo);
    if (!languageKnown)
    {
        WriteOnce.SafeLog(ErrMsg.FormatString(ErrMsg.ID.ANALYZE_LANGUAGE_NOTFOUND, filePath), LogLevel.Warn);
        _appProfile.MetaData.FilesSkipped++;
        return false;
    }

    _appProfile.MetaData.AddLanguage(languageInfo.Name);

    // 2. Skip excluded files i.e. sample, test or similar unless ignore filter requested
    bool excluded = _fileExclusionList.Any(v => filePath.ToLower().Contains(v));
    if (excluded)
    {
        WriteOnce.SafeLog(ErrMsg.FormatString(ErrMsg.ID.ANALYZE_EXCLUDED_TYPE_SKIPPED, filePath), LogLevel.Warn);
        _appProfile.MetaData.FilesSkipped++;
        return false;
    }

    // 3. Skip if exceeds file size limits
    try
    {
        if (fileLength <= 0)
        {
            fileLength = new FileInfo(filePath).Length;
        }

        if (fileLength > MAX_FILESIZE)
        {
            WriteOnce.SafeLog(ErrMsg.FormatString(ErrMsg.ID.ANALYZE_FILESIZE_SKIPPED, filePath), LogLevel.Warn);
            _appProfile.MetaData.FilesSkipped++;
            return false;
        }
    }
    catch (Exception)
    {
        throw new OpException(ErrMsg.FormatString(ErrMsg.ID.CMD_INVALID_FILE_OR_DIR, filePath));
    }

    return true;
}
/// <summary>
/// Simple wrapper but keeps calling code consistent
/// Do not html code result which is accomplished later before out put to report;
/// caps overly long samples and returns "" on indexing failure
/// </summary>
/// <param name="fileText">full file text being sampled</param>
/// <param name="index">start index of the sample</param>
/// <param name="length">requested sample length (capped at MAX_TEXT_SAMPLE_LENGTH)</param>
/// <returns>trimmed sample text, or "" on indexing failure</returns>
string ExtractTextSample(string fileText, int index, int length)
{
    string sample = "";

    //some js file results may be too long for practical display
    length = Math.Min(length, MAX_TEXT_SAMPLE_LENGTH);

    try
    {
        sample = fileText.Substring(index, length).Trim();
    }
    catch (Exception)
    {
        //control the error description and continue; error in rules engine possible
        WriteOnce.SafeLog("Unexpected indexing issue in ExtractTextSample. Process continued", LogLevel.Error);
    }

    return sample;
}
/// <summary>
/// Checks whether every tag in fileTags1 is present in fileTags2, reporting each
/// missing tag (or a "none missing" message) at high console verbosity
/// </summary>
/// <param name="fileTags1">tags expected to be present</param>
/// <param name="fileTags2">tags to search within</param>
/// <returns>true when no tags are missing</returns>
bool CompareTags(string[] fileTags1, string[] fileTags2)
{
    bool allPresent = true;

    //are all tags in file1 found in file2
    foreach (string tag in fileTags1)
    {
        if (fileTags2.Contains(tag))
        {
            continue;
        }

        allPresent = false;
        WriteOnce.Result(tag, true, WriteOnce.ConsoleVerbosity.High);
    }

    //none missing
    if (allPresent)
    {
        WriteOnce.Result(ErrMsg.GetString(ErrMsg.ID.TAGTEST_RESULTS_NONE), true, WriteOnce.ConsoleVerbosity.High);
    }

    return allPresent;
}
/// <summary>
/// Collects the unique set of tags across all rules and writes them in sorted order
/// </summary>
/// <returns>ExitCode.Success</returns>
public int Run()
{
    WriteOnce.Operation(ErrMsg.FormatString(ErrMsg.ID.CMD_RUNNING, "Exporttags"));

    //idiom fix: a SortedSet gives sorted unique values directly; the previous
    //SortedDictionary stored each tag as both key and value
    SortedSet<string> uniqueTags = new SortedSet<string>();
    foreach (Rule r in _rules)
    {
        //builds a list of unique tags; Add is a no-op for duplicates
        foreach (string t in r.Tags)
        {
            uniqueTags.Add(t);
        }
    }

    //separate loop so results are sorted (Sorted type)
    foreach (string s in uniqueTags)
    {
        WriteOnce.Result(s, true);
    }

    WriteOnce.Operation(ErrMsg.FormatString(ErrMsg.ID.CMD_COMPLETED, "Exporttags"));
    WriteOnce.FlushAll();
    if (!String.IsNullOrEmpty(_arg_outputFile))
    {
        WriteOnce.Any(ErrMsg.FormatString(ErrMsg.ID.ANALYZE_OUTPUT_FILE, _arg_outputFile), true, ConsoleColor.Gray, WriteOnce.ConsoleVerbosity.Low);
    }

    return ((int)ExitCode.Success);
}
/// <summary>
/// Wrapper for files that are on disk and ready to read to allow separation of core
/// scan evaluation for use by decompression methods as well
/// </summary>
/// <param name="filename">path of the file to analyze</param>
void ProcessAsFile(string filename)
{
    long fileSize;
    try
    {
        fileSize = new FileInfo(filename).Length;
    }
    catch (Exception)
    {
        throw new OpException(ErrMsg.FormatString(ErrMsg.ID.CMD_INVALID_FILE_OR_DIR, filename));
    }

    //skip files above the size limit rather than attempting a full read
    if (fileSize > MAX_FILESIZE)
    {
        WriteOnce.SafeLog(ErrMsg.FormatString(ErrMsg.ID.ANALYZE_FILESIZE_SKIPPED, filename), LogLevel.Warn);
        _appProfile.MetaData.FilesSkipped++;
        return;
    }

    _appProfile.MetaData.FileNames.Add(filename);
    _appProfile.MetaData.PackageTypes.Add(ErrMsg.GetString(ErrMsg.ID.ANALYZE_UNCOMPRESSED_FILETYPE));

    string contents = File.ReadAllText(filename);
    ProcessInMemory(filename, contents);
}
/// <summary>
/// Initializes metadata tracking for an application scan: derives an initial
/// ApplicationName from the source path, sets up the keyed property groups and
/// tag search patterns, and loads user tag-counter preferences if present
/// </summary>
/// <param name="sourcePath">file or directory being scanned</param>
/// <param name="rulePaths">rule file/directory paths used for the scan</param>
public AppMetaData(string sourcePath, List<string> rulePaths)
{
    //Initial value for ApplicationName may be replaced if rule pattern match found later
    if (Directory.Exists(sourcePath))
    {
        try
        {
            ApplicationName = sourcePath.Substring(sourcePath.LastIndexOf(Path.DirectorySeparatorChar)).Replace(Path.DirectorySeparatorChar, ' ').Trim();
        }
        catch (Exception)
        {
            ApplicationName = Path.GetFileNameWithoutExtension(sourcePath);
        }
    }
    else
    {
        ApplicationName = Path.GetFileNameWithoutExtension(sourcePath);
    }

    //initialize standard set groups using dynamic lists variables that may have more than one value; some are filled
    //using tag tests and others by different means like file type examination
    KeyedPropertyLists = new Dictionary<string, HashSet<string>>
    {
        ["strGrpRulePaths"] = rulePaths.ToHashSet(),
        ["strGrpPackageTypes"] = new HashSet<string>(),
        ["strGrpAppTypes"] = new HashSet<string>(),
        ["strGrpFileTypes"] = new HashSet<string>(),
        ["strGrpUniqueTags"] = new HashSet<string>(),
        ["strGrpOutputs"] = new HashSet<string>(),
        ["strGrpTargets"] = new HashSet<string>(),
        ["strGrpOSTargets"] = new HashSet<string>(),
        ["strGrpFileExtensions"] = new HashSet<string>(),
        ["strGrpFileNames"] = new HashSet<string>(),
        ["strGrpCPUTargets"] = new HashSet<string>(),
        ["strGrpCloudTargets"] = new HashSet<string>(),
        ["strGrpUniqueDependencies"] = new HashSet<string>()
    };

    //predefined standard tags to track; only some are propertygrouplist are tag based
    _propertyTagSearchPatterns = new Dictionary<string, string>();
    _propertyTagSearchPatterns.Add("strGrpOSTargets", ".OS.Targets");
    _propertyTagSearchPatterns.Add("strGrpCloudTargets", ".Cloud");
    _propertyTagSearchPatterns.Add("strGrpOutputs", ".Outputs");
    _propertyTagSearchPatterns.Add("strGrpCPUTargets", ".CPU");

    //read default/user preferences on what tags to count
    if (File.Exists(Utils.GetPath(Utils.AppPath.tagCounterPref)))
    {
        TagCounters = JsonConvert.DeserializeObject<List<TagCounter>>(File.ReadAllText(Utils.GetPath(Utils.AppPath.tagCounterPref)));
    }
    else
    {
        TagCounters = new List<TagCounter>();
    }

    //warn (but continue) when the preferences list the same tag more than once
    HashSet<string> dupCountersCheck = new HashSet<string>();
    foreach (TagCounter counter in TagCounters)
    {
        if (!dupCountersCheck.Add(counter.Tag))
        {
            //log message typo fixed: "Duplidate" -> "Duplicate"
            WriteOnce.SafeLog("Duplicate counter specified in preferences", NLog.LogLevel.Error);
        }
    }

    Languages = new Dictionary<string, int>();
}
/// <summary>
/// Loads each rules file separately to report out where a failure is happening,
/// optionally dumps partial rule detail at high verbosity, and reports overall
/// verification status
/// </summary>
/// <returns>ExitCode.Verified or ExitCode.NotVerified</returns>
public int Run()
{
    bool issues = false;

    WriteOnce.Operation(ErrMsg.FormatString(ErrMsg.ID.CMD_RUNNING, "Verify Rules"));

    //load [each] rules file separately to report out where a failure is happening
    RuleSet rules = new RuleSet(WriteOnce.Log);
    IEnumerable<string> fileListing = new List<string>();
    foreach (string rulePath in _rulePaths)
    {
        if (Directory.Exists(rulePath))
        {
            fileListing = Directory.EnumerateFiles(rulePath, "*.json", SearchOption.AllDirectories);
        }
        else if (File.Exists(rulePath) && Path.GetExtension(rulePath) == ".json")
        {
            //fix: use the path directly ('new string(rulePath)' was a pointless copy) and
            //keep the statement terminator inside the block rather than before 'else'
            fileListing = new List<string>() { rulePath };
        }
        else
        {
            throw new OpException(ErrMsg.FormatString(ErrMsg.ID.CMD_INVALID_RULE_PATH, rulePath));
        }

        //test loading each file
        foreach (string filename in fileListing)
        {
            try
            {
                rules.AddFile(filename);
                WriteOnce.Info(string.Format("Rule file added {0}", filename), true, WriteOnce.ConsoleVerbosity.High);
            }
            catch (Exception e)
            {
                WriteOnce.Error(string.Format("Rule file add failed {0}", filename));
                WriteOnce.SafeLog(e.Message + "\n" + e.StackTrace, NLog.LogLevel.Error);
                issues = true;
            }
        }
    }

    //option to write validating data
    if (_arg_consoleVerbosityLevel == WriteOnce.ConsoleVerbosity.High)
    {
        WritePartialRuleDetails(rules);
    }

    //final status report
    if (issues)
    {
        WriteOnce.Any(ErrMsg.GetString(ErrMsg.ID.VERIFY_RULES_RESULTS_FAIL), true, ConsoleColor.Red, WriteOnce.ConsoleVerbosity.Low);
    }
    else
    {
        WriteOnce.Any(ErrMsg.GetString(ErrMsg.ID.VERIFY_RULES_RESULTS_SUCCESS), true, ConsoleColor.Green, WriteOnce.ConsoleVerbosity.Low);
    }

    WriteOnce.Operation(ErrMsg.FormatString(ErrMsg.ID.CMD_COMPLETED, "Verify Rules"));
    WriteOnce.FlushAll();
    if (!String.IsNullOrEmpty(_arg_outputFile))
    {
        WriteOnce.Any(ErrMsg.FormatString(ErrMsg.ID.ANALYZE_OUTPUT_FILE, _arg_outputFile), true, ConsoleColor.Gray, WriteOnce.ConsoleVerbosity.Low);
    }

    return (issues ? (int)ExitCode.NotVerified : (int)ExitCode.Verified);
}
/// <summary>
/// Main WORKHORSE for analyzing file; called from file based or decompression functions
/// once FileChecksPassed has resolved the language
/// </summary>
/// <param name="filePath">path (or archive entry path) used for reporting and logging</param>
/// <param name="fileText">full text content to scan against the rules</param>
/// <param name="languageInfo">language already detected for this file</param>
void ProcessInMemory(string filePath, string fileText, LanguageInfo languageInfo)
{
    #region minorRollupTrackingAndProgress

    WriteOnce.SafeLog("Preparing to process file: " + filePath, LogLevel.Trace);

    _appProfile.MetaData.FilesAnalyzed++;

    //progress is reported against the pre-counted total; skipped files count toward progress too
    int totalFilesReviewed = _appProfile.MetaData.FilesAnalyzed + _appProfile.MetaData.FilesSkipped;
    int percentCompleted = (int)((float)totalFilesReviewed / (float)_appProfile.MetaData.TotalFiles * 100);
    //earlier issue now resolved so app handles mixed zipped/zipped and unzipped/zipped directories but catch all for non-critical UI
    if (percentCompleted > 100)
    {
        percentCompleted = 100;
    }

    WriteOnce.General("\r" + ErrMsg.FormatString(ErrMsg.ID.ANALYZE_FILES_PROCESSED_PCNT, percentCompleted), false);

    #endregion

    //process file against rules
    Issue[] matches = _rulesProcessor.Analyze(fileText, languageInfo);

    //if any matches found for this file...
    if (matches.Count() > 0)
    {
        _appProfile.MetaData.FilesAffected++;
        _appProfile.MetaData.TotalMatchesCount += matches.Count();

        // Iterate through each match issue
        foreach (Issue match in matches)
        {
            WriteOnce.SafeLog(string.Format("Processing pattern matches for ruleId {0}, ruleName {1} file {2}", match.Rule.Id, match.Rule.Name, filePath), LogLevel.Trace);

            //maintain a list of unique tags; multi-purpose but primarily for filtering -d option
            //NOTE(review): dupTagFound is overwritten each iteration so only the LAST tag's
            //add-result survives; if "any tag is a duplicate" was intended this should OR the
            //results — confirm before changing, as it alters -d filtering output
            bool dupTagFound = false;
            foreach (string t in match.Rule.Tags)
            {
                dupTagFound = !_uniqueTagsControl.Add(t);
            }

            //save all unique dependencies even if Dependency tag pattern is not-unique
            var tagPatternRegex = new Regex("Dependency.SourceInclude", RegexOptions.IgnoreCase);
            String textMatch;
            if (match.Rule.Tags.Any(v => tagPatternRegex.IsMatch(v)))
            {
                //dependency matches get special extraction to capture the include target
                textMatch = ExtractDependency(fileText, match.Boundary.Index, match.PatternMatch, languageInfo.Name);
            }
            else
            {
                textMatch = ExtractTextSample(fileText, match.Boundary.Index, match.Boundary.Length);
            }

            //wrap rule issue result to add metadata
            MatchRecord record = new MatchRecord()
            {
                Filename = filePath,
                Language = languageInfo,
                Filesize = fileText.Length,
                TextSample = textMatch,
                Excerpt = ExtractExcerpt(fileText, match.StartLocation.Line),
                Issue = match
            };

            //preserve issue level characteristics as rolled up meta data of interest
            bool addAsFeatureMatch = _appProfile.MetaData.AddStandardProperties(ref record);

            //bail after extracting any dependency unique items IF user requested
            if (_arg_outputUniqueTagsOnly && dupTagFound)
            {
                continue;
            }
            else if (addAsFeatureMatch)
            {
                _appProfile.MatchList.Add(record);
            }
        }
    }
    else
    {
        WriteOnce.SafeLog("No pattern matches detected for file: " + filePath, LogLevel.Trace);
    }
}
/// <summary>
/// Dispatches a compressed file to the matching decompression handler based on
/// extension and mime type; records the package type and skips unsupported formats
/// </summary>
/// <param name="filename">path of the (possibly compressed) file on disk</param>
/// <exception cref="OpException">thrown when the file does not exist</exception>
void UnZipAndProcess(string filename)
{
    if (!File.Exists(filename))
    {
        throw new OpException(ErrMsg.FormatString(ErrMsg.ID.CMD_INVALID_FILE_OR_DIR, filename));
    }

    // Ignore images and other junk like that
    var fileExtension = new FileInfo(filename).Extension;
    var mimeType = MimeTypeMap.GetMimeType(fileExtension);
    bool mimeMatch = false;
    if (!IgnoreMimeRegex.IsMatch(mimeType))
    {
        var isValidExtension = COMPRESSED_EXTENSIONS.Any(fileExtension.Contains);

        //bug fix: FileInfo.Extension includes the leading dot so comparing to "ts" never
        //matched; ".ts" lets TypeScript files through despite their video/mp2t mime type
        if (isValidExtension || fileExtension == ".ts")
        {
            mimeMatch = true;
        }
        else if (mimeType.Contains("zip", StringComparison.CurrentCultureIgnoreCase) || // Should have been caught in file extensions above, but still OK.
            mimeType.Contains("tar", StringComparison.CurrentCultureIgnoreCase) ||
            mimeType.Contains("compressed", StringComparison.CurrentCultureIgnoreCase))
        {
            mimeMatch = true;
        }

        if (mimeMatch)
        {
            // Now process the file
            switch (fileExtension)
            {
                case ".tgz":
                    ProcessTarGzFile(filename);
                    break;

                case ".gz":
                    if (filename.Contains(".tar.gz"))
                    {
                        ProcessTarGzFile(filename);
                    }
                    else
                    {
                        WriteOnce.SafeLog("no support for .gz unless .tar.gz: " + fileExtension, LogLevel.Warn);
                        _appProfile.MetaData.PackageTypes.Add("compressed-unsupported");
                    }
                    break;

                case ".jar":
                case ".zip":
                    ProcessZipFile(filename);
                    break;

                case ".gem":
                case ".tar":
                case ".nupkg":
                    WriteOnce.SafeLog($"Processing of {fileExtension} not implemented yet.", LogLevel.Warn);
                    break;

                default:
                    WriteOnce.SafeLog("no support for compressed type: " + fileExtension, LogLevel.Warn);
                    break;
            }

            _appProfile.MetaData.PackageTypes.Add("compressed");
        }
        else
        {
            _appProfile.MetaData.PackageTypes.Add("compressed-unsupported");
        }
    }
}
/// <summary>
/// Main WORKHORSE for analyzing file; called from file based or decompression functions.
/// Validates size/exclusions/language, updates rollup metadata and progress, then runs the
/// rules processor over the file text and records match results.
/// </summary>
/// <param name="filePath">Path of the file being analyzed (used for logging and metadata)</param>
/// <param name="fileText">Full text content of the file to scan</param>
void ProcessInMemory(string filePath, string fileText)
{
    #region quickvalidation
    if (fileText.Length > MAX_FILESIZE)
    {
        WriteOnce.SafeLog("File too large: " + filePath, LogLevel.Trace);
        WriteOnce.SafeLog(ErrMsg.FormatString(ErrMsg.ID.ANALYZE_FILESIZE_SKIPPED, filePath), LogLevel.Error);
        _appProfile.MetaData.FilesSkipped++;
        return;
    }

    // Exclude sample/test/etc. paths unless the user opted in to scanning them
    if (!_arg_allowSampleFiles && _fileExclusionList.Any(v => filePath.Contains(v)))
    {
        WriteOnce.SafeLog("Part of excluded list: " + filePath, LogLevel.Trace);
        WriteOnce.SafeLog(ErrMsg.FormatString(ErrMsg.ID.ANALYZE_FILESIZE_SKIPPED, filePath), LogLevel.Error);
        _appProfile.MetaData.FilesSkipped++;
        return;
    }

    //determine if file is a compressed item to unpackage for processing
    string language = Language.FromFileName(filePath);

    // Skip files written in unknown language
    if (string.IsNullOrEmpty(language))
    {
        // BUGFIX: removed dead store "language = Path.GetFileName(filePath);" --
        // the method returned immediately, so the assignment had no effect.
        WriteOnce.SafeLog("Language not found for file: " + filePath, LogLevel.Trace);
        _appProfile.MetaData.FilesSkipped++;
        return;
    }
    else
    {
        WriteOnce.SafeLog("Preparing to process file: " + filePath, LogLevel.Trace);
    }
    #endregion

    #region minorRollupTrackingAndProgress
    _appProfile.MetaData.FilesAnalyzed++;
    _appProfile.MetaData.AddLanguage(language);
    _appProfile.MetaData.FileExtensions.Add(Path.GetExtension(filePath).Replace('.', ' ').TrimStart());
    LastUpdated = File.GetLastWriteTime(filePath);

    int totalFilesReviewed = _appProfile.MetaData.FilesAnalyzed + _appProfile.MetaData.FilesSkipped;
    int percentCompleted = (int)((float)totalFilesReviewed / (float)_appProfile.MetaData.TotalFiles * 100);
    WriteOnce.General("\r" + ErrMsg.FormatString(ErrMsg.ID.ANALYZE_FILES_PROCESSED_PCNT, percentCompleted), false);
    #endregion

    //process file against rules
    Issue[] matches = _rulesProcessor.Analyze(fileText, language);

    //if any matches found for this file...
    if (matches.Length > 0) // IDIOM: .Length on the array instead of LINQ Count()
    {
        _appProfile.MetaData.FilesAffected++;
        _appProfile.MetaData.TotalMatchesCount += matches.Length;
        HashSet<string> uniqueTagsControl = new HashSet<string>();

        // PERF: loop-invariant regex hoisted out of the per-match loop
        var tagPatternRegex = new Regex("Dependency.SourceInclude", RegexOptions.IgnoreCase);

        // Iterate through each match issue
        foreach (Issue match in matches)
        {
            WriteOnce.SafeLog(string.Format("Processing pattern matches for ruleId {0}, ruleName {1} file {2}", match.Rule.Id, match.Rule.Name, filePath), LogLevel.Trace);

            //maintain a list of unique tags; multi-purpose but primarily for filtering -u option
            // NOTE(review): only the Add() result for the rule's LAST tag is kept; if
            // "any tag already seen" semantics were intended this should use |= -- confirm.
            bool dupTagFound = false;
            foreach (string t in match.Rule.Tags)
            {
                dupTagFound = !uniqueTagsControl.Add(t);
            }

            //save all unique dependencies even if Dependency tag pattern is not-unique
            String textMatch;
            if (match.Rule.Tags.Any(v => tagPatternRegex.IsMatch(v)))
            {
                textMatch = ExtractDependency(fileText, match.Boundary.Index, match.PatternMatch, language);
            }
            else
            {
                textMatch = ExtractTextSample(fileText, match.Boundary.Index, match.Boundary.Length);
            }

            //wrap rule issue result to add metadata
            MatchRecord record = new MatchRecord()
            {
                Filename = filePath,
                Language = language,
                Filesize = fileText.Length,
                TextSample = textMatch,
                Excerpt = ExtractExcerpt(fileText, match.StartLocation.Line),
                Issue = match
            };

            //preserve issue level characteristics as rolled up meta data of interest
            bool valid = _appProfile.MetaData.AddStandardProperties(record);

            //bail after extracting any dependency unique items IF user requested
            if (_arg_outputUniqueTagsOnly && dupTagFound)
            {
                continue;
            }
            else if (valid)
            {
                _appProfile.MatchList.Add(record);
            }
        }
    }
    else
    {
        WriteOnce.SafeLog("No pattern matches detected for file: " + filePath, LogLevel.Trace);
    }
}
/// <summary>
/// Runs two quiet Analyze passes (one per source path) and compares the resulting tag
/// sets in both directions, then evaluates the requested equality/inequality test.
/// </summary>
/// <returns>(int)ExitCode.TestPassed or (int)ExitCode.TestFailed</returns>
public int Run()
{
    WriteOnce.Operation(ErrMsg.FormatString(ErrMsg.ID.CMD_RUNNING, "Tagdiff"));

    //setup output
    TextWriter outputWriter;
    if (!string.IsNullOrEmpty(_arg_outputFile))
    {
        outputWriter = File.CreateText(_arg_outputFile);
        outputWriter.WriteLine(Program.GetVersionString());
        WriteOnce.Writer = outputWriter;
    }

    if (_arg_src1 == _arg_src2)
    {
        throw new OpException(ErrMsg.GetString(ErrMsg.ID.TAGDIFF_SAME_FILE_ARG));
    }
    else if (string.IsNullOrEmpty(_arg_src1) || string.IsNullOrEmpty(_arg_src2))
    {
        throw new OpException(ErrMsg.GetString(ErrMsg.ID.CMD_INVALID_ARG_VALUE));
    }

    #region setup analyze calls

    //save to quiet analyze cmd; restored in finally below
    WriteOnce.ConsoleVerbosity saveVerbosity = WriteOnce.Verbosity;

    string tmp1 = Path.GetTempFileName();
    string tmp2 = Path.GetTempFileName();

    AnalyzeCommand.ExitCode result1 = AnalyzeCommand.ExitCode.CriticalError;
    AnalyzeCommand.ExitCode result2 = AnalyzeCommand.ExitCode.CriticalError;

    try
    {
        AnalyzeCommand cmd1 = new AnalyzeCommand(new AnalyzeCommandOptions
        {
            SourcePath = _arg_src1,
            OutputFilePath = tmp1,
            OutputFileFormat = "json",
            CustomRulesPath = _arg_rulesPath,
            IgnoreDefaultRules = _arg_ignoreDefault,
            SimpleTagsOnly = true,
            AllowDupTags = false,
            FilePathExclusions = "sample,example,test,docs,.vs,.git",
            ConsoleVerbosityLevel = "None"
        });
        AnalyzeCommand cmd2 = new AnalyzeCommand(new AnalyzeCommandOptions
        {
            SourcePath = _arg_src2,
            OutputFilePath = tmp2,
            OutputFileFormat = "json",
            CustomRulesPath = _arg_rulesPath,
            IgnoreDefaultRules = _arg_ignoreDefault,
            SimpleTagsOnly = true,
            AllowDupTags = false,
            FilePathExclusions = "sample,example,test,docs,.vs,.git",
            ConsoleVerbosityLevel = "None"
        });

        result1 = (AnalyzeCommand.ExitCode)cmd1.Run();
        result2 = (AnalyzeCommand.ExitCode)cmd2.Run();
    }
    finally
    {
        // BUGFIX: original used catch { restore; throw e; } plus a duplicate restore on the
        // success path; "throw e" also reset the exception's stack trace. A single finally
        // restores verbosity on both paths and lets exceptions propagate untouched.
        WriteOnce.Verbosity = saveVerbosity;
    }

    #endregion

    bool successResult;
    bool equal1 = true;
    bool equal2 = true;

    //process results for each analyze call before comparing results
    if (result1 == AnalyzeCommand.ExitCode.CriticalError)
    {
        throw new OpException(ErrMsg.FormatString(ErrMsg.ID.CMD_CRITICAL_FILE_ERR, _arg_src1));
    }
    else if (result2 == AnalyzeCommand.ExitCode.CriticalError)
    {
        throw new OpException(ErrMsg.FormatString(ErrMsg.ID.CMD_CRITICAL_FILE_ERR, _arg_src2));
    }
    else if (result1 == AnalyzeCommand.ExitCode.NoMatches || result2 == AnalyzeCommand.ExitCode.NoMatches)
    {
        throw new OpException(ErrMsg.GetString(ErrMsg.ID.TAGDIFF_NO_TAGS_FOUND));
    }
    else //compare tag results; assumed (result1&2 == AnalyzeCommand.ExitCode.MatchesFound)
    {
        string file1TagsJson = File.ReadAllText(tmp1);
        string file2TagsJson = File.ReadAllText(tmp2);
        var file1Tags = JsonConvert.DeserializeObject<TagsFile[]>(file1TagsJson).First();
        var file2Tags = JsonConvert.DeserializeObject<TagsFile[]>(file2TagsJson).First();

        //can't simply compare counts as content may differ; must compare both in directions in two passes a->b; b->a
        //first pass
        WriteOnce.General(ErrMsg.FormatString(ErrMsg.ID.TAGDIFF_RESULTS_GAP, Path.GetFileName(_arg_src1), Path.GetFileName(_arg_src2)), true, WriteOnce.ConsoleVerbosity.High);
        equal1 = CompareTags(file1Tags.Tags, file2Tags.Tags);

        //reverse order for second pass
        WriteOnce.General(ErrMsg.FormatString(ErrMsg.ID.TAGDIFF_RESULTS_GAP, Path.GetFileName(_arg_src2), Path.GetFileName(_arg_src1)), true, WriteOnce.ConsoleVerbosity.High);
        equal2 = CompareTags(file2Tags.Tags, file1Tags.Tags);

        //final results: pass iff the observed difference matches the requested test type
        bool resultsDiffer = !(equal1 && equal2);
        if (_arg_tagTestType == TagTestType.Inequality && resultsDiffer)
        {
            successResult = true;
        }
        else if (_arg_tagTestType == TagTestType.Equality && !resultsDiffer)
        {
            successResult = true;
        }
        else
        {
            successResult = false;
        }

        WriteOnce.General(ErrMsg.GetString(ErrMsg.ID.TAGDIFF_RESULTS_DIFFER), false);
        WriteOnce.Result(resultsDiffer.ToString());
    }

    //cleanup of temp analyze outputs; best-effort only
    try
    {
        File.Delete(tmp1);
        File.Delete(tmp2);
    }
    catch
    {
        //no action needed;
    }

    WriteOnce.FlushAll();
    WriteOnce.Operation(ErrMsg.FormatString(ErrMsg.ID.CMD_COMPLETED, "Tagdiff"));

    return (successResult ? (int)ExitCode.TestPassed : (int)ExitCode.TestFailed);
}
/// <summary>
/// Main WORKHORSE for analyzing file; called from file based or decompression functions.
/// Validates size/exclusions/language, updates rollup metadata and progress, then runs the
/// rules processor over the file text and records match results.
/// </summary>
/// <param name="filePath">Path of the file being analyzed (used for logging and metadata)</param>
/// <param name="fileText">Full text content of the file to scan</param>
void ProcessInMemory(string filePath, string fileText)
{
    #region quickvalidation
    if (fileText.Length > MAX_FILESIZE)
    {
        WriteOnce.SafeLog(ErrMsg.FormatString(ErrMsg.ID.ANALYZE_FILESIZE_SKIPPED, filePath), LogLevel.Warn);
        _appProfile.MetaData.FilesSkipped++;
        return;
    }

    //exclude sample, test or similar files by default or as specified in exclusion list
    if (!_arg_allowSampleFiles && _fileExclusionList.Any(v => filePath.ToLower().Contains(v)))
    {
        WriteOnce.SafeLog("Part of excluded list: " + filePath, LogLevel.Trace);
        WriteOnce.SafeLog(ErrMsg.FormatString(ErrMsg.ID.ANALYZE_FILESIZE_SKIPPED, filePath), LogLevel.Trace);
        _appProfile.MetaData.FilesSkipped++;
        return;
    }

    //check for supported language
    LanguageInfo languageInfo = new LanguageInfo();
    // Skip files written in unknown language
    if (!Language.FromFileName(filePath, ref languageInfo))
    {
        WriteOnce.SafeLog("Language not found for file: " + filePath, LogLevel.Trace);
        _appProfile.MetaData.FilesSkipped++;
        return;
    }
    else
    {
        WriteOnce.SafeLog("Preparing to process file: " + filePath, LogLevel.Trace);
    }
    #endregion

    #region minorRollupTrackingAndProgress
    _appProfile.MetaData.FilesAnalyzed++;
    _appProfile.MetaData.AddLanguage(languageInfo.Name);
    _appProfile.MetaData.FileExtensions.Add(Path.GetExtension(filePath).Replace('.', ' ').TrimStart());
    LastUpdated = File.GetLastWriteTime(filePath);

    int totalFilesReviewed = _appProfile.MetaData.FilesAnalyzed + _appProfile.MetaData.FilesSkipped;
    int percentCompleted = (int)((float)totalFilesReviewed / (float)_appProfile.MetaData.TotalFiles * 100);
    //reported: if a zip contains more zip files in it the total count may be off -complex. ~workaround: freeze UI
    if (percentCompleted > 100)
    {
        percentCompleted = 100;
    }

    WriteOnce.General("\r" + ErrMsg.FormatString(ErrMsg.ID.ANALYZE_FILES_PROCESSED_PCNT, percentCompleted), false);
    #endregion

    //process file against rules
    Issue[] matches = _rulesProcessor.Analyze(fileText, languageInfo.Name);

    //if any matches found for this file...
    if (matches.Length > 0) // IDIOM: .Length on the array instead of LINQ Count()
    {
        _appProfile.MetaData.FilesAffected++;
        _appProfile.MetaData.TotalMatchesCount += matches.Length;

        // PERF: loop-invariant regex hoisted out of the per-match loop
        var tagPatternRegex = new Regex("Dependency.SourceInclude", RegexOptions.IgnoreCase);

        // Iterate through each match issue
        foreach (Issue match in matches)
        {
            WriteOnce.SafeLog(string.Format("Processing pattern matches for ruleId {0}, ruleName {1} file {2}", match.Rule.Id, match.Rule.Name, filePath), LogLevel.Trace);

            //do not accept features from build type files (only metadata) to avoid false positives that are not part of the executable program
            if (languageInfo.Type == LanguageInfo.LangFileType.Build && match.Rule.Tags.Any(v => !v.Contains("Metadata")))
            {
                continue;
            }

            //maintain a list of unique tags; multi-purpose but primarily for filtering -d option
            // NOTE(review): only the Add() result for the rule's LAST tag is kept; if
            // "any tag already seen" semantics were intended this should use |= -- confirm.
            bool dupTagFound = false;
            foreach (string t in match.Rule.Tags)
            {
                dupTagFound = !_uniqueTagsControl.Add(t);
            }

            //save all unique dependencies even if Dependency tag pattern is not-unique
            String textMatch;
            if (match.Rule.Tags.Any(v => tagPatternRegex.IsMatch(v)))
            {
                textMatch = ExtractDependency(fileText, match.Boundary.Index, match.PatternMatch, languageInfo.Name);
            }
            else
            {
                textMatch = ExtractTextSample(fileText, match.Boundary.Index, match.Boundary.Length);
            }

            //wrap rule issue result to add metadata
            MatchRecord record = new MatchRecord()
            {
                Filename = filePath,
                Language = languageInfo,
                Filesize = fileText.Length,
                TextSample = textMatch,
                Excerpt = ExtractExcerpt(fileText, match.StartLocation.Line),
                Issue = match
            };

            //preserve issue level characteristics as rolled up meta data of interest
            bool addAsFeatureMatch = _appProfile.MetaData.AddStandardProperties(record);

            //bail after extracting any dependency unique items IF user requested
            if (_arg_outputUniqueTagsOnly && dupTagFound)
            {
                continue;
            }
            else if (addAsFeatureMatch)
            {
                _appProfile.MatchList.Add(record);
            }
        }
    }
    else
    {
        WriteOnce.SafeLog("No pattern matches detected for file: " + filePath, LogLevel.Trace);
    }
}
/// <summary>
/// Add default and/or custom rules paths
/// Iterate paths and add to ruleset, then construct the rule processor and app profile.
/// </summary>
/// <exception cref="OpException">Thrown when a rule path is invalid or no rules are loaded</exception>
void ConfigRules()
{
    WriteOnce.SafeLog("AnalyzeCommand::ConfigRules", LogLevel.Trace);

    RuleSet rulesSet = new RuleSet(Program.Logger);
    List<string> rulePaths = new List<string>();

    if (!_arg_ignoreDefaultRules)
    {
        rulePaths.Add(Utils.GetPath(Utils.AppPath.defaultRules));
    }

    if (!string.IsNullOrEmpty(_arg_customRulesPath))
    {
        rulePaths.Add(_arg_customRulesPath);
    }

    foreach (string rulePath in rulePaths)
    {
        if (Directory.Exists(rulePath))
        {
            rulesSet.AddDirectory(rulePath);
        }
        else if (File.Exists(rulePath))
        {
            rulesSet.AddFile(rulePath);
        }
        else
        {
            throw new OpException(ErrMsg.FormatString(ErrMsg.ID.CMD_INVALID_RULE_PATH, rulePath));
        }
    }

    //error check based on ruleset not path enumeration
    if (rulesSet.Count() == 0)
    {
        throw new OpException(ErrMsg.GetString(ErrMsg.ID.CMD_NORULES_SPECIFIED));
    }

    //instantiate a RuleProcessor with the added rules and exception for dependency
    _rulesProcessor = new RuleProcessor(rulesSet, _arg_confidence, _arg_outputUniqueTagsOnly, _arg_simpleTagsOnly, Program.Logger);

    if (_arg_outputUniqueTagsOnly)
    {
        if (File.Exists(Utils.GetPath(Utils.AppPath.tagCounterPref)))
        {
            List<TagException> tagExceptions = JsonConvert.DeserializeObject<List<TagException>>(File.ReadAllText(Utils.GetPath(Utils.AppPath.tagCounterPref)));
            // IDIOM: project the exception tags with LINQ instead of the manual index loop
            _rulesProcessor.UniqueTagExceptions = tagExceptions.Select(t => t.Tag).ToArray();
        }
    }

    _appProfile = new AppProfile(_arg_sourcePath, rulePaths, false, _arg_simpleTagsOnly, _arg_outputUniqueTagsOnly);
    _appProfile.Args = "analyze -f " + _arg_fileFormat + " -u " + _arg_outputUniqueTagsOnly.ToString().ToLower() +
        " -v " + WriteOnce.Verbosity.ToString() + " -x " + _arg_confidence + " -i " + _arg_ignoreDefaultRules.ToString().ToLower();
}
readonly int MAX_HTML_REPORT_FILE_SIZE = 1024 * 1000 * 3; //warn about potential slow rendering

/// <summary>
/// Registers datatypes with html framework liquid and sets up data for use within it and used
/// with html partial.liquid files that are embedded as resources; renders output.html and a
/// companion output.json report, warning if the HTML is large, then opens the report.
/// </summary>
/// <param name="app">Analyzed application profile to render</param>
public override void WriteApp(AppProfile app)
{
    var htmlTemplateText = File.ReadAllText(Path.Combine(Utils.GetPath(Utils.AppPath.basePath), "html/index.html"));
    // BUGFIX: removed unused local "Assembly test = Assembly.GetEntryAssembly();"
    Template.FileSystem = new EmbeddedFileSystem(Assembly.GetEntryAssembly(), "ApplicationInspector.html.partials");

    RegisterSafeType(typeof(AppProfile));
    RegisterSafeType(typeof(AppMetaData));

    var htmlTemplate = Template.Parse(htmlTemplateText);
    var data = new Dictionary<string, object>();
    data["AppProfile"] = app;

    //matchitems rather than records created to exclude full rule/patterns/cond.
    List<MatchItems> matches = new List<MatchItems>();
    foreach (MatchRecord match in app.MatchList)
    {
        MatchItems matchItem = new MatchItems(match);
        matches.Add(matchItem);
    }

    data["matchDetails"] = matches;

    var hashData = new Hash();
    hashData["json"] = JsonConvert.SerializeObject(data, Formatting.Indented);
    hashData["application_version"] = Program.GetVersionString();

    //add dynamic sets of groups of taginfo read from preferences for Profile page
    List<TagGroup> tagGroupList = app.GetCategoryTagGroups("profile");
    hashData["groups"] = tagGroupList;

    //add summary values for sorted tags lists of taginfo
    foreach (string outerKey in app.KeyedSortedTagInfoLists.Keys)
    {
        hashData.Add(outerKey, app.KeyedSortedTagInfoLists[outerKey]);
    }

    //add summary metadata lists
    hashData["cputargets"] = app.MetaData.CPUTargets;
    hashData["apptypes"] = app.MetaData.AppTypes;
    hashData["packagetypes"] = app.MetaData.PackageTypes;
    hashData["ostargets"] = app.MetaData.OSTargets;
    hashData["outputs"] = app.MetaData.Outputs;
    hashData["filetypes"] = app.MetaData.FileExtensions;
    hashData["tagcounters"] = app.MetaData.TagCountersUI;

    var htmlResult = htmlTemplate.Render(hashData);
    string htmlOutputFilePath = Path.Combine(Utils.GetPath(Utils.AppPath.basePath), "output.html");
    File.WriteAllText(htmlOutputFilePath, htmlResult);

    //writes out json report for convenience and linking to from report page(s)
    String jsonReportPath = Path.Combine(Utils.GetPath(Utils.AppPath.basePath), "output.json");
    Writer jsonWriter = WriterFactory.GetWriter("json", jsonReportPath);
    jsonWriter.TextWriter = File.CreateText(jsonReportPath);
    jsonWriter.WriteApp(app);
    jsonWriter.FlushAndClose();

    //html report size warning
    // CONSISTENCY: reuse htmlOutputFilePath instead of rebuilding the identical path into
    // a second local ("outputHTMLPath" in the original)
    if (File.Exists(htmlOutputFilePath) && new FileInfo(htmlOutputFilePath).Length > MAX_HTML_REPORT_FILE_SIZE)
    {
        WriteOnce.Info(ErrMsg.GetString(ErrMsg.ID.ANALYZE_REPORTSIZE_WARN));
    }

    Utils.OpenBrowser(htmlOutputFilePath);
}
/// <summary>
/// Main entry point to start analysis; handles setting up rules, directory enumeration
/// file type detection and handoff
/// Pre: All Configure Methods have been called already and we are ready to SCAN
/// </summary>
/// <returns>(int)ExitCode.NoMatches or (int)ExitCode.MatchesFound</returns>
public int Run()
{
    WriteOnce.SafeLog("AnalyzeCommand::Run", LogLevel.Trace);

    DateTime start = DateTime.Now;
    WriteOnce.Operation(ErrMsg.FormatString(ErrMsg.ID.CMD_RUNNING, "Analyze"));

    _appProfile.MetaData.TotalFiles = _srcfileList.Count(); //updated for zipped files later

    // Iterate through all files and process against rules
    foreach (string filename in _srcfileList)
    {
        //exclude sample, test or similar files by default or as specified in exclusion list
        if (!_arg_allowSampleFiles && _fileExclusionList.Any(v => filename.ToLower().Contains(v)))
        {
            WriteOnce.SafeLog("Part of excluded list: " + filename, LogLevel.Trace);
            WriteOnce.SafeLog(ErrMsg.FormatString(ErrMsg.ID.ANALYZE_FILESIZE_SKIPPED, filename), LogLevel.Trace);
            _appProfile.MetaData.FilesSkipped++;
            continue;
        }

        // Archives get unpacked and each member scanned; everything else is scanned directly
        ArchiveFileType archiveFileType = MiniMagic.DetectFileType(filename);
        if (archiveFileType == ArchiveFileType.UNKNOWN)
        {
            ProcessAsFile(filename);
        }
        else
        {
            UnZipAndProcess(filename, archiveFileType);
        }
    }

    WriteOnce.General("\r" + ErrMsg.FormatString(ErrMsg.ID.ANALYZE_FILES_PROCESSED_PCNT, 100));
    WriteOnce.Operation(ErrMsg.GetString(ErrMsg.ID.CMD_PREPARING_REPORT));

    //Prepare report results
    _appProfile.MetaData.LastUpdated = LastUpdated.ToString();
    _appProfile.DateScanned = DateScanned.ToString();
    _appProfile.PrepareReport();

    // IDIOM: compute elapsed time directly (now - start) instead of the original
    // reversed subtraction negated with "* -1"; the logged value is identical.
    TimeSpan timeSpan = DateTime.Now - start;
    WriteOnce.SafeLog(String.Format("Processing time: seconds:{0}", timeSpan.TotalSeconds), LogLevel.Trace);

    FlushAll();

    //wrapup result status
    if (_appProfile.MetaData.TotalFiles == _appProfile.MetaData.FilesSkipped)
    {
        WriteOnce.Error(ErrMsg.GetString(ErrMsg.ID.ANALYZE_NOSUPPORTED_FILETYPES));
    }
    else if (_appProfile.MatchList.Count == 0) // IDIOM: Count property, not LINQ Count()
    {
        WriteOnce.Error(ErrMsg.GetString(ErrMsg.ID.ANALYZE_NOPATTERNS));
    }
    else
    {
        WriteOnce.Operation(ErrMsg.FormatString(ErrMsg.ID.CMD_COMPLETED, "Analyze"));
    }

    return (_appProfile.MatchList.Count == 0 ? (int)ExitCode.NoMatches : (int)ExitCode.MatchesFound);
}