/// <summary>
/// Analyzes a directory of files (or a single file) for cryptographic implementations.
/// </summary>
/// <param name="directory">Directory (or single file) to analyze.</param>
/// <returns>
/// List of issues identified. Empty when no rules could be loaded, the path is neither
/// a directory nor a file, or (partially filled) when an analysis operation times out.
/// </returns>
public async Task<List<IssueRecord>> AnalyzeDirectory(string directory)
{
    Logger.Trace("AnalyzeDirectory({0})", directory);
    List<IssueRecord> analysisResults = new List<IssueRecord>();

    RuleSet rules = new RuleSet(null);

    // Loads every embedded *.json rule resource from the given assembly into 'rules'.
    // Failures on individual resources are logged and skipped (best-effort).
    void LoadEmbeddedRules(Assembly ruleAssembly)
    {
        foreach (string resourceName in ruleAssembly.GetManifestResourceNames())
        {
            if (!resourceName.EndsWith(".json"))
            {
                continue;
            }
            try
            {
                Stream? stream = ruleAssembly.GetManifestResourceStream(resourceName);
                using StreamReader resourceStream = new StreamReader(stream ?? new MemoryStream());
                rules.AddString(resourceStream.ReadToEnd(), resourceName);
            }
            catch (Exception ex)
            {
                Logger.Warn(ex, "Error loading {0}: {1}", resourceName, ex.Message);
            }
        }
    }

    if (Options["disable-default-rules"] is bool disableDefaultRules && !disableDefaultRules)
    {
        // Built-in rules embedded in this assembly.
        LoadEmbeddedRules(Assembly.GetExecutingAssembly());

        // Add Application Inspector cryptography rules.
        LoadEmbeddedRules(typeof(AnalyzeCommand).Assembly);
    }

    if (Options["custom-rule-directory"] is string customDirectory)
    {
        rules.AddDirectory(customDirectory);
    }

    if (!rules.Any())
    {
        Logger.Error("No rules were specified, unable to continue.");
        return analysisResults; // empty
    }

    RuleProcessor processor = new RuleProcessor(rules, new RuleProcessorOptions());

    // Accept either a directory (recursive) or a single file path.
    string[] fileList;
    if (System.IO.Directory.Exists(directory))
    {
        fileList = System.IO.Directory.GetFiles(directory, "*", SearchOption.AllDirectories);
    }
    else if (File.Exists(directory))
    {
        fileList = new string[] { directory };
    }
    else
    {
        Logger.Warn("{0} is neither a directory nor a file.", directory);
        return analysisResults; // empty
    }

    foreach (string filename in fileList)
    {
        Logger.Trace("Processing {0}", filename);

        // TODO: Make this more resilient
        if (IGNORE_FILES.Contains(Path.GetFileName(filename)))
        {
            Logger.Trace("Ignoring {0}", filename);
            continue;
        }

        byte[] fileContents;
        try
        {
            fileContents = File.ReadAllBytes(filename);
        }
        catch (Exception ex)
        {
            Logger.Trace(ex, "File {0} cannot be read, ignoring.", filename);
            continue;
        }

        string buffer = NormalizeFileContent(filename, fileContents);
        Logger.Debug("Normalization complete.");

        // Minimum density of cryptographic operators for a file to be flagged
        // with the generic "high density" pseudo-rule below.
        const double MIN_CRYPTO_OP_DENSITY = 0.10;
        try
        {
            // TODO don't do this if we disassembled native code
            double cryptoOperationLikelihood = CalculateCryptoOpDensity(buffer);
            Logger.Debug("Cryptographic operation density for {0} was {1}", filename, cryptoOperationLikelihood);

            if (cryptoOperationLikelihood >= MIN_CRYPTO_OP_DENSITY)
            {
                analysisResults.Add(new IssueRecord(
                    Filename: filename,
                    Filesize: buffer.Length,
                    TextSample: "n/a",
                    Issue: new Issue(
                        Boundary: new Boundary(),
                        StartLocation: new Location(),
                        EndLocation: new Location(),
                        Rule: new Rule()
                        {
                            Id = "_CRYPTO_DENSITY",
                            Name = "Cryptographic symbols",
                            Description = cryptoOperationLikelihood.ToString(),
                            Tags = new string[] { "Cryptography.GenericImplementation.HighDensityOperators" }
                        }
                    )
                ));
            }

            Logger.Debug("Analyzing {0}, length={1}", filename, buffer.Length);
            List<MatchRecord>? fileResults = null;
            FileEntry holderEntry = new FileEntry("placeholder", new MemoryStream(Encoding.UTF8.GetBytes(buffer)));
            LanguageInfo languageInfo = new LanguageInfo();
            Language.FromFileName(filename, ref languageInfo);

            // Run the rule processor with a 30-second timeout. Await the race
            // instead of blocking the thread with Task.Wait() — this method is async.
            Task<List<MatchRecord>> task = Task.Run(() => processor.AnalyzeFile(holderEntry, languageInfo));
            Task completed = await Task.WhenAny(task, Task.Delay(TimeSpan.FromSeconds(30)));
            if (completed == task)
            {
                fileResults = await task;
            }
            else
            {
                // Preserve prior behavior: bail out with whatever results we have so far.
                Logger.Warn("DevSkim operation timed out.");
                return analysisResults;
            }

            Logger.Debug("Operation Complete: {0}", fileResults?.Count);

            // Split once per file — it is invariant across the per-issue loop below.
            string[] fileContentArray = buffer.Split(new[] { Environment.NewLine }, StringSplitOptions.None);

            foreach (MatchRecord issue in fileResults ?? new List<MatchRecord>())
            {
                // Excerpt spans one line before through one line after the match,
                // clamped to the bounds of the file.
                List<string> excerpt = new List<string>();
                int startLoc = Math.Max(issue.StartLocationLine - 1, 0);
                int endLoc = Math.Min(issue.EndLocationLine + 1, fileContentArray.Length - 1);
                for (int i = startLoc; i <= endLoc; i++)
                {
                    excerpt.Add(fileContentArray[i].Trim());
                }

                analysisResults.Add(new IssueRecord(
                    Filename: filename,
                    Filesize: buffer.Length,
                    TextSample: issue.StartLocationLine + " => " + string.Join(Environment.NewLine, excerpt),
                    Issue: new Issue(
                        issue.Boundary,
                        new Location() { Column = issue.StartLocationColumn, Line = issue.StartLocationLine },
                        new Location() { Column = issue.EndLocationColumn, Line = issue.EndLocationLine },
                        issue.Rule)
                ));
            }
        }
        catch (Exception ex)
        {
            Logger.Warn(ex, "Error analyzing {0}: {1}", filename, ex.Message);
            Logger.Warn(ex.StackTrace);
        }
    }

    return analysisResults;
}