public void InsertOptionalDataVisitorTests_InsertsOptionalDataForCommonConditions(OptionallyEmittedData optionallyEmittedData)
{
    // Drive the shared baseline harness against the core test inputs using
    // the requested optional-data flags.
    string directory = GetTestDirectory("InsertOptionalDataVisitor");
    RunTest(directory, "CoreTests", optionallyEmittedData);
}
/// <summary>
/// Resolves <paramref name="fileLocation"/> to its index in this run's
/// artifacts table, optionally appending a new entry when it is absent.
/// The location's Uri is normalized in place and its Index property is
/// updated before returning.
/// </summary>
/// <param name="fileLocation">The artifact location to resolve; must not be null.</param>
/// <param name="addToFilesTableIfNotPresent">When true, a missing artifact is appended; when false, -1 is returned for a missing artifact.</param>
/// <param name="dataToInsert">Optional data to populate on a newly created artifact.</param>
/// <param name="encoding">Encoding used when persisting file contents.</param>
/// <param name="hashData">Precomputed hashes to persist, if available.</param>
/// <returns>The artifact's table index, or -1 when absent and not added.</returns>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="fileLocation"/> is null.</exception>
public int GetFileIndex(
    ArtifactLocation fileLocation,
    bool addToFilesTableIfNotPresent = true,
    OptionallyEmittedData dataToInsert = OptionallyEmittedData.None,
    Encoding encoding = null,
    HashData hashData = null)
{
    if (fileLocation == null)
    {
        throw new ArgumentNullException(nameof(fileLocation));
    }

    bool tableIsEmpty = this.Artifacts == null || this.Artifacts.Count == 0;
    if (tableIsEmpty && !addToFilesTableIfNotPresent)
    {
        return -1;
    }

    if (_artifactLocationToIndexMap == null)
    {
        InitializeFileToIndexMap();
    }

    if (fileLocation.Uri == null)
    {
        // No URI means the caller supplied only an index; hand it back.
        return fileLocation.Index;
    }

    // Strictly speaking, some elements contributing to a files-table key are
    // case sensitive (everything but the scheme/host of a web URI). We lack a
    // comparer that handles all cases and currently cover the Windows happy
    // path, where most URIs are case-insensitive file paths.
    // Tracking item for an improved comparer:
    // https://github.com/Microsoft/sarif-sdk/issues/973

    // Normalize the incoming URI so emitted logs are consistent throughout.
    fileLocation.Uri = new Uri(
        UriHelper.MakeValidUri(fileLocation.Uri.OriginalString),
        UriKind.RelativeOrAbsolute);

    // Only Uri + UriBaseId comprise a file's identity for table lookup; the
    // index and the property bag are deliberately excluded.
    var lookupKey = new ArtifactLocation
    {
        Uri = fileLocation.Uri,
        UriBaseId = fileLocation.UriBaseId
    };

    if (!_artifactLocationToIndexMap.TryGetValue(lookupKey, out int index))
    {
        if (addToFilesTableIfNotPresent)
        {
            this.Artifacts = this.Artifacts ?? new List<Artifact>();
            index = this.Artifacts.Count;

            Uri artifactUri = lookupKey.TryReconstructAbsoluteUri(this.OriginalUriBaseIds, out Uri resolvedUri)
                ? resolvedUri
                : lookupKey.Uri;

            Artifact artifact = Artifact.Create(
                artifactUri,
                dataToInsert,
                hashData: hashData,
                encoding: encoding);

            // Copy the location so later mutation of the result's copy cannot
            // affect the new Run.Artifacts entry.
            artifact.Location = new ArtifactLocation(fileLocation);

            this.Artifacts.Add(artifact);
            _artifactLocationToIndexMap[lookupKey] = index;
        }
        else
        {
            // Not found, and the call was not configured to add the entry.
            index = -1;
        }
    }

    fileLocation.Index = index;
    return index;
}
/// <summary>
/// Creates the SARIF output logger for this analysis (when an output path is
/// configured), starts it, and registers it with the context's aggregating
/// logger. Log-file creation failures are routed to the error-handling
/// delegate, which records the problem and exits the application.
/// </summary>
/// <param name="analyzeOptions">Command-line options driving logger configuration.</param>
/// <param name="context">Analysis context whose Logger must be an AggregatingLogger.</param>
/// <param name="targets">Analysis targets to record in the log.</param>
private void InitializeOutputFile(TOptions analyzeOptions, TContext context, ISet<string> targets)
{
    string filePath = analyzeOptions.OutputFilePath;
    var aggregatingLogger = (AggregatingLogger)context.Logger;

    if (string.IsNullOrEmpty(filePath))
    {
        // No output file requested; nothing to initialize.
        return;
    }

    InvokeCatchingRelevantIOExceptions(
        () =>
        {
            LoggingOptions loggingOptions = analyzeOptions.ConvertToLoggingOptions();

            OptionallyEmittedData dataToInsert = analyzeOptions.DataToInsert.ToFlags();
            OptionallyEmittedData dataToRemove = analyzeOptions.DataToRemove.ToFlags();

            // Support the obsolete ComputeFileHashes argument on the analyze
            // command line by folding it into the flags.
            if (analyzeOptions.ComputeFileHashes)
            {
                dataToInsert |= OptionallyEmittedData.Hashes;
            }

            SarifLogger sarifLogger = analyzeOptions.SarifOutputVersion != SarifVersion.OneZeroZero
                ? new SarifLogger(
                    analyzeOptions.OutputFilePath,
                    loggingOptions,
                    dataToInsert,
                    dataToRemove,
                    tool: _tool,
                    run: null,
                    analysisTargets: targets,
                    invocationTokensToRedact: GenerateSensitiveTokensList(),
                    invocationPropertiesToLog: analyzeOptions.InvocationPropertiesToLog)
                : new SarifOneZeroZeroLogger(
                    analyzeOptions.OutputFilePath,
                    loggingOptions,
                    dataToInsert,
                    dataToRemove,
                    tool: _tool,
                    run: null,
                    analysisTargets: targets,
                    invocationTokensToRedact: GenerateSensitiveTokensList(),
                    invocationPropertiesToLog: analyzeOptions.InvocationPropertiesToLog);

            _pathToHashDataMap = sarifLogger.AnalysisTargetToHashDataMap;

            sarifLogger.AnalysisStarted();
            aggregatingLogger.Loggers.Add(sarifLogger);
        },
        (ex) =>
        {
            Errors.LogExceptionCreatingLogFile(context, filePath, ex);
            ThrowExitApplicationException(context, ExitReason.ExceptionCreatingLogFile, ex);
        });
}
/// <summary>
/// Files work items for the results in <paramref name="sarifLog"/>. Optional
/// data is removed/inserted per the filing context, then the log is split per
/// the configured strategy and each split log is filed.
/// </summary>
/// <param name="sarifLog">The log whose results should be filed; must not be null.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="sarifLog"/> is null.</exception>
/// <exception cref="ArgumentOutOfRangeException">Thrown for unsupported splitting strategies.</exception>
public virtual void FileWorkItems(SarifLog sarifLog)
{
    sarifLog = sarifLog ?? throw new ArgumentNullException(nameof(sarifLog));

    this.FilingClient.Connect(this.FilingContext.PersonalAccessToken).Wait();

    OptionallyEmittedData dataToRemove = this.FilingContext.DataToRemove;
    if (dataToRemove != OptionallyEmittedData.None)
    {
        new RemoveOptionalDataVisitor(dataToRemove).Visit(sarifLog);
    }

    OptionallyEmittedData dataToInsert = this.FilingContext.DataToInsert;
    if (dataToInsert != OptionallyEmittedData.None)
    {
        new InsertOptionalDataVisitor(dataToInsert).Visit(sarifLog);
    }

    SplittingStrategy splittingStrategy = this.FilingContext.SplittingStrategy;
    if (splittingStrategy == SplittingStrategy.None)
    {
        FileWorkItemsHelper(sarifLog, this.FilingContext, this.FilingClient);
        return;
    }

    for (int runIndex = 0; runIndex < sarifLog.Runs?.Count; ++runIndex)
    {
        // Only runs that actually carry results are processed.
        if (!(sarifLog.Runs[runIndex]?.Results?.Count > 0))
        {
            continue;
        }

        IList<SarifLog> logsToProcess = new List<SarifLog> { sarifLog };

        if (splittingStrategy != SplittingStrategy.PerRun)
        {
            // TODO: Implement PerResult and PerRun splitting strategies.
            // https://github.com/microsoft/sarif-sdk/issues/1763
            // https://github.com/microsoft/sarif-sdk/issues/1762
            SplittingVisitor visitor = splittingStrategy switch
            {
                SplittingStrategy.PerRunPerRule => new PerRunPerRuleSplittingVisitor(),
                SplittingStrategy.PerRunPerTarget => new PerRunPerTargetSplittingVisitor(),
                SplittingStrategy.PerRunPerTargetPerRule => new PerRunPerTargetPerRuleSplittingVisitor(),
                _ => throw new ArgumentOutOfRangeException($"SplittingStrategy: {splittingStrategy}"),
            };

            visitor.VisitRun(sarifLog.Runs[runIndex]);
            logsToProcess = visitor.SplitSarifLogs;
        }

        foreach (SarifLog splitLog in logsToProcess)
        {
            FileWorkItemsHelper(splitLog, this.FilingContext, this.FilingClient);
        }
    }
}
/// <summary>
/// Initializes the visitor from a run, seeding it with the run's
/// originalUriBaseIds.
/// </summary>
/// <param name="dataToInsert">Flags selecting the optional data to insert.</param>
/// <param name="run">The run to visit; must not be null.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="run"/> is null.</exception>
public InsertOptionalDataVisitor(OptionallyEmittedData dataToInsert, Run run)
    : this(dataToInsert, run?.OriginalUriBaseIds)
{
    // The chained constructor tolerates a null run (null-conditional above);
    // reject it here so callers get a clear argument error.
    _run = run ?? throw new ArgumentNullException(nameof(run));
}
/// <summary>
/// Captures the set of optional data the reformatting pass should insert.
/// </summary>
/// <param name="dataToInsert">Flags selecting the optional data to insert.</param>
public ReformattingVisitor(OptionallyEmittedData dataToInsert) => _dataToInsert = dataToInsert;
/// <summary>
/// Conversion entry point for this format.
/// NOTE(review): deliberately a no-op — nothing is read from
/// <paramref name="input"/> and nothing is written to <paramref name="output"/>,
/// and no argument validation occurs; confirm this stub is intentional.
/// </summary>
/// <param name="input">Stream in the tool's native format (unused).</param>
/// <param name="output">SARIF result log writer (unused).</param>
/// <param name="dataToInsert">Optionally emitted properties (unused).</param>
public override void Convert(Stream input, IResultLogWriter output, OptionallyEmittedData dataToInsert) { }
/// <summary>
/// Conversion entry point for this format; conversion is not yet supported.
/// </summary>
/// <param name="input">Stream in the tool's native format.</param>
/// <param name="output">SARIF result log writer.</param>
/// <param name="dataToInsert">Optionally emitted properties that should be written to the log.</param>
/// <exception cref="NotImplementedException">Always thrown.</exception>
public override void Convert(Stream input, IResultLogWriter output, OptionallyEmittedData dataToInsert)
    => throw new NotImplementedException();
/// <summary>
/// Initializes the factory with a MIME-type classifier (falling back to
/// extension-based detection) and the optional data to insert.
/// </summary>
/// <param name="mimeTypeClassifier">Maps a file name to a MIME type; may be null.</param>
/// <param name="dataToInsert">Flags selecting the optional data to insert.</param>
internal FileInfoFactory(Func<string, string> mimeTypeClassifier, OptionallyEmittedData dataToInsert)
{
    _dataToInsert = dataToInsert;
    _fileInfoDictionary = new Dictionary<string, FileData>();
    _mimeTypeClassifier = mimeTypeClassifier ?? MimeType.DetermineFromFileExtension;
}
/// <summary>
/// Renders the flags as a file-name-friendly token: flags enums stringify as
/// "A, B", which is collapsed to "A+B".
/// </summary>
private string NormalizeOptionallyEmittedDataToString(OptionallyEmittedData optionallyEmittedData)
{
    return optionallyEmittedData.ToString().Replace(", ", "+");
}
/// <summary>
/// Builds an <see cref="Artifact"/> for <paramref name="uri"/>, optionally
/// embedding file contents and/or hashes as directed by
/// <paramref name="dataToInsert"/>.
/// </summary>
/// <param name="uri">Location of the file; must not be null.</param>
/// <param name="dataToInsert">Flags selecting which optional data to embed.</param>
/// <param name="encoding">Encoding recorded on the artifact and used when reading text.</param>
/// <param name="hashData">Precomputed hashes; computed on demand when null.</param>
/// <param name="fileSystem">File system abstraction; a real file system is used when null.</param>
/// <returns>The populated artifact.</returns>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="uri"/> is null.</exception>
public static Artifact Create(
    Uri uri,
    OptionallyEmittedData dataToInsert = OptionallyEmittedData.None,
    Encoding encoding = null,
    HashData hashData = null,
    IFileSystem fileSystem = null)
{
    if (uri == null)
    {
        throw new ArgumentNullException(nameof(uri));
    }

    fileSystem = fileSystem ?? new FileSystem();

    var artifact = new Artifact
    {
        Encoding = encoding?.WebName,
    };

    string mimeType = SarifWriters.MimeType.DetermineFromFileExtension(uri);

    // Attempt to persist file contents and/or hashes. Any IO/ACL failure is
    // deliberately swallowed: no requested data is populated and no
    // notification is written to the log (an open design question).
    try
    {
        bool anyDataRequested =
            dataToInsert.HasFlag(OptionallyEmittedData.Hashes) ||
            dataToInsert.HasFlag(OptionallyEmittedData.TextFiles) ||
            dataToInsert.HasFlag(OptionallyEmittedData.BinaryFiles);

        // Order matters: IsAbsoluteUri must be verified before touching
        // LocalPath, and FileExists is only probed when work is requested.
        if (!anyDataRequested ||
            !uri.IsAbsoluteUri ||
            !uri.IsFile ||
            !fileSystem.FileExists(uri.LocalPath))
        {
            return artifact;
        }

        string filePath = uri.LocalPath;

        if (dataToInsert.HasFlag(OptionallyEmittedData.BinaryFiles) &&
            SarifWriters.MimeType.IsBinaryMimeType(mimeType))
        {
            artifact.Contents = GetEncodedFileContents(fileSystem, filePath, mimeType, encoding);
        }

        if (dataToInsert.HasFlag(OptionallyEmittedData.TextFiles) &&
            SarifWriters.MimeType.IsTextualMimeType(mimeType))
        {
            artifact.Contents = GetEncodedFileContents(fileSystem, filePath, mimeType, encoding);
        }

        if (dataToInsert.HasFlag(OptionallyEmittedData.Hashes))
        {
            // Prefer caller-supplied hashes; compute them only when absent.
            HashData hashes = hashData ?? HashUtilities.ComputeHashes(filePath);
            artifact.Hashes = new Dictionary<string, string>
            {
                { "md5", hashes.MD5 },
                { "sha-1", hashes.Sha1 },
                { "sha-256", hashes.Sha256 },
            };
        }
    }
    catch (Exception e) when (e is IOException || e is UnauthorizedAccessException)
    {
        // Swallowed by design; see note above.
    }

    return artifact;
}
/// <summary>
/// Initializes the visitor with the data to insert and an optional map of
/// original URI base ids used when resolving relative artifact locations.
/// </summary>
/// <param name="dataToInsert">Flags selecting the optional data to insert.</param>
/// <param name="originalUriBaseIds">Map of uriBaseId symbols to absolute URIs; may be null.</param>
public InsertOptionalDataVisitor(OptionallyEmittedData dataToInsert, IDictionary<string, Uri> originalUriBaseIds = null)
{
    _originalUriBaseIds = originalUriBaseIds;
    _dataToInsert = dataToInsert;
}
/// <summary>
/// Files work items for the results in <paramref name="sarifLog"/> and returns
/// the (possibly modified) log. The log is tagged with a "guid" property,
/// optional data is removed/inserted per the filing context, and the log is
/// partitioned per the configured splitting strategy before filing.
/// </summary>
/// <param name="sarifLog">The log whose results should be filed; must not be null.</param>
/// <returns>The same log instance, after visitors have run.</returns>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="sarifLog"/> is null.</exception>
/// <exception cref="ArgumentOutOfRangeException">Thrown for unsupported splitting strategies.</exception>
public virtual SarifLog FileWorkItems(SarifLog sarifLog)
{
    // Single null-guard; a second, redundant guard inside the logging scope
    // was removed.
    sarifLog = sarifLog ?? throw new ArgumentNullException(nameof(sarifLog));

    sarifLog.SetProperty("guid", Guid.NewGuid());

    using (Logger.BeginScope(nameof(FileWorkItems)))
    {
        this.FilingResult = FilingResult.None;
        this.FiledWorkItems = new List<WorkItemModel>();

        Logger.LogInformation("Connecting to filing client: {accountOrOrganization}", this.FilingClient.AccountOrOrganization);
        this.FilingClient.Connect(this.FilingContext.PersonalAccessToken).Wait();

        OptionallyEmittedData optionallyEmittedData = this.FilingContext.DataToRemove;
        if (optionallyEmittedData != OptionallyEmittedData.None)
        {
            var dataRemovingVisitor = new RemoveOptionalDataVisitor(optionallyEmittedData);
            dataRemovingVisitor.Visit(sarifLog);
        }

        optionallyEmittedData = this.FilingContext.DataToInsert;
        if (optionallyEmittedData != OptionallyEmittedData.None)
        {
            var dataInsertingVisitor = new InsertOptionalDataVisitor(optionallyEmittedData);
            dataInsertingVisitor.Visit(sarifLog);
        }

        SplittingStrategy splittingStrategy = this.FilingContext.SplittingStrategy;
        if (splittingStrategy == SplittingStrategy.None)
        {
            FileWorkItemsHelper(sarifLog, this.FilingContext, this.FilingClient);
            return sarifLog;
        }

        IList<SarifLog> logsToProcess;
        PartitionFunction<string> partitionFunction = null;

        Stopwatch splittingStopwatch = Stopwatch.StartNew();

        switch (splittingStrategy)
        {
            case SplittingStrategy.PerRun:
            {
                partitionFunction = (result) => result.ShouldBeFiled() ? "Include" : null;
                break;
            }
            case SplittingStrategy.PerResult:
            {
                // A fresh GUID per result puts every result in its own partition.
                partitionFunction = (result) => result.ShouldBeFiled() ? Guid.NewGuid().ToString() : null;
                break;
            }
            default:
            {
                throw new ArgumentOutOfRangeException($"SplittingStrategy: {splittingStrategy}");
            }
        }

        var partitioningVisitor = new PartitioningVisitor<string>(partitionFunction, deepClone: false);
        partitioningVisitor.VisitSarifLog(sarifLog);

        logsToProcess = new List<SarifLog>(partitioningVisitor.GetPartitionLogs().Values);

        var logsToProcessMetrics = new Dictionary<string, object>
        {
            { "splittingStrategy", splittingStrategy },
            { "logsToProcessCount", logsToProcess.Count },
            { "splittingDurationInMilliseconds", splittingStopwatch.ElapsedMilliseconds },
        };

        this.Logger.LogMetrics(EventIds.LogsToProcessMetrics, logsToProcessMetrics);
        splittingStopwatch.Stop();

        for (int splitFileIndex = 0; splitFileIndex < logsToProcess.Count; splitFileIndex++)
        {
            SarifLog splitLog = logsToProcess[splitFileIndex];
            FileWorkItemsHelper(splitLog, this.FilingContext, this.FilingClient);
        }
    }

    return sarifLog;
}
/// <summary>
/// Runs one InsertOptionalDataVisitor baseline test: visits the input log with
/// the requested optional data and diffs the serialized result against the
/// expected baseline file (optionally rebaselining).
/// </summary>
/// <param name="testDirectory">Directory containing the input and expected files.</param>
/// <param name="inputFileName">Base input file name, without the .sarif extension.</param>
/// <param name="optionallyEmittedData">Flags selecting the optional data to insert.</param>
private void RunTest(string testDirectory, string inputFileName, OptionallyEmittedData optionallyEmittedData)
{
    var sb = new StringBuilder();

    string optionsNameSuffix = "_" + NormalizeOptionallyEmittedDataToString(optionallyEmittedData);

    string expectedFileName = Path.Combine(testDirectory, inputFileName + optionsNameSuffix + ".sarif");
    string actualFileName = Path.Combine(testDirectory, @"Actual\" + inputFileName + optionsNameSuffix + ".sarif");
    inputFileName = Path.Combine(testDirectory, inputFileName + ".sarif");

    string actualDirectory = Path.GetDirectoryName(actualFileName);
    if (!Directory.Exists(actualDirectory))
    {
        Directory.CreateDirectory(actualDirectory);
    }

    File.Exists(inputFileName).Should().BeTrue();

    SarifLog actualLog;

    var settings = new JsonSerializerSettings
    {
        ContractResolver = SarifContractResolver.Instance,
        Formatting = Formatting.Indented
    };

    try
    {
        actualLog = JsonConvert.DeserializeObject<SarifLog>(File.ReadAllText(inputFileName), settings);

        Uri originalUri = actualLog.Runs[0].OriginalUriBaseIds["TESTROOT"];
        string uriString = originalUri.ToString();

        // Rewrite the persisted URI so it points at the current enlistment.
        string currentDirectory = Environment.CurrentDirectory;
        currentDirectory = currentDirectory.Substring(0, currentDirectory.IndexOf(@"\bld\"));
        uriString = uriString.Replace("REPLACED_AT_TEST_RUNTIME", currentDirectory);

        actualLog.Runs[0].OriginalUriBaseIds["TESTROOT"] = new Uri(uriString, UriKind.Absolute);

        var visitor = new InsertOptionalDataVisitor(optionallyEmittedData);
        visitor.Visit(actualLog.Runs[0]);

        // Restore the remanufactured URI so that file diffing matches.
        actualLog.Runs[0].OriginalUriBaseIds["TESTROOT"] = originalUri;
    }
    catch (Exception ex)
    {
        sb.AppendFormat(
            CultureInfo.InvariantCulture,
            "Unhandled exception processing input '{0}' with the following options: '{1}'.\r\n",
            inputFileName,
            optionallyEmittedData);
        sb.AppendLine(ex.ToString());
        ValidateResults(sb.ToString());
        return;
    }

    string expectedSarif = File.Exists(expectedFileName) ? File.ReadAllText(expectedFileName) : null;
    string actualSarif = JsonConvert.SerializeObject(actualLog, settings);

    if (!AreEquivalentSarifLogs(actualSarif, expectedSarif))
    {
        if (s_rebaseline)
        {
            // Rewrite the copy in the test output directory so subsequent runs
            // pass without a rebuild that recopies SARIF test files...
            File.WriteAllText(expectedFileName, actualSarif);

            string subdirectory = Path.GetFileName(testDirectory);
            string productTestDirectory = GetProductTestDataDirectory(subdirectory);
            expectedFileName = Path.Combine(productTestDirectory, Path.GetFileName(expectedFileName));

            // ...and also rewrite the checked-in baseline.
            File.WriteAllText(expectedFileName, actualSarif);
        }
        else
        {
            File.WriteAllText(actualFileName, actualSarif);

            string errorMessage = "Expanding optional data for input '{0}' produced unexpected results for the following options: '{1}'.";
            sb.AppendLine(string.Format(CultureInfo.CurrentCulture, errorMessage, inputFileName, optionallyEmittedData));
            sb.AppendLine("Check individual differences with:");
            sb.AppendLine(GenerateDiffCommand(expectedFileName, actualFileName) + Environment.NewLine);
            sb.AppendLine("To compare all difference for this test suite:");
            sb.AppendLine(GenerateDiffCommand(Path.GetDirectoryName(expectedFileName), Path.GetDirectoryName(actualFileName)) + Environment.NewLine);
        }
    }

    // Guards against accidentally checking in s_rebaseline == true.
    s_rebaseline.Should().BeFalse();

    ValidateResults(sb.ToString());
}
/// <summary>
/// Interface implementation for converting a stream in Fortify FPR format to a
/// stream in SARIF format.
/// </summary>
/// <exception cref="ArgumentNullException">Thrown when one or more required arguments are null.</exception>
/// <param name="input">Stream in Fortify FPR format.</param>
/// <param name="output">Stream in SARIF format.</param>
/// <param name="dataToInsert">Optionally emitted properties that should be written to log.</param>
public override void Convert(Stream input, IResultLogWriter output, OptionallyEmittedData dataToInsert)
{
    if (input == null)
    {
        throw new ArgumentNullException(nameof(input));
    }

    if (output == null)
    {
        throw new ArgumentNullException(nameof(output));
    }

    _invocation = new Invocation();
    _invocation.ToolNotifications = new List<Notification>();

    // Reset all per-conversion state so this converter instance is reusable.
    _results.Clear();
    _files.Clear();
    _rules.Clear();
    _ruleIdToIndexMap.Clear();
    _tflToNodeIdDictionary.Clear();
    _tflToSnippetIdDictionary.Clear();
    _locationToSnippetIdDictionary.Clear();
    _resultToSnippetIdDictionary.Clear();
    _resultToReplacementDefinitionDictionary.Clear();
    _nodeIdToLocationDictionary.Clear();
    _nodeIdToActionTypeDictionary.Clear();
    _snippetIdToRegionsDictionary.Clear();

    ParseFprFile(input);
    AddMessagesToResults();
    AddSnippetsToResults();
    AddNodeLocationsToThreadFlowLocations();
    AddSnippetsToThreadFlowLocations();

    var run = new Run
    {
        Id = new RunAutomationDetails
        {
            InstanceGuid = _runId,
            InstanceId = _automationId + "/"
        },
        Artifacts = new List<Artifact>(_files),
        Tool = new Tool
        {
            Driver = new ToolComponent
            {
                Name = ToolName,
                RuleDescriptors = _rules
            }
        },
        Invocations = new[] { _invocation },
    };

    if (!string.IsNullOrWhiteSpace(_originalUriBasePath))
    {
        // NOTE(review): "file:/" prefixing assumes a Linux-rooted path as
        // recorded by the FPR's Platform property.
        if (_originalUriBasePath.StartsWith("/") && _invocation.GetProperty("Platform") == "Linux")
        {
            _originalUriBasePath = "file:/" + _originalUriBasePath;
        }

        if (Uri.TryCreate(_originalUriBasePath, UriKind.Absolute, out Uri uri))
        {
            run.OriginalUriBaseIds = new Dictionary<string, ArtifactLocation>
            {
                { FileLocationUriBaseId, new ArtifactLocation { Uri = uri } }
            };
        }
    }

    PersistResults(output, _results, run);
}
/// <summary>
/// Executes the 'convert' command: translates a tool's native log file to
/// SARIF at the configured (or defaulted) output path.
/// </summary>
/// <param name="convertOptions">Options controlling the conversion.</param>
/// <param name="fileSystem">File system abstraction; a real file system is used when null.</param>
/// <returns>SUCCESS on success; FAILURE on invalid options or conversion error.</returns>
public int Run(ConvertOptions convertOptions, IFileSystem fileSystem = null)
{
    fileSystem = fileSystem ?? new FileSystem();

    try
    {
        if (string.IsNullOrEmpty(convertOptions.OutputFilePath))
        {
            // Default the output path to "<input>.sarif".
            convertOptions.OutputFilePath = convertOptions.InputFilePath + ".sarif";
        }

        if (fileSystem.DirectoryExists(convertOptions.OutputFilePath))
        {
            Console.Error.WriteLine(
                string.Format(
                    CultureInfo.CurrentCulture,
                    "The output path '{0}' is a directory.",
                    convertOptions.OutputFilePath));
            return FAILURE;
        }

        if (!ValidateOptions(convertOptions, fileSystem))
        {
            return FAILURE;
        }

        LoggingOptions loggingOptions = LoggingOptions.None;
        OptionallyEmittedData dataToInsert = convertOptions.DataToInsert.ToFlags();

        // Stray empty statements after these blocks were removed.
        if (convertOptions.PrettyPrint)
        {
            loggingOptions |= LoggingOptions.PrettyPrint;
        }

        if (convertOptions.Force)
        {
            loggingOptions |= LoggingOptions.OverwriteExistingOutputFile;
        }

        new ToolFormatConverter().ConvertToStandardFormat(
            convertOptions.ToolFormat,
            convertOptions.InputFilePath,
            convertOptions.OutputFilePath,
            loggingOptions,
            dataToInsert,
            convertOptions.PluginAssemblyPath);
    }
    catch (Exception ex) when (!Debugger.IsAttached)
    {
        Console.WriteLine(ex);
        return FAILURE;
    }

    return SUCCESS;
}
/// <summary>
/// Splits <paramref name="sarifLog"/> into one or more logs per the configured
/// splitting strategy, returning only those logs eligible for filing.
/// </summary>
/// <param name="sarifLog">The log to split; must not be null.</param>
/// <returns>The split logs; a single-element array when no splitting is configured.</returns>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="sarifLog"/> is null.</exception>
/// <exception cref="ArgumentOutOfRangeException">Thrown for unsupported splitting strategies.</exception>
public virtual IReadOnlyList<SarifLog> SplitLogFile(SarifLog sarifLog)
{
    IList<SarifLog> logsToProcess;

    using (Logger.BeginScopeContext(nameof(SplitLogFile)))
    {
        // Single null-guard; a second, redundant guard further down was removed.
        sarifLog = sarifLog ?? throw new ArgumentNullException(nameof(sarifLog));

        sarifLog.SetProperty("guid", Guid.NewGuid());

        this.FilingResult = FilingResult.None;
        this.FiledWorkItems = new List<WorkItemModel>();

        Logger.LogInformation("Connecting to filing client: {accountOrOrganization}", this.FilingClient.AccountOrOrganization);
        this.FilingClient.Connect(this.FilingContext.PersonalAccessToken).Wait();

        OptionallyEmittedData optionallyEmittedData = this.FilingContext.DataToRemove;
        if (optionallyEmittedData != OptionallyEmittedData.None)
        {
            Logger.LogDebug("Removing optional data.");
            var dataRemovingVisitor = new RemoveOptionalDataVisitor(optionallyEmittedData);
            dataRemovingVisitor.Visit(sarifLog);
        }

        optionallyEmittedData = this.FilingContext.DataToInsert;
        if (optionallyEmittedData != OptionallyEmittedData.None)
        {
            Logger.LogDebug("Inserting optional data.");
            var dataInsertingVisitor = new InsertOptionalDataVisitor(optionallyEmittedData);
            dataInsertingVisitor.Visit(sarifLog);
        }

        using (Logger.BeginScopeContext("Splitting visitor"))
        {
            SplittingStrategy splittingStrategy = this.FilingContext.SplittingStrategy;
            Logger.LogInformation($"Splitting strategy - {splittingStrategy}");

            if (splittingStrategy == SplittingStrategy.None)
            {
                return new[] { sarifLog };
            }

            PartitionFunction<string> partitionFunction = null;

            Stopwatch splittingStopwatch = Stopwatch.StartNew();

            switch (splittingStrategy)
            {
                case SplittingStrategy.PerRun:
                {
                    partitionFunction = (result) => result.ShouldBeFiled() ? "Include" : null;
                    break;
                }
                case SplittingStrategy.PerResult:
                {
                    // A fresh GUID per result puts every result in its own partition.
                    partitionFunction = (result) => result.ShouldBeFiled() ? Guid.NewGuid().ToString() : null;
                    break;
                }
                case SplittingStrategy.PerRunPerOrgPerEntityTypePerPartialFingerprint:
                {
                    partitionFunction = (result) => result.ShouldBeFiled() ? result.GetFingerprintSplittingStrategyId() : null;
                    break;
                }
                case SplittingStrategy.PerRunPerOrgPerEntityTypePerRepositoryPerPartialFingerprint:
                {
                    partitionFunction = (result) => result.ShouldBeFiled() ? result.GetPerRepositoryFingerprintSplittingStrategyId() : null;
                    break;
                }
                default:
                {
                    throw new ArgumentOutOfRangeException($"SplittingStrategy: {splittingStrategy}");
                }
            }

            Logger.LogDebug("Begin splitting logs");
            var partitioningVisitor = new PartitioningVisitor<string>(partitionFunction, deepClone: false);
            partitioningVisitor.VisitSarifLog(sarifLog);

            Logger.LogDebug("Begin retrieving split logs");
            logsToProcess = new List<SarifLog>(partitioningVisitor.GetPartitionLogs().Values);
            Logger.LogDebug("End retrieving split logs");

            var logsToProcessMetrics = new Dictionary<string, object>
            {
                { "splittingStrategy", splittingStrategy },
                { "logsToProcessCount", logsToProcess.Count },
                { "splittingDurationInMilliseconds", splittingStopwatch.ElapsedMilliseconds },
            };

            this.Logger.LogMetrics(EventIds.LogsToProcessMetrics, logsToProcessMetrics);
            splittingStopwatch.Stop();
        }
    }

    if (logsToProcess != null && !this.FilingContext.ShouldFileUnchanged)
    {
        // Remove any logs that do not contain at least one result with a New
        // or None baseline state.
        logsToProcess = logsToProcess.Where(log => log?.Runs?.Any(run => run.Results?.Any(result => result.BaselineState == BaselineState.New || result.BaselineState == BaselineState.None) == true) == true).ToList();
    }

    return logsToProcess.ToArray();
}
/// <summary>Convert a Clang plist report into the SARIF format.</summary>
/// <exception cref="ArgumentNullException">Thrown when one or more required arguments are null.</exception>
/// <param name="input">CLang log file stream.</param>
/// <param name="output">Result log writer.</param>
/// <param name="dataToInsert">Optionally emitted properties that should be written to log.</param>
public override void Convert(Stream input, IResultLogWriter output, OptionallyEmittedData dataToInsert)
{
    // ToDo remove this comment after all issues are resolved.
    // Rodney is tasked with bringing Clang analyzer results into the SARIF fold.
    // Once this work is complete, he can close the following task:
    // http://twcsec-tfs01:8080/tfs/DefaultCollection/SecDevTools/_workitems#_a=edit&id=13409

    if (input == null)
    {
        throw new ArgumentNullException(nameof(input));
    }

    if (output == null)
    {
        throw new ArgumentNullException(nameof(output));
    }

    try
    {
        // Hardened reader settings: DTDs ignored and external resolution disabled.
        XmlReaderSettings settings = new XmlReaderSettings
        {
            IgnoreWhitespace = true,
            DtdProcessing = DtdProcessing.Ignore,
            XmlResolver = null
        };

        var results = new List<Result>();

        using (XmlReader xmlReader = XmlReader.Create(input, settings))
        {
            xmlReader.MoveToContent();
            xmlReader.ReadStartElement(ClangSchemaStrings.PlistName);
            if (xmlReader.NodeType == XmlNodeType.Element)
            {
                using (var pListReader = xmlReader.ReadSubtree())
                {
                    // Parse the plist payload into SARIF results.
                    this.ReadPlist(pListReader, results);
                }
            }
        }

        var tool = new Tool { Name = "Clang" };

        // Build the files table for the results, honoring the requested
        // optional data (contents/hashes).
        var fileInfoFactory = new FileInfoFactory(MimeType.DetermineFromFileExtension, dataToInsert);
        Dictionary<string, FileData> fileDictionary = fileInfoFactory.Create(results);

        var run = new Run() { Tool = tool };

        output.Initialize(run);

        if (fileDictionary != null && fileDictionary.Count > 0)
        {
            output.WriteFiles(fileDictionary);
        }

        output.OpenResults();
        output.WriteResults(results);
        output.CloseResults();
    }
    finally
    {
        // NOTE(review): _files is unconditionally cleared even though this
        // method never assigns it — presumably populated by ReadPlist; confirm.
        _files = null;
    }
}
/// <summary>
/// Interface implementation for converting a stream in Fortify FPR format to a
/// stream in SARIF format.
/// </summary>
/// <exception cref="ArgumentNullException">Thrown when one or more required arguments are null.</exception>
/// <param name="input">Stream in Fortify FPR format.</param>
/// <param name="output">Stream in SARIF format.</param>
/// <param name="dataToInsert">Optionally emitted properties that should be written to log.</param>
public override void Convert(Stream input, IResultLogWriter output, OptionallyEmittedData dataToInsert)
{
    if (input == null)
    {
        throw new ArgumentNullException(nameof(input));
    }

    if (output == null)
    {
        throw new ArgumentNullException(nameof(output));
    }

    _invocation = new Invocation();
    _invocation.ToolExecutionNotifications = new List<Notification>();
    _invocation.ExecutionSuccessful = true;

    // Reset all per-conversion state so this converter instance is reusable.
    _results.Clear();
    _files.Clear();
    _rules.Clear();
    _ruleIdToIndexMap.Clear();
    _cweIds.Clear();
    _tflToNodeIdDictionary.Clear();
    _tflToSnippetIdDictionary.Clear();
    _locationToSnippetIdDictionary.Clear();
    _resultToSnippetIdDictionary.Clear();
    _resultToReplacementDefinitionDictionary.Clear();
    _nodeIdToLocationDictionary.Clear();
    _nodeIdToActionTypeDictionary.Clear();
    _snippetIdToRegionsDictionary.Clear();

    ParseFprFile(input);
    AddMessagesToResults();
    AddSnippetsToResults();
    AddNodeLocationsToThreadFlowLocations();
    AddSnippetsToThreadFlowLocations();

    // Emit artifacts ordered by their recorded index (tuple Item2).
    IList<Artifact> orderedArtifacts = _files
        .OrderBy(d => d.Value.Item2)
        .Select(p => p.Value)
        .Select(t => t.Item1)
        .ToList();

    var run = new Run
    {
        AutomationDetails = new RunAutomationDetails
        {
            Guid = _runId,
            Id = _automationId + "/"
        },
        Artifacts = orderedArtifacts,
        Tool = new Tool
        {
            Driver = new ToolComponent
            {
                Name = ToolName,
                Rules = _rules,
                SupportedTaxonomies = new List<ToolComponentReference>
                {
                    new ToolComponentReference
                    {
                        Name = "CWE",
                        Index = 0,
                        // NOTE(review): this GUID's final segment has 11 hex
                        // digits rather than the 12 a GUID requires — likely a
                        // typo; confirm the intended value before changing it.
                        Guid = "2B841697-D0DE-45DD-9F19-1EEE1312429"
                    }
                }
            }
        },
        Taxonomies = new List<ToolComponent> { CweToolComponent },
        Invocations = new[] { _invocation },
    };

    if (_cweIds.Count > 0)
    {
        run.Taxonomies[0].Taxa = _cweIds.Select(c => new ReportingDescriptor { Id = c }).ToList();
    }

    if (!string.IsNullOrWhiteSpace(_originalUriBasePath))
    {
        // NOTE(review): "file:/" prefixing assumes a Linux-rooted path as
        // recorded by the FPR's Platform property.
        if (_originalUriBasePath.StartsWith("/") && _invocation.GetProperty("Platform") == "Linux")
        {
            _originalUriBasePath = "file:/" + _originalUriBasePath;
        }

        if (Uri.TryCreate(_originalUriBasePath, UriKind.Absolute, out Uri uri))
        {
            run.OriginalUriBaseIds = new Dictionary<string, ArtifactLocation>
            {
                { FileLocationUriBaseId, new ArtifactLocation { Uri = uri } }
            };
        }
    }

    PersistResults(output, _results, run);
}
/// <summary>
/// Parses a CppCheck XML results document and persists the converted SARIF
/// results to <paramref name="output"/>. The reader must be positioned at the
/// top-level results element; parsing is strictly sequential.
/// </summary>
/// <param name="reader">Reader positioned at the CppCheck results element.</param>
/// <param name="output">Destination log writer.</param>
/// <param name="dataToInsert">Optionally emitted properties that should be written to the log.</param>
private void ProcessCppCheckLog(XmlReader reader, IResultLogWriter output, OptionallyEmittedData dataToInsert)
{
    reader.ReadStartElement(_strings.Results);

    if (!StringReference.AreEqual(reader.LocalName, _strings.CppCheck))
    {
        throw reader.CreateException(ConverterResources.CppCheckCppCheckElementMissing);
    }

    string version = reader.GetAttribute(_strings.Version);

    if (version != null && !version.IsSemanticVersioningCompatible())
    {
        // This logic only fixes up simple cases, such as being passed
        // 1.66, where Semantic Versioning 2.0 requires 1.66.0. Also
        // strips Revision member if passed a complete .NET version.
        Version dotNetVersion;
        if (Version.TryParse(version, out dotNetVersion))
        {
            // Math.Max clamps unset (-1) version members to 0.
            version =
                Math.Max(0, dotNetVersion.Major) + "." +
                Math.Max(0, dotNetVersion.Minor) + "." +
                Math.Max(0, dotNetVersion.Build);
        }
    }

    if (string.IsNullOrWhiteSpace(version))
    {
        throw reader.CreateException(ConverterResources.CppCheckCppCheckElementMissing);
    }

    reader.Skip(); // <cppcheck />

    if (!StringReference.AreEqual(reader.LocalName, _strings.Errors))
    {
        throw reader.CreateException(ConverterResources.CppCheckErrorsElementMissing);
    }

    var results = new List<Result>();
    if (reader.IsEmptyElement)
    {
        reader.Skip(); // <errors />
    }
    else
    {
        // Parse each <error> child; CppCheckError.Parse advances the reader.
        int errorsDepth = reader.Depth;
        reader.Read(); // <errors>
        while (reader.Depth > errorsDepth)
        {
            var parsedError = CppCheckError.Parse(reader, _strings);
            results.Add(parsedError.ToSarifIssue());
        }

        reader.ReadEndElement(); // </errors>
    }

    reader.ReadEndElement(); // </results>

    var run = new Run()
    {
        Tool = new Tool
        {
            Driver = new ToolComponent
            {
                Name = ToolName,
                Version = version
            }
        },
    };

    PersistResults(output, results, run);
}
/// <summary>
/// Populates run-level data prior to emitting the log: default encoding,
/// artifacts for each analysis target, and a new invocation (with optional
/// token redaction applied).
/// </summary>
/// <param name="analysisTargets">Targets to record as artifacts; may be null.</param>
/// <param name="dataToInsert">Flags selecting optional data to embed.</param>
/// <param name="dataToRemove">Flags selecting optional data to omit (e.g. timestamps).</param>
/// <param name="invocationTokensToRedact">Sensitive tokens to strip from invocation properties; may be null.</param>
/// <param name="invocationPropertiesToLog">Invocation properties to persist.</param>
/// <param name="defaultFileEncoding">Default encoding name recorded on the run, when provided.</param>
/// <param name="filePathToHashDataMap">Precomputed hashes keyed by target path; may be null.</param>
private void EnhanceRun(
    IEnumerable<string> analysisTargets,
    OptionallyEmittedData dataToInsert,
    OptionallyEmittedData dataToRemove,
    IEnumerable<string> invocationTokensToRedact,
    IEnumerable<string> invocationPropertiesToLog,
    string defaultFileEncoding = null,
    IDictionary<string, HashData> filePathToHashDataMap = null)
{
    _run.Invocations ??= new List<Invocation>();

    if (defaultFileEncoding != null)
    {
        _run.DefaultEncoding = defaultFileEncoding;
    }

    Encoding encoding = SarifUtilities.GetEncodingFromName(_run.DefaultEncoding);

    if (analysisTargets != null)
    {
        _run.Artifacts ??= new List<Artifact>();

        foreach (string target in analysisTargets)
        {
            var uri = new Uri(UriHelper.MakeValidUri(target), UriKind.RelativeOrAbsolute);

            // Prefer a precomputed hash for the target when hashes are requested.
            HashData hashData = null;
            if (dataToInsert.HasFlag(OptionallyEmittedData.Hashes))
            {
                filePathToHashDataMap?.TryGetValue(target, out hashData);
            }

            Artifact artifact = Artifact.Create(
                new Uri(target, UriKind.RelativeOrAbsolute),
                dataToInsert,
                encoding,
                hashData: hashData);

            artifact.Location = new ArtifactLocation { Uri = uri };

            // GetFileIndex inserts the artifact into run.Files when absent.
            artifact.Location.Index = _run.GetFileIndex(
                artifact.Location,
                addToFilesTableIfNotPresent: true,
                dataToInsert: dataToInsert,
                encoding: encoding,
                hashData: hashData);
        }
    }

    Invocation invocation = Invocation.Create(
        emitMachineEnvironment: dataToInsert.HasFlag(OptionallyEmittedData.EnvironmentVariables),
        emitTimestamps: !dataToRemove.HasFlag(OptionallyEmittedData.NondeterministicProperties),
        invocationPropertiesToLog);

    // TODO: redaction should really be applied across the complete log file
    // by a dedicated rewriting visitor or some other approach.
    if (invocationTokensToRedact != null)
    {
        invocation.CommandLine = Redact(invocation.CommandLine, invocationTokensToRedact);
        invocation.Machine = Redact(invocation.Machine, invocationTokensToRedact);
        invocation.Account = Redact(invocation.Account, invocationTokensToRedact);

        if (invocation.WorkingDirectory != null)
        {
            invocation.WorkingDirectory.Uri = Redact(invocation.WorkingDirectory.Uri, invocationTokensToRedact);
        }

        if (invocation.EnvironmentVariables != null)
        {
            // Snapshot the keys; the dictionary is mutated while iterating.
            foreach (string key in invocation.EnvironmentVariables.Keys.ToArray())
            {
                invocation.EnvironmentVariables[key] = Redact(invocation.EnvironmentVariables[key], invocationTokensToRedact);
            }
        }
    }

    _run.Invocations.Add(invocation);
}
/// <summary>
/// Executes the 'convert' command: translates a tool's native log file to
/// SARIF, optionally normalizing the output for GitHub DSP ingestion.
/// </summary>
/// <param name="convertOptions">Options controlling the conversion.</param>
/// <param name="fileSystem">File system abstraction; a real file system is used when null.</param>
/// <returns>SUCCESS on success; FAILURE on invalid options or conversion error.</returns>
public int Run(ConvertOptions convertOptions, IFileSystem fileSystem = null)
{
    fileSystem = fileSystem ?? new FileSystem();

    try
    {
        if (string.IsNullOrEmpty(convertOptions.OutputFilePath))
        {
            // Default the output path to "<input>.sarif".
            convertOptions.OutputFilePath = convertOptions.InputFilePath + SarifConstants.SarifFileExtension;
        }

        if (fileSystem.DirectoryExists(convertOptions.OutputFilePath))
        {
            Console.Error.WriteLine(
                string.Format(
                    CultureInfo.CurrentCulture,
                    "The output path '{0}' is a directory.",
                    convertOptions.OutputFilePath));
            return FAILURE;
        }

        if (!ValidateOptions(convertOptions, fileSystem))
        {
            return FAILURE;
        }

        LoggingOptions loggingOptions = LoggingOptions.None;
        OptionallyEmittedData dataToInsert = convertOptions.DataToInsert.ToFlags();

        // Stray empty statements after these blocks were removed.
        if (convertOptions.PrettyPrint)
        {
            loggingOptions |= LoggingOptions.PrettyPrint;
        }

        if (convertOptions.Force)
        {
            loggingOptions |= LoggingOptions.OverwriteExistingOutputFile;
        }

        new ToolFormatConverter().ConvertToStandardFormat(
            convertOptions.ToolFormat,
            convertOptions.InputFilePath,
            convertOptions.OutputFilePath,
            loggingOptions,
            dataToInsert,
            convertOptions.PluginAssemblyPath);

        if (convertOptions.NormalizeForGitHubDsp)
        {
            SarifLog sarifLog;

            var serializer = new JsonSerializer
            {
                // FIX: use the enum member Formatting.None rather than the
                // bare literal 0 (same value, correct idiom).
                Formatting = convertOptions.PrettyPrint ? Formatting.Indented : Formatting.None,
            };

            using (JsonTextReader reader = new JsonTextReader(new StreamReader(convertOptions.OutputFilePath)))
            {
                sarifLog = serializer.Deserialize<SarifLog>(reader);
            }

            var visitor = new GitHubDspIngestionVisitor();
            visitor.VisitSarifLog(sarifLog);

            // Rewrite the output file in place with the normalized log.
            using (FileStream stream = File.Create(convertOptions.OutputFilePath))
            using (StreamWriter streamWriter = new StreamWriter(stream))
            using (JsonTextWriter writer = new JsonTextWriter(streamWriter))
            {
                serializer.Serialize(writer, sarifLog);
            }
        }
    }
    catch (Exception ex) when (!Debugger.IsAttached)
    {
        Console.WriteLine(ex);
        return FAILURE;
    }

    return SUCCESS;
}
/// <summary>
/// Builds a FileData object for <paramref name="uri"/>, optionally embedding
/// file contents and/or hashes as requested by <paramref name="dataToInsert"/>.
/// </summary>
/// <param name="uri">Location of the file; must not be null.</param>
/// <param name="dataToInsert">Flags selecting which optional data to emit.</param>
/// <param name="mimeType">MIME type; derived from the file extension when null.</param>
/// <param name="encoding">Encoding used when reading file contents.</param>
/// <param name="fileSystem">File system abstraction; a default is created when null.</param>
public static FileData Create(
    Uri uri,
    OptionallyEmittedData dataToInsert = OptionallyEmittedData.None,
    string mimeType = null,
    Encoding encoding = null,
    IFileSystem fileSystem = null)
{
    if (uri == null)
    {
        throw new ArgumentNullException(nameof(uri));
    }

    // Fall back to extension-based MIME detection and the default file system.
    mimeType = mimeType ?? SarifWriters.MimeType.DetermineFromFileExtension(uri);
    fileSystem = fileSystem ?? new FileSystem();

    var result = new FileData() { MimeType = mimeType };

    // Attempt to persist file contents and/or compute file hash and persist
    // this information to the log file. In the event that there is some issue
    // accessing the file, for example, due to ACLs applied to a directory,
    // we currently swallow these exceptions without populating any requested
    // data or putting a notification in the log file that a problem
    // occurred. Something to discuss moving forward.
    try
    {
        bool isReadableLocalFile =
            uri.IsAbsoluteUri && uri.IsFile && fileSystem.FileExists(uri.LocalPath);

        if (!isReadableLocalFile)
        {
            return result;
        }

        string localPath = uri.LocalPath;

        bool wantsBinaryContents =
            dataToInsert.Includes(OptionallyEmittedData.BinaryFiles) &&
            SarifWriters.MimeType.IsBinaryMimeType(mimeType);

        if (wantsBinaryContents)
        {
            result.Contents = GetEncodedFileContents(fileSystem, localPath, mimeType, encoding);
        }

        bool wantsTextContents =
            dataToInsert.Includes(OptionallyEmittedData.TextFiles) &&
            SarifWriters.MimeType.IsTextualMimeType(mimeType);

        if (wantsTextContents)
        {
            result.Contents = GetEncodedFileContents(fileSystem, localPath, mimeType, encoding);
        }

        if (dataToInsert.Includes(OptionallyEmittedData.Hashes))
        {
            HashData computed = HashUtilities.ComputeHashes(localPath);

            var hashList = new List<Hash>();
            hashList.Add(new Hash() { Value = computed.MD5, Algorithm = "md5", });
            hashList.Add(new Hash() { Value = computed.Sha1, Algorithm = "sha-1", });
            hashList.Add(new Hash() { Value = computed.Sha256, Algorithm = "sha-256", });
            result.Hashes = hashList;
        }
    }
    catch (Exception e) when (e is IOException || e is UnauthorizedAccessException)
    {
        // Deliberately swallowed; see the comment above the try block.
    }

    return result;
}
/// <summary>
/// Returns true when every flag set in <paramref name="otherOptionallyEmittedData"/>
/// is also set in <paramref name="optionallyEmittedData"/>.
/// </summary>
public static bool Includes(this OptionallyEmittedData optionallyEmittedData, OptionallyEmittedData otherOptionallyEmittedData)
{
    // Mask the receiver down to the requested flags; equality means all were present.
    OptionallyEmittedData masked = optionallyEmittedData & otherOptionallyEmittedData;
    return masked == otherOptionallyEmittedData;
}
/// <summary>
/// Initializes the visitor with the set of optional data to insert and an
/// optional map of original uriBaseId values.
/// </summary>
/// <param name="dataToInsert">Flags selecting which optional data to insert.</param>
/// <param name="originalUriBaseIds">Optional uriBaseId-to-location map; may be null.</param>
public InsertOptionalDataVisitor(OptionallyEmittedData dataToInsert, IDictionary<string, ArtifactLocation> originalUriBaseIds = null)
{
    // -1 marks "no rule resolved yet" for the rule index tracker.
    (_dataToInsert, _originalUriBaseIds, _ruleIndex) = (dataToInsert, originalUriBaseIds, -1);
}
/// <summary>
/// Initializes the visitor with the set of optional data to strip from the log.
/// </summary>
/// <param name="optionallyEmittedData">Flags selecting which optional data to remove.</param>
public RemoveOptionalDataVisitor(OptionallyEmittedData optionallyEmittedData)
    => _dataToRemove = optionallyEmittedData;
/// <summary>
/// Transforms a SARIF log between versions: v1 or pre-release v2 up to current v2,
/// or any v2 (and v1 passthrough) down to v1, per the requested output version.
/// </summary>
/// <param name="transformOptions">Transform settings (paths, target version, formatting).</param>
/// <returns>SUCCESS, or FAILURE on invalid options or any exception.</returns>
public int Run(TransformOptions transformOptions)
{
    try
    {
        // Only set --output-file if --inline isn't specified. ValidateOptions will check
        // to make sure that exactly one of those two options is set.
        if (!transformOptions.Inline)
        {
            transformOptions.OutputFilePath = CommandUtilities.GetTransformedOutputFileName(transformOptions);
        }

        bool valid = ValidateOptions(transformOptions);
        if (!valid) { return (FAILURE); }

        // NOTE: we don't actually utilize the dataToInsert command-line data yet...
        OptionallyEmittedData dataToInsert = transformOptions.DataToInsert.ToFlags();

        string inputFilePath = transformOptions.InputFilePath;

        // Inspect the input just enough to learn its SARIF version string.
        string inputVersion = SniffVersion(inputFilePath);

        // If the user wants to transform to current v2, we check to see whether the input
        // file is v2 or pre-release v2. We upgrade both formats to current v2.
        //
        // Correspondingly, if the input file is v2 of any kind, we first ensure that it is
        // current v2, then drop it down to v1.
        //
        // We do not support transforming to any obsoleted pre-release v2 formats.
        if (transformOptions.SarifOutputVersion == SarifVersion.Current)
        {
            if (inputVersion == "1.0.0")
            {
                // v1 input: run the v1 -> current v2 visitor and write the result.
                SarifLogVersionOne actualLog = ReadSarifFile<SarifLogVersionOne>(_fileSystem, transformOptions.InputFilePath, SarifContractResolverVersionOne.Instance);
                var visitor = new SarifVersionOneToCurrentVisitor();
                visitor.VisitSarifLogVersionOne(actualLog);
                WriteSarifFile(_fileSystem, visitor.SarifLog, transformOptions.OutputFilePath, transformOptions.Formatting);
            }
            else
            {
                // We have a pre-release v2 file that we should upgrade to current.
                PrereleaseCompatibilityTransformer.UpdateToCurrentVersion(
                    _fileSystem.ReadAllText(inputFilePath),
                    formatting: transformOptions.Formatting,
                    out string sarifText);

                _fileSystem.WriteAllText(transformOptions.OutputFilePath, sarifText);
            }
        }
        else
        {
            if (inputVersion == "1.0.0")
            {
                // Input is already v1: stamp the v1 schema URI and rewrite it.
                SarifLogVersionOne logV1 = ReadSarifFile<SarifLogVersionOne>(_fileSystem, transformOptions.InputFilePath, SarifContractResolverVersionOne.Instance);
                logV1.SchemaUri = SarifVersion.OneZeroZero.ConvertToSchemaUri();
                WriteSarifFile(_fileSystem, logV1, transformOptions.OutputFilePath, transformOptions.Formatting, SarifContractResolverVersionOne.Instance);
            }
            else
            {
                string currentSarifVersion = SarifUtilities.StableSarifVersion;

                string sarifText = _fileSystem.ReadAllText(inputFilePath);
                SarifLog actualLog = null;

                if (inputVersion != currentSarifVersion)
                {
                    // Note that we don't provide formatting here. It is not required to indent the v2 SARIF - it
                    // will be transformed to v1 later, where we should apply the indentation settings.
                    actualLog = PrereleaseCompatibilityTransformer.UpdateToCurrentVersion(
                        sarifText,
                        formatting: Formatting.None,
                        out sarifText);
                }
                else
                {
                    actualLog = JsonConvert.DeserializeObject<SarifLog>(sarifText);
                }

                // Downgrade current v2 to v1 and persist with the v1 contract resolver.
                var visitor = new SarifCurrentToVersionOneVisitor();
                visitor.VisitSarifLog(actualLog);

                WriteSarifFile(_fileSystem, visitor.SarifLogVersionOne, transformOptions.OutputFilePath, transformOptions.Formatting, SarifContractResolverVersionOne.Instance);
            }
        }
    }
    catch (Exception ex)
    {
        Console.WriteLine(ex);
        return (FAILURE);
    }

    return (SUCCESS);
}
/// <summary>
/// Builds a Run populated with artifacts for each analysis target and a single
/// Invocation, applying token redaction to invocation fields when requested.
/// </summary>
/// <param name="analysisTargets">Target paths to record as artifacts; may be null.</param>
/// <param name="dataToInsert">Flags controlling optional data emission.</param>
/// <param name="invocationTokensToRedact">Sensitive tokens to scrub from invocation data; may be null.</param>
/// <param name="invocationPropertiesToLog">Invocation properties to capture.</param>
/// <param name="defaultFileEncoding">Run-level default encoding; may be null.</param>
private static Run CreateRun(
    IEnumerable<string> analysisTargets,
    OptionallyEmittedData dataToInsert,
    IEnumerable<string> invocationTokensToRedact,
    IEnumerable<string> invocationPropertiesToLog,
    string defaultFileEncoding = null)
{
    var run = new Run
    {
        Invocations = new List<Invocation>(),
        DefaultEncoding = defaultFileEncoding
    };

    if (analysisTargets != null)
    {
        run.Artifacts = new List<Artifact>();

        foreach (string analysisTarget in analysisTargets)
        {
            // Normalize the raw target into a valid (relative or absolute) URI
            // for the artifact location.
            var normalizedUri = new Uri(UriHelper.MakeValidUri(analysisTarget), UriKind.RelativeOrAbsolute);

            Artifact artifact = Artifact.Create(
                new Uri(analysisTarget, UriKind.RelativeOrAbsolute),
                dataToInsert);

            artifact.Location = new ArtifactLocation { Uri = normalizedUri };

            // This call will insert the file object into run.Files if not already present
            artifact.Location.Index = run.GetFileIndex(
                artifact.Location,
                addToFilesTableIfNotPresent: true,
                dataToInsert);
        }
    }

    var invocation = Invocation.Create(
        dataToInsert.HasFlag(OptionallyEmittedData.EnvironmentVariables),
        invocationPropertiesToLog);

    // TODO we should actually redact across the complete log file context
    // by a dedicated rewriting visitor or some other approach.
    if (invocationTokensToRedact != null)
    {
        invocation.CommandLine = Redact(invocation.CommandLine, invocationTokensToRedact);
        invocation.Machine = Redact(invocation.Machine, invocationTokensToRedact);
        invocation.Account = Redact(invocation.Account, invocationTokensToRedact);

        if (invocation.WorkingDirectory != null)
        {
            invocation.WorkingDirectory.Uri = Redact(invocation.WorkingDirectory.Uri, invocationTokensToRedact);
        }

        if (invocation.EnvironmentVariables != null)
        {
            // Snapshot the keys so the dictionary can be mutated while iterating.
            foreach (string variableName in invocation.EnvironmentVariables.Keys.ToArray())
            {
                string original = invocation.EnvironmentVariables[variableName];
                invocation.EnvironmentVariables[variableName] = Redact(original, invocationTokensToRedact);
            }
        }
    }

    run.Invocations.Add(invocation);
    return run;
}
/// <summary>
/// When overridden in a derived class, reads a tool-specific log from
/// <paramref name="input"/> and emits the converted results through
/// <paramref name="output"/>.
/// </summary>
/// <param name="input">Stream containing the tool's native log.</param>
/// <param name="output">Result log writer that receives the converted output.</param>
/// <param name="dataToInsert">
/// Flags selecting optional data (e.g., hashes or file contents) to include.
/// </param>
public abstract void Convert(Stream input, IResultLogWriter output, OptionallyEmittedData dataToInsert);
// Creates the SARIF file logger for this analysis run — the current-version
// SarifLogger, or SarifOneZeroZeroLogger when v1 output is requested — starts
// analysis logging on it, and registers it with the context's aggregating logger.
// Any I/O failure while creating the log file is reported and converted into
// an exit-application exception via the error callback.
private void InitializeOutputFile(TOptions analyzeOptions, TContext context)
{
    string filePath = analyzeOptions.OutputFilePath;
    AggregatingLogger aggregatingLogger = (AggregatingLogger)context.Logger;

    // No output path configured means no file logger; nothing to do.
    if (!string.IsNullOrEmpty(filePath))
    {
        InvokeCatchingRelevantIOExceptions
        (
            () =>
            {
                LogFilePersistenceOptions logFilePersistenceOptions = analyzeOptions.ConvertToLogFilePersistenceOptions();

                OptionallyEmittedData dataToInsert = analyzeOptions.DataToInsert.ToFlags();
                OptionallyEmittedData dataToRemove = analyzeOptions.DataToRemove.ToFlags();

                SarifLogger sarifLogger;

                _run = new Run();

                if (analyzeOptions.SarifOutputVersion != SarifVersion.OneZeroZero)
                {
                    sarifLogger = new SarifLogger(
                        analyzeOptions.OutputFilePath,
                        logFilePersistenceOptions,
                        dataToInsert,
                        dataToRemove,
                        tool: _tool,
                        run: _run,
                        analysisTargets: null,
                        invocationTokensToRedact: GenerateSensitiveTokensList(),
                        invocationPropertiesToLog: analyzeOptions.InvocationPropertiesToLog,
                        levels: analyzeOptions.Level,
                        kinds: analyzeOptions.Kind);
                }
                else
                {
                    // v1 logger takes an identical argument list; only the type differs.
                    sarifLogger = new SarifOneZeroZeroLogger(
                        analyzeOptions.OutputFilePath,
                        logFilePersistenceOptions,
                        dataToInsert,
                        dataToRemove,
                        tool: _tool,
                        run: _run,
                        analysisTargets: null,
                        invocationTokensToRedact: GenerateSensitiveTokensList(),
                        invocationPropertiesToLog: analyzeOptions.InvocationPropertiesToLog,
                        levels: analyzeOptions.Level,
                        kinds: analyzeOptions.Kind);
                }

                // Capture the logger's target-to-hash map for reuse elsewhere in the run.
                _pathToHashDataMap = sarifLogger.AnalysisTargetToHashDataMap;

                sarifLogger.AnalysisStarted();
                aggregatingLogger.Loggers.Add(sarifLogger);
            },
            (ex) =>
            {
                Errors.LogExceptionCreatingLogFile(context, filePath, ex);
                ThrowExitApplicationException(context, ExitReason.ExceptionCreatingLogFile, ex);
            }
        );
    }
}