/// <summary>
/// Verifies the serialized analysis output against the checked-in baseline file.
/// On mismatch, fails the test with a message listing the loaded test scripts and
/// commands to view the baseline, view the actual output, and navigate to the test folder
/// so the baseline can easily be refreshed and diffed.
/// </summary>
/// <param name="result">Analysis result (not inspected here; kept for the verify-callback signature).</param>
/// <param name="resultsString">Serialized problem list to compare against the baseline.</param>
private void RunVerification(CodeAnalysisResult result, string resultsString)
{
    string baseline = RuleTestUtils.ReadFileToString(BaselineFilePath);

    // Always persist the actual output so a failing run can be diffed against,
    // or promoted to, the baseline.
    RuleTestUtils.SaveStringToFile(resultsString, OutputFilePath);
    string loadedTestScriptFiles = ListScriptFilenames();

    // The baseline check is an exact text match, so use an ordinal comparison.
    // The previous culture-sensitive compare (CurrentCulture) could yield different
    // results depending on the machine's locale (CA1309).
    if (!string.Equals(resultsString, baseline, StringComparison.Ordinal))
    {
        Assert.Fail(String.Format(
            "The result is not the same as expected. It's recommended you compare the actual output " +
            "to the baseline. If the output matches your expectations, update the baseline file inside " +
            "the project.\r\n\r\n" +
            "################## loaded test script files ################## \r\n" +
            loadedTestScriptFiles + "\r\n" +
            "rem ################## View Baseline: ##################\r\n" +
            "Notepad \"{0}\" \r\n\r\n" +
            "rem ################## View Actual Output: ##################\r\n" +
            "Notepad \"{1}\" \r\n\r\n" +
            "################## cd test folder command ##################\r\n" +
            "cd \"{2}\"\r\n",
            BaselineFilePath,
            OutputFilePath,
            ScriptsFolder));
    }
}
/// <summary>
/// Runs static analysis on the supplied code inside a container and returns the parsed result.
/// Creates a unique scratch directory for the run and always removes it afterwards.
/// </summary>
/// <param name="code">The code under test to analyse.</param>
/// <returns>
/// The parsed <see cref="CodeAnalysisResult"/> when container execution succeeds;
/// otherwise a default (empty) result.
/// </returns>
public async Task<CodeAnalysisResult> Analyse(TestingCode code)
{
    // Unique per-run scratch folder so concurrent analyses cannot collide.
    var tempFolder = Path.Combine(Configuration.TempFolderPath, Guid.NewGuid().ToString());
    await CreateDirectoryForAnalysis(code, tempFolder);

    try
    {
        ExecutableCode executableCode = _codeFactory.GetExecutableCode(code);
        Command executionCommand = GetCompilationCommand(executableCode, tempFolder);
        Command analysisCommand = ModifyCommandForAnalysis(executionCommand);
        ContainerExecutionResult containerExecutionResult = await _executor.ExecuteAsync(analysisCommand);

        CodeAnalysisResult codeAnalysis = new CodeAnalysisResult();
        if (containerExecutionResult.Result == ExecutionResult.Success)
        {
            codeAnalysis = AnalyseOutput(containerExecutionResult);
        }
        return codeAnalysis;
    }
    finally
    {
        // Previously this ran only on the success path, leaking the scratch directory
        // whenever execution or output parsing threw. Clean up unconditionally.
        Directory.Delete(tempFolder, true);
    }
}
/// <summary>
/// Runs code analysis over the model, writing the results to <paramref name="OutFile"/>.
/// </summary>
/// <param name="model">The SQL model to analyse.</param>
/// <param name="OutFile">Path of the results file the service writes to.</param>
private static void RunAnalysis(TSqlModel model, string OutFile)
{
    CodeAnalysisService service = new CodeAnalysisServiceFactory().CreateAnalysisService(model.Version);
    service.ResultsFile = OutFile;

    // Analyze() writes the results file as a side effect; the returned result object
    // is not needed here, so discard it instead of binding an unused local.
    _ = service.Analyze(model);
}
/// <summary>
/// Verifies the serialized analysis output against the checked-in baseline file.
/// On mismatch, fails the test with a message listing the loaded test scripts plus
/// the baseline, actual-output, and test-folder paths for easy comparison.
/// </summary>
/// <param name="result">Analysis result (not inspected here; kept for the verify-callback signature).</param>
/// <param name="resultsString">Serialized problem list to compare against the baseline.</param>
private void RunVerification(CodeAnalysisResult result, string resultsString)
{
    string baseline = RuleTestUtils.ReadFileToString(BaselineFilePath);

    // Always persist the actual output so a failing run can be diffed or promoted to the baseline.
    RuleTestUtils.SaveStringToFile(resultsString, OutputFilePath);
    string loadedTestScriptFiles = ListScriptFilenames();

    // Exact text match, so use an ordinal comparison: the previous culture-sensitive
    // compare could yield different results per machine locale (CA1309).
    if (!string.Equals(resultsString, baseline, StringComparison.Ordinal))
    {
        StringBuilder failureMessage = new StringBuilder();
        failureMessage.AppendLine("The result is not the same as expected. Please compare actual output to baseline.");
        failureMessage.AppendLine();
        failureMessage.AppendLine("### Loaded Test Script Files ###");
        failureMessage.AppendLine(loadedTestScriptFiles);
        failureMessage.AppendLine();
        failureMessage.AppendLine("### View Baseline ###");
        failureMessage.AppendLine(BaselineFilePath);
        failureMessage.AppendLine();
        // Fixed typo: was "View Action Output".
        failureMessage.AppendLine("### View Actual Output ###");
        failureMessage.AppendLine(OutputFilePath);
        failureMessage.AppendLine();
        failureMessage.AppendLine("### Test Folder ###");
        failureMessage.AppendLine(ScriptsFolder);
        Assert.Fail(failureMessage.ToString());
    }
}
/// <summary>
/// Runs all static code analysis rules against the model, records the problems found,
/// and asserts they match the expected set.
/// </summary>
public void RunSCARules()
{
    CodeAnalysisService service = new CodeAnalysisServiceFactory().CreateAnalysisService(_Model.Version);
    CodeAnalysisResult result = service.Analyze(_Model);
    SerializeResultOutput(result);

    // MSTest's CollectionAssert.AreEquivalent takes (expected, actual); the arguments
    // were previously swapped, which produces misleading failure messages. This also
    // matches the argument order used by the sibling RunSqlRuleTest method.
    CollectionAssert.AreEquivalent(_ExpectedProblems, _FoundProblems);
}
/// <summary>
/// Executes static code analysis on the model, collects the problems for the rules
/// under test, and checks them against the expected problem list.
/// </summary>
public void RunSqlRuleTest()
{
    // Act: run analysis and gather the problems relevant to this test.
    var analysisService = new CodeAnalysisServiceFactory().CreateAnalysisService(_Model.Version);
    var analysisResult = analysisService.Analyze(_Model);
    LoadRuleResults(analysisResult);

    // Assert
    CollectionAssert.AreEquivalent(_ExpectedProblems, _ActualProblems);
}
/// <summary>
/// Runs code analysis over the model, writing the results to <paramref name="OutFile"/>,
/// and reports completion and success status to the console.
/// </summary>
/// <param name="model">The SQL model to analyse.</param>
/// <param name="OutFile">Path of the results file the service writes to.</param>
private void RunAnalysis(TSqlModel model, string OutFile)
{
    var analysisService = new CodeAnalysisServiceFactory().CreateAnalysisService(model.Version);
    analysisService.ResultsFile = OutFile;

    var analysisResult = analysisService.Analyze(model);
    Console.WriteLine("Code Analysis with output file {0} complete, analysis succeeded? {1}",
        OutFile, analysisResult.AnalysisSucceeded);
}
/// <summary>
/// Copies problems from the analysis result into _ActualProblems, keeping only problems
/// whose rule id is in _rulesToRun (or all problems when no rule filter is set).
/// </summary>
/// <param name="result">The analysis result whose problems are recorded.</param>
private void LoadRuleResults(CodeAnalysisResult result)
{
    foreach (SqlRuleProblem problem in result.Problems)
    {
        // A null filter means "record every problem".
        bool ruleSelected = _rulesToRun == null
            || _rulesToRun.Exists(ruleId => ruleId.Equals(problem.RuleId));
        if (!ruleSelected)
        {
            continue;
        }

        _ActualProblems.Add(new TestSqlRuleProblem(problem.RuleId, problem.StartLine));
    }
}
/// <summary>
/// Builds a one-element result list for the bind-preview scenario: a single
/// CodeAnalysisResult with ApplicationID "1" and an empty detail list.
/// </summary>
/// <returns>A list containing exactly one pre-populated <see cref="CodeAnalysisResult"/>.</returns>
private List<CodeAnalysisResult> CreateCodeAnalysisResultForBindPreview()
{
    return new List<CodeAnalysisResult>
    {
        new CodeAnalysisResult()
        {
            ApplicationID = "1",
            CodeAnalysisResultDetails = new List<CodeAnalysisResultDetail>()
        }
    };
}
/// <summary>
/// Runs the configured analysis service against the model under test, dumps any analysis
/// errors, serializes the problems to a string, and hands both to the verification callback.
/// </summary>
/// <param name="service">The configured analysis service to run.</param>
/// <param name="verify">Callback that checks the result and its serialized problem string.</param>
private void RunRulesAndVerifyResult(CodeAnalysisService service, Action<CodeAnalysisResult, string> verify)
{
    CodeAnalysisResult ruleResults = service.Analyze(this.ModelForAnalysis);

    // Only considering analysis errors for now - might want to expand to
    // initialization and suppression errors in the future.
    this.DumpErrors(ruleResults.AnalysisErrors);

    string serializedProblems = this.DumpProblemsToString(ruleResults.Problems);
    verify(ruleResults, serializedProblems);
}
/// <summary>
/// Records each problem raised by this project's own rules (rule ids prefixed "Smells.")
/// into _FoundProblems as a TestProblem of (start line, start column, rule id).
/// </summary>
/// <param name="result">The analysis result whose problems are filtered and recorded.</param>
public void SerializeResultOutput(CodeAnalysisResult result)
{
    foreach (SqlRuleProblem problem in result.Problems)
    {
        // Only concern ourselves with our problems. Use an ordinal prefix check:
        // the parameterless StartsWith is culture-sensitive and can misbehave
        // under some locales (CA1310). Locals also renamed to camelCase per convention.
        if (problem.RuleId.StartsWith("Smells.", StringComparison.Ordinal))
        {
            _FoundProblems.Add(new TestProblem(problem.StartLine, problem.StartColumn, problem.RuleId));
        }
    }
}
/// <summary>
/// Runs analysis, writing the output to a file, then prints the model's validation
/// issues and the problems found.
/// </summary>
/// <param name="model">The SQL model to analyse.</param>
/// <param name="resultsFilePath">Path of the results file the service writes to.</param>
private static void RunAnalysis(TSqlModel model, string resultsFilePath)
{
    // A default service runs all discovered rules and treats issues as Warnings.
    // Rule selection can be configured via a CodeAnalysisRuleSettings object passed to
    // the service, or via the CodeAnalysisServiceSettings passed into the factory method;
    // see RuleTest.CreateCodeAnalysisService for examples.
    var analysisService = new CodeAnalysisServiceFactory().CreateAnalysisService(model.Version);
    analysisService.ResultsFile = resultsFilePath;

    var analysisResult = analysisService.Analyze(model);
    Console.WriteLine("Code Analysis with output file {0} complete, analysis succeeded? {1}",
        resultsFilePath, analysisResult.AnalysisSucceeded);

    PrintProblemsAndValidationErrors(model, analysisResult);
}
/// <summary>
/// Parses the container's JSON standard output into a <see cref="CodeAnalysisResult"/>.
/// The first file entry's messages become the analysis results; the run is marked
/// successful only when that entry reports zero errors.
/// </summary>
/// <param name="containerExecutionResult">Container run whose StandardOutput holds the JSON report.</param>
/// <returns>The mapped analysis result; IsSuccessful stays false when no file entry exists.</returns>
protected override CodeAnalysisResult AnalyseOutput(ContainerExecutionResult containerExecutionResult)
{
    // DeserializeObject returns null for empty/whitespace input; guard with ?. so an
    // empty stdout doesn't throw a NullReferenceException on FirstOrDefault.
    var output = JsonConvert.DeserializeObject<Output[]>(containerExecutionResult.StandardOutput);
    var currentFile = output?.FirstOrDefault();
    var analysisResults = _mapper.Map<AnalysisResult[]>(currentFile?.Messages);

    var codeAnalysisResult = new CodeAnalysisResult { AnalysisResults = analysisResults };

    // Null currentFile leaves the comparison false, so IsSuccessful remains unset.
    if (currentFile?.ErrorCount == 0)
    {
        codeAnalysisResult.IsSuccessful = true;
    }

    return codeAnalysisResult;
}
/// <summary>
/// Writes the model's validation issues and the analysis result's problems to the console,
/// wrapped in separator lines.
/// </summary>
/// <param name="model">Model whose validation issues are printed.</param>
/// <param name="analysisResult">Analysis result whose problems are printed.</param>
private static void PrintProblemsAndValidationErrors(TSqlModel model, CodeAnalysisResult analysisResult)
{
    Console.WriteLine("-----------------");
    Console.WriteLine("Outputting validation issues and problems");

    foreach (var validationIssue in model.Validate())
    {
        Console.WriteLine("\tValidation Issue: '{0}', Severity: {1}",
            validationIssue.Message, validationIssue.MessageType);
    }

    foreach (var analysisProblem in analysisResult.Problems)
    {
        Console.WriteLine("\tCode Analysis Problem: '{0}', Severity: {1}, Source: {2}, StartLine/Column [{3},{4}]",
            analysisProblem.ErrorMessageString, analysisProblem.Severity, analysisProblem.SourceName,
            analysisProblem.StartLine, analysisProblem.StartColumn);
    }

    Console.WriteLine("-----------------");
}