/// <summary>
/// Runs the full workflow over the PHP patterns fixture and verifies that
/// every PHP pattern matches at least once, plus exact counts for one
/// Configure/write pattern variant.
/// </summary>
public void Match_TestPatternsPhp_MatchedAllDefault()
{
    // Arrange: process Patterns.php with the default pattern repository.
    var path = Path.Combine(TestUtility.TestsDataPath, "Patterns.php");
    var sourceRep = new FileSourceRepository(path);
    var logger = new TestLogger();
    var workflow = new Workflow(sourceRep, Global.PatternsRepository)
    {
        Logger = logger
    };
    workflow.Process();

    // Materialize once: the original deferred OrderBy query was re-evaluated
    // (and re-sorted) on every Count() call in the loop and asserts below.
    MatchResultDto[] matchResults = logger.Matches
        .ToDto()
        .OrderBy(r => r.PatternKey)
        .ToArray();

    IEnumerable<PatternDto> patternDtos = Global.PatternsRepository.GetAll()
        .Where(patternDto => patternDto.Languages.Contains("Php"));

    // Every PHP pattern must produce at least one match in the fixture.
    foreach (PatternDto dto in patternDtos)
    {
        Assert.Greater(matchResults.Count(p => p.PatternKey == dto.Key), 0, dto.Description);
    }

    // The Configure/write pattern matches the "3" variant exactly once
    // and must not match the "50" variant at all.
    Assert.AreEqual(1, matchResults.Count(r =>
        r.MatchedCode.Contains("Configure") &&
        r.MatchedCode.Contains("write") &&
        r.MatchedCode.Contains("3")));
    Assert.AreEqual(0, matchResults.Count(r =>
        r.MatchedCode.Contains("Configure") &&
        r.MatchedCode.Contains("write") &&
        r.MatchedCode.Contains("50")));
}
/// <summary>
/// Resolves a user-supplied path or URL into the matching source repository:
/// blank → dummy repository, existing directory → directory repository,
/// existing .zip file → zip caching repository, other existing file →
/// single-file repository, anything else → zip-at-URL caching repository.
/// </summary>
/// <param name="path">Local path or remote URL; may be null or blank.</param>
/// <param name="tempDir">Optional override for the extraction/download directory.</param>
/// <param name="parameters">CLI options; currently unused here but kept for interface compatibility.</param>
/// <returns>The repository appropriate for <paramref name="path"/>.</returns>
public static SourceRepository CreateSourceRepository(string path, string tempDir, CliParameters parameters)
{
    SourceRepository sourceRepository;
    if (string.IsNullOrWhiteSpace(path))
    {
        sourceRepository = DummySourceRepository.Instance;
    }
    else if (DirectoryExt.Exists(path))
    {
        sourceRepository = new DirectorySourceRepository(path);
    }
    else if (FileExt.Exists(path))
    {
        string extension = Path.GetExtension(path);
        if (extension.EqualsIgnoreCase(".zip"))
        {
            var zipCachingRepository = new ZipCachingRepository(path);
            if (tempDir != null)
            {
                zipCachingRepository.ExtractPath = tempDir;
            }
            sourceRepository = zipCachingRepository;
        }
        else
        {
            sourceRepository = new FileSourceRepository(path);
        }
    }
    else
    {
        // Not a local path: treat it as a URL pointing at a zip archive.
        string url = path;
        string projectName = null;
        string urlWithoutHttp = TextUtils.HttpRegex.Replace(url, "");

        // Consolidated GitHub handling (the original performed the same
        // StartsWith check twice): bare repository URLs get the master-branch
        // archive suffix appended, and the project name is the second path
        // segment of github.com/{owner}/{name}.
        if (urlWithoutHttp.StartsWith("github.com", StringComparison.OrdinalIgnoreCase))
        {
            if (!url.EndsWith(".zip", StringComparison.OrdinalIgnoreCase))
            {
                url = url + "/archive/master.zip";
            }
            projectName = urlWithoutHttp.Split('/').ElementAtOrDefault(2);
        }

        var zipAtUrlCachedCodeRepository = new ZipAtUrlCachingRepository(url, projectName);
        if (tempDir != null)
        {
            zipAtUrlCachedCodeRepository.DownloadPath = tempDir;
        }
        sourceRepository = zipAtUrlCachedCodeRepository;
    }
    return sourceRepository;
}
/// <summary>
/// A repository built over a single file must expose exactly that file and
/// decompose its name, relative path, root path and full name consistently.
/// </summary>
public void FileSourceRepository_TestPath_CorrectPathsAndNames()
{
    string fullName = Path.Combine(TestUtility.TestsDataPath, "Test Project", "1.cs");
    var repository = new FileSourceRepository(fullName);

    // Exactly one file name is expected; Single() throws otherwise.
    IEnumerable<string> fileNames = repository.GetFileNames();
    var source = (TextFile)repository.ReadFile(fileNames.Single());

    Assert.AreEqual("1.cs", source.Name);
    Assert.AreEqual("", source.RelativePath);
    Assert.AreEqual(Path.GetDirectoryName(fullName), source.RootPath);
    Assert.AreEqual(fullName, source.FullName);
}
/// <summary>
/// Runs the full workflow over the Java patterns fixture, verifies that every
/// Java pattern matches at least once, and pins exact match counts for
/// selected patterns (empty-method validation, permissions, SSL cookies,
/// NullPointerException usage).
/// </summary>
public void Match_TestPatternsJava_MatchedAllDefault()
{
    // Arrange: process Patterns.java with the default pattern repository.
    var path = Path.Combine(TestUtility.TestsDataPath, "Patterns.java");
    var sourceRep = new FileSourceRepository(path);
    var logger = new TestLogger();
    var workflow = new Workflow(sourceRep, Global.PatternsRepository)
    {
        Logger = logger
    };
    workflow.Process();

    // Materialize once: the original deferred OrderBy query was re-evaluated
    // (and re-sorted) on every Count()/Where() call below.
    MatchResultDto[] matchResults = logger.Matches
        .ToDto()
        .OrderBy(r => r.PatternKey)
        .ToArray();

    IEnumerable<PatternDto> patternDtos = Global.PatternsRepository.GetAll()
        .Where(patternDto => patternDto.Languages.Contains("Java")).ToArray();

    // Every Java pattern must produce at least one match in the fixture.
    foreach (PatternDto dto in patternDtos)
    {
        Assert.Greater(matchResults.Count(p => p.PatternKey == dto.Key), 0, dto.Description);
    }

    // Exact counts for the empty-method validation patterns.
    var improperValidationEmptyMethodPartial = patternDtos.Single(dto => dto.Description.StartsWith("ImproperValidationEmptyMethodPartial"));
    Assert.AreEqual(1, matchResults.Count(r => r.PatternKey == improperValidationEmptyMethodPartial.Key));

    var improperValidationEmptyMethodFull = patternDtos.Single(dto => dto.Description.StartsWith("ImproperValidationEmptyMethodFull"));
    Assert.AreEqual(3, matchResults.Count(r => r.PatternKey == improperValidationEmptyMethodFull.Key));

    // Android permission patterns match exactly once each.
    var missingReceiverPermission = patternDtos.Single(dto => dto.Description.StartsWith("MissingReceiverPermission"));
    Assert.AreEqual(1, matchResults.Count(r => r.PatternKey == missingReceiverPermission.Key));

    var missingBroadcasterPermission = patternDtos.Single(dto => dto.Description.StartsWith("MissingBroadcasterPermission"));
    Assert.AreEqual(1, matchResults.Count(r => r.PatternKey == missingBroadcasterPermission.Key));

    // CookieNotSentOverSSL must match each of the three cookie variants once.
    var cookieNotSentOverSslDto = patternDtos.Single(dto => dto.Description.StartsWith("CookieNotSentOverSSL"));
    var cookieNotSentOverSslResults = matchResults.Where(r => r.PatternKey == cookieNotSentOverSslDto.Key).ToArray();
    Assert.AreEqual(1, cookieNotSentOverSslResults.Count(r => r.MatchedCode.Contains("emailCookieExistsSimple")));
    Assert.AreEqual(1, cookieNotSentOverSslResults.Count(r => r.MatchedCode.Contains("emailCookieExistsComplex")));
    Assert.AreEqual(1, cookieNotSentOverSslResults.Count(r => r.MatchedCode.Contains("emailCookieExistsAnotherVarName")));

    var useOfNullPointerException = patternDtos.Single(dto => dto.Description.StartsWith("Use of NullPointerException"));
    Assert.AreEqual(1, matchResults.Count(r => r.PatternKey == useOfNullPointerException.Key));
}
/// <summary>
/// Verifies FileSourceRepository.GetLongestCommonPath across degenerate,
/// single-file, shared-directory, prefix-collision and cross-drive inputs.
/// </summary>
public void Check_CorrectLongestCommonPaths()
{
    // Helper: compute the longest common path and compare with the expectation.
    void Check(string expected, params string[] inputs) =>
        Assert.AreEqual(expected, FileSourceRepository.GetLongestCommonPath(inputs));

    // Degenerate inputs: no files, or a single empty name, yield an empty path.
    Check("");
    Check("", "");

    // A single file's common path is its containing directory.
    Check(@"C:\dir\dir1".NormalizeDirSeparator(),
        @"C:\dir\dir1\file1".NormalizeDirSeparator());

    // Two files in the same directory share that directory.
    Check(@"C:\dir\dir1".NormalizeDirSeparator(),
        @"C:\dir\dir1\file1".NormalizeDirSeparator(),
        @"C:\dir\dir1\file2".NormalizeDirSeparator());

    // A common prefix of file *names* must not leak into the common path.
    Check(@"C:\dir\dir1".NormalizeDirSeparator(),
        @"C:\dir\dir1\abc".NormalizeDirSeparator(),
        @"C:\dir\dir1\bcd".NormalizeDirSeparator());

    // Files on different drive roots have no common path at all.
    Check("",
        @"C:\dir\dir1\file1".NormalizeDirSeparator(),
        @"D:\dir\dir1\file2".NormalizeDirSeparator());
}
/// <summary>
/// Runs the full workflow over the given SQL-dialect patterns fixture and
/// verifies that every pattern declared for that dialect matches at least once.
/// </summary>
/// <param name="sqlDialect">SQL dialect whose patterns are checked.</param>
/// <param name="patternsFileName">Fixture file name relative to the test data directory.</param>
public void Match_TestPatternsSql_MatchedAllDefault(Language sqlDialect, string patternsFileName)
{
    // Arrange: process the dialect-specific fixture file.
    var path = Path.Combine(TestUtility.TestsDataPath, patternsFileName.NormalizeDirSeparator());
    var sourceRep = new FileSourceRepository(path);
    var logger = new TestLogger();
    var workflow = new Workflow(sourceRep, Global.PatternsRepository)
    {
        Logger = logger
    };
    workflow.Process();

    // Materialize once: the original deferred OrderBy query was re-evaluated
    // (and re-sorted) on every Count() call in the loop below.
    MatchResultDto[] matchResults = logger.Matches
        .ToDto()
        .OrderBy(r => r.PatternKey)
        .ToArray();

    string sqlDialectString = sqlDialect.ToString();
    IEnumerable<PatternDto> patternDtos = Global.PatternsRepository.GetAll()
        .Where(patternDto => patternDto.Languages.Contains(sqlDialectString));

    // Every pattern for the dialect must match at least once in the fixture.
    foreach (var dto in patternDtos)
    {
        Assert.Greater(matchResults.Count(p => p.PatternKey == dto.Key), 0, dto.Description);
    }
}
// Round-trips UST dumps through the MsgPack serializer and verifies that
// deserialization succeeds — or fails with the expected diagnostics when the
// dump is deliberately corrupted.
//   inputFileName:     file or directory under TestsDataPath to process.
//   linearTextSpans:   dump linear instead of line/column text spans.
//   damaged:           flip one byte inside the serialized payload; exactly one
//                      error mentioning the byte value and offset is expected.
//   incorrectFilePath: flip one byte in the stored source-file path instead;
//                      a ReadException about opening the file is expected.
//   compressed:        enable compressed serialization and print the ratio.
private static void CheckMsgPackSerialization(string inputFileName, bool linearTextSpans = false, bool damaged = false, bool incorrectFilePath = false, bool compressed = false)
{
    string path = Path.Combine(TestUtility.TestsDataPath, inputFileName);
    string ext = SerializationFormat.MsgPack.GetExtension();

    // Serialization: dump the UST stage of every input file to the output dir.
    string[] files = File.Exists(path) ? new[] { path } : Directory.GetFiles(path);
    var codeRepository = new FileSourceRepository(files);
    var logger = new TestLogger();
    var workflow = new Workflow(codeRepository)
    {
        DumpStages = new HashSet<Stage> { Stage.Ust },
        DumpDir = TestUtility.TestsOutputPath,
        SerializationFormat = SerializationFormat.MsgPack,
        LineColumnTextSpans = !linearTextSpans,
        CompressedSerialization = compressed,
        Stage = Stage.Ust,
        Logger = logger
    };
    WorkflowResult result = workflow.Process();
    Assert.AreEqual(0, logger.ErrorCount, logger.ErrorsString);

    // Byte value and offset used for corruption. The base offset 6 appears to
    // skip a fixed header — TODO confirm against the MsgPack dump format.
    int errorOffset = 6;
    byte errorValue = 123;
    var serializedFiles = new List<string>();
    foreach (string file in files)
    {
        string shortFileName = Path.GetFileName(file) + ".ust." + ext;
        string serializedFile = Path.Combine(TestUtility.TestsOutputPath, shortFileName);
        if (damaged || incorrectFilePath)
        {
            byte[] bytes = File.ReadAllBytes(serializedFile);
            if (damaged)
            {
                // Skip past the embedded source path so the payload itself is hit.
                errorOffset += path.Length + 1;
            }
            else
            {
                // Land inside the stored file-path bytes to break the path.
                errorOffset += 2;
            }
            bytes[errorOffset] = errorValue;
            File.WriteAllBytes(serializedFile, bytes);
        }
        serializedFiles.Add(serializedFile);
    }

    // Deserialization: re-run the workflow over the serialized dumps.
    var newLogger = new TestLogger();
    var newCodeRepository = new FileSourceRepository(serializedFiles);
    var newWorkflow = new Workflow(newCodeRepository, new DefaultPatternRepository())
    {
        Logger = newLogger
    };
    newWorkflow.Process();

    if (damaged)
    {
        // A damaged payload must be reported exactly once, naming both the
        // corrupted byte value and its offset.
        Assert.AreEqual(1, newLogger.ErrorCount);
        Assert.IsTrue(newLogger.ErrorsString.Contains(errorValue.ToString()));
        Assert.IsTrue(newLogger.ErrorsString.Contains(errorOffset.ToString()));
        return;
    }

    var binaryFile = (BinaryFile)newCodeRepository.ReadFile(newCodeRepository.GetFileNames().ElementAt(0));
    // NOTE(review): this passes the *old* logger, not newLogger — confirm
    // whether that is intentional or the two were meant to be swapped.
    RootUstMessagePackSerializer.Deserialize(binaryFile, new HashSet<IFile>(), null, logger, out int readSize);
    if (!compressed)
    {
        // Uncompressed: the deserializer must consume the entire payload.
        Assert.AreEqual(binaryFile.Data.Length, readSize);
    }
    else
    {
        double compressionRatio = (double)readSize / binaryFile.Data.Length;
        Console.WriteLine($"Compression ratio: {compressionRatio}");
    }
    Assert.GreaterOrEqual(newLogger.Matches.Count, 1);

    if (incorrectFilePath)
    {
        // A broken stored path surfaces as a ReadException on file open.
        string firstError = newLogger.Errors[0];
        Assert.IsTrue(firstError.Contains("ReadException"));
        Assert.IsTrue(firstError.Contains("Error during opening the file"));
        return;
    }

    Assert.AreEqual(0, newLogger.ErrorCount, newLogger.ErrorsString);
    // Happy path: the first match's span must map back to the pinned
    // line/column range in the first source file.
    var match = (MatchResult)newLogger.Matches[0];
    using (var sourceFilesEnumerator = result.SourceFiles.GetEnumerator())
    {
        sourceFilesEnumerator.MoveNext();
        var firstFile = (TextFile)sourceFilesEnumerator.Current;
        Assert.AreEqual(new LineColumnTextSpan(2, 1, 3, 25), firstFile.GetLineColumnTextSpan(match.TextSpan));
    }
}
// Round-trips UST dumps through the JSON serializer and verifies
// deserialization, including multi-text-span rewriting and strict-mode
// rejection of unknown properties.
//   inputFileName:             file or directory under TestsDataPath.
//   linearTextSpans:           dump linear instead of line/column text spans.
//   includeTextSpans/indented/includeCode: JSON dump options to exercise.
//   checkStrict/strict:        inject an excess property and check strict-mode
//                              deserialization reports it.
//   checkPatternSerialization: also dump patterns and reload them from JSON.
private static void CheckJsonSerialization(string inputFileName, bool linearTextSpans = false, bool includeTextSpans = true, bool indented = true, bool includeCode = false, bool checkStrict = false, bool strict = true, bool checkPatternSerialization = false)
{
    string path = Path.Combine(TestUtility.TestsDataPath, inputFileName);

    // Serialization: dump the UST stage of every input file to the output dir.
    string[] files = File.Exists(path) ? new[] { path } : Directory.GetFiles(path);
    var codeRepository = new FileSourceRepository(files);
    var logger = new TestLogger();
    var workflow = new Workflow(codeRepository)
    {
        DumpStages = new HashSet<Stage> { Stage.Ust },
        DumpDir = TestUtility.TestsOutputPath,
        IncludeCodeInDump = includeCode,
        IndentedDump = indented,
        StrictJson = strict,
        DumpWithTextSpans = includeTextSpans,
        LineColumnTextSpans = !linearTextSpans,
        Stage = Stage.Ust,
        IsDumpPatterns = checkPatternSerialization,
        // FIX: the logger was created but never attached to the workflow, so
        // the error-count assertion below always passed vacuously.
        Logger = logger
    };
    WorkflowResult result = workflow.Process();
    Assert.AreEqual(0, logger.ErrorCount, logger.ErrorsString);

    var preprocessedFile = (TextFile)result.SourceFiles.FirstOrDefault(f => f.Name == "preprocessed.php");
    var originFile = (TextFile)result.SourceFiles.FirstOrDefault(f => f.Name == "origin.php");
    // Expected spans used by the MultiTextSpan scenario below.
    LineColumnTextSpan lcPreprocessedTextSpan = new LineColumnTextSpan(4, 1, 4, 3);
    LineColumnTextSpan lcOriginTextSpan = new LineColumnTextSpan(3, 1, 3, 3, originFile);

    var jsonFiles = new List<string>();
    foreach (string file in files)
    {
        string shortFileName = Path.GetFileName(file) + ".ust.json";
        string jsonFile = Path.Combine(TestUtility.TestsOutputPath, shortFileName);
        if (file.Contains("preprocessed.php") || checkStrict)
        {
            string json = File.ReadAllText(jsonFile);
            if (file.Contains("preprocessed.php"))
            {
                // Rewrite the single preprocessed span into a two-element
                // [preprocessed, origin] array to exercise multi-span parsing.
                string preprocessedTextSpanString, originTextSpanString;
                if (linearTextSpans)
                {
                    preprocessedTextSpanString = preprocessedFile.GetTextSpan(lcPreprocessedTextSpan).ToString();
                    originTextSpanString = originFile.GetTextSpan(lcOriginTextSpan).ToString();
                }
                else
                {
                    preprocessedTextSpanString = lcPreprocessedTextSpan.ToString();
                    originTextSpanString = lcOriginTextSpan.ToString();
                }
                json = json.Replace($"\"{preprocessedTextSpanString}\"", $"[ \"{lcPreprocessedTextSpan}\", \"{originTextSpanString}\" ]");
            }
            if (checkStrict)
            {
                // Inject an unknown property; strict deserialization must flag it.
                json = json.Replace("\"Kind\": \"IntLiteral\"", "\"Kind\": \"IntLiteral\", \"ExcessProperty\": \"value\"");
            }
            File.WriteAllText(jsonFile, json);
        }
        jsonFiles.Add(jsonFile);
    }

    // Deserialization: reload the JSON dumps with the appropriate patterns.
    var newLogger = new TestLogger();
    var newCodeRepository = new FileSourceRepository(jsonFiles);
    var newPatternsRepository =
        checkPatternSerialization
            ? new JsonPatternsRepository(File.ReadAllText(Path.Combine(TestUtility.TestsOutputPath, "patterns.json")))
            : inputFileName == "MultiTextSpan"
                ? (IPatternsRepository)new DslPatternRepository("a", "php")
                : new DefaultPatternRepository();
    var newWorkflow = new Workflow(newCodeRepository, newPatternsRepository)
    {
        StrictJson = strict,
        Logger = newLogger
    };
    newWorkflow.Process();

    if (checkStrict && strict)
    {
        // Strict mode must reject the injected excess property by name.
        Assert.IsTrue(newLogger.ErrorsString.Contains("ExcessProperty"));
    }
    else
    {
        Assert.AreEqual(0, newLogger.ErrorCount, newLogger.ErrorsString);
        Assert.GreaterOrEqual(newLogger.Matches.Count, 1);
        if (includeTextSpans)
        {
            if (inputFileName == "MultiTextSpan")
            {
                // The second match must carry both the preprocessed and the
                // origin spans, mapping back to the pinned expectations.
                var matchResult = (MatchResult)newLogger.Matches[1];
                Assert.AreEqual(2, matchResult.TextSpans.Length);
                LineColumnTextSpan actualOriginTextSpan = originFile.GetLineColumnTextSpan(matchResult.TextSpans[1]);
                Assert.AreEqual(lcOriginTextSpan, actualOriginTextSpan);
                LineColumnTextSpan actualPreprocessedTextSpan = preprocessedFile.GetLineColumnTextSpan(matchResult.TextSpans[0]);
                Assert.AreEqual(lcPreprocessedTextSpan, actualPreprocessedTextSpan);
            }
            else
            {
                var match = (MatchResult)newLogger.Matches[0];
                // FIX: dispose the enumerator (the original leaked it; the
                // MsgPack sibling already used a using block here).
                using (var sourceFilesEnumerator = result.SourceFiles.GetEnumerator())
                {
                    sourceFilesEnumerator.MoveNext();
                    var firstFile = (TextFile)sourceFilesEnumerator.Current;
                    Assert.AreEqual(new LineColumnTextSpan(2, 1, 3, 25), firstFile.GetLineColumnTextSpan(match.TextSpan));
                }
            }
        }
    }
}