/// <summary>
/// Runs Codelyzer analysis over the given solution file and returns the per-project
/// analyzer results, logging system info and memory consumption before and after the run.
/// </summary>
/// <param name="solutionFilename">Path to the .sln file to analyze.</param>
/// <returns>The analyzer results produced for the solution.</returns>
private async Task<List<AnalyzerResult>> RunCoderlyzerAnalysis(string solutionFilename)
{
    MemoryUtils.LogSystemInfo(_logger);
    MemoryUtils.LogSolutiontSize(_logger, solutionFilename);
    _logger.LogInformation("Memory usage before RunCoderlyzerAnalysis: ");
    MemoryUtils.LogMemoryConsumption(_logger);

    var analyzerConfig = GetAnalyzerConfiguration();
    var codeAnalyzer = CodeAnalyzerFactory.GetAnalyzer(analyzerConfig, _logger);
    var results = await codeAnalyzer.AnalyzeSolution(solutionFilename);

    _logger.LogInformation("Memory usage after RunCoderlyzerAnalysis: ");
    MemoryUtils.LogMemoryConsumption(_logger);

    return results;
}
/// <summary>
/// Verifies the fixture layout for LogSolutiontSize and that all short-lived objects
/// it allocates are reclaimed by garbage collection, printing per-iteration timing
/// and memory figures for diagnosis.
/// </summary>
public void TestLogSolutiontSize()
{
    var testSolutionPath = Path.Combine(tmpTestFixturePath, "mvcmusicstore", "MvcMusicStore.sln");
    DirectoryInfo solutionDir = Directory.GetParent(testSolutionPath);

    // NUnit's Assert.AreEqual takes (expected, actual); the original calls had the
    // arguments reversed, which produces inverted failure messages.
    var totalFileCount = solutionDir.EnumerateFiles("*", SearchOption.AllDirectories).Count();
    Assert.AreEqual(299, totalFileCount);

    var csFileCount = solutionDir.EnumerateFiles("*.cs", SearchOption.AllDirectories).Count();
    Assert.AreEqual(41, csFileCount);

    // Run explicit GC so the first iteration's baseline is not skewed by earlier allocations.
    GC.Collect();

    for (int i = 0; i <= 10; i++)
    {
        // Heap size before the execution (kilobytes, without forcing a collection).
        long kbBeforeExecution = GC.GetTotalMemory(false) / 1024;

        var watch = Stopwatch.StartNew();
        MemoryUtils.LogSolutiontSize(testLogger, testSolutionPath);
        watch.Stop();
        var elapsedMs = watch.ElapsedMilliseconds;

        long kbAfterExecution = GC.GetTotalMemory(false) / 1024;
        // GetTotalMemory(true) forces a full garbage collection before measuring.
        long kbAfterGC = GC.GetTotalMemory(true) / 1024;

        Console.WriteLine("----------Iteration " + i + "----------");
        Console.WriteLine(elapsedMs + "ms to run LogSolutiontSize");
        Console.WriteLine(kbBeforeExecution + "kb before LogSolutionSize.");
        Console.WriteLine(kbAfterExecution + "kb after LogSolutionSize.");
        Console.WriteLine(kbAfterGC + "kb after Garbage Collection");
        Console.WriteLine(kbAfterExecution - kbBeforeExecution + "kb allocated during LogSolutionSize.");
        Console.WriteLine(kbAfterExecution - kbAfterGC + "kb got collected by GC.");

        // Verify all the short-lived objects in LogSolutiontSize are garbage collected.
        Assert.GreaterOrEqual(kbBeforeExecution - kbAfterGC, 0);
    }
}
/// <summary>
/// Resolves compatibility details for the given package versions and completes each
/// pending TaskCompletionSource with either the downloaded PackageDetails or an
/// exception. Details are fetched from the external source per distinct package id
/// (optionally through a local temp cache when running incrementally); any version
/// left unresolved at the end is faulted with a PortingAssistantClientException.
/// NOTE(review): async void means callers cannot await this method or observe its
/// failures; consider returning Task if all call sites can tolerate the change.
/// </summary>
/// <param name="packageVersions">All package/version pairs to resolve.</param>
/// <param name="compatibilityTaskCompletionSources">Per-version completion sources awaited by callers.</param>
/// <param name="pathToSolution">Solution path, used for the temp cache location and OOM diagnostics.</param>
/// <param name="isIncremental">When true, consult/populate the temp cache.</param>
/// <param name="incrementalRefresh">When true, bypass the cache and re-download.</param>
private async void ProcessCompatibility(IEnumerable<PackageVersionPair> packageVersions,
    Dictionary<PackageVersionPair, TaskCompletionSource<PackageDetails>> compatibilityTaskCompletionSources,
    string pathToSolution, bool isIncremental, bool incrementalRefresh)
{
    var packageVersionsFound = new HashSet<PackageVersionPair>();
    var packageVersionsWithErrors = new HashSet<PackageVersionPair>();

    // Download each distinct package id once and fan the result out to all of its versions.
    var packageVersionsGroupedByPackageId = packageVersions
        .GroupBy(pv => pv.PackageId)
        .ToDictionary(pvGroup => pvGroup.Key, pvGroup => pvGroup.ToList());

    foreach (var groupedPackageVersions in packageVersionsGroupedByPackageId)
    {
        var packageToDownload = groupedPackageVersions.Key.ToLower();
        var fileToDownload = GetDownloadFilePath(CompatibilityCheckerType, packageToDownload);

        try
        {
            string tempDirectoryPath = GetTempDirectory(pathToSolution);
            PackageDetails packageDetails = null;
            if (isIncremental)
            {
                if (incrementalRefresh || !IsPackageInFile(fileToDownload, tempDirectoryPath))
                {
                    _logger.LogInformation("Downloading {0} from {1}", fileToDownload, CompatibilityCheckerType);
                    packageDetails = await GetPackageDetailFromS3(fileToDownload, _httpService);
                    _logger.LogInformation("Caching {0} from {1} to Temp", fileToDownload, CompatibilityCheckerType);
                    CachePackageDetailsToFile(fileToDownload, packageDetails, tempDirectoryPath);
                }
                else
                {
                    _logger.LogInformation("Fetching {0} from {1} from Temp", fileToDownload, CompatibilityCheckerType);
                    packageDetails = GetPackageDetailFromFile(fileToDownload, tempDirectoryPath);
                }
            }
            else
            {
                packageDetails = await GetPackageDetailFromS3(fileToDownload, _httpService);
            }

            // Guard against the source returning details for a different package than
            // requested. OrdinalIgnoreCase already ignores case, so the original
            // ToLower() calls were redundant (and culture-sensitive); Trim() is kept
            // to tolerate stray whitespace in the payload.
            if (packageDetails.Name == null ||
                !string.Equals(packageDetails.Name.Trim(), packageToDownload.Trim(), StringComparison.OrdinalIgnoreCase))
            {
                throw new PackageDownloadMismatchException(
                    actualPackage: packageDetails.Name,
                    expectedPackage: packageToDownload);
            }

            foreach (var packageVersion in groupedPackageVersions.Value)
            {
                if (compatibilityTaskCompletionSources.TryGetValue(packageVersion, out var taskCompletionSource))
                {
                    // TrySetResult (rather than SetResult) avoids throwing if a source
                    // was already completed, matching the Try* usage elsewhere in this method.
                    taskCompletionSource.TrySetResult(packageDetails);
                    packageVersionsFound.Add(packageVersion);
                }
            }
        }
        catch (OutOfMemoryException ex)
        {
            _logger.LogError("Failed when downloading and parsing {0} from {1}, {2}", fileToDownload, CompatibilityCheckerType, ex);
            MemoryUtils.LogSolutiontSize(_logger, pathToSolution);
            MemoryUtils.LogMemoryConsumption(_logger);
        }
        catch (Exception ex)
        {
            if (ex.Message.Contains("404"))
            {
                _logger.LogInformation($"Encountered {ex.GetType()} while downloading and parsing {fileToDownload} " +
                                       $"from {CompatibilityCheckerType}, but it was ignored. " +
                                       $"ErrorMessage: {ex.Message}.");
                // filter all 404 errors
                ex = null;
            }
            else
            {
                _logger.LogError("Failed when downloading and parsing {0} from {1}, {2}", fileToDownload, CompatibilityCheckerType, ex);
            }

            foreach (var packageVersion in groupedPackageVersions.Value)
            {
                if (compatibilityTaskCompletionSources.TryGetValue(packageVersion, out var taskCompletionSource))
                {
                    // TrySetException rather than SetException: consistent with the
                    // final sweep below and safe if the source was already completed.
                    taskCompletionSource.TrySetException(new PortingAssistantClientException(ExceptionMessage.PackageNotFound(packageVersion), ex));
                    packageVersionsWithErrors.Add(packageVersion);
                }
            }
        }
    }

    // Fault any versions that were neither resolved nor already faulted above.
    foreach (var packageVersion in packageVersions)
    {
        if (packageVersionsFound.Contains(packageVersion) || packageVersionsWithErrors.Contains(packageVersion))
        {
            continue;
        }

        if (compatibilityTaskCompletionSources.TryGetValue(packageVersion, out var taskCompletionSource))
        {
            var errorMessage = $"Could not find package {packageVersion} in external source; try checking an internal source.";
            _logger.LogInformation(errorMessage);
            var innerException = new PackageNotFoundException(errorMessage);
            taskCompletionSource.TrySetException(new PortingAssistantClientException(ExceptionMessage.PackageNotFound(packageVersion), innerException));
        }
    }
}