/// <summary>
/// Downloads one .csproj file from GitHub, parses its NuGet package references
/// and records them in the shared package graph.
/// </summary>
/// <param name="repoInfo">Search hit pointing at the project file.</param>
/// <param name="graph">Map from package reference to the set of repositories that use it.</param>
/// <param name="scanProgress">Progress sink; notified once per processed project file.</param>
private async Task IndexProjectAsync(
    SearchCode repoInfo,
    ConcurrentDictionary<PackageReference, HashSet<RepoInfo>> graph,
    IScanProgress scanProgress)
{
    var projectContent = await _client.Repository.Content.GetAllContents(repoInfo.Repository.Id, repoInfo.Path);
    var repo = RepoInfo.Parse(repoInfo.Repository.Name, repoInfo.HtmlUrl);

    // GetAllContents returns a single entry when given a file path; parse it.
    var nugetRefs = _projectFileParser.Parse(projectContent[0].Content);

    Console.WriteLine($"Repo name {repoInfo.Repository.Name} file name {repoInfo.Name}");

    foreach (var nugetRef in nugetRefs)
    {
        // GetOrAdd replaces the TryGetValue + indexer double lookup, which is
        // also a check-then-write race on a ConcurrentDictionary.
        // NOTE(review): HashSet itself is not thread-safe — fine while the
        // scan is sequential; revisit if indexing ever runs concurrently.
        graph.GetOrAdd(nugetRef, _ => new HashSet<RepoInfo>()).Add(repo);
    }

    scanProgress.UpdateProjectProgress();
}
/// <summary>
/// Searches the configured GitHub organization for .csproj files containing
/// "PackageReference" and indexes each hit into the package graph.
/// </summary>
/// <param name="graph">Map from package reference to the set of repositories that use it.</param>
/// <param name="scanProgress">Progress sink; notified per new repository and per project.</param>
public async Task ScanReposAsync(
    ConcurrentDictionary<PackageReference, HashSet<RepoInfo>> graph,
    IScanProgress scanProgress)
{
    _solutions.Clear();

    var scr = new SearchCodeRequest("PackageReference")
    {
        Organizations = new List<string> { _organization },
        Extensions = new List<string> { "csproj" },
        Repos = _repos,
    };

    // First request only to learn the total hit count; pages are re-fetched below.
    var searchResult = await _client.Search.SearchCode(scr);
    var totalProjectsCount = searchResult.TotalCount;

    for (int i = 0; i < totalProjectsCount; i += 100)
    {
        // GitHub search pages are 1-based. The previous `i / 100` asked for
        // page 0 first (served as page 1, i.e. fetched twice) and never
        // requested the last page, dropping the final batch of results.
        var pageNumber = i / 100 + 1;
        scr.Page = pageNumber;
        searchResult = await _client.Search.SearchCode(scr);
        Console.WriteLine($"Page {pageNumber} received {searchResult.Items.Count}");

        foreach (var item in searchResult.Items)
        {
            try
            {
                bool wasAdded = _solutions.Add(item.Repository.Name);
                if (wasAdded)
                {
                    // First project seen from this repository.
                    scanProgress.UpdateRepoProgress();
                }

                await IndexProjectAsync(item, graph, scanProgress);

                // rate limit - 5000 per hour
                await Task.Delay(500);
            }
            catch (Exception ex)
            {
                // Best-effort scan: log and continue with the next project.
                Console.WriteLine();
                Console.WriteLine($"Error: {ex.Message}");
            }
        }
    }
}
/// <summary>
/// Fetches a project file found by Bitbucket code search, parses its NuGet
/// package references and records them in the shared package graph.
/// Non-.csproj hits and repositories on the skip list are ignored.
/// </summary>
/// <param name="searchFile">File descriptor returned by Bitbucket code search.</param>
/// <param name="graph">Map from package reference to the set of repositories that use it.</param>
/// <param name="scanProgress">Progress sink; notified per new repository and per project.</param>
private void IndexProject(
    SrcFileInfo searchFile,
    ConcurrentDictionary<PackageReference, HashSet<RepoInfo>> graph,
    IScanProgress scanProgress)
{
    var fileName = Path.GetFileName(searchFile.path);

    // Ordinal, case-insensitive: the old culture-sensitive EndsWith missed
    // files like "Project.CSPROJ" and is flagged by CA1310.
    if (!fileName.EndsWith(".csproj", StringComparison.OrdinalIgnoreCase))
    {
        return;
    }

    var repoSlug = ExtractSlugFromUrl(searchFile.links.self.href);
    if (_skipRepos.Contains(repoSlug))
    {
        Console.WriteLine($"Skipped {repoSlug}");
        return;
    }

    var repoResource = _client.RepositoriesEndPoint().RepositoryResource(_bbAccount, repoSlug);

    // Cache repository metadata so each repo is fetched (and counted) once.
    if (!_reposCache.TryGetValue(repoSlug, out var repo))
    {
        var repoInfo = repoResource.GetRepository();
        var fileLink = $"{repoInfo.links.html.href}/src/master/{searchFile.path}";
        repo = RepoInfo.Parse(repoInfo.slug, fileLink);
        _reposCache.Add(repoSlug, repo);
        scanProgress.UpdateRepoProgress();
    }

    var projectContent = repoResource.SrcResource().GetFileContent(searchFile.path);
    var nugetRefs = ProjectFileParser.Parse(projectContent);

    foreach (var nugetRef in nugetRefs)
    {
        // GetOrAdd replaces the TryGetValue + indexer double lookup, which is
        // also a check-then-write race on a ConcurrentDictionary.
        // NOTE(review): HashSet is not thread-safe — fine for a sequential scan.
        graph.GetOrAdd(nugetRef, _ => new HashSet<RepoInfo>()).Add(repo);
    }

    scanProgress.UpdateProjectProgress();
    Console.WriteLine($"Processed {fileName} from {repo.Name}");
}
/// <summary>
/// Runs a Bitbucket code search for "PackageReference" across the team account
/// and indexes every returned project file into the package graph.
/// Synchronous under the hood; returns a completed task to satisfy the interface.
/// </summary>
/// <param name="graph">Map from package reference to the set of repositories that use it.</param>
/// <param name="scanProgress">Progress sink; seeded with the cached repository count.</param>
public Task ScanReposAsync(
    ConcurrentDictionary<PackageReference, HashSet<RepoInfo>> graph,
    IScanProgress scanProgress)
{
    // Repositories already cached from earlier runs count toward progress up front.
    scanProgress.SetRepoProgress(_reposCache.Count);

    var hits = _client
        .TeamsEndPoint()
        .TeamResource(_bbAccount)
        .EnumerateSearchCodeSearchResults("PackageReference");

    foreach (var hit in hits)
    {
        IndexProject(hit, graph, scanProgress);
    }

    return Task.CompletedTask;
}
/// <summary>
/// Retry wrapper around <see cref="IndexProject(SrcFileInfo, ConcurrentDictionary{PackageReference, HashSet{RepoInfo}}, IScanProgress)"/>
/// for transient Bitbucket failures (typically rate limiting). Retries up to
/// 10 times with linear backoff; never throws, so one bad file cannot abort
/// the whole scan.
/// </summary>
/// <param name="projectFile">Search result wrapping the project file to index.</param>
/// <param name="graph">Map from package reference to the set of repositories that use it.</param>
/// <param name="scanProgress">Progress sink forwarded to the inner indexer.</param>
private void IndexProject(
    SearchCodeSearchResult projectFile,
    ConcurrentDictionary<PackageReference, HashSet<RepoInfo>> graph,
    IScanProgress scanProgress)
{
    const int maxAttempts = 10;

    for (int attempt = 1; attempt <= maxAttempts; attempt++)
    {
        try
        {
            IndexProject(projectFile.file, graph, scanProgress);
            return;
        }
        catch (BitbucketException ex)
        {
            if (attempt == maxAttempts)
            {
                // Previously the final failure was swallowed silently, so the
                // file simply vanished from the results. Log before giving up;
                // still no throw, preserving the caller's no-fail contract.
                Console.WriteLine($"Error: giving up after {maxAttempts} attempts: {ex.Message}");
                return;
            }

            // Linear backoff (5s, 10s, ...) to back away from the rate limit.
            Thread.Sleep(TimeSpan.FromSeconds(5 * attempt));
        }
    }
}