/// <summary>
/// Fetches every issue event for a repository, walking all pages.
/// Results are requested sorted by update time, ascending.
/// </summary>
/// <param name="repoFullName">Repository in "owner/name" form.</param>
/// <param name="cacheOptions">Conditional-request cache metadata to send.</param>
/// <param name="priority">Queue priority for the underlying request.</param>
/// <returns>Paged response de-duplicated by event id.</returns>
public Task<GitHubResponse<IEnumerable<IssueEvent>>> Events(string repoFullName, GitHubCacheDetails cacheOptions, RequestPriority priority) {
  var eventsRequest = new GitHubRequest($"repos/{repoFullName}/issues/events", cacheOptions, priority);
  eventsRequest.AddParameter("sort", "updated");
  eventsRequest.AddParameter("direction", "asc");
  return FetchPaged(eventsRequest, (IssueEvent e) => e.Id);
}
/// <summary>
/// Fetches pull requests in all states for a repository, with caller-controlled
/// sort order and paging window.
/// </summary>
/// <param name="repoFullName">Repository in "owner/name" form.</param>
/// <param name="sort">Server-side sort field (e.g. "created", "updated").</param>
/// <param name="direction">Sort direction ("asc" or "desc").</param>
/// <param name="skipPages">Number of leading pages to skip.</param>
/// <param name="maxPages">Soft cap on the number of pages to enumerate.</param>
/// <param name="cacheOptions">Conditional-request cache metadata to send.</param>
/// <param name="priority">Queue priority for the underlying request.</param>
/// <returns>Paged response de-duplicated by pull request id.</returns>
public Task<GitHubResponse<IEnumerable<PullRequest>>> PullRequests(string repoFullName, string sort, string direction, uint skipPages, uint maxPages, GitHubCacheDetails cacheOptions, RequestPriority priority) {
  var prRequest = new GitHubRequest($"repos/{repoFullName}/pulls", cacheOptions, priority);
  prRequest.AddParameter("state", "all");
  prRequest.AddParameter("sort", sort);
  prRequest.AddParameter("direction", direction);
  return FetchPaged(prRequest, (PullRequest pr) => pr.Id, maxPages, skipPages);
}
/// <summary>
/// Fetches a single page of the most recently created issues (all states)
/// for a repository. Deliberately NOT paged — only the first page is requested.
/// </summary>
/// <param name="repoFullName">Repository in "owner/name" form.</param>
/// <param name="cacheOptions">Conditional-request cache metadata to send.</param>
/// <param name="priority">Queue priority for the underlying request.</param>
/// <returns>One page of issues, newest first.</returns>
public Task<GitHubResponse<IEnumerable<Issue>>> NewestIssues(string repoFullName, GitHubCacheDetails cacheOptions, RequestPriority priority) {
  var issueRequest = new GitHubRequest($"repos/{repoFullName}/issues", cacheOptions, priority) {
    // Preview media type required for the reactions summary on issues.
    // https://developer.github.com/v3/issues/#reactions-summary
    AcceptHeaderOverride = "application/vnd.github.squirrel-girl-preview+json"
  };
  issueRequest.AddParameter("state", "all");
  issueRequest.AddParameter("sort", "created");
  issueRequest.AddParameter("direction", "desc");
  issueRequest.AddParameter("per_page", PageSize);
  return EnqueueRequest<IEnumerable<Issue>>(issueRequest);
}
/// <summary>
/// Fetches every milestone (open and closed) for a repository, walking all pages.
/// </summary>
/// <param name="repoFullName">Repository in "owner/name" form.</param>
/// <param name="cacheOptions">Conditional-request cache metadata to send.</param>
/// <param name="priority">Queue priority for the underlying request.</param>
/// <returns>Paged response de-duplicated by milestone id.</returns>
public Task<GitHubResponse<IEnumerable<Milestone>>> Milestones(string repoFullName, GitHubCacheDetails cacheOptions, RequestPriority priority) {
  var milestoneRequest = new GitHubRequest($"repos/{repoFullName}/milestones", cacheOptions, priority);
  milestoneRequest.AddParameter("state", "all");
  return FetchPaged(milestoneRequest, (Milestone m) => m.Id);
}
/// <summary>
/// Fetches comments for a single issue, walking all pages.
/// </summary>
/// <param name="repoFullName">Repository in "owner/name" form.</param>
/// <param name="issueNumber">Issue number within the repository.</param>
/// <param name="since">
/// Optional lower bound on comment update time; null fetches everything.
/// NOTE(review): presumably AddParameter drops a null value rather than
/// sending "since=" — confirm against GitHubRequest.
/// </param>
/// <param name="cacheOptions">Conditional-request cache metadata to send.</param>
/// <param name="priority">Queue priority for the underlying request.</param>
/// <returns>Paged response de-duplicated by comment id.</returns>
public Task<GitHubResponse<IEnumerable<IssueComment>>> IssueComments(string repoFullName, int issueNumber, DateTimeOffset? since, GitHubCacheDetails cacheOptions, RequestPriority priority) {
  var commentRequest = new GitHubRequest($"repos/{repoFullName}/issues/{issueNumber}/comments", cacheOptions, priority);
  commentRequest.AddParameter("since", since);
  return FetchPaged(commentRequest, (IssueComment c) => c.Id);
}
/// <summary>
/// Fetches members of an organization with the given role, walking all pages.
/// API defaults apply otherwise: filter=all, role=all.
/// </summary>
/// <param name="orgLogin">Organization login name.</param>
/// <param name="role">Role filter passed straight through to the API.</param>
/// <param name="cacheOptions">Conditional-request cache metadata to send.</param>
/// <param name="priority">Queue priority for the underlying request.</param>
/// <returns>Paged response de-duplicated by account id.</returns>
public Task<GitHubResponse<IEnumerable<Account>>> OrganizationMembers(string orgLogin, string role, GitHubCacheDetails cacheOptions, RequestPriority priority) {
  var memberRequest = new GitHubRequest($"orgs/{orgLogin}/members", cacheOptions, priority);
  memberRequest.AddParameter("role", role);
  return FetchPaged(memberRequest, (Account a) => a.Id);
}
/// <summary>
/// Fetches the current user's organization memberships in the given state,
/// walking all pages, then stamps each nested organization account with the
/// Organization type (the API response does not mark it — "Seriously GitHub?").
/// </summary>
/// <param name="state">Membership state filter (e.g. "active").</param>
/// <param name="cacheOptions">Conditional-request cache metadata to send.</param>
/// <param name="priority">Queue priority for the underlying request.</param>
/// <returns>Paged response de-duplicated by organization id.</returns>
public async Task<GitHubResponse<IEnumerable<OrganizationMembership>>> OrganizationMemberships(string state, GitHubCacheDetails cacheOptions, RequestPriority priority) {
  var membershipRequest = new GitHubRequest("user/memberships/orgs", cacheOptions, priority);
  membershipRequest.AddParameter(nameof(state), state);

  var response = await FetchPaged(membershipRequest, (OrganizationMembership m) => m.Organization.Id);

  if (response.IsOk) {
    // Patch up the account type the API omits on nested org records.
    foreach (var membership in response.Result) {
      membership.Organization.Type = GitHubAccountType.Organization;
    }
  }

  return response;
}
/// <summary>
/// Core pagination driver: issues a GET, optionally skips leading pages,
/// enforces soft/hard page limits, then sequentially enumerates remaining
/// pages. The returned response carries the LAST page's pagination header,
/// the FIRST page's cache data (in <c>DangerousFirstPageCacheData</c>), and
/// results de-duplicated by <paramref name="keySelector"/>.
/// </summary>
/// <typeparam name="T">Element type of each page.</typeparam>
/// <typeparam name="TKey">Key type used to de-duplicate results.</typeparam>
/// <param name="request">The GET request for page one; mutated to add per_page.</param>
/// <param name="keySelector">Extracts the de-duplication key from each element.</param>
/// <param name="softPageLimit">Max pages to enumerate; must be &gt; 0.</param>
/// <param name="skipPages">Leading pages to skip before enumerating.</param>
/// <param name="hardPageLimit">If the total page count would exceed this, return an empty (but cacheable) result; must be &gt; 0.</param>
/// <returns>The merged, de-duplicated paged response.</returns>
/// <exception cref="InvalidOperationException">
/// Non-GET request, a zero page limit, or skipPages &gt; 1 on a response whose
/// pagination cannot be interpolated.
/// </exception>
private async Task<GitHubResponse<IEnumerable<T>>> FetchPaged<T, TKey>(GitHubRequest request, Func<T, TKey> keySelector, uint softPageLimit = uint.MaxValue, uint skipPages = 0, uint hardPageLimit = uint.MaxValue) {
  if (request.Method != HttpMethod.Get) {
    throw new InvalidOperationException("Only GETs can be paginated.");
  }
  if (softPageLimit == 0) {
    throw new InvalidOperationException($"{nameof(softPageLimit)} must be omitted or greater than 0");
  }
  if (hardPageLimit == 0) {
    throw new InvalidOperationException($"{nameof(hardPageLimit)} must be omitted or greater than 0");
  }

  // Always request the largest page size (unless the caller already set one).
  if (!request.Parameters.ContainsKey("per_page")) {
    request.AddParameter("per_page", PageSize);
  }

  // In all cases we need the first page 🙄 — even when skipping, because the
  // pagination (Link) header on page one tells us where the later pages are.
  var response = await EnqueueRequest<IEnumerable<T>>(request);

  // Save first page cache data now; `response` is reassigned below when pages
  // are skipped or enumerated, and only page one's cache data is meaningful
  // for revalidating the whole result.
  var dangerousFirstPageCacheData = response.CacheData;

  // When successful, try to enumerate. Else immediately return the error.
  if (response.IsOk) {
    // If skipping pages, calculate here.
    // NOTE: `nextUri` is declared in the `case 1 when` section but reused in
    // `default` — C# switch sections share one declaration scope.
    switch (skipPages) {
      case 0:
        break;
      case 1 when response.Pagination?.Next != null:
        // Skipping exactly one page: the Next link is already in hand.
        var nextUri = response.Pagination.Next;
        response = await EnqueueRequest<IEnumerable<T>>(response.Request.CloneWithNewUri(nextUri));
        break;
      case 1: // response.Pagination == null
        // Only one page existed and we were asked to skip it: empty result.
        response.Result = Array.Empty<T>();
        break;
      default: // skipPages > 1
        // Skipping several pages requires synthesizing page URIs from the
        // pagination header; bail out if that's not possible.
        if (response.Pagination?.CanInterpolate != true) {
          throw new InvalidOperationException($"Skipping pages is not supported for [{response.Request.Uri}]: {response.Pagination?.SerializeObject()}");
        }
        // Interpolate() yields the URIs for pages 2..N; Skip(skipPages - 1)
        // lands on the first page we actually want.
        nextUri = response.Pagination.Interpolate().Skip((int)(skipPages - 1)).FirstOrDefault();
        if (nextUri == null) {
          // We skipped more pages than existed.
          response.Pagination = null;
          response.Result = Array.Empty<T>();
        } else {
          response = await EnqueueRequest<IEnumerable<T>>(response.Request.CloneWithNewUri(nextUri));
        }
        break;
    }

    // Check hard limit.
    if (response.Pagination?.CanInterpolate == true && response.Pagination.Interpolate().Count() > hardPageLimit) {
      // We'll hit our hard limit, so return no results.
      response.Pagination = null;
      response.Result = Array.Empty<T>();
      // Maybe this is a bad idea, but the goal with the hard limit is to cache
      // the empty result we'll never be able to really enumerate.
      response.CacheData = dangerousFirstPageCacheData;
    } else if (softPageLimit > 1 && response.Pagination?.Next != null) {
      // Now, if there's more to do, enumerate the results.
      // By default, upgrade background => subrequest priority for the
      // follow-on page fetches.
      var subRequestPriority = RequestPriority.SubRequest;
      // Ensure interactive => interactive.
      if (response.Request.Priority == RequestPriority.Interactive) {
        subRequestPriority = RequestPriority.Interactive;
      }
      // Walk in order.
      response = await EnumerateSequential(response, subRequestPriority, softPageLimit);
    }
  }

  // Response should have:
  // 1) Pagination header from last page
  // 2) Cache data from first page, IIF it's a complete result, and not truncated due to errors.
  // 3) Number of pages returned

  // Set first page cache data.
  response.DangerousFirstPageCacheData = dangerousFirstPageCacheData;

  return(response.Distinct(keySelector));
}