/// <summary>
/// Recreates the PR's head branch from its remote tracking ref, rebases it
/// onto the local master tip when it is behind, and force-pushes the rebased
/// branch back to origin. If the rebase does not complete (e.g. conflicts),
/// it is aborted and the method returns without pushing.
/// </summary>
/// <param name="pr">Pull request whose head branch should be rebased onto master.</param>
public void PullAndRebaseAndPush(PullRequest pr)
{
    string repoPath = Path.Combine(_account.TempRepPath, pr.Base.Repository.Name);
    using (var repo = new Repository(repoPath))
    {
        var masterBranch = repo.Branches["master"];
        var masterTip = masterBranch.Commits.First();

        // BUG FIX: the remote was looked up as "orgin" (typo). The tracking
        // branch below is read from refs/remotes/origin, so the remote name
        // must be "origin" for the push target to resolve.
        var origin = repo.Network.Remotes["origin"];
        var branchName = pr.Head.Ref;

        // Recreate the local branch from the remote tracking ref so we start
        // from the remote's tip rather than any stale local state.
        repo.Branches.Remove(branchName);
        Branch originBranch = repo.Branches[$"refs/remotes/origin/{branchName}"];
        Branch localBranch = repo.CreateBranch(branchName, originBranch.Tip);
        localBranch = Commands.Checkout(repo, localBranch);

        // Only rebase when the branch does not already contain master's tip.
        if (!localBranch.Commits.Contains(masterTip))
        {
            Console.WriteLine(branchName);
            var rebaseOptions = new RebaseOptions();
            var rebaseResult = repo.Rebase.Start(localBranch, masterBranch, null, _identity, rebaseOptions);
            if (rebaseResult.Status != RebaseStatus.Complete)
            {
                // Conflicts or other incomplete state: back out and leave
                // the remote branch untouched.
                repo.Rebase.Abort();
                return;
            }

            Commands.Checkout(repo, branchName);
            // Force-push ("+" refspec) because the rebase rewrote history.
            repo.Network.Push(origin, $"+refs/heads/{branchName}:refs/heads/{branchName}", _pushOptions);
        }
    }
}
/// <summary>
/// Verifies rebase behavior when a patch is already applied on the upstream:
/// the already-applied step reports WasPatchAlreadyApplied == true, and the
/// remaining step produces a real commit with the expected tree, the original
/// author signature, and the rebasing committer identity.
/// </summary>
/// <param name="attributes">.gitattributes content used when constructing the test repository.</param>
/// <param name="lineEnding">Line ending used when constructing the test repository.</param>
/// <param name="expectedShaText">Expected tree SHA of the commit produced by the second rebase step.</param>
public void CanRebaseHandlePatchAlreadyApplied(string attributes, string lineEnding, string expectedShaText)
{
    SelfCleaningDirectory scd = BuildSelfCleaningDirectory();
    var path = Repository.Init(scd.DirectoryPath);
    using (Repository repo = new Repository(path))
    {
        ConstructRebaseTestRepository(repo, attributes, lineEnding);

        Commands.Checkout(repo, topicBranch1Name);

        // Create topic1' pointing at masterBranch1Name.
        Branch topicBranch1Prime = repo.CreateBranch(topicBranch1PrimeName, masterBranch1Name);

        // Commit a new file on topic1...
        string newFileRelativePath = "new_file.txt";
        Touch(repo.Info.WorkingDirectory, newFileRelativePath, "New Content");
        Commands.Stage(repo, newFileRelativePath);
        Commit commit = repo.Commit("new commit 1", Constants.Signature, Constants.Signature, new CommitOptions());

        // ...then cherry-pick that same change onto topic1', so the patch is
        // already applied there before the rebase runs.
        Commands.Checkout(repo, topicBranch1Prime);
        var cherryPickResult = repo.CherryPick(commit, Constants.Signature2);
        Assert.Equal(CherryPickStatus.CherryPicked, cherryPickResult.Status);

        // Add a second, unrelated commit on topic1'.
        string newFileRelativePath2 = "new_file_2.txt";
        Touch(repo.Info.WorkingDirectory, newFileRelativePath2, "New Content for path 2");
        Commands.Stage(repo, newFileRelativePath2);
        repo.Commit("new commit 2", Constants.Signature, Constants.Signature, new CommitOptions());

        Branch upstreamBranch = repo.Branches[topicBranch1Name];
        List<CompletedRebaseStepInfo> rebaseResults = new List<CompletedRebaseStepInfo>();
        RebaseOptions options = new RebaseOptions()
        {
            // Record each completed step so the assertions below can inspect them.
            RebaseStepCompleted = x => { rebaseResults.Add(new CompletedRebaseStepInfo(x.Commit, x.WasPatchAlreadyApplied)); }
        };

        // Rebase the current HEAD (topic1') onto topic1.
        repo.Rebase.Start(null, upstreamBranch, null, Constants.Identity2, options);

        ObjectId secondCommitExpectedTreeId = new ObjectId(expectedShaText);
        Signature secondCommitAuthorSignature = Constants.Signature;
        Identity secondCommitCommiterIdentity = Constants.Identity2;

        // The cherry-picked step is detected as already applied; the second
        // step is actually committed.
        Assert.Equal(2, rebaseResults.Count);
        Assert.True(rebaseResults[0].WasPatchAlreadyApplied);
        Assert.False(rebaseResults[1].WasPatchAlreadyApplied);
        Assert.NotNull(rebaseResults[1].Commit);

        // This is the expected tree ID of the new commit.
        Assert.Equal(secondCommitExpectedTreeId, rebaseResults[1].Commit.Tree.Id);
        // Author is preserved from the original commit; committer is the
        // identity that performed the rebase.
        Assert.True(Signature.Equals(secondCommitAuthorSignature, rebaseResults[1].Commit.Author));
        Assert.Equal<string>(secondCommitCommiterIdentity.Name, rebaseResults[1].Commit.Committer.Name, StringComparer.Ordinal);
        Assert.Equal<string>(secondCommitCommiterIdentity.Email, rebaseResults[1].Commit.Committer.Email, StringComparer.Ordinal);
    }
}
/// <summary>
/// Verifies that RebaseOptions rejects document resource IDs (only folder IDs
/// are valid for either argument) and exposes the source/target folders it
/// was constructed with.
/// </summary>
public void TestRebaseOptions()
{
    // A document ID (".FeatureSource") in either position must throw.
    Assert.Throws<ArgumentException>(() => new RebaseOptions("Library://Foo.FeatureSource", "Library://Bar/"));
    Assert.Throws<ArgumentException>(() => new RebaseOptions("Library://Foo/", "Library://Bar.FeatureSource"));

    // Two valid folder IDs are accepted and round-trip through the properties.
    var validOptions = new RebaseOptions("Library://Foo/", "Library://Bar/");
    Assert.AreEqual("Library://Foo/", validOptions.SourceFolder);
    Assert.AreEqual("Library://Bar/", validOptions.TargetFolder);
}
/// <summary>
/// Verifies that RebaseOptions.FileConflictStrategy is honored: with
/// CheckoutFileConflictStrategy.Ours, the conflicted file left in the working
/// directory contains the "ours" side of the conflict.
/// </summary>
public void CanSpecifyFileConflictStrategy()
{
    SelfCleaningDirectory sandbox = BuildSelfCleaningDirectory();
    var repoPath = Repository.Init(sandbox.DirectoryPath);

    using (Repository repo = new Repository(repoPath))
    {
        ConstructRebaseTestRepository(repo);

        Commands.Checkout(repo, topicBranch1Name);
        Assert.False(repo.RetrieveStatus().IsDirty);

        Branch rebasedBranch = repo.Branches[topicBranch1Name];
        Branch upstreamBranch = repo.Branches[conflictBranch1Name];
        Branch ontoBranch = repo.Branches[conflictBranch1Name];

        RebaseOptions options = new RebaseOptions()
        {
            FileConflictStrategy = CheckoutFileConflictStrategy.Ours,
        };

        RebaseResult result = repo.Rebase.Start(rebasedBranch, upstreamBranch, ontoBranch, Constants.Identity, options);

        // The rebase should stop on a conflict before completing any step.
        Assert.Equal(CurrentOperation.RebaseMerge, repo.Info.CurrentOperation);
        Assert.Equal(RebaseStatus.Conflicts, result.Status);
        Assert.True(repo.RetrieveStatus().IsDirty);
        Assert.False(repo.Index.IsFullyMerged);
        Assert.Equal(0, result.CompletedStepCount);
        Assert.Equal(3, result.TotalStepCount);

        string conflictFile = filePathB;

        // Inspect the conflict entry recorded in the index for the file.
        Conflict conflict = repo.Index.Conflicts[conflictFile];
        Assert.NotNull(conflict);
        Assert.NotNull(conflict.Theirs);
        Assert.NotNull(conflict.Ours);

        Blob oursBlob = repo.Lookup<Blob>(conflict.Ours.Id);

        // With the "Ours" strategy, the on-disk content must match our side
        // of the conflict (after checkout filters are applied).
        string expectedContent = oursBlob.GetContentText(new FilteringOptions(conflictFile));
        Assert.Equal(expectedContent, File.ReadAllText(Path.Combine(repo.Info.WorkingDirectory, conflictFile)));
    }
}
/// <summary>
/// Verifies that aborting a conflicted rebase restores a clean, fully merged
/// state and that the checkout progress/notify callbacks supplied through
/// RebaseOptions are invoked during the abort.
/// </summary>
public void CanAbortRebase()
{
    SelfCleaningDirectory sandbox = BuildSelfCleaningDirectory();
    var repoPath = Repository.Init(sandbox.DirectoryPath);

    using (Repository repo = new Repository(repoPath))
    {
        ConstructRebaseTestRepository(repo);

        Commands.Checkout(repo, topicBranch1Name);
        Assert.False(repo.RetrieveStatus().IsDirty);

        Branch rebasedBranch = repo.Branches[topicBranch1Name];
        Branch upstreamBranch = repo.Branches[conflictBranch1Name];
        Branch ontoBranch = repo.Branches[conflictBranch1Name];

        RebaseResult startResult = repo.Rebase.Start(rebasedBranch, upstreamBranch, ontoBranch, Constants.Identity, null);

        // The rebase should stop on a conflict before completing any step.
        Assert.Equal(RebaseStatus.Conflicts, startResult.Status);
        Assert.True(repo.RetrieveStatus().IsDirty);
        Assert.False(repo.Index.IsFullyMerged);
        Assert.Equal(0, startResult.CompletedStepCount);
        Assert.Equal(3, startResult.TotalStepCount);

        // Set up the callbacks to verify that checkout progress / notify
        // callbacks are called.
        bool progressCallbackFired = false;
        bool notifyCallbackFired = false;
        RebaseOptions options = new RebaseOptions()
        {
            OnCheckoutProgress = (x, y, z) => progressCallbackFired = true,
            OnCheckoutNotify = (x, y) =>
            {
                notifyCallbackFired = true;
                return true;
            },
            CheckoutNotifyFlags = CheckoutNotifyFlags.Updated,
        };

        repo.Rebase.Abort(options);

        // Aborting must roll everything back to the pre-rebase state.
        Assert.False(repo.RetrieveStatus().IsDirty, "Repository workdir is dirty after Rebase.Abort.");
        Assert.True(repo.Index.IsFullyMerged, "Repository index is not fully merged after Rebase.Abort.");
        Assert.Equal(CurrentOperation.None, repo.Info.CurrentOperation);
        Assert.True(progressCallbackFired, "Checkout progress callback was not called during Rebase.Abort.");
        Assert.True(notifyCallbackFired, "Checkout notify callback was not called during Rebase.Abort.");
    }
}
/// <summary>
/// Parameterized rebase test: checks out <paramref name="initialBranchName"/>,
/// rebases <paramref name="branchName"/> (or HEAD when null) onto
/// <paramref name="upstreamName"/>/<paramref name="ontoName"/>, and verifies
/// the step callbacks, step counts, and the rewritten commit chain.
/// </summary>
/// <param name="stepCount">Expected number of rebase steps.</param>
public void CanRebase(string initialBranchName, string branchName, string upstreamName, string ontoName, int stepCount)
{
    SelfCleaningDirectory scd = BuildSelfCleaningDirectory();
    var path = Repository.Init(scd.DirectoryPath);

    using (Repository repo = new Repository(path))
    {
        ConstructRebaseTestRepository(repo);

        Commands.Checkout(repo, initialBranchName);
        Assert.False(repo.RetrieveStatus().IsDirty);

        // Null branch/onto names exercise the "use HEAD" / "use upstream" defaults.
        Branch branch = (branchName == null) ? null : repo.Branches[branchName];
        Branch upstream = repo.Branches[upstreamName];
        Branch onto = (ontoName == null) ? null : repo.Branches[ontoName];
        Commit expectedSinceCommit = (branch == null) ? repo.Head.Tip : branch.Tip;
        Commit expectedUntilCommit = upstream.Tip;
        Commit expectedOntoCommit = (onto == null) ? upstream.Tip : onto.Tip;

        int beforeStepCallCount = 0;
        int afterStepCallCount = 0;
        bool beforeRebaseStepCountCorrect = true;
        bool afterRebaseStepCountCorrect = true;
        bool totalStepCountCorrect = true;

        List<Commit> PreRebaseCommits = new List<Commit>();
        List<CompletedRebaseStepInfo> PostRebaseResults = new List<CompletedRebaseStepInfo>();
        // NOTE(review): expectedParentId is never read below — dead local.
        ObjectId expectedParentId = upstream.Tip.Id;

        RebaseOptions options = new RebaseOptions()
        {
            // Record the source commit of each step and check the step index
            // matches the number of callbacks seen so far.
            RebaseStepStarting = x =>
            {
                beforeRebaseStepCountCorrect &= beforeStepCallCount == x.StepIndex;
                totalStepCountCorrect &= (x.TotalStepCount == stepCount);
                beforeStepCallCount++;
                PreRebaseCommits.Add(x.StepInfo.Commit);
            },
            // Record the resulting commit of each completed step.
            RebaseStepCompleted = x =>
            {
                afterRebaseStepCountCorrect &= (afterStepCallCount == x.CompletedStepIndex);
                totalStepCountCorrect &= (x.TotalStepCount == stepCount);
                afterStepCallCount++;
                PostRebaseResults.Add(new CompletedRebaseStepInfo(x.Commit, x.WasPatchAlreadyApplied));
            },
        };

        RebaseResult rebaseResult = repo.Rebase.Start(branch, upstream, onto, Constants.Identity, options);

        // Validation:
        Assert.True(afterRebaseStepCountCorrect, "Unexpected CompletedStepIndex value in RebaseStepCompleted");
        Assert.True(beforeRebaseStepCountCorrect, "Unexpected StepIndex value in RebaseStepStarting");
        Assert.True(totalStepCountCorrect, "Unexpected TotalStepcount value in Rebase step callback");
        Assert.Equal(RebaseStatus.Complete, rebaseResult.Status);
        Assert.Equal(stepCount, rebaseResult.TotalStepCount);
        Assert.Null(rebaseResult.CurrentStepInfo);
        Assert.Equal(stepCount, rebaseResult.CompletedStepCount);
        Assert.False(repo.RetrieveStatus().IsDirty);
        Assert.Equal(stepCount, beforeStepCallCount);
        Assert.Equal(stepCount, afterStepCallCount);

        // Verify the chain of source commits that were rebased.
        CommitFilter sourceCommitFilter = new CommitFilter()
        {
            IncludeReachableFrom = expectedSinceCommit,
            ExcludeReachableFrom = expectedUntilCommit,
            SortBy = CommitSortStrategies.Reverse | CommitSortStrategies.Topological,
        };
        Assert.Equal(repo.Commits.QueryBy(sourceCommitFilter), PreRebaseCommits);

        // Verify the chain of commits that resulted from the rebase: each
        // rewritten commit's first parent is the previous rewritten commit,
        // starting from the onto commit.
        Commit expectedParent = expectedOntoCommit;
        foreach (CompletedRebaseStepInfo stepInfo in PostRebaseResults)
        {
            Commit rebasedCommit = stepInfo.Commit;
            Assert.Equal(expectedParent.Id, rebasedCommit.Parents.First().Id);
            Assert.False(stepInfo.WasPatchAlreadyApplied);
            expectedParent = rebasedCommit;
        }

        Assert.Equal(repo.Head.Tip, PostRebaseResults.Last().Commit);
    }
}
/// <summary>
/// Verifies that a rebase stopped by a conflict can be resumed with
/// Rebase.Continue after resolving the conflict, running the step callbacks
/// and checkout callbacks through to completion.
/// </summary>
public void CanContinueRebase()
{
    SelfCleaningDirectory scd = BuildSelfCleaningDirectory();
    var path = Repository.Init(scd.DirectoryPath);
    using (Repository repo = new Repository(path))
    {
        ConstructRebaseTestRepository(repo);

        Commands.Checkout(repo, topicBranch1Name);
        Assert.False(repo.RetrieveStatus().IsDirty);

        Branch branch = repo.Branches[topicBranch1Name];
        Branch upstream = repo.Branches[conflictBranch1Name];
        Branch onto = repo.Branches[conflictBranch1Name];

        int beforeStepCallCount = 0;
        int afterStepCallCount = 0;
        bool wasCheckoutProgressCalled = false;
        bool wasCheckoutNotifyCalled = false;

        RebaseOptions options = new RebaseOptions()
        {
            RebaseStepStarting = x => beforeStepCallCount++,
            RebaseStepCompleted = x => afterStepCallCount++,
            OnCheckoutProgress = (x, y, z) => wasCheckoutProgressCalled = true,
            OnCheckoutNotify = (x, y) =>
            {
                wasCheckoutNotifyCalled = true;
                return(true);
            },
            CheckoutNotifyFlags = CheckoutNotifyFlags.Updated,
        };

        RebaseResult rebaseResult = repo.Rebase.Start(branch, upstream, onto, Constants.Identity, options);

        // Verify that we have a conflict.
        Assert.Equal(CurrentOperation.RebaseMerge, repo.Info.CurrentOperation);
        Assert.Equal(RebaseStatus.Conflicts, rebaseResult.Status);
        Assert.True(repo.RetrieveStatus().IsDirty);
        Assert.False(repo.Index.IsFullyMerged);
        Assert.Equal(0, rebaseResult.CompletedStepCount);
        Assert.Equal(3, rebaseResult.TotalStepCount);

        // Verify that expected callbacks were called: the first step started
        // but none completed.
        Assert.Equal(1, beforeStepCallCount);
        Assert.Equal(0, afterStepCallCount);
        Assert.True(wasCheckoutProgressCalled, "CheckoutProgress callback was not called.");

        // Resolve the conflict by taking "theirs" for every conflicted path.
        foreach (Conflict conflict in repo.Index.Conflicts)
        {
            Touch(repo.Info.WorkingDirectory, conflict.Theirs.Path, repo.Lookup<Blob>(conflict.Theirs.Id).GetContentText(new FilteringOptions(conflict.Theirs.Path)));
            Commands.Stage(repo, conflict.Theirs.Path);
        }

        Assert.True(repo.Index.IsFullyMerged);

        // Clear the flags:
        wasCheckoutProgressCalled = false;
        wasCheckoutNotifyCalled = false;

        RebaseResult continuedRebaseResult = repo.Rebase.Continue(Constants.Identity, options);

        Assert.NotNull(continuedRebaseResult);
        Assert.Equal(RebaseStatus.Complete, continuedRebaseResult.Status);
        Assert.False(repo.RetrieveStatus().IsDirty);
        Assert.True(repo.Index.IsFullyMerged);
        // NOTE(review): the next two asserts re-check the snapshot returned by
        // Rebase.Start (still 0 completed / 3 total); asserting
        // continuedRebaseResult.CompletedStepCount was probably intended — confirm.
        Assert.Equal(0, rebaseResult.CompletedStepCount);
        Assert.Equal(3, rebaseResult.TotalStepCount);
        Assert.Equal(3, beforeStepCallCount);
        Assert.Equal(3, afterStepCallCount);
        Assert.True(wasCheckoutProgressCalled, "CheckoutProgress callback was not called.");
        Assert.True(wasCheckoutNotifyCalled, "CheckoutNotify callback was not called.");
    }
}
/// <summary>
/// Verifies that a rebase fires checkout progress/notify callbacks both while
/// resetting HEAD and while applying steps, and that the rebased commits have
/// the expected trees, the original author, and the rebasing committer.
/// </summary>
public void VerifyRebaseDetailed(string attributes, string lineEnding, string[] expectedIds)
{
    SelfCleaningDirectory sandbox = BuildSelfCleaningDirectory();
    var repoPath = Repository.Init(sandbox.DirectoryPath);

    using (Repository repo = new Repository(repoPath))
    {
        ConstructRebaseTestRepository(repo, attributes, lineEnding);

        Branch workBranch = repo.Branches[topicBranch1Name];
        Branch upstreamBranch = repo.Branches[masterBranch2Name];

        Commands.Checkout(repo, workBranch);
        Assert.False(repo.RetrieveStatus().IsDirty);

        // Track the checkout callbacks separately for the initial HEAD reset
        // (before any step starts) and for the subsequent rebase steps.
        bool progressDuringSteps = false;
        bool progressDuringHeadReset = false;
        bool notifyDuringSteps = false;
        bool notifyDuringHeadReset = false;
        bool applyingSteps = false;

        RebaseOptions options = new RebaseOptions()
        {
            OnCheckoutProgress = (x, y, z) =>
            {
                if (applyingSteps)
                {
                    progressDuringSteps = true;
                }
                else
                {
                    progressDuringHeadReset = true;
                }
            },
            OnCheckoutNotify = (x, y) =>
            {
                if (applyingSteps)
                {
                    notifyDuringSteps = true;
                }
                else
                {
                    notifyDuringHeadReset = true;
                }

                return true;
            },
            CheckoutNotifyFlags = CheckoutNotifyFlags.Updated,
            RebaseStepStarting = x => applyingSteps = true,
        };

        repo.Rebase.Start(null, upstreamBranch, null, Constants.Identity2, options);

        Assert.True(notifyDuringHeadReset);
        Assert.True(progressDuringHeadReset);
        Assert.True(notifyDuringSteps);
        Assert.True(progressDuringSteps);

        // Walk the resultant rebased commits oldest-first.
        CommitFilter commitFilter = new CommitFilter()
        {
            IncludeReachableFrom = repo.Head.Tip,
            ExcludeReachableFrom = upstreamBranch.Tip,
            SortBy = CommitSortStrategies.Reverse | CommitSortStrategies.Topological,
        };

        List<ObjectId> expectedTreeIds = new List<ObjectId>()
        {
            new ObjectId(expectedIds[0]),
            new ObjectId(expectedIds[1]),
            new ObjectId(expectedIds[2]),
        };

        List<Commit> rebasedCommits = repo.Commits.QueryBy(commitFilter).ToList();
        Assert.Equal(3, rebasedCommits.Count);

        for (int i = 0; i < 3; i++)
        {
            // Each rewritten commit keeps the original author but carries the
            // committer identity used for the rebase.
            Assert.Equal(expectedTreeIds[i], rebasedCommits[i].Tree.Id);
            Assert.Equal(Constants.Signature.Name, rebasedCommits[i].Author.Name);
            Assert.Equal(Constants.Signature.Email, rebasedCommits[i].Author.Email);
            Assert.Equal(Constants.Signature2.Name, rebasedCommits[i].Committer.Name);
            Assert.Equal(Constants.Signature2.Email, rebasedCommits[i].Committer.Email);
        }
    }
}
/// <summary>
/// Clones the target repository, optimizes its images, commits (PGP-signed)
/// the results on the imgbot branch, verifies the optimized images, and
/// pushes to GitHub. In rebase mode it instead rewrites the existing imgbot
/// branch on top of the default branch via cherry-pick + squash + rebase.
/// Returns false when there is nothing to do or a step fails.
/// </summary>
/// <param name="parameters">Clone URL, local path, credentials, and rebase/settings flags.</param>
/// <param name="compressImagesMessages">Queue collector used to enqueue a follow-up wiki compression.</param>
/// <param name="logger">Structured logger.</param>
/// <returns>True when a branch was pushed; false otherwise.</returns>
// NOTE(review): `repo` (IDisposable) is never disposed and the clone directory
// is never deleted here — the sibling overload deletes parameters.LocalPath at
// the end; confirm whether this path leaks the working copy.
public static bool Run(CompressimagesParameters parameters, ICollector<CompressImagesMessage> compressImagesMessages, ILogger logger)
{
    CredentialsHandler credentialsProvider =
        (url, user, cred) => new UsernamePasswordCredentials { Username = KnownGitHubs.Username, Password = parameters.Password };

    // clone
    var cloneOptions = new CloneOptions
    {
        CredentialsProvider = credentialsProvider,
    };
    Repository.Clone(parameters.CloneUrl, parameters.LocalPath, cloneOptions);
    var repo = new Repository(parameters.LocalPath);
    var remote = repo.Network.Remotes["origin"];
    var isWikiCompress = parameters.CloneUrl.EndsWith(".wiki.git");

    // check if we have the branch already or this is empty repo
    try
    {
        if (repo.Network.ListReferences(remote, credentialsProvider).Any() == false)
        {
            logger.LogInformation("CompressImagesFunction: no references found for {Owner}/{RepoName}", parameters.RepoOwner, parameters.RepoName);
            return(false);
        }

        // When not rebasing, an existing imgbot branch means a PR is already open.
        if (!parameters.IsRebase && repo.Network.ListReferences(remote, credentialsProvider).Any(x => x.CanonicalName == $"refs/heads/{KnownGitHubs.BranchName}"))
        {
            logger.LogInformation("CompressImagesFunction: branch already exists for {Owner}/{RepoName}", parameters.RepoOwner, parameters.RepoName);
            return(false);
        }
    }
    catch (Exception e)
    {
        // log + ignore
        logger.LogWarning(e, "CompressImagesFunction: issue checking for existing branch or empty repo for {Owner}/{RepoName}", parameters.RepoOwner, parameters.RepoName);
    }

    // check if we should switch away from the default branch
    if (!isWikiCompress && parameters.Settings != null && !string.IsNullOrEmpty(parameters.Settings.DefaultBranchOverride))
    {
        logger.LogInformation(
            "CompressImagesFunction: default branch override for {Owner}/{RepoName} is {DefaultBranchOverride}",
            parameters.RepoOwner,
            parameters.RepoName,
            parameters.Settings.DefaultBranchOverride);

        var baseBranch = repo.Branches[$"refs/remotes/origin/{parameters.Settings.DefaultBranchOverride}"];
        if (baseBranch == null)
        {
            logger.LogWarning(
                "CompressImagesFunction: default branch ({DefaultBranchOverride}) not found for {Owner}/{RepoName}",
                parameters.Settings.DefaultBranchOverride,
                parameters.RepoOwner,
                parameters.RepoName);
            return(false);
        }

        Commands.Checkout(repo, baseBranch);
    }

    var repoConfiguration = new RepoConfiguration();
    try
    {
        // see if .imgbotconfig exists in repo root
        var repoConfigJson = File.ReadAllText(parameters.LocalPath + Path.DirectorySeparatorChar + ".imgbotconfig");
        if (!string.IsNullOrEmpty(repoConfigJson))
        {
            repoConfiguration = JsonConvert.DeserializeObject<RepoConfiguration>(repoConfigJson);
        }
    }
    catch
    {
        // ignore (missing/unreadable config falls back to defaults)
    }

    // Add new compressMessage if we should compress Wiki
    if (repoConfiguration.CompressWiki && isWikiCompress == false)
    {
        logger.LogInformation("CompressImagesFunction: Adding Wiki image compression to queue for {Owner}/{RepoName}", parameters.RepoOwner, parameters.RepoName);
        compressImagesMessages.Add(new CompressImagesMessage()
        {
            InstallationId = parameters.CompressImagesMessage.InstallationId,
            RepoName = parameters.CompressImagesMessage.RepoName,
            Owner = parameters.RepoOwner,
            CloneUrl = $"https://github.com/{parameters.RepoOwner}/{parameters.RepoName}.wiki.git"
        });
    }

    if (Schedule.ShouldOptimizeImages(repoConfiguration, repo) == false)
    {
        logger.LogInformation("CompressImagesFunction: skipping optimization due to schedule for {Owner}/{RepoName}", parameters.RepoOwner, parameters.RepoName);
        return(false);
    }

    // Should not create branch if we are compressing Wiki or performing rebase
    if (isWikiCompress == false && !parameters.IsRebase)
    {
        // check out the branch
        repo.CreateBranch(KnownGitHubs.BranchName);
        var branch = Commands.Checkout(repo, KnownGitHubs.BranchName);
    }

    // reset any mean files
    repo.Reset(ResetMode.Mixed, repo.Head.Tip);

    // optimize images
    string[] imagePaths;
    List<string> addedOrModifiedImagePaths = new List<string>();
    List<string> deletedImagePaths = new List<string>();

    if (parameters.IsRebase)
    {
        // Fetch the existing imgbot branch, then diff its parent against the
        // default-branch tip to find only the images that changed since.
        var refspec = string.Format("{0}:{0}", KnownGitHubs.BranchName);
        Commands.Fetch(repo, "origin", new List<string> { refspec }, null, "fetch");
        var diff = repo.Diff.Compare<TreeChanges>(repo.Branches[KnownGitHubs.BranchName].Commits.ElementAt(1).Tree, repo.Head.Tip.Tree);

        if (diff == null)
        {
            logger.LogInformation("Something went wrong while doing rebase");
            return(false);
        }

        foreach (TreeEntryChanges c in diff)
        {
            if (KnownImgPatterns.ImgExtensions.Contains(Path.GetExtension(c.Path)))
            {
                var path = parameters.LocalPath + "/" + c.Path;
                var oldpath = parameters.LocalPath + "/" + c.OldPath;
                switch (c.Status)
                {
                    case ChangeKind.Added:
                    case ChangeKind.Modified:
                        addedOrModifiedImagePaths.Add(path.Replace("\\", "/"));
                        break;
                    case ChangeKind.Renamed:
                        addedOrModifiedImagePaths.Add(path.Replace("\\", "/"));
                        deletedImagePaths.Add(oldpath.Replace("\\", "/"));
                        break;
                    case ChangeKind.Deleted:
                        deletedImagePaths.Add(path.Replace("\\", "/"));
                        break;
                }
            }
        }

        imagePaths = ImageQuery.FilterOutIgnoredFiles(addedOrModifiedImagePaths, repoConfiguration);
    }
    else
    {
        imagePaths = ImageQuery.FindImages(parameters.LocalPath, repoConfiguration);
    }

    var optimizedImages = OptimizeImages(repo, parameters.LocalPath, imagePaths, logger, repoConfiguration.AggressiveCompression);
    if (optimizedImages.Length == 0)
    {
        return(false);
    }

    // create commit message based on optimizations
    foreach (var image in optimizedImages)
    {
        Commands.Stage(repo, image.OriginalPath);
    }

    var commitMessage = CommitMessage.Create(optimizedImages);
    var signature = new Signature(KnownGitHubs.ImgBotLogin, KnownGitHubs.ImgBotEmail, DateTimeOffset.Now);
    repo.Commit(commitMessage, signature, signature);

    if (parameters.IsRebase)
    {
        var baseBranch = repo.Head;
        var newCommit = baseBranch.Tip;
        var oldCommit = repo.Branches[KnownGitHubs.BranchName].Tip;

        // we need to reset the default branch so that we can
        // rebase to it later.
        repo.Reset(ResetMode.Hard, repo.Head.Commits.ElementAt(1));

        // checkout to imgbot branch. TODO: remove because this is needed earlier on diff
        Commands.Checkout(repo, KnownGitHubs.BranchName);

        // cherry-pick the fresh optimization commit onto the imgbot branch,
        // preferring its side on any file conflict.
        var cherryPickOptions = new CherryPickOptions() { MergeFileFavor = MergeFileFavor.Theirs, };
        var cherryPickResult = repo.CherryPick(newCommit, signature, cherryPickOptions);
        if (cherryPickResult.Status == CherryPickStatus.Conflicts)
        {
            var status = repo.RetrieveStatus(new LibGit2Sharp.StatusOptions() { });
            foreach (var item in status)
            {
                if (item.State == FileStatus.Conflicted)
                {
                    Commands.Stage(repo, item.FilePath);
                }
            }

            repo.Commit(commitMessage, signature, signature);
        }

        // New commit message creation: merge the previous optimization results
        // with the new ones and drop entries for deleted files.
        var previousCommitResults = CompressionResult.ParseCommitMessage(oldCommit.Message);
        var mergedResults = CompressionResult.Merge(optimizedImages, previousCommitResults);
        var filteredResults = CompressionResult.Filter(mergedResults, deletedImagePaths.ToArray());
        var squashCommitMessage = CommitMessage.Create(filteredResults);

        // squash the old + new optimization commits into one
        var baseCommit = repo.Head.Commits.ElementAt(2);
        repo.Reset(ResetMode.Soft, baseCommit);
        repo.Commit(squashCommitMessage, signature, signature);

        // rebase the squashed imgbot branch onto the reset default branch
        var rebaseOptions = new RebaseOptions() { FileConflictStrategy = CheckoutFileConflictStrategy.Theirs, };
        var rebaseResult = repo.Rebase.Start(repo.Head, baseBranch, null, new Identity(KnownGitHubs.ImgBotLogin, KnownGitHubs.ImgBotEmail), rebaseOptions);
        while (rebaseResult.Status == RebaseStatus.Conflicts)
        {
            var status = repo.RetrieveStatus(new LibGit2Sharp.StatusOptions() { });
            foreach (var item in status)
            {
                if (item.State == FileStatus.Conflicted)
                {
                    // Keep conflicted files we optimized; drop everything else.
                    if (imagePaths.Contains(parameters.LocalPath + "/" + item.FilePath))
                    {
                        Commands.Stage(repo, item.FilePath);
                    }
                    else
                    {
                        Commands.Remove(repo, item.FilePath);
                    }
                }
            }

            rebaseResult = repo.Rebase.Continue(new Identity(KnownGitHubs.ImgBotLogin, KnownGitHubs.ImgBotEmail), rebaseOptions);
        }
    }

    // We just made a normal commit, now we are going to capture all the values generated from that commit
    // then rewind and make a signed commit
    var commitBuffer = Commit.CreateBuffer(
        repo.Head.Tip.Author,
        repo.Head.Tip.Committer,
        repo.Head.Tip.Message,
        repo.Head.Tip.Tree,
        repo.Head.Tip.Parents,
        true,
        null);
    var signedCommitData = CommitSignature.Sign(commitBuffer + "\n", parameters.PgpPrivateKey, parameters.PgPPassword);
    repo.Reset(ResetMode.Soft, repo.Head.Commits.Skip(1).First().Sha);
    var commitToKeep = repo.ObjectDatabase.CreateCommitWithSignature(commitBuffer, signedCommitData);
    repo.Refs.UpdateTarget(repo.Refs.Head, commitToKeep);

    // Should use "master" if we are compressing Wiki
    if (isWikiCompress)
    {
        var branchAgain = Commands.Checkout(repo, "master");
    }
    else
    {
        var branchAgain = Commands.Checkout(repo, KnownGitHubs.BranchName);
    }

    repo.Reset(ResetMode.Hard, commitToKeep.Sha);

    // verify images are not corrupted by reading from git
    // see https://github.com/dabutvin/ImgBot/issues/273
    try
    {
        foreach (var image in optimizedImages)
        {
            if (image.OriginalPath.EndsWith(".svg"))
            {
                // do not use ImageMagick to verify SVGs
                continue;
            }

            new MagickImage(image.OriginalPath).Dispose();
        }
    }
    catch (MagickErrorException)
    {
        logger.LogError("Corrupt images after reset!");
        return(false);
    }

    // push to GitHub
    if (isWikiCompress)
    {
        repo.Network.Push(remote, "refs/heads/master", new PushOptions
        {
            CredentialsProvider = credentialsProvider,
        });
    }
    else
    {
        var refs = $"refs/heads/{KnownGitHubs.BranchName}";
        if (parameters.IsRebase)
        {
            // Force-push: the rebase rewrote the branch history.
            refs = refs.Insert(0, "+");
        }

        logger.LogInformation("refs: {refs}", refs);
        repo.Network.Push(remote, refs, new PushOptions
        {
            CredentialsProvider = credentialsProvider,
        });
    }

    return(true);
}
/// <summary>
/// Clones the target repository, optimizes its images, commits (PGP-signed)
/// the results on the imgbot branch, verifies the optimized images, pushes to
/// GitHub, and deletes the local clone. Also persists paid-plan PR settings
/// from .imgbotconfig. In rebase mode it rewrites the existing imgbot branch
/// on top of the default branch via cherry-pick + rebase. Returns false when
/// there is nothing to do or a step fails.
/// </summary>
/// <param name="parameters">Clone URL, local path, credentials, and rebase/settings flags.</param>
/// <param name="compressImagesMessages">Queue collector used to enqueue a follow-up wiki compression.</param>
/// <param name="logger">Structured logger.</param>
/// <returns>True when a branch was pushed; false otherwise.</returns>
// NOTE(review): `repo` (IDisposable) is never disposed (only the directory is
// deleted at the end) — confirm whether pack files stay locked on Windows.
public static bool Run(CompressimagesParameters parameters, ICollector<CompressImagesMessage> compressImagesMessages, ILogger logger)
{
    var storageAccount = CloudStorageAccount.Parse(Common.KnownEnvironmentVariables.AzureWebJobsStorage);
    var paidPlan = PaidPlan(storageAccount, parameters.RepoOwner);

    CredentialsHandler credentialsProvider =
        (url, user, cred) => new UsernamePasswordCredentials { Username = KnownGitHubs.Username, Password = parameters.Password };

    // clone
    var cloneOptions = new CloneOptions
    {
        CredentialsProvider = credentialsProvider,
    };
    Repository.Clone(parameters.CloneUrl, parameters.LocalPath, cloneOptions);
    var repo = new Repository(parameters.LocalPath);
    var remote = repo.Network.Remotes["origin"];
    var isWikiCompress = parameters.CloneUrl.EndsWith(".wiki.git");

    // check if we have the branch already or this is empty repo
    try
    {
        if (repo.Network.ListReferences(remote, credentialsProvider).Any() == false)
        {
            logger.LogInformation("CompressImagesFunction: no references found for {Owner}/{RepoName}", parameters.RepoOwner, parameters.RepoName);
            return(false);
        }
    }
    catch (Exception e)
    {
        // log + ignore
        logger.LogWarning(e, "CompressImagesFunction: issue checking for existing branch or empty repo for {Owner}/{RepoName}", parameters.RepoOwner, parameters.RepoName);
    }

    // check if the branch exists and has been modified by the user
    // NOTE(review): if refs/remotes/origin/<imgbot branch> does not exist while
    // IsRebase is true, the indexer returns null and .Tip throws — confirm the
    // caller guarantees the branch exists in rebase mode.
    if (parameters.IsRebase && repo.Branches[$"refs/remotes/origin/{KnownGitHubs.BranchName}"].Tip.Author.Name != KnownGitHubs.ImgBotLogin)
    {
        logger.LogInformation("CompressImagesFunction: imgbot branch has been modified by the user.");
        return(false);
    }

    // check if we should switch away from the default branch
    if (!isWikiCompress && parameters.Settings != null && !string.IsNullOrEmpty(parameters.Settings.DefaultBranchOverride))
    {
        logger.LogInformation(
            "CompressImagesFunction: default branch override for {Owner}/{RepoName} is {DefaultBranchOverride}",
            parameters.RepoOwner,
            parameters.RepoName,
            parameters.Settings.DefaultBranchOverride);

        var baseBranch = repo.Branches[$"refs/remotes/origin/{parameters.Settings.DefaultBranchOverride}"];
        if (baseBranch == null)
        {
            logger.LogWarning(
                "CompressImagesFunction: default branch ({DefaultBranchOverride}) not found for {Owner}/{RepoName}",
                parameters.Settings.DefaultBranchOverride,
                parameters.RepoOwner,
                parameters.RepoName);
            return(false);
        }

        Commands.Checkout(repo, baseBranch);
    }

    var repoConfiguration = new RepoConfiguration();
    try
    {
        // see if .imgbotconfig exists in repo root
        var repoConfigJson = File.ReadAllText(parameters.LocalPath + Path.DirectorySeparatorChar + ".imgbotconfig");
        if (!string.IsNullOrEmpty(repoConfigJson))
        {
            repoConfiguration = JsonConvert.DeserializeObject<RepoConfiguration>(repoConfigJson);

            // for now we are not adding the labels functionality || repoConfiguration.Labels.Any() TODO: add it when adding the labels feature
            if (paidPlan && (repoConfiguration.PrBody != null || repoConfiguration.PrTitle != null))
            {
                // Persist custom PR title/body for paid plans into table storage.
                var settingsTable = storageAccount.CreateCloudTableClient().GetTableReference("settings");

                // Labels = repoConfiguration.Labels TODO: add it when adding the labels feature
                var settings = new Common.TableModels.Settings(
                    parameters.CompressImagesMessage.InstallationId.ToString(),
                    parameters.CompressImagesMessage.RepoName)
                {
                    PrBody = repoConfiguration.PrBody,
                    PrTitle = repoConfiguration.PrTitle,
                };
                settingsTable.ExecuteAsync(TableOperation.InsertOrReplace(settings)).Wait();
            }
        }
    }
    catch
    {
        // ignore (missing/unreadable config falls back to defaults)
    }

    // Add new compressMessage if we should compress Wiki
    if (repoConfiguration.CompressWiki && isWikiCompress == false)
    {
        logger.LogInformation("CompressImagesFunction: Adding Wiki image compression to queue for {Owner}/{RepoName}", parameters.RepoOwner, parameters.RepoName);
        compressImagesMessages.Add(new CompressImagesMessage()
        {
            InstallationId = parameters.CompressImagesMessage.InstallationId,
            RepoName = parameters.CompressImagesMessage.RepoName,
            Owner = parameters.RepoOwner,
            CloneUrl = $"https://github.com/{parameters.RepoOwner}/{parameters.RepoName}.wiki.git"
        });
    }

    if (Schedule.ShouldOptimizeImages(repoConfiguration, repo) == false)
    {
        logger.LogInformation("CompressImagesFunction: skipping optimization due to schedule for {Owner}/{RepoName}", parameters.RepoOwner, parameters.RepoName);
        return(false);
    }

    // Should not create branch if we are compressing Wiki or performing rebase
    if (isWikiCompress == false && !parameters.IsRebase)
    {
        // check out the branch
        repo.CreateBranch(KnownGitHubs.BranchName);
        var branch = Commands.Checkout(repo, KnownGitHubs.BranchName);
    }
    else if (parameters.IsRebase)
    {
        // if rebasing, fetch the branch
        var refspec = string.Format("{0}:{0}", KnownGitHubs.BranchName);
        Commands.Fetch(repo, "origin", new List<string> { refspec }, null, "fetch");
    }

    // reset any mean files
    repo.Reset(ResetMode.Mixed, repo.Head.Tip);

    // optimize images
    var imagePaths = ImageQuery.FindImages(parameters.LocalPath, repoConfiguration);
    var optimizedImages = OptimizeImages(repo, parameters.LocalPath, imagePaths, logger, repoConfiguration.AggressiveCompression);
    if (optimizedImages.Length == 0)
    {
        return(false);
    }

    if (!Threshold.MeetsThreshold(repoConfiguration, optimizedImages))
    {
        logger.LogInformation($"Did not meet threshold. {parameters.RepoOwner}/{parameters.RepoName}");
        return(false);
    }

    // create commit message based on optimizations
    foreach (var image in optimizedImages)
    {
        Commands.Stage(repo, image.OriginalPath);
    }

    var commitMessage = CommitMessage.Create(optimizedImages);
    var signature = new Signature(KnownGitHubs.ImgBotLogin, KnownGitHubs.ImgBotEmail, DateTimeOffset.Now);
    repo.Commit(commitMessage, signature, signature);

    if (parameters.IsRebase)
    {
        var baseBranch = repo.Head;
        var newCommit = baseBranch.Tip;

        // we need to reset the default branch so that we can
        // rebase to it later.
        repo.Reset(ResetMode.Hard, repo.Head.Commits.ElementAt(1));
        Commands.Checkout(repo, KnownGitHubs.BranchName);

        // reset imgbot branch removing old commit
        repo.Reset(ResetMode.Hard, repo.Head.Commits.ElementAt(1));

        // cherry-pick the fresh optimization commit onto the imgbot branch,
        // preferring its side on any file conflict.
        var cherryPickOptions = new CherryPickOptions() { MergeFileFavor = MergeFileFavor.Theirs, };
        var cherryPickResult = repo.CherryPick(newCommit, signature, cherryPickOptions);
        if (cherryPickResult.Status == CherryPickStatus.Conflicts)
        {
            var status = repo.RetrieveStatus(new LibGit2Sharp.StatusOptions() { });
            foreach (var item in status)
            {
                if (item.State == FileStatus.Conflicted)
                {
                    Commands.Stage(repo, item.FilePath);
                }
            }

            repo.Commit(commitMessage, signature, signature);
        }

        // rebase the imgbot branch back onto the default branch
        var rebaseOptions = new RebaseOptions() { FileConflictStrategy = CheckoutFileConflictStrategy.Theirs, };
        var rebaseResult = repo.Rebase.Start(repo.Head, baseBranch, null, new Identity(KnownGitHubs.ImgBotLogin, KnownGitHubs.ImgBotEmail), rebaseOptions);
        while (rebaseResult.Status == RebaseStatus.Conflicts)
        {
            var status = repo.RetrieveStatus(new LibGit2Sharp.StatusOptions() { });
            foreach (var item in status)
            {
                if (item.State == FileStatus.Conflicted)
                {
                    // Keep conflicted files we optimized; drop everything else.
                    if (imagePaths.Contains(Path.Combine(parameters.LocalPath, item.FilePath)))
                    {
                        Commands.Stage(repo, item.FilePath);
                    }
                    else
                    {
                        Commands.Remove(repo, item.FilePath);
                    }
                }
            }

            rebaseResult = repo.Rebase.Continue(new Identity(KnownGitHubs.ImgBotLogin, KnownGitHubs.ImgBotEmail), rebaseOptions);
        }
    }

    // We just made a normal commit, now we are going to capture all the values generated from that commit
    // then rewind and make a signed commit
    var commitBuffer = Commit.CreateBuffer(
        repo.Head.Tip.Author,
        repo.Head.Tip.Committer,
        repo.Head.Tip.Message,
        repo.Head.Tip.Tree,
        repo.Head.Tip.Parents,
        true,
        null);
    var signedCommitData = CommitSignature.Sign(commitBuffer + "\n", parameters.PgpPrivateKey, parameters.PgPPassword);
    repo.Reset(ResetMode.Soft, repo.Head.Commits.Skip(1).First().Sha);
    var commitToKeep = repo.ObjectDatabase.CreateCommitWithSignature(commitBuffer, signedCommitData);
    repo.Refs.UpdateTarget(repo.Refs.Head, commitToKeep);

    // Should use "master" if we are compressing Wiki
    if (isWikiCompress)
    {
        var branchAgain = Commands.Checkout(repo, "master");
    }
    else
    {
        var branchAgain = Commands.Checkout(repo, KnownGitHubs.BranchName);
    }

    repo.Reset(ResetMode.Hard, commitToKeep.Sha);

    // verify images are not corrupted by reading from git
    // see https://github.com/dabutvin/ImgBot/issues/273
    try
    {
        foreach (var image in optimizedImages)
        {
            if (image.OriginalPath.EndsWith(".svg"))
            {
                // do not use ImageMagick to verify SVGs
                continue;
            }

            new MagickImage(image.OriginalPath).Dispose();
        }
    }
    catch (MagickErrorException)
    {
        logger.LogError("Corrupt images after reset!");
        return(false);
    }

    // push to GitHub
    if (isWikiCompress)
    {
        repo.Network.Push(remote, "refs/heads/master", new PushOptions
        {
            CredentialsProvider = credentialsProvider,
        });
    }
    else
    {
        var refs = $"refs/heads/{KnownGitHubs.BranchName}";
        if (parameters.IsRebase)
        {
            // Force-push: the rebase rewrote the branch history.
            refs = refs.Insert(0, "+");
        }

        logger.LogInformation("refs: {refs}", refs);
        repo.Network.Push(remote, refs, new PushOptions
        {
            CredentialsProvider = credentialsProvider,
        });
    }

    // best-effort cleanup of the local clone
    try
    {
        Directory.Delete(parameters.LocalPath, true);
    }
    catch (Exception e)
    {
        logger.LogError(e, $"Delete issue with repository {parameters.LocalPath}");
    }

    return(true);
}