/// <summary>
/// Advance blame by one step for a candidate whose commit has at most one
/// parent (the common, non-merge case).
/// </summary>
/// <param name="n">candidate currently carrying unblamed regions.</param>
/// <returns>true if a result was emitted for the caller to report.</returns>
/// <exception cref="System.IO.IOException"></exception>
private bool ProcessOne(Candidate n)
{
	RevCommit parent = n.GetParent(0);
	if (parent == null)
	{
		// Root commit: everything left is blamed here (or handed to the
		// next candidate in a reverse walk).
		return Split(n.GetNextCandidate(0), n);
	}
	if (parent.Has(SEEN))
	{
		return false;
	}
	revPool.ParseHeaders(parent);
	if (Find(parent, n.sourcePath))
	{
		// Find() left the parent's blob id in idBuf.
		if (idBuf.Equals(n.sourceBlob))
		{
			// The common case of the file not being modified in
			// a simple string-of-pearls history. Blame parent.
			n.sourceCommit = parent;
			Push(n);
			return false;
		}
		// Same path, different content: diff against the parent's blob.
		Candidate samePath = n.Create(parent, n.sourcePath);
		samePath.sourceBlob = idBuf.ToObjectId();
		samePath.LoadText(reader);
		return Split(samePath, n);
	}
	if (n.sourceCommit == null)
	{
		return Result(n);
	}
	// Path missing in the parent; see whether it was renamed.
	DiffEntry rename = FindRename(parent, n.sourceCommit, n.sourcePath);
	if (rename == null)
	{
		return Result(n);
	}
	if (0 == rename.GetOldId().PrefixCompare(n.sourceBlob))
	{
		// A 100% rename without any content change can also
		// skip directly to the parent.
		n.sourceCommit = parent;
		n.sourcePath = PathFilter.Create(rename.GetOldPath());
		Push(n);
		return false;
	}
	// Renamed with content changes: diff against the old blob.
	Candidate renamed = n.Create(parent, PathFilter.Create(rename.GetOldPath()));
	renamed.sourceBlob = rename.GetOldId().ToObjectId();
	renamed.renameScore = rename.GetScore();
	renamed.LoadText(reader);
	return Split(renamed, n);
}
/// <summary>
/// Render the path of a difference entry for display: the new path for an
/// add, the old path for a delete or modify, and "old -> new" for a copy
/// or rename.
/// </summary>
/// <param name="difference">entry to describe.</param>
/// <returns>a human readable path for the entry.</returns>
private static string Path(DiffEntry difference)
{
	switch (difference.GetChangeType())
	{
		case DiffEntry.ChangeType.ADD:
		{
			return difference.GetNewPath();
		}

		case DiffEntry.ChangeType.DELETE:
		case DiffEntry.ChangeType.MODIFY:
		{
			// Delete and modify are both identified by the pre-image path.
			return difference.GetOldPath();
		}

		case DiffEntry.ChangeType.COPY:
		case DiffEntry.ChangeType.RENAME:
		{
			// Copy and rename show both sides of the move.
			return string.Format("{0} -> {1}", difference.GetOldPath(), difference.GetNewPath());
		}

		default:
		{
			return difference.ToString();
		}
	}
}
/// <summary>Assert which renames should have happened, in traversal order.</summary>
/// <remarks>Assert which renames should have happened, in traversal order.</remarks>
/// <param name="expectedRenames">the rename specs, each one in the form "srcPath->destPath"
/// </param>
protected internal virtual void AssertRenames(params string[] expectedRenames)
{
	NUnit.Framework.Assert.AreEqual(expectedRenames.Length, diffCollector.diffs.Count,
		"Unexpected number of renames. Expected: " + expectedRenames.Length +
		", actual: " + diffCollector.diffs.Count);
	for (int i = 0; i < expectedRenames.Length; i++)
	{
		DiffEntry actual = diffCollector.diffs[i];
		NUnit.Framework.Assert.IsNotNull(actual);
		// Each spec is "srcPath->destPath"; split on the arrow.
		string[] parts = expectedRenames[i].RegexSplit("->");
		NUnit.Framework.Assert.IsNotNull(parts);
		NUnit.Framework.Assert.AreEqual(2, parts.Length);
		NUnit.Framework.Assert.AreEqual(parts[0], actual.GetOldPath());
		NUnit.Framework.Assert.AreEqual(parts[1], actual.GetNewPath());
	}
}
/// <summary>
/// A diff run without an output stream must still return the list of
/// entries: here a single MODIFY of "test.txt".
/// </summary>
public virtual void TestNoOutputStreamSet()
{
	FilePath file = WriteTrashFile("test.txt", "a");
	// Back-date the file so the later rewrite is seen as a modification.
	NUnit.Framework.Assert.IsTrue(file.SetLastModified(file.LastModified() - 5000));
	Git git = new Git(db);
	git.Add().AddFilepattern(".").Call();
	Write(file, "b");
	IList<DiffEntry> changes = git.Diff().Call();
	NUnit.Framework.Assert.IsNotNull(changes);
	NUnit.Framework.Assert.AreEqual(1, changes.Count);
	DiffEntry change = changes[0];
	NUnit.Framework.Assert.AreEqual(DiffEntry.ChangeType.MODIFY, change.GetChangeType());
	NUnit.Framework.Assert.AreEqual("test.txt", change.GetOldPath());
	NUnit.Framework.Assert.AreEqual("test.txt", change.GetNewPath());
}
/// <summary>
/// Committing with SetOnly(path) after moving a submodule's HEAD must
/// produce exactly one GITLINK-to-GITLINK diff for that path.
/// </summary>
public virtual void CommitSubmoduleUpdate()
{
	// Build two commits in the parent repository so the submodule has
	// history to move between.
	Git git = new Git(db);
	WriteTrashFile("file.txt", "content");
	git.Add().AddFilepattern("file.txt").Call();
	RevCommit firstCommit = git.Commit().SetMessage("create file").Call();
	WriteTrashFile("file.txt", "content2");
	git.Add().AddFilepattern("file.txt").Call();
	RevCommit secondCommit = git.Commit().SetMessage("edit file").Call();

	// Add this repository to itself as a submodule at path "sub".
	SubmoduleAddCommand command = new SubmoduleAddCommand(db);
	string path = "sub";
	command.SetPath(path);
	string uri = db.Directory.ToURI().ToString();
	command.SetURI(uri);
	Repository repo = command.Call();
	NUnit.Framework.Assert.IsNotNull(repo);
	AddRepoToClose(repo);

	// The index entry for the submodule should point at the second commit.
	SubmoduleWalk generator = SubmoduleWalk.ForIndex(db);
	NUnit.Framework.Assert.IsTrue(generator.Next());
	NUnit.Framework.Assert.AreEqual(path, generator.GetPath());
	NUnit.Framework.Assert.AreEqual(secondCommit, generator.GetObjectId());
	NUnit.Framework.Assert.AreEqual(uri, generator.GetModulesUrl());
	NUnit.Framework.Assert.AreEqual(path, generator.GetModulesPath());
	NUnit.Framework.Assert.AreEqual(uri, generator.GetConfigUrl());
	Repository submoduleRepo = generator.GetRepository();
	AddRepoToClose(submoduleRepo);
	NUnit.Framework.Assert.IsNotNull(submoduleRepo);
	NUnit.Framework.Assert.AreEqual(secondCommit, repo.Resolve(Constants.HEAD));

	// Commit the submodule link, rewind the submodule HEAD to the first
	// commit, then commit the link again.
	RevCommit submoduleAddCommit = git.Commit().SetMessage("submodule add").SetOnly(path).Call();
	NUnit.Framework.Assert.IsNotNull(submoduleAddCommit);
	RefUpdate update = repo.UpdateRef(Constants.HEAD);
	update.SetNewObjectId(firstCommit);
	NUnit.Framework.Assert.AreEqual(RefUpdate.Result.FORCED, update.ForceUpdate());
	RevCommit submoduleEditCommit = git.Commit().SetMessage("submodule add").SetOnly(path).Call();
	NUnit.Framework.Assert.IsNotNull(submoduleEditCommit);

	// Diff the two parent commits: exactly one GITLINK entry moving the
	// submodule from secondCommit back to firstCommit.
	TreeWalk walk = new TreeWalk(db);
	walk.AddTree(submoduleAddCommit.Tree);
	walk.AddTree(submoduleEditCommit.Tree);
	walk.Filter = TreeFilter.ANY_DIFF;
	IList<DiffEntry> diffs = DiffEntry.Scan(walk);
	NUnit.Framework.Assert.AreEqual(1, diffs.Count);
	DiffEntry subDiff = diffs[0];
	NUnit.Framework.Assert.AreEqual(FileMode.GITLINK, subDiff.GetOldMode());
	NUnit.Framework.Assert.AreEqual(FileMode.GITLINK, subDiff.GetNewMode());
	NUnit.Framework.Assert.AreEqual(secondCommit, subDiff.GetOldId().ToObjectId());
	NUnit.Framework.Assert.AreEqual(firstCommit, subDiff.GetNewId().ToObjectId());
	NUnit.Framework.Assert.AreEqual(path, subDiff.GetNewPath());
	NUnit.Framework.Assert.AreEqual(path, subDiff.GetOldPath());
}
/// <summary>
/// Advance blame across a merge commit, splitting the candidate's
/// unblamed regions among the merge's parents.
/// </summary>
/// <remarks>
/// FIX: the reverse-candidate rename branch previously wrote
/// <c>ids[pCnt]</c>, one past the end of an array allocated with length
/// <c>pCnt</c>, throwing <see cref="System.IndexOutOfRangeException"/>
/// whenever reverse blame met a rename at a merge. The intended index is
/// the parent's loop index, as used by every other access in this method.
/// </remarks>
/// <param name="n">candidate whose commit has two or more parents.</param>
/// <returns>true if a result was emitted for the caller to report.</returns>
/// <exception cref="System.IO.IOException"></exception>
private bool ProcessMerge(Candidate n)
{
	int pCnt = n.GetParentCount();
	// Parse headers of every unseen parent before examining trees.
	for (int pIdx = 0; pIdx < pCnt; pIdx++)
	{
		RevCommit parent = n.GetParent(pIdx);
		if (parent.Has(SEEN))
		{
			continue;
		}
		revPool.ParseHeaders(parent);
	}
	// If any single parent exactly matches the merge, follow only
	// that one parent through history.
	ObjectId[] ids = null;
	for (int pIdx = 0; pIdx < pCnt; pIdx++)
	{
		RevCommit parent = n.GetParent(pIdx);
		if (parent.Has(SEEN))
		{
			continue;
		}
		if (!Find(parent, n.sourcePath))
		{
			continue;
		}
		if (!(n is Candidate.ReverseCandidate) && idBuf.Equals(n.sourceBlob))
		{
			n.sourceCommit = parent;
			Push(n);
			return false;
		}
		if (ids == null)
		{
			ids = new ObjectId[pCnt];
		}
		// idBuf was filled in by Find() above.
		ids[pIdx] = idBuf.ToObjectId();
	}
	// If rename detection is enabled, search for any relevant names.
	DiffEntry[] renames = null;
	if (renameDetector != null)
	{
		renames = new DiffEntry[pCnt];
		for (int pIdx = 0; pIdx < pCnt; pIdx++)
		{
			RevCommit parent = n.GetParent(pIdx);
			if (parent.Has(SEEN))
			{
				continue;
			}
			if (ids != null && ids[pIdx] != null)
			{
				// Path already found in this parent; no rename needed.
				continue;
			}
			DiffEntry r = FindRename(parent, n.sourceCommit, n.sourcePath);
			if (r == null)
			{
				continue;
			}
			if (n is Candidate.ReverseCandidate)
			{
				if (ids == null)
				{
					ids = new ObjectId[pCnt];
				}
				// Was ids[pCnt]: an out-of-range write. Record the old blob
				// id under this parent's index instead.
				ids[pIdx] = r.GetOldId().ToObjectId();
			}
			else
			{
				if (0 == r.GetOldId().PrefixCompare(n.sourceBlob))
				{
					// A 100% rename without any content change can also
					// skip directly to the parent. Note this bypasses an
					// earlier parent that had the path (above) but did not
					// have an exact content match. For performance reasons
					// we choose to follow the one parent over trying to do
					// possibly both parents.
					n.sourceCommit = parent;
					n.sourcePath = PathFilter.Create(r.GetOldPath());
					Push(n);
					return false;
				}
			}
			renames[pIdx] = r;
		}
	}
	// Construct the candidate for each parent.
	Candidate[] parents = new Candidate[pCnt];
	for (int pIdx = 0; pIdx < pCnt; pIdx++)
	{
		RevCommit parent = n.GetParent(pIdx);
		if (parent.Has(SEEN))
		{
			continue;
		}
		Candidate p;
		if (renames != null && renames[pIdx] != null)
		{
			p = n.Create(parent, PathFilter.Create(renames[pIdx].GetOldPath()));
			p.renameScore = renames[pIdx].GetScore();
			p.sourceBlob = renames[pIdx].GetOldId().ToObjectId();
		}
		else
		{
			if (ids != null && ids[pIdx] != null)
			{
				p = n.Create(parent, n.sourcePath);
				p.sourceBlob = ids[pIdx];
			}
			else
			{
				// Parent does not contain the path at all; skip it.
				continue;
			}
		}
		EditList editList;
		if (n is Candidate.ReverseCandidate && p.sourceBlob.Equals(n.sourceBlob))
		{
			// This special case happens on ReverseCandidate forks.
			p.sourceText = n.sourceText;
			editList = new EditList(0);
		}
		else
		{
			p.LoadText(reader);
			editList = diffAlgorithm.Diff(textComparator, p.sourceText, n.sourceText);
		}
		if (editList.IsEmpty())
		{
			// Ignoring whitespace (or some other special comparator) can
			// cause non-identical blobs to have an empty edit list. In
			// a case like this push the parent alone.
			if (n is Candidate.ReverseCandidate)
			{
				parents[pIdx] = p;
				continue;
			}
			p.regionList = n.regionList;
			Push(p);
			return false;
		}
		p.TakeBlame(editList, n);
		// Only remember this parent candidate if there is at least
		// one region that was blamed on the parent.
		if (p.regionList != null)
		{
			// Reverse blame requires inverting the regions. This puts
			// the regions the parent deleted from us into the parent,
			// and retains the common regions to look at other parents
			// for deletions.
			if (n is Candidate.ReverseCandidate)
			{
				Region r = p.regionList;
				p.regionList = n.regionList;
				n.regionList = r;
			}
			parents[pIdx] = p;
		}
	}
	if (n is Candidate.ReverseCandidate)
	{
		// On a reverse blame report all deletions found in the children,
		// and pass on to them a copy of our region list.
		Candidate resultHead = null;
		Candidate resultTail = null;
		for (int pIdx = 0; pIdx < pCnt; pIdx++)
		{
			Candidate p = parents[pIdx];
			if (p == null)
			{
				continue;
			}
			if (p.regionList != null)
			{
				// Append a copy of this parent's blamed regions to the
				// singly linked result list.
				Candidate r = p.Copy(p.sourceCommit);
				if (resultTail != null)
				{
					resultTail.queueNext = r;
					resultTail = r;
				}
				else
				{
					resultHead = r;
					resultTail = r;
				}
			}
			if (n.regionList != null)
			{
				p.regionList = n.regionList.DeepCopy();
				Push(p);
			}
		}
		if (resultHead != null)
		{
			return Result(resultHead);
		}
		return false;
	}
	// Push any parents that are still candidates.
	for (int pIdx = 0; pIdx < pCnt; pIdx++)
	{
		if (parents[pIdx] != null)
		{
			Push(parents[pIdx]);
		}
	}
	if (n.regionList != null)
	{
		return Result(n);
	}
	return false;
}