public void testDirectoryFileSimple()
{
    // Build a HEAD tree in which "DF" is a plain file.
    _theIndex = new GitIndex(db);
    _theIndex.add(trash, writeTrashFile("DF", "DF"));
    Core.Tree fileTree = db.MapTree(_theIndex.writeTree());

    // Build a MERGE tree in which "DF" is a directory containing "DF/DF".
    recursiveDelete(new DirectoryInfo(Path.Combine(trash.FullName, "DF")));
    _theIndex = new GitIndex(db);
    _theIndex.add(trash, writeTrashFile("DF/DF", "DF/DF"));
    Core.Tree dirTree = db.MapTree(_theIndex.writeTree());

    // file -> directory: the file entry must be removed and the nested file updated.
    _theIndex = new GitIndex(db);
    recursiveDelete(new DirectoryInfo(Path.Combine(trash.FullName, "DF")));
    _theIndex.add(trash, writeTrashFile("DF", "DF"));
    _theReadTree = new WorkDirCheckout(db, trash, fileTree, _theIndex, dirTree);
    _theReadTree.PrescanTwoTrees();
    Assert.IsTrue(_theReadTree.Removed.Contains("DF"));
    Assert.IsTrue(_theReadTree.Updated.ContainsKey("DF/DF"));

    // directory -> file: the reverse transition.
    recursiveDelete(new DirectoryInfo(Path.Combine(trash.FullName, "DF")));
    _theIndex = new GitIndex(db);
    _theIndex.add(trash, writeTrashFile("DF/DF", "DF/DF"));
    _theReadTree = new WorkDirCheckout(db, trash, dirTree, _theIndex, fileTree);
    _theReadTree.PrescanTwoTrees();
    Assert.IsTrue(_theReadTree.Removed.Contains("DF/DF"));
    Assert.IsTrue(_theReadTree.Updated.ContainsKey("DF"));
}
/// <summary>
/// Checks out the given remote branch into the working directory,
/// pointing HEAD at it and writing a fresh index.
/// </summary>
/// <param name="branch">Branch to check out; must not be null.</param>
private void doCheckout(GitSharp.Core.Ref branch)
{
    if (branch == null)
    {
        throw new ArgumentNullException("branch", "Cannot checkout; no HEAD advertised by remote");
    }

    var repo = Repository._internal_repo;

    // Link HEAD to the branch (suppressing the reflog entry) unless it already is HEAD.
    if (!Constants.HEAD.Equals(branch.getName()))
    {
        RefUpdate headLink = repo.UpdateRef(Constants.HEAD);
        headLink.disableRefLog();
        headLink.link(branch.getName());
    }

    GitSharp.Core.Commit commit = repo.MapCommit(branch.ObjectId);
    RefUpdate headUpdate = repo.UpdateRef(Constants.HEAD);
    headUpdate.NewObjectId = commit.CommitId;
    headUpdate.forceUpdate();

    // Populate the working directory and the index from the commit's tree.
    GitIndex index = new GitIndex(repo);
    GitSharp.Core.Tree tree = commit.TreeEntry;
    WorkDirCheckout workDirCheckout = new WorkDirCheckout(repo, repo.WorkingDirectory, index, tree);
    workDirCheckout.checkout();
    index.write();
}
public void testCheckingOutWithConflicts()
{
    // Index tracks a file "bar" and a nested file "foo/bar/baz/qux".
    var index = new GitIndex(db);
    index.add(trash, writeTrashFile("bar", "bar"));
    index.add(trash, writeTrashFile("foo/bar/baz/qux", "foo/bar"));

    // Rearrange the work dir so both paths clash (file vs. directory).
    recursiveDelete(new FileInfo(Path.Combine(trash.FullName, "bar")));
    recursiveDelete(new DirectoryInfo(Path.Combine(trash.FullName, "foo")));
    writeTrashFile("bar/baz/qux/foo", "another nasty one");
    writeTrashFile("foo", "troublesome little bugger");

    // Default behavior: checkout must refuse to clobber the conflicts.
    var failingCheckout = new WorkDirCheckout(db, trash, index, index);
    AssertHelper.Throws<CheckoutConflictException>(failingCheckout.checkout);

    // With FailOnConflict disabled the conflicting paths are overwritten.
    var forcedCheckout = new WorkDirCheckout(db, trash, index, index) { FailOnConflict = false };
    forcedCheckout.checkout();
    Assert.IsTrue(new FileInfo(Path.Combine(trash.FullName, "bar")).IsFile());
    Assert.IsTrue(new FileInfo(Path.Combine(trash.FullName, "foo/bar/baz/qux")).IsFile());

    // Second round: the replacement index collides the other way around.
    var index2 = new GitIndex(db);
    recursiveDelete(new FileInfo(Path.Combine(trash.FullName, "bar")));
    recursiveDelete(new DirectoryInfo(Path.Combine(trash.FullName, "foo")));
    index2.add(trash, writeTrashFile("bar/baz/qux/foo", "bar"));
    writeTrashFile("bar/baz/qux/bar", "evil? I thought it said WEEVIL!");
    index2.add(trash, writeTrashFile("foo", "lalala"));

    forcedCheckout = new WorkDirCheckout(db, trash, index2, index) { FailOnConflict = false };
    forcedCheckout.checkout();

    Assert.IsTrue(new FileInfo(Path.Combine(trash.FullName, "bar")).IsFile());
    Assert.IsTrue(new FileInfo(Path.Combine(trash.FullName, "foo/bar/baz/qux")).IsFile());
    Assert.IsNotNull(index2.GetEntry("bar"));
    Assert.IsNotNull(index2.GetEntry("foo/bar/baz/qux"));
    Assert.IsNull(index2.GetEntry("bar/baz/qux/foo"));
    Assert.IsNull(index2.GetEntry("foo"));
}
public void testRules1thru3_NoIndexEntry()
{
    var index = new GitIndex(db);

    // HEAD contains "foo"; the merge tree starts out empty.
    var head = new Core.Tree(db);
    FileTreeEntry entry = head.AddFile("foo");
    ObjectId expected = ObjectId.FromString("ba78e065e2c261d4f7b8f42107588051e87e18e9");
    entry.Id = expected;
    var merge = new Core.Tree(db);

    // head -> merge: "foo" vanishes, so the prescan must mark it removed.
    var checkout = new WorkDirCheckout(db, trash, head, index, merge);
    checkout.PrescanTwoTrees();
    Assert.IsTrue(checkout.Removed.Contains("foo"));

    // merge -> head: "foo" appears, so it must be updated to the head blob.
    checkout = new WorkDirCheckout(db, trash, merge, index, head);
    checkout.PrescanTwoTrees();
    Assert.AreEqual(expected, checkout.Updated["foo"]);

    // Both trees carry "foo" with differing ids: the merge side wins.
    ObjectId mergeId = ObjectId.FromString("ba78e065e2c261d4f7b8f42107588051e87e18ee");
    merge.AddFile("foo").Id = mergeId;
    checkout = new WorkDirCheckout(db, trash, head, index, merge);
    checkout.PrescanTwoTrees();
    Assert.AreEqual(mergeId, checkout.Updated["foo"]);
}
public void testCheckingOutWithConflicts()
{
    // Stage "bar" (a file) and "foo/bar/baz/qux" in the index.
    var index = new GitIndex(db);
    index.add(trash, writeTrashFile("bar", "bar"));
    index.add(trash, writeTrashFile("foo/bar/baz/qux", "foo/bar"));

    // Replace both paths on disk with clashing file/directory layouts.
    recursiveDelete(new FileInfo(Path.Combine(trash.FullName, "bar")));
    recursiveDelete(new DirectoryInfo(Path.Combine(trash.FullName, "foo")));
    writeTrashFile("bar/baz/qux/foo", "another nasty one");
    writeTrashFile("foo", "troublesome little bugger");

    // A strict checkout must throw on the conflicts.
    var strictCheckout = new WorkDirCheckout(db, trash, index, index);
    AssertHelper.Throws<CheckoutConflictException>(strictCheckout.checkout);

    // A lenient checkout (FailOnConflict = false) overwrites them.
    var lenientCheckout = new WorkDirCheckout(db, trash, index, index) { FailOnConflict = false };
    lenientCheckout.checkout();
    Assert.IsTrue(new FileInfo(Path.Combine(trash.FullName, "bar")).IsFile());
    Assert.IsTrue(new FileInfo(Path.Combine(trash.FullName, "foo/bar/baz/qux")).IsFile());

    // Now check out from an index whose entries collide the opposite way.
    var index2 = new GitIndex(db);
    recursiveDelete(new FileInfo(Path.Combine(trash.FullName, "bar")));
    recursiveDelete(new DirectoryInfo(Path.Combine(trash.FullName, "foo")));
    index2.add(trash, writeTrashFile("bar/baz/qux/foo", "bar"));
    writeTrashFile("bar/baz/qux/bar", "evil? I thought it said WEEVIL!");
    index2.add(trash, writeTrashFile("foo", "lalala"));

    lenientCheckout = new WorkDirCheckout(db, trash, index2, index) { FailOnConflict = false };
    lenientCheckout.checkout();

    Assert.IsTrue(new FileInfo(Path.Combine(trash.FullName, "bar")).IsFile());
    Assert.IsTrue(new FileInfo(Path.Combine(trash.FullName, "foo/bar/baz/qux")).IsFile());
    Assert.IsNotNull(index2.GetEntry("bar"));
    Assert.IsNotNull(index2.GetEntry("foo/bar/baz/qux"));
    Assert.IsNull(index2.GetEntry("bar/baz/qux/foo"));
    Assert.IsNull(index2.GetEntry("foo"));
}
public void testFindingConflicts()
{
    // Index tracks "bar" (file) and "foo/bar/baz/qux".
    var index = new GitIndex(db);
    index.add(trash, writeTrashFile("bar", "bar"));
    index.add(trash, writeTrashFile("foo/bar/baz/qux", "foo/bar"));

    // Swap files and directories on disk so both paths conflict.
    recursiveDelete(new FileInfo(Path.Combine(trash.FullName, "bar")));
    recursiveDelete(new DirectoryInfo(Path.Combine(trash.FullName, "foo")));
    writeTrashFile("bar/baz/qux/foo", "another nasty one");
    writeTrashFile("foo", "troublesome little bugger");

    var checkout = new WorkDirCheckout(db, trash, index, index);
    checkout.PrescanOneTree();
    List<string> conflicts = checkout.Conflicts;
    Assert.AreEqual("bar/baz/qux/foo", conflicts[0]);
    Assert.AreEqual("foo", conflicts[1]);

    // When the working tree matches the source index, the prescan reports
    // no conflicts — only removals of entries absent from the target index.
    var index2 = new GitIndex(db);
    recursiveDelete(new DirectoryInfo(Path.Combine(trash.FullName, "bar")));
    recursiveDelete(new DirectoryInfo(Path.Combine(trash.FullName, "foo")));
    index2.add(trash, writeTrashFile("bar/baz/qux/foo", "bar"));
    index2.add(trash, writeTrashFile("foo", "lalala"));

    checkout = new WorkDirCheckout(db, trash, index2, index);
    checkout.PrescanOneTree();
    conflicts = checkout.Conflicts;
    List<string> removed = checkout.Removed;
    Assert.IsTrue(conflicts.Count == 0);
    Assert.IsTrue(removed.Contains("bar/baz/qux/foo"));
    Assert.IsTrue(removed.Contains("foo"));
}
/// <summary>
/// Exercises the two-tree read-tree rules for index entries not present in HEAD
/// (rules 4 through 21 of git-read-tree's two-tree merge table).
/// NOTE: the original was flattened onto a single physical line, so every
/// "// rule N" comment swallowed the code that followed it; this restores the
/// intended multi-line layout with the statements live again.
/// </summary>
public void testRules4thru13_IndexEntryNotInHead()
{
    // rule 4 and 5
    var indexEntries = new Dictionary<string, string> { { "foo", "foo" } };
    SetupCase(null, null, indexEntries);
    _theReadTree = Go();
    assertAllEmpty();

    // rule 6 and 7
    indexEntries = new Dictionary<string, string> { { "foo", "foo" } };
    SetupCase(null, indexEntries, indexEntries);
    _theReadTree = Go();
    assertAllEmpty();

    // rule 8 and 9
    var mergeEntries = new Dictionary<string, string> { { "foo", "merge" } };
    SetupCase(null, mergeEntries, indexEntries);
    Go();
    Assert.IsTrue(_theReadTree.Updated.isEmpty());
    Assert.IsTrue(_theReadTree.Removed.isEmpty());
    Assert.IsTrue(_theReadTree.Conflicts.Contains("foo"));

    // rule 10
    var headEntries = new Dictionary<string, string> { { "foo", "foo" } };
    SetupCase(headEntries, null, indexEntries);
    Go();
    Assert.IsTrue(_theReadTree.Removed.Contains("foo"));
    Assert.IsTrue(_theReadTree.Updated.isEmpty());
    Assert.IsTrue(_theReadTree.Conflicts.isEmpty());

    // rule 11: work-dir file modified after staging -> conflict, not removal.
    SetupCase(headEntries, null, indexEntries);
    new FileInfo(Path.Combine(trash.FullName, "foo")).Delete();
    writeTrashFile("foo", "bar");
    _theIndex.Members[0].forceRecheck();
    Go();
    Assert.IsTrue(_theReadTree.Removed.isEmpty());
    Assert.IsTrue(_theReadTree.Updated.isEmpty());
    Assert.IsTrue(_theReadTree.Conflicts.Contains("foo"));

    // rule 12 and 13
    headEntries["foo"] = "head";
    SetupCase(headEntries, null, indexEntries);
    Go();
    Assert.IsTrue(_theReadTree.Removed.isEmpty());
    Assert.IsTrue(_theReadTree.Updated.isEmpty());
    Assert.IsTrue(_theReadTree.Conflicts.Contains("foo"));

    // rule 14 and 15
    SetupCase(headEntries, headEntries, indexEntries);
    Go();
    assertAllEmpty();

    // rule 16 and 17
    SetupCase(headEntries, mergeEntries, indexEntries);
    Go();
    Assert.IsTrue(_theReadTree.Conflicts.Contains("foo"));

    // rule 18 and 19
    SetupCase(headEntries, indexEntries, indexEntries);
    Go();
    assertAllEmpty();

    // rule 20
    SetupCase(indexEntries, mergeEntries, indexEntries);
    Go();
    Assert.IsTrue(_theReadTree.Updated.ContainsKey("foo"));

    // rule 21: dirty work-dir file blocks the update -> conflict.
    SetupCase(indexEntries, mergeEntries, indexEntries);
    new FileInfo(Path.Combine(trash.FullName, "foo")).Delete();
    writeTrashFile("foo", "bar");
    _theIndex.Members[0].forceRecheck();
    Go();
    Assert.IsTrue(_theReadTree.Conflicts.Contains("foo"));
}
// Runs the head-vs-merge prescan against the current index without
// touching the working directory, and returns the scanner.
private WorkDirCheckout Go()
{
    var scanner = new WorkDirCheckout(db, trash, _theHead, _theIndex, _theMerge);
    _theReadTree = scanner;
    scanner.PrescanTwoTrees();
    return scanner;
}
// Performs a full two-tree checkout (head -> merge) with the current index,
// keeping the resulting WorkDirCheckout in _theReadTree for later assertions.
private void Checkout()
{
    var checkout = new WorkDirCheckout(db, trash, _theHead, _theIndex, _theMerge);
    _theReadTree = checkout;
    checkout.checkout();
}
public void testRules1thru3_NoIndexEntry()
{
    var index = new GitIndex(db);

    // HEAD holds a single file "foo"; the merge tree is empty at first.
    var head = new Tree(db);
    FileTreeEntry fooEntry = head.AddFile("foo");
    ObjectId headId = ObjectId.FromString("ba78e065e2c261d4f7b8f42107588051e87e18e9");
    fooEntry.Id = headId;
    var merge = new Tree(db);

    // head -> merge drops "foo": it must appear in Removed.
    var checkout = new WorkDirCheckout(db, trash, head, index, merge);
    checkout.PrescanTwoTrees();
    Assert.IsTrue(checkout.Removed.Contains("foo"));

    // merge -> head introduces "foo": it must be updated to the head id.
    checkout = new WorkDirCheckout(db, trash, merge, index, head);
    checkout.PrescanTwoTrees();
    Assert.AreEqual(headId, checkout.Updated["foo"]);

    // With differing ids in both trees the merge-side id is checked out.
    ObjectId mergeId = ObjectId.FromString("ba78e065e2c261d4f7b8f42107588051e87e18ee");
    merge.AddFile("foo").Id = mergeId;
    checkout = new WorkDirCheckout(db, trash, head, index, merge);
    checkout.PrescanTwoTrees();
    Assert.AreEqual(mergeId, checkout.Updated["foo"]);
}
public void testFindingConflicts()
{
    // Track "bar" (a file) and "foo/bar/baz/qux" in the index.
    var stagedIndex = new GitIndex(db);
    stagedIndex.add(trash, writeTrashFile("bar", "bar"));
    stagedIndex.add(trash, writeTrashFile("foo/bar/baz/qux", "foo/bar"));

    // Rebuild the work dir so each tracked path clashes file-vs-directory.
    recursiveDelete(new FileInfo(Path.Combine(trash.FullName, "bar")));
    recursiveDelete(new DirectoryInfo(Path.Combine(trash.FullName, "foo")));
    writeTrashFile("bar/baz/qux/foo", "another nasty one");
    writeTrashFile("foo", "troublesome little bugger");

    var scanner = new WorkDirCheckout(db, trash, stagedIndex, stagedIndex);
    scanner.PrescanOneTree();
    List<string> conflicts = scanner.Conflicts;
    Assert.AreEqual("bar/baz/qux/foo", conflicts[0]);
    Assert.AreEqual("foo", conflicts[1]);

    // A second index matching the work dir yields removals, no conflicts.
    var freshIndex = new GitIndex(db);
    recursiveDelete(new DirectoryInfo(Path.Combine(trash.FullName, "bar")));
    recursiveDelete(new DirectoryInfo(Path.Combine(trash.FullName, "foo")));
    freshIndex.add(trash, writeTrashFile("bar/baz/qux/foo", "bar"));
    freshIndex.add(trash, writeTrashFile("foo", "lalala"));

    scanner = new WorkDirCheckout(db, trash, freshIndex, stagedIndex);
    scanner.PrescanOneTree();
    conflicts = scanner.Conflicts;
    List<string> removed = scanner.Removed;
    Assert.IsTrue(conflicts.Count == 0);
    Assert.IsTrue(removed.Contains("bar/baz/qux/foo"));
    Assert.IsTrue(removed.Contains("foo"));
}
/// <summary>
/// Checks out the given branch: points HEAD at it, forces HEAD to the
/// branch's commit, and fills the working directory and a fresh index.
/// </summary>
/// <param name="branch">Branch to check out; must not be null.</param>
private void doCheckout(Ref branch)
{
    if (branch == null)
    {
        throw die("Cannot checkout; no HEAD advertised by remote");
    }

    // Make HEAD a symref to the branch unless the remote's HEAD itself was given.
    if (!Constants.HEAD.Equals(branch.Name))
    {
        db.WriteSymref(Constants.HEAD, branch.Name);
    }

    GitSharp.Commit commit = db.MapCommit(branch.ObjectId);
    RefUpdate headUpdate = db.UpdateRef(Constants.HEAD);
    headUpdate.NewObjectId = commit.CommitId;
    headUpdate.ForceUpdate();

    // Materialize the commit's tree into the work dir and write the index.
    GitIndex index = new GitIndex(db);
    Tree tree = commit.TreeEntry;
    WorkDirCheckout workDirCheckout = new WorkDirCheckout(db, db.WorkingDirectory, index, tree);
    workDirCheckout.checkout();
    index.write();
}
/// <summary>
/// Applies a stash to the working directory and index, three-way merging
/// files modified both in the stash and in HEAD, and recording conflict
/// stages in the index where the merge fails.
/// NOTE: the original was flattened onto single physical lines, so the
/// inline "//" comments commented out the body of the foreach and the
/// stage-entry generation; this restores the intended multi-line layout.
/// Logic is kept statement-for-statement.
/// </summary>
/// <param name="stash">The stash (WIP commit) to apply.</param>
internal void Apply(Stash stash)
{
    Commit wip = _repo.Get<Commit>(stash.CommitId);
    Commit index = wip.Parents.Last();
    Tree wipTree = wip.Tree;
    Tree headTree = _repo.CurrentBranch.CurrentCommit.Tree;
    GitIndex currentIndex = _repo.Index.GitIndex;
    Tree currentIndexTree = new Tree(_repo, _repo._internal_repo.MapTree(currentIndex.writeTree()));

    WorkDirCheckout co = new WorkDirCheckout(_repo._internal_repo, _repo._internal_repo.WorkingDirectory, headTree.InternalTree, currentIndex, wip.Tree.InternalTree);
    co.checkout();
    currentIndex.write();

    List<DirCacheEntry> toAdd = new List<DirCacheEntry>();

    DirCache dc = DirCache.Lock(_repo._internal_repo);
    try
    {
        var cacheEditor = dc.editor();

        // The WorkDirCheckout class doesn't check if there are conflicts in modified files,
        // so we have to do it here.
        foreach (var c in co.Updated)
        {
            var baseEntry = wip.Parents.First().Tree[c.Key] as Leaf;
            var oursEntry = wipTree[c.Key] as Leaf;
            var theirsEntry = headTree[c.Key] as Leaf;
            if (baseEntry != null && oursEntry != null && currentIndexTree[c.Key] == null)
            {
                // If a file was reported as updated but that file is not present in the stashed index,
                // it means that the file was scheduled to be deleted.
                cacheEditor.@add(new DirCacheEditor.DeletePath(c.Key));
                File.Delete(_repo.FromGitPath(c.Key));
            }
            else if (baseEntry != null && oursEntry != null && theirsEntry != null)
            {
                MergeResult res = MergeAlgorithm.merge(new RawText(baseEntry.RawData), new RawText(oursEntry.RawData), new RawText(theirsEntry.RawData));
                MergeFormatter f = new MergeFormatter();
                using (BinaryWriter bw = new BinaryWriter(File.OpenWrite(_repo.FromGitPath(c.Key))))
                {
                    f.formatMerge(bw, res, "Base", "Stash", "Head", Constants.CHARSET.WebName);
                }
                if (res.containsConflicts())
                {
                    // Remove the entry from the index. It will be added later on.
                    cacheEditor.@add(new DirCacheEditor.DeletePath(c.Key));

                    // Generate index entries for each merge stage
                    // Those entries can't be added right now to the index because a DirCacheEditor
                    // can't be used at the same time as a DirCacheBuilder.
                    var e = new DirCacheEntry(c.Key, DirCacheEntry.STAGE_1);
                    e.setObjectId(baseEntry.InternalEntry.Id);
                    e.setFileMode(baseEntry.InternalEntry.Mode);
                    toAdd.Add(e);

                    e = new DirCacheEntry(c.Key, DirCacheEntry.STAGE_2);
                    e.setObjectId(oursEntry.InternalEntry.Id);
                    e.setFileMode(oursEntry.InternalEntry.Mode);
                    toAdd.Add(e);

                    e = new DirCacheEntry(c.Key, DirCacheEntry.STAGE_3);
                    e.setObjectId(theirsEntry.InternalEntry.Id);
                    e.setFileMode(theirsEntry.InternalEntry.Mode);
                    toAdd.Add(e);
                }
            }
        }
        cacheEditor.finish();

        if (toAdd.Count > 0)
        {
            // Add the index entries generated above
            var cacheBuilder = dc.builder();
            for (int n = 0; n < dc.getEntryCount(); n++)
            {
                cacheBuilder.@add(dc.getEntry(n));
            }
            foreach (var entry in toAdd)
            {
                cacheBuilder.@add(entry);
            }
            cacheBuilder.finish();
        }

        dc.write();
        dc.commit();
    }
    catch
    {
        dc.unlock();
        throw;
    }
}
// Prescans the head/merge tree pair against the current index (no work-dir
// mutation) and hands back the WorkDirCheckout for assertions.
private WorkDirCheckout Go()
{
    var prescan = new WorkDirCheckout(db, trash, _theHead, _theIndex, _theMerge);
    _theReadTree = prescan;
    prescan.PrescanTwoTrees();
    return prescan;
}
/// <summary>
/// Exercises the two-tree read-tree rules for index entries not present in HEAD
/// (rules 4 through 21 of git-read-tree's two-tree merge table).
/// NOTE: the original was flattened onto a single physical line, so each
/// "// rule N" comment swallowed the code following it; this restores the
/// intended multi-line layout so every case actually runs.
/// </summary>
public void testRules4thru13_IndexEntryNotInHead()
{
    // rule 4 and 5
    var indexEntries = new Dictionary<string, string> { { "foo", "foo" } };
    SetupCase(null, null, indexEntries);
    _theReadTree = Go();
    assertAllEmpty();

    // rule 6 and 7
    indexEntries = new Dictionary<string, string> { { "foo", "foo" } };
    SetupCase(null, indexEntries, indexEntries);
    _theReadTree = Go();
    assertAllEmpty();

    // rule 8 and 9
    var mergeEntries = new Dictionary<string, string> { { "foo", "merge" } };
    SetupCase(null, mergeEntries, indexEntries);
    Go();
    Assert.IsTrue(_theReadTree.Updated.isEmpty());
    Assert.IsTrue(_theReadTree.Removed.isEmpty());
    Assert.IsTrue(_theReadTree.Conflicts.Contains("foo"));

    // rule 10
    var headEntries = new Dictionary<string, string> { { "foo", "foo" } };
    SetupCase(headEntries, null, indexEntries);
    Go();
    Assert.IsTrue(_theReadTree.Removed.Contains("foo"));
    Assert.IsTrue(_theReadTree.Updated.isEmpty());
    Assert.IsTrue(_theReadTree.Conflicts.isEmpty());

    // rule 11: work-dir file modified after staging -> conflict, not removal.
    SetupCase(headEntries, null, indexEntries);
    new FileInfo(Path.Combine(trash.FullName, "foo")).Delete();
    writeTrashFile("foo", "bar");
    _theIndex.Members[0].forceRecheck();
    Go();
    Assert.IsTrue(_theReadTree.Removed.isEmpty());
    Assert.IsTrue(_theReadTree.Updated.isEmpty());
    Assert.IsTrue(_theReadTree.Conflicts.Contains("foo"));

    // rule 12 and 13
    headEntries["foo"] = "head";
    SetupCase(headEntries, null, indexEntries);
    Go();
    Assert.IsTrue(_theReadTree.Removed.isEmpty());
    Assert.IsTrue(_theReadTree.Updated.isEmpty());
    Assert.IsTrue(_theReadTree.Conflicts.Contains("foo"));

    // rule 14 and 15
    SetupCase(headEntries, headEntries, indexEntries);
    Go();
    assertAllEmpty();

    // rule 16 and 17
    SetupCase(headEntries, mergeEntries, indexEntries);
    Go();
    Assert.IsTrue(_theReadTree.Conflicts.Contains("foo"));

    // rule 18 and 19
    SetupCase(headEntries, indexEntries, indexEntries);
    Go();
    assertAllEmpty();

    // rule 20
    SetupCase(indexEntries, mergeEntries, indexEntries);
    Go();
    Assert.IsTrue(_theReadTree.Updated.ContainsKey("foo"));

    // rule 21: dirty work-dir file blocks the update -> conflict.
    SetupCase(indexEntries, mergeEntries, indexEntries);
    new FileInfo(Path.Combine(trash.FullName, "foo")).Delete();
    writeTrashFile("foo", "bar");
    _theIndex.Members[0].forceRecheck();
    Go();
    Assert.IsTrue(_theReadTree.Conflicts.Contains("foo"));
}
/// <summary>
/// Checks out the given remote branch: makes HEAD a symref to it,
/// forces HEAD to the branch's commit, and populates the working
/// directory and a fresh index from the commit's tree.
/// </summary>
/// <param name="branch">Branch to check out; must not be null.</param>
private void doCheckout(GitSharp.Core.Ref branch)
{
    if (branch == null)
    {
        throw new ArgumentNullException("branch", "Cannot checkout; no HEAD advertised by remote");
    }

    var repo = Repository._internal_repo;

    // Only rewrite the symref when the remote pointed us at a real branch.
    if (!Constants.HEAD.Equals(branch.Name))
    {
        repo.WriteSymref(Constants.HEAD, branch.Name);
    }

    GitSharp.Core.Commit commit = repo.MapCommit(branch.ObjectId);
    RefUpdate headUpdate = repo.UpdateRef(Constants.HEAD);
    headUpdate.NewObjectId = commit.CommitId;
    headUpdate.ForceUpdate();

    // Materialize the commit's tree into the work dir and persist the index.
    GitIndex index = new GitIndex(repo);
    GitSharp.Core.Tree tree = commit.TreeEntry;
    WorkDirCheckout workDirCheckout = new WorkDirCheckout(repo, repo.WorkingDirectory, index, tree);
    workDirCheckout.checkout();
    index.write();
}
public void testDirectoryFileSimple()
{
    // HEAD tree: "DF" is a plain file.
    _theIndex = new GitIndex(db);
    _theIndex.add(trash, writeTrashFile("DF", "DF"));
    Tree treeAsFile = db.MapTree(_theIndex.writeTree());

    // MERGE tree: "DF" becomes a directory holding "DF/DF".
    recursiveDelete(new DirectoryInfo(Path.Combine(trash.FullName, "DF")));
    _theIndex = new GitIndex(db);
    _theIndex.add(trash, writeTrashFile("DF/DF", "DF/DF"));
    Tree treeAsDir = db.MapTree(_theIndex.writeTree());

    // file -> directory transition: remove "DF", update "DF/DF".
    _theIndex = new GitIndex(db);
    recursiveDelete(new DirectoryInfo(Path.Combine(trash.FullName, "DF")));
    _theIndex.add(trash, writeTrashFile("DF", "DF"));
    _theReadTree = new WorkDirCheckout(db, trash, treeAsFile, _theIndex, treeAsDir);
    _theReadTree.PrescanTwoTrees();
    Assert.IsTrue(_theReadTree.Removed.Contains("DF"));
    Assert.IsTrue(_theReadTree.Updated.ContainsKey("DF/DF"));

    // directory -> file transition: the reverse expectations hold.
    recursiveDelete(new DirectoryInfo(Path.Combine(trash.FullName, "DF")));
    _theIndex = new GitIndex(db);
    _theIndex.add(trash, writeTrashFile("DF/DF", "DF/DF"));
    _theReadTree = new WorkDirCheckout(db, trash, treeAsDir, _theIndex, treeAsFile);
    _theReadTree.PrescanTwoTrees();
    Assert.IsTrue(_theReadTree.Removed.Contains("DF/DF"));
    Assert.IsTrue(_theReadTree.Updated.ContainsKey("DF"));
}