private void testLongPath(int len)
{
    string longPath = makeLongPath(len);
    string shortPath = "~~~ shorter-path";

    DirCacheEntry longEnt = new DirCacheEntry(longPath);
    DirCacheEntry shortEnt = new DirCacheEntry(shortPath);
    Assert.AreEqual(longPath, longEnt.getPathString());
    Assert.AreEqual(shortPath, shortEnt.getPathString());

    DirCache dc1 = DirCache.Lock(db);
    DirCacheBuilder b = dc1.builder();
    b.add(longEnt);
    b.add(shortEnt);
    Assert.IsTrue(b.commit());

    Assert.AreEqual(2, dc1.getEntryCount());
    Assert.AreSame(longEnt, dc1.getEntry(0));
    Assert.AreSame(shortEnt, dc1.getEntry(1));

    DirCache dc2 = DirCache.read(db);
    Assert.AreEqual(2, dc2.getEntryCount());
    Assert.AreNotSame(longEnt, dc2.getEntry(0));
    Assert.AreEqual(longPath, dc2.getEntry(0).getPathString());
    Assert.AreNotSame(shortEnt, dc2.getEntry(1));
    Assert.AreEqual(shortPath, dc2.getEntry(1).getPathString());
}
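// makeLongPath is referenced above but not shown in this excerpt. A minimal
// sketch of what such a helper could look like, assuming the test only needs a
// syntactically valid path of exactly `len` characters (the exact characters
// used are an assumption, not the verified original):
private static string makeLongPath(int len)
{
    var r = new System.Text.StringBuilder(len);
    for (int i = 0; i < len; i++)
        r.Append((char)('a' + (i % 26)));  // cycle through 'a'..'z'
    return r.ToString();
}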
public void testAdd_ReverseGitSortOrder()
{
    DirCache dc = DirCache.read(db);

    string[] paths = { "a.", "a.b", "a/b", "a0b" };
    DirCacheEntry[] ents = new DirCacheEntry[paths.Length];
    for (int i = 0; i < paths.Length; i++)
    {
        ents[i] = new DirCacheEntry(paths[i]);
        ents[i].setFileMode(FileMode.RegularFile);
    }

    DirCacheBuilder b = dc.builder();
    for (int i = ents.Length - 1; i >= 0; i--)
    {
        b.add(ents[i]);
    }
    b.finish();

    Assert.AreEqual(paths.Length, dc.getEntryCount());
    for (int i = 0; i < paths.Length; i++)
    {
        Assert.AreSame(ents[i], dc.getEntry(i));
        Assert.AreEqual(paths[i], dc.getEntry(i).getPathString());
        Assert.AreEqual(i, dc.findEntry(paths[i]));
        Assert.AreSame(ents[i], dc.getEntry(paths[i]));
    }
}
public void testFindSingleFile()
{
    string path = "a-File-path";

    DirCache dc = DirCache.read(db);
    DirCacheBuilder b = dc.builder();
    Assert.IsNotNull(b);

    DirCacheEntry entOrig = new DirCacheEntry(path);
    entOrig.setFileMode(FileMode.RegularFile);
    Assert.AreNotSame(path, entOrig.getPathString());
    Assert.AreEqual(path, entOrig.getPathString());
    b.add(entOrig);
    b.finish();

    Assert.AreEqual(1, dc.getEntryCount());
    Assert.AreSame(entOrig, dc.getEntry(0));
    Assert.AreEqual(0, dc.findEntry(path));

    // For a missing path, findEntry returns the insertion position encoded as
    // a negative value; real() converts it back to a plain index.
    Assert.AreEqual(-1, dc.findEntry("@@-before"));
    Assert.AreEqual(0, real(dc.findEntry("@@-before")));

    Assert.AreEqual(-2, dc.findEntry("a-zoo"));
    Assert.AreEqual(1, real(dc.findEntry("a-zoo")));

    Assert.AreSame(entOrig, dc.getEntry(path));
}
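// real() is not shown in this excerpt. The assertions above rely on findEntry
// returning -(insertionPosition) - 1 for a path that is not in the cache, so a
// plausible sketch of the helper simply inverts that encoding (an assumption,
// not the verified original):
private static int real(int eIdx)
{
    // A negative result encodes where the path would have been inserted.
    return eIdx < 0 ? -(eIdx + 1) : eIdx;
}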
public void testBuildOneFile_FinishWriteCommit()
{
    string path = "a-File-path";
    var mode = FileMode.RegularFile;
    long lastModified = 1218123387057L;
    int Length = 1342;
    DirCacheEntry entOrig;

    DirCache dc = DirCache.Lock(db);
    DirCacheBuilder b = dc.builder();
    Assert.IsNotNull(b);

    entOrig = new DirCacheEntry(path);
    entOrig.setFileMode(mode);
    entOrig.setLastModified(lastModified);
    entOrig.setLength(Length);

    Assert.AreNotSame(path, entOrig.getPathString());
    Assert.AreEqual(path, entOrig.getPathString());
    Assert.AreEqual(ObjectId.ZeroId, entOrig.getObjectId());
    Assert.AreEqual(mode.Bits, entOrig.getRawMode());
    Assert.AreEqual(0, entOrig.getStage());
    Assert.AreEqual(lastModified, entOrig.getLastModified());
    Assert.AreEqual(Length, entOrig.getLength());
    Assert.IsFalse(entOrig.isAssumeValid());
    b.add(entOrig);
    b.finish();

    Assert.AreEqual(1, dc.getEntryCount());
    Assert.AreSame(entOrig, dc.getEntry(0));

    dc.write();
    Assert.IsTrue(dc.commit());

    // Re-read the index from disk and verify the entry round-tripped intact.
    dc = DirCache.read(db);
    Assert.AreEqual(1, dc.getEntryCount());

    DirCacheEntry entRead = dc.getEntry(0);
    Assert.AreNotSame(entOrig, entRead);
    Assert.AreEqual(path, entRead.getPathString());
    Assert.AreEqual(ObjectId.ZeroId, entRead.getObjectId());
    Assert.AreEqual(mode.Bits, entRead.getRawMode());
    Assert.AreEqual(0, entRead.getStage());
    Assert.AreEqual(lastModified, entRead.getLastModified());
    Assert.AreEqual(Length, entRead.getLength());
    Assert.IsFalse(entRead.isAssumeValid());
}
public void testUnsupportedOptionalExtension()
{
    var dc = new DirCache(pathOf("gitgit.index.ZZZZ"));
    dc.read();
    Assert.AreEqual(1, dc.getEntryCount());
    Assert.AreEqual("A", dc.getEntry(0).getPathString());
}
public void testEntriesWithin()
{
    DirCache dc = DirCache.read(db);

    string[] paths = { "a.", "a/b", "a/c", "a/d", "a0b" };
    DirCacheEntry[] ents = new DirCacheEntry[paths.Length];
    for (int i = 0; i < paths.Length; i++)
    {
        ents[i] = new DirCacheEntry(paths[i]);
        ents[i].setFileMode(FileMode.RegularFile);
    }
    int aFirst = 1;
    int aLast = 3;

    DirCacheBuilder b = dc.builder();
    for (int i = 0; i < ents.Length; i++)
    {
        b.add(ents[i]);
    }
    b.finish();

    Assert.AreEqual(paths.Length, dc.getEntryCount());
    for (int i = 0; i < ents.Length; i++)
    {
        Assert.AreSame(ents[i], dc.getEntry(i));
    }

    // Both "a" and "a/" should select the entries inside the "a" directory.
    DirCacheEntry[] aContents = dc.getEntriesWithin("a");
    Assert.IsNotNull(aContents);
    Assert.AreEqual(aLast - aFirst + 1, aContents.Length);
    for (int i = aFirst, j = 0; i <= aLast; i++, j++)
    {
        Assert.AreSame(ents[i], aContents[j]);
    }

    aContents = dc.getEntriesWithin("a/");
    Assert.IsNotNull(aContents);
    Assert.AreEqual(aLast - aFirst + 1, aContents.Length);
    for (int i = aFirst, j = 0; i <= aLast; i++, j++)
    {
        Assert.AreSame(ents[i], aContents[j]);
    }

    // Paths that are plain files or missing yield empty (but non-null) arrays.
    Assert.IsNotNull(dc.getEntriesWithin("a."));
    Assert.AreEqual(0, dc.getEntriesWithin("a.").Length);

    Assert.IsNotNull(dc.getEntriesWithin("a0b"));
    Assert.AreEqual(0, dc.getEntriesWithin("a0b").Length);

    Assert.IsNotNull(dc.getEntriesWithin("zoo"));
    Assert.AreEqual(0, dc.getEntriesWithin("zoo").Length);
}
public void testPathFilterGroup_DoesNotSkipTail()
{
    DirCache dc = DirCache.read(db);

    var mode = FileMode.RegularFile;
    string[] paths = { "a.", "a/b", "a/c", "a/d", "a0b" };
    var ents = new DirCacheEntry[paths.Length];
    for (int i = 0; i < paths.Length; i++)
    {
        ents[i] = new DirCacheEntry(paths[i]);
        ents[i].setFileMode(mode);
    }

    DirCacheBuilder builder = dc.builder();
    for (int i = 0; i < ents.Length; i++)
    {
        builder.add(ents[i]);
    }
    builder.finish();

    const int expIdx = 2;
    DirCacheBuilder b = dc.builder();
    var tw = new GitSharp.Core.TreeWalk.TreeWalk(db);
    tw.reset();
    tw.addTree(new DirCacheBuildIterator(b));
    tw.Recursive = true;
    tw.setFilter(PathFilterGroup.createFromStrings(new[] { paths[expIdx] }));

    Assert.IsTrue(tw.next(), "found " + paths[expIdx]);
    var c = tw.getTree<DirCacheIterator>(0, typeof(DirCacheIterator));
    Assert.IsNotNull(c);
    Assert.AreEqual(expIdx, c.Pointer);
    Assert.AreSame(ents[expIdx], c.getDirCacheEntry());
    Assert.AreEqual(paths[expIdx], tw.getPathString());
    Assert.AreEqual(mode.Bits, tw.getRawMode(0));
    Assert.AreSame(mode, tw.getFileMode(0));
    b.add(c.getDirCacheEntry());

    Assert.IsFalse(tw.next(), "no more entries");

    b.finish();
    Assert.AreEqual(ents.Length, dc.getEntryCount());
    for (int i = 0; i < ents.Length; i++)
    {
        Assert.AreSame(ents[i], dc.getEntry(i));
    }
}
public void testReadIndex_LsFiles()
{
    List<CGitIndexRecord> ls = ReadLsFiles();
    var dc = new DirCache(_index);
    Assert.AreEqual(0, dc.getEntryCount());
    dc.read();
    Assert.AreEqual(ls.Count, dc.getEntryCount());

    int i = 0;
    foreach (var val in ls)
    {
        AssertAreEqual(val, dc.getEntry(i));
        i++;
    }
}
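// ReadLsFiles(), _index and AssertAreEqual(CGitIndexRecord, DirCacheEntry) are
// helpers defined elsewhere. As a rough, hypothetical sketch, the comparison
// presumably matches the fields that `git ls-files --stage` reports (mode,
// object id, stage and path); the CGitIndexRecord property names below are
// assumptions, not the verified original:
private static void AssertAreEqual(CGitIndexRecord c, DirCacheEntry j)
{
    Assert.IsNotNull(c);
    Assert.IsNotNull(j);
    Assert.AreEqual(c.Path, j.getPathString());
    Assert.AreEqual(c.Id, j.getObjectId());
    Assert.AreEqual(c.Mode, j.getRawMode());
    Assert.AreEqual(c.Stage, j.getStage());
}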
private static void AssertCorrectId(DirCache treeT, GitSharp.Core.TreeWalk.TreeWalk tw)
{
    Assert.AreEqual(treeT.getEntry(tw.getPathString()).getObjectId(), tw.getObjectId(0));
}
internal void Apply(Stash stash)
{
    Commit wip = _repo.Get<Commit>(stash.CommitId);
    Commit index = wip.Parents.Last();
    Tree wipTree = wip.Tree;
    Tree headTree = _repo.CurrentBranch.CurrentCommit.Tree;
    GitIndex currentIndex = _repo.Index.GitIndex;

    Tree currentIndexTree = new Tree(_repo, _repo._internal_repo.MapTree(currentIndex.writeTree()));

    WorkDirCheckout co = new WorkDirCheckout(_repo._internal_repo, _repo._internal_repo.WorkingDirectory, headTree.InternalTree, currentIndex, wip.Tree.InternalTree);
    co.checkout();

    currentIndex.write();

    List<DirCacheEntry> toAdd = new List<DirCacheEntry>();

    DirCache dc = DirCache.Lock(_repo._internal_repo);
    try
    {
        var cacheEditor = dc.editor();

        // The WorkDirCheckout class doesn't check if there are conflicts in
        // modified files, so we have to do it here.
        foreach (var c in co.Updated)
        {
            var baseEntry = wip.Parents.First().Tree[c.Key] as Leaf;
            var oursEntry = wipTree[c.Key] as Leaf;
            var theirsEntry = headTree[c.Key] as Leaf;

            if (baseEntry != null && oursEntry != null && currentIndexTree[c.Key] == null)
            {
                // If a file was reported as updated but that file is not present
                // in the stashed index, it means that the file was scheduled to
                // be deleted.
                cacheEditor.@add(new DirCacheEditor.DeletePath(c.Key));
                File.Delete(_repo.FromGitPath(c.Key));
            }
            else if (baseEntry != null && oursEntry != null && theirsEntry != null)
            {
                MergeResult res = MergeAlgorithm.merge(new RawText(baseEntry.RawData), new RawText(oursEntry.RawData), new RawText(theirsEntry.RawData));
                MergeFormatter f = new MergeFormatter();
                using (BinaryWriter bw = new BinaryWriter(File.OpenWrite(_repo.FromGitPath(c.Key))))
                {
                    f.formatMerge(bw, res, "Base", "Stash", "Head", Constants.CHARSET.WebName);
                }

                if (res.containsConflicts())
                {
                    // Remove the entry from the index. It will be added later on.
                    cacheEditor.@add(new DirCacheEditor.DeletePath(c.Key));

                    // Generate index entries for each merge stage.
                    // Those entries can't be added right now to the index because
                    // a DirCacheEditor can't be used at the same time as a
                    // DirCacheBuilder.
                    var e = new DirCacheEntry(c.Key, DirCacheEntry.STAGE_1);
                    e.setObjectId(baseEntry.InternalEntry.Id);
                    e.setFileMode(baseEntry.InternalEntry.Mode);
                    toAdd.Add(e);

                    e = new DirCacheEntry(c.Key, DirCacheEntry.STAGE_2);
                    e.setObjectId(oursEntry.InternalEntry.Id);
                    e.setFileMode(oursEntry.InternalEntry.Mode);
                    toAdd.Add(e);

                    e = new DirCacheEntry(c.Key, DirCacheEntry.STAGE_3);
                    e.setObjectId(theirsEntry.InternalEntry.Id);
                    e.setFileMode(theirsEntry.InternalEntry.Mode);
                    toAdd.Add(e);
                }
            }
        }

        cacheEditor.finish();

        if (toAdd.Count > 0)
        {
            // Add the index entries generated above
            var cacheBuilder = dc.builder();
            for (int n = 0; n < dc.getEntryCount(); n++)
            {
                cacheBuilder.@add(dc.getEntry(n));
            }
            foreach (var entry in toAdd)
            {
                cacheBuilder.@add(entry);
            }
            cacheBuilder.finish();
        }

        dc.write();
        dc.commit();
    }
    catch
    {
        dc.unlock();
        throw;
    }
}