private void testLongPath(int len)
{
    // Build one very long path plus one short path, and verify both
    // survive an index write/read round trip in the same order.
    string pathLong = makeLongPath(len);
    string pathShort = "~~~ shorter-path";

    var entLong = new DirCacheEntry(pathLong);
    var entShort = new DirCacheEntry(pathShort);
    Assert.AreEqual(pathLong, entLong.getPathString());
    Assert.AreEqual(pathShort, entShort.getPathString());

    // Write both entries through a locked cache.
    DirCache written = DirCache.Lock(db);
    DirCacheBuilder builder = written.builder();
    builder.add(entLong);
    builder.add(entShort);
    Assert.IsTrue(builder.commit());

    Assert.AreEqual(2, written.getEntryCount());
    Assert.AreSame(entLong, written.getEntry(0));
    Assert.AreSame(entShort, written.getEntry(1));

    // Re-read from disk: fresh entry objects, identical path strings.
    DirCache reread = DirCache.read(db);
    Assert.AreEqual(2, reread.getEntryCount());
    Assert.AreNotSame(entLong, reread.getEntry(0));
    Assert.AreEqual(pathLong, reread.getEntry(0).getPathString());
    Assert.AreNotSame(entShort, reread.getEntry(1));
    Assert.AreEqual(pathShort, reread.getEntry(1).getPathString());
}
public void testWriteReadTree()
{
    // Writes four entries whose leading path components are ~2000 chars
    // long, then re-reads the index and checks the cache tree structure.
    //
    // Bug fix: the original used string.Format("a%2000s", "a"), a direct
    // transliteration of Java's String.format. C# string.Format does not
    // understand printf-style "%2000s", so A/B were the literal 8-char
    // strings and the test never exercised long paths. PadLeft reproduces
    // the intended Java behavior: "a" padded to width 2000 with spaces.
    DirCache dc = DirCache.Lock(db);
    string A = "a" + "a".PadLeft(2000);
    string B = "b" + "b".PadLeft(2000);

    // Paths chosen to straddle the '/' sort boundary ('.' < '/' < '0').
    string[] paths = { A + ".", A + "." + B, A + "/" + B, A + "0" + B };
    var ents = new DirCacheEntry[paths.Length];
    for (int i = 0; i < paths.Length; i++)
    {
        ents[i] = new DirCacheEntry(paths[i]);
        ents[i].setFileMode(FileMode.RegularFile);
    }

    DirCacheBuilder b = dc.builder();
    for (int i = 0; i < ents.Length; i++)
    {
        b.add(ents[i]);
    }
    b.commit();

    DirCache read = DirCache.read(db);
    Assert.AreEqual(paths.Length, read.getEntryCount());
    // Only the A/ subtree produces a child tree node.
    Assert.AreEqual(1, read.getCacheTree(true).getChildCount());
}
public virtual void TestUpdateSmudgedEntries()
{
    // Smudge the index entry for Test.txt (zero length + zero mtime) on a
    // fresh branch, then verify that checking out "test" repairs it.
    git.BranchCreate().SetName("test2").Call();
    RefUpdate refUpdate = db.UpdateRef(Constants.HEAD);
    refUpdate.Link("refs/heads/test2");

    FilePath file = new FilePath(db.WorkTree, "Test.txt");
    long fileSize = file.Length();
    long fileTime = file.LastModified() - 5000L;
    NUnit.Framework.Assert.IsTrue(file.SetLastModified(fileTime));

    // Smudge: wipe out the cached length and modification time.
    DirCache index = DirCache.Lock(db.GetIndexFile(), db.FileSystem);
    DirCacheEntry cacheEntry = index.GetEntry("Test.txt");
    NUnit.Framework.Assert.IsNotNull(cacheEntry);
    cacheEntry.SetLength(0);
    cacheEntry.LastModified = 0;
    index.Write();
    NUnit.Framework.Assert.IsTrue(index.Commit());

    // Confirm the smudged state was persisted.
    index = DirCache.Read(db.GetIndexFile(), db.FileSystem);
    cacheEntry = index.GetEntry("Test.txt");
    NUnit.Framework.Assert.IsNotNull(cacheEntry);
    NUnit.Framework.Assert.AreEqual(0, cacheEntry.Length);
    NUnit.Framework.Assert.AreEqual(0, cacheEntry.LastModified);

    // Back-date the index file so the checkout does not see it as racily clean.
    db.GetIndexFile().SetLastModified(db.GetIndexFile().LastModified() - 5000);
    NUnit.Framework.Assert.IsNotNull(git.Checkout().SetName("test").Call());

    // Checkout must have restored the true size and mtime.
    index = DirCache.Read(db.GetIndexFile(), db.FileSystem);
    cacheEntry = index.GetEntry("Test.txt");
    NUnit.Framework.Assert.IsNotNull(cacheEntry);
    NUnit.Framework.Assert.AreEqual(fileSize, cacheEntry.Length);
    NUnit.Framework.Assert.AreEqual(fileTime, cacheEntry.LastModified);
}
public virtual void CommitUpdatesSmudgedEntries()
{
    // Verifies that a commit "un-smudges" the index metadata of ALL
    // smudged entries — length (and, for untouched files, blob id) is
    // restored even for files not selected via SetOnly().
    Git git = new Git(db);

    // Create three tracked files, back-dating each by 5s so their mtimes
    // are safely older than the index (avoids racy-git ambiguity).
    FilePath file1 = WriteTrashFile("file1.txt", "content1");
    NUnit.Framework.Assert.IsTrue(file1.SetLastModified(file1.LastModified() - 5000));
    FilePath file2 = WriteTrashFile("file2.txt", "content2");
    NUnit.Framework.Assert.IsTrue(file2.SetLastModified(file2.LastModified() - 5000));
    FilePath file3 = WriteTrashFile("file3.txt", "content3");
    NUnit.Framework.Assert.IsTrue(file3.SetLastModified(file3.LastModified() - 5000));
    NUnit.Framework.Assert.IsNotNull(git.Add().AddFilepattern("file1.txt").AddFilepattern("file2.txt").AddFilepattern("file3.txt").Call());
    RevCommit commit = git.Commit().SetMessage("add files").Call();
    NUnit.Framework.Assert.IsNotNull(commit);

    // Record the real sizes and blob ids before smudging.
    DirCache cache = DirCache.Read(db.GetIndexFile(), db.FileSystem);
    int file1Size = cache.GetEntry("file1.txt").Length;
    int file2Size = cache.GetEntry("file2.txt").Length;
    int file3Size = cache.GetEntry("file3.txt").Length;
    ObjectId file2Id = cache.GetEntry("file2.txt").GetObjectId();
    ObjectId file3Id = cache.GetEntry("file3.txt").GetObjectId();
    NUnit.Framework.Assert.IsTrue(file1Size > 0);
    NUnit.Framework.Assert.IsTrue(file2Size > 0);
    NUnit.Framework.Assert.IsTrue(file3Size > 0);

    // Smudge entries
    cache = DirCache.Lock(db.GetIndexFile(), db.FileSystem);
    cache.GetEntry("file1.txt").SetLength(0);
    cache.GetEntry("file2.txt").SetLength(0);
    cache.GetEntry("file3.txt").SetLength(0);
    cache.Write();
    NUnit.Framework.Assert.IsTrue(cache.Commit());

    // Verify entries smudged
    cache = DirCache.Read(db.GetIndexFile(), db.FileSystem);
    NUnit.Framework.Assert.AreEqual(0, cache.GetEntry("file1.txt").Length);
    NUnit.Framework.Assert.AreEqual(0, cache.GetEntry("file2.txt").Length);
    NUnit.Framework.Assert.AreEqual(0, cache.GetEntry("file3.txt").Length);

    // Back-date the index so the upcoming commit re-examines the files
    // instead of trusting cached (smudged) metadata.
    long indexTime = db.GetIndexFile().LastModified();
    db.GetIndexFile().SetLastModified(indexTime - 5000);

    // Modify only file1 (same content length, newer mtime) and commit it alone.
    Write(file1, "content4");
    NUnit.Framework.Assert.IsTrue(file1.SetLastModified(file1.LastModified() + 2500));
    NUnit.Framework.Assert.IsNotNull(git.Commit().SetMessage("edit file").SetOnly("file1.txt").Call());

    // All three entries must be un-smudged, and file2/file3 must keep
    // their original blob ids (their content was never changed).
    cache = db.ReadDirCache();
    NUnit.Framework.Assert.AreEqual(file1Size, cache.GetEntry("file1.txt").Length);
    NUnit.Framework.Assert.AreEqual(file2Size, cache.GetEntry("file2.txt").Length);
    NUnit.Framework.Assert.AreEqual(file3Size, cache.GetEntry("file3.txt").Length);
    NUnit.Framework.Assert.AreEqual(file2Id, cache.GetEntry("file2.txt").GetObjectId());
    NUnit.Framework.Assert.AreEqual(file3Id, cache.GetEntry("file3.txt").GetObjectId());
}
public void testBuildEmpty()
{
    // Finishing a builder with no entries must still yield a valid,
    // zero-entry index that can be committed and re-read.
    DirCache cache = DirCache.Lock(db);
    DirCacheBuilder builder = cache.builder();
    Assert.IsNotNull(builder);
    builder.finish();
    cache.write();
    Assert.IsTrue(cache.commit());

    cache = DirCache.read(db);
    Assert.AreEqual(0, cache.getEntryCount());
}
public void testBuildOneFile_FinishWriteCommit()
{
    // Builds an index with a single entry, writes it, re-reads it, and
    // verifies every stored field survives the round trip.
    //
    // Bug fix: the post-read assertion group checked entOrig (the
    // in-memory entry) instead of entRead (the entry parsed back from
    // disk), so the round trip of objectId/mode/stage/mtime/length/
    // assumeValid was never actually verified.
    string path = "a-File-path";
    var mode = FileMode.RegularFile;
    long lastModified = 1218123387057L;
    int length = 1342;

    DirCache dc = DirCache.Lock(db);
    DirCacheBuilder b = dc.builder();
    Assert.IsNotNull(b);

    DirCacheEntry entOrig = new DirCacheEntry(path);
    entOrig.setFileMode(mode);
    entOrig.setLastModified(lastModified);
    entOrig.setLength(length);

    // getPathString returns an equal but distinct string instance.
    Assert.AreNotSame(path, entOrig.getPathString());
    Assert.AreEqual(path, entOrig.getPathString());
    Assert.AreEqual(ObjectId.ZeroId, entOrig.getObjectId());
    Assert.AreEqual(mode.Bits, entOrig.getRawMode());
    Assert.AreEqual(0, entOrig.getStage());
    Assert.AreEqual(lastModified, entOrig.getLastModified());
    Assert.AreEqual(length, entOrig.getLength());
    Assert.IsFalse(entOrig.isAssumeValid());

    b.add(entOrig);
    b.finish();
    Assert.AreEqual(1, dc.getEntryCount());
    Assert.AreSame(entOrig, dc.getEntry(0));
    dc.write();
    Assert.IsTrue(dc.commit());

    // Re-read from disk and verify the persisted entry, not the original.
    dc = DirCache.read(db);
    Assert.AreEqual(1, dc.getEntryCount());
    DirCacheEntry entRead = dc.getEntry(0);
    Assert.AreNotSame(entOrig, entRead);
    Assert.AreEqual(path, entRead.getPathString());
    Assert.AreEqual(ObjectId.ZeroId, entRead.getObjectId());
    Assert.AreEqual(mode.Bits, entRead.getRawMode());
    Assert.AreEqual(0, entRead.getStage());
    Assert.AreEqual(lastModified, entRead.getLastModified());
    Assert.AreEqual(length, entRead.getLength());
    Assert.IsFalse(entRead.isAssumeValid());
}
public void testWriteEmptyReadEmpty_RealIndex()
{
    // Committing an empty locked cache must materialize the real index
    // file, and reading it back must yield zero entries.
    var indexFile = new FileInfo(db.Directory + "/index");
    var lockFile = new FileInfo(db.Directory + "/index.lock");
    Assert.IsFalse(File.Exists(indexFile.FullName));
    Assert.IsFalse(File.Exists(lockFile.FullName));

    DirCache cache = DirCache.Lock(db);
    cache.write();
    Assert.IsTrue(cache.commit());
    Assert.IsTrue(File.Exists(indexFile.FullName));

    cache = DirCache.read(db);
    Assert.AreEqual(0, cache.getEntryCount());
}
public void testWriteEmptyUnlock_RealIndex()
{
    // Writing to the lock file and then unlocking (instead of committing)
    // must discard the lock and leave no index file behind.
    var indexFile = new FileInfo(db.Directory + "/index");
    var lockFile = new FileInfo(db.Directory + "/index.lock");
    Assert.IsFalse(File.Exists(indexFile.FullName));
    Assert.IsFalse(File.Exists(lockFile.FullName));

    DirCache cache = DirCache.Lock(db);
    Assert.AreEqual(0, lockFile.Length);
    cache.write();
    // 12-byte index header plus the 20-byte trailing SHA-1 checksum.
    Assert.AreEqual(12 + 20, new FileInfo(lockFile.FullName).Length);

    cache.unlock();
    Assert.IsFalse(File.Exists(indexFile.FullName));
    Assert.IsFalse(File.Exists(lockFile.FullName));
}
/// <summary>
/// Populates the working tree and the repository index from the given
/// path-to-content map. A null map only re-binds <c>dirCache</c> without
/// touching the index.
/// </summary>
/// <exception cref="System.IO.IOException"></exception>
private void BuildIndex(Dictionary<string, string> indexEntries)
{
    dirCache = new DirCache(db.GetIndexFile(), db.FileSystem);
    if (indexEntries != null)
    {
        NUnit.Framework.Assert.IsTrue(dirCache.Lock());
        DirCacheEditor editor = dirCache.Editor();
        // Fix: create a single ObjectInserter for the whole loop instead of
        // a fresh one per entry; the per-entry instances were redundant.
        // NOTE(review): the inserter is never Flush()ed here — presumably
        // blob insertion is effective immediately; confirm against the
        // ObjectInserter implementation in use.
        ObjectInserter inserter = db.NewObjectInserter();
        foreach (KeyValuePair<string, string> e in indexEntries.EntrySet())
        {
            WriteTrashFile(e.Key, e.Value);
            ObjectId id = inserter.Insert(Constants.OBJ_BLOB, Constants.Encode(e.Value));
            // Delete-then-add guarantees any previous entry at this path
            // is cleanly replaced.
            editor.Add(new DirCacheEditor.DeletePath(e.Key));
            editor.Add(new _PathEdit_287(id, e.Key));
        }
        NUnit.Framework.Assert.IsTrue(editor.Commit());
    }
}
public void testLockMissing_TempIndex()
{
    // Locking a non-existent index file must create only the lock file;
    // unlocking must remove it again without ever creating the index.
    var indexFile = new FileInfo(db.Directory + "/tmp_index");
    var lockFile = new FileInfo(db.Directory + "/tmp_index.lock");
    Assert.IsFalse(File.Exists(indexFile.FullName));
    Assert.IsFalse(File.Exists(lockFile.FullName));

    DirCache cache = DirCache.Lock(indexFile);
    Assert.IsNotNull(cache);
    Assert.IsFalse(File.Exists(indexFile.FullName));
    Assert.IsTrue(File.Exists(lockFile.FullName));
    Assert.AreEqual(0, cache.getEntryCount());

    cache.unlock();
    Assert.IsFalse(File.Exists(indexFile.FullName));
    Assert.IsFalse(File.Exists(lockFile.FullName));
}
/// <summary>
/// Populates the working tree and the repository index from the given
/// path-to-content map. A null map only re-binds <c>dirCache</c> without
/// touching the index.
/// </summary>
/// <exception cref="System.IO.IOException"></exception>
private void BuildIndex(Dictionary<string, string> indexEntries)
{
    dirCache = new DirCache(db.GetIndexFile(), db.FileSystem);
    if (indexEntries != null)
    {
        NUnit.Framework.Assert.IsTrue(dirCache.Lock());
        DirCacheEditor editor = dirCache.Editor();
        // Fix: create a single ObjectInserter for the whole loop instead of
        // a fresh one per entry; the per-entry instances were redundant.
        // NOTE(review): the inserter is never Flush()ed here — presumably
        // blob insertion is effective immediately; confirm against the
        // ObjectInserter implementation in use.
        ObjectInserter inserter = db.NewObjectInserter();
        foreach (KeyValuePair<string, string> e in indexEntries.EntrySet())
        {
            WriteTrashFile(e.Key, e.Value);
            ObjectId id = inserter.Insert(Constants.OBJ_BLOB, Constants.Encode(e.Value));
            // Delete-then-add guarantees any previous entry at this path
            // is cleanly replaced.
            editor.Add(new DirCacheEditor.DeletePath(e.Key));
            editor.Add(new _PathEdit_284(id, e.Key));
        }
        NUnit.Framework.Assert.IsTrue(editor.Commit());
    }
}
internal void Apply(Stash stash)
{
    // Reapplies the working-tree changes recorded in the given stash
    // commit onto the current HEAD, writing conflict markers into the
    // work tree and stage-1/2/3 index entries for paths that do not
    // merge cleanly.
    Commit wip = _repo.Get<Commit>(stash.CommitId);
    // NOTE(review): `index` is computed but never used below — the
    // stashed index state is apparently not restored; confirm intended.
    Commit index = wip.Parents.Last();
    Tree wipTree = wip.Tree;
    Tree headTree = _repo.CurrentBranch.CurrentCommit.Tree;
    GitIndex currentIndex = _repo.Index.GitIndex;
    // Snapshot of the current index as a tree; used below to detect
    // paths that were scheduled for deletion in the stash.
    Tree currentIndexTree = new Tree(_repo, _repo._internal_repo.MapTree(currentIndex.writeTree()));

    // Check out the stashed working-tree content over HEAD.
    WorkDirCheckout co = new WorkDirCheckout(_repo._internal_repo, _repo._internal_repo.WorkingDirectory, headTree.InternalTree, currentIndex, wip.Tree.InternalTree);
    co.checkout();
    currentIndex.write();

    // Conflict stage entries collected here and appended after the editor
    // pass, since an editor and a builder cannot run at the same time.
    List<DirCacheEntry> toAdd = new List<DirCacheEntry>();

    DirCache dc = DirCache.Lock(_repo._internal_repo);
    try {
        var cacheEditor = dc.editor();

        // The WorkDirCheckout class doesn't check if there are conflicts in modified files,
        // so we have to do it here.
        foreach (var c in co.Updated) {
            // base = stash's first parent, ours = stashed work tree,
            // theirs = current HEAD, all for the same path.
            var baseEntry = wip.Parents.First().Tree[c.Key] as Leaf;
            var oursEntry = wipTree[c.Key] as Leaf;
            var theirsEntry = headTree[c.Key] as Leaf;
            if (baseEntry != null && oursEntry != null && currentIndexTree[c.Key] == null) {
                // If a file was reported as updated but that file is not present in the stashed index,
                // it means that the file was scheduled to be deleted.
                cacheEditor.@add(new DirCacheEditor.DeletePath(c.Key));
                File.Delete(_repo.FromGitPath(c.Key));
            }
            else if (baseEntry != null && oursEntry != null && theirsEntry != null) {
                // Three-way content merge of base/stash/head versions.
                MergeResult res = MergeAlgorithm.merge(new RawText(baseEntry.RawData), new RawText(oursEntry.RawData), new RawText(theirsEntry.RawData));
                MergeFormatter f = new MergeFormatter();
                // Write the (possibly conflict-marked) result to the work tree.
                using (BinaryWriter bw = new BinaryWriter(File.OpenWrite(_repo.FromGitPath(c.Key)))) {
                    f.formatMerge(bw, res, "Base", "Stash", "Head", Constants.CHARSET.WebName);
                }
                if (res.containsConflicts()) {
                    // Remove the entry from the index. It will be added later on.
                    cacheEditor.@add(new DirCacheEditor.DeletePath(c.Key));

                    // Generate index entries for each merge stage
                    // Those entries can't be added right now to the index because a DirCacheEditor
                    // can't be used at the same time as a DirCacheBuilder.
                    var e = new DirCacheEntry(c.Key, DirCacheEntry.STAGE_1);
                    e.setObjectId(baseEntry.InternalEntry.Id);
                    e.setFileMode(baseEntry.InternalEntry.Mode);
                    toAdd.Add(e);

                    e = new DirCacheEntry(c.Key, DirCacheEntry.STAGE_2);
                    e.setObjectId(oursEntry.InternalEntry.Id);
                    e.setFileMode(oursEntry.InternalEntry.Mode);
                    toAdd.Add(e);

                    e = new DirCacheEntry(c.Key, DirCacheEntry.STAGE_3);
                    e.setObjectId(theirsEntry.InternalEntry.Id);
                    e.setFileMode(theirsEntry.InternalEntry.Mode);
                    toAdd.Add(e);
                }
            }
        }
        cacheEditor.finish();

        if (toAdd.Count > 0) {
            // Add the index entries generated above
            // (rebuild: copy existing entries, then append the stage entries).
            var cacheBuilder = dc.builder();
            for (int n = 0; n < dc.getEntryCount(); n++) {
                cacheBuilder.@add(dc.getEntry(n));
            }
            foreach (var entry in toAdd) {
                cacheBuilder.@add(entry);
            }
            cacheBuilder.finish();
        }
        dc.write();
        dc.commit();
    } catch {
        // Release the index lock on any failure so the repo isn't left locked.
        dc.unlock();
        throw;
    }
}