public virtual void TestIsModifiedFileSmudged()
{
    FilePath f = WriteTrashFile("file", "content");
    Git git = new Git(db);
    // The idea of this test is to check the smudged handling
    // Hopefully fsTick will make sure our entry gets smudged
    FsTick(f);
    WriteTrashFile("file", "content");
    git.Add().AddFilepattern("file").Call();
    WriteTrashFile("file", "conten2");
    DirCacheEntry dce = db.ReadDirCache().GetEntry("file");
    FileTreeIterator fti = new FileTreeIterator(trash, db.FileSystem,
        ((FileBasedConfig)db.GetConfig()).Get(WorkingTreeOptions.KEY));
    while (!fti.EntryPathString.Equals("file"))
    {
        fti.Next(1);
    }
    // If the fsTick trick does not work we could skip the compareMetaData
    // test and hope that we are usually testing the intended code path.
    NUnit.Framework.Assert.AreEqual(WorkingTreeIterator.MetadataDiff.SMUDGED,
        fti.CompareMetadata(dce));
    NUnit.Framework.Assert.IsTrue(fti.IsModified(dce, false));
}
private void testLongPath(int len)
{
    string longPath = makeLongPath(len);
    string shortPath = "~~~ shorter-path";

    DirCacheEntry longEnt = new DirCacheEntry(longPath);
    DirCacheEntry shortEnt = new DirCacheEntry(shortPath);
    Assert.AreEqual(longPath, longEnt.getPathString());
    Assert.AreEqual(shortPath, shortEnt.getPathString());

    DirCache dc1 = DirCache.Lock(db);
    DirCacheBuilder b = dc1.builder();
    b.add(longEnt);
    b.add(shortEnt);
    Assert.IsTrue(b.commit());

    Assert.AreEqual(2, dc1.getEntryCount());
    Assert.AreSame(longEnt, dc1.getEntry(0));
    Assert.AreSame(shortEnt, dc1.getEntry(1));

    DirCache dc2 = DirCache.read(db);
    Assert.AreEqual(2, dc2.getEntryCount());
    Assert.AreNotSame(longEnt, dc2.getEntry(0));
    Assert.AreEqual(longPath, dc2.getEntry(0).getPathString());
    Assert.AreNotSame(shortEnt, dc2.getEntry(1));
    Assert.AreEqual(shortPath, dc2.getEntry(1).getPathString());
}
public void testWriteReadTree()
{
    DirCache dc = DirCache.Lock(db);

    // Build long path components. Java's String.format("a%2000s", "a") pads the
    // argument to a 2000-character field; C#'s string.Format has no such
    // specifier, so pad explicitly to preserve the long-path intent.
    string A = "a" + "a".PadLeft(2000);
    string B = "b" + "b".PadLeft(2000);
    string[] paths = { A + ".", A + "." + B, A + "/" + B, A + "0" + B };
    var ents = new DirCacheEntry[paths.Length];
    for (int i = 0; i < paths.Length; i++)
    {
        ents[i] = new DirCacheEntry(paths[i]);
        ents[i].setFileMode(FileMode.RegularFile);
    }

    DirCacheBuilder b = dc.builder();
    for (int i = 0; i < ents.Length; i++)
    {
        b.add(ents[i]);
    }
    b.commit();

    DirCache read = DirCache.read(db);
    Assert.AreEqual(paths.Length, read.getEntryCount());
    Assert.AreEqual(1, read.getCacheTree(true).getChildCount());
}
public virtual void TestLastModifiedTimes()
{
    Git git = new Git(db);
    string path = "file";
    WriteTrashFile(path, "content");
    string path2 = "file2";
    WriteTrashFile(path2, "content2");
    git.Add().AddFilepattern(path).Call();
    git.Add().AddFilepattern(path2).Call();
    git.Commit().SetMessage("commit").Call();

    DirCache dc = db.ReadDirCache();
    DirCacheEntry entry = dc.GetEntry(path);
    DirCacheEntry entry2 = dc.GetEntry(path2);
    NUnit.Framework.Assert.IsTrue(entry.LastModified != 0, "last modified shall not be zero!");
    NUnit.Framework.Assert.IsTrue(entry2.LastModified != 0, "last modified shall not be zero!");

    WriteTrashFile(path, "new content");
    git.Add().AddFilepattern(path).Call();
    git.Commit().SetMessage("commit2").Call();

    dc = db.ReadDirCache();
    entry = dc.GetEntry(path);
    entry2 = dc.GetEntry(path2);
    NUnit.Framework.Assert.IsTrue(entry.LastModified != 0, "last modified shall not be zero!");
    NUnit.Framework.Assert.IsTrue(entry2.LastModified != 0, "last modified shall not be zero!");
}
/// <exception cref="System.IO.IOException"></exception> /// <exception cref="NGit.Api.Errors.NoFilepatternException"></exception> /// <exception cref="NGit.Api.Errors.NoHeadException"></exception> /// <exception cref="NGit.Api.Errors.NoMessageException"></exception> /// <exception cref="NGit.Api.Errors.ConcurrentRefUpdateException"></exception> /// <exception cref="NGit.Api.Errors.JGitInternalException"></exception> /// <exception cref="NGit.Api.Errors.WrongRepositoryStateException"></exception> public virtual void SetupRepository() { // create initial commit git = new Git(db); initialCommit = git.Commit().SetMessage("initial commit").Call(); // create file indexFile = new FilePath(db.WorkTree, "a.txt"); FileUtils.CreateNewFile(indexFile); PrintWriter writer = new PrintWriter(indexFile); writer.Write("content"); writer.Flush(); // add file and commit it git.Add().AddFilepattern("a.txt").Call(); secondCommit = git.Commit().SetMessage("adding a.txt").Call(); prestage = DirCache.Read(db.GetIndexFile(), db.FileSystem).GetEntry(indexFile.GetName ()); // modify file and add to index writer.Write("new content"); writer.Close(); git.Add().AddFilepattern("a.txt").Call(); // create a file not added to the index untrackedFile = new FilePath(db.WorkTree, "notAddedToIndex.txt"); FileUtils.CreateNewFile(untrackedFile); PrintWriter writer2 = new PrintWriter(untrackedFile); writer2.Write("content"); writer2.Close(); }
public void testBuilderClear()
{
    DirCache dc = DirCache.read(db);

    string[] paths = { "a.", "a.b", "a/b", "a0b" };
    DirCacheEntry[] ents = new DirCacheEntry[paths.Length];
    for (int i = 0; i < paths.Length; i++)
    {
        ents[i] = new DirCacheEntry(paths[i]);
        ents[i].setFileMode(FileMode.RegularFile);
    }
    {
        DirCacheBuilder b = dc.builder();
        for (int i = 0; i < ents.Length; i++)
        {
            b.add(ents[i]);
        }
        b.finish();
    }
    Assert.AreEqual(paths.Length, dc.getEntryCount());
    {
        DirCacheBuilder b = dc.builder();
        b.finish();
    }
    Assert.AreEqual(0, dc.getEntryCount());
}
public void testAdd_ReverseGitSortOrder()
{
    DirCache dc = DirCache.read(db);

    string[] paths = { "a.", "a.b", "a/b", "a0b" };
    DirCacheEntry[] ents = new DirCacheEntry[paths.Length];
    for (int i = 0; i < paths.Length; i++)
    {
        ents[i] = new DirCacheEntry(paths[i]);
        ents[i].setFileMode(FileMode.RegularFile);
    }

    DirCacheBuilder b = dc.builder();
    for (int i = ents.Length - 1; i >= 0; i--)
    {
        b.add(ents[i]);
    }
    b.finish();

    Assert.AreEqual(paths.Length, dc.getEntryCount());
    for (int i = 0; i < paths.Length; i++)
    {
        Assert.AreSame(ents[i], dc.getEntry(i));
        Assert.AreEqual(paths[i], dc.getEntry(i).getPathString());
        Assert.AreEqual(i, dc.findEntry(paths[i]));
        Assert.AreSame(ents[i], dc.getEntry(paths[i]));
    }
}
public virtual void TestModify()
{
    Git git = new Git(db);
    string path = "file";
    WriteTrashFile(path, "content");
    git.Add().AddFilepattern(path).Call();
    git.Commit().SetMessage("commit").Call();
    DirCache dc = db.ReadDirCache();
    DirCacheEntry entry = dc.GetEntry(path);
    long masterLastMod = entry.LastModified;

    git.Checkout().SetCreateBranch(true).SetName("side").Call();
    Sharpen.Thread.Sleep(10);
    string path2 = "file2";
    WriteTrashFile(path2, "side content");
    git.Add().AddFilepattern(path2).Call();
    git.Commit().SetMessage("commit").Call();
    dc = db.ReadDirCache();
    entry = dc.GetEntry(path);
    long sideLastMod = entry.LastModified;

    Sharpen.Thread.Sleep(2000);
    WriteTrashFile(path, "uncommitted content");
    git.Checkout().SetName("master").Call();
    dc = db.ReadDirCache();
    entry = dc.GetEntry(path);
    NUnit.Framework.Assert.IsTrue(masterLastMod == sideLastMod, "shall have equal mod time!");
    NUnit.Framework.Assert.IsTrue(entry.LastModified == masterLastMod, "shall equal master timestamp!");
}
public virtual void TestDirCacheMatchingId()
{
    FilePath f = WriteTrashFile("file", "content");
    Git git = new Git(db);
    WriteTrashFile("file", "content");
    FsTick(f);
    git.Add().AddFilepattern("file").Call();
    DirCacheEntry dce = db.ReadDirCache().GetEntry("file");
    TreeWalk tw = new TreeWalk(db);
    FileTreeIterator fti = new FileTreeIterator(trash, db.FileSystem,
        ((FileBasedConfig)db.GetConfig()).Get(WorkingTreeOptions.KEY));
    tw.AddTree(fti);
    DirCacheIterator dci = new DirCacheIterator(db.ReadDirCache());
    tw.AddTree(dci);
    fti.SetDirCacheIterator(tw, 1);
    while (tw.Next() && !tw.PathString.Equals("file"))
    {
        // advance the walk to the entry for "file"
    }
    NUnit.Framework.Assert.AreEqual(WorkingTreeIterator.MetadataDiff.EQUAL,
        fti.CompareMetadata(dce));
    ObjectId fromRaw = ObjectId.FromRaw(fti.IdBuffer, fti.IdOffset);
    NUnit.Framework.Assert.AreEqual("6b584e8ece562ebffc15d38808cd6b98fc3d97ea",
        fromRaw.GetName());
    NUnit.Framework.Assert.IsFalse(fti.IsModified(dce, false));
}
/// <summary>Reverts the worktree after an unsuccessful merge.</summary>
/// <remarks>
/// Reverts the worktree after an unsuccessful merge. We know that for all
/// modified files the old content was in the old index and the index
/// contained only stage 0. For an inCore operation we only need to clear
/// the list of modified files.
/// </remarks>
/// <exception cref="System.IO.IOException">System.IO.IOException</exception>
/// <exception cref="NGit.Errors.CorruptObjectException">NGit.Errors.CorruptObjectException</exception>
/// <exception cref="NGit.Errors.NoWorkTreeException">NGit.Errors.NoWorkTreeException</exception>
private void CleanUp()
{
    if (inCore)
    {
        modifiedFiles.Clear();
        return;
    }
    DirCache dc = db.ReadDirCache();
    ObjectReader or = db.ObjectDatabase.NewReader();
    Iterator<string> mpathsIt = modifiedFiles.Iterator();
    while (mpathsIt.HasNext())
    {
        string mpath = mpathsIt.Next();
        DirCacheEntry entry = dc.GetEntry(mpath);
        // restore the file content from the blob recorded in the index entry
        FileOutputStream fos = new FileOutputStream(new FilePath(db.WorkTree, mpath));
        try
        {
            or.Open(entry.GetObjectId()).CopyTo(fos);
        }
        finally
        {
            fos.Close();
        }
        mpathsIt.Remove();
    }
}
public void testBuildThenClear()
{
    DirCache dc = DirCache.read(db);

    string[] paths = { "a.", "a.b", "a/b", "a0b" };
    var ents = new DirCacheEntry[paths.Length];
    for (int i = 0; i < paths.Length; i++)
    {
        ents[i] = new DirCacheEntry(paths[i]);
    }

    DirCacheBuilder b = dc.builder();
    for (int i = 0; i < ents.Length; i++)
    {
        b.add(ents[i]);
    }
    b.finish();
    Assert.AreEqual(paths.Length, dc.getEntryCount());

    dc.clear();
    Assert.AreEqual(0, dc.getEntryCount());
}
public virtual void TestUpdateSmudgedEntries()
{
    git.BranchCreate().SetName("test2").Call();
    RefUpdate rup = db.UpdateRef(Constants.HEAD);
    rup.Link("refs/heads/test2");

    FilePath file = new FilePath(db.WorkTree, "Test.txt");
    long size = file.Length();
    long mTime = file.LastModified() - 5000L;
    NUnit.Framework.Assert.IsTrue(file.SetLastModified(mTime));

    DirCache cache = DirCache.Lock(db.GetIndexFile(), db.FileSystem);
    DirCacheEntry entry = cache.GetEntry("Test.txt");
    NUnit.Framework.Assert.IsNotNull(entry);
    entry.SetLength(0);
    entry.LastModified = 0;
    cache.Write();
    NUnit.Framework.Assert.IsTrue(cache.Commit());

    cache = DirCache.Read(db.GetIndexFile(), db.FileSystem);
    entry = cache.GetEntry("Test.txt");
    NUnit.Framework.Assert.IsNotNull(entry);
    NUnit.Framework.Assert.AreEqual(0, entry.Length);
    NUnit.Framework.Assert.AreEqual(0, entry.LastModified);

    db.GetIndexFile().SetLastModified(db.GetIndexFile().LastModified() - 5000);
    NUnit.Framework.Assert.IsNotNull(git.Checkout().SetName("test").Call());

    cache = DirCache.Read(db.GetIndexFile(), db.FileSystem);
    entry = cache.GetEntry("Test.txt");
    NUnit.Framework.Assert.IsNotNull(entry);
    NUnit.Framework.Assert.AreEqual(size, entry.Length);
    NUnit.Framework.Assert.AreEqual(mTime, entry.LastModified);
}
public virtual void TestPathsResetOnDirs()
{
    SetupRepository();
    DirCacheEntry preReset = DirCache.Read(db.GetIndexFile(), db.FileSystem).GetEntry("dir/b.txt");
    NUnit.Framework.Assert.IsNotNull(preReset);
    git.Add().AddFilepattern(untrackedFile.GetName()).Call();

    // 'dir/b.txt' has already been modified in setupRepository
    git.Reset().AddPath("dir").Call();

    DirCacheEntry postReset = DirCache.Read(db.GetIndexFile(), db.FileSystem).GetEntry("dir/b.txt");
    NUnit.Framework.Assert.IsNotNull(postReset);
    NUnit.Framework.Assert.AreNotSame(preReset.GetObjectId(), postReset.GetObjectId());

    // check that HEAD hasn't moved
    ObjectId head = db.Resolve(Constants.HEAD);
    NUnit.Framework.Assert.AreEqual(secondCommit, head);

    // check if files still exist
    NUnit.Framework.Assert.IsTrue(untrackedFile.Exists());
    NUnit.Framework.Assert.IsTrue(InHead("dir/b.txt"));
    NUnit.Framework.Assert.IsTrue(InIndex("dir/b.txt"));
}
public void testFindSingleFile()
{
    string path = "a-File-path";
    DirCache dc = DirCache.read(db);
    DirCacheBuilder b = dc.builder();
    Assert.IsNotNull(b);

    DirCacheEntry entOrig = new DirCacheEntry(path);
    entOrig.setFileMode(FileMode.RegularFile);
    Assert.AreNotSame(path, entOrig.getPathString());
    Assert.AreEqual(path, entOrig.getPathString());
    b.add(entOrig);
    b.finish();

    Assert.AreEqual(1, dc.getEntryCount());
    Assert.AreSame(entOrig, dc.getEntry(0));

    Assert.AreEqual(0, dc.findEntry(path));
    Assert.AreEqual(-1, dc.findEntry("@@-before"));
    Assert.AreEqual(0, real(dc.findEntry("@@-before")));
    Assert.AreEqual(-2, dc.findEntry("a-zoo"));
    Assert.AreEqual(1, real(dc.findEntry("a-zoo")));
    Assert.AreSame(entOrig, dc.getEntry(path));
}
public void testNoSubtree_NoTreeWalk()
{
    DirCache dc = DirCache.read(db);

    string[] paths = { "a.", "a0b" };
    var ents = new DirCacheEntry[paths.Length];
    for (int i = 0; i < paths.Length; i++)
    {
        ents[i] = new DirCacheEntry(paths[i]);
        ents[i].setFileMode(FileMode.RegularFile);
    }

    DirCacheBuilder b = dc.builder();
    for (int i = 0; i < ents.Length; i++)
    {
        b.add(ents[i]);
    }
    b.finish();

    var iter = new DirCacheIterator(dc);
    int pathIdx = 0;
    for (; !iter.eof(); iter.next(1))
    {
        Assert.AreEqual(pathIdx, iter.Pointer);
        Assert.AreSame(ents[pathIdx], iter.getDirCacheEntry());
        pathIdx++;
    }
    Assert.AreEqual(paths.Length, pathIdx);
}
protected static DirCacheEntry File(string path, RevBlob blob)
{
    var e = new DirCacheEntry(path);
    e.setFileMode(FileMode.RegularFile);
    e.setObjectId(blob);
    return e;
}
/**
 * Construct a regular file mode tree entry.
 *
 * @param path
 *            path of the file.
 * @param blob
 *            a blob, previously constructed in the repository.
 * @return the entry.
 * @throws Exception
 */
public DirCacheEntry file(String path, RevBlob blob)
{
    DirCacheEntry e = new DirCacheEntry(path);
    e.setFileMode(FileMode.RegularFile);
    e.setObjectId(blob);
    return e;
}
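// Usage sketch (not from the original sources): one way the file() helper above
// could feed a DirCacheBuilder when assembling an index for a test. The
// DirCache.Lock/builder/add/commit calls mirror those used elsewhere in this
// section; 'blob' is assumed to be a RevBlob created beforehand, e.g. by a test
// repository helper, and the method name is purely illustrative.
private void addFileEntry(String path, RevBlob blob)
{
    DirCache dc = DirCache.Lock(db);
    DirCacheBuilder b = dc.builder();
    b.add(file(path, blob));       // regular-file entry built by the helper above
    Assert.IsTrue(b.commit());     // write the updated index back to disk
}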
/// <summary>Represent the state of the index in one String.</summary>
/// <remarks>
/// Represent the state of the index in one String. This representation is
/// useful when writing tests which do assertions on the state of the index.
/// By default information about path, mode and stage (if different from 0) is
/// included. A bitmask controls which additional info about
/// modificationTimes, smudge state and length is included.
/// <p>
/// The format of the returned string is described with this BNF:
/// <pre>
/// result = ( "[" path mode stage? time? smudge? length? sha1? content? "]" )* .
/// mode = ", mode:" number .
/// stage = ", stage:" number .
/// time = ", time:t" timestamp-index .
/// smudge = "" | ", smudged" .
/// length = ", length:" number .
/// sha1 = ", sha1:" hex-sha1 .
/// content = ", content:" blob-data .
/// </pre>
/// 'stage' is only present when the stage differs from 0. All reported time
/// stamps are mapped to strings like "t0", "t1", ... "tn". The smallest
/// reported time stamp is called "t0". This allows writing assertions against
/// the string although the concrete values of the time stamps are unknown.
/// </remarks>
/// <param name="repo">the repository the index state should be determined for</param>
/// <param name="includedOptions">
/// a bitmask constructed out of the constants
/// <see cref="MOD_TIME">MOD_TIME</see>,
/// <see cref="SMUDGE">SMUDGE</see>,
/// <see cref="LENGTH">LENGTH</see>,
/// <see cref="CONTENT_ID">CONTENT_ID</see> and
/// <see cref="CONTENT">CONTENT</see>
/// controlling which info is present in the resulting string.
/// </param>
/// <returns>a string encoding the index state</returns>
/// <exception cref="System.InvalidOperationException">System.InvalidOperationException</exception>
/// <exception cref="System.IO.IOException">System.IO.IOException</exception>
public virtual string IndexState(Repository repo, int includedOptions)
{
    DirCache dc = repo.ReadDirCache();
    StringBuilder sb = new StringBuilder();
    TreeSet<long> timeStamps = null;
    // iterate once over the dircache just to collect all time stamps
    if (0 != (includedOptions & MOD_TIME))
    {
        timeStamps = new TreeSet<long>();
        for (int i = 0; i < dc.GetEntryCount(); ++i)
        {
            timeStamps.AddItem(Sharpen.Extensions.ValueOf(dc.GetEntry(i).LastModified));
        }
    }
    // iterate again, now produce the result string
    for (int i = 0; i < dc.GetEntryCount(); ++i)
    {
        DirCacheEntry entry = dc.GetEntry(i);
        sb.Append("[" + entry.PathString + ", mode:" + entry.FileMode);
        int stage = entry.Stage;
        if (stage != 0)
        {
            sb.Append(", stage:" + stage);
        }
        if (0 != (includedOptions & MOD_TIME))
        {
            // the index of this time stamp within the sorted set of all time stamps
            sb.Append(", time:t" + timeStamps.HeadSet(Sharpen.Extensions.ValueOf(entry.LastModified)).Count);
        }
        if (0 != (includedOptions & SMUDGE))
        {
            if (entry.IsSmudged)
            {
                sb.Append(", smudged");
            }
        }
        if (0 != (includedOptions & LENGTH))
        {
            sb.Append(", length:" + Sharpen.Extensions.ToString(entry.Length));
        }
        if (0 != (includedOptions & CONTENT_ID))
        {
            sb.Append(", sha1:" + ObjectId.ToString(entry.GetObjectId()));
        }
        if (0 != (includedOptions & CONTENT))
        {
            sb.Append(", content:" + Sharpen.Runtime.GetStringForBytes(
                db.Open(entry.GetObjectId(), Constants.OBJ_BLOB).GetCachedBytes(), "UTF-8"));
        }
        if (0 != (includedOptions & ASSUME_UNCHANGED))
        {
            sb.Append(", assume-unchanged:" + entry.IsAssumeValid.ToString().ToLower());
        }
        sb.Append("]");
    }
    return sb.ToString();
}
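// Illustrative usage (not from the original sources): an assertion against the
// string produced by IndexState() above, following the BNF in the remarks. The
// file name, content and flag combination are invented for the example; with a
// single entry the smallest (and only) time stamp is rendered as "t0", and the
// regular-file mode prints as 100644. Treat the exact literal as a sketch.
public virtual void ExampleIndexStateAssertion()
{
    Git git = new Git(db);
    WriteTrashFile("a.txt", "content of a");        // 12 bytes of content
    git.Add().AddFilepattern("a.txt").Call();
    NUnit.Framework.Assert.AreEqual(
        "[a.txt, mode:100644, time:t0, length:12]",
        IndexState(db, MOD_TIME | LENGTH));
}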
ObjectId WriteWorkingDirectoryTree(RevTree headTree, DirCache index)
{
    DirCache dc = DirCache.NewInCore();
    DirCacheBuilder cb = dc.Builder();
    ObjectInserter oi = _repo.NewObjectInserter();
    try
    {
        TreeWalk tw = new TreeWalk(_repo);
        tw.Reset();
        tw.AddTree(new FileTreeIterator(_repo));
        tw.AddTree(headTree);
        tw.AddTree(new DirCacheIterator(index));
        while (tw.Next())
        {
            // Ignore untracked files
            if (tw.IsSubtree)
            {
                tw.EnterSubtree();
            }
            else if (tw.GetFileMode(0) != NGit.FileMode.MISSING
                && (tw.GetFileMode(1) != NGit.FileMode.MISSING || tw.GetFileMode(2) != NGit.FileMode.MISSING))
            {
                WorkingTreeIterator f = tw.GetTree<WorkingTreeIterator>(0);
                DirCacheIterator dcIter = tw.GetTree<DirCacheIterator>(2);
                DirCacheEntry currentEntry = dcIter.GetDirCacheEntry();
                DirCacheEntry ce = new DirCacheEntry(tw.PathString);
                if (!f.IsModified(currentEntry, true))
                {
                    // unchanged: reuse the metadata and blob id from the index entry
                    ce.SetLength(currentEntry.Length);
                    ce.LastModified = currentEntry.LastModified;
                    ce.FileMode = currentEntry.FileMode;
                    ce.SetObjectId(currentEntry.GetObjectId());
                }
                else
                {
                    // modified: insert the working tree content as a new blob
                    long sz = f.GetEntryLength();
                    ce.SetLength(sz);
                    ce.LastModified = f.GetEntryLastModified();
                    ce.FileMode = f.EntryFileMode;
                    var data = f.OpenEntryStream();
                    try
                    {
                        ce.SetObjectId(oi.Insert(Constants.OBJ_BLOB, sz, data));
                    }
                    finally
                    {
                        data.Close();
                    }
                }
                cb.Add(ce);
            }
        }
        cb.Finish();
        return dc.WriteTree(oi);
    }
    finally
    {
        oi.Release();
    }
}
private DirCacheEntry MakeEntry(string path, FileMode mode, String content)
{
    var ent = new DirCacheEntry(path);
    ent.setFileMode(mode);
    byte[] contentBytes = Constants.encode(content);
    ent.setObjectId(new ObjectWriter(db).ComputeBlobSha1(contentBytes.Length,
        new MemoryStream(contentBytes)));
    return ent;
}
private DirCacheEntry makeFile(string path)
{
    byte[] pathBytes = Constants.encode(path);
    var ent = new DirCacheEntry(path);
    ent.setFileMode(FileMode.RegularFile);
    ent.setObjectId(new ObjectWriter(db).ComputeBlobSha1(pathBytes.Length,
        new MemoryStream(pathBytes)));
    return ent;
}
/// <exception cref="System.Exception"></exception> private DirCacheEntry MakeFile(string path) { DirCacheEntry ent = new DirCacheEntry(path); ent.FileMode = FileMode.REGULAR_FILE; ent.SetObjectId(new ObjectInserter.Formatter().IdFor(Constants.OBJ_BLOB, Constants .Encode(path))); return(ent); }
/// <exception cref="System.Exception"></exception> private DirCacheEntry MakeEntry(string path, FileMode mode) { DirCacheEntry ent = new DirCacheEntry(path); ent.FileMode = mode; ent.SetObjectId(new ObjectInserter.Formatter().IdFor(Constants.OBJ_BLOB, Constants .Encode(path))); return(ent); }
private static void AssertAreEqual(CGitIndexRecord c, DirCacheEntry j)
{
    Assert.IsNotNull(c);
    Assert.IsNotNull(j);
    Assert.AreEqual(c.Path, j.getPathString());
    Assert.AreEqual(c.Id, j.getObjectId());
    Assert.AreEqual(c.Mode, j.getRawMode());
    Assert.AreEqual(c.Stage, j.getStage());
}
protected internal virtual DirCacheEntry CreateEntry(string path, FileMode mode, int stage, string content)
{
    DirCacheEntry entry = new DirCacheEntry(path, stage);
    entry.FileMode = mode;
    entry.SetObjectId(new ObjectInserter.Formatter().IdFor(Constants.OBJ_BLOB,
        Constants.Encode(content)));
    return entry;
}
public void testTwoLevelSubtree()
{
    DirCache dc = DirCache.read(db);

    string[] paths = { "a.", "a/b", "a/c/e", "a/c/f", "a/d", "a0b" };
    DirCacheEntry[] ents = new DirCacheEntry[paths.Length];
    for (int i = 0; i < paths.Length; i++)
    {
        ents[i] = new DirCacheEntry(paths[i]);
        ents[i].setFileMode(FileMode.RegularFile);
    }
    int aFirst = 1;
    int aLast = 4;
    int acFirst = 2;
    int acLast = 3;

    DirCacheBuilder b = dc.builder();
    for (int i = 0; i < ents.Length; i++)
    {
        b.add(ents[i]);
    }
    b.finish();

    Assert.IsNull(dc.getCacheTree(false));
    DirCacheTree root = dc.getCacheTree(true);
    Assert.IsNotNull(root);
    Assert.AreSame(root, dc.getCacheTree(true));
    Assert.AreEqual(string.Empty, root.getNameString());
    Assert.AreEqual(string.Empty, root.getPathString());
    Assert.AreEqual(1, root.getChildCount());
    Assert.AreEqual(dc.getEntryCount(), root.getEntrySpan());
    Assert.IsFalse(root.isValid());

    DirCacheTree aTree = root.getChild(0);
    Assert.IsNotNull(aTree);
    Assert.AreSame(aTree, root.getChild(0));
    Assert.AreEqual("a", aTree.getNameString());
    Assert.AreEqual("a/", aTree.getPathString());
    Assert.AreEqual(1, aTree.getChildCount());
    Assert.AreEqual(aLast - aFirst + 1, aTree.getEntrySpan());
    Assert.IsFalse(aTree.isValid());

    DirCacheTree acTree = aTree.getChild(0);
    Assert.IsNotNull(acTree);
    Assert.AreSame(acTree, aTree.getChild(0));
    Assert.AreEqual("c", acTree.getNameString());
    Assert.AreEqual("a/c/", acTree.getPathString());
    Assert.AreEqual(0, acTree.getChildCount());
    Assert.AreEqual(acLast - acFirst + 1, acTree.getEntrySpan());
    Assert.IsFalse(acTree.isValid());
}
public virtual void TestRecursiveFiltering()
{
    ObjectInserter odi = db.NewObjectInserter();
    ObjectId aSth = odi.Insert(Constants.OBJ_BLOB, Sharpen.Runtime.GetBytesForString("a.sth"));
    ObjectId aTxt = odi.Insert(Constants.OBJ_BLOB, Sharpen.Runtime.GetBytesForString("a.txt"));
    ObjectId bSth = odi.Insert(Constants.OBJ_BLOB, Sharpen.Runtime.GetBytesForString("b.sth"));
    ObjectId bTxt = odi.Insert(Constants.OBJ_BLOB, Sharpen.Runtime.GetBytesForString("b.txt"));

    DirCache dc = db.ReadDirCache();
    DirCacheBuilder builder = dc.Builder();
    DirCacheEntry aSthEntry = new DirCacheEntry("a.sth");
    aSthEntry.FileMode = FileMode.REGULAR_FILE;
    aSthEntry.SetObjectId(aSth);
    DirCacheEntry aTxtEntry = new DirCacheEntry("a.txt");
    aTxtEntry.FileMode = FileMode.REGULAR_FILE;
    aTxtEntry.SetObjectId(aTxt);
    builder.Add(aSthEntry);
    builder.Add(aTxtEntry);
    DirCacheEntry bSthEntry = new DirCacheEntry("sub/b.sth");
    bSthEntry.FileMode = FileMode.REGULAR_FILE;
    bSthEntry.SetObjectId(bSth);
    DirCacheEntry bTxtEntry = new DirCacheEntry("sub/b.txt");
    bTxtEntry.FileMode = FileMode.REGULAR_FILE;
    bTxtEntry.SetObjectId(bTxt);
    builder.Add(bSthEntry);
    builder.Add(bTxtEntry);
    builder.Finish();

    ObjectId treeId = dc.WriteTree(odi);
    odi.Flush();

    TreeWalk tw = new TreeWalk(db);
    tw.Recursive = true;
    tw.Filter = PathSuffixFilter.Create(".txt");
    tw.AddTree(treeId);
    IList<string> paths = new List<string>();
    while (tw.Next())
    {
        paths.AddItem(tw.PathString);
    }

    IList<string> expected = new List<string>();
    expected.AddItem("a.txt");
    expected.AddItem("sub/b.txt");
    NUnit.Framework.Assert.AreEqual(expected, paths);
}
/// <summary>
/// Adds an entry to the index builder which is a copy of the specified
/// DirCacheEntry.
/// </summary>
/// <param name="e">the entry which should be copied</param>
/// <returns>the entry which was added to the index</returns>
private DirCacheEntry Keep(DirCacheEntry e)
{
    DirCacheEntry newEntry = new DirCacheEntry(e.PathString, e.Stage);
    newEntry.FileMode = e.FileMode;
    newEntry.SetObjectId(e.GetObjectId());
    newEntry.LastModified = e.LastModified;
    newEntry.SetLength(e.Length);
    builder.Add(newEntry);
    return newEntry;
}
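// Usage sketch (not from the original sources): how a copy-based Keep(), as
// defined above, is typically driven when the index is rebuilt entry by entry.
// The method name and loop are illustrative only; 'db' and 'builder' stand in
// for the fields of the surrounding class, and a real merge would route only the
// entries that need no change through Keep().
private void KeepAllEntries()
{
    DirCache dc = db.ReadDirCache();
    for (int i = 0; i < dc.GetEntryCount(); i++)
    {
        // copy every existing entry unchanged into the index being built
        Keep(dc.GetEntry(i));
    }
}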
public void testSingleSubtree_NoRecursion()
{
    DirCache dc = DirCache.read(db);

    string[] paths = { "a.", "a/b", "a/c", "a/d", "a0b" };
    var ents = new DirCacheEntry[paths.Length];
    for (int i = 0; i < paths.Length; i++)
    {
        ents[i] = new DirCacheEntry(paths[i]);
        ents[i].setFileMode(FileMode.RegularFile);
    }

    DirCacheBuilder b = dc.builder();
    for (int i = 0; i < ents.Length; i++)
    {
        b.add(ents[i]);
    }
    b.finish();

    string[] expPaths = { "a.", "a", "a0b" };
    FileMode[] expModes = { FileMode.RegularFile, FileMode.Tree, FileMode.RegularFile };
    var expPos = new[] { 0, -1, 4 };

    var iter = new DirCacheIterator(dc);
    var tw = new TreeWalk(db);
    tw.reset();
    tw.addTree(iter);
    tw.Recursive = false;
    int pathIdx = 0;
    while (tw.next())
    {
        Assert.AreSame(iter, tw.getTree<DirCacheIterator>(0, typeof(DirCacheIterator)));
        Assert.AreEqual(expModes[pathIdx].Bits, tw.getRawMode(0));
        Assert.AreSame(expModes[pathIdx], tw.getFileMode(0));
        Assert.AreEqual(expPaths[pathIdx], tw.getPathString());
        if (expPos[pathIdx] >= 0)
        {
            Assert.AreEqual(expPos[pathIdx], iter.Pointer);
            Assert.AreSame(ents[expPos[pathIdx]], iter.getDirCacheEntry());
        }
        else
        {
            Assert.AreSame(FileMode.Tree, tw.getFileMode(0));
        }
        pathIdx++;
    }
    Assert.AreEqual(expPaths.Length, pathIdx);
}
public void testEntriesWithin()
{
    DirCache dc = DirCache.read(db);

    string[] paths = { "a.", "a/b", "a/c", "a/d", "a0b" };
    DirCacheEntry[] ents = new DirCacheEntry[paths.Length];
    for (int i = 0; i < paths.Length; i++)
    {
        ents[i] = new DirCacheEntry(paths[i]);
        ents[i].setFileMode(FileMode.RegularFile);
    }
    int aFirst = 1;
    int aLast = 3;

    DirCacheBuilder b = dc.builder();
    for (int i = 0; i < ents.Length; i++)
    {
        b.add(ents[i]);
    }
    b.finish();

    Assert.AreEqual(paths.Length, dc.getEntryCount());
    for (int i = 0; i < ents.Length; i++)
    {
        Assert.AreSame(ents[i], dc.getEntry(i));
    }

    DirCacheEntry[] aContents = dc.getEntriesWithin("a");
    Assert.IsNotNull(aContents);
    Assert.AreEqual(aLast - aFirst + 1, aContents.Length);
    for (int i = aFirst, j = 0; i <= aLast; i++, j++)
    {
        Assert.AreSame(ents[i], aContents[j]);
    }

    aContents = dc.getEntriesWithin("a/");
    Assert.IsNotNull(aContents);
    Assert.AreEqual(aLast - aFirst + 1, aContents.Length);
    for (int i = aFirst, j = 0; i <= aLast; i++, j++)
    {
        Assert.AreSame(ents[i], aContents[j]);
    }

    Assert.IsNotNull(dc.getEntriesWithin("a."));
    Assert.AreEqual(0, dc.getEntriesWithin("a.").Length);

    Assert.IsNotNull(dc.getEntriesWithin("a0b"));
    Assert.AreEqual(0, dc.getEntriesWithin("a0b.").Length);

    Assert.IsNotNull(dc.getEntriesWithin("zoo"));
    Assert.AreEqual(0, dc.getEntriesWithin("zoo.").Length);
}