public void testNoSubtree_NoTreeWalk()
{
    DirCache dc = DirCache.read(db);

    string[] paths = { "a.", "a0b" };
    var ents = new DirCacheEntry[paths.Length];
    for (int i = 0; i < paths.Length; i++)
    {
        ents[i] = new DirCacheEntry(paths[i]);
    }

    DirCacheBuilder b = dc.builder();
    for (int i = 0; i < ents.Length; i++)
    {
        b.add(ents[i]);
    }
    b.finish();

    var iter = new DirCacheIterator(dc);
    int pathIdx = 0;
    for (; !iter.eof(); iter.next(1))
    {
        Assert.AreEqual(pathIdx, iter.Pointer);
        Assert.AreSame(ents[pathIdx], iter.getDirCacheEntry());
        pathIdx++;
    }
    Assert.AreEqual(paths.Length, pathIdx);
}
public void testEntriesWithin()
{
    DirCache dc = DirCache.read(db);

    string[] paths = { "a.", "a/b", "a/c", "a/d", "a0b" };
    DirCacheEntry[] ents = new DirCacheEntry[paths.Length];
    for (int i = 0; i < paths.Length; i++)
        ents[i] = new DirCacheEntry(paths[i]);
    int aFirst = 1;
    int aLast = 3;

    DirCacheBuilder b = dc.builder();
    for (int i = 0; i < ents.Length; i++)
    {
        b.add(ents[i]);
    }
    b.finish();

    Assert.AreEqual(paths.Length, dc.getEntryCount());
    for (int i = 0; i < ents.Length; i++)
    {
        Assert.AreSame(ents[i], dc.getEntry(i));
    }

    DirCacheEntry[] aContents = dc.getEntriesWithin("a");
    Assert.IsNotNull(aContents);
    Assert.AreEqual(aLast - aFirst + 1, aContents.Length);
    for (int i = aFirst, j = 0; i <= aLast; i++, j++)
    {
        Assert.AreSame(ents[i], aContents[j]);
    }

    aContents = dc.getEntriesWithin("a/");
    Assert.IsNotNull(aContents);
    Assert.AreEqual(aLast - aFirst + 1, aContents.Length);
    for (int i = aFirst, j = 0; i <= aLast; i++, j++)
    {
        Assert.AreSame(ents[i], aContents[j]);
    }

    // Non-directory paths yield empty (but non-null) results.
    Assert.IsNotNull(dc.getEntriesWithin("a."));
    Assert.AreEqual(0, dc.getEntriesWithin("a.").Length);

    Assert.IsNotNull(dc.getEntriesWithin("a0b"));
    Assert.AreEqual(0, dc.getEntriesWithin("a0b").Length);

    Assert.IsNotNull(dc.getEntriesWithin("zoo"));
    Assert.AreEqual(0, dc.getEntriesWithin("zoo").Length);
}
public void testPathFilterGroup_DoesNotSkipTail()
{
    DirCache dc = DirCache.read(db);

    var mode = FileMode.RegularFile;
    string[] paths = { "a.", "a/b", "a/c", "a/d", "a0b" };
    var ents = new DirCacheEntry[paths.Length];
    for (int i = 0; i < paths.Length; i++)
    {
        ents[i] = new DirCacheEntry(paths[i]);
        ents[i].setFileMode(mode);
    }

    {
        DirCacheBuilder builder = dc.builder();
        for (int i = 0; i < ents.Length; i++)
        {
            builder.add(ents[i]);
        }
        builder.finish();
    }

    const int expIdx = 2;
    DirCacheBuilder b = dc.builder();
    var tw = new GitSharp.TreeWalk.TreeWalk(db);
    tw.reset();
    tw.addTree(new DirCacheBuildIterator(b));
    tw.Recursive = true;
    tw.setFilter(PathFilterGroup.createFromStrings(new[] { paths[expIdx] }));

    Assert.IsTrue(tw.next(), "found " + paths[expIdx]);
    var c = tw.getTree<DirCacheIterator>(0, typeof(DirCacheIterator));
    Assert.IsNotNull(c);
    Assert.AreEqual(expIdx, c.Pointer);
    Assert.AreSame(ents[expIdx], c.getDirCacheEntry());
    Assert.AreEqual(paths[expIdx], tw.getPathString());
    Assert.AreEqual(mode.Bits, tw.getRawMode(0));
    Assert.AreSame(mode, tw.getFileMode(0));
    b.add(c.getDirCacheEntry());

    Assert.IsFalse(tw.next(), "no more entries");

    b.finish();
    Assert.AreEqual(ents.Length, dc.getEntryCount());
    for (int i = 0; i < ents.Length; i++)
    {
        Assert.AreSame(ents[i], dc.getEntry(i));
    }
}
public void testAdd_InGitSortOrder()
{
    DirCache dc = DirCache.read(db);

    string[] paths = { "a.", "a.b", "a/b", "a0b" };
    DirCacheEntry[] ents = new DirCacheEntry[paths.Length];
    for (int i = 0; i < paths.Length; i++)
        ents[i] = new DirCacheEntry(paths[i]);

    DirCacheBuilder b = dc.builder();
    for (int i = 0; i < ents.Length; i++)
        b.add(ents[i]);
    b.finish();

    Assert.AreEqual(paths.Length, dc.getEntryCount());
    for (int i = 0; i < paths.Length; i++)
    {
        Assert.AreSame(ents[i], dc.getEntry(i));
        Assert.AreEqual(paths[i], dc.getEntry(i).getPathString());
        Assert.AreEqual(i, dc.findEntry(paths[i]));
        Assert.AreSame(ents[i], dc.getEntry(paths[i]));
    }
}
public void testBuildThenClear()
{
    DirCache dc = DirCache.read(db);

    string[] paths = { "a.", "a.b", "a/b", "a0b" };
    var ents = new DirCacheEntry[paths.Length];
    for (int i = 0; i < paths.Length; i++)
    {
        ents[i] = new DirCacheEntry(paths[i]);
    }

    DirCacheBuilder b = dc.builder();
    for (int i = 0; i < ents.Length; i++)
    {
        b.add(ents[i]);
    }
    b.finish();
    Assert.AreEqual(paths.Length, dc.getEntryCount());

    dc.clear();
    Assert.AreEqual(0, dc.getEntryCount());
}
public static int cmp(DirCacheEntry a, DirCacheEntry b)
{
    return cmp(a.path, a.path.Length, b);
}
/**
 * Recursively get all entries within a subtree.
 *
 * @param path
 *            the subtree path to get all entries within.
 * @return all entries recursively contained within the subtree.
 */
public DirCacheEntry[] getEntriesWithin(string path)
{
    if (!path.EndsWith("/"))
        path += "/";
    byte[] p = Constants.encode(path);
    int pLen = p.Length;

    // Locate the first entry at or after the subtree prefix, then the
    // first entry beyond the prefix; the difference is the subtree's span.
    int eIdx = findEntry(p, pLen);
    if (eIdx < 0)
        eIdx = -(eIdx + 1);
    int lastIdx = nextEntry(p, pLen, eIdx);

    DirCacheEntry[] r = new DirCacheEntry[lastIdx - eIdx];
    Array.Copy(sortedEntries, eIdx, r, 0, r.Length);
    return r;
}
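// A minimal usage sketch for getEntriesWithin, assuming an open
// repository `db` as in the tests above; the trailing '/' on the
// subtree path is optional, as the method appends it when missing.
DirCache dc = DirCache.read(db);
foreach (DirCacheEntry e in dc.getEntriesWithin("a"))
{
    Console.WriteLine(e.getPathString()); // e.g. "a/b", "a/c", "a/d"
}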
public void replace(DirCacheEntry[] e, int cnt)
{
    sortedEntries = e;
    entryCnt = cnt;
    tree = null;
}
private static InvalidOperationException bad(DirCacheEntry a, string msg)
{
    return new InvalidOperationException(msg + ": " + a.getStage() + " " + a.getPathString());
}
private DirCacheEntry toEntry(int stage, TreeWalk.TreeWalk tw)
{
    DirCacheEntry e = new DirCacheEntry(tw.getRawPath(), stage);
    AbstractTreeIterator i = tw.getTree<AbstractTreeIterator>(0, typeof(AbstractTreeIterator));
    e.setFileMode(tw.getFileMode(0));
    e.setObjectIdFromRaw(i.idBuffer(), i.idOffset());
    return e;
}
public void testWriteReadTree()
{
    DirCache dc = DirCache.Lock(db);

    // Build two long path components. The Java-style format string
    // "a%2000s" is meaningless to C#'s string.Format; the composite
    // format "{0,2000}" is the equivalent right-aligned padding.
    string A = string.Format("a{0,2000}", "a");
    string B = string.Format("b{0,2000}", "b");
    string[] paths = { A + ".", A + "." + B, A + "/" + B, A + "0" + B };
    var ents = new DirCacheEntry[paths.Length];
    for (int i = 0; i < paths.Length; i++)
    {
        ents[i] = new DirCacheEntry(paths[i]);
    }

    DirCacheBuilder b = dc.builder();
    for (int i = 0; i < ents.Length; i++)
    {
        b.add(ents[i]);
    }
    b.commit();

    DirCache read = DirCache.read(db);
    Assert.AreEqual(paths.Length, read.getEntryCount());
    Assert.AreEqual(1, read.getCacheTree(true).getChildCount());
}
private void testLongPath(int len)
{
    string longPath = makeLongPath(len);
    string shortPath = "~~~ shorter-path";

    DirCacheEntry longEnt = new DirCacheEntry(longPath);
    DirCacheEntry shortEnt = new DirCacheEntry(shortPath);
    Assert.AreEqual(longPath, longEnt.getPathString());
    Assert.AreEqual(shortPath, shortEnt.getPathString());

    DirCache dc1 = DirCache.Lock(db);
    DirCacheBuilder b = dc1.builder();
    b.add(longEnt);
    b.add(shortEnt);
    Assert.IsTrue(b.commit());

    Assert.AreEqual(2, dc1.getEntryCount());
    Assert.AreSame(longEnt, dc1.getEntry(0));
    Assert.AreSame(shortEnt, dc1.getEntry(1));

    DirCache dc2 = DirCache.read(db);
    Assert.AreEqual(2, dc2.getEntryCount());

    Assert.AreNotSame(longEnt, dc2.getEntry(0));
    Assert.AreEqual(longPath, dc2.getEntry(0).getPathString());

    Assert.AreNotSame(shortEnt, dc2.getEntry(1));
    Assert.AreEqual(shortPath, dc2.getEntry(1).getPathString());
}
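// makeLongPath is not shown in this section. A plausible sketch of the
// helper the test assumes — a syntactically valid path of exactly `len`
// characters — is given below; treat it as an illustrative
// reconstruction, not the original helper.
private static string makeLongPath(int len)
{
    var r = new System.Text.StringBuilder(len);
    for (int i = 0; i < len; i++)
        r.Append((char)('a' + (i % 26))); // cycle 'a'..'z'
    return r.ToString();
}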
public void testFindSingleFile()
{
    string path = "a-File-path";
    DirCache dc = DirCache.read(db);
    DirCacheBuilder b = dc.builder();
    Assert.IsNotNull(b);

    DirCacheEntry entOrig = new DirCacheEntry(path);
    Assert.AreNotSame(path, entOrig.getPathString());
    Assert.AreEqual(path, entOrig.getPathString());
    b.add(entOrig);
    b.finish();

    Assert.AreEqual(1, dc.getEntryCount());
    Assert.AreSame(entOrig, dc.getEntry(0));

    Assert.AreEqual(0, dc.findEntry(path));
    Assert.AreEqual(-1, dc.findEntry("@@-before"));
    Assert.AreEqual(0, real(dc.findEntry("@@-before")));
    Assert.AreEqual(-2, dc.findEntry("a-zoo"));
    Assert.AreEqual(1, real(dc.findEntry("a-zoo")));
    Assert.AreSame(entOrig, dc.getEntry(path));
}
public void testBuildOneFile_FinishWriteCommit()
{
    string path = "a-File-path";
    var mode = GitSharp.FileMode.RegularFile;
    long lastModified = 1218123387057L;
    int length = 1342;

    DirCache dc = DirCache.Lock(db);
    DirCacheBuilder b = dc.builder();
    Assert.IsNotNull(b);

    DirCacheEntry entOrig = new DirCacheEntry(path);
    entOrig.setFileMode(mode);
    entOrig.setLastModified(lastModified);
    entOrig.setLength(length);

    Assert.AreNotSame(path, entOrig.getPathString());
    Assert.AreEqual(path, entOrig.getPathString());
    Assert.AreEqual(ObjectId.ZeroId, entOrig.getObjectId());
    Assert.AreEqual(mode.Bits, entOrig.getRawMode());
    Assert.AreEqual(0, entOrig.getStage());
    Assert.AreEqual(lastModified, entOrig.getLastModified());
    Assert.AreEqual(length, entOrig.getLength());
    Assert.IsFalse(entOrig.isAssumeValid());
    b.add(entOrig);

    b.finish();
    Assert.AreEqual(1, dc.getEntryCount());
    Assert.AreSame(entOrig, dc.getEntry(0));

    dc.write();
    Assert.IsTrue(dc.commit());

    dc = DirCache.read(db);
    Assert.AreEqual(1, dc.getEntryCount());

    // Verify the entry read back from disk, not the one we wrote.
    DirCacheEntry entRead = dc.getEntry(0);
    Assert.AreNotSame(entOrig, entRead);
    Assert.AreEqual(path, entRead.getPathString());
    Assert.AreEqual(ObjectId.ZeroId, entRead.getObjectId());
    Assert.AreEqual(mode.Bits, entRead.getRawMode());
    Assert.AreEqual(0, entRead.getStage());
    Assert.AreEqual(lastModified, entRead.getLastModified());
    Assert.AreEqual(length, entRead.getLength());
    Assert.IsFalse(entRead.isAssumeValid());
}
private DirCacheEntry makeEntry(string path, FileMode mode)
{
    byte[] pathBytes = Constants.encode(path);
    DirCacheEntry ent = new DirCacheEntry(path);
    ent.setFileMode(mode);
    ent.setObjectId(new ObjectWriter(db).ComputeBlobSha1(pathBytes.Length, new MemoryStream(pathBytes)));
    return ent;
}
/**
 * Update (if necessary) this tree's entrySpan.
 *
 * @param cache
 *            the complete cache from DirCache.
 * @param cCnt
 *            number of entries in <code>cache</code> that are valid for
 *            iteration.
 * @param cIdx
 *            first position of <code>cache</code> that is a member of
 *            this tree. The path of <code>cache[cIdx].path</code> for
 *            the range <code>[0,pathOff-1)</code> matches the complete
 *            path of this tree, from the root of the repository.
 * @param pathOff
 *            number of bytes of <code>cache[cIdx].path</code> that
 *            matches this tree's path. The value at array position
 *            <code>cache[cIdx].path[pathOff-1]</code> is always '/' if
 *            <code>pathOff</code> is > 0.
 */
public void validate(DirCacheEntry[] cache, int cCnt, int cIdx, int pathOff)
{
    if (entrySpan >= 0)
    {
        // If we are valid, our children are also valid.
        // We have no need to validate them.
        //
        return;
    }

    entrySpan = 0;
    if (cCnt == 0)
    {
        // Special case of an empty index, and we are the root tree.
        //
        return;
    }

    byte[] firstPath = cache[cIdx].path;
    int stIdx = 0;
    while (cIdx < cCnt)
    {
        byte[] currPath = cache[cIdx].path;
        if (pathOff > 0 && !peq(firstPath, currPath, pathOff))
        {
            // The current entry is no longer in this tree. Our
            // span is updated and the remainder goes elsewhere.
            //
            break;
        }

        DirCacheTree st = stIdx < childCnt ? children[stIdx] : null;
        int cc = namecmp(currPath, pathOff, st);
        if (cc > 0)
        {
            // This subtree is now empty.
            //
            removeChild(stIdx);
            continue;
        }

        if (cc < 0)
        {
            int p = slash(currPath, pathOff);
            if (p < 0)
            {
                // The entry has no '/' and thus is directly in this
                // tree. Count it as one of our own.
                //
                cIdx++;
                entrySpan++;
                continue;
            }

            // Build a new subtree for this entry.
            //
            st = new DirCacheTree(this, currPath, pathOff, p - pathOff);
            insertChild(stIdx, st);
        }

        // The entry is contained in this subtree.
        //
        st.validate(cache, cCnt, cIdx, pathOff + st.nameLength() + 1);
        cIdx += st.entrySpan;
        entrySpan += st.entrySpan;
        stIdx++;
    }

    if (stIdx < childCnt)
    {
        // None of our remaining children can be in this tree
        // as the current cache entry is after our own name.
        //
        DirCacheTree[] dct = new DirCacheTree[stIdx];
        Array.Copy(children, 0, dct, 0, stIdx);
        children = dct;
    }
}
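// A worked example of the spans validate computes, using the same sample
// index as testSingleSubtree below (a hedged sketch, assuming an open
// repository `db`): the root tree spans all five entries, and its single
// child "a" spans the three entries under "a/".
DirCache dc = DirCache.read(db);
DirCacheBuilder b = dc.builder();
foreach (string p in new[] { "a.", "a/b", "a/c", "a/d", "a0b" })
    b.add(new DirCacheEntry(p));
b.finish();
DirCacheTree root = dc.getCacheTree(true); // builds the tree and runs validate()
// root.getEntrySpan() == 5; root.getChild(0).getEntrySpan() == 3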
private DirCacheEntry MakeEntry(string path, FileMode mode, string content)
{
    DirCacheEntry ent = new DirCacheEntry(path);
    ent.setFileMode(mode);
    byte[] contentBytes = Constants.encode(content);
    ent.setObjectId(new ObjectWriter(db).ComputeBlobSha1(contentBytes.Length, new MemoryStream(contentBytes)));
    return ent;
}
public void testSingleSubtree()
{
    DirCache dc = DirCache.read(db);

    string[] paths = { "a.", "a/b", "a/c", "a/d", "a0b" };
    DirCacheEntry[] ents = new DirCacheEntry[paths.Length];
    for (int i = 0; i < paths.Length; i++)
        ents[i] = new DirCacheEntry(paths[i]);
    int aFirst = 1;
    int aLast = 3;

    DirCacheBuilder b = dc.builder();
    for (int i = 0; i < ents.Length; i++)
        b.add(ents[i]);
    b.finish();

    Assert.IsNull(dc.getCacheTree(false));
    DirCacheTree root = dc.getCacheTree(true);
    Assert.IsNotNull(root);
    Assert.AreSame(root, dc.getCacheTree(true));
    Assert.AreEqual(string.Empty, root.getNameString());
    Assert.AreEqual(string.Empty, root.getPathString());
    Assert.AreEqual(1, root.getChildCount());
    Assert.AreEqual(dc.getEntryCount(), root.getEntrySpan());
    Assert.IsFalse(root.isValid());

    DirCacheTree aTree = root.getChild(0);
    Assert.IsNotNull(aTree);
    Assert.AreSame(aTree, root.getChild(0));
    Assert.AreEqual("a", aTree.getNameString());
    Assert.AreEqual("a/", aTree.getPathString());
    Assert.AreEqual(0, aTree.getChildCount());
    Assert.AreEqual(aLast - aFirst + 1, aTree.getEntrySpan());
    Assert.IsFalse(aTree.isValid());
}
/**
 * Append one entry into the resulting entry list.
 * <p>
 * The entry is placed at the end of the entry list. The caller is
 * responsible for making sure the table is correctly sorted.
 * <p>
 * The {@link #entries} table is automatically expanded if there is
 * insufficient space for the new addition.
 *
 * @param newEntry
 *            the new entry to add.
 */
protected void fastAdd(DirCacheEntry newEntry)
{
    if (entries.Length == entryCnt)
    {
        DirCacheEntry[] n = new DirCacheEntry[(entryCnt + 16) * 3 / 2];
        Array.Copy(entries, 0, n, 0, entryCnt);
        entries = n;
    }
    entries[entryCnt++] = newEntry;
}
private void beforeAdd(DirCacheEntry newEntry)
{
    if (FileMode.Tree.Equals(newEntry.getRawMode()))
        throw bad(newEntry, "Adding subtree not allowed");

    if (sorted && entryCnt > 0)
    {
        DirCacheEntry lastEntry = entries[entryCnt - 1];
        int cr = DirCache.cmp(lastEntry, newEntry);
        if (cr > 0)
        {
            // The new entry sorts before the old entry; we are
            // no longer sorted correctly. We'll need to redo
            // the sorting before we can close out the build.
            //
            sorted = false;
        }
        else if (cr == 0)
        {
            // Same file path; we can only insert this if the
            // stages won't be violated.
            //
            int peStage = lastEntry.getStage();
            int dceStage = newEntry.getStage();
            if (peStage == dceStage)
                throw bad(newEntry, "Duplicate stages not allowed");
            if (peStage == 0 || dceStage == 0)
                throw bad(newEntry, "Mixed stages not allowed");
            if (peStage > dceStage)
                sorted = false;
        }
    }
}
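// A hedged sketch of the stage rules beforeAdd enforces, assuming this
// port exposes a DirCacheEntry(string, int) constructor for merge
// stages (the toEntry helper above uses the byte[] equivalent): a path
// may appear once at stage 0, or at several distinct non-zero conflict
// stages, but the two forms may never be mixed.
DirCacheBuilder b = DirCache.read(db).builder();
b.add(new DirCacheEntry("f", 1)); // base version of a conflicted path
b.add(new DirCacheEntry("f", 2)); // "ours" — allowed, distinct stage
// b.add(new DirCacheEntry("f", 2)); // would throw: duplicate stage
// b.add(new DirCacheEntry("f"));    // would throw: mixed with stage 0
b.finish();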
/**
 * Add a range of existing entries from the destination cache.
 * <p>
 * The entries are placed at the end of the entry list, preserving their
 * current order. The caller is responsible for making sure the table is
 * correctly sorted.
 * <p>
 * This method copies from the destination cache, which has not yet been
 * updated with this editor's new table. So all offsets into the
 * destination cache are not affected by any updates that may be
 * currently taking place in this editor.
 * <p>
 * The {@link #entries} table is automatically expanded if there is
 * insufficient space for the new additions.
 *
 * @param pos
 *            first entry to copy from the destination cache.
 * @param cnt
 *            number of entries to copy.
 */
protected void fastKeep(int pos, int cnt)
{
    if (entryCnt + cnt > entries.Length)
    {
        int m1 = (entryCnt + 16) * 3 / 2;
        int m2 = entryCnt + cnt;
        DirCacheEntry[] n = new DirCacheEntry[Math.Max(m1, m2)];
        Array.Copy(entries, 0, n, 0, entryCnt);
        entries = n;
    }
    cache.toArray(pos, entries, entryCnt, cnt);
    entryCnt += cnt;
}
/**
 * Append one entry into the resulting entry list.
 * <p>
 * The entry is placed at the end of the entry list. If the entry causes
 * the list to now be incorrectly sorted a sorting phase will be
 * automatically enabled within {@link #finish()}.
 * <p>
 * The internal entry table is automatically expanded if there is
 * insufficient space for the new addition.
 *
 * @param newEntry
 *            the new entry to add.
 */
public void add(DirCacheEntry newEntry)
{
    beforeAdd(newEntry);
    fastAdd(newEntry);
}
/**
 * Update the DirCache with the contents of {@link #entries}.
 * <p>
 * This method should be invoked only during an implementation of
 * {@link #finish()}, and only after {@link #entries} is sorted.
 */
protected void replace()
{
    if (entryCnt < entries.Length / 2)
    {
        // Shrink to the exact size rather than hand the cache a
        // table that is mostly empty slack space.
        DirCacheEntry[] n = new DirCacheEntry[entryCnt];
        Array.Copy(entries, 0, n, 0, entryCnt);
        entries = n;
    }
    cache.replace(entries, entryCnt);
}
private void readFrom(FileStream inStream)
{
    MessageDigest md = Constants.newMessageDigest();

    // Read the index header and verify we understand it.
    //
    byte[] hdr = new byte[20];
    NB.ReadFully(inStream, hdr, 0, 12);
    md.Update(hdr, 0, 12);
    if (!is_DIRC(hdr))
        throw new CorruptObjectException("Not a DIRC file.");
    int ver = NB.decodeInt32(hdr, 4);
    if (ver != 2)
        throw new CorruptObjectException("Unknown DIRC version " + ver);
    entryCnt = NB.decodeInt32(hdr, 8);
    if (entryCnt < 0)
        throw new CorruptObjectException("DIRC has too many entries.");

    // Load the individual file entries.
    //
    byte[] infos = new byte[INFO_LEN * entryCnt];
    sortedEntries = new DirCacheEntry[entryCnt];
    for (int i = 0; i < entryCnt; i++)
        sortedEntries[i] = new DirCacheEntry(infos, i * INFO_LEN, inStream, md);
    lastModified = liveFile.LastWriteTime;

    // After the file entries are index extensions, and then a footer.
    //
    for (; ; )
    {
        long pos = inStream.Position;
        NB.ReadFully(inStream, hdr, 0, 20);
        if (inStream.ReadByte() < 0)
        {
            // No extensions present; the file ended where we expected.
            //
            break;
        }
        inStream.Seek(pos, SeekOrigin.Begin);

        switch (NB.decodeInt32(hdr, 0))
        {
            case EXT_TREE:
            {
                byte[] raw = new byte[NB.decodeInt32(hdr, 4)];
                md.Update(hdr, 0, 8);
                NB.skipFully(inStream, 8);
                NB.ReadFully(inStream, raw, 0, raw.Length);
                md.Update(raw, 0, raw.Length);
                tree = new DirCacheTree(raw, new MutableInteger(), null);
                break;
            }
            default:
                if (hdr[0] >= (byte)'A' && hdr[0] <= (byte)'Z')
                {
                    // The extension is optional and is here only as
                    // a performance optimization. Since we do not
                    // understand it, we can safely skip past it.
                    //
                    NB.skipFully(inStream, NB.decodeUInt32(hdr, 4));
                }
                else
                {
                    // The extension is not an optimization and is
                    // _required_ to understand this index format.
                    // Since we did not trap it above we must abort.
                    //
                    throw new CorruptObjectException("DIRC extension '"
                        + Constants.CHARSET.GetString(hdr.Take(4).ToArray())
                        + "' not supported by this version.");
                }
                break;
        }
    }

    byte[] exp = md.Digest();
    if (!exp.SequenceEqual(hdr))
    {
        throw new CorruptObjectException("DIRC checksum mismatch");
    }
}
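// A hedged sketch of the 12-byte header readFrom parses: the ASCII
// magic "DIRC", a big-endian int32 version (2), and a big-endian int32
// entry count. The standalone decoder below is illustrative only; it
// mirrors what NB.decodeInt32 does over the same offsets.
static int ReadDircEntryCount(byte[] hdr)
{
    bool isDirc = hdr[0] == (byte)'D' && hdr[1] == (byte)'I'
               && hdr[2] == (byte)'R' && hdr[3] == (byte)'C';
    if (!isDirc)
        throw new IOException("Not a DIRC file.");
    int ver = (hdr[4] << 24) | (hdr[5] << 16) | (hdr[6] << 8) | hdr[7];
    if (ver != 2)
        throw new IOException("Unknown DIRC version " + ver);
    return (hdr[8] << 24) | (hdr[9] << 16) | (hdr[10] << 8) | hdr[11];
}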
/// <summary>
/// Create a new unmerged path exception.
/// </summary>
/// <param name="entry">The first non-zero stage of the unmerged path.</param>
public UnmergedPathException(DirCacheEntry entry)
    : base("Unmerged path: " + entry.getPathString())
{
    _entry = entry;
}
public void toArray(int i, DirCacheEntry[] dst, int off, int cnt)
{
    Array.Copy(sortedEntries, i, dst, off, cnt);
}
/**
 * Write (if necessary) this tree to the object store.
 *
 * @param cache
 *            the complete cache from DirCache.
 * @param cIdx
 *            first position of <code>cache</code> that is a member of
 *            this tree. The path of <code>cache[cIdx].path</code> for
 *            the range <code>[0,pathOffset-1)</code> matches the
 *            complete path of this tree, from the root of the
 *            repository.
 * @param pathOffset
 *            number of bytes of <code>cache[cIdx].path</code> that
 *            matches this tree's path. The value at array position
 *            <code>cache[cIdx].path[pathOffset-1]</code> is always '/'
 *            if <code>pathOffset</code> is > 0.
 * @param ow
 *            the writer to use when serializing to the store.
 * @return identity of this tree.
 * @throws UnmergedPathException
 *             one or more paths contain higher-order stages (stage > 0),
 *             which cannot be stored in a tree object.
 * @throws IOException
 *             an unexpected error occurred writing to the object store.
 */
public ObjectId writeTree(DirCacheEntry[] cache, int cIdx, int pathOffset, ObjectWriter ow)
{
    if (id == null)
    {
        int endIdx = cIdx + entrySpan;
        int size = computeSize(cache, cIdx, pathOffset, ow);
        var @out = new MemoryStream(size);
        int childIdx = 0;
        int entryIdx = cIdx;

        while (entryIdx < endIdx)
        {
            DirCacheEntry e = cache[entryIdx];
            byte[] ep = e.path;
            if (childIdx < childCnt)
            {
                DirCacheTree st = children[childIdx];
                if (st.contains(ep, pathOffset, ep.Length))
                {
                    // Emit the whole subtree as one record, using the
                    // child tree's previously computed id.
                    FileMode.Tree.CopyTo(@out);
                    @out.WriteByte((byte)' ');
                    @out.Write(st.encodedName, 0, st.encodedName.Length);
                    @out.WriteByte(0);
                    st.id.copyRawTo(@out);

                    entryIdx += st.entrySpan;
                    childIdx++;
                    continue;
                }
            }

            e.getFileMode().CopyTo(@out);
            @out.WriteByte((byte)' ');
            @out.Write(ep, pathOffset, ep.Length - pathOffset);
            @out.WriteByte(0);
            @out.Write(e.idBuffer(), e.idOffset(), Constants.OBJECT_ID_LENGTH);
            entryIdx++;
        }

        id = ow.WriteCanonicalTree(@out.ToArray());
    }
    return id;
}
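// A hedged sketch of the record format the loop above emits for each
// entry: the mode in octal ASCII (no leading zero, e.g. "100644", or
// "40000" for a subtree), a space, the name bytes, a NUL, then the 20
// raw (not hex) SHA-1 bytes. Illustrative helper, not part of the port.
static byte[] EncodeTreeRecord(string octalMode, byte[] nameBytes, byte[] rawSha1)
{
    var @out = new MemoryStream();
    byte[] mode = System.Text.Encoding.ASCII.GetBytes(octalMode);
    @out.Write(mode, 0, mode.Length);
    @out.WriteByte((byte)' ');
    @out.Write(nameBytes, 0, nameBytes.Length);
    @out.WriteByte(0);
    @out.Write(rawSha1, 0, 20);
    return @out.ToArray();
}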
public static int cmp(byte[] aPath, int aLen, DirCacheEntry b)
{
    return cmp(aPath, aLen, b.path, b.path.Length);
}
private int computeSize(DirCacheEntry[] cache, int cIdx, int pathOffset, ObjectWriter ow)
{
    int endIdx = cIdx + entrySpan;
    int childIdx = 0;
    int entryIdx = cIdx;
    int size = 0;

    while (entryIdx < endIdx)
    {
        DirCacheEntry e = cache[entryIdx];
        if (e.getStage() != 0)
            throw new UnmergedPathException(e);

        byte[] ep = e.path;
        if (childIdx < childCnt)
        {
            DirCacheTree st = children[childIdx];
            if (st.contains(ep, pathOffset, ep.Length))
            {
                int stOffset = pathOffset + st.nameLength() + 1;
                st.writeTree(cache, entryIdx, stOffset, ow);

                size += FileMode.Tree.copyToLength();
                size += st.nameLength();
                size += Constants.OBJECT_ID_LENGTH + 2; // +2 for the ' ' and NUL separators

                entryIdx += st.entrySpan;
                childIdx++;
                continue;
            }
        }

        FileMode mode = e.getFileMode();
        if ((int)mode.ObjectType == Constants.OBJ_BAD)
            throw new UnmergedPathException(e);

        size += mode.copyToLength();
        size += ep.Length - pathOffset;
        size += Constants.OBJECT_ID_LENGTH + 2; // +2 for the ' ' and NUL separators
        entryIdx++;
    }

    return size;
}