public virtual void TestEmptyMap()
{
    ObjectIdSubclassMap<ObjectIdSubclassMapTest.SubId> m =
        new ObjectIdSubclassMap<ObjectIdSubclassMapTest.SubId>();
    NUnit.Framework.Assert.IsTrue(m.IsEmpty());
    NUnit.Framework.Assert.AreEqual(0, m.Size());
    Iterator<ObjectIdSubclassMapTest.SubId> i = m.Iterator();
    NUnit.Framework.Assert.IsNotNull(i);
    NUnit.Framework.Assert.IsFalse(i.HasNext());
    NUnit.Framework.Assert.IsFalse(m.Contains(Id(1)));
}
internal BaseSearch(ProgressMonitor countingMonitor, ICollection<RevTree> bases,
    ObjectIdSubclassMap<ObjectToPack> objects, IList<ObjectToPack> edges,
    ObjectReader or)
{
    progress = countingMonitor;
    reader = or;
    baseTrees = Sharpen.Collections.ToArray(bases, new ObjectId[bases.Count]);
    objectsMap = objects;
    edgeObjects = edges;
    alreadyProcessed = new IntSet();
    treeCache = new ObjectIdSubclassMap<BaseSearch.TreeWithData>();
    parser = new CanonicalTreeParser();
    idBuf = new MutableObjectId();
}
public virtual void TestAddGetAndContains()
{
    ObjectIdSubclassMap<ObjectIdSubclassMapTest.SubId> m =
        new ObjectIdSubclassMap<ObjectIdSubclassMapTest.SubId>();
    m.Add(id_1);
    m.Add(id_2);
    m.Add(id_3);
    m.Add(id_a31);
    m.Add(id_b31);
    NUnit.Framework.Assert.IsFalse(m.IsEmpty());
    NUnit.Framework.Assert.AreEqual(5, m.Size());
    NUnit.Framework.Assert.AreSame(id_1, m.Get(id_1));
    NUnit.Framework.Assert.AreSame(id_1, m.Get(Id(1)));
    NUnit.Framework.Assert.AreSame(id_1, m.Get(Id(1).Copy()));
    NUnit.Framework.Assert.AreSame(id_2, m.Get(Id(2).Copy()));
    NUnit.Framework.Assert.AreSame(id_3, m.Get(Id(3).Copy()));
    NUnit.Framework.Assert.AreSame(id_a31, m.Get(Id(31).Copy()));
    NUnit.Framework.Assert.AreSame(id_b31, m.Get(id_b31.Copy()));
    NUnit.Framework.Assert.IsTrue(m.Contains(id_1));
}
public virtual void TestIterator()
{
    ObjectIdSubclassMap<ObjectIdSubclassMapTest.SubId> m =
        new ObjectIdSubclassMap<ObjectIdSubclassMapTest.SubId>();
    m.Add(id_1);
    m.Add(id_2);
    m.Add(id_3);
    Iterator<ObjectIdSubclassMapTest.SubId> i = m.Iterator();
    NUnit.Framework.Assert.IsTrue(i.HasNext());
    NUnit.Framework.Assert.AreSame(id_1, i.Next());
    NUnit.Framework.Assert.IsTrue(i.HasNext());
    NUnit.Framework.Assert.AreSame(id_2, i.Next());
    NUnit.Framework.Assert.IsTrue(i.HasNext());
    NUnit.Framework.Assert.AreSame(id_3, i.Next());
    NUnit.Framework.Assert.IsFalse(i.HasNext());
    try
    {
        i.Next();
        NUnit.Framework.Assert.Fail("did not fail on next with no next");
    }
    catch (NoSuchElementException)
    {
        // OK
    }
    i = m.Iterator();
    NUnit.Framework.Assert.AreSame(id_1, i.Next());
    try
    {
        i.Remove();
        NUnit.Framework.Assert.Fail("did not fail on remove");
    }
    catch (NotSupportedException)
    {
        // OK
    }
}
public virtual void TestAddIfAbsentGrowsWithObjects()
{
    ObjectIdSubclassMap<ObjectIdSubclassMapTest.SubId> m =
        new ObjectIdSubclassMap<ObjectIdSubclassMapTest.SubId>();
    m.Add(id_1);
    for (int i = 32; i < 8000; i++)
    {
        m.AddIfAbsent(new ObjectIdSubclassMapTest.SubId(Id(i)));
    }
    NUnit.Framework.Assert.AreEqual(8000 - 32 + 1, m.Size());
    NUnit.Framework.Assert.AreSame(id_1, m.Get(id_1.Copy()));
    for (int i_1 = 32; i_1 < 8000; i_1++)
    {
        NUnit.Framework.Assert.IsTrue(m.Contains(Id(i_1)));
    }
}
public virtual void TestAddIfAbsent()
{
    ObjectIdSubclassMap<ObjectIdSubclassMapTest.SubId> m =
        new ObjectIdSubclassMap<ObjectIdSubclassMapTest.SubId>();
    m.Add(id_1);
    NUnit.Framework.Assert.AreSame(id_1, m.AddIfAbsent(new ObjectIdSubclassMapTest.SubId(id_1)));
    NUnit.Framework.Assert.AreEqual(1, m.Size());
    NUnit.Framework.Assert.AreSame(id_2, m.AddIfAbsent(id_2));
    NUnit.Framework.Assert.AreEqual(2, m.Size());
    NUnit.Framework.Assert.AreSame(id_a31, m.AddIfAbsent(id_a31));
    NUnit.Framework.Assert.AreSame(id_b31, m.AddIfAbsent(id_b31));
    NUnit.Framework.Assert.AreSame(id_a31, m.AddIfAbsent(new ObjectIdSubclassMapTest.SubId(id_a31)));
    NUnit.Framework.Assert.AreSame(id_b31, m.AddIfAbsent(new ObjectIdSubclassMapTest.SubId(id_b31)));
    NUnit.Framework.Assert.AreEqual(4, m.Size());
}
/// <exception cref="System.IO.IOException"></exception> private void ResolveDeltasWithExternalBases(ProgressMonitor progress) { GrowEntries(baseById.Size()); if (needBaseObjectIds) { baseObjectIds = new ObjectIdSubclassMap<ObjectId>(); } IList<PackParser.DeltaChain> missing = new AList<PackParser.DeltaChain>(64); foreach (PackParser.DeltaChain baseId in baseById) { if (baseId.head == null) { continue; } if (needBaseObjectIds) { baseObjectIds.Add(baseId); } ObjectLoader ldr; try { ldr = readCurs.Open(baseId); } catch (MissingObjectException) { missing.AddItem(baseId); continue; } PackParser.DeltaVisit visit = new PackParser.DeltaVisit(); visit.data = ldr.GetCachedBytes(int.MaxValue); visit.id = baseId; int typeCode = ldr.GetType(); PackedObjectInfo oe = NewInfo(baseId, null, null); if (OnAppendBase(typeCode, visit.data, oe)) { entries[entryCount++] = oe; } visit.nextChild = FirstChildOf(oe); ResolveDeltas(visit.Next(), typeCode, new PackParser.ObjectTypeAndSize(), progress ); if (progress.IsCancelled()) { throw new IOException(JGitText.Get().downloadCancelledDuringIndexing); } } foreach (PackParser.DeltaChain @base in missing) { if (@base.head != null) { throw new MissingObjectException(@base, "delta base"); } } OnEndThinPack(); }
/// <summary>Configure this index pack instance to keep track of new objects.</summary>
/// <remarks>
/// Configure this index pack instance to keep track of new objects.
/// <p>
/// By default an index pack doesn't save the new objects that were created
/// when it was instantiated. Setting this flag to
/// <code>true</code>
/// allows the caller to use
/// <see cref="GetNewObjectIds()">GetNewObjectIds()</see>
/// to retrieve that list.
/// </remarks>
/// <param name="b">
/// <code>true</code>
/// to enable keeping track of new objects.
/// </param>
public virtual void SetNeedNewObjectIds(bool b)
{
    if (b)
    {
        newObjectIds = new ObjectIdSubclassMap<ObjectId>();
    }
    else
    {
        newObjectIds = null;
    }
}
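// A hedged usage sketch for the flag documented above: enable it before
// indexing, then read the list back afterwards. The Create factory and the
// "repo" and "input" variables are assumptions mirroring the JGit-derived
// IndexPack API, not taken from this file.
IndexPack ip = IndexPack.Create(repo, input);
ip.SetNeedNewObjectIds(true);                  // opt in before indexing starts
ip.Index(NullProgressMonitor.INSTANCE);        // consume and index the pack
foreach (ObjectId id in ip.GetNewObjectIds())  // populated only because of the flag
{
    System.Console.WriteLine(id);              // one entry per newly created object
}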
public void index(ProgressMonitor progress)
{
    progress.Start(2 /* tasks */);
    try
    {
        try
        {
            ReadPackHeader();
            _entries = new PackedObjectInfo[(int)_objectCount];
            _baseById = new ObjectIdSubclassMap<DeltaChain>();
            _baseByPos = new LongMap<UnresolvedDelta>();
            progress.BeginTask(PROGRESS_DOWNLOAD, (int)_objectCount);
            for (int done = 0; done < _objectCount; done++)
            {
                IndexOneObject();
                progress.Update(1);
                if (progress.IsCancelled)
                {
                    throw new IOException("Download cancelled");
                }
            }
            ReadPackFooter();
            EndInput();
            progress.EndTask();
            if (_deltaCount > 0)
            {
                if (_packOut == null)
                {
                    throw new IOException("need packOut");
                }
                ResolveDeltas(progress);
                if (_entryCount < _objectCount)
                {
                    if (!_fixThin)
                    {
                        throw new IOException("pack has " + (_objectCount - _entryCount) + " unresolved deltas");
                    }
                    FixThinPack(progress);
                }
            }
            if (_packOut != null && (_keepEmpty || _entryCount > 0))
            {
                _packOut.Flush();
            }
            _packDigest = null;
            _baseById = null;
            _baseByPos = null;
            if (_dstIdx != null && (_keepEmpty || _entryCount > 0))
            {
                WriteIdx();
            }
        }
        finally
        {
            try
            {
                InflaterCache.Instance.release(_inflater);
            }
            finally
            {
                _inflater = null;
            }
            _windowCursor = WindowCursor.Release(_windowCursor);
            progress.EndTask();
            if (_packOut != null)
            {
                _packOut.Close();
            }
        }
        if (_keepEmpty || _entryCount > 0)
        {
            if (_dstPack != null)
            {
                _dstPack.IsReadOnly = true;
            }
            if (_dstIdx != null)
            {
                _dstIdx.IsReadOnly = true;
            }
        }
    }
    catch (IOException)
    {
        if (_dstPack != null)
        {
            _dstPack.Delete();
        }
        if (_dstIdx != null)
        {
            _dstIdx.Delete();
        }
        throw;
    }
}
/// <summary>
/// Find the list of branches a given commit is reachable from when following
/// parents.
/// <p>
/// Note that this method calls
/// <see cref="RevWalk.Reset()">RevWalk.Reset()</see>
/// at the beginning.
/// <p>
/// In order to improve performance this method assumes clock skew among
/// committers is never larger than 24 hours.
/// </summary>
/// <param name="commit">the commit we are looking at</param>
/// <param name="revWalk">The RevWalk to be used.</param>
/// <param name="refs">the set of branches we want to see reachability from</param>
/// <returns>the list of branches a given commit is reachable from</returns>
/// <exception cref="NGit.Errors.MissingObjectException">NGit.Errors.MissingObjectException</exception>
/// <exception cref="NGit.Errors.IncorrectObjectTypeException">NGit.Errors.IncorrectObjectTypeException</exception>
/// <exception cref="System.IO.IOException">System.IO.IOException</exception>
public static IList<Ref> FindBranchesReachableFrom(RevCommit commit, RevWalk revWalk,
    ICollection<Ref> refs)
{
    IList<Ref> result = new AList<Ref>();
    // Searches from branches can be cut off early if any parent of the
    // search-for commit is found. This is quite likely, so optimize for this.
    revWalk.MarkStart(Arrays.AsList(commit.Parents));
    ObjectIdSubclassMap<ObjectId> cutOff = new ObjectIdSubclassMap<ObjectId>();
    int SKEW = 24 * 3600; // one day of clock skew, in seconds
    foreach (Ref @ref in refs)
    {
        RevObject maybehead = revWalk.ParseAny(@ref.GetObjectId());
        if (!(maybehead is RevCommit))
        {
            continue;
        }
        RevCommit headCommit = (RevCommit)maybehead;
        // If the commit is in the ref's branch, then the tip of the ref should
        // be newer than the commit we are looking for. Allow for a large
        // clock skew.
        if (headCommit.CommitTime + SKEW < commit.CommitTime)
        {
            continue;
        }
        IList<ObjectId> maybeCutOff = new AList<ObjectId>(cutOff.Size()); // guess rough size
        revWalk.ResetRetain();
        revWalk.MarkStart(headCommit);
        RevCommit current;
        Ref found = null;
        while ((current = revWalk.Next()) != null)
        {
            if (AnyObjectId.Equals(current, commit))
            {
                found = @ref;
                break;
            }
            if (cutOff.Contains(current))
            {
                break;
            }
            maybeCutOff.AddItem(current.ToObjectId());
        }
        if (found != null)
        {
            result.AddItem(@ref);
        }
        else
        {
            foreach (ObjectId id in maybeCutOff)
            {
                cutOff.AddIfAbsent(id);
            }
        }
    }
    return result;
}
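// A minimal sketch of calling FindBranchesReachableFrom, assuming an open
// NGit Repository named "repo" and a commit id "commitId". Both names, and
// the use of GetAllRefs() to enumerate candidate refs, are illustrative
// assumptions; non-commit refs are skipped by the method itself.
RevWalk walk = new RevWalk(repo);
RevCommit commit = walk.ParseCommit(commitId);
ICollection<Ref> candidates = repo.GetAllRefs().Values;
foreach (Ref branch in FindBranchesReachableFrom(commit, walk, candidates))
{
    System.Console.WriteLine(branch.GetName()); // every ref the commit is reachable from
}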
/// <exception cref="System.IO.IOException"></exception> private void FixThinPack(ProgressMonitor progress) { GrowEntries(); if (needBaseObjectIds) { baseObjectIds = new ObjectIdSubclassMap<ObjectId>(); } packDigest.Reset(); originalEOF = packOut.Length() - 20; Deflater def = new Deflater(Deflater.DEFAULT_COMPRESSION, false); IList<IndexPack.DeltaChain> missing = new AList<IndexPack.DeltaChain>(64); long end = originalEOF; foreach (IndexPack.DeltaChain baseId in baseById) { if (baseId.head == null) { continue; } if (needBaseObjectIds) { baseObjectIds.Add(baseId); } ObjectLoader ldr; try { ldr = readCurs.Open(baseId); } catch (MissingObjectException) { missing.AddItem(baseId); continue; } byte[] data = ldr.GetCachedBytes(int.MaxValue); int typeCode = ldr.GetType(); PackedObjectInfo oe; crc.Reset(); packOut.Seek(end); WriteWhole(def, typeCode, data); oe = new PackedObjectInfo(end, (int)crc.GetValue(), baseId); entries[entryCount++] = oe; end = packOut.GetFilePointer(); ResolveChildDeltas(oe.GetOffset(), typeCode, data, oe); if (progress.IsCancelled()) { throw new IOException(JGitText.Get().downloadCancelledDuringIndexing); } } def.Finish(); foreach (IndexPack.DeltaChain @base in missing) { if (@base.head != null) { throw new MissingObjectException(@base, "delta base"); } } if (end - originalEOF < 20) { // Ugly corner case; if what we appended on to complete deltas // doesn't completely cover the SHA-1 we have to truncate off // we need to shorten the file, otherwise we will include part // of the old footer as object content. packOut.SetLength(end); } FixHeaderFooter(packcsum, packDigest.Digest()); }
/// <summary>Consume data from the input stream until the packfile is indexed.</summary>
/// <remarks>Consume data from the input stream until the packfile is indexed.</remarks>
/// <param name="progress">progress feedback</param>
/// <exception cref="System.IO.IOException">System.IO.IOException</exception>
public virtual void Index(ProgressMonitor progress)
{
    progress.Start(2);
    try
    {
        try
        {
            ReadPackHeader();
            entries = new PackedObjectInfo[(int)objectCount];
            baseById = new ObjectIdSubclassMap<IndexPack.DeltaChain>();
            baseByPos = new LongMap<IndexPack.UnresolvedDelta>();
            deferredCheckBlobs = new AList<PackedObjectInfo>();
            progress.BeginTask(JGitText.Get().receivingObjects, (int)objectCount);
            for (int done = 0; done < objectCount; done++)
            {
                IndexOneObject();
                progress.Update(1);
                if (progress.IsCancelled())
                {
                    throw new IOException(JGitText.Get().downloadCancelled);
                }
            }
            ReadPackFooter();
            EndInput();
            if (!deferredCheckBlobs.IsEmpty())
            {
                DoDeferredCheckBlobs();
            }
            progress.EndTask();
            if (deltaCount > 0)
            {
                if (packOut == null)
                {
                    throw new IOException(JGitText.Get().needPackOut);
                }
                ResolveDeltas(progress);
                if (entryCount < objectCount)
                {
                    if (!fixThin)
                    {
                        throw new IOException(MessageFormat.Format(JGitText.Get().packHasUnresolvedDeltas,
                            (objectCount - entryCount)));
                    }
                    FixThinPack(progress);
                }
            }
            if (packOut != null && (keepEmpty || entryCount > 0))
            {
                packOut.GetChannel().Force(true);
            }
            packDigest = null;
            baseById = null;
            baseByPos = null;
            if (dstIdx != null && (keepEmpty || entryCount > 0))
            {
                WriteIdx();
            }
        }
        finally
        {
            try
            {
                if (readCurs != null)
                {
                    readCurs.Release();
                }
            }
            finally
            {
                readCurs = null;
            }
            try
            {
                inflater.Release();
            }
            finally
            {
                inflater = null;
                objectDatabase.Close();
            }
            progress.EndTask();
            if (packOut != null)
            {
                packOut.Close();
            }
        }
        if (keepEmpty || entryCount > 0)
        {
            if (dstPack != null)
            {
                dstPack.SetReadOnly();
            }
            if (dstIdx != null)
            {
                dstIdx.SetReadOnly();
            }
        }
    }
    catch (IOException err)
    {
        if (dstPack != null)
        {
            FileUtils.Delete(dstPack);
        }
        if (dstIdx != null)
        {
            FileUtils.Delete(dstIdx);
        }
        throw;
    }
}
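// A short, hedged sketch of driving Index end-to-end as the summary above
// describes. IndexPack.Create, SetFixThin, RenameAndOpenPack, and the
// "repo"/"rawPack" names mirror the JGit-derived API and are assumptions
// here, not confirmed by this file.
IndexPack ip = IndexPack.Create(repo, rawPack);
ip.SetFixThin(true);                  // complete thin packs with local base objects
ip.Index(new TextProgressMonitor());  // receiving objects, then resolving deltas
ip.RenameAndOpenPack();               // move the finished pack into the repository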
/// <summary>
/// Consume data from the input stream until the packfile is indexed.
/// </summary>
/// <param name="progress">progress feedback</param>
public void index(ProgressMonitor progress)
{
    progress.Start(2 /* tasks */);
    try
    {
        try
        {
            ReadPackHeader();
            _entries = new PackedObjectInfo[(int)_objectCount];
            _baseById = new ObjectIdSubclassMap<DeltaChain>();
            _baseByPos = new LongMap<UnresolvedDelta>();
            progress.BeginTask(PROGRESS_DOWNLOAD, (int)_objectCount);
            for (int done = 0; done < _objectCount; done++)
            {
                IndexOneObject();
                progress.Update(1);
                if (progress.IsCancelled)
                {
                    throw new IOException("Download cancelled");
                }
            }
            ReadPackFooter();
            EndInput();
            progress.EndTask();
            if (_deltaCount > 0)
            {
                if (_packOut == null)
                {
                    throw new IOException("need packOut");
                }
                ResolveDeltas(progress);
                if (_needBaseObjectIds)
                {
                    _baseIds = new HashSet<ObjectId>();
                    foreach (var c in _baseById)
                    {
                        _baseIds.Add(c);
                    }
                }
                if (_entryCount < _objectCount)
                {
                    if (!_fixThin)
                    {
                        throw new IOException("pack has " + (_objectCount - _entryCount) + " unresolved deltas");
                    }
                    FixThinPack(progress);
                }
            }
            if (_packOut != null && (_keepEmpty || _entryCount > 0))
            {
                _packOut.Flush();
            }
            _packDigest = null;
            _baseById = null;
            _baseByPos = null;
            if (_dstIdx != null && (_keepEmpty || _entryCount > 0))
            {
                WriteIdx();
            }
        }
        finally
        {
            try
            {
                InflaterCache.Instance.release(_inflater);
            }
            finally
            {
                _inflater = null;
                _objectDatabase.close();
            }
            _windowCursor = WindowCursor.Release(_windowCursor);
            progress.EndTask();
            if (_packOut != null)
            {
                _packOut.Dispose();
            }
        }
        if (_keepEmpty || _entryCount > 0)
        {
            if (_dstPack != null)
            {
                _dstPack.IsReadOnly = true;
            }
            if (_dstIdx != null)
            {
                _dstIdx.IsReadOnly = true;
            }
        }
    }
    catch (IOException)
    {
        if (_dstPack != null)
        {
            _dstPack.DeleteFile();
        }
        if (_dstIdx != null)
        {
            _dstIdx.DeleteFile();
        }
        throw;
    }
}
/// <summary>Parse the pack stream.</summary>
/// <remarks>Parse the pack stream.</remarks>
/// <param name="receiving">
/// receives progress feedback during the initial receiving
/// objects phase. If null,
/// <see cref="NGit.NullProgressMonitor">NGit.NullProgressMonitor</see>
/// will be used.
/// </param>
/// <param name="resolving">receives progress feedback during the resolving objects phase.</param>
/// <returns>
/// the pack lock, if one was requested by setting
/// <see cref="SetLockMessage(string)">SetLockMessage(string)</see>.
/// </returns>
/// <exception cref="System.IO.IOException">the stream is malformed, or contains corrupt objects.</exception>
public virtual PackLock Parse(ProgressMonitor receiving, ProgressMonitor resolving)
{
    if (receiving == null)
    {
        receiving = NullProgressMonitor.INSTANCE;
    }
    if (resolving == null)
    {
        resolving = NullProgressMonitor.INSTANCE;
    }
    if (receiving == resolving)
    {
        receiving.Start(2);
    }
    try
    {
        ReadPackHeader();
        entries = new PackedObjectInfo[(int)objectCount];
        baseById = new ObjectIdSubclassMap<PackParser.DeltaChain>();
        baseByPos = new LongMap<PackParser.UnresolvedDelta>();
        deferredCheckBlobs = new AList<PackedObjectInfo>();
        receiving.BeginTask(JGitText.Get().receivingObjects, (int)objectCount);
        try
        {
            for (int done = 0; done < objectCount; done++)
            {
                IndexOneObject();
                receiving.Update(1);
                if (receiving.IsCancelled())
                {
                    throw new IOException(JGitText.Get().downloadCancelled);
                }
            }
            ReadPackFooter();
            EndInput();
        }
        finally
        {
            receiving.EndTask();
        }
        if (!deferredCheckBlobs.IsEmpty())
        {
            DoDeferredCheckBlobs();
        }
        if (deltaCount > 0)
        {
            ResolveDeltas(resolving);
            if (entryCount < objectCount)
            {
                if (!IsAllowThin())
                {
                    throw new IOException(MessageFormat.Format(JGitText.Get().packHasUnresolvedDeltas,
                        (objectCount - entryCount)));
                }
                ResolveDeltasWithExternalBases(resolving);
                if (entryCount < objectCount)
                {
                    throw new IOException(MessageFormat.Format(JGitText.Get().packHasUnresolvedDeltas,
                        (objectCount - entryCount)));
                }
            }
        }
        packDigest = null;
        baseById = null;
        baseByPos = null;
    }
    finally
    {
        try
        {
            if (readCurs != null)
            {
                readCurs.Release();
            }
        }
        finally
        {
            readCurs = null;
        }
        try
        {
            inflater.Release();
        }
        finally
        {
            inflater = null;
            objectDatabase.Close();
        }
    }
    return null;
}
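// A hedged sketch of driving Parse as the remarks describe. A single monitor
// may serve both phases; Parse calls Start(2) on it in that case. The
// NewPackParser, Flush, and Release calls and the "repo"/"input" names are
// assumptions mirroring the JGit-derived ObjectInserter API.
ObjectInserter ins = repo.NewObjectInserter();
try
{
    PackParser parser = ins.NewPackParser(input);
    ProgressMonitor pm = new TextProgressMonitor();
    PackLock packLock = parser.Parse(pm, pm); // receiving phase, then resolving phase
    ins.Flush();
    // packLock is non-null only if SetLockMessage(string) requested a pack lock
}
finally
{
    ins.Release();
}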