/// <summary>Thread-safe cancellation probe for the wrapped monitor.</summary>
/// <returns>true if the underlying progress monitor was cancelled.</returns>
public override bool IsCancelled()
{
    Lock.Lock();
    try
    {
        // Query under the lock so the read is serialized with any
        // concurrent calls made through this wrapper.
        return pm.IsCancelled();
    }
    finally
    {
        Lock.Unlock();
    }
}
/// <summary>
/// Tree-walk filter that accepts every entry while ticking the progress
/// monitor once per stepSize entries and honoring cancellation.
/// </summary>
/// <exception cref="NGit.Errors.MissingObjectException"></exception>
/// <exception cref="NGit.Errors.IncorrectObjectTypeException"></exception>
/// <exception cref="System.IO.IOException"></exception>
public override bool Include(TreeWalk walker)
{
    count++;
    // Touch the monitor only every stepSize entries to keep overhead low.
    if (count % stepSize != 0)
    {
        return true;
    }
    if (count <= total)
    {
        monitor.Update(stepSize);
    }
    if (monitor.IsCancelled())
    {
        // Abort the walk promptly once the user cancels.
        throw StopWalkException.INSTANCE;
    }
    return true;
}
/// <summary>
/// Complete a thin pack: for each delta whose base was not in the received
/// stream, load the base from the local repository, append it to the pack
/// via OnAppendBase, and resolve the delta chain hanging off it.
/// </summary>
/// <param name="progress">monitor polled for cancellation after each base.</param>
/// <exception cref="System.IO.IOException"></exception>
private void ResolveDeltasWithExternalBases(ProgressMonitor progress)
{
    // Make room in the entry table for the bases we are about to append.
    GrowEntries(baseById.Size());
    if (needBaseObjectIds)
    {
        baseObjectIds = new ObjectIdSubclassMap<ObjectId>();
    }
    // Bases not found locally; only fatal if a delta still needs them
    // at the end (checked in the loop below).
    IList<PackParser.DeltaChain> missing = new AList<PackParser.DeltaChain>(64);
    foreach (PackParser.DeltaChain baseId in baseById)
    {
        if (baseId.head == null)
        {
            // No unresolved children hang off this base; skip it.
            continue;
        }
        if (needBaseObjectIds)
        {
            baseObjectIds.Add(baseId);
        }
        ObjectLoader ldr;
        try
        {
            ldr = readCurs.Open(baseId);
        }
        catch (MissingObjectException)
        {
            // Defer: reported as fatal later only if still unresolved.
            missing.AddItem(baseId);
            continue;
        }
        PackParser.DeltaVisit visit = new PackParser.DeltaVisit();
        visit.data = ldr.GetCachedBytes(int.MaxValue);
        visit.id = baseId;
        int typeCode = ldr.GetType();
        PackedObjectInfo oe = NewInfo(baseId, null, null);
        // Subclass decides whether the appended base becomes an entry.
        if (OnAppendBase(typeCode, visit.data, oe))
        {
            entries[entryCount++] = oe;
        }
        visit.nextChild = FirstChildOf(oe);
        ResolveDeltas(visit.Next(), typeCode, new PackParser.ObjectTypeAndSize(), progress
            );
        if (progress.IsCancelled())
        {
            throw new IOException(JGitText.Get().downloadCancelledDuringIndexing);
        }
    }
    // A base that is still missing AND still has unresolved children
    // means the thin pack cannot be completed.
    foreach (PackParser.DeltaChain @base in missing)
    {
        if (@base.head != null)
        {
            throw new MissingObjectException(@base, "delta base");
        }
    }
    OnEndThinPack();
}
// By default there is no locking.
/// <summary>Resolve deltas for every entry present when resolution starts.</summary>
/// <param name="progress">monitor polled for user cancellation after each entry.</param>
/// <exception cref="System.IO.IOException">the user cancelled the operation.</exception>
private void ResolveDeltas(ProgressMonitor progress)
{
    // Snapshot the count so the loop covers only the entries that
    // existed when resolution began.
    int initialCount = entryCount;
    for (int pos = 0; pos < initialCount; pos++)
    {
        ResolveDeltas(entries[pos], progress);
        if (progress.IsCancelled())
        {
            throw new IOException(JGitText.Get().downloadCancelledDuringIndexing);
        }
    }
}
/// <summary>Parse the pack stream.</summary>
/// <remarks>Parse the pack stream.</remarks>
/// <param name="receiving">
/// receives progress feedback during the initial receiving
/// objects phase. If null,
/// <see cref="NGit.NullProgressMonitor">NGit.NullProgressMonitor</see>
/// will be
/// used.
/// </param>
/// <param name="resolving">receives progress feedback during the resolving objects phase.
/// </param>
/// <returns>
/// the pack lock, if one was requested by setting
/// <see cref="SetLockMessage(string)">SetLockMessage(string)</see>
/// .
/// </returns>
/// <exception cref="System.IO.IOException">the stream is malformed, or contains corrupt objects.
/// </exception>
public virtual PackLock Parse(ProgressMonitor receiving, ProgressMonitor resolving
    )
{
    if (receiving == null)
    {
        receiving = NullProgressMonitor.INSTANCE;
    }
    if (resolving == null)
    {
        resolving = NullProgressMonitor.INSTANCE;
    }
    if (receiving == resolving)
    {
        // One monitor covers both phases; announce two tasks up front.
        receiving.Start(2);
    }
    try
    {
        ReadPackHeader();
        // Pre-size lookup structures from the header's object count.
        entries = new PackedObjectInfo[(int)objectCount];
        baseById = new ObjectIdOwnerMap<PackParser.DeltaChain>();
        baseByPos = new LongMap<PackParser.UnresolvedDelta>();
        deferredCheckBlobs = new BlockList<PackedObjectInfo>();
        receiving.BeginTask(JGitText.Get().receivingObjects, (int)objectCount);
        try
        {
            // Phase 1: consume every object/delta from the input stream.
            for (int done = 0; done < objectCount; done++)
            {
                IndexOneObject();
                receiving.Update(1);
                if (receiving.IsCancelled())
                {
                    throw new IOException(JGitText.Get().downloadCancelled);
                }
            }
            ReadPackFooter();
            EndInput();
        }
        finally
        {
            receiving.EndTask();
        }
        if (!deferredCheckBlobs.IsEmpty())
        {
            DoDeferredCheckBlobs();
        }
        if (deltaCount > 0)
        {
            // Phase 2: resolve deltas against their base objects.
            if (resolving is BatchingProgressMonitor)
            {
                // Delay output so fast resolutions produce no noise.
                ((BatchingProgressMonitor)resolving).SetDelayStart(1000, TimeUnit.MILLISECONDS);
            }
            resolving.BeginTask(JGitText.Get().resolvingDeltas, deltaCount);
            ResolveDeltas(resolving);
            if (entryCount < objectCount)
            {
                // Bases were missing from the stream: thin pack.
                if (!IsAllowThin())
                {
                    throw new IOException(MessageFormat.Format(JGitText.Get().packHasUnresolvedDeltas
                        , Sharpen.Extensions.ValueOf(objectCount - entryCount)));
                }
                ResolveDeltasWithExternalBases(resolving);
                if (entryCount < objectCount)
                {
                    // Still unresolved even with local bases: pack is broken.
                    throw new IOException(MessageFormat.Format(JGitText.Get().packHasUnresolvedDeltas
                        , Sharpen.Extensions.ValueOf(objectCount - entryCount)));
                }
            }
            resolving.EndTask();
        }
        // Drop per-parse state so it can be garbage collected.
        packDigest = null;
        baseById = null;
        baseByPos = null;
    }
    finally
    {
        try
        {
            if (readCurs != null)
            {
                readCurs.Release();
            }
        }
        finally
        {
            readCurs = null;
        }
        try
        {
            inflater.Release();
        }
        finally
        {
            inflater = null;
        }
    }
    // This implementation acquires no pack lock.
    return null;
}
/// <summary>
/// Try to obtain an object by downloading a remote pack whose index
/// claims to contain it.
/// </summary>
/// <param name="monitor">progress feedback; checked for cancellation.</param>
/// <param name="id">the object being sought.</param>
/// <returns>true if a pack containing the object was downloaded and indexed.</returns>
/// <exception cref="NGit.Errors.TransportException"></exception>
private bool DownloadPackedObject(ProgressMonitor monitor, AnyObjectId id)
{
    // Search for the object in a remote pack whose index we have,
    // but whose pack we do not yet have.
    //
    Iterator<WalkFetchConnection.RemotePack> packItr = unfetchedPacks.Iterator();
    while (packItr.HasNext() && !monitor.IsCancelled())
    {
        WalkFetchConnection.RemotePack pack = packItr.Next();
        try
        {
            pack.OpenIndex(monitor);
        }
        catch (IOException err)
        {
            // If the index won't open its either not found or
            // its a format we don't recognize. In either case
            // we may still be able to obtain the object from
            // another source, so don't consider it a failure.
            //
            RecordError(id, err);
            packItr.Remove();
            continue;
        }
        if (monitor.IsCancelled())
        {
            // If we were cancelled while the index was opening
            // the open may have aborted. We can't search an
            // unopen index.
            //
            return false;
        }
        if (!pack.index.HasObject(id))
        {
            // Not in this pack? Try another.
            //
            continue;
        }
        // It should be in the associated pack. Download that
        // and attach it to the local repository so we can use
        // all of the contained objects.
        //
        try
        {
            pack.DownloadPack(monitor);
        }
        catch (IOException err)
        {
            // If the pack failed to download, index correctly,
            // or open in the local repository we may still be
            // able to obtain this object from another pack or
            // an alternate.
            //
            RecordError(id, err);
            continue;
        }
        finally
        {
            // If the pack was good its in the local repository
            // and Repository.hasObject(id) will succeed in the
            // future, so we do not need this data anymore. If
            // it failed the index and pack are unusable and we
            // shouldn't consult them again.
            //
            try
            {
                if (pack.tmpIdx != null)
                {
                    FileUtils.Delete(pack.tmpIdx);
                }
            }
            catch (IOException e)
            {
                throw new TransportException(e.Message, e);
            }
            packItr.Remove();
        }
        if (!AlreadyHave(id))
        {
            // What the hell? This pack claimed to have
            // the object, but after indexing we didn't
            // actually find it in the pack.
            //
            RecordError(id, new FileNotFoundException(MessageFormat.Format(JGitText.Get().objectNotFoundIn
                , id.Name, pack.packName)));
            continue;
        }
        // Complete any other objects that we can.
        //
        Iterator<ObjectId> pending = SwapFetchQueue();
        while (pending.HasNext())
        {
            ObjectId p = pending.Next();
            if (pack.index.HasObject(p))
            {
                // The freshly indexed pack satisfies this queued object too.
                pending.Remove();
                Process(p);
            }
            else
            {
                // Not covered; hand it back to the normal work queue.
                workQueue.AddItem(p);
            }
        }
        return true;
    }
    return false;
}
/// <summary>
/// Drain the work queue, downloading every queued object that is not
/// already marked complete, until the queue empties or the user cancels.
/// </summary>
/// <exception cref="NGit.Errors.TransportException"></exception>
protected internal override void DoFetch(ProgressMonitor monitor, ICollection<Ref
    > want, ICollection<ObjectId> have)
{
    MarkLocalRefsComplete(have);
    QueueWants(want);
    while (!monitor.IsCancelled() && !workQueue.IsEmpty())
    {
        ObjectId next = workQueue.RemoveFirst();
        // An object already proven COMPLETE needs no download;
        // anything else must be fetched before processing.
        bool alreadyComplete = next is RevObject && ((RevObject)next).Has(COMPLETE);
        if (!alreadyComplete)
        {
            DownloadObject(monitor, next);
        }
        Process(next);
    }
}
/// <summary>
/// Ensure this pack's index is available locally, downloading it from the
/// remote "pack/" directory when necessary.
/// </summary>
/// <param name="pm">progress monitor; polled for cancellation during download.</param>
/// <exception cref="System.IO.IOException">the index could not be fetched or parsed.</exception>
internal virtual void OpenIndex(ProgressMonitor pm)
{
    if (this.index != null)
    {
        // Already opened by an earlier call.
        return;
    }
    if (this.tmpIdx == null)
    {
        this.tmpIdx = FilePath.CreateTempFile("jgit-walk-", ".idx");
    }
    else
    {
        if (this.tmpIdx.IsFile())
        {
            try
            {
                // A previously downloaded copy may already be usable.
                this.index = PackIndex.Open(this.tmpIdx);
                return;
            }
            catch (FileNotFoundException)
            {
            }
        }
    }
    // Fall through and get the file.
    WalkRemoteObjectDatabase.FileStream s;
    s = this.connection.Open("pack/" + this.idxName);
    pm.BeginTask("Get " + Sharpen.Runtime.Substring(this.idxName, 0, 12) + "..idx", s
        .length < 0 ? ProgressMonitor.UNKNOWN : (int)(s.length / 1024));
    try
    {
        FileOutputStream fos = new FileOutputStream(this.tmpIdx);
        try
        {
            byte[] buf = new byte[2048];
            int cnt;
            // NOTE(review): assumes Java InputStream semantics via Sharpen
            // (Read returns -1 at end of stream), hence the >= 0 test.
            while (!pm.IsCancelled() && (cnt = s.@in.Read(buf)) >= 0)
            {
                fos.Write(buf, 0, cnt);
                pm.Update(cnt / 1024);
            }
        }
        finally
        {
            fos.Close();
        }
    }
    catch (IOException)
    {
        // Failed transfer leaves a useless partial file: remove and rethrow.
        // (The exception variable was unused; omitting it avoids CS0168.)
        FileUtils.Delete(this.tmpIdx);
        throw;
    }
    finally
    {
        s.@in.Close();
    }
    pm.EndTask();
    if (pm.IsCancelled())
    {
        // Cancelled mid-transfer: the file is incomplete, discard it.
        FileUtils.Delete(this.tmpIdx);
        return;
    }
    try
    {
        this.index = PackIndex.Open(this.tmpIdx);
    }
    catch (IOException)
    {
        // Downloaded index failed to parse; don't keep the bad file.
        FileUtils.Delete(this.tmpIdx);
        throw;
    }
}
// By default there is no locking.
/// <summary>Resolve all deltas, reporting per-entry progress to the monitor.</summary>
/// <param name="progress">monitor receiving delta-resolution progress.</param>
/// <exception cref="System.IO.IOException">the user cancelled the operation.</exception>
private void ResolveDeltas(ProgressMonitor progress)
{
    progress.BeginTask(JGitText.Get().resolvingDeltas, deltaCount);
    // Only iterate the entries present when resolution starts.
    int initialCount = entryCount;
    for (int pos = 0; pos < initialCount; pos++)
    {
        // Report however many entries this single resolution completed.
        int countBefore = entryCount;
        ResolveDeltas(entries[pos]);
        progress.Update(entryCount - countBefore);
        if (progress.IsCancelled())
        {
            throw new IOException(JGitText.Get().downloadCancelledDuringIndexing);
        }
    }
    progress.EndTask();
}
/// <summary>
/// Send one "old-id new-id refname" update command per ref to the remote,
/// attaching the capability list to the first command sent.
/// </summary>
/// <param name="refUpdates">updates to transmit.</param>
/// <param name="monitor">checked for cancellation before ending the stream.</param>
/// <exception cref="System.IO.IOException"></exception>
private void WriteCommands(ICollection<RemoteRefUpdate> refUpdates, ProgressMonitor
     monitor)
{
    string capabilities = EnableCapabilities(monitor);
    foreach (RemoteRefUpdate rru in refUpdates)
    {
        if (!capableDeleteRefs && rru.IsDelete())
        {
            // Peer cannot delete refs; reject locally instead of sending.
            rru.SetStatus(RemoteRefUpdate.Status.REJECTED_NODELETE);
            continue;
        }
        StringBuilder sb = new StringBuilder();
        Ref advertisedRef = GetRef(rru.GetRemoteName());
        // Old value is what the remote advertised, or zero-id when absent.
        ObjectId oldId = (advertisedRef == null ? ObjectId.ZeroId : advertisedRef.GetObjectId
            ());
        sb.Append(oldId.Name);
        sb.Append(' ');
        sb.Append(rru.GetNewObjectId().Name);
        sb.Append(' ');
        sb.Append(rru.GetRemoteName());
        if (!sentCommand)
        {
            // Capability list rides along on the first command only.
            sentCommand = true;
            sb.Append(capabilities);
        }
        pckOut.WriteString(sb.ToString());
        rru.SetStatus(RemoteRefUpdate.Status.AWAITING_REPORT);
        if (!rru.IsDelete())
        {
            // At least one non-delete command means a pack must follow.
            writePack = true;
        }
    }
    if (monitor.IsCancelled())
    {
        throw new TransportException(uri, JGitText.Get().pushCancelled);
    }
    pckOut.End();
    outNeedsEnd = false;
}
/// <summary>
/// Complete a thin pack in place: append each locally available base object
/// to the end of the pack file, resolve the deltas depending on it, then
/// rewrite the pack header and footer to match the new contents.
/// </summary>
/// <param name="progress">monitor polled for cancellation after each base.</param>
/// <exception cref="System.IO.IOException"></exception>
private void FixThinPack(ProgressMonitor progress)
{
    GrowEntries();
    if (needBaseObjectIds)
    {
        baseObjectIds = new ObjectIdSubclassMap<ObjectId>();
    }
    packDigest.Reset();
    // The received pack ends with a 20-byte SHA-1 footer; appended bases
    // start where the object data ended, just before that footer.
    originalEOF = packOut.Length() - 20;
    Deflater def = new Deflater(Deflater.DEFAULT_COMPRESSION, false);
    // Bases not found locally; only fatal if a delta still needs them.
    IList<IndexPack.DeltaChain> missing = new AList<IndexPack.DeltaChain>(64);
    long end = originalEOF;
    foreach (IndexPack.DeltaChain baseId in baseById)
    {
        if (baseId.head == null)
        {
            // No unresolved children hang off this base; skip it.
            continue;
        }
        if (needBaseObjectIds)
        {
            baseObjectIds.Add(baseId);
        }
        ObjectLoader ldr;
        try
        {
            ldr = readCurs.Open(baseId);
        }
        catch (MissingObjectException)
        {
            // Defer: reported as fatal later only if still unresolved.
            missing.AddItem(baseId);
            continue;
        }
        byte[] data = ldr.GetCachedBytes(int.MaxValue);
        int typeCode = ldr.GetType();
        PackedObjectInfo oe;
        // Append the whole (non-delta) base at the current end of file,
        // recording its CRC and offset as a new pack entry.
        crc.Reset();
        packOut.Seek(end);
        WriteWhole(def, typeCode, data);
        oe = new PackedObjectInfo(end, (int)crc.GetValue(), baseId);
        entries[entryCount++] = oe;
        end = packOut.GetFilePointer();
        ResolveChildDeltas(oe.GetOffset(), typeCode, data, oe);
        if (progress.IsCancelled())
        {
            throw new IOException(JGitText.Get().downloadCancelledDuringIndexing);
        }
    }
    def.Finish();
    // A base that is still missing AND still has unresolved children
    // means the thin pack cannot be completed.
    foreach (IndexPack.DeltaChain @base in missing)
    {
        if (@base.head != null)
        {
            throw new MissingObjectException(@base, "delta base");
        }
    }
    if (end - originalEOF < 20)
    {
        // Ugly corner case; if what we appended on to complete deltas
        // doesn't completely cover the SHA-1 we have to truncate off
        // we need to shorten the file, otherwise we will include part
        // of the old footer as object content.
        packOut.SetLength(end);
    }
    FixHeaderFooter(packcsum, packDigest.Digest());
}
/// <summary>Consume data from the input stream until the packfile is indexed.</summary>
/// <remarks>
/// Consume data from the input stream until the packfile is indexed.
/// Phase 1 receives objects; phase 2 resolves deltas (fixing thin packs when
/// allowed). On any IOException the destination pack and index are deleted.
/// </remarks>
/// <param name="progress">progress feedback</param>
/// <exception cref="System.IO.IOException">System.IO.IOException</exception>
public virtual void Index(ProgressMonitor progress)
{
    // Two tasks: receiving objects, then resolving deltas.
    progress.Start(2);
    try
    {
        try
        {
            ReadPackHeader();
            // Pre-size lookup structures from the header's object count.
            entries = new PackedObjectInfo[(int)objectCount];
            baseById = new ObjectIdSubclassMap<IndexPack.DeltaChain>();
            baseByPos = new LongMap<IndexPack.UnresolvedDelta>();
            deferredCheckBlobs = new AList<PackedObjectInfo>();
            progress.BeginTask(JGitText.Get().receivingObjects, (int)objectCount);
            // Phase 1: consume every object/delta from the input stream.
            for (int done = 0; done < objectCount; done++)
            {
                IndexOneObject();
                progress.Update(1);
                if (progress.IsCancelled())
                {
                    throw new IOException(JGitText.Get().downloadCancelled);
                }
            }
            ReadPackFooter();
            EndInput();
            if (!deferredCheckBlobs.IsEmpty())
            {
                DoDeferredCheckBlobs();
            }
            progress.EndTask();
            if (deltaCount > 0)
            {
                // Phase 2: resolve deltas; requires a writable pack output.
                if (packOut == null)
                {
                    throw new IOException(JGitText.Get().needPackOut);
                }
                ResolveDeltas(progress);
                if (entryCount < objectCount)
                {
                    // Bases were missing from the stream: thin pack.
                    if (!fixThin)
                    {
                        throw new IOException(MessageFormat.Format(JGitText.Get().packHasUnresolvedDeltas
                            , (objectCount - entryCount)));
                    }
                    FixThinPack(progress);
                }
            }
            if (packOut != null && (keepEmpty || entryCount > 0))
            {
                // Flush pack contents to stable storage before writing the index.
                packOut.GetChannel().Force(true);
            }
            // Drop per-run state so it can be garbage collected.
            packDigest = null;
            baseById = null;
            baseByPos = null;
            if (dstIdx != null && (keepEmpty || entryCount > 0))
            {
                WriteIdx();
            }
        }
        finally
        {
            // Release resources in guaranteed order, even on failure.
            try
            {
                if (readCurs != null)
                {
                    readCurs.Release();
                }
            }
            finally
            {
                readCurs = null;
            }
            try
            {
                inflater.Release();
            }
            finally
            {
                inflater = null;
                objectDatabase.Close();
            }
            progress.EndTask();
            if (packOut != null)
            {
                packOut.Close();
            }
        }
        if (keepEmpty || entryCount > 0)
        {
            // Finished files must not be modified further.
            if (dstPack != null)
            {
                dstPack.SetReadOnly();
            }
            if (dstIdx != null)
            {
                dstIdx.SetReadOnly();
            }
        }
    }
    catch (IOException)
    {
        // Indexing failed: remove the partial outputs, then rethrow.
        // (The exception variable was unused; omitting it avoids CS0168.)
        if (dstPack != null)
        {
            FileUtils.Delete(dstPack);
        }
        if (dstIdx != null)
        {
            FileUtils.Delete(dstIdx);
        }
        throw;
    }
}