/// <summary>
/// Find the list of branches a given commit is reachable from when following
/// parents.
/// <p>
/// In order to improve performance this method assumes clock skew among
/// committers is never larger than 24 hours.
/// </summary>
/// <param name="commit">the commit we are looking at</param>
/// <param name="revWalk">The RevWalk to be used.</param>
/// <param name="refs">the set of branches we want to see reachability from</param>
/// <returns>the list of branches a given commit is reachable from</returns>
/// <exception cref="NGit.Errors.MissingObjectException">NGit.Errors.MissingObjectException
/// </exception>
/// <exception cref="NGit.Errors.IncorrectObjectTypeException">NGit.Errors.IncorrectObjectTypeException
/// </exception>
/// <exception cref="System.IO.IOException">System.IO.IOException</exception>
public static IList<Ref> FindBranchesReachableFrom(RevCommit commit, RevWalk revWalk
	, ICollection<Ref> refs)
{
	IList<Ref> reachableFrom = new AList<Ref>();
	// Seed the walk with the parents of the target commit: a search from a
	// branch tip can stop as soon as it reaches any of them, which is the
	// common case, so optimize for it.
	revWalk.MarkStart(Arrays.AsList(commit.Parents));
	// Commits already proven NOT to reach the target by earlier failed
	// searches; lets later searches stop early instead of re-walking.
	ObjectIdSubclassMap<ObjectId> knownUnreachable = new ObjectIdSubclassMap<ObjectId>();
	int clockSkew = 24 * 3600;
	// one day of allowed clock skew, in seconds
	foreach (Ref branch in refs)
	{
		RevObject tipObject = revWalk.ParseAny(branch.GetObjectId());
		RevCommit tip = tipObject as RevCommit;
		if (tip == null)
		{
			// The tip is not a commit; the branch cannot contain the target.
			continue;
		}
		// If the target is on this branch, the tip must be newer than the
		// target commit, modulo the allowed clock skew.
		if (tip.CommitTime + clockSkew < commit.CommitTime)
		{
			continue;
		}
		// Commits visited during this search; they become part of the
		// cut-off set only if the search fails (proven unreachable).
		IList<ObjectId> visited = new AList<ObjectId>(knownUnreachable.Size());
		revWalk.ResetRetain();
		revWalk.MarkStart(tip);
		bool reached = false;
		RevCommit c;
		while ((c = revWalk.Next()) != null)
		{
			if (AnyObjectId.Equals(c, commit))
			{
				reached = true;
				break;
			}
			if (knownUnreachable.Contains(c))
			{
				// A previous failed search already covered this history.
				break;
			}
			visited.AddItem(c.ToObjectId());
		}
		if (reached)
		{
			reachableFrom.AddItem(branch);
		}
		else
		{
			foreach (ObjectId unreachable in visited)
			{
				knownUnreachable.AddIfAbsent(unreachable);
			}
		}
	}
	return reachableFrom;
}
/// <summary>Update the type of this command by checking for fast-forward.</summary>
/// <remarks>
/// Update the type of this command by checking for fast-forward.
/// <p>
/// If the command's current type is UPDATE, a merge test is performed with
/// the supplied RevWalk to determine whether
/// <see cref="GetOldId()">GetOldId()</see>
/// is fully merged into
/// <see cref="GetNewId()">GetNewId()</see>
/// . If some commits are not merged the update type is changed to
/// <see cref="Type.UPDATE_NONFASTFORWARD">Type.UPDATE_NONFASTFORWARD</see>
/// .
/// </remarks>
/// <param name="walk">
/// an instance to perform the merge test with. The caller must
/// allocate and release this object.
/// </param>
/// <exception cref="System.IO.IOException">
/// either oldId or newId is not accessible in the repository
/// used by the RevWalk. This usually indicates data corruption,
/// and the command cannot be processed.
/// </exception>
public virtual void UpdateType(RevWalk walk)
{
	if (typeIsCorrect)
	{
		// The merge test was already performed; the answer cannot change.
		return;
	}
	if (type == ReceiveCommand.Type.UPDATE && !AnyObjectId.Equals(oldId, newId))
	{
		RevObject oldObj = walk.ParseAny(oldId);
		RevObject newObj = walk.ParseAny(newId);
		// A fast-forward requires both ends to be commits and the old
		// commit to already be merged into the new one.
		bool fastForward = oldObj is RevCommit && newObj is RevCommit
			&& walk.IsMergedInto((RevCommit)oldObj, (RevCommit)newObj);
		if (!fastForward)
		{
			SetType(ReceiveCommand.Type.UPDATE_NONFASTFORWARD);
		}
	}
	typeIsCorrect = true;
}
/// <summary>
/// Parse an object by id, returning null when the id is null or the object
/// is missing from the repository.
/// </summary>
/// <exception cref="System.IO.IOException"></exception>
private static RevObject SafeParse(RevWalk rw, AnyObjectId id)
{
	if (id == null)
	{
		return null;
	}
	try
	{
		return rw.ParseAny(id);
	}
	catch (MissingObjectException)
	{
		// Missing objects are expected here, e.g. when forcing deletion of
		// a branch whose target has been pruned from the object database
		// by freak corruption accidents (it happens with 'git new-work-dir').
		return null;
	}
}
/// <summary>Merge together two or more tree-ish objects.</summary>
/// <remarks>
/// Merge together two or more tree-ish objects.
/// <p>
/// Any tree-ish may be supplied as inputs. Commits and/or tags pointing at
/// trees or commits may be passed as input objects.
/// </remarks>
/// <param name="tips">
/// source trees to be combined together. The merge base is not
/// included in this set.
/// </param>
/// <returns>
/// true if the merge was completed without conflicts; false if the
/// merge strategy cannot handle this merge or there were conflicts
/// preventing it from automatically resolving all paths.
/// </returns>
/// <exception cref="NGit.Errors.IncorrectObjectTypeException">
/// one of the input objects is not a commit, but the strategy
/// requires it to be a commit.
/// </exception>
/// <exception cref="System.IO.IOException">
/// one or more sources could not be read, or outputs could not
/// be written to the Repository.
/// </exception>
public virtual bool Merge(params AnyObjectId[] tips)
{
	// Resolve every input id to its object.
	sourceObjects = new RevObject[tips.Length];
	for (int idx = 0; idx < tips.Length; idx++)
	{
		sourceObjects[idx] = walk.ParseAny(tips[idx]);
	}
	// Record which inputs are commits; non-commits stay null so the
	// concrete strategy can decide whether it supports them.
	sourceCommits = new RevCommit[sourceObjects.Length];
	for (int idx = 0; idx < sourceObjects.Length; idx++)
	{
		try
		{
			sourceCommits[idx] = walk.ParseCommit(sourceObjects[idx]);
		}
		catch (IncorrectObjectTypeException)
		{
			sourceCommits[idx] = null;
		}
	}
	// Every input must at least peel to a tree.
	sourceTrees = new RevTree[sourceObjects.Length];
	for (int idx = 0; idx < sourceObjects.Length; idx++)
	{
		sourceTrees[idx] = walk.ParseTree(sourceObjects[idx]);
	}
	try
	{
		bool ok = MergeImpl();
		if (ok && inserter != null)
		{
			// Persist any objects the merge created.
			inserter.Flush();
		}
		return ok;
	}
	finally
	{
		if (inserter != null)
		{
			inserter.Release();
		}
		reader.Release();
	}
}
public virtual void TestWritePack3()
{
	config.SetReuseDeltas(false);
	// The pack is expected to contain exactly these objects, in this order.
	ObjectId[] forcedOrder = new ObjectId[] {
		ObjectId.FromString("82c6b885ff600be425b4ea96dee75dca255b69e7"),
		ObjectId.FromString("c59759f143fb1fe21c197981df75a7ee00290799"),
		ObjectId.FromString("aabf2ffaec9b497f0950352b3e582d73035c2035"),
		ObjectId.FromString("902d5476fa249b7abc9d84c611577a81381f0327"),
		ObjectId.FromString("5b6e7c66c276e7610d4a73c70ec1a1f7c1003259"),
		ObjectId.FromString("6ff87c4664981e4397625791c8ea3bbb5f2279a3") };
	RevWalk revWalk = new RevWalk(db);
	RevObject[] parsedOrder = new RevObject[forcedOrder.Length];
	for (int idx = 0; idx < forcedOrder.Length; idx++)
	{
		parsedOrder[idx] = revWalk.ParseAny(forcedOrder[idx]);
	}
	CreateVerifyOpenPack(Arrays.AsList(parsedOrder).Iterator());
	NUnit.Framework.Assert.AreEqual(forcedOrder.Length, writer.GetObjectsNumber());
	VerifyObjectsOrder(forcedOrder);
	// The pack name is a hash of its sorted contents, so it must match
	// this known value for the fixed object set above.
	NUnit.Framework.Assert.AreEqual("ed3f96b8327c7c66b0f8f70056129f0769323d86", writer
		.ComputeName().Name);
}
/// <summary>
/// Peel the given leaf ref, producing a PeeledTag when it points at an
/// annotated tag and a PeeledNonTag otherwise.
/// </summary>
/// <exception cref="NGit.Errors.MissingObjectException"></exception>
/// <exception cref="System.IO.IOException"></exception>
private ObjectIdRef DoPeel(Ref leaf)
{
	RevWalk walk = new RevWalk(GetRepository());
	try
	{
		RevObject target = walk.ParseAny(leaf.GetObjectId());
		if (target is RevTag)
		{
			// Annotated tag: also record the object it ultimately points at.
			return new ObjectIdRef.PeeledTag(leaf.GetStorage(), leaf.GetName(),
				leaf.GetObjectId(), walk.Peel(target).Copy());
		}
		// Anything else peels to itself; mark it peeled with no separate
		// peeled object id.
		return new ObjectIdRef.PeeledNonTag(leaf.GetStorage(), leaf.GetName(),
			leaf.GetObjectId());
	}
	finally
	{
		walk.Release();
	}
}
/// <summary>Create (or force-reset) the branch and return its ref.</summary>
/// <exception cref="NGit.Api.Errors.RefAlreadyExistsException">
/// when trying to create (without force) a branch with a name
/// that already exists
/// </exception>
/// <exception cref="NGit.Api.Errors.RefNotFoundException">if the start point can not be found
/// </exception>
/// <exception cref="NGit.Api.Errors.InvalidRefNameException">
/// if the provided name is <code>null</code> or otherwise
/// invalid
/// </exception>
/// <returns>the newly created branch</returns>
/// <exception cref="NGit.Api.Errors.GitAPIException"></exception>
public override Ref Call()
{
	CheckCallable();
	ProcessOptions();
	RevWalk revWalk = new RevWalk(repo);
	try
	{
		Ref refToCheck = repo.GetRef(name);
		bool exists = refToCheck != null && refToCheck.GetName().StartsWith(Constants.R_HEADS);
		if (!force && exists)
		{
			throw new RefAlreadyExistsException(MessageFormat.Format(JGitText.Get().refAlreadyExists1,
				name));
		}
		ObjectId startAt = GetStartPoint();
		string startPointFullName = null;
		if (startPoint != null)
		{
			Ref baseRef = repo.GetRef(startPoint);
			if (baseRef != null)
			{
				startPointFullName = baseRef.GetName();
			}
		}
		// determine whether we are based on a commit,
		// a branch, or a tag and compose the reflog message
		string refLogMessage;
		string baseBranch = string.Empty;
		if (startPointFullName == null)
		{
			// start point does not name a ref: it is a commit
			string baseCommit;
			if (startCommit != null)
			{
				baseCommit = startCommit.GetShortMessage();
			}
			else
			{
				RevCommit commit = revWalk.ParseCommit(repo.Resolve(startPoint));
				baseCommit = commit.GetShortMessage();
			}
			if (exists)
			{
				refLogMessage = "branch: Reset start-point to commit " + baseCommit;
			}
			else
			{
				refLogMessage = "branch: Created from commit " + baseCommit;
			}
		}
		else if (startPointFullName.StartsWith(Constants.R_HEADS) || startPointFullName.StartsWith(Constants.R_REMOTES))
		{
			// start point is a local or remote-tracking branch
			baseBranch = startPointFullName;
			if (exists)
			{
				refLogMessage = "branch: Reset start-point to branch " + startPointFullName;
			}
			else
			{
				// TODO
				refLogMessage = "branch: Created from branch " + baseBranch;
			}
		}
		else
		{
			// start point is a tag: peel down to the tagged object
			startAt = revWalk.Peel(revWalk.ParseAny(startAt));
			if (exists)
			{
				refLogMessage = "branch: Reset start-point to tag " + startPointFullName;
			}
			else
			{
				refLogMessage = "branch: Created from tag " + startPointFullName;
			}
		}
		RefUpdate updateRef = repo.UpdateRef(Constants.R_HEADS + name);
		updateRef.SetNewObjectId(startAt);
		updateRef.SetRefLogMessage(refLogMessage, false);
		RefUpdate.Result updateResult;
		if (exists && force)
		{
			updateResult = updateRef.ForceUpdate();
		}
		else
		{
			updateResult = updateRef.Update();
		}
		SetCallable(false);
		bool ok = false;
		switch (updateResult)
		{
			case RefUpdate.Result.NEW:
			{
				ok = !exists;
				break;
			}

			case RefUpdate.Result.NO_CHANGE:
			case RefUpdate.Result.FAST_FORWARD:
			case RefUpdate.Result.FORCED:
			{
				ok = exists;
				break;
			}

			default:
			{
				// FIX: removed an unreachable duplicated "break" statement
				// (dead code) that was generated here.
				break;
			}
		}
		if (!ok)
		{
			throw new JGitInternalException(MessageFormat.Format(JGitText.Get().createBranchUnexpectedResult,
				updateResult.ToString()));
		}
		Ref result = repo.GetRef(name);
		if (result == null)
		{
			throw new JGitInternalException(JGitText.Get().createBranchFailedUnknownReason);
		}
		if (baseBranch.Length == 0)
		{
			return result;
		}
		// if we are based on another branch, see
		// if we need to configure upstream configuration: first check
		// whether the setting was done explicitly
		bool doConfigure;
		if (upstreamMode == CreateBranchCommand.SetupUpstreamMode.SET_UPSTREAM || upstreamMode
			== CreateBranchCommand.SetupUpstreamMode.TRACK)
		{
			// explicitly set to configure
			doConfigure = true;
		}
		else if (upstreamMode == CreateBranchCommand.SetupUpstreamMode.NOTRACK)
		{
			// explicitly set to not configure
			doConfigure = false;
		}
		else
		{
			// if there was no explicit setting, check the configuration
			string autosetupflag = repo.GetConfig().GetString(ConfigConstants.CONFIG_BRANCH_SECTION,
				null, ConfigConstants.CONFIG_KEY_AUTOSETUPMERGE);
			if ("false".Equals(autosetupflag))
			{
				doConfigure = false;
			}
			else if ("always".Equals(autosetupflag))
			{
				doConfigure = true;
			}
			else
			{
				// in this case, the default is to configure
				// only in case the base branch was a remote branch
				doConfigure = baseBranch.StartsWith(Constants.R_REMOTES);
			}
		}
		if (doConfigure)
		{
			StoredConfig config = repo.GetConfig();
			string[] tokens = baseBranch.RegexSplit("/", 4);
			bool isRemote = tokens[1].Equals("remotes");
			if (isRemote)
			{
				// refs/remotes/<remote name>/<branch>
				string remoteName = tokens[2];
				string branchName = tokens[3];
				config.SetString(ConfigConstants.CONFIG_BRANCH_SECTION, name, ConfigConstants.CONFIG_KEY_REMOTE,
					remoteName);
				config.SetString(ConfigConstants.CONFIG_BRANCH_SECTION, name, ConfigConstants.CONFIG_KEY_MERGE,
					Constants.R_HEADS + branchName);
			}
			else
			{
				// the base branch is local: use "." as the remote
				config.SetString(ConfigConstants.CONFIG_BRANCH_SECTION, name, ConfigConstants.CONFIG_KEY_REMOTE,
					".");
				config.SetString(ConfigConstants.CONFIG_BRANCH_SECTION, name, ConfigConstants.CONFIG_KEY_MERGE,
					baseBranch);
			}
			config.Save();
		}
		return result;
	}
	catch (IOException ioe)
	{
		throw new JGitInternalException(ioe.Message, ioe);
	}
	finally
	{
		revWalk.Release();
	}
}
/// <summary>Update the type of this command by checking for fast-forward.</summary>
/// <remarks>
/// Update the type of this command by checking for fast-forward.
/// <p>
/// If the command's current type is UPDATE, a merge test will be performed
/// using the supplied RevWalk to determine if
/// <see cref="GetOldId()">GetOldId()</see>
/// is fully
/// merged into
/// <see cref="GetNewId()">GetNewId()</see>
/// . If some commits are not merged the
/// update type is changed to
/// <see cref="Type.UPDATE_NONFASTFORWARD">Type.UPDATE_NONFASTFORWARD</see>
/// .
/// </remarks>
/// <param name="walk">
/// an instance to perform the merge test with. The caller must
/// allocate and release this object.
/// </param>
/// <exception cref="System.IO.IOException">
/// either oldId or newId is not accessible in the repository
/// used by the RevWalk. This usually indicates data corruption,
/// and the command cannot be processed.
/// </exception>
public virtual void UpdateType(RevWalk walk)
{
	if (typeIsCorrect)
	{
		// The merge test was already performed once; skip the repeat.
		return;
	}
	if (type == ReceiveCommand.Type.UPDATE && !AnyObjectId.Equals(oldId, newId))
	{
		RevObject o = walk.ParseAny(oldId);
		RevObject n = walk.ParseAny(newId);
		// Not a fast-forward unless both ends are commits and the old
		// commit is already merged into the new commit.
		if (!(o is RevCommit) || !(n is RevCommit) || !walk.IsMergedInto((RevCommit)o, (RevCommit
			)n))
		{
			SetType(ReceiveCommand.Type.UPDATE_NONFASTFORWARD);
		}
	}
	typeIsCorrect = true;
}
/// <summary>
/// Parse any object by id, returning null when the id is null or the
/// object no longer exists in the repository.
/// </summary>
/// <exception cref="System.IO.IOException"></exception>
private static RevObject SafeParse(RevWalk rw, AnyObjectId id)
{
	try
	{
		return id != null ? rw.ParseAny(id) : null;
	}
	catch (MissingObjectException)
	{
		// We can expect some objects to be missing, like if we are
		// trying to force a deletion of a branch and the object it
		// points to has been pruned from the database due to freak
		// corruption accidents (it happens with 'git new-work-dir').
		//
		return null;
	}
}
/// <summary>Send the requested objects to the client as a pack stream.</summary>
/// <remarks>
/// Send the requested objects to the client as a pack stream, optionally
/// multiplexing pack data and progress messages over side-band channels.
/// </remarks>
/// <exception cref="System.IO.IOException"></exception>
private void SendPack()
{
	bool sideband = options.Contains(OPTION_SIDE_BAND) || options.Contains(OPTION_SIDE_BAND_64K);
	ProgressMonitor pm = NullProgressMonitor.INSTANCE;
	OutputStream packOut = rawOut;
	SideBandOutputStream msgOut = null;
	if (sideband)
	{
		// Multiplex the output: pack data on the data channel, progress
		// messages on the progress channel (unless suppressed).
		int bufsz = SideBandOutputStream.SMALL_BUF;
		if (options.Contains(OPTION_SIDE_BAND_64K))
		{
			bufsz = SideBandOutputStream.MAX_BUF;
		}
		packOut = new SideBandOutputStream(SideBandOutputStream.CH_DATA, bufsz, rawOut);
		if (!options.Contains(OPTION_NO_PROGRESS))
		{
			msgOut = new SideBandOutputStream(SideBandOutputStream.CH_PROGRESS, bufsz, rawOut);
			pm = new SideBandProgressMonitor(msgOut);
		}
	}
	PackConfig cfg = packConfig;
	if (cfg == null)
	{
		cfg = new PackConfig(db);
	}
	PackWriter pw = new PackWriter(cfg, walk.GetObjectReader());
	try
	{
		pw.SetUseCachedPacks(true);
		pw.SetReuseDeltaCommits(true);
		pw.SetDeltaBaseAsOffset(options.Contains(OPTION_OFS_DELTA));
		pw.SetThin(options.Contains(OPTION_THIN_PACK));
		pw.SetReuseValidatingObjects(false);
		if (commonBase.IsEmpty())
		{
			// Initial fetch (no common base): suggest tag targets so
			// annotated tags can delta against what they point at.
			ICollection<ObjectId> tagTargets = new HashSet<ObjectId>();
			foreach (Ref @ref in refs.Values)
			{
				if (@ref.GetPeeledObjectId() != null)
				{
					tagTargets.AddItem(@ref.GetPeeledObjectId());
				}
				else if (@ref.GetObjectId() != null && @ref.GetName().StartsWith(Constants.R_HEADS))
				{
					tagTargets.AddItem(@ref.GetObjectId());
				}
			}
			pw.SetTagTargets(tagTargets);
		}
		RevWalk rw = walk;
		if (wantAll.IsEmpty())
		{
			pw.PreparePack(pm, wantIds, commonBase);
		}
		else
		{
			walk.Reset();
			ObjectWalk ow = walk.ToObjectWalkWithSameObjects();
			pw.PreparePack(pm, ow, wantAll, commonBase);
			rw = ow;
		}
		if (options.Contains(OPTION_INCLUDE_TAG))
		{
			foreach (Ref vref in refs.Values)
			{
				Ref @ref = vref;
				ObjectId objectId = @ref.GetObjectId();
				// If the object was already requested, skip it.
				if (wantAll.IsEmpty())
				{
					if (wantIds.Contains(objectId))
					{
						continue;
					}
				}
				else
				{
					RevObject obj = rw.LookupOrNull(objectId);
					if (obj != null && obj.Has(WANT))
					{
						continue;
					}
				}
				// FIX: this guard was a garbled token in the source
				// ("[email protected]..."); restored per upstream JGit — peel the ref
				// before reading its peeled object id.
				if (!@ref.IsPeeled())
				{
					@ref = db.Peel(@ref);
				}
				ObjectId peeledId = @ref.GetPeeledObjectId();
				if (peeledId == null)
				{
					continue;
				}
				objectId = @ref.GetObjectId();
				// Include the tag only when its target goes into the pack
				// and the tag itself is not already included.
				if (pw.WillInclude(peeledId) && !pw.WillInclude(objectId))
				{
					pw.AddObject(rw.ParseAny(objectId));
				}
			}
		}
		pw.WritePack(pm, NullProgressMonitor.INSTANCE, packOut);
		statistics = pw.GetStatistics();
		if (msgOut != null)
		{
			string msg = pw.GetStatistics().GetMessage() + '\n';
			msgOut.Write(Constants.Encode(msg));
			msgOut.Flush();
		}
	}
	finally
	{
		pw.Release();
	}
	if (sideband)
	{
		pckOut.End();
	}
	if (logger != null && statistics != null)
	{
		logger.OnPackStatistics(statistics);
	}
}
/// <summary>
/// Process one batch of object ids the client claims to have, writing ACK
/// (or ERR) responses and recording negotiation state via walker flags.
/// </summary>
/// <param name="peerHas">ids received in "have" lines; cleared before returning</param>
/// <param name="last">the most recently acknowledged common object, if any</param>
/// <returns>the last object found to be common with the peer</returns>
/// <exception cref="System.IO.IOException"></exception>
private ObjectId ProcessHaveLines(IList<ObjectId> peerHas, ObjectId last)
{
	if (peerHas.IsEmpty())
	{
		return(last);
	}
	IList<ObjectId> toParse = peerHas;
	HashSet<ObjectId> peerHasSet = null;
	bool needMissing = false;
	if (wantAll.IsEmpty() && !wantIds.IsEmpty())
	{
		// We have not yet parsed the want list. Parse it now.
		peerHasSet = new HashSet<ObjectId>(peerHas);
		int cnt = wantIds.Count + peerHasSet.Count;
		toParse = new AList<ObjectId>(cnt);
		Sharpen.Collections.AddAll(toParse, wantIds);
		Sharpen.Collections.AddAll(toParse, peerHasSet);
		needMissing = true;
	}
	AsyncRevObjectQueue q = walk.ParseAny(toParse.AsIterable(), needMissing);
	try
	{
		for (; ;)
		{
			RevObject obj;
			try
			{
				obj = q.Next();
			}
			catch (MissingObjectException notFound)
			{
				// A missing "want" is a protocol error; a missing "have"
				// is simply not common with us and can be ignored.
				ObjectId id = notFound.GetObjectId();
				if (wantIds.Contains(id))
				{
					string msg = MessageFormat.Format(JGitText.Get().wantNotValid, id.Name);
					pckOut.WriteString("ERR " + msg);
					throw new PackProtocolException(msg, notFound);
				}
				continue;
			}
			if (obj == null)
			{
				break;
			}
			// If the object is still found in wantIds, the want
			// list wasn't parsed earlier, and was done in this batch.
			//
			if (wantIds.Remove(obj))
			{
				if (!advertised.Contains(obj))
				{
					// The client asked for an object we never advertised.
					string msg = MessageFormat.Format(JGitText.Get().wantNotValid, obj.Name);
					pckOut.WriteString("ERR " + msg);
					throw new PackProtocolException(msg);
				}
				if (!obj.Has(WANT))
				{
					obj.Add(WANT);
					wantAll.AddItem(obj);
				}
				if (!(obj is RevCommit))
				{
					obj.Add(SATISFIED);
				}
				if (obj is RevTag)
				{
					// An annotated tag implies wanting the commit it peels to.
					RevObject target = walk.Peel(obj);
					if (target is RevCommit)
					{
						if (!target.Has(WANT))
						{
							target.Add(WANT);
							wantAll.AddItem(target);
						}
					}
				}
				if (!peerHasSet.Contains(obj))
				{
					// This id came only from the want list, not a "have".
					continue;
				}
			}
			last = obj;
			if (obj is RevCommit)
			{
				// Track the oldest commit time the peer claims to have.
				RevCommit c = (RevCommit)obj;
				if (oldestTime == 0 || c.CommitTime < oldestTime)
				{
					oldestTime = c.CommitTime;
				}
			}
			if (obj.Has(PEER_HAS))
			{
				// Already processed in a previous batch.
				continue;
			}
			obj.Add(PEER_HAS);
			if (obj is RevCommit)
			{
				((RevCommit)obj).Carry(PEER_HAS);
			}
			AddCommonBase(obj);
			switch (multiAck)
			{
				case BasePackFetchConnection.MultiAck.OFF:
				{
					// If both sides have the same object; let the client know.
					//
					if (commonBase.Count == 1)
					{
						pckOut.WriteString("ACK " + obj.Name + "\n");
					}
					break;
				}

				case BasePackFetchConnection.MultiAck.CONTINUE:
				{
					pckOut.WriteString("ACK " + obj.Name + " continue\n");
					break;
				}

				case BasePackFetchConnection.MultiAck.DETAILED:
				{
					pckOut.WriteString("ACK " + obj.Name + " common\n");
					break;
				}
			}
		}
	}
	finally
	{
		q.Release();
	}
	// If we don't have one of the objects but we're also willing to
	// create a pack at this point, let the client know so it stops
	// telling us about its history.
	//
	bool didOkToGiveUp = false;
	for (int i = peerHas.Count - 1; i >= 0; i--)
	{
		ObjectId id = peerHas[i];
		if (walk.LookupOrNull(id) == null)
		{
			didOkToGiveUp = true;
			if (OkToGiveUp())
			{
				switch (multiAck)
				{
					case BasePackFetchConnection.MultiAck.OFF:
					{
						break;
					}

					case BasePackFetchConnection.MultiAck.CONTINUE:
					{
						pckOut.WriteString("ACK " + id.Name + " continue\n");
						break;
					}

					case BasePackFetchConnection.MultiAck.DETAILED:
					{
						pckOut.WriteString("ACK " + id.Name + " ready\n");
						break;
					}
				}
			}
			break;
		}
	}
	if (multiAck == BasePackFetchConnection.MultiAck.DETAILED && !didOkToGiveUp && OkToGiveUp
		())
	{
		ObjectId id = peerHas[peerHas.Count - 1];
		pckOut.WriteString("ACK " + id.Name + " ready\n");
	}
	peerHas.Clear();
	return(last);
}
/// <summary>Send the requested objects to the client as a pack stream.</summary>
/// <remarks>
/// Send the requested objects to the client as a pack stream. Verifies the
/// request stream was fully consumed (non-bidirectional pipes), runs the
/// pre-upload hook, and optionally multiplexes pack data and progress
/// messages over side-band channels.
/// </remarks>
/// <exception cref="System.IO.IOException"></exception>
private void SendPack()
{
	bool sideband = options.Contains(OPTION_SIDE_BAND) || options.Contains(OPTION_SIDE_BAND_64K);
	if (!biDirectionalPipe)
	{
		// Ensure the request was fully consumed. Any remaining input must
		// be a protocol error. If we aren't at EOF the implementation is broken.
		int eof = rawIn.Read();
		if (0 <= eof)
		{
			throw new CorruptObjectException(MessageFormat.Format(JGitText.Get().expectedEOFReceived,
				"\\x" + Sharpen.Extensions.ToHexString(eof)));
		}
	}
	ProgressMonitor pm = NullProgressMonitor.INSTANCE;
	OutputStream packOut = rawOut;
	SideBandOutputStream msgOut = null;
	if (sideband)
	{
		// Multiplex the output: pack data on the data channel, progress
		// messages on the progress channel (unless suppressed).
		int bufsz = SideBandOutputStream.SMALL_BUF;
		if (options.Contains(OPTION_SIDE_BAND_64K))
		{
			bufsz = SideBandOutputStream.MAX_BUF;
		}
		packOut = new SideBandOutputStream(SideBandOutputStream.CH_DATA, bufsz, rawOut);
		if (!options.Contains(OPTION_NO_PROGRESS))
		{
			msgOut = new SideBandOutputStream(SideBandOutputStream.CH_PROGRESS, bufsz, rawOut);
			pm = new SideBandProgressMonitor(msgOut);
		}
	}
	try
	{
		// Give the hook a chance to veto the upload before any pack data
		// is generated.
		if (wantAll.IsEmpty())
		{
			preUploadHook.OnSendPack(this, wantIds, commonBase);
		}
		else
		{
			preUploadHook.OnSendPack(this, wantAll, commonBase);
		}
	}
	catch (UploadPackMayNotContinueException noPack)
	{
		// Relay the refusal message over the error channel when possible,
		// then propagate with the original stack trace.
		if (sideband && noPack.Message != null)
		{
			noPack.SetOutput();
			SideBandOutputStream err = new SideBandOutputStream(SideBandOutputStream.CH_ERROR,
				SideBandOutputStream.SMALL_BUF, rawOut);
			err.Write(Constants.Encode(noPack.Message));
			err.Flush();
		}
		throw;
	}
	PackConfig cfg = packConfig;
	if (cfg == null)
	{
		cfg = new PackConfig(db);
	}
	PackWriter pw = new PackWriter(cfg, walk.GetObjectReader());
	try
	{
		pw.SetUseCachedPacks(true);
		pw.SetReuseDeltaCommits(true);
		pw.SetDeltaBaseAsOffset(options.Contains(OPTION_OFS_DELTA));
		pw.SetThin(options.Contains(OPTION_THIN_PACK));
		pw.SetReuseValidatingObjects(false);
		if (commonBase.IsEmpty())
		{
			// Initial fetch (no common base): suggest tag targets so
			// annotated tags can delta against what they point at.
			ICollection<ObjectId> tagTargets = new HashSet<ObjectId>();
			foreach (Ref @ref in refs.Values)
			{
				if (@ref.GetPeeledObjectId() != null)
				{
					tagTargets.AddItem(@ref.GetPeeledObjectId());
				}
				else if (@ref.GetObjectId() != null && @ref.GetName().StartsWith(Constants.R_HEADS))
				{
					tagTargets.AddItem(@ref.GetObjectId());
				}
			}
			pw.SetTagTargets(tagTargets);
		}
		RevWalk rw = walk;
		if (wantAll.IsEmpty())
		{
			pw.PreparePack(pm, wantIds, commonBase);
		}
		else
		{
			walk.Reset();
			ObjectWalk ow = walk.ToObjectWalkWithSameObjects();
			pw.PreparePack(pm, ow, wantAll, commonBase);
			rw = ow;
		}
		if (options.Contains(OPTION_INCLUDE_TAG))
		{
			foreach (Ref vref in refs.Values)
			{
				Ref @ref = vref;
				ObjectId objectId = @ref.GetObjectId();
				// If the object was already requested, skip it.
				if (wantAll.IsEmpty())
				{
					if (wantIds.Contains(objectId))
					{
						continue;
					}
				}
				else
				{
					RevObject obj = rw.LookupOrNull(objectId);
					if (obj != null && obj.Has(WANT))
					{
						continue;
					}
				}
				// FIX: this guard was a garbled token in the source
				// ("[email protected]..."); restored per upstream JGit — peel the ref
				// before reading its peeled object id.
				if (!@ref.IsPeeled())
				{
					@ref = db.Peel(@ref);
				}
				ObjectId peeledId = @ref.GetPeeledObjectId();
				if (peeledId == null)
				{
					continue;
				}
				objectId = @ref.GetObjectId();
				// Include the tag only when its target goes into the pack
				// and the tag itself is not already included.
				if (pw.WillInclude(peeledId) && !pw.WillInclude(objectId))
				{
					pw.AddObject(rw.ParseAny(objectId));
				}
			}
		}
		pw.WritePack(pm, NullProgressMonitor.INSTANCE, packOut);
		statistics = pw.GetStatistics();
		if (msgOut != null)
		{
			string msg = pw.GetStatistics().GetMessage() + '\n';
			msgOut.Write(Constants.Encode(msg));
			msgOut.Flush();
		}
	}
	finally
	{
		pw.Release();
	}
	if (sideband)
	{
		pckOut.End();
	}
	if (logger != null && statistics != null)
	{
		logger.OnPackStatistics(statistics);
	}
}
/// <summary>
/// Find the list of branches a given commit is reachable from when following
/// parents.
/// <p>
/// Note that this method calls
/// <see cref="RevWalk.Reset()">RevWalk.Reset()</see>
/// at the beginning.
/// <p>
/// In order to improve performance this method assumes clock skew among
/// committers is never larger than 24 hours.
/// </summary>
/// <param name="commit">the commit we are looking at</param>
/// <param name="revWalk">The RevWalk to be used.</param>
/// <param name="refs">the set of branches we want to see reachability from</param>
/// <returns>the list of branches a given commit is reachable from</returns>
/// <exception cref="NGit.Errors.MissingObjectException">NGit.Errors.MissingObjectException
/// </exception>
/// <exception cref="NGit.Errors.IncorrectObjectTypeException">NGit.Errors.IncorrectObjectTypeException
/// </exception>
/// <exception cref="System.IO.IOException">System.IO.IOException</exception>
public static IList<Ref> FindBranchesReachableFrom(RevCommit commit, RevWalk revWalk
	, ICollection<Ref> refs)
{
	IList<Ref> result = new AList<Ref>();
	// searches from branches can be cut off early if any parent of the
	// search-for commit is found. This is quite likely, so optimize for this.
	revWalk.MarkStart(Arrays.AsList(commit.Parents));
	// ids proven NOT to reach the target by earlier failed searches
	ObjectIdSubclassMap<ObjectId> cutOff = new ObjectIdSubclassMap<ObjectId>();
	int SKEW = 24 * 3600;
	// one day clock skew
	foreach (Ref @ref in refs)
	{
		RevObject maybehead = revWalk.ParseAny(@ref.GetObjectId());
		if (!(maybehead is RevCommit))
		{
			// the tip is not a commit; it cannot contain the target
			continue;
		}
		RevCommit headCommit = (RevCommit)maybehead;
		// if commit is in the ref branch, then the tip of ref should be
		// newer than the commit we are looking for. Allow for a large
		// clock skew.
		if (headCommit.CommitTime + SKEW < commit.CommitTime)
		{
			continue;
		}
		IList<ObjectId> maybeCutOff = new AList<ObjectId>(cutOff.Size());
		// guess rough size
		revWalk.ResetRetain();
		revWalk.MarkStart(headCommit);
		RevCommit current;
		Ref found = null;
		while ((current = revWalk.Next()) != null)
		{
			if (AnyObjectId.Equals(current, commit))
			{
				// the target is reachable from this branch tip
				found = @ref;
				break;
			}
			if (cutOff.Contains(current))
			{
				// history below here was already proven unreachable
				break;
			}
			maybeCutOff.AddItem(current.ToObjectId());
		}
		if (found != null)
		{
			result.AddItem(@ref);
		}
		else
		{
			// this search failed: everything visited is now known unreachable
			foreach (ObjectId id in maybeCutOff)
			{
				cutOff.AddIfAbsent(id);
			}
		}
	}
	return result;
}
/// <summary>
/// Evaluate each queued push update against the refs advertised by the
/// remote, marking updates that need no push or must be rejected, and
/// returning the remaining candidates keyed by remote ref name.
/// </summary>
/// <exception cref="NGit.Errors.TransportException"></exception>
private IDictionary<string, RemoteRefUpdate> PrepareRemoteUpdates()
{
	IDictionary<string, RemoteRefUpdate> updates = new Dictionary<string, RemoteRefUpdate>();
	foreach (RemoteRefUpdate update in toPush.Values)
	{
		Ref remoteRef = connection.GetRef(update.GetRemoteName());
		ObjectId remoteOld = remoteRef == null ? ObjectId.ZeroId : remoteRef.GetObjectId();
		if (update.GetNewObjectId().Equals(remoteOld))
		{
			if (update.IsDelete())
			{
				// deleting a ref that exists neither locally nor remotely
				update.SetStatus(RemoteRefUpdate.Status.NON_EXISTING);
			}
			else
			{
				// the remote already points at the same object - nothing to do
				update.SetStatus(RemoteRefUpdate.Status.UP_TO_DATE);
			}
			continue;
		}
		// The caller explicitly specified an expected old object id, but
		// the remote ref has moved in the mean time - reject.
		if (update.IsExpectingOldObjectId() && !update.GetExpectedOldObjectId().Equals(remoteOld))
		{
			update.SetStatus(RemoteRefUpdate.Status.REJECTED_REMOTE_CHANGED);
			continue;
		}
		// Ref creation (did not exist on the remote side) and ref deletion
		// are always fast-forward commands, feasible at this level.
		if (remoteOld.Equals(ObjectId.ZeroId) || update.IsDelete())
		{
			update.SetFastForward(true);
			updates.Put(update.GetRemoteName(), update);
			continue;
		}
		// Fast-forward test:
		// - both old and new ref must point to commits, AND
		// - both of them must be known to us and exist in the repository, AND
		// - the old commit must be an ancestor of the new commit.
		bool fastForward = true;
		try
		{
			RevObject oldRev = walker.ParseAny(remoteOld);
			RevObject newRev = walker.ParseAny(update.GetNewObjectId());
			bool bothCommits = oldRev is RevCommit && newRev is RevCommit;
			if (!bothCommits || !walker.IsMergedInto((RevCommit)oldRev, (RevCommit)newRev))
			{
				fastForward = false;
			}
		}
		catch (MissingObjectException)
		{
			fastForward = false;
		}
		catch (Exception x)
		{
			throw new TransportException(transport.GetURI(), MessageFormat.Format(JGitText.Get
				().readingObjectsFromLocalRepositoryFailed, x.Message), x);
		}
		update.SetFastForward(fastForward);
		if (!fastForward && !update.IsForceUpdate())
		{
			update.SetStatus(RemoteRefUpdate.Status.REJECTED_NONFASTFORWARD);
		}
		else
		{
			updates.Put(update.GetRemoteName(), update);
		}
	}
	return updates;
}