/// <exception cref="System.IO.IOException"></exception> private void MarkReachable(ICollection <ObjectId> have, int maxTime) { foreach (Ref r in local.GetAllRefs().Values) { try { RevCommit o = walk.ParseCommit(r.GetObjectId()); o.Add(REACHABLE); reachableCommits.AddItem(o); } catch (IOException) { } } // If we cannot read the value of the ref skip it. foreach (ObjectId id in have) { try { RevCommit o = walk.ParseCommit(id); o.Add(REACHABLE); reachableCommits.AddItem(o); } catch (IOException) { } } // If we cannot read the value of the ref skip it. if (maxTime > 0) { // Mark reachable commits until we reach maxTime. These may // wind up later matching up against things we want and we // can avoid asking for something we already happen to have. // DateTime maxWhen = Sharpen.Extensions.CreateDate(maxTime * 1000L); walk.Sort(RevSort.COMMIT_TIME_DESC); walk.MarkStart(reachableCommits); walk.SetRevFilter(CommitTimeRevFilter.After(maxWhen)); for (; ;) { RevCommit c = walk.Next(); if (c == null) { break; } if (c.Has(ADVERTISED) && !c.Has(COMMON)) { // This is actually going to be a common commit, but // our peer doesn't know that fact yet. // c.Add(COMMON); c.Carry(COMMON); reachableCommits.AddItem(c); } } } }
/// <exception cref="System.IO.IOException"></exception> private void MarkReachable(ICollection <ObjectId> have, int maxTime) { foreach (Ref r in local.GetAllRefs().Values) { ObjectId id = r.GetPeeledObjectId(); if (id == null) { id = r.GetObjectId(); } if (id == null) { continue; } ParseReachable(id); } foreach (ObjectId id_1 in local.GetAdditionalHaves()) { ParseReachable(id_1); } foreach (ObjectId id_2 in have) { ParseReachable(id_2); } if (maxTime > 0) { // Mark reachable commits until we reach maxTime. These may // wind up later matching up against things we want and we // can avoid asking for something we already happen to have. // DateTime maxWhen = Sharpen.Extensions.CreateDate(maxTime * 1000L); walk.Sort(RevSort.COMMIT_TIME_DESC); walk.MarkStart(reachableCommits); walk.SetRevFilter(CommitTimeRevFilter.After(maxWhen)); for (; ;) { RevCommit c = walk.Next(); if (c == null) { break; } if (c.Has(ADVERTISED) && !c.Has(COMMON)) { // This is actually going to be a common commit, but // our peer doesn't know that fact yet. // c.Add(COMMON); c.Carry(COMMON); reachableCommits.AddItem(c); } } } }
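A minimal sketch (not part of the NGit sources above) of the same time-bounded reachability walk that MarkReachable performs, built only from RevWalk calls that already appear in these methods. The names ReachabilityExample, WalkReachableSince, repo and maxTime are placeholders introduced here for illustration.

using System;
using System.IO;
using NGit;
using NGit.Revwalk;
using NGit.Revwalk.Filter;

public static class ReachabilityExample
{
    // Visit every commit reachable from the repository's refs that is newer
    // than maxTime (seconds since the epoch), newest first.
    public static void WalkReachableSince(Repository repo, int maxTime)
    {
        RevWalk walk = new RevWalk(repo);
        try
        {
            walk.Sort(RevSort.COMMIT_TIME_DESC);
            walk.SetRevFilter(CommitTimeRevFilter.After(Sharpen.Extensions.CreateDate(maxTime * 1000L)));
            foreach (Ref r in repo.GetAllRefs().Values)
            {
                try
                {
                    walk.MarkStart(walk.ParseCommit(r.GetObjectId()));
                }
                catch (IOException)
                {
                    // If we cannot read the value of the ref skip it.
                }
            }
            RevCommit c;
            while ((c = walk.Next()) != null)
            {
                // Each commit produced here is reachable from a ref and
                // was committed after maxTime.
            }
        }
        finally
        {
            walk.Release();
        }
    }
}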
public override bool Include(RevWalk walker, RevCommit c)
{
    bool remoteKnowsIsCommon = c.Has(this._enclosing.COMMON);
    if (c.Has(this._enclosing.ADVERTISED))
    {
        // Remote advertised this, and we have it, hence common.
        // Whether or not the remote knows that fact is tested
        // before we added the flag. If the remote doesn't know
        // we have to still send them this object.
        c.Add(this._enclosing.COMMON);
    }
    return !remoteKnowsIsCommon;
}
/// <exception cref="System.IO.IOException"></exception> private bool WantSatisfied(RevObject want) { if (want.Has(SATISFIED)) { return(true); } walk.ResetRetain(SAVE); walk.MarkStart((RevCommit)want); if (oldestTime != 0) { walk.SetRevFilter(CommitTimeRevFilter.After(oldestTime * 1000L)); } for (; ;) { RevCommit c = walk.Next(); if (c == null) { break; } if (c.Has(PEER_HAS)) { AddCommonBase(c); want.Add(SATISFIED); return(true); } } return(false); }
/// <exception cref="System.IO.IOException"></exception> private bool ProcessOne(Candidate n) { RevCommit parent = n.GetParent(0); if (parent == null) { return(Split(n.GetNextCandidate(0), n)); } if (parent.Has(SEEN)) { return(false); } revPool.ParseHeaders(parent); if (Find(parent, n.sourcePath)) { if (idBuf.Equals(n.sourceBlob)) { // The common case of the file not being modified in // a simple string-of-pearls history. Blame parent. n.sourceCommit = parent; Push(n); return(false); } Candidate next = n.Create(parent, n.sourcePath); next.sourceBlob = idBuf.ToObjectId(); next.LoadText(reader); return(Split(next, n)); } if (n.sourceCommit == null) { return(Result(n)); } DiffEntry r = FindRename(parent, n.sourceCommit, n.sourcePath); if (r == null) { return(Result(n)); } if (0 == r.GetOldId().PrefixCompare(n.sourceBlob)) { // A 100% rename without any content change can also // skip directly to the parent. n.sourceCommit = parent; n.sourcePath = PathFilter.Create(r.GetOldPath()); Push(n); return(false); } Candidate next_1 = n.Create(parent, PathFilter.Create(r.GetOldPath())); next_1.sourceBlob = r.GetOldId().ToObjectId(); next_1.renameScore = r.GetScore(); next_1.LoadText(reader); return(Split(next_1, n)); }
/// <exception cref="NGit.Errors.MissingObjectException"></exception> /// <exception cref="System.IO.IOException"></exception> private void PushLocalCommit(RevCommit p) { if (p.Has(LOCALLY_SEEN)) { return; } revWalk.ParseHeaders(p); p.Add(LOCALLY_SEEN); p.Add(COMPLETE); p.Carry(COMPLETE); localCommitQueue.Add(p); }
private void ParseReachable(ObjectId id)
{
    try
    {
        RevCommit o = walk.ParseCommit(id);
        if (!o.Has(REACHABLE))
        {
            o.Add(REACHABLE);
            reachableCommits.AddItem(o);
        }
    }
    catch (IOException)
    {
        // Not a readable commit; skip it.
    }
}
/// <exception cref="NGit.Errors.TransportException"></exception> private void VerifyPrerequisites() { if (prereqs.IsEmpty()) { return; } RevWalk rw = new RevWalk(transport.local); try { RevFlag PREREQ = rw.NewFlag("PREREQ"); RevFlag SEEN = rw.NewFlag("SEEN"); IDictionary <ObjectId, string> missing = new Dictionary <ObjectId, string>(); IList <RevObject> commits = new AList <RevObject>(); foreach (KeyValuePair <ObjectId, string> e in prereqs.EntrySet()) { ObjectId p = e.Key; try { RevCommit c = rw.ParseCommit(p); if (!c.Has(PREREQ)) { c.Add(PREREQ); commits.AddItem(c); } } catch (MissingObjectException) { missing.Put(p, e.Value); } catch (IOException err) { throw new TransportException(transport.uri, MessageFormat.Format(JGitText.Get().cannotReadCommit , p.Name), err); } } if (!missing.IsEmpty()) { throw new MissingBundlePrerequisiteException(transport.uri, missing); } foreach (Ref r in transport.local.GetAllRefs().Values) { try { rw.MarkStart(rw.ParseCommit(r.GetObjectId())); } catch (IOException) { } } // If we cannot read the value of the ref skip it. int remaining = commits.Count; try { RevCommit c; while ((c = rw.Next()) != null) { if (c.Has(PREREQ)) { c.Add(SEEN); if (--remaining == 0) { break; } } } } catch (IOException err) { throw new TransportException(transport.uri, JGitText.Get().cannotReadObject, err); } if (remaining > 0) { foreach (RevObject o in commits) { if (!o.Has(SEEN)) { missing.Put(o, prereqs.Get(o)); } } throw new MissingBundlePrerequisiteException(transport.uri, missing); } } finally { rw.Release(); } }
/// <exception cref="System.IO.IOException"></exception> private bool ProcessMerge(Candidate n) { int pCnt = n.GetParentCount(); for (int pIdx = 0; pIdx < pCnt; pIdx++) { RevCommit parent = n.GetParent(pIdx); if (parent.Has(SEEN)) { continue; } revPool.ParseHeaders(parent); } // If any single parent exactly matches the merge, follow only // that one parent through history. ObjectId[] ids = null; for (int pIdx_1 = 0; pIdx_1 < pCnt; pIdx_1++) { RevCommit parent = n.GetParent(pIdx_1); if (parent.Has(SEEN)) { continue; } if (!Find(parent, n.sourcePath)) { continue; } if (!(n is Candidate.ReverseCandidate) && idBuf.Equals(n.sourceBlob)) { n.sourceCommit = parent; Push(n); return(false); } if (ids == null) { ids = new ObjectId[pCnt]; } ids[pIdx_1] = idBuf.ToObjectId(); } // If rename detection is enabled, search for any relevant names. DiffEntry[] renames = null; if (renameDetector != null) { renames = new DiffEntry[pCnt]; for (int pIdx_2 = 0; pIdx_2 < pCnt; pIdx_2++) { RevCommit parent = n.GetParent(pIdx_2); if (parent.Has(SEEN)) { continue; } if (ids != null && ids[pIdx_2] != null) { continue; } DiffEntry r = FindRename(parent, n.sourceCommit, n.sourcePath); if (r == null) { continue; } if (n is Candidate.ReverseCandidate) { if (ids == null) { ids = new ObjectId[pCnt]; } ids[pCnt] = r.GetOldId().ToObjectId(); } else { if (0 == r.GetOldId().PrefixCompare(n.sourceBlob)) { // A 100% rename without any content change can also // skip directly to the parent. Note this bypasses an // earlier parent that had the path (above) but did not // have an exact content match. For performance reasons // we choose to follow the one parent over trying to do // possibly both parents. n.sourceCommit = parent; n.sourcePath = PathFilter.Create(r.GetOldPath()); Push(n); return(false); } } renames[pIdx_2] = r; } } // Construct the candidate for each parent. Candidate[] parents = new Candidate[pCnt]; for (int pIdx_3 = 0; pIdx_3 < pCnt; pIdx_3++) { RevCommit parent = n.GetParent(pIdx_3); if (parent.Has(SEEN)) { continue; } Candidate p; if (renames != null && renames[pIdx_3] != null) { p = n.Create(parent, PathFilter.Create(renames[pIdx_3].GetOldPath())); p.renameScore = renames[pIdx_3].GetScore(); p.sourceBlob = renames[pIdx_3].GetOldId().ToObjectId(); } else { if (ids != null && ids[pIdx_3] != null) { p = n.Create(parent, n.sourcePath); p.sourceBlob = ids[pIdx_3]; } else { continue; } } EditList editList; if (n is Candidate.ReverseCandidate && p.sourceBlob.Equals(n.sourceBlob)) { // This special case happens on ReverseCandidate forks. p.sourceText = n.sourceText; editList = new EditList(0); } else { p.LoadText(reader); editList = diffAlgorithm.Diff(textComparator, p.sourceText, n.sourceText); } if (editList.IsEmpty()) { // Ignoring whitespace (or some other special comparator) can // cause non-identical blobs to have an empty edit list. In // a case like this push the parent alone. if (n is Candidate.ReverseCandidate) { parents[pIdx_3] = p; continue; } p.regionList = n.regionList; Push(p); return(false); } p.TakeBlame(editList, n); // Only remember this parent candidate if there is at least // one region that was blamed on the parent. if (p.regionList != null) { // Reverse blame requires inverting the regions. This puts // the regions the parent deleted from us into the parent, // and retains the common regions to look at other parents // for deletions. 
if (n is Candidate.ReverseCandidate) { Region r = p.regionList; p.regionList = n.regionList; n.regionList = r; } parents[pIdx_3] = p; } } if (n is Candidate.ReverseCandidate) { // On a reverse blame report all deletions found in the children, // and pass on to them a copy of our region list. Candidate resultHead = null; Candidate resultTail = null; for (int pIdx_2 = 0; pIdx_2 < pCnt; pIdx_2++) { Candidate p = parents[pIdx_2]; if (p == null) { continue; } if (p.regionList != null) { Candidate r = p.Copy(p.sourceCommit); if (resultTail != null) { resultTail.queueNext = r; resultTail = r; } else { resultHead = r; resultTail = r; } } if (n.regionList != null) { p.regionList = n.regionList.DeepCopy(); Push(p); } } if (resultHead != null) { return(Result(resultHead)); } return(false); } // Push any parents that are still candidates. for (int pIdx_4 = 0; pIdx_4 < pCnt; pIdx_4++) { if (parents[pIdx_4] != null) { Push(parents[pIdx_4]); } } if (n.regionList != null) { return(Result(n)); } return(false); }