public void testAddTwo() { Edit e1 = new Edit(1, 2, 1, 1); Edit e2 = new Edit(8, 8, 8, 12); EditList l = new EditList(); l.Add(e1); l.Add(e2); Assert.AreEqual(2, l.size()); Assert.AreSame(e1, l.get(0)); Assert.AreSame(e2, l.get(1)); IEnumerator i = l.GetEnumerator(); i.Reset(); i.MoveNext(); Assert.AreSame(e1, i.Current); i.MoveNext(); Assert.AreSame(e2, i.Current); Assert.IsTrue(l.Equals(l)); Assert.IsFalse(l.Equals(new EditList())); EditList l2 = new EditList(); l2.Add(e1); l2.Add(e2); Assert.IsTrue(l.Equals(l2)); Assert.IsTrue(l2.Equals(l)); Assert.AreEqual(l.GetHashCode(), l2.GetHashCode()); }
private void CreatList(FightLayerType type, int index) { EditList list = GameObject.Instantiate <EditList>(listPrefab); list.type = type; list.gameObject.name = "" + type; listDic.Add(type, list); list.transform.SetParent(transform, false); }
public void testFileHeader() { GitSharp.Core.Patch.Patch p = ParseTestPatchFile(PatchsDir + "testGetText_BothISO88591.patch"); FileHeader fh = p.getFiles()[0]; EditList e = fh.ToEditList(); Assert.AreEqual(2, e.size()); Assert.AreEqual(new Edit(4 - 1, 5 - 1, 4 - 1, 5 - 1), e.get(0)); Assert.AreEqual(new Edit(16 - 1, 17 - 1, 16 - 1, 17 - 1), e.get(1)); }
public virtual void TestFileHeader() { NGit.Patch.Patch p = ParseTestPatchFile("testGetText_BothISO88591.patch"); FileHeader fh = p.GetFiles()[0]; EditList e = fh.ToEditList(); NUnit.Framework.Assert.AreEqual(2, e.Count); NUnit.Framework.Assert.AreEqual(new Edit(4 - 1, 5 - 1, 4 - 1, 5 - 1), e[0]); NUnit.Framework.Assert.AreEqual(new Edit(16 - 1, 17 - 1, 16 - 1, 17 - 1), e[1]); }
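The two tests above exercise the same flow through the GitSharp and NGit ports: parse a patch file, take its first FileHeader, and flatten the hunks into an EditList of 0-based, end-exclusive line ranges (hence the "- 1" on every expected value). A minimal sketch of that flow follows; only GetFiles() and ToEditList() appear in the snippets here, so the byte-buffer Parse overload, the NGit.Diff namespace, and the file name are assumptions carried over from the JGit API these ports mirror.

// Hedged sketch: read a unified diff and flatten it into an EditList.
// The Parse(byte[], int, int) overload is an assumption based on the JGit API;
// adjust to whichever port (GitSharp or NGit) is actually in use.
using System;
using System.IO;
using NGit.Diff;

static class PatchToEdits
{
    static void Main()
    {
        byte[] buf = File.ReadAllBytes("testGetText_BothISO88591.patch");
        var patch = new NGit.Patch.Patch();
        patch.Parse(buf, 0, buf.Length); // assumption: byte-buffer Parse, as in JGit

        var fh = patch.GetFiles()[0];
        EditList edits = fh.ToEditList();
        foreach (Edit e in edits)
        {
            // Ranges are 0-based and end-exclusive, matching the assertions above.
            Console.WriteLine("A[{0},{1}) -> B[{2},{3})",
                e.GetBeginA(), e.GetEndA(), e.GetBeginB(), e.GetEndB());
        }
    }
}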
/// <summary>Constructs a new FileHeader</summary> /// <param name="headerLines">buffer holding the diff header for this file</param> /// <param name="edits">the edits for this file</param> /// <param name="type">the type of patch used to modify this file</param> public FileHeader(byte[] headerLines, EditList edits, FileHeader.PatchType type) : this(headerLines, 0) { endOffset = headerLines.Length; int ptr = ParseGitFileName(NGit.Patch.Patch.DIFF_GIT.Length, headerLines.Length); ParseGitHeaders(ptr, headerLines.Length); this.patchType = type; AddHunk(new HunkHeader(this, edits)); }
/// <returns>a list describing the content edits performed on this file.</returns> public virtual EditList ToEditList() { EditList r = new EditList(); foreach (HunkHeader hunk in hunks) { Sharpen.Collections.AddAll(r, hunk.ToEditList()); } return(r); }
private void BuildEditList() { _editList = DiffAlgorithm.GetAlgorithm(DiffAlgorithm.SupportedAlgorithm.HISTOGRAM).Diff( IgnoreWhitespace ? DiffViewer.Text.Comparator.WS_IGNORE_ALL : DiffViewer.Text.Comparator.DEFAULT, _leftText, _rightText ); }
private void Format(EditList edits, Text a, Text b) { for (int curIdx = 0; curIdx < edits.Count;) { Edit curEdit = edits[curIdx]; int endIdx = FindCombinedEnd(edits, curIdx); Edit endEdit = edits[endIdx]; int aCur = Math.Max(0, curEdit.GetBeginA() - Context); int bCur = Math.Max(0, curEdit.GetBeginB() - Context); int aEnd = Math.Min(a.Size(), endEdit.GetEndA() + Context); int bEnd = Math.Min(b.Size(), endEdit.GetEndB() + Context); WriteHunkHeader(aCur, aEnd, bCur, bEnd); while (aCur < aEnd || bCur < bEnd) { if (aCur < curEdit.GetBeginA() || endIdx + 1 < curIdx) { WriteContextLine(a, aCur); if (IsEndOfLineMissing(a, aCur)) { _out.Append(NoNewLine); } aCur++; bCur++; } else { if (aCur < curEdit.GetEndA()) { WriteRemovedLine(a, aCur); if (IsEndOfLineMissing(a, aCur)) { _out.Append(NoNewLine); } aCur++; } else { if (bCur < curEdit.GetEndB()) { WriteAddedLine(b, bCur); if (IsEndOfLineMissing(b, bCur)) { _out.Append(NoNewLine); } bCur++; } } } if (End(curEdit, aCur, bCur) && ++curIdx < edits.Count) { curEdit = edits[curIdx]; } } } }
public void testSet() { Edit e1 = new Edit(1, 2, 1, 1); Edit e2 = new Edit(3, 4, 3, 3); EditList l = new EditList(); l.Add(e1); Assert.AreSame(e1, l.get(0)); Assert.AreSame(e1, l.set(0, e2)); Assert.AreSame(e2, l.get(0)); }
public void LoadDiff(Text leftText, Text rightText, EditList editList) { _out = new StringBuilder(); Format(editList, leftText, rightText); _editor.Text = _out.ToString(); _editor.Refresh(); _out = null; }
public void testTypes() { GitSharp.Core.Patch.Patch p = ParseTestPatchFile(PatchsDir + "testEditList_Types.patch"); FileHeader fh = p.getFiles()[0]; EditList e = fh.ToEditList(); Assert.AreEqual(3, e.size()); Assert.AreEqual(new Edit(3 - 1, 3 - 1, 3 - 1, 4 - 1), e.get(0)); Assert.AreEqual(new Edit(17 - 1, 19 - 1, 18 - 1, 18 - 1), e.get(1)); Assert.AreEqual(new Edit(23 - 1, 25 - 1, 22 - 1, 28 - 1), e.get(2)); }
public virtual void TestTypes() { NGit.Patch.Patch p = ParseTestPatchFile("testEditList_Types.patch"); FileHeader fh = p.GetFiles()[0]; EditList e = fh.ToEditList(); NUnit.Framework.Assert.AreEqual(3, e.Count); NUnit.Framework.Assert.AreEqual(new Edit(3 - 1, 3 - 1, 3 - 1, 4 - 1), e[0]); NUnit.Framework.Assert.AreEqual(new Edit(17 - 1, 19 - 1, 18 - 1, 18 - 1), e[1]); NUnit.Framework.Assert.AreEqual(new Edit(23 - 1, 25 - 1, 22 - 1, 28 - 1), e[2]); }
public virtual void TestEmpty() { EditList l = new EditList(); NUnit.Framework.Assert.AreEqual(0, l.Count); NUnit.Framework.Assert.IsTrue(l.IsEmpty()); NUnit.Framework.Assert.AreEqual("EditList[]", l.ToString()); NUnit.Framework.Assert.AreEqual(l, l); NUnit.Framework.Assert.AreEqual(new EditList(), l); NUnit.Framework.Assert.IsFalse(l.Equals(string.Empty)); NUnit.Framework.Assert.AreEqual(l.GetHashCode(), new EditList().GetHashCode()); }
public void testEmpty() { EditList l = new EditList(); Assert.AreEqual(0, l.size()); Assert.IsTrue(l.isEmpty()); Assert.AreEqual("EditList[]", l.ToString()); Assert.IsTrue(l.Equals(l)); Assert.IsTrue(l.Equals(new EditList())); Assert.IsFalse(l.Equals(string.Empty)); Assert.AreEqual(l.GetHashCode(), new EditList().GetHashCode()); }
private static string toString(EditList list) { StringBuilder builder = new StringBuilder(); foreach (Edit e in list) { builder.Append(" -" + e.BeginA + "," + (e.EndA - e.BeginA) + " +" + e.BeginB + "," + (e.EndB - e.BeginB)); } return(builder.ToString()); }
public void testRemove() { Edit e1 = new Edit(1, 2, 1, 1); Edit e2 = new Edit(8, 8, 8, 12); EditList l = new EditList(); l.Add(e1); l.Add(e2); l.Remove(e1); Assert.AreEqual(1, l.size()); Assert.AreSame(e2, l.get(0)); }
private void Format(EditList edits, Text a, Text b, List <TextMarker> leftMarkers, List <TextMarker> rightMarkers, StringBuilder leftOut, StringBuilder rightOut) { int curA = 0; int offset = 0; foreach (var edit in edits) { for (int i = curA; i < edit.GetBeginA(); i++) { WriteContextLine(a, i, leftOut, rightOut); offset++; } _markers.Add(new DiffMarker( GetMarkerType(edit.GetType()), offset, Math.Max(edit.GetLengthA(), edit.GetLengthB()), edit.GetLengthA(), edit.GetLengthB() )); for (curA = edit.GetBeginA(); curA < edit.GetEndA(); curA++) { WriteLine(leftOut, leftMarkers, a, curA, edit.GetType()); } for (int curB = edit.GetBeginB(); curB < edit.GetEndB(); curB++) { WriteLine(rightOut, rightMarkers, b, curB, edit.GetType()); offset++; } for (int i = edit.GetLengthB() - edit.GetLengthA(); i > 0; i--) { WriteEmptyLine(leftOut, leftMarkers); } for (int i = edit.GetLengthA() - edit.GetLengthB(); i > 0; i--) { WriteEmptyLine(rightOut, rightMarkers); offset++; } } for (; curA < a.Size(); curA++) { WriteContextLine(a, curA, leftOut, rightOut); offset++; } }
public virtual void TestHunkHeader() { NGit.Patch.Patch p = ParseTestPatchFile("testGetText_BothISO88591.patch"); FileHeader fh = p.GetFiles()[0]; EditList list0 = fh.GetHunks()[0].ToEditList(); NUnit.Framework.Assert.AreEqual(1, list0.Count); NUnit.Framework.Assert.AreEqual(new Edit(4 - 1, 5 - 1, 4 - 1, 5 - 1), list0[0]); EditList list1 = fh.GetHunks()[1].ToEditList(); NUnit.Framework.Assert.AreEqual(1, list1.Count); NUnit.Framework.Assert.AreEqual(new Edit(16 - 1, 17 - 1, 16 - 1, 17 - 1), list1[0 ]); }
public async Task <IActionResult> EditRequest(int?id, EditList editList) { if (id == null) { return(NotFound()); } editList.ViewModelBoth.Article = await _context.Articles.FindAsync(id); if (editList.ViewModelBoth.Article == null) { return(NotFound()); } return(View("../Article/EditRequest", editList)); }
public void testHunkHeader() { GitSharp.Core.Patch.Patch p = ParseTestPatchFile(PatchsDir + "testGetText_BothISO88591.patch"); FileHeader fh = p.getFiles()[0]; EditList list0 = fh.Hunks[0].ToEditList(); Assert.AreEqual(1, list0.size()); Assert.AreEqual(new Edit(4 - 1, 5 - 1, 4 - 1, 5 - 1), list0.get(0)); EditList list1 = fh.Hunks[1].ToEditList(); Assert.AreEqual(1, list1.size()); Assert.AreEqual(new Edit(16 - 1, 17 - 1, 16 - 1, 17 - 1), list1.get(0)); }
public ActionResult OnGetAsync(EditList editList) { //var editrq = _context.Articles.ToList(); // Enumerate a snapshot: adding to editRequests while iterating it directly would throw. foreach (var u in editRequests.ToList()) { EditList X = new EditList { ViewModelBoth = editList.ViewModelBoth, OriginalId = editList.OriginalId }; editRequests.Add(X); } return(Page()); }
private void EditPerson(object obj) { var viewmodel = new PersonViewModel { Name = SelectedPerson.Name, DayOfBirth = DateTime.Parse(SelectedPerson.BirthDate), DayOfDeath = DateTime.Parse(SelectedPerson.DeathDate), Gender = (Gender)Enum.Parse(typeof(Gender), SelectedPerson.Gender) }; EditList view = new EditList { DataContext = viewmodel }; view.ShowDialog(); if (viewmodel.IsConfirm) { var birthDate = viewmodel.DayOfBirth.Date; var deathDate = viewmodel.DayOfDeath.Date; viewmodel.BirthDate = birthDate.ToString("d"); viewmodel.DeathDate = deathDate.ToString("d"); var gender = viewmodel.Gender.ToString(); if (SelectedPerson.Children.Any()) { Person newPerson = new Person { BirthDate = birthDate.ToString("d"), DeathDate = deathDate.ToString("d"), Gender = gender }; foreach (var child in SelectedPerson.Children) { if (!CheckIfValid(newPerson, child, false)) { return; } } } var result = db.EditPerson(SelectedPerson, viewmodel.Name, viewmodel.BirthDate, viewmodel.DeathDate, gender); if (result == false) { MessageBox.Show("Wystąpił błąd przy edycji jednostki"); } Refresh(); } }
public virtual void TestAddOne() { Edit e = new Edit(1, 2, 1, 1); EditList l = new EditList(); Extensions.AddItem(l, e); NUnit.Framework.Assert.AreEqual(1, l.Count); NUnit.Framework.Assert.IsFalse(Extensions.IsEmpty(l)); NUnit.Framework.Assert.AreSame(e, l[0]); NUnit.Framework.Assert.AreSame(e, Extensions.Iterator(l).Next()); NUnit.Framework.Assert.AreEqual(l, l); NUnit.Framework.Assert.IsFalse(l.Equals(new EditList())); EditList l2 = new EditList(); Extensions.AddItem(l2, e); NUnit.Framework.Assert.AreEqual(l2, l); NUnit.Framework.Assert.AreEqual(l, l2); NUnit.Framework.Assert.AreEqual(AList<Edit>.GetHashCode(l), AList<Edit>.GetHashCode(l2)); }
public virtual void TestAddOne() { Edit e = new Edit(1, 2, 1, 1); EditList l = new EditList(); l.AddItem(e); NUnit.Framework.Assert.AreEqual(1, l.Count); NUnit.Framework.Assert.IsFalse(l.IsEmpty()); NUnit.Framework.Assert.AreSame(e, l[0]); NUnit.Framework.Assert.AreSame(e, l.Iterator().Next()); NUnit.Framework.Assert.AreEqual(l, l); NUnit.Framework.Assert.IsFalse(l.Equals(new EditList())); EditList l2 = new EditList(); l2.AddItem(e); NUnit.Framework.Assert.AreEqual(l2, l); NUnit.Framework.Assert.AreEqual(l, l2); NUnit.Framework.Assert.AreEqual(l.GetHashCode(), l2.GetHashCode()); }
public override bool Load(DbRow dr) { if (base.Load(dr)) { foreach (string i in _EditList.Split(',')) { if (i.IsInt()) { EditList.Add(i.ToInt()); } } return(true); } return(false); }
internal HunkHeader(FileHeader fh, EditList editList) : this(fh, fh.buf.Length) { this.editList = editList; endOffset = startOffset; nContext = 0; if (editList.IsEmpty()) { newStartLine = 0; newLineCount = 0; } else { newStartLine = editList[0].GetBeginB(); Edit last = editList[editList.Count - 1]; newLineCount = last.GetEndB() - newStartLine; } }
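The constructor above derives the synthetic hunk's new-side range purely from the edit list: the start is the first edit's B begin and the line count runs to the last edit's B end. A tiny illustration of that arithmetic, using only Edit/EditList calls that already appear in these snippets (the concrete edits and the NGit.Diff namespace are assumptions made for the example):

// Made-up edits illustrating the new-side range computed by HunkHeader above.
// Edit ranges are 0-based and end-exclusive.
using NGit.Diff;

var edits = new EditList();
edits.AddItem(new Edit(3, 5, 3, 6));     // old lines [3,5) replaced by new lines [3,6)
edits.AddItem(new Edit(10, 10, 11, 13)); // pure insertion: new lines [11,13)

int newStartLine = edits[0].GetBeginB();                            // 3
int newLineCount = edits[edits.Count - 1].GetEndB() - newStartLine; // 13 - 3 = 10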
public IEnumerable <HunkRangeInfo> GetGitDiffFor(ITextDocument textDocument, ITextSnapshot snapshot) { string fileName = textDocument.FilePath; GitFileStatusTracker tracker = new GitFileStatusTracker(Path.GetDirectoryName(fileName)); if (!tracker.IsGit) { yield break; } GitFileStatus status = tracker.GetFileStatus(fileName); if (status == GitFileStatus.New || status == GitFileStatus.Added) { yield break; } HistogramDiff diff = new HistogramDiff(); diff.SetFallbackAlgorithm(null); string currentText = snapshot.GetText(); byte[] preamble = textDocument.Encoding.GetPreamble(); byte[] content = textDocument.Encoding.GetBytes(currentText); if (preamble.Length > 0) { byte[] completeContent = new byte[preamble.Length + content.Length]; Buffer.BlockCopy(preamble, 0, completeContent, 0, preamble.Length); Buffer.BlockCopy(content, 0, completeContent, preamble.Length, content.Length); content = completeContent; } byte[] previousContent = null; //GetPreviousRevision(tracker, fileName); RawText b = new RawText(content); RawText a = new RawText(previousContent ?? new byte[0]); EditList edits = diff.Diff(RawTextComparator.DEFAULT, a, b); foreach (Edit edit in edits) { yield return(new HunkRangeInfo(snapshot, edit, a, b)); } }
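GetGitDiffFor above obtains its EditList by running HistogramDiff over two RawText buffers. A self-contained sketch of just that diff step follows, reusing the same calls (HistogramDiff, SetFallbackAlgorithm, Diff with RawTextComparator.DEFAULT); the NGit.Diff namespace and the sample strings are assumptions for the example.

// Hedged sketch: diff two in-memory texts into an EditList, as GetGitDiffFor does above.
using System;
using System.Text;
using NGit.Diff;

static class DiffSketch
{
    static void Main()
    {
        var a = new RawText(Encoding.UTF8.GetBytes("one\ntwo\nthree\n"));
        var b = new RawText(Encoding.UTF8.GetBytes("one\n2\nthree\nfour\n"));

        var diff = new HistogramDiff();
        diff.SetFallbackAlgorithm(null); // as in the snippet above: no fallback algorithm

        EditList edits = diff.Diff(RawTextComparator.DEFAULT, a, b);
        foreach (Edit e in edits)
        {
            // Same "-start,len +start,len" shape as the toString helper earlier in this section.
            Console.WriteLine("-{0},{1} +{2},{3}",
                e.GetBeginA(), e.GetEndA() - e.GetBeginA(),
                e.GetBeginB(), e.GetEndB() - e.GetBeginB());
        }
    }
}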
public virtual void TestAddTwo() { Edit e1 = new Edit(1, 2, 1, 1); Edit e2 = new Edit(8, 8, 8, 12); EditList l = new EditList(); Extensions.AddItem(l, e1); Extensions.AddItem(l, e2); NUnit.Framework.Assert.AreEqual(2, l.Count); NUnit.Framework.Assert.AreSame(e1, l[0]); NUnit.Framework.Assert.AreSame(e2, l[1]); Iterator<Edit> i = Extensions.Iterator(l); NUnit.Framework.Assert.AreSame(e1, i.Next()); NUnit.Framework.Assert.AreSame(e2, i.Next()); NUnit.Framework.Assert.AreEqual(l, l); NUnit.Framework.Assert.IsFalse(l.Equals(new EditList())); EditList l2 = new EditList(); Extensions.AddItem(l2, e1); Extensions.AddItem(l2, e2); NUnit.Framework.Assert.AreEqual(l2, l); NUnit.Framework.Assert.AreEqual(l, l2); NUnit.Framework.Assert.AreEqual(AList<Edit>.GetHashCode(l), AList<Edit>.GetHashCode(l2)); }
public void testAddOne() { Edit e = new Edit(1, 2, 1, 1); EditList l = new EditList(); l.Add(e); Assert.AreEqual(1, l.size()); Assert.IsFalse(l.isEmpty()); Assert.AreSame(e, l.get(0)); IEnumerator i = l.GetEnumerator(); i.Reset(); i.MoveNext(); Assert.AreSame(e, i.Current); Assert.IsTrue(l.Equals(l)); Assert.IsFalse(l.Equals(new EditList())); EditList l2 = new EditList(); l2.Add(e); Assert.IsTrue(l.Equals(l2)); Assert.IsTrue(l2.Equals(l)); Assert.AreEqual(l.GetHashCode(), l2.GetHashCode()); }
/// <summary> /// Returns a list describing the content edits performed on this file. /// </summary> /// <returns></returns> public EditList ToEditList() { var r = new EditList(); _hunks.ForEach(hunk => r.AddRange(hunk.ToEditList())); return r; }
/// <returns>a list describing the content edits performed within the hunk.</returns> public virtual EditList ToEditList() { if (editList == null) { editList = new EditList(); byte[] buf = file.buf; int c = RawParseUtils.NextLF(buf, startOffset); int oLine = old.startLine; int nLine = newStartLine; Edit @in = null; for (; c < endOffset; c = RawParseUtils.NextLF(buf, c)) { switch (buf[c]) { case (byte)(' '): case (byte)('\n'): { @in = null; oLine++; nLine++; continue; } case (byte)('-'): { if (@in == null) { @in = new Edit(oLine - 1, nLine - 1); editList.AddItem(@in); } oLine++; @in.ExtendA(); continue; } case (byte)('+'): { if (@in == null) { @in = new Edit(oLine - 1, nLine - 1); editList.AddItem(@in); } nLine++; @in.ExtendB(); continue; } case (byte)('\\'): { // Matches "\ No newline at end of file" continue; } default: { goto SCAN_break; } } } SCAN_break: ; } return editList; }
internal virtual void TakeBlame(EditList editList, NGit.Blame.Candidate child) { Blame(editList, this, child); }
private static void Blame(EditList editList, NGit.Blame.Candidate a, NGit.Blame.Candidate b) { Region r = b.ClearRegionList(); Region aTail = null; Region bTail = null; for (int eIdx = 0; eIdx < editList.Count;) { // If there are no more regions left, neither side has any // more responsibility for the result. Remaining edits can // be safely ignored. if (r == null) { return; } Edit e = editList[eIdx]; // Edit ends before the next candidate region. Skip the edit. if (e.GetEndB() <= r.sourceStart) { eIdx++; continue; } // Next candidate region starts before the edit. Assign some // of the blame onto A, but possibly split and also on B. if (r.sourceStart < e.GetBeginB()) { int d = e.GetBeginB() - r.sourceStart; if (r.length <= d) { // Pass the blame for this region onto A. Region next = r.next; r.sourceStart = e.GetBeginA() - d; aTail = Add(aTail, a, r); r = next; continue; } // Split the region and assign some to A, some to B. aTail = Add(aTail, a, r.SplitFirst(e.GetBeginA() - d, d)); r.SlideAndShrink(d); } // At this point e.getBeginB() <= r.sourceStart. // An empty edit on the B side isn't relevant to this split, // as it does not overlap any candidate region. if (e.GetLengthB() == 0) { eIdx++; continue; } // If the region ends before the edit, blame on B. int rEnd = r.sourceStart + r.length; if (rEnd <= e.GetEndB()) { Region next = r.next; bTail = Add(bTail, b, r); r = next; if (rEnd == e.GetEndB()) { eIdx++; } continue; } // This region extends beyond the edit. Blame the first // half of the region on B, and process the rest after. int len = e.GetEndB() - r.sourceStart; bTail = Add(bTail, b, r.SplitFirst(r.sourceStart, len)); r.SlideAndShrink(len); eIdx++; } if (r == null) { return; } // For any remaining region, pass the blame onto A after shifting // the source start to account for the difference between the two. Edit e_1 = editList[editList.Count - 1]; int endB = e_1.GetEndB(); int d_1 = endB - e_1.GetEndA(); if (aTail == null) { a.regionList = r; } else { aTail.next = r; } do { if (endB <= r.sourceStart) { r.sourceStart -= d_1; } r = r.next; }while (r != null); }
public virtual void TestSet() { Edit e1 = new Edit(1, 2, 1, 1); Edit e2 = new Edit(3, 4, 3, 3); EditList l = new EditList(); l.AddItem(e1); NUnit.Framework.Assert.AreSame(e1, l[0]); NUnit.Framework.Assert.AreSame(e1, l.Set(0, e2)); NUnit.Framework.Assert.AreSame(e2, l[0]); }
/// <exception cref="System.IO.IOException"></exception> private bool ProcessMerge(Candidate n) { int pCnt = n.GetParentCount(); for (int pIdx = 0; pIdx < pCnt; pIdx++) { RevCommit parent = n.GetParent(pIdx); if (parent.Has(SEEN)) { continue; } revPool.ParseHeaders(parent); } // If any single parent exactly matches the merge, follow only // that one parent through history. ObjectId[] ids = null; for (int pIdx_1 = 0; pIdx_1 < pCnt; pIdx_1++) { RevCommit parent = n.GetParent(pIdx_1); if (parent.Has(SEEN)) { continue; } if (!Find(parent, n.sourcePath)) { continue; } if (!(n is Candidate.ReverseCandidate) && idBuf.Equals(n.sourceBlob)) { n.sourceCommit = parent; Push(n); return(false); } if (ids == null) { ids = new ObjectId[pCnt]; } ids[pIdx_1] = idBuf.ToObjectId(); } // If rename detection is enabled, search for any relevant names. DiffEntry[] renames = null; if (renameDetector != null) { renames = new DiffEntry[pCnt]; for (int pIdx_2 = 0; pIdx_2 < pCnt; pIdx_2++) { RevCommit parent = n.GetParent(pIdx_2); if (parent.Has(SEEN)) { continue; } if (ids != null && ids[pIdx_2] != null) { continue; } DiffEntry r = FindRename(parent, n.sourceCommit, n.sourcePath); if (r == null) { continue; } if (n is Candidate.ReverseCandidate) { if (ids == null) { ids = new ObjectId[pCnt]; } ids[pIdx_2] = r.GetOldId().ToObjectId(); } else { if (0 == r.GetOldId().PrefixCompare(n.sourceBlob)) { // A 100% rename without any content change can also // skip directly to the parent. Note this bypasses an // earlier parent that had the path (above) but did not // have an exact content match. For performance reasons // we choose to follow the one parent over trying to do // possibly both parents. n.sourceCommit = parent; n.sourcePath = PathFilter.Create(r.GetOldPath()); Push(n); return(false); } } renames[pIdx_2] = r; } } // Construct the candidate for each parent. Candidate[] parents = new Candidate[pCnt]; for (int pIdx_3 = 0; pIdx_3 < pCnt; pIdx_3++) { RevCommit parent = n.GetParent(pIdx_3); if (parent.Has(SEEN)) { continue; } Candidate p; if (renames != null && renames[pIdx_3] != null) { p = n.Create(parent, PathFilter.Create(renames[pIdx_3].GetOldPath())); p.renameScore = renames[pIdx_3].GetScore(); p.sourceBlob = renames[pIdx_3].GetOldId().ToObjectId(); } else { if (ids != null && ids[pIdx_3] != null) { p = n.Create(parent, n.sourcePath); p.sourceBlob = ids[pIdx_3]; } else { continue; } } EditList editList; if (n is Candidate.ReverseCandidate && p.sourceBlob.Equals(n.sourceBlob)) { // This special case happens on ReverseCandidate forks. p.sourceText = n.sourceText; editList = new EditList(0); } else { p.LoadText(reader); editList = diffAlgorithm.Diff(textComparator, p.sourceText, n.sourceText); } if (editList.IsEmpty()) { // Ignoring whitespace (or some other special comparator) can // cause non-identical blobs to have an empty edit list. In // a case like this push the parent alone. if (n is Candidate.ReverseCandidate) { parents[pIdx_3] = p; continue; } p.regionList = n.regionList; Push(p); return(false); } p.TakeBlame(editList, n); // Only remember this parent candidate if there is at least // one region that was blamed on the parent. if (p.regionList != null) { // Reverse blame requires inverting the regions. This puts // the regions the parent deleted from us into the parent, // and retains the common regions to look at other parents // for deletions. 
if (n is Candidate.ReverseCandidate) { Region r = p.regionList; p.regionList = n.regionList; n.regionList = r; } parents[pIdx_3] = p; } } if (n is Candidate.ReverseCandidate) { // On a reverse blame report all deletions found in the children, // and pass on to them a copy of our region list. Candidate resultHead = null; Candidate resultTail = null; for (int pIdx_2 = 0; pIdx_2 < pCnt; pIdx_2++) { Candidate p = parents[pIdx_2]; if (p == null) { continue; } if (p.regionList != null) { Candidate r = p.Copy(p.sourceCommit); if (resultTail != null) { resultTail.queueNext = r; resultTail = r; } else { resultHead = r; resultTail = r; } } if (n.regionList != null) { p.regionList = n.regionList.DeepCopy(); Push(p); } } if (resultHead != null) { return(Result(resultHead)); } return(false); } // Push any parents that are still candidates. for (int pIdx_4 = 0; pIdx_4 < pCnt; pIdx_4++) { if (parents[pIdx_4] != null) { Push(parents[pIdx_4]); } } if (n.regionList != null) { return(Result(n)); } return(false); }
// A special edit which acts as a sentinel value by marking the end of the // list of edits /// <summary>Does the three way merge between a common base and two sequences.</summary> /// <remarks>Does the three way merge between a common base and two sequences.</remarks> /// <typeparam name="S">type of sequence.</typeparam> /// <param name="cmp">comparison method for this execution.</param> /// <param name="base">the common base sequence</param> /// <param name="ours">the first sequence to be merged</param> /// <param name="theirs">the second sequence to be merged</param> /// <returns>the resulting content</returns> public MergeResult <S> Merge <S>(SequenceComparator <S> cmp, S @base, S ours, S theirs ) where S : Sequence { IList <S> sequences = new AList <S>(3); sequences.AddItem(@base); sequences.AddItem(ours); sequences.AddItem(theirs); MergeResult <S> result = new MergeResult <S>(sequences); if (ours.Size() == 0) { if (theirs.Size() != 0) { EditList theirsEdits = diffAlg.Diff(cmp, @base, theirs); if (!theirsEdits.IsEmpty()) { // we deleted, they modified -> Let their complete content // conflict with empty text result.Add(1, 0, 0, MergeChunk.ConflictState.FIRST_CONFLICTING_RANGE); result.Add(2, 0, theirs.Size(), MergeChunk.ConflictState.NEXT_CONFLICTING_RANGE); } else { // we deleted, they didn't modify -> Let our deletion win result.Add(1, 0, 0, MergeChunk.ConflictState.NO_CONFLICT); } } else { // we and they deleted -> return a single chunk of nothing result.Add(1, 0, 0, MergeChunk.ConflictState.NO_CONFLICT); } return(result); } else { if (theirs.Size() == 0) { EditList oursEdits = diffAlg.Diff(cmp, @base, ours); if (!oursEdits.IsEmpty()) { // we modified, they deleted -> Let our complete content // conflict with empty text result.Add(1, 0, ours.Size(), MergeChunk.ConflictState.FIRST_CONFLICTING_RANGE); result.Add(2, 0, 0, MergeChunk.ConflictState.NEXT_CONFLICTING_RANGE); } else { // they deleted, we didn't modify -> Let their deletion win result.Add(2, 0, 0, MergeChunk.ConflictState.NO_CONFLICT); } return(result); } } EditList oursEdits_1 = diffAlg.Diff(cmp, @base, ours); Iterator <Edit> baseToOurs = oursEdits_1.Iterator(); EditList theirsEdits_1 = diffAlg.Diff(cmp, @base, theirs); Iterator <Edit> baseToTheirs = theirsEdits_1.Iterator(); int current = 0; // points to the next line (first line is 0) of base // which was not handled yet Edit oursEdit = NextEdit(baseToOurs); Edit theirsEdit = NextEdit(baseToTheirs); // iterate over all edits from base to ours and from base to theirs // leave the loop when there are no more edits for ours or for theirs // (or both) while (theirsEdit != END_EDIT || oursEdit != END_EDIT) { if (oursEdit.GetEndA() < theirsEdit.GetBeginA()) { // something was changed in ours not overlapping with any change // from theirs. First add the common part in front of the edit // then the edit. if (current != oursEdit.GetBeginA()) { result.Add(0, current, oursEdit.GetBeginA(), MergeChunk.ConflictState.NO_CONFLICT ); } result.Add(1, oursEdit.GetBeginB(), oursEdit.GetEndB(), MergeChunk.ConflictState. NO_CONFLICT); current = oursEdit.GetEndA(); oursEdit = NextEdit(baseToOurs); } else { if (theirsEdit.GetEndA() < oursEdit.GetBeginA()) { // something was changed in theirs not overlapping with any change // from ours. First add the common part in front of the edit // then the edit. 
if (current != theirsEdit.GetBeginA()) { result.Add(0, current, theirsEdit.GetBeginA(), MergeChunk.ConflictState.NO_CONFLICT ); } result.Add(2, theirsEdit.GetBeginB(), theirsEdit.GetEndB(), MergeChunk.ConflictState .NO_CONFLICT); current = theirsEdit.GetEndA(); theirsEdit = NextEdit(baseToTheirs); } else { // here we found a real overlapping modification // if there is a common part in front of the conflict add it if (oursEdit.GetBeginA() != current && theirsEdit.GetBeginA() != current) { result.Add(0, current, Math.Min(oursEdit.GetBeginA(), theirsEdit.GetBeginA()), MergeChunk.ConflictState .NO_CONFLICT); } // set some initial values for the ranges in A and B which we // want to handle int oursBeginB = oursEdit.GetBeginB(); int theirsBeginB = theirsEdit.GetBeginB(); // harmonize the start of the ranges in A and B if (oursEdit.GetBeginA() < theirsEdit.GetBeginA()) { theirsBeginB -= theirsEdit.GetBeginA() - oursEdit.GetBeginA(); } else { oursBeginB -= oursEdit.GetBeginA() - theirsEdit.GetBeginA(); } // combine edits: // Maybe an Edit on one side corresponds to multiple Edits on // the other side. Then we have to combine the Edits of the // other side - so in the end we can merge together two single // edits. // // It is important to notice that this combining will extend the // ranges of our conflict always downwards (towards the end of // the content). The starts of the conflicting ranges in ours // and theirs are not touched here. // // This combining is an iterative process: after we have // combined some edits we have to do the check again. The // combined edits could now correspond to multiple edits on the // other side. // // Example: when this combining algorithm works on the following // edits // oursEdits=((0-5,0-5),(6-8,6-8),(10-11,10-11)) and // theirsEdits=((0-1,0-1),(2-3,2-3),(5-7,5-7)) // it will merge them into // oursEdits=((0-8,0-8),(10-11,10-11)) and // theirsEdits=((0-7,0-7)) // // Since the only interesting thing to us is how in ours and // theirs the end of the conflicting range is changing we let // oursEdit and theirsEdit point to the last conflicting edit Edit nextOursEdit = NextEdit(baseToOurs); Edit nextTheirsEdit = NextEdit(baseToTheirs); for (; ;) { if (oursEdit.GetEndA() >= nextTheirsEdit.GetBeginA()) { theirsEdit = nextTheirsEdit; nextTheirsEdit = NextEdit(baseToTheirs); } else { if (theirsEdit.GetEndA() >= nextOursEdit.GetBeginA()) { oursEdit = nextOursEdit; nextOursEdit = NextEdit(baseToOurs); } else { break; } } } // harmonize the end of the ranges in A and B int oursEndB = oursEdit.GetEndB(); int theirsEndB = theirsEdit.GetEndB(); if (oursEdit.GetEndA() < theirsEdit.GetEndA()) { oursEndB += theirsEdit.GetEndA() - oursEdit.GetEndA(); } else { theirsEndB += oursEdit.GetEndA() - theirsEdit.GetEndA(); } // A conflicting region is found. Strip off common lines at // the beginning and the end of the conflicting region // Determine the minimum length of the conflicting areas in OURS // and THEIRS. Also determine how much bigger the conflicting // area in THEIRS is compared to OURS. All that is needed to // limit the search for common areas at the beginning or end // (the common areas cannot be bigger than the smaller // conflicting area. The delta is needed to know whether the // complete conflicting area is common in OURS and THEIRS. 
int minBSize = oursEndB - oursBeginB; int BSizeDelta = minBSize - (theirsEndB - theirsBeginB); if (BSizeDelta > 0) { minBSize -= BSizeDelta; } int commonPrefix = 0; while (commonPrefix < minBSize && cmp.Equals(ours, oursBeginB + commonPrefix, theirs , theirsBeginB + commonPrefix)) { commonPrefix++; } minBSize -= commonPrefix; int commonSuffix = 0; while (commonSuffix < minBSize && cmp.Equals(ours, oursEndB - commonSuffix - 1, theirs , theirsEndB - commonSuffix - 1)) { commonSuffix++; } minBSize -= commonSuffix; // Add the common lines at start of conflict if (commonPrefix > 0) { result.Add(1, oursBeginB, oursBeginB + commonPrefix, MergeChunk.ConflictState.NO_CONFLICT ); } // Add the conflict (Only if there is a conflict left to report) if (minBSize > 0 || BSizeDelta != 0) { result.Add(1, oursBeginB + commonPrefix, oursEndB - commonSuffix, MergeChunk.ConflictState .FIRST_CONFLICTING_RANGE); result.Add(2, theirsBeginB + commonPrefix, theirsEndB - commonSuffix, MergeChunk.ConflictState .NEXT_CONFLICTING_RANGE); } // Add the common lines at end of conflict if (commonSuffix > 0) { result.Add(1, oursEndB - commonSuffix, oursEndB, MergeChunk.ConflictState.NO_CONFLICT ); } current = Math.Max(oursEdit.GetEndA(), theirsEdit.GetEndA()); oursEdit = nextOursEdit; theirsEdit = nextTheirsEdit; } } } // maybe we have a common part behind the last edit: copy it to the // result if (current < @base.Size()) { result.Add(0, current, @base.Size(), MergeChunk.ConflictState.NO_CONFLICT); } return(result); }
private void FormatEdits(Stream @out, RawText a, RawText b, EditList edits) { for (int curIdx = 0; curIdx < edits.Count; /* */) { Edit curEdit = edits.get(curIdx); int endIdx = FindCombinedEnd(edits, curIdx); Edit endEdit = edits.get(endIdx); int aCur = Math.Max(0, curEdit.BeginA - _context); int bCur = Math.Max(0, curEdit.BeginB - _context); int aEnd = Math.Min(a.size(), endEdit.EndA + _context); int bEnd = Math.Min(b.size(), endEdit.EndB + _context); WriteHunkHeader(@out, aCur, aEnd, bCur, bEnd); while (aCur < aEnd || bCur < bEnd) { if (aCur < curEdit.BeginA || endIdx + 1 < curIdx) { WriteLine(@out, ' ', a, aCur); aCur++; bCur++; } else if (aCur < curEdit.EndA) { WriteLine(@out, '-', a, aCur++); } else if (bCur < curEdit.EndB) { WriteLine(@out, '+', b, bCur++); } if (End(curEdit, aCur, bCur) && ++curIdx < edits.Count) { curEdit = edits.get(curIdx); } } } }
public void LoadDiff(Text leftText, Text rightText, EditList editList) { var leftOut = new StringBuilder(); var rightOut = new StringBuilder(); var leftMarkers = new List<TextMarker>(); var rightMarkers = new List<TextMarker>(); _markers = new List<IDiffMarker>(); Format(editList, leftText, rightText, leftMarkers, rightMarkers, leftOut, rightOut); _leftEditor.Document.MarkerStrategy.RemoveAll(p => true); _leftEditor.Text = leftOut.ToString(); leftMarkers.ForEach(p => _leftEditor.Document.MarkerStrategy.AddMarker(p)); _leftEditor.Refresh(); _rightEditor.Document.MarkerStrategy.RemoveAll(p => true); _rightEditor.Text = rightOut.ToString(); rightMarkers.ForEach(p => _rightEditor.Document.MarkerStrategy.AddMarker(p)); _rightEditor.Refresh(); // This is the algorithm ICSharpTextEditor uses to determine the // full size of the scroll bar. _visibleLines = _rightEditor.Document.GetVisibleLine(_rightEditor.Document.TotalNumberOfLines - 1) + 1 + _rightEditor.ActiveTextAreaControl.TextArea.TextView.VisibleLineCount * 2 / 3; _markerMap.SetMarkers(_markers, _visibleLines); if (!_readOnly) _editor.SetMarkers(_markers, _visibleLines); UpdateVisibleLines(); }
public virtual void TestAddTwo() { Edit e1 = new Edit(1, 2, 1, 1); Edit e2 = new Edit(8, 8, 8, 12); EditList l = new EditList(); l.AddItem(e1); l.AddItem(e2); NUnit.Framework.Assert.AreEqual(2, l.Count); NUnit.Framework.Assert.AreSame(e1, l[0]); NUnit.Framework.Assert.AreSame(e2, l[1]); Iterator<Edit> i = l.Iterator(); NUnit.Framework.Assert.AreSame(e1, i.Next()); NUnit.Framework.Assert.AreSame(e2, i.Next()); NUnit.Framework.Assert.IsTrue(l.Equals(l)); NUnit.Framework.Assert.IsFalse(l.Equals(new EditList())); EditList l2 = new EditList(); l2.AddItem(e1); l2.AddItem(e2); NUnit.Framework.Assert.IsTrue(l.Equals(l2)); NUnit.Framework.Assert.IsTrue(l2.Equals(l)); NUnit.Framework.Assert.AreEqual(l.GetHashCode(), l2.GetHashCode()); }
/** * Entry point into the algorithm this class is all about: it calculates the * differences between A and B and records them as a list of edits. */ protected void calculateEdits() { edits = new EditList(); middle.initialize(0, a.size(), 0, b.size()); if (middle.beginA >= middle.endA && middle.beginB >= middle.endB) return; calculateEdits(middle.beginA, middle.endA, middle.beginB, middle.endB); }
public virtual void TestRemove() { Edit e1 = new Edit(1, 2, 1, 1); Edit e2 = new Edit(8, 8, 8, 12); EditList l = new EditList(); l.AddItem(e1); l.AddItem(e2); l.Remove(e1); NUnit.Framework.Assert.AreEqual(1, l.Count); NUnit.Framework.Assert.AreSame(e2, l[0]); }
void timer_Tick(object sender, EventArgs e) { DateTime tempTime = DateTime.Now; TimeSpan n; for (int number = 0; number < myInfo.Count(); number++) { if (DateTime.Compare(myInfo.Element(number).DateThis, tempTime) == 0 || DateTime.Compare(myInfo.Element(number).DateThis, tempTime) < 0) { try { n = (tempTime - myInfo.Element(number).DateThis); switch (myInfo.Element(number).MyWork) { case Model.Work.daily: if (n.Days > 1 || n.Hours > 1 || n.Minutes > 1 || n.Seconds > 10) { } else { System.Diagnostics.Process.Start(myInfo.Element(number).Path); _viwe.NewMesege = string.Format("Задача {0} была выполнена в {1}", myInfo.Element(number).Path, tempTime); EditList?.Invoke(this, EventArgs.Empty); } myInfo.Element(number).DateThis = myInfo.Element(number).DateThis.AddDays(1); break; case Model.Work.weekly: if (n.Days > 1 || n.Hours > 1 || n.Minutes > 1 || n.Seconds > 10) { } else { System.Diagnostics.Process.Start(myInfo.Element(number).Path); _viwe.NewMesege = string.Format("Задача {0} была выполнена в {1}", myInfo.Element(number).Path, tempTime); EditList?.Invoke(this, EventArgs.Empty); } bool isSerhc = true; int i1 = 0; int i = (int)myInfo.Element(number).DateThis.DayOfWeek; do { for (; i < 7; i++, i1++) { if (myInfo.Element(number).Days_of_the_week[i] == true) { myInfo.Element(number).DateThis = myInfo.Element(number).DateThis.AddDays((i1 + i + 1)); isSerhc = false; break; } } i = 0; } while (isSerhc == true); break; case Model.Work.monthly: if (n.Days > 1 || n.Hours > 1 || n.Minutes > 1 || n.Seconds > 10) { myInfo.Element(number).DateThis = myInfo.Element(number).DateThis.AddMonths(1); } else { System.Diagnostics.Process.Start(myInfo.Element(number).Path); myInfo.Element(number).DateThis = myInfo.Element(number).DateThis.AddMonths(1); _viwe.NewMesege = string.Format("Задача {0} была выполнена в {1}", myInfo.Element(number).Path, tempTime); EditList?.Invoke(this, EventArgs.Empty); } break; case Model.Work.once: if (n.Days > 1 || n.Hours > 1 || n.Minutes > 1 || n.Seconds > 10) { Delete(number); } else { System.Diagnostics.Process.Start(myInfo.Element(number).Path); _viwe.NewMesege = string.Format("Задача {0} была выполнена в {1}", myInfo.Element(number).Path, tempTime); EditList?.Invoke(this, EventArgs.Empty); Delete(number); } break; } } catch (Exception ex) { Delete(number); _viwe.NewMesege = string.Format("Задача {0} была прорваленна в {1}, информация ошибки: {2}", myInfo.Element(number).Path, tempTime, ex.Message); EditList?.Invoke(this, EventArgs.Empty); } } } }
/// <summary> /// Returns a list describing the content edits performed within the hunk. /// </summary> /// <returns></returns> internal EditList ToEditList() { var r = new EditList(); byte[] buf = Buffer; int c = RawParseUtils.nextLF(buf, _startOffset); int oLine = OldStartLine; int nLine = NewStartLine; Edit inEdit = null; for (; c < Buffer.Length; c = RawParseUtils.nextLF(buf, c)) { bool breakScan; switch (buf[c]) { case (byte)' ': case (byte)'\n': inEdit = null; oLine++; nLine++; continue; case (byte)'-': if (inEdit == null) { inEdit = new Edit(oLine - 1, nLine - 1); r.Add(inEdit); } oLine++; inEdit.ExtendA(); continue; case (byte)'+': if (inEdit == null) { inEdit = new Edit(oLine - 1, nLine - 1); r.Add(inEdit); } nLine++; inEdit.ExtendB(); continue; case (byte)'\\': // Matches "\ No newline at end of file" continue; default: breakScan = true; break; } if (breakScan) { break; } } return r; }
public override bool Save() { _EditList = EditList.ToDelimitedString(","); return(base.Save()); }
/** * Helper method which returns the next Edit for an Iterator over Edits. * When there are no more edits left, this method returns the constant * END_EDIT. * * @param it * the iterator for which the next edit should be returned * @return the next edit from the iterator or END_EDIT if there are no more * edits */ private static Edit nextEdit(EditList.EditListIterator it) { return (it.hasNext() ? it.next() : END_EDIT); }