Example No. 1
        public static List <commonOrDifferentThing> diff_comm(string[] file1, string[] file2)
        {
            // We apply the LCS to build a "comm"-style picture of the
            // differences between file1 and file2.

            var result = new List <commonOrDifferentThing>();

            int tail1 = file1.Length;
            int tail2 = file2.Length;

            commonOrDifferentThing common = new commonOrDifferentThing
            {
                common = new List <string>()
            };

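            // The candidate chain is walked from the last common line back
            // toward the first, so both files are consumed from their tails;
            // the per-chunk lists and the final result are reversed afterwards
            // to restore forward order.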
            for (var candidate = TextDiff.longest_common_subsequence(file1, file2);
                 candidate != null;
                 candidate = candidate.chain)
            {
                commonOrDifferentThing different = new commonOrDifferentThing
                {
                    file1 = new List <string>(),
                    file2 = new List <string>()
                };

                while (--tail1 > candidate.file1index)
                {
                    different.file1.Add(file1[tail1]);
                }

                while (--tail2 > candidate.file2index)
                {
                    different.file2.Add(file2[tail2]);
                }

                if (different.file1.Count > 0 || different.file2.Count > 0)
                {
                    processCommon(ref common, result);
                    different.file1.Reverse();
                    different.file2.Reverse();
                    result.Add(different);
                }

                if (tail1 >= 0)
                {
                    common.common.Add(file1[tail1]);
                }
            }

            processCommon(ref common, result);

            result.Reverse();
            return(result);
        }
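A minimal usage sketch for diff_comm (not part of the original example). It assumes the methods above live on the TextDiff class they reference, and that processCommon, which is not shown, emits blocks whose common list is populated while the differing blocks leave it null. The helper name and sample inputs are purely illustrative.

        public static void ShowComm()
        {
            string[] left  = { "a", "b", "c", "d" };
            string[] right = { "a", "x", "c", "d" };

            foreach (var block in TextDiff.diff_comm(left, right))
            {
                if (block.common != null)
                {
                    // Shared run of lines.
                    foreach (var line in block.common)
                    {
                        System.Console.WriteLine("  " + line);
                    }
                }
                else
                {
                    // Differing run: lines from file1, then the lines replacing them in file2.
                    foreach (var line in block.file1)
                    {
                        System.Console.WriteLine("< " + line);
                    }
                    foreach (var line in block.file2)
                    {
                        System.Console.WriteLine("> " + line);
                    }
                }
            }
        }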
Example No. 2
        public static List <diffSet> diff_indices(string[] file1, string[] file2)
        {
            // We apply the LCS to give a simple representation of the
            // offsets and lengths of mismatched chunks in the input
            // files. This is used by Diff3MergeIndices below.

            var result = new List <diffSet>();
            var tail1  = file1.Length;
            var tail2  = file2.Length;

            for (var candidate = TextDiff.longest_common_subsequence(file1, file2);
                 candidate != null;
                 candidate = candidate.chain)
            {
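                // Lines strictly between the previous tails and this LCS match
                // form the mismatched chunk on each side, hence the "- 1".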
                var mismatchLength1 = tail1 - candidate.file1index - 1;
                var mismatchLength2 = tail2 - candidate.file2index - 1;
                tail1 = candidate.file1index;
                tail2 = candidate.file2index;

                if (mismatchLength1 > 0 || mismatchLength2 > 0)
                {
                    result.Add(new diffSet
                    {
                        file1 = new chunkReference
                        {
                            offset = tail1 + 1,
                            length = mismatchLength1
                        },
                        file2 = new chunkReference
                        {
                            offset = tail2 + 1,
                            length = mismatchLength2
                        }
                    });
                }
            }

            result.Reverse();
            return(result);
        }
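A minimal usage sketch for diff_indices (not part of the original example). It only touches the diffSet and chunkReference fields populated above and assumes the methods live on the TextDiff class they reference; the helper name and inputs are illustrative.

        public static void ShowIndices()
        {
            string[] left  = { "a", "b", "c" };
            string[] right = { "a", "x", "y", "c" };

            foreach (var chunk in TextDiff.diff_indices(left, right))
            {
                // Each entry pairs a mismatched span in file1 with the span
                // that replaces it in file2.
                System.Console.WriteLine("file1[{0}, +{1}] <-> file2[{2}, +{3}]",
                                         chunk.file1.offset, chunk.file1.length,
                                         chunk.file2.offset, chunk.file2.length);
            }
        }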
Example No. 3
        public static List <patchResult> diff_patch(string[] file1, string[] file2)
        {
            // We apply the LCS to build a structured representation of a
            // diff(1)-style patch.

            var result = new List <patchResult>();
            var tail1  = file1.Length;
            var tail2  = file2.Length;

            for (var candidate = TextDiff.longest_common_subsequence(file1, file2);
                 candidate != null;
                 candidate = candidate.chain)
            {
                var mismatchLength1 = tail1 - candidate.file1index - 1;
                var mismatchLength2 = tail2 - candidate.file2index - 1;
                tail1 = candidate.file1index;
                tail2 = candidate.file2index;

                if (mismatchLength1 > 0 || mismatchLength2 > 0)
                {
                    patchResult thisResult = new patchResult
                    {
                        file1 = new patchDescriptionThing(file1,
                                                          candidate.file1index + 1,
                                                          mismatchLength1),
                        file2 = new patchDescriptionThing(file2,
                                                          candidate.file2index + 1,
                                                          mismatchLength2)
                    };
                    result.Add(thisResult);
                }
            }

            result.Reverse();
            return(result);
        }
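A minimal usage sketch for diff_patch (not part of the original example). Since patchDescriptionThing's members are not shown above, the sketch only reports how many mismatched hunks the patch contains; the helper name and inputs are illustrative.

        public static void ShowPatch()
        {
            string[] before = { "one", "two", "three" };
            string[] after  = { "one", "TWO", "three", "four" };

            var patch = TextDiff.diff_patch(before, after);
            System.Console.WriteLine("{0} hunk(s) differ between the two inputs", patch.Count);
        }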
Example No. 4
        public static List <patch3Set> Diff3MergeIndices(string[] a, string[] o, string[] b)
        {
            // Given three files, A, O, and B, where both A and B are
            // independently derived from O, returns a fairly complicated
            // internal representation of merge decisions it's taken. The
            // interested reader may wish to consult
            //
            // Sanjeev Khanna, Keshav Kunal, and Benjamin C. Pierce. "A
            // Formal Investigation of Diff3." In Arvind and Prasad,
            // editors, Foundations of Software Technology and Theoretical
            // Computer Science (FSTTCS), December 2007.
            //
            // (http://www.cis.upenn.edu/~bcpierce/papers/diff3-short.pdf)

            var m1 = TextDiff.diff_indices(o, a);
            var m2 = TextDiff.diff_indices(o, b);

            var hunks = new List <diff3Set>();

            for (int i = 0; i < m1.Count; i++)
            {
                addHunk(m1[i], Side.Left, hunks);
            }
            for (int i = 0; i < m2.Count; i++)
            {
                addHunk(m2[i], Side.Right, hunks);
            }
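            // The merge loop below assumes the hunks are ordered by their
            // offset into o (file1offset), so edits from the two sides that
            // touch overlapping regions of o become adjacent after sorting.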
            hunks.Sort();

            var result       = new List <patch3Set>();
            var commonOffset = 0;

            for (var hunkIndex = 0; hunkIndex < hunks.Count; hunkIndex++)
            {
                var firstHunkIndex = hunkIndex;
                var hunk           = hunks[hunkIndex];
                var regionLhs      = hunk.file1offset;
                var regionRhs      = regionLhs + hunk.file1length;

                while (hunkIndex < hunks.Count - 1)
                {
                    var maybeOverlapping = hunks[hunkIndex + 1];
                    var maybeLhs         = maybeOverlapping.file1offset;
                    if (maybeLhs > regionRhs)
                    {
                        break;
                    }

                    regionRhs = Math.Max(regionRhs, maybeLhs + maybeOverlapping.file1length);
                    hunkIndex++;
                }

                copyCommon2(regionLhs, ref commonOffset, result);
                if (firstHunkIndex == hunkIndex)
                {
                    // The "overlap" was only one hunk long, meaning that
                    // there's no conflict here. Either a and o were the
                    // same, or b and o were the same.
                    if (hunk.file2length > 0)
                    {
                        result.Add(new patch3Set
                        {
                            side   = hunk.side,
                            offset = hunk.file2offset,
                            length = hunk.file2length
                        });
                    }
                }
                else
                {
                    // A proper conflict. Determine the extents of the
                    // regions involved from a, o and b. Effectively merge
                    // all the hunks on the left into one giant hunk, and
                    // do the same for the right; then, correct for skew
                    // in the regions of o that each side changed, and
                    // report appropriate spans for the three sides.

                    var regions = new Dictionary <Side, conflictRegion>
                    {
                        {
                            Side.Left,
                            new conflictRegion
                            {
                                file1RegionStart = a.Length,
                                file1RegionEnd   = -1,
                                file2RegionStart = o.Length,
                                file2RegionEnd   = -1
                            }
                        },
                        {
                            Side.Right,
                            new conflictRegion
                            {
                                file1RegionStart = b.Length,
                                file1RegionEnd   = -1,
                                file2RegionStart = o.Length,
                                file2RegionEnd   = -1
                            }
                        }
                    };

                    for (int i = firstHunkIndex; i <= hunkIndex; i++)
                    {
                        hunk = hunks[i];
                        var side  = hunk.side;
                        var r     = regions[side];
                        var oLhs  = hunk.file1offset;
                        var oRhs  = oLhs + hunk.file1length;
                        var abLhs = hunk.file2offset;
                        var abRhs = abLhs + hunk.file2length;
                        r.file1RegionStart = Math.Min(abLhs, r.file1RegionStart);
                        r.file1RegionEnd   = Math.Max(abRhs, r.file1RegionEnd);
                        r.file2RegionStart = Math.Min(oLhs, r.file2RegionStart);
                        r.file2RegionEnd   = Math.Max(oRhs, r.file2RegionEnd);
                    }
                    var aLhs = regions[Side.Left].file1RegionStart + (regionLhs - regions[Side.Left].file2RegionStart);
                    var aRhs = regions[Side.Left].file1RegionEnd + (regionRhs - regions[Side.Left].file2RegionEnd);
                    var bLhs = regions[Side.Right].file1RegionStart + (regionLhs - regions[Side.Right].file2RegionStart);
                    var bRhs = regions[Side.Right].file1RegionEnd + (regionRhs - regions[Side.Right].file2RegionEnd);

                    result.Add(new patch3Set
                    {
                        side                = Side.Conflict,
                        offset              = aLhs,
                        length              = aRhs - aLhs,
                        conflictOldOffset   = regionLhs,
                        conflictOldLength   = regionRhs - regionLhs,
                        conflictRightOffset = bLhs,
                        conflictRightLength = bRhs - bLhs
                    });
                }

                commonOffset = regionRhs;
            }

            copyCommon2(o.Length, ref commonOffset, result);
            return(result);
        }
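A minimal usage sketch for Diff3MergeIndices (not part of the original example). Side.Conflict entries carry the extra conflict* fields set above; how copyCommon2, which is not shown, labels the untouched regions is unknown here, so those entries are printed generically. The helper name and inputs are illustrative.

        public static void ShowMergeIndices()
        {
            string[] o = { "a", "b", "c" };        // common ancestor
            string[] a = { "a", "B", "c" };        // left-hand edit
            string[] b = { "a", "b", "c", "d" };   // right-hand edit

            foreach (var hunk in TextDiff.Diff3MergeIndices(a, o, b))
            {
                if (hunk.side == Side.Conflict)
                {
                    System.Console.WriteLine("conflict: a[{0}, +{1}] / o[{2}, +{3}] / b[{4}, +{5}]",
                                             hunk.offset, hunk.length,
                                             hunk.conflictOldOffset, hunk.conflictOldLength,
                                             hunk.conflictRightOffset, hunk.conflictRightLength);
                }
                else
                {
                    System.Console.WriteLine("{0}: offset {1}, length {2}",
                                             hunk.side, hunk.offset, hunk.length);
                }
            }
        }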