Code example #1
File: Comper.impl.cs Project: s2hd/KENSSharp
        // Pushes a single descriptor bit. When bitStream.Push signals that a complete
        // 16-bit descriptor word has just been written to the destination, the
        // literal/match bytes buffered in 'data' are flushed out after it.
        private static void Push(UInt16BE_NE_H_OutputBitStream bitStream, bool bit, Stream destination, MemoryStream data)
        {
            if (bitStream.Push(bit))
            {
                byte[] bytes = data.ToArray();
                destination.Write(bytes, 0, bytes.Length);
                data.SetLength(0);
            }
        }
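
The helper relies on the return value of UInt16BE_NE_H_OutputBitStream.Push: both encoders below treat true as meaning that a full 16-bit descriptor word has just gone out to the destination, which is exactly when the buffered data bytes must follow it. The real bit stream class is defined elsewhere in KENSSharp and is not shown on this page, so the sketch below only approximates that assumed contract; the class name, fields and bit order are illustrative guesses, not the project's code.

        // Illustrative sketch only; not KENSSharp's UInt16BE_NE_H_OutputBitStream.
        // It mimics the contract the Push helper above depends on: Push() returns
        // true exactly when a complete big-endian descriptor word has been written.
        // (Assumes: using System.IO;)
        internal sealed class DescriptorBitStreamSketch
        {
            private readonly Stream destination;
            private ushort word;    // descriptor bits gathered so far (assumed MSB-first)
            private int bitCount;   // number of bits currently held in 'word'

            public DescriptorBitStreamSketch(Stream destination)
            {
                this.destination = destination;
            }

            public bool Push(bool bit)
            {
                word = (ushort)(((word << 1) | (bit ? 1 : 0)) & 0xFFFF);
                if (++bitCount < 16)
                {
                    return false;
                }

                destination.WriteByte((byte)(word >> 8));   // high byte first
                destination.WriteByte((byte)(word & 0xFF));
                word = 0;
                bitCount = 0;
                return true;
            }
        }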
Code example #2
File: Comper.impl.cs Project: s2hd/KENSSharp
        private static void EncodeInternal(Stream destination, byte[] buffer, long slidingWindow, long recLength, long size)
        {
            UInt16BE_NE_H_OutputBitStream bitStream = new UInt16BE_NE_H_OutputBitStream(destination);
            MemoryStream data = new MemoryStream();

            if (size > 0)
            {
                long bPointer = 2, longestMatchOffset = 0;
                bitStream.Push(false);
                NeutralEndian.Write1(data, buffer[0]);
                NeutralEndian.Write1(data, buffer[1]);

                while (bPointer < size)
                {
                    long matchMax      = Math.Min(recLength, size - bPointer);
                    long backSearchMax = Math.Max(bPointer - slidingWindow, 0);
                    long longestMatch  = 2;
                    long backSearch    = bPointer;

                    do
                    {
                        backSearch -= 2;
                        long currentCount = 0;
                        while (buffer[backSearch + currentCount] == buffer[bPointer + currentCount] && buffer[backSearch + currentCount + 1] == buffer[bPointer + currentCount + 1])
                        {
                            currentCount += 2;
                            if (currentCount >= matchMax)
                            {
                                // Match is as big as the look-forward buffer (or file) will let it be
                                break;
                            }
                        }

                        if (currentCount > longestMatch)
                        {
                            // New 'best' match
                            longestMatch       = currentCount;
                            longestMatchOffset = backSearch;
                        }
                    } while (backSearch > backSearchMax);               // Repeat for as far back as search buffer will let us

                    long iCount  = longestMatch / 2;                    // Comper counts in words (16 bits)
                    long iOffset = (longestMatchOffset - bPointer) / 2; // Comper's offsets count in words (16 bits)

                    if (iCount == 1)
                    {
                        // Symbolwise match
                        Push(bitStream, false, destination, data);
                        NeutralEndian.Write1(data, buffer[bPointer]);
                        NeutralEndian.Write1(data, buffer[bPointer + 1]);
                    }
                    else
                    {
                        // Dictionary match
                        Push(bitStream, true, destination, data);
                        NeutralEndian.Write1(data, (byte)(iOffset));
                        NeutralEndian.Write1(data, (byte)(iCount - 1));
                    }

                    bPointer += iCount * 2;   // iCount counts in words (16 bits), so we correct it to bytes (8 bits) here
                }
            }

            Push(bitStream, true, destination, data);

            NeutralEndian.Write1(data, 0);
            NeutralEndian.Write1(data, 0);
            bitStream.Flush(true);

            byte[] bytes = data.ToArray();
            destination.Write(bytes, 0, bytes.Length);
        }
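
EncodeInternal is the greedy encoder; the public wrapper that reads the input and calls it is not part of this excerpt. A hypothetical caller might look like the sketch below. The 0x200 values are assumptions rather than the project's constants: 0x200 bytes (0x100 words) is simply the largest range that the one-byte offset and count fields written above can cover.

        // Hypothetical caller, for illustration only; not the project's public
        // Encode method. The window/length limits are assumed, not taken from KENSSharp.
        private static void EncodeSketch(Stream source, Stream destination)
        {
            long size = source.Length - source.Position;
            byte[] buffer = new byte[size + (size & 1)];   // pad the input to a whole number of words
            source.Read(buffer, 0, (int)size);

            // slidingWindow and recLength are byte counts here: 0x200 bytes = 0x100 words,
            // matching the reach of the single-byte offset and count fields.
            EncodeInternal(destination, buffer, 0x200, 0x200, buffer.Length);
        }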
Code example #3
File: Comper.impl.cs Project: santosha2003/KENSSharp
        internal static void Encode(Stream source, Stream destination)
        {
            int size_bytes = (int)(source.Length - source.Position);

            byte[] buffer_bytes = new byte[size_bytes + (size_bytes & 1)];
            source.Read(buffer_bytes, 0, size_bytes);

            int size = (size_bytes + 1) / 2;

            ushort[] buffer = new ushort[size];
            for (int i = 0; i < size; ++i)
            {
                buffer[i] = (ushort)((buffer_bytes[i * 2] << 8) | buffer_bytes[(i * 2) + 1]);
            }

            /*
             * Here we create and populate the "LZSS graph":
             *
             * Each value in the uncompressed file forms a node in this graph.
             * The various edges between these nodes represent LZSS matches.
             *
             * Using a shortest-path algorithm, these edges can be used to
             * find the optimal combination of matches needed to produce the
             * smallest possible file.
             *
             * The resulting array contains only one edge per node: the optimal
             * one. This means, in order to produce the smallest file, you just
             * have to traverse the graph from one edge to the next, encoding
             * each match as you go along.
             */

            LZSSGraphEdge[] node_meta_array = new LZSSGraphEdge[size + 1];

            // Initialise the array
            node_meta_array[0].cost = 0;
            for (int i = 1; i < size + 1; ++i)
            {
                node_meta_array[i].cost = int.MaxValue;
            }

            // Find matches
            for (int i = 0; i < size; ++i)
            {
                int max_read_ahead  = Math.Min(0x100, size - i);
                int max_read_behind = Math.Max(0, i - 0x100);

                // Search for dictionary matches
                for (int j = i; j-- > max_read_behind;)
                {
                    for (int k = 0; k < max_read_ahead; ++k)
                    {
                        if (buffer[i + k] == buffer[j + k])
                        {
                            int distance = i - j;
                            int length   = k + 1;

                            // Update this node's optimal edge if this one is better
                            if (node_meta_array[i + k + 1].cost > node_meta_array[i].cost + 1 + 16)
                            {
                                node_meta_array[i + k + 1].cost = node_meta_array[i].cost + 1 + 16;
                                node_meta_array[i + k + 1].previous_node_index = i;
                                node_meta_array[i + k + 1].match_length        = k + 1;
                                node_meta_array[i + k + 1].match_offset        = j;
                            }
                        }
                        else
                        {
                            break;
                        }
                    }
                }

                // Do literal match
                // Update this node's optimal edge if this one is better (or the same, since literal matches usually decode faster)
                if (node_meta_array[i + 1].cost >= node_meta_array[i].cost + 1 + 16)
                {
                    node_meta_array[i + 1].cost = node_meta_array[i].cost + 1 + 16;
                    node_meta_array[i + 1].previous_node_index = i;
                    node_meta_array[i + 1].match_length        = 0;
                }
            }

            // Reverse the edge link order, so the array can be traversed from start to end, rather than vice versa
            node_meta_array[0].previous_node_index = int.MaxValue;
            node_meta_array[size].next_node_index  = int.MaxValue;
            for (int node_index = size; node_meta_array[node_index].previous_node_index != int.MaxValue; node_index = node_meta_array[node_index].previous_node_index)
            {
                node_meta_array[node_meta_array[node_index].previous_node_index].next_node_index = node_index;
            }

            /*
             * LZSS graph complete
             */

            UInt16BE_NE_H_OutputBitStream bitStream = new UInt16BE_NE_H_OutputBitStream(destination);
            MemoryStream data = new MemoryStream();

            for (int node_index = 0; node_meta_array[node_index].next_node_index != int.MaxValue; node_index = node_meta_array[node_index].next_node_index)
            {
                int next_index = node_meta_array[node_index].next_node_index;

                int length   = node_meta_array[next_index].match_length;
                int distance = next_index - node_meta_array[next_index].match_length - node_meta_array[next_index].match_offset;

                if (length != 0)
                {
                    // Compressed
                    Push(bitStream, true, destination, data);
                    NeutralEndian.Write1(data, (byte)-distance);
                    NeutralEndian.Write1(data, (byte)(length - 1));
                }
                else
                {
                    // Uncompressed
                    Push(bitStream, false, destination, data);
                    BigEndian.Write2(data, buffer[node_index]);
                }
            }

            Push(bitStream, true, destination, data);

            NeutralEndian.Write1(data, 0);
            NeutralEndian.Write1(data, 0);
            bitStream.Flush(true);

            byte[] bytes = data.ToArray();
            destination.Write(bytes, 0, bytes.Length);
        }
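
The LZSSGraphEdge type used throughout this listing is declared elsewhere in the project and does not appear on this page. A minimal declaration consistent with how its fields are used above would be something like the following; the field names come from the listing, but the declaration itself is an assumption.

        // Assumed sketch of the per-node record the graph code above relies on;
        // not copied from the project. One entry exists per input word, plus a
        // trailing entry representing the end of the file.
        private struct LZSSGraphEdge
        {
            public int cost;                // cheapest known bit cost to reach this node
            public int previous_node_index; // node the best edge into this one starts from
            public int next_node_index;     // forward link, filled in when the chain is reversed
            public int match_length;        // 0 for a literal word, otherwise the match length in words
            public int match_offset;        // start index (in words) of the dictionary match
        }

Because a literal word and a dictionary match both cost one descriptor bit plus sixteen data bits, every edge costs 1 + 16, and the shortest path simply minimises the number of emitted items; the >= comparison in the literal branch prefers a literal when the costs tie. For example, covering ten words with two literals and one eight-word match costs 3 * (1 + 16) = 51 bits, before the terminator and any descriptor-word padding.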