Example #1
        private void ParallelDecoding(int threadNum, VideoFile video, int possibleMultiFors, int numOfThreads, int? startValue = null, int? endValue = null)
        {
            int[,] yDctQuan, cBDctQuan, cRDctQuan, yDiffEncoded, cBDiffEncoded, cRDiffEncoded;
            int[] yRunLenEncoded, cBRunLenEncoded, cRRunLenEncoded;

            int offset = possibleMultiFors * keyFrameEvery;
            int start;
            int finish;

            // explicit bounds override the thread-based partitioning
            if (startValue != null)
            {
                start  = startValue.Value;
                finish = endValue ?? tempImages.Length;
            }
            else
            {
                start  = threadNum * offset;
                finish = (threadNum + 1) * offset;
            }

            int[,] yDctQuanDiff           = null;
            int[,] cBDctQuanDiff          = null;
            int[,] cRDctQuanDiff          = null;
            int[,] yDctQuanFromLastFrame  = null;
            int[,] cBDctQuanFromLastFrame = null;
            int[,] cRDctQuanFromLastFrame = null;

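            // Per-frame pipeline: Huffman decode -> run-length decode -> differential
            // decode -> (non-keyframes) add deltas to the previous frame -> inverse
            // quantization/DCT -> reassemble YCbCr pixels. Note that start must be
            // keyframe-aligned, otherwise the *FromLastFrame references are still
            // null when the first delta frame is processed.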
            for (int i = start; i < finish; i++)
            {
                // huffman decoding (one code table per group of pictures, hence the i / keyFrameEvery index)
                yRunLenEncoded  = HuffmanDecoding(YBitArray[i], video.YHuffmanCounts[i / keyFrameEvery]);
                cBRunLenEncoded = HuffmanDecoding(CbBitArray[i], video.CbHuffmanCounts[i / keyFrameEvery]);
                cRRunLenEncoded = HuffmanDecoding(CrBitArray[i], video.CrHuffmanCounts[i / keyFrameEvery]);

                //Tester.PrintToFile("yRunLenEncodedAfter", yRunLenEncoded);

                // run-length decoding; chroma plane dimensions depend on the subsampling mode
                if (subsamplingMode == "4:4:4")
                {
                    yDiffEncoded  = RunLengthEncode.Decode(yRunLenEncoded, 8, video.width, video.height);
                    cBDiffEncoded = RunLengthEncode.Decode(cBRunLenEncoded, 8, video.width, video.height);
                    cRDiffEncoded = RunLengthEncode.Decode(cRRunLenEncoded, 8, video.width, video.height);
                }
                else if (subsamplingMode == "4:2:2")
                {
                    yDiffEncoded  = RunLengthEncode.Decode(yRunLenEncoded, 8, video.width, video.height);
                    cBDiffEncoded = RunLengthEncode.Decode(cBRunLenEncoded, 8, video.width / 2, video.height);
                    cRDiffEncoded = RunLengthEncode.Decode(cRRunLenEncoded, 8, video.width / 2, video.height);
                }
                else
                {
                    yDiffEncoded  = RunLengthEncode.Decode(yRunLenEncoded, 8, video.width, video.height);
                    cBDiffEncoded = RunLengthEncode.Decode(cBRunLenEncoded, 8, video.width / 2, video.height / 2);
                    cRDiffEncoded = RunLengthEncode.Decode(cRRunLenEncoded, 8, video.width / 2, video.height / 2);
                }

                //Tester.PrintToFile("yDiffEncodedAfter", yDiffEncoded);

                // differential decoding
                yDctQuan  = DifferentialEncoding.Decode(yDiffEncoded, 8);
                cBDctQuan = DifferentialEncoding.Decode(cBDiffEncoded, 8);
                cRDctQuan = DifferentialEncoding.Decode(cRDiffEncoded, 8);

                // not a keyframe: the decoded coefficients are deltas relative to the previous frame
                if (i % keyFrameEvery != 0)
                {
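                    // The *Diff variables alias the freshly decoded delta matrices; each
                    // element is read before it is overwritten at the same index, so the
                    // in-place reconstruction below is safe.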
                    yDctQuanDiff  = yDctQuan;
                    cBDctQuanDiff = cBDctQuan;
                    cRDctQuanDiff = cRDctQuan;
                    for (int j = 0; j < yDctQuanFromLastFrame.GetLength(0); j++)
                    {
                        for (int k = 0; k < yDctQuanFromLastFrame.GetLength(1); k++)
                        {
                            yDctQuan[j, k] = yDctQuanFromLastFrame[j, k] + yDctQuanDiff[j, k];
                            if (subsamplingMode == "4:4:4")
                            {
                                cBDctQuan[j, k] = cBDctQuanFromLastFrame[j, k] + cBDctQuanDiff[j, k];
                                cRDctQuan[j, k] = cRDctQuanFromLastFrame[j, k] + cRDctQuanDiff[j, k];
                            }
                        }
                    }
                    if (subsamplingMode != "4:4:4")
                    {
                        for (int j = 0; j < cBDctQuanFromLastFrame.GetLength(0); j++)
                        {
                            for (int k = 0; k < cBDctQuanFromLastFrame.GetLength(1); k++)
                            {
                                cBDctQuan[j, k] = cBDctQuanFromLastFrame[j, k] + cBDctQuanDiff[j, k];
                                cRDctQuan[j, k] = cRDctQuanFromLastFrame[j, k] + cRDctQuanDiff[j, k];
                            }
                        }
                    }
                }

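                // the reconstructed coefficients become the reference for the next frame's deltas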
                yDctQuanFromLastFrame  = yDctQuan;
                cBDctQuanFromLastFrame = cBDctQuan;
                cRDctQuanFromLastFrame = cRDctQuan;

                // Tester.PrintToFile("yDctQuanAfter", yDctQuan);

                // revert dct and quantization
                DctImage dctImage = new DctImage(video.quality, video.subsamplingMode);
                int[,] YMatrix  = dctImage.RevertDctAndQuantization(yDctQuan);
                int[,] CbMatrix = dctImage.RevertDctAndQuantization(cBDctQuan);
                int[,] CrMatrix = dctImage.RevertDctAndQuantization(cRDctQuan);

                if (subsamplingMode == "4:4:4")
                {
                    YMatrix  = dctImage.TrimValueMatrix(YMatrix, video.width, video.height);
                    CbMatrix = dctImage.TrimValueMatrix(CbMatrix, video.width, video.height);
                    CrMatrix = dctImage.TrimValueMatrix(CrMatrix, video.width, video.height);
                }
                else if (subsamplingMode == "4:2:2")
                {
                    YMatrix  = dctImage.TrimValueMatrix(YMatrix, video.width, video.height);
                    CbMatrix = dctImage.TrimValueMatrix(CbMatrix, video.width / 2, video.height);
                    CrMatrix = dctImage.TrimValueMatrix(CrMatrix, video.width / 2, video.height);
                }
                else
                {
                    YMatrix  = dctImage.TrimValueMatrix(YMatrix, video.width, video.height);
                    CbMatrix = dctImage.TrimValueMatrix(CbMatrix, video.width / 2, video.height / 2);
                    CrMatrix = dctImage.TrimValueMatrix(CrMatrix, video.width / 2, video.height / 2);
                }

                // rebuild the YCbCr image, upsampling chroma according to the subsampling mode
                YCbCrImage tempImage = new YCbCrImage(YMatrix.GetLength(0), YMatrix.GetLength(1), subsamplingMode);

                for (int j = 0; j < YMatrix.GetLength(0); j++)
                {
                    for (int k = 0; k < YMatrix.GetLength(1); k++)
                    {
                        if (subsamplingMode == "4:4:4")
                        {
                            tempImage.pixels[j, k] = new YCbCrPixel(YMatrix[j, k], CbMatrix[j, k], CrMatrix[j, k]);
                        }
                        else if (subsamplingMode == "4:2:2")
                        {
                            double Cb = CbMatrix[(j / 2), k];
                            double Cr = CrMatrix[(j / 2), k];
                            tempImage.pixels[j, k] = new YCbCrPixel(YMatrix[j, k], Cb, Cr);
                        }
                        else if (subsamplingMode == "4:2:0")
                        {
                            double Cb = CbMatrix[(j / 2), (k / 2)];
                            double Cr = CrMatrix[(j / 2), (k / 2)];
                            tempImage.pixels[j, k] = new YCbCrPixel(YMatrix[j, k], Cb, Cr);
                        }
                    }
                }

                tempImages[i] = tempImage;

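                // advance the shared progress bar by numOfThreads per decoded frame,
                // clamped to outputImages.Length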
                MethodInvoker mi = new MethodInvoker(() => {
                    int newValue = progressBar.Value + numOfThreads;
                    if (newValue <= outputImages.Length)
                    {
                        progressBar.Value = newValue;
                    }
                    else
                    {
                        progressBar.Value = outputImages.Length;
                    }
                });
                if (progressBar.InvokeRequired)
                {
                    // marshal the update onto the UI thread instead of silently skipping it
                    progressBar.BeginInvoke(mi);
                }
                else
                {
                    mi.Invoke();
                }
            }
        }
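
For reference, the start/finish arithmetic above implies a dispatch pattern along the following lines. This is a minimal sketch, not code from the original project: DecodeInParallel and the remainder handling are assumptions inferred from the thread-based partitioning in the method above.

        // Hypothetical dispatcher (not part of the original source). Each worker owns
        // a contiguous, keyframe-aligned run of possibleMultiFors GOPs; frames beyond
        // numOfThreads * offset are finished with explicit start/end bounds.
        private void DecodeInParallel(VideoFile video, int numOfThreads, int possibleMultiFors)
        {
            Thread[] workers = new Thread[numOfThreads]; // requires using System.Threading;
            for (int t = 0; t < numOfThreads; t++)
            {
                int threadNum = t; // copy, so the closure does not capture the shared loop variable
                workers[t] = new Thread(() => ParallelDecoding(threadNum, video, possibleMultiFors, numOfThreads));
                workers[t].Start();
            }
            foreach (Thread worker in workers)
            {
                worker.Join();
            }

            // any frames past the evenly partitioned range are decoded on the calling thread
            int covered = numOfThreads * possibleMultiFors * keyFrameEvery;
            if (covered < tempImages.Length)
            {
                ParallelDecoding(0, video, possibleMultiFors, numOfThreads, covered, tempImages.Length);
            }
        }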
Example #2
        public void ParallelEncoding(int threadNum, int possibleMultiFors, int numOfThreads, int? startValue = null, int? endValue = null)
        {
            int[,] yDctQuan, cBDctQuan, cRDctQuan, yDiffEncoded, cBDiffEncoded, cRDiffEncoded;
            int[] yRunLenEncoded, cBRunLenEncoded, cRRunLenEncoded;
            int[,] accumulatedChangesY  = null;
            int[,] accumulatedChangesCb = null;
            int[,] accumulatedChangesCr = null;
            int[,] actualValuesY        = null;
            int[,] actualValuesCb       = null;
            int[,] actualValuesCr       = null;
            List <int[, ]> actualValuesListY  = new List <int[, ]>();
            List <int[, ]> actualValuesListCb = new List <int[, ]>();
            List <int[, ]> actualValuesListCr = new List <int[, ]>();

            // needed for multi huffman encoding
            int[][] YHuffmanValues  = new int[keyFrameEvery][];
            int[][] CbHuffmanValues = new int[keyFrameEvery][];
            int[][] CrHuffmanValues = new int[keyFrameEvery][];

            int offset = possibleMultiFors * keyFrameEvery;
            int start;
            int finish;

            // explicit bounds override the thread-based partitioning
            if (startValue != null)
            {
                start  = startValue.Value;
                finish = endValue ?? tempImages.Length;
            }
            else
            {
                start  = threadNum * offset;
                finish = (threadNum + 1) * offset;
            }

            int[,] yDctQuanDiff           = null;
            int[,] cBDctQuanDiff          = null;
            int[,] cRDctQuanDiff          = null;
            int[,] yDctQuanFromLastFrame  = null;
            int[,] cBDctQuanFromLastFrame = null;
            int[,] cRDctQuanFromLastFrame = null;

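            // Per-frame pipeline: DCT + quantization -> (non-keyframes) delta against the
            // previous frame -> trim to plane size -> differential encode -> run-length
            // encode -> Huffman encode. As in the decoder, start must be keyframe-aligned.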
            for (int i = start; i < finish; i++)
            {
                DctImage dctImage = new DctImage(tempImages[i], quality, actualValuesListY, actualValuesListCb, actualValuesListCr, actualValuesY, actualValuesCb, actualValuesCr, accumulatedChangesY, accumulatedChangesCb, accumulatedChangesCr);

                yDctQuan  = dctImage.PerformDctAndQuantization(tempImages[i], "Y");
                cBDctQuan = dctImage.PerformDctAndQuantization(tempImages[i], "Cb");
                cRDctQuan = dctImage.PerformDctAndQuantization(tempImages[i], "Cr");

                // not a keyframe: store deltas against the previous frame's quantized coefficients
                if (i % keyFrameEvery != 0)
                {
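                    // write the deltas into the diff buffers allocated at the last keyframe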
                    for (int j = 0; j < yDctQuanFromLastFrame.GetLength(0); j++)
                    {
                        for (int k = 0; k < yDctQuanFromLastFrame.GetLength(1); k++)
                        {
                            //yDctQuanDiff[j, k] = GetOptimizedDifference(yDctQuan[j, k] - yDctQuanFromLastFrame[j, k], "y");
                            yDctQuanDiff[j, k] = yDctQuan[j, k] - yDctQuanFromLastFrame[j, k];
                            if (subsamplingMode == "4:4:4")
                            {
                                //cBDctQuanDiff[j, k] = GetOptimizedDifference(cBDctQuan[j, k] - cBDctQuanFromLastFrame[j, k], "cB");
                                cBDctQuanDiff[j, k] = cBDctQuan[j, k] - cBDctQuanFromLastFrame[j, k];
                                //cRDctQuanDiff[j, k] = GetOptimizedDifference(cRDctQuan[j, k] - cRDctQuanFromLastFrame[j, k], "cR");
                                cRDctQuanDiff[j, k] = cRDctQuan[j, k] - cRDctQuanFromLastFrame[j, k];
                            }
                        }
                    }
                    if (subsamplingMode != "4:4:4")
                    {
                        for (int j = 0; j < cBDctQuanFromLastFrame.GetLength(0); j++)
                        {
                            for (int k = 0; k < cBDctQuanFromLastFrame.GetLength(1); k++)
                            {
                                //cBDctQuanDiff[j, k] = GetOptimizedDifference(cBDctQuan[j, k] - cBDctQuanFromLastFrame[j, k], "cB");
                                cBDctQuanDiff[j, k] = cBDctQuan[j, k] - cBDctQuanFromLastFrame[j, k];
                                //cRDctQuanDiff[j, k] = GetOptimizedDifference(cRDctQuan[j, k] - cRDctQuanFromLastFrame[j, k], "cR");
                                cRDctQuanDiff[j, k] = cRDctQuan[j, k] - cRDctQuanFromLastFrame[j, k];
                            }
                        }
                    }
                }
                else
                {
                    // keyframe: allocate fresh diff buffers and reset the change-tracking state
                    yDctQuanDiff  = new int[yDctQuan.GetLength(0), yDctQuan.GetLength(1)];
                    cBDctQuanDiff = new int[cBDctQuan.GetLength(0), cBDctQuan.GetLength(1)];
                    cRDctQuanDiff = new int[cRDctQuan.GetLength(0), cRDctQuan.GetLength(1)];

                    accumulatedChangesY  = new int[yDctQuan.GetLength(0), yDctQuan.GetLength(1)];
                    accumulatedChangesCb = new int[cBDctQuan.GetLength(0), cBDctQuan.GetLength(1)];
                    accumulatedChangesCr = new int[cRDctQuan.GetLength(0), cRDctQuan.GetLength(1)];
                    actualValuesY        = new int[yDctQuan.GetLength(0), yDctQuan.GetLength(1)];
                    actualValuesCb       = new int[cBDctQuan.GetLength(0), cBDctQuan.GetLength(1)];
                    actualValuesCr       = new int[cRDctQuan.GetLength(0), cRDctQuan.GetLength(1)];
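                    // int.MaxValue apparently serves as a "nothing cached yet" sentinel
                    // for the change-tracking buffers passed to DctImage above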
                    for (int x = 0; x < accumulatedChangesY.GetLength(0); x++)
                    {
                        for (int y = 0; y < accumulatedChangesY.GetLength(1); y++)
                        {
                            accumulatedChangesY[x, y] = int.MaxValue;
                            actualValuesY[x, y]       = int.MaxValue;
                        }
                    }
                    for (int x = 0; x < accumulatedChangesCb.GetLength(0); x++)
                    {
                        for (int y = 0; y < accumulatedChangesCb.GetLength(1); y++)
                        {
                            accumulatedChangesCb[x, y] = int.MaxValue;
                            accumulatedChangesCr[x, y] = int.MaxValue;
                            actualValuesCb[x, y]       = int.MaxValue;
                            actualValuesCr[x, y]       = int.MaxValue;
                        }
                    }
                    //actualValuesListY.Clear();
                    //actualValuesListCb.Clear();
                    //actualValuesListCr.Clear();
                }

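                // the current frame's absolute coefficients become the reference for the
                // next frame's deltas (stored before the swap below)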
                yDctQuanFromLastFrame  = yDctQuan;
                cBDctQuanFromLastFrame = cBDctQuan;
                cRDctQuanFromLastFrame = cRDctQuan;

                // not a keyframe: emit the deltas instead of the absolute coefficients
                if (i % keyFrameEvery != 0)
                {
                    yDctQuan  = yDctQuanDiff;
                    cBDctQuan = cBDctQuanDiff;
                    cRDctQuan = cRDctQuanDiff;
                }

                if (subsamplingMode == "4:4:4")
                {
                    yDctQuan  = dctImage.TrimValueMatrix(yDctQuan, width, height);
                    cBDctQuan = dctImage.TrimValueMatrix(cBDctQuan, width, height);
                    cRDctQuan = dctImage.TrimValueMatrix(cRDctQuan, width, height);
                }
                else if (subsamplingMode == "4:2:2")
                {
                    yDctQuan  = dctImage.TrimValueMatrix(yDctQuan, width, height);
                    cBDctQuan = dctImage.TrimValueMatrix(cBDctQuan, width / 2, height);
                    cRDctQuan = dctImage.TrimValueMatrix(cRDctQuan, width / 2, height);
                }
                else if (subsamplingMode == "4:2:0")
                {
                    yDctQuan  = dctImage.TrimValueMatrix(yDctQuan, width, height);
                    cBDctQuan = dctImage.TrimValueMatrix(cBDctQuan, width / 2, height / 2);
                    cRDctQuan = dctImage.TrimValueMatrix(cRDctQuan, width / 2, height / 2);
                }

                // Tester.PrintToFile("yDctQuanBefore", yDctQuan);

                yDiffEncoded  = DifferentialEncoding.Encode(yDctQuan, 8);
                cBDiffEncoded = DifferentialEncoding.Encode(cBDctQuan, 8);
                cRDiffEncoded = DifferentialEncoding.Encode(cRDctQuan, 8);

                //Tester.PrintToFile("yDiffEncodedBefore", yDiffEncoded);

                yRunLenEncoded  = RunLengthEncode.Encode(yDiffEncoded, 8);
                cBRunLenEncoded = RunLengthEncode.Encode(cBDiffEncoded, 8);
                cRRunLenEncoded = RunLengthEncode.Encode(cRDiffEncoded, 8);

                //Tester.PrintToFile("yRunLenEncodedBefore", yRunLenEncoded);

                // huffman encoding; the final frame is flagged so the trailing
                // (possibly partial) GOP can be finalized
                bool lastFrame = i == tempImages.Length - 1;
                MultiHuffmanEncoding(i, yRunLenEncoded, cBRunLenEncoded, cRRunLenEncoded, lastFrame, YHuffmanValues, CbHuffmanValues, CrHuffmanValues);

                // Tester.PrintToFile("huffmanBefore", YBitArray);

                // garbage collection
                //tempImages[i] = null;

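                // advance the shared progress bar by numOfThreads per encoded frame,
                // clamped to inputImages.Length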
                MethodInvoker mi = new MethodInvoker(() => {
                    int newValue = progressBar.Value + numOfThreads;
                    if (newValue <= inputImages.Length)
                    {
                        progressBar.Value = newValue;
                    }
                    else
                    {
                        progressBar.Value = inputImages.Length;
                    }
                });
                if (progressBar.InvokeRequired)
                {
                    // marshal the update onto the UI thread instead of silently skipping it
                    progressBar.BeginInvoke(mi);
                }
                else
                {
                    mi.Invoke();
                }
            }
        }
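
A note on the Huffman stage: the decoder in Example #1 looks up its code tables as video.YHuffmanCounts[i / keyFrameEvery], i.e. one table per group of pictures. That suggests MultiHuffmanEncoding buffers a whole GOP's run-length output in the keyFrameEvery-sized YHuffmanValues/CbHuffmanValues/CrHuffmanValues arrays and emits one shared table per GOP, with the lastFrame flag finalizing a trailing, possibly partial, GOP.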