Example #1
        private void ParallelDecoding(int threadNum, VideoFile video, int possibleMultiFors, int numOfThreads, int? startValue = null, int? endValue = null)
        {
            int[,] yDctQuan, cBDctQuan, cRDctQuan, yDiffEncoded, cBDiffEncoded, cRDiffEncoded;
            int[] yRunLenEncoded, cBRunLenEncoded, cRRunLenEncoded;

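            // each thread owns a contiguous block of frames whose length is a multiple
            // of the keyframe interval, so every block starts on a keyframe and can be
            // decoded independently of the other blocks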
            int offset = possibleMultiFors * keyFrameEvery;
            int start;
            int finish;

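            // an explicit start/end range overrides the per-thread partitioning,
            // e.g. for leftover frames that the even split does not cover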
            if (startValue != null)
            {
                start = (int)startValue;
                if (endValue != null)
                {
                    finish = (int)endValue;
                }
                else
                {
                    finish = tempImages.Length;
                }
            }
            else
            {
                start  = threadNum * offset;
                finish = (threadNum + 1) * offset;
            }

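            // scratch references for delta decoding: the *Diff arrays point at a frame's
            // decoded differences, the *FromLastFrame arrays at the previous frame's
            // reconstructed coefficients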
            int[,] yDctQuanDiff           = null;
            int[,] cBDctQuanDiff          = null;
            int[,] cRDctQuanDiff          = null;
            int[,] yDctQuanFromLastFrame  = null;
            int[,] cBDctQuanFromLastFrame = null;
            int[,] cRDctQuanFromLastFrame = null;

            for (int i = start; i < finish; i++)
            {
                // huffman decoding
                yRunLenEncoded  = HuffmanDecoding(YBitArray[i], video.YHuffmanCounts[i / keyFrameEvery]);
                cBRunLenEncoded = HuffmanDecoding(CbBitArray[i], video.CbHuffmanCounts[i / keyFrameEvery]);
                cRRunLenEncoded = HuffmanDecoding(CrBitArray[i], video.CrHuffmanCounts[i / keyFrameEvery]);

                //Tester.PrintToFile("yRunLenEncodedAfter", yRunLenEncoded);

                // run length decoding
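                // (the 8 passed to Decode is presumably the DCT block edge length; the
                // chroma planes shrink with the subsampling mode, so each mode decodes
                // Cb/Cr at its own plane dimensions)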
                if (subsamplingMode == "4:4:4")
                {
                    yDiffEncoded  = RunLengthEncode.Decode(yRunLenEncoded, 8, video.width, video.height);
                    cBDiffEncoded = RunLengthEncode.Decode(cBRunLenEncoded, 8, video.width, video.height);
                    cRDiffEncoded = RunLengthEncode.Decode(cRRunLenEncoded, 8, video.width, video.height);
                }
                else if (subsamplingMode == "4:2:2")
                {
                    yDiffEncoded  = RunLengthEncode.Decode(yRunLenEncoded, 8, video.width, video.height);
                    cBDiffEncoded = RunLengthEncode.Decode(cBRunLenEncoded, 8, video.width / 2, video.height);
                    cRDiffEncoded = RunLengthEncode.Decode(cRRunLenEncoded, 8, video.width / 2, video.height);
                }
                else
                {
                    yDiffEncoded  = RunLengthEncode.Decode(yRunLenEncoded, 8, video.width, video.height);
                    cBDiffEncoded = RunLengthEncode.Decode(cBRunLenEncoded, 8, video.width / 2, video.height / 2);
                    cRDiffEncoded = RunLengthEncode.Decode(cRRunLenEncoded, 8, video.width / 2, video.height / 2);
                }

                //Tester.PrintToFile("yDiffEncodedAfter", yDiffEncoded);

                // differential decoding
                yDctQuan  = DifferentialEncoding.Decode(yDiffEncoded, 8);
                cBDctQuan = DifferentialEncoding.Decode(cBDiffEncoded, 8);
                cRDctQuan = DifferentialEncoding.Decode(cRDiffEncoded, 8);

                // it's not a keyframe
                if (i % keyFrameEvery != 0)
                {
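                    // the *Diff variables alias the freshly decoded arrays, so the
                    // additions below effectively accumulate the previous frame's
                    // coefficients in place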
                    yDctQuanDiff  = yDctQuan;
                    cBDctQuanDiff = cBDctQuan;
                    cRDctQuanDiff = cRDctQuan;
                    for (int j = 0; j < yDctQuanFromLastFrame.GetLength(0); j++)
                    {
                        for (int k = 0; k < yDctQuanFromLastFrame.GetLength(1); k++)
                        {
                            yDctQuan[j, k] = yDctQuanFromLastFrame[j, k] + yDctQuanDiff[j, k];
                            if (subsamplingMode == "4:4:4")
                            {
                                cBDctQuan[j, k] = cBDctQuanFromLastFrame[j, k] + cBDctQuanDiff[j, k];
                                cRDctQuan[j, k] = cRDctQuanFromLastFrame[j, k] + cRDctQuanDiff[j, k];
                            }
                        }
                    }
                    if (subsamplingMode != "4:4:4")
                    {
                        for (int j = 0; j < cBDctQuanFromLastFrame.GetLength(0); j++)
                        {
                            for (int k = 0; k < cBDctQuanFromLastFrame.GetLength(1); k++)
                            {
                                cBDctQuan[j, k] = cBDctQuanFromLastFrame[j, k] + cBDctQuanDiff[j, k];
                                cRDctQuan[j, k] = cRDctQuanFromLastFrame[j, k] + cRDctQuanDiff[j, k];
                            }
                        }
                    }
                }

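                // keep the reconstructed coefficients for the next frame, which may
                // be delta-coded against this one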
                yDctQuanFromLastFrame  = yDctQuan;
                cBDctQuanFromLastFrame = cBDctQuan;
                cRDctQuanFromLastFrame = cRDctQuan;

                // Tester.PrintToFile("yDctQuanAfter", yDctQuan);

                // revert dct and quantization
                DctImage dctImage = new DctImage(video.quality, video.subsamplingMode);
                int[,] YMatrix  = dctImage.RevertDctAndQuantization(yDctQuan);
                int[,] CbMatrix = dctImage.RevertDctAndQuantization(cBDctQuan);
                int[,] CrMatrix = dctImage.RevertDctAndQuantization(cRDctQuan);

                if (subsamplingMode == "4:4:4")
                {
                    YMatrix  = dctImage.TrimValueMatrix(YMatrix, video.width, video.height);
                    CbMatrix = dctImage.TrimValueMatrix(CbMatrix, video.width, video.height);
                    CrMatrix = dctImage.TrimValueMatrix(CrMatrix, video.width, video.height);
                }
                else if (subsamplingMode == "4:2:2")
                {
                    YMatrix  = dctImage.TrimValueMatrix(YMatrix, video.width, video.height);
                    CbMatrix = dctImage.TrimValueMatrix(CbMatrix, video.width / 2, video.height);
                    CrMatrix = dctImage.TrimValueMatrix(CrMatrix, video.width / 2, video.height);
                }
                else
                {
                    YMatrix  = dctImage.TrimValueMatrix(YMatrix, video.width, video.height);
                    CbMatrix = dctImage.TrimValueMatrix(CbMatrix, video.width / 2, video.height / 2);
                    CrMatrix = dctImage.TrimValueMatrix(CrMatrix, video.width / 2, video.height / 2);
                }

                // instantiate the YCbCr image and fill its pixel grid; for subsampled
                // modes each chroma sample is replicated across the neighbouring luma
                // positions (nearest-neighbour upsampling)
                YCbCrImage tempImage = new YCbCrImage(YMatrix.GetLength(0), YMatrix.GetLength(1), subsamplingMode);

                for (int j = 0; j < YMatrix.GetLength(0); j++)
                {
                    for (int k = 0; k < YMatrix.GetLength(1); k++)
                    {
                        if (subsamplingMode == "4:4:4")
                        {
                            tempImage.pixels[j, k] = new YCbCrPixel(YMatrix[j, k], CbMatrix[j, k], CrMatrix[j, k]);
                        }
                        else if (subsamplingMode == "4:2:2")
                        {
                            double Cb = CbMatrix[(j / 2), k];
                            double Cr = CrMatrix[(j / 2), k];
                            tempImage.pixels[j, k] = new YCbCrPixel(YMatrix[j, k], Cb, Cr);
                        }
                        else if (subsamplingMode == "4:2:0")
                        {
                            double Cb = CbMatrix[(j / 2), (k / 2)];
                            double Cr = CrMatrix[(j / 2), (k / 2)];
                            tempImage.pixels[j, k] = new YCbCrPixel(YMatrix[j, k], Cb, Cr);
                        }
                    }
                }

                tempImages[i] = tempImage;

                MethodInvoker mi = new MethodInvoker(() => {
                    int newValue = progressBar.Value + numOfThreads;
                    if (newValue <= outputImages.Length)
                    {
                        progressBar.Value = newValue;
                    }
                    else
                    {
                        progressBar.Value = outputImages.Length;
                    }
                });
                if (progressBar.InvokeRequired)
                {
                    // marshal the update onto the UI thread
                    progressBar.Invoke(mi);
                }
                else
                {
                    mi.Invoke();
                }
            }
        }
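
A minimal sketch of a driver that could invoke ParallelDecoding, assuming tempImages and keyFrameEvery are fields as used above and that System.Threading and System.Collections.Generic are imported. The Decoding wrapper below, its thread arithmetic, and the plain Thread scheduling are illustrative assumptions rather than the original implementation; the one property it must preserve is that every thread's block starts on a keyframe.

        // Hypothetical driver (not part of the original source): give each thread a
        // block of whole keyframe groups, then decode whatever the even split left
        // over through the explicit start/end overload.
        private void Decoding(VideoFile video)
        {
            int numOfThreads      = Environment.ProcessorCount;
            int keyFrameGroups    = tempImages.Length / keyFrameEvery;
            int possibleMultiFors = keyFrameGroups / numOfThreads; // whole groups per thread
            int covered           = numOfThreads * possibleMultiFors * keyFrameEvery;

            var threads = new List<Thread>();
            for (int t = 0; t < numOfThreads; t++)
            {
                int threadNum = t; // capture a copy for the closure
                var thread = new Thread(() => ParallelDecoding(threadNum, video, possibleMultiFors, numOfThreads));
                thread.Start();
                threads.Add(thread);
            }
            foreach (var thread in threads)
            {
                thread.Join();
            }

            // covered is a multiple of keyFrameEvery, so the leftover range also
            // starts on a keyframe
            if (covered < tempImages.Length)
            {
                ParallelDecoding(0, video, possibleMultiFors, numOfThreads, covered, tempImages.Length);
            }
        }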
Example #2
        // If no input file has been selected yet, clicking the input picture box prompts the user to choose one.
        private void inputPictureBox_Click(object sender, EventArgs e)
        {
            if (inputFileName == null)
            {
                OpenFileDialog ofd = new OpenFileDialog();
                ofd.InitialDirectory = Application.StartupPath;
                if (ofd.ShowDialog() != DialogResult.OK)
                {
                    // the user cancelled the dialog, so there is nothing to load
                    return;
                }
                inputFileName       = ofd.FileName;
                inputSizeLabel.Text = "Input file size: " + BytesToString(new FileInfo(ofd.FileName).Length);

                if (inputFileName.EndsWith(".bfv"))
                {
                    // file is already encoded
                    DisableEncodingUI();

                    // read video file
                    IFormatter decodingFormatter = new BinaryFormatter();
                    Stream     decodingStream    = new FileStream(inputFileName, FileMode.Open, FileAccess.Read, FileShare.Read);
                    VideoFile  inputVideo        = (VideoFile)decodingFormatter.Deserialize(decodingStream);
                    outputSizeLabel.Text = "Output file size: " + BytesToString(decodingStream.Length);
                    decodingStream.Close();
                    GC.Collect();

                    //init
                    tempImages          = new YCbCrImage[inputVideo.YBitArray.Length];
                    inputImages         = new Image[tempImages.Length];
                    outputImages        = new Image[tempImages.Length];
                    progressBar.Maximum = tempImages.Length;

                    //DCT & Quantization & Differential Decoding & Run Length Decoding & Huffman Decoding
                    Decoding(inputVideo);

                    // Convert YCbCr images to RGB images
                    YCbCrToRGB();

                    GC.Collect();

                    // show first picture
                    outputPictureBox.Image = outputImages[timeBar.Value];
                }
                else
                {
                    // normal file

                    progressLabel.Text    = "Importing file...";
                    progressLabel.Visible = true;
                    progressBar.Value     = 0;
                    progressBar.Visible   = true;
                    // Convert input video to image array
                    var ffMpeg = new NReco.VideoConverter.FFMpegConverter();

                    ArrayList inputImagesAL = new ArrayList();
                    var       hasFrame      = true;
                    var       count         = 0;

                    if (frameLimiter.Checked)
                    {
                        progressBar.Maximum = Decimal.ToInt32(frameInput.Value);
                    }

                    while (hasFrame == true && (!frameLimiter.Checked || count < Decimal.ToInt32(frameInput.Value)))
                    {
                        using (MemoryStream stream = new MemoryStream())
                        {
                            // the source is assumed to run at 30 fps, so frame `count` sits at count / 30 seconds
                            ffMpeg.GetVideoThumbnail(inputFileName, stream, count / 30f);
                            if (stream.Length != 0)
                            {
                                inputImagesAL.Add(Image.FromStream(stream));
                                progressBar.Value = Math.Min(count, progressBar.Maximum); // never exceed the bar's maximum
                                count++;
                            }
                            else
                            {
                                hasFrame = false;
                            }
                        }
                    }
                    inputImages           = Array.ConvertAll(inputImagesAL.ToArray(), image => (Image)image);
                    inputPictureBox.Image = inputImages[timeBar.Value];
                    progressBar.Maximum   = count + 1;
                    progressLabel.Visible = false;
                    progressBar.Visible   = false;

                    // init result array lengths
                    YBitArray  = new List<int>[inputImages.Length];
                    CbBitArray = new List<int>[inputImages.Length];
                    CrBitArray = new List<int>[inputImages.Length];
                    // init huffmans
                    UpdateHuffmanCounts();
                }
            }
        }
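
The handler above relies on a BytesToString helper that is not shown in this excerpt. A minimal sketch of what such a helper might look like; the suffix list and rounding format are assumptions:

        // Hypothetical helper (not part of the original source): formats a raw byte
        // count as a human-readable string such as "1.21 MB".
        private static string BytesToString(long byteCount)
        {
            string[] suffixes = { "B", "KB", "MB", "GB", "TB" };
            if (byteCount <= 0)
            {
                return "0 B";
            }
            int place  = Math.Min((int)Math.Log(byteCount, 1024), suffixes.Length - 1);
            double num = byteCount / Math.Pow(1024, place);
            return num.ToString("0.##") + " " + suffixes[place];
        }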