// Runs the forward DCT and quantization for one channel of the given image
// and returns the quantized coefficient matrix.
public int[,] PerformDctAndQuantization(YCbCrImage image, String channelString)
{
    // Extract and pad only the requested channel; the other channels are
    // handled by separate calls, so there is no need to build all three.
    switch (channelString)
    {
        case "Y":
        case "Cb":
        case "Cr":
            double[,] valueMatrix = FillValueMatrix(image, channelString);
            valueMatrix = PadValueMatrix(valueMatrix);
            return DctSubArray(valueMatrix, channelString);
        default:
            return null;
    }
}
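// Usage sketch (illustrative only; "frame" is a hypothetical YCbCrImage
// instance, not a name from this project):
//
//     DctImage dct = new DctImage(frame);
//     int[,] yQuan  = dct.PerformDctAndQuantization(frame, "Y");
//     int[,] cbQuan = dct.PerformDctAndQuantization(frame, "Cb");
//     int[,] crQuan = dct.PerformDctAndQuantization(frame, "Cr");
//
// The chroma matrices come back smaller than the luma matrix unless the image
// uses 4:4:4 subsampling (see FillValueMatrix below).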
public DctImage(YCbCrImage image)
{
    this.image = image;
    subsamplingMode = image.subsamplingMode;
    // set qualityFactor to 50 (= base quantization matrix)
    this.qualityFactor = 50;
    CalculateQuantizationMatrix(this.qualityFactor);
}
////////////////////////////////////////////////////////////////////////////////
///// Constructors
////////////////////////////////////////////////////////////////////////////////
public DctImage(int qualityFactor, string subsamplingMode)
{
    this.image = null;
    this.subsamplingMode = subsamplingMode;
    // fall back to qualityFactor 50 (= base quantization matrix) if the given
    // qualityFactor is not in the defined range (1..100)
    this.qualityFactor = (qualityFactor > 0) && (qualityFactor <= 100) ? qualityFactor : 50;
    CalculateQuantizationMatrix(this.qualityFactor);
}
public DctImage(YCbCrImage image, int qualityFactor,
                List<int[,]> actualValuesListY, List<int[,]> actualValuesListCb, List<int[,]> actualValuesListCr,
                int[,] actualValuesY, int[,] actualValuesCb, int[,] actualValuesCr,
                int[,] accumulatedChangesY, int[,] accumulatedChangesCb, int[,] accumulatedChangesCr)
{
    this.actualValuesListY = actualValuesListY;
    this.actualValuesListCb = actualValuesListCb;
    this.actualValuesListCr = actualValuesListCr;
    this.actualValuesY = actualValuesY;
    this.actualValuesCb = actualValuesCb;
    this.actualValuesCr = actualValuesCr;
    this.accumulatedChangesY = accumulatedChangesY;
    this.accumulatedChangesCb = accumulatedChangesCb;
    this.accumulatedChangesCr = accumulatedChangesCr;
    this.image = image;
    subsamplingMode = image.subsamplingMode;
    // fall back to qualityFactor 50 (= base quantization matrix) if the given
    // qualityFactor is not in the defined range (1..100)
    this.qualityFactor = (qualityFactor > 0) && (qualityFactor <= 100) ? qualityFactor : 50;
    CalculateQuantizationMatrix(this.qualityFactor);
}
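// CalculateQuantizationMatrix is not shown in this listing. A common way to
// derive a scaled matrix from a base (quality 50) matrix is the IJG/libjpeg
// scaling formula; the sketch below is an assumption about the approach, not
// this project's actual implementation ("baseMatrix" and "quantizationMatrix"
// are hypothetical field names):
//
//     int scale = qualityFactor < 50 ? 5000 / qualityFactor
//                                    : 200 - 2 * qualityFactor;
//     for (int i = 0; i < 8; i++)
//         for (int j = 0; j < 8; j++)
//             quantizationMatrix[i, j] =
//                 Math.Max(1, Math.Min(255, (baseMatrix[i, j] * scale + 50) / 100));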
private void RGBToYCbCr()
{
    progressLabel.Text = "Converting RGB to YCbCr...";
    progressLabel.Visible = true;
    progressBar.Value = 0;
    progressBar.Visible = true;
    // needed to update UI
    this.Update();

    tempImages = new YCbCrImage[inputImages.Length];
    outputImages = new Image[tempImages.Length];

    for (int i = 0; i < inputImages.Length; i++)
    {
        Bitmap bitmap = new Bitmap(inputImages[i]);
        YCbCrImage yCbCrImage = new YCbCrImage(bitmap.Width, bitmap.Height, subsamplingMode);
        for (int x = 0; x < bitmap.Width; x++)
        {
            for (int y = 0; y < bitmap.Height; y++)
            {
                // Color conversion values from
                // https://www.renesas.com/eu/en/www/doc/application-note/an9717.pdf
                Color pixel = bitmap.GetPixel(x, y);
                double Y = 0.257 * pixel.R + 0.504 * pixel.G + 0.098 * pixel.B + 16;
                double Cb = -0.148 * pixel.R - 0.291 * pixel.G + 0.439 * pixel.B + 128;
                double Cr = 0.439 * pixel.R - 0.368 * pixel.G - 0.071 * pixel.B + 128;
                yCbCrImage.pixels[x, y] = new YCbCrPixel(Y, Cb, Cr);
            }
        }
        tempImages[i] = yCbCrImage;
        progressBar.Value = i;
    }

    // we need this later to save in our video file
    Bitmap tempbm = new Bitmap(inputImages[0]);
    width = tempbm.Width;
    height = tempbm.Height;

    progressLabel.Visible = false;
    progressBar.Visible = false;
    // needed to update UI
    this.Update();
}
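// For reference, the matching inverse transform from the same application note
// (the decoder's YCbCr-to-RGB step, which is not part of this excerpt) looks
// roughly like the sketch below; "Clamp" is a hypothetical helper that limits
// each component to the byte range 0..255 before rounding:
//
//     double r = 1.164 * (Y - 16) + 1.596 * (Cr - 128);
//     double g = 1.164 * (Y - 16) - 0.813 * (Cr - 128) - 0.391 * (Cb - 128);
//     double b = 1.164 * (Y - 16) + 2.018 * (Cb - 128);
//     Color pixel = Color.FromArgb(Clamp(r), Clamp(g), Clamp(b));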
// create a matrix containing only Y, Cb or Cr values
public double[,] FillValueMatrix(YCbCrImage image, String channelString)
{
    double[,] valueMatrix = null;

    // only save every Nth pixel depending on subsampling mode / channel string
    if (subsamplingMode == "4:4:4" || channelString == "Y")
    {
        // no subsampling: keep the full resolution
        valueMatrix = new double[image.width, image.height];
        for (int height = 0; height < image.height; height++)
        {
            for (int width = 0; width < image.width; width++)
            {
                YCbCrPixel pixel = image.GetPixel(width, height);
                switch (channelString)
                {
                    case "Y":
                        valueMatrix[width, height] = pixel.getY();
                        break;
                    case "Cb":
                        valueMatrix[width, height] = pixel.getCb();
                        break;
                    case "Cr":
                        valueMatrix[width, height] = pixel.getCr();
                        break;
                    default:
                        break;
                }
            }
        }
    }
    else if (subsamplingMode == "4:2:2")
    {
        // keep every second column of the chroma channel
        valueMatrix = new double[image.width / 2, image.height];
        for (int height = 0; height < image.height; height++)
        {
            for (int width = 0; width < image.width; width += 2)
            {
                YCbCrPixel pixel = image.GetPixel(width, height);
                switch (channelString)
                {
                    case "Cb":
                        valueMatrix[width / 2, height] = pixel.getCb();
                        break;
                    case "Cr":
                        valueMatrix[width / 2, height] = pixel.getCr();
                        break;
                    default:
                        break;
                }
            }
        }
    }
    else if (subsamplingMode == "4:2:0")
    {
        // keep every second column and every second row of the chroma channel
        valueMatrix = new double[image.width / 2, image.height / 2];
        for (int height = 0; height < image.height; height += 2)
        {
            for (int width = 0; width < image.width; width += 2)
            {
                YCbCrPixel pixel = image.GetPixel(width, height);
                switch (channelString)
                {
                    case "Cb":
                        valueMatrix[width / 2, height / 2] = pixel.getCb();
                        break;
                    case "Cr":
                        valueMatrix[width / 2, height / 2] = pixel.getCr();
                        break;
                    default:
                        break;
                }
            }
        }
    }

    return valueMatrix;
}
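// Example of the resulting chroma matrix dimensions for a 16x8 frame
// (illustrative numbers, not taken from the project):
//
//     4:4:4  ->  16 x 8   (full resolution)
//     4:2:2  ->   8 x 8   (half the columns)
//     4:2:0  ->   8 x 4   (half the columns and half the rows)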
private void ParallelDecoding(int threadNum, VideoFile video, int possibleMultiFors,
                              int numOfThreads, int? startValue = null, int? endValue = null)
{
    int[,] yDctQuan, cBDctQuan, cRDctQuan, yDiffEncoded, cBDiffEncoded, cRDiffEncoded;
    int[] yRunLenEncoded, cBRunLenEncoded, cRRunLenEncoded;
    int offset = possibleMultiFors * keyFrameEvery;
    int start;
    int finish;

    // explicit start/end values take precedence over the thread-based split
    if (startValue != null)
    {
        start = (int)startValue;
        finish = endValue != null ? (int)endValue : tempImages.Length;
    }
    else
    {
        start = threadNum * offset;
        finish = (threadNum + 1) * offset;
    }

    int[,] yDctQuanDiff = null;
    int[,] cBDctQuanDiff = null;
    int[,] cRDctQuanDiff = null;
    int[,] yDctQuanFromLastFrame = null;
    int[,] cBDctQuanFromLastFrame = null;
    int[,] cRDctQuanFromLastFrame = null;

    for (int i = start; i < finish; i++)
    {
        // huffman decoding
        yRunLenEncoded = HuffmanDecoding(YBitArray[i], video.YHuffmanCounts[i / keyFrameEvery]);
        cBRunLenEncoded = HuffmanDecoding(CbBitArray[i], video.CbHuffmanCounts[i / keyFrameEvery]);
        cRRunLenEncoded = HuffmanDecoding(CrBitArray[i], video.CrHuffmanCounts[i / keyFrameEvery]);
        //Tester.PrintToFile("yRunLenEncodedAfter", yRunLenEncoded);

        // run length decoding; the chroma planes are smaller when subsampled
        if (subsamplingMode == "4:4:4")
        {
            yDiffEncoded = RunLengthEncode.Decode(yRunLenEncoded, 8, video.width, video.height);
            cBDiffEncoded = RunLengthEncode.Decode(cBRunLenEncoded, 8, video.width, video.height);
            cRDiffEncoded = RunLengthEncode.Decode(cRRunLenEncoded, 8, video.width, video.height);
        }
        else if (subsamplingMode == "4:2:2")
        {
            yDiffEncoded = RunLengthEncode.Decode(yRunLenEncoded, 8, video.width, video.height);
            cBDiffEncoded = RunLengthEncode.Decode(cBRunLenEncoded, 8, video.width / 2, video.height);
            cRDiffEncoded = RunLengthEncode.Decode(cRRunLenEncoded, 8, video.width / 2, video.height);
        }
        else
        {
            yDiffEncoded = RunLengthEncode.Decode(yRunLenEncoded, 8, video.width, video.height);
            cBDiffEncoded = RunLengthEncode.Decode(cBRunLenEncoded, 8, video.width / 2, video.height / 2);
            cRDiffEncoded = RunLengthEncode.Decode(cRRunLenEncoded, 8, video.width / 2, video.height / 2);
        }
        //Tester.PrintToFile("yDiffEncodedAfter", yDiffEncoded);

        // differential decoding
        yDctQuan = DifferentialEncoding.Decode(yDiffEncoded, 8);
        cBDctQuan = DifferentialEncoding.Decode(cBDiffEncoded, 8);
        cRDctQuan = DifferentialEncoding.Decode(cRDiffEncoded, 8);

        // it's not a keyframe: the decoded values are deltas, so add them to
        // the coefficients of the previous frame
        if (i % keyFrameEvery != 0)
        {
            yDctQuanDiff = yDctQuan;
            cBDctQuanDiff = cBDctQuan;
            cRDctQuanDiff = cRDctQuan;
            for (int j = 0; j < yDctQuanFromLastFrame.GetLength(0); j++)
            {
                for (int k = 0; k < yDctQuanFromLastFrame.GetLength(1); k++)
                {
                    yDctQuan[j, k] = yDctQuanFromLastFrame[j, k] + yDctQuanDiff[j, k];
                    if (subsamplingMode == "4:4:4")
                    {
                        cBDctQuan[j, k] = cBDctQuanFromLastFrame[j, k] + cBDctQuanDiff[j, k];
                        cRDctQuan[j, k] = cRDctQuanFromLastFrame[j, k] + cRDctQuanDiff[j, k];
                    }
                }
            }
            // subsampled chroma planes have their own (smaller) dimensions
            if (subsamplingMode != "4:4:4")
            {
                for (int j = 0; j < cBDctQuanFromLastFrame.GetLength(0); j++)
                {
                    for (int k = 0; k < cBDctQuanFromLastFrame.GetLength(1); k++)
                    {
                        cBDctQuan[j, k] = cBDctQuanFromLastFrame[j, k] + cBDctQuanDiff[j, k];
                        cRDctQuan[j, k] = cRDctQuanFromLastFrame[j, k] + cRDctQuanDiff[j, k];
                    }
                }
            }
        }
        yDctQuanFromLastFrame = yDctQuan;
        cBDctQuanFromLastFrame = cBDctQuan;
        cRDctQuanFromLastFrame = cRDctQuan;
        // Tester.PrintToFile("yDctQuanAfter", yDctQuan);

        // revert dct and quantization
        DctImage dctImage = new DctImage(video.quality, video.subsamplingMode);
        int[,] YMatrix = dctImage.RevertDctAndQuantization(yDctQuan);
        int[,] CbMatrix = dctImage.RevertDctAndQuantization(cBDctQuan);
        int[,] CrMatrix = dctImage.RevertDctAndQuantization(cRDctQuan);

        // trim the padding that was added to make the dimensions divisible by 8
        if (subsamplingMode == "4:4:4")
        {
            YMatrix = dctImage.TrimValueMatrix(YMatrix, video.width, video.height);
            CbMatrix = dctImage.TrimValueMatrix(CbMatrix, video.width, video.height);
            CrMatrix = dctImage.TrimValueMatrix(CrMatrix, video.width, video.height);
        }
        else if (subsamplingMode == "4:2:2")
        {
            YMatrix = dctImage.TrimValueMatrix(YMatrix, video.width, video.height);
            CbMatrix = dctImage.TrimValueMatrix(CbMatrix, video.width / 2, video.height);
            CrMatrix = dctImage.TrimValueMatrix(CrMatrix, video.width / 2, video.height);
        }
        else
        {
            YMatrix = dctImage.TrimValueMatrix(YMatrix, video.width, video.height);
            CbMatrix = dctImage.TrimValueMatrix(CbMatrix, video.width / 2, video.height / 2);
            CrMatrix = dctImage.TrimValueMatrix(CrMatrix, video.width / 2, video.height / 2);
        }

        // instantiate YCbCr images, upsampling the chroma channels by index mapping
        YCbCrImage tempImage = new YCbCrImage(YMatrix.GetLength(0), YMatrix.GetLength(1), subsamplingMode);
        for (int j = 0; j < YMatrix.GetLength(0); j++)
        {
            for (int k = 0; k < YMatrix.GetLength(1); k++)
            {
                if (subsamplingMode == "4:4:4")
                {
                    tempImage.pixels[j, k] = new YCbCrPixel(YMatrix[j, k], CbMatrix[j, k], CrMatrix[j, k]);
                }
                else if (subsamplingMode == "4:2:2")
                {
                    double Cb = CbMatrix[j / 2, k];
                    double Cr = CrMatrix[j / 2, k];
                    tempImage.pixels[j, k] = new YCbCrPixel(YMatrix[j, k], Cb, Cr);
                }
                else if (subsamplingMode == "4:2:0")
                {
                    double Cb = CbMatrix[j / 2, k / 2];
                    double Cr = CrMatrix[j / 2, k / 2];
                    tempImage.pixels[j, k] = new YCbCrPixel(YMatrix[j, k], Cb, Cr);
                }
            }
        }
        tempImages[i] = tempImage;

        // report progress, marshalling the update onto the UI thread when this
        // method runs on a worker thread
        MethodInvoker mi = new MethodInvoker(() =>
        {
            int newValue = progressBar.Value + numOfThreads;
            progressBar.Value = Math.Min(newValue, outputImages.Length);
        });
        if (progressBar.InvokeRequired)
        {
            progressBar.Invoke(mi);
        }
        else
        {
            mi.Invoke();
        }
    }
}
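// Usage sketch (illustrative; the thread setup is an assumption, not part of
// this excerpt): each worker decodes its own contiguous slice of frames, with
// keyframe-aligned offsets so delta frames always follow their keyframe.
//
//     Thread[] workers = new Thread[numOfThreads];
//     for (int t = 0; t < numOfThreads; t++)
//     {
//         int threadNum = t; // capture a copy for the closure
//         workers[t] = new Thread(() =>
//             ParallelDecoding(threadNum, video, possibleMultiFors, numOfThreads));
//         workers[t].Start();
//     }
//     foreach (Thread worker in workers) worker.Join();
//
// Per frame the pipeline above runs: Huffman decode -> run-length decode ->
// differential decode -> (delta frames: add to previous coefficients) ->
// inverse quantization/DCT -> trim padding -> rebuild the YCbCrImage.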