/// <summary>
/// Resizes the currently displayed image on the GPU to trk_Size percent of its
/// original size. The target dimensions are snapped down to multiples of 16
/// (minimum 16 px per side). Only 24 bpp RGB bitmaps are supported. Shrinking
/// uses NPP super-sampling; enlarging uses Lanczos interpolation.
/// </summary>
private void btn_Resize_Click(object sender, EventArgs e)
{
    // Use 'as' instead of a hard cast: pic_Image.Image may be null or may not
    // be a Bitmap at all — the original hard cast would have thrown
    // InvalidCastException before the null check could run.
    Bitmap bmp = pic_Image.Image as Bitmap;
    if (bmp == null) return;

    int w = bmp.Width;
    int h = bmp.Height;

    // Refuse to shrink images that are already at (or below) the 16 px minimum.
    if ((w <= 16 || h <= 16) && trk_Size.Value < 100)
    {
        MessageBox.Show("Image is too small for resizing.");
        return;
    }

    // Target size = slider percentage of the original, rounded down to a
    // multiple of 16, with a floor of 16 px per dimension.
    int newW = (int)(trk_Size.Value / 100.0f * w);
    int newH = (int)(trk_Size.Value / 100.0f * h);
    newW -= newW % 16;
    if (newW < 16) newW = 16;
    newH -= newH % 16;
    if (newH < 16) newH = 16;

    double ratioW = newW / (double)w;
    double ratioH = newH / (double)h;
    // Exact comparison is safe: both ratios are 1.0 only when newW == w && newH == h.
    if (ratioW == 1 && ratioH == 1) return; // nothing to do

    if (bmp.PixelFormat != System.Drawing.Imaging.PixelFormat.Format24bppRgb)
    {
        MessageBox.Show("Only three channel color images are supported!");
        return;
    }

    NPPImage_8uC3 imgIn = null;
    NPPImage_8uC3 imgOut = null;
    try
    {
        imgIn = new NPPImage_8uC3(w, h);
        imgOut = new NPPImage_8uC3(newW, newH);

        // SuperSampling is only meaningful when downscaling; switch to Lanczos
        // as soon as either dimension grows (or stays the same).
        InterpolationMode interpol = InterpolationMode.SuperSampling;
        if (ratioH >= 1 || ratioW >= 1) interpol = InterpolationMode.Lanczos;

        imgIn.CopyToDevice(bmp);
        imgIn.ResizeSqrPixel(imgOut, ratioW, ratioH, 0, 0, interpol);

        Bitmap bmpRes = new Bitmap(newW, newH, System.Drawing.Imaging.PixelFormat.Format24bppRgb);
        imgOut.CopyToHost(bmpRes);
        pic_Image.Image = bmpRes;
        // NOTE(review): the previous bitmap (bmp) is no longer displayed after
        // this point; consider disposing it here if no other code keeps a
        // reference to it (GDI handle would otherwise live until finalization).
    }
    finally
    {
        // Always release the device buffers, even when an NPP call throws —
        // the original leaked GPU memory on any exception between the
        // allocations and the trailing Dispose() calls.
        if (imgOut != null) imgOut.Dispose();
        if (imgIn != null) imgIn.Dispose();
    }
}
// 4 << 23 bytes = 32 MiB. NOTE(review): not referenced within this method —
// presumably used by the encode path elsewhere in this class; verify before removing.
const int BUFFER_SIZE = 4 << 23; //32 MegaBytes

#endregion Fields

#region Methods

/// <summary>
/// Decodes a baseline, three-channel (YCbCr) JPEG file into a 24 bpp BGR
/// <see cref="Bitmap"/>. Header parsing and Huffman decoding run on the host;
/// dequantization, inverse DCT and color conversion run on the GPU via NPP.
/// Mirrors the structure of Nvidia's NPP JPEG sample.
/// </summary>
/// <param name="aFilename">Path of the JPEG file to load.</param>
/// <returns>The decoded image as a 24 bpp RGB-format (BGR byte order) bitmap.</returns>
/// <exception cref="ArgumentException">
/// The file is not a JPEG, is not baseline (progressive is rejected), or does
/// not have exactly three components.
/// </exception>
public static Bitmap LoadJpeg(string aFilename)
{
    JPEGCompression compression = new JPEGCompression();
    byte[] pJpegData = File.ReadAllBytes(aFilename);
    int nInputLength = pJpegData.Length;

    // Check if this is a valid JPEG file: first marker must be SOI (0xD8).
    int nPos = 0;
    int nMarker = nextMarker(pJpegData, ref nPos, nInputLength);
    if (nMarker != 0x0D8)
    {
        throw new ArgumentException(aFilename + " is not a JPEG file.");
    }
    nMarker = nextMarker(pJpegData, ref nPos, nInputLength);

    // Parsing and Huffman decoding happen on the host; set up the host-side
    // structures (frame/scan headers, 4 quantization and 4 Huffman table slots
    // as allowed by the JPEG standard) plus their device-side mirrors.
    FrameHeader oFrameHeader = new FrameHeader();
    oFrameHeader.aComponentIdentifier = new byte[3];
    oFrameHeader.aSamplingFactors = new byte[3];
    oFrameHeader.aQuantizationTableSelector = new byte[3];

    QuantizationTable[] aQuantizationTables = new QuantizationTable[4];
    aQuantizationTables[0] = new QuantizationTable();
    aQuantizationTables[1] = new QuantizationTable();
    aQuantizationTables[2] = new QuantizationTable();
    aQuantizationTables[3] = new QuantizationTable();

    // One 64-entry (8x8) quantization table per slot, resident on the device.
    CudaDeviceVariable<byte>[] pdQuantizationTables = new CudaDeviceVariable<byte>[4];
    pdQuantizationTables[0] = new CudaDeviceVariable<byte>(64);
    pdQuantizationTables[1] = new CudaDeviceVariable<byte>(64);
    pdQuantizationTables[2] = new CudaDeviceVariable<byte>(64);
    pdQuantizationTables[3] = new CudaDeviceVariable<byte>(64);

    HuffmanTable[] aHuffmanTables = new HuffmanTable[4];
    aHuffmanTables[0] = new HuffmanTable();
    aHuffmanTables[1] = new HuffmanTable();
    aHuffmanTables[2] = new HuffmanTable();
    aHuffmanTables[3] = new HuffmanTable();

    ScanHeader oScanHeader = new ScanHeader();
    oScanHeader.aComponentSelector = new byte[3];
    oScanHeader.aHuffmanTablesSelector = new byte[3];

    int nMCUBlocksH = 0;
    int nMCUBlocksV = 0;
    int nRestartInterval = -1;

    // Per-component (Y, Cb, Cr) buffers: padded plane sizes, host DCT
    // coefficients, device DCT coefficients, and device image planes.
    NppiSize[] aSrcSize = new NppiSize[3];
    short[][] aphDCT = new short[3][];
    NPPImage_16sC1[] apdDCT = new NPPImage_16sC1[3];
    int[] aDCTStep = new int[3];
    NPPImage_8uC1[] apSrcImage = new NPPImage_8uC1[3];
    int[] aSrcImageStep = new int[3];
    // NOTE(review): the three "Dst" arrays below are never used in this method
    // (likely leftovers from the Nvidia sample's resize path).
    NPPImage_8uC1[] apDstImage = new NPPImage_8uC1[3];
    int[] aDstImageStep = new int[3];
    NppiSize[] aDstSize = new NppiSize[3];

    //Same read routine as in NPP JPEG sample from Nvidia:
    // walk the marker stream until EOF (nextMarker returns -1).
    while (nMarker != -1)
    {
        if (nMarker == 0x0D8)
        {
            // A second SOI means an embedded thumbnail — skip it up to its EOI (0xD9).
            int nNextMarker = nextMarker(pJpegData, ref nPos, nInputLength);

            while (nNextMarker != -1 && nNextMarker != 0x0D9)
            {
                nNextMarker = nextMarker(pJpegData, ref nPos, nInputLength);
            }
        }

        if (nMarker == 0x0DD)
        {
            // DRI: define restart interval.
            readRestartInterval(pJpegData, ref nPos, ref nRestartInterval);
        }

        // SOF0 (baseline) or SOF2 (progressive) frame header.
        // (Non-short-circuit '|' is harmless here: both operands are pure comparisons.)
        if ((nMarker == 0x0C0) | (nMarker == 0x0C2))
        {
            //Assert Baseline for this Sample
            //Note: NPP does support progressive jpegs for both encode and decode
            if (nMarker != 0x0C0)
            {
                // Release device memory before bailing out.
                pdQuantizationTables[0].Dispose();
                pdQuantizationTables[1].Dispose();
                pdQuantizationTables[2].Dispose();
                pdQuantizationTables[3].Dispose();
                throw new ArgumentException(aFilename + " is not a Baseline-JPEG file.");
            }

            // Baseline or Progressive Frame Header
            readFrameHeader(pJpegData, ref nPos, ref oFrameHeader);
            //Console.WriteLine("Image Size: " + oFrameHeader.nWidth + "x" + oFrameHeader.nHeight + "x" + (int)(oFrameHeader.nComponents));

            //Assert 3-Channel Image for this Sample
            if (oFrameHeader.nComponents != 3)
            {
                pdQuantizationTables[0].Dispose();
                pdQuantizationTables[1].Dispose();
                pdQuantizationTables[2].Dispose();
                pdQuantizationTables[3].Dispose();
                throw new ArgumentException(aFilename + " is not a three channel JPEG file.");
            }

            // Compute channel sizes as stored in the JPEG (8x8 blocks & MCU block layout).
            // Sampling factor byte packs horizontal (low nibble) and vertical (high nibble).
            for (int i = 0; i < oFrameHeader.nComponents; ++i)
            {
                nMCUBlocksV = Math.Max(nMCUBlocksV, oFrameHeader.aSamplingFactors[i] >> 4);
                nMCUBlocksH = Math.Max(nMCUBlocksH, oFrameHeader.aSamplingFactors[i] & 0x0f);
            }

            for (int i = 0; i < oFrameHeader.nComponents; ++i)
            {
                NppiSize oBlocks = new NppiSize();
                NppiSize oBlocksPerMCU = new NppiSize(oFrameHeader.aSamplingFactors[i] & 0x0f, oFrameHeader.aSamplingFactors[i] >> 4);

                // Blocks per row/column for this component, rounded up to whole
                // MCUs ((nWidth + 7) / 8 is integer block-count division).
                oBlocks.width = (int)Math.Ceiling((oFrameHeader.nWidth + 7) / 8 * (float)(oBlocksPerMCU.width) / nMCUBlocksH);
                oBlocks.width = DivUp(oBlocks.width, oBlocksPerMCU.width) * oBlocksPerMCU.width;

                oBlocks.height = (int)Math.Ceiling((oFrameHeader.nHeight + 7) / 8 * (float)(oBlocksPerMCU.height) / nMCUBlocksV);
                oBlocks.height = DivUp(oBlocks.height, oBlocksPerMCU.height) * oBlocksPerMCU.height;

                aSrcSize[i].width = oBlocks.width * 8;
                aSrcSize[i].height = oBlocks.height * 8;

                // Allocate Memory: device DCT plane (64 coefficients per block),
                // device image plane, and host DCT staging buffer sized by pitch.
                apdDCT[i] = new NPPImage_16sC1(oBlocks.width * 64, oBlocks.height);
                aDCTStep[i] = apdDCT[i].Pitch;

                apSrcImage[i] = new NPPImage_8uC1(aSrcSize[i].width, aSrcSize[i].height);
                aSrcImageStep[i] = apSrcImage[i].Pitch;

                aphDCT[i] = new short[aDCTStep[i] * oBlocks.height];
            }
        }

        if (nMarker == 0x0DB)
        {
            // DQT: Quantization Tables
            readQuantizationTables(pJpegData, ref nPos, aQuantizationTables);
        }

        if (nMarker == 0x0C4)
        {
            // DHT: Huffman Tables
            readHuffmanTables(pJpegData, ref nPos, aHuffmanTables);
        }

        if (nMarker == 0x0DA)
        {
            // SOS: Scan header followed by the entropy-coded segment.
            readScanHeader(pJpegData, ref nPos, ref oScanHeader);
            // Skip over the scan header bytes to reach the compressed data.
            nPos += 6 + oScanHeader.nComponents * 2;

            // Find where the entropy-coded data ends (the next real marker),
            // skipping restart markers RST0..RST7 (0xD0..0xD7) when a restart
            // interval is in effect.
            int nAfterNextMarkerPos = nPos;
            int nAfterScanMarker = nextMarker(pJpegData, ref nAfterNextMarkerPos, nInputLength);

            if (nRestartInterval > 0)
            {
                while (nAfterScanMarker >= 0x0D0 && nAfterScanMarker <= 0x0D7)
                {
                    // This is a restart marker, go on
                    nAfterScanMarker = nextMarker(pJpegData, ref nAfterNextMarkerPos, nInputLength);
                }
            }

            // Build host-side Huffman decode specs; DC table index is the high
            // nibble of the selector, AC table index the low nibble (+2 because
            // aHuffmanTables stores DC tables in slots 0-1 and AC in 2-3).
            NppiDecodeHuffmanSpec[] apHuffmanDCTableDec = new NppiDecodeHuffmanSpec[3];
            NppiDecodeHuffmanSpec[] apHuffmanACTableDec = new NppiDecodeHuffmanSpec[3];

            for (int i = 0; i < 3; ++i)
            {
                apHuffmanDCTableDec[i] = JPEGCompression.DecodeHuffmanSpecInitAllocHost(aHuffmanTables[(oScanHeader.aHuffmanTablesSelector[i] >> 4)].aCodes, NppiHuffmanTableType.nppiDCTable);
                apHuffmanACTableDec[i] = JPEGCompression.DecodeHuffmanSpecInitAllocHost(aHuffmanTables[(oScanHeader.aHuffmanTablesSelector[i] & 0x0f) + 2].aCodes, NppiHuffmanTableType.nppiACTable);
            }

            // Copy the entropy-coded segment (exclusive of the trailing 2-byte
            // marker) and Huffman-decode it on the host into the DCT buffers.
            byte[] img = new byte[nAfterNextMarkerPos - nPos - 2];
            Buffer.BlockCopy(pJpegData, nPos, img, 0, nAfterNextMarkerPos - nPos - 2);

            JPEGCompression.DecodeHuffmanScanHost(img, nRestartInterval, oScanHeader.nSs, oScanHeader.nSe, oScanHeader.nA >> 4, oScanHeader.nA & 0x0f, aphDCT[0], aphDCT[1], aphDCT[2], aDCTStep, apHuffmanDCTableDec, apHuffmanACTableDec, aSrcSize);

            for (int i = 0; i < 3; ++i)
            {
                JPEGCompression.DecodeHuffmanSpecFreeHost(apHuffmanDCTableDec[i]);
                JPEGCompression.DecodeHuffmanSpecFreeHost(apHuffmanACTableDec[i]);
            }
        }

        nMarker = nextMarker(pJpegData, ref nPos, nInputLength);
    }

    // Copy DCT coefficients and Quantization Tables from host to device
    for (int i = 0; i < 4; ++i)
    {
        pdQuantizationTables[i].CopyToDevice(aQuantizationTables[i].aTable);
    }

    for (int i = 0; i < 3; ++i)
    {
        apdDCT[i].CopyToDevice(aphDCT[i], aDCTStep[i]);
    }

    // Inverse DCT (dequantize + IDCT each 8x8 block on the GPU).
    for (int i = 0; i < 3; ++i)
    {
        compression.DCTQuantInv8x8LS(apdDCT[i], apSrcImage[i], aSrcSize[i], pdQuantizationTables[oFrameHeader.aQuantizationTableSelector[i]]);
    }

    //Alloc final image (sized like the Y plane)
    NPPImage_8uC3 res = new NPPImage_8uC3(apSrcImage[0].Width, apSrcImage[0].Height);

    //Copy Y color plane to first channel
    apSrcImage[0].Copy(res, 0);

    //Cb and Cr channels might be smaller (chroma subsampling)
    if ((oFrameHeader.aSamplingFactors[0] & 0x0f) == 1 && oFrameHeader.aSamplingFactors[0] >> 4 == 1)
    {
        //Color planes are of same size as Y channel
        apSrcImage[1].Copy(res, 1);
        apSrcImage[2].Copy(res, 2);
    }
    else
    {
        //Rescale color planes to full size. The Y sampling factors give the
        //upscale ratio; apSrcImage[0] is deliberately reused as a full-size
        //scratch buffer — its Y data was already copied into res above.
        double scaleX = oFrameHeader.aSamplingFactors[0] & 0x0f;
        double scaleY = oFrameHeader.aSamplingFactors[0] >> 4;

        apSrcImage[1].ResizeSqrPixel(apSrcImage[0], scaleX, scaleY, 0, 0, InterpolationMode.Lanczos);
        apSrcImage[0].Copy(res, 1);
        apSrcImage[2].ResizeSqrPixel(apSrcImage[0], scaleX, scaleY, 0, 0, InterpolationMode.Lanczos);
        apSrcImage[0].Copy(res, 2);
    }

    //System.Drawing.Bitmap is ordered BGR not RGB
    //The NPP routine YCbCr to BGR needs clamped input values, following the YCbCr standard.
    //But JPEG uses unclamped values ranging all from [0..255], thus use our own color matrix:
    float[,] YCbCrToBgr = new float[3, 4] {{1.0f, 1.772f,    0.0f,     -226.816f },
                                           {1.0f, -0.34414f, -0.71414f, 135.45984f},
                                           {1.0f, 0.0f,      1.402f,   -179.456f }};

    //Convert from YCbCr to BGR
    res.ColorTwist(YCbCrToBgr);

    Bitmap bmp = new Bitmap(apSrcImage[0].Width, apSrcImage[0].Height, System.Drawing.Imaging.PixelFormat.Format24bppRgb);
    res.CopyToHost(bmp);

    //Cleanup: release all device allocations (host GC handles the rest).
    res.Dispose();
    apSrcImage[2].Dispose();
    apSrcImage[1].Dispose();
    apSrcImage[0].Dispose();
    apdDCT[2].Dispose();
    apdDCT[1].Dispose();
    apdDCT[0].Dispose();
    pdQuantizationTables[0].Dispose();
    pdQuantizationTables[1].Dispose();
    pdQuantizationTables[2].Dispose();
    pdQuantizationTables[3].Dispose();
    compression.Dispose();

    return bmp;
}