// Convert the raw data in the frame's buffer into the bitmap's data. The formats handled here are
// Mono8, Mono16, Bayer8, Bayer16, Rgb24, Rgb48, Yuv411, Yuv422 and Yuv444; any other pixel format
// makes the method return false.
static unsafe bool Frame2Data(ref tCamera Camera, ref BitmapData Data)
{
    double PixelMean = 0;

    // Build the "night" look-up table on first use: boost dark and mid tones, clamped to [0, 255].
    if (NightLUT == null || NightLUT.Length == 0)
    {
        NightLUT = new Byte[256];
        for (int i = 0; i < 256; i++)
        {
            double t = i + 0.01 * i * (255 - i);
            if (t > 255) { t = 255; }
            if (t < 0) { t = 0; }
            NightLUT[i] = (byte)t;
        }
    }

    // Build the "noon" look-up table on first use: a gamma curve with gain and offset, clamped to [0, 255].
    if (NoonLUT == null || NoonLUT.Length == 0)
    {
        NoonLUT = new Byte[256];
        double gamma_r = 1.5;
        double gamma_b = 2;
        double gamma_c = 0.9;
        for (int i = 0; i < 256; i++)
        {
            double t = gamma_c * Math.Pow(i / 255.0, gamma_r) * 255.0 - gamma_b;
            if (t > 255) { t = 255; }
            if (t < 0) { t = 0; }
            NoonLUT[i] = (byte)t;
        }
    }

    switch (Camera.Frame.Format)
    {
        case tImageFormat.eFmtMono8:
        {
            UInt32 lOffset = 0;
            UInt32 lPos = 0;
            byte* lDst = (byte*)Data.Scan0;
            byte* lSrc = (byte*)Camera.Frame.ImageBuffer;

            while (lOffset < Camera.Frame.ImageBufferSize)
            {
                try
                {
                    // Apply the day/night LUT and replicate the grey value into the B, G and R channels.
                    byte thisPixel = lSrc[lOffset];
                    if (isNight)
                    {
                        thisPixel = NightLUT[thisPixel];
                    }
                    else if (isNoon)
                    {
                        thisPixel = NoonLUT[thisPixel];
                    }
                    lDst[lPos] = thisPixel;
                    lDst[lPos + 1] = thisPixel;
                    lDst[lPos + 2] = thisPixel;
                    PixelMean += lSrc[lOffset];
                }
                catch (Exception)
                {
                    return false;
                }

                lOffset++;
                lPos += 3;

                try
                {
                    // Take care of the padding in the destination bitmap.
                    if ((lOffset % Camera.Frame.Width) == 0)
                    {
                        lPos += (UInt32)Data.Stride - (Camera.Frame.Width * 3);
                    }
                }
                catch (Exception)
                {
                    return false;
                }
            }

            PixelMean /= (double)Camera.Frame.ImageBufferSize;
            illumiance = PixelMean;
            LuminanceAdjust(ref Camera, PixelMean);
            return true;
        }
        case tImageFormat.eFmtMono16:
        {
            UInt32 lOffset = 0;
            UInt32 lPos = 0;
            byte* lDst = (byte*)Data.Scan0;
            byte bitshift = (byte)((int)Camera.Frame.BitDepth - 8);
            UInt16* lSrc = (UInt16*)Camera.Frame.ImageBuffer;

            while (lOffset < Camera.Frame.Width * Camera.Frame.Height)
            {
                lDst[lPos] = (byte)(lSrc[lOffset] >> bitshift);
                lDst[lPos + 1] = lDst[lPos];
                lDst[lPos + 2] = lDst[lPos];
                lOffset++;
                lPos += 3;

                // Take care of the padding in the destination bitmap.
                if ((lOffset % Camera.Frame.Width) == 0)
                {
                    lPos += (UInt32)Data.Stride - (Camera.Frame.Width * 3);
                }
            }
            return true;
        }
        case tImageFormat.eFmtBayer8:
        {
            UInt32 WidthSize = Camera.Frame.Width * 3;
            GCHandle pFrame = GCHandle.Alloc(Camera.Frame, GCHandleType.Pinned);
            UInt32 remainder = (((WidthSize + 3U) & ~3U) - WidthSize);

            // Apply the day/night LUT to the raw Bayer data and accumulate the mean pixel value
            // before the colors are interpolated into the destination bitmap.
            UInt32 lOffset = 0;
            byte* lSrc = (byte*)Camera.Frame.ImageBuffer;
            while (lOffset < Camera.Frame.ImageBufferSize)
            {
                PixelMean += ((double)lSrc[lOffset + 2] + (double)lSrc[lOffset + 1] + (double)lSrc[lOffset]) / 3;
                if (isNight)
                {
                    lSrc[lOffset + 2] = NightLUT[lSrc[lOffset + 2]];
                    lSrc[lOffset + 1] = NightLUT[lSrc[lOffset + 1]];
                    lSrc[lOffset] = NightLUT[lSrc[lOffset]];
                }
                if (isNoon)
                {
                    lSrc[lOffset + 2] = NoonLUT[lSrc[lOffset + 2]];
                    lSrc[lOffset + 1] = NoonLUT[lSrc[lOffset + 1]];
                    lSrc[lOffset] = NoonLUT[lSrc[lOffset]];
                }
                lOffset += 3;
            }

            // Interpolate the colors.
            IntPtr pRed = (IntPtr)((byte*)Data.Scan0 + 2);
            IntPtr pGreen = (IntPtr)((byte*)Data.Scan0 + 1);
            IntPtr pBlue = (IntPtr)((byte*)Data.Scan0);
            Pv.ColorInterpolate(pFrame.AddrOfPinnedObject(), pRed, pGreen, pBlue, 2, remainder);

            PixelMean /= (double)Camera.Frame.ImageBufferSize / 3;
            illumiance = PixelMean;
            LuminanceAdjust(ref Camera, PixelMean);
            pFrame.Free();
            return true;
        }
        case tImageFormat.eFmtBayer16:
        {
            UInt32 WidthSize = Camera.Frame.Width * 3;
            UInt32 lOffset = 0;
            byte bitshift = (byte)((int)Camera.Frame.BitDepth - 8);
            UInt16* lSrc = (UInt16*)Camera.Frame.ImageBuffer;
            byte* lDst = (byte*)Camera.Frame.ImageBuffer;
            UInt32 remainder = (((WidthSize + 3U) & ~3U) - WidthSize);
            GCHandle pFrame;

            Camera.Frame.Format = tImageFormat.eFmtBayer8;
            pFrame = GCHandle.Alloc(Camera.Frame, GCHandleType.Pinned);

            // Shift the 16-bit pixels down to 8 bits, in place.
            while (lOffset < Camera.Frame.Width * Camera.Frame.Height)
            {
                lDst[lOffset] = (byte)(lSrc[lOffset++] >> bitshift);
            }

            // Interpolate the colors.
            IntPtr pRed = (IntPtr)((byte*)Data.Scan0 + 2);
            IntPtr pGreen = (IntPtr)((byte*)Data.Scan0 + 1);
            IntPtr pBlue = (IntPtr)((byte*)Data.Scan0);
            Pv.ColorInterpolate(pFrame.AddrOfPinnedObject(), pRed, pGreen, pBlue, 2, remainder);
            pFrame.Free();
            return true;
        }
        case tImageFormat.eFmtRgb24:
        {
            UInt32 lOffset = 0;
            UInt32 lPos = 0;
            byte* lDst = (byte*)Data.Scan0;

            while (lOffset < Camera.Frame.ImageBufferSize)
            {
                // Copy the data, swapping RGB to the bitmap's BGR order.
                lDst[lPos] = Camera.Buffer[lOffset + 2];
                lDst[lPos + 1] = Camera.Buffer[lOffset + 1];
                lDst[lPos + 2] = Camera.Buffer[lOffset];
                PixelMean += ((double)Camera.Buffer[lOffset + 2] + (double)Camera.Buffer[lOffset + 1] + (double)Camera.Buffer[lOffset]) / 3;
                lOffset += 3;
                lPos += 3;

                // Take care of the padding in the destination bitmap.
                if ((lOffset % (Camera.Frame.Width * 3)) == 0)
                {
                    lPos += (UInt32)Data.Stride - (Camera.Frame.Width * 3);
                }
            }

            PixelMean /= (double)Camera.Frame.ImageBufferSize / 3;
            illumiance = PixelMean;
            LuminanceAdjust(ref Camera, PixelMean);
            return true;
        }
        case tImageFormat.eFmtRgb48:
        {
            UInt32 lOffset = 0;
            UInt32 lPos = 0;
            UInt32 lLength = Camera.Frame.ImageBufferSize / sizeof(UInt16);
            UInt16* lSrc = (UInt16*)Camera.Frame.ImageBuffer;
            byte* lDst = (byte*)Data.Scan0;
            byte bitshift = (byte)((int)Camera.Frame.BitDepth - 8);

            while (lOffset < lLength)
            {
                // Copy the data, shifting down to 8 bits and swapping RGB to BGR order.
                lDst[lPos] = (byte)(lSrc[lOffset + 2] >> bitshift);
                lDst[lPos + 1] = (byte)(lSrc[lOffset + 1] >> bitshift);
                lDst[lPos + 2] = (byte)(lSrc[lOffset] >> bitshift);
                lOffset += 3;
                lPos += 3;

                // Take care of the padding in the destination bitmap.
                if ((lOffset % (Camera.Frame.Width * 3)) == 0)
                {
                    lPos += (UInt32)Data.Stride - (Camera.Frame.Width * 3);
                }
            }
            return true;
        }
        case tImageFormat.eFmtYuv411:
        {
            UInt32 lOffset = 0;
            UInt32 lPos = 0;
            byte* lDst = (byte*)Data.Scan0;
            int y1, y2, y3, y4, u, v;
            int r, g, b;

            r = g = b = 0;
            while (lOffset < Camera.Frame.ImageBufferSize)
            {
                // Each 6-byte group holds one U and one V sample shared by four luma samples.
                u = Camera.Buffer[lOffset++];
                y1 = Camera.Buffer[lOffset++];
                y2 = Camera.Buffer[lOffset++];
                v = Camera.Buffer[lOffset++];
                y3 = Camera.Buffer[lOffset++];
                y4 = Camera.Buffer[lOffset++];

                YUV2RGB(y1, u, v, ref r, ref g, ref b);
                lDst[lPos++] = (byte)b;
                lDst[lPos++] = (byte)g;
                lDst[lPos++] = (byte)r;
                YUV2RGB(y2, u, v, ref r, ref g, ref b);
                lDst[lPos++] = (byte)b;
                lDst[lPos++] = (byte)g;
                lDst[lPos++] = (byte)r;
                YUV2RGB(y3, u, v, ref r, ref g, ref b);
                lDst[lPos++] = (byte)b;
                lDst[lPos++] = (byte)g;
                lDst[lPos++] = (byte)r;
                YUV2RGB(y4, u, v, ref r, ref g, ref b);
                lDst[lPos++] = (byte)b;
                lDst[lPos++] = (byte)g;
                lDst[lPos++] = (byte)r;
            }
            return true;
        }
        case tImageFormat.eFmtYuv422:
        {
            UInt32 lOffset = 0;
            UInt32 lPos = 0;
            byte* lDst = (byte*)Data.Scan0;
            int y1, y2, u, v;
            int r, g, b;

            r = g = b = 0;
            while (lOffset < Camera.Frame.ImageBufferSize)
            {
                // Each 4-byte group holds one U and one V sample shared by two luma samples.
                u = Camera.Buffer[lOffset++];
                y1 = Camera.Buffer[lOffset++];
                v = Camera.Buffer[lOffset++];
                y2 = Camera.Buffer[lOffset++];

                YUV2RGB(y1, u, v, ref r, ref g, ref b);
                lDst[lPos++] = (byte)b;
                lDst[lPos++] = (byte)g;
                lDst[lPos++] = (byte)r;
                YUV2RGB(y2, u, v, ref r, ref g, ref b);
                lDst[lPos++] = (byte)b;
                lDst[lPos++] = (byte)g;
                lDst[lPos++] = (byte)r;
            }
            return true;
        }
        case tImageFormat.eFmtYuv444:
        {
            UInt32 lOffset = 0;
            UInt32 lPos = 0;
            byte* lDst = (byte*)Data.Scan0;
            int y1, y2, u, v;
            int r, g, b;

            r = g = b = 0;
            while (lOffset < Camera.Frame.ImageBufferSize)
            {
                u = Camera.Buffer[lOffset++];
                y1 = Camera.Buffer[lOffset++];
                v = Camera.Buffer[lOffset++];
                lOffset++;
                y2 = Camera.Buffer[lOffset++];
                lOffset++;

                YUV2RGB(y1, u, v, ref r, ref g, ref b);
                lDst[lPos++] = (byte)b;
                lDst[lPos++] = (byte)g;
                lDst[lPos++] = (byte)r;
                YUV2RGB(y2, u, v, ref r, ref g, ref b);
                lDst[lPos++] = (byte)b;
                lDst[lPos++] = (byte)g;
                lDst[lPos++] = (byte)r;
            }
            return true;
        }
        default:
            return false;
    }
}
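// The sketch below is not part of the original sample: it is a minimal illustration of how a caller
// might hand this overload a BitmapData. It assumes the destination Bitmap was created elsewhere with
// the frame's dimensions and a 24bpp format (the code above writes 3 bytes per pixel and relies on
// Data.Stride for row padding), and that the file's existing System.Drawing / System.Drawing.Imaging
// usings are in scope. The names Frame2DataExample, lCamera and lBitmap are hypothetical.
static void Frame2DataExample(ref tCamera lCamera, Bitmap lBitmap)
{
    Rectangle lRect = new Rectangle(0, 0, lBitmap.Width, lBitmap.Height);
    BitmapData lData = lBitmap.LockBits(lRect, ImageLockMode.ReadWrite, PixelFormat.Format24bppRgb);
    try
    {
        if (!Frame2Data(ref lCamera, ref lData))
        {
            // Conversion failed: unsupported pixel format or a fault while copying the frame.
        }
    }
    finally
    {
        lBitmap.UnlockBits(lData);
    }
}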
// Convert the raw data in the frame's buffer into the bitmap's data. The formats handled here are
// Mono8, Mono16, Bayer8, Bayer16, Rgb24, Rgb48, Yuv411, Yuv422 and Yuv444; any other pixel format
// makes the method return false.
static unsafe bool Frame2Data(ref BitmapData Data)
{
    switch (GCamera.Frame.Format)
    {
        case tImageFormat.eFmtMono8:
        {
            UInt32 lOffset = 0;
            UInt32 lPos = 0;
            byte* lDst = (byte*)Data.Scan0;

            while (lOffset < GCamera.Frame.ImageBufferSize)
            {
                // Replicate the grey value into the B, G and R channels.
                lDst[lPos] = GCamera.Buffer[lOffset];
                lDst[lPos + 1] = GCamera.Buffer[lOffset];
                lDst[lPos + 2] = GCamera.Buffer[lOffset];
                lOffset++;
                lPos += 3;

                // Take care of the padding in the destination bitmap.
                if ((lOffset % GCamera.Frame.Width) == 0)
                {
                    lPos += (UInt32)Data.Stride - (GCamera.Frame.Width * 3);
                }
            }
            return true;
        }
        case tImageFormat.eFmtMono16:
        {
            UInt32 lOffset = 0;
            UInt32 lPos = 0;
            byte* lDst = (byte*)Data.Scan0;
            byte bitshift = (byte)((int)GCamera.Frame.BitDepth - 8);
            UInt16* lSrc = (UInt16*)GCamera.Frame.ImageBuffer;

            while (lOffset < GCamera.Frame.Width * GCamera.Frame.Height)
            {
                lDst[lPos] = (byte)(lSrc[lOffset] >> bitshift);
                lDst[lPos + 1] = lDst[lPos];
                lDst[lPos + 2] = lDst[lPos];
                lOffset++;
                lPos += 3;

                // Take care of the padding in the destination bitmap.
                if ((lOffset % GCamera.Frame.Width) == 0)
                {
                    lPos += (UInt32)Data.Stride - (GCamera.Frame.Width * 3);
                }
            }
            return true;
        }
        case tImageFormat.eFmtBayer8:
        {
            UInt32 WidthSize = GCamera.Frame.Width * 3;
            GCHandle pFrame = GCHandle.Alloc(GCamera.Frame, GCHandleType.Pinned);
            UInt32 remainder = (((WidthSize + 3U) & ~3U) - WidthSize);

            // Interpolate the colors.
            IntPtr pRed = (IntPtr)((byte*)Data.Scan0 + 2);
            IntPtr pGreen = (IntPtr)((byte*)Data.Scan0 + 1);
            IntPtr pBlue = (IntPtr)((byte*)Data.Scan0);
            Pv.ColorInterpolate(pFrame.AddrOfPinnedObject(), pRed, pGreen, pBlue, 2, remainder);
            pFrame.Free();
            return true;
        }
        case tImageFormat.eFmtBayer16:
        {
            UInt32 WidthSize = GCamera.Frame.Width * 3;
            UInt32 lOffset = 0;
            byte bitshift = (byte)((int)GCamera.Frame.BitDepth - 8);
            UInt16* lSrc = (UInt16*)GCamera.Frame.ImageBuffer;
            byte* lDst = (byte*)GCamera.Frame.ImageBuffer;
            UInt32 remainder = (((WidthSize + 3U) & ~3U) - WidthSize);
            GCHandle pFrame;

            GCamera.Frame.Format = tImageFormat.eFmtBayer8;
            pFrame = GCHandle.Alloc(GCamera.Frame, GCHandleType.Pinned);

            // Shift the 16-bit pixels down to 8 bits, in place.
            while (lOffset < GCamera.Frame.Width * GCamera.Frame.Height)
            {
                lDst[lOffset] = (byte)(lSrc[lOffset++] >> bitshift);
            }

            // Interpolate the colors.
            IntPtr pRed = (IntPtr)((byte*)Data.Scan0 + 2);
            IntPtr pGreen = (IntPtr)((byte*)Data.Scan0 + 1);
            IntPtr pBlue = (IntPtr)((byte*)Data.Scan0);
            Pv.ColorInterpolate(pFrame.AddrOfPinnedObject(), pRed, pGreen, pBlue, 2, remainder);
            pFrame.Free();
            return true;
        }
        case tImageFormat.eFmtRgb24:
        {
            UInt32 lOffset = 0;
            UInt32 lPos = 0;
            byte* lDst = (byte*)Data.Scan0;

            while (lOffset < GCamera.Frame.ImageBufferSize)
            {
                // Copy the data, swapping RGB to the bitmap's BGR order.
                lDst[lPos] = GCamera.Buffer[lOffset + 2];
                lDst[lPos + 1] = GCamera.Buffer[lOffset + 1];
                lDst[lPos + 2] = GCamera.Buffer[lOffset];
                lOffset += 3;
                lPos += 3;

                // Take care of the padding in the destination bitmap.
                if ((lOffset % (GCamera.Frame.Width * 3)) == 0)
                {
                    lPos += (UInt32)Data.Stride - (GCamera.Frame.Width * 3);
                }
            }
            return true;
        }
        case tImageFormat.eFmtRgb48:
        {
            UInt32 lOffset = 0;
            UInt32 lPos = 0;
            UInt32 lLength = GCamera.Frame.ImageBufferSize / sizeof(UInt16);
            UInt16* lSrc = (UInt16*)GCamera.Frame.ImageBuffer;
            byte* lDst = (byte*)Data.Scan0;
            byte bitshift = (byte)((int)GCamera.Frame.BitDepth - 8);

            while (lOffset < lLength)
            {
                // Copy the data, shifting down to 8 bits and swapping RGB to BGR order.
                lDst[lPos] = (byte)(lSrc[lOffset + 2] >> bitshift);
                lDst[lPos + 1] = (byte)(lSrc[lOffset + 1] >> bitshift);
                lDst[lPos + 2] = (byte)(lSrc[lOffset] >> bitshift);
                lOffset += 3;
                lPos += 3;

                // Take care of the padding in the destination bitmap.
                if ((lOffset % (GCamera.Frame.Width * 3)) == 0)
                {
                    lPos += (UInt32)Data.Stride - (GCamera.Frame.Width * 3);
                }
            }
            return true;
        }
        case tImageFormat.eFmtYuv411:
        {
            UInt32 lOffset = 0;
            UInt32 lPos = 0;
            byte* lDst = (byte*)Data.Scan0;
            int y1, y2, y3, y4, u, v;
            int r, g, b;

            r = g = b = 0;
            while (lOffset < GCamera.Frame.ImageBufferSize)
            {
                // Each 6-byte group holds one U and one V sample shared by four luma samples.
                u = GCamera.Buffer[lOffset++];
                y1 = GCamera.Buffer[lOffset++];
                y2 = GCamera.Buffer[lOffset++];
                v = GCamera.Buffer[lOffset++];
                y3 = GCamera.Buffer[lOffset++];
                y4 = GCamera.Buffer[lOffset++];

                YUV2RGB(y1, u, v, ref r, ref g, ref b);
                lDst[lPos++] = (byte)b;
                lDst[lPos++] = (byte)g;
                lDst[lPos++] = (byte)r;
                YUV2RGB(y2, u, v, ref r, ref g, ref b);
                lDst[lPos++] = (byte)b;
                lDst[lPos++] = (byte)g;
                lDst[lPos++] = (byte)r;
                YUV2RGB(y3, u, v, ref r, ref g, ref b);
                lDst[lPos++] = (byte)b;
                lDst[lPos++] = (byte)g;
                lDst[lPos++] = (byte)r;
                YUV2RGB(y4, u, v, ref r, ref g, ref b);
                lDst[lPos++] = (byte)b;
                lDst[lPos++] = (byte)g;
                lDst[lPos++] = (byte)r;
            }
            return true;
        }
        case tImageFormat.eFmtYuv422:
        {
            UInt32 lOffset = 0;
            UInt32 lPos = 0;
            byte* lDst = (byte*)Data.Scan0;
            int y1, y2, u, v;
            int r, g, b;

            r = g = b = 0;
            while (lOffset < GCamera.Frame.ImageBufferSize)
            {
                // Each 4-byte group holds one U and one V sample shared by two luma samples.
                u = GCamera.Buffer[lOffset++];
                y1 = GCamera.Buffer[lOffset++];
                v = GCamera.Buffer[lOffset++];
                y2 = GCamera.Buffer[lOffset++];

                YUV2RGB(y1, u, v, ref r, ref g, ref b);
                lDst[lPos++] = (byte)b;
                lDst[lPos++] = (byte)g;
                lDst[lPos++] = (byte)r;
                YUV2RGB(y2, u, v, ref r, ref g, ref b);
                lDst[lPos++] = (byte)b;
                lDst[lPos++] = (byte)g;
                lDst[lPos++] = (byte)r;
            }
            return true;
        }
        case tImageFormat.eFmtYuv444:
        {
            UInt32 lOffset = 0;
            UInt32 lPos = 0;
            byte* lDst = (byte*)Data.Scan0;
            int y1, y2, u, v;
            int r, g, b;

            r = g = b = 0;
            while (lOffset < GCamera.Frame.ImageBufferSize)
            {
                u = GCamera.Buffer[lOffset++];
                y1 = GCamera.Buffer[lOffset++];
                v = GCamera.Buffer[lOffset++];
                lOffset++;
                y2 = GCamera.Buffer[lOffset++];
                lOffset++;

                YUV2RGB(y1, u, v, ref r, ref g, ref b);
                lDst[lPos++] = (byte)b;
                lDst[lPos++] = (byte)g;
                lDst[lPos++] = (byte)r;
                YUV2RGB(y2, u, v, ref r, ref g, ref b);
                lDst[lPos++] = (byte)b;
                lDst[lPos++] = (byte)g;
                lDst[lPos++] = (byte)r;
            }
            return true;
        }
        default:
            return false;
    }
}
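// YUV2RGB is defined elsewhere in this file. As a reference only, a typical full-range BT.601-style
// conversion with the same signature looks like the hypothetical sketch below; it is named
// Yuv2RgbSketch to avoid clashing with the real helper, and the exact coefficients the original
// helper uses may differ.
static void Yuv2RgbSketch(int y, int u, int v, ref int r, ref int g, ref int b)
{
    // Centre the chroma samples around zero.
    u -= 128;
    v -= 128;

    // Standard full-range BT.601 conversion (u = Cb, v = Cr).
    r = (int)(y + 1.402 * v);
    g = (int)(y - 0.344136 * u - 0.714136 * v);
    b = (int)(y + 1.772 * u);

    // Clamp each channel to 0..255 before the caller narrows it to a byte.
    if (r < 0) { r = 0; } if (r > 255) { r = 255; }
    if (g < 0) { g = 0; } if (g > 255) { g = 255; }
    if (b < 0) { b = 0; } if (b > 255) { b = 255; }
}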