/// <summary>
/// Wraps one completed frame: the owning camera, the pinned native frame
/// pointer, the marshalled tFrame structure and its managed image buffer.
/// </summary>
internal Frame(Camera camera, IntPtr framePointer, tFrame frame, byte[] buffer)
{
    // Assignments are independent; order is immaterial.
    this.buffer = buffer;
    this.frame = frame;
    this.framePointer = framePointer;
    this.camera = camera;
}
/// <summary>
/// Raises the FrameReady event for the frame that just completed.
/// </summary>
/// <param name="framePointer">Pointer to the completed native tFrame structure.</param>
protected void OnFrameReady(IntPtr framePointer)
{
    // Snapshot the delegate first: a subscriber could unsubscribe between a
    // direct null check and the invocation, which would otherwise raise a
    // NullReferenceException from the then-stale non-null check.
    var handler = FrameReady;
    if (handler != null)
    {
        // Marshal the native structure and pair it with the managed buffer
        // that was registered for this frame pointer when it was queued.
        tFrame frameData = (tFrame)Marshal.PtrToStructure(framePointer, typeof(tFrame));
        Frame frame = new Frame(this, framePointer, frameData, buffers[framePointer]);
        handler(this, frame);
    }
}
// Callback called when a frame is done: re-queues the camera's frame so that
// acquisition keeps running.
// NOTE(review): a new pinned GCHandle is allocated on every callback and never
// freed, so handles accumulate for as long as frames complete. The frame must
// stay pinned while the driver owns it, so it cannot simply be freed here —
// presumably a single handle should be allocated once and reused; confirm.
// NOTE(review): FrameCB is a local delegate passed to native code; if the
// driver retains the function pointer past this call, the delegate may be
// garbage-collected while still registered — verify against the PvAPI
// marshalling rules.
static void FrameDoneCB(ref tFrame pFrame, ref tCamera Camera)
{
    // Pin the camera's frame structure so the driver can write into it.
    GCHandle pFrame1 = GCHandle.Alloc(Camera.Frame, GCHandleType.Pinned);
    tFrameCallback FrameCB = new tFrameCallback(FrameDummyCB);
    // If the frame was completed, re-enqueue it.
    if (pFrame.Status != tErr.eErrUnplugged && pFrame.Status != tErr.eErrCancelled)
    {
        Pv.CaptureQueueFrame(Camera.Handle, pFrame1.AddrOfPinnedObject(), FrameCB);
    }
}
// Callback for when the frame is completed.
// A frame that finished — even with lost or missing data — is handed straight
// back to the driver so streaming continues; any other status drops it.
static void FrameDoneCB(IntPtr pFrame)
{
    // Marshal the pointer into a frame structure to inspect its status.
    tFrame completed = (tFrame)Marshal.PtrToStructure(pFrame, typeof(tFrame));
    switch (completed.Status)
    {
        case tErr.eErrSuccess:
        case tErr.eErrDataLost:
        case tErr.eErrDataMissing:
            // Re-enqueue the same native frame for the next acquisition.
            Pv.CaptureQueueFrame(GCamera.Handle, pFrame, FrameCB);
            break;
    }
}
// Callback for when the frame is completed: logs the status and re-queues the
// frame unless the camera was unplugged or the capture was cancelled.
static void FrameDoneCB(IntPtr pFrame)
{
    // Marshal the pointer into a frame structure.
    tFrame Frame = (tFrame)Marshal.PtrToStructure(pFrame, typeof(tFrame));
    // Fixed typo in the user-visible message ("recieved" -> "received").
    Console.WriteLine("Frame received {0}.", (uint)Frame.Status);
    // If the frame was completed (or if data were missing/lost), re-enqueue it.
    // NOTE(review): passing the method group creates a fresh delegate per call;
    // if the driver keeps the function pointer after this call returns, the
    // delegate may be garbage-collected — confirm a long-lived delegate is
    // held elsewhere.
    if (Frame.Status != tErr.eErrUnplugged && Frame.Status != tErr.eErrCancelled)
    {
        Pv.CaptureQueueFrame(GCamera.Handle, pFrame, FrameDoneCB);
    }
}
// Setup the camera up for streaming.
// Allocates and pins one frame buffer, starts capture in free-run continuous
// mode and queues the first frame; returns false on any failure.
static bool CameraStart()
{
    UInt32 frameSize = 0;

    // Adjust packet size for optimal performance.
    Pv.CaptureAdjustPacketSize(GCamera.Handle, 8228);

    // Determine how big the frame buffer should be; bail out early if the
    // attribute cannot be read.
    if (Pv.AttrUint32Get(GCamera.Handle, "TotalBytesPerFrame", ref frameSize) != 0)
    {
        return false;
    }

    // Build a frame whose image buffer is a pinned managed array the driver
    // can write into directly.
    tFrame frame = new tFrame();
    byte[] imageBuffer = new byte[frameSize];
    GCHandle pinnedBuffer = GCHandle.Alloc(imageBuffer, GCHandleType.Pinned);
    frame.Context.Field0 = new IntPtr(GCamera.Handle); // handle to the camera
    frame.ImageBuffer = pinnedBuffer.AddrOfPinnedObject();
    frame.ImageBufferSize = frameSize;

    // Start the capture mode.
    if (Pv.CaptureStart(GCamera.Handle) != 0)
    {
        return false;
    }

    // Put the camera in continuous acquisition, triggered in "Freerun".
    if (Pv.AttrEnumSet(GCamera.Handle, "FrameStartTriggerMode", "Freerun") != 0)
    {
        return false;
    }

    if (Pv.CommandRun(GCamera.Handle, "AcquisitionStart") != 0)
    {
        // Acquisition failed to start: reset the camera to non-capture mode.
        Pv.CaptureEnd(GCamera.Handle);
        Console.WriteLine("Failed to start.");
        return false;
    }

    // Pin a copy of the frame structure and hand it to the driver.
    GCHandle pinnedFrame = GCHandle.Alloc(frame, GCHandleType.Pinned);
    Pv.CaptureQueueFrame(GCamera.Handle, pinnedFrame.AddrOfPinnedObject(), FrameCB);
    return true;
}
/// <summary>
/// Starts continuous fixed-rate acquisition: opens the capture stream, pins a
/// pool of frame buffers, queues them with the driver and starts the camera.
/// </summary>
/// <param name="fmt">Pixel format recorded on the image-format settings.</param>
/// <exception cref="PvException">
/// Thrown when no camera is attached or any driver call fails; on failure the
/// partial setup is torn down via EndCapture() first.
/// </exception>
public void BeginCapture(tImageFormat fmt)
{
    tErr error;
    if (!camera.HasValue)
    {
        // No camera attached; nothing to clean up yet.
        error = tErr.eErrUnavailable;
        throw new PvException(error);
    }
    ImageFormat.pixelformat = fmt;
    error = Pv.CaptureStart(camera.Value);
    if (error != tErr.eErrSuccess) goto error;
    frameBufferHandles = new GCHandle[FRAME_POOL_SIZE];
    framePoolHandles = new GCHandle[FRAME_POOL_SIZE];
    frames = new tFrame[FRAME_POOL_SIZE];
    // Ask the driver how big each frame buffer must be.
    uint bufferSize = 0;
    error = Pv.AttrUint32Get(camera.Value, "TotalBytesPerFrame", ref bufferSize);
    if (error != tErr.eErrSuccess) goto error;
    // Build the frame pool: each slot pins a managed buffer (the driver writes
    // into it asynchronously) plus the tFrame structure handed to the driver.
    for (int count = FRAME_POOL_SIZE - 1; count >= 0; count--)
    {
        byte[] buffer = new byte[bufferSize];
        GCHandle bufferHandle = GCHandle.Alloc(buffer, GCHandleType.Pinned);
        frameBufferHandles[count] = bufferHandle;
        tFrame frame = new tFrame { ImageBuffer = bufferHandle.AddrOfPinnedObject(), ImageBufferSize = bufferSize, AncillaryBufferSize = 0 };
        frames[count] = frame;
        // NOTE(review): GCHandle.Alloc pins a boxed copy of the structure, so
        // frames[count] and the driver-visible copy are distinct objects —
        // confirm the frame is only read back through the pointer.
        GCHandle frameHandle = GCHandle.Alloc(frame, GCHandleType.Pinned);
        framePoolHandles[count] = frameHandle;
        IntPtr framePointer = frameHandle.AddrOfPinnedObject();
        buffers.Add(framePointer, buffer);
        // NOTE(review): on queue failure 'error' still holds eErrSuccess from
        // the last driver call, so the exception thrown below carries a
        // success code — confirm whether a dedicated error code is intended.
        if (!communicationManager.QueueFrame(framePointer, callback)) goto error;
    }
    this.FrameRate = 30;
    error = Pv.AttrEnumSet(this.camera.Value, "FrameStartTriggerMode", "FixedRate");
    if (error != tErr.eErrSuccess) goto error;
    error = Pv.AttrEnumSet(this.camera.Value, "AcquisitionMode", "Continuous");
    if (error != tErr.eErrSuccess) goto error;
    error = Pv.CommandRun(this.camera.Value, "AcquisitionStart");
    if (error != tErr.eErrSuccess) goto error;
    return;
    // Shared failure path: undo any partial setup, then surface the error.
    error:
    EndCapture();
    throw new PvException(error);
}
// Setup the camera up for streaming with driver event notifications.
// Returns false if the camera lacks event support or any setup step fails.
static bool CameraStart()
{
    // Delegates handed to the native driver for frame-done and camera events.
    // NOTE(review): both are locals; if the driver keeps the function pointers
    // after this method returns, the delegates may be garbage-collected while
    // still registered — confirm something else keeps them alive.
    tFrameCallback lFrameCB = new tFrameCallback(FrameDoneCB);
    tCameraEventCallback lEventCB = new tCameraEventCallback(EventDone);
    UInt32 FrameSize = 0;
    IntPtr Context = IntPtr.Zero;
    // Event notifications require the EventsEnable1 attribute to exist.
    if (Pv.AttrExists(GCamera.Handle, "EventsEnable1") == tErr.eErrNotFound)
    {
        Console.WriteLine("This camera does not support event notifications.");
        return(false);
    }
    // Adjust packet size for optimal performance.
    Pv.CaptureAdjustPacketSize(GCamera.Handle, 8228);
    // Determines how big the frame buffers should be.
    if (Pv.AttrUint32Get(GCamera.Handle, "TotalBytesPerFrame", ref FrameSize) == 0)
    {
        tFrame Frame = new tFrame();
        byte[] Buffer = new byte[FrameSize];
        // Pin the managed buffer so the driver can write into it directly.
        GCHandle pBuffer = GCHandle.Alloc(Buffer, GCHandleType.Pinned);
        // Set the frame's fields.
        // Handle to the Camera.
        Frame.Context.Field0 = new IntPtr(GCamera.Handle);
        // Address of the pinned object.
        Frame.ImageBuffer = pBuffer.AddrOfPinnedObject();
        // Buffer size.
        Frame.ImageBufferSize = FrameSize;
        // Setup the event channel: clear the event mask, then turn on
        // notification for the three events of interest.
        Pv.AttrUint32Set(GCamera.Handle, "EventsEnable1", 0);
        Pv.AttrEnumSet(GCamera.Handle, "EventSelector", "AcquisitionStart");
        Pv.AttrEnumSet(GCamera.Handle, "EventNotification", "On");
        Pv.AttrEnumSet(GCamera.Handle, "EventSelector", "AcquisitionEnd");
        Pv.AttrEnumSet(GCamera.Handle, "EventNotification", "On");
        Pv.AttrEnumSet(GCamera.Handle, "EventSelector", "FrameTrigger");
        Pv.AttrEnumSet(GCamera.Handle, "EventNotification", "On");
        if (Pv.CameraEventCallbackRegister(GCamera.Handle, lEventCB, Context) != tErr.eErrSuccess)
        {
            Console.WriteLine("There was an error accessing the driver.");
            return(false);
        }
        // Start the capture mode.
        if (Pv.CaptureStart(GCamera.Handle) == 0)
        {
            // Fix the frame rate at 5 fps.
            if (Pv.AttrFloat32Set(GCamera.Handle, "FrameRate", 5) == 0)
            {
                // Trigger mode "FixedRate" (the previous comment said
                // "Freerun", which did not match the code).
                if (Pv.AttrEnumSet(GCamera.Handle, "FrameStartTriggerMode", "FixedRate") == 0)
                {
                    // Set the acquisition mode into continuous.
                    if (Pv.CommandRun(GCamera.Handle, "AcquisitionStart") != 0)
                    {
                        // If that fails, reset the camera to non-capture mode.
                        Pv.CaptureEnd(GCamera.Handle);
                        Console.WriteLine("Failed to start.");
                        return(false);
                    }
                    else
                    {
                        // Pin down a copy of the frame structure.
                        // NOTE(review): this pins a boxed copy of 'Frame' and
                        // the handle is never freed — acceptable for a
                        // run-to-exit sample, a leak anywhere else.
                        GCHandle pFrame = GCHandle.Alloc(Frame, GCHandleType.Pinned);
                        // Enqueue the frame.
                        Pv.CaptureQueueFrame(GCamera.Handle, pFrame.AddrOfPinnedObject(), lFrameCB);
                        return(true);
                    }
                }
                else
                {
                    return(false);
                }
            }
            else
            {
                return(false);
            }
        }
        else
        {
            return(false);
        }
    }
    else
    {
        return(false);
    }
}
// Convert the raw data in the frame's buffer into the bitmap's data. The
// destination is treated as a 24bpp BGR bitmap whose Stride may include
// per-row padding.
// NOTE(review): the previous header claimed eFmtRgb48, eFmtYuv411 and
// eFmtYuv444 are unsupported, yet cases for all three exist below — confirm
// which statement is current.
static unsafe bool Frame2Data(ref tFrame frame, ref byte[] buffer, ref BitmapData data)
{
    switch (frame.Format)
    {
        case tImageFormat.eFmtMono8:
        {
            // Replicate each 8-bit sample into the B, G and R channels.
            UInt32 lOffset = 0;
            UInt32 lPos = 0;
            byte* lDst = (byte*)data.Scan0;
            byte* lSrc = (byte*)frame.ImageBuffer;
            while (lOffset < frame.ImageBufferSize)
            {
                lDst[lPos] = lSrc[lOffset];
                lDst[lPos + 1] = lSrc[lOffset];
                lDst[lPos + 2] = lSrc[lOffset];
                lOffset++;
                lPos += 3;
                // take care of the padding in the destination bitmap
                if ((lOffset % frame.Width) == 0) lPos += (UInt32)data.Stride - (frame.Width * 3);
            }
            return true;
        }
        case tImageFormat.eFmtMono16:
        {
            // Down-shift each 16-bit sample to 8 bits, then replicate to B/G/R.
            UInt32 lOffset = 0;
            UInt32 lPos = 0;
            byte* lDst = (byte*)data.Scan0;
            byte bitshift = (byte)((int)frame.BitDepth - 8);
            UInt16* lSrc = (UInt16*)frame.ImageBuffer;
            while (lOffset < frame.Width * frame.Height)
            {
                lDst[lPos] = (byte)(lSrc[lOffset] >> bitshift);
                lDst[lPos + 1] = lDst[lPos];
                lDst[lPos + 2] = lDst[lPos];
                lOffset++;
                lPos += 3;
                // take care of the padding in the destination bitmap
                if ((lOffset % frame.Width) == 0) lPos += (UInt32)data.Stride - (frame.Width * 3);
            }
            return true;
        }
        case tImageFormat.eFmtBayer8:
        {
            UInt32 widthSize = frame.Width * 3;
            // Pin the frame so the native helper can read it.
            // NOTE(review): GCHandle.Alloc pins a boxed copy of the struct.
            GCHandle pFrame = GCHandle.Alloc(frame, GCHandleType.Pinned);
            // Bytes of padding per destination row (row rounded up to 4 bytes).
            UInt32 remainder = (((widthSize + 3U) & ~3U) - widthSize);
            // interpolate the colors
            IntPtr pRed = (IntPtr)((byte*)data.Scan0 + 2);
            IntPtr pGreen = (IntPtr)((byte*)data.Scan0 + 1);
            IntPtr pBlue = (IntPtr)((byte*)data.Scan0);
            Pv.ColorInterpolate(pFrame.AddrOfPinnedObject(), pRed, pGreen, pBlue, 2, remainder);
            pFrame.Free();
            return true;
        }
        case tImageFormat.eFmtBayer16:
        {
            UInt32 widthSize = frame.Width * 3;
            UInt32 lOffset = 0;
            byte bitshift = (byte)((int)frame.BitDepth - 8);
            // Source viewed as 16-bit samples; destination aliases the same
            // buffer as bytes, for an in-place 16-to-8-bit reduction.
            UInt16* lSrc = (UInt16*)frame.ImageBuffer;
            byte* lDst = (byte*)frame.ImageBuffer;
            UInt32 remainder = (((widthSize + 3U) & ~3U) - widthSize);
            // Re-label the reduced data as Bayer8 for the interpolation call.
            frame.Format = tImageFormat.eFmtBayer8;
            GCHandle pFrame = GCHandle.Alloc(frame, GCHandleType.Pinned);
            // shift the pixel
            while (lOffset < frame.Width * frame.Height) lDst[lOffset] = (byte)(lSrc[lOffset++] >> bitshift);
            // interpolate the colors
            IntPtr pRed = (IntPtr)((byte*)data.Scan0 + 2);
            IntPtr pGreen = (IntPtr)((byte*)data.Scan0 + 1);
            IntPtr pBlue = (IntPtr)((byte*)data.Scan0);
            Pv.ColorInterpolate(pFrame.AddrOfPinnedObject(), pRed, pGreen, pBlue, 2, remainder);
            pFrame.Free();
            return true;
        }
        case tImageFormat.eFmtBgr24:
        {
            // Already BGR byte order: straight byte copy.
            // NOTE(review): lPos indexes both source and destination, so the
            // destination-padding skip below also skips source bytes — this is
            // only correct when Stride == Width * 3; confirm.
            UInt32 lPos = 0;
            byte* lDst = (byte*)data.Scan0;
            byte* lSrc = (byte*)frame.ImageBuffer;
            while (lPos < frame.ImageBufferSize)
            {
                // copy the data
                lDst[lPos] = lSrc[lPos];
                lPos += 1;
                // take care of the padding in the destination bitmap
                if ((lPos % (frame.Width * 3)) == 0) lPos += (UInt32)data.Stride - (frame.Width * 3);
            }
            return true;
        }
        case tImageFormat.eFmtRgb24:
        {
            // Swap R and B while copying (the destination bitmap is BGR).
            UInt32 lOffset = 0;
            UInt32 lPos = 0;
            byte* lDst = (byte*)data.Scan0;
            byte* lSrc = (byte*)frame.ImageBuffer;
            while (lOffset < frame.ImageBufferSize)
            {
                // copy the data
                lDst[lPos] = lSrc[lOffset + 2];
                lDst[lPos + 1] = lSrc[lOffset + 1];
                lDst[lPos + 2] = lSrc[lOffset];
                lOffset += 3;
                lPos += 3;
                // take care of the padding in the destination bitmap
                if ((lOffset % (frame.Width * 3)) == 0) lPos += (UInt32)data.Stride - (frame.Width * 3);
            }
            return true;
        }
        case tImageFormat.eFmtRgb48:
        {
            // Down-shift each 16-bit channel to 8 bits and swap R/B.
            UInt32 lOffset = 0;
            UInt32 lPos = 0;
            UInt32 lLength = frame.ImageBufferSize / sizeof(UInt16);
            UInt16* lSrc = (UInt16*)frame.ImageBuffer;
            byte* lDst = (byte*)data.Scan0;
            byte bitshift = (byte)((int)frame.BitDepth - 8);
            while (lOffset < lLength)
            {
                // copy the data
                lDst[lPos] = (byte)(lSrc[lOffset + 2] >> bitshift);
                lDst[lPos + 1] = (byte)(lSrc[lOffset + 1] >> bitshift);
                lDst[lPos + 2] = (byte)(lSrc[lOffset] >> bitshift);
                lOffset += 3;
                lPos += 3;
                // take care of the padding in the destination bitmap
                if ((lOffset % (frame.Width * 3)) == 0) lPos += (UInt32)data.Stride - (frame.Width * 3);
            }
            return true;
        }
        case tImageFormat.eFmtYuv411:
        {
            // Six bytes (U Y1 Y2 V Y3 Y4) decode to four pixels sharing one
            // U/V pair. Note: reads from the managed 'buffer', not ImageBuffer.
            UInt32 lOffset = 0;
            UInt32 lPos = 0;
            byte* lDst = (byte*)data.Scan0;
            int y1, y2, y3, y4, u, v;
            int r, g, b;
            r = g = b = 0;
            while (lOffset < frame.ImageBufferSize)
            {
                u = buffer[lOffset++];
                y1 = buffer[lOffset++];
                y2 = buffer[lOffset++];
                v = buffer[lOffset++];
                y3 = buffer[lOffset++];
                y4 = buffer[lOffset++];
                Yuv2Rgb(y1, u, v, ref r, ref g, ref b);
                lDst[lPos++] = (byte)b;
                lDst[lPos++] = (byte)g;
                lDst[lPos++] = (byte)r;
                Yuv2Rgb(y2, u, v, ref r, ref g, ref b);
                lDst[lPos++] = (byte)b;
                lDst[lPos++] = (byte)g;
                lDst[lPos++] = (byte)r;
                Yuv2Rgb(y3, u, v, ref r, ref g, ref b);
                lDst[lPos++] = (byte)b;
                lDst[lPos++] = (byte)g;
                lDst[lPos++] = (byte)r;
                Yuv2Rgb(y4, u, v, ref r, ref g, ref b);
                lDst[lPos++] = (byte)b;
                lDst[lPos++] = (byte)g;
                lDst[lPos++] = (byte)r;
            }
            return true;
        }
        case tImageFormat.eFmtYuv422:
        {
            // Four bytes (U Y1 V Y2) decode to two pixels sharing one U/V pair.
            UInt32 lOffset = 0;
            UInt32 lPos = 0;
            byte* lDst = (byte*)data.Scan0;
            int y1, y2, u, v;
            int r, g, b;
            r = g = b = 0;
            while (lOffset < frame.ImageBufferSize)
            {
                u = buffer[lOffset++];
                y1 = buffer[lOffset++];
                v = buffer[lOffset++];
                y2 = buffer[lOffset++];
                Yuv2Rgb(y1, u, v, ref r, ref g, ref b);
                lDst[lPos++] = (byte)b;
                lDst[lPos++] = (byte)g;
                lDst[lPos++] = (byte)r;
                Yuv2Rgb(y2, u, v, ref r, ref g, ref b);
                lDst[lPos++] = (byte)b;
                lDst[lPos++] = (byte)g;
                lDst[lPos++] = (byte)r;
            }
            return true;
        }
        case tImageFormat.eFmtYuv444:
        {
            // Consumes six bytes per two pixels in the order
            // U, Y1, V, (skip), Y2, (skip), sharing the first U/V pair.
            UInt32 lOffset = 0;
            UInt32 lPos = 0;
            byte* lDst = (byte*)data.Scan0;
            int y1, y2, u, v;
            int r, g, b;
            r = g = b = 0;
            while (lOffset < frame.ImageBufferSize)
            {
                u = buffer[lOffset++];
                y1 = buffer[lOffset++];
                v = buffer[lOffset++];
                lOffset++;
                y2 = buffer[lOffset++];
                lOffset++;
                Yuv2Rgb(y1, u, v, ref r, ref g, ref b);
                lDst[lPos++] = (byte)b;
                lDst[lPos++] = (byte)g;
                lDst[lPos++] = (byte)r;
                Yuv2Rgb(y2, u, v, ref r, ref g, ref b);
                lDst[lPos++] = (byte)b;
                lDst[lPos++] = (byte)g;
                lDst[lPos++] = (byte)r;
            }
            return true;
        }
        default:
            // Unknown pixel format: nothing converted.
            return false;
    }
}
// Convert the raw data in the frame's buffer into the bitmap's data. The
// destination is treated as a 24bpp BGR bitmap whose Stride may include
// per-row padding.
// NOTE(review): the previous header claimed eFmtRgb48, eFmtYuv411 and
// eFmtYuv444 are unsupported, yet cases for all three exist below — confirm
// which statement is current.
static unsafe bool Frame2Data(ref tFrame frame, ref byte[] buffer, ref BitmapData data)
{
    switch (frame.Format)
    {
        case tImageFormat.eFmtMono8:
        {
            // Replicate each 8-bit sample into the B, G and R channels.
            UInt32 lOffset = 0;
            UInt32 lPos = 0;
            byte* lDst = (byte*)data.Scan0;
            byte* lSrc = (byte*)frame.ImageBuffer;
            while (lOffset < frame.ImageBufferSize)
            {
                lDst[lPos] = lSrc[lOffset];
                lDst[lPos + 1] = lSrc[lOffset];
                lDst[lPos + 2] = lSrc[lOffset];
                lOffset++;
                lPos += 3;
                // take care of the padding in the destination bitmap
                if ((lOffset % frame.Width) == 0)
                {
                    lPos += (UInt32)data.Stride - (frame.Width * 3);
                }
            }
            return(true);
        }
        case tImageFormat.eFmtMono16:
        {
            // Down-shift each 16-bit sample to 8 bits, then replicate to B/G/R.
            UInt32 lOffset = 0;
            UInt32 lPos = 0;
            byte* lDst = (byte*)data.Scan0;
            byte bitshift = (byte)((int)frame.BitDepth - 8);
            UInt16* lSrc = (UInt16*)frame.ImageBuffer;
            while (lOffset < frame.Width * frame.Height)
            {
                lDst[lPos] = (byte)(lSrc[lOffset] >> bitshift);
                lDst[lPos + 1] = lDst[lPos];
                lDst[lPos + 2] = lDst[lPos];
                lOffset++;
                lPos += 3;
                // take care of the padding in the destination bitmap
                if ((lOffset % frame.Width) == 0)
                {
                    lPos += (UInt32)data.Stride - (frame.Width * 3);
                }
            }
            return(true);
        }
        case tImageFormat.eFmtBayer8:
        {
            UInt32 widthSize = frame.Width * 3;
            // Pin the frame so the native helper can read it.
            // NOTE(review): GCHandle.Alloc pins a boxed copy of the struct.
            GCHandle pFrame = GCHandle.Alloc(frame, GCHandleType.Pinned);
            // Bytes of padding per destination row (row rounded up to 4 bytes).
            UInt32 remainder = (((widthSize + 3U) & ~3U) - widthSize);
            // interpolate the colors
            IntPtr pRed = (IntPtr)((byte*)data.Scan0 + 2);
            IntPtr pGreen = (IntPtr)((byte*)data.Scan0 + 1);
            IntPtr pBlue = (IntPtr)((byte*)data.Scan0);
            Pv.ColorInterpolate(pFrame.AddrOfPinnedObject(), pRed, pGreen, pBlue, 2, remainder);
            pFrame.Free();
            return(true);
        }
        case tImageFormat.eFmtBayer16:
        {
            UInt32 widthSize = frame.Width * 3;
            UInt32 lOffset = 0;
            byte bitshift = (byte)((int)frame.BitDepth - 8);
            // Source viewed as 16-bit samples; destination aliases the same
            // buffer as bytes, for an in-place 16-to-8-bit reduction.
            UInt16* lSrc = (UInt16*)frame.ImageBuffer;
            byte* lDst = (byte*)frame.ImageBuffer;
            UInt32 remainder = (((widthSize + 3U) & ~3U) - widthSize);
            // Re-label the reduced data as Bayer8 for the interpolation call.
            frame.Format = tImageFormat.eFmtBayer8;
            GCHandle pFrame = GCHandle.Alloc(frame, GCHandleType.Pinned);
            // shift the pixel
            while (lOffset < frame.Width * frame.Height)
            {
                lDst[lOffset] = (byte)(lSrc[lOffset++] >> bitshift);
            }
            // interpolate the colors
            IntPtr pRed = (IntPtr)((byte*)data.Scan0 + 2);
            IntPtr pGreen = (IntPtr)((byte*)data.Scan0 + 1);
            IntPtr pBlue = (IntPtr)((byte*)data.Scan0);
            Pv.ColorInterpolate(pFrame.AddrOfPinnedObject(), pRed, pGreen, pBlue, 2, remainder);
            pFrame.Free();
            return(true);
        }
        case tImageFormat.eFmtBgr24:
        {
            // Already BGR byte order: straight byte copy.
            // NOTE(review): lPos indexes both source and destination, so the
            // destination-padding skip below also skips source bytes — this is
            // only correct when Stride == Width * 3; confirm.
            UInt32 lPos = 0;
            byte* lDst = (byte*)data.Scan0;
            byte* lSrc = (byte*)frame.ImageBuffer;
            while (lPos < frame.ImageBufferSize)
            {
                // copy the data
                lDst[lPos] = lSrc[lPos];
                lPos += 1;
                // take care of the padding in the destination bitmap
                if ((lPos % (frame.Width * 3)) == 0)
                {
                    lPos += (UInt32)data.Stride - (frame.Width * 3);
                }
            }
            return(true);
        }
        case tImageFormat.eFmtRgb24:
        {
            // Swap R and B while copying (the destination bitmap is BGR).
            UInt32 lOffset = 0;
            UInt32 lPos = 0;
            byte* lDst = (byte*)data.Scan0;
            byte* lSrc = (byte*)frame.ImageBuffer;
            while (lOffset < frame.ImageBufferSize)
            {
                // copy the data
                lDst[lPos] = lSrc[lOffset + 2];
                lDst[lPos + 1] = lSrc[lOffset + 1];
                lDst[lPos + 2] = lSrc[lOffset];
                lOffset += 3;
                lPos += 3;
                // take care of the padding in the destination bitmap
                if ((lOffset % (frame.Width * 3)) == 0)
                {
                    lPos += (UInt32)data.Stride - (frame.Width * 3);
                }
            }
            return(true);
        }
        case tImageFormat.eFmtRgb48:
        {
            // Down-shift each 16-bit channel to 8 bits and swap R/B.
            UInt32 lOffset = 0;
            UInt32 lPos = 0;
            UInt32 lLength = frame.ImageBufferSize / sizeof(UInt16);
            UInt16* lSrc = (UInt16*)frame.ImageBuffer;
            byte* lDst = (byte*)data.Scan0;
            byte bitshift = (byte)((int)frame.BitDepth - 8);
            while (lOffset < lLength)
            {
                // copy the data
                lDst[lPos] = (byte)(lSrc[lOffset + 2] >> bitshift);
                lDst[lPos + 1] = (byte)(lSrc[lOffset + 1] >> bitshift);
                lDst[lPos + 2] = (byte)(lSrc[lOffset] >> bitshift);
                lOffset += 3;
                lPos += 3;
                // take care of the padding in the destination bitmap
                if ((lOffset % (frame.Width * 3)) == 0)
                {
                    lPos += (UInt32)data.Stride - (frame.Width * 3);
                }
            }
            return(true);
        }
        case tImageFormat.eFmtYuv411:
        {
            // Six bytes (U Y1 Y2 V Y3 Y4) decode to four pixels sharing one
            // U/V pair. Note: reads from the managed 'buffer', not ImageBuffer.
            UInt32 lOffset = 0;
            UInt32 lPos = 0;
            byte* lDst = (byte*)data.Scan0;
            int y1, y2, y3, y4, u, v;
            int r, g, b;
            r = g = b = 0;
            while (lOffset < frame.ImageBufferSize)
            {
                u = buffer[lOffset++];
                y1 = buffer[lOffset++];
                y2 = buffer[lOffset++];
                v = buffer[lOffset++];
                y3 = buffer[lOffset++];
                y4 = buffer[lOffset++];
                Yuv2Rgb(y1, u, v, ref r, ref g, ref b);
                lDst[lPos++] = (byte)b;
                lDst[lPos++] = (byte)g;
                lDst[lPos++] = (byte)r;
                Yuv2Rgb(y2, u, v, ref r, ref g, ref b);
                lDst[lPos++] = (byte)b;
                lDst[lPos++] = (byte)g;
                lDst[lPos++] = (byte)r;
                Yuv2Rgb(y3, u, v, ref r, ref g, ref b);
                lDst[lPos++] = (byte)b;
                lDst[lPos++] = (byte)g;
                lDst[lPos++] = (byte)r;
                Yuv2Rgb(y4, u, v, ref r, ref g, ref b);
                lDst[lPos++] = (byte)b;
                lDst[lPos++] = (byte)g;
                lDst[lPos++] = (byte)r;
            }
            return(true);
        }
        case tImageFormat.eFmtYuv422:
        {
            // Four bytes (U Y1 V Y2) decode to two pixels sharing one U/V pair.
            UInt32 lOffset = 0;
            UInt32 lPos = 0;
            byte* lDst = (byte*)data.Scan0;
            int y1, y2, u, v;
            int r, g, b;
            r = g = b = 0;
            while (lOffset < frame.ImageBufferSize)
            {
                u = buffer[lOffset++];
                y1 = buffer[lOffset++];
                v = buffer[lOffset++];
                y2 = buffer[lOffset++];
                Yuv2Rgb(y1, u, v, ref r, ref g, ref b);
                lDst[lPos++] = (byte)b;
                lDst[lPos++] = (byte)g;
                lDst[lPos++] = (byte)r;
                Yuv2Rgb(y2, u, v, ref r, ref g, ref b);
                lDst[lPos++] = (byte)b;
                lDst[lPos++] = (byte)g;
                lDst[lPos++] = (byte)r;
            }
            return(true);
        }
        case tImageFormat.eFmtYuv444:
        {
            // Consumes six bytes per two pixels in the order
            // U, Y1, V, (skip), Y2, (skip), sharing the first U/V pair.
            UInt32 lOffset = 0;
            UInt32 lPos = 0;
            byte* lDst = (byte*)data.Scan0;
            int y1, y2, u, v;
            int r, g, b;
            r = g = b = 0;
            while (lOffset < frame.ImageBufferSize)
            {
                u = buffer[lOffset++];
                y1 = buffer[lOffset++];
                v = buffer[lOffset++];
                lOffset++;
                y2 = buffer[lOffset++];
                lOffset++;
                Yuv2Rgb(y1, u, v, ref r, ref g, ref b);
                lDst[lPos++] = (byte)b;
                lDst[lPos++] = (byte)g;
                lDst[lPos++] = (byte)r;
                Yuv2Rgb(y2, u, v, ref r, ref g, ref b);
                lDst[lPos++] = (byte)b;
                lDst[lPos++] = (byte)g;
                lDst[lPos++] = (byte)r;
            }
            return(true);
        }
        default:
            // Unknown pixel format: nothing converted.
            return(false);
    }
}
/// <summary>
/// Starts continuous fixed-rate acquisition: opens the capture stream, pins a
/// pool of frame buffers, queues them with the driver and starts the camera.
/// </summary>
/// <param name="fmt">Pixel format recorded on the image-format settings.</param>
/// <exception cref="PvException">
/// Thrown when no camera is attached or any setup step fails; on failure the
/// partial setup is torn down first.
/// </exception>
public void BeginCapture(tImageFormat fmt)
{
    if (!camera.HasValue)
    {
        // No camera attached; nothing to clean up yet.
        throw new PvException(tErr.eErrUnavailable);
    }

    ImageFormat.pixelformat = fmt;

    tErr status = Pv.CaptureStart(camera.Value);
    if (status != tErr.eErrSuccess)
    {
        AbortCapture(status);
    }

    frameBufferHandles = new GCHandle[FRAME_POOL_SIZE];
    framePoolHandles = new GCHandle[FRAME_POOL_SIZE];
    frames = new tFrame[FRAME_POOL_SIZE];

    // Ask the driver how big each frame buffer must be.
    uint bufferSize = 0;
    status = Pv.AttrUint32Get(camera.Value, "TotalBytesPerFrame", ref bufferSize);
    if (status != tErr.eErrSuccess)
    {
        AbortCapture(status);
    }

    // Build and queue the frame pool. Buffers and frame structures are
    // pinned because the driver fills them asynchronously.
    for (int slot = FRAME_POOL_SIZE - 1; slot >= 0; slot--)
    {
        byte[] imageBuffer = new byte[bufferSize];
        GCHandle pinnedBuffer = GCHandle.Alloc(imageBuffer, GCHandleType.Pinned);
        frameBufferHandles[slot] = pinnedBuffer;

        tFrame poolFrame = new tFrame
        {
            ImageBuffer = pinnedBuffer.AddrOfPinnedObject(),
            ImageBufferSize = bufferSize,
            AncillaryBufferSize = 0
        };
        frames[slot] = poolFrame;

        GCHandle pinnedFrame = GCHandle.Alloc(poolFrame, GCHandleType.Pinned);
        framePoolHandles[slot] = pinnedFrame;

        IntPtr framePointer = pinnedFrame.AddrOfPinnedObject();
        buffers.Add(framePointer, imageBuffer);

        if (!communicationManager.QueueFrame(framePointer, callback))
        {
            // NOTE(review): preserved quirk — 'status' still holds the last
            // driver result (success) here, so the thrown exception carries
            // eErrSuccess when queueing fails.
            AbortCapture(status);
        }
    }

    this.FrameRate = 30;

    status = Pv.AttrEnumSet(this.camera.Value, "FrameStartTriggerMode", "FixedRate");
    if (status != tErr.eErrSuccess)
    {
        AbortCapture(status);
    }

    status = Pv.AttrEnumSet(this.camera.Value, "AcquisitionMode", "Continuous");
    if (status != tErr.eErrSuccess)
    {
        AbortCapture(status);
    }

    status = Pv.CommandRun(this.camera.Value, "AcquisitionStart");
    if (status != tErr.eErrSuccess)
    {
        AbortCapture(status);
    }
}

// Tears down the capture session and surfaces the failure to the caller.
private void AbortCapture(tErr failure)
{
    EndCapture();
    throw new PvException(failure);
}