/// <summary>
/// Grabs one frame from the capture device and copies it into the caller's buffers.
/// </summary>
/// <param name="returnImage">Destination buffer that receives one packed int per pixel
/// (may be null if the caller only wants the raw pointer). Must be at least
/// width * height elements long — not verified here.</param>
/// <param name="imagePtr">If passed as non-zero, receives the raw IplImage data pointer
/// of the grabbed frame. NOTE(review): the non-zero check looks like a caller opt-in
/// sentinel — confirm against callers.</param>
/// <exception cref="GoblinException">Thrown after more than FAILURE_THRESHOLD
/// consecutive grab failures (device likely held by another application).</exception>
public void GetImageTexture(int[] returnImage, ref IntPtr imagePtr)
{
    if (OpenCVWrapper.cvGrabFrame(capture) != 0)
    {
        // Successful grab resets the consecutive-failure counter.
        failureCount = 0;

        IntPtr ptr = OpenCVWrapper.cvRetrieveFrame(capture);
        OpenCVWrapper.IplImage videoImage = (OpenCVWrapper.IplImage)Marshal.PtrToStructure(
            ptr, typeof(OpenCVWrapper.IplImage));

        // Give the callback first crack at the frame; it may supply the image itself.
        bool replaceBackground = false;
        if (imageReadyCallback != null)
            replaceBackground = imageReadyCallback(ptr, returnImage);

        if (!replaceBackground && (returnImage != null))
        {
            unsafe
            {
                byte* src = (byte*)videoImage.imageData;
                int index = 0;

                // Pack each pixel's first three channel bytes into one int as
                // (ch0 << 16) | (ch1 << 8) | ch2.
                // NOTE(review): assumes nChannels >= 3 — a grayscale (1-channel)
                // frame would read past the pixel. Also advances linearly and
                // ignores IplImage row padding (widthStep); rows are typically
                // 4-byte aligned, so widths where width*nChannels isn't a
                // multiple of 4 may skew the output. Confirm with the wrapper.
                for (int i = 0; i < videoImage.height; i++)
                {
                    for (int j = 0; j < videoImage.width * videoImage.nChannels;
                        j += videoImage.nChannels)
                    {
                        returnImage[index++] =
                            (int)((*(src) << 16) | (*(src + 1) << 8) | *(src + 2));
                        src += videoImage.nChannels;
                    }
                }
            }
        }

        // Only hand back the raw pointer when the caller asked for it (passed non-zero).
        if (imagePtr != IntPtr.Zero)
        {
            imagePtr = videoImage.imageData;
        }
    }
    else
    {
        failureCount++;
        if (failureCount > FAILURE_THRESHOLD)
        {
            // FIX: the original message omitted the device ID after the colon.
            throw new GoblinException("Video capture device ID: " + videoDeviceID +
                " is used by other application, and can not be accessed");
        }
    }
}
/// <summary>
/// Opens and configures the OpenCV capture device. Safe to call repeatedly:
/// returns immediately once the camera has been initialized.
/// </summary>
/// <param name="videoDeviceID">Zero-based OpenCV camera index.</param>
/// <param name="framerate">Requested capture rate.</param>
/// <param name="resolution">Requested capture resolution.</param>
/// <param name="format">Pixel format to remember (stored, not applied here).</param>
/// <param name="grayscale">Whether the caller wants grayscale (stored, not applied here).</param>
/// <exception cref="GoblinException">Thrown when the device ID cannot be opened, or
/// when the device delivers a frame whose size differs from the requested resolution.</exception>
public void InitVideoCapture(int videoDeviceID, FrameRate framerate, Resolution resolution,
    ImageFormat format, bool grayscale)
{
    // Idempotence guard: a second call is a no-op.
    if (cameraInitialized)
        return;

    this.videoDeviceID = videoDeviceID;
    this.frameRate = framerate;
    this.resolution = resolution;
    this.format = format;
    this.grayscale = grayscale;

    // Map the resolution enum to pixel dimensions.
    // NOTE(review): no default case — an unrecognized value leaves
    // cameraWidth/cameraHeight untouched; confirm that is intended.
    switch (resolution)
    {
        case Resolution._160x120:
            cameraWidth = 160;
            cameraHeight = 120;
            break;
        case Resolution._320x240:
            cameraWidth = 320;
            cameraHeight = 240;
            break;
        case Resolution._640x480:
            cameraWidth = 640;
            cameraHeight = 480;
            break;
        case Resolution._800x600:
            cameraWidth = 800;
            cameraHeight = 600;
            break;
        case Resolution._1024x768:
            cameraWidth = 1024;
            cameraHeight = 768;
            break;
        case Resolution._1280x1024:
            cameraWidth = 1280;
            cameraHeight = 1024;
            break;
        case Resolution._1600x1200:
            cameraWidth = 1600;
            cameraHeight = 1200;
            break;
    }

    capture = OpenCVWrapper.cvCaptureFromCAM(videoDeviceID);
    if (capture == IntPtr.Zero)
    {
        throw new GoblinException("VideoDeviceID " + videoDeviceID +
            " is out of the range.");
    }

    // Ask the driver for the desired frame size and rate. These are requests;
    // the device may silently fall back to whatever it supports.
    OpenCVWrapper.cvSetCaptureProperty(capture,
        OpenCVWrapper.CV_CAP_PROP_FRAME_WIDTH, cameraWidth);
    OpenCVWrapper.cvSetCaptureProperty(capture,
        OpenCVWrapper.CV_CAP_PROP_FRAME_HEIGHT, cameraHeight);

    double fps = 0;
    switch (frameRate)
    {
        case FrameRate._15Hz: fps = 15; break;
        case FrameRate._30Hz: fps = 30; break;
        case FrameRate._50Hz: fps = 50; break;
        case FrameRate._60Hz: fps = 60; break;
        case FrameRate._120Hz: fps = 120; break;
        case FrameRate._240Hz: fps = 240; break;
    }
    OpenCVWrapper.cvSetCaptureProperty(capture, OpenCVWrapper.CV_CAP_PROP_FPS, fps);

    // Grab the video image to see if resolution is correct. A failed grab here
    // is tolerated — the size check only runs when a frame comes back.
    if (OpenCVWrapper.cvGrabFrame(capture) != 0)
    {
        IntPtr framePtr = OpenCVWrapper.cvRetrieveFrame(capture);
        OpenCVWrapper.IplImage videoImage = (OpenCVWrapper.IplImage)Marshal.PtrToStructure(
            framePtr, typeof(OpenCVWrapper.IplImage));

        bool sizeMatches = (videoImage.width == cameraWidth) &&
            (videoImage.height == cameraHeight);
        if (!sizeMatches)
        {
            // NOTE(review): the capture handle is not released before this throw —
            // possible leak; confirm whether the wrapper exposes cvReleaseCapture.
            throw new GoblinException("Resolution " + cameraWidth + "x" + cameraHeight +
                " is not supported");
        }
    }

    cameraInitialized = true;
}