public void GetImageTexture(int[] returnImage, ref IntPtr imagePtr)
        {
            if (OpenCVWrapper.cvGrabFrame(capture) != 0)
            {
                failureCount = 0;

                IntPtr ptr = OpenCVWrapper.cvRetrieveFrame(capture);

                OpenCVWrapper.IplImage videoImage = (OpenCVWrapper.IplImage)Marshal.PtrToStructure(ptr,
                                                                                                   typeof(OpenCVWrapper.IplImage));

                bool replaceBackground = false;
                if (imageReadyCallback != null)
                {
                    replaceBackground = imageReadyCallback(ptr, returnImage);
                }

                if (!replaceBackground && (returnImage != null))
                {
                    unsafe
                    {
                        // IplImage data is stored in BGR order; pack each pixel into an
                        // int as (B << 16) | (G << 8) | R
                        byte *src   = (byte *)videoImage.imageData;
                        int   index = 0;
                        for (int i = 0; i < videoImage.height; i++)
                        {
                            for (int j = 0; j < videoImage.width * videoImage.nChannels; j += videoImage.nChannels)
                            {
                                returnImage[index++] = (int)((*(src) << 16) | (*(src + 1) << 8) | *(src + 2));
                                src += videoImage.nChannels;
                            }
                        }
                    }
                }

                // Hand the raw image data pointer back to the caller, but only if a
                // non-null pointer was passed in to request it
                if (imagePtr != IntPtr.Zero)
                {
                    imagePtr = videoImage.imageData;
                }
            }
            else
            {
                failureCount++;

                if (failureCount > FAILURE_THRESHOLD)
                {
                    throw new GoblinException("Video capture device ID: is used by " +
                                              "other application, and can not be accessed");
                }
            }
        }
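
A minimal caller sketch for GetImageTexture, assuming an already-initialized OpenCVCapture named capture; the Width and Height properties used to size the buffer are the ones referenced in the setup code further below.

// Allocate one int per pixel for the configured resolution.
int[] pixels = new int[capture.Width * capture.Height];
// Passing IntPtr.Zero means the raw IplImage data pointer is not handed back.
IntPtr rawImage = IntPtr.Zero;
capture.GetImageTexture(pixels, ref rawImage);
// Each element of pixels is now packed as (B << 16) | (G << 8) | R.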
        public void InitVideoCapture(int videoDeviceID, FrameRate framerate, Resolution resolution,
                                     ImageFormat format, bool grayscale)
        {
            if (cameraInitialized)
            {
                return;
            }

            this.resolution    = resolution;
            this.grayscale     = grayscale;
            this.frameRate     = framerate;
            this.videoDeviceID = videoDeviceID;
            this.format        = format;

            switch (resolution)
            {
            case Resolution._160x120:
                cameraWidth  = 160;
                cameraHeight = 120;
                break;

            case Resolution._320x240:
                cameraWidth  = 320;
                cameraHeight = 240;
                break;

            case Resolution._640x480:
                cameraWidth  = 640;
                cameraHeight = 480;
                break;

            case Resolution._800x600:
                cameraWidth  = 800;
                cameraHeight = 600;
                break;

            case Resolution._1024x768:
                cameraWidth  = 1024;
                cameraHeight = 768;
                break;

            case Resolution._1280x1024:
                cameraWidth  = 1280;
                cameraHeight = 1024;
                break;

            case Resolution._1600x1200:
                cameraWidth  = 1600;
                cameraHeight = 1200;
                break;
            }

            capture = OpenCVWrapper.cvCaptureFromCAM(videoDeviceID);

            if (capture == IntPtr.Zero)
            {
                throw new GoblinException("VideoDeviceID " + videoDeviceID + " is out of the range.");
            }

            OpenCVWrapper.cvSetCaptureProperty(capture, OpenCVWrapper.CV_CAP_PROP_FRAME_WIDTH, cameraWidth);
            OpenCVWrapper.cvSetCaptureProperty(capture, OpenCVWrapper.CV_CAP_PROP_FRAME_HEIGHT, cameraHeight);

            double frame_rate = 0;

            switch (frameRate)
            {
            case FrameRate._15Hz: frame_rate = 15; break;

            case FrameRate._30Hz: frame_rate = 30; break;

            case FrameRate._50Hz: frame_rate = 50; break;

            case FrameRate._60Hz: frame_rate = 60; break;

            case FrameRate._120Hz: frame_rate = 120; break;

            case FrameRate._240Hz: frame_rate = 240; break;
            }

            OpenCVWrapper.cvSetCaptureProperty(capture, OpenCVWrapper.CV_CAP_PROP_FPS, frame_rate);

            // Grab a frame to verify that the requested resolution is actually supported
            if (OpenCVWrapper.cvGrabFrame(capture) != 0)
            {
                IntPtr ptr = OpenCVWrapper.cvRetrieveFrame(capture);

                OpenCVWrapper.IplImage videoImage = (OpenCVWrapper.IplImage)Marshal.PtrToStructure(ptr,
                                                                                                   typeof(OpenCVWrapper.IplImage));

                if (videoImage.width != cameraWidth || videoImage.height != cameraHeight)
                {
                    throw new GoblinException("Resolution " + cameraWidth + "x" + cameraHeight +
                                              " is not supported");
                }
            }

            cameraInitialized = true;
        }
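
A minimal initialization sketch; the device ID of 0 and the try/catch handling are illustrative assumptions.

OpenCVCapture capture = new OpenCVCapture();
try
{
    // Device 0 is typically the first camera OpenCV enumerates.
    capture.InitVideoCapture(0, FrameRate._30Hz, Resolution._640x480,
                             ImageFormat.R8G8B8_24, false);
}
catch (GoblinException ex)
{
    // Thrown when the device ID is out of range or the requested
    // resolution is not supported by the camera.
    Console.WriteLine(ex.Message);
}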
Example #3
        private void SetupCaptureDevices()
        {
            // Create our video capture device that uses the OpenCV library.
            OpenCVCapture captureDevice = new OpenCVCapture();
            captureDevice.InitVideoCapture(0, FrameRate._30Hz, Resolution._640x480,
                ImageFormat.R8G8B8_24, false);

            // Create a 16-bit color texture (Bgra4444) to hold the image processed by
            // OpenCV. A format with an alpha channel lets us map black pixels to fully
            // transparent, so the texture overlays cleanly on top of the live video image
            overlayTexture = new Texture2D(GraphicsDevice, captureDevice.Width, captureDevice.Height,
                false, SurfaceFormat.Bgra4444);
            // Create an array that will contain the image data of overlayTexture
            overlayData = new short[overlayTexture.Width * overlayTexture.Height];

            // Assigns a callback function to be called whenever a new video frame is captured
            captureDevice.CaptureCallback = delegate(IntPtr image, int[] background)
            {
                // Allocates an 8-bit, single-channel OpenCV image to hold the grayscale frame
                IntPtr grayImage = OpenCVWrapper.cvCreateImage(OpenCVWrapper.cvGetSize(image), 8, 1);

                // Converts the color image (live video image) to a gray image
                OpenCVWrapper.cvCvtColor(image, grayImage, OpenCVWrapper.CV_BGR2GRAY);

                // Performs Canny edge detection on the gray image
                OpenCVWrapper.cvCanny(grayImage, grayImage, 10, 200, 3);

                // Converts the gray image pointer to IplImage structure so that we can access
                // the image data of the processed gray image
                OpenCVWrapper.IplImage videoImage = (OpenCVWrapper.IplImage)Marshal.PtrToStructure(grayImage,
                    typeof(OpenCVWrapper.IplImage));

                unsafe
                {
                    int index = 0;
                    // Gets a pointer to the first byte of the image data
                    byte* src = (byte*)videoImage.imageData;
                    // Iterates through the image pointer
                    for (int i = 0; i < videoImage.height; i++)
                    {
                        for (int j = 0; j < videoImage.width; j++)
                        {
                            // src holds an 8-bit grayscale value, so expand it to fill the
                            // 16-bit Bgra4444 texel. A black (zero) source pixel also gets
                            // zero alpha, making it fully transparent
                            overlayData[index++] = (short)((*(src) << 8) | (*(src)));
                            src++;
                        }
                    }
                }

                // Unbind the texture from the device before updating it (works around
                // a texture-binding bug in XNA 3.1)
                GraphicsDevice.Textures[0] = null;
                // Assigns the image data to the overlay texture
                overlayTexture.SetData<short>(overlayData);

                // Deallocates the memory assigned to the OpenCV image
                OpenCVWrapper.cvReleaseImage(ref grayImage);

                // We don't want to modify the original video image, so we return false
                return false;
            };

            scene.AddVideoCaptureDevice(captureDevice);

            scene.ShowCameraImage = true;
        }
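
The CaptureCallback hook can host any per-frame OpenCV processing, not just edge detection. As an illustrative sketch, the Canny call above could be swapped for a binary threshold. Note that cvThreshold and CV_THRESH_BINARY here are assumptions about what OpenCVWrapper exposes; if the wrapper does not bind them, they would need to be added first.

// Variant callback: binary threshold instead of Canny edges. Reuses the
// overlayData/overlayTexture fields from the example above.
captureDevice.CaptureCallback = delegate(IntPtr image, int[] background)
{
    IntPtr grayImage = OpenCVWrapper.cvCreateImage(OpenCVWrapper.cvGetSize(image), 8, 1);
    OpenCVWrapper.cvCvtColor(image, grayImage, OpenCVWrapper.CV_BGR2GRAY);

    // Pixels brighter than 128 become 255 (opaque in the overlay); the rest
    // become 0, which the Bgra4444 packing below treats as fully transparent.
    OpenCVWrapper.cvThreshold(grayImage, grayImage, 128, 255, OpenCVWrapper.CV_THRESH_BINARY);

    OpenCVWrapper.IplImage videoImage = (OpenCVWrapper.IplImage)Marshal.PtrToStructure(grayImage,
        typeof(OpenCVWrapper.IplImage));

    unsafe
    {
        int index = 0;
        byte* src = (byte*)videoImage.imageData;
        for (int i = 0; i < videoImage.height; i++)
            for (int j = 0; j < videoImage.width; j++)
            {
                overlayData[index++] = (short)((*src << 8) | *src);
                src++;
            }
    }

    GraphicsDevice.Textures[0] = null;
    overlayTexture.SetData<short>(overlayData);

    OpenCVWrapper.cvReleaseImage(ref grayImage);
    return false;
};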