Example #1
 protected virtual void DisposeOpticalFlow()
 {
     if (prevTrackPts != null)
     {
         prevTrackPts.Clear();
     }
     if (nextTrackPts != null)
     {
         nextTrackPts.Clear();
     }
     if (prevgray != null)
     {
         prevgray.Dispose();
     }
     if (gray != null)
     {
         gray.Dispose();
     }
     if (mOP2fPrevTrackPts != null)
     {
         mOP2fPrevTrackPts.Dispose();
     }
     if (mOP2fNextTrackPts != null)
     {
         mOP2fNextTrackPts.Dispose();
     }
     if (status != null)
     {
         status.Dispose();
     }
     if (err != null)
     {
         err.Dispose();
     }
 }
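The guarded cleanup above repeats the same "null-check, then Dispose" pair for every field. A minimal sketch of a reusable helper, assuming only C# 6+ and System.IDisposable (DisposeUtil and SafeDispose are hypothetical names, not part of OpenCV for Unity):

    using System;

    static class DisposeUtil
    {
        // Hypothetical helper: dispose a resource if it is non-null, then
        // clear the reference so a repeated call becomes a no-op.
        public static void SafeDispose<T>(ref T resource) where T : class, IDisposable
        {
            resource?.Dispose();
            resource = null;
        }
    }

Each `if (prevgray != null) { prevgray.Dispose(); }` pair then collapses to `DisposeUtil.SafeDispose(ref prevgray);`, and the list-clearing calls can stay as `prevTrackPts?.Clear();`.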
Example #2
        /// <summary>
        /// Raises the web cam texture to mat helper disposed event.
        /// </summary>
        public void OnWebCamTextureToMatHelperDisposed()
        {
            Debug.Log("OnWebCamTextureToMatHelperDisposed");

            matOpFlowThis.Dispose();
            matOpFlowPrev.Dispose();
            MOPcorners.Dispose();
            mMOP2fptsThis.Dispose();
            mMOP2fptsPrev.Dispose();
            mMOP2fptsSafe.Dispose();
            mMOBStatus.Dispose();
            mMOFerr.Dispose();
        }
Example #3
        public void MatIndexer()
        {
            const byte value          = 123;
            var        img            = new Mat(new Size(10, 10), MatType.CV_8UC1, Scalar.All(value));
            var        imgB           = new MatOfByte(img);
            var        indexer        = imgB.GetIndexer();
            var        genericIndexer = img.GetGenericIndexer <byte>();

            Assert.Equal(value, indexer[0, 0]);
            Assert.Equal(value, genericIndexer[0, 0]);

            img.Dispose();
            imgB.Dispose();
        }
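The test above disposes its Mats manually, so a failing assertion would skip the cleanup. A sketch of the same check with `using` blocks, assuming the same OpenCvSharp version and xUnit as above:

    using OpenCvSharp;
    using Xunit;

    public class MatIndexerUsingTest
    {
        [Fact]
        public void MatIndexerWithUsing()
        {
            const byte value = 123;

            // 'using' guarantees Dispose even if an assertion throws.
            using (var img = new Mat(new Size(10, 10), MatType.CV_8UC1, Scalar.All(value)))
            using (var imgB = new MatOfByte(img))
            {
                Assert.Equal(value, imgB.GetIndexer()[0, 0]);
                Assert.Equal(value, img.GetGenericIndexer<byte>()[0, 0]);
            }
        }
    }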
Example #4
        /// <summary>
        /// Raises the web cam texture to mat helper disposed event.
        /// </summary>
        public void OnWebCamTextureToMatHelperDisposed()
        {
            Debug.Log("OnWebCamTextureToMatHelperDisposed");

            if (texture != null)
            {
                Texture2D.Destroy(texture);
                texture = null;
            }

            if (matOpFlowThis != null)
            {
                matOpFlowThis.Dispose();
            }
            if (matOpFlowPrev != null)
            {
                matOpFlowPrev.Dispose();
            }
            if (MOPcorners != null)
            {
                MOPcorners.Dispose();
            }
            if (mMOP2fptsThis != null)
            {
                mMOP2fptsThis.Dispose();
            }
            if (mMOP2fptsPrev != null)
            {
                mMOP2fptsPrev.Dispose();
            }
            if (mMOP2fptsSafe != null)
            {
                mMOP2fptsSafe.Dispose();
            }
            if (mMOBStatus != null)
            {
                mMOBStatus.Dispose();
            }
            if (mMOFerr != null)
            {
                mMOFerr.Dispose();
            }
        }
Example #5
    void OnDisable()
    {
        Debug.Log("OnDisable");

        if (matOpFlowThis != null)
        {
            matOpFlowThis.Dispose();
        }
        if (matOpFlowPrev != null)
        {
            matOpFlowPrev.Dispose();
        }
        if (MOPcorners != null)
        {
            MOPcorners.Dispose();
        }
        if (mMOP2fptsThis != null)
        {
            mMOP2fptsThis.Dispose();
        }
        if (mMOP2fptsPrev != null)
        {
            mMOP2fptsPrev.Dispose();
        }
        if (mMOP2fptsSafe != null)
        {
            mMOP2fptsSafe.Dispose();
        }
        if (mMOBStatus != null)
        {
            mMOBStatus.Dispose();
        }
        if (mMOFerr != null)
        {
            mMOFerr.Dispose();
        }
        //webCamTextureToMatHelper.Dispose();
    }
Example #6
        static void Main(string[] args)
        {
            // Used to check memory leak
            //for (int i = 0; i < 1000; i++)
            using (var state = new ThreadLocal <FormExtractionHandle>(NativeFormExtraction.CreateFormExtraction))
            {
                GC.Collect();
                List <string> pathFiles = GetSamplesAndCleanUpResults();

                // For testing:
                pathFiles = pathFiles.Where(m => m.Contains("form9")).ToList();

                int numThread      = 1;            // Environment.ProcessorCount;
                var showDebugImage = true;         // If true, you may want to use: numThread = 1.

                Parallel.ForEach(pathFiles, new ParallelOptions {
                    MaxDegreeOfParallelism = numThread
                }, pathFile =>
                {
                    FormExtractionHandle handle = state.Value;

                    NativeFormExtraction.SetOptions(handle, 800, 25, 15, 5, 20000, 50000, showDebugImage);

                    var resizeWidth = 800;
                    var orig        = new Mat(pathFile);
                    var image       = new Mat(pathFile, ImreadModes.GrayScale);

                    Cv2.AdaptiveThreshold(image, image, 255, AdaptiveThresholdTypes.MeanC, ThresholdTypes.Binary, 9, 4);

                    // Resize image if too large.
                    if (image.Width > resizeWidth)
                    {
                        var height = resizeWidth * image.Height / image.Width;
                        Cv2.Resize(image, image, new Size(resizeWidth, height));
                    }

                    Cv2.BitwiseNot(image, image);
                    Cv2.Dilate(image, image, Cv2.GetStructuringElement(MorphShapes.Cross, new Size(2, 2)));

                    MatOfByte mat             = new MatOfByte(image);
                    MatIndexer <byte> indexer = mat.GetIndexer();

                    var row      = image.Height;
                    var col      = image.Width;
                    Mat newImage = new Mat(row, col, MatType.CV_8UC3);
                    newImage.SetTo(Scalar.Black);

                    // We must determine if it "may" be an interesting blob.
                    Stopwatch watch = new Stopwatch();
                    watch.Start();

                    int[] imgData = new int[row * col];
                    for (int y = 0; y < row; y++)
                    {
                        for (int x = 0; x < col; x++)
                        {
                            imgData[y + x * row] = indexer[y, x];
                        }
                    }

                    var result = NativeFormExtraction.RunFormExtraction(handle, imgData, row, col);
                    if (result != 0)
                    {
                        throw new Exception("Unknown error occurred with the function: RunFormExtraction");
                    }
                    watch.Stop();
                    Console.WriteLine("Duration: " + watch.Elapsed);

                    if (showDebugImage)
                    {
                        var debugImg = NativeFormExtraction.GetDebugImage(handle, row * col);

                        var img = CreateImage(debugImg, row, col, hasColor: true);
                        Cv2.BitwiseOr(newImage, img, newImage);

                        Cv2.BitwiseNot(image, image);
                        int width  = 400;
                        var height = width * image.Height / image.Width;
                        Cv2.Resize(orig, orig, new Size(width, height));
                        Cv2.Resize(image, image, new Size(width, height));
                        Cv2.Resize(newImage, newImage, new Size(width, height));

                        using (new Window("orig", orig))
                            using (new Window("pre", image))
                                using (new Window("post", newImage))
                                {
                                    Cv2.WaitKey();
                                    Cv2.DestroyAllWindows();
                                }
                    }

                    // Dispose.
                    orig.Dispose();
                    image.Dispose();
                    newImage.Dispose();
                    mat.Dispose();
                });
            }

            Console.WriteLine("End");
            Console.ReadLine();
        }
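The pixel-copy loop above stores pixel (y, x) at flat index `y + x * row`, so the image is flattened in column-major order, which is evidently what RunFormExtraction expects. A standalone sketch of that mapping and its inverse (plain C#, no OpenCV needed):

    using System;

    class ColumnMajorDemo
    {
        static void Main()
        {
            const int row = 3, col = 4;

            // Same layout as the copy loop above: pixel (y, x) lands at
            // index y + x * row, i.e. column by column.
            int[] data = new int[row * col];
            for (int y = 0; y < row; y++)
                for (int x = 0; x < col; x++)
                    data[y + x * row] = 10 * y + x;   // encode (y, x) for inspection

            // Recovering (y, x) from a flat index inverts the mapping.
            int index = 7;
            int x2 = index / row;   // 2
            int y2 = index % row;   // 1
            Console.WriteLine($"data[{index}] = {data[index]}, (y, x) = ({y2}, {x2})");
        }
    }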
Example #7
        private IEnumerator init()
        {
            if (webCamTexture != null)
            {
                webCamTexture.Stop();
                initDone = false;

                rgbaMat.Dispose();

                matOpFlowThis.Dispose();
                matOpFlowPrev.Dispose();
                MOPcorners.Dispose();
                mMOP2fptsThis.Dispose();
                mMOP2fptsPrev.Dispose();
                mMOP2fptsSafe.Dispose();
                mMOBStatus.Dispose();
                mMOFerr.Dispose();
            }

            // Checks how many and which cameras are available on the device
            for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++)
            {
                if (WebCamTexture.devices [cameraIndex].isFrontFacing == isFrontFacing)
                {
                    Debug.Log(cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing);

                    webCamDevice = WebCamTexture.devices [cameraIndex];

                    webCamTexture = new WebCamTexture(webCamDevice.name, width, height);


                    break;
                }
            }

            if (webCamTexture == null)
            {
                webCamDevice  = WebCamTexture.devices [0];
                webCamTexture = new WebCamTexture(webCamDevice.name, width, height);
            }

            Debug.Log("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);



            // Starts the camera
            webCamTexture.Play();
            while (true)
            {
                // If you want to use webcamTexture.width and webcamTexture.height on iOS, you have to wait until webcamTexture.didUpdateThisFrame == 1, otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
                #if UNITY_IPHONE && !UNITY_EDITOR
                if (webCamTexture.width > 16 && webCamTexture.height > 16)
                {
                #else
                if (webCamTexture.didUpdateThisFrame)
                {
                #endif

                    Debug.Log("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
                    Debug.Log("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrongFacing " + webCamDevice.isFrontFacing);

                    colors = new Color32[webCamTexture.width * webCamTexture.height];

                    rgbaMat = new Mat(webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);

                    matOpFlowThis = new Mat();
                    matOpFlowPrev = new Mat();
                    MOPcorners    = new MatOfPoint();
                    mMOP2fptsThis = new MatOfPoint2f();
                    mMOP2fptsPrev = new MatOfPoint2f();
                    mMOP2fptsSafe = new MatOfPoint2f();
                    mMOBStatus    = new MatOfByte();
                    mMOFerr       = new MatOfFloat();

                    texture = new Texture2D(webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);

                    gameObject.transform.eulerAngles = new Vector3(0, 0, 0);
                    #if (UNITY_ANDROID || UNITY_IPHONE) && !UNITY_EDITOR
                    gameObject.transform.eulerAngles = new Vector3(0, 0, -90);
                    #endif
                    // gameObject.transform.rotation = gameObject.transform.rotation * Quaternion.AngleAxis (webCamTexture.videoRotationAngle, Vector3.back);


                    gameObject.transform.localScale = new Vector3(webCamTexture.width, webCamTexture.height, 1);


                    // bool videoVerticallyMirrored = webCamTexture.videoVerticallyMirrored;
                    // float scaleX = 1;
                    // float scaleY = videoVerticallyMirrored ? -1.0f : 1.0f;
                    // if (webCamTexture.videoRotationAngle == 270)
                    //     scaleY = -1.0f;
                    // gameObject.transform.localScale = new Vector3 (scaleX * gameObject.transform.localScale.x, scaleY * gameObject.transform.localScale.y, 1);


                    gameObject.GetComponent <Renderer> ().material.mainTexture = texture;

                    #if (UNITY_ANDROID || UNITY_IPHONE) && !UNITY_EDITOR
                    Camera.main.orthographicSize = webCamTexture.width / 2;
                    #else
                    Camera.main.orthographicSize = webCamTexture.height / 2;
                    #endif

                    initDone = true;

                    break;
                }
                else
                {
                    yield return(0);
                }
            }
        }

        // Update is called once per frame
        void Update()
        {
            if (!initDone)
            {
                return;
            }

            #if UNITY_IPHONE && !UNITY_EDITOR
            if (webCamTexture.width > 16 && webCamTexture.height > 16)
            {
            #else
            if (webCamTexture.didUpdateThisFrame)
            {
            #endif

                Utils.webCamTextureToMat(webCamTexture, rgbaMat, colors);

                if (webCamTexture.videoVerticallyMirrored)
                {
                    if (webCamDevice.isFrontFacing)
                    {
                        if (webCamTexture.videoRotationAngle == 0)
                        {
                            Core.flip(rgbaMat, rgbaMat, 1);
                        }
                        else if (webCamTexture.videoRotationAngle == 90)
                        {
                            Core.flip(rgbaMat, rgbaMat, 0);
                        }
                        else if (webCamTexture.videoRotationAngle == 270)
                        {
                            Core.flip(rgbaMat, rgbaMat, 1);
                        }
                    }
                    else
                    {
                        if (webCamTexture.videoRotationAngle == 90)
                        {
                        }
                        else if (webCamTexture.videoRotationAngle == 270)
                        {
                            Core.flip(rgbaMat, rgbaMat, -1);
                        }
                    }
                }
                else
                {
                    if (webCamDevice.isFrontFacing)
                    {
                        if (webCamTexture.videoRotationAngle == 0)
                        {
                            Core.flip(rgbaMat, rgbaMat, 1);
                        }
                        else if (webCamTexture.videoRotationAngle == 90)
                        {
                            Core.flip(rgbaMat, rgbaMat, 0);
                        }
                        else if (webCamTexture.videoRotationAngle == 270)
                        {
                            Core.flip(rgbaMat, rgbaMat, 1);
                        }
                    }
                    else
                    {
                        if (webCamTexture.videoRotationAngle == 90)
                        {
                        }
                        else if (webCamTexture.videoRotationAngle == 270)
                        {
                            Core.flip(rgbaMat, rgbaMat, -1);
                        }
                    }
                }

                if (mMOP2fptsPrev.rows() == 0)
                {
                    // first time through the loop so we need prev and this mats
                    // plus prev points
                    // get this mat
                    Imgproc.cvtColor(rgbaMat, matOpFlowThis, Imgproc.COLOR_RGBA2GRAY);

                    // copy that to prev mat
                    matOpFlowThis.copyTo(matOpFlowPrev);

                    // get prev corners
                    Imgproc.goodFeaturesToTrack(matOpFlowPrev, MOPcorners, iGFFTMax, 0.05, 20);
                    mMOP2fptsPrev.fromArray(MOPcorners.toArray());

                    // get safe copy of this corners
                    mMOP2fptsPrev.copyTo(mMOP2fptsSafe);
                }
                else
                {
                    // we've been through before so
                    // this mat is valid. Copy it to prev mat
                    matOpFlowThis.copyTo(matOpFlowPrev);

                    // get this mat
                    Imgproc.cvtColor(rgbaMat, matOpFlowThis, Imgproc.COLOR_RGBA2GRAY);

                    // get the corners for this mat
                    Imgproc.goodFeaturesToTrack(matOpFlowThis, MOPcorners, iGFFTMax, 0.05, 20);
                    mMOP2fptsThis.fromArray(MOPcorners.toArray());

                    // retrieve the corners from the prev mat
                    // (saves calculating them again)
                    mMOP2fptsSafe.copyTo(mMOP2fptsPrev);

                    // and save this corners for next time through

                    mMOP2fptsThis.copyTo(mMOP2fptsSafe);
                }


                /*
                 * Parameters:
                 * prevImg first 8-bit input image
                 * nextImg second input image
                 * prevPts vector of 2D points for which the flow needs to be found; point coordinates must be single-precision floating-point numbers.
                 * nextPts output vector of 2D points (with single-precision floating-point coordinates) containing the calculated new positions of input features in the second image; when OPTFLOW_USE_INITIAL_FLOW flag is passed, the vector must have the same size as in the input.
                 * status output status vector (of unsigned chars); each element of the vector is set to 1 if the flow for the corresponding features has been found, otherwise, it is set to 0.
                 * err output vector of errors; each element of the vector is set to an error for the corresponding feature, type of the error measure can be set in flags parameter; if the flow wasn't found then the error is not defined (use the status parameter to find such cases).
                 */
                Video.calcOpticalFlowPyrLK(matOpFlowPrev, matOpFlowThis, mMOP2fptsPrev, mMOP2fptsThis, mMOBStatus, mMOFerr);

                if (!mMOBStatus.empty())
                {
                    List <Point> cornersPrev = mMOP2fptsPrev.toList();
                    List <Point> cornersThis = mMOP2fptsThis.toList();
                    List <byte>  byteStatus  = mMOBStatus.toList();

                    int x = 0;
                    int y = byteStatus.Count - 1;

                    for (x = 0; x < y; x++)
                    {
                        if (byteStatus [x] == 1)
                        {
                            Point pt  = cornersThis [x];
                            Point pt2 = cornersPrev [x];

                            Core.circle(rgbaMat, pt, 5, colorRed, iLineThickness - 1);

                            Core.line(rgbaMat, pt, pt2, colorRed, iLineThickness);
                        }
                    }
                }



                Utils.matToTexture2D(rgbaMat, texture, colors);

                gameObject.GetComponent <Renderer> ().material.mainTexture = texture;
            }
        }

        void OnDisable()
        {
            webCamTexture.Stop();
        }

        void OnGUI()
        {
            float     screenScale  = Screen.width / 240.0f;
            Matrix4x4 scaledMatrix = Matrix4x4.Scale(new Vector3(screenScale, screenScale, screenScale));

            GUI.matrix = scaledMatrix;


            GUILayout.BeginVertical();
            if (GUILayout.Button("back"))
            {
                Application.LoadLevel("OpenCVForUnitySample");
            }
            if (GUILayout.Button("change camera"))
            {
                isFrontFacing = !isFrontFacing;
                StartCoroutine(init());
            }

            GUILayout.EndVertical();
        }
    }
}
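In the Update method above, the mirrored and non-mirrored branches apply exactly the same flips, so the whole cascade reduces to one lookup on the camera facing and rotation angle. A hypothetical condensation (GetFlipCode is not part of the sample):

    // Returns the OpenCV flip code for Core.flip, or null when no flip is needed.
    static int? GetFlipCode(bool isFrontFacing, int videoRotationAngle)
    {
        if (isFrontFacing)
        {
            switch (videoRotationAngle)
            {
            case 0:
                return 1;      // flip around the y axis (mirror)
            case 90:
                return 0;      // flip around the x axis
            case 270:
                return 1;
            }
        }
        else if (videoRotationAngle == 270)
        {
            return -1;         // flip around both axes
        }
        return null;
    }

A caller would then write `int? code = GetFlipCode(webCamDevice.isFrontFacing, webCamTexture.videoRotationAngle); if (code.HasValue) Core.flip(rgbaMat, rgbaMat, code.Value);`.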
Example #8
        private IEnumerator init()
        {
            if (webCamTexture != null)
            {
                webCamTexture.Stop();
                initDone = false;

                rgbaMat.Dispose();

                matOpFlowThis.Dispose();
                matOpFlowPrev.Dispose();
                MOPcorners.Dispose();
                mMOP2fptsThis.Dispose();
                mMOP2fptsPrev.Dispose();
                mMOP2fptsSafe.Dispose();
                mMOBStatus.Dispose();
                mMOFerr.Dispose();
            }

            // Checks how many and which cameras are available on the device
            for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++)
            {
                if (WebCamTexture.devices [cameraIndex].isFrontFacing == shouldUseFrontFacing)
                {
                    Debug.Log(cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing);

                    webCamDevice = WebCamTexture.devices [cameraIndex];

                    webCamTexture = new WebCamTexture(webCamDevice.name, width, height);


                    break;
                }
            }

            if (webCamTexture == null)
            {
                webCamDevice  = WebCamTexture.devices [0];
                webCamTexture = new WebCamTexture(webCamDevice.name, width, height);
            }

            Debug.Log("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);



            // Starts the camera
            webCamTexture.Play();
            while (true)
            {
                // If you want to use webcamTexture.width and webcamTexture.height on iOS, you have to wait until webcamTexture.didUpdateThisFrame == 1, otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
                #if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
                if (webCamTexture.width > 16 && webCamTexture.height > 16)
                {
                #else
                if (webCamTexture.didUpdateThisFrame)
                {
                    #if UNITY_IOS && !UNITY_EDITOR && UNITY_5_2
                    while (webCamTexture.width <= 16)
                    {
                        webCamTexture.GetPixels32();
                        yield return(new WaitForEndOfFrame());
                    }
                    #endif
                #endif

                    Debug.Log("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
                    Debug.Log("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrongFacing " + webCamDevice.isFrontFacing);

                    colors = new Color32[webCamTexture.width * webCamTexture.height];

                    rgbaMat = new Mat(webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);

                    matOpFlowThis = new Mat();
                    matOpFlowPrev = new Mat();
                    MOPcorners    = new MatOfPoint();
                    mMOP2fptsThis = new MatOfPoint2f();
                    mMOP2fptsPrev = new MatOfPoint2f();
                    mMOP2fptsSafe = new MatOfPoint2f();
                    mMOBStatus    = new MatOfByte();
                    mMOFerr       = new MatOfFloat();

                    texture = new Texture2D(webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);

                    gameObject.GetComponent <Renderer> ().material.mainTexture = texture;

                    updateLayout();

                    screenOrientation = Screen.orientation;
                    initDone          = true;

                    break;
                }
                else
                {
                    yield return(0);
                }
            }
        }
Example #9
        public override int Run()
        {
            int device = 0;

            var argument = new StringList {
                "./"
            };
            FaceModelParameters det_parameters = new FaceModelParameters(argument);

            //vector<string> files, depth_directories, output_video_files, out_dummy;
            StringList files = new StringList(), output_video_files = new StringList(), out_dummy = new StringList();
            bool       u;
            string     output_codec;

            LandmarkDetector.get_video_input_output_params(files, out_dummy, output_video_files, out u, out output_codec, argument);

            CLNF clnf_model = new CLNF(det_parameters.model_location);

            float fx = 0, fy = 0, cx = 0, cy = 0;

            LandmarkDetector.get_camera_params(out device, out fx, out fy, out cx, out cy, argument);

            // If cx (the optical axis centre) is undefined, the image size / 2 is used as an estimate
            bool cx_undefined = false;
            bool fx_undefined = false;

            if (cx == 0 || cy == 0)
            {
                cx_undefined = true;
            }
            if (fx == 0 || fy == 0)
            {
                fx_undefined = true;
            }

            //// Do some grabbing
            INFO_STREAM("Attempting to capture from device: " + device);
            using (VideoCapture video_capture = new VideoCapture(device))
            {
                using (Mat dummy = new Mat())
                    video_capture.Read(dummy);

                if (!video_capture.IsOpened())
                {
                    FATAL_STREAM("Failed to open video source");
                    return(1);
                }
                else
                {
                    INFO_STREAM("Device or file opened");
                }

                int frame_count    = 0;
                Mat captured_image = new Mat();
                video_capture.Read(captured_image);
                Size = new Size(captured_image.Width / SizeFactor, captured_image.Height / SizeFactor);
                using (var resized_image = captured_image.Resize(Size))
                {
                    // If optical centers are not defined just use center of image
                    if (cx_undefined)
                    {
                        cx = resized_image.Cols / 2.0f;
                        cy = resized_image.Rows / 2.0f;
                    }
                    // Use a rough guess-timate of focal length
                    if (fx_undefined)
                    {
                        fx = (float)(500 * (resized_image.Cols / 640.0));
                        fy = (float)(500 * (resized_image.Rows / 480.0));

                        fx = (float)((fx + fy) / 2.0);
                        fy = fx;
                    }
                }

                // Use for timestamping if using a webcam
                long t_initial = Cv2.GetTickCount();

                INFO_STREAM("Starting tracking");
                while (video_capture.Read(captured_image))
                {
                    using (var resized_image = captured_image.Resize(Size))
                    {
                        // Reading the images
                        MatOfByte grayscale_image = new MatOfByte();

                        if (resized_image.Channels() == 3)
                        {
                            Cv2.CvtColor(resized_image, grayscale_image, ColorConversionCodes.BGR2GRAY);
                        }
                        else
                        {
                            // Clone() returns a plain Mat, so wrap the clone instead of casting it to MatOfByte.
                            grayscale_image = new MatOfByte(resized_image.Clone());
                        }

                        // The actual facial landmark detection / tracking
                        bool detection_success = LandmarkDetector.DetectLandmarksInVideo(new SWIGTYPE_p_cv__Mat_T_uchar_t(grayscale_image.CvPtr), new SWIGTYPE_p_CLNF(CLNF.getCPtr(clnf_model)), new SWIGTYPE_p_FaceModelParameters(FaceModelParameters.getCPtr(det_parameters)));

                        // Visualising the results
                        // Drawing the facial landmarks on the face and the bounding box around it if tracking is successful and initialised
                        double detection_certainty = clnf_model.detection_certainty;

                        visualise_tracking(resized_image, ref clnf_model, ref det_parameters, frame_count, fx, fy, cx, cy);

                        // detect key presses
                        char character_press = (char)Cv2.WaitKey(15);
                        switch (character_press)
                        {
                        case 'r':
                            clnf_model.Reset();
                            break;

                        case 'q':
                            return(0);
                        }

                        // Update the frame count
                        frame_count++;

                        grayscale_image.Dispose();
                        grayscale_image = null;
                    }
                }
            }

            return(0);
        }
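When no camera parameters are supplied, the code above falls back to the image centre for the optical axis and a rough "500 px at VGA" guess for the focal length, averaged over the two axes. The same estimate as a standalone sketch (assuming C# 7 tuples; IntrinsicsFallback is a hypothetical name):

    using System;

    class IntrinsicsFallback
    {
        // Mirrors the fallback above: optical centre defaults to the image
        // centre, focal length to 500 px scaled from 640x480.
        static (float fx, float fy, float cx, float cy) Estimate(int cols, int rows)
        {
            float cx = cols / 2.0f;
            float cy = rows / 2.0f;
            float fx = (float)(500 * (cols / 640.0));
            float fy = (float)(500 * (rows / 480.0));
            fx = (fx + fy) / 2.0f;   // average the two guesses
            return (fx, fx, cx, cy);
        }

        static void Main()
        {
            var (fx, fy, cx, cy) = Estimate(640, 480);
            Console.WriteLine($"fx={fx} fy={fy} cx={cx} cy={cy}");  // fx=500 fy=500 cx=320 cy=240
        }
    }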
Example #10
        public static FormExtractionResult ProcessImage(string filename, FormExtractionOptions options = null)
        {
            if (options == null)
            {
                // Assume recommended parameters.
                options = new FormExtractionOptions();
            }

            var orig  = new Mat(filename);
            var image = new Mat(filename, ImreadModes.GrayScale);

            Cv2.AdaptiveThreshold(image, image, 255, AdaptiveThresholdTypes.MeanC, ThresholdTypes.Binary, 9, 4);

            // Resize image if too large.
            if (image.Width > options.ResizeWidth)
            {
                var height = options.ResizeWidth * image.Height / image.Width;
                Cv2.Resize(image, image, new Size(options.ResizeWidth, height));
            }

            Cv2.BitwiseNot(image, image);
            Cv2.Dilate(image, image, Cv2.GetStructuringElement(MorphShapes.Cross, new Size(2, 2)));

            MatOfByte         mat     = new MatOfByte(image);
            MatIndexer <byte> indexer = mat.GetIndexer();

            var row      = image.Height;
            var col      = image.Width;
            Mat newImage = new Mat(row, col, MatType.CV_8UC3);

            newImage.SetTo(Scalar.Black);

            // We must determine if it "may" be an interesting blob.
            Stopwatch watch = new Stopwatch();

            watch.Start();

            int[] imgData = new int[row * col];
            for (int y = 0; y < row; y++)
            {
                for (int x = 0; x < col; x++)
                {
                    imgData[y + x * row] = indexer[y, x];
                }
            }

            var result = HasBoxes(imgData, row, col, options);

            watch.Stop();
            result.Duration = watch.Elapsed;

            // Preview
            if (result.Boxes.Any() && image.Width != 0 && options.ShowDebugImage)
            {
                var img = CreateImage(result.DebugImg, hasColor: true);
                Cv2.BitwiseOr(newImage, img, newImage);

                Cv2.BitwiseNot(image, image);
                int width  = 400;
                var height = width * image.Height / image.Width;
                Cv2.Resize(orig, orig, new Size(width, height));
                Cv2.Resize(image, image, new Size(width, height));
                Cv2.Resize(newImage, newImage, new Size(width, height));

                using (new Window("orig", orig))
                    using (new Window("pre", image))
                        using (new Window("post", newImage))
                        {
                            Cv2.WaitKey();
                            Cv2.DestroyAllWindows();
                        }
            }

            // Dispose.
            orig.Dispose();
            image.Dispose();
            newImage.Dispose();
            mat.Dispose();

            return(result);
        }
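ProcessImage and the Main method in Example #6 share the same preprocessing chain: adaptive threshold, optional downscale to a maximum width, inversion, then a cross-shaped dilation to reconnect broken strokes. A condensed sketch of just that chain, assuming the same OpenCvSharp API used above (Preprocess is a hypothetical name):

    using OpenCvSharp;

    static class Preprocess
    {
        // Binarise, shrink to a maximum width, invert, and thicken strokes
        // so the downstream box detection sees connected line segments.
        public static Mat Run(string filename, int resizeWidth = 800)
        {
            var image = new Mat(filename, ImreadModes.GrayScale);
            Cv2.AdaptiveThreshold(image, image, 255, AdaptiveThresholdTypes.MeanC,
                                  ThresholdTypes.Binary, 9, 4);

            // Resize image if too large.
            if (image.Width > resizeWidth)
            {
                var height = resizeWidth * image.Height / image.Width;
                Cv2.Resize(image, image, new Size(resizeWidth, height));
            }

            Cv2.BitwiseNot(image, image);
            Cv2.Dilate(image, image,
                       Cv2.GetStructuringElement(MorphShapes.Cross, new Size(2, 2)));
            return image;
        }
    }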