Example #1
0
 /// <summary>
 /// Releases the optical-flow tracking state: clears the point lists and
 /// disposes the native mats. Null-conditional calls are equivalent to the
 /// original explicit null checks, so unallocated members are skipped.
 /// </summary>
 protected virtual void DisposeOpticalFlow()
 {
     prevTrackPts?.Clear();
     nextTrackPts?.Clear();

     prevgray?.Dispose();
     gray?.Dispose();
     mOP2fPrevTrackPts?.Dispose();
     mOP2fNextTrackPts?.Dispose();
     status?.Dispose();
     err?.Dispose();
 }
Example #2
0
        /// <summary>
        /// Disposes the pose-estimation mats. The null-conditional operator
        /// gives exactly the original null-guarded Dispose behavior.
        /// </summary>
        public override void Dispose()
        {
            objectPoints68?.Dispose();
            camMatrix?.Dispose();
            distCoeffs?.Dispose();
            imagePoints?.Dispose();
            rvec?.Dispose();
            tvec?.Dispose();
        }
        /// <summary>
        /// Detects the largest HSV-thresholded blob in <paramref name="rgbaImage"/>
        /// and stores its simplified polygon outline in FingerContour, or null
        /// when no plausible blob is found.
        /// </summary>
        /// <param name="rgbaImage">Input RGBA camera frame.</param>
        public void ProcessFinger(Mat rgbaImage)
        {
            // Work at quarter resolution to cut per-frame processing cost.
            Imgproc.pyrDown(rgbaImage, mPyrDownMat);
            Imgproc.pyrDown(mPyrDownMat, mPyrDownMat);

            Imgproc.cvtColor(mPyrDownMat, mHsvMat, Imgproc.COLOR_RGB2HSV_FULL);
            Imgproc.cvtColor(mPyrDownMat, mRGBAMat, Imgproc.COLOR_RGB2RGBA);
            Imgproc.cvtColor(mPyrDownMat, mYCrCbMat, Imgproc.COLOR_RGB2YCrCb);

            Core.inRange(mHsvMat, fLowerBoundHSV, fUpperBoundHSV, fMaskHSV);

            fMask = fMaskHSV;

            // BUGFIX: the temporary kernel Mat was leaked on every call.
            using (Mat kernel = new Mat())
            {
                Imgproc.dilate(fMask, fDilatedMask, kernel);
            }

            List<MatOfPoint> contoursFinger = new List<MatOfPoint>();

            Imgproc.findContours(fDilatedMask, contoursFinger, fHierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);

            try
            {
                if (contoursFinger.Count == 0)
                {
                    FingerContour = null;
                    return;
                }

                // Find max contour area.
                double     maxArea        = 0;
                MatOfPoint biggestContour = null;

                foreach (MatOfPoint contour in contoursFinger)
                {
                    double area = Imgproc.contourArea(contour);
                    if (area > maxArea)
                    {
                        maxArea        = area;
                        biggestContour = contour;
                    }
                }

                // Reject blobs too small to be a finger.
                if (maxArea < 130)
                {
                    FingerContour = null;
                    return;
                }

                // Simplify the outline with a 3px tolerance.
                MatOfPoint2f contours_res2f   = new MatOfPoint2f();
                MatOfPoint2f biggestContour2f = new MatOfPoint2f(biggestContour.toArray());

                Imgproc.approxPolyDP(biggestContour2f, contours_res2f, 3, true);
                FingerContour = new MatOfPoint(contours_res2f.toArray());
                contours_res2f.Dispose();
                biggestContour2f.Dispose();

                // Scale back to full resolution (two pyrDowns => x4) unless the
                // approximation collapsed the contour below the area threshold.
                if (Imgproc.contourArea(FingerContour) > mMinContourArea * maxArea)
                {
                    Core.multiply(FingerContour, new Scalar(4, 4), FingerContour);
                }
            }
            finally
            {
                // BUGFIX: the contour mats returned by findContours were never
                // released, leaking native memory on every processed frame.
                foreach (MatOfPoint contour in contoursFinger)
                {
                    contour.Dispose();
                }
            }
        }
 /// <summary>
 /// Releases the native mats used for pose estimation.
 /// BUGFIX: the original disposed every field unguarded and threw a
 /// NullReferenceException when any mat was never allocated; the
 /// null-conditional guards make disposal safe in that case.
 /// </summary>
 public void Dispose()
 {
     objectPoints?.Dispose();
     imagePoints?.Dispose();
     rvec?.Dispose();
     tvec?.Dispose();
     rotM?.Dispose();
     camMatrix?.Dispose();
     distCoeffs?.Dispose();
 }
Example #5
0
        /// <summary>
        /// Raises the web cam texture to mat helper disposed event:
        /// releases all optical-flow mats.
        /// BUGFIX: the helper can fire this event before the mats are
        /// allocated; the original unguarded Dispose calls then threw a
        /// NullReferenceException. Null-conditional guards prevent that.
        /// </summary>
        public void OnWebCamTextureToMatHelperDisposed()
        {
            Debug.Log("OnWebCamTextureToMatHelperDisposed");

            matOpFlowThis?.Dispose();
            matOpFlowPrev?.Dispose();
            MOPcorners?.Dispose();
            mMOP2fptsThis?.Dispose();
            mMOP2fptsPrev?.Dispose();
            mMOP2fptsSafe?.Dispose();
            mMOBStatus?.Dispose();
            mMOFerr?.Dispose();
        }
Example #6
0
 /// <summary>
 /// Standard dispose-pattern worker: releases the image mats exactly once.
 /// </summary>
 /// <param name="disposing">True when called from Dispose(). Currently not
 /// consulted — NOTE(review): confirm no finalizer path passes false, since
 /// managed wrappers are disposed unconditionally here.</param>
 protected virtual void Dispose(bool disposing)
 {
     if (!disposedValue)
     {
         // BUGFIX: originalImage/originalMat were disposed unguarded while
         // detectedBorder was null-checked — now all three are guarded
         // consistently, avoiding an NRE when disposal precedes allocation.
         originalImage?.Dispose();
         originalMat?.Dispose();
         detectedBorder?.Dispose();
         oldPerimeter  = 0;
         disposedValue = true;
     }
 }
Example #7
0
        /// <summary>
        /// Raises the web cam texture to mat helper disposed event: destroys
        /// the display texture and releases all optical-flow mats.
        /// </summary>
        public void OnWebCamTextureToMatHelperDisposed()
        {
            Debug.Log("OnWebCamTextureToMatHelperDisposed");

            // Unity object: explicit check plus Destroy, then drop the reference.
            if (texture != null)
            {
                Texture2D.Destroy(texture);
                texture = null;
            }

            // Native mats: '?.' is equivalent to the original null-guarded calls.
            matOpFlowThis?.Dispose();
            matOpFlowPrev?.Dispose();
            MOPcorners?.Dispose();
            mMOP2fptsThis?.Dispose();
            mMOP2fptsPrev?.Dispose();
            mMOP2fptsSafe?.Dispose();
            mMOBStatus?.Dispose();
            mMOFerr?.Dispose();
        }
Example #8
0
 /// <summary>
 /// Releases every native OpenCV mat owned by this component when the
 /// GameObject is destroyed.
 /// </summary>
 void OnDestroy()
 {
     _anchorPointsImageMat?.Dispose();
     _anchorPointsWorldMat?.Dispose();
     _distCoeffs?.Dispose();
     _cameraMatrix?.Dispose();
     // BUGFIX: _rVec was null-checked and disposed twice (copy-paste);
     // the duplicate block is removed. Presumably the second block was
     // meant for the translation vector — TODO: confirm the field name
     // (e.g. _tVec) and dispose it here as well.
     _rVec?.Dispose();
     _camTexMat?.Dispose();
     _camTexGrayMat?.Dispose();
     _camTexGrayUndistortMat?.Dispose();
 }
    /// <summary>
    /// Releases the optical-flow mats when the component is disabled.
    /// </summary>
    void OnDisable()
    {
        Debug.Log("OnDisable");

        // '?.' is equivalent to the original per-field null-checked Dispose.
        matOpFlowThis?.Dispose();
        matOpFlowPrev?.Dispose();
        MOPcorners?.Dispose();
        mMOP2fptsThis?.Dispose();
        mMOP2fptsPrev?.Dispose();
        mMOP2fptsSafe?.Dispose();
        mMOBStatus?.Dispose();
        mMOFerr?.Dispose();
        //webCamTextureToMatHelper.Dispose();
    }
Example #10
0
 /// <summary>
 /// Scans the contours in parallel and returns the corners of the biggest
 /// convex quadrilateral whose perimeter exceeds 1500, or null when none
 /// qualifies. Side effects: updates detectedBorder and oldPerimeter with
 /// the winning candidate.
 /// </summary>
 /// <param name="contours">Candidate contours to examine.</param>
 private OpenCV.Core.Point[] FindBiggestContourWithFourBorders(IList <MatOfPoint> contours)
 {
     OpenCV.Core.Point[] biggestContour = null;
     // BUGFIX: the parallel loop bodies compared and updated the shared
     // biggestContour / detectedBorder / oldPerimeter state with no
     // synchronization, so interleaved threads could keep a smaller
     // contour. A gate object now serializes the compare-and-update.
     object gate = new object();
     Parallel.ForEach(contours, (c) =>
     {
         MatOfPoint2f mat2f = new MatOfPoint2f(c.ToArray());
         double perimeter   = Imgproc.ArcLength(mat2f, true);
         if (perimeter > 1500)
         {
             MatOfPoint2f approx = new MatOfPoint2f();
             Imgproc.ApproxPolyDP(mat2f, approx, 0.02 * perimeter, true);
             bool kept = false;
             if (approx.Total() == 4)
             {
                 MatOfPoint approxMat = new MatOfPoint(approx.ToArray());
                 if (Imgproc.IsContourConvex(approxMat))
                 {
                     lock (gate)
                     {
                         // Same rule as the original: take the first
                         // candidate, thereafter only longer perimeters.
                         if (biggestContour == null || oldPerimeter < perimeter)
                         {
                             biggestContour = approx.ToArray();
                             detectedBorder = approx;
                             oldPerimeter   = perimeter;
                             kept           = true;
                         }
                     }
                 }
                 // BUGFIX: the temporary convexity-test mat was never released.
                 approxMat.Dispose();
             }
             if (!kept)
             {
                 // BUGFIX: rejected approximations were leaked.
                 approx.Dispose();
             }
         }
         mat2f.Dispose();
     });
     return(biggestContour);
 }
Example #11
0
        /// <summary>
        /// Tears down the calibration scene: native mats are disposed,
        /// Unity textures/materials destroyed or released, then Reset().
        /// </summary>
        void OnDestroy()
        {
            // Native OpenCV mats (dispose-if-allocated).
            _camTexMat?.Dispose();
            _chessCornersImageMat?.Dispose();
            _chessCornersWorldMat?.Dispose();

            // Unity objects use the engine's lifetime-aware boolean check.
            if (_tempTransferTexture)
            {
                Destroy(_tempTransferTexture);
            }
            if (_previewMaterial)
            {
                Destroy(_previewMaterial);
            }
            if (_patternRenderMaterial)
            {
                Destroy(_patternRenderMaterial);
            }

            // Render textures are released rather than destroyed.
            if (_arTexture)
            {
                _arTexture.Release();
            }
            if (_chessPatternTexture)
            {
                _chessPatternTexture.Release();
            }

            Reset();
        }
Example #12
0
        /// <summary>
        /// Releases each pose-estimation mat exactly once: a mat that was
        /// never created or is already disposed is skipped, matching the
        /// original's guarded Dispose calls.
        /// </summary>
        public void Dispose()
        {
            if (objectPoints != null && !objectPoints.IsDisposed) objectPoints.Dispose();
            if (imagePoints != null && !imagePoints.IsDisposed) imagePoints.Dispose();
            if (rvec != null && !rvec.IsDisposed) rvec.Dispose();
            if (tvec != null && !tvec.IsDisposed) tvec.Dispose();
            if (rotM != null && !rotM.IsDisposed) rotM.Dispose();
            if (camMatrix != null && !camMatrix.IsDisposed) camMatrix.Dispose();
            if (distCoeffs != null && !distCoeffs.IsDisposed) distCoeffs.Dispose();
        }
Example #13
0
        /// <summary>
        /// Coroutine that (re)initializes the webcam and the optical-flow
        /// buffers, then waits until the WebCamTexture delivers real frames
        /// before allocating the mats and sizing the display quad.
        /// Restartable: a previous session is stopped and released first.
        /// </summary>
        private IEnumerator init()
        {
            if (webCamTexture != null)
            {
                // Re-entry (e.g. camera switch): stop the old session and free
                // its native buffers before allocating new ones.
                // NOTE(review): the mats are disposed unguarded — assumes a
                // non-null webCamTexture implies a previously completed init;
                // confirm no path reaches here with unallocated mats.
                webCamTexture.Stop();
                initDone = false;

                rgbaMat.Dispose();

                matOpFlowThis.Dispose();
                matOpFlowPrev.Dispose();
                MOPcorners.Dispose();
                mMOP2fptsThis.Dispose();
                mMOP2fptsPrev.Dispose();
                mMOP2fptsSafe.Dispose();
                mMOBStatus.Dispose();
                mMOFerr.Dispose();
            }

            // Checks how many and which cameras are available on the device,
            // taking the first one whose facing matches the request.
            for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++)
            {
                if (WebCamTexture.devices [cameraIndex].isFrontFacing == isFrontFacing)
                {
                    Debug.Log(cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing);

                    webCamDevice = WebCamTexture.devices [cameraIndex];

                    webCamTexture = new WebCamTexture(webCamDevice.name, width, height);


                    break;
                }
            }

            // Fallback: no camera with the requested facing — use device 0.
            if (webCamTexture == null)
            {
                webCamDevice  = WebCamTexture.devices [0];
                webCamTexture = new WebCamTexture(webCamDevice.name, width, height);
            }

            Debug.Log("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);



            // Starts the camera
            webCamTexture.Play();
            // Poll until the camera reports a real frame, then allocate all
            // buffers based on the actual frame dimensions.
            while (true)
            {
                //If you want to use webcamTexture.width and webcamTexture.height on iOS, you have to wait until webcamTexture.didUpdateThisFrame == 1, otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
                                                                #if UNITY_IPHONE && !UNITY_EDITOR
                if (webCamTexture.width > 16 && webCamTexture.height > 16)
                {
                                                                #else
                if (webCamTexture.didUpdateThisFrame)
                {
                                                                                #endif

                    Debug.Log("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
                    Debug.Log("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrongFacing " + webCamDevice.isFrontFacing);

                    // Pixel buffer plus the RGBA frame mat and all optical-flow
                    // working mats, sized to the real camera resolution.
                    colors = new Color32[webCamTexture.width * webCamTexture.height];

                    rgbaMat = new Mat(webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);

                    matOpFlowThis = new Mat();
                    matOpFlowPrev = new Mat();
                    MOPcorners    = new MatOfPoint();
                    mMOP2fptsThis = new MatOfPoint2f();
                    mMOP2fptsPrev = new MatOfPoint2f();
                    mMOP2fptsSafe = new MatOfPoint2f();
                    mMOBStatus    = new MatOfByte();
                    mMOFerr       = new MatOfFloat();

                    texture = new Texture2D(webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);

                    // Orient and size the display quad for the platform.
                    gameObject.transform.eulerAngles = new Vector3(0, 0, 0);
                                                                                #if (UNITY_ANDROID || UNITY_IPHONE) && !UNITY_EDITOR
                    gameObject.transform.eulerAngles = new Vector3(0, 0, -90);
                                                                                #endif
// 										gameObject.transform.rotation = gameObject.transform.rotation * Quaternion.AngleAxis (webCamTexture.videoRotationAngle, Vector3.back);


                    gameObject.transform.localScale = new Vector3(webCamTexture.width, webCamTexture.height, 1);


// 										bool videoVerticallyMirrored = webCamTexture.videoVerticallyMirrored;
// 										float scaleX = 1;
// 										float scaleY = videoVerticallyMirrored ? -1.0f : 1.0f;
// 										if (webCamTexture.videoRotationAngle == 270)
// 												scaleY = -1.0f;
// 										gameObject.transform.localScale = new Vector3 (scaleX * gameObject.transform.localScale.x, scaleY * gameObject.transform.localScale.y, 1);


                    gameObject.GetComponent <Renderer> ().material.mainTexture = texture;

                                                                                #if (UNITY_ANDROID || UNITY_IPHONE) && !UNITY_EDITOR
                    Camera.main.orthographicSize = webCamTexture.width / 2;
                                                                                #else
                    Camera.main.orthographicSize = webCamTexture.height / 2;
                                                                                #endif

                    initDone = true;

                    break;
                }
                else
                {
                    // No frame yet — try again next frame.
                    yield return(0);
                }
            }
        }

        /// <summary>
        /// Per-frame update: grabs the latest camera frame, fixes its
        /// orientation, runs pyramidal Lucas-Kanade optical flow between the
        /// previous and current grayscale frames, and draws the tracked
        /// feature motion onto the output texture.
        /// </summary>
        void Update()
        {
            if (!initDone)
            {
                return;
            }

                                                #if UNITY_IPHONE && !UNITY_EDITOR
            // On iOS width/height stay 16 until a real frame has arrived.
            if (webCamTexture.width > 16 && webCamTexture.height > 16)
            {
                                                #else
            if (webCamTexture.didUpdateThisFrame)
            {
                                                                #endif

                Utils.webCamTextureToMat(webCamTexture, rgbaMat, colors);

                // Orientation fix-up.
                // BUGFIX(simplification): the original repeated this entire
                // block byte-identically for videoVerticallyMirrored == true
                // and == false; the duplicated branch has been removed.
                if (webCamDevice.isFrontFacing)
                {
                    if (webCamTexture.videoRotationAngle == 0)
                    {
                        Core.flip(rgbaMat, rgbaMat, 1);
                    }
                    else if (webCamTexture.videoRotationAngle == 90)
                    {
                        Core.flip(rgbaMat, rgbaMat, 0);
                    }
                    else if (webCamTexture.videoRotationAngle == 270)
                    {
                        Core.flip(rgbaMat, rgbaMat, 1);
                    }
                }
                else
                {
                    // Rear camera: only 270° rotation needs a correction
                    // (flip around both axes); 90° is left untouched, as in
                    // the original.
                    if (webCamTexture.videoRotationAngle == 270)
                    {
                        Core.flip(rgbaMat, rgbaMat, -1);
                    }
                }

                if (mMOP2fptsPrev.rows() == 0)
                {
                    // First time through the loop: build both grayscale mats
                    // and seed the previous-corner set.
                    Imgproc.cvtColor(rgbaMat, matOpFlowThis, Imgproc.COLOR_RGBA2GRAY);

                    // copy that to prev mat
                    matOpFlowThis.copyTo(matOpFlowPrev);

                    // get prev corners
                    Imgproc.goodFeaturesToTrack(matOpFlowPrev, MOPcorners, iGFFTMax, 0.05, 20);
                    mMOP2fptsPrev.fromArray(MOPcorners.toArray());

                    // get safe copy of this corners
                    mMOP2fptsPrev.copyTo(mMOP2fptsSafe);
                }
                else
                {
                    // Subsequent frames: promote last frame to "prev",
                    // refresh "this", and reuse the corners saved last frame
                    // (saves recomputing them).
                    matOpFlowThis.copyTo(matOpFlowPrev);

                    // get this mat
                    Imgproc.cvtColor(rgbaMat, matOpFlowThis, Imgproc.COLOR_RGBA2GRAY);

                    // get the corners for this mat
                    Imgproc.goodFeaturesToTrack(matOpFlowThis, MOPcorners, iGFFTMax, 0.05, 20);
                    mMOP2fptsThis.fromArray(MOPcorners.toArray());

                    // retrieve the corners from the prev mat
                    mMOP2fptsSafe.copyTo(mMOP2fptsPrev);

                    // and save this corners for next time through
                    mMOP2fptsThis.copyTo(mMOP2fptsSafe);
                }

                // Track mMOP2fptsPrev features from matOpFlowPrev into
                // matOpFlowThis. Per-feature success lands in mMOBStatus
                // (1 = flow found) and the tracking error in mMOFerr.
                Video.calcOpticalFlowPyrLK(matOpFlowPrev, matOpFlowThis, mMOP2fptsPrev, mMOP2fptsThis, mMOBStatus, mMOFerr);

                if (!mMOBStatus.empty())
                {
                    List <Point> cornersPrev = mMOP2fptsPrev.toList();
                    List <Point> cornersThis = mMOP2fptsThis.toList();
                    List <byte>  byteStatus  = mMOBStatus.toList();

                    // NOTE(review): the bound skips the final status entry
                    // (Count - 1), so the last feature is never drawn. Kept
                    // as-is to preserve the original behavior — confirm
                    // whether this off-by-one is intentional.
                    int last = byteStatus.Count - 1;

                    for (int i = 0; i < last; i++)
                    {
                        if (byteStatus [i] == 1)
                        {
                            Point pt  = cornersThis [i];
                            Point pt2 = cornersPrev [i];

                            // Mark the current position and the motion segment.
                            Core.circle(rgbaMat, pt, 5, colorRed, iLineThickness - 1);

                            Core.line(rgbaMat, pt, pt2, colorRed, iLineThickness);
                        }
                    }
                }

                Utils.matToTexture2D(rgbaMat, texture, colors);

                gameObject.GetComponent <Renderer> ().material.mainTexture = texture;
            }
        }

        /// <summary>
        /// Stops the camera when the component is disabled.
        /// </summary>
        void OnDisable()
        {
            // BUGFIX: guard against OnDisable running before init() ever
            // created the texture (previously threw NullReferenceException).
            if (webCamTexture != null)
            {
                webCamTexture.Stop();
            }
        }

        /// <summary>
        /// IMGUI overlay: a "back" button returning to the sample menu and a
        /// "change camera" button that toggles facing and restarts init().
        /// </summary>
        void OnGUI()
        {
            // Scale the IMGUI layout so it renders as if the screen were
            // 240 points wide.
            float guiScale = Screen.width / 240.0f;
            GUI.matrix = Matrix4x4.Scale(new Vector3(guiScale, guiScale, guiScale));

            GUILayout.BeginVertical();

            if (GUILayout.Button("back"))
            {
                // NOTE(review): Application.LoadLevel is obsolete in modern
                // Unity (SceneManager.LoadScene is the replacement); kept to
                // preserve behavior on this project's Unity version.
                Application.LoadLevel("OpenCVForUnitySample");
            }
            if (GUILayout.Button("change camera"))
            {
                isFrontFacing = !isFrontFacing;
                StartCoroutine(init());
            }

            GUILayout.EndVertical();
        }
    }
}
Example #14
0
        /// <summary>
        /// Recognizes the markers: identifies each detected quad against the
        /// known marker designs, rotates its corners into canonical order,
        /// and refines the corner positions to sub-pixel accuracy. On return,
        /// <paramref name="detectedMarkers"/> contains only the identified
        /// markers with refined corners.
        /// </summary>
        /// <param name="grayscale">Grayscale camera frame.</param>
        /// <param name="detectedMarkers">In: candidate markers; out: identified markers.</param>
        void recognizeMarkers(Mat grayscale, List <Marker> detectedMarkers)
        {
            List <Marker> goodMarkers = new List <Marker> ();

            // Identify the markers
            for (int i = 0; i < detectedMarkers.Count; i++)
            {
                Marker marker = detectedMarkers [i];

                // Find the perspective transformation that brings the current
                // marker to rectangular form, then warp it into the canonical
                // marker image.
                // BUGFIX: both temporary mats were leaked on every marker.
                using (MatOfPoint2f markerCorners = new MatOfPoint2f(marker.points.toArray()))
                using (Mat markerTransform = Imgproc.getPerspectiveTransform(markerCorners, m_markerCorners2d))
                {
                    Imgproc.warpPerspective(grayscale, canonicalMarkerImage, markerTransform, markerSize);
                }

                for (int p = 0; p < m_markerDesigns.Count; p++)
                {
                    MatOfInt nRotations = new MatOfInt(0);
                    int      id         = Marker.getMarkerId(canonicalMarkerImage, nRotations, m_markerDesigns [p]);
                    if (id != -1)
                    {
                        marker.id = id;

                        // Sort the points so they are always in the same order
                        // no matter the camera orientation (the equivalent of
                        // std::rotate over the four corners).
                        int          rot              = 4 - nRotations.toArray() [0];
                        List <Point> markerPointsList = marker.points.toList();
                        markerPointsList = markerPointsList.Skip(rot).Concat(markerPointsList.Take(rot)).ToList();

                        marker.points.fromList(markerPointsList);

                        goodMarkers.Add(marker);
                        // NOTE(review): no break here — if several designs
                        // matched the same image, the marker would be added
                        // more than once. Presumably designs are mutually
                        // exclusive; confirm.
                    }
                    nRotations.Dispose();
                }
            }

            // Refine marker corners using sub pixel accuracy
            if (goodMarkers.Count > 0)
            {
                // Gather all corners (4 per marker) into one flat list.
                List <Point> preciseCornersPoint = new List <Point> (4 * goodMarkers.Count);
                for (int i = 0; i < preciseCornersPoint.Capacity; i++)
                {
                    preciseCornersPoint.Add(new Point(0, 0));
                }

                for (int i = 0; i < goodMarkers.Count; i++)
                {
                    List <Point> markerPointsList = goodMarkers [i].points.toList();

                    for (int c = 0; c < 4; c++)
                    {
                        preciseCornersPoint [i * 4 + c] = markerPointsList [c];
                    }
                }

                MatOfPoint2f preciseCorners = new MatOfPoint2f(preciseCornersPoint.ToArray());

                TermCriteria termCriteria = new TermCriteria(TermCriteria.MAX_ITER | TermCriteria.EPS, 30, 0.01);
                Imgproc.cornerSubPix(grayscale, preciseCorners, new Size(5, 5), new Size(-1, -1), termCriteria);

                preciseCornersPoint = preciseCorners.toList();

                // Copy refined corners position back to markers.
                // BUGFIX: the original mutated a throw-away toList() copy and
                // never wrote it back, so the sub-pixel refinement was lost.
                for (int i = 0; i < goodMarkers.Count; i++)
                {
                    Marker marker = goodMarkers [i];

                    List <Point> markerPointsList = marker.points.toList();

                    for (int c = 0; c < 4; c++)
                    {
                        markerPointsList [c] = preciseCornersPoint [i * 4 + c];
                    }
                    marker.points.fromList(markerPointsList);
                }
                preciseCorners.Dispose();
            }

            detectedMarkers.Clear();
            detectedMarkers.AddRange(goodMarkers);
        }
Example #15
0
    /// <summary>
    /// Tries to detect the calibration pattern in <paramref name="mat"/>,
    /// accumulates the detected image points, and re-runs camera calibration
    /// over every frame collected so far.
    /// </summary>
    /// <param name="mat">Camera frame to search for the pattern.</param>
    /// <returns>The reprojection error, or -1.0 when detection failed.</returns>
    public virtual double Calibrate(Mat mat)
    {
        var repError    = -1.0;
        var points      = new MatOfPoint2f();
        var patternSize = new Size((int)SizeX, (int)SizeY);

        bool found;
        switch (boardType)
        {
        case BoardType.ChessBoard:
            found = Calib3d.findChessboardCorners(mat, patternSize, points, Calib3d.CALIB_CB_ADAPTIVE_THRESH | Calib3d.CALIB_CB_FAST_CHECK | Calib3d.CALIB_CB_NORMALIZE_IMAGE);
            break;

        case BoardType.CirclesGrid:
            found = Calib3d.findCirclesGrid(mat, patternSize, points, Calib3d.CALIB_CB_SYMMETRIC_GRID);
            break;

        case BoardType.AsymmetricCirclesGrid:
            found = Calib3d.findCirclesGrid(mat, patternSize, points, Calib3d.CALIB_CB_ASYMMETRIC_GRID);
            break;

        default:
            found = false;
            break;
        }

        // Guard clause: no pattern in this frame.
        if (!found)
        {
            Debug.Log("Invalid frame.");
            points?.Dispose();
            ErrorEvent?.Invoke("Invalid frame.");
            return(repError);
        }

        // Chessboard corners can be refined to sub-pixel accuracy.
        if (boardType == BoardType.ChessBoard)
        {
            Imgproc.cornerSubPix(mat, points, new Size(5, 5), new Size(-1, -1), new TermCriteria(TermCriteria.EPS + TermCriteria.COUNT, 30, 0.1));
        }
        imagePoints.Add(points);

        if (imagePoints.Count < 1)
        {
            Debug.Log("Not enough points for calibration.");
            ErrorEvent?.Invoke("Not enough points for calibration.");
            return(repError);
        }

        // One shared object-space corner set, repeated once per captured frame.
        var objectPoint = new MatOfPoint3f(new Mat(imagePoints[0].rows(), 1, CvType.CV_32FC3));
        CalcCorners(patternSize, squareSize, objectPoint);

        var objectPoints = new List <Mat>();
        for (int i = 0; i < imagePoints.Count; ++i)
        {
            objectPoints.Add(objectPoint);
        }
        repError = Calib3d.calibrateCamera(objectPoints, imagePoints, mat.size(), cameraMatrix, distCoeffs, rvecs, tvecs);
        CalibrationEvent?.Invoke(repError);
        objectPoint.Dispose();

        Debug.Log("-----calibrate-----");
        Debug.Log("repErr: " + repError);
        Debug.Log("camMatrix: " + cameraMatrix.dump());
        Debug.Log("distCoeffs: " + distCoeffs.dump());
        return(repError);
    }
Example #16
0
        /// <summary>
        /// Finds the candidates.
        /// </summary>
        /// <param name="contours">Contours.</param>
        /// <param name="detectedMarkers">Detected markers.</param>
        void findCandidates(List <MatOfPoint> contours, List <Marker> detectedMarkers)
        {
            MatOfPoint2f approxCurve = new MatOfPoint2f();

            List <Marker> possibleMarkers = new List <Marker> ();

            // For each contour, analyze if it is a parallelepiped likely to be the marker
            for (int i = 0; i < contours.Count; i++)
            {
                // Approximate to a polygon
                double eps = contours [i].toArray().Length * 0.05;
                Imgproc.approxPolyDP(new MatOfPoint2f(contours [i].toArray()), approxCurve, eps, true);

                Point[] approxCurveArray = approxCurve.toArray();

                // We interested only in polygons that contains only four points
                if (approxCurveArray.Length != 4)
                {
                    continue;
                }

                // And they have to be convex
                if (!Imgproc.isContourConvex(new MatOfPoint(approxCurveArray)))
                {
                    continue;
                }


                // Ensure that the distance between consecutive points is large enough
                float minDist = float.MaxValue;

                for (int p = 0; p < 4; p++)
                {
                    Point side = new Point(approxCurveArray [p].x - approxCurveArray [(p + 1) % 4].x, approxCurveArray [p].y - approxCurveArray [(p + 1) % 4].y);
                    float squaredSideLength = (float)side.dot(side);
                    minDist = Mathf.Min(minDist, squaredSideLength);
                }

                // Check that distance is not very small
                if (minDist < m_minContourLengthAllowed)
                {
                    continue;
                }

                // All tests are passed. Save marker candidate:
                Marker m = new Marker();
                m.points = new MatOfPoint();

                List <Point> markerPointsList = new List <Point> ();

                for (int p = 0; p < 4; p++)
                {
                    markerPointsList.Add(new Point(approxCurveArray [p].x, approxCurveArray [p].y));
                }



                // Sort the points in anti-clockwise order
                // Trace a line between the first and second point.
                // If the third point is at the right side, then the points are anti-clockwise
                Point v1 = new Point(markerPointsList [1].x - markerPointsList [0].x, markerPointsList [1].y - markerPointsList [0].y);
                Point v2 = new Point(markerPointsList [2].x - markerPointsList [0].x, markerPointsList [2].y - markerPointsList [0].y);

                double o = (v1.x * v2.y) - (v1.y * v2.x);

                if (o < 0.0)         //if the third point is in the left side, then sort in anti-clockwise order
                {
                    Point tmp = markerPointsList [1];
                    markerPointsList [1] = markerPointsList [3];
                    markerPointsList [3] = tmp;
                }

                m.points.fromList(markerPointsList);

                possibleMarkers.Add(m);
            }
            approxCurve.Dispose();


            //Debug.Log ("possibleMarkers " + possibleMarkers.Count);


            // Remove these elements which corners are too close to each other.
            // First detect candidates for removal:
            List <Point> tooNearCandidates = new List <Point> ();

            for (int i = 0; i < possibleMarkers.Count; i++)
            {
                Marker m1 = possibleMarkers [i];

                Point[] m1PointsArray = m1.points.toArray();

                //calculate the average distance of each corner to the nearest corner of the other marker candidate
                for (int j = i + 1; j < possibleMarkers.Count; j++)
                {
                    Marker m2 = possibleMarkers [j];

                    Point[] m2PointsArray = m2.points.toArray();

                    float distSquared = 0;

                    for (int c = 0; c < 4; c++)
                    {
                        Point v = new Point(m1PointsArray [c].x - m2PointsArray [c].x, m1PointsArray [c].y - m2PointsArray [c].y);
                        distSquared += (float)v.dot(v);
                    }

                    distSquared /= 4;

                    if (distSquared < 100)
                    {
                        tooNearCandidates.Add(new Point(i, j));
                    }
                }
            }

            // Mark for removal the element of the pair with smaller perimeter
            List <bool> removalMask = new List <bool> (possibleMarkers.Count);

            for (int i = 0; i < possibleMarkers.Count; i++)
            {
                removalMask.Add(false);
            }

            for (int i = 0; i < tooNearCandidates.Count; i++)
            {
                float p1 = perimeter(possibleMarkers [(int)tooNearCandidates [i].x].points);
                float p2 = perimeter(possibleMarkers [(int)tooNearCandidates [i].y].points);

                int removalIndex;
                if (p1 > p2)
                {
                    removalIndex = (int)tooNearCandidates [i].x;
                }
                else
                {
                    removalIndex = (int)tooNearCandidates [i].y;
                }

                removalMask [removalIndex] = true;
            }

            // Return candidates
            detectedMarkers.Clear();
            for (int i = 0; i < possibleMarkers.Count; i++)
            {
                if (!removalMask [i])
                {
                    detectedMarkers.Add(possibleMarkers [i]);
                }
            }
        }
예제 #17
0
        /// <summary>
        /// Unity coroutine that (re)initializes the webcam feed and the optical-flow
        /// working buffers. Yields every frame until the camera delivers a real
        /// frame, then allocates all frame-sized resources and sets initDone.
        /// </summary>
        private IEnumerator init()
        {
            if (webCamTexture != null)
            {
                // Re-initialization: stop the old camera and release every native
                // Mat from the previous session before allocating replacements.
                webCamTexture.Stop();
                initDone = false;

                rgbaMat.Dispose();

                matOpFlowThis.Dispose();
                matOpFlowPrev.Dispose();
                MOPcorners.Dispose();
                mMOP2fptsThis.Dispose();
                mMOP2fptsPrev.Dispose();
                mMOP2fptsSafe.Dispose();
                mMOBStatus.Dispose();
                mMOFerr.Dispose();
            }

            // Checks how many and which cameras are available on the device,
            // picking the first one whose facing matches shouldUseFrontFacing.
            for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++)
            {
                if (WebCamTexture.devices [cameraIndex].isFrontFacing == shouldUseFrontFacing)
                {
                    Debug.Log(cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing);

                    webCamDevice = WebCamTexture.devices [cameraIndex];

                    webCamTexture = new WebCamTexture(webCamDevice.name, width, height);


                    break;
                }
            }

            // Fall back to the first available camera when no device matched the
            // requested facing direction.
            if (webCamTexture == null)
            {
                webCamDevice  = WebCamTexture.devices [0];
                webCamTexture = new WebCamTexture(webCamDevice.name, width, height);
            }

            Debug.Log("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);



            // Starts the camera
            webCamTexture.Play();
            while (true)
            {
                //If you want to use webcamTexture.width and webcamTexture.height on iOS, you have to wait until webcamTexture.didUpdateThisFrame == 1, otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
                                                                #if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
                if (webCamTexture.width > 16 && webCamTexture.height > 16)
                {
                                                                #else
                if (webCamTexture.didUpdateThisFrame)
                {
                                                                                #if UNITY_IOS && !UNITY_EDITOR && UNITY_5_2
                    // On Unity 5.2 iOS the first updated frame may still report the
                    // 16x16 placeholder size; keep polling until the real size arrives.
                    while (webCamTexture.width <= 16)
                    {
                        webCamTexture.GetPixels32();
                        yield return(new WaitForEndOfFrame());
                    }
                                                                                #endif
                                                                #endif

                    Debug.Log("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
                    Debug.Log("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrongFacing " + webCamDevice.isFrontFacing);

                    // The real camera resolution is now known: allocate the pixel
                    // buffer, the working Mats and the display texture.
                    colors = new Color32[webCamTexture.width * webCamTexture.height];

                    rgbaMat = new Mat(webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);

                    matOpFlowThis = new Mat();
                    matOpFlowPrev = new Mat();
                    MOPcorners    = new MatOfPoint();
                    mMOP2fptsThis = new MatOfPoint2f();
                    mMOP2fptsPrev = new MatOfPoint2f();
                    mMOP2fptsSafe = new MatOfPoint2f();
                    mMOBStatus    = new MatOfByte();
                    mMOFerr       = new MatOfFloat();

                    texture = new Texture2D(webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);

                    gameObject.GetComponent <Renderer> ().material.mainTexture = texture;

                    updateLayout();

                    screenOrientation = Screen.orientation;
                    initDone          = true;

                    break;
                }
                else
                {
                    // No frame yet; yield and poll again next frame.
                    yield return(0);
                }
            }
        }
예제 #18
0
    /// <summary>
    /// Per-frame hand pose pipeline: runs skin/finger detection on the camera
    /// frame, positions the virtual plane at the arm centroid, derives the arm
    /// angle from an ellipse fit of the hand contour, and triggers a button
    /// press when the detected fingertip comes close enough to a button.
    /// </summary>
    /// <param name="rgbaMat">Current camera frame (RGBA); blurred in place.</param>
    private void HandPoseEstimationProcess(Mat rgbaMat)
    {
        // indication for making sphere coloring better
        Imgproc.GaussianBlur(rgbaMat, rgbaMat, new OpenCVForUnity.Size(3, 3), 1, 1);

        // NOTE(review): contours is fetched BEFORE ProcessSkin/ProcessFinger run,
        // so the emptiness check below sees the previous frame's contours —
        // confirm this ordering is intentional.
        List <MatOfPoint> contours = detector.GetContours();

        detector.ProcessSkin(rgbaMat);
        detector.ProcessFinger(rgbaMat);

        if (contours.Count <= 0) //TODO: Add contour size
        {
            HidePlane();
            return;
        }

        if (!isHandDetected)
        {
            //Debug.Log("Contour size:" + detector.HandContourSize);
            // Ignore blobs too small to plausibly be a hand.
            if (detector.HandContourSize < HAND_CONTOUR_AREA_THRESHOLD)
            {
                HidePlane();
                return;
            }
            // Centroid of the hand contour via image moments.
            // NOTE(review): m00 is zero for a degenerate contour, which would make
            // armCenter NaN — verify upstream guarantees a non-empty contour.
            Moments moment = Imgproc.moments(detector.HandContours[0]);
            armCenter.x = moment.m10 / moment.m00;
            armCenter.y = moment.m01 / moment.m00;

            // Project the centroid into the scene; assumes a 640x640 capture
            // (both axes divided by 640) — TODO confirm camera resolution.
            Ray res = Camera.main.ViewportPointToRay(new Vector3(((float)armCenter.x / 640.0f), ((float)armCenter.y / 640.0f), Camera.main.nearClipPlane));
            gameObject.transform.position = res.GetPoint(distanceFromCam);


            //Added without debugging!!!
            ShowPlane();
        }

        // Fit an ellipse to the hand contour to estimate the arm orientation.
        MatOfPoint2f elipseRes   = new MatOfPoint2f(detector.HandContours[0].toArray());
        RotatedRect  rotatedRect = Imgproc.fitEllipse(elipseRes);

        elipseRes.Dispose();
        armAngle          = rotatedRect.angle;
        detector.ArmAngle = armAngle;
        // Magnitude of the offset applied along the arm direction (scene units).
        double line_size = 0.14;

        //The gesture is not recognized at 90 degress!
        //if (armAngle >= 90 - deltaFor90Degrees && armAngle <= 90 + deltaFor90Degrees)
        //{
        //    gameObject.GetComponent<Renderer>().enabled = true;
        //    // enable all children (buttons) renderer
        //    Renderer[] renderChildren = gameObject.GetComponentsInChildren<Renderer>();
        //    for (int i = 0; i < renderChildren.Length; ++i)
        //    {
        //        renderChildren[i].GetComponent<Renderer>().enabled = true;
        //    }

        //    Moments moment1 = Imgproc.moments(detector.HandContours[0]);
        //    armCenter.x = moment1.m10 / moment1.m00;
        //    armCenter.y = moment1.m01 / moment1.m00;

        //    Vector3 offset = CalculateNewPositionFromPicture(armCenter);
        //    Vector3 newHandPosition = gameObject.transform.position + offset - previousOffset;
        //    newHandPosition.z = 4;
        //    gameObject.transform.position = newHandPosition;

        //    gameObject.GetComponent<Transform>().rotation = Quaternion.Euler(-25, 0, 0);

        //    return;
        //}
        //else if (armAngle == 0)
        //{
        //    gameObject.GetComponent<Renderer>().enabled = false;
        //    // disable all children (buttons) renderer
        //    Renderer[] renderChildren = gameObject.GetComponentsInChildren<Renderer>();
        //    for (int i = 0; i < renderChildren.Length; ++i)
        //    {
        //        renderChildren[i].GetComponent<Renderer>().enabled = false;
        //    }

        //}

        //Debug.Log("Arm angle: " + armAngle.ToString());

        // Map the ellipse angle into [-90, 90] and turn it into a 2D offset
        // vector along the arm direction.
        if (armAngle > 90)
        {
            armAngle -= 180;
            offset    = new Vector3((float)(-Math.Abs(line_size * Math.Sin((Math.PI / 180) * (armAngle)))),
                                    Math.Abs((float)(line_size * Math.Cos((Math.PI / 180) * (-armAngle)))), 0);
        }
        else
        {
            offset = new Vector3(Math.Abs((float)(line_size * Math.Sin((Math.PI / 180) * (-armAngle)))),
                                 Math.Abs((float)(line_size * Math.Cos((Math.PI / 180) * (-armAngle)))), 0);
        }

        // Flip the horizontal offset when the camera looks roughly backwards.
        Vector3 cameraRotation = (camera.GetComponent <Camera>().transform.rotation).eulerAngles;

        if (cameraRotation.y > 105 && cameraRotation.y < 260)
        {
            offset.x *= -1;
        }

        Point p = detector.NearestPoint;

        // (-1, -1) is the detector's sentinel for "no fingertip found".
        if (p.x == -1 || p.y == -1 || (detector.NearestPoint.x < 0) || !gameObject.GetComponent <Renderer>().enabled)
        {
            //cube.GetComponent<Renderer>().enabled = false;
            return;
        }

        // newPosition is the position of the finger
        Vector3 newPosition = CalculateNewPositionFromPicture(detector.NearestPoint);

        if (!didHitPlane)
        {
            return;
        }

        //cube.transform.position = newPosition;
        //cube.GetComponent<Renderer>().enabled = true;

        // first button
        Vector3 buttonPos1 = gameObject.transform.GetChild(0).position;

        // Compare fingertip and buttons in the z=0 plane only.
        newPosition.z = buttonPos1.z = 0;
        // second button
        Vector3 buttonPos2 = gameObject.transform.GetChild(1).position;
        // partical system - animation while pressing buttons

        // Hit tolerances around each button (scene units).
        double safeYDistance = 0.05;
        double safeXDistance = 1.0;

        if (sphereColor != null)
        {
            if ((Math.Abs(newPosition.y - buttonPos1.y) <= safeYDistance) && (Math.Abs(newPosition.x - buttonPos1.x) <= safeXDistance))
            {
                // pressing button. do something
                PressButton(Color.yellow, 0);
            }
            else if ((Math.Abs(newPosition.y - buttonPos2.y) <= safeYDistance) && Math.Abs(newPosition.x - buttonPos2.x) <= safeXDistance)
            {
                // pressing button. do something
                PressButton(Color.red, 1);
            }
        }
    }
        /// <summary>
        /// Segments skin-colored regions in the given RGBA frame and extracts the
        /// hand contour. The frame is downsampled 4x (two pyrDown calls), masked
        /// by the intersection of HSV, RGB and YCrCb skin ranges, and the resulting
        /// contours are filtered by area. Results are stored in the handContour,
        /// handContourSize and mContours members.
        /// </summary>
        /// <param name="rgbaImage">Camera frame in RGBA order; not modified.</param>
        public void ProcessSkin(Mat rgbaImage)
        {
            // Downsample twice -> working image is 1/4 of the original size.
            Imgproc.pyrDown(rgbaImage, mPyrDownMat);
            Imgproc.pyrDown(mPyrDownMat, mPyrDownMat);

            Imgproc.cvtColor(mPyrDownMat, mHsvMat, Imgproc.COLOR_RGB2HSV_FULL);
            Imgproc.cvtColor(mPyrDownMat, mRGBAMat, Imgproc.COLOR_RGB2RGBA);
            Imgproc.cvtColor(mPyrDownMat, mYCrCbMat, Imgproc.COLOR_RGB2YCrCb);

            // Skin is accepted only where all three color-space ranges agree.
            Core.inRange(mHsvMat, mLowerBoundHSV, mUpperBoundHSV, mMaskHSV);
            Core.inRange(mPyrDownMat, mLowerBoundRGB, mUpperBoundRGB, mMaskRGB);
            Core.inRange(mYCrCbMat, mLowerBoundYCrCb, mUpperBoundYCrCb, mMaskYCrCb);

            mMask = mMaskYCrCb & mMaskHSV & mMaskRGB;

            Imgproc.dilate(mMask, mDilatedMask, new Mat());

            List <MatOfPoint> contours = new List <MatOfPoint>();

            Imgproc.findContours(mDilatedMask, contours, mHierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);

            if (contours.Count == 0)
            {
                return;
            }

            // Find the two largest contours by area.
            double     maxArea              = 0;
            double     secondMaxArea        = 0;
            MatOfPoint biggestContour       = null;
            MatOfPoint secondBiggestContour = null;

            foreach (MatOfPoint each in contours)
            {
                double area = Imgproc.contourArea(each);
                if (area > maxArea)
                {
                    secondMaxArea        = maxArea;
                    secondBiggestContour = biggestContour;

                    maxArea        = area;
                    biggestContour = each;
                }
                else if (area > secondMaxArea)
                {
                    secondMaxArea        = area;
                    secondBiggestContour = each;
                }
            }

            handContourSize = maxArea;

            // BUGFIX: if every contour has zero area (degenerate one-pixel blobs),
            // no contour is ever selected above and biggestContour stays null,
            // which previously caused a NullReferenceException at toArray() below.
            if (biggestContour == null)
            {
                return;
            }

            // Prefer the second-largest contour when it is comparable in size
            // (>= 30% of the largest) and its average x lies left of the largest;
            // presumably this picks the hand over the forearm — TODO confirm.
            if ((secondBiggestContour != null) && (ComputeAVGXForContour(biggestContour) >= ComputeAVGXForContour(secondBiggestContour)) && (secondMaxArea >= 0.3 * maxArea))
            {
                biggestContour  = secondBiggestContour;
                handContourSize = secondMaxArea;
            }


            // Simplify the chosen contour with approxPolyDP.
            MatOfPoint2f contours_res2f = new MatOfPoint2f();

            MatOfPoint2f biggestContour2f = new MatOfPoint2f(biggestContour.toArray());

            Imgproc.approxPolyDP(biggestContour2f, contours_res2f, 3, true);
            handContour = new MatOfPoint(contours_res2f.toArray());
            contours_res2f.Dispose();
            biggestContour2f.Dispose();

            if (Imgproc.contourArea(handContour) > mMinContourArea * maxArea)
            {
                // Undo the two pyrDown steps (scale coordinates back up by 4).
                Core.multiply(handContour, new Scalar(4, 4), handContour);
            }

            // Filter contours by area and resize to fit the original image size
            mContours.Clear();

            foreach (MatOfPoint each in contours)
            {
                if (Imgproc.contourArea(each) > mMinContourArea * maxArea)
                {
                    Core.multiply(each, new Scalar(4, 4), each);
                    mContours.Add(each);
                }
            }
        }
예제 #20
0
		/// <summary>
		/// Recognizes the markers: identifies each candidate against the marker
		/// design, reorders its corners into a canonical order, and refines the
		/// corner positions to sub-pixel accuracy. On return detectedMarkers
		/// contains only the successfully identified markers.
		/// </summary>
		/// <param name="grayscale">Grayscale input image.</param>
		/// <param name="detectedMarkers">Marker candidates; replaced in place with the recognized markers.</param>
		void recognizeMarkers (Mat grayscale, List<Marker> detectedMarkers)
		{
				List<Marker> goodMarkers = new List<Marker> ();

				// Identify the markers
				for (int i = 0; i < detectedMarkers.Count; i++) {
						Marker marker = detectedMarkers [i];

						// Find the perspective transformation that brings current marker to rectangular form
						Mat markerTransform = Imgproc.getPerspectiveTransform (new MatOfPoint2f (marker.points.toArray ()), m_markerCorners2d);

						// Transform image to get a canonical marker image
						Imgproc.warpPerspective (grayscale, canonicalMarkerImage, markerTransform, markerSize);

						MatOfInt nRotations = new MatOfInt (0);
						int id = Marker.getMarkerId (canonicalMarkerImage, nRotations, m_markerDesign);
						if (id != -1) {
								marker.id = id;

								// Sort the points so that they are always in the same order
								// no matter the camera orientation: rotate the corner list
								// by the rotation count reported by getMarkerId.
								List<Point> MarkerPointsList = marker.points.toList ();

								MarkerPointsList = MarkerPointsList.Skip (4 - nRotations.toArray () [0]).Concat (MarkerPointsList.Take (4 - nRotations.toArray () [0])).ToList ();

								marker.points.fromList (MarkerPointsList);

								goodMarkers.Add (marker);
						}
						nRotations.Dispose ();
				}

				// Refine marker corners using sub pixel accuracy
				if (goodMarkers.Count > 0) {
						// Gather all corners of all good markers into one flat list.
						List<Point> preciseCornersPoint = new List<Point> (4 * goodMarkers.Count);
						for (int i = 0; i < preciseCornersPoint.Capacity; i++) {
								preciseCornersPoint.Add (new Point (0, 0));
						}

						for (int i = 0; i < goodMarkers.Count; i++) {
								Marker marker = goodMarkers [i];

								List<Point> markerPointsList = marker.points.toList ();

								for (int c = 0; c < 4; c++) {
										preciseCornersPoint [i * 4 + c] = markerPointsList [c];
								}
						}

						MatOfPoint2f preciseCorners = new MatOfPoint2f (preciseCornersPoint.ToArray ());

						TermCriteria termCriteria = new TermCriteria (TermCriteria.MAX_ITER | TermCriteria.EPS, 30, 0.01);
						Imgproc.cornerSubPix (grayscale, preciseCorners, new Size (5, 5), new Size (-1, -1), termCriteria);

						preciseCornersPoint = preciseCorners.toList ();

						// Copy refined corners position back to markers
						for (int i = 0; i < goodMarkers.Count; i++) {
								Marker marker = goodMarkers [i];

								List<Point> markerPointsList = marker.points.toList ();

								for (int c = 0; c < 4; c++) {
										markerPointsList [c] = preciseCornersPoint [i * 4 + c];
								}

								// BUGFIX: toList() returns a detached copy, so the refined
								// corners must be written back into the marker's point
								// matrix explicitly — previously the sub-pixel refinement
								// was computed and then silently discarded.
								marker.points.fromList (markerPointsList);
						}
						preciseCorners.Dispose ();
				}

				detectedMarkers.Clear ();
				detectedMarkers.AddRange (goodMarkers);

		}
        /// <summary>
        /// Detects calibration features in the given frame and, if found, adds the
        /// frame to the accumulated observations and (re)runs camera calibration.
        /// Takes ownership of frameMat: it is stored in allImgs on success and
        /// disposed on failure.
        /// </summary>
        /// <param name="frameMat">Candidate calibration frame.</param>
        /// <returns>Reprojection error of the calibration, or -1 when the frame was invalid.</returns>
        private double CaptureFrame(Mat frameMat)
        {
            double repErr = -1;

            switch (markerType)
            {
            default:
            case MarkerType.ChArUcoBoard:
                // Detect ArUco markers, optionally refining with the rejected
                // candidates against the known board layout.
                List <Mat> corners = new List <Mat>();
                Mat        ids     = new Mat();
                Aruco.detectMarkers(frameMat, dictionary, corners, ids, detectorParams, rejectedCorners, camMatrix, distCoeffs);

                if (refineMarkerDetection)
                {
                    Aruco.refineDetectedMarkers(frameMat, charucoBoard, corners, ids, rejectedCorners, camMatrix, distCoeffs, 10f, 3f, true, recoveredIdxs, detectorParams);
                }

                if (ids.total() > 0)
                {
                    Debug.Log("Frame captured.");

                    // Keep the detections and the frame for calibration; ownership
                    // of these Mats transfers to the accumulator lists.
                    allCorners.Add(corners);
                    allIds.Add(ids);
                    allImgs.Add(frameMat);
                }
                else
                {
                    Debug.Log("Invalid frame.");

                    // Nothing detected: release the frame and all temporaries.
                    frameMat.Dispose();
                    if (ids != null)
                    {
                        ids.Dispose();
                    }
                    foreach (var item in corners)
                    {
                        item.Dispose();
                    }
                    corners.Clear();

                    return(-1);
                }

                // calibrate camera using aruco markers
                //double arucoRepErr = CalibrateCameraAruco (allCorners, allIds, charucoBoard, frameMat.size(), camMatrix, distCoeffs, rvecs, tvecs, calibrationFlags);
                //Debug.Log ("arucoRepErr: " + arucoRepErr);

                // calibrate camera using charuco
                // NOTE(review): calibrationFlags is passed twice here — the second
                // occurrence looks like it should be a different argument; verify
                // against CalibrateCameraCharuco's signature.
                repErr = CalibrateCameraCharuco(allCorners, allIds, charucoBoard, frameMat.size(), camMatrix, distCoeffs, rvecs, tvecs, calibrationFlags, calibrationFlags);

                break;

            case MarkerType.ChessBoard:
            case MarkerType.CirclesGlid:
            case MarkerType.AsymmetricCirclesGlid:

                MatOfPoint2f points      = new MatOfPoint2f();
                Size         patternSize = new Size((int)squaresX, (int)squaresY);

                // Locate the pattern's feature points with the detector matching
                // the configured marker type.
                bool found = false;
                switch (markerType)
                {
                default:
                case MarkerType.ChessBoard:
                    found = Calib3d.findChessboardCorners(frameMat, patternSize, points, Calib3d.CALIB_CB_ADAPTIVE_THRESH | Calib3d.CALIB_CB_FAST_CHECK | Calib3d.CALIB_CB_NORMALIZE_IMAGE);
                    break;

                case MarkerType.CirclesGlid:
                    found = Calib3d.findCirclesGrid(frameMat, patternSize, points, Calib3d.CALIB_CB_SYMMETRIC_GRID);
                    break;

                case MarkerType.AsymmetricCirclesGlid:
                    found = Calib3d.findCirclesGrid(frameMat, patternSize, points, Calib3d.CALIB_CB_ASYMMETRIC_GRID);
                    break;
                }

                if (found)
                {
                    Debug.Log("Frame captured.");
                    // Chessboard corners benefit from sub-pixel refinement.
                    if (markerType == MarkerType.ChessBoard)
                    {
                        Imgproc.cornerSubPix(frameMat, points, new Size(5, 5), new Size(-1, -1), new TermCriteria(TermCriteria.EPS + TermCriteria.COUNT, 30, 0.1));
                    }

                    imagePoints.Add(points);
                    allImgs.Add(frameMat);
                }
                else
                {
                    Debug.Log("Invalid frame.");
                    frameMat.Dispose();
                    if (points != null)
                    {
                        points.Dispose();
                    }
                    return(-1);
                }

                if (imagePoints.Count < 1)
                {
                    Debug.Log("Not enough points for calibration.");
                    repErr = -1;
                }
                else
                {
                    // Build the 3D object points once; the same pattern geometry is
                    // reused for every captured view.
                    MatOfPoint3f objectPoint = new MatOfPoint3f(new Mat(imagePoints[0].rows(), 1, CvType.CV_32FC3));
                    CalcChessboardCorners(patternSize, squareSize, objectPoint, markerType);

                    List <Mat> objectPoints = new List <Mat>();
                    for (int i = 0; i < imagePoints.Count; ++i)
                    {
                        objectPoints.Add(objectPoint);
                    }

                    repErr = Calib3d.calibrateCamera(objectPoints, imagePoints, frameMat.size(), camMatrix, distCoeffs, rvecs, tvecs, calibrationFlags);
                    objectPoint.Dispose();
                }

                break;
            }

            Debug.Log("repErr: " + repErr);
            Debug.Log("camMatrix: " + camMatrix.dump());
            Debug.Log("distCoeffs: " + distCoeffs.dump());

            return(repErr);
        }
예제 #22
0
		/// <summary>
		/// Finds the candidates: scans the supplied contours for convex
		/// quadrilaterals that could be markers, orders their corners
		/// anti-clockwise, discards near-duplicates, and stores the survivors
		/// in detectedMarkers.
		/// </summary>
		/// <param name="contours">Contours.</param>
		/// <param name="detectedMarkers">Detected markers (cleared and refilled).</param>
		void findCandidates (List<MatOfPoint> contours, List<Marker> detectedMarkers)
		{
			MatOfPoint2f approxCurve = new MatOfPoint2f ();

			List<Marker> candidates = new List<Marker> ();

			// For each contour, analyze if it is a parallelepiped likely to be the marker
			foreach (MatOfPoint contour in contours)
			{
				// Approximate the contour by a polygon (tolerance: 5% of point count)
				double eps = contour.toArray ().Length * 0.05;
				Imgproc.approxPolyDP (new MatOfPoint2f (contour.toArray ()), approxCurve, eps, true);

				Point[] poly = approxCurve.toArray ();

				// Only quadrilaterals are of interest
				if (poly.Length != 4)
				{
					continue;
				}

				// ... and they must be convex
				if (!Imgproc.isContourConvex (new MatOfPoint (poly)))
				{
					continue;
				}

				// Reject quads whose shortest side is too small
				// (comparison is done on squared lengths)
				float minSquaredSide = float.MaxValue;
				for (int p = 0; p < 4; p++)
				{
					Point side = new Point (poly [p].x - poly [(p + 1) % 4].x, poly [p].y - poly [(p + 1) % 4].y);
					minSquaredSide = Mathf.Min (minSquaredSide, (float)side.dot (side));
				}

				if (minSquaredSide < m_minContourLengthAllowed)
				{
					continue;
				}

				// All tests passed: record the candidate
				Marker m = new Marker ();
				m.points = new MatOfPoint ();

				List<Point> quadCorners = new List<Point> ();
				for (int p = 0; p < 4; p++)
				{
					quadCorners.Add (new Point (poly [p].x, poly [p].y));
				}

				// Sort the points anti-clockwise: trace a line from the first to
				// the second point; if the third point lies on the left side,
				// swap corners 1 and 3.
				Point v1 = new Point (quadCorners [1].x - quadCorners [0].x, quadCorners [1].y - quadCorners [0].y);
				Point v2 = new Point (quadCorners [2].x - quadCorners [0].x, quadCorners [2].y - quadCorners [0].y);

				if ((v1.x * v2.y) - (v1.y * v2.x) < 0.0)
				{
					Point tmp = quadCorners [1];
					quadCorners [1] = quadCorners [3];
					quadCorners [3] = tmp;
				}

				m.points.fromList (quadCorners);
				candidates.Add (m);
			}
			approxCurve.Dispose ();

			// Detect pairs of candidates whose corners are too close to each
			// other; each pair's indices are packed into a Point (x, y).
			List<Point> nearPairs = new List<Point> ();
			for (int i = 0; i < candidates.Count; i++)
			{
				Point[] ptsA = candidates [i].points.toArray ();

				for (int j = i + 1; j < candidates.Count; j++)
				{
					Point[] ptsB = candidates [j].points.toArray ();

					// Average squared distance between corresponding corners
					float avgSqDist = 0;
					for (int c = 0; c < 4; c++)
					{
						Point d = new Point (ptsA [c].x - ptsB [c].x, ptsA [c].y - ptsB [c].y);
						avgSqDist += (float)d.dot (d);
					}
					avgSqDist /= 4;

					if (avgSqDist < 100)
					{
						nearPairs.Add (new Point (i, j));
					}
				}
			}

			// For each too-near pair, mark one member for removal based on the
			// perimeter comparison below.
			List<bool> removal = new List<bool> (candidates.Count);
			for (int i = 0; i < candidates.Count; i++)
			{
				removal.Add (false);
			}

			for (int i = 0; i < nearPairs.Count; i++)
			{
				float p1 = perimeter (candidates [(int)nearPairs [i].x].points);
				float p2 = perimeter (candidates [(int)nearPairs [i].y].points);

				int removalIndex = (p1 > p2) ? (int)nearPairs [i].x : (int)nearPairs [i].y;
				removal [removalIndex] = true;
			}

			// Return the surviving candidates
			detectedMarkers.Clear ();
			for (int i = 0; i < candidates.Count; i++)
			{
				if (!removal [i])
				{
					detectedMarkers.Add (candidates [i]);
				}
			}
		}