public void FindARMarker(Mat imgMat)
        {
            // One tracking-info instance is reused across every pattern this frame.
            PatternTrackingInfo trackingInfo = new PatternTrackingInfo();

            foreach (string patternName in _Patterns.Keys)
            {
                bool found = _PatternDetectors[patternName].findPattern(imgMat, trackingInfo);

                if (!found)
                {
                    continue;
                }

                // Estimate the marker pose (marker -> camera coordinate system).
                trackingInfo.computePose(_Patterns[patternName], _CamMatrix, _DistCoeffs);
                Matrix4x4 markerToCamera = trackingInfo.pose3d;

                // Compose the world transform for the AR object: camera-to-world,
                // per-pattern scale, then the Y/Z inversions that convert the
                // OpenCV coordinate convention into Unity's.
                Matrix4x4 ARM = ARCamera.transform.localToWorldMatrix
                                * _ARObjectScaleMatrix[patternName]
                                * _InvertYMat
                                * markerToCamera
                                * _InvertZMat;

                GameObject ARGameObject;
                if (!_ARObjects.TryGetValue(patternName, out ARGameObject))
                {
                    // Lazily instantiate one AR object per pattern and texture it
                    // with that pattern's image.
                    ARGameObject            = GameObject.Instantiate(ARObjectPrefab, Vector3.zero, Quaternion.identity);
                    ARGameObject.name       = ARObjectPrefab.name + "_" + patternName;
                    _ARObjects[patternName] = ARGameObject;

                    Material material = ARGameObject.GetComponentInChildren <MeshRenderer>().material;
                    material.mainTexture = _TextureImages[patternName];
                }

                ARUtils.SetTransformFromMatrix(ARGameObject.transform, ref ARM);
                ARGameObject.transform.Rotate(ARObjLocalRotEuler);
            }
        }
// Ejemplo n.º 2
// 0
        void FindARMarker(Mat imgMat, Matrix4x4 cameraToWorldMatrix)
        {
            PatternTrackingInfo patternTrackingInfo = new PatternTrackingInfo();

            foreach (string patternName in _Patterns.Keys)
            {
                bool patternFound = _PatternDetectors[patternName].findPattern(imgMat, patternTrackingInfo);
                // Debug.Log ("PatternFound " + patternFound);

                if (patternFound)
                {
                    patternTrackingInfo.computePose(_Patterns[patternName], _CamMatrix, _DistCoeffs);

                    Matrix4x4 transformationM = patternTrackingInfo.pose3d; // Marker to Camera Coordinate System Convert Matrix

                    Matrix4x4 scaleMat = _ARObjectScaleMatrix[patternName];

                    // _ARObjectTransformMatrix[patternName] = cameraToWorldMatrix * scaleMat * _InvertYMat * transformationM * _InvertZMat;
                    _ARObjectTransformMatrix[patternName] = cameraToWorldMatrix * scaleMat * _InvertZMat * _InvertYMat * transformationM * _InvertZMat;

                    _ARObjectHasUpdate[patternName] = true;
                }
                else
                {
                    _ARObjectHasUpdate[patternName] = false;
                }
            }
        }
// Ejemplo n.º 3
// 0
        // Use this for initialization
        void Start()
        {
            GameObject cameraAR = GameObject.Find("ARCamera");

            ARCamera = cameraAR.GetComponent <Camera>();
            markerSettingsMarkerLessActual = null;
            markerSettingsMarkerActual     = null;

            patternTrackingInfo = new PatternTrackingInfo();
            markerSettingsList  = markerList.transform.GetComponentsInChildren <MarkerSettings>();

            if (markerSettingsList.Length == 0)
            {
                existeObjetoDetectar = false;
            }

            webCamTextureToMatHelper = gameObject.GetComponent <WebCamTextureToMatHelper>();
#if UNITY_ANDROID && !UNITY_EDITOR
            // Avoids the front camera low light issue that occurs in only some Android devices (e.g. Google Pixel, Pixel2).
            webCamTextureToMatHelper.avoidAndroidFrontCameraLowLightIssue = true;
#endif
            webCamTextureToMatHelper.Initialize();

            dictionaryAruco = Aruco.getPredefinedDictionary(PropertiesModel.DictionaryId);
            cornersAruco    = new List <Mat>();
            idsAruco        = new Mat();
        }
// Ejemplo n.º 4
// 0
        static void VideoRun()
        {
            var cap    = new VideoCapture(@"rabit3V.mp4");
            Mat MARKER = new Mat("rabits.jpg");

            Cv2.Resize(MARKER, MARKER, new OpenCvSharp.Size(500, 500));
            Pattern             pattern             = new Pattern();
            PatternTrackingInfo patternTrackinginfo = new PatternTrackingInfo();
            PatternDetector     detector            = new PatternDetector(true);

            detector.buildPatternFromImage(MARKER, pattern);
            detector.train();



            int sleepTime = 1;

            using (Window window = new Window("capture"))
                using (Mat image = new Mat())
                {
                    while (true)
                    {
                        cap.Read(image);
                        if (image.Empty())
                        {
                            break;
                        }
                        var img = image.Clone();

                        if (detector.findPattern(img, patternTrackinginfo))
                        {
                            patternTrackinginfo.computePose(pattern);

                            var temp = patternTrackinginfo.campos.Get <Point3d>(0);

                            string camposInfo = "x:" + Math.Round(temp.X, 5) + "\ny:" + Math.Round(temp.Y, 5) + "\nz:" + Math.Round(temp.Z, 5);
                            Cv2.PutText(img,
                                        camposInfo,
                                        new OpenCvSharp.Point(0, 80),
                                        HersheyFonts.HersheyComplex,
                                        0.5,
                                        Scalar.White);

                            for (int i = 0; i < patternTrackinginfo.points2d.Rows; i++)
                            {
                                //Console.WriteLine(" x"+(int)patternTrackinginfo.points2d.Get<Point2d>(i).X+" "+ (int)patternTrackinginfo.points2d.Get<Point2d>(i).Y);
                                Cv2.Circle(img, (int)patternTrackinginfo.points2d.Get <Point2d>(i).X, (int)patternTrackinginfo.points2d.Get <Point2d>(i).Y, 5, Scalar.Black, 3);
                            }
                        }


                        window.ShowImage(img);
                        Cv2.WaitKey(sleepTime);
                        img.Release();
                    }
                }
            cap.Release();
        }
// Ejemplo n.º 5
// 0
        // Use this for initialization
        void Start()
        {
            displayAxesToggle.isOn = displayAxes;
            axes.SetActive(displayAxes);
            displayCubeToggle.isOn = displayCube;
            cube.SetActive(displayCube);
            displayVideoToggle.isOn = displayVideo;
            video.SetActive(displayVideo);

            ARGameObject.gameObject.SetActive(false);

            webCamTextureToMatHelper = gameObject.GetComponent <WebCamTextureToMatHelper>();

            patternMat = Imgcodecs.imread(Application.persistentDataPath + "/patternImg.jpg");

            if (patternMat.total() == 0)
            {
                OnCapturePatternButtonClick();
            }
            else
            {
                Imgproc.cvtColor(patternMat, patternMat, Imgproc.COLOR_BGR2RGB);

                Texture2D patternTexture = new Texture2D(patternMat.width(), patternMat.height(), TextureFormat.RGBA32, false);

                //To reuse mat, set the flipAfter flag to true.
                Utils.matToTexture2D(patternMat, patternTexture, true, 0, true);
                Debug.Log("patternMat dst ToString " + patternMat.ToString());

                patternRawImage.texture = patternTexture;
                patternRawImage.rectTransform.localScale = new Vector3(1.0f, (float)patternMat.height() / (float)patternMat.width(), 1.0f);

                pattern             = new Pattern();
                patternTrackingInfo = new PatternTrackingInfo();

                patternDetector = new PatternDetector(null, null, null, true);

                patternDetector.buildPatternFromImage(patternMat, pattern);
                patternDetector.train(pattern);


                webCamTextureToMatHelper.Initialize();
            }
        }
// Ejemplo n.º 6
// 0
        static void imgRun()
        {
            string path   = @"D:\Code_Resource\IMAGE\";
            Mat    img    = new Mat(path + "rabit3.jpg");
            Mat    MARKER = new Mat(path + "rabits.jpg");

            Cv2.Resize(MARKER, MARKER, new OpenCvSharp.Size(500, 500));
            Pattern             pattern             = new Pattern();
            PatternTrackingInfo patternTrackinginfo = new PatternTrackingInfo();
            PatternDetector     detector            = new PatternDetector(true);

            detector.buildPatternFromImage(MARKER, pattern);
            detector.train();
            if (detector.findPattern(img, patternTrackinginfo))
            {
                patternTrackinginfo.computePose(pattern);

                var temp = patternTrackinginfo.campos.Get <Point3d>(0);

                string camposInfo = "x:" + Math.Round(temp.X, 5) + "y:" + Math.Round(temp.Y, 5) + "z:" + Math.Round(temp.Z, 5);
                Cv2.PutText(img,
                            camposInfo,
                            new OpenCvSharp.Point(0, 80),
                            HersheyFonts.HersheyComplex,
                            0.5,
                            Scalar.White);

                for (int i = 0; i < 4; i++)
                {
                    Cv2.Circle(img, (int)patternTrackinginfo.points2d.Get <Point2d>(i).X, (int)patternTrackinginfo.points2d.Get <Point2d>(i).Y, 5, Scalar.Black, 3);
                }
            }
            Cv2.ImShow("result", img);

            Cv2.WaitKey(100000);
            img.Release();
        }
        // Use this for initialization.
        // One-shot pipeline: converts the pattern and query textures to Mats,
        // builds a synthetic pinhole camera model from the image size, adjusts
        // the Unity camera FOV to match it, trains a pattern detector, and (if
        // the pattern is found) poses either the AR object or the AR camera.
        void Start()
        {
            // Convert the pattern texture into an RGBA OpenCV Mat.
            Mat patternMat = new Mat(patternTexture.height, patternTexture.width, CvType.CV_8UC4);

            Utils.texture2DToMat(patternTexture, patternMat);
            Debug.Log("patternMat dst ToString " + patternMat.ToString());

            // Show the pattern preview scaled to its own aspect ratio.
            patternRawImage.texture = patternTexture;
            patternRawImage.rectTransform.localScale = new Vector3(1.0f, (float)patternMat.height() / (float)patternMat.width(), 1.0f);


            // Convert the query image into an RGBA Mat as well.
            Mat imgMat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC4);

            Utils.texture2DToMat(imgTexture, imgMat);
            Debug.Log("imgMat dst ToString " + imgMat.ToString());

            // Size the display quad to the image dimensions (1 unit per pixel).
            gameObject.transform.localScale = new Vector3(imgTexture.width, imgTexture.height, 1);
            Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);


            float width  = imgMat.width();
            float height = imgMat.height();

            // Fit the orthographic background camera to the screen; when the image
            // is wider than the screen, scale the calibration image size to match.
            float imageSizeScale = 1.0f;
            float widthScale     = (float)Screen.width / width;
            float heightScale    = (float)Screen.height / height;

            if (widthScale < heightScale)
            {
                Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
                imageSizeScale = (float)Screen.height / (float)Screen.width;
            }
            else
            {
                Camera.main.orthographicSize = height / 2;
            }


            //set cameraparam
            // Synthetic pinhole intrinsics: focal length = max image dimension,
            // principal point at the image center.
            int    max_d     = (int)Mathf.Max(width, height);
            double fx        = max_d;
            double fy        = max_d;
            double cx        = width / 2.0f;
            double cy        = height / 2.0f;
            Mat    camMatrix = new Mat(3, 3, CvType.CV_64FC1);

            camMatrix.put(0, 0, fx);
            camMatrix.put(0, 1, 0);
            camMatrix.put(0, 2, cx);
            camMatrix.put(1, 0, 0);
            camMatrix.put(1, 1, fy);
            camMatrix.put(1, 2, cy);
            camMatrix.put(2, 0, 0);
            camMatrix.put(2, 1, 0);
            camMatrix.put(2, 2, 1.0f);
            Debug.Log("camMatrix " + camMatrix.dump());


            // Assume an undistorted (ideal) camera: all distortion coefficients zero.
            MatOfDouble distCoeffs = new MatOfDouble(0, 0, 0, 0);

            Debug.Log("distCoeffs " + distCoeffs.dump());


            //calibration camera
            // Derive FOV, focal length, etc. from the synthetic intrinsics.
            Size   imageSize      = new Size(width * imageSizeScale, height * imageSizeScale);
            double apertureWidth  = 0;
            double apertureHeight = 0;

            double[] fovx           = new double[1];
            double[] fovy           = new double[1];
            double[] focalLength    = new double[1];
            Point    principalPoint = new Point(0, 0);

            double[] aspectratio = new double[1];

            Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);

            Debug.Log("imageSize " + imageSize.ToString());
            Debug.Log("apertureWidth " + apertureWidth);
            Debug.Log("apertureHeight " + apertureHeight);
            Debug.Log("fovx " + fovx [0]);
            Debug.Log("fovy " + fovy [0]);
            Debug.Log("focalLength " + focalLength [0]);
            Debug.Log("principalPoint " + principalPoint.ToString());
            Debug.Log("aspectratio " + aspectratio [0]);


            //To convert the difference of the FOV value of the OpenCV and Unity.
            // Correction factor for a principal point that is off-center.
            double fovXScale = (2.0 * Mathf.Atan((float)(imageSize.width / (2.0 * fx)))) / (Mathf.Atan2((float)cx, (float)fx) + Mathf.Atan2((float)(imageSize.width - cx), (float)fx));
            double fovYScale = (2.0 * Mathf.Atan((float)(imageSize.height / (2.0 * fy)))) / (Mathf.Atan2((float)cy, (float)fy) + Mathf.Atan2((float)(imageSize.height - cy), (float)fy));

            Debug.Log("fovXScale " + fovXScale);
            Debug.Log("fovYScale " + fovYScale);


            //Adjust Unity Camera FOV https://github.com/opencv/opencv/commit/8ed1945ccd52501f5ab22bdec6aa1f91f1e2cfd4
            if (widthScale < heightScale)
            {
                ARCamera.fieldOfView = (float)(fovx [0] * fovXScale);
            }
            else
            {
                ARCamera.fieldOfView = (float)(fovy [0] * fovYScale);
            }



            //Learning the feature points of the pattern image.
            Pattern             pattern             = new Pattern();
            PatternTrackingInfo patternTrackingInfo = new PatternTrackingInfo();

            PatternDetector patternDetector = new PatternDetector(null, null, null, true);

            patternDetector.buildPatternFromImage(patternMat, pattern);
            patternDetector.train(pattern);



            bool patternFound = patternDetector.findPattern(imgMat, patternTrackingInfo);

            Debug.Log("patternFound " + patternFound);

            if (patternFound)
            {
                // Marker -> camera transform from the solved pose.
                patternTrackingInfo.computePose(pattern, camMatrix, distCoeffs);

                Matrix4x4 transformationM = patternTrackingInfo.pose3d;
                Debug.Log("transformationM " + transformationM.ToString());

                // Axis-inversion matrices to map OpenCV's coordinate convention
                // into Unity's left-handed system.
                Matrix4x4 invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));
                Debug.Log("invertZM " + invertZM.ToString());

                Matrix4x4 invertYM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1));
                Debug.Log("invertYM " + invertYM.ToString());


                if (shouldMoveARCamera)
                {
                    // Keep the AR object fixed and move the camera instead:
                    // uses the inverse of the marker pose.
                    Matrix4x4 ARM = ARGameObject.transform.localToWorldMatrix * invertZM * transformationM.inverse * invertYM;
                    Debug.Log("ARM " + ARM.ToString());

                    ARUtils.SetTransformFromMatrix(ARCamera.transform, ref ARM);
                }
                else
                {
                    // Keep the camera fixed and pose the AR object in world space.
                    Matrix4x4 ARM = ARCamera.transform.localToWorldMatrix * invertYM * transformationM * invertZM;
                    Debug.Log("ARM " + ARM.ToString());

                    ARUtils.SetTransformFromMatrix(ARGameObject.transform, ref ARM);
                }
            }

            // Display the (possibly annotated) query image on this object's renderer.
            Texture2D texture = new Texture2D(imgMat.cols(), imgMat.rows(), TextureFormat.RGBA32, false);

            Utils.matToTexture2D(imgMat, texture);

            gameObject.GetComponent <Renderer> ().material.mainTexture = texture;
        }
// Ejemplo n.º 8
// 0
    /// <summary>
    /// Finds the pattern in the given image by feature matching and homography
    /// estimation, optionally refining the homography on a warped image.
    /// On success, <paramref name="info"/> holds the final homography and the
    /// transformed 2D contour of the pattern.
    /// </summary>
    /// <returns><c>true</c>, if pattern was found, <c>false</c> otherwise.</returns>
    /// <param name="image">Image.</param>
    /// <param name="info">Info. Receives homography and 2D contour points on success.</param>
    public bool findPattern(Mat image, PatternTrackingInfo info)
    {
        // Convert input image to gray
        getGray(image, m_grayImg);

        // Extract feature points from input gray image
        extractFeatures(m_grayImg, m_queryKeypoints, m_queryDescriptors);

        // Get matches with current pattern
        getMatches(m_queryDescriptors, m_matches);

//      (GameObject.Find ("DebugHelpers").GetComponent<DebugHelpers> ()).showMat (DebugHelpers.getMatchesImage (m_grayImg, m_pattern.grayImg, m_queryKeypoints, m_pattern.keypoints, m_matches, 100));


        // Find homography transformation and detect good matches
        bool homographyFound = refineMatchesWithHomography(
            m_queryKeypoints,
            m_pattern.keypoints,
            homographyReprojectionThreshold,
            m_matches,
            m_roughHomography);

        if (homographyFound)
        {
//      (GameObject.Find ("DebugHelpers").GetComponent<DebugHelpers> ()).showMat (DebugHelpers.getMatchesImage (m_grayImg, m_pattern.grayImg, m_queryKeypoints, m_pattern.keypoints, m_matches, 100));

            // If homography refinement enabled improve found transformation
            if (enableHomographyRefinement)
            {
                // Warp image using found homography
                // (WARP_INVERSE_MAP: m_roughHomography maps pattern -> query,
                // so the inverse warp brings the query into pattern space).
                Imgproc.warpPerspective(m_grayImg, m_warpedImg, m_roughHomography, m_pattern.size, Imgproc.WARP_INVERSE_MAP | Imgproc.INTER_CUBIC);


                //(GameObject.Find ("DebugHelpers").GetComponent<DebugHelpers> ()).showMat(m_warpedImg);

                // Get refined matches:
                using (MatOfKeyPoint warpedKeypoints = new MatOfKeyPoint())
                    using (MatOfDMatch refinedMatches = new MatOfDMatch()) {
                        // Detect features on warped image
                        // (note: m_queryDescriptors is deliberately reused as the
                        // output buffer here, overwriting the originals).
                        extractFeatures(m_warpedImg, warpedKeypoints, m_queryDescriptors);

                        // Match with pattern
                        getMatches(m_queryDescriptors, refinedMatches);

                        // Estimate new refinement homography
                        homographyFound = refineMatchesWithHomography(
                            warpedKeypoints,
                            m_pattern.keypoints,
                            homographyReprojectionThreshold,
                            refinedMatches,
                            m_refinedHomography);
                    }

                //(GameObject.Find ("DebugHelpers").GetComponent<DebugHelpers> ()).showMat(DebugHelpers.getMatchesImage(m_warpedImg, m_pattern.grayImg, warpedKeypoints, m_pattern.keypoints, refinedMatches, 100));

                // Get a result homography as result of matrix product of refined and rough homographies:
//                              info.homography = m_roughHomography * m_refinedHomography;
                // (gemm performs the same matrix product in OpenCV terms.)
                Core.gemm(m_roughHomography, m_refinedHomography, 1, new Mat(), 0, info.homography);

//              Debug.Log ("info.homography " + info.homography.ToString ());

                // Transform contour with rough homography

//                              Core.perspectiveTransform (m_pattern.points2d, info.points2d, m_roughHomography);
//                              info.draw2dContour (image, new Scalar (200, 0, 0, 255));


                // Transform contour with precise homography

                Core.perspectiveTransform(m_pattern.points2d, info.points2d, info.homography);

//              info.draw2dContour (image, new Scalar (0, 200, 0, 255));
            }
            else
            {
                // Refinement disabled: the rough homography is the final one.
                info.homography = m_roughHomography;

//              Debug.Log ("m_roughHomography " + m_roughHomography.ToString ());
//              Debug.Log ("info.homography " + info.homography.ToString ());

                // Transform contour with rough homography
                Core.perspectiveTransform(m_pattern.points2d, info.points2d, m_roughHomography);

//              info.draw2dContour (image, new Scalar (0, 200, 0, 255));
            }
        }

//              (GameObject.Find ("DebugHelpers").GetComponent<DebugHelpers> ()).showMat (DebugHelpers.getMatchesImage (m_grayImg, m_pattern.grayImg, m_queryKeypoints, m_pattern.keypoints, m_matches, 100));
//              Debug.Log ("Features:" + m_queryKeypoints.ToString () + " Matches: " + m_matches.ToString ());


        // NOTE(review): if refinement ran but failed, homographyFound is false even
        // though a rough homography existed — this appears intentional (stricter check).
        return(homographyFound);
    }