Example #1
        /// <summary>
        /// Estimates the position.
        /// </summary>
        /// <param name="detectedMarkers">Detected markers.</param>
        void estimatePosition(List <Marker> detectedMarkers)
        {
            for (int i = 0; i < detectedMarkers.Count; i++)
            {
                Marker m = detectedMarkers [i];

                Mat Rvec = new Mat();
                Mat Tvec = new Mat();
                Mat raux = new Mat();
                Mat taux = new Mat();
                Calib3d.solvePnP(m_markerCorners3d, new MatOfPoint2f(m.points.toArray()), camMatrix, distCoeff, raux, taux);


                raux.convertTo(Rvec, CvType.CV_32F);
                taux.convertTo(Tvec, CvType.CV_32F);

                Mat rotMat = new Mat(3, 3, CvType.CV_64FC1);
                Calib3d.Rodrigues(Rvec, rotMat);


                m.transformation.SetRow(0, new Vector4((float)rotMat.get(0, 0) [0], (float)rotMat.get(0, 1) [0], (float)rotMat.get(0, 2) [0], (float)Tvec.get(0, 0) [0]));
                m.transformation.SetRow(1, new Vector4((float)rotMat.get(1, 0) [0], (float)rotMat.get(1, 1) [0], (float)rotMat.get(1, 2) [0], (float)Tvec.get(1, 0) [0]));
                m.transformation.SetRow(2, new Vector4((float)rotMat.get(2, 0) [0], (float)rotMat.get(2, 1) [0], (float)rotMat.get(2, 2) [0], (float)Tvec.get(2, 0) [0]));
                m.transformation.SetRow(3, new Vector4(0, 0, 0, 1));

//                      Debug.Log ("m.transformation " + m.transformation.ToString ());


                Rvec.Dispose();
                Tvec.Dispose();
                raux.Dispose();
                taux.Dispose();
                rotMat.Dispose();
            }
        }
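The matrix assembled above is still in OpenCV's right-handed coordinate system. A minimal sketch of the conversion the other examples in this collection apply to it; the invertYM/invertZM names follow the common OpenCVForUnity sample convention and are an assumption here, not members of this class:

            Matrix4x4 invertYM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1)); // flip Y: right-handed (OpenCV) -> left-handed (Unity)
            Matrix4x4 invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1)); // flip Z for the marker's local frame
            Matrix4x4 ARM = invertYM * m.transformation * invertZM;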
    /// <summary>
    /// Computes the pose.
    /// </summary>
    /// <param name="pattern">Pattern.</param>
    /// <param name="camMatrix">Cam matrix.</param>
    /// <param name="distCoeff">Dist coeff.</param>
    public void computePose(Pattern pattern, Mat camMatrix, MatOfDouble distCoeff)
    {
        Mat Rvec = new Mat();
        Mat Tvec = new Mat();
        Mat raux = new Mat();
        Mat taux = new Mat();

        Calib3d.solvePnP(pattern.points3d, points2d, camMatrix, distCoeff, raux, taux);
        raux.convertTo(Rvec, CvType.CV_32F);
        taux.convertTo(Tvec, CvType.CV_32F);

        Mat rotMat = new Mat(3, 3, CvType.CV_64FC1);

        Calib3d.Rodrigues(Rvec, rotMat);

        pose3d.SetRow(0, new Vector4((float)rotMat.get(0, 0) [0], (float)rotMat.get(0, 1) [0], (float)rotMat.get(0, 2) [0], (float)Tvec.get(0, 0) [0]));
        pose3d.SetRow(1, new Vector4((float)rotMat.get(1, 0) [0], (float)rotMat.get(1, 1) [0], (float)rotMat.get(1, 2) [0], (float)Tvec.get(1, 0) [0]));
        pose3d.SetRow(2, new Vector4((float)rotMat.get(2, 0) [0], (float)rotMat.get(2, 1) [0], (float)rotMat.get(2, 2) [0], (float)Tvec.get(2, 0) [0]));
        pose3d.SetRow(3, new Vector4(0, 0, 0, 1));

//      Debug.Log ("pose3d " + pose3d.ToString ());

        Rvec.Dispose();
        Tvec.Dispose();
        raux.Dispose();
        taux.Dispose();
        rotMat.Dispose();
    }
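A hedged usage sketch: assuming a populated Pattern, camera intrinsics, and the ARUtils helper from the OpenCVForUnity samples (the patternTrackingInfo instance and surrounding setup are hypothetical, not shown in the source):

        patternTrackingInfo.computePose(pattern, camMatrix, distCoeff);
        Matrix4x4 ARM = invertYM * patternTrackingInfo.pose3d * invertZM; // right-handed (OpenCV) -> left-handed (Unity)
        ARUtils.SetTransformFromMatrix(ARGameObject.transform, ref ARM);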
        private void DetectARUcoMarker()
        {
            if (downscaleRatio == 1)
            {
                downScaleRgbaMat = rgbaMat4Thread;
            }
            else
            {
                Imgproc.resize(rgbaMat4Thread, downScaleRgbaMat, new Size(), 1.0 / downscaleRatio, 1.0 / downscaleRatio, Imgproc.INTER_LINEAR);
            }
            Imgproc.cvtColor(downScaleRgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);

            // Detect markers and estimate Pose
            Aruco.detectMarkers(grayMat, dictionary, corners, ids, detectorParams, rejected);

            if (applyEstimationPose && ids.total() > 0)
            {
                Aruco.estimatePoseSingleMarkers(corners, markerLength, camMatrix, distCoeffs, rvecs, tvecs);

                for (int i = 0; i < ids.total(); i++)
                {
                    // This example displays the AR object only on the first detected marker.
                    if (i == 0)
                    {
                        // Position
                        double[] tvec = tvecs.get(i, 0);

                        // Rotation
                        double[] rv   = rvecs.get(i, 0);
                        Mat      rvec = new Mat(3, 1, CvType.CV_64FC1);
                        rvec.put(0, 0, rv [0]);
                        rvec.put(1, 0, rv [1]);
                        rvec.put(2, 0, rv [2]);
                        Calib3d.Rodrigues(rvec, rotMat);

                        transformationM.SetRow(0, new Vector4((float)rotMat.get(0, 0) [0], (float)rotMat.get(0, 1) [0], (float)rotMat.get(0, 2) [0], (float)tvec [0]));
                        transformationM.SetRow(1, new Vector4((float)rotMat.get(1, 0) [0], (float)rotMat.get(1, 1) [0], (float)rotMat.get(1, 2) [0], (float)tvec [1]));
                        transformationM.SetRow(2, new Vector4((float)rotMat.get(2, 0) [0], (float)rotMat.get(2, 1) [0], (float)rotMat.get(2, 2) [0], (float)(tvec [2] / downscaleRatio)));

                        transformationM.SetRow(3, new Vector4(0, 0, 0, 1));

                        // Right-handed coordinates system (OpenCV) to left-handed one (Unity)
                        ARM = invertYM * transformationM;

                        // Apply Z axis inverted matrix.
                        ARM = ARM * invertZM;

                        hasUpdatedARTransformMatrix = true;

                        break;
                    }
                }
            }
        }
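In this style of sample, DetectARUcoMarker runs on a worker thread, so the pose is only flagged here and applied later. A hedged sketch of how the main thread might consume that flag (everything except ARM and hasUpdatedARTransformMatrix is an assumption):

            void LateUpdate()
            {
                if (hasUpdatedARTransformMatrix)
                {
                    hasUpdatedARTransformMatrix = false;

                    // Apply the camera transform and move the AR object on the main thread.
                    ARM = arCamera.transform.localToWorldMatrix * ARM;
                    ARUtils.SetTransformFromMatrix(arGameObject.transform, ref ARM);
                }
            }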
        /// <summary>
        /// Gets the frontal face angle.
        /// </summary>
        /// <returns>The frontal face angle.</returns>
        /// <param name="points">Points.</param>
        public Vector3 getFrontalFaceAngle(List <Vector2> points)
        {
            if (points.Count != 68)
            {
                throw new ArgumentException("Invalid landmark_points.");
            }

            if (camMatrix == null)
            {
                throw new ArgumentNullException("camMatrix", "Invalid camMatrix.");
            }

            //normalize

            float   normScale = Math.Abs(points [30].y - points [8].y) / (normHeight / 2);
            Vector2 normDiff  = points [30] * normScale - new Vector2(normWidth / 2, normHeight / 2);

            for (int i = 0; i < points.Count; i++)
            {
                normPoints [i] = points [i] * normScale - normDiff;
            }
            //Debug.Log ("points[30] " + points[30]);
            //Debug.Log ("normPoints[30] " + normPoints[30]);
            //Debug.Log ("normScale " + normScale);
            //Debug.Log ("normDiff " + normDiff);

            imagePoints.fromArray(
                new Point((normPoints [38].x + normPoints [41].x) / 2, (normPoints [38].y + normPoints [41].y) / 2), //l eye
                new Point((normPoints [43].x + normPoints [46].x) / 2, (normPoints [43].y + normPoints [46].y) / 2), //r eye
                new Point(normPoints [33].x, normPoints [33].y),                                                     //nose
                new Point(normPoints [48].x, normPoints [48].y),                                                     //l mouth
                new Point(normPoints [54].x, normPoints [54].y),                                                     //r mouth
                new Point(normPoints [0].x, normPoints [0].y),                                                       //l ear
                new Point(normPoints [16].x, normPoints [16].y)                                                      //r ear
                );


            Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);

            Calib3d.Rodrigues(rvec, rotM);

            transformationM.SetRow(0, new Vector4((float)rotM.get(0, 0) [0], (float)rotM.get(0, 1) [0], (float)rotM.get(0, 2) [0], (float)tvec.get(0, 0) [0]));
            transformationM.SetRow(1, new Vector4((float)rotM.get(1, 0) [0], (float)rotM.get(1, 1) [0], (float)rotM.get(1, 2) [0], (float)tvec.get(1, 0) [0]));
            transformationM.SetRow(2, new Vector4((float)rotM.get(2, 0) [0], (float)rotM.get(2, 1) [0], (float)rotM.get(2, 2) [0], (float)tvec.get(2, 0) [0]));
            transformationM.SetRow(3, new Vector4(0, 0, 0, 1));

            ARM = invertYM * transformationM * invertZM;

            return(ExtractRotationFromMatrix(ref ARM).eulerAngles);
        }
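ExtractRotationFromMatrix is called above but not shown. A minimal sketch of the usual implementation (an assumption, not necessarily this project's exact version):

            public static Quaternion ExtractRotationFromMatrix(ref Matrix4x4 matrix)
            {
                // Build the rotation from the matrix's forward (third) and up (second) columns.
                Vector3 forward = new Vector3(matrix.m02, matrix.m12, matrix.m22);
                Vector3 upwards = new Vector3(matrix.m01, matrix.m11, matrix.m21);
                return Quaternion.LookRotation(forward, upwards);
            }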
    private static Mat GetRotationMatrixFromRotationVector(Mat rotation_)
    {
        Mat rotationFull = new Mat(3, 3, CvType.CV_64FC1);

        // get full rotation matrix from rotation vector
        Calib3d.Rodrigues(rotation_, rotationFull);
        double[] LHSflipBackMatrix = new double[] { 1.0, 0.0, 0.0,
                                                    0.0, 1.0, 0.0,
                                                    0.0, 0.0, -1.0 };
        Mat LHSflipBackMatrixM = new Mat(3, 3, CvType.CV_64FC1);

        LHSflipBackMatrixM.put(0, 0, LHSflipBackMatrix);
        Mat rotLHS = rotationFull * LHSflipBackMatrixM; // invert Z (as we did before when saving points) to get from RHS -> LHS

        return(rotLHS);
    }
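The Mat '*' operator used above requires an OpenCVForUnity build that defines Mat operator overloads. A hedged equivalent with Core.gemm, which should produce the same product:

        Mat rotLHS = new Mat(3, 3, CvType.CV_64FC1);
        Core.gemm(rotationFull, LHSflipBackMatrixM, 1.0, new Mat(), 0.0, rotLHS); // rotLHS = rotationFull * LHSflipBackMatrixM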
Example #6
        private void UpdateARObjectTransform(Mat rvec, Mat tvec)
        {
            // Position
            double[] tvecArr = new double[3];
            tvec.get(0, 0, tvecArr);

            // Rotation
            Calib3d.Rodrigues(rvec, rotMat);

            double[] rotMatArr = new double[rotMat.total()];
            rotMat.get(0, 0, rotMatArr);

            transformationM.SetRow(0, new Vector4((float)rotMatArr [0], (float)rotMatArr [1], (float)rotMatArr [2], (float)tvecArr [0]));
            transformationM.SetRow(1, new Vector4((float)rotMatArr [3], (float)rotMatArr [4], (float)rotMatArr [5], (float)tvecArr [1]));
            transformationM.SetRow(2, new Vector4((float)rotMatArr [6], (float)rotMatArr [7], (float)rotMatArr [8], (float)tvecArr [2]));
            transformationM.SetRow(3, new Vector4(0, 0, 0, 1));

            // right-handed coordinates system (OpenCV) to left-handed one (Unity)
            ARM = invertYM * transformationM;

            // Apply Z axis inverted matrix.
            ARM = ARM * invertZM;

            if (shouldMoveARCamera)
            {
                ARM = arGameObject.transform.localToWorldMatrix * ARM.inverse;

                ARUtils.SetTransformFromMatrix(arCamera.transform, ref ARM);
            }
            else
            {
                ARM = arCamera.transform.localToWorldMatrix * ARM;

                ARUtils.SetTransformFromMatrix(arGameObject.transform, ref ARM);
            }
        }
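ARUtils.SetTransformFromMatrix is the OpenCVForUnity sample helper that pushes a Matrix4x4 onto a Transform. A minimal sketch of the idea (an assumption based on the sample utilities, which also extract scale):

            static void SetTransformFromMatrix(Transform transform, ref Matrix4x4 matrix)
            {
                transform.localPosition = matrix.GetColumn(3);                                               // translation lives in the fourth column
                transform.localRotation = Quaternion.LookRotation(matrix.GetColumn(2), matrix.GetColumn(1)); // forward and up basis vectors
            }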
Example #7
        // Update is called once per frame
        void Update()
        {
            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();

                Imgproc.cvtColor(rgbaMat, rgbMat, Imgproc.COLOR_RGBA2RGB);

                // detect markers.
                Aruco.detectMarkers(rgbMat, dictionary, corners, ids, detectorParams, rejected);

                // estimate pose.
                if (applyEstimationPose && ids.total() > 0)
                {
                    Aruco.estimatePoseSingleMarkers(corners, markerLength, camMatrix, distCoeffs, rvecs, tvecs);
                }

                if (ids.total() > 0)
                {
                    Aruco.drawDetectedMarkers(rgbMat, corners, ids, new Scalar(255, 0, 0));

                    if (applyEstimationPose)
                    {
                        for (int i = 0; i < ids.total(); i++)
                        {
                            Aruco.drawAxis(rgbMat, camMatrix, distCoeffs, rvecs, tvecs, markerLength * 0.5f);

                            // This example displays the AR object only on the first detected marker.
                            if (i == 0)
                            {
                                // position
                                double[] tvec = tvecs.get(i, 0);

                                // rotation
                                double[] rv   = rvecs.get(i, 0);
                                Mat      rvec = new Mat(3, 1, CvType.CV_64FC1);
                                rvec.put(0, 0, rv [0]);
                                rvec.put(1, 0, rv [1]);
                                rvec.put(2, 0, rv [2]);
                                Calib3d.Rodrigues(rvec, rotMat);

                                transformationM.SetRow(0, new Vector4((float)rotMat.get(0, 0) [0], (float)rotMat.get(0, 1) [0], (float)rotMat.get(0, 2) [0], (float)tvec [0]));
                                transformationM.SetRow(1, new Vector4((float)rotMat.get(1, 0) [0], (float)rotMat.get(1, 1) [0], (float)rotMat.get(1, 2) [0], (float)tvec [1]));
                                transformationM.SetRow(2, new Vector4((float)rotMat.get(2, 0) [0], (float)rotMat.get(2, 1) [0], (float)rotMat.get(2, 2) [0], (float)tvec [2]));
                                transformationM.SetRow(3, new Vector4(0, 0, 0, 1));

                                // right-handed coordinates system (OpenCV) to left-handed one (Unity)
                                ARM = invertYM * transformationM;

                                // Apply Z axis inverted matrix.
                                ARM = ARM * invertZM;

                                if (shouldMoveARCamera)
                                {
                                    ARM = ARGameObject.transform.localToWorldMatrix * ARM.inverse;

                                    ARUtils.SetTransformFromMatrix(ARCamera.transform, ref ARM);
                                }
                                else
                                {
                                    ARM = ARCamera.transform.localToWorldMatrix * ARM;

                                    ARUtils.SetTransformFromMatrix(ARGameObject.transform, ref ARM);
                                }
                            }
                        }
                    }
                }

                if (showRejected && rejected.Count > 0)
                {
                    Aruco.drawDetectedMarkers(rgbMat, rejected, new Mat(), new Scalar(0, 0, 255));
                }


                Imgproc.putText(rgbMat, "W:" + rgbMat.width() + " H:" + rgbMat.height() + " SO:" + Screen.orientation, new Point(5, rgbMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false); // draw on rgbMat, which is what is displayed below

                Utils.matToTexture2D(rgbMat, texture, webCamTextureToMatHelper.GetBufferColors());
            }
        }
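Note that Aruco.drawAxis expects a single marker's rvec/tvec, while the loop above passes the full rvecs/tvecs Mats on every iteration. A hedged sketch of drawing each marker's axes through a per-row view (assumes the Mat row-range constructor, similar to what Example #10 below uses):

                            using (Mat rvec = new Mat(rvecs, new OpenCVForUnity.CoreModule.Range(i, i + 1)))
                            using (Mat tvec = new Mat(tvecs, new OpenCVForUnity.CoreModule.Range(i, i + 1)))
                            {
                                Aruco.drawAxis(rgbMat, camMatrix, distCoeffs, rvec, tvec, markerLength * 0.5f);
                            }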
Example #8
    /// <summary>
    /// Gets the frontal face angle.
    /// </summary>
    /// <param name="points">Landmark points.</param>
    private Vector3 GetFrontalFaceAngle(List <Vector2> points)
    {
        if (points.Count != 68)
        {
            throw new ArgumentNullException("ランドマークが正しくありません。");
        }

        if (_camMatrix == null)
        {
            throw new ArgumentNullException("カメラの内部パラメータが正しくありません。");
        }

        // normalize scale
        float   normScale = Math.Abs(points[30].y - points[8].y) / (normHeight / 2);
        Vector2 normDiff  = points[30] * normScale - new Vector2(normWidth / 2, normHeight / 2);

        for (int i = 0; i < points.Count; i++)
        {
            _normPoints[i] = points[i] * normScale - normDiff;
        }

        _imagePoints.fromArray(
            new Point((_normPoints[38].x + _normPoints[41].x) / 2, (_normPoints[38].y + _normPoints[41].y) / 2), // l eye
            new Point((_normPoints[43].x + _normPoints[46].x) / 2, (_normPoints[43].y + _normPoints[46].y) / 2), // r eye
            new Point(_normPoints[33].x, _normPoints[33].y),                                                     // nose
            new Point(_normPoints[48].x, _normPoints[48].y),                                                     // l mouth corner
            new Point(_normPoints[54].x, _normPoints[54].y),                                                     // r mouth corner
            new Point(_normPoints[0].x, _normPoints[0].y),                                                       // l ear
            new Point(_normPoints[16].x, _normPoints[16].y)                                                      // r ear
            );

        // Estimate the head pose from the 2D-3D point correspondences.
        if (_rVec == null || _tVec == null)
        {
            _rVec = new Mat(3, 1, CvType.CV_64FC1);
            _tVec = new Mat(3, 1, CvType.CV_64FC1);
            Calib3d.solvePnP(_objectPoints, _imagePoints, _camMatrix, _distCoeffs, _rVec, _tVec);
        }

        double  tVecX           = _tVec.get(0, 0)[0];
        double  tVecY           = _tVec.get(1, 0)[0];
        double  tVecZ           = _tVec.get(2, 0)[0];
        bool    isNotInViewport = false;
        Vector4 pos             = _VP * new Vector4((float)tVecX, (float)tVecY, (float)tVecZ, 1.0f);

        if (pos.w != 0)
        {
            float x = pos.x / pos.w, y = pos.y / pos.w, z = pos.z / pos.w;
            if (x < -1.0f || x > 1.0f || y < -1.0f || y > 1.0f || z < -1.0f || z > 1.0f)
            {
                isNotInViewport = true;
            }
        }

        // If the object is not within the camera's view, do not use the extrinsic guess.
        if (double.IsNaN(tVecZ) || isNotInViewport)
        {
            Calib3d.solvePnP(_objectPoints, _imagePoints, _camMatrix, _distCoeffs,
                             _rVec, _tVec);
        }
        else
        {
            Calib3d.solvePnP(_objectPoints, _imagePoints, _camMatrix, _distCoeffs,
                             _rVec, _tVec, true, Calib3d.SOLVEPNP_ITERATIVE);
        }

        if (!isNotInViewport)
        {
            // Convert to Unity pose data
            double[] rVecArr = new double[3];
            _rVec.get(0, 0, rVecArr);
            double[] tVecArr = new double[3];
            _tVec.get(0, 0, tVecArr);
            PoseData poseData = ARUtils.ConvertRvecTvecToPoseData(rVecArr, tVecArr);
            // Ignore the update if the change is below the threshold
            ARUtils.LowpassPoseData(ref _oldPoseData, ref poseData, positionLowPass, rotationLowPass);
            _oldPoseData = poseData;
            // Build the transformation matrix
            _transformationM = Matrix4x4.TRS(poseData.pos, poseData.rot, Vector3.one);
        }


        Calib3d.Rodrigues(_rVec, _rotM);

        _transformationM.SetRow(0,
                                new Vector4(
                                    (float)_rotM.get(0, 0)[0],
                                    (float)_rotM.get(0, 1)[0],
                                    (float)_rotM.get(0, 2)[0],
                                    (float)_tVec.get(0, 0)[0]));
        _transformationM.SetRow(1,
                                new Vector4(
                                    (float)_rotM.get(1, 0)[0],
                                    (float)_rotM.get(1, 1)[0],
                                    (float)_rotM.get(1, 2)[0],
                                    (float)_tVec.get(1, 0)[0]));
        _transformationM.SetRow(2,
                                new Vector4(
                                    (float)_rotM.get(2, 0)[0],
                                    (float)_rotM.get(2, 1)[0],
                                    (float)_rotM.get(2, 2)[0],
                                    (float)_tVec.get(2, 0)[0]));
        _transformationM.SetRow(3,
                                new Vector4(
                                    0,
                                    0,
                                    0,
                                    1));

        _ARM = _invertYM * _transformationM * _invertZM; // right-handed coordinates (OpenCV) to left-handed (Unity)

        return(ExtractRotationFromMatrix(ref _ARM).eulerAngles);
    }
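ARUtils.LowpassPoseData above suppresses jitter by discarding pose changes below the given thresholds. A sketch of that idea (an assumption, not the library source):

    static void LowpassPoseData(ref PoseData oldPose, ref PoseData newPose, float positionThreshold, float rotationThreshold)
    {
        // Keep the previous value when the frame-to-frame change is below the threshold.
        if (Vector3.Distance(newPose.pos, oldPose.pos) < positionThreshold)
            newPose.pos = oldPose.pos;
        if (Quaternion.Angle(newPose.rot, oldPose.rot) < rotationThreshold)
            newPose.rot = oldPose.rot;
    }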
Example #9
        // Update is called once per frame
        void Update()
        {
            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();


                OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);

                //detect face rects
                List <UnityEngine.Rect> detectResult = faceLandmarkDetector.Detect();

                if (detectResult.Count > 0)
                {
                    //detect landmark points
                    List <Vector2> points = faceLandmarkDetector.DetectLandmark(detectResult [0]);

                    if (displayFacePoints)
                    {
                        OpenCVForUnityUtils.DrawFaceLandmark(rgbaMat, points, new Scalar(0, 255, 0, 255), 2);
                    }

                    imagePoints.fromArray(
                        new Point((points [38].x + points [41].x) / 2, (points [38].y + points [41].y) / 2), //l eye (Interpupillary breadth)
                        new Point((points [43].x + points [46].x) / 2, (points [43].y + points [46].y) / 2), //r eye (Interpupillary breadth)
                        new Point(points [30].x, points [30].y),                                             //nose (Nose top)
                        new Point(points [48].x, points [48].y),                                             //l mouth (Mouth breadth)
                        new Point(points [54].x, points [54].y),                                             //r mouth (Mouth breadth)
                        new Point(points [0].x, points [0].y),                                               //l ear (Bitragion breadth)
                        new Point(points [16].x, points [16].y)                                              //r ear (Bitragion breadth)
                        );

                    // Estimate head pose.
                    if (rvec == null || tvec == null)
                    {
                        rvec = new Mat(3, 1, CvType.CV_64FC1);
                        tvec = new Mat(3, 1, CvType.CV_64FC1);
                        Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
                    }

                    double tvec_z = tvec.get(2, 0) [0];

                    if (double.IsNaN(tvec_z) || tvec_z < 0)    // if tvec is wrong data, do not use extrinsic guesses.
                    {
                        Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
                    }
                    else
                    {
                        Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec, true, Calib3d.SOLVEPNP_ITERATIVE);
                    }

//                    Debug.Log (tvec.dump());

                    if (!double.IsNaN(tvec_z))
                    {
                        if (Mathf.Abs((float)(points [43].y - points [46].y)) > Mathf.Abs((float)(points [42].x - points [45].x)) / 6.0)
                        {
                            if (displayEffects)
                            {
                                rightEye.SetActive(true);
                            }
                        }

                        if (Mathf.Abs((float)(points [38].y - points [41].y)) > Mathf.Abs((float)(points [39].x - points [36].x)) / 6.0)
                        {
                            if (displayEffects)
                            {
                                leftEye.SetActive(true);
                            }
                        }
                        if (displayHead)
                        {
                            head.SetActive(true);
                        }
                        if (displayAxes)
                        {
                            axes.SetActive(true);
                        }


                        float noseDistance  = Mathf.Abs((float)(points [27].y - points [33].y));
                        float mouthDistance = Mathf.Abs((float)(points [62].y - points [66].y));
                        if (mouthDistance > noseDistance / 5.0)
                        {
                            if (displayEffects)
                            {
                                mouth.SetActive(true);
                                foreach (ParticleSystem ps in mouthParticleSystem)
                                {
                                    var em = ps.emission;
                                    em.enabled = true;
                                    #if UNITY_5_5_OR_NEWER
                                    var main = ps.main;
                                    main.startSizeMultiplier = 40 * (mouthDistance / noseDistance);
                                    #else
                                    ps.startSize = 40 * (mouthDistance / noseDistance);
                                    #endif
                                }
                            }
                        }
                        else
                        {
                            if (displayEffects)
                            {
                                foreach (ParticleSystem ps in mouthParticleSystem)
                                {
                                    var em = ps.emission;
                                    em.enabled = false;
                                }
                            }
                        }

                        Calib3d.Rodrigues(rvec, rotMat);

                        transformationM.SetRow(0, new Vector4((float)rotMat.get(0, 0) [0], (float)rotMat.get(0, 1) [0], (float)rotMat.get(0, 2) [0], (float)tvec.get(0, 0) [0]));
                        transformationM.SetRow(1, new Vector4((float)rotMat.get(1, 0) [0], (float)rotMat.get(1, 1) [0], (float)rotMat.get(1, 2) [0], (float)tvec.get(1, 0) [0]));
                        transformationM.SetRow(2, new Vector4((float)rotMat.get(2, 0) [0], (float)rotMat.get(2, 1) [0], (float)rotMat.get(2, 2) [0], (float)tvec.get(2, 0) [0]));
                        transformationM.SetRow(3, new Vector4(0, 0, 0, 1));

                        // right-handed coordinates system (OpenCV) to left-handed one (Unity)
                        ARM = invertYM * transformationM;

                        // Apply Z axis inverted matrix.
                        ARM = ARM * invertZM;

                        if (shouldMoveARCamera)
                        {
                            ARM = ARGameObject.transform.localToWorldMatrix * ARM.inverse;

                            ARUtils.SetTransformFromMatrix(ARCamera.transform, ref ARM);
                        }
                        else
                        {
                            ARM = ARCamera.transform.localToWorldMatrix * ARM;

                            ARUtils.SetTransformFromMatrix(ARGameObject.transform, ref ARM);
                        }
                    }
                }

                Imgproc.putText(rgbaMat, "W:" + rgbaMat.width() + " H:" + rgbaMat.height() + " SO:" + Screen.orientation, new Point(5, rgbaMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar(255, 255, 255, 255), 1, Imgproc.LINE_AA, false);

                OpenCVForUnity.Utils.matToTexture2D(rgbaMat, texture, webCamTextureToMatHelper.GetBufferColors());
            }
        }
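The head-pose examples assume objectPoints holds a rough 3D face model matching the seven image points. A hedged sketch with illustrative millimeter values (not the sample's exact numbers):

                    MatOfPoint3f objectPoints = new MatOfPoint3f(
                        new Point3(-31, 72, 86),  // l eye
                        new Point3(31, 72, 86),   // r eye
                        new Point3(0, 40, 114),   // nose
                        new Point3(-20, 15, 90),  // l mouth
                        new Point3(20, 15, 90),   // r mouth
                        new Point3(-69, 76, -2),  // l ear
                        new Point3(69, 76, -2)    // r ear
                        );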
Example #10
        void OnRenderObject()
        {
            if (pointCloudVisualizer == null)
            {
                return;
            }

            if (vertices == null || uintColorData == null)
            {
                return;
            }


            pointCloudBuffer.SetData(vertices);

            colorBuffer.SetData(uintColorData);

            pointCloudVisualizer.SetBuffer("_PointCloudData", pointCloudBuffer);
            pointCloudVisualizer.SetBuffer("_ColorData", colorBuffer);



            //Aruco
            if (applyAruco)
            {
                //the transformation matrix is static
                if (isStatic)
                {
                    //use the first frames to determine transformationM
                    if (!matrixCofigured)
                    {
                        while (frameCount > 0)
                        {
                            imgTexture.LoadRawTextureData(byteColorData);

                            Utils.texture2DToMat(imgTexture, rgbMat);

                            //Upside down
                            Core.flip(rgbMat, rgbMat, 0);
                            Aruco.detectMarkers(rgbMat, dictionary, corners, ids, detectorParams, rejected, camMatrix, distCoeffs);


                            if (ids.total() == 1)
                            {
                                Debug.Log("detected");

                                frameCount--;
                            }
                        }

                        Aruco.estimatePoseSingleMarkers(corners, markerLength, camMatrix, distCoeffs, rvecs, tvecs);

                        // position
                        tvec = tvecs.get(0, 0);

                        // rotation
                        rvec = new Mat(rvecs, range, range);
                        Calib3d.Rodrigues(rvec, rotMat);

                        //from marker coordinate(opencv) to camera coordinate(opencv)
                        transformationM.SetRow(0, new Vector4((float)rotMat.get(0, 0)[0], (float)rotMat.get(0, 1)[0], (float)rotMat.get(0, 2)[0], (float)tvec[0]));
                        transformationM.SetRow(1, new Vector4((float)rotMat.get(1, 0)[0], (float)rotMat.get(1, 1)[0], (float)rotMat.get(1, 2)[0], (float)tvec[1]));
                        transformationM.SetRow(2, new Vector4((float)rotMat.get(2, 0)[0], (float)rotMat.get(2, 1)[0], (float)rotMat.get(2, 2)[0], (float)tvec[2]));
                        transformationM.SetRow(3, new Vector4(0, 0, 0, 1));


                        // right-handed coordinates system (OpenCV) to left-handed one (Unity)
                        // from camera coordinate(opencv) to camera coordinate(unity)
                        transformationM = invertYM * transformationM;

                        // Apply Z axis inverted matrix.
                        // from marker coordinate(Unity) to camera coordinate(Unity)
                        transformationM = transformationM * invertZM;

                        //from camera coordinate(unity) to marker coordinate(unity)
                        //rotate 90 degrees around the x-axis(alignM)
                        //from marker coordinate(unity) to world coordinate(unity)
                        transformationM = markerOrigin.transform.localToWorldMatrix * alignM * transformationM.inverse;

                        //transformationM: the matrix which transforms camera coordinate(unity) into world coordinate(unity)
                        //move RealSense model
                        ARUtils.SetTransformFromMatrix(viewCamera.transform, ref transformationM);

                        matrixCofigured = true;

                        Debug.Log("Transformation Matrix is configured");

                        if (drawMesh)
                        {
                            meshDrawer.draw(corners, vertices, ref transformationM, width, markerLength);
                        }
                    }
                }
                else
                {
                    imgTexture.LoadRawTextureData(byteColorData);

                    Utils.texture2DToMat(imgTexture, rgbMat);

                    Core.flip(rgbMat, rgbMat, 0);
                    Aruco.detectMarkers(rgbMat, dictionary, corners, ids, detectorParams, rejected, camMatrix, distCoeffs);


                    if (ids.total() > 0)
                    {
                        Debug.Log("detected: " + ids.total());

                        Aruco.estimatePoseSingleMarkers(corners, markerLength, camMatrix, distCoeffs, rvecs, tvecs);

                        // position
                        tvec = tvecs.get(0, 0);

                        // rotation
                        rvec = new Mat(rvecs, range, range);
                        Calib3d.Rodrigues(rvec, rotMat);

                        transformationM.SetRow(0, new Vector4((float)rotMat.get(0, 0)[0], (float)rotMat.get(0, 1)[0], (float)rotMat.get(0, 2)[0], (float)tvec[0]));
                        transformationM.SetRow(1, new Vector4((float)rotMat.get(1, 0)[0], (float)rotMat.get(1, 1)[0], (float)rotMat.get(1, 2)[0], (float)tvec[1]));
                        transformationM.SetRow(2, new Vector4((float)rotMat.get(2, 0)[0], (float)rotMat.get(2, 1)[0], (float)rotMat.get(2, 2)[0], (float)tvec[2]));
                        transformationM.SetRow(3, new Vector4(0, 0, 0, 1));


                        transformationM = invertYM * transformationM;

                        transformationM = transformationM * invertZM;

                        transformationM = markerOrigin.transform.localToWorldMatrix * alignM * transformationM.inverse;

                        ARUtils.SetTransformFromMatrix(viewCamera.transform, ref transformationM);
                    }
                }
            }
            else
            {
                transformationM = Matrix4x4.TRS(viewCamera.transform.position, viewCamera.transform.rotation, viewCamera.transform.lossyScale);
            }

            pointCloudVisualizer.SetMatrix("_transformationM", transformationM);

            trs = Matrix4x4.TRS(transform.position, transform.rotation, transform.lossyScale);
            pointCloudVisualizer.SetMatrix("_trs", trs);

            pointCloudVisualizer.SetPass(0);
            Graphics.DrawProcedural(MeshTopology.Points, dataLength);
        }
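alignM is used above but never defined in this snippet. Based on the in-code comment ("rotate 90 degrees around the x-axis"), a plausible definition (an assumption):

            Matrix4x4 alignM = Matrix4x4.TRS(Vector3.zero, Quaternion.Euler(90f, 0f, 0f), Vector3.one);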
        private void UpdateARHeadTransform(List <Vector2> points)
        {
            // The 2D image points (in pixels) of the detected face landmarks used for pose estimation.
            imagePoints.fromArray(
                new Point((points [38].x + points [41].x) / 2, (points [38].y + points [41].y) / 2), //l eye (Interpupillary breadth)
                new Point((points [43].x + points [46].x) / 2, (points [43].y + points [46].y) / 2), //r eye (Interpupillary breadth)
                new Point(points [30].x, points [30].y),                                             //nose (Nose top)
                new Point(points [48].x, points [48].y),                                             //l mouth (Mouth breadth)
                new Point(points [54].x, points [54].y),                                             //r mouth (Mouth breadth)
                new Point(points [0].x, points [0].y),                                               //l ear (Bitragion breadth)
                new Point(points [16].x, points [16].y)                                              //r ear (Bitragion breadth)
                );

            // Estimate head pose.
            if (rvec == null || tvec == null)
            {
                rvec = new Mat(3, 1, CvType.CV_64FC1);
                tvec = new Mat(3, 1, CvType.CV_64FC1);
                Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
            }

            double tvec_z = tvec.get(2, 0) [0];

            if (double.IsNaN(tvec_z) || tvec_z < 0)   // if tvec is wrong data, do not use extrinsic guesses.
            {
                Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
            }
            else
            {
                Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec, true, Calib3d.SOLVEPNP_ITERATIVE);
            }

            if (applyEstimationPose && !double.IsNaN(tvec_z))
            {
                if (Mathf.Abs((float)(points [43].y - points [46].y)) > Mathf.Abs((float)(points [42].x - points [45].x)) / 6.0)
                {
                    if (displayEffects)
                    {
                        rightEye.SetActive(true);
                    }
                }

                if (Mathf.Abs((float)(points [38].y - points [41].y)) > Mathf.Abs((float)(points [39].x - points [36].x)) / 6.0)
                {
                    if (displayEffects)
                    {
                        leftEye.SetActive(true);
                    }
                }
                if (displayHead)
                {
                    head.SetActive(true);
                }
                if (displayAxes)
                {
                    axes.SetActive(true);
                }


                float noseDistance  = Mathf.Abs((float)(points [27].y - points [33].y));
                float mouthDistance = Mathf.Abs((float)(points [62].y - points [66].y));
                if (mouthDistance > noseDistance / 5.0)
                {
                    if (displayEffects)
                    {
                        mouth.SetActive(true);
                        foreach (ParticleSystem ps in mouthParticleSystem)
                        {
                            var em = ps.emission;
                            em.enabled = true;
                            #if UNITY_5_5_OR_NEWER
                            var main = ps.main;
                            main.startSizeMultiplier = 40 * (mouthDistance / noseDistance);
                            #else
                            ps.startSize = 40 * (mouthDistance / noseDistance);
                            #endif
                        }
                    }
                }
                else
                {
                    if (displayEffects)
                    {
                        foreach (ParticleSystem ps in mouthParticleSystem)
                        {
                            var em = ps.emission;
                            em.enabled = false;
                        }
                    }
                }

                Calib3d.Rodrigues(rvec, rotMat);

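                // The face model's objectPoints are presumably in millimeters, so dividing
                // tvec by 1000 converts the translation to Unity meters (an interpretation
                // of the scale factor, not stated in the source).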
                transformationM.SetRow(0, new Vector4((float)rotMat.get(0, 0) [0], (float)rotMat.get(0, 1) [0], (float)rotMat.get(0, 2) [0], (float)(tvec.get(0, 0) [0] / 1000.0)));
                transformationM.SetRow(1, new Vector4((float)rotMat.get(1, 0) [0], (float)rotMat.get(1, 1) [0], (float)rotMat.get(1, 2) [0], (float)(tvec.get(1, 0) [0] / 1000.0)));
                transformationM.SetRow(2, new Vector4((float)rotMat.get(2, 0) [0], (float)rotMat.get(2, 1) [0], (float)rotMat.get(2, 2) [0], (float)(tvec.get(2, 0) [0] / 1000.0 / imageOptimizationHelper.downscaleRatio)));
                transformationM.SetRow(3, new Vector4(0, 0, 0, 1));

                // right-handed coordinates system (OpenCV) to left-handed one (Unity)
                ARM = invertYM * transformationM;

                // Apply Z axis inverted matrix.
                ARM = ARM * invertZM;

                // Apply camera transform matrix.
                ARM = arCamera.transform.localToWorldMatrix * ARM;

                ARUtils.SetTransformFromMatrix(arGameObject.transform, ref ARM);
            }
        }
        // Update is called once per frame
        void Update()
        {
            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();


                //convert image to greyscale
                Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);


                if (isAutoResetMode || faceTracker.getPoints().Count <= 0)
                {
//                    Debug.Log ("detectFace");

                    //equalize the histogram and detect faces
                    using (Mat equalizeHistMat = new Mat()) using (MatOfRect faces = new MatOfRect()) {
                            Imgproc.equalizeHist(grayMat, equalizeHistMat);

                            cascade.detectMultiScale(equalizeHistMat, faces, 1.1f, 2, 0
                                                     | Objdetect.CASCADE_FIND_BIGGEST_OBJECT
                                                     | Objdetect.CASCADE_SCALE_IMAGE, new Size(equalizeHistMat.cols() * 0.15, equalizeHistMat.cols() * 0.15), new Size());



                            if (faces.rows() > 0)
                            {
//                            Debug.Log ("faces " + faces.dump ());

                                List <OpenCVForUnity.CoreModule.Rect> rectsList = faces.toList();
                                List <Point[]> pointsList = faceTracker.getPoints();

                                if (isAutoResetMode)
                                {
                                    //add initial face points from MatOfRect
                                    if (pointsList.Count <= 0)
                                    {
                                        faceTracker.addPoints(faces);
//                                    Debug.Log ("reset faces ");
                                    }
                                    else
                                    {
                                        for (int i = 0; i < rectsList.Count; i++)
                                        {
                                            OpenCVForUnity.CoreModule.Rect trackRect = new OpenCVForUnity.CoreModule.Rect(rectsList [i].x + rectsList [i].width / 3, rectsList [i].y + rectsList [i].height / 2, rectsList [i].width / 3, rectsList [i].height / 3);
                                            //Check whether the nose point falls inside trackRect.
                                            if (i < pointsList.Count && !trackRect.contains(pointsList [i] [67]))
                                            {
                                                rectsList.RemoveAt(i);
                                                pointsList.RemoveAt(i);
//                                            Debug.Log ("remove " + i);
                                            }
                                            Imgproc.rectangle(rgbaMat, new Point(trackRect.x, trackRect.y), new Point(trackRect.x + trackRect.width, trackRect.y + trackRect.height), new Scalar(0, 0, 255, 255), 2);
                                        }
                                    }
                                }
                                else
                                {
                                    faceTracker.addPoints(faces);
                                }

                                //draw face rect
                                for (int i = 0; i < rectsList.Count; i++)
                                {
                                    Imgproc.rectangle(rgbaMat, new Point(rectsList [i].x, rectsList [i].y), new Point(rectsList [i].x + rectsList [i].width, rectsList [i].y + rectsList [i].height), new Scalar(255, 0, 0, 255), 2);
                                }
                            }
                            else
                            {
                                if (isAutoResetMode)
                                {
                                    faceTracker.reset();

                                    rightEye.SetActive(false);
                                    leftEye.SetActive(false);
                                    head.SetActive(false);
                                    mouth.SetActive(false);
                                    axes.SetActive(false);
                                }
                            }
                        }
                }


                //Track face points. If there are no face points, track() always returns false.
                if (faceTracker.track(grayMat, faceTrackerParams))
                {
                    if (isShowingFacePoints)
                    {
                        faceTracker.draw(rgbaMat, new Scalar(255, 0, 0, 255), new Scalar(0, 255, 0, 255));
                    }

                    Imgproc.putText(rgbaMat, "'Tap' or 'Space Key' to Reset", new Point(5, rgbaMat.rows() - 5), Imgproc.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);


                    Point[] points = faceTracker.getPoints() [0];


                    if (points.Length > 0)
                    {
//                        for (int i = 0; i < points.Length; i++)
//                        {
//                            Imgproc.putText(rgbaMat, "" + i, new Point(points [i].x, points [i].y), Imgproc.FONT_HERSHEY_SIMPLEX, 0.3, new Scalar(0, 0, 255, 255), 2, Imgproc.LINE_AA, false);
//                        }


                        imagePoints.fromArray(
                            points [31], //l eye
                            points [36], //r eye
                            points [67], //nose
                            points [48], //l mouth
                            points [54]  //r mouth
//                          ,
//                          points [0],//l ear
//                          points [14]//r ear
                            );


                        Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);

                        bool isRefresh = false;

                        if (tvec.get(2, 0) [0] > 0 && tvec.get(2, 0) [0] < 1200 * ((float)rgbaMat.cols() / (float)webCamTextureToMatHelper.requestedWidth))
                        {
                            isRefresh = true;

                            if (oldRvec == null)
                            {
                                oldRvec = new Mat();
                                rvec.copyTo(oldRvec);
                            }
                            if (oldTvec == null)
                            {
                                oldTvec = new Mat();
                                tvec.copyTo(oldTvec);
                            }


                            //filter Rvec Noise.
                            using (Mat absDiffRvec = new Mat()) {
                                Core.absdiff(rvec, oldRvec, absDiffRvec);

                                //              Debug.Log ("absDiffRvec " + absDiffRvec.dump());

                                using (Mat cmpRvec = new Mat()) {
                                    Core.compare(absDiffRvec, new Scalar(rvecNoiseFilterRange), cmpRvec, Core.CMP_GT);

                                    if (Core.countNonZero(cmpRvec) > 0)
                                    {
                                        isRefresh = false;
                                    }
                                }
                            }

                            //filter Tvec Noise.
                            using (Mat absDiffTvec = new Mat()) {
                                Core.absdiff(tvec, oldTvec, absDiffTvec);

                                //              Debug.Log ("absDiffRvec " + absDiffRvec.dump());

                                using (Mat cmpTvec = new Mat()) {
                                    Core.compare(absDiffTvec, new Scalar(tvecNoiseFilterRange), cmpTvec, Core.CMP_GT);

                                    if (Core.countNonZero(cmpTvec) > 0)
                                    {
                                        isRefresh = false;
                                    }
                                }
                            }
                        }

                        if (isRefresh)
                        {
                            if (isShowingEffects)
                            {
                                rightEye.SetActive(true);
                            }
                            if (isShowingEffects)
                            {
                                leftEye.SetActive(true);
                            }
                            if (isShowingHead)
                            {
                                head.SetActive(true);
                            }
                            if (isShowingAxes)
                            {
                                axes.SetActive(true);
                            }


                            if ((Mathf.Abs((float)(points [48].x - points [56].x)) < Mathf.Abs((float)(points [31].x - points [36].x)) / 2.2 &&
                                 Mathf.Abs((float)(points [51].y - points [57].y)) > Mathf.Abs((float)(points [31].x - points [36].x)) / 2.9) ||
                                Mathf.Abs((float)(points [51].y - points [57].y)) > Mathf.Abs((float)(points [31].x - points [36].x)) / 2.7)
                            {
                                if (isShowingEffects)
                                {
                                    mouth.SetActive(true);
                                }
                            }
                            else
                            {
                                if (isShowingEffects)
                                {
                                    mouth.SetActive(false);
                                }
                            }

                            rvec.copyTo(oldRvec);
                            tvec.copyTo(oldTvec);

                            Calib3d.Rodrigues(rvec, rotM);

                            transformationM.SetRow(0, new Vector4((float)rotM.get(0, 0) [0], (float)rotM.get(0, 1) [0], (float)rotM.get(0, 2) [0], (float)tvec.get(0, 0) [0]));
                            transformationM.SetRow(1, new Vector4((float)rotM.get(1, 0) [0], (float)rotM.get(1, 1) [0], (float)rotM.get(1, 2) [0], (float)tvec.get(1, 0) [0]));
                            transformationM.SetRow(2, new Vector4((float)rotM.get(2, 0) [0], (float)rotM.get(2, 1) [0], (float)rotM.get(2, 2) [0], (float)tvec.get(2, 0) [0]));
                            transformationM.SetRow(3, new Vector4(0, 0, 0, 1));

                            // right-handed coordinates system (OpenCV) to left-handed one (Unity)
                            ARM = invertYM * transformationM;

                            // Apply Z-axis inverted matrix.
                            ARM = ARM * invertZM;

                            if (shouldMoveARCamera)
                            {
                                if (ARGameObject != null)
                                {
                                    ARM = ARGameObject.transform.localToWorldMatrix * ARM.inverse;
                                    ARUtils.SetTransformFromMatrix(ARCamera.transform, ref ARM);
                                    ARGameObject.SetActive(true);
                                }
                            }
                            else
                            {
                                ARM = ARCamera.transform.localToWorldMatrix * ARM;

                                if (ARGameObject != null)
                                {
                                    ARUtils.SetTransformFromMatrix(ARGameObject.transform, ref ARM);
                                    ARGameObject.SetActive(true);
                                }
                            }
                        }
                    }
                }

//                Imgproc.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

                Utils.fastMatToTexture2D(rgbaMat, texture);
            }

            if (Input.GetKeyUp(KeyCode.Space) || Input.touchCount > 0)
            {
                faceTracker.reset();
                if (oldRvec != null)
                {
                    oldRvec.Dispose();
                    oldRvec = null;
                }
                if (oldTvec != null)
                {
                    oldTvec.Dispose();
                    oldTvec = null;
                }

                rightEye.SetActive(false);
                leftEye.SetActive(false);
                head.SetActive(false);
                mouth.SetActive(false);
                axes.SetActive(false);
            }
        }
        // Update is called once per frame
        void Update()
        {
            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();

                Imgproc.cvtColor(rgbaMat, rgbMat, Imgproc.COLOR_RGBA2RGB);

                // detect markers and estimate pose
                Aruco.detectMarkers(rgbMat, dictionary, corners, ids, detectorParams, rejected);
                //          Aruco.detectMarkers (imgMat, dictionary, corners, ids);
                if (estimatePose && ids.total() > 0)
                {
                    Aruco.estimatePoseSingleMarkers(corners, markerLength, camMatrix, distCoeffs, rvecs, tvecs);
                }


                // draw results
                if (ids.total() > 0)
                {
                    Aruco.drawDetectedMarkers(rgbMat, corners, ids, new Scalar(255, 0, 0));

                    if (estimatePose)
                    {
                        for (int i = 0; i < ids.total(); i++)
                        {
                            Aruco.drawAxis(rgbMat, camMatrix, distCoeffs, rvecs, tvecs, markerLength * 0.5f);

                            //This example displays the AR object only on the first detected marker.
                            if (i == 0)
                            {
                                // position
                                double[] tvec = tvecs.get(i, 0);
                                Vector4  pos  = new Vector4((float)tvec [0], (float)tvec [1], (float)tvec [2], 0); // from OpenCV
                                // right-handed coordinates system (OpenCV) to left-handed one (Unity)
                                ARGameObject.transform.localPosition = new Vector3(pos.x, -pos.y, pos.z);


                                // rotation
                                double[] rv   = rvecs.get(i, 0);
                                Mat      rvec = new Mat(3, 1, CvType.CV_64FC1);
                                rvec.put(0, 0, rv[0]);
                                rvec.put(1, 0, rv[1]);
                                rvec.put(2, 0, rv[2]);

                                Calib3d.Rodrigues(rvec, rotMat);

                                Vector3 forward = new Vector3((float)rotMat.get(0, 2) [0], (float)rotMat.get(1, 2) [0], (float)rotMat.get(2, 2) [0]); // from OpenCV
                                Vector3 up      = new Vector3((float)rotMat.get(0, 1) [0], (float)rotMat.get(1, 1) [0], (float)rotMat.get(2, 1) [0]); // from OpenCV
                                // right-handed coordinates system (OpenCV) to left-handed one (Unity)
                                Quaternion rot = Quaternion.LookRotation(new Vector3(forward.x, -forward.y, forward.z), new Vector3(up.x, -up.y, up.z));
                                ARGameObject.transform.localRotation = rot;


                                // Apply Z axis inverted matrix.
                                invertZM        = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));
                                transformationM = ARGameObject.transform.localToWorldMatrix * invertZM;

                                // Apply camera transform matrix.
                                transformationM = ARCamera.transform.localToWorldMatrix * transformationM;

                                ARUtils.SetTransformFromMatrix(ARGameObject.transform, ref transformationM);
                            }
                        }
                    }
                }

                if (showRejected && rejected.Count > 0)
                {
                    Aruco.drawDetectedMarkers(rgbMat, rejected, new Mat(), new Scalar(0, 0, 255));
                }


                Imgproc.putText(rgbMat, "W:" + rgbMat.width() + " H:" + rgbMat.height() + " SO:" + Screen.orientation, new Point(5, rgbMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false); // draw on rgbMat, which is what is displayed below

                Utils.matToTexture2D(rgbMat, texture, webCamTextureToMatHelper.GetBufferColors());
            }
        }
Example #14
        private IEnumerator init()
        {
            axes.SetActive(false);
            head.SetActive(false);
            rightEye.SetActive(false);
            leftEye.SetActive(false);
            mouth.SetActive(false);


            if (webCamTexture != null)
            {
                faceTracker.reset();

                webCamTexture.Stop();
                initDone = false;

                rgbaMat.Dispose();
                grayMat.Dispose();
                cascade.Dispose();
                camMatrix.Dispose();
                distCoeffs.Dispose();
            }

            // Checks how many and which cameras are available on the device
            for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++)
            {
                if (WebCamTexture.devices [cameraIndex].isFrontFacing == isFrontFacing)
                {
                    Debug.Log(cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing);

                    webCamDevice = WebCamTexture.devices [cameraIndex];

                    webCamTexture = new WebCamTexture(webCamDevice.name, width, height);

                    break;
                }
            }

            if (webCamTexture == null)
            {
                webCamDevice  = WebCamTexture.devices [0];
                webCamTexture = new WebCamTexture(webCamDevice.name, width, height);
            }

            Debug.Log("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);



            // Starts the camera
            webCamTexture.Play();


            while (true)
            {
                //If you want to use webcamTexture.width and webcamTexture.height on iOS, you have to wait until webcamTexture.didUpdateThisFrame == true, otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
                if (webCamTexture.width > 16 && webCamTexture.height > 16)
                {
#else
                if (webCamTexture.didUpdateThisFrame)
                {
#endif
                    Debug.Log("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
                    Debug.Log("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrongFacing " + webCamDevice.isFrontFacing);

                    colors = new Color32[webCamTexture.width * webCamTexture.height];

                    rgbaMat = new Mat(webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);
                    grayMat = new Mat(webCamTexture.height, webCamTexture.width, CvType.CV_8UC1);

                    texture = new Texture2D(webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);


                    cascade = new CascadeClassifier(Utils.getFilePath("haarcascade_frontalface_alt.xml"));
                    if (cascade.empty())
                    {
                        Debug.LogError("cascade file is not loaded.Please copy from “FaceTrackerSample/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
                    }

                    gameObject.transform.localScale = new Vector3(webCamTexture.width, webCamTexture.height, 1);


                    gameObject.transform.localEulerAngles = new Vector3(0, 0, 0);
//										gameObject.transform.rotation = gameObject.transform.rotation * Quaternion.AngleAxis (webCamTexture.videoRotationAngle, Vector3.back);


//										bool _videoVerticallyMirrored = webCamTexture.videoVerticallyMirrored;
//										float scaleX = 1;
//										float scaleY = _videoVerticallyMirrored ? -1.0f : 1.0f;
//										gameObject.transform.localScale = new Vector3 (scaleX * gameObject.transform.localScale.x, scaleY * gameObject.transform.localScale.y, 1);


                    gameObject.GetComponent <Renderer> ().material.mainTexture = texture;

                    Camera.main.orthographicSize = webCamTexture.height / 2;



                    int max_d = Mathf.Max(rgbaMat.rows(), rgbaMat.cols());
                    camMatrix = new Mat(3, 3, CvType.CV_64FC1);
                    camMatrix.put(0, 0, max_d);
                    camMatrix.put(0, 1, 0);
                    camMatrix.put(0, 2, rgbaMat.cols() / 2.0f);
                    camMatrix.put(1, 0, 0);
                    camMatrix.put(1, 1, max_d);
                    camMatrix.put(1, 2, rgbaMat.rows() / 2.0f);
                    camMatrix.put(2, 0, 0);
                    camMatrix.put(2, 1, 0);
                    camMatrix.put(2, 2, 1.0f);

                    Size     imageSize      = new Size(rgbaMat.cols(), rgbaMat.rows());
                    double   apertureWidth  = 0;
                    double   apertureHeight = 0;
                    double[] fovx           = new double[1];
                    double[] fovy           = new double[1];
                    double[] focalLength    = new double[1];
                    Point    principalPoint = new Point();
                    double[] aspectratio    = new double[1];



                    Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);

                    Debug.Log("imageSize " + imageSize.ToString());
                    Debug.Log("apertureWidth " + apertureWidth);
                    Debug.Log("apertureHeight " + apertureHeight);
                    Debug.Log("fovx " + fovx [0]);
                    Debug.Log("fovy " + fovy [0]);
                    Debug.Log("focalLength " + focalLength [0]);
                    Debug.Log("principalPoint " + principalPoint.ToString());
                    Debug.Log("aspectratio " + aspectratio [0]);


                    ARCamera.fieldOfView = (float)fovy [0];

                    Debug.Log("camMatrix " + camMatrix.dump());


                    distCoeffs = new MatOfDouble(0, 0, 0, 0);
                    Debug.Log("distCoeffs " + distCoeffs.dump());



                    lookAtM = getLookAtMatrix(new Vector3(0, 0, 0), new Vector3(0, 0, 1), new Vector3(0, -1, 0));
                    Debug.Log("lookAt " + lookAtM.ToString());

                    invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));



                    initDone = true;

                    break;
                }
                else
                {
                    yield return(0);
                }
            }
        }
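
        // A minimal sketch (not part of the original sample) of the pinhole intrinsics
        // built in init() above: with no calibration data, fx = fy = max(width, height)
        // approximates a typical webcam focal length and the principal point is assumed
        // to sit at the image center. Calib3d.calibrationMatrixValues then recovers the
        // vertical FOV applied to ARCamera.fieldOfView. The method name is hypothetical.
        static Mat CreateApproxCameraMatrix(int width, int height)
        {
            double f = Mathf.Max(width, height);
            Mat camMatrix = new Mat(3, 3, CvType.CV_64FC1);
            camMatrix.put(0, 0, f);
            camMatrix.put(0, 1, 0);
            camMatrix.put(0, 2, width / 2.0);
            camMatrix.put(1, 0, 0);
            camMatrix.put(1, 1, f);
            camMatrix.put(1, 2, height / 2.0);
            camMatrix.put(2, 0, 0);
            camMatrix.put(2, 1, 0);
            camMatrix.put(2, 2, 1.0);
            return camMatrix;
        }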

        // Update is called once per frame
        void Update()
        {
            if (!initDone)
            {
                return;
            }

#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
            if (webCamTexture.width > 16 && webCamTexture.height > 16)
            {
#else
            if (webCamTexture.didUpdateThisFrame)
            {
#endif

                Utils.webCamTextureToMat(webCamTexture, rgbaMat, colors);

                //flip to correct direction.
                if (webCamTexture.videoVerticallyMirrored)
                {
                    if (webCamDevice.isFrontFacing)
                    {
                        if (webCamTexture.videoRotationAngle == 0)
                        {
                            Core.flip(rgbaMat, rgbaMat, -1);
                        }
                        else if (webCamTexture.videoRotationAngle == 180)
                        {
                            Core.flip(rgbaMat, rgbaMat, 0);
                        }
                    }
                    else
                    {
                        if (webCamTexture.videoRotationAngle == 0)
                        {
                        }
                        else if (webCamTexture.videoRotationAngle == 180)
                        {
                            Core.flip(rgbaMat, rgbaMat, 1);
                        }
                    }
                }
                else
                {
                    if (webCamDevice.isFrontFacing)
                    {
                        if (webCamTexture.videoRotationAngle == 0)
                        {
                            Core.flip(rgbaMat, rgbaMat, 1);
                        }
                        else if (webCamTexture.videoRotationAngle == 180)
                        {
                            Core.flip(rgbaMat, rgbaMat, 0);
                        }
                    }
                    else
                    {
                        if (webCamTexture.videoRotationAngle == 0)
                        {
                        }
                        else if (webCamTexture.videoRotationAngle == 180)
                        {
                            Core.flip(rgbaMat, rgbaMat, -1);
                        }
                    }
                }
                //convert image to greyscale
                Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);


                if (faceTracker.getPoints().Count <= 0)
                {
                    Debug.Log("detectFace");

                    //equalize the histogram before face detection
                    using (Mat equalizeHistMat = new Mat())
                        using (MatOfRect faces = new MatOfRect()) {
                            Imgproc.equalizeHist(grayMat, equalizeHistMat);

                            cascade.detectMultiScale(equalizeHistMat, faces, 1.1f, 2, 0
                                                     | Objdetect.CASCADE_FIND_BIGGEST_OBJECT
                                                     | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size(equalizeHistMat.cols() * 0.15, equalizeHistMat.cols() * 0.15), new Size());



                            if (faces.rows() > 0)
                            {
                                Debug.Log("faces " + faces.dump());
                                //add initial face points from MatOfRect
                                faceTracker.addPoints(faces);

                                //draw face rect
                                OpenCVForUnity.Rect[] rects = faces.toArray();
                                for (int i = 0; i < rects.Length; i++)
                                {
                                    Core.rectangle(rgbaMat, new Point(rects [i].x, rects [i].y), new Point(rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar(255, 0, 0, 255), 2);
                                }
                            }
                        }
                }


                //track face points. If face points <= 0, track() always returns false.
                if (faceTracker.track(grayMat, faceTrackerParams))
                {
                    if (isDrawPoints)
                    {
                        faceTracker.draw(rgbaMat, new Scalar(255, 0, 0, 255), new Scalar(0, 255, 0, 255));
                    }

                    Core.putText(rgbaMat, "'Tap' or 'Space Key' to Reset", new Point(5, rgbaMat.rows() - 5), Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar(255, 255, 255, 255), 2, Core.LINE_AA, false);


                    Point[] points = faceTracker.getPoints() [0];


                    if (points.Length > 0)
                    {
//												for (int i = 0; i < points.Length; i++) {
//														Core.putText (rgbaMat, "" + i, new Point (points [i].x, points [i].y), Core.FONT_HERSHEY_SIMPLEX, 0.3, new Scalar (0, 0, 255, 255), 2, Core.LINE_AA, false);
//												}


                        imagePoints.fromArray(
                            points [31],                    //l eye
                            points [36],                    //r eye
                            points [67],                    //nose
                            points [48],                    //l mouth
                            points [54]                     //r mouth
//							,
//											points [1],//l ear
//											points [13]//r ear
                            );


                        Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);

                        bool isRefresh = false;

                        if (tvec.get(2, 0) [0] > 0 && tvec.get(2, 0) [0] < 1200 * ((float)webCamTexture.width / (float)width))
                        {
                            isRefresh = true;

                            if (oldRvec == null)
                            {
                                oldRvec = new Mat();
                                rvec.copyTo(oldRvec);
                            }
                            if (oldTvec == null)
                            {
                                oldTvec = new Mat();
                                tvec.copyTo(oldTvec);
                            }


                            //filter Rvec Noise.
                            using (Mat absDiffRvec = new Mat()) {
                                Core.absdiff(rvec, oldRvec, absDiffRvec);

                                //				Debug.Log ("absDiffRvec " + absDiffRvec.dump());

                                using (Mat cmpRvec = new Mat()) {
                                    Core.compare(absDiffRvec, new Scalar(rvecNoiseFilterRange), cmpRvec, Core.CMP_GT);

                                    if (Core.countNonZero(cmpRvec) > 0)
                                    {
                                        isRefresh = false;
                                    }
                                }
                            }



                            //filter Tvec Noise.
                            using (Mat absDiffTvec = new Mat()) {
                                Core.absdiff(tvec, oldTvec, absDiffTvec);

                                //				Debug.Log ("absDiffRvec " + absDiffRvec.dump());

                                using (Mat cmpTvec = new Mat()) {
                                    Core.compare(absDiffTvec, new Scalar(tvecNoiseFilterRange), cmpTvec, Core.CMP_GT);

                                    if (Core.countNonZero(cmpTvec) > 0)
                                    {
                                        isRefresh = false;
                                    }
                                }
                            }
                        }

                        if (isRefresh)
                        {
                            if (!rightEye.activeSelf)
                            {
                                rightEye.SetActive(true);
                            }
                            if (!leftEye.activeSelf)
                            {
                                leftEye.SetActive(true);
                            }


                            if ((Mathf.Abs((float)(points [48].x - points [56].x)) < Mathf.Abs((float)(points [31].x - points [36].x)) / 2.2 &&
                                 Mathf.Abs((float)(points [51].y - points [57].y)) > Mathf.Abs((float)(points [31].x - points [36].x)) / 2.9) ||
                                Mathf.Abs((float)(points [51].y - points [57].y)) > Mathf.Abs((float)(points [31].x - points [36].x)) / 2.7)
                            {
                                if (!mouth.activeSelf)
                                {
                                    mouth.SetActive(true);
                                }
                            }
                            else
                            {
                                if (mouth.activeSelf)
                                {
                                    mouth.SetActive(false);
                                }
                            }



                            rvec.copyTo(oldRvec);
                            tvec.copyTo(oldTvec);

                            Calib3d.Rodrigues(rvec, rotM);

                            transformationM.SetRow(0, new Vector4((float)rotM.get(0, 0) [0], (float)rotM.get(0, 1) [0], (float)rotM.get(0, 2) [0], (float)tvec.get(0, 0) [0]));
                            transformationM.SetRow(1, new Vector4((float)rotM.get(1, 0) [0], (float)rotM.get(1, 1) [0], (float)rotM.get(1, 2) [0], (float)tvec.get(1, 0) [0]));
                            transformationM.SetRow(2, new Vector4((float)rotM.get(2, 0) [0], (float)rotM.get(2, 1) [0], (float)rotM.get(2, 2) [0], (float)tvec.get(2, 0) [0]));
                            transformationM.SetRow(3, new Vector4(0, 0, 0, 1));

                            modelViewMtrx = lookAtM * transformationM * invertZM;

                            ARCamera.worldToCameraMatrix = modelViewMtrx;


                            //				Debug.Log ("modelViewMtrx " + modelViewMtrx.ToString());
                        }
                    }
                }



                Utils.matToTexture2D(rgbaMat, texture, colors);
            }

            if (Input.GetKeyUp(KeyCode.Space) || Input.touchCount > 0)
            {
                faceTracker.reset();
                if (oldRvec != null)
                {
                    oldRvec.Dispose();
                    oldRvec = null;
                }
                if (oldTvec != null)
                {
                    oldTvec.Dispose();
                    oldTvec = null;
                }

                ARCamera.ResetWorldToCameraMatrix();

                rightEye.SetActive(false);
                leftEye.SetActive(false);
                mouth.SetActive(false);
            }
        }
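
        // A minimal sketch (not part of the original sample) of the absdiff-based jitter
        // filter used in Update() above, assuming rvec/tvec are 3x1 CV_64FC1 Mats. It
        // returns true when any component of |current - previous| exceeds the given range,
        // in which case the pose update for this frame is skipped. The name is hypothetical.
        static bool ExceedsNoiseRange(Mat current, Mat previous, double range)
        {
            using (Mat absDiff = new Mat())
            using (Mat cmp = new Mat())
            {
                Core.absdiff(current, previous, absDiff);
                Core.compare(absDiff, new Scalar(range), cmp, Core.CMP_GT);
                return Core.countNonZero(cmp) > 0;
            }
        }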

        void OnDisable()
        {
            webCamTexture.Stop();
        }

        private Matrix4x4 getLookAtMatrix(Vector3 pos, Vector3 target, Vector3 up)
        {
            Vector3 z = Vector3.Normalize(pos - target);
            Vector3 x = Vector3.Normalize(Vector3.Cross(up, z));
            Vector3 y = Vector3.Normalize(Vector3.Cross(z, x));

            Matrix4x4 result = new Matrix4x4();

            result.SetRow(0, new Vector4(x.x, x.y, x.z, -(Vector3.Dot(pos, x))));
            result.SetRow(1, new Vector4(y.x, y.y, y.z, -(Vector3.Dot(pos, y))));
            result.SetRow(2, new Vector4(z.x, z.y, z.z, -(Vector3.Dot(pos, z))));
            result.SetRow(3, new Vector4(0, 0, 0, 1));

            return(result);
        }
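
        // Usage sketch for getLookAtMatrix (restating how init() above wires it up):
        // it builds a right-handed view matrix with basis z = normalize(pos - target).
        // Called with the camera at the origin looking along +Z and (0, -1, 0) as "up",
        // it matches OpenCV's Y-down camera frame, so composing it with the solvePnP pose
        // and a Z-flip yields the model-view matrix assigned in Update():
        //
        //     lookAtM  = getLookAtMatrix(Vector3.zero, new Vector3(0, 0, 1), new Vector3(0, -1, 0));
        //     invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));
        //     modelViewMtrx = lookAtM * transformationM * invertZM;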

        void OnGUI()
        {
            float     screenScale  = Screen.height / 240.0f;
            Matrix4x4 scaledMatrix = Matrix4x4.Scale(new Vector3(screenScale, screenScale, screenScale));

            GUI.matrix = scaledMatrix;


            GUILayout.BeginVertical();
            if (GUILayout.Button("back"))
            {
                Application.LoadLevel("FaceTrackerSample");
            }
            if (GUILayout.Button("change camera"))
            {
                isFrontFacing = !isFrontFacing;
                StartCoroutine(init());
            }

            if (GUILayout.Button("drawPoints"))
            {
                if (isDrawPoints)
                {
                    isDrawPoints = false;
                }
                else
                {
                    isDrawPoints = true;
                }
            }
            if (GUILayout.Button("axes"))
            {
                if (axes.activeSelf)
                {
                    axes.SetActive(false);
                }
                else
                {
                    axes.SetActive(true);
                }
            }
            if (GUILayout.Button("head"))
            {
                if (head.activeSelf)
                {
                    head.SetActive(false);
                }
                else
                {
                    head.SetActive(true);
                }
            }


            GUILayout.EndVertical();
        }
    }
Example #15
        // Update is called once per frame
        void Update()
        {
            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();


                OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);

                //detect face rects
                List <UnityEngine.Rect> detectResult = faceLandmarkDetector.Detect();

                if (detectResult.Count > 0)
                {
                    //detect landmark points
                    List <Vector2> points = faceLandmarkDetector.DetectLandmark(detectResult [0]);

                    if (isShowingFacePoints)
                    {
                        OpenCVForUnityUtils.DrawFaceLandmark(rgbaMat, points, new Scalar(0, 255, 0, 255), 2);
                    }

                    imagePoints.fromArray(
                        new Point((points [38].x + points [41].x) / 2, (points [38].y + points [41].y) / 2), //l eye
                        new Point((points [43].x + points [46].x) / 2, (points [43].y + points [46].y) / 2), //r eye
                        new Point(points [33].x, points [33].y),                                             //nose
                        new Point(points [48].x, points [48].y),                                             //l mouth
                        new Point(points [54].x, points [54].y)                                              //r mouth
                        ,
                        new Point(points [0].x, points [0].y),                                               //l ear
                        new Point(points [16].x, points [16].y)                                              //r ear
                        );


                    Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);


                    if (tvec.get(2, 0) [0] > 0)
                    {
                        if (Mathf.Abs((float)(points [43].y - points [46].y)) > Mathf.Abs((float)(points [42].x - points [45].x)) / 6.0)
                        {
                            if (isShowingEffects)
                            {
                                rightEye.SetActive(true);
                            }
                        }

                        if (Mathf.Abs((float)(points [38].y - points [41].y)) > Mathf.Abs((float)(points [39].x - points [36].x)) / 6.0)
                        {
                            if (isShowingEffects)
                            {
                                leftEye.SetActive(true);
                            }
                        }
                        if (isShowingHead)
                        {
                            head.SetActive(true);
                        }
                        if (isShowingAxes)
                        {
                            axes.SetActive(true);
                        }


                        float noseDistance  = Mathf.Abs((float)(points [27].y - points [33].y));
                        float mouthDistance = Mathf.Abs((float)(points [62].y - points [66].y));
                        if (mouthDistance > noseDistance / 5.0)
                        {
                            if (isShowingEffects)
                            {
                                mouth.SetActive(true);
                                foreach (ParticleSystem ps in mouthParticleSystem)
                                {
                                    ps.enableEmission = true;
                                    ps.startSize      = 40 * (mouthDistance / noseDistance);
                                }
                            }
                        }
                        else
                        {
                            if (isShowingEffects)
                            {
                                foreach (ParticleSystem ps in mouthParticleSystem)
                                {
                                    ps.enableEmission = false;
                                }
                            }
                        }


                        // position
                        Vector4 pos = new Vector4((float)tvec.get(0, 0) [0], (float)tvec.get(1, 0) [0], (float)tvec.get(2, 0) [0], 0);     // from OpenCV
                        // right-handed coordinates system (OpenCV) to left-handed one (Unity)
                        ARGameObject.transform.localPosition = new Vector3(pos.x, -pos.y, pos.z);


                        // rotation
                        Calib3d.Rodrigues(rvec, rotMat);

                        Vector3 forward = new Vector3((float)rotMat.get(0, 2) [0], (float)rotMat.get(1, 2) [0], (float)rotMat.get(2, 2) [0]); // from OpenCV
                        Vector3 up      = new Vector3((float)rotMat.get(0, 1) [0], (float)rotMat.get(1, 1) [0], (float)rotMat.get(2, 1) [0]); // from OpenCV
                        // right-handed coordinates system (OpenCV) to left-handed one (Unity)
                        Quaternion rot = Quaternion.LookRotation(new Vector3(forward.x, -forward.y, forward.z), new Vector3(up.x, -up.y, up.z));
                        ARGameObject.transform.localRotation = rot;


                        // Apply Z axis inverted matrix.
                        invertZM        = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));
                        transformationM = ARGameObject.transform.localToWorldMatrix * invertZM;

                        // Apply camera transform matrix.
                        transformationM = ARCamera.transform.localToWorldMatrix * transformationM;

                        ARUtils.SetTransformFromMatrix(ARGameObject.transform, ref transformationM);
                    }
                }

                Imgproc.putText(rgbaMat, "W:" + rgbaMat.width() + " H:" + rgbaMat.height() + " SO:" + Screen.orientation, new Point(5, rgbaMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar(255, 255, 255, 255), 1, Imgproc.LINE_AA, false);

                OpenCVForUnity.Utils.matToTexture2D(rgbaMat, texture, webCamTextureToMatHelper.GetBufferColors());
            }
        }
        private void HandPoseEstimationProcess(Mat rgbaMat)
        {
            //Imgproc.blur(mRgba, mRgba, new Size(5,5));
            Imgproc.GaussianBlur(rgbaMat, rgbaMat, new Size(3, 3), 1, 1);
            //Imgproc.medianBlur(mRgba, mRgba, 3);

            if (!isColorSelected)
            {
                return;
            }

            detector.Process(rgbaMat);

            List <MatOfPoint> contours = detector.GetContours();

            //Debug.Log ("Contours count: " + contours.Count);

            if (contours.Count <= 0)
            {
                return;
            }

            RotatedRect rect = Imgproc.minAreaRect(new MatOfPoint2f(contours[0].toArray()));

            double boundWidth  = rect.size.width;
            double boundHeight = rect.size.height;
            int    boundPos    = 0;

            for (int i = 1; i < contours.Count; i++)
            {
                rect = Imgproc.minAreaRect(new MatOfPoint2f(contours[i].toArray()));
                if (rect.size.width * rect.size.height > boundWidth * boundHeight)
                {
                    boundWidth  = rect.size.width;
                    boundHeight = rect.size.height;
                    boundPos    = i;
                }
            }

            MatOfPoint contour = contours[boundPos];

            OpenCVForUnity.CoreModule.Rect boundRect = Imgproc.boundingRect(new MatOfPoint(contour.toArray()));

            Imgproc.rectangle(rgbaMat, boundRect.tl(), boundRect.br(), CONTOUR_COLOR_WHITE, 2, 8, 0);

            //            Debug.Log (
            //                " Row start [" +
            //(int)boundRect.tl ().y + "] row end [" +
            //                    (int)boundRect.br ().y + "] Col start [" +
            //                    (int)boundRect.tl ().x + "] Col end [" +
            //                    (int)boundRect.br ().x + "]");

            Point bottomLeft  = new Point(boundRect.x, boundRect.y + boundRect.height);
            Point topLeft     = new Point(boundRect.x, boundRect.y);
            Point bottomRight = new Point(boundRect.x + boundRect.width, boundRect.y + boundRect.height);
            Point topRight    = new Point(boundRect.x + boundRect.width, boundRect.y);

            rectPoints = new MatOfPoint2f(topLeft,     //Top Left
                                          topRight,    //Top Right
                                          bottomRight, //Bottom Right
                                          bottomLeft   //Bottom Left
                                          );

            //double a = boundRect.br ().y - boundRect.tl ().y;
            //a = a * 0.7;
            //a = boundRect.tl ().y + a;

            //Debug.Log (" A [" + a + "] br y - tl y = [" + (boundRect.br ().y - boundRect.tl ().y) + "]");

            //Imgproc.rectangle (rgbaMat, boundRect.tl (), new Point (boundRect.br ().x, a), CONTOUR_COLOR, 2, 8, 0);

            List <Point3> m_markerCorners3dList = new List <Point3>();

            m_markerCorners3dList.Add(new Point3(-0.5f, -0.5f, 0)); //Top, Left (A)
            m_markerCorners3dList.Add(new Point3(+0.5f, -0.5f, 0)); //Top, Right (B)
            m_markerCorners3dList.Add(new Point3(+0.5f, +0.5f, 0)); //Bottom, Right (C)
            m_markerCorners3dList.Add(new Point3(-0.5f, +0.5f, 0)); //Bottom, Left (D)
            m_markerCorners3d.fromList(m_markerCorners3dList);

            //estimate pose
            Mat Rvec = new Mat();
            Mat Tvec = new Mat();
            Mat raux = new Mat();
            Mat taux = new Mat();

            Calib3d.solvePnP(m_markerCorners3d, rectPoints, camMatrix, distCoeff, raux, taux);

            raux.convertTo(Rvec, CvType.CV_32F);
            taux.convertTo(Tvec, CvType.CV_32F);

            rotMat = new Mat(3, 3, CvType.CV_64FC1);
            Calib3d.Rodrigues(Rvec, rotMat);

            transformationM.SetRow(0, new Vector4((float)rotMat.get(0, 0)[0], (float)rotMat.get(0, 1)[0], (float)rotMat.get(0, 2)[0], (float)Tvec.get(0, 0)[0]));
            transformationM.SetRow(1, new Vector4((float)rotMat.get(1, 0)[0], (float)rotMat.get(1, 1)[0], (float)rotMat.get(1, 2)[0], (float)Tvec.get(1, 0)[0]));
            transformationM.SetRow(2, new Vector4((float)rotMat.get(2, 0)[0], (float)rotMat.get(2, 1)[0], (float)rotMat.get(2, 2)[0], (float)Tvec.get(2, 0)[0]));
            transformationM.SetRow(3, new Vector4(0, 0, 0, 1));

            //Debug.Log ("transformationM " + transformationM.ToString ());

            Rvec.Dispose();
            Tvec.Dispose();
            raux.Dispose();
            taux.Dispose();
            rotMat.Dispose();

            ARM = ARCamera.transform.localToWorldMatrix * invertYM * transformationM * invertZM;
            //Debug.Log("arM " + ARM.ToString());

            if (ARGameObject != null)
            {
                ARUtils.SetTransformFromMatrix(ARGameObject.transform, ref ARM);
                if (deactivateCoroutine == null)
                {
                    deactivateCoroutine = StartCoroutine(Wait(10.0f));
                }
                ARGameObject.SetActive(true);
            }

            //end pose estimation

            MatOfPoint2f pointMat = new MatOfPoint2f();

            Imgproc.approxPolyDP(new MatOfPoint2f(contour.toArray()), pointMat, 3, true);
            contour = new MatOfPoint(pointMat.toArray());

            MatOfInt  hull         = new MatOfInt();
            MatOfInt4 convexDefect = new MatOfInt4();

            Imgproc.convexHull(new MatOfPoint(contour.toArray()), hull);

            if (hull.toArray().Length < 3)
            {
                return;
            }

            Imgproc.convexityDefects(new MatOfPoint(contour.toArray()), hull, convexDefect);

            List <MatOfPoint> hullPoints = new List <MatOfPoint>();
            List <Point>      listPo     = new List <Point>();

            List <int>   hullIndexes   = hull.toList();
            List <Point> contourPoints = contour.toList();

            for (int j = 0; j < hullIndexes.Count; j++)
            {
                listPo.Add(contourPoints[hullIndexes[j]]);
            }

            MatOfPoint e = new MatOfPoint();

            e.fromList(listPo);
            hullPoints.Add(e);

            List <Point> listPoDefect = new List <Point>();

            if (convexDefect.rows() > 0)
            {
                List <int>   convexDefectList = convexDefect.toList();
                List <Point> contourList      = contour.toList();
                for (int j = 0; j < convexDefectList.Count; j = j + 4)
                {
                    Point farPoint = contourList[convexDefectList[j + 2]];
                    int   depth    = convexDefectList[j + 3];
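                    // NOTE: the depth/position filter below is commented out in this sample,
                    // so listPoDefect stays empty and numberOfFingers always reads 0.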
                    //if (depth > threasholdSlider.value && farPoint.y < a)
                    //{
                    //    listPoDefect.Add(contourList[convexDefectList[j + 2]]);
                    //}
                    //Debug.Log ("convexDefectList [" + j + "] " + convexDefectList [j + 3]);
                }
            }


            Debug.Log("hull: " + hull.toList());
            if (convexDefect.rows() > 0)
            {
                Debug.Log("defects: " + convexDefect.toList());
            }

            //use these contours to do hand detection
            Imgproc.drawContours(rgbaMat, hullPoints, -1, CONTOUR_COLOR, 3);

            int defectsTotal = (int)convexDefect.total();

            Debug.Log("Defect total " + defectsTotal);

            this.numberOfFingers = listPoDefect.Count;
            if (this.numberOfFingers > 5)
            {
                this.numberOfFingers = 5;
            }

            Debug.Log("numberOfFingers " + numberOfFingers);

            Imgproc.putText(rgbaMat, "" + numberOfFingers, new Point(rgbaMat.cols() / 2, rgbaMat.rows() / 2), Imgproc.FONT_HERSHEY_PLAIN, 4.0, new Scalar(255, 255, 255, 255), 6, Imgproc.LINE_AA, false);
            numberOfFingersText.text = numberOfFingers.ToString();


            foreach (Point p in listPoDefect)
            {
                Imgproc.circle(rgbaMat, p, 6, new Scalar(255, 0, 255, 255), -1);
            }
        }
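
        // A minimal sketch (not part of the original sample) of the defect filter that is
        // commented out above. Imgproc.convexityDefects packs each defect as four ints:
        // start index, end index, farthest-point index, and depth in fixed-point (the true
        // depth multiplied by 256). The depth threshold and the maxY bound (to ignore
        // defects below the palm line) are assumptions here; the method name is hypothetical.
        static List <Point> FilterDefects(List <int> convexDefectList, List <Point> contourList, double depthThreshold, double maxY)
        {
            List <Point> defectPoints = new List <Point>();
            for (int j = 0; j + 3 < convexDefectList.Count; j += 4)
            {
                Point  farPoint = contourList[convexDefectList[j + 2]];
                double depth    = convexDefectList[j + 3] / 256.0; // convert fixed-point depth
                if (depth > depthThreshold && farPoint.y < maxY)
                {
                    defectPoints.Add(farPoint);
                }
            }
            return defectPoints;
        }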
Example #17
        private void UpdateARHeadTransform(List <Vector2> points)
        {
            // The coordinates in pixels of the object detected on the image projected onto the plane.
            imagePoints.fromArray(
                new Point((points [38].x + points [41].x) / 2, (points [38].y + points [41].y) / 2), //l eye (Interpupillary breadth)
                new Point((points [43].x + points [46].x) / 2, (points [43].y + points [46].y) / 2), //r eye (Interpupillary breadth)
                new Point(points [33].x, points [33].y),                                             //nose (Nose top)
                new Point(points [48].x, points [48].y),                                             //l mouth (Mouth breadth)
                new Point(points [54].x, points [54].y)                                              //r mouth (Mouth breadth)
                ,
                new Point(points [0].x, points [0].y),                                               //l ear (Bitragion breadth)
                new Point(points [16].x, points [16].y)                                              //r ear (Bitragion breadth)
                );

            //Estimate head pose.
            Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);


            if (tvec.get(2, 0) [0] > 0)
            {
                if (Mathf.Abs((float)(points [43].y - points [46].y)) > Mathf.Abs((float)(points [42].x - points [45].x)) / 6.0)
                {
                    if (isShowingEffects)
                    {
                        rightEye.SetActive(true);
                    }
                }

                if (Mathf.Abs((float)(points [38].y - points [41].y)) > Mathf.Abs((float)(points [39].x - points [36].x)) / 6.0)
                {
                    if (isShowingEffects)
                    {
                        leftEye.SetActive(true);
                    }
                }
                if (isShowingHead)
                {
                    head.SetActive(true);
                }
                if (isShowingAxes)
                {
                    axes.SetActive(true);
                }


                float noseDistance  = Mathf.Abs((float)(points [27].y - points [33].y));
                float mouthDistance = Mathf.Abs((float)(points [62].y - points [66].y));
                if (mouthDistance > noseDistance / 5.0)
                {
                    if (isShowingEffects)
                    {
                        mouth.SetActive(true);
                        foreach (ParticleSystem ps in mouthParticleSystem)
                        {
                            ps.enableEmission = true;
                            ps.startSize      = 40 * (mouthDistance / noseDistance);
                        }
                    }
                }
                else
                {
                    if (isShowingEffects)
                    {
                        foreach (ParticleSystem ps in mouthParticleSystem)
                        {
                            ps.enableEmission = false;
                        }
                    }
                }

                Calib3d.Rodrigues(rvec, rotMat);

                transformationM.SetRow(0, new Vector4((float)rotMat.get(0, 0) [0], (float)rotMat.get(0, 1) [0], (float)rotMat.get(0, 2) [0], (float)tvec.get(0, 0) [0] / 1000));
                transformationM.SetRow(1, new Vector4((float)rotMat.get(1, 0) [0], (float)rotMat.get(1, 1) [0], (float)rotMat.get(1, 2) [0], (float)tvec.get(1, 0) [0] / 1000));
                transformationM.SetRow(2, new Vector4((float)rotMat.get(2, 0) [0], (float)rotMat.get(2, 1) [0], (float)rotMat.get(2, 2) [0], (float)tvec.get(2, 0) [0] / 1000 / webCamTextureToMatHelper.DOWNSCALE_RATIO));
                transformationM.SetRow(3, new Vector4(0, 0, 0, 1));

                // right-handed coordinates system (OpenCV) to left-handed one (Unity)
                ARM = invertYM * transformationM;

                // Apply Z axis inverted matrix.
                ARM = ARM * invertZM;

                // Apply camera transform matrix.
                ARM = ARCamera.transform.localToWorldMatrix * ARM;

                ARUtils.SetTransformFromMatrix(ARGameObject.transform, ref ARM);
            }
        }
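
        // A minimal sketch (not part of the original sample) of the handedness-conversion
        // matrices this method assumes were initialized elsewhere: invertYM flips the Y axis
        // (OpenCV's Y-down frame to Unity's Y-up) and invertZM flips the Z axis to complete
        // the right-handed to left-handed change of basis.
        //
        //     invertYM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1));
        //     invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));
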
        private void ARUcoDetect()
        {
            if (DOWNSCALE_RATIO == 1)
            {
                downScaleRgbaMat = rgbaMat4Thread;
            }
            else
            {
                Imgproc.resize(rgbaMat4Thread, downScaleRgbaMat, new Size(), 1.0 / DOWNSCALE_RATIO, 1.0 / DOWNSCALE_RATIO, Imgproc.INTER_LINEAR);
            }
            Imgproc.cvtColor(downScaleRgbaMat, rgbMat, Imgproc.COLOR_RGBA2RGB);

            // Detect markers and estimate Pose
            Aruco.detectMarkers(rgbMat, dictionary, corners, ids, detectorParams, rejected);

            if (estimatePose && ids.total() > 0)
            {
                Aruco.estimatePoseSingleMarkers(corners, markerLength, camMatrix, distCoeffs, rvecs, tvecs);

                for (int i = 0; i < ids.total(); i++)
                {
                    //This example displays the ARObject on the first detected marker only.
                    if (i == 0)
                    {
                        // Position
                        double[] tvec = tvecs.get(i, 0);

                        // Rotation
                        double[] rv   = rvecs.get(i, 0);
                        Mat      rvec = new Mat(3, 1, CvType.CV_64FC1);
                        rvec.put(0, 0, rv [0]);
                        rvec.put(1, 0, rv [1]);
                        rvec.put(2, 0, rv [2]);
                        Calib3d.Rodrigues(rvec, rotMat);

                        transformationM.SetRow(0, new Vector4((float)rotMat.get(0, 0) [0], (float)rotMat.get(0, 1) [0], (float)rotMat.get(0, 2) [0], (float)tvec [0]));
                        transformationM.SetRow(1, new Vector4((float)rotMat.get(1, 0) [0], (float)rotMat.get(1, 1) [0], (float)rotMat.get(1, 2) [0], (float)tvec [1]));
                        transformationM.SetRow(2, new Vector4((float)rotMat.get(2, 0) [0], (float)rotMat.get(2, 1) [0], (float)rotMat.get(2, 2) [0], (float)(tvec [2] / DOWNSCALE_RATIO)));

                        transformationM.SetRow(3, new Vector4(0, 0, 0, 1));

                        // Right-handed coordinates system (OpenCV) to left-handed one (Unity)
                        ARM = invertYM * transformationM;

                        // Apply Z axis inverted matrix.
                        ARM = ARM * invertZM;

                        ARTransMatrixUpdated = true;

                        break;
                    }
                }
            }

            /*
             * // Draw results
             * if (ids.total () > 0) {
             *  Aruco.drawDetectedMarkers (rgbMat, corners, ids, new Scalar (255, 0, 0));
             *
             *  if (estimatePose) {
             *      for (int i = 0; i < ids.total (); i++) {
             *          Aruco.drawAxis (rgbMat, camMatrix, distCoeffs, rvecs, tvecs, markerLength * 0.5f);
             *      }
             *  }
             * }
             */
        }
Example #19
    /// <summary>
    /// Update is called once per frame
    /// </summary>
    void Update()
    {
        if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
        {
            Mat rgbaMat = webCamTextureToMatHelper.GetMat();

            OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);

            // Detect faces on the resized image
            //detect face rects
            List <UnityEngine.Rect> detectResult = faceLandmarkDetector.Detect();
            if (detectResult.Count > 0)
            {
                //detect landmark points
                List <Vector2> points = faceLandmarkDetector.DetectLandmark(detectResult[0]);
                //draw the landmark points on the Mat
                OpenCVForUnityUtils.DrawFaceLandmark(rgbaMat, points, new Scalar(0, 255, 0, 255), 2);

                imagePoints.fromArray(
                    new Point((points[38].x + points[41].x) / 2, (points[38].y + points[41].y) / 2), //l eye (Interpupillary breadth)
                    new Point((points[43].x + points[46].x) / 2, (points[43].y + points[46].y) / 2), //r eye (Interpupillary breadth)
                    new Point(points[30].x, points[30].y),                                           //nose (Nose top)
                    new Point(points[48].x, points[48].y),                                           //l mouth (Mouth breadth)
                    new Point(points[54].x, points[54].y),                                           //r mouth (Mouth breadth)
                    new Point(points[0].x, points[0].y),                                             //l ear (Bitragion breadth)
                    new Point(points[16].x, points[16].y)                                            //r ear (Bitragion breadth)
                    );

                // Estimate head pose.
                if (rvec == null || tvec == null)
                {
                    rvec = new Mat(3, 1, CvType.CV_64FC1);
                    tvec = new Mat(3, 1, CvType.CV_64FC1);
                    //solvePnP finds the object pose from 3D-2D point correspondences; rvec is the rotation vector, tvec the translation vector
                    Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
                }

                double tvec_z = tvec.get(2, 0)[0];

                if (double.IsNaN(tvec_z) || tvec_z < 0)
                {
                    Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
                }
                else
                {
                    Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec, true, Calib3d.SOLVEPNP_ITERATIVE);
                }

                if (!double.IsNaN(tvec_z) && points.Count == 68)
                {
                    cubismParameterDictionary.Clear();
                    Calib3d.Rodrigues(rvec, rotMat);

                    transformationM.SetRow(0, new Vector4((float)rotMat.get(0, 0)[0], (float)rotMat.get(0, 1)[0], (float)rotMat.get(0, 2)[0], (float)tvec.get(0, 0)[0]));
                    transformationM.SetRow(1, new Vector4((float)rotMat.get(1, 0)[0], (float)rotMat.get(1, 1)[0], (float)rotMat.get(1, 2)[0], (float)tvec.get(1, 0)[0]));
                    transformationM.SetRow(2, new Vector4((float)rotMat.get(2, 0)[0], (float)rotMat.get(2, 1)[0], (float)rotMat.get(2, 2)[0], (float)tvec.get(2, 0)[0]));
                    transformationM.SetRow(3, new Vector4(0, 0, 0, 1));

                    ARM = invertYM * transformationM * invertZM;
                    Vector3 forward;
                    forward.x = ARM.m02;
                    forward.y = ARM.m12;
                    forward.z = ARM.m22;

                    Vector3 upwards;
                    upwards.x = ARM.m01;
                    upwards.y = ARM.m11;
                    upwards.z = ARM.m21;

                    Vector3 angles  = Quaternion.LookRotation(forward, upwards).eulerAngles;
                    float   rotateX = angles.x > 180 ? angles.x - 360 : angles.x;
                    cubismParameterDictionary.Add(ParamAngleY, (float)Math.Round(rotateX));
                    float rotateY = angles.y > 180 ? angles.y - 360 : angles.y;
                    cubismParameterDictionary.Add(ParamAngleX, (float)Math.Round(-rotateY) * 2);
                    float rotateZ = angles.z > 180 ? angles.z - 360 : angles.z;
                    cubismParameterDictionary.Add(ParamAngleZ, (float)Math.Round(-rotateZ) * 2);
                    //Debug.("X" + rotateX + "Y" + rotateY + "Z" + rotateZ);

                    //ParamAngleX.BlendToValue(BlendMode,(float)(Math.Round(-rotateY) * 2));
                    //ParamAngleY.BlendToValue(BlendMode, (float)Math.Round(rotateX));
                    //ParamAngleZ.BlendToValue(BlendMode, (float)Math.Round(-rotateZ) * 2);

                    float eyeOpen_L = Mathf.Clamp(Mathf.Abs(points[43].y - points[47].y) / (Mathf.Abs(points[43].x - points[44].x) * 0.75f), -0.1f, 2.0f);
                    if (eyeOpen_L >= 0.8f)
                    {
                        eyeOpen_L = 1f;
                    }
                    else
                    {
                        eyeOpen_L = 0;
                    }


                    float eyeOpen_R = Mathf.Clamp(Mathf.Abs(points[38].y - points[40].y) / (Mathf.Abs(points[37].x - points[38].x) * 0.75f), -0.1f, 2.0f);
                    if (eyeOpen_R >= 0.8f)
                    {
                        eyeOpen_R = 1f;
                    }
                    else
                    {
                        eyeOpen_R = 0;
                    }

                    // ParamEyeROpen.BlendToValue(BlendMode, eyeOpen_R);
                    cubismParameterDictionary.Add(ParamEyeROpen, eyeOpen_R);
                    // ParamEyeLOpen.BlendToValue(BlendMode, eyeOpen_L);
                    cubismParameterDictionary.Add(ParamEyeLOpen, eyeOpen_L);
                    // ParamEyeBallX.BlendToValue(BlendMode, (float)rotateY / 30f);
                    cubismParameterDictionary.Add(ParamEyeBallX, rotateY / 30f);
                    // ParamEyeBallX.BlendToValue(BlendMode, (float)-rotateX / 30f - 0.25f);
                    cubismParameterDictionary.Add(ParamEyeBallY, (float)-rotateX / 30f - 0.25f);

                    float RY = Mathf.Abs(points[19].y - points[27].y) / Mathf.Abs(points[27].y - points[29].y);
                    RY -= 1;
                    RY *= 4f;
                    float LY = Mathf.Abs(points[24].y - points[27].y) / Mathf.Abs(points[27].y - points[29].y);
                    LY -= 1;
                    LY *= 4f;

                    // ParamBrowRY.BlendToValue(BlendMode, RY);
                    cubismParameterDictionary.Add(ParamBrowRY, RY);
                    // ParamBrowLY.BlendToValue(BlendMode, LY);
                    cubismParameterDictionary.Add(ParamBrowLY, LY);
                    float mouthOpen = Mathf.Clamp01(Mathf.Abs(points[62].y - points[66].y) / (Mathf.Abs(points[51].y - points[62].y) + Mathf.Abs(points[66].y - points[57].y)));
                    if (mouthOpen < 0.6f)
                    {
                        mouthOpen = 0;
                    }
                    // ParamMouthOpenY.BlendToValue(BlendMode, mouthOpen);
                    cubismParameterDictionary.Add(ParamMouthOpenY, mouthOpen);
                    float mouthSize = Mathf.Abs(points[48].x - points[54].x) / (Mathf.Abs(points[31].x - points[35].x));
                    // ParamMouthForm.BlendToValue(BlendMode, Mathf.Clamp(mouthSize, -1.0f, 1.0f));
                    cubismParameterDictionary.Add(ParamMouthForm, Mathf.Clamp(mouthSize, -1.0f, 1.0f));
                }
            }
            OpenCVForUnity.Utils.matToTexture2D(rgbaMat, texture, webCamTextureToMatHelper.GetBufferColors());
        }
    }
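
    // A minimal sketch (not part of the original sample) of the eye-openness test used
    // above, assuming the 68-point dlib landmark layout: the vertical lid gap is divided
    // by a fraction of the horizontal distance between two adjacent eye landmarks, then
    // binarized so the Cubism parameter snaps to fully open or fully closed. The method
    // name is hypothetical; the 0.75 and 0.8 constants mirror the code above.
    static float EyeOpenness(Vector2 upperLid, Vector2 lowerLid, Vector2 cornerA, Vector2 cornerB)
    {
        float ratio = Mathf.Clamp(Mathf.Abs(upperLid.y - lowerLid.y) / (Mathf.Abs(cornerA.x - cornerB.x) * 0.75f), -0.1f, 2.0f);
        return ratio >= 0.8f ? 1f : 0f;
    }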
Example #20
        // Update is called once per frame
        void Update()
        {
            if (!initDone)
            {
                return;
            }

            if (screenOrientation != Screen.orientation)
            {
                screenOrientation = Screen.orientation;
                updateLayout();
            }

#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
            if (webCamTexture.width > 16 && webCamTexture.height > 16)
            {
#else
            if (webCamTexture.didUpdateThisFrame)
            {
#endif
                Utils.webCamTextureToMat(webCamTexture, rgbaMat, colors);
                // Flip to correct the orientation.
                if (webCamDevice.isFrontFacing)
                {
                    if (webCamTexture.videoRotationAngle == 0)
                    {
                        Core.flip(rgbaMat, rgbaMat, 1);
                    }
                    else if (webCamTexture.videoRotationAngle == 90)
                    {
                        Core.flip(rgbaMat, rgbaMat, 0);
                    }
                    if (webCamTexture.videoRotationAngle == 180)
                    {
                        Core.flip(rgbaMat, rgbaMat, 0);
                    }
                    else if (webCamTexture.videoRotationAngle == 270)
                    {
                        Core.flip(rgbaMat, rgbaMat, 1);
                    }
                }
                else
                {
                    if (webCamTexture.videoRotationAngle == 180)
                    {
                        Core.flip(rgbaMat, rgbaMat, -1);
                    }
                    else if (webCamTexture.videoRotationAngle == 270)
                    {
                        Core.flip(rgbaMat, rgbaMat, -1);
                    }
                }
                // Convert the image to grayscale
                Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);

                if (faceTracker.getPoints().Count <= 0)
                {
                    Debug.Log("detectFace");

                    // Equalize the histogram before face detection
                    using (Mat equalizeHistMat = new Mat())
                        using (MatOfRect faces = new MatOfRect())
                        {
                            Imgproc.equalizeHist(grayMat, equalizeHistMat);

                            cascade.detectMultiScale(equalizeHistMat, faces, 1.1f, 2, 0
                                                     | Objdetect.CASCADE_FIND_BIGGEST_OBJECT
                                                     | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size(equalizeHistMat.cols() * 0.15, equalizeHistMat.cols() * 0.15), new Size());

                            if (faces.rows() > 0)
                            {
                                Debug.Log("--------------------------faces");
                                Debug.Log("faces " + faces.dump());
                                Debug.Log("--------------------------faces");
                                // Add the initial face points from the MatOfRect
                                faceTracker.addPoints(faces);

                                // Draw the face rectangles
                                OpenCVForUnity.Rect[] rects = faces.toArray();
                                for (int i = 0; i < rects.Length; i++)
                                {
                                    Core.rectangle(rgbaMat, new Point(rects[i].x, rects[i].y), new Point(rects[i].x + rects[i].width, rects[i].y + rects[i].height), new Scalar(255, 0, 0, 255), 2);
                                }
                            }
                        }
                }

                // Track the face points. If face points <= 0, track() always returns false.
                if (faceTracker.track(grayMat, faceTrackerParams))
                {
                    if (isDrawPoints)
                    {
                        //Debug.Log("--------------------------100-----------------------------------");
                        faceTracker.draw(rgbaMat, new Scalar(255, 0, 0, 255), new Scalar(0, 255, 0, 255));
                    }
                    //Debug.Log("--------------------------1000");
                    //Core.putText(rgbaMat, "'Tap' or 'Space Key' to Reset", new Point(5, rgbaMat.rows() - 5), Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar(255, 255, 255, 255), 2, Core.LINE_AA, false);

                    Point[] points = faceTracker.getPoints()[0];

                    //eyeWidth = points[36] - points[31];

/*
 *                  Debug.Log("--------------------------0");
 *                  Debug.Log(points[31]);
 *                  Debug.Log("--------------------------2");
 *                  Debug.Log(points[36]);
 *                  Debug.Log("--------------------------1");
 */
                    if (points.Length > 0)
                    {
                        imagePoints.fromArray(
                            points[31],                             //l eye
                            points[36],                             //r eye
                            points[67],                             // nose
                            points[48],                             //l mouth
                            points[54]                              //r mouth
                            );

                        Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);

                        bool isRefresh = false;

                        if (tvec.get(2, 0)[0] > 0 && tvec.get(2, 0)[0] < 1200 * ((float)webCamTexture.width / (float)width))
                        {
                            isRefresh = true;

                            if (oldRvec == null)
                            {
                                oldRvec = new Mat();
                                rvec.copyTo(oldRvec);
                            }
                            if (oldTvec == null)
                            {
                                oldTvec = new Mat();
                                tvec.copyTo(oldTvec);
                            }

                            // Filter rvec noise.
                            using (Mat absDiffRvec = new Mat())
                            {
                                Core.absdiff(rvec, oldRvec, absDiffRvec);

                                using (Mat cmpRvec = new Mat())
                                {
                                    Core.compare(absDiffRvec, new Scalar(rvecNoiseFilterRange), cmpRvec, Core.CMP_GT);

                                    if (Core.countNonZero(cmpRvec) > 0)
                                    {
                                        isRefresh = false;
                                    }
                                }
                            }

                            // Filter tvec noise.
                            using (Mat absDiffTvec = new Mat())
                            {
                                Core.absdiff(tvec, oldTvec, absDiffTvec);
                                using (Mat cmpTvec = new Mat())
                                {
                                    Core.compare(absDiffTvec, new Scalar(tvecNoiseFilterRange), cmpTvec, Core.CMP_GT);

                                    if (Core.countNonZero(cmpTvec) > 0)
                                    {
                                        isRefresh = false;
                                    }
                                }
                            }
                        }

                        if (isRefresh)
                        {
                            if (!rightEye.activeSelf)
                            {
                                rightEye.SetActive(true);
                            }
                            if (!leftEye.activeSelf)
                            {
                                leftEye.SetActive(true);
                            }

                            rvec.copyTo(oldRvec);
                            tvec.copyTo(oldTvec);

                            Calib3d.Rodrigues(rvec, rotM);

                            transformationM.SetRow(0, new Vector4((float)rotM.get(0, 0)[0], (float)rotM.get(0, 1)[0], (float)rotM.get(0, 2)[0], (float)tvec.get(0, 0)[0]));
                            transformationM.SetRow(1, new Vector4((float)rotM.get(1, 0)[0], (float)rotM.get(1, 1)[0], (float)rotM.get(1, 2)[0], (float)tvec.get(1, 0)[0]));
                            transformationM.SetRow(2, new Vector4((float)rotM.get(2, 0)[0], (float)rotM.get(2, 1)[0], (float)rotM.get(2, 2)[0], (float)tvec.get(2, 0)[0]));
                            transformationM.SetRow(3, new Vector4(0, 0, 0, 1));

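                            // Compose the model-view matrix: lookAtM places the virtual camera, and
                            // invertZM flips the Z axis to go from OpenCV's right-handed frame to Unity's left-handed one.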
                            modelViewMtrx = lookAtM * transformationM * invertZM;
                            ARCamera.worldToCameraMatrix = modelViewMtrx;
                        }
                    }
                }
                Utils.matToTexture2D(rgbaMat, texture, colors);
            }

            if (Input.GetKeyUp(KeyCode.Space) || Input.touchCount > 0)
            {
                faceTracker.reset();
                if (oldRvec != null)
                {
                    oldRvec.Dispose();
                    oldRvec = null;
                }
                if (oldTvec != null)
                {
                    oldTvec.Dispose();
                    oldTvec = null;
                }
                ARCamera.ResetWorldToCameraMatrix();

                rightEye.SetActive(false);
                leftEye.SetActive(false);
            }
        }

        void OnDisable()
        {
            webCamTexture.Stop();
        }
Example #21
        // Use this for initialization
        void Start()
        {
            Mat rgbMat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC3);

            Utils.texture2DToMat(imgTexture, rgbMat);
            Debug.Log("imgMat dst ToString " + rgbMat.ToString());


            gameObject.transform.localScale = new Vector3(imgTexture.width, imgTexture.height, 1);
            Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

            float width  = rgbMat.width();
            float height = rgbMat.height();

            float imageSizeScale = 1.0f;
            float widthScale     = (float)Screen.width / width;
            float heightScale    = (float)Screen.height / height;

            if (widthScale < heightScale)
            {
                Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
                imageSizeScale = (float)Screen.height / (float)Screen.width;
            }
            else
            {
                Camera.main.orthographicSize = height / 2;
            }


            // Set approximate pinhole camera intrinsics: focal length = max image dimension, principal point at the image center.
            int    max_d     = (int)Mathf.Max(width, height);
            double fx        = max_d;
            double fy        = max_d;
            double cx        = width / 2.0f;
            double cy        = height / 2.0f;
            Mat    camMatrix = new Mat(3, 3, CvType.CV_64FC1);

            camMatrix.put(0, 0, fx);
            camMatrix.put(0, 1, 0);
            camMatrix.put(0, 2, cx);
            camMatrix.put(1, 0, 0);
            camMatrix.put(1, 1, fy);
            camMatrix.put(1, 2, cy);
            camMatrix.put(2, 0, 0);
            camMatrix.put(2, 1, 0);
            camMatrix.put(2, 2, 1.0f);
            Debug.Log("camMatrix " + camMatrix.dump());


            MatOfDouble distCoeffs = new MatOfDouble(0, 0, 0, 0);

            Debug.Log("distCoeffs " + distCoeffs.dump());


            // Compute derived calibration values (FOV, focal length, principal point) from the camera matrix.
            Size   imageSize      = new Size(width * imageSizeScale, height * imageSizeScale);
            double apertureWidth  = 0;
            double apertureHeight = 0;

            double[] fovx           = new double[1];
            double[] fovy           = new double[1];
            double[] focalLength    = new double[1];
            Point    principalPoint = new Point(0, 0);

            double[] aspectratio = new double[1];

            Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);

            Debug.Log("imageSize " + imageSize.ToString());
            Debug.Log("apertureWidth " + apertureWidth);
            Debug.Log("apertureHeight " + apertureHeight);
            Debug.Log("fovx " + fovx [0]);
            Debug.Log("fovy " + fovy [0]);
            Debug.Log("focalLength " + focalLength [0]);
            Debug.Log("principalPoint " + principalPoint.ToString());
            Debug.Log("aspectratio " + aspectratio [0]);


            // Compute a scale factor to convert between the OpenCV and Unity definitions of field of view.
            double fovXScale = (2.0 * Mathf.Atan((float)(imageSize.width / (2.0 * fx)))) / (Mathf.Atan2((float)cx, (float)fx) + Mathf.Atan2((float)(imageSize.width - cx), (float)fx));
            double fovYScale = (2.0 * Mathf.Atan((float)(imageSize.height / (2.0 * fy)))) / (Mathf.Atan2((float)cy, (float)fy) + Mathf.Atan2((float)(imageSize.height - cy), (float)fy));

            Debug.Log("fovXScale " + fovXScale);
            Debug.Log("fovYScale " + fovYScale);


            // Adjust Unity Camera FOV https://github.com/opencv/opencv/commit/8ed1945ccd52501f5ab22bdec6aa1f91f1e2cfd4
            if (widthScale < heightScale)
            {
                ARCamera.fieldOfView = (float)(fovx [0] * fovXScale);
            }
            else
            {
                ARCamera.fieldOfView = (float)(fovy [0] * fovYScale);
            }



            Mat        ids      = new Mat();
            List <Mat> corners  = new List <Mat> ();
            List <Mat> rejected = new List <Mat> ();
            Mat        rvecs    = new Mat();
            Mat        tvecs    = new Mat();
            Mat        rotMat   = new Mat(3, 3, CvType.CV_64FC1);

            DetectorParameters detectorParams = DetectorParameters.create();
            Dictionary         dictionary     = Aruco.getPredefinedDictionary(dictionaryId);


            // detect markers.
            Aruco.detectMarkers(rgbMat, dictionary, corners, ids, detectorParams, rejected);

            // estimate pose.
            if (applyEstimationPose && ids.total() > 0)
            {
                Aruco.estimatePoseSingleMarkers(corners, markerLength, camMatrix, distCoeffs, rvecs, tvecs);
            }

            if (ids.total() > 0)
            {
                Aruco.drawDetectedMarkers(rgbMat, corners, ids, new Scalar(255, 0, 0));

                if (applyEstimationPose)
                {
                    for (int i = 0; i < ids.total(); i++)
                    {
//                        Debug.Log ("ids.dump() " + ids.dump ());

                        // drawAxis expects a single marker's pose, so pass only the i-th row of rvecs/tvecs.
                        using (Mat rvec = rvecs.row(i))
                        using (Mat tvec = tvecs.row(i))
                        {
                            Aruco.drawAxis(rgbMat, camMatrix, distCoeffs, rvec, tvec, markerLength * 0.5f);
                        }

                        // This example displays the ARObject only on the first detected marker.
                        if (i == 0)
                        {
                            // position
                            double[] tvec = tvecs.get(i, 0);

                            // rotation
                            double[] rv   = rvecs.get(i, 0);
                            Mat      rvec = new Mat(3, 1, CvType.CV_64FC1);
                            rvec.put(0, 0, rv[0]);
                            rvec.put(1, 0, rv[1]);
                            rvec.put(2, 0, rv[2]);
                            Calib3d.Rodrigues(rvec, rotMat);

                            Matrix4x4 transformationM = new Matrix4x4();  // from OpenCV
                            transformationM.SetRow(0, new Vector4((float)rotMat.get(0, 0) [0], (float)rotMat.get(0, 1) [0], (float)rotMat.get(0, 2) [0], (float)tvec [0]));
                            transformationM.SetRow(1, new Vector4((float)rotMat.get(1, 0) [0], (float)rotMat.get(1, 1) [0], (float)rotMat.get(1, 2) [0], (float)tvec [1]));
                            transformationM.SetRow(2, new Vector4((float)rotMat.get(2, 0) [0], (float)rotMat.get(2, 1) [0], (float)rotMat.get(2, 2) [0], (float)tvec [2]));
                            transformationM.SetRow(3, new Vector4(0, 0, 0, 1));
                            Debug.Log("transformationM " + transformationM.ToString());

                            Matrix4x4 invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));
                            Debug.Log("invertZM " + invertZM.ToString());

                            Matrix4x4 invertYM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1));
                            Debug.Log("invertYM " + invertYM.ToString());

                            // right-handed coordinates system (OpenCV) to left-handed one (Unity)
                            Matrix4x4 ARM = invertYM * transformationM;

                            // Apply Z axis inverted matrix.
                            ARM = ARM * invertZM;

                            if (shouldMoveARCamera)
                            {
                                ARM = ARGameObject.transform.localToWorldMatrix * ARM.inverse;

                                Debug.Log("ARM " + ARM.ToString());

                                ARUtils.SetTransformFromMatrix(ARCamera.transform, ref ARM);
                            }
                            else
                            {
                                ARM = ARCamera.transform.localToWorldMatrix * ARM;

                                Debug.Log("ARM " + ARM.ToString());

                                ARUtils.SetTransformFromMatrix(ARGameObject.transform, ref ARM);
                            }
                        }
                    }
                }
            }

            if (showRejected && rejected.Count > 0)
            {
                Aruco.drawDetectedMarkers(rgbMat, rejected, new Mat(), new Scalar(0, 0, 255));
            }


            Texture2D texture = new Texture2D(rgbMat.cols(), rgbMat.rows(), TextureFormat.RGBA32, false);

            Utils.matToTexture2D(rgbMat, texture);

            gameObject.GetComponent <Renderer> ().material.mainTexture = texture;
        }
        public List <ZOArucoTrackerDetection> DetectMarkers(Mat rgbMat)
        {
            List <ZOArucoTrackerDetection> results = new List <ZOArucoTrackerDetection>();

            // Debug.Log("imgMat dst ToString " + rgbMat.ToString());

            float width          = rgbMat.width();
            float height         = rgbMat.height();
            float imageSizeScale = 1.0f;
            float widthScale     = (float)Screen.width / width;
            float heightScale    = (float)Screen.height / height;

            if (widthScale < heightScale)
            {
                // Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
                imageSizeScale = (float)Screen.height / (float)Screen.width;
            }
            else
            {
                // Camera.main.orthographicSize = height / 2;
            }

            // set camera parameters.
            int    max_d     = (int)Mathf.Max(width, height);
            double fx        = max_d;
            double fy        = max_d;
            double cx        = width / 2.0f;
            double cy        = height / 2.0f;
            Mat    camMatrix = new Mat(3, 3, CvType.CV_64FC1);

            camMatrix.put(0, 0, fx);
            camMatrix.put(0, 1, 0);
            camMatrix.put(0, 2, cx);
            camMatrix.put(1, 0, 0);
            camMatrix.put(1, 1, fy);
            camMatrix.put(1, 2, cy);
            camMatrix.put(2, 0, 0);
            camMatrix.put(2, 1, 0);
            camMatrix.put(2, 2, 1.0f);
            // Debug.Log("camMatrix " + camMatrix.dump());


            MatOfDouble distCoeffs = new MatOfDouble(0, 0, 0, 0);
            // Debug.Log("distCoeffs " + distCoeffs.dump());


            // calibration camera matrix values.
            Size   imageSize      = new Size(width * imageSizeScale, height * imageSizeScale);
            double apertureWidth  = 0;
            double apertureHeight = 0;

            double[] fovx           = new double[1];
            double[] fovy           = new double[1];
            double[] focalLength    = new double[1];
            Point    principalPoint = new Point(0, 0);

            double[] aspectratio = new double[1];

            Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);

            // Debug.Log("imageSize " + imageSize.ToString());
            // Debug.Log("apertureWidth " + apertureWidth);
            // Debug.Log("apertureHeight " + apertureHeight);
            // Debug.Log("fovx " + fovx[0]);
            // Debug.Log("fovy " + fovy[0]);
            // Debug.Log("focalLength " + focalLength[0]);
            // Debug.Log("principalPoint " + principalPoint.ToString());
            // Debug.Log("aspectratio " + aspectratio[0]);


            // Compute a scale factor to convert between the OpenCV and Unity definitions of field of view.
            double fovXScale = (2.0 * Mathf.Atan((float)(imageSize.width / (2.0 * fx)))) / (Mathf.Atan2((float)cx, (float)fx) + Mathf.Atan2((float)(imageSize.width - cx), (float)fx));
            double fovYScale = (2.0 * Mathf.Atan((float)(imageSize.height / (2.0 * fy)))) / (Mathf.Atan2((float)cy, (float)fy) + Mathf.Atan2((float)(imageSize.height - cy), (float)fy));

            // Debug.Log("fovXScale " + fovXScale);
            // Debug.Log("fovYScale " + fovYScale);
            Mat        ids             = new Mat();
            List <Mat> corners         = new List <Mat>();
            List <Mat> rejectedCorners = new List <Mat>();
            Mat        rvecs           = new Mat();
            Mat        tvecs           = new Mat();
            Mat        rotMat          = new Mat(3, 3, CvType.CV_64FC1);

            DetectorParameters detectorParams = DetectorParameters.create();
            Dictionary         dictionary     = Aruco.getPredefinedDictionary((int)dictionaryId);


            // detect markers.
            Aruco.detectMarkers(rgbMat, dictionary, corners, ids, detectorParams, rejectedCorners, camMatrix, distCoeffs);

            // Debug.Log("INFO: Number of markers detected: " + ids.total());
            // if at least one marker detected
            if (ids.total() > 0)
            {
                if (_debug)
                {
                    Aruco.drawDetectedMarkers(rgbMat, corners, ids, new Scalar(0, 255, 0));
                }


                // estimate pose.
                Aruco.estimatePoseSingleMarkers(corners, _markerLengthMeters, camMatrix, distCoeffs, rvecs, tvecs);

                for (int i = 0; i < ids.total(); i++)
                {
                    // Get translation vector
                    double[] tvecArr = tvecs.get(i, 0);

                    // Get rotation vector
                    double[] rvecArr = rvecs.get(i, 0);
                    Mat      rvec    = new Mat(3, 1, CvType.CV_64FC1);
                    rvec.put(0, 0, rvecArr);

                    // Convert rotation vector to rotation matrix.
                    Calib3d.Rodrigues(rvec, rotMat);
                    double[] rotMatArr = new double[rotMat.total()];
                    rotMat.get(0, 0, rotMatArr);

                    // Convert OpenCV camera extrinsic parameters to Unity Matrix4x4.
                    Matrix4x4 transformationM = new Matrix4x4(); // from OpenCV
                    transformationM.SetRow(0, new Vector4((float)rotMatArr[0], (float)rotMatArr[1], (float)rotMatArr[2], (float)tvecArr[0]));
                    transformationM.SetRow(1, new Vector4((float)rotMatArr[3], (float)rotMatArr[4], (float)rotMatArr[5], (float)tvecArr[1]));
                    transformationM.SetRow(2, new Vector4((float)rotMatArr[6], (float)rotMatArr[7], (float)rotMatArr[8], (float)tvecArr[2]));
                    transformationM.SetRow(3, new Vector4(0, 0, 0, 1));
                    // Debug.Log("transformationM " + transformationM.ToString());

                    ZOArucoTrackerDetection detection = new ZOArucoTrackerDetection();
                    int [] currentId = new int[1];
                    ids.get(i, 0, currentId);
                    detection.arucoId   = currentId[0];
                    detection.transform = transformationM;
                    results.Add(detection);
                }
            }

            return(results);
        }
Example #23
    void handleCalibration()
    {
        for (int i = 0; i < AK_receiver.GetComponent <akplay>().camInfoList.Count; i++)
        {
            //create color mat:
            byte[]   colorBytes = ((Texture2D)(AK_receiver.GetComponent <akplay>().camInfoList[i].colorCube.GetComponent <Renderer>().material.mainTexture)).GetRawTextureData();
            GCHandle ch         = GCHandle.Alloc(colorBytes, GCHandleType.Pinned);
            Mat      colorMat   = new Mat(AK_receiver.GetComponent <akplay>().camInfoList[i].color_height, AK_receiver.GetComponent <akplay>().camInfoList[i].color_width, CvType.CV_8UC4);
            Utils.copyToMat(ch.AddrOfPinnedObject(), colorMat);
            ch.Free();

            //OpenCVForUnity.CoreModule.Core.flip(colorMat, colorMat, 0);

            //detect a chessboard in the image, and refine the points, and save the pixel positions:
            MatOfPoint2f positions = new MatOfPoint2f();
            int          resizer   = 1;    // downscale factor for chessboard detection (1 = full resolution)
            Mat colorMatSmall = new Mat(); //~27 ms each
            Imgproc.resize(colorMat, colorMatSmall, new Size(colorMat.cols() / resizer, colorMat.rows() / resizer));
            bool success = Calib3d.findChessboardCorners(colorMatSmall, new Size(7, 7), positions);
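            // Scale the corner coordinates detected on the downscaled image back up to full resolution.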
            for (int ss = 0; ss < positions.rows(); ss++)
            {
                double[] data = positions.get(ss, 0);
                data[0] = data[0] * resizer;
                data[1] = data[1] * resizer;

                positions.put(ss, 0, data);
            }

            //subpixel refinement, drawing the chessboard, and getting orange blobs take ~14ms
            //cornerSubPix expects a single-channel image, so refine on a grayscale copy.
            TermCriteria tc = new TermCriteria(TermCriteria.EPS + TermCriteria.COUNT, 30, 0.1);
            using (Mat grayMat = new Mat())
            {
                Imgproc.cvtColor(colorMat, grayMat, Imgproc.COLOR_BGRA2GRAY);
                Imgproc.cornerSubPix(grayMat, positions, new Size(5, 5), new Size(-1, -1), tc);
            }

            Mat chessboardResult = new Mat();
            colorMat.copyTo(chessboardResult);
            Calib3d.drawChessboardCorners(chessboardResult, new Size(7, 7), positions, success);



            //Find the orange blobs:
            Mat       orangeMask = new Mat();
            Vector2[] blobs      = getOrangeBlobs(ref colorMat, ref orangeMask);

            //find blob closest to chessboard
            if (success && (blobs.Length > 0))
            {
                Debug.Log("found a chessboard and blobs for camera: " + i);

                // time to get pin1 and chessboard positions: 27ms
                //find pin1:
                Point closestBlob = new Point();
                int   pin1idx     = getPin1(positions, blobs, ref closestBlob);
                Imgproc.circle(chessboardResult, new Point(positions.get(pin1idx, 0)[0], positions.get(pin1idx, 0)[1]), 10, new Scalar(255, 0, 0), -1);
                Imgproc.circle(chessboardResult, closestBlob, 10, new Scalar(255, 255, 0), -1);


                //get world positions of chessboard
                Point[]  realWorldPointArray  = new Point[positions.rows()];
                Point3[] realWorldPointArray3 = new Point3[positions.rows()];
                Point[]  imagePointArray      = new Point[positions.rows()];
                //getChessBoardWorldPositions(positions, pin1idx, 0.0498f, ref realWorldPointArray, ref realWorldPointArray3, ref imagePointArray); //green and white checkerboard.
                getChessBoardWorldPositions(positions, pin1idx, 0.07522f, ref realWorldPointArray, ref realWorldPointArray3, ref imagePointArray); //black and white checkerboard.


                //Label four reference corners (0, 6, 42, 48) with their world positions, truncated to 3 decimals:
                float  decimals  = 1000.0f;
                Scalar textColor = new Scalar(255, 0, 0);
                foreach (int idx in new int[] { 0, 6, 42, 48 })
                {
                    string text = ((int)(realWorldPointArray3[idx].x * decimals)) / decimals + "," + ((int)(realWorldPointArray3[idx].y * decimals)) / decimals + "," + ((int)(realWorldPointArray3[idx].z * decimals)) / decimals;
                    Imgproc.putText(chessboardResult, text, new Point(positions.get(idx, 0)[0], positions.get(idx, 0)[1]), 0, .6, textColor);
                }



                Mat cameraMatrix = Mat.eye(3, 3, CvType.CV_64F);
                cameraMatrix.put(0, 0, AK_receiver.GetComponent <akplay>().camInfoList[i].color_fx);
                cameraMatrix.put(1, 1, AK_receiver.GetComponent <akplay>().camInfoList[i].color_fy);
                cameraMatrix.put(0, 2, AK_receiver.GetComponent <akplay>().camInfoList[i].color_cx);
                cameraMatrix.put(1, 2, AK_receiver.GetComponent <akplay>().camInfoList[i].color_cy);

                double[] distortion = new double[8];

                distortion[0] = AK_receiver.GetComponent <akplay>().camInfoList[i].color_k1;
                distortion[1] = AK_receiver.GetComponent <akplay>().camInfoList[i].color_k2;
                distortion[2] = AK_receiver.GetComponent <akplay>().camInfoList[i].color_p1;
                distortion[3] = AK_receiver.GetComponent <akplay>().camInfoList[i].color_p2;
                distortion[4] = AK_receiver.GetComponent <akplay>().camInfoList[i].color_k3;
                distortion[5] = AK_receiver.GetComponent <akplay>().camInfoList[i].color_k4;
                distortion[6] = AK_receiver.GetComponent <akplay>().camInfoList[i].color_k5;
                distortion[7] = AK_receiver.GetComponent <akplay>().camInfoList[i].color_k6;


                /*
                 * distortion[0] = 0.0;
                 * distortion[1] = 0.0;
                 * distortion[2] = 0.0;
                 * distortion[3] = 0.0;
                 * distortion[4] = 0.0;
                 * distortion[5] = 0.0;
                 * distortion[6] = 0.0;
                 * distortion[7] = 0.0;
                 */

                //~1 ms to solve for pnp
                Mat  rvec           = new Mat();
                Mat  tvec           = new Mat();
                bool solvePnPSuccess = Calib3d.solvePnP(new MatOfPoint3f(realWorldPointArray3), new MatOfPoint2f(imagePointArray), cameraMatrix, new MatOfDouble(distortion), rvec, tvec);

                Mat R = new Mat();
                Calib3d.Rodrigues(rvec, R);


                //calculate unity vectors, and camera transforms
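                //x_cam = R * x_world + t, so the camera center in world coordinates is -R^T * t.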
                Mat camCenter     = -R.t() * tvec;
                Mat forwardOffset = new Mat(3, 1, tvec.type());
                forwardOffset.put(0, 0, 0);
                forwardOffset.put(1, 0, 0);
                forwardOffset.put(2, 0, 1);
                Mat upOffset = new Mat(3, 1, tvec.type());
                upOffset.put(0, 0, 0);
                upOffset.put(1, 0, -1);
                upOffset.put(2, 0, 0);

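                //Map the camera-space forward (+Z) and up (-Y) offsets back to world space,
                //then subtract the camera center to turn them into direction vectors.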
                Mat forwardVectorCV = R.t() * (forwardOffset - tvec);
                forwardVectorCV = forwardVectorCV - camCenter;
                Mat upVectorCV = R.t() * (upOffset - tvec);
                upVectorCV = upVectorCV - camCenter;

                Vector3    forwardVectorUnity = new Vector3((float)forwardVectorCV.get(0, 0)[0], (float)forwardVectorCV.get(2, 0)[0], (float)forwardVectorCV.get(1, 0)[0]); //need to flip y and z due to unity coordinate system
                Vector3    upVectorUnity      = new Vector3((float)upVectorCV.get(0, 0)[0], (float)upVectorCV.get(2, 0)[0], (float)upVectorCV.get(1, 0)[0]);                //need to flip y and z due to unity coordinate system
                Vector3    camCenterUnity     = new Vector3((float)camCenter.get(0, 0)[0], (float)camCenter.get(2, 0)[0], (float)camCenter.get(1, 0)[0]);
                Quaternion rotationUnity      = Quaternion.LookRotation(forwardVectorUnity, upVectorUnity);



                GameObject colorMarker = GameObject.CreatePrimitive(PrimitiveType.Cube);
                //colorMarker.transform.localScale = new Vector3(0.1f, 0.1f, 0.2f);
                //colorMarker.transform.parent = AK_receiver.transform;
                colorMarker.layer = LayerMask.NameToLayer("Debug");
                colorMarker.transform.position = camCenterUnity;
                colorMarker.transform.rotation = Quaternion.LookRotation(forwardVectorUnity, upVectorUnity);
                colorMarker.GetComponent <Renderer>().material.color = Color.blue;

                Vector3    forwardDepth   = AK_receiver.GetComponent <akplay>().camInfoList[i].color_extrinsics.MultiplyPoint(forwardVectorUnity);
                Vector3    upDepth        = AK_receiver.GetComponent <akplay>().camInfoList[i].color_extrinsics.MultiplyPoint(upVectorUnity);
                Vector3    camCenterDepth = AK_receiver.GetComponent <akplay>().camInfoList[i].color_extrinsics.MultiplyPoint(camCenterUnity);
                Quaternion rotationDepth  = Quaternion.LookRotation(forwardDepth, upDepth);

                GameObject depthMarker = GameObject.CreatePrimitive(PrimitiveType.Cube);
                depthMarker.layer            = LayerMask.NameToLayer("Debug");
                depthMarker.transform.parent = colorMarker.transform;
                //depthMarker.transform.localScale = AK_receiver.GetComponent<akplay>().camInfoList[i].color_extrinsics.lossyScale;

                depthMarker.transform.localRotation = AK_receiver.GetComponent <akplay>().camInfoList[i].color_extrinsics.inverse.rotation;

                Vector3 matrixPosition = new Vector3(AK_receiver.GetComponent <akplay>().camInfoList[i].color_extrinsics.inverse.GetColumn(3).x,
                                                     AK_receiver.GetComponent <akplay>().camInfoList[i].color_extrinsics.inverse.GetColumn(3).y,
                                                     AK_receiver.GetComponent <akplay>().camInfoList[i].color_extrinsics.inverse.GetColumn(3).z);


                /*
                 * depthMarker.transform.localRotation = AK_receiver.GetComponent<akplay>().camInfoList[i].color_extrinsics.rotation;
                 *
                 * Vector3 matrixPosition = new Vector3(AK_receiver.GetComponent<akplay>().camInfoList[i].color_extrinsics.GetColumn(3).x,
                 *                                      AK_receiver.GetComponent<akplay>().camInfoList[i].color_extrinsics.GetColumn(3).y,
                 *                                      AK_receiver.GetComponent<akplay>().camInfoList[i].color_extrinsics.GetColumn(3).z);
                 */

                depthMarker.transform.localPosition = -matrixPosition;
                depthMarker.transform.parent        = null;

                colorMarker.transform.localScale = new Vector3(0.1f, 0.1f, 0.2f);
                depthMarker.transform.localScale = new Vector3(0.1f, 0.1f, 0.2f);

                //depthMarker.transform.parent = AK_receiver.transform;
                //depthMarker.transform.position = camCenterDepth;
                //depthMarker.transform.rotation = Quaternion.LookRotation(forwardDepth-camCenterDepth, upDepth-camCenterDepth);
                depthMarker.GetComponent <Renderer>().material.color = Color.red;


                AK_receiver.GetComponent <akplay>().camInfoList[i].visualization.transform.position = depthMarker.transform.position; //need to flip y and z due to unity coordinate system
                AK_receiver.GetComponent <akplay>().camInfoList[i].visualization.transform.rotation = depthMarker.transform.rotation;
            }


            //draw chessboard result to calibration ui:
            Texture2D colorTexture = new Texture2D(chessboardResult.cols(), chessboardResult.rows(), TextureFormat.BGRA32, false);
            colorTexture.LoadRawTextureData((IntPtr)chessboardResult.dataAddr(), (int)chessboardResult.total() * (int)chessboardResult.elemSize());
            colorTexture.Apply();
            checkerboard_display_list[i].GetComponent <Renderer>().material.mainTexture = colorTexture;

            //draw threshold to calibration ui:
            Texture2D orangeTexture = new Texture2D(orangeMask.cols(), orangeMask.rows(), TextureFormat.R8, false);
            orangeTexture.LoadRawTextureData((IntPtr)orangeMask.dataAddr(), (int)orangeMask.total() * (int)orangeMask.elemSize());
            orangeTexture.Apply();
            threshold_display_list[i].GetComponent <Renderer>().material.mainTexture = orangeTexture;
        }
    }
        // Update is called once per frame
        void Update()
        {
            if (webCamTextureToMatHelper.isPlaying() && webCamTextureToMatHelper.didUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();

                Imgproc.cvtColor(rgbaMat, rgbMat, Imgproc.COLOR_RGBA2RGB);

                // detect markers and estimate pose
                Aruco.detectMarkers(rgbMat, dictionary, corners, ids, detectorParams, rejected);
                //          Aruco.detectMarkers (imgMat, dictionary, corners, ids);
                if (estimatePose && ids.total() > 0)
                {
                    Aruco.estimatePoseSingleMarkers(corners, markerLength, camMatrix, distCoeffs, rvecs, tvecs);
                }


                // draw results
                if (ids.total() > 0)
                {
                    Aruco.drawDetectedMarkers(rgbMat, corners, ids, new Scalar(255, 0, 0));

                    if (estimatePose)
                    {
                        for (int i = 0; i < ids.total(); i++)
                        {
                            // drawAxis expects a single marker's pose, so pass only the i-th row of rvecs/tvecs.
                            using (Mat rvec = rvecs.row(i))
                            using (Mat tvec = tvecs.row(i))
                            {
                                Aruco.drawAxis(rgbMat, camMatrix, distCoeffs, rvec, tvec, markerLength * 0.5f);
                            }

                            //This example displays the ARObject only on the first detected marker.
                            if (i == 0)
                            {
                                // Rodrigues expects a single 3-element rotation vector, so pass only the first marker's row.
                                Calib3d.Rodrigues(rvecs.row(0), rotMat);


                                transformationM.SetRow(0, new Vector4((float)rotMat.get(0, 0) [0], (float)rotMat.get(0, 1) [0], (float)rotMat.get(0, 2) [0], (float)tvecs.get(0, 0) [0]));
                                transformationM.SetRow(1, new Vector4((float)rotMat.get(1, 0) [0], (float)rotMat.get(1, 1) [0], (float)rotMat.get(1, 2) [0], (float)tvecs.get(0, 0) [1]));
                                transformationM.SetRow(2, new Vector4((float)rotMat.get(2, 0) [0], (float)rotMat.get(2, 1) [0], (float)rotMat.get(2, 2) [0], (float)tvecs.get(0, 0) [2]));
                                transformationM.SetRow(3, new Vector4(0, 0, 0, 1));

                                if (shouldMoveARCamera)
                                {
                                    ARM = ARGameObject.transform.localToWorldMatrix * invertZM * transformationM.inverse * invertYM;
//                                                              Debug.Log ("ARM " + ARM.ToString ());

                                    ARUtils.SetTransformFromMatrix(ARCamera.transform, ref ARM);
                                }
                                else
                                {
                                    ARM = ARCamera.transform.localToWorldMatrix * invertYM * transformationM * invertZM;
//                                                              Debug.Log ("ARM " + ARM.ToString ());

                                    ARUtils.SetTransformFromMatrix(ARGameObject.transform, ref ARM);
                                }
                            }
                        }
                    }
                }

                if (showRejected && rejected.Count > 0)
                {
                    Aruco.drawDetectedMarkers(rgbMat, rejected, new Mat(), new Scalar(0, 0, 255));
                }


                // Draw the overlay on rgbMat, since that is the Mat rendered to the texture below.
                Imgproc.putText(rgbMat, "W:" + rgbMat.width() + " H:" + rgbMat.height() + " SO:" + Screen.orientation, new Point(5, rgbMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar(255, 255, 255), 2, Imgproc.LINE_AA, false);

                Utils.matToTexture2D(rgbMat, texture, webCamTextureToMatHelper.GetBufferColors());
            }
        }
Example #25
        private void DetectMarkers()
        {
            Utils.texture2DToMat(imgTexture, rgbMat);
            Debug.Log("imgMat dst ToString " + rgbMat.ToString());

            gameObject.transform.localScale = new Vector3(imgTexture.width, imgTexture.height, 1);
            Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

            float width  = rgbMat.width();
            float height = rgbMat.height();

            float imageSizeScale = 1.0f;
            float widthScale     = (float)Screen.width / width;
            float heightScale    = (float)Screen.height / height;

            if (widthScale < heightScale)
            {
                Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
                imageSizeScale = (float)Screen.height / (float)Screen.width;
            }
            else
            {
                Camera.main.orthographicSize = height / 2;
            }


            // set camera parameters.
            int    max_d     = (int)Mathf.Max(width, height);
            double fx        = max_d;
            double fy        = max_d;
            double cx        = width / 2.0f;
            double cy        = height / 2.0f;
            Mat    camMatrix = new Mat(3, 3, CvType.CV_64FC1);

            camMatrix.put(0, 0, fx);
            camMatrix.put(0, 1, 0);
            camMatrix.put(0, 2, cx);
            camMatrix.put(1, 0, 0);
            camMatrix.put(1, 1, fy);
            camMatrix.put(1, 2, cy);
            camMatrix.put(2, 0, 0);
            camMatrix.put(2, 1, 0);
            camMatrix.put(2, 2, 1.0f);
            Debug.Log("camMatrix " + camMatrix.dump());


            MatOfDouble distCoeffs = new MatOfDouble(0, 0, 0, 0);

            Debug.Log("distCoeffs " + distCoeffs.dump());


            // calibration camera matrix values.
            Size   imageSize      = new Size(width * imageSizeScale, height * imageSizeScale);
            double apertureWidth  = 0;
            double apertureHeight = 0;

            double[] fovx           = new double[1];
            double[] fovy           = new double[1];
            double[] focalLength    = new double[1];
            Point    principalPoint = new Point(0, 0);

            double[] aspectratio = new double[1];

            Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);

            Debug.Log("imageSize " + imageSize.ToString());
            Debug.Log("apertureWidth " + apertureWidth);
            Debug.Log("apertureHeight " + apertureHeight);
            Debug.Log("fovx " + fovx [0]);
            Debug.Log("fovy " + fovy [0]);
            Debug.Log("focalLength " + focalLength [0]);
            Debug.Log("principalPoint " + principalPoint.ToString());
            Debug.Log("aspectratio " + aspectratio [0]);


            // Compute a scale factor to convert between the OpenCV and Unity definitions of field of view.
            double fovXScale = (2.0 * Mathf.Atan((float)(imageSize.width / (2.0 * fx)))) / (Mathf.Atan2((float)cx, (float)fx) + Mathf.Atan2((float)(imageSize.width - cx), (float)fx));
            double fovYScale = (2.0 * Mathf.Atan((float)(imageSize.height / (2.0 * fy)))) / (Mathf.Atan2((float)cy, (float)fy) + Mathf.Atan2((float)(imageSize.height - cy), (float)fy));

            Debug.Log("fovXScale " + fovXScale);
            Debug.Log("fovYScale " + fovYScale);


            // Adjust Unity Camera FOV https://github.com/opencv/opencv/commit/8ed1945ccd52501f5ab22bdec6aa1f91f1e2cfd4
            if (widthScale < heightScale)
            {
                arCamera.fieldOfView = (float)(fovx [0] * fovXScale);
            }
            else
            {
                arCamera.fieldOfView = (float)(fovy [0] * fovYScale);
            }
            // Display objects near the camera.
            arCamera.nearClipPlane = 0.01f;



            Mat        ids             = new Mat();
            List <Mat> corners         = new List <Mat> ();
            List <Mat> rejectedCorners = new List <Mat> ();
            Mat        rvecs           = new Mat();
            Mat        tvecs           = new Mat();
            Mat        rotMat          = new Mat(3, 3, CvType.CV_64FC1);

            DetectorParameters detectorParams = DetectorParameters.create();
            Dictionary         dictionary     = Aruco.getPredefinedDictionary((int)dictionaryId);


            // detect markers.
            Aruco.detectMarkers(rgbMat, dictionary, corners, ids, detectorParams, rejectedCorners, camMatrix, distCoeffs);

            // if at least one marker detected
            if (ids.total() > 0)
            {
                Aruco.drawDetectedMarkers(rgbMat, corners, ids, new Scalar(0, 255, 0));

                // estimate pose.
                if (applyEstimationPose)
                {
                    Aruco.estimatePoseSingleMarkers(corners, markerLength, camMatrix, distCoeffs, rvecs, tvecs);

                    for (int i = 0; i < ids.total(); i++)
                    {
                        using (Mat rvec = new Mat(rvecs, new OpenCVForUnity.CoreModule.Rect(0, i, 1, 1)))
                            using (Mat tvec = new Mat(tvecs, new OpenCVForUnity.CoreModule.Rect(0, i, 1, 1))) {
                                // In this example we are processing with RGB color image, so Axis-color correspondences are X: blue, Y: green, Z: red. (Usually X: red, Y: green, Z: blue)
                                Aruco.drawAxis(rgbMat, camMatrix, distCoeffs, rvec, tvec, markerLength * 0.5f);
                            }

                        // This example displays the ARObject only on the first detected marker.
                        if (i == 0)
                        {
                            // Get translation vector
                            double[] tvecArr = tvecs.get(i, 0);

                            // Get rotation vector
                            double[] rvecArr = rvecs.get(i, 0);
                            Mat      rvec    = new Mat(3, 1, CvType.CV_64FC1);
                            rvec.put(0, 0, rvecArr);

                            // Convert rotation vector to rotation matrix.
                            Calib3d.Rodrigues(rvec, rotMat);
                            double[] rotMatArr = new double[rotMat.total()];
                            rotMat.get(0, 0, rotMatArr);

                            // Convert OpenCV camera extrinsic parameters to Unity Matrix4x4.
                            Matrix4x4 transformationM = new Matrix4x4();  // from OpenCV
                            transformationM.SetRow(0, new Vector4((float)rotMatArr [0], (float)rotMatArr [1], (float)rotMatArr [2], (float)tvecArr [0]));
                            transformationM.SetRow(1, new Vector4((float)rotMatArr [3], (float)rotMatArr [4], (float)rotMatArr [5], (float)tvecArr [1]));
                            transformationM.SetRow(2, new Vector4((float)rotMatArr [6], (float)rotMatArr [7], (float)rotMatArr [8], (float)tvecArr [2]));
                            transformationM.SetRow(3, new Vector4(0, 0, 0, 1));
                            Debug.Log("transformationM " + transformationM.ToString());

                            Matrix4x4 invertYM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1));
                            Debug.Log("invertYM " + invertYM.ToString());

                            // right-handed coordinates system (OpenCV) to left-handed one (Unity)
                            // https://stackoverflow.com/questions/30234945/change-handedness-of-a-row-major-4x4-transformation-matrix
                            Matrix4x4 ARM = invertYM * transformationM * invertYM;

                            if (shouldMoveARCamera)
                            {
                                ARM = arGameObject.transform.localToWorldMatrix * ARM.inverse;

                                Debug.Log("ARM " + ARM.ToString());

                                ARUtils.SetTransformFromMatrix(arCamera.transform, ref ARM);
                            }
                            else
                            {
                                ARM = arCamera.transform.localToWorldMatrix * ARM;

                                Debug.Log("ARM " + ARM.ToString());

                                ARUtils.SetTransformFromMatrix(arGameObject.transform, ref ARM);
                            }
                        }
                    }
                }
            }

            if (showRejectedCorners && rejectedCorners.Count > 0)
            {
                Aruco.drawDetectedMarkers(rgbMat, rejectedCorners, new Mat(), new Scalar(255, 0, 0));
            }

            Utils.matToTexture2D(rgbMat, texture);
        }
Example #26
        /// <summary>
        /// Gets the frontal face angles.
        /// </summary>
        /// <returns>Frontal face angles.</returns>
        /// <param name="points">Points of face landmark which was detected with Dlib.</param>
        public Vector3 GetFrontalFaceAngles(List <Vector2> points)
        {
            if (points.Count < 68)
            {
                throw new ArgumentException("Invalid face landmark points", "points");
            }

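            // Build the 2D correspondences from the 68-point Dlib landmarks:
            // eye centers (averaged eyelid corners), nose tip, mouth corners, and the two outermost jaw points.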
            landmarkPoints [0].x = (points [38].x + points [41].x) / 2;
            landmarkPoints [0].y = (points [38].y + points [41].y) / 2;
            landmarkPoints [1].x = (points [43].x + points [46].x) / 2;
            landmarkPoints [1].y = (points [43].y + points [46].y) / 2;
            landmarkPoints [2].x = points [30].x;
            landmarkPoints [2].y = points [30].y;
            landmarkPoints [3].x = points [48].x;
            landmarkPoints [3].y = points [48].y;
            landmarkPoints [4].x = points [54].x;
            landmarkPoints [4].y = points [54].y;
            landmarkPoints [5].x = points [0].x;
            landmarkPoints [5].y = points [0].y;
            landmarkPoints [6].x = points [16].x;
            landmarkPoints [6].y = points [16].y;

            // Normalize the points: shift all landmarks so the nose tip sits at the image center.
            Point centerOffset = landmarkPoints [2] - new Point(imageWidth / 2, imageHeight / 2);

            for (int i = 0; i < landmarkPoints.Length; i++)
            {
                landmarkPoints [i] = landmarkPoints [i] - centerOffset;
            }

            imagePoints.fromArray(landmarkPoints);

            Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);

            double tvec_z = tvec.get(2, 0) [0];

//            Debug.Log (rvec.dump());
//            Debug.Log (tvec.dump());

            if (!double.IsNaN(tvec_z))
            {
                Calib3d.Rodrigues(rvec, rotM);

//                Debug.Log (rotM.dump());

                transformationM.SetRow(0, new Vector4((float)rotM.get(0, 0) [0], (float)rotM.get(0, 1) [0], (float)rotM.get(0, 2) [0], (float)tvec.get(0, 0) [0]));
                transformationM.SetRow(1, new Vector4((float)rotM.get(1, 0) [0], (float)rotM.get(1, 1) [0], (float)rotM.get(1, 2) [0], (float)tvec.get(1, 0) [0]));
                transformationM.SetRow(2, new Vector4((float)rotM.get(2, 0) [0], (float)rotM.get(2, 1) [0], (float)rotM.get(2, 2) [0], (float)tvec.get(2, 0) [0]));
                transformationM.SetRow(3, new Vector4(0, 0, 0, 1));

                transformationM = invertYM * transformationM * invertZM;

                Vector3 angles = ExtractRotationFromMatrix(ref transformationM).eulerAngles;

//                Debug.Log ("angles " + angles.x + " " + angles.y + " " + angles.z);

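                // Map the Euler angles from [0, 360) to (-180, 180], and mirror the pose
                // when solvePnP placed the face behind the camera (tvec_z < 0).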
                float rotationX = (angles.x > 180) ? angles.x - 360 : angles.x;
                float rotationY = (angles.y > 180) ? angles.y - 360 : angles.y;
                float rotationZ = (tvec_z >= 0) ? (angles.z > 180) ? angles.z - 360 : angles.z : 180 - angles.z;

                if (tvec_z < 0)
                {
                    rotationX = -rotationX;
                    rotationY = -rotationY;
                    rotationZ = -rotationZ;
                }

                return(new Vector3(rotationX, rotationY, rotationZ));
            }
            else
            {
                return(new Vector3(0, 0, 0));
            }
        }
        // Update is called once per frame
        void Update()
        {
            //Loop playback: rewind when the video reaches its last frame.
            if (capture.get(Videoio.CAP_PROP_POS_FRAMES) >= capture.get(Videoio.CAP_PROP_FRAME_COUNT))
            {
                capture.set(Videoio.CAP_PROP_POS_FRAMES, 0);
            }

            if (capture.grab())
            {
                capture.retrieve(rgbMat, 0);

                Imgproc.cvtColor(rgbMat, rgbMat, Imgproc.COLOR_BGR2RGB);
                //Debug.Log ("Mat toString " + rgbMat.ToString ());


                OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbMat);

                //detect face rects
                List <UnityEngine.Rect> detectResult = faceLandmarkDetector.Detect();

                if (detectResult.Count > 0)
                {
                    //detect landmark points
                    List <Vector2> points = faceLandmarkDetector.DetectLandmark(detectResult [0]);

                    if (points.Count > 0)
                    {
                        if (shouldDrawFacePoints)
                        {
                            OpenCVForUnityUtils.DrawFaceLandmark(rgbMat, points, new Scalar(0, 255, 0), 2);
                        }

                        imagePoints.fromArray(
                            new Point((points [38].x + points [41].x) / 2, (points [38].y + points [41].y) / 2), //l eye
                            new Point((points [43].x + points [46].x) / 2, (points [43].y + points [46].y) / 2), //r eye
                            new Point(points [33].x, points [33].y),                                             //nose
                            new Point(points [48].x, points [48].y),                                             //l mouth
                            new Point(points [54].x, points [54].y),                                             //r mouth
                            new Point(points [0].x, points [0].y),                                               //l ear
                            new Point(points [16].x, points [16].y)                                              //r ear
                            );


                        Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);


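                        // Proceed only when the solved pose puts the face in front of the camera (positive Z).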
                        if (tvec.get(2, 0) [0] > 0)
                        {
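                            // Rough eye-openness test: an eye counts as open when its height
                            // exceeds one sixth of its width.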
                            if (Mathf.Abs((float)(points [43].y - points [46].y)) > Mathf.Abs((float)(points [42].x - points [45].x)) / 6.0)
                            {
                                if (shouldDrawEffects)
                                {
                                    rightEye.SetActive(true);
                                }
                            }

                            if (Mathf.Abs((float)(points [38].y - points [41].y)) > Mathf.Abs((float)(points [39].x - points [36].x)) / 6.0)
                            {
                                if (shouldDrawEffects)
                                {
                                    leftEye.SetActive(true);
                                }
                            }
                            if (shouldDrawHead)
                            {
                                head.SetActive(true);
                            }
                            if (shouldDrawAxes)
                            {
                                axes.SetActive(true);
                            }



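                            // Mouth-open test: compare the inner-lip gap to the nose length.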
                            float noseDistance  = Mathf.Abs((float)(points [27].y - points [33].y));
                            float mouthDistance = Mathf.Abs((float)(points [62].y - points [66].y));
                            if (mouthDistance > noseDistance / 5.0)
                            {
                                if (shouldDrawEffects)
                                {
                                    mouth.SetActive(true);
                                    foreach (ParticleSystem ps in mouthParticleSystem)
                                    {
                                        ps.enableEmission = true;
                                        ps.startSize      = 40 * (mouthDistance / noseDistance);
                                    }
                                }
                            }
                            else
                            {
                                if (shouldDrawEffects)
                                {
                                    foreach (ParticleSystem ps in mouthParticleSystem)
                                    {
                                        ps.enableEmission = false;
                                    }
                                }
                            }


                            Calib3d.Rodrigues(rvec, rotM);

                            transformationM.SetRow(0, new Vector4((float)rotM.get(0, 0) [0], (float)rotM.get(0, 1) [0], (float)rotM.get(0, 2) [0], (float)tvec.get(0, 0) [0]));
                            transformationM.SetRow(1, new Vector4((float)rotM.get(1, 0) [0], (float)rotM.get(1, 1) [0], (float)rotM.get(1, 2) [0], (float)tvec.get(1, 0) [0]));
                            transformationM.SetRow(2, new Vector4((float)rotM.get(2, 0) [0], (float)rotM.get(2, 1) [0], (float)rotM.get(2, 2) [0], (float)tvec.get(2, 0) [0]));
                            transformationM.SetRow(3, new Vector4(0, 0, 0, 1));

                            if (shouldMoveARCamera)
                            {
                                if (ARGameObject != null)
                                {
                                    ARM = ARGameObject.transform.localToWorldMatrix * invertZM * transformationM.inverse * invertYM;
                                    ARUtils.SetTransformFromMatrix(ARCamera.transform, ref ARM);
                                    ARGameObject.SetActive(true);
                                }
                            }
                            else
                            {
                                ARM = ARCamera.transform.localToWorldMatrix * invertYM * transformationM * invertZM;

                                if (ARGameObject != null)
                                {
                                    ARUtils.SetTransformFromMatrix(ARGameObject.transform, ref ARM);
                                    ARGameObject.SetActive(true);
                                }
                            }
                        }
                    }
                }
                else
                {
                    rightEye.SetActive(false);
                    leftEye.SetActive(false);
                    head.SetActive(false);
                    mouth.SetActive(false);
                    axes.SetActive(false);
                }

                Imgproc.putText(rgbMat, "W:" + rgbMat.width() + " H:" + rgbMat.height() + " SO:" + Screen.orientation, new Point(5, rgbMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar(255, 255, 255), 1, Imgproc.LINE_AA, false);

                OpenCVForUnity.Utils.matToTexture2D(rgbMat, texture, colors);
            }
        }
Example #28
    /// <summary>
    /// Looks for markers in the most recent ZED image, and updates all registered MarkerObjects accordingly.
    /// </summary>
    private void ImageUpdated(Camera cam, Mat camMat, Mat imageMat)
    {
        Dictionary predict = Aruco.getPredefinedDictionary((int)markerDictionary); //Load the selected pre-defined dictionary.

        //Create empty structures to hold the output of Aruco.detectMarkers.
        List <Mat>         corners        = new List <Mat>();
        Mat                ids            = new Mat();
        DetectorParameters detectparams   = DetectorParameters.create();
        List <Mat>         rejectedpoints = new List <Mat>(); //There is no overload of detectMarkers that takes camMat without this parameter as well.

        //Call OpenCV to tell us which markers were detected, and give their 2D positions.
        Aruco.detectMarkers(imageMat, predict, corners, ids, detectparams, rejectedpoints, camMat);

        //Make matrices to hold the output rotations and translations of each detected marker.
        Mat rotvectors   = new Mat();
        Mat transvectors = new Mat();

        //Convert the 2D corner positions into a 3D pose for each detected marker.
        Aruco.estimatePoseSingleMarkers(corners, markerWidthMeters, camMat, new Mat(), rotvectors, transvectors);

        //Now we have ids, rotvectors and transvectors, which are all vertical arrays holding info about each detection:
        // - ids: An Nx1 array (N = number of markers detected) where each slot is the ID of a detected marker in the dictionary.
        // - rotvectors: An Nx1 array where each row corresponds to an individual detection: the first row is the rotation of the
        //    marker listed in the first row of ids, etc. Each row holds the X, Y and Z components of that marker's rotation, BUT
        //    they are not directly usable in Unity because they're calculated very differently. We'll deal with that soon.
        // - transvectors: An Nx1 array like rotvectors, with each row corresponding to a detected marker and holding a double[3]
        //    array with the X, Y and Z positions. These values are usable in Unity - they're just relative to the camera, not the world, which is easy to fix.

        //Convert the matrix of IDs into a List, to simplify things for those not familiar with using matrices.
        List <int> detectedIDs = new List <int>();

        for (int i = 0; i < ids.height(); i++)
        {
            int id = (int)ids.get(i, 0)[0];
            if (!detectedIDs.Contains(id))
            {
                detectedIDs.Add(id);
            }
        }

        //We'll go through every ID that's been registered into registered Markers, and see if we found any markers in the scene with that ID.
        Dictionary <int, List <sl.Pose> > detectedWorldPoses = new Dictionary <int, List <sl.Pose> >(); //Key is marker ID, value is world space poses.

        //foreach (int id in registeredMarkers.Keys)
        for (int index = 0; index < transvectors.rows(); index++)
        {
            int id = (int)ids.get(index, 0)[0];
            if (!registeredMarkers.ContainsKey(id) || registeredMarkers[id].Count == 0)
            {
                continue;                 //Don't waste time if the list is empty. Can happen if markers are added, then removed.
            }
            if (detectedIDs.Contains(id)) //At least one MarkerObject needs to be updated. Convert pose to world space and call MarkerDetected() on it.
            {
                //Translation is just pose relative to camera. But we need to flip Y because of OpenCV's different coordinate system.
                Vector3 localpos;
                localpos.x = (float)transvectors.get(index, 0)[0];
                localpos.y = -(float)transvectors.get(index, 0)[1];
                localpos.z = (float)transvectors.get(index, 0)[2];

                Vector3 worldpos = cam.transform.TransformPoint(localpos); //Convert from local to world space.

        //Because of the different coordinate frame, we need to flip the Y direction, which points down instead of up.
        //We need to do this before we calculate the 3x3 rotation matrix, as doing it afterward makes it much harder to work with.
                double[] flip = rotvectors.get(index, 0);
                flip[1] = -flip[1];
                rotvectors.put(index, 0, flip);

                //Convert this rotation vector to a 3x3 matrix, which will hold values we can use in Unity.
                Mat rotmatrix = new Mat();
                Calib3d.Rodrigues(rotvectors.row(index), rotmatrix);

                //This new 3x3 matrix holds a vector pointing right in the first column, a vector pointing up in the second,
                //and a vector pointing forward in the third column. Rows 0, 1 and 2 are the X, Y and Z values of each vector.
                //We'll grab the forward and up vectors, which we can put into Quaternion.LookRotation() to get a representative Quaternion.
                Vector3 forward;
                forward.x = (float)rotmatrix.get(2, 0)[0];
                forward.y = (float)rotmatrix.get(2, 1)[0];
                forward.z = (float)rotmatrix.get(2, 2)[0];

                Vector3 up;
                up.x = (float)rotmatrix.get(1, 0)[0];
                up.y = (float)rotmatrix.get(1, 1)[0];
                up.z = (float)rotmatrix.get(1, 2)[0];

                Quaternion rot = Quaternion.LookRotation(forward, up);

                //Compensate for flip on Z axis.
                rot *= Quaternion.Euler(0, 0, 180);

                //Convert from local space to world space by multiplying the camera's world rotation with it.
                Quaternion worldrot = cam.transform.rotation * rot;

                if (!detectedWorldPoses.ContainsKey(id))
                {
                    detectedWorldPoses.Add(id, new List <sl.Pose>());
                }
                detectedWorldPoses[id].Add(new sl.Pose()
                {
                    translation = worldpos, rotation = worldrot
                });

                foreach (MarkerObject marker in registeredMarkers[id])
                {
                    marker.MarkerDetectedSingle(worldpos, worldrot);
                }
            }
        }

        //Call the event that gives all marker world poses, if any listeners.
        if (OnMarkersDetected != null)
        {
            OnMarkersDetected.Invoke(detectedWorldPoses);
        }

        //foreach (int detectedid in detectedWorldPoses.Keys)
        foreach (int key in registeredMarkers.Keys)
        {
            if (detectedWorldPoses.ContainsKey(key))
            {
                foreach (MarkerObject marker in registeredMarkers[key])
                {
                    marker.MarkerDetectedAll(detectedWorldPoses[key]);
                }
            }
            else
            {
                foreach (MarkerObject marker in registeredMarkers[key])
                {
                    marker.MarkerNotDetected();
                }
            }
        }
    }
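
All of these examples repeat the same recipe for turning an OpenCV pose into a Unity transform: run Calib3d.Rodrigues on the rotation vector, pack the 3x3 rotation matrix and the translation into a Matrix4x4, then flip axes to move from OpenCV's right-handed coordinate system to Unity's left-handed one. Below is a minimal sketch of that recipe as a standalone helper; the class and method names are illustrative (not part of OpenCVForUnity), and the using directives assume the OpenCVForUnity 2.x module namespaces.

using OpenCVForUnity.Calib3dModule;
using OpenCVForUnity.CoreModule;
using UnityEngine;

// Hypothetical helper collecting the OpenCV -> Unity pose conversion used in the examples above.
public static class OpenCvPoseUtils
{
    // Flip Y: OpenCV is right-handed with Y pointing down; Unity is left-handed with Y pointing up.
    static readonly Matrix4x4 invertYM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1));
    // Flip Z to complete the handedness change.
    static readonly Matrix4x4 invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));

    // Builds a Unity camera-relative pose from a 3x1 rotation vector and a 3x1 translation vector.
    public static Matrix4x4 ToUnityMatrix(Mat rvec, Mat tvec)
    {
        using (Mat rotMat = new Mat(3, 3, CvType.CV_64FC1))
        {
            // Rotation vector -> 3x3 rotation matrix.
            Calib3d.Rodrigues(rvec, rotMat);

            // Pack [R|t] into a 4x4 matrix, exactly as the examples above do row by row.
            Matrix4x4 m = new Matrix4x4();
            m.SetRow(0, new Vector4((float)rotMat.get(0, 0)[0], (float)rotMat.get(0, 1)[0], (float)rotMat.get(0, 2)[0], (float)tvec.get(0, 0)[0]));
            m.SetRow(1, new Vector4((float)rotMat.get(1, 0)[0], (float)rotMat.get(1, 1)[0], (float)rotMat.get(1, 2)[0], (float)tvec.get(1, 0)[0]));
            m.SetRow(2, new Vector4((float)rotMat.get(2, 0)[0], (float)rotMat.get(2, 1)[0], (float)rotMat.get(2, 2)[0], (float)tvec.get(2, 0)[0]));
            m.SetRow(3, new Vector4(0, 0, 0, 1));

            // Right-handed (OpenCV) -> left-handed (Unity).
            return invertYM * m * invertZM;
        }
    }
}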