Example #1
        /// <summary>
        /// Estimates the pose of each detected marker relative to the camera.
        /// </summary>
        /// <param name="detectedMarkers">Detected markers; each marker's transformation matrix is updated in place.</param>
        void estimatePosition(List <Marker> detectedMarkers)
        {
            for (int i = 0; i < detectedMarkers.Count; i++)
            {
                Marker m = detectedMarkers [i];

                Mat Rvec = new Mat();
                Mat Tvec = new Mat();
                Mat raux = new Mat();
                Mat taux = new Mat();
                Calib3d.solvePnP(m_markerCorners3d, new MatOfPoint2f(m.points.toArray()), camMatrix, distCoeff, raux, taux);


                raux.convertTo(Rvec, CvType.CV_32F);
                taux.convertTo(Tvec, CvType.CV_32F);

                Mat rotMat = new Mat(3, 3, CvType.CV_64FC1);
                Calib3d.Rodrigues(Rvec, rotMat);


                m.transformation.SetRow(0, new Vector4((float)rotMat.get(0, 0) [0], (float)rotMat.get(0, 1) [0], (float)rotMat.get(0, 2) [0], (float)Tvec.get(0, 0) [0]));
                m.transformation.SetRow(1, new Vector4((float)rotMat.get(1, 0) [0], (float)rotMat.get(1, 1) [0], (float)rotMat.get(1, 2) [0], (float)Tvec.get(1, 0) [0]));
                m.transformation.SetRow(2, new Vector4((float)rotMat.get(2, 0) [0], (float)rotMat.get(2, 1) [0], (float)rotMat.get(2, 2) [0], (float)Tvec.get(2, 0) [0]));
                m.transformation.SetRow(3, new Vector4(0, 0, 0, 1));

//                      Debug.Log ("m.transformation " + m.transformation.ToString ());


                Rvec.Dispose();
                Tvec.Dispose();
                raux.Dispose();
                taux.Dispose();
                rotMat.Dispose();
            }
        }
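Both estimatePosition and the computePose method below fill a Unity Matrix4x4 row by row from OpenCV's rotation matrix and translation vector. A minimal sketch of that conversion as a helper (the helper name is hypothetical; it assumes OpenCVForUnity's Mat.get accessor used above):

        // Hypothetical helper: pack a 3x3 OpenCV rotation matrix and a 3x1 translation
        // vector into a Unity Matrix4x4, mirroring the SetRow calls above.
        static Matrix4x4 ComposeTransform(Mat rotMat, Mat tvec)
        {
            Matrix4x4 m = Matrix4x4.identity;
            for (int row = 0; row < 3; row++)
            {
                m.SetRow(row, new Vector4(
                    (float)rotMat.get(row, 0)[0],
                    (float)rotMat.get(row, 1)[0],
                    (float)rotMat.get(row, 2)[0],
                    (float)tvec.get(row, 0)[0]));
            }
            return m; // row 3 is already (0, 0, 0, 1) from Matrix4x4.identity
        }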
    /// <summary>
    /// Computes the pose of the pattern relative to the camera and stores it in pose3d.
    /// </summary>
    /// <param name="pattern">The tracked pattern providing the 3D model points.</param>
    /// <param name="camMatrix">3x3 camera intrinsic matrix.</param>
    /// <param name="distCoeff">Lens distortion coefficients.</param>
    public void computePose(Pattern pattern, Mat camMatrix, MatOfDouble distCoeff)
    {
        Mat Rvec = new Mat();
        Mat Tvec = new Mat();
        Mat raux = new Mat();
        Mat taux = new Mat();

        Calib3d.solvePnP(pattern.points3d, points2d, camMatrix, distCoeff, raux, taux);
        raux.convertTo(Rvec, CvType.CV_32F);
        taux.convertTo(Tvec, CvType.CV_32F);

        Mat rotMat = new Mat(3, 3, CvType.CV_64FC1);

        Calib3d.Rodrigues(Rvec, rotMat);

        pose3d.SetRow(0, new Vector4((float)rotMat.get(0, 0) [0], (float)rotMat.get(0, 1) [0], (float)rotMat.get(0, 2) [0], (float)Tvec.get(0, 0) [0]));
        pose3d.SetRow(1, new Vector4((float)rotMat.get(1, 0) [0], (float)rotMat.get(1, 1) [0], (float)rotMat.get(1, 2) [0], (float)Tvec.get(1, 0) [0]));
        pose3d.SetRow(2, new Vector4((float)rotMat.get(2, 0) [0], (float)rotMat.get(2, 1) [0], (float)rotMat.get(2, 2) [0], (float)Tvec.get(2, 0) [0]));
        pose3d.SetRow(3, new Vector4(0, 0, 0, 1));

//      Debug.Log ("pose3d " + pose3d.ToString ());

        Rvec.Dispose();
        Tvec.Dispose();
        raux.Dispose();
        taux.Dispose();
        rotMat.Dispose();
    }
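computePose assumes camMatrix already holds the camera intrinsics. A sketch of how such a matrix is typically built (CreateCameraMatrix is a hypothetical helper; it assumes square pixels and a principal point at the image center):

        // Hypothetical helper: build a 3x3 pinhole intrinsic matrix
        //     | fx  0  cx |
        //     |  0 fy  cy |
        //     |  0  0   1 |
        static Mat CreateCameraMatrix(double focalLengthPx, int width, int height)
        {
            Mat camMatrix = new Mat(3, 3, CvType.CV_64FC1);
            camMatrix.put(0, 0, focalLengthPx, 0, width / 2.0);
            camMatrix.put(1, 0, 0, focalLengthPx, height / 2.0);
            camMatrix.put(2, 0, 0, 0, 1);
            return camMatrix;
        }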
        /// <summary>
        /// Gets the frontal face angle estimated from 68 facial landmark points.
        /// </summary>
        /// <returns>The frontal face angle as Euler angles.</returns>
        /// <param name="points">68 face landmark points detected with Dlib.</param>
        public Vector3 getFrontalFaceAngle(List <Vector2> points)
        {
            if (points.Count != 68)
            {
                throw new ArgumentException("Invalid landmark_points. 68 points are required.", "points");
            }

            if (camMatrix == null)
            {
                throw new ArgumentNullException("camMatrix", "Invalid camMatrix.");
            }

            // Normalize: scale the points and center the nose tip (point 30) within the normWidth x normHeight frame.

            float   normScale = Math.Abs(points [30].y - points [8].y) / (normHeight / 2);
            Vector2 normDiff  = points [30] * normScale - new Vector2(normWidth / 2, normHeight / 2);

            for (int i = 0; i < points.Count; i++)
            {
                normPoints [i] = points [i] * normScale - normDiff;
            }
            //Debug.Log ("points[30] " + points[30]);
            //Debug.Log ("normPoints[30] " + normPoints[30]);
            //Debug.Log ("normScale " + normScale);
            //Debug.Log ("normDiff " + normDiff);

            imagePoints.fromArray(
                new Point((normPoints [38].x + normPoints [41].x) / 2, (normPoints [38].y + normPoints [41].y) / 2), //l eye
                new Point((normPoints [43].x + normPoints [46].x) / 2, (normPoints [43].y + normPoints [46].y) / 2), //r eye
                new Point(normPoints [33].x, normPoints [33].y),                                                     //nose
                new Point(normPoints [48].x, normPoints [48].y),                                                     //l mouth
                new Point(normPoints [54].x, normPoints [54].y),                                                     //r mouth
                new Point(normPoints [0].x, normPoints [0].y),                                                       //l ear
                new Point(normPoints [16].x, normPoints [16].y)                                                      //r ear
                );


            Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);

            Calib3d.Rodrigues(rvec, rotM);

            transformationM.SetRow(0, new Vector4((float)rotM.get(0, 0) [0], (float)rotM.get(0, 1) [0], (float)rotM.get(0, 2) [0], (float)tvec.get(0, 0) [0]));
            transformationM.SetRow(1, new Vector4((float)rotM.get(1, 0) [0], (float)rotM.get(1, 1) [0], (float)rotM.get(1, 2) [0], (float)tvec.get(1, 0) [0]));
            transformationM.SetRow(2, new Vector4((float)rotM.get(2, 0) [0], (float)rotM.get(2, 1) [0], (float)rotM.get(2, 2) [0], (float)tvec.get(2, 0) [0]));
            transformationM.SetRow(3, new Vector4(0, 0, 0, 1));

            ARM = invertYM * transformationM * invertZM;

            return(ExtractRotationFromMatrix(ref ARM).eulerAngles);
        }
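getFrontalFaceAngle relies on invertYM and invertZM fields initialized elsewhere in the class. They are typically reflection matrices; a sketch, consistent with how invertZM is constructed in a later example in this list:

        // Reflection matrices used to move between OpenCV's right-handed frame and
        // Unity's left-handed frame.
        Matrix4x4 invertYM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1));
        Matrix4x4 invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));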
        public bool UpdateExtrinsics(MatOfPoint3f patternPointsWorldMat, MatOfPoint2f patternPointsImageMat, Intrinsics intrinsics, int imageWidth, int imageHeight)
        {
            intrinsics.ToOpenCV(ref _sensorMatrix, imageWidth, imageHeight);

            // In order to match OpenCV's pixel space (zero at top-left) and Unity's camera space (up is positive), we flip the sensor matrix.
            _sensorMatrix.WriteValue(-_sensorMatrix.ReadValue(1, 1), 1, 1);                  // fy
            _sensorMatrix.WriteValue(imageHeight - _sensorMatrix.ReadValue(1, 2), 1, 2);     // cy

            // Find pattern pose, relative to camera (at zero position) using solvePnP.
            _isValid = Calib3d.solvePnP(patternPointsWorldMat, patternPointsImageMat, _sensorMatrix, _noDistCoeffs, _rotationVecMat, _translationVecMat);

            if (_isValid)
            {
                _extrinsics.UpdateFromOpenCvSolvePnp(_rotationVecMat, _translationVecMat);
            }

            return(_isValid);
        }
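UpdateExtrinsics passes _noDistCoeffs because the intrinsics are assumed to already account for lens distortion. A sketch of that field (the zero-filled layout is an assumption; OpenCV accepts 4, 5, or 8 distortion coefficients):

        // Hypothetical field: zero distortion coefficients (k1, k2, p1, p2).
        readonly MatOfDouble _noDistCoeffs = new MatOfDouble(0, 0, 0, 0);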
Example #5
        void UpdateCameraTransform()
        {
            // Update points.
            for (int p = 0; p < pointCount; p++)
            {
                Vector2 posImage = _userPointRects[p].anchorMin;
                posImage.Scale(new Vector2(_cameraTexture.width, _cameraTexture.height));
                _anchorPointsImage[p].set(new double[] { posImage.x, posImage.y });
                Vector3 posWorld = _anchorTransform.TransformPoint(defaultPointPositions[p] - Vector2.one * 0.5f);
                _anchorPointsWorld[p].set(new double[] { posWorld.x, posWorld.y, posWorld.z });
            }
            _anchorPointsImageMat.fromArray(_anchorPointsImage);
            _anchorPointsWorldMat.fromArray(_anchorPointsWorld);

            // Compute
            bool success = Calib3d.solvePnP(_anchorPointsWorldMat, _anchorPointsImageMat, _cameraMatrix, _noDistCoeffs, _rVec, _tVec);

            if (!success)
            {
                return;
            }

            // Convert.
            bool inverse = true;

            TrackingToolsHelper.ApplyPose(_rVec, _tVec, _targetCameraTransform, inverse);

            /*
             * Vector3 translation = TrackingToolsHelper.TranslationMatVectorToVector3( _tVec  );
             * Quaternion rotation = TrackingToolsHelper.RotationMatVectorToQuaternion( _rVec );
             * translation = rotation * translation;
             *
             * // Apply.
             * _targetCameraTransform.SetPositionAndRotation( translation, rotation );
             */

            // Save.
            SaveCircleAnchorPoints();
        }
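TrackingToolsHelper.ApplyPose is called with inverse = true because solvePnP returns the anchor points' pose in camera space, while we want the camera's pose in world space. A sketch of that inversion (a hypothetical helper, not the TrackingTools implementation):

        // Hypothetical helper: invert the object-in-camera transform from solvePnP to
        // obtain the camera-in-world transform (assuming the object points are in world space).
        static Matrix4x4 CameraFromSolvePnp(Mat rVec, Mat tVec)
        {
            Mat rotMat = new Mat(3, 3, CvType.CV_64FC1);
            Calib3d.Rodrigues(rVec, rotMat);
            Matrix4x4 objectInCamera = Matrix4x4.identity;
            for (int row = 0; row < 3; row++)
            {
                objectInCamera.SetRow(row, new Vector4(
                    (float)rotMat.get(row, 0)[0], (float)rotMat.get(row, 1)[0],
                    (float)rotMat.get(row, 2)[0], (float)tVec.get(row, 0)[0]));
            }
            rotMat.Dispose();
            return objectInCamera.inverse;
        }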
Example #6
        /// <summary>
        /// Gets the frontal face angles.
        /// </summary>
        /// <returns>Frontal face angles.</returns>
        /// <param name="points">Points of face landmark which was detected with Dlib.</param>
        public Vector3 GetFrontalFaceAngles(List <Vector2> points)
        {
            if (points.Count < 68)
            {
                throw new ArgumentException("Invalid face landmark points", "points");
            }

            landmarkPoints [0].x = (points [38].x + points [41].x) / 2;
            landmarkPoints [0].y = (points [38].y + points [41].y) / 2;
            landmarkPoints [1].x = (points [43].x + points [46].x) / 2;
            landmarkPoints [1].y = (points [43].y + points [46].y) / 2;
            landmarkPoints [2].x = points [30].x;
            landmarkPoints [2].y = points [30].y;
            landmarkPoints [3].x = points [48].x;
            landmarkPoints [3].y = points [48].y;
            landmarkPoints [4].x = points [54].x;
            landmarkPoints [4].y = points [54].y;
            landmarkPoints [5].x = points [0].x;
            landmarkPoints [5].y = points [0].y;
            landmarkPoints [6].x = points [16].x;
            landmarkPoints [6].y = points [16].y;

            // Normalize points.
            Point centerOffset = landmarkPoints [2] - new Point(imageWidth / 2, imageHeight / 2);

            for (int i = 0; i < landmarkPoints.Length; i++)
            {
                landmarkPoints [i] = landmarkPoints [i] - centerOffset;
            }

            imagePoints.fromArray(landmarkPoints);

            Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);

            double tvec_z = tvec.get(2, 0) [0];

//            Debug.Log (rvec.dump());
//            Debug.Log (tvec.dump());

            if (!double.IsNaN(tvec_z))
            {
                Calib3d.Rodrigues(rvec, rotM);

//                Debug.Log (rotM.dump());

                transformationM.SetRow(0, new Vector4((float)rotM.get(0, 0) [0], (float)rotM.get(0, 1) [0], (float)rotM.get(0, 2) [0], (float)tvec.get(0, 0) [0]));
                transformationM.SetRow(1, new Vector4((float)rotM.get(1, 0) [0], (float)rotM.get(1, 1) [0], (float)rotM.get(1, 2) [0], (float)tvec.get(1, 0) [0]));
                transformationM.SetRow(2, new Vector4((float)rotM.get(2, 0) [0], (float)rotM.get(2, 1) [0], (float)rotM.get(2, 2) [0], (float)tvec.get(2, 0) [0]));
                transformationM.SetRow(3, new Vector4(0, 0, 0, 1));

                transformationM = invertYM * transformationM * invertZM;

                Vector3 angles = ExtractRotationFromMatrix(ref transformationM).eulerAngles;

//                Debug.Log ("angles " + angles.x + " " + angles.y + " " + angles.z);

                float rotationX = (angles.x > 180) ? angles.x - 360 : angles.x;
                float rotationY = (angles.y > 180) ? angles.y - 360 : angles.y;
                float rotationZ = (tvec_z >= 0) ? ((angles.z > 180) ? angles.z - 360 : angles.z) : 180 - angles.z;

                if (tvec_z < 0)
                {
                    rotationX = -rotationX;
                    rotationY = -rotationY;
                    rotationZ = -rotationZ;
                }

                return(new Vector3(rotationX, rotationY, rotationZ));
            }
            else
            {
                return(new Vector3(0, 0, 0));
            }
        }
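The (angles.x > 180) ? angles.x - 360 : angles.x pattern above maps Unity's [0, 360) Euler angles into a signed range. As a standalone sketch:

        // Map a [0, 360) angle to the signed (-180, 180] range.
        static float ToSignedAngle(float degrees)
        {
            return (degrees > 180f) ? degrees - 360f : degrees;
        }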
Example #7
        // Update is called once per frame
        public override void UpdateValue()
        {
            if (dlibFaceLanmarkGetter == null)
            {
                return;
            }

            didUpdateHeadRotation = false;


            List <Vector2> points = dlibFaceLanmarkGetter.getFaceLanmarkPoints();

            if (points != null)
            {
                MatOfPoint3f objectPoints = null;
                if (points.Count == 68)
                {
                    objectPoints = objectPoints68;

                    imagePoints.fromArray(
                        new Point((points [38].x + points [41].x) / 2, (points [38].y + points [41].y) / 2), //l eye (Interpupillary breadth)
                        new Point((points [43].x + points [46].x) / 2, (points [43].y + points [46].y) / 2), //r eye (Interpupillary breadth)
                        new Point(points [30].x, points [30].y),                                             //nose (Nose top)
                        new Point(points [48].x, points [48].y),                                             //l mouth (Mouth breadth)
                        new Point(points [54].x, points [54].y),                                             //r mouth (Mouth breadth)
                        new Point(points [0].x, points [0].y),                                               //l ear (Bitragion breadth)
                        new Point(points [16].x, points [16].y)                                              //r ear (Bitragion breadth)
                        );
                }

                // Only the 68-point layout is handled above; bail out for any other
                // landmark count rather than passing a null Mat to solvePnP.
                if (objectPoints == null)
                {
                    return;
                }

                // Estimate head pose.
                if (rvec == null || tvec == null)
                {
                    rvec = new Mat(3, 1, CvType.CV_64FC1);
                    tvec = new Mat(3, 1, CvType.CV_64FC1);
                    Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
                }

                double tvec_z = tvec.get(2, 0) [0];

                if (double.IsNaN(tvec_z) || tvec_z < 0)    // If tvec holds bad data, do not reuse it as an extrinsic guess.
                {
                    Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
                }
                else
                {
                    Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec, true, Calib3d.SOLVEPNP_ITERATIVE);
                }
                //Debug.Log (tvec.dump());

                if (!double.IsNaN(tvec_z))
                {
                    // Convert to unity pose data.
                    double[] rvecArr = new double[3];
                    rvec.get(0, 0, rvecArr);
                    double[] tvecArr = new double[3];
                    tvec.get(0, 0, tvecArr);
                    PoseData poseData = ARUtils.ConvertRvecTvecToPoseData(rvecArr, tvecArr);

                    // Changes in pos/rot below these thresholds are ignored.
                    if (enableLowPassFilter)
                    {
                        ARUtils.LowpassPoseData(ref oldPoseData, ref poseData, positionLowPass, rotationLowPass);
                    }
                    oldPoseData = poseData;


                    transformationM = Matrix4x4.TRS(poseData.pos, poseData.rot, Vector3.one);

                    // right-handed coordinates system (OpenCV) to left-handed one (Unity)
                    ARM = invertYM * transformationM;

                    // Apply Z axis inverted matrix.
                    ARM = ARM * invertZM;



                    headRotation = ARUtils.ExtractRotationFromMatrix(ref ARM);

                    didUpdateHeadRotation = true;
                }
            }
        }
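The double solvePnP call above (re-solve from scratch when the previous estimate looks bad, otherwise refine it) recurs throughout these examples. Condensed into a sketch with a hypothetical helper name:

        // Reuse the previous frame's rvec/tvec as an initial guess only when it is sane;
        // otherwise restart the estimation from scratch.
        static void SolvePnPWithOptionalGuess(MatOfPoint3f objectPoints, MatOfPoint2f imagePoints,
            Mat camMatrix, MatOfDouble distCoeffs, Mat rvec, Mat tvec, bool guessIsValid)
        {
            if (guessIsValid)
                Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec, true, Calib3d.SOLVEPNP_ITERATIVE);
            else
                Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
        }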
        // Update is called once per frame
        void Update()
        {
            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();

                // Detect faces on the downscaled image.
                if (!enableSkipFrame || !imageOptimizationHelper.IsCurrentFrameSkipped())
                {
                    Mat   downScaleRgbaMat = null;
                    float DOWNSCALE_RATIO  = 1.0f;
                    if (enableDownScale)
                    {
                        downScaleRgbaMat = imageOptimizationHelper.GetDownScaleMat(rgbaMat);
                        DOWNSCALE_RATIO  = imageOptimizationHelper.downscaleRatio;
                    }
                    else
                    {
                        downScaleRgbaMat = rgbaMat;
                        DOWNSCALE_RATIO  = 1.0f;
                    }

                    // Set the downscaled mat.
                    OpenCVForUnityUtils.SetImage(faceLandmarkDetector, downScaleRgbaMat);

                    //detect face rects
                    detectionResult = faceLandmarkDetector.Detect();

                    if (enableDownScale)
                    {
                        for (int i = 0; i < detectionResult.Count; ++i)
                        {
                            var rect = detectionResult[i];
                            detectionResult[i] = new UnityEngine.Rect(
                                rect.x * DOWNSCALE_RATIO,
                                rect.y * DOWNSCALE_RATIO,
                                rect.width * DOWNSCALE_RATIO,
                                rect.height * DOWNSCALE_RATIO);
                        }
                    }
                }

                List <Vector2> points = null;
                if (detectionResult != null && detectionResult.Count > 0)
                {
                    // set the original scale image
                    OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);

                    //detect landmark points
                    points = faceLandmarkDetector.DetectLandmark(detectionResult[0]);
                }

                if (points != null)
                {
                    if (displayFacePoints)
                    {
                        OpenCVForUnityUtils.DrawFaceLandmark(rgbaMat, points, new Scalar(0, 255, 0, 255), 2);
                    }

                    MatOfPoint3f objectPoints   = null;
                    bool         isRightEyeOpen = false;
                    bool         isLeftEyeOpen  = false;
                    bool         isMouthOpen    = false;
                    if (points.Count == 68)
                    {
                        objectPoints = objectPoints68;

                        imagePoints.fromArray(
                            new Point((points[38].x + points[41].x) / 2, (points[38].y + points[41].y) / 2), //l eye (Interpupillary breadth)
                            new Point((points[43].x + points[46].x) / 2, (points[43].y + points[46].y) / 2), //r eye (Interpupillary breadth)
                            new Point(points[30].x, points[30].y),                                           //nose (Tip)
                            new Point(points[33].x, points[33].y),                                           //nose (Subnasale)
                            new Point(points[0].x, points[0].y),                                             //l ear (Bitragion breadth)
                            new Point(points[16].x, points[16].y)                                            //r ear (Bitragion breadth)
                            );

                        if (Mathf.Abs((float)(points[43].y - points[46].y)) > Mathf.Abs((float)(points[42].x - points[45].x)) / 5.0)
                        {
                            isRightEyeOpen = true;
                        }

                        if (Mathf.Abs((float)(points[38].y - points[41].y)) > Mathf.Abs((float)(points[39].x - points[36].x)) / 5.0)
                        {
                            isLeftEyeOpen = true;
                        }

                        float noseDistance  = Mathf.Abs((float)(points[27].y - points[33].y));
                        float mouseDistance = Mathf.Abs((float)(points[62].y - points[66].y));
                        if (mouseDistance > noseDistance / 5.0)
                        {
                            isMouthOpen = true;
                        }
                        else
                        {
                            isMouthOpen = false;
                        }
                    }
                    else if (points.Count == 17)
                    {
                        objectPoints = objectPoints17;

                        imagePoints.fromArray(
                            new Point((points[2].x + points[3].x) / 2, (points[2].y + points[3].y) / 2), //l eye (Interpupillary breadth)
                            new Point((points[4].x + points[5].x) / 2, (points[4].y + points[5].y) / 2), //r eye (Interpupillary breadth)
                            new Point(points[0].x, points[0].y),                                         //nose (Tip)
                            new Point(points[1].x, points[1].y),                                         //nose (Subnasale)
                            new Point(points[6].x, points[6].y),                                         //l ear (Bitragion breadth)
                            new Point(points[8].x, points[8].y)                                          //r ear (Bitragion breadth)
                            );

                        if (Mathf.Abs((float)(points[11].y - points[12].y)) > Mathf.Abs((float)(points[4].x - points[5].x)) / 5.0)
                        {
                            isRightEyeOpen = true;
                        }

                        if (Mathf.Abs((float)(points[9].y - points[10].y)) > Mathf.Abs((float)(points[2].x - points[3].x)) / 5.0)
                        {
                            isLeftEyeOpen = true;
                        }

                        float noseDistance  = Mathf.Abs((float)(points[3].y - points[1].y));
                        float mouseDistance = Mathf.Abs((float)(points[14].y - points[16].y));
                        if (mouseDistance > noseDistance / 2.0)
                        {
                            isMouthOpen = true;
                        }
                        else
                        {
                            isMouthOpen = false;
                        }
                    }
                    else if (points.Count == 6)
                    {
                        objectPoints = objectPoints6;

                        imagePoints.fromArray(
                            new Point((points[2].x + points[3].x) / 2, (points[2].y + points[3].y) / 2), //l eye (Interpupillary breadth)
                            new Point((points[4].x + points[5].x) / 2, (points[4].y + points[5].y) / 2), //r eye (Interpupillary breadth)
                            new Point(points[0].x, points[0].y),                                         //nose (Tip)
                            new Point(points[1].x, points[1].y)                                          //nose (Subnasale)
                            );
                    }
                    else if (points.Count == 5)
                    {
                        objectPoints = objectPoints5;

                        imagePoints.fromArray(
                            new Point(points[3].x, points[3].y), //l eye (Inner corner of the eye)
                            new Point(points[1].x, points[1].y), //r eye (Inner corner of the eye)
                            new Point(points[2].x, points[2].y), //l eye (Tail of the eye)
                            new Point(points[0].x, points[0].y), //r eye (Tail of the eye)
                            new Point(points[4].x, points[4].y)  //nose (Subnasale)
                            );

                        if (fpsMonitor != null)
                        {
                            fpsMonitor.consoleText = "This example mainly supports the 68-, 17-, and 6-point face landmark layouts.";
                        }
                    }

                    // estimate head pose
                    if (rvec == null || tvec == null)
                    {
                        rvec = new Mat(3, 1, CvType.CV_64FC1);
                        tvec = new Mat(3, 1, CvType.CV_64FC1);
                        Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
                    }


                    double tvec_x = tvec.get(0, 0)[0], tvec_y = tvec.get(1, 0)[0], tvec_z = tvec.get(2, 0)[0];

                    bool    isNotInViewport = false;
                    Vector4 pos             = VP * new Vector4((float)tvec_x, (float)tvec_y, (float)tvec_z, 1.0f);
                    if (pos.w != 0)
                    {
                        float x = pos.x / pos.w, y = pos.y / pos.w, z = pos.z / pos.w;
                        if (x < -1.0f || x > 1.0f || y < -1.0f || y > 1.0f || z < -1.0f || z > 1.0f)
                        {
                            isNotInViewport = true;
                        }
                    }

                    if (double.IsNaN(tvec_z) || isNotInViewport)
                    { // If tvec holds bad data (the estimated object is outside the camera's field of view), restart the solve without an extrinsic guess.
                        Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
                    }
                    else
                    {
                        Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec, true, Calib3d.SOLVEPNP_ITERATIVE);
                    }

                    //Debug.Log (tvec.dump());

                    if (!isNotInViewport)
                    {
                        if (displayHead)
                        {
                            head.SetActive(true);
                        }
                        if (displayAxes)
                        {
                            axes.SetActive(true);
                        }

                        if (displayEffects)
                        {
                            rightEye.SetActive(isRightEyeOpen);
                            leftEye.SetActive(isLeftEyeOpen);

                            if (isMouthOpen)
                            {
                                mouth.SetActive(true);
                                foreach (ParticleSystem ps in mouthParticleSystem)
                                {
                                    var em = ps.emission;
                                    em.enabled = true;
#if UNITY_5_5_OR_NEWER
                                    var main = ps.main;
                                    main.startSizeMultiplier = 20;
#else
                                    ps.startSize = 20;
#endif
                                }
                            }
                            else
                            {
                                foreach (ParticleSystem ps in mouthParticleSystem)
                                {
                                    var em = ps.emission;
                                    em.enabled = false;
                                }
                            }
                        }

                        // Convert to unity pose data.
                        double[] rvecArr = new double[3];
                        rvec.get(0, 0, rvecArr);
                        double[] tvecArr = new double[3];
                        tvec.get(0, 0, tvecArr);
                        PoseData poseData = ARUtils.ConvertRvecTvecToPoseData(rvecArr, tvecArr);

                        // Changes in pos/rot below these thresholds are ignored.
                        if (enableLowPassFilter)
                        {
                            ARUtils.LowpassPoseData(ref oldPoseData, ref poseData, positionLowPass, rotationLowPass);
                        }
                        oldPoseData = poseData;

                        // Create transform matrix.
                        transformationM = Matrix4x4.TRS(poseData.pos, poseData.rot, Vector3.one);
                    }


                    // right-handed coordinates system (OpenCV) to left-handed one (Unity)
                    // https://stackoverflow.com/questions/30234945/change-handedness-of-a-row-major-4x4-transformation-matrix
                    ARM = invertYM * transformationM * invertYM;

                    // Apply the Y-axis and Z-axis reflection matrices. (Adjust the posture of the AR object.)
                    ARM = ARM * invertYM * invertZM;

                    if (shouldMoveARCamera)
                    {
                        ARM = ARGameObject.transform.localToWorldMatrix * ARM.inverse;
                        ARUtils.SetTransformFromMatrix(ARCamera.transform, ref ARM);
                    }
                    else
                    {
                        ARM = ARCamera.transform.localToWorldMatrix * ARM;
                        ARUtils.SetTransformFromMatrix(ARGameObject.transform, ref ARM);
                    }
                }
                //Imgproc.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255, 255), 1, Imgproc.LINE_AA, false);

                OpenCVForUnity.UnityUtils.Utils.fastMatToTexture2D(rgbaMat, texture);
            }
        }
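The clip-space visibility test in the middle of Update (dividing by pos.w and checking each coordinate against [-1, 1]) can be isolated as a sketch; VP is assumed to be the virtual camera's projection matrix multiplied by its world-to-camera matrix:

        // Returns true when a camera-space position projects inside the view frustum.
        // Mirrors the sample's behavior of treating w == 0 as visible.
        static bool IsInViewport(Matrix4x4 vp, Vector3 positionCameraSpace)
        {
            Vector4 pos = vp * new Vector4(positionCameraSpace.x, positionCameraSpace.y, positionCameraSpace.z, 1f);
            if (pos.w == 0f) return true;
            float x = pos.x / pos.w, y = pos.y / pos.w, z = pos.z / pos.w;
            return x >= -1f && x <= 1f && y >= -1f && y <= 1f && z >= -1f && z <= 1f;
        }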
Example #9
        // Update is called once per frame
        void Update()
        {
            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();


                OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);

                //detect face rects
                List <UnityEngine.Rect> detectResult = faceLandmarkDetector.Detect();

                if (detectResult.Count > 0)
                {
                    //detect landmark points
                    List <Vector2> points = faceLandmarkDetector.DetectLandmark(detectResult [0]);

                    if (displayFacePoints)
                    {
                        OpenCVForUnityUtils.DrawFaceLandmark(rgbaMat, points, new Scalar(0, 255, 0, 255), 2);
                    }

                    imagePoints.fromArray(
                        new Point((points [38].x + points [41].x) / 2, (points [38].y + points [41].y) / 2), //l eye (Interpupillary breadth)
                        new Point((points [43].x + points [46].x) / 2, (points [43].y + points [46].y) / 2), //r eye (Interpupillary breadth)
                        new Point(points [30].x, points [30].y),                                             //nose (Nose top)
                        new Point(points [48].x, points [48].y),                                             //l mouth (Mouth breadth)
                        new Point(points [54].x, points [54].y),                                             //r mouth (Mouth breadth)
                        new Point(points [0].x, points [0].y),                                               //l ear (Bitragion breadth)
                        new Point(points [16].x, points [16].y)                                              //r ear (Bitragion breadth)
                        );

                    // Estimate head pose.
                    if (rvec == null || tvec == null)
                    {
                        rvec = new Mat(3, 1, CvType.CV_64FC1);
                        tvec = new Mat(3, 1, CvType.CV_64FC1);
                        Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
                    }

                    double tvec_z = tvec.get(2, 0) [0];

                    if (double.IsNaN(tvec_z) || tvec_z < 0)    // If tvec holds bad data, do not reuse it as an extrinsic guess.
                    {
                        Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
                    }
                    else
                    {
                        Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec, true, Calib3d.SOLVEPNP_ITERATIVE);
                    }

//                    Debug.Log (tvec.dump());

                    if (!double.IsNaN(tvec_z))
                    {
                        if (Mathf.Abs((float)(points [43].y - points [46].y)) > Mathf.Abs((float)(points [42].x - points [45].x)) / 6.0)
                        {
                            if (displayEffects)
                            {
                                rightEye.SetActive(true);
                            }
                        }

                        if (Mathf.Abs((float)(points [38].y - points [41].y)) > Mathf.Abs((float)(points [39].x - points [36].x)) / 6.0)
                        {
                            if (displayEffects)
                            {
                                leftEye.SetActive(true);
                            }
                        }
                        if (displayHead)
                        {
                            head.SetActive(true);
                        }
                        if (displayAxes)
                        {
                            axes.SetActive(true);
                        }


                        float noseDistance  = Mathf.Abs((float)(points [27].y - points [33].y));
                        float mouseDistance = Mathf.Abs((float)(points [62].y - points [66].y));
                        if (mouseDistance > noseDistance / 5.0)
                        {
                            if (displayEffects)
                            {
                                mouth.SetActive(true);
                                foreach (ParticleSystem ps in mouthParticleSystem)
                                {
                                    var em = ps.emission;
                                    em.enabled   = true;
                                    ps.startSize = 40 * (mouseDistance / noseDistance);
                                }
                            }
                        }
                        else
                        {
                            if (displayEffects)
                            {
                                foreach (ParticleSystem ps in mouthParticleSystem)
                                {
                                    var em = ps.emission;
                                    em.enabled = false;
                                }
                            }
                        }

                        Calib3d.Rodrigues(rvec, rotMat);

                        transformationM.SetRow(0, new Vector4((float)rotMat.get(0, 0) [0], (float)rotMat.get(0, 1) [0], (float)rotMat.get(0, 2) [0], (float)tvec.get(0, 0) [0]));
                        transformationM.SetRow(1, new Vector4((float)rotMat.get(1, 0) [0], (float)rotMat.get(1, 1) [0], (float)rotMat.get(1, 2) [0], (float)tvec.get(1, 0) [0]));
                        transformationM.SetRow(2, new Vector4((float)rotMat.get(2, 0) [0], (float)rotMat.get(2, 1) [0], (float)rotMat.get(2, 2) [0], (float)tvec.get(2, 0) [0]));
                        transformationM.SetRow(3, new Vector4(0, 0, 0, 1));

                        // right-handed coordinates system (OpenCV) to left-handed one (Unity)
                        ARM = invertYM * transformationM;

                        // Apply Z axis inverted matrix.
                        ARM = ARM * invertZM;

                        if (shouldMoveARCamera)
                        {
                            ARM = ARGameObject.transform.localToWorldMatrix * ARM.inverse;

                            ARUtils.SetTransformFromMatrix(ARCamera.transform, ref ARM);
                        }
                        else
                        {
                            ARM = ARCamera.transform.localToWorldMatrix * ARM;

                            ARUtils.SetTransformFromMatrix(ARGameObject.transform, ref ARM);
                        }
                    }
                }

                Imgproc.putText(rgbaMat, "W:" + rgbaMat.width() + " H:" + rgbaMat.height() + " SO:" + Screen.orientation, new Point(5, rgbaMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar(255, 255, 255, 255), 1, Imgproc.LINE_AA, false);

                OpenCVForUnity.Utils.matToTexture2D(rgbaMat, texture, webCamTextureToMatHelper.GetBufferColors());
            }
        }
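The mouth and eye checks above are simple ratio heuristics on the 68-point layout. The mouth check, isolated as a sketch (the 1/5 threshold comes from the sample; the inner-lip gap is points 62/66 and the nose bridge length points 27/33):

        // Heuristic: the mouth counts as open when the inner-lip gap exceeds one fifth
        // of the nose bridge length.
        static bool IsMouthOpen(List<Vector2> points)
        {
            float noseDistance = Mathf.Abs(points[27].y - points[33].y);
            float mouthDistance = Mathf.Abs(points[62].y - points[66].y);
            return mouthDistance > noseDistance / 5f;
        }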
Example #10
        private void UpdateARHeadTransform(List <Vector2> points, Matrix4x4 cameraToWorldMatrix)
        {
            // The coordinates in pixels of the object detected on the image projected onto the plane.
            imagePoints.fromArray(
                new Point((points [38].x + points [41].x) / 2, (points [38].y + points [41].y) / 2), //l eye (Interpupillary breadth)
                new Point((points [43].x + points [46].x) / 2, (points [43].y + points [46].y) / 2), //r eye (Interpupillary breadth)
                new Point(points [30].x, points [30].y),                                             //nose (Nose top)
                new Point(points [48].x, points [48].y),                                             //l mouth (Mouth breadth)
                new Point(points [54].x, points [54].y),                                             //r mouth (Mouth breadth)
                new Point(points [0].x, points [0].y),                                               //l ear (Bitragion breadth)
                new Point(points [16].x, points [16].y)                                              //r ear (Bitragion breadth)
                );

            // Estimate head pose.
            if (rvec == null || tvec == null)
            {
                rvec = new Mat(3, 1, CvType.CV_64FC1);
                tvec = new Mat(3, 1, CvType.CV_64FC1);
                Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
            }

            double tvec_z = tvec.get(2, 0) [0];

            if (double.IsNaN(tvec_z) || tvec_z < 0)   // If tvec holds bad data, do not reuse it as an extrinsic guess.
            {
                Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
            }
            else
            {
                Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec, true, Calib3d.SOLVEPNP_ITERATIVE);
            }

            if (applyEstimationPose && !double.IsNaN(tvec_z))
            {
                if (Mathf.Abs((float)(points [43].y - points [46].y)) > Mathf.Abs((float)(points [42].x - points [45].x)) / 5.0)
                {
                    if (displayEffects)
                    {
                        rightEye.SetActive(true);
                    }
                }
                else
                {
                    if (displayEffects)
                    {
                        rightEye.SetActive(false);
                    }
                }

                if (Mathf.Abs((float)(points [38].y - points [41].y)) > Mathf.Abs((float)(points [39].x - points [36].x)) / 5.0)
                {
                    if (displayEffects)
                    {
                        leftEye.SetActive(true);
                    }
                }
                else
                {
                    if (displayEffects)
                    {
                        leftEye.SetActive(false);
                    }
                }

                if (displayHead)
                {
                    head.SetActive(true);
                }
                if (displayAxes)
                {
                    axes.SetActive(true);
                }


                float noseDistance  = Mathf.Abs((float)(points [27].y - points [33].y));
                float mouseDistance = Mathf.Abs((float)(points [62].y - points [66].y));
                if (mouseDistance > noseDistance / 5.0)
                {
                    if (displayEffects)
                    {
                        mouth.SetActive(true);
                        foreach (ParticleSystem ps in mouthParticleSystem)
                        {
                            var em = ps.emission;
                            em.enabled = true;
                            #if UNITY_5_5_OR_NEWER
                            var main = ps.main;
                            main.startSizeMultiplier = 40 * (mouseDistance / noseDistance);
                            #else
                            ps.startSize = 40 * (mouseDistance / noseDistance);
                            #endif
                        }
                    }
                }
                else
                {
                    if (displayEffects)
                    {
                        foreach (ParticleSystem ps in mouthParticleSystem)
                        {
                            var em = ps.emission;
                            em.enabled = false;
                        }
                    }
                }

                // Convert to unity pose data.
                double[] rvecArr = new double[3];
                rvec.get(0, 0, rvecArr);
                double[] tvecArr = new double[3];
                tvec.get(0, 0, tvecArr);
                // Convert from millimeters to meters (and undo the detection downscale on Z).
                tvecArr[0] = tvecArr[0] / 1000.0;
                tvecArr[1] = tvecArr[1] / 1000.0;
                tvecArr[2] = tvecArr[2] / 1000.0 / imageOptimizationHelper.downscaleRatio;
                PoseData poseData = ARUtils.ConvertRvecTvecToPoseData(rvecArr, tvecArr);

                // Changes in pos/rot below these thresholds are ignored.
                if (enableLowPassFilter)
                {
                    ARUtils.LowpassPoseData(ref oldPoseData, ref poseData, positionLowPass, rotationLowPass);
                }
                oldPoseData = poseData;

                // Create transform matrix.
                transformationM = Matrix4x4.TRS(poseData.pos, poseData.rot, Vector3.one);

                // right-handed coordinates system (OpenCV) to left-handed one (Unity)
                ARM = invertYM * transformationM;

                // Apply Z-axis inverted matrix.
                ARM = ARM * invertZM;

                // Apply the cameraToWorld matrix with the Z-axis inverted.
                ARM = cameraToWorldMatrix * invertZM * ARM;

                ARUtils.SetTransformFromMatrix(arGameObject.transform, ref ARM);
            }
        }
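ARUtils.ConvertRvecTvecToPoseData is used throughout to turn OpenCV's Rodrigues rotation vector and translation into a Unity pose. A conceptual sketch of that conversion (an assumption based on the call sites, not the library source):

        // rvec is an angle-axis (Rodrigues) vector whose magnitude is the rotation
        // angle in radians; tvec is a translation in OpenCV's camera frame.
        static void RvecTvecToUnityPose(double[] rvecArr, double[] tvecArr, out Vector3 pos, out Quaternion rot)
        {
            pos = new Vector3((float)tvecArr[0], (float)tvecArr[1], (float)tvecArr[2]);
            Vector3 axis = new Vector3((float)rvecArr[0], (float)rvecArr[1], (float)rvecArr[2]);
            float angleRad = axis.magnitude;
            rot = angleRad > 1e-6f
                ? Quaternion.AngleAxis(angleRad * Mathf.Rad2Deg, axis / angleRad)
                : Quaternion.identity;
        }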
Example #11
        // NOTE: Accepting a List here is not ideal, but matching Dlib's conventions takes priority.
        /// <summary>
        /// Updates the head pose estimate from the given facial landmark points.
        /// </summary>
        /// <param name="landmarks">Face landmark points detected with Dlib.</param>
        public void EstimatePose(List <Vector2> landmarks)
        {
            // Bail out entirely if image initialization has not finished yet.
            if (_width < 1 || _height < 1)
            {
                return;
            }

            HasValidPoseData = false;

            SetImagePoints(landmarks);

            // Rough initial estimate.
            if (_rVec == null || _tVec == null)
            {
                _rVec = new Mat(3, 1, CvType.CV_64FC1);
                _tVec = new Mat(3, 1, CvType.CV_64FC1);
                Calib3d.solvePnP(_objPoints, _imagePoints, _camMatrix, _distCoeffs, _rVec, _tVec);
            }

            _tVec.get(0, 0, _tVecArrayTemp);
            float tx = (float)_tVecArrayTemp[0];
            float ty = (float)_tVecArrayTemp[1];
            float tz = (float)_tVecArrayTemp[2];

            bool notInViewport = false;
            var  pos           = _vp * new Vector4(tx, ty, tz, 1.0f);

            if (Mathf.Abs(pos.w) > 0.0001)
            {
                float x = pos.x / pos.w;
                float y = pos.y / pos.w;
                float z = pos.z / pos.w;
                notInViewport = Mathf.Abs(x) > 1.0f || Mathf.Abs(y) > 1.0f || Mathf.Abs(z) > 1.0f;
            }

            // NOTE: In short, if the current estimate looks too unreliable, redo the computation from scratch.
            if (float.IsNaN(tz) || notInViewport)
            {
                Calib3d.solvePnP(_objPoints, _imagePoints, _camMatrix, _distCoeffs, _rVec, _tVec);
            }
            else
            {
                // If tracking is working normally, this branch should be taken.
                Calib3d.solvePnP(
                    _objPoints, _imagePoints, _camMatrix, _distCoeffs, _rVec, _tVec,
                    true, Calib3d.SOLVEPNP_ITERATIVE
                    );
            }

            if (notInViewport)
            {
                return;
            }

            // Convert the final t/r values to a Position/Rotation for use in Unity.
            _rVec.get(0, 0, _rVecArray);
            _tVec.get(0, 0, _tVecArray);
            var poseData = ARUtils.ConvertRvecTvecToPoseData(_rVecArray, _tVecArray);

            // 0.001f converts from [mm] to [m].
            // invertYM: converts the right-handed frame to a left-handed one.
            // invertZM: flips front/back (presumably).
            var transformationM =
                _invertYM *
                Matrix4x4.TRS(0.001f * poseData.pos, poseData.rot, Vector3.one) *
                _invertZM;

            HeadPosition     = ARUtils.ExtractTranslationFromMatrix(ref transformationM);
            HeadRotation     = ARUtils.ExtractRotationFromMatrix(ref transformationM);
            HasValidPoseData = true;
        }
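EstimatePose tests the current tvec against _vp before trusting it as an extrinsic guess. _vp is assumed to be the clip-from-world matrix of a virtual camera matched to the webcam intrinsics; a minimal sketch of its construction from a Unity Camera:

        // Hypothetical setup: projection * worldToCamera gives the matrix used for the
        // viewport containment test above.
        void CacheViewProjection(Camera virtualCamera)
        {
            _vp = virtualCamera.projectionMatrix * virtualCamera.worldToCameraMatrix;
        }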
Example #12
        public override void UpdateValue()
        {
            if (matSourceGetterInterface == null)
            {
                return;
            }

            if (faceLandmarkGetterInterface == null)
            {
                return;
            }

            Mat rgbaMat = matSourceGetterInterface.GetMatSource();

            if (rgbaMat == null)
            {
                return;
            }
            else
            {
                if (rgbaMat.width() != imageWidth || rgbaMat.height() != imageHeight)
                {
                    imageWidth  = rgbaMat.width();
                    imageHeight = rgbaMat.height();
                    SetCameraMatrix(camMatrix, imageWidth, imageHeight);
                }
            }

            didUpdateHeadPositionAndRotation = false;

            List <Vector2> points = faceLandmarkGetterInterface.GetFaceLanmarkPoints();

            if (points != null)
            {
                MatOfPoint3f objectPoints = null;

                if (points.Count == 68)
                {
                    objectPoints = objectPoints68;

                    imagePoints.fromArray(
                        new Point((points[38].x + points[41].x) / 2, (points[38].y + points[41].y) / 2), //l eye (Interpupillary breadth)
                        new Point((points[43].x + points[46].x) / 2, (points[43].y + points[46].y) / 2), //r eye (Interpupillary breadth)
                        new Point(points[30].x, points[30].y),                                           //nose (Tip)
                        new Point(points[33].x, points[33].y),                                           //nose (Subnasale)
                        new Point(points[0].x, points[0].y),                                             //l ear (Bitragion breadth)
                        new Point(points[16].x, points[16].y)                                            //r ear (Bitragion breadth)
                        );
                }
                else if (points.Count == 17)
                {
                    objectPoints = objectPoints17;

                    imagePoints.fromArray(
                        new Point((points[2].x + points[3].x) / 2, (points[2].y + points[3].y) / 2), //l eye (Interpupillary breadth)
                        new Point((points[4].x + points[5].x) / 2, (points[4].y + points[5].y) / 2), //r eye (Interpupillary breadth)
                        new Point(points[0].x, points[0].y),                                         //nose (Tip)
                        new Point(points[1].x, points[1].y),                                         //nose (Subnasale)
                        new Point(points[6].x, points[6].y),                                         //l ear (Bitragion breadth)
                        new Point(points[8].x, points[8].y)                                          //r ear (Bitragion breadth)
                        );
                }
                else if (points.Count == 6)
                {
                    objectPoints = objectPoints6;

                    imagePoints.fromArray(
                        new Point((points[2].x + points[3].x) / 2, (points[2].y + points[3].y) / 2), //l eye (Interpupillary breadth)
                        new Point((points[4].x + points[5].x) / 2, (points[4].y + points[5].y) / 2), //r eye (Interpupillary breadth)
                        new Point(points[0].x, points[0].y),                                         //nose (Tip)
                        new Point(points[1].x, points[1].y)                                          //nose (Subnasale)
                        );
                }

                // Unsupported landmark layouts leave objectPoints null; bail out rather
                // than passing a null Mat to solvePnP.
                if (objectPoints == null)
                {
                    return;
                }

                // Estimate head pose.
                if (rvec == null || tvec == null)
                {
                    rvec = new Mat(3, 1, CvType.CV_64FC1);
                    tvec = new Mat(3, 1, CvType.CV_64FC1);
                    Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
                }


                double tvec_x = tvec.get(0, 0)[0], tvec_y = tvec.get(1, 0)[0], tvec_z = tvec.get(2, 0)[0];

                bool    isNotInViewport = false;
                Vector4 pos             = VP * new Vector4((float)tvec_x, (float)tvec_y, (float)tvec_z, 1.0f);
                if (pos.w != 0)
                {
                    float x = pos.x / pos.w, y = pos.y / pos.w, z = pos.z / pos.w;
                    if (x < -1.0f || x > 1.0f || y < -1.0f || y > 1.0f || z < -1.0f || z > 1.0f)
                    {
                        isNotInViewport = true;
                    }
                }

                if (double.IsNaN(tvec_z) || isNotInViewport)
                { // If tvec holds bad data (the estimated object is outside the camera's field of view), restart the solve without an extrinsic guess.
                    Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
                }
                else
                {
                    Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec, true, Calib3d.SOLVEPNP_ITERATIVE);
                }

                //Debug.Log (tvec.dump () + " " + isNotInViewport);

                if (!isNotInViewport)
                {
                    // Convert to unity pose data.
                    double[] rvecArr = new double[3];
                    rvec.get(0, 0, rvecArr);
                    double[] tvecArr = new double[3];
                    tvec.get(0, 0, tvecArr);
                    PoseData poseData = ARUtils.ConvertRvecTvecToPoseData(rvecArr, tvecArr);

                    // Adjust the position to real-world scale (mm to m).
                    poseData.pos = new Vector3(poseData.pos.x * 0.001f, poseData.pos.y * 0.001f, poseData.pos.z * 0.001f);

                    // Changes in pos/rot below these thresholds are ignored.
                    if (enableLowPassFilter)
                    {
                        ARUtils.LowpassPoseData(ref oldPoseData, ref poseData, positionLowPass, rotationLowPass);
                    }
                    oldPoseData = poseData;

                    Matrix4x4 transformationM = Matrix4x4.TRS(poseData.pos, poseData.rot, Vector3.one);

                    // right-handed coordinates system (OpenCV) to left-handed one (Unity)
                    // https://stackoverflow.com/questions/30234945/change-handedness-of-a-row-major-4x4-transformation-matrix
                    transformationM = invertYM * transformationM * invertYM;

                    // Apply the Y-axis and Z-axis reflection matrices. (Adjust the posture of the AR object.)
                    transformationM = transformationM * invertYM * invertZM;

                    headPosition = ARUtils.ExtractTranslationFromMatrix(ref transformationM);
                    headRotation = ARUtils.ExtractRotationFromMatrix(ref transformationM);

                    didUpdateHeadPositionAndRotation = true;
                }
            }
        }
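Per the linked Stack Overflow answer, sandwiching a transform between two Y reflections (invertYM * M * invertYM) converts it between right-handed and left-handed conventions. A quick numeric sanity check as a sketch:

        // A +90 degree rotation about Z in one handedness should come out as
        // -90 degrees (i.e. 270) about Z after the conversion.
        Matrix4x4 invertYM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1));
        Matrix4x4 rightHanded = Matrix4x4.TRS(Vector3.zero, Quaternion.Euler(0, 0, 90), Vector3.one);
        Matrix4x4 leftHanded = invertYM * rightHanded * invertYM;
        Debug.Log(leftHanded.rotation.eulerAngles); // approximately (0, 0, 270)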
Example #13
        // Update is called once per frame
        void Update()
        {
            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();


                OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);

                //detect face rects
                List <UnityEngine.Rect> detectResult = faceLandmarkDetector.Detect();

                if (detectResult.Count > 0)
                {
                    //detect landmark points
                    List <Vector2> points = faceLandmarkDetector.DetectLandmark(detectResult [0]);

                    if (isShowingFacePoints)
                    {
                        OpenCVForUnityUtils.DrawFaceLandmark(rgbaMat, points, new Scalar(0, 255, 0, 255), 2);
                    }

                    imagePoints.fromArray(
                        new Point((points [38].x + points [41].x) / 2, (points [38].y + points [41].y) / 2), //l eye
                        new Point((points [43].x + points [46].x) / 2, (points [43].y + points [46].y) / 2), //r eye
                        new Point(points [33].x, points [33].y),                                             //nose
                        new Point(points [48].x, points [48].y),                                             //l mouth
                        new Point(points [54].x, points [54].y),                                             //r mouth
                        new Point(points [0].x, points [0].y),                                               //l ear
                        new Point(points [16].x, points [16].y)                                              //r ear
                        );


                    Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);


                    if (tvec.get(2, 0) [0] > 0)
                    {
                        if (Mathf.Abs((float)(points [43].y - points [46].y)) > Mathf.Abs((float)(points [42].x - points [45].x)) / 6.0)
                        {
                            if (isShowingEffects)
                            {
                                rightEye.SetActive(true);
                            }
                        }

                        if (Mathf.Abs((float)(points [38].y - points [41].y)) > Mathf.Abs((float)(points [39].x - points [36].x)) / 6.0)
                        {
                            if (isShowingEffects)
                            {
                                leftEye.SetActive(true);
                            }
                        }
                        if (isShowingHead)
                        {
                            head.SetActive(true);
                        }
                        if (isShowingAxes)
                        {
                            axes.SetActive(true);
                        }


                        float noseDistance  = Mathf.Abs((float)(points [27].y - points [33].y));
                        float mouseDistance = Mathf.Abs((float)(points [62].y - points [66].y));
                        if (mouseDistance > noseDistance / 5.0)
                        {
                            if (isShowingEffects)
                            {
                                mouth.SetActive(true);
                                foreach (ParticleSystem ps in mouthParticleSystem)
                                {
                                    ps.enableEmission = true;
                                    ps.startSize      = 40 * (mouseDistance / noseDistance);
                                }
                            }
                        }
                        else
                        {
                            if (isShowingEffects)
                            {
                                foreach (ParticleSystem ps in mouthParticleSystem)
                                {
                                    ps.enableEmission = false;
                                }
                            }
                        }


                        // position
                        Vector4 pos = new Vector4((float)tvec.get(0, 0) [0], (float)tvec.get(1, 0) [0], (float)tvec.get(2, 0) [0], 0);     // from OpenCV
                        // right-handed coordinates system (OpenCV) to left-handed one (Unity)
                        ARGameObject.transform.localPosition = new Vector3(pos.x, -pos.y, pos.z);


                        // rotation
                        Calib3d.Rodrigues(rvec, rotMat);

                        Vector3 forward = new Vector3((float)rotMat.get(0, 2) [0], (float)rotMat.get(1, 2) [0], (float)rotMat.get(2, 2) [0]); // from OpenCV
                        Vector3 up      = new Vector3((float)rotMat.get(0, 1) [0], (float)rotMat.get(1, 1) [0], (float)rotMat.get(2, 1) [0]); // from OpenCV
                        // right-handed coordinates system (OpenCV) to left-handed one (Unity)
                        Quaternion rot = Quaternion.LookRotation(new Vector3(forward.x, -forward.y, forward.z), new Vector3(up.x, -up.y, up.z));
                        ARGameObject.transform.localRotation = rot;


                        // Apply Z axis inverted matrix.
                        invertZM        = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));
                        transformationM = ARGameObject.transform.localToWorldMatrix * invertZM;

                        // Apply camera transform matrix.
                        transformationM = ARCamera.transform.localToWorldMatrix * transformationM;

                        ARUtils.SetTransformFromMatrix(ARGameObject.transform, ref transformationM);
                    }
                }

                Imgproc.putText(rgbaMat, "W:" + rgbaMat.width() + " H:" + rgbaMat.height() + " SO:" + Screen.orientation, new Point(5, rgbaMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar(255, 255, 255, 255), 1, Imgproc.LINE_AA, false);

                OpenCVForUnity.Utils.matToTexture2D(rgbaMat, texture, webCamTextureToMatHelper.GetBufferColors());
            }
        }
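The example above converts the OpenCV pose into a Unity transform by extracting the rotation matrix's forward and up columns and negating their Y components. Below is a minimal sketch of that conversion factored into a standalone helper; it assumes OpenCVForUnity's Mat accessors and Calib3d.Rodrigues (exact namespaces vary by plugin version), and the helper name PoseConverter is ours, not the example's.

// A minimal sketch (not part of the example above): convert an OpenCV pose
// (rvec, tvec) into a Unity position and rotation. Namespaces vary by
// OpenCVForUnity version; older releases expose Mat/Calib3d directly under
// "OpenCVForUnity", newer ones under CoreModule/Calib3dModule.
using UnityEngine;
using OpenCVForUnity;

public static class PoseConverter
{
    public static void ToUnity(Mat rvec, Mat tvec, out Vector3 pos, out Quaternion rot)
    {
        using (Mat rotMat = new Mat(3, 3, CvType.CV_64FC1))
        {
            Calib3d.Rodrigues(rvec, rotMat);

            // Columns 2 and 1 of R are the object's forward and up axes.
            Vector3 forward = new Vector3((float)rotMat.get(0, 2)[0], (float)rotMat.get(1, 2)[0], (float)rotMat.get(2, 2)[0]);
            Vector3 up      = new Vector3((float)rotMat.get(0, 1)[0], (float)rotMat.get(1, 1)[0], (float)rotMat.get(2, 1)[0]);

            // Negate the Y components: right-handed (OpenCV) to left-handed (Unity).
            rot = Quaternion.LookRotation(new Vector3(forward.x, -forward.y, forward.z),
                                          new Vector3(up.x, -up.y, up.z));
            pos = new Vector3((float)tvec.get(0, 0)[0], -(float)tvec.get(1, 0)[0], (float)tvec.get(2, 0)[0]);
        }
    }
}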
Exemplo n.º 14
        private void UpdateARHeadTransform(List <Vector2> points)
        {
            // The 2D pixel coordinates of the detected landmarks on the image plane.
            imagePoints.fromArray(
                new Point((points [38].x + points [41].x) / 2, (points [38].y + points [41].y) / 2), //l eye (Interpupillary breadth)
                new Point((points [43].x + points [46].x) / 2, (points [43].y + points [46].y) / 2), //r eye (Interpupillary breadth)
                new Point(points [33].x, points [33].y),                                             //nose (Nose top)
                new Point(points [48].x, points [48].y),                                             //l mouth (Mouth breadth)
                new Point(points [54].x, points [54].y),                                             //r mouth (Mouth breadth)
                new Point(points [0].x, points [0].y),                                               //l ear (Bitragion breadth)
                new Point(points [16].x, points [16].y)                                              //r ear (Bitragion breadth)
                );

            //Estimate head pose.
            Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);


            if (tvec.get(2, 0) [0] > 0)
            {
                if (Mathf.Abs((float)(points [43].y - points [46].y)) > Mathf.Abs((float)(points [42].x - points [45].x)) / 6.0)
                {
                    if (isShowingEffects)
                    {
                        rightEye.SetActive(true);
                    }
                }

                if (Mathf.Abs((float)(points [38].y - points [41].y)) > Mathf.Abs((float)(points [39].x - points [36].x)) / 6.0)
                {
                    if (isShowingEffects)
                    {
                        leftEye.SetActive(true);
                    }
                }
                if (isShowingHead)
                {
                    head.SetActive(true);
                }
                if (isShowingAxes)
                {
                    axes.SetActive(true);
                }


                float noseDistance  = Mathf.Abs((float)(points [27].y - points [33].y));
                float mouseDistance = Mathf.Abs((float)(points [62].y - points [66].y));
                if (mouseDistance > noseDistance / 5.0)
                {
                    if (isShowingEffects)
                    {
                        mouth.SetActive(true);
                        foreach (ParticleSystem ps in mouthParticleSystem)
                        {
                            ps.enableEmission = true;
                            ps.startSize      = 40 * (mouseDistance / noseDistance);
                        }
                    }
                }
                else
                {
                    if (isShowingEffects)
                    {
                        foreach (ParticleSystem ps in mouthParticleSystem)
                        {
                            ps.enableEmission = false;
                        }
                    }
                }

                Calib3d.Rodrigues(rvec, rotMat);

                transformationM.SetRow(0, new Vector4((float)rotMat.get(0, 0) [0], (float)rotMat.get(0, 1) [0], (float)rotMat.get(0, 2) [0], (float)tvec.get(0, 0) [0] / 1000));
                transformationM.SetRow(1, new Vector4((float)rotMat.get(1, 0) [0], (float)rotMat.get(1, 1) [0], (float)rotMat.get(1, 2) [0], (float)tvec.get(1, 0) [0] / 1000));
                transformationM.SetRow(2, new Vector4((float)rotMat.get(2, 0) [0], (float)rotMat.get(2, 1) [0], (float)rotMat.get(2, 2) [0], (float)tvec.get(2, 0) [0] / 1000 / webCamTextureToMatHelper.DOWNSCALE_RATIO));
                transformationM.SetRow(3, new Vector4(0, 0, 0, 1));

                // right-handed coordinates system (OpenCV) to left-handed one (Unity)
                ARM = invertYM * transformationM;

                // Apply Z axis inverted matrix.
                ARM = ARM * invertZM;

                // Apply camera transform matrix.
                ARM = ARCamera.transform.localToWorldMatrix * ARM;

                ARUtils.SetTransformFromMatrix(ARGameObject.transform, ref ARM);
            }
        }
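This variant does the handedness conversion with matrices instead: it pre-multiplies the OpenCV [R|t] matrix by a Y-inverting scale and post-multiplies by a Z-inverting one. A short sketch of how invertYM and invertZM are typically constructed with the plain Unity API follows; the wrapper class name is ours.

// Sketch (assumed construction, matching the usage above): invertYM flips
// the Y axis and invertZM flips the Z axis; composing them around the
// OpenCV [R|t] matrix yields a left-handed, Unity-space transform.
using UnityEngine;

public static class HandednessUtil
{
    public static Matrix4x4 OpenCVPoseToUnity(Matrix4x4 transformationM)
    {
        Matrix4x4 invertYM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1));
        Matrix4x4 invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));
        return invertYM * transformationM * invertZM;
    }
}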
Exemplo n.º 15
    /// <summary>
    /// Update is called once per frame
    /// </summary>
    void Update()
    {
        if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
        {
            Mat rgbaMat = webCamTextureToMatHelper.GetMat();

            OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);

            // Detect face rects.
            List <UnityEngine.Rect> detectResult = faceLandmarkDetector.Detect();
            if (detectResult.Count > 0)
            {
                //detect landmark points
                List <Vector2> points = faceLandmarkDetector.DetectLandmark(detectResult[0]);
                //draw the landmark points on the mat
                OpenCVForUnityUtils.DrawFaceLandmark(rgbaMat, points, new Scalar(0, 255, 0, 255), 2);

                imagePoints.fromArray(
                    new Point((points[38].x + points[41].x) / 2, (points[38].y + points[41].y) / 2), //l eye (Interpupillary breadth)
                    new Point((points[43].x + points[46].x) / 2, (points[43].y + points[46].y) / 2), //r eye (Interpupillary breadth)
                    new Point(points[30].x, points[30].y),                                           //nose (Nose top)
                    new Point(points[48].x, points[48].y),                                           //l mouth (Mouth breadth)
                    new Point(points[54].x, points[54].y),                                           //r mouth (Mouth breadth)
                    new Point(points[0].x, points[0].y),                                             //l ear (Bitragion breadth)
                    new Point(points[16].x, points[16].y)                                            //r ear (Bitragion breadth)
                    );

                // Estimate head pose.
                if (rvec == null || tvec == null)
                {
                    rvec = new Mat(3, 1, CvType.CV_64FC1);
                    tvec = new Mat(3, 1, CvType.CV_64FC1);
                    //find the object pose from the 3D-2D point correspondences; rvec is the rotation vector, tvec the translation vector
                    Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
                }

                double tvec_z = tvec.get(2, 0)[0];

                if (double.IsNaN(tvec_z) || tvec_z < 0)
                {
                    Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
                }
                else
                {
                    Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec, true, Calib3d.SOLVEPNP_ITERATIVE);
                }

                if (!double.IsNaN(tvec_z) && points.Count == 68)
                {
                    cubismParameterDictionary.Clear();
                    Calib3d.Rodrigues(rvec, rotMat);

                    transformationM.SetRow(0, new Vector4((float)rotMat.get(0, 0)[0], (float)rotMat.get(0, 1)[0], (float)rotMat.get(0, 2)[0], (float)tvec.get(0, 0)[0]));
                    transformationM.SetRow(1, new Vector4((float)rotMat.get(1, 0)[0], (float)rotMat.get(1, 1)[0], (float)rotMat.get(1, 2)[0], (float)tvec.get(1, 0)[0]));
                    transformationM.SetRow(2, new Vector4((float)rotMat.get(2, 0)[0], (float)rotMat.get(2, 1)[0], (float)rotMat.get(2, 2)[0], (float)tvec.get(2, 0)[0]));
                    transformationM.SetRow(3, new Vector4(0, 0, 0, 1));

                    ARM = invertYM * transformationM * invertZM;
                    Vector3 forward;
                    forward.x = ARM.m02;
                    forward.y = ARM.m12;
                    forward.z = ARM.m22;

                    Vector3 upwards;
                    upwards.x = ARM.m01;
                    upwards.y = ARM.m11;
                    upwards.z = ARM.m21;

                    Vector3 angles  = Quaternion.LookRotation(forward, upwards).eulerAngles;
                    float   rotateX = angles.x > 180 ? angles.x - 360 : angles.x;
                    cubismParameterDictionary.Add(ParamAngleY, (float)Math.Round(rotateX));
                    float rotateY = angles.y > 180 ? angles.y - 360 : angles.y;
                    cubismParameterDictionary.Add(ParamAngleX, (float)Math.Round(-rotateY) * 2);
                    float rotateZ = angles.z > 180 ? angles.z - 360 : angles.z;
                    cubismParameterDictionary.Add(ParamAngleZ, (float)Math.Round(-rotateZ) * 2);
                    //Debug.Log("X" + rotateX + " Y" + rotateY + " Z" + rotateZ);

                    //ParamAngleX.BlendToValue(BlendMode,(float)(Math.Round(-rotateY) * 2));
                    //ParamAngleY.BlendToValue(BlendMode, (float)Math.Round(rotateX));
                    //ParamAngleZ.BlendToValue(BlendMode, (float)Math.Round(-rotateZ) * 2);

                    float eyeOpen_L = Mathf.Clamp(Mathf.Abs(points[43].y - points[47].y) / (Mathf.Abs(points[43].x - points[44].x) * 0.75f), -0.1f, 2.0f);
                    if (eyeOpen_L >= 0.8f)
                    {
                        eyeOpen_L = 1f;
                    }
                    else
                    {
                        eyeOpen_L = 0;
                    }


                    float eyeOpen_R = Mathf.Clamp(Mathf.Abs(points[38].y - points[40].y) / (Mathf.Abs(points[37].x - points[38].x) * 0.75f), -0.1f, 2.0f);
                    if (eyeOpen_R >= 0.8f)
                    {
                        eyeOpen_R = 1f;
                    }
                    else
                    {
                        eyeOpen_R = 0;
                    }

                    // ParamEyeROpen.BlendToValue(BlendMode, eyeOpen_R);
                    cubismParameterDictionary.Add(ParamEyeROpen, eyeOpen_R);
                    // ParamEyeLOpen.BlendToValue(BlendMode, eyeOpen_L);
                    cubismParameterDictionary.Add(ParamEyeLOpen, eyeOpen_L);
                    // ParamEyeBallX.BlendToValue(BlendMode, (float)rotateY / 30f);
                    cubismParameterDictionary.Add(ParamEyeBallX, rotateY / 30f);
                    // ParamEyeBallX.BlendToValue(BlendMode, (float)-rotateX / 30f - 0.25f);
                    cubismParameterDictionary.Add(ParamEyeBallY, (float)-rotateX / 30f - 0.25f);

                    float RY = Mathf.Abs(points[19].y - points[27].y) / Mathf.Abs(points[27].y - points[29].y);
                    RY -= 1;
                    RY *= 4f;
                    float LY = Mathf.Abs(points[24].y - points[27].y) / Mathf.Abs(points[27].y - points[29].y);
                    LY -= 1;
                    LY *= 4f;

                    // ParamBrowRY.BlendToValue(BlendMode, RY);
                    cubismParameterDictionary.Add(ParamBrowRY, RY);
                    // ParamBrowLY.BlendToValue(BlendMode, LY);
                    cubismParameterDictionary.Add(ParamBrowLY, LY);
                    float mouthOpen = Mathf.Clamp01(Mathf.Abs(points[62].y - points[66].y) / (Mathf.Abs(points[51].y - points[62].y) + Mathf.Abs(points[66].y - points[57].y)));
                    if (mouthOpen < 0.6f)
                    {
                        mouthOpen = 0;
                    }
                    // ParamMouthOpenY.BlendToValue(BlendMode, mouthOpen);
                    cubismParameterDictionary.Add(ParamMouthOpenY, mouthOpen);
                    float mouthSize = Mathf.Abs(points[48].x - points[54].x) / (Mathf.Abs(points[31].x - points[35].x));
                    // ParamMouthForm.BlendToValue(BlendMode, Mathf.Clamp(mouthSize, -1.0f, 1.0f));
                    cubismParameterDictionary.Add(ParamMouthForm, Mathf.Clamp(mouthSize, -1.0f, 1.0f));
                }
            }
            OpenCVForUnity.Utils.matToTexture2D(rgbaMat, texture, webCamTextureToMatHelper.GetBufferColors());
        }
    }
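Unity reports eulerAngles in the [0, 360) range, while the Cubism angle parameters expect a signed range, so the example shifts anything past 180 down by 360. A small sketch of that normalization pulled out as a helper; the names AngleUtil and NormalizeAngle are ours.

// Sketch: map Unity's [0, 360) Euler angles into a signed [-180, 180]
// range before feeding them to Cubism parameters (helper name is ours).
using UnityEngine;

public static class AngleUtil
{
    public static float NormalizeAngle(float degrees)
    {
        // 350 degrees becomes -10; values up to 180 pass through unchanged.
        return degrees > 180f ? degrees - 360f : degrees;
    }
}

// Usage, mirroring the example above:
//   Vector3 angles = Quaternion.LookRotation(forward, upwards).eulerAngles;
//   float rotateX = AngleUtil.NormalizeAngle(angles.x);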
Exemplo n.º 16
        // Update is called once per frame
        void Update()
        {
            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();


                OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);

                //detect face rects
                List <UnityEngine.Rect> detectResult = faceLandmarkDetector.Detect();

                if (detectResult.Count > 0)
                {
                    //detect landmark points
                    List <Vector2> points = faceLandmarkDetector.DetectLandmark(detectResult [0]);

                    if (displayFacePoints)
                    {
                        OpenCVForUnityUtils.DrawFaceLandmark(rgbaMat, points, new Scalar(0, 255, 0, 255), 2);
                    }

                    MatOfPoint3f objectPoints = null;
                    if (points.Count == 68)
                    {
                        objectPoints = objectPoints68;

                        imagePoints.fromArray(
                            new Point((points [38].x + points [41].x) / 2, (points [38].y + points [41].y) / 2), //l eye (Interpupillary breadth)
                            new Point((points [43].x + points [46].x) / 2, (points [43].y + points [46].y) / 2), //r eye (Interpupillary breadth)
                            new Point(points [30].x, points [30].y),                                             //nose (Tip)
                            new Point(points [33].x, points [33].y),                                             //nose (Subnasale)
                            new Point(points [0].x, points [0].y),                                               //l ear (Bitragion breadth)
                            new Point(points [16].x, points [16].y)                                              //r ear (Bitragion breadth)
                            );


                        float noseDistance  = Mathf.Abs((float)(points [27].y - points [33].y));
                        float mouseDistance = Mathf.Abs((float)(points [62].y - points [66].y));
                    }
                    else if (points.Count == 17)
                    {
                        objectPoints = objectPoints17;

                        imagePoints.fromArray(
                            new Point((points [2].x + points [3].x) / 2, (points [2].y + points [3].y) / 2), //l eye (Interpupillary breadth)
                            new Point((points [4].x + points [5].x) / 2, (points [4].y + points [5].y) / 2), //r eye (Interpupillary breadth)
                            new Point(points [0].x, points [0].y),                                           //nose (Tip)
                            new Point(points [1].x, points [1].y),                                           //nose (Subnasale)
                            new Point(points [6].x, points [6].y),                                           //l ear (Bitragion breadth)
                            new Point(points [8].x, points [8].y)                                            //r ear (Bitragion breadth)
                            );

                        float noseDistance  = Mathf.Abs((float)(points [3].y - points [1].y));
                        float mouseDistance = Mathf.Abs((float)(points [14].y - points [16].y));
                    }
                    else if (points.Count == 6)
                    {
                        objectPoints = objectPoints6;

                        imagePoints.fromArray(
                            new Point((points [2].x + points [3].x) / 2, (points [2].y + points [3].y) / 2), //l eye (Interpupillary breadth)
                            new Point((points [4].x + points [5].x) / 2, (points [4].y + points [5].y) / 2), //r eye (Interpupillary breadth)
                            new Point(points [0].x, points [0].y),                                           //nose (Tip)
                            new Point(points [1].x, points [1].y)                                            //nose (Subnasale)
                            );
                    }
                    else if (points.Count == 5)
                    {
                        objectPoints = objectPoints5;

                        imagePoints.fromArray(
                            new Point(points [3].x, points [3].y), //l eye (Inner corner of the eye)
                            new Point(points [1].x, points [1].y), //r eye (Inner corner of the eye)
                            new Point(points [2].x, points [2].y), //l eye (Tail of the eye)
                            new Point(points [0].x, points [0].y), //r eye (Tail of the eye)
                            new Point(points [4].x, points [4].y)  //nose (Subnasale)
                            );

                        if (fpsMonitor != null)
                        {
                            fpsMonitor.consoleText = "This example mainly supports 68/17/6-point face landmarks.";
                        }
                    }

                    // estimate head pose
                    if (rvec == null || tvec == null)
                    {
                        rvec = new Mat(3, 1, CvType.CV_64FC1);
                        tvec = new Mat(3, 1, CvType.CV_64FC1);
                        Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
                    }


                    double tvec_x = tvec.get(0, 0) [0], tvec_y = tvec.get(1, 0) [0], tvec_z = tvec.get(2, 0) [0];

                    bool    isNotInViewport = false;
                    Vector4 pos             = VP * new Vector4((float)tvec_x, (float)tvec_y, (float)tvec_z, 1.0f);
                    if (pos.w != 0)
                    {
                        float x = pos.x / pos.w, y = pos.y / pos.w, z = pos.z / pos.w;
                        if (x < -1.0f || x > 1.0f || y < -1.0f || y > 1.0f || z < -1.0f || z > 1.0f)
                        {
                            isNotInViewport = true;
                        }
                    }

                    if (double.IsNaN(tvec_z) || isNotInViewport)    // if tvec holds bad data (the estimated object is outside the camera's field of view), do not use an extrinsic guess
                    {
                        Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
                    }
                    else
                    {
                        Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec, true, Calib3d.SOLVEPNP_ITERATIVE);
                    }

                    if (!isNotInViewport)
                    {
                        if (displayHead)
                        {
                            head.SetActive(true);
                        }
                        if (displayAxes)
                        {
                            axes.SetActive(true);
                        }

                        // Convert to unity pose data.
                        double[] rvecArr = new double[3];
                        rvec.get(0, 0, rvecArr);
                        double[] tvecArr = new double[3];
                        tvec.get(0, 0, tvecArr);
                        PoseData poseData = ARUtils.ConvertRvecTvecToPoseData(rvecArr, tvecArr);

                        // Changes in pos/rot below these thresholds are ignored.
                        if (enableLowPassFilter)
                        {
                            ARUtils.LowpassPoseData(ref oldPoseData, ref poseData, positionLowPass, rotationLowPass);
                        }
                        oldPoseData = poseData;

                        // Create transform matrix.
                        transformationM = Matrix4x4.TRS(poseData.pos, poseData.rot, Vector3.one);

                        //move joystick
                        float t = 0.50f;
                        int   xpos;
                        if ((poseData.rot.eulerAngles.x - oldPoseData.rot.eulerAngles.x) < 10 && (poseData.rot.eulerAngles.x - oldPoseData.rot.eulerAngles.x) > 0)
                        {
                            xpos = Convert.ToInt16(Mathf.Lerp(poseData.rot.eulerAngles.x, oldPoseData.rot.eulerAngles.x, t));
                        }
                        else
                        {
                            xpos = Convert.ToInt16(Mathf.Lerp(poseData.rot.eulerAngles.x, oldPoseData.rot.eulerAngles.x, 1));
                        }


                        int ypos;
                        if ((poseData.rot.eulerAngles.y - oldPoseData.rot.eulerAngles.y) < 10 && (poseData.rot.eulerAngles.y - oldPoseData.rot.eulerAngles.y) > 0)
                        {
                            ypos = Convert.ToInt16(Mathf.Lerp(poseData.rot.eulerAngles.y, oldPoseData.rot.eulerAngles.y, t));
                        }
                        else
                        {
                            ypos = Convert.ToInt16(Mathf.Lerp(poseData.rot.eulerAngles.y, oldPoseData.rot.eulerAngles.y, 1));
                        }


                        int rpos;
                        if ((poseData.rot.eulerAngles.z - oldPoseData.rot.eulerAngles.z) < 10 && (poseData.rot.eulerAngles.z - oldPoseData.rot.eulerAngles.z) > 0)
                        {
                            rpos = Convert.ToInt16(Mathf.Lerp(poseData.rot.eulerAngles.z, oldPoseData.rot.eulerAngles.z, t));
                        }
                        else
                        {
                            rpos = Convert.ToInt16(Mathf.Lerp(poseData.rot.eulerAngles.z, oldPoseData.rot.eulerAngles.z, 1));
                        }


                        //int ypos = Convert.ToInt16(Mathf.Lerp(poseData.rot.eulerAngles.y, oldPoseData.rot.eulerAngles.y,t));
                        int zpos = Convert.ToInt16(poseData.pos.z);

                        //zoom
                        int scZ = Convert.ToInt32(Scale(zpos, 400, 800, 0, 32000));

                        //up down
                        int scX;
                        if (xpos >= 0 && xpos < 20)
                        {
                            scX = Convert.ToInt32(Scale(xpos, 0, 10, 29000, 32000));
                        }
                        else
                        {
                            scX = Convert.ToInt32(Scale(xpos, 340, 360, 0, 29000));
                        }

                        //left right
                        int scY = Convert.ToInt32(Scale(ypos, 160, 200, 0, 32000));

                        //roll
                        int scR = Convert.ToInt32(Scale(rpos, 160, 200, 0, 32000));


                        iReport.AxisX  = scX;
                        iReport.AxisY  = scY;
                        iReport.AxisZ  = scZ;
                        iReport.Slider = scR;
                        bool upd = joystick.UpdateVJD(id, ref iReport);

                        string message = "x:" + scX + " y:" + scY + " z:" + scZ + " r:" + scR;
                        Debug.Log(message);
                    }


                    // right-handed coordinates system (OpenCV) to left-handed one (Unity)
                    ARM = invertYM * transformationM;

                    // Apply Z-axis inverted matrix.
                    ARM = ARM * invertZM;

                    if (shouldMoveARCamera)
                    {
                        ARM = ARGameObject.transform.localToWorldMatrix * ARM.inverse;
                        ARUtils.SetTransformFromMatrix(ARCamera.transform, ref ARM);
                    }
                    else
                    {
                        ARM = ARCamera.transform.localToWorldMatrix * ARM;
                        ARUtils.SetTransformFromMatrix(ARGameObject.transform, ref ARM);
                    }
                }

                Imgproc.putText(rgbaMat, "W:" + rgbaMat.width() + " H:" + rgbaMat.height() + " SO:" + Screen.orientation, new Point(5, rgbaMat.rows() - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar(255, 255, 255, 255), 1, Imgproc.LINE_AA, false);



                OpenCVForUnity.UnityUtils.Utils.fastMatToTexture2D(rgbaMat, texture);
            }
        }
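The joystick mapping above goes through a Scale(value, inMin, inMax, outMin, outMax) helper whose body is not shown in this excerpt. A plausible implementation is a linear remap with clamping; the sketch below is our assumption, not the example's actual code.

// Hypothetical body for the Scale() helper called above (the original is
// not shown in this excerpt): linearly remap value from [inMin, inMax] to
// [outMin, outMax], clamping to the output range.
static double Scale(double value, double inMin, double inMax, double outMin, double outMax)
{
    double t = (value - inMin) / (inMax - inMin);
    t = t < 0.0 ? 0.0 : (t > 1.0 ? 1.0 : t);
    return outMin + t * (outMax - outMin);
}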
Exemplo n.º 17
        // Update is called once per frame
        void Update()
        {
            if (!initDone)
            {
                return;
            }

            if (screenOrientation != Screen.orientation)
            {
                screenOrientation = Screen.orientation;
                updateLayout();
            }

                        #if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
            if (webCamTexture.width > 16 && webCamTexture.height > 16)
            {
                        #else
            if (webCamTexture.didUpdateThisFrame)
            {
                        #endif
                Utils.webCamTextureToMat(webCamTexture, rgbaMat, colors);
                // Flip to correct the orientation.
                if (webCamDevice.isFrontFacing)
                {
                    if (webCamTexture.videoRotationAngle == 0)
                    {
                        Core.flip(rgbaMat, rgbaMat, 1);
                    }
                    else if (webCamTexture.videoRotationAngle == 90)
                    {
                        Core.flip(rgbaMat, rgbaMat, 0);
                    }
                    else if (webCamTexture.videoRotationAngle == 180)
                    {
                        Core.flip(rgbaMat, rgbaMat, 0);
                    }
                    else if (webCamTexture.videoRotationAngle == 270)
                    {
                        Core.flip(rgbaMat, rgbaMat, 1);
                    }
                }
                else
                {
                    if (webCamTexture.videoRotationAngle == 180)
                    {
                        Core.flip(rgbaMat, rgbaMat, -1);
                    }
                    else if (webCamTexture.videoRotationAngle == 270)
                    {
                        Core.flip(rgbaMat, rgbaMat, -1);
                    }
                }
                // Convert the image to grayscale.
                Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);

                if (faceTracker.getPoints().Count <= 0)
                {
                    Debug.Log("detectFace");

                    // Equalize the grayscale image's histogram.
                    using (Mat equalizeHistMat = new Mat())
                        using (MatOfRect faces = new MatOfRect())
                        {
                            Imgproc.equalizeHist(grayMat, equalizeHistMat);

                            cascade.detectMultiScale(equalizeHistMat, faces, 1.1f, 2, 0
                                                     | Objdetect.CASCADE_FIND_BIGGEST_OBJECT
                                                     | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size(equalizeHistMat.cols() * 0.15, equalizeHistMat.cols() * 0.15), new Size());

                            if (faces.rows() > 0)
                            {
                                Debug.Log("--------------------------faces");
                                Debug.Log("faces " + faces.dump());
                                Debug.Log("--------------------------faces");
                                // Add initial face points from the MatOfRect.
                                faceTracker.addPoints(faces);

                                // Draw the face rects.
                                OpenCVForUnity.Rect[] rects = faces.toArray();
                                for (int i = 0; i < rects.Length; i++)
                                {
                                    Core.rectangle(rgbaMat, new Point(rects[i].x, rects[i].y), new Point(rects[i].x + rects[i].width, rects[i].y + rects[i].height), new Scalar(255, 0, 0, 255), 2);
                                }
                            }
                        }
                }

                // Track face points. If face points <= 0, this always returns false.
                if (faceTracker.track(grayMat, faceTrackerParams))
                {
                    if (isDrawPoints)
                    {
                        //Debug.Log("--------------------------100-----------------------------------");
                        faceTracker.draw(rgbaMat, new Scalar(255, 0, 0, 255), new Scalar(0, 255, 0, 255));
                    }
                    //Debug.Log("--------------------------1000");
                    //Core.putText(rgbaMat, "'Tap' or 'Space Key' to Reset", new Point(5, rgbaMat.rows() - 5), Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar(255, 255, 255, 255), 2, Core.LINE_AA, false);

                    Point[] points = faceTracker.getPoints()[0];

                    //eyeWidth = points[36] - points[31];

/*
 *                  Debug.Log("--------------------------0");
 *                  Debug.Log(points[31]);
 *                  Debug.Log("--------------------------2");
 *                  Debug.Log(points[36]);
 *                  Debug.Log("--------------------------1");
 */
                    if (points.Length > 0)
                    {
                        imagePoints.fromArray(
                            points[31],                             //l eye
                            points[36],                             //r eye
                            points[67],                             // nose
                            points[48],                             //l mouth
                            points[54]                              //r mouth
                            );

                        Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);

                        bool isRefresh = false;

                        if (tvec.get(2, 0)[0] > 0 && tvec.get(2, 0)[0] < 1200 * ((float)webCamTexture.width / (float)width))
                        {
                            isRefresh = true;

                            if (oldRvec == null)
                            {
                                oldRvec = new Mat();
                                rvec.copyTo(oldRvec);
                            }
                            if (oldTvec == null)
                            {
                                oldTvec = new Mat();
                                tvec.copyTo(oldTvec);
                            }

                            // Filter rvec noise.
                            using (Mat absDiffRvec = new Mat())
                            {
                                Core.absdiff(rvec, oldRvec, absDiffRvec);

                                using (Mat cmpRvec = new Mat())
                                {
                                    Core.compare(absDiffRvec, new Scalar(rvecNoiseFilterRange), cmpRvec, Core.CMP_GT);

                                    if (Core.countNonZero(cmpRvec) > 0)
                                    {
                                        isRefresh = false;
                                    }
                                }
                            }

                            // Filter tvec noise.
                            using (Mat absDiffTvec = new Mat())
                            {
                                Core.absdiff(tvec, oldTvec, absDiffTvec);
                                using (Mat cmpTvec = new Mat())
                                {
                                    Core.compare(absDiffTvec, new Scalar(tvecNoiseFilterRange), cmpTvec, Core.CMP_GT);

                                    if (Core.countNonZero(cmpTvec) > 0)
                                    {
                                        isRefresh = false;
                                    }
                                }
                            }
                        }

                        if (isRefresh)
                        {
                            if (!rightEye.activeSelf)
                            {
                                rightEye.SetActive(true);
                            }
                            if (!leftEye.activeSelf)
                            {
                                leftEye.SetActive(true);
                            }

                            rvec.copyTo(oldRvec);
                            tvec.copyTo(oldTvec);

                            Calib3d.Rodrigues(rvec, rotM);

                            transformationM.SetRow(0, new Vector4((float)rotM.get(0, 0)[0], (float)rotM.get(0, 1)[0], (float)rotM.get(0, 2)[0], (float)tvec.get(0, 0)[0]));
                            transformationM.SetRow(1, new Vector4((float)rotM.get(1, 0)[0], (float)rotM.get(1, 1)[0], (float)rotM.get(1, 2)[0], (float)tvec.get(1, 0)[0]));
                            transformationM.SetRow(2, new Vector4((float)rotM.get(2, 0)[0], (float)rotM.get(2, 1)[0], (float)rotM.get(2, 2)[0], (float)tvec.get(2, 0)[0]));
                            transformationM.SetRow(3, new Vector4(0, 0, 0, 1));

                            modelViewMtrx = lookAtM * transformationM * invertZM;
                            ARCamera.worldToCameraMatrix = modelViewMtrx;
                        }
                    }
                }
                Utils.matToTexture2D(rgbaMat, texture, colors);
            }

            if (Input.GetKeyUp(KeyCode.Space) || Input.touchCount > 0)
            {
                faceTracker.reset();
                if (oldRvec != null)
                {
                    oldRvec.Dispose();
                    oldRvec = null;
                }
                if (oldTvec != null)
                {
                    oldTvec.Dispose();
                    oldTvec = null;
                }
                ARCamera.ResetWorldToCameraMatrix();

                rightEye.SetActive(false);
                leftEye.SetActive(false);
            }
        }

        void OnDisable()
        {
            webCamTexture.Stop();
        }
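The tracker above suppresses pose jitter by comparing each new rvec/tvec against the previous frame's values and rejecting the update when any component jumps past a threshold. Below is a condensed sketch of that gate using the same OpenCVForUnity calls (Core.absdiff, Core.compare, Core.countNonZero); the helper name is ours.

// Sketch (helper name is ours): accept a new pose estimate only if every
// component of vec is within range of the previous frame's oldVec.
static bool WithinNoiseRange(Mat vec, Mat oldVec, double range)
{
    using (Mat absDiff = new Mat())
    using (Mat cmp = new Mat())
    {
        Core.absdiff(vec, oldVec, absDiff);
        Core.compare(absDiff, new Scalar(range), cmp, Core.CMP_GT);
        // Any nonzero element means some component jumped past the threshold.
        return Core.countNonZero(cmp) == 0;
    }
}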
        private void HandPoseEstimationProcess(Mat rgbaMat)
        {
            //Imgproc.blur(mRgba, mRgba, new Size(5,5));
            Imgproc.GaussianBlur(rgbaMat, rgbaMat, new Size(3, 3), 1, 1);
            //Imgproc.medianBlur(mRgba, mRgba, 3);

            if (!isColorSelected)
            {
                return;
            }

            List <MatOfPoint> contours = detector.GetContours();

            detector.Process(rgbaMat);

            //Debug.Log ("Contours count: " + contours.Count);

            if (contours.Count <= 0)
            {
                return;
            }

            RotatedRect rect = Imgproc.minAreaRect(new MatOfPoint2f(contours[0].toArray()));

            double boundWidth  = rect.size.width;
            double boundHeight = rect.size.height;
            int    boundPos    = 0;

            for (int i = 1; i < contours.Count; i++)
            {
                rect = Imgproc.minAreaRect(new MatOfPoint2f(contours[i].toArray()));
                if (rect.size.width * rect.size.height > boundWidth * boundHeight)
                {
                    boundWidth  = rect.size.width;
                    boundHeight = rect.size.height;
                    boundPos    = i;
                }
            }

            MatOfPoint contour = contours[boundPos];

            OpenCVForUnity.CoreModule.Rect boundRect = Imgproc.boundingRect(new MatOfPoint(contour.toArray()));

            Imgproc.rectangle(rgbaMat, boundRect.tl(), boundRect.br(), CONTOUR_COLOR_WHITE, 2, 8, 0);

            //            Debug.Log (
            //                " Row start [" +
            //(int)boundRect.tl ().y + "] row end [" +
            //                    (int)boundRect.br ().y + "] Col start [" +
            //                    (int)boundRect.tl ().x + "] Col end [" +
            //                    (int)boundRect.br ().x + "]");

            Point bottomLeft  = new Point(boundRect.x, boundRect.y + boundRect.height);
            Point topLeft     = new Point(boundRect.x, boundRect.y);
            Point bottomRight = new Point(boundRect.x + boundRect.width, boundRect.y + boundRect.height);
            Point topRight    = new Point(boundRect.x + boundRect.width, boundRect.y);

            rectPoints = new MatOfPoint2f(new Point(boundRect.x, boundRect.y),                                      //topleft
                                          new Point(boundRect.x + boundRect.width, boundRect.y),                    //Top Right
                                          new Point(boundRect.x + boundRect.width, boundRect.y + boundRect.height), //Bottom Right
                                          new Point(boundRect.x, boundRect.y + boundRect.height)                    //Bottom Left
                                          );

            //double a = boundRect.br ().y - boundRect.tl ().y;
            //a = a * 0.7;
            //a = boundRect.tl ().y + a;

            //Debug.Log (" A [" + a + "] br y - tl y = [" + (boundRect.br ().y - boundRect.tl ().y) + "]");

            //Imgproc.rectangle (rgbaMat, boundRect.tl (), new Point (boundRect.br ().x, a), CONTOUR_COLOR, 2, 8, 0);

            List <Point3> m_markerCorners3dList = new List <Point3>();

            m_markerCorners3dList.Add(new Point3(-0.5f, -0.5f, 0)); //Top, Left (A)
            m_markerCorners3dList.Add(new Point3(+0.5f, -0.5f, 0)); //Top, Right (B)
            m_markerCorners3dList.Add(new Point3(+0.5f, +0.5f, 0)); //Bottom, Right (C)
            m_markerCorners3dList.Add(new Point3(-0.5f, +0.5f, 0)); //Bottom, Left (D)
            m_markerCorners3d.fromList(m_markerCorners3dList);

            //estimate pose
            Mat Rvec = new Mat();
            Mat Tvec = new Mat();
            Mat raux = new Mat();
            Mat taux = new Mat();

            Calib3d.solvePnP(m_markerCorners3d, rectPoints, camMatrix, distCoeff, raux, taux);

            raux.convertTo(Rvec, CvType.CV_32F);
            taux.convertTo(Tvec, CvType.CV_32F);

            rotMat = new Mat(3, 3, CvType.CV_64FC1);
            Calib3d.Rodrigues(Rvec, rotMat);

            transformationM.SetRow(0, new Vector4((float)rotMat.get(0, 0)[0], (float)rotMat.get(0, 1)[0], (float)rotMat.get(0, 2)[0], (float)Tvec.get(0, 0)[0]));
            transformationM.SetRow(1, new Vector4((float)rotMat.get(1, 0)[0], (float)rotMat.get(1, 1)[0], (float)rotMat.get(1, 2)[0], (float)Tvec.get(1, 0)[0]));
            transformationM.SetRow(2, new Vector4((float)rotMat.get(2, 0)[0], (float)rotMat.get(2, 1)[0], (float)rotMat.get(2, 2)[0], (float)Tvec.get(2, 0)[0]));
            transformationM.SetRow(3, new Vector4(0, 0, 0, 1));

            //Debug.Log ("transformationM " + transformationM.ToString ());

            Rvec.Dispose();
            Tvec.Dispose();
            raux.Dispose();
            taux.Dispose();
            rotMat.Dispose();

            ARM = ARCamera.transform.localToWorldMatrix * invertYM * transformationM * invertZM;
            //Debug.Log("arM " + ARM.ToString());

            if (ARGameObject != null)
            {
                ARUtils.SetTransformFromMatrix(ARGameObject.transform, ref ARM);
                if (deactivateCoroutine == null)
                {
                    deactivateCoroutine = StartCoroutine(Wait(10.0f));
                }
                ARGameObject.SetActive(true);
            }

            //end pose estimation

            MatOfPoint2f pointMat = new MatOfPoint2f();

            Imgproc.approxPolyDP(new MatOfPoint2f(contour.toArray()), pointMat, 3, true);
            contour = new MatOfPoint(pointMat.toArray());

            MatOfInt  hull         = new MatOfInt();
            MatOfInt4 convexDefect = new MatOfInt4();

            Imgproc.convexHull(new MatOfPoint(contour.toArray()), hull);

            if (hull.toArray().Length < 3)
            {
                return;
            }

            Imgproc.convexityDefects(new MatOfPoint(contour.toArray()), hull, convexDefect);

            List <MatOfPoint> hullPoints = new List <MatOfPoint>();
            List <Point>      listPo     = new List <Point>();

            for (int j = 0; j < hull.toList().Count; j++)
            {
                listPo.Add(contour.toList()[hull.toList()[j]]);
            }

            MatOfPoint e = new MatOfPoint();

            e.fromList(listPo);
            hullPoints.Add(e);

            List <Point> listPoDefect = new List <Point>();

            if (convexDefect.rows() > 0)
            {
                List <int>   convexDefectList = convexDefect.toList();
                List <Point> contourList      = contour.toList();
                for (int j = 0; j < convexDefectList.Count; j = j + 4)
                {
                    Point farPoint = contourList[convexDefectList[j + 2]];
                    int   depth    = convexDefectList[j + 3];
                    //if (depth > threasholdSlider.value && farPoint.y < a)
                    //{
                    //    listPoDefect.Add(contourList[convexDefectList[j + 2]]);
                    //}
                    //Debug.Log ("convexDefectList [" + j + "] " + convexDefectList [j + 3]);
                }
            }


            Debug.Log("hull: " + hull.toList());
            if (convexDefect.rows() > 0)
            {
                Debug.Log("defects: " + convexDefect.toList());
            }

            //use these contours to do hand detection
            Imgproc.drawContours(rgbaMat, hullPoints, -1, CONTOUR_COLOR, 3);

            int defectsTotal = (int)convexDefect.total();

            Debug.Log("Defect total " + defectsTotal);

            this.numberOfFingers = listPoDefect.Count;
            if (this.numberOfFingers > 5)
            {
                this.numberOfFingers = 5;
            }

            Debug.Log("numberOfFingers " + numberOfFingers);

            Imgproc.putText(rgbaMat, "" + numberOfFingers, new Point(rgbaMat.cols() / 2, rgbaMat.rows() / 2), Imgproc.FONT_HERSHEY_PLAIN, 4.0, new Scalar(255, 255, 255, 255), 6, Imgproc.LINE_AA, false);
            numberOfFingersText.text = numberOfFingers.ToString();


            foreach (Point p in listPoDefect)
            {
                Imgproc.circle(rgbaMat, p, 6, new Scalar(255, 0, 255, 255), -1);
            }
        }
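Note that listPoDefect stays empty above because the depth filter is commented out, so numberOfFingers never rises. Below is a sketch of the intended filter, with a fixed depthThreshold standing in for the original threasholdSlider.value and the far point's y-bound check (against "a") omitted; both are defined outside this excerpt.

// Sketch of the commented-out defect filter above. depthThreshold stands in
// for the original threasholdSlider.value. Each convexity defect is 4 ints:
// start index, end index, far-point index, and depth (a fixed-point
// distance, roughly distance * 256).
static List<Point> FilterDefects(MatOfInt4 convexDefect, MatOfPoint contour, int depthThreshold)
{
    List<Point> farPoints = new List<Point>();
    List<int> defects = convexDefect.toList();
    List<Point> contourPts = contour.toList();
    for (int j = 0; j + 3 < defects.Count; j += 4)
    {
        if (defects[j + 3] > depthThreshold)
        {
            farPoints.Add(contourPts[defects[j + 2]]);
        }
    }
    return farPoints;
}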
Exemplo n.º 19
        private IEnumerator init()
        {
            axes.SetActive(false);
            head.SetActive(false);
            rightEye.SetActive(false);
            leftEye.SetActive(false);
            mouth.SetActive(false);


            if (webCamTexture != null)
            {
                faceTracker.reset();

                webCamTexture.Stop();
                initDone = false;

                rgbaMat.Dispose();
                grayMat.Dispose();
                cascade.Dispose();
                camMatrix.Dispose();
                distCoeffs.Dispose();
            }

            // Checks how many and which cameras are available on the device
            for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++)
            {
                if (WebCamTexture.devices [cameraIndex].isFrontFacing == isFrontFacing)
                {
                    Debug.Log(cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing);

                    webCamDevice = WebCamTexture.devices [cameraIndex];

                    webCamTexture = new WebCamTexture(webCamDevice.name, width, height);

                    break;
                }
            }

            if (webCamTexture == null)
            {
                webCamDevice  = WebCamTexture.devices [0];
                webCamTexture = new WebCamTexture(webCamDevice.name, width, height);
            }

            Debug.Log("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);



            // Starts the camera
            webCamTexture.Play();


            while (true)
            {
                //If you want to use webcamTexture.width and webcamTexture.height on iOS, you have to wait until webcamTexture.didUpdateThisFrame == 1, otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
                                                                #if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
                if (webCamTexture.width > 16 && webCamTexture.height > 16)
                {
                                                                #else
                if (webCamTexture.didUpdateThisFrame)
                {
                                                                                #endif
                    Debug.Log("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
                    Debug.Log("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrongFacing " + webCamDevice.isFrontFacing);

                    colors = new Color32[webCamTexture.width * webCamTexture.height];

                    rgbaMat = new Mat(webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);
                    grayMat = new Mat(webCamTexture.height, webCamTexture.width, CvType.CV_8UC1);

                    texture = new Texture2D(webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);


                    cascade = new CascadeClassifier(Utils.getFilePath("haarcascade_frontalface_alt.xml"));
                    if (cascade.empty())
                    {
                        Debug.LogError("cascade file is not loaded.Please copy from “FaceTrackerSample/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
                    }

                    gameObject.transform.localScale = new Vector3(webCamTexture.width, webCamTexture.height, 1);


                    gameObject.transform.localEulerAngles = new Vector3(0, 0, 0);
//										gameObject.transform.rotation = gameObject.transform.rotation * Quaternion.AngleAxis (webCamTexture.videoRotationAngle, Vector3.back);


//										bool _videoVerticallyMirrored = webCamTexture.videoVerticallyMirrored;
//										float scaleX = 1;
//										float scaleY = _videoVerticallyMirrored ? -1.0f : 1.0f;
//										gameObject.transform.localScale = new Vector3 (scaleX * gameObject.transform.localScale.x, scaleY * gameObject.transform.localScale.y, 1);


                    gameObject.GetComponent <Renderer> ().material.mainTexture = texture;

                    Camera.main.orthographicSize = webCamTexture.height / 2;



                    int max_d = Mathf.Max(rgbaMat.rows(), rgbaMat.cols());
                    camMatrix = new Mat(3, 3, CvType.CV_64FC1);
                    camMatrix.put(0, 0, max_d);
                    camMatrix.put(0, 1, 0);
                    camMatrix.put(0, 2, rgbaMat.cols() / 2.0f);
                    camMatrix.put(1, 0, 0);
                    camMatrix.put(1, 1, max_d);
                    camMatrix.put(1, 2, rgbaMat.rows() / 2.0f);
                    camMatrix.put(2, 0, 0);
                    camMatrix.put(2, 1, 0);
                    camMatrix.put(2, 2, 1.0f);

                    Size     imageSize      = new Size(rgbaMat.cols(), rgbaMat.rows());
                    double   apertureWidth  = 0;
                    double   apertureHeight = 0;
                    double[] fovx           = new double[1];
                    double[] fovy           = new double[1];
                    double[] focalLength    = new double[1];
                    Point    principalPoint = new Point();
                    double[] aspectratio    = new double[1];



                    Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);

                    Debug.Log("imageSize " + imageSize.ToString());
                    Debug.Log("apertureWidth " + apertureWidth);
                    Debug.Log("apertureHeight " + apertureHeight);
                    Debug.Log("fovx " + fovx [0]);
                    Debug.Log("fovy " + fovy [0]);
                    Debug.Log("focalLength " + focalLength [0]);
                    Debug.Log("principalPoint " + principalPoint.ToString());
                    Debug.Log("aspectratio " + aspectratio [0]);


                    ARCamera.fieldOfView = (float)fovy [0];

                    Debug.Log("camMatrix " + camMatrix.dump());


                    distCoeffs = new MatOfDouble(0, 0, 0, 0);
                    Debug.Log("distCoeffs " + distCoeffs.dump());



                    lookAtM = getLookAtMatrix(new Vector3(0, 0, 0), new Vector3(0, 0, 1), new Vector3(0, -1, 0));
                    Debug.Log("lookAt " + lookAtM.ToString());

                    invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));



                    initDone = true;

                    break;
                }
                else
                {
                    yield return(0);
                }
            }
        }
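The init coroutine above derives ARCamera.fieldOfView from the camera matrix via Calib3d.calibrationMatrixValues. With zero aperture sizes, that fovy is just the pinhole relation fovy = 2 * atan(h / (2 * fy)); the sketch below computes it directly, and the helper name is ours.

// Sketch: vertical field of view in degrees from a pinhole camera matrix,
// equivalent to the fovy that Calib3d.calibrationMatrixValues reports above
// when the aperture sizes are zero (helper name is ours).
static float FovYDegrees(double fy, double imageHeight)
{
    return 2f * Mathf.Atan((float)(imageHeight / (2.0 * fy))) * Mathf.Rad2Deg;
}

// With the matrix built above (fy = max_d, image height = rgbaMat.rows()):
//   ARCamera.fieldOfView = FovYDegrees(max_d, rgbaMat.rows());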

        // Update is called once per frame
        void Update()
        {
            if (!initDone)
            {
                return;
            }

                                                #if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
            if (webCamTexture.width > 16 && webCamTexture.height > 16)
            {
                                                #else
            if (webCamTexture.didUpdateThisFrame)
            {
                                                                #endif

                Utils.webCamTextureToMat(webCamTexture, rgbaMat, colors);

                //flip to correct direction.
                if (webCamTexture.videoVerticallyMirrored)
                {
                    if (webCamDevice.isFrontFacing)
                    {
                        if (webCamTexture.videoRotationAngle == 0)
                        {
                            Core.flip(rgbaMat, rgbaMat, -1);
                        }
                        else if (webCamTexture.videoRotationAngle == 180)
                        {
                            Core.flip(rgbaMat, rgbaMat, 0);
                        }
                    }
                    else
                    {
                        if (webCamTexture.videoRotationAngle == 0)
                        {
                        }
                        else if (webCamTexture.videoRotationAngle == 180)
                        {
                            Core.flip(rgbaMat, rgbaMat, 1);
                        }
                    }
                }
                else
                {
                    if (webCamDevice.isFrontFacing)
                    {
                        if (webCamTexture.videoRotationAngle == 0)
                        {
                            Core.flip(rgbaMat, rgbaMat, 1);
                        }
                        else if (webCamTexture.videoRotationAngle == 180)
                        {
                            Core.flip(rgbaMat, rgbaMat, 0);
                        }
                    }
                    else
                    {
                        if (webCamTexture.videoRotationAngle == 0)
                        {
                        }
                        else if (webCamTexture.videoRotationAngle == 180)
                        {
                            Core.flip(rgbaMat, rgbaMat, -1);
                        }
                    }
                }
                //convert image to greyscale
                Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);


                if (faceTracker.getPoints().Count <= 0)
                {
                    Debug.Log("detectFace");

                    //equalize the grayscale image's histogram
                    using (Mat equalizeHistMat = new Mat())
                        using (MatOfRect faces = new MatOfRect()) {
                            Imgproc.equalizeHist(grayMat, equalizeHistMat);

                            cascade.detectMultiScale(equalizeHistMat, faces, 1.1f, 2, 0
                                                     | Objdetect.CASCADE_FIND_BIGGEST_OBJECT
                                                     | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size(equalizeHistMat.cols() * 0.15, equalizeHistMat.cols() * 0.15), new Size());



                            if (faces.rows() > 0)
                            {
                                Debug.Log("faces " + faces.dump());
                                //add initial face points from MatOfRect
                                faceTracker.addPoints(faces);

                                //draw face rect
                                OpenCVForUnity.Rect[] rects = faces.toArray();
                                for (int i = 0; i < rects.Length; i++)
                                {
                                    Core.rectangle(rgbaMat, new Point(rects [i].x, rects [i].y), new Point(rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar(255, 0, 0, 255), 2);
                                }
                            }
                        }
                }


                //track face points. If face points <= 0, this always returns false.
                if (faceTracker.track(grayMat, faceTrackerParams))
                {
                    if (isDrawPoints)
                    {
                        faceTracker.draw(rgbaMat, new Scalar(255, 0, 0, 255), new Scalar(0, 255, 0, 255));
                    }

                    Core.putText(rgbaMat, "'Tap' or 'Space Key' to Reset", new Point(5, rgbaMat.rows() - 5), Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar(255, 255, 255, 255), 2, Core.LINE_AA, false);


                    Point[] points = faceTracker.getPoints() [0];


                    if (points.Length > 0)
                    {
//												for (int i = 0; i < points.Length; i++) {
//														Core.putText (rgbaMat, "" + i, new Point (points [i].x, points [i].y), Core.FONT_HERSHEY_SIMPLEX, 0.3, new Scalar (0, 0, 255, 255), 2, Core.LINE_AA, false);
//												}


                        imagePoints.fromArray(
                            points [31],                    //l eye
                            points [36],                    //r eye
                            points [67],                    //nose
                            points [48],                    //l mouth
                            points [54]                     //r mouth
//							,
//											points [1],//l ear
//											points [13]//r ear
                            );


                        Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);

                        bool isRefresh = false;

                        if (tvec.get(2, 0) [0] > 0 && tvec.get(2, 0) [0] < 1200 * ((float)webCamTexture.width / (float)width))
                        {
                            isRefresh = true;

                            if (oldRvec == null)
                            {
                                oldRvec = new Mat();
                                rvec.copyTo(oldRvec);
                            }
                            if (oldTvec == null)
                            {
                                oldTvec = new Mat();
                                tvec.copyTo(oldTvec);
                            }


                            //filter Rvec Noise.
                            using (Mat absDiffRvec = new Mat()) {
                                Core.absdiff(rvec, oldRvec, absDiffRvec);

                                //				Debug.Log ("absDiffRvec " + absDiffRvec.dump());

                                using (Mat cmpRvec = new Mat()) {
                                    Core.compare(absDiffRvec, new Scalar(rvecNoiseFilterRange), cmpRvec, Core.CMP_GT);

                                    if (Core.countNonZero(cmpRvec) > 0)
                                    {
                                        isRefresh = false;
                                    }
                                }
                            }



                            //filter Tvec Noise.
                            using (Mat absDiffTvec = new Mat()) {
                                Core.absdiff(tvec, oldTvec, absDiffTvec);

                                //              Debug.Log ("absDiffTvec " + absDiffTvec.dump());

                                using (Mat cmpTvec = new Mat()) {
                                    Core.compare(absDiffTvec, new Scalar(tvecNoiseFilterRange), cmpTvec, Core.CMP_GT);

                                    if (Core.countNonZero(cmpTvec) > 0)
                                    {
                                        isRefresh = false;
                                    }
                                }
                            }
                        }

                        if (isRefresh)
                        {
                            if (!rightEye.activeSelf)
                            {
                                rightEye.SetActive(true);
                            }
                            if (!leftEye.activeSelf)
                            {
                                leftEye.SetActive(true);
                            }


                            if ((Mathf.Abs((float)(points [48].x - points [56].x)) < Mathf.Abs((float)(points [31].x - points [36].x)) / 2.2 &&
                                 Mathf.Abs((float)(points [51].y - points [57].y)) > Mathf.Abs((float)(points [31].x - points [36].x)) / 2.9) ||
                                Mathf.Abs((float)(points [51].y - points [57].y)) > Mathf.Abs((float)(points [31].x - points [36].x)) / 2.7)
                            {
                                if (!mouth.activeSelf)
                                {
                                    mouth.SetActive(true);
                                }
                            }
                            else
                            {
                                if (mouth.activeSelf)
                                {
                                    mouth.SetActive(false);
                                }
                            }



                            rvec.copyTo(oldRvec);
                            tvec.copyTo(oldTvec);

                            Calib3d.Rodrigues(rvec, rotM);

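                            // Compose the homogeneous 4x4 pose matrix [R|t] (OpenCV camera coordinates).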
                            transformationM.SetRow(0, new Vector4((float)rotM.get(0, 0) [0], (float)rotM.get(0, 1) [0], (float)rotM.get(0, 2) [0], (float)tvec.get(0, 0) [0]));
                            transformationM.SetRow(1, new Vector4((float)rotM.get(1, 0) [0], (float)rotM.get(1, 1) [0], (float)rotM.get(1, 2) [0], (float)tvec.get(1, 0) [0]));
                            transformationM.SetRow(2, new Vector4((float)rotM.get(2, 0) [0], (float)rotM.get(2, 1) [0], (float)rotM.get(2, 2) [0], (float)tvec.get(2, 0) [0]));
                            transformationM.SetRow(3, new Vector4(0, 0, 0, 1));

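                            // Chain: look-at view * estimated pose * Z-flip yields the worldToCameraMatrix for the AR camera.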
                            modelViewMtrx = lookAtM * transformationM * invertZM;

                            ARCamera.worldToCameraMatrix = modelViewMtrx;


                            //				Debug.Log ("modelViewMtrx " + modelViewMtrx.ToString());
                        }
                    }
                }



                Utils.matToTexture2D(rgbaMat, texture, colors);
            }

            if (Input.GetKeyUp(KeyCode.Space) || Input.touchCount > 0)
            {
                faceTracker.reset();
                if (oldRvec != null)
                {
                    oldRvec.Dispose();
                    oldRvec = null;
                }
                if (oldTvec != null)
                {
                    oldTvec.Dispose();
                    oldTvec = null;
                }

                ARCamera.ResetWorldToCameraMatrix();

                rightEye.SetActive(false);
                leftEye.SetActive(false);
                mouth.SetActive(false);
            }
        }

        void OnDisable()
        {
            webCamTexture.Stop();
        }

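        /// <summary>
        /// Builds a right-handed look-at view matrix: the rows are the camera basis
        /// vectors and the last column holds -dot(pos, axis), i.e. the inverse of the
        /// camera's rigid transform.
        /// </summary>
        /// <param name="pos">Camera position.</param>
        /// <param name="target">Point the camera looks at.</param>
        /// <param name="up">Up direction.</param>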
        private Matrix4x4 getLookAtMatrix(Vector3 pos, Vector3 target, Vector3 up)
        {
            Vector3 z = Vector3.Normalize(pos - target);
            Vector3 x = Vector3.Normalize(Vector3.Cross(up, z));
            Vector3 y = Vector3.Normalize(Vector3.Cross(z, x));

            Matrix4x4 result = new Matrix4x4();

            result.SetRow(0, new Vector4(x.x, x.y, x.z, -(Vector3.Dot(pos, x))));
            result.SetRow(1, new Vector4(y.x, y.y, y.z, -(Vector3.Dot(pos, y))));
            result.SetRow(2, new Vector4(z.x, z.y, z.z, -(Vector3.Dot(pos, z))));
            result.SetRow(3, new Vector4(0, 0, 0, 1));

            return(result);
        }

        void OnGUI()
        {
            float     screenScale  = Screen.height / 240.0f;
            Matrix4x4 scaledMatrix = Matrix4x4.Scale(new Vector3(screenScale, screenScale, screenScale));

            GUI.matrix = scaledMatrix;


            GUILayout.BeginVertical();
            if (GUILayout.Button("back"))
            {
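                // Application.LoadLevel is obsolete in Unity 5.3+; SceneManager.LoadScene is the modern replacement.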
                Application.LoadLevel("FaceTrackerSample");
            }
            if (GUILayout.Button("change camera"))
            {
                isFrontFacing = !isFrontFacing;
                StartCoroutine(init());
            }

            if (GUILayout.Button("drawPoints"))
            {
                isDrawPoints = !isDrawPoints;
            }
            if (GUILayout.Button("axes"))
            {
                axes.SetActive(!axes.activeSelf);
            }
            if (GUILayout.Button("head"))
            {
                head.SetActive(!head.activeSelf);
            }


            GUILayout.EndVertical();
        }
    }
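// Note: the example above drives the AR view by assigning ARCamera.worldToCameraMatrix
// directly. If you would rather move a GameObject, the 4x4 pose can be decomposed into
// position/rotation/scale. A minimal sketch of that decomposition (roughly what the
// ARUtils.SetTransformFromMatrix helper used in later examples appears to do; an
// illustration, not the library's actual code):

using UnityEngine;

public static class MatrixPoseSketch
{
    public static void SetTransformFromMatrix(Transform t, ref Matrix4x4 m)
    {
        t.position = m.GetColumn(3);                                          // translation = 4th column
        t.rotation = Quaternion.LookRotation(m.GetColumn(2), m.GetColumn(1)); // forward = 3rd, up = 2nd column
        t.localScale = new Vector3(m.GetColumn(0).magnitude,
                                   m.GetColumn(1).magnitude,
                                   m.GetColumn(2).magnitude);                 // scale = basis vector lengths
    }
}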
Exemplo n.º 20
0
    void handleCalibration()
    {
        for (int i = 0; i < AK_receiver.GetComponent <akplay>().camInfoList.Count; i++)
        {
            //create color mat:
            byte[]   colorBytes = ((Texture2D)(AK_receiver.GetComponent <akplay>().camInfoList[i].colorCube.GetComponent <Renderer>().material.mainTexture)).GetRawTextureData();
            GCHandle ch         = GCHandle.Alloc(colorBytes, GCHandleType.Pinned);
            Mat      colorMat   = new Mat(AK_receiver.GetComponent <akplay>().camInfoList[i].color_height, AK_receiver.GetComponent <akplay>().camInfoList[i].color_width, CvType.CV_8UC4);
            Utils.copyToMat(ch.AddrOfPinnedObject(), colorMat);
            ch.Free();

            //OpenCVForUnity.CoreModule.Core.flip(colorMat, colorMat, 0);

            //detect a chessboard in the image, and refine the points, and save the pixel positions:
            MatOfPoint2f positions = new MatOfPoint2f();
            int          resizer   = 1;    //no resize! (use e.g. 4 to downscale for speed)
            Mat colorMatSmall = new Mat(); //~27 ms each
            Imgproc.resize(colorMat, colorMatSmall, new Size(colorMat.cols() / resizer, colorMat.rows() / resizer));
            bool success = Calib3d.findChessboardCorners(colorMatSmall, new Size(7, 7), positions);
            for (int ss = 0; ss < positions.rows(); ss++)
            {
                double[] data = positions.get(ss, 0);
                data[0] = data[0] * resizer;
                data[1] = data[1] * resizer;

                positions.put(ss, 0, data);
            }

            //subpixel refinement (cornerSubPix expects a single-channel image), drawing chessboard, and getting orange blobs takes 14ms
            TermCriteria tc = new TermCriteria(TermCriteria.EPS + TermCriteria.MAX_ITER, 30, 0.1);
            using (Mat grayMat = new Mat()) {
                Imgproc.cvtColor(colorMat, grayMat, Imgproc.COLOR_BGRA2GRAY);
                Imgproc.cornerSubPix(grayMat, positions, new Size(5, 5), new Size(-1, -1), tc);
            }

            Mat chessboardResult = new Mat();
            colorMat.copyTo(chessboardResult);
            Calib3d.drawChessboardCorners(chessboardResult, new Size(7, 7), positions, success);



            //Find the orange blobs:
            Mat       orangeMask = new Mat();
            Vector2[] blobs      = getOrangeBlobs(ref colorMat, ref orangeMask);

            //find blob closest to chessboard
            if (success && (blobs.Length > 0))
            {
                Debug.Log("found a chessboard and blobs for camera: " + i);

                // time to get pin1 and chessboard positions: 27ms
                //find pin1:
                Point closestBlob = new Point();
                int   pin1idx     = getPin1(positions, blobs, ref closestBlob);
                Imgproc.circle(chessboardResult, new Point(positions.get(pin1idx, 0)[0], positions.get(pin1idx, 0)[1]), 10, new Scalar(255, 0, 0), -1);
                Imgproc.circle(chessboardResult, closestBlob, 10, new Scalar(255, 255, 0), -1);


                //get world positions of chessboard
                Point[]  realWorldPointArray  = new Point[positions.rows()];
                Point3[] realWorldPointArray3 = new Point3[positions.rows()];
                Point[]  imagePointArray      = new Point[positions.rows()];
                //getChessBoardWorldPositions(positions, pin1idx, 0.0498f, ref realWorldPointArray, ref realWorldPointArray3, ref imagePointArray); //green and white checkerboard.
                getChessBoardWorldPositions(positions, pin1idx, 0.07522f, ref realWorldPointArray, ref realWorldPointArray3, ref imagePointArray); //black and white checkerboard.


                string text       = "";
                float  decimals   = 1000.0f;
                int    text_red   = 255;
                int    text_green = 0;
                int    text_blue  = 0;
                text = ((int)(realWorldPointArray3[0].x * decimals)) / decimals + "," + ((int)(realWorldPointArray3[0].y * decimals)) / decimals + "," + ((int)(realWorldPointArray3[0].z * decimals)) / decimals;
                //text = sprintf("%f,%f,%f", realWorldPointArray3[0].x, realWorldPointArray3[0].y, realWorldPointArray3[0].z);
                Imgproc.putText(chessboardResult, text, new Point(positions.get(0, 0)[0], positions.get(0, 0)[1]), 0, .6, new Scalar(text_red, text_green, text_blue));
                text = ((int)(realWorldPointArray3[6].x * decimals)) / decimals + "," + ((int)(realWorldPointArray3[6].y * decimals)) / decimals + "," + ((int)(realWorldPointArray3[6].z * decimals)) / decimals;
                Imgproc.putText(chessboardResult, text, new Point(positions.get(6, 0)[0], positions.get(6, 0)[1]), 0, .6, new Scalar(text_red, text_green, text_blue));
                text = ((int)(realWorldPointArray3[42].x * decimals)) / decimals + "," + ((int)(realWorldPointArray3[42].y * decimals)) / decimals + "," + ((int)(realWorldPointArray3[42].z * decimals)) / decimals;
                Imgproc.putText(chessboardResult, text, new Point(positions.get(42, 0)[0], positions.get(42, 0)[1]), 0, .6, new Scalar(text_red, text_green, text_blue));
                text = ((int)(realWorldPointArray3[48].x * decimals)) / decimals + "," + ((int)(realWorldPointArray3[48].y * decimals)) / decimals + "," + ((int)(realWorldPointArray3[48].z * decimals)) / decimals;
                //text = sprintf("%2.2f,%2.2f,%2.2f", realWorldPointArray3[48].x, realWorldPointArray3[48].y, realWorldPointArray3[48].z);
                Imgproc.putText(chessboardResult, text, new Point(positions.get(48, 0)[0], positions.get(48, 0)[1]), 0, .6, new Scalar(text_red, text_green, text_blue));



                Mat cameraMatrix = Mat.eye(3, 3, CvType.CV_64F);
                cameraMatrix.put(0, 0, AK_receiver.GetComponent <akplay>().camInfoList[i].color_fx);
                cameraMatrix.put(1, 1, AK_receiver.GetComponent <akplay>().camInfoList[i].color_fy);
                cameraMatrix.put(0, 2, AK_receiver.GetComponent <akplay>().camInfoList[i].color_cx);
                cameraMatrix.put(1, 2, AK_receiver.GetComponent <akplay>().camInfoList[i].color_cy);

                double[] distortion = new double[8];

                distortion[0] = AK_receiver.GetComponent <akplay>().camInfoList[i].color_k1;
                distortion[1] = AK_receiver.GetComponent <akplay>().camInfoList[i].color_k2;
                distortion[2] = AK_receiver.GetComponent <akplay>().camInfoList[i].color_p1;
                distortion[3] = AK_receiver.GetComponent <akplay>().camInfoList[i].color_p2;
                distortion[4] = AK_receiver.GetComponent <akplay>().camInfoList[i].color_k3;
                distortion[5] = AK_receiver.GetComponent <akplay>().camInfoList[i].color_k4;
                distortion[6] = AK_receiver.GetComponent <akplay>().camInfoList[i].color_k5;
                distortion[7] = AK_receiver.GetComponent <akplay>().camInfoList[i].color_k6;


                /*
                 * distortion[0] = 0.0;
                 * distortion[1] = 0.0;
                 * distortion[2] = 0.0;
                 * distortion[3] = 0.0;
                 * distortion[4] = 0.0;
                 * distortion[5] = 0.0;
                 * distortion[6] = 0.0;
                 * distortion[7] = 0.0;
                 */

                //~1 ms to solve for pnp
                Mat  rvec           = new Mat();
                Mat  tvec           = new Mat();
                bool solvepnpSucces = Calib3d.solvePnP(new MatOfPoint3f(realWorldPointArray3), new MatOfPoint2f(imagePointArray), cameraMatrix, new MatOfDouble(distortion), rvec, tvec);

                Mat R = new Mat();
                Calib3d.Rodrigues(rvec, R);


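                // For a pose x_cam = R * x_world + t, the camera center in world
                // coordinates is C = -R^T * t; the probes below push unit offsets along
                // +Z (forward) and -Y (up, since OpenCV's camera y points down) through
                // the inverse pose to recover the camera's orientation in world space.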
                //calculate unity vectors, and camera transforms
                Mat camCenter     = -R.t() * tvec;
                Mat forwardOffset = new Mat(3, 1, tvec.type());
                forwardOffset.put(0, 0, 0);
                forwardOffset.put(1, 0, 0);
                forwardOffset.put(2, 0, 1);
                Mat upOffset = new Mat(3, 1, tvec.type());
                upOffset.put(0, 0, 0);
                upOffset.put(1, 0, -1);
                upOffset.put(2, 0, 0);

                Mat forwardVectorCV = R.t() * (forwardOffset - tvec);
                forwardVectorCV = forwardVectorCV - camCenter;
                Mat upVectorCV = R.t() * (upOffset - tvec);
                upVectorCV = upVectorCV - camCenter;

                Vector3    forwardVectorUnity = new Vector3((float)forwardVectorCV.get(0, 0)[0], (float)forwardVectorCV.get(2, 0)[0], (float)forwardVectorCV.get(1, 0)[0]); //need to flip y and z due to unity coordinate system
                Vector3    upVectorUnity      = new Vector3((float)upVectorCV.get(0, 0)[0], (float)upVectorCV.get(2, 0)[0], (float)upVectorCV.get(1, 0)[0]);                //need to flip y and z due to unity coordinate system
                Vector3    camCenterUnity     = new Vector3((float)camCenter.get(0, 0)[0], (float)camCenter.get(2, 0)[0], (float)camCenter.get(1, 0)[0]);
                Quaternion rotationUnity      = Quaternion.LookRotation(forwardVectorUnity, upVectorUnity);



                GameObject colorMarker = GameObject.CreatePrimitive(PrimitiveType.Cube);
                //colorMarker.transform.localScale = new Vector3(0.1f, 0.1f, 0.2f);
                //colorMarker.transform.parent = AK_receiver.transform;
                colorMarker.layer = LayerMask.NameToLayer("Debug");
                colorMarker.transform.position = camCenterUnity;
                colorMarker.transform.rotation = Quaternion.LookRotation(forwardVectorUnity, upVectorUnity);
                colorMarker.GetComponent <Renderer>().material.color = Color.blue;

                Vector3    forwardDepth   = AK_receiver.GetComponent <akplay>().camInfoList[i].color_extrinsics.MultiplyPoint(forwardVectorUnity);
                Vector3    upDepth        = AK_receiver.GetComponent <akplay>().camInfoList[i].color_extrinsics.MultiplyPoint(upVectorUnity);
                Vector3    camCenterDepth = AK_receiver.GetComponent <akplay>().camInfoList[i].color_extrinsics.MultiplyPoint(camCenterUnity);
                Quaternion rotationDepth  = Quaternion.LookRotation(forwardDepth, upDepth);

                GameObject depthMarker = GameObject.CreatePrimitive(PrimitiveType.Cube);
                depthMarker.layer            = LayerMask.NameToLayer("Debug");
                depthMarker.transform.parent = colorMarker.transform;
                //depthMarker.transform.localScale = AK_receiver.GetComponent<akplay>().camInfoList[i].color_extrinsics.lossyScale;

                depthMarker.transform.localRotation = AK_receiver.GetComponent <akplay>().camInfoList[i].color_extrinsics.inverse.rotation;

                Vector3 matrixPosition = new Vector3(AK_receiver.GetComponent <akplay>().camInfoList[i].color_extrinsics.inverse.GetColumn(3).x,
                                                     AK_receiver.GetComponent <akplay>().camInfoList[i].color_extrinsics.inverse.GetColumn(3).y,
                                                     AK_receiver.GetComponent <akplay>().camInfoList[i].color_extrinsics.inverse.GetColumn(3).z);


                /*
                 * depthMarker.transform.localRotation = AK_receiver.GetComponent<akplay>().camInfoList[i].color_extrinsics.rotation;
                 *
                 * Vector3 matrixPosition = new Vector3(AK_receiver.GetComponent<akplay>().camInfoList[i].color_extrinsics.GetColumn(3).x,
                 *                                      AK_receiver.GetComponent<akplay>().camInfoList[i].color_extrinsics.GetColumn(3).y,
                 *                                      AK_receiver.GetComponent<akplay>().camInfoList[i].color_extrinsics.GetColumn(3).z);
                 */

                depthMarker.transform.localPosition = -matrixPosition;
                depthMarker.transform.parent        = null;

                colorMarker.transform.localScale = new Vector3(0.1f, 0.1f, 0.2f);
                depthMarker.transform.localScale = new Vector3(0.1f, 0.1f, 0.2f);

                //depthMarker.transform.parent = AK_receiver.transform;
                //depthMarker.transform.position = camCenterDepth;
                //depthMarker.transform.rotation = Quaternion.LookRotation(forwardDepth-camCenterDepth, upDepth-camCenterDepth);
                depthMarker.GetComponent <Renderer>().material.color = Color.red;


                AK_receiver.GetComponent <akplay>().camInfoList[i].visualization.transform.position = depthMarker.transform.position; //need to flip y and z due to unity coordinate system
                AK_receiver.GetComponent <akplay>().camInfoList[i].visualization.transform.rotation = depthMarker.transform.rotation;
            }


            //draw chessboard result to calibration ui:
            Texture2D colorTexture = new Texture2D(chessboardResult.cols(), chessboardResult.rows(), TextureFormat.BGRA32, false);
            colorTexture.LoadRawTextureData((IntPtr)chessboardResult.dataAddr(), (int)chessboardResult.total() * (int)chessboardResult.elemSize());
            colorTexture.Apply();
            checkerboard_display_list[i].GetComponent <Renderer>().material.mainTexture = colorTexture;

            //draw threshold to calibration ui:
            Texture2D orangeTexture = new Texture2D(orangeMask.cols(), orangeMask.rows(), TextureFormat.R8, false);
            orangeTexture.LoadRawTextureData((IntPtr)orangeMask.dataAddr(), (int)orangeMask.total() * (int)orangeMask.elemSize());
            orangeTexture.Apply();
            threshold_display_list[i].GetComponent <Renderer>().material.mainTexture = orangeTexture;
        }
    }
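// Note: unlike the earlier examples, handleCalibration() above never calls Dispose()
// on the Mats it allocates (colorMat, positions, chessboardResult, orangeMask, ...)
// and creates new Texture2D objects on every call, so native memory grows until the
// finalizers run. A minimal sketch of the using-based disposal pattern the other
// snippets follow (colorWidth/colorHeight stand in for the camera's color resolution):

using (Mat colorMat = new Mat(colorHeight, colorWidth, CvType.CV_8UC4))
using (MatOfPoint2f positions = new MatOfPoint2f())
using (Mat chessboardResult = new Mat())
using (Mat orangeMask = new Mat())
{
    // ...detect the chessboard, refine corners, draw the overlays...
} // native buffers are released here instead of waiting for finalizers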
Exemplo n.º 21
0
        // Update is called once per frame
        void Update()
        {
            if (capture == null)
            {
                return;
            }

            //Loop play
            if (capture.get(Videoio.CAP_PROP_POS_FRAMES) >= capture.get(Videoio.CAP_PROP_FRAME_COUNT))
            {
                capture.set(Videoio.CAP_PROP_POS_FRAMES, 0);
            }

            if (capture.grab())
            {
                capture.retrieve(rgbMat, 0);

                Imgproc.cvtColor(rgbMat, rgbMat, Imgproc.COLOR_BGR2RGB);
                //Debug.Log ("Mat toString " + rgbMat.ToString ());


                OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbMat);

                //detect face rects
                List <UnityEngine.Rect> detectResult = faceLandmarkDetector.Detect();

                if (detectResult.Count > 0)
                {
                    //detect landmark points
                    List <Vector2> points = faceLandmarkDetector.DetectLandmark(detectResult [0]);

                    if (displayFacePoints)
                    {
                        OpenCVForUnityUtils.DrawFaceLandmark(rgbMat, points, new Scalar(0, 255, 0), 2);
                    }

                    MatOfPoint3f objectPoints   = null;
                    bool         isRightEyeOpen = false;
                    bool         isLeftEyeOpen  = false;
                    bool         isMouthOpen    = false;
                    if (points.Count == 68)
                    {
                        objectPoints = objectPoints68;

                        imagePoints.fromArray(
                            new Point((points [38].x + points [41].x) / 2, (points [38].y + points [41].y) / 2), //l eye (Interpupillary breadth)
                            new Point((points [43].x + points [46].x) / 2, (points [43].y + points [46].y) / 2), //r eye (Interpupillary breadth)
                            new Point(points [30].x, points [30].y),                                             //nose (Nose top)
                            new Point(points [48].x, points [48].y),                                             //l mouth (Mouth breadth)
                            new Point(points [54].x, points [54].y),                                             //r mouth (Mouth breadth)
                            new Point(points [0].x, points [0].y),                                               //l ear (Bitragion breadth)
                            new Point(points [16].x, points [16].y)                                              //r ear (Bitragion breadth)
                            );


                        if (Mathf.Abs((float)(points [43].y - points [46].y)) > Mathf.Abs((float)(points [42].x - points [45].x)) / 5.0)
                        {
                            isRightEyeOpen = true;
                        }

                        if (Mathf.Abs((float)(points [38].y - points [41].y)) > Mathf.Abs((float)(points [39].x - points [36].x)) / 5.0)
                        {
                            isLeftEyeOpen = true;
                        }

                        float noseDistance  = Mathf.Abs((float)(points [27].y - points [33].y));
                        float mouseDistance = Mathf.Abs((float)(points [62].y - points [66].y));
                        if (mouseDistance > noseDistance / 5.0)
                        {
                            isMouthOpen = true;
                        }
                        else
                        {
                            isMouthOpen = false;
                        }
                    }
                    else if (points.Count == 5)
                    {
                        objectPoints = objectPoints5;

                        imagePoints.fromArray(
                            new Point(points [3].x, points [3].y), //l eye (Inner corner of the eye)
                            new Point(points [1].x, points [1].y), //r eye (Inner corner of the eye)
                            new Point(points [2].x, points [2].y), //l eye (Tail of the eye)
                            new Point(points [0].x, points [0].y), //r eye (Tail of the eye)
                            new Point(points [4].x, points [4].y)  //nose (Nose top)
                            );

                        if (fpsMonitor != null)
                        {
                            fpsMonitor.consoleText = "This example mainly supports the 68-point face landmark format.";
                        }
                    }

                    // Estimate head pose.
                    if (rvec == null || tvec == null)
                    {
                        rvec = new Mat(3, 1, CvType.CV_64FC1);
                        tvec = new Mat(3, 1, CvType.CV_64FC1);
                        Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
                    }

                    double tvec_z = tvec.get(2, 0) [0];

                    if (double.IsNaN(tvec_z) || tvec_z < 0)    // if tvec is invalid data, do not use the extrinsic guess.
                    {
                        Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
                    }
                    else
                    {
                        Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec, true, Calib3d.SOLVEPNP_ITERATIVE);
                    }
                    //Debug.Log (tvec.dump());

                    if (!double.IsNaN(tvec_z))
                    {
                        // Display effects.
                        if (displayHead)
                        {
                            head.SetActive(true);
                        }
                        if (displayAxes)
                        {
                            axes.SetActive(true);
                        }

                        if (displayEffects)
                        {
                            rightEye.SetActive(isRightEyeOpen);
                            leftEye.SetActive(isLeftEyeOpen);

                            if (isMouthOpen)
                            {
                                mouth.SetActive(true);
                                foreach (ParticleSystem ps in mouthParticleSystem)
                                {
                                    var em = ps.emission;
                                    em.enabled = true;
                                    #if UNITY_5_5_OR_NEWER
                                    var main = ps.main;
                                    main.startSizeMultiplier = 20;
                                    #else
                                    ps.startSize = 20;
                                    #endif
                                }
                            }
                            else
                            {
                                foreach (ParticleSystem ps in mouthParticleSystem)
                                {
                                    var em = ps.emission;
                                    em.enabled = false;
                                }
                            }
                        }

                        // Convert to unity pose data.
                        double[] rvecArr = new double[3];
                        rvec.get(0, 0, rvecArr);
                        double[] tvecArr = new double[3];
                        tvec.get(0, 0, tvecArr);
                        PoseData poseData = ARUtils.ConvertRvecTvecToPoseData(rvecArr, tvecArr);

                        // Changes in pos/rot below these thresholds are ignored.
                        if (enableLowPassFilter)
                        {
                            ARUtils.LowpassPoseData(ref oldPoseData, ref poseData, positionLowPass, rotationLowPass);
                        }
                        oldPoseData = poseData;

                        // Create transform matrix.
                        transformationM = Matrix4x4.TRS(poseData.pos, poseData.rot, Vector3.one);
                    }


                    // right-handed coordinates system (OpenCV) to left-handed one (Unity)
                    ARM = invertYM * transformationM;

                    // Apply Z-axis inverted matrix.
                    ARM = ARM * invertZM;

                    if (shouldMoveARCamera)
                    {
                        ARM = ARGameObject.transform.localToWorldMatrix * ARM.inverse;
                        ARUtils.SetTransformFromMatrix(ARCamera.transform, ref ARM);
                    }
                    else
                    {
                        ARM = ARCamera.transform.localToWorldMatrix * ARM;
                        ARUtils.SetTransformFromMatrix(ARGameObject.transform, ref ARM);
                    }
                }
            }
            else
            {
                rightEye.SetActive(false);
                leftEye.SetActive(false);
                head.SetActive(false);
                mouth.SetActive(false);
                axes.SetActive(false);
            }

            //Imgproc.putText (rgbMat, "W:" + rgbMat.width () + " H:" + rgbMat.height () + " SO:" + Screen.orientation, new Point (5, rgbMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255), 1, Imgproc.LINE_AA, false);

            OpenCVForUnity.Utils.fastMatToTexture2D(rgbMat, texture);
        }
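// Note: the enableLowPassFilter branch above relies on ARUtils.LowpassPoseData to drop
// pose changes below the position/rotation thresholds. A minimal sketch of that idea
// (the PoseDataSketch stand-in and the threshold semantics are assumptions based on the
// comment in the example, not the library's actual implementation):

using UnityEngine;

public struct PoseDataSketch { public Vector3 pos; public Quaternion rot; }

public static class PoseFilterSketch
{
    // Keeps the old pose whenever the new one moved/rotated less than the thresholds.
    public static void Lowpass(ref PoseDataSketch oldPose, ref PoseDataSketch newPose,
                               float posThreshold, float rotThresholdDeg)
    {
        if (Vector3.Distance(newPose.pos, oldPose.pos) < posThreshold)
            newPose.pos = oldPose.pos;
        if (Quaternion.Angle(newPose.rot, oldPose.rot) < rotThresholdDeg)
            newPose.rot = oldPose.rot;
    }
}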
Exemplo n.º 22
0
        // Update is called once per frame
        void Update()
        {
            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();


                //convert image to greyscale
                Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);


                if (isAutoResetMode || faceTracker.getPoints().Count <= 0)
                {
//                    Debug.Log ("detectFace");

                    //equalize the histogram to normalize lighting before detection
                    using (Mat equalizeHistMat = new Mat()) using (MatOfRect faces = new MatOfRect()) {
                            Imgproc.equalizeHist(grayMat, equalizeHistMat);

                            cascade.detectMultiScale(equalizeHistMat, faces, 1.1f, 2, 0
                                                     | Objdetect.CASCADE_FIND_BIGGEST_OBJECT
                                                     | Objdetect.CASCADE_SCALE_IMAGE, new Size(equalizeHistMat.cols() * 0.15, equalizeHistMat.cols() * 0.15), new Size());



                            if (faces.rows() > 0)
                            {
//                            Debug.Log ("faces " + faces.dump ());

                                List <OpenCVForUnity.CoreModule.Rect> rectsList = faces.toList();
                                List <Point[]> pointsList = faceTracker.getPoints();

                                if (isAutoResetMode)
                                {
                                    //add initial face points from MatOfRect
                                    if (pointsList.Count <= 0)
                                    {
                                        faceTracker.addPoints(faces);
//                                    Debug.Log ("reset faces ");
                                    }
                                    else
                                    {
                                        for (int i = 0; i < rectsList.Count; i++)
                                        {
                                            OpenCVForUnity.CoreModule.Rect trackRect = new OpenCVForUnity.CoreModule.Rect(rectsList [i].x + rectsList [i].width / 3, rectsList [i].y + rectsList [i].height / 2, rectsList [i].width / 3, rectsList [i].height / 3);
                                            //Check whether the nose point (index 67) is still inside trackRect.
                                            if (i < pointsList.Count && !trackRect.contains(pointsList [i] [67]))
                                            {
                                                rectsList.RemoveAt(i);
                                                pointsList.RemoveAt(i);
//                                            Debug.Log ("remove " + i);
                                            }
                                            Imgproc.rectangle(rgbaMat, new Point(trackRect.x, trackRect.y), new Point(trackRect.x + trackRect.width, trackRect.y + trackRect.height), new Scalar(0, 0, 255, 255), 2);
                                        }
                                    }
                                }
                                else
                                {
                                    faceTracker.addPoints(faces);
                                }

                                //draw face rect
                                for (int i = 0; i < rectsList.Count; i++)
                                {
                                    Imgproc.rectangle(rgbaMat, new Point(rectsList [i].x, rectsList [i].y), new Point(rectsList [i].x + rectsList [i].width, rectsList [i].y + rectsList [i].height), new Scalar(255, 0, 0, 255), 2);
                                }
                            }
                            else
                            {
                                if (isAutoResetMode)
                                {
                                    faceTracker.reset();

                                    rightEye.SetActive(false);
                                    leftEye.SetActive(false);
                                    head.SetActive(false);
                                    mouth.SetActive(false);
                                    axes.SetActive(false);
                                }
                            }
                        }
                }


                //track face points. If no points are registered, track() always returns false.
                if (faceTracker.track(grayMat, faceTrackerParams))
                {
                    if (isShowingFacePoints)
                    {
                        faceTracker.draw(rgbaMat, new Scalar(255, 0, 0, 255), new Scalar(0, 255, 0, 255));
                    }

                    Imgproc.putText(rgbaMat, "'Tap' or 'Space Key' to Reset", new Point(5, rgbaMat.rows() - 5), Imgproc.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);


                    Point[] points = faceTracker.getPoints() [0];


                    if (points.Length > 0)
                    {
//                        for (int i = 0; i < points.Length; i++)
//                        {
//                            Imgproc.putText(rgbaMat, "" + i, new Point(points [i].x, points [i].y), Imgproc.FONT_HERSHEY_SIMPLEX, 0.3, new Scalar(0, 0, 255, 255), 2, Imgproc.LINE_AA, false);
//                        }


                        imagePoints.fromArray(
                            points [31], //l eye
                            points [36], //r eye
                            points [67], //nose
                            points [48], //l mouth
                            points [54]  //r mouth
//                          ,
//                          points [0],//l ear
//                          points [14]//r ear
                            );


                        Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);

                        bool isRefresh = false;

                        if (tvec.get(2, 0) [0] > 0 && tvec.get(2, 0) [0] < 1200 * ((float)rgbaMat.cols() / (float)webCamTextureToMatHelper.requestedWidth))
                        {
                            isRefresh = true;

                            if (oldRvec == null)
                            {
                                oldRvec = new Mat();
                                rvec.copyTo(oldRvec);
                            }
                            if (oldTvec == null)
                            {
                                oldTvec = new Mat();
                                tvec.copyTo(oldTvec);
                            }


                            //filter Rvec Noise.
                            using (Mat absDiffRvec = new Mat()) {
                                Core.absdiff(rvec, oldRvec, absDiffRvec);

                                //              Debug.Log ("absDiffRvec " + absDiffRvec.dump());

                                using (Mat cmpRvec = new Mat()) {
                                    Core.compare(absDiffRvec, new Scalar(rvecNoiseFilterRange), cmpRvec, Core.CMP_GT);

                                    if (Core.countNonZero(cmpRvec) > 0)
                                    {
                                        isRefresh = false;
                                    }
                                }
                            }

                            //filter Tvec Noise.
                            using (Mat absDiffTvec = new Mat()) {
                                Core.absdiff(tvec, oldTvec, absDiffTvec);

                                //              Debug.Log ("absDiffTvec " + absDiffTvec.dump());

                                using (Mat cmpTvec = new Mat()) {
                                    Core.compare(absDiffTvec, new Scalar(tvecNoiseFilterRange), cmpTvec, Core.CMP_GT);

                                    if (Core.countNonZero(cmpTvec) > 0)
                                    {
                                        isRefresh = false;
                                    }
                                }
                            }
                        }

                        if (isRefresh)
                        {
                            if (isShowingEffects)
                            {
                                rightEye.SetActive(true);
                                leftEye.SetActive(true);
                            }
                            if (isShowingHead)
                            {
                                head.SetActive(true);
                            }
                            if (isShowingAxes)
                            {
                                axes.SetActive(true);
                            }


                            if ((Mathf.Abs((float)(points [48].x - points [56].x)) < Mathf.Abs((float)(points [31].x - points [36].x)) / 2.2 &&
                                 Mathf.Abs((float)(points [51].y - points [57].y)) > Mathf.Abs((float)(points [31].x - points [36].x)) / 2.9) ||
                                Mathf.Abs((float)(points [51].y - points [57].y)) > Mathf.Abs((float)(points [31].x - points [36].x)) / 2.7)
                            {
                                if (isShowingEffects)
                                {
                                    mouth.SetActive(true);
                                }
                            }
                            else
                            {
                                if (isShowingEffects)
                                {
                                    mouth.SetActive(false);
                                }
                            }

                            rvec.copyTo(oldRvec);
                            tvec.copyTo(oldTvec);

                            Calib3d.Rodrigues(rvec, rotM);

                            transformationM.SetRow(0, new Vector4((float)rotM.get(0, 0) [0], (float)rotM.get(0, 1) [0], (float)rotM.get(0, 2) [0], (float)tvec.get(0, 0) [0]));
                            transformationM.SetRow(1, new Vector4((float)rotM.get(1, 0) [0], (float)rotM.get(1, 1) [0], (float)rotM.get(1, 2) [0], (float)tvec.get(1, 0) [0]));
                            transformationM.SetRow(2, new Vector4((float)rotM.get(2, 0) [0], (float)rotM.get(2, 1) [0], (float)rotM.get(2, 2) [0], (float)tvec.get(2, 0) [0]));
                            transformationM.SetRow(3, new Vector4(0, 0, 0, 1));

                            // right-handed coordinates system (OpenCV) to left-handed one (Unity)
                            ARM = invertYM * transformationM;

                            // Apply Z-axis inverted matrix.
                            ARM = ARM * invertZM;

                            if (shouldMoveARCamera)
                            {
                                if (ARGameObject != null)
                                {
                                    ARM = ARGameObject.transform.localToWorldMatrix * ARM.inverse;
                                    ARUtils.SetTransformFromMatrix(ARCamera.transform, ref ARM);
                                    ARGameObject.SetActive(true);
                                }
                            }
                            else
                            {
                                ARM = ARCamera.transform.localToWorldMatrix * ARM;

                                if (ARGameObject != null)
                                {
                                    ARUtils.SetTransformFromMatrix(ARGameObject.transform, ref ARM);
                                    ARGameObject.SetActive(true);
                                }
                            }
                        }
                    }
                }

//                Imgproc.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

                Utils.fastMatToTexture2D(rgbaMat, texture);
            }

            if (Input.GetKeyUp(KeyCode.Space) || Input.touchCount > 0)
            {
                faceTracker.reset();
                if (oldRvec != null)
                {
                    oldRvec.Dispose();
                    oldRvec = null;
                }
                if (oldTvec != null)
                {
                    oldTvec.Dispose();
                    oldTvec = null;
                }

                rightEye.SetActive(false);
                leftEye.SetActive(false);
                head.SetActive(false);
                mouth.SetActive(false);
                axes.SetActive(false);
            }
        }
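// Note: the invertYM/invertZM matrices used throughout these examples are plain
// axis-flip scale matrices; a minimal sketch of how they are typically constructed
// (the names match the samples, the construction itself is an assumption):

// Flip the Y axis: OpenCV's camera space is y-down and right-handed, Unity's is y-up.
Matrix4x4 invertYM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1));
// Flip the Z axis: accounts for the opposite forward direction between the two systems.
Matrix4x4 invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));
// Applied as in the examples: ARM = invertYM * transformationM * invertZM;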
Exemplo n.º 23
0
    /// <summary>
    /// Gets the frontal face angle.
    /// </summary>
    /// <param name="points">The 68 face landmark points.</param>
    private Vector3 GetFrontalFaceAngle(List <Vector2> points)
    {
        if (points.Count != 68)
        {
            throw new ArgumentException("Invalid landmark points.");
        }

        if (_camMatrix == null)
        {
            throw new ArgumentNullException("The camera intrinsic parameters have not been set.");
        }

        // Normalize the scale
        float   normScale = Math.Abs(points[30].y - points[8].y) / (normHeight / 2);
        Vector2 normDiff  = points[30] * normScale - new Vector2(normWidth / 2, normHeight / 2);

        for (int i = 0; i < points.Count; i++)
        {
            _normPoints[i] = points[i] * normScale - normDiff;
        }

        _imagePoints.fromArray(
            new Point((_normPoints[38].x + _normPoints[41].x) / 2, (_normPoints[38].y + _normPoints[41].y) / 2), // l eye
            new Point((_normPoints[43].x + _normPoints[46].x) / 2, (_normPoints[43].y + _normPoints[46].y) / 2), // r eye
            new Point(_normPoints[33].x, _normPoints[33].y),                                                     // nose
            new Point(_normPoints[48].x, _normPoints[48].y),                                                     // l mouth corner
            new Point(_normPoints[54].x, _normPoints[54].y),                                                     // r mouth corner
            new Point(_normPoints[0].x, _normPoints[0].y),                                                       // l ear
            new Point(_normPoints[16].x, _normPoints[16].y)                                                      // r ear
            );

        // Estimate the head pose from the 2D-3D point correspondences.
        if (_rVec == null || _tVec == null)
        {
            _rVec = new Mat(3, 1, CvType.CV_64FC1);
            _tVec = new Mat(3, 1, CvType.CV_64FC1);
            Calib3d.solvePnP(_objectPoints, _imagePoints, _camMatrix, _distCoeffs, _rVec, _tVec);
        }

        double  tVecX           = _tVec.get(0, 0)[0];
        double  tVecY           = _tVec.get(1, 0)[0];
        double  tVecZ           = _tVec.get(2, 0)[0];
        bool    isNotInViewport = false;
        Vector4 pos             = _VP * new Vector4((float)tVecX, (float)tVecY, (float)tVecZ, 1.0f);

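        // Project tVec with the view-projection matrix and test the clip-space result
        // against the NDC cube to decide whether the head is inside the viewport.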
        if (pos.w != 0)
        {
            float x = pos.x / pos.w, y = pos.y / pos.w, z = pos.z / pos.w;
            if (x < -1.0f || x > 1.0f || y < -1.0f || y > 1.0f || z < -1.0f || z > 1.0f)
            {
                isNotInViewport = true;
            }
        }

        // If the object is not within the camera's viewport, do not use the extrinsic guess.
        if (double.IsNaN(tVecZ) || isNotInViewport)
        {
            Calib3d.solvePnP(_objectPoints, _imagePoints, _camMatrix, _distCoeffs,
                             _rVec, _tVec);
        }
        else
        {
            Calib3d.solvePnP(_objectPoints, _imagePoints, _camMatrix, _distCoeffs,
                             _rVec, _tVec, true, Calib3d.SOLVEPNP_ITERATIVE);
        }

        if (!isNotInViewport)
        {
            // Convert to Unity pose data.
            double[] rVecArr = new double[3];
            _rVec.get(0, 0, rVecArr);
            double[] tVecArr = new double[3];
            _tVec.get(0, 0, tVecArr);
            PoseData poseData = ARUtils.ConvertRvecTvecToPoseData(rVecArr, tVecArr);
            // Ignore pos/rot changes below the thresholds.
            ARUtils.LowpassPoseData(ref _oldPoseData, ref poseData, positionLowPass, rotationLowPass);
            _oldPoseData = poseData;
            // Create the transformation matrix.
            _transformationM = Matrix4x4.TRS(poseData.pos, poseData.rot, Vector3.one);
        }


        Calib3d.Rodrigues(_rVec, _rotM);

        _transformationM.SetRow(0,
                                new Vector4(
                                    (float)_rotM.get(0, 0)[0],
                                    (float)_rotM.get(0, 1)[0],
                                    (float)_rotM.get(0, 2)[0],
                                    (float)_tVec.get(0, 0)[0]));
        _transformationM.SetRow(1,
                                new Vector4(
                                    (float)_rotM.get(1, 0)[0],
                                    (float)_rotM.get(1, 1)[0],
                                    (float)_rotM.get(1, 2)[0],
                                    (float)_tVec.get(1, 0)[0]));
        _transformationM.SetRow(2,
                                new Vector4(
                                    (float)_rotM.get(2, 0)[0],
                                    (float)_rotM.get(2, 1)[0],
                                    (float)_rotM.get(2, 2)[0],
                                    (float)_tVec.get(2, 0)[0]));
        _transformationM.SetRow(3,
                                new Vector4(
                                    0,
                                    0,
                                    0,
                                    1));

        // right-handed coordinate system (OpenCV) to left-handed one (Unity);
        // _invertYM is its own inverse, so the original double Y-flip reduces to Y-flip * pose * Z-flip.
        _ARM = _invertYM * _transformationM * _invertZM;

        return(ExtractRotationFromMatrix(ref _ARM).eulerAngles);
    }
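    // Note: ExtractRotationFromMatrix is not included in this snippet. A minimal sketch
    // of a common implementation (an assumption, not the author's code): read the
    // rotation straight from the basis columns of the matrix.
    private Quaternion ExtractRotationFromMatrix(ref Matrix4x4 m)
    {
        // Forward = 3rd column, up = 2nd column of the rotation part.
        return Quaternion.LookRotation(m.GetColumn(2), m.GetColumn(1));
    }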
        // Update is called once per frame
        void Update()
        {
            //Loop play
            if (capture.get(Videoio.CAP_PROP_POS_FRAMES) >= capture.get(Videoio.CAP_PROP_FRAME_COUNT))
            {
                capture.set(Videoio.CAP_PROP_POS_FRAMES, 0);
            }

            if (capture.grab())
            {
                capture.retrieve(rgbMat, 0);

                Imgproc.cvtColor(rgbMat, rgbMat, Imgproc.COLOR_BGR2RGB);
                //Debug.Log ("Mat toString " + rgbMat.ToString ());


                OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbMat);

                //detect face rects
                List <UnityEngine.Rect> detectResult = faceLandmarkDetector.Detect();

                if (detectResult.Count > 0)
                {
                    //detect landmark points
                    List <Vector2> points = faceLandmarkDetector.DetectLandmark(detectResult [0]);

                    if (points.Count > 0)
                    {
                        if (shouldDrawFacePoints)
                        {
                            OpenCVForUnityUtils.DrawFaceLandmark(rgbMat, points, new Scalar(0, 255, 0), 2);
                        }

                        imagePoints.fromArray(
                            new Point((points [38].x + points [41].x) / 2, (points [38].y + points [41].y) / 2), //l eye
                            new Point((points [43].x + points [46].x) / 2, (points [43].y + points [46].y) / 2), //r eye
                            new Point(points [33].x, points [33].y),                                             //nose
                            new Point(points [48].x, points [48].y),                                             //l mouth
                            new Point(points [54].x, points [54].y)                                              //r mouth
                            ,
                            new Point(points [0].x, points [0].y),                                               //l ear
                            new Point(points [16].x, points [16].y)                                              //r ear
                            );


                        Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);


                        if (tvec.get(2, 0) [0] > 0)
                        {
                            if (Mathf.Abs((float)(points [43].y - points [46].y)) > Mathf.Abs((float)(points [42].x - points [45].x)) / 6.0)
                            {
                                if (shouldDrawEffects)
                                {
                                    rightEye.SetActive(true);
                                }
                            }

                            if (Mathf.Abs((float)(points [38].y - points [41].y)) > Mathf.Abs((float)(points [39].x - points [36].x)) / 6.0)
                            {
                                if (shouldDrawEffects)
                                {
                                    leftEye.SetActive(true);
                                }
                            }
                            if (shouldDrawHead)
                            {
                                head.SetActive(true);
                            }
                            if (shouldDrawAxes)
                            {
                                axes.SetActive(true);
                            }



                            float noseDistance  = Mathf.Abs((float)(points [27].y - points [33].y));
                            float mouseDistance = Mathf.Abs((float)(points [62].y - points [66].y));
                            if (mouseDistance > noseDistance / 5.0)
                            {
                                if (shouldDrawEffects)
                                {
                                    mouth.SetActive(true);
                                    foreach (ParticleSystem ps in mouthParticleSystem)
                                    {
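                                        // enableEmission/startSize are the legacy (pre-Unity 5.5) ParticleSystem API;
                                        // newer versions use the ps.emission and ps.main modules, as in the other examples here.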
                                        ps.enableEmission = true;
                                        ps.startSize      = 40 * (mouseDistance / noseDistance);
                                    }
                                }
                            }
                            else
                            {
                                if (shouldDrawEffects)
                                {
                                    foreach (ParticleSystem ps in mouthParticleSystem)
                                    {
                                        ps.enableEmission = false;
                                    }
                                }
                            }


                            Calib3d.Rodrigues(rvec, rotM);

                            transformationM.SetRow(0, new Vector4((float)rotM.get(0, 0) [0], (float)rotM.get(0, 1) [0], (float)rotM.get(0, 2) [0], (float)tvec.get(0, 0) [0]));
                            transformationM.SetRow(1, new Vector4((float)rotM.get(1, 0) [0], (float)rotM.get(1, 1) [0], (float)rotM.get(1, 2) [0], (float)tvec.get(1, 0) [0]));
                            transformationM.SetRow(2, new Vector4((float)rotM.get(2, 0) [0], (float)rotM.get(2, 1) [0], (float)rotM.get(2, 2) [0], (float)tvec.get(2, 0) [0]));
                            transformationM.SetRow(3, new Vector4(0, 0, 0, 1));

                            if (shouldMoveARCamera)
                            {
                                if (ARGameObject != null)
                                {
                                    ARM = ARGameObject.transform.localToWorldMatrix * invertZM * transformationM.inverse * invertYM;
                                    ARUtils.SetTransformFromMatrix(ARCamera.transform, ref ARM);
                                    ARGameObject.SetActive(true);
                                }
                            }
                            else
                            {
                                ARM = ARCamera.transform.localToWorldMatrix * invertYM * transformationM * invertZM;

                                if (ARGameObject != null)
                                {
                                    ARUtils.SetTransformFromMatrix(ARGameObject.transform, ref ARM);
                                    ARGameObject.SetActive(true);
                                }
                            }
                        }
                    }
                }
                else
                {
                    rightEye.SetActive(false);
                    leftEye.SetActive(false);
                    head.SetActive(false);
                    mouth.SetActive(false);
                    axes.SetActive(false);
                }

                Imgproc.putText(rgbMat, "W:" + rgbMat.width() + " H:" + rgbMat.height() + " SO:" + Screen.orientation, new Point(5, rgbMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar(255, 255, 255), 1, Imgproc.LINE_AA, false);

                OpenCVForUnity.Utils.matToTexture2D(rgbMat, texture, colors);
            }
        }
        private void UpdateARHeadTransform(List <Vector2> points)
        {
            // The coordinates in pixels of the object detected on the image projected onto the plane.
            imagePoints.fromArray(
                new Point((points [38].x + points [41].x) / 2, (points [38].y + points [41].y) / 2), // l eye (Interpupillary breadth)
                new Point((points [43].x + points [46].x) / 2, (points [43].y + points [46].y) / 2), // r eye (Interpupillary breadth)
                new Point(points [30].x, points [30].y),                                             // nose (Nose top)
                new Point(points [48].x, points [48].y),                                             // l mouth (Mouth breadth)
                new Point(points [54].x, points [54].y),                                             // r mouth (Mouth breadth)
                new Point(points [0].x, points [0].y),                                               // l ear (Bitragion breadth)
                new Point(points [16].x, points [16].y)                                              // r ear (Bitragion breadth)
                );
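            // Indices follow the dlib 68-point landmark layout; "l"/"r" refer to the left/right
            // side of the image, which is typically the subject's right/left.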

            // Estimate head pose.
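            // On the first call, allocate rvec/tvec and solve once without an initial guess.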
            if (rvec == null || tvec == null)
            {
                rvec = new Mat(3, 1, CvType.CV_64FC1);
                tvec = new Mat(3, 1, CvType.CV_64FC1);
                Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
            }

            double tvec_z = tvec.get(2, 0) [0];

            if (double.IsNaN(tvec_z) || tvec_z < 0)   // If the previous tvec is unreliable (NaN or behind the camera), do not reuse it as an extrinsic guess.
            {
                Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
            }
            else
            {
                Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec, true, Calib3d.SOLVEPNP_ITERATIVE);
            }

            if (applyEstimationPose && !double.IsNaN(tvec_z))
            {
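                // An eye is treated as open when the lid gap exceeds 1/6 of the eye width.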
                if (Mathf.Abs((float)(points [43].y - points [46].y)) > Mathf.Abs((float)(points [42].x - points [45].x)) / 6.0)
                {
                    if (displayEffects)
                    {
                        rightEye.SetActive(true);
                    }
                }

                if (Mathf.Abs((float)(points [38].y - points [41].y)) > Mathf.Abs((float)(points [39].x - points [36].x)) / 6.0)
                {
                    if (displayEffects)
                    {
                        leftEye.SetActive(true);
                    }
                }
                if (displayHead)
                {
                    head.SetActive(true);
                }
                if (displayAxes)
                {
                    axes.SetActive(true);
                }


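                // The mouth is treated as open when the inner-lip gap exceeds 1/5 of the nose length.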
                float noseDistance  = Mathf.Abs((float)(points [27].y - points [33].y));
                float mouthDistance = Mathf.Abs((float)(points [62].y - points [66].y));
                if (mouthDistance > noseDistance / 5.0)
                {
                    if (displayEffects)
                    {
                        mouth.SetActive(true);
                        foreach (ParticleSystem ps in mouthParticleSystem)
                        {
                            var em = ps.emission;
                            em.enabled = true;
                            #if UNITY_5_5_OR_NEWER
                            var main = ps.main;
                            main.startSizeMultiplier = 40 * (mouthDistance / noseDistance);
                            #else
                            ps.startSize = 40 * (mouthDistance / noseDistance);
                            #endif
                        }
                    }
                }
                else
                {
                    if (displayEffects)
                    {
                        foreach (ParticleSystem ps in mouthParticleSystem)
                        {
                            var em = ps.emission;
                            em.enabled = false;
                        }
                    }
                }

                Calib3d.Rodrigues(rvec, rotMat);

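                // objectPoints are in millimetres; dividing by 1000 converts to Unity metres.
                // The extra downscaleRatio division on the Z translation presumably compensates for
                // running the pose estimation on a downscaled camera image (sample-specific).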
                transformationM.SetRow(0, new Vector4((float)rotMat.get(0, 0) [0], (float)rotMat.get(0, 1) [0], (float)rotMat.get(0, 2) [0], (float)(tvec.get(0, 0) [0] / 1000.0)));
                transformationM.SetRow(1, new Vector4((float)rotMat.get(1, 0) [0], (float)rotMat.get(1, 1) [0], (float)rotMat.get(1, 2) [0], (float)(tvec.get(1, 0) [0] / 1000.0)));
                transformationM.SetRow(2, new Vector4((float)rotMat.get(2, 0) [0], (float)rotMat.get(2, 1) [0], (float)rotMat.get(2, 2) [0], (float)(tvec.get(2, 0) [0] / 1000.0 / imageOptimizationHelper.downscaleRatio)));
                transformationM.SetRow(3, new Vector4(0, 0, 0, 1));

                // Convert from OpenCV's right-handed coordinate system to Unity's left-handed one.
                ARM = invertYM * transformationM;

                // Apply the Z-axis inversion matrix.
                ARM = ARM * invertZM;

                // Apply camera transform matrix.
                ARM = arCamera.transform.localToWorldMatrix * ARM;

                ARUtils.SetTransformFromMatrix(arGameObject.transform, ref ARM);
            }
        }
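
The listings above rely on invertYM and invertZM axis-flip matrices to move poses between OpenCV's right-handed coordinate system and Unity's left-handed one. Below is a minimal initialization sketch, assuming the field names used above and following the usual OpenCVForUnity sample convention; the helper name InitAxisFlipMatrices is hypothetical:

        // Axis-flip matrices used when converting an OpenCV pose into Unity's coordinate system.
        // (Hypothetical helper; field names assumed from the listings above.)
        Matrix4x4 invertYM;
        Matrix4x4 invertZM;

        void InitAxisFlipMatrices()
        {
            // Negative Y scale: OpenCV's image Y axis points down, Unity's Y axis points up.
            invertYM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1));

            // Negative Z scale: switches handedness between the two coordinate systems.
            invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));
        }

With these in place, a composition such as arCamera.transform.localToWorldMatrix * invertYM * transformationM * invertZM (as in UpdateARHeadTransform above) yields a Unity world matrix for the tracked head.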