        /// <summary>
        /// Gets the frontal face angle (Euler angles, in degrees) estimated from 68 face landmark points.
        /// </summary>
        /// <returns>The frontal face angle as Euler angles.</returns>
        /// <param name="points">The 68 face landmark points.</param>
        public Vector3 getFrontalFaceAngle(List <Vector2> points)
        {
            if (points.Count != 68)
            {
                throw new ArgumentNullException("Invalid landmark_points.");
            }

            if (camMatrix == null)
            {
                throw new ArgumentNullException("Invalid camMatrix.");
            }

            //normalize

            float   normScale = Math.Abs(points [30].y - points [8].y) / (normHeight / 2);
            Vector2 normDiff  = points [30] * normScale - new Vector2(normWidth / 2, normHeight / 2);

            for (int i = 0; i < points.Count; i++)
            {
                normPoints [i] = points [i] * normScale - normDiff;
            }
            //Debug.Log ("points[30] " + points[30]);
            //Debug.Log ("normPoints[30] " + normPoints[30]);
            //Debug.Log ("normScale " + normScale);
            //Debug.Log ("normDiff " + normDiff);

            imagePoints.fromArray(
                new Point((normPoints [38].x + normPoints [41].x) / 2, (normPoints [38].y + normPoints [41].y) / 2), //l eye
                new Point((normPoints [43].x + normPoints [46].x) / 2, (normPoints [43].y + normPoints [46].y) / 2), //r eye
                new Point(normPoints [33].x, normPoints [33].y),                                                     //nose
                new Point(normPoints [48].x, normPoints [48].y),                                                     //l mouth
                new Point(normPoints [54].x, normPoints [54].y),                                                     //r mouth
                new Point(normPoints [0].x, normPoints [0].y),                                                       //l ear
                new Point(normPoints [16].x, normPoints [16].y)                                                      //r ear
                );


            Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);

            Calib3d.Rodrigues(rvec, rotM);

            transformationM.SetRow(0, new Vector4((float)rotM.get(0, 0) [0], (float)rotM.get(0, 1) [0], (float)rotM.get(0, 2) [0], (float)tvec.get(0, 0) [0]));
            transformationM.SetRow(1, new Vector4((float)rotM.get(1, 0) [0], (float)rotM.get(1, 1) [0], (float)rotM.get(1, 2) [0], (float)tvec.get(1, 0) [0]));
            transformationM.SetRow(2, new Vector4((float)rotM.get(2, 0) [0], (float)rotM.get(2, 1) [0], (float)rotM.get(2, 2) [0], (float)tvec.get(2, 0) [0]));
            transformationM.SetRow(3, new Vector4(0, 0, 0, 1));

            ARM = invertYM * transformationM * invertZM;

            return ExtractRotationFromMatrix(ref ARM).eulerAngles;
        }
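A minimal usage sketch; `faceLandmarkDetector` and `faceRect` are assumptions borrowed from the later examples, not part of this method:

        // Hypothetical caller: detect the 68-point model, then read out the head angles.
        List<Vector2> landmarks = faceLandmarkDetector.DetectLandmark(faceRect);
        if (landmarks.Count == 68)
        {
            Vector3 eulerAngles = getFrontalFaceAngle(landmarks);
            Debug.Log("head pitch/yaw/roll: " + eulerAngles);
        }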
Example #2
        private void SetImagePoints(List <Vector2> landmarks)
        {
            //Avoid random access into the List; copy everything into an array first and use that.
            int i = 0;

            foreach (var mark in landmarks)
            {
                _landmarks[i] = mark;
                i++;
            }

            //NOTE: pick the eyes, nose, and ears out of the 17-point model (same mapping as _objPoints).
            if (_imagePoints.rows() == 0)
            {
                //First call: build the Mat properly, which also allocates its storage.
                _imagePoints.fromArray(
                    new Point((_landmarks[2].x + _landmarks[3].x) / 2, (_landmarks[2].y + _landmarks[3].y) / 2),
                    new Point((_landmarks[4].x + _landmarks[5].x) / 2, (_landmarks[4].y + _landmarks[5].y) / 2),
                    new Point(_landmarks[0].x, _landmarks[0].y),
                    new Point(_landmarks[1].x, _landmarks[1].y),
                    new Point(_landmarks[6].x, _landmarks[6].y),
                    new Point(_landmarks[8].x, _landmarks[8].y)
                    );
            }
            else
            {
                //Subsequent calls: fromArray and new Point allocate on the GC heap, so avoid them here.
                _imagePointsSetter[0]  = (_landmarks[2].x + _landmarks[3].x) / 2;
                _imagePointsSetter[1]  = (_landmarks[2].y + _landmarks[3].y) / 2;
                _imagePointsSetter[2]  = (_landmarks[4].x + _landmarks[5].x) / 2;
                _imagePointsSetter[3]  = (_landmarks[4].y + _landmarks[5].y) / 2;
                _imagePointsSetter[4]  = _landmarks[0].x;
                _imagePointsSetter[5]  = _landmarks[0].y;
                _imagePointsSetter[6]  = _landmarks[1].x;
                _imagePointsSetter[7]  = _landmarks[1].y;
                _imagePointsSetter[8]  = _landmarks[6].x;
                _imagePointsSetter[9]  = _landmarks[6].y;
                _imagePointsSetter[10] = _landmarks[8].x;
                _imagePointsSetter[11] = _landmarks[8].y;
                _imagePoints.put(0, 0, _imagePointsSetter);
            }
        }
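The allocation-free branch above assumes the buffers were sized once up front; a sketch of the one-time setup this implies (field types follow OpenCVForUnity, sizes are inferred from the indices used above):

        // One-time allocations: 17 landmarks in, 6 image points (x, y pairs) out.
        private readonly Vector2[] _landmarks = new Vector2[17];
        private readonly double[] _imagePointsSetter = new double[12];
        private readonly MatOfPoint2f _imagePoints = new MatOfPoint2f(); // rows() == 0 until the first frame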
 private void updateFeatures()
 {
     if (mMOP2fptsPrev.rows() == 0)
     {
         Imgproc.cvtColor(rgbaMat, matOpFlowThis, Imgproc.COLOR_RGBA2GRAY);
         matOpFlowThis.copyTo(matOpFlowPrev);
         Imgproc.goodFeaturesToTrack(matOpFlowPrev, MOPcorners, maxCorners, qualityLevel, minDistance);
         mMOP2fptsPrev.fromArray(MOPcorners.toArray());
         mMOP2fptsPrev.copyTo(mMOP2fptsSafe);
     }
     else
     {
         matOpFlowThis.copyTo(matOpFlowPrev);
         Imgproc.cvtColor(rgbaMat, matOpFlowThis, Imgproc.COLOR_RGBA2GRAY);
         Imgproc.goodFeaturesToTrack(matOpFlowThis, MOPcorners, maxCorners, qualityLevel, minDistance);
         mMOP2fptsThis.fromArray(MOPcorners.toArray());
         mMOP2fptsSafe.copyTo(mMOP2fptsPrev);
         mMOP2fptsThis.copyTo(mMOP2fptsSafe);
     }
 }
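The prev/this/safe triple is a ping-pong buffer: after each call, matOpFlowPrev and matOpFlowThis are exactly one frame apart, so optical flow can be computed directly, as the fuller examples below do:

     // Sketch: the call that typically follows updateFeatures() each frame.
     Video.calcOpticalFlowPyrLK(matOpFlowPrev, matOpFlowThis, mMOP2fptsPrev, mMOP2fptsThis, mMOBStatus, mMOFerr);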
Example #4
        void UpdateCameraTransform()
        {
            // Update points.
            for (int p = 0; p < pointCount; p++)
            {
                Vector2 posImage = _userPointRects[p].anchorMin;
                posImage.Scale(new Vector2(_cameraTexture.width, _cameraTexture.height));
                _anchorPointsImage[p].set(new double[] { posImage.x, posImage.y });
                Vector3 posWorld = _anchorTransform.TransformPoint(defaultPointPositions[p] - Vector2.one * 0.5f);
                _anchorPointsWorld[p].set(new double[] { posWorld.x, posWorld.y, posWorld.z });
            }
            _anchorPointsImageMat.fromArray(_anchorPointsImage);
            _anchorPointsWorldMat.fromArray(_anchorPointsWorld);

            // Compute
            bool success = Calib3d.solvePnP(_anchorPointsWorldMat, _anchorPointsImageMat, _cameraMatrix, _noDistCoeffs, _rVec, _tVec);

            if (!success)
            {
                return;
            }

            // Convert.
            bool inverse = true;

            TrackingToolsHelper.ApplyPose(_rVec, _tVec, _targetCameraTransform, inverse);

            /*
             * Vector3 translation = TrackingToolsHelper.TranslationMatVectorToVector3( _tVec  );
             * Quaternion rotation = TrackingToolsHelper.RotationMatVectorToQuaternion( _rVec );
             * translation = rotation * translation;
             *
             * // Apply.
             * _targetCameraTransform.SetPositionAndRotation( translation, rotation );
             */

            // Save.
            SaveCircleAnchorPoints();
        }
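solvePnP above relies on `_cameraMatrix` holding valid pinhole intrinsics. A hedged sketch of filling one when no calibration is available; the focal-length guess and image size are placeholders, not calibration data:

        // Pinhole intrinsics: [ fx 0 cx ; 0 fy cy ; 0 0 1 ].
        double width = 1280, height = 720;          // placeholder image size
        double fx = width, fy = width;              // ad-hoc focal-length guess (common approximation)
        double cx = width / 2.0, cy = height / 2.0; // principal point at the image center
        Mat cameraMatrix = new Mat(3, 3, CvType.CV_64FC1);
        cameraMatrix.put(0, 0, fx, 0, cx, 0, fy, cy, 0, 0, 1);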
        // Update is called once per frame
        void Update()
        {
            if (!initDone)
            {
                return;
            }

            if (screenOrientation != Screen.orientation)
            {
                screenOrientation = Screen.orientation;
                updateLayout();
            }

#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
            if (webCamTexture.width > 16 && webCamTexture.height > 16)
            {
#else
            if (webCamTexture.didUpdateThisFrame)
            {
#endif

                Utils.webCamTextureToMat(webCamTexture, rgbaMat, colors);

                //flip to correct direction.
                if (webCamDevice.isFrontFacing)
                {
                    if (webCamTexture.videoRotationAngle == 0)
                    {
                        Core.flip(rgbaMat, rgbaMat, 1);
                    }
                    else if (webCamTexture.videoRotationAngle == 90)
                    {
                        Core.flip(rgbaMat, rgbaMat, 0);
                    }
                    else if (webCamTexture.videoRotationAngle == 180)
                    {
                        Core.flip(rgbaMat, rgbaMat, 0);
                    }
                    else if (webCamTexture.videoRotationAngle == 270)
                    {
                        Core.flip(rgbaMat, rgbaMat, 1);
                    }
                }
                else
                {
                    if (webCamTexture.videoRotationAngle == 180)
                    {
                        Core.flip(rgbaMat, rgbaMat, -1);
                    }
                    else if (webCamTexture.videoRotationAngle == 270)
                    {
                        Core.flip(rgbaMat, rgbaMat, -1);
                    }
                }

                if (mMOP2fptsPrev.rows() == 0)
                {
                    // first time through the loop so we need prev and this mats
                    // plus prev points
                    // get this mat
                    Imgproc.cvtColor(rgbaMat, matOpFlowThis, Imgproc.COLOR_RGBA2GRAY);

                    // copy that to prev mat
                    matOpFlowThis.copyTo(matOpFlowPrev);

                    // get prev corners
                    Imgproc.goodFeaturesToTrack(matOpFlowPrev, MOPcorners, iGFFTMax, 0.05, 20);
                    mMOP2fptsPrev.fromArray(MOPcorners.toArray());

                    // keep a safe copy of these corners
                    mMOP2fptsPrev.copyTo(mMOP2fptsSafe);
                }
                else
                {
                    // we've been through before so
                    // this mat is valid. Copy it to prev mat
                    matOpFlowThis.copyTo(matOpFlowPrev);

                    // get this mat
                    Imgproc.cvtColor(rgbaMat, matOpFlowThis, Imgproc.COLOR_RGBA2GRAY);

                    // get the corners for this mat
                    Imgproc.goodFeaturesToTrack(matOpFlowThis, MOPcorners, iGFFTMax, 0.05, 20);
                    mMOP2fptsThis.fromArray(MOPcorners.toArray());

                    // retrieve the corners from the prev mat
                    // (saves calculating them again)
                    mMOP2fptsSafe.copyTo(mMOP2fptsPrev);

                    // and save these corners for next time through

                    mMOP2fptsThis.copyTo(mMOP2fptsSafe);
                }


                /*
                 * Parameters:
                 * prevImg first 8-bit input image
                 * nextImg second input image
                 * prevPts vector of 2D points for which the flow needs to be found; point coordinates must be single-precision floating-point numbers.
                 * nextPts output vector of 2D points (with single-precision floating-point coordinates) containing the calculated new positions of input features in the second image; when OPTFLOW_USE_INITIAL_FLOW flag is passed, the vector must have the same size as in the input.
                 * status output status vector (of unsigned chars); each element of the vector is set to 1 if the flow for the corresponding features has been found, otherwise, it is set to 0.
                 * err output vector of errors; each element of the vector is set to an error for the corresponding feature, type of the error measure can be set in flags parameter; if the flow wasn't found then the error is not defined (use the status parameter to find such cases).
                 */
                Video.calcOpticalFlowPyrLK(matOpFlowPrev, matOpFlowThis, mMOP2fptsPrev, mMOP2fptsThis, mMOBStatus, mMOFerr);

                if (!mMOBStatus.empty())
                {
                    List <Point> cornersPrev = mMOP2fptsPrev.toList();
                    List <Point> cornersThis = mMOP2fptsThis.toList();
                    List <byte>  byteStatus  = mMOBStatus.toList();

                    // Draw each successfully tracked corner and a line back to its previous position.
                    for (int i = 0; i < byteStatus.Count; i++)
                    {
                        if (byteStatus [i] == 1)
                        {
                            Point pt  = cornersThis [i];
                            Point pt2 = cornersPrev [i];

                            Core.circle(rgbaMat, pt, 5, colorRed, iLineThickness - 1);

                            Core.line(rgbaMat, pt, pt2, colorRed, iLineThickness);
                        }
                    }
                }



                Utils.matToTexture2D(rgbaMat, texture, colors);

                gameObject.GetComponent <Renderer> ().material.mainTexture = texture;
            }
        }

        void OnDisable()
        {
            webCamTexture.Stop();
        }
        // Update is called once per frame
        void Update()
        {
            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();

                // detect faces on the downscaled image
                if (!enableSkipFrame || !imageOptimizationHelper.IsCurrentFrameSkipped())
                {
                    Mat   downScaleRgbaMat = null;
                    float DOWNSCALE_RATIO  = 1.0f;
                    if (enableDownScale)
                    {
                        downScaleRgbaMat = imageOptimizationHelper.GetDownScaleMat(rgbaMat);
                        DOWNSCALE_RATIO  = imageOptimizationHelper.downscaleRatio;
                    }
                    else
                    {
                        downScaleRgbaMat = rgbaMat;
                        DOWNSCALE_RATIO  = 1.0f;
                    }

                    // set the downscaled mat
                    OpenCVForUnityUtils.SetImage(faceLandmarkDetector, downScaleRgbaMat);

                    //detect face rects
                    detectionResult = faceLandmarkDetector.Detect();

                    if (enableDownScale)
                    {
                        for (int i = 0; i < detectionResult.Count; ++i)
                        {
                            var rect = detectionResult[i];
                            detectionResult[i] = new UnityEngine.Rect(
                                rect.x * DOWNSCALE_RATIO,
                                rect.y * DOWNSCALE_RATIO,
                                rect.width * DOWNSCALE_RATIO,
                                rect.height * DOWNSCALE_RATIO);
                        }
                    }
                }

                List <Vector2> points = null;
                if (detectionResult != null && detectionResult.Count > 0)
                {
                    // set the original scale image
                    OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);

                    //detect landmark points
                    points = faceLandmarkDetector.DetectLandmark(detectionResult[0]);
                }

                if (points != null)
                {
                    if (displayFacePoints)
                    {
                        OpenCVForUnityUtils.DrawFaceLandmark(rgbaMat, points, new Scalar(0, 255, 0, 255), 2);
                    }

                    MatOfPoint3f objectPoints   = null;
                    bool         isRightEyeOpen = false;
                    bool         isLeftEyeOpen  = false;
                    bool         isMouthOpen    = false;
                    if (points.Count == 68)
                    {
                        objectPoints = objectPoints68;

                        imagePoints.fromArray(
                            new Point((points[38].x + points[41].x) / 2, (points[38].y + points[41].y) / 2), //l eye (Interpupillary breadth)
                            new Point((points[43].x + points[46].x) / 2, (points[43].y + points[46].y) / 2), //r eye (Interpupillary breadth)
                            new Point(points[30].x, points[30].y),                                           //nose (Tip)
                            new Point(points[33].x, points[33].y),                                           //nose (Subnasale)
                            new Point(points[0].x, points[0].y),                                             //l ear (Bitragion breadth)
                            new Point(points[16].x, points[16].y)                                            //r ear (Bitragion breadth)
                            );

                        if (Mathf.Abs((float)(points[43].y - points[46].y)) > Mathf.Abs((float)(points[42].x - points[45].x)) / 5.0)
                        {
                            isRightEyeOpen = true;
                        }

                        if (Mathf.Abs((float)(points[38].y - points[41].y)) > Mathf.Abs((float)(points[39].x - points[36].x)) / 5.0)
                        {
                            isLeftEyeOpen = true;
                        }

                        float noseDistance  = Mathf.Abs((float)(points[27].y - points[33].y));
                        float mouthDistance = Mathf.Abs((float)(points[62].y - points[66].y));
                        if (mouthDistance > noseDistance / 5.0)
                        {
                            isMouthOpen = true;
                        }
                        else
                        {
                            isMouthOpen = false;
                        }
                    }
                    else if (points.Count == 17)
                    {
                        objectPoints = objectPoints17;

                        imagePoints.fromArray(
                            new Point((points[2].x + points[3].x) / 2, (points[2].y + points[3].y) / 2), //l eye (Interpupillary breadth)
                            new Point((points[4].x + points[5].x) / 2, (points[4].y + points[5].y) / 2), //r eye (Interpupillary breadth)
                            new Point(points[0].x, points[0].y),                                         //nose (Tip)
                            new Point(points[1].x, points[1].y),                                         //nose (Subnasale)
                            new Point(points[6].x, points[6].y),                                         //l ear (Bitragion breadth)
                            new Point(points[8].x, points[8].y)                                          //r ear (Bitragion breadth)
                            );

                        if (Mathf.Abs((float)(points[11].y - points[12].y)) > Mathf.Abs((float)(points[4].x - points[5].x)) / 5.0)
                        {
                            isRightEyeOpen = true;
                        }

                        if (Mathf.Abs((float)(points[9].y - points[10].y)) > Mathf.Abs((float)(points[2].x - points[3].x)) / 5.0)
                        {
                            isLeftEyeOpen = true;
                        }

                        float noseDistance  = Mathf.Abs((float)(points[3].y - points[1].y));
                        float mouthDistance = Mathf.Abs((float)(points[14].y - points[16].y));
                        if (mouthDistance > noseDistance / 2.0)
                        {
                            isMouthOpen = true;
                        }
                        else
                        {
                            isMouthOpen = false;
                        }
                    }
                    else if (points.Count == 6)
                    {
                        objectPoints = objectPoints6;

                        imagePoints.fromArray(
                            new Point((points[2].x + points[3].x) / 2, (points[2].y + points[3].y) / 2), //l eye (Interpupillary breadth)
                            new Point((points[4].x + points[5].x) / 2, (points[4].y + points[5].y) / 2), //r eye (Interpupillary breadth)
                            new Point(points[0].x, points[0].y),                                         //nose (Tip)
                            new Point(points[1].x, points[1].y)                                          //nose (Subnasale)
                            );
                    }
                    else if (points.Count == 5)
                    {
                        objectPoints = objectPoints5;

                        imagePoints.fromArray(
                            new Point(points[3].x, points[3].y), //l eye (Inner corner of the eye)
                            new Point(points[1].x, points[1].y), //r eye (Inner corner of the eye)
                            new Point(points[2].x, points[2].y), //l eye (Tail of the eye)
                            new Point(points[0].x, points[0].y), //r eye (Tail of the eye)
                            new Point(points[4].x, points[4].y)  //nose (Subnasale)
                            );

                        if (fpsMonitor != null)
                        {
                            fpsMonitor.consoleText = "This example mainly supports the 68-, 17-, and 6-point face landmark models.";
                        }
                    }

                    // estimate head pose
                    if (rvec == null || tvec == null)
                    {
                        rvec = new Mat(3, 1, CvType.CV_64FC1);
                        tvec = new Mat(3, 1, CvType.CV_64FC1);
                        Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
                    }


                    double tvec_x = tvec.get(0, 0)[0], tvec_y = tvec.get(1, 0)[0], tvec_z = tvec.get(2, 0)[0];

                    bool    isNotInViewport = false;
                    Vector4 pos             = VP * new Vector4((float)tvec_x, (float)tvec_y, (float)tvec_z, 1.0f);
                    if (pos.w != 0)
                    {
                        float x = pos.x / pos.w, y = pos.y / pos.w, z = pos.z / pos.w;
                        if (x < -1.0f || x > 1.0f || y < -1.0f || y > 1.0f || z < -1.0f || z > 1.0f)
                        {
                            isNotInViewport = true;
                        }
                    }

                    if (double.IsNaN(tvec_z) || isNotInViewport)
                    { // if tvec holds bad data (the estimated object is outside the camera's field of view), do not use the extrinsic guess.
                        Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
                    }
                    else
                    {
                        Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec, true, Calib3d.SOLVEPNP_ITERATIVE);
                    }

                    //Debug.Log (tvec.dump());

                    if (!isNotInViewport)
                    {
                        if (displayHead)
                        {
                            head.SetActive(true);
                        }
                        if (displayAxes)
                        {
                            axes.SetActive(true);
                        }

                        if (displayEffects)
                        {
                            rightEye.SetActive(isRightEyeOpen);
                            leftEye.SetActive(isLeftEyeOpen);

                            if (isMouthOpen)
                            {
                                mouth.SetActive(true);
                                foreach (ParticleSystem ps in mouthParticleSystem)
                                {
                                    var em = ps.emission;
                                    em.enabled = true;
#if UNITY_5_5_OR_NEWER
                                    var main = ps.main;
                                    main.startSizeMultiplier = 20;
#else
                                    ps.startSize = 20;
#endif
                                }
                            }
                            else
                            {
                                foreach (ParticleSystem ps in mouthParticleSystem)
                                {
                                    var em = ps.emission;
                                    em.enabled = false;
                                }
                            }
                        }

                        // Convert to unity pose data.
                        double[] rvecArr = new double[3];
                        rvec.get(0, 0, rvecArr);
                        double[] tvecArr = new double[3];
                        tvec.get(0, 0, tvecArr);
                        PoseData poseData = ARUtils.ConvertRvecTvecToPoseData(rvecArr, tvecArr);

                        // Changes in pos/rot below these thresholds are ignored.
                        if (enableLowPassFilter)
                        {
                            ARUtils.LowpassPoseData(ref oldPoseData, ref poseData, positionLowPass, rotationLowPass);
                        }
                        oldPoseData = poseData;

                        // Create transform matrix.
                        transformationM = Matrix4x4.TRS(poseData.pos, poseData.rot, Vector3.one);
                    }


                    // right-handed coordinates system (OpenCV) to left-handed one (Unity)
                    // https://stackoverflow.com/questions/30234945/change-handedness-of-a-row-major-4x4-transformation-matrix
                    ARM = invertYM * transformationM * invertYM;

                    // Apply Y-axis and Z-axis reflection matrix. (Adjust the posture of the AR object)
                    ARM = ARM * invertYM * invertZM;

                    if (shouldMoveARCamera)
                    {
                        ARM = ARGameObject.transform.localToWorldMatrix * ARM.inverse;
                        ARUtils.SetTransformFromMatrix(ARCamera.transform, ref ARM);
                    }
                    else
                    {
                        ARM = ARCamera.transform.localToWorldMatrix * ARM;
                        ARUtils.SetTransformFromMatrix(ARGameObject.transform, ref ARM);
                    }
                }
                //Imgproc.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255, 255), 1, Imgproc.LINE_AA, false);

                OpenCVForUnity.UnityUtils.Utils.fastMatToTexture2D(rgbaMat, texture);
            }
        }
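The in-viewport test in the middle of Update() is self-contained enough to factor out; a sketch with the same logic (the helper name is hypothetical):

        // True when the clip-space projection of p leaves the canonical view volume.
        static bool IsOutsideViewport(Matrix4x4 vp, Vector3 p)
        {
            Vector4 pos = vp * new Vector4(p.x, p.y, p.z, 1.0f);
            if (pos.w == 0) return false; // degenerate case: keep the pose, as the code above does
            float x = pos.x / pos.w, y = pos.y / pos.w, z = pos.z / pos.w;
            return x < -1f || x > 1f || y < -1f || y > 1f || z < -1f || z > 1f;
        }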
Example #7
        // Update is called once per frame
        void Update()
        {
            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();


                OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);

                //detect face rects
                List <UnityEngine.Rect> detectResult = faceLandmarkDetector.Detect();

                if (detectResult.Count > 0)
                {
                    //detect landmark points
                    List <Vector2> points = faceLandmarkDetector.DetectLandmark(detectResult [0]);

                    if (displayFacePoints)
                    {
                        OpenCVForUnityUtils.DrawFaceLandmark(rgbaMat, points, new Scalar(0, 255, 0, 255), 2);
                    }

                    imagePoints.fromArray(
                        new Point((points [38].x + points [41].x) / 2, (points [38].y + points [41].y) / 2), //l eye (Interpupillary breadth)
                        new Point((points [43].x + points [46].x) / 2, (points [43].y + points [46].y) / 2), //r eye (Interpupillary breadth)
                        new Point(points [30].x, points [30].y),                                             //nose (Nose top)
                        new Point(points [48].x, points [48].y),                                             //l mouth (Mouth breadth)
                        new Point(points [54].x, points [54].y),                                             //r mouth (Mouth breadth)
                        new Point(points [0].x, points [0].y),                                               //l ear (Bitragion breadth)
                        new Point(points [16].x, points [16].y)                                              //r ear (Bitragion breadth)
                        );

                    // Estimate head pose.
                    if (rvec == null || tvec == null)
                    {
                        rvec = new Mat(3, 1, CvType.CV_64FC1);
                        tvec = new Mat(3, 1, CvType.CV_64FC1);
                        Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
                    }

                    double tvec_z = tvec.get(2, 0) [0];

                    if (double.IsNaN(tvec_z) || tvec_z < 0)    // if tvec holds bad data, do not use the extrinsic guess.
                    {
                        Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
                    }
                    else
                    {
                        Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec, true, Calib3d.SOLVEPNP_ITERATIVE);
                    }

//                    Debug.Log (tvec.dump());

                    if (!double.IsNaN(tvec_z))
                    {
                        if (Mathf.Abs((float)(points [43].y - points [46].y)) > Mathf.Abs((float)(points [42].x - points [45].x)) / 6.0)
                        {
                            if (displayEffects)
                            {
                                rightEye.SetActive(true);
                            }
                        }

                        if (Mathf.Abs((float)(points [38].y - points [41].y)) > Mathf.Abs((float)(points [39].x - points [36].x)) / 6.0)
                        {
                            if (displayEffects)
                            {
                                leftEye.SetActive(true);
                            }
                        }
                        if (displayHead)
                        {
                            head.SetActive(true);
                        }
                        if (displayAxes)
                        {
                            axes.SetActive(true);
                        }


                        float noseDistance  = Mathf.Abs((float)(points [27].y - points [33].y));
                        float mouthDistance = Mathf.Abs((float)(points [62].y - points [66].y));
                        if (mouthDistance > noseDistance / 5.0)
                        {
                            if (displayEffects)
                            {
                                mouth.SetActive(true);
                                foreach (ParticleSystem ps in mouthParticleSystem)
                                {
                                    var em = ps.emission;
                                    em.enabled   = true;
                                    ps.startSize = 40 * (mouthDistance / noseDistance);
                                }
                            }
                        }
                        else
                        {
                            if (displayEffects)
                            {
                                foreach (ParticleSystem ps in mouthParticleSystem)
                                {
                                    var em = ps.emission;
                                    em.enabled = false;
                                }
                            }
                        }

                        Calib3d.Rodrigues(rvec, rotMat);

                        transformationM.SetRow(0, new Vector4((float)rotMat.get(0, 0) [0], (float)rotMat.get(0, 1) [0], (float)rotMat.get(0, 2) [0], (float)tvec.get(0, 0) [0]));
                        transformationM.SetRow(1, new Vector4((float)rotMat.get(1, 0) [0], (float)rotMat.get(1, 1) [0], (float)rotMat.get(1, 2) [0], (float)tvec.get(1, 0) [0]));
                        transformationM.SetRow(2, new Vector4((float)rotMat.get(2, 0) [0], (float)rotMat.get(2, 1) [0], (float)rotMat.get(2, 2) [0], (float)tvec.get(2, 0) [0]));
                        transformationM.SetRow(3, new Vector4(0, 0, 0, 1));

                        // right-handed coordinates system (OpenCV) to left-handed one (Unity)
                        ARM = invertYM * transformationM;

                        // Apply Z axis inverted matrix.
                        ARM = ARM * invertZM;

                        if (shouldMoveARCamera)
                        {
                            ARM = ARGameObject.transform.localToWorldMatrix * ARM.inverse;

                            ARUtils.SetTransformFromMatrix(ARCamera.transform, ref ARM);
                        }
                        else
                        {
                            ARM = ARCamera.transform.localToWorldMatrix * ARM;

                            ARUtils.SetTransformFromMatrix(ARGameObject.transform, ref ARM);
                        }
                    }
                }

                Imgproc.putText(rgbaMat, "W:" + rgbaMat.width() + " H:" + rgbaMat.height() + " SO:" + Screen.orientation, new Point(5, rgbaMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar(255, 255, 255, 255), 1, Imgproc.LINE_AA, false);

                OpenCVForUnity.Utils.matToTexture2D(rgbaMat, texture, webCamTextureToMatHelper.GetBufferColors());
            }
        }
Example #8
        private void UpdateARHeadTransform(List <Vector2> points, Matrix4x4 cameraToWorldMatrix)
        {
            // The coordinates in pixels of the object detected on the image projected onto the plane.
            imagePoints.fromArray(
                new Point((points [38].x + points [41].x) / 2, (points [38].y + points [41].y) / 2), //l eye (Interpupillary breadth)
                new Point((points [43].x + points [46].x) / 2, (points [43].y + points [46].y) / 2), //r eye (Interpupillary breadth)
                new Point(points [30].x, points [30].y),                                             //nose (Nose top)
                new Point(points [48].x, points [48].y),                                             //l mouth (Mouth breadth)
                new Point(points [54].x, points [54].y),                                             //r mouth (Mouth breadth)
                new Point(points [0].x, points [0].y),                                               //l ear (Bitragion breadth)
                new Point(points [16].x, points [16].y)                                              //r ear (Bitragion breadth)
                );

            // Estimate head pose.
            if (rvec == null || tvec == null)
            {
                rvec = new Mat(3, 1, CvType.CV_64FC1);
                tvec = new Mat(3, 1, CvType.CV_64FC1);
                Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
            }

            double tvec_z = tvec.get(2, 0) [0];

            if (double.IsNaN(tvec_z) || tvec_z < 0)   // if tvec holds bad data, do not use the extrinsic guess.
            {
                Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
            }
            else
            {
                Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec, true, Calib3d.SOLVEPNP_ITERATIVE);
            }

            if (applyEstimationPose && !double.IsNaN(tvec_z))
            {
                if (Mathf.Abs((float)(points [43].y - points [46].y)) > Mathf.Abs((float)(points [42].x - points [45].x)) / 5.0)
                {
                    if (displayEffects)
                    {
                        rightEye.SetActive(true);
                    }
                }
                else
                {
                    if (displayEffects)
                    {
                        rightEye.SetActive(false);
                    }
                }

                if (Mathf.Abs((float)(points [38].y - points [41].y)) > Mathf.Abs((float)(points [39].x - points [36].x)) / 5.0)
                {
                    if (displayEffects)
                    {
                        leftEye.SetActive(true);
                    }
                }
                else
                {
                    if (displayEffects)
                    {
                        leftEye.SetActive(false);
                    }
                }

                if (displayHead)
                {
                    head.SetActive(true);
                }
                if (displayAxes)
                {
                    axes.SetActive(true);
                }


                float noseDistance  = Mathf.Abs((float)(points [27].y - points [33].y));
                float mouthDistance = Mathf.Abs((float)(points [62].y - points [66].y));
                if (mouthDistance > noseDistance / 5.0)
                {
                    if (displayEffects)
                    {
                        mouth.SetActive(true);
                        foreach (ParticleSystem ps in mouthParticleSystem)
                        {
                            var em = ps.emission;
                            em.enabled = true;
                            #if UNITY_5_5_OR_NEWER
                            var main = ps.main;
                            main.startSizeMultiplier = 40 * (mouthDistance / noseDistance);
                            #else
                            ps.startSize = 40 * (mouthDistance / noseDistance);
                            #endif
                        }
                    }
                }
                else
                {
                    if (displayEffects)
                    {
                        foreach (ParticleSystem ps in mouthParticleSystem)
                        {
                            var em = ps.emission;
                            em.enabled = false;
                        }
                    }
                }

                // Convert to unity pose data.
                double[] rvecArr = new double[3];
                rvec.get(0, 0, rvecArr);
                double[] tvecArr = new double[3];
                tvec.get(0, 0, tvecArr);
                // tvec is in millimeters; convert to meters, and compensate z for the detection downscale.
                tvecArr[0] = tvecArr[0] / 1000.0;
                tvecArr[1] = tvecArr[1] / 1000.0;
                tvecArr[2] = tvecArr[2] / 1000.0 / imageOptimizationHelper.downscaleRatio;
                PoseData poseData = ARUtils.ConvertRvecTvecToPoseData(rvecArr, tvecArr);

                // Changes in pos/rot below these thresholds are ignored.
                if (enableLowPassFilter)
                {
                    ARUtils.LowpassPoseData(ref oldPoseData, ref poseData, positionLowPass, rotationLowPass);
                }
                oldPoseData = poseData;

                // Create transform matrix.
                transformationM = Matrix4x4.TRS(poseData.pos, poseData.rot, Vector3.one);

                // right-handed coordinates system (OpenCV) to left-handed one (Unity)
                ARM = invertYM * transformationM;

                // Apply Z-axis inverted matrix.
                ARM = ARM * invertZM;

                // Apply the cameraToWorld matrix with the Z-axis inverted.
                ARM = cameraToWorldMatrix * invertZM * ARM;

                ARUtils.SetTransformFromMatrix(arGameObject.transform, ref ARM);
            }
        }
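The invertYM/invertZM matrices used throughout these examples are plain axis reflections; a sketch of how they are presumably initialized once at startup (the initialization is not shown in this excerpt):

        // Reflection about Y flips OpenCV's right-handed frame into Unity's left-handed one;
        // reflection about Z adjusts the AR object's facing.
        Matrix4x4 invertYM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1));
        Matrix4x4 invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));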
Example #9
        public override void UpdateValue()
        {
            if (matSourceGetterInterface == null)
            {
                return;
            }

            if (faceLandmarkGetterInterface == null)
            {
                return;
            }

            Mat rgbaMat = matSourceGetterInterface.GetMatSource();

            if (rgbaMat == null)
            {
                return;
            }
            else
            {
                if (rgbaMat.width() != imageWidth || rgbaMat.height() != imageHeight)
                {
                    imageWidth  = rgbaMat.width();
                    imageHeight = rgbaMat.height();
                    SetCameraMatrix(camMatrix, imageWidth, imageHeight);
                }
            }

            didUpdateHeadPositionAndRotation = false;

            List <Vector2> points = faceLandmarkGetterInterface.GetFaceLanmarkPoints();

            if (points != null)
            {
                MatOfPoint3f objectPoints = null;

                if (points.Count == 68)
                {
                    objectPoints = objectPoints68;

                    imagePoints.fromArray(
                        new Point((points[38].x + points[41].x) / 2, (points[38].y + points[41].y) / 2), //l eye (Interpupillary breadth)
                        new Point((points[43].x + points[46].x) / 2, (points[43].y + points[46].y) / 2), //r eye (Interpupillary breadth)
                        new Point(points[30].x, points[30].y),                                           //nose (Tip)
                        new Point(points[33].x, points[33].y),                                           //nose (Subnasale)
                        new Point(points[0].x, points[0].y),                                             //l ear (Bitragion breadth)
                        new Point(points[16].x, points[16].y)                                            //r ear (Bitragion breadth)
                        );
                }
                else if (points.Count == 17)
                {
                    objectPoints = objectPoints17;

                    imagePoints.fromArray(
                        new Point((points[2].x + points[3].x) / 2, (points[2].y + points[3].y) / 2), //l eye (Interpupillary breadth)
                        new Point((points[4].x + points[5].x) / 2, (points[4].y + points[5].y) / 2), //r eye (Interpupillary breadth)
                        new Point(points[0].x, points[0].y),                                         //nose (Tip)
                        new Point(points[1].x, points[1].y),                                         //nose (Subnasale)
                        new Point(points[6].x, points[6].y),                                         //l ear (Bitragion breadth)
                        new Point(points[8].x, points[8].y)                                          //r ear (Bitragion breadth)
                        );
                }
                else if (points.Count == 6)
                {
                    objectPoints = objectPoints6;

                    imagePoints.fromArray(
                        new Point((points[2].x + points[3].x) / 2, (points[2].y + points[3].y) / 2), //l eye (Interpupillary breadth)
                        new Point((points[4].x + points[5].x) / 2, (points[4].y + points[5].y) / 2), //r eye (Interpupillary breadth)
                        new Point(points[0].x, points[0].y),                                         //nose (Tip)
                        new Point(points[1].x, points[1].y)                                          //nose (Subnasale)
                        );
                }

                // Estimate head pose.
                if (rvec == null || tvec == null)
                {
                    rvec = new Mat(3, 1, CvType.CV_64FC1);
                    tvec = new Mat(3, 1, CvType.CV_64FC1);
                    Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
                }


                double tvec_x = tvec.get(0, 0)[0], tvec_y = tvec.get(1, 0)[0], tvec_z = tvec.get(2, 0)[0];

                bool    isNotInViewport = false;
                Vector4 pos             = VP * new Vector4((float)tvec_x, (float)tvec_y, (float)tvec_z, 1.0f);
                if (pos.w != 0)
                {
                    float x = pos.x / pos.w, y = pos.y / pos.w, z = pos.z / pos.w;
                    if (x < -1.0f || x > 1.0f || y < -1.0f || y > 1.0f || z < -1.0f || z > 1.0f)
                    {
                        isNotInViewport = true;
                    }
                }

                if (double.IsNaN(tvec_z) || isNotInViewport)
                { // if tvec holds bad data (the estimated object is outside the camera's field of view), do not use the extrinsic guess.
                    Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
                }
                else
                {
                    Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec, true, Calib3d.SOLVEPNP_ITERATIVE);
                }

                //Debug.Log (tvec.dump () + " " + isNotInViewport);

                if (!isNotInViewport)
                {
                    // Convert to unity pose data.
                    double[] rvecArr = new double[3];
                    rvec.get(0, 0, rvecArr);
                    double[] tvecArr = new double[3];
                    tvec.get(0, 0, tvecArr);
                    PoseData poseData = ARUtils.ConvertRvecTvecToPoseData(rvecArr, tvecArr);

                    // adjust the position to the scale of real-world space.
                    poseData.pos = new Vector3(poseData.pos.x * 0.001f, poseData.pos.y * 0.001f, poseData.pos.z * 0.001f);

                    // Changes in pos/rot below these thresholds are ignored.
                    if (enableLowPassFilter)
                    {
                        ARUtils.LowpassPoseData(ref oldPoseData, ref poseData, positionLowPass, rotationLowPass);
                    }
                    oldPoseData = poseData;

                    Matrix4x4 transformationM = Matrix4x4.TRS(poseData.pos, poseData.rot, Vector3.one);

                    // right-handed coordinates system (OpenCV) to left-handed one (Unity)
                    // https://stackoverflow.com/questions/30234945/change-handedness-of-a-row-major-4x4-transformation-matrix
                    transformationM = invertYM * transformationM * invertYM;

                    // Apply Y-axis and Z-axis reflection matrix. (Adjust the posture of the AR object)
                    transformationM = transformationM * invertYM * invertZM;

                    headPosition = ARUtils.ExtractTranslationFromMatrix(ref transformationM);
                    headRotation = ARUtils.ExtractRotationFromMatrix(ref transformationM);

                    didUpdateHeadPositionAndRotation = true;
                }
            }
        }
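ARUtils.LowpassPoseData is used here as a jitter gate; a hedged sketch of what such a threshold filter plausibly does (the library's actual implementation may differ):

        // Keep the old pose when the change is below the thresholds, suppressing sub-threshold jitter.
        static void LowpassPoseSketch(ref PoseData oldPose, ref PoseData newPose, float posThreshold, float rotThreshold)
        {
            if (Vector3.Distance(oldPose.pos, newPose.pos) < posThreshold)
                newPose.pos = oldPose.pos;
            if (Quaternion.Angle(oldPose.rot, newPose.rot) < rotThreshold)
                newPose.rot = oldPose.rot;
        }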
        private void UpdateARHeadTransform(List <Vector2> points)
        {
            // The coordinates in pixels of the object detected on the image projected onto the plane.
            imagePoints.fromArray(
                new Point((points [38].x + points [41].x) / 2, (points [38].y + points [41].y) / 2), //l eye (Interpupillary breadth)
                new Point((points [43].x + points [46].x) / 2, (points [43].y + points [46].y) / 2), //r eye (Interpupillary breadth)
                new Point(points [30].x, points [30].y),                                             //nose (Nose top)
                new Point(points [48].x, points [48].y),                                             //l mouth (Mouth breadth)
                new Point(points [54].x, points [54].y),                                             //r mouth (Mouth breadth)
                new Point(points [0].x, points [0].y),                                               //l ear (Bitragion breadth)
                new Point(points [16].x, points [16].y)                                              //r ear (Bitragion breadth)
                );

            // Estimate head pose.
            if (rvec == null || tvec == null)
            {
                rvec = new Mat(3, 1, CvType.CV_64FC1);
                tvec = new Mat(3, 1, CvType.CV_64FC1);
                Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
            }

            double tvec_z = tvec.get(2, 0) [0];

            if (double.IsNaN(tvec_z) || tvec_z < 0)   // if tvec holds bad data, do not use the extrinsic guess.
            {
                Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
            }
            else
            {
                Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec, true, Calib3d.SOLVEPNP_ITERATIVE);
            }

            if (applyEstimationPose && !double.IsNaN(tvec_z))
            {
                if (Mathf.Abs((float)(points [43].y - points [46].y)) > Mathf.Abs((float)(points [42].x - points [45].x)) / 6.0)
                {
                    if (displayEffects)
                    {
                        rightEye.SetActive(true);
                    }
                }

                if (Mathf.Abs((float)(points [38].y - points [41].y)) > Mathf.Abs((float)(points [39].x - points [36].x)) / 6.0)
                {
                    if (displayEffects)
                    {
                        leftEye.SetActive(true);
                    }
                }
                if (displayHead)
                {
                    head.SetActive(true);
                }
                if (displayAxes)
                {
                    axes.SetActive(true);
                }


                float noseDistance  = Mathf.Abs((float)(points [27].y - points [33].y));
                float mouthDistance = Mathf.Abs((float)(points [62].y - points [66].y));
                if (mouthDistance > noseDistance / 5.0)
                {
                    if (displayEffects)
                    {
                        mouth.SetActive(true);
                        foreach (ParticleSystem ps in mouthParticleSystem)
                        {
                            var em = ps.emission;
                            em.enabled = true;
                            #if UNITY_5_5_OR_NEWER
                            var main = ps.main;
                            main.startSizeMultiplier = 40 * (mouthDistance / noseDistance);
                            #else
                            ps.startSize = 40 * (mouthDistance / noseDistance);
                            #endif
                        }
                    }
                }
                else
                {
                    if (displayEffects)
                    {
                        foreach (ParticleSystem ps in mouthParticleSystem)
                        {
                            var em = ps.emission;
                            em.enabled = false;
                        }
                    }
                }

                Calib3d.Rodrigues(rvec, rotMat);

                // tvec is in millimeters; convert to meters, and compensate z for the detection downscale.
                transformationM.SetRow(0, new Vector4((float)rotMat.get(0, 0) [0], (float)rotMat.get(0, 1) [0], (float)rotMat.get(0, 2) [0], (float)(tvec.get(0, 0) [0] / 1000.0)));
                transformationM.SetRow(1, new Vector4((float)rotMat.get(1, 0) [0], (float)rotMat.get(1, 1) [0], (float)rotMat.get(1, 2) [0], (float)(tvec.get(1, 0) [0] / 1000.0)));
                transformationM.SetRow(2, new Vector4((float)rotMat.get(2, 0) [0], (float)rotMat.get(2, 1) [0], (float)rotMat.get(2, 2) [0], (float)(tvec.get(2, 0) [0] / 1000.0 / imageOptimizationHelper.downscaleRatio)));
                transformationM.SetRow(3, new Vector4(0, 0, 0, 1));

                // right-handed coordinates system (OpenCV) to left-handed one (Unity)
                ARM = invertYM * transformationM;

                // Apply Z axis inverted matrix.
                ARM = ARM * invertZM;

                // Apply camera transform matrix.
                ARM = arCamera.transform.localToWorldMatrix * ARM;

                ARUtils.SetTransformFromMatrix(arGameObject.transform, ref ARM);
            }
        }
Example #11
        // Update is called once per frame
        void Update()
        {
            if (webCamTextureToMatHelper.isPlaying() && webCamTextureToMatHelper.didUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();

                if (mMOP2fptsPrev.rows() == 0)
                {
                    // first time through the loop so we need prev and this mats
                    // plus prev points
                    // get this mat
                    Imgproc.cvtColor(rgbaMat, matOpFlowThis, Imgproc.COLOR_RGBA2GRAY);

                    // copy that to prev mat
                    matOpFlowThis.copyTo(matOpFlowPrev);

                    // get prev corners
                    Imgproc.goodFeaturesToTrack(matOpFlowPrev, MOPcorners, iGFFTMax, 0.05, 20);
                    mMOP2fptsPrev.fromArray(MOPcorners.toArray());

                    // keep a safe copy of these corners
                    mMOP2fptsPrev.copyTo(mMOP2fptsSafe);
                }
                else
                {
                    // we've been through before so
                    // this mat is valid. Copy it to prev mat
                    matOpFlowThis.copyTo(matOpFlowPrev);

                    // get this mat
                    Imgproc.cvtColor(rgbaMat, matOpFlowThis, Imgproc.COLOR_RGBA2GRAY);

                    // get the corners for this mat
                    Imgproc.goodFeaturesToTrack(matOpFlowThis, MOPcorners, iGFFTMax, 0.05, 20);
                    mMOP2fptsThis.fromArray(MOPcorners.toArray());

                    // retrieve the corners from the prev mat
                    // (saves calculating them again)
                    mMOP2fptsSafe.copyTo(mMOP2fptsPrev);

                    // and save these corners for next time through

                    mMOP2fptsThis.copyTo(mMOP2fptsSafe);
                }


                /*
                 *  Parameters:
                 *      prevImg first 8-bit input image
                 *      nextImg second input image
                 *      prevPts vector of 2D points for which the flow needs to be found; point coordinates must be single-precision floating-point numbers.
                 *      nextPts output vector of 2D points (with single-precision floating-point coordinates) containing the calculated new positions of input features in the second image; when OPTFLOW_USE_INITIAL_FLOW flag is passed, the vector must have the same size as in the input.
                 *      status output status vector (of unsigned chars); each element of the vector is set to 1 if the flow for the corresponding features has been found, otherwise, it is set to 0.
                 *      err output vector of errors; each element of the vector is set to an error for the corresponding feature, type of the error measure can be set in flags parameter; if the flow wasn't found then the error is not defined (use the status parameter to find such cases).
                 */
                Video.calcOpticalFlowPyrLK(matOpFlowPrev, matOpFlowThis, mMOP2fptsPrev, mMOP2fptsThis, mMOBStatus, mMOFerr);

                if (!mMOBStatus.empty())
                {
                    List <Point> cornersPrev = mMOP2fptsPrev.toList();
                    List <Point> cornersThis = mMOP2fptsThis.toList();
                    List <byte>  byteStatus  = mMOBStatus.toList();

                    // Draw each successfully tracked corner and a line back to its previous position.
                    for (int i = 0; i < byteStatus.Count; i++)
                    {
                        if (byteStatus [i] == 1)
                        {
                            Point pt  = cornersThis [i];
                            Point pt2 = cornersPrev [i];

                            Imgproc.circle(rgbaMat, pt, 5, colorRed, iLineThickness - 1);

                            Imgproc.line(rgbaMat, pt, pt2, colorRed, iLineThickness);
                        }
                    }
                }

//              Imgproc.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

                Utils.matToTexture2D(rgbaMat, texture, webCamTextureToMatHelper.GetBufferColors());
            }
        }
Example #12
        //public RawImage document;

        void Update()
        {
            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat mainMat = webCamTextureToMatHelper.GetMat();


                if (!selectTarget) //find paper by contours
                {
                    grayMat = new Mat();

                    // copy the frame (note: still RGBA despite the variable name)
                    mainMat.copyTo(grayMat);

                    mainMat = findPaper(mainMat);

                    // display matrix on the screen
                    Utils.fastMatToTexture2D(mainMat, texture);
                }
                else
                { // using optical flow
                    // set the currentGrayMat mat
                    currentGrayMat = new Mat(mainMat.rows(), mainMat.cols(), CvType.CV_8UC1); // single-channel gray; a color-conversion code is not a Mat type
                    Imgproc.cvtColor(mainMat, currentGrayMat, Imgproc.COLOR_RGBA2GRAY);


                    if (initOpticalFlow) // one-time setup for optical flow
                    {
                        // create 40 points
                        Point[] points = new Point[40];
                        // seed them at and just around each of the four paper
                        // corners held in paperCornerMatOfPoint
                        List<Point> cornerList = paperCornerMatOfPoint.toList();
                        for (int i = 0; i < 4; i++)
                        {
                            points[i * 10]     = new Point(cornerList[i].x, cornerList[i].y);
                            points[i * 10 + 1] = new Point(cornerList[i].x + 1, cornerList[i].y);
                            points[i * 10 + 2] = new Point(cornerList[i].x, cornerList[i].y + 1);
                            points[i * 10 + 3] = new Point(cornerList[i].x + 1, cornerList[i].y + 1);
                            points[i * 10 + 4] = new Point(cornerList[i].x, cornerList[i].y - 1);
                            points[i * 10 + 5] = new Point(cornerList[i].x - 1, cornerList[i].y);
                            points[i * 10 + 6] = new Point(cornerList[i].x - 2, cornerList[i].y - 1);
                            points[i * 10 + 7] = new Point(cornerList[i].x, cornerList[i].y - 2);
                            points[i * 10 + 8] = new Point(cornerList[i].x - 2, cornerList[i].y - 2);
                            points[i * 10 + 9] = new Point(cornerList[i].x + 2, cornerList[i].y + 2);
                        }

                        // optionally snap the points onto stronger corners (Harris corner detection):
                        //Imgproc.goodFeaturesToTrack(currentGrayMat, corners, 40, qualityLevel, minDistance, none, blockSize, false, 0.04);
                        //Imgproc.goodFeaturesToTrack(currentGrayMat, corners, 40,0.05,20);

                        corners.fromArray(points);

                        prevFeatures.fromList(corners.toList());
                        currentFeatures.fromList(corners.toList());
                        prevGrayMat = currentGrayMat.clone();

                        // won't run the init branch again
                        initOpticalFlow = false;


                        // create a random color per corner (for drawing; currently unused)
                        for (int i = 0; i < maxCorners; i++)
                        {
                            color.Add(new Scalar((int)(Random.value * 255), (int)(Random.value * 255),
                                                 (int)(Random.value * 255), 255));
                        }
                    }
                    else
                    {
                        // to keep the tracked points fixed, copy prev into current instead:
                        //currentFeatures.fromArray(prevFeatures.toArray());

                        // let the tracked points move with the flow
                        prevFeatures.fromArray(currentFeatures.toArray());

                        // optical flow updates the values in currentFeatures
                        Video.calcOpticalFlowPyrLK(prevGrayMat, currentGrayMat, prevFeatures, currentFeatures, mMOBStatus, err);
                        //Debug.Log(st.rows());

                        // change to points list
                        List <Point> prevList  = prevFeatures.toList(),
                                     nextList  = currentFeatures.toList();
                        List <byte> byteStatus = mMOBStatus.toList();


                        int x = 0;
                        int y = byteStatus.Count - 1;

                        for (x = 0; x < y; x++)
                        {
                            if (byteStatus[x] == 1)
                            {
                                Point pt  = nextList[x];
                                Point pt2 = prevList[x];

                                Imgproc.circle(mainMat, pt, 10, new Scalar(0, 0, 255), -1);

                                Imgproc.line(mainMat, pt, pt2, new Scalar(0, 0, 255));
                            }
                        }

                        // draw the data
                        //for (int i = 0; i < prevList.Count; i++)
                        //{
                        //    //Imgproc.circle(frame, prevList[i], 5, color[10]);
                        //    Imgproc.circle(mainMat, nextList[i], 10, new Scalar(0, 0, 255), -1);

                        //    Imgproc.line(mainMat, prevList[i], nextList[i], color[20]);
                        //}


                        List <List <Point> > cornersFeatures = new List <List <Point> >(40);
                        cornersFeatures.Add(new List <Point>(10));

                        // put the corners features data into the list
                        int  tmp  = 0;
                        bool last = true;
                        for (int i = 0; i < nextList.Count - 1; i++)
                        {
                            if (Mathf.Abs((float)(nextList[i].x - nextList[i + 1].x)) < 10 && Mathf.Abs((float)(nextList[i].y - nextList[i + 1].y)) < 10)
                            {
                                if (last == true)
                                {
                                    cornersFeatures[tmp].Add(nextList[i]);
                                }
                                else
                                {
                                    cornersFeatures.Add(new List <Point>(10));
                                    tmp = tmp + 1;
                                    cornersFeatures[tmp].Add(nextList[i]);
                                }
                                last = true;
                            }
                            else
                            {
                                last = false;
                            }
                        }

                        // count the corner clusters; iterate backwards so that
                        // RemoveAt does not skip the element after a removal
                        int manyCornersFeatures = 0;
                        for (int i = cornersFeatures.Count - 1; i >= 0; i--)
                        {
                            Debug.Log(cornersFeatures[i].Count);
                            if (cornersFeatures[i].Count < 5)
                            {
                                cornersFeatures.RemoveAt(i);
                            }
                            else
                            {
                                manyCornersFeatures++;
                            }
                        }

                        //Debug.Log("Length" + manyCornersFeatures);

                        // if exactly 4 corner clusters remain, display the virtual document in the frame
                        // doing the perspective transform
                        if (manyCornersFeatures == 4)
                        {
                            Mat documentMat = new Mat(document.height, document.width, CvType.CV_8UC3);
                            Utils.texture2DToMat(document, documentMat);

                            List <Point> srcPoints = new List <Point>();
                            srcPoints.Add(new Point(0, 0));
                            srcPoints.Add(new Point(documentMat.cols(), 0));
                            srcPoints.Add(new Point(documentMat.cols(), documentMat.rows()));
                            srcPoints.Add(new Point(0, documentMat.rows()));


                            Mat srcPointsMat = Converters.vector_Point_to_Mat(srcPoints, CvType.CV_32F);


                            List <Point> dstPoints = new List <Point>()
                            {
                                cornersFeatures[0][0], cornersFeatures[1][0], cornersFeatures[2][0], cornersFeatures[3][0]
                            };
                            Mat dstPointsMat = Converters.vector_Point_to_Mat(dstPoints, CvType.CV_32F);


                            //Make perspective transform
                            Mat m         = Imgproc.getPerspectiveTransform(srcPointsMat, dstPointsMat);
                            Mat warpedMat = new Mat(new Size(), documentMat.type());
                            Debug.Log((cornersFeatures[1][0].x - cornersFeatures[0][0].x) + " " + (cornersFeatures[2][0].y - cornersFeatures[1][0].y));
                            Imgproc.warpPerspective(documentMat, warpedMat, m, mainMat.size(), Imgproc.INTER_LINEAR);

                            warpedMat.convertTo(warpedMat, CvType.CV_8UC3);
                            // same size as frame
                            Mat dst = new Mat(mainMat.size(), CvType.CV_8UC3);
                            //Mat dst = new Mat(frame.size(), CvType.CV_8UC3);
                            //Mat dst2 = new Mat();

                            Imgproc.cvtColor(mainMat, dst, Imgproc.COLOR_RGBA2RGB);

                            //dst.setTo(new Scalar(0, 255, 0));
                            //currentGrayMat.copyTo(dst);
                            //dst.convertTo(dst, CvType.CV_8UC3);


                            //Imgproc.cvtColor(currentGrayMat, frame, Imgproc.COLOR_GRAY2RGBA);

                            Mat img1 = new Mat();
                            Mat mask = new Mat(mainMat.size(), CvType.CV_8UC1, new Scalar(0));
                            Imgproc.cvtColor(warpedMat, img1, Imgproc.COLOR_RGB2GRAY);
                            Imgproc.Canny(img1, img1, 100, 200);
                            List <MatOfPoint> doc_contours = new List <MatOfPoint>();
                            Imgproc.findContours(img1, doc_contours, new Mat(), Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_NONE);
                            Imgproc.drawContours(mask, doc_contours, -1, new Scalar(255), Core.FILLED);

                            warpedMat.copyTo(dst, mask);

                            dst.convertTo(dst, CvType.CV_8UC3);

                            Debug.Log("dst" + dst.size());


                            Imgproc.cvtColor(dst, mainMat, Imgproc.COLOR_RGB2RGBA);


                            // display on the right
                            Texture2D finalTexture = new Texture2D(dst.width(), dst.height(), TextureFormat.RGB24, false);
                            Utils.matToTexture2D(dst, finalTexture);

                            targetRawImage.texture = finalTexture;
                        }


                        // current frame to old frame
                        prevGrayMat = currentGrayMat.clone();



                        //Imgproc.cvtColor(currentGrayMat, frame, Imgproc.COLOR_GRAY2RGBA);

                        // display matrix on the screen
                        Utils.fastMatToTexture2D(mainMat, texture);
                    }
                }
            }
        }
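
The overlay branch above boils down to three OpenCV calls: build a homography from the document rectangle to the four tracked corners, warp the document into frame space, and composite it through a mask. A minimal sketch of just that step, assuming RGB (CV_8UC3) inputs and the Converters helper used above (its namespace here is an assumption); it fills the destination quad directly with fillConvexPoly instead of the Canny-plus-findContours mask the example builds:

using System.Collections.Generic;
using OpenCVForUnity.CoreModule;
using OpenCVForUnity.ImgprocModule;
using OpenCVForUnity.UtilsModule;   // Converters (namespace is an assumption)

public static class DocumentOverlay
{
    // Warps src onto the four dstCorners (clockwise from top-left) and
    // composites the result into frameRgb in place.
    public static void Warp(Mat src, Mat frameRgb, List<Point> dstCorners)
    {
        List<Point> srcCorners = new List<Point> {
            new Point(0, 0),
            new Point(src.cols(), 0),
            new Point(src.cols(), src.rows()),
            new Point(0, src.rows())
        };

        using (Mat srcMat = Converters.vector_Point_to_Mat(srcCorners, CvType.CV_32F))
        using (Mat dstMat = Converters.vector_Point_to_Mat(dstCorners, CvType.CV_32F))
        using (Mat h = Imgproc.getPerspectiveTransform(srcMat, dstMat))
        using (Mat warped = new Mat(frameRgb.size(), src.type()))
        using (Mat mask = new Mat(frameRgb.size(), CvType.CV_8UC1, new Scalar(0)))
        using (MatOfPoint poly = new MatOfPoint(dstCorners.ToArray()))
        {
            Imgproc.warpPerspective(src, warped, h, frameRgb.size(), Imgproc.INTER_LINEAR);
            Imgproc.fillConvexPoly(mask, poly, new Scalar(255)); // mask = destination quad
            warped.copyTo(frameRgb, mask);                       // composite only inside the quad
        }
    }
}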
Example #13
        private void UpdateARHeadTransform(List <Vector2> points)
        {
            // The coordinates in pixels of the object detected on the image projected onto the plane.
            imagePoints.fromArray(
                new Point((points [38].x + points [41].x) / 2, (points [38].y + points [41].y) / 2), //l eye (Interpupillary breadth)
                new Point((points [43].x + points [46].x) / 2, (points [43].y + points [46].y) / 2), //r eye (Interpupillary breadth)
                new Point(points [33].x, points [33].y),                                             //nose (Nose top)
                new Point(points [48].x, points [48].y),                                             //l mouth (Mouth breadth)
                new Point(points [54].x, points [54].y),                                             //r mouth (Mouth breadth)
                new Point(points [0].x, points [0].y),                                               //l ear (Bitragion breadth)
                new Point(points [16].x, points [16].y)                                              //r ear (Bitragion breadth)
                );

            //Estimate head pose.
            Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);


            if (tvec.get(2, 0) [0] > 0)
            {
                if (Mathf.Abs((float)(points [43].y - points [46].y)) > Mathf.Abs((float)(points [42].x - points [45].x)) / 6.0)
                {
                    if (isShowingEffects)
                    {
                        rightEye.SetActive(true);
                    }
                }

                if (Mathf.Abs((float)(points [38].y - points [41].y)) > Mathf.Abs((float)(points [39].x - points [36].x)) / 6.0)
                {
                    if (isShowingEffects)
                    {
                        leftEye.SetActive(true);
                    }
                }
                if (isShowingHead)
                {
                    head.SetActive(true);
                }
                if (isShowingAxes)
                {
                    axes.SetActive(true);
                }


                float noseDistance  = Mathf.Abs((float)(points [27].y - points [33].y));
                float mouthDistance = Mathf.Abs((float)(points [62].y - points [66].y));
                if (mouthDistance > noseDistance / 5.0)
                {
                    if (isShowingEffects)
                    {
                        mouth.SetActive(true);
                        foreach (ParticleSystem ps in mouthParticleSystem)
                        {
                            ps.enableEmission = true;
                            ps.startSize      = 40 * (mouthDistance / noseDistance);
                        }
                    }
                }
                else
                {
                    if (isShowingEffects)
                    {
                        foreach (ParticleSystem ps in mouthParticleSystem)
                        {
                            ps.enableEmission = false;
                        }
                    }
                }

                Calib3d.Rodrigues(rvec, rotMat);

                transformationM.SetRow(0, new Vector4((float)rotMat.get(0, 0) [0], (float)rotMat.get(0, 1) [0], (float)rotMat.get(0, 2) [0], (float)tvec.get(0, 0) [0] / 1000));
                transformationM.SetRow(1, new Vector4((float)rotMat.get(1, 0) [0], (float)rotMat.get(1, 1) [0], (float)rotMat.get(1, 2) [0], (float)tvec.get(1, 0) [0] / 1000));
                transformationM.SetRow(2, new Vector4((float)rotMat.get(2, 0) [0], (float)rotMat.get(2, 1) [0], (float)rotMat.get(2, 2) [0], (float)tvec.get(2, 0) [0] / 1000 / webCamTextureToMatHelper.DOWNSCALE_RATIO));
                transformationM.SetRow(3, new Vector4(0, 0, 0, 1));

                // right-handed coordinates system (OpenCV) to left-handed one (Unity)
                ARM = invertYM * transformationM;

                // Apply Z axis inverted matrix.
                ARM = ARM * invertZM;

                // Apply camera transform matrix.
                ARM = ARCamera.transform.localToWorldMatrix * ARM;

                ARUtils.SetTransformFromMatrix(ARGameObject.transform, ref ARM);
            }
        }
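
The handedness conversion above (invertYM * transformationM, then * invertZM) is the usual OpenCV-to-Unity bridge: flip the Y axis because image y grows downward, and flip Z to move from OpenCV's right-handed frame to Unity's left-handed one. A sketch of it as an isolated helper, assuming the rotation matrix has been read out row-major into a double[9] (e.g. via rotMat.get(0, 0, r)) and the translation into a double[3]:

using UnityEngine;

public static class PoseConversion
{
    static readonly Matrix4x4 invertY = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1));
    static readonly Matrix4x4 invertZ = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));

    // r: 3x3 rotation flattened row-major (9 doubles), t: translation (3 doubles).
    public static Matrix4x4 ToUnity(double[] r, double[] t)
    {
        Matrix4x4 m = Matrix4x4.identity;
        m.SetRow(0, new Vector4((float)r[0], (float)r[1], (float)r[2], (float)t[0]));
        m.SetRow(1, new Vector4((float)r[3], (float)r[4], (float)r[5], (float)t[1]));
        m.SetRow(2, new Vector4((float)r[6], (float)r[7], (float)r[8], (float)t[2]));
        return invertY * m * invertZ;   // right-handed (OpenCV) -> left-handed (Unity)
    }
}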
Example #14
        // Update is called once per frame
        public override void UpdateValue()
        {
            if (dlibFaceLanmarkGetter == null)
            {
                return;
            }

            didUpdateHeadRotation = false;


            List <Vector2> points = dlibFaceLanmarkGetter.getFaceLanmarkPoints();

            if (points != null)
            {
                MatOfPoint3f objectPoints = null;
                if (points.Count == 68)
                {
                    objectPoints = objectPoints68;

                    imagePoints.fromArray(
                        new Point((points [38].x + points [41].x) / 2, (points [38].y + points [41].y) / 2), //l eye (Interpupillary breadth)
                        new Point((points [43].x + points [46].x) / 2, (points [43].y + points [46].y) / 2), //r eye (Interpupillary breadth)
                        new Point(points [30].x, points [30].y),                                             //nose (Nose top)
                        new Point(points [48].x, points [48].y),                                             //l mouth (Mouth breadth)
                        new Point(points [54].x, points [54].y),                                             //r mouth (Mouth breadth)
                        new Point(points [0].x, points [0].y),                                               //l ear (Bitragion breadth)
                        new Point(points [16].x, points [16].y)                                              //r ear (Bitragion breadth)
                        );
                }

                // Estimate head pose.
                if (rvec == null || tvec == null)
                {
                    rvec = new Mat(3, 1, CvType.CV_64FC1);
                    tvec = new Mat(3, 1, CvType.CV_64FC1);
                    Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
                }

                double tvec_z = tvec.get(2, 0) [0];

                if (double.IsNaN(tvec_z) || tvec_z < 0)    // if tvec is wrong data, do not use extrinsic guesses.
                {
                    Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
                }
                else
                {
                    Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec, true, Calib3d.SOLVEPNP_ITERATIVE);
                }
                //Debug.Log (tvec.dump());

                if (!double.IsNaN(tvec_z))
                {
                    // Convert to unity pose data.
                    double[] rvecArr = new double[3];
                    rvec.get(0, 0, rvecArr);
                    double[] tvecArr = new double[3];
                    tvec.get(0, 0, tvecArr);
                    PoseData poseData = ARUtils.ConvertRvecTvecToPoseData(rvecArr, tvecArr);

                    // Changes in pos/rot below these thresholds are ignored.
                    if (enableLowPassFilter)
                    {
                        ARUtils.LowpassPoseData(ref oldPoseData, ref poseData, positionLowPass, rotationLowPass);
                    }
                    oldPoseData = poseData;


                    transformationM = Matrix4x4.TRS(poseData.pos, poseData.rot, Vector3.one);

                    // right-handed coordinates system (OpenCV) to left-handed one (Unity)
                    ARM = invertYM * transformationM;

                    // Apply Z axis inverted matrix.
                    ARM = ARM * invertZM;



                    headRotation = ARUtils.ExtractRotationFromMatrix(ref ARM);

                    didUpdateHeadRotation = true;
                }
            }
        }
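
ARUtils.LowpassPoseData is a library call; purely to make the idea it implements concrete (suppress frame-to-frame changes below a threshold so the head pose does not jitter), here is a hand-rolled stand-in. The struct and threshold defaults are assumptions for illustration only, not the library's implementation:

using UnityEngine;

public struct SimplePose
{
    public Vector3 pos;
    public Quaternion rot;
}

public static class PoseFilter
{
    // Returns newPose, except that position/rotation changes smaller than the
    // thresholds are snapped back to oldPose to suppress estimation jitter.
    public static SimplePose Lowpass(SimplePose oldPose, SimplePose newPose,
                                     float posThreshold = 0.005f, float rotThresholdDeg = 2f)
    {
        if (Vector3.Distance(oldPose.pos, newPose.pos) < posThreshold)
            newPose.pos = oldPose.pos;
        if (Quaternion.Angle(oldPose.rot, newPose.rot) < rotThresholdDeg)
            newPose.rot = oldPose.rot;
        return newPose;
    }
}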
Example #15
    /// <summary>
    /// Update is called once per frame
    /// </summary>
    void Update()
    {
        if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
        {
            Mat rgbaMat = webCamTextureToMatHelper.GetMat();

            OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);

            // detect face rects
            List <UnityEngine.Rect> detectResult = faceLandmarkDetector.Detect();

            if (detectResult.Count > 0)
            {
                //detect landmark points
                List <Vector2> points = faceLandmarkDetector.DetectLandmark(detectResult[0]);
                //draw the landmark points on the mat
                OpenCVForUnityUtils.DrawFaceLandmark(rgbaMat, points, new Scalar(0, 255, 0, 255), 2);

                imagePoints.fromArray(
                    new Point((points[38].x + points[41].x) / 2, (points[38].y + points[41].y) / 2), //l eye (Interpupillary breadth)
                    new Point((points[43].x + points[46].x) / 2, (points[43].y + points[46].y) / 2), //r eye (Interpupillary breadth)
                    new Point(points[30].x, points[30].y),                                           //nose (Nose top)
                    new Point(points[48].x, points[48].y),                                           //l mouth (Mouth breadth)
                    new Point(points[54].x, points[54].y),                                           //r mouth (Mouth breadth)
                    new Point(points[0].x, points[0].y),                                             //l ear (Bitragion breadth)
                    new Point(points[16].x, points[16].y)                                            //r ear (Bitragion breadth)
                    );

                // Estimate head pose.
                if (rvec == null || tvec == null)
                {
                    rvec = new Mat(3, 1, CvType.CV_64FC1);
                    tvec = new Mat(3, 1, CvType.CV_64FC1);
                    //find the object pose from the 3D-2D point correspondences; rvec is the rotation, tvec is the translation vector
                    Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
                }

                double tvec_z = tvec.get(2, 0)[0];

                if (double.IsNaN(tvec_z) || tvec_z < 0)
                {
                    Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
                }
                else
                {
                    Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec, true, Calib3d.SOLVEPNP_ITERATIVE);
                }

                if (!double.IsNaN(tvec_z) && points.Count == 68)
                {
                    cubismParameterDictionary.Clear();
                    Calib3d.Rodrigues(rvec, rotMat);

                    transformationM.SetRow(0, new Vector4((float)rotMat.get(0, 0)[0], (float)rotMat.get(0, 1)[0], (float)rotMat.get(0, 2)[0], (float)tvec.get(0, 0)[0]));
                    transformationM.SetRow(1, new Vector4((float)rotMat.get(1, 0)[0], (float)rotMat.get(1, 1)[0], (float)rotMat.get(1, 2)[0], (float)tvec.get(1, 0)[0]));
                    transformationM.SetRow(2, new Vector4((float)rotMat.get(2, 0)[0], (float)rotMat.get(2, 1)[0], (float)rotMat.get(2, 2)[0], (float)tvec.get(2, 0)[0]));
                    transformationM.SetRow(3, new Vector4(0, 0, 0, 1));

                    ARM = invertYM * transformationM * invertZM;
                    Vector3 forward;
                    forward.x = ARM.m02;
                    forward.y = ARM.m12;
                    forward.z = ARM.m22;

                    Vector3 upwards;
                    upwards.x = ARM.m01;
                    upwards.y = ARM.m11;
                    upwards.z = ARM.m21;

                    Vector3 angles  = Quaternion.LookRotation(forward, upwards).eulerAngles;
                    float   rotateX = angles.x > 180 ? angles.x - 360 : angles.x;
                    cubismParameterDictionary.Add(ParamAngleY, (float)Math.Round(rotateX));
                    float rotateY = angles.y > 180 ? angles.y - 360 : angles.y;
                    cubismParameterDictionary.Add(ParamAngleX, (float)Math.Round(-rotateY) * 2);
                    float rotateZ = angles.z > 180 ? angles.z - 360 : angles.z;
                    cubismParameterDictionary.Add(ParamAngleZ, (float)Math.Round(-rotateZ) * 2);
                    //Debug.Log("X" + rotateX + " Y" + rotateY + " Z" + rotateZ);

                    //ParamAngleX.BlendToValue(BlendMode,(float)(Math.Round(-rotateY) * 2));
                    //ParamAngleY.BlendToValue(BlendMode, (float)Math.Round(rotateX));
                    //ParamAngleZ.BlendToValue(BlendMode, (float)Math.Round(-rotateZ) * 2);

                    float eyeOpen_L = Mathf.Clamp(Mathf.Abs(points[43].y - points[47].y) / (Mathf.Abs(points[43].x - points[44].x) * 0.75f), -0.1f, 2.0f);
                    if (eyeOpen_L >= 0.8f)
                    {
                        eyeOpen_L = 1f;
                    }
                    else
                    {
                        eyeOpen_L = 0;
                    }


                    float eyeOpen_R = Mathf.Clamp(Mathf.Abs(points[38].y - points[40].y) / (Mathf.Abs(points[37].x - points[38].x) * 0.75f), -0.1f, 2.0f);
                    if (eyeOpen_R >= 0.8f)
                    {
                        eyeOpen_R = 1f;
                    }
                    else
                    {
                        eyeOpen_R = 0;
                    }

                    // ParamEyeROpen.BlendToValue(BlendMode, eyeOpen_R);
                    cubismParameterDictionary.Add(ParamEyeROpen, eyeOpen_R);
                    // ParamEyeLOpen.BlendToValue(BlendMode, eyeOpen_L);
                    cubismParameterDictionary.Add(ParamEyeLOpen, eyeOpen_L);
                    // ParamEyeBallX.BlendToValue(BlendMode, (float)rotateY / 30f);
                    cubismParameterDictionary.Add(ParamEyeBallX, rotateY / 30f);
                    // ParamEyeBallX.BlendToValue(BlendMode, (float)-rotateX / 30f - 0.25f);
                    cubismParameterDictionary.Add(ParamEyeBallY, (float)-rotateX / 30f - 0.25f);

                    float RY = Mathf.Abs(points[19].y - points[27].y) / Mathf.Abs(points[27].y - points[29].y);
                    RY -= 1;
                    RY *= 4f;
                    float LY = Mathf.Abs(points[24].y - points[27].y) / Mathf.Abs(points[27].y - points[29].y);
                    LY -= 1;
                    LY *= 4f;

                    // ParamBrowRY.BlendToValue(BlendMode, RY);
                    cubismParameterDictionary.Add(ParamBrowRY, RY);
                    // ParamBrowLY.BlendToValue(BlendMode, LY);
                    cubismParameterDictionary.Add(ParamBrowLY, LY);
                    float mouthOpen = Mathf.Clamp01(Mathf.Abs(points[62].y - points[66].y) / (Mathf.Abs(points[51].y - points[62].y) + Mathf.Abs(points[66].y - points[57].y)));
                    if (mouthOpen < 0.6f)
                    {
                        mouthOpen = 0;
                    }
                    // ParamMouthOpenY.BlendToValue(BlendMode, mouthOpen);
                    cubismParameterDictionary.Add(ParamMouthOpenY, mouthOpen);
                    float mouthSize = Mathf.Abs(points[48].x - points[54].x) / (Mathf.Abs(points[31].x - points[35].x));
                    // ParamMouthForm.BlendToValue(BlendMode, Mathf.Clamp(mouthSize, -1.0f, 1.0f));
                    cubismParameterDictionary.Add(ParamMouthForm, Mathf.Clamp(mouthSize, -1.0f, 1.0f));
                }
            }
            OpenCVForUnity.Utils.matToTexture2D(rgbaMat, texture, webCamTextureToMatHelper.GetBufferColors());
        }
    }
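
Every Cubism parameter above is a ratio of landmark distances pushed through a threshold or clamp. Extracted as pure functions over the same 68-point dlib indices the example uses:

using System.Collections.Generic;
using UnityEngine;

public static class FaceRatios
{
    // Eye openness: lid gap over (scaled) eye width, binarized at 0.8 as above.
    public static float LeftEyeOpen(IList<Vector2> p)
    {
        float gap   = Mathf.Abs(p[43].y - p[47].y);
        float width = Mathf.Abs(p[43].x - p[44].x) * 0.75f;
        return (gap / width) >= 0.8f ? 1f : 0f;
    }

    // Mouth openness: inner-lip gap normalized by lip thickness, clamped to
    // [0, 1] and zeroed below 0.6 so small gaps read as "closed".
    public static float MouthOpen(IList<Vector2> p)
    {
        float gap  = Mathf.Abs(p[62].y - p[66].y);
        float lips = Mathf.Abs(p[51].y - p[62].y) + Mathf.Abs(p[66].y - p[57].y);
        float open = Mathf.Clamp01(gap / lips);
        return open < 0.6f ? 0f : open;
    }
}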
Example #16
        // Update is called once per frame
        void Update()
        {
            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();


                OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);

                //detect face rects
                List <UnityEngine.Rect> detectResult = faceLandmarkDetector.Detect();

                if (detectResult.Count > 0)
                {
                    //detect landmark points
                    List <Vector2> points = faceLandmarkDetector.DetectLandmark(detectResult [0]);

                    if (displayFacePoints)
                    {
                        OpenCVForUnityUtils.DrawFaceLandmark(rgbaMat, points, new Scalar(0, 255, 0, 255), 2);
                    }

                    MatOfPoint3f objectPoints = null;
                    if (points.Count == 68)
                    {
                        objectPoints = objectPoints68;

                        imagePoints.fromArray(
                            new Point((points [38].x + points [41].x) / 2, (points [38].y + points [41].y) / 2), //l eye (Interpupillary breadth)
                            new Point((points [43].x + points [46].x) / 2, (points [43].y + points [46].y) / 2), //r eye (Interpupillary breadth)
                            new Point(points [30].x, points [30].y),                                             //nose (Tip)
                            new Point(points [33].x, points [33].y),                                             //nose (Subnasale)
                            new Point(points [0].x, points [0].y),                                               //l ear (Bitragion breadth)
                            new Point(points [16].x, points [16].y)                                              //r ear (Bitragion breadth)
                            );


                        float noseDistance  = Mathf.Abs((float)(points [27].y - points [33].y));
                        float mouthDistance = Mathf.Abs((float)(points [62].y - points [66].y));
                    }
                    else if (points.Count == 17)
                    {
                        objectPoints = objectPoints17;

                        imagePoints.fromArray(
                            new Point((points [2].x + points [3].x) / 2, (points [2].y + points [3].y) / 2), //l eye (Interpupillary breadth)
                            new Point((points [4].x + points [5].x) / 2, (points [4].y + points [5].y) / 2), //r eye (Interpupillary breadth)
                            new Point(points [0].x, points [0].y),                                           //nose (Tip)
                            new Point(points [1].x, points [1].y),                                           //nose (Subnasale)
                            new Point(points [6].x, points [6].y),                                           //l ear (Bitragion breadth)
                            new Point(points [8].x, points [8].y)                                            //r ear (Bitragion breadth)
                            );

                        float noseDistance  = Mathf.Abs((float)(points [3].y - points [1].y));
                        float mouthDistance = Mathf.Abs((float)(points [14].y - points [16].y));
                    }
                    else if (points.Count == 6)
                    {
                        objectPoints = objectPoints6;

                        imagePoints.fromArray(
                            new Point((points [2].x + points [3].x) / 2, (points [2].y + points [3].y) / 2), //l eye (Interpupillary breadth)
                            new Point((points [4].x + points [5].x) / 2, (points [4].y + points [5].y) / 2), //r eye (Interpupillary breadth)
                            new Point(points [0].x, points [0].y),                                           //nose (Tip)
                            new Point(points [1].x, points [1].y)                                            //nose (Subnasale)
                            );
                    }
                    else if (points.Count == 5)
                    {
                        objectPoints = objectPoints5;

                        imagePoints.fromArray(
                            new Point(points [3].x, points [3].y), //l eye (Inner corner of the eye)
                            new Point(points [1].x, points [1].y), //r eye (Inner corner of the eye)
                            new Point(points [2].x, points [2].y), //l eye (Tail of the eye)
                            new Point(points [0].x, points [0].y), //r eye (Tail of the eye)
                            new Point(points [4].x, points [4].y)  //nose (Subnasale)
                            );

                        if (fpsMonitor != null)
                        {
                            fpsMonitor.consoleText = "This example mainly supports the 68/17/6-point face landmark formats.";
                        }
                    }

                    // estimate head pose
                    if (rvec == null || tvec == null)
                    {
                        rvec = new Mat(3, 1, CvType.CV_64FC1);
                        tvec = new Mat(3, 1, CvType.CV_64FC1);
                        Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
                    }


                    double tvec_x = tvec.get(0, 0) [0], tvec_y = tvec.get(1, 0) [0], tvec_z = tvec.get(2, 0) [0];

                    bool    isNotInViewport = false;
                    Vector4 pos             = VP * new Vector4((float)tvec_x, (float)tvec_y, (float)tvec_z, 1.0f);
                    if (pos.w != 0)
                    {
                        float x = pos.x / pos.w, y = pos.y / pos.w, z = pos.z / pos.w;
                        if (x < -1.0f || x > 1.0f || y < -1.0f || y > 1.0f || z < -1.0f || z > 1.0f)
                        {
                            isNotInViewport = true;
                        }
                    }

                    if (double.IsNaN(tvec_z) || isNotInViewport)    // if tvec is wrong data, do not use extrinsic guesses. (the estimated object is not in the camera field of view)
                    {
                        Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
                    }
                    else
                    {
                        Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec, true, Calib3d.SOLVEPNP_ITERATIVE);
                    }

                    if (!isNotInViewport)
                    {
                        if (displayHead)
                        {
                            head.SetActive(true);
                        }
                        if (displayAxes)
                        {
                            axes.SetActive(true);
                        }

                        // Convert to unity pose data.
                        double[] rvecArr = new double[3];
                        rvec.get(0, 0, rvecArr);
                        double[] tvecArr = new double[3];
                        tvec.get(0, 0, tvecArr);
                        PoseData poseData = ARUtils.ConvertRvecTvecToPoseData(rvecArr, tvecArr);

                        // Changes in pos/rot below these thresholds are ignored.
                        if (enableLowPassFilter)
                        {
                            ARUtils.LowpassPoseData(ref oldPoseData, ref poseData, positionLowPass, rotationLowPass);
                        }
                        // keep the previous pose before overwriting it, so the
                        // joystick smoothing below compares against the last frame
                        PoseData prevPoseData = oldPoseData;
                        oldPoseData = poseData;

                        // Create transform matrix.
                        transformationM = Matrix4x4.TRS(poseData.pos, poseData.rot, Vector3.one);

                        //move joystick
                        float t = 0.50f;
                        int   xpos;
                        if ((poseData.rot.eulerAngles.x - prevPoseData.rot.eulerAngles.x) < 10 && (poseData.rot.eulerAngles.x - prevPoseData.rot.eulerAngles.x) > 0)
                        {
                            xpos = Convert.ToInt16(Mathf.Lerp(poseData.rot.eulerAngles.x, prevPoseData.rot.eulerAngles.x, t));
                        }
                        else
                        {
                            xpos = Convert.ToInt16(Mathf.Lerp(poseData.rot.eulerAngles.x, prevPoseData.rot.eulerAngles.x, 1));
                        }

                        int ypos;
                        if ((poseData.rot.eulerAngles.y - prevPoseData.rot.eulerAngles.y) < 10 && (poseData.rot.eulerAngles.y - prevPoseData.rot.eulerAngles.y) > 0)
                        {
                            ypos = Convert.ToInt16(Mathf.Lerp(poseData.rot.eulerAngles.y, prevPoseData.rot.eulerAngles.y, t));
                        }
                        else
                        {
                            ypos = Convert.ToInt16(Mathf.Lerp(poseData.rot.eulerAngles.y, prevPoseData.rot.eulerAngles.y, 1));
                        }

                        int rpos;
                        if ((poseData.rot.eulerAngles.z - prevPoseData.rot.eulerAngles.z) < 10 && (poseData.rot.eulerAngles.z - prevPoseData.rot.eulerAngles.z) > 0)
                        {
                            rpos = Convert.ToInt16(Mathf.Lerp(poseData.rot.eulerAngles.z, prevPoseData.rot.eulerAngles.z, t));
                        }
                        else
                        {
                            rpos = Convert.ToInt16(Mathf.Lerp(poseData.rot.eulerAngles.z, prevPoseData.rot.eulerAngles.z, 1));
                        }


                        //int ypos = Convert.ToInt16(Mathf.Lerp(poseData.rot.eulerAngles.y, oldPoseData.rot.eulerAngles.y,t));
                        int zpos = Convert.ToInt16(poseData.pos.z);

                        //zoom
                        int scZ = Convert.ToInt32(Scale(zpos, 400, 800, 0, 32000));

                        //up down
                        int scX;
                        if (xpos >= 0 && xpos < 20)
                        {
                            scX = Convert.ToInt32(Scale(xpos, 0, 10, 29000, 32000));
                        }
                        else
                        {
                            scX = Convert.ToInt32(Scale(xpos, 340, 360, 0, 29000));
                        }

                        //left right
                        int scY = Convert.ToInt32(Scale(ypos, 160, 200, 0, 32000));

                        //roll
                        int scR = Convert.ToInt32(Scale(rpos, 160, 200, 0, 32000));


                        iReport.AxisX  = scX;
                        iReport.AxisY  = scY;
                        iReport.AxisZ  = scZ;
                        iReport.Slider = scR;
                        bool upd = joystick.UpdateVJD(id, ref iReport);

                        string message = "x:" + scX + " y:" + scY + " z:" + scZ + " r:" + scR;
                        Debug.Log(message);
                    }


                    // right-handed coordinates system (OpenCV) to left-handed one (Unity)
                    ARM = invertYM * transformationM;

                    // Apply Z-axis inverted matrix.
                    ARM = ARM * invertZM;

                    if (shouldMoveARCamera)
                    {
                        ARM = ARGameObject.transform.localToWorldMatrix * ARM.inverse;
                        ARUtils.SetTransformFromMatrix(ARCamera.transform, ref ARM);
                    }
                    else
                    {
                        ARM = ARCamera.transform.localToWorldMatrix * ARM;
                        ARUtils.SetTransformFromMatrix(ARGameObject.transform, ref ARM);
                    }
                }

                Imgproc.putText(rgbaMat, "W:" + rgbaMat.width() + " H:" + rgbaMat.height() + " SO:" + Screen.orientation, new Point(5, rgbaMat.rows() - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar(255, 255, 255, 255), 1, Imgproc.LINE_AA, false);



                OpenCVForUnity.UnityUtils.Utils.fastMatToTexture2D(rgbaMat, texture);
            }
        }
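
The example calls a Scale() helper that is not shown in this snippet. Given how its results feed vJoy's 0..32000 axis range, a plausible implementation is a clamped linear remap; the signature below is an assumption, not the original helper:

public static class AxisMath
{
    // Remaps value from [inMin, inMax] to [outMin, outMax], clamping outside
    // the input range so the joystick axis never leaves its valid interval.
    public static double Scale(double value, double inMin, double inMax,
                               double outMin, double outMax)
    {
        double t = (value - inMin) / (inMax - inMin);
        if (t < 0) t = 0;
        if (t > 1) t = 1;
        return outMin + t * (outMax - outMin);
    }
}

// e.g. Scale(600, 400, 800, 0, 32000) == 16000 (mid-range depth -> mid-range axis)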
Example #17
        // Update is called once per frame
        void Update()
        {
            if (!initDone)
            {
                return;
            }

            if (screenOrientation != Screen.orientation)
            {
                screenOrientation = Screen.orientation;
                updateLayout();
            }

                        #if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
            if (webCamTexture.width > 16 && webCamTexture.height > 16)
            {
                        #else
            if (webCamTexture.didUpdateThisFrame)
            {
                        #endif
                Utils.webCamTextureToMat(webCamTexture, rgbaMat, colors);
                // flip to correct the orientation.
                if (webCamDevice.isFrontFacing)
                {
                    if (webCamTexture.videoRotationAngle == 0)
                    {
                        Core.flip(rgbaMat, rgbaMat, 1);
                    }
                    else if (webCamTexture.videoRotationAngle == 90)
                    {
                        Core.flip(rgbaMat, rgbaMat, 0);
                    }
                    else if (webCamTexture.videoRotationAngle == 180)
                    {
                        Core.flip(rgbaMat, rgbaMat, 0);
                    }
                    else if (webCamTexture.videoRotationAngle == 270)
                    {
                        Core.flip(rgbaMat, rgbaMat, 1);
                    }
                }
                else
                {
                    if (webCamTexture.videoRotationAngle == 180)
                    {
                        Core.flip(rgbaMat, rgbaMat, -1);
                    }
                    else if (webCamTexture.videoRotationAngle == 270)
                    {
                        Core.flip(rgbaMat, rgbaMat, -1);
                    }
                }
                // convert the image to grayscale
                Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);

                if (faceTracker.getPoints().Count <= 0)
                {
                    Debug.Log("detectFace");

                    // equalize the grayscale histogram, then detect faces
                    using (Mat equalizeHistMat = new Mat())
                        using (MatOfRect faces = new MatOfRect())
                        {
                            Imgproc.equalizeHist(grayMat, equalizeHistMat);

                            cascade.detectMultiScale(equalizeHistMat, faces, 1.1f, 2, 0
                                                     | Objdetect.CASCADE_FIND_BIGGEST_OBJECT
                                                     | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size(equalizeHistMat.cols() * 0.15, equalizeHistMat.cols() * 0.15), new Size());

                            if (faces.rows() > 0)
                            {
                                Debug.Log("--------------------------faces");
                                Debug.Log("faces " + faces.dump());
                                Debug.Log("--------------------------faces");
                                // add the initial face coordinates from the MatOfRect
                                faceTracker.addPoints(faces);

                                // draw the face rectangles
                                OpenCVForUnity.Rect[] rects = faces.toArray();
                                for (int i = 0; i < rects.Length; i++)
                                {
                                    Core.rectangle(rgbaMat, new Point(rects[i].x, rects[i].y), new Point(rects[i].x + rects[i].width, rects[i].y + rects[i].height), new Scalar(255, 0, 0, 255), 2);
                                }
                            }
                        }
                }

                // track the face points. if face points <= 0, always returns false.
                if (faceTracker.track(grayMat, faceTrackerParams))
                {
                    if (isDrawPoints)
                    {
                        //Debug.Log("--------------------------100-----------------------------------");
                        faceTracker.draw(rgbaMat, new Scalar(255, 0, 0, 255), new Scalar(0, 255, 0, 255));
                    }
                    //Debug.Log("--------------------------1000");
                    //Core.putText(rgbaMat, "'Tap' or 'Space Key' to Reset", new Point(5, rgbaMat.rows() - 5), Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar(255, 255, 255, 255), 2, Core.LINE_AA, false);

                    Point[] points = faceTracker.getPoints()[0];

                    //eyeWidth = points[36] - points[31];

/*
 *                  Debug.Log("--------------------------0");
 *                  Debug.Log(points[31]);
 *                  Debug.Log("--------------------------2");
 *                  Debug.Log(points[36]);
 *                  Debug.Log("--------------------------1");
 */
                    if (points.Length > 0)
                    {
                        imagePoints.fromArray(
                            points[31],                             //l eye
                            points[36],                             //r eye
                            points[67],                             // nose
                            points[48],                             //l mouth
                            points[54]                              //r mouth
                            );

                        Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);

                        bool isRefresh = false;

                        if (tvec.get(2, 0)[0] > 0 && tvec.get(2, 0)[0] < 1200 * ((float)webCamTexture.width / (float)width))
                        {
                            isRefresh = true;

                            if (oldRvec == null)
                            {
                                oldRvec = new Mat();
                                rvec.copyTo(oldRvec);
                            }
                            if (oldTvec == null)
                            {
                                oldTvec = new Mat();
                                tvec.copyTo(oldTvec);
                            }

                            // filter rvec noise.
                            using (Mat absDiffRvec = new Mat())
                            {
                                Core.absdiff(rvec, oldRvec, absDiffRvec);

                                using (Mat cmpRvec = new Mat())
                                {
                                    Core.compare(absDiffRvec, new Scalar(rvecNoiseFilterRange), cmpRvec, Core.CMP_GT);

                                    if (Core.countNonZero(cmpRvec) > 0)
                                    {
                                        isRefresh = false;
                                    }
                                }
                            }

                            // filter tvec noise.
                            using (Mat absDiffTvec = new Mat())
                            {
                                Core.absdiff(tvec, oldTvec, absDiffTvec);
                                using (Mat cmpTvec = new Mat())
                                {
                                    Core.compare(absDiffTvec, new Scalar(tvecNoiseFilterRange), cmpTvec, Core.CMP_GT);

                                    if (Core.countNonZero(cmpTvec) > 0)
                                    {
                                        isRefresh = false;
                                    }
                                }
                            }
                        }

                        if (isRefresh)
                        {
                            if (!rightEye.activeSelf)
                            {
                                rightEye.SetActive(true);
                            }
                            if (!leftEye.activeSelf)
                            {
                                leftEye.SetActive(true);
                            }

                            rvec.copyTo(oldRvec);
                            tvec.copyTo(oldTvec);

                            Calib3d.Rodrigues(rvec, rotM);

                            transformationM.SetRow(0, new Vector4((float)rotM.get(0, 0)[0], (float)rotM.get(0, 1)[0], (float)rotM.get(0, 2)[0], (float)tvec.get(0, 0)[0]));
                            transformationM.SetRow(1, new Vector4((float)rotM.get(1, 0)[0], (float)rotM.get(1, 1)[0], (float)rotM.get(1, 2)[0], (float)tvec.get(1, 0)[0]));
                            transformationM.SetRow(2, new Vector4((float)rotM.get(2, 0)[0], (float)rotM.get(2, 1)[0], (float)rotM.get(2, 2)[0], (float)tvec.get(2, 0)[0]));
                            transformationM.SetRow(3, new Vector4(0, 0, 0, 1));

                            modelViewMtrx = lookAtM * transformationM * invertZM;
                            ARCamera.worldToCameraMatrix = modelViewMtrx;
                        }
                    }
                }
                Utils.matToTexture2D(rgbaMat, texture, colors);
            }

            if (Input.GetKeyUp(KeyCode.Space) || Input.touchCount > 0)
            {
                faceTracker.reset();
                if (oldRvec != null)
                {
                    oldRvec.Dispose();
                    oldRvec = null;
                }
                if (oldTvec != null)
                {
                    oldTvec.Dispose();
                    oldTvec = null;
                }
                ARCamera.ResetWorldToCameraMatrix();

                rightEye.SetActive(false);
                leftEye.SetActive(false);
            }
        }

        void OnDisable()
        {
            webCamTexture.Stop();
        }
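
The absdiff/compare noise gate above can be restated without Mats: accept the new rvec/tvec only if every component moved less than a per-frame range since the last accepted pose. A compact sketch; it would be called once with rvecNoiseFilterRange and once with tvecNoiseFilterRange:

public static class PoseNoiseGate
{
    // True if no component of newVec jumped more than range away from oldVec;
    // a jump that large is treated as tracker noise and the pose is rejected.
    public static bool WithinRange(double[] oldVec, double[] newVec, double range)
    {
        for (int i = 0; i < newVec.Length; i++)
        {
            if (System.Math.Abs(newVec[i] - oldVec[i]) > range)
                return false;
        }
        return true;
    }
}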
Example #18

    // Update is called once per frame
    void Update()
    {
        if (!IsStarted)
        {
            return;
        }
        Mat grayMat = webCamTextureToMat.GetMat();

        Imgproc.cvtColor(grayMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
        //Debug.Log("mMOP2fptsPrev.rows() : " + mMOP2fptsPrev.rows().ToString());

        //Debug.Log("rgbaMat.rows() : " + rgbaMat.rows().ToString());
        //Debug.Log("matOpFlowThis.rows() : " + matOpFlowThis.rows().ToString());

        if (mMOP2fptsPrev.rows() == 0)
        {
            // first time through the loop, so we need prev and this mats
            // plus prev points

            // get this mat and copy it to the prev mat
            grayMat.copyTo(matOpFlowThis);
            matOpFlowThis.copyTo(matOpFlowPrev);

            // get prev corners
            Imgproc.goodFeaturesToTrack(matOpFlowPrev, MOPcorners, iGFFTMax, 0.1, 100);
            mMOP2fptsPrev.fromArray(MOPcorners.toArray());

            // get safe copy of this corners
            mMOP2fptsPrev.copyTo(mMOP2fptsSafe);
        }
        else
        {
            // we've been through before, so this mat is valid;
            // copy it to the prev mat
            matOpFlowThis.copyTo(matOpFlowPrev);

            // get this mat
            grayMat.copyTo(matOpFlowThis);

            // get the corners for this mat
            Imgproc.goodFeaturesToTrack(matOpFlowThis, MOPcorners, iGFFTMax, 0.1, 100);
            mMOP2fptsThis.fromArray(MOPcorners.toArray());

            // retrieve the corners from the prev mat
            // (saves calculating them again)
            mMOP2fptsSafe.copyTo(mMOP2fptsPrev);

            // and save this corners for next time through
            mMOP2fptsThis.copyTo(mMOP2fptsSafe);
        }


        /*
         *  Parameters:
         *      prevImg first 8-bit input image
         *      nextImg second input image
         *      prevPts vector of 2D points for which the flow needs to be found; point coordinates must be single-precision floating-point numbers.
         *      nextPts output vector of 2D points (with single-precision floating-point coordinates) containing the calculated new positions of input features in the second image; when OPTFLOW_USE_INITIAL_FLOW flag is passed, the vector must have the same size as in the input.
         *      status output status vector (of unsigned chars); each element of the vector is set to 1 if the flow for the corresponding features has been found, otherwise, it is set to 0.
         *      err output vector of errors; each element of the vector is set to an error for the corresponding feature, type of the error measure can be set in flags parameter; if the flow wasn't found then the error is not defined (use the status parameter to find such cases).
         */
        Video.calcOpticalFlowPyrLK(matOpFlowPrev, matOpFlowThis, mMOP2fptsPrev, mMOP2fptsThis, mMOBStatus, mMOFerr);

        if (!mMOBStatus.empty())
        {
            List <Point> cornersPrev = mMOP2fptsPrev.toList();
            List <Point> cornersThis = mMOP2fptsThis.toList();
            List <byte>  byteStatus  = mMOBStatus.toList();

            int x = 0;
            int y = byteStatus.Count - 1;

            int num_distance = 0;
            for (x = 0; x < y; x++)
            {
                if (byteStatus[x] == 1)
                {
                    Point pt  = cornersThis[x];
                    Point pt2 = cornersPrev[x];

                    Imgproc.circle(grayMat, pt, 5, colorRed, iLineThickness - 1);

                    Imgproc.line(grayMat, pt, pt2, colorRed, iLineThickness);
                    double distance = System.Math.Sqrt(System.Math.Pow((pt2.x - pt.x), 2.0) + System.Math.Pow((pt2.y - pt.y), 2.0));
                    if (distance > 20)
                    {
                        num_distance++;
                    }

                    //Utilities.Debug("Distance[" + x + "] : " + distance);
                    //Debug.Log("Distance[" + x + "] : " + distance);
                }
            }
            Debug.Log("Num of Distance : " + num_distance);
            if (num_distance > 0)
            {
                Debug.Log("Movement Detected !!");
            }
        }

        //              Imgproc.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
        //this.GetComponent<CVCore>().Add(0, rgbaMat);
        Utils.matToTexture2D(grayMat, texture, colors);
        gameObject.GetComponent <Renderer>().material.mainTexture = texture;
    }
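
The movement test above reduces to counting tracked features whose displacement exceeds a pixel threshold. A standalone version of that count (Point is OpenCVForUnity's; the module namespace is an assumption):

using System.Collections.Generic;
using OpenCVForUnity.CoreModule;

public static class MotionDetector
{
    // Counts the successfully tracked features (status == 1) that moved more
    // than minPixels between the previous and current frames.
    public static int CountMoved(List<Point> prev, List<Point> curr,
                                 List<byte> status, double minPixels)
    {
        int n = System.Math.Min(status.Count, System.Math.Min(prev.Count, curr.Count));
        int moved = 0;
        for (int i = 0; i < n; i++)
        {
            if (status[i] != 1) continue;
            double dx = curr[i].x - prev[i].x;
            double dy = curr[i].y - prev[i].y;
            if (System.Math.Sqrt(dx * dx + dy * dy) > minPixels)
                moved++;
        }
        return moved;
    }
}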
Example #19
        private IEnumerator init()
        {
            axes.SetActive(false);
            head.SetActive(false);
            rightEye.SetActive(false);
            leftEye.SetActive(false);
            mouth.SetActive(false);


            if (webCamTexture != null)
            {
                faceTracker.reset();

                webCamTexture.Stop();
                initDone = false;

                rgbaMat.Dispose();
                grayMat.Dispose();
                cascade.Dispose();
                camMatrix.Dispose();
                distCoeffs.Dispose();
            }

            // Checks how many and which cameras are available on the device
            for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++)
            {
                if (WebCamTexture.devices [cameraIndex].isFrontFacing == isFrontFacing)
                {
                    Debug.Log(cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing);

                    webCamDevice = WebCamTexture.devices [cameraIndex];

                    webCamTexture = new WebCamTexture(webCamDevice.name, width, height);

                    break;
                }
            }

            if (webCamTexture == null)
            {
                webCamDevice  = WebCamTexture.devices [0];
                webCamTexture = new WebCamTexture(webCamDevice.name, width, height);
            }

            Debug.Log("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);



            // Starts the camera
            webCamTexture.Play();


            while (true)
            {
                //If you want to use webcamTexture.width and webcamTexture.height on iOS, you have to wait until webcamTexture.didUpdateThisFrame == 1, otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
                if (webCamTexture.width > 16 && webCamTexture.height > 16)
                {
#else
                if (webCamTexture.didUpdateThisFrame)
                {
#endif
                    Debug.Log("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
                    Debug.Log("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrongFacing " + webCamDevice.isFrontFacing);

                    colors = new Color32[webCamTexture.width * webCamTexture.height];

                    rgbaMat = new Mat(webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);
                    grayMat = new Mat(webCamTexture.height, webCamTexture.width, CvType.CV_8UC1);

                    texture = new Texture2D(webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);


                    cascade = new CascadeClassifier(Utils.getFilePath("haarcascade_frontalface_alt.xml"));
                    if (cascade.empty())
                    {
                        Debug.LogError("cascade file is not loaded.Please copy from “FaceTrackerSample/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
                    }

                    gameObject.transform.localScale = new Vector3(webCamTexture.width, webCamTexture.height, 1);


                    gameObject.transform.localEulerAngles = new Vector3(0, 0, 0);
//										gameObject.transform.rotation = gameObject.transform.rotation * Quaternion.AngleAxis (webCamTexture.videoRotationAngle, Vector3.back);


//										bool _videoVerticallyMirrored = webCamTexture.videoVerticallyMirrored;
//										float scaleX = 1;
//										float scaleY = _videoVerticallyMirrored ? -1.0f : 1.0f;
//										gameObject.transform.localScale = new Vector3 (scaleX * gameObject.transform.localScale.x, scaleY * gameObject.transform.localScale.y, 1);


                    gameObject.GetComponent <Renderer> ().material.mainTexture = texture;

                    Camera.main.orthographicSize = webCamTexture.height / 2;



                    int max_d = Mathf.Max(rgbaMat.rows(), rgbaMat.cols());
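                    // Approximate pinhole intrinsics: focal length ≈ the larger image dimension,
                    // principal point at the image center, zero skew, and (below) zero lens distortion.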
                    camMatrix = new Mat(3, 3, CvType.CV_64FC1);
                    camMatrix.put(0, 0, max_d);
                    camMatrix.put(0, 1, 0);
                    camMatrix.put(0, 2, rgbaMat.cols() / 2.0f);
                    camMatrix.put(1, 0, 0);
                    camMatrix.put(1, 1, max_d);
                    camMatrix.put(1, 2, rgbaMat.rows() / 2.0f);
                    camMatrix.put(2, 0, 0);
                    camMatrix.put(2, 1, 0);
                    camMatrix.put(2, 2, 1.0f);

                    Size     imageSize      = new Size(rgbaMat.cols(), rgbaMat.rows());
                    double   apertureWidth  = 0;
                    double   apertureHeight = 0;
                    double[] fovx           = new double[1];
                    double[] fovy           = new double[1];
                    double[] focalLength    = new double[1];
                    Point    principalPoint = new Point();
                    double[] aspectratio    = new double[1];



                    Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);

                    Debug.Log("imageSize " + imageSize.ToString());
                    Debug.Log("apertureWidth " + apertureWidth);
                    Debug.Log("apertureHeight " + apertureHeight);
                    Debug.Log("fovx " + fovx [0]);
                    Debug.Log("fovy " + fovy [0]);
                    Debug.Log("focalLength " + focalLength [0]);
                    Debug.Log("principalPoint " + principalPoint.ToString());
                    Debug.Log("aspectratio " + aspectratio [0]);


                    ARCamera.fieldOfView = (float)fovy [0];

                    Debug.Log("camMatrix " + camMatrix.dump());


                    distCoeffs = new MatOfDouble(0, 0, 0, 0);
                    Debug.Log("distCoeffs " + distCoeffs.dump());



                    lookAtM = getLookAtMatrix(new Vector3(0, 0, 0), new Vector3(0, 0, 1), new Vector3(0, -1, 0));
                    Debug.Log("lookAt " + lookAtM.ToString());

                    invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));



                    initDone = true;

                    break;
                }
                else
                {
                    yield return(0);
                }
            }
        }

        // Update is called once per frame
        void Update()
        {
            if (!initDone)
            {
                return;
            }

#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
            if (webCamTexture.width > 16 && webCamTexture.height > 16)
            {
#else
            if (webCamTexture.didUpdateThisFrame)
            {
#endif

                Utils.webCamTextureToMat(webCamTexture, rgbaMat, colors);

                //flip to correct direction.
                if (webCamTexture.videoVerticallyMirrored)
                {
                    if (webCamDevice.isFrontFacing)
                    {
                        if (webCamTexture.videoRotationAngle == 0)
                        {
                            Core.flip(rgbaMat, rgbaMat, -1);
                        }
                        else if (webCamTexture.videoRotationAngle == 180)
                        {
                            Core.flip(rgbaMat, rgbaMat, 0);
                        }
                    }
                    else
                    {
                        if (webCamTexture.videoRotationAngle == 0)
                        {
                        }
                        else if (webCamTexture.videoRotationAngle == 180)
                        {
                            Core.flip(rgbaMat, rgbaMat, 1);
                        }
                    }
                }
                else
                {
                    if (webCamDevice.isFrontFacing)
                    {
                        if (webCamTexture.videoRotationAngle == 0)
                        {
                            Core.flip(rgbaMat, rgbaMat, 1);
                        }
                        else if (webCamTexture.videoRotationAngle == 180)
                        {
                            Core.flip(rgbaMat, rgbaMat, 0);
                        }
                    }
                    else
                    {
                        if (webCamTexture.videoRotationAngle == 0)
                        {
                        }
                        else if (webCamTexture.videoRotationAngle == 180)
                        {
                            Core.flip(rgbaMat, rgbaMat, -1);
                        }
                    }
                }
                //convert image to greyscale
                Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);


                if (faceTracker.getPoints().Count <= 0)
                {
                    Debug.Log("detectFace");

                    //convert image to greyscale
                    using (Mat equalizeHistMat = new Mat())
                        using (MatOfRect faces = new MatOfRect()) {
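                            // Equalize the histogram first; Haar detection is sensitive to low-contrast lighting.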
                            Imgproc.equalizeHist(grayMat, equalizeHistMat);

                            cascade.detectMultiScale(equalizeHistMat, faces, 1.1f, 2, 0
                                                     | Objdetect.CASCADE_FIND_BIGGEST_OBJECT
                                                     | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size(equalizeHistMat.cols() * 0.15, equalizeHistMat.cols() * 0.15), new Size());



                            if (faces.rows() > 0)
                            {
                                Debug.Log("faces " + faces.dump());
                                //add initial face points from MatOfRect
                                faceTracker.addPoints(faces);

                                //draw face rect
                                OpenCVForUnity.Rect[] rects = faces.toArray();
                                for (int i = 0; i < rects.Length; i++)
                                {
                                    Core.rectangle(rgbaMat, new Point(rects [i].x, rects [i].y), new Point(rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar(255, 0, 0, 255), 2);
                                }
                            }
                        }
                }


                //Track face points. If there are no points, track() always returns false.
                if (faceTracker.track(grayMat, faceTrackerParams))
                {
                    if (isDrawPoints)
                    {
                        faceTracker.draw(rgbaMat, new Scalar(255, 0, 0, 255), new Scalar(0, 255, 0, 255));
                    }

                    Core.putText(rgbaMat, "'Tap' or 'Space Key' to Reset", new Point(5, rgbaMat.rows() - 5), Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar(255, 255, 255, 255), 2, Core.LINE_AA, false);


                    Point[] points = faceTracker.getPoints() [0];


                    if (points.Length > 0)
                    {
//												for (int i = 0; i < points.Length; i++) {
//														Core.putText (rgbaMat, "" + i, new Point (points [i].x, points [i].y), Core.FONT_HERSHEY_SIMPLEX, 0.3, new Scalar (0, 0, 255, 255), 2, Core.LINE_AA, false);
//												}


                        imagePoints.fromArray(
                            points [31],                    //l eye
                            points [36],                    //r eye
                            points [67],                    //nose
                            points [48],                    //l mouth
                            points [54]                     //r mouth
//							,
//											points [1],//l ear
//											points [13]//r ear
                            );


                        Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
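                        // rvec/tvec now hold the head rotation (Rodrigues vector) and translation in OpenCV camera space.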

                        bool isRefresh = false;

                        if (tvec.get(2, 0) [0] > 0 && tvec.get(2, 0) [0] < 1200 * ((float)webCamTexture.width / (float)width))
                        {
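                            // Depth is positive and within a sanity window scaled to the capture resolution.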
                            isRefresh = true;

                            if (oldRvec == null)
                            {
                                oldRvec = new Mat();
                                rvec.copyTo(oldRvec);
                            }
                            if (oldTvec == null)
                            {
                                oldTvec = new Mat();
                                tvec.copyTo(oldTvec);
                            }


                            //filter Rvec Noise.
                            using (Mat absDiffRvec = new Mat()) {
                                Core.absdiff(rvec, oldRvec, absDiffRvec);

                                //				Debug.Log ("absDiffRvec " + absDiffRvec.dump());

                                using (Mat cmpRvec = new Mat()) {
                                    Core.compare(absDiffRvec, new Scalar(rvecNoiseFilterRange), cmpRvec, Core.CMP_GT);

                                    if (Core.countNonZero(cmpRvec) > 0)
                                    {
                                        isRefresh = false;
                                    }
                                }
                            }



                            //filter Tvec Noise.
                            using (Mat absDiffTvec = new Mat()) {
                                Core.absdiff(tvec, oldTvec, absDiffTvec);

                                //				Debug.Log ("absDiffRvec " + absDiffRvec.dump());

                                using (Mat cmpTvec = new Mat()) {
                                    Core.compare(absDiffTvec, new Scalar(tvecNoiseFilterRange), cmpTvec, Core.CMP_GT);

                                    if (Core.countNonZero(cmpTvec) > 0)
                                    {
                                        isRefresh = false;
                                    }
                                }
                            }
                        }

                        if (isRefresh)
                        {
                            if (!rightEye.activeSelf)
                            {
                                rightEye.SetActive(true);
                            }
                            if (!leftEye.activeSelf)
                            {
                                leftEye.SetActive(true);
                            }


                            if ((Mathf.Abs((float)(points [48].x - points [56].x)) < Mathf.Abs((float)(points [31].x - points [36].x)) / 2.2 &&
                                 Mathf.Abs((float)(points [51].y - points [57].y)) > Mathf.Abs((float)(points [31].x - points [36].x)) / 2.9) ||
                                Mathf.Abs((float)(points [51].y - points [57].y)) > Mathf.Abs((float)(points [31].x - points [36].x)) / 2.7)
                            {
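                                // Empirical mouth-open test: the lip gap is large relative to the eye-to-eye distance.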
                                if (!mouth.activeSelf)
                                {
                                    mouth.SetActive(true);
                                }
                            }
                            else
                            {
                                if (mouth.activeSelf)
                                {
                                    mouth.SetActive(false);
                                }
                            }



                            rvec.copyTo(oldRvec);
                            tvec.copyTo(oldTvec);

                            Calib3d.Rodrigues(rvec, rotM);

                            transformationM.SetRow(0, new Vector4((float)rotM.get(0, 0) [0], (float)rotM.get(0, 1) [0], (float)rotM.get(0, 2) [0], (float)tvec.get(0, 0) [0]));
                            transformationM.SetRow(1, new Vector4((float)rotM.get(1, 0) [0], (float)rotM.get(1, 1) [0], (float)rotM.get(1, 2) [0], (float)tvec.get(1, 0) [0]));
                            transformationM.SetRow(2, new Vector4((float)rotM.get(2, 0) [0], (float)rotM.get(2, 1) [0], (float)rotM.get(2, 2) [0], (float)tvec.get(2, 0) [0]));
                            transformationM.SetRow(3, new Vector4(0, 0, 0, 1));
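                            // Compose the OpenCV pose with the look-at and Z-flip matrices to get a Unity view matrix.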

                            modelViewMtrx = lookAtM * transformationM * invertZM;

                            ARCamera.worldToCameraMatrix = modelViewMtrx;


                            //				Debug.Log ("modelViewMtrx " + modelViewMtrx.ToString());
                        }
                    }
                }



                Utils.matToTexture2D(rgbaMat, texture, colors);
            }

            if (Input.GetKeyUp(KeyCode.Space) || Input.touchCount > 0)
            {
                faceTracker.reset();
                if (oldRvec != null)
                {
                    oldRvec.Dispose();
                    oldRvec = null;
                }
                if (oldTvec != null)
                {
                    oldTvec.Dispose();
                    oldTvec = null;
                }

                ARCamera.ResetWorldToCameraMatrix();

                rightEye.SetActive(false);
                leftEye.SetActive(false);
                mouth.SetActive(false);
            }
        }

        void OnDisable()
        {
            webCamTexture.Stop();
        }

        private Matrix4x4 getLookAtMatrix(Vector3 pos, Vector3 target, Vector3 up)
        {
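            // Build an orthonormal right-handed view basis: z points from target toward pos, x = up × z, y = z × x.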
            Vector3 z = Vector3.Normalize(pos - target);
            Vector3 x = Vector3.Normalize(Vector3.Cross(up, z));
            Vector3 y = Vector3.Normalize(Vector3.Cross(z, x));

            Matrix4x4 result = new Matrix4x4();

            result.SetRow(0, new Vector4(x.x, x.y, x.z, -(Vector3.Dot(pos, x))));
            result.SetRow(1, new Vector4(y.x, y.y, y.z, -(Vector3.Dot(pos, y))));
            result.SetRow(2, new Vector4(z.x, z.y, z.z, -(Vector3.Dot(pos, z))));
            result.SetRow(3, new Vector4(0, 0, 0, 1));

            return(result);
        }

        void OnGUI()
        {
            float     screenScale  = Screen.height / 240.0f;
            Matrix4x4 scaledMatrix = Matrix4x4.Scale(new Vector3(screenScale, screenScale, screenScale));

            GUI.matrix = scaledMatrix;


            GUILayout.BeginVertical();
            if (GUILayout.Button("back"))
            {
                Application.LoadLevel("FaceTrackerSample");
            }
            if (GUILayout.Button("change camera"))
            {
                isFrontFacing = !isFrontFacing;
                StartCoroutine(init());
            }

            if (GUILayout.Button("drawPoints"))
            {
                if (isDrawPoints)
                {
                    isDrawPoints = false;
                }
                else
                {
                    isDrawPoints = true;
                }
            }
            if (GUILayout.Button("axes"))
            {
                if (axes.activeSelf)
                {
                    axes.SetActive(false);
                }
                else
                {
                    axes.SetActive(true);
                }
            }
            if (GUILayout.Button("head"))
            {
                if (head.activeSelf)
                {
                    head.SetActive(false);
                }
                else
                {
                    head.SetActive(true);
                }
            }


            GUILayout.EndVertical();
        }
    }
Example #20
        // Update is called once per frame
        void Update()
        {
            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();


                OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);

                //detect face rects
                List <UnityEngine.Rect> detectResult = faceLandmarkDetector.Detect();

                if (detectResult.Count > 0)
                {
                    //detect landmark points
                    List <Vector2> points = faceLandmarkDetector.DetectLandmark(detectResult [0]);

                    if (isShowingFacePoints)
                    {
                        OpenCVForUnityUtils.DrawFaceLandmark(rgbaMat, points, new Scalar(0, 255, 0, 255), 2);
                    }

                    imagePoints.fromArray(
                        new Point((points [38].x + points [41].x) / 2, (points [38].y + points [41].y) / 2), //l eye
                        new Point((points [43].x + points [46].x) / 2, (points [43].y + points [46].y) / 2), //r eye
                        new Point(points [33].x, points [33].y),                                             //nose
                        new Point(points [48].x, points [48].y),                                             //l mouth
                        new Point(points [54].x, points [54].y),                                             //r mouth
                        new Point(points [0].x, points [0].y),                                               //l ear
                        new Point(points [16].x, points [16].y)                                              //r ear
                        );


                    Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
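                    // Accept the pose only when Z is positive, i.e. the face lies in front of the camera.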


                    if (tvec.get(2, 0) [0] > 0)
                    {
                        if (Mathf.Abs((float)(points [43].y - points [46].y)) > Mathf.Abs((float)(points [42].x - points [45].x)) / 6.0)
                        {
                            if (isShowingEffects)
                            {
                                rightEye.SetActive(true);
                            }
                        }

                        if (Mathf.Abs((float)(points [38].y - points [41].y)) > Mathf.Abs((float)(points [39].x - points [36].x)) / 6.0)
                        {
                            if (isShowingEffects)
                            {
                                leftEye.SetActive(true);
                            }
                        }
                        if (isShowingHead)
                        {
                            head.SetActive(true);
                        }
                        if (isShowingAxes)
                        {
                            axes.SetActive(true);
                        }


                        float noseDistance  = Mathf.Abs((float)(points [27].y - points [33].y));
                        float mouthDistance = Mathf.Abs((float)(points [62].y - points [66].y));
                        if (mouthDistance > noseDistance / 5.0)
                        {
                            if (isShowingEffects)
                            {
                                mouth.SetActive(true);
                                foreach (ParticleSystem ps in mouthParticleSystem)
                                {
                                    ps.enableEmission = true;
                                    ps.startSize      = 40 * (mouthDistance / noseDistance);
                                }
                            }
                        }
                        else
                        {
                            if (isShowingEffects)
                            {
                                foreach (ParticleSystem ps in mouthParticleSystem)
                                {
                                    ps.enableEmission = false;
                                }
                            }
                        }


                        // position
                        Vector4 pos = new Vector4((float)tvec.get(0, 0) [0], (float)tvec.get(1, 0) [0], (float)tvec.get(2, 0) [0], 0);     // from OpenCV
                        // right-handed coordinates system (OpenCV) to left-handed one (Unity)
                        ARGameObject.transform.localPosition = new Vector3(pos.x, -pos.y, pos.z);


                        // rotation
                        Calib3d.Rodrigues(rvec, rotMat);

                        Vector3 forward = new Vector3((float)rotMat.get(0, 2) [0], (float)rotMat.get(1, 2) [0], (float)rotMat.get(2, 2) [0]); // from OpenCV
                        Vector3 up      = new Vector3((float)rotMat.get(0, 1) [0], (float)rotMat.get(1, 1) [0], (float)rotMat.get(2, 1) [0]); // from OpenCV
                        // right-handed coordinates system (OpenCV) to left-handed one (Unity)
                        Quaternion rot = Quaternion.LookRotation(new Vector3(forward.x, -forward.y, forward.z), new Vector3(up.x, -up.y, up.z));
                        ARGameObject.transform.localRotation = rot;


                        // Apply Z axis inverted matrix.
                        invertZM        = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));
                        transformationM = ARGameObject.transform.localToWorldMatrix * invertZM;

                        // Apply camera transform matrix.
                        transformationM = ARCamera.transform.localToWorldMatrix * transformationM;

                        ARUtils.SetTransformFromMatrix(ARGameObject.transform, ref transformationM);
                    }
                }

                Imgproc.putText(rgbaMat, "W:" + rgbaMat.width() + " H:" + rgbaMat.height() + " SO:" + Screen.orientation, new Point(5, rgbaMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar(255, 255, 255, 255), 1, Imgproc.LINE_AA, false);

                OpenCVForUnity.Utils.matToTexture2D(rgbaMat, texture, webCamTextureToMatHelper.GetBufferColors());
            }
        }
Example #21
    //Optical flow
    IEnumerator OpticalFlow()
    {
        Scalar tempHue;
        Scalar tempSpeed;

        int iCountTrackedPoints = 0;
        int vecCount            = 0;

        if (mMOP2fptsPrev.rows() == 0)
        {
            // first time through the loop so we need prev and this mats
            Imgproc.cvtColor(openCVCreateMat.rgbaMat, matOpFlowThis, Imgproc.COLOR_RGBA2GRAY);

            // copy that to prev mat
            matOpFlowThis.copyTo(matOpFlowPrev);

            //if (blurImage == true){
            //Gaussian filter of the MOG2 images
            //Imgproc.GaussianBlur(matOpFlowPrev, matOpFlowPrev, kernelSize, sigmaX, sigmaY);//Gauss filter
            //}
            // get prev corners
            Imgproc.goodFeaturesToTrack(matOpFlowPrev, MOPcorners, iGFFTMax, qLevel, minDistCorners);             //SLIDER input
            mMOP2fptsPrev.fromArray(MOPcorners.toArray());

            // get safe copy of this corners
            mMOP2fptsPrev.copyTo(mMOP2fptsSafe);
        }
        else
        {
            // we've been through before so
            // this mat is valid. Copy it to prev mat
            matOpFlowThis.copyTo(matOpFlowPrev);

            // get this mat
            Imgproc.cvtColor(openCVCreateMat.rgbaMat, matOpFlowThis, Imgproc.COLOR_RGBA2GRAY);

            //if (blurImage == true){
            //Gaussian filter of the MOG2 images
            //Imgproc.GaussianBlur(matOpFlowThis, matOpFlowThis, kernelSize, sigmaX, sigmaY);//Gauss filter
            //}
            // get the corners for this mat
            Imgproc.goodFeaturesToTrack(matOpFlowThis, MOPcorners, iGFFTMax, qLevel, minDistCorners);             // SLIDER input
            mMOP2fptsThis.fromArray(MOPcorners.toArray());

            // retrieve the corners from the prev mat (saves calculating them again)
            mMOP2fptsSafe.copyTo(mMOP2fptsPrev);

            // and save this corners for next time through
            mMOP2fptsThis.copyTo(mMOP2fptsSafe);
        }

        Video.calcOpticalFlowPyrLK(matOpFlowPrev, matOpFlowThis, mMOP2fptsPrev, mMOP2fptsThis, mMOBStatus, mMOFerr);
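        // mMOBStatus marks which corners were tracked successfully; mMOFerr holds each point's tracking error.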

        if (mMOBStatus.rows() > 0)
        {
            List <Point> cornersPrev = mMOP2fptsPrev.toList();
            List <Point> cornersThis = mMOP2fptsThis.toList();
            List <byte>  byteStatus  = mMOBStatus.toList();

            int x = 0;
            int y = byteStatus.Count; // upper bound: examine every tracked point

            double absX;
            double absY; // displacement components, used to compute polar coordinates

            for (x = 0; x < y; x++)
            {
                if (byteStatus [x] == 1)
                {
                    Point pt  = cornersThis [x];
                    Point pt2 = cornersPrev [x];

                    //if (pt != pt2) {//I think this IF statement should be removed as pt and pt2 should always be different

                    float mySpeed = CalculateSpeedFloat(pt, pt2);

                    absX = pt.x - pt2.x;
                    absY = pt.y - pt2.y;
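                    // Motion direction in degrees; Atan2(x, y) here measures the angle from the +y axis.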
                    float angle = Mathf.Atan2((float)absX, (float)absY) * Mathf.Rad2Deg;
                    angle = Mathf.RoundToInt(angle);

                    //Get Hue based on Angle
                    tempHue   = GetHueColor((int)angle);
                    tempSpeed = GetSpeedColor((int)mySpeed);

                    //Store so we can add tracers
                    if (mySpeed > maxSpeed)                               //|| CalculateSpeedFloat (pt, pt2) <= 1
                    {
                        yield return(null);
                    }
                    else
                    {
                        tracerPoints.AddTracersToStorage(pt, pt2, tempHue, tempSpeed, videoPlayer.frame, angle, mySpeed);
                        speedVec = speedVec + mySpeed;
                        angleVec = angleVec + angle;
                        vecCount++;
                        //tracerPoints2.AddTracersToStorage (pt, pt2, tempSpeed, videoPlayer.frame, angle, mySpeed);

                        //ADD STORING SPEEDS TO VECTOR
                        //CSVDataEmail.AddDataTrack (speed,angle.ToString ());
                    }
                    iCountTrackedPoints++;
                }
            }
        }

        // Guard against division by zero when no points were tracked this frame.
        meanSpeed = (vecCount > 0) ? (int)(speedVec / vecCount) : 0;
        meanAngle = (vecCount > 0) ? (int)(angleVec / vecCount) : 0;
        //sTrackingLogger = "Video frame: " + videoPlayer.frame.ToString() + "    Points: " + iCountTrackedPoints.ToString() + "";
        sTrackingLogger = "Speed: " + meanSpeed.ToString() + " Angle: " + meanAngle.ToString();

        textTrackedPoints.text = sTrackingLogger;
        yield return(null);
    }
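
    // NOTE: CalculateSpeedFloat, GetHueColor and GetSpeedColor are referenced above but are not part
    // of this excerpt. A minimal sketch of CalculateSpeedFloat, assuming "speed" means the per-frame
    // pixel displacement between the matched corner positions:
    private float CalculateSpeedFloat(Point pt, Point pt2)
    {
        double dx = pt.x - pt2.x;
        double dy = pt.y - pt2.y;
        return (float)System.Math.Sqrt(dx * dx + dy * dy);
    }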
Example #22
        private IEnumerator init()
        {
            if (webCamTexture != null)
            {
                webCamTexture.Stop();
                initDone = false;

                rgbaMat.Dispose();

                matOpFlowThis.Dispose();
                matOpFlowPrev.Dispose();
                MOPcorners.Dispose();
                mMOP2fptsThis.Dispose();
                mMOP2fptsPrev.Dispose();
                mMOP2fptsSafe.Dispose();
                mMOBStatus.Dispose();
                mMOFerr.Dispose();
            }

            // Checks how many and which cameras are available on the device
            for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++)
            {
                if (WebCamTexture.devices [cameraIndex].isFrontFacing == isFrontFacing)
                {
                    Debug.Log(cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing);

                    webCamDevice = WebCamTexture.devices [cameraIndex];

                    webCamTexture = new WebCamTexture(webCamDevice.name, width, height);


                    break;
                }
            }

            if (webCamTexture == null)
            {
                webCamDevice  = WebCamTexture.devices [0];
                webCamTexture = new WebCamTexture(webCamDevice.name, width, height);
            }

            Debug.Log("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);



            // Starts the camera
            webCamTexture.Play();
            while (true)
            {
                //If you want to use webcamTexture.width and webcamTexture.height on iOS, you have to wait until webcamTexture.didUpdateThisFrame == 1, otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
#if UNITY_IPHONE && !UNITY_EDITOR
                if (webCamTexture.width > 16 && webCamTexture.height > 16)
                {
#else
                if (webCamTexture.didUpdateThisFrame)
                {
#endif

                    Debug.Log("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
                    Debug.Log("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrongFacing " + webCamDevice.isFrontFacing);

                    colors = new Color32[webCamTexture.width * webCamTexture.height];

                    rgbaMat = new Mat(webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);

                    matOpFlowThis = new Mat();
                    matOpFlowPrev = new Mat();
                    MOPcorners    = new MatOfPoint();
                    mMOP2fptsThis = new MatOfPoint2f();
                    mMOP2fptsPrev = new MatOfPoint2f();
                    mMOP2fptsSafe = new MatOfPoint2f();
                    mMOBStatus    = new MatOfByte();
                    mMOFerr       = new MatOfFloat();

                    texture = new Texture2D(webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);

                    gameObject.transform.eulerAngles = new Vector3(0, 0, 0);
#if (UNITY_ANDROID || UNITY_IPHONE) && !UNITY_EDITOR
                    gameObject.transform.eulerAngles = new Vector3(0, 0, -90);
#endif
//										gameObject.transform.rotation = gameObject.transform.rotation * Quaternion.AngleAxis (webCamTexture.videoRotationAngle, Vector3.back);


                    gameObject.transform.localScale = new Vector3(webCamTexture.width, webCamTexture.height, 1);


//										bool videoVerticallyMirrored = webCamTexture.videoVerticallyMirrored;
//										float scaleX = 1;
//										float scaleY = videoVerticallyMirrored ? -1.0f : 1.0f;
//										if (webCamTexture.videoRotationAngle == 270)
//												scaleY = -1.0f;
//										gameObject.transform.localScale = new Vector3 (scaleX * gameObject.transform.localScale.x, scaleY * gameObject.transform.localScale.y, 1);


                    gameObject.GetComponent <Renderer> ().material.mainTexture = texture;

#if (UNITY_ANDROID || UNITY_IPHONE) && !UNITY_EDITOR
                    Camera.main.orthographicSize = webCamTexture.width / 2;
#else
                    Camera.main.orthographicSize = webCamTexture.height / 2;
#endif

                    initDone = true;

                    break;
                }
                else
                {
                    yield return(0);
                }
            }
        }

        // Update is called once per frame
        void Update()
        {
            if (!initDone)
            {
                return;
            }

#if UNITY_IPHONE && !UNITY_EDITOR
            if (webCamTexture.width > 16 && webCamTexture.height > 16)
            {
#else
            if (webCamTexture.didUpdateThisFrame)
            {
#endif

                Utils.webCamTextureToMat(webCamTexture, rgbaMat, colors);

                if (webCamTexture.videoVerticallyMirrored)
                {
                    if (webCamDevice.isFrontFacing)
                    {
                        if (webCamTexture.videoRotationAngle == 0)
                        {
                            Core.flip(rgbaMat, rgbaMat, 1);
                        }
                        else if (webCamTexture.videoRotationAngle == 90)
                        {
                            Core.flip(rgbaMat, rgbaMat, 0);
                        }
                        else if (webCamTexture.videoRotationAngle == 270)
                        {
                            Core.flip(rgbaMat, rgbaMat, 1);
                        }
                    }
                    else
                    {
                        if (webCamTexture.videoRotationAngle == 90)
                        {
                        }
                        else if (webCamTexture.videoRotationAngle == 270)
                        {
                            Core.flip(rgbaMat, rgbaMat, -1);
                        }
                    }
                }
                else
                {
                    if (webCamDevice.isFrontFacing)
                    {
                        if (webCamTexture.videoRotationAngle == 0)
                        {
                            Core.flip(rgbaMat, rgbaMat, 1);
                        }
                        else if (webCamTexture.videoRotationAngle == 90)
                        {
                            Core.flip(rgbaMat, rgbaMat, 0);
                        }
                        else if (webCamTexture.videoRotationAngle == 270)
                        {
                            Core.flip(rgbaMat, rgbaMat, 1);
                        }
                    }
                    else
                    {
                        if (webCamTexture.videoRotationAngle == 90)
                        {
                        }
                        else if (webCamTexture.videoRotationAngle == 270)
                        {
                            Core.flip(rgbaMat, rgbaMat, -1);
                        }
                    }
                }

                if (mMOP2fptsPrev.rows() == 0)
                {
                    // first time through the loop so we need prev and this mats
                    // plus prev points
                    // get this mat
                    Imgproc.cvtColor(rgbaMat, matOpFlowThis, Imgproc.COLOR_RGBA2GRAY);

                    // copy that to prev mat
                    matOpFlowThis.copyTo(matOpFlowPrev);

                    // get prev corners
                    Imgproc.goodFeaturesToTrack(matOpFlowPrev, MOPcorners, iGFFTMax, 0.05, 20);
                    mMOP2fptsPrev.fromArray(MOPcorners.toArray());

                    // get safe copy of this corners
                    mMOP2fptsPrev.copyTo(mMOP2fptsSafe);
                }
                else
                {
                    // we've been through before so
                    // this mat is valid. Copy it to prev mat
                    matOpFlowThis.copyTo(matOpFlowPrev);

                    // get this mat
                    Imgproc.cvtColor(rgbaMat, matOpFlowThis, Imgproc.COLOR_RGBA2GRAY);

                    // get the corners for this mat
                    Imgproc.goodFeaturesToTrack(matOpFlowThis, MOPcorners, iGFFTMax, 0.05, 20);
                    mMOP2fptsThis.fromArray(MOPcorners.toArray());

                    // retrieve the corners from the prev mat
                    // (saves calculating them again)
                    mMOP2fptsSafe.copyTo(mMOP2fptsPrev);

                    // and save this corners for next time through

                    mMOP2fptsThis.copyTo(mMOP2fptsSafe);
                }


                /*
                 * Parameters:
                 * prevImg first 8-bit input image
                 * nextImg second input image
                 * prevPts vector of 2D points for which the flow needs to be found; point coordinates must be single-precision floating-point numbers.
                 * nextPts output vector of 2D points (with single-precision floating-point coordinates) containing the calculated new positions of input features in the second image; when OPTFLOW_USE_INITIAL_FLOW flag is passed, the vector must have the same size as in the input.
                 * status output status vector (of unsigned chars); each element of the vector is set to 1 if the flow for the corresponding features has been found, otherwise, it is set to 0.
                 * err output vector of errors; each element of the vector is set to an error for the corresponding feature, type of the error measure can be set in flags parameter; if the flow wasn't found then the error is not defined (use the status parameter to find such cases).
                 */
                Video.calcOpticalFlowPyrLK(matOpFlowPrev, matOpFlowThis, mMOP2fptsPrev, mMOP2fptsThis, mMOBStatus, mMOFerr);

                if (!mMOBStatus.empty())
                {
                    List <Point> cornersPrev = mMOP2fptsPrev.toList();
                    List <Point> cornersThis = mMOP2fptsThis.toList();
                    List <byte>  byteStatus  = mMOBStatus.toList();

                    int x = 0;
                    int y = byteStatus.Count; // upper bound: examine every tracked point

                    for (x = 0; x < y; x++)
                    {
                        if (byteStatus [x] == 1)
                        {
                            Point pt  = cornersThis [x];
                            Point pt2 = cornersPrev [x];

                            Core.circle(rgbaMat, pt, 5, colorRed, iLineThickness - 1);

                            Core.line(rgbaMat, pt, pt2, colorRed, iLineThickness);
                        }
                    }
                }



                Utils.matToTexture2D(rgbaMat, texture, colors);

                gameObject.GetComponent <Renderer> ().material.mainTexture = texture;
            }
        }

        void OnDisable()
        {
            webCamTexture.Stop();
        }

        void OnGUI()
        {
            float     screenScale  = Screen.width / 240.0f;
            Matrix4x4 scaledMatrix = Matrix4x4.Scale(new Vector3(screenScale, screenScale, screenScale));

            GUI.matrix = scaledMatrix;


            GUILayout.BeginVertical();
            if (GUILayout.Button("back"))
            {
                Application.LoadLevel("OpenCVForUnitySample");
            }
            if (GUILayout.Button("change camera"))
            {
                isFrontFacing = !isFrontFacing;
                StartCoroutine(init());
            }

            GUILayout.EndVertical();
        }
    }
}
Example #23
        /// <summary>
        /// Gets the frontal face angles.
        /// </summary>
        /// <returns>Frontal face angles.</returns>
        /// <param name="points">Points of face landmark which was detected with Dlib.</param>
        public Vector3 GetFrontalFaceAngles(List <Vector2> points)
        {
            if (points.Count < 68)
            {
                throw new ArgumentException("Invalid face landmark points", "points");
            }

            landmarkPoints [0].x = (points [38].x + points [41].x) / 2;
            landmarkPoints [0].y = (points [38].y + points [41].y) / 2;
            landmarkPoints [1].x = (points [43].x + points [46].x) / 2;
            landmarkPoints [1].y = (points [43].y + points [46].y) / 2;
            landmarkPoints [2].x = points [30].x;
            landmarkPoints [2].y = points [30].y;
            landmarkPoints [3].x = points [48].x;
            landmarkPoints [3].y = points [48].y;
            landmarkPoints [4].x = points [54].x;
            landmarkPoints [4].y = points [54].y;
            landmarkPoints [5].x = points [0].x;
            landmarkPoints [5].y = points [0].y;
            landmarkPoints [6].x = points [16].x;
            landmarkPoints [6].y = points [16].y;

            // Normalize points.
            Point centerOffset = landmarkPoints [2] - new Point(imageWidth / 2, imageHeight / 2);

            for (int i = 0; i < landmarkPoints.Length; i++)
            {
                landmarkPoints [i] = landmarkPoints [i] - centerOffset;
            }

            imagePoints.fromArray(landmarkPoints);

            Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);

            double tvec_z = tvec.get(2, 0) [0];

//            Debug.Log (rvec.dump());
//            Debug.Log (tvec.dump());
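            // solvePnP can yield NaN translations on degenerate input; fall back to zero angles below.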

            if (!double.IsNaN(tvec_z))
            {
                Calib3d.Rodrigues(rvec, rotM);

//                Debug.Log (rotM.dump());

                transformationM.SetRow(0, new Vector4((float)rotM.get(0, 0) [0], (float)rotM.get(0, 1) [0], (float)rotM.get(0, 2) [0], (float)tvec.get(0, 0) [0]));
                transformationM.SetRow(1, new Vector4((float)rotM.get(1, 0) [0], (float)rotM.get(1, 1) [0], (float)rotM.get(1, 2) [0], (float)tvec.get(1, 0) [0]));
                transformationM.SetRow(2, new Vector4((float)rotM.get(2, 0) [0], (float)rotM.get(2, 1) [0], (float)rotM.get(2, 2) [0], (float)tvec.get(2, 0) [0]));
                transformationM.SetRow(3, new Vector4(0, 0, 0, 1));

                transformationM = invertYM * transformationM * invertZM;

                Vector3 angles = ExtractRotationFromMatrix(ref transformationM).eulerAngles;

//                Debug.Log ("angles " + angles.x + " " + angles.y + " " + angles.z);

                float rotationX = (angles.x > 180) ? angles.x - 360 : angles.x;
                float rotationY = (angles.y > 180) ? angles.y - 360 : angles.y;
                float rotationZ = (tvec_z >= 0) ? (angles.z > 180) ? angles.z - 360 : angles.z : 180 - angles.z;

                if (tvec_z < 0)
                {
                    rotationX = -rotationX;
                    rotationY = -rotationY;
                    rotationZ = -rotationZ;
                }

                return(new Vector3(rotationX, rotationY, rotationZ));
            }
            else
            {
                return(new Vector3(0, 0, 0));
            }
        }
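
        // NOTE: ExtractRotationFromMatrix is referenced above but is not included in this excerpt.
        // A minimal sketch: recover the rotation from the matrix's forward and up basis vectors.
        private Quaternion ExtractRotationFromMatrix(ref Matrix4x4 matrix)
        {
            // Quaternion.LookRotation builds the rotation whose forward/up axes match the given columns.
            return Quaternion.LookRotation(matrix.GetColumn(2), matrix.GetColumn(1));
        }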
        // Update is called once per frame
        void Update()
        {
            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();


                //convert image to greyscale
                Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);


                if (isAutoResetMode || faceTracker.getPoints().Count <= 0)
                {
//                    Debug.Log ("detectFace");

                    //convert image to greyscale
                    using (Mat equalizeHistMat = new Mat()) using (MatOfRect faces = new MatOfRect()) {
                            Imgproc.equalizeHist(grayMat, equalizeHistMat);

                            cascade.detectMultiScale(equalizeHistMat, faces, 1.1f, 2, 0
                                                     | Objdetect.CASCADE_FIND_BIGGEST_OBJECT
                                                     | Objdetect.CASCADE_SCALE_IMAGE, new Size(equalizeHistMat.cols() * 0.15, equalizeHistMat.cols() * 0.15), new Size());



                            if (faces.rows() > 0)
                            {
//                            Debug.Log ("faces " + faces.dump ());

                                List <OpenCVForUnity.CoreModule.Rect> rectsList = faces.toList();
                                List <Point[]> pointsList = faceTracker.getPoints();

                                if (isAutoResetMode)
                                {
                                    //add initial face points from MatOfRect
                                    if (pointsList.Count <= 0)
                                    {
                                        faceTracker.addPoints(faces);
//                                    Debug.Log ("reset faces ");
                                    }
                                    else
                                    {
                                        for (int i = 0; i < rectsList.Count; i++)
                                        {
                                            OpenCVForUnity.CoreModule.Rect trackRect = new OpenCVForUnity.CoreModule.Rect(rectsList [i].x + rectsList [i].width / 3, rectsList [i].y + rectsList [i].height / 2, rectsList [i].width / 3, rectsList [i].height / 3);
                                            // Check whether the nose point falls inside trackRect.
                                            if (i < pointsList.Count && !trackRect.contains(pointsList [i] [67]))
                                            {
                                                rectsList.RemoveAt(i);
                                                pointsList.RemoveAt(i);
//                                            Debug.Log ("remove " + i);
                                            }
                                            Imgproc.rectangle(rgbaMat, new Point(trackRect.x, trackRect.y), new Point(trackRect.x + trackRect.width, trackRect.y + trackRect.height), new Scalar(0, 0, 255, 255), 2);
                                        }
                                    }
                                }
                                else
                                {
                                    faceTracker.addPoints(faces);
                                }

                                //draw face rect
                                for (int i = 0; i < rectsList.Count; i++)
                                {
                                    Imgproc.rectangle(rgbaMat, new Point(rectsList [i].x, rectsList [i].y), new Point(rectsList [i].x + rectsList [i].width, rectsList [i].y + rectsList [i].height), new Scalar(255, 0, 0, 255), 2);
                                }
                            }
                            else
                            {
                                if (isAutoResetMode)
                                {
                                    faceTracker.reset();

                                    rightEye.SetActive(false);
                                    leftEye.SetActive(false);
                                    head.SetActive(false);
                                    mouth.SetActive(false);
                                    axes.SetActive(false);
                                }
                            }
                        }
                }


                //Track face points. If there are no points, track() always returns false.
                if (faceTracker.track(grayMat, faceTrackerParams))
                {
                    if (isShowingFacePoints)
                    {
                        faceTracker.draw(rgbaMat, new Scalar(255, 0, 0, 255), new Scalar(0, 255, 0, 255));
                    }

                    Imgproc.putText(rgbaMat, "'Tap' or 'Space Key' to Reset", new Point(5, rgbaMat.rows() - 5), Imgproc.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);


                    Point[] points = faceTracker.getPoints() [0];


                    if (points.Length > 0)
                    {
//                        for (int i = 0; i < points.Length; i++)
//                        {
//                            Imgproc.putText(rgbaMat, "" + i, new Point(points [i].x, points [i].y), Imgproc.FONT_HERSHEY_SIMPLEX, 0.3, new Scalar(0, 0, 255, 255), 2, Imgproc.LINE_AA, false);
//                        }


                        imagePoints.fromArray(
                            points [31], //l eye
                            points [36], //r eye
                            points [67], //nose
                            points [48], //l mouth
                            points [54]  //r mouth
//                          ,
//                          points [0],//l ear
//                          points [14]//r ear
                            );


                        Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);

                        bool isRefresh = false;

                        if (tvec.get(2, 0) [0] > 0 && tvec.get(2, 0) [0] < 1200 * ((float)rgbaMat.cols() / (float)webCamTextureToMatHelper.requestedWidth))
                        {
                            isRefresh = true;

                            if (oldRvec == null)
                            {
                                oldRvec = new Mat();
                                rvec.copyTo(oldRvec);
                            }
                            if (oldTvec == null)
                            {
                                oldTvec = new Mat();
                                tvec.copyTo(oldTvec);
                            }


                            //filter Rvec Noise.
                            using (Mat absDiffRvec = new Mat()) {
                                Core.absdiff(rvec, oldRvec, absDiffRvec);

                                //              Debug.Log ("absDiffRvec " + absDiffRvec.dump());

                                using (Mat cmpRvec = new Mat()) {
                                    Core.compare(absDiffRvec, new Scalar(rvecNoiseFilterRange), cmpRvec, Core.CMP_GT);

                                    if (Core.countNonZero(cmpRvec) > 0)
                                    {
                                        isRefresh = false;
                                    }
                                }
                            }

                            //filter Tvec Noise.
                            using (Mat absDiffTvec = new Mat()) {
                                Core.absdiff(tvec, oldTvec, absDiffTvec);

                                //              Debug.Log ("absDiffRvec " + absDiffRvec.dump());

                                using (Mat cmpTvec = new Mat()) {
                                    Core.compare(absDiffTvec, new Scalar(tvecNoiseFilterRange), cmpTvec, Core.CMP_GT);

                                    if (Core.countNonZero(cmpTvec) > 0)
                                    {
                                        isRefresh = false;
                                    }
                                }
                            }
                        }

                        if (isRefresh)
                        {
                            if (isShowingEffects)
                            {
                                rightEye.SetActive(true);
                            }
                            if (isShowingEffects)
                            {
                                leftEye.SetActive(true);
                            }
                            if (isShowingHead)
                            {
                                head.SetActive(true);
                            }
                            if (isShowingAxes)
                            {
                                axes.SetActive(true);
                            }


                            if ((Mathf.Abs((float)(points [48].x - points [56].x)) < Mathf.Abs((float)(points [31].x - points [36].x)) / 2.2 &&
                                 Mathf.Abs((float)(points [51].y - points [57].y)) > Mathf.Abs((float)(points [31].x - points [36].x)) / 2.9) ||
                                Mathf.Abs((float)(points [51].y - points [57].y)) > Mathf.Abs((float)(points [31].x - points [36].x)) / 2.7)
                            {
                                if (isShowingEffects)
                                {
                                    mouth.SetActive(true);
                                }
                            }
                            else
                            {
                                if (isShowingEffects)
                                {
                                    mouth.SetActive(false);
                                }
                            }

                            rvec.copyTo(oldRvec);
                            tvec.copyTo(oldTvec);

                            Calib3d.Rodrigues(rvec, rotM);

                            transformationM.SetRow(0, new Vector4((float)rotM.get(0, 0) [0], (float)rotM.get(0, 1) [0], (float)rotM.get(0, 2) [0], (float)tvec.get(0, 0) [0]));
                            transformationM.SetRow(1, new Vector4((float)rotM.get(1, 0) [0], (float)rotM.get(1, 1) [0], (float)rotM.get(1, 2) [0], (float)tvec.get(1, 0) [0]));
                            transformationM.SetRow(2, new Vector4((float)rotM.get(2, 0) [0], (float)rotM.get(2, 1) [0], (float)rotM.get(2, 2) [0], (float)tvec.get(2, 0) [0]));
                            transformationM.SetRow(3, new Vector4(0, 0, 0, 1));

                            // right-handed coordinates system (OpenCV) to left-handed one (Unity)
                            ARM = invertYM * transformationM;

                            // Apply Z-axis inverted matrix.
                            ARM = ARM * invertZM;

                            if (shouldMoveARCamera)
                            {
                                if (ARGameObject != null)
                                {
                                    ARM = ARGameObject.transform.localToWorldMatrix * ARM.inverse;
                                    ARUtils.SetTransformFromMatrix(ARCamera.transform, ref ARM);
                                    ARGameObject.SetActive(true);
                                }
                            }
                            else
                            {
                                ARM = ARCamera.transform.localToWorldMatrix * ARM;

                                if (ARGameObject != null)
                                {
                                    ARUtils.SetTransformFromMatrix(ARGameObject.transform, ref ARM);
                                    ARGameObject.SetActive(true);
                                }
                            }
                        }
                    }
                }

//                Imgproc.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

                Utils.fastMatToTexture2D(rgbaMat, texture);
            }

            if (Input.GetKeyUp(KeyCode.Space) || Input.touchCount > 0)
            {
                faceTracker.reset();
                if (oldRvec != null)
                {
                    oldRvec.Dispose();
                    oldRvec = null;
                }
                if (oldTvec != null)
                {
                    oldTvec.Dispose();
                    oldTvec = null;
                }

                rightEye.SetActive(false);
                leftEye.SetActive(false);
                head.SetActive(false);
                mouth.SetActive(false);
                axes.SetActive(false);
            }
        }
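
The invertYM and invertZM matrices used above convert between OpenCV's right-handed coordinate system and Unity's left-handed one. A minimal sketch of how such mirror matrices are typically constructed, assuming they flip the Y and Z axes as in the other samples in this collection:

        // Scale-only matrices that mirror one axis each; both are their own inverse.
        Matrix4x4 invertYM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1)); // flips Y
        Matrix4x4 invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1)); // flips Z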
Example #25
        // Update is called once per frame
        void Update()
        {
            if (capture == null)
            {
                return;
            }

            //Loop play
            if (capture.get(Videoio.CAP_PROP_POS_FRAMES) >= capture.get(Videoio.CAP_PROP_FRAME_COUNT))
            {
                capture.set(Videoio.CAP_PROP_POS_FRAMES, 0);
            }

            if (capture.grab())
            {
                capture.retrieve(rgbMat, 0);

                Imgproc.cvtColor(rgbMat, rgbMat, Imgproc.COLOR_BGR2RGB);
                //Debug.Log ("Mat toString " + rgbMat.ToString ());


                OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbMat);

                //detect face rects
                List <UnityEngine.Rect> detectResult = faceLandmarkDetector.Detect();

                if (detectResult.Count > 0)
                {
                    //detect landmark points
                    List <Vector2> points = faceLandmarkDetector.DetectLandmark(detectResult [0]);

                    if (displayFacePoints)
                    {
                        OpenCVForUnityUtils.DrawFaceLandmark(rgbMat, points, new Scalar(0, 255, 0), 2);
                    }

                    MatOfPoint3f objectPoints   = null;
                    bool         isRightEyeOpen = false;
                    bool         isLeftEyeOpen  = false;
                    bool         isMouthOpen    = false;
                    if (points.Count == 68)
                    {
                        objectPoints = objectPoints68;

                        imagePoints.fromArray(
                            new Point((points [38].x + points [41].x) / 2, (points [38].y + points [41].y) / 2), //l eye (Interpupillary breadth)
                            new Point((points [43].x + points [46].x) / 2, (points [43].y + points [46].y) / 2), //r eye (Interpupillary breadth)
                            new Point(points [30].x, points [30].y),                                             //nose (Nose top)
                            new Point(points [48].x, points [48].y),                                             //l mouth (Mouth breadth)
                            new Point(points [54].x, points [54].y),                                             //r mouth (Mouth breadth)
                            new Point(points [0].x, points [0].y),                                               //l ear (Bitragion breadth)
                            new Point(points [16].x, points [16].y)                                              //r ear (Bitragion breadth)
                            );


                        // Eye-open heuristic: vertical eyelid gap > eye width / 5.
                        if (Mathf.Abs((float)(points [43].y - points [46].y)) > Mathf.Abs((float)(points [42].x - points [45].x)) / 5.0)
                        {
                            isRightEyeOpen = true;
                        }

                        if (Mathf.Abs((float)(points [38].y - points [41].y)) > Mathf.Abs((float)(points [39].x - points [36].x)) / 5.0)
                        {
                            isLeftEyeOpen = true;
                        }

                        float noseDistance  = Mathf.Abs((float)(points [27].y - points [33].y));
                        float mouthDistance = Mathf.Abs((float)(points [62].y - points [66].y));
                        // Mouth-open heuristic: inner-lip gap > nose length / 5.
                        isMouthOpen = mouthDistance > noseDistance / 5.0;
                    }
                    else if (points.Count == 5)
                    {
                        objectPoints = objectPoints5;

                        imagePoints.fromArray(
                            new Point(points [3].x, points [3].y), //l eye (Inner corner of the eye)
                            new Point(points [1].x, points [1].y), //r eye (Inner corner of the eye)
                            new Point(points [2].x, points [2].y), //l eye (Tail of the eye)
                            new Point(points [0].x, points [0].y), //r eye (Tail of the eye)
                            new Point(points [4].x, points [4].y)  //nose (Nose top)
                            );

                        if (fpsMonitor != null)
                        {
                            fpsMonitor.consoleText = "This example mainly supports the 68-point face landmark model.";
                        }
                    }

                    // Estimate head pose.
                    if (rvec == null || tvec == null)
                    {
                        rvec = new Mat(3, 1, CvType.CV_64FC1);
                        tvec = new Mat(3, 1, CvType.CV_64FC1);
                        Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
                    }

                    double tvec_z = tvec.get(2, 0) [0];

                    if (double.IsNaN(tvec_z) || tvec_z < 0)    // if tvec contains invalid data, do not use an extrinsic guess.
                    {
                        Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
                    }
                    else
                    {
                        Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec, true, Calib3d.SOLVEPNP_ITERATIVE);
                    }
                    //Debug.Log (tvec.dump());

                    if (!double.IsNaN(tvec_z))
                    {
                        // Display effects.
                        if (displayHead)
                        {
                            head.SetActive(true);
                        }
                        if (displayAxes)
                        {
                            axes.SetActive(true);
                        }

                        if (displayEffects)
                        {
                            rightEye.SetActive(isRightEyeOpen);
                            leftEye.SetActive(isLeftEyeOpen);

                            if (isMouthOpen)
                            {
                                mouth.SetActive(true);
                                foreach (ParticleSystem ps in mouthParticleSystem)
                                {
                                    var em = ps.emission;
                                    em.enabled = true;
                                    #if UNITY_5_5_OR_NEWER
                                    var main = ps.main;
                                    main.startSizeMultiplier = 20;
                                    #else
                                    ps.startSize = 20;
                                    #endif
                                }
                            }
                            else
                            {
                                foreach (ParticleSystem ps in mouthParticleSystem)
                                {
                                    var em = ps.emission;
                                    em.enabled = false;
                                }
                            }
                        }

                        // Convert to unity pose data.
                        double[] rvecArr = new double[3];
                        rvec.get(0, 0, rvecArr);
                        double[] tvecArr = new double[3];
                        tvec.get(0, 0, tvecArr);
                        PoseData poseData = ARUtils.ConvertRvecTvecToPoseData(rvecArr, tvecArr);

                        // Changes in pos/rot below these thresholds are ignored.
                        if (enableLowPassFilter)
                        {
                            ARUtils.LowpassPoseData(ref oldPoseData, ref poseData, positionLowPass, rotationLowPass);
                        }
                        oldPoseData = poseData;

                        // Create transform matrix.
                        transformationM = Matrix4x4.TRS(poseData.pos, poseData.rot, Vector3.one);
                    }


                    // right-handed coordinates system (OpenCV) to left-handed one (Unity)
                    ARM = invertYM * transformationM;

                    // Apply Z-axis inverted matrix.
                    ARM = ARM * invertZM;

                    if (shouldMoveARCamera)
                    {
                        ARM = ARGameObject.transform.localToWorldMatrix * ARM.inverse;
                        ARUtils.SetTransformFromMatrix(ARCamera.transform, ref ARM);
                    }
                    else
                    {
                        ARM = ARCamera.transform.localToWorldMatrix * ARM;
                        ARUtils.SetTransformFromMatrix(ARGameObject.transform, ref ARM);
                    }
                }
            }
            else
            {
                rightEye.SetActive(false);
                leftEye.SetActive(false);
                head.SetActive(false);
                mouth.SetActive(false);
                axes.SetActive(false);
            }

            //Imgproc.putText (rgbMat, "W:" + rgbMat.width () + " H:" + rgbMat.height () + " SO:" + Screen.orientation, new Point (5, rgbMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255), 1, Imgproc.LINE_AA, false);

            OpenCVForUnity.Utils.fastMatToTexture2D(rgbMat, texture);
        }
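
The pose-estimation branch above solves from scratch when the previous translation looks invalid, and otherwise warm-starts the iterative solver with the previous frame's pose. A condensed sketch of that pattern; the helper name and parameter types are assumptions, not part of the original sample:

        // Hypothetical helper condensing the extrinsic-guess logic above.
        void EstimateHeadPose(MatOfPoint3f objectPoints, MatOfPoint2f imagePoints,
                              Mat camMatrix, MatOfDouble distCoeffs, Mat rvec, Mat tvec)
        {
            double tz = tvec.get(2, 0)[0];
            if (double.IsNaN(tz) || tz < 0)
            {
                // The previous solution is unusable: solve without an extrinsic guess.
                Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
            }
            else
            {
                // Warm-start the iterative solver with the previous frame's rvec/tvec.
                Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec,
                                 true, Calib3d.SOLVEPNP_ITERATIVE);
            }
        }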
Example #26
        // Update is called once per frame
        void Update()
        {
            //Loop play
            if (capture.get(Videoio.CAP_PROP_POS_FRAMES) >= capture.get(Videoio.CAP_PROP_FRAME_COUNT))
            {
                capture.set(Videoio.CAP_PROP_POS_FRAMES, 0);
            }

            if (capture.grab())
            {
                capture.retrieve(rgbMat, 0);

                Imgproc.cvtColor(rgbMat, rgbMat, Imgproc.COLOR_BGR2RGB);
                //Debug.Log ("Mat toString " + rgbMat.ToString ());


                OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbMat);

                //detect face rects
                List <UnityEngine.Rect> detectResult = faceLandmarkDetector.Detect();

                if (detectResult.Count > 0)
                {
                    //detect landmark points
                    List <Vector2> points = faceLandmarkDetector.DetectLandmark(detectResult [0]);

                    // NOTE: the landmark indices below assume the 68-point model.
                    if (points.Count == 68)
                    {
                        if (shouldDrawFacePoints)
                        {
                            OpenCVForUnityUtils.DrawFaceLandmark(rgbMat, points, new Scalar(0, 255, 0), 2);
                        }

                        imagePoints.fromArray(
                            new Point((points [38].x + points [41].x) / 2, (points [38].y + points [41].y) / 2), //l eye
                            new Point((points [43].x + points [46].x) / 2, (points [43].y + points [46].y) / 2), //r eye
                            new Point(points [33].x, points [33].y),                                             //nose
                            new Point(points [48].x, points [48].y),                                             //l mouth
                            new Point(points [54].x, points [54].y),                                             //r mouth
                            new Point(points [0].x, points [0].y),                                               //l ear
                            new Point(points [16].x, points [16].y)                                              //r ear
                            );


                        Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);


                        if (tvec.get(2, 0) [0] > 0)
                        {
                            if (Mathf.Abs((float)(points [43].y - points [46].y)) > Mathf.Abs((float)(points [42].x - points [45].x)) / 6.0)
                            {
                                if (shouldDrawEffects)
                                {
                                    rightEye.SetActive(true);
                                }
                            }

                            if (Mathf.Abs((float)(points [38].y - points [41].y)) > Mathf.Abs((float)(points [39].x - points [36].x)) / 6.0)
                            {
                                if (shouldDrawEffects)
                                {
                                    leftEye.SetActive(true);
                                }
                            }
                            if (shouldDrawHead)
                            {
                                head.SetActive(true);
                            }
                            if (shouldDrawAxes)
                            {
                                axes.SetActive(true);
                            }



                            // Mouth-open heuristic: inner-lip gap > nose length / 5.
                            float noseDistance  = Mathf.Abs((float)(points [27].y - points [33].y));
                            float mouthDistance = Mathf.Abs((float)(points [62].y - points [66].y));
                            if (mouthDistance > noseDistance / 5.0)
                            {
                                if (shouldDrawEffects)
                                {
                                    mouth.SetActive(true);
                                    foreach (ParticleSystem ps in mouthParticleSystem)
                                    {
                                        #if UNITY_5_5_OR_NEWER
                                        var em = ps.emission;
                                        em.enabled = true;
                                        var main = ps.main;
                                        main.startSizeMultiplier = 40 * (mouthDistance / noseDistance);
                                        #else
                                        ps.enableEmission = true;
                                        ps.startSize      = 40 * (mouthDistance / noseDistance);
                                        #endif
                                    }
                                }
                            }
                            else
                            {
                                if (shouldDrawEffects)
                                {
                                    foreach (ParticleSystem ps in mouthParticleSystem)
                                    {
                                        #if UNITY_5_5_OR_NEWER
                                        var em = ps.emission;
                                        em.enabled = false;
                                        #else
                                        ps.enableEmission = false;
                                        #endif
                                    }
                                }
                            }


                            Calib3d.Rodrigues(rvec, rotM);

                            transformationM.SetRow(0, new Vector4((float)rotM.get(0, 0) [0], (float)rotM.get(0, 1) [0], (float)rotM.get(0, 2) [0], (float)tvec.get(0, 0) [0]));
                            transformationM.SetRow(1, new Vector4((float)rotM.get(1, 0) [0], (float)rotM.get(1, 1) [0], (float)rotM.get(1, 2) [0], (float)tvec.get(1, 0) [0]));
                            transformationM.SetRow(2, new Vector4((float)rotM.get(2, 0) [0], (float)rotM.get(2, 1) [0], (float)rotM.get(2, 2) [0], (float)tvec.get(2, 0) [0]));
                            transformationM.SetRow(3, new Vector4(0, 0, 0, 1));

                            // The handedness conversion (invertYM/invertZM) is folded
                            // directly into the matrix compositions below.
                            if (shouldMoveARCamera)
                            {
                                if (ARGameObject != null)
                                {
                                    ARM = ARGameObject.transform.localToWorldMatrix * invertZM * transformationM.inverse * invertYM;
                                    ARUtils.SetTransformFromMatrix(ARCamera.transform, ref ARM);
                                    ARGameObject.SetActive(true);
                                }
                            }
                            else
                            {
                                ARM = ARCamera.transform.localToWorldMatrix * invertYM * transformationM * invertZM;

                                if (ARGameObject != null)
                                {
                                    ARUtils.SetTransformFromMatrix(ARGameObject.transform, ref ARM);
                                    ARGameObject.SetActive(true);
                                }
                            }
                        }
                    }
                }
                else
                {
                    rightEye.SetActive(false);
                    leftEye.SetActive(false);
                    head.SetActive(false);
                    mouth.SetActive(false);
                    axes.SetActive(false);
                }

                Imgproc.putText(rgbMat, "W:" + rgbMat.width() + " H:" + rgbMat.height() + " SO:" + Screen.orientation, new Point(5, rgbMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar(255, 255, 255), 1, Imgproc.LINE_AA, false);

                OpenCVForUnity.Utils.matToTexture2D(rgbMat, texture, colors);
            }
        }
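
The eye and mouth checks in the examples above all share one shape: an aperture counts as open when its vertical gap exceeds a fixed fraction of a horizontal reference distance. A sketch of that heuristic as a helper; the name and signature are assumptions:

        // Hypothetical helper: vertical gap versus horizontal extent, with the
        // threshold expressed as a divisor of the horizontal distance.
        static bool IsApertureOpen(Vector2 upper, Vector2 lower, Vector2 left, Vector2 right, float divisor)
        {
            return Mathf.Abs(upper.y - lower.y) > Mathf.Abs(left.x - right.x) / divisor;
        }

        // e.g. the right-eye check above becomes:
        // IsApertureOpen(points[43], points[46], points[42], points[45], 6.0f)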
Example #27
    /// <summary>
    /// Gets the frontal face angle.
    /// </summary>
    private Vector3 GetFrontalFaceAngle(List <Vector2> points)
    {
        if (points.Count != 68)
        {
            throw new ArgumentException("The landmark points are invalid.");
        }

        if (_camMatrix == null)
        {
            throw new ArgumentNullException("The camera intrinsic parameters are invalid.");
        }

        // Normalize the scale
        float   normScale = Math.Abs(points[30].y - points[8].y) / (normHeight / 2);
        Vector2 normDiff  = points[30] * normScale - new Vector2(normWidth / 2, normHeight / 2);

        for (int i = 0; i < points.Count; i++)
        {
            _normPoints[i] = points[i] * normScale - normDiff;
        }
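
        // NOTE: the nose tip (point 30) is mapped to the centre of the
        // normalized image, and the remaining landmarks are rescaled by a
        // factor derived from the nose-to-chin distance before solvePnP.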

        _imagePoints.fromArray(
            new Point((_normPoints[38].x + _normPoints[41].x) / 2, (_normPoints[38].y + _normPoints[41].y) / 2), // l eye
            new Point((_normPoints[43].x + _normPoints[46].x) / 2, (_normPoints[43].y + _normPoints[46].y) / 2), // r eye
            new Point(_normPoints[33].x, _normPoints[33].y),                                                     // nose
            new Point(_normPoints[48].x, _normPoints[48].y),                                                     // l mouth
            new Point(_normPoints[54].x, _normPoints[54].y),                                                     // r mouth
            new Point(_normPoints[0].x, _normPoints[0].y),                                                       // l ear
            new Point(_normPoints[16].x, _normPoints[16].y)                                                      // r ear
            );

        // Estimate the head pose from the 2D-3D point correspondences.
        if (_rVec == null || _tVec == null)
        {
            _rVec = new Mat(3, 1, CvType.CV_64FC1);
            _tVec = new Mat(3, 1, CvType.CV_64FC1);
            Calib3d.solvePnP(_objectPoints, _imagePoints, _camMatrix, _distCoeffs, _rVec, _tVec);
        }

        double  tVecX           = _tVec.get(0, 0)[0];
        double  tVecY           = _tVec.get(1, 0)[0];
        double  tVecZ           = _tVec.get(2, 0)[0];
        bool    isNotInViewport = false;
        Vector4 pos             = _VP * new Vector4((float)tVecX, (float)tVecY, (float)tVecZ, 1.0f);
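
        // pos is in clip space; after the perspective divide, each coordinate
        // must lie within [-1, 1] for the estimated head position to fall
        // inside the camera frustum.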

        if (pos.w != 0)
        {
            float x = pos.x / pos.w, y = pos.y / pos.w, z = pos.z / pos.w;
            if (x < -1.0f || x > 1.0f || y < -1.0f || y > 1.0f || z < -1.0f || z > 1.0f)
            {
                isNotInViewport = true;
            }
        }

        // If the object is not in the camera's viewport, do not use the extrinsic guess.
        if (double.IsNaN(tVecZ) || isNotInViewport)
        {
            Calib3d.solvePnP(_objectPoints, _imagePoints, _camMatrix, _distCoeffs,
                             _rVec, _tVec);
        }
        else
        {
            Calib3d.solvePnP(_objectPoints, _imagePoints, _camMatrix, _distCoeffs,
                             _rVec, _tVec, true, Calib3d.SOLVEPNP_ITERATIVE);
        }

        if (!isNotInViewport)
        {
            // Convert to Unity pose data.
            double[] rVecArr = new double[3];
            _rVec.get(0, 0, rVecArr);
            double[] tVecArr = new double[3];
            _tVec.get(0, 0, tVecArr);
            PoseData poseData = ARUtils.ConvertRvecTvecToPoseData(rVecArr, tVecArr);
            // Changes below the thresholds are ignored.
            ARUtils.LowpassPoseData(ref _oldPoseData, ref poseData, positionLowPass, rotationLowPass);
            _oldPoseData = poseData;
            // Create the transform matrix.
            _transformationM = Matrix4x4.TRS(poseData.pos, poseData.rot, Vector3.one);
        }


        Calib3d.Rodrigues(_rVec, _rotM);

        _transformationM.SetRow(0, new Vector4((float)_rotM.get(0, 0)[0], (float)_rotM.get(0, 1)[0], (float)_rotM.get(0, 2)[0], (float)_tVec.get(0, 0)[0]));
        _transformationM.SetRow(1, new Vector4((float)_rotM.get(1, 0)[0], (float)_rotM.get(1, 1)[0], (float)_rotM.get(1, 2)[0], (float)_tVec.get(1, 0)[0]));
        _transformationM.SetRow(2, new Vector4((float)_rotM.get(2, 0)[0], (float)_rotM.get(2, 1)[0], (float)_rotM.get(2, 2)[0], (float)_tVec.get(2, 0)[0]));
        _transformationM.SetRow(3, new Vector4(0, 0, 0, 1));

        // right-handed coordinates system (OpenCV) to left-handed one (Unity)
        _ARM = _invertYM * _transformationM * _invertZM;

        return(ExtractRotationFromMatrix(ref _ARM).eulerAngles);
    }
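
ExtractRotationFromMatrix is not shown in this snippet. A common Unity implementation reads the rotation out of the matrix's basis vectors; the sketch below is an assumption about that missing helper, not the original code.

    // Hypothetical stand-in for the ExtractRotationFromMatrix helper used above.
    public static Quaternion ExtractRotationFromMatrix(ref Matrix4x4 matrix)
    {
        // Column 2 of the matrix is the forward axis, column 1 the up axis.
        Vector3 forward = new Vector3(matrix.m02, matrix.m12, matrix.m22);
        Vector3 upwards = new Vector3(matrix.m01, matrix.m11, matrix.m21);
        return Quaternion.LookRotation(forward, upwards);
    }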