Example #1
    private void UpdateARObjectTransform_2(Mat rvec, Mat tvec)
    {
        // Convert to Unity pose data.
        double[] rvecArr = new double[3];
        rvec.get(0, 0, rvecArr);
        double[] tvecArr = new double[3];
        tvec.get(0, 0, tvecArr);
        PoseData poseData = ARUtils.ConvertRvecTvecToPoseData(rvecArr, tvecArr);

        // Changes in pos/rot below these thresholds are ignored.
        if (enableLowPassFilter)
        {
            ARUtils.LowpassPoseData(ref oldPoseData, ref poseData, positionLowPass, rotationLowPass);
        }
        oldPoseData = poseData;

        // Convert to transform matrix.
        ARM = ARUtils.ConvertPoseDataToMatrix(ref poseData, true, true);

        //if (shouldMoveARCamera) {
        //    ARM = arGameObject.transform.localToWorldMatrix * ARM.inverse;
        //    ARUtils.SetTransformFromMatrix(arCamera.transform, ref ARM);
        //} else {
        ARM = arCamera.transform.localToWorldMatrix * ARM;
        ARUtils.SetTransformFromMatrix(arGameObject.transform, ref ARM);
        //}
    }
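
The rvec and tvec passed in above typically come from ArUco marker pose estimation. A minimal calling sketch, assuming OpenCVForUnity's Aruco bindings and that dictionary, markerLength, camMatrix, and distCoeffs are initialized elsewhere (none of this caller code is part of the original snippet):

    private void DetectAndUpdate(Mat rgbMat)
    {
        // Hypothetical caller: detect markers, estimate each pose, and hand it off.
        List<Mat> corners = new List<Mat>();
        Mat ids = new Mat();
        Aruco.detectMarkers(rgbMat, dictionary, corners, ids);
        if (corners.Count > 0)
        {
            Mat rvecs = new Mat();
            Mat tvecs = new Mat();
            Aruco.estimatePoseSingleMarkers(corners, markerLength, camMatrix, distCoeffs, rvecs, tvecs);
            for (int i = 0; i < ids.total(); i++)
            {
                // Each row of rvecs/tvecs holds one marker's pose.
                using (Mat rvec = new Mat(rvecs, new OpenCVForUnity.CoreModule.Rect(0, i, 1, 1)))
                using (Mat tvec = new Mat(tvecs, new OpenCVForUnity.CoreModule.Rect(0, i, 1, 1)))
                {
                    UpdateARObjectTransform_2(rvec, tvec);
                }
            }
        }
    }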
Example #2
    private void UpdateARObjectTransform(Mat rvec, Mat tvec, int id)
    {
        // Look up the GameObject registered for this marker id.
        // (If the id is unknown, arGameObject keeps its previous value.)
        if (idDict.ContainsKey(id))
        {
            arGameObject = GameObject.Find(idDict[id]);
        }
        // Convert to Unity pose data.
        double[] rvecArr = new double[3];
        rvec.get(0, 0, rvecArr);
        double[] tvecArr = new double[3];
        tvec.get(0, 0, tvecArr);
        PoseData poseData = ARUtils.ConvertRvecTvecToPoseData(rvecArr, tvecArr);


        // Changes in pos/rot below these thresholds are ignored.
        if (enableLowPassFilter)
        {
            ARUtils.LowpassPoseData(ref oldPoseData, ref poseData, positionLowPass, rotationLowPass);
        }
        oldPoseData = poseData;

        // Convert to transform matrix.
        ARM = ARUtils.ConvertPoseDataToMatrix(ref poseData, true, true);


        ARM = arCamera.transform.localToWorldMatrix * ARM;
        ARUtils.SetTransformFromMatrix(arGameObject.transform, ref ARM);


        // Keep only the yaw: zero out pitch and roll, and pass the raw
        // Y angle through adjustRotation().
        Vector3 rawRotation = arGameObject.transform.rotation.eulerAngles;

        arGameObject.transform.rotation = Quaternion.Euler(new Vector3(0, adjustRotation(rawRotation[1]), 0));
    }
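
adjustRotation is not defined in this snippet. A hypothetical stand-in, assuming it merely wraps the yaw into [-180, 180) and applies a fixed offset (both rotationOffset and this behavior are guesses, not the original author's code):

    // Hypothetical helper; the real adjustRotation() is not shown in the example.
    private float rotationOffset = 0f;

    private float adjustRotation(float yawDegrees)
    {
        // Wrap into [-180, 180) after applying the assumed offset.
        return Mathf.Repeat(yawDegrees + rotationOffset + 180f, 360f) - 180f;
    }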
Example #3
        public override void UpdateValue()
        {
            if (matSourceGetterInterface == null)
            {
                return;
            }

            if (faceLandmarkGetterInterface == null)
            {
                return;
            }

            Mat rgbaMat = matSourceGetterInterface.GetMatSource();

            if (rgbaMat == null)
            {
                return;
            }
            else
            {
                if (rgbaMat.width() != imageWidth || rgbaMat.height() != imageHeight)
                {
                    imageWidth  = rgbaMat.width();
                    imageHeight = rgbaMat.height();
                    SetCameraMatrix(camMatrix, imageWidth, imageHeight);
                }
            }

            didUpdateHeadPositionAndRotation = false;

            List <Vector2> points = faceLandmarkGetterInterface.GetFaceLanmarkPoints();

            if (points != null)
            {
                MatOfPoint3f objectPoints = null;

                if (points.Count == 68)
                {
                    objectPoints = objectPoints68;

                    imagePoints.fromArray(
                        new Point((points[38].x + points[41].x) / 2, (points[38].y + points[41].y) / 2), //l eye (Interpupillary breadth)
                        new Point((points[43].x + points[46].x) / 2, (points[43].y + points[46].y) / 2), //r eye (Interpupillary breadth)
                        new Point(points[30].x, points[30].y),                                           //nose (Tip)
                        new Point(points[33].x, points[33].y),                                           //nose (Subnasale)
                        new Point(points[0].x, points[0].y),                                             //l ear (Bitragion breadth)
                        new Point(points[16].x, points[16].y)                                            //r ear (Bitragion breadth)
                        );
                }
                else if (points.Count == 17)
                {
                    objectPoints = objectPoints17;

                    imagePoints.fromArray(
                        new Point((points[2].x + points[3].x) / 2, (points[2].y + points[3].y) / 2), //l eye (Interpupillary breadth)
                        new Point((points[4].x + points[5].x) / 2, (points[4].y + points[5].y) / 2), //r eye (Interpupillary breadth)
                        new Point(points[0].x, points[0].y),                                         //nose (Tip)
                        new Point(points[1].x, points[1].y),                                         //nose (Subnasale)
                        new Point(points[6].x, points[6].y),                                         //l ear (Bitragion breadth)
                        new Point(points[8].x, points[8].y)                                          //r ear (Bitragion breadth)
                        );
                }
                else if (points.Count == 6)
                {
                    objectPoints = objectPoints6;

                    imagePoints.fromArray(
                        new Point((points[2].x + points[3].x) / 2, (points[2].y + points[3].y) / 2), //l eye (Interpupillary breadth)
                        new Point((points[4].x + points[5].x) / 2, (points[4].y + points[5].y) / 2), //r eye (Interpupillary breadth)
                        new Point(points[0].x, points[0].y),                                         //nose (Tip)
                        new Point(points[1].x, points[1].y)                                          //nose (Subnasale)
                        );
                }

                if (objectPoints == null)
                {
                    // Unsupported landmark count (not 68/17/6); nothing to estimate.
                    return;
                }

                // Estimate head pose.
                if (rvec == null || tvec == null)
                {
                    rvec = new Mat(3, 1, CvType.CV_64FC1);
                    tvec = new Mat(3, 1, CvType.CV_64FC1);
                    Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
                }


                double tvec_x = tvec.get(0, 0)[0], tvec_y = tvec.get(1, 0)[0], tvec_z = tvec.get(2, 0)[0];

                bool    isNotInViewport = false;
                Vector4 pos             = VP * new Vector4((float)tvec_x, (float)tvec_y, (float)tvec_z, 1.0f);
                if (pos.w != 0)
                {
                    float x = pos.x / pos.w, y = pos.y / pos.w, z = pos.z / pos.w;
                    if (x < -1.0f || x > 1.0f || y < -1.0f || y > 1.0f || z < -1.0f || z > 1.0f)
                    {
                        isNotInViewport = true;
                    }
                }

                if (double.IsNaN(tvec_z) || isNotInViewport)
                {
                    // tvec is unreliable (NaN, or the estimated object is outside the
                    // camera frustum), so solve from scratch without the extrinsic guess.
                    Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
                }
                else
                {
                    Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec, true, Calib3d.SOLVEPNP_ITERATIVE);
                }

                //Debug.Log (tvec.dump () + " " + isNotInViewport);

                if (!isNotInViewport)
                {
                    // Convert to Unity pose data.
                    double[] rvecArr = new double[3];
                    rvec.get(0, 0, rvecArr);
                    double[] tvecArr = new double[3];
                    tvec.get(0, 0, tvecArr);
                    PoseData poseData = ARUtils.ConvertRvecTvecToPoseData(rvecArr, tvecArr);

                    // adjust the position to the scale of real-world space.
                    poseData.pos = new Vector3(poseData.pos.x * 0.001f, poseData.pos.y * 0.001f, poseData.pos.z * 0.001f);

                    // Changes in pos/rot below these thresholds are ignored.
                    if (enableLowPassFilter)
                    {
                        ARUtils.LowpassPoseData(ref oldPoseData, ref poseData, positionLowPass, rotationLowPass);
                    }
                    oldPoseData = poseData;

                    Matrix4x4 transformationM = Matrix4x4.TRS(poseData.pos, poseData.rot, Vector3.one);

                    // right-handed coordinates system (OpenCV) to left-handed one (Unity)
                    // https://stackoverflow.com/questions/30234945/change-handedness-of-a-row-major-4x4-transformation-matrix
                    transformationM = invertYM * transformationM * invertYM;

                    // Apply the Y-axis and Z-axis reflection matrices. (Adjust the posture of the AR object.)
                    transformationM = transformationM * invertYM * invertZM;

                    headPosition = ARUtils.ExtractTranslationFromMatrix(ref transformationM);
                    headRotation = ARUtils.ExtractRotationFromMatrix(ref transformationM);

                    didUpdateHeadPositionAndRotation = true;
                }
            }
        }
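
The VP matrix used for the frustum check above is not built in this snippet. A sketch of one common setup, assuming OpenCVForUnity's ARUtils.CalculateProjectionMatrixFromCameraMatrixValues helper and intrinsics read from camMatrix (the 0.3/2000 clip planes are arbitrary assumptions):

        // Possible construction of VP (projection * view) for the viewport test.
        float fx = (float)camMatrix.get(0, 0)[0];
        float fy = (float)camMatrix.get(1, 1)[0];
        float cx = (float)camMatrix.get(0, 2)[0];
        float cy = (float)camMatrix.get(1, 2)[0];
        Matrix4x4 P = ARUtils.CalculateProjectionMatrixFromCameraMatrixValues(
            fx, fy, cx, cy, imageWidth, imageHeight, 0.3f, 2000f);
        // View matrix that flips Z: OpenCV looks down +Z, Unity's camera down -Z.
        Matrix4x4 V = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));
        VP = P * V;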
Example #4
        // Called each frame to refresh the estimated head rotation.
        public override void UpdateValue()
        {
            if (dlibFaceLanmarkGetter == null)
            {
                return;
            }

            didUpdateHeadRotation = false;


            List <Vector2> points = dlibFaceLanmarkGetter.getFaceLanmarkPoints();

            if (points != null)
            {
                MatOfPoint3f objectPoints = null;
                if (points.Count == 68)
                {
                    objectPoints = objectPoints68;

                    imagePoints.fromArray(
                        new Point((points [38].x + points [41].x) / 2, (points [38].y + points [41].y) / 2), //l eye (Interpupillary breadth)
                        new Point((points [43].x + points [46].x) / 2, (points [43].y + points [46].y) / 2), //r eye (Interpupillary breadth)
                        new Point(points [30].x, points [30].y),                                             //nose (Nose top)
                        new Point(points [48].x, points [48].y),                                             //l mouth (Mouth breadth)
                        new Point(points [54].x, points [54].y),                                             //r mouth (Mouth breadth)
                        new Point(points [0].x, points [0].y),                                               //l ear (Bitragion breadth)
                        new Point(points [16].x, points [16].y)                                              //r ear (Bitragion breadth)
                        );
                }

                if (objectPoints == null)
                {
                    // Only the 68-point landmark layout is supported here.
                    return;
                }

                // Estimate head pose.
                if (rvec == null || tvec == null)
                {
                    rvec = new Mat(3, 1, CvType.CV_64FC1);
                    tvec = new Mat(3, 1, CvType.CV_64FC1);
                    Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
                }

                double tvec_z = tvec.get(2, 0) [0];

                if (double.IsNaN(tvec_z) || tvec_z < 0)    // if tvec is wrong data, do not use extrinsic guesses.
                {
                    Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
                }
                else
                {
                    Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec, true, Calib3d.SOLVEPNP_ITERATIVE);
                }
                //Debug.Log (tvec.dump());

                if (!double.IsNaN(tvec_z))
                {
                    // Convert to Unity pose data.
                    double[] rvecArr = new double[3];
                    rvec.get(0, 0, rvecArr);
                    double[] tvecArr = new double[3];
                    tvec.get(0, 0, tvecArr);
                    PoseData poseData = ARUtils.ConvertRvecTvecToPoseData(rvecArr, tvecArr);

                    // Changes in pos/rot below these thresholds are ignored.
                    if (enableLowPassFilter)
                    {
                        ARUtils.LowpassPoseData(ref oldPoseData, ref poseData, positionLowPass, rotationLowPass);
                    }
                    oldPoseData = poseData;


                    transformationM = Matrix4x4.TRS(poseData.pos, poseData.rot, Vector3.one);

                    // right-handed coordinates system (OpenCV) to left-handed one (Unity)
                    ARM = invertYM * transformationM;

                    // Apply Z axis inverted matrix.
                    ARM = ARM * invertZM;



                    headRotation = ARUtils.ExtractRotationFromMatrix(ref ARM);

                    didUpdateHeadRotation = true;
                }
            }
        }
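
invertYM and invertZM are not initialized in this snippet. A sketch of the usual axis-reflection setup seen in OpenCVForUnity-style samples, assuming they are Matrix4x4 fields built once (e.g. in Start()):

            // Reflection matrices for converting between OpenCV's right-handed
            // frame and Unity's left-handed frame; typically built once at startup.
            invertYM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1));
            invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));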
Example #5
    /// <summary>
    /// Gets the orientation of the face.
    /// </summary>
    private Vector3 GetFrontalFaceAngle(List <Vector2> points)
    {
        if (points.Count != 68)
        {
            throw new ArgumentException("The landmarks are invalid; 68 points are required.");
        }

        if (_camMatrix == null)
        {
            throw new InvalidOperationException("The camera intrinsic parameters are not set.");
        }

        // Normalize the scale.
        float   normScale = Math.Abs(points[30].y - points[8].y) / (normHeight / 2);
        Vector2 normDiff  = points[30] * normScale - new Vector2(normWidth / 2, normHeight / 2);

        for (int i = 0; i < points.Count; i++)
        {
            _normPoints[i] = points[i] * normScale - normDiff;
        }

        _imagePoints.fromArray(
            new Point((_normPoints[38].x + _normPoints[41].x) / 2, (_normPoints[38].y + _normPoints[41].y) / 2), // left eye
            new Point((_normPoints[43].x + _normPoints[46].x) / 2, (_normPoints[43].y + _normPoints[46].y) / 2), // right eye
            new Point(_normPoints[33].x, _normPoints[33].y),                                                     // nose
            new Point(_normPoints[48].x, _normPoints[48].y),                                                     // left mouth corner
            new Point(_normPoints[54].x, _normPoints[54].y),                                                     // right mouth corner
            new Point(_normPoints[0].x, _normPoints[0].y),                                                       // left ear
            new Point(_normPoints[16].x, _normPoints[16].y)                                                      // right ear
            );

        // Estimate the head pose from the 2D-3D point correspondences.
        if (_rVec == null || _tVec == null)
        {
            _rVec = new Mat(3, 1, CvType.CV_64FC1);
            _tVec = new Mat(3, 1, CvType.CV_64FC1);
            Calib3d.solvePnP(_objectPoints, _imagePoints, _camMatrix, _distCoeffs, _rVec, _tVec);
        }

        double  tVecX           = _tVec.get(0, 0)[0];
        double  tVecY           = _tVec.get(1, 0)[0];
        double  tVecZ           = _tVec.get(2, 0)[0];
        bool    isNotInViewport = false;
        Vector4 pos             = _VP * new Vector4((float)tVecX, (float)tVecY, (float)tVecZ, 1.0f);

        if (pos.w != 0)
        {
            float x = pos.x / pos.w, y = pos.y / pos.w, z = pos.z / pos.w;
            if (x < -1.0f || x > 1.0f || y < -1.0f || y > 1.0f || z < -1.0f || z > 1.0f)
            {
                isNotInViewport = true;
            }
        }

        // If the object is not in the camera's field of view, do not use the extrinsic guess.
        if (double.IsNaN(tVecZ) || isNotInViewport)
        {
            Calib3d.solvePnP(_objectPoints, _imagePoints, _camMatrix, _distCoeffs,
                             _rVec, _tVec);
        }
        else
        {
            Calib3d.solvePnP(_objectPoints, _imagePoints, _camMatrix, _distCoeffs,
                             _rVec, _tVec, true, Calib3d.SOLVEPNP_ITERATIVE);
        }

        if (!isNotInViewport)
        {
            // Convert to Unity pose data.
            double[] rVecArr = new double[3];
            _rVec.get(0, 0, rVecArr);
            double[] tVecArr = new double[3];
            _tVec.get(0, 0, tVecArr);
            PoseData poseData = ARUtils.ConvertRvecTvecToPoseData(rVecArr, tVecArr);
            // Ignore changes below the low-pass thresholds.
            ARUtils.LowpassPoseData(ref _oldPoseData, ref poseData, positionLowPass, rotationLowPass);
            _oldPoseData = poseData;
            // Build the transformation matrix.
            // (Note: the rows are rebuilt from the raw _rVec/_tVec below, which
            // overwrites this low-passed TRS matrix.)
            _transformationM = Matrix4x4.TRS(poseData.pos, poseData.rot, Vector3.one);
        }


        Calib3d.Rodrigues(_rVec, _rotM);

        _transformationM.SetRow(0,
                                new Vector4(
                                    (float)_rotM.get(0, 0)[0],
                                    (float)_rotM.get(0, 1)[0],
                                    (float)_rotM.get(0, 2)[0],
                                    (float)_tVec.get(0, 0)[0]));
        _transformationM.SetRow(1,
                                new Vector4(
                                    (float)_rotM.get(1, 0)[0],
                                    (float)_rotM.get(1, 1)[0],
                                    (float)_rotM.get(1, 2)[0],
                                    (float)_tVec.get(1, 0)[0]));
        _transformationM.SetRow(2,
                                new Vector4(
                                    (float)_rotM.get(2, 0)[0],
                                    (float)_rotM.get(2, 1)[0],
                                    (float)_rotM.get(2, 2)[0],
                                    (float)_tVec.get(2, 0)[0]));
        _transformationM.SetRow(3,
                                new Vector4(
                                    0,
                                    0,
                                    0,
                                    1));

        _ARM = _invertYM * _transformationM * _invertYM;
        _ARM = _ARM * _invertYM * _invertZM;

        return ExtractRotationFromMatrix(ref _ARM).eulerAngles;
    }
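
ExtractRotationFromMatrix is called without the ARUtils prefix here, so it is presumably a local helper. A sketch of the standard Unity idiom it most likely implements (this is an assumption, not the original code):

    // Assumed equivalent of the ExtractRotationFromMatrix helper used above:
    // read forward/up from the rotation columns of a TRS matrix.
    private static Quaternion ExtractRotationFromMatrix(ref Matrix4x4 matrix)
    {
        Vector3 forward = matrix.GetColumn(2); // third column = forward axis
        Vector3 upwards = matrix.GetColumn(1); // second column = up axis
        return Quaternion.LookRotation(forward, upwards);
    }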
Example #6
        //NOTE: Passing a List here is a bit unpleasant, but we prioritize following Dlib's conventions.
        /// <summary>
        /// Receives feature points and updates the head pose estimate.
        /// </summary>
        /// <param name="landmarks"></param>
        public void EstimatePose(List <Vector2> landmarks)
        {
            // Bail out entirely if image initialization hasn't finished.
            if (_width < 1 || _height < 1)
            {
                return;
            }

            HasValidPoseData = false;

            SetImagePoints(landmarks);

            // Rough initial estimate.
            if (_rVec == null || _tVec == null)
            {
                _rVec = new Mat(3, 1, CvType.CV_64FC1);
                _tVec = new Mat(3, 1, CvType.CV_64FC1);
                Calib3d.solvePnP(_objPoints, _imagePoints, _camMatrix, _distCoeffs, _rVec, _tVec);
            }

            _tVec.get(0, 0, _tVecArrayTemp);
            float tx = (float)_tVecArrayTemp[0];
            float ty = (float)_tVecArrayTemp[1];
            float tz = (float)_tVecArrayTemp[2];

            bool notInViewport = false;
            var  pos           = _vp * new Vector4(tx, ty, tz, 1.0f);

            if (Mathf.Abs(pos.w) > 0.0001)
            {
                float x = pos.x / pos.w;
                float y = pos.y / pos.w;
                float z = pos.z / pos.w;
                notInViewport = Mathf.Abs(x) > 1.0f || Mathf.Abs(y) > 1.0f || Mathf.Abs(z) > 1.0f;
            }

            //NOTE: In short, if the current estimate is too unreliable, redo the computation from scratch.
            if (float.IsNaN(tz) || notInViewport)
            {
                Calib3d.solvePnP(_objPoints, _imagePoints, _camMatrix, _distCoeffs, _rVec, _tVec);
            }
            else
            {
                // If tracking is working normally, this branch should be taken.
                Calib3d.solvePnP(
                    _objPoints, _imagePoints, _camMatrix, _distCoeffs, _rVec, _tVec,
                    true, Calib3d.SOLVEPNP_ITERATIVE
                    );
            }

            if (notInViewport)
            {
                return;
            }

            // Convert the final t/r values to Position/Rotation for use in Unity.
            _rVec.get(0, 0, _rVecArray);
            _tVec.get(0, 0, _tVecArray);
            var poseData = ARUtils.ConvertRvecTvecToPoseData(_rVecArray, _tVecArray);

            // 0.001f converts from [mm] to [m].
            // YM: converts the right-handed frame to a left-handed one.
            // ZM: flips front/back (presumably).
            var transformationM =
                _invertYM *
                Matrix4x4.TRS(0.001f * poseData.pos, poseData.rot, Vector3.one) *
                _invertZM;

            HeadPosition     = ARUtils.ExtractTranslationFromMatrix(ref transformationM);
            HeadRotation     = ARUtils.ExtractRotationFromMatrix(ref transformationM);
            HasValidPoseData = true;
        }
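
Every example above pairs the 2D landmarks with a 3D face model (objectPoints / _objPoints) whose construction is not shown. A sketch of a plausible six-correspondence model in millimeters, matching the ordering used in example #3's 68-landmark branch (l eye, r eye, nose tip, subnasale, l ear, r ear); the specific values are illustrative anthropometric assumptions, not taken from these snippets:

        // Hypothetical 3D face model in [mm]; ordering must match the
        // image points fed to solvePnP. Values are illustrative only.
        MatOfPoint3f objectPoints = new MatOfPoint3f(
            new Point3(-34, 90, 83),   // l eye (Interpupillary breadth)
            new Point3(34, 90, 83),    // r eye (Interpupillary breadth)
            new Point3(0.0, 50, 120),  // nose (Tip)
            new Point3(0.0, 32, 97),   // nose (Subnasale)
            new Point3(-79, 90, 0.0),  // l ear (Bitragion breadth)
            new Point3(79, 90, 0.0));  // r ear (Bitragion breadth)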