Example #1
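    // Per-frame marker tracking: grab the camera Mat, detect keypoints and compute
    // descriptors on the grayscale image, kNN-match them against the precomputed
    // marker descriptors, keep the matches that pass Lowe's ratio test, and draw
    // the result to a texture.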
    private void Update()
    {
        inputMat = webCamTextureToMatHelper.GetMat();

        MatOfKeyPoint camKeyPoints   = new MatOfKeyPoint();
        Mat           camDescriptors = new Mat();

        Imgproc.cvtColor(inputMat, grayMat, Imgproc.COLOR_BGR2GRAY);

        detector.detect(grayMat, camKeyPoints);
        extractor.compute(grayMat, camKeyPoints, camDescriptors);

        if (camKeyPoints.toList().Count < 1)
        {
            return;
        }

        List <MatOfDMatch> matches = new List <MatOfDMatch>();

        matcher.knnMatch(makerDescriptors, camDescriptors, matches, 2);

        //-- Filter matches using Lowe's ratio test
        float         ratioThresh       = 0.75f;
        List <DMatch> listOfGoodMatches = new List <DMatch>();

        for (int i = 0; i < matches.Count; i++)
        {
            if (matches[i].rows() > 1)
            {
                DMatch[] dMatches = matches[i].toArray();
                if (dMatches[0].distance < ratioThresh * dMatches[1].distance)
                {
                    listOfGoodMatches.Add(dMatches[0]);
                }
            }
        }
        MatOfDMatch goodMatches = new MatOfDMatch();

        goodMatches.fromList(listOfGoodMatches);

        //-- Draw matches
        Mat resultImg = new Mat();

        Features2d.drawMatches(makerMat, makerKeyPoints, grayMat, camKeyPoints, goodMatches, resultImg);
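        // drawMatches renders the marker image and the camera frame side by side in
        // resultImg, which is why the commented-out corner-drawing code below offsets
        // every scene point by makerMat.cols().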

        //listOfGoodMatches = goodMatches.toList();

        ////-- Localize the object
        //List<Point> obj = new List<Point>();
        //List<Point> scene = new List<Point>();
        //List<KeyPoint> listOfKeypointsObject = makerKeyPoints.toList();
        //List<KeyPoint> listOfKeypointsScene = camKeyPoints.toList();
        //for (int i = 0; i < listOfGoodMatches.Count(); i++)
        //{
        //    //-- Get the keypoints from the good matches
        //    obj.Add(listOfKeypointsObject[listOfGoodMatches[i].queryIdx].pt);
        //    scene.Add(listOfKeypointsScene[listOfGoodMatches[i].trainIdx].pt);
        //}
        //MatOfPoint2f objMat = new MatOfPoint2f();
        //MatOfPoint2f sceneMat = new MatOfPoint2f();
        //objMat.fromList(obj);
        //sceneMat.fromList(scene);
        //double ransacReprojThreshold = 3.0;
        //Mat H = Calib3d.findHomography(objMat, sceneMat, Calib3d.RANSAC, ransacReprojThreshold);

        ////-- Get the corners from the image_1 ( the object to be "detected" )
        //Mat objCorners = new Mat(4, 1, CvType.CV_32FC2);
        //Mat sceneCorners = new Mat();
        //float[] objCornersData = new float[(int)(objCorners.total() * objCorners.channels())];
        //objCorners.get(0, 0, objCornersData);
        //objCornersData[0] = 0;
        //objCornersData[1] = 0;
        //objCornersData[2] = makerMat.cols();
        //objCornersData[3] = 0;
        //objCornersData[4] = makerMat.cols();
        //objCornersData[5] = makerMat.rows();
        //objCornersData[6] = 0;
        //objCornersData[7] = makerMat.rows();
        //objCorners.put(0, 0, objCornersData);

        //Core.perspectiveTransform(objCorners, sceneCorners, H);
        //float[] sceneCornersData = new float[(int)(sceneCorners.total() * sceneCorners.channels())];
        //sceneCorners.get(0, 0, sceneCornersData);

        ////-- Draw lines between the corners (the mapped object in the scene - image_2 )
        //Imgproc.line(resultImg, new Point(sceneCornersData[0] + makerMat.cols(), sceneCornersData[1]),
        //        new Point(sceneCornersData[2] + makerMat.cols(), sceneCornersData[3]), new Scalar(0, 255, 0), 4);
        //Imgproc.line(resultImg, new Point(sceneCornersData[2] + makerMat.cols(), sceneCornersData[3]),
        //        new Point(sceneCornersData[4] + makerMat.cols(), sceneCornersData[5]), new Scalar(0, 255, 0), 4);
        //Imgproc.line(resultImg, new Point(sceneCornersData[4] + makerMat.cols(), sceneCornersData[5]),
        //        new Point(sceneCornersData[6] + makerMat.cols(), sceneCornersData[7]), new Scalar(0, 255, 0), 4);
        //Imgproc.line(resultImg, new Point(sceneCornersData[6] + makerMat.cols(), sceneCornersData[7]),
        //        new Point(sceneCornersData[0] + makerMat.cols(), sceneCornersData[1]), new Scalar(0, 255, 0), 4);

        if (!first)
        {
            texture = new Texture2D(resultImg.cols(), resultImg.rows(), TextureFormat.RGBA32, false);
            dstQuad.GetComponent <Renderer>().material.mainTexture = texture;
            first = true;
        }

        Utils.matToTexture2D(resultImg, texture);
    }
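
Note that camKeyPoints, camDescriptors, goodMatches, and resultImg all wrap native OpenCV buffers, so allocating fresh ones on every Update leaks native memory over time. A minimal cleanup sketch for the end of Update, assuming the OpenCVForUnity build in use exposes Dispose() on Mat (older releases expose release() instead):

    // Hypothetical end-of-frame cleanup; resultImg is safe to release here because
    // matToTexture2D has already copied its pixels into the Texture2D.
    camKeyPoints.Dispose();
    camDescriptors.Dispose();
    goodMatches.Dispose();
    resultImg.Dispose();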
Example #2
        /// <summary>
        /// Raises the web cam texture to mat helper inited event.
        /// </summary>
        public void OnWebCamTextureToMatHelperInited()
        {
            Debug.Log("OnWebCamTextureToMatHelperInited");

            Mat webCamTextureMat = webCamTextureToMatHelper.GetMat();

            colors  = new Color32[webCamTextureMat.cols() * webCamTextureMat.rows()];
            texture = new Texture2D(webCamTextureMat.cols(), webCamTextureMat.rows(), TextureFormat.RGBA32, false);



            gameObject.transform.localScale = new Vector3(webCamTextureMat.cols(), webCamTextureMat.rows(), 1);

            Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

            float width  = gameObject.transform.localScale.x;
            float height = gameObject.transform.localScale.y;

            float imageScale  = 1.0f;
            float widthScale  = (float)Screen.width / width;
            float heightScale = (float)Screen.height / height;

            if (widthScale < heightScale)
            {
                Camera.main.orthographicSize = height / 2;
            }
            else
            {
                Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
                imageScale = (float)Screen.height / (float)Screen.width;
            }

            gameObject.GetComponent <Renderer> ().material.mainTexture = texture;


            // Set camera parameters
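            // Approximate pinhole intrinsics for an uncalibrated webcam:
            //   [ fx  0  cx ]   fx = fy = max(rows, cols) is a common focal-length
            //   [  0  fy cy ]   guess, and the principal point (cx, cy) is assumed
            //   [  0   0  1 ]   to sit at the image center.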
            int max_d = Mathf.Max(webCamTextureMat.rows(), webCamTextureMat.cols());

            camMatrix = new Mat(3, 3, CvType.CV_64FC1);
            camMatrix.put(0, 0, max_d);
            camMatrix.put(0, 1, 0);
            camMatrix.put(0, 2, webCamTextureMat.cols() / 2.0f);
            camMatrix.put(1, 0, 0);
            camMatrix.put(1, 1, max_d);
            camMatrix.put(1, 2, webCamTextureMat.rows() / 2.0f);
            camMatrix.put(2, 0, 0);
            camMatrix.put(2, 1, 0);
            camMatrix.put(2, 2, 1.0f);
            Debug.Log("camMatrix " + camMatrix.dump());

            distCoeffs = new MatOfDouble(0, 0, 0, 0);
            Debug.Log("distCoeffs " + distCoeffs.dump());

            // Camera calibration values
            Size   imageSize      = new Size(webCamTextureMat.cols() * imageScale, webCamTextureMat.rows() * imageScale);
            double apertureWidth  = 0;
            double apertureHeight = 0;

            double[] fovx           = new double[1];
            double[] fovy           = new double[1];
            double[] focalLength    = new double[1];
            Point    principalPoint = new Point();

            double[] aspectratio = new double[1];


            Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);

            Debug.Log("imageSize " + imageSize.ToString());
            Debug.Log("apertureWidth " + apertureWidth);
            Debug.Log("apertureHeight " + apertureHeight);
            Debug.Log("fovx " + fovx [0]);
            Debug.Log("fovy " + fovy [0]);
            Debug.Log("focalLength " + focalLength [0]);
            Debug.Log("principalPoint " + principalPoint.ToString());
            Debug.Log("aspectratio " + aspectratio [0]);

            //Adjust Unity Camera FOV
            if (widthScale < heightScale)
            {
                ARCamera.fieldOfView = (float)fovy[0];
            }
            else
            {
                ARCamera.fieldOfView = (float)fovx[0];
            }



            MarkerDesign[] markerDesigns = new MarkerDesign[markerSettings.Length];
            for (int i = 0; i < markerDesigns.Length; i++)
            {
                markerDesigns [i] = markerSettings [i].markerDesign;
            }

            markerDetector = new MarkerDetector(camMatrix, distCoeffs, markerDesigns);



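            // OpenCV poses live in a right-handed, y-down camera frame while Unity is
            // left-handed with y up; the Y and Z flips below convert transformation
            // matrices between the two conventions.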
            invertYM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1));
            Debug.Log("invertYM " + invertYM.ToString());

            invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));
            Debug.Log("invertZM " + invertZM.ToString());
        }
Example #3

    public Mat GetMat()
    {
        return webCamTextureToMatHelper.GetMat();
    }
Example #4
// Update is called once per frame
    void Update()
    {
        if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
        {
            frame = webCamTextureToMatHelper.GetMat();
            frame.copyTo(img_orig);

            drawing = img_orig.clone();

            int       lowThreshold = 50;
            const int ratio        = 1;
            const int kernel_size  = 3;

            Imgproc.cvtColor(img_orig, img_lab, Imgproc.COLOR_BGR2Lab);
            double omrSize = img_orig.cols() * img_orig.rows();

            Imgproc.cvtColor(img_orig, img_gray, Imgproc.COLOR_RGBA2GRAY);
            Imgproc.GaussianBlur(img_gray, img_gray, new Size(15, 15), 1.5, 1.5);       // Gaussian blur
            Imgproc.erode(img_gray, img_gray, new Mat(), new Point(-1, -1), 1);         // Erosion
            // Imgproc.dilate(img_gray, img_gray, new Mat(), new Point(-1, -1), 10, 1, new Scalar(10));    // Dilation
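            // Canny hysteresis: lower threshold = lowThreshold, upper = lowThreshold * ratio.
            // With ratio = 1 both thresholds coincide; the usual recommendation is an upper
            // threshold 2-3x the lower one.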
            Imgproc.Canny(img_gray, img_edges, lowThreshold, lowThreshold * ratio, kernel_size, false);

            //Shape detection
            List <MatOfPoint> contours = new List <MatOfPoint>();
            Mat hierarchy = new Mat();
            Imgproc.findContours(img_edges, contours, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE, new Point(0, 0));

            //Texture2D tex = new Texture2D(img_edges.width(), img_edges.height(), TextureFormat.RGB24, false);
            //Utils.matToTexture2D(img_edges, tex);
            //byte[] bytes1 = tex.EncodeToJPG();
            //File.WriteAllBytes("D:/2019/OMR/" + "test213123.png", bytes1);

            List <MatOfPoint> hulls = new List <MatOfPoint>();

            for (int i = 0; i < contours.Count; i++)
            {
                MatOfInt hull_temp = new MatOfInt();
                Imgproc.convexHull(contours[i], hull_temp);
                int[]   arrIndex   = hull_temp.toArray();
                Point[] arrContour = contours[i].toArray();
                Point[] arrPoints  = new Point[arrIndex.Length];

                for (int k = 0; k < arrIndex.Length; k++)
                {
                    arrPoints[k] = arrContour[arrIndex[k]];
                }

                MatOfPoint temp = new MatOfPoint();
                temp.fromArray(arrPoints);

                //Filter outliers
                if (Imgproc.contourArea(temp) > omrSize / 3 && Imgproc.contourArea(temp) < (omrSize * 4) / 5)
                {
                    hulls.Add(temp);
                }
            }

            List <MatOfPoint2f> hull2f = new List <MatOfPoint2f>();
            for (int i = 0; i < hulls.Count; i++)
            {
                MatOfPoint2f newPoint = new MatOfPoint2f(hulls[i].toArray());
                hull2f.Add(newPoint);
            }

            for (int i = 0; i < hulls.Count; i++)
            {
                //Approximate polygon
                MatOfPoint2f approx = new MatOfPoint2f();

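                // An epsilon of 1% of the hull perimeter keeps only the dominant
                // corners, so a rectangular sheet collapses to roughly four vertices
                // for the isSquare test below.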
                Imgproc.approxPolyDP(hull2f[i], approx, 0.01 * Imgproc.arcLength(hull2f[i], true), true);
                List <Point> approx_polygon = approx.toList();
                // approx_polygon = Scannerproc.filterPolygon(approx_polygon);
                // Debug.Log(approx_polygon.Count);
                if (!Scannerproc.isSquare(approx_polygon))
                {
                    continue;
                }

                nowRectPoints.Clear();
                nowRectPoints.AddRange(approx_polygon);
                perspectiveAlign();

                //Center of mass
                int cx = 0,
                    cy = 0;


                for (int k = 0; k < approx_polygon.Count; k++)
                {
                    cx += (int)approx_polygon[k].x;
                    cy += (int)approx_polygon[k].y;
                }
                cx /= approx_polygon.Count;
                cy /= approx_polygon.Count;

                Scannerproc.drawShape(drawing, approx_polygon, new Scalar(0, 255, 0));
            }

            if (showTextureOnScreen)
            {
                showCurrentTextureOnScreen();
            }
        }
    }
Example #5
        /// <summary>
        /// Raises the web cam texture to mat helper initialized event.
        /// </summary>
        public void OnWebCamTextureToMatHelperInitialized()
        {
            Debug.Log("OnWebCamTextureToMatHelperInitialized");

            Mat webCamTextureMat = webCamTextureToMatHelper.GetMat();

            texture = new Texture2D(webCamTextureMat.cols(), webCamTextureMat.rows(), TextureFormat.RGBA32, false);
            ScreenQuad.GetComponent <Renderer> ().material.mainTexture = texture;


            Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);


            float width  = webCamTextureMat.width();
            float height = webCamTextureMat.height();

            float imageSizeScale = 1.0f;
            float widthScale     = (float)Screen.width / width;
            float heightScale    = (float)Screen.height / height;

            if (widthScale >= heightScale)
            {
                imageSizeScale = (float)Screen.height / (float)Screen.width;
            }

            // Set camera parameters
            int    max_d = (int)Mathf.Max(width, height);
            double fx    = max_d;
            double fy    = max_d;
            double cx    = width / 2.0f;
            double cy    = height / 2.0f;

            camMatrix = new Mat(3, 3, CvType.CV_64FC1);
            camMatrix.put(0, 0, fx);
            camMatrix.put(0, 1, 0);
            camMatrix.put(0, 2, cx);
            camMatrix.put(1, 0, 0);
            camMatrix.put(1, 1, fy);
            camMatrix.put(1, 2, cy);
            camMatrix.put(2, 0, 0);
            camMatrix.put(2, 1, 0);
            camMatrix.put(2, 2, 1.0f);
            Debug.Log("camMatrix " + camMatrix.dump());

            distCoeffs = new MatOfDouble(0, 0, 0, 0);
            Debug.Log("distCoeffs " + distCoeffs.dump());


            // Camera calibration values
            Size   imageSize      = new Size(width * imageSizeScale, height * imageSizeScale);
            double apertureWidth  = 0;
            double apertureHeight = 0;

            double[] fovx           = new double[1];
            double[] fovy           = new double[1];
            double[] focalLength    = new double[1];
            Point    principalPoint = new Point(0, 0);

            double[] aspectratio = new double[1];

            Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);

            Debug.Log("imageSize " + imageSize.ToString());
            Debug.Log("apertureWidth " + apertureWidth);
            Debug.Log("apertureHeight " + apertureHeight);
            Debug.Log("fovx " + fovx [0]);
            Debug.Log("fovy " + fovy [0]);
            Debug.Log("focalLength " + focalLength [0]);
            Debug.Log("principalPoint " + principalPoint.ToString());
            Debug.Log("aspectratio " + aspectratio [0]);


            // Convert between the OpenCV and Unity FOV conventions.
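            // The numerator is the ideal symmetric FOV, 2 * atan(size / (2 * focal));
            // the denominator splits the same FOV at the principal point. The ratio
            // corrects fovx/fovy when (cx, cy) is off-center (with a centered
            // principal point, as here, the scale works out to 1.0).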
            double fovXScale = (2.0 * Mathf.Atan((float)(imageSize.width / (2.0 * fx)))) / (Mathf.Atan2((float)cx, (float)fx) + Mathf.Atan2((float)(imageSize.width - cx), (float)fx));
            double fovYScale = (2.0 * Mathf.Atan((float)(imageSize.height / (2.0 * fy)))) / (Mathf.Atan2((float)cy, (float)fy) + Mathf.Atan2((float)(imageSize.height - cy), (float)fy));

            Debug.Log("fovXScale " + fovXScale);
            Debug.Log("fovYScale " + fovYScale);


            //resize screen Quad
            Matrix4x4 p = ARUtils.CalculateProjectionMatrixFromCameraMatrixValues((float)fx, (float)fy, (float)cx, (float)cy, width, height, 0.3f, 1000f);
            Vector3   cameraSpacePos = UnProjectVector(p, new Vector3(1.0f, 1.0f, 1.0f));

            if (widthScale > heightScale)
            {
                ScreenQuad.transform.localScale = new Vector3(cameraSpacePos.x * 2f, cameraSpacePos.x * height / width * 2f, 1);
            }
            else
            {
                ScreenQuad.transform.localScale = new Vector3(cameraSpacePos.y * width / height * 2f, cameraSpacePos.y * 2f, 1);
            }


            //create markerDetector
            MarkerDesign[] markerDesigns = new MarkerDesign[markerSettings.Length];
            for (int i = 0; i < markerDesigns.Length; i++)
            {
                markerDesigns [i] = markerSettings [i].markerDesign;
            }
            markerDetector = new MarkerDetector(camMatrix, distCoeffs, markerDesigns);


            invertYM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1));
            Debug.Log("invertYM " + invertYM.ToString());

            invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));
            Debug.Log("invertZM " + invertZM.ToString());


            // If the webcam is front-facing, flip the Mat horizontally.
            if (webCamTextureToMatHelper.GetWebCamDevice().isFrontFacing)
            {
                webCamTextureToMatHelper.flipHorizontal = true;
            }
        }
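
Example #5 calls UnProjectVector without showing its definition. In the OpenCVForUnity marker-based AR samples it is typically defined along the following lines; treat this as a sketch of that helper rather than a verbatim copy:

    // Back-project a clip-space point through a projection matrix, assuming the
    // usual sparse perspective-matrix layout (a sketch, not verbatim).
    public static Vector3 UnProjectVector(Matrix4x4 proj, Vector3 to)
    {
        Vector3 from = new Vector3(0, 0, 0);
        var axsX = proj.GetRow(0);
        var axsY = proj.GetRow(1);
        var axsZ = proj.GetRow(2);
        from.z = to.z / axsZ.z;
        from.y = (to.y - (from.z * axsY.z)) / axsY.y;
        from.x = (to.x - (from.z * axsX.z)) / axsX.x;
        return from;
    }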
Example #6
    /// <summary>
    /// Update is called once per frame
    /// </summary>
    void Update()
    {
        if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
        {
            Mat rgbaMat = webCamTextureToMatHelper.GetMat();

            OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);

            // Detect face rectangles in the current frame
            List <UnityEngine.Rect> detectResult = faceLandmarkDetector.Detect();
            if (detectResult.Count > 0)
            {
                // Detect the facial landmark points (68-point model)
                List <Vector2> points = faceLandmarkDetector.DetectLandmark(detectResult[0]);
                // Draw the landmark points onto the mat
                OpenCVForUnityUtils.DrawFaceLandmark(rgbaMat, points, new Scalar(0, 255, 0, 255), 2);

                imagePoints.fromArray(
                    new Point((points[38].x + points[41].x) / 2, (points[38].y + points[41].y) / 2), //l eye (Interpupillary breadth)
                    new Point((points[43].x + points[46].x) / 2, (points[43].y + points[46].y) / 2), //r eye (Interpupillary breadth)
                    new Point(points[30].x, points[30].y),                                           //nose (Nose top)
                    new Point(points[48].x, points[48].y),                                           //l mouth (Mouth breadth)
                    new Point(points[54].x, points[54].y),                                           //r mouth (Mouth breadth)
                    new Point(points[0].x, points[0].y),                                             //l ear (Bitragion breadth)
                    new Point(points[16].x, points[16].y)                                            //r ear (Bitragion breadth)
                    );

                // Estimate the head pose.
                if (rvec == null || tvec == null)
                {
                    rvec = new Mat(3, 1, CvType.CV_64FC1);
                    tvec = new Mat(3, 1, CvType.CV_64FC1);
                    // solvePnP recovers the object pose from 3D-2D point correspondences: rvec is the rotation, tvec the translation vector.
                    Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
                }

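                // Reuse the previous solution as the initial guess (useExtrinsicGuess =
                // true with SOLVEPNP_ITERATIVE) for temporally stable tracking; fall
                // back to a fresh solve when the last pose was NaN or put the head
                // behind the camera (tvec_z < 0).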
                double tvec_z = tvec.get(2, 0)[0];

                if (double.IsNaN(tvec_z) || tvec_z < 0)
                {
                    Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
                }
                else
                {
                    Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec, true, Calib3d.SOLVEPNP_ITERATIVE);
                }

                if (!double.IsNaN(tvec_z) && points.Count == 68)
                {
                    cubismParameterDictionary.Clear();
                    Calib3d.Rodrigues(rvec, rotMat);

                    transformationM.SetRow(0, new Vector4((float)rotMat.get(0, 0)[0], (float)rotMat.get(0, 1)[0], (float)rotMat.get(0, 2)[0], (float)tvec.get(0, 0)[0]));
                    transformationM.SetRow(1, new Vector4((float)rotMat.get(1, 0)[0], (float)rotMat.get(1, 1)[0], (float)rotMat.get(1, 2)[0], (float)tvec.get(1, 0)[0]));
                    transformationM.SetRow(2, new Vector4((float)rotMat.get(2, 0)[0], (float)rotMat.get(2, 1)[0], (float)rotMat.get(2, 2)[0], (float)tvec.get(2, 0)[0]));
                    transformationM.SetRow(3, new Vector4(0, 0, 0, 1));

                    ARM = invertYM * transformationM * invertZM;
                    Vector3 forward;
                    forward.x = ARM.m02;
                    forward.y = ARM.m12;
                    forward.z = ARM.m22;

                    Vector3 upwards;
                    upwards.x = ARM.m01;
                    upwards.y = ARM.m11;
                    upwards.z = ARM.m21;

                    Vector3 angles  = Quaternion.LookRotation(forward, upwards).eulerAngles;
                    float   rotateX = angles.x > 180 ? angles.x - 360 : angles.x;
                    cubismParameterDictionary.Add(ParamAngleY, (float)Math.Round(rotateX));
                    float rotateY = angles.y > 180 ? angles.y - 360 : angles.y;
                    cubismParameterDictionary.Add(ParamAngleX, (float)Math.Round(-rotateY) * 2);
                    float rotateZ = angles.z > 180 ? angles.z - 360 : angles.z;
                    cubismParameterDictionary.Add(ParamAngleZ, (float)Math.Round(-rotateZ) * 2);
                    // Debug.Log("X" + rotateX + " Y" + rotateY + " Z" + rotateZ);

                    //ParamAngleX.BlendToValue(BlendMode,(float)(Math.Round(-rotateY) * 2));
                    //ParamAngleY.BlendToValue(BlendMode, (float)Math.Round(rotateX));
                    //ParamAngleZ.BlendToValue(BlendMode, (float)Math.Round(-rotateZ) * 2);

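                    // Eye openness: vertical lid gap normalized by eye width (an
                    // eye-aspect-ratio style measure), thresholded to a binary
                    // open/closed value for the Cubism parameter.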
                    float eyeOpen_L = Mathf.Clamp(Mathf.Abs(points[43].y - points[47].y) / (Mathf.Abs(points[43].x - points[44].x) * 0.75f), -0.1f, 2.0f);
                    if (eyeOpen_L >= 0.8f)
                    {
                        eyeOpen_L = 1f;
                    }
                    else
                    {
                        eyeOpen_L = 0;
                    }


                    float eyeOpen_R = Mathf.Clamp(Mathf.Abs(points[38].y - points[40].y) / (Mathf.Abs(points[37].x - points[38].x) * 0.75f), -0.1f, 2.0f);
                    if (eyeOpen_R >= 0.8f)
                    {
                        eyeOpen_R = 1f;
                    }
                    else
                    {
                        eyeOpen_R = 0;
                    }

                    // ParamEyeROpen.BlendToValue(BlendMode, eyeOpen_R);
                    cubismParameterDictionary.Add(ParamEyeROpen, eyeOpen_R);
                    // ParamEyeLOpen.BlendToValue(BlendMode, eyeOpen_L);
                    cubismParameterDictionary.Add(ParamEyeLOpen, eyeOpen_L);
                    // ParamEyeBallX.BlendToValue(BlendMode, (float)rotateY / 30f);
                    cubismParameterDictionary.Add(ParamEyeBallX, rotateY / 30f);
                    // ParamEyeBallX.BlendToValue(BlendMode, (float)-rotateX / 30f - 0.25f);
                    cubismParameterDictionary.Add(ParamEyeBallY, (float)-rotateX / 30f - 0.25f);

                    float RY = Mathf.Abs(points[19].y - points[27].y) / Mathf.Abs(points[27].y - points[29].y);
                    RY -= 1;
                    RY *= 4f;
                    float LY = Mathf.Abs(points[24].y - points[27].y) / Mathf.Abs(points[27].y - points[29].y);
                    LY -= 1;
                    LY *= 4f;

                    // ParamBrowRY.BlendToValue(BlendMode, RY);
                    cubismParameterDictionary.Add(ParamBrowRY, RY);
                    // ParamBrowLY.BlendToValue(BlendMode, LY);
                    cubismParameterDictionary.Add(ParamBrowLY, LY);
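                    // Mouth openness: inner-lip gap divided by the combined upper and
                    // lower lip thicknesses, clamped to [0, 1]; small values are
                    // zeroed to suppress jitter.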
                    float mouthOpen = Mathf.Clamp01(Mathf.Abs(points[62].y - points[66].y) / (Mathf.Abs(points[51].y - points[62].y) + Mathf.Abs(points[66].y - points[57].y)));
                    if (mouthOpen < 0.6f)
                    {
                        mouthOpen = 0;
                    }
                    // ParamMouthOpenY.BlendToValue(BlendMode, mouthOpen);
                    cubismParameterDictionary.Add(ParamMouthOpenY, mouthOpen);
                    float mouthSize = Mathf.Abs(points[48].x - points[54].x) / (Mathf.Abs(points[31].x - points[35].x));
                    // ParamMouthForm.BlendToValue(BlendMode, Mathf.Clamp(mouthSize, -1.0f, 1.0f));
                    cubismParameterDictionary.Add(ParamMouthForm, Mathf.Clamp(mouthSize, -1.0f, 1.0f));
                }
            }
            OpenCVForUnity.Utils.matToTexture2D(rgbaMat, texture, webCamTextureToMatHelper.GetBufferColors());
        }
    }
Example #7

        /// <summary>
        /// Raises the web cam texture to mat helper inited event.
        /// </summary>
        public void OnWebCamTextureToMatHelperInited()
        {
            Debug.Log("OnWebCamTextureToMatHelperInited");

            Mat webCamTextureMat = webCamTextureToMatHelper.GetMat();

            texture = new Texture2D(webCamTextureMat.cols(), webCamTextureMat.rows(), TextureFormat.RGBA32, false);
            gameObject.GetComponent <Renderer> ().material.mainTexture = texture;


            gameObject.transform.localScale = new Vector3(webCamTextureMat.cols(), webCamTextureMat.rows(), 1);

            Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);


            float width  = gameObject.transform.localScale.x;
            float height = gameObject.transform.localScale.y;

            float imageSizeScale = 1.0f;
            float widthScale     = (float)Screen.width / width;
            float heightScale    = (float)Screen.height / height;

            if (widthScale < heightScale)
            {
                Camera.main.orthographicSize = height / 2;
            }
            else
            {
                Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
                imageSizeScale = (float)Screen.height / (float)Screen.width;
            }


            // Set camera parameters
            int    max_d = Mathf.Max(webCamTextureMat.cols(), webCamTextureMat.rows());
            double fx    = max_d;
            double fy    = max_d;
            double cx    = webCamTextureMat.cols() / 2.0f;
            double cy    = webCamTextureMat.rows() / 2.0f;

            camMatrix = new Mat(3, 3, CvType.CV_64FC1);
            camMatrix.put(0, 0, fx);
            camMatrix.put(0, 1, 0);
            camMatrix.put(0, 2, cx);
            camMatrix.put(1, 0, 0);
            camMatrix.put(1, 1, fy);
            camMatrix.put(1, 2, cy);
            camMatrix.put(2, 0, 0);
            camMatrix.put(2, 1, 0);
            camMatrix.put(2, 2, 1.0f);
            Debug.Log("camMatrix " + camMatrix.dump());

            distCoeffs = new MatOfDouble(0, 0, 0, 0);
            Debug.Log("distCoeffs " + distCoeffs.dump());


            // Camera calibration values
            Size   imageSize      = new Size(webCamTextureMat.cols() * imageSizeScale, webCamTextureMat.rows() * imageSizeScale);
            double apertureWidth  = 0;
            double apertureHeight = 0;

            double[] fovx           = new double[1];
            double[] fovy           = new double[1];
            double[] focalLength    = new double[1];
            Point    principalPoint = new Point();

            double[] aspectratio = new double[1];


            Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);


            Debug.Log("imageSize " + imageSize.ToString());
            Debug.Log("apertureWidth " + apertureWidth);
            Debug.Log("apertureHeight " + apertureHeight);
            Debug.Log("fovx " + fovx [0]);
            Debug.Log("fovy " + fovy [0]);
            Debug.Log("focalLength " + focalLength [0]);
            Debug.Log("principalPoint " + principalPoint.ToString());
            Debug.Log("aspectratio " + aspectratio [0]);


            // Convert between the OpenCV and Unity FOV conventions.
            double fovXScale = (2.0 * Mathf.Atan((float)(imageSize.width / (2.0 * fx)))) / (Mathf.Atan2((float)cx, (float)fx) + Mathf.Atan2((float)(imageSize.width - cx), (float)fx));
            double fovYScale = (2.0 * Mathf.Atan((float)(imageSize.height / (2.0 * fy)))) / (Mathf.Atan2((float)cy, (float)fy) + Mathf.Atan2((float)(imageSize.height - cy), (float)fy));

            Debug.Log("fovXScale " + fovXScale);
            Debug.Log("fovYScale " + fovYScale);


            //Adjust Unity Camera FOV https://github.com/opencv/opencv/commit/8ed1945ccd52501f5ab22bdec6aa1f91f1e2cfd4
            if (widthScale > heightScale)
            {
                ARCamera.fieldOfView = (float)(fovx [0] * fovXScale);
            }
            else
            {
                ARCamera.fieldOfView = (float)(fovy [0] * fovYScale);
            }


            MarkerDesign[] markerDesigns = new MarkerDesign[markerSettings.Length];
            for (int i = 0; i < markerDesigns.Length; i++)
            {
                markerDesigns [i] = markerSettings [i].markerDesign;
            }

            markerDetector = new MarkerDetector(camMatrix, distCoeffs, markerDesigns);



            invertYM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1));
            Debug.Log("invertYM " + invertYM.ToString());

            invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));
            Debug.Log("invertZM " + invertZM.ToString());


            // If the webcam is front-facing, flip the Mat horizontally.
            if (webCamTextureToMatHelper.GetWebCamDevice().isFrontFacing)
            {
                webCamTextureToMatHelper.flipHorizontal = true;
            }

            StartCoroutine(resetCameraStatus());
        }
Example #8
    /// <summary>
    /// Raises the webcam texture to mat helper initialized event.
    /// </summary>
    public void OnWebCamTextureToMatHelperInitialized()
    {
        Debug.Log("OnWebCamTextureToMatHelperInitialized");
        Mat webCamTextureMat = webCamTextureToMatHelper.GetMat();

        gameObject.transform.localScale = new Vector3(webCamTextureMat.cols(), webCamTextureMat.rows(), 1);
        Utilities.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);
        Utilities.Log("Webcam width " + webCamTextureMat.cols() + ".Webcam height " + webCamTextureMat.rows());
        float width       = webCamTextureMat.width();
        float height      = webCamTextureMat.height();
        float widthScale  = (float)Screen.width / width;
        float heightScale = (float)Screen.height / height;

        if (widthScale < heightScale)
        {
            Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
        }
        else
        {
            Camera.main.orthographicSize = height / 2;
        }

        var rat = (float)webCamTextureMat.cols() / (float)webCamTextureMat.rows();

        nW_goc = 200;                            // fixed working width; the native webcam width is not used
        nH_goc = (int)((float)nW_goc / rat);

        nW  = nW_goc;
        nH  = (int)(nH_goc * 0.815f);
        rat = (float)nW / (float)nH;

        if (rawmainDL != null && debug == true)
        {
            rawmainDL.GetComponent <AspectRatioFitter>().aspectRatio = rat;
        }
        if (rawdebugDL != null && debug == true)
        {
            rawdebugDL.GetComponent <AspectRatioFitter>().aspectRatio = rat;
        }
        if (rawdebugDL2 != null && debug == true)
        {
            rawdebugDL2.GetComponent <AspectRatioFitter>().aspectRatio = rat;
        }
        if (rawdebugDL3 != null && debug == true)
        {
            rawdebugDL3.GetComponent <AspectRatioFitter>().aspectRatio = rat;
        }
        if (rawdebugDL4 != null && debug == true)
        {
            rawdebugDL4.GetComponent <AspectRatioFitter>().aspectRatio = rat;
        }
        if (rawdebugDL5 != null && debug == true)
        {
            rawdebugDL5.GetComponent <AspectRatioFitter>().aspectRatio = rat;
        }
        if (rawdebugDL6 != null && debug == true)
        {
            rawdebugDL6.GetComponent <AspectRatioFitter>().aspectRatio = rat;
        }
        if (rawdebugDL7 != null && debug == true)
        {
            rawdebugDL7.GetComponent <AspectRatioFitter>().aspectRatio = rat;
        }
        if (rawdebugDL8 != null && debug == true)
        {
            rawdebugDL8.GetComponent <AspectRatioFitter>().aspectRatio = rat;
        }


        if (rawmainDL != null && debug == true)
        {
            texture = new Texture2D(nW, nH, TextureFormat.RGBA32, false);
        }
        if (rawdebugDL != null && debug == true)
        {
            dbText1 = new Texture2D(nW, nH, TextureFormat.RGBA32, false);
        }
        if (rawdebugDL2 != null && debug == true)
        {
            dbText2 = new Texture2D(nW, nH, TextureFormat.RGBA32, false);
        }
        if (rawdebugDL3 != null && debug == true)
        {
            dbText3 = new Texture2D(nW, nH, TextureFormat.RGBA32, false);
        }
        if (rawdebugDL4 != null && debug == true)
        {
            dbText4 = new Texture2D(nW, nH, TextureFormat.RGBA32, false);
        }
        if (rawdebugDL5 != null && debug == true)
        {
            dbText5 = new Texture2D(nW, nH, TextureFormat.RGBA32, false);
        }
        if (rawdebugDL6 != null && debug == true)
        {
            dbText6 = new Texture2D(nW, nH, TextureFormat.RGBA32, false);
        }
        if (rawdebugDL7 != null && debug == true)
        {
            dbText7 = new Texture2D(nW, nH, TextureFormat.RGBA32, false);
        }
        if (rawdebugDL8 != null && debug == true)
        {
            dbText8 = new Texture2D(nW, nH, TextureFormat.RGBA32, false);
        }
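        // Note: OpenCV's Mat constructor takes (rows, cols), so the call below
        // allocates nW rows by nH cols; if a width x height buffer was intended the
        // arguments would be (nH, nW). Functions that write into rgbMat as an output
        // will typically reallocate it to the source size anyway.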
        rgbMat = new Mat(nW, nH, CvType.CV_8UC3);
        if (rawmainDL != null && debug == true)
        {
            rawmainDL.texture = texture;
        }
        if (rawdebugDL != null && debug == true)
        {
            rawdebugDL.texture = dbText1;
        }
        if (rawdebugDL2 != null && debug == true)
        {
            rawdebugDL2.texture = dbText2;
        }
        if (rawdebugDL3 != null && debug == true)
        {
            rawdebugDL3.texture = dbText3;
        }
        if (rawdebugDL4 != null && debug == true)
        {
            rawdebugDL4.texture = dbText4;
        }
        if (rawdebugDL5 != null && debug == true)
        {
            rawdebugDL5.texture = dbText5;
        }
        if (rawdebugDL6 != null && debug == true)
        {
            rawdebugDL6.texture = dbText6;
        }
        if (rawdebugDL7 != null && debug == true)
        {
            rawdebugDL7.texture = dbText7;
        }
        if (rawdebugDL8 != null && debug == true)
        {
            rawdebugDL8.texture = dbText8;
        }

        thresholdMat  = new Mat();
        thresholdMat2 = new Mat();
        hsvMat        = new Mat();
        hsvMat2       = new Mat();
        hsvMat3       = new Mat();
        hsvMat4       = new Mat();

        rgbaMat     = new Mat(1, 1, CvType.CV_8UC3);
        rgbMat2     = new Mat(1, 1, CvType.CV_8UC3);
        rgbMat3     = new Mat(1, 1, CvType.CV_8UC3);
        rgbMat4     = new Mat(1, 1, CvType.CV_8UC3);
        rgbMat2copy = new Mat(1, 1, CvType.CV_8UC3);

        thresholdMatArr = new Mat[7];

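        // UniRx: pump the worker micro-coroutine on the player loop as an observable
        // (assumes the UniRx package is present and worker is a method returning IEnumerator).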
        Observable.FromMicroCoroutine(worker).Subscribe();
    }