Ejemplo n.º 1
0
 /// <summary>
 /// Per-frame update: copies the newest camera frame into the display texture.
 /// </summary>
 void Update()
 {
     // Nothing to do unless the camera is running and produced a new frame.
     if (!webCamTextureToMatHelper.IsPlaying() || !webCamTextureToMatHelper.DidUpdateThisFrame())
     {
         return;
     }

     rgbaMat = webCamTextureToMatHelper.GetMat();
     OpenCVForUnity.Utils.matToTexture2D(rgbaMat, texture, webCamTextureToMatHelper.GetBufferColors());
 }
Ejemplo n.º 2
0
    /// <summary>
    /// Raises the web cam texture to mat helper initialized event.
    /// Creates the display texture sized to the camera mat, assigns it to this
    /// object's renderer, scales the quad to fill the screen, fits the camera,
    /// and allocates the grayscale working mat.
    /// </summary>
    public void OnWebCamTextureToMatHelperInitialized()
    {
        Debug.Log("OnWebCamTextureToMatHelperInitialized");

        Mat webCamTextureMat = webCamTextureToMatHelper.GetMat();

        // Texture that mirrors the camera mat; refreshed each frame in Update().
        texture = new Texture2D(webCamTextureMat.cols(), webCamTextureMat.rows(), TextureFormat.RGBA32, false);
        Utils.matToTexture2D(webCamTextureMat, texture, webCamTextureToMatHelper.GetBufferColors());

        gameObject.GetComponent <Renderer> ().material.mainTexture = texture;

        // Stretch the quad to cover the whole screen.
        gameObject.transform.localScale = new Vector3((float)Screen.width, (float)Screen.height, 1);

        Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

        // Fit the orthographic camera to the screen height. Use float division:
        // the original `Screen.height / 2` divided two ints and truncated the
        // half pixel for odd heights. (The original also computed width/height
        // scale factors that fed an if/else whose bodies were commented out —
        // that dead code is removed here.)
        Camera.main.orthographicSize = Screen.height / 2f;

        // Single-channel working buffer reused by the per-frame processing.
        grayMat = new Mat(webCamTextureMat.rows(), webCamTextureMat.cols(), CvType.CV_8UC1);
    }
        // Update is called once per frame.
        // Detects faces and their landmarks in the latest camera frame, feeds
        // the landmark points to the Live2D model, and (unless hidden) shows
        // the camera image with the landmarks drawn on it.
        void Update()
        {
            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat       rgbaMat = webCamTextureToMatHelper.GetMat();
                Color32[] rgbabuf = webCamTextureToMatHelper.GetBufferColors();

                if (rgbabuf != null && faceLandmarkDetector != null && texture != null)
                {
                    // Hand the raw RGBA pixel buffer to the detector.
                    faceLandmarkDetector.SetImage <Color32> (rgbabuf, texture.width, texture.height, 4, true);

                    //detect face rects
                    List <UnityEngine.Rect> detectResult = faceLandmarkDetector.Detect();

                    foreach (var rect in detectResult)
                    {
                        // Detect landmark points once per face. The original
                        // called DetectLandmark twice per rect (first result
                        // discarded), doing the detection work twice per frame.
                        List <Vector2> points = faceLandmarkDetector.DetectLandmark(rect);

                        //draw the detected landmark points into the pixel buffer
                        faceLandmarkDetector.DrawDetectLandmarkResult <Color32> (rgbabuf, texture.width, texture.height, 4, true, 255, 255, 255, 255);

                        if (points.Count > 0)
                        {
                            live2DModelUpdate(points);
                        }
                    }

                    // Per-frame Debug.Log tracing from the original was removed:
                    // logging every frame allocates and floods the console.

                    if (isHideCameraImage == false)
                    {
                        texture.SetPixels32(rgbabuf);
                        texture.Apply(false);
                    }
                }
            }
        }
Ejemplo n.º 4
0
        // Image Processing function - called once per frame.
        // Isolates red and blue regions of the camera feed, finds the contours,
        // draws enclosing circles with radius > 30, and displays either the
        // annotated frame or the threshold mask depending on camMode.
        void Update()
        {
            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                //load camera feed into matrix (owned by the helper - do not dispose)
                Mat frame = webCamTextureToMatHelper.GetMat();

                //clone frame so the blurring does not modify the original
                Mat cameraFeed = frame.clone();

                //apply blurring methods to image
                Imgproc.GaussianBlur(cameraFeed, cameraFeed, new Size(5, 5), 0);
                Imgproc.medianBlur(cameraFeed, cameraFeed, 3);

                //convert to hsv colour space
                // NOTE(review): the helper normally delivers an RGBA mat, so
                // COLOR_BGR2HSV may mis-map red/blue here - confirm source format.
                Mat hsv_image = new Mat();
                Imgproc.cvtColor(cameraFeed, hsv_image, Imgproc.COLOR_BGR2HSV);

                //create thresholds for colour isolation
                Mat blue_hue_range = new Mat();
                Mat red_hue_range  = new Mat();
                Mat lower_red      = new Mat();
                Mat upper_red      = new Mat();

                //upper and lower red colour thresholds (red hue wraps around 0/179)
                Core.inRange(hsv_image, new Scalar(0, 100, 100), new Scalar(10, 200, 200), lower_red);
                Core.inRange(hsv_image, new Scalar(160, 100, 100), new Scalar(179, 255, 255), upper_red);

                //add red thresholds together
                Core.addWeighted(lower_red, 1.0, upper_red, 1.0, 0.0, red_hue_range);

                Core.inRange(hsv_image, new Scalar(115, 100, 100), new Scalar(135, 200, 200), blue_hue_range);

                //add red and blue thresholds together
                Mat hue_image = new Mat();
                Core.addWeighted(blue_hue_range, 1.0, red_hue_range, 1.0, 0.0, hue_image);

                //noise reduction on hsv image
                Imgproc.GaussianBlur(hue_image, hue_image, new Size(9, 9), 5);

                Mat erodeElement  = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(3, 3));
                Mat dilateElement = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(8, 8));

                Imgproc.erode(hue_image, hue_image, erodeElement);
                Imgproc.dilate(hue_image, hue_image, dilateElement);

                //find contours in image
                System.Collections.Generic.List <MatOfPoint> circles = new System.Collections.Generic.List <MatOfPoint>();
                Mat hierarchy = new Mat();

                Imgproc.findContours(hue_image, circles, hierarchy, Imgproc.RETR_TREE, Imgproc.CHAIN_APPROX_SIMPLE, new Point(0, 0));

                //find circles and draw if radius is > 30
                for (int i = 0; i < circles.Count; i++)
                {
                    Point   pt     = new Point();
                    float[] radius = new float[1];
                    using (MatOfPoint2f contour2f = new MatOfPoint2f(circles[i].toArray()))
                    {
                        Imgproc.minEnclosingCircle(contour2f, pt, radius);
                    }

                    int r = (int)radius[0];

                    if (r > 30)
                    {
                        Imgproc.circle(frame, pt, r, new Scalar(0, 255, 0), 3);
                    }
                }

                //output either frame with circles drawn or hsv feed depending on status of change camera button
                if (camMode == false)
                {
                    Utils.matToTexture2D(frame, texture, webCamTextureToMatHelper.GetBufferColors());
                }
                else
                {
                    Utils.matToTexture2D(hue_image, texture, webCamTextureToMatHelper.GetBufferColors());
                }

                // Release the per-frame native buffers. Mat wraps unmanaged
                // memory; the original never disposed any of these, leaking
                // native memory every frame until finalizers eventually ran.
                foreach (MatOfPoint contour in circles)
                {
                    contour.Dispose();
                }
                cameraFeed.Dispose();
                hsv_image.Dispose();
                blue_hue_range.Dispose();
                red_hue_range.Dispose();
                lower_red.Dispose();
                upper_red.Dispose();
                hue_image.Dispose();
                erodeElement.Dispose();
                dilateElement.Dispose();
                hierarchy.Dispose();
            }
        }
Ejemplo n.º 5
0
    /// <summary>
    /// Update is called once per frame.
    /// Detects a face and its 68 landmarks, estimates the head pose with
    /// solvePnP, and maps pose / eye / brow / mouth measurements to Live2D
    /// Cubism parameter values collected in cubismParameterDictionary.
    /// </summary>
    void Update()
    {
        if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
        {
            Mat rgbaMat = webCamTextureToMatHelper.GetMat();

            OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);

            //detect face rects
            List <UnityEngine.Rect> detectResult = faceLandmarkDetector.Detect();

            if (detectResult.Count > 0)
            {
                //detect landmark points for the first detected face only
                List <Vector2> points = faceLandmarkDetector.DetectLandmark(detectResult[0]);
                //draw the landmark points onto the mat
                OpenCVForUnityUtils.DrawFaceLandmark(rgbaMat, points, new Scalar(0, 255, 0, 255), 2);

                // 2D image points paired with the 3D model points (objectPoints)
                // for the pose solve.
                imagePoints.fromArray(
                    new Point((points[38].x + points[41].x) / 2, (points[38].y + points[41].y) / 2), //l eye (Interpupillary breadth)
                    new Point((points[43].x + points[46].x) / 2, (points[43].y + points[46].y) / 2), //r eye (Interpupillary breadth)
                    new Point(points[30].x, points[30].y),                                           //nose (Nose top)
                    new Point(points[48].x, points[48].y),                                           //l mouth (Mouth breadth)
                    new Point(points[54].x, points[54].y),                                           //r mouth (Mouth breadth)
                    new Point(points[0].x, points[0].y),                                             //l ear (Bitragion breadth)
                    new Point(points[16].x, points[16].y)                                            //r ear (Bitragion breadth)
                    );

                // Estimate head pose. First frame: allocate rvec/tvec and solve
                // without an initial guess.
                if (rvec == null || tvec == null)
                {
                    rvec = new Mat(3, 1, CvType.CV_64FC1);
                    tvec = new Mat(3, 1, CvType.CV_64FC1);
                    // solvePnP recovers the pose from 3D-2D correspondences:
                    // rvec is the rotation vector, tvec the translation vector.
                    Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
                }

                double tvec_z = tvec.get(2, 0)[0];

                // Re-solve from scratch if the previous solution is unusable
                // (NaN, or the face behind the camera); otherwise refine it,
                // using the previous rvec/tvec as the extrinsic guess.
                if (double.IsNaN(tvec_z) || tvec_z < 0)
                {
                    Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
                }
                else
                {
                    Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec, true, Calib3d.SOLVEPNP_ITERATIVE);
                }

                if (!double.IsNaN(tvec_z) && points.Count == 68)
                {
                    cubismParameterDictionary.Clear();
                    // Rotation vector -> 3x3 rotation matrix.
                    Calib3d.Rodrigues(rvec, rotMat);

                    transformationM.SetRow(0, new Vector4((float)rotMat.get(0, 0)[0], (float)rotMat.get(0, 1)[0], (float)rotMat.get(0, 2)[0], (float)tvec.get(0, 0)[0]));
                    transformationM.SetRow(1, new Vector4((float)rotMat.get(1, 0)[0], (float)rotMat.get(1, 1)[0], (float)rotMat.get(1, 2)[0], (float)tvec.get(1, 0)[0]));
                    transformationM.SetRow(2, new Vector4((float)rotMat.get(2, 0)[0], (float)rotMat.get(2, 1)[0], (float)rotMat.get(2, 2)[0], (float)tvec.get(2, 0)[0]));
                    transformationM.SetRow(3, new Vector4(0, 0, 0, 1));

                    // Flip Y and Z axes to convert between the OpenCV and Unity
                    // coordinate systems.
                    ARM = invertYM * transformationM * invertZM;
                    Vector3 forward;
                    forward.x = ARM.m02;
                    forward.y = ARM.m12;
                    forward.z = ARM.m22;

                    Vector3 upwards;
                    upwards.x = ARM.m01;
                    upwards.y = ARM.m11;
                    upwards.z = ARM.m21;

                    // Head rotation as Euler angles, remapped from [0,360) to (-180,180].
                    Vector3 angles  = Quaternion.LookRotation(forward, upwards).eulerAngles;
                    float   rotateX = angles.x > 180 ? angles.x - 360 : angles.x;
                    cubismParameterDictionary.Add(ParamAngleY, (float)Math.Round(rotateX));
                    float rotateY = angles.y > 180 ? angles.y - 360 : angles.y;
                    cubismParameterDictionary.Add(ParamAngleX, (float)Math.Round(-rotateY) * 2);
                    float rotateZ = angles.z > 180 ? angles.z - 360 : angles.z;
                    cubismParameterDictionary.Add(ParamAngleZ, (float)Math.Round(-rotateZ) * 2);

                    // Eye openness: lid gap relative to eye width, snapped to
                    // fully open (1) or fully closed (0).
                    float eyeOpen_L = Mathf.Clamp(Mathf.Abs(points[43].y - points[47].y) / (Mathf.Abs(points[43].x - points[44].x) * 0.75f), -0.1f, 2.0f);
                    if (eyeOpen_L >= 0.8f)
                    {
                        eyeOpen_L = 1f;
                    }
                    else
                    {
                        eyeOpen_L = 0;
                    }


                    float eyeOpen_R = Mathf.Clamp(Mathf.Abs(points[38].y - points[40].y) / (Mathf.Abs(points[37].x - points[38].x) * 0.75f), -0.1f, 2.0f);
                    if (eyeOpen_R >= 0.8f)
                    {
                        eyeOpen_R = 1f;
                    }
                    else
                    {
                        eyeOpen_R = 0;
                    }

                    cubismParameterDictionary.Add(ParamEyeROpen, eyeOpen_R);
                    cubismParameterDictionary.Add(ParamEyeLOpen, eyeOpen_L);
                    // Eye ball direction follows the head yaw/pitch.
                    cubismParameterDictionary.Add(ParamEyeBallX, rotateY / 30f);
                    cubismParameterDictionary.Add(ParamEyeBallY, (float)-rotateX / 30f - 0.25f);

                    // Brow height relative to the nose bridge length.
                    float RY = Mathf.Abs(points[19].y - points[27].y) / Mathf.Abs(points[27].y - points[29].y);
                    RY -= 1;
                    RY *= 4f;
                    float LY = Mathf.Abs(points[24].y - points[27].y) / Mathf.Abs(points[27].y - points[29].y);
                    LY -= 1;
                    LY *= 4f;

                    cubismParameterDictionary.Add(ParamBrowRY, RY);
                    cubismParameterDictionary.Add(ParamBrowLY, LY);
                    // Mouth openness: inner-lip gap relative to lip thickness;
                    // small values are treated as a closed mouth.
                    float mouthOpen = Mathf.Clamp01(Mathf.Abs(points[62].y - points[66].y) / (Mathf.Abs(points[51].y - points[62].y) + Mathf.Abs(points[66].y - points[57].y)));
                    if (mouthOpen < 0.6f)
                    {
                        mouthOpen = 0;
                    }
                    cubismParameterDictionary.Add(ParamMouthOpenY, mouthOpen);
                    // Mouth width relative to nose width.
                    float mouthSize = Mathf.Abs(points[48].x - points[54].x) / (Mathf.Abs(points[31].x - points[35].x));
                    cubismParameterDictionary.Add(ParamMouthForm, Mathf.Clamp(mouthSize, -1.0f, 1.0f));
                }
            }
            OpenCVForUnity.Utils.matToTexture2D(rgbaMat, texture, webCamTextureToMatHelper.GetBufferColors());
        }
    }
        /// <summary>
        /// LateUpdate: runs marker detection on the newest camera frame,
        /// deactivates last frame's AR objects, then positions and activates
        /// the objects whose markers were found this frame.
        /// </summary>
        void LateUpdate()
        {
            if (!webCamTextureToMatHelper.IsPlaying() || !webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                return;
            }

            Mat rgbaMat = webCamTextureToMatHelper.GetMat();

            markerDetector.processFrame(rgbaMat, 1);

            // Hide AR objects before re-activating the ones whose markers are
            // visible in this frame.
            foreach (MarkerSettings markerSetting in markerSettings)
            {
                if (markerSetting.shouldNotSetToInactivePerFrame)
                {
                    // Delayed deactivation keeps the object alive briefly so it
                    // does not flicker when detection drops for a single frame.
                    GameObject arGameObject = markerSetting.getARGameObject();
                    if (arGameObject != null)
                    {
                        DelayableSetActive delayable = arGameObject.GetComponent <DelayableSetActive>();
                        if (delayable != null)
                        {
                            delayable.SetActive(false, 0.5f);
                        }
                    }
                }
                else
                {
                    markerSetting.setAllARGameObjectsDisable();
                }
            }

            // Place and show the object for every marker detected this frame.
            foreach (Marker marker in markerDetector.getFindMarkers())
            {
                foreach (MarkerSettings markerSetting in markerSettings)
                {
                    if (marker.id != markerSetting.getMarkerId())
                    {
                        continue;
                    }

                    transformationM = marker.transformation;

                    // Marker pose -> world space, flipping Y/Z between the
                    // detector's and Unity's coordinate systems.
                    ARM = ARCamera.transform.localToWorldMatrix * invertYM * transformationM * invertZM;

                    GameObject arGameObject = markerSetting.getARGameObject();
                    if (arGameObject == null)
                    {
                        continue;
                    }

                    ARUtils.SetTransformFromMatrix(arGameObject.transform, ref ARM);

                    DelayableSetActive delayable = arGameObject.GetComponent <DelayableSetActive>();
                    if (delayable != null)
                    {
                        delayable.SetActive(true);
                    }
                    else
                    {
                        arGameObject.SetActive(true);
                    }
                }
            }

            Utils.matToTexture2D(rgbaMat, texture, webCamTextureToMatHelper.GetBufferColors());
        }