void Update()
 {
     if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
     {
         ARGvrHead.trackRotation = true;
     }
     else
     {
         ARGvrHead.trackRotation = false;
     }
 }
Example No. 2
    // Update is called once per frame
    void Update()
    {
#if ((UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR)
        //Touch
        int touchCount = Input.touchCount;
        if (touchCount == 1)
        {
            Touch t = Input.GetTouch(0);
            if (t.phase == TouchPhase.Ended && !EventSystem.current.IsPointerOverGameObject(t.fingerId))
            {
                storedTouchPoint = new Point(t.position.x, t.position.y);
                //Debug.Log ("touch X " + t.position.x);
                //Debug.Log ("touch Y " + t.position.y);
            }
        }
#else
        //Mouse (this branch was empty in the snippet; filled in to mirror the touch path above)
        if (Input.GetMouseButtonUp(0) && !EventSystem.current.IsPointerOverGameObject())
        {
            storedTouchPoint = new Point(Input.mousePosition.x, Input.mousePosition.y);
        }
#endif
        timer -= Time.deltaTime;

        if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
        {
            Mat rgbaMat = webCamTextureToMatHelper.GetMat();
            HandPoseEstimationProcess(rgbaMat);
        }
    }
Example No. 3
        // Update is called once per frame
        void Update()
        {
            if (Input.GetKeyDown(KeyCode.Escape))
            {
#if UNITY_5_3 || UNITY_5_3_OR_NEWER
                SceneManager.LoadScene("FacemojiStart");
#else
                Application.LoadLevel("FacemojiStart");
#endif
            }

            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();

                OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);

                List <UnityEngine.Rect> detectResult = faceLandmarkDetector.Detect();

                foreach (var rect in detectResult)
                {
                    List <Vector2> points = faceLandmarkDetector.DetectLandmark(rect);

                    if (points.Count > 0)
                    {
                        live2DModelUpdate(points);

                        //currentFacePoints = points;

                        break;
                    }
                }
            }
        }
Example No. 4
    void Update()
    {
        //Wait for the camera to start, both at app launch and after switching cameras
        if (_remainingWaitingTime > 0)
        {
            _remainingWaitingTime -= Time.deltaTime;
            return;
        }
        _waitingingIndicator.SetActive(false);

        if (!_toMatHelper.IsPlaying() || !_toMatHelper.DidUpdateThisFrame())
        {
            return;
        }

        //Save the background. Doing this in Start() may run before the WebCam has started playing?
        if (!_invCvtr.IsSavedBg)
        {
            _invCvtr.SaveBgr(_toMatHelper.GetMat());
        }

        //Convert to an "invisible man" image and display it
        var invImg = _invCvtr.CvtToInvisible(_toMatHelper.GetMat());

        if (_isRecording)
        {
            _movieTaker?.Write(invImg);
        }
        Utils.fastMatToTexture2D(invImg, _quadTex);
    }
Example No. 5
 void Update()
 {
     if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
     {
         Run();
     }
 }
Example No. 6
 void Update()
 {
     if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
     {
         rgbaMat = webCamTextureToMatHelper.GetMat();
         OpenCVForUnity.Utils.matToTexture2D(rgbaMat, texture, webCamTextureToMatHelper.GetBufferColors());
     }
 }
Example No. 7
 void Update()
 {
     if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
     {
         Mat rgbaMat = webCamTextureToMatHelper.GetMat();
         Utils.fastMatToTexture2D(rgbaMat, texture);
     }
 }
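Note: unlike matToTexture2D with a Color32[] buffer (Example No. 6), fastMatToTexture2D copies the mat straight into the texture, so the Texture2D must already match the mat (same size, RGBA32). A minimal sketch of a compatible allocation, assuming the helper's usual initialize callback (not shown in these snippets):

 void OnWebCamTextureToMatHelperInitialized()
 {
     Mat webCamTextureMat = webCamTextureToMatHelper.GetMat();
     //fastMatToTexture2D expects a texture with the same width/height as the mat, in RGBA32 format
     texture = new Texture2D(webCamTextureMat.cols(), webCamTextureMat.rows(), TextureFormat.RGBA32, false);
 }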
Example No. 8
        void Update()
        {
            if (_WebCamTextureToMatHelper.IsPlaying() && _WebCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat rgbaMat = _WebCamTextureToMatHelper.GetMat();
                Imgproc.cvtColor(rgbaMat, _GrayMat, Imgproc.COLOR_RGBA2GRAY);
                _ImageDetector.FindARMarker(_GrayMat);

                Utils.fastMatToTexture2D(rgbaMat, _WebCamTexture);
            }
        }
Example No. 9
    void Update()
    {
        if (!_webCamTextureToMatHelper.IsPlaying() || !_webCamTextureToMatHelper.DidUpdateThisFrame())
        {
            return;
        }

        var rgbaMat = _webCamTextureToMatHelper.GetMat();

        SetFire(rgbaMat);
        Utils.fastMatToTexture2D(rgbaMat, _texture);
    }
Example No. 10
    IEnumerator worker()
    {
        bool inProcess = false;

        while (true)
        {
            yield return(null);

            if (inProcess)
            {
                //skip this frame while a previous detection is still running
                //(the original fell through after one extra frame, re-entering tagramDetect)
                continue;
            }
            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                color_filter.Clear();

                inProcess = true;
                Mat t_rgbaMat = webCamTextureToMatHelper.GetMat();


                tagramDetect(t_rgbaMat, (TangramResultModel trm, List <MyShape> lms) =>
                {
                    if (trm != null)
                    {
                        if (this.handler != null)
                        {
                            handler(trm, lms, tangramFeatureModelList);
                        }

                        if (debug == true)
                        {
                            string s = "";
                            for (var i = 0; i < trm.datas.Length; i++)
                            {
                                s += trm.datas[i] + " ";
                            }
                            if (uitext != null)   //debug is already checked above
                            {
                                uitext.text = s;
                            }
                        }
                    }
                    inProcess = false;
                });
            }
        }
    }
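Note: worker() never starts itself; presumably the class launches it once from a Unity lifecycle method. A minimal sketch, assuming the usual Start() hook (not shown in the snippet):

    void Start()
    {
        //run the detection loop for the lifetime of this component
        StartCoroutine(worker());
    }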
Example No. 11
        // Update is called once per frame
        void Update()
        {
            Debug.Log("called update func");
            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat       rgbaMat = webCamTextureToMatHelper.GetMat();
                Color32[] rgbabuf = webCamTextureToMatHelper.GetBufferColors();

                if (rgbabuf != null && faceLandmarkDetector != null && texture != null)
                {
                    Debug.Log("on Update above SetImage");
                    faceLandmarkDetector.SetImage <Color32> (rgbabuf, texture.width, texture.height, 4, true);

                    //detect face rects
                    List <UnityEngine.Rect> detectResult = faceLandmarkDetector.Detect();

                    foreach (var rect in detectResult)
                    {
                        Debug.Log("face : " + rect);

                        //detect landmark points (once; the original called DetectLandmark twice per rect)
                        List <Vector2> points = faceLandmarkDetector.DetectLandmark(rect);

                        //draw the landmark points detected above
                        faceLandmarkDetector.DrawDetectLandmarkResult <Color32> (rgbabuf, texture.width, texture.height, 4, true, 255, 255, 255, 255);
                        //faceLandmarkDetector.DrawDetectLandmarkResult<Color32>(drawbuf, texture.width, texture.height, 4, true, 255, 255, 255, 255);

                        if (points.Count > 0)
                        {
                            live2DModelUpdate(points);
                        }
                    }


                    if (isHideCameraImage == false)
                    {
                        texture.SetPixels32(rgbabuf);
                        texture.Apply(false);
                    }
                }
            }
        }
Example No. 12
// Update is called once per frame
    void Update()
    {
        if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
        {
            frame = webCamTextureToMatHelper.GetMat();
            frame.copyTo(img_orig);

            drawing = img_orig.clone();

            int       lowThreshold = 50;   // previously 200, or driven by slider.value
            const int ratio        = 1;
            const int kernel_size  = 3;

            //GetMat() returns RGBA, so drop alpha before the Lab conversion
            //(COLOR_BGR2Lab asserts on 4-channel input; img_lab is not used further below)
            Imgproc.cvtColor(img_orig, img_lab, Imgproc.COLOR_RGBA2RGB);
            Imgproc.cvtColor(img_lab, img_lab, Imgproc.COLOR_RGB2Lab);
            double omrSize = img_orig.cols() * img_orig.rows();

            Imgproc.cvtColor(img_orig, img_gray, Imgproc.COLOR_RGBA2GRAY);
            Imgproc.GaussianBlur(img_gray, img_gray, new Size(15, 15), 1.5, 1.5);       //Gaussian blur
            Imgproc.erode(img_gray, img_gray, new Mat(), new Point(-1, -1), 1);         //Erosion
            //Imgproc.dilate(img_gray, img_gray, new Mat(), new Point(-1, -1), 10, 1, new Scalar(10));    //Dilation
            Imgproc.Canny(img_gray, img_edges, lowThreshold, lowThreshold * ratio, kernel_size, false);

            //Shape detection
            List <MatOfPoint> contours = new List <MatOfPoint>();
            Mat hierarchy = new Mat();
            Imgproc.findContours(img_edges, contours, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE, new Point(0, 0));

            //Texture2D tex = new Texture2D(img_edges.width(), img_edges.height(), TextureFormat.RGB24, false);
            //Utils.matToTexture2D(img_edges, tex);
            //byte[] bytes1 = tex.EncodeToJPG();
            //File.WriteAllBytes("D:/2019/OMR/" + "test213123.png", bytes1);

            List <MatOfPoint> hulls = new List <MatOfPoint>();

            for (int i = 0; i < contours.Count; i++)
            {
                MatOfInt hull_temp = new MatOfInt();
                Imgproc.convexHull(contours[i], hull_temp);
                int[]   arrIndex   = hull_temp.toArray();
                Point[] arrContour = contours[i].toArray();
                Point[] arrPoints  = new Point[arrIndex.Length];

                for (int k = 0; k < arrIndex.Length; k++)
                {
                    arrPoints[k] = arrContour[arrIndex[k]];
                }

                MatOfPoint temp = new MatOfPoint();
                temp.fromArray(arrPoints);

                //Filter outliers
                if (Imgproc.contourArea(temp) > omrSize / 3 && Imgproc.contourArea(temp) < (omrSize * 4) / 5)
                {
                    hulls.Add(temp);
                }
            }

            List <MatOfPoint2f> hull2f = new List <MatOfPoint2f>();
            for (int i = 0; i < hulls.Count; i++)
            {
                MatOfPoint2f newPoint = new MatOfPoint2f(hulls[i].toArray());
                hull2f.Add(newPoint);
            }

            for (int i = 0; i < hulls.Count; i++)
            {
                //Approximate polygon
                MatOfPoint2f approx = new MatOfPoint2f();

                Imgproc.approxPolyDP(hull2f[i], approx, 0.01 * Imgproc.arcLength(hull2f[i], true), true);
                List <Point> approx_polygon = approx.toList();
                // approx_polygon = Scannerproc.filterPolygon(approx_polygon);
                // Debug.Log(approx_polygon.Count);
                if (!Scannerproc.isSquare(approx_polygon))
                {
                    continue;
                }

                nowRectPoints.Clear();
                nowRectPoints.AddRange(approx_polygon);
                perspectiveAlign();

                //Center of mass (computed but not used further in this snippet)
                int cx = 0, cy = 0;

                for (int k = 0; k < approx_polygon.Count; k++)
                {
                    cx += (int)approx_polygon[k].x;
                    cy += (int)approx_polygon[k].y;
                }
                cx /= approx_polygon.Count;
                cy /= approx_polygon.Count;

                Scannerproc.drawShape(drawing, approx_polygon, new Scalar(0, 255, 0));
            }

            if (showTextureOnScreen)
            {
                showCurrentTextureOnScreen();
            }
        }
    }
Example No. 13
    // Update is called once per frame
    void Update()
    {
        if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
        {
            Mat rgbaMat = webCamTextureToMatHelper.GetMat();

            //convert image to greyscale
            Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);


            if (isAutoResetMode || faceTracker.getPoints().Count <= 0)
            {
//                    Debug.Log ("detectFace");

                //convert image to greyscale
                using (Mat equalizeHistMat = new Mat()) using (MatOfRect faces = new MatOfRect()) {
                        Imgproc.equalizeHist(grayMat, equalizeHistMat);

                        cascade.detectMultiScale(equalizeHistMat, faces, 1.1f, 2,
                                                 0 | Objdetect.CASCADE_SCALE_IMAGE, // | Objdetect.CASCADE_FIND_BIGGEST_OBJECT
                                                 new Size(equalizeHistMat.cols() * 0.15, equalizeHistMat.cols() * 0.15), new Size());

                        if (faces.rows() > 0)
                        {
//                            Debug.Log ("faces " + faces.dump ());

                            List <OpenCVForUnity.CoreModule.Rect> rectsList = faces.toList();
                            List <Point[]> pointsList = faceTracker.getPoints();

                            if (isAutoResetMode)
                            {
                                //add initial face points from MatOfRect
                                if (pointsList.Count <= 0)
                                {
                                    faceTracker.addPoints(faces);
//                                    Debug.Log ("reset faces ");
                                }
                                else
                                {
                                    for (int i = 0; i < rectsList.Count; i++)
                                    {
                                        OpenCVForUnity.CoreModule.Rect trackRect = new OpenCVForUnity.CoreModule.Rect(rectsList [i].x + rectsList [i].width / 3, rectsList [i].y + rectsList [i].height / 2, rectsList [i].width / 3, rectsList [i].height / 3);
                                        //check whether the nose point (index 67) falls inside trackRect
                                        if (i < pointsList.Count && !trackRect.contains(pointsList [i] [67]))
                                        {
                                            rectsList.RemoveAt(i);
                                            pointsList.RemoveAt(i);
//                                                                                      Debug.Log ("remove " + i);
                                        }
                                        //uncomment below for rectangle around face
                                        Imgproc.rectangle(rgbaMat, new Point(trackRect.x, trackRect.y), new Point(trackRect.x + trackRect.width, trackRect.y + trackRect.height), new Scalar(0, 0, 255, 255), 2);
                                    }
                                }
                            }
                            else
                            {
                                faceTracker.addPoints(faces);
                            }
                            //draw face rect
                            for (int i = 0; i < rectsList.Count; i++)
                            {
                                //uncomment below for rectangle around face
                                Imgproc.rectangle(rgbaMat, new Point(rectsList [i].x, rectsList [i].y), new Point(rectsList [i].x + rectsList [i].width, rectsList [i].y + rectsList [i].height), new Scalar(255, 0, 0, 255), 2);
                            }
                        }
                        else
                        {
                            if (isAutoResetMode)
                            {
                                faceTracker.reset();
                            }
                        }
                    }
            }

            //track face points; track() always returns false when there are no face points
            if (faceTracker.track(grayMat, faceTrackerParams))
            {
                //GameObject.FindGameObjectWithTag("left hand").transform.localScale = new Vector3(0.05f, 0.05f, 50);
                //GameObject.FindGameObjectWithTag("right hand").transform.localScale = new Vector3(0.05f, 0.05f, 50);
                //facecount = 0;
                if (facerec > 15)
                {
                    GameObject.FindGameObjectWithTag("left hand").transform.localScale  = new Vector3(0.2f, 0.2f, 50);
                    GameObject.FindGameObjectWithTag("right hand").transform.localScale = new Vector3(0.2f, 0.2f, 50);
                    facecount = 0;
                }
                else
                {
                    facerec++;
                }
                //uncomment below for rectangle around face
                //faceTracker.draw(rgbaMat, new Scalar(255, 0, 0, 255), new Scalar(0, 255, 0, 255));
            }
            else
            {
                //facecount prevents the hands from flickering when face recognition is poor
                if (facecount > 15)
                {
                    facerec = 0;
                    GameObject.FindGameObjectWithTag("left hand").transform.localScale  = new Vector3(0f, 0f, 0);
                    GameObject.FindGameObjectWithTag("right hand").transform.localScale = new Vector3(0f, 0f, 0);
                    facecount++;
                }
                else
                {
                    facecount++;
                }
            }

            //Imgproc.putText (rgbaMat, "'Tap' or 'Space Key' to Reset", new Point (5, rgbaMat.rows () - 5), Imgproc.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

//                Imgproc.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

            Utils.fastMatToTexture2D(rgbaMat, texture);
        }

        //face tracking resets on screen tap or space bar
        if (Input.GetKeyUp(KeyCode.Space) || Input.touchCount > 0)
        {
            faceTracker.reset();
        }

        if (Input.GetKeyDown(KeyCode.Escape))
        {
            if (SpeechRecognizer.IsRecording())
            {
                SpeechRecognizer.StopIfRecording();
                //resultText.text = "I stopped recording";
            }
            Application.Quit();
            //Application.LoadLevel ("MainActivity.class");
        }
    }
Example No. 14
    void Update()
    {
        if (!running)
        {
            return;
        }


        if (Input.GetMouseButtonDown(0))
        {
            Debug.Log($"Mouse Position: {Input.mousePosition} -> World: {Camera.main.ScreenToWorldPoint(Input.mousePosition)}");
        }


        if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
        {
            Mat rgbaMat = webCamTextureToMatHelper.GetMat();
            //Convert to grayscale (GetMat() returns an RGBA mat, so use the RGBA variant)
            Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
            //Applies gaussian blur for better results
            Imgproc.GaussianBlur(grayMat, grayMat, new Size(3, 3), 2);
            using (Mat circles = new Mat()) {
                //Circle detection using the hough gradient
                Imgproc.HoughCircles(grayMat, circles, Imgproc.CV_HOUGH_GRADIENT, dp, minDist, param1, param2, minRadius, maxRadius);
                Point pt = new Point();

                //Limits the circle drawing when too many circles are detected
                if ((int)circles.total() > 5)
                {
                    for (int i = 0; i < circles.rows(); i++)
                    {
                        double[] data = circles.get(i, 0);
                        pt.x = data [0];
                        pt.y = data [1];
                        double rho = data [2];
                        Imgproc.circle(rgbaMat, pt, (int)rho, GlobalValues.DETECTION_COLOR, GlobalValues.RINGS_RADIUS);
                    }
                }
                else      //Tennis ball tracking starts here
                {
                    for (int i = 0; i < circles.rows(); i++)
                    {
                        for (var j = 0; j < circles.cols(); j++)
                        {
                            //Get the data from the API
                            double[] data = circles.get(i, j);
                            pt.x = data [0];
                            pt.y = data [1];
                            double rho = data [2];

                            //Convert to worldspace
                            Vector2 pos      = new Vector2((float)data[0], webCamTextureToMatHelper.GetWebCamTexture().height - (float)data[1]);
                            Vector3 worldPos = Camera.main.ScreenToWorldPoint(AdjustToResolution(pos));

                            //Drawings for debug purposes
                            Debug.DrawRay(worldPos, Vector3.up * 10, Color.magenta, 1f);
                            Debug.DrawRay(worldPos, Vector3.down * 10, Color.magenta, 1f);
                            Debug.DrawRay(worldPos, Vector3.left * 10, Color.magenta, 1f);
                            Debug.DrawRay(worldPos, Vector3.right * 10, Color.magenta, 1f);

                            //If the ball went outside the detection threshold
                            if (ball_tracker.AwaitingForRegainFocus(worldPos))
                            {
                                //Flash a blue circle to show the player where to resume
                                if (Mathf.Sin(Time.time * GlobalValues.CHECK_POINT_BLINKING_FRECUENCY) > 0)
                                {
                                    var last_pos   = ball_tracker.GetLastPosition();
                                    var screen_pos = InvertAdjustToResolution(Camera.main.WorldToScreenPoint(last_pos));
                                    screen_pos.y = webCamTextureToMatHelper.GetWebCamTexture().height - screen_pos.y;
                                    Imgproc.circle(rgbaMat, new Point(screen_pos.x, screen_pos.y), (int)rho, GlobalValues.CHECK_POINT_COLOR, GlobalValues.RINGS_RADIUS);
                                }
                            }    //Otherwise Update the ball tracker
                            else if (ball_tracker.Update(worldPos))
                            {
                                Imgproc.circle(rgbaMat, pt, (int)rho, GlobalValues.TRACKING_COLOR, GlobalValues.RINGS_RADIUS);
                            }
                        }
                    }
                }
            }

//                Imgproc.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

            Utils.matToTexture2D(rgbaMat, texture, webCamTextureToMatHelper.GetBufferColors());
        }
    }
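Note: the HoughCircles call above reads its tuning values from fields the snippet does not show. The values below are illustrative starting points only (names taken from the call, not from the original class):

    double dp = 1;        //inverse accumulator resolution; 1 = same resolution as the input
    double minDist = 20;  //minimum distance between detected circle centers, in pixels
    double param1 = 100;  //upper threshold of the internal Canny stage (the lower one is half of it)
    double param2 = 50;   //accumulator threshold; smaller values yield more (and more false) circles
    int minRadius = 5;    //radius search range, in pixels
    int maxRadius = 60;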
Example No. 15
    // Update is called once per frame
    void Update()
    {
        if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
        {
            Mat rgbaMat = webCamTextureToMatHelper.GetMat();

            // CV_8UC3

            if (net == null)
            {
                Imgproc.putText(rgbaMat, "model file is not loaded.", new Point(5, rgbaMat.rows() - 30), Imgproc.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
                Imgproc.putText(rgbaMat, "Please read console message.", new Point(5, rgbaMat.rows() - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
            }
            else
            {
                Imgproc.cvtColor(rgbaMat, bgrMat, Imgproc.COLOR_RGBA2BGR);

                // Create a 4D blob from a frame.
                Size inpSize = new Size(inpWidth > 0 ? inpWidth : bgrMat.cols(),
                                        inpHeight > 0 ? inpHeight : bgrMat.rows());
                Mat blob = Dnn.blobFromImage(bgrMat, scale, inpSize, mean, swapRB, false);


                // Run a model.
                net.setInput(blob);

                if (net.getLayer(new DictValue(0)).outputNameToIndex("im_info") != -1)
                {      // Faster-RCNN or R-FCN
                    Imgproc.resize(bgrMat, bgrMat, inpSize);
                    Mat imInfo = new Mat(1, 3, CvType.CV_32FC1);
                    imInfo.put(0, 0, new float[] {
                        (float)inpSize.height,
                        (float)inpSize.width,
                        1.6f
                    });
                    net.setInput(imInfo, "im_info");
                }


                TickMeter tm = new TickMeter();
                tm.start();

                List <Mat> outs = new List <Mat>();
                net.forward(outs, outBlobNames);

                tm.stop();
                //Debug.Log ("Inference time, ms: " + tm.getTimeMilli ());


                postprocess(rgbaMat, outs, net);

                for (int i = 0; i < outs.Count; i++)
                {
                    outs[i].Dispose();
                }
                blob.Dispose();
            }

            Utils.fastMatToTexture2D(rgbaMat, texture);
        }
    }
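Note: scale, mean, swapRB, inpWidth and inpHeight above are model-specific fields not shown in the snippet. For illustration only, values in the style of an SSD Caffe detector (not necessarily what this class uses):

    int inpWidth = 300;                       //fixed network input size; 0 keeps the frame size
    int inpHeight = 300;
    double scale = 1.0;                       //some models expect 1/255.0 instead
    Scalar mean = new Scalar(104, 117, 123);  //per-channel mean that blobFromImage subtracts
    bool swapRB = false;                      //true when the model expects RGB rather than BGR input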
Example No. 16
    /// <summary>
    /// Update is called once per frame
    /// </summary>
    void Update()
    {
        if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
        {
            Mat rgbaMat = webCamTextureToMatHelper.GetMat();

            OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);

            //detect face rects
            List <UnityEngine.Rect> detectResult = faceLandmarkDetector.Detect();
            if (detectResult.Count > 0)
            {
                //detect landmark points
                List <Vector2> points = faceLandmarkDetector.DetectLandmark(detectResult[0]);
                //draw the detected points on the mat
                OpenCVForUnityUtils.DrawFaceLandmark(rgbaMat, points, new Scalar(0, 255, 0, 255), 2);

                imagePoints.fromArray(
                    new Point((points[38].x + points[41].x) / 2, (points[38].y + points[41].y) / 2), //l eye (Interpupillary breadth)
                    new Point((points[43].x + points[46].x) / 2, (points[43].y + points[46].y) / 2), //r eye (Interpupillary breadth)
                    new Point(points[30].x, points[30].y),                                           //nose (Nose top)
                    new Point(points[48].x, points[48].y),                                           //l mouth (Mouth breadth)
                    new Point(points[54].x, points[54].y),                                           //r mouth (Mouth breadth)
                    new Point(points[0].x, points[0].y),                                             //l ear (Bitragion breadth)
                    new Point(points[16].x, points[16].y)                                            //r ear (Bitragion breadth)
                    );

                // Estimate head pose.
                if (rvec == null || tvec == null)
                {
                    rvec = new Mat(3, 1, CvType.CV_64FC1);
                    tvec = new Mat(3, 1, CvType.CV_64FC1);
                    //solvePnP estimates the object pose from the 3D-2D point correspondences: rvec is the rotation, tvec the translation vector
                    Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
                }

                double tvec_z = tvec.get(2, 0)[0];

                if (double.IsNaN(tvec_z) || tvec_z < 0)
                {
                    Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
                }
                else
                {
                    Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec, true, Calib3d.SOLVEPNP_ITERATIVE);
                }

                if (!double.IsNaN(tvec_z) && points.Count == 68)
                {
                    cubismParameterDictionary.Clear();
                    Calib3d.Rodrigues(rvec, rotMat);

                    transformationM.SetRow(0, new Vector4((float)rotMat.get(0, 0)[0], (float)rotMat.get(0, 1)[0], (float)rotMat.get(0, 2)[0], (float)tvec.get(0, 0)[0]));
                    transformationM.SetRow(1, new Vector4((float)rotMat.get(1, 0)[0], (float)rotMat.get(1, 1)[0], (float)rotMat.get(1, 2)[0], (float)tvec.get(1, 0)[0]));
                    transformationM.SetRow(2, new Vector4((float)rotMat.get(2, 0)[0], (float)rotMat.get(2, 1)[0], (float)rotMat.get(2, 2)[0], (float)tvec.get(2, 0)[0]));
                    transformationM.SetRow(3, new Vector4(0, 0, 0, 1));

                    ARM = invertYM * transformationM * invertZM;
                    Vector3 forward;
                    forward.x = ARM.m02;
                    forward.y = ARM.m12;
                    forward.z = ARM.m22;

                    Vector3 upwards;
                    upwards.x = ARM.m01;
                    upwards.y = ARM.m11;
                    upwards.z = ARM.m21;

                    Vector3 angles  = Quaternion.LookRotation(forward, upwards).eulerAngles;
                    float   rotateX = angles.x > 180 ? angles.x - 360 : angles.x;
                    cubismParameterDictionary.Add(ParamAngleY, (float)Math.Round(rotateX));
                    float rotateY = angles.y > 180 ? angles.y - 360 : angles.y;
                    cubismParameterDictionary.Add(ParamAngleX, (float)Math.Round(-rotateY) * 2);
                    float rotateZ = angles.z > 180 ? angles.z - 360 : angles.z;
                    cubismParameterDictionary.Add(ParamAngleZ, (float)Math.Round(-rotateZ) * 2);
                    //Debug.Log("X" + rotateX + " Y" + rotateY + " Z" + rotateZ);

                    //ParamAngleX.BlendToValue(BlendMode,(float)(Math.Round(-rotateY) * 2));
                    //ParamAngleY.BlendToValue(BlendMode, (float)Math.Round(rotateX));
                    //ParamAngleZ.BlendToValue(BlendMode, (float)Math.Round(-rotateZ) * 2);

                    float eyeOpen_L = Mathf.Clamp(Mathf.Abs(points[43].y - points[47].y) / (Mathf.Abs(points[43].x - points[44].x) * 0.75f), -0.1f, 2.0f);
                    if (eyeOpen_L >= 0.8f)
                    {
                        eyeOpen_L = 1f;
                    }
                    else
                    {
                        eyeOpen_L = 0;
                    }


                    float eyeOpen_R = Mathf.Clamp(Mathf.Abs(points[38].y - points[40].y) / (Mathf.Abs(points[37].x - points[38].x) * 0.75f), -0.1f, 2.0f);
                    if (eyeOpen_R >= 0.8f)
                    {
                        eyeOpen_R = 1f;
                    }
                    else
                    {
                        eyeOpen_R = 0;
                    }

                    // ParamEyeROpen.BlendToValue(BlendMode, eyeOpen_R);
                    cubismParameterDictionary.Add(ParamEyeROpen, eyeOpen_R);
                    // ParamEyeLOpen.BlendToValue(BlendMode, eyeOpen_L);
                    cubismParameterDictionary.Add(ParamEyeLOpen, eyeOpen_L);
                    // ParamEyeBallX.BlendToValue(BlendMode, (float)rotateY / 30f);
                    cubismParameterDictionary.Add(ParamEyeBallX, rotateY / 30f);
                    // ParamEyeBallX.BlendToValue(BlendMode, (float)-rotateX / 30f - 0.25f);
                    cubismParameterDictionary.Add(ParamEyeBallY, (float)-rotateX / 30f - 0.25f);

                    float RY = Mathf.Abs(points[19].y - points[27].y) / Mathf.Abs(points[27].y - points[29].y);
                    RY -= 1;
                    RY *= 4f;
                    float LY = Mathf.Abs(points[24].y - points[27].y) / Mathf.Abs(points[27].y - points[29].y);
                    LY -= 1;
                    LY *= 4f;

                    // ParamBrowRY.BlendToValue(BlendMode, RY);
                    cubismParameterDictionary.Add(ParamBrowRY, RY);
                    // ParamBrowLY.BlendToValue(BlendMode, LY);
                    cubismParameterDictionary.Add(ParamBrowLY, LY);
                    float mouthOpen = Mathf.Clamp01(Mathf.Abs(points[62].y - points[66].y) / (Mathf.Abs(points[51].y - points[62].y) + Mathf.Abs(points[66].y - points[57].y)));
                    if (mouthOpen < 0.6f)
                    {
                        mouthOpen = 0;
                    }
                    // ParamMouthOpenY.BlendToValue(BlendMode, mouthOpen);
                    cubismParameterDictionary.Add(ParamMouthOpenY, mouthOpen);
                    float mouthSize = Mathf.Abs(points[48].x - points[54].x) / (Mathf.Abs(points[31].x - points[35].x));
                    // ParamMouthForm.BlendToValue(BlendMode, Mathf.Clamp(mouthSize, -1.0f, 1.0f));
                    cubismParameterDictionary.Add(ParamMouthForm, Mathf.Clamp(mouthSize, -1.0f, 1.0f));
                }
            }
            OpenCVForUnity.Utils.matToTexture2D(rgbaMat, texture, webCamTextureToMatHelper.GetBufferColors());
        }
    }
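Note: invertYM and invertZM bridge OpenCV's right-handed coordinates and Unity's left-handed ones. A sketch of how the OpenCVForUnity samples typically build them (assumption: this class follows the same convention):

    //flip the Y axis (OpenCV's image Y points down, Unity's points up)
    invertYM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1));
    //flip the Z axis (right-handed to left-handed)
    invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));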
Example No. 17
    // Update is called once per frame
    void Update()
    {
        if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
        {
            OpenCVForUnity.CoreModule.Mat rgbaMat = webCamTextureToMatHelper.GetMat();

            if (mMOP2fptsPrev.rows() == 0)
            {
                // first time through the loop so we need prev and this mats
                // plus prev points
                // get this mat
                Imgproc.cvtColor(rgbaMat, matOpFlowThis, Imgproc.COLOR_RGBA2GRAY);

                // copy that to prev mat
                matOpFlowThis.copyTo(matOpFlowPrev);

                // get prev corners
                Imgproc.goodFeaturesToTrack(matOpFlowPrev, MOPcorners, iGFFTMax, 0.05, 20);
                mMOP2fptsPrev.fromArray(MOPcorners.toArray());

                // get safe copy of this corners
                mMOP2fptsPrev.copyTo(mMOP2fptsSafe);
            }
            else
            {
                // we've been through before so
                // this mat is valid. Copy it to prev mat
                matOpFlowThis.copyTo(matOpFlowPrev);

                // get this mat
                Imgproc.cvtColor(rgbaMat, matOpFlowThis, Imgproc.COLOR_RGBA2GRAY);

                // get the corners for this mat
                Imgproc.goodFeaturesToTrack(matOpFlowThis, MOPcorners, iGFFTMax, 0.05, 20);
                mMOP2fptsThis.fromArray(MOPcorners.toArray());

                // retrieve the corners from the prev mat
                // (saves calculating them again)
                mMOP2fptsSafe.copyTo(mMOP2fptsPrev);

                // and save this corners for next time through

                mMOP2fptsThis.copyTo(mMOP2fptsSafe);
            }


            /*
             *  Parameters:
             *      prevImg first 8-bit input image
             *      nextImg second input image
             *      prevPts vector of 2D points for which the flow needs to be found; point coordinates must be single-precision floating-point numbers.
             *      nextPts output vector of 2D points (with single-precision floating-point coordinates) containing the calculated new positions of input features in the second image; when OPTFLOW_USE_INITIAL_FLOW flag is passed, the vector must have the same size as in the input.
             *      status output status vector (of unsigned chars); each element of the vector is set to 1 if the flow for the corresponding features has been found, otherwise, it is set to 0.
             *      err output vector of errors; each element of the vector is set to an error for the corresponding feature, type of the error measure can be set in flags parameter; if the flow wasn't found then the error is not defined (use the status parameter to find such cases).
             */
            Video.calcOpticalFlowPyrLK(matOpFlowPrev, matOpFlowThis, mMOP2fptsPrev, mMOP2fptsThis, mMOBStatus, mMOFerr);

            //if (mMOBStatus.rows() > 0)
            //{
            //    List<Point> cornersPrev = mMOP2fptsPrev.toList();
            //    List<Point> cornersThis = mMOP2fptsThis.toList();
            //    List<byte> byteStatus = mMOBStatus.toList();

            //    int x = 0;
            //    int y = byteStatus.Count - 1;

            //    for (x = 0; x < y; x++)
            //    {
            //        if (byteStatus[x] == 1)
            //        {
            //            Point pt = cornersThis[x];
            //            Point pt2 = cornersPrev[x];

            //            Imgproc.circle(rgbaMat, pt, 5, colorRed, iLineThickness - 1);

            //            Imgproc.line(rgbaMat, pt, pt2, colorRed, iLineThickness);
            //        }
            //    }
            //}

            //                Imgproc.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

            Utils.fastMatToTexture2D(rgbaMat, texture);

            if (DateTime.Now >= dataAtual.AddSeconds(5))
            {
                webCamTextureToMatHelper.Pause();
                TakeSnapshot();
                DetectShapes();
                dataAtual = DateTime.Now;
                webCamTextureToMatHelper.Play();
            }
        }
    }
Example No. 18
        void Update()
        {
            if (Input.GetKeyDown(KeyCode.Escape))
            {
                OnBackButton();
            }

            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();

                markerDetector.processFrame(rgbaMat, 1);

                foreach (MarkerSettings settings in markerSettings)
                {
                    if (!settings.shouldNotSetToInactivePerFrame)
                    {
                        settings.setAllARGameObjectsDisable();
                    }
                    else
                    {
                        GameObject ARGameObject = settings.getARGameObject();
                        if (ARGameObject != null)
                        {
                            DelayableSetActive obj = ARGameObject.GetComponent <DelayableSetActive> ();
                            if (obj != null)
                            {
                                obj.SetActive(false, 0.5f);
                            }
                        }
                    }
                }

                List <Marker> findMarkers = markerDetector.getFindMarkers();
                for (int i = 0; i < findMarkers.Count; i++)
                {
                    Marker marker = findMarkers [i];

                    foreach (MarkerSettings settings in markerSettings)
                    {
                        if (marker.id == settings.getMarkerId())
                        {
                            transformationM = marker.transformation;
                            //Debug.Log ("transformationM " + transformationM.ToString ());

                            ARM = ARCamera.transform.localToWorldMatrix * invertYM * transformationM * invertZM;
                            //Debug.Log ("arM " + arM.ToString ());

                            GameObject ARGameObject = settings.getARGameObject();
                            if (ARGameObject != null)
                            {
                                ARUtils.SetTransformFromMatrix(ARGameObject.transform, ref ARM);

                                DelayableSetActive obj = ARGameObject.GetComponent <DelayableSetActive> ();
                                if (obj != null)
                                {
                                    obj.SetActive(true);
                                }
                                else
                                {
                                    ARGameObject.SetActive(true);
                                }
                            }
                        }
                    }
                }
                Utils.fastMatToTexture2D(rgbaMat, texture);
            }
        }
Example No. 19
        // Image Processing function - called once per frame
        void Update()
        {
            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                //load camera feed into matrix
                Mat frame = webCamTextureToMatHelper.GetMat();

                //clone frame to new variable
                Mat cameraFeed = frame.clone();


                //apply blurring methods to image
                Imgproc.GaussianBlur(cameraFeed, cameraFeed, new Size(5, 5), 0);
                Imgproc.medianBlur(cameraFeed, cameraFeed, 3);


                //convert to hsv colour space; the helper's mat is RGBA, so drop alpha first
                //(COLOR_BGR2HSV asserts on 4-channel input, and RGB2HSV matches the red thresholds near hue 0 below)
                Mat hsv_image = new Mat();
                Imgproc.cvtColor(cameraFeed, hsv_image, Imgproc.COLOR_RGBA2RGB);
                Imgproc.cvtColor(hsv_image, hsv_image, Imgproc.COLOR_RGB2HSV);

                //create thresholds for colour isolation
                Mat blue_hue_range = new Mat();
                Mat red_hue_range  = new Mat();
                Mat lower_red      = new Mat();
                Mat upper_red      = new Mat();

                //upper and lower red colour thresholds
                Core.inRange(hsv_image, new Scalar(0, 100, 100), new Scalar(10, 200, 200), lower_red);
                Core.inRange(hsv_image, new Scalar(160, 100, 100), new Scalar(179, 255, 255), upper_red);

                //add red thresholds together
                Core.addWeighted(lower_red, 1.0, upper_red, 1.0, 0.0, red_hue_range);

                Core.inRange(hsv_image, new Scalar(115, 100, 100), new Scalar(135, 200, 200), blue_hue_range);

                //add red and blue thresholds together
                Mat hue_image = new Mat();
                Core.addWeighted(blue_hue_range, 1.0, red_hue_range, 1.0, 0.0, hue_image);

                //noise reduction on hsv image
                Imgproc.GaussianBlur(hue_image, hue_image, new Size(9, 9), 5);

                Mat erodeElement  = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(3, 3));
                Mat dilateElement = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(8, 8));

                Imgproc.erode(hue_image, hue_image, erodeElement);
                Imgproc.dilate(hue_image, hue_image, dilateElement);

                //find contours in image
                System.Collections.Generic.List <MatOfPoint> circles = new System.Collections.Generic.List <MatOfPoint>();
                Mat hierarchy = new Mat();

                Imgproc.findContours(hue_image, circles, hierarchy, Imgproc.RETR_TREE, Imgproc.CHAIN_APPROX_SIMPLE, new Point(0, 0));

                //find circles and draw if radius is > 30
                for (int i = 0; i < circles.Count; i++)
                {
                    Point   pt     = new Point();
                    float[] radius = new float[1];
                    Imgproc.minEnclosingCircle(new MatOfPoint2f(circles[i].toArray()), pt, radius);

                    int r = (int)radius[0];

                    if (r > 30)
                    {
                        Imgproc.circle(frame, pt, r, new Scalar(0, 255, 0), 3);
                    }
                }

                //output either the frame with circles drawn or the binary colour mask, depending on the camera-mode button
                if (camMode == false)
                {
                    Utils.matToTexture2D(frame, texture, webCamTextureToMatHelper.GetBufferColors());
                }
                else
                {
                    Utils.matToTexture2D(hue_image, texture, webCamTextureToMatHelper.GetBufferColors());
                }
            }
        }
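Note: two inRange calls are needed for red because red straddles the wrap-around of OpenCV's 0-179 hue scale, so a single range cannot cover both ends. For binary masks, Core.bitwise_or merges them equivalently to (and slightly cheaper than) the addWeighted used above; a sketch with the same thresholds:

                //red wraps around hue 0/179: threshold both ends, then OR the masks together
                Core.inRange(hsv_image, new Scalar(0, 100, 100), new Scalar(10, 200, 200), lower_red);
                Core.inRange(hsv_image, new Scalar(160, 100, 100), new Scalar(179, 255, 255), upper_red);
                Core.bitwise_or(lower_red, upper_red, red_hue_range);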