Example #1
        // Update is called once per frame
        void Update()
        {
            if (webCamTextureToMatHelper.isPlaying() && webCamTextureToMatHelper.didUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();


                Imgproc.cvtColor(rgbaMat, rgbMat, Imgproc.COLOR_RGBA2RGB);

                //first find blue objects
                Imgproc.cvtColor(rgbMat, hsvMat, Imgproc.COLOR_RGB2HSV);
                Core.inRange(hsvMat, blue.getHSVmin(), blue.getHSVmax(), thresholdMat);
                morphOps(thresholdMat);
                trackFilteredObject(blue, thresholdMat, hsvMat, rgbMat);
                //then yellows
                Imgproc.cvtColor(rgbMat, hsvMat, Imgproc.COLOR_RGB2HSV);
                Core.inRange(hsvMat, yellow.getHSVmin(), yellow.getHSVmax(), thresholdMat);
                morphOps(thresholdMat);
                trackFilteredObject(yellow, thresholdMat, hsvMat, rgbMat);
                //then reds
                Imgproc.cvtColor(rgbMat, hsvMat, Imgproc.COLOR_RGB2HSV);
                Core.inRange(hsvMat, red.getHSVmin(), red.getHSVmax(), thresholdMat);
                morphOps(thresholdMat);
                trackFilteredObject(red, thresholdMat, hsvMat, rgbMat);
                //then greens
                Imgproc.cvtColor(rgbMat, hsvMat, Imgproc.COLOR_RGB2HSV);
                Core.inRange(hsvMat, green.getHSVmin(), green.getHSVmax(), thresholdMat);
                morphOps(thresholdMat);
                trackFilteredObject(green, thresholdMat, hsvMat, rgbMat);

                Core.putText(rgbMat, "W:" + rgbMat.width() + " H:" + rgbMat.height() + " SO:" + Screen.orientation, new Point(5, rgbMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar(255, 255, 255, 255), 2, Core.LINE_AA, false);

                Utils.matToTexture2D(rgbMat, texture, colors);
            }
        }
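Example #1 thresholds four color ranges in HSV space and relies on two helpers that are not shown, morphOps and trackFilteredObject. Below is a minimal sketch of what morphOps typically looks like in this kind of sample; the structuring-element sizes and the double erode/dilate are assumptions, not taken from the original.

        // Hypothetical sketch of the morphOps helper assumed above: clean up the binary mask
        // with erosion (remove speckle noise) followed by dilation (fill holes in the blob).
        void morphOps(Mat thresh)
        {
            // 3x3 rectangle for erosion, 8x8 rectangle for dilation (assumed sizes)
            Mat erodeElement = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(3, 3));
            Mat dilateElement = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(8, 8));

            Imgproc.erode(thresh, thresh, erodeElement);
            Imgproc.erode(thresh, thresh, erodeElement);

            Imgproc.dilate(thresh, thresh, dilateElement);
            Imgproc.dilate(thresh, thresh, dilateElement);
        }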
Example #2
        // Update is called once per frame
        void Update()
        {
            if (webCamTextureToMatHelper.isPlaying() && webCamTextureToMatHelper.didUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();

                Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);

                using (Mat circles = new Mat()) {
                    Imgproc.HoughCircles(grayMat, circles, Imgproc.CV_HOUGH_GRADIENT, 2, 10, 160, 50, 10, 20);
                    Point pt = new Point();

                    for (int i = 0; i < circles.cols(); i++)
                    {
                        double[] data = circles.get(0, i);
                        pt.x = data [0];
                        pt.y = data [1];
                        double rho = data [2];
                        Imgproc.circle(rgbaMat, pt, (int)rho, new Scalar(255, 0, 0, 255), 5);
                    }
                }

                Imgproc.putText(rgbaMat, "W:" + rgbaMat.width() + " H:" + rgbaMat.height() + " SO:" + Screen.orientation, new Point(5, rgbaMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

                Utils.matToTexture2D(rgbaMat, texture, colors);
            }
        }
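Example #2 runs a Hough circle transform directly on the raw grayscale frame. HoughCircles is sensitive to noise, so a small amount of smoothing before the transform usually reduces spurious detections; a sketch of that optional step is below (the kernel size and sigma are assumptions, not part of the original example).

                // Optional pre-processing sketch (assumed parameters): smooth the grayscale
                // frame before HoughCircles to suppress spurious circles from sensor noise.
                Imgproc.GaussianBlur(grayMat, grayMat, new Size(9, 9), 2, 2);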
Example #3
        // Update is called once per frame
        void Update()
        {
            if (webCamTextureToMatHelper.isPlaying() && webCamTextureToMatHelper.didUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();

                Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
                Imgproc.equalizeHist(grayMat, grayMat);


                if (cascade != null)
                {
                    cascade.detectMultiScale(grayMat, faces, 1.1, 2, 2,                      // TODO: objdetect.CV_HAAR_SCALE_IMAGE
                                             new Size(grayMat.cols() * 0.2, grayMat.rows() * 0.2), new Size());
                }


                OpenCVForUnity.Rect[] rects = faces.toArray();
                for (int i = 0; i < rects.Length; i++)
                {
                    //				Debug.Log ("detect faces " + rects [i]);

                    Core.rectangle(rgbaMat, new Point(rects [i].x, rects [i].y), new Point(rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar(255, 0, 0, 255), 2);
                }

//				Imgproc.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

                Utils.matToTexture2D(rgbaMat, texture, colors);
            }
        }
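Example #3 assumes a cascade classifier and a faces MatOfRect that are set up before the update loop. A minimal sketch of that one-time setup follows; the cascade file name and its location under StreamingAssets are assumptions.

        // Hypothetical one-time setup assumed by Example #3.
        MatOfRect faces = new MatOfRect();
        CascadeClassifier cascade = new CascadeClassifier();
        void InitCascade()
        {
            // Utils.getFilePath resolves files copied under Assets/StreamingAssets (assumed file name).
            cascade.load(Utils.getFilePath("lbpcascade_frontalface.xml"));
            if (cascade.empty())
            {
                Debug.LogError("Cascade file not found; copy it to the StreamingAssets folder.");
                cascade = null;
            }
        }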
Example #4
        // Update is called once per frame
        void Update()
        {
            if (webCamTextureToMatHelper.isPlaying() && webCamTextureToMatHelper.didUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();

                Imgproc.putText(rgbaMat, "W:" + rgbaMat.width() + " H:" + rgbaMat.height() + " SO:" + Screen.orientation, new Point(5, rgbaMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

                Utils.matToTexture2D(rgbaMat, texture, colors);
            }
        }
Example #5
        // Update is called once per frame
        void Update()
        {
            if (webCamTextureToMatHelper.isPlaying() && webCamTextureToMatHelper.didUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();

                Imgproc.cvtColor(rgbaMat, rgbMat, Imgproc.COLOR_RGBA2RGB);
                backgroundSubstractorMOG2.apply(rgbMat, fgmaskMat);

                Core.bitwise_not(fgmaskMat, fgmaskMat);
                rgbaMat.setTo(new Scalar(0, 0, 0, 0), fgmaskMat);

                Utils.matToTexture2D(rgbaMat, texture, colors);
            }
        }
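Example #5 masks out everything the MOG2 background model classifies as background. The subtractor and working mats are created once before the loop; a minimal sketch, assuming the OpenCV 3.x-style factory method exposed by OpenCV for Unity.

        // Hypothetical one-time setup assumed by Example #5.
        BackgroundSubtractorMOG2 backgroundSubstractorMOG2;
        Mat rgbMat, fgmaskMat;
        void InitSubtractor()
        {
            backgroundSubstractorMOG2 = Video.createBackgroundSubtractorMOG2();
            rgbMat = new Mat();
            fgmaskMat = new Mat();
        }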
Example #6
        // Update is called once per frame
        void Update()
        {
            #if ((UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR)
            //Touch
            int touchCount = Input.touchCount;
            if (touchCount == 1)
            {
                Touch t = Input.GetTouch(0);
                if (t.phase == TouchPhase.Ended && !EventSystem.current.IsPointerOverGameObject(t.fingerId))
                {
                    storedTouchPoint = new Point(t.position.x, t.position.y);
                    //Debug.Log ("touch X " + t.position.x);
                    //Debug.Log ("touch Y " + t.position.y);
                }
            }
            #else
            //Mouse
            if (Input.GetMouseButtonUp(0) && !EventSystem.current.IsPointerOverGameObject())
            {
                storedTouchPoint = new Point(Input.mousePosition.x, Input.mousePosition.y);
                //Debug.Log ("mouse X " + Input.mousePosition.x);
                //Debug.Log ("mouse Y " + Input.mousePosition.y);
            }
            #endif

            if (webCamTextureToMatHelper.isPlaying() && webCamTextureToMatHelper.didUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();

                if (storedTouchPoint != null)
                {
                    onTouch(rgbaMat, convertScreenPoint(storedTouchPoint, gameObject, Camera.main));
                    storedTouchPoint = null;
                }

                handPoseEstimationProcess(rgbaMat);

                Imgproc.putText(rgbaMat, "Please touch the area of the open hand.", new Point(5, rgbaMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

//              Imgproc.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

                Utils.matToTexture2D(rgbaMat, texture, webCamTextureToMatHelper.GetBufferColors());
            }
        }
Example #7
        // Update is called once per frame
        void Update()
        {
            if (webCamTextureToMatHelper.isPlaying() && webCamTextureToMatHelper.didUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();

                #if ((UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR)
                //Touch
                int touchCount = Input.touchCount;
                if (touchCount == 1)
                {
                    Touch t = Input.GetTouch(0);
                    if (t.phase == TouchPhase.Ended)
                    {
                        onTouch(rgbaMat, convertScreenPoint(new Point(t.position.x, t.position.y), gameObject, Camera.main));
                        //									Debug.Log ("touch X " + t.position.x);
                        //									Debug.Log ("touch Y " + t.position.y);
                    }
                }
                #else
                //Mouse
                if (Input.GetMouseButtonUp(0))
                {
                    onTouch(rgbaMat, convertScreenPoint(new Point(Input.mousePosition.x, Input.mousePosition.y), gameObject, Camera.main));
                    //												Debug.Log ("mouse X " + Input.mousePosition.x);
                    //												Debug.Log ("mouse Y " + Input.mousePosition.y);
                }
                #endif


                handPoseEstimationProcess(rgbaMat);

                Core.putText(rgbaMat, "PLEASE TOUCH HAND POINTS", new Point(5, rgbaMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar(255, 255, 255, 255), 2, Core.LINE_AA, false);


//				Imgproc.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

                Utils.matToTexture2D(rgbaMat, texture, colors);
            }
        }
Example #8
        // Update is called once per frame
        void Update()
        {
            if (webCamTextureToMatHelper.isPlaying() && webCamTextureToMatHelper.didUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();

                Imgproc.cvtColor(rgbaMat, hsvMat, Imgproc.COLOR_RGBA2RGB);
                Imgproc.cvtColor(hsvMat, hsvMat, Imgproc.COLOR_RGB2HSV);


                Point[] points = roiPointList.ToArray();

                if (roiPointList.Count == 4)
                {
                    using (Mat backProj = new Mat()) {
                        Imgproc.calcBackProject(new List <Mat> (new Mat[] { hsvMat }), new MatOfInt(0), roiHistMat, backProj, new MatOfFloat(0, 180), 1.0);

                        RotatedRect r = Video.CamShift(backProj, roiRect, termination);
                        r.points(points);
                    }

                    #if ((UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR)
                    //Touch
                    int touchCount = Input.touchCount;
                    if (touchCount == 1)
                    {
                        if (Input.GetTouch(0).phase == TouchPhase.Ended)
                        {
                            roiPointList.Clear();
                        }
                    }
                    #else
                    if (Input.GetMouseButtonUp(0))
                    {
                        roiPointList.Clear();
                    }
                    #endif
                }


                if (roiPointList.Count < 4)
                {
                    #if ((UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR)
                    //Touch
                    int touchCount = Input.touchCount;
                    if (touchCount == 1)
                    {
                        Touch t = Input.GetTouch(0);
                        if (t.phase == TouchPhase.Ended)
                        {
                            roiPointList.Add(convertScreenPoint(new Point(t.position.x, t.position.y), gameObject, Camera.main));
                            //                                  Debug.Log ("touch X " + t.position.x);
                            //                                  Debug.Log ("touch Y " + t.position.y);

                            if (!(new OpenCVForUnity.Rect(0, 0, hsvMat.width(), hsvMat.height()).contains(roiPointList [roiPointList.Count - 1])))
                            {
                                roiPointList.RemoveAt(roiPointList.Count - 1);
                            }
                        }
                    }
                    #else
                    //Mouse
                    if (Input.GetMouseButtonUp(0))
                    {
                        roiPointList.Add(convertScreenPoint(new Point(Input.mousePosition.x, Input.mousePosition.y), gameObject, Camera.main));
                        //                                              Debug.Log ("mouse X " + Input.mousePosition.x);
                        //                                              Debug.Log ("mouse Y " + Input.mousePosition.y);

                        if (!(new OpenCVForUnity.Rect(0, 0, hsvMat.width(), hsvMat.height()).contains(roiPointList [roiPointList.Count - 1])))
                        {
                            roiPointList.RemoveAt(roiPointList.Count - 1);
                        }
                    }
                    #endif


                    if (roiPointList.Count == 4)
                    {
                        using (MatOfPoint roiPointMat = new MatOfPoint(roiPointList.ToArray())) {
                            roiRect = Imgproc.boundingRect(roiPointMat);
                        }


                        if (roiHistMat != null)
                        {
                            roiHistMat.Dispose();
                            roiHistMat = null;
                        }
                        roiHistMat = new Mat();

                        using (Mat roiHSVMat = new Mat(hsvMat, roiRect))
                            using (Mat maskMat = new Mat()) {
                                Imgproc.calcHist(new List <Mat> (new Mat[] { roiHSVMat }), new MatOfInt(0), maskMat, roiHistMat, new MatOfInt(16), new MatOfFloat(0, 180));
                                Core.normalize(roiHistMat, roiHistMat, 0, 255, Core.NORM_MINMAX);

                                //                                                      Debug.Log ("roiHist " + roiHistMat.ToString ());
                            }
                    }
                }

                if (points.Length < 4)
                {
                    for (int i = 0; i < points.Length; i++)
                    {
                        Imgproc.circle(rgbaMat, points [i], 6, new Scalar(0, 0, 255, 255), 2);
                    }
                }
                else
                {
                    for (int i = 0; i < 4; i++)
                    {
                        Imgproc.line(rgbaMat, points [i], points [(i + 1) % 4], new Scalar(255, 0, 0, 255), 2);
                    }

                    Imgproc.rectangle(rgbaMat, roiRect.tl(), roiRect.br(), new Scalar(0, 255, 0, 255), 2);
                }

                Imgproc.putText(rgbaMat, "PLEASE TOUCH 4 POINTS", new Point(5, rgbaMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);


//              Imgproc.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

                Utils.matToTexture2D(rgbaMat, texture, webCamTextureToMatHelper.GetBufferColors());
            }
        }
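Example #8 lets the user pick four points, builds a hue histogram of the bounded region, and then tracks it with CamShift. The termination criteria and ROI fields are declared elsewhere; below is a sketch of plausible declarations, where the iteration limit and epsilon are assumed values.

        // Hypothetical field setup assumed by Example #8.
        List<Point> roiPointList = new List<Point>();
        OpenCVForUnity.Rect roiRect;
        Mat roiHistMat;
        // Stop CamShift after 10 iterations or when the window moves less than 1 pixel (assumed values).
        TermCriteria termination = new TermCriteria(TermCriteria.EPS | TermCriteria.COUNT, 10, 1);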
Example #9
        // Update is called once per frame
        void Update()
        {
            if (webCamTextureToMatHelper.isPlaying() && webCamTextureToMatHelper.didUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();

                if (AppControl.control.calibrationComplete)
                {
                    Mat  rgbaMatUndistorted = rgbaMat.clone();
                    Size SizeCm             = AppControl.control.cameraMatrix.size();
                    Size SizeDc             = AppControl.control.distCoeffs.size();
                    Imgproc.undistort(rgbaMat, rgbaMatUndistorted, AppControl.control.cameraMatrix, AppControl.control.distCoeffs);
                    rgbaMat = rgbaMatUndistorted;
                }

                Imgproc.cvtColor(rgbaMat, hsvMat, Imgproc.COLOR_RGBA2RGB);
                Imgproc.cvtColor(hsvMat, hsvMat, Imgproc.COLOR_RGB2HSV);

                // IMAGE PROCESSING STARTS (rgbaMat is the captured image)

                // Convert the current rgba frame (rgbaMat) to a gray image (grayMat)
                Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);


                //CALIB_CB_FAST_CHECK saves a lot of time on images
                //that do not contain any chessboard corners
                bool patternfound = Calib3d.findChessboardCorners(grayMat, patternsize, pointbuf,
                                                                  Calib3d.CALIB_CB_ADAPTIVE_THRESH + Calib3d.CALIB_CB_NORMALIZE_IMAGE
                                                                  + Calib3d.CALIB_CB_FAST_CHECK);

                if (patternfound)
                {
                    Imgproc.cornerSubPix(grayMat, pointbuf, new Size(11, 11), new Size(-1, -1), new TermCriteria(TermCriteria.EPS + TermCriteria.MAX_ITER, 30, 0.1));

                    float timeNow = Time.realtimeSinceStartup;
                    if ((timeNow > lastInterval + updateInterval))
                    {
                        imagePoints.Add(pointbuf.clone());
                        calcChessboardCorners(patternsize, squaresize, objectpointbuf);
                        objectPoints.Add(objectpointbuf.clone());

                        ++numCalibImages;
                        lastInterval = timeNow;
                    }
                }


                if (numCalibImages > numCalibrationTakes && !calibrationComplete)
                {
                    // Do calibration

                    int flags = 0;
                    calibrationComplete = true;
                    Calib3d.calibrateCamera(objectPoints, imagePoints, imageSize, cameraMatrix, distCoeffs, rvecs, tvecs, flags);

                    // Calibration is complete
                    List <float> perViewErrors = new List <float>();
                    float        error;
                    error = computeReprojectionErrors(objectPoints, imagePoints, rvecs, tvecs, cameraMatrix, distCoeffs, perViewErrors);

                    // Debug statements
                    Debug.Log("Calibration complete!");
                    Debug.Log("Camera Matrix:");
                    Debug.Log(cameraMatrix.dump());
                    Debug.Log("Distortion Coefficients:");
                    Debug.Log(distCoeffs.dump());                      // Debug.Log("fx: " + cameraMatrix.get(0,0)[0]);
                    Debug.Log("Reprojection error average: " + error); // Error should be in units of pixels
                    Debug.Log(perViewErrors.ToString());

                    // Save the calibration variables in AppControl
                    AppControl.control.calibrationComplete = calibrationComplete;
                    AppControl.control.cameraMatrix        = cameraMatrix;
                    AppControl.control.distCoeffs          = distCoeffs;
                    AppControl.control.reprojectionError   = error;
                    AppControl.control.fx = (float)cameraMatrix.get(0, 0)[0];
                    AppControl.control.fy = (float)cameraMatrix.get(1, 1)[0];
                    AppControl.control.cx = (float)cameraMatrix.get(0, 2)[0];
                    AppControl.control.cy = (float)cameraMatrix.get(1, 2)[0];

                    // Store the calibration parameters in player preferences text file
                    PlayerPrefs.SetInt("Calibrated", 1);
                    PlayerPrefs.SetFloat("Error", error);
                    float k1 = (float)distCoeffs.get(0, 0)[0];
                    float k2 = (float)distCoeffs.get(0, 1)[0];
                    PlayerPrefs.SetFloat("k1", (float)distCoeffs.get(0, 0)[0]);
                    PlayerPrefs.SetFloat("k2", (float)distCoeffs.get(0, 1)[0]);
                    PlayerPrefs.SetFloat("p1", (float)distCoeffs.get(0, 2)[0]);
                    PlayerPrefs.SetFloat("p2", (float)distCoeffs.get(0, 3)[0]);
                    PlayerPrefs.SetFloat("k3", (float)distCoeffs.get(0, 4)[0]);
                    PlayerPrefs.SetFloat("fx", (float)cameraMatrix.get(0, 0)[0]);
                    PlayerPrefs.SetFloat("fy", (float)cameraMatrix.get(1, 1)[0]);
                    PlayerPrefs.SetFloat("cx", (float)cameraMatrix.get(0, 2)[0]);
                    PlayerPrefs.SetFloat("cy", (float)cameraMatrix.get(1, 2)[0]);
                }


                // SHOW IMAGE ON THE DISPLAY

                // Draw the chessboard corners so it's obvious that the app is working
                Calib3d.drawChessboardCorners(rgbaMat, patternsize, pointbuf, patternfound);

                // Notify the user when the calibration is finished
                if (!calibrationComplete)
                {
                    Core.putText(rgbaMat, "Number of images collected: " + numCalibImages, new Point(5, rgbaMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar(255, 255, 255, 255), 2, Core.LINE_AA, false);
                }
                else
                {
                    // Mat rgbaMatUndistorted = new Mat();
                    Core.putText(rgbaMat, "Calibration complete", new Point(5, rgbaMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar(255, 255, 255, 255), 2, Core.LINE_AA, false);
                }


                Utils.matToTexture2D(rgbaMat, texture); // Default display
                //  Utils.matToTexture2D (rgbaMat, texture, colors)
            }
        }
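Example #9 collects chessboard views, calibrates the camera, and reports an average reprojection error through a computeReprojectionErrors helper that is not shown. A sketch of that helper follows; the signature and names are taken from the call above, while the body is an assumption (re-project each board's object points with the estimated pose and compare against the detected corners).

        // Hypothetical sketch of the computeReprojectionErrors helper assumed by Example #9.
        float computeReprojectionErrors(List<Mat> objectPoints, List<Mat> imagePoints,
                                        List<Mat> rvecs, List<Mat> tvecs,
                                        Mat cameraMatrix, Mat distCoeffs, List<float> perViewErrors)
        {
            double totalErr = 0;
            double totalPoints = 0;
            using (MatOfPoint2f projected = new MatOfPoint2f()) {
                for (int i = 0; i < objectPoints.Count; i++)
                {
                    // Re-project the i-th board's object points with its estimated pose.
                    Calib3d.projectPoints(new MatOfPoint3f(objectPoints[i]), rvecs[i], tvecs[i],
                                          cameraMatrix, new MatOfDouble(distCoeffs), projected);
                    double err = Core.norm(imagePoints[i], projected, Core.NORM_L2);
                    double n = objectPoints[i].total();
                    perViewErrors.Add(Mathf.Sqrt((float)(err * err / n)));
                    totalErr += err * err;
                    totalPoints += n;
                }
            }
            return Mathf.Sqrt((float)(totalErr / totalPoints)); // RMS error in pixels
        }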
Example #10
        // Update is called once per frame
        void Update()
        {
            // Debug.Log("Did update this frame: " + webCamTextureToMatHelper.didUpdateThisFrame());
            // Debug.Log("WebCam Texture is playing: " + webCamTextureToMatHelper.isPlaying());
            if (webCamTextureToMatHelper.isPlaying() && webCamTextureToMatHelper.didUpdateThisFrame())
            {
                ///////////////////////////////////////////////////////////////
                // Acquire the next frame from the camera and undistort if necessary
                ///////////////////////////////////////////////////////////////
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();

                if (AppControl.control.calibrationComplete && false) // note: "&& false" makes this branch unreachable, so undistortion is skipped
                {
                    Mat rgbaMatUndistorted = rgbaMat.clone();
                    Imgproc.undistort(rgbaMat, rgbaMatUndistorted, AppControl.control.cameraMatrix, AppControl.control.distCoeffs);
                    rgbaMat = rgbaMatUndistorted;
                }

                Imgproc.cvtColor(rgbaMat, hsvMat, Imgproc.COLOR_RGBA2RGB);
                Imgproc.cvtColor(hsvMat, hsvMat, Imgproc.COLOR_RGB2HSV);

                ///////////////////////////////////////////////////////////////
                // If the object color spectrum is initialized, find the object
                ///////////////////////////////////////////////////////////////
                // Debug.Log("Number of ROI points: " + roiPointList.Count);
                Point[] points = roiPointList.ToArray();
                if (roiPointList.Count == 4)
                {
                    using (Mat backProj = new Mat()) {
                        Imgproc.calcBackProject(new List <Mat> (new Mat[] { hsvMat }), new MatOfInt(0), roiHistMat, backProj, new MatOfFloat(0, 180), 1.0);
                        // Video.meanShift(backProj, roiRect, termination);
                        RotatedRect r = Video.CamShift(backProj, roiRect, termination);
                        r.points(points);
                        if (roiRect.height > 0 && roiRect.width > 0)
                        {
                            objectFound = true;
                            AppControl.control.TargetBox = roiRect;
                            // Debug.Log("Object found: " + objectFound);
                        }
                        else
                        {
                            objectFound = false;
                        }
                    }

                    // Estimate the 3D position of the object
                    if (objectFound && AppControl.control.calibrationComplete)
                    {
                        float fx = AppControl.control.fx;                       // focal length, x-axis (pixels)
                        float fy = AppControl.control.fy;                       // focal length, y-axis (pixels)
                        float cx = AppControl.control.cx;                       // principal point, x-coordinate (pixels)
                        float cy = AppControl.control.cy;                       // principal point, y-coordinate (pixels)
                        float xu = (roiRect.x) + 0.5f * roiRect.width - cx;     // undistorted object center along x-axis in pixels
                        float yu = -((roiRect.y) + 0.5f * roiRect.height - cy); // undistorted object center along y-axis in pixels
                        float du = (roiRect.height + roiRect.width) / 2f;       // Size of the object in pixels
                        float pz = (objectSize / du) * (1 / Mathf.Sqrt(1f / Mathf.Pow(fx, 2) + 1f / Mathf.Pow(fy, 2)));
                        float px = xu * pz / fx;
                        float py = yu * pz / fy;
                        objectPosition = new Vector3(px, py, pz); // in units of mm
                        target         = GameObject.FindWithTag("Target");
                        stick          = GameObject.FindWithTag("Breadboard");
                        if (target != null && stick != null)
                        {
                            target.transform.position          = mainCamera.transform.position + 0.001f * objectPosition; // in units of meters
                            AppControl.control.camObservations = new double[] { xu, yu, du };
                            AppControl.control.writeData();
                            // stick.transform.rotation = mainCamera.transform.rotation * stick.transform.rotation * Quaternion.Inverse(mainCamera.transform.rotation);
                            // stick.transform.position = stick.transform.position
                        }
                    }
                    else
                    {
                        objectPosition = new Vector3(0, 0, 0);
                    }


                    // not necessary to reset the object tracking by clicking

                    /*
                     #if ((UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR)
                     *  //Touch
                     *  int touchCount = Input.touchCount;
                     *  if (touchCount == 1)
                     *  {
                     *      if(Input.GetTouch(0).phase == TouchPhase.Ended){
                     *          roiPointList.Clear ();
                     *      }
                     *  }
                     #else
                     * if (Input.GetMouseButtonUp (0)) {
                     *      roiPointList.Clear ();
                     * }
                     #endif
                     */
                }


                ///////////////////////////////////////////////////////////////
                // Capture the ROI from user input; compute the HSV histogram
                // of the ROI
                ///////////////////////////////////////////////////////////////
                if (roiPointList.Count < 4)
                {
                    #if ((UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR)
                    //Touch
                    int touchCount = Input.touchCount;
                    if (touchCount == 1)
                    {
                        Touch t = Input.GetTouch(0);
                        if (t.phase == TouchPhase.Ended)
                        {
                            roiPointList.Add(convertScreenPoint(new Point(t.position.x, t.position.y), gameObject, Camera.main));
//									Debug.Log ("touch X " + t.position.x);
//									Debug.Log ("touch Y " + t.position.y);

                            if (!(new OpenCVForUnity.Rect(0, 0, hsvMat.width(), hsvMat.height()).contains(roiPointList [roiPointList.Count - 1])))
                            {
                                roiPointList.RemoveAt(roiPointList.Count - 1);
                            }
                        }
                    }
                    #else
                    // Capture mouse input events and add the points to the ROI List
                    if (Input.GetMouseButtonUp(0))
                    {
                        roiPointList.Add(convertScreenPoint(new Point(Input.mousePosition.x, Input.mousePosition.y), gameObject, Camera.main));
                        //												Debug.Log ("mouse X " + Input.mousePosition.x);
                        //												Debug.Log ("mouse Y " + Input.mousePosition.y);

                        if (!(new OpenCVForUnity.Rect(0, 0, hsvMat.width(), hsvMat.height()).contains(roiPointList [roiPointList.Count - 1])))
                        {
                            roiPointList.RemoveAt(roiPointList.Count - 1);
                        }
                    }
                    #endif

                    // If the user has selected four points, lock in the ROI; compute
                    // the HSV histogram of the ROI
                    if (roiPointList.Count == 4)
                    {
                        using (MatOfPoint roiPointMat = new MatOfPoint(roiPointList.ToArray())) {
                            roiRect = Imgproc.boundingRect(roiPointMat);
                        }

                        if (roiHistMat != null)
                        {
                            roiHistMat.Dispose();
                            roiHistMat = null;
                        }
                        roiHistMat = new Mat();

                        using (Mat roiHSVMat = new Mat(hsvMat, roiRect))
                            using (Mat maskMat = new Mat()) {
                                Imgproc.calcHist(new List <Mat> (new Mat[] { roiHSVMat }), new MatOfInt(0), maskMat, roiHistMat, new MatOfInt(16), new MatOfFloat(0, 180));
                                Core.normalize(roiHistMat, roiHistMat, 0, 255, Core.NORM_MINMAX);
                                //														Debug.Log ("roiHist " + roiHistMat.ToString ());
                            }
                    }
                }


                ///////////////////////////////////////////////////////////////
                // Draw the an image that displays where the user has pressed
                ///////////////////////////////////////////////////////////////
                if (points.Length < 4)
                {
                    for (int i = 0; i < points.Length; i++)
                    {
                        Core.circle(rgbaMat, points [i], 6, new Scalar(0, 0, 255, 255), 2);
                    }
                }
                else
                {
                    for (int i = 0; i < 4; i++)
                    {
                        Core.line(rgbaMat, points [i], points [(i + 1) % 4], new Scalar(255, 0, 0, 255), 2);
                    }
                    Core.rectangle(rgbaMat, roiRect.tl(), roiRect.br(), new Scalar(0, 255, 0, 255), 2);
                }

                Core.putText(rgbaMat, "PLEASE TOUCH 4 POINTS", new Point(5, rgbaMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar(255, 255, 255, 255), 2, Core.LINE_AA, false);
//				Imgproc.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
                Utils.matToTexture2D(rgbaMat, texture, colors);
            }
        }
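The position estimate in Example #10 inverts the pinhole model: an object of known physical size that spans du pixels lies at a depth of roughly size * f / du, and the code combines fx and fy through 1 / sqrt(1/fx^2 + 1/fy^2). A standalone sketch of that back-projection with hypothetical numbers:

                // Standalone sketch of the pinhole back-projection used above (all values hypothetical).
                float fx = 600f, fy = 600f;                     // focal lengths in pixels
                float objectSizeMm = 40f;                       // assumed physical size of the tracked object
                float du = 80f;                                 // apparent size of the object in pixels
                float xu = 100f, yu = -50f;                     // pixel offsets from the principal point
                float pz = (objectSizeMm / du) / Mathf.Sqrt(1f / (fx * fx) + 1f / (fy * fy)); // depth in mm
                float px = xu * pz / fx;                        // lateral offset in mm
                float py = yu * pz / fy;                        // vertical offset in mm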
Example #11
        // Update is called once per frame
        void Update()
        {
            if (webCamTextureToMatHelper.isPlaying() && webCamTextureToMatHelper.didUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();

                if (mMOP2fptsPrev.rows() == 0)
                {
                    // first time through the loop so we need prev and this mats
                    // plus prev points
                    // get this mat
                    Imgproc.cvtColor(rgbaMat, matOpFlowThis, Imgproc.COLOR_RGBA2GRAY);

                    // copy that to prev mat
                    matOpFlowThis.copyTo(matOpFlowPrev);

                    // get prev corners
                    Imgproc.goodFeaturesToTrack(matOpFlowPrev, MOPcorners, iGFFTMax, 0.05, 20);
                    mMOP2fptsPrev.fromArray(MOPcorners.toArray());

                    // get safe copy of this corners
                    mMOP2fptsPrev.copyTo(mMOP2fptsSafe);
                }
                else
                {
                    // we've been through before so
                    // this mat is valid. Copy it to prev mat
                    matOpFlowThis.copyTo(matOpFlowPrev);

                    // get this mat
                    Imgproc.cvtColor(rgbaMat, matOpFlowThis, Imgproc.COLOR_RGBA2GRAY);

                    // get the corners for this mat
                    Imgproc.goodFeaturesToTrack(matOpFlowThis, MOPcorners, iGFFTMax, 0.05, 20);
                    mMOP2fptsThis.fromArray(MOPcorners.toArray());

                    // retrieve the corners from the prev mat
                    // (saves calculating them again)
                    mMOP2fptsSafe.copyTo(mMOP2fptsPrev);

                    // and save this corners for next time through

                    mMOP2fptsThis.copyTo(mMOP2fptsSafe);
                }


                /*
                 *  Parameters:
                 *      prevImg first 8-bit input image
                 *      nextImg second input image
                 *      prevPts vector of 2D points for which the flow needs to be found; point coordinates must be single-precision floating-point numbers.
                 *      nextPts output vector of 2D points (with single-precision floating-point coordinates) containing the calculated new positions of input features in the second image; when OPTFLOW_USE_INITIAL_FLOW flag is passed, the vector must have the same size as in the input.
                 *      status output status vector (of unsigned chars); each element of the vector is set to 1 if the flow for the corresponding features has been found, otherwise, it is set to 0.
                 *      err output vector of errors; each element of the vector is set to an error for the corresponding feature, type of the error measure can be set in flags parameter; if the flow wasn't found then the error is not defined (use the status parameter to find such cases).
                 */
                Video.calcOpticalFlowPyrLK(matOpFlowPrev, matOpFlowThis, mMOP2fptsPrev, mMOP2fptsThis, mMOBStatus, mMOFerr);

                if (!mMOBStatus.empty())
                {
                    List <Point> cornersPrev = mMOP2fptsPrev.toList();
                    List <Point> cornersThis = mMOP2fptsThis.toList();
                    List <byte>  byteStatus  = mMOBStatus.toList();

                    int x = 0;
                    int y = byteStatus.Count - 1;

                    for (x = 0; x < y; x++)
                    {
                        if (byteStatus [x] == 1)
                        {
                            Point pt  = cornersThis [x];
                            Point pt2 = cornersPrev [x];

                            Imgproc.circle(rgbaMat, pt, 5, colorRed, iLineThickness - 1);

                            Imgproc.line(rgbaMat, pt, pt2, colorRed, iLineThickness);
                        }
                    }
                }

//              Imgproc.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

                Utils.matToTexture2D(rgbaMat, texture, webCamTextureToMatHelper.GetBufferColors());
            }
        }
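Example #11 tracks Shi-Tomasi corners between consecutive gray frames with pyramidal Lucas-Kanade optical flow. The member fields it relies on are declared elsewhere; below is a sketch of plausible declarations, with the names taken from the calls above and the numeric values assumed.

        // Hypothetical field declarations assumed by Example #11.
        Mat matOpFlowThis = new Mat();                  // current gray frame
        Mat matOpFlowPrev = new Mat();                  // previous gray frame
        MatOfPoint MOPcorners = new MatOfPoint();       // corners from goodFeaturesToTrack
        MatOfPoint2f mMOP2fptsPrev = new MatOfPoint2f();
        MatOfPoint2f mMOP2fptsThis = new MatOfPoint2f();
        MatOfPoint2f mMOP2fptsSafe = new MatOfPoint2f();
        MatOfByte mMOBStatus = new MatOfByte();         // 1 where the flow was found
        MatOfFloat mMOFerr = new MatOfFloat();          // per-point tracking error
        int iGFFTMax = 40;                              // max corners to track (assumed value)
        int iLineThickness = 3;                         // assumed value
        Scalar colorRed = new Scalar(255, 0, 0, 255);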
Example #12
        // Update is called once per frame
        void Update()
        {
            // Sample rate update
            ++frames;
            float timeNow = Time.realtimeSinceStartup;

            if (timeNow > lastInterval + updateInterval)
            {
                fps          = (float)(frames / (timeNow - lastInterval));
                frames       = 0;
                lastInterval = timeNow;
            }

            // Time since last update
            float dt = 1 / fps;

            if (webCamTextureToMatHelper.isPlaying() && webCamTextureToMatHelper.didUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();
                Imgproc.cvtColor(rgbaMat, hsvMat, Imgproc.COLOR_RGBA2RGB);
                Imgproc.cvtColor(hsvMat, hsvMat, Imgproc.COLOR_RGB2HSV);


                Point[] points = roiPointList.ToArray();

                if (roiPointList.Count == 4)
                {
                    using (Mat backProj = new Mat())
                    {
                        Imgproc.calcBackProject(new List <Mat>(new Mat[] { hsvMat }), new MatOfInt(0), roiHistMat, backProj, new MatOfFloat(0, 180), 1.0);


                        ///////////////////////////////////////////////////////
                        // Kalman Filter Start
                        ///////////////////////////////////////////////////////

                        // Process noise matrix, Q (discrete constant-acceleration model per axis;
                        // the (pn^2 / dt) scale is applied once here, not again when assembling Q)
                        Matrix Q2 = (Mathf.Pow(pn, 2) / dt) * Matrix.Build.DenseOfArray(new float[, ]
                        {
                            { Mathf.Pow(dt, 4) / 4f, Mathf.Pow(dt, 3) / 2f, Mathf.Pow(dt, 2) / 2f },
                            { Mathf.Pow(dt, 3) / 2f, Mathf.Pow(dt, 2), dt },
                            { Mathf.Pow(dt, 2) / 2f, dt, 1 }
                        });

                        Q = Matrix.Build.DenseOfMatrixArray(new Matrix[, ]
                        {
                            { Q2, zero3x3, zero3x3 },
                            { zero3x3, Q2, zero3x3 },
                            { zero3x3, zero3x3, Q2 }
                        });

                        // Measurement noise matrix, R
                        R = Mathf.Pow(mn, 2) * Matrix.Build.DenseIdentity(3);

                        // Build transition and control matrices
                        Matrix F2 = Matrix.Build.DenseOfArray(new float[, ] {
                            { 1, dt, Mathf.Pow(dt, 2) / 2f }, { 0, 1, dt }, { 0, 0, 1 }
                        });
                        Matrix F = Matrix.Build.DenseOfMatrixArray(new Matrix[, ]
                        {
                            { F2, zero3x3, zero3x3 },
                            { zero3x3, F2, zero3x3 },
                            { zero3x3, zero3x3, F2 },
                        });

                        // Prediction
                        Pp = F * Pe * F.Transpose() + Q;
                        xp = F * xe;

                        roiPred = new OpenCVForUnity.Rect((int)(xp[0] - 0.5f * xp[6]), (int)(xp[3] - 0.5f * xp[6]), (int)xp[6], (int)xp[6]);
                        roiRect = new OpenCVForUnity.Rect((int)(xp[0] - 0.5f * roiSearch.width), (int)(xp[3] - 0.5f * roiSearch.height), roiSearch.width, roiSearch.height);
                        // roiRect = roiPred.clone();

                        RotatedRect r = Video.CamShift(backProj, roiRect, termination);
                        ObjectFound = (roiRect.height > 0 && roiRect.width > 0);

                        // Innovation
                        Vector nu;
                        if (ObjectFound)
                        {
                            // Innovation
                            Vector zk = Vector.Build.DenseOfArray(new float[] { (int)(roiRect.x + 0.5f * roiRect.width),
                                                                                (int)(roiRect.y + 0.5f * roiRect.height),
                                                                                (int)(0.5 * (roiRect.width + roiRect.height)) });
                            nu = zk - H * xp;

                            // Search window update
                            roiSearch = r.boundingRect().clone();

                            // Debug
                            SpeedAtFailure = -1f;
                        }
                        else
                        {
                            roiRect = roiPred.clone();

                            if (xp[0] < 0 || xp[3] < 0 || xp[0] > 640 || xp[3] > 480)
                            {
                                xp[0] = 320f; xp[1] = 0; xp[2] = 0;
                                xp[3] = 240f; xp[4] = 0; xp[5] = 0;
                                xp[6] = 40f; xp[7] = 0; xp[8] = 0;

                                roiRect.x      = (int)(320 - 0.5f * 40);
                                roiRect.y      = (int)(240 - 0.5f * 40);
                                roiRect.height = 40;
                                roiRect.width  = 40;

                                roiPred = roiRect.clone();
                            }

                            // Innovation
                            Vector zk = Vector.Build.DenseOfArray(new float[] { (float)(roiRect.x + 0.5f * roiRect.width),
                                                                                (float)(roiRect.y + 0.5f * roiRect.height),
                                                                                (float)(0.5 * (roiRect.width + roiRect.height)) });

                            nu        = zk - H * xp;
                            roiSearch = roiPred.clone();
                        }

                        // Covariance update (information-filter form; note R.Inverse(), not R.Transpose())
                        Pe = (Pp.Inverse() + H.Transpose() * R.Inverse() * H).Inverse();

                        // Kalman gain in the same information form: K = Pe * H^T * R^-1
                        Matrix K = Pe * H.Transpose() * R.Inverse();

                        // Innovation gain
                        Vector gain = K * nu;

                        // State update
                        xe = xp + gain;

                        // Display results to console
                        StateArray      = xe.ToArray();
                        InnovationGain  = gain.ToArray();
                        CovarianceTrace = Pe.Diagonal().ToArray();

                        ///////////////////////////////////////////////////////
                        // Kalman Filter End
                        ///////////////////////////////////////////////////////

                        r.points(points);
                    }

                    if (Input.GetMouseButtonUp(0))
                    {
                        roiPointList.Clear();
                    }
                }


                if (roiPointList.Count < 4)
                {
                    if (Input.GetMouseButtonUp(0))
                    {
                        roiPointList.Add(convertScreenPoint(new Point(Input.mousePosition.x, Input.mousePosition.y), gameObject, Camera.main));
                        //												Debug.Log ("mouse X " + Input.mousePosition.x);
                        //												Debug.Log ("mouse Y " + Input.mousePosition.y);

                        if (!(new OpenCVForUnity.Rect(0, 0, hsvMat.width(), hsvMat.height()).contains(roiPointList[roiPointList.Count - 1])))
                        {
                            roiPointList.RemoveAt(roiPointList.Count - 1);
                        }
                    }

                    if (roiPointList.Count == 4)
                    {
                        using (MatOfPoint roiPointMat = new MatOfPoint(roiPointList.ToArray()))
                        {
                            roiRect   = Imgproc.boundingRect(roiPointMat);
                            roiPred   = roiRect.clone();
                            roiSearch = roiRect.clone();
                        }

                        ///////////////////////////////////////////////////////
                        // Kalman Filter Initialize
                        ///////////////////////////////////////////////////////
                        Pe = Matrix.Build.DenseIdentity(9, 9);
                        Vector z1 = roi2center(roiRect);
                        xe = Vector.Build.DenseOfArray(new float[] { z1[0], 0, 0, z1[1], 0, 0, (roiRect.width + roiRect.height) / 2, 0, 0 });

                        ///////////////////////////////////////////////////////
                        // End Kalman Filter Initialize
                        ///////////////////////////////////////////////////////

                        if (roiHistMat != null)
                        {
                            roiHistMat.Dispose();
                            roiHistMat = null;
                        }
                        roiHistMat = new Mat();

                        using (Mat roiHSVMat = new Mat(hsvMat, roiRect))
                            using (Mat maskMat = new Mat())
                            {
                                Imgproc.calcHist(new List <Mat>(new Mat[] { roiHSVMat }), new MatOfInt(0), maskMat, roiHistMat, new MatOfInt(16), new MatOfFloat(0, 180));
                                Core.normalize(roiHistMat, roiHistMat, 0, 255, Core.NORM_MINMAX);

                                //														Debug.Log ("roiHist " + roiHistMat.ToString ());
                            }
                    }
                }

                if (points.Length < 4)
                {
                    for (int i = 0; i < points.Length; i++)
                    {
                        Core.circle(rgbaMat, points[i], 6, new Scalar(0, 0, 255, 255), 2);
                    }
                }
                else
                {
                    Core.rectangle(rgbaMat, roiRect.tl(), roiRect.br(), new Scalar(0, 255, 0, 255), 2);
                    Core.rectangle(rgbaMat, roiPred.tl(), roiPred.br(), new Scalar(0, 0, 255, 255), 2);
                    Core.rectangle(rgbaMat, roiSearch.tl(), roiSearch.br(), new Scalar(255, 0, 0, 255), 2);
                }

                Core.putText(rgbaMat, "PLEASE TOUCH 4 POINTS", new Point(5, rgbaMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar(255, 255, 255, 255), 2, Core.LINE_AA, false);

                //				Imgproc.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

                Utils.matToTexture2D(rgbaMat, texture, colors);
            }
        }
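Example #12 wraps CamShift in a constant-acceleration Kalman filter over the state [x, vx, ax, y, vy, ay, size, vsize, asize], with the measurement being the window centre and size. The measurement matrix H, the zero3x3 block, and the noise levels are defined elsewhere; below is a sketch of plausible definitions, assuming Matrix and Vector are aliases for MathNet.Numerics single-precision types and with assumed noise values.

        // Hypothetical filter constants assumed by Example #12.
        Matrix zero3x3 = Matrix.Build.Dense(3, 3, 0f);
        // H picks x (index 0), y (index 3) and size (index 6) out of the 9-element state.
        Matrix H = Matrix.Build.DenseOfArray(new float[,] {
            { 1, 0, 0, 0, 0, 0, 0, 0, 0 },
            { 0, 0, 0, 1, 0, 0, 0, 0, 0 },
            { 0, 0, 0, 0, 0, 0, 1, 0, 0 }
        });
        float pn = 9f;   // process-noise level (assumed value)
        float mn = 4f;   // measurement-noise level in pixels (assumed value)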
Example #13
        // Update is called once per frame
        void Update()
        {
            if (webCamTextureToMatHelper.isPlaying() && webCamTextureToMatHelper.didUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();

                Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);

                //						Utils.webCamTextureToMat (webCamTexture, grayMat, colors);


                bgMat.copyTo(dstMat);


                Imgproc.GaussianBlur(grayMat, lineMat, new Size(3, 3), 0);



                grayMat.get(0, 0, grayPixels);

                for (int i = 0; i < grayPixels.Length; i++)
                {
                    maskPixels [i] = 0;

                    if (grayPixels [i] < 70)
                    {
                        grayPixels [i] = 0;

                        maskPixels [i] = 1;
                    }
                    else if (70 <= grayPixels [i] && grayPixels [i] < 120)
                    {
                        grayPixels [i] = 100;
                    }
                    else
                    {
                        grayPixels [i] = 255;

                        maskPixels [i] = 1;
                    }
                }

                grayMat.put(0, 0, grayPixels);

                maskMat.put(0, 0, maskPixels);

                grayMat.copyTo(dstMat, maskMat);



                Imgproc.Canny(lineMat, lineMat, 20, 120);

                lineMat.copyTo(maskMat);

                Core.bitwise_not(lineMat, lineMat);

                lineMat.copyTo(dstMat, maskMat);

//			Imgproc.putText (dstMat, "W:" + dstMat.width () + " H:" + dstMat.height () + " SO:" + Screen.orientation, new Point (5, dstMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (0), 2, Imgproc.LINE_AA, false);


                //		Imgproc.cvtColor(dstMat,rgbaMat,Imgproc.COLOR_GRAY2RGBA);
                //				Utils.matToTexture2D (rgbaMat, texture);

                Utils.matToTexture2D(dstMat, texture, colors);
            }
        }
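Example #13 builds a comic-style frame by copying hard shadows and highlights over a background mat and overlaying Canny edges. The mats and pixel buffers are sized once when the camera resolution is known; below is a sketch of that setup (the initialization point and the plain white background are assumptions consistent with how the buffers are used above).

        // Hypothetical one-time buffer setup assumed by Example #13.
        void InitComicFilterBuffers(Mat frame)
        {
            grayMat = new Mat(frame.rows(), frame.cols(), CvType.CV_8UC1);
            lineMat = new Mat(frame.rows(), frame.cols(), CvType.CV_8UC1);
            maskMat = new Mat(frame.rows(), frame.cols(), CvType.CV_8UC1);
            dstMat = new Mat(frame.rows(), frame.cols(), CvType.CV_8UC1);
            bgMat = new Mat(frame.rows(), frame.cols(), CvType.CV_8UC1, new Scalar(255)); // white "paper"
            grayPixels = new byte[grayMat.cols() * grayMat.rows() * grayMat.channels()];
            maskPixels = new byte[maskMat.cols() * maskMat.rows() * maskMat.channels()];
        }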
Example #14
        // Update is called once per frame
        void Update()
        {
            if (webCamTextureToMatHelper.isPlaying() && webCamTextureToMatHelper.didUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();

                Imgproc.cvtColor(rgbaMat, rgbMat, Imgproc.COLOR_RGBA2RGB);

                // detect markers and estimate pose
                Aruco.detectMarkers(rgbMat, dictionary, corners, ids, detectorParams, rejected);
                //          Aruco.detectMarkers (imgMat, dictionary, corners, ids);
                if (estimatePose && ids.total() > 0)
                {
                    Aruco.estimatePoseSingleMarkers(corners, markerLength, camMatrix, distCoeffs, rvecs, tvecs);
                }


                // draw results
                if (ids.total() > 0)
                {
                    Aruco.drawDetectedMarkers(rgbMat, corners, ids, new Scalar(255, 0, 0));

                    if (estimatePose)
                    {
                        for (int i = 0; i < ids.total(); i++)
                        {
                            Aruco.drawAxis(rgbMat, camMatrix, distCoeffs, rvecs, tvecs, markerLength * 0.5f);

                            //This sample can display ARObject on only first detected marker.
                            if (i == 0)
                            {
                                Calib3d.Rodrigues(rvecs, rotMat);


                                transformationM.SetRow(0, new Vector4((float)rotMat.get(0, 0) [0], (float)rotMat.get(0, 1) [0], (float)rotMat.get(0, 2) [0], (float)tvecs.get(0, 0) [0]));
                                transformationM.SetRow(1, new Vector4((float)rotMat.get(1, 0) [0], (float)rotMat.get(1, 1) [0], (float)rotMat.get(1, 2) [0], (float)tvecs.get(0, 0) [1]));
                                transformationM.SetRow(2, new Vector4((float)rotMat.get(2, 0) [0], (float)rotMat.get(2, 1) [0], (float)rotMat.get(2, 2) [0], (float)tvecs.get(0, 0) [2]));
                                transformationM.SetRow(3, new Vector4(0, 0, 0, 1));

                                if (shouldMoveARCamera)
                                {
                                    ARM = ARGameObject.transform.localToWorldMatrix * invertZM * transformationM.inverse * invertYM;
//                                                              Debug.Log ("ARM " + ARM.ToString ());

                                    ARUtils.SetTransformFromMatrix(ARCamera.transform, ref ARM);
                                }
                                else
                                {
                                    ARM = ARCamera.transform.localToWorldMatrix * invertYM * transformationM * invertZM;
//                                                              Debug.Log ("ARM " + ARM.ToString ());

                                    ARUtils.SetTransformFromMatrix(ARGameObject.transform, ref ARM);
                                }
                            }
                        }
                    }
                }

                if (showRejected && rejected.Count > 0)
                {
                    Aruco.drawDetectedMarkers(rgbMat, rejected, new Mat(), new Scalar(0, 0, 255));
                }


                Imgproc.putText(rgbMat, "W:" + rgbMat.width() + " H:" + rgbMat.height() + " SO:" + Screen.orientation, new Point(5, rgbMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

                Utils.matToTexture2D(rgbMat, texture, webCamTextureToMatHelper.GetBufferColors());
            }
        }
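Example #14 converts the ArUco pose from OpenCV's right-handed camera frame (+Y down, +Z forward) into Unity's left-handed frame by conjugating the transform with axis-flip matrices. A sketch of the invertYM / invertZM matrices the code assumes:

        // Hypothetical axis-flip matrices assumed by Example #14.
        Matrix4x4 invertYM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1)); // flip Y
        Matrix4x4 invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1)); // flip Z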