public void SelectTracker(TrackerType tracker_type)
{
    // Instantiate the OpenCV tracker implementation that matches the
    // requested type and store it in the shared monotracker field.
    // A switch evaluates the selector once and runs at most one branch,
    // unlike the original chain of independent if-statements that kept
    // comparing after a match had already been found.
    // NOTE(review): an unrecognized type leaves monotracker unchanged,
    // matching the original behavior.
    switch (tracker_type)
    {
        case TrackerType.Boosting:
            monotracker = TrackerBoosting.create();
            break;
        case TrackerType.MIL:
            monotracker = TrackerMIL.create();
            break;
        case TrackerType.KCF:
            monotracker = TrackerKCF.create();
            break;
        case TrackerType.TLD:
            monotracker = TrackerTLD.create();
            break;
        case TrackerType.MedianFlow:
            monotracker = TrackerMedianFlow.create();
            break;
        case TrackerType.CSRT:
            monotracker = TrackerCSRT.create();
            break;
        case TrackerType.MOSSE:
            monotracker = TrackerMOSSE.create();
            break;
    }
}
Ejemplo n.º 2
0
        public FaceTrack(Image<Bgr, byte> image, Rectangle location)
        {
            // Remember where the face currently is, then prime the KCF
            // tracker (via the workaround wrapper) with that region of
            // the given frame.
            _location = location;

            _tracker = new TrackerKCFWorkaround();
            _tracker.Init(image.Mat, _location);
        }
 public void Update()
 {
     // Run one update step against a freshly created KCF tracker;
     // the using declaration disposes it when the method exits.
     using var tracker = TrackerKCF.Create();
     UpdateBase(tracker);
 }
 public void Init()
 {
     // Initialize using a freshly created KCF tracker; the using
     // declaration disposes it when the method exits.
     using var tracker = TrackerKCF.Create();
     InitBase(tracker);
 }
        private void Init()
        {
            // Allocate the frame buffer and report whether the capture
            // device opened successfully.
            rgbMat = new Mat();
            Debug.Log(capture.isOpened() ? "capture.isOpened() true" : "capture.isOpened() false");

            // Dump the capture properties for debugging.
            Debug.Log("CAP_PROP_FORMAT: " + capture.get(Videoio.CAP_PROP_FORMAT));
            Debug.Log("CV_CAP_PROP_PREVIEW_FORMAT: " + capture.get(Videoio.CV_CAP_PROP_PREVIEW_FORMAT));
            Debug.Log("CAP_PROP_POS_MSEC: " + capture.get(Videoio.CAP_PROP_POS_MSEC));
            Debug.Log("CAP_PROP_POS_FRAMES: " + capture.get(Videoio.CAP_PROP_POS_FRAMES));
            Debug.Log("CAP_PROP_POS_AVI_RATIO: " + capture.get(Videoio.CAP_PROP_POS_AVI_RATIO));
            Debug.Log("CAP_PROP_FRAME_COUNT: " + capture.get(Videoio.CAP_PROP_FRAME_COUNT));
            Debug.Log("CAP_PROP_FPS: " + capture.get(Videoio.CAP_PROP_FPS));
            Debug.Log("CAP_PROP_FRAME_WIDTH: " + capture.get(Videoio.CAP_PROP_FRAME_WIDTH));
            Debug.Log("CAP_PROP_FRAME_HEIGHT: " + capture.get(Videoio.CAP_PROP_FRAME_HEIGHT));

            // Grab one frame to learn the video dimensions.
            capture.grab();
            capture.retrieve(rgbMat, 0);
            int width  = rgbMat.cols();
            int height = rgbMat.rows();

            // Size the display texture and quad 1:1 with the video, then
            // fit the orthographic camera so the whole frame is visible.
            texture = new Texture2D(width, height, TextureFormat.RGB24, false);
            gameObject.transform.localScale = new Vector3((float)width, (float)height, 1);

            float scaleW = (float)Screen.width / (float)width;
            float scaleH = (float)Screen.height / (float)height;
            Camera.main.orthographicSize = (scaleW < scaleH)
                ? ((float)width * (float)Screen.height / (float)Screen.width) / 2
                : (float)height / 2;

            // Rewind so playback starts again from the first frame.
            capture.set(Videoio.CAP_PROP_POS_FRAMES, 0);

            gameObject.GetComponent <Renderer> ().material.mainTexture = texture;

            // Tracking state: one KCF tracker, its bounding box, and the
            // user's region-selection points.
            monotracker = TrackerKCF.create();
            bbox        = new Rect2d();

            selectedPointList = new List <Point> ();
        }
Ejemplo n.º 6
0
        protected override Tracker CreateTracker()
        {
            // Build a KCF tracker; surface a failure from the native
            // factory immediately instead of handing back null.
            TrackerKCF tracker = TrackerKCF.create();
            return tracker ?? throw new ArgumentNullException(nameof(tracker));
        }
Ejemplo n.º 7
0
 public void AddOne()
 {
     // Adding a single KCF tracker to a MultiTracker must succeed.
     // Using declarations dispose in reverse order (vc, tracker, mt),
     // exactly as the original nested using blocks did.
     using var mt = MultiTracker.Create();
     using var tracker = TrackerKCF.Create();
     using var vc = Image("lenna.png");

     var ret = mt.Add(tracker, vc, new Rect2d(220, 60, 200, 220));
     Assert.True(ret);
 }
Ejemplo n.º 8
0
        // Update is called once per frame: advance the looping video, let the
        // user select new tracking regions, and draw every tracked object.
        void Update()
        {
            // Loop playback: rewind once the last frame has been shown.
            if (capture.get(Videoio.CAP_PROP_POS_FRAMES) >= capture.get(Videoio.CAP_PROP_FRAME_COUNT))
            {
                capture.set(Videoio.CAP_PROP_POS_FRAMES, 0);
            }

            // NOTE: grab() here can raise "PlayerLoop called recursively!" on
            // iOS; WebCamTexture is recommended on that platform.
            if (capture.grab())
            {
                capture.retrieve(rgbMat, 0);
                Imgproc.cvtColor(rgbMat, rgbMat, Imgproc.COLOR_BGR2RGB);


                selectPoint(rgbMat);
                if (selectedPointList.Count < 2)
                {
                    // Still selecting: mark the chosen corner(s).
                    foreach (var point in selectedPointList)
                    {
                        Imgproc.circle(rgbMat, point, 6, new Scalar(0, 0, 255), 2);
                    }
                }
                else
                {
                    // Two corners chosen: register a new KCF tracker over the
                    // rectangle they span. Anchor the region at the minimum
                    // corner so the selection is correct regardless of the
                    // order the corners were picked in (the original used
                    // point[0] as the origin, which produced a wrong region
                    // whenever the second point was above/left of the first).
                    double x = Math.Min(selectedPointList [0].x, selectedPointList [1].x);
                    double y = Math.Min(selectedPointList [0].y, selectedPointList [1].y);
                    double w = Math.Abs(selectedPointList [1].x - selectedPointList [0].x);
                    double h = Math.Abs(selectedPointList [1].y - selectedPointList [0].y);
                    trackers.add(TrackerKCF.create(), rgbMat, new Rect2d(x, y, w, h));
                    selectedPointList.Clear();
                    trackingColorList.Add(new Scalar(UnityEngine.Random.Range(0, 255), UnityEngine.Random.Range(0, 255), UnityEngine.Random.Range(0, 255)));
                }

                // Advance all trackers and draw the regions they report, each
                // in the color assigned when its tracker was added.
                bool updated = trackers.update(rgbMat, objects);
                Debug.Log("updated " + updated);


                Rect2d[] objectsArray = objects.toArray();
                for (int i = 0; i < objectsArray.Length; i++)
                {
                    Imgproc.rectangle(rgbMat, objectsArray [i].tl(), objectsArray [i].br(), trackingColorList [i], 2, 1, 0);
                }

                Imgproc.putText(rgbMat, "Please touch the screen, and select tracking regions.", new Point(5, rgbMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

                Utils.matToTexture2D(rgbMat, texture, colors);
            }
        }
Ejemplo n.º 9
0
        public void Issue459()
        {
            // Regression test for issue #459: creating a KCF tracker with
            // custom compression parameters must not crash.
            var paras = new TrackerKCF.Params();
            paras.CompressFeature = true;
            paras.CompressedSize  = 1;
            paras.Resize          = true;
            paras.DescNpca        = 1;
            paras.DescPca         = 1;

            using var tracker = TrackerKCF.Create(paras);
            GC.KeepAlive(tracker);
        }
Ejemplo n.º 10
0
        /// <summary>
        /// Raises the web cam texture to mat helper initialized event:
        /// creates the display texture, fits the orthographic camera to the
        /// screen, and resets the tracking state.
        /// </summary>
        public void OnWebCamTextureToMatHelperInitialized()
        {
            Debug.Log("OnWebCamTextureToMatHelperInitialized");

            Mat webCamTextureMat = webCamTextureToMatHelper.GetMat();
            int cols = webCamTextureMat.cols();
            int rows = webCamTextureMat.rows();

            // Texture the quad with the camera image and scale it 1:1.
            texture = new Texture2D(cols, rows, TextureFormat.RGBA32, false);
            gameObject.GetComponent <Renderer>().material.mainTexture = texture;
            gameObject.transform.localScale = new Vector3(cols, rows, 1);

            Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

            if (fpsMonitor != null)
            {
                fpsMonitor.Add("width", webCamTextureMat.width().ToString());
                fpsMonitor.Add("height", webCamTextureMat.height().ToString());
                fpsMonitor.Add("orientation", Screen.orientation.ToString());
            }

            // Fit the orthographic camera so the whole frame is visible.
            float width  = webCamTextureMat.width();
            float height = webCamTextureMat.height();
            float widthScale  = (float)Screen.width / width;
            float heightScale = (float)Screen.height / height;
            Camera.main.orthographicSize = (widthScale < heightScale)
                ? (width * (float)Screen.height / (float)Screen.width) / 2
                : height / 2;

            // Reset tracking state: one KCF tracker, its current and previous
            // bounding boxes, and the user's region-selection points.
            monotracker       = TrackerKCF.create();
            bbox              = new Rect2d();
            previousBox       = new Rect2d();
            selectedPointList = new List <Point>();

            grayMat = new Mat(rows, cols, CvType.CV_8UC1);
        }
Ejemplo n.º 11
0
        /// <summary>
        /// Raises the web cam texture to mat helper initialized event.
        /// Creates the display texture, computes the processing area, wires
        /// the HoloLens quad material (projection matrix, vignette), and
        /// resets the tracking state.
        /// </summary>
        public void OnWebCamTextureToMatHelperInitialized()
        {
            Debug.Log("OnWebCamTextureToMatHelperInitialized");

            Mat webCamTextureMat = webCamTextureToMatHelper.GetMat();


            #if NETFX_CORE && !DISABLE_HOLOLENSCAMSTREAM_API
            // HololensCameraStream always returns image data in BGRA format.
            texture = new Texture2D(webCamTextureMat.cols(), webCamTextureMat.rows(), TextureFormat.BGRA32, false);
            #else
            texture = new Texture2D(webCamTextureMat.cols(), webCamTextureMat.rows(), TextureFormat.RGBA32, false);
            #endif

            texture.wrapMode = TextureWrapMode.Clamp;

            Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

            Debug.Log("ScreenMat.width " + webCamTextureMat.width() + " ScreenMat.height " + webCamTextureMat.height());


            // Central sub-rectangle of the frame that will actually be
            // processed, derived from the clipping ratio/offset and clamped
            // to the frame bounds.
            processingAreaRect = new OpenCVForUnity.Rect((int)(webCamTextureMat.cols() * (outsideClippingRatio.x - clippingOffset.x)), (int)(webCamTextureMat.rows() * (outsideClippingRatio.y + clippingOffset.y)),
                                                         (int)(webCamTextureMat.cols() * (1f - outsideClippingRatio.x * 2)), (int)(webCamTextureMat.rows() * (1f - outsideClippingRatio.y * 2)));
            processingAreaRect = processingAreaRect.intersect(new OpenCVForUnity.Rect(0, 0, webCamTextureMat.cols(), webCamTextureMat.rows()));


            grayMat = new Mat(webCamTextureMat.rows(), webCamTextureMat.cols(), CvType.CV_8UC1);

            //

            // HoloLens quad: display the camera texture and pass the clipping
            // offset to the vignette shader.
            // NOTE(review): this Vector4 is built from only two components —
            // z/w default; confirm the shader only reads xy.
            quad_renderer = gameObject.GetComponent <Renderer>() as Renderer;
            quad_renderer.sharedMaterial.SetTexture("_MainTex", texture);
            quad_renderer.sharedMaterial.SetVector("_VignetteOffset", new Vector4(clippingOffset.x, clippingOffset.y));

            Matrix4x4 projectionMatrix;
#if NETFX_CORE && !DISABLE_HOLOLENSCAMSTREAM_API
            // On device the real camera projection matrix is available.
            projectionMatrix = webCamTextureToMatHelper.GetProjectionMatrix();
            quad_renderer.sharedMaterial.SetMatrix("_CameraProjectionMatrix", projectionMatrix);
#else
            //19n
            //Matrix4x4 projectionMatrix2 = webCamTextureToMatHelper.GetProjectionMatrix();
            //Matrix4x4 camera2WorldMatrix = webCamTextureToMatHelper.GetCameraToWorldMatrix();

            //HoloLensCameraStream.Resolution _resolution = CameraStreamHelper.Instance.GetLowestResolution();

            //Vector3 imageCenterDirection = LocatableCameraUtils.PixelCoordToWorldCoord(camera2WorldMatrix, projectionMatrix2, _resolution, new Vector2(_resolution.width / 2, _resolution.height / 2));
            //Vector3 imageBotRightDirection = LocatableCameraUtils.PixelCoordToWorldCoord(camera2WorldMatrix, projectionMatrix2, _resolution, new Vector2(_resolution.width, _resolution.height));
            ////_laser.ShootLaserFrom(camera2WorldMatrix.GetColumn(3), imageBotRightDirection, 10f, _botRightMaterial);
            //Debug.Log(imageBotRightDirection);



            // Fallback (editor / no camera stream): hard-coded projection
            // matrix captured from PhotoCapture's TryGetProjectionMatrix()
            // on a HoloLens. Whether this is a good approach is discussed at:
            // https://forums.hololens.com/discussion/782/live-stream-of-locatable-camera-webcam-in-unity
            projectionMatrix     = Matrix4x4.identity;
            projectionMatrix.m00 = 2.31029f;
            projectionMatrix.m01 = 0.00000f;
            projectionMatrix.m02 = 0.09614f;
            projectionMatrix.m03 = 0.00000f;
            projectionMatrix.m10 = 0.00000f;
            projectionMatrix.m11 = 4.10427f;
            projectionMatrix.m12 = -0.06231f;
            projectionMatrix.m13 = 0.00000f;
            projectionMatrix.m20 = 0.00000f;
            projectionMatrix.m21 = 0.00000f;
            projectionMatrix.m22 = -1.00000f;
            projectionMatrix.m23 = 0.00000f;
            projectionMatrix.m30 = 0.00000f;
            projectionMatrix.m31 = 0.00000f;
            projectionMatrix.m32 = -1.00000f;
            projectionMatrix.m33 = 0.00000f;
            quad_renderer.sharedMaterial.SetMatrix("_CameraProjectionMatrix", projectionMatrix);
#endif

            quad_renderer.sharedMaterial.SetFloat("_VignetteScale", vignetteScale);


            // Derive the vertical FOV / aspect ratio implied by the
            // projection matrix, for debugging.
            float halfOfVerticalFov = Mathf.Atan(1.0f / projectionMatrix.m11);
            float aspectRatio       = (1.0f / Mathf.Tan(halfOfVerticalFov)) / projectionMatrix.m00;
            Debug.Log("halfOfVerticalFov " + halfOfVerticalFov);
            Debug.Log("aspectRatio " + aspectRatio);

            // Reset tracking state and start listening for tap gestures.
            monotracker       = TrackerKCF.create();
            bbox              = new Rect2d();
            selectedPointList = new List <Point>();



            SetupGestureRecognizer();
        }
Ejemplo n.º 12
0
        // Update is called once per frame.
        // Flow: record a touch/click, then either (a) run tracking on the
        // next frame when no selection is in progress, or (b) pause playback
        // while the user is choosing the second corner of a new region.
        void Update()
        {
            if (!sourceToMatHelper.IsInitialized())
            {
                return;
            }

#if ((UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR)
            //Touch
            // Remember the screen point of a finished single touch that was
            // not over a UI element; it is consumed later this frame.
            int touchCount = Input.touchCount;
            if (touchCount == 1)
            {
                Touch t = Input.GetTouch(0);
                if (t.phase == TouchPhase.Ended && !EventSystem.current.IsPointerOverGameObject(t.fingerId))
                {
                    storedTouchPoint = new Point(t.position.x, t.position.y);
                    //Debug.Log ("touch X " + t.position.x);
                    //Debug.Log ("touch Y " + t.position.y);
                }
            }
#else
            //Mouse
            // Same as above but for a mouse click in the editor/desktop.
            if (Input.GetMouseButtonUp(0) && !EventSystem.current.IsPointerOverGameObject())
            {
                storedTouchPoint = new Point(Input.mousePosition.x, Input.mousePosition.y);
                //Debug.Log ("mouse X " + Input.mousePosition.x);
                //Debug.Log ("mouse Y " + Input.mousePosition.y);
            }
#endif

            // Count != 1 means no selection is half-finished: play normally.
            if (selectedPointList.Count != 1)
            {
                if (!sourceToMatHelper.IsPlaying())
                {
                    sourceToMatHelper.Play();
                }

                if (sourceToMatHelper.IsPlaying() && sourceToMatHelper.DidUpdateThisFrame())
                {
                    Mat rgbMat = sourceToMatHelper.GetMat();

                    // Map the stored screen point into texture coordinates
                    // and feed it to the selection logic.
                    if (storedTouchPoint != null)
                    {
                        ConvertScreenPointToTexturePoint(storedTouchPoint, storedTouchPoint, gameObject, texture.width, texture.height);
                        OnTouch(storedTouchPoint, texture.width, texture.height);
                        storedTouchPoint = null;
                    }

                    if (selectedPointList.Count == 1)
                    {
                        // First corner chosen: draw a marker at it.
                        foreach (var point in selectedPointList)
                        {
                            Imgproc.circle(rgbMat, point, 6, new Scalar(0, 0, 255), 2);
                        }
                    }
                    else if (selectedPointList.Count == 2)
                    {
                        using (MatOfPoint selectedPointMat = new MatOfPoint(selectedPointList.ToArray()))
                        {
                            // add tracker.
                            // boundingRect handles corners given in any order.
                            OpenCVForUnity.CoreModule.Rect region = Imgproc.boundingRect(selectedPointMat);
                            trackers.add(TrackerKCF.create(), rgbMat, new Rect2d(region.x, region.y, region.width, region.height));
                        }

                        selectedPointList.Clear();
                        trackingColorList.Add(new Scalar(UnityEngine.Random.Range(0, 255), UnityEngine.Random.Range(0, 255), UnityEngine.Random.Range(0, 255)));
                    }


                    trackers.update(rgbMat, objects);

                    // draw tracked objects regions.
                    Rect2d[] objectsArray = objects.toArray();
                    for (int i = 0; i < objectsArray.Length; i++)
                    {
                        Imgproc.rectangle(rgbMat, objectsArray[i].tl(), objectsArray[i].br(), trackingColorList[i], 2, 1, 0);
                    }


                    // On-screen guidance goes to the FPS monitor console.
                    if (selectedPointList.Count != 1)
                    {
                        //Imgproc.putText (rgbMat, "Please touch the screen, and select tracking regions.", new Point (5, rgbMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
                        if (fpsMonitor != null)
                        {
                            fpsMonitor.consoleText = "Please touch the screen, and select tracking regions.";
                        }
                    }
                    else
                    {
                        //Imgproc.putText (rgbMat, "Please select the end point of the new tracking region.", new Point (5, rgbMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
                        if (fpsMonitor != null)
                        {
                            fpsMonitor.consoleText = "Please select the end point of the new tracking region.";
                        }
                    }

                    Utils.fastMatToTexture2D(rgbMat, texture);
                }
            }
            else
            {
                // Mid-selection: freeze the video and only consume the touch
                // that will become the region's second corner.
                if (sourceToMatHelper.IsPlaying())
                {
                    sourceToMatHelper.Pause();
                }

                if (storedTouchPoint != null)
                {
                    ConvertScreenPointToTexturePoint(storedTouchPoint, storedTouchPoint, gameObject, texture.width, texture.height);
                    OnTouch(storedTouchPoint, texture.width, texture.height);
                    storedTouchPoint = null;
                }
            }
        }
Ejemplo n.º 13
0
        // Update is called once per frame.
        // Records a touch/click, loops the video, lets the user pick two
        // corners for a new tracking region, and draws all tracked objects.
        void Update()
        {
            #if ((UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR)
            //Touch
            // Remember the screen point of a finished single touch that was
            // not over a UI element; it is consumed later this frame.
            int touchCount = Input.touchCount;
            if (touchCount == 1)
            {
                Touch t = Input.GetTouch(0);
                if (t.phase == TouchPhase.Ended && !EventSystem.current.IsPointerOverGameObject(t.fingerId))
                {
                    storedTouchPoint = new Point(t.position.x, t.position.y);
                    //Debug.Log ("touch X " + t.position.x);
                    //Debug.Log ("touch Y " + t.position.y);
                }
            }
            #else
            //Mouse
            // Same as above but for a mouse click in the editor/desktop.
            if (Input.GetMouseButtonUp(0) && !EventSystem.current.IsPointerOverGameObject())
            {
                storedTouchPoint = new Point(Input.mousePosition.x, Input.mousePosition.y);
                //Debug.Log ("mouse X " + Input.mousePosition.x);
                //Debug.Log ("mouse Y " + Input.mousePosition.y);
            }
            #endif

            // Mid-selection (one corner chosen): consume the touch now,
            // before the frame advances, so the second corner lands on the
            // same frame as the first.
            if (selectedPointList.Count == 1)
            {
                if (storedTouchPoint != null)
                {
                    ConvertScreenPointToTexturePoint(storedTouchPoint, storedTouchPoint, gameObject, rgbMat.cols(), rgbMat.rows());
                    OnTouch(rgbMat, storedTouchPoint);
                    storedTouchPoint = null;
                }
            }

            //Loop play
            if (capture.get(Videoio.CAP_PROP_POS_FRAMES) >= capture.get(Videoio.CAP_PROP_FRAME_COUNT))
            {
                capture.set(Videoio.CAP_PROP_POS_FRAMES, 0);
            }

            //error PlayerLoop called recursively! on iOS.reccomend WebCamTexture.
            // Only advance the video when no selection is half-finished.
            if (selectedPointList.Count != 1 && capture.grab())
            {
                capture.retrieve(rgbMat, 0);
                Imgproc.cvtColor(rgbMat, rgbMat, Imgproc.COLOR_BGR2RGB);

                if (storedTouchPoint != null)
                {
                    ConvertScreenPointToTexturePoint(storedTouchPoint, storedTouchPoint, gameObject, rgbMat.cols(), rgbMat.rows());
                    OnTouch(rgbMat, storedTouchPoint);
                    storedTouchPoint = null;
                }

                if (selectedPointList.Count < 2)
                {
                    // Draw a marker at each corner chosen so far.
                    foreach (var point in selectedPointList)
                    {
                        Imgproc.circle(rgbMat, point, 6, new Scalar(0, 0, 255), 2);
                    }
                }
                else
                {
                    // Two corners chosen: add a KCF tracker over their
                    // bounding rectangle (corner order does not matter).
                    using (MatOfPoint selectedPointMat = new MatOfPoint(selectedPointList.ToArray())) {
                        OpenCVForUnity.Rect region = Imgproc.boundingRect(selectedPointMat);
                        trackers.add(TrackerKCF.create(), rgbMat, new Rect2d(region.x, region.y, region.width, region.height));
                    }

                    selectedPointList.Clear();
                    trackingColorList.Add(new Scalar(UnityEngine.Random.Range(0, 255), UnityEngine.Random.Range(0, 255), UnityEngine.Random.Range(0, 255)));
                }

                // Advance all trackers and draw the regions they report.
                bool updated = trackers.update(rgbMat, objects);
                Debug.Log("updated " + updated);
//                if (!updated && objects.rows () > 0) {
//                    OnResetTrackerButtonClick ();
//                }

                Rect2d[] objectsArray = objects.toArray();
                for (int i = 0; i < objectsArray.Length; i++)
                {
                    Imgproc.rectangle(rgbMat, objectsArray [i].tl(), objectsArray [i].br(), trackingColorList [i], 2, 1, 0);
                }

                // On-screen guidance for the next expected action.
                if (selectedPointList.Count != 1)
                {
                    Imgproc.putText(rgbMat, "Please touch the screen, and select tracking regions.", new Point(5, rgbMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
                }
                else
                {
                    Imgproc.putText(rgbMat, "Please select the end point of the new tracking region.", new Point(5, rgbMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
                }

                Utils.matToTexture2D(rgbMat, texture, colors);
            }
        }