Example #1
        /// <summary>
        /// Raises the scan face mask button click event.
        /// </summary>
        public void OnScanFaceMaskButtonClick()
        {
            RemoveFaceMask();

            // Capture webcam frame.
            if (webCamTextureToMatHelper.IsPlaying())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();

                faceRectInMask = DetectFace(rgbaMat);
                if (faceRectInMask.width == 0 && faceRectInMask.height == 0)
                {
                    Debug.Log("A face could not be detected from the input image.");
                    return;
                }

                // Expand the detected face rect and clamp it to the image bounds.
                OpenCVForUnity.Rect rect = new OpenCVForUnity.Rect((int)faceRectInMask.x, (int)faceRectInMask.y, (int)faceRectInMask.width, (int)faceRectInMask.height);
                rect.inflate(rect.x / 5, rect.y / 5);
                rect = rect.intersect(new OpenCVForUnity.Rect(0, 0, rgbaMat.width(), rgbaMat.height()));

                // Crop the face region into a new Mat and copy it to the mask texture.
                faceMaskTexture = new Texture2D(rect.width, rect.height, TextureFormat.RGBA32, false);
                faceMaskMat     = new Mat(rgbaMat, rect).clone();
                OpenCVForUnity.Utils.matToTexture2D(faceMaskMat, faceMaskTexture);
                Debug.Log("faceMaskMat ToString " + faceMaskMat.ToString());

                // Re-detect the face and its landmark points inside the cropped mask image.
                faceRectInMask           = DetectFace(faceMaskMat);
                faceLandmarkPointsInMask = DetectFaceLandmarkPoints(faceMaskMat, faceRectInMask);

                if (faceRectInMask.width == 0 && faceRectInMask.height == 0)
                {
                    RemoveFaceMask();
                    Debug.Log("A face could not be detected from the input image.");
                }
            }
        }
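The DetectFace and DetectFaceLandmarkPoints helpers are not part of this excerpt. Below is a minimal sketch of what DetectFace could look like using OpenCVForUnity's CascadeClassifier; the cascade field, the cascade file name, and the convention of returning a zero-sized UnityEngine.Rect when no face is found are assumptions, not code from the example above.

        // Hedged sketch of a DetectFace helper (not from the example above): run a Haar
        // cascade on an equalized grayscale copy of the frame and return the first hit.
        // The cascade field, its file name, and the zero-sized-Rect convention are assumptions.
        CascadeClassifier cascade;

        UnityEngine.Rect DetectFace(Mat rgbaMat)
        {
            if (cascade == null)
                cascade = new CascadeClassifier(OpenCVForUnity.Utils.getFilePath("haarcascade_frontalface_alt.xml"));

            using (Mat grayMat = new Mat())
            using (MatOfRect faces = new MatOfRect())
            {
                Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
                Imgproc.equalizeHist(grayMat, grayMat);

                cascade.detectMultiScale(grayMat, faces, 1.1, 2,
                    Objdetect.CASCADE_SCALE_IMAGE | Objdetect.CASCADE_FIND_BIGGEST_OBJECT,
                    new Size(grayMat.rows() * 0.2, grayMat.rows() * 0.2), new Size());

                OpenCVForUnity.Rect[] rects = faces.toArray();
                if (rects.Length == 0)
                    return new UnityEngine.Rect();  // width == 0 && height == 0 signals "no face"

                return new UnityEngine.Rect(rects[0].x, rects[0].y, rects[0].width, rects[0].height);
            }
        }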
Example #2
        public virtual Texture2D UpdateLUTTex(int id, Mat src, Mat dst, List <Vector2> src_landmarkPoints, List <Vector2> dst_landmarkPoints)
        {
            if (src_mask != null && (src.width() != src_mask.width() || src.height() != src_mask.height()))
            {
                src_mask.Dispose();
                src_mask = null;
            }
            src_mask = src_mask ?? new Mat(src.rows(), src.cols(), CvType.CV_8UC1, Scalar.all(0));

            if (dst_mask != null && (dst.width() != dst_mask.width() || dst.height() != dst_mask.height()))
            {
                dst_mask.Dispose();
                dst_mask = null;
            }
            dst_mask = dst_mask ?? new Mat(dst.rows(), dst.cols(), CvType.CV_8UC1, Scalar.all(0));

            // Get facial contour points.
            GetFacialContourPoints(src_landmarkPoints, src_facialContourPoints);
            GetFacialContourPoints(dst_landmarkPoints, dst_facialContourPoints);

            // Get facial contour rect.
            OpenCVForUnity.Rect src_facialContourRect = Imgproc.boundingRect(new MatOfPoint(src_facialContourPoints));
            OpenCVForUnity.Rect dst_facialContourRect = Imgproc.boundingRect(new MatOfPoint(dst_facialContourPoints));
            src_facialContourRect = src_facialContourRect.intersect(new OpenCVForUnity.Rect(0, 0, src.width(), src.height()));
            dst_facialContourRect = dst_facialContourRect.intersect(new OpenCVForUnity.Rect(0, 0, dst.width(), dst.height()));

            Mat src_ROI      = new Mat(src, src_facialContourRect);
            Mat dst_ROI      = new Mat(dst, dst_facialContourRect);
            Mat src_mask_ROI = new Mat(src_mask, src_facialContourRect);
            Mat dst_mask_ROI = new Mat(dst_mask, dst_facialContourRect);

            GetPointsInFrame(src_mask_ROI, src_facialContourPoints, src_facialContourPoints);
            GetPointsInFrame(dst_mask_ROI, dst_facialContourPoints, dst_facialContourPoints);

            src_mask_ROI.setTo(new Scalar(0));
            dst_mask_ROI.setTo(new Scalar(0));
            Imgproc.fillConvexPoly(src_mask_ROI, new MatOfPoint(src_facialContourPoints), new Scalar(255));
            Imgproc.fillConvexPoly(dst_mask_ROI, new MatOfPoint(dst_facialContourPoints), new Scalar(255));

            Texture2D LUTTex;

            if (LUTTexDict.ContainsKey(id))
            {
                LUTTex = LUTTexDict[id];
            }
            else
            {
                LUTTex = new Texture2D(256, 1, TextureFormat.RGB24, false);
                LUTTexDict.Add(id, LUTTex);
            }

            FaceMaskShaderUtils.CalculateLUT(src_ROI, dst_ROI, src_mask_ROI, dst_mask_ROI, LUTTex);

            return(LUTTex);
        }
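GetFacialContourPoints is not shown in this excerpt. Under the assumption that the landmark lists follow the 68-point dlib layout, one plausible sketch builds the contour from the jaw line closed across the eyebrows; the exact index set used by the real project may differ.

        // Hedged sketch of GetFacialContourPoints (not from the example above), assuming
        // 68-point dlib landmarks: the jaw line (0..16) closed across the eyebrows, written
        // into a pre-allocated Point[] of matching length. The index set is an assumption.
        static readonly int[] facialContourIndices = {
            0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16,  // jaw line
            26, 24, 22, 21, 19, 17                                      // back across the eyebrows
        };

        void GetFacialContourPoints(List<Vector2> landmarkPoints, Point[] contourPoints)
        {
            for (int i = 0; i < facialContourIndices.Length; i++)
            {
                Vector2 p = landmarkPoints[facialContourIndices[i]];
                contourPoints[i] = new Point(p.x, p.y);
            }
        }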
        private void DetectInRegion(Mat img, Rect r, List <Rect> detectedObjectsInRegions)
        {
            Rect r0 = new Rect(new Point(), img.size());
            Rect r1 = new Rect(r.x, r.y, r.width, r.height);

            Rect.inflate(r1, (int)((r1.width * innerParameters.coeffTrackingWindowSize) - r1.width) / 2,
                         (int)((r1.height * innerParameters.coeffTrackingWindowSize) - r1.height) / 2);
            r1 = Rect.intersect(r0, r1);

            if (r1 == null || (r1.width <= 0) || (r1.height <= 0))
            {
                Debug.Log("DetectionBasedTracker::detectInRegion: Empty intersection");
                return;
            }


            int d = Math.Min(r.width, r.height);

            d = (int)Math.Round(d * innerParameters.coeffObjectSizeToTrack);


            MatOfRect tmpobjects = new MatOfRect();

            Mat img1 = new Mat(img, r1); //subimage for rectangle -- without data copying

            cascade.detectMultiScale(img1, tmpobjects, 1.1, 2, 0 | Objdetect.CASCADE_DO_CANNY_PRUNING | Objdetect.CASCADE_SCALE_IMAGE | Objdetect.CASCADE_FIND_BIGGEST_OBJECT, new Size(d, d), new Size());


            Rect[] tmpobjectsArray = tmpobjects.toArray();
            int    len             = tmpobjectsArray.Length;

            for (int i = 0; i < len; i++)
            {
                Rect tmp    = tmpobjectsArray [i];
                Rect curres = new Rect(new Point(tmp.x + r1.x, tmp.y + r1.y), tmp.size());
                detectedObjectsInRegions.Add(curres);
            }
        }
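DetectInRegion is normally called once per tracked object so that the cascade only re-scans a small window around each object's last known position. A hedged sketch of such a call site, meant to live in the same class as the method above (the method name DetectObjectsInTrackedRegions is an assumption):

        // Hedged sketch of a DetectInRegion call site (not from the excerpt above):
        // re-run detection only inside each tracked object's last known position.
        void DetectObjectsInTrackedRegions(Mat imgGray, List<Rect> detectedObjectsInRegions)
        {
            detectedObjectsInRegions.Clear();
            for (int i = 0; i < trackedObjects.Count; i++)
            {
                int n = trackedObjects[i].lastPositions.Count;
                if (n > 0)
                {
                    DetectInRegion(imgGray, trackedObjects[i].lastPositions[n - 1], detectedObjectsInRegions);
                }
            }
        }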
Example #4
        /// <summary>
        /// Raises the web cam texture to mat helper initialized event.
        /// </summary>
        public void OnWebCamTextureToMatHelperInitialized()
        {
            Debug.Log("OnWebCamTextureToMatHelperInitialized");

            Mat webCamTextureMat = webCamTextureToMatHelper.GetMat();


            #if NETFX_CORE && !DISABLE_HOLOLENSCAMSTREAM_API
            // HololensCameraStream always returns image data in BGRA format.
            texture = new Texture2D(webCamTextureMat.cols(), webCamTextureMat.rows(), TextureFormat.BGRA32, false);
            #else
            texture = new Texture2D(webCamTextureMat.cols(), webCamTextureMat.rows(), TextureFormat.RGBA32, false);
            #endif

            texture.wrapMode = TextureWrapMode.Clamp;

            Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);


            processingAreaRect = new OpenCVForUnity.Rect((int)(webCamTextureMat.cols() * (outsideClippingRatio.x - clippingOffset.x)), (int)(webCamTextureMat.rows() * (outsideClippingRatio.y + clippingOffset.y)),
                                                         (int)(webCamTextureMat.cols() * (1f - outsideClippingRatio.x * 2)), (int)(webCamTextureMat.rows() * (1f - outsideClippingRatio.y * 2)));
            processingAreaRect = processingAreaRect.intersect(new OpenCVForUnity.Rect(0, 0, webCamTextureMat.cols(), webCamTextureMat.rows()));


            dstMat            = new Mat(webCamTextureMat.rows(), webCamTextureMat.cols(), CvType.CV_8UC1);
            dstMatClippingROI = new Mat(dstMat, processingAreaRect);

            // fill all black.
            //Imgproc.rectangle (dstMat, new Point (0, 0), new Point (dstMat.width (), dstMat.height ()), new Scalar (0, 0, 0, 0), -1);


            grayMat = new Mat(dstMatClippingROI.rows(), dstMatClippingROI.cols(), CvType.CV_8UC1);
            lineMat = new Mat(dstMatClippingROI.rows(), dstMatClippingROI.cols(), CvType.CV_8UC1);
            maskMat = new Mat(dstMatClippingROI.rows(), dstMatClippingROI.cols(), CvType.CV_8UC1);

            //create a striped background.
            bgMat = new Mat(dstMatClippingROI.rows(), dstMatClippingROI.cols(), CvType.CV_8UC1, new Scalar(255));
            for (int i = 0; i < bgMat.rows() * 2.5f; i = i + 4)
            {
                Imgproc.line(bgMat, new Point(0, 0 + i), new Point(bgMat.cols(), -bgMat.cols() + i), new Scalar(0), 1);
            }

            grayPixels = new byte[grayMat.cols() * grayMat.rows() * grayMat.channels()];
            maskPixels = new byte[maskMat.cols() * maskMat.rows() * maskMat.channels()];


            quad_renderer = gameObject.GetComponent <Renderer> () as Renderer;
            quad_renderer.sharedMaterial.SetTexture("_MainTex", texture);
            quad_renderer.sharedMaterial.SetVector("_VignetteOffset", new Vector4(clippingOffset.x, clippingOffset.y));

            Matrix4x4 projectionMatrix;
            #if NETFX_CORE && !DISABLE_HOLOLENSCAMSTREAM_API
            projectionMatrix = webCamTextureToMatHelper.GetProjectionMatrix();
            quad_renderer.sharedMaterial.SetMatrix("_CameraProjectionMatrix", projectionMatrix);
            #else
            // This value is obtained from PhotoCapture's TryGetProjectionMatrix() method; it is unclear whether this approach is ideal.
            // See the discussion in this thread: https://forums.hololens.com/discussion/782/live-stream-of-locatable-camera-webcam-in-unity
            projectionMatrix     = Matrix4x4.identity;
            projectionMatrix.m00 = 2.31029f;
            projectionMatrix.m01 = 0.00000f;
            projectionMatrix.m02 = 0.09614f;
            projectionMatrix.m03 = 0.00000f;
            projectionMatrix.m10 = 0.00000f;
            projectionMatrix.m11 = 4.10427f;
            projectionMatrix.m12 = -0.06231f;
            projectionMatrix.m13 = 0.00000f;
            projectionMatrix.m20 = 0.00000f;
            projectionMatrix.m21 = 0.00000f;
            projectionMatrix.m22 = -1.00000f;
            projectionMatrix.m23 = 0.00000f;
            projectionMatrix.m30 = 0.00000f;
            projectionMatrix.m31 = 0.00000f;
            projectionMatrix.m32 = -1.00000f;
            projectionMatrix.m33 = 0.00000f;
            quad_renderer.sharedMaterial.SetMatrix("_CameraProjectionMatrix", projectionMatrix);
            #endif

            quad_renderer.sharedMaterial.SetFloat("_VignetteScale", vignetteScale);


            float halfOfVerticalFov = Mathf.Atan(1.0f / projectionMatrix.m11);
            float aspectRatio       = (1.0f / Mathf.Tan(halfOfVerticalFov)) / projectionMatrix.m00;
            Debug.Log("halfOfVerticalFov " + halfOfVerticalFov);
            Debug.Log("aspectRatio " + aspectRatio);

            //
            //Imgproc.rectangle (dstMat, new Point (0, 0), new Point (webCamTextureMat.width (), webCamTextureMat.height ()), new Scalar (126, 126, 126, 255), -1);
            //
        }
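The method above ends right after halfOfVerticalFov and aspectRatio are computed. One typical follow-up, shown here only as a hedged sketch and not taken from the example, is to scale a quad placed a fixed distance in front of the camera so that it exactly fills the frustum described by the projection matrix; the distance value and the target transform are assumptions.

        // Hedged sketch (not from the example above): size a quad `distance` metres in
        // front of the camera so it fills the camera frustum described by projectionMatrix.
        static void FitQuadToCameraFrustum(Transform quad, Matrix4x4 projectionMatrix, float distance)
        {
            float halfOfVerticalFov = Mathf.Atan(1.0f / projectionMatrix.m11);
            float aspectRatio       = (1.0f / Mathf.Tan(halfOfVerticalFov)) / projectionMatrix.m00;

            float quadHeight = 2.0f * distance * Mathf.Tan(halfOfVerticalFov);
            float quadWidth  = quadHeight * aspectRatio;

            quad.localScale    = new Vector3(quadWidth, quadHeight, 1.0f);
            quad.localPosition = new Vector3(0.0f, 0.0f, distance);
        }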
Example #5
        /// <summary>
        /// Raises the web cam texture to mat helper inited event.
        /// </summary>
        public void OnWebCamTextureToMatHelperInited()
        {
            Debug.Log("OnWebCamTextureToMatHelperInited");

            Mat webCamTextureMat = webCamTextureToMatHelper.GetDownScaleMat(webCamTextureToMatHelper.GetMat());

            texture = new Texture2D(webCamTextureMat.cols(), webCamTextureMat.rows(), TextureFormat.RGBA32, false);

            gameObject.GetComponent <Renderer> ().material.mainTexture = texture;

            Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);
            Debug.Log("webCamTextureMat.width " + webCamTextureMat.width() + " webCamTextureMat.height " + webCamTextureMat.height());


            processingAreaRect = new OpenCVForUnity.Rect((int)(webCamTextureMat.cols() * (outsideClippingRatio.x - clippingOffset.x)), (int)(webCamTextureMat.rows() * (outsideClippingRatio.y + clippingOffset.y)),
                                                         (int)(webCamTextureMat.cols() * (1f - outsideClippingRatio.x * 2)), (int)(webCamTextureMat.rows() * (1f - outsideClippingRatio.y * 2)));
            processingAreaRect = processingAreaRect.intersect(new OpenCVForUnity.Rect(0, 0, webCamTextureMat.cols(), webCamTextureMat.rows()));

            Debug.Log("webCamTextureMat.width " + webCamTextureMat.width() + " webCamTextureMat.height " + webCamTextureMat.height());
            Debug.Log("processingAreaRect.x " + processingAreaRect.x + " processingAreaRect.y " + processingAreaRect.y + " processingAreaRect.width " + processingAreaRect.width + " processingAreaRect.height " + processingAreaRect.height);


            processingAreaMat = new Mat(processingAreaRect.height, processingAreaRect.width, CvType.CV_8UC4);

            grayMat = new Mat(processingAreaMat.rows(), processingAreaMat.cols(), CvType.CV_8UC1);

            faces = new MatOfRect();


            quad_renderer = gameObject.GetComponent <Renderer> () as Renderer;
            quad_renderer.sharedMaterial.SetTexture("_MainTex", texture);
            quad_renderer.sharedMaterial.SetVector("_VignetteOffset", new Vector4(clippingOffset.x, clippingOffset.y));

            // This value is obtained from PhotoCapture's TryGetProjectionMatrix() method; it is unclear whether this approach is ideal.
            // See the discussion in this thread: https://forums.hololens.com/discussion/782/live-stream-of-locatable-camera-webcam-in-unity
            Matrix4x4 projectionMatrix = Matrix4x4.identity;

            projectionMatrix.m00 = 2.31029f;
            projectionMatrix.m01 = 0.00000f;
            projectionMatrix.m02 = 0.09614f;
            projectionMatrix.m03 = 0.00000f;
            projectionMatrix.m10 = 0.00000f;
            projectionMatrix.m11 = 4.10427f;
            projectionMatrix.m12 = -0.06231f;
            projectionMatrix.m13 = 0.00000f;
            projectionMatrix.m20 = 0.00000f;
            projectionMatrix.m21 = 0.00000f;
            projectionMatrix.m22 = -1.00000f;
            projectionMatrix.m23 = 0.00000f;
            projectionMatrix.m30 = 0.00000f;
            projectionMatrix.m31 = 0.00000f;
            projectionMatrix.m32 = -1.00000f;
            projectionMatrix.m33 = 0.00000f;
            quad_renderer.sharedMaterial.SetMatrix("_CameraProjectionMatrix", projectionMatrix);
            quad_renderer.sharedMaterial.SetFloat("_VignetteScale", vignetteScale);


            float halfOfVerticalFov = Mathf.Atan(1.0f / projectionMatrix.m11);
            float aspectRatio       = (1.0f / Mathf.Tan(halfOfVerticalFov)) / projectionMatrix.m00;

            Debug.Log("halfOfVerticalFov " + halfOfVerticalFov);
            Debug.Log("aspectRatio " + aspectRatio);
        }
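The initialization above only allocates the buffers; the per-frame work is not part of this excerpt. A hedged sketch of that counterpart follows: it detects faces only inside processingAreaRect and draws the hits back in full-frame coordinates. The cascade field, the detection parameters, the method name, and the premise that rgbaMat is the same downscaled frame used at initialization are all assumptions.

        // Hedged sketch of the per-frame counterpart to the initialization above (not from
        // the excerpt): detect faces only inside processingAreaRect, then draw the results
        // back into the full frame and update the display texture.
        void ProcessFrame(Mat rgbaMat)
        {
            // Copy the clipped processing area into the working buffer.
            using (Mat roi = new Mat(rgbaMat, processingAreaRect))
            {
                roi.copyTo(processingAreaMat);
            }

            Imgproc.cvtColor(processingAreaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
            Imgproc.equalizeHist(grayMat, grayMat);

            cascade.detectMultiScale(grayMat, faces, 1.1, 2, Objdetect.CASCADE_SCALE_IMAGE,
                new Size(grayMat.rows() * 0.2, grayMat.rows() * 0.2), new Size());

            // Offset each detection by the processing-area origin to get full-frame coordinates.
            foreach (OpenCVForUnity.Rect face in faces.toArray())
            {
                Imgproc.rectangle(rgbaMat,
                    new Point(processingAreaRect.x + face.x, processingAreaRect.y + face.y),
                    new Point(processingAreaRect.x + face.x + face.width, processingAreaRect.y + face.y + face.height),
                    new Scalar(255, 0, 0, 255), 2);
            }

            OpenCVForUnity.Utils.matToTexture2D(rgbaMat, texture);
        }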
        private void UpdateTrackedObjects(List <Rect> detectedObjects)
        {
            int N1 = (int)trackedObjects.Count;
            int N2 = (int)detectedObjects.Count;

            for (int i = 0; i < N1; i++)
            {
                trackedObjects [i].numDetectedFrames++;
            }

            int[] correspondence = new int[N2];
            for (int i = 0; i < N2; i++)
            {
                correspondence [i] = (int)TrackedState.NEW_RECTANGLE;
            }


            for (int i = 0; i < N1; i++)
            {
                TrackedObject curObject = trackedObjects [i];

                int bestIndex = -1;
                int bestArea  = -1;

                int numpositions = (int)curObject.lastPositions.Count;

                //if (numpositions > 0) UnityEngine.Debug.LogError("numpositions > 0 is false");

                Rect prevRect = curObject.lastPositions [numpositions - 1];

                for (int j = 0; j < N2; j++)
                {
                    if (correspondence [j] >= 0)
                    {
                        //Debug.Log("DetectionBasedTracker::updateTrackedObjects: j=" + i + " is rejected, because it has correspondence=" + correspondence[j]);
                        continue;
                    }
                    if (correspondence [j] != (int)TrackedState.NEW_RECTANGLE)
                    {
                        //Debug.Log("DetectionBasedTracker::updateTrackedObjects: j=" + j + " is rejected, because it is intersected with another rectangle");
                        continue;
                    }

                    Rect r = Rect.intersect(prevRect, detectedObjects [j]);
                    if (r != null && (r.width > 0) && (r.height > 0))
                    {
                        //LOGD("DetectionBasedTracker::updateTrackedObjects: There is intersection between prevRect and detectedRect, r={%d, %d, %d x %d}",
                        //        r.x, r.y, r.width, r.height);
                        correspondence [j] = (int)TrackedState.INTERSECTED_RECTANGLE;

                        if (r.area() > bestArea)
                        {
                            //LOGD("DetectionBasedTracker::updateTrackedObjects: The area of intersection is %d, it is better than bestArea=%d", r.area(), bestArea);
                            bestIndex = j;
                            bestArea  = (int)r.area();
                        }
                    }
                }

                if (bestIndex >= 0)
                {
                    //LOGD("DetectionBasedTracker::updateTrackedObjects: The best correspondence for i=%d is j=%d", i, bestIndex);
                    correspondence [bestIndex] = i;

                    for (int j = 0; j < N2; j++)
                    {
                        if (correspondence [j] >= 0)
                        {
                            continue;
                        }

                        Rect r = Rect.intersect(detectedObjects [j], detectedObjects [bestIndex]);
                        if (r != null && (r.width > 0) && (r.height > 0))
                        {
                            //LOGD("DetectionBasedTracker::updateTrackedObjects: Found intersection between "
                            //    "rectangles j=%d and bestIndex=%d, rectangle j=%d is marked as intersected", j, bestIndex, j);
                            correspondence [j] = (int)TrackedState.INTERSECTED_RECTANGLE;
                        }
                    }
                }
                else
                {
                    //LOGD("DetectionBasedTracker::updateTrackedObjects: There is no correspondence for i=%d ", i);
                    curObject.numFramesNotDetected++;
                }
            }

            //LOGD("DetectionBasedTracker::updateTrackedObjects: start second cycle");
            for (int j = 0; j < N2; j++)
            {
                int i = correspondence [j];
                if (i >= 0)  //add position
                {
                    //Debug.Log("DetectionBasedTracker::updateTrackedObjects: add position");
                    trackedObjects [i].lastPositions.Add(detectedObjects [j]);
                    while ((int)trackedObjects [i].lastPositions.Count > (int)innerParameters.numLastPositionsToTrack)
                    {
                        trackedObjects [i].lastPositions.Remove(trackedObjects [i].lastPositions [0]);
                    }
                    trackedObjects [i].numFramesNotDetected = 0;
                }
                else if (i == (int)TrackedState.NEW_RECTANGLE)     //new object
                {
                    //Debug.Log("DetectionBasedTracker::updateTrackedObjects: new object");
                    trackedObjects.Add(new TrackedObject(detectedObjects [j]));
                }
                else
                {
                    //Debug.Log ("DetectionBasedTracker::updateTrackedObjects: was auxiliary intersection");
                }
            }

            int           t = 0;
            TrackedObject it;

            while (t < trackedObjects.Count)
            {
                it = trackedObjects [t];

                if ((it.numFramesNotDetected > parameters.maxTrackLifetime)
                    ||
                    ((it.numDetectedFrames <= innerParameters.numStepsToWaitBeforeFirstShow)
                     &&
                     (it.numFramesNotDetected > innerParameters.numStepsToTrackWithoutDetectingIfObjectHasNotBeenShown)))
                {
                    //int numpos = (int)it.lastPositions.Count;
                    //if (numpos > 0) UnityEngine.Debug.LogError("numpos > 0 is false");
                    //Rect r = it.lastPositions [numpos - 1];
                    //Debug.Log("DetectionBasedTracker::updateTrackedObjects: deleted object " + r.x + " " + r.y + " " + r.width + " " + r.height);

                    trackedObjects.Remove(it);
                }
                else
                {
                    t++;
                }
            }
        }
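UpdateTrackedObjects relies on a TrackedObject type and a TrackedState enum that are not shown. The sketch below reconstructs the minimal shape they would need from how they are used above; the field defaults are assumptions, and the OpenCV DetectionBasedTracker this is ported from carries additional state.

        // Hedged reconstruction of the minimal types UpdateTrackedObjects relies on,
        // inferred from how they are used above. Field defaults are assumptions.
        enum TrackedState
        {
            NEW_RECTANGLE = -1,         // no correspondence yet: becomes a new tracked object
            INTERSECTED_RECTANGLE = -2  // overlapped another rectangle: ignored this pass
        }

        class TrackedObject
        {
            public List<Rect> lastPositions = new List<Rect>();
            public int numDetectedFrames    = 1;  // incremented once per update cycle
            public int numFramesNotDetected = 0;  // reset whenever a detection matches

            public TrackedObject(Rect rect)
            {
                lastPositions.Add(rect);
            }
        }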
Example #7
        /// <summary>
        /// Raises the web cam texture to mat helper initialized event.
        /// </summary>
        public void OnWebCamTextureToMatHelperInitialized()
        {
            Debug.Log("OnWebCamTextureToMatHelperInitialized");

            Mat webCamTextureMat = webCamTextureToMatHelper.GetMat();


            #if NETFX_CORE && !DISABLE_HOLOLENSCAMSTREAM_API
            // HololensCameraStream always returns image data in BGRA format.
            texture = new Texture2D(webCamTextureMat.cols(), webCamTextureMat.rows(), TextureFormat.BGRA32, false);
            #else
            texture = new Texture2D(webCamTextureMat.cols(), webCamTextureMat.rows(), TextureFormat.RGBA32, false);
            #endif

            texture.wrapMode = TextureWrapMode.Clamp;

            Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

            Debug.Log("ScreenMat.width " + webCamTextureMat.width() + " ScreenMat.height " + webCamTextureMat.height());


            processingAreaRect = new OpenCVForUnity.Rect((int)(webCamTextureMat.cols() * (outsideClippingRatio.x - clippingOffset.x)), (int)(webCamTextureMat.rows() * (outsideClippingRatio.y + clippingOffset.y)),
                                                         (int)(webCamTextureMat.cols() * (1f - outsideClippingRatio.x * 2)), (int)(webCamTextureMat.rows() * (1f - outsideClippingRatio.y * 2)));
            processingAreaRect = processingAreaRect.intersect(new OpenCVForUnity.Rect(0, 0, webCamTextureMat.cols(), webCamTextureMat.rows()));


            grayMat = new Mat(webCamTextureMat.rows(), webCamTextureMat.cols(), CvType.CV_8UC1);

            //

            //HL
            quad_renderer = gameObject.GetComponent <Renderer>() as Renderer;
            quad_renderer.sharedMaterial.SetTexture("_MainTex", texture);
            quad_renderer.sharedMaterial.SetVector("_VignetteOffset", new Vector4(clippingOffset.x, clippingOffset.y));

            Matrix4x4 projectionMatrix;
#if NETFX_CORE && !DISABLE_HOLOLENSCAMSTREAM_API
            projectionMatrix = webCamTextureToMatHelper.GetProjectionMatrix();
            quad_renderer.sharedMaterial.SetMatrix("_CameraProjectionMatrix", projectionMatrix);
#else
            //19n
            //Matrix4x4 projectionMatrix2 = webCamTextureToMatHelper.GetProjectionMatrix();
            //Matrix4x4 camera2WorldMatrix = webCamTextureToMatHelper.GetCameraToWorldMatrix();

            //HoloLensCameraStream.Resolution _resolution = CameraStreamHelper.Instance.GetLowestResolution();

            //Vector3 imageCenterDirection = LocatableCameraUtils.PixelCoordToWorldCoord(camera2WorldMatrix, projectionMatrix2, _resolution, new Vector2(_resolution.width / 2, _resolution.height / 2));
            //Vector3 imageBotRightDirection = LocatableCameraUtils.PixelCoordToWorldCoord(camera2WorldMatrix, projectionMatrix2, _resolution, new Vector2(_resolution.width, _resolution.height));
            ////_laser.ShootLaserFrom(camera2WorldMatrix.GetColumn(3), imageBotRightDirection, 10f, _botRightMaterial);
            //Debug.Log(imageBotRightDirection);



            //HL
            //This value is obtained from PhotoCapture's TryGetProjectionMatrix() method; it is unclear whether this approach is ideal.
            //See the discussion in this thread: https://forums.hololens.com/discussion/782/live-stream-of-locatable-camera-webcam-in-unity
            projectionMatrix     = Matrix4x4.identity;
            projectionMatrix.m00 = 2.31029f;
            projectionMatrix.m01 = 0.00000f;
            projectionMatrix.m02 = 0.09614f;
            projectionMatrix.m03 = 0.00000f;
            projectionMatrix.m10 = 0.00000f;
            projectionMatrix.m11 = 4.10427f;
            projectionMatrix.m12 = -0.06231f;
            projectionMatrix.m13 = 0.00000f;
            projectionMatrix.m20 = 0.00000f;
            projectionMatrix.m21 = 0.00000f;
            projectionMatrix.m22 = -1.00000f;
            projectionMatrix.m23 = 0.00000f;
            projectionMatrix.m30 = 0.00000f;
            projectionMatrix.m31 = 0.00000f;
            projectionMatrix.m32 = -1.00000f;
            projectionMatrix.m33 = 0.00000f;
            quad_renderer.sharedMaterial.SetMatrix("_CameraProjectionMatrix", projectionMatrix);
#endif

            quad_renderer.sharedMaterial.SetFloat("_VignetteScale", vignetteScale);


            float halfOfVerticalFov = Mathf.Atan(1.0f / projectionMatrix.m11);
            float aspectRatio       = (1.0f / Mathf.Tan(halfOfVerticalFov)) / projectionMatrix.m00;
            Debug.Log("halfOfVerticalFov " + halfOfVerticalFov);
            Debug.Log("aspectRatio " + aspectRatio);

            //13n
            monotracker       = TrackerKCF.create();
            bbox              = new Rect2d();
            selectedPointList = new List <Point>();



            SetupGestureRecognizer();
        }
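The setup above creates the KCF tracker, but the per-frame tracking step is not part of this excerpt. A hedged sketch of that step follows: once two selected points have been collected (presumably by the gesture recognizer), the tracker is (re)created and initialized with the box they span, and on later frames it is updated and its estimate drawn. The method name, the isTrackingInitialized flag, and the drawing color are assumptions.

        // Hedged sketch of the per-frame tracking step (not from the excerpt above).
        bool isTrackingInitialized = false;  // assumed flag: true once init() has been called

        void UpdateTracking(Mat rgbaMat)
        {
            if (selectedPointList.Count == 2)
            {
                // Build the initial bounding box from the two selected points.
                Point p0 = selectedPointList[0];
                Point p1 = selectedPointList[1];
                bbox = new Rect2d(System.Math.Min(p0.x, p1.x), System.Math.Min(p0.y, p1.y),
                                  System.Math.Abs(p1.x - p0.x), System.Math.Abs(p1.y - p0.y));

                monotracker = TrackerKCF.create();
                monotracker.init(rgbaMat, bbox);
                isTrackingInitialized = true;
                selectedPointList.Clear();
            }
            else if (isTrackingInitialized && selectedPointList.Count == 0)
            {
                // update() refines bbox in place and returns false when the target is lost.
                if (monotracker.update(rgbaMat, bbox))
                {
                    Imgproc.rectangle(rgbaMat, new Point(bbox.x, bbox.y),
                        new Point(bbox.x + bbox.width, bbox.y + bbox.height),
                        new Scalar(255, 0, 0, 255), 2);
                }
            }
        }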