Example #1
    double normalizeArea(RegionCandidate candidate)
    {
        var maxArea = searchRect.area();

        candidate.calcArea();
        return(candidate.area / maxArea);
    }
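A minimal usage sketch for context; the enclosing class, its searchRect field, and the RegionCandidate members are assumptions inferred from the snippet, not part of the original source:

    // Hypothetical context (not in the original listing): searchRect is assumed
    // to be a field of the enclosing class, and RegionCandidate is assumed to
    // expose calcArea() and an area member, as the snippet implies.
    OpenCVForUnity.Rect searchRect;

    void logAreaRatios(List<RegionCandidate> candidates)
    {
        foreach (var candidate in candidates)
        {
            // ratio of the candidate's contour area to the search window area
            Debug.Log("area ratio: " + normalizeArea(candidate));
        }
    }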
Example #2
    //Smooth the image (skip updating the display if it differs too little from the previous frame)
    private Mat SmoothesImage(Mat currentImage)
    {
        Mat hierarchy = new Mat();
        List <MatOfPoint> contours = new List <MatOfPoint>();
        Mat diffImage = new Mat();

        if (_smoothesImage == null)
        {
            _smoothesImage = new Mat(currentImage.height(), currentImage.width(), CvType.CV_8UC1);
            currentImage.copyTo(_smoothesImage);
        }
        Core.absdiff(currentImage, _smoothesImage, diffImage);
        Imgproc.findContours(diffImage, contours, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);
        for (int index = 0; index < contours.Count; index++)
        {
            OpenCVForUnity.Rect tempRect = Imgproc.boundingRect(contours[index]);
            //Area of the changed region
            if (tempRect.area() > (MatchWidth * MatchHeight * _smoothesImagePer))
            {
                currentImage.copyTo(_smoothesImage);
                _DepthImageChangeFlag = true;
                return(currentImage);
            }
        }
        return(_smoothesImage);
    }
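A hedged caller sketch: SmoothesImage returns the incoming frame (and raises _DepthImageChangeFlag) only when some changed region's bounding box exceeds _smoothesImagePer of the match area, so a caller can skip redundant work on near-identical frames. The surrounding method and runDetection are hypothetical:

    // Hypothetical caller (not in the original listing).
    void OnNewDepthFrame(Mat depthFrame)
    {
        _DepthImageChangeFlag = false;
        Mat stableFrame = SmoothesImage(depthFrame);
        if (_DepthImageChangeFlag)
        {
            runDetection(stableFrame);  // hypothetical downstream step
        }
    }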
Example #3
    //Generate objects directly from the contours without shape recognition
    private List <OpenCVForUnity.Rect> analysisContours(List <MatOfPoint> contours)
    {
        List <OpenCVForUnity.Rect> depthImageChangeRectList = new List <OpenCVForUnity.Rect>();

        for (int index = 0; index < contours.Count; index++)
        {
            OpenCVForUnity.Rect testDepthRect = Imgproc.boundingRect(contours[index]);
            if (testDepthRect.height > 0 && testDepthRect.width > 0 && testDepthRect.area() > 0)
            {
                depthImageChangeRectList.Add(testDepthRect);
            }
        }
        return(depthImageChangeRectList);
    }
Example #4
    //Recognize contours
    private bool analysisContoursRect(int index, List <MatOfPoint> contours, Mat result, List <MatchObject> matchObject)
    {
        OpenCVForUnity.Rect _testDepthRect = Imgproc.boundingRect(contours[index]);
        float minAreaSize = _minDepthObjectSizePer * _drawBlock.MatchHeight * _drawBlock.MatchWidth;

        if (_testDepthRect.area() > minAreaSize)
        {
            //Declare containers for the point data
            MatOfInt          hullInt       = new MatOfInt();
            List <Point>      hullPointList = new List <Point>();
            MatOfPoint        hullPointMat  = new MatOfPoint();
            List <MatOfPoint> hullPoints    = new List <MatOfPoint>();
            MatOfInt4         defects       = new MatOfInt4();
            //Filter the point data
            MatOfPoint2f Temp2f = new MatOfPoint2f();
            //Convert contours[index] from MatOfPoint to MatOfPoint2f
            contours[index].convertTo(Temp2f, CvType.CV_32FC2);
            //Simplify the polygon on Temp2f (approxPolyDP requires a MatOfPoint2f)
            Imgproc.approxPolyDP(Temp2f, Temp2f, 30, true);
            //Convert back to MatOfPoint and put the new values back into the contours list
            Temp2f.convertTo(contours[index], CvType.CV_32S);

            //Compute the convex hull around the contour
            Imgproc.convexHull(contours[index], hullInt);
            List <Point> pointMatList = contours[index].toList();
            List <int>   hullIntList  = hullInt.toList();
            for (int j = 0; j < hullIntList.Count; j++)
            {
                hullPointList.Add(pointMatList[hullIntList[j]]);
                hullPointMat.fromList(hullPointList);
                hullPoints.Add(hullPointMat);
            }
            if (hullIntList.Count == 4)
            {
                if (!setMatchObject(index, pointMatList, contours, hullPoints, result, matchObject))
                {
                    //Debug.Log("setMatchObject fail");
                }
            }
            //Release memory
            defects.Dispose();
            hullPointList.Clear();
            hullPointMat.Dispose();
            hullInt.Dispose();
            hullPoints.Clear();
            return(true);
        }
        return(false);
    }
Example #5
        private void getObjects(List <Rect> result)
        {
            result.Clear();

            for (int i = 0; i < trackedObjects.Count; i++)
            {
                Rect r = calcTrackedObjectPositionToShow(i);
                if (r.area() == 0)
                {
                    continue;
                }
                result.Add(r);
                //LOGD("DetectionBasedTracker::process: found a object with SIZE %d x %d, rect={%d, %d, %d x %d}", r.width, r.height, r.x, r.y, r.width, r.height);
            }
        }
Example #6
    public RegionCandidateSet elliminateByArea(OpenCVForUnity.Rect searchRect, double minArea, double maxArea)
    {
        var    results   = new List <RegionCandidate> ();
        double wholeArea = searchRect.area();

        foreach (var candidate in this.candidates)
        {
            double areaRatio = candidate.area / wholeArea;
            if (minArea < areaRatio && areaRatio < maxArea)
            {
                results.Add(candidate);
            }
        }
        candidates = results;
        return(this);
    }
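Because elliminateByArea filters this.candidates in place and returns this, calls can be chained; a short usage sketch (candidateSet and the 0.01/0.5 bounds are illustrative, not from the source):

    // Hypothetical usage: keep candidates covering between 1% and 50% of the
    // search window; the returned set is the same (now filtered) instance.
    var filtered = candidateSet.elliminateByArea(searchRect, 0.01, 0.5);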
Example #7
    public void score(List <RegionCandidate> candidates)
    {
        double max = searchRect.area();

        Point preCenter = ARUtil.getRectCenterPoint(previousRect);

        Point  centerPt    = ARUtil.getRectCenterPoint(searchRect);
        double maxDistance = (searchRect.width + searchRect.height);

        foreach (var candidate in candidates)
        {
            candidate.ellipse();              // compute the circumscribed ellipse
            // circularity + contour area + closeness to the image center + closeness to the previous frame's region
            candidate.score = w_circularity * candidate.circularity
                              + w_area * candidate.area / max
                              + w_center * ARUtil.clothness(centerPt, candidate.center, maxDistance)
                              + w_previousFrame * ARUtil.clothness(preCenter, candidate.center, maxDistance);
        }
    }
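ARUtil.clothness is not part of the listing; given how it is weighted here, a plausible reading (the name suggests "closeness") is a normalized inverse distance. A sketch under that assumption, not the library's actual code:

    // Hypothetical sketch of ARUtil.clothness: 1.0 when the points coincide,
    // falling linearly to 0.0 at maxDistance.
    public static double clothness(Point a, Point b, double maxDistance)
    {
        double dx = a.x - b.x;
        double dy = a.y - b.y;
        return Math.Max(0.0, 1.0 - Math.Sqrt(dx * dx + dy * dy) / maxDistance);
    }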
Example #8
        private void updateTrackedObjects(List <Rect> detectedObjects)
        {
            int N1 = (int)trackedObjects.Count;
            int N2 = (int)detectedObjects.Count;

            for (int i = 0; i < N1; i++)
            {
                trackedObjects [i].numDetectedFrames++;
            }

            int[] correspondence = new int[N2];
            for (int i = 0; i < N2; i++)
            {
                correspondence [i] = (int)TrackedState.NEW_RECTANGLE;
            }


            for (int i = 0; i < N1; i++)
            {
                TrackedObject curObject = trackedObjects [i];

                int bestIndex = -1;
                int bestArea  = -1;

                int numpositions = (int)curObject.lastPositions.Count;

                //if (numpositions > 0) UnityEngine.Debug.LogError("numpositions > 0 is false");

                Rect prevRect = curObject.lastPositions [numpositions - 1];

                for (int j = 0; j < N2; j++)
                {
                    if (correspondence [j] >= 0)
                    {
                        //Debug.Log("DetectionBasedTracker::updateTrackedObjects: j=" + i + " is rejected, because it has correspondence=" + correspondence[j]);
                        continue;
                    }
                    if (correspondence [j] != (int)TrackedState.NEW_RECTANGLE)
                    {
                        //Debug.Log("DetectionBasedTracker::updateTrackedObjects: j=" + j + " is rejected, because it is intersected with another rectangle");
                        continue;
                    }

                    Rect r = Intersect(prevRect, detectedObjects [j]);
                    if ((r.width > 0) && (r.height > 0))
                    {
                        //LOGD("DetectionBasedTracker::updateTrackedObjects: There is intersection between prevRect and detectedRect, r={%d, %d, %d x %d}",
                        //        r.x, r.y, r.width, r.height);
                        correspondence [j] = (int)TrackedState.INTERSECTED_RECTANGLE;

                        if (r.area() > bestArea)
                        {
                            //LOGD("DetectionBasedTracker::updateTrackedObjects: The area of intersection is %d, it is better than bestArea=%d", r.area(), bestArea);
                            bestIndex = j;
                            bestArea  = (int)r.area();
                        }
                    }
                }

                if (bestIndex >= 0)
                {
                    //LOGD("DetectionBasedTracker::updateTrackedObjects: The best correspondence for i=%d is j=%d", i, bestIndex);
                    correspondence [bestIndex] = i;

                    for (int j = 0; j < N2; j++)
                    {
                        if (correspondence [j] >= 0)
                        {
                            continue;
                        }

                        Rect r = Intersect(detectedObjects [j], detectedObjects [bestIndex]);
                        if ((r.width > 0) && (r.height > 0))
                        {
                            //LOGD("DetectionBasedTracker::updateTrackedObjects: Found intersection between "
                            //    "rectangles j=%d and bestIndex=%d, rectangle j=%d is marked as intersected", j, bestIndex, j);
                            correspondence [j] = (int)TrackedState.INTERSECTED_RECTANGLE;
                        }
                    }
                }
                else
                {
                    //LOGD("DetectionBasedTracker::updateTrackedObjects: There is no correspondence for i=%d ", i);
                    curObject.numFramesNotDetected++;
                }
            }

            //LOGD("DetectionBasedTracker::updateTrackedObjects: start second cycle");
            for (int j = 0; j < N2; j++)
            {
                int i = correspondence [j];
                if (i >= 0)                                                  //add position
                {
                    //Debug.Log("DetectionBasedTracker::updateTrackedObjects: add position");
                    trackedObjects [i].lastPositions.Add(detectedObjects [j]);
                    while ((int)trackedObjects[i].lastPositions.Count > (int)innerParameters.numLastPositionsToTrack)
                    {
                        trackedObjects [i].lastPositions.Remove(trackedObjects [i].lastPositions [0]);
                    }
                    trackedObjects [i].numFramesNotDetected = 0;
                }
                else if (i == (int)TrackedState.NEW_RECTANGLE)               //new object
                {
                    //Debug.Log("DetectionBasedTracker::updateTrackedObjects: new object");
                    trackedObjects.Add(new TrackedObject(detectedObjects [j]));
                }
                else
                {
                    Debug.Log("DetectionBasedTracker::updateTrackedObjects: was auxiliary intersection");
                }
            }

            int           t = 0;
            TrackedObject it;

            while (t < trackedObjects.Count)
            {
                it = trackedObjects [t];

                if ((it.numFramesNotDetected > parameters.maxTrackLifetime)
                    ||
                    ((it.numDetectedFrames <= innerParameters.numStepsToWaitBeforeFirstShow)
                     &&
                     (it.numFramesNotDetected > innerParameters.numStepsToTrackWithoutDetectingIfObjectHasNotBeenShown))
                    )
                {
                    //int numpos = (int)it.lastPositions.Count;
                    //if (numpos > 0) UnityEngine.Debug.LogError("numpos > 0 is false");
                    //Rect r = it.lastPositions [numpos - 1];

                    //Debug.Log("DetectionBasedTracker::updateTrackedObjects: deleted object " + r.x + " " + r.y + " " + r.width + " " + r.height);

                    trackedObjects.Remove(it);
                }
                else
                {
                    t++;
                }
            }
        }
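Intersect is referenced above but not listed; a standard axis-aligned rectangle intersection matches how the result is used (width and height are positive only on overlap, and r.area() measures the overlap). A sketch, not necessarily the original helper:

        // Hypothetical sketch of Intersect: the overlap of two axis-aligned
        // rectangles; width/height come out as 0 when they are disjoint.
        private Rect Intersect(Rect a, Rect b)
        {
            int x = Math.Max(a.x, b.x);
            int y = Math.Max(a.y, b.y);
            int w = Math.Min(a.x + a.width, b.x + b.width) - x;
            int h = Math.Min(a.y + a.height, b.y + b.height) - y;
            return new Rect(x, y, Math.Max(w, 0), Math.Max(h, 0));
        }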
Example #9
        // Update is called once per frame
        void Update()
        {
            if (webCamTextureToMatHelper.isPlaying() && webCamTextureToMatHelper.didUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();

                Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
                Imgproc.equalizeHist(grayMat, grayMat);


                if (!threadComm.shouldDetectInMultiThread)
                {
                    lock (thisLock) {
                        grayMat.copyTo(grayMat4Thread);
                    }
                    threadComm.shouldDetectInMultiThread = true;
                }


                OpenCVForUnity.Rect[] rects;

                if (didUpdateTheDetectionResult)
                {
                    lock (thisLock) {
                        //Debug.Log("DetectionBasedTracker::process: get _rectsWhereRegions were got from resultDetect");
                        rectsWhereRegions = resultDetect.toArray();
                        rects             = resultDetect.toArray();
                    }
                    didUpdateTheDetectionResult = false;


                    for (int i = 0; i < rects.Length; i++)
                    {
                        Imgproc.rectangle(rgbaMat, new Point(rects [i].x, rects [i].y), new Point(rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar(0, 0, 255, 255), 2);
                    }
                }
                else
                {
                    //Debug.Log("DetectionBasedTracker::process: get _rectsWhereRegions from previous positions");
                    rectsWhereRegions = new Rect[trackedObjects.Count];

                    for (int i = 0; i < trackedObjects.Count; i++)
                    {
                        int n = trackedObjects [i].lastPositions.Count;
                        //if (n > 0) UnityEngine.Debug.LogError("n > 0 is false");

                        Rect r = trackedObjects [i].lastPositions [n - 1];
                        if (r.area() == 0)
                        {
                            Debug.Log("DetectionBasedTracker::process: ERROR: ATTENTION: strange algorithm's behavior: trackedObjects[i].rect() is empty");
                            continue;
                        }

                        //correction by speed of rectangle
                        if (n > 1)
                        {
                            Point center      = centerRect(r);
                            Point center_prev = centerRect(trackedObjects [i].lastPositions [n - 2]);
                            Point shift       = new Point((center.x - center_prev.x) * innerParameters.coeffObjectSpeedUsingInPrediction,
                                                          (center.y - center_prev.y) * innerParameters.coeffObjectSpeedUsingInPrediction);

                            r.x += (int)Math.Round(shift.x);
                            r.y += (int)Math.Round(shift.y);
                        }
                        rectsWhereRegions [i] = r;
                    }

                    rects = rectsWhereRegions;
                    for (int i = 0; i < rects.Length; i++)
                    {
                        Imgproc.rectangle(rgbaMat, new Point(rects [i].x, rects [i].y), new Point(rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar(0, 255, 0, 255), 2);
                    }
                }

                if (rectsWhereRegions.Length > 0)
                {
                    detectedObjectsInRegions.Clear();

                    int len = rectsWhereRegions.Length;
                    for (int i = 0; i < len; i++)
                    {
                        detectInRegion(grayMat, rectsWhereRegions [i], detectedObjectsInRegions);
                    }
                }

                updateTrackedObjects(detectedObjectsInRegions);

                getObjects(resultObjects);


                rects = resultObjects.ToArray();
                for (int i = 0; i < rects.Length; i++)
                {
                    //Debug.Log ("detect faces " + rects [i]);

                    Imgproc.rectangle(rgbaMat, new Point(rects [i].x, rects [i].y), new Point(rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar(255, 0, 0, 255), 2);
                }



//				Imgproc.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

                Utils.matToTexture2D(rgbaMat, texture, colors);
            }
        }
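centerRect is used for the speed correction above but not listed; the natural definition (a sketch, assuming the usual integer Rect fields) is the rectangle's geometric center:

        // Hypothetical sketch of centerRect: the center point of a rectangle.
        private Point centerRect(Rect r)
        {
            return new Point(r.x + r.width * 0.5, r.y + r.height * 0.5);
        }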
Example #10
        /// <summary>
        /// Processes points by filter.
        /// </summary>
        /// <param name="img">Image mat.</param>
        /// <param name="srcPoints">Input points.</param>
        /// <param name="dstPoints">Output points.</param>
        /// <param name="drawDebugPoints">if true, draws debug points.</param>
        /// <returns>Output points.</returns>
        public override List <Vector2> Process(Mat img, List <Vector2> srcPoints, List <Vector2> dstPoints = null, bool drawDebugPoints = false)
        {
            if (srcPoints != null && srcPoints.Count != numberOfElements)
            {
                throw new ArgumentException("The number of elements is different.");
            }

            if (srcPoints != null)
            {
                if (dstPoints == null)
                {
                    dstPoints = new List <Vector2> ();
                }
                if (dstPoints != null && dstPoints.Count != numberOfElements)
                {
                    dstPoints.Clear();
                    for (int i = 0; i < numberOfElements; i++)
                    {
                        dstPoints.Add(new Vector2());
                    }
                }

                for (int i = 0; i < numberOfElements; i++)
                {
                    src_points [i].x = srcPoints [i].x;
                    src_points [i].y = srcPoints [i].y;
                }

                // calc diffDlib
                prevTrackPtsMat.fromList(src_points);
                OpenCVForUnity.Rect rect = Imgproc.boundingRect(prevTrackPtsMat);
                double diffDlib          = this.diffDlib * rect.area() / 40000.0 * diffCheckSensitivity;

                // if the face is moving so fast, use dlib to detect the face
                double diff = calDistanceDiff(src_points, last_points);
                if (drawDebugPoints)
                {
                    Debug.Log("variance:" + diff);
                }
                if (diff > diffDlib)
                {
                    for (int i = 0; i < numberOfElements; i++)
                    {
                        dstPoints [i] = srcPoints [i];
                    }

                    if (drawDebugPoints)
                    {
                        Debug.Log("DLIB");
                        for (int i = 0; i < numberOfElements; i++)
                        {
                            Imgproc.circle(img, new Point(srcPoints [i].x, srcPoints [i].y), 2, new Scalar(255, 0, 0, 255), -1);
                        }
                    }

                    flag = false;
                }
                else
                {
                    if (!flag)
                    {
                        // Set initial state estimate.
                        Mat     statePreMat = KF.get_statePre();
                        float[] tmpStatePre = new float[statePreMat.total()];
                        for (int i = 0; i < numberOfElements; i++)
                        {
                            tmpStatePre [i * 2]     = (float)srcPoints [i].x;
                            tmpStatePre [i * 2 + 1] = (float)srcPoints [i].y;
                        }
                        statePreMat.put(0, 0, tmpStatePre);
                        Mat     statePostMat = KF.get_statePost();
                        float[] tmpStatePost = new float[statePostMat.total()];
                        for (int i = 0; i < numberOfElements; i++)
                        {
                            tmpStatePost [i * 2]     = (float)srcPoints [i].x;
                            tmpStatePost [i * 2 + 1] = (float)srcPoints [i].y;
                        }
                        statePostMat.put(0, 0, tmpStatePost);

                        flag = true;
                    }

                    // Kalman Prediction
                    KF.predict();

                    // Update Measurement
                    float[] tmpMeasurement = new float[measurement.total()];
                    for (int i = 0; i < numberOfElements; i++)
                    {
                        tmpMeasurement [i * 2]     = (float)srcPoints [i].x;
                        tmpMeasurement [i * 2 + 1] = (float)srcPoints [i].y;
                    }
                    measurement.put(0, 0, tmpMeasurement);

                    // Correct Measurement
                    Mat     estimated    = KF.correct(measurement);
                    float[] tmpEstimated = new float[estimated.total()];
                    estimated.get(0, 0, tmpEstimated);
                    for (int i = 0; i < numberOfElements; i++)
                    {
                        predict_points [i].x = tmpEstimated [i * 2];
                        predict_points [i].y = tmpEstimated [i * 2 + 1];
                    }
                    estimated.Dispose();

                    for (int i = 0; i < numberOfElements; i++)
                    {
                        dstPoints [i] = new Vector2((float)predict_points [i].x, (float)predict_points [i].y);
                    }

                    if (drawDebugPoints)
                    {
                        Debug.Log("Kalman Filter");
                        for (int i = 0; i < numberOfElements; i++)
                        {
                            Imgproc.circle(img, predict_points [i], 2, new Scalar(0, 255, 0, 255), -1);
                        }
                    }
                }

                for (int i = 0; i < numberOfElements; i++)
                {
                    last_points [i].x = src_points [i].x;
                    last_points [i].y = src_points [i].y;
                }

                return(dstPoints);
            }
            else
            {
                return(dstPoints == null ? srcPoints : dstPoints);
            }
        }
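calDistanceDiff drives the dlib-versus-Kalman decision above but is not included in the listing; one plausible implementation, consistent with the "variance:" debug label, is the variance of the per-point displacement magnitudes. A sketch under that assumption:

        // Hypothetical sketch of calDistanceDiff: variance of the distances
        // each point moved between the two point sets.
        private double calDistanceDiff(List<Point> curPoints, List<Point> lastPoints)
        {
            if (curPoints.Count == 0 || curPoints.Count != lastPoints.Count)
            {
                return 0.0;
            }

            double[] distances = new double[curPoints.Count];
            double mean = 0.0;
            for (int i = 0; i < curPoints.Count; i++)
            {
                double dx = curPoints[i].x - lastPoints[i].x;
                double dy = curPoints[i].y - lastPoints[i].y;
                distances[i] = Math.Sqrt(dx * dx + dy * dy);
                mean += distances[i];
            }
            mean /= distances.Length;

            double variance = 0.0;
            foreach (double d in distances)
            {
                variance += (d - mean) * (d - mean);
            }
            return variance / distances.Length;
        }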
        // Update is called once per frame
        void Update()
        {
            if (!initDone)
            {
                return;
            }


            if (screenOrientation != Screen.orientation)
            {
                screenOrientation = Screen.orientation;
                updateLayout();
            }

#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
            if (webCamTexture.width > 16 && webCamTexture.height > 16)
            {
#else
            if (webCamTexture.didUpdateThisFrame)
            {
#endif

                Utils.webCamTextureToMat(webCamTexture, rgbaMat, colors);

                if (webCamDevice.isFrontFacing)
                {
                    if (webCamTexture.videoRotationAngle == 0)
                    {
                        Core.flip(rgbaMat, rgbaMat, 1);
                    }
                    else if (webCamTexture.videoRotationAngle == 90)
                    {
                        Core.flip(rgbaMat, rgbaMat, 0);
                    }
                    else if (webCamTexture.videoRotationAngle == 180)
                    {
                        Core.flip(rgbaMat, rgbaMat, 0);
                    }
                    else if (webCamTexture.videoRotationAngle == 270)
                    {
                        Core.flip(rgbaMat, rgbaMat, 1);
                    }
                }
                else
                {
                    if (webCamTexture.videoRotationAngle == 180)
                    {
                        Core.flip(rgbaMat, rgbaMat, -1);
                    }
                    else if (webCamTexture.videoRotationAngle == 270)
                    {
                        Core.flip(rgbaMat, rgbaMat, -1);
                    }
                }


                Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
                Imgproc.equalizeHist(grayMat, grayMat);


                if (!threadComm.shouldDetectInMultiThread)
                {
                    lock (thisLock) {
                        grayMat.copyTo(grayMat4Thread);
                    }
                    threadComm.shouldDetectInMultiThread = true;
                }


                OpenCVForUnity.Rect[] rects;

                if (didUpdateTheDetectionResult)
                {
                    lock (thisLock) {
                        //Debug.Log("DetectionBasedTracker::process: get _rectsWhereRegions were got from resultDetect");
                        rectsWhereRegions = resultDetect.toArray();
                        rects             = resultDetect.toArray();
                    }
                    didUpdateTheDetectionResult = false;


                    for (int i = 0; i < rects.Length; i++)
                    {
                        Core.rectangle(rgbaMat, new Point(rects [i].x, rects [i].y), new Point(rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar(0, 0, 255, 255), 2);
                    }
                }
                else
                {
                    //Debug.Log("DetectionBasedTracker::process: get _rectsWhereRegions from previous positions");
                    rectsWhereRegions = new Rect[trackedObjects.Count];

                    for (int i = 0; i < trackedObjects.Count; i++)
                    {
                        int n = trackedObjects [i].lastPositions.Count;
                        //if (n > 0) UnityEngine.Debug.LogError("n > 0 is false");

                        Rect r = trackedObjects [i].lastPositions [n - 1];
                        if (r.area() == 0)
                        {
                            Debug.Log("DetectionBasedTracker::process: ERROR: ATTENTION: strange algorithm's behavior: trackedObjects[i].rect() is empty");
                            continue;
                        }

                        //correction by speed of rectangle
                        if (n > 1)
                        {
                            Point center      = centerRect(r);
                            Point center_prev = centerRect(trackedObjects [i].lastPositions [n - 2]);
                            Point shift       = new Point((center.x - center_prev.x) * innerParameters.coeffObjectSpeedUsingInPrediction,
                                                          (center.y - center_prev.y) * innerParameters.coeffObjectSpeedUsingInPrediction);

                            r.x += (int)Math.Round(shift.x);
                            r.y += (int)Math.Round(shift.y);
                        }
                        rectsWhereRegions [i] = r;
                    }

                    rects = rectsWhereRegions;
                    for (int i = 0; i < rects.Length; i++)
                    {
                        Core.rectangle(rgbaMat, new Point(rects [i].x, rects [i].y), new Point(rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar(0, 255, 0, 255), 2);
                    }
                }

                if (rectsWhereRegions.Length > 0)
                {
                    detectedObjectsInRegions.Clear();

                    int len = rectsWhereRegions.Length;
                    for (int i = 0; i < len; i++)
                    {
                        detectInRegion(grayMat, rectsWhereRegions [i], detectedObjectsInRegions);
                    }
                }

                updateTrackedObjects(detectedObjectsInRegions);

                getObjects(resultObjects);


                rects = resultObjects.ToArray();
                for (int i = 0; i < rects.Length; i++)
                {
                    //Debug.Log ("detect faces " + rects [i]);

                    Core.rectangle(rgbaMat, new Point(rects [i].x, rects [i].y), new Point(rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar(255, 0, 0, 255), 2);
                }

                Utils.matToTexture2D(rgbaMat, texture, colors);
            }
        }
Example #12
    //Use the depth contours to determine RGB colors
    public Mat getContours(Mat srcColorMat, Mat srcDepthMat)
    {
        Mat ColorMat = new Mat();
        Mat DepthMat = new Mat();
        Mat HsvMat   = new Mat();

        srcColorMat.copyTo(ColorMat);
        srcDepthMat.copyTo(DepthMat);
        Imgproc.cvtColor(ColorMat, HsvMat, Imgproc.COLOR_BGR2HSV);

        List <ColorObject> colorObjects        = new List <ColorObject>();
        Mat                  resultMat         = new Mat(DepthMat.height(), DepthMat.width(), CvType.CV_8UC1);
        Mat                  hierarchy         = new Mat();
        List <Point>         ConsistP          = new List <Point>();
        List <MatOfPoint>    contours          = new List <MatOfPoint>();
        List <List <Point> > trianglePointList = new List <List <Point> >();

        Imgproc.findContours(DepthMat, contours, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);

        int           numObjects    = contours.Count;
        List <Scalar> clickRGB      = new List <Scalar>();
        List <Scalar> clickHSV      = new List <Scalar>();
        List <int>    HullCountList = new List <int>();

        for (int i = 0; i < numObjects; i++)
        {
            Imgproc.drawContours(resultMat, contours, i, new Scalar(255), 1);
        }
        double[] GetRGB      = new double[10];
        float    minAreaSize = _minDepthObjectSizePer * _drawBlock.MatchHeight * _drawBlock.MatchWidth;

        if (numObjects > 0)
        {
            for (int index = 0; index < numObjects; index++)
            {
                OpenCVForUnity.Rect R0 = Imgproc.boundingRect(contours[index]);

                if (R0.area() > minAreaSize)
                {
                    //Declare containers for the point data
                    MatOfInt          hullInt       = new MatOfInt();
                    List <Point>      hullPointList = new List <Point>();
                    MatOfPoint        hullPointMat  = new MatOfPoint();
                    List <MatOfPoint> hullPoints    = new List <MatOfPoint>();
                    MatOfInt4         defects       = new MatOfInt4();
                    //Filter the point data
                    MatOfPoint2f Temp2f = new MatOfPoint2f();
                    //Convert contours[index] from MatOfPoint to MatOfPoint2f
                    contours[index].convertTo(Temp2f, CvType.CV_32FC2);
                    //Simplify the polygon on Temp2f (approxPolyDP requires a MatOfPoint2f)
                    Imgproc.approxPolyDP(Temp2f, Temp2f, 30, true);
                    //Convert back to MatOfPoint and put the new values back into the contours list
                    Temp2f.convertTo(contours[index], CvType.CV_32S);

                    //Compute the convex hull around the contour
                    Imgproc.convexHull(contours[index], hullInt);
                    List <Point> pointMatList = contours[index].toList();
                    List <int>   hullIntList  = hullInt.toList();
                    for (int j = 0; j < hullIntList.Count; j++)
                    {
                        hullPointList.Add(pointMatList[hullIntList[j]]);
                        hullPointMat.fromList(hullPointList);
                        hullPoints.Add(hullPointMat);
                    }
                    ConsistP.Add(new Point(R0.x, R0.y));
                    ConsistP.Add(new Point(R0.x + R0.width, R0.y + R0.height));
                    ConsistP.Add(new Point(R0.x + R0.width, R0.y));
                    ConsistP.Add(new Point(R0.x, R0.y + R0.height));
                    clickRGB.Add(clickcolor(ColorMat, R0));
                    clickHSV.Add(clickcolor(HsvMat, R0));
                    HullCountList.Add(hullIntList.Count);
                    trianglePointList.Add(pointMatList);
                    //Release memory
                    defects.Dispose();
                    hullPointList.Clear();
                    hullPointMat.Dispose();
                    hullInt.Dispose();
                    hullPoints.Clear();


                    //Debug.Log("ID = " +  index + " Color = " + clickcolor(ColorMat, R0));
                }
            }
            //Find objects by color
            _matchColorObjectList = setColorMatchObject(ConsistP, trianglePointList, clickRGB, clickHSV, resultMat, HullCountList);
        }
        return(resultMat);
    }
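clickcolor is not shown in the listing; from its use above (sampling a representative RGB and HSV color for each bounding rect), a hedged sketch could take the mean color of the pixels inside the rect:

    // Hypothetical sketch of clickcolor: mean color of the pixels inside the
    // bounding rect, returned as a Scalar.
    private Scalar clickcolor(Mat img, OpenCVForUnity.Rect r)
    {
        Mat roi = new Mat(img, r);
        Scalar meanColor = Core.mean(roi);
        roi.Dispose();
        return meanColor;
    }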
        /// <summary>
        /// Processes points by filter.
        /// </summary>
        /// <param name="img">Image mat.</param>
        /// <param name="srcPoints">Input points.</param>
        /// <param name="dstPoints">Output points.</param>
        /// <param name="drawDebugPoints">if true, draws debug points.</param>
        /// <returns>Output points.</returns>
        public override List <Vector2> Process(Mat img, List <Vector2> srcPoints, List <Vector2> dstPoints = null, bool drawDebugPoints = false)
        {
            if (srcPoints != null && srcPoints.Count != numberOfElements)
            {
                throw new ArgumentException("The number of elements is different.");
            }

            if (srcPoints == null)
            {
                return(dstPoints == null ? srcPoints : dstPoints);
            }

            if (!flag)
            {
                if (img.channels() == 4)
                {
                    Imgproc.cvtColor(img, prevgray, Imgproc.COLOR_RGBA2GRAY);
                }
                else if (img.channels() == 3)
                {
                    Imgproc.cvtColor(img, prevgray, Imgproc.COLOR_RGB2GRAY);
                }
                else
                {
                    if (prevgray.total() == 0)
                    {
                        prevgray = img.clone();
                    }
                    else
                    {
                        img.copyTo(prevgray);
                    }
                }

                for (int i = 0; i < numberOfElements; i++)
                {
                    prevTrackPts[i] = new Point(srcPoints [i].x, srcPoints [i].y);
                }

                flag = true;
            }

            if (srcPoints != null)
            {
                if (dstPoints == null)
                {
                    dstPoints = new List <Vector2> ();
                }
                if (dstPoints != null && dstPoints.Count != numberOfElements)
                {
                    dstPoints.Clear();
                    for (int i = 0; i < numberOfElements; i++)
                    {
                        dstPoints.Add(new Vector2());
                    }
                }

                if (img.channels() == 4)
                {
                    Imgproc.cvtColor(img, gray, Imgproc.COLOR_RGBA2GRAY);
                }
                else if (img.channels() == 3)
                {
                    Imgproc.cvtColor(img, gray, Imgproc.COLOR_RGB2GRAY);
                }
                else
                {
                    if (gray.total() == 0)
                    {
                        gray = img.clone();
                    }
                    else
                    {
                        img.copyTo(gray);
                    }
                }

                if (prevgray.total() > 0)
                {
                    mOP2fPrevTrackPts.fromList(prevTrackPts);
                    mOP2fNextTrackPts.fromList(nextTrackPts);
                    Video.calcOpticalFlowPyrLK(prevgray, gray, mOP2fPrevTrackPts, mOP2fNextTrackPts, status, err);
                    prevTrackPts = mOP2fPrevTrackPts.toList();
                    nextTrackPts = mOP2fNextTrackPts.toList();

                    // calc diffDlib
                    prevTrackPtsMat.fromList(prevTrackPts);
                    OpenCVForUnity.Rect rect = Imgproc.boundingRect(prevTrackPtsMat);
                    double diffDlib          = this.diffDlib * rect.area() / 40000.0 * diffCheckSensitivity;

                    // if the face is moving so fast, use dlib to detect the face
                    double diff = calDistanceDiff(prevTrackPts, nextTrackPts);
                    if (drawDebugPoints)
                    {
                        Debug.Log("variance:" + diff);
                    }
                    if (diff > diffDlib)
                    {
                        for (int i = 0; i < numberOfElements; i++)
                        {
                            nextTrackPts [i].x = srcPoints [i].x;
                            nextTrackPts [i].y = srcPoints [i].y;

                            dstPoints [i] = srcPoints [i];
                        }

                        if (drawDebugPoints)
                        {
                            Debug.Log("DLIB");
                            for (int i = 0; i < numberOfElements; i++)
                            {
                                Imgproc.circle(img, new Point(srcPoints [i].x, srcPoints [i].y), 2, new Scalar(255, 0, 0, 255), -1);
                            }
                        }
                    }
                    else
                    {
                        // In this case, use Optical Flow
                        for (int i = 0; i < numberOfElements; i++)
                        {
                            dstPoints [i] = new Vector2((float)nextTrackPts [i].x, (float)nextTrackPts [i].y);
                        }

                        if (drawDebugPoints)
                        {
                            Debug.Log("Optical Flow");
                            for (int i = 0; i < numberOfElements; i++)
                            {
                                Imgproc.circle(img, nextTrackPts [i], 2, new Scalar(0, 0, 255, 255), -1);
                            }
                        }
                    }
                }
                Swap(ref prevTrackPts, ref nextTrackPts);
                Swap(ref prevgray, ref gray);
            }
            return(dstPoints);
        }
Example #14
        // Update is called once per frame
        void Update()
        {
            #if !UNITY_WEBGL || UNITY_EDITOR
            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetDownScaleMat(webCamTextureToMatHelper.GetMat());


                Mat rgbaMatClipROI = new Mat(rgbaMat, processingAreaRect);
                Imgproc.cvtColor(rgbaMatClipROI, grayMat, Imgproc.COLOR_RGBA2GRAY);

                // fill all black.
//                Imgproc.rectangle (rgbaMatClipROI, new Point (0, 0), new Point (rgbaMat.width (), rgbaMat.height ()), new Scalar (0, 0, 0, 0), -1);

                Imgproc.equalizeHist(grayMat, grayMat);

                if (!threadComm.shouldDetectInMultiThread)
                {
                    grayMat.copyTo(grayMat4Thread);
                    threadComm.shouldDetectInMultiThread = true;
                }

//                OpenCVForUnity.Rect[] rects;

                if (didUpdateTheDetectionResult)
                {
                    didUpdateTheDetectionResult = false;
                    lock (thisLock) {
                        //Debug.Log("DetectionBasedTracker::process: get _rectsWhereRegions were got from resultDetect");
                        rectsWhereRegions = resultDetect.toArray();
//                        rects = resultDetect.toArray ();
                    }

//                    for (int i = 0; i < rects.Length; i++) {
//                        Imgproc.rectangle (rgbaMatClipROI, new Point (rects [i].x, rects [i].y), new Point (rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar (0, 0, 255, 255), 2);
//                    }
                }
                else
                {
                    //Debug.Log("DetectionBasedTracker::process: get _rectsWhereRegions from previous positions");
                    rectsWhereRegions = new Rect[trackedObjects.Count];

                    for (int i = 0; i < trackedObjects.Count; i++)
                    {
                        int n = trackedObjects [i].lastPositions.Count;
                        //if (n > 0) UnityEngine.Debug.LogError("n > 0 is false");

                        Rect r = trackedObjects [i].lastPositions [n - 1].clone();
                        if (r.area() == 0)
                        {
                            Debug.Log("DetectionBasedTracker::process: ERROR: ATTENTION: strange algorithm's behavior: trackedObjects[i].rect() is empty");
                            continue;
                        }

                        //correction by speed of rectangle
                        if (n > 1)
                        {
                            Point center      = centerRect(r);
                            Point center_prev = centerRect(trackedObjects [i].lastPositions [n - 2]);
                            Point shift       = new Point((center.x - center_prev.x) * innerParameters.coeffObjectSpeedUsingInPrediction,
                                                          (center.y - center_prev.y) * innerParameters.coeffObjectSpeedUsingInPrediction);

                            r.x += (int)Math.Round(shift.x);
                            r.y += (int)Math.Round(shift.y);
                        }
                        rectsWhereRegions [i] = r;
                    }


//                    rects = rectsWhereRegions;
//                    for (int i = 0; i < rects.Length; i++) {
//                        Imgproc.rectangle (rgbaMatClipROI, new Point (rects [i].x, rects [i].y), new Point (rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar (0, 255, 0, 255), 2);
//                    }
                }

                if (rectsWhereRegions.Length > 0)
                {
                    detectedObjectsInRegions.Clear();

                    int len = rectsWhereRegions.Length;
                    for (int i = 0; i < len; i++)
                    {
                        detectInRegion(grayMat, rectsWhereRegions [i], detectedObjectsInRegions);
                    }
                }

                updateTrackedObjects(detectedObjectsInRegions);

                getObjects(resultObjects);
                //getSmoothingObjects (resultObjects);


                int rectsCount = resultObjects.Count;
                UnityEngine.Rect[] overlayRects = new UnityEngine.Rect[rectsCount];

                float width   = rgbaMat.width();
                float height  = rgbaMat.height();
                float offsetX = processingAreaRect.x / width;
                float offsetY = processingAreaRect.y / height;

                for (int i = 0; i < resultObjects.Count; i++)
                {
                    overlayRects[i] = new UnityEngine.Rect(offsetX + (resultObjects[i].x / width)
                                                           , offsetY + (resultObjects[i].y / height)
                                                           , resultObjects[i].width / width
                                                           , resultObjects[i].height / height);
                rectOverlay.DrawRects(overlayRects);


                rgbaMatClipROI.Dispose();
            }

            if (webCamTextureToMatHelper.IsPlaying())
            {
                Matrix4x4 cameraToWorldMatrix = Camera.main.cameraToWorldMatrix;

                Vector3 ccCameraSpacePos = UnProjectVector(projectionMatrix, new Vector3(0.0f, 0.0f, overlayDistance));
                Vector3 tlCameraSpacePos = UnProjectVector(projectionMatrix, new Vector3(-overlayDistance, overlayDistance, overlayDistance));

                //position
                Vector3 position = cameraToWorldMatrix.MultiplyPoint3x4(ccCameraSpacePos);
                gameObject.transform.position = position;

                //scale
                Vector3 scale = new Vector3(Mathf.Abs(tlCameraSpacePos.x - ccCameraSpacePos.x) * 2, Mathf.Abs(tlCameraSpacePos.y - ccCameraSpacePos.y) * 2, 1);
                gameObject.transform.localScale = scale;

                // Rotate the canvas object so that it faces the user.
                Quaternion rotation = Quaternion.LookRotation(-cameraToWorldMatrix.GetColumn(2), cameraToWorldMatrix.GetColumn(1));
                gameObject.transform.rotation = rotation;

                rectOverlay.UpdateOverlayTransform(gameObject.transform);
            }
            #else
            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();
                Imgproc.putText(rgbaMat, "WebGL platform does not support multi-threading.", new Point(5, rgbaMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar(255, 255, 255, 255), 1, Imgproc.LINE_AA, false);
                Utils.matToTexture2D(rgbaMat, texture, webCamTextureToMatHelper.GetBufferColors());
            }
            #endif
        }
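UnProjectVector is called above with the camera projection matrix but is not part of the listing; a plausible sketch inverts the projection of a camera-space point, assuming a standard projection matrix in which x and y depend only on the x/z and y/z terms:

        // Hypothetical sketch of UnProjectVector: recover the camera-space
        // point whose projection is "to", using rows of the projection matrix.
        public static Vector3 UnProjectVector(Matrix4x4 proj, Vector3 to)
        {
            Vector3 from = new Vector3(0, 0, 0);
            var axsX = proj.GetRow(0);
            var axsY = proj.GetRow(1);
            var axsZ = proj.GetRow(2);
            from.z = to.z / axsZ.z;
            from.y = (to.y - (from.z * axsY.z)) / axsY.y;
            from.x = (to.x - (from.z * axsX.z)) / axsX.x;
            return from;
        }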
Example #15
        // Update is called once per frame
        void Update()
        {
            #if !UNITY_WEBGL || UNITY_EDITOR
            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetDownScaleMat(webCamTextureToMatHelper.GetMat());


                Mat rgbaMatClipROI = new Mat(rgbaMat, processingAreaRect);
                Imgproc.cvtColor(rgbaMatClipROI, grayMat, Imgproc.COLOR_RGBA2GRAY);

                if (!isShowingWebCamImage)
                {
                    // fill all black.
                    Imgproc.rectangle(rgbaMatClipROI, new Point(0, 0), new Point(rgbaMat.width(), rgbaMat.height()), new Scalar(0, 0, 0, 0), -1);
                }

                Imgproc.equalizeHist(grayMat, grayMat);

                if (!threadComm.shouldDetectInMultiThread)
                {
                    grayMat.copyTo(grayMat4Thread);
                    threadComm.shouldDetectInMultiThread = true;
                }

                OpenCVForUnity.Rect[] rects;

                if (didUpdateTheDetectionResult)
                {
                    didUpdateTheDetectionResult = false;
                    lock (thisLock) {
                        //Debug.Log("DetectionBasedTracker::process: get _rectsWhereRegions were got from resultDetect");
                        rectsWhereRegions = resultDetect.toArray();
                        rects             = resultDetect.toArray();
                    }

                    if (isShowingDebugFaceRects)
                    {
                        for (int i = 0; i < rects.Length; i++)
                        {
                            Imgproc.rectangle(rgbaMatClipROI, new Point(rects [i].x, rects [i].y), new Point(rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar(0, 0, 255, 255), 2);
                        }
                    }
                }
                else
                {
                    //Debug.Log("DetectionBasedTracker::process: get _rectsWhereRegions from previous positions");
                    rectsWhereRegions = new Rect[trackedObjects.Count];

                    for (int i = 0; i < trackedObjects.Count; i++)
                    {
                        int n = trackedObjects [i].lastPositions.Count;
                        //if (n > 0) UnityEngine.Debug.LogError("n > 0 is false");

                        Rect r = trackedObjects [i].lastPositions [n - 1].clone();
                        if (r.area() == 0)
                        {
                            Debug.Log("DetectionBasedTracker::process: ERROR: ATTENTION: strange algorithm's behavior: trackedObjects[i].rect() is empty");
                            continue;
                        }

                        //correction by speed of rectangle
                        if (n > 1)
                        {
                            Point center      = centerRect(r);
                            Point center_prev = centerRect(trackedObjects [i].lastPositions [n - 2]);
                            Point shift       = new Point((center.x - center_prev.x) * innerParameters.coeffObjectSpeedUsingInPrediction,
                                                          (center.y - center_prev.y) * innerParameters.coeffObjectSpeedUsingInPrediction);

                            r.x += (int)Math.Round(shift.x);
                            r.y += (int)Math.Round(shift.y);
                        }
                        rectsWhereRegions [i] = r;
                    }


                    rects = rectsWhereRegions;
                    if (isShowingDebugFaceRects)
                    {
                        for (int i = 0; i < rects.Length; i++)
                        {
                            Imgproc.rectangle(rgbaMatClipROI, new Point(rects [i].x, rects [i].y), new Point(rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar(0, 255, 0, 255), 2);
                        }
                    }
                }

                if (rectsWhereRegions.Length > 0)
                {
                    detectedObjectsInRegions.Clear();

                    int len = rectsWhereRegions.Length;
                    for (int i = 0; i < len; i++)
                    {
                        detectInRegion(grayMat, rectsWhereRegions [i], detectedObjectsInRegions);
                    }
                }

                updateTrackedObjects(detectedObjectsInRegions);

                getObjects(resultObjects);
                //getSmoothingObjects (resultObjects);


                rects = resultObjects.ToArray();
                for (int i = 0; i < rects.Length; i++)
                {
                    //Debug.Log ("detect faces " + rects [i]);
                    Imgproc.rectangle(rgbaMatClipROI, new Point(rects [i].x, rects [i].y), new Point(rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar(255, 0, 0, 255), 2);
                }


                if (isShowingDebugFaceRects)
                {
                    Imgproc.rectangle(rgbaMat, new Point(0, 0), new Point(rgbaMat.width(), rgbaMat.height()), new Scalar(255, 0, 0, 255), 2);
                    Imgproc.rectangle(rgbaMat, processingAreaRect.tl(), processingAreaRect.br(), new Scalar(255, 0, 0, 255), 2);
                }

                Utils.matToTexture2D(rgbaMat, texture, webCamTextureToMatHelper.GetBufferColors());

                rgbaMatClipROI.Dispose();
            }

            if (webCamTextureToMatHelper.IsPlaying())
            {
                Matrix4x4 cameraToWorldMatrix = Camera.main.cameraToWorldMatrix;
                Matrix4x4 worldToCameraMatrix = cameraToWorldMatrix.inverse;

                texture.wrapMode = TextureWrapMode.Clamp;

                quad_renderer.sharedMaterial.SetMatrix("_WorldToCameraMatrix", worldToCameraMatrix);

                // Position the canvas object slightly in front
                // of the real world web camera.
                Vector3 position = cameraToWorldMatrix.GetColumn(3) - cameraToWorldMatrix.GetColumn(2);

                // Rotate the canvas object so that it faces the user.
                Quaternion rotation = Quaternion.LookRotation(-cameraToWorldMatrix.GetColumn(2), cameraToWorldMatrix.GetColumn(1));

                gameObject.transform.position = position;
                gameObject.transform.rotation = rotation;
            }
            #else
            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();
                Imgproc.putText(rgbaMat, "WebGL platform does not support multi-threading.", new Point(5, rgbaMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar(255, 255, 255, 255), 1, Imgproc.LINE_AA, false);
                Utils.matToTexture2D(rgbaMat, texture, webCamTextureToMatHelper.GetBufferColors());
            }
            #endif
        }
Example #16
        private IEnumerator init()
        {
            if (webCamTexture != null)
            {
                webCamTexture.Stop();
                initDone = false;

                rgbaMat.Dispose();
                grayMat.Dispose();
            }

            // Checks how many and which cameras are available on the device
            for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++)
            {
                if (WebCamTexture.devices [cameraIndex].isFrontFacing == isFrontFacing)
                {
                    Debug.Log(cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing);

                    webCamDevice = WebCamTexture.devices [cameraIndex];

                    webCamTexture = new WebCamTexture(webCamDevice.name, width, height);


                    break;
                }
            }

            if (webCamTexture == null)
            {
                webCamDevice  = WebCamTexture.devices [0];
                webCamTexture = new WebCamTexture(webCamDevice.name, width, height);
            }

            Debug.Log("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);


            // Starts the camera
            webCamTexture.Play();

            while (true)
            {
                //If you want to use webcamTexture.width and webcamTexture.height on iOS, you have to wait until webcamTexture.didUpdateThisFrame == 1, otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
                if (webCamTexture.width > 16 && webCamTexture.height > 16)
                {
#else
                if (webCamTexture.didUpdateThisFrame)
                {
#endif

                    Debug.Log("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
                    Debug.Log("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrongFacing " + webCamDevice.isFrontFacing);


                    colors = new Color32[webCamTexture.width * webCamTexture.height];

                    rgbaMat = new Mat(webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);
                    grayMat = new Mat(webCamTexture.height, webCamTexture.width, CvType.CV_8UC1);

                    texture = new Texture2D(webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);

                    gameObject.transform.eulerAngles = new Vector3(0, 0, 0);
#if (UNITY_ANDROID || UNITY_IPHONE || UNITY_WP_8_1) && !UNITY_EDITOR
                    gameObject.transform.eulerAngles = new Vector3(0, 0, -90);
#endif
                    //										gameObject.transform.rotation = gameObject.transform.rotation * Quaternion.AngleAxis (webCamTexture.videoRotationAngle, Vector3.back);


                    gameObject.transform.localScale = new Vector3(webCamTexture.width, webCamTexture.height, 1);

                    //										bool videoVerticallyMirrored = webCamTexture.videoVerticallyMirrored;
                    //										float scaleX = 1;
                    //										float scaleY = videoVerticallyMirrored ? -1.0f : 1.0f;
                    //										if (webCamTexture.videoRotationAngle == 270)
                    //												scaleY = -1.0f;
                    //										gameObject.transform.localScale = new Vector3 (scaleX * gameObject.transform.localScale.x, scaleY * gameObject.transform.localScale.y, 1);



                    cascade       = new CascadeClassifier(Utils.getFilePath("haarcascade_frontalface_alt.xml"));
                    regionCascade = new CascadeClassifier(Utils.getFilePath("lbpcascade_frontalface.xml"));

                    gameObject.GetComponent <Renderer> ().material.mainTexture = texture;

                    // Size the orthographic camera to fit the video quad; on mobile the
                    // quad is rotated -90 degrees, so its on-screen height is the texture width.
#if (UNITY_ANDROID || UNITY_IPHONE || UNITY_WP_8_1) && !UNITY_EDITOR
                    Camera.main.orthographicSize = webCamTexture.width / 2;
#else
                    Camera.main.orthographicSize = webCamTexture.height / 2;
#endif


                    initThread();

                    initDone = true;

                    break;
                }
                else
                {
                    yield return(0);
                }
            }
        }

        // Update is called once per frame
        void Update()
        {
            if (!initDone)
            {
                return;
            }



#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
            if (webCamTexture.width > 16 && webCamTexture.height > 16)
            {
#else
            if (webCamTexture.didUpdateThisFrame)
            {
#endif

                Utils.webCamTextureToMat(webCamTexture, rgbaMat, colors);

                // Orientation fix-ups before processing (note: the mirrored and
                // non-mirrored branches below currently apply identical flips).
                if (webCamTexture.videoVerticallyMirrored)
                {
                    if (webCamDevice.isFrontFacing)
                    {
                        if (webCamTexture.videoRotationAngle == 0)
                        {
                            Core.flip(rgbaMat, rgbaMat, 1);
                        }
                        else if (webCamTexture.videoRotationAngle == 90)
                        {
                            Core.flip(rgbaMat, rgbaMat, 0);
                        }
                        else if (webCamTexture.videoRotationAngle == 270)
                        {
                            Core.flip(rgbaMat, rgbaMat, 1);
                        }
                    }
                    else
                    {
                        if (webCamTexture.videoRotationAngle == 90)
                        {
                            // no flip needed
                        }
                        else if (webCamTexture.videoRotationAngle == 270)
                        {
                            Core.flip(rgbaMat, rgbaMat, -1);
                        }
                    }
                }
                else
                {
                    if (webCamDevice.isFrontFacing)
                    {
                        if (webCamTexture.videoRotationAngle == 0)
                        {
                            Core.flip(rgbaMat, rgbaMat, 1);
                        }
                        else if (webCamTexture.videoRotationAngle == 90)
                        {
                            Core.flip(rgbaMat, rgbaMat, 0);
                        }
                        else if (webCamTexture.videoRotationAngle == 270)
                        {
                            Core.flip(rgbaMat, rgbaMat, 1);
                        }
                    }
                    else
                    {
                        if (webCamTexture.videoRotationAngle == 90)
                        {
                            // no flip needed
                        }
                        else if (webCamTexture.videoRotationAngle == 270)
                        {
                            Core.flip(rgbaMat, rgbaMat, -1);
                        }
                    }
                }


                // Cascades expect a single-channel, contrast-normalized input:
                // convert to grayscale and equalize the histogram.
                Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
                Imgproc.equalizeHist(grayMat, grayMat);



                didUpdateThisFrame = true;

                // Hand the new frame to the detection worker (on UNITY_WSA this is
                // only available when NETFX_CORE is defined).
#if UNITY_WSA
#if NETFX_CORE
                ThreadWorker();
#endif
#else
                ThreadWorker();
#endif

                OpenCVForUnity.Rect[] rects;

                if (resultDetect != null)
                {
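                    // Fresh results arrived from the detection thread: take them as the
                    // new search regions and draw them (blue) for debugging. Predicted
                    // regions are drawn green, final tracked objects red (further below).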
                    //Debug.Log("DetectionBasedTracker::process: get _rectsWhereRegions were got from resultDetect");
                    rectsWhereRegions = resultDetect.toArray();


                    rects = resultDetect.toArray();
                    for (int i = 0; i < rects.Length; i++)
                    {
                        Core.rectangle(rgbaMat, new Point(rects [i].x, rects [i].y), new Point(rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar(0, 0, 255, 255), 2);
                    }



                    resultDetect = null;
                }
                else
                {
                    //Debug.Log("DetectionBasedTracker::process: get _rectsWhereRegions from previous positions");
                    rectsWhereRegions = new OpenCVForUnity.Rect[trackedObjects.Count];

                    for (int i = 0; i < trackedObjects.Count; i++)
                    {
                        int n = trackedObjects [i].lastPositions.Count;
                        // n is expected to be positive: every tracked object keeps at least one position.

                        OpenCVForUnity.Rect r = trackedObjects [i].lastPositions [n - 1];
                        if (r.area() == 0)
                        {
                            Debug.Log("DetectionBasedTracker::process: ERROR: ATTENTION: strange algorithm's behavior: trackedObjects[i].rect() is empty");
                            continue;
                        }

                        //correction by speed of rectangle
                        if (n > 1)
                        {
                            Point center      = centerRect(r);
                            Point center_prev = centerRect(trackedObjects [i].lastPositions [n - 2]);
                            Point shift       = new Point((center.x - center_prev.x) * innerParameters.coeffObjectSpeedUsingInPrediction,
                                                          (center.y - center_prev.y) * innerParameters.coeffObjectSpeedUsingInPrediction);

                            r.x += (int)Math.Round(shift.x);
                            r.y += (int)Math.Round(shift.y);
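                            // Example: with coeffObjectSpeedUsingInPrediction = 0.8 and the
                            // center moving from (100, 100) to (110, 104), the predicted
                            // shift is (8, 3.2), applied as (8, 3) after rounding.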
                        }
                        rectsWhereRegions [i] = r;
                    }

                    rects = rectsWhereRegions;
                    for (int i = 0; i < rects.Length; i++)
                    {
                        Core.rectangle(rgbaMat, new Point(rects [i].x, rects [i].y), new Point(rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar(0, 255, 0, 255), 2);
                    }
                }

                if (rectsWhereRegions.Length > 0)
                {
                    detectedObjectsInRegions.Clear();

                    int len = rectsWhereRegions.Length;
                    for (int i = 0; i < len; i++)
                    {
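                        // Re-run the cascade only inside this candidate window; this is
                        // much cheaper than another full-frame detection pass.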
                        detectInRegion(grayMat, rectsWhereRegions [i], detectedObjectsInRegions);
                    }
                }

                updateTrackedObjects(detectedObjectsInRegions);

                getObjects(resultObjects);


                rects = resultObjects.ToArray();
                for (int i = 0; i < rects.Length; i++)
                {
                    //Debug.Log ("detect faces " + rects [i]);

                    Core.rectangle(rgbaMat, new Point(rects [i].x, rects [i].y), new Point(rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar(255, 0, 0, 255), 2);
                }

                Utils.matToTexture2D(rgbaMat, texture, colors);
            }
        }
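
        // A minimal refactoring sketch, not part of the original sample: the
        // orientation branches in Update() could be collapsed into one helper,
        // since OpenCV's Core.flip only distinguishes flipCode 0 (around the
        // x-axis), 1 (around the y-axis) and -1 (both). The mapping mirrors the
        // branches above; the method name is hypothetical.
        void FlipByOrientation(Mat mat)
        {
            if (webCamDevice.isFrontFacing)
            {
                if (webCamTexture.videoRotationAngle == 0 || webCamTexture.videoRotationAngle == 270)
                {
                    Core.flip(mat, mat, 1);  // mirror horizontally
                }
                else if (webCamTexture.videoRotationAngle == 90)
                {
                    Core.flip(mat, mat, 0);  // mirror vertically
                }
            }
            else if (webCamTexture.videoRotationAngle == 270)
            {
                Core.flip(mat, mat, -1);  // flip both axes (180-degree turn)
            }
        }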