Example #1
        private MatOfPoint OrderCornerPoints(MatOfPoint corners)
        {
            if (corners.size().area() <= 0 || corners.rows() < 4)
            {
                return(corners);
            }

            // rearrange the points in the order of upper left, upper right, lower right, lower left.
            using (Mat x = new Mat(corners.size(), CvType.CV_32SC1))
            using (Mat y = new Mat(corners.size(), CvType.CV_32SC1))
            using (Mat d = new Mat(corners.size(), CvType.CV_32SC1))
            using (Mat dst = new Mat(corners.size(), CvType.CV_32SC2))
            {
                Core.extractChannel(corners, x, 0);
                Core.extractChannel(corners, y, 1);

                // the top-left point has the smallest x+y sum; the bottom-right point has the largest.
                Core.add(x, y, d);
                Core.MinMaxLocResult result = Core.minMaxLoc(d);
                dst.put(0, 0, corners.get((int)result.minLoc.y, 0));
                dst.put(2, 0, corners.get((int)result.maxLoc.y, 0));

                // the top-right point has the smallest y-x difference; the bottom-left point has the largest.
                Core.subtract(y, x, d);
                result = Core.minMaxLoc(d);
                dst.put(1, 0, corners.get((int)result.minLoc.y, 0));
                dst.put(3, 0, corners.get((int)result.maxLoc.y, 0));

                dst.copyTo(corners);
            }
            return(corners);
        }
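
The ordering above uses whole-Mat reductions (Core.add, Core.subtract, Core.minMaxLoc), but the same sum/difference rule can be written directly against a Point[]. The following is a minimal sketch of that idea, not part of the original example; the helper name OrderCornerPointsArray is hypothetical and the usual OpenCVForUnity using directives are assumed.

        // hypothetical sketch of the same ordering rule on a plain Point[]
        // (assumes: using OpenCVForUnity.CoreModule;)
        private Point[] OrderCornerPointsArray(Point[] pts)
        {
            if (pts == null || pts.Length < 4)
            {
                return pts;
            }

            Point[] ordered = new Point[4];
            double minSum = double.MaxValue, maxSum = double.MinValue;
            double minDiff = double.MaxValue, maxDiff = double.MinValue;
            foreach (var p in pts)
            {
                double sum  = p.x + p.y;  // smallest at top-left, largest at bottom-right
                double diff = p.y - p.x;  // smallest at top-right, largest at bottom-left
                if (sum < minSum) { minSum = sum; ordered[0] = p; }
                if (sum > maxSum) { maxSum = sum; ordered[2] = p; }
                if (diff < minDiff) { minDiff = diff; ordered[1] = p; }
                if (diff > maxDiff) { maxDiff = diff; ordered[3] = p; }
            }
            return ordered;
        }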
Example #2
        private Mat PerspectiveTransform(Mat image, MatOfPoint corners)
        {
            if (corners.size().area() <= 0 || corners.rows() < 4)
            {
                return(image);
            }

            Point[] pts = corners.toArray();
            Point   tl  = pts[0];
            Point   tr  = pts[1];
            Point   br  = pts[2];
            Point   bl  = pts[3];

            double widthA   = Math.Sqrt((br.x - bl.x) * (br.x - bl.x) + (br.y - bl.y) * (br.y - bl.y));
            double widthB   = Math.Sqrt((tr.x - tl.x) * (tr.x - tl.x) + (tr.y - tl.y) * (tr.y - tl.y));
            int    maxWidth = Math.Max((int)widthA, (int)widthB);

            double heightA   = Math.Sqrt((tr.x - br.x) * (tr.x - br.x) + (tr.y - br.y) * (tr.y - br.y));
            double heightB   = Math.Sqrt((tl.x - bl.x) * (tl.x - bl.x) + (tl.y - bl.y) * (tl.y - bl.y));
            int    maxHeight = Math.Max((int)heightA, (int)heightB);

            maxWidth  = (maxWidth < 1) ? 1 : maxWidth;
            maxHeight = (maxHeight < 1) ? 1 : maxHeight;

            Mat src = new Mat();

            corners.convertTo(src, CvType.CV_32FC2);
            // destination corners in the same order: top-left, top-right, bottom-right, bottom-left.
            Mat dst = new Mat(4, 1, CvType.CV_32FC2);

            dst.put(0, 0, 0, 0, maxWidth - 1, 0, maxWidth - 1, maxHeight - 1, 0, maxHeight - 1);

            // compute and apply the perspective transformation matrix.
            Mat outputMat            = new Mat(maxHeight, maxWidth, image.type(), new Scalar(0, 0, 0, 255));
            Mat perspectiveTransform = Imgproc.getPerspectiveTransform(src, dst);

            Imgproc.warpPerspective(image, outputMat, perspectiveTransform, new Size(outputMat.cols(), outputMat.rows()));

            // return the transformed image.
            return(outputMat);
        }
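
Combined with OrderCornerPoints from Example #1, the transform is typically driven by a small wrapper like the sketch below; the wrapper name ScanQuad is illustrative and not part of the original class.

        // illustrative wrapper (hypothetical name) combining Examples #1 and #2:
        // order the quad's corners, then warp the image so the quad fills the output.
        private Mat ScanQuad(Mat rgbaMat, MatOfPoint quadContour)
        {
            MatOfPoint ordered = OrderCornerPoints(quadContour);
            return PerspectiveTransform(rgbaMat, ordered);
        }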
Example #3
        private void Find4PointContours(Mat image, List <MatOfPoint> contours)
        {
            contours.Clear();
            List <MatOfPoint> tmp_contours = new List <MatOfPoint>();
            Mat hierarchy = new Mat();

            Imgproc.findContours(image, tmp_contours, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);

            foreach (var cnt in tmp_contours)
            {
                MatOfInt hull = new MatOfInt();
                Imgproc.convexHull(cnt, hull, false);

                Point[] cnt_arr  = cnt.toArray();
                int[]   hull_arr = hull.toArray();
                Point[] pts      = new Point[hull_arr.Length];
                for (int i = 0; i < hull_arr.Length; i++)
                {
                    pts[i] = cnt_arr[hull_arr[i]];
                }

                MatOfPoint2f ptsFC2    = new MatOfPoint2f(pts);
                MatOfPoint2f approxFC2 = new MatOfPoint2f();
                MatOfPoint   approxSC2 = new MatOfPoint();

                double arclen = Imgproc.arcLength(ptsFC2, true);
                Imgproc.approxPolyDP(ptsFC2, approxFC2, 0.01 * arclen, true);
                approxFC2.convertTo(approxSC2, CvType.CV_32S);

                // keep only contours whose polygon approximation has exactly 4 vertices.
                if (approxSC2.size().area() != 4)
                {
                    continue;
                }

                contours.Add(approxSC2);
            }
        }
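
Example #4 below also calls a GetMaxAreaContour helper that is not listed on this page. A minimal sketch of what such a helper could look like, based on Imgproc.contourArea, is shown here; it is an assumption, not the original implementation.

        // assumed sketch (not the original) of the helper used in Example #4:
        // return the contour with the largest area, or an empty MatOfPoint if none.
        private MatOfPoint GetMaxAreaContour(List<MatOfPoint> contours)
        {
            MatOfPoint maxAreaContour = new MatOfPoint();
            double maxArea = 0;

            foreach (var contour in contours)
            {
                double area = Imgproc.contourArea(contour);
                if (area > maxArea)
                {
                    maxArea        = area;
                    maxAreaContour = contour;
                }
            }
            return maxAreaContour;
        }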
Example #4
        // Update is called once per frame
        void Update()
        {
            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();

                // change the color space to YUV.
                Imgproc.cvtColor(rgbaMat, yuvMat, Imgproc.COLOR_RGBA2RGB);
                Imgproc.cvtColor(yuvMat, yuvMat, Imgproc.COLOR_RGB2YUV);
                // grab only the Y component.
                Core.extractChannel(yuvMat, yMat, 0);

                // blur the image to reduce high-frequency noise.
                Imgproc.GaussianBlur(yMat, yMat, new Size(3, 3), 0);
                // find edges in the image.
                Imgproc.Canny(yMat, yMat, 50, 200, 3);

                // find contours.
                List <MatOfPoint> contours = new List <MatOfPoint>();
                Find4PointContours(yMat, contours);

                // pick the contour of the largest area and rearrange the points in a consistent order.
                MatOfPoint maxAreaContour = GetMaxAreaContour(contours);
                maxAreaContour = OrderCornerPoints(maxAreaContour);

                bool found = (maxAreaContour.size().area() > 0);
                if (found)
                {
                    // transform the perspective of the original image.
                    using (Mat transformedMat = PerspectiveTransform(rgbaMat, maxAreaContour))
                    {
                        outputDisplayAreaMat.setTo(new Scalar(0, 0, 0, 255));

                        if (transformedMat.width() <= outputDisplayAreaMat.width() && transformedMat.height() <= outputDisplayAreaMat.height() &&
                            transformedMat.total() >= outputDisplayAreaMat.total() / 16)
                        {
                            int x = outputDisplayAreaMat.width() / 2 - transformedMat.width() / 2;
                            int y = outputDisplayAreaMat.height() / 2 - transformedMat.height() / 2;
                            using (Mat dstAreaMat = new Mat(outputDisplayAreaMat, new OpenCVForUnity.CoreModule.Rect(x, y, transformedMat.width(), transformedMat.height())))
                            {
                                transformedMat.copyTo(dstAreaMat);
                            }
                        }
                    }
                }

                if (isDebugMode)
                {
                    // draw edge image.
                    Imgproc.cvtColor(yMat, rgbaMat, Imgproc.COLOR_GRAY2RGBA);

                    // draw all found contours.
                    Imgproc.drawContours(rgbaMat, contours, -1, DEBUG_CONTOUR_COLOR, 1);
                }

                if (found)
                {
                    // draw max area contour.
                    Imgproc.drawContours(rgbaMat, new List <MatOfPoint> {
                        maxAreaContour
                    }, -1, CONTOUR_COLOR, 2);

                    if (isDebugMode)
                    {
                        // draw corner numbers.
                        for (int i = 0; i < maxAreaContour.toArray().Length; i++)
                        {
                            var pt = maxAreaContour.get(i, 0);
                            Imgproc.putText(rgbaMat, i.ToString(), new Point(pt[0], pt[1]), Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, DEBUG_CORNER_NUMBER_COLOR, 1, Imgproc.LINE_AA, false);
                        }
                    }
                }

                rgbaMat.copyTo(inputDisplayAreaMat);

                Utils.fastMatToTexture2D(displayMat, texture, true, 0, true);
            }
        }
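
The pre-processing inside Update() (RGBA to Y channel, Gaussian blur, Canny) can be factored into a small helper. The sketch below reuses the same 3x3 kernel and 50/200 thresholds as the code above; the helper name DetectEdges is illustrative.

        // illustrative helper (hypothetical name) factoring out the edge detection
        // used in Update(): luma channel, 3x3 Gaussian blur, then Canny at 50/200.
        private void DetectEdges(Mat rgbaMat, Mat edges)
        {
            using (Mat rgb = new Mat())
            using (Mat yuv = new Mat())
            {
                Imgproc.cvtColor(rgbaMat, rgb, Imgproc.COLOR_RGBA2RGB);
                Imgproc.cvtColor(rgb, yuv, Imgproc.COLOR_RGB2YUV);
                Core.extractChannel(yuv, edges, 0); // Y (luma) channel only
                Imgproc.GaussianBlur(edges, edges, new Size(3, 3), 0);
                Imgproc.Canny(edges, edges, 50, 200, 3);
            }
        }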
Example #5
    //public delegate void Process(int[] tgrdeteced);
    void tagramDetect(Mat t_rgbaMat, Action <TangramResultModel, List <MyShape> > prc)
    {
        List <MyShape> lms = new List <MyShape>();

        System.Diagnostics.Stopwatch watch = null;

        long elapsedMs;
        TangramResultModel trm = null;

        Observable.Start(() =>
        {
            mut.WaitOne(); // serialize access to the Mats shared with the debug rendering below
            Imgproc.resize(t_rgbaMat, rgbaMat, new Size(nW_goc, nH_goc));
            watch = System.Diagnostics.Stopwatch.StartNew();

            if (warp != null)
            {
                warp.Init(rgbaMat);
                Mat wMat = warp.warpPerspective(rgbaMat);
                rgbaMat  = wMat.submat(0, nH, 0, nW);
            }
            else
            {
                rgbaMat = rgbaMat.submat(0, nH, 0, nW);
            }

            all_thresh      = Mat.zeros(nH, nW, CvType.CV_8UC3);
            all_thresh_afct = Mat.zeros(nH, nW, CvType.CV_8UC3);
            dbMat           = Mat.zeros(nH, nW, CvType.CV_8UC3);
            all_thresh_af   = Mat.zeros(nH, nW, CvType.CV_8UC3);

            rgbaMat.copyTo(rgbMat);
            rgbMat.convertTo(rgbMat2, CvType.CV_8UC3, 0.8, 60);
            rgbMat2.copyTo(rgbMat2copy);
            rgbMat.convertTo(rgbMat3, CvType.CV_8UC3, 1, 60);
            rgbMat.convertTo(rgbMat4, CvType.CV_8UC3, 1.25, 35);
            rgbMat.convertTo(rgbMat, CvType.CV_8UC3, 1.25, 35);


            Imgproc.cvtColor(rgbMat, hsvMat, Imgproc.COLOR_RGB2HSV);
            Imgproc.cvtColor(rgbMat2, hsvMat2, Imgproc.COLOR_RGB2HSV);
            Imgproc.cvtColor(rgbMat3, hsvMat3, Imgproc.COLOR_RGB2HSV);
            Imgproc.cvtColor(rgbMat3, hsvMat4, Imgproc.COLOR_RGB2HSV);

            watch.Stop();
            elapsedMs = watch.ElapsedMilliseconds;

            Mat markers = Mat.zeros(rgbaMat.size(), CvType.CV_32SC1);

            watch = System.Diagnostics.Stopwatch.StartNew();

            // threshold each tangram piece color in HSV space.
            for (int obj_i = 0; obj_i < ls_obj.Length; obj_i++)
            {
                var obj = ls_obj[obj_i];

                if (obj_i == (int)tgr.ORANGE | obj_i == (int)tgr.YELLOW | obj_i == (int)tgr.GREEN)
                {
                    Core.inRange(hsvMat2, obj.getHSVmin(), obj.getHSVmax(), thresholdMat);
                }
                else if (obj_i == (int)tgr.LIGHTBLUE)
                {
                    Core.inRange(hsvMat, obj.getHSVmin(), obj.getHSVmax(), thresholdMat);
                }
                else
                {
                    Core.inRange(hsvMat, obj.getHSVmin(), obj.getHSVmax(), thresholdMat);
                }


                if (obj_i == (int)tgr.RED)
                {
                    Core.inRange(hsvMat, new Scalar(0, 20, 45), new Scalar(5, 255, 255), thresholdMat2);
                    thresholdMat2.copyTo(thresholdMat, thresholdMat2);
                }


                thresholdMatArr[obj_i] = thresholdMat.clone();
            }

            //thresholdMatArr[(int)tgr.LIGHTBLUE].setTo(new Scalar(0), thresholdMatArr[(int)tgr.BLUE]);
            //thresholdMatArr[(int)tgr.LIGHTBLUE].setTo(new Scalar(0), thresholdMatArr[(int)tgr.GREEN]);


            // for each color: clean up its mask, find contours, and pick the best candidate shape.
            for (int obj_i = 0; obj_i < ls_obj.Length; obj_i++)
            {
                var obj = ls_obj[obj_i];

                all_cts.Clear();
                thresholdMat = thresholdMatArr[obj_i];
                if (toggle_db[obj_i] == true)
                {
                    all_thresh.setTo(obj.ColorRGB, thresholdMat);
                }

                // note: the leading 'true |' makes this condition always true, so every mask is eroded.
                if (true | obj_i == (int)tgr.PURPLE | obj_i == (int)tgr.YELLOW | obj_i == (int)tgr.RED | obj_i == (int)tgr.GREEN | obj_i == (int)tgr.ORANGE)
                {
                    Imgproc.erode(thresholdMat, thresholdMat2, Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, new Size(5, 5)), new Point(-1, -1), 1);
                }
                if (obj_i == (int)tgr.LIGHTBLUE | obj_i == (int)tgr.PURPLE)
                {
                    Imgproc.erode(thresholdMat, thresholdMat2, Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, new Size(5, 5)), new Point(-1, -1), 1);
                }

                if (toggle_db[obj_i] == true)
                {
                    all_thresh_af.setTo(obj.ColorRGB, thresholdMat2);
                }
                all_thresh_afct.setTo(new Scalar(obj_i + 1), thresholdMat2);

                color_filter.Add(thresholdMat2.clone());

                Imgproc.findContours(thresholdMat2, all_cts, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);
                Scalar c = obj.getColor();

                // discard contours whose area is outside the expected range for this piece.
                for (int ct_i = 0; ct_i < all_cts.Count; ct_i++)
                {
                    double area = Imgproc.contourArea(all_cts[ct_i]);
                    // if (area < MIN_OBJECT_AREA)
                    if (area < MIN_OBJECT_AREAS[obj_i] * 0.55)
                    {
                        all_cts.RemoveAt(ct_i);
                        ct_i--;
                    }
                    if (area > MAX_OBJECT_AREAS[obj_i] * 1.3)
                    {
                        all_cts.RemoveAt(ct_i);
                        ct_i--;
                    }
                }

                MyShape chon = null;    // best (chosen) candidate shape for this color
                float   dt   = 1000000; // squared distance of 'chon' from the image center

                for (int ct_i = 0; ct_i < all_cts.Count; ct_i++)
                {
                    MyShape ms  = new MyShape(); // candidate shape built from this contour
                    var ct      = all_cts[ct_i];
                    var peri    = Imgproc.arcLength(new MatOfPoint2f(ct.toArray()), true);
                    var epsilon = 0.1 * peri;
                    if (obj_i == (int)tgr.ORANGE || obj_i == (int)tgr.YELLOW)
                    {
                        epsilon = 0.065 * peri;
                    }
                    Imgproc.approxPolyDP(new MatOfPoint2f(ct.toArray()), approx_ct, epsilon, true);

                    MatOfInt pts_cvh = new MatOfInt();
                    Imgproc.convexHull(ct, pts_cvh, true);
                    var cvh_numPts  = pts_cvh.toArray().Length;
                    Point[] cvh_pts = new Point[cvh_numPts];
                    var ct_pts      = ct.toArray();



                    for (int i = 0; i < cvh_numPts; i++)
                    {
                        var i1     = pts_cvh.toArray()[i];
                        var p1     = ct_pts[i1];
                        cvh_pts[i] = p1;

                        try
                        {
                            if (debug == true)
                            {
                                var i2 = pts_cvh.toArray()[(i + 1) % cvh_numPts];
                                var p2 = ct_pts[i2];
                                Imgproc.circle(rgbMat2, p1, 1, c, 2);
                            }
                        }
                        catch (Exception e)
                        {
                            Utilities.LogFormat("Here3:{0},{1},{2}", rgbMat2 == null, p1 == null, c == null);
                            Utilities.Log("Exception is {0}", e.ToString());
                            Utilities.Log("Trace is {0}", e.StackTrace.ToString());
                        }
                    }


                    MatOfPoint2f approx_cvh = new MatOfPoint2f();

                    var epsilon2 = peri * 0.1;
                    if (obj_i == (int)tgr.ORANGE)
                    {
                        epsilon2 = peri * 0.065;
                    }
                    Imgproc.approxPolyDP(new MatOfPoint2f(cvh_pts), approx_cvh, epsilon2, true);

                    var ct_ori            = new MatOfPoint(ct.toArray());
                    MatOfPoint approx_ct2 = new MatOfPoint(approx_ct.toArray());

                    List <MatOfPoint> approx_cvh2 = new List <MatOfPoint>();
                    approx_cvh2.Add(new MatOfPoint(approx_cvh.toArray()));

                    // centroid of the convex-hull polygon, computed from its image moments.
                    var mu    = Imgproc.moments(approx_cvh2[0], true);
                    cterTgr.x = mu.m10 / mu.m00;
                    cterTgr.y = mu.m01 / mu.m00;

                    if (approx_ct2.size().height == 3 | approx_ct2.size().height == 4)
                    {
                        var points    = approx_cvh2[0].toArray();
                        var numpoints = points.Length;

                        ms._id = obj_i;
                        ms.ps  = new Point[numpoints];


                        double rat = 1.16;
                        if (obj_i == (int)tgr.PURPLE)
                        {
                            rat = 1.20;
                        }
                        else if (obj_i == (int)tgr.LIGHTBLUE)
                        {
                            rat = 1.20;
                        }
                        else if (obj_i == (int)tgr.RED | obj_i == (int)tgr.BLUE)
                        {
                            rat = 1.09;
                        }
                        else if (obj_i == (int)tgr.YELLOW)
                        {
                            rat = 1.10;
                        }
                        else if (obj_i == (int)tgr.ORANGE)
                        {
                            rat = 1.10;
                        }
                        else if (obj_i == (int)tgr.GREEN)
                        {
                            rat = 1.10;
                        }

                        var ind_huyen = 0;  // index of the longest edge (the triangle's hypotenuse)
                        var max       = -1d;

                        if (numpoints == 3 || numpoints == 4)
                        {
                            for (int p_i = 0; p_i < numpoints; p_i++)
                            {
                                var p  = points[p_i];
                                var p2 = points[(p_i + 1) % numpoints];

                                // push the vertex outward from the centroid by the factor 'rat'.
                                var vect = p - cterTgr;

                                vect = vect * rat;

                                var p_new     = cterTgr + vect;
                                points[p_i].x = (int)(p_new.x * 100) / 100f;
                                points[p_i].y = (int)(p_new.y * 100) / 100f;


                                if (numpoints == 4)
                                {
                                    ms.ps[p_i] = p_new;
                                }

                                if (numpoints == 3)
                                {
                                    var vt     = p2 - p;
                                    var length = vt.x * vt.x + vt.y * vt.y;
                                    if (length > max)
                                    {
                                        ind_huyen = p_i;
                                        max       = length;
                                    }
                                }
                            }
                        }

                        if (numpoints == 3)
                        {
                            // order triangle vertices: right-angle corner first, then the two
                            // acute corners at the ends of the hypotenuse.
                            var i_nhon1 = ind_huyen;
                            var i_nhon2 = (ind_huyen + 1) % numpoints;
                            var i_vuong = (ind_huyen + 2) % numpoints;

                            ms.ps[0] = points[i_vuong];
                            ms.ps[1] = points[i_nhon1];
                            ms.ps[2] = points[i_nhon2];
                        }
                        else if (numpoints == 4)
                        {
                            if (obj_i == (int)tgr.ORANGE)
                            {
                                // for the orange piece, start the vertex order at an acute corner
                                // (an endpoint of the longer diagonal), then compare the adjacent
                                // edge lengths to decide whether the piece is flipped (isFlip).
                                var vt_cheo1   = ms.ps[0] - ms.ps[2];
                                var vt_cheo2   = ms.ps[1] - ms.ps[3];
                                var leng_cheo1 = vt_cheo1.x * vt_cheo1.x + vt_cheo1.y * vt_cheo1.y;
                                var leng_cheo2 = vt_cheo2.x * vt_cheo2.x + vt_cheo2.y * vt_cheo2.y;
                                var i_nhon     = 0;
                                if (leng_cheo2 > leng_cheo1)
                                {
                                    i_nhon = 1;
                                }

                                ms.ps[0] = points[i_nhon];
                                ms.ps[1] = points[(i_nhon + 1)];
                                ms.ps[2] = points[(i_nhon + 2)];
                                ms.ps[3] = points[(i_nhon + 3) % numpoints];

                                var i_prvNhon   = (i_nhon + 4 - 1) % numpoints;
                                var i_aftNhon   = i_nhon + 1;
                                var vt_prvNhon  = points[i_prvNhon] - points[i_nhon];
                                var vt_aftNhon  = points[i_aftNhon] - points[i_nhon];
                                var len_prvNhon = vt_prvNhon.x * vt_prvNhon.x + vt_prvNhon.y * vt_prvNhon.y;
                                var len_aftNhon = vt_aftNhon.x * vt_aftNhon.x + vt_aftNhon.y * vt_aftNhon.y;

                                Imgproc.line(dbMat, points[i_prvNhon], points[i_nhon], c, 1);

                                if (len_prvNhon > len_aftNhon)
                                {
                                    ms.isFlip = true;
                                    Imgproc.putText(dbMat, " IsFLIP", ms.ps[3], 1, 1, c, 1);
                                }
                                else
                                {
                                    ms.isFlip = false;
                                    Imgproc.putText(dbMat, " IsNOTFLIP", ms.ps[3], 1, 1, c, 1);
                                }
                            }
                        }

                        // keep the candidate whose centroid lies closest to the image center.
                        var centerMat = new Point(rgbMat.width() / 2f, rgbMat.height() / 2f);
                        var vtLech    = centerMat - cterTgr;
                        var dt2       = vtLech.x * vtLech.x + vtLech.y * vtLech.y;
                        if (dt2 < dt)
                        {
                            dt   = (float)dt2;
                            chon = ms;
                        }
                    }
                    try
                    {
                        Imgproc.circle(rgbMat, cterTgr, 1, c, 1);
                        Imgproc.putText(rgbMat, mu.m00.ToString(), cterTgr, 1, 1, c, 1);
                    }
                    catch (Exception e)
                    {
                        Utilities.LogFormat("Here2:{0},{1},{2}", rgbMat == null, cterTgr == null, c == null);
                        Utilities.Log("Exception is {0}", e.ToString());
                        Utilities.Log("Trace is {0}", e.StackTrace.ToString());
                    }

                    //if (approx_ct2.size().height == 3 | approx_ct2.size().height == 4) break;
                }

                if (chon != null)
                {
                    lms.Add(chon);

                    var ps = chon.ps;
                    for (int i = 0; i < ps.Length; i++)
                    {
                        var p1 = ps[i];
                        var p2 = ps[(i + 1) % ps.Length];

                        try
                        {
                            Imgproc.line(rgbMat2, p1, p2, c, 1);
                            Imgproc.line(all_thresh_afct, p1, p2, new Scalar(255, 255, 255), 1);
                            Imgproc.line(dbMat, p1, p2, c, 1);
                            Imgproc.circle(dbMat, p1, 1, c);
                        }
                        catch (Exception e)
                        {
                            Utilities.LogFormat("Here1:{0},{1},{2}", rgbMat2 == null, p1 == null, p2 == null);
                            Utilities.Log("Exception is {0}", e.ToString());
                            Utilities.Log("Trace is {0}", e.StackTrace.ToString());
                        }
                    }
                }

                watch.Stop();
                elapsedMs = watch.ElapsedMilliseconds;
            }

            TangramShape msl = new TangramShape();
            msl.datas        = lms;
            var json         = JsonUtility.ToJson(msl);

            watch = System.Diagnostics.Stopwatch.StartNew();
            trm   = tangramFeatureModelList.Detect(msl.datas.ToArray());
            watch.Stop();
            elapsedMs = watch.ElapsedMilliseconds;

            mut.ReleaseMutex();
        }).ObserveOnMainThread().Subscribe((rx) =>
        {
            prc(trm, lms);
            if (debug == true)
            {
                mut.WaitOne();

                if (texture != null && debug == true)
                {
                    Utils.matToTexture2D(dbMat, texture);
                }
                if (dbText1 != null && debug == true)
                {
                    Utils.matToTexture2D(rgbMat2copy, dbText1);
                }
                if (dbText2 != null && debug == true)
                {
                    Utils.matToTexture2D(rgbMat3, dbText2);
                }
                if (dbText3 != null && debug == true)
                {
                    Utils.matToTexture2D(rgbMat4, dbText3);
                }
                if (dbText4 != null && debug == true)
                {
                    Utils.matToTexture2D(rgbMat, dbText4);
                }

                all_thresh_afct = all_thresh_afct * 25;
                Imgproc.cvtColor(rgbMat2, rgbMat2, Imgproc.COLOR_RGBA2RGB);
                Imgproc.cvtColor(all_thresh, all_thresh, Imgproc.COLOR_RGBA2RGB);
                Mat a = new Mat(all_thresh.size(), CvType.CV_8UC3);
                Core.addWeighted(all_thresh, 0.2, rgbMat2, 0.8, 0, a);
                if (dbText5 != null && debug == true)
                {
                    Utils.matToTexture2D(a, dbText5);
                }
                if (dbText6 != null && debug == true)
                {
                    Utils.matToTexture2D(all_thresh, dbText6);
                }
                if (dbText7 != null && debug == true)
                {
                    Utils.matToTexture2D(all_thresh_afct, dbText7);
                }
                if (dbText8 != null && debug == true)
                {
                    Utils.matToTexture2D(all_thresh_af, dbText8);
                }
                mut.ReleaseMutex();
            }
        });
    }
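
The part of this example worth isolating is the UniRx threading pattern: the OpenCV work runs on a background thread via Observable.Start, the Mats shared with the debug rendering are guarded by a mutex, and the result callback is marshalled back to Unity's main thread with ObserveOnMainThread. A stripped-down sketch of just that skeleton follows; the processing itself is elided, and everything apart from the UniRx operators and the types already used above is illustrative.

    // stripped-down sketch of the threading skeleton used in tagramDetect:
    // heavy OpenCV work off the main thread, results delivered back on it.
    void RunDetectionAsync(Mat input, Action<TangramResultModel, List<MyShape>> onDone)
    {
        TangramResultModel result = null;
        List<MyShape> shapes = new List<MyShape>();

        Observable.Start(() =>
        {
            mut.WaitOne(); // protect Mats shared with the debug rendering
            try
            {
                // ... resize, color thresholding and contour analysis on 'input' would go here,
                // filling 'result' and 'shapes' ...
            }
            finally
            {
                mut.ReleaseMutex();
            }
        })
        .ObserveOnMainThread() // Unity objects and textures must only be touched on the main thread
        .Subscribe(_ => onDone(result, shapes));
    }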