Example #1
        public static void Mat_to_vector_vector_Point(Mat m, List <MatOfPoint> pts)
        {
            if (m != null)
            {
                m.ThrowIfDisposed();
            }

            if (pts == null)
            {
                throw new CvException("Output List can't be null");
            }

            if (m == null)
            {
                throw new CvException("Input Mat can't be null");
            }

            List <Mat> mats = new List <Mat>(m.rows());

            Mat_to_vector_Mat(m, mats);
            foreach (Mat mi in mats)
            {
                MatOfPoint pt = new MatOfPoint(mi);
                pts.Add(pt);
                mi.release();
            }
            mats.Clear();
        }
Example #2
        private MatOfPoint OrderCornerPoints(MatOfPoint corners)
        {
            if (corners.size().area() <= 0 || corners.rows() < 4)
            {
                return(corners);
            }

            // rearrange the points in the order of upper left, upper right, lower right, lower left.
            using (Mat x = new Mat(corners.size(), CvType.CV_32SC1))
                using (Mat y = new Mat(corners.size(), CvType.CV_32SC1))
                    using (Mat d = new Mat(corners.size(), CvType.CV_32SC1))
                        using (Mat dst = new Mat(corners.size(), CvType.CV_32SC2))
                        {
                            Core.extractChannel(corners, x, 0);
                            Core.extractChannel(corners, y, 1);

                            // the sum of the upper left points is the smallest and the sum of the lower right points is the largest.
                            Core.add(x, y, d);
                            Core.MinMaxLocResult result = Core.minMaxLoc(d);
                            dst.put(0, 0, corners.get((int)result.minLoc.y, 0));
                            dst.put(2, 0, corners.get((int)result.maxLoc.y, 0));

                            // the difference in the upper right point is the smallest, and the difference in the lower left is the largest.
                            Core.subtract(y, x, d);
                            result = Core.minMaxLoc(d);
                            dst.put(1, 0, corners.get((int)result.minLoc.y, 0));
                            dst.put(3, 0, corners.get((int)result.maxLoc.y, 0));

                            dst.copyTo(corners);
                        }
            return(corners);
        }
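
A minimal usage sketch (not from the original source; WarpToSquare, src, and size are illustrative names, and the contour is assumed to hold at least four points): once the corners are ordered upper left, upper right, lower right, lower left, they can drive a perspective warp that flattens the quad.

        Mat WarpToSquare(Mat src, MatOfPoint corners, int size)
        {
            Point[] c = OrderCornerPoints(corners).toArray();
            Mat warped = new Mat();
            using (MatOfPoint2f srcQuad = new MatOfPoint2f(c[0], c[1], c[2], c[3]))
            using (MatOfPoint2f dstQuad = new MatOfPoint2f(
                       new Point(0, 0), new Point(size - 1, 0),
                       new Point(size - 1, size - 1), new Point(0, size - 1)))
            using (Mat transform = Imgproc.getPerspectiveTransform(srcQuad, dstQuad))
            {
                // Map the ordered quad onto an axis-aligned size x size square.
                Imgproc.warpPerspective(src, warped, transform, new Size(size, size));
            }
            return warped;
        }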
Example #3
        private void UpdateConvexityDefection(
            MatOfPoint contour, MatOfInt hullIndices, double defectMinY, RecordHandDetectResult resultSetter
            )
        {
            var contourArray = contour.toArray();
            var convexDefect = new MatOfInt4();

            Imgproc.convexityDefects(contour, hullIndices, convexDefect);

            resultSetter.ConvexDefectVectors.Clear();

            int convexDefectCount = convexDefect.rows();

            if (convexDefectCount > 0)
            {
                for (int i = 0; i < convexDefectCount; i++)
                {
                    convexDefect.get(i, 0, _convexityDefectSetValues);
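                    // Note: convexityDefects packs each defect as [start index, end index, farthest-point index, fixed-point depth (approx. distance * 256)].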
                    Point farPoint = contourArray[_convexityDefectSetValues[2]];
                    int   depth    = _convexityDefectSetValues[3];
                    if (depth > DefectThreasholdValue && farPoint.y < defectMinY)
                    {
                        var nearPoint1 = contourArray[_convexityDefectSetValues[0]];
                        var nearPoint2 = contourArray[_convexityDefectSetValues[1]];
                        resultSetter.ConvexDefectVectors.Add(new Vector2(
                                                                 (float)(nearPoint1.x * 0.5f + nearPoint2.x * 0.5 - farPoint.x),
                                                                 (float)(nearPoint1.y * 0.5f + nearPoint2.y * 0.5 - farPoint.y)
                                                                 ));
                    }
                }
            }

            //Once everything above has run, all of the result data has been updated to valid values.
            resultSetter.HasValidHandArea = true;
        }
        public void ProcessFinger(Mat rgbaImage)
        {
            Imgproc.pyrDown(rgbaImage, mPyrDownMat);
            Imgproc.pyrDown(mPyrDownMat, mPyrDownMat);

            Imgproc.cvtColor(mPyrDownMat, mHsvMat, Imgproc.COLOR_RGB2HSV_FULL);
            Imgproc.cvtColor(mPyrDownMat, mRGBAMat, Imgproc.COLOR_RGB2RGBA);
            Imgproc.cvtColor(mPyrDownMat, mYCrCbMat, Imgproc.COLOR_RGB2YCrCb);

            Core.inRange(mHsvMat, fLowerBoundHSV, fUpperBoundHSV, fMaskHSV);

            fMask = fMaskHSV;

            Imgproc.dilate(fMask, fDilatedMask, new Mat());

            List <MatOfPoint> contoursFinger = new List <MatOfPoint>();

            Imgproc.findContours(fDilatedMask, contoursFinger, fHierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);

            if (contoursFinger.Count == 0)
            {
                FingerContour = null;
                return;
            }

            // Find max contour area
            double     maxArea        = 0;
            MatOfPoint biggestContour = null;

            foreach (MatOfPoint each in contoursFinger)
            {
                MatOfPoint wrapper = each;
                double     area    = Imgproc.contourArea(wrapper);
                if (area > maxArea)
                {
                    maxArea        = area;
                    biggestContour = each;
                }
            }
            if (maxArea < 130)
            {
                FingerContour = null;
                return;
            }

            //Debug.Log("Finger contour area" + maxArea.ToString());

            MatOfPoint2f contours_res2f = new MatOfPoint2f();

            MatOfPoint2f biggestContour2f = new MatOfPoint2f(biggestContour.toArray());

            Imgproc.approxPolyDP(biggestContour2f, contours_res2f, 3, true);
            FingerContour = new MatOfPoint(contours_res2f.toArray());
            contours_res2f.Dispose();
            biggestContour2f.Dispose();
            if (Imgproc.contourArea(FingerContour) > mMinContourArea * maxArea)
            {
                Core.multiply(FingerContour, new Scalar(4, 4), FingerContour);
            }
        }
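
A hedged follow-up sketch (DrawFinger and the drawing color are illustrative, not from the source): the two pyrDown calls shrink the frame by 4x per dimension, so when the area check passes, Core.multiply with Scalar(4, 4) maps the contour back to full resolution, after which it can be drawn on the original image.

        public void DrawFinger(Mat rgbaImage)
        {
            if (FingerContour == null)
                return;

            // Assumes ProcessFinger already rescaled FingerContour into rgbaImage coordinates.
            Imgproc.drawContours(rgbaImage, new List<MatOfPoint> { FingerContour }, -1,
                                 new Scalar(0, 255, 0, 255), 2);
        }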
Example #5
        private static void ShapeContextDistanceExtractorSample()
        {
            var src   = Cv2.ImRead(@"data\shapes.png", ImreadModes.Color);
            var gray  = src.CvtColor(ColorConversionCodes.BGR2GRAY);
            var canny = gray.Canny(100, 200);

            Point[][]        contours;
            HierarchyIndex[] hierarchy;
            canny.FindContours(
                out contours, out hierarchy, RetrievalModes.External, ContourApproximationModes.ApproxSimple);

            Mat dst = src.Clone();

            Cv2.DrawContours(dst, new Point[][] { contours[4] }, -1, Scalar.Red, 2);
            Cv2.DrawContours(dst, new Point[][] { contours[5] }, -1, Scalar.Yellow, 2);

            var distanceExtractor = ShapeContextDistanceExtractor.Create();

            //var distanceExtractor = HausdorffDistanceExtractor.Create();

            using (var inputA = MatOfPoint.FromArray(contours[4]))
                using (var inputB = MatOfPoint.FromArray(contours[5]))
                {
                    var distance = distanceExtractor.ComputeDistance(inputA, inputB);
                    Console.WriteLine(distance); // always 0
                }

            Window.ShowImages(dst);
        }
    private void GetCubies(List <MatOfPoint> contours, Mat imgMat, int index, List <Cubies> cubies)
    {
        MatOfPoint2f matOfPoint2f = new MatOfPoint2f();
        MatOfPoint2f approxCurve  = new MatOfPoint2f();
        MatOfPoint   approx       = new MatOfPoint();

        foreach (var contour in contours)
        {
            matOfPoint2f.fromList(contour.toList());
            Imgproc.approxPolyDP(matOfPoint2f, approxCurve, 0.1 * Imgproc.arcLength(matOfPoint2f, true), true);

            try
            {
                approxCurve.convertTo(approx, CvType.CV_32S);
                OpenCVForUnity.Rect rect = Imgproc.boundingRect(approx);

                if (approx.total() == 4)
                {
                    cubies.Add(new Cubies(rect.x, rect.y, colorsList[index]));
                    Imgproc.rectangle(imgMat, new Point(rect.x, rect.y), new Point(rect.x + rect.width, rect.y + rect.height), new Scalar(255, 40, 150), 2);
                }
            }
            catch (ArgumentOutOfRangeException) { }
        }

        print("Number of cubies: " + cubies.Count);
    }
Example #7
        public static bool isHeart(List <Point> shape)
        {
            //Check number of vertices
            if (shape.Count < 20)
            {
                return(false);
            }

            MatOfPoint shape_area = new MatOfPoint();

            shape_area.fromList(shape);

            MatOfPoint2f shape_area2f = new MatOfPoint2f(shape_area.toArray());

            //   if (Imgproc.contourArea(shape_area) > 6000)
            //       return false;

            double area  = Imgproc.contourArea(shape_area);
            double perim = Imgproc.arcLength(shape_area2f, true);
            double ratio = area / perim;

            if (ratio < 18 || ratio > 23)
            {
                return(false);
            }

            for (int i = 1; i < shape.Count; i++)
            {
                if (distanceTwoPoints(shape[i - 1], shape[i]) > 20)
                {
                    return(true);
                }
            }
            return(false);
        }
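
A minimal driving sketch (LooksLikeHeart and the epsilon are illustrative assumptions): the classifier expects the vertex list of a polygon-approximated contour, which approxPolyDP can produce from a raw contour.

        public static bool LooksLikeHeart(MatOfPoint contour)
        {
            using (MatOfPoint2f contour2f = new MatOfPoint2f(contour.toArray()))
            using (MatOfPoint2f approx2f = new MatOfPoint2f())
            {
                // A relatively small epsilon so the approximation keeps many vertices (isHeart requires at least 20).
                double epsilon = 0.01 * Imgproc.arcLength(contour2f, true);
                Imgproc.approxPolyDP(contour2f, approx2f, epsilon, true);
                return isHeart(new MatOfPoint(approx2f.toArray()).toList());
            }
        }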
    private Scalar colorRed = new Scalar(255, 0, 0, 125); // Red color

    void Start()
    {
        for (int i = 0; i < WebCamTexture.devices.Length; i++)
        {
            Debug.Log(WebCamTexture.devices[i].name);
        }

        mCamera       = new WebCamTexture();
        matOpFlowThis = new Mat();
        matOpFlowPrev = new Mat();
        MOPcorners    = new MatOfPoint();
        mMOP2fptsThis = new MatOfPoint2f();
        mMOP2fptsPrev = new MatOfPoint2f();
        mMOP2fptsSafe = new MatOfPoint2f();
        mMOBStatus    = new MatOfByte();
        mMOFerr       = new MatOfFloat();

        mCamera.Play();

        rgbaMat = new Mat(mCamera.height, mCamera.width, CvType.CV_8UC4);
        texture = new Texture2D(mCamera.width, mCamera.height, TextureFormat.RGBA32, false);
        colors  = new Color32[mCamera.width * mCamera.height];

        GetComponent <Renderer>().material.mainTexture = texture;
    }
    public static Point GetCenter(MatOfPoint matOfPoint)
    {
        var points = matOfPoint.toArray();
        var minX   = points.OrderBy(e => e.x).First().x;
        var minY   = points.OrderBy(e => e.y).First().y;

        return(new Point(minX + ((double)matOfPoint.width()) / 2.0, minY + ((double)matOfPoint.height()) / 2.0));
    }
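
An alternative sketch (not from the source): for a contour, matOfPoint.width() and height() are the dimensions of the point matrix itself (typically N x 1), so it is usually safer to derive the center from the bounding rect of the points.

    public static Point GetCenterFromBoundingRect(MatOfPoint matOfPoint)
    {
        var rect = Imgproc.boundingRect(matOfPoint);

        return(new Point(rect.x + rect.width / 2.0, rect.y + rect.height / 2.0));
    }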
Example #10
        public OFPointsFilter(int numberOfElements) : base(numberOfElements)
        {
            diffDlib        = diffDlib * (double)numberOfElements / 68.0;
            prevTrackPtsMat = new MatOfPoint();

            // Initialize Optical Flow
            InitializeOpticalFlow();
        }
Example #11
 public RegionCandidate()
 {
     index         = 0;
     contour       = new MatOfPoint();
     contour2f     = new MatOfPoint2f();
     _area         = 0.0;
     _circularity  = 0.0;
     _boundingRect = null;
 }
Example #12
    private int CompareContourRows(MatOfPoint mp1, MatOfPoint mp2)
    {
        int rows1 = mp1.rows();
        int rows2 = mp2.rows();

        if (rows1 == rows2) { return 0; }
        else if (rows1 > rows2) { return -1; }
        else { return 1; }
    }
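
A minimal usage sketch (binaryMat is an assumed, already-thresholded single-channel image): sorting detected contours so the contour with the most points comes first.

    private List<MatOfPoint> FindContoursSortedByPointCount(Mat binaryMat)
    {
        List<MatOfPoint> contours = new List<MatOfPoint>();

        Imgproc.findContours(binaryMat, contours, new Mat(), Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);
        contours.Sort(CompareContourRows); // descending by rows(), i.e. by number of points
        return contours;
    }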
Example #13
        // Use this for initialization
        void Start()
        {
            Mat imgMat = new Mat(500, 500, CvType.CV_8UC3, new Scalar(0, 0, 0));

            Debug.Log("imgMat.ToString() " + imgMat.ToString());


            int        rand_num  = 50;
            MatOfPoint pointsMat = new MatOfPoint();

            pointsMat.alloc(rand_num);

            Core.randu(pointsMat, 100, 400);

            Point[] points = pointsMat.toArray();
            for (int i = 0; i < rand_num; ++i)
            {
                Imgproc.circle(imgMat, points [i], 2, new Scalar(255, 255, 255), -1);
            }


            MatOfInt hullInt = new MatOfInt();

            Imgproc.convexHull(pointsMat, hullInt);


            List <Point> pointMatList  = pointsMat.toList();
            List <int>   hullIntList   = hullInt.toList();
            List <Point> hullPointList = new List <Point> ();

            for (int j = 0; j < hullInt.toList().Count; j++)
            {
                hullPointList.Add(pointMatList [hullIntList [j]]);
            }

            MatOfPoint hullPointMat = new MatOfPoint();

            hullPointMat.fromList(hullPointList);

            List <MatOfPoint> hullPoints = new List <MatOfPoint> ();

            hullPoints.Add(hullPointMat);



            Imgproc.drawContours(imgMat, hullPoints, -1, new Scalar(0, 255, 0), 2);


            Imgproc.cvtColor(imgMat, imgMat, Imgproc.COLOR_BGR2RGB);

            Texture2D texture = new Texture2D(imgMat.cols(), imgMat.rows(), TextureFormat.RGBA32, false);

            Utils.matToTexture2D(imgMat, texture);

            gameObject.GetComponent <Renderer> ().material.mainTexture = texture;
        }
    public static OpenCVForUnity.CoreModule.Rect GetRect(MatOfPoint matOfPoint)
    {
        var points = matOfPoint.toArray();
        var minX   = points.OrderBy(e => e.x).First().x;
        var minY   = points.OrderBy(e => e.y).First().y;
        var maxX   = points.OrderByDescending(e => e.x).First().x;
        var maxY   = points.OrderByDescending(e => e.y).First().y;
        var rect   = new OpenCVForUnity.CoreModule.Rect((int)minX, (int)minY, (int)(maxX - minX), (int)(maxY - minY));

        return(rect);
    }
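
A hedged side note: Imgproc.boundingRect computes the same box natively; since its width and height include the maximum pixel, they typically come out one larger than the max-minus-min values above.

    public static OpenCVForUnity.CoreModule.Rect GetRectNative(MatOfPoint matOfPoint)
    {
        // Same idea as GetRect above, but computed by OpenCV itself.
        return(Imgproc.boundingRect(matOfPoint));
    }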
        double ComputeAVGXForContour(MatOfPoint contour)
        {
            double sum = 0;

            Point[] points = contour.toArray();
            foreach (Point p in points)
            {
                sum += p.x;
            }
            return(sum / points.Length);
        }
Example #16
        /// <summary>
        /// Approximates the vertex points of the contour with a polygon approximation.
        /// </summary>
        /// <param name="rgbaMat">Source image (not used in the body).</param>
        /// <param name="contour">Contour.</param>
        private static void _pointOfVertices(Mat rgbaMat, MatOfPoint contour)
        {
            //Use multiply to scale the contour, which was shrunk by the Gaussian pyramid (pyrDown), back to the original image size.
            Core.multiply(contour, new Scalar(4, 4), contour);

            //The contour vertices are unevenly scattered, so resample them with a polygon approximation to make the shape easier to recognize.
            MatOfPoint2f pointMat = new MatOfPoint2f();

            Imgproc.approxPolyDP(new MatOfPoint2f(contour.toArray()), pointMat, 3, true);
            contour.fromArray(pointMat.toArray()); //store the approximated points back into the contour so the caller sees them
        }
    IEnumerator Init()
    {
        yield return(new WaitForSeconds(1));

        IsStarted          = true;
        webCamTextureToMat = FindObjectOfType <WebCamTextureToMat>() as WebCamTextureToMat;
        Debug.Log("MotionDetectInited");


        Mat webCamTextureMat = webCamTextureToMat.GetMat();

        Debug.Log("webCamTextureMat -- c : " + webCamTextureMat.cols() + " r : " + webCamTextureMat.rows());
        //Mat webCamTextureMat = webCamTextureToMatHelper.GetMat();

        colors  = new Color32[webCamTextureMat.cols() * webCamTextureMat.rows()];
        texture = new Texture2D(webCamTextureMat.cols(), webCamTextureMat.rows(), TextureFormat.RGBA32, false);

        matOpFlowThis = new Mat();
        matOpFlowPrev = new Mat();
        MOPcorners    = new MatOfPoint();
        mMOP2fptsThis = new MatOfPoint2f();
        mMOP2fptsPrev = new MatOfPoint2f();
        mMOP2fptsSafe = new MatOfPoint2f();
        mMOBStatus    = new MatOfByte();
        mMOFerr       = new MatOfFloat();


        gameObject.transform.localScale = new Vector3(webCamTextureMat.cols(), webCamTextureMat.rows(), 1);

        Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

        float width  = 0;
        float height = 0;

        width  = gameObject.transform.localScale.x;
        height = gameObject.transform.localScale.y;

        float widthScale  = (float)Screen.width / width;
        float heightScale = (float)Screen.height / height;

        if (widthScale < heightScale)
        {
            Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
        }
        else
        {
            Camera.main.orthographicSize = height / 2;
        }

        gameObject.GetComponent <Renderer>().material.mainTexture = texture;

        //          webCamTextureToMatHelper.Play ();
        yield return(null);
    }
				// Use this for initialization
				void Start ()
				{
	
						Mat imgMat = new Mat (500, 500, CvType.CV_8UC3, new Scalar (0, 0, 0));
						Debug.Log ("imgMat dst ToString " + imgMat.ToString ());


						int rand_num = 50;
						MatOfPoint pointsMat = new MatOfPoint ();
						pointsMat.alloc (rand_num);

						Core.randu (pointsMat, 100, 400);

						Point[] points = pointsMat.toArray ();
						for (int i=0; i<rand_num; ++i) {
						
								Core.circle (imgMat, points [i], 2, new Scalar (255, 255, 255), -1);
						}

	
						MatOfInt hullInt = new MatOfInt ();
						Imgproc.convexHull (pointsMat, hullInt);


						List<Point> pointMatList = pointsMat.toList ();
						List<int> hullIntList = hullInt.toList ();
						List<Point> hullPointList = new List<Point> ();

						for (int j=0; j < hullInt.toList().Count; j++) {
								hullPointList.Add (pointMatList [hullIntList [j]]);
						}

						MatOfPoint hullPointMat = new MatOfPoint ();
		
						hullPointMat.fromList (hullPointList);

						List<MatOfPoint> hullPoints = new List<MatOfPoint> ();

						hullPoints.Add (hullPointMat);
		
		
		
						Imgproc.drawContours (imgMat, hullPoints, -1, new Scalar (0, 255, 0), 2);


						Imgproc.cvtColor (imgMat, imgMat, Imgproc.COLOR_BGR2RGB);

						Texture2D texture = new Texture2D (imgMat.cols (), imgMat.rows (), TextureFormat.RGBA32, false);
						Utils.matToTexture2D (imgMat, texture);
		
						gameObject.GetComponent<Renderer> ().material.mainTexture = texture;
				}
Example #19
    //Identify contours
    private bool analysisContoursRect(int index, List <MatOfPoint> contours, Mat result, List <MatchObject> matchObject)
    {
        OpenCVForUnity.Rect _testDepthRect = Imgproc.boundingRect(contours[index]);
        float minAreaSize = _minDepthObjectSizePer * _drawBlock.MatchHeight * _drawBlock.MatchWidth;

        if (_testDepthRect.area() > minAreaSize)
        {
            //Declare containers for the point data
            MatOfInt          hullInt       = new MatOfInt();
            List <Point>      hullPointList = new List <Point>();
            MatOfPoint        hullPointMat  = new MatOfPoint();
            List <MatOfPoint> hullPoints    = new List <MatOfPoint>();
            MatOfInt4         defects       = new MatOfInt4();
            //Filter the point data
            MatOfPoint2f Temp2f = new MatOfPoint2f();
            //Convert contours(i) from MatOfPoint to MatOfPoint2f
            contours[index].convertTo(Temp2f, CvType.CV_32FC2);
            //Processing on mMOP2f1 which is in type MatOfPoint2f
            Imgproc.approxPolyDP(Temp2f, Temp2f, 30, true);
            //Convert back to MatOfPoint and put the new values back into the contours list
            Temp2f.convertTo(contours[index], CvType.CV_32S);

            //Compute the convex hull around the contour
            Imgproc.convexHull(contours[index], hullInt);
            List <Point> pointMatList = contours[index].toList();
            List <int>   hullIntList  = hullInt.toList();
            for (int j = 0; j < hullInt.toList().Count; j++)
            {
                hullPointList.Add(pointMatList[hullIntList[j]]);
                hullPointMat.fromList(hullPointList);
                hullPoints.Add(hullPointMat);
            }
            if (hullInt.toList().Count == 4)
            {
                if (!setMatchObject(index, pointMatList, contours, hullPoints, result, matchObject))
                {
                    //Debug.Log("setMatchObject fail");
                }
            }
            //Release memory
            defects.Dispose();
            hullPointList.Clear();
            hullPointMat.Dispose();
            hullInt.Dispose();
            hullPoints.Clear();
            return(true);
        }
        return(false);
    }
Example #20
    protected static MatOfPoint convertIndexToPoint(MatOfInt index, MatOfPoint contour)
    {
        Point[] arrPoint  = contour.toArray();
        int[]   arrIndex  = index.toArray();
        Point[] arrResult = new Point[arrIndex.Length];

        for (int i = 0; i < arrIndex.Length; i++)
        {
            arrResult[i] = arrPoint[arrIndex[i]];
        }

        MatOfPoint hull = new MatOfPoint();

        hull.fromArray(arrResult);
        return(hull);
    }
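
A minimal usage sketch (DrawHull, frame, and contour are illustrative names): convexHull returns indices into the contour, and convertIndexToPoint resolves them into actual hull points that can be drawn.

    protected static void DrawHull(Mat frame, MatOfPoint contour)
    {
        MatOfInt hullIndices = new MatOfInt();

        Imgproc.convexHull(contour, hullIndices); // indices into 'contour'
        MatOfPoint hull = convertIndexToPoint(hullIndices, contour);

        Imgproc.drawContours(frame, new List<MatOfPoint> { hull }, -1, new Scalar(0, 255, 0), 2);

        hullIndices.Dispose();
        hull.Dispose();
    }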
Example #21
        /// <summary>
        /// Raises the web cam texture to mat helper initialized event.
        /// </summary>
        public void OnWebCamTextureToMatHelperInitialized()
        {
            Debug.Log("OnWebCamTextureToMatHelperInitialized");

            Mat webCamTextureMat = webCamTextureToMatHelper.GetMat();

            texture = new Texture2D(webCamTextureMat.cols(), webCamTextureMat.rows(), TextureFormat.RGBA32, false);
            Utils.fastMatToTexture2D(webCamTextureMat, texture);

            gameObject.GetComponent <Renderer> ().material.mainTexture = texture;

            gameObject.transform.localScale = new Vector3(webCamTextureMat.cols(), webCamTextureMat.rows(), 1);

            Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

            if (fpsMonitor != null)
            {
                fpsMonitor.Add("width", webCamTextureMat.width().ToString());
                fpsMonitor.Add("height", webCamTextureMat.height().ToString());
                fpsMonitor.Add("orientation", Screen.orientation.ToString());
            }


            float width  = webCamTextureMat.width();
            float height = webCamTextureMat.height();

            float widthScale  = (float)Screen.width / width;
            float heightScale = (float)Screen.height / height;

            if (widthScale < heightScale)
            {
                Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
            }
            else
            {
                Camera.main.orthographicSize = height / 2;
            }

            matOpFlowThis = new Mat();
            matOpFlowPrev = new Mat();
            MOPcorners    = new MatOfPoint();
            mMOP2fptsThis = new MatOfPoint2f();
            mMOP2fptsPrev = new MatOfPoint2f();
            mMOP2fptsSafe = new MatOfPoint2f();
            mMOBStatus    = new MatOfByte();
            mMOFerr       = new MatOfFloat();
        }
Example #22
        public static List <Point> filterPolygon(List <Point> approx_polygon)
        {
            while (true)
            {
                double max_ar    = 0;
                int    max_ar_id = 0;
                for (int k = 0; k < approx_polygon.Count; k++)
                {
                    List <Point> cur_polygon = new List <Point>();

                    for (int i = 0; i < approx_polygon.Count; i++)
                    {
                        cur_polygon.Add(approx_polygon[i]);
                    }

                    cur_polygon.Remove(cur_polygon[0 + k]);

                    MatOfPoint cur_area    = new MatOfPoint();
                    MatOfPoint approx_area = new MatOfPoint();
                    cur_area.fromList(cur_polygon);
                    approx_area.fromList(approx_polygon);

                    double area_ratio = Imgproc.contourArea(cur_area) / Imgproc.contourArea(approx_area);

                    // Debug.Log("ratio" + area_ratio);

                    if (area_ratio > max_ar)
                    {
                        max_ar    = area_ratio;
                        max_ar_id = k;
                    }
                }

                //If area still large enough remove a vertex
                if (max_ar > 0.8)
                {
                    // Debug.Log("Remove vertex  " + max_ar_id);
                    approx_polygon.Remove(approx_polygon.ToArray()[0 + max_ar_id]);
                }
                else
                {
                    break;
                }
            }

            return(approx_polygon);
        }
Example #23
        public static bool isStar(List <Point> shape)
        {
            double[] length = new double[5], angle = new double[5];

            if (shape.Count != 5)
            {
                return(false);
            }

            MatOfPoint shape_area = new MatOfPoint();

            shape_area.fromList(shape);

            if (!(Imgproc.contourArea(shape_area) > 6000 && Imgproc.contourArea(shape_area) < 10000))
            {
                return(false);
            }

            //Calculate side lengths
            length[0] = distanceTwoPoints(shape[0], shape[1]);
            length[1] = distanceTwoPoints(shape[1], shape[2]);
            length[2] = distanceTwoPoints(shape[2], shape[3]);
            length[3] = distanceTwoPoints(shape[3], shape[4]);
            length[4] = distanceTwoPoints(shape[4], shape[0]);

            //Calculate angles
            angle[0] = angleThreePoints(shape[0], shape[1], shape[2]);
            angle[1] = angleThreePoints(shape[1], shape[2], shape[3]);
            angle[2] = angleThreePoints(shape[2], shape[3], shape[4]);
            angle[3] = angleThreePoints(shape[3], shape[4], shape[0]);
            angle[4] = angleThreePoints(shape[4], shape[0], shape[1]);

            //Star check
            if (angle[0] > 98 && angle[0] < 128 &&
                angle[1] > 98 && angle[1] < 128 &&
                angle[2] > 98 && angle[2] < 128 &&
                angle[3] > 98 && angle[3] < 128 &&
                angle[4] > 98 && angle[4] < 128)
            {
                return(true);
            }
            else
            {
                return(false);
            }
        }
Example #24
        private void EstimateHand(Mat mat, List <MatOfPoint> contours, RecordHandDetectResult resultSetter)
        {
            //There may be contours in general, but if there are none for the side currently being examined, there is no valid hand area.
            if (contours.Count == 0)
            {
                resultSetter.HasValidHandArea = false;
                return;
            }

            var contour = SelectLargestContour(contours);

            var boundRect = Imgproc.boundingRect(contour);
            //Guard so that a dent at the wrist, which can be detected near the bottom of the image, is not mistaken for a finger defect.
            double defectMinY = boundRect.y + boundRect.height * 0.7;

            var pointMat = new MatOfPoint2f();

            Imgproc.approxPolyDP(new MatOfPoint2f(contour.toArray()), pointMat, 3, true);
            contour = new MatOfPoint(pointMat.toArray());

            var handArea       = Imgproc.minAreaRect(pointMat);
            var handAreaCenter = handArea.center;
            var handAreaSize   = handArea.size;

            //Using the axis-aligned bounding rect looks like this.
            resultSetter.HandAreaCenter   = new Vector2(boundRect.x + boundRect.width / 2, boundRect.y + boundRect.height / 2);
            resultSetter.HandAreaSize     = new Vector2(boundRect.width, boundRect.height);
            resultSetter.HandAreaRotation = (float)handArea.angle;

            //Using the OBB (minAreaRect) would look like this instead, but note that with this alone the behavior can be counter-intuitive when the angle exceeds 45 degrees.
            // resultSetter.HandAreaCenter = new Vector2((float)handAreaCenter.x, (float)handAreaCenter.y);
            // resultSetter.HandAreaSize = new Vector2((float)handAreaSize.width, (float)handAreaSize.height);
            // resultSetter.HandAreaRotation = (float)handArea.angle;

            Imgproc.convexHull(contour, _hullIndices);
            var hullIndicesArray = _hullIndices.toArray();

            //Should not normally happen: the convex hull could not be built properly.
            if (hullIndicesArray.Length < 3)
            {
                resultSetter.HasValidHandArea = false;
                return;
            }

            UpdateConvexityDefection(contour, _hullIndices, defectMinY, resultSetter);
        }
Example #25
        /// <summary>
        /// Raises the web cam texture to mat helper inited event.
        /// </summary>
        public void OnWebCamTextureToMatHelperInited()
        {
            Debug.Log("OnWebCamTextureToMatHelperInited");

            Mat webCamTextureMat = webCamTextureToMatHelper.GetMat();

            colors  = new Color32[webCamTextureMat.cols() * webCamTextureMat.rows()];
            texture = new Texture2D(webCamTextureMat.cols(), webCamTextureMat.rows(), TextureFormat.RGBA32, false);

            matOpFlowThis = new Mat();
            matOpFlowPrev = new Mat();
            MOPcorners    = new MatOfPoint();
            mMOP2fptsThis = new MatOfPoint2f();
            mMOP2fptsPrev = new MatOfPoint2f();
            mMOP2fptsSafe = new MatOfPoint2f();
            mMOBStatus    = new MatOfByte();
            mMOFerr       = new MatOfFloat();


            gameObject.transform.localScale = new Vector3(webCamTextureMat.cols(), webCamTextureMat.rows(), 1);

            Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

            float width  = 0;
            float height = 0;

            width  = gameObject.transform.localScale.x;
            height = gameObject.transform.localScale.y;

            float widthScale  = (float)Screen.width / width;
            float heightScale = (float)Screen.height / height;

            if (widthScale < heightScale)
            {
                Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
            }
            else
            {
                Camera.main.orthographicSize = height / 2;
            }

            gameObject.GetComponent <Renderer> ().material.mainTexture = texture;

            //			webCamTextureToMatHelper.Play ();
        }
Example #26
        public static bool isSquare(List <Point> shape)
        {
            double [] length = new double[4], angle = new double[4];

            //Check number of vertices
            //cout << "	Vertex Num: " << shape.size() << endl;
            //cout << "	Area: " << contourArea(shape) << endl;

            if (shape.Count != 4)
            {
                return(false);
            }

            MatOfPoint shape_area = new MatOfPoint();

            shape_area.fromList(shape);

            //   if (!(Imgproc.contourArea(shape_area) > 8000 && Imgproc.contourArea(shape_area) < 12000))
            //       return false;
            //
            //Calculate side lengths
            length[0] = distanceTwoPoints(shape[0], shape[1]);
            length[1] = distanceTwoPoints(shape[1], shape[2]);
            length[2] = distanceTwoPoints(shape[2], shape[3]);
            length[3] = distanceTwoPoints(shape[3], shape[0]);

            //Calculate angles
            angle[0] = angleThreePoints(shape[0], shape[1], shape[2]);
            angle[1] = angleThreePoints(shape[1], shape[2], shape[3]);
            angle[2] = angleThreePoints(shape[2], shape[3], shape[0]);
            angle[3] = angleThreePoints(shape[3], shape[0], shape[1]);

            //Square check
            if (angle[0] > 80 && angle[0] < 100 &&
                angle[1] > 80 && angle[1] < 100 &&
                angle[2] > 80 && angle[2] < 100 &&
                angle[3] > 80 && angle[3] < 100)
            {
                return(true);
            }
            else
            {
                return(false);
            }
        }
    public void WarpTriangle(Mat imgSrc, Mat imgDest, MatOfPoint triPointsSource, MatOfPoint triPointsDest)
    {
        // These are the points (from p1 to p2)
        var triDestArray   = triPointsDest.toArray();
        var triSourceArray = triPointsSource.toArray();

        // This is the bounding rects (from r1 to r)
        var boundingRectDestination = Imgproc.boundingRect(triPointsDest);
        var boundingRectSource      = Imgproc.boundingRect(triPointsSource);

        // Offset points by left top corner of the respective rectangles
        // r1 and t1, r and t
        var triSourceOffset  = new List <Point>();
        var triDestOffset    = new List <Point>();
        var triDestOffsetInt = new List <Point>();

        for (int i = 0; i < 3; i++)
        {
            triDestOffset.Add(new Point(triDestArray[i].x - boundingRectDestination.x, triDestArray[i].y - boundingRectDestination.y));
            triDestOffsetInt.Add(new Point(Math.Round(triDestArray[i].x - boundingRectDestination.x), Math.Round(triDestArray[i].y - boundingRectDestination.y))); // for fillConvexPoly
            triSourceOffset.Add(new Point(triSourceArray[i].x - boundingRectSource.x, triSourceArray[i].y - boundingRectSource.y));
        }

        var matTriDestOffsetInt = new MatOfPoint(triDestOffsetInt.ToArray());

        // Get mask by filling triangle
        Mat mask = Mat.zeros(boundingRectDestination.height, boundingRectDestination.width, CvType.CV_8UC1);

        Imgproc.fillConvexPoly(mask, matTriDestOffsetInt, new Scalar(1, 1, 1));

        // Image rect
        Mat img1Rect = new Mat(imgSrc, boundingRectSource);

        // Target image is in r/destination triangle bounds
        Mat warpImage1 = Mat.zeros(boundingRectDestination.height, boundingRectDestination.width, img1Rect.type());

        //Do the affine transform warp
        applyAffineTransform(warpImage1, img1Rect, new MatOfPoint2f(triSourceOffset.ToArray()), new MatOfPoint2f(triDestOffset.ToArray()));

        // Copy triangular region of the rectangular patch to the output image
        Core.multiply(warpImage1, mask, warpImage1);
        var sub = imgDest.submat(boundingRectDestination);

        warpImage1.copyTo(sub, mask);
    }
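
WarpTriangle relies on an applyAffineTransform helper that is not shown here; a hedged sketch of what such a helper typically looks like (getAffineTransform from the two point triples, then warpAffine into the destination patch) follows.

    private void applyAffineTransform(Mat warpDest, Mat src, MatOfPoint2f srcTri, MatOfPoint2f dstTri)
    {
        // Affine transform that maps the source triangle onto the destination triangle.
        using (Mat warpMat = Imgproc.getAffineTransform(srcTri, dstTri))
        {
            Imgproc.warpAffine(src, warpDest, warpMat, warpDest.size(),
                               Imgproc.INTER_LINEAR, Core.BORDER_REFLECT_101, new Scalar(0));
        }
    }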
Example #28
        /// <summary>
        /// Computes the perimeter (closed polygon length) of the specified contour.
        /// </summary>
        /// <param name="a">The contour whose perimeter is computed.</param>
        float perimeter(MatOfPoint a)
        {
            List <Point> aList = a.toList();

            float sum = 0, dx = 0, dy = 0;

            for (int i = 0; i < aList.Count; i++)
            {
                int i2 = (i + 1) % aList.Count;

                dx = (float)aList [i].x - (float)aList [i2].x;
                dy = (float)aList [i].y - (float)aList [i2].y;

                sum += Mathf.Sqrt(dx * dx + dy * dy);
            }

            return(sum);
        }
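
For reference, a hedged cross-check (checkPerimeter is an illustrative name): OpenCV's arcLength with closed == true computes the same closed perimeter, so the two values should agree up to float rounding.

        void checkPerimeter(MatOfPoint contour)
        {
            float  byHand   = perimeter(contour);
            double byOpenCV = Imgproc.arcLength(new MatOfPoint2f(contour.toArray()), true);

            Debug.Log("perimeter " + byHand + " arcLength " + byOpenCV);
        }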
    /// <summary>
    /// Warps the source image, into the destination image
    /// using the source triangles, warping to the destination
    /// triangles.
    /// </summary>
    /// <param name="imgSrc"></param>
    /// <param name="imgDest"></param>
    /// <param name="triListSrc"></param>
    /// <param name="triListDest"></param>
    private void WarpImages(Mat imgSrc, Mat imgDest, List <float> triListSrc, List <float> triListDest)
    {
        for (var i = 0; i < triListSrc.Count; i += 6)
        {
            var point1      = new Point(triListSrc[i], triListSrc[i + 1]);
            var point2      = new Point(triListSrc[i + 2], triListSrc[i + 3]);
            var point3      = new Point(triListSrc[i + 4], triListSrc[i + 5]);
            var pointArray1 = new Point[] { point1, point2, point3 };
            var mat1        = new MatOfPoint(pointArray1);

            var point4      = new Point(triListDest[i], triListDest[i + 1]);
            var point5      = new Point(triListDest[i + 2], triListDest[i + 3]);
            var point6      = new Point(triListDest[i + 4], triListDest[i + 5]);
            var pointArray2 = new Point[] { point4, point5, point6 };
            var mat2        = new MatOfPoint(pointArray2);

            WarpTriangle(imgSrc, imgDest, mat1, mat2);
        }
    }
Example #30
        public void Process(Mat rgbaImage)
        {
            Log.Info(TAG, "Process rgbaImages");

            Imgproc.PyrDown(rgbaImage, mPyrDownMat);
            Imgproc.PyrDown(mPyrDownMat, mPyrDownMat);

            Imgproc.CvtColor(mPyrDownMat, mHsvMat, Imgproc.ColorRgb2hsvFull);

            Core.InRange(mHsvMat, mLowerBound, mUpperBound, mMask);
            Imgproc.Dilate(mMask, mDilatedMask, new Mat());

            IList <MatOfPoint> contours = new JavaList <MatOfPoint>();

            Imgproc.FindContours(mDilatedMask, contours, mHierarchy, Imgproc.RetrExternal, Imgproc.ChainApproxSimple);

            // Find max contour area
            double maxArea = 0;

            foreach (var each in contours)
            {
                MatOfPoint wrapper = each;
                double     area    = Imgproc.ContourArea(wrapper);
                if (area > maxArea)
                {
                    maxArea = area;
                }
                Log.Info(TAG, "Process rgbaImages\t-- Imgproc.ContourArea(wrapper)");
            }

            // Filter contours by area and resize to fit the original image size
            mContours.Clear();
            foreach (var each in contours)
            {
                MatOfPoint contour = each;
                if (Imgproc.ContourArea(contour) > mMinContourArea * maxArea)
                {
                    Core.Multiply(contour, new Scalar(4, 4), contour);
                    mContours.Add(contour);
                    Log.Info(TAG, "Process rgbaImages\t-- mContours.Add(contour)");
                }
            }
        }
Example #31
    public YarnPictorial(Pictorial copy, MatOfPoint contourPoints)
    {
        if (copy.curve == null)
        {
            curve = null;
        }
        else
        {
            curve = GameObject.Instantiate(copy.curve);
        }
        drawing       = copy.drawing;
        debugColor    = copy.debugColor;
        yarn          = copy.yarn;
        adjusted      = copy.adjusted;
        healedStep    = copy.healedStep;
        healedStepGap = copy.healedStepGap;
        doubleHealed  = copy.doubleHealed;

        this.contourPoints = contourPoints;
    }
Example #32
        public KFPointsFilter(int numberOfElements) : base(numberOfElements)
        {
            diffDlib        = diffDlib * (double)numberOfElements / 68.0;
            prevTrackPtsMat = new MatOfPoint();

            src_points = new List <Point> ();
            for (int i = 0; i < numberOfElements; i++)
            {
                src_points.Add(new Point(0.0, 0.0));
            }
            last_points = new List <Point> ();
            for (int i = 0; i < numberOfElements; i++)
            {
                last_points.Add(new Point(0.0, 0.0));
            }

            // Initialize Kalman Filter
            stateNum   = numberOfElements * 4;
            measureNum = numberOfElements * 2;
            InitializeKalmanFilter();
        }
        /// <summary>
        /// Raises the web cam texture to mat helper inited event.
        /// </summary>
        public void OnWebCamTextureToMatHelperInited()
        {
            Debug.Log ("OnWebCamTextureToMatHelperInited");

                        Mat webCamTextureMat = webCamTextureToMatHelper.GetMat ();

                        colors = new Color32[webCamTextureMat.cols () * webCamTextureMat.rows ()];
                        texture = new Texture2D (webCamTextureMat.cols (), webCamTextureMat.rows (), TextureFormat.RGBA32, false);

                        matOpFlowThis = new Mat ();
                        matOpFlowPrev = new Mat ();
                        MOPcorners = new MatOfPoint ();
                        mMOP2fptsThis = new MatOfPoint2f ();
                        mMOP2fptsPrev = new MatOfPoint2f ();
                        mMOP2fptsSafe = new MatOfPoint2f ();
                        mMOBStatus = new MatOfByte ();
                        mMOFerr = new MatOfFloat ();

                        gameObject.transform.localScale = new Vector3 (webCamTextureMat.cols (), webCamTextureMat.rows (), 1);

                        Debug.Log ("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

                        float width = 0;
                        float height = 0;

                        width = gameObject.transform.localScale.x;
                        height = gameObject.transform.localScale.y;

                        float widthScale = (float)Screen.width / width;
                        float heightScale = (float)Screen.height / height;
                        if (widthScale < heightScale) {
                                Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
                        } else {
                                Camera.main.orthographicSize = height / 2;
                        }

                        gameObject.GetComponent<Renderer> ().material.mainTexture = texture;

                        //			webCamTextureToMatHelper.Play ();
        }
        private IEnumerator init()
        {
            if (webCamTexture != null) {
                                webCamTexture.Stop ();
                                initDone = false;

                                rgbaMat.Dispose ();

                                matOpFlowThis.Dispose ();
                                matOpFlowPrev.Dispose ();
                                MOPcorners.Dispose ();
                                mMOP2fptsThis.Dispose ();
                                mMOP2fptsPrev.Dispose ();
                                mMOP2fptsSafe.Dispose ();
                                mMOBStatus.Dispose ();
                                mMOFerr.Dispose ();
                        }

                        // Checks how many and which cameras are available on the device
                        for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++) {

                                if (WebCamTexture.devices [cameraIndex].isFrontFacing == shouldUseFrontFacing) {

                                        Debug.Log (cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing);

                                        webCamDevice = WebCamTexture.devices [cameraIndex];

                                        webCamTexture = new WebCamTexture (webCamDevice.name, width, height);

                                        break;
                                }

                        }

                        if (webCamTexture == null) {
                                webCamDevice = WebCamTexture.devices [0];
                                webCamTexture = new WebCamTexture (webCamDevice.name, width, height);
                        }

                        Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);

                        // Starts the camera
                        webCamTexture.Play ();
                        while (true) {
                                //If you want to use webcamTexture.width and webcamTexture.height on iOS, you have to wait until webcamTexture.didUpdateThisFrame == 1, otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
                                #if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
                if (webCamTexture.width > 16 && webCamTexture.height > 16) {
                                #else
                                if (webCamTexture.didUpdateThisFrame) {
                                        #if UNITY_IOS && !UNITY_EDITOR && UNITY_5_2
                                        while (webCamTexture.width <= 16) {
                                                webCamTexture.GetPixels32 ();
                                                yield return new WaitForEndOfFrame ();
                                        }
                                        #endif
                                #endif

                                        Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
                                        Debug.Log ("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrongFacing " + webCamDevice.isFrontFacing);

                                        colors = new Color32[webCamTexture.width * webCamTexture.height];

                                        rgbaMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);

                                        matOpFlowThis = new Mat ();
                                        matOpFlowPrev = new Mat ();
                                        MOPcorners = new MatOfPoint ();
                                        mMOP2fptsThis = new MatOfPoint2f ();
                                        mMOP2fptsPrev = new MatOfPoint2f ();
                                        mMOP2fptsSafe = new MatOfPoint2f ();
                                        mMOBStatus = new MatOfByte ();
                                        mMOFerr = new MatOfFloat ();

                                        texture = new Texture2D (webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);

                                        gameObject.GetComponent<Renderer> ().material.mainTexture = texture;

                                        updateLayout ();

                                        screenOrientation = Screen.orientation;
                                        initDone = true;

                                        break;
                                } else {
                                        yield return 0;
                                }
                        }
                }
Example #35
        void drawFaceFeatures(PersonFace personFace, Mat illustratedImg)
        {
            Rect faceRect = personFace.GetFace();
            Rect mouthRect = personFace.GetMouth();
            Rect noseRect = personFace.GetNose();
            Rect[] eyesRects = personFace.GetEyes();

            //Draw face division line
            double[] faceLineData = personFace.GetFaceLineData();
            PointGenerator faceLine = new PointGenerator(faceLineData[0], faceLineData[1]);
            Point faceTopPoint = faceLine.GetFromY(faceRect.y);
            Point faceBottomPoint = faceLine.GetFromY(faceRect.y + faceRect.height);

            //Imgproc.line(illustratedImg, faceTopPoint, faceBottomPoint, new Scalar(255, 0, 0), 1);

            //Get face feature angle
            double faceFeatureAngle = Math.Atan(faceLineData[0]);
            faceFeatureAngle = RadianToDegree(faceFeatureAngle);
            faceFeatureAngle += faceFeatureAngle > 0 ? -90 : 90;

            //Draw face lateral boundaries lines
            //Detect right and left eye
            Rect rightEye, leftEye;
            if (eyesRects[0].x > eyesRects[1].x)
            {
                rightEye = eyesRects[1];
                leftEye = eyesRects[0];
            }
            else
            {
                rightEye = eyesRects[0];
                leftEye = eyesRects[1];
            }

            //get eye line generator
            PointGenerator eyeLines = new PointGenerator(
                getRectCenter(rightEye), getRectCenter(leftEye));

            Point leftFacePoint = eyeLines.GetFromX(getRectCenter(leftEye).x + leftEye.width);
            Point rightFacePoint = eyeLines.GetFromX(getRectCenter(rightEye).x - rightEye.width);

            /* CvInvoke.Circle(image, leftFacePoint, 20,
                 new Bgr(Color.Green).MCvScalar, -1);

             CvInvoke.Circle(image, rightFacePoint, 20,
                 new Bgr(Color.Blue).MCvScalar, -1);*/

            //Get line generators for each side of the face
            double faceLineSlope = faceLineData[0];

            //Left side
            double leftFaceSideOffset = leftFacePoint.y - leftFacePoint.x * faceLineSlope;
            PointGenerator leftFaceLine = new PointGenerator(faceLineSlope, leftFaceSideOffset);

            Point startPointL = leftFaceLine.GetFromY(0);
            Point endPointL = leftFaceLine.GetFromY(illustratedImg.height());

            //Right side
            double rightFaceSideOffset = rightFacePoint.y - rightFacePoint.x * faceLineSlope;
            PointGenerator rightFaceLine = new PointGenerator(faceLineSlope, rightFaceSideOffset);

            Point startPointR = rightFaceLine.GetFromY(0);
            Point endPointR = rightFaceLine.GetFromY(illustratedImg.height());

            //Imgproc.line(illustratedImg, startPointL, endPointL, new Scalar(0,255,0), 5);
            //Imgproc.line(illustratedImg, startPointR, endPointR,new Scalar(255,0,0), 3);

            //Draw mouth line
            //Put center on the top for the mouth stay in the middle of the mouth square
            Point mouthCenter = new Point(mouthRect.x + mouthRect.width / 2, mouthRect.y);
            Size mouthSize = new Size(mouthRect.width / 2, mouthRect.height / 2);

            Point mCenter = getRectCenter(mouthRect);

            //Get mouth line generator
            double aFactMouth = Math.Tan(Math.Atan(faceLineSlope) + Math.PI / 2);
            double bfactMouth = mCenter.y - mCenter.x * aFactMouth;
            PointGenerator mouthLine = new PointGenerator(aFactMouth, bfactMouth);

            double leftFaceMouthCrossX = (bfactMouth - leftFaceSideOffset) /
                (faceLineSlope - aFactMouth);

            double rightFaceMouthCrossX = (bfactMouth - rightFaceSideOffset) /
                (faceLineSlope - aFactMouth);

            Point leftFaceMouthCross = mouthLine.GetFromX(leftFaceMouthCrossX);
            Point rightFaceMouthCross = mouthLine.GetFromX(rightFaceMouthCrossX);

            //Get face top line
            double afactTopFace = aFactMouth;   //use the mouth line since this uses the same slope
            double bfactTopFace = faceTopPoint.y - faceTopPoint.x * afactTopFace;
            PointGenerator faceTopLine = new PointGenerator(afactTopFace, bfactTopFace);

            double leftTopFaceCrossX = (bfactTopFace - leftFaceSideOffset) /
                 (faceLineSlope - afactTopFace);

            double rightTopFaceCrossX = (bfactTopFace - rightFaceSideOffset) /
                (faceLineSlope - afactTopFace);

            Point leftTopFaceCross = faceTopLine.GetFromX(leftTopFaceCrossX);
            Point rightTopFaceCross = faceTopLine.GetFromX(rightTopFaceCrossX);

            /*CvInvoke.Circle(illustratedImg, leftTopFaceCross, 5, new MCvScalar(), -1);
            CvInvoke.Circle(illustratedImg, rightTopFaceCross, 5, new MCvScalar(), -1);
            CvInvoke.Circle(illustratedImg, leftFaceMouthCross, 5, new MCvScalar(), -1);
            CvInvoke.Circle(illustratedImg, rightFaceMouthCross, 5, new MCvScalar(), -1);
            CvInvoke.Circle(illustratedImg, faceBottomPoint, 5, new MCvScalar(), -1);*/

            MatOfPoint facePointsMat = new MatOfPoint(leftTopFaceCross,
                rightTopFaceCross,
                rightFaceMouthCross,
                faceBottomPoint,
                leftFaceMouthCross);

            //CvInvoke.Polylines(image, facePointsVector, true, new Bgr(172, 203, 227).MCvScalar, 1);

            Imgproc.fillConvexPoly(illustratedImg, facePointsMat, new Scalar(255,255,255));

            Imgproc.ellipse(illustratedImg, mouthCenter, mouthSize, faceFeatureAngle, 0, 180, new Scalar(0,0,0), 2);

            Point p1 = faceTopLine.GetFromX(0);
            Point p2 = faceTopLine.GetFromX(illustratedImg.width());

            //Imgproc.line(illustratedImg, p1, p2, new Scalar(0, 0, 0), 3);

            //Draw nose line
            Point noseCenter = new Point(noseRect.x + noseRect.width / 2,
                noseRect.y + noseRect.height / 2);
            Size noseSize = new Size(noseRect.width / 2, noseRect.height / 2);
            double noseAngle = Math.Atan(faceLineData[0]);
            noseAngle = RadianToDegree(noseAngle);

            Imgproc.ellipse(illustratedImg, noseCenter, noseSize, noseAngle, 0, 180, new Scalar(0, 0, 0), 2);

            //Draw eyes ellipses
            foreach (Rect eye in personFace.GetEyes())
            {
                Point eyeCenter = new Point(eye.x + eye.width / 2, eye.y + eye.height / 2);
                Size ellipseSize = new Size(eye.width / 5, eye.height / 2);

                Imgproc.ellipse(illustratedImg, eyeCenter, ellipseSize, faceFeatureAngle, 0, 360, new Scalar(0, 0, 0), -1);
            }

            Imgproc.line(illustratedImg, faceBottomPoint, new Point(illustratedImg.width() / 2, illustratedImg.height()), new Scalar(0, 0, 0));
        }
Example #36
                // Update is called once per frame
                void Update ()
                {
                        if (!initDone)
                                return;

                        if (screenOrientation != Screen.orientation) {
                                screenOrientation = Screen.orientation;
                                updateLayout ();
                        }

                        #if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
                        if (webCamTexture.width > 16 && webCamTexture.height > 16) {
                        #else
                        if (webCamTexture.didUpdateThisFrame) {
                                #endif

                                Utils.webCamTextureToMat (webCamTexture, rgbaMat, colors);

                                //flip to correct direction.
                                if (webCamDevice.isFrontFacing) {
                                        if (webCamTexture.videoRotationAngle == 0) {
                                                Core.flip (rgbaMat, rgbaMat, 1);
                                        } else if (webCamTexture.videoRotationAngle == 90) {
                                                Core.flip (rgbaMat, rgbaMat, 0);
                                        }
                                        if (webCamTexture.videoRotationAngle == 180) {
                                                Core.flip (rgbaMat, rgbaMat, 0);
                                        } else if (webCamTexture.videoRotationAngle == 270) {
                                                Core.flip (rgbaMat, rgbaMat, 1);
                                        }
                                } else {
                                        if (webCamTexture.videoRotationAngle == 180) {
                                                Core.flip (rgbaMat, rgbaMat, -1);
                                        } else if (webCamTexture.videoRotationAngle == 270) {
                                                Core.flip (rgbaMat, rgbaMat, -1);
                                        }
                                }

                                Imgproc.cvtColor (rgbaMat, hsvMat, Imgproc.COLOR_RGBA2RGB);
                                Imgproc.cvtColor (hsvMat, hsvMat, Imgproc.COLOR_RGB2HSV);

                                Point[] points = roiPointList.ToArray ();

                                if (roiPointList.Count == 4) {

                                        using (Mat backProj = new Mat ()) {
                                                Imgproc.calcBackProject (new List<Mat> (new Mat[]{hsvMat}), new MatOfInt (0), roiHistMat, backProj, new MatOfFloat (0, 180), 1.0);

                                                RotatedRect r = Video.CamShift (backProj, roiRect, termination);
                                                r.points (points);
                                        }

                                        #if ((UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR)
                            //Touch
                            int touchCount = Input.touchCount;
                            if (touchCount == 1)
                            {

                                if(Input.GetTouch(0).phase == TouchPhase.Ended){

                                    roiPointList.Clear ();
                                }

                            }
                                        #else
                                        if (Input.GetMouseButtonUp (0)) {
                                                roiPointList.Clear ();
                                        }
            #endif
                                }

                                if (roiPointList.Count < 4) {

                                        #if ((UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR)
                            //Touch
                            int touchCount = Input.touchCount;
                            if (touchCount == 1)
                            {
                                Touch t = Input.GetTouch(0);
                                if(t.phase == TouchPhase.Ended){
                                    roiPointList.Add (convertScreenPoint (new Point (t.position.x, t.position.y), gameObject, Camera.main));
            //									Debug.Log ("touch X " + t.position.x);
            //									Debug.Log ("touch Y " + t.position.y);

                                    if (!(new OpenCVForUnity.Rect (0, 0, hsvMat.width (), hsvMat.height ()).contains (roiPointList [roiPointList.Count - 1]))) {
                                        roiPointList.RemoveAt (roiPointList.Count - 1);
                                    }
                                }

                            }
            #else
                                        //Mouse
                                        if (Input.GetMouseButtonUp (0)) {

                                                roiPointList.Add (convertScreenPoint (new Point (Input.mousePosition.x, Input.mousePosition.y), gameObject, Camera.main));
            //												Debug.Log ("mouse X " + Input.mousePosition.x);
            //												Debug.Log ("mouse Y " + Input.mousePosition.y);

                                                if (!(new OpenCVForUnity.Rect (0, 0, hsvMat.width (), hsvMat.height ()).contains (roiPointList [roiPointList.Count - 1]))) {
                                                        roiPointList.RemoveAt (roiPointList.Count - 1);
                                                }
                                        }
            #endif

                                        if (roiPointList.Count == 4) {

                                                using (MatOfPoint roiPointMat = new MatOfPoint (roiPointList.ToArray ())) {
                                                        roiRect = Imgproc.boundingRect (roiPointMat);
                                                }

                                                if (roiHistMat != null) {
                                                        roiHistMat.Dispose ();
                                                        roiHistMat = null;
                                                }
                                                roiHistMat = new Mat ();

                                                using (Mat roiHSVMat = new Mat(hsvMat, roiRect))
                                                using (Mat maskMat = new Mat ()) {

                                                        Imgproc.calcHist (new List<Mat> (new Mat[]{roiHSVMat}), new MatOfInt (0), maskMat, roiHistMat, new MatOfInt (16), new MatOfFloat (0, 180));
                                                        Core.normalize (roiHistMat, roiHistMat, 0, 255, Core.NORM_MINMAX);

            //														Debug.Log ("roiHist " + roiHistMat.ToString ());
                                                }
                                        }
                                }

                                if (points.Length < 4) {

                                        for (int i = 0; i < points.Length; i++) {
                                                Core.circle (rgbaMat, points [i], 6, new Scalar (0, 0, 255, 255), 2);
                                        }

                                } else {

                                        for (int i = 0; i < 4; i++) {
                                                Core.line (rgbaMat, points [i], points [(i + 1) % 4], new Scalar (255, 0, 0, 255), 2);
                                        }

                                        Core.rectangle (rgbaMat, roiRect.tl (), roiRect.br (), new Scalar (0, 255, 0, 255), 2);
                                }

                                Core.putText (rgbaMat, "PLEASE TOUCH 4 POINTS", new Point (5, 25), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Core.LINE_AA, false);

                                Utils.matToTexture2D (rgbaMat, texture, colors);

                                gameObject.GetComponent<Renderer> ().material.mainTexture = texture;

                        }

                }

                void OnDisable ()
                {
                        webCamTexture.Stop ();
                }

                void OnGUI ()
                {
                        float screenScale = Screen.height / 240.0f;
                        Matrix4x4 scaledMatrix = Matrix4x4.Scale (new Vector3 (screenScale, screenScale, screenScale));
                        GUI.matrix = scaledMatrix;

                        GUILayout.BeginVertical ();
                        if (GUILayout.Button ("back")) {
                                Application.LoadLevel ("OpenCVForUnitySample");
                        }
                        if (GUILayout.Button ("change camera")) {
                                shouldUseFrontFacing = !shouldUseFrontFacing;
                                StartCoroutine (init ());
                        }

                        GUILayout.EndVertical ();
                }
Example #37
0
 /// <summary>
 /// Computes convex hull for a set of 2D points.
 /// </summary>
 /// <param name="points">The input 2D point set, represented by CV_32SC2 or CV_32FC2 matrix</param>
 /// <param name="clockwise">If true, the output convex hull will be oriented clockwise, 
 /// otherwise it will be oriented counter-clockwise. Here, the usual screen coordinate 
 /// system is assumed - the origin is at the top-left corner, x axis is oriented to the right, 
 /// and y axis is oriented downwards.</param>
 /// <returns>The output convex hull. It is a vector of points that form the 
 /// hull (must have the same type as the input points).</returns>
 public Point[] ConvexHullPoints(InputArray points, bool clockwise = false)
 {
     var dst = new MatOfPoint();
     Cv2.ConvexHull(points, dst, clockwise, true);
     return dst.ToArray();
 }
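For context, here is a minimal usage sketch for the ConvexHullPoints wrapper above. It is only a sketch under assumptions: the input points are illustrative, and it relies on OpenCvSharp's point-array convenience overload Cv2.ConvexHull(IEnumerable<Point>, bool), which is assumed to be available alongside the InputArray overload used above.

    using System;
    using OpenCvSharp;

    static class ConvexHullDemo
    {
        static void Main()
        {
            // Illustrative points; in practice these usually come from FindContours.
            Point[] pts =
            {
                new Point(10, 10), new Point(100, 20),
                new Point(90, 110), new Point(20, 90), new Point(55, 60)
            };

            // Point-array convenience overload (assumed available in this OpenCvSharp version);
            // false = counter-clockwise orientation in image coordinates.
            Point[] hull = Cv2.ConvexHull(pts, false);

            foreach (Point p in hull)
                Console.WriteLine(p); // prints the hull vertices
        }
    }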
        private IEnumerator init()
        {
            if (webCamTexture != null) {
                                webCamTexture.Stop ();
                                initDone = false;

                                rgbaMat.Dispose ();

                                matOpFlowThis.Dispose ();
                                matOpFlowPrev.Dispose ();
                                MOPcorners.Dispose ();
                                mMOP2fptsThis.Dispose ();
                                mMOP2fptsPrev.Dispose ();
                                mMOP2fptsSafe.Dispose ();
                                mMOBStatus.Dispose ();
                                mMOFerr.Dispose ();
                        }

                        // Checks how many and which cameras are available on the device
                        for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++) {

                                if (WebCamTexture.devices [cameraIndex].isFrontFacing == isFrontFacing) {

                                        Debug.Log (cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing);

                                        webCamDevice = WebCamTexture.devices [cameraIndex];

                                        webCamTexture = new WebCamTexture (webCamDevice.name, width, height);

                                        break;
                                }

                        }

                        if (webCamTexture == null) {
                                webCamDevice = WebCamTexture.devices [0];
                                webCamTexture = new WebCamTexture (webCamDevice.name, width, height);
                        }

                        Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);

                        // Starts the camera
                        webCamTexture.Play ();
                        while (true) {
                                //If you want to use webCamTexture.width and webCamTexture.height on iOS, you have to wait until webCamTexture.didUpdateThisFrame == true; otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
                                #if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
                if (webCamTexture.width > 16 && webCamTexture.height > 16) {
                                #else
                                if (webCamTexture.didUpdateThisFrame) {
                                        #endif

                                        Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
                                        Debug.Log ("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrongFacing " + webCamDevice.isFrontFacing);

                                        colors = new Color32[webCamTexture.width * webCamTexture.height];

                                        rgbaMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);

                                        matOpFlowThis = new Mat ();
                                        matOpFlowPrev = new Mat ();
                                        MOPcorners = new MatOfPoint ();
                                        mMOP2fptsThis = new MatOfPoint2f ();
                                        mMOP2fptsPrev = new MatOfPoint2f ();
                                        mMOP2fptsSafe = new MatOfPoint2f ();
                                        mMOBStatus = new MatOfByte ();
                                        mMOFerr = new MatOfFloat ();

                                        texture = new Texture2D (webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);

                                        gameObject.transform.eulerAngles = new Vector3 (0, 0, 0);
                                        #if (UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR
                    gameObject.transform.eulerAngles = new Vector3 (0, 0, -90);
                                        #endif
            //										gameObject.transform.rotation = gameObject.transform.rotation * Quaternion.AngleAxis (webCamTexture.videoRotationAngle, Vector3.back);

                                        gameObject.transform.localScale = new Vector3 (webCamTexture.width, webCamTexture.height, 1);

            //										bool videoVerticallyMirrored = webCamTexture.videoVerticallyMirrored;
            //										float scaleX = 1;
            //										float scaleY = videoVerticallyMirrored ? -1.0f : 1.0f;
            //										if (webCamTexture.videoRotationAngle == 270)
            //												scaleY = -1.0f;
            //										gameObject.transform.localScale = new Vector3 (scaleX * gameObject.transform.localScale.x, scaleY * gameObject.transform.localScale.y, 1);

                                        gameObject.GetComponent<Renderer> ().material.mainTexture = texture;

                                        #if (UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR
                                        Camera.main.orthographicSize = webCamTexture.width / 2;
                                        #else
                                        Camera.main.orthographicSize = webCamTexture.height / 2;
                                        #endif

                                        initDone = true;

                                        break;
                                } else {
                                        yield return 0;
                                }
                        }
                }

                // Update is called once per frame
                void Update ()
                {
                        if (!initDone)
                                return;

                        #if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
                if (webCamTexture.width > 16 && webCamTexture.height > 16) {
                        #else
                        if (webCamTexture.didUpdateThisFrame) {
                                #endif

                                Utils.webCamTextureToMat (webCamTexture, rgbaMat, colors);

                                // flip to the correct orientation. (the mirrored and non-mirrored branches
                                // of the original sample were identical, so the videoVerticallyMirrored check is omitted.)
                                if (webCamDevice.isFrontFacing) {
                                        if (webCamTexture.videoRotationAngle == 0) {
                                                Core.flip (rgbaMat, rgbaMat, 1);
                                        } else if (webCamTexture.videoRotationAngle == 90) {
                                                Core.flip (rgbaMat, rgbaMat, 0);
                                        } else if (webCamTexture.videoRotationAngle == 270) {
                                                Core.flip (rgbaMat, rgbaMat, 1);
                                        }
                                } else {
                                        if (webCamTexture.videoRotationAngle == 270) {
                                                Core.flip (rgbaMat, rgbaMat, -1);
                                        }
                                }

                                if (mMOP2fptsPrev.rows () == 0) {

                                        // first time through the loop so we need prev and this mats
                                        // plus prev points
                                        // get this mat
                                        Imgproc.cvtColor (rgbaMat, matOpFlowThis, Imgproc.COLOR_RGBA2GRAY);

                                        // copy that to prev mat
                                        matOpFlowThis.copyTo (matOpFlowPrev);

                                        // get prev corners
                                        Imgproc.goodFeaturesToTrack (matOpFlowPrev, MOPcorners, iGFFTMax, 0.05, 20);
                                        mMOP2fptsPrev.fromArray (MOPcorners.toArray ());

                                        // get safe copy of this corners
                                        mMOP2fptsPrev.copyTo (mMOP2fptsSafe);
                                } else {
                                        // we've been through before so
                                        // this mat is valid. Copy it to prev mat
                                        matOpFlowThis.copyTo (matOpFlowPrev);

                                        // get this mat
                                        Imgproc.cvtColor (rgbaMat, matOpFlowThis, Imgproc.COLOR_RGBA2GRAY);

                                        // get the corners for this mat
                                        Imgproc.goodFeaturesToTrack (matOpFlowThis, MOPcorners, iGFFTMax, 0.05, 20);
                                        mMOP2fptsThis.fromArray (MOPcorners.toArray ());

                                        // retrieve the corners from the prev mat
                                        // (saves calculating them again)
                                        mMOP2fptsSafe.copyTo (mMOP2fptsPrev);

                                        // and save this corners for next time through

                                        mMOP2fptsThis.copyTo (mMOP2fptsSafe);
                                }

                                /*
            Parameters:
            prevImg first 8-bit input image
            nextImg second input image
            prevPts vector of 2D points for which the flow needs to be found; point coordinates must be single-precision floating-point numbers.
            nextPts output vector of 2D points (with single-precision floating-point coordinates) containing the calculated new positions of input features in the second image; when OPTFLOW_USE_INITIAL_FLOW flag is passed, the vector must have the same size as in the input.
            status output status vector (of unsigned chars); each element of the vector is set to 1 if the flow for the corresponding features has been found, otherwise, it is set to 0.
            err output vector of errors; each element of the vector is set to an error for the corresponding feature, type of the error measure can be set in flags parameter; if the flow wasn't found then the error is not defined (use the status parameter to find such cases).
            */
                                Video.calcOpticalFlowPyrLK (matOpFlowPrev, matOpFlowThis, mMOP2fptsPrev, mMOP2fptsThis, mMOBStatus, mMOFerr);

                                if (!mMOBStatus.empty ()) {
                                        List<Point> cornersPrev = mMOP2fptsPrev.toList ();
                                        List<Point> cornersThis = mMOP2fptsThis.toList ();
                                        List<byte> byteStatus = mMOBStatus.toList ();

                                        int x = 0;
                                        // iterate over every tracked feature (the original sample's loop stopped one status entry early)
                                        int y = byteStatus.Count;

                                        for (x = 0; x < y; x++) {
                                                if (byteStatus [x] == 1) {
                                                        Point pt = cornersThis [x];
                                                        Point pt2 = cornersPrev [x];

                                                        Core.circle (rgbaMat, pt, 5, colorRed, iLineThickness - 1);

                                                        Core.line (rgbaMat, pt, pt2, colorRed, iLineThickness);
                                                }
                                        }
                                }

                                Utils.matToTexture2D (rgbaMat, texture, colors);

                                gameObject.GetComponent<Renderer> ().material.mainTexture = texture;

                        }

                }

                void OnDisable ()
                {
                        webCamTexture.Stop ();
                }

                void OnGUI ()
                {
                        float screenScale = Screen.width / 240.0f;
                        Matrix4x4 scaledMatrix = Matrix4x4.Scale (new Vector3 (screenScale, screenScale, screenScale));
                        GUI.matrix = scaledMatrix;

                        GUILayout.BeginVertical ();
                        if (GUILayout.Button ("back")) {
                                Application.LoadLevel ("OpenCVForUnitySample");
                        }
                        if (GUILayout.Button ("change camera")) {
                                isFrontFacing = !isFrontFacing;
                                StartCoroutine (init ());
                        }

                        GUILayout.EndVertical ();
                }
            }
        }
        private IEnumerator init()
        {
            if (webCamTexture != null) {
                                webCamTexture.Stop ();
                                initDone = false;

                                rgbaMat.Dispose ();
                                hsvMat.Dispose ();
                                if (roiHistMat != null)
                                        roiHistMat.Dispose ();
                                roiPointList.Clear ();
                        }

                        // Checks how many and which cameras are available on the device
                        for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++) {

                                if (WebCamTexture.devices [cameraIndex].isFrontFacing == isFrontFacing) {

                                        Debug.Log (cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing);

                                        webCamDevice = WebCamTexture.devices [cameraIndex];

                                        webCamTexture = new WebCamTexture (webCamDevice.name, width, height);

                                        break;
                                }

                        }

                        if (webCamTexture == null) {
                                webCamDevice = WebCamTexture.devices [0];
                                webCamTexture = new WebCamTexture (webCamDevice.name, width, height);
                        }

                        Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);

                        // Starts the camera
                        webCamTexture.Play ();

                        while (true) {
                                //If you want to use webCamTexture.width and webCamTexture.height on iOS, you have to wait until webCamTexture.didUpdateThisFrame == true; otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
                                #if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
                                if (webCamTexture.width > 16 && webCamTexture.height > 16) {
                                #else
                                if (webCamTexture.didUpdateThisFrame) {
                                        #endif

                                        Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
                                        Debug.Log ("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrongFacing " + webCamDevice.isFrontFacing);

                                        colors = new Color32[webCamTexture.width * webCamTexture.height];

                                        rgbaMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);
                                        hsvMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC3);

                                        texture = new Texture2D (webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);

                                        gameObject.transform.eulerAngles = new Vector3 (0, 0, 0);
                                        #if (UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR
                    gameObject.transform.eulerAngles = new Vector3 (0, 0, -90);
                                        #endif

            //										gameObject.transform.rotation = gameObject.transform.rotation * Quaternion.AngleAxis (webCamTexture.videoRotationAngle, Vector3.back);

                                        gameObject.transform.localScale = new Vector3 (webCamTexture.width, webCamTexture.height, 1);

            //										bool videoVerticallyMirrored = webCamTexture.videoVerticallyMirrored;
            //										float scaleX = 1;
            //										float scaleY = videoVerticallyMirrored ? -1.0f : 1.0f;
            //										if (webCamTexture.videoRotationAngle == 270)
            //												scaleY = -1.0f;
            //										gameObject.transform.localScale = new Vector3 (scaleX * gameObject.transform.localScale.x, scaleY * gameObject.transform.localScale.y, 1);

                                        gameObject.GetComponent<Renderer> ().material.mainTexture = texture;

                                        #if (UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR
                                        Camera.main.orthographicSize = (((float)Screen.height/(float)Screen.width) * (float)webCamTexture.height) / 2.0f;
            #else
                                        Camera.main.orthographicSize = webCamTexture.height / 2;
                                        #endif

                                        initDone = true;

                                        break;
                                } else {
                                        yield return 0;
                                }
                        }

                }

                // Update is called once per frame
                void Update ()
                {
                        if (!initDone)
                                return;

                        #if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
                        if (webCamTexture.width > 16 && webCamTexture.height > 16) {
                        #else
                        if (webCamTexture.didUpdateThisFrame) {
                                #endif

                                Utils.webCamTextureToMat (webCamTexture, rgbaMat, colors);

                                // flip to the correct orientation. (the mirrored and non-mirrored branches
                                // of the original sample were identical, so the videoVerticallyMirrored check is omitted.)
                                if (webCamDevice.isFrontFacing) {
                                        if (webCamTexture.videoRotationAngle == 0) {
                                                Core.flip (rgbaMat, rgbaMat, 1);
                                        } else if (webCamTexture.videoRotationAngle == 90) {
                                                Core.flip (rgbaMat, rgbaMat, 0);
                                        } else if (webCamTexture.videoRotationAngle == 270) {
                                                Core.flip (rgbaMat, rgbaMat, 1);
                                        }
                                } else {
                                        if (webCamTexture.videoRotationAngle == 270) {
                                                Core.flip (rgbaMat, rgbaMat, -1);
                                        }
                                }

                                Imgproc.cvtColor (rgbaMat, hsvMat, Imgproc.COLOR_RGBA2RGB);
                                Imgproc.cvtColor (hsvMat, hsvMat, Imgproc.COLOR_RGB2HSV);

                                Point[] points = roiPointList.ToArray ();

                                if (roiPointList.Count == 4) {

                                        using (Mat backProj = new Mat ()) {
                                                Imgproc.calcBackProject (new List<Mat> (new Mat[]{hsvMat}), new MatOfInt (0), roiHistMat, backProj, new MatOfFloat (0, 180), 1.0);

                                                RotatedRect r = Video.CamShift (backProj, roiRect, termination);
                                                r.points (points);
                                        }

                                        #if ((UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR)
                            //Touch
                            int touchCount = Input.touchCount;
                            if (touchCount == 1)
                            {

                                if(Input.GetTouch(0).phase == TouchPhase.Ended){

                                    roiPointList.Clear ();
                                }

                            }
                                        #else
                                        if (Input.GetMouseButtonUp (0)) {
                                                roiPointList.Clear ();
                                        }
            #endif
                                }

                                if (roiPointList.Count < 4) {

                                        #if ((UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR)
                            //Touch
                            int touchCount = Input.touchCount;
                            if (touchCount == 1)
                            {
                                Touch t = Input.GetTouch(0);
                                if(t.phase == TouchPhase.Ended){
                                    roiPointList.Add (convertScreenPoint (new Point (t.position.x, t.position.y), gameObject, Camera.main));
            //									Debug.Log ("touch X " + t.position.x);
            //									Debug.Log ("touch Y " + t.position.y);

                                    if (!(new OpenCVForUnity.Rect (0, 0, hsvMat.width (), hsvMat.height ()).contains (roiPointList [roiPointList.Count - 1]))) {
                                        roiPointList.RemoveAt (roiPointList.Count - 1);
                                    }
                                }

                            }
            #else
                                        //Mouse
                                        if (Input.GetMouseButtonUp (0)) {

                                                roiPointList.Add (convertScreenPoint (new Point (Input.mousePosition.x, Input.mousePosition.y), gameObject, Camera.main));
            //												Debug.Log ("mouse X " + Input.mousePosition.x);
            //												Debug.Log ("mouse Y " + Input.mousePosition.y);

                                                if (!(new OpenCVForUnity.Rect (0, 0, hsvMat.width (), hsvMat.height ()).contains (roiPointList [roiPointList.Count - 1]))) {
                                                        roiPointList.RemoveAt (roiPointList.Count - 1);
                                                }
                                        }
            #endif

                                        if (roiPointList.Count == 4) {

                                                using (MatOfPoint roiPointMat = new MatOfPoint (roiPointList.ToArray ())) {
                                                        roiRect = Imgproc.boundingRect (roiPointMat);
                                                }

                                                if (roiHistMat != null) {
                                                        roiHistMat.Dispose ();
                                                        roiHistMat = null;
                                                }
                                                roiHistMat = new Mat ();

                                                using (Mat roiHSVMat = new Mat(hsvMat, roiRect))
                                                using (Mat maskMat = new Mat ()) {

                                                        Imgproc.calcHist (new List<Mat> (new Mat[]{roiHSVMat}), new MatOfInt (0), maskMat, roiHistMat, new MatOfInt (16), new MatOfFloat (0, 180));
                                                        Core.normalize (roiHistMat, roiHistMat, 0, 255, Core.NORM_MINMAX);

            //														Debug.Log ("roiHist " + roiHistMat.ToString ());
                                                }
                                        }
                                }

                                if (points.Length < 4) {

                                        for (int i = 0; i < points.Length; i++) {
                                                Core.circle (rgbaMat, points [i], 6, new Scalar (0, 0, 255, 255), 2);
                                        }

                                } else {

                                        for (int i = 0; i < 4; i++) {
                                                Core.line (rgbaMat, points [i], points [(i + 1) % 4], new Scalar (255, 0, 0, 255), 2);
                                        }

                                        Core.rectangle (rgbaMat, roiRect.tl (), roiRect.br (), new Scalar (0, 255, 0, 255), 2);
                                }

                                Core.putText (rgbaMat, "PLEASE TOUCH 4 POINTS", new Point (5, 25), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Core.LINE_AA, false);

                                Utils.matToTexture2D (rgbaMat, texture, colors);

                                gameObject.GetComponent<Renderer> ().material.mainTexture = texture;

                        }

                }

                void OnDisable ()
                {
                        webCamTexture.Stop ();
                }

                void OnGUI ()
                {
                        float screenScale = Screen.width / 240.0f;
                        Matrix4x4 scaledMatrix = Matrix4x4.Scale (new Vector3 (screenScale, screenScale, screenScale));
                        GUI.matrix = scaledMatrix;

                        GUILayout.BeginVertical ();
                        if (GUILayout.Button ("back")) {
                                Application.LoadLevel ("OpenCVForUnitySample");
                        }
                        if (GUILayout.Button ("change camera")) {
                                isFrontFacing = !isFrontFacing;
                                StartCoroutine (init ());
                        }

                        GUILayout.EndVertical ();
                }

                /// <summary>
                /// Converts a point in screen coordinates into the local coordinate space of the camera preview quad.
                /// </summary>
                /// <returns>The converted point in quad (image) coordinates.</returns>
                /// <param name="screenPoint">Point in screen coordinates.</param>
                /// <param name="quad">Quad GameObject that displays the camera texture.</param>
                /// <param name="cam">Camera used for the conversion.</param>
                static Point convertScreenPoint (Point screenPoint, GameObject quad, Camera cam)
                {
                        #if ((UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR)
                    Vector2 tl = cam.WorldToScreenPoint (new Vector3 (quad.transform.localPosition.x + quad.transform.localScale.y / 2, quad.transform.localPosition.y + quad.transform.localScale.x / 2, quad.transform.localPosition.z));
                    Vector2 tr = cam.WorldToScreenPoint (new Vector3 (quad.transform.localPosition.x + quad.transform.localScale.y / 2, quad.transform.localPosition.y - quad.transform.localScale.x / 2, quad.transform.localPosition.z));
                    Vector2 br = cam.WorldToScreenPoint (new Vector3 (quad.transform.localPosition.x - quad.transform.localScale.y / 2, quad.transform.localPosition.y - quad.transform.localScale.x / 2, quad.transform.localPosition.z));
                    Vector2 bl = cam.WorldToScreenPoint (new Vector3 (quad.transform.localPosition.x - quad.transform.localScale.y / 2, quad.transform.localPosition.y + quad.transform.localScale.x / 2, quad.transform.localPosition.z));
            #else
                        Vector2 tl = cam.WorldToScreenPoint (new Vector3 (quad.transform.localPosition.x - quad.transform.localScale.x / 2, quad.transform.localPosition.y + quad.transform.localScale.y / 2, quad.transform.localPosition.z));
                        Vector2 tr = cam.WorldToScreenPoint (new Vector3 (quad.transform.localPosition.x + quad.transform.localScale.x / 2, quad.transform.localPosition.y + quad.transform.localScale.y / 2, quad.transform.localPosition.z));
                        Vector2 br = cam.WorldToScreenPoint (new Vector3 (quad.transform.localPosition.x + quad.transform.localScale.x / 2, quad.transform.localPosition.y - quad.transform.localScale.y / 2, quad.transform.localPosition.z));
                        Vector2 bl = cam.WorldToScreenPoint (new Vector3 (quad.transform.localPosition.x - quad.transform.localScale.x / 2, quad.transform.localPosition.y - quad.transform.localScale.y / 2, quad.transform.localPosition.z));
            #endif

                        Mat srcRectMat = new Mat (4, 1, CvType.CV_32FC2);
                        Mat dstRectMat = new Mat (4, 1, CvType.CV_32FC2);

                        srcRectMat.put (0, 0, tl.x, tl.y, tr.x, tr.y, br.x, br.y, bl.x, bl.y);
                        dstRectMat.put (0, 0, 0.0, 0.0, quad.transform.localScale.x, 0.0, quad.transform.localScale.x, quad.transform.localScale.y, 0.0, quad.transform.localScale.y);

                        Mat perspectiveTransform = Imgproc.getPerspectiveTransform (srcRectMat, dstRectMat);

            //						Debug.Log ("srcRectMat " + srcRectMat.dump ());
            //						Debug.Log ("dstRectMat " + dstRectMat.dump ());
            //						Debug.Log ("perspectiveTransform " + perspectiveTransform.dump ());

                        MatOfPoint2f srcPointMat = new MatOfPoint2f (screenPoint);
                        MatOfPoint2f dstPointMat = new MatOfPoint2f ();

                        Core.perspectiveTransform (srcPointMat, dstPointMat, perspectiveTransform);

            //						Debug.Log ("srcPointMat " + srcPointMat.dump ());
            //						Debug.Log ("dstPointMat " + dstPointMat.dump ());

                        return dstPointMat.toArray () [0];
                }
            }
		/// <summary>
		/// Computes the perimeter of the closed polygon defined by the contour points.
		/// </summary>
		/// <param name="a">Contour whose perimeter is computed.</param>
		float perimeter (MatOfPoint a)
		{
				List<Point> aList = a.toList ();

				float sum = 0, dx = 0, dy = 0;
			
				for (int i=0; i<aList.Count; i++) {
						int i2 = (i + 1) % aList.Count;
				
						dx = (float)aList [i].x - (float)aList [i2].x;
						dy = (float)aList [i].y - (float)aList [i2].y;
				
						sum += Mathf.Sqrt (dx * dx + dy * dy);
				}
			
				return sum;
		}
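As a quick sanity check, here is a hypothetical call to the perimeter helper above. It is a sketch under assumptions: it uses OpenCV for Unity's MatOfPoint/Point types already shown in these samples, and it is called from within the same class that defines perimeter.

		// hypothetical example: axis-aligned 100x50 rectangle, expected perimeter = 300
		MatOfPoint rectContour = new MatOfPoint (
				new Point (0, 0),
				new Point (100, 0),
				new Point (100, 50),
				new Point (0, 50));

		float p = perimeter (rectContour);
		Debug.Log ("perimeter " + p); // ~300
		rectContour.release ();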
        // Update is called once per frame
        void Update()
        {
            if (roiPointList.Count == 4) {

                #if ((UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR)
                //Touch
                int touchCount = Input.touchCount;
                if (touchCount == 1)
                {
                    Touch t = Input.GetTouch(0);
                    if(t.phase == TouchPhase.Ended && !EventSystem.current.IsPointerOverGameObject(t.fingerId)){
                        roiPointList.Clear ();
                    }
                }
                #else
                if (Input.GetMouseButtonUp (0) && !EventSystem.current.IsPointerOverGameObject()) {
                    roiPointList.Clear ();
                }
                #endif
            }

            if (roiPointList.Count < 4) {

                #if ((UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR)
                //Touch
                int touchCount = Input.touchCount;
                if (touchCount == 1)
                {
                    Touch t = Input.GetTouch(0);
                    if(t.phase == TouchPhase.Ended && !EventSystem.current.IsPointerOverGameObject(t.fingerId)){
                        roiPointList.Add (convertScreenPoint (new Point (t.position.x, t.position.y), gameObject, Camera.main));
                        //Debug.Log ("touch X " + t.position.x);
                        //Debug.Log ("touch Y " + t.position.y);

                        if (!(new OpenCVForUnity.Rect (0, 0, hsvMat.width (), hsvMat.height ()).contains (roiPointList [roiPointList.Count - 1]))) {
                            roiPointList.RemoveAt (roiPointList.Count - 1);
                        }
                    }
                }
                #else
                //Mouse
                if (Input.GetMouseButtonUp (0) && !EventSystem.current.IsPointerOverGameObject()) {

                    roiPointList.Add (convertScreenPoint (new Point (Input.mousePosition.x, Input.mousePosition.y), gameObject, Camera.main));
                    //                                              Debug.Log ("mouse X " + Input.mousePosition.x);
                    //                                              Debug.Log ("mouse Y " + Input.mousePosition.y);

                    if (!(new OpenCVForUnity.Rect (0, 0, hsvMat.width (), hsvMat.height ()).contains (roiPointList [roiPointList.Count - 1]))) {
                        roiPointList.RemoveAt (roiPointList.Count - 1);
                    }
                }
                #endif

                if (roiPointList.Count == 4) {
                    shouldStartCamShift = true;
                }
            }

            if (webCamTextureToMatHelper.IsPlaying () && webCamTextureToMatHelper.DidUpdateThisFrame ()) {

                Mat rgbaMat = webCamTextureToMatHelper.GetMat ();

                Imgproc.cvtColor (rgbaMat, hsvMat, Imgproc.COLOR_RGBA2RGB);
                Imgproc.cvtColor (hsvMat, hsvMat, Imgproc.COLOR_RGB2HSV);

                Point[] points = roiPointList.ToArray ();

                if (shouldStartCamShift) {
                    shouldStartCamShift = false;

                    using (MatOfPoint roiPointMat = new MatOfPoint (roiPointList.ToArray ())) {
                        roiRect = Imgproc.boundingRect (roiPointMat);
                    }

                    if (roiHistMat != null) {
                        roiHistMat.Dispose ();
                        roiHistMat = null;
                    }
                    roiHistMat = new Mat ();

                    using (Mat roiHSVMat = new Mat(hsvMat, roiRect))
                    using (Mat maskMat = new Mat ()) {
                        Imgproc.calcHist (new List<Mat> (new Mat[]{roiHSVMat}), new MatOfInt (0), maskMat, roiHistMat, new MatOfInt (16), new MatOfFloat (0, 180));
                        Core.normalize (roiHistMat, roiHistMat, 0, 255, Core.NORM_MINMAX);

                        //Debug.Log ("roiHist " + roiHistMat.ToString ());
                    }
                }else if (roiPointList.Count == 4) {
                    using (Mat backProj = new Mat ()) {
                        Imgproc.calcBackProject (new List<Mat> (new Mat[]{hsvMat}), new MatOfInt (0), roiHistMat, backProj, new MatOfFloat (0, 180), 1.0);

                        RotatedRect r = Video.CamShift (backProj, roiRect, termination);
                        r.points (points);
                    }
                }

                if (points.Length < 4) {
                    for (int i = 0; i < points.Length; i++) {
                        Imgproc.circle (rgbaMat, points [i], 6, new Scalar (0, 0, 255, 255), 2);
                    }

                } else {
                    for (int i = 0; i < 4; i++) {
                        Imgproc.line (rgbaMat, points [i], points [(i + 1) % 4], new Scalar (255, 0, 0, 255), 2);
                    }

                    Imgproc.rectangle (rgbaMat, roiRect.tl (), roiRect.br (), new Scalar (0, 255, 0, 255), 2);
                }

                Imgproc.putText (rgbaMat, "Please touch the 4 points surrounding the tracking object.", new Point (5, rgbaMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

            //              Imgproc.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

                Utils.matToTexture2D (rgbaMat, texture, webCamTextureToMatHelper.GetBufferColors());
            }
        }
        /// <summary>
        /// Runs the hand pose estimation process on the given RGBA frame.
        /// </summary>
        /// <param name="rgbaMat">Camera frame in RGBA format.</param>
        public void handPoseEstimationProcess(Mat rgbaMat)
        {
            //Imgproc.blur(mRgba, mRgba, new Size(5,5));
                        Imgproc.GaussianBlur (rgbaMat, rgbaMat, new OpenCVForUnity.Size (3, 3), 1, 1);
                        //Imgproc.medianBlur(mRgba, mRgba, 3);

                        if (!isColorSelected)
                                return;

                        // process the frame first, then read back the detected contours.
                        detector.process (rgbaMat);
                        List<MatOfPoint> contours = detector.getContours ();

            //						Debug.Log ("Contours count: " + contours.Count);

                        if (contours.Count <= 0) {
                                return;
                        }

                        RotatedRect rect = Imgproc.minAreaRect (new MatOfPoint2f (contours [0].toArray ()));

                        double boundWidth = rect.size.width;
                        double boundHeight = rect.size.height;
                        int boundPos = 0;

                        for (int i = 1; i < contours.Count; i++) {
                                rect = Imgproc.minAreaRect (new MatOfPoint2f (contours [i].toArray ()));
                                if (rect.size.width * rect.size.height > boundWidth * boundHeight) {
                                        boundWidth = rect.size.width;
                                        boundHeight = rect.size.height;
                                        boundPos = i;
                                }
                        }

                        OpenCVForUnity.Rect boundRect = Imgproc.boundingRect (new MatOfPoint (contours [boundPos].toArray ()));
                        Imgproc.rectangle (rgbaMat, boundRect.tl (), boundRect.br (), CONTOUR_COLOR_WHITE, 2, 8, 0);

            //						Debug.Log (
            //						" Row start [" +
            //								(int)boundRect.tl ().y + "] row end [" +
            //								(int)boundRect.br ().y + "] Col start [" +
            //								(int)boundRect.tl ().x + "] Col end [" +
            //								(int)boundRect.br ().x + "]");

                        double a = boundRect.br ().y - boundRect.tl ().y;
                        a = a * 0.7;
                        a = boundRect.tl ().y + a;

            //						Debug.Log (
            //						" A [" + a + "] br y - tl y = [" + (boundRect.br ().y - boundRect.tl ().y) + "]");

                        //Core.rectangle( mRgba, boundRect.tl(), boundRect.br(), CONTOUR_COLOR, 2, 8, 0 );
                        Imgproc.rectangle (rgbaMat, boundRect.tl (), new Point (boundRect.br ().x, a), CONTOUR_COLOR, 2, 8, 0);

                        MatOfPoint2f pointMat = new MatOfPoint2f ();
                        Imgproc.approxPolyDP (new MatOfPoint2f (contours [boundPos].toArray ()), pointMat, 3, true);
                        contours [boundPos] = new MatOfPoint (pointMat.toArray ());

                        MatOfInt hull = new MatOfInt ();
                        MatOfInt4 convexDefect = new MatOfInt4 ();
                        Imgproc.convexHull (new MatOfPoint (contours [boundPos].toArray ()), hull);

                        if (hull.toArray ().Length < 3)
                                return;

                        Imgproc.convexityDefects (new MatOfPoint (contours [boundPos].toArray ()), hull, convexDefect);

                        List<MatOfPoint> hullPoints = new List<MatOfPoint> ();
                        List<Point> listPo = new List<Point> ();
                        for (int j = 0; j < hull.toList().Count; j++) {
                                listPo.Add (contours [boundPos].toList () [hull.toList () [j]]);
                        }

                        MatOfPoint e = new MatOfPoint ();
                        e.fromList (listPo);
                        hullPoints.Add (e);

                        List<MatOfPoint> defectPoints = new List<MatOfPoint> ();
                        List<Point> listPoDefect = new List<Point> ();
                        for (int j = 0; j < convexDefect.toList().Count; j = j+4) {
                                Point farPoint = contours [boundPos].toList () [convexDefect.toList () [j + 2]];
                                int depth = convexDefect.toList () [j + 3];
                                if (depth > threasholdSlider.value && farPoint.y < a) {
                                        listPoDefect.Add (contours [boundPos].toList () [convexDefect.toList () [j + 2]]);
                                }
            //								Debug.Log ("defects [" + j + "] " + convexDefect.toList () [j + 3]);
                        }

                        MatOfPoint e2 = new MatOfPoint ();
                        e2.fromList (listPoDefect); // collect the defect (finger-valley) points, not the hull points
                        defectPoints.Add (e2);

            //						Debug.Log ("hull: " + hull.toList ());
            //						Debug.Log ("defects: " + convexDefect.toList ());

                        Imgproc.drawContours (rgbaMat, hullPoints, -1, CONTOUR_COLOR, 3);

            //                      int defectsTotal = (int)convexDefect.total();
            //						Debug.Log ("Defect total " + defectsTotal);

                        this.numberOfFingers = listPoDefect.Count;
                        if (this.numberOfFingers > 5)
                                this.numberOfFingers = 5;

            //						Debug.Log ("numberOfFingers " + numberOfFingers);

            //						Core.putText (mRgba, "" + numberOfFingers, new Point (mRgba.cols () / 2, mRgba.rows () / 2), Core.FONT_HERSHEY_PLAIN, 4.0, new Scalar (255, 255, 255, 255), 6, Core.LINE_AA, false);
                        numberOfFingersText.text = numberOfFingers.ToString ();

                        foreach (Point p in listPoDefect) {
                                Imgproc.circle (rgbaMat, p, 6, new Scalar (255, 0, 255, 255), -1);
                        }
        }
Example #43
0
#if LANG_JP
        /// <summary>
        /// 2値画像中の輪郭を検出します.
        /// </summary>
        /// <param name="image">入力画像,8ビット,シングルチャンネル.0以外のピクセルは 1として,0のピクセルは0のまま扱われます.
        /// また,この関数は,輪郭抽出処理中に入力画像 image の中身を書き換えます.</param>
        /// <param name="contours">検出された輪郭.各輪郭は,点のベクトルとして格納されます.</param>
        /// <param name="mode">輪郭抽出モード</param>
        /// <param name="method">輪郭の近似手法</param>
        /// <param name="offset">オプションのオフセット.各輪郭点はこの値の分だけシフトします.これは,ROIの中で抽出された輪郭を,画像全体に対して位置づけて解析する場合に役立ちます.</param>
#else
        /// <summary>
        /// Finds contours in a binary image.
        /// </summary>
        /// <param name="image">Source, an 8-bit single-channel image. Non-zero pixels are treated as 1’s. 
        /// Zero pixels remain 0’s, so the image is treated as binary.
        /// The function modifies the image while extracting the contours.</param> 
        /// <param name="contours">Detected contours. Each contour is stored as a vector of points.</param>
        /// <param name="mode">Contour retrieval mode</param>
        /// <param name="method">Contour approximation method</param>
        /// <param name="offset"> Optional offset by which every contour point is shifted. 
        /// This is useful if the contours are extracted from the image ROI and then they should be analyzed in the whole image context.</param>
#endif
        public static void FindContours(InputOutputArray image, out MatOfPoint[] contours,
            ContourRetrieval mode, ContourChain method, Point? offset = null)
        {
            if (image == null)
                throw new ArgumentNullException("image");
            image.ThrowIfNotReady();

            CvPoint offset0 = offset.GetValueOrDefault(new Point());
            IntPtr contoursPtr;
            NativeMethods.imgproc_findContours2_OutputArray(image.CvPtr, out contoursPtr, (int)mode, (int)method, offset0);

            using (VectorOfMat contoursVec = new VectorOfMat(contoursPtr))
            {
                contours = contoursVec.ToArray<MatOfPoint>();
            }

            image.Fix();
        }
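A minimal usage sketch for the FindContours overload above. It is a sketch under assumptions: the method is taken to be the Cv2.FindContours overload of this OpenCvSharp version, "binary.png" is a placeholder path for an 8-bit single-channel image (non-zero pixels are treated as 1's), and the 2.x-era names (Cv2.ImRead, LoadMode, ContourRetrieval, ContourChain) apply.

    using System;
    using OpenCvSharp;

    static class FindContoursDemo
    {
        static void Main()
        {
            // "binary.png" is a placeholder for an already-binarized 8-bit image.
            // Note that FindContours modifies the input image while extracting contours.
            using (Mat bin = Cv2.ImRead("binary.png", LoadMode.GrayScale))
            {
                MatOfPoint[] contours;
                Cv2.FindContours(bin, out contours, ContourRetrieval.External, ContourChain.ApproxSimple);

                Console.WriteLine("contours found: " + contours.Length);

                // each contour is a Mat-derived object and should be disposed when done
                foreach (MatOfPoint c in contours)
                    c.Dispose();
            }
        }
    }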
Example #44
0
		private Point[] fit (Mat image,
			    Point[] init,
			    OpenCVForUnity.Size ssize,
			    bool robust,
			    int itol,
			    double ftol)
		{
				int n = smodel.npts (); 
//		assert((int(init.size())==n) && (pmodel.n_patches()==n));
//				Debug.Log ("init.size())==n " + init.Length + " " + n);
//				Debug.Log ("pmodel.n_patches()==n " + pmodel.n_patches () + " " + n);
				smodel.calc_params (init, new Mat (), 3.0f);
				Point[] pts = smodel.calc_shape ();

				//find facial features in image around current estimates
				Point[] peaks = pmodel.calc_peaks (image, pts, ssize);

				//optimise
				if (!robust) {
						smodel.calc_params (peaks, new Mat (), 3.0f); //compute shape model parameters        
						pts = smodel.calc_shape (); //update shape
				} else {
						using (Mat weight = new Mat (n, 1, CvType.CV_32F))
						using (Mat weight_sort = new Mat (n, 1, CvType.CV_32F)) {
								Point[] pts_old = pts;
								for (int iter = 0; iter < itol; iter++) {
										//compute robust weight
										for (int i = 0; i < n; i++) {
												using (MatOfPoint tmpMat = new MatOfPoint (new Point (pts [i].x - peaks [i].x, pts [i].y - peaks [i].y))) {
														weight.put (i, 0, new float[]{(float)Core.norm (tmpMat)});
												}
										}

										Core.sort (weight, weight_sort, Core.SORT_EVERY_COLUMN | Core.SORT_ASCENDING);


										double var = 1.4826 * (float)weight_sort.get (n / 2, 0) [0];


										if (var < 0.1)
												var = 0.1;

										Core.pow (weight, 2, weight);


										Core.multiply (weight, new Scalar (-0.5 / (var * var)), weight);

										Core.exp (weight, weight);
						
										//compute shape model parameters    
										smodel.calc_params (peaks, weight, 3.0f);

						
										//update shape
										pts = smodel.calc_shape ();
						
										//check for convergence
										float v = 0;
										for (int i = 0; i < n; i++) {
												using (MatOfPoint tmpMat = new MatOfPoint (new Point (pts [i].x - pts_old [i].x, pts [i].y - pts_old [i].y))) {
														v += (float)Core.norm (tmpMat);
												}
										}
										if (v < ftol) {
												break;
										} else {
												pts_old = pts;
										}
								}
						}
				}
				return pts;

		}