/// <summary>
        /// Perform Delaunay triangulation on a given set of landmark points.
        /// </summary>
        /// <param name="points">The landmark points to use for triangulation.</param>
        /// <returns>A list of Triangle structures that each refer to a single triangle of landmark points.</returns>
        public static IEnumerable<Triangle> GetDelaunayTriangles(IEnumerable<Point> points)
        {
            // calculate the bounding box around the points
            var rect = Cv2.BoundingRect(points);

            // grow the box slightly so no point sits exactly on the boundary
            rect.Inflate(10, 10);

            // the Subdiv2D class handles Delaunay triangulation
            // first we add all points, and then start triangulation
            Vec6f[] triangles;
            using (var subdiv = new Subdiv2D(rect))
            {
                foreach (var p in points)
                {
                    var cv_p = new OpenCvSharp.Point2f(p.X, p.Y);
                    subdiv.Insert(cv_p);
                }
                triangles = subdiv.GetTriangleList();
            }

            // return the result as an enumeration of Triangle structs; the where clause
            // drops triangles that touch Subdiv2D's virtual outer vertices outside the box
            return from t in triangles
                   let p1 = new Point(t[0], t[1])
                   let p2 = new Point(t[2], t[3])
                   let p3 = new Point(t[4], t[5])
                   where rect.Contains(p1) && rect.Contains(p2) && rect.Contains(p3)
                   select new Triangle(p1, p2, p3);
        }
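A brief usage sketch for the method above. The four landmark points are invented for illustration, and `System`, `System.Collections.Generic`, and `System.Linq` are assumed to be imported:

        // hypothetical call site: triangulate four invented landmark points
        var landmarks = new List<Point>
        {
            new Point(10, 10), new Point(120, 15), new Point(60, 90), new Point(30, 140)
        };
        var triangles = GetDelaunayTriangles(landmarks).ToList();
        Console.WriteLine($"Triangulation produced {triangles.Count} triangles");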
Example #2
        public static void FindTriQuadrant()
        {
            Bitmap image     = (Bitmap)Image.FromFile("res3/supercruisetarget.png");
            Mat    source    = BitmapConverter.ToMat(image);
            Mat    sourceHSV = source.CvtColor(ColorConversionCodes.BGR2HSV);

            /* Paint.Net uses HSV [0..360], [0..100], [0..100].
             * OpenCV uses H: 0 - 180, S: 0 - 255, V: 0 - 255
             * Paint.NET colors:
             * 50   94  100     bright yellow
             * 27   93  90      orange
             * 24   91  74      brown
             * 16   73  25      almost background (low V)
             * suggested range [20..55], [80..100], [50..100] (paint.net)
             * suggested range [10..27], [200..255], [128..255] (OpenCV)
             */
            Mat mask = sourceHSV.InRange(InputArray.Create(new int[] { 10, 200, 128 }), InputArray.Create(new int[] { 27, 255, 255 }));
            Mat sourceHSVFiltered = new Mat();

            sourceHSV.CopyTo(sourceHSVFiltered, mask);
            Window w3         = new Window("yellowfilter", sourceHSVFiltered.CvtColor(ColorConversionCodes.HSV2BGR));
            Mat    sourceGrey = sourceHSVFiltered.Split()[2]; // Value channel is pretty good as a greyscale conversion
            Window w4         = new Window("yellowFilterValue", sourceGrey);

            CircleSegment[] circles2 = sourceGrey.HoughCircles(
                HoughMethods.Gradient,
                dp: 1f,       /* resolution scaling factor?  full resolution seems to work better */
                minDist: 100, /* if we find more than one then we go to the second analysis, the crosshair is probably blue as well*/
                param1: 100,  /* default was fine after experimentation */
                param2: 13,   /* required quality factor. 9 finds too many, 14 finds too few */
                minRadius: 40,
                maxRadius: 47);
            foreach (CircleSegment circle in circles2)
            {
                // draw the bounding square of each detected circle
                var radiusOffset = new OpenCvSharp.Point2f(circle.Radius, circle.Radius);
                source.Rectangle(circle.Center - radiusOffset, circle.Center + radiusOffset, new Scalar(0, 255, 0));
            }


            // locate the square pointer template via normalized cross-correlation
            Mat    templatepointer = new Mat("res3/squaretarget.png", ImreadModes.GrayScale);
            Mat    matches         = sourceGrey.MatchTemplate(templatepointer, TemplateMatchModes.CCoeffNormed);
            Window w6 = new Window("pointer", matches);

            // the best match sits at the maximum of the CCoeffNormed response
            OpenCvSharp.Point minloc, maxloc;
            matches.MinMaxLoc(out minloc, out maxloc);

            source.Rectangle(maxloc, maxloc + new OpenCvSharp.Point(templatepointer.Size().Width, templatepointer.Size().Height), new Scalar(255, 255, 0));

            Window w5 = new Window("result", source);
        }
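The HSV range conversion worked out in the comment block of Example #2 generalizes to a small helper. A minimal sketch (the helper name and the rounding are assumptions, not part of the original example): OpenCV halves the hue and scales saturation and value from 0-100 up to 0-255.

        // hypothetical helper: map a Paint.NET HSV triple (H 0-360, S 0-100, V 0-100)
        // onto OpenCV's scale (H 0-180, S 0-255, V 0-255)
        static int[] PaintNetToOpenCvHsv(double h, double s, double v)
        {
            return new[]
            {
                (int)Math.Round(h / 2.0),            // OpenCV stores hue in half-degrees
                (int)Math.Round(s * 255.0 / 100.0),
                (int)Math.Round(v * 255.0 / 100.0)
            };
        }

For instance, PaintNetToOpenCvHsv(20, 80, 50) gives roughly { 10, 204, 128 }, matching the lower bound passed to InRange above.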
Example #3
        // returns true when the spread of contour-to-center distances suggests a
        // defect, excluding contours classified as cracks on the glass (see below)
        static bool Distance_between_contour_and_center(OpenCvSharp.Point2f center, OpenCvSharp.Point[] contour)
        {
            double        diff       = 0;
            bool          glass_flag = false;
            List <double> diff_list  = new List <double>();
            List <int>    x_list     = new List <int>();
            List <int>    y_list     = new List <int>();
            int           x1         = (int)center.X;
            int           y1         = (int)center.Y;

            foreach (OpenCvSharp.Point contour_point in contour)
            {
                int x2 = contour_point.X;
                int y2 = contour_point.Y;
                x_list.Add(x2);
                y_list.Add(y2);
                diff_list.Add(Math.Sqrt(Math.Pow((x1 - x2), 2) + Math.Pow((y1 - y2), 2)));
            }

            int x_max = x_list.Max();
            int x_min = x_list.Min();
            int y_max = y_list.Max();
            int y_min = y_list.Min();

            // treat contours confined to the 630-710 pixel band as cracks on the glass
            if ((y_max < 710 && y_max > 630) && (y_min < 710 && y_min > 630))
            {
                glass_flag = true;
            }

            // spread between the farthest and nearest contour point from the center
            diff = diff_list.Max() - diff_list.Min();

            return !glass_flag && (diff > 10 || diff_list.Max() > 700);
        }
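A minimal call-site sketch, assuming the contour comes from Cv2.FindContours and the center is taken as the contour centroid via image moments (both assumptions; the original caller is not shown in this listing):

        // hypothetical usage: derive the center from the contour's moments
        // (assumes a non-degenerate contour, i.e. M00 != 0)
        Moments m = Cv2.Moments(contour);
        var center = new OpenCvSharp.Point2f((float)(m.M10 / m.M00), (float)(m.M01 / m.M00));
        bool looksLikeDefect = Distance_between_contour_and_center(center, contour);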
Example #4
 public static extern void imgproc_minEnclosingCircle_InputArray(IntPtr points, out Point2f center, out float radius);
Example #5
 /// <summary>
 /// Finds the subdivision vertex closest to the given point.
 /// </summary>
 /// <param name="pt">Input point.</param>
 /// <returns>Vertex ID.</returns>
 public int FindNearest(Point2f pt)
 {
     return FindNearest(pt, out var nearestPt);
 }
Example #6
 public static extern ExceptionStatus imgproc_Subdiv2D_getVertex(IntPtr obj, int vertex, out int firstEdge, out Point2f returnValue);
Example #7
 public static extern ExceptionStatus imgproc_Subdiv2D_locate(IntPtr obj, Point2f pt, out int edge, out int vertex, out int returnValue);
Example #8
 public static extern void core_FileStorage_shift_Point2f(IntPtr fs, Point2f val);
Example #9
 public static extern int imgproc_Subdiv2D_insert(IntPtr obj, Point2f pt);
Example #10
 /// <summary>
 /// Constructor
 /// </summary>
 /// <param name="center">The rectangle mass center.</param>
 /// <param name="size">Width and height of the rectangle.</param>
 /// <param name="angle">The rotation angle in degrees.</param>
 public RotatedRect(Point2f center, Size2f size, float angle)
 {
     Center = center;
     Size   = size;
     Angle  = angle;
 }
Example #11
#if LANG_JP
        /// <summary>
        /// 指定した点がこの矩形に含まれているかどうかを判断する
        /// </summary>
        /// <param name="pt">点</param>
        /// <returns></returns>
#else
        /// <summary>
        /// Determines if the specified point is contained within the rectangular region defined by this Rectangle.
        /// </summary>
        /// <param name="pt">point</param>
        /// <returns></returns>
#endif
        public bool Contains(Point2f pt)
        {
            return Contains(pt.X, pt.Y);
        }
Example #12
 public static extern ExceptionStatus core_Mat_push_back_Point2f(IntPtr self, Point2f v);
Example #13
 public static extern double imgproc_pointPolygonTest_Point2f(Point2f[] contour, int contourLength,
                                                              Point2f pt, int measureDist);
Example #14
 public static extern double imgproc_pointPolygonTest_InputArray(IntPtr contour, Point2f pt, int measureDist);
Example #15
 public static extern void imgproc_minEnclosingCircle_Point2f(Point2f[] points, int pointsLength,
                                                              out Point2f center, out float radius);
Example #16
#if LANG_JP
        /// <summary>
        /// 指定した点がこの矩形に含まれているかどうかを判断する
        /// </summary>
        /// <param name="pt">点</param>
        /// <returns></returns>
#else
        /// <summary>
        /// Determines if the specified point is contained within the rectangular region defined by this Rectangle.
        /// </summary>
        /// <param name="pt">point</param>
        /// <returns></returns>
#endif
        public readonly bool Contains(Point2f pt)
        {
            return Contains(pt.X, pt.Y);
        }
Example #17
#if LANG_JP
        /// <summary>
        /// 初期化
        /// </summary>
        /// <param name="center">円の中心</param>
        /// <param name="radius">半径</param>
#else
        /// <summary>
        /// Constructor
        /// </summary>
        /// <param name="center">center</param>
        /// <param name="radius">radius</param>
#endif
        public CircleSegment(Point2f center, float radius)
        {
            Center = center;
            Radius = radius;
        }
Example #18
 public static extern int imgproc_Subdiv2D_locate(IntPtr obj, Point2f pt, out int edge, out int vertex);
Example #19
 public static extern ExceptionStatus imgproc_Subdiv2D_insert1(IntPtr obj, Point2f pt, out int returnValue);
Example #20
 public static extern int imgproc_Subdiv2D_findNearest(IntPtr obj, Point2f pt, out Point2f nearestPt);
Example #21
 public static extern ExceptionStatus imgproc_Subdiv2D_findNearest(IntPtr obj, Point2f pt, out Point2f nearestPt, out int returnValue);
Example #22
 public static extern int imgproc_Subdiv2D_edgeOrg(IntPtr obj, int edge, out Point2f orgpt);
Example #23
 public static extern ExceptionStatus imgproc_Subdiv2D_edgeDst(IntPtr obj, int edge, out Point2f dstPt, out int returnValue);
Example #24
 public static extern int imgproc_Subdiv2D_edgeDst(IntPtr obj, int edge, out Point2f dstpt);
Example #25
 public static void Run(IntPtr ptr, Point2f v) => NativeMethods.core_Mat_push_back_Point2f(ptr, v);
Example #26
        /// <summary>
        /// Finds the subdivision vertex closest to the given point.
        /// </summary>
        /// <param name="pt">Input point.</param>
        /// <returns>Vertex ID.</returns>
        public int FindNearest(Point2f pt)
        {
            Point2f nearestPt;

            return FindNearest(pt, out nearestPt);
        }
Example #27
 public static extern void core_Mat_push_back_Point2f(IntPtr self, Point2f v);
Example #28
    public void work(OpenCvSharp.Mat frame)
    {
        // send the frame to the capturer (_tracker)
        byte[] data = new byte[frame.Total() * frame.Type().Channels];
        Marshal.Copy(frame.DataStart, data, 0, (int)data.Length);
        RawImage         ri_frame = new RawImage(frame.Width, frame.Height, RawImage.Format.FORMAT_BGR, data);
        List <RawSample> samples  = _tracker.capture(ri_frame);

        // clone the frame for drawing on it
        OpenCvSharp.Mat draw_image = frame.Clone();
        // handle each face on the frame separately
        for (int i = 0; i < samples.Count; ++i)
        {
            RawSample sample = samples[i];

            // get a face rectangle
            RawSample.Rectangle rectangle = sample.getRectangle();

            // set a point to place information for this face
            OpenCvSharp.Point2f text_point = new OpenCvSharp.Point2f(
                rectangle.x + rectangle.width + 3,
                rectangle.y + 10);

            const float text_line_height = 22;

            // draw facial points
            // red color for all points
            // green for left eye
            // yellow for right eye
            // (yes, there is a mess with left and right eyes in face_sdk api,
            // but if we fix it now we will lose compatibility with previous versions)
            if (_flag_points)
            {
                List <Point> points = sample.getLandmarks();

                for (int j = -2; j < points.Count; ++j)
                {
                    Point p =
                        j == -2 ?
                        sample.getLeftEye() :
                        j == -1 ?
                        sample.getRightEye() :
                        points[j];

                    OpenCvSharp.Scalar color =
                        j == -2 ?
                        new OpenCvSharp.Scalar(50, 255, 50) :
                        j == -1 ?
                        new OpenCvSharp.Scalar(50, 255, 255) :
                        new OpenCvSharp.Scalar(50, 50, 255);


                    OpenCvSharp.Cv2.Circle(
                        draw_image,
                        new OpenCvSharp.Point2f(p.x, p.y),
                        j < 0 ? 4 : 2,
                        color,
                        -1,
                        OpenCvSharp.LineTypes.AntiAlias);
                }
            }

            // draw rectangle
            if (_flag_positions)
            {
                OpenCvSharp.Cv2.Rectangle(
                    draw_image,
                    new OpenCvSharp.Rect(
                        rectangle.x,
                        rectangle.y,
                        rectangle.width,
                        rectangle.height),
                    new OpenCvSharp.Scalar(50, 50, 255),
                    2,
                    OpenCvSharp.LineTypes.AntiAlias);
            }

            // draw age and gender
            if (_flag_age_gender)
            {
                AgeGenderEstimator.AgeGender age_gender = _age_geder_estimator.estimateAgeGender(sample);

                string age_text = "age: ";

                switch (age_gender.age)
                {
                case AgeGenderEstimator.Age.AGE_KID: age_text += "kid    "; break;

                case AgeGenderEstimator.Age.AGE_YOUNG: age_text += "young  "; break;

                case AgeGenderEstimator.Age.AGE_ADULT: age_text += "adult  "; break;

                case AgeGenderEstimator.Age.AGE_SENIOR: age_text += "senior "; break;
                }

                age_text += string.Format("years: {0:G3}", age_gender.age_years);

                puttext(
                    draw_image,
                    age_text,
                    text_point);
                text_point.Y += text_line_height;

                puttext(
                    draw_image,
                    age_gender.gender == AgeGenderEstimator.Gender.GENDER_FEMALE ? "gender: female" :
                    age_gender.gender == AgeGenderEstimator.Gender.GENDER_MALE ? "gender: male" : "?",
                    text_point);
                text_point.Y += text_line_height;

                text_point.Y += text_line_height / 3;
            }

            // draw emotions
            if (_flag_emotions)
            {
                List <EmotionsEstimator.EmotionConfidence> emotions =
                    _emotions_estimator.estimateEmotions(sample);

                for (int j = 0; j < emotions.Count; ++j)
                {
                    EmotionsEstimator.Emotion emotion = emotions[j].emotion;
                    float confidence = emotions[j].confidence;

                    OpenCvSharp.Cv2.Rectangle(
                        draw_image,
                        new OpenCvSharp.Rect(
                            (int)text_point.X,
                            (int)text_point.Y - (int)text_line_height / 2,
                            (int)(100 * confidence),
                            (int)text_line_height),
                        emotion == EmotionsEstimator.Emotion.EMOTION_NEUTRAL  ? new OpenCvSharp.Scalar(255, 0, 0) :
                        emotion == EmotionsEstimator.Emotion.EMOTION_HAPPY    ? new OpenCvSharp.Scalar(0, 255, 0) :
                        emotion == EmotionsEstimator.Emotion.EMOTION_ANGRY    ? new OpenCvSharp.Scalar(0, 0, 255) :
                        emotion == EmotionsEstimator.Emotion.EMOTION_SURPRISE ? new OpenCvSharp.Scalar(0, 255, 255) :
                        new OpenCvSharp.Scalar(0, 0, 0),
                        -1);

                    puttext(
                        draw_image,
                        emotion == EmotionsEstimator.Emotion.EMOTION_NEUTRAL  ? "neutral" :
                        emotion == EmotionsEstimator.Emotion.EMOTION_HAPPY    ? "happy" :
                        emotion == EmotionsEstimator.Emotion.EMOTION_ANGRY    ? "angry" :
                        emotion == EmotionsEstimator.Emotion.EMOTION_SURPRISE ? "surprise" : "?",
                        text_point + new OpenCvSharp.Point2f(100, 0));

                    text_point.Y += text_line_height;

                    text_point.Y += text_line_height / 3;
                }
            }


            // draw angles text
            if (_flag_angles)
            {
                // round each angle to one decimal place before printing
                string yaw   = string.Format("yaw: {0}", 0.1f * (int)(10 * sample.getAngles().yaw + 0.5f));
                string pitch = string.Format("pitch: {0}", 0.1f * (int)(10 * sample.getAngles().pitch + 0.5f));
                string roll  = string.Format("roll: {0}", 0.1f * (int)(10 * sample.getAngles().roll + 0.5f));

                puttext(draw_image, yaw, text_point);
                text_point.Y += text_line_height;

                puttext(draw_image, pitch, text_point);
                text_point.Y += text_line_height;

                puttext(draw_image, roll, text_point);
                text_point.Y += text_line_height;

                text_point.Y += text_line_height / 3;
            }

            // draw angles vectors
            if (_flag_angles_vectors)
            {
                RawSample.Angles angles = sample.getAngles();

                float cos_a = (float)Math.Cos(angles.yaw * OpenCvSharp.Cv2.PI / 180);
                float sin_a = (float)Math.Sin(angles.yaw * OpenCvSharp.Cv2.PI / 180);

                float cos_b = (float)Math.Cos(angles.pitch * OpenCvSharp.Cv2.PI / 180);
                float sin_b = (float)Math.Sin(angles.pitch * OpenCvSharp.Cv2.PI / 180);

                float cos_c = (float)Math.Cos(angles.roll * OpenCvSharp.Cv2.PI / 180);
                float sin_c = (float)Math.Sin(angles.roll * OpenCvSharp.Cv2.PI / 180);

                OpenCvSharp.Point3f[] xyz =
                {
                    new OpenCvSharp.Point3f(cos_a * cos_c,        -sin_c, -sin_a),
                    new OpenCvSharp.Point3f(sin_c,         cos_b * cos_c, -sin_b),
                    new OpenCvSharp.Point3f(sin_a,         sin_b,         cos_a * cos_b)
                };

                OpenCvSharp.Point2f center = new OpenCvSharp.Point2f(
                    (sample.getLeftEye().x + sample.getRightEye().x) * 0.5f,
                    (sample.getLeftEye().y + sample.getRightEye().y) * 0.5f);

                float length = (rectangle.width + rectangle.height) * 0.3f;

                for (int c = 0; c < 3; ++c)
                {
                    OpenCvSharp.Cv2.Line(
                        draw_image,
                        center,
                        center + new OpenCvSharp.Point2f(xyz[c].X, -xyz[c].Y) * length,
                        c == 0 ? new OpenCvSharp.Scalar(50, 255, 255) :
                        c == 1 ? new OpenCvSharp.Scalar(50, 255, 50) :
                        c == 2 ? new OpenCvSharp.Scalar(50, 50, 255) : new OpenCvSharp.Scalar(),
                        2,
                        OpenCvSharp.LineTypes.AntiAlias);
                }
            }

            // draw quality text
            if (_flag_quality)
            {
                QualityEstimator.Quality q =
                    _quality_estimator.estimateQuality(sample);

                string lighting, noise, sharpness, flare;

                lighting = "lighting: " + q.lighting.ToString();
                puttext(draw_image, lighting, text_point);
                text_point.Y += text_line_height;

                noise = "noise: " + q.noise.ToString();
                puttext(draw_image, noise, text_point);
                text_point.Y += text_line_height;

                sharpness = "sharpness: " + q.sharpness.ToString();
                puttext(draw_image, sharpness, text_point);
                text_point.Y += text_line_height;

                flare = "flare: " + q.flare.ToString();
                puttext(draw_image, flare, text_point);
                text_point.Y += text_line_height;

                text_point.Y += text_line_height / 3;
            }

            // draw liveness text
            if (_flag_liveness)
            {
                // here we get or create the liveness estimator that works with this face
                int id = sample.getID();
                if (!id2le.ContainsKey(id))
                {
                    id2le[id] = _service.createLivenessEstimator();
                }

                LivenessEstimator le = id2le[id];

                // add information to the estimator
                le.addSample(sample);

                // get result
                LivenessEstimator.Liveness liveness = le.estimateLiveness();

                puttext(
                    draw_image,
                    "liveness: " + (
                        liveness == LivenessEstimator.Liveness.REAL ? "real" :
                        liveness == LivenessEstimator.Liveness.FAKE ? "fake" :
                        liveness == LivenessEstimator.Liveness.NOT_ENOUGH_DATA ? "not enough data" : "??"),
                    text_point);

                text_point.Y += text_line_height;
                text_point.Y += text_line_height / 3;
            }

            // draw face quality
            if (_flag_face_quality)
            {
                float quality = _face_quality_estimator.estimateQuality(sample);

                string ss = "face quality: " + quality.ToString();
                puttext(draw_image, ss, text_point);
                text_point.Y += text_line_height;
                text_point.Y += text_line_height / 3;
            }

            // draw face cuts
            for (int cut_i = 0; cut_i < 3; ++cut_i)
            {
                if ((cut_i == 0 && !_flag_cutting_base) ||
                    (cut_i == 1 && !_flag_cutting_full) ||
                    (cut_i == 2 && !_flag_cutting_token))
                {
                    continue;
                }

                puttext(
                    draw_image,
                    cut_i == 0 ? "base cut:" :
                    cut_i == 1 ? "full cut:" :
                    cut_i == 2 ? "token cut:" : "?? cut",
                    text_point);
                text_point.Y += text_line_height / 2;

                MemoryStream obuf = new MemoryStream();
                sample.cutFaceImage(
                    obuf,
                    RawSample.ImageFormat.IMAGE_FORMAT_BMP,
                    cut_i == 0 ? RawSample.FaceCutType.FACE_CUT_BASE :
                    cut_i == 1 ? RawSample.FaceCutType.FACE_CUT_FULL_FRONTAL :
                    cut_i == 2 ? RawSample.FaceCutType.FACE_CUT_TOKEN_FRONTAL :
                    (RawSample.FaceCutType) 999);

                byte[] sbuf = obuf.ToArray();


                OpenCvSharp.Mat img = OpenCvSharp.Cv2.ImDecode(sbuf, OpenCvSharp.ImreadModes.Unchanged);

                OpenCvSharp.Cv2.Resize(img, img, OpenCvSharp.Size.Zero, 0.3, 0.3);


                int img_rect_x = (int)Math.Max(0, -text_point.X);
                int img_rect_y = (int)Math.Max(0, -text_point.Y);

                int img_rect_width = (int)Math.Min(
                    img.Cols - img_rect_x,
                    draw_image.Cols - Math.Max(0, text_point.X));

                int img_rect_height = (int)Math.Min(
                    img.Rows - img_rect_y,
                    draw_image.Rows - Math.Max(0, text_point.Y));

                if (img_rect_width <= 0 || img_rect_height <= 0)
                {
                    continue;
                }

                OpenCvSharp.Rect img_rect = new OpenCvSharp.Rect(img_rect_x, img_rect_y, img_rect_width, img_rect_height);

                img[img_rect].CopyTo(
                    draw_image[new OpenCvSharp.Rect(
                                   (int)Math.Max(0, text_point.X),
                                   (int)Math.Max(0, text_point.Y),
                                   img_rect.Width,
                                   img_rect.Height)]);

                text_point.Y += text_line_height / 2;
                text_point.Y += img.Rows;


                text_point.Y += text_line_height / 3;
            }
        }
        // draw checkboxes
        for (int i = 0; i < flags_count; ++i)
        {
            OpenCvSharp.Rect rect  = flag_rect(i);
            OpenCvSharp.Rect rect2 = new OpenCvSharp.Rect(rect.X + 5, rect.Y + 5, rect.Width - 10, rect.Height - 10);

            OpenCvSharp.Cv2.Rectangle(draw_image, rect, OpenCvSharp.Scalar.All(255), -1);
            OpenCvSharp.Cv2.Rectangle(draw_image, rect, OpenCvSharp.Scalar.All(0), 2, OpenCvSharp.LineTypes.AntiAlias);

            if (get_flag(i))
            {
                OpenCvSharp.Cv2.Rectangle(draw_image, rect2, OpenCvSharp.Scalar.All(0), -1, OpenCvSharp.LineTypes.AntiAlias);
            }

            puttext(
                draw_image,
                flag_name(i),
                new OpenCvSharp.Point2f(rect.X + rect.Width + 3, rect.Y + rect.Height - 3));
        }


        // show the image with the drawn information
        OpenCvSharp.Cv2.ImShow("demo", draw_image);

        // register callback on mouse events
        OpenCvSharp.Cv2.SetMouseCallback("demo", (OpenCvSharp.CvMouseCallback)onMouse);
    }
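The example calls a puttext helper that is not shown in this listing. A plausible minimal reconstruction using Cv2.PutText (the font, scale, and color are assumptions, not the original values):

    static void puttext(OpenCvSharp.Mat image, string text, OpenCvSharp.Point2f position)
    {
        // draw light text at the given position (hypothetical reconstruction)
        OpenCvSharp.Cv2.PutText(
            image,
            text,
            new OpenCvSharp.Point((int)position.X, (int)position.Y),
            OpenCvSharp.HersheyFonts.HersheyDuplex,
            0.7,
            OpenCvSharp.Scalar.All(255));
    }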
Example #29
 public static extern ExceptionStatus core_FileStorage_shift_Point2f(IntPtr fs, Point2f val);
Example #30
 public static extern void imgproc_getRectSubPix(IntPtr image, Size patchSize, Point2f center, IntPtr patch, int patchType);