Esempio n. 1
0
        /// <summary>
        /// System.Drawing.BitmapからOpenCVのMatへ変換して返す.
        /// </summary>
        /// <param name="src">変換するSystem.Drawing.Bitmap</param>
        /// <returns>変換結果のMat</returns>
#else
        /// <summary>
        /// Converts System.Drawing.Bitmap to Mat
        /// </summary>
        /// <param name="src">System.Drawing.Bitmap object to be converted</param>
        /// <returns>A Mat object which is converted from System.Drawing.Bitmap</returns>
#endif
        public static Mat ToMat(this Bitmap src)
        {
            if (src == null)
                throw new ArgumentNullException("src");

            // Map the GDI+ pixel format onto an OpenCV channel count.
            int nChannels;
            switch (src.PixelFormat)
            {
                case PixelFormat.Format8bppIndexed:
                case PixelFormat.Format1bppIndexed:
                    nChannels = 1;
                    break;
                case PixelFormat.Format24bppRgb:
                case PixelFormat.Format32bppRgb:
                    nChannels = 3;
                    break;
                case PixelFormat.Format32bppArgb:
                case PixelFormat.Format32bppPArgb:
                    nChannels = 4;
                    break;
                default:
                    throw new NotImplementedException();
            }

            // Allocate the destination and let the two-argument overload copy the pixels.
            var dst = new Mat(src.Height, src.Width, MatType.CV_8UC(nChannels));
            ToMat(src, dst);
            return dst;
        }
        /// <summary>
        /// Runs chamfer matching of a template edge map against an image edge map
        /// via the native contrib_chamerMatching call, returning the matched
        /// contours and a cost for each match.
        /// </summary>
        /// <param name="img">Edge image to search in.</param>
        /// <param name="templ">Edge template to search for.</param>
        /// <param name="results">Output: one point array per match (matched template contour positions).</param>
        /// <param name="cost">Output: matching cost for each entry of <paramref name="results"/>.</param>
        /// <param name="templScale">Template scale factor passed to the native matcher.</param>
        /// <param name="maxMatches">Maximum number of matches to return.</param>
        /// <param name="minMatchDistance">Minimum distance between separate matches.</param>
        /// <param name="padX">Horizontal padding used by the native search.</param>
        /// <param name="padY">Vertical padding used by the native search.</param>
        /// <param name="scales">Number of scales the template is evaluated at.</param>
        /// <param name="minScale">Smallest template scale tried.</param>
        /// <param name="maxScale">Largest template scale tried.</param>
        /// <param name="orientationWeight">Weight of the orientation term in the matching cost.</param>
        /// <param name="truncate">Truncation value for the distance transform cost.</param>
        /// <returns>Return value of the native chamerMatching call — presumably the index
        /// of the best match, or negative when nothing matched; confirm against the
        /// OpenCV contrib documentation.</returns>
        public static int ChamferMatching(
            Mat img, Mat templ,
                                  out Point[][] results, out float[] cost,
                                  double templScale=1, int maxMatches = 20,
                                  double minMatchDistance = 1.0, int padX = 3,
                                  int padY = 3, int scales = 5, double minScale = 0.6, double maxScale = 1.6,
                                  double orientationWeight = 0.5, double truncate = 20)
        {
            if (img == null)
                throw new ArgumentNullException("img");
            if (templ == null)
                throw new ArgumentNullException("templ");
            img.ThrowIfDisposed();
            templ.ThrowIfDisposed();
            
            // Native output vectors; marshalled back to managed arrays below.
            using (var resultsVec = new VectorOfVectorPoint())
            using (var costVec = new VectorOfFloat())
            {
                int ret = NativeMethods.contrib_chamerMatching(
                    img.CvPtr, templ.CvPtr, resultsVec.CvPtr, costVec.CvPtr, 
                    templScale, maxMatches, minMatchDistance,
                    padX, padY, scales, minScale, maxScale, orientationWeight, truncate);
                // Keep the mats reachable until the native call has returned;
                // their last managed use above is the CvPtr read.
                GC.KeepAlive(img);
                GC.KeepAlive(templ);

                results = resultsVec.ToArray();
                cost = costVec.ToArray();

                return ret;
            }
        }
Esempio n. 3
0
        private static void Main(string[] args)
        {
            // CLAHE demo: equalize the same grayscale image under three settings
            // (clip limit 20, clip limit 40, clip limit 40 with a 4x4 tile grid).
            var src = new Mat("data/tsukuba_left.png", LoadMode.GrayScale);
            var resultClip20 = new Mat();
            var resultClip40 = new Mat();
            var resultTiles4 = new Mat();

            using (CLAHE clahe = Cv2.CreateCLAHE())
            {
                clahe.ClipLimit = 20;
                clahe.Apply(src, resultClip20);

                clahe.ClipLimit = 40;
                clahe.Apply(src, resultClip40);

                clahe.TilesGridSize = new Size(4, 4);
                clahe.Apply(src, resultTiles4);
            }

            Window.ShowImages(src, resultClip20, resultClip40, resultTiles4);

            /*var img1 = new IplImage("data/lenna.png", LoadMode.Color);
            var img2 = new IplImage("data/match2.png", LoadMode.Color);
            Surf(img1, img2);*/

            //Mat[] mats = StitchingPreprocess(400, 400, 10);
            //Stitching(mats);
            //Track();
            //Run();
        }
Esempio n. 4
0
        /// <summary>
        /// Decodes the most recently received JPEG on the UI thread, shows it via
        /// OpenCV's ImShow, and copies the pixels into the WriteableBitmap that
        /// backs Image_Main. Blocks until the dispatcher work item finishes.
        /// </summary>
        private void updateImage()
        {
            var p = this.Dispatcher.BeginInvoke(new Action(() =>
            {
                try
                {
                    this.jpegDec     = new JpegBitmapDecoder(this.m_receivedMemory, BitmapCreateOptions.PreservePixelFormat, BitmapCacheOption.None);
                    this.bitmapsorce = this.jpegDec.Frames[0];
                    if (m_WritableBitmap == null)
                    {
                        // Lazily create the presentation bitmap and pixel buffer on the first frame.
                        this.m_WritableBitmap  = new WriteableBitmap(this.bitmapsorce);
                        this.m_data            = new byte[this.bitmapsorce.PixelWidth * this.bitmapsorce.PixelHeight * this.bitmapsorce.Format.BitsPerPixel / 8];
                        this.Image_Main.Source = this.m_WritableBitmap;
                    }
                    // FIX: Mat(rows, cols, ...) takes height first, width second;
                    // the arguments were previously swapped (only harmless for square frames).
                    // NOTE(review): CV_8UC3 assumes 3 bytes/pixel — confirm the decoded
                    // frame's Format.BitsPerPixel is 24, otherwise the MatType should match it.
                    this.mat = new Mat(this.bitmapsorce.PixelHeight, this.bitmapsorce.PixelWidth, MatType.CV_8UC3, this.m_data);

                    // NOTE(review): ImShow runs before CopyPixels below, so the OpenCV
                    // window shows the buffer contents from the previous frame — confirm
                    // whether that one-frame lag is intended.
                    Cv2.ImShow("Test" + this.TitleName, this.mat);

                    this.bitmapsorce.CopyPixels(this.m_data, this.bitmapsorce.PixelWidth * this.bitmapsorce.Format.BitsPerPixel / 8, 0);
                    this.m_WritableBitmap.WritePixels(new Int32Rect(0, 0, this.m_WritableBitmap.PixelWidth, this.m_WritableBitmap.PixelHeight), this.m_data, this.bitmapsorce.PixelWidth * this.bitmapsorce.Format.BitsPerPixel / 8, 0);
                    this.bitmapsorce = null;
                }
                catch (Exception ex)
                {
                    // Decode/copy failures are logged and swallowed; the frame is dropped.
                    Console.WriteLine(ex.Message);
                }
            }));

            p.Wait();
        }
Esempio n. 5
0
        // Setup: acquire the default Kinect sensor, wire the body-index stream,
        // allocate the image buffers, and start the sensor.
        public KinectImage()
        #region
        {
            // Kinect sensor
            this.kinect = KinectSensor.GetDefault();
            
            // BodyIndexFrame handling: frame description, reader, and arrival callback.
            this.bodyIndexFrameDes = this.kinect.BodyIndexFrameSource.FrameDescription;
            this.bodyIndexFrameReader = this.kinect.BodyIndexFrameSource.OpenReader();
            this.bodyIndexFrameReader.FrameArrived += this.BodyIndexFrame_Arrived;
            // Image metadata derived from the body-index frame description.
            this.kinectImgPackage = new ShadowPackage();
            this.imageWidth =  this.bodyIndexFrameDes.Width;  // imgW;
            this.imageHeight = this.bodyIndexFrameDes.Height; // imgH;

            this.imageBytePerPixel = (int)this.bodyIndexFrameDes.BytesPerPixel;
            this.bitmapRec = new Int32Rect(0, 0, this.imageWidth, this.imageHeight);
            this.bitmapStride = (int)(this.imageWidth * this.imageBytePerPixel);
           
            // Raw buffer sized for one full body-index frame.
            this.bodyIndexBuffer = new byte[this.imageWidth *
                                                this.imageHeight * this.imageBytePerPixel];
            // Single-channel 8-bit Mat matching the body-index frame dimensions.
            this.kinectImage = new Mat(this.imageHeight, this.imageWidth, MatType.CV_8UC1);
            // Start the sensor.
            this.kinect.Open();
            
        }
Esempio n. 6
0
        private static Mat[] StitchingPreprocess(int width, int height, int count)
        {
            // Cut `count` random width x height patches out of the source image,
            // outlining each one in red on an annotated copy saved for inspection.
            Mat source = new Mat(@"C:\Penguins.jpg", LoadMode.Color);
            Mat annotated = source.Clone();

            var rng = new Random();
            var patches = new List<Mat>();
            var red = new Scalar(0, 0, 255);
            for (int i = 0; i < count; i++)
            {
                int left = rng.Next(source.Cols - width);
                int top = rng.Next(source.Rows - height);
                int right = left + width;
                int bottom = top + height;

                // Trace the patch outline edge by edge.
                annotated.Line(new Point(left, top), new Point(left, bottom), red);
                annotated.Line(new Point(left, bottom), new Point(right, bottom), red);
                annotated.Line(new Point(right, bottom), new Point(right, top), red);
                annotated.Line(new Point(right, top), new Point(left, top), red);

                // Clone so the patch owns its pixels instead of viewing `source`.
                Mat patch = source[new Rect(left, top, width, height)];
                patches.Add(patch.Clone());
                //string outFile = String.Format(@"C:\temp\stitching\{0:D3}.png", i);
                //m.SaveImage(outFile);
            }

            annotated.SaveImage(@"C:\temp\parts.png");
            using (new Window(annotated))
            {
                Cv.WaitKey();
            }

            return patches.ToArray();
        }
Esempio n. 7
0
        /// <summary>
        /// Cannyアルゴリズムを用いて,画像のエッジを検出します.
        /// </summary>
        /// <param name="image">8ビット,シングルチャンネルの入力画像</param>
        /// <param name="edges">出力されるエッジのマップ. image  と同じサイズ,同じ型</param>
        /// <param name="threshold1">ヒステリシスが存在する処理の,1番目の閾値</param>
        /// <param name="threshold2">ヒステリシスが存在する処理の,2番目の閾値</param>
        /// <param name="apertureSize">Sobelオペレータのアパーチャサイズ [既定値はApertureSize.Size3]</param>
        /// <param name="L2gradient">画像勾配の強度を求めるために,より精度の高い L2ノルムを利用するか,L1ノルムで十分(false)かを指定します. [既定値はfalse]</param>
#else
        /// <summary>
        /// Finds edges in an image using Canny algorithm.
        /// </summary>
        /// <param name="image">Single-channel 8-bit input image</param>
        /// <param name="edges">The output edge map. It will have the same size and the same type as image</param>
        /// <param name="threshold1">The first threshold for the hysteresis procedure</param>
        /// <param name="threshold2">The second threshold for the hysteresis procedure</param>
        /// <param name="apertureSize">Aperture size for the Sobel operator [By default this is ApertureSize.Size3]</param>
        /// <param name="L2gradient">Indicates, whether the more accurate L2 norm should be used to compute the image gradient magnitude (true), or a faster default L1 norm is enough (false). [By default this is false]</param>
#endif
        public static void Canny(Mat image, Mat edges, double threshold1, double threshold2, ApertureSize apertureSize = ApertureSize.Size3, bool L2gradient = false)
        {
            // Both mats are mandatory for the native call.
            if (image == null || edges == null)
                throw new ArgumentNullException(image == null ? "image" : "edges");

            CppInvoke.cv_Canny(image.CvPtr, edges.CvPtr, threshold1, threshold2, apertureSize, L2gradient);
        }
Esempio n. 8
0
 public Mat Filter(Mat src)
 {
     // Convolve src with this filter's kernel; the kernel Mat is temporary
     // and disposed here, while the filtered result is handed to the caller.
     var output = new Mat();
     using (var kernel = GetMat())
     {
         Cv2.Filter2D(src, output, MatType.CV_8U, kernel);
     }
     return output;
 }
Esempio n. 9
0
        /// <summary>
        /// 与えられたデータセットの最近傍探索インデックスを作成します.
        /// </summary>
        /// <param name="features">インデックス作成対象となる特徴(点)が格納された, CV_32F 型の行列.この行列のサイズは matrix is num _ features x feature _ dimensionality となります</param>
        /// <param name="params">params – インデックスパラメータを含む構造体.作成されるインデックスの種類は,このパラメータの種類に依存します</param>
#else
        /// <summary>
        /// Constructs a nearest neighbor search index for a given dataset.
        /// </summary>
        /// <param name="features">features – Matrix of type CV _ 32F containing the features(points) to index. The size of the matrix is num _ features x feature _ dimensionality.</param>
        /// <param name="params">Structure containing the index parameters. The type of index that will be constructed depends on the type of this parameter. </param>
#endif
        public Index(Mat features, IndexParams @params)
        {
            // Both arguments feed straight into the native constructor.
            if (features == null)
                throw new ArgumentNullException("features");
            if (@params == null)
                throw new ArgumentNullException("params");

            ptr = NativeMethods.flann_Index_construct(features.CvPtr, @params.CvPtr);

            // A zero handle means the native side failed to build the index.
            if (ptr == IntPtr.Zero)
                throw new OpenCvSharpException("Failed to create Index");
        }
Esempio n. 10
0
 public Task<Mat> FilterAsync(Mat src)
 {
     // Offload the 2D convolution to the thread pool; same work as Filter.
     return Task.Run(() =>
     {
         var output = new Mat();
         using (var kernel = GetMat())
         {
             Cv2.Filter2D(src, output, MatType.CV_8U, kernel);
         }
         return output;
     });
 }
Esempio n. 11
0
        // Ad-hoc exercise of several OpenCvSharp APIs plus an endless
        // memory-usage probe; this method never returns normally.
        private static void Run()
        {
            var dm = DescriptorMatcher.Create("BruteForce");
            dm.Clear();

            Console.WriteLine(Cv2.GetCudaEnabledDeviceCount());

            // Dump the registered algorithm names, then poke at SIFT's parameter API.
            string[] algoNames = Algorithm.GetList();
            Console.WriteLine(String.Join("\n", algoNames));

            SIFT al1 = Algorithm.Create<SIFT>("Feature2D.SIFT");
            string[] ppp = al1.GetParams();
            Console.WriteLine(ppp);
            var t = al1.ParamType("contrastThreshold");
            double d = al1.GetDouble("contrastThreshold");
            t.ToString();
            d.ToString();

            var src = new Mat("img/lenna.png");
            var rand = new Random();
            var memory = new List<long>(100);

            // Three identical ROI headers over the same image data.
            var a1 = new Mat(src, Rect.FromLTRB(0, 0, 30, 40));
            var a2 = new Mat(src, Rect.FromLTRB(0, 0, 30, 40));
            var a3 = new Mat(src, Rect.FromLTRB(0, 0, 30, 40));
            a3.ToString();

            // Infinite loop: repeatedly create SIFT objects and shuffle rows,
            // sampling working-set size every 100 iterations to watch for leaks.
            for (long i = 0;; i++)
            {
                SIFT a = Algorithm.Create<SIFT>("Feature2D.SIFT");
                a.ToString();

                for (int j = 0; j < 200; j++)
                {
                    int c1 = rand.Next(100, 400);
                    int c2 = rand.Next(100, 400);
                    // NOTE(review): Row[...] appears to return a header over shared
                    // data; this "swap" presumably copies via the setter — confirm.
                    Mat temp = src.Row[c1];
                    src.Row[c1] = src.Row[c2];
                    src.Row[c2] = temp;
                }

                memory.Add(MyProcess.WorkingSet64);
                if (memory.Count >= 100)
                {
                    // Report average working set in MB, then reset and force a GC.
                    double average = memory.Average();
                    Console.WriteLine("{0:F3}MB", average / 1024.0 / 1024.0);
                    memory.Clear();
                    GC.Collect();
                }
            }

        }
Esempio n. 12
0
 /// <summary>
 /// Wraps an existing Mat in a native MatExpr.
 /// </summary>
 /// <param name="mat">Matrix to wrap; must not be null.</param>
 internal MatExpr(Mat mat)
 {
     if (mat == null)
         throw new ArgumentNullException("mat");

     try
     {
         ptr = NativeMethods.core_MatExpr_new(mat.CvPtr);
     }
     catch (BadImageFormatException ex)
     {
         // Translate a native-binary mismatch into the library's own exception type.
         throw PInvokeHelper.CreateException(ex);
     }
 }
Esempio n. 13
0
        public static void ConvertMaps(Mat map1, Mat map2, Mat dstmap1, Mat dstmap2, MatrixType dstmap1type, bool nninterpolation = false)
        {
            // All four mats are mandatory for the native conversion.
            if (map1 == null) throw new ArgumentNullException("map1");
            if (map2 == null) throw new ArgumentNullException("map2");
            if (dstmap1 == null) throw new ArgumentNullException("dstmap1");
            if (dstmap2 == null) throw new ArgumentNullException("dstmap2");

            CppInvoke.cv_convertMaps(
                map1.CvPtr, map2.CvPtr,
                dstmap1.CvPtr, dstmap2.CvPtr,
                dstmap1type, nninterpolation);
        }
Esempio n. 14
0
        /// <summary>
        /// WriteableBitmapをMatに変換する
        /// </summary>
        /// <param name="src">変換するWriteableBitmap</param>
        /// <returns>OpenCvSharpで扱えるMat</returns>
#else
        /// <summary>
        /// Converts WriteableBitmap to Mat
        /// </summary>
        /// <param name="src">Input WriteableBitmap</param>
        /// <returns>IplImage</returns>
#endif
        public static Mat ToMat(this WriteableBitmap src)
        {
            if (src == null)
                throw new ArgumentNullException("src");

            // Pick the Mat element type that matches the WPF pixel format,
            // then delegate the pixel copy to the two-argument overload.
            MatType matType = GetOptimumType(src.Format);
            var result = new Mat(src.PixelHeight, src.PixelWidth, matType);
            ToMat(src, result);
            return result;
        }
Esempio n. 15
0
 /// <summary>
 /// Shows an image in the window with the given name.
 /// </summary>
 /// <param name="winname">Name of the window.</param>
 /// <param name="mat">Image to be shown.</param>
 public static void ImShow(string winname, Mat mat)
 {
     if (string.IsNullOrEmpty(winname))
         throw new ArgumentNullException("winname");
     if (mat == null)
         throw new ArgumentNullException("mat");

     try
     {
         NativeMethods.highgui_imshow(winname, mat.CvPtr);
     }
     catch (BadImageFormatException ex)
     {
         // Surface native-binary mismatches as the library's own exception.
         throw PInvokeHelper.CreateException(ex);
     }
 }
Esempio n. 16
0
 /// <summary>
 /// Loads an image from disk, runs the given spatial filter over it, and
 /// returns both the source and the filtered image as bitmaps.
 /// </summary>
 /// <param name="fileName">Path of the image file to load.</param>
 /// <param name="kernel">Filter kernel applied to the loaded image.</param>
 /// <returns>An ImageData holding the source and result bitmaps.</returns>
 public static ImageData Filter(string fileName, SpatialFilterKernel kernel)
 {
     // FIX: the original try/finally had an empty finally block and never
     // disposed the source Mat; dispose it deterministically once both
     // bitmaps have been produced.
     using (var src = new Mat(fileName))
     {
         var bmp = kernel.FilterWriteableBitmap(src);
         var images = new ImageData();
         images.SourceImage = src.ToWriteableBitmap();
         images.ResultImage = bmp;
         return images;
     }
 }
Esempio n. 17
0
        /// <summary>
        /// Converts a rotation vector to a rotation matrix using the Rodrigues transformation.
        /// </summary>
        /// <param name="vector">Input rotation vector (3x1).</param>
        /// <param name="matrix">Output rotation matrix (3x3).</param>
        /// <param name="jacobian">Output Jacobian matrix (3x9): partial derivatives of the output components with respect to the input components.</param>
        public static void Rodrigues(double[] vector, out double[,] matrix, out double[,] jacobian)
        {
            if (vector == null)
                throw new ArgumentNullException("vector");
            if (vector.Length != 3)
                throw new ArgumentException("vector.Length != 3");

            // Wrap the managed array in a 3x1 Mat header, call into native code,
            // then copy the native outputs back into managed rectangular arrays.
            using (var vecMat = new Mat(3, 1, MatType.CV_64FC1, vector))
            using (var rotMat = new MatOfDouble())
            using (var jacMat = new MatOfDouble())
            {
                NativeMethods.calib3d_Rodrigues_VecToMat(vecMat.CvPtr, rotMat.CvPtr, jacMat.CvPtr);
                matrix = rotMat.ToRectangularArray();
                jacobian = jacMat.ToRectangularArray();
            }
        }
Esempio n. 18
0
        /// <summary>
        /// Converts a rotation matrix to a rotation vector using the Rodrigues transformation.
        /// </summary>
        /// <param name="matrix">Input rotation matrix (3x3).</param>
        /// <param name="vector">Output rotation vector (3x1).</param>
        /// <param name="jacobian">Output Jacobian matrix (3x9): partial derivatives of the output components with respect to the input components.</param>
        public static void Rodrigues(double[,] matrix, out double[] vector, out double[,] jacobian)
        {
            if (matrix == null)
                throw new ArgumentNullException("matrix");
            if (matrix.GetLength(0) != 3 || matrix.GetLength(1) != 3)
                throw new ArgumentException("matrix must be double[3,3]");

            // Wrap the managed 3x3 array in a Mat header, call into native code,
            // then copy the native outputs back into managed arrays.
            using (var rotMat = new Mat(3, 3, MatType.CV_64FC1, matrix))
            using (var vecMat = new MatOfDouble())
            using (var jacMat = new MatOfDouble())
            {
                NativeMethods.calib3d_Rodrigues_MatToVec(rotMat.CvPtr, vecMat.CvPtr, jacMat.CvPtr);
                vector = vecMat.ToArray();
                jacobian = jacMat.ToRectangularArray();
            }
        }
Esempio n. 19
0
        /// <summary>
        /// Renders a camera frame with overlay rectangles into the stream picture
        /// box and updates the camera info label text.
        /// </summary>
        /// <param name="frame">Frame to display.</param>
        /// <param name="text">Status text shown in the camera info label.</param>
        public void SetStreamFrame(Mat frame, string text)
        {
            using (var bitmap = frame.ToBitmap())
            using (var graphics = Graphics.FromImage(bitmap))
            {
                var h = bitmap.Height / 2;
                var w = bitmap.Width / 2;

                // Overlay: crimson box around the configured main region,
                // chartreuse box over the lower-left area.
                graphics.DrawRectangle(Pens.Crimson, Config.MainX - Config.MainXDelta, h, 2 * Config.MainXDelta, h);
                graphics.DrawRectangle(Pens.Chartreuse, 0, h, w - 20, h);

                // FIX: dispose the previously displayed Bitmap; overwriting
                // streamBox.Image without disposing the old one leaks GDI handles
                // on every frame.
                var oldImage = streamBox.Image;
                streamBox.Image = new Bitmap(bitmap, streamBox.Size);
                if (oldImage != null)
                    oldImage.Dispose();
            }

            cameraInfo.Invoke((MethodInvoker)(() => cameraInfo.Text = text));
        }
Esempio n. 20
0
        /// <summary>
        /// Draws keypoints onto an output image.
        /// </summary>
        /// <param name="image">Source image.</param>
        /// <param name="keypoints">Keypoints to draw.</param>
        /// <param name="outImage">Output image the keypoints are drawn on.</param>
        /// <param name="color">Keypoint color; when null, Scalar.All(-1) is passed to the native side.</param>
        /// <param name="flags">Drawing options.</param>
        public static void DrawKeypoints(Mat image, IEnumerable<KeyPoint> keypoints, Mat outImage,
            Scalar? color = null, DrawMatchesFlags flags = DrawMatchesFlags.Default)
        {
            if (image == null)
                throw new ArgumentNullException("image");
            if (outImage == null)
                throw new ArgumentNullException("outImage");
            if (keypoints == null)
                throw new ArgumentNullException("keypoints");
            image.ThrowIfDisposed();
            outImage.ThrowIfDisposed();

            // Marshal the keypoints to an array and resolve the optional color.
            KeyPoint[] kpts = EnumerableEx.ToArray(keypoints);
            Scalar drawColor = color ?? Scalar.All(-1);
            NativeMethods.features2d_drawKeypoints(image.CvPtr, kpts, kpts.Length,
                outImage.CvPtr, drawColor, (int)flags);
        }
Esempio n. 21
0
        private static void Stitching(Mat[] images)
        {
            // Stitch the input images into a single panorama, save and show it,
            // then release every input Mat.
            var stitcher = Stitcher.CreateDefault(false);
            var pano = new Mat();

            Console.Write("Stitching 処理開始...");
            var status = stitcher.Stitch(images, pano);
            Console.WriteLine(" 完了 {0}", status);

            pano.SaveImage(@"C:\temp\pano.png");
            Window.ShowImages(pano);

            for (int i = 0; i < images.Length; i++)
            {
                images[i].Dispose();
            }
        }
Esempio n. 22
0
 // Constructor: acquires the default Kinect sensor, opens readers for the
 // color, body, depth and body-index streams, allocates their buffers,
 // and starts the sensor.
 public KinectDevice()
 {
     // Kinect sensor
     this.kinect = KinectSensor.GetDefault();
     // Stream configuration and frame-arrived handlers
     // Color image
     #region
     this.colorImageFormat = ColorImageFormat.Bgra;
     this.colorFrameDescription = this.kinect.ColorFrameSource.CreateFrameDescription(this.colorImageFormat);
     this.colorFrameReader = this.kinect.ColorFrameSource.OpenReader();
     this.colorFrameReader.FrameArrived += ColorFrame_Arrived;
     // Buffer sized for one full BGRA color frame.
     this.colors = new byte[this.colorFrameDescription.Width
                                    * this.colorFrameDescription.Height
                                    * this.colorFrameDescription.BytesPerPixel];
     #endregion
     // Body (skeleton) data
     #region
     this.bodyFrameReader = this.kinect.BodyFrameSource.OpenReader();
     this.bodyFrameReader.FrameArrived += BodyFrame_Arrived;
     #endregion
     // Depth data (original comment said 震度 "seismic intensity" — likely a typo for 深度 "depth")
     #region
     this.depthFrameReader = this.kinect.DepthFrameSource.OpenReader();
     this.depthFrameReader.FrameArrived += DepthFrame_Arrived;
     this.depthFrameDescription = this.kinect.DepthFrameSource.FrameDescription;
     this.depthBuffer = new ushort[this.depthFrameDescription.LengthInPixels];
     #endregion
     // Body index
     #region
     this.bodyIndexFrameDes = this.kinect.BodyIndexFrameSource.FrameDescription;
     this.bodyIndexFrameReader = this.kinect.BodyIndexFrameSource.OpenReader();
     this.bodyIndexFrameReader.FrameArrived += this.BodyIndexFrame_Arrived;
     this.bodyIndexBuffer = new byte[this.bodyIndexFrameDes.Width *
                                         this.bodyIndexFrameDes.Height * this.bodyIndexFrameDes.BytesPerPixel];
     #endregion
     // Image metadata and output Mat, then start the sensor.
     this.package = new ShadowPackage();
     this.imageWidth = this.bodyIndexFrameDes.Width; 
     this.imageHeight = this.bodyIndexFrameDes.Height; 
     this.imageBytePerPixel = (int)this.bodyIndexFrameDes.BytesPerPixel;
     this.kinectImage = new Mat(this.imageHeight, this.imageWidth, MatType.CV_8UC1);
     this.kinect.Open();
 }
Esempio n. 23
0
        private static void Surf(IplImage img1, IplImage img2)
        {
            // Convert the legacy IplImages to Mats (deep copies).
            var mat1 = new Mat(img1, true);
            var mat2 = new Mat(img2, true);

            // Detect keypoints and compute SURF descriptors for both images.
            var surf = new SURF(500, 4, 2, true);
            KeyPoint[] keypoints1, keypoints2;
            var descriptors1 = new MatOfFloat();
            var descriptors2 = new MatOfFloat();
            surf.Run(mat1, null, out keypoints1, descriptors1);
            surf.Run(mat2, null, out keypoints2, descriptors2);

            // Match descriptor vectors with a brute-force L2 matcher.
            var matcher = new BFMatcher(NormType.L2, false);
            DMatch[] matches = matcher.Match(descriptors1, descriptors2); // original author noted an exception occurred here
            var view = new Mat();
            Cv2.DrawMatches(mat1, keypoints1, mat2, keypoints2, matches, view);

            Window.ShowImages(view);
        }
Esempio n. 24
0
        /// <summary>
        /// 高速なマルチスケール Hesian 検出器を用いて keypoint を検出します.
        /// </summary>
        /// <param name="img"></param>
        /// <param name="mask"></param>
        /// <returns></returns>
#else
        /// <summary>
        /// detects keypoints using fast multi-scale Hessian detector
        /// </summary>
        /// <param name="img"></param>
        /// <param name="mask"></param>
        /// <returns></returns>
#endif
        public KeyPoint[] Extract(Mat img, Mat mask)
        {
            if (img == null)
                throw new ArgumentNullException("img");

            // The legacy C API operates on CvMat headers; mask is optional.
            CvMat imgMat = img.ToCvMat();
            CvMat maskMat = (mask == null) ? null : mask.ToCvMat();

            CvSURFPoint[] keypoints;
            float[][] descriptors;
            Cv.ExtractSURF(imgMat, maskMat, out keypoints, out descriptors, this);

            // Repackage each CvSURFPoint as a KeyPoint.
            var result = new KeyPoint[keypoints.Length];
            for (int i = 0; i < keypoints.Length; i++)
            {
                CvSURFPoint p = keypoints[i];
                result[i] = new KeyPoint(p.Pt, (float) p.Size, p.Dir, p.Hessian, GetPointOctave(p, this));
            }
            return result;
        }
Esempio n. 25
0
        /// <summary>
        /// Performs object detection with a multi-scale window.
        /// </summary>
        /// <param name="img">Source image. CV_8UC1 and CV_8UC4 types are supported for now.</param>
        /// <param name="foundWeights">Output: confidence weight for each returned rectangle.</param>
        /// <param name="hitThreshold">Threshold for the distance between features and SVM classifying plane.</param>
        /// <param name="winStride">Window stride. It must be a multiple of block stride.</param>
        /// <param name="padding">Mock parameter to keep the CPU interface compatibility. It must be (0,0).</param>
        /// <param name="scale">Coefficient of the detection window increase.</param>
        /// <param name="groupThreshold">Coefficient to regulate the similarity threshold. 
        /// When detected, some objects can be covered by many rectangles. 0 means not to perform grouping.</param>
        /// <returns>Detected objects boundaries.</returns>
        public virtual Rect[] DetectMultiScale(Mat img, out double[] foundWeights,
            double hitThreshold = 0, Size? winStride = null, Size? padding = null, double scale = 1.05, int groupThreshold = 2)
        {
            if (disposed)
                throw new ObjectDisposedException("HOGDescriptor");
            if (img == null)
                throw new ArgumentNullException("img");
            img.ThrowIfDisposed();

            Size winStride0 = winStride.GetValueOrDefault(new Size());
            Size padding0 = padding.GetValueOrDefault(new Size());
            using (var flVec = new VectorOfRect())
            using (var foundWeightsVec = new VectorOfDouble())
            {
                NativeMethods.objdetect_HOGDescriptor_detectMultiScale(ptr, img.CvPtr, flVec.CvPtr, foundWeightsVec.CvPtr,
                    hitThreshold, winStride0, padding0, scale, groupThreshold);
                // FIX: keep img reachable until the native call has returned; its last
                // managed use above is the CvPtr read, after which the GC could
                // otherwise collect it while native code still uses the pointer
                // (same pattern as ChamferMatching elsewhere in this file).
                GC.KeepAlive(img);
                foundWeights = foundWeightsVec.ToArray();
                return flVec.ToArray();
            }
        }
Esempio n. 26
0
        /// <summary>
        /// Performs object detection without a multi-scale window.
        /// </summary>
        /// <param name="img">Source image. CV_8UC1 and CV_8UC4 types are supported for now.</param>
        /// <param name="weights">Output: confidence weight for each returned point.</param>
        /// <param name="hitThreshold">Threshold for the distance between features and SVM classifying plane. 
        /// Usually it is 0 and should be specfied in the detector coefficients (as the last free coefficient). 
        /// But if the free coefficient is omitted (which is allowed), you can specify it manually here.</param>
        /// <param name="winStride">Window stride. It must be a multiple of block stride.</param>
        /// <param name="padding">Mock parameter to keep the CPU interface compatibility. It must be (0,0).</param>
        /// <param name="searchLocations">Optional explicit locations to evaluate; null evaluates the whole image.</param>
        /// <returns>Left-top corner points of detected objects boundaries.</returns>
        public virtual Point[] Detect(Mat img, out double[] weights, 
            double hitThreshold = 0, Size? winStride = null, Size? padding = null, Point[] searchLocations = null)
        {
            if (disposed)
                throw new ObjectDisposedException("HOGDescriptor");
            if (img == null)
                throw new ArgumentNullException("img");
            img.ThrowIfDisposed();

            Size winStride0 = winStride.GetValueOrDefault(new Size());
            Size padding0 = padding.GetValueOrDefault(new Size());
            using (var flVec = new VectorOfPoint())
            using (var weightsVec = new VectorOfDouble())
            {
                int slLength = (searchLocations != null) ? searchLocations.Length : 0;
                NativeMethods.objdetect_HOGDescriptor_detect(ptr, img.CvPtr, flVec.CvPtr, weightsVec.CvPtr,
                    hitThreshold, winStride0, padding0, searchLocations, slLength);
                // FIX: keep img reachable until the native call has returned; its last
                // managed use above is the CvPtr read (same pattern as ChamferMatching
                // elsewhere in this file).
                GC.KeepAlive(img);
                weights = weightsVec.ToArray();
                return flVec.ToArray();
            }
        }
Esempio n. 27
0
        /// <summary>
        /// Computes the HOG descriptor vector for the given image.
        /// </summary>
        /// <param name="img">Source image.</param>
        /// <param name="winStride">Window stride; passed straight to the native call (Size() when null).</param>
        /// <param name="padding">Padding; passed straight to the native call (Size() when null).</param>
        /// <param name="locations">Optional locations at which to evaluate; null evaluates the whole image.</param>
        /// <returns>The computed descriptor values.</returns>
        public virtual float[] Compute(Mat img, Size? winStride = null, Size? padding = null, Point[] locations = null)
        {
            if (disposed)
                throw new ObjectDisposedException("HOGDescriptor");
            if (img == null)
                throw new ArgumentNullException("img");
            // FIX: reject disposed mats up front, consistent with Detect and
            // DetectMultiScale in this class.
            img.ThrowIfDisposed();

            Size winStride0 = winStride.GetValueOrDefault(new Size());
            Size padding0 = padding.GetValueOrDefault(new Size());
            using (var flVec = new VectorOfFloat())
            {
                int length = (locations != null) ? locations.Length : 0;
                NativeMethods.objdetect_HOGDescriptor_compute(ptr, img.CvPtr, flVec.CvPtr, winStride0, padding0, locations, length);
                // FIX: keep img reachable until the native call has returned; its last
                // managed use above is the CvPtr read (same pattern as ChamferMatching
                // elsewhere in this file).
                GC.KeepAlive(img);
                return flVec.ToArray();
            }
        }
Esempio n. 28
0
 /// <summary>
 /// Creates an indexer over the given matrix's data.
 /// </summary>
 /// <param name="parent">Mat whose underlying buffer this indexer accesses.</param>
 internal Indexer(Mat parent)
     : base(parent)
 {
     // Cache the raw data pointer for direct (unsafe) element access.
     this.ptr = (byte *)parent.Data.ToPointer();
 }
Esempio n. 29
0
        /// <summary>
        /// Matオブジェクトから初期化
        /// </summary>
        /// <param name="mat">Matオブジェクト</param>
#else
        /// <summary>
        /// Initializes by Mat object
        /// </summary>
        /// <param name="mat">Managed Mat object</param>
#endif
        public MatOfByte(Mat mat)
            : base(mat)
        {
            // Delegates entirely to the Mat-taking base constructor; no extra state.
        }
Esempio n. 30
0
 /// <summary>
 /// Creates/Sets a matrix header for the specified row/column span.
 /// </summary>
 /// <param name="start">Start index of the span.</param>
 /// <param name="end">End index of the span.</param>
 /// <param name="value">Mat assigned to the span.</param>
 public virtual void Set(int start, int end, Mat value)
 {
     // Delegates to the two-argument indexer's setter.
     this[start, end] = value;
 }
Esempio n. 31
0
        /// <summary>
        /// evaluate specified ROI and return confidence value for each location in multiple scales
        /// </summary>
        /// <param name="img">Source image; must not be null or disposed.</param>
        /// <param name="foundLocations">Receives the detected rectangles.</param>
        /// <param name="locations">Receives one DetectionROI per scale (scale, locations, confidences).</param>
        /// <param name="hitThreshold">Threshold forwarded to the native detector.</param>
        /// <param name="groupThreshold">Grouping threshold forwarded to the native detector.</param>
        public void DetectMultiScaleROI(
            Mat img,
            out Rect[] foundLocations,
            out DetectionROI[] locations,
            double hitThreshold = 0,
            int groupThreshold = 0)
        {
            if (disposed)
                throw new ObjectDisposedException("HOGDescriptor");
            if (img == null)
                throw new ArgumentNullException("img");
            img.ThrowIfDisposed();

            using (var foundVec = new VectorOfRect())
            using (var scaleVec = new VectorOfDouble())
            using (var pointsVec = new VectorOfVectorPoint())
            using (var confVec = new VectorOfVectorDouble())
            {
                NativeMethods.objdetect_HOGDescriptor_detectMultiScaleROI(
                    ptr, img.CvPtr, foundVec.CvPtr,
                    scaleVec.CvPtr, pointsVec.CvPtr, confVec.CvPtr,
                    hitThreshold, groupThreshold);
                foundLocations = foundVec.ToArray();

                double[] scales = scaleVec.ToArray();
                Point[][] points = pointsVec.ToArray();
                double[][] confidences = confVec.ToArray();

                // The three native result vectors must be parallel arrays;
                // anything else indicates corrupted native output.
                if (scales.Length != points.Length || points.Length != confidences.Length)
                    throw new OpenCvSharpException("Invalid result data 'locations'");

                int count = scales.Length;
                locations = new DetectionROI[count];
                for (int i = 0; i < count; i++)
                {
                    var roi = new DetectionROI();
                    roi.Scale = scales[i];
                    roi.Locations = points[i];
                    roi.Confidences = confidences[i];
                    locations[i] = roi;
                }
            }
        }
Esempio n. 32
0
 /// <summary>
 /// Creates/Sets a matrix header for the specified matrix row/column.
 /// </summary>
 /// <param name="pos">Index of the row/column to set.</param>
 /// <param name="value">Mat assigned at that position.</param>
 public virtual void Set(int pos, Mat value)
 {
     // Delegates to the single-argument indexer's setter.
     this[pos] = value;
 }
Esempio n. 33
0
        /// <summary>
        /// Matオブジェクトから初期化
        /// </summary>
        /// <param name="mat">Matオブジェクト</param>
#else
        /// <summary>
        /// Initializes by Mat object
        /// </summary>
        /// <param name="mat">Managed Mat object</param>
#endif
        public MatOfInt4(Mat mat)
            : base(mat)
        {
            // Delegates entirely to the Mat-taking base constructor; no extra state.
        }
Esempio n. 34
0
        /// <summary>
        /// Matオブジェクトから初期化
        /// </summary>
        /// <param name="mat">Matオブジェクト</param>
#else
        /// <summary>
        /// Initializes by Mat object
        /// </summary>
        /// <param name="mat">Managed Mat object</param>
#endif
        public MatOfUShort(Mat mat)
            : base(mat)
        {
            // Fixed: previously called base(mat.CvPtr), handing the raw native
            // pointer to the IntPtr-taking base constructor. Every sibling
            // wrapper (MatOfByte, MatOfInt4, MatOfDMatch) passes the managed
            // Mat instead; routing through base(mat) keeps ownership handling
            // consistent. NOTE(review): confirm base(IntPtr) did not intentionally
            // share the native handle here.
        }
Esempio n. 35
0
        /// <summary>
        /// Matオブジェクトから初期化
        /// </summary>
        /// <param name="mat">Matオブジェクト</param>
#else
        /// <summary>
        /// Initializes by Mat object
        /// </summary>
        /// <param name="mat">Managed Mat object</param>
#endif
        public MatOfDMatch(Mat mat)
            : base(mat)
        {
            // Delegates entirely to the Mat-taking base constructor; no extra state.
        }
Esempio n. 36
0
        /// <summary>
        /// Computes gradient data for the given image via the native
        /// HOGDescriptor computeGradient implementation.
        /// </summary>
        /// <param name="img">Source image; must not be null or disposed.</param>
        /// <param name="grad">Destination Mat for gradient data; must not be null or disposed.</param>
        /// <param name="angleOfs">Destination Mat for angle/offset data; must not be null or disposed.</param>
        /// <param name="paddingTL">Top-left padding; an empty Size when omitted.</param>
        /// <param name="paddingBR">Bottom-right padding; an empty Size when omitted.</param>
        public virtual void ComputeGradient(Mat img, Mat grad, Mat angleOfs, Size? paddingTL = null, Size? paddingBR = null)
        {
            // Validation order matches the established pattern: disposed state,
            // then all null checks, then all disposed-Mat checks.
            if (disposed)
                throw new ObjectDisposedException("HOGDescriptor");
            if (img == null)
                throw new ArgumentNullException("img");
            if (grad == null)
                throw new ArgumentNullException("grad");
            if (angleOfs == null)
                throw new ArgumentNullException("angleOfs");
            img.ThrowIfDisposed();
            grad.ThrowIfDisposed();
            angleOfs.ThrowIfDisposed();

            var tl = paddingTL.GetValueOrDefault(new Size());
            var br = paddingBR.GetValueOrDefault(new Size());
            NativeMethods.objdetect_HOGDescriptor_computeGradient(
                ptr, img.CvPtr, grad.CvPtr, angleOfs.CvPtr, tl, br);
        }
Esempio n. 37
0
 /// <summary>
 /// Initializes the row/column indexer with the Mat it operates on.
 /// </summary>
 /// <param name="parent">The Mat whose rows/columns this indexer exposes.</param>
 protected internal MatRowColIndexer(Mat parent)
 {
     this.parent = parent;
 }
Esempio n. 38
0
        /// <summary>
        /// evaluate specified ROI and return confidence value for each location
        /// </summary>
        /// <param name="img">Source image; must not be null or disposed.</param>
        /// <param name="locations">Candidate locations to evaluate; must not be null.</param>
        /// <param name="foundLocations">Receives the locations reported by the native detector.</param>
        /// <param name="confidences">Receives the confidence value for each reported location.</param>
        /// <param name="hitThreshold">Threshold forwarded to the native detector.</param>
        /// <param name="winStride">Window stride; an empty Size when omitted.</param>
        /// <param name="padding">Padding; an empty Size when omitted.</param>
        public void DetectROI(
            Mat img, Point[] locations, out Point[] foundLocations, out double[] confidences,
            double hitThreshold = 0, Size? winStride = null, Size? padding = null)
        {
            if (disposed)
                throw new ObjectDisposedException("HOGDescriptor");
            if (img == null)
                throw new ArgumentNullException("img");
            if (locations == null)
                throw new ArgumentNullException("locations");
            img.ThrowIfDisposed();

            var stride = winStride.GetValueOrDefault(new Size());
            var pad = padding.GetValueOrDefault(new Size());

            using (var foundVec = new VectorOfPoint())
            using (var confVec = new VectorOfDouble())
            {
                NativeMethods.objdetect_HOGDescriptor_detectROI(
                    ptr, img.CvPtr, locations, locations.Length,
                    foundVec.CvPtr, confVec.CvPtr, hitThreshold, stride, pad);
                foundLocations = foundVec.ToArray();
                confidences = confVec.ToArray();
            }
        }
Esempio n. 39
0
 /// <summary>
 /// Creates/Sets a matrix header for the specified row/column span.
 /// </summary>
 /// <param name="range">Range whose Start/End delimit the span.</param>
 /// <param name="value">Mat assigned to the span.</param>
 public virtual void Set(Range range, Mat value)
 {
     // Delegates to the two-argument indexer using the range bounds.
     this[range.Start, range.End] = value;
 }
Esempio n. 40
0
        /// <summary>
        /// StarDetectorアルゴリズムによりキーポイントを取得する
        /// </summary>
        /// <param name="image">8ビット グレースケールの入力画像</param>
        /// <returns></returns>
#else
        /// <summary>
        /// Retrieves keypoints using the StarDetector algorithm.
        /// </summary>
        /// <param name="image">The input 8-bit grayscale image</param>
        /// <returns></returns>
#endif
        public KeyPoint[] Run(Mat image)
        {
            if (image == null)
                throw new ArgumentNullException("image");
            image.ThrowIfDisposed();

            // Native call returns a pointer to a keypoint vector that the
            // managed wrapper takes ownership of and disposes after copying.
            IntPtr keyPointsPtr;
            NativeMethods.features2d_StarDetector_detect(ptr, image.CvPtr, out keyPointsPtr);

            using (var keyPointsVec = new VectorOfKeyPoint(keyPointsPtr))
            {
                return keyPointsVec.ToArray();
            }
        }
Esempio n. 41
0
 /// <summary>
 /// Detects keypoints in the given image, forwarding to the base detector.
 /// </summary>
 /// <param name="image">Input image.</param>
 /// <param name="mask">Mask passed through to the base Detect call.</param>
 /// <returns>The detected keypoints.</returns>
 public KeyPoint[] Run(Mat image, Mat mask)
 {
     ThrowIfDisposed();
     // Thin wrapper: all detection work happens in the base class.
     return base.Detect(image, mask);
 }