Example #1
            public ImageData(string path, string correspondingValue, Emgu.CV.Features2D.ORBDetector orbDetector, KAZE featureDetector)
            {
                this.path  = path;
                this.image = new Image<Bgr, byte>(path);

                // Detect ORB keypoints and compute their descriptors on the loaded image.
                orbDetector.DetectAndCompute(image, null, keyPointsOrb, descriptorOrb, false);

                // Run KAZE on a UMat view of the image so OpenCL acceleration can be used when available.
                UMat uObservedImage = image.Mat.GetUMat(AccessType.Read);
                featureDetector.DetectAndCompute(uObservedImage, null, keyPointsSurf, descriptorSurf, false);

                this.correspondingValue = correspondingValue;
            }
Example #2
 // Native entry point that creates a CUDA ORB detector; the Feature2D and Feature2DAsync handles are returned through the ref parameters.
 internal static extern IntPtr cveCudaORBCreate(
    int numberOfFeatures, 
    float scaleFactor, 
    int nLevels, 
    int edgeThreshold, 
    int firstLevel, 
    int WTA_K, 
    ORBDetector.ScoreType scoreType, 
    int patchSize, 
    int fastThreshold, 
    [MarshalAs(CvInvoke.BoolMarshalType)]
    bool blurForDescriptor, 
    ref IntPtr feature2D, 
    ref IntPtr feature2DAsync);
Example #3
 /// <summary>
 /// Create a CudaORBDetector using the specified values.
 /// </summary>
 /// <param name="numberOfFeatures">The number of desired features.</param>
 /// <param name="scaleFactor">Coefficient by which we divide the dimensions from one scale pyramid level to the next.</param>
 /// <param name="nLevels">The number of levels in the scale pyramid.</param>
 /// <param name="firstLevel">The level at which the image is given. If 1, that means we will also look at the image.<paramref name="scaleFactor"/> times bigger</param>
 /// <param name="edgeThreshold">How far from the boundary the points should be.</param>
 /// <param name="WTK_A">How many random points are used to produce each cell of the descriptor (2, 3, 4 ...).</param>
 /// <param name="scoreType">Type of the score to use.</param>
 /// <param name="patchSize">Patch size.</param>
 /// <param name="blurForDescriptor">Blur for descriptor</param>
 /// <param name="fastThreshold">Fast threshold</param>
 public CudaORBDetector(
    int numberOfFeatures = 500, 
    float scaleFactor = 1.2f, 
    int nLevels = 8, 
    int edgeThreshold = 31, 
    int firstLevel = 0, 
    int WTK_A = 2, 
    ORBDetector.ScoreType scoreType = ORBDetector.ScoreType.Harris, 
    int patchSize = 31,
    int fastThreshold = 20, 
    bool blurForDescriptor = false)
 {
    _ptr = CudaInvoke.cveCudaORBCreate(
       numberOfFeatures, scaleFactor, nLevels, edgeThreshold, firstLevel, WTK_A, scoreType, patchSize, fastThreshold, blurForDescriptor,
       ref _feature2D, ref _feature2DAsyncPtr);
 }
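A minimal usage sketch of the constructor above, for context. It assumes a CUDA-capable Emgu CV build, that CudaORBDetector exposes Feature2D's DetectAndCompute (the feature2D handle returned by cveCudaORBCreate above suggests it does), and a purely illustrative file name "scene.jpg".

 // Sketch only: the CUDA build and "scene.jpg" are assumptions, not part of the examples above.
 using System;
 using Emgu.CV;
 using Emgu.CV.Cuda;
 using Emgu.CV.CvEnum;
 using Emgu.CV.Util;

 internal static class CudaOrbUsageSketch
 {
    internal static void Run()
    {
       using (var detector = new CudaORBDetector(numberOfFeatures: 1000, fastThreshold: 10))
       using (var img = new Mat("scene.jpg", ImreadModes.Grayscale))   // CUDA ORB expects a single-channel image
       using (var gpuImg = new GpuMat())
       using (var keyPoints = new VectorOfKeyPoint())
       using (var descriptors = new GpuMat())
       {
          gpuImg.Upload(img);                                          // copy the image into GPU memory
          detector.DetectAndCompute(gpuImg, null, keyPoints, descriptors, false);
          Console.WriteLine($"Detected {keyPoints.Size} keypoints");
       }
    }
 }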
Example #4
        static void Main(string[] args)
        {
            // Load the image, shrink it to 40% of its original size, and convert it to grayscale for detection.
            var image      = new Image<Bgr, byte>("RGB.jpg").Resize(0.4, Inter.Area);
            var image_gray = image.Convert<Gray, byte>();

            // ORB limited to 200 features on a small two-level scale pyramid.
            var orbDetector = new Emgu.CV.Features2D.ORBDetector(200, 1.1f, 2, 31, 0, 2, Emgu.CV.Features2D.ORBDetector.ScoreType.Harris, 31);
            var keyPoints   = orbDetector.Detect(image_gray);

            // Features2DToolbox.DrawKeypoints could render rich keypoints instead; here each keypoint is marked manually with a red circle.
            foreach (var point in keyPoints)
            {
                CvInvoke.Circle(image, new Point((int)point.Point.X, (int)point.Point.Y), 1, new MCvScalar(0, 0, 255, 255), 2);
            }
            CvInvoke.Imshow("result", image);
            CvInvoke.WaitKey();
        }
Example #5
 public void TestORB()
 {
    // A single 700-feature ORB detector serves as both the keypoint detector and the descriptor extractor.
    ORBDetector orb = new ORBDetector(700);
    EmguAssert.IsTrue(TestFeature2DTracker(orb, orb), "Unable to find homography matrix");
 }
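TestFeature2DTracker itself is not shown in these examples. As a rough sketch of the kind of pipeline such a test exercises, the following matches ORB descriptors between a model and an observed image and estimates a homography; the file names, the 0.8 uniqueness threshold, and the reprojection threshold of 2 are illustrative assumptions rather than values taken from the original test.

 // Illustrative sketch only: file paths and thresholds are assumed, not taken from the test above.
 using Emgu.CV;
 using Emgu.CV.CvEnum;
 using Emgu.CV.Features2D;
 using Emgu.CV.Structure;
 using Emgu.CV.Util;

 internal static class OrbHomographySketch
 {
    internal static Mat FindHomography(string modelPath, string observedPath)
    {
       var orb = new ORBDetector(700);

       using var model    = new Mat(modelPath, ImreadModes.Grayscale);
       using var observed = new Mat(observedPath, ImreadModes.Grayscale);

       var modelKeyPoints    = new VectorOfKeyPoint();
       var observedKeyPoints = new VectorOfKeyPoint();
       using var modelDescriptors    = new Mat();
       using var observedDescriptors = new Mat();

       // Detect keypoints and compute binary ORB descriptors for both images.
       orb.DetectAndCompute(model, null, modelKeyPoints, modelDescriptors, false);
       orb.DetectAndCompute(observed, null, observedKeyPoints, observedDescriptors, false);

       // Brute-force Hamming matching, keeping the two best candidates per observed descriptor.
       using var matcher = new BFMatcher(DistanceType.Hamming);
       matcher.Add(modelDescriptors);
       var matches = new VectorOfVectorOfDMatch();
       matcher.KnnMatch(observedDescriptors, matches, 2, null);

       // Keep only matches whose best distance is clearly better than the second best.
       using var mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
       mask.SetTo(new MCvScalar(255));
       Features2DToolbox.VoteForUniqueness(matches, 0.8, mask);

       if (CvInvoke.CountNonZero(mask) < 4)
          return null;   // too few consistent matches to estimate a homography

       // Estimate the model-to-observed homography from the surviving matches.
       return Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(
          modelKeyPoints, observedKeyPoints, matches, mask, 2);
    }
 }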