Code Example #1
File: CVUtil.cs Project: paweenwich/MyAutoIt
        public static Rectangle GetRect(VectorOfKeyPoint vKeyPoint)
        {
            // Track the bounding extents of all keypoints; start from extreme sentinels
            // so the first point always updates them (avoids the hardcoded 10000 limit).
            int minx = int.MaxValue;
            int miny = int.MaxValue;
            int maxx = int.MinValue;
            int maxy = int.MinValue;

            for (int i = 0; i < vKeyPoint.Size; i++)
            {
                MKeyPoint p = vKeyPoint[i];
                if (p.Point.X < minx)
                {
                    minx = (int)p.Point.X;
                }
                if (p.Point.X > maxx)
                {
                    maxx = (int)p.Point.X;
                }
                if (p.Point.Y < miny)
                {
                    miny = (int)p.Point.Y;
                }
                if (p.Point.Y > maxy)
                {
                    maxy = (int)p.Point.Y;
                }
            }
            return(new Rectangle(minx, miny, maxx - minx, maxy - miny));
        }
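A minimal usage sketch for GetRect (not from the original project): it assumes an Emgu CV ORBDetector as in example #8 and a caller-supplied image path; Detect returns an MKeyPoint[], which the VectorOfKeyPoint constructor accepts.

        // Usage sketch: detect keypoints with ORB and take their bounding rectangle.
        public static Rectangle GetKeyPointBounds(string imagePath)
        {
            using (Image<Gray, byte> img = new Image<Gray, byte>(imagePath))
            using (ORBDetector orb = new ORBDetector())
            using (VectorOfKeyPoint keyPoints = new VectorOfKeyPoint(orb.Detect(img)))
            {
                return CVUtil.GetRect(keyPoints);
            }
        }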
Code Example #2
        /// <summary>
        /// Convert the standard vector to an array of MKeyPoint
        /// </summary>
        /// <returns>An array of MKeyPoint</returns>
        public MKeyPoint[] ToArray()
        {
            MKeyPoint[] res    = new MKeyPoint[Size];
            GCHandle    handle = GCHandle.Alloc(res, GCHandleType.Pinned);

            VectorOfKeyPointCopyData(_ptr, handle.AddrOfPinnedObject());
            handle.Free();
            return(res);
        }
Code Example #3
 /// <summary>
 /// Get the item at the specified index
 /// </summary>
 /// <param name="index">The index</param>
 /// <returns>The item at the specified index</returns>
 public MKeyPoint this[int index]
 {
     get
     {
         MKeyPoint result = new MKeyPoint();
         VectorOfKeyPointGetItem(_ptr, index, ref result);
         return(result);
     }
 }
Code Example #4
        protected override VideoCloudPoints ReadFrom(string path)
        {
            using (var file = File.OpenRead(path))
            {
                using (var stream = new BinaryReader(file))
                {
                    var videoPath       = stream.ReadString();
                    var frameCounts     = stream.ReadInt32();
                    var videCloudPoints = new VideoCloudPoints(videoPath, frameCounts);

                    for (int i = 0; i < frameCounts; i++)
                    {
                        var keysCount = stream.ReadInt32();
                        var array     = new MKeyPoint[keysCount];
                        for (int j = 0; j < keysCount; j++)
                        {
                            var keyFeature = new MKeyPoint();
//                            keyFeature.Angle = stream.ReadSingle();
//                            keyFeature.ClassId = stream.ReadInt32();
//                            keyFeature.Octave = stream.ReadInt32();
//                            keyFeature.Response = stream.ReadSingle();
//                            keyFeature.Size = stream.ReadSingle();
                            keyFeature.Point = new PointF(stream.ReadSingle(), stream.ReadSingle());
                            array[j]         = keyFeature;
                        }
                        videCloudPoints.SetKeyFeatures(i, new VectorOfKeyPoint(array));
                    }

                    int matchesListCount = stream.ReadInt32();
                    for (int i = 0; i < matchesListCount; i++)
                    {
                        int matchesCount      = stream.ReadInt32();
                        int firstFrameIndex   = -1;
                        int firstFeatureIndex = -1;

                        for (int j = 0; j < matchesCount; j++)
                        {
                            int frameIndex   = stream.ReadInt32();
                            int featureIndex = stream.ReadInt32();
                            if (firstFrameIndex != -1 && firstFeatureIndex != -1)
                            {
                                videCloudPoints.Unite(firstFrameIndex, firstFeatureIndex, frameIndex, featureIndex);
                            }
                            else
                            {
                                firstFrameIndex   = frameIndex;
                                firstFeatureIndex = featureIndex;
                            }
                        }
                    }
                    return(videCloudPoints);
                }
            }
        }
Code Example #5
 /// <summary>
 /// Convert the standard vector to an array of KeyPoint
 /// </summary>
 /// <returns>An array of KeyPoint</returns>
 public MKeyPoint[] ToArray()
 {
     MKeyPoint[] res = new MKeyPoint[Size];
     if (res.Length > 0)
     {
         GCHandle handle = GCHandle.Alloc(res, GCHandleType.Pinned);
         CvInvoke.VectorOfKeyPointCopyData(_ptr, handle.AddrOfPinnedObject());
         handle.Free();
     }
     return(res);
 }
Code Example #6
File: Utils.cs Project: Lewis945/RubiksCubeSolver
        public static VectorOfKeyPoint GetKeyPointsVector(VectorOfPointF points)
        {
            var keyPoints = new MKeyPoint[points.Size];

            for (int i = 0; i < points.Size; i++)
            {
                keyPoints[i] = new MKeyPoint()
                {
                    Point    = points[i],
                    Size     = 1,
                    Angle    = 0,
                    Response = 0
                };
            }

            return(new VectorOfKeyPoint(keyPoints));
        }
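A short usage sketch for the helper above (illustrative only): the coordinates are made up, and VectorOfPointF is assumed to take a PointF[] in its constructor, mirroring the VectorOfKeyPoint constructor used elsewhere on this page.

        public static void KeyPointsVectorDemo()
        {
            PointF[] corners = { new PointF(10f, 20f), new PointF(30f, 40f) }; // made-up sample points
            using (VectorOfPointF points = new VectorOfPointF(corners))
            using (VectorOfKeyPoint keyPoints = Utils.GetKeyPointsVector(points))
            {
                Console.WriteLine(keyPoints.Size); // prints 2
            }
        }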
Code Example #7
File: NodeLibrary.cs Project: okeanz/IPS
        static object[] DetectAndCompute2(UMat img, Feature2D detector, Feature2D computer) // finds and computes the image's descriptors
        {
            object[]         outp        = new object[0];
            UMat             descriptors = new UMat();
            var              mkp         = new MKeyPoint[0];
            VectorOfKeyPoint keypoints;

            try
            {
                mkp       = detector.Detect(img);
                keypoints = new VectorOfKeyPoint(mkp);
                computer.Compute(img, keypoints, descriptors);
                outp = new object[] { keypoints, descriptors };
            }
            finally
            {
            }
            return(outp);
        }
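A sketch of how DetectAndCompute2 might be called (illustrative only): an ORBDetector, as in example #8, is assumed to act as both detector and descriptor computer, and the image path is hypothetical; the returned pair is cast back to VectorOfKeyPoint and UMat.

        static void DetectAndCompute2Demo(string imagePath)
        {
            using (Mat loaded = CvInvoke.Imread(imagePath, ImreadModes.Grayscale))
            using (UMat gray = loaded.GetUMat(AccessType.Read))
            using (ORBDetector orb = new ORBDetector())
            {
                object[] result = DetectAndCompute2(gray, orb, orb);
                VectorOfKeyPoint keypoints   = (VectorOfKeyPoint)result[0];
                UMat             descriptors = (UMat)result[1];
                Console.WriteLine("{0} keypoints, {1} descriptor rows", keypoints.Size, descriptors.Rows);
            }
        }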
Code Example #8
        public Mat FingerprintDescriptor(Mat input)
        {
            var harris_normalised = PrepareImage(input);

            float            threshold  = 125.0f;
            List <MKeyPoint> mKeyPoints = new List <MKeyPoint>();
            Mat rescaled = new Mat();
            VectorOfKeyPoint keypoints = new VectorOfKeyPoint();
            double           scale = 1.0, shift = 0.0;

            CvInvoke.ConvertScaleAbs(harris_normalised, rescaled, scale, shift);
            Mat[]       mat         = new Mat[] { rescaled, rescaled, rescaled };
            VectorOfMat vectorOfMat = new VectorOfMat(mat);

            int[] from_to  = { 0, 0, 1, 1, 2, 2 };
            Mat   harris_c = new Mat(rescaled.Size, DepthType.Cv8U, 3);

            CvInvoke.MixChannels(vectorOfMat, harris_c, from_to);
            for (int x = 0; x < harris_c.Width; x++)
            {
                for (int y = 0; y < harris_c.Height; y++)
                {
                    if (GetFloatValue(harris_c, x, y) > threshold)
                    {
                        MKeyPoint m = new MKeyPoint
                        {
                            Size  = 1,
                            Point = new PointF(x, y)
                        };
                        mKeyPoints.Add(m);
                    }
                }
            }

            keypoints.Push(mKeyPoints.ToArray());
            Mat         descriptors = new Mat();
            ORBDetector ORBCPU      = new ORBDetector();

            ORBCPU.Compute(_input_thinned, keypoints, descriptors);

            return(descriptors);
        }
Code Example #9
File: test.cs Project: chencen2000/testMQ
        static void test_match()
        {
            Bitmap             b1     = new Bitmap(@"C:\test\test_1\temp_menu.jpg");
            Bitmap             sl     = new Bitmap(@"C:\test\scroll_left.jpg");
            Image <Gray, Byte> slicon = new Image <Gray, byte>(sl);

            slicon = slicon.Not();
            slicon.Save("temp_1.jpg");
            Image <Gray, Byte> test = new Image <Gray, Byte>(b1);
            //long l;
            //Mat r = DrawMatches.Draw(slicon.Mat, test.Mat, out l);
            //r.Save("temp_2.jpg");

            //SURF surfCPU = new SURF(400);
            //Brisk surfCPU = new Brisk();
            SIFT             surfCPU             = new SIFT();
            VectorOfKeyPoint modelKeyPoints      = new VectorOfKeyPoint();
            VectorOfKeyPoint observedKeyPoints   = new VectorOfKeyPoint();
            UMat             modelDescriptors    = new UMat();
            UMat             observedDescriptors = new UMat();

            surfCPU.DetectAndCompute(slicon, null, modelKeyPoints, modelDescriptors, false);
            surfCPU.DetectAndCompute(test, null, observedKeyPoints, observedDescriptors, false);

            var indices    = new Matrix <int>(observedDescriptors.Rows, 2);
            var dists      = new Matrix <float>(observedDescriptors.Rows, 2);
            var flannIndex = new Index(modelDescriptors, new KMeansIndexParams());

            flannIndex.KnnSearch(observedDescriptors, indices, dists, 2);
            for (int i = 0; i < indices.Rows; i++)
            {
                if (dists.Data[i, 0] < (0.6 * dists.Data[i, 1]))
                {
                    int idx1 = indices[i, 0];
                    int idx2 = indices[i, 1];
                    Program.logIt(string.Format("{0}-{1}", indices[i, 0], indices[i, 1]));
                    MKeyPoint p1 = modelKeyPoints[idx1];
                    MKeyPoint p2 = observedKeyPoints[idx2];
                    Program.logIt(string.Format("{0}-{1}", p1.Point, p2.Point));
                }
            }
        }
Code Example #10
            public static MKeyPoint[] toKeyPoints(this Vector2[] p)
            {
                MKeyPoint[] result = new MKeyPoint[p.Length];

                for (int i = 0; i < p.Length; i++)
                {
                    result[i].Point.X = p[i].x;

                    result[i].Point.Y = p[i].y;

                    result[i].Octave = 0;

                    result[i].Size = 2;

                    result[i].Angle = 0;

                    result[i].Response = 1;
                }

                return(result);
            }
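A quick usage sketch for the extension method above: Vector2 is assumed to be UnityEngine.Vector2 (the lowercase x/y fields suggest a Unity project), and the resulting array can be wrapped in a VectorOfKeyPoint as in the other examples.

            // Usage sketch (illustrative only, not from the original project).
            public static VectorOfKeyPoint ToKeyPointVector(Vector2[] tracked)
            {
                return new VectorOfKeyPoint(tracked.toKeyPoints());
            }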
Code Example #11
File: Frame.cs Project: Omar-Bsoul/Features-Finder
        public void SetFeaturesVector(IEnumerable <uint> value)
        {
            KazePoints.Clear();

            for (int i = 0; i < value.Count() / 7; i++)
            {
                // Each keypoint occupies 7 consecutive values in the flattened vector.
                int baseIndex = i * 7;
                MKeyPoint keyPoint = new MKeyPoint {
                    Angle   = value.ElementAt(baseIndex + 0),
                    ClassId = (int)value.ElementAt(baseIndex + 1),
                    Octave  = (int)value.ElementAt(baseIndex + 2),
                    Point   = new System.Drawing.PointF {
                        X = value.ElementAt(baseIndex + 3),
                        Y = value.ElementAt(baseIndex + 4),
                    },
                    Response = value.ElementAt(baseIndex + 5),
                    Size     = value.ElementAt(baseIndex + 6),
                };

                KazePoints.Add(keyPoint);
            }
        }
Code Example #12
File: TestMatching.cs Project: KFlaga/Egomotion
        public void TestThreeViewsCorrespondences()
        {
            MKeyPoint[] kps1 = new MKeyPoint[]
            {
                new MKeyPoint()
                {
                    Point = new PointF(0, 0)
                },
                new MKeyPoint()
                {
                    Point = new PointF(5, 2)
                },
                new MKeyPoint()
                {
                    Point = new PointF(5, 7)
                },
                new MKeyPoint()
                {
                    Point = new PointF(2, 1)
                },
                new MKeyPoint()
                {
                    Point = new PointF(-4, 2)
                },
            };

            MKeyPoint[] kps2a = new MKeyPoint[]
            {
                new MKeyPoint()
                {
                    Point = new PointF(0, 2)
                },
                new MKeyPoint()
                {
                    Point = new PointF(9, 0)
                },
                new MKeyPoint()
                {
                    Point = new PointF(9, -4)
                },
                new MKeyPoint()
                {
                    Point = new PointF(6, 1)
                },
                new MKeyPoint()
                {
                    Point = new PointF(-3, -3)
                },
            };

            MKeyPoint[] kps2b = new MKeyPoint[]
            {
                new MKeyPoint()
                {
                    Point = new PointF(0, 2)
                },
                new MKeyPoint()
                {
                    Point = new PointF(9, 0)
                },
                new MKeyPoint()
                {
                    Point = new PointF(9, -4)
                },
                new MKeyPoint()
                {
                    Point = new PointF(6, 1)
                },
                new MKeyPoint()
                {
                    Point = new PointF(-3, -3)
                },
            };

            MKeyPoint[] kps3 = new MKeyPoint[]
            {
                new MKeyPoint()
                {
                    Point = new PointF(0, 4)
                },
                new MKeyPoint()
                {
                    Point = new PointF(11, 2)
                },
                new MKeyPoint()
                {
                    Point = new PointF(11, -6)
                },
                new MKeyPoint()
                {
                    Point = new PointF(8, 1)
                },
                new MKeyPoint()
                {
                    Point = new PointF(-7, 0)
                },
            };

            MDMatch[] matches12 = new MDMatch[4]
            {
                new MDMatch {
                    QueryIdx = 1, TrainIdx = 0
                },
                new MDMatch {
                    QueryIdx = 0, TrainIdx = 3
                },
                new MDMatch {
                    QueryIdx = 2, TrainIdx = 1
                },
                new MDMatch {
                    QueryIdx = 4, TrainIdx = 2
                },
            };

            MDMatch[] matches23 = new MDMatch[4]
            {
                new MDMatch {
                    QueryIdx = 0, TrainIdx = 4
                },
                new MDMatch {
                    QueryIdx = 1, TrainIdx = 2
                },
                new MDMatch {
                    QueryIdx = 4, TrainIdx = 0
                },
                new MDMatch {
                    QueryIdx = 2, TrainIdx = 1
                },
            };

            MatchingResult match12 = new MatchingResult()
            {
                LeftKps  = kps1,
                RightKps = kps2a,
                Matches  = new Emgu.CV.Util.VectorOfDMatch(matches12)
            };
            MatchingResult match23 = new MatchingResult()
            {
                LeftKps  = kps2b,
                RightKps = kps3,
                Matches  = new Emgu.CV.Util.VectorOfDMatch(matches23)
            };

            List <Correspondences.MatchPair> correspondences = Correspondences.FindCorrespondences12to23(match12, match23);

            Assert.AreEqual(3, correspondences.Count);
            // They are sorted the same as matches23
            var c = correspondences[0];

            Assert.AreEqual(1, c.Match12.QueryIdx);
            Assert.AreEqual(0, c.Match12.TrainIdx);
            Assert.AreEqual(0, c.Match23.QueryIdx);
            Assert.AreEqual(4, c.Match23.TrainIdx);

            Assert.AreEqual(kps1[1].Point, c.Kp1.Point);
            Assert.AreEqual(kps2a[0].Point, c.Kp2.Point);
            Assert.AreEqual(kps3[4].Point, c.Kp3.Point);

            c = correspondences[1];
            Assert.AreEqual(2, c.Match12.QueryIdx);
            Assert.AreEqual(1, c.Match12.TrainIdx);
            Assert.AreEqual(1, c.Match23.QueryIdx);
            Assert.AreEqual(2, c.Match23.TrainIdx);

            c = correspondences[2];
            Assert.AreEqual(4, c.Match12.QueryIdx);
            Assert.AreEqual(2, c.Match12.TrainIdx);
            Assert.AreEqual(2, c.Match23.QueryIdx);
            Assert.AreEqual(1, c.Match23.TrainIdx);
        }
Code Example #13
File: TestMatching.cs Project: KFlaga/Egomotion
        public void TestMatchClosePoints()
        {
            Image <Arthmetic, double> desc1 = new Image <Arthmetic, double>(new double[, , ] {
                { { 2 }, { 2 }, },
                { { 1 }, { 0 }, },
                { { 2 }, { 0 }, },
                { { 0 }, { 1 }, },
                { { 1 }, { 1 }, },
                { { 0 }, { 2 }, },
            });

            Image <Arthmetic, double> desc2 = new Image <Arthmetic, double>(new double[, , ] {
                { { 1 }, { 0 }, },
                { { 1 }, { 1 }, },
                { { 0 }, { 1 }, },
                { { 2 }, { 0 }, },
                { { 2 }, { 2 }, },
                { { 0 }, { 2 }, },
            });

            MKeyPoint[] kps1 = new MKeyPoint[]
            {
                new MKeyPoint()
                {
                    Point = new PointF(15, 0)
                },
                new MKeyPoint()
                {
                    Point = new PointF(0, 0)
                },
                new MKeyPoint()
                {
                    Point = new PointF(6, -5)
                },
                new MKeyPoint()
                {
                    Point = new PointF(3, 0)
                },
                new MKeyPoint()
                {
                    Point = new PointF(12, 0)
                },
                new MKeyPoint()
                {
                    Point = new PointF(9, 0)
                },
            };

            MKeyPoint[] kps2 = new MKeyPoint[]
            {
                new MKeyPoint()
                {
                    Point = new PointF(0, 2)
                },
                new MKeyPoint()
                {
                    Point = new PointF(9, 0)
                },
                new MKeyPoint()
                {
                    Point = new PointF(3, 5)
                },
                new MKeyPoint()
                {
                    Point = new PointF(6, 7)
                },
                new MKeyPoint()
                {
                    Point = new PointF(15, 0)
                },
                new MKeyPoint()
                {
                    Point = new PointF(9, 7)
                },
            };

            var matches = MatchClosePoints.Match(kps1, kps2, desc1.Mat, desc2.Mat, Emgu.CV.Features2D.DistanceType.L2, 5.5, true);
        }
Code Example #14
        List <float> ExtractSiftFeatureVector(TaggedImage image, int keyPointCount, SiftSortingMethod sortingMethod, bool doDrawImage)
        {
            // use the emgu functions to gather keypoints

            VectorOfKeyPoint vectorOfKeypoints = new VectorOfKeyPoint();

            Mat output = image.GetMat().Clone(); // only needed for drawing

            sift.DetectAndCompute(image.GetMat(), null, vectorOfKeypoints, output, false);

            // put it into useful data formats

            List <MKeyPoint> keyPoints = new List <MKeyPoint>(vectorOfKeypoints.ToArray());

            // sort

            switch (sortingMethod)
            {
            case SiftSortingMethod.Response:
                keyPoints.Sort((p1, p2) => p1.Response < p2.Response ? 1 : (p1.Response == p2.Response ? 0 : -1));
                break;

            case SiftSortingMethod.Size:
                keyPoints.Sort((p1, p2) => p1.Size < p2.Size ? 1 : (p1.Size == p2.Size ? 0 : -1));
                break;

            case SiftSortingMethod.None:
            default:
                break;
            }

            // expand/trim
            while (keyPoints.Count < keyPointCount)
            {
                keyPoints.Add(new MKeyPoint());
            }

            if (keyPoints.Count > keyPointCount)
            {
                keyPoints.RemoveRange(keyPointCount, keyPoints.Count - keyPointCount);
            }

            // visualize

            if (doDrawImage)
            {
                vectorOfKeypoints = new VectorOfKeyPoint(keyPoints.ToArray());

                Features2DToolbox.DrawKeypoints(image.GetMat(), vectorOfKeypoints, output, new Bgr(0, 0, 255), Features2DToolbox.KeypointDrawType.DrawRichKeypoints);

                String win1 = "SIFT";          //The name of the window
                CvInvoke.NamedWindow(win1);    //Create the window using the specific name

                CvInvoke.Imshow(win1, output); //Show the image
                CvInvoke.WaitKey(0);           //Wait for the key pressing event
                CvInvoke.DestroyWindow(win1);  //Destroy the window if key is pressed
            }

            // convert to list

            List <float> result = new List <float>(5 * keyPointCount);

            for (int i = 0; i < keyPoints.Count; ++i)
            {
                MKeyPoint current = keyPoints[i];

                result.Add(current.Point.X / (float)output.Size.Width);
                result.Add(current.Point.Y / (float)output.Size.Height);
                result.Add(current.Size);
                result.Add(current.Angle);
                result.Add(current.Response);
            }

            return(result);
        }
Code Example #15
 public ImagePointContext(Image image, MKeyPoint keyPoint)
 {
     Image    = image;
     KeyPoint = keyPoint;
 }
Code Example #16
File: FeatureTracker.cs Project: red-pencil/ISWC18
 public Vector2 NormalizePoint(MKeyPoint p)
 {
     return(new Vector2(p.Point.X / (float)_width, p.Point.Y / (float)_height));
 }
Code Example #17
 internal static extern void VectorOfKeyPointGetItem(IntPtr vec, int index, ref MKeyPoint element);
Code Example #18
 internal static extern void VectorOfKeyPointGetItem(IntPtr keypoints, int index, ref MKeyPoint keypoint);
Code Example #19
        private List <MDMatch> clusterBasedOnPoseEstimation(List <MDMatch> matches, KeyPoints queryKeyPoints, PreProcessedImage trainingData)
        {
            // If no training data is provided then we cannot compute pose (LeafAnalysisV1), hence just return back the original set
            if (trainingData == null ||
                matches == null ||
                !matches.Any())
            {
                return(matches);
            }

            Dictionary <MDMatch, List <MDMatch> > clusters = new Dictionary <MDMatch, List <MDMatch> >(matches.Count);
            List <PoseEstimate> poseEstimates = new List <PoseEstimate>(matches.Count);

            foreach (MDMatch match in matches)
            {
                MKeyPoint queryKeyPoint    = queryKeyPoints.Points[match.QueryIdx];
                MKeyPoint trainingKeyPoint = trainingData.KeyPoints.Points[match.TrainIdx];

                PoseEstimate estimate = new PoseEstimate();
                estimate.Match = match;
                estimate.Dx    = trainingKeyPoint.Point.X - queryKeyPoint.Point.X;
                estimate.Dy    = trainingKeyPoint.Point.Y - queryKeyPoint.Point.Y;
                estimate.Ds    = trainingKeyPoint.Octave / queryKeyPoint.Octave;
                estimate.Do    = trainingKeyPoint.Angle - queryKeyPoint.Angle;

                poseEstimates.Add(estimate);
                // Initialize clusters for each individual match
                // Next we will add other matches which belong to this cluster
                clusters.Add(match, new List <MDMatch>(new MDMatch[] { match }));
            }

            const double errorThreshold = 5;

            // Compute cluster membership
            foreach (PoseEstimate estimate in poseEstimates)
            {
                foreach (PoseEstimate otherEstimate in poseEstimates)
                {
                    // Ignore self
                    if (estimate == otherEstimate)
                    {
                        continue;
                    }

                    double error = estimate.RMSE(otherEstimate);
                    //Console.WriteLine("Error: " + trainingData.Category + ": " + error);
                    if (error < errorThreshold)
                    {
                        clusters[estimate.Match].Add(otherEstimate.Match);
                    }
                }
            }

            // Finally pick the largest cluster
            List <MDMatch> result        = null;
            int            sizeOfCluster = -1;

            foreach (KeyValuePair <MDMatch, List <MDMatch> > cluster in clusters)
            {
                if (cluster.Value.Count == sizeOfCluster)
                {
                    // Tie breaker: choose the cluster with smaller overall distances
                    if (result.Sum(item => item.Distance) > cluster.Value.Sum(item => item.Distance))
                    {
                        result = cluster.Value;
                    }
                }
                else if (cluster.Value.Count > sizeOfCluster)
                {
                    sizeOfCluster = cluster.Value.Count;
                    result        = cluster.Value;
                }
            }

            return(result);
        }
Code Example #20
        /// <summary>
        /// Identify good matches using RANSAC
        /// </summary>
        /// <param name="matches">symmetrical matches</param>
        /// <param name="keyPoints1">keypoint1</param>
        /// <param name="keyPoints2">keypoint2</param>
        /// <param name="matchesNumber">the number of symmetrical matches</param>
        Matrix <double> ApplyRANSAC(Matrix <float> matches, VectorOfKeyPoint keyPoints1, VectorOfKeyPoint keyPoints2, int matchesNumber)
        {
            selPoints1 = new Matrix <float>(matchesNumber, 2);
            selPoints2 = new Matrix <float>(matchesNumber, 2);

            int selPointsIndex = 0;

            for (int i = 0; i < matches.Rows; i++)
            {
                if (matches[i, 0] == 0 && matches[i, 1] == 0)
                {
                    continue;
                }

                //Get the position of left keypoints
                float x = keyPoints1[(int)matches[i, 0]].Point.X;
                float y = keyPoints1[(int)matches[i, 0]].Point.Y;
                selPoints1[selPointsIndex, 0] = x;
                selPoints1[selPointsIndex, 1] = y;
                //Get the position of right keypoints
                x = keyPoints2[(int)matches[i, 1]].Point.X;
                y = keyPoints2[(int)matches[i, 1]].Point.Y;
                selPoints2[selPointsIndex, 0] = x;
                selPoints2[selPointsIndex, 1] = y;
                selPointsIndex++;
            }

            Matrix <double> fundamentalMatrix = new Matrix <double>(3, 3);
            //IntPtr status = CvInvoke.cvCreateMat(1, matchesNumber, MAT_DEPTH.CV_8U);
            //Matrix<double> status = new Matrix<double>(1, matchesNumber);
            IntPtr statusp = CvInvoke.cvCreateMat(1, matchesNumber, MAT_DEPTH.CV_8U);
            IntPtr points1 = CreatePointListPointer(selPoints1);
            IntPtr points2 = CreatePointListPointer(selPoints2);

            //IntPtr fundamentalMatrixp = CvInvoke.cvCreateMat(3, 3, MAT_DEPTH.CV_32F);

            //Compute F matrix from RANSAC matches
            CvInvoke.cvFindFundamentalMat(
                points1,            //selPoints1   points in first image
                points2,            //selPoints2   points in second image
                fundamentalMatrix,  //fundamental matrix
                CV_FM.CV_FM_RANSAC, //RANSAC method
                this._Distance,     //Use 3.0 for default. The parameter is used for RANSAC method only.
                this._Confidence,   //Use 0.99 for default. The parameter is used for RANSAC or LMedS methods only.
                statusp);           //The array is computed only in RANSAC and LMedS methods.

            Matrix <int> status = new Matrix <int>(1, matchesNumber, statusp);

            //Matrix<double> fundamentalMatrix = new Matrix<double>(3, 3, fundamentalMatrixp);
            if (this._RefineF)
            {
                matchesNumber = 0;
                for (int i = 0; i < status.Cols; i++)
                {
                    if (status[0, i] >= 1)  // ==1
                    {
                        matchesNumber++;
                    }
                }
                selPoints1 = new Matrix <float>(matchesNumber, 2);
                selPoints2 = new Matrix <float>(matchesNumber, 2);

                modelKeyPoints    = new VectorOfKeyPoint();
                observedKeyPoints = new VectorOfKeyPoint();

                int statusIndex = -1;
                selPointsIndex = 0;
                for (int i = 0; i < matches.Rows; i++)
                {
                    if (matches[i, 0] == 0 && matches[i, 1] == 0)
                    {
                        continue;
                    }

                    statusIndex++;
                    if (status[0, statusIndex] >= 1)  // == 1
                    {
                        //Get the position of left keypoints
                        float x = keyPoints1[(int)matches[i, 0]].Point.X;
                        float y = keyPoints1[(int)matches[i, 0]].Point.Y;
                        selPoints1[selPointsIndex, 0] = x;
                        selPoints1[selPointsIndex, 1] = y;

                        MKeyPoint[] kpt = new MKeyPoint[1];
                        kpt[0]         = new MKeyPoint();
                        kpt[0].Point.X = x; kpt[0].Point.Y = y;
                        modelKeyPoints.Push(kpt);

                        //Get the position of right keypoints
                        x = keyPoints2[(int)matches[i, 1]].Point.X;
                        y = keyPoints2[(int)matches[i, 1]].Point.Y;
                        selPoints2[selPointsIndex, 0] = x;
                        selPoints2[selPointsIndex, 1] = y;

                        MKeyPoint[] kpt2 = new MKeyPoint[1];
                        kpt2[0]         = new MKeyPoint();
                        kpt2[0].Point.X = x; kpt2[0].Point.Y = y;
                        observedKeyPoints.Push(kpt2);
                        selPointsIndex++;
                    }
                }

                status = new Matrix <int>(1, matchesNumber);

                mask = new Matrix <byte>(matchesNumber, 1);
                for (int i = 0; i < mask.Rows; i++)
                {
                    mask[i, 0] = 0;  // don't draw lines, we will do it ourselves
                }                    // set this to one if you want to use Features2DToolbox.DrawMatches
                // to draw correspondences

                indices = new Matrix <int>(matchesNumber, 2); // not being used as we draw correspondences
                for (int i = 0; i < indices.Rows; i++)        // ourselves
                {
                    indices[i, 0] = i;                        // has a problem in drawing lines, so we will draw ourselves
                    indices[i, 1] = i;                        // this is not being used in our code
                }

                //Compute F matrix from RANSAC matches   // we can do additional RANSAC filtering
                //CvInvoke.cvFindFundamentalMat(         // but first RANSAC gives good results so not used
                //    selPoints1, //points in first image
                //    selPoints2, //points in second image
                //    fundamentalMatrix,  //fundamental matrix
                //    CV_FM.CV_FM_RANSAC, //RANSAC method
                //    this._Distance,  //Use 3.0 for default. The parameter is used for RANSAC method only.
                //    this._Confidence, //Use 0.99 for default. The parameter is used for RANSAC or LMedS methods only.
                //    status);//The array is computed only in RANSAC and LMedS methods.
                // we will need to copy points from selPoints1 and 2 based on status if above was uncommented
            }
            return(fundamentalMatrix);
        }
Code Example #21
File: VectorOfKeyPoint.cs Project: fajoy/RTSPExample
 internal static extern void VectorOfKeyPointGetItem(IntPtr keypoints, int index, ref MKeyPoint keypoint);
Code Example #22
File: CVUtil.cs Project: paweenwich/MyAutoIt
 public static String ToString(MKeyPoint keyPoint, String indent = "")
 {
     return(indent + JsonConvert.SerializeObject(keyPoint));
 }
Code Example #23
File: KeyPoint.cs Project: mstath/EmguCV.Workbench
 public KeyPoint(MKeyPoint pixel)
 {
     _pixel = pixel;
 }