/// <summary>
/// Packs a list of DMatch into an Nx1 CV_64FC4 Mat, one (queryIdx, trainIdx, imgIdx, distance) quadruple per row.
/// </summary>
public static Mat vector_DMatch_to_Mat(List <DMatch> matches)
        {
            Mat res;
            int count = (matches != null) ? matches.Count : 0;

            if (count > 0)
            {
                res = new Mat(count, 1, CvType.CV_64FC4);
                double[] buff = new double[count * 4];
                for (int i = 0; i < count; i++)
                {
                    DMatch m = matches[i];
                    buff[4 * i]     = m.queryIdx;
                    buff[4 * i + 1] = m.trainIdx;
                    buff[4 * i + 2] = m.imgIdx;
                    buff[4 * i + 3] = m.distance;
                }
                res.put(0, 0, buff);
            }
            else
            {
                res = new Mat();
            }
            return(res);
        }
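
A minimal usage sketch for the converter above (assuming OpenCV for Unity's Mat/DMatch API; the values are illustrative):

            // Convert one match and read the packed row back out
            List<DMatch> ms = new List<DMatch> { new DMatch(0, 3, 0.5f) };
            Mat packed = vector_DMatch_to_Mat(ms);          // 1x1 CV_64FC4
            double[] row = new double[4];
            packed.get(0, 0, row);                          // { queryIdx, trainIdx, imgIdx, distance }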
Example #2
 public static Mat GetHomography(Mat mMain, Mat mSecondary)
 {
     KeyPoint[] keypoints  = null;
     KeyPoint[] keypoints2 = null;
     using (SIFT sIFT = SIFT.Create(1000))
     {
         using (Mat mat = new Mat())
         {
             using (Mat mat2 = new Mat())
             {
                 sIFT.DetectAndCompute(mMain, new Mat(), out keypoints, mat);
                 sIFT.DetectAndCompute(mSecondary, new Mat(), out keypoints2, mat2);
                 FlannBasedMatcher flannBasedMatcher = new FlannBasedMatcher();
                 DMatch[]          array             = flannBasedMatcher.Match(mat, mat2);
                 List <Point2f> list  = new List <Point2f>();
                 List <Point2f> list2 = new List <Point2f>();
                 for (int i = 0; i < array.Length; i++)
                 {
                     list.Add(keypoints[array[i].QueryIdx].Pt);
                     list2.Add(keypoints2[array[i].TrainIdx].Pt);
                 }
                 return(Cv2.FindHomography(InputArray.Create(list2), InputArray.Create(list), HomographyMethods.Ransac));
             }
         }
     }
 }
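
A short usage sketch (assumes OpenCvSharp; the file names are placeholders):

     Mat mMain      = Cv2.ImRead("main.jpg");
     Mat mSecondary = Cv2.ImRead("secondary.jpg");
     Mat H          = GetHomography(mMain, mSecondary);
     Mat warped     = new Mat();
     Cv2.WarpPerspective(mSecondary, warped, H, mMain.Size());   // map the secondary image into the main image's frame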
Example #3
        private void ValidateMatch(DMatch match)
        {
            var count = match.Expression != null ? match.Expression.GetElementCount() : -1;

            for (var i = 0; i < match.Entries.Count; i++)
            {
                var e = match.Entries[i];

                if (e.Guard == null && e.Pattern is DNamePattern name)
                {
                    name.IsConstructor = IsTypeExists(name.Name);
                }

                if (e.Guard == null)
                {
                    var j = i;
                    while (j > 0)
                    {
                        j--;
                        var prev = match.Entries[j];

                        if (prev.Guard == null && !CanFollow(e.Pattern, prev.Pattern))
                        {
                            AddWarning(CompilerWarning.UnreachableMatchEntry, e.Location, e.Pattern, prev.Pattern);
                            break;
                        }
                    }
                }

                CheckPattern(e.Pattern, count, e.Pattern.GetElementCount());
            }
        }
        /// <summary>
        /// Cross-checks matches between two descriptor sets, keeping only mutual nearest neighbors.
        /// </summary>
        /// <returns>The cross-checked matches.</returns>
        /// <param name="queryDescriptors">Query descriptors.</param>
        /// <param name="trainDescriptors">Train descriptors.</param>
        public static IList <DMatch> CrossMatcher(MatOfFloat queryDescriptors, MatOfFloat trainDescriptors)
        {
            MatOfDMatch   matchQT = new MatOfDMatch(), matchTQ = new MatOfDMatch();
            List <DMatch> bmatch = new List <DMatch>();

            DMatch[] dmatch;
            if (trainDescriptors.cols() <= 0)
            {
                throw new ApplicationException("CrossMatcher: the trainDescriptors argument is empty.");
            }
            if (queryDescriptors.cols() <= 0)
            {
                throw new ApplicationException("CrossMatcher: the queryDescriptors argument is empty.");
            }
            matcher.match(queryDescriptors, trainDescriptors, matchQT);
            matcher.match(trainDescriptors, queryDescriptors, matchTQ);
            List <DMatch> matchQTList = matchQT.toList();
            List <DMatch> matchTQList = matchTQ.toList();
            for (int i = 0; i < matchQTList.Count; i++)
            {
                DMatch forward  = matchQTList[i];
                DMatch backward = matchTQList[forward.trainIdx];
                // Keep a match only when the reverse match points back to the same query descriptor.
                if (backward.trainIdx == forward.queryIdx)
                {
                    bmatch.Add(forward);
                }
            }
            dmatch = bmatch.ToArray();
            return(dmatch);
        }
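
For reference, OpenCV's brute-force matcher can perform the same mutual-nearest-neighbor filtering natively; a minimal sketch, assuming OpenCV for Unity's BFMatcher API:

            // Built-in cross-check: keeps only matches that agree in both directions
            BFMatcher bf = BFMatcher.create(Core.NORM_L2, true);   // crossCheck = true
            MatOfDMatch crossChecked = new MatOfDMatch();
            bf.match(queryDescriptors, trainDescriptors, crossChecked);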
Example #5
        /// <summary>
        /// Ratio-test matching. To avoid NaNs when the best match has zero distance, the
        /// inverted ratio (best / second-best) is used. The KNN match returns the 2 nearest
        /// matches for each query descriptor.
        /// </summary>
        List <DMatch> GetMatches(BFMatcher matcher, Mat queryDescriptors, Mat trainDescriptors)
        {
            List <DMatch> matchesList = new List <DMatch>();

            if (enableRatioTest)
            {
                float      minRatio = 1.0f / 1.5f;
                DMatch[][] dm       = matcher.KnnMatch(queryDescriptors, trainDescriptors, 2);

                for (int i = 0; i < dm.Length; i++)
                {
                    DMatch bestMatch   = dm[i][0];
                    DMatch betterMatch = dm[i][1];

                    float distanceRatio = bestMatch.Distance / betterMatch.Distance;

                    if (distanceRatio < minRatio)
                    {
                        matchesList.Add(bestMatch);
                    }
                }
            }
            else
            {
                matchesList.AddRange(matcher.Match(queryDescriptors, trainDescriptors));
            }
            return(matchesList);
        }
        void OnFast()
        {
            Mat image01 = Cv2.ImRead(Application.streamingAssetsPath + "/bryce_01.jpg");
            Mat image02 = Cv2.ImRead(Application.streamingAssetsPath + "/bryce_02.jpg");

            Mat image1 = new Mat(), image2 = new Mat();

            // ImRead loads images as BGR, so convert with BGR2GRAY.
            Cv2.CvtColor(image01, image1, ColorConversionCodes.BGR2GRAY);
            Cv2.CvtColor(image02, image2, ColorConversionCodes.BGR2GRAY);
            KeyPoint[] keyPoint1 = Cv2.FAST(image1, 50, true);
            KeyPoint[] keyPoint2 = Cv2.FAST(image2, 50, true);
            using (Mat descriptor1 = new Mat())
                using (Mat descriptor2 = new Mat())
                    using (var orb = ORB.Create(50))
                        using (var matcher = new BFMatcher(NormTypes.Hamming)) // ORB produces binary descriptors
                        {
                            orb.Compute(image1, ref keyPoint1, descriptor1);
                            orb.Compute(image2, ref keyPoint2, descriptor2);
                            Debug.Log(string.Format("keyPoints has {0},{1} items.", keyPoint1.Length, keyPoint2.Length));
                            Debug.Log(string.Format("descriptor has {0},{1} items.", descriptor1.Rows, descriptor2.Rows));

                            List <DMatch> goodMatchePoints = new List <DMatch>();
                            var           dm = matcher.KnnMatch(descriptor1, descriptor2, 2);

                            #region matched 175
                            // Looser 0.6 ratio test; kept commented out so each match is only added once.
                            //for (int i = 0; i < dm.Length; i++)
                            //{
                            //    if (dm[i][0].Distance < 0.6 * dm[i][1].Distance)
                            //    {
                            //        goodMatchePoints.Add(dm[i][0]);
                            //    }
                            //}
                            #endregion

                            #region matched 90
                            float minRatio = 1.0f / 1.5f;
                            for (int i = 0; i < dm.Length; i++)
                            {
                                DMatch bestMatch     = dm[i][0];
                                DMatch betterMatch   = dm[i][1];
                                float  distanceRatio = bestMatch.Distance / betterMatch.Distance;
                                if (distanceRatio < minRatio)
                                {
                                    goodMatchePoints.Add(bestMatch);
                                }
                            }
                            #endregion

                            var dstMat = new Mat();
                            Debug.Log(string.Format("matchePoints has {0} items", goodMatchePoints.Count));
                            Cv2.DrawMatches(image01, keyPoint1, image02, keyPoint2, goodMatchePoints, dstMat);
                            t2d = Utils.MatToTexture2D(dstMat);
                        }

            Sprite dst_sp = Sprite.Create(t2d, new UnityEngine.Rect(0, 0, t2d.width, t2d.height), Vector2.zero);

            SrcSprite.sprite = dst_sp;
        }
Example #7
    private void Build(DMatch node, Hints hints, CompilerContext ctx)
    {
        ValidateMatch(node);
        StartScope(ScopeKind.Lexical, node.Location);

        ctx = new(ctx)
        {
            MatchExit = cw.DefineLabel()
        };

        var sys  = AddVariable();
        var push = hints.Append(Push);

        if (node.Expression != null)
        {
            Build(node.Expression, push.Remove(Last), ctx);
        }

        cw.PopVar(sys);
        var sysVar = new ScopeVar(sys);

        foreach (var e in node.Entries)
        {
            BuildEntry(e, sysVar, push, ctx);
        }

        //Something of a hack: Expression can be null
        //only when this match is inside a try/catch
        if (node.Expression != null)
        {
            ThrowError(DyError.MatchFailed);
            cw.Fail();
        }
        else
        {
            cw.PushVar(sysVar);
            cw.Fail();
        }

        cw.MarkLabel(ctx.MatchExit);
        cw.Nop();
        PopIf(hints);
        EndScope();
    }
Example #8
        /// <summary>
        /// Gets the matches.
        /// </summary>
        /// <param name="queryDescriptors">Query descriptors.</param>
        /// <param name="matches">Matches.</param>
        void getMatches(Mat queryDescriptors, MatOfDMatch matches)
        {
            List <DMatch> matchesList = new List <DMatch>();

            //matches.clear();

            if (enableRatioTest)
            {
                // To avoid NaN's when best match has zero distance we will use inversed ratio.
                float minRatio = 1.0f / 1.5f;

                // KNN match will return 2 nearest matches for each query descriptor
                m_matcher.knnMatch(queryDescriptors, m_knnMatches, 2);

                for (int i = 0; i < m_knnMatches.Count; i++)
                {
                    List <DMatch> m_knnMatchesList = m_knnMatches[i].toList();

                    DMatch bestMatch   = m_knnMatchesList[0];
                    DMatch betterMatch = m_knnMatchesList[1];

                    float distanceRatio = bestMatch.distance / betterMatch.distance;

                    // Keep only matches where the second-nearest neighbor is at least
                    // 1.5x farther than the nearest (distinctness criterion)
                    if (distanceRatio < minRatio)
                    {
                        matchesList.Add(bestMatch);
                    }
                }

                matches.fromList(matchesList);
            }
            else
            {
                // fromList with the empty list clears any previous contents of matches
                matches.fromList(matchesList);

                // Perform a regular match
                m_matcher.match(queryDescriptors, matches);
            }

            //Debug.Log ("getMatches " + matches.ToString ());
        }
Example #9
        public void GetSetArrayDMatch()
        {
            var data = new DMatch[]
            {
                new DMatch(1, 2, 3),
                new DMatch(2, 4, 6),
                new DMatch(3, 6, 9),
                new DMatch(4, 7, 12),
            };

            var mat = new Mat(2, 2, MatType.CV_32FC4);

            mat.SetArray(0, 0, data);

            var data2 = new DMatch[mat.Total()];

            mat.GetArray(0, 0, data2);

            Assert.Equal(data, data2);
        }
        /// <summary>
        /// Converts std::vector&lt;std::vector&lt;cv::DMatch&gt;&gt; to a managed jagged array
        /// </summary>
        /// <returns>The copied jagged array</returns>
        public DMatch[][] ToArray()
        {
            var size1 = GetSize1();

            if (size1 == 0)
            {
                return(Array.Empty <DMatch[]>());
            }
            var size2 = GetSize2();

            var ret = new DMatch[size1][];

            for (var i = 0; i < size1; i++)
            {
                ret[i] = new DMatch[size2[i]];
            }

            using var retPtr = new ArrayAddress2 <DMatch>(ret);
            NativeMethods.vector_vector_DMatch_copy(ptr, retPtr.GetPointer());
            GC.KeepAlive(this);
            return(ret);
        }
Example #11
 public static extern ExceptionStatus core_FileNode_read_DMatch(IntPtr node, out DMatch returnValue);
Example #12
        public static MatchResult Match(Bitmap src, TrainedTemplate trainedTemplate)
        {
            try
            {
                Mat      originalImage = Bitmap2Mat(src);
                DateTime begin         = DateTime.Now;
                SURF     surf          = new SURF();
                //Detect keypoints in the source image
                KeyPoint[] originalKeyPoints = surf.Detect(originalImage);
                //Compute descriptors for the source image
                Mat originalDescriptors = new Mat();
                surf.Compute(originalImage, ref originalKeyPoints, originalDescriptors);


                //Start matching
                DescriptorMatcher descriptorMatcher = DescriptorMatcher.Create("FlannBased");

                /**
                 * knnMatch finds the best matches within a given descriptor set.
                 * With K = 2, each query descriptor gets its two nearest neighbors; the nearest
                 * one is accepted as a final match only when it is sufficiently closer than the
                 * second-nearest (Lowe's ratio test).
                 */
                DMatch[][]    matches         = descriptorMatcher.KnnMatch(trainedTemplate.templateDescriptors, originalDescriptors, 2);
                List <DMatch> goodMatchesList = new List <DMatch>();
                foreach (DMatch[] match in matches)
                {
                    DMatch m1 = match[0];
                    DMatch m2 = match[1];
                    if (m1.Distance <= m2.Distance * nndrRatio)
                    {
                        goodMatchesList.Add(m1);
                    }
                }
                //If at least 4 good matches remain, the template is considered present in the source image; this threshold can be tuned
                if (goodMatchesList.Count >= 4)
                {
                    //Console.WriteLine("Template matched in the source image!");
                    List <KeyPoint> templateKeyPointList = trainedTemplate.templateKeyPoints.ToList();
                    List <KeyPoint> originalKeyPointList = originalKeyPoints.ToList();
                    List <Point2f>  objectPoints         = new List <Point2f>();
                    List <Point2f>  scenePoints          = new List <Point2f>();
                    foreach (DMatch goodMatch in goodMatchesList)
                    {
                        objectPoints.Add(templateKeyPointList[goodMatch.QueryIdx].Pt);
                        scenePoints.Add(originalKeyPointList[goodMatch.TrainIdx].Pt);
                    }
                    MatOfPoint2f objMatOfPoint2f = new MatOfPoint2f();
                    foreach (Point2f p in objectPoints)
                    {
                        objMatOfPoint2f.Add(p);
                    }

                    MatOfPoint2f scnMatOfPoint2f = new MatOfPoint2f();
                    foreach (Point2f p in scenePoints)
                    {
                        scnMatOfPoint2f.Add(p);
                    }
                    //Use findHomography to estimate the transform between the matched keypoints
                    Mat homography = Cv2.FindHomography(objMatOfPoint2f, scnMatOfPoint2f, OpenCvSharp.HomographyMethod.Ransac, 3);

                    /**
                     * A perspective transformation projects an image onto a new viewing plane,
                     * also known as a projective mapping.
                     */
                    Mat templateCorners         = new Mat(4, 1, MatType.CV_32FC2);
                    Mat templateTransformResult = new Mat(4, 1, MatType.CV_32FC2);
                    templateCorners.Set <Point2f>(0, 0, new Point2f(0, 0));
                    templateCorners.Set <Point2f>(1, 0, new Point2f(trainedTemplate.templateImage.Cols, 0));
                    templateCorners.Set <Point2f>(2, 0, new Point2f(trainedTemplate.templateImage.Cols, trainedTemplate.templateImage.Rows));
                    templateCorners.Set <Point2f>(3, 0, new Point2f(0, trainedTemplate.templateImage.Rows));

                    //Use perspectiveTransform to map the template corners into the source image
                    Cv2.PerspectiveTransform(templateCorners, templateTransformResult, homography);

                    //The four corners of the matched rectangle
                    Point2f pointA = templateTransformResult.Get <Point2f>(0, 0);
                    Point2f pointB = templateTransformResult.Get <Point2f>(1, 0);
                    Point2f pointC = templateTransformResult.Get <Point2f>(2, 0);
                    Point2f pointD = templateTransformResult.Get <Point2f>(3, 0);

                    MatchResult matchResult = new MatchResult();
                    matchResult.top    = (int)pointA.Y;
                    matchResult.left   = (int)pointA.X;
                    matchResult.right  = (int)pointB.X;
                    matchResult.bottom = (int)pointD.Y;
                    matchResult.time   = DateTime.Now.Subtract(begin).TotalMilliseconds;
                    return(matchResult);
                }
                else
                {
                    return(null);
                }
            }
            catch (Exception ex)
            {
                Console.WriteLine(ex.Message);
                return(null);
            }
        }
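
A hypothetical call site for the method above (TrainedTemplate, Bitmap2Mat and nndrRatio belong to the surrounding project; the bitmap source is a placeholder):

            Bitmap screenshot = new Bitmap("screenshot.png");
            MatchResult r = Match(screenshot, trainedTemplate);
            if (r != null)
            {
                Console.WriteLine($"matched ({r.left},{r.top})-({r.right},{r.bottom}) in {r.time} ms");
            }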
        void OnHarris()
        {
            Mat image01 = Cv2.ImRead(Application.streamingAssetsPath + "/bryce_01.jpg");
            Mat image02 = Cv2.ImRead(Application.streamingAssetsPath + "/bryce_02.jpg");

            Mat image1 = new Mat(), image2 = new Mat();

            // ImRead loads images as BGR, so convert with BGR2GRAY.
            Cv2.CvtColor(image01, image1, ColorConversionCodes.BGR2GRAY);
            Cv2.CvtColor(image02, image2, ColorConversionCodes.BGR2GRAY);

            KeyPoint[] keyPoint1 = null, keyPoint2 = null;

            using (var gFTTDetector = GFTTDetector.Create(500))
                using (var orb = ORB.Create(20))
                    using (Mat descriptor1 = new Mat())
                        using (Mat descriptor2 = new Mat())
                            using (var matcher = new BFMatcher(NormTypes.Hamming)) // ORB produces binary descriptors
                            {
                                keyPoint1 = gFTTDetector.Detect(image1);
                                keyPoint2 = gFTTDetector.Detect(image2);

                                orb.Compute(image1, ref keyPoint1, descriptor1);
                                orb.Compute(image2, ref keyPoint2, descriptor2);

                                List <DMatch> goodMatchePoints = new List <DMatch>();
                                DMatch[][]    dm = matcher.KnnMatch(descriptor1, descriptor2, 2);

                                #region matched 30
                                //for (int i = 0; i < dm.Length; i++)
                                //{
                                //    if (dm[i][0].Distance < 0.6 * dm[i][1].Distance)
                                //    {
                                //        goodMatchePoints.Add(dm[i][0]);
                                //    }
                                //}
                                #endregion
                                #region matched 48
                                float minRatio = 1.0f / 1.5f;
                                for (int i = 0; i < dm.Length; i++)
                                {
                                    DMatch bestMatch   = dm[i][0];
                                    DMatch betterMatch = dm[i][1];

                                    float distanceRatio = bestMatch.Distance / betterMatch.Distance;

                                    if (distanceRatio < minRatio)
                                    {
                                        goodMatchePoints.Add(bestMatch);
                                    }
                                }
                                #endregion

                                var dstMat = new Mat();
                                Debug.Log(string.Format("matchePoints has {0} items", goodMatchePoints.Count));
                                Cv2.DrawMatches(image01, keyPoint1, image02, keyPoint2, goodMatchePoints, dstMat);
                                t2d = VideoDetectorExample.Utils.MatToTexture2D(dstMat);
                            }
            Sprite dst_sp = Sprite.Create(t2d, new UnityEngine.Rect(0, 0, t2d.width, t2d.height), Vector2.zero);

            SrcSprite.sprite = dst_sp;
        }
Example #14
        //Set Frame image from Webcam to start matching & detection
        //Registers one Mat as img and starts the actual matching
        public static void setImg(TestSURF thisobj, Form1 mainform, IplImage imgFromCam)
        {
            if (thisobj.isComputing)
            {
                return;
            }


            thisobj.isComputing = true;

            //---frame image's keypoints and descriptor
            KeyPoint[] f_keypoints;
            Mat        f_descriptor;

            Mat imgOrig;            //camera frame (original)
            Mat img;                //camera frame (grayscale)

            //Convert to GrayScale Mat
            imgOrig = Cv2.CvArrToMat(imgFromCam);
            img     = new Mat();
            Cv2.CvtColor(imgOrig, img, ColorConversion.BgrToGray);



            //---------------------1. keypoint & descriptor retrieval
            f_descriptor = new Mat();

            f_keypoints = thisobj.surfobj.Detect(img);                      //SURF keypoints
            thisobj.surfobj.Compute(img, ref f_keypoints, f_descriptor);    //SURF descriptors



            //---------------------2. descriptor matching
            DMatch[] matches;

            try
            {
                matches = thisobj.fm.Match(thisobj.t_descriptor, f_descriptor);          //MATCHING

                //matching errors are caught here
            }
            catch
            {
                // reset the flag so later frames are not blocked by a failed match
                thisobj.isComputing = false;
                return;
            }


            //record the min/max descriptor distances for choosing good matches
            double max_dist = 0;
            double min_dist = 100;

            for (int i = 0; i < matches.Length; i++)
            {
                double dist = matches[i].Distance;

                if (dist < min_dist)
                {
                    min_dist = dist;
                }
                if (dist > max_dist)
                {
                    max_dist = dist;
                }
            }



            //---------------------3. calculating goodMatches
            List <DMatch> good_matches = new List <DMatch>();

            for (int i = 0; i < matches.Length; i++)
            {
                if (matches[i].Distance < 3 * min_dist)
                {
                    good_matches.Add(matches[i]);
                }
            }

            /*
             * KeyPoint[] goodkey = new KeyPoint[good_matches.Count];
             * for(int goodidx = 0; goodidx < good_matches.Count; goodidx++)
             * {
             *  goodkey[goodidx] = new KeyPoint((f_keypoints[good_matches.ElementAt(goodidx).TrainIdx].Pt.X), (f_keypoints[good_matches.ElementAt(goodidx).TrainIdx].Pt.Y), f_keypoints[good_matches.ElementAt(goodidx).TrainIdx].Size);
             * }
             */


            //From the good matches, collect the corresponding keypoints in the target and frame images
            Point2d[] target_lo = new Point2d[good_matches.Count];
            Point2d[] frame_lo  = new Point2d[good_matches.Count];


            for (int i = 0; i < good_matches.Count; i++)
            {
                target_lo[i] = new Point2d(thisobj.t_keypoints[good_matches.ElementAt(i).QueryIdx].Pt.X,
                                           thisobj.t_keypoints[good_matches.ElementAt(i).QueryIdx].Pt.Y);
                frame_lo[i] = new Point2d(f_keypoints[good_matches.ElementAt(i).TrainIdx].Pt.X,
                                          f_keypoints[good_matches.ElementAt(i).TrainIdx].Pt.Y);
            }


            //Homography for RANSAC
            Mat hom = new Mat();


            //-------------------------------4. RANSAC
            hom = Cv2.FindHomography(target_lo, frame_lo, HomographyMethod.Ransac);

            Point2d[] frame_corners;
            frame_corners = Cv2.PerspectiveTransform(thisobj.obj_corners, hom);



            //Mat -> iplimage
            //IplImage returnimg = (IplImage)imgOrig;

            mainform.setDetectionRec((int)frame_corners[0].X, (int)frame_corners[0].Y,
                                     (int)frame_corners[1].X, (int)frame_corners[1].Y,
                                     (int)frame_corners[2].X, (int)frame_corners[2].Y,
                                     (int)frame_corners[3].X, (int)frame_corners[3].Y);

            mainform.isComputing = false;
            thisobj.isComputing  = false;

            //Cv2.DrawKeypoints(imgOrig, goodkey, imgOrig);
            //Cv2.DrawKeypoints(img, f_keypoints, img);
            //Cv2.ImWrite("temtem.png", img);

            return;
        }
Example #15
        // Frame rendering handler
        private void videoRendering(object sender, NewFrameEventArgs eventArgs)
        {
            Bitmap img = (Bitmap)eventArgs.Frame.Clone();

            // Debug.WriteLine(DateTime.Now + ":" + "frame update");
            // Debug.WriteLine(mode);

            try
            {
                //pictureBoxCamera.Image = img;

                temp = BitmapConverter.ToMat(img);//image to compare against

                //Detect keypoints and compute descriptor vectors
                akaze.DetectAndCompute(temp, null, out key_point2, descriptor2);

                //Draw image 2's keypoints into output2
                Cv2.DrawKeypoints(temp, key_point2, output2);
                //Cv2.ImShow("output2", output2);

                pictureBoxCamera.Image = BitmapConverter.ToBitmap(output2);

                matcher = DescriptorMatcher.Create("BruteForce");
                matches = matcher.Match(descriptor1, descriptor2);

                //Count the matches whose distance is below the threshold
                good_match_length = 0;//reset each frame so the count does not accumulate
                for (int i = 0; i < key_point1.Length && i < key_point2.Length; ++i)
                {
                    if (matches[i].Distance < threshold)
                    {
                        ++good_match_length;
                    }
                }

                DMatch[] good_matches = new DMatch[good_match_length];//sized by the below-threshold count

                //Fill good_matches
                int j = 0;
                for (int i = 0; i < key_point1.Length && i < key_point2.Length; ++i)
                {
                    if (matches[i].Distance < threshold)
                    {
                        good_matches[j] = matches[i];
                        ++j;
                    }
                }

                //Debug-print the number of good matches
                Debug.WriteLine(j);
                Invoke((MethodInvoker) delegate()
                {
                    labelMatch.Text = j.ToString();
                });

                //If enough similar points are found, trigger the emergency stop depending on the checkbox state
                if (j >= 16)
                {
                    //Emergency stop
                    if (checkBoxStop.Checked == true)
                    {
                        //WebRequest request = WebRequest.Create("https://maker.ifttt.com/trigger/raspberry/with/key/gHPH_xDKR664IVIr2YtRRj6BbQoQi-K0mCowIJCGPF3");
                        //WebResponse response = request.GetResponse();
                    }

                    //Alert sound
                    if (checkBoxAlert.Checked == true)
                    {
                        // _mediaPlayer.settings.volume = 20;
                        _mediaPlayer.URL = @"D:\DCIM\app\AkazeAlert\PcCameraApp\Resources\decision1.mp3";
                        _mediaPlayer.controls.play();
                    }
                }

                Cv2.DrawMatches(mat, key_point1, temp, key_point2, good_matches, output3);
                //Cv2.ImShow("output3", output3);

                pictureBoxResult.Image = BitmapConverter.ToBitmap(output3);
            }
            catch
            {
                pictureBoxCamera.Image = img;
            }
        }
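
The fields used above (akaze, mat, key_point1, descriptor1, descriptor2, output2, output3, threshold) are initialized elsewhere in the form; a plausible setup sketch, assuming OpenCvSharp (the reference image path is a placeholder):

            akaze       = AKAZE.Create();
            mat         = Cv2.ImRead("reference.png");       // reference image to compare frames against
            descriptor1 = new Mat();
            descriptor2 = new Mat();
            output2     = new Mat();
            output3     = new Mat();
            akaze.DetectAndCompute(mat, null, out key_point1, descriptor1);
            threshold   = 50;                                // Hamming-distance cutoff for AKAZE's binary descriptors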
Example #16
        /// <summary>
        /// Image matching
        /// </summary>
        /// <param name="templateImage">template image</param>
        /// <param name="originalImage">source image</param>
        /// <param name="nndrRatio">distance-ratio threshold, typically 0.5</param>
        public static void matchImage(Mat templateImage, Mat originalImage, float nndrRatio)
        {
            DateTime start = DateTime.Now;
            //Use SURF as the feature detector
            SURF surf = new SURF();


            //Detect keypoints in the template image
            KeyPoint[] templateKeyPoints = surf.Detect(templateImage);
            //Compute descriptors for the template image
            //Mat templateDescriptors = new Mat(templateImage.Rows, templateImage.Cols, templateImage.Type());
            Mat templateDescriptors = new Mat();

            surf.Compute(templateImage, ref templateKeyPoints, templateDescriptors);



            //Detect keypoints in the source image
            KeyPoint[] originalKeyPoints = surf.Detect(originalImage);
            //Compute descriptors for the source image
            Mat originalDescriptors = new Mat();

            surf.Compute(originalImage, ref originalKeyPoints, originalDescriptors);


            //Start matching
            DescriptorMatcher descriptorMatcher = DescriptorMatcher.Create("FlannBased");//or another matcher type

            /**
             * knnMatch finds the best matches within a given descriptor set.
             * With K = 2, each query descriptor gets its two nearest neighbors; the nearest
             * one is accepted as a final match only when it is sufficiently closer than the
             * second-nearest (Lowe's ratio test).
             */
            DMatch[][]    matches         = descriptorMatcher.KnnMatch(templateDescriptors, originalDescriptors, 2);
            List <DMatch> goodMatchesList = new List <DMatch>();

            foreach (DMatch[] match in matches)
            {
                DMatch m1 = match[0];
                DMatch m2 = match[1];
                if (m1.Distance <= m2.Distance * nndrRatio)
                {
                    goodMatchesList.Add(m1);
                }
            }
            //If at least 4 good matches remain, the template is considered present in the source image; this threshold can be tuned
            if (goodMatchesList.Count >= 4)
            {
                //Console.WriteLine("Template matched in the source image!");
                List <KeyPoint> templateKeyPointList = templateKeyPoints.ToList();
                List <KeyPoint> originalKeyPointList = originalKeyPoints.ToList();
                List <Point2f>  objectPoints         = new List <Point2f>();
                List <Point2f>  scenePoints          = new List <Point2f>();
                foreach (DMatch goodMatch in goodMatchesList)
                {
                    objectPoints.Add(templateKeyPointList[goodMatch.QueryIdx].Pt);
                    scenePoints.Add(originalKeyPointList[goodMatch.TrainIdx].Pt);
                }
                MatOfPoint2f objMatOfPoint2f = new MatOfPoint2f();
                foreach (Point2f p in objectPoints)
                {
                    objMatOfPoint2f.Add(p);
                }

                MatOfPoint2f scnMatOfPoint2f = new MatOfPoint2f();
                foreach (Point2f p in scenePoints)
                {
                    scnMatOfPoint2f.Add(p);
                }
                //Use findHomography to estimate the transform between the matched keypoints
                Mat homography = Cv2.FindHomography(objMatOfPoint2f, scnMatOfPoint2f, OpenCvSharp.HomographyMethod.Ransac, 3);

                /**
                 * A perspective transformation projects an image onto a new viewing plane,
                 * also known as a projective mapping.
                 */
                Mat templateCorners         = new Mat(4, 1, MatType.CV_32FC2);
                Mat templateTransformResult = new Mat(4, 1, MatType.CV_32FC2);
                templateCorners.Set <Point2f>(0, 0, new Point2f(0, 0));
                templateCorners.Set <Point2f>(1, 0, new Point2f(templateImage.Cols, 0));
                templateCorners.Set <Point2f>(2, 0, new Point2f(templateImage.Cols, templateImage.Rows));
                templateCorners.Set <Point2f>(3, 0, new Point2f(0, templateImage.Rows));

                //Use perspectiveTransform to map the template corners into the source image
                Cv2.PerspectiveTransform(templateCorners, templateTransformResult, homography);

                //Four corners of the matched rectangle
                Point2f pointA = templateTransformResult.Get <Point2f>(0, 0);
                Point2f pointB = templateTransformResult.Get <Point2f>(1, 0);
                Point2f pointC = templateTransformResult.Get <Point2f>(2, 0);
                Point2f pointD = templateTransformResult.Get <Point2f>(3, 0);

                //Outline the matched region with four lines
                Cv2.Line(originalImage, pointA, pointB, new Scalar(0, 255, 0), 1); //top    A->B
                Cv2.Line(originalImage, pointB, pointC, new Scalar(0, 255, 0), 1); //right  B->C
                Cv2.Line(originalImage, pointC, pointD, new Scalar(0, 255, 0), 1); //bottom C->D
                Cv2.Line(originalImage, pointD, pointA, new Scalar(0, 255, 0), 1); //left   D->A

                Cv2.PutText(originalImage, "time:" + DateTime.Now.Subtract(start).TotalMilliseconds + "ms", new Point(10, originalImage.Height - 10), FontFace.HersheySimplex, 0.5, new Scalar(255, 255, 255));
                Cv2.ImWrite(@"C:\Users\Administrator\Desktop\result.jpg", originalImage);
            }
        }
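
A hypothetical call (the paths are placeholders; the result is written to the hard-coded result.jpg path above):

            Mat template = Cv2.ImRead("template.jpg");
            Mat scene    = Cv2.ImRead("scene.jpg");
            matchImage(template, scene, 0.5f);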
Example #17
        // http://docs.opencv.org/3.0-beta/modules/features2d/doc/features2d.html
        // http://docs.opencv.org/3.0-beta/modules/features2d/doc/feature_detection_and_description.html
        // http://docs.opencv.org/3.0-beta/doc/tutorials/features2d/akaze_matching/akaze_matching.html
        /// <summary>
        ///  Compare images with a feature detection algorithm
        /// </summary>
        /// <param name="mat_image1"> 1st image (OpenCv Mat)</param>
        /// <param name="mat_image2"> 2nd image (OpenCv Mat)</param>
        /// <param name="feature_count">number of feature keypoints found</param>
        /// <param name="match_count">number of matches founds</param>
        /// <param name="view">image of the feature and good matches</param>
        /// <returns>Similarity % (#good matches/ # matches)</returns>
        private static double CompareFeatures(Mat mat_image1, Mat mat_image2, out double feature_count, out double match_count, out Bitmap view)
        {
            match_count   = 0;
            feature_count = 0;

            int nmatch  = 0;
            int ngmatch = 0;

            view = new Bitmap(1, 1);

            // stop here if one of the images is not valid
            if (mat_image1 == null)
            {
                return(0);
            }
            if (mat_image1.Empty())
            {
                return(0);
            }
            if (mat_image2 == null)
            {
                return(0);
            }
            if (mat_image2.Empty())
            {
                return(0);
            }

            try
            {
                // Detect the keypoints and generate their descriptors

                var detector = AKAZE.Create();
                //var detector = BRISK.Create();
                //var detector = ORB.Create(); // require grayscale

                /*
                 * // grayscale
                 * Cv2.CvtColor(mat_image1, mat_image1, ColorConversionCodes.BGR2GRAY);
                 * Cv2.CvtColor(mat_image2, mat_image2, ColorConversionCodes.BGR2GRAY);
                 * mat_image1.EqualizeHist();
                 * mat_image2.EqualizeHist();
                 */

                var descriptors1 = new MatOfFloat();
                var descriptors2 = new MatOfFloat();
                var keypoints1   = new KeyPoint[1];
                var keypoints2   = new KeyPoint[1];
                try
                {
                    keypoints1 = detector.Detect(mat_image1);
                    keypoints2 = detector.Detect(mat_image2);
                    if (keypoints1 != null)
                    {
                        detector.Compute(mat_image1, ref keypoints1, descriptors1);
                        if (descriptors1 == null)
                        {
                            return(0);
                        }
                    }
                    if (keypoints2 != null)
                    {
                        detector.Compute(mat_image2, ref keypoints2, descriptors2);
                        if (descriptors2 == null)
                        {
                            return(0);
                        }
                    }
                }
                catch (System.AccessViolationException) { }
                catch (Exception) { }

                // Find good matches  (Nearest neighbor matching ratio)
                float nn_match_ratio = 0.95f;

                var matcher    = new BFMatcher(NormTypes.Hamming);
                var nn_matches = new DMatch[1][];
                try
                {
                    nn_matches = matcher.KnnMatch(descriptors1, descriptors2, 2);
                }
                catch (System.AccessViolationException) { }
                catch (Exception) { }

                var good_matches = new List <DMatch>();
                var matched1     = new List <KeyPoint>();
                var matched2     = new List <KeyPoint>();
                var inliers1     = new List <KeyPoint>();
                var inliers2     = new List <KeyPoint>();

                if (nn_matches != null && nn_matches.Length > 0)
                {
                    for (int i = 0; i < nn_matches.GetLength(0); i++)
                    {
                        if (nn_matches[i].Length >= 2)
                        {
                            DMatch first = nn_matches[i][0];
                            float  dist1 = nn_matches[i][0].Distance;
                            float  dist2 = nn_matches[i][1].Distance;

                            if (dist1 < nn_match_ratio * dist2)
                            {
                                good_matches.Add(first);
                                matched1.Add(keypoints1[first.QueryIdx]);
                                matched2.Add(keypoints2[first.TrainIdx]);
                            }
                        }
                    }
                }

                // Count matches & features
                feature_count = keypoints1.Length + keypoints2.Length;
                nmatch        = nn_matches.Length;
                match_count   = nmatch;
                ngmatch       = good_matches.Count;

                // Draw matches view
                var mview = new Mat();

                // show images + good matches
                if (keypoints1.Length > 0 && keypoints2.Length > 0)
                {
                    Cv2.DrawMatches(mat_image1, keypoints1, mat_image2, keypoints2, good_matches.ToArray(), mview);
                    view = BitmapConverter.ToBitmap(mview);
                }
                else
                {
                    // no matches
                    view = new Bitmap(1, 1);
                }
            }
            catch (System.AccessViolationException e)
            {
                Console.Error.WriteLine("Access Error  => CompareFeatures : \n{0}", e.Message);
            }
            catch (Exception)
            {
                // Console.Error.WriteLine("Error  => CompareFeatures : \n{0}", e.Message);
            }

            // similarity = 0 when there were no features or no matches
            if (feature_count <= 0)
            {
                return(0);
            }
            if (nmatch <= 0)
            {
                return(0);
            }

            // similarity = ratio of good matches to total matches
            var similarity = 100.0 * ngmatch / nmatch;

            return(similarity);
        }
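
A short usage sketch for CompareFeatures (the image paths are placeholders):

            Mat m1 = Cv2.ImRead("a.png");
            Mat m2 = Cv2.ImRead("b.png");
            double similarity = CompareFeatures(m1, m2, out double features, out double matches, out Bitmap view);
            Console.WriteLine($"similarity {similarity:F1}% ({matches} matches over {features} keypoints)");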
Example #18
        private void bn_Match_Click(object sender, RoutedEventArgs e)
        {
            if (listImage.Count > 0)
            {
                SubWindow.Win_Matching win = new SubWindow.Win_Matching(listImage);
                if (win.ShowDialog() == true)
                {
                    int    mode     = win.cb_Mode.SelectedIndex;
                    int    idxSrc   = win.cb_Src.SelectedIndex;
                    int    idxTmpl  = win.cb_Tmpl.SelectedIndex;
                    string strTitle = listImage[_nSelWin].Title;
                    Mat    matSrc   = listImage[idxSrc].fn_GetImage();
                    Mat    matTmpl  = listImage[idxTmpl].fn_GetImage();
                    Mat    matDst   = new Mat();
                    int    width    = matSrc.Cols;
                    int    height   = matSrc.Rows;
                    timeStart = DateTime.Now;

                    if (mode == 0)// Template
                    {
                        Mat matResult = new Mat();
                        Cv2.MatchTemplate(matSrc, matTmpl, matResult, TemplateMatchModes.SqDiffNormed);

                        OpenCvSharp.Point matchLoc = new OpenCvSharp.Point();
                        unsafe
                        {
                            float *pData = (float *)matResult.DataPointer;
                            float  fMin  = 1.0f;
                            for (int stepY = 0; stepY < matResult.Rows; stepY++)
                            {
                                for (int stepX = 0; stepX < matResult.Cols; stepX++)
                                {
                                    if (fMin >= pData[stepY * matResult.Cols + stepX])
                                    {
                                        fMin       = pData[stepY * matResult.Cols + stepX];
                                        matchLoc.X = stepX;
                                        matchLoc.Y = stepY;
                                    }
                                }
                            }
                        }


                        matDst = matSrc.Clone();
                        Cv2.CvtColor(matDst, matDst, ColorConversionCodes.GRAY2BGR);

                        Cv2.Rectangle(matDst, new OpenCvSharp.Rect(matchLoc.X, matchLoc.Y, matTmpl.Cols, matTmpl.Rows), new Scalar(0, 255, 0));
                    }
                    else if (mode == 1)// SIFT
                    {
                        OpenCvSharp.Features2D.SIFT detector = OpenCvSharp.Features2D.SIFT.Create();
                        KeyPoint[] keypoint1, keypoint2;
                        Mat        matDescriptor1 = new Mat();
                        Mat        matDescriptor2 = new Mat();
                        detector.DetectAndCompute(matTmpl, new Mat(), out keypoint1, matDescriptor1);
                        detector.DetectAndCompute(matSrc, new Mat(), out keypoint2, matDescriptor2);
                        BFMatcher matcher  = new BFMatcher();
                        DMatch[]  dMatches = matcher.Match(matDescriptor1, matDescriptor2);
                        if (dMatches.Length > 0)
                        {
                            // Sort by distance so the first GOOD entries are the strongest matches
                            Array.Sort(dMatches, (a, b) => a.Distance.CompareTo(b.Distance));
                            int      GOOD  = Math.Min(50, (int)(dMatches.Length * 0.1));
                            DMatch[] dGood = new DMatch[GOOD];
                            for (int step = 0; step < GOOD; step++)
                            {
                                dGood[step] = dMatches[step];
                            }

                            Cv2.DrawMatches(matTmpl, keypoint1, matSrc, keypoint2, dGood, matDst, Scalar.All(-1), Scalar.All(-1), new List <byte>(), DrawMatchesFlags.NotDrawSinglePoints);
                        }
                    }
                    else if (mode == 2)// SURF
                    {
                        OpenCvSharp.XFeatures2D.SURF detector = OpenCvSharp.XFeatures2D.SURF.Create(800);
                        KeyPoint[] keypoint1, keypoint2;
                        Mat        matDescriptor1 = new Mat();
                        Mat        matDescriptor2 = new Mat();
                        detector.DetectAndCompute(matTmpl, new Mat(), out keypoint1, matDescriptor1);
                        detector.DetectAndCompute(matSrc, new Mat(), out keypoint2, matDescriptor2);
                        BFMatcher matcher  = new BFMatcher();
                        DMatch[]  dMatches = matcher.Match(matDescriptor1, matDescriptor2);
                        if (dMatches.Length > 0)
                        {
                            // Sort by distance so the first GOOD entries are the strongest matches
                            Array.Sort(dMatches, (a, b) => a.Distance.CompareTo(b.Distance));
                            int      GOOD  = Math.Min(50, (int)(dMatches.Length * 0.1));
                            DMatch[] dGood = new DMatch[GOOD];
                            for (int step = 0; step < GOOD; step++)
                            {
                                dGood[step] = dMatches[step];
                            }

                            Cv2.DrawMatches(matTmpl, keypoint1, matSrc, keypoint2, dGood, matDst, Scalar.All(-1), Scalar.All(-1), new List <byte>(), DrawMatchesFlags.NotDrawSinglePoints);
                        }
                    }

                    fn_WriteLog($"[Matching] {strTitle} ({(DateTime.Now - timeStart).TotalMilliseconds} ms)");
                    fn_NewImage(matDst, $"Matching {mode}");
                }
            }
        }
Example #19
 public static extern ExceptionStatus core_FileStorage_shift_DMatch(IntPtr fs, DMatch val);
Example #20
        public Mat Stitch()
        {
            Mat src1Gray = new Mat();
            Mat src2Gray = new Mat();

            Cv2.CvtColor(src1Color, src1Gray, ColorConversionCodes.BGR2GRAY);
            Cv2.CvtColor(src2Color, src2Gray, ColorConversionCodes.BGR2GRAY);

            // Setting hyperparameters
            int numBestMatch = 10;

            // Detect the keypoints and generate their descriptors using SIFT
            SIFT sift = SIFT.Create();

            KeyPoint[] keypoints1, keypoints2;
            MatOfFloat descriptors1 = new MatOfFloat();
            MatOfFloat descriptors2 = new MatOfFloat();

            sift.DetectAndCompute(src1Gray, null, out keypoints1, descriptors1);
            sift.DetectAndCompute(src2Gray, null, out keypoints2, descriptors2);

            // Matching descriptor vectors with a brute force matcher
            BFMatcher matcher = new BFMatcher();

            DMatch[] matches = matcher.Match(descriptors1, descriptors2);

            // Sort the matches by ascending distance
            Comparison <DMatch> DMatchComparison = (match1, match2) => match1.Distance.CompareTo(match2.Distance);

            Array.Sort(matches, DMatchComparison);

            // Get the best n match points
            int n = Math.Min(numBestMatch, matches.Length);

            Point2f[] imagePoints1 = new Point2f[n];
            Point2f[] imagePoints2 = new Point2f[n];
            DMatch[]  bestMatches  = new DMatch[n];
            for (int i = 0; i < n; i++)
            {
                imagePoints1[i] = keypoints1[matches[i].QueryIdx].Pt;
                imagePoints2[i] = keypoints2[matches[i].TrainIdx].Pt;
                bestMatches[i]  = matches[i];
            }

            // visualize the match result
            Mat matchImg = new Mat();

            Cv2.DrawMatches(src1Color, keypoints1, src2Color, keypoints2, bestMatches, matchImg, Scalar.All(-1), Scalar.All(-1), null, DrawMatchesFlags.NotDrawSinglePoints);
            using (new OpenCvSharp.Window("SIFT matching", WindowMode.AutoSize, matchImg))
            {
                Cv2.WaitKey();
            }

            // Get the homography matrix that represents the transformation.
            // It is 3x3 and can represent any projective transformation of the 2-D plane.
            Mat homo = Cv2.FindHomography(InputArray.Create <Point2f>(imagePoints2), InputArray.Create <Point2f>(imagePoints1));

            // calculate the transformed locations of the second image's corners
            // and use them to size the result image
            Point2f[] transfromedConors = transfromConors(src2Color.Size(), homo);

            // make sure the result image is large enough
            double maxWidth  = src1Color.Width;
            double maxHeight = src1Color.Height;

            for (int i = 0; i < 4; i++)
            {
                if (transfromedConors[i].X > maxWidth)
                {
                    maxWidth = transfromedConors[i].X;
                }
                if (transfromedConors[i].Y > maxHeight)
                {
                    maxHeight = transfromedConors[i].Y;
                }
            }
            OpenCvSharp.Size resultSize = new OpenCvSharp.Size(maxWidth, maxHeight);

            // the position that the first image should be copied to in the final result
            int src1StartPositonY = 0;
            int src1StartPositonX = 0;

            // if still some X coordinate is less than 0, do shift operation along x-axis
            bool   shouldShiftX   = false;
            double shiftDistanceX = double.MinValue;

            for (int i = 0; i < 4; i++)
            {
                if (transfromedConors[i].X < 0)
                {
                    shouldShiftX   = true;
                    shiftDistanceX = Math.Max(shiftDistanceX, -transfromedConors[i].X);
                }
            }
            if (shouldShiftX)
            {
                /*
                 * matrix for shifting along the x-axis
                 * 1 0 d
                 * 0 1 0
                 * 0 0 1
                 */
                Mat shiftMatrix = new Mat(3, 3, homo.Type());
                shiftMatrix.Set <double>(0, 0, 1);
                shiftMatrix.Set <double>(0, 1, 0);
                shiftMatrix.Set <double>(0, 2, shiftDistanceX);
                shiftMatrix.Set <double>(1, 0, 0);
                shiftMatrix.Set <double>(1, 1, 1);
                shiftMatrix.Set <double>(1, 2, 0);
                shiftMatrix.Set <double>(2, 0, 0);
                shiftMatrix.Set <double>(2, 1, 0);
                shiftMatrix.Set <double>(2, 2, 1);
                homo              = shiftMatrix * homo;
                resultSize.Width  = resultSize.Width + (int)shiftDistanceX;
                src1StartPositonX = (int)shiftDistanceX;
            }

            // if still some Y coordinate is less than 0, do shift operation along y-axis
            bool   shouldShiftY   = false;
            double shiftDistanceY = double.MinValue;

            for (int i = 0; i < 4; i++)
            {
                if (transfromedConors[i].Y < 0)
                {
                    shouldShiftY   = true;
                    shiftDistanceY = Math.Max(shiftDistanceY, -transfromedConors[i].Y);
                }
            }
            if (shouldShiftY)
            {
                /*
                 * matrix for shifting along the y-axis
                 * 1 0 0
                 * 0 1 d
                 * 0 0 1
                 */
                Mat shiftMatrix = new Mat(3, 3, homo.Type());
                shiftMatrix.Set <double>(0, 0, 1);
                shiftMatrix.Set <double>(0, 1, 0);
                shiftMatrix.Set <double>(0, 2, 0);
                shiftMatrix.Set <double>(1, 0, 0);
                shiftMatrix.Set <double>(1, 1, 1);
                shiftMatrix.Set <double>(1, 2, shiftDistanceY);
                shiftMatrix.Set <double>(2, 0, 0);
                shiftMatrix.Set <double>(2, 1, 0);
                shiftMatrix.Set <double>(2, 2, 1);
                homo = shiftMatrix * homo;
                resultSize.Height = resultSize.Height + (int)shiftDistanceY;
                src1StartPositonY = (int)shiftDistanceY;
            }

            Mat result = new Mat();

            Cv2.WarpPerspective(src2Color, result, homo, resultSize);
            src1Color.CopyTo(new Mat(result, new OpenCvSharp.Rect(src1StartPositonX, src1StartPositonY, src1Gray.Cols, src1Gray.Rows)));

            return(result);
        }
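
The helper transfromConors referenced above is not shown; a plausible reconstruction (an assumption, not the original code) that maps the four image corners through the homography:

        private Point2f[] transfromConors(OpenCvSharp.Size size, Mat homo)
        {
            // Corners in source order: top-left, top-right, bottom-right, bottom-left
            Point2f[] corners =
            {
                new Point2f(0, 0),
                new Point2f(size.Width, 0),
                new Point2f(size.Width, size.Height),
                new Point2f(0, size.Height),
            };
            return Cv2.PerspectiveTransform(corners, homo);   // hypothetical reconstruction
        }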