Example #1
        // Detect SIFT features inside a fixed 70x70 crop of every image under the
        // two party folders, collecting their descriptors and keypoint overlays.
        public void ShowKeyPoints()
        {
            lstMat.Clear();
            lstModelDescriptors.Clear();
            var featureDetector = new SIFT();

            Emgu.CV.Flann.LinearIndexParams ip = new Emgu.CV.Flann.LinearIndexParams();
            Emgu.CV.Flann.SearchParams      sp = new SearchParams();
            DescriptorMatcher matcher          = new FlannBasedMatcher(ip, sp);
            Rectangle         cropRect         = new Rectangle(842, 646, 70, 70);
            // Detection mask for the 70x70 crop: keep everything (white) except a
            // 22px-radius circle around (35, 37), which is blanked out (black).
            Mat mask = new Mat(new Size(70, 70), DepthType.Cv8U, 1);

            CvInvoke.Rectangle(mask, new Rectangle(0, 0, 70, 70), new MCvScalar(255, 255, 255), -1);
            CvInvoke.Circle(mask, new Point(35, 37), 22, new MCvScalar(0, 0, 0), -1);


            lstMat.Add(mask);
            String[] folders = { @"Linage2\Main\PartyAuto", @"Linage2\Main\PartyManual" };
            foreach (String folder in folders)
            {
                DirectoryInfo imageFolder = new DirectoryInfo(folder);
                FileInfo[]    files       = Utils.GetFilesByExtensions(imageFolder, ".jpg", ".png").ToArray();
                foreach (FileInfo finfo in files)
                {
                    Mat img  = CvInvoke.Imread(finfo.FullName, ImreadModes.Color);
                    Mat crop = CVUtil.crop_color_frame(img, cropRect);
                    //lstMat.Add(crop);
                    VectorOfKeyPoint modelKeyPoints = new VectorOfKeyPoint();
                    Mat modelDescriptors            = new Mat();
                    featureDetector.DetectAndCompute(crop, mask, modelKeyPoints, modelDescriptors, false);
                    lstModelDescriptors.Add(modelDescriptors);
                    Mat result = new Mat();
                    Features2DToolbox.DrawKeypoints(crop, modelKeyPoints, result, new Bgr(Color.Red));

                    lstMat.Add(result);
                    //BOWImgDescriptorExtractor bow = new BOWImgDescriptorExtractor(featureDetector, matcher);
                }
            }


            /*BOWKMeansTrainer bowtrainer = new BOWKMeansTrainer(1000, new MCvTermCriteria(10, 0.001), 1, Emgu.CV.CvEnum.KMeansInitType.PPCenters);
             * foreach (Mat m in lstModelDescriptors) {
             *  bowtrainer.Add(m);
             * }
             * Mat dict = bowtrainer.Cluster();
             * StringBuilder sb = new StringBuilder();
             * Image<Bgr, Byte> imgsave = dict.ToImage<Bgr, Byte>();
             *
             * (new XmlSerializer(typeof(Image<Bgr, Byte>))).Serialize(new StringWriter(sb), imgsave);
             * Console.WriteLine(sb.ToString());*/
        }
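The commented-out block at the end hints at bag-of-words training on the collected descriptors. A minimal sketch of completing that step is shown below; it assumes lstModelDescriptors has already been filled by ShowKeyPoints, and the types come from Emgu.CV, Emgu.CV.Features2D, Emgu.CV.Flann and Emgu.CV.Structure.

        // Sketch only: cluster the gathered SIFT descriptors into a 1000-word visual
        // vocabulary, mirroring the commented-out BOWKMeansTrainer code above.
        public Mat BuildVocabulary()
        {
            BOWKMeansTrainer bowTrainer = new BOWKMeansTrainer(
                1000, new MCvTermCriteria(10, 0.001), 1, Emgu.CV.CvEnum.KMeansInitType.PPCenters);
            foreach (Mat descriptors in lstModelDescriptors)
            {
                bowTrainer.Add(descriptors);
            }
            Mat vocabulary = bowTrainer.Cluster();   // one row per visual word

            // A BOWImgDescriptorExtractor built on the same detector/matcher pair can
            // then turn any image's keypoints into a histogram over these words.
            var bowExtractor = new BOWImgDescriptorExtractor(
                new SIFT(), new FlannBasedMatcher(new Emgu.CV.Flann.KdTreeIndexParams(), new SearchParams()));
            bowExtractor.SetVocabulary(vocabulary);
            return vocabulary;
        }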
Example #2
            // Project the bounding rectangle of the matched model keypoints through the
            // homography and return the resulting quadrilateral corners.
            public Point[] GetMatchBoundingPoint()
            {
                Rectangle rect = CVUtil.GetRect(matchFeatureData.keyPoints);

                PointF[] src =
                {
                    new PointF(rect.X,                  rect.Y),                   new PointF(rect.X,                  rect.Y + rect.Height - 1),
                    new PointF(rect.X + rect.Width - 1, rect.Y + rect.Height - 1), new PointF(rect.X + rect.Width - 1, rect.Y)
                };
                PointF[] points = CvInvoke.PerspectiveTransform(src, homography);
                foreach (var p in points)
                {
                    Console.WriteLine(p.ToString());
                }
                Point[] ap = Array.ConvertAll(points,
                                              new Converter <PointF, Point>(CVUtil.PointFToPoint));
                return(ap);
            }
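CVUtil.PointFToPoint itself is not shown in these examples; assuming it simply rounds each coordinate, a plausible one-line implementation would be:

            // Hypothetical helper matching the Converter<PointF, Point> usage above.
            public static Point PointFToPoint(PointF p)
            {
                return Point.Round(p);   // System.Drawing rounding of both coordinates
            }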
Example #3
        // Optionally isolate one colour channel, crop the frame, and (when limits are
        // set) threshold the crop with InRange.
        public Mat ProcessImage(Mat img)
        {
            if (colorIndex != -1)
            {
                img = img.Split()[colorIndex];
            }

            //CvInvoke.InRange(img, lowerLimit, upperLimit, imgOut);
            img = CVUtil.crop_color_frame(img, cropRect);
            Mat ret = new Mat();

            if (lower != null)
            {
                CvInvoke.InRange(img, lower, upper, ret);
            }
            else
            {
                ret = img;
            }
            return(ret);
        }
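ProcessImage depends on instance fields (colorIndex, cropRect, lower, upper) that are configured elsewhere; a rough calling sketch, borrowing the file path from Example #4, might look like this.

        // Rough usage sketch; assumes the enclosing object already has colorIndex,
        // cropRect, lower and upper set up.
        Mat frame    = CvInvoke.Imread(@"Linage2\Main\PartyAuto\2e35av2fwbk.png", ImreadModes.Color);
        Mat filtered = ProcessImage(frame);   // crop, optional channel split, optional InRange
        CvInvoke.Imshow("filtered", filtered);
        CvInvoke.WaitKey(0);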
Example #4
        // Self-test: cut a 70x70 model patch out of a screenshot, match it back against
        // the full screenshot with SIFT + FLANN, and outline the recovered region.
        private void testToolStripMenuItem_Click(object sender, EventArgs e)
        {
            lstMat.Clear();
            VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch();
            Mat testImage  = CvInvoke.Imread(@"Linage2\Main\PartyAuto\2e35av2fwbk.png", ImreadModes.Color);
            Mat modelImage = CVUtil.crop_color_frame(testImage, new Rectangle(842, 646, 70, 70));

            log(modelImage.ToString());
            Image<Bgr, Byte> img = modelImage.ToImage<Bgr, Byte>();

            img.ROI = new Rectangle(0, 0, 35, 35);   // restrict the Image<,> view to its top-left 35x35 region

            //UMat uModelImage = modelImage.GetUMat(AccessType.Read);
            var featureDetector             = new SIFT();
            Mat modelDescriptors            = new Mat();
            VectorOfKeyPoint modelKeyPoints = new VectorOfKeyPoint();

            VectorOfKeyPoint observedKeyPoints = new VectorOfKeyPoint();

            featureDetector.DetectAndCompute(modelImage, null, modelKeyPoints, modelDescriptors, false);
            log("model size = " + modelKeyPoints.Size);
            Mat observedDescriptors = new Mat();

            featureDetector.DetectAndCompute(testImage, null, observedKeyPoints, observedDescriptors, false);

            int    k = 2;
            double uniquenessThreshold = 0.80;
            Mat    mask;
            Mat    homography = null;

            // Bruteforce, slower but more accurate
            // You can use KDTree for faster matching with slight loss in accuracy
            using (Emgu.CV.Flann.LinearIndexParams ip = new Emgu.CV.Flann.LinearIndexParams())
                using (Emgu.CV.Flann.SearchParams sp = new SearchParams())
                    using (DescriptorMatcher matcher = new FlannBasedMatcher(ip, sp))
                    {
                        matcher.Add(modelDescriptors);

                        matcher.KnnMatch(observedDescriptors, matches, k, null);
                        mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                        mask.SetTo(new MCvScalar(255));
                        Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

                        int nonZeroCount = CvInvoke.CountNonZero(mask);
                        if (nonZeroCount >= 4)
                        {
                            nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints,
                                                                                       matches, mask, 1.5, 20);
                            if (nonZeroCount >= 4)
                            {
                                homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, matches, mask, 2);
                                PointF[] src =
                                {
                                    new PointF(0, 0), new PointF(0, modelImage.Height - 1), new PointF(modelImage.Width - 1, modelImage.Height - 1), new PointF(modelImage.Width - 1, 0)
                                };
                                PointF[] points = CvInvoke.PerspectiveTransform(src, homography);
                                foreach (var p in points)
                                {
                                    Console.WriteLine(p.ToString());
                                }
                                Point[] ap = Array.ConvertAll(points,
                                                              new Converter <PointF, Point>(CVUtil.PointFToPoint));

                                CvInvoke.Polylines(testImage, ap, true, new MCvScalar(255, 0, 0));
                                CvInvoke.Rectangle(testImage, new Rectangle(0, 0, 100, 100), new MCvScalar(255, 255, 0));
                                CvInvoke.Circle(testImage, new Point(100, 100), 50, new MCvScalar(255, 255, 0), -1);
                                lstMat.Add(testImage);
                            }
                            //Mat modelMatches = new Mat();
                            //Features2DToolbox.DrawKeypoints(modelImage, modelKeyPoints, result, new Bgr(Color.Red));
                            //Features2DToolbox.DrawKeypoints(testImage, observedKeyPoints, result, new Bgr(Color.Red));
                            //Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, testImage, observedKeyPoints, matches, modelMatches,
                            //    new MCvScalar(255, 0, 0), new MCvScalar(0, 255, 0));
                            //lstMat.Add(modelMatches);

                            //Mat model1 = new Mat();
                            //Features2DToolbox.DrawKeypoints(modelImage, modelKeyPoints, model1, new Bgr(Color.Red));
                            //lstMat.Add(model1);
                            //modelMatches = crop_color_frame(testImage,new Rectangle(842,646,70,70));
                        }
                    }
            log("Done " + mask.Size);

            Refresh();
        }
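The commented-out DrawMatches lines suggest a side-by-side visualisation of the model and observed keypoints. A minimal sketch, assuming the variables from the method above (modelImage, modelKeyPoints, testImage, observedKeyPoints, matches, mask) are still in scope:

            // Sketch only: draw the matches that survived the uniqueness/orientation voting.
            Mat matchView = new Mat();
            Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, testImage, observedKeyPoints,
                                          matches, matchView,
                                          new MCvScalar(255, 0, 0),   // colour for match lines
                                          new MCvScalar(0, 255, 0),   // colour for single keypoints
                                          mask);                      // restrict drawing to voted matches
            lstMat.Add(matchView);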
Example #5
        // Match SIFT features of the processed bitmap against every stored
        // SimpleFeatureData entry; return the winning label and homography, or null
        // when too few unique matches survive.
        public FeatureResult GetFeature(Bitmap bmpSrc)
        {
            Mat matTest = CVUtil.BitmapToMat(bmpSrc);

            matTest = ProcessImage(matTest);

            /*Bitmap bmp = Utils.cropImage(bmpSrc, cropRect);
             * Mat matTest = CVUtil.BitmapToMat(bmp);
             * if (colorIndex != -1)
             * {
             *  matTest = matTest.Split()[colorIndex];
             * }*/
            Mat observedDescriptors            = new Mat();
            VectorOfKeyPoint observedKeyPoints = new VectorOfKeyPoint();

            featureDetector.DetectAndCompute(matTest, mask, observedKeyPoints, observedDescriptors, false);
            int    k = 2;
            double uniquenessThreshold = 0.80;

            //Mat homography = null;
            // Bruteforce, slower but more accurate
            // You can use KDTree for faster matching with slight loss in accuracy
            using (Emgu.CV.Flann.KdTreeIndexParams ip = new Emgu.CV.Flann.KdTreeIndexParams())
                using (Emgu.CV.Flann.SearchParams sp = new SearchParams())
                    using (DescriptorMatcher matcher = new FlannBasedMatcher(ip, sp))
                    {
                        VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch();
                        foreach (SimpleFeatureData sd in this)
                        {
                            matcher.Add(sd.descriptors);
                            //break;
                        }

                        matcher.KnnMatch(observedDescriptors, matches, k, null);
                        lastMatches          = matches;
                        lastObserved         = matTest;
                        lastObservedKeyPoint = observedKeyPoints;
                        //Mat mat = new Mat();
                        //Features2DToolbox.DrawKeypoints(matTest, observedKeyPoints, mat, new Bgr(Color.Blue));
                        //FormOpenCV.lstMat.Add(mat);
                        //Console.WriteLine(CVUtil.ToString(observedDescriptors));
                        //Console.WriteLine(CVUtil.ToString(observedKeyPoints));
                        //Console.WriteLine(CVUtil.ToString(matches));
                        //Console.WriteLine(MatchesToString(matches));
                        Mat uniqueMask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                        uniqueMask.SetTo(new MCvScalar(255));
                        Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, uniqueMask);

                        int nonZeroCount = CvInvoke.CountNonZero(uniqueMask);
                        if (nonZeroCount > 4)
                        {
                            //Console.WriteLine(CVUtil.ToString(uniqueMask));
                            String            retLabel = GetLabelFromMatches(matches, uniqueMask);
                            SimpleFeatureData mfd      = lastMatchFeatureData;
                            try
                            {
                                //int nonZeroCount2 = Features2DToolbox.VoteForSizeAndOrientation(mfd.keyPoints, observedKeyPoints, matches, uniqueMask, 1.5, 20);
                                //Console.WriteLine("nonZeroCount2=" + nonZeroCount2);
                                if (nonZeroCount > 4)
                                {
                                    Mat homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(mfd.keyPoints, observedKeyPoints, matches, uniqueMask, 2);

                                    /*Console.WriteLine(CVUtil.ToString(homography));
                                     * Rectangle rect = CVUtil.GetRect(mfd.keyPoints);
                                     * PointF[] src = {
                                     *  new PointF(rect.X,rect.Y),new PointF(rect.X,rect.Y + rect.Height-1),
                                     *  new PointF(rect.X + rect.Width-1,rect.Y + rect.Height-1),new PointF(rect.X + rect.Width-1,rect.Y)
                                     * };
                                     * PointF[] points = CvInvoke.PerspectiveTransform(src, homography);
                                     * foreach (var p in points)
                                     * {
                                     *  Console.WriteLine(p.ToString());
                                     * }
                                     * Point[] ap = Array.ConvertAll(points,
                                     * new Converter<PointF, Point>(CVUtil.PointFToPoint));
                                     * Mat testImage = matTest.Clone();
                                     * CvInvoke.Polylines(testImage, ap, true, new MCvScalar(255, 0, 0));
                                     */
                                    //CvInvoke.Rectangle(testImage, new Rectangle(0, 0, 100, 100), new MCvScalar(255, 255, 0));
                                    //CvInvoke.Circle(testImage, new Point(100, 100), 50, new MCvScalar(255, 255, 0), -1);
                                    //lstMat.Add(testImage);
                                    FeatureResult ret = new FeatureResult();
                                    ret.keyPoint         = observedKeyPoints;
                                    ret.label            = retLabel;
                                    ret.homography       = homography;
                                    ret.matchFeatureData = mfd;
                                    return(ret);
                                }
                            }
                            catch (Exception)
                            {
                                // GetHomographyMatrixFromMatchedFeatures can fail on degenerate matches;
                                // fall through so the method returns null below.
                            }
                        }
                        return(null);
                    }
        }
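A rough calling sketch for GetFeature, assuming featureData is an instance of the enclosing collection class (the `this` that enumerates SimpleFeatureData entries above) and that FeatureResult exposes the GetMatchBoundingPoint method from Example #2:

            // Rough usage sketch; featureData is a hypothetical, already-loaded instance
            // of the collection class that GetFeature belongs to.
            Bitmap screenshot   = new Bitmap(@"Linage2\Main\PartyAuto\2e35av2fwbk.png");
            FeatureResult found = featureData.GetFeature(screenshot);
            if (found != null)
            {
                Console.WriteLine("label = " + found.label);
                foreach (Point corner in found.GetMatchBoundingPoint())   // projected corners, see Example #2
                {
                    Console.WriteLine(corner);
                }
            }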