Example #1
 public static void SaveImageFile(Mat saveMat)
 {
     using (var sfd = new SaveFileDialog()
     {
         FileName = "新しいファイル.jpg",
         Filter = "JPEG ファイル(*.jpg;*.png)|*.jpg;*.png|すべてのファイル(*.*)|*.*",
         FilterIndex = 1,
         RestoreDirectory = true,
     })
     {
         if (sfd.ShowDialog() == DialogResult.OK)
         {
             if (!saveMat.SaveImage(sfd.FileName))
             {
                 Utils.ShowErrorMessage("ファイルを保存できませんでした。");
             }
         }
     }
 }
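
Utils.ShowErrorMessage is not defined in this snippet; a minimal sketch, assuming it simply wraps the standard Windows Forms message box (the snippet already uses SaveFileDialog and DialogResult from System.Windows.Forms), could look like this:

 // Hypothetical helper, assumed to wrap a Windows Forms message box.
 public static class Utils
 {
     public static void ShowErrorMessage(string message)
     {
         MessageBox.Show(message, "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
     }
 }

Any Mat produced earlier in the pipeline can then be passed straight to SaveImageFile.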
Example #2
        public void Run()
        {
            using (var img1 = new Mat(FilePath.Image.SurfBox))
            using (var img2 = new Mat(FilePath.Image.SurfBoxinscene))
            using (var descriptors1 = new Mat())
            using (var descriptors2 = new Mat())
            using (var matcher = new BFMatcher(NormTypes.L2SQR))
            using (var kaze = KAZE.Create())
            {
                KeyPoint[] keypoints1, keypoints2;
                kaze.DetectAndCompute(img1, null, out keypoints1, descriptors1);
                kaze.DetectAndCompute(img2, null, out keypoints2, descriptors2);

                DMatch[][] matches = matcher.KnnMatch(descriptors1, descriptors2, 2);
                using (Mat mask = new Mat(matches.Length, 1, MatType.CV_8U))
                {
                    mask.SetTo(new Scalar(255));
                    // Ratio test: clear the mask rows of ambiguous matches.
                    VoteForUniqueness(matches, mask);
                    // Scale/rotation consistency vote; returns the number of surviving matches.
                    int nonZero = VoteForSizeAndOrientation(keypoints2, keypoints1, matches, mask, 1.5f, 20);

                    List<Point2f> obj = new List<Point2f>();
                    List<Point2f> scene = new List<Point2f>();
                    List<DMatch> goodMatchesList = new List<DMatch>();
                    // Iterate through the mask, pulling out only the non-zero rows (the surviving matches).
                    MatIndexer<byte> maskIndexer = mask.GetGenericIndexer<byte>();
                    for (int i = 0; i < mask.Rows; i++)
                    {
                        if (maskIndexer[i] > 0)
                        {
                            obj.Add(keypoints1[matches[i][0].QueryIdx].Pt);
                            scene.Add(keypoints2[matches[i][0].TrainIdx].Pt);
                            goodMatchesList.Add(matches[i][0]);
                        }
                    }

                    List<Point2d> objPts = obj.ConvertAll(Point2fToPoint2d);
                    List<Point2d> scenePts = scene.ConvertAll(Point2fToPoint2d);
                    if (nonZero >= 4)
                    {
                        Mat homography = Cv2.FindHomography(objPts, scenePts, HomographyMethods.Ransac, 1.5, mask);
                        nonZero = Cv2.CountNonZero(mask);

                        if (homography != null)
                        {
                            Point2f[] objCorners = { new Point2f(0, 0),
                                      new Point2f(img1.Cols, 0),
                                      new Point2f(img1.Cols, img1.Rows),
                                      new Point2f(0, img1.Rows) };

                            Point2d[] sceneCorners = MyPerspectiveTransform3(objCorners, homography);

                            //This is a good concat horizontal
                            using (Mat img3 = new Mat(Math.Max(img1.Height, img2.Height), img2.Width + img1.Width, MatType.CV_8UC3))
                            using (Mat left = new Mat(img3, new Rect(0, 0, img1.Width, img1.Height)))
                            using (Mat right = new Mat(img3, new Rect(img1.Width, 0, img2.Width, img2.Height)))
                            {
                                img1.CopyTo(left);
                                img2.CopyTo(right);

                                byte[] maskBytes = new byte[mask.Rows * mask.Cols];
                                mask.GetArray(0, 0, maskBytes);
                                Cv2.DrawMatches(img1, keypoints1, img2, keypoints2, goodMatchesList, img3, Scalar.All(-1), Scalar.All(-1), maskBytes, DrawMatchesFlags.NotDrawSinglePoints);

                                List<List<Point>> listOfListOfPoint2D = new List<List<Point>>();
                                List<Point> listOfPoint2D = new List<Point>();
                                listOfPoint2D.Add(new Point(sceneCorners[0].X + img1.Cols, sceneCorners[0].Y));
                                listOfPoint2D.Add(new Point(sceneCorners[1].X + img1.Cols, sceneCorners[1].Y));
                                listOfPoint2D.Add(new Point(sceneCorners[2].X + img1.Cols, sceneCorners[2].Y));
                                listOfPoint2D.Add(new Point(sceneCorners[3].X + img1.Cols, sceneCorners[3].Y));
                                listOfListOfPoint2D.Add(listOfPoint2D);
                                img3.Polylines(listOfListOfPoint2D, true, Scalar.LimeGreen, 2);

                                //This works too
                                //Cv2.Line(img3, scene_corners[0] + new Point2d(img1.Cols, 0), scene_corners[1] + new Point2d(img1.Cols, 0), Scalar.LimeGreen);
                                //Cv2.Line(img3, scene_corners[1] + new Point2d(img1.Cols, 0), scene_corners[2] + new Point2d(img1.Cols, 0), Scalar.LimeGreen);
                                //Cv2.Line(img3, scene_corners[2] + new Point2d(img1.Cols, 0), scene_corners[3] + new Point2d(img1.Cols, 0), Scalar.LimeGreen);
                                //Cv2.Line(img3, scene_corners[3] + new Point2d(img1.Cols, 0), scene_corners[0] + new Point2d(img1.Cols, 0), Scalar.LimeGreen);

                                img3.SaveImage("Kaze_Output.png");
                                Window.ShowImages(img3);
                            }
                        }
                    }
                }
            }
        }
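
        // The helpers below are referenced by Run() but not shown in this snippet.
        // These are minimal sketches of plausible implementations, not the originals:
        // VoteForUniqueness applies Lowe's ratio test, Point2fToPoint2d converts point
        // types, and MyPerspectiveTransform3 projects the object corners with the
        // homography. VoteForSizeAndOrientation (a histogram vote over keypoint scale
        // ratios and rotation differences that returns the surviving match count) is
        // not sketched here.

        private static void VoteForUniqueness(DMatch[][] matches, Mat mask, float uniquenessThreshold = 0.80f) // threshold is an assumed default
        {
            var indexer = mask.GetGenericIndexer<byte>();
            for (int i = 0; i < matches.Length; i++)
            {
                // Keep a match only when its best distance is clearly smaller than the
                // second-best distance; otherwise clear the corresponding mask row.
                if (matches[i][0].Distance / matches[i][1].Distance <= uniquenessThreshold)
                    indexer[i] = 255;
                else
                    indexer[i] = 0;
            }
        }

        private static Point2d Point2fToPoint2d(Point2f p)
        {
            return new Point2d(p.X, p.Y);
        }

        private static Point2d[] MyPerspectiveTransform3(Point2f[] corners, Mat homography)
        {
            Point2d[] points = Array.ConvertAll(corners, Point2fToPoint2d);
            return Cv2.PerspectiveTransform(points, homography);
        }
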
        private void LoadVideoFrames()
        {
            if (openCvCapture == null)
            {
                openCvCapture = new OpenCvSharp.VideoCapture(0);
            }
            OpenCvSharp.Mat frame        = new OpenCvSharp.Mat();
            bool            readingVideo = true;
            int             cnt          = 0;

            while (readingVideo)
            {
                if (openCvCapture.Read(frame))
                {
                    cnt++;
                    frame.SaveImage(@"d:\junk\testCamImages\image{cnt}.png");
                    byte[]      imagearray = frame.ImEncode(".png");
                    BitmapImage bmi        = new BitmapImage();
                    using (MemoryStream ms = new MemoryStream(imagearray))
                    {
                        ms.Position = 0;
                        bmi.BeginInit();
                        bmi.CacheOption  = BitmapCacheOption.OnLoad;
                        bmi.StreamSource = ms;
                        bmi.EndInit();
                    }


                    //this.ImageSource.Dispatcher.BeginInvoke(System.Windows.Threading.DispatcherPriority.Normal,
                    //    new Action(() =>
                    //        {
                    //            ImageSource = bmi;
                    //        }));


                    //capture.
                    OpenCvSharp.Cv2.ImShow("video", frame);
                    int key = OpenCvSharp.Cv2.WaitKey(27); // pause ~27 ms between frames
                    if (key == 27) // ESC stops the capture loop
                    {
                        readingVideo = false;
                    }
                }
            }
        }
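
The commented-out dispatcher block above suggests the frames are ultimately meant to be shown in a WPF Image control. A more direct route than encoding each frame to PNG and decoding it into a BitmapImage is to convert the Mat straight to a BitmapSource. The sketch below assumes the OpenCvSharp.WpfExtensions package is referenced and that an ImageSource property exists on this class; neither is confirmed by the snippet.

        private void ShowFrameInWpf(OpenCvSharp.Mat frame)
        {
            // Convert the frame directly; no intermediate PNG encode/decode is needed.
            var bitmap = OpenCvSharp.WpfExtensions.BitmapSourceConverter.ToBitmapSource(frame);
            bitmap.Freeze(); // allow the bitmap to be used from the UI thread
            System.Windows.Application.Current.Dispatcher.BeginInvoke(
                new Action(() => ImageSource = bitmap)); // ImageSource is assumed to be a bindable property
        }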