Пример #1
1
      /*
      public void TestCodeBookBGModel()
      {
         using (Capture capture = new Capture())
         using (BGCodeBookModel<Bgr> model = new BGCodeBookModel<Bgr>())
         {
            ImageViewer viewer = new ImageViewer();
            Image<Gray, byte> fgMask = capture.QueryFrame().Convert<Gray, Byte>();

            Application.Idle += delegate(Object sender, EventArgs args)
            {
               Mat frame = capture.QueryFrame();
               model.Apply(frame);
               viewer.Image = model.ForegroundMask; 
            };
            viewer.ShowDialog();
         }
      }

      public void TestBlobTracking()
      {
         MCvFGDStatModelParams fgparam = new MCvFGDStatModelParams();
         fgparam.alpha1 = 0.1f;
         fgparam.alpha2 = 0.005f;
         fgparam.alpha3 = 0.1f;
         fgparam.delta = 2;
         fgparam.is_obj_without_holes = 1;
         fgparam.Lc = 32;
         fgparam.Lcc = 16;
         fgparam.minArea = 15;
         fgparam.N1c = 15;
         fgparam.N1cc = 25;
         fgparam.N2c = 25;
         fgparam.N2cc = 35;
         fgparam.perform_morphing = 0;
         fgparam.T = 0.9f;

         BlobTrackerAutoParam<Bgr> param = new BlobTrackerAutoParam<Bgr>();
         param.BlobDetector = new BlobDetector(Emgu.CV.CvEnum.BlobDetectorType.CC);
         param.FGDetector = new FGDetector<Bgr>(Emgu.CV.CvEnum.ForgroundDetectorType.Fgd, fgparam);
         param.BlobTracker = new BlobTracker(Emgu.CV.CvEnum.BLOBTRACKER_TYPE.MSFG);
         param.FGTrainFrames = 10;
         BlobTrackerAuto<Bgr> tracker = new BlobTrackerAuto<Bgr>(param);

         //MCvFont font = new MCvFont(Emgu.CV.CvEnum.FontFace.HersheySimplex, 1.0, 1.0);

         using(ImageViewer viewer = new ImageViewer())
         using (Capture capture = new Capture())
         {
            capture.ImageGrabbed += delegate(object sender, EventArgs e)
            {
               tracker.Process(capture.RetrieveBgrFrame());
               
               //Image<Bgr, Byte> img = capture.RetrieveBgrFrame();

               Image<Bgr, Byte> img = tracker.ForegroundMask.Convert<Bgr, Byte>();
               foreach (MCvBlob blob in tracker)
               {
                  img.Draw((Rectangle)blob, new Bgr(255.0, 255.0, 255.0), 2);
                  img.Draw(blob.ID.ToString(), Point.Round(blob.Center), CvEnum.FontFace.HersheySimplex, 1.0, new Bgr(255.0, 255.0, 255.0));
               }
               viewer.Image = img;
            };
            capture.Start();
            viewer.ShowDialog();
         }
      }*/
      
      public void TestCvBlob()
      {
         // Blob-tracking demo: subtract the background with MOG2, detect blobs in
         // the foreground mask, track them across frames, and display
         // frame / mask / annotated-result stacked in one viewer image.
         //MCvFont font = new MCvFont(Emgu.CV.CvEnum.FontFace.HersheySimplex, 0.5, 0.5);
         using (CvTracks tracks = new CvTracks())
         using (ImageViewer viewer = new ImageViewer())
         using (Capture capture = new Capture())
         using (Mat fgMask = new Mat())
         // Create the detector once; the original re-created (and disposed) it on
         // every grabbed frame for no benefit.
         using (CvBlobDetector detector = new CvBlobDetector())
         {
            //BGStatModel<Bgr> bgModel = new BGStatModel<Bgr>(capture.QueryFrame(), Emgu.CV.CvEnum.BG_STAT_TYPE.GAUSSIAN_BG_MODEL);
            BackgroundSubtractorMOG2 bgModel = new BackgroundSubtractorMOG2(0, 0, true);

            capture.ImageGrabbed += delegate(object sender, EventArgs e)
            {
               // Dispose per-frame buffers at the end of every callback: the
               // original leaked 'frame', the ToImage conversions and 'result'
               // on each frame (unbounded native memory growth).
               using (Mat frame = new Mat())
               using (CvBlobs blobs = new CvBlobs())
               {
                  capture.Retrieve(frame);
                  bgModel.Apply(frame, fgMask);

                  using (Image<Gray, Byte> fgImage = fgMask.ToImage<Gray, Byte>())
                     detector.Detect(fgImage, blobs);
                  blobs.FilterByArea(100, int.MaxValue);

                  tracks.Update(blobs, 20.0, 10, 0);

                  using (Image<Bgr, Byte> bgrFrame = frame.ToImage<Bgr, Byte>())
                  using (Image<Bgr, Byte> result = new Image<Bgr, byte>(frame.Size))
                  {
                     // Copy only the blob pixels of the frame into 'result'.
                     using (Image<Gray, Byte> blobMask = detector.DrawBlobsMask(blobs))
                     {
                        frame.CopyTo(result, blobMask);
                     }

                     foreach (KeyValuePair<uint, CvTrack> pair in tracks)
                     {
                        if (pair.Value.Inactive == 0) //only draw the active tracks.
                        {
                           CvBlob b = blobs[pair.Value.BlobLabel];
                           // Hoisted: MeanColor previously converted the whole frame
                           // to an Image<Bgr, Byte> once per track, per frame.
                           Bgr color = detector.MeanColor(b, bgrFrame);
                           result.Draw(pair.Key.ToString(), pair.Value.BoundingBox.Location, CvEnum.FontFace.HersheySimplex, 0.5, color);
                           result.Draw(pair.Value.BoundingBox, color, 2);
                           Point[] contour = b.GetContour();
                           result.Draw(contour, new Bgr(0, 0, 255), 1);
                        }
                     }

                     using (Image<Bgr, Byte> fgBgr = fgMask.ToImage<Bgr, Byte>())
                        viewer.Image = bgrFrame.ConcateVertical(fgBgr.ConcateHorizontal(result));
                  }
               }
            };
            capture.Start();
            viewer.ShowDialog();
         }
      }
        public LineDetectionFromFileTesting()
        {
            // Test-harness constructor: loads the source image (path comes from
            // the class field 'fileName'), prepares colour/grayscale working
            // copies, spawns worker threads, then blocks in the viewer's modal
            // loop until the window is closed.
            viewer = new ImageViewer(); //create an image viewer

            //Convert the image to grayscale and filter out the noise
            // gray = new Image<Gray, Byte>("C:/RoboSub/RoboImagesTest2/92c.png");
            fileImage = new Image<Bgr, Byte>(fileName);
            // Downscale to a fixed 300x200 working size using area interpolation.
            fileImage = fileImage.Resize(300, 200, Emgu.CV.CvEnum.INTER.CV_INTER_AREA, true);
            img = fileImage.Clone();
            gray = img.Convert<Gray, Byte>();
            // img = new Image<Bgr, Byte>("C:/RoboSub/RoboImagesTest2/92c.png");

            // Viewer window is sized at 3x the working image in both dimensions.
            viewer.Size = new Size(fileImage.Width * 3, fileImage.Height * 3);

            // Background workers; both target methods are members of this class.
            Thread input = new Thread(getKeyboardInput);
            input.Start();
            Thread test = new Thread(testShapeDetection);
            test.Start();
            Application.Idle += new EventHandler(delegate(object sender, EventArgs e)
            {
                //testShapeDetection();
            });
            viewer.ShowDialog();
            // NOTE(review): Thread.Abort is unsafe and unsupported on .NET Core;
            // a cooperative cancellation flag would be preferable — TODO confirm
            // both worker loops can poll one.
            test.Abort();
            input.Abort();
        }
Пример #3
0
        public static void DetectBackground(String filepath)
        {
            Console.WriteLine("Running Heuristic Background Detector");

            // Estimate the dominant background colour of the image on disk.
            var bitmap = new System.Drawing.Bitmap(filepath);
            var detected = Heuristics.DetectBackground(bitmap);
            Console.WriteLine("R,G,B : " + detected.Red + "," + detected.Green + "," + detected.Blue);

            // Show a 600x600 swatch filled with the detected colour.
            var swatch = new Emgu.CV.Image<Bgr, Byte>(600, 600, detected);
            var display = new ImageViewer(swatch, "Heuristic Background Detection Result");
            display.ShowDialog();
        }
Пример #4
0
 public void testCam()
 {
     // Live camera preview: pump a frame into the viewer on every idle tick
     // until the viewer window is closed.
     using (ImageViewer viewer = new ImageViewer()) //create an image viewer
     using (Capture capture = new Capture()) //create a camera capture
     {
         EventHandler grabFrame = delegate(object sender, EventArgs e)
         {
             viewer.Image = capture.QueryFrame(); //draw the image obtained from camera
         };
         Application.Idle += grabFrame;
         viewer.ShowDialog(); //show the image viewer
         // Detach before disposing: Application.Idle keeps firing after the
         // dialog closes and would otherwise query a disposed capture.
         // (The original also never disposed the capture or the viewer.)
         Application.Idle -= grabFrame;
     }
 }
Пример #5
0
        public static void TestCodeBook()
        {
            // Demo: learn a background codebook model over the first
            // 'learningFrames' frames of tree.avi, then display the foreground
            // mask for the remaining frames.
            int learningFrames = 40;
            using (Capture capture = new Capture("tree.avi"))
            using (BGCodeBookModel<Ycc> bgmodel = new BGCodeBookModel<Ycc>())
            using (ImageViewer viewer = new ImageViewer())
            {
                #region Set color thresholds values
                MCvBGCodeBookModel param = bgmodel.MCvBGCodeBookModel;
                param.modMin[0] = param.modMin[1] = param.modMin[2] = 3;
                param.modMax[0] = param.modMax[1] = param.modMax[2] = 10;
                param.cbBounds[0] = param.cbBounds[1] = param.cbBounds[2] = 10;
                bgmodel.MCvBGCodeBookModel = param;
                #endregion

                int count = 0;
                EventHandler processFrame = delegate(Object sender, EventArgs e)
                {
                    Image<Bgr, Byte> img = capture.QueryFrame();
                    if (img == null)
                    {
                        return; // end of video
                    }

                    // Full-frame update mask (255 = update every pixel); the
                    // original leaked one of these per frame.
                    using (Image<Gray, byte> mask = new Image<Gray, Byte>(img.Size))
                    {
                        mask.SetValue(255);

                        viewer.Text = String.Format("Processing {0}th image. {1}", count++, learningFrames > 0 ? "(Learning)" : String.Empty);

                        using (Image<Ycc, Byte> ycc = img.Convert<Ycc, Byte>()) //using YCC color space for BGCodeBook
                        {
                            bgmodel.Update(ycc, ycc.ROI, mask);

                            // NOTE: fires on exactly one frame, since learningFrames
                            // keeps decrementing below zero afterwards (original logic
                            // preserved).
                            if (learningFrames == 0) //training is completed
                                bgmodel.ClearStale(bgmodel.MCvBGCodeBookModel.t / 2, ycc.ROI, mask);

                            learningFrames--;
                            viewer.Image = bgmodel.ForgroundMask.Clone();
                            System.Threading.Thread.Sleep(100); // crude playback throttle
                        }
                    }

                    img.Dispose();
                };

                Application.Idle += processFrame;
                viewer.ShowDialog();
                // Detach before 'capture'/'bgmodel' leave the using block:
                // Application.Idle keeps firing after the dialog closes and the
                // original would then have used disposed objects.
                Application.Idle -= processFrame;
            }
        }
Пример #6
0
 public static void ShowImage(Image img)
 {
     // Displays an arbitrary GDI+ image in a modal Emgu ImageViewer, centered
     // on its parent form. Silently does nothing when 'img' is null.
     if (img == null)
         return;

     // BUG FIX: 'img as Bitmap' yields null for non-Bitmap Image instances
     // (e.g. a Metafile) even when img != null, which crashed the
     // Image<Bgr, byte> constructor below. Wrap such images in a Bitmap copy.
     Bitmap map = img as Bitmap ?? new Bitmap(img);
     Image<Bgr, byte> image = new Image<Bgr, byte>(map);
     ImageViewer iv = new ImageViewer(image);
     iv.StartPosition = FormStartPosition.CenterParent;
     iv.ShowDialog();
 }
Пример #7
0
      public void TestKinect()
      {
         // Open the Kinect sensor in VGA/30Hz mode and continuously display its
         // disparity map until the viewer window is closed.
         using (KinectCapture capture = new KinectCapture(KinectCapture.DeviceType.Kinect, KinectCapture.ImageGeneratorOutputMode.Vga30Hz))
         {
            ImageViewer viewer = new ImageViewer();
            Application.Idle += delegate(Object sender, EventArgs e)
            {
               // Grab a new frame, then pull the disparity map out of it.
               capture.Grab();
               Mat disparity = new Mat();
               capture.RetrieveDisparityMap(disparity);
               viewer.Image = disparity;
            };

            viewer.ShowDialog();
         }
      }
Пример #8
0
        public static void GetBlobsFromImage(String filepath, Bgr color)
        {
            // Flood-fill the background of the image at 'filepath' using the
            // given reference colour, extract every connected object, save each
            // object to disk, then preview the mask and each extracted shred.
            string imagesrc = filepath;
            Bitmap source = new Bitmap(imagesrc);
            Console.WriteLine("beginning flood fill...");
            Bitmap Mask = Preprocessing.FloodFill(source, 100, 100, 120, color);
            Console.WriteLine("flood fill complete...");
            Console.WriteLine("extracting objects...");
            List<Bitmap> extractedobj = Preprocessing.ExtractImages(source, Mask);
            Console.WriteLine("Extracted " + extractedobj.Count + " objects");
            // Display to the User
            var result = new Image<Bgr, Byte>(source);

            int ii = 0;
            foreach (Bitmap bm in extractedobj)
            {
                //  Bitmap bm2 = Preprocessing.Orient(bm);
                bm.Save("image" + ii++ + ".png");
            }

            Console.WriteLine("wrote files to disk");

            Image<Bgra, Byte> image = new Image<Bgra, byte>(Mask);
            ImageViewer display = new ImageViewer(image, "Mask");
            var scale = Math.Min(800.0/result.Height, 800.0/result.Width);
            display.ImageBox.SetZoomScale(scale, new Point(10, 10));
            display.ShowDialog();

            // Display Each Shred That is extracted
            foreach (var shred in extractedobj)
            {
                Image<Bgra, Byte> cvShred = new Image<Bgra, byte>(shred);
                ImageViewer box = new ImageViewer(cvShred, "Mask");
                var shredScale = Math.Min(800.0/cvShred.Height, 800.0/cvShred.Width);
                // BUG FIX: the zoom was applied to 'display' (the mask window,
                // already closed) instead of the shred's own viewer 'box', so
                // every shred window opened unscaled.
                box.ImageBox.SetZoomScale(shredScale, new Point(10, 10));
                box.ShowDialog();
            }

            // Save to Working Dir
        }
Пример #9
0
        public static void GetBlobsFromImage(String filepath)
        {
            // Mask the background out of the image via flood fill, then pull
            // each connected object out of the source.
            Bitmap source = new Bitmap(filepath);
            Bitmap backgroundMask = Preprocessing.FloodFill(source, 100, 100, 100);
            List<Bitmap> extractedobj = Preprocessing.ExtractImages(source, backgroundMask);
            // Display to the User
            var result = new Image<Bgr, Byte>(source);

            // Re-orient every extracted object and write it out as a numbered jpg.
            int index = 0;
            foreach (Bitmap piece in extractedobj)
            {
                Bitmap oriented = Preprocessing.Orient(piece);
                oriented.Save("image" + index++ + ".jpg");
            }

            // Show the mask, zoomed so the larger dimension fits in ~800 px.
            Emgu.CV.Image<Bgra, Byte> maskImage = new Image<Bgra, byte>(backgroundMask);
            ImageViewer display = new ImageViewer(maskImage, "Mask");
            double scale = Math.Min(800.0 / (double)result.Height, 800.0 / (double)result.Width);
            display.ImageBox.SetZoomScale(scale, new Point(10, 10));
            display.ShowDialog();

            // Save to Working Dir
        }
Пример #10
0
 public void TestPlayVideo()
 {
    // Play back car.avi in an ImageViewer, pausing ~300 ms per shown frame.
    Capture capture = new Capture("car.avi");
    ImageViewer viewer = new ImageViewer(null);

    Application.Idle += delegate(Object sender, EventArgs e)
    {
       Mat nextFrame = capture.QueryFrame();
       if (nextFrame == null || nextFrame.IsEmpty)
          return; // no more frames

       viewer.Image = nextFrame;
       Thread.Sleep(300); // crude playback throttle
    };
    viewer.ShowDialog();
 }
Пример #11
0
      public void TestImageViewerFrameRate()
      {
         // Stress the viewer by pushing a fresh solid-colour 1024x1024 frame on
         // every idle tick; the grey level cycles with the system clock.
         ImageViewer viewer = new ImageViewer(null);
         Image<Bgr, Byte> canvas = new Image<Bgr, Byte>(1024, 1024);

         Application.Idle += delegate(Object sender, EventArgs e)
         {
            double level = DateTime.Now.Ticks % 30;
            canvas.SetValue(new Bgr(level, level, level));
            viewer.Image = canvas;
         };
         viewer.ShowDialog();
      }
Пример #12
0
      public void TestImageViewer()
      {
         //System.Threading.Thread.CurrentThread.CurrentUICulture = new System.Globalization.CultureInfo("zh-CN");

         // Smoke test: an ImageViewer constructed with no image must still open
         // and close cleanly.
         ImageViewer viewer = new ImageViewer(null);
         viewer.ShowDialog();
      }
Пример #13
0
        public void DriveAndRecordImageTest()
        {
            // Drives the sub forward while saving roughly one camera frame per
            // second to disk, then kills all motors when the viewer closes.
            InitializeMotors();
            DriveForward();

            // testCam();
            ImageViewer viewer = new ImageViewer(); //create an image viewer
            Capture capture = new Capture(3);
            long frame = 0;
            // BUG FIX: the original compared DateTime.Now.Second values, which
            // wrap 59 -> 0 every minute, so "time + 1 < Second" stopped saving
            // frames once a minute boundary was crossed. Track a full timestamp.
            DateTime lastSave = DateTime.Now;
            Application.Idle += new EventHandler(delegate(object sender, EventArgs e)
            {
                if ((DateTime.Now - lastSave).TotalSeconds >= 1.0)
                {
                    // Recreate the capture (presumably to flush stale buffered
                    // frames — TODO confirm), disposing the old instance first:
                    // the original leaked one Capture per save.
                    capture.Dispose();
                    capture = new Capture(3);
                    lastSave = DateTime.Now;
                    capture.QueryFrame().Save("C:/RoboSub/Images/movingTest/forwardFrame" + frame + ".png");
                    frame++;
                }
                viewer.Image = capture.QueryFrame();
            });
            viewer.ShowDialog();

            foreach (Motor m in motors)
            {
                //System.Console.WriteLine("Stopping motor " + m.info.serial + ".");
                m.KillMotors();
            }
        }
Пример #14
0
 public static void TestSuperres()
 {
    // Run super-resolution over car.avi, throttled to at most one frame per
    // 200 ms, reporting the per-frame processing time in the viewer title.
    ImageViewer viewer = new ImageViewer();
    using (Superres.FrameSource frameSource = new Superres.FrameSource("car.avi", false))
    using (Superres.SuperResolution sr = new Superres.SuperResolution(Superres.SuperResolution.OpticalFlowType.Btvl, frameSource))
    {
       Stopwatch watch = new Stopwatch();
       int counter = 0;
       Application.Idle += delegate(object sender, EventArgs e)
       {
          // Time only the super-resolution step itself.
          watch.Restart();
          Mat upscaled = new Mat();
          sr.NextFrame(upscaled);
          watch.Stop();

          long elapsed = watch.ElapsedMilliseconds;
          if (elapsed < 200)
          {
             Thread.Sleep(200 - (int)elapsed);
          }

          if (upscaled.IsEmpty)
          {
             viewer.Text = String.Format("{0} frames processed", counter);
          }
          else
          {
             viewer.Image = upscaled;
             viewer.Text = String.Format("Frame {0}: {1} milliseconds.", counter++, watch.ElapsedMilliseconds);
          }
       };
       viewer.ShowDialog();
    }
 }
Пример #15
0
      /*
      public void TestPyrLK()
      {
         const int MAX_CORNERS = 500;
         Capture c = new Capture();
         ImageViewer viewer = new ImageViewer();
         Image<Gray, Byte> oldImage = null;
         Image<Gray, Byte> currentImage = null;
         Application.Idle += new EventHandler(delegate(object sender, EventArgs e)
         {
            if (oldImage == null)
            {
               oldImage = c.QueryGrayFrame();
            }

            currentImage = c.QueryGrayFrame();
            Features2D.GFTTDetector detector = new Features2D.GFTTDetector(MAX_CORNERS, 0.05, 3, 3);
            
            //PointF[] features = oldImage.GoodFeaturesToTrack(MAX_CORNERS, 0.05, 3.0, 3, false, 0.04)[0];
            PointF[] shiftedFeatures;
            Byte[] status;
            float[] trackErrors;
            CvInvoke.CalcOpticalFlowPyrLK(oldImage, currentImage, features, new Size(9, 9), 3, new MCvTermCriteria(20, 0.05),
               out shiftedFeatures, out status, out trackErrors);

            Image<Gray, Byte> displayImage = currentImage.Clone();
            for (int i = 0; i < features.Length; i++)
               displayImage.Draw(new LineSegment2DF(features[i], shiftedFeatures[i]), new Gray(), 2);

            oldImage = currentImage;
            viewer.Image = displayImage;
         });
         viewer.ShowDialog();
      }*/

     
      /// <summary>
      /// Demo: dense pyramidal Lucas-Kanade optical flow on the GPU between
      /// consecutive camera frames, plus CUDA good-features-to-track detection
      /// on the previous frame. Returns immediately when no CUDA device exists.
      /// </summary>
      public void TestPyrLKGPU()
      {
         if (!CudaInvoke.HasCuda)
            return;

         const int MAX_CORNERS = 500;
         Capture c = new Capture();
         ImageViewer viewer = new ImageViewer();
         // Previous / current camera frame, as grayscale GpuMats.
         GpuMat oldImage = null;
         GpuMat currentImage = null;
         using (CudaGoodFeaturesToTrackDetector detector = new CudaGoodFeaturesToTrackDetector(DepthType.Cv8U, 1, MAX_CORNERS, 0.05, 3.0, 3, false, 0.04))
         using (CudaDensePyrLKOpticalFlow flow = new CudaDensePyrLKOpticalFlow(new Size(21, 21), 3, 30, false))
         {
            Application.Idle += new EventHandler(delegate(object sender, EventArgs e)
            {
               // First tick: seed 'oldImage' with a grayscale copy of one frame.
               if (oldImage == null)
               {
                  // NOTE(review): bgrFrame is never disposed here (leak).
                  Mat bgrFrame = c.QueryFrame();
                  using (GpuMat oldBgrImage = new GpuMat(bgrFrame))
                  {
                     oldImage = new GpuMat();
                     CudaInvoke.CvtColor(oldBgrImage, oldImage, ColorConversion.Bgr2Gray);
                  }
               }

               // NOTE(review): 'currentImage' is reassigned every tick without
               // disposing the previous GpuMat — GPU memory leak per frame.
               using (Mat tmpFrame = c.QueryFrame())
               using (GpuMat tmp = new GpuMat(tmpFrame))
               {
                  currentImage = new GpuMat();
                  CudaInvoke.CvtColor(tmp, currentImage, ColorConversion.Bgr2Gray);
               }
               // 'vertex'/'colors' are leftovers for the commented-out
               // needle-map call below; only 'f' and 'corners' are filled in.
               using (GpuMat f = new GpuMat())
               
               using (GpuMat vertex = new GpuMat())
               using (GpuMat colors = new GpuMat())
               using(GpuMat corners = new GpuMat())
               {
                  // Dense optical flow from the previous to the current frame.
                  flow.Calc(oldImage, currentImage, f);

                  //CudaInvoke.CreateOpticalFlowNeedleMap(u, v, vertex, colors);
                  detector.Detect(oldImage, corners, null);
                  //GpuMat<float> detector.Detect(oldImage, null);
                  /*
                  //PointF[] features = oldImage.GoodFeaturesToTrack(MAX_CORNERS, 0.05, 3.0, 3, false, 0.04)[0];
                  PointF[] shiftedFeatures;
                  Byte[] status;
                  float[] trackErrors;
                  OpticalFlow.PyrLK(oldImage, currentImage, features, new Size(9, 9), 3, new MCvTermCriteria(20, 0.05),
                     out shiftedFeatures, out status, out trackErrors);
                  */

                  // Download the current frame for display; the flow result
                  // itself is not visualised (drawing code is commented out).
                  Mat displayImage = new Mat();
                  currentImage.Download(displayImage);
                      
                  /*
                  for (int i = 0; i < features.Length; i++)
                     displayImage.Draw(new LineSegment2DF(features[i], shiftedFeatures[i]), new Gray(), 2);
                  */
                  // NOTE(review): the old 'oldImage' GpuMat is dropped without
                  // Dispose here as well.
                  oldImage = currentImage;
                  viewer.Image = displayImage;
               }
            });
            viewer.ShowDialog();
         }
      }
Пример #16
0
      //[Test]
      /// <summary>
      /// Kalman filter demo on a 1-D angular state: a synthetic data source
      /// produces noisy angle measurements; every 200 ms timer tick the filter
      /// is corrected with the measurement and asked for a prediction, and the
      /// state (white), measurement (red) and prediction (green) are drawn as
      /// crosses on a 400x400 canvas shown in an ImageViewer.
      /// </summary>
      public void TestKalman()
      {
         Image<Bgr, Byte> img = new Image<Bgr, byte>(400, 400);

         SyntheticData syntheticData = new SyntheticData();

         //Matrix<float> state = new Matrix<float>(new float[] { 0.0f, 0.0f}); //initial guess

         #region initialize Kalman filter
         // 2 state variables (angle, angular velocity — presumably; TODO
         // confirm against SyntheticData), 1 measurement, 0 control inputs.
         KalmanFilter tracker = new KalmanFilter(2, 1, 0);
         syntheticData.TransitionMatrix.Mat.CopyTo(tracker.TransitionMatrix);
         syntheticData.MeasurementMatrix.Mat.CopyTo(tracker.MeasurementMatrix);

         syntheticData.ProcessNoise.Mat.CopyTo(tracker.ProcessNoiseCov);
         syntheticData.MeasurementNoise.Mat.CopyTo(tracker.MeasurementNoiseCov);
         syntheticData.ErrorCovariancePost.Mat.CopyTo(tracker.ErrorCovPost);
         tracker.StatePost.SetTo(new float[] { 0.0f, 0.0f });
         #endregion 

         // Maps an angle (radians) to a point on a circle of radius width/3
         // centered in the image; y is flipped for screen coordinates.
         System.Converter<double, PointF> angleToPoint = 
            delegate(double radianAngle)
            {
               return new PointF(
                  (float)(img.Width / 2 + img.Width / 3 * Math.Cos(radianAngle)),
                  (float)(img.Height / 2 - img.Width / 3 * Math.Sin(radianAngle)));
            };

         // Draws a 15x15 cross marker at the given point.
         Action<PointF, Bgr> drawCross =
           delegate(PointF point, Bgr color)
           {
              img.Draw(new Cross2DF(point, 15, 15), color, 1);
           };

         ImageViewer viewer = new ImageViewer();
         System.Windows.Forms.Timer timer = new System.Windows.Forms.Timer();
         timer.Interval = 200;
         timer.Tick += new EventHandler(delegate(object sender, EventArgs e)
         {
            Matrix<float> measurement = syntheticData.GetMeasurement();
            // adjust Kalman filter state 
            tracker.Correct(measurement.Mat);

            tracker.Predict();

            #region draw the state, prediction and the measurement

            // StatePost holds the corrected state, StatePre the prediction made
            // by the Predict() call above; element [0] is the angle.
            float[] correctedState = new float[2];
            float[] predictedState = new float[2];
            tracker.StatePost.CopyTo(correctedState);
            tracker.StatePre.CopyTo(predictedState);
            PointF statePoint = angleToPoint(correctedState[0]);
            PointF predictPoint = angleToPoint(predictedState[0]);
            PointF measurementPoint = angleToPoint(measurement[0, 0]);

            img.SetZero(); //clear the image
            drawCross(statePoint, new Bgr(Color.White)); //draw current state in White
            drawCross(measurementPoint, new Bgr(Color.Red)); //draw the measurement in Red
            drawCross(predictPoint, new Bgr(Color.Green)); //draw the prediction (the next state) in green 
            img.Draw(new LineSegment2DF(statePoint, predictPoint), new Bgr(Color.Magenta), 1); //Draw a line between the current position and prediction of next position 

            //Trace.WriteLine(String.Format("Velocity: {0}", tracker.CorrectedState[1, 0]));
            #endregion

            // Advance the synthetic ground truth for the next tick.
            syntheticData.GoToNextState();

            viewer.Image = img;
         });
         timer.Start();
         // Stop the timer when the viewer goes away so ticks stop firing.
         viewer.Disposed += delegate(Object sender, EventArgs e) { timer.Stop(); };
         viewer.Text = "Actual State: White; Measurement: Red; Prediction: Green";
         viewer.ShowDialog();
      }
Пример #17
0
        public static void Run(string filepath)
        {
            // End-to-end pipeline: detect the background colour, flood-fill it
            // away, extract each shred, preview the mask and shreds, then save
            // oriented shreds to a user-chosen (or working) directory.
            Console.WriteLine("Loading Image : " + filepath);
            Bitmap load = new Bitmap(filepath);

            var start = DateTime.Now;
            Console.WriteLine("Running Background Detection ...");
            Bgr backgroundColor = Heuristics.DetectBackground(load, 20);
            Console.WriteLine("Detected Background : " + backgroundColor.ToString());
            Console.WriteLine("Detected Background Completed in " + (DateTime.Now - start).TotalSeconds.ToString() +
                              " seconds");

            // Show a small swatch of the detected background colour.
            var backgroundGuess = new Image<Bgr, Byte>(100, 100, backgroundColor);
            ImageViewer display = new ImageViewer(backgroundGuess, "Mask");
            display.ShowDialog();

            Console.WriteLine("Running Shred Extraction ");
            Console.WriteLine("Image Size : " + load.Height * load.Width + " Pixels");

            string imagesrc = filepath;
            Bitmap source = new Bitmap(imagesrc);
            Console.WriteLine("beginning flood fill...");
            Bitmap Mask = Preprocessing.FloodFill(source, 100, 100, 50, backgroundColor);
            Console.WriteLine("flood fill complete...");
            Console.WriteLine("extracting objects...");
            List<Bitmap> extractedobj = Preprocessing.ExtractImages(source, Mask);
            Console.WriteLine("Extracted " + extractedobj.Count + " objects");

            // Display to the User
            var result = new Image<Bgr, Byte>(source);

            Image<Bgra, Byte> image = new Image<Bgra, byte>(Mask);
            ImageViewer maskView = new ImageViewer(image, "Mask");
            var scale = Math.Min(800.0 / result.Height, 800.0 / result.Width);
            maskView.ImageBox.SetZoomScale(scale, new Point(10, 10));
            maskView.ShowDialog();

            // Display Each Shred That is extracted
            foreach (var shred in extractedobj)
            {
                Image<Bgra, Byte> cvShred = new Image<Bgra, byte>(shred);
                ImageViewer box = new ImageViewer(cvShred, "Mask");
                var shredScale = Math.Min(800.0 / cvShred.Height, 800.0 / cvShred.Width);
                // BUG FIX: the zoom was applied to 'display' (the background
                // swatch window, long since closed) instead of the shred's own
                // viewer 'box', so shred windows opened unscaled.
                box.ImageBox.SetZoomScale(shredScale, new Point(10, 10));
                box.ShowDialog();
            }

            // Prompt for input directory and Write to file
            Console.Write("Enter Output Directory (Default is Working): ");
            string directory = Console.ReadLine();

            if (!Directory.Exists(directory))
            {
                Console.WriteLine("Writing to Working Directory");
                directory = string.Empty;
            }
            else
            {
                directory += "\\";
            }

            int ii = 0;
            foreach (Bitmap bm in extractedobj)
            {
                Bitmap bm2 = Preprocessing.Orient(bm);
                bm2.Save(directory + "image" + ii++ + ".png");
            }
            // BUG FIX: report success after the files are actually written;
            // the original printed this before the save loop ran.
            Console.WriteLine("wrote files to disk");
        }
Пример #18
0
 private void videoToolStripMenuItem_Click(object sender, EventArgs e)
 {
     // Menu handler: open a live camera preview in a modal viewer; frames are
     // pumped by the ProcessFrame idle handler.
     viewer = new ImageViewer(); //create an image viewer
     capture = new Capture(); //create a camera capture
     Application.Idle += ProcessFrame;
     // BUG FIX: subscribe before ShowDialog. ShowDialog blocks until the form
     // is closed, so a FormClosed subscription made after it returns could
     // never fire for this dialog session.
     viewer.FormClosed += Viewer_FormClosed;
     viewer.ShowDialog(); //show the image viewer
 }
Пример #19
0
 private void dataGridView1_CellClick(object sender, DataGridViewCellEventArgs e)
 {
     // Only react to clicks in the "view" button column.
     if (e.ColumnIndex != dataGridView1.Columns["viewColumn"].Index)
         return;

     // Load the persisted image using the id stored in the clicked row.
     int imageID = (int)dataGridView1.Rows[e.RowIndex].Cells["idColumn"].Value;
     ISession session = ImageDatabase.GetCurrentSession();
     Image<Bgr, Byte> image = session.Load<PersistentImage>(imageID);
     session.Close();

     // Show the image modally; the viewer is disposed when it closes.
     using (ImageViewer viewer = new ImageViewer())
     {
         viewer.Image = image;
         viewer.ShowDialog();
     }
 }
Пример #20
0
      /*
      public static void TestOnePassVideoStabilizerCamera()
      {
         ImageViewer viewer = new ImageViewer();
         using (Capture capture = new Capture())
         using (GaussianMotionFilter motionFilter = new GaussianMotionFilter())
         //using (Features2D.FastDetector detector = new Features2D.FastDetector(10, true))
         using (Features2D.SURF detector = new Features2D.SURF(500, false))
         //using (Features2D.ORBDetector detector = new Features2D.ORBDetector(500))
         using (OnePassStabilizer stabilizer = new OnePassStabilizer(capture))
         {
            stabilizer.SetMotionFilter(motionFilter);
            //motionEstimator.SetDetector(detector);

            //stabilizer.SetMotionEstimator(motionEstimator);
            Application.Idle += delegate(object sender, EventArgs e)
            {
               Image<Bgr, byte> frame = stabilizer.NextFrame();
               if (frame != null)
                  viewer.Image = frame;
            };
            viewer.ShowDialog();
         }
      }*/

      public static void TestOnePassVideoStabilizer()
      {
         // Stabilize tree.avi with the one-pass stabilizer, showing each frame
         // and throttling playback to at most one frame per 200 ms.
         ImageViewer viewer = new ImageViewer();
         using (Capture capture = new Capture("tree.avi"))
         using (CaptureFrameSource frameSource = new CaptureFrameSource(capture))
         using (OnePassStabilizer stabilizer = new OnePassStabilizer(frameSource))
         {
            Stopwatch watch = new Stopwatch();
            //stabilizer.SetMotionEstimator(motionEstimator);
            Application.Idle += delegate(object sender, EventArgs e)
            {
               // Time only the stabilization step itself.
               watch.Restart();
               Mat stabilized = stabilizer.NextFrame();
               watch.Stop();

               long remaining = 200 - watch.ElapsedMilliseconds;
               if (remaining > 0)
                  Thread.Sleep((int)remaining);

               if (stabilized != null)
                  viewer.Image = stabilized;
            };
            viewer.ShowDialog();
         }
      }
Пример #21
0
 public static void TestTwoPassVideoStabilizer()
 {
    // Stabilize tree.avi with the two-pass stabilizer, showing each frame and
    // throttling playback to at most one frame per 200 ms.
    ImageViewer viewer = new ImageViewer();
    using (Capture capture = new Capture("tree.avi"))
    using (GaussianMotionFilter motionFilter = new GaussianMotionFilter(15, -1.0f))
    using (CaptureFrameSource frameSource = new CaptureFrameSource(capture))
    using (TwoPassStabilizer stabilizer = new TwoPassStabilizer(frameSource))
    {
       Stopwatch watch = new Stopwatch();
       //stabilizer.SetMotionEstimator(motionEstimator);
       Application.Idle += delegate(object sender, EventArgs e)
       {
          // Time only the stabilization step itself.
          watch.Restart();
          Mat stabilized = stabilizer.NextFrame();
          watch.Stop();

          long remaining = 200 - watch.ElapsedMilliseconds;
          if (remaining > 0)
             Thread.Sleep((int)remaining);

          if (stabilized != null)
             viewer.Image = stabilized;
       };
       viewer.ShowDialog();
    }
 }
Пример #22
0
        /// <summary>
        /// Swaps the current video source for the file at <paramref name="filePath"/>:
        /// disposes any existing capture, opens the new file, primes the first
        /// frame and records the video's frame dimensions in VideoSize.
        /// </summary>
        public void SetFile(string filePath)
        {
            if (capture != null)
                capture.Dispose();

            capture = new Capture(filePath);

            // Prime the pipeline with the first frame; note this consumes
            // frame 0 of the new file.
            nextFrame = capture.QueryFrame();
            if (nextFrame != null)
                isDirty = true; // presumably signals the render loop to repaint — TODO confirm

            this.VideoSize = new Size2((int)capture.GetCaptureProperty( Emgu.CV.CvEnum.CAP_PROP.CV_CAP_PROP_FRAME_WIDTH),
                                       (int)capture.GetCaptureProperty( Emgu.CV.CvEnum.CAP_PROP.CV_CAP_PROP_FRAME_HEIGHT));

            // Debug-only preview, compiled out.
            #if false
            Image<Bgr, byte> img = capture.QueryFrame();
            img = capture.QueryFrame();
            ImageViewer viewer = new ImageViewer();
            viewer.Image = img;
            viewer.ShowDialog();
            #endif
        }
Пример #23
0
 /// <summary>
 /// Continuously pulls frames from the default camera through a
 /// CaptureFrameSource and displays them in an image viewer until the
 /// viewer window is closed.
 /// </summary>
 public static void TestCaptureFrameSource()
 {
    ImageViewer display = new ImageViewer();
    using (Capture camera = new Capture())
    using (CaptureFrameSource source = new CaptureFrameSource(camera))
    {
       Application.Idle += (sender, e) =>
       {
          Mat grabbed = source.NextFrame();
          if (grabbed != null)
             display.Image = grabbed;
       };
       display.ShowDialog();
    }
 }
Пример #24
0
      /*
      public void TestGpuVibe()
      {
         int warmUpFrames = 20;

         GpuVibe<Gray> vibe = null;
         Image<Gray, Byte> mask = null;
         using (ImageViewer viewer = new ImageViewer()) //create an image viewer
         using (Capture capture = new Capture()) //create a camera captue
         {
            capture.ImageGrabbed += delegate(object sender, EventArgs e)
            {  
               //run this until application closed (close button click on image viewer)
               
               using(Image<Bgr, byte> frame = capture.RetrieveBgrFrame(0))
               using (CudaImage<Bgr, byte> gpuFrame = new CudaImage<Bgr, byte>(frame))
               using (CudaImage<Gray, Byte> gpuGray = gpuFrame.Convert<Gray, Byte>())
               {
                  if (warmUpFrames > 0)
                  {
                     warmUpFrames--;
                     return;
                  }
                  
                  if (vibe == null)
                  {
                     vibe = new GpuVibe<Gray>(1234567, gpuGray, null);
                     return;
                  }
                  else
                  {
                     vibe.Apply(gpuGray, null);
                     if (mask == null)
                        mask = new Image<Gray, byte>(vibe.ForgroundMask.Size);

                     vibe.ForgroundMask.Download(mask);
                     viewer.Image = frame.ConcateHorizontal(mask.Convert<Bgr, Byte>()); //draw the image obtained from camera

                  }
               }
            };
            capture.Start();
            viewer.ShowDialog(); //show the image viewer
         }
      }

      public void TestGpuBackgroundModel()
      {
         int warmUpFrames = 20;
         int totalFrames = 0;
         
         //CudaBackgroundSubtractorMOG2<Bgr>  bgModel = null;
         //CudaBackgroundSubtractorMOG<Bgr> bgModel = null;
         CudaBackgroundSubtractorGMG<Bgr> bgModel = null;
         //CudaBackgroundSubtractorFGD<Bgr> bgModel = null;

         Image<Gray, Byte> mask = null;
         using (ImageViewer viewer = new ImageViewer()) //create an image viewer
         using (Capture capture = new Capture()) //create a camera captue
         {
            capture.ImageGrabbed += delegate(object sender, EventArgs e)
            {
               //run this until application closed (close button click on image viewer)
               totalFrames++;

               if (viewer != null && !viewer.IsDisposed)
               {
                  if (viewer.InvokeRequired)
                  {
                     viewer.Invoke((Action)delegate { viewer.Text = String.Format("Processing {0}th frame.", totalFrames); });
                  }
                  else
                  {
                     viewer.Text = String.Format("Processing {0}th frame.", totalFrames); 
                  }
               }
               
               using (Image<Bgr, byte> frame = capture.RetrieveBgrFrame(0))
               using (CudaImage<Bgr, byte> gpuFrame = new CudaImage<Bgr, byte>(frame))
               {
                  if (warmUpFrames > 0)
                  {
                     warmUpFrames--;
                     return;
                  }

                  if (bgModel == null)
                  {
                     //bgModel = new CudaBackgroundSubtractorMOG2<Bgr>(500, 16, true);
                     //bgModel = new CudaBackgroundSubtractorMOG<Bgr>(200, 5, 0.7, 0);
                     bgModel = new CudaBackgroundSubtractorGMG<Bgr>(120, 0.8);
                     bgModel.Apply(gpuFrame, -1.0f, null);
                     //bgModel = new CudaBackgroundSubtractorFGD<Bgr>(128, 15, 25, 64, 25, 40, true, 1, 0.1f, 0.005f, 0.1f, 2.0f, 0.9f, 15.0f);
                     //bgModel.Apply(gpuFrame, -1.0f);
                     
                     return;
                  }
                  else
                  {
                     bgModel.Apply(gpuFrame, -1.0f, null);
                     //bgModel.Apply(gpuFrame, -1.0f);
                     
                     if (mask == null)
                        mask = new Image<Gray, byte>(bgModel.ForgroundMask.Size);

                     bgModel.ForgroundMask.Download(mask);
                     Image<Bgr, Byte> result = frame.ConcateHorizontal(mask.Convert<Bgr, Byte>());
                     if (viewer != null && !viewer.IsDisposed)
                     {
                        if (viewer.InvokeRequired)
                        {
                           viewer.Invoke((Action)delegate { viewer.Image = result; });
                        }
                        else
                        {
                           viewer.Image = result; //draw the image obtained from camera
                        }
                     }

                  }
               }
            };
            capture.Start();
            viewer.ShowDialog(); //show the image viewer
         }
      }*/

      /// <summary>
      /// Streams frames from the default camera into an image viewer; runs
      /// until the viewer window is closed.
      /// </summary>
      public void CameraTest()
      {
         using (ImageViewer display = new ImageViewer()) //create an image viewer
         using (Capture camera = new Capture()) //create a camera capture
         {
            camera.ImageGrabbed += (sender, e) =>
            {
               // Invoked for every grabbed frame until the viewer is closed.
               Mat grabbed = new Mat();
               camera.Retrieve(grabbed);
               display.Image = grabbed; //draw the image obtained from camera
            };
            camera.Start();
            display.ShowDialog(); //show the image viewer
         }
      }
Пример #25
0
      /// <summary>
      /// On every UI Idle tick: generates a random 400x400 BGR image, writes
      /// it to "temp.jpeg", re-loads that file and displays the loaded copy in
      /// an image viewer. Runs until the viewer is closed.
      /// </summary>
      public void TestImage()
      {
         ImageViewer viewer = new ImageViewer();
         Application.Idle += delegate(Object sender, EventArgs e)
         {
            // Fix: the temporary random image was previously never disposed,
            // leaking native memory on every Idle tick. It is only needed
            // until it has been written to disk.
            using (Image<Bgr, Byte> image = new Image<Bgr, byte>(400, 400))
            {
               image.SetRandUniform(new MCvScalar(), new MCvScalar(255, 255, 255));
               image.Save("temp.jpeg");
            }
            Image<Bgr, Byte> img = new Image<Bgr, byte>("temp.jpeg");
            viewer.Image = img;
         };

         viewer.ShowDialog();
      }
Пример #26
0
        /// <summary>
        /// Shows a live preview of the default camera: each frame is shrunk to
        /// 80% with linear interpolation, converted to grayscale and
        /// histogram-equalized before being displayed. Runs until the viewer
        /// window is closed.
        /// </summary>
        public void CameraTest2()
        {
            ImageViewer viewer = new ImageViewer();
            Capture capture = new Capture();
            Application.Idle += delegate(object sender, EventArgs e)
            {
                Image<Bgr, Byte> img = capture.QueryFrame();
                // Fix: QueryFrame can return null when no frame is available;
                // previously this threw a NullReferenceException on Resize.
                if (img == null)
                    return;
                img = img.Resize(0.8, Emgu.CV.CvEnum.INTER.CV_INTER_LINEAR);
                Image<Gray, Byte> gray = img.Convert<Gray, Byte>();
                gray._EqualizeHist();

                viewer.Image = gray;
            };
            viewer.ShowDialog();
        }
Пример #27
0
        /// <summary>
        /// Worker loop: while <c>runThread</c> is set, grabs the latest webcam
        /// image, thresholds the button region to a skin-color mask, finds the
        /// largest contour in that mask, and raises a "BUTTON,i" event when
        /// the blob covers more than half of a button rectangle for
        /// STEADY_FRAMES_THRESHOLD consecutive frames.
        /// </summary>
        public void Run()
        {
            Rectangle biggestBlobForButtons;
            //ImageViewer viewer1 = new ImageViewer();
            //Thread aThread = new Thread(delegate() { viewer1.ShowDialog(); });
            //aThread.Start();
            // Debug viewer for the thresholded button region, hosted on its own
            // thread so ShowDialog does not block this worker loop.
            ImageViewer viewer2 = new ImageViewer();
            Thread otherThread = new Thread(delegate() { viewer2.ShowDialog(); });
            otherThread.Start();

            while (runThread)
            {
                if (this.newImageFromWebcam)
                {
                    //this.newImageFromWebcam = false;
                    this.Image = new Bitmap(CaptureModel.Instance.Image);
                    webcamImage = new Image<Bgr, byte>(this.Image);

                    NewImageFromWebcam = false;

                    this.Image.Dispose();
                    // Restrict processing to the button region and threshold it
                    // to a skin-color mask.
                    cutImageForButtons = webcamImage.Copy(interestRectangleForButtons);
                    rangeImageForButtons = detectSkin(cutImageForButtons);
                    cutImageForButtons.Dispose();
                    //viewer1.Image = rangeImageForSideBar;
                    viewer2.Image = rangeImageForButtons;

                    // Find the largest contour (by area) in the skin mask.
                    using (MemStorage storage = new MemStorage())
                    {
                        Contour<Point> contour = rangeImageForButtons.FindContours(Emgu.CV.CvEnum.CHAIN_APPROX_METHOD.CV_CHAIN_APPROX_SIMPLE, Emgu.CV.CvEnum.RETR_TYPE.CV_RETR_LIST, storage);
                        Contour<Point> biggest = null;

                        double currentArea = 0, maxArea = 0;

                        while (contour != null)
                        {
                            currentArea = contour.Area;
                            if (currentArea > maxArea)
                            {
                                maxArea = currentArea;
                                biggest = contour;
                            }
                            contour = contour.HNext;
                        }
                        if (biggest != null)
                            biggestBlobForButtons = biggest.BoundingRectangle;
                        else
                            biggestBlobForButtons = Rectangle.Empty;
                    }
                    if (biggestBlobForButtons.IsEmpty)
                    {
                        // No blob: skip button hit-testing for this frame.
                        // NOTE(review): this branch also skips the Thread.Sleep
                        // below, so the loop spins without pause while nothing
                        // is detected — confirm this is intended.
                        continue;
                    }

                    // Hit-test the blob against each button. A press fires only
                    // after STEADY_FRAMES_THRESHOLD consecutive frames where the
                    // intersection covers more than half of the button.
                    for (int i = 0; i < rectanglesForButtons.Length; i++)
                    {
                        Rectangle intersection = rectanglesForButtons[i];
                        intersection.Intersect(biggestBlobForButtons);
                        if (!intersection.IsEmpty && getArea(intersection) > 0.5 * getArea(rectanglesForButtons[i]))
                        {
                            if (!buttonPressedForButtonBar[i])
                            {
                                currentSteadyFramesForButtonBar[i]++;
                                if (currentSteadyFramesForButtonBar[i] == STEADY_FRAMES_THRESHOLD)
                                {
                                    currentSteadyFramesForButtonBar[i]=0;
                                    buttonPressedForButtonBar[i] = true;
                                    OnBarEvent(new PropertyChangedEventArgs("BUTTON," + i));
                                    Console.WriteLine("BUTTON," + i);
                                }
                            }
                        }
                        else
                        {
                            // Blob left this button: reset its press state.
                            buttonPressedForButtonBar[i] = false;
                            currentSteadyFramesForButtonBar[i] = 0;
                        }
                    }
                }
                else
                {
                    // No fresh webcam frame: clear all button state.
                    resetButtonBar();
                }

                Thread.Sleep(300);
            }
        }
Пример #28
0
 /// <summary>
 ///     Shows the current image in a modal viewer window titled "FZP"; any
 ///     failure is reported to the user via a message box.
 /// </summary>
 /// <param name="sender">The item that raised the click.</param>
 /// <param name="e">Click event data.</param>
 private void Show_ItemClick(object sender, ItemClickEventArgs e)
 {
     try
     {
         using (var imageViewer = new ImageViewer(Image, "FZP"))
         {
             imageViewer.ShowDialog();
         }
     }
     catch (Exception exception)
     {
         XtraMessageBox.Show(exception.Message);
     }
 }
Пример #29
0
      /*
      public void CameraTest2()
      {
         using (ImageViewer viewer = new ImageViewer())
         using (Capture capture = new Capture())
         {
            capture.ImageGrabbed += delegate(object sender, EventArgs e)
            {
               Image<Bgr, Byte> img = capture.RetrieveBgrFrame(0);
               img = img.Resize(0.8, Emgu.CV.CvEnum.Inter.Linear);
               Image<Gray, Byte> gray = img.Convert<Gray, Byte>();
               gray._EqualizeHist();
               viewer.Image = gray;

               capture.Pause();
               System.Threading.ThreadPool.QueueUserWorkItem(delegate
               {
                  Thread.Sleep(1000);
                  capture.Start();
               });
            };
            capture.Start();
            viewer.ShowDialog();
         }
      }*/

      /// <summary>
      /// Polls the default camera on the UI Idle event and displays each
      /// frame, round-tripping it through a System.Drawing.Bitmap. Runs until
      /// the viewer window is closed.
      /// </summary>
      public void CameraTest3()
      {
         ImageViewer viewer = new ImageViewer();
         using (Capture capture = new Capture())
         {
            Application.Idle += delegate(object sender, EventArgs e)
            {
               Mat frame = capture.QueryFrame();
               if (frame != null)
               {
                  // Fix: the intermediate image and bitmap were previously
                  // never disposed, leaking on every Idle tick.
                  // NOTE(review): assumes Image<Bgr, Byte>(Bitmap) copies the
                  // pixel data so bmp may be disposed afterwards — confirm.
                  using (Image<Bgr, Byte> converted = frame.ToImage<Bgr, Byte>())
                  using (Bitmap bmp = converted.ToBitmap())
                  {
                     viewer.Image = new Image<Bgr, Byte>(bmp);
                  }
               }
            };
            viewer.ShowDialog();
         }
      }
Пример #30
-1
        /// <summary>
        /// Flood-fills the image at <paramref name="filepath"/> starting from
        /// pixel (0, 0) with tolerance 110, reports the processing time and
        /// total pixel count on the console, and shows the resulting mask in a
        /// viewer zoomed to fit within 800x800.
        /// </summary>
        /// <param name="filepath">Path of the source image file.</param>
        public static void GetFloodFillMask(String filepath)
        {
            // Fix: use Stopwatch instead of DateTime.Now subtraction — far
            // better resolution for timing, and already the file's convention.
            var watch = Stopwatch.StartNew();
            int totalPixels;
            Image<Bgr, Byte> result;
            using (Bitmap source = new Bitmap(filepath))
            using (Bitmap mask = Preprocessing.FloodFill(source, 0, 0, 110))
            {
                totalPixels = source.Width * source.Height;
                // NOTE(review): assumes Image<Bgr, Byte>(Bitmap) copies the
                // pixel data so the bitmaps may be disposed here — confirm.
                // Previously both bitmaps leaked.
                result = new Image<Bgr, Byte>(mask);
            }
            watch.Stop();

            // Display to the user, scaled so the image fits in 800x800.
            ImageViewer display = new ImageViewer(result, "Mask");
            var scale = Math.Min(800.0 / (double)result.Height, 800.0 / (double)result.Width);
            display.ImageBox.SetZoomScale(scale, new Point(10, 10));
            Console.WriteLine("Total Time :" + watch.Elapsed.ToString());
            Console.WriteLine("Total Pixels: " + totalPixels);
            display.ShowDialog();
        }