Code example #1
    public virtual void ProcessImage(Emgu.CV.Image<Emgu.CV.Structure.Bgr, byte> image) {
      Emgu.CV.Image<Gray, byte> gray = image.Convert<Gray, byte>();
      gray._ThresholdBinary(new Gray(_threshold), new Gray(255.0));
      gray._Not();

      Parsley.Core.EllipseDetector ed = new Parsley.Core.EllipseDetector();
      ed.MinimumContourCount = _min_contour_count;

      List<Parsley.Core.DetectedEllipse> ellipses =
        new List<Parsley.Core.DetectedEllipse>(ed.DetectEllipses(gray));

      List<Parsley.Core.DetectedEllipse> finals =
        new List<Parsley.Core.DetectedEllipse>(
          ellipses.Where(e => { return e.Rating < _distance_threshold; })
        );

      finals.Sort(
        (a, b) => {
          double dista = a.Ellipse.MCvBox2D.center.X * a.Ellipse.MCvBox2D.center.X + a.Ellipse.MCvBox2D.center.Y * a.Ellipse.MCvBox2D.center.Y;
          double distb = b.Ellipse.MCvBox2D.center.X * b.Ellipse.MCvBox2D.center.X + b.Ellipse.MCvBox2D.center.Y * b.Ellipse.MCvBox2D.center.Y;
          return dista.CompareTo(distb);
        }
      );

      Bgr bgr = new Bgr(0, 255, 0);
      MCvFont f = new MCvFont(Emgu.CV.CvEnum.FONT.CV_FONT_HERSHEY_PLAIN, 0.8, 0.8);
      int count = 1;
      foreach (Parsley.Core.DetectedEllipse e in finals) {
        image.Draw(e.Ellipse, bgr, 2);
        image.Draw(count.ToString(), ref f, new System.Drawing.Point((int)e.Ellipse.MCvBox2D.center.X, (int)e.Ellipse.MCvBox2D.center.Y), bgr);
        count++;
      }
    }
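A minimal driver for this ProcessImage overload might look like the sketch below; the EllipseProcessor type name, its parameterless constructor and the file paths are assumptions, only the Emgu.CV image load/save calls are standard API.

    // Hypothetical usage sketch -- EllipseProcessor (the class containing ProcessImage)
    // and the file names are assumed, not taken from the example above.
    var detector = new EllipseProcessor();
    using (var img = new Emgu.CV.Image<Emgu.CV.Structure.Bgr, byte>("circles.png"))
    {
      detector.ProcessImage(img);          // draws numbered ellipses into img
      img.Save("circles_annotated.png");   // Image<,>.Save writes the annotated frame
    }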
Code example #2
 protected override void OnFrame(Parsley.Core.BuildingBlocks.FrameGrabber fp, Emgu.CV.Image<Emgu.CV.Structure.Bgr, byte> img) {
   Core.CalibrationPattern pattern = _pattern;
   if (pattern != null) {
     Image<Gray, Byte> gray = img.Convert<Gray, Byte>();
     pattern.FindPattern(gray);
     this.UpdateStatusDisplay(pattern.PatternFound);
     this.HandleCalibrateRequest();
     this.HandleTakeImageRequest();
     this.DrawCoordinateFrame(img);
     pattern.DrawPattern(img, pattern.ImagePoints, pattern.PatternFound);
   }
 }
Code example #3
 protected override void OnFrame(Parsley.Core.BuildingBlocks.FrameGrabber fp, Emgu.CV.Image<Emgu.CV.Structure.Bgr, byte> img)
 {
     Core.CalibrationPattern pattern = _pattern;
     if (pattern != null)
     { // look for the calibration pattern if a calibration marker is available
         Image<Gray, Byte> gray = img.Convert<Gray, Byte>(); //convert image to grayscale
         pattern.FindPattern(gray); // find the calibration pattern
         this.UpdateStatusDisplay(pattern.PatternFound);
         this.HandleCalibrateRequest();
         this.HandleTakeImageRequest();
         this.DrawCoordinateFrame(img);
         pattern.DrawPattern(img, pattern.ImagePoints, pattern.PatternFound);  // draw the AR overlay on the marker if the pattern was found
     }
 }
Code example #4
    override protected void OnFrame(Parsley.Core.BuildingBlocks.FrameGrabber fp, Emgu.CV.Image<Emgu.CV.Structure.Bgr, byte> img) {
      Core.CalibrationPattern pattern = this.Context.CalibrationPattern;
      Emgu.CV.Image<Gray, Byte> gray = img.Convert<Gray, Byte>();
      gray._EqualizeHist();
      pattern.FindPattern(gray);

      if (pattern.PatternFound) {
        Emgu.CV.ExtrinsicCameraParameters ecp = _ex.Calibrate(pattern.ImagePoints);
        lock (Context.Viewer) {
          Matrix m = Matrix.Identity(4, 4);
          m.SetMatrix(0, 2, 0, 3, ecp.ExtrinsicMatrix.ToParsley());
          _board_transform.Matrix = m.ToInterop();
        }
      }

      pattern.DrawPattern(img, pattern.ImagePoints, pattern.PatternFound);
    }
Code example #5
        public List<FaceScored> FindFaces(Emgu.CV.Image<Emgu.CV.Structure.Bgr, byte> image, CascadeClassifier cascadeClassifierFace, CascadeClassifier cascadeClassifierEye)
        {
            List<FaceScored> currentFaces = new List<FaceScored>();
            using (Image<Gray, Byte> gray = image.Convert<Gray, Byte>())
            {
                gray._EqualizeHist();
                Size minFaceSize = new Size(minSizeFace, minSizeFace);
                Size maxFaceSize = new Size(maxSizeFace, maxSizeFace);
                Size minEyeSize = new Size(minSizeEye, minSizeEye);
                Size maxEyeSize = new Size(maxSizeEye, maxSizeEye);
                Rectangle[] facesDetected = cascadeClassifierFace.DetectMultiScale(gray, scaleFace, neighborsFace, minFaceSize, maxFaceSize);

                foreach (Rectangle f in facesDetected)
                {
                    if (f.Width < 35)
                        break;
                    gray.ROI = f;

                    Rectangle[] eyesDetected = cascadeClassifierEye.DetectMultiScale(gray, scaleEye, neighborsEye, minEyeSize, maxEyeSize);
                    if (eyesDetected.Count() > 0)
                    {
                        FaceScored faceModel = new FaceScored();
                        faceModel.FaceImage = gray.Bitmap;
                        faceModel.FaceImageFullColr = image.GetSubRect(f).Bitmap;
                        faceModel.Height = faceModel.FaceImage.Height;
                        faceModel.Width = faceModel.FaceImage.Width;
                        faceModel.EyesCount = eyesDetected.Count();

                        Gray avgf = new Gray();
                        MCvScalar avstd = new MCvScalar();
                        gray.AvgSdv(out avgf, out avstd);
                        faceModel.StdDev = avstd.V0;

                        currentFaces.Add(faceModel);
                        if (currentFaces.Count % 5 == 0)
                            Console.WriteLine("FaceDetect Add every 5 faceModel" + faceModel.Width);
                        break;
                    }
                    gray.ROI = Rectangle.Empty;
                }
            }
            return currentFaces;
        }
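A call site for this FindFaces overload could look like the following sketch; the cascade file names, the detector instance and the input image are assumptions, while CascadeClassifier and the Image<Bgr, byte> file constructor are standard Emgu.CV API.

            // Hypothetical usage sketch -- cascade file names and the "detector" instance are assumed.
            CascadeClassifier faceCascade = new CascadeClassifier("haarcascade_frontalface_default.xml");
            CascadeClassifier eyeCascade = new CascadeClassifier("haarcascade_eye.xml");
            using (Image<Bgr, byte> frame = new Image<Bgr, byte>("people.jpg"))
            {
                List<FaceScored> faces = detector.FindFaces(frame, faceCascade, eyeCascade);
                Console.WriteLine("Faces with at least one eye: " + faces.Count);
            }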
Code example #6
    protected override void OnFrame(Parsley.Core.BuildingBlocks.FrameGrabber fp, Emgu.CV.Image<Emgu.CV.Structure.Bgr, byte> img) {
      Parsley.Core.ExtrinsicCalibration ec;
      ExtrinsicCameraParameters ecp;
      bool pattern_found = false;
      Core.CalibrationPattern p = _pattern;
      if (p != null) {
        Image<Gray, Byte> gray = img.Convert<Gray, Byte>();
        pattern_found = p.FindPattern(gray);
        p.DrawPattern(img, p.ImagePoints, p.PatternFound);

        // if pattern has been found ==> find extrinsics and draw the corresponding coordinate frame 
        if (pattern_found == true && Context.Setup.Camera.Intrinsics != null)
        {
          ec = new Parsley.Core.ExtrinsicCalibration(p.ObjectPoints, Context.Setup.Camera.Intrinsics);
          ecp = ec.Calibrate(p.ImagePoints);
          
          if(ecp != null)
            Core.Drawing.DrawCoordinateFrame(img, ecp, Context.Setup.Camera.Intrinsics);
        }
      }

      base.OnFrame(fp, img);
    }
Code example #7
File: Circles.cs  Project: sivarajankumar/dentalsmile
    /// <summary>
    /// Find ellipses in image
    /// </summary>
    /// <param name="img">Image to search pattern for</param>
    /// <param name="image_points">Detected centers</param>
    /// <returns>True if pattern was found, false otherwise</returns>
    public override bool FindPattern(Emgu.CV.Image<Gray, byte> img, out System.Drawing.PointF[] image_points) {
      Emgu.CV.Image<Gray, byte> gray = img.Convert<Gray, byte>();
      gray._ThresholdBinary(new Gray(_binary_threshold), new Gray(255.0));
      gray._Not(); // Circles are black, black is considered background, therefore flip.

      Parsley.Core.EllipseDetector ed = new Parsley.Core.EllipseDetector();
      ed.MinimumContourCount = this.MinimumContourCount;

      // Detect initial ellipses
      List<Parsley.Core.DetectedEllipse> ellipses =
        new List<Parsley.Core.DetectedEllipse>(ed.DetectEllipses(gray));

      // Filter out all ellipses below rating threshold
      List<Parsley.Core.DetectedEllipse> finals =
        new List<Parsley.Core.DetectedEllipse>(
          ellipses.Where(e => { return e.Rating < this.MeanDistanceThreshold; })
        );

      // At least the number of required ellipses need to be found
      if (finals.Count < _number_circle_centers.Width * _number_circle_centers.Height) {
        image_points = new System.Drawing.PointF[0];
        return false;
      }

      int[] marker_ids;
      if (!FindMarkerEllipses(gray, finals, out marker_ids)) {
        image_points = new System.Drawing.PointF[0];
        return false;
      }

      // Check that all markers are found
      if (marker_ids.Length != 4) {
        image_points = new System.Drawing.PointF[0];
        return false;
      }

      // Find intrinsic/extrinsic calibration matrices based on known marker correspondences
      Emgu.CV.IntrinsicCameraParameters icp;
      Emgu.CV.ExtrinsicCameraParameters ecp;
      ApproximatePlane(finals, marker_ids, out icp, out ecp, img.Size);

      // Project all object points to image points
      MCvPoint3D32f[] converted_object_points = Array.ConvertAll(
        this.ObjectPoints.ToArray(),
        value => { return value.ToEmguF(); });

      System.Drawing.PointF[] expected_image_points = 
        Emgu.CV.CameraCalibration.ProjectPoints(converted_object_points, ecp, icp);

      image_points =
        expected_image_points.Select(
          e => { return NearestEllipseCenter(finals, e); }
        ).Where(
          ne => { return Math.Sqrt(ne.dist2) < _ellipse_distance; }
        ).Select(
          ne => { return ne.center; }
        ).ToArray();

      // currently we need to detect all requested ellipses.
      return image_points.Length == _number_circle_centers.Width * _number_circle_centers.Height;

    }
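Because the detected circle centers are returned through an out parameter, a hedged call site might look like this; CirclePattern stands in for the class that declares this override, and the image path is assumed.

    // Hypothetical usage sketch -- CirclePattern is a stand-in for the class that
    // contains this FindPattern override; the image path is assumed.
    var pattern = new CirclePattern();
    using (var gray = new Emgu.CV.Image<Gray, byte>("calibration_frame.png"))
    {
      System.Drawing.PointF[] centers;
      if (pattern.FindPattern(gray, out centers))
        Console.WriteLine("Found " + centers.Length + " circle centers.");
    }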
Code example #8
    protected override void OnFrame(Parsley.Core.BuildingBlocks.FrameGrabber fp, Emgu.CV.Image<Emgu.CV.Structure.Bgr, byte> img) 
    {
      // Constraint checking
      if (!Context.Setup.Camera.HasIntrinsics) 
      {
        _on_roi = false;
        return;
      }

      if (_interactor.State == Parsley.UI.InteractionState.Interacting) 
      {
        _interactor.DrawIndicator(_interactor.Current, img);
      } 
      else 
      {
        _interactor.DrawIndicator(_r, img);
      }

      if (_on_roi && _pattern != null) 
      {
        Image<Gray, Byte> gray = img.Convert<Gray, Byte>();
        _pattern.IntrinsicParameters = Context.Setup.Camera.Intrinsics;

        try 
        {
          _pattern.FindPattern(gray, _r);
          if (_pattern.PatternFound) 
          {
            Parsley.Core.ExtrinsicCalibration ec = new Parsley.Core.ExtrinsicCalibration(_pattern.ObjectPoints, Context.Setup.Camera.Intrinsics);
            ExtrinsicCameraParameters ecp = ec.Calibrate(_pattern.ImagePoints);
            double[] deviations;
            Vector[] points;
            
            Core.ExtrinsicCalibration.CalibrationError(ecp, Context.Setup.Camera.Intrinsics, _pattern.ImagePoints,
                _pattern.ObjectPoints, out deviations, out points);

            double max_error = deviations.Max();
            if (max_error < _last_error) 
            {
              _last_detected_plane = ecp;
              _last_error = max_error;
              this.Logger.Info(String.Format("Extrinsics successfully calculated. Maximum error {0:F3}", _last_error));
            }
          } 
          else if (!_pattern.PatternFound && _last_detected_plane == null)
          {
            this.Logger.Warn("Pattern not found.");
          }
        }
        catch (System.Exception e) 
        {
          this.Logger.Warn(String.Format("Failed to determine extrinsic calibration: {0}", e.Message));
        }
      }
      if (_last_detected_plane != null) 
      {
        Core.Drawing.DrawCoordinateFrame(img, _last_detected_plane, Context.Setup.Camera.Intrinsics);
      }
    }
Code example #9
    public void ProcessImage(Emgu.CV.Image<Emgu.CV.Structure.Bgr, byte> image) {
      Emgu.CV.Image<Gray, byte> gray = image.Convert<Gray, byte>();

      

      Emgu.CV.Image<Gray, byte> binary = new Image<Gray, byte>(image.Size);
      CvInvoke.cvThreshold(gray, binary, 40, 255, THRESH.CV_THRESH_BINARY | THRESH.CV_THRESH_OTSU);
      binary._Not();
      Emgu.CV.Contour<System.Drawing.Point> contour_points = binary.FindContours();
      
      MemStorage storage = new MemStorage();
      Matrix<double> warp = new Matrix<double>(3, 3);

      while (contour_points != null) {
        Contour<Point> c = contour_points.ApproxPoly(contour_points.Perimeter * 0.05, storage);
        double p = c.Perimeter;
        if (c.Total == 4 && p > 300) {
          
          PointF[] src = new PointF[] { 
            new PointF(c[0].X, c[0].Y),
            new PointF(c[1].X, c[1].Y),
            new PointF(c[2].X, c[2].Y),
            new PointF(c[3].X, c[3].Y)};

          CvInvoke.cvGetPerspectiveTransform(src, _dest, warp);
          int flags = (int)INTER.CV_INTER_LINEAR + (int)WARP.CV_WARP_FILL_OUTLIERS;
          CvInvoke.cvWarpPerspective(gray, _roi, warp, flags, new MCvScalar(0));


          double min_error;
          Orientation orient;

          FindBestOrientation(out min_error, out orient);
          if (min_error < 0.4) {
            image.DrawPolyline(c.ToArray(), true, new Bgr(Color.Green), 2);
            System.Console.WriteLine(min_error + " " + orient);

            switch (orient) {
              case Orientation.Degrees0:
                image.Draw(new LineSegment2D(c[0], c[3]), new Bgr(System.Drawing.Color.Red), 2);
                break;
              case Orientation.Degrees90:
                image.Draw(new LineSegment2D(c[1], c[0]), new Bgr(System.Drawing.Color.Red), 2);
                break;
              case Orientation.Degrees180:
                image.Draw(new LineSegment2D(c[2], c[1]), new Bgr(System.Drawing.Color.Red), 2);
                break;
              case Orientation.Degrees270:
                image.Draw(new LineSegment2D(c[3], c[2]), new Bgr(System.Drawing.Color.Red), 2);
                break;
            }
          }

          // 0 degrees
        }
        contour_points = contour_points.HNext;
      }
    }
Code example #10
        public List<Face> FindFaces(Emgu.CV.Image<Emgu.CV.Structure.Bgr, byte> image, string faceFileName, string eyeFileName, double scale, int neighbors, int minSize)
        {
            List<Face> faces = new List<Face>();
            List<Rectangle> facesRect = new List<Rectangle>();
            List<Rectangle> eyesRect = new List<Rectangle>();
            try
            {
                using (CascadeClassifier face = createClassifier(faceFileName))
                {
                    using (Image<Gray, Byte> gray = image.Convert<Gray, Byte>())
                    {
                        gray._EqualizeHist();
                        Rectangle[] facesDetected = face.DetectMultiScale(gray, scale, neighbors, new Size(minSize, minSize), Size.Empty);

                        foreach (Rectangle f in facesDetected)
                        {
                            using (Image<Gray, Byte> faceImg = gray.GetSubRect(f))
                            {
                                using (Image<Gray, Byte> clone = faceImg.Clone())
                                {
                                    Face facemodel = new Face();
                                    eyesRect = new List<Rectangle>(FindEyes(eyeFileName, clone));
                                    if (eyesRect != null && eyesRect.Count>0)
                                    {
                                        facemodel.EyesRects = eyesRect;
                                        facemodel.EyesCount = eyesRect.Count;
                                    }
                                    else
                                    {
                                        continue;
                                    }
                                    facemodel.FaceImage = clone.Bitmap;
                                    facemodel.Height = facemodel.FaceImage.Height;
                                    facemodel.Width = facemodel.FaceImage.Width;
                                    facemodel.FaceRect = f;
                                    facemodel.FramePosX = f.X;
                                    facemodel.FramePosY = f.Y;
                                    facemodel.ImageFrameSize = image.Size;

                                    Gray avgf = new Gray();
                                    MCvScalar avstd = new MCvScalar();
                                    clone.AvgSdv(out avgf, out avstd);
                                    facemodel.StdDev = avstd.V0;
                                    faces.Add(facemodel);
                                    if (faces.Count%5==0)
                                        Console.WriteLine("FaceDetect OpenCL every5 Add faceModel" + facemodel.Width);

                                    break;
                                }
                            }
                            gray.ROI = Rectangle.Empty;
                        }
                    }
                }
            }
            catch (Exception errFaceDet)
            {
                Console.WriteLine("ERROR - faceDetect OpenCL =" + errFaceDet);
            }
            return faces;
        }
Code example #11
File: SorterVision.cs  Project: jack06215/tmc
        /// <summary>
        /// finds the chessboard in the image
        /// </summary>
        /// <param name="src">
        /// source image that will contain the chessboard
        /// </param>
        /// <param name="dim">
        /// the dimensions of the chessboard
        /// </param>
        /// <returns>
        /// returns the locations of all the chessboard points
        /// </returns>
        private PointF[] FindPattern(Emgu.CV.Image<Gray, byte> src, Size dim)
        {
            //string win1 = "Test Window"; //The name of the window
            //CvInvoke.cvNamedWindow(win1); //Create the window using the specific name
            Image<Bgr, Byte> colr = src.Convert<Bgr, Byte>();
            PointF[] checkerBoardPoints = Emgu.CV.CameraCalibration.FindChessboardCorners(src, dim, Emgu.CV.CvEnum.CALIB_CB_TYPE.ADAPTIVE_THRESH | Emgu.CV.CvEnum.CALIB_CB_TYPE.FILTER_QUADS);//CALIB_CB_TYPE.DEFAULT);

            //CameraCalibration.CalibrateCamera(

            int a = 0;
            foreach (PointF checkerBoardPoint in checkerBoardPoints)
            { // draw dots just for debugging atm
                Rectangle rect = new Rectangle();
                rect.X = (int)checkerBoardPoint.X;
                rect.Y = (int)checkerBoardPoint.Y;
                rect.Width = 2;
                rect.Height = 2;
                if (a == 0)
                {
                    colr.Draw(rect, new Bgr(Color.Blue), 1);
                    a++;
                }
                else
                {
                    colr.Draw(rect, new Bgr(Color.Red), 1);
                    a++;
                }
            }
            //CvInvoke.cvShowImage(win1, colr); //Show the image

            //CvInvoke.cvWaitKey(0);
            return checkerBoardPoints;
        }
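Since this FindPattern helper is private, it can only be exercised from inside the same class; a possible call site is sketched below, where the image path and the 9x6 board size are assumptions.

            // Hypothetical call from another method of the same class --
            // the image path and board dimensions are assumed.
            using (Image<Gray, byte> src = new Image<Gray, byte>("board.png"))
            {
                PointF[] corners = FindPattern(src, new Size(9, 6));
                if (corners != null)
                    Console.WriteLine("Chessboard corners found: " + corners.Length);
            }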
Code example #12
File: EllipseDetection.cs  Project: Raptek/STEM
        public void ProcessImage(Emgu.CV.Image<Emgu.CV.Structure.Bgr, byte> image)
        {
            Emgu.CV.Image<Gray, byte> gray = image.Convert<Gray, byte>();
            gray._ThresholdBinary(new Gray(_threshold), new Gray(255.0));
            gray._Not();

            //Emgu.CV.Contour<System.Drawing.Point> c = gray.FindContours(Emgu.CV.CvEnum.CHAIN_APPROX_METHOD.CV_CHAIN_CODE, Emgu.CV.CvEnum.RETR_TYPE.CV_RETR_LIST);
            Emgu.CV.Contour<System.Drawing.Point> c = gray.FindContours();
            List<Ellipse> ellipses = new List<Ellipse>();

            while (c != null)
            {
                if (c.Count() >= _min_contour_count)
                {
                    System.Drawing.PointF[] mypoints = Array.ConvertAll(
                      c.ToArray<System.Drawing.Point>(),
                      value => new System.Drawing.PointF(value.X, value.Y)
                    );

                    Ellipse e = Emgu.CV.PointCollection.EllipseLeastSquareFitting(mypoints);
                    MCvBox2D box = e.MCvBox2D;
                    box.size.Height *= 0.5f;
                    box.size.Width *= 0.5f;
                    Ellipse final_ellipse = new Ellipse(box);

                    Matrix m = Matrix.Identity(3, 3);
                    m[0, 0] = Math.Cos(final_ellipse.MCvBox2D.angle);
                    m[0, 1] = -Math.Sin(final_ellipse.MCvBox2D.angle);
                    m[0, 2] = final_ellipse.MCvBox2D.center.X;
                    m[1, 0] = Math.Sin(final_ellipse.MCvBox2D.angle);
                    m[1, 1] = Math.Cos(final_ellipse.MCvBox2D.angle);
                    m[1, 2] = final_ellipse.MCvBox2D.center.Y;

                    Matrix inv = m.Inverse();
                    double rating = 0.0;
                    double a = final_ellipse.MCvBox2D.size.Width;
                    double b = final_ellipse.MCvBox2D.size.Height;

                    if (a < b)
                    {
                        // swap so that a holds the semi-major and b the semi-minor axis
                        double tmp = a;
                        a = b;
                        b = tmp;
                    }
                    foreach (System.Drawing.PointF p in mypoints)
                    {
                        Vector x = new Vector(new double[] { p.X, p.Y, 1 });
                        Matrix r = inv.Multiply(x.ToColumnMatrix());

                        rating += Math.Abs((Math.Pow(r[0, 0] / a, 2) + Math.Pow(r[1, 0] / b, 2)) - 1);
                    }
                    Console.WriteLine(rating);
                    if (rating < 50)
                    {
                        ellipses.Add(final_ellipse);
                    }

                }
                c = c.HNext;
            }

            ellipses.Sort(
              (a, b) =>
              {
                  double dista = a.MCvBox2D.center.X * a.MCvBox2D.center.X + a.MCvBox2D.center.Y * a.MCvBox2D.center.Y;
                  double distb = b.MCvBox2D.center.X * b.MCvBox2D.center.X + b.MCvBox2D.center.Y * b.MCvBox2D.center.Y;
                  return dista.CompareTo(distb);
              }
            );

            Bgr bgr = new Bgr(0, 255, 0);
            MCvFont f = new MCvFont(Emgu.CV.CvEnum.FONT.CV_FONT_HERSHEY_PLAIN, 0.8, 0.8);
            int count = 1;
            foreach (Ellipse e in ellipses)
            {
                image.Draw(e, bgr, 2);
                image.Draw(count.ToString(), ref f, new System.Drawing.Point((int)e.MCvBox2D.center.X, (int)e.MCvBox2D.center.Y), bgr);
                count++;
            }
        }
Code example #13
 public SURFEngine(Emgu.CV.Image<Bgr, byte> roi)
     : this(roi.Convert<Gray, byte>())
 {
 }
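Only this forwarding constructor is shown, so a usage sketch is necessarily thin; the template image path is an assumption.

     // Hypothetical usage sketch -- the template image is assumed; the Bgr overload
     // simply forwards a grayscale copy to the Gray constructor it delegates to.
     using (var roi = new Emgu.CV.Image<Bgr, byte>("template.png"))
     {
         var surf = new SURFEngine(roi);
         // ... feature matching against camera frames would go here ...
     }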