Beispiel #1
0
        /// <summary>
        /// Rebuilds the picture box image with a thresholded alpha channel:
        /// near-black pixels (gray value &lt; 10) become fully transparent,
        /// everything else fully opaque.
        /// </summary>
        private void btn_process_Click(object sender, EventArgs e)
        {
            var image = picBox.Image;

            // BUGFIX: the temporary Bitmap and every intermediate EmguCV image are
            // IDisposable; previously they leaked native/GDI buffers on each click.
            using (var bmp = new Bitmap(image))
            using (var src = new Emgu.CV.Image <Bgra, byte>(bmp))
            {
                SplitChannels(src);

                // Split into the B, G, R, A planes (channels[3] is alpha) and build
                // a grayscale view used to decide the new alpha values.
                var channels = src.Split();
                try
                {
                    using (var g = src.Convert <Gray, byte>())
                    // Binary threshold: gray < 10 -> alpha 0 (transparent),
                    // otherwise alpha 255 (part of the image).
                    using (var newAlpha = g.ThresholdBinary(new Gray(10), new Gray(255)))
                    using (var newImage = new Emgu.CV.Image <Bgra, byte>(
                               new Emgu.CV.Image <Gray, byte>[]
                    {
                        channels[0],
                        channels[1],
                        channels[2],
                        newAlpha
                    }))
                    {
                        // ToBitmap() returns an independent copy, so disposing
                        // newImage afterwards is safe.
                        picBox.Image = newImage.ToBitmap();
                    }
                }
                finally
                {
                    foreach (var channel in channels)
                    {
                        channel.Dispose();
                    }
                }
            }
        }
Beispiel #2
0
        /// <summary>
        /// Per-frame handler: searches the frame for the calibration pattern and,
        /// when both the pattern and the camera intrinsics are available, computes
        /// the extrinsics and draws the corresponding coordinate frame on the image.
        /// </summary>
        protected override void OnFrame(Parsley.Core.BuildingBlocks.FrameGrabber fp, Emgu.CV.Image <Emgu.CV.Structure.Bgr, byte> img)
        {
            Core.CalibrationPattern p = _pattern;
            if (p != null)
            {
                bool pattern_found;
                // BUGFIX: the grayscale copy is IDisposable and was leaked per frame.
                using (Image <Gray, Byte> gray = img.Convert <Gray, Byte>())
                {
                    pattern_found = p.FindPattern(gray);
                }
                p.DrawPattern(img, p.ImagePoints, p.PatternFound);

                // If the pattern has been found and the camera is calibrated,
                // find the extrinsics and draw the coordinate frame.
                if (pattern_found && Context.Setup.Camera.Intrinsics != null)
                {
                    Parsley.Core.ExtrinsicCalibration ec =
                        new Parsley.Core.ExtrinsicCalibration(p.ObjectPoints, Context.Setup.Camera.Intrinsics);
                    ExtrinsicCameraParameters ecp = ec.Calibrate(p.ImagePoints);

                    if (ecp != null)
                    {
                        Core.Drawing.DrawCoordinateFrame(img, ecp, Context.Setup.Camera.Intrinsics);
                    }
                }
            }

            base.OnFrame(fp, img);
        }
Beispiel #3
0
        /// <summary>
        /// Per-frame handler: draws the ROI indicator and, while a ROI is active,
        /// searches it for the calibration pattern, calibrates the extrinsics and
        /// keeps the detected plane with the smallest maximum reprojection error
        /// seen so far. The best plane (if any) is drawn on every frame.
        /// </summary>
        protected override void OnFrame(Parsley.Core.BuildingBlocks.FrameGrabber fp, Emgu.CV.Image <Emgu.CV.Structure.Bgr, byte> img)
        {
            // Constraint checking: extrinsic calibration requires camera intrinsics.
            if (!Context.Setup.Camera.HasIntrinsics)
            {
                _on_roi = false;
                return;
            }

            // Show the ROI currently being dragged, or the last committed one.
            if (_interactor.State == Parsley.UI.InteractionState.Interacting)
            {
                _interactor.DrawIndicator(_interactor.Current, img);
            }
            else
            {
                _interactor.DrawIndicator(_r, img);
            }

            if (_on_roi && _pattern != null)
            {
                // BUGFIX: the grayscale copy is IDisposable and was leaked per frame.
                using (Image <Gray, Byte> gray = img.Convert <Gray, Byte>())
                {
                    _pattern.IntrinsicParameters = Context.Setup.Camera.Intrinsics;

                    try
                    {
                        _pattern.FindPattern(gray, _r);
                        if (_pattern.PatternFound)
                        {
                            Parsley.Core.ExtrinsicCalibration ec  = new Parsley.Core.ExtrinsicCalibration(_pattern.ObjectPoints, Context.Setup.Camera.Intrinsics);
                            ExtrinsicCameraParameters         ecp = ec.Calibrate(_pattern.ImagePoints);
                            double[] deviations;
                            Vector[] points;

                            // Per-point reprojection error; the maximum is the quality measure.
                            Core.ExtrinsicCalibration.CalibrationError(ecp, Context.Setup.Camera.Intrinsics, _pattern.ImagePoints,
                                                                       _pattern.ObjectPoints, out deviations, out points);

                            double max_error = deviations.Max();
                            if (max_error < _last_error)
                            {
                                _last_detected_plane = ecp;
                                _last_error          = max_error;
                                this.Logger.Info(String.Format("Extrinsics successfully calculated. Maximum error {0:F3}", _last_error));
                            }
                        }
                        // BUGFIX: was the non-short-circuit bitwise '&'; '&&' is the intended
                        // logical operator. (Note !_pattern.PatternFound is always true in this
                        // else-branch, so the condition reduces to the null check.)
                        else if (!_pattern.PatternFound && _last_detected_plane == null)
                        {
                            this.Logger.Warn("Pattern not found.");
                        }
                    }
                    catch (System.Exception e)
                    {
                        this.Logger.Warn(String.Format("Failed to determine extrinsic calibration: {0}", e.Message));
                    }
                }
            }
            if (_last_detected_plane != null)
            {
                Core.Drawing.DrawCoordinateFrame(img, _last_detected_plane, Context.Setup.Camera.Intrinsics);
            }
        }
 /// <summary>
 /// Per-frame handler: runs pattern detection on a grayscale copy of the frame,
 /// updates the UI status, services pending calibrate/take-image requests and
 /// draws the coordinate frame plus the detected pattern overlay.
 /// </summary>
 protected override void OnFrame(Parsley.Core.BuildingBlocks.FrameGrabber fp, Emgu.CV.Image <Emgu.CV.Structure.Bgr, byte> img)
 {
     // Nothing to do until a calibration pattern has been configured.
     Core.CalibrationPattern calibrationPattern = _pattern;
     if (calibrationPattern == null)
     {
         return;
     }

     // Pattern search operates on a grayscale copy of the incoming frame.
     Image <Gray, Byte> grayFrame = img.Convert <Gray, Byte>();
     calibrationPattern.FindPattern(grayFrame);

     this.UpdateStatusDisplay(calibrationPattern.PatternFound);
     this.HandleCalibrateRequest();
     this.HandleTakeImageRequest();
     this.DrawCoordinateFrame(img);
     calibrationPattern.DrawPattern(img, calibrationPattern.ImagePoints, calibrationPattern.PatternFound);
 }
Beispiel #5
0
 /// <summary>
 /// Per-frame handler: detects the calibration pattern on a grayscale copy of
 /// the frame, updates the status display, services pending calibrate and
 /// take-image requests, and draws the coordinate frame and pattern overlay.
 /// </summary>
 protected override void OnFrame(Parsley.Core.BuildingBlocks.FrameGrabber fp, Emgu.CV.Image <Emgu.CV.Structure.Bgr, byte> img)
 {
     Core.CalibrationPattern pattern = _pattern;
     if (pattern != null)
     {                                                         // search for the calibration pattern only when a calibration marker is configured
         Image <Gray, Byte> gray = img.Convert <Gray, Byte>(); // convert the frame to grayscale
         pattern.FindPattern(gray);                            // search for the calibration pattern
         this.UpdateStatusDisplay(pattern.PatternFound);
         this.HandleCalibrateRequest();
         this.HandleTakeImageRequest();
         this.DrawCoordinateFrame(img);
         pattern.DrawPattern(img, pattern.ImagePoints, pattern.PatternFound); // draw the AR overlay on the marker when the pattern was found
     }
 }
        /// <summary>
        /// Per-frame handler: histogram-equalizes the grayscale frame, searches for
        /// the calibration pattern and, when found, recalibrates the extrinsics and
        /// publishes the new board transform. The pattern overlay is always drawn.
        /// </summary>
        protected override void OnFrame(Parsley.Core.BuildingBlocks.FrameGrabber fp, Emgu.CV.Image <Emgu.CV.Structure.Bgr, byte> img)
        {
            Core.CalibrationPattern detectionPattern = this.Context.CalibrationPattern;

            // Equalize the histogram before detection (helps under uneven lighting).
            Emgu.CV.Image <Gray, Byte> grayFrame = img.Convert <Gray, Byte>();
            grayFrame._EqualizeHist();
            detectionPattern.FindPattern(grayFrame);

            if (detectionPattern.PatternFound)
            {
                Emgu.CV.ExtrinsicCameraParameters extrinsics = _ex.Calibrate(detectionPattern.ImagePoints);

                // Publish the updated transform under the viewer lock — presumably the
                // viewer reads _board_transform concurrently.
                lock (Context.Viewer)
                {
                    Matrix transform = Matrix.Identity(4, 4);
                    transform.SetMatrix(0, 2, 0, 3, extrinsics.ExtrinsicMatrix.ToParsley());
                    _board_transform.Matrix = transform.ToInterop();
                }
            }

            detectionPattern.DrawPattern(img, detectionPattern.ImagePoints, detectionPattern.PatternFound);
        }
Beispiel #7
0
        /// <summary>
        /// Detects dark elliptical blobs in the image, orders them by squared
        /// distance of their center from the image origin, and draws each accepted
        /// ellipse together with its 1-based index.
        /// </summary>
        public virtual void ProcessImage(Emgu.CV.Image <Emgu.CV.Structure.Bgr, byte> image)
        {
            // Binarize and invert: the ellipses are dark, and dark pixels would
            // otherwise be treated as background.
            Emgu.CV.Image <Gray, byte> gray = image.Convert <Gray, byte>();
            gray._ThresholdBinary(new Gray(_threshold), new Gray(255.0));
            gray._Not();

            Parsley.Core.EllipseDetector detector = new Parsley.Core.EllipseDetector();
            detector.MinimumContourCount = _min_contour_count;

            // Detect candidates and keep only those rated below the distance threshold.
            List <Parsley.Core.DetectedEllipse> accepted =
                new List <Parsley.Core.DetectedEllipse>(
                    detector.DetectEllipses(gray).Where(e => e.Rating < _distance_threshold));

            // Sort by squared distance of the ellipse center from the origin (0, 0).
            accepted.Sort((a, b) =>
            {
                double da = a.Ellipse.MCvBox2D.center.X * a.Ellipse.MCvBox2D.center.X + a.Ellipse.MCvBox2D.center.Y * a.Ellipse.MCvBox2D.center.Y;
                double db = b.Ellipse.MCvBox2D.center.X * b.Ellipse.MCvBox2D.center.X + b.Ellipse.MCvBox2D.center.Y * b.Ellipse.MCvBox2D.center.Y;
                return da.CompareTo(db);
            });

            Bgr     green = new Bgr(0, 255, 0);
            MCvFont font  = new MCvFont(Emgu.CV.CvEnum.FONT.CV_FONT_HERSHEY_PLAIN, 0.8, 0.8);
            int     label = 1;

            // Draw every accepted ellipse with its running index at its center.
            foreach (Parsley.Core.DetectedEllipse candidate in accepted)
            {
                image.Draw(candidate.Ellipse, green, 2);
                image.Draw(label.ToString(), ref font,
                           new System.Drawing.Point((int)candidate.Ellipse.MCvBox2D.center.X, (int)candidate.Ellipse.MCvBox2D.center.Y),
                           green);
                label++;
            }
        }
        /// <summary>
        /// Re-thresholds the image currently shown in pbScreen with the intensity
        /// selected on the tbIntensity track bar, shows the binary result in
        /// pbIntensityTest, and optionally mirrors it back onto pbScreen.
        /// Any failure is logged and reported via a message box.
        /// </summary>
        private void RefreshGrayImage()
        {
            try
            {
                //LogHelper.logger.Info("RefreshGrayImage called...");
                if (pbScreen.Image == null)
                    return;

                // Wrap the displayed image in a grayscale EmguCV image.
                // NOTE(review): assumes pbScreen.Image is always a Bitmap; the 'as'
                // cast yields null otherwise and the constructor throws (caught below)
                // — confirm the picture box is only ever fed Bitmaps.
                //Emgu.CV.Image<Emgu.CV.Structure.Gray, Byte> cvImage = new Emgu.CV.Image<Emgu.CV.Structure.Gray, Byte>(test as Bitmap);
                Emgu.CV.Image<Emgu.CV.Structure.Gray, Byte> cvImage = new Emgu.CV.Image<Emgu.CV.Structure.Gray, Byte>(pbScreen.Image as Bitmap);
                //Emgu.CV.CvInvoke.cvShowImage("Current Image under use...", cvImage);

                // Binary threshold: pixels above 'intensity' become 255, the rest 0.
                double intensity = tbIntensity.Value;
                var binaryImage = cvImage.Convert<Gray, byte>().ThresholdBinary(new Gray(intensity), new Gray(255));
                //Emgu.CV.CvInvoke.cvShowImage("Current Image under use...", binaryImage);
                // Clone so the preview bitmap is independent of the EmguCV buffer.
                pbIntensityTest.Image = (binaryImage.Bitmap).Clone(new Rectangle(0, 0, binaryImage.Width, binaryImage.Height), (binaryImage.Bitmap).PixelFormat);

                // Keep the numeric display and the shared manager state in sync.
                txtIntensity.Text = tbIntensity.Value.ToString();
                ImageProcessingManager.IntensityValue = intensity;

                if (cbShowIntensityOnTop.Checked)
                {
                    pbScreen.Image = pbIntensityTest.Image;
                }
            }
            catch (Exception exception)
            {
                LogHelper.logger.Error("RefreshGrayImage: " + exception.Message);
                LogHelper.logger.Error("RefreshGrayImage: " + exception.StackTrace);
                MessageBox.Show("An error occurred. Please restart bot", "Chessbot", MessageBoxButtons.OK, MessageBoxIcon.Error);
            }
            //LogHelper.logger.Info("RefreshGrayImage finished...");
        }
 /// <summary>
 /// Converts an input image to a binary grayscale image by thresholding at the
 /// given intensity (pixels above the threshold become 255, the rest 0).
 /// </summary>
 /// <param name="inputImage">Source image; expected to be a Bitmap.</param>
 /// <param name="intensity">Threshold value in [0, 255].</param>
 /// <returns>The thresholded image, or null if conversion failed.</returns>
 public static Emgu.CV.Image<Emgu.CV.Structure.Gray, Byte> GetBinaryImage(Image inputImage, double intensity)
 {
     //LogHelper.logger.Info("GetBinaryImage called...");
     Image<Gray, byte> binaryImage = null;
     try
     {
         Emgu.CV.Image<Emgu.CV.Structure.Gray, Byte> cvImage = new Emgu.CV.Image<Emgu.CV.Structure.Gray, Byte>(inputImage as Bitmap);
         binaryImage = cvImage.Convert<Gray, byte>().ThresholdBinary(new Gray(intensity), new Gray(255));
         //Emgu.CV.CvInvoke.cvShowImage("Current Image under use...", binaryImage);
     }
     catch (Exception exception)
     {
         // BUGFIX: the log labels previously said "GetNextBestMove" — a copy-paste
         // error that misattributed these failures to the wrong method.
         LogHelper.logger.Error("GetBinaryImage: " + exception.Message);
         LogHelper.logger.Error("GetBinaryImage: " + exception.StackTrace);
         MessageBox.Show("An error occurred. Please restart bot", "Chessbot", MessageBoxButtons.OK, MessageBoxIcon.Error);
     }
     //LogHelper.logger.Info("GetBinaryImage finished...");
     return binaryImage;
 }
Beispiel #10
0
        /// <summary>
        /// Performs grayscale template matching of f_Templete inside f_Source and
        /// returns the best-scoring match locations (at most f_GetNumberOfBest),
        /// ordered by descending score.
        /// </summary>
        /// <param name="f_Source">Image to search in; converted to grayscale internally.</param>
        /// <param name="f_Templete">Template to look for; converted to grayscale internally.</param>
        /// <param name="f_TempleteMatchingType">OpenCV matching metric (default: normalized correlation coefficient).</param>
        /// <param name="f_Threadhold">Minimum matching score for a location to be reported.</param>
        /// <param name="f_GetNumberOfBest">Upper bound on the number of returned matches.</param>
        /// <param name="f_IsUsePyramid">When true, matching is repeated across a downscaled image pyramid.</param>
        /// <param name="f_PyramidLevel">Number of pyramid levels; level i is downscaled by 2^i.</param>
        /// <returns>Matches with score at or above f_Threadhold, best first.</returns>
        static public List <TempleteMatchingBestOrder> DoTempleteMatching(Emgu.CV.Image <Emgu.CV.Structure.Bgr, byte> f_Source,
                                                                          Emgu.CV.Image <Emgu.CV.Structure.Bgr, byte> f_Templete,
                                                                          Emgu.CV.CvEnum.TemplateMatchingType f_TempleteMatchingType = Emgu.CV.CvEnum.TemplateMatchingType.CcoeffNormed,
                                                                          float f_Threadhold    = 0.9F,
                                                                          int f_GetNumberOfBest = 10,
                                                                          bool f_IsUsePyramid   = false,
                                                                          int f_PyramidLevel    = 1)
        {
            List <TempleteMatchingBestOrder> t_SortScore = new List <TempleteMatchingBestOrder>();

            t_SortScore.Clear(); // NOTE(review): redundant — the list was just created empty.
            Emgu.CV.Image <Emgu.CV.Structure.Gray, float> t_Result;
            if (f_IsUsePyramid == false)
            {
                // Single-scale matching on the full-resolution grayscale images.
                t_Result = f_Source.Convert <Emgu.CV.Structure.Gray, byte>().MatchTemplate(f_Templete.Convert <Emgu.CV.Structure.Gray, byte>(), f_TempleteMatchingType);
                // NOTE(review): the MinMax results are never used in this branch.
                double[] t_MinValues, t_maxValues;
                Point[]  t_MinLocations, t_MaxLocations;
                t_Result.MinMax(out t_MinValues, out t_maxValues, out t_MinLocations, out t_MaxLocations);
                TempleteMatchingBestOrder t_Temp;
                // Collect every response-map cell whose score reaches the threshold.
                for (int i = 0; i < t_Result.Rows; i++)
                {
                    for (int j = 0; j < t_Result.Cols; j++)
                    {
                        float t_Value = t_Result.Data[i, j, 0];
                        if (t_Value >= f_Threadhold)
                        {
                            t_Temp.s_Location = new Point(j, i); // (col, row) -> (x, y)
                            t_Temp.s_Score    = t_Value;
                            t_SortScore.Add(t_Temp);
                        }
                    }
                }
            }
            else
            {
                // Build grayscale pyramids for source and template; level 0 is full
                // resolution, and each further PyrDown halves width and height.
                Emgu.CV.Image <Emgu.CV.Structure.Gray, byte>[] t_PyramidSource   = new Emgu.CV.Image <Emgu.CV.Structure.Gray, byte> [f_PyramidLevel];
                Emgu.CV.Image <Emgu.CV.Structure.Gray, byte>[] t_PyramidTemplete = new Emgu.CV.Image <Emgu.CV.Structure.Gray, byte> [f_PyramidLevel];
                t_PyramidSource[0]   = f_Source.Convert <Emgu.CV.Structure.Gray, byte>().Clone();
                t_PyramidTemplete[0] = f_Templete.Convert <Emgu.CV.Structure.Gray, byte>().Clone();
                int[] t_Level = new int[f_PyramidLevel];
                t_Level[0] = 1; // scale factor from level i back to full resolution
                for (int i = 1; i < f_PyramidLevel; i++)
                {
                    t_Level[i]           = t_Level[i - 1] * 2;
                    t_PyramidSource[i]   = t_PyramidSource[i - 1].PyrDown().Clone();
                    t_PyramidTemplete[i] = t_PyramidTemplete[i - 1].PyrDown().Clone();
                }
                // NOTE(review): every source level is matched against every template
                // level, not only equal levels — presumably intentional to tolerate
                // scale differences between source and template; confirm with author.
                for (int i = f_PyramidLevel - 1; i >= 0; i--)
                {
                    for (int j = f_PyramidLevel - 1; j >= 0; j--)
                    {
                        // The template must fit inside the source at these levels.
                        if (t_PyramidTemplete[j].Width > t_PyramidSource[i].Width || t_PyramidTemplete[j].Height > t_PyramidSource[i].Height)
                        {
                            continue;
                        }
                        t_Result = t_PyramidSource[i].MatchTemplate(t_PyramidTemplete[j], f_TempleteMatchingType);
                        double[] t_MinValues, t_maxValues;
                        Point[]  t_MinLocations, t_MaxLocations;
                        t_Result.MinMax(out t_MinValues, out t_maxValues, out t_MinLocations, out t_MaxLocations);
                        // Skip this level pair when even the best score misses the threshold.
                        if ((float)(t_maxValues[0]) <= f_Threadhold)
                        {
                            continue;
                        }
                        TempleteMatchingBestOrder t_Temp;
                        for (int x = 0; x < t_Result.Rows; x++)
                        {
                            for (int y = 0; y < t_Result.Cols; y++)
                            {
                                float t_Value = t_Result.Data[x, y, 0];
                                if (t_Value >= f_Threadhold)
                                {
                                    // Map from pyramid-level coordinates back to
                                    // full-resolution coordinates of the source.
                                    t_Temp.s_Location = new Point(y * t_Level[i], x * t_Level[i]);
                                    t_Temp.s_Score    = t_Value;
                                    t_SortScore.Add(t_Temp);
                                }
                            }
                        }
                    }
                }
            }
            // Return at most f_GetNumberOfBest results, highest score first.
            if (t_SortScore.Count > f_GetNumberOfBest)
            {
                return(t_SortScore.OrderByDescending(o => o.s_Score).ToList().GetRange(0, f_GetNumberOfBest));
            }
            else
            {
                return(t_SortScore.OrderByDescending(o => o.s_Score).ToList().GetRange(0, t_SortScore.Count));
            }
        }
Beispiel #11
0
        /// <summary>
        /// Accepts a multipart image upload, runs Haar-cascade face detection on
        /// each uploaded file and returns the FIRST detected face cropped as a
        /// JPEG attachment. Responds 415 for non-multipart requests and 500 when
        /// no face is found in any uploaded file.
        /// </summary>
        public async Task <HttpResponseMessage> Index()
        {
            if (!Request.Content.IsMimeMultipartContent())
            {
                throw new HttpResponseException(HttpStatusCode.UnsupportedMediaType);
            }

            Emgu.CV.CascadeClassifier cc = new Emgu.CV.CascadeClassifier(System.Web.Hosting.HostingEnvironment.MapPath("/haarcascade_frontalface_alt_tree.xml"));
            var provider = new MultipartMemoryStreamProvider();
            await Request.Content.ReadAsMultipartAsync(provider);

            foreach (var file in provider.Contents)
            {
                var filename = file.Headers.ContentDisposition.FileName.Trim('\"');
                var buffer   = await file.ReadAsByteArrayAsync();

                using (MemoryStream mStream = new MemoryStream(buffer, 0, buffer.Length))
                {
                    mStream.Position = 0;

                    using (Bitmap bmp = new Bitmap(mStream))
                    {
                        using (Emgu.CV.Image <Emgu.CV.Structure.Bgr, Int32> img = new Emgu.CV.Image <Emgu.CV.Structure.Bgr, Int32>(bmp))
                        {
                            if (img != null)
                            {
                                var grayframe = img.Convert <Emgu.CV.Structure.Gray, byte>();
                                var faces     = cc.DetectMultiScale(grayframe);
                                foreach (var face in faces)
                                {
                                    // Only the first detected face is cropped and returned.
                                    // BUGFIX: Graphics and MemoryStream were never disposed,
                                    // leaking a GDI+ handle on every request.
                                    using (Bitmap faceBmp = new Bitmap(face.Right - face.Left, face.Bottom - face.Top))
                                    using (Graphics g = Graphics.FromImage(faceBmp))
                                    using (MemoryStream outStream = new MemoryStream())
                                    {
                                        // Copy the face rectangle out of the source bitmap.
                                        g.DrawImage(bmp, new Rectangle(0, 0, faceBmp.Width, faceBmp.Height), face.Left, face.Top, faceBmp.Width, faceBmp.Height, GraphicsUnit.Pixel);
                                        faceBmp.Save(outStream, System.Drawing.Imaging.ImageFormat.Jpeg);

                                        // ToArray() copies the buffer, so disposing outStream is safe.
                                        var result = new HttpResponseMessage(HttpStatusCode.OK)
                                        {
                                            Content = new ByteArrayContent(outStream.ToArray()),
                                        };
                                        result.Content.Headers.ContentDisposition =
                                            new System.Net.Http.Headers.ContentDispositionHeaderValue("attachment")
                                        {
                                            FileName = "face.jpg"
                                        };
                                        result.Content.Headers.ContentType   = new System.Net.Http.Headers.MediaTypeHeaderValue("image/jpeg");
                                        result.Content.Headers.ContentLength = outStream.Length;

                                        return(result);
                                    }
                                }
                            }
                        }
                    }
                }
            }

            // No face found in any uploaded image.
            throw new HttpResponseException(HttpStatusCode.InternalServerError);
        }
Beispiel #12
0
        /// <summary>
        /// Find ellipses in image: binarizes the image, detects candidate ellipses,
        /// identifies the four marker ellipses, approximates the calibration plane
        /// from the marker correspondences, and matches each expected (projected)
        /// center to its nearest detected ellipse center.
        /// </summary>
        /// <param name="img">Image to search pattern for</param>
        /// <param name="image_points">Detected centers</param>
        /// <returns>True if pattern was found, false otherwise</returns>
        public override bool FindPattern(Emgu.CV.Image <Gray, byte> img, out System.Drawing.PointF[] image_points)
        {
            // Work on a copy so the caller's image is not mutated by the in-place
            // threshold/invert operations below.
            Emgu.CV.Image <Gray, byte> gray = img.Convert <Gray, byte>();
            gray._ThresholdBinary(new Gray(_binary_threshold), new Gray(255.0));
            gray._Not(); // Circles are black, black is considered backgroud, therefore flip.

            Parsley.Core.EllipseDetector ed = new Parsley.Core.EllipseDetector();
            ed.MinimumContourCount = this.MinimumContourCount;

            // Detect initial ellipses
            List <Parsley.Core.DetectedEllipse> ellipses =
                new List <Parsley.Core.DetectedEllipse>(ed.DetectEllipses(gray));

            // Filter out all ellipses below rating threshold
            List <Parsley.Core.DetectedEllipse> finals =
                new List <Parsley.Core.DetectedEllipse>(
                    ellipses.Where(e => { return(e.Rating < this.MeanDistanceThreshold); })
                    );

            // At least the number of required ellipses need to be found
            if (finals.Count < _number_circle_centers.Width * _number_circle_centers.Height)
            {
                image_points = new System.Drawing.PointF[0];
                return(false);
            }

            int[] marker_ids;
            if (!FindMarkerEllipses(gray, finals, out marker_ids))
            {
                image_points = new System.Drawing.PointF[0];
                return(false);
            }

            // Check that all markers are found (the pattern defines exactly four markers)
            if (marker_ids.Length != 4)
            {
                image_points = new System.Drawing.PointF[0];
                return(false);
            }

            // Find intrinsic/extrinsic calibration matrices based on known marker correspondences
            Emgu.CV.IntrinsicCameraParameters icp;
            Emgu.CV.ExtrinsicCameraParameters ecp;
            ApproximatePlane(finals, marker_ids, out icp, out ecp, img.Size);

            // Project all object points to image points
            MCvPoint3D32f[] converted_object_points = Array.ConvertAll(
                this.ObjectPoints.ToArray(),
                value => { return(value.ToEmguF()); });

            System.Drawing.PointF[] expected_image_points =
                Emgu.CV.CameraCalibration.ProjectPoints(converted_object_points, ecp, icp);

            // For each expected center keep the nearest detected ellipse center,
            // discarding matches farther away than _ellipse_distance.
            image_points =
                expected_image_points.Select(
                    e => { return(NearestEllipseCenter(finals, e)); }
                    ).Where(
                    ne => { return(Math.Sqrt(ne.dist2) < _ellipse_distance); }
                    ).Select(
                    ne => { return(ne.center); }
                    ).ToArray();

            // currently we need to detect all requested ellipses.
            return(image_points.Length == _number_circle_centers.Width * _number_circle_centers.Height);
        }
 /// <summary>
 /// Converts the raw BGR camera image to full-range HSV, caches the result in
 /// hsvImageCV and returns it.
 /// </summary>
 /// <returns>The full-range HSV representation of rawImageCV.</returns>
 protected Emgu.CV.Image <Emgu.CV.Structure.Hsv, Byte> GetHsvImage()
 {
     // BUGFIX: the previous code first ran rawImageCV.Convert<Hsv, Byte>() (a
     // BGR->HSV conversion) and then called CvtColor(..., Bgr2HsvFull) on the
     // result, converting the already-HSV data a second time as if it were BGR.
     // Allocate a destination of the correct color type and convert exactly once
     // using the full-range (0-255 hue) variant.
     hsvImageCV = new Emgu.CV.Image <Emgu.CV.Structure.Hsv, Byte>(rawImageCV.Size);
     CvInvoke.CvtColor(rawImageCV, hsvImageCV, ColorConversion.Bgr2HsvFull);
     return(hsvImageCV);
 }
Beispiel #14
0
        /// <summary>
        /// Lets the user pick an image file, runs Haar-cascade face detection on it,
        /// keeps only faces with at least two detected eyes, and draws the face
        /// rectangles (red) and eye rectangles (yellow) onto the displayed bitmap.
        /// Face/eye info is appended to listBox1.
        /// </summary>
        private void button1_Click(object sender, EventArgs e)
        {
            DialogResult result = openFileDialog1.ShowDialog(this);
            if (result != DialogResult.OK)
            {
                return;
            }

            Bitmap bmp = (Bitmap)Bitmap.FromStream(openFileDialog1.OpenFile());
            Emgu.CV.HaarCascade cascade = new Emgu.CV.HaarCascade("haarcascade_frontalface_default.xml");
            Emgu.CV.HaarCascade eyeCascade = new Emgu.CV.HaarCascade("haarcascade_eye.xml");
            Emgu.CV.Image<Emgu.CV.Structure.Bgr, Byte> image = new Emgu.CV.Image<Emgu.CV.Structure.Bgr, Byte>(bmp);
            Emgu.CV.Image<Emgu.CV.Structure.Gray, Byte> gray = image.Convert<Emgu.CV.Structure.Gray, Byte>();
            System.Diagnostics.Stopwatch watch = System.Diagnostics.Stopwatch.StartNew();
            Emgu.CV.Structure.MCvAvgComp[][] faces = gray.DetectHaarCascade(cascade,
                1.1000, 1, Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING, new Size(20, 20));
            watch.Stop();

            // BUGFIX: a Graphics object was created (and leaked) per detected face;
            // create it once and dispose it. The shared Pens.* pens replace the
            // previously leaked 'new Pen(...)' instances.
            using (Graphics g = Graphics.FromImage(bmp))
            {
                foreach (var f in faces[0])
                {
                    StringBuilder sb = new StringBuilder();
                    sb.Append(f.rect.ToString());

                    // Restrict the eye search to the face region, then reset the ROI.
                    gray.ROI = f.rect;
                    Emgu.CV.Structure.MCvAvgComp[][] eyes = gray.DetectHaarCascade(eyeCascade,
                        1.1000, 1, Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING, new Size(20, 20));
                    gray.ROI = Rectangle.Empty;

                    // A face without at least two detected eyes is treated as a false positive.
                    if (eyes[0].Length < 2) continue;

                    sb.Append(" => ");
                    sb.Append(eyes[0].Length.ToString());
                    listBox1.Items.Add(sb.ToString());

                    g.DrawRectangle(Pens.Red, f.rect.X, f.rect.Y, f.rect.Width, f.rect.Height);
                    foreach (var eye in eyes[0])
                    {
                        // Eye coordinates are relative to the face ROI; shift into image space.
                        Rectangle r = new Rectangle(
                            eye.rect.X + f.rect.X,
                            eye.rect.Y + f.rect.Y,
                            eye.rect.Width, eye.rect.Height
                            );
                        g.DrawRectangle(Pens.Yellow, r);
                    }
                }
            }

            pictureBox1.Image = bmp;
        }
Beispiel #15
0
        /// <summary>
        /// Detects quadrilateral markers in the image: binarizes (Otsu), walks the
        /// contour list, and for every 4-corner contour with perimeter &gt; 300 warps
        /// the region into _roi, determines the best-matching orientation and, when
        /// the match error is small enough, draws the outline and an orientation edge.
        /// </summary>
        public void ProcessImage(Emgu.CV.Image <Emgu.CV.Structure.Bgr, byte> image)
        {
            Emgu.CV.Image <Gray, byte> gray = image.Convert <Gray, byte>();

            // Otsu threshold then invert, so the marker contours are foreground.
            Emgu.CV.Image <Gray, byte> binary = new Image <Gray, byte>(image.Size);
            CvInvoke.cvThreshold(gray, binary, 40, 255, THRESH.CV_THRESH_BINARY | THRESH.CV_THRESH_OTSU);
            binary._Not();
            Emgu.CV.Contour <System.Drawing.Point> contour_points = binary.FindContours();

            // BUGFIX: MemStorage and Matrix wrap native resources and were never
            // disposed; release them deterministically once the scan is done.
            using (MemStorage storage = new MemStorage())
            using (Matrix <double> warp = new Matrix <double>(3, 3))
            {
                // Walk the flat contour list via HNext.
                while (contour_points != null)
                {
                    Contour <Point> c = contour_points.ApproxPoly(contour_points.Perimeter * 0.05, storage);
                    double          p = c.Perimeter;
                    // Candidate markers: quadrilaterals with a sufficiently large perimeter.
                    if (c.Total == 4 && p > 300)
                    {
                        PointF[] src = new PointF[] {
                            new PointF(c[0].X, c[0].Y),
                            new PointF(c[1].X, c[1].Y),
                            new PointF(c[2].X, c[2].Y),
                            new PointF(c[3].X, c[3].Y)
                        };

                        // Rectify the candidate region into the fixed-size _roi image.
                        CvInvoke.cvGetPerspectiveTransform(src, _dest, warp);
                        int flags = (int)INTER.CV_INTER_LINEAR + (int)WARP.CV_WARP_FILL_OUTLIERS;
                        CvInvoke.cvWarpPerspective(gray, _roi, warp, flags, new MCvScalar(0));

                        double      min_error;
                        Orientation orient;

                        FindBestOrientation(out min_error, out orient);
                        if (min_error < 0.4)
                        {
                            image.DrawPolyline(c.ToArray(), true, new Bgr(Color.Green), 2);
                            System.Console.WriteLine(min_error + " " + orient);

                            // Mark the "top" edge of the marker according to the
                            // detected orientation.
                            switch (orient)
                            {
                            case Orientation.Degrees0:
                                image.Draw(new LineSegment2D(c[0], c[3]), new Bgr(System.Drawing.Color.Red), 2);
                                break;

                            case Orientation.Degrees90:
                                image.Draw(new LineSegment2D(c[1], c[0]), new Bgr(System.Drawing.Color.Red), 2);
                                break;

                            case Orientation.Degrees180:
                                image.Draw(new LineSegment2D(c[2], c[1]), new Bgr(System.Drawing.Color.Red), 2);
                                break;

                            case Orientation.Degrees270:
                                image.Draw(new LineSegment2D(c[3], c[2]), new Bgr(System.Drawing.Color.Red), 2);
                                break;
                            }
                        }
                    }
                    contour_points = contour_points.HNext;
                }
            }
        }