Code Example #1
 static IEnumerable<Tuple<string, double>> FetchMatchingImages(string queryImage, IEnumerable<string> imageMap)
 {
     var q = new Emgu.CV.Image<Gray, byte>(queryImage);
     return
         imageMap
         //.AsParallel()
         .Select(im => new
         {
             imageFile = im,
             matchError =
                 RGBMatch.DoRGBMatch(q,
                     new Emgu.CV.Image<Gray, byte>(im),
                     RGBMatch.FastFeatureExt,
                     10,
                     100,
                     RGBMatch.BriefFeatureDescriptorFunc,
                     RGBMatch.HammingDist,
                     20,
                     2,
                     2,
                     new Random(1),
                     fp => 1, //RGBMatch.VisualizeFeaturePairs(fp, queryImage, im,
                              //    @"C:\Kamal\RSE\WorkingDirs\RGBMatchTest\" + Path.GetFileNameWithoutExtension(queryImage) + "_" + Path.GetFileNameWithoutExtension(im) + ".png"),
                     fp => 1)
         })
         .ShowProgress("Attempted", 1)
         //.Where(res => res.matchError < 10)
         .ShowProgress("Matched", 1)
         .OrderBy(res => res.matchError)
         .Select(r => new Tuple<string, double>(r.imageFile, r.matchError));
 }
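A minimal usage sketch for FetchMatchingImages (hypothetical paths; assumes System.Linq and the RGBMatch/ShowProgress helpers are in scope): rank a directory of candidates against one query image and print the five lowest-error matches.

 // Hypothetical usage: the returned sequence is already ordered by ascending match error.
 var candidates = System.IO.Directory.EnumerateFiles(@"C:\images", "*.png");
 foreach (var match in FetchMatchingImages(@"C:\images\query.png", candidates).Take(5))
 {
     Console.WriteLine("{0}: {1:F3}", match.Item1, match.Item2);
 }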
Code Example #2
 public ExtractLaserLineSlide(Context c) : base(c) {
   InitializeComponent();
   _lle = new Parsley.Core.BrightestPixelLLE();
   _channel = 2;
   _cmb_channel.SelectedIndex = _channel;
   _reference = null;
 }
Code Example #3
 protected override void PreQuantization(EmguImage image)
 {
     // Convert image to Lab color space and get palette
     this.rgbImg = image;
     this.labImg = image.Convert<LabColor, byte>();
     this.labPalette = ColorConversion.ToLabPalette<Color>(this.Palette);
     this.nearestNeighbour.Initialize(labPalette);
 }
Code Example #4
File: ScanningSlide.cs Project: guozanhua/parsley
    protected override void OnFrame(Parsley.Core.BuildingBlocks.FrameGrabber fp, Emgu.CV.Image<Emgu.CV.Structure.Bgr, byte> img) {
      if (_take_texture_image) {
        _take_texture_image = false;
        _texture_image = img.Copy();
        lock (Context.Viewer) {
          UpdateAllColors();
        }
      }

      if (_clear_points) {
        _clear_points = false;
        _pixel_point_ids.Reset();
        Context.Setup.ScanWorkflow.Reset();
        _pointcloud.ClearPoints();
      }

      // Update the transformation between positioner coordinate system and camera coordinate system
      if (_update_positioner_transformation)
      {
        _update_positioner_transformation = false;
        Context.Setup.Positioner.UpdateTransformation(Context.Setup.Camera);
        _pixel_point_ids.Reset();
        Context.Setup.ScanWorkflow.Reset();
      }

      if (Context.Setup.Camera.FrameSize != _pixel_point_ids.Size) {
        _pixel_point_ids.Size = Context.Setup.Camera.FrameSize;
      }

      List<Vector> points;
      List<System.Drawing.Point> pixels;

      if (Context.Setup.ScanWorkflow.Process(Context.Setup, img, out points, out pixels)) {
        lock (Context.Viewer) {
          UpdatePoints(points, pixels);
        }
        foreach (System.Drawing.Point p in pixels) {
          img[p.Y, p.X] = new Bgr(Color.Green);
        }
      }
    }
Code Example #5
File: Form1.cs Project: usmanghani/Misc
        private void button1_Click(object sender, EventArgs e)
        {
            DialogResult result = openFileDialog1.ShowDialog(this);
            if (result == DialogResult.OK)
            {
                Bitmap bmp = (Bitmap)Bitmap.FromStream(openFileDialog1.OpenFile());
                //pictureBox1.Image = bmp;
                Emgu.CV.HaarCascade cascade = new Emgu.CV.HaarCascade("haarcascade_frontalface_default.xml");
                Emgu.CV.HaarCascade eyeCascade = new Emgu.CV.HaarCascade("haarcascade_eye.xml");
                Emgu.CV.Image<Emgu.CV.Structure.Bgr, Byte> image = new Emgu.CV.Image<Emgu.CV.Structure.Bgr, Byte>(bmp);
                Emgu.CV.Image<Emgu.CV.Structure.Gray, Byte> gray = image.Convert<Emgu.CV.Structure.Gray, Byte>();
                System.Diagnostics.Stopwatch watch = System.Diagnostics.Stopwatch.StartNew();
                Emgu.CV.Structure.MCvAvgComp[][] faces = gray.DetectHaarCascade(cascade,
                    1.1000, 1, Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING, new Size(20,20));
                watch.Stop();
                float scalex = (float)pictureBox1.Width / image.Width;   // cast avoids integer division
                float scaley = (float)pictureBox1.Height / image.Height;

                //MessageBox.Show(string.Format("Found {0} Faces in {1} milliseconds", faces[0].Count().ToString(), watch.ElapsedMilliseconds.ToString()));
                foreach (var f in faces[0])
                {
                    StringBuilder sb = new StringBuilder();
                    sb.Append(f.rect.ToString());
                    gray.ROI = f.rect;
                    Emgu.CV.Structure.MCvAvgComp[][] eyes = gray.DetectHaarCascade(eyeCascade,
                    1.1000, 1, Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING, new Size(20, 20));
                    gray.ROI = Rectangle.Empty;

                    if (eyes[0].Length < 2) continue;
                    sb.Append(" => ");
                    sb.Append(eyes[0].Length.ToString());
                    listBox1.Items.Add(sb.ToString());
                    //MessageBox.Show(eyes[0].Count().ToString());
                    Graphics g = Graphics.FromImage(bmp);
                    g.DrawRectangle(new Pen(Brushes.Red), f.rect.X,
                        f.rect.Y, f.rect.Width,
                        f.rect.Height);
                    foreach (var eye in eyes[0])
                    {
                        Rectangle r = new Rectangle(
                            eye.rect.X + f.rect.X,
                            eye.rect.Y + f.rect.Y,
                            eye.rect.Width, eye.rect.Height
                            );
                        g.DrawRectangle(new Pen(Brushes.Yellow), r);
                    }
                }

                pictureBox1.Image = bmp;
                //watch = System.Diagnostics.Stopwatch.StartNew();
                //openCV.Net.Haar.HaarClassifierCascade cascade2 = openCV.Net.Haar.HaarClassifierCascade.Parse(XDocument.Load("haarcascade_frontalface_default.xml"));
                //openCV.Net.Haar.Sequence<openCV.Net.Haar.AvgComp>
                //    seq = openCV.Net.FaceDetector.DetectFaces(bmp, cascade2);
                //watch.Stop();

                ////MessageBox.Show(string.Format("Found {0} Faces in {1} milliseconds", seq.Count.ToString(), watch.ElapsedMilliseconds.ToString()));
                //foreach (var a in seq)
                //{
                //    pictureBox1.CreateGraphics().DrawRectangle(new Pen(Brushes.Yellow), a.Rectangle.X * scalex,
                //        a.Rectangle.Y * scaley, a.Rectangle.Width * scalex,
                //        a.Rectangle.Height * scaley);
                //}

                //using (FaceDetector fd = new FaceDetector(LibFaces.ClassifierType.Front))
                //{
                //    var faces = fd.FindFaces(bmp);
                //    Graphics g = pictureBox1.CreateGraphics();
                //    g.DrawRectangles(new Pen(Brushes.Red), faces.ToArray());
                //}
            }
        }
Code Example #6
        public static Emgu.CV.Image <Emgu.CV.Structure.Gray, System.Byte> RemoveSpecks(this Emgu.CV.Image <Emgu.CV.Structure.Gray, System.Byte> image)
        {
            if (image is null)
            {
                throw new System.ArgumentNullException(nameof(image));
            }

            // Erode then dilate (a morphological opening): removes small foreground specks.
            return(image.Erode(iterations: 4)
                   .Dilate(iterations: 4));
        }
Code Example #7
 private void _btn_take_reference_Click(object sender, EventArgs e)
 {
     _reference = Context.World.Camera.Frame().Copy();
 }
Code Example #8
        public static Emgu.CV.Image <Emgu.CV.Structure.Gray, System.Byte> GetThreshold(this Emgu.CV.Image <Emgu.CV.Structure.Gray, System.Byte> image)
        {
            if (image is null)
            {
                throw new System.ArgumentNullException(nameof(image));
            }

            return(image.ThresholdBinary(threshold: Settings.ThresholdMinGray,
                                         maxValue: Settings.ThresholdMaxGray));
        }
Code Example #9
 public static PointF[] DetectCornersBW(Bitmap picture, Size patternSize)
 {
     var image = new Emgu.CV.Image<Emgu.CV.Structure.Gray, byte>(picture);
     var corners = Emgu.CV.CameraCalibration.FindChessboardCorners(image, patternSize, Emgu.CV.CvEnum.CALIB_CB_TYPE.ADAPTIVE_THRESH);
     if (corners == null)
         return null;
     var cc = new PointF[][] { corners };
     image.FindCornerSubPix(cc, new System.Drawing.Size(11, 11), new System.Drawing.Size(-1, -1), new Emgu.CV.Structure.MCvTermCriteria(30, 0.1));
     return corners;
 }
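A small usage sketch for DetectCornersBW (hypothetical file name and pattern size): look for a chessboard with 9x6 inner corners and report the result.

 // Hypothetical usage: DetectCornersBW returns null when the chessboard is not found.
 using (var bmp = new Bitmap(@"board.png"))
 {
     PointF[] corners = DetectCornersBW(bmp, new Size(9, 6));
     Console.WriteLine(corners == null
         ? "Pattern not found."
         : string.Format("Found {0} corners.", corners.Length));
 }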
Code Example #10
File: Default.aspx.cs Project: adamekz/mmn_site
        // main image-processing operations
        public void ocr()
        {
            // open the file
            FileStream srcstream = new FileStream(pic_file, FileMode.Open, FileAccess.Read, FileShare.ReadWrite);
            // create the bitmap
            Bitmap source = new Bitmap(srcstream);
            // change the WebForm settings

            Panel1.Visible = false;
            Image1.Dispose();

            Label2.Text = "Processing...";
            Panel3.Visible = true;

            // Preparation code
            Bitmap ext = source;

            // Convert the image to grayscale - test with images of different color profiles
            // (work out a scheme for deriving the filter parameters from the photo's RGB).
            // The weights are the BT.709 luminance coefficients.
            AForge.Imaging.Filters.Grayscale grScl = new AForge.Imaging.Filters.Grayscale(0.2125, 0.7154, 0.0721);
            source = grScl.Apply(source);
            // Increase the contrast
            AForge.Imaging.Filters.ContrastStretch conCor = new AForge.Imaging.Filters.ContrastStretch();

            source = conCor.Apply(source);
            // Sharpen the image
            AForge.Imaging.Filters.Sharpen shp = new AForge.Imaging.Filters.Sharpen();
            source = shp.Apply(source);

            //Segmentation code
            bool processed = false;
               // Image2.Width = 350;
               // Image2.Height = (int)((source.Height * 200) / source.Width);

            try
            {
                Emgu.CV.Image<Bgr, Byte> to_rec = new Emgu.CV.Image<Bgr, byte>(source);
                Do_ocr = new Tesseract("tessdata", "eng", Tesseract.OcrEngineMode.OEM_DEFAULT);
                try
                {
                    Do_ocr.Recognize<Bgr>(to_rec);
                    //recognizedText.Text = ocr.GetText();
                    PastOCRBox.Text = Do_ocr.GetText();
                   // StatusBox.Text = "Finished! Ready for next one...";
                    Do_ocr.Dispose();
                    to_rec.Dispose();
                }
                catch (Exception exp)
                {
                    Label2.Text = "Recognition error! " + exp.Message;
                    Do_ocr.Dispose();
                    return;
                }
            }
            catch (Exception exp)
            {
                Label2.Text = "OCR engine failed! " + exp.Message;
                return;
            }

            // clean up temporary files

              //  source.Save("D:\\test.bmp");
              //  ext.Save("D:\\testcor.bmp");
            source.Dispose();
            srcstream.Close();
            srcstream.Dispose();
            //System.IO.File.Delete(pic_file);
            System.IO.File.Delete(Server.MapPath("~/img/prev.bmp"));
            System.IO.File.Delete(Server.MapPath("~/img/tmp.bmp"));
            // prepare the page layout after recognition
            Panel3.Visible = false;
            Label1.Visible = false;
            Panel0.Visible = false;
            Panel5.Visible = false;

            Panel4.Visible = true;
        }
Code Example #11
 /// <summary>
 /// Draws a rectangle onto the image.
 /// </summary>
 /// <param name="o">Rectangle</param>
 /// <param name="img">Image</param>
 public void DrawIndicator(object o, Emgu.CV.Image <Emgu.CV.Structure.Bgr, byte> img)
 {
     img.Draw((Rectangle)o, new Emgu.CV.Structure.Bgr(Color.Green), 1);
 }
Code Example #12
        public static Emgu.CV.Image <Emgu.CV.Structure.Gray, System.Byte> RemoveHoles(this Emgu.CV.Image <Emgu.CV.Structure.Gray, System.Byte> image)
        {
            if (image is null)
            {
                throw new System.ArgumentNullException(nameof(image));
            }

            var structuringElement = Settings.CenterAnchorPoint.GetStructuringElement(elementShape: Emgu.CV.CvEnum.ElementShape.Rectangle,
                                                                                      elementSize: Settings.StructuringElementSize);

            // Morphological closing fills small holes inside foreground regions.
            return(image.MorphClose(kernel: structuringElement));
        }
Code Example #13
        protected override void OnFrame(Parsley.Core.BuildingBlocks.FrameGrabber fp, Emgu.CV.Image <Emgu.CV.Structure.Bgr, byte> img)
        {
            // Constraint checking
            if (!Context.Setup.Camera.HasIntrinsics)
            {
                _on_roi = false;
                return;
            }

            if (_interactor.State == Parsley.UI.InteractionState.Interacting)
            {
                _interactor.DrawIndicator(_interactor.Current, img);
            }
            else
            {
                _interactor.DrawIndicator(_r, img);
            }

            if (_on_roi && _pattern != null)
            {
                Image <Gray, Byte> gray = img.Convert <Gray, Byte>();
                _pattern.IntrinsicParameters = Context.Setup.Camera.Intrinsics;

                try {
                    _pattern.FindPattern(gray, _r);
                    if (_pattern.PatternFound)
                    {
                        Parsley.Core.ExtrinsicCalibration ec  = new Parsley.Core.ExtrinsicCalibration(_pattern.ObjectPoints, Context.Setup.Camera.Intrinsics);
                        ExtrinsicCameraParameters         ecp = ec.Calibrate(_pattern.ImagePoints);
                        double[] deviations;
                        Vector[] points;

                        Core.ExtrinsicCalibration.CalibrationError(
                            ecp,
                            Context.Setup.Camera.Intrinsics,
                            _pattern.ImagePoints,
                            _pattern.ObjectPoints,
                            out deviations,
                            out points);

                        double max_error = deviations.Max();
                        if (max_error < _last_error)
                        {
                            _last_detected_plane = ecp;
                            _last_error          = max_error;
                            this.Logger.Info(String.Format("Extrinsics successfully calculated. Maximum error {0:F3}", _last_error));
                        }
                    }
                    else
                    {
                        this.Logger.Warn("Pattern not found.");
                    }
                } catch (System.Exception e) {
                    this.Logger.Warn(String.Format("Failed to determine extrinsic calibration: {0}", e.Message));
                }
            }
            if (_last_detected_plane != null)
            {
                Core.Drawing.DrawCoordinateFrame(img, _last_detected_plane, Context.Setup.Camera.Intrinsics);
            }
        }
Code Example #14
        /// <summary>
        /// Tries to find the composite pattern and returns the output parameter image_points.
        /// In case of success the boolean value 'true' is returned.
        /// Note that CompositePatterns can only be found if the camera's intrinsics are set.
        ///
        /// The algorithm works as follows:
        /// If the main pattern 'patternA' could be found, the algorithm is finished already and the resulting
        /// image_points are known and returned.
        /// If only 'patternB' could be found, the given object_points of 'patternA' are transformed in the
        /// 'patternB' coordinate system, using the predefined transformation matrix.
        /// Furthermore, an extrinsic calibration is performed in order to find the extrinsic matrix, which describes
        /// the relation between camera coordinate system and the coordinate system of 'patternB'.
        /// Finally, the library function 'ProjectPoints' is called in order to project the transformed object_points
        /// (currently expressed in 'patternB'-coordinates) into the camera image plane.
        /// The projected points correspond to the image_points of 'patternA'.
        /// ==> To sum up: the predefined transformation is used to calculate the image_points of 'patternA', even
        /// if 'patternA' is invisible.
        /// </summary>
        /// <param name="img"> Input grayscale image. </param>
        /// <param name="image_points"> 2D output image points. </param>
        /// <returns> true... if pattern has been found; false... otherwise. </returns>
        public override bool FindPattern(Emgu.CV.Image <Emgu.CV.Structure.Gray, byte> img, out System.Drawing.PointF[] image_points)
        {
            if (this.IntrinsicParameters != null && _patternA != null && _patternB != null)
            {
                bool foundA = false;
                System.Drawing.PointF[] currentImagePointsA;
                System.Drawing.PointF[] currentImagePointsB;

                //set the object_points of the composite pattern to the object_points of 'patternA'
                this.ObjectPoints = _patternA.ObjectPoints;

                //try to find 'patternA'
                foundA = _patternA.FindPattern(img, out currentImagePointsA);

                //if 'patternA' could be found: the image_points have been found.
                if (foundA)
                {
                    image_points = currentImagePointsA;
                    //_logger.Info("Pattern found.");
                    return(true);
                }
                else
                //else: try to find 'patternB'
                if (_patternB.FindPattern(img, out currentImagePointsB))
                {
                    ExtrinsicCalibration ec_B = null;
                    Emgu.CV.ExtrinsicCameraParameters ecp_B = null;
                    Matrix extrinsic_matrix = Matrix.Identity(4, 4);
                    Matrix temp_matrix      = null;
                    Emgu.CV.Structure.MCvPoint3D32f[] transformedCornerPoints = new Emgu.CV.Structure.MCvPoint3D32f[_patternA.ObjectPoints.Length];

                    try
                    {
                        //if 'patternB' has been found: find the extrinsic matrix (relation between the coordinate systems of 'patternB' and the camera)
                        ec_B  = new ExtrinsicCalibration(_patternB.ObjectPoints, this.IntrinsicParameters);
                        ecp_B = ec_B.Calibrate(currentImagePointsB);

                        if (ecp_B != null)
                        {
                            //form the resulting extrinsic matrix to a homogeneous (4x4) matrix.
                            temp_matrix = Parsley.Core.Extensions.ConvertToParsley.ToParsley(ecp_B.ExtrinsicMatrix);
                            extrinsic_matrix.SetMatrix(0, temp_matrix.RowCount - 1, 0, temp_matrix.ColumnCount - 1, temp_matrix);

                            //transform object points of A into B coordinate system.
                            transformedCornerPoints = MatrixTransformation.TransformVectorToEmgu(_transformationBToA.Inverse(), 1.0, _patternA.ObjectPoints).ToArray <Emgu.CV.Structure.MCvPoint3D32f>();

                            //project the points into the 2D camera plane (image_points)
                            image_points = Emgu.CV.CameraCalibration.ProjectPoints(transformedCornerPoints, ecp_B, this.IntrinsicParameters);
                            return(true);
                        }
                        else
                        {
                            _logger.Warn("Error calculating extrinsic parameters.");
                            image_points = null;
                            return(false);
                        }
                    }
                    catch (Exception e)
                    {
                        _logger.Warn("Caught Exception: {0}.", e);
                        image_points = null;
                        return(false);
                    }
                }
                else
                {
                    //reset the image_points if the pattern could not be found.
                    image_points = null;
                    return(false);
                }
            }
            else
            {
                _logger.Warn("Error: Intrinsics are needed to find a Composite Pattern but not available.");
                image_points = null;
                return(false);
            }
        }
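A sketch of how this method might be driven (the compositePattern, grayImage, and intrinsics variables are hypothetical): the caller makes a single FindPattern call and receives patternA's image points whether patternA was visible directly or reconstructed through patternB.

        // Hypothetical driver for the composite pattern above.
        System.Drawing.PointF[] imagePoints;
        if (compositePattern.FindPattern(grayImage, out imagePoints))
        {
            // The image points always correspond to patternA's object points,
            // so extrinsic calibration proceeds the same way in both cases.
            ExtrinsicCalibration ec = new ExtrinsicCalibration(compositePattern.ObjectPoints, intrinsics);
            Emgu.CV.ExtrinsicCameraParameters ecp = ec.Calibrate(imagePoints);
        }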
Code Example #15
 /// <summary>
 /// Find pattern in image region and make it accessible through local properties.
 /// </summary>
 /// <param name="img">Image to find pattern in.</param>
 /// <param name="roi">Region of interest</param>
 /// <returns>True if pattern was found in image, false otherwise</returns>
 public bool FindPattern(Emgu.CV.Image <Gray, byte> img, Rectangle roi) // find the pattern used for extrinsic calibration, restricted to the ROI
 {
     _pattern_found = this.FindPattern(img, roi, out _image_points);
     return(_pattern_found);
 }
Code Example #16
 /// <summary>
 /// Find pattern in image.
 /// </summary>
 /// <param name="img">Image to find pattern in.</param>
 /// <param name="image_points">Pattern points in image.</param>
 /// <returns>True if pattern was found in image, false otherwise.</returns>
 abstract public bool FindPattern(Emgu.CV.Image <Gray, byte> img, out PointF[] image_points);
Code Example #17
        public void ProcessImage(Emgu.CV.Image <Emgu.CV.Structure.Bgr, byte> image)
        {
            Emgu.CV.Image <Gray, byte> gray = image.Convert <Gray, byte>();

            Emgu.CV.Image <Gray, byte> binary = new Image <Gray, byte>(image.Size);
            CvInvoke.cvThreshold(gray, binary, 40, 255, THRESH.CV_THRESH_BINARY | THRESH.CV_THRESH_OTSU);
            binary._Not();
            Emgu.CV.Contour <System.Drawing.Point> contour_points = binary.FindContours();

            MemStorage      storage = new MemStorage();
            Matrix <double> warp    = new Matrix <double>(3, 3);

            while (contour_points != null)
            {
                Contour <Point> c = contour_points.ApproxPoly(contour_points.Perimeter * 0.05, storage);
                double          p = c.Perimeter;
                if (c.Total == 4 && p > 300)
                {
                    PointF[] src = new PointF[] {
                        new PointF(c[0].X, c[0].Y),
                        new PointF(c[1].X, c[1].Y),
                        new PointF(c[2].X, c[2].Y),
                        new PointF(c[3].X, c[3].Y)
                    };

                    CvInvoke.cvGetPerspectiveTransform(src, _dest, warp);
                    int flags = (int)INTER.CV_INTER_LINEAR + (int)WARP.CV_WARP_FILL_OUTLIERS;
                    CvInvoke.cvWarpPerspective(gray, _roi, warp, flags, new MCvScalar(0));

                    double      min_error;
                    Orientation orient;

                    FindBestOrientation(out min_error, out orient);
                    if (min_error < 0.4)
                    {
                        image.DrawPolyline(c.ToArray(), true, new Bgr(Color.Green), 2);
                        System.Console.WriteLine(min_error + " " + orient);

                        switch (orient)
                        {
                        case Orientation.Degrees0:
                            image.Draw(new LineSegment2D(c[0], c[3]), new Bgr(System.Drawing.Color.Red), 2);
                            break;

                        case Orientation.Degrees90:
                            image.Draw(new LineSegment2D(c[1], c[0]), new Bgr(System.Drawing.Color.Red), 2);
                            break;

                        case Orientation.Degrees180:
                            image.Draw(new LineSegment2D(c[2], c[1]), new Bgr(System.Drawing.Color.Red), 2);
                            break;

                        case Orientation.Degrees270:
                            image.Draw(new LineSegment2D(c[3], c[2]), new Bgr(System.Drawing.Color.Red), 2);
                            break;
                        }
                    }

                    // 0 degrees
                }
                contour_points = contour_points.HNext;
            }
        }
Code Example #18
 public FrameProcessor(int trackType , Emgu.CV.Image<Emgu.CV.Structure.Bgr, Byte> itemImage)
 {
     this.trackType = trackType;
     this.itemImage = itemImage;
 }
Code Example #19
 protected virtual void OnFrame(Parsley.Core.BuildingBlocks.FrameGrabber fp, Emgu.CV.Image <Emgu.CV.Structure.Bgr, byte> img)
 {
 }
Code Example #20
        public int applyOld(String inputName, String saveName)
        {
            Emgu.CV.Image<Bgr, int> img = new Emgu.CV.Image<Bgr, int>(inputName);

            Bgr d = new Bgr(0, 0, 0);
            List<Bgr> colorCat = new List<Bgr>();

            for (int i = 0; i < img.Data.GetLength(0); i++)
            {
                for (int j = 0; j < img.Data.GetLength(1); j++)
                {

                    Bgr b = new Bgr(img[i, j].Blue, img[i, j].Green, img[i, j].Red);

                    if (!colorCat.Contains(b))
                    {
                        colorCat.Add(b);
                    }

                }
            }

            for (int k = 0; k < colorCat.Count; k++)
            {
                Emgu.CV.Image<Bgr, int> imgS = new Emgu.CV.Image<Bgr, int>(img.Width, img.Height);
                for (int i = 0; i < img.Data.GetLength(0); i++)
                {
                    for (int j = 0; j < img.Data.GetLength(1); j++)
                    {

                        Bgr b = new Bgr(img[i, j].Blue, img[i, j].Green, img[i, j].Red);

                        if (colorCat[k].Blue == b.Blue && colorCat[k].Green == b.Green && colorCat[k].Red == b.Red)
                        {
                            imgS[i, j] = b;
                        }
                        else
                        {
                            imgS[i, j] = d;
                        }

                    }
                }

                imgS.Save(String.Format(saveName, k));

            }

            return colorCat.Count;
        }
Code Example #21
        public void LoadCompleteTrainingSet()
        {
            int i = 0;

            ConnectToDatabase();

            ImageInDatabase imageToStore = new ImageInDatabase();
            SqlCommand cmd = new SqlCommand("SP_Select_All_FAces", m_conMyConnection);
            cmd.CommandType = CommandType.StoredProcedure;

            SqlDataAdapter sDataAdapter = new SqlDataAdapter();
            sDataAdapter.SelectCommand = cmd;
            DataSet sDataSet = new DataSet();
            sDataAdapter.Fill(sDataSet);
            DisconnectFromDatabase();

            int iUlngthTrainingArray = sDataSet.Tables[0].Rows.Count;

            m_trainingImages =  new Emgu.CV.Image<Gray, Byte>[iUlngthTrainingArray];
            m_TrainingLabels = new string[iUlngthTrainingArray];

            foreach (DataRow row in sDataSet.Tables[0].Rows)
            {
                byte[] ImageByteArrayToConert = (byte[])row["FacialPic"];
                ImageOfFace = new Bitmap(ConvertByteArray(ImageByteArrayToConert));

                m_trainingImages[i] = new Emgu.CV.Image<Gray, byte>(ImageOfFace);
                m_TrainingLabels[i] = row["ID"].ToString();
                i++;

            }
            DisconnectFromDatabase();
        }
Code Example #22
 private void _btn_take_reference_Click(object sender, EventArgs e) {
   _reference = Context.World.Camera.Frame().Copy();
 }
Code Example #23
        private void RefreshGrayImage()
        {
            try
            {
                //LogHelper.logger.Info("RefreshGrayImage called...");
                if (pbScreen.Image == null)
                    return;

                //Emgu.CV.Image<Emgu.CV.Structure.Gray, Byte> cvImage = new Emgu.CV.Image<Emgu.CV.Structure.Gray, Byte>(test as Bitmap);
                Emgu.CV.Image<Emgu.CV.Structure.Gray, Byte> cvImage = new Emgu.CV.Image<Emgu.CV.Structure.Gray, Byte>(pbScreen.Image as Bitmap);
                //Emgu.CV.CvInvoke.cvShowImage("Current Image under use...", cvImage);

                double intensity = tbIntensity.Value;
                var binaryImage = cvImage.Convert<Gray, byte>().ThresholdBinary(new Gray(intensity), new Gray(255));
                //Emgu.CV.CvInvoke.cvShowImage("Current Image under use...", binaryImage);
                pbIntensityTest.Image = (binaryImage.Bitmap).Clone(new Rectangle(0, 0, binaryImage.Width, binaryImage.Height), (binaryImage.Bitmap).PixelFormat);

                txtIntensity.Text = tbIntensity.Value.ToString();
                ImageProcessingManager.IntensityValue = intensity;

                if (cbShowIntensityOnTop.Checked)
                {
                    pbScreen.Image = pbIntensityTest.Image;
                }
            }
            catch (Exception exception)
            {
                LogHelper.logger.Error("RefreshGrayImage: " + exception.Message);
                LogHelper.logger.Error("RefreshGrayImage: " + exception.StackTrace);
                MessageBox.Show("An error occurred. Please restart bot", "Chessbot", MessageBoxButtons.OK, MessageBoxIcon.Error);
            }
            //LogHelper.logger.Info("RefreshGrayImage finished...");
        }
Code Example #24
 public static PointF[] DetectCornersRB(Bitmap picture, Size patternSize)
 {
     var image = new Emgu.CV.Image<Emgu.CV.Structure.Bgr, byte>(picture);
     Emgu.CV.Image<Emgu.CV.Structure.Gray, byte> gray = new Emgu.CV.Image<Emgu.CV.Structure.Gray, byte>(new byte[image.Height, image.Width, 1]);
     for (var y = 0; y < image.Height; y++)
     {
         for (var x = 0; x < image.Width; x++)
         {
             var r = image[y, x].Red;
             var b = image[y, x].Blue;
             var g = image[y, x].Green;
             var rd = Distance(new double[] { r, b, g }, new double[] { 255, 0, 0 });
             // pixels close to pure red become black, everything else white
             if (rd < 200)
                 gray[y, x] = new Emgu.CV.Structure.Gray(0);
             else
                 gray[y, x] = new Emgu.CV.Structure.Gray(255);
         }
     }
     var corners = Emgu.CV.CameraCalibration.FindChessboardCorners(gray, patternSize, Emgu.CV.CvEnum.CALIB_CB_TYPE.ADAPTIVE_THRESH);
     if (corners == null)
         return null;
     var cc = new PointF[][] { corners };
     gray.FindCornerSubPix(cc, new System.Drawing.Size(11, 11), new System.Drawing.Size(-1, -1), new Emgu.CV.Structure.MCvTermCriteria(30, 0.1));
     return corners;
 }
Code Example #25
File: EyeVideoControl.cs Project: DeSciL/Ogama
    /// <summary>
    /// This method displays the initial Ogama client image in the eye video control.
    /// </summary>
    public void ShowInitialImage()
    {
      this.Stop();
      this.interruptImmediately = true;
      var initialImage = new Emgu.CV.Image<Emgu.CV.Structure.Bgr, byte>(Properties.Resources.StartUp);
      this.pictureBox.Image = initialImage.Resize(
        this.CVImageBox.Width,
        this.CVImageBox.Height,
        Emgu.CV.CvEnum.INTER.CV_INTER_AREA);

      this.pictureBox.Refresh();
    }
Code Example #26
        private static void GrayValueTask()
        {
            while (true) // spin-waits for new rows to appear in m_CheckList
            {
                while (m_CheckList.Count > 0)
                {
                    DataGridViewRow t_Checks = m_CheckList[0];
                    lock (m_Mutex)
                    {
                        string   t_FolderPath = t_Checks.Cells["FolderPath"].Value.ToString();
                        string[] t_ImageFiles = System.IO.Directory.GetFiles(t_FolderPath);
                        foreach (string t_ImageFile in t_ImageFiles)
                        {
                            string[] t_ImageFileSplit = System.IO.Path.GetFileNameWithoutExtension(t_ImageFile).Split('_');
                            string   t_StationNumber  = t_ImageFileSplit[2];
                            if (t_ImageFileSplit[2].CompareTo("15") == 0)
                            {
                                t_StationNumber = t_ImageFileSplit[2] + "_" + t_ImageFileSplit[3];
                            }
                            string  t_StationName = m_ImageNameMappings[t_StationNumber];
                            Point[] t_ROI         = m_FixROILocations[t_StationName];

                            System.Drawing.Bitmap t_Bitmap = new Bitmap(t_ImageFile);

                            Emgu.CV.Mat t_Mat = Emgu.CV.CvInvoke.Imread(t_ImageFile, Emgu.CV.CvEnum.ImreadModes.AnyColor);
                            //Emgu.CV.CvInvoke.NamedWindow("A", Emgu.CV.CvEnum.NamedWindowType.FreeRatio);
                            double t_Average = 0.0;
                            if (t_Bitmap.PixelFormat == System.Drawing.Imaging.PixelFormat.Format8bppIndexed)
                            {
                                Emgu.CV.Image <Emgu.CV.Structure.Gray, byte> t_Image = new Emgu.CV.Image <Emgu.CV.Structure.Gray, byte>(t_Mat.Bitmap);
                                t_Image.ROI = new Rectangle(t_ROI[0].X, t_ROI[0].Y, t_ROI[1].X - t_ROI[0].X, t_ROI[1].Y - t_ROI[0].Y);
                                Emgu.CV.Structure.Gray t_AverageBGR = t_Image.GetAverage();
                                t_Average = t_AverageBGR.MCvScalar.V0;
                                t_Image.Dispose();
                                t_Image = null;
                            }
                            else
                            {
                                Emgu.CV.Image <Emgu.CV.Structure.Bgr, byte> t_Image = new Emgu.CV.Image <Emgu.CV.Structure.Bgr, byte>(t_Mat.Bitmap);
                                t_Image.ROI = new Rectangle(t_ROI[0].X, t_ROI[0].Y, t_ROI[1].X - t_ROI[0].X, t_ROI[1].Y - t_ROI[0].Y);
                                Emgu.CV.Structure.Bgr t_AverageBGR = t_Image.GetAverage();
                                t_Average = t_AverageBGR.MCvScalar.V2;
                                t_Image.Dispose();
                                t_Image = null;
                            }
                            t_Checks.Cells[t_StationName].Value = t_Average;
                            t_Mat.Dispose();
                            t_Mat = null;
                            t_Bitmap.Dispose();
                            t_Bitmap = null;
                            GC.Collect();
                        }
                    }
                    string t_ResultsString = string.Empty;
                    foreach (DataGridViewCell t_Check in t_Checks.Cells)
                    {
                        t_ResultsString += t_Check.Value + ",";
                    }
                    t_ResultsString = t_ResultsString.Remove(t_ResultsString.Length - 1, 1);
                    System.IO.File.AppendAllText("Results.csv", t_ResultsString);
                    System.IO.File.AppendAllText("Results.csv", System.Environment.NewLine);

                    m_CheckList.RemoveAt(0);
                }
            }
        }
Code Example #27
        public static Emgu.CV.Image <Emgu.CV.Structure.Gray, System.Byte> SimplifyImage(this Emgu.CV.Image <Emgu.CV.Structure.Gray, System.Byte> image)
        {
            if (image is null)
            {
                throw new System.ArgumentNullException(nameof(image));
            }

            return(image.GetThreshold()
                   .RemoveHoles()
                   .RemoveSpecks());
        }
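A short usage sketch of the cleanup pipeline these extension methods form (hypothetical input path; assumes the Settings thresholds are configured): threshold the image, close small holes, then open away specks.

        // Hypothetical usage: binarize and clean a grayscale scan in one call.
        var input = new Emgu.CV.Image<Emgu.CV.Structure.Gray, System.Byte>(@"scan.png");
        var cleaned = input.SimplifyImage(); // GetThreshold -> RemoveHoles -> RemoveSpecks
        cleaned.Save(@"scan_clean.png");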
Code Example #28
 public static Emgu.CV.Image<Emgu.CV.Structure.Gray, Byte> GetBinaryImage(Image inputImage, double intensity)
 {
     //LogHelper.logger.Info("GetBinaryImage called...");
     Image<Gray, byte> binaryImage = null;
     try
     {
         Emgu.CV.Image<Emgu.CV.Structure.Gray, Byte> cvImage = new Emgu.CV.Image<Emgu.CV.Structure.Gray, Byte>(inputImage as Bitmap);
         binaryImage = cvImage.Convert<Gray, byte>().ThresholdBinary(new Gray(intensity), new Gray(255));
         //Emgu.CV.CvInvoke.cvShowImage("Current Image under use...", binaryImage);
     }
     catch (Exception exception)
     {
          LogHelper.logger.Error("GetBinaryImage: " + exception.Message);
          LogHelper.logger.Error("GetBinaryImage: " + exception.StackTrace);
         MessageBox.Show("An error occurred. Please restart bot", "Chessbot", MessageBoxButtons.OK, MessageBoxIcon.Error);
     }
     //LogHelper.logger.Info("GetBinaryImage finished...");
     return binaryImage;
 }
Code Example #29
 public abstract void Draw(Emgu.CV.Image <Emgu.CV.Structure.Bgr, Byte> rawImage);
Code Example #30
 private void btnShowTemplate_Click(object sender, EventArgs e)
 {
     //LogHelper.logger.Info("btnShowTemplate_Click called...");
     try
     {
         Emgu.CV.Image<Emgu.CV.Structure.Gray, Byte> normalizedMasterImage = new Emgu.CV.Image<Emgu.CV.Structure.Gray, Byte>(masterTemplate.CurrentTemplateImage as Bitmap);
         Emgu.CV.CvInvoke.cvShowImage("Current Image under use...", normalizedMasterImage);
     }
     catch (Exception exception)
     {
         LogHelper.logger.Error("btnShowTemplate_Click: " + exception.Message);
         LogHelper.logger.Error("btnShowTemplate_Click: " + exception.StackTrace);
         MessageBox.Show("An error occurred. Please restart bot", "Chessbot", MessageBoxButtons.OK, MessageBoxIcon.Error);
     }
     //LogHelper.logger.Info("btnShowTemplate_Click finished...");
 }
Code Example #31
        public static string MatchRGBImage(
            string imageMapFile,
            string imageFile,
            ImageFeatureExtraction.FeatureExtractionOptions options,
            Random random)
        {
            var imageMap = ImageMap.LoadImageMap(imageMapFile, (frameID, imagePoint, depth, pt3D, descriptor) =>
                                            new
                                            {
                                                frameID,
                                                point3D = new MCvPoint3D32f((float)pt3D[0], (float)pt3D[1], (float)pt3D[2]),
                                                descriptor
                                            })
                                            .Select((p, lineNumber) => new { p, lineNumber });

            var frameWiseMap =
                imageMap
                .GroupBy(i => i.p.frameID)
                .ToDictionary(frame => frame.Key,
                                frame => new {
                                        point3D = frame.OrderBy(f => f.lineNumber).Select(f => f.p.point3D).ToArray(),
                                        descMatrx = frame.OrderBy(f => f.lineNumber).Select(f => f.p.descriptor).ToEmguMatrix(b => b)
                                    });

            var image = new Emgu.CV.Image<Emgu.CV.Structure.Gray, byte>(imageFile);
            var imageFeaturePoints =
                options.DoExtract(image)
                .Select(kp =>
                                new
                                {
                                    featureDescriptor = ImageFeatureExtraction.ExtractBriefFeatureDescriptors(image, kp),
                                    kp
                                })
                .Where(kp => kp.featureDescriptor != null)
                .ToArray();

            var imageFeatureDesc = imageFeaturePoints.ToEmguMatrix(p => p.featureDescriptor.EnumerateRowwise().ToArray());

            var matchingOptions = new ImageFeatureMatching.FeatureMatchingOptions();

            Func<string, IEnumerable<Tuple<MCvPoint3D32f, MKeyPoint>>> MatchFeatures =
                (frame) => ImageFeatureMatching.NNMatchBruteForce(imageFeatureDesc, frameWiseMap[frame].descMatrx, matchingOptions.distanceFunction, matchingOptions.featurePairDistanceThreshold,
                               (i1, i2, dist) => new { i1, i2, dist })
                           .Where(p => p.dist < matchingOptions.featurePairDistanceThreshold)
                           .Select(p => new Tuple<MCvPoint3D32f, MKeyPoint>(frameWiseMap[frame].point3D[p.i2], imageFeaturePoints[p.i1].kp))
                           .ToArray();

            DenseMatrix camCalibration = (DenseMatrix)Pose3D.CreateCalibrationMatrix(525, 320, 240);
            var intParameters = new Emgu.CV.IntrinsicCameraParameters();
            intParameters.IntrinsicMatrix = camCalibration.ToEmguMatrix();

            Func<Emgu.CV.ExtrinsicCameraParameters, Tuple<MCvPoint3D32f, MKeyPoint>, double> modelEvaluator =
                            (extParam, featurePair) =>
                                Emgu.CV.CameraCalibration.ProjectPoints(new MCvPoint3D32f[] { featurePair.Item1 }, extParam, intParameters)[0]
                                .distanceTo(featurePair.Item2.Point);

            Func<IEnumerable<Tuple<MCvPoint3D32f, MKeyPoint>>, Tuple<Emgu.CV.ExtrinsicCameraParameters, double>> modelFitter =
                matchedFeatures =>
                {
                    var model = Emgu.CV.CameraCalibration.FindExtrinsicCameraParams2(matchedFeatures.Select(m => m.Item1).ToArray(), matchedFeatures.Select(m => m.Item2.Point).ToArray(), intParameters);
                    return new Tuple<ExtrinsicCameraParameters, double>(model,
                                    matchedFeatures.Average(fp => Math.Pow(modelEvaluator(model, fp), 2)));
                };

            var ransacOptions = new SimpleRansac.RansacOptions
                                    {
                                        minNumInliers = 15,
                                        numMinSamples = 6,
                                        numTrials = 5,
                                        rand = random,
                                        sqInlierErrorThreshold = 9 //3 pixels
                                    };

            var matchingFrame =
            frameWiseMap
                .Skip(212)
                .AsParallel()
                .Select(kvp => new
                {
                    frame = kvp.Key,
                    featurePairs = MatchFeatures(kvp.Key)
                })
                .Where(k => k.featurePairs.Count() >= ransacOptions.minNumInliers)
                .Select(f => new
                            {
                                f.frame,
                                modelAndError = SimpleRansac.Ransac(f.featurePairs, modelFitter, modelEvaluator, ransacOptions)
                            })
                .ShowProgress(".", 1)
                .Where(f => f.modelAndError.Item1 != null)
                .ShowProgress("!", 1)
                .OrderBy(f => f.modelAndError.Item2)
                .Select(f => String.Format("frame:{0}\terror={1}", f.frame, f.modelAndError.Item2));

            File.WriteAllLines(Path.Combine(@"C:\Kamal\RSE\TestResults\FrameQuery", Path.GetFileNameWithoutExtension(imageFile)), matchingFrame);

            return "";
        }
Code Example #32
File: Tests.cs Project: pleonex/ninoimager
        private static void TestConvertColors(string inputImage, string outputImage)
        {
            // Get colors of the input image
            var img = new Emgu.CV.Image<Bgr, byte>(inputImage);
            Bgr[] colors = new Bgr[img.Width * img.Height];
            for (int x = 0; x < img.Width; x++)
                for (int y = 0; y < img.Height; y++)
                    colors[y * img.Width + x] = img[y, x];

            // Convert
            Lab[] newColors  = ColorConversion.ConvertColors<Bgr, Lab>(colors);
            Bgr[] newColors2 = ColorConversion.ConvertColors<Lab, Bgr>(newColors);

            // Set colors of output image
            var img2 = new Emgu.CV.Image<Bgr, byte>(img.Width, img.Height);
            for (int x = 0; x < img2.Width; x++)
                for (int y = 0; y < img2.Height; y++)
                    img2[y, x] = newColors2[y * img2.Width + x];

            img2.Save(outputImage);
        }
Code Example #33
        public static Emgu.CV.Image <TColor, System.Single> GetGradientY <TColor, TDepth>(this Emgu.CV.Image <TColor, TDepth> image)
            where TColor : struct, Emgu.CV.IColor
            where TDepth : new()
        {
            if (image is null)
            {
                throw new System.ArgumentNullException(nameof(image));
            }

            return(image.Sobel(xorder: 0, yorder: 1, apertureSize: -1));
        }
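A sketch pairing this with the horizontal Sobel response to approximate gradient magnitude (hypothetical input path; the x-derivative is taken from Sobel directly, since only GetGradientY is shown here):

 // Hypothetical usage: rough gradient magnitude from the X and Y derivatives.
 var gray = new Emgu.CV.Image<Emgu.CV.Structure.Gray, byte>(@"input.png");
 var gx = gray.Sobel(xorder: 1, yorder: 0, apertureSize: -1);
 var gy = gray.GetGradientY();
 var magnitude = gx.Pow(2.0).Add(gy.Pow(2.0)).Pow(0.5);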