Example #1
 public object Convert(object value, Type targetType, object parameter, CultureInfo culture)
 {
     return(BitmapConverter.GetBitmapImage((byte[])value));
 }
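
The method above is the Convert half of a WPF IValueConverter, and BitmapConverter.GetBitmapImage is a helper from the example's own project that is not shown here. A minimal sketch of what such a helper could look like, assuming the byte array holds an encoded image (the class name BitmapConverterSketch is made up for illustration):

 using System.IO;
 using System.Windows.Media.Imaging;

 public static class BitmapConverterSketch
 {
     // Decode a raw byte array (e.g. read from a database column) into a WPF BitmapImage.
     public static BitmapImage GetBitmapImage(byte[] data)
     {
         var image = new BitmapImage();
         using (var stream = new MemoryStream(data))
         {
             image.BeginInit();
             image.CacheOption = BitmapCacheOption.OnLoad; // load now so the stream can be disposed
             image.StreamSource = stream;
             image.EndInit();
         }
         image.Freeze(); // safe to hand to the UI thread even if conversion ran elsewhere
         return image;
     }
 }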
Example #2
        static void Main(string[] args)
        {
            int pictures = 300;

            IProcessorBuilder builder = new ProcessorBuilder();

            using var file = Image.FromFile(Path.Combine(FullPath, Picture));
            var bitmap = new Bitmap(file, new Size(450, 450));
            var source = BitmapConverter.ToColorsMatrix(bitmap);

            bitmap.Dispose();

            var picturesWidth  = 300;
            var picturesHeight = 300;

            var horizontalEnergyProcessor = builder
                                            .SetDefaultHorizontalConveyor()
                                            .SetJpegImageSaver(picturesWidth, picturesHeight)
                                            .Build();

            builder.Reset();

            var energyProcessor = builder
                                  .SetDefaultVerticalConveyor()
                                  .SetJpegImageSaver(picturesWidth, picturesHeight)
                                  .Build();

            ProcessingContext context;
            string            filename;
            string            fileDestination;

            var startTime = DateTime.Now;

            for (int i = 0; i < pictures; i++)
            {
                filename        = $"{i}.jpg";
                fileDestination = Path.Combine(FullPath, Folder, filename);

                context = new ProcessingContext {
                    Source = source, DestinationFileName = fileDestination
                };

                if (i % 2 == 0)
                {
                    energyProcessor.Process(context);
                }
                else
                {
                    horizontalEnergyProcessor.Process(context);
                }

                source = context.Result;

                Console.WriteLine($"{filename} was succesfully saved.");
            }

            var endTime    = DateTime.Now;
            var difference = endTime - startTime;

            Console.WriteLine($"Processed in {difference.Minutes} m {difference.Seconds} s.");
        }
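
ProcessorBuilder, ProcessingContext and BitmapConverter.ToColorsMatrix belong to the example's own project and are not shown. Judging only from how Main uses it, ProcessingContext could be shaped roughly like the sketch below (the Color[,] element type is a guess inferred from ToColorsMatrix; the real member types may differ):

        // Hypothetical shape of ProcessingContext, inferred from its usage in Main above.
        public class ProcessingContext
        {
            public System.Drawing.Color[,] Source { get; set; }   // input pixel matrix handed to the processor
            public string DestinationFileName { get; set; }       // where the JPEG saver writes the result
            public System.Drawing.Color[,] Result { get; set; }   // output matrix, fed back in as the next Source
        }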
Example #3
        // Method for grabbing images from the web cameras
        private void captureBmp(ref Bitmap bitmap1, ref Bitmap bitmap2, ref Bitmap bitmap3)
        {
            // Get the camera index numbers
            // They should normally be sequential numbers starting from 0
            try
            {
                using (CvCapture capture1 = Cv.CreateCameraCapture(int.Parse(textBox1.Text)))
                    using (CvCapture capture2 = Cv.CreateCameraCapture(int.Parse(textBox2.Text)))
                        using (CvCapture capture3 = Cv.CreateCameraCapture(int.Parse(textBox3.Text)))
                        {
                            // Camera initialization can take a while and throw an exception, so pause for one second
                            System.Threading.Thread.Sleep(1000);

                            // Buffers for the resized images
                            IplImage frameResized1 = Cv.CreateImage(Cv.Size(pictureBox1.Width, pictureBox1.Height), BitDepth.U8, 3);
                            IplImage frameResized2 = Cv.CreateImage(Cv.Size(pictureBox2.Width, pictureBox2.Height), BitDepth.U8, 3);
                            IplImage frameResized3 = Cv.CreateImage(Cv.Size(pictureBox3.Width, pictureBox3.Height), BitDepth.U8, 3);
                            // Keep capturing while the checkbox is checked
                            while (captureCheckBox.Checked == true && finishFlag == false)
                            {
                                // Capture from camera 1, resize, and display
                                frame1 = Cv.QueryFrame(capture1);
                                Cv.Resize(frame1, frameResized1, Interpolation.Lanczos4);
                                bitmap1           = BitmapConverter.ToBitmap(frameResized1);
                                pictureBox1.Image = bitmap1;
                                // Capture from camera 2, resize, and display
                                frame2 = Cv.QueryFrame(capture2);
                                Cv.Resize(frame2, frameResized2, Interpolation.Lanczos4);
                                bitmap2           = BitmapConverter.ToBitmap(frameResized2);
                                pictureBox2.Image = bitmap2;
                                // Capture from camera 3, resize, and display
                                if (capture3 == null)
                                {
                                    label5.Text = "down";
                                }
                                else
                                {
                                    frame3 = Cv.QueryFrame(capture3);
                                    Cv.Resize(frame3, frameResized3, Interpolation.Lanczos4);
                                    bitmap3           = BitmapConverter.ToBitmap(frameResized3);
                                    pictureBox3.Image = bitmap3;
                                }
                                // Update the display
                                Application.DoEvents();
                            }
                            Cv.ReleaseImage(frame1);
                            Cv.ReleaseImage(frame2);
                            Cv.ReleaseImage(frame3);
                            Cv.ReleaseImage(frameResized1);
                            Cv.ReleaseImage(frameResized2);
                            Cv.ReleaseImage(frameResized3);
                        }
            }
            catch (Exception)
            {
                MessageBox.Show("何らかのエラーです。\n\nカメラが正しく接続され、\nIndex番号が正しく入力されているか確認して下さい。",
                                "エラー",
                                MessageBoxButtons.OK,
                                MessageBoxIcon.Error);
            }
        }
Example #4
        private unsafe void OpenCV(ref Bitmap bitmap)
        {
            Mat         testMat = BitmapConverter.ToMat(bitmap);
            MatOfDouble mu      = new MatOfDouble();
            MatOfDouble sigma   = new MatOfDouble();

            Cv2.MeanStdDev(testMat, mu, sigma);
            double mean = mu.GetArray(0, 0)[0];

            mu.Dispose();
            sigma.Dispose();

            SimpleBlobDetector.Params circleParameters = new SimpleBlobDetector.Params();
            circleParameters.FilterByCircularity = true;
            circleParameters.MinCircularity      = 0.85f;
            circleParameters.MaxCircularity      = 1f;
            circleParameters.MinArea             = 30; // Modify the value on the fly (TODO use bigger circle)

            SimpleBlobDetector detectCircleBlobs = new SimpleBlobDetector(circleParameters);

            fingerPoints = detectCircleBlobs.Detect(testMat);
            detectCircleBlobs.Dispose();

            // If Finger found basically
            if (fingerPoints != null)
            {
                this.fingerSize = 0;
                int fingerIndex = -1;
                for (int i = 0; i < fingerPoints.Length; i++)
                {
                    if (fingerPoints[i].Size >= this.fingerSize)
                    {
                        this.fingerSize = (int)fingerPoints[i].Size;
                        fingerIndex     = i;
                    }
                }

                if (fingerIndex != -1)
                {
                    OpenCvSharp.CPlusPlus.Point coordinate = fingerPoints[fingerIndex].Pt;
                    this.fingerSize = (int)((fingerPoints[fingerIndex].Size) * Math.Sqrt(2));
                    testMat.Set <Vec3b>(coordinate.Y, coordinate.X, new Vec3b(0, 255, 0));
                    RotatedRect rRect           = new RotatedRect(new Point2f(coordinate.X, coordinate.Y), new Size2f(this.fingerSize, this.fingerSize), 0);
                    Point2f[]   circleVerticies = rRect.Points();
                    //this.fingerCoordinates[0] = coordinate.X;
                    //this.fingerCoordinates[1] = coordinate.Y;
                    int height = (int)(circleVerticies[0].Y - circleVerticies[1].Y);
                    int width  = (int)(circleVerticies[2].X - circleVerticies[1].X);
                    int startX = (int)(circleVerticies[0].X);
                    int startY = (int)(circleVerticies[1].Y);
                    this.fingerDepth = MapColortoDepth(startX, startY, this.fingerSize, this.fingerSize);
                    OpenCvSharp.CPlusPlus.Rect featureRect = new OpenCvSharp.CPlusPlus.Rect(startX, startY, this.fingerSize, this.fingerSize);

                    // Draw box around finger
                    for (int j = 0; j < 4; j++)
                    {
                        Cv2.Line(testMat, circleVerticies[j], circleVerticies[(j + 1) % 4], new Scalar(0, 255, 0));
                    }

                    Boolean    intersectOccurance = false;
                    List <int> intersectIndicies  = new List <int>();
                    for (int i = 0; i < this.controls.Count; i++)
                    {
                        if (this.controls[i].boundingRect.IntersectsWith(featureRect))
                        {
                            double diff = fingerDepth - this.controls[i].depth;
                            if (Math.Abs(diff) < 0.5)
                            {
                                intersectOccurance = true;
                                intersectIndicies.Add(i);
                            }
                        }
                    }

                    System.Text.StringBuilder append = new System.Text.StringBuilder();
                    if (intersectOccurance)
                    {
                        for (int i = 0; i < intersectIndicies.Count; i++)
                        {
                            append.Append(" " + this.controls[intersectIndicies[i]].title + " " + intersectIndicies[i].ToString());
                        }
                        this.OutputText = "Pressed Button" + append; //TODO Make this more obvious
                    }
                    else
                    {
                        this.OutputText = "No State";
                    }
                }
            }

            bitmap = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(testMat);
            testMat.Dispose();
        }
Example #5
        private Bitmap CreateObjectMask(Bitmap img, /*out IplImage image_mask,*/
                                        out double mask_length, out double mask_area, out double mask_width, out double mask_height,
                                        out double mask_pvheight, int num_smooth, int contrast, double canny1, double canny2,
                                        out Mat image_mask_spc, out double mask2_area, int filter_size = 3,
                                        int brightAreaThreshold = -1, int darkAreaThreshold = -1)
        {
            Bitmap dst = null;
            //IplImage img_mask = Cv.CreateImage(new CvSize(img.Width, img.Height), BitDepth.U8, 1);
            Mat img_mask = new Mat(new OpenCvSharp.Size(img.Width, img.Height), MatType.CV_8UC1, 0);

            image_mask_spc = null;
            mask_length    = mask_area = mask_width = mask_height = mask_pvheight = mask2_area = 0;

            Mat img_gray;
            Mat img_canny;
            Mat img_mask_copy;

            int    i, x, y, offset;
            IntPtr ptr;
            Byte   pixel;

            //////////////////
            var    distance        = new List <double>();
            double center_x        = 0;
            double center_y        = 0;
            double center_count    = 0;
            double distance_mean   = 0;
            double distance_stddev = 0;
            double sum_m           = 0;
            double sum_v           = 0;
            double temp            = 0;

            //////////////////

            ////////////////////////////////////////////////////////////
            ////////////////////////Mask make///////////////////////////
            ////////////////////////////////////////////////////////////
            img_gray      = new Mat(new OpenCvSharp.Size(img.Width, img.Height), MatType.CV_8UC1, 0);
            img_canny     = new Mat(new OpenCvSharp.Size(img.Width, img.Height), MatType.CV_8UC1, 0);
            img_mask_copy = new Mat(new OpenCvSharp.Size(img.Width, img.Height), MatType.CV_8UC1, 0);

            Mat src = BitmapConverter.ToMat(img);

            Cv2.CvtColor(src, img_gray, ColorConversionCodes.BGR2GRAY);

            //Contrast -> Increase the edge contrast for transparent diamond
            byte[] lut = CalcLut(contrast, 0);
            //img_gray.LUT(img_gray, lut);
            Cv2.LUT(img_gray, lut, img_gray);

            //Median filter -> Eliminate point noise in the image



            //Elimination of big dusts should be coded here
            if (num_smooth > 0)
            {
                //for (i = 0; i < num_smooth; i++) img_gray.Smooth(img_gray, SmoothType.Median, 3, 3, 0, 0);
                //for (i = 0; i < num_smooth; i++) img_gray.Smooth(img_gray, SmoothType.Median, filter_size, filter_size, 0, 0);
                for (i = 0; i < num_smooth; i++)
                {
                    Cv2.MedianBlur(img_gray, img_gray, filter_size);
                }

                img_canny = img_gray.Canny(canny1, canny2);
            }
            else
            {
                img_canny = img_gray.Canny(canny1, canny2);
            }

            /////////////////////////////////////////////////////////////
            //ConvexHull
            /////////////////////////////////////////////////////////////

            //OpenCvSharp.CvMemStorage storage = new CvMemStorage(0);
            //CvSeq points = Cv.CreateSeq(SeqType.EltypePoint, CvSeq.SizeOf, CvPoint.SizeOf, storage);
            //CvSeq<CvPoint> points = new CvSeq<CvPoint>(SeqType.EltypePoint, CvSeq.SizeOf, storage);
            //CvPoint pt;

            List <OpenCvSharp.Point> points = new List <OpenCvSharp.Point>();

            OpenCvSharp.Point pt;

            ptr = img_canny.Data;
            for (y = 0; y < img_canny.Height; y++)
            {
                for (x = 0; x < img_canny.Width; x++)
                {
                    offset = (img_canny.Width * y) + (x);
                    pixel  = Marshal.ReadByte(ptr, offset);
                    if (pixel > 0)
                    {
                        pt.X = x;
                        pt.Y = y;
                        points.Add(pt);
                        //////////////////////
                        center_x = center_x + x;
                        center_y = center_y + y;
                        center_count++;
                        //////////////////////
                    }
                }
            }

            center_x = center_x / center_count;
            center_y = center_y / center_count;

            //CvPoint[] hull;
            //CvMemStorage storage1 = new CvMemStorage(0);
            //CvSeq<CvPoint> contours;
            //List<Mat> hull = new List<Mat>();
            MatOfPoint hull = new MatOfPoint();

            int x_min = 3000, x_max = 0, y_min = 3000, y_max = 0;
            int y_x_min = 3000, y_x_max = 3000;

            if (points.Count > 0)
            {
                //Calcurate Ave and Std of distance from each edge points to the weighed center
                for (i = 0; i < points.Count; i++)
                {
                    pt   = points[i];
                    temp = Math.Sqrt((pt.X - center_x) * (pt.X - center_x) + (pt.Y - center_y) * (pt.Y - center_y));
                    distance.Add(temp);
                    sum_m += temp;
                    sum_v += temp * temp;
                }

                distance_mean   = sum_m / points.Count;
                temp            = (sum_v / points.Count) - distance_mean * distance_mean;
                distance_stddev = Math.Sqrt(temp);

                // Outlier elimination
                for (i = points.Count - 1; i >= 0; i--)
                {
                    if (distance[i] > (distance_mean + 3.0 * distance_stddev))
                    {
                        points.RemoveAt(i);
                    }
                }

                Cv2.ConvexHull(MatOfPoint.FromArray(points), hull, true);


                //2014/4/14 Add calc mask_width, mask_height and mask_pvheight

                foreach (OpenCvSharp.Point item in hull)
                {
                    if (x_min > item.X)
                    {
                        x_min   = item.X;
                        y_x_min = item.Y;
                    }
                    else if (x_min == item.X && y_x_min > item.Y)
                    {
                        y_x_min = item.Y;
                    }

                    if (x_max < item.X)
                    {
                        x_max   = item.X;
                        y_x_max = item.Y;
                    }
                    else if (x_max == item.X && y_x_max > item.Y)
                    {
                        y_x_max = item.Y;
                    }

                    if (y_min > item.Y)
                    {
                        y_min = item.Y;
                    }
                    if (y_max < item.Y)
                    {
                        y_max = item.Y;
                    }
                }
                mask_width    = x_max - x_min;
                mask_height   = y_max - y_min;
                mask_pvheight = ((double)y_x_max + (double)y_x_min) / 2 - (double)y_min;

                /////////////////////////////////////////////////////////////
                // For icecream cone shape diamond, need to use triangle mask
                /////////////////////////////////////////////////////////////

                if (diamond_group == DIAMOND_GROUPING.RBC_HighDepth)
                {
                    for (i = 0; i < hull.Count(); i++)
                    {
                        OpenCvSharp.Point p = hull.At <OpenCvSharp.Point>(i);
                        if (y_x_max >= y_x_min)
                        {
                            if (p.Y > y_x_min)
                            {
                                p.X = x_max;
                                p.Y = y_x_max;
                            }
                        }
                        else
                        {
                            if (p.Y > y_x_max)
                            {
                                p.X = x_min;
                                p.Y = y_x_min;
                            }
                        }
                        // At <Point>() returns a copy of the struct, so write the modified point back into the hull
                        hull.Set <OpenCvSharp.Point>(i, p);
                    }
                }

                //////////////////////////////////////////////////////////////

                Cv2.FillConvexPoly(img_mask, hull, Scalar.White, LineTypes.AntiAlias, 0);

                //2013/11/3 Add erode function
                if (erode > 0)
                {
                    for (i = 0; i < erode; i++)
                    {
                        Cv2.Erode(img_mask, img_mask, null);
                    }
                }

                //Calc length and area of img_mask -> use for fancy shape diamonds
                //Cv.FindContours(img_mask, storage1, out contours, CvContour.SizeOf, ContourRetrieval.External, ContourChain.ApproxSimple);
                //Cv.FIndCOntours overwrites img_mask, need to use copy image
                //IplImage img_mask_copy = Cv.Clone(img_mask);
                //Cv2.Copy(img_mask, img_mask_copy);
                Mat   hierarchy = new Mat();
                Mat[] contours;
                img_mask.CopyTo(img_mask_copy);
                Cv2.FindContours(img_mask_copy, out contours, hierarchy, RetrievalModes.External, ContourApproximationModes.ApproxSimple);
                //Cv.ReleaseImage(img_mask_copy);

                mask_length = Cv2.ArcLength(contours[0], true);
                mask_area   = Math.Abs(Cv2.ContourArea(contours[0]));
                //Cv.ClearSeq(contours);
            }
            else
            {
                mask_length = 0.0;
                mask_area   = 0.0;
            }

            //Memory release
            //Cv.ReleaseImage(img_gray);
            //Cv.ReleaseImage(img_canny);
            //Cv.ReleaseImage(img_mask_copy);
            //Cv.ClearSeq(points);
            //Cv.ReleaseMemStorage(storage);
            //Cv.ReleaseMemStorage(storage1);

            //if the diamond is out of croped image, do not calc color values
            if (x_min == 0 || x_max == (img.Width - 1) || y_min == 0 || y_max == (img.Height - 1))
            {
                return(dst);
            }

            //img_mask.SaveImage(@"P:\Projects\DustDetection\TestSamples\gColorFancyImages\temp\image_mask_hiroshi.jpg");

            if (mask_length > 0)
            {
                dst = BitmapConverter.ToBitmap(img_mask);
            }

            return(dst);
        }
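
CalcLut(contrast, 0) above is a project helper that builds the 256-entry lookup table passed to Cv2.LUT; its exact transfer curve is not shown. A hedged sketch of one plausible contrast LUT (the name CalcLutSketch, the second "brightness" parameter and the linear curve are illustrative only):

        private static byte[] CalcLutSketch(int contrast, int brightness)
        {
            // Simple linear contrast stretch around mid-grey, clamped to the byte range.
            var lut = new byte[256];
            double gain = (100.0 + contrast) / 100.0;
            for (int i = 0; i < 256; i++)
            {
                double v = (i - 128) * gain + 128 + brightness;
                lut[i] = (byte)Math.Max(0, Math.Min(255, v));
            }
            return lut;
        }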
Example #6
        private Bitmap CreateObjectMaskNew(Bitmap image, out Mat image_mask,
                                           out double mask_length, out double mask_area, out double mask_width, out double mask_height,
                                           out double mask_pvheight, double kThresh, double hThresh, double canny1, double canny2,
                                           out Mat image_mask_spc, out double mask2_area, int brightAreaThreshold = -1, int darkAreaThreshold = -1)
        {
            Bitmap dst = null;

            image_mask     = null;
            image_mask_spc = null;
            mask_length    = mask_area = mask_width = mask_height = mask_pvheight = mask2_area = 0;

            try
            {
                Mat src = BitmapConverter.ToMat(image);

                Mat src_kirsch = BitmapConverter.ToMat(image.KirschFilter());

                Mat kirsch_gray = new Mat();
                Cv2.CvtColor(src_kirsch, kirsch_gray, ColorConversionCodes.RGB2GRAY);

                Mat kirsch_threshold = new Mat();
                Cv2.Threshold(kirsch_gray, kirsch_threshold, kThresh, 255, ThresholdTypes.Binary);


                Mat[] contours;
                List <OpenCvSharp.Point> hierarchy;
                List <Mat> hulls;
                Mat        morph_element = Cv2.GetStructuringElement(MorphShapes.Ellipse, new OpenCvSharp.Size(2, 2), new OpenCvSharp.Point(1, 1));

                #region morphology

                Mat kirsch_threshold_copy = new Mat();
                kirsch_threshold.CopyTo(kirsch_threshold_copy);

                int hullCount = 0, numLoops = 0;
                do
                {
                    numLoops++;

                    Mat kirsch_morph = kirsch_threshold_copy.MorphologyEx(MorphTypes.Gradient, morph_element);

                    hierarchy = new List <OpenCvSharp.Point>();
                    Cv2.FindContours(kirsch_morph, out contours, OutputArray.Create(hierarchy),
                                     RetrievalModes.External, ContourApproximationModes.ApproxSimple, new OpenCvSharp.Point(0, 0));

                    hulls = new List <Mat>();
                    for (int j = 0; j < contours.Length; j++)
                    {
                        Mat hull = new Mat();
                        Cv2.ConvexHull(contours[j], hull);
                        hulls.Add(hull);
                    }

                    Mat drawing = Mat.Zeros(src.Size(), MatType.CV_8UC1);
                    Cv2.DrawContours(drawing, hulls, -1, Scalar.White);

                    if (hulls.Count != hullCount && numLoops < 100)
                    {
                        hullCount             = hulls.Count;
                        kirsch_threshold_copy = drawing;
                    }
                    else
                    {
                        break;
                    }
                } while (true);

                #endregion

                if (numLoops >= 100)
                {
                    throw new Exception("Could not find hull");
                }

                #region bestHull
                //try and filter out dust near to stone

                double largestArea = hulls.Max(m => Cv2.ContourArea(m));
                var    bestHulls   = hulls.Where(m => Cv2.ContourArea(m) == largestArea).ToList();

                Mat hulls_mask = Mat.Zeros(src.Size(), MatType.CV_8UC1);
                Cv2.DrawContours(hulls_mask, bestHulls, -1, Scalar.White, -1);

                //hulls_mask is the convex hull of outline, now look for clefts
                Cv2.Threshold(kirsch_gray, kirsch_threshold, hThresh, 255, ThresholdTypes.Binary);
                Mat kirsch_mask = Mat.Zeros(kirsch_threshold.Size(), kirsch_threshold.Type());
                kirsch_threshold.CopyTo(kirsch_mask, hulls_mask);

                Mat kirsch_mask_canny = new Mat();
                Cv2.Canny(kirsch_mask, kirsch_mask_canny, canny1, canny2, 3);

                morph_element = Cv2.GetStructuringElement(MorphShapes.Ellipse, new OpenCvSharp.Size(5, 5), new OpenCvSharp.Point(2, 2));
                Mat kirsch_filled = new Mat();
                Cv2.Dilate(kirsch_mask_canny, kirsch_filled, morph_element);
                Cv2.Dilate(kirsch_filled, kirsch_filled, morph_element);
                Cv2.Erode(kirsch_filled, kirsch_filled, morph_element);
                Cv2.Erode(kirsch_filled, kirsch_filled, morph_element);

                hierarchy = new List <OpenCvSharp.Point>();
                Cv2.FindContours(kirsch_filled, out contours, OutputArray.Create(hierarchy),
                                 RetrievalModes.External, ContourApproximationModes.ApproxSimple, new OpenCvSharp.Point(0, 0));

                #endregion

                hulls_mask = Mat.Zeros(src.Size(), MatType.CV_8UC1);
                Cv2.DrawContours(hulls_mask, contours, -1, Scalar.White, -1);

                Cv2.Erode(hulls_mask, hulls_mask, morph_element);
                Cv2.Erode(hulls_mask, hulls_mask, morph_element);

                image_mask = hulls_mask;

                //remove bright areas
                if ((brightAreaThreshold > -1) || (darkAreaThreshold > -1))
                {
                    Mat src_mask       = new Mat();
                    Mat hulls_mask_spc = hulls_mask.Clone();
                    src.CopyTo(src_mask, hulls_mask_spc);
                    Mat gray = new Mat();

                    Cv2.CvtColor(src_mask, gray, ColorConversionCodes.BGR2GRAY);
                    if (brightAreaThreshold > -1)
                    {
                        Mat bright = new Mat();
                        Cv2.Threshold(gray, bright, brightAreaThreshold, 255, ThresholdTypes.BinaryInv);
                        Cv2.ImWrite(@"C:\gColorFancy\Image\bright.jpg", bright);
                        Mat t = new Mat();
                        hulls_mask_spc.CopyTo(t, bright);
                        hulls_mask_spc = t.Clone();
                    }
                    if (darkAreaThreshold > -1)
                    {
                        Mat dark = new Mat();
                        Cv2.Threshold(gray, dark, darkAreaThreshold, 255, ThresholdTypes.Binary);
                        Cv2.ImWrite(@"C:\gColorFancy\Image\dark.jpg", dark);
                        Mat t = new Mat();
                        hulls_mask_spc.CopyTo(t, dark);
                        hulls_mask_spc = t.Clone();
                    }

                    image_mask_spc = hulls_mask_spc;

                    var hierarchy2 = new List <OpenCvSharp.Point>();
                    Cv2.FindContours(hulls_mask_spc, out contours, OutputArray.Create(hierarchy2),
                                     RetrievalModes.External, ContourApproximationModes.ApproxSimple, new OpenCvSharp.Point(0, 0));

                    largestArea = contours.Max(m => Cv2.ContourArea(m));
                    Mat finalHullSpc = contours.Where(m => Cv2.ContourArea(m) == largestArea).ToList()[0];

                    if (ConvexHullOnMask)
                    {
                        Mat hull = new Mat();
                        Cv2.ConvexHull(finalHullSpc, hull);
                        Mat polySpc = new Mat();
                        Cv2.ApproxPolyDP(hull, polySpc, 3, true);
                        mask2_area = Cv2.ContourArea(polySpc);
                    }
                    else
                    {
                        mask2_area = largestArea;
                    }
                }
                ///////////////////////////

                hierarchy = new List <OpenCvSharp.Point>();
                Cv2.FindContours(hulls_mask, out contours, OutputArray.Create(hierarchy),
                                 RetrievalModes.External, ContourApproximationModes.ApproxSimple, new OpenCvSharp.Point(0, 0));

                largestArea = contours.Max(m => Cv2.ContourArea(m));
                Mat finalHull = contours.Where(m => Cv2.ContourArea(m) == largestArea).ToList()[0];

                if (ConvexHullOnMask)
                {
                    var hull = new Mat();
                    Cv2.ConvexHull(finalHull, hull);
                    finalHull = hull;
                }
                List <Mat> finalHulls = new List <Mat>();
                finalHulls.Add(finalHull);
                Cv2.DrawContours(src, finalHulls, -1, new Scalar(128, 0, 128, 255), 3);

                #region bounding

                Mat poly = new Mat();
                Cv2.ApproxPolyDP(finalHull, poly, 3, true);
                Rect boundaryRect = Cv2.BoundingRect(poly);
                mask_width  = boundaryRect.Width;
                mask_height = boundaryRect.Height;
                if (ConvexHullOnMask)
                {
                    mask_area = Cv2.ContourArea(poly);
                }
                else
                {
                    mask_area = largestArea;
                }
                mask_length = Cv2.ArcLength(finalHull, true);

                List <OpenCvSharp.Point> finalPoints = new List <OpenCvSharp.Point>();
                int m1Count            = (finalHull.Rows % 2 > 0) ? finalHull.Rows + 1 : finalHull.Rows;
                OpenCvSharp.Point[] p1 = new OpenCvSharp.Point[m1Count];
                finalHull.GetArray(0, 0, p1);
                Array.Resize(ref p1, finalHull.Rows);
                finalPoints.AddRange(p1.ToList());

                double y_min   = boundaryRect.Bottom;
                double y_x_min = finalPoints.Where(p => p.X == boundaryRect.Left).ToList()[0].Y;
                double y_x_max = finalPoints.Where(p => p.X == boundaryRect.Right).ToList()[0].Y;

                mask_pvheight = ((double)y_x_max + (double)y_x_min) / 2 - (double)y_min;

                #endregion

                //dst = BitmapConverter.ToBitmap(src);
                using (var ms = src.ToMemoryStream())
                {
                    dst = (Bitmap)Image.FromStream(ms);
                }

                try
                {
                    if (saveMaskDataPath.Length > 0)
                    {
                        //StringBuilder sb = new StringBuilder();
                        //sb.AppendLine("mask_length,mask_area,mask_width,mask_height,mask_pvheight");
                        //sb.AppendLine(mask_length + "," + mask_area + "," + mask_width + "," + mask_height + "," + mask_pvheight);
                        image_mask.SaveImage(saveMaskDataPath + @"\image_mask.jpg");
                        if (image_mask_spc != null)
                        {
                            image_mask_spc.SaveImage(saveMaskDataPath + @"\image_mask_spc.jpg");
                        }
                        BitmapConverter.ToMat(image).SaveImage(saveMaskDataPath + @"\src.jpg");
                        //File.WriteAllText(saveMaskDataPath + @"\mask_vals.csv", sb.ToString());
                        //File.AppendAllText(saveMaskDataPath + @"\exception.txt", DateTime.Now + ":" + av.Message);
                        //File.AppendAllText(saveMaskDataPath + @"\exception.txt", DateTime.Now + ":" + av.StackTrace);
                        //File.AppendAllText(saveMaskDataPath + @"\exception.txt", DateTime.Now + ":" + av.Source);
                    }
                }
                catch
                {
                }
            }
            catch (Exception)
            {
                dst = null;
            }

            return(dst);
        }
Example #7
 private void GaussianBlurFilter(object sender, RoutedEventArgs e)
 {
     Filters.GaussBlurFilter(_directBitmap);
     ImageCanvas.Source = BitmapConverter.GetBitmapSource(_directBitmap.Bitmap);
 }
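
This handler, like several of the WPF handlers in the examples below, pushes the edited System.Drawing.Bitmap back into the UI through BitmapConverter.GetBitmapSource, a project helper that is not shown. A minimal sketch of such a Bitmap-to-BitmapSource conversion, assuming the GDI interop route (the class name BitmapSourceSketch and the P/Invoke wrapper are illustrative):

 using System;
 using System.Runtime.InteropServices;
 using System.Windows;
 using System.Windows.Interop;
 using System.Windows.Media.Imaging;

 public static class BitmapSourceSketch
 {
     [DllImport("gdi32.dll")]
     private static extern bool DeleteObject(IntPtr hObject);

     // Wrap a System.Drawing.Bitmap as a WPF BitmapSource and release the temporary HBITMAP.
     public static BitmapSource GetBitmapSource(System.Drawing.Bitmap bitmap)
     {
         IntPtr hBitmap = bitmap.GetHbitmap();
         try
         {
             return Imaging.CreateBitmapSourceFromHBitmap(
                 hBitmap, IntPtr.Zero, Int32Rect.Empty, BitmapSizeOptions.FromEmptyOptions());
         }
         finally
         {
             DeleteObject(hBitmap); // avoid leaking the native GDI handle
         }
     }
 }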
Example #8
 private void вернутьсяКИсходномуToolStripMenuItem_Click(object sender, EventArgs e)
 {
     m_workImage = (double[, , ])m_originalImage.Clone();
     OutputBitmapOnPictureBox(BitmapConverter.DoubleRgbToBitmap(m_workImage));
 }
Example #9
 private void SobelFilter(object sender, RoutedEventArgs e)
 {
     PointTransformation.ConvertToGray(_directBitmap, GrayConversionMode.Colorimetric);
     Filters.SobelFilter(_directBitmap);
     ImageCanvas.Source = BitmapConverter.GetBitmapSource(_directBitmap.Bitmap);
 }
Example #10
 private void HighPassFilter(object sender, RoutedEventArgs e)
 {
     Filters.HighPassFilter(_directBitmap, MaskSize);
     ImageCanvas.Source = BitmapConverter.GetBitmapSource(_directBitmap.Bitmap);
 }
Example #11
 private void ChangeBrightness(object sender, System.Windows.Input.MouseButtonEventArgs e)
 {
     PointTransformation.ModifyBrightness(_directBitmap, _brightness);
     ImageCanvas.Source = BitmapConverter.GetBitmapSource(_directBitmap.Bitmap);
 }
Example #12
 private void RevertChanges(object sender, RoutedEventArgs e)
 {
     ImageCanvas.Source = BitmapConverter.GetBitmapSource(_bitmap);
 }
Example #13
 private void ConvertToGrayAverage(object sender, RoutedEventArgs e)
 {
     PointTransformation.ConvertToGray(_directBitmap, GrayConversionMode.Average);
     ImageCanvas.Source = BitmapConverter.GetBitmapSource(_directBitmap.Bitmap);
 }
Example #14
        private static List <Point[]> FindCountour(Bitmap image)
        {
            var original = BitmapConverter.ToMat(image);
            var gray     = image.ToGrayscaleMat();
            var src      = new Mat();

            gray.CopyTo(src);
            var threshImage = new Mat();

            gray.CopyTo(threshImage);

            //for (var i = 30; i <= 170; i += 20)
            //{
            //Cv2.Threshold(gray, threshImage, i, 255, ThresholdTypes.BinaryInv);
            //var denoise = new Mat();

            //Cv2.BilateralFilter(gray, denoise, 11, 17, 17);

            //Cv2.Threshold(~gray, threshImage, 20, 255, ThresholdTypes.);
            //SaveImage(threshImage, "thresh");

            //SaveImage(threshImage, $"thresh_min_{i}");
            //}
            //return;


            //SaveImage(threshImage, "thresh");
            //for (var i = 10; i < 70; i += 10)
            //{
            //  for (var j = 70; j <= 210; j += 20)
            //  {
            //Cv2.Canny(threshImage, threshImage, 30, 200);
            //Cv2.Canny(threshImage, threshImage, 50, 150);
            //SaveImage(threshImage, $"canny");
            //}
            //}

            var newThresh = new Mat();

            Cv2.AdaptiveThreshold(threshImage, newThresh, 255, AdaptiveThresholdTypes.MeanC, ThresholdTypes.BinaryInv, 201, 15);
            //SaveImage(newThresh, "newThresh");
            threshImage = newThresh;

            Point[][]        contours;
            HierarchyIndex[] hierarchyIndexes;
            Cv2.FindContours(threshImage, out contours, out hierarchyIndexes, RetrievalModes.CComp, ContourApproximationModes.ApproxSimple);

            if (contours.Length == 0)
            {
                return(new List <Point[]>());
            }

            //var dst = new Mat(gray.Rows, gray.Cols, MatType.CV_8UC3, Scalar.All(0));

            var sorted = contours
                         .OrderByDescending(x => Cv2.ContourArea(x))
                         .Take(10)
                         .Where(x =>
            {
                var boundingRect = Cv2.BoundingRect(x);
                return(boundingRect.IsVerticalBlock(1.5));
            }).ToList();

            if (!sorted.Any())
            {
                sorted = contours
                         .OrderByDescending(x => Cv2.ContourArea(x))
                         .Take(10)
                         .Where(x =>
                {
                    var boundingRect = Cv2.BoundingRect(x);
                    return(boundingRect.IsVerticalBlock(1));
                }).ToList();
            }

            foreach (var contour in sorted)
            {
                var boundingRect = Cv2.BoundingRect(contour);

                Cv2.Rectangle(original,
                              new Point(boundingRect.X, boundingRect.Y),
                              new Point(boundingRect.X + boundingRect.Width, boundingRect.Y + boundingRect.Height),
                              new Scalar(0, 0, 255), 2);

                //Cv2.Rectangle(dst,
                //  new Point(boundingRect.X, boundingRect.Y),
                //  new Point(boundingRect.X + boundingRect.Width, boundingRect.Y + boundingRect.Height),
                //  new Scalar(0, 0, 255), 3);
            }

            //while (contourIndex >= 0)
            //{
            //  var contour = contours[contourIndex];
            //  var boundingRect = Cv2.BoundingRect(contour);

            //  Cv2.Rectangle(gray,
            //    new Point(boundingRect.X, boundingRect.Y),
            //    new Point(boundingRect.X + boundingRect.Width, boundingRect.Y + boundingRect.Height),
            //    new Scalar(0, 0, 255), 2);

            //  Cv2.Rectangle(dst,
            //    new Point(boundingRect.X, boundingRect.Y),
            //    new Point(boundingRect.X + boundingRect.Width, boundingRect.Y + boundingRect.Height),
            //    new Scalar(0, 0, 255), 2);

            //  contourIndex = hierarchyIndexes[contourIndex].Next;
            //}

            src = ~src;
            src = src * 1.2;

            src = src + new Scalar(15, 15, 15);

            //SaveImage(src, "src");
            SaveImage(original, "result");

            Image = src.ToBitmap();

            return(sorted);
        }
Example #15
 private void EqualizeHistogram(object sender, RoutedEventArgs e)
 {
     _histogram.Equalize();
     ImageCanvas.Source = BitmapConverter.GetBitmapSource(_directBitmap.Bitmap);
 }
Example #16
 void RefreshWorkImage() => OutputBitmapOnPictureBox(BitmapConverter.DoubleRgbToBitmap(m_workImage));
Example #17
 private void ResetHistogram(object sender, RoutedEventArgs e)
 {
     _histogram.ResetToDefault();
     ImageCanvas.Source = BitmapConverter.GetBitmapSource(_directBitmap.Bitmap);
 }
Example #18
        private Bitmap CreateObjectMaskMelee(Bitmap image, out Mat image_mask,
                                             out double mask_length, out double mask_area, out double mask_width, out double mask_height,
                                             out double mask_pvheight, bool useKthresholdLab = false)
        {
            Bitmap dst = null;

            image_mask  = null;
            mask_length = mask_area = mask_width = mask_height = mask_pvheight = 0;

            try
            {
                Mat src = BitmapConverter.ToMat(image);

                Mat src_kirsch = BitmapConverter.ToMat(image.KirschFilter());

                Mat kirsch_gray = new Mat();
                Cv2.CvtColor(src_kirsch, kirsch_gray, ColorConversionCodes.RGB2GRAY);

                Mat kirsch_threshold = new Mat();
                if (!useKthresholdLab)
                {
                    Cv2.Threshold(kirsch_gray, kirsch_threshold, kThreshold, 255, ThresholdTypes.Binary);
                }
                else
                {
                    Cv2.Threshold(kirsch_gray, kirsch_threshold, kThresholdLab, 255, ThresholdTypes.Binary);
                }

                Mat[] contours;
                List <OpenCvSharp.Point> hierarchy;
                List <Mat> hulls;
                Mat        morph_element = Cv2.GetStructuringElement(MorphShapes.Ellipse, new OpenCvSharp.Size(2, 2), new OpenCvSharp.Point(1, 1));

                #region morphology

                Mat kirsch_threshold_copy = new Mat();
                kirsch_threshold.CopyTo(kirsch_threshold_copy);

                int hullCount = 0, numLoops = 0;
                do
                {
                    numLoops++;

                    Mat kirsch_morph = kirsch_threshold_copy.MorphologyEx(MorphTypes.Gradient, morph_element);

                    hierarchy = new List <OpenCvSharp.Point>();
                    Cv2.FindContours(kirsch_morph, out contours, OutputArray.Create(hierarchy),
                                     RetrievalModes.External, ContourApproximationModes.ApproxSimple, new OpenCvSharp.Point(0, 0));

                    hulls = new List <Mat>();
                    for (int j = 0; j < contours.Length; j++)
                    {
                        Mat hull = new Mat();
                        Cv2.ConvexHull(contours[j], hull);
                        hulls.Add(hull);
                    }

                    Mat drawing = Mat.Zeros(src.Size(), MatType.CV_8UC1);
                    Cv2.DrawContours(drawing, hulls, -1, Scalar.White);

                    if (hulls.Count != hullCount && numLoops < 100)
                    {
                        hullCount             = hulls.Count;
                        kirsch_threshold_copy = drawing;
                    }
                    else
                    {
                        break;
                    }
                } while (true);

                #endregion

                if (numLoops >= 100)
                {
                    throw new Exception("Could not find hull");
                }

                #region bestHull
                //try and filter out dust near to stone

                double largestArea = hulls.Max(m => Cv2.ContourArea(m));
                var    bestHulls   = hulls.Where(m => Cv2.ContourArea(m) == largestArea).ToList();

                Mat hulls_mask = Mat.Zeros(src.Size(), MatType.CV_8UC1);
                Cv2.DrawContours(hulls_mask, bestHulls, -1, Scalar.White, -1);

                //hulls_mask is the convex hull of main outline excluding nearby dust
                Cv2.Threshold(kirsch_gray, kirsch_threshold, hullThreshold, 255, ThresholdTypes.Binary);
                Mat kirsch_mask = Mat.Zeros(kirsch_threshold.Size(), kirsch_threshold.Type());
                kirsch_threshold.CopyTo(kirsch_mask, hulls_mask);

                #endregion

                hierarchy = new List <OpenCvSharp.Point>();
                Cv2.FindContours(kirsch_mask, out contours, OutputArray.Create(hierarchy),
                                 RetrievalModes.External, ContourApproximationModes.ApproxSimple, new OpenCvSharp.Point(0, 0));

                List <OpenCvSharp.Point> points = new List <OpenCvSharp.Point>();
                foreach (Mat contour in contours)
                {
                    int m2Count            = (contour.Rows % 2 > 0) ? contour.Rows + 1 : contour.Rows;
                    OpenCvSharp.Point[] p2 = new OpenCvSharp.Point[m2Count];
                    contour.GetArray(0, 0, p2);
                    Array.Resize(ref p2, contour.Rows);

                    points.AddRange(p2.ToList());
                }
                Mat finalHull = new Mat();
                Cv2.ConvexHull(InputArray.Create(points), finalHull);


                List <Mat> finalHulls = new List <Mat>();
                finalHulls.Add(finalHull);
                Cv2.DrawContours(src, finalHulls, -1, new Scalar(128, 0, 128, 255), 2);

                hulls_mask = Mat.Zeros(src.Size(), MatType.CV_8UC1);
                Cv2.DrawContours(hulls_mask, finalHulls, -1, Scalar.White, -1);
                image_mask = hulls_mask;

                #region bounding

                Mat poly = new Mat();
                Cv2.ApproxPolyDP(finalHull, poly, 3, true);
                Rect boundaryRect = Cv2.BoundingRect(poly);
                mask_width  = boundaryRect.Width;
                mask_height = boundaryRect.Height;
                mask_area   = Cv2.ContourArea(poly);
                mask_length = Cv2.ArcLength(finalHull, true);

                List <OpenCvSharp.Point> finalPoints = new List <OpenCvSharp.Point>();
                int m1Count            = (finalHull.Rows % 2 > 0) ? finalHull.Rows + 1 : finalHull.Rows;
                OpenCvSharp.Point[] p1 = new OpenCvSharp.Point[m1Count];
                finalHull.GetArray(0, 0, p1);
                Array.Resize(ref p1, finalHull.Rows);
                finalPoints.AddRange(p1.ToList());

                double y_min   = boundaryRect.Bottom;
                double y_x_min = finalPoints.Where(p => p.X == boundaryRect.Left).ToList()[0].Y;
                double y_x_max = finalPoints.Where(p => p.X == boundaryRect.Right).ToList()[0].Y;

                mask_pvheight = ((double)y_x_max + (double)y_x_min) / 2 - (double)y_min;

                #endregion

                //dst = BitmapConverter.ToBitmap(src);
                using (var ms = src.ToMemoryStream())
                {
                    dst = (Bitmap)Image.FromStream(ms);
                }

                try
                {
                    if (saveMaskDataPath.Length > 0)
                    {
                        StringBuilder sb = new StringBuilder();
                        sb.AppendLine("mask_length,mask_area,mask_width,mask_height,mask_pvheight");
                        sb.AppendLine(mask_length + "," + mask_area + "," + mask_width + "," + mask_height + "," + mask_pvheight);
                        image_mask.SaveImage(saveMaskDataPath + @"\image_mask.jpg");
                        File.WriteAllText(saveMaskDataPath + @"\mask_vals.csv", sb.ToString());
                    }
                }
                catch
                {
                }
            }
            catch
            {
                dst = null;
            }

            return(dst);
        }
Example #19
        private void TestVideoDetection()
        {
            Task.Run(() =>
            {
                // YOLO setting
                //int yoloWidth = 1920, yoloHeight = 1129;
                int yoloWidth = 175, yoloHeight = 102;
                //int yoloWidth = 1000, yoloHeight = 588;
                var configurationDetector = new ConfigurationDetector();
                var config      = configurationDetector.Detect();
                var yoloWrapper = new YoloWrapper(config);

                // OpenCV & WPF setting
                VideoCapture videocapture;
                Mat image          = new Mat();
                WriteableBitmap wb = new WriteableBitmap(yoloWidth, yoloHeight, 96, 96, System.Windows.Media.PixelFormats.Bgr24, null);

                byte[] imageInBytes = new byte[(int)(yoloWidth * yoloHeight * image.Channels())];

                // Read a video file and run object detection over it!
                using (videocapture = new VideoCapture("E:\\WPF Projects\\Automotive_Drones_Analysis_Tool\\Daten_automatisches_Fahren\\DJI_0137.MP4"))
                {
                    for (int i = 0; i < videocapture.FrameCount; i++)
                    {
                        using (Mat imageOriginal = new Mat())
                        {
                            // read a single frame and convert the frame into a byte array
                            videocapture.Read(imageOriginal);
                            image        = imageOriginal.Resize(new OpenCvSharp.Size(yoloWidth, yoloHeight));
                            imageInBytes = image.ToBytes();

                            // conduct object detection and display the result
                            var items = yoloWrapper.Detect(imageInBytes);
                            // We use the image to detect the objects in a very small size - then we draw them onto the
                            // uiImage and scale it up!
                            var uiImage = imageOriginal.Resize(new OpenCvSharp.Size(yoloWidth * 10, yoloHeight * 10));

                            foreach (var item in items)
                            {
                                var x      = item.X * 10;
                                var y      = item.Y * 10;
                                var width  = item.Width * 10;
                                var height = item.Height * 10;
                                var type   = item.Type; // class name of the object

                                // draw a bounding box for the detected object
                                // you can set different colors for different classes
                                Cv2.Rectangle(uiImage, new OpenCvSharp.Rect(x, y, width, height), Scalar.Green, 3);
                            }

                            // display the detection result
                            Application.Current?.Dispatcher?.Invoke(() =>
                            {
                                videoViewer.Source = BitmapHelper.BitmapToBitmapSource(BitmapConverter.ToBitmap(uiImage));
                            });
                        }
                        i++; // note: combined with the for-loop increment, this skips every other frame
                    }
                }
            });
        }
Example #20
 public Mask(System.Drawing.Bitmap bmp)
 {
     src = BitmapConverter.ToMat(bmp);
     LoadMaskSettings();
 }
Example #21
        private void CaptureCameraCallback()
        {
            if (!isCameraRunning)
            {
                return;
            }
            frame   = new Mat();
            capture = new VideoCapture(0);
            capture.Set(CaptureProperty.FrameWidth, 1920);
            capture.Set(CaptureProperty.FrameHeight, 1080);

            Thread.Sleep(InitCameraDelay);
            capture.Open(CaptureDevice.DShow, 0);
            // Wait until the capture reports it is open; sleep briefly to avoid spinning at 100% CPU
            while (!capture.IsOpened())
            {
                Thread.Sleep(10);
            }

            if (capture.IsOpened())
            {
                capture.Set(CaptureProperty.FrameWidth, 1920);
                capture.Set(CaptureProperty.FrameHeight, 1080);

                while (isCameraRunning)
                {
                    capture.Read(frame);

                    image = BitmapConverter.ToBitmap(frame);

#if DEBUG
                    if (GetAsyncKeyState(0x7B) == -32767)
                    {
                        if (!Directory.Exists("imgs"))
                        {
                            Directory.CreateDirectory("imgs");
                        }

                        var rnd     = new Random();
                        var rndName = rnd.Next(0, int.MaxValue);

                        int blockIndex = 0;
                        for (int x = 0; x < image.Width; x += BlockConfig.BlockSize.Width)
                        {
                            for (int y = 0; y < image.Height; y += BlockConfig.BlockSize.Height)
                            {
                                blockIndex++;

                                if (blockIndex == 3)
                                {
                                    using (Bitmap blockRegion = new Bitmap(BlockConfig.BlockSize.Width, BlockConfig.BlockSize.Height))
                                    {
                                        using (Graphics blockGraphics = Graphics.FromImage(blockRegion))
                                        {
                                            blockGraphics.DrawImage(image, new Rectangle(0, 0, BlockConfig.BlockSize.Width, BlockConfig.BlockSize.Height), new Rectangle(x, y, BlockConfig.BlockSize.Width, BlockConfig.BlockSize.Height), GraphicsUnit.Pixel);

                                            var name = $"solitaire_{rndName}_{x}_{y}.png";
                                            blockRegion.Save($"imgs/{name}", ImageFormat.Png);
                                        }
                                    }
                                }
                            }
                        }

                        Console.Beep();
                    }
#endif


                    UpdateBuffer.Draw(image);
                }
            }
        }
Example #22
 public void SetSrc(Bitmap bmp)
 {
     src = BitmapConverter.ToMat(bmp);
 }
Example #23
 public static Bitmap getBitmap()
 {
     return(BitmapConverter.ToBitmap(mainPic));
 }
Example #24
        // Frame rendering handler
        private void videoRendering(object sender, NewFrameEventArgs eventArgs)
        {
            Bitmap img = (Bitmap)eventArgs.Frame.Clone();

            // Debug.WriteLine(DateTime.Now + ":" + "render update");
            // Debug.WriteLine(mode);

            try
            {
                //pictureBoxCamera.Image = img;

                temp = BitmapConverter.ToMat(img); // image to compare against

                // Detect keypoints and compute feature descriptors
                akaze.DetectAndCompute(temp, null, out key_point2, descriptor2);

                // Draw the keypoints of image 2 into output2
                Cv2.DrawKeypoints(temp, key_point2, output2);
                //Cv2.ImShow("output2", output2);

                pictureBoxCamera.Image = BitmapConverter.ToBitmap(output2);

                matcher = DescriptorMatcher.Create("BruteForce");
                matches = matcher.Match(descriptor1, descriptor2);

                // Count the matches whose distance is below the threshold (reset the counter for this frame first)
                good_match_length = 0;
                for (int i = 0; i < key_point1.Length && i < key_point2.Length; ++i)
                {
                    if (matches[i].Distance < threshold)
                    {
                        ++good_match_length;
                    }
                }

                DMatch[] good_matches = new DMatch[good_match_length]; // sized to the number of matches below the threshold

                // Collect the good matches
                int j = 0;
                for (int i = 0; i < key_point1.Length && i < key_point2.Length; ++i)
                {
                    if (matches[i].Distance < threshold)
                    {
                        good_matches[j] = matches[i];
                        ++j;
                    }
                }

                // Print the number of good matches for debugging
                Debug.WriteLine(j);
                Invoke((MethodInvoker) delegate()
                {
                    labelMatch.Text = j.ToString();
                });

                // If there are enough matching points, trigger the emergency stop / alert depending on the checkbox states
                if (j >= 16)
                {
                    // Emergency stop
                    if (checkBoxStop.Checked == true)
                    {
                        //WebRequest request = WebRequest.Create("https://maker.ifttt.com/trigger/raspberry/with/key/gHPH_xDKR664IVIr2YtRRj6BbQoQi-K0mCowIJCGPF3");
                        //WebResponse response = request.GetResponse();
                    }

                    // Alert sound
                    if (checkBoxAlert.Checked == true)
                    {
                        // _mediaPlayer.settings.volume = 20;
                        _mediaPlayer.URL = @"D:\DCIM\app\AkazeAlert\PcCameraApp\Resources\decision1.mp3";
                        _mediaPlayer.controls.play();
                    }
                }

                Cv2.DrawMatches(mat, key_point1, temp, key_point2, good_matches, output3);
                //Cv2.ImShow("output3", output3);

                pictureBoxResult.Image = BitmapConverter.ToBitmap(output3);
            }
            catch
            {
                pictureBoxCamera.Image = img;
            }
        }
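
The handler above assumes that akaze, mat (the reference image), key_point1/descriptor1 and the output Mats were prepared elsewhere, for example when the reference image is loaded. A hedged sketch of that one-time setup, with field names copied from the example and everything else (types, initial values, the threshold) guessed:

        // Fields assumed by videoRendering; declarations and initial values here are illustrative guesses.
        private AKAZE akaze;
        private Mat mat;                                        // reference image to compare frames against
        private Mat temp;
        private Mat output2 = new Mat(), output3 = new Mat();
        private Mat descriptor1 = new Mat(), descriptor2 = new Mat();
        private KeyPoint[] key_point1, key_point2;
        private DescriptorMatcher matcher;
        private DMatch[] matches;
        private int good_match_length;
        private const float threshold = 100f;                   // match-distance cutoff; the value is a guess

        private void InitReferenceImage(string referenceImagePath)
        {
            akaze = AKAZE.Create();
            mat   = Cv2.ImRead(referenceImagePath);
            akaze.DetectAndCompute(mat, null, out key_point1, descriptor1);
        }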
Example #25
 public Bitmap CreateImage(int frame = 0)
 {
     return(BitmapConverter.ToBitmap(GetTargetFrameMat(frame)));
 }
Example #26
        private void detectShapeCandidates(ref Bitmap bitmap, Boolean saveShapes)
        {
            string      myPhotos = Environment.GetFolderPath(Environment.SpecialFolder.MyPictures);
            Mat         colorMat = BitmapConverter.ToMat(bitmap);
            MatOfDouble mu       = new MatOfDouble();
            MatOfDouble sigma    = new MatOfDouble();

            Cv2.MeanStdDev(colorMat, mu, sigma);
            double mean = mu.GetArray(0, 0)[0];

            mu.Dispose();
            sigma.Dispose();

            Mat greyMat = new Mat();

            Cv2.CvtColor(colorMat, greyMat, ColorConversion.BgraToGray, 0);
            greyMat = greyMat.GaussianBlur(new OpenCvSharp.CPlusPlus.Size(1, 1), 5, 5, BorderType.Default);
            greyMat = greyMat.Canny(0.5 * mean, 1.2 * mean, 3, true);

            Mat contourMat = new Mat(greyMat.Size(), colorMat.Type());

            greyMat.CopyTo(contourMat);
            var contours = contourMat.FindContoursAsArray(ContourRetrieval.List, ContourChain.ApproxSimple);

            this.controls.Clear();
            for (int j = 0; j < contours.Length; j++)
            {
                var poly = Cv2.ApproxPolyDP(contours[j], 0.01 * Cv2.ArcLength(contours[j], true), true);
                int num  = poly.Length;

                if (num >= 4 && num < 20)
                {
                    var color = Scalar.Blue;
                    var rect  = Cv2.BoundingRect(poly);

                    if (rect.Height < 20 || rect.Width < 20)
                    {
                        continue;
                    }
                    if (saveShapes)
                    {
                        string path = Path.Combine(myPhotos, "shape_samples");
                        path = Path.Combine(path, "shape_sample_" + Path.GetRandomFileName() + ".png");
                        Mat    shapeMat = preprocessShape(rect, greyMat);
                        Bitmap shape    = shapeMat.ToBitmap();
                        shape.Save(path);
                        shape.Dispose();
                        shapeMat.Dispose();
                        continue;
                    }
                    if (shapeSVM != null)
                    {
                        Mat   shapeMat   = preprocessShape(rect, greyMat);
                        float shapeClass = classifyShape(shapeMat, shapeSVM);
                        if (shapeClass >= 0)
                        {
                            Shape shape = null;
                            switch ((int)shapeClass)
                            {
                            case 0:
                                color = Scalar.Red;
                                shape = new Shape(Shape.ShapeType.SQUARE, rect);
                                break;

                            case 1:
                                color = Scalar.Yellow;
                                shape = new Shape(Shape.ShapeType.CIRCLE, rect);
                                break;

                            case 2:
                                color = Scalar.Green;
                                shape = new Shape(Shape.ShapeType.SLIDER, rect);
                                break;
                            }
                            Cv2.Rectangle(colorMat, rect, color, 2);
                            this.controls.Add(shape);
                        }
                        shapeMat.Dispose();
                    }
                    else
                    {
                        Cv2.Rectangle(colorMat, rect, color, 2);
                    }
                }
            }
            bitmap = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(colorMat);
            colorMat.Dispose();
            greyMat.Dispose();
            contourMat.Dispose();
        }
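The Canny thresholds in detectShapeCandidates are derived from the image's mean intensity (0.5 × mean and 1.2 × mean). A minimal sketch of that adaptive-threshold step on its own, written against the newer OpenCvSharp API (the input path and blur kernel are placeholders):

// Sketch only: adaptive Canny thresholds from the mean intensity.
using OpenCvSharp;

using var color = Cv2.ImRead("input.png", ImreadModes.Color);
using var grey  = new Mat();
Cv2.CvtColor(color, grey, ColorConversionCodes.BGR2GRAY);

// derive the Canny thresholds from the mean intensity, as the example above does
Cv2.MeanStdDev(grey, out Scalar mu, out Scalar sigma);
double mean = mu.Val0;

using var blurred = new Mat();
Cv2.GaussianBlur(grey, blurred, new Size(3, 3), 0);

using var edges = new Mat();
Cv2.Canny(blurred, edges, 0.5 * mean, 1.2 * mean, 3, true);
Cv2.ImWrite("edges.png", edges);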
Example #27
0
        private void CameraReading()
        {
            bool control = false;

            lock (flags)
            {
                control = flags.running;
            }
            if (control)
            {
                using (Mat videoFrame = new Mat())
                    using (Mat showFrame = new Mat())
                    {
                        if (capture.Read(videoFrame))
                        {
                            lock (flags)
                            {
                                cc.PeopleFilter(flags);
                                localiser.Locate(videoFrame, flags);
                            }
                            localiser.GetLocations(out ball, out car1, out car2);

                            lock (flags)
                            {
                                Point2f[] posBallsF = new Point2f[0];
                                if (flags.calibrated)
                                {
                                    if (ball.Any())
                                    {
                                        posBallsF = cc.CameraToLogic(ball);
                                    }
                                    Point2f[] car12 = { car1, car2 };
                                    Point2f[] carAB = cc.CameraToLogic(car12);
                                    flags.posCarA = carAB[0];
                                    flags.posCarB = carAB[1];
                                }
                                else
                                {
                                    posBallsF     = ball;
                                    flags.posCarA = car1;
                                    flags.posCarB = car2;
                                }
                                Point2i[] posBallsI = new Point2i[posBallsF.Length];
                                for (int i = 0; i < posBallsF.Length; ++i)
                                {
                                    posBallsI[i] = posBallsF[i];
                                }
                                List <Point2i> posBallsList = new List <Point2i>();
                                foreach (Point2i b in posBallsI)
                                {
                                    if (!posBallsList.Any(bb => b.DistanceTo(bb) < Game.MinBallSept))
                                    {
                                        posBallsList.Add(b);
                                    }
                                }
                                flags.posBalls = posBallsList.ToArray();
                            }
                            timeCamNow = DateTime.Now;
                            TimeSpan timeProcess = timeCamNow - timeCamPrev;
                            timeCamPrev = timeCamNow;
                            Cv2.Resize(videoFrame, showFrame, flags.showSize, 0, 0, InterpolationFlags.Nearest);
                            BeginInvoke(new Action <Image>(UpdateCameraPicture), BitmapConverter.ToBitmap(showFrame));
                            //write the frame to the output video
                            if (flags.videomode == true)
                            {
                                vw.Write(showFrame);
                            }
                        }
                        lock (flags)
                        {
                            control = flags.running;
                        }
                    }
            }
        }
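The posBallsList loop above keeps a detected ball only when it is at least Game.MinBallSept away from every ball already accepted. That greedy de-duplication can be factored into a small helper, sketched below; the helper name and the default 20-pixel separation are assumptions, and Point2i is used as in the example (OpenCvSharp's integer point type).

// Sketch only: greedy de-duplication of nearby detections.
using System.Collections.Generic;
using System.Linq;
using OpenCvSharp;

static Point2i[] Deduplicate(IEnumerable<Point2i> detections, double minSeparation = 20)
{
    // keep a point only if it is far enough from every point kept so far
    var kept = new List<Point2i>();
    foreach (Point2i p in detections)
    {
        if (!kept.Any(q => p.DistanceTo(q) < minSeparation))
        {
            kept.Add(p);
        }
    }
    return kept.ToArray();
}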
Example #28
0
 public object ConvertBack(object value, Type targetType, object parameter, CultureInfo culture)
 {
     return(BitmapConverter.GetBuffer((BitmapImage)value));
 }
Example #29
0
        private void CaptureCameraCallback()
        {
            frame               = new Mat();
            capture             = new VideoCapture();
            capture.FrameWidth  = 680;
            capture.FrameHeight = 480;
            capture.Open(0);
            String            filenameFaceCascade = "haarcascade_frontalface_alt.xml";
            CascadeClassifier faceCascade         = new CascadeClassifier();

            if (!faceCascade.Load(filenameFaceCascade))
            {
                MessageBox.Show("haar error");
                return;
            }
            while (isCameraRunning == 1)
            {
                if (framecnt > 1000000)
                {
                    framecnt = 0;
                }
                framecnt++;
                capture.Read(frame);
                if (flip)
                {
                    Cv2.Flip(frame, frame, FlipMode.Y);
                }
                if (!frame.Empty())
                {
                    faces = faceCascade.DetectMultiScale(frame);
                    if (faces.Length > 0 && startMeasure)
                    {
                        for (int i = 0; i < faces.Length; i++)
                        {
                            val[i] = (float)Cv2.Mean(frame.SubMat(faces[i]));
                            if (val[i] > standard)
                            {
                                Cv2.Rectangle(frame, faces[i], Scalar.Red); // add rectangle to the image
                            }
                            else
                            {
                                Cv2.Rectangle(frame, faces[i], Scalar.Green); // add rectangle to the image
                            }
                            //textBox1.Text = textBox1.Text + "\tfaces : " + faces[i];
                        }

                        image = BitmapConverter.ToBitmap(frame);

                        using (Graphics graphics = Graphics.FromImage(image))
                        {
                            for (int i = 0; i < faces.Length; i++)
                            {
                                PointF p = new PointF(faces[i].X + faces[i].Width / 2 - 10, faces[i].Y - 15);
                                if (val[i] > standard)
                                {
                                    graphics.DrawString(val[i].ToString(), arialFont, Brushes.Red, p);
                                }
                                else
                                {
                                    graphics.DrawString(val[i].ToString(), arialFont, Brushes.Green, p);
                                }
                            }
                        }
                        if (framecnt % 10 == 0)
                        {
                            for (int i = 0; i < faces.Length; i++)
                            {
                                try
                                {
                                    Mat dst            = frame.SubMat(faces[i]);
                                    OpenCvSharp.Size s = new OpenCvSharp.Size(64, 64);
                                    Mat      resized   = dst.Resize(s);
                                    DateTime time      = DateTime.Now;

                                    OpenConnection();
                                    MySqlCommand command = new MySqlCommand("", conn);
                                    command.CommandText = "INSERT INTO data VALUES(@Date, @Time, @Face, @Stand, @Measure, @Warn)";
                                    byte[] data = resized.ToBytes();
                                    command.Parameters.AddWithValue("@Date", time.ToString("yyyy/MM/dd"));
                                    command.Parameters.AddWithValue("@Time", time.ToString("hh:mm:ss"));
                                    MySqlParameter blob = new MySqlParameter("@Face", MySqlDbType.Blob, data.Length);
                                    blob.Value = data;
                                    command.Parameters.Add(blob);
                                    command.Parameters.AddWithValue("@Stand", standard);
                                    command.Parameters.AddWithValue("@Measure", val[i]);

                                    if (val[i] > standard)
                                    {
                                        byte[]         icon = bad.ToBytes();
                                        MySqlParameter para = new MySqlParameter("@Warn", MySqlDbType.Blob, icon.Length);
                                        para.Value = icon;
                                        command.Parameters.Add(para);
                                    }
                                    else
                                    {
                                        byte[]         icon = good.ToBytes();
                                        MySqlParameter para = new MySqlParameter("@Warn", MySqlDbType.Blob, icon.Length);
                                        para.Value = icon;
                                        command.Parameters.Add(para);
                                    }
                                    command.ExecuteNonQuery();

                                    if (val[i] > standard)
                                    {
                                        simpleSound.Play();
                                    }
                                }
                                catch (Exception ex)
                                {
                                    MessageBox.Show(ex.ToString());
                                }
                            }
                            DisplayData(string.Format("SELECT * FROM data WHERE Date=\"{0}\"", today));
                        }
                    }
                    else
                    {
                        image = BitmapConverter.ToBitmap(frame);
                    }
                    pictureBox1.Image = image;
                }
                image = null;
            }
        }
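The database write inside the loop above stores the face crop as a BLOB next to the measured value. A minimal standalone sketch of that insert is shown below; the table name, column layout, and connection string are placeholders, and the MySql.Data client plus OpenCvSharp's Mat.ToBytes are assumed, as in the example.

// Sketch only: store an encoded face crop in a BLOB column.
using System;
using MySql.Data.MySqlClient;
using OpenCvSharp;

static void SaveFaceCrop(Mat faceCrop, string connectionString)
{
    // encode the crop as PNG bytes so it can go into a BLOB column
    byte[] png = faceCrop.ToBytes(".png");

    using (var conn = new MySqlConnection(connectionString))
    {
        conn.Open();
        using (var cmd = new MySqlCommand(
                   "INSERT INTO faces (CapturedAt, Face) VALUES (@at, @face)", conn))
        {
            cmd.Parameters.AddWithValue("@at", DateTime.Now);
            cmd.Parameters.Add("@face", MySqlDbType.Blob).Value = png;
            cmd.ExecuteNonQuery();
        }
    }
}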
Example #30
0
 public NullThumbnailRepository(BitmapConverter bitmapConverter)
     : this(new ThumbnailFactory(bitmapConverter))
 {
 }
Example #31
0
        /// <summary>
        /// Processes the filter on the passed <paramref name="srcData"/>
        /// resulting into <paramref name="dstData"/>.
        /// </summary>
        /// <param name="srcData">The source bitmap data.</param>
        /// <param name="dstData">The destination bitmap data.</param>
        protected override unsafe void Process(BitmapData srcData, BitmapData dstData)
        {
            // processing start and stop X,Y positions
            const int startX = 1;
            const int startY = 1;
            int       stopX  = startX + srcData.Width - 2;
            int       stopY  = startY + srcData.Height - 2;

            const double toAngle = 180.0 / Math.PI;
            float        leftPixel = 0, rightPixel = 0;

            #region canny
            Bitmap blur = BitmapConverter.BitmapDataToBitmap(srcData);
            // do grayscaling the image
            if (blur.PixelFormat != PixelFormat.Format8bppIndexed)
            {
                // STEP 0 - do grayscaling the image
                blur = TPGrayscale.CommonAlgorithms.BT709.Apply(blur);
            }

            // STEP 1 - blur image
            blur = gaussianFilter.Apply(blur);

            Rectangle  rect     = new Rectangle(0, 0, blur.Width, blur.Height);
            BitmapData blurData =
                blur.LockBits(rect, ImageLockMode.ReadWrite, blur.PixelFormat);

            int dstStride = dstData.Stride;
            int srcStride = blurData.Stride;

            int dstOffset = dstStride - rect.Width + 2;
            int srcOffset = srcStride - rect.Width + 2;

            // orientation array
            byte[,] orients = new byte[dstData.Width, dstData.Height];
            // gradients array
            //int[,] gxArray = new int[dstData.Width, dstData.Height];
            //int[,] gyArray = new int[dstData.Width, dstData.Height];
            float[,] gradients = new float[dstData.Width, dstData.Height];
            float maxGradient = float.NegativeInfinity;

            // do the job
            byte *src = (byte *)blurData.Scan0.ToPointer();
            // align pointer
            src += srcStride * startY + startX;

            // STEP 2 - calculate magnitude and edge orientation
            // for each line
            for (int y = startY; y < stopY; y++)
            {
                // for each pixel
                for (int x = startX; x < stopX; x++, src++)
                {
                    // pixel's value and gradients
                    int gx = src[-srcStride + 1] + src[srcStride + 1]
                             - src[-srcStride - 1] - src[srcStride - 1]
                             + 2 * (src[1] - src[-1]);

                    int gy = src[-srcStride - 1] + src[-srcStride + 1]
                             - src[srcStride - 1] - src[srcStride + 1]
                             + 2 * (src[-srcStride] - src[srcStride]);

                    //gxArray[x, y] = Math.Abs(gx);
                    //gyArray[x, y] = Math.Abs(gy);

                    // get gradient value
                    gradients[x, y] = (float)Math.Sqrt(gx * gx + gy * gy);
                    if (gradients[x, y] > maxGradient)
                    {
                        maxGradient = gradients[x, y];
                    }

                    // --- get orientation
                    double orientation;
                    if (gx == 0)
                    {
                        // can not divide by zero
                        orientation = (gy == 0) ? 0 : 90;
                    }
                    else
                    {
                        double div = (double)gy / gx;

                        // handle angles of the 2nd and 4th quads
                        if (div < 0)
                        {
                            orientation = 180 - Math.Atan(-div) * toAngle;
                        }
                        // handle angles of the 1st and 3rd quads
                        else
                        {
                            orientation = Math.Atan(div) * toAngle;
                        }

                        // get closest angle from 0, 45, 90, 135 set
                        if (orientation < 22.5)
                        {
                            orientation = 0;
                        }
                        else if (orientation < 67.5)
                        {
                            orientation = 45;
                        }
                        else if (orientation < 112.5)
                        {
                            orientation = 90;
                        }
                        else if (orientation < 157.5)
                        {
                            orientation = 135;
                        }
                        else
                        {
                            orientation = 0;
                        }
                    }

                    // save orientation
                    orients[x, y] = (byte)orientation;
                }
                src += srcOffset;
            }

            // STEP 3 - suppress non maximums
            byte *dst = (byte *)dstData.Scan0.ToPointer();
            // align pointer
            dst += dstStride * startY + startX;

            // for each line
            for (int y = startY; y < stopY; y++)
            {
                // for each pixel
                for (int x = startX; x < stopX; x++, dst++)
                {
                    // get two adjacent pixels
                    switch (orients[x, y])
                    {
                    case 0:
                        leftPixel  = gradients[x - 1, y];
                        rightPixel = gradients[x + 1, y];
                        break;

                    case 45:
                        leftPixel  = gradients[x - 1, y + 1];
                        rightPixel = gradients[x + 1, y - 1];
                        break;

                    case 90:
                        leftPixel  = gradients[x, y + 1];
                        rightPixel = gradients[x, y - 1];
                        break;

                    case 135:
                        leftPixel  = gradients[x + 1, y + 1];
                        rightPixel = gradients[x - 1, y - 1];
                        break;
                    }
                    // compare current pixels value with adjacent pixels
                    if ((gradients[x, y] < leftPixel) || (gradients[x, y] < rightPixel))
                    {
                        *dst = 0;
                    }
                    else
                    {
                        *dst = (byte)(gradients[x, y] / maxGradient * 255);
                    }
                }
                dst += dstOffset;
            }

            // STEP 4 - hysteresis
            dst = (byte *)dstData.Scan0.ToPointer();
            // align pointer
            dst += dstStride * startY + startX;

            // for each line
            for (int y = startY; y < stopY; y++)
            {
                // for each pixel
                for (int x = startX; x < stopX; x++, dst++)
                {
                    byte value = 255;
                    if (*dst < HighThreshold)
                    {
                        if (*dst < LowThreshold)
                        {
                            // non edge
                            value = 0;
                        }
                        else
                        {
                            // check 8 neighboring pixels
                            if ((dst[-1] < HighThreshold) &&
                                (dst[1] < HighThreshold) &&
                                (dst[-dstStride - 1] < HighThreshold) &&
                                (dst[-dstStride] < HighThreshold) &&
                                (dst[-dstStride + 1] < HighThreshold) &&
                                (dst[dstStride - 1] < HighThreshold) &&
                                (dst[dstStride] < HighThreshold) &&
                                (dst[dstStride + 1] < HighThreshold))
                            {
                                value = 0;
                            }
                        }
                    }

                    #region color orientations
                    if (value == 255 && orientColored)
                    {
                        byte tmp = orients[x, y];
                        switch (tmp)
                        {
                        case 0:
                            value = 255;
                            break;

                        case 45:
                            value = 45;
                            break;

                        case 90:
                            value = 90;
                            break;

                        case 135:
                            value = 135;
                            break;
                        }
                    }
                    #endregion color orientations
                    *dst = value;
                }
                dst += dstOffset;
            }
            #endregion canny

            //#region Adapt line thickness
            //if (Diameter > 1)
            //{
            //    dst = (byte*)dstData.Scan0.ToPointer();
            //    // allign pointer
            //    dst += dstStride * startY + startX;

            //    // for each line
            //    for (int y = startY; y < stopY; y++)
            //    {
            //        // for each pixel
            //        for (int x = startX; x < stopX; x++, dst++)
            //        {
            //            if (*dst != 0)
            //            {
            //                Drawing8Bpp.DrawThickPoint(
            //                    dstData, *dst, new Point( x, y), Diameter);
            //            }
            //        }
            //        dst += dstOffset;
            //    }
            //}
            //#endregion Adapt line thickness

            blur.UnlockBits(blurData);
            blur.Dispose();
        }
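The unsafe filter above walks through the classic Canny stages by hand: Gaussian blur, Sobel gradients with orientation quantization, non-maximum suppression, and hysteresis against LowThreshold/HighThreshold. For comparison, a minimal sketch of the same pipeline through OpenCvSharp's built-in routine; the input path, blur kernel, and the 50/150 thresholds are illustrative.

// Sketch only: equivalent pipeline via Cv2.Canny.
using OpenCvSharp;

using var src = Cv2.ImRead("input.png", ImreadModes.Grayscale);

// STEP 1 equivalent: smooth the image before taking gradients
using var blurred = new Mat();
Cv2.GaussianBlur(src, blurred, new Size(5, 5), 1.4);

// STEPs 2-4 equivalent: gradients, non-maximum suppression, and hysteresis
// are handled inside Cv2.Canny; 50/150 stand in for Low/HighThreshold
using var edges = new Mat();
Cv2.Canny(blurred, edges, 50, 150, 3, true);
Cv2.ImWrite("canny.png", edges);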