Example #1
        private Mat GetHoughLines(Mat imgGrayWB, bool DispB = false)
        {
            Mat imgGrayWstdI = _01_ReverseBW(imgGrayWB);
            List <LineSegmentPoint> segStdLst = Cv2.HoughLinesP(imgGrayWstdI, 1, Math.PI / 180, 20, 10, 10).ToList();
            // HoughLinesP(Mat image, double rho, double theta, int threshold, double minLineLength,double maxLineGap)

            Mat dstImg = new Mat(imgGrayWB.Size(), MatType.CV_8UC1, Scalar.White);

            foreach (LineSegmentPoint s in segStdLst)
            {
                dstImg.Line(s.P1, s.P2, Scalar.Black, 1, LineTypes.AntiAlias, 0);
            }
            if (DispB)
            {
                using (new Window("SourceGray", WindowMode.KeepRatio, imgGrayWB))
                    using (new Window("DetectedFrame", WindowMode.KeepRatio, dstImg)){ Cv2.WaitKey(); }
            }
            return(dstImg);
        }
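A note on the inversion step: HoughLinesP votes on non-zero pixels, which is presumably why the white-background input is reversed by _01_ReverseBW (not shown here) before detection. A minimal hedged sketch of that inversion using Cv2.BitwiseNot, under the assumption that _01_ReverseBW simply swaps black and white:

        private static Mat InvertForHough(Mat imgGrayWhiteBackground)
        {
            // Hough transforms count non-zero pixels, so black strokes on a white
            // background must become white strokes on black before HoughLinesP is called.
            Mat inverted = new Mat();
            Cv2.BitwiseNot(imgGrayWhiteBackground, inverted);
            return inverted;
        }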
Example #2
        static void Main(string[] args)
        {
            var reader = new AnnotationReader(new DirectoryInfo(@"C:\Library\정올 2017\Source\MPIIGaze\Data\Original\"));

            reader.Read();

            NoteData ordered = reader.Datas.OrderByDescending(x => x.OnScreenPoint.X).First();

            Console.WriteLine($"MaxX: {ordered}");

            ordered = reader.Datas.OrderByDescending(x => x.OnScreenPoint.Y).First();
            Console.WriteLine($"MaxY: {ordered}");

            while (true)
            {
                Console.Write(">>> ");
                var cmd = Console.ReadLine();
                switch (cmd)
                {
                case "show":
                    bool br           = false;
                    bool skip         = false;
                    bool framebyframe = false;
                    foreach (var d in reader.Datas)
                    {
                        if (br)
                        {
                            break;
                        }
                        d.ImShow(false);
                        int sleep = 500;
                        if (skip)
                        {
                            sleep = 1;
                        }
                        if (framebyframe)
                        {
                            sleep = 0;
                        }
                        char c = (char)Cv2.WaitKey(sleep);
                        switch (c)
                        {
                        case ' ':
                            framebyframe = !framebyframe;
                            break;

                        case 's':
                            skip = !skip;
                            break;

                        case 'e':
                            br = true;
                            break;
                        }
                    }
                    break;

                case "save":
                    int startInd = -1;
                    try
                    {
                        Console.Write("ind?>>> ");
                        startInd = Convert.ToInt32(Console.ReadLine());
                    }
                    catch
                    {
                    }

                    if (startInd > -1 && startInd < reader.Datas.Count)
                    {
                        DirectoryInfo di      = new DirectoryInfo(System.IO.Path.Combine(Environment.CurrentDirectory, "save"));
                        DirectoryInfo diRight = new DirectoryInfo(Path.Combine(di.FullName, "right"));
                        DirectoryInfo diLeft  = new DirectoryInfo(Path.Combine(di.FullName, "left"));
                        if (!di.Exists)
                        {
                            di.Create();
                        }
                        else
                        {
                            Console.WriteLine("already did");
                        }
                        if (!diRight.Exists)
                        {
                            diRight.Create();
                        }
                        if (!diLeft.Exists)
                        {
                            diLeft.Create();
                        }

                        // presumably one classifier per worker: a CascadeClassifier instance is not safe to share across threads
                        List <CascadeClassifier> cascades = new List <CascadeClassifier>();
                        for (int i = 0; i < Environment.ProcessorCount; i++)
                        {
                            cascades.Add(new CascadeClassifier("haarcascade_eye.xml"));
                        }

                        object countLocker = new object();
                        int    count       = 0;
                        Parallel.For(startInd, reader.Datas.Count, new ParallelOptions()
                        {
                            MaxDegreeOfParallelism = Environment.ProcessorCount
                        }, (i) =>
                        {
                            CascadeClassifier filter;
                            NoteData d;
                            int id;
                            lock (countLocker)
                            {
                                count++;
                                filter = cascades[count % cascades.Count];
                                id     = count - 1 + startInd;
                                d      = reader.Datas[id];
                            }

                            using (Mat frame = Cv2.ImRead(d.File.FullName))
                            {
                                // scaleFactor 1.2, minNeighbors 3, detection window bounded to 30x30..308x308 px
                                var rects = filter.DetectMultiScale(frame, 1.2, 3, HaarDetectionType.ScaleImage, new Size(30, 30), new Size(308, 308));
                                if (d.GetEye(rects, true) != null && d.GetEye(rects, false) != null)
                                {
                                    d.Save(diLeft, true, frame, rects);
                                    d.Save(diRight, false, frame, rects);
                                }
                            }

                            Console.WriteLine($"Extracted({id})[{count}/{reader.Datas.Count - startInd}] {d}");
                        });
                    }
                    break;
                }
            }
        }
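A side note on the index prompt in the "save" branch: the try/catch around Convert.ToInt32 only exists to fall back to the -1 sentinel on bad input. A hedged sketch of the same behavior with int.TryParse, which avoids swallowing exceptions:

                    Console.Write("ind?>>> ");
                    if (!int.TryParse(Console.ReadLine(), out int startInd))
                    {
                        startInd = -1;   // same "invalid input" sentinel as the original
                    }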
Example #3
        private void th_grab(int height = 0, int width = 0, int snap_wait = 500)
        {
            try
            {
                // Set the acquisition mode to free running continuous acquisition when the camera is opened.
                camera.CameraOpened += Configuration.AcquireContinuous;

                // Open the connection to the camera device.
                camera.Open();

                if (width == 0 || width > camera.Parameters[PLCamera.Width].GetMaximum())
                {
                    camera.Parameters[PLCamera.Width].SetValue(camera.Parameters[PLCamera.Width].GetMaximum());
                }
                else if (width < camera.Parameters[PLCamera.Width].GetMinimum())
                {
                    camera.Parameters[PLCamera.Width].SetValue(camera.Parameters[PLCamera.Width].GetMinimum());
                }
                else
                {
                    camera.Parameters[PLCamera.Width].SetValue(width);
                }

                if (height == 0 || height > camera.Parameters[PLCamera.Height].GetMaximum())
                {
                    camera.Parameters[PLCamera.Height].SetValue(camera.Parameters[PLCamera.Height].GetMaximum());
                }
                else if (height < camera.Parameters[PLCamera.Height].GetMinimum())
                {
                    camera.Parameters[PLCamera.Height].SetValue(camera.Parameters[PLCamera.Height].GetMinimum());
                }
                else
                {
                    camera.Parameters[PLCamera.Height].SetValue(height);
                }

                camera.Parameters[PLCamera.CenterX].SetValue(true);
                camera.Parameters[PLCamera.CenterY].SetValue(true);

                camera.StreamGrabber.Start();

                if (saveTracked)
                {
                    var    expected = new OpenCvSharp.Size(1920, 1374);
                    string filename = "D:\\save\\" + DateTime.Now.ToString("M.dd-HH.mm.ss") + ".avi";
                    videoWriter.Open(filename, OpenCvSharp.FourCCValues.XVID, 14, expected, false);
                }
                if (saveOrigin)
                {
                    var    expected = new OpenCvSharp.Size(1920, 1374);
                    string filename = "D:\\save\\" + DateTime.Now.ToString("M.dd-HH.mm.ss") + ".origin.avi";
                    originWriter.Open(filename, OpenCvSharp.FourCCValues.XVID, 14, expected, false);
                }
                if (saveHisto)
                {
                    var    expected = new OpenCvSharp.Size(256, 300);
                    string filename = "D:\\save\\" + DateTime.Now.ToString("M.dd-HH.mm.ss") + ".histo.avi";
                    histoWriter.Open(filename, OpenCvSharp.FourCCValues.XVID, 14, expected, false);
                }
                if (saveHeatmap)
                {
                    var    expected = new OpenCvSharp.Size(1920, 1374);
                    string filename = "D:\\save\\" + DateTime.Now.ToString("M.dd-HH.mm.ss") + ".heatmap.avi";
                    heatmapWriter.Open(filename, OpenCvSharp.FourCCValues.XVID, 14, expected, true);
                }

                while (grabbing)
                {
                    camera.Parameters[PLCamera.Gain].SetValue(valueGain);
                    camera.Parameters[PLCamera.ExposureTime].SetValue(valueExpTime);
                    IGrabResult grabResult = camera.StreamGrabber.RetrieveResult(5000, TimeoutHandling.ThrowException);

                    using (grabResult)
                    {
                        if (grabResult.GrabSucceeded)
                        {
                            // convert image from basler IImage to OpenCV Mat
                            Mat img = convertIImage2Mat(grabResult);

                            // convert image from BayerBG to grayscale
                            Cv2.CvtColor(img, img, ColorConversionCodes.BayerBG2GRAY);
                            Cv2.Resize(img, img, new OpenCvSharp.Size(1920, 1374), 0, 0, InterpolationFlags.Linear);

                            Mat histo   = new Mat();
                            Mat heatmap = new Mat();
                            Mat dst     = img.Clone();

                            // Apply Histogram
                            histo = cvProcess.histogram(dst);

                            // Apply ColorMap
                            Cv2.ApplyColorMap(dst, heatmap, ColormapTypes.Rainbow);

                            // Apply Background map subtraction
                            Cv2.Subtract(dst, -5, dst);

                            if (saveOrigin)
                            {
                                originWriter.Write(img);
                            }

                            // Create Tracked Image
                            dst = Iso11146(img, dst);

                            Cv2.Resize(dst, dst, new OpenCvSharp.Size(1920, 1374), 0, 0, InterpolationFlags.Linear);
                            if (saveTracked)
                            {
                                videoWriter.Write(dst);
                            }
                            if (saveHisto)
                            {
                                histoWriter.Write(histo);
                            }
                            if (saveHeatmap)
                            {
                                heatmapWriter.Write(heatmap);
                            }

                            // resize image  to fit the imageBox
                            Cv2.Resize(dst, dst, new OpenCvSharp.Size(960, 687), 0, 0, InterpolationFlags.Linear);
                            Cv2.Resize(heatmap, heatmap, new OpenCvSharp.Size(256, 183), 0, 0, InterpolationFlags.Linear);

                            Cv2.Rectangle(dst, new OpenCvSharp.Rect(axis_x, axis_y, axis_scale, axis_scale), Scalar.White, 1);

                            // display images
                            BitmapToImageSource(dst);
                            BitmapHistoToImageSource(histo);
                            BitmapHeatmapToImageSource(heatmap);
                        }
                        else
                        {
                            System.Windows.MessageBox.Show("Error: {0} {1}" + grabResult.ErrorCode, grabResult.ErrorDescription);
                        }
                    }
                    count++;
                    if (count > 500)
                    {
                        count    = 0;
                        tracking = false;
                    }

                    Thread.Sleep(snap_wait);
                }
                videoWriter.Release();
                originWriter.Release();
                histoWriter.Release();
                heatmapWriter.Release();
                camera.StreamGrabber.Stop();
                camera.Close();
            }
            catch (Exception exception)
            {
                if (camera.IsOpen)
                {
                    camera.Close();
                }

                System.Windows.MessageBox.Show("Exception: {0}" + exception.Message);
            }
        }
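For reference, the writer lifecycle used above in isolation (a minimal hedged sketch; the path is hypothetical, and isColor must match the frames later passed to Write(): false for the single-channel tracked/histogram streams, true for the 3-channel heatmap):

            var writer = new VideoWriter();
            writer.Open("D:\\save\\demo.avi", OpenCvSharp.FourCCValues.XVID, 14, new OpenCvSharp.Size(1920, 1374), false);
            using (var frame = new Mat(new OpenCvSharp.Size(1920, 1374), MatType.CV_8UC1, Scalar.Black))
            {
                writer.Write(frame);   // frame size and channel count must match what Open() was given
            }
            writer.Release();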
Example #4
 // close the opencv window
 public void OnDestroy()
 {
     Cv2.DestroyAllWindows();
 }
        protected override NetResult[] BeginDetect(Bitmap img, float minProbability = 0.3F, string[] labelsFilters = null)
        {
            //Extract width and height from config file
            ExtractValueFromConfig("width", out int widthBlob);
            ExtractValueFromConfig("height", out int heightBlob);

            using (Mat mat = OpenCvSharp.Extensions.BitmapConverter.ToMat(img))
            {
                //Create the blob
                var blob = CvDnn.BlobFromImage(mat, Scale, size: new OpenCvSharp.Size(widthBlob, heightBlob), crop: false);

                //Set blob of a default layer
                network.SetInput(blob);

                //Get all out layers
                string[] outLayers = network.GetUnconnectedOutLayersNames();

                //Initialize all blobs for the all out layers
                Mat[] result = new Mat[outLayers.Length];
                for (int i = 0; i < result.Length; i++)
                {
                    result[i] = new Mat();
                }

                //Execute all out layers
                network.Forward(result, outLayers);

                List <NetResult> netResults = new List <NetResult>();
                foreach (var item in result)
                {
                    for (int i = 0; i < item.Rows; i++)
                    {
                        //Get the max loc and max of the col range by prefix result
                        Cv2.MinMaxLoc(item.Row[i].ColRange(Prefix, item.Cols), out double min, out double max, out OpenCvSharp.Point minLoc, out OpenCvSharp.Point maxLoc);

                        //Validate the min probability
                        if (max >= minProbability)
                        {
                            //The label is the max Loc
                            string label = Labels[maxLoc.X];
                            if (labelsFilters != null)
                            {
                                if (!labelsFilters.Contains(label))
                                {
                                    continue;
                                }
                            }

                            //The probability is the max value
                            double probability = max;

                            //Center BoundingBox X is the 0 index result
                            int centerX = Convert.ToInt32(item.At <float>(i, 0) * (float)mat.Width);
                            //Center BoundingBox Y is the 1 index result
                            int centerY = Convert.ToInt32(item.At <float>(i, 1) * (float)mat.Height);
                            //Width BoundingBox is the 2 index result
                            int width = Convert.ToInt32(item.At <float>(i, 2) * (float)mat.Width);
                            //Height BoundingBox is the 3 index result
                            int height = Convert.ToInt32(item.At <float>(i, 3) * (float)mat.Height);

                            //Build NetResult
                            netResults.Add(NetResult.Build(centerX, centerY, width, height, label, probability));
                        }
                    }
                }

                return(netResults.ToArray());
            }
        }
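Prefix is not declared in this snippet; since the loop scans ColRange(Prefix, item.Cols) for the best class score and reads the box from columns 0-3, it presumably points at the first class-score column of the YOLO output row (see the layout comment in GetResult further down). A hypothetical declaration, for context only:

        // Hypothetical: columns 0-3 = box, 4 = objectness, class scores start at column 5.
        private const int Prefix = 5;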
Example #6
        static void Main()
        {
            var img = Cv2.ImRead(_input);

            Catalog = CatalogUtil.ReadCatalogItems(_catalogPath);
            var fileTuples = new List <(string input, string output)> {
                (_input, _output)
            };
            string modelFile = _modelPath;

            using (var graph = new TFGraph())
            {
                var model = File.ReadAllBytes(modelFile);
                graph.Import(new TFBuffer(model));
                using (var session = new TFSession(graph))
                {
                    foreach (var tuple in fileTuples)
                    {
                        var tensor = ImageUtil.CreateTensorFromImageFile(tuple.input, TFDataType.UInt8);
                        var runner = session.GetRunner();
                        runner
                        .AddInput(graph["image_tensor"][0], tensor)
                        .Fetch(
                            graph["detection_boxes"][0],
                            graph["detection_scores"][0],
                            graph["detection_classes"][0],
                            graph["num_detections"][0]);

                        Stopwatch sw = new Stopwatch();
                        sw.Start();
                        var output = runner.Run();
                        sw.Stop();
                        Console.WriteLine($"Runtime:{sw.ElapsedMilliseconds} ms");

                        var boxes   = (float[, , ])output[0].GetValue(jagged: false);
                        var scores  = (float[, ])output[1].GetValue(jagged: false);
                        var classes = (float[, ])output[2].GetValue(jagged: false);
                        var num     = (float[])output[3].GetValue(jagged: false);

                        #region show image

                        for (int i = 0; i < boxes.GetLength(1); i++)
                        {
                            if (scores[0, i] > 0.5)
                            {
                                var    idx     = Convert.ToInt32(classes[0, i]);
                                // TF Object Detection API boxes are normalized [ymin, xmin, ymax, xmax]
                                var    x1      = (int)(boxes[0, i, 1] * img.Width);
                                var    y1      = (int)(boxes[0, i, 0] * img.Height);
                                var    x2      = (int)(boxes[0, i, 3] * img.Width);
                                var    y2      = (int)(boxes[0, i, 2] * img.Height);
                                var    catalog = Catalog.FirstOrDefault(x => x.Id == idx);
                                string label   = $"{(catalog == null ? idx.ToString() : catalog.DisplayName)}: {scores[0, i] * 100:0.00}%";
                                Console.WriteLine($"{label} {x1} {y1} {x2} {y2}");
                                Cv2.Rectangle(img, new Rect(x1, y1, x2 - x1, y2 - y1), Scalar.Red, 2);
                                int baseline;
                                var textSize = Cv2.GetTextSize(label, HersheyFonts.HersheyTriplex, 0.5, 1, out baseline);
                                textSize.Height = textSize.Height + baseline / 2;
                                var y = y1 - textSize.Height < 0 ? y1 + textSize.Height : y1;
                                Cv2.Rectangle(img, new Rect(x1, y - textSize.Height, textSize.Width, textSize.Height + baseline / 2), Scalar.Red, Cv2.FILLED);
                                Cv2.PutText(img, label, new Point(x1, y), HersheyFonts.HersheyTriplex, 0.5, Scalar.Black);
                            }
                        }
                        #endregion
                    }
                }
            }
            using (new Window("image", img))
            {
                Cv2.WaitKey();
            }
        }
    }
Example #7
    // Simple example of canny edge detect
    void ProcessImage(Mat _image)
    {
        Cv2.Flip(_image, _image, FlipMode.X);
        Cv2.Canny(_image, cannyImage, 100, 100);
        var grayImage = new Mat();

        Cv2.CvtColor(_image, grayImage, ColorConversionCodes.BGR2GRAY);
        Cv2.EqualizeHist(grayImage, grayImage);

        var face_cascade = new CascadeClassifier();

        face_cascade.Load(Application.dataPath + "/Plugins/Classifiers/haarcascade_frontalface_default.xml");
        var eye_cascade = new CascadeClassifier();

        eye_cascade.Load(Application.dataPath + "/Plugins/Classifiers/haarcascade_eye_tree_eyeglasses.xml");
        var mouth_cascade = new CascadeClassifier();

        mouth_cascade.Load(Application.dataPath + "/Plugins/Classifiers/haarcascade_mcs_mouth.xml");
        //Debug.Log(" ");

        var faces = face_cascade.DetectMultiScale(
            image: grayImage,
            scaleFactor: 1.3,
            minNeighbors: 5,
            flags: HaarDetectionType.DoRoughSearch | HaarDetectionType.ScaleImage,
            minSize: new Size(100, 100)
            );

        //Bounds meshRendererBounds = GetComponentInChildren<MeshRenderer>().bounds;
        //Vector3 meshRendererCenter = meshRendererBounds.center;
        //Vector3 maxBound = meshRendererBounds.max;
        //Vector3 minBound = meshRendererBounds.min;
        //OpenCvSharp.Rect rect = new OpenCvSharp.Rect((int)meshRendererCenter.x + 350,(int)meshRendererCenter.y + 50, 600,600);
        var global_rectangle_color = Scalar.FromRgb(0, 0, 255);
        //Cv2.Rectangle(_image, rect, global_rectangle_color, 3);
        //Console.WriteLine("Detected faces: {0}", faces.Length);
        //Debug.Log(faces.Length);

        //var rnd = new System.Random();

        var face_count  = 0;
        var mouth_count = 0;
        var eye_count   = 0;

        foreach (var faceRect in faces)
        {
            var detectedFaceImage = new Mat(_image, faceRect);
            //Cv2.ImShow(string.Format("Face {0}", face_count), detectedFaceImage);
            //Cv2.WaitKey(1); // do events

            var facec_rectangle_color = Scalar.FromRgb(255, 0, 0);
            face = faceRect;
            Cv2.Rectangle(_image, faceRect, facec_rectangle_color, 3);


            rectFront = new OpenCvSharp.Rect(faceRect.X + faceRect.Width / 2 - 50, faceRect.Y + 50, 100, 50);
            //Cv2.Rectangle(_image, rectFront, global_rectangle_color, 3);



            var detectedFaceGrayImage = new Mat();
            Cv2.CvtColor(detectedFaceImage, detectedFaceGrayImage, ColorConversionCodes.BGRA2GRAY);

            var eyes = eye_cascade.DetectMultiScale(
                image: grayImage,
                scaleFactor: 1.3,
                minNeighbors: 5,
                flags: HaarDetectionType.DoRoughSearch | HaarDetectionType.ScaleImage,
                minSize: new Size(50, 50)
                );


            foreach (var eyeRect in eyes)
            {
                var detectedEyeImage = new Mat(_image, eyeRect);
                //Cv2.ImShow(string.Format("Face {0}", eye_count), detectedEyeImage);
                //Cv2.WaitKey(1); // do events

                var eye_rectangle_color = Scalar.FromRgb(0, 255, 0);
                //Cv2.Rectangle(_image, eyeRect, eye_rectangle_color, 3);

                if (eye_count == 1)
                {
                    // Relative to the position of the left eye
                    rectEyeLeft = new OpenCvSharp.Rect(eyeRect.X + 75, eyeRect.Y + 100, 25, 25);
                    //Cv2.Rectangle(_image, rectEyeLeft, global_rectangle_color, 3);
                }
                else
                {
                    // Relative to the position of the right eye
                    rectEyeRight = new OpenCvSharp.Rect(eyeRect.X, eyeRect.Y + 100, 25, 25);
                    //Cv2.Rectangle(_image, rectEyeRight, global_rectangle_color, 3);
                }



                var detectedEyeGrayImage = new Mat();
                Cv2.CvtColor(detectedEyeImage, detectedEyeGrayImage, ColorConversionCodes.BGRA2GRAY);

                eye_count++;
            }


            var mouth = mouth_cascade.DetectMultiScale(
                image: grayImage,
                scaleFactor: 1.3,
                minNeighbors: 5,
                flags: HaarDetectionType.DoRoughSearch | HaarDetectionType.ScaleImage,
                minSize: new Size(50, 50)
                );
            foreach (var m in mouth)
            {
                var detectedEarImage = new Mat(_image, m);
                //Cv2.ImShow(string.Format("Face {0}", eye_count), detectedEyeImage);
                //Cv2.WaitKey(1); // do events

                if (eyes.Length > 0 && m.Y > eyes[0].Y && Mathf.Abs(m.Y - eyes[0].Y) > 100)
                {
                    //Debug.Log("mouth height :");
                    //Debug.Log(m.Height);
                    var eye_rectangle_color = Scalar.FromRgb(0, 255, 0);
                    Cv2.Rectangle(_image, m, eye_rectangle_color, 3);
                    lipHeight = (float)m.Height / (float)face.Height;
                }

                var detectedEyeGrayImage = new Mat();
                Cv2.CvtColor(detectedEarImage, detectedEyeGrayImage, ColorConversionCodes.BGRA2GRAY);

                mouth_count++;
            }

            face_count++;
        }
    }
        static void Main()
        {
            #region parameter
            var         image        = Path.Combine(Location, "kite.jpg");
            var         cfg          = Path.Combine(Location, Cfg);
            var         model        = Path.Combine(Location, Weight);
            const float threshold    = 0.5f;    //for confidence
            const float nmsThreshold = 0.3f;    //threshold for nms
            #endregion

            //get image
            var org = new Mat(image);

            //setting blob, size can be:320/416/608
            //opencv blob setting can check here https://github.com/opencv/opencv/tree/master/samples/dnn#object-detection
            var blob = CvDnn.BlobFromImage(org, 1.0 / 255, new Size(416, 416), new Scalar(), true, false);
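            // For reference (standard OpenCV dnn semantics, not part of the original sample), the
            // BlobFromImage arguments above, in order:
            //   org           - input BGR image
            //   1.0 / 255     - scale factor applied to every pixel (Darknet models expect 0..1 input)
            //   Size(416,416) - network input size from the cfg (320/416/608 are the usual choices)
            //   new Scalar()  - mean to subtract (none here)
            //   true          - swap the R and B channels (OpenCV loads BGR, the model was trained on RGB)
            //   false         - no center crop after resizing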

            //load model and config; if you get the error "separator_index < line.size()", the cfg file is probably malformed
            var net = CvDnn.ReadNetFromDarknet(cfg, model);
            #region set preferable
            net.SetPreferableBackend(3);

            /*
             * 0:DNN_BACKEND_DEFAULT
             * 1:DNN_BACKEND_HALIDE
             * 2:DNN_BACKEND_INFERENCE_ENGINE
             * 3:DNN_BACKEND_OPENCV
             */
            net.SetPreferableTarget(0);

            /*
             * 0:DNN_TARGET_CPU
             * 1:DNN_TARGET_OPENCL
             * 2:DNN_TARGET_OPENCL_FP16
             * 3:DNN_TARGET_MYRIAD
             * 4:DNN_TARGET_FPGA
             */
            #endregion

            //input data
            net.SetInput(blob);

            //get output layer name
            var outNames = net.GetUnconnectedOutLayersNames();
            //create mats for output layer
            var outs = outNames.Select(_ => new Mat()).ToArray();

            #region forward model
            Stopwatch sw = new Stopwatch();
            sw.Start();

            net.Forward(outs, outNames);

            sw.Stop();
            Console.WriteLine($"Runtime:{sw.ElapsedMilliseconds} ms");
            #endregion

            //get result from all output
            GetResult(outs, org, threshold, nmsThreshold);

            using (new Window("died.tw", org))
            {
                Cv2.WaitKey();
            }
        }
Example #9
        public SURFSample()
        {
            // cvExtractSURF

            // call cv::initModule_nonfree() before using SURF/SIFT.
            Cv2.InitModule_NonFree();

            using (IplImage obj = Cv.LoadImage(FilePath.Image.SurfBox, LoadMode.GrayScale))
                using (IplImage image = Cv.LoadImage(FilePath.Image.SurfBoxinscene, LoadMode.GrayScale))
                    using (IplImage objColor = Cv.CreateImage(obj.Size, BitDepth.U8, 3))
                        using (IplImage correspond = Cv.CreateImage(new CvSize(image.Width, obj.Height + image.Height), BitDepth.U8, 1))
                        {
                            Cv.CvtColor(obj, objColor, ColorConversion.GrayToBgr);

                            Cv.SetImageROI(correspond, new CvRect(0, 0, obj.Width, obj.Height));
                            Cv.Copy(obj, correspond);
                            Cv.SetImageROI(correspond, new CvRect(0, obj.Height, correspond.Width, correspond.Height));
                            Cv.Copy(image, correspond);
                            Cv.ResetImageROI(correspond);

                            CvSURFPoint[] objectKeypoints, imageKeypoints;
                            float[][]     objectDescriptors, imageDescriptors;
                            Stopwatch     watch = Stopwatch.StartNew();
                            {
                                CvSURFParams param = new CvSURFParams(500, true);
                                Cv.ExtractSURF(obj, null, out objectKeypoints, out objectDescriptors, param);
                                Console.WriteLine("Object Descriptors: {0}", objectDescriptors.Length);
                                Cv.ExtractSURF(image, null, out imageKeypoints, out imageDescriptors, param);
                                Console.WriteLine("Image Descriptors: {0}", imageDescriptors.Length);
                            }
                            watch.Stop();
                            Console.WriteLine("Extraction time = {0}ms", watch.ElapsedMilliseconds);
                            watch.Reset();
                            watch.Start();

                            CvPoint[] srcCorners = new CvPoint[4]
                            {
                                new CvPoint(0, 0), new CvPoint(obj.Width, 0), new CvPoint(obj.Width, obj.Height), new CvPoint(0, obj.Height)
                            };
                            CvPoint[] dstCorners = LocatePlanarObject(objectKeypoints, objectDescriptors, imageKeypoints, imageDescriptors, srcCorners);
                            if (dstCorners != null)
                            {
                                for (int i = 0; i < 4; i++)
                                {
                                    CvPoint r1 = dstCorners[i % 4];
                                    CvPoint r2 = dstCorners[(i + 1) % 4];
                                    Cv.Line(correspond, new CvPoint(r1.X, r1.Y + obj.Height), new CvPoint(r2.X, r2.Y + obj.Height), CvColor.White);
                                }
                            }

                            // Draw lines between corresponding point pairs
                            int[] ptPairs = FindPairs(objectKeypoints, objectDescriptors, imageKeypoints, imageDescriptors);
                            for (int i = 0; i < ptPairs.Length; i += 2)
                            {
                                CvSURFPoint r1 = objectKeypoints[ptPairs[i]];
                                CvSURFPoint r2 = imageKeypoints[ptPairs[i + 1]];
                                Cv.Line(correspond, r1.Pt, new CvPoint(Cv.Round(r2.Pt.X), Cv.Round(r2.Pt.Y + obj.Height)), CvColor.White);
                            }

                            // Draw a circle at each keypoint location
                            for (int i = 0; i < objectKeypoints.Length; i++)
                            {
                                CvSURFPoint r      = objectKeypoints[i];
                                CvPoint     center = new CvPoint(Cv.Round(r.Pt.X), Cv.Round(r.Pt.Y));
                                int         radius = Cv.Round(r.Size * (1.2 / 9.0) * 2);
                                Cv.Circle(objColor, center, radius, CvColor.Red, 1, LineType.AntiAlias, 0);
                            }
                            watch.Stop();
                            Console.WriteLine("Drawing time = {0}ms", watch.ElapsedMilliseconds);

                            using (CvWindow windowObject = new CvWindow("Object", WindowMode.AutoSize))
                                using (CvWindow windowCorrespond = new CvWindow("Object Correspond", WindowMode.AutoSize))
                                {
                                    windowObject.ShowImage(objColor);
                                    windowCorrespond.ShowImage(correspond);
                                    Cv.WaitKey(0);
                                }
                        }
        }
Example #10
        private void _ClearFrame(Mat Img, int sf = 4, bool DispB = false)
        {
            if (DispB)
            {
                using (new Window("Before", WindowMode.KeepRatio, Img)){ Cv2.WaitKey(0); }
            }

            int         W = Img.Width, H = Img.Height;
            byte        b128 = (byte)128, b255 = (byte)255;
            Queue <int> Que = new Queue <int>();

            unsafe {
                byte *S0 = Img.DataPointer;
                int   rc;
                for (int r = 0; r < H; r++)
                {
                    if (r < sf || r >= H - sf)
                    {
                        for (int c = 0; c < W; c++)
                        {
                            rc = r * W + c;
                            if (S0[rc] < b128)
                            {
                                S0[rc] = b255;
                                if (r > 0 && S0[rc - W] < b128)
                                {
                                    Que.Enqueue(rc - W);
                                }
                                if (r < H - 1 && S0[rc + W] < b128)
                                {
                                    Que.Enqueue(rc + W);
                                }
                                if (c > 0 && S0[rc - 1] < b128)
                                {
                                    Que.Enqueue(rc - 1);
                                }
                                if (c < W - 1 && S0[rc + 1] < b128)
                                {
                                    Que.Enqueue(rc + 1);
                                }
                            }
                        }
                    }
                    else
                    {
                        for (int c = 0; c < sf; c++)
                        {
                            rc = r * W + c;
                            if (S0[rc] < b128)
                            {
                                S0[rc] = b255;
                                if (r > 0 && S0[rc - W] < b128)
                                {
                                    Que.Enqueue(rc - W);
                                }
                                if (r < H - 1 && S0[rc + W] < b128)
                                {
                                    Que.Enqueue(rc + W);
                                }
                                if (c > 0 && S0[rc - 1] < b128)
                                {
                                    Que.Enqueue(rc - 1);
                                }
                                if (c < W - 1 && S0[rc + 1] < b128)
                                {
                                    Que.Enqueue(rc + 1);
                                }
                            }
                        }
                        for (int c = W - sf; c < W; c++)
                        {
                            rc = r * W + c;
                            if (S0[rc] < b128)
                            {
                                S0[rc] = b255;
                                if (r > 0 && S0[rc - W] < b128)
                                {
                                    Que.Enqueue(rc - W);
                                }
                                if (r < H - 1 && S0[rc + W] < b128)
                                {
                                    Que.Enqueue(rc + W);
                                }
                                if (c > 0 && S0[rc - 1] < b128)
                                {
                                    Que.Enqueue(rc - 1);
                                }
                                if (c < W - 1 && S0[rc + 1] < b128)
                                {
                                    Que.Enqueue(rc + 1);
                                }
                            }
                        }
                    }
                }

                while (Que.Count > 0)
                {
                    rc = Que.Dequeue();   // do not whiten here: the guarded block below must still see the pixel as dark to propagate
                    int r = rc / W, c = rc % W;
                    if (S0[rc] < b128)
                    {
                        S0[rc] = b255;
                        if (r > 0 && S0[rc - W] < b128)
                        {
                            Que.Enqueue(rc - W);
                        }
                        if (r < H - 1 && S0[rc + W] < b128)
                        {
                            Que.Enqueue(rc + W);
                        }
                        if (c > 0 && S0[rc - 1] < b128)
                        {
                            Que.Enqueue(rc - 1);
                        }
                        if (c < W - 1 && S0[rc + 1] < b128)
                        {
                            Que.Enqueue(rc + 1);
                        }
                    }
                }
                if (DispB)
                {
                    using (new Window("After", WindowMode.KeepRatio, Img)){ Cv2.WaitKey(0); }
                }
            }
        }
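For comparison, a much shorter hedged sketch of the same "clear anything dark that touches the border" idea using Cv2.FloodFill. It is only approximately equivalent: it seeds from the 1-pixel outer ring rather than an sf-wide band, and it assumes the image is strictly binarized (0 and 255 only), since FloodFill with default tolerances fills pixels equal in value to the seed:

        private static void ClearBorderBlobs(Mat img)
        {
            // Seed a white flood fill from every dark pixel on the outer border.
            for (int x = 0; x < img.Width; x++)
            {
                if (img.At <byte>(0, x) < 128)
                {
                    Cv2.FloodFill(img, new Point(x, 0), Scalar.White);
                }
                if (img.At <byte>(img.Height - 1, x) < 128)
                {
                    Cv2.FloodFill(img, new Point(x, img.Height - 1), Scalar.White);
                }
            }
            for (int y = 0; y < img.Height; y++)
            {
                if (img.At <byte>(y, 0) < 128)
                {
                    Cv2.FloodFill(img, new Point(0, y), Scalar.White);
                }
                if (img.At <byte>(y, img.Width - 1) < 128)
                {
                    Cv2.FloodFill(img, new Point(img.Width - 1, y), Scalar.White);
                }
            }
        }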
        /// <summary>
        /// Get result from all output
        /// </summary>
        /// <param name="output"></param>
        /// <param name="image"></param>
        /// <param name="threshold"></param>
        /// <param name="nmsThreshold">threshold for nms</param>
        /// <param name="nms">Enable Non-maximum suppression or not</param>
        private static void GetResult(IEnumerable <Mat> output, Mat image, float threshold, float nmsThreshold, bool nms = true)
        {
            //for nms
            var classIds      = new List <int>();
            var confidences   = new List <float>();
            var probabilities = new List <float>();
            var boxes         = new List <Rect2d>();

            var w = image.Width;
            var h = image.Height;

            /*
             * YOLO3 COCO trainval output
             * 0 1 : center                    2 3 : w/h
             * 4 : confidence                  5 ~ 84 : class probability
             */
            const int prefix = 5;   //skip 0~4

            foreach (var prob in output)
            {
                for (var i = 0; i < prob.Rows; i++)
                {
                    var confidence = prob.At <float>(i, 4);
                    if (confidence > threshold)
                    {
                        //get classes probability
                        Cv2.MinMaxLoc(prob.Row[i].ColRange(prefix, prob.Cols), out _, out Point max);
                        var classes     = max.X;
                        var probability = prob.At <float>(i, classes + prefix);

                        if (probability > threshold) //more accuracy, you can cancel it
                        {
                            //get center and width/height
                            var centerX = prob.At <float>(i, 0) * w;
                            var centerY = prob.At <float>(i, 1) * h;
                            var width   = prob.At <float>(i, 2) * w;
                            var height  = prob.At <float>(i, 3) * h;

                            if (!nms)
                            {
                                // draw result (if don't use NMSBoxes)
                                Draw(image, classes, confidence, probability, centerX, centerY, width, height);
                                continue;
                            }

                            //put data to list for NMSBoxes
                            classIds.Add(classes);
                            confidences.Add(confidence);
                            probabilities.Add(probability);
                            boxes.Add(new Rect2d(centerX, centerY, width, height));   // note: X/Y hold the box center here; Draw (not shown) presumably converts to a corner
                        }
                    }
                }
            }

            if (!nms)
            {
                return;
            }

            //using non-maximum suppression to reduce overlapping low confidence box
            CvDnn.NMSBoxes(boxes, confidences, threshold, nmsThreshold, out int[] indices);

            Console.WriteLine($"NMSBoxes drop {confidences.Count - indices.Length} overlapping result.");

            foreach (var i in indices)
            {
                var box = boxes[i];
                Draw(image, classIds[i], confidences[i], probabilities[i], box.X, box.Y, box.Width, box.Height);
            }
        }
Example #12
        public int[] sdkFrameRecg_SolverV3(Mat imgWhiteB, byte thrVal, bool DispB = false)
        {
            //___PrintTimeSpan("start");

            Mat imgBlackW = _01_ReverseBW(imgWhiteB);

            Point2d[] Q4 = null;

            stdImg = _01_DetectGrid4(imgWhiteB, imgBlackW, thrVal, out Q4, DispB: false);  //Binarization: background white, object black
            //___PrintTimeSpan("_01_DetectGrid4");

            if (Q4 == null)
            {
                return(null);
            }
            if (DispB)
            {
                using (new Window("_0A_StandardFrame", WindowMode.KeepRatio, stdImg)){ Cv2.WaitKey(0); }
            }

            Mat stdImg2 = GetHoughLines(stdImg);

            //___PrintTimeSpan("GetHoughLines");
            Point2d[] Q16 = _0_DetectGrid16(stdImg2, Q4, DispB: false); //Frame distortion adjustment
            //___PrintTimeSpan("_0_DetectGrid16");
            Point2d[] Q100 = _0_DetectGrid100Ex(stdImg2, Q16, DispB: false);
            //___PrintTimeSpan("_0_DetectGrid100Ex");

            int[] SDK8 = _0_digitRecognition100(stdImg, Q100, DispB: false);//true);  //Result display
            //___PrintTimeSpan("_0_digitRecognition100");

            return(SDK8);    //stdImg;
        }
Example #13
        private Point2d[] _0_DetectGrid100Ex(Mat stdImg, Point2d[] Q16, bool DispB)
        {
            double a = 1.0 / 3.0, b = 2.0 / 3.0;
            var    Q100 = new Point2d[100];

            for (int k = 0; k < 16; k++)
            {
                Q100[(k / 4 * 30) + (k % 4) * 3] = Q16[k];
            }
            for (int r = 0; r <= 10; r += 3)
            {
                for (int c = 0; c < 9; c += 3)
                {
                    int rc = r * 10 + c;
                    Q100[rc + 1] = _2_Get_InterPolation(Q100[rc], Q100[rc + 3], a);
                    Q100[rc + 2] = _2_Get_InterPolation(Q100[rc], Q100[rc + 3], b);
                }
            }
            for (int c = 0; c <= 9; c += 3)
            {
                for (int r = 0; r < 9; r += 3)
                {
                    int rc = (r) * 10 + c;
                    Q100[rc + 10] = _2_Get_InterPolation(Q100[rc], Q100[rc + 30], a);
                    Q100[rc + 20] = _2_Get_InterPolation(Q100[rc], Q100[rc + 30], b);
                }
            }
            Mat Q100Mat = stdImg.CvtColor(ColorConversionCodes.GRAY2BGR);  //####

            foreach (var P in Q100.Where(p => p.X > 0))
            {
                Q100Mat.Circle((Point)P, 3, Scalar.Green, 2);                                    //####
            }
            double evMax = 0.0;

            for (int loop = 0; loop < 10; loop++)   //####
            {
                double ev = 0.0;
                for (int bk = 0; bk < 9; bk++)
                {
                    int rc0 = (bk / 3) * 30 + (bk % 3) * 3;
                    ev += AdjustGrid81(stdImg, Q100, rc0 + 1, rc0 + 0, rc0 + 3, rc0 + 31, rc0 + 30, rc0 + 33);
                    ev += AdjustGrid81(stdImg, Q100, rc0 + 2, rc0 + 0, rc0 + 3, rc0 + 32, rc0 + 30, rc0 + 33);

                    ev += AdjustGrid81(stdImg, Q100, rc0 + 10, rc0 + 0, rc0 + 30, rc0 + 13, rc0 + 3, rc0 + 33);
                    ev += AdjustGrid81(stdImg, Q100, rc0 + 20, rc0 + 0, rc0 + 30, rc0 + 23, rc0 + 3, rc0 + 33);
                }

                //WriteLine("### AdjustGrid81 ev:{0} evMax:{1}", ev, evMax );
                if (ev <= evMax)
                {
                    break;
                }
                evMax = ev;
            }
            if (DispB)
            {
                foreach (var P in Q100.Where(p => p.X > 0))
                {
                    Q100Mat.Circle((Point)P, 5, Scalar.Red, 2);                                            //####
                }
                using (new Window("_0_DetectGrid100Ex", WindowMode.KeepRatio, Q100Mat)){ Cv2.WaitKey(0); } //####
            }
            return(Q100);
        }
Example #14
        public int[] _01B_DetectDigits100(Mat frame2, Point2d[] Q100, bool DispBres = false)
        {
            Point[] Q4W = new Point[4];

            for (int r = 1; r < 9; r++)
            {
                if ((r % 3) == 0)
                {
                    continue;
                }
                int r0 = r / 3 * 3;
                for (int c = 1; c < 9; c++)
                {
                    if ((c % 3) == 0)
                    {
                        continue;
                    }
                    int c0 = c / 3 * 3;
                    Q100[r * 10 + c] = crossPoint(Q100[r * 10 + c0], Q100[r * 10 + c0 + 3], Q100[r0 * 10 + c], Q100[(r0 + 3) * 10 + c]);
                }
            }

            int[] SB81 = new int[81];   // per-cell result: 1-9 recognized digit, 10 blank (printed as '.'), 99 recognition failure (cell image saved below)
            for (int rc = 0; rc < 81; rc++)
            {
                int r = rc / 9, c = rc % 9, rcA = r * 10 + c;
                Q4W[0] = (Point)Q100[rcA]; Q4W[1] = (Point)Q100[rcA + 1]; Q4W[2] = (Point)Q100[rcA + 11]; Q4W[3] = (Point)Q100[rcA + 10];

                //Inverse projection transformation
                Mat reaRecog9 = _2B_SDK_ProjectivTransformInv(frame2, Q4W, DispB: false); //##@@2

                //Sudoku digits recognition
                int Q = _2B_DigitRecognition(reaRecog9, rc0: rc, resDispB: true); //$$$$$$$$
                SB81[rc] = Q;
                if (folderName != null && Q == 99)
                {
                    if (!Directory.Exists(folderName))
                    {
                        Directory.CreateDirectory(folderName);
                    }
                    string fName = folderName + "/" + DateTime.Now.ToString("yyMMdd_HHmmssfff") + ".jpg";
                    Cv2.ImWrite(fName, reaRecog9);
                }

                /*
                 * if(DispBres || Q==99){
                 *    using( new Window("DDigit rc:"+rc,WindowMode.KeepRatio,reaRecog9) ){ Cv2.WaitKey(0); }
                 *
                 * }
                 */
            }
            if (DispBres)
            {
                string st = "SDK_ImageRecogEx2   ";
                for (int rc = 0; rc < 81; rc++)
                {
                    int sb = SB81[rc];
                    if (sb == 10)
                    {
                        st += " .";
                    }
                    else if (sb == 0 || sb > 10)
                    {
                        st += " X";
                    }
                    else
                    {
                        st += " " + SB81[rc];
                    }
                    if ((rc % 9) == 8)
                    {
                        WriteLine(st); st = "SDK_ImageRecogEx2   ";
                    }
                }
                WriteLine();
            }
            return(SB81);
        }
Example #15
        private void button1_Click(object sender, EventArgs e)
        {
            int imgWidth = Convert.ToInt32(textBox1.Text);
            int blankPx  = Convert.ToInt32(textBox2.Text);

            Mat srcImage1 = new Mat();
            Mat blankImg  = new Mat(new OpenCvSharp.Size(imgWidth, blankPx), MatType.CV_8UC3,
                                    new Scalar(textBox3.BackColor.B, textBox3.BackColor.G, textBox3.BackColor.R));
            Mat board;

            if (!Directory.Exists("Output"))
            {
                Directory.CreateDirectory("Output");
            }

            CommonOpenFileDialog dialog = new CommonOpenFileDialog();

            dialog.IsFolderPicker = true;
            if (dialog.ShowDialog() == CommonFileDialogResult.Ok)
            {
                try
                {
                    string[] dirs  = Directory.GetDirectories(dialog.FileName);
                    int      Count = 0;
                    foreach (var XX in dirs)
                    {
                        DirectoryInfo di = new DirectoryInfo(XX);
                        board = new Mat();

                        toolStripProgressBar1.Maximum = dirs.Length;

                        int total = dirs.Length;
                        var items = di.GetFiles("*.*").Where(s => s.Name.EndsWith(".jpg") || s.Name.EndsWith(".png") || s.Name.EndsWith(".bmp"));

                        foreach (var item in items)
                        {
                            Application.DoEvents();
                            Invoke((MethodInvoker)(() => toolStripStatusLabel1.Text = $"작업목록: {di.Name}"));   // "작업목록" = "work list"
                            //Console.WriteLine(item.Name);
                            srcImage1 = Cv2.ImRead(item.FullName);

                            double ratio = imgWidth / (double)srcImage1.Width;

                            Cv2.Resize(srcImage1, srcImage1, new OpenCvSharp.Size(0, 0), ratio, ratio, InterpolationFlags.Area);

                            if (board.Width != imgWidth)
                            {
                                Cv2.VConcat(srcImage1, blankImg, board);
                            }
                            else
                            {
                                Cv2.VConcat(srcImage1, blankImg, srcImage1);
                                Cv2.VConcat(board, srcImage1, board);
                            }
                            srcImage1.Dispose();

                            Cv2.ImWrite($"Output\\{di.Name}.png", board);
                        }

                        toolStripProgressBar1.Value = ++Count;
                        board.Dispose();
                    }
                }
                catch (Exception ex)
                {
                    Console.WriteLine("Error: " + ex.Message);
                }
            }
        }
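A hedged alternative to the incremental VConcat calls above, assuming the OpenCvSharp build in use has the overload that takes a list of Mats: every part must share the same width and type, which is what the Resize to imgWidth and the blank strip created with the same width guarantee.

            var parts = new List <Mat>
            {
                new Mat(new OpenCvSharp.Size(800, 600), MatType.CV_8UC3, Scalar.White),   // stand-in page image
                new Mat(new OpenCvSharp.Size(800, 40), MatType.CV_8UC3, Scalar.Black)     // stand-in gap strip
            };
            using (var stacked = new Mat())
            {
                Cv2.VConcat(parts, stacked);                  // every Mat must have identical width and type
                Cv2.ImWrite("Output\\stacked.png", stacked);  // hypothetical output path
            }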
Example #16
        static void Main()
        {
            Application.EnableVisualStyles();
            Application.SetCompatibleTextRenderingDefault(false);

            List <Mat> grayImages       = new List <Mat>();
            List <Mat> unwarpedImages   = new List <Mat>();
            List <Mat> filtered         = new List <Mat>();
            List <Mat> processedSquares = new List <Mat>();
            List <Mat> trainingData     = new List <Mat>();

            string[] imgNames =
            {
                "7_a.jpg",
                "7_b.jpg",
                "1_a.jpg",
                "1_b.jpg",
                "2_a.jpg",
                "2_b.jpg",
                "3_a.jpg",
                "3_b.jpg",
                "4_a.jpg",
                "4_b.jpg",
                "5_a.jpg",
                "5_b.jpg",
                "6_a.jpg",
                "6_b.jpg",
                "8_a.jpg",
                "8_b.jpg",
                "9_a.jpg",
                "9_b.jpg"
            };

            var imganalyzer = new SudokuFinder();
            var gray        = new Mat();

            List <int> intList = new List <int>();

            foreach (var s in imgNames)
            {
                var src = Cv2.ImRead(@".\Resources\" + s);
                imganalyzer.FindSudoku(src);
                var data = imganalyzer.ProcessPuzzle(imganalyzer.unwarpedSudoku);

                grayImages.Add(src);
                filtered.Add(imganalyzer.unwarpedSudoku.Clone());
                processedSquares.Add(concatPUzzle(data));
                trainingData.AddRange(data);
                int x       = Int32.Parse(s[0].ToString());
                var myArray = new List <int>();
                for (int i = 0; i < 81; i++)
                {
                    myArray.Add(x);
                }
                intList.AddRange(myArray);
            }
            intList.Add(0);

            List <double[]> TrainginData = new List <double[]>();

            int size = 0;

            foreach (var d in trainingData)
            {
                size = d.Size().Width * d.Size().Height;
                byte[] managedArray = new byte[size];
                Marshal.Copy(d.Data, managedArray, 0, size);

                TrainginData.Add(Array.ConvertAll(managedArray, c => c != 0 ? (double)1 : 0));
            }

            TrainginData.Add(new double[size]);

            var saved = imganalyzer.trainSVM(TrainginData.ToArray(), intList.ToArray());

            double sizeb = ((float)saved.Count()) / 1000000.0;

            //Test
            imganalyzer = new SudokuFinder(saved);

            var src2 = Cv2.ImRead(@".\Resources\2.jpg");

            imganalyzer.FindSudoku(src2);
            var data2 = imganalyzer.ProcessPuzzle(imganalyzer.unwarpedSudoku);

            var ans2 = imganalyzer.OCR(data2);

            Cv2.ImShow("Source", imganalyzer.unwarpedSudoku);
            Cv2.ImShow("newbox", src2);
            Cv2.ImShow("data2", concatPUzzle(data2));

            Application.Run();
            Console.ReadKey();
        }
Example #17
        public static Bitmap CaptureScreenArea(Rectangle captureArea)
        {
            var bmp = new Bitmap(captureArea.Width, captureArea.Height, PixelFormat.Format32bppArgb);
            var g   = Graphics.FromImage(bmp);

            g.CopyFromScreen(captureArea.Left, captureArea.Top, 0, 0, bmp.Size, CopyPixelOperation.SourceCopy);

            var src      = OpenCvSharp.Extensions.BitmapConverter.ToMat(bmp);
            // One six-vertex polygon: FillPoly expects a list of polygons, each given as a list of
            // points, so the six points belong in a single inner list (six one-point "polygons"
            // would not fill any area).
            var vertices = new List <List <Point> >
            {
                new List <Point>()
                {
                    new Point(10, 500),
                    new Point(10, 300),
                    new Point(300, 200),
                    new Point(500, 200),
                    new Point(700, 300),
                    new Point(700, 500)
                }
            };

            //src = src.CvtColor(ColorConversionCodes.RGB2RGBA);

            src.FillPoly(vertices, Scalar.Pink);
            src.AdjustROI(200, 200, 200, 200);
            src = src.CvtColor(ColorConversionCodes.BGR2GRAY);
            src = src.Canny(200, 300);
            src = src.GaussianBlur(new OpenCvSharp.Size(5, 5), 0);

            var lines = src.HoughLinesP(1, Math.PI / 180, 50, 30, 15);

            var dst = src.EmptyClone();


            foreach (var line in lines)
            {
                Cv2.Line(dst, line.P1, line.P2, Scalar.Cyan, 2);
            }


            return(OpenCvSharp.Extensions.BitmapConverter.ToBitmap(dst));

            //var mask = src.EmptyClone();


            //Cv2.BitwiseAnd(src, mask, OutputArray.Create(src));


            //var dst = new Mat();
            //src.CopyTo(dst);
            //src = src.GaussianBlur(new OpenCvSharp.Size(7, 9),0);
            //src = src.CvtColor(ColorConversionCodes.BGR2GRAY);
            //src = src.Canny(50, 75);
            //var channels=src.Split();


            //var lines = src.HoughLinesP(1, Math.PI / 180, 50);
            //var Blank = new Mat(src.Rows, src.Cols, MatType.CV_8UC3, new Scalar(0, 0, 0));

            //foreach (var line in lines)
            //{
            //    //Cv2.Line(src, line.P1, line.P2, new Scalar(0, 0, 0), 2, LineTypes.AntiAlias);
            //    Cv2.Line(dst, line.P1, line.P2, new Scalar(255, 255, 255), 2, LineTypes.AntiAlias);
            //}

            //src = src.AdjustROI(100, 640, 100, 700);
        }
Example #18
        public void Run()
        {
            Mat img = Cv2.ImRead(FilePath.Lenna, LoadMode.GrayScale);

            // expand input image to optimal size
            Mat padded = new Mat();
            int m      = Cv2.GetOptimalDFTSize(img.Rows);
            int n      = Cv2.GetOptimalDFTSize(img.Cols); // on the border add zero values

            Cv2.CopyMakeBorder(img, padded, 0, m - img.Rows, 0, n - img.Cols, BorderType.Constant, Scalar.All(0));

            // Add to the expanded another plane with zeros
            Mat paddedF32 = new Mat();

            padded.ConvertTo(paddedF32, MatType.CV_32F);
            Mat[] planes  = { paddedF32, Mat.Zeros(padded.Size(), MatType.CV_32F) };
            Mat   complex = new Mat();

            Cv2.Merge(planes, complex);

            // this way the result may fit in the source matrix
            Mat dft = new Mat();

            Cv2.Dft(complex, dft);

            // compute the magnitude and switch to logarithmic scale
            // => log(1 + sqrt(Re(DFT(I))^2 + Im(DFT(I))^2))
            Mat[] dftPlanes;
            Cv2.Split(dft, out dftPlanes);  // dftPlanes[0] = Re(DFT(I)), dftPlanes[1] = Im(DFT(I))

            // magnitude = sqrt(Re(DFT(I))^2 + Im(DFT(I))^2)
            Mat magnitude = new Mat();

            Cv2.Magnitude(dftPlanes[0], dftPlanes[1], magnitude);

            magnitude += Scalar.All(1);  // switch to logarithmic scale
            Cv2.Log(magnitude, magnitude);

            // crop the spectrum, if it has an odd number of rows or columns
            Mat spectrum = magnitude[
                new Rect(0, 0, magnitude.Cols & -2, magnitude.Rows & -2)];

            // rearrange the quadrants of Fourier image  so that the origin is at the image center
            int cx = spectrum.Cols / 2;
            int cy = spectrum.Rows / 2;

            Mat q0 = new Mat(spectrum, new Rect(0, 0, cx, cy));   // Top-Left - Create a ROI per quadrant
            Mat q1 = new Mat(spectrum, new Rect(cx, 0, cx, cy));  // Top-Right
            Mat q2 = new Mat(spectrum, new Rect(0, cy, cx, cy));  // Bottom-Left
            Mat q3 = new Mat(spectrum, new Rect(cx, cy, cx, cy)); // Bottom-Right

            // swap quadrants (Top-Left with Bottom-Right)
            Mat tmp = new Mat();

            q0.CopyTo(tmp);
            q3.CopyTo(q0);
            tmp.CopyTo(q3);

            // swap quadrant (Top-Right with Bottom-Left)
            q1.CopyTo(tmp);
            q2.CopyTo(q1);
            tmp.CopyTo(q2);

            // Transform the matrix with float values into a viewable image form (float values between 0 and 1)
            Cv2.Normalize(spectrum, spectrum, 0, 1, NormType.MinMax);

            // Show the result
            Cv2.ImShow("Input Image", img);
            Cv2.ImShow("Spectrum Magnitude", spectrum);

            // calculating the idft
            Mat inverseTransform = new Mat();

            Cv2.Dft(dft, inverseTransform, DftFlag2.Inverse | DftFlag2.RealOutput);
            Cv2.Normalize(inverseTransform, inverseTransform, 0, 1, NormType.MinMax);
            Cv2.ImShow("Reconstructed by Inverse DFT", inverseTransform);
            Cv2.WaitKey();
        }
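The quadrant swap above is the usual fftshift; a small reusable sketch of the same step is shown below, assuming the input Mat has even width and height (which the `& -2` crop guarantees).

        // Minimal fftshift sketch: bring the zero-frequency component to the image centre
        // by swapping the diagonal quadrants in place.
        static void FftShift(Mat mat)
        {
            int cx = mat.Cols / 2;
            int cy = mat.Rows / 2;

            using (Mat q0 = new Mat(mat, new Rect(0, 0, cx, cy)))    // top-left
            using (Mat q1 = new Mat(mat, new Rect(cx, 0, cx, cy)))   // top-right
            using (Mat q2 = new Mat(mat, new Rect(0, cy, cx, cy)))   // bottom-left
            using (Mat q3 = new Mat(mat, new Rect(cx, cy, cx, cy)))  // bottom-right
            using (Mat tmp = new Mat())
            {
                q0.CopyTo(tmp); q3.CopyTo(q0); tmp.CopyTo(q3);       // swap top-left with bottom-right
                q1.CopyTo(tmp); q2.CopyTo(q1); tmp.CopyTo(q2);       // swap top-right with bottom-left
            }
        }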
Exemple #19
0
        public void DoOCR(KNearest kNearest, string path)
        {
            var src = Cv2.ImRead(path);

            Cv2.ImShow("Source", src);

            var gray = new Mat();

            Cv2.CvtColor(src, gray, ColorConversionCodes.BGR2GRAY); // ImRead returns a 3-channel BGR image

            var threshImage = new Mat();

            Cv2.Threshold(gray, threshImage, Thresh, ThresholdMaxVal, ThresholdTypes.BinaryInv); // Threshold to find contour


            Point[][]        contours;
            HierarchyIndex[] hierarchyIndexes;
            Cv2.FindContours(
                threshImage,
                out contours,
                out hierarchyIndexes,
                mode: RetrievalModes.CComp,
                method: ContourApproximationModes.ApproxSimple);

            if (contours.Length == 0)
            {
                throw new NotSupportedException("Couldn't find any object in the image.");
            }

            //Create input sample by contour finding and cropping
            var dst = new Mat(src.Rows, src.Cols, MatType.CV_8UC3, Scalar.All(0));

            var contourIndex = 0;

            while (contourIndex >= 0) // walk the top-level contours via the hierarchy's Next links
            {
                var contour = contours[contourIndex];

                var boundingRect = Cv2.BoundingRect(contour); //Find bounding rect for each contour

                Cv2.Rectangle(src,
                              new Point(boundingRect.X, boundingRect.Y),
                              new Point(boundingRect.X + boundingRect.Width, boundingRect.Y + boundingRect.Height),
                              new Scalar(0, 0, 255),
                              2);

                var roi = new Mat(threshImage, boundingRect); //Crop the image

                var resizedImage      = new Mat();
                var resizedImageFloat = new Mat();
                Cv2.Resize(roi, resizedImage, new Size(10, 10));             //resize to 10X10
                resizedImage.ConvertTo(resizedImageFloat, MatType.CV_32FC1); //convert to float
                var result = resizedImageFloat.Reshape(1, 1);


                var results           = new Mat();
                var neighborResponses = new Mat();
                var dists             = new Mat();
                var detectedClass     = (int)kNearest.FindNearest(result, 1, results, neighborResponses, dists);

                //Console.WriteLine("DetectedClass: {0}", detectedClass);
                //Cv2.ImShow("roi", roi);
                //Cv.WaitKey(0);

                //Cv2.ImWrite(string.Format("det_{0}_{1}.png",detectedClass, contourIndex), roi);

                Cv2.PutText(
                    dst,
                    detectedClass.ToString(CultureInfo.InvariantCulture),
                    new Point(boundingRect.X, boundingRect.Y + boundingRect.Height),
                    0,
                    1,
                    new Scalar(0, 255, 0),
                    2);

                contourIndex = hierarchyIndexes[contourIndex].Next;
            }

            Cv2.ImShow("Segmented Source", src);
            Cv2.ImShow("Detected", dst);

            Cv2.ImWrite("dest.jpg", dst);

            Cv2.WaitKey();
        }
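DoOCR expects a KNearest model trained on 10x10 samples flattened to 1x100 float rows; the sketch below shows one way such a model could be built, assuming the training digits are single-channel 8-bit images. TrainDigitModel and its parameters are hypothetical, not part of the original code.

        // Hypothetical trainer for the model consumed by DoOCR: each digit image is resized
        // to 10x10, flattened into a 1x100 CV_32FC1 row, and paired with its numeric label.
        static KNearest TrainDigitModel(IList<Mat> digitImages, IList<int> labels)
        {
            int count = digitImages.Count;
            var sampleData = new float[count * 100];  // one flattened 10x10 image per row
            var labelData  = new float[count];

            for (int i = 0; i < count; i++)
            {
                var resized = new Mat();
                Cv2.Resize(digitImages[i], resized, new Size(10, 10));
                for (int r = 0; r < 10; r++)
                {
                    for (int c = 0; c < 10; c++)
                    {
                        sampleData[i * 100 + r * 10 + c] = resized.Get<byte>(r, c);
                    }
                }
                labelData[i] = labels[i];
            }

            var samples   = new Mat(count, 100, MatType.CV_32FC1, sampleData);
            var responses = new Mat(count, 1, MatType.CV_32FC1, labelData);

            var kNearest = KNearest.Create();
            kNearest.Train(samples, SampleTypes.RowSample, responses);  // one row per sample
            return kNearest;
        }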
Exemple #20
0
        /// <summary>
        /// https://github.com/Itseez/opencv_extra/blob/master/learning_opencv_v2/ch9_watershed.cpp
        /// </summary>
        private static void watershedExample()
        {
            var src     = new Mat(@"..\..\Images\corridor.jpg", ImreadModes.AnyDepth | ImreadModes.AnyColor);
            var srcCopy = new Mat();

            src.CopyTo(srcCopy);

            var markerMask = new Mat();

            Cv2.CvtColor(srcCopy, markerMask, ColorConversionCodes.BGRA2GRAY);

            var imgGray = new Mat();

            Cv2.CvtColor(markerMask, imgGray, ColorConversionCodes.GRAY2BGR);
            markerMask = new Mat(markerMask.Size(), markerMask.Type(), s: Scalar.All(0));

            var sourceWindow = new Window("Source (Select areas by mouse and then press space)")
            {
                Image = srcCopy
            };

            var previousPoint = new Point(-1, -1);

            sourceWindow.OnMouseCallback += (@event, x, y, flags) =>
            {
                if (x < 0 || x >= srcCopy.Cols || y < 0 || y >= srcCopy.Rows)
                {
                    return;
                }

                if (@event == MouseEvent.LButtonUp || !flags.HasFlag(MouseEvent.FlagLButton))
                {
                    previousPoint = new Point(-1, -1);
                }
                else if (@event == MouseEvent.LButtonDown)
                {
                    previousPoint = new Point(x, y);
                }
                else if (@event == MouseEvent.MouseMove && flags.HasFlag(MouseEvent.FlagLButton))
                {
                    var pt = new Point(x, y);
                    if (previousPoint.X < 0)
                    {
                        previousPoint = pt;
                    }

                    Cv2.Line(img: markerMask, pt1: previousPoint, pt2: pt, color: Scalar.All(255), thickness: 5);
                    Cv2.Line(img: srcCopy, pt1: previousPoint, pt2: pt, color: Scalar.All(255), thickness: 5);
                    previousPoint      = pt;
                    sourceWindow.Image = srcCopy;
                }
            };

            var rnd = new Random();

            for (; ;)
            {
                var key = Cv2.WaitKey(0);

                if ((char)key == 27) // ESC
                {
                    break;
                }

                if ((char)key == 'r') // Reset
                {
                    markerMask = new Mat(markerMask.Size(), markerMask.Type(), s: Scalar.All(0));
                    src.CopyTo(srcCopy);
                    sourceWindow.Image = srcCopy;
                }

                if ((char)key == 'w' || (char)key == ' ') // Apply watershed
                {
                    Point[][]        contours;            //vector<vector<Point>> contours;
                    HierarchyIndex[] hierarchyIndexes;    //vector<Vec4i> hierarchy;
                    Cv2.FindContours(
                        markerMask,
                        out contours,
                        out hierarchyIndexes,
                        mode: RetrievalModes.CComp,
                        method: ContourApproximationModes.ApproxSimple);

                    if (contours.Length == 0)
                    {
                        continue;
                    }

                    var markers = new Mat(markerMask.Size(), MatType.CV_32S, s: Scalar.All(0));

                    var componentCount = 0;
                    var contourIndex   = 0;
                    while (contourIndex >= 0)
                    {
                        Cv2.DrawContours(
                            markers,
                            contours,
                            contourIndex,
                            color: Scalar.All(componentCount + 1),
                            thickness: -1,
                            lineType: LineTypes.Link8,
                            hierarchy: hierarchyIndexes,
                            maxLevel: int.MaxValue);

                        componentCount++;
                        contourIndex = hierarchyIndexes[contourIndex].Next;
                    }

                    if (componentCount == 0)
                    {
                        continue;
                    }

                    var colorTable = new List <Vec3b>();
                    for (var i = 0; i < componentCount; i++)
                    {
                        var b = rnd.Next(0, 255); //Cv2.TheRNG().Uniform(0, 255);
                        var g = rnd.Next(0, 255); //Cv2.TheRNG().Uniform(0, 255);
                        var r = rnd.Next(0, 255); //Cv2.TheRNG().Uniform(0, 255);

                        colorTable.Add(new Vec3b((byte)b, (byte)g, (byte)r));
                    }

                    Cv2.Watershed(src, markers);

                    var watershedImage = new Mat(markers.Size(), MatType.CV_8UC3);

                    // paint the watershed image
                    for (var i = 0; i < markers.Rows; i++)
                    {
                        for (var j = 0; j < markers.Cols; j++)
                        {
                            var idx = markers.At <int>(i, j);
                            if (idx == -1)
                            {
                                watershedImage.Set(i, j, new Vec3b(255, 255, 255));
                            }
                            else if (idx <= 0 || idx > componentCount)
                            {
                                watershedImage.Set(i, j, new Vec3b(0, 0, 0));
                            }
                            else
                            {
                                watershedImage.Set(i, j, colorTable[idx - 1]);
                            }
                        }
                    }

                    watershedImage = watershedImage * 0.5 + imgGray * 0.5;
                    Cv2.ImShow("Watershed Transform", watershedImage);
                    Cv2.WaitKey(1); //do events
                }
            }

            sourceWindow.Dispose();
            Cv2.DestroyAllWindows();
            src.Dispose();
        }
Exemple #21
0
 // Display the original video in a opencv window
 void UpdateWindow(Mat _image)
 {
     Cv2.Flip(_image, _image, FlipMode.X);
     //Cv2.ImShow("Copy video", _image);
     displayCount++;
 }
Exemple #22
0
        private void ProcImage(ref System.Drawing.Bitmap src, ref System.Drawing.Bitmap maskRef, out System.Drawing.Bitmap dst)
        {
            dst = null;
            Mat srcImg = BitmapConverter.ToMat(src);

            Cv2.CvtColor(srcImg, srcImg, ColorConversionCodes.BGRA2BGR);

            Mat srcMask = BitmapConverter.ToMat(maskRef);

            Cv2.CvtColor(srcMask, srcMask, ColorConversionCodes.BGRA2BGR);
            Cv2.ImWrite(@"C:\opencv\ImageProcessing\ImageProcessing\Images\manualmask.jpg", srcMask);
            Mat mask = new Mat(new OpenCvSharp.Size(src.Width, src.Height), MatType.CV_8UC1, 0);

            for (int i = 0; i < srcMask.Cols; i++)
            {
                for (int j = 0; j < srcMask.Rows; j++)
                {
                    // Seed the GrabCut mask (GrabCutClasses): 0 = background, 1 = foreground,
                    // 2 = probable background, 3 = probable foreground
                    if (i > srcMask.Cols / 4 && i < srcMask.Cols / 4 * 3 && j > srcMask.Rows / 4 && j < srcMask.Rows / 4 * 3)
                    {
                        mask.Set <byte>(j, i, 3); // central region: probable foreground
                    }
                    else
                    {
                        mask.Set <byte>(j, i, 0); // outer region: background
                    }

                    // in the reference mask a bright green channel (Item1) marks foreground
                    // and a bright red channel (Item2) marks background
                    Vec3b intensity = srcMask.Get <Vec3b>(j, i);
                    if (intensity.Item1 > 40)
                    {
                        mask.Set <byte>(j, i, 1);
                    }

                    if (intensity.Item2 > 40)
                    {
                        mask.Set <byte>(j, i, 0);
                    }
                }
            }

            //dilate process
            //Cv2.Dilate(srcImg, dstImg, new Mat());

            Mat bgdModel = new Mat();
            Mat fgdModel = new Mat();

            OpenCvSharp.Rect r = new OpenCvSharp.Rect(500, 500, (int)Width - 1000, (int)Height - 1000);
            Cv2.GrabCut(srcImg, mask, r, bgdModel, fgdModel, 3, GrabCutModes.InitWithMask);

            //for (int i = mask.Cols / 2 - 50; i < mask.Cols / 2 + 50; i++)
            //{
            //    for (int j = mask.Rows / 2 - 25; j < mask.Rows / 2 + 75; j++)
            //    {
            //        mask.Set<byte>(j, i, 1);
            //    }
            //}

            //Cv2.GrabCut(srcImg, mask, r, bgdModel, fgdModel, 1, GrabCutModes.InitWithMask);

            for (int i = 0; i < mask.Cols; i++)
            {
                for (int j = 0; j < mask.Rows; j++)
                {
                    byte e = mask.Get <byte>(j, i);
                    if (e == 0 || e == 2) // background or probable background
                    {
                        mask.Set <byte>(j, i, 0);
                    }
                    else if (e == 1) // foreground
                    {
                        mask.Set <byte>(j, i, 255);
                    }
                    else // probable foreground
                    {
                        mask.Set <byte>(j, i, 127);
                    }
                }
            }
            Mat res = srcImg.Clone();

            dst = BitmapConverter.ToBitmap(mask);
        }
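As a follow-up sketch, the 0/127/255 mask produced above can be used to cut the foreground out of the source image; this is an assumed continuation (the helper below is hypothetical), not something the original method does.

        // Hypothetical helper: apply a GrabCut result mask in the 0/127/255 form produced
        // above, keeping both definite (255) and probable (127) foreground pixels.
        static Mat ApplyGrabCutMask(Mat srcImg, Mat mask)
        {
            var binMask = new Mat();
            Cv2.Threshold(mask, binMask, 64, 255, ThresholdTypes.Binary);  // 127 and 255 both map to 255
            var foreground = new Mat(srcImg.Size(), srcImg.Type(), Scalar.All(0));
            srcImg.CopyTo(foreground, binMask);                            // copy only the masked pixels
            return foreground;
        }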
Exemple #23
0
        public static List <System.Drawing.Point> OpenCvFindPic(int left, int top, int width, int height, Bitmap S_bmp, Bitmap P_bmp, int method, out double matchDegree)
        {
            List <System.Drawing.Point> res = new List <System.Drawing.Point>();
            //Mat small = BitmapToMat(P_bmp);
            //Mat big = BitmapToMat(S_bmp);
            Mat    small = OpenCvSharp.Extensions.BitmapConverter.ToMat(P_bmp);
            Mat    big = OpenCvSharp.Extensions.BitmapConverter.ToMat(S_bmp);
            var    result = new Mat();
            var    minLoc = new OpenCvSharp.Point(0, 0);
            var    maxLoc = new OpenCvSharp.Point(0, 0);
            double max = 0, min = 0;

            matchDegree = 0;
            switch (method)
            {
            case 0:
                Cv2.MatchTemplate(big, small, result, TemplateMatchModes.SqDiff);
                Cv2.MinMaxLoc(result, out min, out max, out minLoc, out maxLoc);
                Debug.WriteLine($"SqDiff:{min} {max}");
                break;

            case 1:
                Cv2.MatchTemplate(big, small, result, TemplateMatchModes.SqDiffNormed);
                Cv2.MinMaxLoc(result, out min, out max, out minLoc, out maxLoc);
                matchDegree = 1 - min; // SqDiffNormed: 0 means a perfect match
                Debug.WriteLine($"SqDiffNormed:{min} {max}");
                break;

            case 2:
                // plain cross-correlation is not well suited to this matching task
                Cv2.MatchTemplate(big, small, result, TemplateMatchModes.CCorr);
                Cv2.MinMaxLoc(result, out min, out max, out minLoc, out maxLoc);
                Debug.WriteLine($"CCorr:{min} {max}");
                break;

            case 3:
                Cv2.MatchTemplate(big, small, result, TemplateMatchModes.CCorrNormed);
                Cv2.MinMaxLoc(result, out min, out max, out minLoc, out maxLoc);
                Debug.WriteLine($"CCorrNormed:{min} {max}");
                matchDegree = max; // CCorrNormed: 1 means a perfect match
                break;

            case 4:
                Cv2.MatchTemplate(big, small, result, TemplateMatchModes.CCoeff);
                Cv2.MinMaxLoc(result, out min, out max, out minLoc, out maxLoc);
                Debug.WriteLine($"CCoeff:{min} {max}");
                break;

            case 5:
            default:
                Cv2.MatchTemplate(big, small, result, TemplateMatchModes.CCoeffNormed);
                Cv2.MinMaxLoc(result, out min, out max, out minLoc, out maxLoc);
                matchDegree = (max + 1) / 2.0; // map CCoeffNormed from [-1, 1] to [0, 1]
                Debug.WriteLine($"CCoeffNormed:{min} {max}");
                break;
            }

            // for the squared-difference methods a lower score is better, so use the minimum location
            if (method == 0 || method == 1)
            {
                res.Add(new System.Drawing.Point(minLoc.X, minLoc.Y));
            }
            else
            {
                res.Add(new System.Drawing.Point(maxLoc.X, maxLoc.Y));
            }

            result.Dispose();
            big.Dispose();
            small.Dispose();
            return(res);
        }
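A brief usage sketch for OpenCvFindPic; the bitmap file names are placeholders and method 5 selects the normalized correlation-coefficient mode handled in the default case above.

        // Hypothetical call site: locate a template inside a screenshot and report
        // where it matched and how strong the match was.
        static void FindTemplateExample()
        {
            using (var screen   = new Bitmap("screenshot.png"))  // placeholder paths
            using (var template = new Bitmap("button.png"))
            {
                double matchDegree;
                List<System.Drawing.Point> hits =
                    OpenCvFindPic(0, 0, screen.Width, screen.Height, screen, template, 5, out matchDegree);

                Console.WriteLine($"Best match at {hits[0]} with degree {matchDegree:0.00}");
            }
        }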
Exemple #24
0
        private void CreateObjectMask(ref System.Drawing.Bitmap image, out System.Drawing.Bitmap mask)
        {
            double kThresh = 125;
            double hThresh = 55;
            double canny1  = 25;
            double canny2  = 75;

            mask = null;
            Mat src = BitmapConverter.ToMat(image);

            Mat src_kirsch = BitmapConverter.ToMat(image.KirschFilter());

            Mat kirsch_gray = new Mat();

            Cv2.CvtColor(src_kirsch, kirsch_gray, ColorConversionCodes.RGB2GRAY);

            Cv2.ImWrite(@"C:\temp\kirsch_gray.jpg", kirsch_gray);
            Mat kirsch_threshold = new Mat();

            Cv2.Threshold(kirsch_gray, kirsch_threshold, kThresh, 255, ThresholdTypes.Binary);
            Cv2.ImWrite(@"C:\temp\kirsch_threshold.jpg", kirsch_threshold);

            Mat[] contours;
            List <OpenCvSharp.Point> hierarchy;
            List <Mat> hulls;
            Mat        morph_element = Cv2.GetStructuringElement(MorphShapes.Ellipse, new OpenCvSharp.Size(2, 2), new OpenCvSharp.Point(1, 1));

            #region morphology

            Mat kirsch_threshold_copy = new Mat();
            kirsch_threshold.CopyTo(kirsch_threshold_copy);

            int hullCount = 0, numLoops = 0;
            do
            {
                numLoops++;

                Mat kirsch_morph = kirsch_threshold_copy.MorphologyEx(MorphTypes.Gradient, morph_element);
                Cv2.ImWrite(@"C:\temp\kirsch_morph" + numLoops + ".jpg", kirsch_morph);
                hierarchy = new List <OpenCvSharp.Point>();
                Cv2.FindContours(kirsch_morph, out contours, OutputArray.Create(hierarchy),
                                 RetrievalModes.External, ContourApproximationModes.ApproxSimple, new OpenCvSharp.Point(0, 0));

                hulls = new List <Mat>();
                for (int j = 0; j < contours.Length; j++)
                {
                    Mat hull = new Mat();
                    Cv2.ConvexHull(contours[j], hull);
                    hulls.Add(hull);
                }

                Mat drawing = Mat.Zeros(src.Size(), MatType.CV_8UC1);
                Cv2.DrawContours(drawing, hulls, -1, Scalar.White);

                Cv2.ImWrite(@"C:\temp\drawing" + numLoops + ".jpg", drawing);
                if (hulls.Count != hullCount && numLoops < 100)
                {
                    hullCount             = hulls.Count;
                    kirsch_threshold_copy = drawing;
                }
                else
                {
                    break;
                }
            } while (true);

            #endregion

            if (numLoops >= 100)
            {
                throw new Exception("Could not find hull");
            }

            #region bestHull
            //try and filter out dust near to stone
            double largestArea = hulls.Max(m => Cv2.ContourArea(m));
            var    bestHulls   = hulls.Where(m => Cv2.ContourArea(m) == largestArea).ToList();

            Mat hulls_mask = Mat.Zeros(src.Size(), MatType.CV_8UC1);
            Cv2.DrawContours(hulls_mask, bestHulls, -1, Scalar.White, -1);
            Cv2.ImWrite(@"C:\temp\hulls_mask.jpg", hulls_mask);

            //hulls_mask is the convex hull of outline, now look for clefts
            Cv2.Threshold(kirsch_gray, kirsch_threshold, hThresh, 255, ThresholdTypes.Binary);
            Mat kirsch_mask = Mat.Zeros(kirsch_threshold.Size(), kirsch_threshold.Type());
            kirsch_threshold.CopyTo(kirsch_mask, hulls_mask);
            Cv2.ImWrite(@"C:\temp\kirsch_mask.jpg", kirsch_mask);
            Mat kirsch_mask_canny = new Mat();
            Cv2.Canny(kirsch_mask, kirsch_mask_canny, canny1, canny2, 3);
            Cv2.ImWrite(@"C:\temp\kirsch_mask_canny.jpg", kirsch_mask_canny);
            morph_element = Cv2.GetStructuringElement(MorphShapes.Ellipse, new OpenCvSharp.Size(5, 5), new OpenCvSharp.Point(2, 2));
            Mat kirsch_filled = new Mat();
            Cv2.Dilate(kirsch_mask_canny, kirsch_filled, morph_element);
            Cv2.Dilate(kirsch_filled, kirsch_filled, morph_element);
            Cv2.Erode(kirsch_filled, kirsch_filled, morph_element);
            Cv2.Erode(kirsch_filled, kirsch_filled, morph_element);
            Cv2.ImWrite(@"C:\temp\kirsch_filled.jpg", kirsch_filled);
            hierarchy = new List <OpenCvSharp.Point>();
            Cv2.FindContours(kirsch_filled, out contours, OutputArray.Create(hierarchy),
                             RetrievalModes.External, ContourApproximationModes.ApproxSimple, new OpenCvSharp.Point(0, 0));

            #endregion

            hulls_mask = Mat.Zeros(src.Size(), MatType.CV_8UC1);
            Cv2.DrawContours(hulls_mask, contours, -1, Scalar.White, -1);
            Cv2.ImWrite(@"C:\temp\hulls_mask_final.jpg", hulls_mask);
            Cv2.Erode(hulls_mask, hulls_mask, morph_element);
            Cv2.Erode(hulls_mask, hulls_mask, morph_element);

            Mat fImage = new Mat();
            src.CopyTo(fImage, hulls_mask);
            Cv2.ImWrite(@"C:\temp\fImage.png", fImage);
            mask = BitmapConverter.ToBitmap(fImage);

            // contrast calculation
            double contrast, L, a, b, H, C;
            CalculateContrast(ref src, ref hulls_mask, out L, out a, out b, out contrast, out H, out C, ContrastType.RMS);

            Console.WriteLine("contrast: {0}, L: {1}, a: {2}, b: {3}, C: {4}, H: {5}", contrast, L, a, b, C, H);

            //hulls_mask = null;

            CalcHistogram(ref src, ref hulls_mask, out mask);
        }
Exemple #25
0
 /// <summary>
 /// Display image in new window using OpenCV.
 /// </summary>
 /// <param name="image">
 /// Target image in OpenCV matrix format.
 /// </param>
 /// <param name="title">
 /// Window title.
 /// </param>
 public static void DisplayImage(Mat image, string title)
 {
     Cv2.ImShow(title, image);
 }
Exemple #26
0
        static void Main(string[] args)
        {
            Mat src = Cv2.ImRead("OpenCV_Logo.png", ImreadModes.ReducedColor2);

            Console.WriteLine(src);
        }
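A small sketch illustrating what ImreadModes.ReducedColor2 does: the decoded image comes back as 3-channel BGR at half the original width and height. The comparison helper and file name are placeholders.

        // Hypothetical comparison: load the same file normally and with ReducedColor2,
        // then print both sizes; the reduced image is half as wide and half as tall.
        static void CompareLoadSizes()
        {
            using (Mat full = Cv2.ImRead("OpenCV_Logo.png", ImreadModes.Color))
            using (Mat half = Cv2.ImRead("OpenCV_Logo.png", ImreadModes.ReducedColor2))
            {
                Console.WriteLine($"full: {full.Size()}  reduced: {half.Size()}");
            }
        }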
Exemple #27
0
        static void Main(string[] args)
        {
            var host     = ConfigurationManager.AppSettings["host"];
            var username = ConfigurationManager.AppSettings["username"];
            var password = ConfigurationManager.AppSettings["password"];
            var database = ConfigurationManager.AppSettings["database"];

            var connectionString = $"Host={host};Username={username};Password={password};Database={database}";

            using (var connection = new NpgsqlConnection(connectionString))
            {
                connection.Open();

                using (var command = new NpgsqlCommand("SELECT name FROM sets", connection))
                {
                    using (var reader = command.ExecuteReader())
                    {
                        while (reader.Read())
                        {
                            Console.WriteLine(reader.GetString(0));
                        }
                    }
                }
            }

            Console.Read();

            var    barcodeReader = CreateBarcodeReader(BarcodeFormat.ITF, autoRotate: true, tryInverted: true, tryHarder: true);
            string barcode       = null;

            var tesseractEngine = new TesseractEngine("./tessdata", "eng", EngineMode.Default);

            var imagePath   = args[0];
            var sourceImage = new Mat(imagePath, ImreadModes.Color);

            ShowImage(sourceImage, "source");

            var thresholdedImage = GetRedThresholdedImage(sourceImage);

            ShowImage(thresholdedImage, "thresholded");

            for (int erodeIterations = 0; erodeIterations <= 4 && barcode == null; erodeIterations++)
            {
                var contours = GetCountours(thresholdedImage, erodeIterations);
                ShowContours(sourceImage, contours, new Scalar(0d, 255d, 0d), "contours " + erodeIterations);
                foreach (var contour in contours)
                {
                    if (barcode != null)
                    {
                        break;
                    }
                    using (var labelImage = GetCorrectedRectangle(sourceImage, contour))
                    {
                        var labelSize = labelImage.Size();
                        //if (contours.Length > 50 && labelSize.Height > 10 && labelSize.Width > 10)
                        if (labelSize.Height >= 50 && labelSize.Width >= labelSize.Height * 2.5)
                        {
                            //resize
                            while (labelImage.Size().Height < 250)
                            {
                                Cv2.Resize(labelImage, labelImage, new OpenCvSharp.Size(0d, 0d), 2d, 2d, InterpolationFlags.Lanczos4);
                            }

                            //sharpen
                            var tempMat = new Mat();
                            Cv2.GaussianBlur(labelImage, tempMat, new OpenCvSharp.Size(0d, 0d), 3d);
                            Cv2.AddWeighted(labelImage, 1.5d, tempMat, -0.5d, 0d, labelImage);

                            ShowImage(labelImage, "label " + contour.Length + " " + new Random().Next());

                            OCRText(tesseractEngine, labelImage);

                            using (var barcodeImage = GetBarcodeImage(labelImage))
                            {
                                //ShowImage(barcodeImage, "barcode " + new Random().Next());
                                barcode = barcode ?? DecodeBarcode(barcodeReader, barcodeImage);
                            }
                        }
                    }
                }
            }

            Console.WriteLine("Barcode: " + barcode);

            Cv2.WaitKey();
        }
Exemple #28
0
 // Returns the per-channel standard deviation of the image; the mean value is discarded.
 public Scalar MeanStdDev(Mat image)
 {
     Cv2.MeanStdDev(image, out _, out var meanStdDev);
     return(meanStdDev);
 }
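A small usage sketch: the per-channel standard deviation returned above can flag nearly uniform frames; IsNearlyUniform and its threshold are assumptions, not part of the original code.

 // Hypothetical check built on MeanStdDev: treat a frame as (almost) blank when every
 // channel's standard deviation falls below a small, arbitrarily chosen threshold.
 public bool IsNearlyUniform(Mat image, double threshold = 2.0)
 {
     Scalar stdDev = MeanStdDev(image);
     return stdDev.Val0 < threshold && stdDev.Val1 < threshold && stdDev.Val2 < threshold;
 }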
Exemple #29
0
        public Mat Iso11146(Mat img, Mat dst)
        {
            Cv2.Resize(img, img, new OpenCvSharp.Size(960, 687), 0, 0, InterpolationFlags.Linear);
            Cv2.Resize(dst, dst, new OpenCvSharp.Size(960, 687), 0, 0, InterpolationFlags.Linear);

            OpenCvSharp.Point[][] contours;
            HierarchyIndex[]      hierarchy;
            Cv2.Threshold(img, img, 50, 255, ThresholdTypes.Binary);
            Cv2.FindContours(img, out contours, out hierarchy, RetrievalModes.External, ContourApproximationModes.ApproxTC89L1);

            foreach (OpenCvSharp.Point[] p in contours)
            {
                if (Cv2.ContourArea(p) < 1000)
                {
                    continue;
                }

                Moments moments = Cv2.Moments(p, true);

                if (moments.M00 != 0)
                {
                    int cX  = (int)(moments.M10 / moments.M00);
                    int cY  = (int)(moments.M01 / moments.M00);
                    int cX2 = (int)(moments.Mu20 / moments.M00);
                    int cXY = (int)(moments.Mu11 / moments.M00);
                    int cY2 = (int)(moments.Mu02 / moments.M00);

                    double a  = Math.Pow(((cX2 + cY2) + 2 * Math.Abs(cXY)), 0.5);
                    int    dX = (int)(2 * Math.Pow(2, 0.5) * Math.Pow(((cX2 + cY2) + 2 * Math.Abs(cXY)), 0.5));
                    int    dY = (int)(2 * Math.Pow(2, 0.5) * Math.Pow(((cX2 + cY2) - 2 * Math.Abs(cXY)), 0.5));

                    double t;
                    if ((cX2 - cY2) != 0)
                    {
                        t = 2 * cXY / (cX2 - cY2);
                    }
                    else
                    {
                        t = 0;
                    }

                    double            theta  = 0.5 * Math.Atan(t) * 180 / Math.PI; // principal-axis angle in degrees
                    OpenCvSharp.Point center = new OpenCvSharp.Point(cX, cY);
                    OpenCvSharp.Size  axis   = new OpenCvSharp.Size(dX, dY);
                    Cv2.Circle(dst, cX, cY, 1, Scalar.Black);
                    if (trackingLD1)
                    {
                        if (tracking == false)
                        {
                            tracking = true;

                            if ((cX - (axis_x + axis_scale / 2)) > 10)
                            {
                                itc.getTecCurrSetpoint(0, out temp);
                                itc.setTecCurrSetpoint(temp - 0.005);
                            }
                            else if ((cX - (axis_x + axis_scale / 2)) < -10)
                            {
                                itc.getTecCurrSetpoint(0, out temp);
                                itc.setTecCurrSetpoint(temp + 0.005);
                            }
                        }
                    }

                    if (dX > 0 && dY > 0)
                    {
                        Cv2.Ellipse(dst, center, axis, theta, 0, 360, Scalar.White);
                    }
                }
            }

            return(dst);
        }
Exemple #30
0
        private Mat CorrectDistortion(Mat imgWhiteB, List <Point2d> PTs, out Point2d[] Q4, bool DispB = false)
        {
            Center = new Point2d(PTs.Average(P => P.X), PTs.Average(P => P.Y));
            List <PointEx> PTda = PTs.ToList().ConvertAll(P => new PointEx(P, Center));

            PTda.Sort((A, B) => (int)((A.Angle - B.Angle) * 100000.0));
            PTda.AddRange(PTda);
            //if(DispB){ for(int k=0; k<PTda.Count(); k++) WriteLine("k:{0} PTda:{1}",k,PTda[k]); }
            List <List <PointEx> > PTda4 = _SepareteLins(PTda);    //BLTR

            if (PTda4 == null || PTda4.Count < 4)
            {
                Q4 = null; return(null);
            }
#if false
            if (DispB)
            {
                Mat      resImg = imgWhiteB.CvtColor(ColorConversionCodes.GRAY2BGR); //Gray->Color conversion
                Scalar[] clrLst = { Scalar.Red, Scalar.Blue, Scalar.Yellow, Scalar.Green };
                for (int k = 0; k < 4; k++)
                {
                    var clr = clrLst[k];    //(k%2==0)? Scalar.Red: Scalar.Blue;
                    PTda4[k].ForEach(Q => resImg.Circle(Q.Pt, 5, clr, 5));
                }
                using (new Window("◆1◆resImg", WindowMode.KeepRatio, resImg)){ Cv2.WaitKey(0); }
            }
#endif
            int nSize   = 1;
            var FRegTop = new FuncApproximation(nSize, PTda4[2], CalculusB: true);
            var FRegBtm = new FuncApproximation(nSize, PTda4[0], CalculusB: true);
            var FRegLft = new FuncApproximation(nSize, PTda4[1], CalculusB: true);
            var FRegRgt = new FuncApproximation(nSize, PTda4[3], CalculusB: true);

            Point2d[] PT4 = new Point2d[4];
            var       FM = new RegFuncUtility(ImgCheck: imgWhiteB);
            Point2d   P00 = new Point2d(0, 0), PGG = new Point2d(imgWhiteB.Width, imgWhiteB.Height);

            PT4[0] = FM.IntersectionPoint(FRegTop, FRegLft, P00, PGG, DispB: false);
            PT4[2] = FM.IntersectionPoint(FRegBtm, FRegLft, P00, PGG, DispB: false);
            PT4[1] = FM.IntersectionPoint(FRegTop, FRegRgt, P00, PGG, DispB: false);
            PT4[3] = FM.IntersectionPoint(FRegBtm, FRegRgt, P00, PGG, DispB: false);

            if (DispB)
            {
                Mat      resImg = imgWhiteB.CvtColor(ColorConversionCodes.GRAY2BGR); //Gray->Color
                Scalar[] clrLst = { Scalar.Red, Scalar.Blue, Scalar.Yellow, Scalar.Green };
                for (int k = 0; k < 4; k++)
                {
                    var clr = clrLst[k];    //(k%2==0)? Scalar.Red: Scalar.Blue;
                    PTda4[k].ForEach(Q => resImg.Circle((Point)Q.Pt, 5, clr, 5));
                }
                //using( new Window("#1#resImg",WindowMode.KeepRatio,resImg) ){ Cv2.WaitKey(0); }

                for (int k = 0; k < 4; k++)
                {
                    var clr = clrLst[k];
                    PTda4[k].ForEach(Q => resImg.Circle((Point)PT4[k], 10, clr, 5));
                }
                using (new Window("#2#resImg", WindowMode.KeepRatio, resImg)){ Cv2.WaitKey(0); }
            }

            FRegTop.CreateFunction(PT4[0], 0, 32 * 9);
            FRegBtm.CreateFunction(PT4[2], 0, 32 * 9);
            //FRegLft.CreateFunction(PT4[0],0,32*9);
            //FRegRgt.CreateFunction(PT4[2],0,32*9);

            Mat MatCor = new Mat(352, 352, MatType.CV_8UC1, Scalar.White);
            unsafe {
                byte * S = imgWhiteB.DataPointer;
                byte * D = MatCor.DataPointer;
                int    W = imgWhiteB.Width, H = imgWhiteB.Height, W2 = MatCor.Width;
                double per = 1.0 / (32 * 9);
                for (int x = 0; x <= 32 * 9 + 3; x++)
                {
                    double xd  = x;
                    Point  ptT = (Point)FRegTop.EstimateL2Pt(xd);
                    Point  ptB = (Point)FRegBtm.EstimateL2Pt(xd);
                    for (int y = 0; y <= 32 * 9 + 3; y++)
                    {
                        Point pt = (Point)_2_Get_InterPolation(ptT, ptB, per * y);
                        if (pt.X < 0 || pt.X >= W || pt.Y < 0 || pt.Y >= H)
                        {
                            ////            using( new Window("#test# imgWhiteB",WindowMode.KeepRatio,imgWhiteB) ){ Cv2.WaitKey(0); }
                            Q4 = null;
                            return(null);
                        }
                        D[(y + 32) * W2 + x + 32] = S[pt.Y * W + pt.X];
                    }
                }
            }

            Q4    = new Point2d[4];
            Q4[0] = new Point2d(32, 32);
            Q4[1] = new Point2d(320, 32);
            Q4[2] = new Point2d(32, 320);
            Q4[3] = new Point2d(320, 320);

            if (DispB)
            {
                new Window("MatCor", WindowMode.KeepRatio, MatCor);
            }
            return(MatCor);
        }