Example 1
        private bool IsImageVisible(UiElement element, float confidence = 0.80f)
        {
            using var uielement = GrabElement(element);

            if (Settings.SaveImages)
            {
                Directory.CreateDirectory("logs");
                uielement.Save($"logs\\{element.Id}.bmp");
            }

            using var uiElementMat     = uielement.ToMat();
            using var templateMat      = Cv2.ImDecode(element.Image, ImreadModes.AnyColor);
            using var uiElementMatGray = new Mat();
            using var templateMatGray  = new Mat();


            var size       = templateMat.Size();
            // Size takes (width, height); scale both images to a common 2x template size
            var targetSize = new OpenCvSharp.Size(size.Width * 2, size.Height * 2);

            Cv2.CvtColor(uiElementMat, uiElementMatGray, ColorConversionCodes.BGR2GRAY);
            Cv2.CvtColor(templateMat, templateMatGray, ColorConversionCodes.BGR2GRAY);
            using var uiElementMatGrayScaled = new Mat();
            Cv2.Resize(uiElementMatGray, uiElementMatGrayScaled, targetSize);
            using var scaledTemplateMat = new Mat();
            Cv2.Resize(templateMatGray, scaledTemplateMat, targetSize);

            using var result = new Mat();
            // Both inputs were resized to targetSize, so the match result is a single 1x1 correlation score
            Cv2.MatchTemplate(uiElementMatGrayScaled, scaledTemplateMat, result, TemplateMatchModes.CCoeffNormed);
            Cv2.MinMaxLoc(result, out _, out var maxVal, out _, out _);

            Overlay?.ShowIsVisibile(element, maxVal >= confidence, maxVal);

            return(maxVal >= confidence);
        }
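
A minimal call-site sketch; the elements registry and Click helper are hypothetical stand-ins for the surrounding automation framework:

        // Hypothetical usage; "OkButton" and Click(...) are illustrative only
        var okButton = elements["OkButton"];
        if (IsImageVisible(okButton, confidence: 0.85f))
        {
            Click(okButton);
        }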
Example 2
        // Check whether the rect stays inside the border margins
        public static bool NotOnBorder(Rect rectToJudge,
                                       OpenCvSharp.Size borderSize,
                                       int leftLimit   = 0,
                                       int rightLimit  = 0,
                                       int topLimit    = 0,
                                       int bottomLimit = 0)
        {
            float leftPercent   = leftLimit / 100f;
            float rightPercent  = rightLimit / 100f;
            float topPercent    = topLimit / 100f;
            float bottomPercent = bottomLimit / 100f;
            float widthPercent  = 1f - leftPercent - rightPercent;
            float heightPercent = 1f - topPercent - bottomPercent;
            int   xLimit        = (int)(borderSize.Width * leftPercent);
            int   yLimit        = (int)(borderSize.Height * topPercent);
            int   widthLimit    = (int)(borderSize.Width * widthPercent);
            int   heightLimit   = (int)(borderSize.Height * heightPercent);
            Rect  rectLimit     = new Rect();

            rectLimit.X      = xLimit;
            rectLimit.Y      = yLimit;
            rectLimit.Width  = widthLimit;
            rectLimit.Height = heightLimit;
            return(rectLimit.Contains(rectToJudge));
        }
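
The limits are percentages of the frame (note the division by 100 above); a usage sketch that treats the outer 5% on each side as border:

        // Sketch: check that a detection stays clear of a 5% margin in a 640x480 frame
        var frameSize = new OpenCvSharp.Size(640, 480);
        var detection = new Rect(100, 100, 50, 50);
        bool inside   = NotOnBorder(detection, frameSize,
                                    leftLimit: 5, rightLimit: 5, topLimit: 5, bottomLimit: 5);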
Example 3
 public CharImage(string fileName, string name, PlateChar plateChar, OpenCvSharp.Size matSize)
 {
     this.FileName  = fileName;
     this.Name      = name;
     this.PlateChar = plateChar;
     this.MatSize   = matSize;
 }
Example 4
        /// <summary>
        /// Gets the size at which the image will actually be displayed.
        /// The displayed size changes with the PictureBox size mode, so use this method to obtain the exact size.
        /// </summary>
        /// <param name="streamingType">Streaming type</param>
        /// <param name="frame">Frame to be displayed</param>
        /// <returns>Size of the displayed frame</returns>
        public OpenCvSharp.Size GetDisplaySize(StreamingType streamingType, Mat frame)
        {
            if (this.Visualizer.StreamingType == streamingType)
            {
                this._cachedDisplaySize = frame.Size();
            }

            if (this.defaultView.streamingFrameBox.SizeMode == PictureBoxSizeMode.CenterImage)
            {
                if (this.Blender.Enabled)
                {
                    this._cachedDisplaySize = this.Blender.Size;
                }

                return(this._cachedDisplaySize);
            }
            else if (this.defaultView.streamingFrameBox.SizeMode == PictureBoxSizeMode.StretchImage)
            {
                return(new OpenCvSharp.Size(this.defaultView.streamingFrameBox.Width, this.defaultView.streamingFrameBox.Height));
            }
            else
            {
                var aspect = this._cachedDisplaySize.Width / (float)this._cachedDisplaySize.Height;
                if (this._cachedDisplaySize.Width > this._cachedDisplaySize.Height)
                {
                    // Fit to the box width and derive the height from the aspect ratio
                    return(new OpenCvSharp.Size(this.defaultView.streamingFrameBox.Width, this.defaultView.streamingFrameBox.Width / aspect));
                }
                else
                {
                    // Fit to the box height and derive the width from the aspect ratio
                    return(new OpenCvSharp.Size(this.defaultView.streamingFrameBox.Height * aspect, this.defaultView.streamingFrameBox.Height));
                }
        }
Example 5
        public Form1()
        {
            InitializeComponent();

            // Read an image from a file.
            Mat src = Cv2.ImRead("lena.jpg");

            // Prepare the processed image.
            OpenCvSharp.Size size = new OpenCvSharp.Size(src.Cols, src.Rows);
            Mat dst = new Mat(size, OpenCvSharp.MatType.CV_8UC3);

            // Run the Sobel filter with dx = 1, dy = 1 (mixed derivative).
            Cv2.Sobel(src, dst, src.Depth(), 1, 1);

            // Display the original image.
            Cv2.ImShow("src", src);

            // Display the processed image.
            OpenCvSharp.Cv2.ImShow("dst", dst);

            // Wait for key input.
            OpenCvSharp.Cv2.WaitKey(0);

            Environment.Exit(0);
        }
Example 6
        // Read the camera calibration data
        private void ucBtnExt_readCamParm_BtnClick(object sender, EventArgs e)
        {
            rootDir     = Directory.GetParent(Environment.CurrentDirectory); // locate the bin directory
            camParmyaml = rootDir.Parent.Parent.FullName + @"\CamParm.yaml"; // build the calibration file path
            using (var fs = new FileStorage(camParmyaml, FileStorage.Mode.Read))
            {
                ShareData.leftCamIntrinsic  = fs["leftCamIntrinsic"].ReadMat();
                ShareData.leftDistCoeffs    = fs["leftDistCoeffs"].ReadMat();
                ShareData.rightCamIntrinsic = fs["rightCamIntrinsic"].ReadMat();
                ShareData.rightDistCoeffs   = fs["rightDistCoeffs"].ReadMat();
                ShareData.R = fs["R"].ReadMat();
                ShareData.T = fs["T"].ReadMat();
                ShareData.E = fs["E"].ReadMat();
                ShareData.F = fs["F"].ReadMat();
            }
            // Recompute the rectification using images captured at 640 x 480 resolution
            OpenCvSharp.Size imgSize = new OpenCvSharp.Size(640, 480);
            // Note: the arguments passed here must follow the expected format
            double alpha = 0;       // alpha = 0: keep only valid pixels

            Cv2.StereoRectify(ShareData.leftCamIntrinsic, ShareData.leftDistCoeffs, ShareData.rightCamIntrinsic,
                              ShareData.rightDistCoeffs, imgSize, ShareData.R, ShareData.T,
                              ShareData.R1, ShareData.R2, ShareData.P1, ShareData.P2, ShareData.Q, StereoRectificationFlags.ZeroDisparity,
                              alpha, imgSize, out ShareData.PixROI1, out ShareData.PixROI2);
            // Compute the left/right rectification maps, stored in fixed-point form
            Cv2.InitUndistortRectifyMap(ShareData.leftCamIntrinsic, ShareData.leftDistCoeffs, ShareData.R1, ShareData.P1,
                                        imgSize, MatType.CV_16SC2, ShareData.leftMap1, ShareData.leftMap2);
            Cv2.InitUndistortRectifyMap(ShareData.rightCamIntrinsic, ShareData.rightDistCoeffs, ShareData.R2, ShareData.P2,
                                        imgSize, MatType.CV_16SC2, ShareData.rightMap1, ShareData.rightMap2);
            // Set the "parameters loaded" flag
            this.IS_READPARM = true;
            ShareData.Log    = "[msg] Camera parameters loaded and rectification computed";
        }
Example 7
        public static void compute_hog_test(List <Mat> img_lst, OpenCvSharp.Size size, int labels, string path)
        {
            HOGDescriptor hog = new HOGDescriptor();

            hog.WinSize = size;
            Mat gray = new Mat();

            float[]      descriptors;
            int          descriptors_size = 0;
            StreamWriter sw = new StreamWriter(path, false, Encoding.UTF8);

            for (int i = 0; i < img_lst.Count; i++) // loop over each image
            {
                // Each output line is "label index:value index:value ..." (LIBSVM format)
                string line = labels.ToString();
                sw.Write(line);

                Cv2.CvtColor(img_lst[i], gray, ColorConversionCodes.RGB2GRAY);
                descriptors = hog.Compute(gray);

                descriptors_size = descriptors.Length;
                // HOG descriptors are floats, so the Mat must be CV_32FC1 (CV_8UC1 would corrupt the values)
                Mat Mat_descriptor = new Mat(descriptors_size, 1, MatType.CV_32FC1);

                for (int a = 0; a < descriptors.Length; a++)
                {
                    Mat_descriptor.Set <float>(a, 0, descriptors[a]);
                    float  value = Mat_descriptor.Get <float>(a, 0);
                    string lines = " " + (a + 1) + ":" + value;
                    sw.Write(lines);
                }
                sw.WriteLine();
            }
            sw.Close();
        }
Example 8
        public ImageSource ReshapePicture(string picture, int width, int length)
        {
            string str2 = System.IO.Directory.GetCurrentDirectory();
            string p    = str2 + "\\data.txt";
            // Record the requested dimensions
            FileInfo     fileInfo = new FileInfo(p);
            FileStream   fs       = new FileStream(fileInfo.FullName, FileMode.OpenOrCreate);
            StreamWriter sw       = new StreamWriter(fs);

            // Write the dimensions
            sw.Write(width);
            sw.Write('\n');
            sw.Write(length);
            // Flush the buffer
            sw.Flush();
            // Close the streams
            sw.Close();
            fs.Close();
            // Resize the image
            var img_origin = Cv2.ImRead(picture, ImreadModes.Color);
            //Mat img_origin = new Mat(picture, ImreadModes.Color);
            Mat dst = new Mat();

            OpenCvSharp.Size size = new OpenCvSharp.Size(length, width);
            Cv2.Resize(img_origin, dst, size);
            // ImRead with ImreadModes.Color yields 3-channel BGR data, so use a 24bpp pixel format
            Bitmap bitmap = new Bitmap(dst.Cols, dst.Rows, (int)dst.Step(), System.Drawing.Imaging.PixelFormat.Format24bppRgb, dst.Data);

            bitmap.Save(str2 + "\\data.bmp");
            ImageSourceConverter imageSourceConverter = new ImageSourceConverter();
            ImageSource          imageSource          = (ImageSource)imageSourceConverter.ConvertFrom(bitmap);

            //ImageSource imageSource = (ImageSource)
            return(imageSource);
        }
Example 9
        private void PicZoom()
        {
            Rectangle cropRect1 = new Rectangle(xOffset, yOffset, imageWidth, imageHeight);
            Bitmap    src1      = image_in as Bitmap;

            picIn.Image = src1.Clone(cropRect1, src1.PixelFormat);

            Mat  image_opencv   = BitmapConverter.ToMat(new Bitmap(image_in));
            Size dsize          = new Size(0, 0);
            int  fx             = zoom;
            int  fy             = zoom;
            Mat  image_filtered = image_opencv.Resize(dsize, fx, fy, interpolation);

            Mat blur      = new Mat();
            Mat image_usm = new Mat();

            Cv2.GaussianBlur(image_filtered, blur, new Size(0, 0), 25);
            Cv2.AddWeighted(image_filtered, alpha, blur, -1 * beta, gamma, image_usm);

            // Use the unsharp-masked result as the output (image_usm was computed but never used)
            MemoryStream ms_out = new MemoryStream(image_usm.ToBytes());

            image_out = Image.FromStream(ms_out);

            Rectangle cropRect2 = new Rectangle(xOffset * zoom, yOffset * zoom, image_out.Width / zoom, image_out.Height / zoom);
            Bitmap    src2      = image_out as Bitmap;

            picOut.Image = src2.Clone(cropRect2, src2.PixelFormat);
        }
Example 10
        public static void MeanValueCompute_test(List <Mat> img_lst, OpenCvSharp.Size size, int labels, string path)
        {
            Mat[]        RGB = new Mat[3];
            StreamWriter sw2 = new StreamWriter(path, false, Encoding.UTF8);

            for (int i = 0; i < img_lst.Count; i++) // loop over each image
            {
                RGB = Cv2.Split(img_lst[i]);
                string line = labels.ToString();
                sw2.Write(line);

                //if (InforPublic.canny == true)
                //{
                //    Cv2.Canny(gray, gray, InforPublic.Th1, InforPublic.Th2);
                //}
                //if (InforPublic.median == true)
                //{
                //    Cv2.MedianBlur(gray, gray, InforPublic.Kernel_median);
                //    // ListImage.Add(img);
                //}
                for (int a = 0; a < RGB.Length; a++)
                {
                    double mean  = CalMeanValue(RGB[a]);
                    string lines = " " + (a + 1) + ":" + mean;
                    sw2.Write(lines);
                }
                sw2.WriteLine();
            }
            sw2.Close();
        }
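
CalMeanValue is not shown in this example; a plausible helper, assuming it simply wraps Cv2.Mean over one channel:

        // Assumed helper: mean intensity of a single-channel Mat via Cv2.Mean
        private static double CalMeanValue(Mat channel)
        {
            return Cv2.Mean(channel).Val0;
        }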
Example 11
        private void imageshow_MouseLeftButtonDown(object sender, MouseButtonEventArgs e)
        {
            CoordinateTransformation dd = new CoordinateTransformation();

            Mat    rgb     = null;
            string rgbpath = System.IO.Path.Combine(Global.ImageDataPath, "rgb_" + pCurrentImage.ToString("D5") + ".jpg");

            rgb = Cv2.ImRead(rgbpath);

            OpenCvSharp.Size imagesize = rgb.Size();



            int inputx = ((int)e.GetPosition(imageshow).X * imagesize.Width) / ((int)imageshow.ActualWidth);
            int inputy = ((int)e.GetPosition(imageshow).Y * imagesize.Height) / ((int)imageshow.ActualHeight);



            pt = dd.getvalue(pCurrentImage, inputx, inputy);
            //   Console.WriteLine(pt);
            if (pt != new Point3D(0, 0, 0))
            {
                VIewer3D.Instance.ClickedCoordinate();
            }
        }
Example 12
        private static Mat DetectFace(CascadeClassifier cascade, Mat photo)
        {
            Mat result;

            //Mat src = photo;
            using (var src = photo) // note: this disposes the caller's photo Mat when done
                using (var gray = new Mat())
                {
                    result = src.Clone();
                    Cv2.CvtColor(src, gray, ColorConversionCodes.BGR2GRAY);

                    // Detect faces
                    Rect[] faces = cascade.DetectMultiScale(
                        gray, 1.08, 2, HaarDetectionType.ScaleImage, new OpenCvSharp.Size(30, 30));

                    // Render all detected faces
                    foreach (Rect face in faces)
                    {
                        var center = new OpenCvSharp.Point
                        {
                            X = (int)(face.X + face.Width * 0.5),
                            Y = (int)(face.Y + face.Height * 0.5)
                        };
                        var axes = new OpenCvSharp.Size
                        {
                            Width  = (int)(face.Width * 0.5),
                            Height = (int)(face.Height * 0.5)
                        };
                        Cv2.Ellipse(result, center, axes, 0, 0, 360, new Scalar(255, 0, 255), 4);
                    }

                    return(result);
                }
        }
Example 13
        private CvSize GetScaledCvSize(CvSize size, double scale)
        {
            // CvSize stores integer dimensions, so round the scaled values
            var wid = (int)Math.Round(size.Width * scale);
            var hei = (int)Math.Round(size.Height * scale);

            return(new CvSize(wid, hei));
        }
Example 14
        private Mat findFace(Mat srcMat)
        {
            using (CascadeClassifier detectFace = new CascadeClassifier(System.Windows.Forms.Application.StartupPath + "\\opencv_data\\resource\\opencv_data\\haarcascade_frontalface_alt.xml"))
            {
                Mat result = new Mat();
                Mat output = new Mat();
                Cv2.CvtColor(srcMat, result, ColorConversionCodes.BGR2GRAY);

                int    gr_thr     = 4;
                double scale_step = 1.1;

                OpenCvSharp.Size min_obj_sz = new OpenCvSharp.Size(48, 96);
                OpenCvSharp.Size max_obj_sz = new OpenCvSharp.Size(100, 200);

                int x1, x2;
                int y1, y2;

                // detect
                Rect[] found;
                found = detectFace.DetectMultiScale(result, scale_step, gr_thr);

                if (found.Length == 1)
                {
                    output = srcMat.Clone();

                    srcMat.Rectangle(found[0], Scalar.Red, 5);

                    OpenCvSharp.Cv2.ImWrite(System.Windows.Forms.Application.StartupPath + "\\opencv_data\\test.jpg", output);    // save the result as a JPG file

                    x1 = found[0].X;
                    y1 = found[0].Y;
                    x2 = found[0].X + found[0].Width;
                    y2 = found[0].Y + found[0].Height;

                    /*
                     * var json = new JObject();
                     * json.Add("x1", x1);
                     * json.Add("y1", y1);
                     * json.Add("x2", x2);
                     * json.Add("y2", y2);
                     */
                    //Console.WriteLine(json.ToString());
                    using (StreamWriter wr = new StreamWriter(System.Windows.Forms.Application.StartupPath + "\\opencv_data\\data.txt"))
                    {
                        wr.WriteLine(x1);
                        wr.WriteLine(y1);
                        wr.WriteLine(x2);
                        wr.WriteLine(y2);
                    }
                }

                // detectFace is disposed by the enclosing using statement
                output.Dispose();
                result.Dispose();
                found = null;
            }

            return(srcMat);
        }
Example 15
        public StructuredLight(System.Drawing.Size size, bool xaxis = false, bool yaxis = true, bool inversePattern = true) // horizontal codes for disparity-gating
        {
            var cvSize = new OpenCvSharp.Size(size.Width, size.Height);

            code  = new XorCode(cvSize, xaxis, yaxis, inversePattern);
            Calib = new DisparityCalibration();
        }
Example 16
        public F_Main()
        {
            InitializeComponent();

            //Recording BackgroundWorker
            bw_cap                            = new BackgroundWorker();
            bw_cap.DoWork                    += new DoWorkEventHandler(doWork_cap);
            bw_cap.ProgressChanged           += new ProgressChangedEventHandler(progressChanged_cap); //Cancellation is not implemented yet.
            bw_cap.WorkerSupportsCancellation = true;
            bw_cap.WorkerReportsProgress      = true;

            bw_chScan                            = new BackgroundWorker();
            bw_chScan.DoWork                    += new DoWorkEventHandler(doWork_chScan);
            bw_chScan.ProgressChanged           += new ProgressChangedEventHandler(progressChanged_chScan); //Cancellation is not implemented yet.
            bw_chScan.WorkerSupportsCancellation = true;
            bw_chScan.WorkerReportsProgress      = true;

            this.vids = new List <int>();
            if (this.scanCamera() == 0)
            {
                currentch = 0;
                this.vcap = new VideoCapture(this.vids[currentch]);
                this.vcap.Set(VideoCaptureProperties.Focus, 0.01);
                this.vcap.Set(VideoCaptureProperties.Gain, 0.0);
                FPS          = this.vcap.Fps;
                this.w       = this.vcap.FrameWidth; this.h = this.vcap.FrameHeight;
                this.size    = new OpenCvSharp.Size(this.w, this.h);
                this.mat_cap = new Mat(this.h, this.w, MatType.CV_8UC3);
            }
            bw_cap.RunWorkerAsync();
            bw_chScan.RunWorkerAsync();
        }
Example 17
        static Mat GetCorrectedRectangle(Mat sourceImage, OpenCvSharp.Point[] contour)
        {
            var rotatedRectangle = Cv2.MinAreaRect(contour);

            var rectangleAngle = rotatedRectangle.Angle;
            var rectangleSize  = ToSize(rotatedRectangle.Size);

            if (rectangleAngle < -45f)
            {
                rectangleAngle += 90f;
                rectangleSize   = new OpenCvSharp.Size(rectangleSize.Height, rectangleSize.Width);
            }

            using (var rotationMatrix = Cv2.GetRotationMatrix2D(rotatedRectangle.Center, rectangleAngle, 1d))
            {
                using (var rotatedImage = new Mat())
                {
                    Cv2.WarpAffine(sourceImage, rotatedImage, rotationMatrix, sourceImage.Size(), InterpolationFlags.Lanczos4);

                    var croppedImage = new Mat();
                    Cv2.GetRectSubPix(rotatedImage, rectangleSize, rotatedRectangle.Center, croppedImage);

                    return(croppedImage);
                }
            }
        }
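
ToSize is not shown in this example; a plausible helper that converts the rotated rect's Size2f to integer pixels:

        // Assumed helper: round the rotated rect's Size2f to an integer Size
        static OpenCvSharp.Size ToSize(OpenCvSharp.Size2f size)
        {
            return new OpenCvSharp.Size((int)Math.Round(size.Width), (int)Math.Round(size.Height));
        }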
Example 18
        /// <summary>
        /// Render detected faces via OpenCV.
        /// </summary>
        /// <param name="state">Current frame state.</param>
        /// <param name="image">Web cam or video frame.</param>
        /// <returns>Returns new image frame.</returns>
        private static Mat RenderFaces(FrameState state, Mat image)
        {
            Mat result = image.Clone();

            Cv2.CvtColor(image, image, ColorConversionCodes.BGR2GRAY);

            // Render all detected faces
            foreach (var face in state.Faces)
            {
                var center = new OpenCvSharp.Point
                {
                    X = face.Center.X,
                    Y = face.Center.Y
                };
                var axes = new OpenCvSharp.Size
                {
                    Width  = (int)(face.Size.Width * 0.5) + 10,
                    Height = (int)(face.Size.Height * 0.5) + 10
                };

                Cv2.Ellipse(result, center, axes, 0, 0, 360, _faceColorBrush, 4);
            }

            return(result);
        }
Example 19
        /// <summary>
        /// Check whether the object rect is appearing at, or disappearing from, an edge location
        /// </summary>
        /// <param name="before">object rect's previous position</param>
        /// <param name="after">object rect's current position</param>
        /// <param name="matSz">size of the video frame</param>
        public int checkAppearObject(Rect before, Rect after, OpenCvSharp.Size matSz)
        {
            var bef = this.isBorderRect(before, matSz);
            var aft = this.isBorderRect(after, matSz);

            if (bef && !aft)
            {
                return(0);  // appear
            }
            else if (!bef && aft)
            {
                return(1);  // disappear
            }
            else
            {
                return(2);  // track
            }
        }
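
isBorderRect is not shown in this example; a plausible implementation is the complement of Example 2's NotOnBorder, here with an assumed 2% margin:

        // Assumed helper: true when the rect touches the border margin of the frame
        private bool isBorderRect(Rect rect, OpenCvSharp.Size matSz)
        {
            return !NotOnBorder(rect, matSz, leftLimit: 2, rightLimit: 2, topLimit: 2, bottomLimit: 2);
        }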
Example 20
        public BitmapSource WeatheringNG(BitmapSource SourceImage, WeatheringParam param, bool Watermark)
        {
            Mat SourceMat = SourceImage.ToMat();

            if (SourceMat.Type() == MatType.CV_8UC1)
            {
                SourceMat = SourceMat.CvtColor(ColorConversionCodes.GRAY2BGRA);
            }
            if (Watermark)
            {
                AddWatermark(ref SourceMat, Watermark_Zhihu);
                AddWatermark(ref SourceMat, Watermark_Sina);
                AddWatermark(ref SourceMat, Watermark_Toutiao);
                AddWatermark(ref SourceMat, Watermark_Tieba);
            }

            if (param.Noise != 0.0)
            {
                Noising(ref SourceMat, param.Noise);
            }
            if (param.Green != 0.0)
            {
                Greening(ref SourceMat, param.Green);
            }
            if (param.AspectRatio != 1.0)
            {
                OpenCvSharp.Size size = new OpenCvSharp.Size(0, 0);
                SourceMat = SourceMat.Resize(size, param.AspectRatio, param.AspectRatio, InterpolationFlags.Area);
            }
            if (param.Quality != 0.0)
            {
                SourceMat = Compressing(SourceMat, param.Quality);
            }
            return(SourceMat.ToBitmapSource());
        }
Example 21
        protected override void ProcessRecord()
        {
            // File existence check
            if (!File.Exists(@Target))
            {
                WriteError(new ErrorRecord((new FileNotFoundException()), "FileNotFound", ErrorCategory.ObjectNotFound, Target));
                return;
            }
            if (!File.Exists(@Template))
            {
                WriteError(new ErrorRecord((new FileNotFoundException()), "FileNotFound", ErrorCategory.ObjectNotFound, Template));
                return;
            }

            try
            {
                //Load target and template image files
                using (var matTarget = new Mat(@Target))
                    using (var matTemplate = new Mat(@Template))
                        //Prepare result image
                        using (var result = new Mat(matTarget.Height - matTemplate.Height + 1, matTarget.Width - matTemplate.Width + 1, MatType.CV_8UC1))
                        {
                            var templateSize = new OpenCvSharp.Size(matTemplate.Width, matTemplate.Height);

                            do
                            {
                                // Invoke template matching
                                Cv2.MatchTemplate(matTarget, matTemplate, result, TemplateMatchModes.CCoeffNormed);
                                // Extract most similar point
                                Cv2.MinMaxLoc(result, out double minVal, out double maxVal, out Point minPoint, out Point maxPoint);

                                if (maxVal < Threshold)
                                {
                                    break;
                                }
                                else
                                {
                                    var matchedRect = new MatchedRect();
                                    matchedRect.Rect       = new Rect(maxPoint, templateSize);
                                    matchedRect.Similarity = (float)maxVal;

                                    // Return matched rectangle
                                    WriteObject(matchedRect);

                                    // Fill matched rectangle as random
                                    Rect fillRect = matchedRect.Rect;
                                    fillRect.Inflate(Convert.ToInt32(fillRect.Width * -0.2), Convert.ToInt32(fillRect.Height * -0.2));
                                    using (var matRandom = new Mat(fillRect.Size, matTarget.Type()))
                                    {
                                        Cv2.Randu(matRandom, new Scalar(0, 0, 0), new Scalar(255, 255, 255));
                                        matTarget[fillRect] = matRandom;
                                    }
                                }
                            } while (true);
                        }
            }
            catch (Exception e)
            {
                WriteError(new ErrorRecord(e, "Template Matching Failed", ErrorCategory.OperationStopped, null));
            }
        }
Example 22
        private (string iconKey, float conf, Vector2 pos) ScanStaticAndDynamic(Size iconSlotSize, Mat source)
        {
            var matchResultStatic  = MatchIcon(iconSlotSize, source, true);
            var matchResultDynamic = MatchIcon(iconSlotSize, source, false);

            return(matchResultStatic.conf > matchResultDynamic.conf ? matchResultStatic : matchResultDynamic);
        }
Example 23
        private void timer1_Tick(object sender, EventArgs e)
        {
            try
            {
                Console.WriteLine("Tick");

                cap.Read(frame);
                OpenCvSharp.Size newsize = new OpenCvSharp.Size(pictureBox1.Width, pictureBox1.Height);
                frame = frame.Resize(newsize);

                Cv2.Canny(frame, dstframe, 20, 100);

                Bitmap tempimage1 = BitmapConverter.ToBitmap(frame);
                Bitmap tempimage2 = BitmapConverter.ToBitmap(dstframe);

                pictureBox1.Image = tempimage1;
                pictureBox2.Image = tempimage2;

                //Cv2.WaitKey(10);
            }
            catch (Exception extdf)
            {
                Console.WriteLine(extdf.ToString());
            }
        }
Example 24
        private void bStart_MouseClick(object sender, MouseEventArgs e)
        {
            //CvCapture camera = new CvCapture("C:\\rosbank\\facedetect\\test\\media\\test.avi");
            var capture = new VideoCapture("C:\\rosbank\\facedetect\\test\\media\\test.avi");
            //capture.Set(CaptureProperty.FrameWidth, 320);
            //capture.Set(CaptureProperty.FrameHeight, 240);

            int sleepTime = (int)Math.Round(1000 / capture.Fps);

            Mat image = new Mat();

            // Construct the classifier once, outside the capture loop
            var cascade = new CascadeClassifier(@"C:\opencv3\opencv\sources\data\haarcascades\haarcascade_frontalface_alt.xml");

            Rect[] faces = null;
            int    i     = 0;

            while (true)
            {
                i++;
                capture.Read(image);
                if (image.Empty())
                {
                    break;
                }


                Mat small = new Mat();
                Cv2.Resize(image, small, new OpenCvSharp.Size(320, 240), 0, 0, InterpolationFlags.Lanczos4);

                if (i % 5 == 0)
                {
                    faces = cascade.DetectMultiScale(small, 1.08, 2, HaarDetectionType.ScaleImage, new OpenCvSharp.Size(30, 30));
                }
                if (faces != null && faces.Length > 0)
                {
                    foreach (Rect face in faces)
                    {
                        var center = new OpenCvSharp.Point
                        {
                            X = (int)(face.X + face.Width * 0.5),
                            Y = (int)(face.Y + face.Height * 0.5)
                        };
                        var axes = new OpenCvSharp.Size
                        {
                            Width  = (int)(face.Width * 0.5),
                            Height = (int)(face.Height * 0.5)
                        };

                        Mat f = new Mat(small, face);
                        pb1.Image = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(f);
                        pb1.Refresh();

                        Cv2.Ellipse(small, center, axes, 0, 0, 360, new Scalar(255, 0, 255), 2);
                    }
                }

                bpMain.Image = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(small);
                bpMain.Refresh();
                Cv2.WaitKey(100);
            }
        }
Example 25
        public static void transImage(Mat src, Mat dst, double[,] position)
        {
            // The four source points before the transform
            Point2f[] srcPoints = new Point2f[]
            {
                new Point2f((float)position[0, 0], (float)position[0, 1]),
                new Point2f((float)position[1, 0], (float)position[1, 1]),
                new Point2f((float)position[2, 0], (float)position[2, 1]),
                new Point2f((float)position[3, 0], (float)position[3, 1]),
            };
            float[] size = setSize(position);
            // The four destination points after the transform
            Point2f[] dstPoints = new Point2f[]
            {
                new Point2f(0, 0),
                new Point2f(0, size[1]),
                new Point2f(size[0], size[1]),
                new Point2f(size[0], 0),
            };

            // Compute the transform matrix from the four point correspondences
            Mat  mm    = Cv2.GetPerspectiveTransform(srcPoints, dstPoints);
            Size size1 = new Size(size[0], size[1]);

            // Apply the perspective warp
            Cv2.WarpPerspective(src, dst, mm, size1);
        }
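
setSize is not shown in this example; a plausible version measures the source quad's edge lengths to size the output (position[0] to position[3] spans the width, position[0] to position[1] the height, matching the destination points above):

        // Assumed helper: output width/height measured from the source quad's edges
        private static float[] setSize(double[,] position)
        {
            float width = (float)Math.Sqrt(Math.Pow(position[3, 0] - position[0, 0], 2) +
                                           Math.Pow(position[3, 1] - position[0, 1], 2));
            float height = (float)Math.Sqrt(Math.Pow(position[1, 0] - position[0, 0], 2) +
                                            Math.Pow(position[1, 1] - position[0, 1], 2));
            return new float[] { width, height };
        }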
Example 26
        private OpenCvSharp.Size GetGmapSize(Mat frame)
        {
            var minimum = frame.Width > frame.Height ? frame.Height : frame.Width;
            var size    = new OpenCvSharp.Size();

            if (this.State == GmapState.Collapsed)
            {
                size.Width  = Math.Min((int)(minimum / 10.0f), this._icon.Width);
                size.Height = Math.Min((int)(minimum / 10.0f), this._icon.Height);
            }
            else if (this.State == GmapState.Expanded)
            {
                if (this._owner.SizeMode == PictureBoxSizeMode.CenterImage)
                {
                    if (frame.Width > this._owner.Width)
                    {
                        size.Width = (int)(this._owner.Width * EXPANDED_WIDTH_RATIO);
                    }
                    else
                    {
                        size.Width = (int)(frame.Width * EXPANDED_HEIGHT_RATIO);
                    }

                    if (frame.Height > this._owner.Height)
                    {
                        size.Height = (int)(this._owner.Height * EXPANDED_WIDTH_RATIO);
                    }
                    else
                    {
                        size.Height = (int)(frame.Height * EXPANDED_HEIGHT_RATIO);
                    }
                }
                else if (this._owner.SizeMode == PictureBoxSizeMode.Zoom)
                {
                    if (frame.Width > frame.Height)
                    {
                        size.Width  = (int)(this._owner.Width * EXPANDED_WIDTH_RATIO);
                        size.Height = (int)(frame.Height * EXPANDED_HEIGHT_RATIO);
                    }
                    else
                    {
                        size.Width  = (int)(frame.Width * EXPANDED_WIDTH_RATIO);
                        size.Height = (int)(this._owner.Height * EXPANDED_HEIGHT_RATIO);
                    }
                }
                else
                {
                    size.Width  = (int)(this._owner.Width * EXPANDED_WIDTH_RATIO);
                    size.Height = (int)(this._owner.Height * EXPANDED_HEIGHT_RATIO);
                }
            }
            else
            {
                size.Width  = frame.Width;
                size.Height = frame.Height;
            }

            return(size);
        }
Example 27
        /// <summary>
        /// Continuously grab camera frames and switch the displayed image
        /// </summary>
        public virtual void Capture(object state)
        {
            var haarCascade = new CascadeClassifier("data/haarcascades/haarcascade_frontalface_default.xml");

            var camera = new VideoCapture(0 /* use device 0 */)
            {
                // Size (and frame rate) of the captured frames
                FrameWidth  = 480,
                FrameHeight = 270,
                // Fps = 60
            };

            using (var img = new Mat()) // Mat that receives each captured frame
                using (camera) {
                    while (true)
                    {
                        if (this.IsExitCapture)
                        {
                            this.Dispatcher.Invoke(() => this._Image.Source = null);
                            break;
                        }


                        camera.Read(img); // Read from the webcam (blocks until a frame arrives)

                        if (img.Empty())
                        {
                            break;
                        }
                        var result = img.Clone();
                        using (var gray = new Mat()) {
                            Cv2.CvtColor(img, gray, ColorConversionCodes.BGR2GRAY);
                            var faces = haarCascade.DetectMultiScale(
                                gray,
                                1.08,
                                2,
                                HaarDetectionType.FindBiggestObject,
                                new OpenCvSharp.Size(50, 50)
                                );
                            foreach (var face in faces)
                            {
                                var center = new OpenCvSharp.Point {
                                    X = (int)(face.X + face.Width * 0.5),
                                    Y = (int)(face.Y + face.Height * 0.5)
                                };
                                var axes = new OpenCvSharp.Size {
                                    Width  = (int)(face.Width * 0.5),
                                    Height = (int)(face.Height * 0.5)
                                };
                                Cv2.Ellipse(result, center, axes, 0, 0, 360, new Scalar(255, 0, 255), 4);
                            }
                        }

                        this.Dispatcher.Invoke(() => {
                            this._Image.Source = result.ToWriteableBitmap(); // show the frame in WPF
                        });
                    }
                }
        }
Example 28
        private int OpenCVDeepLearningDetector(string path)
        {
            // ported from an Emgu CV example:
            // https://medium.com/@vinuvish/face-detection-with-opencv-and-deep-learning-90bff9028fa8
            string prototextPath  = @"./Dnn/deploy.prototxt";
            string caffeModelPath = @"./Dnn/res10_300x300_ssd_iter_140000.caffemodel";

            //// load the model;
            using (var net = OpenCvSharp.Dnn.CvDnn.ReadNetFromCaffe(prototxt: prototextPath, caffeModel: caffeModelPath))
                using (OpenCvSharp.Mat image = Cv2.ImRead(path))
                {
                    // get the original image size
                    OpenCvSharp.Size imageSize = image.Size();
                    // the dnn detector works on a 300x300 image,
                    // so resize the input for the Dnn detector
                    OpenCvSharp.Size size = new OpenCvSharp.Size(300, 300);
                    // mean BGR values that the network subtracts during preprocessing
                    OpenCvSharp.Scalar mcvScalar = new OpenCvSharp.Scalar(104.0, 177.0, 123.0);
                    using (var blob = OpenCvSharp.Dnn.CvDnn.BlobFromImage(image: image, scaleFactor: 1, size: size, mean: mcvScalar, swapRB: true))
                    {
                        net.SetInput(blob, "data");
                        using (OpenCvSharp.Mat detections = net.Forward())
                        {
                            // convert the detected values to a faces object that we can use to
                            // draw rectangles.
                            List <ConfidenceRect> Faces = new List <ConfidenceRect>();
                            //var rows = detections.SizeOfDimension[2];
                            //Array ans = detections.GetData();
                            //for (int n = 0; n < rows; n++)
                            //{
                            //    object confidence = ans.GetValue(0, 0, n, 2);
                            //    object x1 = ans.GetValue(0, 0, n, 3);
                            //    object y1 = ans.GetValue(0, 0, n, 4);
                            //    object x2 = ans.GetValue(0, 0, n, 5);
                            //    object y2 = ans.GetValue(0, 0, n, 6);
                            //    ConfidenceRect cr = new ConfidenceRect(confidence, x1, y1, x2, y2, imageSize);
                            //    if (cr.Confidence > 0)
                            //    {
                            //        Debug.WriteLine($"Confidence {cr.Confidence}");
                            //    }
                            //    if (cr.Confidence > Confidence)
                            //    {
                            //        Faces.Add(cr);
                            //    }
                            //}

                            //// convert to a writeableBitmap
                            //WriteableBitmap writeableBitmap = new WriteableBitmap(ImageSource);

                            //ImageSource = ConvertWriteableBitmapToBitmapImage(writeableBitmap);
                            //OnPropertyChanged("ImageSource");

                            //DrawDnnOnImage?.Invoke(Faces, imageSize);
                            //return Faces.Count.ToString();
                        }
                    }
                }
            return(0);
        }
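
The detection parsing above is left commented out (it came from the Emgu original). A hedged OpenCvSharp sketch of reading the network output; the 1x1xNx7 blob layout is standard for this Caffe SSD model, but the helper itself is not part of the original code:

        // Sketch: each row of the SSD output is [imageId, classId, confidence, x1, y1, x2, y2],
        // with box coordinates normalized to [0, 1]
        private static List<Rect> ParseDetections(Mat detections, OpenCvSharp.Size imageSize, float confidenceThreshold)
        {
            var faces = new List<Rect>();
            using (Mat rows = detections.Reshape(1, detections.Size(2)))
            {
                for (int i = 0; i < rows.Rows; i++)
                {
                    float confidence = rows.At<float>(i, 2);
                    if (confidence < confidenceThreshold)
                    {
                        continue;
                    }
                    int x1 = (int)(rows.At<float>(i, 3) * imageSize.Width);
                    int y1 = (int)(rows.At<float>(i, 4) * imageSize.Height);
                    int x2 = (int)(rows.At<float>(i, 5) * imageSize.Width);
                    int y2 = (int)(rows.At<float>(i, 6) * imageSize.Height);
                    faces.Add(new Rect(x1, y1, x2 - x1, y2 - y1));
                }
            }
            return faces;
        }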
Example 29
 public PlateImage(string fileName, string name, PlateCategory plateCategory, OpenCvSharp.Size
                   matSize)
 {
     this.FileName      = fileName;
     this.Name          = name;
     this.PlateCategory = plateCategory;
     this.MatSize       = matSize;
 }
Example 30
 /// <summary>
 /// Get dynamic icons with matching size
 /// </summary>
 /// <seealso cref="StaticIcons"/>
 /// <seealso cref="DynamicIcons"/>
 /// <param name="size">Size of the icon in cells</param>
 /// <returns>Dictionary with the key being the uid and the value being the icon</returns>
 internal static Dictionary <string, Mat> GetDynamicIcons(Size size)
 {
     // TryGetValue avoids looking the key up twice
     return(DynamicIcons.TryGetValue(size, out var icons) ? icons : null);
 }