Example #1
    private void initFundMatrix() // initialize fundamental matrix
    {
        Point2f[] leftMatchPoints =
        {
            new Point2f(54, 31),    //1,1
            new Point2f(116, 98),   //2,2
            new Point2f(180, 162),  //3,3
            new Point2f(242, 229),  //4,4
            new Point2f(304, 294),  //5,5
            new Point2f(367, 361),  //6,6
            new Point2f(304, 31),   //5,1
            new Point2f(367, 97),   //6,2
            new Point2f(428, 162),  //7,3
        };
        Point2f[] rightMatchPoints =
        {
            new Point2f(30, 31),    //1,1
            new Point2f(92, 98),    //2,2
            new Point2f(156, 162),  //3,3
            new Point2f(217, 229),  //4,4
            new Point2f(281, 294),  //5,5
            new Point2f(343, 361),  //6,6
            new Point2f(280, 31),   //5,1
            new Point2f(343, 97),   //6,2
            new Point2f(404, 162),  //7,3
        };
        InputArray leftInput  = InputArray.Create(leftMatchPoints);
        InputArray rightInput = InputArray.Create(rightMatchPoints);

        this.fundMatrix = Cv2.FindFundamentalMat(leftInput, rightInput, FundamentalMatMethod.Point8);
    }
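FundamentalMatMethod.Point8 needs at least eight correspondences, and nine are supplied above. A minimal sketch of how the stored matrix might then be consumed, reusing the leftMatchPoints array and assuming the InputArray overload of Cv2.ComputeCorrespondEpilines; each epipolar line is returned as the coefficients (a, b, c) of the line a*x + b*y + c = 0 in the right image:

        // Sketch: epipolar lines in the right image for the left-hand points.
        using (var points = InputArray.Create(leftMatchPoints))
            using (var lines = new Mat())
            {
                // whichImage = 1: the points belong to the first (left) image
                Cv2.ComputeCorrespondEpilines(points, 1, this.fundMatrix, lines);
            }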
Example #2
        public static Dictionary <string, double> CalculateMatrix(IPattern from, IPattern to)
        {
            Point2f[] from_ = new Point2f[from.Points.Count];
            Point2f[] to_   = new Point2f[to.Points.Count];

            int i = 0;

            from.Points.ForEach(pt =>
            {
                from_[i++] = new Point2f((float)pt.X, (float)pt.Y);
            });

            i = 0;

            to.Points.ForEach(pt =>
            {
                to_[i++] = new Point2f((float)pt.X, (float)pt.Y);
            });

            Mat inliers = new Mat();

            Mat affineMatrix = Cv2.EstimateAffine2D(InputArray.Create(from_), InputArray.Create(to_), inliers, RobustEstimationAlgorithms.RANSAC, 300.0, 2000, 0.99, 10);

            PrintMatAsync(inliers);


            return(TransformToDict(affineMatrix));
        }
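The inliers Mat that EstimateAffine2D fills is an Nx1 mask with one row per point pair; nonzero rows survived the RANSAC reprojection threshold (300 px here, unusually loose). A hedged sketch of a sanity check that could precede TransformToDict; the half-inliers cutoff is an arbitrary assumption, not something taken from this code:

            int inlierCount = Cv2.CountNonZero(inliers);

            if (affineMatrix.Empty() || inlierCount < from_.Length / 2)
            {
                // fewer than half the correspondences agree with the model;
                // the estimate is probably unreliable
            }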
Example #3
 public static Mat GetHomography(Mat mMain, Mat mSecondary)
 {
     KeyPoint[] keypoints  = null;
     KeyPoint[] keypoints2 = null;
     using (SIFT sIFT = SIFT.Create(1000))
     {
         using (Mat mat = new Mat())
         {
             using (Mat mat2 = new Mat())
             {
                 sIFT.DetectAndCompute(mMain, new Mat(), out keypoints, mat);
                 sIFT.DetectAndCompute(mSecondary, new Mat(), out keypoints2, mat2);
                 FlannBasedMatcher flannBasedMatcher = new FlannBasedMatcher();
                 DMatch[] array = flannBasedMatcher.Match(mat, mat2);
                 List <Point2f> list  = new List <Point2f>();
                 List <Point2f> list2 = new List <Point2f>();
                 for (int i = 0; i < array.Length; i++)
                 {
                     list.Add(keypoints[array[i].QueryIdx].Pt);
                     list2.Add(keypoints2[array[i].TrainIdx].Pt);
                 }
                 return(Cv2.FindHomography(InputArray.Create(list2), InputArray.Create(list), HomographyMethods.Ransac));
             }
         }
     }
 }
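Match keeps the single nearest neighbour for every query descriptor, so poor correspondences reach FindHomography and only RANSAC weeds them out. A common refinement, sketched here but not part of the original, is Lowe's ratio test via FlannBasedMatcher.KnnMatch; the 0.75 ratio is the conventional value, not one taken from this code:

     // Sketch: keep a match only when it is clearly better than its runner-up.
     DMatch[][] knn = flannBasedMatcher.KnnMatch(mat, mat2, 2);
     List <DMatch> good = new List <DMatch>();
     foreach (DMatch[] pair in knn)
     {
         if (pair.Length == 2 && pair[0].Distance < 0.75f * pair[1].Distance)
         {
             good.Add(pair[0]);
         }
     }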
Example #4
        public Angles GetAnglesAndPoints(Mat <Point2d> points, int width, int height)
        {
            using (var objPtsMat = InputArray.Create(Model_points, MatType.CV_32FC3)) //new Mat(objPts.Count, 1, MatType.CV_32FC3, objPts))
                using (var imgPtsMat = InputArray.Create(points, MatType.CV_32FC2))   //new Mat(imgPts.Length, 1, MatType.CV_32FC2, imgPts))
                    using (var cameraMatrixMat = Mat.Eye(3, 3, MatType.CV_64FC1))
                        using (var distMat = Mat.Zeros(5, 1, MatType.CV_64FC1)) // zero distortion coefficients
                            using (var rvecMat = new Mat())
                                using (var tvecMat = new Mat())
                                {
                                    Cv2.SolvePnP(objPtsMat, imgPtsMat, cameraMatrixMat, distMat, rvecMat, tvecMat);

                                    using (Mat resultPoints = new Mat())
                                    {
                                        Cv2.ProjectPoints(objPtsMat, rvecMat, tvecMat, cameraMatrixMat, distMat, resultPoints);
                                    }

                                    // SolvePnP returns a rotation vector; Rodrigues converts it to a 3x3 rotation matrix, from which the Euler angles are solved.
                                    double theta_x, theta_y, theta_z;
                                    using (var rotMat = new Mat())
                                    {
                                        Cv2.Rodrigues(rvecMat, rotMat);
                                        theta_x = Math.Atan2(rotMat.At <double>(2, 1), rotMat.At <double>(2, 2));
                                        theta_y = Math.Atan2(-rotMat.At <double>(2, 0),
                                                             Math.Sqrt((rotMat.At <double>(2, 1) * rotMat.At <double>(2, 1)) + (rotMat.At <double>(2, 2) * rotMat.At <double>(2, 2))));
                                        theta_z = Math.Atan2(rotMat.At <double>(1, 0), rotMat.At <double>(0, 0));
                                    }

                                    // Convert radians to degrees
                                    Angles angles = new Angles();
                                    angles.Roll  = theta_x * (180 / Math.PI);
                                    angles.Pitch = theta_y * (180 / Math.PI);
                                    angles.Yaw   = theta_z * (180 / Math.PI);

                                    // Save the coordinates of the projected points
                                    // (how to copy the OutputArray-typed resultPoints into a list? see the sketch after this example)
                                    return(angles);
                                }
        }
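On the question left in the closing comment: the projected points can be copied out with Mat.GetArray, the same pattern Example #11 uses. A minimal sketch that would have to run inside the using block that owns resultPoints, assuming the output is 2-channel float (use Point2d instead for CV_64FC2):

                                    // Sketch: copy the projected points into a managed list.
                                    var projected = new Point2f[resultPoints.Rows * resultPoints.Cols];
                                    resultPoints.GetArray(0, 0, projected);
                                    List <Point2f> projectedList = projected.ToList();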
Example #5
        private void FindAndDrawHomo()
        {
            using (var template = new Mat("Images\\Circle_Template.bmp", ImreadModes.Color))
            {
                using (var surf = SURF.Create(1000))
                {
                    using (var templateDescriptors = new Mat())
                    {
                        surf.DetectAndCompute(template, null, out KeyPoint[] templateKeyPoints, templateDescriptors);
                        using (var image = new Mat("Images\\Circle.bmp", ImreadModes.Color))
                        {
                            using (var imageDescriptors = new Mat())
                            {
                                surf.DetectAndCompute(image, null, out KeyPoint[] imageKeyPoints, imageDescriptors);
                                using (var matcher = new BFMatcher())
                                {
                                    var matches     = matcher.Match(imageDescriptors, templateDescriptors);
                                    var goodMatches = matches;//.Where(m => m.Distance < 0.2).ToArray();
                                    using (var srcPoints = InputArray.Create(goodMatches.Select(m => templateKeyPoints[m.TrainIdx].Pt)))
                                    {
                                        using (var dstPoints = InputArray.Create(goodMatches.Select(m => imageKeyPoints[m.QueryIdx].Pt)))
                                        {
                                            using (var homography = Cv2.FindHomography(srcPoints, dstPoints, HomographyMethods.Rho))
                                            {
                                                ////using (var overlay = image.Overlay())
                                                ////{
                                                ////    DrawBox(template, homography, overlay);
                                                ////    this.Result.Source = overlay.ToBitmapSource();
                                                ////}

                                                using (var tmp = image.Overlay())
                                                {
                                                    Cv2.BitwiseNot(template, template);
                                                    Cv2.WarpPerspective(template, tmp, homography, tmp.Size());
                                                    using (var overlay = tmp.Overlay())
                                                    {
                                                        for (var r = 0; r < tmp.Rows; r++)
                                                        {
                                                            for (var c = 0; c < tmp.Cols; c++)
                                                            {
                                                                overlay.Set(r, c,
                                                                            tmp.At <int>(r, c) == 0
                                                                        ? new Vec4b(0, 0, 0, 0)
                                                                        : new Vec4b(0, 0, 255, 150));
                                                            }
                                                        }

                                                        this.Result.Source = overlay.ToBitmapSource();
                                                    }
                                                }
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
Example #6
        /// <summary>
        /// Applies a perspective transform to Src so that it lines up with Target
        /// </summary>
        public void FitSrcToTarget()
        {
            // Allocate one vector entry per feature point to be used
            int size         = SelectedMatched.Count();
            var getPtsSrc    = new Vec2f[size];
            var getPtsTarget = new Vec2f[size];

            // Get the coordinates of the corresponding feature points in the Src and Target images and store them in the vector arrays.
            int count = 0;

            foreach (var item in SelectedMatched)
            {
                var ptSrc    = KeyPtsSrc[item.QueryIdx].Pt;
                var ptTarget = KeyPtsTarget[item.TrainIdx].Pt;
                getPtsSrc[count][0]    = ptSrc.X;
                getPtsSrc[count][1]    = ptSrc.Y;
                getPtsTarget[count][0] = ptTarget.X;
                getPtsTarget[count][1] = ptTarget.Y;
                count++;
            }

            // Obtain the transformation matrix hom that fits Src onto Target. The robust estimator is RANSAC.
            var hom = Cv2.FindHomography(
                InputArray.Create(getPtsSrc),
                InputArray.Create(getPtsTarget),
                HomographyMethods.Ransac);

            // Apply the perspective transform to Src using the matrix hom.
            WarpedSrcMat = new Mat();
            Cv2.WarpPerspective(
                SrcMat, WarpedSrcMat, hom,
                new OpenCvSharp.Size(TargetMat.Width, TargetMat.Height));
        }
Example #7
        public override float Predict(ModelDataSet input)
        {
            if (TrainedModel == null)
            {
                throw new Exception("Must initialize the model before calling");
            }

            lock (TrainedModel)
            {
                // cache for reuse
                if (PredictInput == null)
                {
                    PredictInput = new float[input.Features()];
                }
                for (int i = 0; i < PredictInput.Length; i++)
                {
                    PredictInput[i] = input.Feature(i);
                }

                using (var arr = InputArray.Create <float>(PredictInput))
                {
                    return(TrainedModel.Predict(arr));
                }
            }
        }
Example #8
        public void AddScalar()
        {
            using Mat src = new Mat(2, 2, MatType.CV_8UC1, new byte[] { 1, 2, 3, 4 });
            using Mat dst = new Mat();
            Cv2.Add(new Scalar(10), src, dst);

            Assert.Equal(MatType.CV_8UC1, dst.Type());
            Assert.Equal(2, dst.Rows);
            Assert.Equal(2, dst.Cols);

            Assert.Equal(11, dst.At <byte>(0, 0));
            Assert.Equal(12, dst.At <byte>(0, 1));
            Assert.Equal(13, dst.At <byte>(1, 0));
            Assert.Equal(14, dst.At <byte>(1, 1));

            Cv2.Add(src, new Scalar(10), dst);
            Assert.Equal(11, dst.At <byte>(0, 0));
            Assert.Equal(12, dst.At <byte>(0, 1));
            Assert.Equal(13, dst.At <byte>(1, 0));
            Assert.Equal(14, dst.At <byte>(1, 1));

            using var inputArray = InputArray.Create(10.0);
            Cv2.Add(src, inputArray, dst);
            Assert.Equal(11, dst.At <byte>(0, 0));
            Assert.Equal(12, dst.At <byte>(0, 1));
            Assert.Equal(13, dst.At <byte>(1, 0));
            Assert.Equal(14, dst.At <byte>(1, 1));
        }
Example #9
        public static void MatchImpact()
        {
            Bitmap screen = new Bitmap("ImpactTest.png");
            //Bitmap cropped = CompassSensor.Crop(screen, screen.Width - 400, 0, screen.Width - 100, 300);
            Mat screenwhole = BitmapConverter.ToMat(screen);

            Mat brightHSV = screenwhole.CvtColor(ColorConversionCodes.BGR2HSV);
            Mat redMask   = brightHSV.InRange(InputArray.Create(new int[] { 0, 250, 200 }), InputArray.Create(new int[] { 5, 256, 256 }))
                            + brightHSV.InRange(InputArray.Create(new int[] { 175, 250, 200 }), InputArray.Create(new int[] { 180, 256, 256 }));
            Mat darkAreas = new Mat();

            screenwhole.CopyTo(darkAreas, redMask);
            Mat red = darkAreas.Split()[2];

            red.SaveImage("impacttemplateraw.png");
            Mat template = new Mat("res3/impacttemplate.png", ImreadModes.GrayScale);
            Mat result   = new Mat(red.Size(), red.Type());

            Cv2.MatchTemplate(red, template, result, TemplateMatchModes.CCoeffNormed);
            Window w2 = new Window(red);
            Window w3 = new Window(result);

            Cv2.Threshold(result, result, 0.4, 1.0, ThresholdTypes.Tozero);
            Window w4 = new Window(result);
            Window w1 = new Window(screenwhole);
        }
Example #10
        /// <summary>
        /// Converts the input image to grayscale; each sharpening pass writes the image to disk once
        /// </summary>
        /// <param name="input_img">Text region segmented out of the full-size image</param>
        /// <param name="x">X coordinate of the region</param>
        /// <param name="y">Y coordinate of the region</param>
        /// <param name="width">Extent to the right</param>
        /// <param name="height">Extent downward</param>
        /// <returns></returns>
        public static string Seg_img_to_text(Mat input_img, double x, double y, int width, int height)
        {
            //x *= input_img.Width; y *= input_img.Height; //0.09  0.262
            //width = 71; height = 32;
            /*-- Disk cache: a clumsy approach. But what byte format does Pix.LoadFromMemory expect? (see the sketch after this example) --*/
            string imgPath = @"D:\XD\1-dis\pic\cache\seged_image.jpg";

            using (InputArray kernel = InputArray.Create <double>(new double[3, 3] {
                { -1, -1, -1 }, { -1, 9, -1 }, { -1, -1, -1 }
            }))
                using (Mat dst = new Mat(input_img, new OpenCvSharp.Rect((int)x, (int)y, width, height)))
                {
                    // If this is inaccurate, draw the bounding rectangle instead
                    //https://www.itdaan.com/tw/46038a31b3020fce189dd3f30699ac0e
                    if (dst.Channels() != 1)
                    {
                        Cv2.CvtColor(dst, dst, ColorConversionCodes.BGR2GRAY);
                    }
                    //OTSU = Cv2.Threshold(dst, dst, 200, 255, ThresholdTypes.Otsu);
                    //Cv2.Resize(dst, dst, new Size(dst.Width * 5, dst.Height * 5), 0, 0, InterpolationFlags.Cubic);
                    //Cv2.Filter2D(dst, dst, dst.Depth(), kernel, new Point(-1, -1), 0);
                    //Cv2.Threshold(dst, dst, 125, 255, ThresholdTypes.Binary);
                    Cv2.ImWrite(imgPath, dst);
                }

            string strResult = ImageToText(imgPath);

            if (string.IsNullOrEmpty(strResult))
            {
                strResult = "无法识别"; // "unrecognizable"
            }
            strResult = strResult.Replace(" ", ""); // Remove the interior spaces. Why does recognizing Chinese produce spaces??
            return(strResult);
        }
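On the question in the cache comment above: Tesseract's Pix.LoadFromMemory expects the bytes of an encoded image file rather than raw pixel data, so the disk round-trip could plausibly be replaced with an in-memory PNG. A sketch under that assumption; the Pix-accepting recognition call is hypothetical, since the original ImageToText takes a file path:

            // Sketch: encode the cropped Mat to PNG in memory and hand the bytes to Tesseract.
            Cv2.ImEncode(".png", dst, out byte[] pngBytes);
            using (var pix = Tesseract.Pix.LoadFromMemory(pngBytes))
            {
                // pass pix to the Tesseract engine here instead of imgPath
            }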
Example #11
        private static (Mat transformedImage, List <Point2f> transformedPoints) SimilarityTransform(int outputWidth,
                                                                                                    int outputHeight, IList <Point2f> face, Mat img)
        {
            var s60 = (float)Math.Sin(60 * Math.PI / 180.0);
            var c60 = (float)Math.Cos(60 * Math.PI / 180.0);

            Point2f[] ToEquilateral(Point2f a, Point2f b)
            {
                var dx = a.X - b.X;
                var dy = a.Y - b.Y;

                return(new[] { a, b, new Point2f(c60 * dx - s60 * dy + b.X, s60 * dx + c60 * dy + b.Y) });
            }

            var eyeCornersSource = ToEquilateral(face[36], face[45]);
            var eyeCornersDest   = ToEquilateral(
                new Point2f(0.3F * outputWidth, outputHeight / 3f),
                new Point2f(0.7F * outputWidth, outputHeight / 3f));

            using (var inputArray = InputArray.Create(eyeCornersSource))
                using (var array = InputArray.Create(eyeCornersDest))
                    using (var transform = Cv2.EstimateRigidTransform(inputArray, array, false))
                    {
                        var transformedImage = Mat.Zeros(outputHeight, outputWidth, MatType.CV_32FC3).ToMat(); // rows = height, cols = width
                        Cv2.WarpAffine(img, transformedImage, transform, transformedImage.Size());
                        using (var src = InputArray.Create(face))
                            using (var dst = new Mat())
                            {
                                Cv2.Transform(src, dst, transform);
                                var transformedPoints = new Point2f[dst.Rows * dst.Cols];
                                dst.GetArray(0, 0, transformedPoints);
                                return(transformedImage, transformedPoints.ToList());
                            }
                    }
        }
Example #12
        public override void Apply(Mat input)
        {
            _start = DateTime.Now;
            Input  = input;

            OpenCvSharp.Cv2.Canny(InputArray.Create(Input), Output, _minThreshold, _minThreshold * 2);
            base.Apply(input);
        }
Example #13
        public static double MSE(Bitmap reference, Bitmap compare)
        {
            InputArray referenceArray = InputArray.Create(BitmapConverter.ToMat(reference));
            InputArray compareArray   = InputArray.Create(BitmapConverter.ToMat(compare));
            Scalar     scalar         = QualityMSE.Compute(referenceArray, compareArray, null);

            return((scalar[0] + scalar[1] + scalar[2]) / 3);
        }
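Because the scalar returned by QualityMSE.Compute is averaged over the three channels above, a peak signal-to-noise ratio can be derived from the same value. A minimal sketch, assuming 8-bit input images (peak value 255):

        public static double PSNR(Bitmap reference, Bitmap compare)
        {
            double mse = MSE(reference, compare);

            // Identical images give zero MSE, i.e. infinite PSNR.
            return(mse == 0 ? double.PositiveInfinity : 10.0 * Math.Log10(255.0 * 255.0 / mse));
        }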
Example #14
        static void Main(string[] args)
        {
            using (var sourceMat = new Mat("source.jpg"))
                using (var destinationMat = new Mat("destination.jpg"))
                    using (var hc = new CascadeClassifier("HaarCascade.xml"))
                        using (var facemark = FacemarkLBF.Create())
                        {
                            Console.WriteLine("Face detection starting..");
                            var sourceFaceRects = hc.DetectMultiScale(sourceMat);
                            if (sourceFaceRects == null || sourceFaceRects.Length == 0)
                            {
                                Console.WriteLine($"Source image: No faces detected.");
                                return;
                            }
                            Console.WriteLine($"Source image: detected {sourceFaceRects.Length} faces.");

                            var destFaceRects = hc.DetectMultiScale(destinationMat);
                            if (destFaceRects == null || destFaceRects.Length == 0)
                            {
                                Console.WriteLine($"Destination image: No faces detected.");
                                return;
                            }
                            Console.WriteLine($"Destination image: detected {destFaceRects.Length} faces.");

                            facemark.LoadModel("lbfmodel.yaml");
                            using (var sourceInput = InputArray.Create(sourceFaceRects))
                                using (var destInput = InputArray.Create(destFaceRects))
                                {
                                    facemark.Fit(sourceMat, sourceInput, out Point2f[][] sourceLandmarks);
                                    var sourcePoints = sourceLandmarks[0];

                                    facemark.Fit(destinationMat, destInput, out Point2f[][] destLandmarks);
                                    var destPoints = destLandmarks[0];

                                    var triangles = destPoints.Take(60).GetDelaunayTriangles();
                                    var warps     = triangles.GetWarps(sourcePoints.Take(60), destPoints.Take(60));

                                    using (var warpedMat = sourceMat.ApplyWarps(destinationMat.Width, destinationMat.Height, warps))
                                        using (var mask = new Mat(destinationMat.Size(), MatType.CV_8UC3))
                                            using (var result = new Mat(destinationMat.Size(), MatType.CV_8UC3))
                                            {
                                                mask.SetTo(0);

                                                var convexHull = Cv2.ConvexHull(destPoints).Select(s => new Point(s.X, s.Y));
                                                Cv2.FillConvexPoly(mask, convexHull, Scalar.White);

                                                var rect   = Cv2.BoundingRect(convexHull);
                                                var center = new Point(rect.X + rect.Width / 2, rect.Y + rect.Height / 2);

                                                Cv2.SeamlessClone(warpedMat, destinationMat, mask, center, result, SeamlessCloneMethods.NormalClone);
                                                var blured = result.MedianBlur(5);
                                                blured.SaveImage("result.png");
                                            }
                                }
                        }
            Console.WriteLine("Done");
        }
Example #15
        public override bool run()
        {
            InputArray m = InputArray.Create <double>(matrix, MatType.CV_64FC1);
            double     d1, d2, d3, d4, d5, d6;

            evaluateMatrix(m.GetMat(), out d1, out d2, out d3, out d4, out d5, out d6);
            dst = src.WarpAffine(m, src.Size(), InterpolationFlags.Lanczos4, BorderTypes.Constant);
            return(true);
        }
Example #16
        public static double GetMaxDifference(Mat a, Mat b)
        {
            var resultMat = new Mat();

            Cv2.Absdiff(InputArray.Create(a), InputArray.Create(b), resultMat);
            resultMat.MinMaxLoc(out double minVal, out double maxVal);

            return(maxVal);
        }
Example #17
        public void NormVecb()
        {
            var vec = new Vec3b(10, 20, 30);

            using var ia = InputArray.Create(vec);
            var norm = Cv2.Norm(ia, NormTypes.L1);

            Assert.Equal(60, norm);
        }
Example #18
        /// <summary>
        /// Try to match (part of) a large green circle on the screen.
        /// </summary>
        public CircleSegment FindCorona()
        {
            // see the Experiments for how this works
            Bitmap cropped = CompassSensor.Crop(screen.bitmap,
                                                screen.bitmap.Width * 1 / 3,
                                                screen.bitmap.Height * 1 / 3,
                                                screen.bitmap.Width * 2 / 3,
                                                screen.bitmap.Height * 2 / 3);
            Mat screenwhole = BitmapConverter.ToMat(cropped);

            Point2f ShipPointerOffset = new Point2f(0, 0);

            try
            {
                ShipPointerOffset = FindShipPointer(IsolateYellow(screenwhole));
            }
            catch (Exception)
            {
                // If we can't find the ship pointer (it's hard to see it against the sun) then use the middle of the screen.
            }

            // erase the vivid areas, otherwise the blur subtraction turns yellow near red to green
            Mat brightHSV     = screenwhole.CvtColor(ColorConversionCodes.BGR2HSV);
            Mat darkAreasMask = brightHSV.InRange(InputArray.Create(new int[] { 0, 0, 0 }), InputArray.Create(new int[] { 180, 255, 180 }));
            Mat darkAreas     = new Mat();

            screenwhole.CopyTo(darkAreas, darkAreasMask);

            Mat screenblur        = darkAreas - darkAreas.Blur(new OpenCvSharp.Size(10, 10));
            Mat sourceHSV         = screenblur.CvtColor(ColorConversionCodes.BGR2HSV);
            Mat mask              = sourceHSV.InRange(InputArray.Create(new int[] { 35, 204, 20 }), InputArray.Create(new int[] { 90, 255, 255 }));
            Mat sourceHSVFiltered = new Mat();

            sourceHSV.CopyTo(sourceHSVFiltered, mask);
            Mat sourceGrey = sourceHSVFiltered.Split()[2].InRange(32, 256);

            LineSegmentPoint[] result = sourceGrey.HoughLinesP(1, 3.1415 / 180, 5, 10, 2);
            List <Point2d>     points = new List <Point2d>();

            foreach (var line in result)
            {
                points.Add(line.P1);
                points.Add(line.P2);
            }
            if (points.Count < 8)
            {
                throw new ArgumentException("Not enough points in corona circle");
            }
            CircleSegment c = ComputeCircle(points);

            sourceGrey.Line(c.Center, ShipPointerOffset, 255);
            c.Center -= ShipPointerOffset; // adjust for camera movement by taking ship pointer offset
            sourceGrey.Circle(c.Center, (int)c.Radius, 255);
            debugWindow.Image = BitmapConverter.ToBitmap(sourceGrey);
            return(c);
        }
Example #19
        private async Task CaptureLoop(CancellationToken cancelToken)
        {
            // 5x5 dilation kernel; declared as byte[,] so InputArray.Create yields a
            // 5x5 structuring element (a flat byte[25] would become a 25x1 Mat).
            byte[,] dilateArray =
            {
                { 1, 1, 1, 1, 1 },
                { 1, 1, 1, 1, 1 },
                { 1, 1, 1, 1, 1 },
                { 1, 1, 1, 1, 1 },
                { 1, 1, 1, 1, 1 },
            };

            Texture2D stencilViewTexture = new Texture2D(width, height);
            Texture2D rgbViewTexture     = new Texture2D(width, height);
            Texture2D inpaintViewTexture = new Texture2D(width, height);

            HumanStencil_Image.texture = stencilViewTexture;
            RGB_Image.texture          = rgbViewTexture;
            Inpaint_Image.texture      = inpaintViewTexture;

            while (!cancelToken.IsCancellationRequested)
            {
                await Task.Delay(10);

                if (RGB_Texture == null || Stencil_Texture == null)
                {
                    continue;
                }

                using (Mat stencilMat = OpenCvSharp.Unity.TextureToMat(Stencil_Texture))
                    using (Mat rgbMat = OpenCvSharp.Unity.TextureToMat(RGB_Texture))
                        using (Mat inpaintMat = new Mat())
                        {
                            #region stencil texture
                            Cv2.CvtColor(stencilMat, stencilMat, ColorConversionCodes.BGR2GRAY);
                            Cv2.Dilate(stencilMat, stencilMat, InputArray.Create(dilateArray));
                            Cv2.Resize(stencilMat, stencilMat, new OpenCvSharp.Size(width, height));
                            stencilViewTexture = OpenCvSharp.Unity.MatToTexture(stencilMat, stencilViewTexture);
                            #endregion

                            #region rgb texture
                            Cv2.Resize(rgbMat, rgbMat, new OpenCvSharp.Size(width, height));
                            Cv2.Flip(rgbMat, rgbMat, FlipMode.Y);
                            rgbViewTexture = OpenCvSharp.Unity.MatToTexture(rgbMat, rgbViewTexture);
                            #endregion

                            #region inpaint
                            Cv2.Inpaint(rgbMat, stencilMat, inpaintMat, 3, InpaintMethod.NS);
                            inpaintViewTexture = OpenCvSharp.Unity.MatToTexture(inpaintMat, inpaintViewTexture);
                            #endregion

                        }
            }
        }
Example #20
        /// <summary>
        /// Filter the given image to select certain yellow hues (returned as grayscale)
        /// </summary>
        public static Mat IsolateYellow(Mat source)
        {
            Mat sourceHSV         = source.CvtColor(ColorConversionCodes.BGR2HSV);
            Mat mask              = sourceHSV.InRange(InputArray.Create(new int[] { 10, 200, 128 }), InputArray.Create(new int[] { 30, 255, 255 }));
            Mat sourceHSVFiltered = new Mat();

            sourceHSV.CopyTo(sourceHSVFiltered, mask);
            Mat valueChannel = sourceHSVFiltered.Split()[2];

            return(valueChannel);
        }
Example #21
 public Mat Detect(Mat mat)
 {
     using (var gray = new Mat())
     {
         Cv2.CvtColor(InputArray.Create(mat), OutputArray.Create(gray), ColorConversionCodes.BGR2GRAY);
         cascadeClassifier.DetectMultiScale(gray, Convert.ToDouble(scaleFactor), minimumNeighbors, 0,
                                            new OpenCvSharp.Size(minSizeWidth, minSizeHeight))
         .ToList().ForEach(r => mat.Rectangle(new OpenCvSharp.Rect(r.X, r.Y, r.Width, r.Height), Scalar.Red));
         return(mat);
     }
 }
Example #22
        public static Mat IsolateRed(Mat source)
        {
            Mat brightHSV = source.CvtColor(ColorConversionCodes.BGR2HSV);
            Mat redMask   = brightHSV.InRange(InputArray.Create(new int[] { 0, 250, 200 }), InputArray.Create(new int[] { 5, 256, 256 }))
                            + brightHSV.InRange(InputArray.Create(new int[] { 175, 250, 200 }), InputArray.Create(new int[] { 180, 256, 256 }));
            Mat redAreas = new Mat();

            source.CopyTo(redAreas, redMask);
            Mat red = redAreas.Split()[2];

            return(red);
        }
Example #23
        public static int? GetLastFinalBossFrame(VideoCapture capture, float scale, int videoEndFrame,
                                                Action <LoadRemover.ProgressPhase, float> updateProgress, int start = 1, int stepSize = -1)
        {
            if (stepSize <= 0)
            {
                stepSize = (int)capture.Fps;
            }


            capture.Set(VideoCaptureProperties.PosFrames, videoEndFrame);

            int maxFrames = (int)(MaxTimeAfterFinalHit * capture.Fps);

            for (int i = start; i < maxFrames; i += stepSize)
            {
                // Start from the end of the video and move backwards to find the last occurrence of the boss icon
                capture.Set(VideoCaptureProperties.PosFrames, videoEndFrame - i);

                Mat mat = new Mat();
                capture.Read(mat);

                Cv2.Resize(mat, mat, new Size(mat.Width * scale, mat.Height * scale));
                // Crop to the bottom-left corner, since that's where the lives are
                mat = mat[(int)(mat.Height * 0.83f), mat.Height, 0, (int)(mat.Width * 0.085f)];

                Mat result = new Mat();
                Cv2.MatchTemplate(InputArray.Create(mat), InputArray.Create(ImgEndBoss), result,
                                  TemplateMatchModes.SqDiffNormed, InputArray.Create(ImgEndBossMask));

                result.MinMaxLoc(out double minVal, out double maxVal, out Point minLoc, out Point maxLoc);
                var timespan = TimeSpan.FromSeconds((videoEndFrame - i) / (float)capture.Fps);

                if (start == 1)
                {
                    updateProgress.Invoke(LoadRemover.ProgressPhase.Phase_4_EndingTime, (float)i / maxFrames);
                }

                if (minVal < EndBossIconMatchThreshold)
                {
                    if (stepSize == 1)
                    {
                        return((videoEndFrame - i) + 1);
                    }
                    else
                    {
                        return(GetLastFinalBossFrame(capture, scale, videoEndFrame, updateProgress, i - stepSize, stepSize: 1));
                    }
                }
            }

            return(null);
        }
Example #24
 public List <Point2f> UndistortPoint(List <Point2f> src)
 {
     if (_init)
     {
         List <Point2f> dst = new List <Point2f>();
         Cv2.UndistortPoints(InputArray.Create(src), OutputArray.Create(dst), _cameraMatrix, _distCoeffs);
         return(dst);
     }
     else
     {
         return(src);
     }
 }
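One subtlety in UndistortPoints: when the optional R and P arguments are omitted, as above, the output comes back in normalized image coordinates rather than pixels. Passing the camera matrix again as P keeps the result in pixel coordinates; a sketch against the same _cameraMatrix and _distCoeffs fields:

     // Sketch: supply P = camera matrix so dst stays in pixel coordinates.
     Cv2.UndistortPoints(InputArray.Create(src), OutputArray.Create(dst),
                         _cameraMatrix, _distCoeffs, null, _cameraMatrix);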
Example #25
        private static BossType?IsBossFrame(VideoCapture capture, float scale, float posSeconds)
        {
            int frameNum = (int)(posSeconds * capture.Fps);

            capture.Set(VideoCaptureProperties.PosFrames, frameNum);

            Mat mat = new Mat();

            capture.Read(mat);

            Cv2.Resize(mat, mat, new Size(mat.Width * scale, mat.Height * scale));
            // Crop to the bottom-left corner, since that's where the lives are
            mat = mat[(int)(mat.Height * 0.83f), mat.Height, 0, (int)(mat.Width * 0.085f)];

            float lowestThreshold = float.MaxValue;

            BossType?type = null;

            for (int i = 0; i < (int)BossType._NumTypes; i++)
            {
                Mat result = new Mat();
                Cv2.MatchTemplate(InputArray.Create(mat), InputArray.Create(ImgBoss[(BossType)i]), result,
                                  TemplateMatchModes.SqDiffNormed, InputArray.Create(ImgBossMask[(BossType)i]));

                result.MinMaxLoc(out double minVal, out double maxVal, out Point minLoc, out Point maxLoc);

                if (minVal < EndBossIconMatchThreshold)
                {
                    if (minVal < lowestThreshold)   // pick the match with the lowest threshold
                    {
                        type            = (BossType)i;
                        lowestThreshold = (float)minVal;
                    }
                }
            }

            if (type == BossType.Bzzit || type == BossType.Mosquito)
            {
                capture.Read(mat);
                // check bottom line, for mosquito fight it's black
                mat = mat[(int)(mat.Height * 0.983f), mat.Height, 0, mat.Width];

                if (Util.GetAverageBrightness(mat) < Util.DarknessMaxBrightness)
                {
                    type = BossType.Mosquito;
                }
            }

            return(type);
        }
Example #26
        public static void FindTriQuadrant()
        {
            Bitmap image     = (Bitmap)Image.FromFile("res3/supercruisetarget.png");
            Mat    source    = BitmapConverter.ToMat(image);
            Mat    sourceHSV = source.CvtColor(ColorConversionCodes.BGR2HSV);

            /* Paint.Net uses HSV [0..360], [0..100], [0..100].
             * OpenCV uses H: 0 - 180, S: 0 - 255, V: 0 - 255
             * Paint.NET colors:
             * 50   94  100     bright yellow
             * 27   93  90      orange
             * 24   91  74      brown
             * 16   73  25      almost background (low V)
             * suggested range [20..55], [80..100], [50..100] (paint.net)
             * suggested range [10..27], [200..255], [128..255] (openCV)
             * */
            Mat mask = sourceHSV.InRange(InputArray.Create(new int[] { 10, 200, 128 }), InputArray.Create(new int[] { 27, 255, 255 }));
            Mat sourceHSVFiltered = new Mat();

            sourceHSV.CopyTo(sourceHSVFiltered, mask);
            Window w3         = new Window("yellowfilter", sourceHSVFiltered.CvtColor(ColorConversionCodes.HSV2BGR));
            Mat    sourceGrey = sourceHSVFiltered.Split()[2]; // Value channel is pretty good as a greyscale conversion
            Window w4         = new Window("yellowFilterValue", sourceGrey);

            CircleSegment[] circles2 = sourceGrey.HoughCircles(
                HoughMethods.Gradient,
                dp: 1f,       /* resolution scaling factor?  full resolution seems to work better */
                minDist: 100, /* if we find more than one then we go to the second analysis, the crosshair is probably blue as well*/
                param1: 100,  /* default was fine after experimentation */
                param2: 13,   /* required quality factor. 9 finds too many, 14 finds too few */
                minRadius: 40,
                maxRadius: 47);
            foreach (CircleSegment circle in circles2)
            {
                var quarterCircle = new OpenCvSharp.Point2f(circle.Radius, circle.Radius);
                source.Rectangle(circle.Center - quarterCircle, circle.Center + quarterCircle, new Scalar(0, 255, 0));
            }


            Mat    templatepointer = new Mat("res3/squaretarget.png", ImreadModes.GrayScale);
            Mat    matches         = sourceGrey.MatchTemplate(templatepointer, TemplateMatchModes.CCoeffNormed);
            Window w6 = new Window("pointer", matches);

            OpenCvSharp.Point minloc, maxloc;
            matches.MinMaxLoc(out minloc, out maxloc);

            source.Rectangle(maxloc, maxloc + new OpenCvSharp.Point(templatepointer.Size().Width, templatepointer.Size().Height), new Scalar(255, 255, 0));

            Window w5 = new Window("result", source);
        }
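The range conversion spelled out in the comments follows from the two scales: OpenCV halves the hue (360 to 180) and stretches S and V from 0..100 to 0..255. A small helper capturing that arithmetic (a sketch, not in the original code):

        // Sketch: convert a Paint.NET HSV triple (H 0..360, S/V 0..100)
        // to OpenCV's H 0..180, S/V 0..255.
        static int[] PaintNetToOpenCvHsv(int h, int s, int v)
        {
            return(new int[] { h / 2, s * 255 / 100, v * 255 / 100 });
        }

For example, the bright yellow 50/94/100 maps to roughly (25, 240, 255), inside the [10..27], [200..255], [128..255] band used above.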
Example #27
        public static unsafe Mat NormalizeTextImage(
            Mat input,
            TextImageHistStats stats,
            Size size,
            double noiseStdevFactor = 1)
        {
            var x0 = stats.X0;
            var x1 = stats.X1;

            using var inverse_ = x1 < x0 ? ~input : null;
            using var inverse  = inverse_ != null ? (Mat)inverse_ : null;

            if (x1 < x0)
            {
                x0 = 255 - x0;
                x1 = 255 - x1;
            }

            var background =
                (byte)Math.Max((int)(x1 - stats.Stdev1 * noiseStdevFactor), 0);
            var src = inverse ?? input;

            using var mean = src.Blur(size);
            var meanIndex  = mean.GetUnsafeGenericIndexer <byte>();
            var index      = src.GetUnsafeGenericIndexer <byte>();
            var normalized = new Mat(input.Size(), MatType.CV_8UC1);

            normalized.ForEachAsByte((value, position) =>
            {
                var y       = position[0];
                var x       = position[1];
                var invalue = index[y, x];
                var mean    = meanIndex[y, x];

                *value = mean < background ? invalue : background;
            });

            var k      = 255.0 / (background - x0);
            var lookup = new byte[256];

            for (var i = 0; i < lookup.Length; ++i)
            {
                lookup[i] = i <= x0 ? (byte)0 : i >= x1 ?
                            (byte)255 : (byte)Math.Min((i - x0) * k, 255);
            }

            using var lut = InputArray.Create(lookup);

            return(normalized.LUT(lut));
        }
Example #28
        // filter the vertices, because we know fingers have sharp tips
        private IEnumerable <DefectVertex> FilterVerticesWithAngle(IEnumerable <DefectVertex> vertices, int maxAngle)
        {
            return(vertices.Where(x =>
            {
                var vert1 = Cv2.Norm(InputArray.Create(x.d1.Subtract(x.d2).ToVec2i()));
                var vert2 = Cv2.Norm(InputArray.Create(x.point.Subtract(x.d1).ToVec2i()));
                var vert3 = Cv2.Norm(InputArray.Create(x.point.Subtract(x.d2).ToVec2i()));

                var deg = Math.Acos((vert2 * vert2 + vert3 * vert3 - vert1 * vert1) / (2 * vert2 * vert3)) *
                          (180 / Math.PI);

                return deg < maxAngle;
            }));
        }
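The predicate above is the law of cosines. With a = |d1 - d2| the side opposite the defect vertex, and b = |point - d1|, c = |point - d2| the two sides meeting at it (vert1, vert2, vert3 in the code), the vertex angle in degrees is

    theta = arccos((b^2 + c^2 - a^2) / (2*b*c)) * (180 / pi)

and only vertices with theta < maxAngle are kept, since fingertips meet at acute angles while the shallow webbing between fingers does not.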
Example #29
 public override void Apply(Mat input)
 {
     _start = DateTime.Now;
     Input  = input;
     if (IsActive)
     {
         OpenCvSharp.Cv2.CvtColor(InputArray.Create(input), OutputArray.Create(Output), ColorConversionCodes.BGR2HSV);
     }
     else
     {
         OpenCvSharp.Cv2.CopyTo(input, Output);
     }
     base.Apply(input);
 }
Example #30
        public static void MatchCorona()
        {
            Bitmap screen      = new Bitmap("Screenshot_0028.bmp");
            Bitmap cropped     = CompassSensor.Crop(screen, screen.Width * 1 / 3, screen.Height * 1 / 3, screen.Width * 2 / 3, screen.Height * 2 / 3);
            Mat    screenwhole = BitmapConverter.ToMat(cropped);

            // erase the vivid areas, otherwise the blur subtraction turns yellow near red to green
            Mat brightHSV     = screenwhole.CvtColor(ColorConversionCodes.BGR2HSV);
            Mat darkAreasMask = brightHSV.InRange(InputArray.Create(new int[] { 0, 0, 0 }), InputArray.Create(new int[] { 180, 255, 180 }));
            Mat darkAreas     = new Mat();

            screenwhole.CopyTo(darkAreas, darkAreasMask);

            Mat    screenblur = darkAreas - darkAreas.Blur(new OpenCvSharp.Size(10, 10));
            Window w3         = new Window(screenblur);

            //screenblur.SaveImage("sharplines.png");
            Mat sourceHSV = screenblur.CvtColor(ColorConversionCodes.BGR2HSV);

            /* Paint.Net uses HSV [0..360], [0..100], [0..100].
             * OpenCV uses H: 0 - 180, S: 0 - 255, V: 0 - 255
             * Paint.NET colors:
             * 73   100 18     brightest part of green edge
             * 72   98  9      very dark green
             * suggested range [70..180], [80..100], [8..100] (paint.net)
             * suggested range [35..90], [204..255], [20..255] (openCV)
             * */
            Mat mask = sourceHSV.InRange(InputArray.Create(new int[] { 35, 204, 20 }), InputArray.Create(new int[] { 90, 255, 255 }));
            Mat sourceHSVFiltered = new Mat();

            sourceHSV.CopyTo(sourceHSVFiltered, mask);
            Window w5         = new Window("yellowfilter", sourceHSVFiltered.CvtColor(ColorConversionCodes.HSV2BGR));
            Mat    sourceGrey = sourceHSVFiltered.Split()[2].InRange(32, 256); // Value channel is pretty good as a greyscale conversion
            Window w6         = new Window("yellowFilterValue", sourceGrey);

            LineSegmentPoint[] result = sourceGrey.HoughLinesP(1, 3.1415 / 180, 5, 10, 2);
            List <Point2d>     points = new List <Point2d>();

            foreach (var line in result)
            {
                points.Add(line.P1);
                points.Add(line.P2);
                darkAreas.Line(line.P1, line.P2, new Scalar(255, 0, 255));
            }
            CircleSegment c = CruiseSensor.ComputeCircle(points);

            darkAreas.Circle(c.Center, (int)c.Radius, new Scalar(255, 255, 0));
            Window w9 = new Window("final", darkAreas);
        }