Example #1
        public void handleOpenCV(String fileName)
        {
            PARSE_RESULT  = "";
            ERROR_MESSAGE = "";
            BITMAP        = null;

            Mat matRead = Cv2.ImRead(fileName, ImreadModes.Grayscale);

            // CROP: https://076923.github.io/posts/C-opencv-9/
            OpenCvSharp.Rect rect = new OpenCvSharp.Rect(452, 182, 1024, 728);
            Mat subMat            = matRead.SubMat(rect);

            //Cv2.ImShow(strFIle, matRead);
            BITMAP = BitmapConverter.ToBitmap(subMat);
            try
            {
                // System.DllNotFoundException: Failed to find library "leptonica-1.80.0.dll" for platform x86.
                using (var engine = new TesseractEngine(@"./tessdata", "eng", EngineMode.TesseractOnly))
                {
                    using (var page = engine.Process(BITMAP))
                        PARSE_RESULT = page.GetText();
                    Debug.WriteLine(PARSE_RESULT);
                }
            }
            catch (Exception ex)
            {
                ERROR_MESSAGE = ex.ToString();
                Debug.WriteLine(ERROR_MESSAGE);
            }
        }
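The crop rectangle in this example is hard-coded, so an input image smaller than roughly 1476×910 pixels would make SubMat throw. A minimal defensive sketch, reusing the matRead variable from the example; the clamping step is an assumption added here, not part of the original:

            // Sketch: clamp the requested crop to the image bounds before calling SubMat.
            OpenCvSharp.Rect requested = new OpenCvSharp.Rect(452, 182, 1024, 728);
            OpenCvSharp.Rect bounds    = new OpenCvSharp.Rect(0, 0, matRead.Width, matRead.Height);
            OpenCvSharp.Rect safe      = requested.Intersect(bounds); // empty when there is no overlap

            if (safe.Width > 0 && safe.Height > 0)
            {
                Mat subMat = matRead.SubMat(safe);
                // ... continue with BitmapConverter and Tesseract as above
            }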
Example #2
        private void MatchAndReplaceFaceRectangles(FaceAPI.Contract.Face[] faces, OpenCvSharp.Rect[] clientRects)
        {
            // Use a simple heuristic for matching the client-side faces to the faces in the
            // results. Just sort both lists left-to-right, and assume a 1:1 correspondence.

            // Sort the faces left-to-right.
            var sortedResultFaces = faces
                                    .OrderBy(f => f.FaceRectangle.Left + 0.5 * f.FaceRectangle.Width)
                                    .ToArray();

            // Sort the clientRects left-to-right.
            var sortedClientRects = clientRects
                                    .OrderBy(r => r.Left + 0.5 * r.Width)
                                    .ToArray();

            // Assume that the sorted lists now correspond directly. We can simply update the
            // FaceRectangles in sortedResultFaces, because they refer to the same underlying
            // objects as the input "faces" array.
            for (int i = 0; i < Math.Min(faces.Length, clientRects.Length); i++)
            {
                // convert from OpenCvSharp rectangles
                OpenCvSharp.Rect r = sortedClientRects[i];
                sortedResultFaces[i].FaceRectangle = new FaceAPI.Contract.FaceRectangle {
                    Left = r.Left, Top = r.Top, Width = r.Width, Height = r.Height
                };
            }
        }
        private static Cv.Rect FixInvalidRects(Cv.Rect rect, int width, int height)
        {
            if (rect.X < 0)
            {
                rect.X = 0;
            }

            if (rect.Y < 0)
            {
                rect.Y = 0;
            }

            if (rect.X > width)
            {
                rect.Width = width;
            }

            if (rect.Y > height)
            {
                rect.Height = height;
            }

            if (rect.Width + rect.X > width)
            {
                rect.Width = width - rect.X;
            }

            if (rect.Height + rect.Y > height)
            {
                rect.Height = height - rect.Y;
            }

            return(rect);
        }
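FixInvalidRects clamps each edge individually. Where shifting or shrinking the rectangle is acceptable, the same guard can be written as an intersection with the image rectangle; a compact sketch, assuming the same Cv alias for OpenCvSharp used above (note the semantics differ slightly: intersection trims the width/height of rectangles with a negative origin instead of preserving them):

        private static Cv.Rect ClampToImage(Cv.Rect rect, int width, int height)
        {
            // Intersecting with the full image rectangle keeps only the in-bounds part.
            return rect.Intersect(new Cv.Rect(0, 0, width, height));
        }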
    void TransformImage(Point[] corners)
    {
        // Do nothing if the four corners have not been detected
        if (corners == null)
        {
            return;
        }
        // Reorder the four corners
        SortCorners(corners);
        // Coordinates of the four detected corners (the input quadrilateral)
        Point2f[] input = { corners[0], corners[1],
                            corners[2], corners[3] };
        // Coordinates of the four corners of the square image used as the texture
        Point2f[] square =
        { new Point2f(0,              0), new Point2f(0,   255),
          new Point2f(255, 255), new Point2f(255, 0) };
        // Compute the transform that maps the distorted quadrilateral onto the square
        Mat transform = Cv2.GetPerspectiveTransform(input, square);

        // Generate the warped image from the transform parameters
        Cv2.WarpPerspective(bgr, bgr, transform, new Size(256, 256));
        int s = (int)(256 * 0.05f); // The border thickness is designed as 5% of the width
        int w = (int)(256 * 0.9f);  // The inner width is the remaining 90% after subtracting both sides

        OpenCvSharp.Rect innerRect = new OpenCvSharp.Rect(s, s, w, w);
        bgr = bgr[innerRect];
    }
Example #5
    public TempletGenerator(
        int maxTempletSize,
        MatType matType,
        int numRotationSteps,
        Point plantCenter,
        int plantCenterWidth,
        float maxSteamLengthRatio,
        OpenCvSharp.Rect maxMatchingRect,
        Mat plantMask,
        float outsideMaskRatio)
    {
        generatorState = State.NotSet;

        this.templetCenter         = maxTempletSize / 2;
        this.shiftDistance         = plantCenterWidth / 2;
        this.uperPlantCenterCorner = new Point(plantCenter.X - shiftDistance,
                                               plantCenter.Y - shiftDistance);
        this.maxSteamLengthRatio = maxSteamLengthRatio;
        this.maxMatchingRect     = maxMatchingRect;
        this.plantMaskInvert     = 255 - plantMask;
        this.outsideMaskRatio    = outsideMaskRatio;

        rotatPoint        = new RotatPoint(numRotationSteps);
        templetContureMat = Mat.Zeros(maxTempletSize, maxTempletSize, matType);
        templetFillMat    = Mat.Zeros(maxTempletSize, maxTempletSize, matType);

        matchingArea = new Point[8];
        for (int i = 0; i < matchingArea.Length; ++i)
        {
            matchingArea [i] = new Point();
        }
    }
Example #6
    public bool checkAgainstMask(Point location)
    {
        if (generatorState < State.TempletSet)
        {
            Debug.Log("Templet not set.");
            return(false);
        }

        double templetValue = Cv2.Sum(templetFill).Val0;

        OpenCvSharp.Rect maskRect = new OpenCvSharp.Rect(matchingBoxRect.X + location.X,
                                                         matchingBoxRect.Y + location.Y,
                                                         templetFill.Width,
                                                         templetFill.Height);

        Mat mask          = new Mat(plantMaskInvert, maskRect);
        Mat bitwiseResult = new Mat();

        Cv2.BitwiseAnd(mask, templetFill, bitwiseResult);

        double outsideMaskValue        = Cv2.Sum(bitwiseResult).Val0;
        double templetOutsideMaskRatio = 0.0;

        if (templetValue > 0.0)
        {
            templetOutsideMaskRatio = outsideMaskValue / templetValue;
        }

        if (templetOutsideMaskRatio > outsideMaskRatio)
        {
            return(false);
        }
        return(true);
    }
Example #7
    private void FindPlantBounds()
    {
        Mat nonZero = new Mat();

        Cv2.FindNonZero(plantSegmentasionImage, nonZero);
        plantBounds = Cv2.BoundingRect(nonZero);
    }
        /// <summary>
        /// Crops an image to a square shape <br/>
        ///Image processing algorithm
        /// </summary>
        /// <param name="img">input image, of type OpenCvSharp.Mat</param>
        /// <returns>output image, of type OpenCvSharp.Mat</returns>
        public Mat CropToROI(ref Mat img)
        {
            // If the image width and height are equal, no crop is necessary
            if (img.Width == img.Height)
            {
                return(img);
            }

            // If image width is larger than image height
            else if (img.Width > img.Height)
            {
                this.ROI = new OpenCvSharp.Rect((img.Width - img.Height) / 2, 0, img.Height, img.Height);
            }

            // If image height is larger than image width
            else
            {
                this.ROI = new OpenCvSharp.Rect(0, (img.Height - img.Width) / 2, img.Width, img.Width);
            }

            // Create empty image with specified size and fill it with data
            new Mat(img, ROI).CopyTo(newImgROI);

            return(newImgROI);
        }
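A hypothetical call site for CropToROI; the file names are illustrative, and the newImgROI backing field is assumed to be declared on the same class:

            Mat frame  = Cv2.ImRead("photo.jpg", ImreadModes.Color); // hypothetical input
            Mat square = CropToROI(ref frame);                       // centered square crop
            Cv2.ImWrite("photo_square.jpg", square);                 // hypothetical output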
Example #9
        void crop_image(Mat outimg)
        {
            try
            {
                Mat copy = new Mat();
                outimg.CopyTo(copy);
                OpenCvSharp.Rect rect = new OpenCvSharp.Rect(500, 0, 1100, 1080);

                crop = new Mat(outimg, rect);
                crop.CopyTo(outputimg);

                // Cv2.ImWrite("crop" + ".bmp", crop);
                if (outputimg.Channels() == 1)
                {
                    Cv2.CvtColor(outputimg, outputimg, ColorConversionCodes.GRAY2BGR);
                }
                thread(crop);
                Holes1(crop);
            }
            catch (Exception Ex)
            {
                //MessageBox.Show(Ex.Message.ToString());
                log.Error("Error Message: " + Ex.Message.ToString(), Ex);
            }
        }
        private List <List <Cv.Rect> > GroupRects(List <Cv.Rect> rects, Mat debug)
        {
            rects.Sort((l, r) => l.Y.CompareTo(r.Y));

            List <List <Cv.Rect> > grouped       = new List <List <Cv.Rect> >();
            List <int>             ignoreIndices = new List <int>();

            for (int i = 0; i < rects.Count; i++)
            {
                if (ignoreIndices.Contains(i))
                {
                    continue;
                }

                var current = rects[i];

                if (IsTooLarge(current, debug))
                {
                    continue;
                }

                List <Cv.Rect> group = new List <Cv.Rect>();
                group.Add(rects[i]);

                for (int j = i + 1; j < rects.Count; j++)
                {
                    if (IsTooLarge(rects[j], debug))
                    {
                        ignoreIndices.Add(j);
                        continue;
                    }

                    if (rects[j].Y > current.Y + current.Height + (rects[j].Height * 2))
                    {
                        continue;
                    }

                    var l = new Cv.Rect(rects[i].X, 0, rects[i].Width, rects[i].Height);
                    var r = new Cv.Rect(rects[j].X, 0, rects[j].Width, rects[j].Height);

                    if (l.IntersectsWith(r))
                    {
                        var intersect     = l.Intersect(r);
                        var intersectArea = intersect.Width * intersect.Height;
                        var lArea         = rects[i].Width * rects[i].Height;
                        var rArea         = rects[j].Width * rects[j].Height;

                        if (intersectArea > (lArea * 0.25) && intersectArea > (rArea * 0.25))
                        {
                            current = current.Union(rects[j]);
                            group.Add(rects[j]);
                        }
                    }
                }

                grouped.Add(group);
            }

            return(grouped);
        }
        private OpenCvSharp.Rect AdjustBoundingBox(OpenCvSharp.Rect faceRect)
        {
            int w = faceRect.Width;
            int h = faceRect.Height;

            faceRect.X -= (int)(0.067 * w);
            faceRect.Y -= (int)(0.028 * h);

            faceRect.Width  += (int)(0.15 * w);
            faceRect.Height += (int)(0.13 * h);

            if (faceRect.Width < faceRect.Height)
            {
                var dx = (faceRect.Height - faceRect.Width);
                faceRect.X     -= dx / 2;
                faceRect.Width += dx;
            }
            else
            {
                var dy = (faceRect.Width - faceRect.Height);
                faceRect.Y      -= dy / 2;
                faceRect.Height += dy;
            }
            return(faceRect);
        }
Example #12
        public Mat Overlay(Mat frame, OpenCvSharp.Point offset)
        {
            if (this.State != GmapState.Collapsed && this.Gmap.Empty())
            {
                return(frame);
            }

            var area = new OpenCvSharp.Rect(offset, this.GetGmapSize(frame));

            if (this.State == GmapState.Collapsed)
            {
                frame = frame.CvtColor(ColorConversionCodes.BGR2BGRA);
                var icon = this._icon.Resize(area.Size);
                var mask = this._mask.Resize(area.Size);
                icon.CopyTo(new Mat(frame, area), mask);
                frame = frame.CvtColor(ColorConversionCodes.BGRA2BGR);
            }
            else if (this.State == GmapState.Expanded)
            {
                var gmap = this.Gmap.Resize(area.Size);
                gmap.CopyTo(new Mat(frame, area));
            }
            else
            {
                var gmap = this.Gmap.Resize(area.Size);
                frame = gmap;
            }

            this._currentFrame = frame;
            return(this._currentFrame);
        }
Example #13
		protected override bool ProcessTexture(WebCamTexture input, ref Texture2D output) {
			// 1. Obtain Frame
			Mat img = Unity.TextureToMat (input, TextureParameters);

			// 2. Resize the frame; for now the best size to analyze is 10% of the original 1280*720 size
			Mat rzImg = img.Resize (Size.Zero, rs.downScale, rs.downScale);

			// 3. Calculate and draw rect
			OpenCvSharp.Rect fgRect = rs.DrawRect (imageTransform, img, rzImg, TextureParameters, ref output);

			if (rs.isTracking) {
				// 4. Apply GrabCut; initializing with about 5 iterations is recommended
				Mat mask = new Mat (Size.Zero, MatType.CV_8U);
				Mat bgModel = new Mat ();
				Mat fgModel = new Mat ();
				Cv2.GrabCut (rzImg, mask, fgRect, bgModel, fgModel, iterations, GrabCutModes.InitWithRect);

				// 5. Use the foreground/background classes as a mask
				mask = (mask & 1) * 255; // Convert every non-zero value in the mask to 255 (white)
				mask = mask.Resize (new Size (img.Width, img.Height), 0, 0, InterpolationFlags.Lanczos4);

				// 6. Apply the mask while copying the original image onto a green background
				Mat bg = new Mat (img.Size(), rzImg.Type(), green);
				img.CopyTo (bg, mask);

				// 7. Show the result
				output = Unity.MatToTexture (bg, output);
			}
			return true;
		}
Example #14
        void Button2Click(object sender, EventArgs e)
        {
            Bitmap roibmp;
            Pix    roipix;
            Mat    src;

            buttonRight.Enabled = false;
            buttonNext.Enabled  = false;
            button2.Enabled     = false;
            OpenCvSharp.Rect roi = ramka.getRoi();
            Cursor = Cursors.WaitCursor;
            if (roi.X != -1)
            {
                src = ramka.Image[roi];
            }
            else
            {
                src = ramka.Image;
            }
            roibmp = src.ToBitmap();
            roipix = PixConverter.ToPix(roibmp);
            Pix roipix1 = roipix.Deskew();

            Bitmap newbm  = PixConverter.ToBitmap(roipix1);
            Mat    newmat = BitmapConverter.ToMat(newbm);

            ramka.Image = newmat;
            Cursor      = Cursors.Default;
            src.Dispose();
            newbm.Dispose();
            roipix.Dispose();
            buttonRight.Enabled = true;
            buttonNext.Enabled  = true;
            button2.Enabled     = true;
        }
Example #15
        public Reading Analyze(Mat input)
        {
            var resized   = input.ResizePreserveAspectRatio(maxSize: _settings.InputAnalysisMaxSize);
            var grayScale = resized.Image.CvtColor(ColorConversionCodes.BGR2GRAY);

            var noiseReduced = grayScale.Erode(new Mat()).Dilate(new Mat());

            var meterNumberRectangles = DetectMeterNumbers(noiseReduced);

            if (!meterNumberRectangles.Any())
            {
                return(new Reading(""));
            }

            Mat[] croppedImages;
            if (_settings.DarkSectors)
            {
                croppedImages = meterNumberRectangles.Select(r =>
                {
                    var rect = new Rect(r.Position.X, r.Position.Y, r.Width, r.Height);
                    return(new Mat(noiseReduced, rect)
                           .Threshold(0, 255, ThresholdTypes.Otsu)
                           .Erode(new Mat())
                           .ResizePreserveAspectRatio(_settings.DarkSectorsSeparateMeterNumbersResizeMaxSize).Image);
                }).ToArray();
            }
            else
            {
                croppedImages = meterNumberRectangles.Select(r =>
                {
                    var rect = new Rect(r.Position.X, r.Position.Y, r.Width, r.Height);
                    return(new Mat(noiseReduced, rect)
                           .AdaptiveThreshold(255, AdaptiveThresholdTypes.GaussianC, ThresholdTypes.BinaryInv, _settings.LightSectorsAdaptiveThresholdBlockSize, _settings.LightSectorsAdaptiveThresholdC)
                           .ResizePreserveAspectRatio(_settings.LightSectorsSeparateMeterNumbersResizeMaxSize).Image);
                }).ToArray();

                croppedImages = croppedImages.Select(i =>
                {
                    var rect = new BlobDetector().GetLargestBlob(i);
                    return(new Mat(i, rect));
                }).ToArray();
            }

            var resizedCroppedNumbers = croppedImages.ResizeToAverageHeight();

            var combinedNumbers = resizedCroppedNumbers.Skip(1)
                                  .Aggregate(croppedImages[0], (a, b) => a.CombineImages(b), r => r);

            var numbers = combinedNumbers.WidenEdges(10);

            if (!_settings.DarkSectors)
            {
                Cv2.BitwiseNot(numbers, numbers);
            }

            var reading = new Reading(InsertDecimal(_stringExtractor.GetStringFromImage(numbers)),
                                      GetScaledRectangles(resized.OriginalScale, meterNumberRectangles));

            return(reading);
        }
Example #16
 public static void ClickImg(Rect r, bool isClick = true)
 {
     MoveTo(r, false);
     if (isClick)
     {
         LeftClick();
     }
 }
Example #17
    public static void Trnstxt(GameObject a, RectTransform rt, OpenCvSharp.Rect r)
    {
        var newVec = new Vector2(r.X, -r.Y) - new Vector2(rt.sizeDelta.x / 2.0f, -rt.sizeDelta.y / 2.0f);

        Debug.LogFormat("r : {0}, sizeDelta : {1}", new Vector2(r.X, r.Y), new Vector2(rt.sizeDelta.x, rt.sizeDelta.y));
        //a.transform.localPosition = (newVec);
        a.transform.localPosition = (newVec + new Vector2((float)r.Width / 2.0f, 0)) * InstantiateText.Canvas.transform.localScale.x;
    }
Example #18
        Mat Clamp(Mat mat, double maxWidth, double maxHeight)
        {
            var marginX = Math.Max(0, (int)Math.Ceiling((mat.Width - maxWidth) / 2));
            var marginY = Math.Max(0, (int)Math.Ceiling((mat.Height - maxHeight) / 2));
            var rect    = new OpenCvSharp.Rect(marginX, marginY, mat.Width - marginX * 2, mat.Height - marginY * 2);

            return(new Mat(mat, rect));
        }
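For reference, a hypothetical use of Clamp that center-crops an oversized frame to at most 640×480; the input path is illustrative:

            Mat frame   = Cv2.ImRead("frame.png");  // hypothetical input
            Mat clamped = Clamp(frame, 640, 480);   // rows and columns outside the centered region are cropped away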
Example #19
 public void SetCastableDetectionArea(System.Windows.Rect captureRect)
 {
     this.CastableDetectionArea = new Rect(
         (int)captureRect.X,
         (int)captureRect.Y,
         (int)captureRect.Width,
         (int)captureRect.Height
         );
 }
Example #20
 public void SetCaptureRect(System.Windows.Rect captureRect)
 {
     this.CaptureRect = new Rect(
         (int)captureRect.X,
         (int)captureRect.Y,
         (int)captureRect.Width,
         (int)captureRect.Height
         );
 }
Example #21
        private void Detect(ref Mat src_img, ref Mat src_img2, int index)
        {
            Mat dst_img = new Mat();

            List <float> Xpoint = new List <float>();
            List <float> Ypoint = new List <float>();

            // Store the template-matching scores for every position in the empty Mat
            Cv2.MatchTemplate(src_img, _templateNumeric[index], dst_img, TemplateMatchModes.CCoeffNormed);

            // Zero out scores at or below the 0.7 threshold
            Cv2.Threshold(dst_img, dst_img, 0.70, 1.0, ThresholdTypes.Tozero);

            // Store the coordinates of the remaining non-zero scores in Xpoint and Ypoint
            for (int x = 0; x < dst_img.Rows; x++)
            {
                for (int y = 0; y < dst_img.Cols; y++)
                {
                    if (dst_img.At <float>(x, y) > 0)
                    {
                        Xpoint.Add(y);
                        Ypoint.Add(x);
                    }
                }
            }

            dst_img.Dispose();

            List <OpenCvSharp.Rect> rectangles = new List <OpenCvSharp.Rect>();

            for (int i = 0; i < Xpoint.Count; i++)
            {
                OpenCvSharp.Rect rect        = new OpenCvSharp.Rect(new OpenCvSharp.Point(Xpoint[i], Ypoint[i]), _templateNumeric[index].Size());
                bool             isIntersect = false;

                foreach (OpenCvSharp.Rect r in rectangles)
                {
                    if (isIntersect = r.IntersectsWith(rect))
                    {
                        break;
                    }
                }

                if (isIntersect)
                {
                    continue;
                }

                rectangles.Add(rect);

                Cv2.Rectangle(src_img2, rect, _colors[index], 2);

                Cv2.PutText(src_img2, index.ToString(), rect.Location, HersheyFonts.HersheySimplex, 1, _colors[index], 2);
            }
        }
Example #22
        // Template matching
        public static List <Mat> MyMatchTemplate(Mat img, OpenCvSharp.Rect img_roi, string templatename, double threshold, int tubey, List <OpenCvSharp.Rect> rect_list = null)
        {
            List <Mat> imgs     = new List <Mat>();
            Stopwatch  sw       = new Stopwatch();
            Mat        img_gray = new Mat();

            img = img.Clone();
            img = img.SubMat(img_roi);
            Cv2.CvtColor(img, img_gray, ColorConversionCodes.BGR2GRAY);
            var    template = GetTimplateImage(templatename);
            Mat    result = new Mat();
            double minVal, maxVal;

            OpenCvSharp.Point minLoc, maxLoc, maxFloc;
            Cv2.MatchTemplate(img_gray, template, result, TemplateMatchModes.CCoeffNormed);
            Cv2.MinMaxLoc(result, out minVal, out maxVal, out minLoc, out maxLoc);
            maxFloc = maxLoc;
            int count = 0;

            OpenCvSharp.Rect last_rect = new OpenCvSharp.Rect();
            int ww = img.Width / template.Width - 1;

            for (int i = 0; i < ww; i++)
            {
                int px = i * template.Width;
                int py = maxFloc.Y;
                OpenCvSharp.Rect roi = new OpenCvSharp.Rect(px, py, template.Width, template.Height);
                if (roi.Y + roi.Height > result.Height)
                {
                    roi.Height = result.Height - roi.Y;
                }
                Mat RoiResult = new Mat(result, roi);
                Cv2.MinMaxLoc(RoiResult, out minVal, out maxVal, out minLoc, out maxLoc); // Find extrema
                var rect = new OpenCvSharp.Rect(px + maxLoc.X, py + maxLoc.Y, template.Width, template.Height);
                if (maxVal > threshold && Math.Abs(last_rect.X - rect.X) > 50)
                {
                    count++;
                    if (tubey != 0)
                    {
                        rect.Y = tubey - img_roi.Y;
                    }
                    Mat retimg = new Mat(img, rect);
                    if (rect_list != null)
                    {
                        rect_list.Add(rect);
                    }
                    last_rect = rect;
                    imgs.Add(retimg);
                    if (count >= 8)
                    {
                        break;
                    }
                }
            }
            return(imgs);
        }
Example #23
        public void GetRects(Mat temp, Mat wafer)
        {
            // Read the images
            Mat result = new Mat(); // Matching result

            // Template matching
            Cv2.MatchTemplate(wafer, temp, result, TemplateMatchModes.CCoeffNormed); // A perfect match scores 1; smaller values mean a worse match
            Double minVul;
            Double maxVul;

            OpenCvSharp.Point minLoc   = new OpenCvSharp.Point(0, 0);
            OpenCvSharp.Point maxLoc   = new OpenCvSharp.Point(0, 0);
            OpenCvSharp.Point matchLoc = new OpenCvSharp.Point(0, 0);
            Cv2.Normalize(result, result, 0, 1, NormTypes.MinMax, -1);             // Normalize
            Cv2.MinMaxLoc(result, out minVul, out maxVul, out minLoc, out maxLoc); // Find extrema
            matchLoc = maxLoc;                                                     // Coordinates of the maximum
            //result.Set(matchLoc.Y, matchLoc.X, 0); // Change the maximum into the minimum
            Mat mask = wafer.Clone();                                              // Copy the whole matrix

            // Draw the match rectangle
            Cv2.Rectangle(mask, matchLoc, new OpenCvSharp.Point(matchLoc.X + temp.Cols, matchLoc.Y + temp.Rows), Scalar.Green, 2);

            Console.WriteLine("最大值:{0},X:{1},Y:{2}", maxVul, matchLoc.Y, matchLoc.X);
            Console.WriteLine("At获取最大值(Y,X):{0}", result.At <float>(matchLoc.Y, matchLoc.X));
            Console.WriteLine("result的类型:{0}", result.GetType());

            // Loop over the result, find matches, and draw rectangles
            Double threshold = 0.8;
            Mat    maskMulti = wafer.Clone();                                                 // Copy the whole matrix

            for (int i = 1; i < result.Rows - temp.Rows; i += temp.Rows)                      // Iterate over rows
            {
                for (int j = 1; j < result.Cols - temp.Cols; j += temp.Cols)                  // Iterate over columns
                {
                    OpenCvSharp.Rect roi = new OpenCvSharp.Rect(j, i, temp.Cols, temp.Rows);  // Region of interest
                    Mat RoiResult        = new Mat(result, roi);
                    Cv2.MinMaxLoc(RoiResult, out minVul, out maxVul, out minLoc, out maxLoc); // Find extrema
                    matchLoc = maxLoc;                                                        // Coordinates of the maximum
                    if (maxVul > threshold)
                    {
                        // Draw a rectangle around the match
                        Cv2.Rectangle(maskMulti, new OpenCvSharp.Point(j + maxLoc.X, i + maxLoc.Y), new OpenCvSharp.Point(j + maxLoc.X + temp.Cols, i + maxLoc.Y + temp.Rows), Scalar.Green, 2);
                        string axis = '(' + Convert.ToString(i + maxLoc.Y) + ',' + Convert.ToString(j + maxLoc.X) + ')';
                        Cv2.PutText(maskMulti, axis, new OpenCvSharp.Point(j + maxLoc.X, i + maxLoc.Y), HersheyFonts.HersheyPlain, 1, Scalar.Red, 1, LineTypes.Link4);
                    }
                }
            }


            // Show the image in a new window
            Cv2.Resize(maskMulti, maskMulti, new OpenCvSharp.Size(512, 384));
            using (new OpenCvSharp.Window("maskMulti image", maskMulti))
            {
                Cv2.WaitKey();
            }
        }
Example #24
    public static GameObject Instxt(RectTransform rt, OpenCvSharp.Rect r)
    {
        var newVec = new Vector2((float)r.X, -(float)r.Y) - new Vector2(rt.sizeDelta.x / 2.0f, -rt.sizeDelta.y / 2.0f);

        Debug.Log(newVec);
        GameObject a = GameObject.Instantiate(Text, Canvas.transform);

        a.transform.localPosition = (newVec + new Vector2((float)r.Width / 2.0f, 0)) * Canvas.transform.localScale.x;
        return(a);
    }
Example #25
    public void RunScript()
    {
        // Check that a file exists at the given path.
        if (!File.Exists(imageFilePath))
        {
            Debug.Log("Image File do not exist!");
            //return;
        }

        Mat image = Cv2.ImRead(imageFilePath, ImreadModes.Color);

        if (image.Empty())
        {
            Debug.Log("No readable image file.");
            return;
        }

        Cv2.NamedWindow("Image", WindowMode.KeepRatio);
        Cv2.NamedWindow("Image2", WindowMode.KeepRatio);
        Cv2.NamedWindow("subImage", WindowMode.KeepRatio);
        Cv2.ImShow("Image", image);

//		OpenCvSharp.Rect roi = new OpenCvSharp.Rect (image.Width / 4, image.Height / 4, image.Width / 2, image.Height / 2);
//		Mat subImage = new Mat (image, roi);
//		//subImage = new Scalar (255, 255, 255) - subImage;
//		Cv2.Add (subImage, subImage, subImage);
//		Cv2.ImShow ("subImage", subImage);
//		Cv2.ImShow ("Image2", image);

        Mat imageGray = image.CvtColor(ColorConversionCodes.BGR2GRAY) / 10;

        // NB! EmptyClone does not clear memory; stale content in memory causes glitches
        //Mat canvas = imageGray.EmptyClone ();
        Mat canvas = Mat.Zeros(imageGray.Size(), imageGray.Type());



        Vector2 drawingCanvasSize = new Vector2(canvas.Width - squareSize, canvas.Height - squareSize);

        for (int i = 0; i < iteration; ++i)
        {
            OpenCvSharp.Rect drawingRegion = new OpenCvSharp.Rect(
                (int)Random.Range(0, drawingCanvasSize.x - 1),
                (int)Random.Range(0, drawingCanvasSize.y - 1),
                squareSize, squareSize);

            Mat drawingCanvas = new Mat(canvas, drawingRegion);
            Mat drawingSource = new Mat(imageGray, drawingRegion);

            Cv2.Add(drawingCanvas, drawingSource, drawingCanvas);

            Cv2.ImShow("Image2", canvas);
        }
    }
Example #26
    public static void render_2D(ref OpenCvSharp.Mat left_display, sl.float2 img_scale, ref sl.Objects objects, bool render_mask, bool isTrackingON)
    {
        OpenCvSharp.Mat  overlay    = left_display.Clone();
        OpenCvSharp.Rect roi_render = new OpenCvSharp.Rect(0, 0, left_display.Size().Width, left_display.Size().Height);

        OpenCvSharp.Mat mask = new OpenCvSharp.Mat(left_display.Rows, left_display.Cols, OpenCvSharp.MatType.CV_8UC1);

        int line_thickness = 2;

        for (int i = 0; i < objects.numObject; i++)
        {
            sl.ObjectData obj = objects.objectData[i];
            if (Utils.renderObject(obj, isTrackingON))
            {
                OpenCvSharp.Scalar base_color = Utils.generateColorID_u(obj.id);

                // Display the 2D bounding box at image scale
                if (obj.boundingBox2D.Length < 4)
                {
                    continue;
                }

                Point top_left_corner     = Utils.cvt(obj.boundingBox2D[0], img_scale);
                Point top_right_corner    = Utils.cvt(obj.boundingBox2D[1], img_scale);
                Point bottom_right_corner = Utils.cvt(obj.boundingBox2D[2], img_scale);
                Point bottom_left_corner  = Utils.cvt(obj.boundingBox2D[3], img_scale);

                // Creation of the two horizontal lines
                Cv2.Line(left_display, top_left_corner, top_right_corner, base_color, line_thickness);
                Cv2.Line(left_display, bottom_left_corner, bottom_right_corner, base_color, line_thickness);
                // Creation of two vertical lines
                Utils.drawVerticalLine(ref left_display, bottom_left_corner, top_left_corner, base_color, line_thickness);
                Utils.drawVerticalLine(ref left_display, bottom_right_corner, top_right_corner, base_color, line_thickness);

                // Scaled ROI
                OpenCvSharp.Rect roi = new OpenCvSharp.Rect(top_left_corner.X, top_left_corner.Y, (int)top_right_corner.DistanceTo(top_left_corner), (int)bottom_right_corner.DistanceTo(top_right_corner));

                overlay.SubMat(roi).SetTo(base_color);

                sl.float2 position_image = getImagePosition(obj.boundingBox2D, img_scale);
                Cv2.PutText(left_display, obj.label.ToString(), new Point(position_image.x - 20, position_image.y - 12), HersheyFonts.HersheyComplexSmall, 0.5f, new Scalar(255, 255, 255, 255), 1);

                if (!float.IsInfinity(obj.position.Z))
                {
                    string text = Math.Abs(obj.position.Z).ToString("0.##M");
                    Cv2.PutText(left_display, text, new Point(position_image.x - 20, position_image.y), HersheyFonts.HersheyComplexSmall, 0.5, new Scalar(255, 255, 255, 255), 1);
                }
            }
        }

        // Here, overlay is the same as the left image, but with an opaque mask over each detected object
        Cv2.AddWeighted(left_display, 0.7, overlay, 0.3, 0.0, left_display);
    }
Example #27
        public static unsafe Mat ExtractRect(Mat src, OpenCvSharp.Rect position)
        {
            Mat   ret          = new Mat(position.Height, position.Width, MatType.CV_8UC3);
            int   top          = position.Top;
            int   bottom       = position.Bottom;
            int   left         = position.Left;
            int   right        = position.Right;
            int   src_channels = src.Channels();
            long  ret_step     = ret.Step();
            long  src_step     = src.Step();
            int   src_height   = src.Height;
            int   src_width    = src.Width;
            byte *ps           = (byte *)src.Data.ToPointer();
            byte *pr           = (byte *)ret.Data.ToPointer();

#if RELEASE_PARALLEL
            Parallel.For(0, src_height, y =>
            {
                if (top <= y && y <= bottom)
                {
                    for (int x = 0; x < src_width; ++x)
                    {
                        if (left <= x && x <= right)
                        {
                            for (int c = 0; c < src_channels; ++c)
                            {
                                *(pr + (y - top) * ret_step + (x - left) * src_channels + c) = *(ps + y * src_step + x * src_channels + c);
                            }
                        }
                    }
                }
            });
#else
            for (int y = 0; y < src_height; ++y)
            {
                if (top <= y && y <= bottom)
                {
                    for (int x = 0; x < src_width; ++x)
                    {
                        if (left <= x && x <= right)
                        {
                            for (int c = 0; c < src_channels; ++c)
                            {
                                *(pr + (y - top) * ret_step + (x - left) * src_channels + c) = *(ps + y * src_step + x * src_channels + c);
                            }
                        }
                    }
                }
            }
#endif

            return(ret);
        }
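The manual per-pixel copy above assumes a CV_8UC3 source and a rectangle that lies fully inside it. Under the same assumptions, OpenCvSharp can produce an equivalent deep copy of the region in one step; a minimal sketch:

        // Sketch: SubMat creates a view onto src; Clone() copies that view into its own buffer.
        public static Mat ExtractRectSimple(Mat src, OpenCvSharp.Rect position)
        {
            return src.SubMat(position).Clone();
        }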
Example #28
    void drawColorSampler(Mat input)
    {
        int frameWidth = input.Size().Width, frameHeight = input.Size().Height;

        int    rectangleSize  = 40;
        Scalar rectangleColor = new Scalar(255, 0, 255);

        skinColorSamplerRectangle1 = new OpenCvSharp.Rect(160, frameHeight / 2, rectangleSize, rectangleSize);
        skinColorSamplerRectangle2 = new OpenCvSharp.Rect(160, frameHeight / 3, rectangleSize, rectangleSize);

        Cv2.Rectangle(
            input,
            skinColorSamplerRectangle1,
            rectangleColor
            );

        Cv2.Rectangle(
            input,
            skinColorSamplerRectangle2,
            rectangleColor
            );

        backgroundColorSamplerRectangle1 = new OpenCvSharp.Rect(80, 80, rectangleSize, rectangleSize);
        backgroundColorSamplerRectangle2 = new OpenCvSharp.Rect(560, 80, rectangleSize, rectangleSize);
        backgroundColorSamplerRectangle3 = new OpenCvSharp.Rect(80, 400, rectangleSize, rectangleSize);
        backgroundColorSamplerRectangle4 = new OpenCvSharp.Rect(560, 400, rectangleSize, rectangleSize);

        Cv2.Rectangle(
            input,
            backgroundColorSamplerRectangle1,
            rectangleColor
            );

        Cv2.Rectangle(
            input,
            backgroundColorSamplerRectangle2,
            rectangleColor
            );

        Cv2.Rectangle(
            input,
            backgroundColorSamplerRectangle3,
            rectangleColor
            );

        Cv2.Rectangle(
            input,
            backgroundColorSamplerRectangle4,
            rectangleColor
            );
    }
        private List <Cv.Rect> MergeRects(List <Cv.Rect> rects, int width, int height)
        {
            List <int> ignoreIndices = new List <int>();

            rects.Sort((l, r) => l.X.CompareTo(r.X));

            for (int i = 0; i < rects.Count; i++)
            {
                if (ignoreIndices.Contains(i))
                {
                    continue;
                }

                for (int j = i + 1; j < rects.Count; j++)
                {
                    if (rects[j].Height < 5 || rects[j].Width < 5)
                    {
                        ignoreIndices.Add(j);
                        continue;
                    }

                    var hExpand = (int)Math.Round(width * 0.01, 0);
                    var r       = new Cv.Rect(rects[j].X, rects[j].Y, rects[j].Width, rects[j].Height);
                    r.Inflate(hExpand / 2, 0);

                    if (rects[i].IntersectsWith(r))
                    {
                        var union = rects[i].Union(rects[j]);

                        if (union.Width < width * 0.30 && union.Height < height * 0.05)
                        {
                            rects[i] = union;
                            ignoreIndices.Add(j);
                        }
                    }
                }
            }

            var merged = new List <Cv.Rect>();

            for (int i = 0; i < rects.Count; i++)
            {
                rects[i] = FixInvalidRects(rects[i], width, height);

                if (!ignoreIndices.Contains(i))
                {
                    merged.Add(rects[i]);
                }
            }
            return(merged);
        }
Example #30
        public bool CreateGrabCut(ref System.Drawing.Bitmap src, out System.Drawing.Bitmap dst, bool displayWindows = false)
        {
            dst = null;
            Mat srcImg = BitmapConverter.ToMat(src);

            Cv2.CvtColor(srcImg, srcImg, ColorConversionCodes.BGRA2BGR);
            Mat mask = new Mat(new OpenCvSharp.Size(src.Width, src.Height), MatType.CV_8UC1, 0);

            //dilate process
            //Cv2.Dilate(srcImg, dstImg, new Mat());

            //grabcut
            //Mat bgdModel = new Mat(new OpenCvSharp.Size(65, 1), MatType.CV_64FC1);
            //Mat fgdModel = new Mat(new OpenCvSharp.Size(65, 1), MatType.CV_64FC1);

            Mat bgdModel = new Mat();
            Mat fgdModel = new Mat();

            OpenCvSharp.Rect r = new OpenCvSharp.Rect(50, 50, (int)Width - 100, (int)Height - 100);
            Cv2.GrabCut(srcImg, mask, r, bgdModel, fgdModel, 1, GrabCutModes.InitWithRect);

            for (int i = mask.Cols / 2 - 50; i < mask.Cols / 2 + 50; i++)
            {
                for (int j = mask.Rows / 2 - 25; j < mask.Rows / 2 + 75; j++)
                {
                    mask.Set <byte>(j, i, 1);
                }
            }

            Cv2.GrabCut(srcImg, mask, r, bgdModel, fgdModel, 1, GrabCutModes.InitWithMask);

            for (int i = 0; i < mask.Cols; i++)
            {
                for (int j = 0; j < mask.Rows; j++)
                {
                    byte e = mask.Get <byte>(j, i);
                    if (e == 0 || e == 2)
                    {
                        mask.Set <byte>(j, i, 0);
                    }
                    else
                    {
                        mask.Set <byte>(j, i, 255);
                    }
                }
            }
            Mat res = srcImg.Clone();

            dst = BitmapConverter.ToBitmap(mask);
            return(true);
        }
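The nested loops that turn GrabCut classes into a binary mask can also be written with Mat arithmetic, as Example #13 does; a minimal sketch (classes 0/2 are background and 1/3 foreground, so the low bit selects the foreground):

            // Equivalent to the per-pixel loop above: keep the low bit and scale it to 0/255.
            mask = (mask & 1) * 255;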
	// Use this for initialization
	private void Start () {						
		if (isVid) {
			frame = new Mat ();
			//gray = new Mat();
			cap = new VideoCapture (1);
			tex = new Texture2D (cap.FrameWidth, cap.FrameHeight);
			bkrnd_win_size = 20; //cap.FrameWidth / 5;
			cap.Read (frame);
		} else {
			frame = new Mat(Application.dataPath + "/profile_photo.png", ImreadModes.Color);
			tex = new Texture2D (frame.Width, frame.Height);
			bkrnd_win_size = 20;//frame.Width / 5;
		}
		frame_backproj = new Mat ();
		mask = new Mat ();
		tex.LoadImage (frame.ToBytes (".png", new int[]{0}));
		go.GetComponent<Renderer> ().material.mainTexture = tex;
		//myDetector = new CascadeClassifier ("C:/Users/admin/opencv/build/share/OpenCV/haarcascades/haarcascade_frontalface_default.xml");
		bkrnd_rect = new OpenCvSharp.Rect(1,1,bkrnd_win_size,bkrnd_win_size);

	}