Example #1
        void DoProcess()
        {
            if (!(GT.Value is OpenCVForUnityPlayMakerActions.Mat))
            {
                LogError("GT is not initialized. Add Action \"newMat\".");
                return;
            }
            OpenCVForUnity.CoreModule.Mat wrapped_GT = OpenCVForUnityPlayMakerActionsUtils.GetWrappedObject <OpenCVForUnityPlayMakerActions.Mat, OpenCVForUnity.CoreModule.Mat>(GT);

            if (!(src.Value is OpenCVForUnityPlayMakerActions.Mat))
            {
                LogError("src is not initialized. Add Action \"newMat\".");
                return;
            }
            OpenCVForUnity.CoreModule.Mat wrapped_src = OpenCVForUnityPlayMakerActionsUtils.GetWrappedObject <OpenCVForUnityPlayMakerActions.Mat, OpenCVForUnity.CoreModule.Mat>(src);

            if (!(ROI.Value is OpenCVForUnityPlayMakerActions.Rect))
            {
                LogError("ROI is not initialized. Add Action \"newRect\".");
                return;
            }
            OpenCVForUnity.CoreModule.Rect wrapped_ROI = OpenCVForUnityPlayMakerActionsUtils.GetWrappedObject <OpenCVForUnityPlayMakerActions.Rect, OpenCVForUnity.CoreModule.Rect>(ROI);

            storeResult.Value = (float)OpenCVForUnity.XimgprocModule.Ximgproc.computeBadPixelPercent(wrapped_GT, wrapped_src, wrapped_ROI);
        }
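Example #1 wraps Ximgproc.computeBadPixelPercent for PlayMaker. A minimal sketch of calling the same API directly, without the wrapper (the method and parameter names below are illustrative; the two Mats are assumed to be ground-truth and estimated disparity maps of equal size):

        // Sketch: direct call to Ximgproc.computeBadPixelPercent.
        float ComputeBadPixelPercent(OpenCVForUnity.CoreModule.Mat groundTruthDisparity,
                                     OpenCVForUnity.CoreModule.Mat estimatedDisparity)
        {
            // Compare over the whole image; pass a smaller Rect to restrict the evaluation region.
            OpenCVForUnity.CoreModule.Rect roi =
                new OpenCVForUnity.CoreModule.Rect(0, 0, groundTruthDisparity.width(), groundTruthDisparity.height());

            return (float)OpenCVForUnity.XimgprocModule.Ximgproc.computeBadPixelPercent(
                groundTruthDisparity, estimatedDisparity, roi);
        }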
Example #2
    public bool GetPosition(Mat frame, bool isKeyboardFound)
    {
        Mat frameProc = new Mat(); //frame.rows(), frame.cols(), CvType.CV_16UC3
        Mat frameMask = new Mat();
        Mat hierarchy = new Mat();

        Imgproc.cvtColor(frame, frameProc, Imgproc.COLOR_BGR2HSV);
        Scalar lowerB = new Scalar(HueLower, SatLower, ValLower);
        Scalar upperB = new Scalar(HueUpper, SatUpper, ValUpper);

        Core.inRange(frameProc, lowerB, upperB, frameMask);
        Core.bitwise_and(frame, frame, frameProc, frameMask);
        //Imgproc.bilateralFilter(frameProc, frameProc, 9, 50, 100);
        Imgproc.morphologyEx(frameProc, frameProc, Imgproc.MORPH_OPEN, Mat.ones(5, 5, CvType.CV_8U)); // morphological opening to remove speckle noise
        Imgproc.dilate(frameProc, frameProc, Mat.ones(5, 5, CvType.CV_8U));          //Mat.ones(5, 5, CvType.CV_8U), anchor: new Point(-1, -1), iteration:2
        Imgproc.cvtColor(frameProc, frameProc, Imgproc.COLOR_BGR2GRAY);

        List <MatOfPoint> contoursList = new List <MatOfPoint>();

        Imgproc.findContours(frameProc, contoursList, hierarchy, Imgproc.RETR_TREE, Imgproc.CHAIN_APPROX_SIMPLE);

        int count = 0;

        foreach (MatOfPoint contour in contoursList)
        {
            MatOfPoint2f approx   = new MatOfPoint2f();
            MatOfPoint2f contourf = new MatOfPoint2f(contour.toArray());
            Imgproc.approxPolyDP(contourf, approx, 0.01 * Imgproc.arcLength(contourf, true), true);
            //print(approx.dump());
            if (approx.rows() == 4 && Imgproc.contourArea(contour) >= min_area)
            {
                count++;
                if (count >= 2)
                {
                    continue;
                }
                else
                {
                    OpenCVForUnity.CoreModule.Rect track_win = Imgproc.boundingRect(approx);
                    TrackWindow = new int[] { track_win.x, track_win.y, track_win.width, track_win.height };
                    // Skip detections that touch the frame border (within 5 px of any edge).
                    if (frame.width() - 5 < TrackWindow[0] + TrackWindow[2] &&
                        TrackWindow[0] + TrackWindow[2] <= frame.width() ||
                        0 <= TrackWindow[0] && TrackWindow[0] < 5 ||
                        frame.height() - 5 < TrackWindow[1] + TrackWindow[3] &&
                        TrackWindow[1] + TrackWindow[3] <= frame.height() ||
                        0 <= TrackWindow[1] && TrackWindow[1] < 5)
                    {
                        continue;
                    }
                    else
                    {
                        Approx  = approx;
                        Contour = contour;
                        return(isKeyboardFound = true);
                    }
                }
            }
        }
        return(isKeyboardFound = false);
    }
Example #3
        private void DetectInRegion(Mat img, Rect region, List <Rect> detectedObjectsInRegions, FaceLandmarkDetector landmarkDetector)
        {
            Rect r0 = new Rect(new Point(), img.size());
            Rect r1 = new Rect(region.x, region.y, region.width, region.height);

            Rect.inflate(r1, (int)((r1.width * coeffTrackingWindowSize) - r1.width) / 2,
                         (int)((r1.height * coeffTrackingWindowSize) - r1.height) / 2);
            r1 = Rect.intersect(r0, r1);

            if ((r1.width <= 0) || (r1.height <= 0))
            {
                Debug.Log("detectInRegion: Empty intersection");
                return;
            }

            using (Mat img1_roi = new Mat(img, r1))
                using (Mat img1 = new Mat(r1.size(), img.type()))
                {
                    img1_roi.copyTo(img1);

                    OpenCVForUnityUtils.SetImage(landmarkDetector, img1);

                    List <UnityEngine.Rect> detectResult = landmarkDetector.Detect();

                    int len = detectResult.Count;
                    for (int i = 0; i < len; i++)
                    {
                        UnityEngine.Rect tmp = detectResult[i];
                        Rect             r   = new Rect((int)(tmp.x + r1.x), (int)(tmp.y + r1.y), (int)tmp.width, (int)tmp.height);
                        detectedObjectsInRegions.Add(r);
                    }
                }
        }
        void DoProcess()
        {
            if (!(img.Value is OpenCVForUnityPlayMakerActions.Mat))
            {
                LogError("img is not initialized. Add Action \"newMat\".");
                return;
            }
            OpenCVForUnity.CoreModule.Mat wrapped_img = OpenCVForUnityPlayMakerActionsUtils.GetWrappedObject <OpenCVForUnityPlayMakerActions.Mat, OpenCVForUnity.CoreModule.Mat>(img);

            if (!(rec.Value is OpenCVForUnityPlayMakerActions.Rect))
            {
                LogError("rec is not initialized. Add Action \"newRect\".");
                return;
            }
            OpenCVForUnity.CoreModule.Rect wrapped_rec = OpenCVForUnityPlayMakerActionsUtils.GetWrappedObject <OpenCVForUnityPlayMakerActions.Rect, OpenCVForUnity.CoreModule.Rect>(rec);

            if (!(color.Value is OpenCVForUnityPlayMakerActions.Scalar))
            {
                LogError("color is not initialized. Add Action \"newScalar\".");
                return;
            }
            OpenCVForUnity.CoreModule.Scalar wrapped_color = OpenCVForUnityPlayMakerActionsUtils.GetWrappedObject <OpenCVForUnityPlayMakerActions.Scalar, OpenCVForUnity.CoreModule.Scalar>(color);

            OpenCVForUnity.ImgprocModule.Imgproc.rectangle(wrapped_img, wrapped_rec, wrapped_color);
        }
        /// <summary>
        /// Raises the scan face mask button click event.
        /// </summary>
        public void OnScanFaceMaskButtonClick()
        {
            RemoveFaceMask();

            // Capture webcam frame.
            if (webCamTextureToMatHelper.IsPlaying())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();

                faceRectInMask = DetectFace(rgbaMat);
                if (faceRectInMask.width == 0 && faceRectInMask.height == 0)
                {
                    Debug.Log("A face could not be detected from the input image.");
                    return;
                }

                Rect rect = new Rect((int)faceRectInMask.x, (int)faceRectInMask.y, (int)faceRectInMask.width, (int)faceRectInMask.height);
                rect.inflate(rect.x / 5, rect.y / 5);
                rect = rect.intersect(new Rect(0, 0, rgbaMat.width(), rgbaMat.height()));

                faceMaskTexture = new Texture2D(rect.width, rect.height, TextureFormat.RGBA32, false);
                faceMaskMat     = new Mat(rgbaMat, rect).clone();
                OpenCVForUnity.UnityUtils.Utils.matToTexture2D(faceMaskMat, faceMaskTexture);
                Debug.Log("faceMaskMat ToString " + faceMaskMat.ToString());

                faceRectInMask           = DetectFace(faceMaskMat);
                faceLandmarkPointsInMask = DetectFaceLandmarkPoints(faceMaskMat, faceRectInMask);

                if (faceRectInMask.width == 0 && faceRectInMask.height == 0)
                {
                    RemoveFaceMask();
                    Debug.Log("A face could not be detected from the input image.");
                }
            }
        }
Example #6
        void DoProcess()
        {
            if (!(owner.Value is OpenCVForUnityPlayMakerActions.Tracker))
            {
                LogError("owner is not initialized. Add Action \"newTracker\".");
                return;
            }
            OpenCVForUnity.VideoModule.Tracker wrapped_owner = OpenCVForUnityPlayMakerActionsUtils.GetWrappedObject <OpenCVForUnityPlayMakerActions.Tracker, OpenCVForUnity.VideoModule.Tracker>(owner);

            if (!(image.Value is OpenCVForUnityPlayMakerActions.Mat))
            {
                LogError("image is not initialized. Add Action \"newMat\".");
                return;
            }
            OpenCVForUnity.CoreModule.Mat wrapped_image = OpenCVForUnityPlayMakerActionsUtils.GetWrappedObject <OpenCVForUnityPlayMakerActions.Mat, OpenCVForUnity.CoreModule.Mat>(image);

            if (!(boundingBox.Value is OpenCVForUnityPlayMakerActions.Rect))
            {
                LogError("boundingBox is not initialized. Add Action \"newRect\".");
                return;
            }
            OpenCVForUnity.CoreModule.Rect wrapped_boundingBox = OpenCVForUnityPlayMakerActionsUtils.GetWrappedObject <OpenCVForUnityPlayMakerActions.Rect, OpenCVForUnity.CoreModule.Rect>(boundingBox);

            storeResult.Value = wrapped_owner.update(wrapped_image, wrapped_boundingBox);

            Fsm.Event(storeResult.Value ? trueEvent : falseEvent);
        }
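The tracker action above forwards to Tracker.update, which refines the bounding box in place and reports whether the target was found. A minimal sketch of the same loop without PlayMaker, assuming a TrackerMIL is available in the VideoModule (any Tracker subclass is used the same way):

        // Sketch: direct use of a VideoModule tracker (TrackerMIL here is an assumption).
        OpenCVForUnity.VideoModule.Tracker tracker;
        OpenCVForUnity.CoreModule.Rect trackedBox;

        void InitTracker(OpenCVForUnity.CoreModule.Mat firstFrame, OpenCVForUnity.CoreModule.Rect initialBox)
        {
            tracker    = OpenCVForUnity.VideoModule.TrackerMIL.create();
            trackedBox = initialBox;
            tracker.init(firstFrame, trackedBox);
        }

        bool UpdateTracker(OpenCVForUnity.CoreModule.Mat frame)
        {
            // update() refines trackedBox in place and returns false when the target is lost.
            return tracker.update(frame, trackedBox);
        }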
        private void DetectInRegion(Mat img, Rect region, List <Rect> detectedObjectsInRegions, CascadeClassifier cascade)
        {
            Rect r0 = new Rect(new Point(), img.size());
            Rect r1 = new Rect(region.x, region.y, region.width, region.height);

            Rect.inflate(r1, (int)((r1.width * coeffTrackingWindowSize) - r1.width) / 2,
                         (int)((r1.height * coeffTrackingWindowSize) - r1.height) / 2);
            r1 = Rect.intersect(r0, r1);

            if ((r1.width <= 0) || (r1.height <= 0))
            {
                Debug.Log("detectInRegion: Empty intersection");
                return;
            }

            int d = Math.Min(region.width, region.height);

            d = (int)Math.Round(d * coeffObjectSizeToTrack);

            using (MatOfRect tmpobjects = new MatOfRect())
                using (Mat img1 = new Mat(img, r1)) //subimage for rectangle -- without data copying
                {
                    cascade.detectMultiScale(img1, tmpobjects, 1.1, 2, 0 | Objdetect.CASCADE_DO_CANNY_PRUNING | Objdetect.CASCADE_SCALE_IMAGE | Objdetect.CASCADE_FIND_BIGGEST_OBJECT, new Size(d, d), new Size());

                    Rect[] tmpobjectsArray = tmpobjects.toArray();
                    int    len             = tmpobjectsArray.Length;
                    for (int i = 0; i < len; i++)
                    {
                        Rect tmp = tmpobjectsArray[i];
                        Rect r   = new Rect(new Point(tmp.x + r1.x, tmp.y + r1.y), tmp.size());

                        detectedObjectsInRegions.Add(r);
                    }
                }
        }
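As the comment in DetectInRegion notes, `new Mat(img, r1)` is a view onto the parent Mat rather than a copy. A minimal sketch of the difference (names are illustrative):

        // Sketch: ROI as a shared view versus an independent copy.
        void RoiViewVersusCopy(Mat img, Rect roi)
        {
            using (Mat view = new Mat(img, roi))  // no pixel data copied; writes into 'view' also change 'img'
            using (Mat copy = view.clone())       // allocates new memory; later edits to 'img' do not affect 'copy'
            {
                // ... process 'view' or 'copy' here ...
            }
        }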
        void DoProcess()
        {
            if (!(probImage.Value is OpenCVForUnityPlayMakerActions.Mat))
            {
                LogError("probImage is not initialized. Add Action \"newMat\".");
                return;
            }
            OpenCVForUnity.CoreModule.Mat wrapped_probImage = OpenCVForUnityPlayMakerActionsUtils.GetWrappedObject <OpenCVForUnityPlayMakerActions.Mat, OpenCVForUnity.CoreModule.Mat>(probImage);

            if (!(window.Value is OpenCVForUnityPlayMakerActions.Rect))
            {
                LogError("window is not initialized. Add Action \"newRect\".");
                return;
            }
            OpenCVForUnity.CoreModule.Rect wrapped_window = OpenCVForUnityPlayMakerActionsUtils.GetWrappedObject <OpenCVForUnityPlayMakerActions.Rect, OpenCVForUnity.CoreModule.Rect>(window);

            if (!(criteria.Value is OpenCVForUnityPlayMakerActions.TermCriteria))
            {
                LogError("criteria is not initialized. Add Action \"newTermCriteria\".");
                return;
            }
            OpenCVForUnity.CoreModule.TermCriteria wrapped_criteria = OpenCVForUnityPlayMakerActionsUtils.GetWrappedObject <OpenCVForUnityPlayMakerActions.TermCriteria, OpenCVForUnity.CoreModule.TermCriteria>(criteria);

            storeResult.Value = OpenCVForUnity.VideoModule.Video.meanShift(wrapped_probImage, wrapped_window, wrapped_criteria);
        }
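Called directly, Video.meanShift takes a histogram back-projection, refines the search window in place, and returns the number of iterations it ran. A minimal sketch, assuming `backProjection` is a CV_8UC1 Mat produced elsewhere and `searchWindow` holds the previous object location:

        // Sketch: one meanShift step over a precomputed back-projection.
        OpenCVForUnity.CoreModule.Rect searchWindow = new OpenCVForUnity.CoreModule.Rect(100, 100, 80, 80);

        void StepMeanShift(OpenCVForUnity.CoreModule.Mat backProjection)
        {
            // Stop after 10 iterations or once the window moves by less than 1 pixel.
            OpenCVForUnity.CoreModule.TermCriteria criteria = new OpenCVForUnity.CoreModule.TermCriteria(
                OpenCVForUnity.CoreModule.TermCriteria.EPS + OpenCVForUnity.CoreModule.TermCriteria.COUNT, 10, 1);

            // 'searchWindow' is updated in place with the new object location.
            int iterations = OpenCVForUnity.VideoModule.Video.meanShift(backProjection, searchWindow, criteria);
        }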
        //private bool IsCollideByCircle(Rect a, Rect b, float coeffRectangleOverlap)
        //{
        //    int r1 = (int)(a.width / 2.0f);
        //    int r2 = (int)(b.width / 2.0f);
        //    int px1 = a.x + r1;
        //    int py1 = a.y + r1;
        //    int px2 = b.x + r2;
        //    int py2 = b.y + r2;

        //    if ((px2 - px1) * (px2 - px1) + (py2 - py1) * (py2 - py1) <= (r1 + r2) * (r1 + r2) * coeffRectangleOverlap)
        //        return true;
        //    else
        //        return false;
        //}

        private bool IsCollideByRectangle(Rect a, Rect b, float coeffRectangleOverlap)
        {
            int mw  = (int)(a.width * coeffRectangleOverlap);
            int mh  = (int)(a.height * coeffRectangleOverlap);
            int mx1 = (int)(a.x + (a.width - mw) / 2.0f);
            int my1 = (int)(a.y + (a.height - mh) / 2.0f);
            int mx2 = (int)(mx1 + mw);
            int my2 = (int)(my1 + mh);

            int ew  = (int)(b.width * coeffRectangleOverlap);
            int eh  = (int)(b.height * coeffRectangleOverlap);
            int ex1 = (int)(b.x + (b.width - ew) / 2.0f);
            int ey1 = (int)(b.y + (b.height - eh) / 2.0f);
            int ex2 = (int)(ex1 + ew);
            int ey2 = (int)(ey1 + eh);

            if (mx1 <= ex2 && ex1 <= mx2 && my1 <= ey2 && ey1 <= my2)
            {
                return(true);
            }
            else
            {
                return(false);
            }
        }
Example #10
        // Search for just a single object in the image, such as the largest face, storing the result into 'largestObject'.
        // Can use Haar cascades or LBP cascades for Face Detection, or even eye, mouth, or car detection.
        // Input is temporarily shrunk to 'scaledWidth' for much faster detection, since 200 is enough to find faces.
        // Note: detectLargestObject() should be faster than detectManyObjects().
        public static void DetectLargestObject(Mat img, CascadeClassifier cascade, out Rect largestObject, int scaledWidth = 320)
        {
            // Only search for just 1 object (the biggest in the image).
            int flags = Objdetect.CASCADE_FIND_BIGGEST_OBJECT;// | CASCADE_DO_ROUGH_SEARCH;
            // Smallest object size.
            Size minFeatureSize = new Size(20, 20);
            // How detailed should the search be. Must be larger than 1.0.
            float searchScaleFactor = 1.1f;
            // How much the detections should be filtered out. This should depend on how bad false detections are to your system.
            // minNeighbors=2 means lots of good+bad detections, and minNeighbors=6 means only good detections are given but some are missed.
            int minNeighbors = 4;

            // Perform Object or Face Detection, looking for just 1 object (the biggest in the image).
            List <Rect> objects;

            detectObjectsCustom(img, cascade, out objects, scaledWidth, flags, minFeatureSize, searchScaleFactor, minNeighbors);
            if (objects.Count > 0)
            {
                // Return the only detected object.
                largestObject = (Rect)objects[0];
            }
            else
            {
                // Return an invalid rect.
                largestObject = new Rect(-1, -1, -1, -1);
            }
        }
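A minimal sketch of calling DetectLargestObject; the cascade file name and how its path is resolved are assumptions (use whichever Haar or LBP cascade the project bundles):

        // Sketch: find the largest face in a grayscale frame using the helper above.
        void FindLargestFace(Mat grayFrame)
        {
            // The XML file name/path is an assumption; load the cascade your project actually ships.
            CascadeClassifier cascade = new CascadeClassifier("haarcascade_frontalface_alt.xml");

            Rect largestObject;
            DetectLargestObject(grayFrame, cascade, out largestObject);

            if (largestObject.width > 0)
            {
                Debug.Log("Largest face: " + largestObject.x + ", " + largestObject.y + ", " + largestObject.width + "x" + largestObject.height);
            }
        }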
Example #11
    Vector2[] getOrangeBlobs(ref Mat colorImage, ref Mat orangeMask)
    {
        Mat hsvMat = new Mat();

        Imgproc.cvtColor(colorImage, hsvMat, Imgproc.COLOR_RGB2HSV);
        orangeMask = new Mat();

        Scalar orangeLower = new Scalar((int)Mathf.Clamp(h_low, 0.0f, 255.0f), (int)Mathf.Clamp(s_low, 0.0f, 255.0f), (int)Mathf.Clamp(l_low, 0.0f, 255.0f));
        Scalar orangeUpper = new Scalar((int)Mathf.Clamp(h_high, 0.0f, 255.0f), (int)Mathf.Clamp(s_high, 0.0f, 255.0f), (int)Mathf.Clamp(l_high, 0.0f, 255.0f));

        Core.inRange(hsvMat, orangeLower, orangeUpper, orangeMask);

        List <MatOfPoint> contours = new List <MatOfPoint>();

        Mat hierarchy = new Mat();

        Imgproc.findContours(orangeMask, contours, hierarchy, Imgproc.RETR_CCOMP, Imgproc.CHAIN_APPROX_SIMPLE);

        Vector2[] blobs = new Vector2[contours.Count];
        for (int i = 0; i < contours.Count; i++)
        {
            // Use the bounding-box top-left corner as the blob position (not the center).
            OpenCVForUnity.CoreModule.Rect rectangle = Imgproc.boundingRect(contours[i]);
            blobs[i] = new Vector2(rectangle.x, rectangle.y);
        }

        return(blobs);
    }
Example #12
        private void DetectObject(Mat img, out List <Rect> detectedObjects, CascadeClassifier cascade, bool correctToDlibResult = false)
        {
            int d = Mathf.Min(img.width(), img.height());

            d = (int)Mathf.Round(d * minDetectionSizeRatio);

            MatOfRect objects = new MatOfRect();

            if (cascade != null)
            {
                cascade.detectMultiScale(img, objects, 1.1, 2, Objdetect.CASCADE_SCALE_IMAGE, new Size(d, d), new Size());
            }

            detectedObjects = objects.toList();

            if (correctToDlibResult)
            {
                int len = detectedObjects.Count;
                for (int i = 0; i < len; i++)
                {
                    Rect r = detectedObjects[i];
                    // correct the deviation of the detection result of the face rectangle of OpenCV and Dlib.
                    r.x     += (int)(r.width * 0.05f);
                    r.y     += (int)(r.height * 0.1f);
                    r.width  = (int)(r.width * 0.9f);
                    r.height = (int)(r.height * 0.9f);
                }
            }
        }
        /// <summary>
        /// Acquire a portion of the latest frame in the frame buffer.
        /// </summary>
        /// <param name="roi">Portion of the image you want to acquire.</param>
        /// <returns>Cropped Mat frame.</returns>
        public Mat AcquireLatestFrame(Rect roi)
        {
            if (!playing)
            {
                Debug.Log("Camera " + this.id + " is not playing!");
                return(null);
            }
            if (!CheckROI(roi, this.width, this.height))
            {
                Debug.Log("Roi exceeded image boundaries.");
                return(null);
            }
            sbyte[] srgbImage = plugin.Call <sbyte[]>("getLatestImage");
            byte[]  rgbImage  = (byte[])(Array)srgbImage;

            if (rgbImage == null)
            {
                Debug.Log("Frame is null");
                return(null);
            }
            //Convert from YUV to RGB format
            Mat rgbFrame = GetRGBfromYUVbytes(rgbImage, this.height, this.width);

            return(new Mat(rgbFrame, roi));
        }
        void DoProcess()
        {
            if (!(imgRect.Value is OpenCVForUnityPlayMakerActions.Rect))
            {
                LogError("imgRect is not initialized. Add Action \"newRect\".");
                return;
            }
            OpenCVForUnity.CoreModule.Rect wrapped_imgRect = OpenCVForUnityPlayMakerActionsUtils.GetWrappedObject <OpenCVForUnityPlayMakerActions.Rect, OpenCVForUnity.CoreModule.Rect>(imgRect);

            if (!(pt1.Value is OpenCVForUnityPlayMakerActions.Point))
            {
                LogError("pt1 is not initialized. Add Action \"newPoint\".");
                return;
            }
            OpenCVForUnity.CoreModule.Point wrapped_pt1 = OpenCVForUnityPlayMakerActionsUtils.GetWrappedObject <OpenCVForUnityPlayMakerActions.Point, OpenCVForUnity.CoreModule.Point>(pt1);

            if (!(pt2.Value is OpenCVForUnityPlayMakerActions.Point))
            {
                LogError("pt2 is not initialized. Add Action \"newPoint\".");
                return;
            }
            OpenCVForUnity.CoreModule.Point wrapped_pt2 = OpenCVForUnityPlayMakerActionsUtils.GetWrappedObject <OpenCVForUnityPlayMakerActions.Point, OpenCVForUnity.CoreModule.Point>(pt2);

            storeResult.Value = OpenCVForUnity.ImgprocModule.Imgproc.clipLine(wrapped_imgRect, wrapped_pt1, wrapped_pt2);

            Fsm.Event(storeResult.Value ? trueEvent : falseEvent);
        }
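Imgproc.clipLine can also be used directly; it returns false when the segment lies entirely outside the rectangle and otherwise clips both endpoints in place. A minimal sketch (coordinates and color are illustrative):

        // Sketch: clip a segment against the image rectangle before drawing it.
        void DrawClippedLine(OpenCVForUnity.CoreModule.Mat img)
        {
            OpenCVForUnity.CoreModule.Rect imgRect = new OpenCVForUnity.CoreModule.Rect(0, 0, img.width(), img.height());
            OpenCVForUnity.CoreModule.Point pt1 = new OpenCVForUnity.CoreModule.Point(-50, 20);
            OpenCVForUnity.CoreModule.Point pt2 = new OpenCVForUnity.CoreModule.Point(500, 300);

            // pt1 and pt2 are moved onto the rectangle border wherever the segment crosses it.
            if (OpenCVForUnity.ImgprocModule.Imgproc.clipLine(imgRect, pt1, pt2))
            {
                OpenCVForUnity.ImgprocModule.Imgproc.line(img, pt1, pt2, new OpenCVForUnity.CoreModule.Scalar(0, 255, 0, 255), 2);
            }
        }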
        // Returns minimal Mat containing both faces.
        private Rect getMinFrameRect(Mat frame, Rect rect_ann, Rect rect_bob)
        {
            Rect bounding_rect = RectUtils.Union(rect_ann, rect_bob);

            bounding_rect = RectUtils.Intersect(bounding_rect, new Rect(0, 0, frame.cols(), frame.rows()));

            return(bounding_rect);
        }
        // Returns minimal Mat containing both faces
        private Rect getMinFrameRect(Mat frame, Rect target_rect, Rect source_rect)
        {
            Rect bounding_rect = RectUtils.Union(target_rect, source_rect);

            bounding_rect = RectUtils.Intersect(bounding_rect, new Rect(0, 0, frame.cols(), frame.rows()));

            return(bounding_rect);
        }
        private static bool CheckROI(Rect roi, int imgWidth, int imgHeight)
        {
            // Check that the ROI lies completely inside the image boundaries.
            if (roi.x < 0 || roi.y < 0 || roi.x + roi.width > imgWidth || roi.y + roi.height > imgHeight)
            {
                return(false);
            }
            return(true);
        }
Example #18
        public virtual Texture2D UpdateLUTTex(int id, Mat src, Mat dst, List <Vector2> src_landmarkPoints, List <Vector2> dst_landmarkPoints)
        {
            if (src_mask != null && (src.width() != src_mask.width() || src.height() != src_mask.height()))
            {
                src_mask.Dispose();
                src_mask = null;
            }
            src_mask = src_mask ?? new Mat(src.rows(), src.cols(), CvType.CV_8UC1, Scalar.all(0));

            if (dst_mask != null && (dst.width() != dst_mask.width() || dst.height() != dst_mask.height()))
            {
                dst_mask.Dispose();
                dst_mask = null;
            }
            dst_mask = dst_mask ?? new Mat(dst.rows(), dst.cols(), CvType.CV_8UC1, Scalar.all(0));

            // Get facial contour points.
            GetFacialContourPoints(src_landmarkPoints, src_facialContourPoints);
            GetFacialContourPoints(dst_landmarkPoints, dst_facialContourPoints);

            // Get facial contour rect.
            Rect src_facialContourRect = Imgproc.boundingRect(new MatOfPoint(src_facialContourPoints));
            Rect dst_facialContourRect = Imgproc.boundingRect(new MatOfPoint(dst_facialContourPoints));

            src_facialContourRect = src_facialContourRect.intersect(new Rect(0, 0, src.width(), src.height()));
            dst_facialContourRect = dst_facialContourRect.intersect(new Rect(0, 0, dst.width(), dst.height()));

            Mat src_ROI      = new Mat(src, src_facialContourRect);
            Mat dst_ROI      = new Mat(dst, dst_facialContourRect);
            Mat src_mask_ROI = new Mat(src_mask, src_facialContourRect);
            Mat dst_mask_ROI = new Mat(dst_mask, dst_facialContourRect);

            GetPointsInFrame(src_mask_ROI, src_facialContourPoints, src_facialContourPoints);
            GetPointsInFrame(dst_mask_ROI, dst_facialContourPoints, dst_facialContourPoints);

            src_mask_ROI.setTo(new Scalar(0));
            dst_mask_ROI.setTo(new Scalar(0));
            Imgproc.fillConvexPoly(src_mask_ROI, new MatOfPoint(src_facialContourPoints), new Scalar(255));
            Imgproc.fillConvexPoly(dst_mask_ROI, new MatOfPoint(dst_facialContourPoints), new Scalar(255));

            Texture2D LUTTex;

            if (LUTTexDict.ContainsKey(id))
            {
                LUTTex = LUTTexDict [id];
            }
            else
            {
                LUTTex = new Texture2D(256, 1, TextureFormat.RGB24, false);
                LUTTexDict.Add(id, LUTTex);
            }

            FaceMaskShaderUtils.CalculateLUT(src_ROI, dst_ROI, src_mask_ROI, dst_mask_ROI, LUTTex);

            return(LUTTex);
        }
        /// <summary>
        /// Extract detection area from camera image
        /// </summary>
        /// <param name="img">Image data</param>
        private Mat getROI(Mat img)
        {
            int  w         = img.width();
            int  h         = img.height();
            int  ROIwidth  = Math.Max(0, Math.Min(w, ((int)Math.Round(w * detectionArea.x * 0.5)) * 2));
            int  ROIheight = Math.Max(0, Math.Min(h, ((int)Math.Round(h * detectionArea.y * 0.5)) * 2));
            Rect ROI       = new Rect((w - ROIwidth) / 2, (h - ROIheight) / 2, ROIwidth, ROIheight);

            return(new Mat(img, ROI));
        }
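For instance, with a 640x480 input and detectionArea = (0.5, 0.5) the method yields the centered Rect(160, 120, 320, 240); rounding half the size and then doubling keeps the ROI dimensions even. A short worked sketch of the same arithmetic (detectionArea is assumed to be a Vector2 field with components in [0, 1]):

        // Sketch: the ROI arithmetic from getROI with concrete numbers.
        void RoiArithmeticExample()
        {
            int   w = 640, h = 480;
            float areaX = 0.5f, areaY = 0.5f;  // detectionArea.x, detectionArea.y

            int ROIwidth  = Math.Max(0, Math.Min(w, ((int)Math.Round(w * areaX * 0.5)) * 2));  // 320
            int ROIheight = Math.Max(0, Math.Min(h, ((int)Math.Round(h * areaY * 0.5)) * 2));  // 240

            Rect ROI = new Rect((w - ROIwidth) / 2, (h - ROIheight) / 2, ROIwidth, ROIheight);  // Rect(160, 120, 320, 240)
        }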
Example #20
        void DoProcess()
        {
            if (!(owner.Value is OpenCVForUnityPlayMakerActions.Rect))
            {
                LogError("owner is not initialized. Add Action \"newRect\".");
                return;
            }
            OpenCVForUnity.CoreModule.Rect wrapped_owner = OpenCVForUnityPlayMakerActionsUtils.GetWrappedObject <OpenCVForUnityPlayMakerActions.Rect, OpenCVForUnity.CoreModule.Rect>(owner);

            storeResult.Value = (float)wrapped_owner.area();
        }
Example #21
    public static OpenCVForUnity.CoreModule.Rect GetRect(MatOfPoint matOfPoint)
    {
        var points = matOfPoint.toArray();
        var minX   = points.OrderBy(e => e.x).First().x;
        var minY   = points.OrderBy(e => e.y).First().y;
        var maxX   = points.OrderByDescending(e => e.x).First().x;
        var maxY   = points.OrderByDescending(e => e.y).First().y;
        var rect   = new OpenCVForUnity.CoreModule.Rect((int)minX, (int)minY, (int)(maxX - minX), (int)(maxY - minY));

        return(rect);
    }
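Imgproc.boundingRect computes the same axis-aligned bounding box in a single call and avoids the four LINQ passes; note that its width and height include the maximum point, so they can be one pixel larger than the hand-rolled version above.

    // Sketch: equivalent bounding box via Imgproc.boundingRect.
    public static OpenCVForUnity.CoreModule.Rect GetRectViaBoundingRect(MatOfPoint matOfPoint)
    {
        return(OpenCVForUnity.ImgprocModule.Imgproc.boundingRect(matOfPoint));
    }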
Example #22
        void DoProcess()
        {
            if (!(owner.Value is OpenCVForUnityPlayMakerActions.Rect))
            {
                LogError("owner is not initialized. Add Action \"newRect\".");
                return;
            }
            OpenCVForUnity.CoreModule.Rect wrapped_owner = OpenCVForUnityPlayMakerActionsUtils.GetWrappedObject <OpenCVForUnityPlayMakerActions.Rect, OpenCVForUnity.CoreModule.Rect>(owner);

            wrapped_owner.inflate(new OpenCVForUnity.CoreModule.Size((double)size_width.Value, (double)size_height.Value));
        }
Example #23
    float compareImageToOriginal(Color color)
    {
        Texture2D userTexture     = easelSprite.texture;
        Texture2D originalTexture = compareSprite.texture;
        Scalar    colorScalar     = ColorToRGBScalar(color);
        Mat       originalMat     = TextureToMat(originalTexture);
        Mat       userMat         = TextureToMat(userTexture);

        Mat ogMasked = new Mat();

        Core.inRange(originalMat, colorScalar, colorScalar, ogMasked);

        Mat userMasked = new Mat();

        Core.inRange(userMat, colorScalar, colorScalar, userMasked);

        Imgproc.threshold(userMasked, userMasked, 50, 255, Imgproc.THRESH_BINARY_INV);
        Imgproc.threshold(ogMasked, ogMasked, 50, 255, Imgproc.THRESH_BINARY_INV);

        // Crop images to original
        Mat invertedOg = new Mat();

        Core.bitwise_not(ogMasked, invertedOg);

        (Point start, Point end) = CropMatBounds(invertedOg);
        OpenCVForUnity.CoreModule.Rect roi = new OpenCVForUnity.CoreModule.Rect(end, start);
        ogMasked   = new Mat(ogMasked, roi);
        userMasked = new Mat(userMasked, roi);
        invertedOg = new Mat(invertedOg, roi);

        int thickness    = 40;
        Mat kernel_erode = Mat.ones(thickness, thickness, CvType.CV_8UC1); // all-ones structuring element (a bare 'new Mat' would be uninitialized)

        Imgproc.erode(ogMasked, ogMasked, kernel_erode);
        Imgproc.erode(userMasked, userMasked, kernel_erode);
        Imgproc.dilate(invertedOg, invertedOg, kernel_erode);

        Mat differencesMat = new Mat(userMasked.rows(), userMasked.cols(), CvType.CV_8UC4);

        Core.subtract(userMasked, ogMasked, differencesMat);

        Mat originalWhite    = new Mat();
        Mat croppedDiffWhite = new Mat();

        Core.findNonZero(invertedOg, originalWhite);
        Core.findNonZero(differencesMat, croppedDiffWhite);

        // SaveMatToFile(invertedOg, "inverted" + (color == Color.black ? "black" : "red"));
        // SaveMatToFile(differencesMat, "diffs" + (color == Color.black ? "black" : "red"));

        float ratio = (float)croppedDiffWhite.rows() / (float)originalWhite.rows();

        return(1f - Mathf.Min(ratio, 1f));
    }
        void DoProcess()
        {
            if (!(owner.Value is OpenCVForUnityPlayMakerActions.DisparityFilter))
            {
                LogError("owner is not initialized. Add Action \"newDisparityFilter\".");
                return;
            }
            OpenCVForUnity.XimgprocModule.DisparityFilter wrapped_owner = OpenCVForUnityPlayMakerActionsUtils.GetWrappedObject <OpenCVForUnityPlayMakerActions.DisparityFilter, OpenCVForUnity.XimgprocModule.DisparityFilter>(owner);

            if (!(disparity_map_left.Value is OpenCVForUnityPlayMakerActions.Mat))
            {
                LogError("disparity_map_left is not initialized. Add Action \"newMat\".");
                return;
            }
            OpenCVForUnity.CoreModule.Mat wrapped_disparity_map_left = OpenCVForUnityPlayMakerActionsUtils.GetWrappedObject <OpenCVForUnityPlayMakerActions.Mat, OpenCVForUnity.CoreModule.Mat>(disparity_map_left);

            if (!(left_view.Value is OpenCVForUnityPlayMakerActions.Mat))
            {
                LogError("left_view is not initialized. Add Action \"newMat\".");
                return;
            }
            OpenCVForUnity.CoreModule.Mat wrapped_left_view = OpenCVForUnityPlayMakerActionsUtils.GetWrappedObject <OpenCVForUnityPlayMakerActions.Mat, OpenCVForUnity.CoreModule.Mat>(left_view);

            if (!(filtered_disparity_map.Value is OpenCVForUnityPlayMakerActions.Mat))
            {
                LogError("filtered_disparity_map is not initialized. Add Action \"newMat\".");
                return;
            }
            OpenCVForUnity.CoreModule.Mat wrapped_filtered_disparity_map = OpenCVForUnityPlayMakerActionsUtils.GetWrappedObject <OpenCVForUnityPlayMakerActions.Mat, OpenCVForUnity.CoreModule.Mat>(filtered_disparity_map);

            if (!(disparity_map_right.Value is OpenCVForUnityPlayMakerActions.Mat))
            {
                LogError("disparity_map_right is not initialized. Add Action \"newMat\".");
                return;
            }
            OpenCVForUnity.CoreModule.Mat wrapped_disparity_map_right = OpenCVForUnityPlayMakerActionsUtils.GetWrappedObject <OpenCVForUnityPlayMakerActions.Mat, OpenCVForUnity.CoreModule.Mat>(disparity_map_right);

            if (!(ROI.Value is OpenCVForUnityPlayMakerActions.Rect))
            {
                LogError("ROI is not initialized. Add Action \"newRect\".");
                return;
            }
            OpenCVForUnity.CoreModule.Rect wrapped_ROI = OpenCVForUnityPlayMakerActionsUtils.GetWrappedObject <OpenCVForUnityPlayMakerActions.Rect, OpenCVForUnity.CoreModule.Rect>(ROI);

            if (!(right_view.Value is OpenCVForUnityPlayMakerActions.Mat))
            {
                LogError("right_view is not initialized. Add Action \"newMat\".");
                return;
            }
            OpenCVForUnity.CoreModule.Mat wrapped_right_view = OpenCVForUnityPlayMakerActionsUtils.GetWrappedObject <OpenCVForUnityPlayMakerActions.Mat, OpenCVForUnity.CoreModule.Mat>(right_view);

            wrapped_owner.filter(wrapped_disparity_map_left, wrapped_left_view, wrapped_filtered_disparity_map, wrapped_disparity_map_right, wrapped_ROI, wrapped_right_view);
        }
        void DoProcess()
        {
            if (!(image.Value is OpenCVForUnityPlayMakerActions.Mat))
            {
                LogError("image is not initialized. Add Action \"newMat\".");
                return;
            }
            OpenCVForUnity.CoreModule.Mat wrapped_image = OpenCVForUnityPlayMakerActionsUtils.GetWrappedObject <OpenCVForUnityPlayMakerActions.Mat, OpenCVForUnity.CoreModule.Mat>(image);

            if (!(mask.Value is OpenCVForUnityPlayMakerActions.Mat))
            {
                LogError("mask is not initialized. Add Action \"newMat\".");
                return;
            }
            OpenCVForUnity.CoreModule.Mat wrapped_mask = OpenCVForUnityPlayMakerActionsUtils.GetWrappedObject <OpenCVForUnityPlayMakerActions.Mat, OpenCVForUnity.CoreModule.Mat>(mask);

            if (!(seedPoint.Value is OpenCVForUnityPlayMakerActions.Point))
            {
                LogError("seedPoint is not initialized. Add Action \"newPoint\".");
                return;
            }
            OpenCVForUnity.CoreModule.Point wrapped_seedPoint = OpenCVForUnityPlayMakerActionsUtils.GetWrappedObject <OpenCVForUnityPlayMakerActions.Point, OpenCVForUnity.CoreModule.Point>(seedPoint);

            if (!(newVal.Value is OpenCVForUnityPlayMakerActions.Scalar))
            {
                LogError("newVal is not initialized. Add Action \"newScalar\".");
                return;
            }
            OpenCVForUnity.CoreModule.Scalar wrapped_newVal = OpenCVForUnityPlayMakerActionsUtils.GetWrappedObject <OpenCVForUnityPlayMakerActions.Scalar, OpenCVForUnity.CoreModule.Scalar>(newVal);

            if (!(rect.Value is OpenCVForUnityPlayMakerActions.Rect))
            {
                LogError("rect is not initialized. Add Action \"newRect\".");
                return;
            }
            OpenCVForUnity.CoreModule.Rect wrapped_rect = OpenCVForUnityPlayMakerActionsUtils.GetWrappedObject <OpenCVForUnityPlayMakerActions.Rect, OpenCVForUnity.CoreModule.Rect>(rect);

            if (!(loDiff.Value is OpenCVForUnityPlayMakerActions.Scalar))
            {
                LogError("loDiff is not initialized. Add Action \"newScalar\".");
                return;
            }
            OpenCVForUnity.CoreModule.Scalar wrapped_loDiff = OpenCVForUnityPlayMakerActionsUtils.GetWrappedObject <OpenCVForUnityPlayMakerActions.Scalar, OpenCVForUnity.CoreModule.Scalar>(loDiff);

            if (!(upDiff.Value is OpenCVForUnityPlayMakerActions.Scalar))
            {
                LogError("upDiff is not initialized. Add Action \"newScalar\".");
                return;
            }
            OpenCVForUnity.CoreModule.Scalar wrapped_upDiff = OpenCVForUnityPlayMakerActionsUtils.GetWrappedObject <OpenCVForUnityPlayMakerActions.Scalar, OpenCVForUnity.CoreModule.Scalar>(upDiff);

            storeResult.Value = OpenCVForUnity.ImgprocModule.Imgproc.floodFill(wrapped_image, wrapped_mask, wrapped_seedPoint, wrapped_newVal, wrapped_rect, wrapped_loDiff, wrapped_upDiff);
        }
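When calling Imgproc.floodFill directly, the mask must be a CV_8UC1 Mat two pixels wider and taller than the image, and the Rect receives the bounding box of the repainted region. A minimal sketch (seed point, fill color, and tolerances are illustrative):

        // Sketch: flood-fill a region of an RGB image and read back its bounding box.
        void FloodFillExample(OpenCVForUnity.CoreModule.Mat rgbImage)
        {
            // The mask must be 2 pixels larger than the image in each dimension and zero-initialized.
            OpenCVForUnity.CoreModule.Mat mask = OpenCVForUnity.CoreModule.Mat.zeros(
                rgbImage.rows() + 2, rgbImage.cols() + 2, OpenCVForUnity.CoreModule.CvType.CV_8UC1);

            OpenCVForUnity.CoreModule.Point seedPoint = new OpenCVForUnity.CoreModule.Point(10, 10);
            OpenCVForUnity.CoreModule.Scalar newVal = new OpenCVForUnity.CoreModule.Scalar(255, 0, 0);
            OpenCVForUnity.CoreModule.Scalar loDiff = new OpenCVForUnity.CoreModule.Scalar(20, 20, 20);
            OpenCVForUnity.CoreModule.Scalar upDiff = new OpenCVForUnity.CoreModule.Scalar(20, 20, 20);

            // Receives the bounding box of the filled area.
            OpenCVForUnity.CoreModule.Rect filledRect = new OpenCVForUnity.CoreModule.Rect();

            int filledPixels = OpenCVForUnity.ImgprocModule.Imgproc.floodFill(
                rgbImage, mask, seedPoint, newVal, filledRect, loDiff, upDiff);
        }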
        void DoProcess()
        {
            OpenCVForUnity.CoreModule.Rect[] wrapped_a = new OpenCVForUnity.CoreModule.Rect[a.Length];
            OpenCVForUnityPlayMakerActionsUtils.ConvertFsmArrayToArray <OpenCVForUnityPlayMakerActions.Rect, OpenCVForUnity.CoreModule.Rect>(a, wrapped_a);

            if (!(storeResult.Value is OpenCVForUnityPlayMakerActions.MatOfRect))
            {
                storeResult.Value = new OpenCVForUnityPlayMakerActions.MatOfRect();
            }
            ((OpenCVForUnityPlayMakerActions.MatOfRect)storeResult.Value).wrappedObject = new OpenCVForUnity.CoreModule.MatOfRect(wrapped_a);

            OpenCVForUnityPlayMakerActionsUtils.ConvertArrayToFsmArray <OpenCVForUnity.CoreModule.Rect, OpenCVForUnityPlayMakerActions.Rect>(wrapped_a, a);
        }
Example #27
        void DoProcess()
        {
            if (!(owner.Value is OpenCVForUnityPlayMakerActions.Rect))
            {
                LogError("owner is not initialized. Add Action \"newRect\".");
                return;
            }
            OpenCVForUnity.CoreModule.Rect wrapped_owner = OpenCVForUnityPlayMakerActionsUtils.GetWrappedObject <OpenCVForUnityPlayMakerActions.Rect, OpenCVForUnity.CoreModule.Rect>(owner);

            storeResult.Value = wrapped_owner.intersectsWith(new OpenCVForUnity.CoreModule.Rect((int)rect_x.Value, (int)rect_y.Value, (int)rect_width.Value, (int)rect_height.Value));

            Fsm.Event(storeResult.Value ? trueEvent : falseEvent);
        }
        void DoProcess()
        {
            if (!(owner.Value is OpenCVForUnityPlayMakerActions.Rect))
            {
                LogError("owner is not initialized. Add Action \"newRect\".");
                return;
            }
            OpenCVForUnity.CoreModule.Rect wrapped_owner = OpenCVForUnityPlayMakerActionsUtils.GetWrappedObject <OpenCVForUnityPlayMakerActions.Rect, OpenCVForUnity.CoreModule.Rect>(owner);

            storeResult.Value = wrapped_owner.contains(x.Value, y.Value);

            Fsm.Event(storeResult.Value ? trueEvent : falseEvent);
        }
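The two actions above map onto Rect's contains hit-test and the asset's intersectsWith/intersect extensions. A minimal sketch (the coordinates are illustrative):

        // Sketch: point hit-test and rectangle overlap test on OpenCVForUnity Rects.
        void RectTests()
        {
            OpenCVForUnity.CoreModule.Rect a = new OpenCVForUnity.CoreModule.Rect(0, 0, 100, 100);
            OpenCVForUnity.CoreModule.Rect b = new OpenCVForUnity.CoreModule.Rect(50, 50, 100, 100);

            bool hit     = a.contains(new OpenCVForUnity.CoreModule.Point(10, 10)); // true: the point lies inside 'a'
            bool overlap = a.intersectsWith(b);                                     // true: the rectangles share area
            OpenCVForUnity.CoreModule.Rect shared = a.intersect(b);                 // Rect(50, 50, 50, 50), the common region
        }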
        private Rect getRectInFrame(Mat frame, Rect r)
        {
            Size  wholesize = new Size();
            Point ofs       = new Point();

            frame.locateROI(wholesize, ofs);

            Rect rect = new Rect(r.x - (int)ofs.x, r.y - (int)ofs.y, r.width, r.height);

            rect = RectUtils.Intersect(rect, new Rect(0, 0, frame.cols(), frame.rows()));

            return(rect);
        }
        public Rect[] CreateRawRects()
        {
            Rect[] rectsWhereRegions = new Rect[_trackedObjects.Count];

            int count = _trackedObjects.Count;

            for (int i = 0; i < count; i++)
            {
                rectsWhereRegions[i] = _trackedObjects[i].position;
            }

            return(rectsWhereRegions);
        }