예제 #1
0
        /// <summary>
        /// Detect pedestrians in the given image using OpenCV's default
        /// HOG + linear-SVM people detector.
        /// </summary>
        /// <param name="image">The BGR image to search.</param>
        /// <returns>Bounding rectangles of the detected people.</returns>
        public override Rectangle[] find(Image <Bgr, Byte> image)
        {
            // HOGDescriptor wraps native resources; dispose it deterministically
            // instead of leaking it until finalization (the original leaked it).
            using (HOGDescriptor descriptor = new HOGDescriptor())
            {
                descriptor.SetSVMDetector(HOGDescriptor.GetDefaultPeopleDetector());
                return descriptor.DetectMultiScale(image);
            }
        }
예제 #2
0
        /// <summary>
        /// Detect pedestrians in the image with the stock HOG people detector
        /// and report how long detection took.
        /// </summary>
        /// <param name="image">The image to search.</param>
        /// <param name="processTime">Detection time in milliseconds (detector run plus result copy).</param>
        /// <returns>Bounding rectangles of the detected pedestrians.</returns>
        private static Rectangle[] FindPedestrian(IInputArray image, out long processTime)
        {
            Stopwatch watch;

            Rectangle[] regions;

            // NOTE(review): iaImage is never used below — DetectMultiScale is
            // called with `image` directly; the using block only pins the input.
            using (InputArray iaImage = image.GetInputArray())
            {
                using (HOGDescriptor des = new HOGDescriptor())
                {
                    des.SetSVMDetector(HOGDescriptor.GetDefaultPeopleDetector());
                    watch = Stopwatch.StartNew();

                    MCvObjectDetection[] results = des.DetectMultiScale(image);
                    regions = new Rectangle[results.Length];

                    // Unpack the detection structs into plain rectangles.
                    for (int i = 0; i < results.Length; i++)
                    {
                        regions[i] = results[i].Rect;
                    }
                    watch.Stop();
                }
            }

            processTime = watch.ElapsedMilliseconds;

            return(regions);
        }
예제 #3
0
        /// <summary>
        /// Compute the HOG feature vector for the whole image, using the image
        /// size as the detection window.
        /// </summary>
        /// <param name="image">Input BGR image.</param>
        /// <param name="block_size">Square block size in pixels.</param>
        /// <param name="cell_size">Cell size in pixels; also used as the block stride.</param>
        /// <returns>The raw HOG descriptor values.</returns>
        public float[] GetHog(Image <Bgr, Byte> image, int block_size = 16, int cell_size = 8)
        {
            // Dispose the descriptor deterministically; it wraps native memory
            // (the original leaked it until finalization).
            using (HOGDescriptor hog = new HOGDescriptor(image.Size, new Size(block_size, block_size), new Size(cell_size, cell_size), new Size(cell_size, cell_size)))
            {
                return hog.Compute(image);
            }
        }
예제 #4
0
 /// <summary>
 /// Create a HoG wrapper and remember the requested geometry.
 /// NOTE(review): the sizes are only stored in fields here — the
 /// HOGDescriptor itself is created with library defaults, so the stored
 /// sizes are presumably applied elsewhere; confirm against the callers.
 /// </summary>
 public HoG(Size windowSize, Size cellSize, Size blockSize)
 {
     this._windowSize = windowSize;
     this._cellSize   = cellSize;
     this._blockSize  = blockSize;
     _hogDescriptor   = new HOGDescriptor();
 }
예제 #5
0
        //=============================Feature Descriptor (HOG) Data Training Tanaman=============================
        /// <summary>
        /// Detect trained objects in the image, using the GPU HOG detector when
        /// CUDA is available and a CPU detector otherwise.
        /// </summary>
        /// <param name="image">The image to search.</param>
        /// <param name="processingTime">Detection time in milliseconds.</param>
        /// <param name="winSize">HOG window size (used by the CPU path only).</param>
        /// <param name="dataFile">Path to the trained SVM detector data (CPU path only).</param>
        /// <returns>Regions where objects were detected.</returns>
        public static Rectangle[] findObjects(Image <Bgr, Byte> image, out long processingTime, Size winSize, string dataFile)
        {
            Stopwatch watch;

            Rectangle[] regions;
            if (GpuInvoke.HasCuda)
            {
                // GPU path — NOTE(review): this uses the built-in people detector,
                // not the trained data in dataFile; confirm that is intended.
                using (GpuHOGDescriptor des = new GpuHOGDescriptor())
                {
                    des.SetSVMDetector(GpuHOGDescriptor.GetDefaultPeopleDetector());

                    watch = Stopwatch.StartNew();
                    // The GPU detector requires BGRA input.
                    using (GpuImage <Bgr, Byte> gpuImg = new GpuImage <Bgr, byte>(image))
                        using (GpuImage <Bgra, Byte> gpuBgra = gpuImg.Convert <Bgra, Byte>())
                        {
                            regions = des.DetectMultiScale(gpuBgra);
                        }
                }
            }
            else
            {
                // CPU path: custom-geometry HOG armed with the trained detector
                // from dataFile. blockSize, blockStride, cellSize and nbins are
                // class-level fields not visible in this snippet.
                using (HOGDescriptor des = new HOGDescriptor(winSize, blockSize, blockStride, cellSize, nbins, 1, -1, 0.2, true))
                {
                    des.SetSVMDetector(GetDataObjects(dataFile));
                    watch   = Stopwatch.StartNew();
                    regions = des.DetectMultiScale(image);
                }
            }
            watch.Stop();
            processingTime = watch.ElapsedMilliseconds;
            return(regions);
        }
        /// <summary>
        /// Raises the video-capture-to-mat helper initialized event: builds the
        /// display texture and quad for the first frame, fits the orthographic
        /// camera to the frame, and creates the HOG descriptor used per frame.
        /// </summary>
        public void OnVideoCaptureToMatHelperInitialized()
        {
            Debug.Log("OnVideoCaptureToMatHelperInitialized");

            Mat rgbMat = sourceToMatHelper.GetMat();

            // Texture sized to the frame; shown on this object's material.
            texture = new Texture2D(rgbMat.cols(), rgbMat.rows(), TextureFormat.RGB24, false);
            Utils.fastMatToTexture2D(rgbMat, texture);

            gameObject.GetComponent <Renderer>().material.mainTexture = texture;

            // Scale the quad to one unit per pixel of the frame.
            gameObject.transform.localScale = new Vector3(rgbMat.cols(), rgbMat.rows(), 1);
            Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);


            float width  = rgbMat.width();
            float height = rgbMat.height();

            float widthScale  = (float)Screen.width / width;
            float heightScale = (float)Screen.height / height;

            // Fit the frame inside the screen: size the camera by whichever
            // axis is the tighter fit.
            if (widthScale < heightScale)
            {
                Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
            }
            else
            {
                Camera.main.orthographicSize = height / 2;
            }


            // Detector reused every frame by Update().
            des = new HOGDescriptor();
        }
        // Update is called once per frame
        /// <summary>
        /// Per-frame: run HOG people detection on the current frame, draw red
        /// boxes around the hits, and push the frame to the display texture.
        /// </summary>
        void Update()
        {
            if (sourceToMatHelper.IsPlaying() && sourceToMatHelper.DidUpdateThisFrame())
            {
                Mat rgbMat = sourceToMatHelper.GetMat();

                using (MatOfRect locations = new MatOfRect())
                    using (MatOfDouble weights = new MatOfDouble())
                    {
                        // NOTE(review): the SVM detector is re-armed on every
                        // frame; setting it once after `des` is created would do.
                        des.setSVMDetector(HOGDescriptor.getDefaultPeopleDetector());
                        des.detectMultiScale(rgbMat, locations, weights);

                        OpenCVForUnity.CoreModule.Rect[] rects = locations.toArray();
                        for (int i = 0; i < rects.Length; i++)
                        {
                            //Debug.Log ("detected person " + rects [i]);
                            Imgproc.rectangle(rgbMat, new Point(rects[i].x, rects[i].y), new Point(rects[i].x + rects[i].width, rects[i].y + rects[i].height), new Scalar(255, 0, 0), 2);
                        }
                        //Debug.Log (locations.ToString ());
                        //Debug.Log (weights.ToString ());

                        Utils.fastMatToTexture2D(rgbMat, texture);
                    }
            }
        }
예제 #8
0
        /// <summary>
        /// Compute a HOG descriptor for each image and write them to a file in
        /// libsvm format: "&lt;label&gt; 1:v1 2:v2 ..." — one line per image.
        /// </summary>
        /// <param name="img_lst">Input images (assumed RGB; converted to gray).</param>
        /// <param name="size">HOG detection window size.</param>
        /// <param name="lables">Class label written at the start of every line.</param>
        /// <param name="path">Output file path (overwritten, UTF-8).</param>
        public static void compute_hog_test(List <Mat> img_lst, OpenCvSharp.Size size, int lables, string path)
        {
            // Dispose the descriptor, scratch Mat and writer deterministically
            // (the original closed the writer manually and leaked the Mats).
            using (HOGDescriptor hog = new HOGDescriptor())
            using (Mat gray = new Mat())
            using (StreamWriter sw = new StreamWriter(path, false, Encoding.UTF8))
            {
                hog.WinSize = size;

                for (int i = 0; i < img_lst.Count; i++) // loop over every image
                {
                    sw.Write(lables.ToString());

                    Cv2.CvtColor(img_lst[i], gray, ColorConversionCodes.RGB2GRAY);
                    float[] descriptors = hog.Compute(gray);

                    // BUG FIX: the original round-tripped each float through a Mat
                    // created as MatType.CV_8UC1 and read it back with Get<float>,
                    // which reinterprets 8-bit storage and corrupts the values
                    // written to the file. Write the descriptor values directly.
                    for (int a = 0; a < descriptors.Length; a++)
                    {
                        sw.Write(" " + (a + 1) + ":" + descriptors[a]);
                    }
                    sw.WriteLine();
                }
            }
        }
예제 #9
0
        /// <summary>
        /// Preprocess the image (resize, then Sobel edge response) and compute
        /// its HOG feature vector. The input reference is replaced by the
        /// preprocessed edge image as a side effect.
        /// </summary>
        /// <param name="modImg">Grayscale image; on return holds the Sobel edge image.</param>
        /// <returns>The HOG descriptor values.</returns>
        public float[] HOG(ref Image <Gray, Byte> modImg)
        {
            // Square-Root Normalization - compresses the input pixel less than Gamma. Increases accuracy of HOG
            //CvInvoke.Sqrt(newMatrix, newMatrix);
            // Scale so that the height becomes 256, preserving aspect ratio.
            // NOTE(review): the original comment claimed 64x128 (the classic HOG
            // window) but the code scales to height 256 — confirm which is intended.
            int    h       = modImg.Height;
            double scaleBy = h / 256.0;
            int    width   = (int)((double)modImg.Width / scaleBy);
            int    height  = (int)((double)modImg.Height / scaleBy);

            modImg = modImg.Resize(width, height, Emgu.CV.CvEnum.Inter.Linear);

            /* Compute the Gradient Vector of every pixel, as well as magnitude and direction */
            // apply Sobel by x and y, sum the responses, and take the absolute value
            Image <Gray, float> sobel = modImg.Sobel(0, 1, 3).Add(modImg.Sobel(1, 0, 3)).AbsDiff(new Gray(0.0));

            modImg = sobel.Convert <Gray, Byte>();

            /* Compute descriptor values */
            HOGDescriptor hog       = new HOGDescriptor();
            Size          WinStride = new Size(18, 18);
            Size          Padding   = new Size(10, 10);

            Point[] locations        = null;  // null = compute over the whole image
            float[] descriptorValues = hog.Compute(modImg, WinStride, Padding, locations);

            return(descriptorValues);
        }
예제 #10
0
        /// <summary>
        /// Find the pedestrians in the image and highlight them.
        /// </summary>
        /// <param name="image">The image; drawn on in place.</param>
        /// <returns>The image with pedestrians highlighted.</returns>
        public static Image <Bgr, Byte> Find(Image <Bgr, Byte> image)
        {
            Rectangle[] regions;
            //this is the CPU version
            using (HOGDescriptor des = new HOGDescriptor())
            {
                des.SetSVMDetector(HOGDescriptor.GetDefaultPeopleDetector());

                Emgu.CV.Structure.MCvObjectDetection[] objects = des.DetectMultiScale(image);

                // BUG FIX: the original used a fixed Rectangle[5], which throws
                // IndexOutOfRangeException for more than five detections and drew
                // leftover empty rectangles for fewer. Size to the actual count.
                regions = new Rectangle[objects.Length];
                for (int i = 0; i < objects.Length; i++)
                {
                    regions[i] = objects[i].Rect;
                    // Count confident detections for the UI counter.
                    if (objects[i].Score > 0.50)
                    {
                        FormVideo.Counter++;
                    }
                }
            }
            foreach (Rectangle pedestrain in regions)
            {
                image.Draw(pedestrain, new Bgr(Color.Red), 1);
            }
            return(image);
        }
예제 #11
0
        /// <summary>
        /// New-frame handler: when an update is pending, run HOG people
        /// detection on the frame, draw yellow boxes, and show the result.
        /// </summary>
        private void videoSourcePlayerCamera_NewFrame(object sender, ref Bitmap image)
        {
            // get new frame
            if (needUpdateCamera)
            {
                needUpdateCamera = false;
                Image <Bgr, Byte> img = null;
                if (image != null)
                {
                    img = new Image <Bgr, Byte>(image);

                    //Image<Bgr, Byte> imgBlank = null;
                    // Dispose the per-frame descriptor deterministically; the
                    // original leaked one native HOGDescriptor on every frame.
                    using (HOGDescriptor hogd = new HOGDescriptor())
                    {
                        MCvObjectDetection[] mObj;
                        //ibVideoPieton.Image = imgBlank;
                        Application.DoEvents();

                        hogd.SetSVMDetector(HOGDescriptor.GetDefaultPeopleDetector());
                        // hitThreshold 0, winStride 4x4, padding 8x8, scale 1.05
                        mObj = hogd.DetectMultiScale(img, 0, new Size(4, 4), new Size(8, 8), 1.05);

                        //mObj = hogd.DetectMultiScale(img);

                        foreach (MCvObjectDetection obj in mObj)
                        {
                            img.Draw(obj.Rect, new Bgr(System.Drawing.Color.Yellow), 2);
                        }

                        // Hand the annotated frame to the image box (do not dispose img).
                        ibCameraDetection.Image = img;
                    }
                }
            }

            //motiondetector.ProcessFrame(image);
        }
예제 #12
0
    // Detect people via HOG
    /// <summary>
    /// Per-frame: grab the camera's render target, run HOG people detection,
    /// draw red boxes, and publish the result to the "test2" object's texture.
    /// </summary>
    void Update()
    {
        // NOTE(review): tex2 and rgbMat are reallocated every frame and never
        // released — consider reusing them across frames.
        tex2   = MakeTexture2D(carm.targetTexture);
        rgbMat = new Mat(tex2.height, tex2.width, CvType.CV_8UC3);

        Utils.texture2DToMat(tex2, rgbMat);

        int frameWidth  = rgbMat.cols();
        int frameHeight = rgbMat.rows();

        colors = new Color32[frameWidth * frameHeight];

        Imgproc.cvtColor(rgbMat, rgbMat, Imgproc.COLOR_BGR2RGB);

        using (MatOfRect locations = new MatOfRect())
            using (MatOfDouble weights = new MatOfDouble())
            {
                // NOTE(review): the detector is re-armed on every frame; arming
                // it once after `des` is created would do.
                des.setSVMDetector(HOGDescriptor.getDefaultPeopleDetector());
                des.detectMultiScale(rgbMat, locations, weights);

                OpenCVForUnity.Rect[] rects = locations.toArray();
                for (int i = 0; i < rects.Length; i++)
                {
                    Imgproc.rectangle(rgbMat, new Point(rects[i].x, rects[i].y), new Point(rects[i].x + rects[i].width, rects[i].y + rects[i].height), new Scalar(255, 0, 0), 2);
                }
            }

        // NOTE(review): the texture is fixed at 320x160 while the Mat is
        // frameWidth x frameHeight — confirm the sizes match at runtime.
        Texture2D texture = new Texture2D(320, 160, TextureFormat.ARGB32, false);

        Utils.matToTexture2D(rgbMat, texture, colors);

        GameObject.Find("test2").GetComponent <Renderer>().material.mainTexture = texture;
    }
예제 #13
0
        /// <summary>
        /// Private constructor: set the Canny thresholds and build the HOG
        /// descriptor (60x60 window, 10x10 blocks, 5x5 stride and cells, 9 bins).
        /// </summary>
        private OpenCVController()
        {
            CANNY_THRESH = 10;
            CANNY_CONNECT_THRESH = 20;
            Hog_Descriptor = new HOGDescriptor(new Size(60, 60), new Size(10, 10), new Size(5, 5), new Size(5, 5), 9, 1, -1, 0.2, false);

        }
예제 #14
0
        /// <summary>
        /// CPU-only full-body detection using the stock HOG people detector.
        /// </summary>
        /// <param name="image">Frame to scan.</param>
        /// <returns>Bounding boxes of detected bodies, or null if detection threw.</returns>
        public Rectangle[] FindBodyHOG_WithoutGpu(Mat image)
        {
            Rectangle[] regions = null;

            using (HOGDescriptor detector = new HOGDescriptor())
            {
                try
                {
                    detector.SetSVMDetector(HOGDescriptor.GetDefaultPeopleDetector());

                    MCvObjectDetection[] detections = detector.DetectMultiScale(image);

                    // Copy the bounding boxes out of the detection structs.
                    regions = new Rectangle[detections.Length];
                    int index = 0;
                    foreach (MCvObjectDetection detection in detections)
                    {
                        regions[index++] = detection.Rect;
                    }
                }
                catch (Exception ex)
                {
                    // Surface the failure to the user; regions stays null here.
                    MessageBox.Show(ex.Message);
                }
            }

            return regions;
        }
예제 #15
0
        /// <summary>
        /// Store the requested cell grid dimensions and create a default HOG
        /// descriptor. NOTE(review): cellCountX/Y are only stored here —
        /// presumably applied when features are generated; confirm against the
        /// rest of the class.
        /// </summary>
        public HOGFeatureGenerator(HOGFeatureGeneratorInitializationData initializationData)
        {
            _cellCountX = initializationData.cellCountX;
            _cellCountY = initializationData.cellCountY;

            _hog = new HOGDescriptor();
        }
예제 #16
0
        /// <summary>
        /// Find the pedestrian in the image.
        /// </summary>
        /// <param name="image">The image.</param>
        /// <param name="hog">CPU/OpenCL detector used for non-GPU inputs.</param>
        /// <param name="hogCuda">Optional CUDA detector, used when the input is a GpuMat.</param>
        /// <returns>The region where pedestrians are detected.</returns>
        public static Rectangle[] Find(IInputArray image, HOGDescriptor hog, CudaHOG hogCuda = null)
        {
            using (InputArray iaImage = image.GetInputArray())
            {
                // Take the CUDA route only when the caller handed us GPU data
                // AND supplied a CUDA detector.
                bool useCuda = iaImage.Kind == InputArray.Type.CudaGpuMat && hogCuda != null;

                if (!useCuda)
                {
                    // CPU/OpenCL path: unpack detection structs into rectangles.
                    MCvObjectDetection[] detections = hog.DetectMultiScale(image);
                    Rectangle[] found = new Rectangle[detections.Length];
                    for (int i = 0; i < detections.Length; i++)
                    {
                        found[i] = detections[i].Rect;
                    }
                    return found;
                }

                // CUDA path: the CUDA detector requires BGRA input.
                using (GpuMat cudaBgra = new GpuMat())
                using (VectorOfRect vr = new VectorOfRect())
                {
                    CudaInvoke.CvtColor(image, cudaBgra, ColorConversion.Bgr2Bgra);
                    hogCuda.DetectMultiScale(cudaBgra, vr);
                    return vr.ToArray();
                }
            }
        }
예제 #17
0
 /// <summary>
 /// Create the HOG detector, preferring the GPU implementation when CUDA is
 /// present and allowed. Any construction failure is recorded in Status
 /// rather than thrown. Geometry comes from class-level fields.
 /// </summary>
 public void CreateHog()
 {
     if (GpuInvoke.HasCuda && AllowGpu)
     {
         try
         {
             //gpuhog = new GpuHOGDescriptor();
             gpuhog = new GpuHOGDescriptor(this.winSize, this.blockSize, this.blockStride, this.cellSize, this.nbins, this.winSigma, this.L2HysThreshold, this.gammaCorrection, this.nLevels);
             gpuhog.SetSVMDetector(GpuHOGDescriptor.GetDefaultPeopleDetector());
             //gpuhog.SetSVMDetector(GpuHOGDescriptor.GetPeopleDetector64x128()); // there are 3 different detectors built-in. maybe others work better?
         }
         catch (Exception e)
         {
             Status = e.ToString();
         }
     }
     else
     {
         try
         {
             // NOTE(review): the literal 1 is presumably derivAperture —
             // confirm against the HOGDescriptor constructor overload used.
             hog = new HOGDescriptor(this.winSize, this.blockSize, this.blockStride, this.cellSize, this.nbins, 1, this.winSigma, this.L2HysThreshold, this.gammaCorrection);
             hog.SetSVMDetector(HOGDescriptor.GetDefaultPeopleDetector());
         }
         catch (Exception e)
         {
             Status = e.ToString();
         }
     }
 }
 /// <summary>
 /// Form constructor: wire up the camera, arm the class-level HOG people
 /// detector, and create the motion/background detectors.
 /// </summary>
 public frmSideCamera()
 {
     InitializeComponent();
     getCamera();
     // `descriptor` is a class-level HOGDescriptor not visible in this snippet.
     descriptor.SetSVMDetector(HOGDescriptor.GetDefaultPeopleDetector());
     motionDetector = new MotionDetector(new TwoFramesDifferenceDetector(), new MotionAreaHighlighting());
     fgDetector     = new BackgroundSubtractorMOG2();
 }
예제 #19
0
    /// <summary>
    /// Unity Start: cache the camera component, allocate the frame Mat, and
    /// create the HOG descriptor used later.
    /// </summary>
    void Start()
    {
        carm = this.GetComponent <Camera>();

        rgbMat = new Mat();

        des = new HOGDescriptor();
    }
예제 #20
0
        //HOGDescriptor
        /// <summary>
        /// Resize the image to the HOG window size and compute its HOG feature
        /// vector with the class-level geometry (HOGWinSize/Block/Stride/Cell).
        /// </summary>
        /// <param name="image">Input image; left untouched (a resized copy is used).</param>
        /// <returns>The HOG descriptor values.</returns>
        public static float[] ComputeHogDescriptors(Mat image)
        {
            // Dispose the resized copy and the custom descriptor deterministically;
            // both wrap native memory (the original leaked them).
            using (Mat matToHog = image.Resize(HOGWinSize))
            using (HOGDescriptor hog = new HOGDescriptor(HOGWinSize, HOGBlockSize, HOGBlockStride, HOGCellSize, HOGNBits))
            {
                // winStride 1x1, no padding
                return hog.Compute(matToHog, new OpenCvSharp.Size(1, 1), new OpenCvSharp.Size(0, 0));
            }
        }
예제 #21
0
        /// <summary>
        /// Find the pedestrian in the image
        /// </summary>
        /// <param name="image">The image</param>
        /// <param name="tryUseCuda">If true and a CUDA device is present, use the CUDA detector.</param>
        /// <param name="tryUseOpenCL">If true and a compatible OpenCL GPU is present, enable OpenCL for the CPU path.</param>
        /// <param name="processingTime">The pedestrian detection time in milliseconds</param>
        /// <returns>The region where pedestrians are detected</returns>
        public static Rectangle[] Find(Mat image, bool tryUseCuda, bool tryUseOpenCL, out long processingTime)
        {
            Stopwatch watch;

            Rectangle[] regions;

#if !(IOS || NETFX_CORE)
            //check if there is a compatible Cuda device to run pedestrian detection
            if (tryUseCuda && CudaInvoke.HasCuda)
            { //this is the Cuda version
                using (CudaHOG des = new CudaHOG(new Size(64, 128), new Size(16, 16), new Size(8, 8), new Size(8, 8)))
                {
                    des.SetSVMDetector(des.GetDefaultPeopleDetector());

                    watch = Stopwatch.StartNew();
                    // The CUDA detector requires BGRA input.
                    // NOTE(review): watch.Stop() is never called on this path, so
                    // processingTime is read from a still-running stopwatch.
                    using (GpuMat cudaBgr = new GpuMat(image))
                        using (GpuMat cudaBgra = new GpuMat())
                            using (VectorOfRect vr = new VectorOfRect())
                            {
                                CudaInvoke.CvtColor(cudaBgr, cudaBgra, ColorConversion.Bgr2Bgra);
                                des.DetectMultiScale(cudaBgra, vr);
                                regions = vr.ToArray();
                            }
                }
            }
            else
#endif
            {
                //Many opencl functions require opencl compatible gpu devices.
                //As of opencv 3.0-alpha, opencv will crash if opencl is enable and only opencv compatible cpu device is presented
                //So we need to call CvInvoke.HaveOpenCLCompatibleGpuDevice instead of CvInvoke.HaveOpenCL (which also returns true on a system that only have cpu opencl devices).
                CvInvoke.UseOpenCL = tryUseOpenCL && CvInvoke.HaveOpenCLCompatibleGpuDevice;

                //this is the CPU/OpenCL version
                using (HOGDescriptor des = new HOGDescriptor())
                {
                    des.SetSVMDetector(HOGDescriptor.GetDefaultPeopleDetector());

                    //load the image to umat so it will automatically use opencl is available
                    // NOTE(review): the UMat is IDisposable and is not disposed here.
                    UMat umat = image.ToUMat(AccessType.Read);

                    watch = Stopwatch.StartNew();

                    MCvObjectDetection[] results = des.DetectMultiScale(umat);
                    regions = new Rectangle[results.Length];
                    // Unpack the detection structs into plain rectangles.
                    for (int i = 0; i < results.Length; i++)
                    {
                        regions[i] = results[i].Rect;
                    }
                    watch.Stop();
                }
            }

            processingTime = watch.ElapsedMilliseconds;

            return(regions);
        }
예제 #22
0
 /// <summary>
 /// Slide a block window over the precomputed HOG values of a full image,
 /// yielding one feature vector and one rectangle per window position.
 /// Thin wrapper around SlideInternal.
 /// </summary>
 public Tuple <ISequence <float[]>, ISequence <Rect> > Slide(
     [InputPin(Description = "", PropertyMode = PropertyMode.Never)] HOGDescriptor hogDescriptor,
     [InputPin(Description = "The values calculated by the Hog computation of a full image", PropertyMode = PropertyMode.Allow)] float[] values,
     [InputPin(Description = "", PropertyMode = PropertyMode.Default)] Size cellSize,
     [InputPin(Description = "How many blocks are describing one Feature-Vector.", PropertyMode = PropertyMode.Default)] Size blockCount
     )
 {
     return(SlideInternal(values, hogDescriptor, cellSize, blockCount));
 }
예제 #23
0
 /// <summary>
 /// Setting CellSize should round-trip through the property unchanged.
 /// </summary>
 public void PropertyCellSize()
 {
     using (var descriptor = new HOGDescriptor())
     {
         var expected = new Size(123, 789);
         descriptor.CellSize = expected;
         Assert.Equal(expected, descriptor.CellSize);
     }
 }
예제 #24
0
 /// <summary>
 /// for training
 /// Builds the HOG extractor (16x16 blocks, 8x8 stride and cells over the
 /// given window), a fresh SVM classifier, the anomaly-speed detector, and
 /// the default surge-detection thresholds.
 /// </summary>
 public VehicleSpeedEstimator(System.Drawing.Size hog_training_img_size)
 {
     anomaly = new AnomalySpeedDetector();
     hog     = new HOGDescriptor(new OpenCvSharp.Size(hog_training_img_size.Width, hog_training_img_size.Height), new OpenCvSharp.Size(16, 16),
                                 new OpenCvSharp.Size(8, 8), new OpenCvSharp.Size(8, 8));
     classifier        = OpenCvSharp.ML.SVM.Create();
     AllowErrorRate    = 1.5f;
     SurgingSpeedMin   = 0.45f;
     SurgingCountLimit = 7;
 }
        /// <summary>
        /// Find the pedestrian in the image
        /// </summary>
        /// <param name="image">The image</param>
        /// <param name="tryUseCuda">If true and a CUDA device is present, use the CUDA detector.</param>
        /// <param name="processingTime">The pedestrian detection time in milliseconds</param>
        /// <returns>The region where pedestrians are detected</returns>
        public static Rectangle[] Find(Mat image, bool tryUseCuda, out long processingTime)
        {
            Stopwatch watch;

            Rectangle[] regions;

#if !(__IOS__ || NETFX_CORE)
            //check if there is a compatible Cuda device to run pedestrian detection
            if (tryUseCuda && CudaInvoke.HasCuda)
            { //this is the Cuda version
                using (CudaHOG des = new CudaHOG(new Size(64, 128), new Size(16, 16), new Size(8, 8), new Size(8, 8)))
                {
                    des.SetSVMDetector(des.GetDefaultPeopleDetector());

                    watch = Stopwatch.StartNew();
                    // The CUDA detector requires BGRA input.
                    // NOTE(review): watch.Stop() is never called on this path, so
                    // processingTime is read from a still-running stopwatch.
                    using (GpuMat cudaBgr = new GpuMat(image))
                        using (GpuMat cudaBgra = new GpuMat())
                            using (VectorOfRect vr = new VectorOfRect())
                            {
                                CudaInvoke.CvtColor(cudaBgr, cudaBgra, ColorConversion.Bgr2Bgra);
                                des.DetectMultiScale(cudaBgra, vr);
                                regions = vr.ToArray();
                            }
                }
            }
            else
#endif
            {
                //this is the CPU/OpenCL version
                using (HOGDescriptor des = new HOGDescriptor())
                {
                    des.SetSVMDetector(HOGDescriptor.GetDefaultPeopleDetector());

                    //load the image to umat so it will automatically use opencl is available
                    // NOTE(review): the UMat is IDisposable and is not disposed here.
                    UMat umat = image.ToUMat(AccessType.Read);

                    watch = Stopwatch.StartNew();

                    MCvObjectDetection[] results = des.DetectMultiScale(umat);
                    regions = new Rectangle[results.Length];
                    // Unpack the detection structs into plain rectangles.
                    for (int i = 0; i < results.Length; i++)
                    {
                        regions[i] = results[i].Rect;
                    }
                    watch.Stop();
                }
            }

            processingTime = watch.ElapsedMilliseconds;

            return(regions);
        }
예제 #26
0
        // Use this for initialization
        /// <summary>
        /// Unity Start: open the sample video, log its capture properties, grab
        /// one frame to size the display texture/quad and camera, rewind, and
        /// create the HOG descriptor used later.
        /// </summary>
        void Start()
        {
            rgbMat = new Mat();

            capture = new VideoCapture();
            capture.open(Utils.getFilePath("768x576_mjpeg.mjpeg"));

            if (capture.isOpened())
            {
                Debug.Log("capture.isOpened() true");
            }
            else
            {
                Debug.Log("capture.isOpened() false");
            }

            // Dump the capture properties for debugging.
            Debug.Log("CAP_PROP_FORMAT: " + capture.get(Videoio.CAP_PROP_FORMAT));
            Debug.Log("CV_CAP_PROP_PREVIEW_FORMAT: " + capture.get(Videoio.CV_CAP_PROP_PREVIEW_FORMAT));
            Debug.Log("CAP_PROP_POS_MSEC: " + capture.get(Videoio.CAP_PROP_POS_MSEC));
            Debug.Log("CAP_PROP_POS_FRAMES: " + capture.get(Videoio.CAP_PROP_POS_FRAMES));
            Debug.Log("CAP_PROP_POS_AVI_RATIO: " + capture.get(Videoio.CAP_PROP_POS_AVI_RATIO));
            Debug.Log("CAP_PROP_FRAME_COUNT: " + capture.get(Videoio.CAP_PROP_FRAME_COUNT));
            Debug.Log("CAP_PROP_FPS: " + capture.get(Videoio.CAP_PROP_FPS));
            Debug.Log("CAP_PROP_FRAME_WIDTH: " + capture.get(Videoio.CAP_PROP_FRAME_WIDTH));
            Debug.Log("CAP_PROP_FRAME_HEIGHT: " + capture.get(Videoio.CAP_PROP_FRAME_HEIGHT));


            // Read one frame so the real frame dimensions are known.
            capture.grab();
            capture.retrieve(rgbMat, 0);
            int frameWidth  = rgbMat.cols();
            int frameHeight = rgbMat.rows();

            colors  = new Color32[frameWidth * frameHeight];
            texture = new Texture2D(frameWidth, frameHeight, TextureFormat.RGBA32, false);
            gameObject.transform.localScale = new Vector3((float)frameWidth, (float)frameHeight, 1);
            float widthScale  = (float)Screen.width / (float)frameWidth;
            float heightScale = (float)Screen.height / (float)frameHeight;

            // Fit the frame quad inside the screen on the tighter axis.
            if (widthScale < heightScale)
            {
                Camera.main.orthographicSize = ((float)frameWidth * (float)Screen.height / (float)Screen.width) / 2;
            }
            else
            {
                Camera.main.orthographicSize = (float)frameHeight / 2;
            }
            // Rewind so playback starts from the first frame.
            capture.set(Videoio.CAP_PROP_POS_FRAMES, 0);

            gameObject.GetComponent <Renderer> ().material.mainTexture = texture;

            des = new HOGDescriptor();
        }
예제 #27
0
        /// <summary>
        /// Classify the input image with the SVM model stored in "4.xml",
        /// using a 28x28 HOG feature vector (14x14 blocks, 7x7 stride/cells, 9 bins).
        /// </summary>
        /// <param name="src">Input BGR image; resized to 28x28 before feature extraction.</param>
        /// <returns>The SVM prediction for the image.</returns>
        private static float GetOutline_LoadConfig(Image <Bgr, byte> src)
        {
            // HOG geometry (the original built these via throwaway Rectangles).
            Size winSize     = new Size(28, 28);
            Size blockSize   = new Size(14, 14);
            Size blockStride = new Size(7, 7);
            Size cellSize    = new Size(7, 7);
            Size winStride   = new Size(1, 1);
            Size padding     = new Size(0, 0);

            // Dispose all native-backed objects deterministically (the original
            // leaked the SVM, descriptor, resized image and feature matrix).
            using (SVM svm = new SVM())
            using (HOGDescriptor hog = new HOGDescriptor(winSize, blockSize, blockStride, cellSize, 9, 1, -1, 0.2, false))
            using (Image <Bgr, byte> resized = new Image <Bgr, byte>(winSize))
            {
                svm.Load("4.xml");

                // Resize to the HOG window. (The original also pre-allocated a
                // 1,000,000-element float buffer that Compute immediately
                // replaced; that pointless allocation has been dropped.)
                CvInvoke.cvResize(src, resized, INTER.CV_INTER_LINEAR);
                float[] features = hog.Compute(resized, winStride, padding, null);

                using (Matrix <float> match = new Matrix <float>(features))
                {
                    return svm.Predict(match);
                }
            }
        }
예제 #28
0
        /// <summary>
        /// Find the pedestrian in the image
        /// </summary>
        /// <param name="image">The image</param>
        /// <param name="processingTime">The processing time in milliseconds</param>
        /// <returns>The region where pedestrians are detected</returns>
        public static Rectangle[] Find(IInputArray image, out long processingTime)
        {
            Stopwatch watch;

            Rectangle[] regions;

            using (InputArray iaImage = image.GetInputArray())
            {
#if !(__IOS__ || NETFX_CORE)
                //if the input array is a GpuMat
                //check if there is a compatible Cuda device to run pedestrian detection
                if (iaImage.Kind == InputArray.Type.CudaGpuMat)
                {
                    //this is the Cuda version
                    using (CudaHOG des = new CudaHOG(new Size(64, 128), new Size(16, 16), new Size(8, 8), new Size(8, 8)))
                    {
                        des.SetSVMDetector(des.GetDefaultPeopleDetector());

                        watch = Stopwatch.StartNew();
                        // The CUDA detector requires BGRA input.
                        // NOTE(review): watch.Stop() is never called on this path;
                        // the elapsed time is read from a running stopwatch.
                        using (GpuMat cudaBgra = new GpuMat())
                            using (VectorOfRect vr = new VectorOfRect())
                            {
                                CudaInvoke.CvtColor(image, cudaBgra, ColorConversion.Bgr2Bgra);
                                des.DetectMultiScale(cudaBgra, vr);
                                regions = vr.ToArray();
                            }
                    }
                }
                else
#endif
                {
                    //this is the CPU/OpenCL version
                    using (HOGDescriptor des = new HOGDescriptor())
                    {
                        des.SetSVMDetector(HOGDescriptor.GetDefaultPeopleDetector());
                        watch = Stopwatch.StartNew();

                        MCvObjectDetection[] results = des.DetectMultiScale(image);
                        regions = new Rectangle[results.Length];
                        // Unpack the detection structs into plain rectangles.
                        for (int i = 0; i < results.Length; i++)
                        {
                            regions[i] = results[i].Rect;
                        }
                        watch.Stop();
                    }
                }

                processingTime = watch.ElapsedMilliseconds;

                return(regions);
            }
        }
 //Handler for the window Loaded event: performs one-time initialization.
 private void MetroWindow_Loaded(object sender, RoutedEventArgs e)
 {
     //Load persisted settings first, before anything that depends on them
     GetSettingsFromFile();
     //Initial image-capture object for tests
     Capture = new VideoCapture(FileName);
     //Initialize the Haar cascade classifier from the configured path
     Haar = new CascadeClassifier(Path.GetFullPath(Properties.Settings.Default.HaarPath));
     //Initialize the HOG SVM with the default people detector
     Hog.SetSVMDetector(HOGDescriptor.GetDefaultPeopleDetector());
     //Refresh the values shown on screen
     RedrawComponents();
 }
예제 #30
0
        /// <summary>
        /// Clear and reset the model. Required Init function to be called again before calling ProcessAndRender.
        /// </summary>
        public void Clear()
        {
            if (_hog != null)
            {
                _hog.Dispose();
                _hog = null;
            }

            if (_hogCuda != null)
            {
                _hogCuda.Dispose();
                //Bug fix: this previously assigned _hog = null, leaving
                //_hogCuda as a dangling reference to a disposed detector
                //(and making this branch dispose it again on the next call).
                _hogCuda = null;
            }
        }
예제 #31
0
        /// <summary>
        /// Run the default HOG people detector on a sample image, draw the
        /// detections and display the result in a fullscreen window.
        /// </summary>
        public void Run()
        {
            //Fix: Mat and HOGDescriptor are IDisposable and were leaked;
            //wrap both in using blocks so native memory is released.
            using (var img = Cv2.ImRead(FilePath.Asahiyama, LoadMode.Color))
            using (var hog = new HOGDescriptor())
            {
                hog.SetSVMDetector(HOGDescriptor.GetDefaultPeopleDetector());

                bool b = hog.CheckDetectorSize();
                Console.WriteLine("CheckDetectorSize: {0}", b);

                var watch = Stopwatch.StartNew();

                // run the detector with default parameters. to get a higher hit-rate
                // (and more false alarms, respectively), decrease the hitThreshold and
                // groupThreshold (set groupThreshold to 0 to turn off the grouping completely).
                Rect[] found = hog.DetectMultiScale(img, 0, new Size(8, 8), new Size(24, 16), 1.05, 2);

                watch.Stop();
                Console.WriteLine("Detection time = {0}ms", watch.ElapsedMilliseconds);
                Console.WriteLine("{0} region(s) found", found.Length);

                foreach (Rect rect in found)
                {
                    // the HOG detector returns slightly larger rectangles than the real objects.
                    // so we slightly shrink the rectangles to get a nicer output.
                    var r = new Rect
                    {
                        X = rect.X + (int)Math.Round(rect.Width * 0.1),
                        Y = rect.Y + (int)Math.Round(rect.Height * 0.1),
                        Width = (int)Math.Round(rect.Width * 0.8),
                        Height = (int)Math.Round(rect.Height * 0.8)
                    };
                    img.Rectangle(r.TopLeft, r.BottomRight, Scalar.Red, 3, LineType.Link8, 0);
                }

                using (var window = new Window("people detector", WindowMode.None, img))
                {
                    window.SetProperty(WindowProperty.Fullscreen, 1);
                    Cv.WaitKey(0);
                }
            }
        }
예제 #32
0
        /// <summary>
        /// Detect people in an uploaded image with the default HOG people
        /// detector, drawing a green rectangle around each detection. The
        /// original and annotated images are passed to the view as
        /// Base64-encoded PNG strings (ViewBag.Base64Img / Base64OrgResult).
        /// </summary>
        /// <param name="imageData">The uploaded image file; null raises an ArgumentException</param>
        /// <returns>The result view; on any failure the error is logged and the view is returned without results</returns>
        public ActionResult DetectHuman(HttpPostedFileBase imageData)
        {
            try
            {
                if (imageData == null) { throw new ArgumentException("File is not exist."); }

                using (var img = Mat.FromStream(imageData.InputStream, LoadMode.Color))
                {
                    //Removed unused locals: ExecutingAssemblyPath, scale, and the
                    //never-read gray/smallImg Mats that were allocated for nothing.
                    byte[] imgBytes = img.ToBytes(".png");
                    string base64Img = Convert.ToBase64String(imgBytes);
                    ViewBag.Base64Img = base64Img;

                    //HOGDescriptor wraps native memory; dispose it when done
                    using (var hog = new HOGDescriptor())
                    {
                        hog.SetSVMDetector(HOGDescriptor.GetDefaultPeopleDetector());
                        var rects = hog.DetectMultiScale(img);

                        foreach (var rect in rects)
                        {
                            //the HOG detector returns slightly larger rectangles
                            //than the real objects, so shrink them before drawing
                            var r = rect;
                            r.X += Cv.Round(rect.Width * 0.1);
                            r.Width = Cv.Round(rect.Width * 0.8);
                            r.Y += Cv.Round(rect.Height * 0.1);
                            r.Height = Cv.Round(rect.Height * 0.8);
                            Cv2.Rectangle(img, r, new Scalar(0, 255, 0), 3);
                        }
                    }

                    byte[] resultBytes = img.ToBytes(".png");
                    string base64Result = Convert.ToBase64String(resultBytes);
                    ViewBag.Base64OrgResult = base64Result;
                }
            }
            catch (Exception ex)
            {
                Console.WriteLine(ex.ToString());
            }

            return View();
        }
예제 #33
0
        // Update is called once per frame: advance the video, run HOG
        // pedestrian detection on the frame, draw the detections and push
        // the result into the display texture.
        void Update()
        {
            // Loop playback: rewind once the last frame has been reached
            if (capture.get(Videoio.CAP_PROP_POS_FRAMES) >= capture.get(Videoio.CAP_PROP_FRAME_COUNT))
                capture.set(Videoio.CAP_PROP_POS_FRAMES, 0);

            // NOTE: a "PlayerLoop called recursively!" error occurs on iOS;
            // WebCamTexture is the recommended alternative there.
            if (!capture.grab())
                return;

            capture.retrieve(rgbMat, 0);

            Imgproc.cvtColor(rgbMat, rgbMat, Imgproc.COLOR_BGR2RGB);

            using (HOGDescriptor detector = new HOGDescriptor())
            using (MatOfRect locations = new MatOfRect())
            using (MatOfDouble weights = new MatOfDouble())
            {
                detector.setSVMDetector(HOGDescriptor.getDefaultPeopleDetector());
                detector.detectMultiScale(rgbMat, locations, weights);

                // Draw a red box around every detected person
                foreach (OpenCVForUnity.Rect found in locations.toArray())
                {
                    Imgproc.rectangle(rgbMat, new Point(found.x, found.y), new Point(found.x + found.width, found.y + found.height), new Scalar(255, 0, 0), 2);
                }
            }

            Utils.matToTexture2D(rgbMat, texture);

            gameObject.GetComponent<Renderer>().material.mainTexture = texture;
        }