Example #1
        public void ListStart(Image <Gray, Byte> img)
        {
            var bin = img.ThresholdAdaptive(new Gray(255), Emgu.CV.CvEnum.ADAPTIVE_THRESHOLD_TYPE.CV_ADAPTIVE_THRESH_MEAN_C, Emgu.CV.CvEnum.THRESH.CV_THRESH_BINARY, 11, new Gray(0));

            form.imageBox1.Image = bin;

            // flood fill
            form.listBox1.Items.Clear();
            var tmp  = bin.Clone();
            var mask = new Image <Gray, Byte>(tmp.Width + 2, tmp.Height + 2); //flood-fill mask must be 2 px larger in each dimension

            for (int i = 0; i < tmp.Width; i++)
            {
                for (int j = 0; j < tmp.Height; j++)
                {
                    if (tmp.Data[j, i, 0] == 255)
                    {
                        MCvConnectedComp comp = new MCvConnectedComp();
                        var seed = new Point(i, j);
                        CvInvoke.cvFloodFill(tmp, seed, new MCvScalar(0),
                                             new MCvScalar(0), new MCvScalar(0), out comp,
                                             Emgu.CV.CvEnum.CONNECTIVITY.EIGHT_CONNECTED,
                                             Emgu.CV.CvEnum.FLOODFILL_FLAG.DEFAULT, mask.Ptr);
                        if (comp.area < 50)
                        {
                            //small components (area < 50 px) could be filtered here;
                            //the branch body is empty in the original source (see the sketch after this example)
                        }
                    }
                }
            }

            form.imageBox1.Image = tmp;
        }
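The `if (comp.area < 50)` branch above is left empty. A minimal sketch of one way to finish it (an assumption about the intent, not the original author's code): erase small blobs from the displayed image with a second flood fill, and list the areas of the surviving ones.

        //Sketch: replaces the empty branch inside the scan loop above.
        //`bin`, `seed`, and `comp` are the variables already in scope there.
        if (comp.area < 50)
        {
            MCvConnectedComp discard;
            //erase the small blob from the displayed image too (NULL mask pointer)
            CvInvoke.cvFloodFill(bin, seed, new MCvScalar(0),
                                 new MCvScalar(0), new MCvScalar(0), out discard,
                                 Emgu.CV.CvEnum.CONNECTIVITY.EIGHT_CONNECTED,
                                 Emgu.CV.CvEnum.FLOODFILL_FLAG.DEFAULT, IntPtr.Zero);
        }
        else
        {
            form.listBox1.Items.Add(comp.area); //record the area of each surviving blob
        }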
Example #2
        //1. Select the region on screen to track; I call this the Object, i.e. the thing to be tracked
        //2. Compute the Object's histogram
        //3. Back-project the Object's histogram onto each input frame
        //4. Use the object-tracking algorithm (Camshift) to quickly box the back-projection result
        //5. Return the tracked region
        //Repeat steps 3-5 and our object-tracking program is done! (See the driving-loop sketch after this example.)

        public ObjectTracking(Image <Bgr, Byte> image, Rectangle ROI)
        {
            // Initialize parameters

            //A boundary box
            trackbox = new MCvBox2D();
            //Connected component
            trackcomp = new MCvConnectedComp();
            //Hue image?
            hue = new Image <Gray, byte>(image.Width, image.Height);
            //*to be understood later (note: equalizing a freshly allocated, all-black image has no effect)
            hue._EqualizeHist();
            //Mask
            mask = new Image <Gray, byte>(image.Width, image.Height);
            //Histogram
            hist = new DenseHistogram(30, new RangeF(0, 180));
            //A plain grayscale image
            backproject = new Image <Gray, byte>(image.Width, image.Height);

            // Assign Object's ROI from source image.
            // Region of Interest: only this specific region is processed
            trackingWindow = ROI;

            // Producing Object's hist
            //Generate the object's histogram
            CalObjectHist(image);
        }
        public Rectangle Tracking(Image <Bgr, Byte> image)
        {
            MCvConnectedComp cc = new MCvConnectedComp();

            UpdateHue(image);

            // Calculate BackProject
            backproject = hist.BackProject(new Image <Gray, Byte>[] { hue });

            // Apply mask
            backproject._And(mask);

            // An empty tracking window means camshift lost the bounding box last time;
            // here we give camshift a fresh start window at (0,0) (you could change it)
            if (trackingWindow.IsEmpty || trackingWindow.Width == 0 || trackingWindow.Height == 0)
            {
                trackingWindow = new Rectangle(0, 0, 100, 100);
            }
            //CvInvoke.cvCamShift(backproject, trackingWindow,
            //    new MCvTermCriteria(10, 1), out trackcomp, out trackbox);

            CvInvoke.cvMeanShift(backproject, trackingWindow,
                                 new MCvTermCriteria(10, 1), out cc);

            // update the tracking window from the mean-shift result
            // (the original left this commented out, so Tracking() always returned the initial window)
            trackingWindow = cc.rect;

            return(trackingWindow);
        }
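A minimal driving loop for this class, tying the five steps together (a sketch; `Capture`, the ROI value, and the drawing call are assumptions, not part of the original snippet):

        //Sketch: steps 1-2 happen in the constructor, steps 3-5 repeat per frame.
        Capture capture = new Capture();                        //default camera (assumed source)
        Image <Bgr, Byte> frame = capture.QueryFrame();
        Rectangle initialROI = new Rectangle(100, 100, 80, 80); //hypothetical object region
        ObjectTracking tracker = new ObjectTracking(frame, initialROI);
        while ((frame = capture.QueryFrame()) != null)
        {
            Rectangle found = tracker.Tracking(frame);          //back-project + shift (steps 3-5)
            frame.Draw(found, new Bgr(0, 0, 255), 2);           //visualize the tracked region
        }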
Example #4
        public ObjectTracking(Image <Bgr, Byte> image, Rectangle ROI)
        {
            // Initialize parameters
            trackbox  = new MCvBox2D();
            trackcomp = new MCvConnectedComp();
            hue       = new Image <Gray, byte>(image.Width, image.Height);
            hue._EqualizeHist();
            mask        = new Image <Gray, byte>(image.Width, image.Height);
            hist        = new DenseHistogram(30, new RangeF(0, 180));
            backproject = new Image <Gray, byte>(image.Width, image.Height);

            // Assign Object's ROI from source image.
            trackingWindow = ROI;

            // Producing Object's hist
            CalObjectHist(image);
        }
        private void Pulse()
        {
            using (ColorImageFrame imageFrame = _kinectSensor.ColorStream.OpenNextFrame(200))
            {
                if (imageFrame == null)
                {
                    return;
                }

                using (Image <Bgr, byte> image = imageFrame.ToOpenCVImage <Bgr, byte>())
                    using (MemStorage storage = new MemStorage()) //create storage for motion components
                    {
                        if (_forgroundDetector == null)
                        {
                            _forgroundDetector = new BGStatModel <Bgr>(image
                                                                       , Emgu.CV.CvEnum.BG_STAT_TYPE.GAUSSIAN_BG_MODEL);
                        }

                        _forgroundDetector.Update(image);

                        //update the motion history
                        _motionHistory.Update(_forgroundDetector.ForgroundMask);

                        //get a copy of the motion mask and enhance its color
                        double[] minValues, maxValues;
                        System.Drawing.Point[] minLoc, maxLoc;
                        _motionHistory.Mask.MinMax(out minValues, out maxValues
                                                   , out minLoc, out maxLoc);
                        Image <Gray, Byte> motionMask = _motionHistory.Mask
                                                        .Mul(255.0 / maxValues[0]);

                        //create the motion image
                        Image <Bgr, Byte> motionImage = new Image <Bgr, byte>(motionMask.Size);
                        motionImage[0] = motionMask;

                        //Threshold to define a motion area
                        //reduce the value to detect smaller motion
                        double minArea = 100;

                        storage.Clear(); //clear the storage
                        Seq <MCvConnectedComp> motionComponents = _motionHistory.GetMotionComponents(storage);
                        bool isMotionDetected = false;
                        //iterate through each of the motion components
                        for (int c = 0; c < motionComponents.Count(); c++)
                        {
                            MCvConnectedComp comp = motionComponents[c];
                            //reject components whose area is below minArea
                            if (comp.area < minArea)
                            {
                                continue;
                            }

                            OnDetection();
                            isMotionDetected = true;
                            break;
                        }
                        if (isMotionDetected == false)
                        {
                            OnDetectionStopped();
                            this.Dispatcher.Invoke(new Action(() => rgbImage.Source = null));
                            StopRecording();
                            return;
                        }

                        this.Dispatcher.Invoke(
                            new Action(() => rgbImage.Source = imageFrame.ToBitmapSource())
                            );
                        Record(imageFrame);
                    }
            }
        }
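Pulse() relies on several fields that are initialized elsewhere. A minimal setup sketch (an assumption modeled on the Emgu CV 2.4 MotionDetection sample; the MotionHistory constructor arguments shown are that sample's defaults, not values from this snippet):

        //Assumed field declarations for Pulse() (not shown in the original snippet)
        private KinectSensor _kinectSensor;           //from the Kinect SDK, started elsewhere
        private BGStatModel <Bgr> _forgroundDetector; //created lazily inside Pulse()
        private MotionHistory _motionHistory = new MotionHistory(
            1.0,   //duration (in seconds) of motion history to keep
            0.05,  //max time delta, parameter for cvCalcMotionGradient
            0.5);  //min time delta, parameter for cvCalcMotionGradient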
Example #6
        public void run()
        {
            if (Thread.CurrentThread.Name.EndsWith("2"))
            {
                Thread.Sleep(10000); //stagger startup of camera thread 2
            }
            if (Thread.CurrentThread.Name.EndsWith("3"))
            {
                Thread.Sleep(23000); //stagger startup of camera thread 3
            }
            try
            {
                //if (Thread.CurrentThread.Name.Equals("camera2"))
                //    Thread.Sleep(1000);
                //per-camera setup would go here; every case is empty in the original
                switch (camno)
                {
                case 1:
                    break;

                case 2:
                    break;

                case 3:
                    break;

                default:
                    break;
                }

                //Thread Processing
                while ((currImage = cap.QueryFrame()) != null)
                {
                    //### updating the currImage every time
                    currImage = currImage.Resize(pictureBox1.Width, pictureBox1.Height, Emgu.CV.CvEnum.INTER.CV_INTER_LINEAR);
                    //### Background Subtraction ~ currImage is subtracted with the bgImage and the result is stored in finalBlobImg.


                    switch (this.state)
                    {
                    case "BGSUB":

                        // Console.WriteLine("_________________");

                        if (!Main.tracking)    //&& Main.lasttrack!=camno
                        {
                            Console.WriteLine("###########" + Thread.CurrentThread.Name + "starts BGSUB");
                            blobDistanceList = bgSubtraction(currImage, bgImage, ref finalBlobImg, ref blobRect);

                            if (blobDistanceList != null)
                            {
                                foreach (Blob blob in blobDistanceList)
                                {
                                    //Console.WriteLine("Blob::" + blob.distance + " " + blob.rect);
                                    if (blob.distance < Main.Dis)     //set the tracker to cam with same object //&& Main.lasttrack!=camno
                                    {
                                        track_window_mean = blob.rect;
                                        if (blob.rect.Width < 50)
                                        {
                                            track_window_mean.Width = 60;
                                        }
                                        else
                                        {
                                            track_window_mean.Width = blob.rect.Width;
                                        }

                                        if (blob.rect.Width + blob.rect.X > 319)
                                        {
                                            track_window_mean.X     = track_window_mean.X - 10;
                                            track_window_mean.Width = 40;
                                        }
                                        if (blob.rect.Height + blob.rect.Y > 239)
                                        {
                                            track_window_mean.Height = track_window_mean.Height - 20;
                                        }
                                        if (blob.rect.Y == 0)
                                        {
                                            track_window_mean.Y = blob.rect.Y + 15;
                                        }

                                        //Console.WriteLine("Blob::::::::" + blob.distance + " " + blob.rect);
                                        Main.lasttrack = camno;
                                        Main.tracking  = true;
                                        state          = "TRACK";
                                        Console.WriteLine("###########" + Thread.CurrentThread.Name + "starts TRACK");
                                        break;
                                    }
                                }
                            }
                        }

                        break;

                    case "TRACK":

                        blobDistanceList = bgSubtraction(currImage, bgImage, ref finalBlobImg, ref blobRect);
                        //--------------
                        foreach (Blob blob in blobDistanceList)
                        {
                            Console.WriteLine("Tracking Blob== " + blob.distance + " " + blob.rect);
                        }
                        Console.WriteLine("----");
                        //-------------

                        Image <Hsv, Byte> hsv = currImage.Convert <Hsv, Byte>(); //convert directly; the separate pre-allocation was redundant
                        Console.WriteLine("1");
                        //extract the hue and value channels
                        Image <Gray, Byte>[] channels = hsv.Split();                                         //split into components
                        Image <Gray, Byte>[] imghue   = new Image <Gray, byte> [1]; imghue[0] = channels[0]; //hsv, so channels[0] is hue.
                        Image <Gray, Byte>   imgval   = channels[2];                                         //hsv, so channels[2] is value.
                        Image <Gray, Byte>   imgsat   = channels[1];                                         //hsv, so channels[1] is saturation.

                        mask = new Image <Gray, Byte>(w, h);
                        Hsv hsv_lower = new Hsv(0, smin, Math.Min(vmin, vmax));
                        Hsv hsv_upper = new Hsv(180, 256, Math.Max(vmin, vmax));
                        mask = hsv.InRange(hsv_lower, hsv_upper);

                        Image <Gray, Byte> backproject = Main.hist.BackProject(imghue);

                        mask        = mask.And(finalBlobImg.Dilate(2));
                        backproject = mask.And(backproject);
                        MCvConnectedComp trac_comp = new MCvConnectedComp();
                        //Console.WriteLine("2");
                        MCvTermCriteria criteria_mean = new MCvTermCriteria(100, 0.002);
                        pictureBox2.Image = mask.Bitmap;
                        //Console.WriteLine(criteria_mean.GetType);
                        try
                        {
                            Emgu.CV.CvInvoke.cvMeanShift(backproject, track_window_mean, criteria_mean, out trac_comp);
                        }
                        catch (CvException e)
                        {
                            Console.WriteLine(track_window_mean);
                            MessageBox.Show(e.ToString());
                        }
                        // Console.WriteLine("3");

                        currImage.Draw(trac_comp.rect, new Bgr(255, 0, 0), 2);
                        currImage.Draw(new Cross2DF(new PointF((trac_comp.rect.X + trac_comp.rect.Width / 2), (trac_comp.rect.Y + trac_comp.rect.Height / 2)), 20, 20), new Bgr(255, 255, 255), 2);
                        track_window_mean = trac_comp.rect;

                        //check whether the person has left the view
                        Image <Gray, byte> subImgBg = bgImgGy.GetSubRect(trac_comp.rect);
                        Image <Gray, byte> subImgFg = currImgGy.GetSubRect(trac_comp.rect);
                        Image <Gray, byte> imMask   = subImgFg.AbsDiff(subImgBg);
                        Gray cnt = imMask.GetAverage();
                        if (cnt.Intensity < 10)
                        {
                            Main.lasttrack = camno;
                            Main.tracking  = false;
                            state          = "BGSUB";
                            Console.WriteLine("###########" + Thread.CurrentThread.Name + "switches to BGSUB");
                        }
                        //---------------------------
                        outimage = new Image <Gray, byte>(w, h);
                        for (int i = 0; i < trac_comp.rect.Height; i++)
                        {
                            for (int j = 0; j < trac_comp.rect.Width; j++)
                            {
                                //subImgFg.Data[i, j, 0] = 255;
                                if (imMask.Data[i, j, 0] < Main.ThSub)
                                {
                                    imMask.Data[i, j, 0] = 0;
                                    outimage.Data[i + trac_comp.rect.Y, j + trac_comp.rect.X, 0] = 0;
                                }
                                else
                                {
                                    imMask.Data[i, j, 0] = 255;
                                    outimage.Data[i + trac_comp.rect.Y, j + trac_comp.rect.X, 0] = 255;
                                }
                            }
                            //Console.WriteLine();
                        }

                        outimage._Erode(2);
                        outimage._Dilate(3);
                        try
                        {
                            Image <Bgr, byte> subimg = currImage.And((outimage.Convert <Bgr, Byte>())).GetSubRect(trac_comp.rect);
                            //subimg.GetSubRect(subrect);//.Save(Thread.CurrentThread.Name + "\\" + k++ + ".jpg");

                            //Calc HISTOGRAM of each blob

                            DenseHistogram    histBlob = new DenseHistogram(hdims, hranges); //cvCreateHist(1, &hdims, CV_HIST_ARRAY, &hranges, 1);
                            Image <Hsv, byte> hsvBlob  = subimg.Convert <Hsv, byte>();

                            //extract the hue and value channels
                            Image <Gray, Byte>[] channelsBlob = hsvBlob.Split();                                             //split into components
                            Image <Gray, Byte>[] imghueBlob   = new Image <Gray, byte> [1]; imghueBlob[0] = channelsBlob[0]; //hsv, so channels[0] is hue.

                            Image <Gray, Byte> maskBlob = hsvBlob.InRange(hsv_lower, hsv_upper);

                            histBlob.Calculate(imghueBlob, false, maskBlob);

                            double distance = CvInvoke.cvCompareHist(Main.hist.Ptr, histBlob.Ptr, Emgu.CV.CvEnum.HISTOGRAM_COMP_METHOD.CV_COMP_BHATTACHARYYA);
                            //if (distance < 0.15)
                            //{
                            //  //  Main.hist = histBlob;
                            //    Console.WriteLine(Thread.CurrentThread.Name + " ===== " + distance);
                            //}
                        }

                        catch (CvException cve)
                        {
                            MessageBox.Show(cve.StackTrace);
                        }
                        //---------------------------

                        //pictureBox2.Image = mask.Bitmap;
                        //pictureBox3.Image = mask.And(finalBlobImg).Bitmap;
                        break;
                    }
                    pictureBox1.Image = currImage.Bitmap;


                    Thread.Sleep(20);
                }
                Console.WriteLine("###########" + Thread.CurrentThread.Name + " exited");
            }
            catch (CvException e)
            {
                //exception swallowed in the original; the thread simply exits on an OpenCV error
            }
        }
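Both states above back-project Main.hist, which this snippet never constructs. One plausible construction (a sketch mirroring the histBlob code in the TRACK branch; roiImage, an Image<Bgr, Byte> crop of the target, is hypothetical):

        //Sketch: build the shared reference hue histogram from an initial target crop.
        Image <Hsv, byte> hsvRoi = roiImage.Convert <Hsv, byte>();
        Image <Gray, Byte>[] hueRoi = new Image <Gray, byte> [1];
        hueRoi[0] = hsvRoi.Split()[0];                  //channel 0 of HSV is hue
        Main.hist = new DenseHistogram(hdims, hranges); //same bins/ranges as histBlob
        Main.hist.Calculate(hueRoi, false, null);       //no mask in this sketch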
Example #7
 //P/Invoke declaration; the [DllImport] attribute that binds it to the native OpenCV library is omitted from this snippet
 public static extern int cvMeanShift(
     IntPtr probImage,
     Rectangle window,
     MCvTermCriteria criteria,
     out MCvConnectedComp comp);
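A usage sketch for the declaration above (assumed names; backproject must be a single-channel probability image, e.g. a histogram back-projection, and searchWindow an initial Rectangle):

 //Sketch: run mean shift on a back-projection and carry the window forward.
 MCvConnectedComp comp;
 CvInvoke.cvMeanShift(backproject.Ptr,            //Image<Gray, Byte>, assumed
                      searchWindow,               //initial Rectangle
                      new MCvTermCriteria(10, 1), //stop after 10 iterations or eps 1
                      out comp);
 searchWindow = comp.rect;                        //seed for the next frame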
Example #8
 //P/Invoke declaration; as above, the [DllImport] attribute is omitted from this snippet
 public static extern int cvCamShift(
     IntPtr probImage,
     Rectangle window,
     MCvTermCriteria criteria,
     out MCvConnectedComp comp,
     out MCvBox2D box);
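A usage sketch with the same assumed names as the previous example; unlike mean shift, CamShift also adapts the window size and reports the result as a rotated box:

 //Sketch: CamShift returns both a connected component and a rotated box.
 MCvConnectedComp comp;
 MCvBox2D box;
 CvInvoke.cvCamShift(backproject.Ptr, searchWindow,
                     new MCvTermCriteria(10, 1), out comp, out box);
 searchWindow = comp.rect; //axis-aligned window for the next frame
 float angle = box.angle;  //orientation of the tracked object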
Example #9
        private SkeletonPoint FindHand(short[] depthData, Rectangle rect)
        {
            CvInvoke.cvZero(HandImage.Ptr);
            var handImageData  = HandImage.Data;
            var handMaskData   = HandMask.Data;
            var playerMaskData = playerMask.Data;

            var maxDepth = 0;
            var minDepth = Int32.MaxValue;

            for (int y = rect.Top; y < rect.Top + rect.Height && y < height; y++)
            {
                for (int x = rect.Left; x < rect.Left + rect.Width && x < width; x++)
                {
                    if (y > 0 && x > 0 && handMaskData[y, x, 0] > 0 &&
                        playerMaskData[y, x, 0] > 0)
                    {
                        var depth = DepthUtil.RawToDepth(depthData[y * width + x]);
                        maxDepth = Math.Max(maxDepth, depth);
                        if (depth < minDepth)
                        {
                            minDepth = depth;
                        }
                    }
                }
            }

            //scale factor mapping [minDepth, maxDepth] to [0, 255]
            //(maxDepth == minDepth, i.e. no hand pixels found, would divide by zero)
            var scale = (float)255 / (maxDepth - minDepth);

            for (int y = rect.Top; y < rect.Top + rect.Height && y < height; y++)
            {
                for (int x = rect.Left; x < rect.Left + rect.Width && x < width; x++)
                {
                    if (y > 0 && x > 0 && playerMaskData[y, x, 0] > 0 &&
                        handMaskData[y, x, 0] > 0)
                    {
                        var depth = DepthUtil.RawToDepth(depthData[y * width + x]);
                        handImageData[y, x, 0] = (byte)((maxDepth - depth) * scale);
                    }
                }
            }

            var connectedComp = new MCvConnectedComp();
            var shiftedBox    = new MCvBox2D();

            CvInvoke.cvCamShift(HandImage.Ptr, rect, new MCvTermCriteria(0.0), out connectedComp,
                                out shiftedBox);

            PrevHand = new windows.Point(HandBox.center.X, HandBox.center.Y);
            HandBox  = shiftedBox;
            var newRect  = shiftedBox.MinAreaRect();
            var aveDepth = 0.0;
            var count    = 0;

            for (int y = newRect.Top; y < newRect.Top + newRect.Height && y < height; y++)
            {
                for (int x = newRect.Left; x < newRect.Left + newRect.Width && x < width; x++)
                {
                    if (x > 0 && y > 0 && playerMaskData[y, x, 0] > 0 &&
                        handMaskData[y, x, 0] > 0)
                    {
                        var depth = DepthUtil.RawToDepth(depthData[y * width + x]);
                        aveDepth += depth;
                        count++;
                    }
                }
            }

            aveDepth /= count; //count == 0 (no pixels in the shifted box) would yield NaN
            var shiftedCenterX = Math.Max(0, shiftedBox.center.X);

            shiftedCenterX = Math.Min(shiftedCenterX, width);
            var shiftedCenterY = Math.Max(0, shiftedBox.center.Y);

            shiftedCenterY = Math.Min(shiftedCenterY, height);
            return(mapper.MapDepthPointToSkeletonPoint((int)shiftedCenterX,
                                                       (int)shiftedCenterY, (int)aveDepth));
        }
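FindHand normalizes depth so that nearer pixels become brighter, which steers CamShift toward the closest surface. The mapping, isolated as a helper (a sketch; guarding against maxDepth == minDepth is left to the caller):

        //intensity = (maxDepth - depth) * 255 / (maxDepth - minDepth)
        //depth == minDepth (nearest)  -> 255 (brightest)
        //depth == maxDepth (farthest) ->   0 (darkest)
        private static byte DepthToIntensity(int depth, int minDepth, int maxDepth)
        {
            float scale = (float)255 / (maxDepth - minDepth);
            return (byte)((maxDepth - depth) * scale);
        }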
        public void run()
        {
            //read each frame and display it in the picture box
            if (Thread.CurrentThread.Name.Equals("camera2"))
            {
                Thread.Sleep(25000);
            }
            while ((currImage = cap.QueryFrame()) != null)
            {
                //### updating the currImage every time

                currImage = currImage.Resize(pictureBox1.Width, pictureBox1.Height, Emgu.CV.CvEnum.INTER.CV_INTER_LINEAR);

                //### Background Subtraction ~ currImage is subtracted with the bgImage and the result is stored in finalBlobImg.
                bgSubtraction(currImage, bgImage, ref finalBlobImg, ref blobRect);

                if (Main.state1.Equals(Thread.CurrentThread.Name))
                {
                    Image <Hsv, Byte> hsv = currImage.Convert <Hsv, Byte>(); //convert directly; the separate pre-allocation was redundant


                    //extract the hue and value channels
                    Image <Gray, Byte>[] channels = hsv.Split();                                         //split into components
                    Image <Gray, Byte>[] imghue   = new Image <Gray, byte> [1]; imghue[0] = channels[0]; //hsv, so channels[0] is hue.
                    Image <Gray, Byte>   imgval   = channels[2];                                         //hsv, so channels[2] is value.
                    Image <Gray, Byte>   imgsat   = channels[1];                                         //hsv, so channels[1] is saturation.

                    /**
                     * Check if the pixels in hsv fall within a particular range.
                     * H: 0 to 180
                     * S: smin to 256
                     * V: vmin to vmax
                     * Store the result in variable: mask
                     */
                    mask = new Image <Gray, Byte>(w, h);
                    Hsv hsv_lower = new Hsv(0, smin, Math.Min(vmin, vmax));
                    Hsv hsv_upper = new Hsv(180, 256, Math.Max(vmin, vmax));
                    mask = hsv.InRange(hsv_lower, hsv_upper);

                    Image <Gray, Byte> backproject = Main.hist.BackProject(imghue);

                    mask        = mask.And(finalBlobImg);
                    backproject = mask.And(backproject);
                    MCvConnectedComp trac_comp = new MCvConnectedComp();

                    MCvTermCriteria criteria_mean = new MCvTermCriteria(100, 0.025);

                    //Console.WriteLine(criteria_mean.GetType);
                    Emgu.CV.CvInvoke.cvMeanShift(backproject, track_window_mean, criteria_mean, out trac_comp);
                    currImage.Draw(trac_comp.rect, new Bgr(255, 0, 0), 2);

                    track_window_mean = trac_comp.rect;

                    //check whether the person has left the view

                    Image <Gray, byte> subImgBg = bgImgGy.GetSubRect(trac_comp.rect);
                    Image <Gray, byte> subImgFg = currImgGy.GetSubRect(trac_comp.rect);
                    Image <Gray, byte> imMask   = subImgFg.AbsDiff(subImgBg);
                    Gray cnt = imMask.GetAverage();

                    pictureBox2.Image = mask.Bitmap;
                    pictureBox3.Image = mask.And(finalBlobImg).Bitmap;
                }
                //  pictureBox3.Image = finalBlobImg.Bitmap;
                pictureBox1.Image = currImage.Bitmap;

                //mean
                /* Update tracking window for camshift */


                Thread.Sleep(10);
            }
        }