Example #1
0
        /// <summary>
        /// Demonstrates blob detection and tracking on a video file: grabs each
        /// frame, converts to gray, Otsu-thresholds it, keeps the 200 largest
        /// blobs, and renders the blobs and their tracks in two windows.
        /// </summary>
        private static void Track()
        {
            using (var video = new CvCapture("data/bach.mp4"))
            using (var window = new CvWindow("render"))
            using (var windowTracks = new CvWindow("tracks"))
            {
                IplImage gray         = null;
                IplImage binary       = null;
                IplImage render       = null;
                IplImage renderTracks = null;
                CvTracks tracks       = new CvTracks();

                try
                {
                    for (int i = 0; ; i++)
                    {
                        IplImage frame = video.QueryFrame();
                        // QueryFrame returns null once the stream is exhausted;
                        // the original loop dereferenced the null frame and crashed.
                        if (frame == null)
                            break;

                        if (gray == null)
                        {
                            // Lazily allocate work buffers once the frame size is known.
                            gray         = new IplImage(frame.Size, BitDepth.U8, 1);
                            binary       = new IplImage(frame.Size, BitDepth.U8, 1);
                            render       = new IplImage(frame.Size, BitDepth.U8, 3);
                            renderTracks = new IplImage(frame.Size, BitDepth.U8, 3);
                        }

                        render.Zero();
                        renderTracks.Zero();

                        Cv.CvtColor(frame, gray, ColorConversion.BgrToGray);
                        Cv.Threshold(gray, binary, 0, 255, ThresholdType.Otsu);

                        // Keep only the 200 largest blobs to bound per-frame work.
                        CvBlobs blobs    = new CvBlobs(binary);
                        CvBlobs newBlobs = new CvBlobs(blobs
                                                       .OrderByDescending(pair => pair.Value.Area)
                                                       .Take(200)
                                                       .ToDictionary(pair => pair.Key, pair => pair.Value), blobs.Labels);
                        newBlobs.RenderBlobs(binary, render);
                        window.ShowImage(render);

                        newBlobs.UpdateTracks(tracks, 10.0, Int32.MaxValue);
                        tracks.Render(binary, renderTracks);
                        windowTracks.ShowImage(renderTracks);

                        Cv.WaitKey(200);
                        Console.WriteLine(i);
                    }
                }
                finally
                {
                    // IplImage wraps native memory; release it deterministically
                    // instead of waiting for finalization.
                    gray?.Dispose();
                    binary?.Dispose();
                    render?.Dispose();
                    renderTracks?.Dispose();
                }
            }
        }
Example #2
0
 /// <summary>
 /// Updates list of tracks based on current blobs. 
 /// </summary>
 /// <param name="blobs">List of blobs.</param>
 /// <param name="tracks">List of tracks.</param>
 /// <param name="thDistance">Max distance to determine when a track and a blob match.</param>
 /// <param name="thInactive">Max number of frames a track can be inactive.</param>
 /// <param name="thActive">If a track becomes inactive but it has been active less than thActive frames, the track will be deleted.</param>
 /// <exception cref="ArgumentNullException"><paramref name="blobs"/> or <paramref name="tracks"/> is null.</exception>
 /// <remarks>
 /// Tracking based on:
 /// A. Senior, A. Hampapur, Y-L Tian, L. Brown, S. Pankanti, R. Bolle. Appearance Models for
 /// Occlusion Handling. Second International workshop on Performance Evaluation of Tracking and
 /// Surveillance Systems &amp; CVPR'01. December, 2001.
 /// (http://www.research.ibm.com/peoplevision/PETS2001.pdf)
 /// </remarks>
 public static void UpdateTracks(CvBlobs blobs, CvTracks tracks, double thDistance, int thInactive, int thActive)
 {
     if (blobs == null)
         throw new ArgumentNullException(nameof(blobs));
     // Guard tracks too: a null tracks argument would otherwise surface as a
     // confusing failure inside the instance overload instead of at this boundary.
     if (tracks == null)
         throw new ArgumentNullException(nameof(tracks));
     blobs.UpdateTracks(tracks, thDistance, thInactive, thActive);
 }
Example #3
0
        /// <summary>
        /// Blob-tracking demo: reads frames from a video, Otsu-thresholds the
        /// grayscale image, extracts the 200 largest blobs, and shows the blob
        /// render and the accumulated tracks in two separate windows.
        /// </summary>
        private static void Track()
        {
            using (var video = new CvCapture("data/bach.mp4"))
            using (var window = new CvWindow("render"))
            using (var windowTracks = new CvWindow("tracks"))
            {
                IplImage gray = null;
                IplImage binary = null;
                IplImage render = null;
                IplImage renderTracks = null;
                CvTracks tracks = new CvTracks();

                try
                {
                    for (int i = 0; ; i++)
                    {
                        IplImage frame = video.QueryFrame();
                        // Stop at end of stream: QueryFrame returns null there and
                        // the original code went on to dereference it.
                        if (frame == null)
                            break;

                        if (gray == null)
                        {
                            // Allocate work buffers on the first frame, once the size is known.
                            gray = new IplImage(frame.Size, BitDepth.U8, 1);
                            binary = new IplImage(frame.Size, BitDepth.U8, 1);
                            render = new IplImage(frame.Size, BitDepth.U8, 3);
                            renderTracks = new IplImage(frame.Size, BitDepth.U8, 3);
                        }

                        render.Zero();
                        renderTracks.Zero();

                        Cv.CvtColor(frame, gray, ColorConversion.BgrToGray);
                        Cv.Threshold(gray, binary, 0, 255, ThresholdType.Otsu);

                        // Restrict tracking input to the 200 largest blobs by area.
                        CvBlobs blobs = new CvBlobs(binary);
                        CvBlobs newBlobs = new CvBlobs(blobs
                            .OrderByDescending(pair => pair.Value.Area)
                            .Take(200)
                            .ToDictionary(pair => pair.Key, pair => pair.Value), blobs.Labels);
                        newBlobs.RenderBlobs(binary, render);
                        window.ShowImage(render);

                        newBlobs.UpdateTracks(tracks, 10.0, Int32.MaxValue);
                        tracks.Render(binary, renderTracks);
                        windowTracks.ShowImage(renderTracks);

                        Cv.WaitKey(200);
                        Console.WriteLine(i);
                    }
                }
                finally
                {
                    // Release the native image buffers deterministically.
                    gray?.Dispose();
                    binary?.Dispose();
                    render?.Dispose();
                    renderTracks?.Dispose();
                }
            }
        }