Code example #1
0
        /// <summary>
        /// Per-frame pipeline: grab a frame from the capture, blur it, extract
        /// the foreground mask with the background subtractor, detect blobs,
        /// update the tracker, and draw each track's bounding box and id onto
        /// the frame before displaying frame and mask.
        /// </summary>
        /// <param name="sender">Timer/event source (unused).</param>
        /// <param name="e">Event arguments (unused).</param>
        void ProcessFrame(object sender, EventArgs e)
        {
            Mat frame = _cameraCapture.QueryFrame();
            if (frame == null)
            {
                // Capture can yield null (e.g. end of a video stream).
                return;
            }

            // Not disposed: assigned to imageBox2 below, which keeps using it.
            Mat foregroundMask = new Mat();

            using (Mat smoothedFrame = new Mat())
            {
                // Light Gaussian blur to suppress sensor noise before
                // background subtraction.
                CvInvoke.GaussianBlur(frame, smoothedFrame, new Size(3, 3), 1);
                _fgDetector.Apply(smoothedFrame, foregroundMask);
            }

            CvBlobs blobs = new CvBlobs();
            _blobDetector.Detect(foregroundMask.ToImage <Gray, byte>(), blobs);
            blobs.FilterByArea(100, int.MaxValue);

            // Average frame dimension, used to scale the tracker's distance
            // threshold. (The original summed Width twice, which made the
            // "average" a no-op.)
            float scale = (frame.Width + frame.Height) / 2.0f;
            _tracker.Update(blobs, 0.01 * scale, 5, 5);

            foreach (var pair in _tracker)
            {
                CvTrack b = pair.Value;
                CvInvoke.Rectangle(frame, b.BoundingBox, new MCvScalar(255.0, 255.0, 255.0), 2);
                CvInvoke.PutText(frame, b.Id.ToString(), new Point((int)Math.Round(b.Centroid.X), (int)Math.Round(b.Centroid.Y)), FontFace.HersheyPlain, 1.0, new MCvScalar(255.0, 255.0, 255.0));
            }

            imageBox1.Image = frame;
            imageBox2.Image = foregroundMask;
        }
Code example #2
0
        /// <summary>
        /// Motion detection on a single image file: diffs the blurred grayscale
        /// frame against the previous frame, thresholds and dilates the
        /// difference, detects and tracks blobs, and annotates the original
        /// colour image with the tracked bounding boxes.
        /// </summary>
        /// <param name="fileName">Path of the image file to process.</param>
        /// <returns>
        /// The annotated colour image when at least one track is active;
        /// otherwise null. The very first call always returns null — it only
        /// primes the reference frame used for differencing.
        /// </returns>
        static Image <Bgr, Byte> _doDetect(string fileName)
        {
            Debug.WriteLine($"Processing: {fileName}");

            var frameOrig = new Image <Bgr, Byte>(fileName);

            Image <Gray, Byte> smoothedImage;
            using (var frame = frameOrig.Convert <Gray, Byte>())
            using (var smoothedFrame = new Mat())
            {
                // Heavy blur: only gross frame-to-frame motion should survive
                // the AbsDiff below.
                CvInvoke.GaussianBlur(frame, smoothedFrame, new Size(19, 19), 7);
                smoothedImage = smoothedFrame.ToImage <Gray, Byte>();
            }

            if (_original == null)
            {
                // First frame: nothing to diff against yet.
                _original = smoothedImage;
                frameOrig.Dispose();
                return null;
            }

            Image <Gray, Byte> thresh;
            using (var frameDelta = smoothedImage.AbsDiff(_original))
            using (var binary = frameDelta.ThresholdBinary(new Gray(25), new Gray(255)))
            {
                // Dilate to merge nearby motion fragments into single blobs.
                thresh = binary.Dilate(2);
            }

            // Replace the reference frame, releasing the previous one.
            _original.Dispose();
            _original = smoothedImage;

            CvBlobs blobs = new CvBlobs();
            _blobDetector.Detect(thresh, blobs);
            blobs.FilterByArea(1600, int.MaxValue);
            thresh.Dispose();

            // Average frame dimension, used to scale the tracker's distance
            // threshold. (The original summed Width twice, which made the
            // "average" a no-op.)
            float scale = (frameOrig.Width + frameOrig.Height) / 2.0f;

            _tracker.Update(blobs, scale, 5, 5);

            foreach (var pair in _tracker)
            {
                CvTrack b = pair.Value;
                CvInvoke.Rectangle(frameOrig, b.BoundingBox, new MCvScalar(255.0, 255.0, 255.0), 2);
                CvInvoke.PutText(frameOrig, b.Id.ToString(), new Point((int)Math.Round(b.Centroid.X), (int)Math.Round(b.Centroid.Y)), FontFace.HersheyPlain, 1.0, new MCvScalar(255.0, 255.0, 255.0));
            }

            if (_tracker.Count > 0)
            {
                return frameOrig;
            }

            // No active tracks: the annotated frame is not handed to the
            // caller, so release it here.
            frameOrig.Dispose();
            return null;
        }
Code example #3
0
File: ObjectTracker.cs  Project: spdjudd/BirdTracker
 /// <summary>
 /// Crops a square window around the track's centroid out of the full frame
 /// and hands it to the tracked object for further processing.
 /// </summary>
 /// <param name="track">Raw tracker result supplying the centroid.</param>
 /// <param name="image">Full frame to crop from.</param>
 /// <param name="myTrack">Identity record supplying MaxSize and receiving the crop.</param>
 private void SetTrackedImage(CvTrack track, Image<Bgr, byte> image, TrackedObject myTrack)
 {
     // Window edge: the track's largest observed dimension, rounded up to the
     // next multiple of SubImageMultiplier when that setting is above 1.
     var multiplier = _settings.SubImageMultiplier.Value;
     var windowSize = myTrack.MaxSize;
     if (multiplier > 1)
     {
         windowSize = multiplier * (1 + (myTrack.MaxSize / multiplier));
     }

     // Centre the window on the centroid, then shift it back inside the frame.
     // NOTE(review): if windowSize ever exceeded the frame size, the Min clamp
     // would yield a negative origin — confirm MaxSize is bounded upstream.
     var x = Math.Min(_imageWidth - windowSize,
         Math.Max(0, Convert.ToInt32(track.Centroid.x) - windowSize / 2));
     var y = Math.Min(_imageHeight - windowSize,
         Math.Max(0, Convert.ToInt32(track.Centroid.y) - windowSize / 2));

     var sourceRect = new Rectangle(x, y, windowSize, windowSize);
     myTrack.ProcessTrackerResult(sourceRect, image.Copy(sourceRect));
 }
Code example #4
0
        /// <summary>
        /// Detects moving regions in the last captured mask, tracks them,
        /// annotates the last frame (thick outline for the largest qualifying
        /// region, thin for the rest), and reports the largest region found.
        /// </summary>
        /// <returns>
        /// A model carrying the largest qualifying bounding box, its centroid,
        /// and whether any subject passed the size filters.
        /// </returns>
        public CameraTrackingFindSubjectsReturnModel FindSubjects()
        {
            double    largestW     = 0;
            double    centerX      = 0;
            double    centerY      = 0;
            bool      foundSubject = false;
            Rectangle subject      = new Rectangle();

            // Get detection 'blobs' (connected foreground regions).
            CvBlobs blobs = new CvBlobs();

            _blobDetector.Detect(this.lastMask.ToImage <Gray, byte>(), blobs);
            blobs.FilterByArea(100, int.MaxValue);

            // Average frame dimension, used to scale the tracker's distance
            // threshold. (The original summed Width twice, which made the
            // "average" a no-op.)
            float scale = (this.lastFrame.Width + this.lastFrame.Height) / 2.0f;

            _tracker.Update(blobs, 0.01 * scale, 5, 5);

            FrameWidth  = this.lastFrame.Width;
            FrameHeight = this.lastFrame.Height;

            foreach (var pair in _tracker)
            {
                CvTrack b = pair.Value;

                // Skip boxes that are too small in either dimension, or too
                // large in BOTH dimensions, relative to the frame.
                if (b.BoundingBox.Width < (this.lastFrame.Width / this.smallestDetectionWidthSizeDivisor) ||
                    b.BoundingBox.Height < (this.lastFrame.Height / this.smallestDetectionHeightSizeDivisor) ||
                    (b.BoundingBox.Width > (this.lastFrame.Width / this.largestDetectionWidthSizeDivisor) &&
                     b.BoundingBox.Height > (this.lastFrame.Height / this.largestDetectionHeightSizeDivisor)))
                {
                    continue;
                }

                // Track the widest region seen so far — we only care to follow
                // the largest. NOTE(review): regions superseded later in the
                // loop keep their thick outline; redraw after the loop if a
                // single highlight is required.
                bool isLargest = b.BoundingBox.Width > largestW;
                if (isLargest)
                {
                    subject      = b.BoundingBox;
                    largestW     = b.BoundingBox.Width;
                    centerX      = b.Centroid.X;
                    centerY      = b.Centroid.Y;
                    foundSubject = true;
                }

                CvInvoke.Rectangle(
                    this.lastFrame,
                    b.BoundingBox,
                    new MCvScalar(255.0, 255.0, 255.0),
                    isLargest ? 20 : 1);

                CvInvoke.PutText(
                    this.lastFrame,
                    b.Id.ToString(),
                    new Point(
                        (int)Math.Round(b.Centroid.X),
                        (int)Math.Round(b.Centroid.Y)),
                    FontFace.HersheyPlain,
                    1.0,
                    new MCvScalar(255.0, 255.0, 255.0));
            }

            return(new CameraTrackingFindSubjectsReturnModel()
            {
                CenterX = centerX,
                CenterY = centerY,
                BoundingBox = subject,
                FoundSubject = foundSubject
            });
        }
Code example #5
0
File: ObjectTracker.cs  Project: spdjudd/BirdTracker
        /// <summary>
        /// Updates identity bookkeeping for one raw tracker result: resolves
        /// the TrackedObject currently bound to the track id (evicting a dead
        /// stale one when the id is reused), creates or refreshes it, records
        /// its largest observed size, and captures a cropped image via
        /// SetTrackedImage once the track is old and large enough to identify.
        /// Original author's notes — here: mark active as active, set crop,
        /// public enumerable. UI thread: iterate observable - throw out any not
        /// active, mark all active inactive, add any new, send new images for
        /// id, create imagesource.
        /// </summary>
        /// <param name="track">Raw track produced by the blob tracker.</param>
        /// <param name="image">Full frame the track was detected in.</param>
        private void ProcessTrack(CvTrack track, Image<Bgr, Byte> image)
        {
            // Ignore inactive tracks.
            // NOTE(review): can this happen for tracks that later resume?
            if (track.Active <= 0) return;

            // Get the tracked object currently using this id, if any.
            TrackedObject myTrack;
            _trackedObjectIdentities.TryGetValue(track.Id, out myTrack);

            // A brand-new track (Lifetime <= 1) whose id maps to a dead object
            // means the tracker recycled the id: drop the stale identity.
            if (track.Lifetime <= 1 && myTrack != null && myTrack.TrackingState == TrackingState.Dead)
            {
                _trackedObjectIdentities.Remove(track.Id);
                myTrack = null;
            }

            // Too short-lived to be worth identifying yet.
            if (track.Lifetime < _settings.MinFramesToStartIdentifying.Value)
            {
                return;
            }

            if (myTrack == null)
            {
                // First sighting past the lifetime threshold: allocate a new
                // identity (registered below only once it is big enough).
                myTrack = new TrackedObject{Id = _nextTrackId++, TrackingState = TrackingState.New};
            }
            else
            {
                myTrack.TrackingState = TrackingState.Current;
            }
            try
            {
                // Grow MaxSize monotonically to the largest bounding-box edge
                // ever seen for this identity.
                myTrack.MaxSize = Math.Max(myTrack.MaxSize,
                    Math.Max(track.BoundingBox.Height, track.BoundingBox.Width));
                // Still too small to identify; new identities are not
                // registered until they pass this size gate.
                if (myTrack.MaxSize < _settings.MinSizeToIdentify.Value) return;
                if (myTrack.TrackingState == TrackingState.New)
                    _trackedObjectIdentities[track.Id] = myTrack;
                SetTrackedImage(track, image, myTrack);
            }
            catch (Exception ex)
            {
                // Best-effort: a failed crop/identify must not kill the
                // tracking loop.
                Log.Error("Exception processing track", ex);
            }
        }