Tracker `Update()` public method — collected usage examples.

Signature: public void Update()
Returns: void
Example #1
0
        /// <summary>
        /// Per-frame capture callback: grabs a frame, initializes or updates the
        /// object tracker, draws the tracked bounding box, and shows the frame.
        /// </summary>
        private void ProcessFrame(object sender, EventArgs e)
        {
            // BUG(review): HaveInitialized is a local and resets to false on every
            // callback, so tracker.Init() runs every frame and the Update() branch
            // below is unreachable. Promote it to an instance field
            // (e.g. private bool _haveInitialized;) reset when a new selection is made.
            bool HaveInitialized = false;

            Frame = capture.QueryFrame();
            if (Frame != null)
            {
                if (HaveSelected && TrackerType != "")
                {
                    if (!HaveInitialized)
                    {
                        // Seed the tracker with the user-selected region; drop the
                        // selection if initialization fails.
                        if (!tracker.Init(Frame, BoundingBox))
                        {
                            HaveSelected = false;
                        }
                        HaveInitialized = true;
                    }
                    else
                    {
                        // Track into the new frame and draw the box in red (BGR order).
                        if (tracker.Update(Frame, out BoundingBox))
                        {
                            CvInvoke.Rectangle(Frame, BoundingBox, new MCvScalar(0, 0, 255), 2);
                        }
                    }
                }
                // FIX: clamp to non-negative — for VideoFps > 200 the original
                // expression went negative and Thread.Sleep would throw
                // ArgumentOutOfRangeException.
                System.Threading.Thread.Sleep(Math.Max(0, (int)(1000.0 / VideoFps - 5)));
                VideoBox.Image = Frame;
                // NOTE(review): forcing a full GC every frame is an anti-pattern;
                // prefer disposing the previous Frame explicitly.
                GC.Collect();
            }
        }
Example #2
0
        /// <summary>
        /// Shared tracker smoke test: runs the given tracker over 21 frames of the
        /// ETHZ pedestrian sequence, starting from a known bounding box.
        /// </summary>
        /// <param name="tracker">Tracker implementation under test; must not be null.</param>
        protected static void UpdateBase(Tracker tracker)
        {
            if (tracker is null)
            {
                throw new System.ArgumentNullException(nameof(tracker));
            }

            // ETHZ dataset
            // ETHZ is Eidgenössische Technische Hochschule Zürich, in Deutsch
            // https://data.vision.ee.ethz.ch/cvl/aess/cvpr2008/seq03-img-left.tar.gz
            // The sequence is research data and may not permit commercial use.
            // Tracking is imperfect on it, but good enough to verify the test plumbing.

            // Location of the person visible in the first frame.
            var boundingBox = new Rect2d(286, 146, 70, 180);

            // Root of the extracted image sequence; also where marker images are saved.
            const string path = @"_data/image/ETHZ/seq03-img-left";

            for (var frameIndex = 0; frameIndex < 21; frameIndex++)
            {
                var file = $"image_{frameIndex:D8}_0.png";

                using var mat = new Mat(Path.Combine(path, file));
                if (frameIndex == 0)
                {
                    // First frame: seed the tracker with the known location.
                    tracker.Init(mat, boundingBox);
                }
                else
                {
                    // Later frames: the tracker refines the box in place.
                    tracker.Update(mat, ref boundingBox);
                }

                // With a debugger attached, dump each frame with the tracked
                // rectangle drawn in red for visual inspection.
                if (Debugger.IsAttached)
                {
                    Directory.CreateDirectory(path);
                    mat.Rectangle(
                        new Point((int)boundingBox.X, (int)boundingBox.Y),
                        new Point((int)(boundingBox.X + boundingBox.Width), (int)(boundingBox.Y + boundingBox.Height)),
                        new Scalar(0, 0, 255));
                    Cv2.ImWrite(Path.Combine(path, file), mat);
                }
            }
        }
        /// <summary>
        /// Processes one tracking input: starts a fresh track when requested,
        /// otherwise updates the existing tracker, and reports the resulting box.
        /// </summary>
        protected override TrackingOutput DoProcess(TrackingInput input)
        {
            var boundingBox = Rectangle.Empty;

            if (input.Config.StartNewTrack)
            {
                // NOTE(review): unlike the update branch below, this path has no
                // _tracker null guard — presumably Reset() (re)creates the tracker;
                // verify against its implementation.
                Reset();
                _tracker.Init(input.Captured, input.Config.ObjectOfInterest);
                Log.InfoFormat("Starting tracking");
            }
            else if (_tracker != null)
            {
                _tracker.Update(input.Captured, out boundingBox);
            }

            return new TrackingOutput
            {
                ObjectOfInterest = boundingBox,
            };
        }
        /// <summary>
        /// Per-frame stereo callback: captures a left/right pair, runs Canny edge
        /// detection on both, tracks the selected region in the left view (creating
        /// the tracker on first use), annotates the right view with the estimated
        /// distance, and displays the result.
        /// </summary>
        private void ProcessWithoutCalibration(object sender, EventArgs e)
        {
            if (_capture != null && _capture.Ptr != IntPtr.Zero)
            {
                // FIX: declare without allocating — every branch below assigns tmp,
                // so the original `new Mat()` leaked an unused native matrix each frame.
                Mat tmp;

                CaptureStereoImages(_capture, _capture2, out _left, out _right);
                Mat[] edges = new Mat[2] {
                    new Mat(), new Mat()
                };
                CvInvoke.Canny(_left, edges[0], 100, 200);
                CvInvoke.Canny(_right, edges[1], 100, 200);

                if (_haveRegion && _trackerLeft == null)
                {
                    // First frame after region selection: create and seed the tracker.
                    tmp          = DrawRect(_regionLeft, edges[0]);
                    _trackerLeft = new TrackerKCF();
                    _trackerLeft.Init(_left, _regionLeft);
                }
                else if (_haveRegion)
                {
                    // Subsequent frames: update the tracked region, then estimate
                    // distance from edge disparity using baseline and focal length.
                    Rectangle rectLeft = new Rectangle();
                    _trackerLeft.Update(_left, out rectLeft);
                    _regionLeft = rectLeft;
                    tmp         = DrawRect(_regionLeft, edges[0]);

                    double distances = CountDistanceThroughEdges(edges[0], edges[1], _regionLeft, _baseLine, _focalLength);
                    CvInvoke.PutText(_right, "distance = " + distances, new Point(0, _right.Height - 30),
                                     FontFace.HersheySimplex, 0.5, new Bgr(Color.Red).MCvScalar, 2);
                }
                else
                {
                    // No region selected: show the raw left edge image.
                    tmp = edges[0];
                }

                // NOTE(review): edges[1] (and edges[0] when not displayed) are never
                // disposed — native memory leaks per frame; consider Dispose/using.
                ShowStereoImages(tmp, _right);
            }
        }
Example #5
0
        /// <summary>
        /// Tracks (or lets the user drag) a rectangular region of interest on the
        /// incoming frame, draws it onto <paramref name="image"/>, and returns its
        /// location in image space. Creates a MIL tracker lazily once a large
        /// enough region has been drawn, and drops it when the frame size changes.
        /// </summary>
        /// <param name="imageTransform">UI transform used to map screen points into image space.</param>
        /// <param name="image">Full-resolution frame the rectangle is rendered on.</param>
        /// <param name="downscaled">Downscaled frame fed to the tracker.</param>
        /// <param name="TextureParameters">Conversion parameters for the screen-to-image mapping.</param>
        /// <param name="output">Re-usable output texture; refreshed only when not tracking.</param>
        /// <returns>The tracked or user-drawn rectangle in image coordinates.</returns>
        public OpenCvSharp.Rect DrawRect(RectTransform imageTransform, Mat image, Mat downscaled, Unity.TextureConversionParams TextureParameters, ref Texture2D output)
        {
            // screen space -> image space
            Vector2 sp       = ConvertToImageSpace(imageTransform, startPoint, image.Size(), TextureParameters);
            Vector2 ep       = ConvertToImageSpace(imageTransform, endPoint, image.Size(), TextureParameters);
            Point   location = new Point(Math.Min(sp.x, ep.x), Math.Min(sp.y, ep.y));
            Size    size     = new Size(Math.Abs(ep.x - sp.x), Math.Abs(ep.y - sp.y));
            var     areaRect = new OpenCvSharp.Rect(location, size);
            Rect2d  obj      = Rect2d.Empty;

            // If not dragging — show the tracking data
            if (!isDragging)
            {
                // drop the tracker if the frame's size has changed; necessary because
                // the tracker doesn't handle size changes well
                if (frameSize.Height != 0 && frameSize.Width != 0 && downscaled.Size() != frameSize)
                {
                    DropTracking();
                }

                // we have no tracker yet — let's initialize one
                if (null == tracker)
                {
                    // but only if the "area of interest" is big enough; this avoids
                    // "tracking" degenerate 1x2-pixel areas
                    if ((ep - sp).magnitude >= minimumAreaDiagonal)
                    {
                        obj = new Rect2d(areaRect.X, areaRect.Y, areaRect.Width, areaRect.Height);

                        // initialize tracker with current image and the given rect;
                        // one can experiment with tracker types here
                        tracker = Tracker.Create(TrackerTypes.MIL);
                        tracker.Init(downscaled, obj);

                        frameSize = downscaled.Size();
                    }
                }
                // if we already have an active tracker — update it with the new frame
                // and check whether it still tracks the object
                else
                {
                    if (!tracker.Update(downscaled, ref obj))
                    {
                        obj = Rect2d.Empty;
                    }
                }

                // save tracked object location
                if (0 != obj.Width && 0 != obj.Height)
                {
                    areaRect = new OpenCvSharp.Rect((int)obj.X, (int)obj.Y, (int)obj.Width, (int)obj.Height);
                }
            }

            // render the rect we've tracked, or the one being drawn by the user;
            // scale back up from the downscaled tracking frame (1/downScale)
            if (isDragging || (null != tracker && obj.Width != 0))
            {
                Cv2.Rectangle((InputOutputArray)image, areaRect * (1.0 / downScale), isDragging ? Scalar.Red : Scalar.Blue, 4);
            }

            // result; passing the output texture as a parameter allows re-using its
            // buffer — if it is null a new texture will be created
            if (!isTracking)
            {
                output = Unity.MatToTexture(image, output);
            }
            return(areaRect);
        }
Example #6
0
        /// <summary>
        /// Camera-frame callback: retrieves a frame, then either (a) in detection
        /// mode finds the marker (optionally within a ROI), takes a reference image
        /// on request and draws the marker/ROI overlays, or (b) in tracking mode
        /// updates the CSRT tracker and draws its box. Finally renders a diff image
        /// against the reference with a distance annotation and updates status text.
        /// </summary>
        /// <remarks>
        /// NOTE(review): frame/diff Mats are not disposed here, and Invoke() implies
        /// this runs off the UI thread — confirm both against the capture setup.
        /// </remarks>
        private void RetrieveFrame(object sender, EventArgs arg)
        {
            Mat   diff   = new Mat();
            Point center = Point.Empty;

            // retrieve image from camera
            Mat frame = new Mat();

            Camera.Retrieve(frame);
            ImageSize = frame.Size;

            // generate diff image against the reference BEFORE drawing overlays,
            // so annotations don't pollute the difference
            if (!ImgRef.IsEmpty && !IsTuning)
            {
                CvInvoke.AbsDiff(frame, ImgRef, diff);
            }

            if (!IsTracking)
            {
                // find marker, either in the whole frame or only inside the ROI
                Rectangle boundingBox;
                if (Roi.IsEmpty)
                {
                    center = FindMarker(frame, out boundingBox);
                }
                else
                {
                    using (Mat crop = new Mat(frame.Clone(), Roi))
                    {
                        center = FindMarker(crop, out boundingBox);
                        // translate crop-relative coordinates back to frame coordinates
                        if (!center.IsEmpty)
                        {
                            center.X      += Roi.X;
                            center.Y      += Roi.Y;
                            boundingBox.X += Roi.X;
                            boundingBox.Y += Roi.Y;
                        }
                    }
                }

                // on request, store the current frame/marker as the new reference
                // and (re)initialize the tracker from it
                if (ScheduleTakeReference && !center.IsEmpty)
                {
                    ImgRef        = frame.Clone();
                    PointRef      = center;
                    MarkerTracker = new TrackerCSRT();
                    MarkerTracker.Init(ImgRef, boundingBox);
                    ScheduleTakeReference = false;
                    Invoke(new Action(() => UpdateInstructionText()));
                }

                // draw marker (orange dot + box)
                if (!center.IsEmpty)
                {
                    CvInvoke.Circle(frame, center, 4, new MCvScalar(0, 140, 255), -1);
                    CvInvoke.Rectangle(frame, boundingBox, new MCvScalar(0, 140, 255), 1);
                }

                // draw ROI: darken everything outside it by multiplying the frame
                // with a mask of 1s (outside) and 2s (inside), scaled by 0.5
                if (!Roi.IsEmpty)
                {
                    using (Mat dark = new Mat(frame.Rows, frame.Cols, frame.Depth, 3))
                    {
                        dark.SetTo(new MCvScalar(1, 1, 1));
                        CvInvoke.Rectangle(dark, Roi, new MCvScalar(2, 2, 2), -1);
                        CvInvoke.Multiply(frame, dark, frame, 0.5);
                    }
                }

                ImgBoxRef.Image = frame;
            }
            else
            {
                // tracking mode: advance the tracker and draw its result when it succeeds
                if (MarkerTracker.Update(frame, out Rectangle trackingRect))
                {
                    center = new Point(trackingRect.X + trackingRect.Width / 2, trackingRect.Y + trackingRect.Height / 2);
                    CvInvoke.Circle(frame, center, 4, new MCvScalar(0, 140, 255), -1);
                    CvInvoke.Rectangle(frame, trackingRect, new MCvScalar(0, 140, 255), 1);
                }

                ImgBoxLive.Image = frame;
            }

            // update diff image box: mark current and reference points, and when
            // both exist draw an arrow between them annotated with their distance
            if (!diff.IsEmpty)
            {
                MCvScalar color = new MCvScalar(0, 140, 255);
                if (!center.IsEmpty)
                {
                    CvInvoke.Circle(diff, center, 4, color, -1);
                }
                if (!PointRef.IsEmpty)
                {
                    CvInvoke.Circle(diff, PointRef, 4, color, -1);
                }
                if (!center.IsEmpty && !PointRef.IsEmpty)
                {
                    string dist = CalcDistance(center, PointRef).ToString("0.0");
                    CvInvoke.ArrowedLine(diff, PointRef, center, color);
                    CvInvoke.PutText(diff, dist, new Point(5, diff.Height - 5), FontFace.HersheyComplexSmall, 1, color);
                }
                ImgBoxDiff.Image = diff;
            }

            // update status text (marshalled onto the UI thread)
            Invoke(new Action(() => UpdateStatusText(PointRef, center)));
        }
Example #7
0
        /// <summary>
        /// Detects the end/cross point of the tracked print nozzle in a frame:
        /// downscales the frame, updates the tracker, maps the tracked ROI back to
        /// full resolution, then runs k-means classification, thresholding, Canny
        /// and Hough line detection to locate the cross point.
        /// </summary>
        /// <param name="_frame">Input frame; may be null or empty.</param>
        /// <returns>The detected cross point, or (-1, -1) when detection fails.</returns>
        public PointF Detect(Mat _frame)
        {
            PointF crossPoint = new PointF(-1, -1);

            frame = _frame;
            // validity checks: bail out on null or empty frames
            if (frame == null)
            {
                return(crossPoint);
            }
            if (frame.IsEmpty)
            {
                return(crossPoint);
            }

            // downscale the frame to the tracker's working size
            CvInvoke.Resize(frame, frame_scale, scaleSize, 0, 0, Inter.Cubic);
            // track the target and update the tracker model
            tracker.Update(frame_scale, out roi);

            // tracking lost / degenerate box — nothing to detect
            if (roi.Width == 0 || roi.Height == 0)
            {
                return(crossPoint);
            }

            // scale factors mapping the downscaled ROI back to full resolution
            float Fheight     = frame.Rows;
            float Fwidth      = frame.Cols;
            float WidthScale  = (Fwidth / scaleSize.Width);   // horizontal scale factor
            float HeightScale = (Fheight / scaleSize.Height); // vertical scale factor

            // enlarge the ROI back into full-resolution coordinates
            roi_scale.X      = (int)Math.Round(roi.X * WidthScale);
            roi_scale.Y      = (int)Math.Round(roi.Y * HeightScale);
            roi_scale.Width  = (int)Math.Round(roi.Width * WidthScale);
            roi_scale.Height = (int)Math.Round(roi.Height * HeightScale);

            // classify the print-nozzle region (k-means based segmentation)
            src_roi    = new Mat(frame, roi_scale);
            roi_kmeans = get_roi_Sprinkler_area(ref src_roi);

            // KMeans
            // CvInvoke.Imshow("kmeans", roi_kmeans);

            roi_pro        = new Mat(frame_scale, roi); // copy of the tracked bounding-box region
            roi_temp       = frame_scale.Clone();       // used to display detected end points and fitted lines
            roi_singletemp = frame_scale.Clone();       // used to display detected end points only


            // on the classified ROI: color-space conversion, thresholding,
            // edge smoothing, then Canny edge extraction
            Size blur_size    = new Size(3, 3);
            int  srcthreshold = 100;

            CvInvoke.CvtColor(roi_kmeans, roi_gray, ColorConversion.Bgr2Gray);           // convert to grayscale
            CvInvoke.Threshold(roi_gray, roi_threshold, 180, 255, ThresholdType.Binary); // binarize
            imageblur(roi_threshold, roi_blur, blur_size, srcthreshold);

            // NOTE(review): these Imshow calls look like leftover debug output —
            // consider removing or gating them for production use.
            CvInvoke.Imshow("roi_gray", roi_gray);
            CvInvoke.Imshow("roi_threshold", roi_threshold);
            CvInvoke.Imshow("roi_blur", roi_blur);

            CvInvoke.Canny(roi_blur, roi_canny, 10, 200, 3);

            CvInvoke.Imshow("Canny", roi_canny);

            // determine the line end point from the Canny result
            PointF final_canny_crossPoint = CrossPointFromCanny(roi_canny, roi_scale);

            // drop the upper part of the Canny result
            roi_cannyx = SegmenttopFromCanny(roi_canny);

            CvInvoke.Imshow("roi_cannyx", roi_cannyx);

            // Hough line detection
            lines = CvInvoke.HoughLinesP(roi_cannyx, 1, Math.PI / 360, 40, 50, 380);


            // need at least two lines to intersect
            if (lines.Length > 1)
            {
                crossPoint = CrossPointFromHough(lines, roi_scale, ref roi_temp, ref final_linestand, ref final_lineanother, final_canny_crossPoint);
            }

            return(crossPoint);
        }
Example #8
0
        /// <summary>
        /// Tracks a face in the given frame. Initializes tracking on the first call;
        /// on later calls updates the tracker, draws the face rectangle (green when a
        /// person is recognized, red on recognition failure, gray otherwise), labels
        /// it with the person's name, and periodically cross-checks the tracked box
        /// against a fresh face detection to notice a lost face.
        /// </summary>
        /// <param name="bgrImage">Frame to track in; tolerates null (returns false).</param>
        /// <returns>True when the tracker init/update succeeded.</returns>
        public bool trackFace(Image <Bgr, byte> bgrImage)
        {
            // FIX: the original dereferenced bgrImage.Bitmap BEFORE its null checks,
            // throwing NullReferenceException for null input even though the inner
            // guards show null was meant to be tolerated.
            if (bgrImage == null)
            {
                return false;
            }

            var result = false;

            // FIX: dispose the intermediate Bitmap copy (GDI+ handle leak in the original).
            using (var bitmapCopy = new Bitmap(bgrImage.Bitmap))
            using (var grayframe = new Image <Gray, byte>(bitmapCopy))
            {
                if (!isInit)
                {
                    result = trackingInit(bgrImage);
                }
                else
                {
                    try
                    {
                        var updateFace = new Rectangle();
                        result = tracker.Update(grayframe.Mat, out updateFace);
                        if (!result)
                        {
                            failFrameCount++;
                            findFace = false;
                        }
                        else
                        {
                            // pick the overlay color from the recognition state
                            Color color = Color.Gray;
                            if (Person != null)
                            {
                                color = Color.Green;
                            }
                            else if (FailToRecognize)
                            {
                                color = Color.Red;
                            }

                            bgrImage.Draw(updateFace, new Bgr(color), 3);
                            //TODO update name
                            if (Person != null)
                            {
                                // FIX: one Font, disposed — the original allocated two
                                // Fonts per frame and never disposed either.
                                using (Graphics g = Graphics.FromImage(bgrImage.Bitmap))
                                using (Font labelFont = new Font("Arial", 12, FontStyle.Bold))
                                {
                                    // center the name label over the face rectangle
                                    int tWidth = (int)g.MeasureString(Person.Name, labelFont).Width;
                                    int x;
                                    if (tWidth >= updateFace.Width)
                                    {
                                        x = updateFace.Left - ((tWidth - updateFace.Width) / 2);
                                    }
                                    else
                                    {
                                        x = (updateFace.Width / 2) - (tWidth / 2) + updateFace.Left;
                                    }

                                    g.DrawString(Person.Name, labelFont, Brushes.Green, new PointF(x, updateFace.Top - 18));
                                }
                            }
                            humanFace.faceRectangle = updateFace;
                            findFace = true;
                            if (FailToRecognize)
                            {
                                failFrameCount++;
                                successFrameCount = 0;
                            }
                            else
                            {
                                failFrameCount = 0;
                                if (successFrameCount > SUCCESSRETRYFRAMECOUNT)
                                {
                                    // every SUCCESSRETRYFRAMECOUNT successful frames,
                                    // re-detect faces and check the tracked box still
                                    // overlaps a detected face sufficiently
                                    FaceDetector faceDetector = new FaceDetector();

                                    var detectedFaces = faceDetector.findFace(bgrImage);
                                    var sameFaceFind  = false;
                                    foreach (var r1 in detectedFaces)
                                    {
                                        var   r1Size = r1.Height * r1.Width;
                                        var   r2     = humanFace.faceRectangle;
                                        float r2Size = r2.Height * r2.Width;
                                        var   r3     = Rectangle.Intersect(r1, r2);
                                        if (!r3.IsEmpty)
                                        {
                                            // overlap ratio relative to the smaller rectangle
                                            float r3Size = r3.Height * r3.Width;
                                            float ratio  = r3Size / Math.Min(r1Size, r2Size);
                                            if (ratio < INTERSECTRATIO)
                                            {
                                                Debug.Print("Face compare, Ratio: {0}", ratio);
                                            }
                                            else
                                            {
                                                sameFaceFind = true;
                                            }
                                        }
                                        else
                                        {
                                            Debug.Print("r3 is empty");
                                        }
                                    }

                                    if (!sameFaceFind)
                                    {
                                        missingFace = true;
                                    }
                                    faceDetector      = null;
                                    successFrameCount = 0;
                                }
                                else
                                {
                                    successFrameCount++;
                                }
                            }
                        }
                    }
                    catch (Exception ex)
                    {
                        // NOTE(review): swallowing all exceptions hides tracker faults;
                        // consider narrowing the catch or surfacing the failure.
                        Debug.Print(ex.ToString());
                    }
                }
            }
            return(result);
        }
 // Main tracking loop: pulls frames from the camera, updates the multi-object
 // tracker, reinitializes it when tracking fails, and draws per-object boxes,
 // trajectories and a crude "speed" estimate onto the output image.
 public static void MainLoop()
 {
     // Thread safety: the whole loop runs under the lock.
     lock (locker)
     {
         // Keep running until the Stop button is pressed or the video file runs out.
         // NOTE(review): `||` keeps looping while EITHER condition holds — confirm
         // this is not meant to be `&&`.
         while (TrackingActive || Capture.IsOpened)
         {
             // Small delay to smooth the loop.
             Thread.Sleep(10);
             // Index of the object currently being processed.
             int nBox = 0;
             // Grab the next camera frame.
             Mat frame = GetCameraPicture();
             // Holds the rectangles produced by the trackers for this frame.
             VectorOfRect RawRects = new VectorOfRect();
             // Advance all trackers to the new frame.
             bool state = Tracker.Update(frame, RawRects);
             // Convert to a plain rectangle array.
             Rectangle[] Rects = RawRects.ToArray();
             // Check whether reinitialization is needed (update failed, nothing
             // tracked, or the reinit counter exceeded its threshold).
             if ((!state) || (Rects.Length == 0) || (NeedReinit > VerNum))
             {
                 // Reset the counter.
                 NeedReinit = 0;
                 // Show the raw frame.
                 MainWindowImage = frame;
                 // Reinitialize the tracker.
                 Tracker = Reinit_Tracker(InType, frame);
                 // Drop all history about previous objects.
                 AllBoxes.Clear();
                 // Mark that the history lists must be rebuilt.
                 IsNew = true;
                 // Next iteration.
                 continue;
             }
             // No reinit needed: draw the detections and estimate speeds.
             foreach (var rect in Rects)
             {
                 // Estimated speed of the current object.
                 float speed = 0;
                 // If we reinitialized last iteration, start a fresh history list.
                 if (IsNew)
                 {
                     AllBoxes.Add(new List <Rectangle>());
                 }
                 // With history available we can estimate a crude speed.
                 if (AllBoxes[nBox].Count != 0)
                 {
                     // Rectangle from the previous iteration.
                     Rectangle oldRect = AllBoxes[nBox][AllBoxes[nBox].Count - 1];
                     // FIX: the original computed oldSpeed with INTEGER division
                     // ((int sum) / 4) while the current speed used float division,
                     // systematically biasing the deviation. Use float division for both.
                     float oldSpeed = (oldRect.X + oldRect.Y + oldRect.Width + oldRect.Height) / 4f;
                     speed = Convert.ToSingle((rect.X + rect.Y + rect.Width + rect.Height)) / 4;
                     // Deviation from the previous frame: larger means faster movement.
                     speed = Math.Abs(oldSpeed - speed);
                 }
                 // Append the current rectangle to the object's history.
                 AllBoxes[nBox].Add(rect);
                 // Where the speed label is drawn.
                 var TextLocation = new System.Drawing.Point(rect.X - 20, rect.Y - 30);
                 // Draw the speed label.
                 CvInvoke.PutText(frame, Convert.ToString(speed), TextLocation, FontFace.HersheyScriptSimplex, 1, new MCvScalar(0, 0, 0), 3);
                 // Draw the tracked rectangle.
                 CvInvoke.Rectangle(frame, rect, new MCvScalar(0, 255, 0), 2);
                 // Draw the trajectory from previous iterations (bottom-center points).
                 foreach (var pBox in AllBoxes[nBox])
                 {
                     var PointLocation = new System.Drawing.Point(pBox.X + Convert.ToInt32(pBox.Width / 2), pBox.Y + pBox.Height);
                     CvInvoke.Circle(frame, PointLocation, 3, new MCvScalar(0, 0, 255));
                 }
                 // Next object.
                 nBox++;
             }
             // History lists now exist for every object.
             IsNew = false;
             // Bump the reinit counter.
             NeedReinit++;
             // Show the annotated frame.
             MainWindowImage = frame;
         }
     }
 }
Example #10
0
        /// <summary>
        /// Detects the end/cross point of the tracked print nozzle in a frame:
        /// downscales the frame, updates the tracker, maps the tracked ROI back to
        /// full resolution, then runs Sobel edge detection, thresholding, erosion,
        /// Canny and Hough line detection to locate the cross point.
        /// </summary>
        /// <param name="_frame">Input frame; may be null or empty.</param>
        /// <returns>The detected cross point, or (-1, -1) when detection fails.</returns>
        public PointF Detect(Mat _frame)
        {
            PointF crossPoint = new PointF(-1, -1);

            frame = _frame;
            // validity checks: bail out on null or empty frames
            if (frame == null)
            {
                return(crossPoint);
            }
            if (frame.IsEmpty)
            {
                return(crossPoint);
            }

            // downscale the frame to the tracker's working size
            CvInvoke.Resize(frame, frame_scale, scaleSize, 0, 0, Inter.Cubic);
            // track the target and update the tracker model
            tracker.Update(frame_scale, out roi);

            // tracking lost / degenerate box — nothing to detect
            if (roi.Width == 0 || roi.Height == 0)
            {
                return(crossPoint);
            }

            // scale factors mapping the downscaled ROI back to full resolution
            float Fheight     = frame.Rows;
            float Fwidth      = frame.Cols;
            float WidthScale  = (Fwidth / scaleSize.Width);   // horizontal scale factor
            float HeightScale = (Fheight / scaleSize.Height); // vertical scale factor

            // enlarge the ROI back into full-resolution coordinates
            roi_scale.X      = (int)Math.Round(roi.X * WidthScale);
            roi_scale.Y      = (int)Math.Round(roi.Y * HeightScale);
            roi_scale.Width  = (int)Math.Round(roi.Width * WidthScale);
            roi_scale.Height = (int)Math.Round(roi.Height * HeightScale);

            // edge detection of the print-nozzle region
            src_roi   = new Mat(frame, roi_scale);
            roi_sobel = sobelEdgeDetection(ref src_roi);

            roi_pro        = new Mat(frame_scale, roi); // copy of the tracked bounding-box region
            roi_temp       = frame_scale.Clone();       // used to display detected end points and fitted lines
            roi_singletemp = frame_scale.Clone();       // used to display detected end points only

            //// on the downscaled ROI: color-space conversion, thresholding,
            //// morphological erosion, then Canny edge extraction
            CvInvoke.CvtColor(roi_pro, roi_gray, ColorConversion.Bgr2Gray);                                          // convert to grayscale
            CvInvoke.Threshold(roi_gray, roi_threshold, 30, 255, ThresholdType.Binary);                              // binarize
            Mat element = CvInvoke.GetStructuringElement(ElementShape.Rectangle, new Size(2, 1), new Point(-1, -1)); // erosion kernel

            CvInvoke.Erode(roi_threshold, roi_erode, element, new Point(-1, -1), 1, BorderType.Default, new MCvScalar(255, 255, 255));
            CvInvoke.Canny(roi_erode, roi_canny, 10, 200, 3);

            // determine the line end point from the Canny result
            PointF final_canny_crossPoint = CrossPointFromCanny(roi_canny, roi);

            // Hough line detection
            lines = CvInvoke.HoughLinesP(roi_canny, 1, Math.PI / 360, 20, 7, 10);


            // need at least two lines to intersect
            if (lines.Length > 1)
            {
                crossPoint = CrossPointFromHough(lines, roi, ref roi_temp, ref final_linestand, ref final_lineanother, final_canny_crossPoint);
            }

            return(crossPoint);
        }