Example 1
        private void but_optflowcalib_Click(object sender, EventArgs e)
        {
            var test     = new Form();
            var imagebox = new PictureBox();

            imagebox.Dock     = DockStyle.Fill;
            imagebox.SizeMode = PictureBoxSizeMode.Zoom;
            test.Controls.Add(imagebox);

            test.Show();

            var flow = new OpticalFlow(MainV2.comPort);

            // stop calibration and close the flow link when the form closes
            test.Closed += (o, args) =>
            {
                flow.CalibrationMode(false);
                flow.Close();
            };

            // enable calibration mode
            flow.CalibrationMode(true);

            // display each new frame in the picture box
            flow.newImage += (s, eh) => imagebox.Image = (Image)eh.Image.Clone();
        }
Example 2

		public VisualOdometer(Capture capture, CameraParameters cameraParameters, HomographyMatrix birdsEyeViewTransformation, OpticalFlow opticalFlow)
		{
			m_Capture = capture;
			m_CameraParameters = cameraParameters;

			this.GroundRegionTop = OdometerSettings.Default.GroundRegionTop;
			this.SkyRegionBottom = OdometerSettings.Default.SkyRegionBottom;

			this.OpticalFlow = opticalFlow;
			m_RotationAnalyzer = new RotationAnalyzer(this);
			m_TranslationAnalyzer = new TranslationAnalyzer(this, birdsEyeViewTransformation);
		}
Example 3
        public void Activate()
        {
            if (!MainV2.comPort.BaseStream.IsOpen)
            {
                Enabled = false;
                return;
            }

            Enabled = true;

            flow = new OpticalFlow(MainV2.comPort);

            // display each new frame in the picture box
            flow.newImage += (s, eh) => imagebox.Image = (Image)eh.Image.Clone();
        }
Example 4
        private void ComputeSparseOpticalFlow()
        {
            // Compute optical flow using the pyramidal Lucas-Kanade method
            OpticalFlow.PyrLK(grayFrame, nextGrayFrame, ActualFeature[0], new System.Drawing.Size(10, 10), 3, new MCvTermCriteria(20, 0.03d), out NextFeature, out Status, out TrackError);

            using (MemStorage storage = new MemStorage())
                nextHull = PointCollection.ConvexHull(ActualFeature[0], storage, Emgu.CV.CvEnum.ORIENTATION.CV_CLOCKWISE).ToArray();
            nextCentroid = FindCentroid(nextHull);
            for (int i = 0; i < ActualFeature[0].Length; i++)
            {
                DrawTrackedFeatures(i);
                // Draw optical flow vectors
                DrawFlowVectors(i);
            }
        }
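
For context, a minimal sketch of how the inputs to OpticalFlow.PyrLK are typically prepared with the Emgu CV 2.x API used throughout these examples. The file names and parameter values are illustrative assumptions, not taken from the example above; the snippet assumes using System.Drawing, Emgu.CV and Emgu.CV.Structure.

        // Illustrative sketch (assumed file names and parameters, Emgu CV 2.x)
        // requires: using System.Drawing; using Emgu.CV; using Emgu.CV.Structure;
        Image <Gray, byte> grayFrame     = new Image <Gray, byte>("frame0.png"); // placeholder paths
        Image <Gray, byte> nextGrayFrame = new Image <Gray, byte>("frame1.png");

        // Pick corners worth tracking; one feature array is returned per channel
        PointF[][] ActualFeature = grayFrame.GoodFeaturesToTrack(100, 0.01d, 5d, 3);

        PointF[] NextFeature;
        byte[]   Status;     // 1 = the feature was found in the next frame
        float[]  TrackError;

        OpticalFlow.PyrLK(grayFrame, nextGrayFrame, ActualFeature[0],
                          new Size(10, 10), 3, new MCvTermCriteria(20, 0.03d),
                          out NextFeature, out Status, out TrackError);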
Example 5

    private void OnEnable()
    {
        var ms   = MonoScript.FromScriptableObject(this);
        var path = AssetDatabase.GetAssetPath(ms);

        path = path.Replace(Path.GetFileName(path), "");
        path = path + "OpticalFlow.asset";
        _obj = AssetDatabase.LoadAssetAtPath <OpticalFlow>(path);
        if (_obj == null)
        {
            _obj = ScriptableObject.CreateInstance <OpticalFlow>();
            AssetDatabase.CreateAsset(_obj, path);
            AssetDatabase.ImportAsset(path, ImportAssetOptions.ForceUpdate | ImportAssetOptions.ImportRecursive);
        }
    }
Example 6
        void ComputeDenseOpticalFlow()
        {
            faceGrayImage     = grayFrame;
            faceNextGrayImage = nextGrayFrame;
            // Compute dense optical flow using the Horn-Schunck algorithm
            velx = new Image <Gray, float>(faceGrayImage.Size);
            vely = new Image <Gray, float>(faceNextGrayImage.Size);

            OpticalFlow.HS(faceGrayImage, faceNextGrayImage, true, velx, vely, 0.1d, new MCvTermCriteria(100));

            #region Dense Optical Flow Drawing
            Size winSize = new Size(10, 10);
            vectorFieldX    = (int)Math.Round((double)faceGrayImage.Width / winSize.Width);
            vectorFieldY    = (int)Math.Round((double)faceGrayImage.Height / winSize.Height);
            sumVectorFieldX = 0f;
            sumVectorFieldY = 0f;
            vectorField     = new PointF[vectorFieldX][];
            for (int i = 0; i < vectorFieldX; i++)
            {
                vectorField[i] = new PointF[vectorFieldY];
                for (int j = 0; j < vectorFieldY; j++)
                {
                    // Image indexer is [row, column]: rows step by winSize.Height, columns by winSize.Width
                    Gray  velx_gray  = velx[j * winSize.Height, i * winSize.Width];
                    float velx_float = (float)velx_gray.Intensity;
                    Gray  vely_gray  = vely[j * winSize.Height, i * winSize.Width];
                    float vely_float = (float)vely_gray.Intensity;
                    sumVectorFieldX  += velx_float;
                    sumVectorFieldY  += vely_float;
                    vectorField[i][j] = new PointF(velx_float, vely_float);

                    Cross2DF cr = new Cross2DF(
                        new PointF((i * winSize.Width) + trackingArea.X,
                                   (j * winSize.Height) + trackingArea.Y),
                        1, 1);
                    opticalFlowFrame.Draw(cr, new Bgr(Color.Red), 1);

                    LineSegment2D ci = new LineSegment2D(
                        new Point((i * winSize.Width) + trackingArea.X,
                                  (j * winSize.Height) + trackingArea.Y),
                        new Point((int)((i * winSize.Width) + trackingArea.X + velx_float),
                                  (int)((j * winSize.Height) + trackingArea.Y + vely_float)));
                    opticalFlowFrame.Draw(ci, new Bgr(Color.Yellow), 1);
                }
            }
            #endregion
        }
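
For reference, the Horn-Schunck call can also be exercised in isolation. A minimal hedged sketch using the same Emgu CV 2.x API as above; the file names, smoothness weight and iteration count are placeholders.

        // Illustrative sketch (assumed inputs, Emgu CV 2.x)
        Image <Gray, byte>  prev = new Image <Gray, byte>("frame0.png"); // placeholder paths
        Image <Gray, byte>  curr = new Image <Gray, byte>("frame1.png");
        Image <Gray, float> velx = new Image <Gray, float>(prev.Size);   // horizontal flow output
        Image <Gray, float> vely = new Image <Gray, float>(prev.Size);   // vertical flow output

        // usePrevious = false starts from zero flow; 0.1 is the smoothness weight (lambda)
        OpticalFlow.HS(prev, curr, false, velx, vely, 0.1d, new MCvTermCriteria(100));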
Example 7

        public void Activate()
        {
            if (!MainV2.comPort.BaseStream.IsOpen && !MainV2.comPort.logreadmode)
            {
                Enabled = false;
                return;
            }

            Enabled = true;

            _flow = new OpticalFlow(MainV2.comPort);

            // display each new frame in the picture box
            _flow.newImage += (s, eh) =>
            {
                imagebox.Image = eh.Image.ToSKImage().ToBitmap();
            };
        }
Example 8
        public override void Process()
        {
            CVImage swap = FPrevious;

            FPrevious = FCurrent;
            FCurrent  = swap;

            FInput.Image.GetImage(TColorFormat.L8, FCurrent);

            Image <Gray, byte>  p  = FPrevious.GetImage() as Image <Gray, byte>;
            Image <Gray, byte>  c  = FCurrent.GetImage() as Image <Gray, byte>;
            Image <Gray, float> vx = FVelocityX.GetImage() as Image <Gray, float>;
            Image <Gray, float> vy = FVelocityY.GetImage() as Image <Gray, float>;

            OpticalFlow.HS(p, c, UsePrevious, vx, vy, FLambda, new MCvTermCriteria(FIterations));

            CopyToRgb();
            FOutput.Send();
        }
Example 9
        public override void Process()
        {
            CVImage swap = FPrevious;

            FPrevious = FCurrent;
            FCurrent  = swap;

            FInput.Image.GetImage(TColorFormat.L8, FCurrent);

            Image <Gray, byte>  p  = FPrevious.GetImage() as Image <Gray, byte>;
            Image <Gray, byte>  c  = FCurrent.GetImage() as Image <Gray, byte>;
            Image <Gray, float> vx = FVelocityX.GetImage() as Image <Gray, float>;
            Image <Gray, float> vy = FVelocityY.GetImage() as Image <Gray, float>;

            OpticalFlow.LK(p, c, FWindowSize, vx, vy);

            CopyToRgb();
            FOutput.Send();
        }
Example 10
        public PointF[] TrackPointsBackward(out byte[] status)
        {
            PointF[] backwardPoints;
            float[]  trackErrors;

            OpticalFlow.PyrLK(
                _currentFrame,
                _previousFrame,
                _currentPyramidBuffer,
                _previousPyramidBuffer,
                _currentPoints,
                _searchWindowSize,
                _levels,
                _terminationCriteria,
                _backwardFlags,
                out backwardPoints,
                out status,
                out trackErrors
                );

            return backwardPoints;
        }
Example 11
        public PointF[] TrackPointsForward(PointF[] previousPoints, Image <Gray, Byte> currentFrame, out byte[] status)
        {
            _previousFrame = _currentFrame;
            _currentFrame  = currentFrame;

            float[] trackErrors;

            OpticalFlow.PyrLK(
                _previousFrame,
                currentFrame,
                _previousPyramidBuffer,
                _currentPyramidBuffer,
                previousPoints,
                _searchWindowSize,
                _levels,
                _terminationCriteria,
                _forwardFlags,
                out _currentPoints,
                out status,
                out trackErrors
                );

            return _currentPoints;
        }
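
Examples 10 and 11 combine naturally into a forward-backward consistency check: track points forward, track the results back, and keep only the points that return close to where they started. The helper below is a hypothetical sketch built on the two methods above, not part of the original tracker; the maxDrift threshold is an assumption.

        // Hypothetical helper: a point is kept only if both passes succeed and the
        // backward-tracked point round-trips to within maxDrift pixels of the original.
        public bool[] ConsistentlyTrackedPoints(PointF[] previousPoints, Image <Gray, Byte> currentFrame, float maxDrift)
        {
            byte[] forwardStatus;
            byte[] backwardStatus;

            // The forward pass stores the tracked points internally for the backward pass
            TrackPointsForward(previousPoints, currentFrame, out forwardStatus);
            PointF[] backwardPoints = TrackPointsBackward(out backwardStatus);

            bool[] keep = new bool[previousPoints.Length];
            for (int i = 0; i < previousPoints.Length; i++)
            {
                float dx = previousPoints[i].X - backwardPoints[i].X;
                float dy = previousPoints[i].Y - backwardPoints[i].Y;
                keep[i]  = forwardStatus[i] == 1 && backwardStatus[i] == 1 &&
                           (dx * dx + dy * dy) <= maxDrift * maxDrift;
            }
            return keep;
        }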
Example 12
        private void OnApplyButtonClicked(object sender, EventArgs e)
        {
            // Fall back to the current settings when a text box does not parse;
            // a bare TryParse would silently reset the value to zero on bad input
            int maxFeatureCount;
            if (!Int32.TryParse(m_MaxFeatureCountTextBox.Text, out maxFeatureCount))
            {
                maxFeatureCount = m_VisualOdometer.OpticalFlow.MaxFeatureCount;
            }

            int blockSize;
            if (!Int32.TryParse(m_BlockSizeTextBox.Text, out blockSize))
            {
                blockSize = m_VisualOdometer.OpticalFlow.BlockSize;
            }

            double qualityLevel;
            if (!Double.TryParse(m_QualityLevelTextBox.Text, out qualityLevel))
            {
                qualityLevel = m_VisualOdometer.OpticalFlow.QualityLevel;
            }

            double minDistance;
            if (!Double.TryParse(m_MinDistanceTextBox.Text, out minDistance))
            {
                minDistance = m_VisualOdometer.OpticalFlow.MinDistance;
            }

            OpticalFlow opticalFlow = new OpticalFlow(maxFeatureCount, blockSize, qualityLevel, minDistance);

            m_VisualOdometer.OpticalFlow = opticalFlow;

            int skyBottom;

            if (Int32.TryParse(m_SkyBottomTextBox.Text, out skyBottom))
            {
                m_VisualOdometer.SkyRegionBottom = skyBottom;
            }

            int groundTop;

            if (Int32.TryParse(m_GroundTopTextBox.Text, out groundTop))
            {
                m_VisualOdometer.GroundRegionTop = groundTop;
            }
        }
Example 13
        // This worker runs in the background and performs the main work of the algorithm
        private void backgroundWorker1_DoWork(object sender, DoWorkEventArgs e)
        {
            int ccp = CurrentCapturePosition; // The most recently captured frame; this is the one we work with

            // Check that enough frames have been captured to work with
            if (IsStackFull)
            {
                DateTime DTTest = DateTime.Now;                                        // Start time, used to report how long the processing took
                lock (Ic[ccp].locker)                                                  // Lock the base image so nothing else touches it
                {
                    Image <Bgr, double> FF2 = new Image <Bgr, double>(Ic[ccp].I.Size); // Create an image the same size as the base one
                    double counter          = 0;                                       // Counts how many samples are accumulated per point for each wavelength

                    // Iterate over all wavelengths specified by the user
                    for (double w = w1; w <= w2; w += StepW)
                    {
                        // DTlast - timestamp of the frame being processed
                        // DTcurr - timestamp of the current frame
                        DateTime DTlast = Ic[ccp].DT;
                        DateTime DTcurr = DTlast;
                        int      Index  = -1; // Counts how far back we have stepped
                        double   Sdvid  = 0;  // When we accumulate the harmonic via a sine, its sum may not be zero,
                        // in which case the image brightness would jump. So we offset the sine so that it sums to zero.
                        // Admittedly a bit crude, but it should do.

                        List <double> SinN = new List <double>();
                        // Walk backwards through the time window to compute the sine values
                        while (Math.Abs((DTlast - DTcurr).TotalMilliseconds) / 1000.0 < w)
                        {
                            int pos = Index + ccp;           // Current position
                            if (pos < 0)                     // We walk a 50-element ring buffer, so when the index drops below zero
                            {
                                pos = pos + LengthOfCapture; // wrap around to the top of the array by adding the buffer length
                            }
                            }
                            Index--;
                            DTcurr = Ic[pos].DT;
                            double SinA = Math.Sin(2 * Math.PI * (((DTlast - DTcurr).TotalMilliseconds / 1000.0) / w)); // Current sine value
                            SinN.Add(SinA);
                        }
                        // Now compute the offset that makes the sine sum to zero
                        for (int i = 0; i < SinN.Count; i++)
                        {
                            Sdvid += SinN[i];
                        }
                        Sdvid = Sdvid / SinN.Count;

                        // Walk the time window again, this time operating on the image itself
                        Index  = -1;
                        DTcurr = DTlast;
                        while (Math.Abs((DTlast - DTcurr).TotalMilliseconds) / 1000.0 < w)
                        {
                            int pos = Index + ccp;
                            if (pos < 0)
                            {
                                pos = pos + LengthOfCapture;
                            }
                            Index--;
                            counter++;
                            // Lock the image so nothing else gets into it
                            lock (Ic[pos].locker)
                            {
                                DTcurr = Ic[pos].DT;
                                double SinA = Math.Sin(2 * Math.PI * (((DTlast - DTcurr).TotalMilliseconds / 1000.0) / w)) - Sdvid; // Sine value, now with the offset applied
                                for (int x = 0; x < Ic[pos].I.Width; x++)
                                {
                                    for (int y = 0; y < Ic[pos].I.Height; y++)
                                    {
                                        // Accumulate the sine-weighted pixel values
                                        FF2.Data[y, x, 0] += (Ic[pos].I.Data[y, x, 0]) * SinA;
                                        FF2.Data[y, x, 1] += (Ic[pos].I.Data[y, x, 1]) * SinA;
                                        FF2.Data[y, x, 2] += (Ic[pos].I.Data[y, x, 2]) * SinA;
                                    }
                                }
                            }
                        }
                    }
                    // After walking through all the frames, colorize the picture that will be displayed
                    lock (ImToD)                    // Lock that prevents a half-colorized picture from being displayed
                    {
                        lock (Irealpic[ccp].locker) // Lock that guards the stored source frame
                        {
                            ImToDisp = Irealpic[ccp].I.Convert <Bgr, Byte>();
                        }
                        // Colorize as I + a*FF2, where FF2 is the accumulated increment image
                        for (int x = 0; x < Ic[ccp].I.Width; x++)
                        {
                            for (int y = 0; y < Ic[ccp].I.Height; y++)
                            {
                                FF2.Data[y, x, 0]      = Alpha * FF2.Data[y, x, 0] / counter;
                                ImToDisp.Data[y, x, 0] = (byte)Math.Max(0, Math.Min((FF2.Data[y, x, 0] + ImToDisp.Data[y, x, 0]), 255));
                                FF2.Data[y, x, 1]      = Alpha * FF2.Data[y, x, 1] / counter;
                                ImToDisp.Data[y, x, 1] = (byte)Math.Max(0, Math.Min((FF2.Data[y, x, 1] + ImToDisp.Data[y, x, 1]), 255));
                                FF2.Data[y, x, 2]      = Alpha * FF2.Data[y, x, 2] / counter;
                                ImToDisp.Data[y, x, 2] = (byte)Math.Max(0, Math.Min((FF2.Data[y, x, 2] + ImToDisp.Data[y, x, 2]), 255));
                            }
                        }
                        // Choose which mode the track is recorded in
                        if (RegimOfWork)
                        {
                            // Frame-difference mode
                            // If this is not the first processed picture,
                            if (LastOne != null)
                            {
                                // subtract it from the previous picture,
                                DiffImg = ImToDisp.AbsDiff(LastOne);
                                // sum all the pixels,
                                Bgr b = DiffImg.GetSum();
                                // and append the result to our time series.
                                Timemap2.Add(b.Blue + b.Green + b.Red);
                            }
                            LastOne = ImToDisp.Clone();
                        }
                        else
                        {
                            // Optical flow mode
                            String faceFileName = "haarcascade_frontalface_default.xml";
                            // Find the face
                            using (HaarCascade face = new HaarCascade(faceFileName))
                            {
                                // using a grayscale image
                                using (Image <Gray, Byte> gray = ImToDisp.Convert <Gray, Byte>()) // Convert it to grayscale
                                {
                                    // Detect the face
                                    MCvAvgComp[] facesDetected = face.Detect(
                                        gray,
                                        1.1,
                                        4,
                                        Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
                                        new Size(20, 20), new Size(100, 100));
                                    // For every detected rectangle (in practice, of course, we find just one)
                                    foreach (MCvAvgComp f in facesDetected)
                                    {
                                        // Save the rectangle, trimming it slightly
                                        CurrentFace = new Rectangle(f.rect.X + f.rect.Width / 10, f.rect.Y + f.rect.Height / 8, f.rect.Width - f.rect.Width / 5, 3 * f.rect.Height / 4);
                                    }
                                    if (CurrentFace.X != -1)
                                    {
                                        if (FrameFromLastTime != null)
                                        {
                                            // Restrict the working area to the face
                                            gray.ROI = new Rectangle((int)CurrentFace.X, (int)CurrentFace.Y, (int)CurrentFace.Width, (int)CurrentFace.Height);
                                            var     returnFeatures = new PointF[1];
                                            byte[]  status;
                                            float[] trackError;
                                            // Find a set of features to track
                                            PointF[][] ActualFeature = gray.GoodFeaturesToTrack(15, 0.01d, 0.09d, 3);
                                            gray.ROI = Rectangle.Empty;
                                            // If any were found,
                                            if (ActualFeature[0].Length != 0)
                                            {
                                                // map them back into full-image coordinates
                                                for (int i = 0; i < ActualFeature[0].Length; i++)
                                                {
                                                    ActualFeature[0][i].X += (int)CurrentFace.X;
                                                    ActualFeature[0][i].Y += (int)CurrentFace.Y;
                                                }
                                                // Compute the optical flow
                                                OpticalFlow.PyrLK(gray, FrameFromLastTime, ActualFeature[0], new System.Drawing.Size(10, 10), 3, new MCvTermCriteria(20, 0.03d), out returnFeatures, out status, out trackError);

                                                double sum    = 0;
                                                double tcount = 0;
                                                PointF Sum    = new PointF(0, 0);
                                                // Compute the total displacement
                                                for (int i = 0; i < returnFeatures.Length; i++)
                                                {
                                                    if ((status[i] == 1) && (trackError[i] < 50))
                                                    {
                                                        double tsum = Math.Sqrt((ActualFeature[0][i].X - returnFeatures[i].X) * (ActualFeature[0][i].X - returnFeatures[i].X) + (ActualFeature[0][i].Y - returnFeatures[i].Y) * (ActualFeature[0][i].Y - returnFeatures[i].Y));
                                                        // Assume genuine motion has no large jumps; features that moved more than 3 px are treated as tracking errors
                                                        if (tsum < 3)
                                                        {
                                                            ImToDisp.Draw(new LineSegment2D(new Point((int)ActualFeature[0][i].X, (int)ActualFeature[0][i].Y), new Point((int)returnFeatures[i].X, (int)returnFeatures[i].Y)), new Bgr(Color.Blue), 1);
                                                            tcount++;
                                                            Sum.X += ActualFeature[0][i].X - returnFeatures[i].X;
                                                            Sum.Y += ActualFeature[0][i].Y - returnFeatures[i].Y;
                                                        }
                                                    }
                                                }
                                                if (tcount != 0)
                                                {
                                                    Timemap2.Add(Math.Sqrt(Sum.X * Sum.X + Sum.Y * Sum.Y));
                                                }
                                                else
                                                {
                                                    Timemap2.Add(0);
                                                }
                                            }
                                        }
                                    }
                                    FrameFromLastTime = gray.Clone();
                                }
                            }
                        }
                    }
                }


                // How long the processing took
                St1 = (DateTime.Now - DTTest).TotalMilliseconds.ToString();
            }
        }
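
The zero-mean sine trick in the loop above can be isolated into a small helper. A hedged sketch, with an illustrative name and signature that are not part of the original source: for a wavelength w in seconds and a list of frame ages, build sine weights and subtract their mean so that weighting the frames does not shift the overall image brightness.

        // Hypothetical helper: sine weights for frame ages (seconds back from the
        // newest frame) at wavelength w, offset so that the weights sum to zero.
        static double[] ZeroMeanSineWeights(double[] secondsBack, double w)
        {
            double[] weights = new double[secondsBack.Length];
            double   mean    = 0;
            for (int i = 0; i < weights.Length; i++)
            {
                weights[i] = Math.Sin(2 * Math.PI * (secondsBack[i] / w));
                mean      += weights[i];
            }
            mean /= weights.Length;
            for (int i = 0; i < weights.Length; i++)
            {
                weights[i] -= mean; // the same offset the loop above calls 'Sdvid'
            }
            return weights;
        }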
Example 14
        private void ComputeSparseOpticalFlow()
        {
            float xdirection = 0;
            float ydirection = 0;

            // Compute optical flow using the pyramidal Lucas-Kanade method
            OpticalFlow.PyrLK(grayFrame, nextGrayFrame, ActualFeature[0], new System.Drawing.Size(10, 10), 3, new MCvTermCriteria(20, 0.03d), out NextFeature, out Status, out TrackError);
            // Detect hands in the grayscale image using the Haar cascade
            MCvAvgComp[] hands = nextGrayFrame.DetectHaarCascade(haar, 1.4, 4, HAAR_DETECTION_TYPE.DO_CANNY_PRUNING, new Size(25, 25))[0];

            //using (MemStorage storage = new MemStorage())
            //    nextHull = PointCollection.ConvexHull(ActualFeature[0], storage, Emgu.CV.CvEnum.ORIENTATION.CV_CLOCKWISE).ToArray();
            //nextCentroid = FindCentroid(nextHull);
            for (int i = 0; i < NextFeature.Length; i++)
            {
                if (Status[i] == 1)
                {
                    xdirection += NextFeature[i].X - ActualFeature[0][i].X;
                    ydirection += NextFeature[i].Y - ActualFeature[0][i].Y;
                }
                DrawTrackedFeatures(i);
                // Draw optical flow vectors
                DrawFlowVectors(i);
            }
            foreach (var hand in hands)
            {
                opticalFlowFrame.Draw(hand.rect, new Bgr(Color.Green), 3);
            }
            //if (hands.Length >= 1)
            //     gestureChanged =!gestureChanged;
            //if (gestureChanged)
            //{
            //    if (xdirection > 10)
            //    {
            //        controlModel("zoom", "+{RIGHT}");
            //    }
            //    else if (xdirection < -10)
            //    {
            //        controlModel("zoom", "+{LEFT}");
            //    }
            //}
            //else
            //{

            label4.Text = "xdirection: " + xdirection + "  ydirection: " + ydirection;
            // Require at least 50 px of aggregate motion; negative thresholds catch leftward/upward movement
            if (xdirection > 50 && ydirection > 50)
            {
                controlModel("zoom", "{RIGHT}");
            }
            else if (xdirection < -50 && ydirection < -50)
            {
                controlModel("zoom", "{LEFT}");
            }
            else if (xdirection > 50 && ydirection < -50)
            {
                controlModel("zoom", "{DOWN}");
            }
            else if (xdirection < -50 && ydirection > 50)
            {
                controlModel("zoom", "{UP}");
            }

            /*  if (xdirection > 20)
             * {
             *    controlModel("zoom", "{RIGHT}");
             * }
             * else if (xdirection < -20)
             * {
             *    controlModel("zoom", "{LEFT}");
             * }
             * // }
             * if (ydirection > 20)
             * {
             * controlModel("zoom", "{DOWN}");
             * }
             * else if (ydirection < -20)
             * {
             * controlModel("zoom", "{UP}");
             * }*/
        }
Example 15
        /// <summary>
        /// Performs the tracking step of the hand-tracking algorithm.
        /// </summary>
        /// <param name="elapsed_time">The time interval between frame i and frame i+1, used to calculate feature speed</param>
        internal void StartTracking(double elapsed_time)
        {
            global_time += elapsed_time;

            GetImages();

            OpticalFlow.PyrLK(previous_image, current_image, previous_features, window_size, levels, criteria, out current_features, out status, out point_error);

            previous_image = current_image;

            CalculateFeatureVelocity(current_features);

            old_cursor_location_pt = new_cursor_location_pt;

            new_center_pt = GetCentroid(good_features.ToArray());

            ExtractHandRegion(new_center_pt);

            new_cursor_location_pt = contour_rect.Location;

            double movement_sensitivity = FindDistance(new_cursor_location_pt, old_cursor_location_pt);

            if (movement_sensitivity > 10) // ignore small hand movements
            {
                LinearSmoothMove(Point.Round(new_center_pt), Convert.ToInt32(movement_sensitivity));
            }

            best_tracked_feature_Array = FindSkinColoredFeatures(good_features.ToArray());


            if (best_tracked_feature_Array.Length == 0)
            {
                Console.WriteLine("exception inside number of good_features {0} ", good_features.Count);
                Console.WriteLine("exception inside best_tracked_feature_Array.Length == 0 ");
            }

            // These calls could be moved above the 'old_cursor_location_pt = new_cursor_location_pt;'
            // assignment, using good_features instead of best_tracked_feature_Array.
            AddNewFeatures(number_of_bad_features, best_tracked_feature_Array);



            int diff = 30 - good_features.Count;

            if (diff > 0)
            {
                AddNewFeatures(diff, good_features.ToArray());
            }

            foreach (PointF p in good_features)
            {
                colored_temp_image.Draw(new CircleF(p, 3f), new Bgr(Color.Cyan), -1);
            }

            colored_temp_image.Draw(new CircleF(new_center_pt, 10f), new Bgr(Color.Blue), -1);

            previous_features = good_features.ToArray();

            current_features = null;
            status           = null;
            good_features.Clear();
            bad_features.Clear();
            number_of_bad_features = 0;

            //colored_temp_image.Save("H:\\debug\\test" + Convert.ToInt32(global_time) + ".jpg");

            Form1.NewImage = colored_temp_image;
        }
Example 16
        private unsafe void ProcessDepthFrameData(IntPtr depthFrameData, int frameSize, ushort minDepth, ushort maxDepth, DepthSpacePoint p, bool rec, bool left)
        {
            ushort *frameData = (ushort *)depthFrameData; // depth frame data is a 16 bit value
            ushort  initDepth = frameData[depthFrameDescription.Width * ((int)p.Y) + ((int)p.X)];

            if (rec && (bool)chk_recDepth.IsChecked)
            {
                string file = "";
                // File name pattern: [left/right]_[gestureNumber]_[sequenceID]_[sequenceIndex]
                if (left)
                {
                    file = String.Format("c:/temp/PCD/pcd/dd_left_{0:00}_{1:00}_{2:00}.pcd", gestureNumber, sequenceID, depthFrameIndexL++);
                }
                else
                {
                    file = String.Format("c:/temp/PCD/pcd/dd_right_{0:00}_{1:00}_{2:00}.pcd", gestureNumber, sequenceID, depthFrameIndexR++);
                }

                pcdData = new StreamWriter(file, false);
            }

            int distanceFactor = 80;
            int index          = 0;

            currentFrame = new byte[windowSize * windowSize];

            for (int y = -frameSize; y < frameSize; y++)
            {
                for (int x = -frameSize; x < frameSize; x++)
                {
                    //Select index for smaller frame and get Depth value
                    int    offset = (depthFrameDescription.Width * ((int)p.Y + y) + ((int)p.X + x));
                    ushort depth  = frameData[offset];

                    bool isNearPalm = depth < initDepth + distanceFactor && depth > initDepth - distanceFactor;
                    depth = isNearPalm ? (ushort)(depth + (depth - initDepth) * 10) : (ushort)0;
                    depthPixels[index] = currentFrame[index] = (byte)(depth / MapDepthToByte);
                    index++;

                    // ==== Record depth data for the next step (segmentation) ====
                    if ((bool)chk_recDepth.IsChecked && rec)
                    {
                        if (isNearPalm)
                        {
                            var point = Helper.depthToPCD(p.X + (float)x, p.Y + (float)y, depth);
                            pcdData.WriteLine(String.Format("{0} {1} {2}", point.X.ToString().Replace(',', '.'), point.Y.ToString().Replace(',', '.'), point.Z.ToString().Replace(',', '.')));
                            pcdData.Flush();
                        }
                    }
                }
            }

            if ((bool)chk_recDepth.IsChecked && rec)
            {
                pcdData.Close();
            }

            //============== Opt Flow ========
            var thisPreviousFrame          = left ? previousFrameL : previousFrameR;
            Image <Gray, byte>  prevImg    = new Image <Gray, byte>(arrayToBitmap(thisPreviousFrame, frameSize * 2, frameSize * 2));
            Image <Gray, byte>  currentImg = new Image <Gray, byte>(arrayToBitmap(currentFrame, frameSize * 2, frameSize * 2));
            Image <Gray, float> flowX      = new Image <Gray, float>(new System.Drawing.Size(frameSize * 2, frameSize * 2));
            Image <Gray, float> flowY      = new Image <Gray, float>(new System.Drawing.Size(frameSize * 2, frameSize * 2));
            var winSize = new System.Drawing.Size(5, 5);

            try
            {
                currentImg = currentImg.SmoothMedian(5);
                OpticalFlow.LK(prevImg, currentImg, winSize, flowX, flowY);
                var bytes = (flowX.Convert <Gray, byte>() + flowY.Convert <Gray, byte>()).Bytes;
                var flow  = new Image <Gray, byte>(frameSize * 2, frameSize * 2, new Gray(bytes.Sum(e => e) / bytes.Length));

                if (left)
                {
                    previousFrameL = currentFrame;
                    this.flowBitmapLeft.WritePixels(new Int32Rect(0, 0, flow.Bitmap.Width, flow.Bitmap.Height), flow.Bytes, flow.Bitmap.Width, 0);
                }
                else
                {
                    previousFrameR = currentFrame;
                    this.flowBitmapRight.WritePixels(new Int32Rect(0, 0, flow.Bitmap.Width, flow.Bitmap.Height), flow.Bytes, flow.Bitmap.Width, 0);
                }
            }
            catch { Console.WriteLine("Optical Flow Exception"); }
            //============== OF ========
        }
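
As a side note, the arrayToBitmap round trip above could in principle be avoided: Emgu's Image<TColor, TDepth> has a constructor that accepts a TDepth[,,] data array directly. A hypothetical helper, not taken from the original source:

        // Hypothetical alternative: wrap a flat byte buffer in an Image<Gray, byte>
        // without going through a System.Drawing.Bitmap first.
        static Image <Gray, byte> ToGrayImage(byte[] buffer, int width, int height)
        {
            byte[,,] data = new byte[height, width, 1]; // [row, column, channel]
            for (int y = 0; y < height; y++)
            {
                for (int x = 0; x < width; x++)
                {
                    data[y, x, 0] = buffer[y * width + x];
                }
            }
            return new Image <Gray, byte>(data);
        }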
Example 17

        /// <summary>
        /// Interpolates the path using the iterative Lucas-Kanade method, treating each point in the path as a feature
        /// </summary>
        /// <param name="previousPath">The path of the previous frame</param>
        /// <param name="nextPath">The empty path of the to be interpolated frame</param>
        /// <param name="previousImage">The previous image</param>
        /// <param name="nextImage">The next image to interpolate to</param>
        /// <returns>A value between 0 and 1 indicating the accuracy of the interpolation</returns>
        protected override double Interpolate(Model.IPathContainer previousPath, Model.IPathContainer nextPath, Image <Bgr, Byte> previousImage, Image <Bgr, Byte> nextImage)
        {
            List <PointF> featureList = new List <PointF>();
            List <Path>   activePaths = new List <Path>();

            for (int i = 0; i < previousPath.LayerIndices[previousPath.ActivePathsLayer].Count; i++)
            {
                activePaths.Add(previousPath.Paths[previousPath.LayerIndices[previousPath.ActivePathsLayer][i]]);
            }

            IEnumerator <Path> pathEnumerator = activePaths.GetEnumerator();

            while (pathEnumerator.MoveNext())
            {
                LinkedList <BezierPoint> .Enumerator pointEnumerator = pathEnumerator.Current.GetEnumerator();
                while (pointEnumerator.MoveNext())
                {
                    featureList.Add(GetImagePoint(pointEnumerator.Current));
                }
            }

            Image <Gray, Byte> prevGray = previousImage.Convert <Gray, Byte>();
            Image <Gray, Byte> nextGray = nextImage.Convert <Gray, Byte>();

            PointF[] featureArray = featureList.ToArray();
            PointF[] newFeatures;
            byte[]   errors;
            float[]  trackErrors;
            Emgu.CV.Structure.MCvTermCriteria criteria = new Emgu.CV.Structure.MCvTermCriteria(10);

            OpticalFlow.PyrLK(prevGray, nextGray, featureArray, new Size(10, 10), 5, criteria, out newFeatures, out errors, out trackErrors);

            IPathContainer     tempPathContainer  = previousPath.Clone();
            IEnumerator <Path> tempPathEnumerator = tempPathContainer.GetPathsEnumerator();
            int index = 0;

            for (int j = 0; j < previousPath.Count; j++)
            {
                if (previousPath.LayerIndices[previousPath.ActivePathsLayer].Contains(j))
                {
                    LinkedList <BezierPoint> .Enumerator pointEnumerator = tempPathContainer.Paths[j].GetEnumerator();
                    while (pointEnumerator.MoveNext())
                    {
                        Translate(pointEnumerator.Current, featureArray[index], newFeatures[index++]);
                    }
                }
            }

            IEnumerator <Path> nextPathEnumerator = tempPathContainer.GetPathsEnumerator();

            while (nextPathEnumerator.MoveNext())
            {
                nextPath.AddPath(nextPathEnumerator.Current);
            }

            nextPath.ActivePathsLayer = tempPathContainer.ActivePathsLayer;
            nextPath.LayerIndices     = tempPathContainer.LayerIndices;

            double error = DetermineError(previousPath, nextPath, errors);

            return error;
        }