Example #1
0
        /// <summary>
        /// Renders the previous and current image locations of all tracked features,
        /// then optionally overlays the ground features used by the translation analyzer.
        /// </summary>
        private void DrawAllFeatureLocationsPreviousAndCurrent()
        {
            List <TrackedFeature> features = m_VisualOdometer.TrackedFeatures;

            // First pass: previous locations, only for features with history.
            foreach (TrackedFeature feature in features)
            {
                if (feature.Count > 1)
                {
                    DrawPreviousFeatureLocation(feature[-1], feature.IsFull, m_VisualOdometer.CurrentImage);
                }
            }

            // Second pass: current location of every tracked feature.
            foreach (TrackedFeature feature in features)
            {
                DrawCurrentFeatureLocation(feature[0], feature.IsFull, m_VisualOdometer.CurrentImage);
            }

            if (m_ShowGroundFeaturesCheckBox.Checked)
            {
                DrawUsedGroundFeatures();
            }
        }
Example #2
0
        /// <summary>
        /// Records a feature activation with the analytics session, if one is active.
        /// </summary>
        /// <returns>
        /// A tracked-feature wrapper; when no session exists it carries no feature-use record.
        /// </returns>
        public IAnalyticsMonitorTrackedFeature TrackFeature(string featureName, string activationMethod)
        {
            TrackedFeature trackedFeature = new TrackedFeature();

            // The session may be torn down concurrently; guard the access.
            lock (lockObj)
            {
                if (session != null)
                {
                    trackedFeature.Feature = session.AddFeatureUse(featureName, activationMethod);
                }
            }

            return trackedFeature;
        }
Example #3
0
        /// <summary>
        /// Highlights each ground feature used by the translation analyzer with a circle
        /// on the current image.
        /// </summary>
        private void DrawUsedGroundFeatures()
        {
            foreach (TrackedFeature groundFeature in m_VisualOdometer.TranslationAnalyzer.UsedGroundFeatures)
            {
                // Circle of radius 6 at the feature's current location.
                CircleF marker = new CircleF(groundFeature[0], 6.0f);
                m_VisualOdometer.CurrentImage.Draw(marker, m_UsedGroundFeatureColor, 4);
            }
        }
Example #4
0
        /// <summary>
        /// Classifies tracked feature points as obstacles when their hue, intensity and
        /// saturation are rare in a reference ground patch of the current frame.
        /// </summary>
        /// <param name="src">Current frame in HSV color space.</param>
        /// <param name="result">BGR image onto which the reference ROI outline is drawn.</param>
        private void GetObstacles(Image <Hsv, Byte> src, Image <Bgr, Byte> result)
        {
            float[] hueHist = new float[181];
            float[] intHist = new float[300];
            float[] satHist = new float[300];
            Obstacle = new Boolean[4, 4];

            // Reference ground patch whose histograms define "non-obstacle" colors.
            // NOTE(review): the ROI is hard-coded and assumes a frame of at least
            // 900x700 pixels - confirm against the camera configuration.
            Rectangle roi = new Rectangle(500, 300, 400, 400);

            result.Draw(roi, new Bgr(System.Drawing.Color.Green), 1);

            // FIX: the cloned reference image is IDisposable and was previously leaked.
            using (Image <Hsv, Byte> reference = src.Clone())
            {
                reference.ROI = roi;
                GetHistogramsHsv(reference, 180, 0, 180).MatND.ManagedArray.CopyTo(hueHist, 0); // hue channel
                GetHistogramsHsv(reference, 256, 2, 256).MatND.ManagedArray.CopyTo(intHist, 0); // intensity channel
                GetHistogramsHsv(reference, 256, 1, 256).MatND.ManagedArray.CopyTo(satHist, 0); // saturation channel
            }

            // FIX: bounds were hard-coded to 1280x720; use the actual frame size that
            // guards the src[Point] lookup below.
            int maxX = src.Width;
            int maxY = src.Height;

            List <TrackedFeature> trackedFeatures = m_VisualOdometer.TrackedFeatures;
            for (int i = 0; i < trackedFeatures.Count; i++)
            {
                TrackedFeature trackedFeature = trackedFeatures[i];
                int x = (int)trackedFeature[0].X;
                int y = (int)trackedFeature[0].Y;

                if (trackedFeature.IsFull && trackedFeature.Count > 1 &&
                    x > 0 && x < maxX && y > 0 && y < maxY)
                {
                    // Single pixel fetch (was three separate lookups).
                    Hsv pixel = src[new Point(x, y)];
                    double hueBin = hueHist[(int)pixel.Hue];
                    double intBin = intHist[(int)pixel.Value];

                    // A point whose color statistics are all rare in the reference
                    // patch is assumed to lie on an obstacle.
                    // ("Satuation" is Emgu CV's actual property name.)
                    if (hueBin <= 0.8 && intBin <= 0.8 && satHist[(int)pixel.Satuation] <= 0.5)
                    {
                        SortObstacle(x, y);
                    }
                }
            }
        }
        /// <summary>
        /// Projects each tracked feature's previous and current image locations onto the
        /// ground plane and draws them, optionally compensating the current location for
        /// the rotation measured by the rotation analyzer.
        /// </summary>
        private void DrawFeatures()
        {
            // Reusable pair: [0] = previous location, [1] = current location.
            System.Drawing.PointF[] pointPair = new System.Drawing.PointF[2];

            foreach (TrackedFeature feature in m_VisualOdometer.TrackedFeatures)
            {
                // Need at least a previous and a current sample.
                if (feature.Count < 2)
                {
                    continue;
                }

                // Both samples must lie inside the ground region.
                bool previousOnGround = feature[-1].Y > m_VisualOdometer.GroundRegionTop;
                bool currentOnGround  = feature[0].Y > m_VisualOdometer.GroundRegionTop;
                if (!previousOnGround || !currentOnGround)
                {
                    continue;
                }

                pointPair[0] = feature[-1];
                pointPair[1] = feature[0];

                // Map both image points onto the ground plane (in place).
                m_GroundProjectionTransformation.ProjectPoints(pointPair);
                m_MainForm.DrawPreviousFeatureLocation(pointPair[0], feature.IsFull, m_GroundProjectionImage);

                Angle headingChange = m_VisualOdometer.RotationAnalyzer.HeadingChange;

                // Optionally undo the rotation component before drawing the endpoint.
                PointF currentEndpoint = m_RemoveRotationEffectCheckBox.Checked
                    ? m_VisualOdometer.TranslationAnalyzer.RemoveRotationEffect(headingChange, pointPair[1])
                    : pointPair[1];
                m_MainForm.DrawCurrentFeatureLocation(currentEndpoint, feature.IsFull, m_GroundProjectionImage);
            }
        }
Example #6
0
        /// <summary>
        /// Draws an optic-flux line from the previous to the current location of every
        /// tracked feature that has at least two recorded positions.
        /// </summary>
        private void DrawAllFeatureLocationsPreviousAndCurrent()
        {
            // FIX: removed unused locals (Sections, FOE) and the OpticFluxLines list,
            // which was populated but never read - dead allocations on every call.
            List <TrackedFeature> trackedFeatures = m_VisualOdometer.TrackedFeatures;

            for (int i = 0; i < trackedFeatures.Count; i++)
            {
                TrackedFeature trackedFeature = trackedFeatures[i];
                if (trackedFeature.Count > 1)
                {
                    DrawOpticFluxLines(trackedFeature[-1], trackedFeature[0], trackedFeature.IsFull, m_VisualOdometer.CurrentImage);
                }
            }
        }
 /// <summary>
 /// Registers a feature activation with the analytics session when one is active.
 /// The returned wrapper is never null; without a session it simply records nothing.
 /// </summary>
 public IAnalyticsMonitorTrackedFeature TrackFeature(string featureName, string activationMethod)
 {
     TrackedFeature trackedFeature = new TrackedFeature();

     // Session lifetime is managed elsewhere; synchronize the read.
     lock (lockObj)
     {
         if (session != null)
         {
             trackedFeature.Feature = session.AddFeatureUse(featureName, activationMethod);
         }
     }

     return trackedFeature;
 }
		/// <summary>
		/// Finds fresh feature points to track (outside the sky and ground exclusion
		/// bands), undistorts them, adds them to the tracked set, and recomputes the
		/// threshold at which the next repopulation is triggered.
		/// </summary>
		private void RepopulateFeaturePoints()
		{
			System.Drawing.PointF[] newRawTrackedFeaturePoints = this.OpticalFlow.FindFeaturesToTrack(
				m_CurrentGrayImage,
				m_TrackedFeatures,
				m_SkyRegionBottom,
				m_GroundRegionTop);

			// Nothing new to track.
			if (newRawTrackedFeaturePoints.Length == 0)
			{
				return;
			}

			m_RawTrackedFeaturePoints.AddRange(newRawTrackedFeaturePoints);

			// Remove lens distortion before the points are used for tracking.
			System.Drawing.PointF[] undistortedNewFeaturePoints = m_CameraParameters.IntrinsicCameraParameters.Undistort(
				newRawTrackedFeaturePoints, m_CameraParameters.IntrinsicCameraParameters.IntrinsicMatrix, null);

			// Each undistorted point seeds a new tracked feature.
			foreach (System.Drawing.PointF undistortedPoint in undistortedNewFeaturePoints)
			{
				TrackedFeature trackedFeature = new TrackedFeature();
				trackedFeature.Add(undistortedPoint);
				m_TrackedFeatures.Add(trackedFeature);
			}

			this.InitialFeaturesCount = m_TrackedFeatures.Count;

			// Repopulate once 10% of the initial features have been lost...
			m_ThresholdForFeatureRepopulation = this.InitialFeaturesCount * 9 / 10;
			// ...but never let the threshold drop below a fixed floor of 100.
			if (m_ThresholdForFeatureRepopulation < 100)
			{
				m_ThresholdForFeatureRepopulation = 100;
			}

			m_NotTrackedFeaturesCount = 0;
		}
    /// <summary>
    /// Computes sparse optical flow between the previous and current frame, estimates
    /// the global camera motion as a homography, and fills <paramref name="target"/>
    /// with weight rectangles at feature points whose displacement (after removing the
    /// homography) falls within a configured distance band.
    /// </summary>
    /// <param name="image">Current frame; also becomes the next call's previous frame.</param>
    /// <param name="target">Weight map receiving one filled rectangle per accepted feature.</param>
    public override void CalculateWeights(Mat image, ImageFeatureMap target)
    {
        DetectionTime = 0;
        if (!Enabled)
        {
            return;
        }
        byte[]   status;
        float[]  errTracker;
        PointF[] features;



        // Frame dimensions, used to normalize coordinates into [0,1].
        float W = image.Width;
        float H = image.Height;

        // First frame, or a resolution change: just store the frame and wait for the next one.
        if (_isFirstFrame ||
            _prevImage.Width != image.Width ||
            _prevImage.Height != image.Height)
        {
            // NOTE(review): the previously stored _prevImage is not disposed before
            // being replaced here (and at the end of this method); if Mat is
            // IDisposable this leaks native memory - confirm and consider disposing.
            _prevImage    = image.Clone();
            _isFirstFrame = false;
            return;
        }

        DateTime t = DateTime.Now;

        // Refresh the feature set when it is missing, too sparse (<50 points),
        // or older than the configured update interval.
        if (_currPoints == null || _currPoints.Length < 50 ||
            (t - _time).TotalSeconds > Params.OFParameters.FeaturesUpdateTime)
        {
            _time = t;
            UnityEngine.Debug.Log("Recalculating feature points");

            // Good-Features-To-Track corner detection on the current frame.
            GFTTDetector _GFTTdetector = new GFTTDetector(Params.OFParameters.MaxFeaturesCount);
            MKeyPoint[]  featPoints    = _GFTTdetector.Detect(image, null);

            _prevPoints = new PointF[featPoints.Length];
            int i = 0;
            foreach (var k in featPoints)
            {
                _prevPoints [i] = k.Point;
                ++i;
            }

            // Both point sets reference the same array until the next flow update.
            _currPoints = _prevPoints;
        }

        Stopwatch watch;

        watch = Stopwatch.StartNew();
        try{
            _criteria.Type    = Params.OFParameters.CriteriaType;
            _criteria.MaxIter = Params.OFParameters.Iterations;
            _criteria.Epsilon = Params.OFParameters.Epsilon;
            // Pyramidal Lucas-Kanade: track _prevPoints from _prevImage into the
            // current frame; tracked positions come back in `features`.
            CvInvoke.CalcOpticalFlowPyrLK(_prevImage, image, _prevPoints, new Size((int)Params.OFParameters.SearchWindow.x, (int)Params.OFParameters.SearchWindow.y),
                                          Params.OFParameters.Level, _criteria, out features, out status, out errTracker);

            //calculate homography matrix
            CvInvoke.FindHomography(_prevPoints, features, _homography, Emgu.CV.CvEnum.HomographyMethod.Default);
        }catch (Exception e) {
            // Best-effort: a failed flow/homography computation skips this frame.
            UnityEngine.Debug.Log(e.Message);
            return;
        }
        watch.Stop();
        DetectionTime = watch.ElapsedMilliseconds;

        //calculate homography transformation, and remove it from points
        // The 3x3 homography is embedded into a 4x4 matrix with the translation
        // terms placed in the 4th column.
        Matrix4x4 m = new Matrix4x4();

        m.SetRow(0, new Vector4((float)_homography[0, 0], (float)_homography[0, 1], 0, (float)_homography[0, 2]));
        m.SetRow(1, new Vector4((float)_homography[1, 0], (float)_homography[1, 1], 0, (float)_homography[1, 2]));
        m.SetRow(2, new Vector4(0, 0, 1, 0));
        m.SetRow(3, new Vector4((float)_homography[2, 0], (float)_homography[2, 1], 0, (float)_homography[2, 2]));
        Matrix4x4 homographyInverse = Matrix4x4.Inverse(m);         //get the inverse
        // NOTE(review): homographyInverse is computed but never used below - verify
        // whether `m` or the inverse was intended in the distance loop.


        //next, fill weight map


        // Dominant translation direction from the homography's translation terms.
        Vector2 direction = new Vector2((float)_homography [0, 2], (float)_homography [1, 2]);

        direction.Normalize();
        _opticalFlow.Clear();
        int count = 0;

        for (int i = 0; i < features.Length; ++i)
        {
            // NOTE(review): Unity promotes Vector3 to Vector4 with w = 0 here, so the
            // translation column of `m` does not contribute - confirm this is intended.
            Vector3 dp   = m * new Vector3(features [i].X, features [i].Y, 0);
            // Squared distance between the homography-predicted point and the
            // previous location; compared against squared min/max thresholds.
            float   dist = (dp.x - _prevPoints [i].X) * (dp.x - _prevPoints [i].X) +
                           (dp.y - _prevPoints [i].Y) * (dp.y - _prevPoints [i].Y);
            if (dist > Params.OFParameters.MinDistance * Params.OFParameters.MinDistance &&
                dist < Params.OFParameters.MaxDistance * Params.OFParameters.MaxDistance)
            {
                //check if the calculated point belongs to the object motion or to camera motion
                //Vector3 d = new Vector3 (features [i].X - dp.x, features [i].Y - dp.y,0);

                /*	float len= Mathf.Sqrt(dist);//dp.magnitude;
                 *      if (len < Params.OFParameters.FeatureSimilarityThreshold) {
                 *              continue;//skip this point, correlated with camera motion
                 *      }*/
                /*
                 * Vector3 d = new Vector3 (features [i].X - _currPoints [i].X, features [i].Y - _currPoints [i].Y,0);
                 * d.Normalize ();
                 * float dp = Vector2.Dot (d, direction);
                 * if (dp > Params.OFParameters.FeatureSimilarityThreshold) {
                 *      continue;//skip this point, correlated with camera motion
                 * }*/
                // add this point
                ++count;
                // Normalize to [0,1] image coordinates; discard out-of-frame points.
                float x = features [i].X / (float)W;
                float y = (features [i].Y / (float)H);
                if (x > 1 || x < 0 || y > 1 || y < 0)
                {
                    continue;
                }
                // Fixed 20-pixel window (normalized), centered on the feature.
                float w = 20 / W;              // Mathf.Abs(_currPoints [i].X - features [i].X)/W;
                float h = 20 / H;              //Mathf.Abs(_currPoints [i].Y - features [i].Y)/H;
                Rect  r = new Rect(x - w / 2.0f, y - h / 2.0f /*1-y-h*/, w, h);
                //target.SetWeight (x,1-y,1.0f);
                target.FillRectangle(r.x, r.y, r.width, r.height, 1);

                // Record the normalized flow segment for consumers of _opticalFlow.
                TrackedFeature f = new TrackedFeature();
                f.v1 = new Vector2(_currPoints[i].X / W, _currPoints[i].Y / H);
                f.v2 = new Vector2(features [i].X / W, features [i].Y / H);
                _opticalFlow.Add(f);
            }
        }

        // Declare detection only if more than 10% of features moved independently.
        if (count > features.Length / 10)
        {
            _featuresDetected = true;
        }
        else
        {
            _featuresDetected = false;
        }


        // NOTE(review): `features` was already dereferenced in the loop above, so this
        // null check can never be the first observation of a null value - verify intent.
        if (features != null)
        {
            lock (_objectsLock) {
                // Shift current -> previous; newly tracked positions become current.
                _prevPoints = _currPoints;
                _currPoints = features;
            }
        }

        _prevImage = image.Clone();
    }