Example #1
        private void DrawDebugInfo(Graphics canvas, AbstractTrackPoint point, RectangleF search)
        {
            TrackPointBlock tpb = point as TrackPointBlock;

            if (tpb == null)
            {
                return;
            }

            Font   f    = new Font("Consolas", 8, FontStyle.Bold);
            string text = string.Format("simi:{0:0.000}, age:{1}, pos:{2:0.000}×{3:0.000}", tpb.Similarity, tpb.TemplateAge, tpb.Point.X, tpb.Point.Y);
            Brush  b    = tpb.Similarity > parameters.TemplateUpdateThreshold ? Brushes.Green : Brushes.Red;

            canvas.DrawString(text, f, b, search.Location.Translate(0, -25));

            f.Dispose();
        }
Example #2
        public override void Draw(Graphics _canvas, AbstractTrackPoint _currentPoint, Point _directZoomTopLeft, double _fStretchFactor, Color _color, double _fOpacityFactor)
        {
            // Current Search window.
            int iSrchLeft = _currentPoint.X - (int)(((double)m_SearchWindowSize.Width * _fStretchFactor) / 2);
            int iSrchTop  = _currentPoint.Y - (int)(((double)m_SearchWindowSize.Height * _fStretchFactor) / 2);

            Rectangle SrchZone = new Rectangle(iSrchLeft, iSrchTop, (int)((double)m_SearchWindowSize.Width * _fStretchFactor), (int)((double)m_SearchWindowSize.Height * _fStretchFactor));

            using (Pen searchPen = new Pen(Color.FromArgb((int)(64.0f * _fOpacityFactor), _color)))
            {
                _canvas.DrawRectangle(searchPen, SrchZone);
            }

            // Features coordinates are relative to the previous search window.
            foreach (Ipoint p in ((TrackPointSURF)_currentPoint).FoundFeatures)
            {
                // Use inverse color for failed points.
                Color invert = Color.FromArgb(64, 255 - _color.R, 255 - _color.G, 255 - _color.B);
                DrawFeature(_canvas, invert, 1, p, ((TrackPointSURF)_currentPoint).SearchWindow);
            }

            DrawFeature(_canvas, _color, 2, ((TrackPointSURF)_currentPoint).MatchedFeature, ((TrackPointSURF)_currentPoint).SearchWindow);
        }
Example #3
        public override void Draw(Graphics _canvas, AbstractTrackPoint _currentPoint, Point _directZoomTopLeft, double _fStretchFactor, Color _color, double _fOpacityFactor)
        {
            // Draws a visual indication of the algorithm.
            // This should help the user understand how the algorithm is working.
            // The visual information may only make sense for dev purposes though.

            double fX = (((double)_currentPoint.X - (double)_directZoomTopLeft.X) * _fStretchFactor);
            double fY = (((double)_currentPoint.Y - (double)_directZoomTopLeft.Y) * _fStretchFactor);

            // Current Search window.
            int       iSrchLeft = (int)(fX - (((double)m_SearchWindowSize.Width * _fStretchFactor) / 2));
            int       iSrchTop  = (int)(fY - (((double)m_SearchWindowSize.Height * _fStretchFactor) / 2));
            Rectangle SrchZone  = new Rectangle(iSrchLeft, iSrchTop, (int)((double)m_SearchWindowSize.Width * _fStretchFactor), (int)((double)m_SearchWindowSize.Height * _fStretchFactor));

            //_canvas.DrawRectangle(new Pen(Color.FromArgb((int)(64.0f * _fOpacityFactor), _color)), SrchZone);
            using (SolidBrush searchBrush = new SolidBrush(Color.FromArgb((int)(48.0f * _fOpacityFactor), _color)))
            {
                _canvas.FillRectangle(searchBrush, SrchZone);
            }

            // Current Block.
            int       iTplLeft = (int)(fX - (((double)m_BlockSize.Width * _fStretchFactor) / 2));
            int       iTplTop  = (int)(fY - (((double)m_BlockSize.Height * _fStretchFactor) / 2));
            Rectangle TplZone  = new Rectangle(iTplLeft, iTplTop, (int)((double)m_BlockSize.Width * _fStretchFactor), (int)((double)m_BlockSize.Height * _fStretchFactor));
            Pen       p        = new Pen(Color.FromArgb((int)(128.0f * _fOpacityFactor), _color));

            _canvas.DrawRectangle(p, TplZone);

            // Current Block's similarity.
            Font   f = new Font("Arial", 8, FontStyle.Regular);
            String s = String.Format("{0:0.000}", ((TrackPointBlock)_currentPoint).Similarity);
            Brush  b = new SolidBrush(Color.FromArgb((int)(255.0f * _fOpacityFactor), _color));

            _canvas.DrawString(s, f, b, new PointF((float)iSrchLeft, (float)iSrchTop));

            b.Dispose();
            p.Dispose();
            f.Dispose();
        }
Example #4
 /// <summary>
 /// Draw a visual representation of the tracking taking place.
 /// This may only make sense for users who have an idea of how the tracking works.
 /// </summary>
 public abstract void Draw(Graphics canvas, AbstractTrackPoint point, IImageToViewportTransformer transformer, Color color, double opacityFactor);
Example #5
 /// <summary>
 /// Performs the tracking.
 /// Finds the coordinates of the tracked point in the current image, using data from previous matches.
 /// </summary>
 /// <param name="previousPoints">The list of points tracked so far.</param>
 /// <param name="currentImage">The current image.</param>
 /// <param name="t">The current timestamp used to create the TrackPoint.</param>
 /// <param name="currentPoint">The resulting point that should be added to the list.</param>
 /// <returns>true if the tracking is reliable, false if the point couldn't be found.</returns>
 public abstract bool Track(List <AbstractTrackPoint> previousPoints, Bitmap currentImage, long t, out AbstractTrackPoint currentPoint);
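For context, a minimal driving loop over a frame sequence might look like the sketch below. This is illustrative only and not part of the original sources; the tracker, frames, timestamps and initialPoint variables are hypothetical placeholders.

        // Illustrative sketch (hypothetical variables): driving a concrete tracker
        // through a sequence of frames, accumulating the tracked points.
        List<AbstractTrackPoint> trackedPoints = new List<AbstractTrackPoint>();
        trackedPoints.Add(initialPoint);    // Point selected by the user on the first frame.

        for (int i = 1; i < frames.Count; i++)
        {
            AbstractTrackPoint currentPoint;
            bool reliable = tracker.Track(trackedPoints, frames[i], timestamps[i], out currentPoint);

            // The point is added even when tracking is unreliable, so the user can fix it manually.
            trackedPoints.Add(currentPoint);

            if (!reliable)
                log.Debug(String.Format("Tracking lost confidence at frame {0}.", i));
        }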
Example #6
        public override bool Track(List <AbstractTrackPoint> _previousPoints, Bitmap _CurrentImage, long _t, out AbstractTrackPoint _currentPoint)
        {
            //---------------------------------------------------------------------
            // The input information we have at hand is:
            // - The current bitmap in which we have to find the point.
            // - The coordinates of all the previously tracked points.
            // - Previous tracking info, stored in the TrackPoints tracked so far.
            //---------------------------------------------------------------------

            TrackPointBlock lastTrackPoint = (TrackPointBlock)_previousPoints[_previousPoints.Count - 1];
            Point           lastPoint      = lastTrackPoint.ToPoint();

            bool bMatched = false;

            _currentPoint = null;

            if (lastTrackPoint.Template != null && _CurrentImage != null)
            {
                // Center search zone around last point.
                Point     searchCenter = lastPoint;
                Rectangle searchZone   = new Rectangle(searchCenter.X - (m_SearchWindowSize.Width / 2),
                                                       searchCenter.Y - (m_SearchWindowSize.Height / 2),
                                                       m_SearchWindowSize.Width,
                                                       m_SearchWindowSize.Height);

                searchZone.Intersect(new Rectangle(0, 0, _CurrentImage.Width, _CurrentImage.Height));

                double fBestScore    = 0;
                Point  bestCandidate = new Point(-1, -1);

                //Image<Bgr, Byte> cvTemplate = new Image<Bgr, Byte>(lastTrackPoint.Template);
                //Image<Bgr, Byte> cvImage = new Image<Bgr, Byte>(_CurrentImage);

                Bitmap img = _CurrentImage;
                Bitmap tpl = lastTrackPoint.Template;

                BitmapData imageData    = img.LockBits(new Rectangle(0, 0, img.Width, img.Height), ImageLockMode.ReadOnly, img.PixelFormat);
                BitmapData templateData = tpl.LockBits(new Rectangle(0, 0, tpl.Width, tpl.Height), ImageLockMode.ReadOnly, tpl.PixelFormat);

                Image <Bgr, Byte> cvImage    = new Image <Bgr, Byte>(imageData.Width, imageData.Height, imageData.Stride, imageData.Scan0);
                Image <Bgr, Byte> cvTemplate = new Image <Bgr, Byte>(templateData.Width, templateData.Height, templateData.Stride, templateData.Scan0);

                cvImage.ROI = searchZone;

                int resWidth  = searchZone.Width - lastTrackPoint.Template.Width + 1;
                int resHeight = searchZone.Height - lastTrackPoint.Template.Height + 1;

                Image <Gray, Single> similarityMap = new Image <Gray, Single>(resWidth, resHeight);

                //CvInvoke.cvMatchTemplate(cvImage.Ptr, cvTemplate.Ptr, similarityMap.Ptr, TM_TYPE.CV_TM_SQDIFF_NORMED);
                //CvInvoke.cvMatchTemplate(cvImage.Ptr, cvTemplate.Ptr, similarityMap.Ptr, TM_TYPE.CV_TM_CCORR_NORMED);
                CvInvoke.cvMatchTemplate(cvImage.Ptr, cvTemplate.Ptr, similarityMap.Ptr, TM_TYPE.CV_TM_CCOEFF_NORMED);

                img.UnlockBits(imageData);
                tpl.UnlockBits(templateData);

                // Find max
                Point  p1   = new Point(0, 0);
                Point  p2   = new Point(0, 0);
                double fMin = 0;
                double fMax = 0;

                CvInvoke.cvMinMaxLoc(similarityMap.Ptr, ref fMin, ref fMax, ref p1, ref p2, IntPtr.Zero);

                if (fMax > m_fSimilarityTreshold)
                {
                    bestCandidate = new Point(searchZone.Left + p2.X + tpl.Width / 2, searchZone.Top + p2.Y + tpl.Height / 2);
                    fBestScore    = fMax;
                }

                #region Monitoring
                if (m_bMonitoring)
                {
                    // Save the similarity map to file.
                    Image <Gray, Byte> mapNormalized = new Image <Gray, Byte>(similarityMap.Width, similarityMap.Height);
                    CvInvoke.cvNormalize(similarityMap.Ptr, mapNormalized.Ptr, 0, 255, NORM_TYPE.CV_MINMAX, IntPtr.Zero);

                    Bitmap bmpMap = mapNormalized.ToBitmap();

                    string tplDirectory = @"C:\Documents and Settings\Administrateur\Mes documents\Dev  Prog\Videa\Video Testing\Tracking\Template Update";
                    bmpMap.Save(tplDirectory + String.Format(@"\simiMap-{0:000}-{1:0.00}.bmp", _previousPoints.Count, fBestScore));
                }
                #endregion

                // Result of the matching.
                if (bestCandidate.X != -1 && bestCandidate.Y != -1)
                {
                    // Save template in the point.
                    _currentPoint = CreateTrackPoint(false, bestCandidate.X, bestCandidate.Y, fBestScore, _t, img, _previousPoints);
                    ((TrackPointBlock)_currentPoint).Similarity = fBestScore;

                    bMatched = true;
                }
                else
                {
                    // No match. Create the point at the center of the search window (whatever that might be).
                    _currentPoint = CreateTrackPoint(false, searchCenter.X, searchCenter.Y, 0.0f, _t, img, _previousPoints);
                    log.Debug("Track failed. No block over the similarity treshold in the search window.");
                }
            }
            else
            {
                // No image (error case?).
                // Create the point at the last point location.
                _currentPoint = CreateTrackPoint(false, lastTrackPoint.X, lastTrackPoint.Y, 0.0f, _t, _CurrentImage, _previousPoints);
                log.Debug("Track failed. No input image, or last point doesn't have any cached block image.");
            }

            return(bMatched);
        }
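For reference, CV_TM_CCOEFF_NORMED used above corresponds to zero-mean normalized cross-correlation, so the value returned by cvMinMaxLoc lies in [-1, 1], with 1 meaning a perfect match. The sketch below is not part of the original code; it only makes the metric concrete by evaluating it at a single offset on plain grayscale arrays.

        // Illustrative sketch: zero-mean normalized cross-correlation (the metric behind
        // CV_TM_CCOEFF_NORMED), evaluated for the template placed at offset (dx, dy) in the image.
        // Inputs are assumed to be grayscale intensity arrays indexed [row, column].
        private static double Zncc(double[,] image, double[,] template, int dx, int dy)
        {
            int h = template.GetLength(0);
            int w = template.GetLength(1);

            // Means of the template and of the image patch under it.
            double meanT = 0, meanI = 0;
            for (int y = 0; y < h; y++)
            {
                for (int x = 0; x < w; x++)
                {
                    meanT += template[y, x];
                    meanI += image[dy + y, dx + x];
                }
            }
            meanT /= (w * h);
            meanI /= (w * h);

            // Correlation of the centered signals, normalized by their energies.
            double num = 0, energyT = 0, energyI = 0;
            for (int y = 0; y < h; y++)
            {
                for (int x = 0; x < w; x++)
                {
                    double t = template[y, x] - meanT;
                    double i = image[dy + y, dx + x] - meanI;
                    num     += t * i;
                    energyT += t * t;
                    energyI += i * i;
                }
            }

            double denom = Math.Sqrt(energyT * energyI);
            return denom > 0 ? num / denom : 0;
        }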
Example #7
        public override bool Track(List <AbstractTrackPoint> _previousPoints, Bitmap _CurrentImage, long _t, out AbstractTrackPoint _currentPoint)
        {
            //---------------------------------------------------------------------
            // The input information we have at hand is:
            // - The current bitmap in which we have to find the point.
            // - The coordinates of all the previously tracked points.
            // - Previous tracking info, stored in the TrackPoints tracked so far.
            //---------------------------------------------------------------------

            bool bMatched = false;

            TrackPointSURF lastTrackPoint = (TrackPointSURF)_previousPoints[_previousPoints.Count - 1];

            // Create a point centered on last match.
            // This will find and register all the SURF features located in the search zone.

            // Test with grayscale image.
            Bitmap grayCurrentImage = Grayscale.CommonAlgorithms.BT709.Apply(_CurrentImage);

            _currentPoint = CreateTrackPoint(false, lastTrackPoint.X, lastTrackPoint.Y, 1.0f, _t, grayCurrentImage, _previousPoints);

            if (_currentPoint == null)
            {
                // Untrackable area.
            }
            else
            {
                if (((TrackPointSURF)_currentPoint).FoundFeatures.Count > 0)
                {
                    // Feature matching.
                    // Look for the nearest neighbour to the previous match, in the list of newly found features.
                    Match m = null;
                    COpenSURF.MatchPoint(lastTrackPoint.MatchedFeature, ((TrackPointSURF)_currentPoint).FoundFeatures, out m);

                    // Also look for a match of the first feature, to compensate for occlusion and drift.
                    Match          m2 = null;
                    TrackPointSURF firstTrackPoint = (TrackPointSURF)_previousPoints[0];
                    COpenSURF.MatchPoint(firstTrackPoint.MatchedFeature, ((TrackPointSURF)_currentPoint).FoundFeatures, out m2);

                    // Take the best match out of the two.
                    Match matchedFeature = (m.Distance2 < m2.Distance2) ? m : m2;

                    // TODO:
                    // check if distance (match similarity) is over a given threshold.

                    // Store the new matched feature with its associated data.
                    ((TrackPointSURF)_currentPoint).MatchedFeature = matchedFeature.Ipt2;

                    _currentPoint.X = ((TrackPointSURF)_currentPoint).SearchWindow.X + (int)matchedFeature.Ipt2.x;
                    _currentPoint.Y = ((TrackPointSURF)_currentPoint).SearchWindow.Y + (int)matchedFeature.Ipt2.y;

                    log.Debug(String.Format("Tracking result: [{0};{1}]", _currentPoint.X, _currentPoint.Y));
                    bMatched = true;
                }

                if (m_bMonitoring)
                {
                    log.Debug(_currentPoint.ToString());
                }

                // Problems:
                // The user did not choose a feature, so we have extra work to do to keep the correspondence between
                // the feature saved in the track point and the actual coordinates the user is looking for.
                // Currently we just discard the user's point entirely and try to track the closest feature.
            }
            return(bMatched);
        }
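COpenSURF.MatchPoint is not shown in these excerpts, but nearest-neighbour feature matching reduces to comparing descriptor vectors. The sketch below is hypothetical (it is not the COpenSURF API): it keeps the candidate whose descriptor has the smallest squared Euclidean distance to a reference descriptor.

        // Illustrative sketch (not the actual COpenSURF API): nearest-neighbour matching of a
        // reference descriptor against candidate descriptors, by squared Euclidean distance.
        private static int FindNearestDescriptor(float[] reference, List<float[]> candidates, out double bestDistance)
        {
            int bestIndex = -1;
            bestDistance = double.MaxValue;

            for (int i = 0; i < candidates.Count; i++)
            {
                double d = 0;
                for (int j = 0; j < reference.Length; j++)
                {
                    double diff = reference[j] - candidates[i][j];
                    d += diff * diff;
                }

                if (d < bestDistance)
                {
                    bestDistance = d;
                    bestIndex    = i;
                }
            }

            // The squared distance is returned; thresholding can be done on it directly.
            return bestIndex;
        }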
Example #8
        public override bool Track(List <AbstractTrackPoint> previousPoints, Bitmap currentImage, long position, out AbstractTrackPoint currentPoint)
        {
            //---------------------------------------------------------------------
            // The input information we have at hand is:
            // - The current bitmap in which we have to find the point.
            // - The coordinates of all the previously tracked points.
            // - Previous tracking scores, stored in the TrackPoints tracked so far.
            //---------------------------------------------------------------------
            TrackPointBlock lastTrackPoint = (TrackPointBlock)previousPoints[previousPoints.Count - 1];
            PointF          lastPoint      = lastTrackPoint.Point;
            PointF          subpixel       = new PointF(lastPoint.X - (int)lastPoint.X, lastPoint.Y - (int)lastPoint.Y);

            bool matched = false;

            currentPoint = null;

            if (lastTrackPoint.Template != null && currentImage != null)
            {
                // Center search zone around last point.
                PointF    searchCenter = lastPoint;
                Rectangle searchZone   = new Rectangle((int)(searchCenter.X - (searchWindow.Width / 2)),
                                                       (int)(searchCenter.Y - (searchWindow.Height / 2)),
                                                       searchWindow.Width,
                                                       searchWindow.Height);

                searchZone.Intersect(new Rectangle(0, 0, currentImage.Width, currentImage.Height));

                //Image<Bgr, Byte> cvTemplate = new Image<Bgr, Byte>(lastTrackPoint.Template);
                //Image<Bgr, Byte> cvImage = new Image<Bgr, Byte>(_CurrentImage);

                Bitmap img = currentImage;
                Bitmap tpl = lastTrackPoint.Template;

                BitmapData imageData    = img.LockBits(new Rectangle(0, 0, img.Width, img.Height), ImageLockMode.ReadOnly, img.PixelFormat);
                BitmapData templateData = tpl.LockBits(new Rectangle(0, 0, tpl.Width, tpl.Height), ImageLockMode.ReadOnly, tpl.PixelFormat);

                Image <Bgra, Byte> cvImage    = new Image <Bgra, Byte>(imageData.Width, imageData.Height, imageData.Stride, imageData.Scan0);
                Image <Bgra, Byte> cvTemplate = new Image <Bgra, Byte>(templateData.Width, templateData.Height, templateData.Stride, templateData.Scan0);

                cvImage.ROI = searchZone;

                int resWidth  = searchZone.Width - lastTrackPoint.Template.Width + 1;
                int resHeight = searchZone.Height - lastTrackPoint.Template.Height + 1;

                Image <Gray, Single> similarityMap = new Image <Gray, Single>(resWidth, resHeight);

                //CvInvoke.cvMatchTemplate(cvImage.Ptr, cvTemplate.Ptr, similarityMap.Ptr, TM_TYPE.CV_TM_SQDIFF_NORMED);
                //CvInvoke.cvMatchTemplate(cvImage.Ptr, cvTemplate.Ptr, similarityMap.Ptr, TM_TYPE.CV_TM_CCORR_NORMED);
                CvInvoke.cvMatchTemplate(cvImage.Ptr, cvTemplate.Ptr, similarityMap.Ptr, TM_TYPE.CV_TM_CCOEFF_NORMED);

                img.UnlockBits(imageData);
                tpl.UnlockBits(templateData);

                // Find max
                double bestScore     = 0;
                PointF bestCandidate = new PointF(-1, -1);
                Point  minLoc        = Point.Empty;
                Point  maxLoc        = Point.Empty;
                double min           = 0;
                double max           = 0;
                CvInvoke.cvMinMaxLoc(similarityMap.Ptr, ref min, ref max, ref minLoc, ref maxLoc, IntPtr.Zero);

                if (max > similarityTreshold)
                {
                    PointF loc = RefineLocation(similarityMap.Data, maxLoc, parameters.RefinementNeighborhood);

                    // The template matching was done on a template aligned with the integer part of the actual position.
                    // We reinject the floating point part of the original position into the result.
                    loc = loc.Translate(subpixel.X, subpixel.Y);

                    bestCandidate = new PointF(searchZone.Left + loc.X + tpl.Width / 2, searchZone.Top + loc.Y + tpl.Height / 2);
                    bestScore     = max;
                }

                #region Monitoring
                if (monitoring)
                {
                    // Save the similarity map to file.
                    Image <Gray, Byte> mapNormalized = new Image <Gray, Byte>(similarityMap.Width, similarityMap.Height);
                    CvInvoke.cvNormalize(similarityMap.Ptr, mapNormalized.Ptr, 0, 255, NORM_TYPE.CV_MINMAX, IntPtr.Zero);

                    Bitmap bmpMap = mapNormalized.ToBitmap();

                    string tplDirectory = @"C:\Users\Joan\Videos\Kinovea\Video Testing\Tracking\simimap";
                    bmpMap.Save(tplDirectory + String.Format(@"\simiMap-{0:000}-{1:0.00}.bmp", previousPoints.Count, bestScore));
                }
                #endregion

                // Result of the matching.
                if (bestCandidate.X != -1 && bestCandidate.Y != -1)
                {
                    currentPoint = CreateTrackPoint(false, bestCandidate, bestScore, position, img, previousPoints);
                    ((TrackPointBlock)currentPoint).Similarity = bestScore;
                }
                else
                {
                    // No match. Create the point at the center of the search window (whatever that might be).
                    currentPoint = CreateTrackPoint(false, lastPoint, 0.0f, position, img, previousPoints);
                    log.Debug("Track failed. No block over the similarity treshold in the search window.");
                }

                matched = true;
            }
            else
            {
                // No image (error case?).
                // Create the point at the last point location.
                currentPoint = CreateTrackPoint(false, lastPoint, 0.0f, position, currentImage, previousPoints);
                log.Debug("Track failed. No input image, or last point doesn't have any cached block image.");
            }

            return(matched);
        }
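RefineLocation is not reproduced here. A common way to obtain sub-pixel precision from the similarity map is a score-weighted centroid (or a parabola fit) around the integer maximum. The sketch below assumes the centroid variant and is only an approximation of what the real helper might do; the [row, column] score array is a stand-in for the similarity map data.

        // Illustrative sketch: sub-pixel refinement of the best-match location as the
        // score-weighted centroid of the similarity scores around the integer maximum.
        private static PointF RefineByCentroid(float[,] scores, Point maxLoc, int neighborhood)
        {
            double sum = 0, sumX = 0, sumY = 0;

            for (int y = maxLoc.Y - neighborhood; y <= maxLoc.Y + neighborhood; y++)
            {
                for (int x = maxLoc.X - neighborhood; x <= maxLoc.X + neighborhood; x++)
                {
                    if (y < 0 || y >= scores.GetLength(0) || x < 0 || x >= scores.GetLength(1))
                        continue;

                    double weight = Math.Max(0, scores[y, x]);
                    sum  += weight;
                    sumX += weight * x;
                    sumY += weight * y;
                }
            }

            if (sum <= 0)
                return new PointF(maxLoc.X, maxLoc.Y);

            return new PointF((float)(sumX / sum), (float)(sumY / sum));
        }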
Example #9
        public override bool Track(List <AbstractTrackPoint> _previousPoints, Bitmap _CurrentImage, long _t, out AbstractTrackPoint _currentPoint)
        {
            //---------------------------------------------------------------------
            // The input information we have at hand is:
            // - The current bitmap in which we have to find the point.
            // - The coordinates of all the previously tracked points.
            // - Previous tracking info, stored in the TrackPoints tracked so far.
            //---------------------------------------------------------------------

            TrackPointBlock lastTrackPoint = (TrackPointBlock)_previousPoints[_previousPoints.Count - 1];
            Point           lastPoint      = lastTrackPoint.ToPoint();

            // Compute the projected point.
            // (coordinate of the point that would keep following the same motion as in last step).
            Point forecast;

            if (_previousPoints.Count > 1)
            {
                Point penultimate = _previousPoints[_previousPoints.Count - 2].ToPoint();

                int dx = lastPoint.X - penultimate.X;
                int dy = lastPoint.Y - penultimate.Y;

                forecast = new Point(lastPoint.X + dx, lastPoint.Y + dy);
            }
            else
            {
                forecast = _previousPoints[0].ToPoint();
            }

            bool bMatched = false;

            _currentPoint = null;

            if (lastTrackPoint.Template != null && _CurrentImage != null)
            {
                // Center search zone around last point.
                Point     searchCenter = lastPoint;
                Rectangle searchZone   = new Rectangle(searchCenter.X - (m_SearchWindowSize.Width / 2),
                                                       searchCenter.Y - (m_SearchWindowSize.Height / 2),
                                                       m_SearchWindowSize.Width,
                                                       m_SearchWindowSize.Height);

                // Convert to grayscale prior to match, if necessary.
                Bitmap workingImage = m_bWorkOnGrayscale ? Grayscale.CommonAlgorithms.BT709.Apply(_CurrentImage) : _CurrentImage;

                double fBestScore    = 0;
                Point  bestCandidate = new Point(-1, -1);

                if (m_UpdateStrategy == UpdateStrategy.Both)
                {
                    // Try to match the initial reference block in the image first.
                    // If it gets a score over a given threshold, we give it the priority over the I-1 block.
                    // This is an attempt at correcting the drift issue.

                    // Find the last reference block (last block manually chosen by the user).
                    int iLastReferenceBlock = 0;
                    for (int b = _previousPoints.Count - 1; b >= 0; b--)
                    {
                        if (((TrackPointBlock)_previousPoints[b]).IsReferenceBlock)
                        {
                            iLastReferenceBlock = b;
                            break;
                        }
                    }

                    Bitmap originalTemplate        = ((TrackPointBlock)_previousPoints[iLastReferenceBlock]).Template;
                    Bitmap workingOriginalTemplate = m_bWorkOnGrayscale ? Grayscale.CommonAlgorithms.BT709.Apply(originalTemplate) : originalTemplate;

                    ITemplateMatching originalMatcher;
                    if (m_bCorrelationMatching)
                    {
                        originalMatcher = new CorrelationTemplateMatching(m_fOriginalSimilarityThreshold);
                    }
                    else
                    {
                        originalMatcher = new ExhaustiveTemplateMatching(m_fOriginalSimilarityThreshold);
                    }

                    TemplateMatch[] matchingsOriginal = originalMatcher.ProcessImage(workingImage, workingOriginalTemplate, searchZone);

                    if (matchingsOriginal.Length > 0)
                    {
                        // We found a block with a very good similarity to the original block selected by the user.
                        // It will take precedence over the I-1 block.
                        TemplateMatch tm = matchingsOriginal[0];
                        bestCandidate = new Point(tm.Rectangle.Left + (tm.Rectangle.Width / 2), tm.Rectangle.Top + (tm.Rectangle.Height / 2));
                        fBestScore    = tm.Similarity;

                        if (m_bMonitoring)
                        {
                            log.Debug(String.Format("Original template found with good similarity ({0:0.000}), {1} candidates.", tm.Similarity, matchingsOriginal.Length));
                        }
                    }
                    else
                    {
                        log.Debug(String.Format("Original template not found"));
                    }
                }

                if (bestCandidate.X == -1 || bestCandidate.Y == -1)
                {
                    Bitmap workingTemplate = m_bWorkOnGrayscale ? Grayscale.CommonAlgorithms.BT709.Apply(lastTrackPoint.Template) : lastTrackPoint.Template;

                    ITemplateMatching templateMatcher;
                    if (m_bCorrelationMatching)
                    {
                        templateMatcher = new CorrelationTemplateMatching(m_fSimilarityTreshold);
                    }
                    else
                    {
                        templateMatcher = new ExhaustiveTemplateMatching(m_fSimilarityTreshold);
                    }

                    TemplateMatch[] matchings = templateMatcher.ProcessImage(workingImage, workingTemplate, searchZone);

                    if (matchings.Length > 0)
                    {
                        // Find the best candidate.
                        // Score is a weighted average of similarity and closeness to the forecast
                        // (a sketch of one possible scoring function follows this example).
                        int    iBestCandidate  = -1;
                        double fWinnerDistance = 0;
                        for (int i = 0; i < matchings.Length; i++)
                        {
                            TemplateMatch tm = matchings[i];
                            //if(_previousPoints.Count > 1)
                            {
                                Point  candidatePoint      = new Point(tm.Rectangle.Left + (tm.Rectangle.Width / 2), tm.Rectangle.Top + (tm.Rectangle.Height / 2));
                                double fDistanceToForecast = CalibrationHelper.PixelDistance(candidatePoint, forecast);
                                double fScore = GetScore(tm.Similarity, fDistanceToForecast, m_fMaxDistance);

                                if (fScore > fBestScore)
                                {
                                    fBestScore      = fScore;
                                    fWinnerDistance = fDistanceToForecast;
                                    iBestCandidate  = i;
                                    bestCandidate   = candidatePoint;
                                }
                            }
                        }
                        if (m_bMonitoring)
                        {
                            log.Debug(String.Format("Last template found with : Score:{0:0.000}, Similarity:{1:0.000} (index:{2:00}/{3:00}), Distance to forecast (px):{4:0.00}",
                                                    fBestScore,
                                                    matchings[iBestCandidate].Similarity,
                                                    iBestCandidate,
                                                    matchings.Length,
                                                    fWinnerDistance));
                        }
                    }
                    else
                    {
                        log.Debug(String.Format("Last template not found, or score too low."));
                    }
                }

                // Result of the matching.
                if (bestCandidate.X != -1 && bestCandidate.Y != -1)
                {
                    // Save template in the point.
                    _currentPoint = CreateTrackPoint(false, bestCandidate.X, bestCandidate.Y, fBestScore, _t, _CurrentImage, _previousPoints);
                    ((TrackPointBlock)_currentPoint).Similarity = fBestScore;

                    // Finally, it is only considered a match if the score is over the threshold.
                    if (fBestScore >= m_fScoreTreshold || _previousPoints.Count == 1)
                    {
                        bMatched = true;
                    }
                }
                else
                {
                    // No match. Create the point at the center of the search window (whatever that might be).
                    _currentPoint = CreateTrackPoint(false, searchCenter.X, searchCenter.Y, fBestScore, _t, _CurrentImage, _previousPoints);
                    log.Debug("Track failed. No block over the similarity treshold in the search window.");
                }

                #region Monitoring
                if (m_bMonitoring)
                {
                    // Save current template to file, to visually monitor the drift.
                    string tplDirectory = @"C:\Documents and Settings\Administrateur\Mes documents\Dev  Prog\Videa\Video Testing\Tracking\Template Update";
                    if (_previousPoints.Count == 1)
                    {
                        // Clean up folder.
                        string[] tplFiles = Directory.GetFiles(tplDirectory, "*.bmp");
                        foreach (string f in tplFiles)
                        {
                            File.Delete(f);
                        }
                    }
                    String iFileName = String.Format("{0}\\tpl-{1:000}.bmp", tplDirectory, _previousPoints.Count);
                    ((TrackPointBlock)_currentPoint).Template.Save(iFileName);
                }
                #endregion
            }
            else
            {
                // No image (error case?).
                // Create the point at the last point location.
                _currentPoint = CreateTrackPoint(false, lastTrackPoint.X, lastTrackPoint.Y, 0.0f, _t, _CurrentImage, _previousPoints);
                log.Debug("Track failed. No input image, or last point doesn't have any cached block image.");
            }

            return(bMatched);
        }
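GetScore is not included in these excerpts; the comments above only describe it as a weighted average of template similarity and closeness to the forecast position. One plausible form, given nothing more than that description, is sketched below; the 0.8 / 0.2 weights are arbitrary placeholders, not the values of the original implementation.

        // Illustrative sketch of a combined score: template similarity weighted against
        // closeness to the forecast position. Weights are placeholders.
        private static double GetScoreSketch(double similarity, double distanceToForecast, double maxDistance)
        {
            // Map the distance to a [0..1] closeness value: 1 at the forecast, 0 at maxDistance or beyond.
            double closeness = 1.0 - (Math.Min(distanceToForecast, maxDistance) / maxDistance);

            return (0.8 * similarity) + (0.2 * closeness);
        }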
Example #10
 /// <summary>
 /// Draw a visual representation of the tracking taking place.
 /// This may only make sense for users who have an idea of how the tracking works.
 /// </summary>
 public abstract void Draw(Graphics _canvas, AbstractTrackPoint _currentPoint, Point _directZoomTopLeft, double _fStretchFactor, Color _color, double _fOpacityFactor);
Example #11
 /// <summary>
 /// Performs the tracking.
 /// Finds the coordinates of the tracked point in the current image, using data from previous matches.
 /// </summary>
 /// <param name="_previousPoints">The list of tracked points so far.</param>
 /// <param name="_CurrentImage">Current image.</param>
 /// <param name="_currentPoint">The resulting point that should be added to the list.</param>
 /// <param name="_t">The current timestamp to create the TrackPoint. Relative to the first timestamp.</param>
 /// <returns>true if the tracking is reliable, false if the point couldn't be found.</returns>
 public abstract bool Track(List <AbstractTrackPoint> _previousPoints, Bitmap _CurrentImage, long _t, out AbstractTrackPoint _currentPoint);