Example #1
        private static void FindMatch(Image<Gray, Byte> modelImage, Image<Gray, byte> observedImage, SurfSettings surfSettings, out long matchTime, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints, out Matrix<int> indices, out Matrix<byte> mask, out HomographyMatrix homography)
        {
            #region SURF Detector Region
            double hessianThresh = 500;
            double uniquenessThreshold = 0.8;

            if (surfSettings != null)
            {
                hessianThresh = surfSettings.HessianThresh.Value;
                uniquenessThreshold = surfSettings.UniquenessThreshold.Value;
            }

            SURFDetector surfCPU = new SURFDetector(hessianThresh, false);
            #endregion

            int k = 2;
            Stopwatch watch;
            homography = null;

            //extract features from the object image
            modelKeyPoints = new VectorOfKeyPoint();
            Matrix<float> modelDescriptors = surfCPU.DetectAndCompute(modelImage, null, modelKeyPoints);

            watch = Stopwatch.StartNew();

            // extract features from the observed image
            observedKeyPoints = new VectorOfKeyPoint();
            Matrix<float> observedDescriptors = surfCPU.DetectAndCompute(observedImage, null, observedKeyPoints);
            BruteForceMatcher<float> matcher = new BruteForceMatcher<float>(DistanceType.L2);
            matcher.Add(modelDescriptors);

            indices = new Matrix<int>(observedDescriptors.Rows, k);
            using (Matrix<float> dist = new Matrix<float>(observedDescriptors.Rows, k))
            {
                matcher.KnnMatch(observedDescriptors, indices, dist, k, null);
                mask = new Matrix<byte>(dist.Rows, 1);
                mask.SetValue(255);
                Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
            }

            int nonZeroCount = CvInvoke.cvCountNonZero(mask);
            if (nonZeroCount >= 4)
            {
                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                if (nonZeroCount >= 4)
                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
            }

            watch.Stop();

            matchTime = watch.ElapsedMilliseconds;
        }
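A minimal caller sketch for the FindMatch helper above (not part of the original example): the file names are assumptions, and passing null for surfSettings falls back to the defaults declared at the top of the method.

        // Usage sketch (file names are assumptions; a null surfSettings uses the built-in defaults)
        long matchTime;
        VectorOfKeyPoint modelKeyPoints, observedKeyPoints;
        Matrix<int> indices;
        Matrix<byte> mask;
        HomographyMatrix homography;
        using (Image<Gray, Byte> model = new Image<Gray, byte>("model.png"))
        using (Image<Gray, byte> observed = new Image<Gray, byte>("scene.png"))
        {
            FindMatch(model, observed, null, out matchTime, out modelKeyPoints, out observedKeyPoints,
                      out indices, out mask, out homography);
            // homography stays null when fewer than 4 consistent matches survive the voting steps
        }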
		public AuxiliaryViewsForm(MainForm mainForm, VisualOdometer visualOdometer, HomographyMatrix groundProjectionTransformation)
		{
			InitializeComponent();
			this.ShowInTaskbar = false;
			m_MainForm = mainForm;
			m_VisualOdometer = visualOdometer;
			m_GroundProjectionTransformation = groundProjectionTransformation;
		}
 /// <summary>
 /// Detect the planar object in the specified image
 /// </summary>
 /// <param name="image">The image where the planar object will be detected</param>
 /// <param name="h">The homography matrix which will be updated</param>
 /// <returns>The four corners of the detected region</returns>
 public PointF[] Detect(Image<Gray, Byte> image, HomographyMatrix h)
 {
     using (MemStorage stor = new MemStorage())
     {
         Seq<PointF> corners = new Seq<PointF>(stor);
         CvPlanarObjectDetectorDetect(_ptr, image, h, corners);
         return corners.ToArray();
     }
 }
		internal TranslationAnalyzer(VisualOdometer visualOdometer, HomographyMatrix groundProjectionTransformation)
		{
			m_VisualOdometer = visualOdometer;
			m_GroundProjectionTransformation = groundProjectionTransformation;
			m_GroundFeatures = new List<TrackedFeature>();
			m_UsedGroundFeatures = new List<TrackedFeature>();
			m_ScratchPadUsedGroundFeatures = new List<TrackedFeature>();
			m_TranslationIncrements = new List<Point>();
			m_AcceptedDirectionMisalignment = Angle.FromDegrees(45);
		}
		public VisualOdometer(Capture capture, CameraParameters cameraParameters, HomographyMatrix birdsEyeViewTransformation, OpticalFlow opticalFlow)
		{
			m_Capture = capture;
			m_CameraParameters = cameraParameters;

			this.GroundRegionTop = OdometerSettings.Default.GroundRegionTop;
			this.SkyRegionBottom = OdometerSettings.Default.SkyRegionBottom;

			this.OpticalFlow = opticalFlow;
			m_RotationAnalyzer = new RotationAnalyzer(this);
			m_TranslationAnalyzer = new TranslationAnalyzer(this, birdsEyeViewTransformation);
		}
		public static void Save(HomographyMatrix homographyMatrix, string filePath)
		{
			using (TextWriter writer = new StreamWriter(filePath))
			{
				for (int x = 0; x < 3; x++)
				{
					for (int y = 0; y < 3; y++)
					{
						writer.WriteLine(homographyMatrix[x, y].ToString(CultureInfo.InvariantCulture));
					}
				}
			}
		}
Example #7
        internal void SetBallRealPosition(HomographyMatrix warpMatrix)
        {
            PointF[] pointArray = new PointF[1];
            pointArray[0] = this.Pos;
            warpMatrix.ProjectPoints(pointArray);
            this.RelPos = pointArray[0];

            if (this.RelPos.X < -50) RelPos.X = -50;
            if (this.RelPos.X > Pitch.PitchWidth + 50) RelPos.X = Pitch.PitchWidth + 50;
            if (this.RelPos.Y < 0) RelPos.Y = 0;
            if (this.RelPos.Y > Pitch.PitchHeight + 50) RelPos.Y = Pitch.PitchHeight + 50;

            if (RelPosIsOnTable())
                AddPosition(RelPos);
        }
		public static HomographyMatrix Load(string filePath)
		{
			HomographyMatrix homographyMatrix = new HomographyMatrix();
			using (TextReader reader = new StreamReader(filePath))
			{
				for (int x = 0; x < 3; x++)
				{
					for (int y = 0; y < 3; y++)
					{
						homographyMatrix[x, y] = GetNextValue(reader);
					}
				}
			}

			return homographyMatrix;
		}
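A short round-trip sketch for the Save/Load pair above (HomographyMatrixSupport is the containing class used elsewhere in these examples; the file name is an assumption).

		// Round-trip sketch (file name is an assumption)
		HomographyMatrix original = new HomographyMatrix();
		for (int x = 0; x < 3; x++)
			for (int y = 0; y < 3; y++)
				original[x, y] = (x == y) ? 1.0 : 0.0;   // start from the identity transform
		HomographyMatrixSupport.Save(original, "homography.txt");
		HomographyMatrix restored = HomographyMatrixSupport.Load("homography.txt");
		// restored[x, y] now equals original[x, y] for each of the nine entries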
Example #9
 public bool Recognize(Image<Gray, Byte> observedImage, out PointF[] Region)
 {
     // extract features from the observed image
     observedKeyPoints = new VectorOfKeyPoint();
     Matrix<float> observedDescriptors = surfCPU.DetectAndCompute(observedImage, null, observedKeyPoints);
     BruteForceMatcher<float> matcher = new BruteForceMatcher<float>(DistanceType.L2);
     matcher.Add(modelDescriptors);
     indices = new Matrix<int>(observedDescriptors.Rows, k);
     using (Matrix<float> dist = new Matrix<float>(observedDescriptors.Rows, k))
     {
         matcher.KnnMatch(observedDescriptors, indices, dist, k, null);
         mask = new Matrix<byte>(dist.Rows, 1);
         mask.SetValue(255);
         Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
     }
     int nonZeroCount = CvInvoke.cvCountNonZero(mask);
     if (nonZeroCount >= requiredNonZeroCount)
     {
         nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, scaleIncrement, RotationBins);
         if (nonZeroCount >= requiredNonZeroCount)
             homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, ransacReprojThreshold);
     }
     bool ObjectFound;
     if (homography != null)
     {  //draw a rectangle along the projected model
         Rectangle rect = modelImage.ROI;
         Region = new PointF[] {
         new PointF(rect.Left, rect.Bottom),
         new PointF(rect.Right, rect.Bottom),
         new PointF(rect.Right, rect.Top),
         new PointF(rect.Left, rect.Top)};
         homography.ProjectPoints(Region);
         ObjectFound = true;
     }
     else
     {
         Region = null;
         ObjectFound = false;
     }
     return ObjectFound;
 }
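A usage sketch for Recognize (not part of the original snippet); the observed-image path is an assumption, and the model image, detector and thresholds are whatever fields the surrounding class initialized.

     // Usage sketch (the observed-image path is an assumption)
     PointF[] region;
     using (Image<Gray, Byte> observed = new Image<Gray, byte>("scene.png"))
     {
         if (Recognize(observed, out region))
         {
             // region holds the four corners of the model ROI projected into the observed image
         }
     }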
Example #10
		public MainForm()
		{
			InitializeComponent();
			m_UnitsComboBox.SelectedIndex = 0;

			CameraParameters cameraParameters = null;
			HomographyMatrix groundProjectionTransformation = null;

			bool useCamera = false;

			if (useCamera)
			{
				m_Capture = new Capture();
				m_Capture.SetCaptureProperty(Emgu.CV.CvEnum.CAP_PROP.CV_CAP_PROP_FRAME_WIDTH, 1280);
				m_Capture.SetCaptureProperty(Emgu.CV.CvEnum.CAP_PROP.CV_CAP_PROP_FRAME_HEIGHT, 720);

				cameraParameters = CameraParameters.Load(@"C:\svnDev\oss\Google\drh-visual-odometry\CalibrationFiles\MicrosoftCinema\Focus14\1280x720\MicrosoftCinemaFocus14_1280x720.txt");

				groundProjectionTransformation = HomographyMatrixSupport.Load(@"C:\svnDev\oss\Google\drh-visual-odometry\CalibrationFiles\MicrosoftCinema\Focus14\1280x720\BirdsEyeViewTransformationForCalculation.txt");
				m_GroundProjectionTransformationForUI = HomographyMatrixSupport.Load(@"C:\svnDev\oss\Google\drh-visual-odometry\CalibrationFiles\MicrosoftCinema\Focus14\1280x720\BirdsEyeViewTransformationForUI.txt");
			}
			else
			{
				m_Capture = new Capture(@"C:\svnDev\oss\Google\drh-visual-odometry\TestVideos\2010-07-18 11-10-22.853.wmv");
				m_Timer.Interval = 33;
				m_Timer.Enabled = true;

				cameraParameters = CameraParameters.Load(@"C:\svnDev\oss\Google\drh-visual-odometry\CalibrationFiles\MicrosoftCinema\Focus12\1280x720\MicrosoftCinemaFocus12_1280x720.txt");

				groundProjectionTransformation = HomographyMatrixSupport.Load(@"C:\svnDev\oss\Google\drh-visual-odometry\CalibrationFiles\MicrosoftCinema\Focus12\1280x720\BirdsEyeViewTransformationForCalculation.txt");
				m_GroundProjectionTransformationForUI = HomographyMatrixSupport.Load(@"C:\svnDev\oss\Google\drh-visual-odometry\CalibrationFiles\MicrosoftCinema\Focus12\1280x720\BirdsEyeViewTransformationForUI.txt");
			}

			m_VisualOdometer = new VisualOdometer(m_Capture, cameraParameters, groundProjectionTransformation, new OpticalFlow());

			UpdateFromModel();

			m_VisualOdometer.Changed += new EventHandler(OnVisualOdometerChanged);
			Application.Idle += OnApplicationIdle;
		}
        public static void FindMatch(Image<Gray, Byte> modelImage, Image<Gray, byte> observedImage, out long matchTime, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints, out Matrix<int> indices, out Matrix<byte> mask, out HomographyMatrix homography)
        {
            int k = 2;
             double uniquenessThreshold = 0.8;
             SURFDetector surfCPU = new SURFDetector(500, false);
             Stopwatch watch;
             homography = null;
             #if !IOS
             if (GpuInvoke.HasCuda)
             {
            GpuSURFDetector surfGPU = new GpuSURFDetector(surfCPU.SURFParams, 0.01f);
            using (GpuImage<Gray, Byte> gpuModelImage = new GpuImage<Gray, byte>(modelImage))
            //extract features from the object image
            using (GpuMat<float> gpuModelKeyPoints = surfGPU.DetectKeyPointsRaw(gpuModelImage, null))
            using (GpuMat<float> gpuModelDescriptors = surfGPU.ComputeDescriptorsRaw(gpuModelImage, null, gpuModelKeyPoints))
            using (GpuBruteForceMatcher<float> matcher = new GpuBruteForceMatcher<float>(DistanceType.L2))
            {
               modelKeyPoints = new VectorOfKeyPoint();
               surfGPU.DownloadKeypoints(gpuModelKeyPoints, modelKeyPoints);
               watch = Stopwatch.StartNew();

               // extract features from the observed image
               using (GpuImage<Gray, Byte> gpuObservedImage = new GpuImage<Gray, byte>(observedImage))
               using (GpuMat<float> gpuObservedKeyPoints = surfGPU.DetectKeyPointsRaw(gpuObservedImage, null))
               using (GpuMat<float> gpuObservedDescriptors = surfGPU.ComputeDescriptorsRaw(gpuObservedImage, null, gpuObservedKeyPoints))
               using (GpuMat<int> gpuMatchIndices = new GpuMat<int>(gpuObservedDescriptors.Size.Height, k, 1, true))
               using (GpuMat<float> gpuMatchDist = new GpuMat<float>(gpuObservedDescriptors.Size.Height, k, 1, true))
               using (GpuMat<Byte> gpuMask = new GpuMat<byte>(gpuMatchIndices.Size.Height, 1, 1))
               using (Stream stream = new Stream())
               {
                  matcher.KnnMatchSingle(gpuObservedDescriptors, gpuModelDescriptors, gpuMatchIndices, gpuMatchDist, k, null, stream);
                  indices = new Matrix<int>(gpuMatchIndices.Size);
                  mask = new Matrix<byte>(gpuMask.Size);

                  // GPU implementation of VoteForUniqueness
                  using (GpuMat<float> col0 = gpuMatchDist.Col(0))
                  using (GpuMat<float> col1 = gpuMatchDist.Col(1))
                  {
                     GpuInvoke.Multiply(col1, new MCvScalar(uniquenessThreshold), col1, stream);
                     GpuInvoke.Compare(col0, col1, gpuMask, CMP_TYPE.CV_CMP_LE, stream);
                  }

                  observedKeyPoints = new VectorOfKeyPoint();
                  surfGPU.DownloadKeypoints(gpuObservedKeyPoints, observedKeyPoints);

                  //wait for the stream to complete its tasks
                  // We can perform other CPU-intensive work here while waiting for the stream to complete.
                  stream.WaitForCompletion();

                  gpuMask.Download(mask);
                  gpuMatchIndices.Download(indices);

                  if (GpuInvoke.CountNonZero(gpuMask) >= 4)
                  {
                     int nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                     if (nonZeroCount >= 4)
                        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
                  }

                  watch.Stop();
               }
            }
             }
             else
             #endif
             {
            //extract features from the object image
            modelKeyPoints = new VectorOfKeyPoint();
            Matrix<float> modelDescriptors = surfCPU.DetectAndCompute(modelImage, null, modelKeyPoints);

            watch = Stopwatch.StartNew();

            // extract features from the observed image
            observedKeyPoints = new VectorOfKeyPoint();
            Matrix<float> observedDescriptors = surfCPU.DetectAndCompute(observedImage, null, observedKeyPoints);
            BruteForceMatcher<float> matcher = new BruteForceMatcher<float>(DistanceType.L2);
            matcher.Add(modelDescriptors);

            indices = new Matrix<int>(observedDescriptors.Rows, k);
            using (Matrix<float> dist = new Matrix<float>(observedDescriptors.Rows, k))
            {
               matcher.KnnMatch(observedDescriptors, indices, dist, k, null);
               mask = new Matrix<byte>(dist.Rows, 1);
               mask.SetValue(255);
               Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
            }

            int nonZeroCount = CvInvoke.cvCountNonZero(mask);
            if (nonZeroCount >= 4)
            {
               nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
               if (nonZeroCount >= 4)
                  homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
            }

            watch.Stop();
             }
             matchTime = watch.ElapsedMilliseconds;
        }
Example #12
		public MainForm()
		{
			InitializeComponent();
			m_CameraParameters = CameraParameters.Load(@"..\..\..\..\CalibrationFiles\MicrosoftCinema\Focus14\1280x720\MicrosoftCinemaFocus14_1280x720.txt");

			m_RawImage = new Image<Bgr, byte>(@"..\..\..\..\CalibrationFiles\MicrosoftCinema\Focus14\1280x720\GroundProjectionCalibration.jpg");
			this.CurrentImage = m_RawImage.Clone();
			this.BirdsEyeImage = m_RawImage.Clone();

			InitializeUndistortMap(m_RawImage);

			Undistort(m_RawImage, this.CurrentImage);

			this.ChessBoard = new ChessBoard(8, 10);
			PointF[] foundCorners = CollectImageCorners();

			DrawFoundCorners(this.CurrentImage, foundCorners);

			// We pick four corners for perspective transform
			PointF[] outerCorners = new PointF[4];
			outerCorners[0] = foundCorners[0];
			outerCorners[1] = foundCorners[this.ChessBoard.PatternSize.Width - 1];
			outerCorners[2] = foundCorners[this.ChessBoard.PatternSize.Width * this.ChessBoard.PatternSize.Height - this.ChessBoard.PatternSize.Width];
			outerCorners[3] = foundCorners[this.ChessBoard.PatternSize.Width * this.ChessBoard.PatternSize.Height - 1];
			DrawOuterCorners(this.CurrentImage, outerCorners);

			float side;
			float bottom;
			float centerX;

			side = 25.0f;
			bottom = 310.0f;

			PointF[] physicalPointsForCalculation = new PointF[4];
			physicalPointsForCalculation[0] = new PointF(-3 * side, bottom + 8 * side);
			physicalPointsForCalculation[1] = new PointF(+3 * side, bottom + 8 * side);
			physicalPointsForCalculation[2] = new PointF(-3 * side, bottom);
			physicalPointsForCalculation[3] = new PointF(+3 * side, bottom);

			m_BirdsEyeViewTransformationForCalculation = CameraCalibration.GetPerspectiveTransform(outerCorners, physicalPointsForCalculation);
			HomographyMatrixSupport.Save(m_BirdsEyeViewTransformationForCalculation, "BirdsEyeViewTransformationForCalculation.txt");

			side = 8f;
			bottom = 700.0f;
			centerX = (float)m_CameraParameters.Intrinsic.Cx;

			PointF[] physicalPointsForUI = new PointF[4];
			physicalPointsForUI[0] = new PointF(-3 * side + centerX, bottom - 8 * side);
			physicalPointsForUI[1] = new PointF(+3 * side + centerX, bottom - 8 * side);
			physicalPointsForUI[2] = new PointF(-3 * side + centerX, bottom);
			physicalPointsForUI[3] = new PointF(+3 * side + centerX, bottom);

			m_BirdsEyeViewTransformationForUI = CameraCalibration.GetPerspectiveTransform(outerCorners, physicalPointsForUI);
			HomographyMatrixSupport.Save(m_BirdsEyeViewTransformationForUI, "BirdsEyeViewTransformationForUI.txt");

			//m_BirdsEyeViewTransformationForCalculation.ProjectPoints(outerCorners);

			CreateAndDrawBirdsEyeView();
		}
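A small sanity-check sketch along the lines of the commented-out ProjectPoints call above; if placed inside the constructor (before CreateAndDrawBirdsEyeView), projecting the four picked corners through the computed transform should land approximately on the physical calibration points.

			// Sanity-check sketch: project the picked image corners through the transform
			PointF[] check = (PointF[])outerCorners.Clone();
			m_BirdsEyeViewTransformationForCalculation.ProjectPoints(check);
			// check[i] should now be close to physicalPointsForCalculation[i]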
Example #13
    public Image <Gray, byte> ObjectTrackingSurf(Image <Gray, byte> liveImg, Image <Gray, byte> templateImg, bool showOnLiveImg)
    {
        vkpLiveKeyPoint    = surfDetector.DetectKeyPointsRaw(liveImg, null);
        mtxLiveDescriptors = surfDetector.ComputeDescriptorsRaw(liveImg, null, vkpLiveKeyPoint);

        vkpTemplateKeyPoint    = surfDetector.DetectKeyPointsRaw(templateImg, null);
        mtxTemplateDescriptors = surfDetector.ComputeDescriptorsRaw(templateImg, null, vkpTemplateKeyPoint);

        bruteForceMatcher = new BruteForceMatcher <Single> (DistanceType.L2);
        bruteForceMatcher.Add(mtxTemplateDescriptors);

        mtxMatchIndices = new Matrix <int> (mtxLiveDescriptors.Rows, KNumNearestNeighbors);
        mtxDistance     = new Matrix <Single> (mtxLiveDescriptors.Rows, KNumNearestNeighbors);

        bruteForceMatcher.KnnMatch(mtxLiveDescriptors, mtxMatchIndices, mtxDistance, KNumNearestNeighbors, null);

        mtxMask = new Matrix <Byte> (mtxDistance.Rows, 1);
        mtxMask.SetValue(255);
        Features2DToolbox.VoteForUniqueness(mtxDistance, UniquenessThreshold, mtxMask);

        NumNonZeroElements = CvInvoke.cvCountNonZero(mtxMask);
        if (NumNonZeroElements >= 4)
        {
            NumNonZeroElements = Features2DToolbox.VoteForSizeAndOrientation(vkpTemplateKeyPoint,
                                                                             vkpLiveKeyPoint,
                                                                             mtxMatchIndices,
                                                                             mtxMask,
                                                                             ScaleIncrement,
                                                                             RotationBins);
            if (NumNonZeroElements >= 4)
            {
                homographyMatrix = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(vkpTemplateKeyPoint,
                                                                                            vkpLiveKeyPoint,
                                                                                            mtxMatchIndices,
                                                                                            mtxMask,
                                                                                            RansacReprojectionThreshold);
            }
        }
        //templateImgCopy = templateImg.Copy ();
        //templateImgCopy.Draw (new Rectangle (1, 1, templateImgCopy.Width - 3, templateImgCopy.Height - 3), boxGray, 2);
        liveImgCopy = liveImg.Copy();         //.ConcateHorizontal(templateImgCopy);

        if (homographyMatrix != null)
        {
            rect.X       = 0;
            rect.Y       = 0;
            rect.Width   = templateImg.Width;
            rect.Height  = templateImg.Height;
            pointsF[0].X = rect.Left; pointsF[0].Y = rect.Top;
            pointsF[1].X = rect.Right; pointsF[1].Y = rect.Top;
            pointsF[2].X = rect.Right; pointsF[2].Y = rect.Bottom;
            pointsF[3].X = rect.Left; pointsF[3].Y = rect.Bottom;

            homographyMatrix.ProjectPoints(pointsF);
            //Debug.Log("live w: "+ liveImgCopy.Width + "live h: " + liveImgCopy.Height);
            //Debug.Log ("pf0: " + pointsF[0] + "pf1: "+ pointsF[1] + " pf2: " + pointsF[2] + " pf3: " + pointsF[3]);

            centerPointF.X = 0;
            centerPointF.Y = 0;
            for (int i = 0; i < pointsF.Length; ++i)
            {
                centerPointF.X += pointsF[i].X;
                centerPointF.Y += pointsF[i].Y;
            }
            centerPointF.X = centerPointF.X / 4f;
            centerPointF.Y = centerPointF.Y / 4f;
            //Debug.Log("centerF: " + centerPointF);
            points[0] = Point.Round(pointsF[0]);
            points[1] = Point.Round(pointsF[1]);
            points[2] = Point.Round(pointsF[2]);
            points[3] = Point.Round(pointsF[3]);

            liveImgCopy.DrawPolyline(points, true, boxGray, 4);
        }
        if (showOnLiveImg)
        {
            return(liveImgCopy);
        }
        else
        {
            return(templateImgCopy);
        }
    }
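A caller sketch for ObjectTrackingSurf (not part of the original); the image paths are assumptions, and the detector, thresholds and scratch matrices are expected to be initialized elsewhere in the class.

    // Usage sketch (image paths are assumptions)
    Image<Gray, byte> live = new Image<Gray, byte>("frame.png");
    Image<Gray, byte> template = new Image<Gray, byte>("template.png");
    Image<Gray, byte> annotated = ObjectTrackingSurf(live, template, true);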
Example #14
        static void Run()
        {
            Image <Gray, Byte> modelImage    = new Image <Gray, byte>("box.png");
            Image <Gray, Byte> observedImage = new Image <Gray, byte>("box_in_scene.png");
            Stopwatch          watch;
            HomographyMatrix   homography = null;

            SURFDetector surfCPU = new SURFDetector(500, false);

            VectorOfKeyPoint modelKeyPoints;
            VectorOfKeyPoint observedKeyPoints;
            Matrix <int>     indices;
            Matrix <float>   dist;
            Matrix <byte>    mask;

            if (GpuInvoke.HasCuda)
            {
                GpuSURFDetector surfGPU = new GpuSURFDetector(surfCPU.SURFParams, 0.01f);
                using (GpuImage <Gray, Byte> gpuModelImage = new GpuImage <Gray, byte>(modelImage))
                    //extract features from the object image
                    using (GpuMat <float> gpuModelKeyPoints = surfGPU.DetectKeyPointsRaw(gpuModelImage, null))
                        using (GpuMat <float> gpuModelDescriptors = surfGPU.ComputeDescriptorsRaw(gpuModelImage, null, gpuModelKeyPoints))
                            using (GpuBruteForceMatcher matcher = new GpuBruteForceMatcher(GpuBruteForceMatcher.DistanceType.L2))
                            {
                                modelKeyPoints = new VectorOfKeyPoint();
                                surfGPU.DownloadKeypoints(gpuModelKeyPoints, modelKeyPoints);
                                watch = Stopwatch.StartNew();

                                // extract features from the observed image
                                using (GpuImage <Gray, Byte> gpuObservedImage = new GpuImage <Gray, byte>(observedImage))
                                    using (GpuMat <float> gpuObservedKeyPoints = surfGPU.DetectKeyPointsRaw(gpuObservedImage, null))
                                        using (GpuMat <float> gpuObservedDescriptors = surfGPU.ComputeDescriptorsRaw(gpuObservedImage, null, gpuObservedKeyPoints))
                                            using (GpuMat <int> gpuMatchIndices = new GpuMat <int>(gpuObservedDescriptors.Size.Height, 2, 1))
                                                using (GpuMat <float> gpuMatchDist = new GpuMat <float>(gpuMatchIndices.Size, 1))
                                                {
                                                    observedKeyPoints = new VectorOfKeyPoint();
                                                    surfGPU.DownloadKeypoints(gpuObservedKeyPoints, observedKeyPoints);

                                                    matcher.KnnMatch(gpuObservedDescriptors, gpuModelDescriptors, gpuMatchIndices, gpuMatchDist, 2, null);

                                                    indices = new Matrix <int>(gpuMatchIndices.Size);
                                                    dist    = new Matrix <float>(indices.Size);
                                                    gpuMatchIndices.Download(indices);
                                                    gpuMatchDist.Download(dist);

                                                    mask = new Matrix <byte>(dist.Rows, 1);

                                                    mask.SetValue(255);

                                                    Features2DTracker.VoteForUniqueness(dist, 0.8, mask);

                                                    int nonZeroCount = CvInvoke.cvCountNonZero(mask);
                                                    if (nonZeroCount >= 4)
                                                    {
                                                        nonZeroCount = Features2DTracker.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                                                        if (nonZeroCount >= 4)
                                                        {
                                                            homography = Features2DTracker.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 3);
                                                        }
                                                    }

                                                    watch.Stop();
                                                }
                            }
            }
            else
            {
                //extract features from the object image
                modelKeyPoints = surfCPU.DetectKeyPointsRaw(modelImage, null);
                //MKeyPoint[] kpts = modelKeyPoints.ToArray();
                Matrix <float> modelDescriptors = surfCPU.ComputeDescriptorsRaw(modelImage, null, modelKeyPoints);

                watch = Stopwatch.StartNew();

                // extract features from the observed image
                observedKeyPoints = surfCPU.DetectKeyPointsRaw(observedImage, null);
                Matrix <float> observedDescriptors = surfCPU.ComputeDescriptorsRaw(observedImage, null, observedKeyPoints);

                BruteForceMatcher matcher = new BruteForceMatcher(BruteForceMatcher.DistanceType.L2F32);
                matcher.Add(modelDescriptors);
                int k = 2;
                indices = new Matrix <int>(observedDescriptors.Rows, k);
                dist    = new Matrix <float>(observedDescriptors.Rows, k);
                matcher.KnnMatch(observedDescriptors, indices, dist, k, null);

                mask = new Matrix <byte>(dist.Rows, 1);

                mask.SetValue(255);

                Features2DTracker.VoteForUniqueness(dist, 0.8, mask);

                int nonZeroCount = CvInvoke.cvCountNonZero(mask);
                if (nonZeroCount >= 4)
                {
                    nonZeroCount = Features2DTracker.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                    if (nonZeroCount >= 4)
                    {
                        homography = Features2DTracker.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 3);
                    }
                }

                watch.Stop();
            }

            //Draw the matched keypoints
            Image <Bgr, Byte> result = Features2DTracker.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints,
                                                                     indices, new Bgr(255, 255, 255), new Bgr(255, 255, 255), mask, Features2DTracker.KeypointDrawType.NOT_DRAW_SINGLE_POINTS);

            #region draw the projected region on the image
            if (homography != null)
            { //draw a rectangle along the projected model
                Rectangle rect = modelImage.ROI;
                PointF[]  pts  = new PointF[] {
                    new PointF(rect.Left, rect.Bottom),
                    new PointF(rect.Right, rect.Bottom),
                    new PointF(rect.Right, rect.Top),
                    new PointF(rect.Left, rect.Top)
                };
                homography.ProjectPoints(pts);

                result.DrawPolyline(Array.ConvertAll <PointF, Point>(pts, Point.Round), true, new Bgr(Color.Red), 5);
            }
            #endregion

            ImageViewer.Show(result, String.Format("Matched using {0} in {1} milliseconds", GpuInvoke.HasCuda ? "GPU" : "CPU", watch.ElapsedMilliseconds));
        }
Example #15
        public Image <Bgr, float> alignment(Image <Bgr, float> fImage, Image <Bgr, float> lImage)
        {
            HomographyMatrix homography = null;
            SURFDetector     surfCPU    = new SURFDetector(500, false);
            VectorOfKeyPoint modelKeyPoints;
            VectorOfKeyPoint observedKeyPoints;
            Matrix <int>     indices;

            Matrix <byte> mask;

            int    k = 2;
            double uniquenessThreshold = 0.8;


            Image <Gray, Byte> fImageG = fImage.Convert <Gray, Byte>();
            Image <Gray, Byte> lImageG = lImage.Convert <Gray, Byte>();

            //extract features from the object image
            modelKeyPoints = new VectorOfKeyPoint();
            Matrix <float> modelDescriptors = surfCPU.DetectAndCompute(fImageG, null, modelKeyPoints);


            // extract features from the observed image
            observedKeyPoints = new VectorOfKeyPoint();
            Matrix <float>            observedDescriptors = surfCPU.DetectAndCompute(lImageG, null, observedKeyPoints);
            BruteForceMatcher <float> matcher             = new BruteForceMatcher <float>(DistanceType.L2);

            matcher.Add(modelDescriptors);

            indices = new Matrix <int>(observedDescriptors.Rows, k);
            using (Matrix <float> dist = new Matrix <float>(observedDescriptors.Rows, k))
            {
                matcher.KnnMatch(observedDescriptors, indices, dist, k, null);
                mask = new Matrix <byte>(dist.Rows, 1);
                mask.SetValue(255);
                Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
            }

            int nonZeroCount = CvInvoke.cvCountNonZero(mask);

            if (nonZeroCount >= 4)
            {
                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                if (nonZeroCount >= 4)
                {
                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
                }
            }
            Image <Bgr, Byte> result = Features2DToolbox.DrawMatches(fImageG, modelKeyPoints, lImageG, observedKeyPoints,
                                                                     indices, new Bgr(255, 255, 255), new Bgr(255, 255, 255), mask, Features2DToolbox.KeypointDrawType.DEFAULT);

            if (homography != null)
            {
                //draw a rectangle along the projected model
                Rectangle rect = fImageG.ROI;
                PointF[]  pts  = new PointF[] {
                    new PointF(rect.Left, rect.Bottom),
                    new PointF(rect.Right, rect.Bottom),
                    new PointF(rect.Right, rect.Top),
                    new PointF(rect.Left, rect.Top)
                };
                homography.ProjectPoints(pts);

                result.DrawPolyline(Array.ConvertAll <PointF, Point>(pts, Point.Round), true, new Bgr(Color.Red), 5);

                Image <Bgr, byte>  mosaic     = new Image <Bgr, byte>(fImageG.Width + fImageG.Width, fImageG.Height);
                Image <Bgr, byte>  warp_image = mosaic.Clone();
                Image <Bgr, float> result2    = new Image <Bgr, float>(fImage.Size);
                Image <Gray, Byte> result3    = new Image <Gray, Byte>(fImage.Size);
                CvInvoke.cvWarpPerspective(fImage.Ptr, result2.Ptr, homography.Ptr, (int)INTER.CV_INTER_CUBIC + (int)WARP.CV_WARP_FILL_OUTLIERS, new MCvScalar(0));
                return(result2);
            }
            return(null);
        }
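A usage sketch for alignment (the file names are assumptions); the method returns the warped first image, or null when no homography could be estimated.

            // Usage sketch (file names are assumptions)
            Image<Bgr, float> first = new Image<Bgr, float>("first.jpg");
            Image<Bgr, float> last = new Image<Bgr, float>("last.jpg");
            Image<Bgr, float> aligned = alignment(first, last);   // null if no homography was found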
Example #16
        private bool Calibrate_perspective()
        {
            if (Image_chessboard == null)
            {
                // Chessboard-image not loaded yet
                // Load and scale to size of webcam image
                Image_chessboard = new Image<Gray, Byte>(Properties.Resources.Chessboard).Resize(Image_webcam.Width, Image_webcam.Height, Emgu.CV.CvEnum.INTER.CV_INTER_AREA);
            }

            // Remove Text
            lbl_Info.Text = "";

            // Display chessboard
            box_Final.BackColor = Color.White;
            box_Final.SizeMode = PictureBoxSizeMode.CenterImage;
            box_Final.Image = Image_chessboard.Resize(box_Final.Width - 2 * OFFSET_CHESSBOARD, box_Final.Height - 2 * OFFSET_CHESSBOARD, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC).ToBitmap();

            // Get corner points of original and captured chessboard
            Size size_p = new Size(N_CHESSFIELDS_X - 1, N_CHESSFIELDS_Y - 1);
            Emgu.CV.CvEnum.CALIB_CB_TYPE calibrations = Emgu.CV.CvEnum.CALIB_CB_TYPE.ADAPTIVE_THRESH | Emgu.CV.CvEnum.CALIB_CB_TYPE.NORMALIZE_IMAGE | Emgu.CV.CvEnum.CALIB_CB_TYPE.FILTER_QUADS;
            PointF[] corners_dst = CameraCalibration.FindChessboardCorners(Image_chessboard, size_p, calibrations);
            PointF[] corners_src = CameraCalibration.FindChessboardCorners(Image_webcam.Convert<Gray, Byte>(), size_p, calibrations);
            if (corners_src == null || corners_dst == null) return false; // Chessboard not found

            // Get matrix for transformation
            Transformation_matrix = CameraCalibration.FindHomography(corners_src, corners_dst, Emgu.CV.CvEnum.HOMOGRAPHY_METHOD.DEFAULT, 1);

            // Clear box_final
            box_Final.BackColor = Color.Black;
            box_Final.Image = null;
            Drawings.Clear(box_Final.BackColor);

            // Set size mode back to image stretch
            box_Final.SizeMode = PictureBoxSizeMode.StretchImage;

            return true; // Successful
        }
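A follow-up sketch (an assumption, not part of the original form): once Calibrate_perspective() has returned true, the homography in Transformation_matrix can be used to warp the webcam frame into the projected-chessboard reference frame, here via the same cvWarpPerspective call used in the alignment example, assuming Image_webcam is an Image<Bgr, Byte>.

            // Usage sketch: warp the current webcam frame with the calibrated homography
            Image<Bgr, Byte> warped = Image_webcam.CopyBlank();
            CvInvoke.cvWarpPerspective(Image_webcam.Ptr, warped.Ptr, Transformation_matrix.Ptr,
                (int)Emgu.CV.CvEnum.INTER.CV_INTER_LINEAR + (int)Emgu.CV.CvEnum.WARP.CV_WARP_FILL_OUTLIERS,
                new MCvScalar(0));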
        private void frmMain_Loaded(object sender, RoutedEventArgs e)
        {
            //Kinect connect
            foreach(var potentialSensor in KinectSensor.KinectSensors)
                if(potentialSensor.Status == KinectStatus.Connected)
                {

                    sensor = potentialSensor;
                    break;
                }

            //initialize Kinect sensor
            if(sensor != null)
            {
                sensor.DepthStream.Range = DepthRange.Near;
                sensor.DepthStream.Enable(DepthImageFormat.Resolution640x480Fps30);
                sensor.ColorStream.Enable(ColorImageFormat.RgbResolution640x480Fps30);
                colorPixels = new byte[this.sensor.ColorStream.FramePixelDataLength];
                depthPixels = new DepthImagePixel[this.sensor.DepthStream.FramePixelDataLength];
                colorBitmap = new WriteableBitmap(this.sensor.ColorStream.FrameWidth, this.sensor.ColorStream.FrameHeight, 96.0, 96.0, PixelFormats.Bgr32, null);
                depthBitmap = new WriteableBitmap(this.sensor.DepthStream.FrameWidth, this.sensor.DepthStream.FrameHeight, 96.0, 96.0, PixelFormats.Bgr32, null);

                //SetDisplay();

                sensor.AllFramesReady += sensor_AllFramesReady;

                try
                { sensor.Start(); }
                catch (IOException ee)
                {
                    log(ee.Message);
                    sensor = null;
                }

                //Kinect Tilt
                //sldKinectTilt.Value = sensor.ElevationAngle;
                sldKinectTilt.Maximum = sensor.MaxElevationAngle;
                sldKinectTilt.Minimum = sensor.MinElevationAngle;
                CvInvoke.cvNamedWindow("Display");

                poolPos[0] = new System.Drawing.Point(0, 0);
                poolPos[1] = new System.Drawing.Point(0, 0);

                //output warping point setting
                srcs[0] = new System.Drawing.PointF(0, 0);
                srcs[1] = new System.Drawing.PointF(640, 0);
                srcs[2] = new System.Drawing.PointF(640, 480);
                srcs[3] = new System.Drawing.PointF(0, 480);
                srcs.CopyTo(dest, 0);

                //image initialize
                imgDisplay = new Image<Bgr, byte>(poolWidth, poolHeight, new Bgr(System.Drawing.Color.Black));
                imgDepth = new Image<Bgr, byte>(640, 480);
                imgBall = new Image<Bgr, byte>(640, 480);
                imgColor = new Image<Bgr, byte>(640, 480);
                imgBallGray = new Image<Gray, byte>(640, 480);

                mywarpmat = CameraCalibration.GetPerspectiveTransform(srcs, dest);

            }

            if (sensor == null)
            {
                log("Kinect Connect Fail");
                pnlSetting.IsEnabled = false;
                pnlSetting2.IsEnabled = false;
            }
        }
Example #18
        /// <summary>
        /// Draw the model image and observed image, the matched features and homography projection.
        /// </summary>
        /// <param name="modelImage">The model image</param>
        /// <param name="observedImage">The observed image</param>
        /// <param name="matchTime">The output total time for computing the homography matrix.</param>
        /// <returns>The model image and observed image, the matched features and homography projection.</returns>
        public static Image <Bgr, Byte> Draw(Image <Gray, Byte> modelImage, Image <Gray, byte> observedImage, out long matchTime)
        {
            Stopwatch        watch;
            HomographyMatrix homography = null;

            SURFDetector     surfCPU = new SURFDetector(500, false);
            VectorOfKeyPoint modelKeyPoints;
            VectorOfKeyPoint observedKeyPoints;
            Matrix <int>     indices;

            Matrix <byte> mask;
            int           k = 2;
            double        uniquenessThreshold = 0.8;

            if (GpuInvoke.HasCuda)
            {
                GpuSURFDetector surfGPU = new GpuSURFDetector(surfCPU.SURFParams, 0.01f);
                using (GpuImage <Gray, Byte> gpuModelImage = new GpuImage <Gray, byte>(modelImage))
                    //extract features from the object image
                    using (GpuMat <float> gpuModelKeyPoints = surfGPU.DetectKeyPointsRaw(gpuModelImage, null))
                        using (GpuMat <float> gpuModelDescriptors = surfGPU.ComputeDescriptorsRaw(gpuModelImage, null, gpuModelKeyPoints))
                            using (GpuBruteForceMatcher <float> matcher = new GpuBruteForceMatcher <float>(DistanceType.L2))
                            {
                                modelKeyPoints = new VectorOfKeyPoint();
                                surfGPU.DownloadKeypoints(gpuModelKeyPoints, modelKeyPoints);
                                watch = Stopwatch.StartNew();

                                // extract features from the observed image
                                using (GpuImage <Gray, Byte> gpuObservedImage = new GpuImage <Gray, byte>(observedImage))
                                    using (GpuMat <float> gpuObservedKeyPoints = surfGPU.DetectKeyPointsRaw(gpuObservedImage, null))
                                        using (GpuMat <float> gpuObservedDescriptors = surfGPU.ComputeDescriptorsRaw(gpuObservedImage, null, gpuObservedKeyPoints))
                                            using (GpuMat <int> gpuMatchIndices = new GpuMat <int>(gpuObservedDescriptors.Size.Height, k, 1, true))
                                                using (GpuMat <float> gpuMatchDist = new GpuMat <float>(gpuObservedDescriptors.Size.Height, k, 1, true))
                                                    using (GpuMat <Byte> gpuMask = new GpuMat <byte>(gpuMatchIndices.Size.Height, 1, 1))
                                                        using (Stream stream = new Stream())
                                                        {
                                                            matcher.KnnMatchSingle(gpuObservedDescriptors, gpuModelDescriptors, gpuMatchIndices, gpuMatchDist, k, null, stream);
                                                            indices = new Matrix <int>(gpuMatchIndices.Size);
                                                            mask    = new Matrix <byte>(gpuMask.Size);

                                                            // GPU implementation of VoteForUniqueness
                                                            using (GpuMat <float> col0 = gpuMatchDist.Col(0))
                                                                using (GpuMat <float> col1 = gpuMatchDist.Col(1))
                                                                {
                                                                    GpuInvoke.Multiply(col1, new MCvScalar(uniquenessThreshold), col1, stream);
                                                                    GpuInvoke.Compare(col0, col1, gpuMask, CMP_TYPE.CV_CMP_LE, stream);
                                                                }

                                                            observedKeyPoints = new VectorOfKeyPoint();
                                                            surfGPU.DownloadKeypoints(gpuObservedKeyPoints, observedKeyPoints);

                                                            //wait for the stream to complete its tasks
                                                            // We can perform other CPU-intensive work here while waiting for the stream to complete.
                                                            stream.WaitForCompletion();

                                                            gpuMask.Download(mask);
                                                            gpuMatchIndices.Download(indices);

                                                            if (GpuInvoke.CountNonZero(gpuMask) >= 4)
                                                            {
                                                                int nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                                                                if (nonZeroCount >= 4)
                                                                {
                                                                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
                                                                }
                                                            }

                                                            watch.Stop();
                                                        }
                            }
            }
            else
            {
                //extract features from the object image
                modelKeyPoints = surfCPU.DetectKeyPointsRaw(modelImage, null);
                Matrix <float> modelDescriptors = surfCPU.ComputeDescriptorsRaw(modelImage, null, modelKeyPoints);

                watch = Stopwatch.StartNew();

                // extract features from the observed image
                observedKeyPoints = surfCPU.DetectKeyPointsRaw(observedImage, null);
                Matrix <float>            observedDescriptors = surfCPU.ComputeDescriptorsRaw(observedImage, null, observedKeyPoints);
                BruteForceMatcher <float> matcher             = new BruteForceMatcher <float>(DistanceType.L2);
                matcher.Add(modelDescriptors);

                indices = new Matrix <int>(observedDescriptors.Rows, k);
                using (Matrix <float> dist = new Matrix <float>(observedDescriptors.Rows, k))
                {
                    matcher.KnnMatch(observedDescriptors, indices, dist, k, null);
                    mask = new Matrix <byte>(dist.Rows, 1);
                    mask.SetValue(255);
                    Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
                }

                int nonZeroCount = CvInvoke.cvCountNonZero(mask);
                if (nonZeroCount >= 4)
                {
                    nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                    if (nonZeroCount >= 4)
                    {
                        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
                    }
                }

                watch.Stop();
            }

            //Draw the matched keypoints
            Image <Bgr, Byte> result = Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints,
                                                                     indices, new Bgr(255, 255, 255), new Bgr(255, 255, 255), mask, Features2DToolbox.KeypointDrawType.DEFAULT);

            #region draw the projected region on the image
            if (homography != null)
            {  //draw a rectangle along the projected model
                Rectangle rect = modelImage.ROI;
                PointF[]  pts  = new PointF[] {
                    new PointF(rect.Left, rect.Bottom),
                    new PointF(rect.Right, rect.Bottom),
                    new PointF(rect.Right, rect.Top),
                    new PointF(rect.Left, rect.Top)
                };
                homography.ProjectPoints(pts);

                result.DrawPolyline(Array.ConvertAll <PointF, Point>(pts, Point.Round), true, new Bgr(Color.Red), 5);
            }
            #endregion

            matchTime = watch.ElapsedMilliseconds;

            return(result);
        }
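A caller sketch for Draw (the file names are assumptions, reusing the box/box_in_scene images from the Run() example above).

            // Usage sketch (file names are assumptions)
            long matchTime;
            using (Image<Gray, Byte> model = new Image<Gray, byte>("box.png"))
            using (Image<Gray, Byte> scene = new Image<Gray, byte>("box_in_scene.png"))
            using (Image<Bgr, Byte> result = Draw(model, scene, out matchTime))
                ImageViewer.Show(result, String.Format("Matched in {0} milliseconds", matchTime));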
Example #19
        private bool Calibrate_perspective()
        {
            if (Image_chessboard == null)
            {//Chessboard-image not loaded yet
                //Load (with same size as original)
                Image_chessboard = new Image<Gray, Byte>(Laserboard.Properties.Resources.Chessboard).Resize(Image_webcam.Width, Image_webcam.Height, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
            }

            //Display
            box_final.BackColor = Color.Black;
            box_final.Image = Image_chessboard.Resize(box_final.Width - 2 * OFFSET_CHESSBOARD, box_final.Height - 2 * OFFSET_CHESSBOARD, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC).ToBitmap();

            //Get corner-points of original and captured chessboard
            Size size_p = new Size(N_CHESSFIELDS_X - 1, N_CHESSFIELDS_Y - 1);
            Emgu.CV.CvEnum.CALIB_CB_TYPE calibrations = Emgu.CV.CvEnum.CALIB_CB_TYPE.ADAPTIVE_THRESH | Emgu.CV.CvEnum.CALIB_CB_TYPE.NORMALIZE_IMAGE | Emgu.CV.CvEnum.CALIB_CB_TYPE.FILTER_QUADS;
            PointF[] corners_dst = CameraCalibration.FindChessboardCorners(Image_chessboard, size_p, calibrations);
            PointF[] corners_src = CameraCalibration.FindChessboardCorners(Image_webcam.Convert<Gray, Byte>(), size_p, calibrations);
            if (corners_src == null || corners_dst == null) return false; //Chessboard not found

            //Get matrix for transformation
            Transformation_matrix = CameraCalibration.FindHomography(corners_src, corners_dst, Emgu.CV.CvEnum.HOMOGRAPHY_METHOD.DEFAULT, 1);

            //Clear box_final
            box_final.Image = null;
            //box_final.BackColor = Color.Black;

            return true; //Successful
        }
        /// <summary>
        /// Adjusts the output-image warp destination when one of the corner buttons is clicked
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e"></param>
        private void btnOutWrap_Click(object sender, RoutedEventArgs e)
        {
            string tag = ((Button)sender).Tag.ToString();
            string pos = tag.Substring(0, 2);
            char dir = tag[2];
            System.Drawing.PointF nextPos;
            int pointNum = 0;
            switch(pos)
            {
                case "LT": pointNum = 0; break;
                case "RT": pointNum = 1; break;
                case "RB": pointNum = 2; break;
                case "LB": pointNum = 3; break;
            }
            nextPos = dest[pointNum];

            switch(dir)
            {
                case 'U': nextPos.Y -= nextPos.Y > 0 ? 1 : 0; break;
                case 'R': nextPos.X += nextPos.X < poolWidth-1 ? 1 : 0; break;
                case 'D': nextPos.Y += nextPos.Y < poolHeight-1 ? 1 : 0; break;
                case 'L': nextPos.X -= nextPos.X > 0 ? 1 : 0; break;
            }

            dest[pointNum] = nextPos;

            mywarpmat = CameraCalibration.GetPerspectiveTransform(srcs, dest);
        }
Example #21
        public Image <Bgr, Byte> DrawResult(Image <Gray, Byte> modelImage, Image <Gray, byte> observedImage, out long matchTime, out double area, int minarea, out Point center)
        {
            center = new Point(320, 240);
            Stopwatch watch;

            area = 0;
            double modelarea = (modelImage.ROI.Right - modelImage.ROI.Left) * (modelImage.ROI.Bottom - modelImage.ROI.Top);
            // Homography matrix
            HomographyMatrix homography = null;

            // SURF feature detector
            SURFDetector surfCPU = new SURFDetector(500, false);

            // Key points in the model image and the observed image
            VectorOfKeyPoint modelKeyPoints;
            VectorOfKeyPoint observedKeyPoints;

            Matrix <int>  indices;
            Matrix <byte> mask;

            // k for the k-nearest-neighbour matching
            int k = 2;
            // Uniqueness (filtering) threshold
            double uniquenessThreshold = 0.8;


            // Extract SURF key points and descriptors from the model (marker) image
            modelKeyPoints = surfCPU.DetectKeyPointsRaw(modelImage, null);
            Matrix <float> modelDescriptors = surfCPU.ComputeDescriptorsRaw(modelImage, null, modelKeyPoints);

            watch = Stopwatch.StartNew();

            // Extract SURF key points and descriptors from the observed image
            observedKeyPoints = surfCPU.DetectKeyPointsRaw(observedImage, null);
            Matrix <float> observedDescriptors = surfCPU.ComputeDescriptorsRaw(observedImage, null, observedKeyPoints);

            if (observedDescriptors == null)
            {
                watch.Stop(); matchTime = watch.ElapsedMilliseconds;
                return(null);
            }

            // Match the descriptors with a brute-force matcher
            BruteForceMatcher <float> matcher = new BruteForceMatcher <float>(DistanceType.L2);

            matcher.Add(modelDescriptors);
            indices = new Matrix <int>(observedDescriptors.Rows, k);
            // Filter the candidate match pairs by descriptor distance
            using (Matrix <float> dist = new Matrix <float>(observedDescriptors.Rows, k))
            {
                // 2-nearest-neighbour descriptor matching
                matcher.KnnMatch(observedDescriptors, indices, dist, k, null);
                // Record the successfully matched points in the mask
                mask = new Matrix <byte>(dist.Rows, 1);
                mask.SetValue(255);
                // Apply the uniqueness threshold; only the surviving matches stay set in the mask
                Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
            }

            int nonZeroCount = CvInvoke.cvCountNonZero(mask);

            if (nonZeroCount >= 10)
            {
                // Filter out matches whose scale or rotation is inconsistent; survivors stay in the mask
                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                if (nonZeroCount >= 10)
                {
                    // Build the homography matrix from the remaining matches
                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
                }
            }

            watch.Stop();
            // }

            // Draw the matched key points
            //Image<Bgr, Byte> result = Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints,indices, new Bgr(0, 0, 255), new Bgr(0, 255, 0), mask, Features2DToolbox.KeypointDrawType.DEFAULT);
            Image <Bgr, byte> result = null;

            System.Drawing.Bitmap bm = observedImage.ToBitmap();
            result = new Image <Bgr, byte>(bm);
            #region draw the projected region on the image
            // Draw the region implied by the homography
            if (homography != null)
            {
                Rectangle rect = modelImage.ROI;
                PointF[]  pts  = new PointF[] {
                    new PointF(rect.Left, rect.Bottom),
                    new PointF(rect.Right, rect.Bottom),
                    new PointF(rect.Right, rect.Top),
                    new PointF(rect.Left, rect.Top)
                };
                // Project the four corners of the model image through the homography (accounting for the rotation and distortion of the whole image) and draw the resulting quadrilateral
                homography.ProjectPoints(pts);
                area = getarea(pts); double xsum = 0; double ysum = 0;
                foreach (PointF point in pts)
                {
                    xsum += point.X; ysum += point.Y;
                }
                center = new Point(Convert.ToInt32(xsum / 4), Convert.ToInt32(ysum / 4));
                if (area > minarea)
                {
                    Image <Bgr, byte> temp = new Image <Bgr, Byte>(result.Width, result.Height);
                    temp.DrawPolyline(Array.ConvertAll <PointF, Point>(pts, Point.Round), true, new Bgr(Color.Red), 5);

                    //temp.Save("D:\\temp\\" + (++index) + ".jpg");

                    int a = CountContours(temp.ToBitmap());
                    if (a == 2)
                    {
                        result.DrawPolyline(Array.ConvertAll <PointF, Point>(pts, Point.Round), true, new Bgr(Color.Red), 5);
                    }
                    else
                    {
                        matchTime = 0; area = 0; return(result);
                    }
                }
            }
            else
            {
                area = 0;
            }
            #endregion

            matchTime = watch.ElapsedMilliseconds;

            return(result);
        }
        // Draw a box around the matched region
        public Image <Bgr, Byte> DrawMatchPoly(Image <Gray, Byte> modelImage, Image <Bgr, Byte> result, HomographyMatrix homography)
        {
            if (homography != null)
            {  //draw a rectangle along the projected model
                // A match was found
                Console.WriteLine("Match! ");
                Rectangle rect = modelImage.ROI;
                PointF[]  pts  = new PointF[] {
                    new PointF(rect.Left, rect.Bottom),
                    new PointF(rect.Right, rect.Bottom),
                    new PointF(rect.Right, rect.Top),
                    new PointF(rect.Left, rect.Top)
                };
                homography.ProjectPoints(pts);

                result.DrawPolyline(Array.ConvertAll <PointF, Point>(pts, Point.Round), true, new Bgr(Color.Red), 5);
            }

            return(result);
        }
        private Image <Bgr, byte> Match(Image <Bgr, byte> image1, Image <Bgr, byte> image2, int flag)
        {
            HomographyMatrix homography      = null;
            SURFDetector     surfDetectorCPU = new SURFDetector(500, false);

            int    k = 2;           //number of matches that we want to find between image1 and image2
            double uniquenessThreshold = 0.8;

            Matrix <int>  indices;
            Matrix <byte> mask;

            VectorOfKeyPoint KeyPointsImage1;
            VectorOfKeyPoint KeyPointsImage2;

            Image <Gray, Byte> Image1G = image1.Convert <Gray, Byte>();
            Image <Gray, Byte> Image2G = image2.Convert <Gray, Byte>();

            if (GpuInvoke.HasCuda)      //Using CUDA, the GPUs can be used for general purpose processing (i.e., not exclusively graphics), speed up performance
            {
                Console.WriteLine("Here");
                GpuSURFDetector surfDetectorGPU = new GpuSURFDetector(surfDetectorCPU.SURFParams, 0.01f);

                // extract features from Image1
                using (GpuImage <Gray, Byte> gpuImage1 = new GpuImage <Gray, byte>(Image1G))                                                     //convert CPU input image to GPUImage(greyscale)
                    using (GpuMat <float> gpuKeyPointsImage1 = surfDetectorGPU.DetectKeyPointsRaw(gpuImage1, null))                              //find key points for image
                        using (GpuMat <float> gpuDescriptorsImage1 = surfDetectorGPU.ComputeDescriptorsRaw(gpuImage1, null, gpuKeyPointsImage1)) //calculate descriptor for each key point
                            using (GpuBruteForceMatcher <float> matcher = new GpuBruteForceMatcher <float>(DistanceType.L2))                     //create a new matcher object
                            {
                                KeyPointsImage1 = new VectorOfKeyPoint();
                                surfDetectorGPU.DownloadKeypoints(gpuKeyPointsImage1, KeyPointsImage1);                                 //copy the Matrix from GPU to CPU

                                // extract features from Image2
                                using (GpuImage <Gray, Byte> gpuImage2 = new GpuImage <Gray, byte>(Image2G))
                                    using (GpuMat <float> gpuKeyPointsImage2 = surfDetectorGPU.DetectKeyPointsRaw(gpuImage2, null))
                                        using (GpuMat <float> gpuDescriptorsImage2 = surfDetectorGPU.ComputeDescriptorsRaw(gpuImage2, null, gpuKeyPointsImage2))

                                            //for each descriptor in image2, we find the k best matching points and their distances from the image1 descriptors

                                            using (GpuMat <int> gpuMatchIndices = new GpuMat <int>(gpuDescriptorsImage2.Size.Height, k, 1, true))      //stores indices of k best matches
                                                using (GpuMat <float> gpuMatchDist = new GpuMat <float>(gpuDescriptorsImage2.Size.Height, k, 1, true)) //stores distance of k best matches

                                                    using (GpuMat <Byte> gpuMask = new GpuMat <byte>(gpuMatchIndices.Size.Height, 1, 1))               //stores result of comparison
                                                        using (Stream stream = new Stream())
                                                        {
                                                            matcher.KnnMatchSingle(gpuDescriptorsImage2, gpuDescriptorsImage1, gpuMatchIndices, gpuMatchDist, k, null, stream); //matching descriptors of image2 to image1 and storing the k best indices and corresponding distances

                                                            indices = new Matrix <int>(gpuMatchIndices.Size);
                                                            mask    = new Matrix <byte>(gpuMask.Size);

                                                            //gpu implementation of VoteForUniqueness
                                                            using (GpuMat <float> col0 = gpuMatchDist.Col(0))
                                                                using (GpuMat <float> col1 = gpuMatchDist.Col(1))
                                                                {
                                                                    GpuInvoke.Multiply(col1, new MCvScalar(uniquenessThreshold), col1, stream); //by setting stream, we perform the operation asynchronously
                                                                    GpuInvoke.Compare(col0, col1, gpuMask, CMP_TYPE.CV_CMP_LE, stream);         //a match is kept only when col0 <= 0.8 * col1
                                                                }

                                                            KeyPointsImage2 = new VectorOfKeyPoint();
                                                            surfDetectorGPU.DownloadKeypoints(gpuKeyPointsImage2, KeyPointsImage2);

                                                            //wait for the stream to complete its tasks
                                                            //We could perform other CPU-intensive work here while waiting for the stream to complete.
                                                            stream.WaitForCompletion();

                                                            gpuMask.Download(mask);
                                                            gpuMatchIndices.Download(indices);

                                                            if (GpuInvoke.CountNonZero(gpuMask) >= 4)
                                                            {
                                                                int nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(KeyPointsImage1, KeyPointsImage2, indices, mask, 1.5, 20); //count the number of nonzero points in the mask (the mask stores the result of the col0 <= 0.8 * col1 comparison)
                                                                //we can create a homography matrix only if we have at least 4 matching points
                                                                if (nonZeroCount >= 4)
                                                                {
                                                                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(KeyPointsImage1, KeyPointsImage2, indices, mask, 2);
                                                                }
                                                            }
                                                        }
                            }
            }
            else
            {
                Console.WriteLine("No CUDA");
                //extract features from image1
                KeyPointsImage1 = new VectorOfKeyPoint();
                Matrix <float> DescriptorsImage1 = surfDetectorCPU.DetectAndCompute(Image1G, null, KeyPointsImage1);

                //extract features from image2
                KeyPointsImage2 = new VectorOfKeyPoint();
                Matrix <float>            DescriptorsImage2 = surfDetectorCPU.DetectAndCompute(Image2G, null, KeyPointsImage2);
                BruteForceMatcher <float> matcher           = new BruteForceMatcher <float>(DistanceType.L2);
                matcher.Add(DescriptorsImage1);

                indices = new Matrix <int>(DescriptorsImage2.Rows, k);
                using (Matrix <float> dist = new Matrix <float>(DescriptorsImage2.Rows, k))
                {
                    matcher.KnnMatch(DescriptorsImage2, indices, dist, k, null);
                    mask = new Matrix <byte>(dist.Rows, 1);
                    mask.SetValue(255);
                    Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
                }

                int nonZeroCount = CvInvoke.cvCountNonZero(mask);
                if (nonZeroCount >= 4)
                {
                    nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(KeyPointsImage1, KeyPointsImage2, indices, mask, 1.5, 20);
                    if (nonZeroCount >= 4)
                    {
                        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(KeyPointsImage1, KeyPointsImage2, indices, mask, 2);
                    }
                }
            }
            Image <Bgr, Byte> mImage = image1.Convert <Bgr, Byte>();
            Image <Bgr, Byte> oImage = image2.Convert <Bgr, Byte>();
            Image <Bgr, Byte> result = new Image <Bgr, byte>(mImage.Width + oImage.Width, mImage.Height);

            //Image<Bgr, Byte> temp = Features2DToolbox.DrawMatches(image1, KeyPointsImage1, image2, KeyPointsImage2, indices, new Bgr(255, 255, 255), new Bgr(255, 255, 255), mask, Features2DToolbox.KeypointDrawType.DEFAULT);

            if (homography != null)
            {  //draw a rectangle along the projected model
                Rectangle rect = image1.ROI;
                PointF[]  pts  = new PointF[] {
                    new PointF(rect.Left, rect.Bottom),
                    new PointF(rect.Right, rect.Bottom),
                    new PointF(rect.Right, rect.Top),
                    new PointF(rect.Left, rect.Top)
                };

                homography.ProjectPoints(pts);

                HomographyMatrix origin = new HomographyMatrix();                //copy the left image into the mosaic using an identity transform (no actual shift on the origin)
                origin.SetIdentity();
                origin.Data[0, 2] = 0;
                origin.Data[1, 2] = 0;
                Image <Bgr, Byte> mosaic = new Image <Bgr, byte>(mImage.Width + oImage.Width, mImage.Height * 2);

                Image <Bgr, byte> warp_image = mosaic.Clone();
                mosaic = mImage.WarpPerspective(origin, mosaic.Width, mosaic.Height, Emgu.CV.CvEnum.INTER.CV_INTER_LINEAR, Emgu.CV.CvEnum.WARP.CV_WARP_DEFAULT, new Bgr(0, 0, 0));

                warp_image = oImage.WarpPerspective(homography, warp_image.Width, warp_image.Height, Emgu.CV.CvEnum.INTER.CV_INTER_LINEAR, Emgu.CV.CvEnum.WARP.CV_WARP_INVERSE_MAP, new Bgr(200, 0, 0));
                Image <Gray, byte> warp_image_mask = oImage.Convert <Gray, byte>();
                warp_image_mask.SetValue(new Gray(255));
                Image <Gray, byte> warp_mosaic_mask = mosaic.Convert <Gray, byte>();
                warp_mosaic_mask.SetZero();
                warp_mosaic_mask = warp_image_mask.WarpPerspective(homography, warp_mosaic_mask.Width, warp_mosaic_mask.Height, Emgu.CV.CvEnum.INTER.CV_INTER_LINEAR, Emgu.CV.CvEnum.WARP.CV_WARP_INVERSE_MAP, new Gray(0));

                warp_image.Copy(mosaic, warp_mosaic_mask);
                if (flag == 1)
                {
                    Console.WriteLine("Using Image Blending");
                    return(blend(mosaic, warp_image, warp_mosaic_mask, 2));
                }
                else
                {
                    Console.WriteLine("No Image Blending");
                    return(mosaic);
                }
            }
            return(null);
        }
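A minimal sketch of how Match might be invoked from within the same class; the file names and the save path below are placeholders, not taken from the source:
        //Hypothetical call site (paths are placeholders)
        private Image <Bgr, byte> StitchPair()
        {
            using (Image <Bgr, byte> left = new Image <Bgr, byte>("left.jpg"))
            using (Image <Bgr, byte> right = new Image <Bgr, byte>("right.jpg"))
            {
                Image <Bgr, byte> mosaic = Match(left, right, 1);   //flag == 1 requests blending
                if (mosaic != null)
                {
                    mosaic.Save("mosaic.jpg");
                }
                return mosaic;
            }
        }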
Example #24
    public Image <Bgr, Byte> ObjectDetector(Image <Bgr, Byte> modelImage, string filepath)
    {
        Stopwatch                watch;
        HomographyMatrix         homography = null;
        SURFDetector             surfCPU    = new SURFDetector(500, false);
        FastDetector             fastCPU    = new FastDetector(10, true);
        VectorOfKeyPoint         modelKeyPoints;
        BriefDescriptorExtractor descriptor = new BriefDescriptorExtractor();
        Image <Gray, byte>       grayImage  = new Image <Gray, Byte>(filepath);

        modelKeyPoints = fastCPU.DetectKeyPointsRaw(grayImage, null);
        Matrix <byte> modelDescriptors = descriptor.ComputeDescriptorsRaw(grayImage, null, modelKeyPoints);

        Image <Bgr, Byte> result = Features2DToolbox.DrawKeypoints(grayImage, modelKeyPoints, new Bgr(0, 0, 255), Features2DToolbox.KeypointDrawType.DRAW_RICH_KEYPOINTS);

        result.Save("C:\\Users\\Sandeep\\Documents\\What_Are_Those\\Assets\\picture645.jpg");
        //Image<Bgr, Byte> result = modelImage;

        MKeyPoint[]   modelpoints = modelKeyPoints.ToArray();
        List <PointF> points      = new List <PointF>();
        //List<PointF> boundarypointsList = new List<PointF>();
        Dictionary <float, float> boundaryPoints           = new Dictionary <float, float>();
        Dictionary <float, float> boundaryPointshorizontal = new Dictionary <float, float>();
        Dictionary <float, float> boundaryPointsModified   = new Dictionary <float, float>();
        Dictionary <float, float> boundaryPointsRed        = new Dictionary <float, float>();

        for (int i = 0; i < modelpoints.Length; i++)
        {
            points.Add(modelpoints[i].Point);
            //print("X is " + points.ToArray()[i].X + "Y is " + points.ToArray()[i].Y);
        }
        points.Sort((a, b) => a.X.CompareTo(b.X));
        float x = points.ToArray()[0].X;
        float y = points.ToArray()[0].Y;
        float nextx, nexty;
        float miny = grayImage.Height;
        float maxx = grayImage.Width;

        for (int i = 0; i < points.ToArray().Length - 1; i++)
        {
            x     = points.ToArray()[i].X;
            y     = points.ToArray()[i].Y;
            nextx = points.ToArray()[i + 1].X;
            nexty = points.ToArray()[i + 1].Y;
            if (x == nextx)
            {
                miny = Mathf.Min(y, nexty);
            }
            else
            {
                boundaryPoints.Add(x, miny);

                //boundarypointsList.Add(new PointF(x, miny));
            }
            //print("X is " + points.ToArray()[i].X + " Y is " + points.ToArray()[i].Y);
        }
        int lastindex = points.ToArray().Length - 1;

        if (x != points.ToArray()[lastindex].X)
        {
            PointF lastpoint = points.ToArray()[lastindex];
            boundaryPoints.Add(lastpoint.X, lastpoint.Y);
        }
        points.Sort((a, b) => a.Y.CompareTo(b.Y));
        for (int i = 0; i < points.ToArray().Length - 1; i++)
        {
            x     = points.ToArray()[i].X;
            y     = points.ToArray()[i].Y;
            nextx = points.ToArray()[i + 1].X;
            nexty = points.ToArray()[i + 1].Y;
            if (y == nexty)
            {
                maxx = Mathf.Max(x, nextx);
            }
            else
            {
                boundaryPointshorizontal.Add(y, maxx);

                //boundarypointsList.Add(new PointF(x, miny));
            }
            //print("X is " + points.ToArray()[i].X + " Y is " + points.ToArray()[i].Y);
        }
        lastindex = points.ToArray().Length - 1;
        if (y != points.ToArray()[lastindex].Y)
        {
            PointF lastpoint = points.ToArray()[lastindex];
            boundaryPointshorizontal.Add(lastpoint.Y, lastpoint.X);   //key is Y, value is X, mirroring the loop above
        }
        var min  = boundaryPoints.ElementAt(0);
        var max  = boundaryPoints.ElementAt(0);
        var hmax = boundaryPoints.ElementAt(0);

        for (int i = 0; i < boundaryPoints.Count; i++)
        {
            var   item      = boundaryPoints.ElementAt(i);
            float itemKey   = item.Key;
            float itemValue = item.Value;
            if (itemValue < min.Value)
            {
                min = item;
            }
            if (itemValue > max.Value || max.Value == result.Rows)
            {
                max = item;
            }
            //print("X is " + itemKey + " Y is " + itemValue);
        }
        for (int i = 0; i < boundaryPointshorizontal.Count; i++)
        {
            var   item      = boundaryPointshorizontal.ElementAt(i);
            float itemKey   = item.Key;
            float itemValue = item.Value;
            if (itemValue < min.Value)
            {
                min = item;
            }
            if (itemValue > hmax.Value || hmax.Value == result.Cols)
            {
                hmax = item;
            }
            // print("horizontal Y is " + itemKey + " horizontal X is " + itemValue);
        }
        //print("MIN is " + min.Key + " " + min.Value);
        //print("MAX is " + max.Key + " " + max.Value);
        //print("HMAX is " + hmax.Key + " " + hmax.Value);

        float prev = boundaryPoints.ElementAt(0).Value;
        int   mid  = 0;

        for (int i = 0; i < boundaryPoints.ElementAt(0).Key; i++)
        {
            boundaryPointsModified[(float)i] = boundaryPoints.ElementAt(0).Value;
        }
        for (int i = 0; i < boundaryPoints.Count && boundaryPoints.ElementAt(i).Key != boundaryPointshorizontal.ElementAt(1).Value; i++)
        {
            var   item      = boundaryPoints.ElementAt(i);
            float itemKey   = item.Key;
            float itemValue = item.Value;

            //print("itemKey "+itemKey+ " itemValue " + itemValue + " prev " + prev);

            if (itemValue > prev)
            {
                boundaryPointsModified[itemKey] = prev;
            }
            else if ((prev - itemValue < 80 && prev != result.Rows) || (prev == result.Rows && prev - itemValue > 0))
            {
                boundaryPointsModified[itemKey] = itemValue;
                prev = itemValue;
            }
            else
            {
                boundaryPointsModified[itemKey] = prev;
            }
            mid = i;
        }
        for (int i = mid + 1; i < boundaryPoints.Count; i++)
        {
            var   item      = boundaryPoints.ElementAt(i);
            float itemKey   = item.Key;
            float itemValue = item.Value;
            boundaryPointsModified[itemKey] = 0;
        }
        for (int i = 0; i < boundaryPointsModified.Count - 1; i++)
        {
            var item      = boundaryPointsModified.ElementAt(i);
            var itemKey   = item.Key;
            var itemValue = item.Value;

            //print("X modified is " + itemKey + " Y modified is " + itemValue);
        }



        byte[,,] data       = result.Data;
        byte[,,] data_model = modelImage.Data;

        int xstop = (int)boundaryPointsModified.ElementAt(0).Key;
        int ystop = (int)boundaryPointsModified.ElementAt(2).Value;



        /*     print("xstop is " + xstop + " ystop is "+ystop);
         *    for (int i = 0; i <= xstop; i++)
         *    {
         *        for (int j = 0; j <= ystop; j++)
         *        {
         *        data_model[j, i, 0] = 255;
         *        data_model[j, i, 1] = 255;
         *        data_model[j, i, 2] = 255;
         *        }
         *    }
         *    modelImage.Data = data_model; */



        for (int run = 19; run >= 0; run--)
        {
            for (int i = 0; i <= modelImage.Cols - 1; i++)
            {
                for (int j = 0; j <= modelImage.Rows - 1; j++)
                {
                    if (boundaryPoints.ContainsKey((float)i))
                    {
                        float stoppingPoint = boundaryPointsModified[(float)i];
                        //print("Stoppping Point is " + stoppingPoint);
                        if ((float)j <= stoppingPoint)
                        {
                            //print("j is "+j+" i is "+i+" red "+result[j, i].Red);
                            data_model[j, i, 0] = 246;
                            data_model[j, i, 1] = 246;
                            data_model[j, i, 2] = 246;
                        }

                        /*    else if (i == 600 || i == 612){
                         *      data[j, i, 0] = 255;
                         *      data[j, i, 1] = 0;
                         *      data[j, i, 2] = 0;
                         *  } */
                    }
                    else
                    {
                        float stoppingPoint = 0;
                        //print(" i is " + i);
                        if (i < boundaryPointsModified.Count)
                        {
                            stoppingPoint = boundaryPointsModified.ElementAt(i).Value;
                        }
                        //print("Stoppping Point is " + stoppingPoint);

                        if ((float)j <= stoppingPoint)
                        {
                            //print("j is "+j+" i is "+i+" red "+result[j, i].Red);
                            data_model[j, i, 0] = 246;
                            data_model[j, i, 1] = 246;
                            data_model[j, i, 2] = 246;
                        }
                    }
                }
            }
            modelImage.Data = data_model;
        }

        //  for (int run = 19; run >= 0; run--)
        //  {

        if (min.Key < mid)
        {
            mid = (int)min.Value;
        }

        //print("mid is " + mid);
        for (int i = result.Cols - 1; i >= mid; i--)
        {
            for (int j = 0; j <= result.Rows - 1; j++)
            {
                //      if (boundaryPointshorizontal.ContainsKey((float)i))
                //      {
                //float startingPoint = boundaryPointshorizontal[(float)i];
                // print("Stoppping Point is " + stoppingPoint);
                /*startingPoint <= j */

                /*            if (data[j, i, 2] < 180)
                 *          {
                 *              data[j, i, 0] = 255;
                 *              data[j, i, 1] = 255;
                 *              data[j, i, 2] = 255;
                 *
                 *          }
                 *          else
                 *          {
                 *          break;
                 *          } */

                if (data[j, i, 2] >= 240)
                {
                    boundaryPointsRed.Add(i, j);
                    //print("i is " + i + " j is " + j);
                    break;
                }


                //             }
            }
        }
        //result.Data = data;
        //     }

        int maxredx = 0;
        int maxredy = 0;

        for (int run = 19; run >= 0; run--)
        {
            for (int i = result.Cols - 1; i >= mid; i--)
            {
                for (int j = 0; j <= result.Rows - 1; j++)
                {
                    if (boundaryPointsRed.ContainsKey(i))
                    {
                        if (i > maxredx)
                        {
                            maxredx = i;
                        }
                        if (j > maxredy)
                        {
                            maxredy = j;
                        }
                        float stoppingPoint = boundaryPointsRed[i];

                        if ((float)j <= stoppingPoint /* && i != 600 && i != 612 */)
                        {
                            //print("j is "+j+" i is "+i+" red "+result[j, i].Red);
                            data_model[j, i, 0] = 246;
                            data_model[j, i, 1] = 246;
                            data_model[j, i, 2] = 246;
                        }
                    }
                }
            }
            modelImage.Data = data_model;
        }

        for (int run = 19; run >= 0; run--)
        {
            for (int i = maxredy; i >= 0; i--)
            {
                for (int j = result.Cols - 1; j >= maxredx; j--)
                {
                    data_model[i, j, 0] = 246;
                    data_model[i, j, 1] = 246;
                    data_model[i, j, 2] = 246;
                }
            }
            modelImage.Data = data_model;
        }


        for (int run = 19; run >= 0; run--)
        {
            for (int i = result.Rows - 1; i >= max.Value; i--)
            {
                for (int j = 0; j <= result.Cols - 1; j++)
                {
                    data_model[i, j, 0] = 246;
                    data_model[i, j, 1] = 246;
                    data_model[i, j, 2] = 246;
                }
            }
            modelImage.Data = data_model;
        }

        for (int run = 19; run >= 0; run--)
        {
            for (int i = result.Cols - 1; i >= hmax.Value; i--)
            {
                for (int j = 0; j <= result.Rows - 1; j++)
                {
                    data_model[j, i, 0] = 246;
                    data_model[j, i, 1] = 246;
                    data_model[j, i, 2] = 246;
                }
            }
            modelImage.Data = data_model;
        }



        return(modelImage);
    }
Example #25
        private void SURFfeature(Image <Gray, Byte> modelImage, Image <Gray, byte> observedImage, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints, out Matrix <int> indices, out Matrix <byte> mask, out HomographyMatrix homography)
        {
            int          k = 2;
            double       uniquenessThreshold = 0.8;
            SURFDetector surfCPU             = new SURFDetector(300, false);

            homography = null;

            //extract features from the object image
            modelKeyPoints = new VectorOfKeyPoint();
            Matrix <float> modelDescriptors = surfCPU.DetectAndCompute(modelImage, null, modelKeyPoints);

            // extract features from the observed image
            observedKeyPoints = new VectorOfKeyPoint();
            Matrix <float>            observedDescriptors = surfCPU.DetectAndCompute(observedImage, null, observedKeyPoints);
            BruteForceMatcher <float> matcher             = new BruteForceMatcher <float>(DistanceType.L2);

            matcher.Add(modelDescriptors);

            indices = new Matrix <int>(observedDescriptors.Rows, k);
            using (Matrix <float> dist = new Matrix <float>(observedDescriptors.Rows, k))
            {
                matcher.KnnMatch(observedDescriptors, indices, dist, k, null);
                mask = new Matrix <byte>(dist.Rows, 1);
                mask.SetValue(255);
                Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
            }

            int nonZeroCount = CvInvoke.cvCountNonZero(mask);

            if (nonZeroCount >= 4)
            {
                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                if (nonZeroCount >= 4)
                {
                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
                }
            }
        }
Example #26
        /// <summary>
        /// Environment signboard recognition using brute-force matching (more accurate but slower)
        /// </summary>
        /// <param name="template">The SURF feature data of the template</param>
        /// <param name="observedScene">The SURF feature data of the observed scene to match against</param>
        /// <returns>The matched-data object for this comparison</returns>
        public static SURFMatchedData MatchSURFFeatureByBruteForceForObjs(SURFFeatureData template, SURFFeatureData observedScene)
        {
            //This matrix indicates which rows are valid for the matches.
            Matrix <byte> mask;
            //Number of nearest neighbors to search for
            int k = 5;
            //The distance-ratio threshold below which a match is considered unique (NNDR test); a good value is 0.8
            double uniquenessThreshold = 0.5;  //default 0.8

            //The resulting n*k matrix of descriptor index from the training descriptors
            Matrix <int>     trainIdx;
            HomographyMatrix homography = null;
            Stopwatch        watch;

            try
            {
                watch = Stopwatch.StartNew();
                #region Surf for CPU
                //match
                BruteForceMatcher <float> matcher = new BruteForceMatcher <float>(DistanceType.L2Sqr);
                matcher.Add(template.GetDescriptors());

                trainIdx = new Matrix <int>(observedScene.GetDescriptors().Rows, k);
                //The resulting n*k matrix of distance values from the training descriptors
                using (Matrix <float> distance = new Matrix <float>(observedScene.GetDescriptors().Rows, k))
                {
                    matcher.KnnMatch(observedScene.GetDescriptors(), trainIdx, distance, k, null);
                    mask = new Matrix <byte>(distance.Rows, 1);
                    mask.SetValue(255); //initialize the mask to all 255 (Laplacian-sign matching)
                    //http://stackoverflow.com/questions/21932861/how-does-features2dtoolbox-voteforuniqueness-work
                    //how VoteForUniqueness works...
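                    //In short (an informal description of the call below, not the library source):
                    //each row of 'distance' holds the k best distances for one observed descriptor,
                    //and the corresponding mask row stays 255 only if dist[0] < uniquenessThreshold * dist[1],
                    //i.e. the best match is clearly better than the second best (Lowe's ratio test);
                    //otherwise the row is zeroed and ignored by the later voting steps.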
                    Features2DToolbox.VoteForUniqueness(distance, uniquenessThreshold, mask);
                }

                Image <Bgr, byte> result = null;
                int nonZeroCount         = CvInvoke.cvCountNonZero(mask); //number of good (unique) matches
                Console.WriteLine("VoteForUniqueness nonZeroCount=> " + nonZeroCount.ToString());
                if (nonZeroCount >= (template.GetKeyPoints().Size * 0.2)) //set 10
                {
                    //50 is the allowed rotation difference between the model and the matched image, e.g. m1 = 60, m2 = 50 => 60 - 50 <= 50, so they are considered similar
                    nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(template.GetKeyPoints(), observedScene.GetKeyPoints(), trainIdx, mask, 1.2, 50); //default 1.5,10
                    Console.WriteLine("VoteForSizeAndOrientation nonZeroCount=> " + nonZeroCount.ToString());
                    if (nonZeroCount >= (template.GetKeyPoints().Size * 0.5))                                                                                   //default 4 ,set 15
                    {
                        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(template.GetKeyPoints(), observedScene.GetKeyPoints(), trainIdx, mask, 5);
                    }

                    PointF[] matchPts = GetMatchBoundingBox(homography, template);

                    //Draw the matched keypoints
                    result = Features2DToolbox.DrawMatches(template.GetImg(), template.GetKeyPoints(), observedScene.GetImg(), observedScene.GetKeyPoints(),
                                                           trainIdx, new Bgr(255, 255, 255), new Bgr(255, 255, 255), mask, Features2DToolbox.KeypointDrawType.NOT_DRAW_SINGLE_POINTS);
                    if (matchPts != null)
                    {
                        result.DrawPolyline(Array.ConvertAll <PointF, Point>(matchPts, Point.Round), true, new Bgr(Color.Red), 2);
                    }
                }
                #endregion
                watch.Stop();
                Console.WriteLine("\nCal SURF Match time=======\n=> " + watch.ElapsedMilliseconds.ToString() + "ms\nCal SURF Match time=======");


                return(new SURFMatchedData(trainIdx, homography, mask, nonZeroCount, template));
            }
            catch (CvException ex)
            {
                System.Windows.Forms.MessageBox.Show(ex.ErrorMessage);
                return(null);
            }
        }
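Unlike most of the other examples, which accept a fixed minimum of 4 matches, this method scales its acceptance gates with the template size. A small worked illustration with hypothetical numbers:
        //Hypothetical template with 200 keypoints (illustrative numbers, not from the source):
        //  after VoteForUniqueness the match is pursued only if nonZeroCount >= 200 * 0.2 = 40;
        //  after VoteForSizeAndOrientation a homography is estimated only if nonZeroCount >= 200 * 0.5 = 100,
        //  and it is then computed with a RANSAC reprojection threshold of 5 pixels.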
Example #27
        public static Image <Bgr, Byte> Draw(String modelImageFileName, String observedImageFileName, out long matchTime)
        {
            Image <Gray, Byte> modelImage    = new Image <Gray, byte>(modelImageFileName);
            Image <Gray, Byte> observedImage = new Image <Gray, byte>(observedImageFileName);
            Stopwatch          watch;
            HomographyMatrix   homography = null;

            SURFDetector surfCPU = new SURFDetector(600, false);

            //SIFTDetector surfCPU = new SIFTDetector();
            VectorOfKeyPoint modelKeyPoints;
            VectorOfKeyPoint observedKeyPoints;
            Matrix <int>     indices;

            Matrix <byte> mask;
            int           k = 2;
            double        uniquenessThreshold = 0.8;

            //extract features from the object image
            modelKeyPoints = surfCPU.DetectKeyPointsRaw(modelImage, null);
            Matrix <float> modelDescriptors = surfCPU.ComputeDescriptorsRaw(modelImage, null, modelKeyPoints);

            watch = Stopwatch.StartNew();

            // extract features from the observed image
            observedKeyPoints = surfCPU.DetectKeyPointsRaw(observedImage, null);
            Matrix <float>            observedDescriptors = surfCPU.ComputeDescriptorsRaw(observedImage, null, observedKeyPoints);
            BruteForceMatcher <float> matcher             = new BruteForceMatcher <float>(DistanceType.L2);

            matcher.Add(modelDescriptors);

            indices = new Matrix <int>(observedDescriptors.Rows, k);
            using (Matrix <float> dist = new Matrix <float>(observedDescriptors.Rows, k))
            {
                matcher.KnnMatch(observedDescriptors, indices, dist, k, null);
                mask = new Matrix <byte>(dist.Rows, 1);
                mask.SetValue(255);
                Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
            }

            int nonZeroCount = CvInvoke.cvCountNonZero(mask);

            if (nonZeroCount >= 4)
            {
                //nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                if (nonZeroCount >= 4)
                {
                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
                }
            }

            watch.Stop();

            Image <Bgr, Byte> modelImage2    = new Image <Bgr, Byte>(modelImageFileName);
            Image <Bgr, Byte> observedImage2 = new Image <Bgr, Byte>(observedImageFileName);

            Image <Bgr, Byte> result = Features2DToolbox.DrawMatches(modelImage2, modelKeyPoints, observedImage2, observedKeyPoints,
                                                                     indices, new Bgr(255, 255, 255), new Bgr(255, 255, 255), mask, Features2DToolbox.KeypointDrawType.DEFAULT);

            if (homography != null)
            {
                Rectangle rect = modelImage.ROI;
                PointF[]  pts  = new PointF[] {
                    new PointF(rect.Left, rect.Bottom),
                    new PointF(rect.Right, rect.Bottom),
                    new PointF(rect.Right, rect.Top),
                    new PointF(rect.Left, rect.Top)
                };
                homography.ProjectPoints(pts);

                result.DrawPolyline(Array.ConvertAll <PointF, Point>(pts, Point.Round), true, new Bgr(Color.Red), 5);
            }
            for (int i = 0; i < observedKeyPoints.Size; ++i)
            {
                Color color      = Color.FromArgb((int)observedImage2[(int)observedKeyPoints[i].Point.Y, (int)observedKeyPoints[i].Point.X].Red, (int)observedImage2[(int)observedKeyPoints[i].Point.Y, (int)observedKeyPoints[i].Point.X].Green, (int)observedImage2[(int)observedKeyPoints[i].Point.Y, (int)observedKeyPoints[i].Point.X].Blue);
                float hue        = color.GetHue();
                float sat        = color.GetSaturation();
                float bright     = color.GetBrightness();
                float satThr     = (float)0.0f / 240.0f;
                float brightThr  = (float)40.0f / 240.0f;
                float brightThr2 = (float)15.0f / 24.0f;
                if (sat < satThr && bright < brightThr)
                {
                    continue;
                }
                if (bright > brightThr2)
                {
                    result.Draw(new CircleF(observedKeyPoints[i].Point, 4), new Bgr(Color.White), -1);
                    continue;
                }
                if (hue > 230)//red
                {
                    result.Draw(new CircleF(observedKeyPoints[i].Point, 4), new Bgr(Color.Red), -1);
                }
                //else if(hue>180)//purple
                //    result.Draw(new CircleF(observedKeyPoints[i].Point, 4), new Bgr(Color.Purple), -1);
                else if (hue > 120)//blue
                {
                    result.Draw(new CircleF(observedKeyPoints[i].Point, 4), new Bgr(Color.Blue), -1);
                }
                else if (hue > 60) //green
                {
                    result.Draw(new CircleF(observedKeyPoints[i].Point, 4), new Bgr(Color.Yellow), -1);
                }
                else if (hue > 30)//yellow
                {
                    result.Draw(new CircleF(observedKeyPoints[i].Point, 4), new Bgr(Color.Yellow), -1);
                }
                else
                {
                    result.Draw(new CircleF(observedKeyPoints[i].Point, 4), new Bgr(Color.Red), -1);
                }
            }

            matchTime = watch.ElapsedMilliseconds;

            return(result);
        }
Example #28
        public Rectangle ProcessFrame(Image<Bgr, Byte> image)
        {
            // Invalidate old ROI/projected points
            roi = Rectangle.Empty;
            projectedPoints = null;

            Image<Gray, Byte> imageToDetect = image.Convert<Gray, Byte>();

            // Detect KP and calculate descriptors...
            observedKP = surfDetector.DetectKeyPointsRaw(imageToDetect, null);
            observedDescriptors = surfDetector.ComputeDescriptorsRaw(imageToDetect, null, observedKP);

            // Matching
            int k = 2;
            indices = new Matrix<int>(observedDescriptors.Rows, k);
            dist = new Matrix<float>(observedDescriptors.Rows, k);
            matcher.KnnMatch(observedDescriptors, indices, dist, k, null);

            //
            mask = new Matrix<byte>(dist.Rows, 1);
            mask.SetValue(255);
            Features2DToolbox.VoteForUniqueness(dist, 0.8, mask);

            int nonZeroCount = CvInvoke.cvCountNonZero(mask);
            if (nonZeroCount >= 4)
            {
                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(itemKP, observedKP, indices, mask, 1.5, 20);

                // If we have enough matches, create a homography matrix.
                if (nonZeroCount >= 4)
                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(itemKP, observedKP, indices, mask, 2); //last arg is the RANSAC reprojection threshold (2 or 3 pixels is typical)
                else homography = null;
            }

            // Get keypoints.
            keyPts = new PointF[itemKP.Size];
            classes = new int[itemKP.Size];
            for (int i = 0; i < itemKP.Size; i++)
            {
                keyPts[i] = itemKP[i].Point;
                classes[i] = itemKP[i].ClassId;
            }

            prevFrame = image;

            if (homography != null)
            {
                Rectangle rect = itemImage.ROI;
                projectedPoints = new PointF[] {
                   new PointF(rect.Left, rect.Bottom),
                   new PointF(rect.Right, rect.Bottom),
                   new PointF(rect.Right, rect.Top),
                   new PointF(rect.Left, rect.Top)};
                homography.ProjectPoints(projectedPoints);

                // Get the ROI.
                PointF minXY = new PointF();
                minXY.X = float.MaxValue;
                minXY.Y = float.MaxValue;
                PointF maxXY = new PointF();
                maxXY.X = 0; maxXY.Y = 0;
                for (int i = 0; i < 4; i++)
                {
                    PointF pt = projectedPoints[i];
                    if (pt.X < minXY.X) minXY.X = pt.X;
                    if (pt.Y < minXY.Y) minXY.Y = pt.Y;
                    if (pt.X > maxXY.X) maxXY.X = pt.X;
                    if (pt.Y > maxXY.Y) maxXY.Y = pt.Y;

                    roi = new Rectangle(
                    (int)minXY.X,
                    (int)minXY.Y,
                    (int)(maxXY.X - minXY.X),
                    (int)(maxXY.Y - minXY.Y));
                }
            }

            // Debug
            //Image<Bgr, Byte> result = Features2DToolbox.DrawMatches<Gray>(itemImage, itemKP, imageToDetect, observedKP, indices, new Bgr(255, 0, 0), new Bgr(255,0,0), mask, Features2DToolbox.KeypointDrawType.DRAW_RICH_KEYPOINTS);
            //result.DrawPolyline(Array.ConvertAll<PointF, Point>(projectedPoints, Point.Round), true, new Bgr(Color.Red), 5);
            //CvInvoke.cvShowImage("test", result);

            return roi;
        }
Example #29
        public static Image <Bgr, Byte> Parallelogram(String modelImageFileName, String observedImageFileName, out long matchTime)
        {
            //Image<Gray, Byte> cannyEdges = gray.Canny(cannyThreshold, cannyThresholdLinking);
            //Load the image from file
            Image <Bgr, Byte> observedImage = new Image <Bgr, byte>(observedImageFileName);
            Stopwatch         watch;
            HomographyMatrix  homography = null;

            watch = Stopwatch.StartNew();

            Image <Gray, Byte> graySoft = observedImage.Convert <Gray, Byte>();//.PyrDown().PyrUp();
            //ImageViewer.Show(graySoft, "graysoft");
            //Image<Gray, Byte> gray = graySoft.SmoothGaussian(3);
            //ImageViewer.Show(gray, "graysoft");
            //gray = gray.AddWeighted(graySoft, 1.5, -0.5, 0);
            //ImageViewer.Show(graySoft, "graysoft");

            Gray cannyThreshold             = new Gray(149);
            Gray cannyThresholdLinking      = new Gray(149);
            Gray circleAccumulatorThreshold = new Gray(1000);

            Image <Gray, Byte> cannyEdges = graySoft.Canny(cannyThreshold, cannyThresholdLinking);
            Image <Gray, Byte> modelImage = new Image <Gray, Byte>(modelImageFileName).Canny(cannyThreshold, cannyThresholdLinking);
            SURFDetector       surfCPU    = new SURFDetector(200, false);
            VectorOfKeyPoint   modelKeyPoints;
            VectorOfKeyPoint   observedKeyPoints;
            Matrix <int>       indices;

            Matrix <byte> mask;
            int           k = 2;
            double        uniquenessThreshold = 0.99;

            //extract features from the object image
            modelKeyPoints = surfCPU.DetectKeyPointsRaw(modelImage, null);
            Matrix <float> modelDescriptors = surfCPU.ComputeDescriptorsRaw(modelImage, null, modelKeyPoints);

            watch = Stopwatch.StartNew();

            // extract features from the observed image
            observedKeyPoints = surfCPU.DetectKeyPointsRaw(cannyEdges, null);
            Matrix <float>            observedDescriptors = surfCPU.ComputeDescriptorsRaw(cannyEdges, null, observedKeyPoints);
            BruteForceMatcher <float> matcher             = new BruteForceMatcher <float>(DistanceType.L2);

            matcher.Add(modelDescriptors);

            indices = new Matrix <int>(observedDescriptors.Rows, k);
            using (Matrix <float> dist = new Matrix <float>(observedDescriptors.Rows, k))
            {
                matcher.KnnMatch(observedDescriptors, indices, dist, k, null);
                mask = new Matrix <byte>(dist.Rows, 1);
                mask.SetValue(255);
                Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
            }

            int nonZeroCount = CvInvoke.cvCountNonZero(mask);

            if (nonZeroCount >= 4)
            {
                //nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                if (nonZeroCount >= 4)
                {
                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
                }
            }

            watch.Stop();

            //Image<Bgr, Byte> modelImage2 = new Image<Bgr, Byte>(modelImageFileName);
            //Image<Bgr, Byte> observedImage2 = new Image<Bgr, Byte>(observedImageFileName);

            Image <Bgr, Byte> result = Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, cannyEdges, observedKeyPoints,
                                                                     indices, new Bgr(255, 255, 255), new Bgr(255, 255, 255), mask, Features2DToolbox.KeypointDrawType.DEFAULT);

            if (homography != null)
            {
                Rectangle rect = modelImage.ROI;
                PointF[]  pts  = new PointF[] {
                    new PointF(rect.Left, rect.Bottom),
                    new PointF(rect.Right, rect.Bottom),
                    new PointF(rect.Right, rect.Top),
                    new PointF(rect.Left, rect.Top)
                };
                homography.ProjectPoints(pts);

                result.DrawPolyline(Array.ConvertAll <PointF, Point>(pts, Point.Round), true, new Bgr(Color.Red), 5);
            }
            watch.Stop();
            matchTime = watch.ElapsedMilliseconds;
            return(result);
        }
Example #30
        /// <summary>
        /// Draw the model image and observed image, the matched features and homography projection.
        /// </summary>
        /// <param name="modelImage">The model image</param>
        /// <param name="observedImage">The observed image</param>
        /// <param name="matchTime">The output total time for computing the homography matrix.</param>
        /// <returns>The model image and observed image, the matched features and homography projection.</returns>
        public static Image <Bgr, Byte> Draw(Image <Gray, Byte> modelImage, Image <Gray, byte> observedImage, int state, out long matchTime, out int p)
        {
            Stopwatch        watch;
            HomographyMatrix homography = null;


            SURFDetector     surfCPU = new SURFDetector(500, false);
            VectorOfKeyPoint modelKeyPoints;
            VectorOfKeyPoint observedKeyPoints;
            Matrix <int>     indices;

            Matrix <byte> mask;
            int           k = 2;
            double        uniquenessThreshold = 0.8;

            if (state == 1)
            {
                uniquenessThreshold = 0.8;
            }



            //extract features from the object image
            modelKeyPoints = surfCPU.DetectKeyPointsRaw(modelImage, null);
            Matrix <float> modelDescriptors = surfCPU.ComputeDescriptorsRaw(modelImage, null, modelKeyPoints);

            watch = Stopwatch.StartNew();

            // extract features from the observed image
            observedKeyPoints = surfCPU.DetectKeyPointsRaw(observedImage, null);
            Matrix <float>            observedDescriptors = surfCPU.ComputeDescriptorsRaw(observedImage, null, observedKeyPoints);
            BruteForceMatcher <float> matcher             = new BruteForceMatcher <float>(DistanceType.L2);

            matcher.Add(modelDescriptors);

            indices = new Matrix <int>(observedDescriptors.Rows, k);
            using (Matrix <float> dist = new Matrix <float>(observedDescriptors.Rows, k))
            {
                matcher.KnnMatch(observedDescriptors, indices, dist, k, null);
                mask = new Matrix <byte>(dist.Rows, 1);
                mask.SetValue(255);
                Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
            }



            int nonZeroCount = CvInvoke.cvCountNonZero(mask);

            if (nonZeroCount >= 1)
            {
                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                if (nonZeroCount >= 1)
                {
                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
                }
            }

            watch.Stop();

            p = mask.ManagedArray.OfType <byte>().ToList().Where(q => q > 0).Count();


            //Draw the matched keypoints
            Image <Bgr, Byte> result = Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints,
                                                                     indices, new Bgr(255, 255, 255), new Bgr(255, 255, 255), mask, Features2DToolbox.KeypointDrawType.DEFAULT);

            matchTime = watch.ElapsedMilliseconds;



            return(result);
        }
Example #31
        private static void FindMatch(Image <Gray, Byte> modelImage, Image <Gray, byte> observedImage, SurfSettings surfSettings, out long matchTime, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints, out Matrix <int> indices, out Matrix <byte> mask, out HomographyMatrix homography)
        {
            #region Surf Detector Region
            double hessianThresh       = 500;
            double uniquenessThreshold = 0.8;

            if (surfSettings != null)
            {
                hessianThresh       = surfSettings.HessianThresh.Value;
                uniquenessThreshold = surfSettings.UniquenessThreshold.Value;
            }

            SURFDetector surfCPU = new SURFDetector(hessianThresh, false);
            #endregion



            int       k = 2;
            Stopwatch watch;
            homography = null;


            //extract features from the object image
            modelKeyPoints = new VectorOfKeyPoint();
            Matrix <float> modelDescriptors = surfCPU.DetectAndCompute(modelImage, null, modelKeyPoints);

            watch = Stopwatch.StartNew();

            // extract features from the observed image
            observedKeyPoints = new VectorOfKeyPoint();
            Matrix <float>            observedDescriptors = surfCPU.DetectAndCompute(observedImage, null, observedKeyPoints);
            BruteForceMatcher <float> matcher             = new BruteForceMatcher <float>(DistanceType.L2);
            matcher.Add(modelDescriptors);

            indices = new Matrix <int>(observedDescriptors.Rows, k);
            using (Matrix <float> dist = new Matrix <float>(observedDescriptors.Rows, k))
            {
                matcher.KnnMatch(observedDescriptors, indices, dist, k, null);
                mask = new Matrix <byte>(dist.Rows, 1);
                mask.SetValue(255);
                Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
            }

            int nonZeroCount = CvInvoke.cvCountNonZero(mask);
            if (nonZeroCount >= 4)
            {
                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                if (nonZeroCount >= 4)
                {
                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
                }
            }

            watch.Stop();

            matchTime = watch.ElapsedMilliseconds;
        }
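A minimal sketch of a caller that feeds FindMatch's outputs into Features2DToolbox.DrawMatches, mirroring the drawing code used in the other examples; the file paths are placeholders and the null SurfSettings falls back to the defaults above:
        //Hypothetical caller (paths are placeholders, not from the source)
        private static Image <Bgr, Byte> DrawFindMatch()
        {
            Image <Gray, Byte> modelImage    = new Image <Gray, Byte>("model.jpg");
            Image <Gray, Byte> observedImage = new Image <Gray, Byte>("scene.jpg");

            long             matchTime;
            VectorOfKeyPoint modelKeyPoints, observedKeyPoints;
            Matrix <int>     indices;
            Matrix <byte>    mask;
            HomographyMatrix homography;

            FindMatch(modelImage, observedImage, null, out matchTime, out modelKeyPoints,
                      out observedKeyPoints, out indices, out mask, out homography);

            //draw the surviving matches; a non-null homography means the model was located
            return Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints,
                                                 indices, new Bgr(255, 255, 255), new Bgr(255, 255, 255), mask,
                                                 Features2DToolbox.KeypointDrawType.DEFAULT);
        }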
Example #32
        public List <ImageRecord> QueryImage(string queryImagePath, SurfSettings surfSetting = null)
        {
            List <ImageRecord> rtnImageList = new List <ImageRecord>();

            var observerFeatureSets = SurfRepository.GetSurfRecordList();

            #region Surf Detector Region
            double hessianThresh       = 500;
            double uniquenessThreshold = 0.8;
            int    minGoodMatchPercent = 50;

            if (surfSetting != null)
            {
                hessianThresh       = surfSetting.HessianThresh.Value;
                uniquenessThreshold = surfSetting.UniquenessThreshold.Value;
                minGoodMatchPercent = surfSetting.GoodMatchThreshold.Value;
            }

            SURFDetector surfDetector = new SURFDetector(hessianThresh, false);
            #endregion

            using (Image <Gray, byte> modelImage = new Image <Gray, byte>(queryImagePath))
            {
                ImageFeature <float>[] modelFeatures = surfDetector.DetectFeatures(modelImage, null);

                if (modelFeatures.Length < 4)
                {
                    throw new InvalidOperationException("Model image didn't have any significant features to detect");
                }

                Features2DTracker <float> tracker = new Features2DTracker <float>(modelFeatures);
                foreach (var surfRecord in observerFeatureSets)
                {
                    string queryImageName = System.IO.Path.GetFileName(queryImagePath);
                    string modelImageName = surfRecord.ImageName;

                    Features2DTracker <float> .MatchedImageFeature[] matchedFeatures = tracker.MatchFeature(surfRecord.observerFeatures, 2);

                    Features2DTracker <float> .MatchedImageFeature[] uniqueFeatures = Features2DTracker <float> .VoteForUniqueness(matchedFeatures, uniquenessThreshold);

                    Features2DTracker <float> .MatchedImageFeature[] uniqueRotOriFeatures = Features2DTracker <float> .VoteForSizeAndOrientation(uniqueFeatures, 1.5, 20);

                    int goodMatchCount = 0;
                    goodMatchCount = uniqueRotOriFeatures.Length;
                    bool isMatch = false;

                    double totalnumberOfModelFeature = modelFeatures.Length;
                    double matchPercentage           = ((totalnumberOfModelFeature - (double)goodMatchCount) / totalnumberOfModelFeature);
                    matchPercentage = (1 - matchPercentage) * 100;
                    matchPercentage = Math.Round(matchPercentage);
                    if (matchPercentage >= minGoodMatchPercent)
                    {
                        HomographyMatrix homography =
                            Features2DTracker <float> .GetHomographyMatrixFromMatchedFeatures(uniqueRotOriFeatures);

                        if (homography != null)
                        {
                            isMatch = homography.IsValid(5);
                            if (isMatch)
                            {
                                surfRecord.Distance = matchPercentage;
                                rtnImageList.Add((ImageRecord)surfRecord);
                            }
                        }
                    }

                    //bool isMatch = false;
                    //if (uniqueFeatures.Length > 4)
                    //{
                    //    HomographyMatrix homography =
                    //        Features2DTracker<float>.GetHomographyMatrixFromMatchedFeatures(uniqueRotOriFeatures);
                    //    if (homography != null)
                    //    {
                    //        isMatch = homography.IsValid(5);
                    //    }
                    //}

                    //if (isMatch)
                    //{
                    //    surfRecord.Distance = goodMatchCount;
                    //    rtnImageList.Add((ImageRecord)surfRecord);
                    //}

                    //int goodMatchCount = 0;
                    //foreach (Features2DTracker<float>.MatchedImageFeature ms in matchedFeatures)
                    //{
                    //    if (ms.SimilarFeatures[0].Distance < uniquenessThreshold)
                    //        goodMatchCount++;
                    //}



                    //double totalnumberOfModelFeature = modelFeatures.Length;
                    //double matchPercentage = ((totalnumberOfModelFeature - (double)goodMatchCount) / totalnumberOfModelFeature);
                    //matchPercentage = (1 - matchPercentage) * 100;
                    //matchPercentage = Math.Round(matchPercentage);
                    //if (matchPercentage >= minGoodMatchPercent)
                    //{
                    //    surfRecord.Distance = matchPercentage;
                    //    rtnImageList.Add((ImageRecord)surfRecord);
                    //}
                }
            }
            rtnImageList = rtnImageList.OrderByDescending(x => x.Distance).ToList();
            return(rtnImageList);
        }
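The percentage logic above simplifies to goodMatchCount / totalModelFeatures * 100; a worked illustration with made-up numbers:
        //Illustrative only: 200 model features, 120 of which survive the uniqueness and orientation votes.
        //  matchPercentage = (1 - ((200 - 120) / 200)) * 100 = 60
        //  With the default minGoodMatchPercent of 50 this record is kept, provided the
        //  homography it yields passes homography.IsValid(5).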
Example #33
        public static void FindMatch(Image <Gray, Byte> modelImage, Image <Gray, byte> observedImage, out long matchTime, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints, VectorOfVectorOfDMatch matches, out Matrix <byte> mask, out HomographyMatrix homography)
        {
            int    k = 2;
            double uniquenessThreshold = 0.8;
            double hessianThresh       = 300;

            Stopwatch watch;

            homography = null;

            modelKeyPoints    = new VectorOfKeyPoint();
            observedKeyPoints = new VectorOfKeyPoint();

         #if !IOS
            if (CudaInvoke.HasCuda)
            {
                CudaSURFDetector surfCuda = new CudaSURFDetector((float)hessianThresh);
                using (GpuMat gpuModelImage = new GpuMat(modelImage))
                    //extract features from the object image
                    using (GpuMat gpuModelKeyPoints = surfCuda.DetectKeyPointsRaw(gpuModelImage, null))
                        using (GpuMat gpuModelDescriptors = surfCuda.ComputeDescriptorsRaw(gpuModelImage, null, gpuModelKeyPoints))
                            using (CudaBruteForceMatcher matcher = new CudaBruteForceMatcher(DistanceType.L2))
                            {
                                surfCuda.DownloadKeypoints(gpuModelKeyPoints, modelKeyPoints);
                                watch = Stopwatch.StartNew();

                                // extract features from the observed image
                                using (GpuMat gpuObservedImage = new GpuMat(observedImage))
                                    using (GpuMat gpuObservedKeyPoints = surfCuda.DetectKeyPointsRaw(gpuObservedImage, null))
                                        using (GpuMat gpuObservedDescriptors = surfCuda.ComputeDescriptorsRaw(gpuObservedImage, null, gpuObservedKeyPoints))
                                        //using (GpuMat tmp = new GpuMat())
                                        //using (Stream stream = new Stream())
                                        {
                                            matcher.KnnMatch(gpuObservedDescriptors, gpuModelDescriptors, matches, k);

                                            surfCuda.DownloadKeypoints(gpuObservedKeyPoints, observedKeyPoints);

                                            mask = new Matrix <byte>(matches.Size, 1);
                                            mask.SetValue(255);
                                            Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

                                            int nonZeroCount = CvInvoke.CountNonZero(mask);
                                            if (nonZeroCount >= 4)
                                            {
                                                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints,
                                                                                                           matches, mask, 1.5, 20);
                                                if (nonZeroCount >= 4)
                                                {
                                                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints,
                                                                                                                          observedKeyPoints, matches, mask, 2);
                                                }
                                            }
                                        }
                                watch.Stop();
                            }
            }

            else
         #endif
            {
                using (UMat uModelImage = modelImage.Mat.ToUMat(AccessType.Read))
                    using (UMat uObservedImage = observedImage.Mat.ToUMat(AccessType.Read))
                    {
                        SURFDetector surfCPU = new SURFDetector(hessianThresh);
                        //extract features from the object image
                        UMat modelDescriptors = new UMat();
                        surfCPU.DetectAndCompute(uModelImage, null, modelKeyPoints, modelDescriptors, false);

                        watch = Stopwatch.StartNew();

                        // extract features from the observed image

                        UMat observedDescriptors = new UMat();
                        surfCPU.DetectAndCompute(uObservedImage, null, observedKeyPoints, observedDescriptors, false);
                        BruteForceMatcher matcher = new BruteForceMatcher(DistanceType.L2);
                        matcher.Add(modelDescriptors);

                        matcher.KnnMatch(observedDescriptors, matches, k, null);
                        mask = new Matrix <byte>(matches.Size, 1);
                        mask.SetValue(255);
                        Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

                        int nonZeroCount = CvInvoke.CountNonZero(mask);
                        if (nonZeroCount >= 4)
                        {
                            nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints,
                                                                                       matches, mask, 1.5, 20);
                            if (nonZeroCount >= 4)
                            {
                                homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints,
                                                                                                      observedKeyPoints, matches, mask, 2);
                            }
                        }

                        watch.Stop();
                    }
            }
            matchTime = watch.ElapsedMilliseconds;
        }
 /// <summary>
 /// Recover the homography matrix using RANSAC. If the matrix cannot be recovered, null is returned.
 /// </summary>
 /// <param name="model">The model keypoints</param>
 /// <param name="observed">The observed keypoints</param>
 /// <param name="matchIndices">The match indices</param>
 /// <param name="ransacReprojThreshold">
 /// The maximum allowed reprojection error to treat a point pair as an inlier. 
 /// If srcPoints and dstPoints are measured in pixels, it usually makes sense to set this parameter somewhere in the range 1 to 10.
 /// </param>
 /// <param name="mask">
 /// The mask matrix of which the value might be modified by the function. 
 /// As input, if the value is 0, the corresponding match will be ignored when computing the homography matrix. 
 /// If the value is 1 and RANSAC determines the match is an outlier, the value will be set to 0.
 /// </param>
 /// <returns>The homography matrix, if it cannot be found, null is returned</returns>
 public static HomographyMatrix GetHomographyMatrixFromMatchedFeatures(VectorOfKeyPoint model, VectorOfKeyPoint observed, Matrix<int> matchIndices, Matrix<Byte> mask, double ransacReprojThreshold)
 {
     HomographyMatrix homography = new HomographyMatrix();
     bool found = getHomographyMatrixFromMatchedFeatures(model, observed, matchIndices, mask, ransacReprojThreshold, homography);
     if (found)
     {
         return homography;
     }
     else
     {
         homography.Dispose();
         return null;
     }
 }
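A minimal usage sketch for the method above (the helper name is hypothetical; modelKeyPoints, observedKeyPoints, matchIndices and mask are assumed to come from a k-NN match followed by VoteForUniqueness, as in the surrounding examples). Because the mask is both the input filter and the output inlier map, counting its non-zero entries before and after the call shows how many matches survive RANSAC:
 private static HomographyMatrix TryRecoverHomography(VectorOfKeyPoint modelKeyPoints, VectorOfKeyPoint observedKeyPoints, Matrix<int> matchIndices, Matrix<byte> mask)
 {
     // at least 4 point pairs are required to estimate a homography
     if (CvInvoke.cvCountNonZero(mask) < 4)
         return null;

     HomographyMatrix homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(
         modelKeyPoints, observedKeyPoints, matchIndices, mask, 2);

     // RANSAC zeroes the mask rows it rejected, so this is the inlier count
     int inlierCount = CvInvoke.cvCountNonZero(mask);
     if (homography != null && inlierCount < 4)
     {
         homography.Dispose();
         homography = null;
     }
     return homography;
 }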
Example #35
0
        private void ProcessFrame(object sender, EventArgs arg)
        {
            Image <Bgr, Byte>  frame         = _capture.QueryFrame().Resize(320, 240, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
            Image <Gray, Byte> grayframe     = frame.Convert <Gray, Byte>();
            Image <Gray, Byte> modelImage    = new Image <Gray, byte>("DataPlate/" + 10 + ".jpg");
            Image <Gray, Byte> observedImage = grayframe;
            Stopwatch          watch;
            HomographyMatrix   homography = null;
            SURFDetector       surfCPU    = new SURFDetector(500, false);
            VectorOfKeyPoint   modelKeyPoints;
            VectorOfKeyPoint   observedKeyPoints;
            Matrix <int>       indices;
            Matrix <float>     dist;
            Matrix <byte>      mask;


            if (GpuInvoke.HasCuda)
            {
                GpuSURFDetector surfGPU = new GpuSURFDetector(surfCPU.SURFParams, 0.01f);
                using (GpuImage <Gray, Byte> gpuModelImage = new GpuImage <Gray, byte>(modelImage))

                    #region SURF
                    //extract features from the object image
                    using (GpuMat <float> gpuModelKeyPoints = surfGPU.DetectKeyPointsRaw(gpuModelImage, null))
                        using (GpuMat <float> gpuModelDescriptors = surfGPU.ComputeDescriptorsRaw(gpuModelImage, null, gpuModelKeyPoints))
                            using (GpuBruteForceMatcher matcher = new GpuBruteForceMatcher(GpuBruteForceMatcher.DistanceType.L2))
                            {
                                modelKeyPoints = new VectorOfKeyPoint();
                                surfGPU.DownloadKeypoints(gpuModelKeyPoints, modelKeyPoints);
                                watch = Stopwatch.StartNew();

                                // extract features from the observed image
                                using (GpuImage <Gray, Byte> gpuObservedImage = new GpuImage <Gray, byte>(observedImage))
                                    using (GpuMat <float> gpuObservedKeyPoints = surfGPU.DetectKeyPointsRaw(gpuObservedImage, null))
                                        using (GpuMat <float> gpuObservedDescriptors = surfGPU.ComputeDescriptorsRaw(gpuObservedImage, null, gpuObservedKeyPoints))
                                            using (GpuMat <int> gpuMatchIndices = new GpuMat <int>(gpuObservedDescriptors.Size.Height, 2, 1))
                                                using (GpuMat <float> gpuMatchDist = new GpuMat <float>(gpuMatchIndices.Size, 1))
                                                {
                                                    observedKeyPoints = new VectorOfKeyPoint();
                                                    surfGPU.DownloadKeypoints(gpuObservedKeyPoints, observedKeyPoints);
                                                    matcher.KnnMatch(gpuObservedDescriptors, gpuModelDescriptors, gpuMatchIndices, gpuMatchDist, 2, null);
                                                    indices = new Matrix <int>(gpuMatchIndices.Size);
                                                    dist    = new Matrix <float>(indices.Size);
                                                    gpuMatchIndices.Download(indices);
                                                    gpuMatchDist.Download(dist);

                                                    mask = new Matrix <byte>(dist.Rows, 1);

                                                    mask.SetValue(255);

                                                    Features2DTracker.VoteForUniqueness(dist, 0.8, mask);

                                                    int nonZeroCount = CvInvoke.cvCountNonZero(mask);
                                                    if (nonZeroCount >= 4)
                                                    {
                                                        nonZeroCount = Features2DTracker.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                                                        if (nonZeroCount >= 4)
                                                        {
                                                            homography = Features2DTracker.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 3);
                                                        }
                                                    }

                                                    watch.Stop();
                                                }
                            }
                #endregion
            }
            else
            {
                //extract features from the object image
                modelKeyPoints = surfCPU.DetectKeyPointsRaw(modelImage, null);
                //MKeyPoint[] kpts = modelKeyPoints.ToArray();
                Matrix <float> modelDescriptors = surfCPU.ComputeDescriptorsRaw(modelImage, null, modelKeyPoints);
                watch = Stopwatch.StartNew();

                // extract features from the observed image
                observedKeyPoints = surfCPU.DetectKeyPointsRaw(observedImage, null);
                Matrix <float>    observedDescriptors = surfCPU.ComputeDescriptorsRaw(observedImage, null, observedKeyPoints);
                BruteForceMatcher matcher             = new BruteForceMatcher(BruteForceMatcher.DistanceType.L2F32);
                matcher.Add(modelDescriptors);
                int k = 2;
                indices = new Matrix <int>(observedDescriptors.Rows, k);

                dist = new Matrix <float>(observedDescriptors.Rows, k);
                matcher.KnnMatch(observedDescriptors, indices, dist, k, null);

                mask = new Matrix <byte>(dist.Rows, 1);

                mask.SetValue(255);

                Features2DTracker.VoteForUniqueness(dist, 0.8, mask);

                int nonZeroCount = CvInvoke.cvCountNonZero(mask);
                if (nonZeroCount >= 20)
                {
                    nonZeroCount = Features2DTracker.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                    if (nonZeroCount >= 20)
                    {
                        homography = Features2DTracker.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 3);
                        XMLData();
                    }
                    else
                    {
                        textBox1.Text = string.Empty;
                        textBox2.Text = string.Empty;
                        textBox3.Text = string.Empty;
                        textBox4.Text = string.Empty;
                        textBox5.Text = string.Empty;
                    }
                }
                watch.Stop();
                #region draw the projected region on the image
                if (homography != null)
                {  //draw a rectangle along the projected model
                    Rectangle rect = modelImage.ROI;
                    PointF[]  pts  = new PointF[] {
                        new PointF(rect.Left, rect.Bottom),
                        new PointF(rect.Right, rect.Bottom),
                        new PointF(rect.Right, rect.Top),
                        new PointF(rect.Left, rect.Top)
                    };
                    homography.ProjectPoints(pts);
                    frame.DrawPolyline(Array.ConvertAll <PointF, Point>(pts, Point.Round), true, new Bgr(Color.Red), 2);
                }
                #endregion
                CaptureImageBox.Image = frame;
                DataImageBox.Image    = modelImage;
            }
        }
Example #36
0
        private void RemovePerspective(OLSRegression regression)
        {
            float tBit = 300;
            float B = regression.B;
            float A = regression.A;
            float x1 = A * (tBit / (B + tBit));
            float x2 = A + ((400 - A) * (B / (B + tBit)));

            PointF[] srcs = new PointF[4];
            srcs[0] = new PointF((float)x1, 0);
            srcs[1] = new PointF((float)x2, 0);
            srcs[2] = new PointF(400, tBit);
            srcs[3] = new PointF(0, tBit);

            PointF[] dsts = new PointF[4];
            dsts[0] = new PointF(0, 0);
            dsts[1] = new PointF(400, 0);
            dsts[2] = new PointF(400, 300);
            dsts[3] = new PointF(0, 300);
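            // (the warp below maps this trapezoid, srcs, onto the upright 400x300 rectangle, dsts)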

            HomographyMatrix warpMat = CameraCalibration.GetPerspectiveTransform(srcs, dsts);
            WarpedImage = BoardImage.WarpPerspective(warpMat, 400, 300,
                Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC,
                Emgu.CV.CvEnum.WARP.CV_WARP_FILL_OUTLIERS,
                GetBottomBorderColor(BoardImage));

            m_InverseWarpMatrix = CameraCalibration.GetPerspectiveTransform(dsts, srcs);
            //WarpedImage = WarpedImage.WarpPerspective(m_InverseWarpMatrix, 400, 300,
            //    Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC,
            //    Emgu.CV.CvEnum.WARP.CV_WARP_FILL_OUTLIERS,
            //    GetBottomBorderColor(BoardImage));
        }
Example #37
0
        private void ProcessFrameFindFaces()
        {
            var stereoCalibration = Options.StereoCalibrationOptions;

            if (stereoCalibration == null)
            {
                return;
            }

            var leftImageR  = new Image <Gray, byte>(_cameras[0].Image.Width, _cameras[0].Image.Height);
            var rightImageR = new Image <Gray, byte>(_cameras[1].Image.Width, _cameras[1].Image.Height);

            try
            {
                CvInvoke.cvRemap(_cameras[0].Image.Ptr, leftImageR.Ptr,
                                 stereoCalibration.MapXLeft, stereoCalibration.MapYLeft, 0, new MCvScalar(0));
            }
            catch (Exception ex)
            {
            }

            CvInvoke.cvRemap(_cameras[1].Image.Ptr, rightImageR.Ptr,
                             stereoCalibration.MapXRight, stereoCalibration.MapYRight, 0, new MCvScalar(0));

            // find first face points
            var leftFaceRegions  = Helper2D.GetFaceRegion2Ds(leftImageR, FaceWidth, FaceHeight, true, true);
            var rightFaceRegions = Helper2D.GetFaceRegion2Ds(rightImageR, FaceWidth, FaceHeight, true, true);

            FaceRegion2D leftFace;
            FaceRegion2D rightFace;

            if (leftFaceRegions != null &&
                rightFaceRegions != null &&
                (leftFace = leftFaceRegions.FirstOrDefault()) != null &&
                (rightFace = rightFaceRegions.FirstOrDefault()) != null)
            {
                if (leftFace.EyeAngle != 0)
                {
                    _leftRoll = leftFace.EyeAngle;
                }
                if (rightFace.EyeAngle != 0)
                {
                    _rightRoll = rightFace.EyeAngle;
                }

                var leftPoints  = new Point[4]; // face location, left eye, right eye, mouth
                var rightPoints = new Point[4];

                #region Points

                // face
                leftPoints[0]  = new Point(leftFace.Face.Location.X + leftFace.Face.Width / 2, leftFace.Face.Location.Y + leftFace.Face.Height / 2);
                rightPoints[0] = new Point(rightFace.Face.Location.X + rightFace.Face.Width / 2, rightFace.Face.Location.Y + rightFace.Face.Height / 2);

                // left eye
                if (leftFace.LeftEye != null && rightFace.LeftEye != null)
                {
                    leftPoints[1] = new Point(leftFace.Face.Location.X + leftFace.LeftEye.Location.X + leftFace.LeftEye.Width / 2,
                                              leftFace.Face.Location.Y + leftFace.LeftEye.Location.Y + leftFace.LeftEye.Height / 2);

                    rightPoints[1] = new Point(rightFace.Face.Location.X + rightFace.LeftEye.Location.X + rightFace.LeftEye.Width / 2,
                                               rightFace.Face.Location.Y + rightFace.LeftEye.Location.Y + rightFace.LeftEye.Height / 2);
                }

                // right eye
                if (leftFace.RightEye != null && rightFace.RightEye != null)
                {
                    leftPoints[2] = new Point(leftFace.Face.Location.X + leftFace.RightEye.Location.X + leftFace.RightEye.Width / 2,
                                              leftFace.Face.Location.Y + leftFace.RightEye.Location.Y + leftFace.RightEye.Height / 2);

                    rightPoints[2] = new Point(rightFace.Face.Location.X + rightFace.RightEye.Location.X + rightFace.RightEye.Width / 2,
                                               rightFace.Face.Location.Y + rightFace.RightEye.Location.Y + rightFace.RightEye.Height / 2);
                }

                // mouth
                if (leftFace.Mouth != null && rightFace.Mouth != null)
                {
                    leftPoints[3] = new Point(leftFace.Face.Location.X + leftFace.Mouth.Location.X + leftFace.Mouth.Width / 2,
                                              leftFace.Face.Location.Y + leftFace.Mouth.Location.Y + leftFace.Mouth.Height / 2);

                    rightPoints[3] = new Point(rightFace.Face.Location.X + rightFace.Mouth.Location.X + rightFace.Mouth.Width / 2,
                                               rightFace.Face.Location.Y + rightFace.Mouth.Location.Y + rightFace.Mouth.Height / 2);
                }

                #endregion

                #region Manual Point Cloud Calculation

                {
                    var pointCloud = new MCvPoint3D64f[leftPoints.Length];

                    #region Calculate Point Cloud
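                    // Reprojection via the rectification matrix Q: [X Y Z W]^T = Q * [x y disparity 1]^T, then divide by W.
                    // The code below relies on the usual sparse form of Q from stereo rectification
                    // (only Q[0,0], Q[0,3], Q[1,1], Q[1,3], Q[2,3], Q[3,2] and Q[3,3] are non-zero).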

                    for (int i = 0; i < leftPoints.Length; i++)
                    {
                        if (leftPoints[i].X == 0 && leftPoints[i].Y == 0)
                        {
                            continue;
                        }

                        var d = rightPoints[i].X - leftPoints[i].X;

                        var X = leftPoints[i].X * stereoCalibration.Q[0, 0] + stereoCalibration.Q[0, 3];
                        var Y = leftPoints[i].Y * stereoCalibration.Q[1, 1] + stereoCalibration.Q[1, 3];
                        var Z = stereoCalibration.Q[2, 3];
                        var W = d * stereoCalibration.Q[3, 2] + stereoCalibration.Q[3, 3];

                        X = X / W;
                        Y = Y / W;
                        Z = Z / W;

                        leftImageR.Draw(string.Format("{0:0.0} {1:0.0} {2:0.0}", X, Y, Z), ref _font, leftPoints[i], new Gray(255));
                        rightImageR.Draw(string.Format("{0:0.0} {1:0.0} {2:0.0}", X, Y, Z), ref _font, rightPoints[i], new Gray(255));

                        pointCloud[i] = new MCvPoint3D64f(X, Y, Z);
                    }

                    #endregion


                    _foundFace3d = new Face3D()
                    {
                        Location = pointCloud[0].x == 0 && pointCloud[0].y == 0 && pointCloud[0].z == 0 ? (MCvPoint3D64f?)null : pointCloud[0],
                        LeftEye  = pointCloud[1].x == 0 && pointCloud[1].y == 0 && pointCloud[1].z == 0 ? (MCvPoint3D64f?)null : pointCloud[1],
                        RightEye = pointCloud[2].x == 0 && pointCloud[2].y == 0 && pointCloud[2].z == 0 ? (MCvPoint3D64f?)null : pointCloud[2],
                        Mouth    = pointCloud[3].x == 0 && pointCloud[3].y == 0 && pointCloud[3].z == 0 ? (MCvPoint3D64f?)null : pointCloud[3],
                    };

                    if (_foundFace3d.LeftEye != null &&
                        _foundFace3d.RightEye != null &&
                        _foundFace3d.Mouth != null)
                    {
                        var srcMatrix = new Matrix <float>(3, 4);

                        srcMatrix[0, 0] = (float)_foundFace3d.LeftEye.Value.x;
                        srcMatrix[1, 0] = (float)_foundFace3d.LeftEye.Value.y;
                        srcMatrix[2, 0] = (float)_foundFace3d.LeftEye.Value.z;

                        srcMatrix[0, 1] = (float)_foundFace3d.RightEye.Value.x;
                        srcMatrix[1, 1] = (float)_foundFace3d.RightEye.Value.y;
                        srcMatrix[2, 1] = (float)_foundFace3d.RightEye.Value.z;

                        srcMatrix[0, 2] = (float)_foundFace3d.Mouth.Value.x;
                        srcMatrix[1, 2] = (float)_foundFace3d.Mouth.Value.y;
                        srcMatrix[2, 2] = (float)_foundFace3d.Mouth.Value.z;

                        srcMatrix[0, 3] = (float)_foundFace3d.Location.Value.x;
                        srcMatrix[1, 3] = (float)_foundFace3d.Location.Value.y;
                        srcMatrix[2, 3] = (float)_foundFace3d.Location.Value.z;


                        var dstMatrix = new Matrix <float>(3, 4);

                        dstMatrix[0, 0] = (float)_foundFace3d.LeftEye.Value.x;
                        dstMatrix[1, 0] = (float)_foundFace3d.LeftEye.Value.y;
                        dstMatrix[2, 0] = (float)30;

                        dstMatrix[0, 1] = (float)_foundFace3d.RightEye.Value.x;
                        dstMatrix[1, 1] = (float)_foundFace3d.RightEye.Value.y;
                        dstMatrix[2, 1] = (float)30;

                        dstMatrix[0, 2] = (float)_foundFace3d.Mouth.Value.x;
                        dstMatrix[1, 2] = (float)_foundFace3d.Mouth.Value.y;
                        dstMatrix[2, 2] = (float)30;

                        dstMatrix[0, 3] = (float)_foundFace3d.Location.Value.x;
                        dstMatrix[1, 3] = (float)_foundFace3d.Location.Value.y;
                        dstMatrix[2, 3] = (float)30;

                        HomographyMatrix homographyMatrix = CameraCalibration.FindHomography(srcMatrix, dstMatrix, HOMOGRAPHY_METHOD.DEFAULT, 1);

                        if (homographyMatrix != null)
                        {
                            try
                            {
                                leftImageR = leftImageR.WarpPerspective(homographyMatrix, INTER.CV_INTER_LINEAR, WARP.CV_WARP_DEFAULT, new Gray(0));
                            }
                            catch (Exception ex)
                            {
                            }
                        }
                    }
                }

                #endregion

                #region Automatic Point Cloud

                {
                    _imagePointsDisparity = new Image <Gray, byte>(_cameras[0].Image.Width, _cameras[0].Image.Height);
                    _imagePointsLeft      = new Image <Gray, byte>(_cameras[0].Image.Width, _cameras[0].Image.Height, new Gray(255));
                    _imagePointsRight     = new Image <Gray, byte>(_cameras[0].Image.Width, _cameras[0].Image.Height, new Gray(255));

                    for (int i = 0; i < leftPoints.Length; i++)
                    {
                        if (leftPoints[i].X == 0 && leftPoints[i].Y == 0)
                        {
                            continue;
                        }

                        _imagePointsLeft.Draw(new Rectangle(new Point(leftPoints[i].X, leftPoints[i].Y), new Size(10, 10)), new Gray(0), 10);
                        _imagePointsRight.Draw(new Rectangle(new Point(rightPoints[i].X, rightPoints[i].Y), new Size(10, 10)), new Gray(0), 10);
                    }

                    var imagePointsDisparityGpu = new GpuImage <Gray, byte>(_imagePointsDisparity);

                    _stereoSolver.FindStereoCorrespondence(new GpuImage <Gray, byte>(_imagePointsLeft), new GpuImage <Gray, byte>(_imagePointsRight),
                                                           imagePointsDisparityGpu, null);

                    _imagePointsDisparity = imagePointsDisparityGpu.ToImage();



                    //MCvPoint3D32f[] pointCloud = PointCollection.ReprojectImageTo3D(_imagePointsDisparity, stereoCalibration.Q);

                    //var filteredPointCloud = pointCloud.
                    //    Where(item => item.z != 10000).
                    //    GroupBy(item => item.z).
                    //    Select(item => new
                    //    {
                    //        z = item.Key,
                    //        x = item.Average(point => point.x),
                    //        y = item.Average(point => point.y)
                    //    }).ToArray();

                    //for (int i = 0; i < filteredPointCloud.Length; i++)
                    //{
                    //    _imagePointsDisparity.Draw(string.Format("{0:0.0} {1:0.0} {2:0.0}", filteredPointCloud[i].x, filteredPointCloud[i].y, filteredPointCloud[i].z),
                    //        ref _font, new Point((int)filteredPointCloud[i].x, (int)filteredPointCloud[i].y), new Gray(255));
                    //}
                }

                #endregion
            }

            var oldLeft  = _cameras[0].Image;
            var oldRight = _cameras[1].Image;

            _cameras[0].Image = leftImageR;
            _cameras[1].Image = rightImageR;

            oldLeft.Dispose();
            oldRight.Dispose();
        }
Example #38
0
        public static Tuple <Image <Bgr, byte>, HomographyMatrix> DrawHomography(Image <Gray, byte> model, Image <Gray, byte> observed, double uniquenessThreshold, int TM, int hessianThreshould)
        {
            HomographyMatrix  homography = null;
            Image <Bgr, Byte> result     = null;
            // SURF detector
            SURFDetector surfCPU = new SURFDetector(hessianThreshould, false);

            VectorOfKeyPoint modelKeyPoints;
            VectorOfKeyPoint observedKeyPoints;
            Matrix <int>     indices;
            Matrix <byte>    mask;
            int k = 2;

            try
            {
                result = observed.Convert <Bgr, byte>();
            }
            catch (Exception)
            { }

            try
            {
                modelKeyPoints = surfCPU.DetectKeyPointsRaw(model, null); // Extract features from the object image
                Matrix <float> modelDescriptors = surfCPU.ComputeDescriptorsRaw(model, null, modelKeyPoints);

                observedKeyPoints = surfCPU.DetectKeyPointsRaw(observed, null); // Extract features from the observed image

                if (modelKeyPoints.Size <= 0)
                {
                    throw new System.ArgumentException("Can't find any keypoints in your model image!");
                }

                if (observedKeyPoints.Size > 0)
                {
                    Matrix <float> observedDescriptors = surfCPU.ComputeDescriptorsRaw(observed, null, observedKeyPoints);

                    BruteForceMatcher <float> matcher = new BruteForceMatcher <float>(DistanceType.L2);
                    matcher.Add(modelDescriptors);

                    indices = new Matrix <int>(observedDescriptors.Rows, k);

                    using (Matrix <float> dist = new Matrix <float>(observedDescriptors.Rows, k))
                    {
                        matcher.KnnMatch(observedDescriptors, indices, dist, k, null);
                        mask = new Matrix <byte>(dist.Rows, 1);
                        mask.SetValue(255);
                        Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
                    }



                    int nonZeroCount = CvInvoke.cvCountNonZero(mask);
                    if (nonZeroCount >= TM)
                    {
                        nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                        if (nonZeroCount >= TM)
                        {
                            homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
                        }
                    }

                    result = Features2DToolbox.DrawMatches(model, modelKeyPoints, observed, observedKeyPoints,
                                                           indices, new Bgr(12, 200, 214), new Bgr(255, 255, 255), mask, Features2DToolbox.KeypointDrawType.DEFAULT);
                }
                return(new Tuple <Image <Bgr, byte>, HomographyMatrix>(result, homography));
            }
            catch (Exception)
            {
                throw;
            }
        }
Example #39
0
        public void TestBruteForceHammingDistance()
        {
            if (CudaInvoke.HasCuda)
            {
                Image <Gray, byte>       box   = new Image <Gray, byte>("box.png");
                FastDetector             fast  = new FastDetector(100, true);
                BriefDescriptorExtractor brief = new BriefDescriptorExtractor(32);

                #region extract features from the object image
                Stopwatch        stopwatch      = Stopwatch.StartNew();
                VectorOfKeyPoint modelKeypoints = new VectorOfKeyPoint();
                fast.DetectRaw(box, modelKeypoints);
                Mat modelDescriptors = new Mat();
                brief.Compute(box, modelKeypoints, modelDescriptors);
                stopwatch.Stop();
                Trace.WriteLine(String.Format("Time to extract feature from model: {0} milli-sec", stopwatch.ElapsedMilliseconds));
                #endregion

                Image <Gray, Byte> observedImage = new Image <Gray, byte>("box_in_scene.png");

                #region extract features from the observed image
                stopwatch.Reset(); stopwatch.Start();
                VectorOfKeyPoint observedKeypoints = new VectorOfKeyPoint();
                fast.DetectRaw(observedImage, observedKeypoints);
                Mat observedDescriptors = new Mat();
                brief.Compute(observedImage, observedKeypoints, observedDescriptors);
                stopwatch.Stop();
                Trace.WriteLine(String.Format("Time to extract feature from image: {0} milli-sec", stopwatch.ElapsedMilliseconds));
                #endregion

                HomographyMatrix homography = null;
                using (GpuMat <Byte> gpuModelDescriptors = new GpuMat <byte>(modelDescriptors)) //initialization of GPU code might take a longer time.
                {
                    stopwatch.Reset(); stopwatch.Start();
                    CudaBruteForceMatcher hammingMatcher = new CudaBruteForceMatcher(DistanceType.Hamming);

                    //BruteForceMatcher hammingMatcher = new BruteForceMatcher(BruteForceMatcher.DistanceType.Hamming, modelDescriptors);
                    int            k        = 2;
                    Matrix <int>   trainIdx = new Matrix <int>(observedKeypoints.Size, k);
                    Matrix <float> distance = new Matrix <float>(trainIdx.Size);

                    using (GpuMat <Byte> gpuObservedDescriptors = new GpuMat <byte>(observedDescriptors))
                        //using (GpuMat<int> gpuTrainIdx = new GpuMat<int>(trainIdx.Rows, trainIdx.Cols, 1, true))
                        //using (GpuMat<float> gpuDistance = new GpuMat<float>(distance.Rows, distance.Cols, 1, true))
                        using (VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch())
                        {
                            Stopwatch w2 = Stopwatch.StartNew();
                            hammingMatcher.KnnMatch(gpuObservedDescriptors, gpuModelDescriptors, matches, k);
                            w2.Stop();
                            Trace.WriteLine(String.Format("Time for feature matching (excluding data transfer): {0} milli-sec",
                                                          w2.ElapsedMilliseconds));
                            //gpuTrainIdx.Download(trainIdx);
                            //gpuDistance.Download(distance);


                            Matrix <Byte> mask = new Matrix <byte>(distance.Rows, 1);
                            mask.SetValue(255);
                            Features2DToolbox.VoteForUniqueness(matches, 0.8, mask);

                            int nonZeroCount = CvInvoke.CountNonZero(mask);
                            if (nonZeroCount >= 4)
                            {
                                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeypoints, observedKeypoints,
                                                                                           matches, mask, 1.5, 20);
                                if (nonZeroCount >= 4)
                                {
                                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeypoints,
                                                                                                          observedKeypoints, matches, mask, 2);
                                }
                                nonZeroCount = CvInvoke.CountNonZero(mask);
                            }

                            stopwatch.Stop();
                            Trace.WriteLine(String.Format("Time for feature matching (including data transfer): {0} milli-sec",
                                                          stopwatch.ElapsedMilliseconds));
                        }
                }

                if (homography != null)
                {
                    Rectangle rect = box.ROI;
                    PointF[]  pts  = new PointF[] {
                        new PointF(rect.Left, rect.Bottom),
                        new PointF(rect.Right, rect.Bottom),
                        new PointF(rect.Right, rect.Top),
                        new PointF(rect.Left, rect.Top)
                    };

                    PointF[] points = pts.Clone() as PointF[];
                    homography.ProjectPoints(points);

                    //Merge the object image and the observed image into one big image for display
                    Image <Gray, Byte> res = box.ConcateVertical(observedImage);

                    for (int i = 0; i < points.Length; i++)
                    {
                        points[i].Y += box.Height;
                    }
                    res.DrawPolyline(Array.ConvertAll <PointF, Point>(points, Point.Round), true, new Gray(255.0), 5);
                    //ImageViewer.Show(res);
                }
            }
        }
Example #40
0
    public Image <Bgr, Byte> Drawtwo(Image <Gray, Byte> modelImage, Image <Gray, byte> observedImage)
    {
        HomographyMatrix homography = null;

        FastDetector     fastCPU = new FastDetector(10, true);
        VectorOfKeyPoint modelKeyPoints;
        VectorOfKeyPoint observedKeyPoints;
        Matrix <int>     indices;

        BriefDescriptorExtractor descriptor = new BriefDescriptorExtractor();

        Matrix <byte> mask;
        int           k = 2;
        double        uniquenessThreshold = 0.8;

        //extract features from the object image
        modelKeyPoints = fastCPU.DetectKeyPointsRaw(modelImage, null);
        Matrix <Byte> modelDescriptors = descriptor.ComputeDescriptorsRaw(modelImage, null, modelKeyPoints);

        // extract features from the observed image
        observedKeyPoints = fastCPU.DetectKeyPointsRaw(observedImage, null);
        Matrix <Byte>            observedDescriptors = descriptor.ComputeDescriptorsRaw(observedImage, null, observedKeyPoints);
        BruteForceMatcher <Byte> matcher             = new BruteForceMatcher <Byte>(DistanceType.L2);

        matcher.Add(modelDescriptors);

        indices = new Matrix <int>(observedDescriptors.Rows, k);
        using (Matrix <float> dist = new Matrix <float>(observedDescriptors.Rows, k))
        {
            matcher.KnnMatch(observedDescriptors, indices, dist, k, null);
            mask = new Matrix <byte>(dist.Rows, 1);
            mask.SetValue(255);
            Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
        }

        int nonZeroCount = CvInvoke.cvCountNonZero(mask);
        //print("nonZeroCount is "+nonZeroCount);
        if (nonZeroCount >= 4)
        {
            nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
            if (nonZeroCount >= 4)
            {
                homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(
                    modelKeyPoints, observedKeyPoints, indices, mask, 2);
            }
        }

        //Draw the matched keypoints
        Image <Bgr, Byte> result = Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints,
                                                                 indices, new Bgr(255, 255, 255), new Bgr(255, 255, 255), mask, Features2DToolbox.KeypointDrawType.DEFAULT);

        #region draw the projected region on the image
        if (homography != null)
        {  //draw a rectangle along the projected model
            Rectangle rect = modelImage.ROI;
            PointF[]  pts  = new PointF[] {
                new PointF(rect.Left, rect.Bottom),
                new PointF(rect.Right, rect.Bottom),
                new PointF(rect.Right, rect.Top),
                new PointF(rect.Left, rect.Top)
            };
            homography.ProjectPoints(pts);
            //area = Math.Abs((rect.Top - rect.Bottom) * (rect.Right - rect.Left));
            result.DrawPolyline(Array.ConvertAll <PointF, Point>(pts, Point.Round), true, new Bgr(System.Drawing.Color.Red), 5);
        }
        #endregion



        return(result);
    }
Example #41
0
 /// <summary>
 /// Use the specified method to find the perspective transformation H=||h_ij|| between the source and the destination planes 
 /// </summary>
 /// <param name="srcPoints">Point coordinates in the original plane, 2xN, Nx2, 3xN or Nx3 array (the latter two are for representation in homogeneous coordinates), where N is the number of points</param>
 /// <param name="dstPoints">Point coordinates in the destination plane, 2xN, Nx2, 3xN or Nx3 array (the latter two are for representation in homogeneous coordinates)</param>
 /// <param name="method">FindHomography method</param>
 /// <param name="ransacReprojThreshold">The maximum allowed reprojection error to treat a point pair as an inlier. The parameter is only used in RANSAC-based homography estimation. E.g. if dst_points coordinates are measured in pixels with pixel-accurate precision, it makes sense to set this parameter somewhere in the range ~1..3</param>
 /// <returns>The 3x3 homography matrix if found. Null if not found.</returns>
 public static HomographyMatrix FindHomography(
     Matrix<float> srcPoints,
     Matrix<float> dstPoints,
     CvEnum.HOMOGRAPHY_METHOD method,
     double ransacReprojThreshold)
 {
     HomographyMatrix homography = new HomographyMatrix();
     if (0 == CvInvoke.cvFindHomography(srcPoints.Ptr, dstPoints.Ptr, homography.Ptr, method, ransacReprojThreshold, IntPtr.Zero))
     {
         homography.Dispose();
         return null;
     }
     return homography;
 }
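A minimal calling sketch for the wrapper above (the helper name and point arrays are hypothetical): pack matching source and destination points into Nx2 Matrix<float> buffers, one of the accepted layouts, and request a RANSAC estimate with a 3-pixel reprojection threshold:
 public static HomographyMatrix FindHomographyRansacSketch(PointF[] srcPts, PointF[] dstPts)
 {
     // hypothetical helper; srcPts and dstPts must have the same length and at least 4 pairs
     Matrix<float> src = new Matrix<float>(srcPts.Length, 2);
     Matrix<float> dst = new Matrix<float>(dstPts.Length, 2);
     for (int i = 0; i < srcPts.Length; i++)
     {
         src[i, 0] = srcPts[i].X; src[i, 1] = srcPts[i].Y;
         dst[i, 0] = dstPts[i].X; dst[i, 1] = dstPts[i].Y;
     }
     // returns null when no homography can be estimated
     return FindHomography(src, dst, CvEnum.HOMOGRAPHY_METHOD.RANSAC, 3);
 }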
Example #42
0
        /// <summary>
        /// Product recognition using BruteForce matching (more accurate but slower)
        /// </summary>
        /// <param name="template">The SURF feature data of the template</param>
        /// <param name="observedScene">The SURF feature data of the observed scene to match against</param>
        /// <returns>Returns the matched-data class</returns>
        public static SURFMatchedData MatchSURFFeatureByBruteForceForGoods(SURFFeatureData template, SURFFeatureData observedScene)
        {
            //This matrix indicates which rows of the matches are valid.
            Matrix <byte> mask;
            //Number of nearest neighbors to search for
            int k = 2;
            //The distance ratio below which a match is considered unique; a good value is 0.8 (nearest-neighbour distance ratio, NNDR, matching)
            double uniquenessThreshold = 0.8; //default:0.8
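            //(a match survives VoteForUniqueness only when bestDistance < uniquenessThreshold * secondBestDistance,
            // i.e. Lowe's nearest-neighbour distance ratio test)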

            //The resulting n*k matrix of descriptor indices from the training descriptors
            Matrix <int>     indices;
            HomographyMatrix homography = null;
            Stopwatch        watch;

            try
            {
                watch = Stopwatch.StartNew();
                #region bruteForce match for CPU
                //match
                BruteForceMatcher <float> matcher = new BruteForceMatcher <float>(DistanceType.L2Sqr); //default:L2
                matcher.Add(template.GetDescriptors());

                indices = new Matrix <int>(observedScene.GetDescriptors().Rows, k);
                //The resulting n*k matrix of distance value from the training descriptors
                using (Matrix <float> dist = new Matrix <float>(observedScene.GetDescriptors().Rows, k))
                {
                    matcher.KnnMatch(observedScene.GetDescriptors(), indices, dist, k, null);
                    #region Test Output
                    //for (int i = 0; i < indices.Rows; i++)
                    //{
                    //    for (int j = 0; j < indices.Cols; j++)
                    //    {
                    //        Console.Write(indices[i, j] + " ");
                    //    }
                    //    Console.Write("\n");
                    //}
                    //Console.WriteLine("\n distance");
                    //for (int i = 0; i < dist.Rows; i++)
                    //{
                    //    for (int j = 0; j < dist.Cols; j++)
                    //    {
                    //        Console.Write(dist[i, j] + " ");
                    //    }
                    //    Console.Write("\n");
                    //}
                    //Console.WriteLine("\n");
                    #endregion

                    mask = new Matrix <byte>(dist.Rows, 1);
                    mask.SetValue(255); //mask flags which matches are kept (all enabled initially)
                    //http://stackoverflow.com/questions/21932861/how-does-features2dtoolbox-voteforuniqueness-work
                    //how VoteForUniqueness works...
                    Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
                }

                int nonZeroCount = CvInvoke.cvCountNonZero(mask); //number of good matches
                Console.WriteLine("-----------------\nVoteForUniqueness pairCount => " + nonZeroCount.ToString() + "\n-----------------");
                if (nonZeroCount >= 4)
                {
                    //50 is the rotation-similarity tolerance between the model and the matching image, e.g. m1 = 60, m2 = 50 => 60 - 50 <= 50, so they are considered similar
                    nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(template.GetKeyPoints(), observedScene.GetKeyPoints(), indices, mask, 1.5, 50); //default:1.5 , 10
                    Console.WriteLine("VoteForSizeAndOrientation pairCount => " + nonZeroCount.ToString() + "\n-----------------");
                    if (nonZeroCount >= 15) //default: 4, set to 15
                    {
                        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(template.GetKeyPoints(), observedScene.GetKeyPoints(), indices, mask, 5);
                    }
                }
                #endregion
                watch.Stop();
                Console.WriteLine("Cal SURF Match time => " + watch.ElapsedMilliseconds.ToString() + "\n-----------------");

                return(new SURFMatchedData(indices, homography, mask, nonZeroCount, template));
            }
            catch (CvException ex)
            {
                System.Windows.Forms.MessageBox.Show(ex.ErrorMessage);
                return(null);
            }
        }
Example #43
0
        public void Update(Image<Bgr, byte> nowImage)
        {
            DebugImage = nowImage;
            DerivePitchEdges(nowImage);

            TopLeft = GetTopLeft();
            TopRight = GetTopRight();
            BottomLeft = GetBottomLeft();
            BottomRight = GetBottomRight();

            PointF[] sourcePoints = { TopLeft, TopRight, BottomLeft, BottomRight };
            PointF[] destPoints = {
                                      new PointF(Instep, Border),
                                      new PointF(PitchWidth - Instep, Border) ,
                                      new PointF(Instep, PitchHeight + Border),
                                      new PointF(PitchWidth - Instep, PitchHeight + Border) };

            m_WarpMat = CameraCalibration.GetPerspectiveTransform(sourcePoints, destPoints);
            m_WarpMatInv = CameraCalibration.GetPerspectiveTransform(destPoints, sourcePoints);

            PerspImage = nowImage.WarpPerspective(m_WarpMat, 1205, (int)(PitchHeight + Border * 2),
                Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC,
                Emgu.CV.CvEnum.WARP.CV_WARP_FILL_OUTLIERS,
                new Bgr(200, 200, 200)).Convert<Bgr, byte>();
            ThresholdedPerspImage = ImageProcess.ThresholdHsv(PerspImage, 22, 89, 33, 240, 40, 250).
                ThresholdBinaryInv(new Gray(100), new Gray(255));

            //DerivePolePositions();
        }
Example #44
0
        static void Run()
        {
            SURFDetector surfParam = new SURFDetector(500, false);

            Image <Gray, Byte> modelImage = new Image <Gray, byte>("box.png");

            //extract features from the object image
            ImageFeature[] modelFeatures = surfParam.DetectFeatures(modelImage, null);

            //Create a Feature Tracker
            Features2DTracker tracker = new Features2DTracker(modelFeatures);

            Image <Gray, Byte> observedImage = new Image <Gray, byte>("box_in_scene.png");

            Stopwatch watch = Stopwatch.StartNew();

            // extract features from the observed image
            ImageFeature[] imageFeatures = surfParam.DetectFeatures(observedImage, null);

            Features2DTracker.MatchedImageFeature[] matchedFeatures = tracker.MatchFeature(imageFeatures, 2, 20);
            matchedFeatures = Features2DTracker.VoteForUniqueness(matchedFeatures, 0.8);
            matchedFeatures = Features2DTracker.VoteForSizeAndOrientation(matchedFeatures, 1.5, 20);
            HomographyMatrix homography = Features2DTracker.GetHomographyMatrixFromMatchedFeatures(matchedFeatures);

            watch.Stop();

            //Merge the object image and the observed image into one image for display
            Image <Gray, Byte> res = modelImage.ConcateVertical(observedImage);

            #region draw lines between the matched features
            foreach (Features2DTracker.MatchedImageFeature matchedFeature in matchedFeatures)
            {
                PointF p = matchedFeature.ObservedFeature.KeyPoint.Point;
                p.Y += modelImage.Height;
                res.Draw(new LineSegment2DF(matchedFeature.SimilarFeatures[0].Feature.KeyPoint.Point, p), new Gray(0), 1);
            }
            #endregion

            #region draw the project region on the image
            if (homography != null)
            { //draw a rectangle along the projected model
                Rectangle rect = modelImage.ROI;
                PointF[]  pts  = new PointF[] {
                    new PointF(rect.Left, rect.Bottom),
                    new PointF(rect.Right, rect.Bottom),
                    new PointF(rect.Right, rect.Top),
                    new PointF(rect.Left, rect.Top)
                };
                homography.ProjectPoints(pts);

                for (int i = 0; i < pts.Length; i++)
                {
                    pts[i].Y += modelImage.Height;
                }

                res.DrawPolyline(Array.ConvertAll <PointF, Point>(pts, Point.Round), true, new Gray(255.0), 5);
            }
            #endregion

            ImageViewer.Show(res, String.Format("Matched in {0} milliseconds", watch.ElapsedMilliseconds));
        }
Example #45
0
            /// <summary>
            /// Transform a 2D input point (0-1 space) into the output point space (i.e. the verts in WPF).
            /// Z is implied as 0.
            /// </summary>
            /// <param name="tIn">The input point.</param>
            /// <returns>The transformed output point.</returns>
            public Point TransformPoint(Point tIn)
            {
                var p = HomographyMatrix.Transform(new Point3D(tIn.X, tIn.Y, 0));

                return(new Point(p.X, p.Y));
            }
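A small usage sketch for the helper above; the instance name quadWarp is hypothetical and only illustrates how the unit-square corners map into output (WPF) coordinates:
            // hypothetical usage; quadWarp is an instance of the containing class
            Point outTopLeft     = quadWarp.TransformPoint(new Point(0, 0));
            Point outTopRight    = quadWarp.TransformPoint(new Point(1, 0));
            Point outBottomRight = quadWarp.TransformPoint(new Point(1, 1));
            Point outBottomLeft  = quadWarp.TransformPoint(new Point(0, 1));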
Example #46
0
        public static Image <Bgr, Byte> FAST(Image <Gray, Byte> modelImage, Image <Gray, byte> observedImage)
        {
            bool isFound = false;

            long      matchTime;
            Stopwatch watch;

            HomographyMatrix homography = null;

            FastDetector     fastCPU = new FastDetector(10, true);
            VectorOfKeyPoint modelKeyPoints;
            VectorOfKeyPoint observedKeyPoints;
            Matrix <int>     indices;

            BriefDescriptorExtractor descriptor = new BriefDescriptorExtractor();

            Matrix <byte> mask;
            int           k = 2;
            double        uniquenessThreshold = 0.8;

            watch = Stopwatch.StartNew();

            //extract features from the object image
            modelKeyPoints = fastCPU.DetectKeyPointsRaw(modelImage, null);
            Matrix <Byte> modelDescriptors = descriptor.ComputeDescriptorsRaw(modelImage, null, modelKeyPoints);

            // extract features from the observed image
            observedKeyPoints = fastCPU.DetectKeyPointsRaw(observedImage, null);
            Matrix <Byte>            observedDescriptors = descriptor.ComputeDescriptorsRaw(observedImage, null, observedKeyPoints);
            BruteForceMatcher <Byte> matcher             = new BruteForceMatcher <Byte>(DistanceType.L2);

            matcher.Add(modelDescriptors);

            indices = new Matrix <int>(observedDescriptors.Rows, k);
            using (Matrix <float> dist = new Matrix <float>(observedDescriptors.Rows, k))
            {
                matcher.KnnMatch(observedDescriptors, indices, dist, k, null);
                mask = new Matrix <byte>(dist.Rows, 1);
                mask.SetValue(255);
                Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
            }

            int nonZeroCount = CvInvoke.cvCountNonZero(mask);

            if (nonZeroCount >= 4)
            {
                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                if (nonZeroCount >= 4)
                {
                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(
                        modelKeyPoints, observedKeyPoints, indices, mask, 2);
                }
            }

            watch.Stop();

            //Draw the matched keypoints
            Image <Bgr, Byte> result = Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints,
                                                                     indices, new Bgr(255, 255, 255), new Bgr(255, 255, 255), mask, Features2DToolbox.KeypointDrawType.DEFAULT);

            #region draw the projected region on the image
            if (homography != null)
            {  //draw a rectangle along the projected model
                Rectangle rect = modelImage.ROI;
                PointF[]  pts  = new PointF[] {
                    new PointF(rect.Left, rect.Bottom),
                    new PointF(rect.Right, rect.Bottom),
                    new PointF(rect.Right, rect.Top),
                    new PointF(rect.Left, rect.Top)
                };
                homography.ProjectPoints(pts);

                if (CvInvoke.cvCountNonZero(mask) >= 10)
                {
                    isFound = true;
                }


                result.DrawPolyline(Array.ConvertAll <PointF, Point>(pts, Point.Round), true, new Bgr(Color.LightGreen), 5);
            }
            #endregion

            matchTime = watch.ElapsedMilliseconds;
            _richTextBox1.Clear();
            _richTextBox1.AppendText("objek ditemukan: " + isFound + "\n");
            _richTextBox1.AppendText("waktu pendeteksian FAST: " + matchTime + "ms\n");
            _richTextBox1.AppendText("fitur model yang terdeteksi: " + modelKeyPoints.Size + "\n");
            _richTextBox1.AppendText("match yang ditemukan: " + CvInvoke.cvCountNonZero(mask).ToString());

            return(result);
        }
        //The actual matching is done here
        public void FindMatch(Image <Gray, Byte> modelImage, Image <Gray, byte> observedImage, out long matchTime, out VectorOfKeyPoint modelKeyPoints,
                              out VectorOfKeyPoint observedKeyPoints, out Matrix <int> indices, out Matrix <byte> mask, out HomographyMatrix homography)
        {
            int          k = 2;
            double       uniquenessThreshold = 0.8;
            SURFDetector surfCPU             = new SURFDetector(500, false); //configure how the SURF features are computed
            Stopwatch    watch;                                              //used to measure the processing time

            homography = null;                                               //if the images match, this holds the homography used to project the model quadrilateral

            //extract features from the object image
            modelKeyPoints = new VectorOfKeyPoint();
            Matrix <float> modelDescriptors = surfCPU.DetectAndCompute(modelImage, null, modelKeyPoints); //modelKeyPoints: the detected keypoints; modelDescriptors: their descriptors



            // extract features from the observed image
            observedKeyPoints = new VectorOfKeyPoint();
            Matrix <float> observedDescriptors = surfCPU.DetectAndCompute(observedImage, null, observedKeyPoints); //observedKeyPoints: the detected keypoints of the observed image

            //ImagePrecess processor = new ImagePrecess(observedImage.ToBitmap(),320,240);
            //observedDescriptors = processor.GetImageFeature();
            //observedKeyPoints=processor.GetImageVectorOfKeyPoint();


            watch = Stopwatch.StartNew();
            //
            BruteForceMatcher <float> matcher = new BruteForceMatcher <float>(DistanceType.L2);

            matcher.Add(modelDescriptors);

            indices = new Matrix <int>(observedDescriptors.Rows, k);
            using (Matrix <float> dist = new Matrix <float>(observedDescriptors.Rows, k))
            {
                matcher.KnnMatch(observedDescriptors, indices, dist, k, null); //compute the k nearest-neighbour matches
                mask = new Matrix <byte>(dist.Rows, 1);
                mask.SetValue(255);
                Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask); //checks the matches just computed for ambiguity/uncertainty; the result is written into mask
            }

            int nonZeroCount = CvInvoke.cvCountNonZero(mask);

            if (nonZeroCount >= 4)
            {
                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                if (nonZeroCount >= 4)
                {
                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
                }
            }
            watch.Stop();
            matchTime = watch.ElapsedMilliseconds;
        }
Example #48
0
        public static void FindMatch(Image <Gray, Byte> modelImage, Image <Gray, byte> observedImage, out long matchTime, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints, out Matrix <int> indices, out Matrix <byte> mask, out HomographyMatrix homography)
        {
            int          k = 2;
            double       uniquenessThreshold = 0.8;
            SURFDetector surfCPU             = new SURFDetector(500, false);
            Stopwatch    watch;

            homography = null;

            if (GpuInvoke.HasCuda)
            {
                GpuSURFDetector surfGPU = new GpuSURFDetector(surfCPU.SURFParams, 0.01f);
                using (GpuImage <Gray, Byte> gpuModelImage = new GpuImage <Gray, byte>(modelImage))
                    //extract features from the object image
                    using (GpuMat <float> gpuModelKeyPoints = surfGPU.DetectKeyPointsRaw(gpuModelImage, null))
                        using (GpuMat <float> gpuModelDescriptors = surfGPU.ComputeDescriptorsRaw(gpuModelImage, null, gpuModelKeyPoints))
                            using (GpuBruteForceMatcher <float> matcher = new GpuBruteForceMatcher <float>(DistanceType.L2))
                            {
                                modelKeyPoints = new VectorOfKeyPoint();
                                surfGPU.DownloadKeypoints(gpuModelKeyPoints, modelKeyPoints);
                                watch = Stopwatch.StartNew();

                                // extract features from the observed image
                                using (GpuImage <Gray, Byte> gpuObservedImage = new GpuImage <Gray, byte>(observedImage))
                                    using (GpuMat <float> gpuObservedKeyPoints = surfGPU.DetectKeyPointsRaw(gpuObservedImage, null))
                                        using (GpuMat <float> gpuObservedDescriptors = surfGPU.ComputeDescriptorsRaw(gpuObservedImage, null, gpuObservedKeyPoints))
                                            using (GpuMat <int> gpuMatchIndices = new GpuMat <int>(gpuObservedDescriptors.Size.Height, k, 1, true))
                                                using (GpuMat <float> gpuMatchDist = new GpuMat <float>(gpuObservedDescriptors.Size.Height, k, 1, true))
                                                    using (GpuMat <Byte> gpuMask = new GpuMat <byte>(gpuMatchIndices.Size.Height, 1, 1))
                                                        using (Stream stream = new Stream())
                                                        {
                                                            matcher.KnnMatchSingle(gpuObservedDescriptors, gpuModelDescriptors, gpuMatchIndices, gpuMatchDist, k, null, stream);
                                                            indices = new Matrix <int>(gpuMatchIndices.Size);
                                                            mask    = new Matrix <byte>(gpuMask.Size);

                                                            //gpu implementation of VoteForUniqueness
                                                            using (GpuMat <float> col0 = gpuMatchDist.Col(0))
                                                                using (GpuMat <float> col1 = gpuMatchDist.Col(1))
                                                                {
                                                                    GpuInvoke.Multiply(col1, new MCvScalar(uniquenessThreshold), col1, stream);
                                                                    GpuInvoke.Compare(col0, col1, gpuMask, CMP_TYPE.CV_CMP_LE, stream);
                                                                }

                                                            observedKeyPoints = new VectorOfKeyPoint();
                                                            surfGPU.DownloadKeypoints(gpuObservedKeyPoints, observedKeyPoints);

                                                            //wait for the stream to complete its tasks
                                                            //We can perform other CPU-intensive work here while we are waiting for the stream to complete.
                                                            stream.WaitForCompletion();

                                                            gpuMask.Download(mask);
                                                            gpuMatchIndices.Download(indices);

                                                            if (GpuInvoke.CountNonZero(gpuMask) >= 4)
                                                            {
                                                                int nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                                                                if (nonZeroCount >= 4)
                                                                {
                                                                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
                                                                }
                                                            }

                                                            watch.Stop();
                                                        }
                            }
            }
            else
            {
                //extract features from the object image
                modelKeyPoints = new VectorOfKeyPoint();
                Matrix <float> modelDescriptors = surfCPU.DetectAndCompute(modelImage, null, modelKeyPoints);

                watch = Stopwatch.StartNew();

                // extract features from the observed image
                observedKeyPoints = new VectorOfKeyPoint();
                Matrix <float>            observedDescriptors = surfCPU.DetectAndCompute(observedImage, null, observedKeyPoints);
                BruteForceMatcher <float> matcher             = new BruteForceMatcher <float>(DistanceType.L2);
                matcher.Add(modelDescriptors);

                indices = new Matrix <int>(observedDescriptors.Rows, k);
                using (Matrix <float> dist = new Matrix <float>(observedDescriptors.Rows, k))
                {
                    matcher.KnnMatch(observedDescriptors, indices, dist, k, null);
                    mask = new Matrix <byte>(dist.Rows, 1);
                    mask.SetValue(255);
                    Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
                }

                int nonZeroCount = CvInvoke.cvCountNonZero(mask);
                if (nonZeroCount >= 4)
                {
                    nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                    if (nonZeroCount >= 4)
                    {
                        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
                    }
                }

                watch.Stop();
            }
            matchTime = watch.ElapsedMilliseconds;
        }
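For context (not part of the scraped sample above), a typical caller of this FindMatch overload draws the matches and, when a homography was found, projects the model image's corners onto the observed image; the sketch below reuses only calls that already appear in these examples (DrawMatches, ProjectPoints, DrawPolyline).
        public static Image<Bgr, Byte> Draw(Image<Gray, Byte> modelImage, Image<Gray, byte> observedImage, out long matchTime)
        {
            VectorOfKeyPoint modelKeyPoints;
            VectorOfKeyPoint observedKeyPoints;
            Matrix<int> indices;
            Matrix<byte> mask;
            HomographyMatrix homography;
            FindMatch(modelImage, observedImage, out matchTime, out modelKeyPoints, out observedKeyPoints, out indices, out mask, out homography);

            //draw the matched key points side by side
            Image<Bgr, Byte> result = Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints,
               indices, new Bgr(255, 255, 255), new Bgr(255, 255, 255), mask, Features2DToolbox.KeypointDrawType.DEFAULT);

            if (homography != null)
            {
                //project the corners of the model image into the observed image and draw the outline
                Rectangle rect = modelImage.ROI;
                PointF[] pts = new PointF[] {
                   new PointF(rect.Left, rect.Bottom),
                   new PointF(rect.Right, rect.Bottom),
                   new PointF(rect.Right, rect.Top),
                   new PointF(rect.Left, rect.Top)};
                homography.ProjectPoints(pts);
                result.DrawPolyline(Array.ConvertAll<PointF, Point>(pts, Point.Round), true, new Bgr(0, 0, 255), 5);
            }
            return result;
        }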
Example #49
0
        public void SURFDetect(Image<Bgr, Byte> image)
        {
            // Detect KP and calculate descriptors...
            observedKP = surfDetector.DetectKeyPointsRaw(image.Convert<Gray,Byte>(), null);
            observedDescriptors = surfDetector.ComputeDescriptorsRaw(image.Convert<Gray, Byte>(), null, observedKP);

            // Matching
            int k = 2;
            indices = new Matrix<int>(observedDescriptors.Rows, k);
            dist = new Matrix<float>(observedDescriptors.Rows, k);
            matcher.KnnMatch(observedDescriptors, indices, dist, k, null);

            // Vote for uniqueness to reject ambiguous matches.
            mask = new Matrix<byte>(dist.Rows, 1);
            mask.SetValue(255);
            Features2DToolbox.VoteForUniqueness(dist, 0.8, mask);

            int nonZeroCount = CvInvoke.cvCountNonZero(mask);
            if (nonZeroCount >= 4)
            {
                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(itemKP, observedKP, indices, mask, 1.5, 20);
                if (nonZeroCount >= 4)
                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(itemKP, observedKP, indices, mask, 3);
            }

            // Get keypoints.
            keyPts = new PointF[itemKP.Size];
            classes = new int[itemKP.Size];
            for (int i = 0; i < itemKP.Size; i++)
            {
                keyPts[i] = itemKP[i].Point;
                classes[i] = itemKP[i].ClassId;
            }

            prevFrame = image;

            #region

            // Find ROI
            PointF minXY = new PointF();
            PointF maxXY = new PointF();
            for (int i = 0; i < itemKP.Size; i++)
            {
                PointF pt = keyPts[i];
                if (pt.X < minXY.X) minXY.X = pt.X;
                if (pt.Y < minXY.Y) minXY.Y = pt.Y;
                if (pt.X > maxXY.X) maxXY.X = pt.X;
                if (pt.Y > maxXY.Y) maxXY.Y = pt.Y;
            }

            // Convert ROI to rect
            //roi = new Rectangle((int)minXY.X, (int)minXY.Y, (int)(maxXY.X - minXY.X), (int)(maxXY.Y - minXY.Y));

            //Console.WriteLine("Position: ({0},{1}) \tWidth: {2}\tHeight: {3}", roi.X, roi.Y, roi.Width, roi.Height);

            #endregion

            projectedPoints = null;
            if (homography != null) {
                Rectangle rect = itemImage.ROI;
                projectedPoints = new PointF[] {
                   new PointF(rect.Left, rect.Bottom),
                   new PointF(rect.Right, rect.Bottom),
                   new PointF(rect.Right, rect.Top),
                   new PointF(rect.Left, rect.Top)};
                homography.ProjectPoints(projectedPoints);

                roi = new Rectangle((int)(projectedPoints[3].X), (int)projectedPoints[3].Y, (int)(projectedPoints[1].X - projectedPoints[3].X), (int)(projectedPoints[1].Y - projectedPoints[3].Y));

                // We're always gonna track with CMT now, so this will get initialized no matter what.
                /*
                 * if (trackWithCMT) cmtTracker.Initialize(image, roi);
                 * else cmtTracker = null;
                 */

                // Initialize CMT unconditionally.
                cmtEngine.Initialize(image, roi);

            }
        }
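A hypothetical per-frame driver for the method above (not in the original sample): it assumes the surrounding class was constructed with the item image, so that itemKP, matcher and the fields written by SURFDetect (projectedPoints, roi) already exist, and that capture and imageBox are an Emgu CV Capture and an ImageBox owned by the form.
        private void ProcessFrame(object sender, EventArgs e)
        {
            Image<Bgr, Byte> frame = capture.QueryFrame();   //grab the next webcam frame
            if (frame == null) return;

            SURFDetect(frame);                               //run the matching; on success this also initializes CMT

            if (projectedPoints != null)                     //a homography was found, so draw the projected outline
                frame.DrawPolyline(Array.ConvertAll<PointF, Point>(projectedPoints, Point.Round), true, new Bgr(0, 255, 0), 2);

            imageBox.Image = frame;
        }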
//..........method that runs the SURF detection........................................................................................
        public void DoSURFDetectAndUpdateForm(object sender, EventArgs e)
        {
            try
            {
                imgSceneColor             = captureWebcam.QueryFrame(); //try to grab a single frame from the camera
                lbPreparingCamera.Visible = false;
            }
            catch (Exception ex)                                    //if that fails, display the error
            {
                this.Text = ex.Message;
            }


            if (imgSceneColor == null)
            {
                this.Text = "error, nie wczytano obrazu z kamery";      //gdy nie odczytano następnej klatki do zmiennej obrazka
            }
            if (imgToFindColor == null)                                 //if we do not yet have an image to find...
            {
                ibResult.Image = imgSceneColor.ToBitmap();              //...then just show the scene image in the imageBox
            }
            //if we got this far, both images are OK and we can start the SURF detection

            SURFDetector surfDetector = new SURFDetector(500, false);   //SURF object; parameters: hessian threshold (how strong a keypoint must be to count) and the extended flag

            Image <Gray, Byte> imgSceneGray  = null;                    //grayscale scene image
            Image <Gray, Byte> imgToFindGray = null;                    //grayscale image to find

            VectorOfKeyPoint vkpSceneKeyPoints;                         //vector of key points in the scene image
            VectorOfKeyPoint vkpToFindKeyPoints;                        //vector of key points in the image to find

            Matrix <Single> mtxSceneDescriptors;                        //matrix of scene descriptors used for the nearest-neighbour query
            Matrix <Single> mtxToFindDescriptor;                        //matrix of descriptors for the image to find

            Matrix <int>    mtxMatchIndices;                            //matrix of descriptor indices, filled when the descriptors are matched (KnnMatch())
            Matrix <Single> mtxDistance;                                //matrix of distance values, filled by the same call
            Matrix <Byte>   mtxMask;                                    //input and output for VoteForUniqueness(), indicating which rows match

            BruteForceMatcher <Single> bruteForceMatcher;               //for every descriptor in the first set, the matcher finds...
                                                                        //...the closest descriptor in the second set, built up by training

            HomographyMatrix homographyMatrix = null;                   //used with ProjectPoints() to locate the found image within the scene
            int    intKNumNearestNeighbors    = 2;                      //k, the number of nearest neighbours to search
            double dblUniquenessThreshold     = 0.8;                    //distance-ratio threshold for a match to count as unique

            int intNumNonZeroElements;                                  //holds the number of non-zero elements in the mask matrix,...
                                                                        //...also used around the GetHomographyMatrixFromMatchedFeatures() call

            //parameters used in the VoteForSizeAndOrientation() calls

            double dblScareIncrement = 1.5;                      //scale difference between neighbouring bins
            int    intRotationBins   = 20;                       //number of rotation bins over 360 degrees (with 20 bins, each bin covers 18 degrees, 20*18=360)

            double dblRansacReprojectionThreshold = 2.0;         //used with GetHomographyMatrixFromMatchedFeatures(), the maximum allowed reprojection error...
                                                                 //...for a point pair to count as an inlier

            Rectangle rectImageToFind = new Rectangle();         //rectangle covering the whole image to find

            PointF [] pointsF;                                   //4 points marking the frame around the found image's location in the scene (float)
            Point []  points;                                    //the same 4 points, but as (int)

            imgSceneGray = imgSceneColor.Convert <Gray, Byte>(); //convert the scene to grayscale

            if (isImgToFind == true)
            {
                try
                {
                    imgToFindGray = imgToFindColor.Convert <Gray, Byte>();       // convert the image to find to grayscale
                }
                catch (Exception ex)
                {
                    MessageBox.Show(ex.ToString());
                }

                vkpSceneKeyPoints   = surfDetector.DetectKeyPointsRaw(imgSceneGray, null);                       //detect key points in the scene; the second parameter is a mask, null means none is needed
                mtxSceneDescriptors = surfDetector.ComputeDescriptorsRaw(imgSceneGray, null, vkpSceneKeyPoints); //compute the scene descriptors; the parameters are the scene image,...
                //...the mask and the scene key points

                vkpToFindKeyPoints = surfDetector.DetectKeyPointsRaw(imgToFindGray, null);                          //detect key points in the image to find; the second parameter is...
                //...the mask, null because it is not needed

                mtxToFindDescriptor = surfDetector.ComputeDescriptorsRaw(imgToFindGray, null, vkpToFindKeyPoints);                //compute descriptors for the image to find (image, mask, its key points)

                bruteForceMatcher = new BruteForceMatcher <Single>(DistanceType.L2);                                              //brute-force matcher using L2 (squared Euclidean distance)
                bruteForceMatcher.Add(mtxToFindDescriptor);                                                                       //add the descriptor matrix of the image to find to the brute-force matcher

                if (mtxSceneDescriptors != null)                                                                                  //guards against a scene with no features, e.g. a blank wall
                {
                    mtxMatchIndices = new Matrix <int>(mtxSceneDescriptors.Rows, intKNumNearestNeighbors);                        //matrix of index bins (rows, columns)
                    mtxDistance     = new Matrix <Single>(mtxSceneDescriptors.Rows, intKNumNearestNeighbors);                     //the same for the distances

                    bruteForceMatcher.KnnMatch(mtxSceneDescriptors, mtxMatchIndices, mtxDistance, intKNumNearestNeighbors, null); //find the k nearest matches (null means no mask is needed)

                    mtxMask = new Matrix <Byte>(mtxDistance.Rows, 1);                                                             //mask matrix
                    mtxMask.SetValue(255);                                                                                        //set every element of the mask matrix

                    Features2DToolbox.VoteForUniqueness(mtxDistance, dblUniquenessThreshold, mtxMask);                            //filter the matches: any match that is NOT unique is rejected

                    intNumNonZeroElements = CvInvoke.cvCountNonZero(mtxMask);                                                     //get the number of non-zero elements in the mask matrix
                    if (intNumNonZeroElements >= 4)
                    {
                        //eliminate matched features whose scale and rotation do not agree with those of the majority
                        intNumNonZeroElements = Features2DToolbox.VoteForSizeAndOrientation(vkpToFindKeyPoints, vkpSceneKeyPoints, mtxMatchIndices, mtxMask, dblScareIncrement, intRotationBins);
                        if (intNumNonZeroElements >= 4)             //if there are still at least 4 non-zero elements

                        //get the homography matrix using RANSAC (random sample consensus)
                        {
                            homographyMatrix = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(vkpToFindKeyPoints, vkpSceneKeyPoints, mtxMatchIndices, mtxMask, dblRansacReprojectionThreshold);
                        }
                    }

                    imgCopyOfImageToFindWithBorder = imgToFindColor.Copy();     //copy the image to find so we can draw on the copy without modifying the original

                    //draw a 2-pixel border around the copy of the image to find, in the same colour as the box drawn around the found image
                    imgCopyOfImageToFindWithBorder.Draw(new Rectangle(1, 1, imgCopyOfImageToFindWithBorder.Width - 3, imgCopyOfImageToFindWithBorder.Height - 3), bgrFoundImageColor, 2);

                    //draw the scene image and the image to find together on the result image
                    //three cases depending on which checkboxes are ticked (draw key points and/or draw matching lines)
                    if (ckDrawKeyPoints.Checked == true && ckDrawMatchingLines.Checked == true)
                    {
                        //use DrawMatches() to combine the scene image with the image to find, then draw the key points and matching lines
                        imgResult = Features2DToolbox.DrawMatches(imgCopyOfImageToFindWithBorder,
                                                                  vkpToFindKeyPoints,
                                                                  imgSceneColor,
                                                                  vkpSceneKeyPoints,
                                                                  mtxMatchIndices,
                                                                  bgrMatchingLineColor,
                                                                  bgrKeyPointColor,
                                                                  mtxMask,
                                                                  Features2DToolbox.KeypointDrawType.DEFAULT);
                    }
                    else if (ckDrawKeyPoints.Checked == true && ckDrawMatchingLines.Checked == false)
                    {
                        //draw the scene with its key points onto the result image
                        imgResult = Features2DToolbox.DrawKeypoints(imgSceneColor,
                                                                    vkpSceneKeyPoints,
                                                                    bgrKeyPointColor,
                                                                    Features2DToolbox.KeypointDrawType.DEFAULT);
                        //then draw the key points on the copy of the image to find
                        imgCopyOfImageToFindWithBorder = Features2DToolbox.DrawKeypoints(imgCopyOfImageToFindWithBorder,
                                                                                         vkpToFindKeyPoints,
                                                                                         bgrKeyPointColor,
                                                                                         Features2DToolbox.KeypointDrawType.DEFAULT);
                        //then append the copy of the image to find to the result image
                        imgResult = imgResult.ConcateHorizontal(imgCopyOfImageToFindWithBorder);
                    }
                    else if (ckDrawKeyPoints.Checked == false && ckDrawMatchingLines.Checked == false)
                    {
                        imgResult = imgSceneColor;                                                  //use the scene as the result image
                        imgResult = imgResult.ConcateHorizontal(imgCopyOfImageToFindWithBorder);    //append the copy of the image to find to the result image
                    }
                    else
                    {
                        MessageBox.Show("Błąd");     //tu już nie powinno nigdy dojść
                    }
                }
                else
                {
                    imgResult = imgSceneColor;                                                  //use the scene as the result image
                    imgResult = imgResult.ConcateHorizontal(imgCopyOfImageToFindWithBorder);    //append the copy of the image to find to the result image
                }

                if (homographyMatrix != null)    //check that the matrix actually contains something
                {
                    //draw a frame on the scene portion of the result image, where the searched-for image was found
                    rectImageToFind.X      = 0;     //start with the rectangle set to the full size of the image to find
                    rectImageToFind.Y      = 0;
                    rectImageToFind.Width  = imgToFindGray.Width;
                    rectImageToFind.Height = imgToFindGray.Height;

                    //create an array of PointF corresponding to the corners of the rectangle
                    pointsF = new PointF[] { new PointF(rectImageToFind.Left, rectImageToFind.Top),
                                             new PointF(rectImageToFind.Right, rectImageToFind.Top),
                                             new PointF(rectImageToFind.Right, rectImageToFind.Bottom),
                                             new PointF(rectImageToFind.Left, rectImageToFind.Bottom) };

                    //ProjectPoints() updates pointsF (by reference) to the location of the frame on the scene fragment where the searched-for image was found
                    homographyMatrix.ProjectPoints(pointsF);

                    //convert from PointF to Point, because ProjectPoints() uses PointF while DrawPolyline() uses Point
                    points = new Point[] { Point.Round(pointsF[0]),
                                           Point.Round(pointsF[1]),
                                           Point.Round(pointsF[2]),
                                           Point.Round(pointsF[3]) };

                    //draw a frame around the found image on the scene fragment of the result image
                    imgResult.DrawPolyline(points, true, new Bgr(0, 255, 0), 2);

                    //draw a red dash at the centre of the object
                    int x, y, x1, y1, xW, yW;

                    x  = Convert.ToInt32(points[0].X);
                    y  = Convert.ToInt32(points[0].Y);
                    x1 = Convert.ToInt32(points[2].X);
                    y1 = Convert.ToInt32(points[2].Y);

                    xW  = x1 - x;
                    xW /= 2;
                    xW += x;
                    yW  = y1 - y;
                    yW /= 2;
                    yW += y;
                    Point [] pp = new Point[] { new Point(xW, yW), new Point(xW + 10, yW) };    //mark the centre of the detected object
                    imgResult.DrawPolyline(pp, true, new Bgr(0, 0, 255), 5);

                    XX = xW.ToString();
                    YY = yW.ToString();
                    //////////when the object leaves the field of view
                    if (xW == 0 || yW == 0 || xW < -200 || yW < -200 || xW > 800 || yW > 800)
                    {
                        targetLost(-1);
                    }
                    else
                    {
                        targetLost(1);
                    }
                    //////////
                }
                else
                {
                    targetLost(-1);     //bullseye!
                }
                //end of SURF; update the whole form

                ibResult.Image = imgResult.ToBitmap();          //show the result in the imageBox
            }
        }
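The handler above is an ordinary (object, EventArgs) event handler, so in practice it is wired to the WinForms idle loop; a minimal sketch, assuming captureWebcam is an Emgu CV Capture field of the same form and Form1_Load is a placeholder name:
        private void Form1_Load(object sender, EventArgs e)
        {
            captureWebcam = new Capture();                     //open the default webcam
            Application.Idle += DoSURFDetectAndUpdateForm;     //re-run the SURF detection whenever the UI is idle
        }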
        /// <summary>
        /// Do find the matching by using the "BruteForceMatcher".
        /// </summary>
        public void FindMatch(Image<Gray, Byte> modelImage, Image<Gray, byte> observedImage, out long matchTime, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints, out Matrix<int> indices, out Matrix<byte> mask, out HomographyMatrix homography)
        {
            int k = Shared.SharedData.Rows;
            double uniquenessThreshold = Shared.SharedData.Trash;
            SURFDetector surfCPU = new SURFDetector(Shared.SharedData.HessianThresh, false);
            Stopwatch watch;
            homography = null;

            indices = null;
            mask = null;
            observedKeyPoints = null;

            //extract features from the object image
            modelKeyPoints = new VectorOfKeyPoint();
            Matrix<float> modelDescriptors = surfCPU.DetectAndCompute(modelImage, null, modelKeyPoints);

            watch = Stopwatch.StartNew();
            if (modelDescriptors != null)
            {
                // extract features from the observed image
                observedKeyPoints = new VectorOfKeyPoint();
                Matrix<float> observedDescriptors = surfCPU.DetectAndCompute(observedImage, null, observedKeyPoints);
                BruteForceMatcher<float> matcher = new BruteForceMatcher<float>(DistanceType.L1);
                matcher.Add(modelDescriptors);

                indices = new Matrix<int>(observedDescriptors.Rows, k);

                Matrix<float> dist = new Matrix<float>(observedDescriptors.Rows, k);
                {
                    matcher.KnnMatch(observedDescriptors, indices, dist, k, null);
                    mask = new Matrix<byte>(dist.Rows, 1);
                    mask.SetValue(255);
                    Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);

                    distResult = dist;
                }

                int nonZeroCount = CvInvoke.cvCountNonZero(mask);
                if (nonZeroCount >= 4)
                {
                    nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                    if (nonZeroCount >= 4)
                        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
                }
            }
            watch.Stop();
            matchTime = watch.ElapsedMilliseconds;
        }
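Note that, unlike the earlier overloads, this variant leaves observedKeyPoints, indices and mask at null when the model image yields no descriptors, so callers should guard before drawing; a small hypothetical sketch:
            long matchTime;
            VectorOfKeyPoint modelKeyPoints, observedKeyPoints;
            Matrix<int> indices;
            Matrix<byte> mask;
            HomographyMatrix homography;

            FindMatch(modelImage, observedImage, out matchTime, out modelKeyPoints,
                      out observedKeyPoints, out indices, out mask, out homography);

            //modelImage may have produced no descriptors (e.g. a featureless frame),
            //in which case everything except modelKeyPoints is still null
            if (mask != null && homography != null)
            {
                //safe to draw the matches and project the model outline here
            }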
Example #52
0
        //public static string DoImageRegonition(string FullFeaFName, Stream ImgStream)

        public static Dictionary <string, string> DoImageRegonition(string[] SevenFeas, string[] CocaFeas, Stream ImgStream)
        {
            Dictionary <string, string> regres = new Dictionary <string, string>(4);
            StringBuilder sb       = new StringBuilder();
            const int     Seven_DV = 400;
            const int     Coca_DV  = 300;

            try
            {
                SIFTDetector siftdector = new SIFTDetector();
                //the following code is unnecessary because we extract the features first;
                //it would otherwise pre-convert this image to gray-scale.
                //this code still needs to be refactored
                // Image<Gray, Byte> modelImage = new Image<Gray, byte>(FullMoldeImg);
                //Image<Gray, Byte> modelImage = new Image<Gray, byte>(FullMoldeImgName);
                BinaryFormatter _bf      = new BinaryFormatter();
                int             sevenlen = SevenFeas.Length;
                int             cocalen  = CocaFeas.Length;
                //initialize the dictionary variable

                regres.Add("seven", "no");
                regres.Add("coca", "no");
                regres.Add("ma", "none");
                regres.Add("excep", "none");

                //Initialize the image to be compared
                Image <Gray, Byte> observedImage = GetCVImage(ImgStream);
                MKeyPoint[]        objmkps       = siftdector.DetectKeyPoints(observedImage);
                ImageFeature[]     imageFeatures = siftdector.ComputeDescriptors(observedImage, objmkps);

                //PointF[] _obimgPA = GetPointFfromFeatures(imageFeatures, imageFeatures.Length);
                //int _obimgPN = _obimgPA.Length;

                //Doing seven matching

                for (int idx = 0; idx < sevenlen; idx++)
                {
                    //Get the feature file
                    Stream stream = File.Open(SevenFeas[idx], FileMode.Open);
                    //Deserialize the file to get the features
                    ImageFeature[] sevenFeatures = (ImageFeature[])_bf.Deserialize(stream);
                    stream.Dispose();
                    int slen = sevenFeatures.Length;
                    //PointF[] sevenPA = GetPointFfromFeatures(sevenFeatures, _obimgPN);


                    //set up the tracker
                    Features2DTracker seventrac = new Features2DTracker(sevenFeatures);
                    ////Doing seven matching
                    Features2DTracker.MatchedImageFeature[] sevenmatchedfea = seventrac.MatchFeature(imageFeatures, 2, 20);
                    sevenmatchedfea = Features2DTracker.VoteForUniqueness(sevenmatchedfea, 0.8);
                    sevenmatchedfea = Features2DTracker.VoteForSizeAndOrientation(sevenmatchedfea, 1.5, 20);

                    //Get matching result matrix
                    HomographyMatrix sevenhomography = Features2DTracker.GetHomographyMatrixFromMatchedFeatures(sevenmatchedfea);
                    //Matrix<float>  sevenhomography =  CameraCalibration.FindHomography(sevenPA,_obimgPA,HOMOGRAPHY_METHOD.RANSAC,3).Convert<float>();
                    //sevenmatchedfea.
                    //fill result into dictionary variable
                    if (sevenhomography != null)
                    {
                        if (Math.Abs(sevenhomography.Sum) > Seven_DV)
                        {
                            regres["seven"] = "yes";
                        }

                        sb.Append("ssum:");
                        sb.Append(sevenhomography.Sum.ToString());
                        //sb.Append("| sidx:");
                        // sb.Append(idx.ToString());

                        break;
                    }
                }

                //Doing Coca image matching
                for (int idx2 = 0; idx2 < cocalen; idx2++)
                {
                    //Get the feature file
                    Stream stream = File.Open(CocaFeas[idx2], FileMode.Open);
                    //Deserialize the file to get the features
                    ImageFeature[] cocaFeatures = (ImageFeature[])_bf.Deserialize(stream);
                    stream.Dispose();
                    //PointF[] cocaPA = GetPointFfromFeatures(cocaFeatures, _obimgPN);
                    //cocaFeatures.CopyTo(cocaPA, 0);

                    //Matrix<float> cocahomography = CameraCalibration.FindHomography(cocaPA, _obimgPA, HOMOGRAPHY_METHOD.RANSAC, 3).Convert<float>();
                    //set up the tracker
                    Features2DTracker cocatrac = new Features2DTracker(cocaFeatures);
                    ////Doing Coca matching
                    Features2DTracker.MatchedImageFeature[] cocamatchedfea = cocatrac.MatchFeature(imageFeatures, 2, 20);
                    cocamatchedfea = Features2DTracker.VoteForUniqueness(cocamatchedfea, 0.8);
                    cocamatchedfea = Features2DTracker.VoteForSizeAndOrientation(cocamatchedfea, 1.5, 20);
                    //Get matching result matrix
                    HomographyMatrix cocahomography = Features2DTracker.GetHomographyMatrixFromMatchedFeatures(cocamatchedfea);
                    //fill result into dictionary variable
                    if (cocahomography != null)
                    {
                        if (Math.Abs(cocahomography.Sum) > Coca_DV)
                        {
                            regres["coca"] = "yes";
                        }
                        sb.Append("#csum:");
                        sb.Append(cocahomography.Sum.ToString());
                        //sb.Append(",cidx:");
                        //sb.Append(idx2.ToString());
                        break;
                    }
                }
            }
            catch (Exception err)
            {
                regres["excep"] = err.Message;
                Console.WriteLine(err.Message);
            }
            if (sb.Length > 0)
            {
                regres["ma"] = sb.ToString();
                sb           = null;
            }
            return(regres);
        }
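A hypothetical caller for the recognizer above; the directories, file pattern and image path are placeholders, and the serialized feature files are assumed to have been written earlier with the same BinaryFormatter layout:
        public static void RunRecognition()
        {
            string[] sevenFeas = Directory.GetFiles(@"features\seven", "*.fea");   //placeholder feature files
            string[] cocaFeas  = Directory.GetFiles(@"features\coca", "*.fea");

            using (Stream imgStream = File.OpenRead(@"upload\photo.jpg"))          //placeholder input image
            {
                Dictionary<string, string> res = DoImageRegonition(sevenFeas, cocaFeas, imgStream);
                Console.WriteLine("seven: {0}, coca: {1}, details: {2}, error: {3}",
                                  res["seven"], res["coca"], res["ma"], res["excep"]);
            }
        }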
Example #53
0
 /// <summary>
 /// Transform a 3D input point (0-1 space) into the output point space (i.e. the verts in WPF).
 /// </summary>
 /// <param name="tIn">The input point.</param>
 /// <returns>The transformed output point.</returns>
 public Point3D TransformPoint(Point3D tIn)
 {
     return(HomographyMatrix.Transform(tIn));
 }
Example #54
0
        /// <summary>
        /// calculates matrix of perspective transform such that:
        /// (t_i x'_i,t_i y'_i,t_i)^T=map_matrix (x_i,y_i,1)^T
        /// where dst(i)=(x'_i,y'_i), src(i)=(x_i,y_i), i=0..3.
        /// </summary>
        /// <param name="src">Coordinates of 4 quadrangle vertices in the source image</param>
        /// <param name="dest">Coordinates of the 4 corresponding quadrangle vertices in the destination image</param>
        /// <returns>The 3x3 Homography matrix</returns>
        public static HomographyMatrix GetPerspectiveTransform(PointF[] src, PointF[] dest)
        {
            Debug.Assert(src.Length >= 4, "The source should contain at least 4 points");
             Debug.Assert(dest.Length >= 4, "The destination should contain at least 4 points");

             HomographyMatrix rot = new HomographyMatrix();
             CvInvoke.cvGetPerspectiveTransform(src, dest, rot);
             return rot;
        }
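A small worked sketch (not from the original source) of the formula in the summary: four corners of a skewed quadrilateral are mapped onto an upright 200x100 rectangle, and the returned matrix is then applied with ProjectPoints, the same call used in the other examples here.
            //corners of a skewed quadrilateral in the source image (clockwise from top-left)
            PointF[] src = new PointF[] {
               new PointF(30, 20), new PointF(250, 40),
               new PointF(240, 150), new PointF(20, 130) };

            //corresponding corners of the upright destination rectangle
            PointF[] dest = new PointF[] {
               new PointF(0, 0), new PointF(200, 0),
               new PointF(200, 100), new PointF(0, 100) };

            HomographyMatrix h = GetPerspectiveTransform(src, dest);

            //apply the transform to a source point; by construction the first corner maps to (0, 0)
            PointF[] probe = new PointF[] { new PointF(30, 20) };
            h.ProjectPoints(probe);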
Example #55
0
        public void TestPlanarObjectDetector()
        {
            Image<Gray, byte> box = new Image<Gray, byte>("box.png");
             Image<Gray, byte> scene = new Image<Gray,byte>("box_in_scene.png");
             //Image<Gray, Byte> scene = box.Rotate(1, new Gray(), false);

             using (PlanarObjectDetector detector = new PlanarObjectDetector())
             {
            Stopwatch watch = Stopwatch.StartNew();
            LDetector keypointDetector = new LDetector();
            keypointDetector.SetDefaultParameters();

            PatchGenerator pGen = new PatchGenerator();
            pGen.SetDefaultParameters();

            detector.Train(box, 300, 31, 50, 9, 5000, ref keypointDetector, ref pGen);
            watch.Stop();
            Trace.WriteLine(String.Format("Training time: {0} milliseconds.", watch.ElapsedMilliseconds));

            MKeyPoint[] modelPoints = detector.GetModelPoints();
            int i = modelPoints.Length;

            HomographyMatrix h = new HomographyMatrix();
            watch = Stopwatch.StartNew();
            PointF[] corners = detector.Detect(scene, h);
            watch.Stop();
            Trace.WriteLine(String.Format("Detection time: {0} milliseconds.", watch.ElapsedMilliseconds));

            foreach (PointF c in corners)
            {
               scene.Draw(new CircleF(c, 2), new Gray(255), 1);
            }
            scene.DrawPolyline(Array.ConvertAll<PointF, Point>(corners, Point.Round), true, new Gray(255), 2);
             }
        }