public void TestNot()
      {
         Matrix<byte> m = new Matrix<byte>(10, 8);
         m.SetValue(1.0);
         m._Not();
         byte[,] d2 = m.Data;

         foreach (byte v in d2)
            EmguAssert.IsTrue(254.0 == v);
      }
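The assertion holds because bitwise NOT on a byte flips all 8 bits, so ~v == 255 - v and every 1 becomes 254. A minimal standalone sketch of the same identity (plain C#, no Emgu types):

using System;

class NotSketch
{
    static void Main()
    {
        byte v = 1;
        byte inverted = (byte)~v; // flips all 8 bits: 255 - 1
        Console.WriteLine(inverted); // prints 254, matching the assertion above
    }
}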
        public void Evaluate(int SpreadMax)
        {
            FStatus.SliceCount = SpreadMax;
            FOutPositions1.SliceCount = SpreadMax;
            FOutPositions2.SliceCount = SpreadMax;

            for (int i = 0; i < SpreadMax; i++)
            {
                if (!FDo[i])
                    continue;

                var input1 = FInput1[i];
                var input2 = FInput2[i];

                if (input1 == null || input2 == null)
                    continue;
                if (!input1.Allocated || !input2.Allocated)
                    continue;

                Matrix<byte> mask;
                var matcher = new BruteForceMatcher<float>(DistanceType.L2);
                matcher.Add(input2.Descriptors);

                var indices = new Matrix<int>(input1.Descriptors.Rows, 2);
                using (Matrix<float> distance = new Matrix<float>(input1.Descriptors.Rows, 2))
                {
                    matcher.KnnMatch(input1.Descriptors, indices, distance, 2, null);
                    mask = new Matrix<byte>(distance.Rows, 1);
                    mask.SetValue(255);
                    Features2DToolbox.VoteForUniqueness(distance, FUniqueness[i], mask);
                }

                // VoteForSizeAndOrientation prunes mask in place; its return value
                // (the surviving match count) is not used below, only the mask is.
                Features2DToolbox.VoteForSizeAndOrientation(input2.KeyPoints, input1.KeyPoints, indices, mask, 1.5, 20);

                var positions1 = FOutPositions1[i];
                var positions2 = FOutPositions2[i];

                positions1.SliceCount = 0;
                positions2.SliceCount = 0;

                for (int j = 0; j < mask.Rows; j++)
                {
                    if (mask[j, 0] != 0)
                    {
                        var index2 = indices[j, 0];
                        var point1 = input1.KeyPoints[j];
                        var point2 = input2.KeyPoints[index2];

                        positions1.Add(new Vector2D(point1.Point.X, point1.Point.Y));
                        positions2.Add(new Vector2D(point2.Point.X, point2.Point.Y));
                    }
                }
            }
        }
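VoteForUniqueness applies Lowe's ratio test to the two nearest-neighbour distances; the GPU branch of FindModelImageInObservedImage further down reproduces it with a Multiply and a Compare. A rough CPU sketch of that test, as an illustration rather than the library's actual implementation:

static class MatchFilter
{
    // Keep row i only when the best distance is at most
    // uniquenessThreshold times the second-best distance.
    public static void RatioTest(float[,] dist, double uniquenessThreshold, byte[] mask)
    {
        for (int i = 0; i < dist.GetLength(0); i++)
            if (dist[i, 0] > uniquenessThreshold * dist[i, 1])
                mask[i] = 0; // ambiguous match: reject
    }
}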
Example #3
        public static void Test()
        {
            Matrix mA = new Matrix(2, 3);
            Matrix mB = new Matrix(3, 2);
            Matrix mC = new Matrix(2, 2);

            mA.SetValue(0, 0, 0.11);
            mA.SetValue(0, 1, 0.12);
            mA.SetValue(0, 2, 0.13);
            mA.SetValue(1, 0, 0.21);
            mA.SetValue(1, 1, 0.22);
            mA.SetValue(1, 2, 0.23);

            mB.SetValue(0, 0, 1011);
            mB.SetValue(0, 1, 1012);
            mB.SetValue(1, 0, 1021);
            mB.SetValue(1, 1, 1022);
            mB.SetValue(2, 0, 1031);
            mB.SetValue(2, 1, 1032);

            Blas.DGemm(Blas.TransposeType.NoTranspose, Blas.TransposeType.NoTranspose, 1.0, mA, mB, 0.0, ref mC);

            Console.WriteLine(mC.GetValue(0, 0) + " , " + mC.GetValue(0, 1));
            Console.WriteLine(mC.GetValue(1, 0) + " , " + mC.GetValue(1, 1));
        }
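Assuming SetValue(row, column, value), the expected product can be checked by hand: mC[0,0] = 0.11·1011 + 0.12·1021 + 0.13·1031 = 367.76, mC[0,1] = 368.12, mC[1,0] = 674.06 and mC[1,1] = 674.72, so the two WriteLine calls should print "367.76 , 368.12" and "674.06 , 674.72".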
Example #4
        public static void Test2()
        {
            const uint MARGIN = 1;

            Matrix mA = new Matrix(2 + MARGIN, 3 + MARGIN);
            Matrix mB = new Matrix(3, 2);
            Matrix mC = new Matrix(2, 2);

            mA.SetValue(0 + MARGIN, 0 + MARGIN, 0.11);
            mA.SetValue(0 + MARGIN, 1 + MARGIN, 0.12);
            mA.SetValue(0 + MARGIN, 2 + MARGIN, 0.13);
            mA.SetValue(1 + MARGIN, 0 + MARGIN, 0.21);
            mA.SetValue(1 + MARGIN, 1 + MARGIN, 0.22);
            mA.SetValue(1 + MARGIN, 2 + MARGIN, 0.23);

            mB.SetValue(0, 0, 1011);
            mB.SetValue(0, 1, 1012);
            mB.SetValue(1, 0, 1021);
            mB.SetValue(1, 1, 1022);
            mB.SetValue(2, 0, 1031);
            mB.SetValue(2, 1, 1032);

            MatrixView mViewA = new MatrixView(mA, MARGIN, MARGIN, mA.Columns - MARGIN, mA.Rows - MARGIN);
            MatrixView mViewB = new MatrixView(mB, 0, 0, mB.Columns, mB.Rows);
            MatrixView mViewC = new MatrixView(mC, 0, 0, mC.Columns, mC.Rows);
            Blas.DGemm(Blas.TransposeType.NoTranspose, Blas.TransposeType.NoTranspose, 1.0, mViewA, mViewB, 0.0, ref mViewC);

            Console.WriteLine(mC.GetValue(0, 0) + " , " + mC.GetValue(0, 1));
            Console.WriteLine(mC.GetValue(1, 0) + " , " + mC.GetValue(1, 1));
        }
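Test2 should print the same four values as Test: the MatrixView over mA skips the one-row, one-column margin, so DGemm operates on exactly the same 2×3 operand, just stored at an offset.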
Example #5
 public bool Recognize(Image<Gray, Byte> observedImage, out PointF[] Region)
 {
     // extract features from the observed image
     observedKeyPoints = new VectorOfKeyPoint();
     Matrix<float> observedDescriptors = surfCPU.DetectAndCompute(observedImage, null, observedKeyPoints);
     BruteForceMatcher<float> matcher = new BruteForceMatcher<float>(DistanceType.L2);
     matcher.Add(modelDescriptors);
     indices = new Matrix<int>(observedDescriptors.Rows, k);
     using (Matrix<float> dist = new Matrix<float>(observedDescriptors.Rows, k))
     {
         matcher.KnnMatch(observedDescriptors, indices, dist, k, null);
         mask = new Matrix<byte>(dist.Rows, 1);
         mask.SetValue(255);
         Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
     }
     int nonZeroCount = CvInvoke.cvCountNonZero(mask);
     if (nonZeroCount >= requiredNonZeroCount)
     {
         nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, scaleIncrement, RotationBins);
         if (nonZeroCount >= requiredNonZeroCount)
             homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, ransacReprojThreshold);
     }
     bool ObjectFound;
     if (homography != null)
     {  //draw a rectangle along the projected model
         Rectangle rect = modelImage.ROI;
         Region = new PointF[] {
             new PointF(rect.Left, rect.Bottom),
             new PointF(rect.Right, rect.Bottom),
             new PointF(rect.Right, rect.Top),
             new PointF(rect.Left, rect.Top)};
         homography.ProjectPoints(Region);
         ObjectFound = true;
     }
     else
     {
         Region = null;
         ObjectFound = false;
     }
     return ObjectFound;
 }
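A hypothetical call site (the instance name `recognizer` and its prior setup with a model image are assumptions; the listing only shows the method):

PointF[] region;
if (recognizer.Recognize(observedImage, out region))
{
    // region holds the four projected corners of the model's ROI
    foreach (PointF corner in region)
        Console.WriteLine(corner);
}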
        public Image<Bgr, byte> DrawResult(Image<Gray, byte> modelImage, Image<Gray, byte> observedImage,out double area, int minarea, out Point center)
        {
            //double estimated_dist =99999;
            center = new Point(400, 224);
            area = 0;
            //modelImage.Save("D:\\temp\\modelimage.jpg");
            //observedImage.Save("D:\\temp\\observedimage.jpg");

            //homography matrix
            HomographyMatrix homography = null;

            //SURF detector
            var surfCpu = new SURFDetector(500, false);

            //key points in the model image and the observed image

            Matrix<byte> mask;

            //k for the KNN match
            var k = 2;
            //uniqueness (filtering) threshold
            var uniquenessThreshold = 0.8;

            //extract SURF key points and descriptors from the model image
            var modelKeyPoints = surfCpu.DetectKeyPointsRaw(modelImage, null);
            var modelDescriptors = surfCpu.ComputeDescriptorsRaw(modelImage, null, modelKeyPoints);

            // extract SURF key points and descriptors from the observed image
            var observedKeyPoints = surfCpu.DetectKeyPointsRaw(observedImage, null);
            var observedDescriptors = surfCpu.ComputeDescriptorsRaw(observedImage, null, observedKeyPoints);
            if (observedDescriptors == null)
            {

                return null;
            }

            //match the feature vectors with the brute-force matcher
            //var bfmatcher = new BruteForceMatcher<float>(DistanceType.L2);
            //bfmatcher.Add(modelDescriptors);
            var indices = new Matrix<int>(observedDescriptors.Rows, k);
            var flannMatcher = new Index(modelDescriptors, 4);
            //filter the match pairs by descriptor distance
            using (var dist = new Matrix<float>(observedDescriptors.Rows, k))
            {
                //2-nearest-neighbour descriptor match
                //bfmatcher.KnnMatch(observedDescriptors, indices, dist, k, null);
                flannMatcher.KnnSearch(observedDescriptors, indices, dist, k, 24);
                //successful matches are recorded in mask
                mask = new Matrix<byte>(dist.Rows, 1);
                mask.SetValue(255);
                //reject non-unique matches with the threshold; survivors remain set in mask
                Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
            }

            var nonZeroCount = CvInvoke.cvCountNonZero(mask);
            if (nonZeroCount >= 10)
            {
                //reject key points with inconsistent rotation or scale; survivors remain in mask
                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices,
                    mask, 1.5, 20);
                if (nonZeroCount >= 10)
                    //build the homography matrix from the remaining points
                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints,
                        observedKeyPoints, indices, mask, 2);
            }


            //draw the matched feature points
            //Image<Bgr, Byte> result = Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints,indices, new Bgr(0, 0, 255), new Bgr(0, 255, 0), mask, Features2DToolbox.KeypointDrawType.DEFAULT);
            // result.Save("D:\\temp\\matchedpoints.jpg");
            var result = Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints,
                indices, new Bgr(0, 0, 255), new Bgr(0, 255, 0), mask, Features2DToolbox.KeypointDrawType.DEFAULT);

            #region draw the projected region on the Image

            //draw the homography projection
            if (homography != null)
            {
                var rect = modelImage.ROI;
                /*PointF[] pts = new PointF[] {
               new PointF(rect.Left, rect.Bottom),
               new PointF(rect.Right, rect.Bottom),
               new PointF(rect.Right, rect.Top),
               new PointF(rect.Left, rect.Top)
                    };*/
                var pts = new[]
                {
                    new PointF(rect.Left + (rect.Right - rect.Left)/5, rect.Bottom - (rect.Bottom - rect.Top)/5),
                    new PointF(rect.Right - (rect.Right - rect.Left)/5, rect.Bottom - (rect.Bottom - rect.Top)/5),
                    new PointF(rect.Right - (rect.Right - rect.Left)/5, rect.Top + (rect.Bottom - rect.Top)/5),
                    new PointF(rect.Left + (rect.Right - rect.Left)/5, rect.Top + (rect.Bottom - rect.Top)/5)
                };
                //project the four corner points through the homography (accounting for the rotation and distortion of the whole image) and draw the quadrilateral
                homography.ProjectPoints(pts);
                area = Getarea(pts);
                double xsum = 0;
                double ysum = 0;
                foreach (var point in pts)
                {
                    xsum += point.X;
                    ysum += point.Y;
                }
                center = new Point(Convert.ToInt32(xsum / 4), Convert.ToInt32(ysum / 4));
                if (area > minarea)
                {
                    var temp = new Image<Bgr, byte>(result.Width, result.Height);
                    temp.DrawPolyline(Array.ConvertAll(pts, Point.Round), true, new Bgr(Color.Red), 5);
                    //estimated_dist = GetDist(pts);

                    var a = CountContours(temp.ToBitmap());
                    if (a == 2)
                    {
                        result.DrawPolyline(Array.ConvertAll(pts, Point.Round), true, new Bgr(Color.Red), 5);
                        //result.Save("D:\\temp\\" + estimated_dist.ToString() + ".jpg");
                    }
                    else
                    {
                        area = 0; //dst = estimated_dist;
                        return result;
                    }
                }
            }
            else area = 0;

            #endregion

            //dst = estimated_dist;
            return result;
        }
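Note that center is simply the centroid of the four projected corners (the mean of their coordinates), and the quadrilateral is only drawn onto result when its area exceeds minarea and the CountContours check on a blank canvas returns 2, which presumably rejects degenerate or self-intersecting projections.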
        public bool testSIFT(Image<Gray, Byte> modelImage, Image<Gray, byte> observedImage)
        {
            bool isFound = false;
            HomographyMatrix homography = null;

            SIFTDetector siftCPU = new SIFTDetector();
            VectorOfKeyPoint modelKeyPoints;
            VectorOfKeyPoint observedKeyPoints;
            Matrix<int> indices;

            Matrix<byte> mask;
            int k = 2;
            double uniquenessThreshold = 0.8;

            //extract features from the object image
            modelKeyPoints = siftCPU.DetectKeyPointsRaw(modelImage, null);
            Matrix<float> modelDescriptors = siftCPU.ComputeDescriptorsRaw(modelImage, null, modelKeyPoints);

            // extract features from the observed image
            observedKeyPoints = siftCPU.DetectKeyPointsRaw(observedImage, null);
            Matrix<float> observedDescriptors = siftCPU.ComputeDescriptorsRaw(observedImage, null, observedKeyPoints);
            BruteForceMatcher<float> matcher = new BruteForceMatcher<float>(DistanceType.L2);
            matcher.Add(modelDescriptors);

            indices = new Matrix<int>(observedDescriptors.Rows, k);
            using (Matrix<float> dist = new Matrix<float>(observedDescriptors.Rows, k))
            {
                matcher.KnnMatch(observedDescriptors, indices, dist, k, null);
                mask = new Matrix<byte>(dist.Rows, 1);
                mask.SetValue(255);
                Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
            }

            int nonZeroCount = CvInvoke.cvCountNonZero(mask);
            if (nonZeroCount >= 4)
            {
                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                if (nonZeroCount >= 4)
                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
            }

            //Draw the matched keypoints
            Image<Bgr, Byte> result = Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints,
               indices, new Bgr(255, 255, 255), new Bgr(255, 255, 255), mask, Features2DToolbox.KeypointDrawType.DEFAULT);

            #region draw the projected region on the image
            if (homography != null)
            {  //draw a rectangle along the projected model
                Rectangle rect = modelImage.ROI;
                PointF[] pts = new PointF[] {
                   new PointF(rect.Left, rect.Bottom),
                   new PointF(rect.Right, rect.Bottom),
                   new PointF(rect.Right, rect.Top),
                   new PointF(rect.Left, rect.Top)};
                homography.ProjectPoints(pts);

                if (CvInvoke.cvCountNonZero(mask) >= 10)
                    isFound = true;

                result.DrawPolyline(Array.ConvertAll<PointF, Point>(pts, Point.Round), true, new Bgr(Color.LightGreen), 5);
            }
            #endregion
            return isFound;
        }
Example #8
        private static Matrix MultiplyInternal(Vector3D left, Vector3D right)
        {
            var matrix = new Matrix(3, 3);
            matrix.SetValue(0, 0, left.X * right.X);
            matrix.SetValue(0, 1, left.X * right.Y);
            matrix.SetValue(0, 2, left.X * right.Z);

            matrix.SetValue(1, 0, left.Y * right.X);
            matrix.SetValue(1, 1, left.Y * right.Y);
            matrix.SetValue(1, 2, left.Y * right.Z);

            matrix.SetValue(2, 0, left.Z * right.X);
            matrix.SetValue(2, 1, left.Z * right.Y);
            matrix.SetValue(2, 2, left.Z * right.Z);

            return matrix;
        }
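MultiplyInternal builds the outer product of the two vectors: matrix[i, j] = left_i * right_j, i.e. the 3×3 matrix left · rightᵀ. For example, left = (1, 0, 0) and right = (0, 2, 0) yield a matrix whose only non-zero entry is matrix[0, 1] = 2.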
        public static void FindMatch(Image<Gray, Byte> modelImage, Image<Gray, byte> observedImage, out long matchTime, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints, out Matrix<int> indices, out Matrix<byte> mask, out HomographyMatrix homography)
        {
            int k = 2;
            double uniquenessThreshold = 0.8;
            SURFDetector surfCPU = new SURFDetector(500, false);
            Stopwatch watch;
            homography = null;

            //extract features from the object image
            modelKeyPoints = new VectorOfKeyPoint();
            Matrix<float> modelDescriptors = surfCPU.DetectAndCompute(modelImage, null, modelKeyPoints);

            watch = Stopwatch.StartNew();

            // extract features from the observed image
            observedKeyPoints = new VectorOfKeyPoint();
            Matrix<float> observedDescriptors = surfCPU.DetectAndCompute(observedImage, null, observedKeyPoints);
            BruteForceMatcher<float> matcher = new BruteForceMatcher<float>(DistanceType.L2);
            matcher.Add(modelDescriptors);

            indices = new Matrix<int>(observedDescriptors.Rows, k);
            using (Matrix<float> dist = new Matrix<float>(observedDescriptors.Rows, k))
            {
                matcher.KnnMatch(observedDescriptors, indices, dist, k, null);
                mask = new Matrix<byte>(dist.Rows, 1);
                mask.SetValue(255);
                Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
            }

            int nonZeroCount = CvInvoke.cvCountNonZero(mask);
            if (nonZeroCount >= 4)
            {
                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                if (nonZeroCount >= 4)
                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
            }
            watch.Stop();

            matchTime = watch.ElapsedMilliseconds;
        }
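A hypothetical call site for FindMatch, assuming two grayscale images on disk (the file names are placeholders):

long matchTime;
VectorOfKeyPoint modelKp, observedKp;
Matrix<int> matchIndices;
Matrix<byte> matchMask;
HomographyMatrix h;
FindMatch(new Image<Gray, byte>("model.png"), new Image<Gray, byte>("scene.png"),
    out matchTime, out modelKp, out observedKp, out matchIndices, out matchMask, out h);
Console.WriteLine("Matched in {0} ms; homography {1}", matchTime, h == null ? "not found" : "found");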
Example #10
      public void TestRTreesLetterRecognition()
      {
         Matrix<float> data, response;
         ReadLetterRecognitionData(out data, out response);

         int trainingSampleCount = (int) (data.Rows * 0.8);

         Matrix<Byte> varType = new Matrix<byte>(data.Cols + 1, 1);
         varType.SetValue((byte) MlEnum.VarType.Numerical); //the data is numerical
         varType[data.Cols, 0] = (byte) MlEnum.VarType.Categorical; //the response is categorical

         Matrix<byte> sampleIdx = new Matrix<byte>(data.Rows, 1);
         using (Matrix<byte> sampleRows = sampleIdx.GetRows(0, trainingSampleCount, 1))
            sampleRows.SetValue(255);

         using (RTrees forest = new RTrees())
         using (TrainData td = new TrainData(data, MlEnum.DataLayoutType.RowSample, response, null, sampleIdx, null, varType))
         {
            forest.MaxDepth = 10;
            forest.MinSampleCount = 10;
            forest.RegressionAccuracy = 0.0f;
            forest.UseSurrogates = false;
            forest.MaxCategories = 15;
            forest.CalculateVarImportance = true;
            forest.ActiveVarCount = 4;
            forest.TermCriteria = new MCvTermCriteria(100, 0.01f);
            bool success = forest.Train(td);

            if (!success)
               return;
            
            double trainDataCorrectRatio = 0;
            double testDataCorrectRatio = 0;
            for (int i = 0; i < data.Rows; i++)
            {
               using (Matrix<float> sample = data.GetRow(i))
               {
                  double r = forest.Predict(sample, null);
                  r = Math.Abs(r - response[i, 0]);
                  if (r < 1.0e-5)
                  {
                     if (i < trainingSampleCount)
                        trainDataCorrectRatio++;
                     else
                        testDataCorrectRatio++;
                  }
               }
            }

            trainDataCorrectRatio /= trainingSampleCount;
            testDataCorrectRatio /= (data.Rows - trainingSampleCount);

            StringBuilder builder = new StringBuilder("Variable Importance: ");
            /*
            using (Matrix<float> varImportance = forest.VarImportance)
            {
               for (int i = 0; i < varImportance.Cols; i++)
               {
                  builder.AppendFormat("{0} ", varImportance[0, i]);
               }
            }*/

            EmguAssert.WriteLine(String.Format("Prediction accuracy for training data :{0}%", trainDataCorrectRatio * 100));
            EmguAssert.WriteLine(String.Format("Prediction accuracy for test data :{0}%", testDataCorrectRatio * 100));
            EmguAssert.WriteLine(builder.ToString());
         }
      }
        /// <summary>
        /// Draw the model image and observed image, the matched features and homography projection.
        /// </summary>
        /// <param name="modelImageFileName">The model image</param>
        /// <param name="observedImageBitmap">The observed image</param>
        /// <param name="matchTime">The output total time for computing the homography matrix.</param>
        /// <returns>The model image and observed image, the matched features and homography projection.</returns>
        private System.Drawing.Point[] DrawBruteForceMatch(String modelImageFileName, Bitmap observedImageBitmap, out long matchTime)
            {
                try
                {
                    Image<Gray, Byte> modelImage = new Image<Gray, byte>(modelImageFileName);
                    Image<Gray, Byte> observedImage = new Image<Gray, byte>(observedImageBitmap);
                    HomographyMatrix homography = null;
                    Stopwatch watch;
                    SURFDetector surfCPU = new SURFDetector(500, false);
                    VectorOfKeyPoint modelKeyPoints;
                    VectorOfKeyPoint observedKeyPoints;
                    Matrix<int> indices;

                    Matrix<byte> mask;
                    int k = 2;
                    double uniquenessThreshold = 0.8;

                    //extract features from the object image
                    modelKeyPoints = surfCPU.DetectKeyPointsRaw(modelImage, null);
                    Matrix<float> modelDescriptors = surfCPU.ComputeDescriptorsRaw(modelImage, null, modelKeyPoints);

                    watch = Stopwatch.StartNew();

                    // extract features from the observed image
                    observedKeyPoints = surfCPU.DetectKeyPointsRaw(observedImage, null);
                    Matrix<float> observedDescriptors = surfCPU.ComputeDescriptorsRaw(observedImage, null, observedKeyPoints);
                    BruteForceMatcher<float> matcher = new BruteForceMatcher<float>(DistanceType.L2);
                    matcher.Add(modelDescriptors);

                    indices = new Matrix<int>(observedDescriptors.Rows, k);
                    Matrix<float> dist = new Matrix<float>(observedDescriptors.Rows, k);
                    matcher.KnnMatch(observedDescriptors, indices, dist, k, null);
                    mask = new Matrix<byte>(dist.Rows, 1);
                    mask.SetValue(255);
                    Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);

                    int nonZeroCount = CvInvoke.cvCountNonZero(mask);
                    if (nonZeroCount >= 4)
                    {
                        nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                        if (nonZeroCount >= 4)
                            homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
                    }
                    watch.Stop();

                    //Draw the matched keypoints
                    Image<Bgr, Byte> result = Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints,
                                                            indices, new Bgr(255, 255, 255), new Bgr(255, 255, 255), mask, Features2DToolbox.KeypointDrawType.DEFAULT);

                    System.Drawing.Point[] newpts = null;
                    #region draw the projected region on the image
                    if (homography != null)
                    {
                        //draw a rectangle along the projected model
                        Rectangle rect = modelImage.ROI;
                        PointF[] pts = new PointF[] {
                            new PointF(rect.Left, rect.Bottom),
                            new PointF(rect.Right, rect.Bottom),
                            new PointF(rect.Right, rect.Top),
                            new PointF(rect.Left, rect.Top)};
                        homography.ProjectPoints(pts);
                        //result.DrawPolyline(Array.ConvertAll<PointF, System.Drawing.Point>(pts, System.Drawing.Point.Round), true, new Bgr(Color.Red), 2);
                        //result.Save(@"E:\1.jpg");
                        newpts = Array.ConvertAll<PointF, System.Drawing.Point>(pts, System.Drawing.Point.Round);

                    }
                    #endregion
                    matchTime = watch.ElapsedMilliseconds;
                    return newpts;
                }
                catch (Exception e)
                {
                    Console.WriteLine(e.Message);
                    matchTime = 0;
                    return new System.Drawing.Point[] { new System.Drawing.Point(-1, -1), new System.Drawing.Point(-1, -1), new System.Drawing.Point(-1, -1), new System.Drawing.Point(-1, -1) };
                }
        }
Example #12
        /// <summary>
        /// Decomposes the specified matrix, using a QR decomposition.
        /// </summary>
        /// <param name="matrix">The matrix to decompose.</param>
        public void Decompose(Matrix matrix)
        {
            qr = matrix.Clone();
            diagonal = new double[qr.Columns];

            // Main loop.
            for (var k = 0; k < qr.Columns; k++)
            {
                // Compute 2-norm of k-th column without under/overflow.
                double nrm = 0;

                for (var i = k; i < qr.Rows; i++)
                {
                    nrm = MathAlgorithms.Hypotenuse(nrm, qr[i, k]);
                }

                if (nrm != 0.0)
                {
                    // Form k-th Householder vector.
                    if (qr.GetValue(k, k) < 0)
                    {
                        nrm = -nrm;
                    }

                    for (var i = k; i < qr.Rows; i++)
                    {
                        qr.SetValue(i, k, qr.GetValue(i, k) / nrm);
                    }

                    qr.SetValue(k, k, qr.GetValue(k, k) + 1.0);

                    // Apply transformation to remaining columns.
                    for (var j = k + 1; j < qr.Columns; j++)
                    {
                        var s = 0.0;

                        for (var i = k; i < qr.Rows; i++)
                        {
                            s += qr.GetValue(i, k) * qr.GetValue(i, j);
                        }

                        s = (-s) / qr.GetValue(k, k);

                        for (var i = k; i < qr.Rows; i++)
                        {
                            qr.SetValue(i, j, qr.GetValue(i, j) + (s * qr.GetValue(i, k)));
                        }
                    }
                }

                diagonal[k] = -nrm;
            }
        }
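This is the classic JAMA-style Householder QR: for each column k it computes the column's 2-norm with Hypotenuse (avoiding under/overflow), scales the column into a Householder vector (adding 1.0 to the diagonal entry), and reflects the remaining columns against it. Afterwards the lower trapezoid of qr holds the Householder vectors, the part above the diagonal holds R, and diagonal[k] = -nrm records R's k-th diagonal entry.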
Example #13
        /// <summary>
        /// Decomposes the specified matrix using a LU decomposition.
        /// </summary>
        /// <param name="matrix">The matrix to decompose.</param>
        public void Decompose(Matrix matrix)
        {
            LU = matrix.Clone();

            pivots = new int[LU.Rows];

            for (var i = 0; i < LU.Rows; i++)
            {
                pivots[i] = i;
            }

            pivotSign = 1;

            var column = new double[LU.Rows];

            for (var j = 0; j < LU.Columns; j++)
            {
                for (var i = 0; i < LU.Rows; i++)
                {
                    column[i] = LU.GetValue(i, j);
                }

                // Apply previous transformations.
                for (var i = 0; i < LU.Rows; i++)
                {
                    // Most of the time is spent in the following dot product.
                    var kmax = Math.Min(i, j);
                    var s = 0.0;

                    for (var k = 0; k < kmax; k++)
                    {
                        s += LU.GetValue(i, k) * column[k];
                    }

                    LU.SetValue(i, j, column[i] - s);
                    column[i] -= s;
                }

                // Find pivot and exchange if necessary.
                var p = j;

                for (var i = j + 1; i < LU.Rows; i++)
                {
                    if (Math.Abs(column[i]) > Math.Abs(column[p]))
                    {
                        p = i;
                    }
                }

                if (p != j)
                {
                    for (var k = 0; k < LU.Columns; k++)
                    {
                        var t = LU[p, k];
                        LU.SetValue(p, k, LU[j, k]);
                        LU.SetValue(j, k, t);
                    }

                    Swapper.Swap(pivots, p, j);

                    pivotSign = -pivotSign;
                }

                // Compute multipliers.
                if ((j < LU.Rows) && (LU.GetValue(j, j) != 0.0))
                {
                    for (var i = j + 1; i < LU.Rows; i++)
                    {
                        LU.SetValue(i, j, LU.GetValue(i, j) / LU.GetValue(j, j));
                    }
                }
            }
        }
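This is the standard JAMA-style LU factorization with partial pivoting: on exit, LU holds the unit-lower-triangular multipliers below the diagonal and U on and above it, pivots records the row exchanges, and pivotSign flips with each swap so the determinant's sign can be recovered later.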
        public static Boolean Detect(ObjectDetectee observedScene, ObjectDetectee obj)
        {
            HomographyMatrix homography = null;

            VectorOfKeyPoint observedKeyPoints;
            Matrix<int> indices;

            Matrix<byte> mask;
            int k = 2;
            double uniquenessThreshold = 0.8;
            int testsPassed = 0;

            // extract features from the observed image
            observedKeyPoints = observedScene.objectKeyPoints;
            Matrix<float> observedDescriptors = observedScene.objectDescriptors;
            BruteForceMatcher<float> matcher = new BruteForceMatcher<float>(DistanceType.L2);
            matcher.Add(obj.objectDescriptors);
            if (observedDescriptors == null)
            {
                return false;
            }
            indices = new Matrix<int>(observedDescriptors.Rows, k);
            using (Matrix<float> dist = new Matrix<float>(observedDescriptors.Rows, k))
            {
                matcher.KnnMatch(observedDescriptors, indices, dist, k, null);
                mask = new Matrix<byte>(dist.Rows, 1);
                mask.SetValue(255);
                Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
            }

            int nonZero = 0;
            int nonZeroCount = CvInvoke.cvCountNonZero(mask);
            if (nonZeroCount >= 4)
            {
                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(obj.objectKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                if (nonZeroCount >= 4)
                {
                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(obj.objectKeyPoints, observedKeyPoints, indices, mask, 2);
                    for (int i = 0; i < mask.Height; i++)
                    {
                        for (int j = 0; j < mask.Width; j++)
                        {
                            if (mask[i, j] != 0)
                            {
                                nonZero++;
                            }
                        }
                    }
                    if (nonZero > 4)
                    {
                        testsPassed++;
                    }
                }

            }

            if (homography != null)
            {
                //draw a rectangle along the projected model
                Rectangle rect = obj.objectImage.ROI;
                PointF[] pts = new PointF[] {
                    new PointF(rect.Left, rect.Bottom),
                    new PointF(rect.Right, rect.Bottom),
                    new PointF(rect.Right, rect.Top),
                    new PointF(rect.Left, rect.Top)};

                using (MemStorage m1 = new MemStorage())
                using (MemStorage m2 = new MemStorage())
                {

                    Contour<PointF> objPoly = new Contour<PointF>(m1);
                    Contour<PointF> scenePoly = new Contour<PointF>(m2);
                    pts = pts.OrderBy(p => p.X).ThenBy(p => p.Y).ToArray(); // OrderBy is not in-place; assign the sorted result
                    foreach (PointF i in pts)
                    {
                        objPoly.Push(i);
                    }
                    homography.ProjectPoints(pts);
                    pts = pts.OrderBy(p => p.X).ThenBy(p => p.Y).ToArray(); // re-sort the projected points the same way
                    foreach (PointF i in pts)
                    {
                        scenePoly.Push(i);
                    }
                    double shapeMatch = CvInvoke.cvMatchShapes(objPoly, scenePoly, Emgu.CV.CvEnum.CONTOURS_MATCH_TYPE.CV_CONTOURS_MATCH_I3, 0);
                    double ratio = scenePoly.Area / objPoly.Area;
                    foreach (PointF i in pts)
                    {
                        if (i.X < 0 || i.Y < 0)
                        {
                            return false;
                        }
                    }
                    if (shapeMatch != 0 && shapeMatch <= 2)
                    {
                        testsPassed++;
                    }
                    if (ratio > 0.001 && ratio < 5.25)
                    {
                        testsPassed++;
                    }
                    if (!(Math.Abs(homography.Data[2, 0]) > .005 && Math.Abs(homography.Data[2, 1]) > .005))
                    {
                        testsPassed++;
                    }

                    if (testsPassed >= 2)
                    {
                        return true;
                    }
                    else
                    {
                        return false;
                    }
                }
            }
            else
            {
                return false;
            }
        }
Example #15
      public void TestCompare()
      {
         Matrix<float> f1 = new Matrix<float>(1, 380);
         f1.SetValue(0.8);
         Matrix<float> f2 = new Matrix<float>(f1.Size);
         f2.SetValue(1.0);
         Matrix<byte> mask1 = new Matrix<byte>(f1.Size);
         CvInvoke.Compare(f1, f2, mask1, CvEnum.CmpType.LessEqual);
         int total1 = CvInvoke.CountNonZero(mask1);

         EmguAssert.IsTrue(total1 == f1.Width * f1.Height);

         Matrix<Byte> mask2 = new Matrix<byte>(f1.Size);
         using (ScalarArray ia = new ScalarArray(1.0))
         {
            CvInvoke.Compare(f1, ia, mask2, CvEnum.CmpType.LessEqual);
            int total2 = CvInvoke.CountNonZero(mask2);
            EmguAssert.IsTrue(total1 == total2);
         }
      }
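Every entry of f1 is 0.8 and every entry of f2 is 1.0, so the LessEqual compare sets all entries of mask1 to 255 and total1 equals f1.Width * f1.Height (380). Comparing against the scalar 1.0 through a ScalarArray must produce the same count, which is exactly what the test asserts.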
Example #16
        public void TestNot()
        {
            Matrix<byte> m = new Matrix<byte>(10, 8);
            m.SetValue(1.0);
            m._Not();
            byte[,] d2 = m.Data;

            foreach (byte v in d2)
                Assert.AreEqual(254.0, v);
        }
Example #17
        private void PerformSurfDetection(object sender, EventArgs e)
        {
            this.Text = "working...";
            Application.DoEvents();
            stopwatch.Restart();

            HomographyMatrix homographyMatrix = null;
            SURFDetector surfDetector = new SURFDetector(500, false);
            Image<Gray, Byte> imgMasterGray;
            Image<Gray, Byte> imgToFindGray;
            VectorOfKeyPoint vkpMasterKeyPoints;
            VectorOfKeyPoint vkpToFindKeyPoints;
            Matrix<float> mtxMasterDescriptors;
            Matrix<float> mtxToFindDescriptors;
            Matrix<int> mtxMatchIndices;
            Matrix<float> mtxDistance;
            Matrix<Byte> mtxMask;
            BruteForceMatcher<float> bruteForceMatcher;

            int neighbors = 2;
            double ratioUnique = 0.5;
            int nonZeroElements;
            double scaleIncrement = 1.5;
            int rotationBin = 20;
            double maxReprojectionError = 2.0;

            //PointF[] ptfPointsF;
            //Point ptPoints;

            imgMasterGray = new Image<Gray, byte>(imgMasterColor.ToBitmap());
            imgToFindGray = new Image<Gray, byte>(imgToFindColor.ToBitmap());

            vkpMasterKeyPoints = surfDetector.DetectKeyPointsRaw(imgMasterGray, null);
            mtxMasterDescriptors = surfDetector.ComputeDescriptorsRaw(imgMasterGray, null, vkpMasterKeyPoints);

            vkpToFindKeyPoints = surfDetector.DetectKeyPointsRaw(imgToFindGray, null);
            mtxToFindDescriptors = surfDetector.ComputeDescriptorsRaw(imgToFindGray, null, vkpToFindKeyPoints);

            bruteForceMatcher = new BruteForceMatcher<float>(DistanceType.L2);
            bruteForceMatcher.Add(mtxToFindDescriptors);

            mtxMatchIndices = new Matrix<int>(mtxMasterDescriptors.Rows, neighbors);
            mtxDistance = new Matrix<float>(mtxMasterDescriptors.Rows, neighbors);

            bruteForceMatcher.KnnMatch(mtxMasterDescriptors, mtxMatchIndices, mtxDistance, neighbors, null);

            mtxMask = new Matrix<byte>(mtxDistance.Rows, 1);
            mtxMask.SetValue(255);

            Features2DToolbox.VoteForUniqueness(mtxDistance, ratioUnique, mtxMask);

            nonZeroElements = CvInvoke.cvCountNonZero(mtxMask);
            if (nonZeroElements >= 4)
            {
                nonZeroElements = Features2DToolbox.VoteForSizeAndOrientation(vkpToFindKeyPoints, vkpMasterKeyPoints, mtxMatchIndices, mtxMask, scaleIncrement, rotationBin);
                if (nonZeroElements >= 4)
                {
                    homographyMatrix = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(vkpToFindKeyPoints, vkpMasterKeyPoints, mtxMatchIndices, mtxMask, maxReprojectionError);
                }
            }

            imgCopyToFind = imgToFindColor.Copy();
            imgCopyToFind.Draw(new Rectangle(1, 1, imgCopyToFind.Width - 3, imgCopyToFind.Height - 3), bgrFoundImageColor, 2);

            imgResult = imgMasterColor;
            imgResult = imgResult.ConcateHorizontal(imgCopyToFind);

            if (homographyMatrix != null)
            {
                // draw a rectangle along the projected model
                Rectangle rect = imgCopyToFind.ROI;
                PointF[] pts = new PointF[] {
                    new PointF(rect.Left, rect.Bottom),
                    new PointF(rect.Right, rect.Bottom),
                    new PointF(rect.Right, rect.Top),
                    new PointF(rect.Left, rect.Top)
                };

                homographyMatrix.ProjectPoints(pts);

                Point[] ptPoints = { Point.Round(pts[0]), Point.Round(pts[1]), Point.Round(pts[2]), Point.Round(pts[3]) };

                imgResult.DrawPolyline(ptPoints, true, bgrFoundImageColor, 2);

                int X = Convert.ToInt16((pts[0].X + pts[1].X) / 2) + this.Left;
                int Y = Convert.ToInt16((pts[1].Y + pts[2].Y) / 2) + this.Top + 30;

                LeftClick(X, Y);
            }

            stopwatch.Stop();
            //this.Text = "working time = " + stopwatch.Elapsed.TotalSeconds.ToString() + "sec, done ! ";
        }
        public void SURFDetect(Image<Bgr, Byte> image)
        {
            // Detect KP and calculate descriptors...
            observedKP = surfDetector.DetectKeyPointsRaw(image.Convert<Gray,Byte>(), null);
            observedDescriptors = surfDetector.ComputeDescriptorsRaw(image.Convert<Gray, Byte>(), null, observedKP);

            // Matching
            int k = 2;
            indices = new Matrix<int>(observedDescriptors.Rows, k);
            dist = new Matrix<float>(observedDescriptors.Rows, k);
            matcher.KnnMatch(observedDescriptors, indices, dist, k, null);

            //
            mask = new Matrix<byte>(dist.Rows, 1);
            mask.SetValue(255);
            Features2DToolbox.VoteForUniqueness(dist, 0.8, mask);

            int nonZeroCount = CvInvoke.cvCountNonZero(mask);
            if (nonZeroCount >= 4)
            {
                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(itemKP, observedKP, indices, mask, 1.5, 20);
                if (nonZeroCount >= 4)
                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(itemKP, observedKP, indices, mask, 3);
            }

            // Get keypoints.
            keyPts = new PointF[itemKP.Size];
            classes = new int[itemKP.Size];
            for (int i = 0; i < itemKP.Size; i++)
            {
                keyPts[i] = itemKP[i].Point;
                classes[i] = itemKP[i].ClassId;
            }

            prevFrame = image;

            #region

            // Find ROI
            PointF minXY = new PointF();
            PointF maxXY = new PointF();
            for (int i = 0; i < itemKP.Size; i++)
            {
                PointF pt = keyPts[i];
                if (pt.X < minXY.X) minXY.X = pt.X;
                if (pt.Y < minXY.Y) minXY.Y = pt.Y;
                if (pt.X > maxXY.X) maxXY.X = pt.X;
                if (pt.Y > maxXY.Y) maxXY.Y = pt.Y;
            }

            // Convert ROI to rect
            //roi = new Rectangle((int)minXY.X, (int)minXY.Y, (int)(maxXY.X - minXY.X), (int)(maxXY.Y - minXY.Y));

            //Console.WriteLine("Position: ({0},{1}) \tWidth: {2}\tHeight: {3}", roi.X, roi.Y, roi.Width, roi.Height);

            #endregion

            projectedPoints = null;
            if (homography != null) {
                Rectangle rect = itemImage.ROI;
                projectedPoints = new PointF[] {
                   new PointF(rect.Left, rect.Bottom),
                   new PointF(rect.Right, rect.Bottom),
                   new PointF(rect.Right, rect.Top),
                   new PointF(rect.Left, rect.Top)};
                homography.ProjectPoints(projectedPoints);

                roi = new Rectangle((int)(projectedPoints[3].X), (int)projectedPoints[3].Y, (int)(projectedPoints[1].X - projectedPoints[3].X), (int)(projectedPoints[1].Y - projectedPoints[3].Y));

                // We're always gonna track with CMT now, so this will get initialized no matter what.
                /*
                 * if (trackWithCMT) cmtTracker.Initialize(image, roi);
                 * else cmtTracker = null;
                 */

                // Initialize CMT unconditionally.
                cmtEngine.Initialize(image, roi);

            }
        }
        public static void TestLUDecomposition()
        {
            //-----------------------------
            //| 0.18 | 0.41 | 0.14 | 0.51 |
            //| 0.60 | 0.24 | 0.30 | 0.13 |
            //| 0.57 | 0.99 | 0.97 | 0.19 |
            //| 0.96 | 0.58 | 0.66 | 0.85 |
            //-----------------------------

            Matrix matrix = new Matrix(4, 4);
            matrix.SetValue(0, 0, 0.18);
            matrix.SetValue(0, 1, 0.60);
            matrix.SetValue(0, 2, 0.57);
            matrix.SetValue(0, 3, 0.96);
            matrix.SetValue(1, 0, 0.41);
            matrix.SetValue(1, 1, 0.24);
            matrix.SetValue(1, 2, 0.99);
            matrix.SetValue(1, 3, 0.58);
            matrix.SetValue(2, 0, 0.14);
            matrix.SetValue(2, 1, 0.30);
            matrix.SetValue(2, 2, 0.97);
            matrix.SetValue(2, 3, 0.66);
            matrix.SetValue(3, 0, 0.51);
            matrix.SetValue(3, 1, 0.13);
            matrix.SetValue(3, 2, 0.19);
            matrix.SetValue(3, 3, 0.85);
            Vector b = new Vector(4);
            b.SetValue(0, 1);
            b.SetValue(1, 2);
            b.SetValue(2, 3);
            b.SetValue(3, 4);
            Vector x = new Vector(4);

            //solve via LU decomposition
            int sig;
            Permutation perm = new Permutation(4);
            LinearAlgebra.LUDecomposition(ref matrix, ref perm, out sig);
            LinearAlgebra.LUSolve(matrix, perm, b, ref x);
            LinearAlgebra.LUSolve(matrix, perm, ref b);

            //solve via QR decomposition
            /*Vector tau = new Vector(4);
            LinearAlgebra.QRDecomposition(ref matrix, ref tau);
            LinearAlgebra.QRSolve(matrix, tau, b, ref x);*/

            Console.WriteLine(x.GetValue(0));
            Console.WriteLine(x.GetValue(1));
            Console.WriteLine(x.GetValue(2));
            Console.WriteLine(x.GetValue(3));
        }
        public static void TestInverse()
        {
            //----------------------
            //| 0.18 | 0.41 | 0.14 |
            //| 0.60 | 0.24 | 0.30 |
            //| 0.57 | 0.99 | 0.97 |
            //----------------------

            Matrix matrix = new Matrix(3, 3);
            Matrix matrix2 = new Matrix(4, 4);
            matrix.SetValue(0, 0, 0.18);
            matrix.SetValue(0, 1, 0.41);
            matrix.SetValue(0, 2, 0.14);
            matrix.SetValue(1, 0, 0.60);
            matrix.SetValue(1, 1, 0.24);
            matrix.SetValue(1, 2, 0.30);
            matrix.SetValue(2, 0, 0.57);
            matrix.SetValue(2, 1, 0.99);
            matrix.SetValue(2, 2, 0.97);
            double[,] test = matrix.ToArray();
            for (uint i = 0; i < matrix.Columns; i++)
            {
                for (uint j = 0; j < matrix.Rows; j++)
                {
                    matrix2.SetValue(i + 1, j + 1, matrix.GetValue(i, j));
                }
            }

            //inversion via LU decomposition
            Matrix inv = new Matrix(3, 3);
            int sig;
            Permutation perm = new Permutation(3);
            perm.Initialize();
            LinearAlgebra.LUDecomposition(ref matrix, ref perm, out sig);
            LinearAlgebra.LUInvert(matrix, perm, ref inv);
            for (uint i = 0; i < inv.Columns; i++)
            {
                for (uint j = 0; j < inv.Rows; j++)
                {
                    Console.Write(inv.GetValue(i, j).ToString("F4").PadLeft(8) + " | ");
                }
                Console.WriteLine();
            }
            Console.WriteLine();

            //submatrix (MatrixView) test
            perm.Initialize();
            Matrix inv2 = new Matrix(4, 4);
            MatrixView mView = new MatrixView(matrix2, 1, 1, 3, 3);
            MatrixView mViewINV = new MatrixView(inv2, 0, 1, 3, 3);
            LinearAlgebra.LUDecomposition(ref mView, ref perm, out sig);
            LinearAlgebra.LUInvert(mView, perm, ref mViewINV);
            for (uint i = 0; i < mViewINV.ColumnSize; i++)
            {
                for (uint j = 0; j < mViewINV.RowSize; j++)
                {
                    Console.Write(mViewINV.GetValue(i, j).ToString("F4").PadLeft(8) + " | ");
                }
                Console.WriteLine();
            }
            Console.WriteLine();

            for (uint i = 0; i < inv2.Columns; i++)
            {
                for (uint j = 0; j < inv2.Rows; j++)
                {
                    Console.Write(inv2.GetValue(i, j).ToString("F4").PadLeft(8) + " | ");
                }
                Console.WriteLine();
            }

            Console.Read();
        }
Example #21
        public void TestDTreesMushroom()
        {
            Matrix<float> data, response;
            ReadMushroomData(out data, out response);

            //Use the first 80% of data as training sample
            int trainingSampleCount = (int)(data.Rows * 0.8);

            Matrix<Byte> varType = new Matrix<byte>(data.Cols + 1, 1);
            varType.SetValue((byte)MlEnum.VAR_TYPE.CATEGORICAL); //the data is categorical

            Matrix<byte> sampleIdx = new Matrix<byte>(data.Rows, 1);
            using (Matrix<byte> sampleRows = sampleIdx.GetRows(0, trainingSampleCount, 1))
                sampleRows.SetValue(255);

            float[] priors = new float[] { 1, 0.5f };
            GCHandle priorsHandle = GCHandle.Alloc(priors, GCHandleType.Pinned);

            MCvDTreeParams param = new MCvDTreeParams();
            param.maxDepth = 8;
            param.minSampleCount = 10;
            param.regressionAccuracy = 0;
            param.useSurrogates = true;
            param.maxCategories = 15;
            param.cvFolds = 10;
            param.use1seRule = true;
            param.truncatePrunedTree = true;
            param.priors = priorsHandle.AddrOfPinnedObject();

            using (DTree dtree = new DTree())
            {
                bool success = dtree.Train(
                    data,
                    Emgu.CV.ML.MlEnum.DATA_LAYOUT_TYPE.ROW_SAMPLE,
                    response,
                    null,
                    sampleIdx,
                    varType,
                    null,
                    param);

                if (!success) return;

                double trainDataCorrectRatio = 0;
                double testDataCorrectRatio = 0;
                for (int i = 0; i < data.Rows; i++)
                {
                    using (Matrix<float> sample = data.GetRow(i))
                    {
                        double r = dtree.Predict(sample, null, false).value;
                        r = Math.Abs(r - response[i, 0]);
                        if (r < 1.0e-5)
                        {
                            if (i < trainingSampleCount)
                                trainDataCorrectRatio++;
                            else
                                testDataCorrectRatio++;
                        }
                    }
                }

                trainDataCorrectRatio /= trainingSampleCount;
                testDataCorrectRatio /= (data.Rows - trainingSampleCount);

                Trace.WriteLine(String.Format("Prediction accuracy for training data :{0}%", trainDataCorrectRatio * 100));
                Trace.WriteLine(String.Format("Prediction accuracy for test data :{0}%", testDataCorrectRatio * 100));
            }

            priorsHandle.Free();
        }
        public static bool FindModelImageInObservedImage( Image<Gray, byte> modelImage, Image<Gray, byte> observedImage )
        {
            var surfCpu = new SURFDetector(500, false);
            VectorOfKeyPoint modelKeyPoints;
            VectorOfKeyPoint observedKeyPoints;
            Matrix<int> indices;

            Matrix<byte> mask;
            int k = 2;
            double uniquenessThreshold = 0.8;
            if (GpuInvoke.HasCuda)
            {
                GpuSURFDetector surfGpu = new GpuSURFDetector(surfCpu.SURFParams, 0.01f);
                using (GpuImage<Gray, byte> gpuModelImage = new GpuImage<Gray, byte>(modelImage))
                //extract features from the object image
                using (GpuMat<float> gpuModelKeyPoints = surfGpu.DetectKeyPointsRaw(gpuModelImage, null))
                using (GpuMat<float> gpuModelDescriptors = surfGpu.ComputeDescriptorsRaw(gpuModelImage, null, gpuModelKeyPoints))
                using (GpuBruteForceMatcher<float> matcher = new GpuBruteForceMatcher<float>(DistanceType.L2))
                {
                    modelKeyPoints = new VectorOfKeyPoint();
                    surfGpu.DownloadKeypoints(gpuModelKeyPoints, modelKeyPoints);

                    // extract features from the observed image
                    using (GpuImage<Gray, byte> gpuObservedImage = new GpuImage<Gray, byte>(observedImage))
                    using (GpuMat<float> gpuObservedKeyPoints = surfGpu.DetectKeyPointsRaw(gpuObservedImage, null))
                    using (GpuMat<float> gpuObservedDescriptors = surfGpu.ComputeDescriptorsRaw(gpuObservedImage, null, gpuObservedKeyPoints))
                    using (GpuMat<int> gpuMatchIndices = new GpuMat<int>(gpuObservedDescriptors.Size.Height, k, 1, true))
                    using (GpuMat<float> gpuMatchDist = new GpuMat<float>(gpuObservedDescriptors.Size.Height, k, 1, true))
                    using (GpuMat<Byte> gpuMask = new GpuMat<byte>(gpuMatchIndices.Size.Height, 1, 1))
                    using (var stream = new Emgu.CV.GPU.Stream())
                    {
                        matcher.KnnMatchSingle(gpuObservedDescriptors, gpuModelDescriptors, gpuMatchIndices, gpuMatchDist, k, null, stream);
                        indices = new Matrix<int>(gpuMatchIndices.Size);
                        mask = new Matrix<byte>(gpuMask.Size);

                        //gpu implementation of voteForUniqueness
                        using (GpuMat<float> col0 = gpuMatchDist.Col(0))
                        using (GpuMat<float> col1 = gpuMatchDist.Col(1))
                        {
                            GpuInvoke.Multiply(col1, new MCvScalar(uniquenessThreshold), col1, stream);
                            GpuInvoke.Compare(col0, col1, gpuMask, CMP_TYPE.CV_CMP_LE, stream);
                        }

                        observedKeyPoints = new VectorOfKeyPoint();
                        surfGpu.DownloadKeypoints(gpuObservedKeyPoints, observedKeyPoints);

                        //wait for the stream to complete its tasks
                        //We can perform some other CPU intensive work here while we are waiting for the stream to complete.
                        stream.WaitForCompletion();

                        gpuMask.Download(mask);
                        gpuMatchIndices.Download(indices);

                        if (GpuInvoke.CountNonZero(gpuMask) >= 4)
                        {
                            int nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                            if (nonZeroCount >= 4)
                            {
                                Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
                            }
                            if ((double)nonZeroCount / mask.Height > 0.02)
                            {
                                return true;
                            }
                        }
                    }
                }
            }
            else
            {
                //extract features from the object image
                modelKeyPoints = surfCpu.DetectKeyPointsRaw(modelImage, null);
                Matrix<float> modelDescriptors = surfCpu.ComputeDescriptorsRaw(modelImage, null, modelKeyPoints);

                // extract features from the observed image
                observedKeyPoints = surfCpu.DetectKeyPointsRaw(observedImage, null);
                Matrix<float> observedDescriptors = surfCpu.ComputeDescriptorsRaw(observedImage, null, observedKeyPoints);
                BruteForceMatcher<float> matcher = new BruteForceMatcher<float>(DistanceType.L2);
                matcher.Add(modelDescriptors);

                indices = new Matrix<int>(observedDescriptors.Rows, k);
                using (Matrix<float> dist = new Matrix<float>(observedDescriptors.Rows, k))
                {
                    matcher.KnnMatch(observedDescriptors, indices, dist, k, null);
                    mask = new Matrix<byte>(dist.Rows, 1);
                    mask.SetValue(255);
                    Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
                }

                int nonZeroCount = CvInvoke.cvCountNonZero(mask);
                if (nonZeroCount >= 4)
                {
                    nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                    if (nonZeroCount >= 4)
                    {
                        Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
                    }
                }

                if ((double)nonZeroCount / mask.Height > 0.02)
                {
                    return true;
                }
            }

            //Draw the matched keypoints
            //var result = Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints, indices, new Bgr(0, 0, 255), new Bgr(255, 0, 0), mask, Features2DToolbox.KeypointDrawType.DEFAULT);
            //result.Save( @"C:\Users\D.Markachev\Desktop\bleh-keypoints.jpg" );

            return false;
        }
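Note how the GPU branch reimplements VoteForUniqueness directly on the device: multiplying the second-nearest distance column by uniquenessThreshold and comparing with CV_CMP_LE yields the same mask the CPU branch obtains from Features2DToolbox.VoteForUniqueness.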
Example #23
        public void TestRTreesLetterRecognition()
        {
            Matrix<float> data, response;
             ReadLetterRecognitionData(out data, out response);

             int trainingSampleCount = (int)(data.Rows * 0.8);

             Matrix<Byte> varType = new Matrix<byte>(data.Cols + 1, 1);
             varType.SetValue((byte)MlEnum.VAR_TYPE.NUMERICAL); //the data is numerical
             varType[data.Cols, 0] = (byte) MlEnum.VAR_TYPE.CATEGORICAL; //the response is catagorical

             Matrix<byte> sampleIdx = new Matrix<byte>(data.Rows, 1);
             using (Matrix<byte> sampleRows = sampleIdx.GetRows(0, trainingSampleCount, 1))
            sampleRows.SetValue(255);

             MCvRTParams param = new MCvRTParams();
             param.maxDepth = 10;
             param.minSampleCount = 10;
             param.regressionAccuracy = 0.0f;
             param.useSurrogates = false;
             param.maxCategories = 15;
             param.priors = IntPtr.Zero;
             param.calcVarImportance = true;
             param.nactiveVars = 4;
             param.termCrit = new MCvTermCriteria(100, 0.01f);
             param.termCrit.type = Emgu.CV.CvEnum.TERMCRIT.CV_TERMCRIT_ITER;

             using (RTrees forest = new RTrees())
             {
            bool success = forest.Train(
               data,
               Emgu.CV.ML.MlEnum.DATA_LAYOUT_TYPE.ROW_SAMPLE,
               response,
               null,
               sampleIdx,
               varType,
               null,
               param);

            if (!success) return;

            double trainDataCorrectRatio = 0;
            double testDataCorrectRatio = 0;
            for (int i = 0; i < data.Rows; i++)
            {
               using (Matrix<float> sample = data.GetRow(i))
               {
                  double r = forest.Predict(sample, null);
                  r = Math.Abs(r - response[i, 0]);
                  if (r < 1.0e-5)
                  {
                     if (i < trainingSampleCount)
                        trainDataCorrectRatio++;
                     else
                        testDataCorrectRatio++;
                  }
               }
            }

            trainDataCorrectRatio /= trainingSampleCount;
            testDataCorrectRatio /= (data.Rows - trainingSampleCount);

            StringBuilder builder = new StringBuilder("Variable Importance: ");
            using (Matrix<float> varImportance = forest.VarImportance)
            {
               for (int i = 0; i < varImportance.Cols; i++)
               {
                  builder.AppendFormat("{0} ", varImportance[0, i]);
               }
            }

            Trace.WriteLine(String.Format("Prediction accuracy for training data :{0}%", trainDataCorrectRatio*100));
            Trace.WriteLine(String.Format("Prediction accuracy for test data :{0}%", testDataCorrectRatio*100));
            Trace.WriteLine(builder.ToString());
             }
        }
        /// <summary>
        /// Detect whether the model features exist in the observed features. If they do, a homography matrix is returned; otherwise, null is returned.
        /// </summary>
        /// <param name="modelKeyPoints">The keypoints from the model image</param>
        /// <param name="modelDescriptors">The descriptors from the model image</param>
        /// <param name="observedKeyPoints">The keypoints from the observed image</param>
        /// <param name="observedDescriptors">The descriptors from the observed image</param>
        /// <param name="uniquenessThreshold">The distance ratio below which a match is considered unique; a good value is 0.8</param>
        /// <returns>If the model features exist in the observed features, a homography matrix; otherwise, null</returns>
        public static HomographyMatrix Detect(
         VectorOfKeyPoint modelKeyPoints, Matrix<float> modelDescriptors,
         VectorOfKeyPoint observedKeyPoints, Matrix<float> observedDescriptors, double uniquenessThreshold)
        {
            using (BruteForceMatcher matcher = new BruteForceMatcher(BruteForceMatcher.DistanceType.L2F32))
             using (Matrix<int> indices = new Matrix<int>(observedKeyPoints.Size, 2))
             using (Matrix<float> dist = new Matrix<float>(indices.Size))
             using (Matrix<byte> mask = new Matrix<byte>(dist.Rows, 1))
             {
            matcher.Add(modelDescriptors);
            matcher.KnnMatch(observedDescriptors, indices, dist, 2, null);

            mask.SetValue(255);

            //Stopwatch w1 = Stopwatch.StartNew();
            VoteForUniqueness(dist, uniquenessThreshold, mask);
            //Trace.WriteLine(w1.ElapsedMilliseconds);

            int nonZeroCount = CvInvoke.cvCountNonZero(mask);
            if (nonZeroCount < 4)
               return null;

            //Stopwatch w2 = Stopwatch.StartNew();
            nonZeroCount = VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
            if (nonZeroCount < 4)
               return null;

            return GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 3);
             }
        }
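A minimal call-site sketch for the Detect method above. The containing class is not shown in the snippet, so the FeatureDetector class name, the file names, and the SURF parameters below are illustrative assumptions, not part of the original source:

            using (Image<Gray, Byte> modelImage = new Image<Gray, Byte>("box.png"))
            using (Image<Gray, Byte> observedImage = new Image<Gray, Byte>("box_in_scene.png"))
            {
               SURFDetector surf = new SURFDetector(500, false);

               // extract keypoints and descriptors for both images
               VectorOfKeyPoint modelKeyPoints = surf.DetectKeyPointsRaw(modelImage, null);
               Matrix<float> modelDescriptors = surf.ComputeDescriptorsRaw(modelImage, null, modelKeyPoints);
               VectorOfKeyPoint observedKeyPoints = surf.DetectKeyPointsRaw(observedImage, null);
               Matrix<float> observedDescriptors = surf.ComputeDescriptorsRaw(observedImage, null, observedKeyPoints);

               // null means fewer than 4 matches survived the uniqueness and orientation votes
               HomographyMatrix homography = FeatureDetector.Detect(
                  modelKeyPoints, modelDescriptors,
                  observedKeyPoints, observedDescriptors, 0.8);
            }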
        private Matrix<double> GetObjectiveFunction(
            ref Matrix<double> XOfGradients,
            ref Matrix<double> YOfGradients,
            ref Matrix<byte> weight
            )
        {
            Matrix<double> result = new Matrix<double>(weight.Size);
            result.SetValue(0);
            double numGradients = weight.Rows * weight.Cols;
            for (int iRow = 0; iRow < weight.Rows; iRow++) {
                for (int iCol = 0; iCol < weight.Cols; iCol++) {
                    double gx = XOfGradients.Data[iRow, iCol];
                    double gy = YOfGradients.Data[iRow, iCol];
                    if (gx == 0.0 && gy == 0.0)
                        continue; // pixels with no gradient carry no information
                    for (int cRow = 0; cRow < weight.Rows; cRow++) {
                        for (int cCol = 0; cCol < weight.Cols; cCol++) {
                            if (iCol == cCol && iRow == cRow)
                                continue;
                            // normalized displacement from candidate centre (cRow, cCol) to the gradient pixel
                            double dx = iCol - cCol;
                            double dy = iRow - cRow;
                            double mag = Math.Sqrt(dx * dx + dy * dy);
                            dx = dx / mag;
                            dy = dy / mag;
                            // only displacement directions that agree with the gradient contribute
                            double dotProduct = Math.Max(0.0, dx * gx + dy * gy);
                            result[cRow, cCol] += dotProduct * dotProduct;
                        }
                    }
                }
            }

            for (int cRow = 0; cRow < weight.Rows; cRow++) {
                for (int cCol = 0; cCol < weight.Cols; cCol++) {
                    result[cRow, cCol] *= 1.0 / numGradients;
                    if (_isWeightEnabled)
                        result[cRow, cCol] *= weight.Data[cRow, cCol];
                }
            }

            return result;
        }
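Stated compactly, the nested loops above evaluate a gradient-consistency objective for every candidate centre c, in the spirit of gradient-based centre localisation. This is our reading of the code, in our notation, not text from the original source:

$$ J(c) = \frac{1}{N} \sum_{i} \max\left(0,\ d_i^{\top} g_i\right)^2, \qquad d_i = \frac{x_i - c}{\lVert x_i - c \rVert} $$

where $g_i$ is the image gradient at pixel $x_i$ (zero-gradient pixels are skipped), $N = \text{Rows} \times \text{Cols}$, and the result is additionally multiplied by weight[c] when _isWeightEnabled is set.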
        public static void FindMatch(Image<Gray, Byte> modelImage, Image<Gray, byte> observedImage, out long matchTime, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints, out Matrix<int> indices, out Matrix<byte> mask, out HomographyMatrix homography)
        {
            int k = 2;
             double uniquenessThreshold = 0.8;
             SURFDetector surfCPU = new SURFDetector(500, false);
             Stopwatch watch;
             homography = null;
             #if !IOS
             if (GpuInvoke.HasCuda)
             {
            GpuSURFDetector surfGPU = new GpuSURFDetector(surfCPU.SURFParams, 0.01f);
            using (GpuImage<Gray, Byte> gpuModelImage = new GpuImage<Gray, byte>(modelImage))
            //extract features from the object image
            using (GpuMat<float> gpuModelKeyPoints = surfGPU.DetectKeyPointsRaw(gpuModelImage, null))
            using (GpuMat<float> gpuModelDescriptors = surfGPU.ComputeDescriptorsRaw(gpuModelImage, null, gpuModelKeyPoints))
            using (GpuBruteForceMatcher<float> matcher = new GpuBruteForceMatcher<float>(DistanceType.L2))
            {
               modelKeyPoints = new VectorOfKeyPoint();
               surfGPU.DownloadKeypoints(gpuModelKeyPoints, modelKeyPoints);
               watch = Stopwatch.StartNew();

               // extract features from the observed image
               using (GpuImage<Gray, Byte> gpuObservedImage = new GpuImage<Gray, byte>(observedImage))
               using (GpuMat<float> gpuObservedKeyPoints = surfGPU.DetectKeyPointsRaw(gpuObservedImage, null))
               using (GpuMat<float> gpuObservedDescriptors = surfGPU.ComputeDescriptorsRaw(gpuObservedImage, null, gpuObservedKeyPoints))
               using (GpuMat<int> gpuMatchIndices = new GpuMat<int>(gpuObservedDescriptors.Size.Height, k, 1, true))
               using (GpuMat<float> gpuMatchDist = new GpuMat<float>(gpuObservedDescriptors.Size.Height, k, 1, true))
               using (GpuMat<Byte> gpuMask = new GpuMat<byte>(gpuMatchIndices.Size.Height, 1, 1))
               using (Stream stream = new Stream())
               {
                  matcher.KnnMatchSingle(gpuObservedDescriptors, gpuModelDescriptors, gpuMatchIndices, gpuMatchDist, k, null, stream);
                  indices = new Matrix<int>(gpuMatchIndices.Size);
                  mask = new Matrix<byte>(gpuMask.Size);

                   //GPU implementation of VoteForUniqueness: keep a match only where
                   //bestDistance <= uniquenessThreshold * secondBestDistance
                  using (GpuMat<float> col0 = gpuMatchDist.Col(0))
                  using (GpuMat<float> col1 = gpuMatchDist.Col(1))
                  {
                     GpuInvoke.Multiply(col1, new MCvScalar(uniquenessThreshold), col1, stream);
                     GpuInvoke.Compare(col0, col1, gpuMask, CMP_TYPE.CV_CMP_LE, stream);
                  }

                  observedKeyPoints = new VectorOfKeyPoint();
                  surfGPU.DownloadKeypoints(gpuObservedKeyPoints, observedKeyPoints);

                  //wait for the stream to complete its tasks
                   //We could perform other CPU-intensive work here while waiting for the stream to complete.
                  stream.WaitForCompletion();

                  gpuMask.Download(mask);
                  gpuMatchIndices.Download(indices);

                  if (GpuInvoke.CountNonZero(gpuMask) >= 4)
                  {
                     int nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                     if (nonZeroCount >= 4)
                        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
                  }

                  watch.Stop();
               }
            }
             }
             else
             #endif
             {
            //extract features from the object image
            modelKeyPoints = new VectorOfKeyPoint();
            Matrix<float> modelDescriptors = surfCPU.DetectAndCompute(modelImage, null, modelKeyPoints);

            watch = Stopwatch.StartNew();

            // extract features from the observed image
            observedKeyPoints = new VectorOfKeyPoint();
            Matrix<float> observedDescriptors = surfCPU.DetectAndCompute(observedImage, null, observedKeyPoints);
            BruteForceMatcher<float> matcher = new BruteForceMatcher<float>(DistanceType.L2);
            matcher.Add(modelDescriptors);

            indices = new Matrix<int>(observedDescriptors.Rows, k);
            using (Matrix<float> dist = new Matrix<float>(observedDescriptors.Rows, k))
            {
               matcher.KnnMatch(observedDescriptors, indices, dist, k, null);
               mask = new Matrix<byte>(dist.Rows, 1);
               mask.SetValue(255);
               Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
            }

            int nonZeroCount = CvInvoke.cvCountNonZero(mask);
            if (nonZeroCount >= 4)
            {
               nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
               if (nonZeroCount >= 4)
                  homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
            }

            watch.Stop();
             }
             matchTime = watch.ElapsedMilliseconds;
        }
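A minimal call-site sketch for FindMatch; the file names are illustrative, not from the original source:

            long matchTime;
            VectorOfKeyPoint modelKeyPoints, observedKeyPoints;
            Matrix<int> indices;
            Matrix<byte> mask;
            HomographyMatrix homography;

            using (Image<Gray, Byte> model = new Image<Gray, Byte>("box.png"))
            using (Image<Gray, Byte> observed = new Image<Gray, Byte>("box_in_scene.png"))
            {
               FindMatch(model, observed, out matchTime, out modelKeyPoints, out observedKeyPoints, out indices, out mask, out homography);
               // homography stays null unless at least 4 consistent matches were found
            }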
Example #27
        /// <summary>
        /// Gets the upper triangular factor (U).
        /// </summary>
        private Matrix GetUpperTriangularFactor()
        {
            var matrix = new Matrix(LU.Rows, LU.Columns);

             for (var i = 0; i < LU.Rows; i++)
             {
            for (var j = 0; j < LU.Columns; j++)
            {
               if (i <= j)
               {
                  matrix.SetValue(i, j, LU.GetValue(i, j));
               }
               else
               {
                  matrix.SetValue(i, j, 0.0);
               }
            }
             }

             return matrix;
        }
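In the packed storage assumed by this class, a single LU matrix holds both triangular factors, and the method above reads off the upper factor. In LaTeX (our notation, consistent with the code):

$$ U_{ij} = \begin{cases} LU_{ij} & i \le j \\ 0 & i > j \end{cases} $$

The lower factor would typically be recovered from the strictly lower triangle with an implied unit diagonal, though that counterpart is not shown in this snippet.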
Example #28
      public void TestMinMax2()
      {
         Matrix<Single> matrix = new Matrix<Single> (10, 10);
         matrix.SetValue (5);
         matrix [5, 5] = 10;
         matrix [3, 3] = 0;

         double minVal, maxVal;
         Point minLoc, maxLoc;

         matrix.MinMax (out minVal, out maxVal, out minLoc, out maxLoc);
         EmguAssert.IsTrue(minVal == 0);
         EmguAssert.IsTrue(maxVal == 10);
         EmguAssert.IsTrue(minLoc.Equals(new Point(3, 3)));
         EmguAssert.IsTrue(maxLoc.Equals(new Point(5, 5)));
      }
Example #29
        /// <summary>
        /// Draw the model image and observed image, the matched features and homography projection.
        /// </summary>
        /// <param name="modelImage">The model image</param>
        /// <param name="observedImage">The observed image</param>
        /// <param name="matchTime">The output total time for computing the homography matrix.</param>
        /// <returns>The model image and observed image, the matched features and homography projection.</returns>
        public static Image<Bgr, Byte> Draw(Image<Gray, Byte> modelImage, Image<Gray, byte> observedImage, out long matchTime)
        {
            Stopwatch watch;
            HomographyMatrix homography = null;

            SURFDetector surfCPU = new SURFDetector (500, false);
            VectorOfKeyPoint modelKeyPoints;
            VectorOfKeyPoint observedKeyPoints;
            Matrix<int> indices;

            Matrix<byte> mask;
            int k = 2;
            double uniquenessThreshold = 0.8;
            if (GpuInvoke.HasCuda) {
                GpuSURFDetector surfGPU = new GpuSURFDetector (surfCPU.SURFParams, 0.01f);
                using (GpuImage<Gray, Byte> gpuModelImage = new GpuImage<Gray, byte> (modelImage))
                    //extract features from the object image
                using (GpuMat<float> gpuModelKeyPoints = surfGPU.DetectKeyPointsRaw (gpuModelImage, null))
                using (GpuMat<float> gpuModelDescriptors = surfGPU.ComputeDescriptorsRaw (gpuModelImage, null, gpuModelKeyPoints))
                using (GpuBruteForceMatcher<float> matcher = new GpuBruteForceMatcher<float> (DistanceType.L2)) {
                    modelKeyPoints = new VectorOfKeyPoint ();
                    surfGPU.DownloadKeypoints (gpuModelKeyPoints, modelKeyPoints);
                    watch = Stopwatch.StartNew ();

                    // extract features from the observed image
                    using (GpuImage<Gray, Byte> gpuObservedImage = new GpuImage<Gray, byte> (observedImage))
                    using (GpuMat<float> gpuObservedKeyPoints = surfGPU.DetectKeyPointsRaw (gpuObservedImage, null))
                    using (GpuMat<float> gpuObservedDescriptors = surfGPU.ComputeDescriptorsRaw (gpuObservedImage, null, gpuObservedKeyPoints))
                    using (GpuMat<int> gpuMatchIndices = new GpuMat<int> (gpuObservedDescriptors.Size.Height, k, 1, true))
                    using (GpuMat<float> gpuMatchDist = new GpuMat<float> (gpuObservedDescriptors.Size.Height, k, 1, true))
                    using (GpuMat<Byte> gpuMask = new GpuMat<byte> (gpuMatchIndices.Size.Height, 1, 1))
                    using (Stream stream = new Stream ()) {
                        matcher.KnnMatchSingle (gpuObservedDescriptors, gpuModelDescriptors, gpuMatchIndices, gpuMatchDist, k, null, stream);
                        indices = new Matrix<int> (gpuMatchIndices.Size);
                        mask = new Matrix<byte> (gpuMask.Size);

                        //GPU implementation of VoteForUniqueness: keep a match only where
                        //bestDistance <= uniquenessThreshold * secondBestDistance
                        using (GpuMat<float> col0 = gpuMatchDist.Col (0))
                        using (GpuMat<float> col1 = gpuMatchDist.Col (1)) {
                            GpuInvoke.Multiply (col1, new MCvScalar (uniquenessThreshold), col1, stream);
                            GpuInvoke.Compare (col0, col1, gpuMask, CMP_TYPE.CV_CMP_LE, stream);
                        }

                        observedKeyPoints = new VectorOfKeyPoint ();
                        surfGPU.DownloadKeypoints (gpuObservedKeyPoints, observedKeyPoints);

                        //wait for the stream to complete its tasks
                        //We could perform other CPU-intensive work here while waiting for the stream to complete.
                        stream.WaitForCompletion ();

                        gpuMask.Download (mask);
                        gpuMatchIndices.Download (indices);

                        if (GpuInvoke.CountNonZero (gpuMask) >= 4) {
                            int nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation (modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                            if (nonZeroCount >= 4)
                                homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures (modelKeyPoints, observedKeyPoints, indices, mask, 2);
                        }

                        watch.Stop ();
                    }
                }
            } else {
                //extract features from the object image
                modelKeyPoints = surfCPU.DetectKeyPointsRaw (modelImage, null);
                Matrix<float> modelDescriptors = surfCPU.ComputeDescriptorsRaw (modelImage, null, modelKeyPoints);

                watch = Stopwatch.StartNew ();

                // extract features from the observed image
                observedKeyPoints = surfCPU.DetectKeyPointsRaw (observedImage, null);
                Matrix<float> observedDescriptors = surfCPU.ComputeDescriptorsRaw (observedImage, null, observedKeyPoints);
                BruteForceMatcher<float> matcher = new BruteForceMatcher<float> (DistanceType.L2);
                matcher.Add (modelDescriptors);

                indices = new Matrix<int> (observedDescriptors.Rows, k);
                using (Matrix<float> dist = new Matrix<float> (observedDescriptors.Rows, k)) {
                    matcher.KnnMatch (observedDescriptors, indices, dist, k, null);
                    mask = new Matrix<byte> (dist.Rows, 1);
                    mask.SetValue (255);
                    Features2DToolbox.VoteForUniqueness (dist, uniquenessThreshold, mask);
                }

                int nonZeroCount = CvInvoke.cvCountNonZero (mask);
                if (nonZeroCount >= 4) {
                    nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation (modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                    if (nonZeroCount >= 4)
                        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures (modelKeyPoints, observedKeyPoints, indices, mask, 2);
                }

                watch.Stop ();
            }

            //Draw the matched keypoints
            Image<Bgr, Byte> result = Features2DToolbox.DrawMatches (modelImage, modelKeyPoints, observedImage, observedKeyPoints,
                                          indices, new Bgr (255, 255, 255), new Bgr (255, 255, 255), mask, Features2DToolbox.KeypointDrawType.DEFAULT);

            #region draw the projected region on the image
            if (homography != null) {  //draw a rectangle along the projected model
                Rectangle rect = modelImage.ROI;
                PointF[] pts = new PointF[] {
                    new PointF (rect.Left, rect.Bottom),
                    new PointF (rect.Right, rect.Bottom),
                    new PointF (rect.Right, rect.Top),
                    new PointF (rect.Left, rect.Top)
                };
                homography.ProjectPoints (pts);

                result.DrawPolyline (Array.ConvertAll<PointF, Point> (pts, Point.Round), true, new Bgr (Color.Red), 5);
            }
            #endregion

            matchTime = watch.ElapsedMilliseconds;

            return result;
        }
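A minimal call-site sketch for Draw; the file names are illustrative, not from the original source:

            long matchTime;
            using (Image<Gray, Byte> model = new Image<Gray, Byte>("box.png"))
            using (Image<Gray, Byte> observed = new Image<Gray, Byte>("box_in_scene.png"))
            using (Image<Bgr, Byte> result = Draw(model, observed, out matchTime))
            {
               result.Save("result.jpg"); // matched keypoints plus the projected model outline, if found
            }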
Example #30
        /// <inheritdoc />
        protected override IMatrix<double> MultiplySafe(IVector<double> vector)
        {
            // outer product of this vector (X, Y, Z) with the argument: matrix[i, j] = p_i * v_j
            var matrix = new Matrix(3, 3);
            matrix.SetValue(0, 0, X * vector[0]);
            matrix.SetValue(0, 1, X * vector[1]);
            matrix.SetValue(0, 2, X * vector[2]);

            matrix.SetValue(1, 0, Y * vector[0]);
            matrix.SetValue(1, 1, Y * vector[1]);
            matrix.SetValue(1, 2, Y * vector[2]);

            matrix.SetValue(2, 0, Z * vector[0]);
            matrix.SetValue(2, 1, Z * vector[1]);
            matrix.SetValue(2, 2, Z * vector[2]);
            return matrix;
        }
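The nine assignments above are an unrolled outer product of this vector $p = (X, Y, Z)$ with the argument $v$ (our reading of the code):

$$ M = p\, v^{\top}, \qquad M_{ij} = p_i\, v_j \quad (0 \le i, j \le 2) $$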