Example #1
        static void Main()
        {
            Application.EnableVisualStyles();
            Application.SetCompatibleTextRenderingDefault(false);
            long   matchTime; int inliers, outliers;
            string mImage = "C:\\inetpub\\wwwroot\\memomface\\UserContent\\Member\\2\\1f99e8d4-be1d-4ca1-b094-67a45e298f65.JPG";
            string oImage = "C:\\inetpub\\wwwroot\\memomface\\UserContent\\Album\\1\\45d2c5b1-2079-408a-af48-e60edad56f5f--IMG_0185-3.JPG";

            //using(Image<Gray, Byte> modelImage = new Image<Gray, byte>("FaceBoy.jpg"))
            //using (Image<Gray, Byte> observedImage = new Image<Gray, byte>("YesBoy.jpg"))
            using (Image <Gray, Byte> modelImage = new Image <Gray, byte>(mImage))
                using (Image <Gray, Byte> observedImage = new Image <Gray, byte>(oImage))
                {
                    //Image<Bgr, byte> result = DrawMatches.Draw(modelImage, observedImage, out matchTime);
                    Image <Bgr, byte> result = BruteForceMatcher.Draw(modelImage, observedImage, out matchTime, out inliers, out outliers);
                    ImageViewer.Show(result, String.Format("Matched using {0} in {1} milliseconds", GpuInvoke.HasCuda ? "GPU" : "CPU", matchTime));
                }
        }
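BruteForceMatcher.Draw above is this project's own helper, not a stock Emgu CV call. Below is a minimal sketch of what it could look like, assuming it wraps the FindMatch overload from Example #3 and the standard Emgu CV 2.x Features2DToolbox.DrawMatches; the project's real implementation may differ.

        // Hypothetical reconstruction of the Draw helper used above; it is
        // assumed to delegate to FindMatch from Example #3.
        public static Image<Bgr, byte> Draw(Image<Gray, Byte> modelImage, Image<Gray, Byte> observedImage,
                                            out long matchTime, out int inliers, out int outliers)
        {
            VectorOfKeyPoint modelKeyPoints, observedKeyPoints;
            Matrix<int>      indices;
            Matrix<byte>     mask;
            HomographyMatrix homography;

            FindMatch(modelImage, observedImage, out matchTime,
                      out modelKeyPoints, out observedKeyPoints, out indices, out mask, out homography);

            // after the uniqueness/size/orientation votes, the rows still set
            // in the mask are the inliers
            inliers  = CvInvoke.cvCountNonZero(mask);
            outliers = mask.Rows - inliers;

            return Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints,
                                                 indices, new Bgr(255, 255, 255), new Bgr(255, 255, 255), mask,
                                                 Features2DToolbox.KeypointDrawType.DEFAULT);
        }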
Example #2
        public static FindMatchResult FindMatchInSource(MatchInput matchInput)
        {
            FindMatchResult matchResult = new FindMatchResult();
            long   matchTime; int inLiers, outLiers;
            string MatchFolderPath, MatchAbsolutePath, MatchedFaceFile;

            MatchedFaceFile = "";

            // folder assignments (MatchedFaceFile is only set when a match is found)
            MatchFolderPath   = matchInput.WebFolderPath;
            MatchAbsolutePath = matchInput.FindInFile.DirectoryName + "\\MatchFiles";


            using (Image <Gray, Byte> modelImage = new Image <Gray, byte>(matchInput.MatchFile.FullName))
                using (Image <Gray, Byte> observedImage = new Image <Gray, byte>(matchInput.FindInFile.FullName))
                {
                    Image <Bgr, byte> result = BruteForceMatcher.Draw(modelImage, observedImage, out matchTime, out inLiers, out outLiers);
                    //ImageViewer.Show(result, String.Format("Matched using {0} in {1} milliseconds", GpuInvoke.HasCuda ? "GPU" : "CPU", matchTime));
                    if (inLiers > matchInput.InlierThreshold)
                    {
                        matchResult.Matched = true;
                        MatchedFaceFile     = Guid.NewGuid().ToString();

                        if (!System.IO.Directory.Exists(MatchAbsolutePath))
                        {
                            System.IO.Directory.CreateDirectory(MatchAbsolutePath);
                        }

                        result.Save(MatchAbsolutePath + "\\" + MatchedFaceFile + matchInput.FindInFile.Extension);
                    }
                    matchResult.Inliers         = inLiers;
                    matchResult.Outliers        = outLiers;
                    matchResult.FolderPath      = MatchFolderPath;
                    matchResult.AbsolutePath    = MatchAbsolutePath + "\\";
                    matchResult.MatchedFaceFile = MatchedFaceFile;
                }
            return matchResult;
        }
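MatchInput and FindMatchResult are project DTOs that are not shown on this page. Below are hypothetical shapes inferred from how Examples #2 and #4 use them; the real classes may carry more members.

        // Inferred from usage only; member names match the call sites above.
        public class MatchInput
        {
            public FileInfo FindInFile      { get; set; } // photo to search in
            public FileInfo MatchFile       { get; set; } // face image to look for
            public string   WebFolderPath   { get; set; }
            public int      InlierThreshold { get; set; } // minimum inliers that count as a match
        }

        public class FindMatchResult
        {
            public bool   Matched         { get; set; }
            public int    Inliers         { get; set; }
            public int    Outliers        { get; set; }
            public string FolderPath      { get; set; }
            public string AbsolutePath    { get; set; }
            public string MatchedFaceFile { get; set; }
            public string OpenCvMethod    { get; set; } // read by the batch runner in Example #4
        }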
Example #3
        public static void FindMatch(Image <Gray, Byte> modelImage, Image <Gray, byte> observedImage, out long matchTime, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints, out Matrix <int> indices, out Matrix <byte> mask, out HomographyMatrix homography)
        {
            int          k = 2;
            double       uniquenessThreshold = 0.8;
            SURFDetector surfCPU             = new SURFDetector(500, false);
            Stopwatch    watch;

            homography = null;

            if (GpuInvoke.HasCuda)
            {
                GpuSURFDetector surfGPU = new GpuSURFDetector(surfCPU.SURFParams, 0.01f);
                using (GpuImage <Gray, Byte> gpuModelImage = new GpuImage <Gray, byte>(modelImage))
                    //extract features from the object image
                    using (GpuMat <float> gpuModelKeyPoints = surfGPU.DetectKeyPointsRaw(gpuModelImage, null))
                        using (GpuMat <float> gpuModelDescriptors = surfGPU.ComputeDescriptorsRaw(gpuModelImage, null, gpuModelKeyPoints))
                            using (GpuBruteForceMatcher <float> matcher = new GpuBruteForceMatcher <float>(DistanceType.L2))
                            {
                                modelKeyPoints = new VectorOfKeyPoint();
                                surfGPU.DownloadKeypoints(gpuModelKeyPoints, modelKeyPoints);
                                watch = Stopwatch.StartNew();

                                // extract features from the observed image
                                using (GpuImage <Gray, Byte> gpuObservedImage = new GpuImage <Gray, byte>(observedImage))
                                    using (GpuMat <float> gpuObservedKeyPoints = surfGPU.DetectKeyPointsRaw(gpuObservedImage, null))
                                        using (GpuMat <float> gpuObservedDescriptors = surfGPU.ComputeDescriptorsRaw(gpuObservedImage, null, gpuObservedKeyPoints))
                                            using (GpuMat <int> gpuMatchIndices = new GpuMat <int>(gpuObservedDescriptors.Size.Height, k, 1, true))
                                                using (GpuMat <float> gpuMatchDist = new GpuMat <float>(gpuObservedDescriptors.Size.Height, k, 1, true))
                                                    using (GpuMat <Byte> gpuMask = new GpuMat <byte>(gpuMatchIndices.Size.Height, 1, 1))
                                                        using (Stream stream = new Stream())
                                                        {
                                                            matcher.KnnMatchSingle(gpuObservedDescriptors, gpuModelDescriptors, gpuMatchIndices, gpuMatchDist, k, null, stream);
                                                            indices = new Matrix <int>(gpuMatchIndices.Size);
                                                            mask    = new Matrix <byte>(gpuMask.Size);

                                                            // GPU implementation of VoteForUniqueness
                                                            using (GpuMat <float> col0 = gpuMatchDist.Col(0))
                                                                using (GpuMat <float> col1 = gpuMatchDist.Col(1))
                                                                {
                                                                    GpuInvoke.Multiply(col1, new MCvScalar(uniquenessThreshold), col1, stream);
                                                                    GpuInvoke.Compare(col0, col1, gpuMask, CMP_TYPE.CV_CMP_LE, stream);
                                                                }

                                                            observedKeyPoints = new VectorOfKeyPoint();
                                                            surfGPU.DownloadKeypoints(gpuObservedKeyPoints, observedKeyPoints);

                                                            // wait for the stream to complete its tasks;
                                                            // we could do other CPU-intensive work here while waiting
                                                            stream.WaitForCompletion();

                                                            gpuMask.Download(mask);
                                                            gpuMatchIndices.Download(indices);

                                                            if (GpuInvoke.CountNonZero(gpuMask) >= 4)
                                                            {
                                                                int nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                                                                if (nonZeroCount >= 4)
                                                                {
                                                                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
                                                                }
                                                            }

                                                            watch.Stop();
                                                        }
                            }
            }
            else
            {
                //extract features from the object image
                modelKeyPoints = new VectorOfKeyPoint();
                Matrix <float> modelDescriptors = surfCPU.DetectAndCompute(modelImage, null, modelKeyPoints);

                watch = Stopwatch.StartNew();

                // extract features from the observed image
                observedKeyPoints = new VectorOfKeyPoint();
                Matrix <float>            observedDescriptors = surfCPU.DetectAndCompute(observedImage, null, observedKeyPoints);
                BruteForceMatcher <float> matcher             = new BruteForceMatcher <float>(DistanceType.L2);
                matcher.Add(modelDescriptors);

                indices = new Matrix <int>(observedDescriptors.Rows, k);
                using (Matrix <float> dist = new Matrix <float>(observedDescriptors.Rows, k))
                {
                    matcher.KnnMatch(observedDescriptors, indices, dist, k, null);
                    mask = new Matrix <byte>(dist.Rows, 1);
                    mask.SetValue(255);
                    Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
                }

                int nonZeroCount = CvInvoke.cvCountNonZero(mask);
                if (nonZeroCount >= 4)
                {
                    nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                    if (nonZeroCount >= 4)
                    {
                        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
                    }
                }

                watch.Stop();
            }
            matchTime = watch.ElapsedMilliseconds;
        }
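The GPU branch above implements the uniqueness vote by hand: it scales the second-best distance column by the threshold and keeps a match only when the best distance compares less-or-equal. Here is the same ratio test written out on the CPU, as a sketch (assuming dist is the Rows x 2 distance matrix produced by a k = 2 KnnMatch):

        // CPU rendition of the GPU uniqueness vote: keep match i only when
        // bestDist <= uniquenessThreshold * secondBestDist (Lowe's ratio test).
        static void VoteForUniquenessCpu(Matrix<float> dist, double uniquenessThreshold, Matrix<byte> mask)
        {
            for (int i = 0; i < dist.Rows; i++)
            {
                mask[i, 0] = (dist[i, 0] <= uniquenessThreshold * dist[i, 1]) ? (byte)255 : (byte)0;
            }
        }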
Example #4
        static void Run(string[] args, bool verboseShow)
        {
            // Dependency Injection to reuse all stuff developed so far
            IKernel kernel = new StandardKernel();

            DependencyInjection(kernel);

            // service initializations
            var _albumInstanceDetailSvc = kernel.Get <IAlbumInstanceDetailService>();
            var _albumInstanceSvc       = kernel.Get <IAlbumInstanceService>();
            var _memberSvc = kernel.Get <IMemberService>();

            IEnumerable <UserAlbumInstanceDetail> batchPhotos = _albumInstanceDetailSvc.GetPhotosForBatchProcssing();

            Console.WriteLine("About to process {0} records", batchPhotos.Count().ToString());

            if (batchPhotos.Count() > 0)
            {
                if (verboseShow)
                {
                    Console.WriteLine("Total records retrieved for processing : {0}", batchPhotos.Count().ToString());
                }

                foreach (UserAlbumInstanceDetail batchPhoto in batchPhotos)
                {
                    long ticks = DateTime.Now.Ticks;
                    if (verboseShow)
                    {
                        Console.WriteLine("Ticks-{0} : Processig record AlbumInstanceKey : {1}, MmeberKey : {2}, FaceToFind : {3}....", ticks.ToString(), batchPhoto.UserAlbumInstanceKey, batchPhoto.MemberKey, batchPhoto.FaceImage);
                    }
                    UserAlbumInstance photoImage      = null;
                    Member            member          = null;
                    FindMatchResult   findMatchResult = null;

                    photoImage      = _albumInstanceSvc.FindAlbumInstance(batchPhoto.UserAlbumInstanceKey);
                    member          = _memberSvc.FindMember(batchPhoto.MemberKey);

                    MatchInput matchInput = new MatchInput();
                    matchInput.FindInFile    = new FileInfo(photoImage.AbsolutePath);
                    matchInput.MatchFile     = new FileInfo(member.AbsoultePath + batchPhoto.FaceImage);
                    matchInput.WebFolderPath = photoImage.FolderPath + "MatchFiles";

                    if (verboseShow)
                    {
                        Console.WriteLine(JsonConvert.SerializeObject(matchInput, Formatting.Indented));
                    }
                    try
                    {
                        findMatchResult = BruteForceMatcher.FindMatchInSource(matchInput);
                    }
                    catch (Exception e)
                    {
                        Console.WriteLine("Error processing AlbumInstanceKey : {0}", batchPhoto.UserAlbumInstanceKey);
                        Console.Error.WriteLine("Exception");
                        Console.Error.WriteLine(e.Message);
                        Console.Error.WriteLine("Inner Exception");
                        Console.Error.WriteLine(e.InnerException == null ? "(none)" : e.InnerException.ToString());
                    }
                    finally
                    {
                        if (findMatchResult != null)
                        {
                            if (findMatchResult.Matched)
                            {
                                batchPhoto.FaceMatchFile = findMatchResult.MatchedFaceFile;
                            }
                            batchPhoto.Inliers      = findMatchResult.Inliers;
                            batchPhoto.OpenCVMethod = findMatchResult.OpenCvMethod;
                            batchPhoto.FaceFound    = findMatchResult.Matched;
                            batchPhoto.FolderPath   = findMatchResult.FolderPath;
                            batchPhoto.AbsolutePath = findMatchResult.AbsolutePath;
                            batchPhoto.ProcessedOn  = DateTime.Now;
                            batchPhoto.Processed    = true;

                            _albumInstanceDetailSvc.Update(batchPhoto);

                            if (verboseShow)
                            {
                                Console.WriteLine("Ticks-{0} : Processed successfully AlbumInstanceKey : {1}, MmeberKey : {2}, FaceToFind : {3}....", ticks.ToString(), batchPhoto.UserAlbumInstanceKey, batchPhoto.MemberKey, batchPhoto.FaceImage);
                            }
                        }
                    }
                }
            }
        }
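DependencyInjection(kernel) is not shown on this page. Below is a plausible Ninject registration for the three services the runner resolves; the concrete implementation types are hypothetical stand-ins for whatever this project actually binds.

        static void DependencyInjection(IKernel kernel)
        {
            // bind each service interface to its (assumed) concrete implementation
            kernel.Bind<IAlbumInstanceDetailService>().To<AlbumInstanceDetailService>();
            kernel.Bind<IAlbumInstanceService>().To<AlbumInstanceService>();
            kernel.Bind<IMemberService>().To<MemberService>();
        }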
Example #5
        public static void FindMatch(Image <Gray, Byte> modelImage, Image <Gray, byte> observedImage, out long matchTime, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints, VectorOfVectorOfDMatch matches, out Matrix <byte> mask, out HomographyMatrix homography)
        {
            int    k = 2;
            double uniquenessThreshold = 0.8;
            double hessianThresh       = 300;

            Stopwatch watch;

            homography = null;

            modelKeyPoints    = new VectorOfKeyPoint();
            observedKeyPoints = new VectorOfKeyPoint();

         #if !IOS
            if (CudaInvoke.HasCuda)
            {
                CudaSURFDetector surfCuda = new CudaSURFDetector((float)hessianThresh);
                using (GpuMat gpuModelImage = new GpuMat(modelImage))
                    //extract features from the object image
                    using (GpuMat gpuModelKeyPoints = surfCuda.DetectKeyPointsRaw(gpuModelImage, null))
                        using (GpuMat gpuModelDescriptors = surfCuda.ComputeDescriptorsRaw(gpuModelImage, null, gpuModelKeyPoints))
                            using (CudaBruteForceMatcher matcher = new CudaBruteForceMatcher(DistanceType.L2))
                            {
                                surfCuda.DownloadKeypoints(gpuModelKeyPoints, modelKeyPoints);
                                watch = Stopwatch.StartNew();

                                // extract features from the observed image
                                using (GpuMat gpuObservedImage = new GpuMat(observedImage))
                                    using (GpuMat gpuObservedKeyPoints = surfCuda.DetectKeyPointsRaw(gpuObservedImage, null))
                                        using (GpuMat gpuObservedDescriptors = surfCuda.ComputeDescriptorsRaw(gpuObservedImage, null, gpuObservedKeyPoints))
                                        //using (GpuMat tmp = new GpuMat())
                                        //using (Stream stream = new Stream())
                                        {
                                            matcher.KnnMatch(gpuObservedDescriptors, gpuModelDescriptors, matches, k);

                                            surfCuda.DownloadKeypoints(gpuObservedKeyPoints, observedKeyPoints);

                                            mask = new Matrix <byte>(matches.Size, 1);
                                            mask.SetValue(255);
                                            Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

                                            int nonZeroCount = CvInvoke.CountNonZero(mask);
                                            if (nonZeroCount >= 4)
                                            {
                                                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints,
                                                                                                           matches, mask, 1.5, 20);
                                                if (nonZeroCount >= 4)
                                                {
                                                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints,
                                                                                                                          observedKeyPoints, matches, mask, 2);
                                                }
                                            }
                                        }
                                watch.Stop();
                            }
            }

            else
         #endif
            {
                using (UMat uModelImage = modelImage.Mat.ToUMat(AccessType.Read))
                    using (UMat uObservedImage = observedImage.Mat.ToUMat(AccessType.Read))
                    {
                        SURFDetector surfCPU = new SURFDetector(hessianThresh);
                        //extract features from the object image
                        UMat modelDescriptors = new UMat();
                        surfCPU.DetectAndCompute(uModelImage, null, modelKeyPoints, modelDescriptors, false);

                        watch = Stopwatch.StartNew();

                        // extract features from the observed image

                        UMat observedDescriptors = new UMat();
                        surfCPU.DetectAndCompute(uObservedImage, null, observedKeyPoints, observedDescriptors, false);
                        BruteForceMatcher matcher = new BruteForceMatcher(DistanceType.L2);
                        matcher.Add(modelDescriptors);

                        matcher.KnnMatch(observedDescriptors, matches, k, null);
                        mask = new Matrix <byte>(matches.Size, 1);
                        mask.SetValue(255);
                        Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

                        int nonZeroCount = CvInvoke.CountNonZero(mask);
                        if (nonZeroCount >= 4)
                        {
                            nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints,
                                                                                       matches, mask, 1.5, 20);
                            if (nonZeroCount >= 4)
                            {
                                homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints,
                                                                                                      observedKeyPoints, matches, mask, 2);
                            }
                        }

                        watch.Stop();
                    }
            }
            matchTime = watch.ElapsedMilliseconds;
        }
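A minimal caller for this Emgu CV 3.x-style overload, assuming the box.png / box_in_scene.png test images from Example #6. Note that matches is passed in rather than returned, so the caller owns its lifetime:

        long matchTime;
        VectorOfKeyPoint modelKeyPoints, observedKeyPoints;
        Matrix<byte>     mask;
        HomographyMatrix homography;

        using (Image<Gray, Byte> model = new Image<Gray, byte>("box.png"))
        using (Image<Gray, Byte> observed = new Image<Gray, byte>("box_in_scene.png"))
        using (VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch())
        {
            FindMatch(model, observed, out matchTime, out modelKeyPoints, out observedKeyPoints,
                      matches, out mask, out homography);
            Console.WriteLine("Matched in {0} ms, {1} inliers", matchTime, CvInvoke.CountNonZero(mask));
        }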
Example #6
        static void Run()
        {
            Image <Gray, Byte> modelImage    = new Image <Gray, byte>("box.png");
            Image <Gray, Byte> observedImage = new Image <Gray, byte>("box_in_scene.png");
            Stopwatch          watch;
            HomographyMatrix   homography = null;

            SURFDetector surfCPU = new SURFDetector(500, false);

            VectorOfKeyPoint modelKeyPoints;
            VectorOfKeyPoint observedKeyPoints;
            Matrix <int>     indices;
            Matrix <float>   dist;
            Matrix <byte>    mask;

            if (GpuInvoke.HasCuda)
            {
                GpuSURFDetector surfGPU = new GpuSURFDetector(surfCPU.SURFParams, 0.01f);
                using (GpuImage <Gray, Byte> gpuModelImage = new GpuImage <Gray, byte>(modelImage))
                    //extract features from the object image
                    using (GpuMat <float> gpuModelKeyPoints = surfGPU.DetectKeyPointsRaw(gpuModelImage, null))
                        using (GpuMat <float> gpuModelDescriptors = surfGPU.ComputeDescriptorsRaw(gpuModelImage, null, gpuModelKeyPoints))
                            using (GpuBruteForceMatcher matcher = new GpuBruteForceMatcher(GpuBruteForceMatcher.DistanceType.L2))
                            {
                                modelKeyPoints = new VectorOfKeyPoint();
                                surfGPU.DownloadKeypoints(gpuModelKeyPoints, modelKeyPoints);
                                watch = Stopwatch.StartNew();

                                // extract features from the observed image
                                using (GpuImage <Gray, Byte> gpuObservedImage = new GpuImage <Gray, byte>(observedImage))
                                    using (GpuMat <float> gpuObservedKeyPoints = surfGPU.DetectKeyPointsRaw(gpuObservedImage, null))
                                        using (GpuMat <float> gpuObservedDescriptors = surfGPU.ComputeDescriptorsRaw(gpuObservedImage, null, gpuObservedKeyPoints))
                                            using (GpuMat <int> gpuMatchIndices = new GpuMat <int>(gpuObservedDescriptors.Size.Height, 2, 1))
                                                using (GpuMat <float> gpuMatchDist = new GpuMat <float>(gpuMatchIndices.Size, 1))
                                                {
                                                    observedKeyPoints = new VectorOfKeyPoint();
                                                    surfGPU.DownloadKeypoints(gpuObservedKeyPoints, observedKeyPoints);

                                                    matcher.KnnMatch(gpuObservedDescriptors, gpuModelDescriptors, gpuMatchIndices, gpuMatchDist, 2, null);

                                                    indices = new Matrix <int>(gpuMatchIndices.Size);
                                                    dist    = new Matrix <float>(indices.Size);
                                                    gpuMatchIndices.Download(indices);
                                                    gpuMatchDist.Download(dist);

                                                    mask = new Matrix <byte>(dist.Rows, 1);

                                                    mask.SetValue(255);

                                                    Features2DTracker.VoteForUniqueness(dist, 0.8, mask);

                                                    int nonZeroCount = CvInvoke.cvCountNonZero(mask);
                                                    if (nonZeroCount >= 4)
                                                    {
                                                        nonZeroCount = Features2DTracker.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                                                        if (nonZeroCount >= 4)
                                                        {
                                                            homography = Features2DTracker.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 3);
                                                        }
                                                    }

                                                    watch.Stop();
                                                }
                            }
            }
            else
            {
                //extract features from the object image
                modelKeyPoints = surfCPU.DetectKeyPointsRaw(modelImage, null);
                //MKeyPoint[] kpts = modelKeyPoints.ToArray();
                Matrix <float> modelDescriptors = surfCPU.ComputeDescriptorsRaw(modelImage, null, modelKeyPoints);

                watch = Stopwatch.StartNew();

                // extract features from the observed image
                observedKeyPoints = surfCPU.DetectKeyPointsRaw(observedImage, null);
                Matrix <float> observedDescriptors = surfCPU.ComputeDescriptorsRaw(observedImage, null, observedKeyPoints);

                BruteForceMatcher matcher = new BruteForceMatcher(BruteForceMatcher.DistanceType.L2F32);
                matcher.Add(modelDescriptors);
                int k = 2;
                indices = new Matrix <int>(observedDescriptors.Rows, k);
                dist    = new Matrix <float>(observedDescriptors.Rows, k);
                matcher.KnnMatch(observedDescriptors, indices, dist, k, null);

                mask = new Matrix <byte>(dist.Rows, 1);

                mask.SetValue(255);

                Features2DTracker.VoteForUniqueness(dist, 0.8, mask);

                int nonZeroCount = CvInvoke.cvCountNonZero(mask);
                if (nonZeroCount >= 4)
                {
                    nonZeroCount = Features2DTracker.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                    if (nonZeroCount >= 4)
                    {
                        homography = Features2DTracker.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 3);
                    }
                }

                watch.Stop();
            }

            //Draw the matched keypoints
            Image <Bgr, Byte> result = Features2DTracker.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints,
                                                                     indices, new Bgr(255, 255, 255), new Bgr(255, 255, 255), mask, Features2DTracker.KeypointDrawType.NOT_DRAW_SINGLE_POINTS);

            #region draw the projected region on the image
            if (homography != null)
            {
                // draw a rectangle along the projected model
                Rectangle rect = modelImage.ROI;
                PointF[]  pts  = new PointF[] {
                    new PointF(rect.Left, rect.Bottom),
                    new PointF(rect.Right, rect.Bottom),
                    new PointF(rect.Right, rect.Top),
                    new PointF(rect.Left, rect.Top)
                };
                homography.ProjectPoints(pts);

                result.DrawPolyline(Array.ConvertAll <PointF, Point>(pts, Point.Round), true, new Bgr(Color.Red), 5);
            }
            #endregion

            ImageViewer.Show(result, String.Format("Matched using {0} in {1} milliseconds", GpuInvoke.HasCuda ? "GPU" : "CPU", watch.ElapsedMilliseconds));
        }
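For headless batch runs (as in Example #2) the final ImageViewer.Show call can be swapped for a save to disk; "result.jpg" below is a placeholder output path:

            // persist the annotated matches instead of opening a viewer window
            result.Save("result.jpg");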