#if LANG_JP
/// <summary>
/// StereoBM_GPU コンストラクタ
/// </summary>
/// <param name="preset"></param>
/// <param name="ndisparities"></param>
/// <param name="winSize"></param>
#else
/// <summary>
/// StereoBM_GPU Constructor
/// </summary>
/// <param name="preset">Prefiltering preset (BASIC_PRESET or PREFILTER_XSOBEL)</param>
/// <param name="ndisparities">Number of disparities; must be a multiple of 8</param>
/// <param name="winSize">SAD window (block) size</param>
#endif
public StereoBM_GPU(int preset, int ndisparities = DEFAULT_NDISP, int winSize = DEFAULT_WINSZ)
{
    ptr = GpuInvoke.StereoBM_GPU_new2(preset, ndisparities, winSize);
    if (ptr == IntPtr.Zero)
    {
        throw new OpenCvSharpException();
    }
}
#if LANG_JP
/// <summary>
/// デフォルトのパラメータで初期化.
/// </summary>
#else
/// <summary>
/// Default constructor
/// </summary>
#endif
public StereoBM_GPU()
{
    ptr = GpuInvoke.StereoBM_GPU_new1();
    if (ptr == IntPtr.Zero)
    {
        throw new OpenCvSharpException();
    }
}
// Converts a raw BGR frame to YCrCb on the GPU, thresholds skin-colored pixels,
// and cleans the resulting mask with a morphological opening.
public Image<Gray, Byte> DetectSkin(byte[] image)
{
    ImageUtil.UpdateBgrImage(image, bgrImage.Data, width, height); // copy raw bytes into the staging image
    bgrImageGpu.Upload(bgrImage);                                  // host -> device
    GpuInvoke.CvtColor(bgrImageGpu, yccImageGpu, Emgu.CV.CvEnum.COLOR_CONVERSION.CV_BGR2YCrCb, IntPtr.Zero);
    FilterSkin(yccImageGpu.Ptr, skinGpu.Ptr);                      // custom skin-threshold kernel
    GpuInvoke.MorphologyEx(skinGpu, skinGpu, CV_MORPH_OP.CV_MOP_OPEN, Kernel.Ptr, buffer1, buffer2, new Point(2, 2), 1, IntPtr.Zero); // remove speckle noise
    skinGpu.Download(skin);                                        // device -> host
    return skin;
}
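// A minimal usage sketch for DetectSkin, not from the original source: the host
// class is assumed to be constructed with width/height matching the incoming frame,
// and rawBgrFrame is assumed to hold width * height * 3 interleaved BGR bytes.
public Image<Gray, Byte> DetectSkinExample(byte[] rawBgrFrame)
{
    Image<Gray, Byte> skinMask = DetectSkin(rawBgrFrame);
    skinMask.Save("skin-mask.png"); // persist the binary mask for inspection
    return skinMask;
}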
/// <summary>
/// static constructor
/// </summary>
static StereoBM_GPU()
{
    try
    {
        SizeOf = (int)GpuInvoke.StereoBM_GPU_sizeof();
    }
    catch (DllNotFoundException e)
    {
        PInvokeHelper.DllImportError(e);
        throw;
    }
    catch (BadImageFormatException e)
    {
        PInvokeHelper.DllImportError(e);
        throw;
    }
}
/// <summary> /// /// </summary> /// <param name="left"></param> /// <param name="right"></param> /// <param name="disparity"></param> #else /// <summary> /// /// </summary> /// <param name="left"></param> /// <param name="right"></param> /// <param name="disparity"></param> #endif public void Run(GpuMat left, GpuMat right, GpuMat disparity) { if (disposed) { throw new ObjectDisposedException("StereoBM_GPU"); } if (left == null) { throw new ArgumentNullException("left"); } if (right == null) { throw new ArgumentNullException("right"); } if (disparity == null) { throw new ArgumentNullException("disparity"); } GpuInvoke.StereoBM_GPU_run1(ptr, left.CvPtr, right.CvPtr, disparity.CvPtr); }
#if LANG_JP
/// <summary>
/// リソースの解放
/// </summary>
/// <param name="disposing">
/// trueの場合は、このメソッドがユーザコードから直接呼ばれたことを示す。マネージ・アンマネージ双方のリソースが解放される。
/// falseの場合は、このメソッドはランタイムからファイナライザによって呼ばれ、もうほかのオブジェクトから参照されていないことを示す。アンマネージリソースのみ解放される。
/// </param>
#else
/// <summary>
/// Clean up any resources being used.
/// </summary>
/// <param name="disposing">
/// If disposing equals true, the method has been called directly or indirectly by a user's code. Managed and unmanaged resources can be disposed.
/// If false, the method has been called by the runtime from inside the finalizer and you should not reference other objects. Only unmanaged resources can be disposed.
/// </param>
#endif
protected override void Dispose(bool disposing)
{
    if (!disposed)
    {
        try
        {
            if (disposing)
            {
                // no managed resources to release
            }
            if (IsEnabledDispose)
            {
                GpuInvoke.StereoBM_GPU_delete(ptr);
            }
            disposed = true;
        }
        finally
        {
            base.Dispose(disposing);
        }
    }
}
public static void FindMatch(Image<Gray, Byte> modelImage, Image<Gray, byte> observedImage, out long matchTime,
    out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints,
    out Matrix<int> indices, out Matrix<byte> mask, out HomographyMatrix homography)
{
    int k = 2;
    double uniquenessThreshold = 0.8;
    SURFDetector surfCPU = new SURFDetector(500, false);
    Stopwatch watch;
    homography = null;

    if (GpuInvoke.HasCuda)
    {
        GpuSURFDetector surfGPU = new GpuSURFDetector(surfCPU.SURFParams, 0.01f);

        //extract features from the object image
        using (GpuImage<Gray, Byte> gpuModelImage = new GpuImage<Gray, byte>(modelImage))
        using (GpuMat<float> gpuModelKeyPoints = surfGPU.DetectKeyPointsRaw(gpuModelImage, null))
        using (GpuMat<float> gpuModelDescriptors = surfGPU.ComputeDescriptorsRaw(gpuModelImage, null, gpuModelKeyPoints))
        using (GpuBruteForceMatcher<float> matcher = new GpuBruteForceMatcher<float>(DistanceType.L2))
        {
            modelKeyPoints = new VectorOfKeyPoint();
            surfGPU.DownloadKeypoints(gpuModelKeyPoints, modelKeyPoints);
            watch = Stopwatch.StartNew();

            // extract features from the observed image
            using (GpuImage<Gray, Byte> gpuObservedImage = new GpuImage<Gray, byte>(observedImage))
            using (GpuMat<float> gpuObservedKeyPoints = surfGPU.DetectKeyPointsRaw(gpuObservedImage, null))
            using (GpuMat<float> gpuObservedDescriptors = surfGPU.ComputeDescriptorsRaw(gpuObservedImage, null, gpuObservedKeyPoints))
            using (GpuMat<int> gpuMatchIndices = new GpuMat<int>(gpuObservedDescriptors.Size.Height, k, 1, true))
            using (GpuMat<float> gpuMatchDist = new GpuMat<float>(gpuObservedDescriptors.Size.Height, k, 1, true))
            using (GpuMat<Byte> gpuMask = new GpuMat<byte>(gpuMatchIndices.Size.Height, 1, 1))
            using (Stream stream = new Stream())
            {
                matcher.KnnMatchSingle(gpuObservedDescriptors, gpuModelDescriptors, gpuMatchIndices, gpuMatchDist, k, null, stream);
                indices = new Matrix<int>(gpuMatchIndices.Size);
                mask = new Matrix<byte>(gpuMask.Size);

                //GPU implementation of VoteForUniqueness
                using (GpuMat<float> col0 = gpuMatchDist.Col(0))
                using (GpuMat<float> col1 = gpuMatchDist.Col(1))
                {
                    GpuInvoke.Multiply(col1, new MCvScalar(uniquenessThreshold), col1, stream);
                    GpuInvoke.Compare(col0, col1, gpuMask, CMP_TYPE.CV_CMP_LE, stream);
                }

                observedKeyPoints = new VectorOfKeyPoint();
                surfGPU.DownloadKeypoints(gpuObservedKeyPoints, observedKeyPoints);

                //wait for the stream to complete its tasks; other CPU-intensive
                //work could be done here while the GPU stream finishes
                stream.WaitForCompletion();

                gpuMask.Download(mask);
                gpuMatchIndices.Download(indices);

                if (GpuInvoke.CountNonZero(gpuMask) >= 4)
                {
                    int nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                    if (nonZeroCount >= 4)
                        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
                }
                watch.Stop();
            }
        }
    }
    else
    {
        //extract features from the object image
        modelKeyPoints = new VectorOfKeyPoint();
        Matrix<float> modelDescriptors = surfCPU.DetectAndCompute(modelImage, null, modelKeyPoints);
        watch = Stopwatch.StartNew();

        // extract features from the observed image
        observedKeyPoints = new VectorOfKeyPoint();
        Matrix<float> observedDescriptors = surfCPU.DetectAndCompute(observedImage, null, observedKeyPoints);

        BruteForceMatcher<float> matcher = new BruteForceMatcher<float>(DistanceType.L2);
        matcher.Add(modelDescriptors);

        indices = new Matrix<int>(observedDescriptors.Rows, k);
        using (Matrix<float> dist = new Matrix<float>(observedDescriptors.Rows, k))
        {
            matcher.KnnMatch(observedDescriptors, indices, dist, k, null);
            mask = new Matrix<byte>(dist.Rows, 1);
            mask.SetValue(255);
            Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
        }

        int nonZeroCount = CvInvoke.cvCountNonZero(mask);
        if (nonZeroCount >= 4)
        {
            nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
            if (nonZeroCount >= 4)
                homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
        }
        watch.Stop();
    }
    matchTime = watch.ElapsedMilliseconds;
}
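// A minimal, hedged usage sketch for FindMatch above (not from the original source;
// the file names are placeholders): load a model and a scene image, run the matcher,
// and report whether a homography was recovered.
public static void FindMatchExample()
{
    long matchTime;
    VectorOfKeyPoint modelKeyPoints, observedKeyPoints;
    Matrix<int> indices;
    Matrix<byte> mask;
    HomographyMatrix homography;
    using (Image<Gray, Byte> model = new Image<Gray, Byte>("model.png"))
    using (Image<Gray, Byte> scene = new Image<Gray, Byte>("scene.png"))
    {
        FindMatch(model, scene, out matchTime, out modelKeyPoints, out observedKeyPoints,
                  out indices, out mask, out homography);
        Console.WriteLine("Match took {0} ms; homography {1}",
                          matchTime, homography == null ? "not found" : "found");
    }
}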
/// <summary>
/// Draw the model image and observed image, the matched features and homography projection.
/// </summary>
/// <param name="modelImage">The model image</param>
/// <param name="observedImage">The observed image</param>
/// <param name="matchTime">The output total time for computing the homography matrix.</param>
/// <returns>The model image and observed image, the matched features and homography projection.</returns>
public static Image<Bgr, Byte> Draw(Image<Gray, Byte> modelImage, Image<Gray, byte> observedImage, out long matchTime)
{
    Stopwatch watch;
    HomographyMatrix homography = null;
    SURFDetector surfCPU = new SURFDetector(500, false);
    VectorOfKeyPoint modelKeyPoints;
    VectorOfKeyPoint observedKeyPoints;
    Matrix<int> indices;
    Matrix<byte> mask;
    int k = 2;
    double uniquenessThreshold = 0.8;

    if (GpuInvoke.HasCuda)
    {
        GpuSURFDetector surfGPU = new GpuSURFDetector(surfCPU.SURFParams, 0.01f);

        //extract features from the object image
        using (GpuImage<Gray, Byte> gpuModelImage = new GpuImage<Gray, byte>(modelImage))
        using (GpuMat<float> gpuModelKeyPoints = surfGPU.DetectKeyPointsRaw(gpuModelImage, null))
        using (GpuMat<float> gpuModelDescriptors = surfGPU.ComputeDescriptorsRaw(gpuModelImage, null, gpuModelKeyPoints))
        using (GpuBruteForceMatcher<float> matcher = new GpuBruteForceMatcher<float>(DistanceType.L2))
        {
            modelKeyPoints = new VectorOfKeyPoint();
            surfGPU.DownloadKeypoints(gpuModelKeyPoints, modelKeyPoints);
            watch = Stopwatch.StartNew();

            // extract features from the observed image
            using (GpuImage<Gray, Byte> gpuObservedImage = new GpuImage<Gray, byte>(observedImage))
            using (GpuMat<float> gpuObservedKeyPoints = surfGPU.DetectKeyPointsRaw(gpuObservedImage, null))
            using (GpuMat<float> gpuObservedDescriptors = surfGPU.ComputeDescriptorsRaw(gpuObservedImage, null, gpuObservedKeyPoints))
            using (GpuMat<int> gpuMatchIndices = new GpuMat<int>(gpuObservedDescriptors.Size.Height, k, 1, true))
            using (GpuMat<float> gpuMatchDist = new GpuMat<float>(gpuObservedDescriptors.Size.Height, k, 1, true))
            using (GpuMat<Byte> gpuMask = new GpuMat<byte>(gpuMatchIndices.Size.Height, 1, 1))
            using (Stream stream = new Stream())
            {
                matcher.KnnMatchSingle(gpuObservedDescriptors, gpuModelDescriptors, gpuMatchIndices, gpuMatchDist, k, null, stream);
                indices = new Matrix<int>(gpuMatchIndices.Size);
                mask = new Matrix<byte>(gpuMask.Size);

                //GPU implementation of VoteForUniqueness
                using (GpuMat<float> col0 = gpuMatchDist.Col(0))
                using (GpuMat<float> col1 = gpuMatchDist.Col(1))
                {
                    GpuInvoke.Multiply(col1, new MCvScalar(uniquenessThreshold), col1, stream);
                    GpuInvoke.Compare(col0, col1, gpuMask, CMP_TYPE.CV_CMP_LE, stream);
                }

                observedKeyPoints = new VectorOfKeyPoint();
                surfGPU.DownloadKeypoints(gpuObservedKeyPoints, observedKeyPoints);

                //wait for the stream to complete its tasks; other CPU-intensive
                //work could be done here while the GPU stream finishes
                stream.WaitForCompletion();

                gpuMask.Download(mask);
                gpuMatchIndices.Download(indices);

                if (GpuInvoke.CountNonZero(gpuMask) >= 4)
                {
                    int nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                    if (nonZeroCount >= 4)
                        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
                }
                watch.Stop();
            }
        }
    }
    else
    {
        //extract features from the object image
        modelKeyPoints = surfCPU.DetectKeyPointsRaw(modelImage, null);
        Matrix<float> modelDescriptors = surfCPU.ComputeDescriptorsRaw(modelImage, null, modelKeyPoints);
        watch = Stopwatch.StartNew();

        // extract features from the observed image
        observedKeyPoints = surfCPU.DetectKeyPointsRaw(observedImage, null);
        Matrix<float> observedDescriptors = surfCPU.ComputeDescriptorsRaw(observedImage, null, observedKeyPoints);

        BruteForceMatcher<float> matcher = new BruteForceMatcher<float>(DistanceType.L2);
        matcher.Add(modelDescriptors);

        indices = new Matrix<int>(observedDescriptors.Rows, k);
        using (Matrix<float> dist = new Matrix<float>(observedDescriptors.Rows, k))
        {
            matcher.KnnMatch(observedDescriptors, indices, dist, k, null);
            mask = new Matrix<byte>(dist.Rows, 1);
            mask.SetValue(255);
            Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
        }

        int nonZeroCount = CvInvoke.cvCountNonZero(mask);
        if (nonZeroCount >= 4)
        {
            nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
            if (nonZeroCount >= 4)
                homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
        }
        watch.Stop();
    }

    //Draw the matched keypoints
    Image<Bgr, Byte> result = Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints, indices,
        new Bgr(255, 255, 255), new Bgr(255, 255, 255), mask, Features2DToolbox.KeypointDrawType.DEFAULT);

    #region draw the projected region on the image
    if (homography != null)
    {
        //draw a rectangle along the projected model
        Rectangle rect = modelImage.ROI;
        PointF[] pts = new PointF[]
        {
            new PointF(rect.Left, rect.Bottom),
            new PointF(rect.Right, rect.Bottom),
            new PointF(rect.Right, rect.Top),
            new PointF(rect.Left, rect.Top)
        };
        homography.ProjectPoints(pts);
        result.DrawPolyline(Array.ConvertAll<PointF, Point>(pts, Point.Round), true, new Bgr(Color.Red), 5);
    }
    #endregion

    matchTime = watch.ElapsedMilliseconds;
    return result;
}
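// A hedged usage sketch for Draw above (not from the original source; the file
// paths are placeholders): renders the matches plus the projected model outline
// and saves the composite image.
public static void DrawExample()
{
    long matchTime;
    using (Image<Gray, Byte> model = new Image<Gray, Byte>("box.png"))
    using (Image<Gray, Byte> scene = new Image<Gray, Byte>("box_in_scene.png"))
    using (Image<Bgr, Byte> result = Draw(model, scene, out matchTime))
    {
        result.Save("matches.jpg");
        Console.WriteLine("Homography computed in {0} ms", matchTime);
    }
}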
/// <summary>
/// Heuristically estimates whether running the block matcher on the GPU is likely to be faster than on the CPU.
/// </summary>
/// <returns>True if a GPU call is considered reasonable; otherwise false.</returns>
public static bool CheckIfGpuCallReasonable()
{
    return GpuInvoke.StereoBM_GPU_checkIfGpuCallReasonable() != 0;
}
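// A hedged usage sketch tying the StereoBM_GPU members above together (not from the
// original source). It assumes the caller has already uploaded 8-bit single-channel
// left/right images and allocated a disparity buffer as GpuMat objects; only members
// defined in this class are used.
public static void ComputeDisparityExample(GpuMat left, GpuMat right, GpuMat disparity)
{
    // Heuristic check: skip the GPU path when it is unlikely to beat the CPU.
    if (!StereoBM_GPU.CheckIfGpuCallReasonable())
        return;

    // The using-statement guarantees Dispose(), which releases the native
    // object via GpuInvoke.StereoBM_GPU_delete.
    using (var bm = new StereoBM_GPU()) // default preset/ndisparities/winSize
    {
        bm.Run(left, right, disparity); // fills the disparity GpuMat
    }
}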
private Image<Bgr, byte> Match(Image<Bgr, byte> image1, Image<Bgr, byte> image2, int flag)
{
    HomographyMatrix homography = null;
    SURFDetector surfDetectorCPU = new SURFDetector(500, false);

    int k = 2;                        //number of best matches to find between image1 and image2
    double uniquenessThreshold = 0.8;

    Matrix<int> indices;
    Matrix<byte> mask;
    VectorOfKeyPoint KeyPointsImage1;
    VectorOfKeyPoint KeyPointsImage2;

    Image<Gray, Byte> Image1G = image1.Convert<Gray, Byte>();
    Image<Gray, Byte> Image2G = image2.Convert<Gray, Byte>();

    if (GpuInvoke.HasCuda) //with CUDA the GPU can be used for general-purpose processing (not just graphics), speeding this up
    {
        Console.WriteLine("Here");
        GpuSURFDetector surfDetectorGPU = new GpuSURFDetector(surfDetectorCPU.SURFParams, 0.01f);

        // extract features from Image1
        using (GpuImage<Gray, Byte> gpuImage1 = new GpuImage<Gray, byte>(Image1G))                                              //convert CPU input image to GpuImage (grayscale)
        using (GpuMat<float> gpuKeyPointsImage1 = surfDetectorGPU.DetectKeyPointsRaw(gpuImage1, null))                          //find key points for the image
        using (GpuMat<float> gpuDescriptorsImage1 = surfDetectorGPU.ComputeDescriptorsRaw(gpuImage1, null, gpuKeyPointsImage1)) //calculate a descriptor for each key point
        using (GpuBruteForceMatcher<float> matcher = new GpuBruteForceMatcher<float>(DistanceType.L2))                          //create a new matcher object
        {
            KeyPointsImage1 = new VectorOfKeyPoint();
            surfDetectorGPU.DownloadKeypoints(gpuKeyPointsImage1, KeyPointsImage1); //copy the matrix from GPU to CPU

            // extract features from Image2
            using (GpuImage<Gray, Byte> gpuImage2 = new GpuImage<Gray, byte>(Image2G))
            using (GpuMat<float> gpuKeyPointsImage2 = surfDetectorGPU.DetectKeyPointsRaw(gpuImage2, null))
            using (GpuMat<float> gpuDescriptorsImage2 = surfDetectorGPU.ComputeDescriptorsRaw(gpuImage2, null, gpuKeyPointsImage2))
            //for each descriptor of image2, find the k best matching points and their distances among image1's descriptors
            using (GpuMat<int> gpuMatchIndices = new GpuMat<int>(gpuDescriptorsImage2.Size.Height, k, 1, true))  //stores indices of the k best matches
            using (GpuMat<float> gpuMatchDist = new GpuMat<float>(gpuDescriptorsImage2.Size.Height, k, 1, true)) //stores distances of the k best matches
            using (GpuMat<Byte> gpuMask = new GpuMat<byte>(gpuMatchIndices.Size.Height, 1, 1))                   //stores the result of the comparison
            using (Stream stream = new Stream())
            {
                //match descriptors of image2 against image1, storing the k best indices and corresponding distances
                matcher.KnnMatchSingle(gpuDescriptorsImage2, gpuDescriptorsImage1, gpuMatchIndices, gpuMatchDist, k, null, stream);
                indices = new Matrix<int>(gpuMatchIndices.Size);
                mask = new Matrix<byte>(gpuMask.Size);

                //GPU implementation of VoteForUniqueness
                using (GpuMat<float> col0 = gpuMatchDist.Col(0))
                using (GpuMat<float> col1 = gpuMatchDist.Col(1))
                {
                    GpuInvoke.Multiply(col1, new MCvScalar(uniquenessThreshold), col1, stream); //passing the stream makes the call asynchronous
                    GpuInvoke.Compare(col0, col1, gpuMask, CMP_TYPE.CV_CMP_LE, stream);         //a match is kept only if col0 <= 0.8 * col1
                }

                KeyPointsImage2 = new VectorOfKeyPoint();
                surfDetectorGPU.DownloadKeypoints(gpuKeyPointsImage2, KeyPointsImage2);

                //wait for the stream to complete its tasks; other CPU-intensive
                //work could be done here while the GPU stream finishes
                stream.WaitForCompletion();

                gpuMask.Download(mask);
                gpuMatchIndices.Download(indices);

                //count the nonzero points in the mask (the stored result of the uniqueness comparison)
                if (GpuInvoke.CountNonZero(gpuMask) >= 4)
                {
                    int nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(KeyPointsImage1, KeyPointsImage2, indices, mask, 1.5, 20);
                    //a homography matrix can only be created with at least 4 matching points
                    if (nonZeroCount >= 4)
                        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(KeyPointsImage1, KeyPointsImage2, indices, mask, 2);
                }
            }
        }
    }
    else
    {
        Console.WriteLine("No CUDA");

        //extract features from image1
        KeyPointsImage1 = new VectorOfKeyPoint();
        Matrix<float> DescriptorsImage1 = surfDetectorCPU.DetectAndCompute(Image1G, null, KeyPointsImage1);

        //extract features from image2
        KeyPointsImage2 = new VectorOfKeyPoint();
        Matrix<float> DescriptorsImage2 = surfDetectorCPU.DetectAndCompute(Image2G, null, KeyPointsImage2);

        BruteForceMatcher<float> matcher = new BruteForceMatcher<float>(DistanceType.L2);
        matcher.Add(DescriptorsImage1);

        indices = new Matrix<int>(DescriptorsImage2.Rows, k);
        using (Matrix<float> dist = new Matrix<float>(DescriptorsImage2.Rows, k))
        {
            matcher.KnnMatch(DescriptorsImage2, indices, dist, k, null);
            mask = new Matrix<byte>(dist.Rows, 1);
            mask.SetValue(255);
            Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
        }

        int nonZeroCount = CvInvoke.cvCountNonZero(mask);
        if (nonZeroCount >= 4)
        {
            nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(KeyPointsImage1, KeyPointsImage2, indices, mask, 1.5, 20);
            if (nonZeroCount >= 4)
                homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(KeyPointsImage1, KeyPointsImage2, indices, mask, 2);
        }
    }

    Image<Bgr, Byte> mImage = image1.Convert<Bgr, Byte>();
    Image<Bgr, Byte> oImage = image2.Convert<Bgr, Byte>();
    Image<Bgr, Byte> result = new Image<Bgr, byte>(mImage.Width + oImage.Width, mImage.Height);

    //Image<Bgr, Byte> temp = Features2DToolbox.DrawMatches(image1, KeyPointsImage1, image2, KeyPointsImage2, indices, new Bgr(255, 255, 255), new Bgr(255, 255, 255), mask, Features2DToolbox.KeypointDrawType.DEFAULT);

    if (homography != null)
    {
        //draw a rectangle along the projected model
        Rectangle rect = image1.ROI;
        PointF[] pts = new PointF[]
        {
            new PointF(rect.Left, rect.Bottom),
            new PointF(rect.Right, rect.Bottom),
            new PointF(rect.Right, rect.Top),
            new PointF(rect.Left, rect.Top)
        };
        homography.ProjectPoints(pts);

        //copy the left image into the mosaic using an identity homography (a zero shift on the origin)
        HomographyMatrix origin = new HomographyMatrix();
        origin.SetIdentity();
        origin.Data[0, 2] = 0;
        origin.Data[1, 2] = 0;

        Image<Bgr, Byte> mosaic = new Image<Bgr, byte>(mImage.Width + oImage.Width, mImage.Height * 2);
        Image<Bgr, byte> warp_image = mosaic.Clone();
        mosaic = mImage.WarpPerspective(origin, mosaic.Width, mosaic.Height, Emgu.CV.CvEnum.INTER.CV_INTER_LINEAR, Emgu.CV.CvEnum.WARP.CV_WARP_DEFAULT, new Bgr(0, 0, 0));
        warp_image = oImage.WarpPerspective(homography, warp_image.Width, warp_image.Height, Emgu.CV.CvEnum.INTER.CV_INTER_LINEAR, Emgu.CV.CvEnum.WARP.CV_WARP_INVERSE_MAP, new Bgr(200, 0, 0));

        Image<Gray, byte> warp_image_mask = oImage.Convert<Gray, byte>();
        warp_image_mask.SetValue(new Gray(255));
        Image<Gray, byte> warp_mosaic_mask = mosaic.Convert<Gray, byte>();
        warp_mosaic_mask.SetZero();
        warp_mosaic_mask = warp_image_mask.WarpPerspective(homography, warp_mosaic_mask.Width, warp_mosaic_mask.Height,
            Emgu.CV.CvEnum.INTER.CV_INTER_LINEAR, Emgu.CV.CvEnum.WARP.CV_WARP_INVERSE_MAP, new Gray(0));

        warp_image.Copy(mosaic, warp_mosaic_mask);
        if (flag == 1)
        {
            Console.WriteLine("Using Image Blending");
            return blend(mosaic, warp_image, warp_mosaic_mask, 2);
        }
        else
        {
            Console.WriteLine("No Image Blending");
            return mosaic;
        }
    }
    return null;
}
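// A hedged usage sketch for Match above (not from the original source; the file
// names are placeholders). flag == 1 requests blending of the warped right image
// into the mosaic; any other value returns the unblended mosaic.
private Image<Bgr, byte> MatchExample()
{
    using (Image<Bgr, byte> left = new Image<Bgr, byte>("left.jpg"))
    using (Image<Bgr, byte> right = new Image<Bgr, byte>("right.jpg"))
    {
        Image<Bgr, byte> mosaic = Match(left, right, 1);
        if (mosaic == null)
            Console.WriteLine("Too few matches to estimate a homography.");
        return mosaic;
    }
}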
public void classify(BitmapSource frame)
{
    Console.WriteLine(relativeURI);
    //byte[] classifiedImage = frame;
    //WriteableBitmap frameImage = new WriteableBitmap(frameWidth, frameHeight, 96, 96, PixelFormats.Bgr32, null);
    //BitmapSource frameImage = BitmapSource.Create(frameWidth, frameHeight, 96, 96, PixelFormats.Bgr32, null, frame, stride);

    /*
     * resultsPtr = CvInvoke.cvHaarDetectObjects(
     *     Marshal.GetIUnknownForObject(frame),
     *     classifier,
     *     resultsPtr,
     *     1.1,
     *     3,
     *     Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
     *     new System.Drawing.Size(0, 0),
     *     new System.Drawing.Size(0, 0)
     * );
     *
     * Console.WriteLine("Classified?!? Pointer below: ");
     * Console.WriteLine(resultsPtr.ToString());
     */
    //return classifiedImage;

    Console.WriteLine(" - - - Converting Bitmap...");
    System.Drawing.Bitmap bitmapFrame;
    using (MemoryStream outStream = new MemoryStream())
    {
        BitmapEncoder enc = new BmpBitmapEncoder();
        enc.Frames.Add(BitmapFrame.Create(frame));
        enc.Save(outStream);
        bitmapFrame = new System.Drawing.Bitmap(outStream);
    }
    Console.WriteLine(" - - - Bitmap converted!");

    Image<Bgr, Byte> image = new Image<Bgr, Byte>(bitmapFrame);
    Console.WriteLine(" - - - Image set");

    Console.WriteLine(" - - - Check CUDA...");
    if (GpuInvoke.HasCuda)
    {
        Console.WriteLine(" - - - Has CUDA!");
        using (GpuCascadeClassifier target = new GpuCascadeClassifier(classifierURI))
        {
            using (GpuImage<Bgr, Byte> gpuImage = new GpuImage<Bgr, byte>(image))
            using (GpuImage<Gray, Byte> gpuGray = gpuImage.Convert<Gray, Byte>())
            {
                Console.WriteLine(" - - - Detecting!");
                Rectangle[] targetSet = target.DetectMultiScale(gpuGray, 1.1, 10, System.Drawing.Size.Empty);
                Console.WriteLine(" - - - Detected :D :D :D Printing rectangle set: ");
                foreach (Rectangle f in targetSet)
                {
                    Console.WriteLine("Rectangle found at: " + f.ToString());
                    //draw the detected region on the color image in blue
                    image.Draw(f, new Bgr(System.Drawing.Color.Blue), 2);
                }
                Console.WriteLine(" - - - DONE");
            }
        }
    }
    else
    {
        using (HOGDescriptor des = new HOGDescriptor())
        {
            //des.SetSVMDetector
        }
        Console.WriteLine(" - - - No CUDA :( ");
        Console.WriteLine(" - - - Devices available: " + GpuInvoke.GetCudaEnabledDeviceCount());
    }
}
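// A hedged usage sketch for classify above (not from the original source): wraps a
// frame file in a WPF BitmapSource and runs the CUDA/Haar pipeline. The host class
// is assumed to have relativeURI and classifierURI configured before this is called.
public void ClassifyExample(string framePath)
{
    // BitmapImage derives from BitmapSource, so it can feed classify directly.
    var frame = new System.Windows.Media.Imaging.BitmapImage(
        new Uri(framePath, UriKind.RelativeOrAbsolute));
    classify(frame);
}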
public static bool FindModelImageInObservedImage(Image<Gray, byte> modelImage, Image<Gray, byte> observedImage)
{
    var surfCpu = new SURFDetector(500, false);
    VectorOfKeyPoint modelKeyPoints;
    VectorOfKeyPoint observedKeyPoints;
    Matrix<int> indices;
    Matrix<byte> mask;

    int k = 2;
    double uniquenessThreshold = 0.8;

    if (GpuInvoke.HasCuda)
    {
        GpuSURFDetector surfGpu = new GpuSURFDetector(surfCpu.SURFParams, 0.01f);

        //extract features from the object image
        using (GpuImage<Gray, byte> gpuModelImage = new GpuImage<Gray, byte>(modelImage))
        using (GpuMat<float> gpuModelKeyPoints = surfGpu.DetectKeyPointsRaw(gpuModelImage, null))
        using (GpuMat<float> gpuModelDescriptors = surfGpu.ComputeDescriptorsRaw(gpuModelImage, null, gpuModelKeyPoints))
        using (GpuBruteForceMatcher<float> matcher = new GpuBruteForceMatcher<float>(DistanceType.L2))
        {
            modelKeyPoints = new VectorOfKeyPoint();
            surfGpu.DownloadKeypoints(gpuModelKeyPoints, modelKeyPoints);

            // extract features from the observed image
            using (GpuImage<Gray, byte> gpuObservedImage = new GpuImage<Gray, byte>(observedImage))
            using (GpuMat<float> gpuObservedKeyPoints = surfGpu.DetectKeyPointsRaw(gpuObservedImage, null))
            using (GpuMat<float> gpuObservedDescriptors = surfGpu.ComputeDescriptorsRaw(gpuObservedImage, null, gpuObservedKeyPoints))
            using (GpuMat<int> gpuMatchIndices = new GpuMat<int>(gpuObservedDescriptors.Size.Height, k, 1, true))
            using (GpuMat<float> gpuMatchDist = new GpuMat<float>(gpuObservedDescriptors.Size.Height, k, 1, true))
            using (GpuMat<Byte> gpuMask = new GpuMat<byte>(gpuMatchIndices.Size.Height, 1, 1))
            using (var stream = new Emgu.CV.GPU.Stream())
            {
                matcher.KnnMatchSingle(gpuObservedDescriptors, gpuModelDescriptors, gpuMatchIndices, gpuMatchDist, k, null, stream);
                indices = new Matrix<int>(gpuMatchIndices.Size);
                mask = new Matrix<byte>(gpuMask.Size);

                //GPU implementation of VoteForUniqueness
                using (GpuMat<float> col0 = gpuMatchDist.Col(0))
                using (GpuMat<float> col1 = gpuMatchDist.Col(1))
                {
                    GpuInvoke.Multiply(col1, new MCvScalar(uniquenessThreshold), col1, stream);
                    GpuInvoke.Compare(col0, col1, gpuMask, CMP_TYPE.CV_CMP_LE, stream);
                }

                observedKeyPoints = new VectorOfKeyPoint();
                surfGpu.DownloadKeypoints(gpuObservedKeyPoints, observedKeyPoints);

                //wait for the stream to complete its tasks; other CPU-intensive
                //work could be done here while the GPU stream finishes
                stream.WaitForCompletion();

                gpuMask.Download(mask);
                gpuMatchIndices.Download(indices);

                if (GpuInvoke.CountNonZero(gpuMask) >= 4)
                {
                    int nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                    if (nonZeroCount >= 4)
                        Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
                    if ((double)nonZeroCount / mask.Height > 0.02)
                        return true;
                }
            }
        }
    }
    else
    {
        //extract features from the object image
        modelKeyPoints = surfCpu.DetectKeyPointsRaw(modelImage, null);
        Matrix<float> modelDescriptors = surfCpu.ComputeDescriptorsRaw(modelImage, null, modelKeyPoints);

        // extract features from the observed image
        observedKeyPoints = surfCpu.DetectKeyPointsRaw(observedImage, null);
        Matrix<float> observedDescriptors = surfCpu.ComputeDescriptorsRaw(observedImage, null, observedKeyPoints);

        BruteForceMatcher<float> matcher = new BruteForceMatcher<float>(DistanceType.L2);
        matcher.Add(modelDescriptors);

        indices = new Matrix<int>(observedDescriptors.Rows, k);
        using (Matrix<float> dist = new Matrix<float>(observedDescriptors.Rows, k))
        {
            matcher.KnnMatch(observedDescriptors, indices, dist, k, null);
            mask = new Matrix<byte>(dist.Rows, 1);
            mask.SetValue(255);
            Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
        }

        int nonZeroCount = CvInvoke.cvCountNonZero(mask);
        if (nonZeroCount >= 4)
        {
            nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
            if (nonZeroCount >= 4)
                Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
        }
        if ((double)nonZeroCount / mask.Height > 0.02)
            return true;
    }

    //Draw the matched keypoints
    //var result = Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints, indices, new Bgr(0, 0, 255), new Bgr(255, 0, 0), mask, Features2DToolbox.KeypointDrawType.DEFAULT);
    //result.Save( @"C:\Users\D.Markachev\Desktop\bleh-keypoints.jpg" );

    return false;
}
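// A hedged usage sketch for FindModelImageInObservedImage above (not from the
// original source; the file names are placeholders): returns true when enough
// unique, orientation-consistent matches survive the voting steps.
public static bool FindModelExample()
{
    using (var model = new Image<Gray, byte>("logo.png"))
    using (var scene = new Image<Gray, byte>("photo.jpg"))
    {
        return FindModelImageInObservedImage(model, scene);
    }
}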