Example No. 1
      public static void FindMatch(Mat modelImage, Mat observedImage, out long matchTime, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints, VectorOfVectorOfDMatch matches, out Mat mask, out Mat homography)
      {
         int k = 2;
         double uniquenessThreshold = 0.8;
         double hessianThresh = 300;
         
         Stopwatch watch;
         homography = null;

         modelKeyPoints = new VectorOfKeyPoint();
         observedKeyPoints = new VectorOfKeyPoint();

         #if !__IOS__
         if (CudaInvoke.HasCuda)
         {
            CudaSURF surfCuda = new CudaSURF((float) hessianThresh);
            using (GpuMat gpuModelImage = new GpuMat(modelImage))
            //extract features from the object image
            using (GpuMat gpuModelKeyPoints = surfCuda.DetectKeyPointsRaw(gpuModelImage, null))
            using (GpuMat gpuModelDescriptors = surfCuda.ComputeDescriptorsRaw(gpuModelImage, null, gpuModelKeyPoints))
            using (CudaBFMatcher matcher = new CudaBFMatcher(DistanceType.L2))
            {
               surfCuda.DownloadKeypoints(gpuModelKeyPoints, modelKeyPoints);
               watch = Stopwatch.StartNew();

               // extract features from the observed image
               using (GpuMat gpuObservedImage = new GpuMat(observedImage))
               using (GpuMat gpuObservedKeyPoints = surfCuda.DetectKeyPointsRaw(gpuObservedImage, null))
               using (GpuMat gpuObservedDescriptors = surfCuda.ComputeDescriptorsRaw(gpuObservedImage, null, gpuObservedKeyPoints))
               //using (GpuMat tmp = new GpuMat())
               //using (Stream stream = new Stream())
               {
                  matcher.KnnMatch(gpuObservedDescriptors, gpuModelDescriptors, matches, k);

                  surfCuda.DownloadKeypoints(gpuObservedKeyPoints, observedKeyPoints);

                  mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                  mask.SetTo(new MCvScalar(255));
                  Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

                  int nonZeroCount = CvInvoke.CountNonZero(mask);
                  if (nonZeroCount >= 4)
                  {
                     nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints,
                        matches, mask, 1.5, 20);
                     if (nonZeroCount >= 4)
                        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints,
                           observedKeyPoints, matches, mask, 2);
                  }
               }
               watch.Stop();
            }
         }
         else
         #endif
         {
            using (UMat uModelImage = modelImage.ToUMat(AccessType.Read))
            using (UMat uObservedImage = observedImage.ToUMat(AccessType.Read))
            {
               SURF surfCPU = new SURF(hessianThresh);
               //extract features from the object image
               UMat modelDescriptors = new UMat();
               surfCPU.DetectAndCompute(uModelImage, null, modelKeyPoints, modelDescriptors, false);

               watch = Stopwatch.StartNew();

               // extract features from the observed image
               UMat observedDescriptors = new UMat();
               surfCPU.DetectAndCompute(uObservedImage, null, observedKeyPoints, observedDescriptors, false);
               BFMatcher matcher = new BFMatcher(DistanceType.L2);
               matcher.Add(modelDescriptors);

               matcher.KnnMatch(observedDescriptors, matches, k, null);
               mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
               mask.SetTo(new MCvScalar(255));
               Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

               int nonZeroCount = CvInvoke.CountNonZero(mask);
               if (nonZeroCount >= 4)
               {
                  nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints,
                     matches, mask, 1.5, 20);
                  if (nonZeroCount >= 4)
                     homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints,
                        observedKeyPoints, matches, mask, 2);
               }

               watch.Stop();
            }
         }
         matchTime = watch.ElapsedMilliseconds;
      }
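A caller sketch for the FindMatch helper above, modeled on Emgu CV's SURF feature sample; the method name and the drawing step are illustrative, not part of the original example:

      public static Mat Draw(Mat modelImage, Mat observedImage, out long matchTime)
      {
         Mat homography;
         VectorOfKeyPoint modelKeyPoints;
         VectorOfKeyPoint observedKeyPoints;
         using (VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch())
         {
            Mat mask;
            FindMatch(modelImage, observedImage, out matchTime, out modelKeyPoints, out observedKeyPoints, matches, out mask, out homography);

            //draw the matched keypoints; the homography (if found) could also be used to outline the model
            Mat result = new Mat();
            Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints,
               matches, result, new MCvScalar(255, 255, 255), new MCvScalar(255, 255, 255), mask);
            return result;
         }
      }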
Example No. 2
      static void Run()
      {
         IImage image;

         //Read the files as an 8-bit Bgr image  

         image = new UMat("lena.jpg", ImreadModes.Color); //UMat version
         //image = new Mat("lena.jpg", ImreadModes.Color); //CPU version

         long detectionTime;
         List<Rectangle> faces = new List<Rectangle>();
         List<Rectangle> eyes = new List<Rectangle>();

         DetectFace.Detect(
           image, "haarcascade_frontalface_default.xml", "haarcascade_eye.xml", 
           faces, eyes,
           out detectionTime);

         foreach (Rectangle face in faces)
            CvInvoke.Rectangle(image, face, new Bgr(Color.Red).MCvScalar, 2);
         foreach (Rectangle eye in eyes)
            CvInvoke.Rectangle(image, eye, new Bgr(Color.Blue).MCvScalar, 2);

         //display the image 
         using (InputArray iaImage = image.GetInputArray())
         {
            ImageViewer.Show(image, String.Format(
               "Completed face and eye detection using {0} in {1} milliseconds",
               (iaImage.Kind == InputArray.Type.CudaGpuMat && CudaInvoke.HasCuda) ? "CUDA" :
               (iaImage.IsUMat && CvInvoke.UseOpenCL) ? "OpenCL" : "CPU",
               detectionTime));
         }
      }
Example No. 3
        public Mat Calculate(Bitmap referenceBitmap, Bitmap currentBitmap)
        {
            Mat homography;
            using (var detector = new SURF(threshold))
            using (var model = new Image<Gray, byte>(referenceBitmap))
            using (var modelMat = model.Mat.ToUMat(AccessType.Read))
            using (var modelKeyPoints = new VectorOfKeyPoint())
            using (var modelDescriptors = new UMat())
            using (var observed = new Image<Gray, byte>(currentBitmap))
            using (var observedMat = observed.Mat.ToUMat(AccessType.Read))
            using (var observedKeyPoints = new VectorOfKeyPoint())
            using (var observedDescriptors = new UMat())
            using (var matcher = new BFMatcher(DistanceType.L2))
            using (var matches = new VectorOfVectorOfDMatch())
            {
                detector.DetectAndCompute(modelMat, null, modelKeyPoints, modelDescriptors, false);
                detector.DetectAndCompute(observedMat, null, observedKeyPoints, observedDescriptors, false);

                matcher.Add(modelDescriptors);
                matcher.KnnMatch(observedDescriptors, matches, k, null);

                homography = TryFindHomography(modelKeyPoints, observedKeyPoints, matches);
            }

            return homography;
        }
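The TryFindHomography helper called above is not shown on this page; a minimal sketch of what it could look like, reusing the uniqueness-voting logic from Example No. 1 (the threshold value is an assumption):

        private static Mat TryFindHomography(VectorOfKeyPoint modelKeyPoints, VectorOfKeyPoint observedKeyPoints, VectorOfVectorOfDMatch matches)
        {
            double uniquenessThreshold = 0.8; //assumed, as in Example No. 1

            using (Mat mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1))
            {
                mask.SetTo(new MCvScalar(255));
                Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

                //a homography needs at least 4 surviving point pairs
                if (CvInvoke.CountNonZero(mask) < 4)
                {
                    return null;
                }
                return Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, matches, mask, 2);
            }
        }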
Example No. 4
      static void Main(string[] args)
      {
         Mat _left = CvInvoke.Imread("imL.png", ImreadModes.Color);
         Mat _right = CvInvoke.Imread("imR.png", ImreadModes.Color);
         Mat disparityMap = new Mat();

         Stopwatch watch = Stopwatch.StartNew();
         UMat leftGray = new UMat();
         UMat rightGray = new UMat();
         CvInvoke.CvtColor(_left, leftGray, ColorConversion.Bgr2Gray);
         CvInvoke.CvtColor(_right, rightGray, ColorConversion.Bgr2Gray);
         Mat points = new Mat();
         Computer3DPointsFromStereoPair(leftGray, rightGray, disparityMap, points);
         watch.Stop();
         long disparityComputationTime = watch.ElapsedMilliseconds;

         Mat pointsArray = points.Reshape(points.NumberOfChannels, points.Rows*points.Cols);
         Mat colorArray = _left.Reshape(_left.NumberOfChannels, _left.Rows*_left.Cols);
         Mat colorArrayFloat = new Mat();
         colorArray.ConvertTo(colorArrayFloat, DepthType.Cv32F);
         WCloud cloud = new WCloud(pointsArray, colorArray);

         Emgu.CV.Viz3d v = new Emgu.CV.Viz3d("Simple stereo reconstruction");
         WText wtext = new WText("3d point cloud", new System.Drawing.Point(20, 20), 20, new MCvScalar(255, 255, 255));
         WCoordinateSystem wCoordinate = new WCoordinateSystem(1.0);
         v.ShowWidget("text", wtext);
         //v.ShowWidget("coordinate", wCoordinate);
         v.ShowWidget("cloud", cloud);
         v.Spin();
      }
Example No. 5
      public void TestOclKernel()
      {
         if (CvInvoke.HaveOpenCL && CvInvoke.UseOpenCL)
         {

            Ocl.Device defaultDevice = Ocl.Device.Default;

            Mat img = EmguAssert.LoadMat("lena.jpg");
            Mat imgGray = new Mat();
            CvInvoke.CvtColor(img, imgGray, ColorConversion.Bgr2Gray);
            Mat imgFloat = new Mat();
            imgGray.ConvertTo(imgFloat, DepthType.Cv32F, 1.0/255);
            UMat umat = imgFloat.GetUMat(AccessType.Read, UMat.Usage.AllocateDeviceMemory);
            UMat umatDst = new UMat();
            umatDst.Create(umat.Rows, umat.Cols, DepthType.Cv32F, umat.NumberOfChannels, UMat.Usage.AllocateDeviceMemory);
            
            String buildOpts = String.Format("-D dstT={0}", Ocl.OclInvoke.TypeToString(umat.Depth));
    
            String sourceStr = @"
__constant sampler_t samplerLN = CLK_NORMALIZED_COORDS_FALSE | CLK_ADDRESS_CLAMP_TO_EDGE | CLK_FILTER_LINEAR;
__kernel void shift(const image2d_t src, float shift_x, float shift_y, __global uchar* dst, int dst_step, int dst_offset, int dst_rows, int dst_cols)
{
   int x = get_global_id(0);
   int y = get_global_id(1);
   if (x >= dst_cols) return;
   int dst_index = mad24(y, dst_step, mad24(x, (int)sizeof(dstT), dst_offset));
   __global dstT *dstf = (__global dstT *)(dst + dst_index);
   float2 coord = (float2)((float)x+0.5f+shift_x, (float)y+0.5f+shift_y);
   dstf[0] = (dstT)read_imagef(src, samplerLN, coord).x;
}";

            using (CvString errorMsg = new CvString())
            using (Ocl.ProgramSource ps = new Ocl.ProgramSource(sourceStr))
            using (Ocl.Kernel kernel = new Ocl.Kernel())
            using (Ocl.Image2D image2d = new Ocl.Image2D(umat))
            using (Ocl.KernelArg ka = new Ocl.KernelArg(Ocl.KernelArg.Flags.ReadWrite, umatDst))
            {
               float shiftX = 100.5f;
               float shiftY = -50.0f;

               bool success = kernel.Create("shift", ps, buildOpts, errorMsg);
               EmguAssert.IsTrue(success, errorMsg.ToString());
               int idx = 0;
               idx = kernel.Set(idx, image2d);
               idx = kernel.Set(idx, ref shiftX);
               idx = kernel.Set(idx, ref shiftY);
               idx = kernel.Set(idx, ka);
               IntPtr[] globalThreads = new IntPtr[] {new IntPtr(umat.Cols), new IntPtr(umat.Rows), new IntPtr(1) };
               success = kernel.Run(globalThreads, null, true);
               EmguAssert.IsTrue(success, "Failed to run the kernel");
               using (Mat matDst = umatDst.GetMat(AccessType.Read))
               using (Mat saveMat = new Mat())
               {
                  matDst.ConvertTo(saveMat, DepthType.Cv8U, 255.0);
                  saveMat.Save("tmp.jpg");
               }
            }
         }
      }
Example No. 6
 private void calculatedescriptors(Mat image, UMat imageDescriptors, VectorOfKeyPoint imageKeyPoints)
 {
     using (UMat mImage = image.ToUMat(Emgu.CV.CvEnum.AccessType.Read))
     using (SIFT sift = new SIFT())
     {
         sift.DetectAndCompute(mImage, null, imageKeyPoints, imageDescriptors, false);
     }
 }
Example No. 7
        private void button2_Click(object sender, EventArgs e)
        {
            loadImage(textBox1.Text);
            grayImage = new Mat();
            CvInvoke.CvtColor(loadedImage, grayImage, ColorConversion.Bgr2Gray);
            originalImageDescriptors = new UMat();

            originalImageKeyPoints = new VectorOfKeyPoint();
            calculatedescriptors(loadedImage, originalImageDescriptors, originalImageKeyPoints);

            videoCompute();
        }
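The loadImage and videoCompute helpers are defined elsewhere in this class; a minimal assumed implementation of loadImage, inferred from the call above (loadedImage is taken to be a Mat field):

        private void loadImage(string path)
        {
            //assumed: loadedImage is a Mat field used by the rest of the form
            loadedImage = CvInvoke.Imread(path, ImreadModes.Color);
        }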
Example No. 8
 public void TestUMatConvert()
 {
    using (UMat image = new UMat(10, 10, DepthType.Cv8U, 3))
    {
       image.SetTo(new MCvScalar(3, 4, 5));
       using (Image<Bgr, byte> imageDataSameColorSameDepth = image.ToImage<Bgr, byte>())
       using (Image<Gray, byte> imageDataDifferentColorSameDepth = image.ToImage<Gray, byte>())
       using (Image<Bgr, float> imageDataSameColorDifferentDepth = image.ToImage<Bgr, float>())
       {
       }
    }
 }
Example No. 9
   // Use this for initialization
   void Start()
   {  
      Texture2D lenaTexture = Resources.Load<Texture2D>("lena");    

      UMat img = new UMat();
      TextureConvert.Texture2dToOutputArray(lenaTexture, img);
      CvInvoke.Flip(img, img, FlipType.Vertical);

      //String fileName = "haarcascade_frontalface_default";
      //String fileName = "lbpcascade_frontalface";
      String fileName = "haarcascade_frontalface_alt2";
      String filePath = Path.Combine(Application.persistentDataPath, fileName + ".xml");
      //if (!File.Exists(filePath))
      {
         TextAsset cascadeModel = Resources.Load<TextAsset>(fileName);
         
#if UNITY_METRO
         UnityEngine.Windows.File.WriteAllBytes(filePath, cascadeModel.bytes);
#else
         File.WriteAllBytes(filePath, cascadeModel.bytes);
#endif
      }

      using (CascadeClassifier classifier = new CascadeClassifier(filePath))
      using (UMat gray = new UMat())
      {
         CvInvoke.CvtColor(img, gray, ColorConversion.Bgr2Gray);

         Rectangle[] faces = null;
         try
         {
            faces = classifier.DetectMultiScale(gray);

            foreach (Rectangle face in faces)
            {
               CvInvoke.Rectangle(img, face, new MCvScalar(0, 255, 0));
            }
         }
         catch (Exception e)
         {
            Debug.Log(e.Message);
            
            return;
         }
      }

      Texture2D texture = TextureConvert.InputArrayToTexture2D(img, FlipType.Vertical);

      this.GetComponent<GUITexture>().texture = texture;
      Size s = img.Size;
      this.GetComponent<GUITexture>().pixelInset = new Rect(-s.Width / 2, -s.Height / 2, s.Width, s.Height);
   }
Example No. 10
        /// <summary>
        /// Find the pedestrians in the image
        /// </summary>
        /// <param name="image">The image</param>
        /// <param name="tryUseCuda">If true, use CUDA for the detection when a compatible device is available</param>
        /// <param name="processingTime">The pedestrian detection time in milliseconds</param>
        /// <returns>The regions where pedestrians are detected</returns>
        public static Rectangle[] Find(Mat image, bool tryUseCuda, out long processingTime)
        {
            Stopwatch watch;

            Rectangle[] regions;

#if !(__IOS__ || NETFX_CORE)
            //check if there is a compatible Cuda device to run pedestrian detection
            if (tryUseCuda && CudaInvoke.HasCuda)
            { //this is the Cuda version
                using (CudaHOG des = new CudaHOG(new Size(64, 128), new Size(16, 16), new Size(8, 8), new Size(8, 8)))
                {
                    des.SetSVMDetector(des.GetDefaultPeopleDetector());

                    watch = Stopwatch.StartNew();
                    using (GpuMat cudaBgr = new GpuMat(image))
                        using (GpuMat cudaBgra = new GpuMat())
                            using (VectorOfRect vr = new VectorOfRect())
                            {
                                CudaInvoke.CvtColor(cudaBgr, cudaBgra, ColorConversion.Bgr2Bgra);
                                des.DetectMultiScale(cudaBgra, vr);
                                regions = vr.ToArray();
                            }
                }
            }
            else
#endif
            {
                //this is the CPU/OpenCL version
                using (HOGDescriptor des = new HOGDescriptor())
                {
                    des.SetSVMDetector(HOGDescriptor.GetDefaultPeopleDetector());

                    //load the image into a UMat so it will automatically use OpenCL if available
                    UMat umat = image.ToUMat(AccessType.Read);

                    watch = Stopwatch.StartNew();

                    MCvObjectDetection[] results = des.DetectMultiScale(umat);
                    regions = new Rectangle[results.Length];
                    for (int i = 0; i < results.Length; i++)
                    {
                        regions[i] = results[i].Rect;
                    }
                    watch.Stop();
                }
            }

            processingTime = watch.ElapsedMilliseconds;

            return(regions);
        }
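A hypothetical caller for Find; the file name is illustrative:

            long processingTime;
            Mat image = CvInvoke.Imread("pedestrian.png", ImreadModes.Color);
            Rectangle[] pedestrians = Find(image, true, out processingTime);
            foreach (Rectangle pedestrian in pedestrians)
            {
                CvInvoke.Rectangle(image, pedestrian, new Bgr(Color.Red).MCvScalar);
            }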
Example No. 11
        private void button_canny_Click(object sender, EventArgs e)
        {
            CvInvoke.UseOpenCL = true;

            Bitmap bm = new Bitmap(pictureBox1.Image);

            Image <Gray, byte> im = new Image <Gray, byte>(bm);
            UMat u = im.ToUMat();

            CvInvoke.Canny(u, u, 150, 50);

            pictureBox2.Image = u.Bitmap;
        }
Example No. 12
        private UMat GrayScaleImage(Image <Bgr, Byte> imgOriginal)
        {
            //Convert the image to grayscale and filter out the noise
            UMat result = new UMat();

            CvInvoke.CvtColor(imgOriginal, result, ColorConversion.Bgr2Gray);
            //use image pyr to remove noise
            UMat pyrDown = new UMat();

            CvInvoke.PyrDown(result, pyrDown);
            CvInvoke.PyrUp(pyrDown, result);
            return(result);
        }
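A hypothetical call site (the file name is illustrative):

            UMat gray = GrayScaleImage(new Image <Bgr, Byte>("shapes.png"));
            CvInvoke.Imshow("smoothed grayscale", gray);
            CvInvoke.WaitKey(0);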
Example No. 13
        private FeatureModel ExtractFeatures(Mat modelImage)
        {
            var modelKeyPoints   = new VectorOfKeyPoint();
            Mat modelDescriptors = new Mat();

            using (UMat uModelImage = modelImage.GetUMat(AccessType.Read))
            {
                //featureDetector.DetectAndCompute(uModelImage, null, modelKeyPoints, modelDescriptors, false);
                surf.DetectAndCompute(uModelImage, null, modelKeyPoints, modelDescriptors, false);
            }

            return(new FeatureModel(modelKeyPoints, modelDescriptors));
        }
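The FeatureModel type is not shown; a minimal container consistent with the constructor call above (a sketch, not the original class):

        public class FeatureModel
        {
            public VectorOfKeyPoint KeyPoints { get; }
            public Mat Descriptors { get; }

            public FeatureModel(VectorOfKeyPoint keyPoints, Mat descriptors)
            {
                KeyPoints = keyPoints;
                Descriptors = descriptors;
            }
        }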
Example No. 14
        public static Bitmap ToBitmap(this UMat umat)
        {
            Mat mat = umat.GetMat(AccessType.Read); //16777216 == (int)AccessType.Read

            try
            {
                return(mat.ToBitmap());
            }
            finally
            {
                ((IDisposable)mat)?.Dispose();
            }
        }
Example No. 15
 /// <summary>
 /// Create the standard vector of UMat
 /// </summary>
 public VectorOfUMat(UMat[] values)
     : this()
 {
     for (int i = 0; i < values.Length; i++)
     {
         Push(values[i]);
     }
 }
Example No. 16
        public static Bitmap getBinarizedBitmap(Bitmap bitmap)
        {
            var image     = new Image <Bgr, byte>(bitmap);
            var uimage    = new UMat();
            var pyrDown   = new UMat();
            var binarized = new UMat();

            CvInvoke.CvtColor(image, uimage, ColorConversion.Bgr2Gray);
            //use an image pyramid (down, then up) to suppress noise before thresholding
            CvInvoke.PyrDown(uimage, pyrDown);
            CvInvoke.PyrUp(pyrDown, uimage);
            CvInvoke.AdaptiveThreshold(uimage, binarized, 255, AdaptiveThresholdType.GaussianC, ThresholdType.Binary, 255, 16);
            return(binarized.ToImage <Gray, Byte>().ToBitmap(bitmap.Width, bitmap.Height));
        }
Example No. 17
 public void Norm4Disp(UMat img, bool log = false, bool norm = true)
 {
     if (log)
     {
         using (UMat imgPlusOne = img + 1) {
             CvInvoke.Log(imgPlusOne, img);
         }
     }
     if (norm)
     {
         CvInvoke.Normalize(img, img, 0, 255, normType: NormType.MinMax, dType: DepthType.Cv8U);
     }
 }
Example No. 18
        public bool SetImage(UMat img)
        {
            uMatImage = img;

            return(uMatImage != null);
        }
Example No. 19
        private string DoInnerTissueSegmentation(Image <Bgr, byte> img, Image <Gray, byte> mask, string filename)
        {
            InnerTissueSegmentation seg = new InnerTissueSegmentation(img.Clone(), mask.Clone(), registrationParameters.InnerStructuresSegParams);

            seg.Execute();
            UMat innerMask = seg.GetOutput()[0].Clone();

            seg.Dispose();
            string filepathResult = registrationParameters.OutputDirectory + filename;

            ReadWriteUtils.WriteUMatToFile(filepathResult, innerMask);
            return(filepathResult);
        }
Example No. 20
        public override LineSegment2D[] GetLines(UMat cannyEdges)
        {
            //IOutputArray result = new VectorOfVectorOfPointF();
            //CvInvoke.HoughLines(cannyEdges, result, Rho.Value, (Theta.Value * Math.PI) / 180, Threshold.Value, Srn.Value, Stn.Value);

            List<LineSegment2D> lines = new List<LineSegment2D>();
            //foreach (var line in (result as VectorOfVectorOfPointF).ToArrayOfArray())
            //{
            //    lines.Add(new LineSegment2D(new Point((int)line[0].X, (int)line[0].Y), new Point((int)line[1].X, (int)line[1].Y)));
            //}

            return lines.ToArray();
        }
Example No. 21
        public void Circles(ref UMat uimage, double cannyThreshold, double circleAccumulatorThreshold, double dp, double minDist, int minRadio = 0, int maxRadio = 0)
        {
            Stopwatch watch = Stopwatch.StartNew();


            circles = CvInvoke.HoughCircles(uimage, HoughType.Gradient, dp, minDist, cannyThreshold, circleAccumulatorThreshold, minRadio, maxRadio);

            watch.Stop();
            msgBuilder.Append(String.Format("Hough circles - {0} ms; ", watch.ElapsedMilliseconds));


            // circleImageBox.Image = circleImage;
        }
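A hypothetical call, with parameter values borrowed from Emgu CV's shape detection sample:

            Circles(ref uimage, cannyThreshold: 180.0, circleAccumulatorThreshold: 120.0, dp: 2.0, minDist: 20.0, minRadio: 5);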
Example No. 22
        public LicensePlateRecognitionForm()
        {
            InitializeComponent();
            System.Net.ServicePointManager.Expect100Continue = true;
            System.Net.ServicePointManager.SecurityProtocol  = System.Net.SecurityProtocolType.Tls12;

            _licensePlateDetector = new LicensePlateDetector("");
            Mat  m  = new Mat("license-plate.jpg");
            UMat um = m.GetUMat(AccessType.ReadWrite);

            imageBox1.Image = um;
            ProcessImage(m);
        }
Example No. 23
        public string TextRecognize(UMat plate, Tesseract ocr)
        {
            string str;

            //ocr.SetImage(FilterPlate(imgSceneRect));
            ocr.SetImage(plate);
            ocr.Recognize();

            str = ocr.GetUTF8Text();
            //note: this disposes the caller's Tesseract instance, so it cannot be reused after this call
            ocr.Dispose();

            return(str);
        }
Example No. 24
        public static UMat getExposureCorrected(ref UMat img, double ev)
        {
            UMat dblImg = new UMat(img.Rows, img.Cols, Emgu.CV.CvEnum.DepthType.Cv64F, img.NumberOfChannels);
            UMat outImg = new UMat(img.Rows, img.Cols, Emgu.CV.CvEnum.DepthType.Cv64F, img.NumberOfChannels);

            img.ConvertTo(dblImg, Emgu.CV.CvEnum.DepthType.Cv64F);
            //outImg = (UMat)ev*dblImg;
            CvInvoke.AddWeighted(dblImg, ev, dblImg, 0, 0, outImg);
            //CvInvoke.cvConvertScale(dblImg, outImg, ev,0);
            dblImg.Dispose();
            img.Dispose();
            return(outImg);
        }
Example No. 25
        UMat FindEdges()
        {
            // Canny edge detection; fall back to the defaults when the textbox values do not parse
            // (a bare Double.TryParse would zero the thresholds on bad input)
            double cannyThreshold;
            double cannyThresholdLinking;

            if (!Double.TryParse(textBox1.Text, out cannyThreshold))
                cannyThreshold = 180.0;
            if (!Double.TryParse(textBox2.Text, out cannyThresholdLinking))
                cannyThresholdLinking = 120.0;
            UMat cannyEdges = new UMat();

            CvInvoke.Canny(uimage, cannyEdges, cannyThreshold, cannyThresholdLinking);
            return(cannyEdges);
        }
Example No. 26
        public static UMat getContrastAdjusted(ref UMat img, double cont1, double cont2)
        {
            UMat dblImg = new UMat(img.Rows, img.Cols, Emgu.CV.CvEnum.DepthType.Cv64F, img.NumberOfChannels);
            UMat outImg = new UMat(img.Rows, img.Cols, Emgu.CV.CvEnum.DepthType.Cv64F, img.NumberOfChannels);

            img.ConvertTo(dblImg, Emgu.CV.CvEnum.DepthType.Cv64F);
            //outImg = (UMat)ev*dblImg;
            CvInvoke.AddWeighted(dblImg, cont1, dblImg, 0, cont1 * (-128) + cont2 + 128, outImg);
            //CvInvoke.cvConvertScale(dblImg, outImg, ev,0);
            dblImg.Dispose();
            img.Dispose();
            return(outImg);
        }
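For reference, the AddWeighted call above computes out = cont1 * in + cont1 * (-128) + cont2 + 128, which rearranges to out = cont1 * (in - 128) + 128 + cont2: contrast scaling about the mid-gray value 128 plus a brightness offset of cont2.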
Example No. 27
        /// <summary>
        /// Detect KAZE key points in the image and compute their descriptors
        /// </summary>
        /// <param name="source">The image to extract features from</param>
        /// <param name="modelDescriptors">The computed feature descriptors</param>
        /// <returns>The detected key points</returns>
        public static VectorOfKeyPoint FindFeature(Mat source, out Mat modelDescriptors)
        {
            var vectorOfKeyPoint = new VectorOfKeyPoint();

            modelDescriptors = new Mat();
            using (UMat uImage = source.GetUMat(AccessType.Read))
            using (var kaze = new KAZE())
            {
                kaze.DetectAndCompute(uImage, null, vectorOfKeyPoint, modelDescriptors, false);
            }

            return(vectorOfKeyPoint);
        }
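A hypothetical way to match two images with the descriptors returned by FindFeature (modelImage and observedImage are assumed Mat inputs; k = 2 as in the other matching examples on this page):

            Mat modelDescriptors, observedDescriptors;
            VectorOfKeyPoint modelKeyPoints = FindFeature(modelImage, out modelDescriptors);
            VectorOfKeyPoint observedKeyPoints = FindFeature(observedImage, out observedDescriptors);

            using (BFMatcher matcher = new BFMatcher(DistanceType.L2))
            using (VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch())
            {
                matcher.Add(modelDescriptors);
                matcher.KnnMatch(observedDescriptors, matches, 2, null);
                //matches can now be filtered with Features2DToolbox.VoteForUniqueness as in Example No. 1
            }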
Example No. 28
        public static void FindMatch(Mat modelImage, Mat observedImage, out long matchTime, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints, VectorOfVectorOfDMatch matches, out Mat mask, out Mat homography)
        {
            int    k = 2;
            double uniquenessThreshold = 0.8;
            double hessianThresh       = 500;

            Stopwatch watch;

            homography = null;

            modelKeyPoints    = new VectorOfKeyPoint();
            observedKeyPoints = new VectorOfKeyPoint();

            using (UMat uModelImage = modelImage.ToUMat(AccessType.Read))
                using (UMat uObservedImage = observedImage.ToUMat(AccessType.Read))
                {
                    SURF surfCPU = new SURF(hessianThresh);
                    //extract features from the object image
                    UMat modelDescriptors = new UMat();
                    surfCPU.DetectAndCompute(uModelImage, null, modelKeyPoints, modelDescriptors, false);

                    watch = Stopwatch.StartNew();

                    // extract features from the observed image
                    UMat observedDescriptors = new UMat();
                    surfCPU.DetectAndCompute(uObservedImage, null, observedKeyPoints, observedDescriptors, false);
                    BFMatcher matcher = new BFMatcher(DistanceType.L2);
                    matcher.Add(modelDescriptors);

                    matcher.KnnMatch(observedDescriptors, matches, k, null);
                    mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                    mask.SetTo(new MCvScalar(255));
                    Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

                    int nonZeroCount = CvInvoke.CountNonZero(mask);
                    if (nonZeroCount >= 4)
                    {
                        nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints,
                                                                                   matches, mask, 1.5, 20);
                        if (nonZeroCount >= 4)
                        {
                            homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints,
                                                                                                  observedKeyPoints, matches, mask, 2);
                        }
                    }

                    watch.Stop();
                }

            matchTime = watch.ElapsedMilliseconds;
        }
Example No. 29
        static void Main(string[] args)
        {
            var path = new DirectoryInfo(System.Windows.Forms.Application.StartupPath).Parent.Parent.FullName;
            //load the image
            String            imagePath = $"{path}/dice.png";
            Image <Bgr, Byte> src       = new Image <Bgr, byte>(@imagePath);

            CvInvoke.Imshow("src", src);
            //convert to grayscale
            UMat grayImage = new UMat();

            CvInvoke.CvtColor(src, grayImage, ColorConversion.Bgr2Gray);

            //Gaussian blur to remove noise
            CvInvoke.GaussianBlur(grayImage, grayImage, new Size(5, 5), 5);
            CvInvoke.Imshow("Blur Image", grayImage);


            //Hough circle detection
            //CircleF[] circles = CvInvoke.HoughCircles(grayImage, HoughType.Gradient, 0.5, 41, 70, 30, 10,175);


            //create a new image
            UMat cannyEdges = new UMat();

            //detect edges using the 200~255 intensity levels
            CvInvoke.Canny(grayImage, cannyEdges, 255, 255);
            CvInvoke.Imshow("Canny Image", cannyEdges);

            using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
            {
                CvInvoke.FindContours(cannyEdges, contours, null, RetrType.List, ChainApproxMethod.ChainApproxSimple);

                //Hough circle detection
                CircleF[] circles = CvInvoke.HoughCircles(cannyEdges, HoughType.Gradient, 1, 5, 10, 20, 0, 100);
                //int count = contours.Size;
                Console.Write("點數為:" + circles.Length);

                //create a new image and draw the detected positions
                Image <Bgr, Byte> circleImage2 = src.Clone();
                foreach (CircleF circle in circles)
                {
                    circleImage2.Draw(circle, new Bgr(Color.Blue), 5);
                    CvInvoke.Imshow("HoughCircles", circleImage2);
                }
            }



            CvInvoke.WaitKey(0);
        }
Example No. 30
        static void Run()
        {
            IImage image;

            //Read the files as an 8-bit Bgr image

            image = new UMat("lena.jpg", ImreadModes.Color); //UMat version
            //image = new Mat("lena.jpg", ImreadModes.Color); //CPU version

            long detectionTime;

            /*
             * List<Rectangle> faces = new List<Rectangle>();
             * List<Rectangle> eyes = new List<Rectangle>();
             *
             * DetectFace.Detect(
             * image, "haarcascade_frontalface_default.xml", "haarcascade_eye.xml",
             * faces, eyes,
             * out detectionTime);
             *
             * foreach (Rectangle face in faces)
             * CvInvoke.Rectangle(image, face, new Bgr(Color.Red).MCvScalar, 2);
             * foreach (Rectangle eye in eyes)
             * CvInvoke.Rectangle(image, eye, new Bgr(Color.Blue).MCvScalar, 2);
             * */
            ProcessImage(image, out detectionTime);

            //display the image
            using (InputArray iaImage = image.GetInputArray())
            {
                var viewer = new ImageViewer(image, String.Format(
                                                 "Completed face and eye detection using {0} in {1} milliseconds",
                                                 (iaImage.Kind == InputArray.Type.CudaGpuMat && CudaInvoke.HasCuda)
                      ? "CUDA" : (iaImage.IsUMat && CvInvoke.UseOpenCL) ? "OpenCL" : "CPU",
                                                 detectionTime));
                viewer.ImageBox.ImageReload += (sender, e) =>
                {
                    ProcessImage(e, out detectionTime);
                    //(sender as ImageBox)
                };
                Application.Run(viewer);

                /*
                 * ImageViewer.Show(image, String.Format(
                 *  "Completed face and eye detection using {0} in {1} milliseconds",
                 *  (iaImage.Kind == InputArray.Type.CudaGpuMat && CudaInvoke.HasCuda)
                 *      ? "CUDA": (iaImage.IsUMat && CvInvoke.UseOpenCL)? "OpenCL": "CPU",
                 *  detectionTime));
                 * */
            }
        }
Example No. 31
        /// <summary>
        /// Detect license plates in the image and run OCR on the candidates
        /// </summary>
        /// <param name="image">The image to process</param>
        /// <param name="ocr_mode">The OCR mode to pass to the license plate detector</param>
        /// <returns>True when processing completed</returns>
        private bool ProcessImage(IInputOutputArray image, int ocr_mode)
        {
            List <IInputOutputArray> licensePlateImagesList         = new List <IInputOutputArray>();
            List <IInputOutputArray> filteredLicensePlateImagesList = new List <IInputOutputArray>();
            List <RotatedRect>       licenseBoxList = new List <RotatedRect>();
            List <string>            words          = new List <string>();
            var           result        = false;
            UMat          filteredPlate = new UMat();
            StringBuilder strBuilder    = new StringBuilder();

            CvInvoke.CvtColor(image, filteredPlate, ColorConversion.Bgr2Gray);

            UMat uImg = (UMat)image;

            words = _licensePlateDetector.DetectLicensePlate(
                image,
                licensePlateImagesList,
                filteredLicensePlateImagesList,
                licenseBoxList,
                ocr_mode);


            var validWords         = new List <string>();
            var validLicencePlates = new List <IInputOutputArray>();

            for (int w = 0; w < words.Count; w++)
            {
                string replacement2 = Regex.Replace(words[w], @"\t|\n|\r", "");
                string replacement  = Regex.Replace(replacement2, "[^0-9a-zA-Z]+", "");
                if (replacement.Length >= 6) //Regex.Replace never returns null, so no null check is needed
                {
                    var filteredLicence = FilterLicenceSpain(replacement);
                    if (!string.IsNullOrWhiteSpace(filteredLicence))
                    {
                        if (!validWords.Contains(replacement))
                        {
                            validWords.Add(filteredLicence);
                            validLicencePlates.Add(licensePlateImagesList[w]);
                        }
                    }
                }
            }

            ShowResults(image, validLicencePlates, filteredLicensePlateImagesList, licenseBoxList, validWords);

            SetImageBitmap(uImg.Bitmap);
            uImg.Dispose();

            result = true;
            return(result);
        }
Example No. 32
        static void Main(string[] args)
        {
            var iName             = @"D:\+Data\Experiments\14.08.20\Стабильные\2 750_150 hz 30x 77dg C\750_150_30_00139.jpg";
            Image <Bgr, Byte> img = new Image <Bgr, byte>(iName);//.Resize(400, 400, Emgu.CV.CvEnum.Inter.Linear, true);

            UMat uimage = new UMat();

            CvInvoke.CvtColor(img, uimage, ColorConversion.Bgr2Gray);

            UMat pyrDown = new UMat();

            CvInvoke.PyrDown(uimage, pyrDown);
            CvInvoke.PyrUp(pyrDown, uimage);

            double cannyThreshold        = 90; //180.0;
            double cannyThresholdLinking = 60; //120.0;
            UMat   cannyEdges            = new UMat();

            CvInvoke.Canny(uimage, cannyEdges, cannyThreshold, cannyThresholdLinking);

            VectorOfVectorOfPoint conturs = new VectorOfVectorOfPoint();
            Mat hierarchi = new Mat();

            CvInvoke.FindContours(cannyEdges, conturs, hierarchi, RetrType.External, ChainApproxMethod.LinkRuns);

            for (int i = 0; i < conturs.Size; i++)
            {
                double        perimetr     = CvInvoke.ArcLength(conturs[i], true);
                VectorOfPoint aproximation = new VectorOfPoint();
                CvInvoke.ApproxPolyDP(conturs[i], aproximation, 0.04 * perimetr, true);
                if (conturs[i].Size > 10)
                {
                    var rct = CvInvoke.FitEllipse(conturs[i]);
                    CvInvoke.DrawContours(img, conturs, i, new MCvScalar(0, 0, 255), 2);
                    //CvInvoke.Rectangle(img, rct, new MCvScalar(255, 0, 0));
                    //CvInvoke.IsContourConvex(conturs[i]);
                    var vertices = rct.GetVertices();
                    for (int t = 0; t < 4; t++)
                    {
                        CvInvoke.Line(img,
                                      new Point((int)vertices[t].X, (int)vertices[t].Y),
                                      new Point((int)vertices[(t + 1) % 4].X, (int)vertices[(t + 1) % 4].Y),
                                      new MCvScalar(0, 255, 0));
                    }
                }
                Console.WriteLine(conturs[i].Size);
            }
            CvInvoke.Imwrite("cntrs.jpg", cannyEdges);
            CvInvoke.Imwrite("test.jpg", img);
            Console.ReadKey();
        }
Example No. 33
        public FaceDetectionPage()
            : base()
        {
            var button = this.GetButton();

            button.Text     = "Perform Face Detection";
            button.Clicked += OnButtonClicked;

            OnImagesLoaded += async (sender, image) =>
            {
                if (image == null || image[0] == null)
                {
                    return;
                }
                GetLabel().Text = "Please wait...";
                SetImage(null);
                Task <Tuple <Mat, long> > t = new Task <Tuple <Mat, long> >(
                    () =>
                {
                    String faceFile;
                    String eyeFile;
                    bool fileOk = CheckCascadeFile("haarcascade_frontalface_default.xml", "haarcascade_eye.xml",
                                                   out faceFile,
                                                   out eyeFile);

                    long time;
                    List <Rectangle> faces = new List <Rectangle>();
                    List <Rectangle> eyes  = new List <Rectangle>();

                    using (UMat img = image[0].GetUMat(AccessType.ReadWrite))
                        DetectFace.Detect(img, faceFile, eyeFile, faces, eyes, out time);

                    foreach (Rectangle rect in faces)
                    {
                        CvInvoke.Rectangle(image[0], rect, new MCvScalar(0, 0, 255), 2);
                    }
                    foreach (Rectangle rect in eyes)
                    {
                        CvInvoke.Rectangle(image[0], rect, new MCvScalar(255, 0, 0), 2);
                    }

                    return(new Tuple <Mat, long>(image[0], time));
                });
                t.Start();

                var result = await t;
                SetImage(t.Result.Item1);
                String computeDevice = CvInvoke.UseOpenCL ? "OpenCL: " + Ocl.Device.Default.Name : "CPU";
                GetLabel().Text = String.Format("Detected with {1} in {0} milliseconds.", t.Result.Item2, computeDevice);
            };
        }
Example No. 34
        /// <summary>
        /// Do Otsu thresholding on grayscale UMat.
        /// </summary>
        /// <param name="input">grayscale image</param>
        /// <param name="maskWhitePixels">flag to specify if white pixels should be masked</param>
        /// <returns>thresholded image</returns>
        public static UMat ThresholdOtsu(UMat input, bool maskWhitePixels = false)
        {
            UMat result = new UMat();
            //CvInvoke.Threshold(input, result, 0, 255, ThresholdType.Otsu);
            double otsu_threshold = CvInvoke.Threshold(input, result, 0, 255, ThresholdType.Otsu | ThresholdType.Binary);

            if (maskWhitePixels)
            {
                CvInvoke.Threshold(input, result, otsu_threshold, 255, ThresholdType.BinaryInv);
            }

            input.Dispose();
            return(result);
        }
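A hypothetical usage; note that ThresholdOtsu disposes its input, so the caller must not reuse it:

            UMat gray = new UMat();
            CvInvoke.CvtColor(bgrImage, gray, ColorConversion.Bgr2Gray); //bgrImage is an assumed input
            UMat binary = ThresholdOtsu(gray, maskWhitePixels: true);    //gray is disposed inside the call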
Example No. 35
        public static Mat Detect(Mat image, int selectedAlgorithmIndex, bool detectEyes = false, bool detectMouth = false, bool detectNose = false)
        {
            var faces    = new List <Rectangle>();
            var ugray    = new UMat();
            var fileName = GetFileName(selectedAlgorithmIndex);

            using (var face = new CascadeClassifier(fileName))
            {
                CvInvoke.CvtColor(image, ugray, ColorConversion.Bgr2Gray);
                CvInvoke.EqualizeHist(ugray, ugray);
                Rectangle[] facesDetected = face.DetectMultiScale(
                    ugray,
                    1.1,
                    10,
                    new Size(20, 20));
                faces.AddRange(facesDetected);
            }
            foreach (var face in faces)
            {
                CvInvoke.Rectangle(image, face, new Bgr(Color.Red).MCvScalar, 3);
            }
            if (detectEyes)
            {
                var eyes = new List <Rectangle>();
                DetectItem(faces, ugray, eyes, EyeFileName);
                foreach (var eye in eyes)
                {
                    CvInvoke.Rectangle(image, eye, new Bgr(Color.Blue).MCvScalar, 3);
                }
            }
            if (detectMouth)
            {
                var mouth = new List <Rectangle>();
                DetectItem(faces, ugray, mouth, MouthFileName);
                foreach (var m in mouth)
                {
                    CvInvoke.Rectangle(image, m, new Bgr(Color.Green).MCvScalar, 3);
                }
            }
            if (detectNose)
            {
                var noses = new List <Rectangle>();
                DetectItem(faces, ugray, noses, NoseFileName);
                foreach (var nose in noses)
                {
                    CvInvoke.Rectangle(image, nose, new Bgr(Color.BlueViolet).MCvScalar, 3);
                }
            }
            return(image);
        }
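DetectItem is defined elsewhere; a minimal sketch consistent with its usage, searching each face rectangle with the given cascade and offsetting the hits into whole-image coordinates (the same pattern Example No. 41 uses):

        private static void DetectItem(List <Rectangle> faces, UMat ugray, List <Rectangle> items, string cascadeFileName)
        {
            using (var cascade = new CascadeClassifier(cascadeFileName))
            {
                foreach (var face in faces)
                {
                    using (UMat faceRegion = new UMat(ugray, face))
                    {
                        foreach (var item in cascade.DetectMultiScale(faceRegion, 1.1, 10, new Size(20, 20)))
                        {
                            var r = item;
                            r.Offset(face.X, face.Y); //convert to whole-image coordinates
                            items.Add(r);
                        }
                    }
                }
            }
        }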
Example No. 36
        public List <ImageDetectResult> DetectFaces(string path)
        {
            var result = new List <ImageDetectResult>();

            try
            {
                if (RuntimeInformation.IsOSPlatform(OSPlatform.OSX))
                {
                    // Comment from EMGUCV:
                    // Mac OS's security policy has been changing quite a bit in the last few OS releases; it
                    // may have blocked access to the temporary directory that Open CV used to cache OpenCL kernels.
                    // You can probably call "CvInvoke.UseOpenCL = false" to disable OpenCL if the running platform
                    // is MacOS. It will disable Open CL and no kernels will be cached on disk. Not an ideal solution,
                    // but unless Open CV addresses this in a future release (or Apple stops changing security policies
                    // that often) this will be a problem for Mac.
                    CvInvoke.UseOpenCL = false;
                }

                //var img = bitmap.ToMat();
                using var img     = CvInvoke.Imread(path);
                using var imgGray = new UMat();
                CvInvoke.CvtColor(img, imgGray, ColorConversion.Bgr2Gray);
                CvInvoke.EqualizeHist(imgGray, imgGray);

                foreach (var model in classifiers)
                {
                    var faces = model.Classifier.DetectMultiScale(imgGray, 1.1, 10, new Size(20, 20), Size.Empty).ToList();

                    if (faces.Any())
                    {
                        result.AddRange(faces.Distinct()
                                        .Select(x => new ImageDetectResult
                        {
                            Rect         = x,
                            Tag          = model.ClassifierTag.Transform(To.SentenceCase),
                            Service      = "Emgu",
                            ServiceModel = model.ClassifierFile
                        }));
                    }
                }

                return(DetectDupeRects(result));
            }
            catch (Exception ex)
            {
                Console.WriteLine($"Exception: {ex}");
            }

            return(result);
        }
Example No. 37
        public void SelectAreaToEnd(Rectangle selRoi, UMat magT, UMat phT)
        {
            ProcessCopyOverNCenter(magT, phT, selRoi,
                                   out _, out _,
                                   out _, out Point selRoiMaxMagLoc,
                                   out this.ctrRoi, out this.ctrRoiMaxMagLoc,
                                   out this.fTLtdSelRoi, this.magFoT, this.phFoT);
            this.fTSelRoiMaxMagAbsLoc = selRoi.Location + (Size)selRoiMaxMagLoc;


            //continue the chain
            InputParametersToEnd(this.rDist, this.sensorPxWidth, this.sensorPxHeight, this.wavelength,
                                 this.magFoT, this.phFoT, this.ctrRoi, this.ctrRoiMaxMagLoc, ref this.unitPhDiffDM);
        }
Example No. 38
        public static void Detect(string inputName, string haarData, Byte[] data)
        {
            Mat img          = CvInvoke.Imread(inputName);
            var faceDetector = new CascadeClassifier(haarData);

            UMat imgGray = new UMat();

            CvInvoke.CvtColor(img, imgGray, ColorConversion.Bgr2Gray);
            foreach (Rectangle face in faceDetector.DetectMultiScale(imgGray, 1.1, 10, new Size(20, 20), Size.Empty))
            {
                CvInvoke.Rectangle(img, face, new MCvScalar(255, 255, 255));
            }
            img.CopyTo(data);
        }
Example No. 39
        /// <summary>
        /// Convert the standard vector to an array of UMat
        /// </summary>
        /// <returns>An array of UMat</returns>
        public UMat[] ToArray()
        {
            int size = Size;

            UMat[] res = new UMat[size];
            for (int i = 0; i < size; i++)
            {
                res[i] = this[i];
            }
            return(res);
        }
Example No. 40
        /// <summary>
        /// Compute the red pixel mask for the given image.
        /// A red pixel is a pixel where:  20 &lt; hue &lt; 160 AND saturation &gt; 10
        /// </summary>
        /// <param name="image">The color image to find red mask from</param>
        /// <param name="mask">The red pixel mask</param>
        private static void GetRedPixelMask(IInputArray image, IInputOutputArray mask)
        {
            bool useUMat;

            using (InputOutputArray ia = mask.GetInputOutputArray())
                useUMat = ia.IsUMat;

            IInputOutputArray hsv;
            IInputOutputArray s;

            if (useUMat)
            {
                hsv = new UMat();
                s   = new UMat();
            }
            else
            {
                hsv = new Mat();
                s   = new Mat();
            }

            try
            {
                CvInvoke.CvtColor(image, hsv, ColorConversion.Bgr2Hsv);
                CvInvoke.ExtractChannel(hsv, mask, 0);
                CvInvoke.ExtractChannel(hsv, s, 1);

                //the mask for hue less than 20 or larger than 160
                using (ScalarArray lower = new ScalarArray(20))
                    using (ScalarArray upper = new ScalarArray(160))
                        CvInvoke.InRange(mask, lower, upper, mask);
                CvInvoke.BitwiseNot(mask, mask);

                //s is the mask for saturation of at least 10, this is mainly used to filter out white pixels
                CvInvoke.Threshold(s, s, 10, 255, ThresholdType.Binary);
                CvInvoke.BitwiseAnd(mask, s, mask, null);
            }
            finally
            {
                if (useUMat)
                {
                    (hsv as UMat)?.Dispose();
                }
                else
                {
                    (hsv as Mat)?.Dispose();
                }
            }
        }
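A hypothetical caller (the file name is illustrative):

            using (Mat image = CvInvoke.Imread("stop-sign.jpg", ImreadModes.Color))
            using (Mat mask = new Mat())
            {
                GetRedPixelMask(image, mask);
                CvInvoke.Imshow("red pixel mask", mask);
                CvInvoke.WaitKey(0);
            }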
Example No. 41
        public static void Detect(Mat image, String faceFileName, 
            String eyeFileName, List<Rectangle> faces, List<Rectangle> eyes)
        {
            //Read the HaarCascade objects
            using (CascadeClassifier face = new CascadeClassifier(faceFileName))
            using (CascadeClassifier eye = new CascadeClassifier(eyeFileName))
            {
                using (UMat ugray = new UMat())
                {
                    //EqualizeHist requires a single-channel input, so convert to grayscale first
                    CvInvoke.CvtColor(image, ugray, ColorConversion.Bgr2Gray);

                    //normalizes brightness and increases contrast of the image
                    CvInvoke.EqualizeHist(ugray, ugray);

                    //Detect the faces from the gray scale image and store the locations as rectangles
                    Rectangle[] facesDetected = face.DetectMultiScale(
                       ugray,
                       1.1,
                       10,
                       new Size(20, 20));

                    faces.AddRange(facesDetected);

                    foreach (Rectangle f in facesDetected)
                    {
                        //Get the region of interest on the faces
                        using (UMat faceRegion = new UMat(ugray, f))
                        {
                            Rectangle[] eyesDetected = eye.DetectMultiScale(
                               faceRegion,
                               1.1,
                               10,
                               new Size(20, 20));

                            foreach (Rectangle e in eyesDetected)
                            {
                                Rectangle eyeRect = e;
                                eyeRect.Offset(f.X, f.Y);
                                eyes.Add(eyeRect);
                            }
                        }
                    }
                }
            }
        }
Example No. 42
        private static UMat FilterPlate(UMat plate)
        {
            UMat thresh = new UMat();
            CvInvoke.Threshold(plate, thresh, 120, 255, ThresholdType.BinaryInv);
            //Image<Gray, Byte> thresh = plate.ThresholdBinaryInv(new Gray(120), new Gray(255));

            Size plateSize = plate.Size;
            using (Mat plateMask = new Mat(plateSize.Height, plateSize.Width, DepthType.Cv8U, 1))
            using (Mat plateCanny = new Mat())
            using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
            {
                plateMask.SetTo(new MCvScalar(255.0));
                CvInvoke.Canny(plate, plateCanny, 100, 50);
                CvInvoke.FindContours(plateCanny, contours, null, RetrType.External, ChainApproxMethod.ChainApproxSimple);

                int count = contours.Size;
                for (int i = 1; i < count; i++)
                {
                    using (VectorOfPoint contour = contours[i])
                    {

                        Rectangle rect = CvInvoke.BoundingRectangle(contour);
                        if (rect.Height > (plateSize.Height >> 1))
                        {
                            rect.X -= 1;
                            rect.Y -= 1;
                            rect.Width += 2;
                            rect.Height += 2;
                            Rectangle roi = new Rectangle(Point.Empty, plate.Size);
                            rect.Intersect(roi);
                            CvInvoke.Rectangle(plateMask, rect, new MCvScalar(), -1);
                            //plateMask.Draw(rect, new Gray(0.0), -1);
                        }
                    }

                }

                thresh.SetTo(new MCvScalar(), plateMask);
            }

            CvInvoke.Erode(thresh, thresh, null, new Point(-1, -1), 1, BorderType.Constant,
                CvInvoke.MorphologyDefaultBorderValue);
            CvInvoke.Dilate(thresh, thresh, null, new Point(-1, -1), 1, BorderType.Constant,
                CvInvoke.MorphologyDefaultBorderValue);

            return thresh;
        }
Example No. 43
      public void TestOclKernel()
      {
         if (CvInvoke.HaveOpenCL && CvInvoke.UseOpenCL)
         {

            OclDevice defaultDevice = OclDevice.Default;

            UMat umat = new UMat(256, 256, DepthType.Cv8U, 1);
            umat.SetTo(new MCvScalar(8));

            int rowsPerWI = 1;
            int cn = 1;
            
            String buildOpts = String.Format("-D rowsPerWI={0} -D cn={1} -D srcT1_C1=uchar -D dstT_C1=uchar", rowsPerWI, cn);

            String sourceStr = @"
__kernel void mytest(__global const uchar * srcptr1, int srcstep1, int srcoffset1, 
                 __global uchar *dstptr, int dststep, int dstoffset,
                 int rows, int cols )
{
               int x = get_global_id(0);
               int y0 = get_global_id(1) * rowsPerWI;

               if (x < cols)
               {
                  int src1_index = mad24(y0, srcstep1, mad24(x, (int)sizeof(srcT1_C1) * cn, srcoffset1));
                  int dst_index = mad24(y0, dststep, mad24(x, (int)sizeof(dstT_C1) * cn, dstoffset));

                  for (int y = y0, y1 = min(rows, y0 + rowsPerWI); y < y1; ++y, src1_index += srcstep1, dst_index += dststep)
                  {
                     *(__global uchar*) (dstptr + dst_index)= *(srcptr1 + src1_index);
                  }
               }
            }";

            

            using (CvString errorMsg = new CvString())
            using (OclProgramSource ps = new OclProgramSource(sourceStr))
            using (OclKernel kernel = new OclKernel())
            {
               bool success = kernel.Create("mytest", ps, buildOpts, errorMsg);
               bool empty = kernel.Empty;
            }
         }
      }
Example No. 44
      public Simple3DReconstruction()
      {
         InitializeComponent();
         _left = CvInvoke.Imread("imL.png", LoadImageType.Color);
         _right = CvInvoke.Imread("imR.png", LoadImageType.Color);
         Mat disparityMap = new Mat();

         Stopwatch watch = Stopwatch.StartNew();
         UMat leftGray = new UMat();
         UMat rightGray = new UMat();
         CvInvoke.CvtColor(_left, leftGray, ColorConversion.Bgr2Gray);
         CvInvoke.CvtColor(_right, rightGray, ColorConversion.Bgr2Gray);
         Computer3DPointsFromStereoPair(leftGray, rightGray, disparityMap, out _points);
         watch.Stop();
         long disparityComputationTime = watch.ElapsedMilliseconds;

         //Display the disparity map
         imageBox1.Image = disparityMap;

         watch.Reset(); 
      }
Example No. 45
        private void button1_Click(object sender, EventArgs e)
        {
            String win1 = "Test Window"; //The name of the window
            CvInvoke.NamedWindow(win1); //Create the window using the specific name
            UMat img=new UMat();
            Image<Bgr, byte> old_img = new Image<Bgr, byte>(100, 100);
            Mat img2 = new Mat();
            long start = DateTime.Now.ToFileTime() / 10000; //file time is in 100 ns ticks; divide by 10,000 for milliseconds

            for (int i = 0; i < 200; i++)
            {

                //img = new UMat(100, 100,DepthType.Cv8U, 3); //Create a 3 channel image of 400x200

                //img.SetTo(new Bgr(255, 0, 0).MCvScalar); // set it to Blue color

                old_img = new Image<Bgr, byte>(1200, 1200);
                old_img.SetValue(new Bgr(255, 0, 0));

                //img2 = new Mat(1200, 1200, DepthType.Cv8U, 3);
                //img2.SetTo(new Bgr(255, 0, 0).MCvScalar);

                //Draw "Hello, world." on the image using the specific font
                CvInvoke.PutText(
                   old_img,
                   "Hello, world "+i,
                   new System.Drawing.Point(10, 80),
                   FontFace.HersheyComplex,
                   1.0,
                   new Bgr(0, 255, 0).MCvScalar);

            }
            Console.WriteLine(DateTime.Now.ToFileTime() / 10000 - start);
            start = DateTime.Now.ToFileTime() / 10000;

            CvInvoke.Imshow(win1, old_img); //Show the image
            CvInvoke.WaitKey(0);  //Wait for the key pressing event
            CvInvoke.DestroyWindow(win1); //Destroy the window if key is pressed
        }
Example No. 46
 public void TestUMatCreate()
 {
    if (CvInvoke.HaveOpenCL)
    {
       CvInvoke.UseOpenCL = true;
       using (UMat m1 = new UMat())
       {
          m1.Create(10, 12, CvEnum.DepthType.Cv8U, 1);
          EmguAssert.IsTrue(m1.Data == null);
       }
       CvInvoke.UseOpenCL = false;
       using (UMat m2 = new UMat())
       {
          m2.Create(10, 12, CvEnum.DepthType.Cv8U, 1);
          //EmguAssert.IsTrue(m2.Data != null);
       }
    } else
    {
       UMat m2 = new UMat();
       m2.Create(10, 12, CvEnum.DepthType.Cv8U, 1);
       //EmguAssert.IsTrue(m2.Data != null);
    }
 }
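When OpenCL is active and Data is null, the pixels are still reachable from the host. A minimal sketch, assuming UMat.GetMat maps the device buffer into a temporary Mat:

 using (UMat m = new UMat(10, 12, CvEnum.DepthType.Cv8U, 1))
 {
    m.SetTo(new MCvScalar(7));
    using (Mat host = m.GetMat(CvEnum.AccessType.Read)) //maps the OpenCL buffer for host reads
    {
       int nonZero = CvInvoke.CountNonZero(host); //120: all 10 x 12 pixels were set to 7
    }
 }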
Exemplo n.º 47
0
      public void TestRuntimeSerialize()
      {
         UMat img = new UMat(100, 80, DepthType.Cv8U, 3);

         using (MemoryStream ms = new MemoryStream())
         {
            //img.SetRandNormal(new MCvScalar(100, 100, 100), new MCvScalar(50, 50, 50));
            //img.SerializationCompressionRatio = 9;
            CvInvoke.SetIdentity(img, new MCvScalar(1, 2, 3));
            System.Runtime.Serialization.Formatters.Binary.BinaryFormatter
                formatter = new System.Runtime.Serialization.Formatters.Binary.BinaryFormatter();
            formatter.Serialize(ms, img);
            Byte[] bytes = ms.ToArray(); //ToArray copies only the written bytes; GetBuffer would include unused capacity

            using (MemoryStream ms2 = new MemoryStream(bytes))
            {
               Object o = formatter.Deserialize(ms2);
               UMat img2 = (UMat)o;
               EmguAssert.IsTrue(img.Equals(img2));
            }
         }
      }
Exemplo n.º 48
0
        private void ProcessFrame(object sender, EventArgs arg)
        {
            Mat frame = new Mat();
            Image<Bgr, Byte> frame1;
            frameNum = _capture.GetCaptureProperty(Emgu.CV.CvEnum.CapProp.PosFrames);
            _capture.Retrieve(frame, 0);
            frame1 = frame.ToImage<Bgr, Byte>();
            frame1 = frame1.Resize(.5, Emgu.CV.CvEnum.Inter.Cubic);
            frame = frame1.Mat;
            //MessageBox.Show(_capture.Height + " " + _capture.Width + "\n" + frame1.Height + " " + frame1.Width);
            if (!frame.IsEmpty) //Retrieve leaves the Mat empty (never null) when no frame is available
            {
                using (UMat ugray = new UMat())
                {
                    CvInvoke.CvtColor(frame, ugray, Emgu.CV.CvEnum.ColorConversion.Bgr2Gray);
                    CvInvoke.EqualizeHist(ugray, ugray);
                    Rectangle[] breastDetected = cascadeBreast.DetectMultiScale(
                       ugray,
                       1.1,
                       30,
                       new Size(20, 20));
                    /*Rectangle[] pussyDetected = cascadePuss.DetectMultiScale(
                       ugray,
                       1.1,
                       30,
                       new Size(20, 20));
                    Rectangle[] dickDetected = cascadePen.DetectMultiScale(
                       ugray,
                       1.1,
                       35,
                       new Size(20, 20));*/
                    progressBar1.Invoke(new MethodInvoker(delegate
                    {
                        progressBar1.Increment(1);
                        label1.Text = frameNum.ToString();
                    }));
                    
                }
                
            }
            

        }
Exemplo n.º 49
0
        public static void detect1(Mat image, String fileName, List<Rectangle> breast, bool tryUseCuda, bool tryUseOpenCL,out long detectionTime)
        {

            //Many OpenCL functions require an OpenCL-compatible GPU device.
            //As of OpenCV 3.0-alpha, OpenCV will crash if OpenCL is enabled and only an OpenCL-compatible CPU device is present,
            //so we call CvInvoke.HaveOpenCLCompatibleGpuDevice instead of CvInvoke.HaveOpenCL (which also returns true on a system that only has CPU OpenCL devices).
            //Note: the tryUseCuda parameter is not used in this variant.
            CvInvoke.UseOpenCL = tryUseOpenCL && CvInvoke.HaveOpenCLCompatibleGpuDevice;

            Stopwatch watch;
            //Read the HaarCascade object
            using (CascadeClassifier face = new CascadeClassifier(fileName))
            {
                watch = Stopwatch.StartNew();
                using (UMat ugray = new UMat())
                {
                    CvInvoke.CvtColor(image, ugray, Emgu.CV.CvEnum.ColorConversion.Bgr2Gray);

                    //normalizes brightness and increases contrast of the image
                    CvInvoke.EqualizeHist(ugray, ugray);

                    //Detect the objects in the gray scale image and store the locations as rectangles
                    Rectangle[] facesDetected = face.DetectMultiScale(
                       ugray,
                       1.1,
                       10,
                       new Size(20, 20));

                    breast.AddRange(facesDetected);


                }
                watch.Stop();
            }

            detectionTime = watch.ElapsedMilliseconds;
            
        }
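A hedged usage sketch of detect1 (the file names are placeholders):

        List<Rectangle> regions = new List<Rectangle>();
        long detectionTime;
        using (Mat image = CvInvoke.Imread("frame.png", LoadImageType.Color))
        {
            detect1(image, "cascade.xml", regions, false, true, out detectionTime);
            foreach (Rectangle r in regions)
                CvInvoke.Rectangle(image, r, new Bgr(Color.Red).MCvScalar, 2); //mark each detection
            Console.WriteLine("{0} region(s) found in {1} ms", regions.Count, detectionTime);
        }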
Exemplo n.º 50
0
 public void TestAccumulateWeighted()
 {
    int startValue = 50;
    Image<Gray, Single> img1 = new Image<Gray, float>(100, 40, new Gray(100));
    Image<Gray, Single> acc = new Image<Gray, float>(100, 40, new Gray(startValue));
    //IImage img = img2;
    //img1.AccumulateWeighted(acc, 0.5);
    CvInvoke.AccumulateWeighted(img1, acc, 0.3, null); //acc = (1 - 0.3) * acc + 0.3 * img1
    TestOpenCL(delegate
             {
                UMat src = img1.ToUMat();
                UMat result = new UMat(img1.Rows, img1.Cols, CvEnum.DepthType.Cv32F, 1);
                
                result.SetTo(new MCvScalar(startValue), null);
                //IImage img = img2;
                //img1.AccumulateWeighted(result, 0.5);
                CvInvoke.AccumulateWeighted(src, result, 0.3, null);
                Image<Gray, Single> tmp = new Image<Gray, float>(img1.Size);
                result.CopyTo(tmp, null);
                CvInvoke.AbsDiff(acc, result, result);
                int nonZeroCount = CvInvoke.CountNonZero(result);
                EmguAssert.IsTrue(nonZeroCount == 0);
             });
 }
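AccumulateWeighted computes the running average acc = (1 - alpha) * acc + alpha * src, so with acc = 50, src = 100 and alpha = 0.3 every pixel should end at 0.7 * 50 + 0.3 * 100 = 65. A small check along those lines:

 Image<Gray, Single> src = new Image<Gray, float>(100, 40, new Gray(100));
 Image<Gray, Single> average = new Image<Gray, float>(100, 40, new Gray(50));
 CvInvoke.AccumulateWeighted(src, average, 0.3, null);
 double[] minVals, maxVals;
 Point[] minLocs, maxLocs;
 average.MinMax(out minVals, out maxVals, out minLocs, out maxLocs);
 //both minVals[0] and maxVals[0] should be 65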
Exemplo n.º 51
0
      private void FindLicensePlate(
         VectorOfVectorOfPoint contours, int[,] hierachy, int idx, IInputArray gray, IInputArray canny,
         List<IInputOutputArray> licensePlateImagesList, List<IInputOutputArray> filteredLicensePlateImagesList, List<RotatedRect> detectedLicensePlateRegionList,
         List<String> licenses)
      {
         for (; idx >= 0;  idx = hierachy[idx,0])
         {
            int numberOfChildren = GetNumberOfChildren(hierachy, idx);      
            //if it does not contain any children (characters), it is not a license plate region
            if (numberOfChildren == 0) continue;

            using (VectorOfPoint contour = contours[idx])
            {
               if (CvInvoke.ContourArea(contour) > 400)
               {
                  if (numberOfChildren < 3)
                  {
                     //If the contour has less than 3 children, it is not a license plate (assuming a license plate has at least 3 characters).
                     //However, we should search the children of this contour to see if any of them is a license plate.
                     FindLicensePlate(contours, hierachy, hierachy[idx, 2], gray, canny, licensePlateImagesList,
                        filteredLicensePlateImagesList, detectedLicensePlateRegionList, licenses);
                     continue;
                  }

                  RotatedRect box = CvInvoke.MinAreaRect(contour);
                  if (box.Angle < -45.0)
                  {
                     float tmp = box.Size.Width;
                     box.Size.Width = box.Size.Height;
                     box.Size.Height = tmp;
                     box.Angle += 90.0f;
                  }
                  else if (box.Angle > 45.0)
                  {
                     float tmp = box.Size.Width;
                     box.Size.Width = box.Size.Height;
                     box.Size.Height = tmp;
                     box.Angle -= 90.0f;
                  }

                  double whRatio = (double) box.Size.Width/box.Size.Height;
                  if (!(3.0 < whRatio && whRatio < 10.0))
                     //if (!(1.0 < whRatio && whRatio < 2.0))
                  {
                     //if the width-to-height ratio is not in the expected range, it is not a license plate;
                     //however, we should search the children of this contour to see if any of them is a license plate
                     //Contour<Point> child = contours.VNext;
                     if (hierachy[idx, 2] > 0)
                        FindLicensePlate(contours, hierachy, hierachy[idx, 2], gray, canny, licensePlateImagesList,
                           filteredLicensePlateImagesList, detectedLicensePlateRegionList, licenses);
                     continue;
                  }

                  using (UMat tmp1 = new UMat())
                  using (UMat tmp2 = new UMat())
                  {
                     PointF[] srcCorners = box.GetVertices();
                     
                     PointF[] destCorners = new PointF[] {
                        new PointF(0, box.Size.Height - 1),
                        new PointF(0, 0),
                        new PointF(box.Size.Width - 1, 0), 
                        new PointF(box.Size.Width - 1, box.Size.Height - 1)};
                     
                     using (Mat rot = CvInvoke.GetAffineTransform(srcCorners, destCorners))
                     {
                        CvInvoke.WarpAffine(gray, tmp1, rot, Size.Round(box.Size));           
                     }

                     //resize the license plate so the font size is roughly 10-12; this gives better accuracy from Tesseract
                     Size approxSize = new Size(240, 180);
                     double scale = Math.Min(approxSize.Width/box.Size.Width, approxSize.Height/box.Size.Height);
                     Size newSize = new Size( (int)Math.Round(box.Size.Width*scale),(int) Math.Round(box.Size.Height*scale));
                     CvInvoke.Resize(tmp1, tmp2, newSize, 0, 0, Inter.Cubic);

                     //remove some pixels from the edge
                     int edgePixelSize = 2;
                     Rectangle newRoi = new Rectangle(new Point(edgePixelSize, edgePixelSize),
                        tmp2.Size - new Size(2*edgePixelSize, 2*edgePixelSize));
                     UMat plate = new UMat(tmp2, newRoi);

                     UMat filteredPlate = FilterPlate(plate);

                     Tesseract.Character[] words;
                     StringBuilder strBuilder = new StringBuilder();
                     using (UMat tmp = filteredPlate.Clone())
                     {
                        _ocr.Recognize(tmp);
                        words = _ocr.GetCharacters();

                        if (words.Length == 0) continue;

                        for (int i = 0; i < words.Length; i++)
                        {
                           strBuilder.Append(words[i].Text);
                        }
                     }

                     licenses.Add(strBuilder.ToString());
                     licensePlateImagesList.Add(plate);
                     filteredLicensePlateImagesList.Add(filteredPlate);
                     detectedLicensePlateRegionList.Add(box);

                  }
               }
            }
         }
      }
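FilterPlate is not shown here. A minimal sketch, assuming its job is to binarize the cropped plate so the OCR sees high-contrast characters; the full sample this appears to come from also removes contours that are too large to be characters:

      private static UMat FilterPlate(UMat plate)
      {
         UMat thresh = new UMat();
         //dark characters on a light plate: BinaryInv makes the characters white
         CvInvoke.Threshold(plate, thresh, 120, 255, ThresholdType.BinaryInv);
         return thresh;
      }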
Exemplo n.º 52
0
        private void timer1_Tick(object sender, EventArgs e)
        {
            if (time == 10)
            {
                Mat frame = new Mat();
                capture.Retrieve(frame, 0);
                Mat grayVideo = new Mat();
                CvInvoke.CvtColor(frame, grayVideo, ColorConversion.Bgr2Gray);
                UMat videoDescriptors = new UMat();
                VectorOfKeyPoint videoKeyPoints = new VectorOfKeyPoint();
                calculatedescriptors(grayVideo, videoDescriptors, videoKeyPoints);
                VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch();

                BFMatcher matcher = new BFMatcher(DistanceType.L2);
                matcher.Add(originalImageDescriptors);
                matcher.KnnMatch(videoDescriptors, matches, 2, null);
                Mat mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                mask.SetTo(new MCvScalar(255));

                Features2DToolbox.VoteForUniqueness(matches, 0.8, mask);
                Mat homography = new Mat();
                int nonZeroCount = CvInvoke.CountNonZero(mask);
                if (nonZeroCount >= 4)
                {
                    nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(originalImageKeyPoints, videoKeyPoints,
                       matches, mask, 1.5, 20);
                    if (nonZeroCount >= 4)
                        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(originalImageKeyPoints, videoKeyPoints, matches, mask, 2);
                }

                Mat result = new Mat();
                Features2DToolbox.DrawMatches(grayImage, originalImageKeyPoints, grayVideo, videoKeyPoints,
                   matches, result, new MCvScalar(255, 255, 255), new MCvScalar(255, 255, 255), mask);
                if (!homography.IsEmpty) //the Mat starts empty and stays empty when no homography was found; it is never null
                {
                    //draw a rectangle along the projected model
                    Rectangle rect = new Rectangle(Point.Empty, grayImage.Size);
                    PointF[] pts = new PointF[]
                    {
                              new PointF(rect.Left, rect.Bottom),
                              new PointF(rect.Right, rect.Bottom),
                              new PointF(rect.Right, rect.Top),
                              new PointF(rect.Left, rect.Top)
                    };
                    pts = CvInvoke.PerspectiveTransform(pts, homography);

                    Point[] points = Array.ConvertAll<PointF, Point>(pts, Point.Round);
                    using (VectorOfPoint vp = new VectorOfPoint(points))
                    {
                        CvInvoke.Polylines(result, vp, true, new MCvScalar(255, 0, 0, 255), 5);
                    }
                    viewer.Image = result;
                    
                }

                
                time = 0; 
            }
            else
            {
                time++;
            }
        }
Exemplo n.º 53
0
 private void ProcessFrame(object sender, EventArgs arg)
 {
     Mat frame = new Mat();
     frameNum = _capture.GetCaptureProperty(Emgu.CV.CvEnum.CapProp.PosFrames);
     Image<Bgr, Byte> frame1;
     int breastCount = 0;
     int pussyCount = 0;
     int dickCount = 0;
     string temp = "";
     _capture.Retrieve(frame, 0);
     frame1 = frame.ToImage<Bgr, Byte>();
     frame1 = frame1.Resize(_rescale, Emgu.CV.CvEnum.Inter.Cubic);
     frame = frame1.Mat;
     //MessageBox.Show(_nn.ToString());
     if (!frame.IsEmpty && frameCtr == _frameskip) //the Mat is never null here; check for an empty frame instead
     {
         frameCtr = 0;
         using (UMat ugray = new UMat())
         {
             CvInvoke.CvtColor(frame, ugray, Emgu.CV.CvEnum.ColorConversion.Bgr2Gray);
             CvInvoke.EqualizeHist(ugray, ugray);
             Rectangle[] breastDetected = cascadeBreast.DetectMultiScale(
                ugray,
                1.1,
                _nn,
                new Size(20, 20));
             Rectangle[] pussyDetected = cascadePuss.DetectMultiScale(
                ugray,
                1.1,
                _nn,
                new Size(20, 20));
             Rectangle[] dickDetected = cascadePen.DetectMultiScale(
                ugray,
                1.1,
                50,
                new Size(20, 20));
             foreach (Rectangle b in breastDetected)
             {
                 CvInvoke.Rectangle(frame, b, new Bgr(Color.Red).MCvScalar, 2);
                 
             }
             foreach (Rectangle b in pussyDetected)
             {
                 CvInvoke.Rectangle(frame, b, new Bgr(Color.Blue).MCvScalar, 2);
             }
             foreach (Rectangle b in dickDetected)
             {
                 CvInvoke.Rectangle(frame, b, new Bgr(Color.Green).MCvScalar, 2);
             }
              breastCount = breastDetected.Length;
              pussyCount = pussyDetected.Length;
              dickCount = dickDetected.Length;
              totalBreastCount += breastCount;
              totalPussyCount += pussyCount;
              totalDickCount += dickCount;
              if ((breastCount > 0 || pussyCount > 0 || dickCount > 0) && _pauseAtDetection)
              {
                  _capture.Pause();
                  playToggle.Invoke(new MethodInvoker(delegate { playToggle.Text = "Start"; }));
                  _captureInProgress = false;
                   if (breastCount > 0)
                   {
                       temp += breastCount + " breast(s) found\n";
                   }
                   if (pussyCount > 0)
                   {
                       temp += pussyCount + " pussy(s) found\n";
                   }
                   if (dickCount > 0)
                   {
                       temp += dickCount + " dick(s) found\n";
                   }
                  MessageBox.Show(temp);
              }
         }            
     }
     if (_frameskip > 0)
     {
         frameCtr++;
     }
     label4.Invoke(new MethodInvoker(delegate
     {
         label4.Text = frameNum.ToString();
         logger(frameNum, breastCount, pussyCount, dickCount);
         totalBreast.Text = totalBreastCount.ToString();
         totalF.Text = totalPussyCount.ToString();
         totalG.Text = totalDickCount.ToString();
     }));
     imgBox.Image = frame;
     
 }
Exemplo n.º 54
0
 public abstract LineSegment2D[] GetLines(UMat cannyEdges);
Exemplo n.º 55
0
 public override LineSegment2D[] GetLines(UMat cannyEdges)
 {
     return CvInvoke.HoughLinesP(cannyEdges, Rho.Value, (Theta.Value * Math.PI) / 180, Threshold.Value, MinLineLength.Value, MaxLineGap.Value);
 }
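Rho, Theta, Threshold, MinLineLength and MaxLineGap above are UI-bound values in the original sample. A self-contained sketch with literal parameters (the file name is a placeholder):

 Mat img = CvInvoke.Imread("building.png", LoadImageType.Color); //placeholder file name
 UMat gray = new UMat();
 CvInvoke.CvtColor(img, gray, ColorConversion.Bgr2Gray);
 UMat cannyEdges = new UMat();
 CvInvoke.Canny(gray, cannyEdges, 180, 120);
 LineSegment2D[] lines = CvInvoke.HoughLinesP(
    cannyEdges,
    1,               //rho: distance resolution in pixels
    Math.PI / 180.0, //theta: angle resolution in radians (1 degree)
    20,              //threshold: minimum number of votes
    30,              //minimum line length
    10);             //maximum allowed gap between collinear segments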
Exemplo n.º 56
0
 /// <summary>
 /// Create an OpenCL Image2D object from a UMat
 /// </summary>
 /// <param name="src">The UMat from which to get the image properties and data</param>
 /// <param name="norm">Flag to enable the use of normalized channel data types</param>
 /// <param name="alias">Flag indicating that the image should alias the src UMat. If true, changes to the image or src will be reflected in both objects.</param>
 public Image2D(UMat src, bool norm = false, bool alias = false)
 {
    _ptr = OclInvoke.oclImage2DFromUMat(src, norm, alias);
 }
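A hedged construction sketch, assuming Image2D is disposable like the other OpenCL wrappers in this library:

 using (UMat src = new UMat(480, 640, DepthType.Cv8U, 1))
 using (Image2D img2d = new Image2D(src, norm: false, alias: true))
 {
    //with alias: true the image shares the buffer, so this write is visible through img2d as well
    src.SetTo(new MCvScalar(128));
 }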
Exemplo n.º 57
0
      public void TestOclPyr()
      {
         if (!CvInvoke.HaveOpenCL)
            return;

         Image<Gray, Byte> img = new Image<Gray, byte>(640, 480);
         //add some random noise to the image
         img.SetRandUniform(new MCvScalar(), new MCvScalar(255, 255, 255));
         Image<Gray, Byte> down = img.PyrDown();

         //Emgu.CV.UI.ImageViewer.Show(down);

         Image<Gray, Byte> up = down.PyrUp();

         UMat gImg = img.ToUMat();
         UMat gDown = new UMat();
         UMat gUp = new UMat();
         CvInvoke.PyrDown(gImg, gDown);
         CvInvoke.PyrUp(gDown, gUp);

         CvInvoke.AbsDiff(down, gDown.ToImage<Gray, Byte>(), down);
         CvInvoke.AbsDiff(up, gUp.ToImage<Gray, Byte>(), up);
         double[] minVals, maxVals;
         Point[] minLocs, maxLocs;
         down.MinMax(out minVals, out maxVals, out minLocs, out maxLocs);
         double maxVal = 0.0;
         for (int i = 0; i < maxVals.Length; i++)
         {
            if (maxVals[i] > maxVal)
               maxVal = maxVals[i];
         }
         Trace.WriteLine(String.Format("Max diff: {0}", maxVal));
         EmguAssert.IsTrue(maxVal <= 1.0);
         //Assert.LessOrEqual(maxVal, 1.0);

         up.MinMax(out minVals, out maxVals, out minLocs, out maxLocs);
         maxVal = 0.0;
         for (int i = 0; i < maxVals.Length; i++)
         {
            if (maxVals[i] > maxVal)
               maxVal = maxVals[i];
         }
         Trace.WriteLine(String.Format("Max diff: {0}", maxVal));
         EmguAssert.IsTrue(maxVal <= 1.0);
         //Assert.LessOrEqual(maxVal, 1.0);
      }
Exemplo n.º 58
0
      public void TestOclChangeDefaultDevice()
      {
         
         using (VectorOfOclPlatformInfo oclPlatformInfos = OclInvoke.GetPlatformsInfo())
         {
            if (oclPlatformInfos.Size > 0)
            {
               for (int i = 0; i < oclPlatformInfos.Size; i++)
               {
                  OclPlatformInfo platformInfo = oclPlatformInfos[i];

                  for (int j = 0; j < platformInfo.DeviceNumber; j++)
                  {
                     OclDevice device = platformInfo.GetDevice(j);

                     Trace.WriteLine(String.Format("{0}Setting device to {1}", Environment.NewLine, device.Name));
                     //OclDevice d = new OclDevice();
                     //d.Set(device.NativeDevicePointer);


                     OclDevice defaultDevice = OclDevice.Default;
                     defaultDevice.Set(device.NativeDevicePointer);

                     Trace.WriteLine(String.Format("Current OpenCL default device: {0}", defaultDevice.Name));

                      UMat m = new UMat(2048, 2048, DepthType.Cv8U, 3);
                      m.SetTo(new MCvScalar(100, 100, 100));
                      //warm-up pass: triggers kernel compilation and data upload before the timed run
                      CvInvoke.GaussianBlur(m, m, new Size(3, 3), 3);
                     Stopwatch watch = Stopwatch.StartNew();
                     m.SetTo(new MCvScalar(100, 100, 100));
                     CvInvoke.GaussianBlur(m, m, new Size(3, 3), 3);
                     watch.Stop();
                     Trace.WriteLine(String.Format("Device '{0}' time: {1} milliseconds", defaultDevice.Name, watch.ElapsedMilliseconds));
                     CvInvoke.OclFinish();
                  }
               }
            }

            Trace.WriteLine(Environment.NewLine  + "Disable OpenCL");
            CvInvoke.UseOpenCL = false;
            UMat m2 = new UMat(2048, 2048, DepthType.Cv8U, 3);
            m2.SetTo(new MCvScalar(100, 100, 100));
            //warm-up pass before the timed run
            CvInvoke.GaussianBlur(m2, m2, new Size(3, 3), 3);

            Stopwatch watch2 = Stopwatch.StartNew();
            m2.SetTo(new MCvScalar(100, 100, 100));
            CvInvoke.GaussianBlur(m2, m2, new Size(3, 3), 3);
            watch2.Stop();
            Trace.WriteLine(String.Format("CPU time: {0} milliseconds", watch2.ElapsedMilliseconds));
            CvInvoke.UseOpenCL = true;
         }
      }
Exemplo n.º 59
0
 public EmguType()
 {
     KeyPoints = new VectorOfKeyPoint();
     Descriptors = new UMat();
 }
Exemplo n.º 60
0
 /// <summary>
 /// Set a UMat parameter of the kernel
 /// </summary>
 /// <param name="i">The index of the parameter</param>
 /// <param name="umat">The UMat to bind to that parameter</param>
 /// <returns>The index of the next parameter to be set</returns>
 public int Set(int i, UMat umat)
 {
    return OclInvoke.oclKernelSetUMat(_ptr, i, umat);
 }
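A minimal binding sketch, assuming kernel is the OclKernel created in the example near the top of this page and that it takes two buffer arguments; each Set call returns the index of the next argument to bind:

 using (UMat src = new UMat(480, 640, DepthType.Cv8U, 1))
 using (UMat dst = new UMat(480, 640, DepthType.Cv8U, 1))
 {
    int idx = 0;
    idx = kernel.Set(idx, src); //argument 0
    idx = kernel.Set(idx, dst); //argument 1; idx is now 2
 }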