Exemplo n.º 1
0
 public void updateCamSize(System.Drawing.Size size, VideoFormat format)
 {
     // Re-allocate the raw frame buffer to match the camera's pixel format:
     // Y800 is 8-bit mono, Y16 is 16-bit mono. Any other subtype leaves the
     // buffer untouched.
     var subtype = format.FrameType.Subtype;
     if (subtype.Equals(MediaSubtypes.Y800))
     {
         raw.Create(size.Height, size.Width, DepthType.Cv8U, 1);
     }
     else if (subtype.Equals(MediaSubtypes.Y16))
     {
         raw.Create(size.Height, size.Width, DepthType.Cv16U, 1);
     }
 }
Exemplo n.º 2
0
 public virtual void updateImgSize(Size size)
 {
     // Re-allocate both display buffers at the new size: 3 channels (colour)
     // when a colour map is being shown, otherwise 1 channel (grayscale).
     int channels = showColorMap ? 3 : 1;
     disp_1.Create(size.Height, size.Width, DepthType.Cv8U, channels);
     disp_2.Create(size.Height, size.Width, DepthType.Cv8U, channels);
 }
Exemplo n.º 3
0
      public void TestOclKernel()
      {
         // Exercises a hand-written OpenCL "shift" kernel through the Emgu OCL
         // wrappers; the whole test is a no-op when no OpenCL runtime is usable.
         if (CvInvoke.HaveOpenCL && CvInvoke.UseOpenCL)
         {

            // NOTE(review): defaultDevice is unused below; reading Default may
            // also initialize the OpenCL context — confirm before removing.
            Ocl.Device defaultDevice = Ocl.Device.Default;

            // Build a 32-bit float grayscale image scaled into [0, 1].
            Mat img = EmguAssert.LoadMat("lena.jpg");
            Mat imgGray = new Mat();
            CvInvoke.CvtColor(img, imgGray, ColorConversion.Bgr2Gray);
            Mat imgFloat = new Mat();
            imgGray.ConvertTo(imgFloat, DepthType.Cv32F, 1.0/255);
            // Device-side source and a destination of identical geometry.
            UMat umat = imgFloat.GetUMat(AccessType.Read, UMat.Usage.AllocateDeviceMemory);
            UMat umatDst = new UMat();
            umatDst.Create(umat.Rows, umat.Cols, DepthType.Cv32F, umat.NumberOfChannels, UMat.Usage.AllocateDeviceMemory);

            // dstT is injected into the kernel source at build time (float here).
            String buildOpts = String.Format("-D dstT={0}", Ocl.OclInvoke.TypeToString(umat.Depth));

            // OpenCL C source: samples src through a linear-interpolation,
            // clamp-to-edge sampler at (x + shift_x, y + shift_y) and writes
            // the sampled value into dst.
            String sourceStr = @"
__constant sampler_t samplerLN = CLK_NORMALIZED_COORDS_FALSE | CLK_ADDRESS_CLAMP_TO_EDGE | CLK_FILTER_LINEAR;
__kernel void shift(const image2d_t src, float shift_x, float shift_y, __global uchar* dst, int dst_step, int dst_offset, int dst_rows, int dst_cols)
{
   int x = get_global_id(0);
   int y = get_global_id(1);
   if (x >= dst_cols) return;
   int dst_index = mad24(y, dst_step, mad24(x, (int)sizeof(dstT), dst_offset));
   __global dstT *dstf = (__global dstT *)(dst + dst_index);
   float2 coord = (float2)((float)x+0.5f+shift_x, (float)y+0.5f+shift_y);
   dstf[0] = (dstT)read_imagef(src, samplerLN, coord).x;
}";

            using (CvString errorMsg = new CvString())
            using (Ocl.ProgramSource ps = new Ocl.ProgramSource(sourceStr))
            using (Ocl.Kernel kernel = new Ocl.Kernel())
            using (Ocl.Image2D image2d = new Ocl.Image2D(umat))
            using (Ocl.KernelArg ka = new Ocl.KernelArg(Ocl.KernelArg.Flags.ReadWrite, umatDst))
            {
               float shiftX = 100.5f;
               float shiftY = -50.0f;

               // Compile the program and look up the 'shift' entry point.
               bool success = kernel.Create("shift", ps, buildOpts, errorMsg);
               EmguAssert.IsTrue(success, errorMsg.ToString());
               // Bind arguments in the order declared by the kernel signature;
               // the KernelArg expands into dst + step/offset/rows/cols.
               int idx = 0;
               idx = kernel.Set(idx, image2d);
               idx = kernel.Set(idx, ref shiftX);
               idx = kernel.Set(idx, ref shiftY);
               idx = kernel.Set(idx, ka);
               // One work-item per destination pixel; blocking run (true).
               IntPtr[] globalThreads = new IntPtr[] {new IntPtr(umat.Cols), new IntPtr(umat.Rows), new IntPtr(1) };
               success = kernel.Run(globalThreads, null, true);
               EmguAssert.IsTrue(success, "Failed to run the kernel");
               // Map the result to the host, rescale [0,1] floats to 8-bit, save.
               using (Mat matDst = umatDst.GetMat(AccessType.Read))
               using (Mat saveMat = new Mat())
               {
                  matDst.ConvertTo(saveMat, DepthType.Cv8U, 255.0);
                  saveMat.Save("tmp.jpg");
               }
            }
         }
      }
Exemplo n.º 4
0
 public override void updateImgSize(Size size)
 {
     // Let the base class resize its own buffers first.
     base.updateImgSize(size);
     // Rebuild the two constant-valued 8-bit single-channel masks at the
     // new frame size (252-filled first, then 255-filled, as before).
     foreach (var (mat, value) in new[] { (mat252, 252), (mat255, 255) })
     {
         mat.Create(size.Height, size.Width, DepthType.Cv8U, 1);
         mat.SetTo(new MCvScalar(value));
     }
 }
Exemplo n.º 5
0
        public void TestOclKernel()
        {
            // Exercises a hand-written OpenCL "shift" kernel through the Emgu
            // OCL wrappers; silently skipped when no OpenCL runtime is usable.
            if (CvInvoke.HaveOpenCL && CvInvoke.UseOpenCL)
            {
                // NOTE(review): defaultDevice is unused below; reading Default
                // may also initialize the OpenCL context — confirm before removing.
                Ocl.Device defaultDevice = Ocl.Device.Default;

                // Build a 32-bit float grayscale image scaled into [0, 1].
                Mat img     = EmguAssert.LoadMat("lena.jpg");
                Mat imgGray = new Mat();
                CvInvoke.CvtColor(img, imgGray, ColorConversion.Bgr2Gray);
                Mat imgFloat = new Mat();
                imgGray.ConvertTo(imgFloat, DepthType.Cv32F, 1.0 / 255);
                // Device-side source and a destination of identical geometry.
                UMat umat    = imgFloat.GetUMat(AccessType.Read, UMat.Usage.AllocateDeviceMemory);
                UMat umatDst = new UMat();
                umatDst.Create(umat.Rows, umat.Cols, DepthType.Cv32F, umat.NumberOfChannels, UMat.Usage.AllocateDeviceMemory);

                // dstT is injected into the kernel source at build time (float here).
                String buildOpts = String.Format("-D dstT={0}", Ocl.OclInvoke.TypeToString(umat.Depth));

                // OpenCL C source: samples src through a linear-interpolation,
                // clamp-to-edge sampler at (x + shift_x, y + shift_y) into dst.
                String sourceStr = @"
__constant sampler_t samplerLN = CLK_NORMALIZED_COORDS_FALSE | CLK_ADDRESS_CLAMP_TO_EDGE | CLK_FILTER_LINEAR;
__kernel void shift(const image2d_t src, float shift_x, float shift_y, __global uchar* dst, int dst_step, int dst_offset, int dst_rows, int dst_cols)
{
   int x = get_global_id(0);
   int y = get_global_id(1);
   if (x >= dst_cols) return;
   int dst_index = mad24(y, dst_step, mad24(x, (int)sizeof(dstT), dst_offset));
   __global dstT *dstf = (__global dstT *)(dst + dst_index);
   float2 coord = (float2)((float)x+0.5f+shift_x, (float)y+0.5f+shift_y);
   dstf[0] = (dstT)read_imagef(src, samplerLN, coord).x;
}";

                using (CvString errorMsg = new CvString())
                    using (Ocl.ProgramSource ps = new Ocl.ProgramSource(sourceStr))
                        using (Ocl.Kernel kernel = new Ocl.Kernel())
                            using (Ocl.Image2D image2d = new Ocl.Image2D(umat))
                                using (Ocl.KernelArg ka = new Ocl.KernelArg(Ocl.KernelArg.Flags.ReadWrite, umatDst))
                                {
                                    float shiftX = 100.5f;
                                    float shiftY = -50.0f;

                                    // Compile the program and look up the 'shift' entry point.
                                    bool success = kernel.Create("shift", ps, buildOpts, errorMsg);
                                    EmguAssert.IsTrue(success, errorMsg.ToString());
                                    // Bind arguments in the order declared by the kernel
                                    // signature; the KernelArg expands into
                                    // dst + step/offset/rows/cols.
                                    int idx = 0;
                                    idx = kernel.Set(idx, image2d);
                                    idx = kernel.Set(idx, ref shiftX);
                                    idx = kernel.Set(idx, ref shiftY);
                                    idx = kernel.Set(idx, ka);
                                    // One work-item per destination pixel; blocking run (true).
                                    IntPtr[] globalThreads = new IntPtr[] { new IntPtr(umat.Cols), new IntPtr(umat.Rows), new IntPtr(1) };
                                    success = kernel.Run(globalThreads, null, true);
                                    EmguAssert.IsTrue(success, "Failed to run the kernel");
                                    // Map the result back to the host, rescale the
                                    // [0,1] floats to 8-bit, and save to disk.
                                    using (Mat matDst = umatDst.GetMat(AccessType.Read))
                                        using (Mat saveMat = new Mat())
                                        {
                                            matDst.ConvertTo(saveMat, DepthType.Cv8U, 255.0);
                                            saveMat.Save("tmp.jpg");
                                        }
                                }
            }
        }
Exemplo n.º 6
0
 public void TestUMatCreate()
 {
     // Verify UMat allocation both with and without OpenCL available.
     if (!CvInvoke.HaveOpenCL)
     {
         UMat m2 = new UMat();
         m2.Create(10, 12, CvEnum.DepthType.Cv8U, 1);
         //EmguAssert.IsTrue(m2.Data != null);
         return;
     }

     // OpenCL path: allocate while the OpenCL backend is enabled.
     CvInvoke.UseOpenCL = true;
     using (UMat m1 = new UMat())
     {
         m1.Create(10, 12, CvEnum.DepthType.Cv8U, 1);
         EmguAssert.IsTrue(m1.GetData() != null);
     }
     // CPU path: same allocation with OpenCL switched off.
     CvInvoke.UseOpenCL = false;
     using (UMat m2 = new UMat())
     {
         m2.Create(10, 12, CvEnum.DepthType.Cv8U, 1);
     }
     //EmguAssert.IsTrue(m2.Data != null);
 }
Exemplo n.º 7
0
 public void TestUMatCreate()
 {
    // Verify UMat allocation both with and without OpenCL available.
    if (!CvInvoke.HaveOpenCL)
    {
       UMat m2 = new UMat();
       m2.Create(10, 12, CvEnum.DepthType.Cv8U, 1);
       //EmguAssert.IsTrue(m2.Data != null);
       return;
    }

    // OpenCL path: with device memory in use, the host-side Data
    // pointer is expected to be null.
    CvInvoke.UseOpenCL = true;
    using (UMat m1 = new UMat())
    {
       m1.Create(10, 12, CvEnum.DepthType.Cv8U, 1);
       EmguAssert.IsTrue(m1.Data == null);
    }
    // CPU path: same allocation with OpenCL switched off.
    CvInvoke.UseOpenCL = false;
    using (UMat m2 = new UMat())
    {
       m2.Create(10, 12, CvEnum.DepthType.Cv8U, 1);
    }
    //EmguAssert.IsTrue(m2.Data != null);
 }
Exemplo n.º 8
0
        public void ProcessCopyOverNCenter(UMat magT, UMat phT, Rectangle selRoi,
                                           out double selRoiMinMagVal, out double selRoiMaxMagVal,
                                           out Point selRoiMinMagLoc, out Point selRoiMaxMagLoc,
                                           out Rectangle ctrRoi, out Point ctrRoiMaxMagLoc,
                                           out Rectangle ltdSelRoi,
                                           UMat magFoT, UMat phFoT)
        {
            /// <summary>
            /// Copies the selected ROI of the transform onto a zero image so
            /// that its maximum-magnitude point lands just bottom-right of the
            /// centre (the 0 Hz bin); updates magFoT and phFoT in place.
            /// </summary>

            // Zero-filled destination UMats to "paste" the first-order's
            // transform into.
            magFoT.Create(magT.Rows, magT.Cols, DepthType.Cv32F, 1);
            magFoT.SetTo(new MCvScalar(0));
            magFoT.CopyTo(phFoT);
            if (_ManualCentering)
            {
                // Centre chosen manually by the user; the min/max statistics
                // are not computed in this mode.
                selRoiMinMagVal = 0;
                selRoiMaxMagVal = 0;
                selRoiMinMagLoc = new Point(0, 0);
                selRoiMaxMagLoc = fTView.GetManualCent() - (Size)selRoi.Location;
            }
            else
            {
                using (UMat magSelT = new UMat(magT, selRoi)) {
                    // MinMax reports per-channel extrema; the magnitude image
                    // has a single channel so only index [0] is used.
                    magSelT.MinMax(out double[] selRoiMinMagValues, out double[] selRoiMaxMagValues,
                                   out Point[] selRoiMinMagLocations, out Point[] selRoiMaxMagLocations);
                    selRoiMinMagVal = selRoiMinMagValues[0];
                    selRoiMaxMagVal = selRoiMaxMagValues[0];
                    selRoiMinMagLoc = selRoiMinMagLocations[0];
                    selRoiMaxMagLoc = selRoiMaxMagLocations[0];
                    if (_WeightCentering)
                    {
                        // Replace the single max point with the centroid of all
                        // pixels >= 30% of the max — more stable than one pixel.
                        // FIX: the comparison-result UMat was previously leaked
                        // (never disposed); both temporaries now sit in usings.
                        using (UMat threshold = magSelT.Clone())
                        using (UMat aboveThreshMask = new UMat()) {
                            threshold.SetTo(new MCvScalar((long)(selRoiMaxMagVal * 0.3)));
                            CvInvoke.Compare(magSelT, threshold, aboveThreshMask, CmpType.GreaterEqual);
                            Moments m = CvInvoke.Moments(aboveThreshMask, true);
                            selRoiMaxMagLoc = new Point((int)(m.M10 / m.M00), (int)(m.M01 / m.M00));
                        }
                    }
                }
            }
            // The 0 Hz point sits at the bottom-right of the centre because the
            // width and height are even (cropped earlier in SwitchQuadrants());
            // no +1 because coordinates start at index 0.
            Point foT0HzLoc = new Point(magFoT.Cols / 2, magFoT.Rows / 2);

            // ctrRoi on foT to paste into, placing the copied max value at
            // foT's 0 Hz point.
            ctrRoi = new Rectangle(foT0HzLoc - (Size)selRoiMaxMagLoc, selRoi.Size);
            // ctrRoi can extend past the image when the selection is larger
            // than (width or height)/2 and the max point is at a corner, so
            // clamp it. Clamping can shrink ctrRoi, so the source ROI must be
            // shrunk to match below.
            LimitRoiToImage2(magFoT, ref ctrRoi);
            // Re-derive the max location inside ctrRoi (it may move after
            // clamping), then go backwards and "paste" ctrRoi onto magT to
            // obtain the limited selection ROI.
            ctrRoiMaxMagLoc = foT0HzLoc - (Size)ctrRoi.Location;
            Point selRoiMaxMagAbsLoc = selRoi.Location + (Size)selRoiMaxMagLoc;            //location relative to origin of magT

            ltdSelRoi = new Rectangle(selRoiMaxMagAbsLoc - (Size)ctrRoiMaxMagLoc,
                                      ctrRoi.Size);
            // Finally, copy ltdSelRoi in T to ctrRoi in foT.
            using (UMat magLtdSelT = new UMat(magT, ltdSelRoi),
                   phLtdSelT = new UMat(phT, ltdSelRoi),
                   magCtrFoT = new UMat(magFoT, ctrRoi),
                   phCtrFoT = new UMat(phFoT, ctrRoi)) {
                magLtdSelT.CopyTo(magCtrFoT);
                phLtdSelT.CopyTo(phCtrFoT);
            }
        }
Exemplo n.º 9
0
        public void ProcessForwardT(UMat inImg, UMat outMagT, UMat outPhT, bool zeroPad = false, bool switchQuadrants = true)
        {
            ///Forward DFT of a 1-channel image; updates outMagT and outPhT
            ///in place with the magnitude and phase of the transform.
            ///Complex-valued input images are not supported, and quadrant
            ///rearranging doesn't support odd rows or cols.
            ///Reference: https://docs.opencv.org/master/d8/d01/tutorial_discrete_fourier_transform.html

            // Convert to 32-bit float: frequency-domain values are far larger
            // than spatial-domain values. outMagT's memory is reused as the
            // real plane because it gets overwritten at the end anyway.
            UMat re = outMagT;            //32-bit float real image, use memory from

            //outMagT cause it's gonna be updated anyway
            inImg.ConvertTo(re, DepthType.Cv32F);
            if (zeroPad)
            {
                // Pad to a faster DFT size before transforming.
                ZeroPadImage(re);
            }
            // Imaginary plane: zeros, same depth and size as the real plane.
            // outPhT's memory is reused for the same reason as above.
            UMat im = outPhT;                                           //imaginary

            im.Create(re.Rows, re.Cols, re.Depth, re.NumberOfChannels); //allocate memory so you can set it to zero array
            //if memory hasn't already been allocated for it
            im.SetTo(new MCvScalar(0));

            /// Quick excerpt about VectorOfUMat ownership semantics:
            /// vec.Dispose()/Clear() only disposes the underlying objects when
            /// the vector node is the FIRST variable that stored each UMat,
            /// e.g. VectorOfUMat vec = new VectorOfUMat(new UMat(), new UMat());
            ///      vec.Push(someUMat.Clone());
            /// In that case, taking "UMat one = vec[0];" and calling
            /// one.Dispose() does nothing. Conversely, if UMats were created
            /// in their own variables first and then pushed,
            ///      UMat one = new UMat(); vec.Push(one);
            /// vec.Dispose() does NOT dispose them — call one.Dispose() etc.
            /// Either way, vec[index].Dispose() does NOTHING.
            /// vec.Clear() behaves like Dispose() for the contained objects
            /// but leaves vec itself alive.
            //Dft accepts 2-channel images, so we use Merge to merge
            //our 2 1-channel images into a single 2-channel image.
            //Merge accepts object arrays, so we create a VectorOfUMat
            //of our 2 images to feed into Merge.

            VectorOfUMat vec  = this.vec;
            UMat         cImg = this.img32f2c;

            vec.Push(re);
            vec.Push(im);            //vec[0] = re, vec[1] = im
            ;
            CvInvoke.Merge(vec, cImg);
            CvInvoke.Dft(cImg, cImg, DxtType.Forward, 0);            //in-place DFT: reuse the same memory
            //switch quadrants while real/imaginary are still combined
            if (switchQuadrants)
            {
                SwitchQuadrants(cImg);
            }
            //split the 2-channel result back into 2 1-channel arrays
            CvInvoke.Split(cImg, vec); //vec[0] is reT, vec[1] is imT, they are new objects.
            CvInvoke.CartToPolar(vec[0], vec[1], outMagT, outPhT);
            vec.Clear();               //dispose reT and imT. TODO: find a way to avoid re-allocating reT and imT each call.
        }
Exemplo n.º 10
0
        public void TestOclKernel()
        {
            // Exercises a hand-written OpenCL kernel (a 3x3 gradient-magnitude
            // filter on 8-bit pixels) through the Emgu OCL wrappers; the test
            // is a no-op when no OpenCL runtime is usable.
            if (CvInvoke.HaveOpenCL && CvInvoke.UseOpenCL)
            {
                // Reading Device.Default may also initialize the OpenCL
                // context, so it is kept even though the value is unused.
                Ocl.Device defaultDevice = Ocl.Device.Default;

                Mat img     = EmguAssert.LoadMat("lena.jpg");
                Mat imgGray = new Mat();
                CvInvoke.CvtColor(img, imgGray, ColorConversion.Bgr2Gray);

                // FIX: the kernel below reads and writes uchar (8-bit) pixels,
                // so the source and destination stay Cv8U. The previous float
                // conversion would have been reinterpreted bytewise by the
                // uchar* kernel.
                UMat umat    = imgGray.GetUMat(AccessType.Read, UMat.Usage.AllocateDeviceMemory);
                UMat umatDst = new UMat();
                umatDst.Create(umat.Rows, umat.Cols, DepthType.Cv8U, umat.NumberOfChannels, UMat.Usage.AllocateDeviceMemory);

                String buildOpts = String.Format("-D dstT={0}", Ocl.OclInvoke.TypeToString(umat.Depth));

                String sourceStr = @"
__kernel void magnutude_filter_8u(
       __global const uchar* src, int src_step, int src_offset,
       __global uchar* dst, int dst_step, int dst_offset, int dst_rows, int dst_cols,
       float scale)
{
   int x = get_global_id(0);
   int y = get_global_id(1);
   if (x < dst_cols && y < dst_rows)
   {
       int dst_idx = y * dst_step + x + dst_offset;
       if (x > 0 && x < dst_cols - 1 && y > 0 && y < dst_rows - 2)
       {
           int src_idx = y * src_step + x + src_offset;
           int dx = (int)src[src_idx]*2 - src[src_idx - 1]          - src[src_idx + 1];
           int dy = (int)src[src_idx]*2 - src[src_idx - 1*src_step] - src[src_idx + 1*src_step];
           dst[dst_idx] = convert_uchar_sat(sqrt((float)(dx*dx + dy*dy)) * scale);
       }
       else
       {
           dst[dst_idx] = 0;
       }
   }
}";

                // FIX: this kernel takes raw buffers, not an image2d_t, so both
                // images are bound as KernelArgs. ReadOnlyNoSize expands into
                // src/src_step/src_offset; ReadWrite additionally passes
                // dst_rows/dst_cols — matching the kernel signature exactly.
                using (CvString errorMsg = new CvString())
                using (Ocl.ProgramSource ps = new Ocl.ProgramSource(sourceStr))
                using (Ocl.Kernel kernel = new Ocl.Kernel())
                using (Ocl.KernelArg kaSrc = new Ocl.KernelArg(Ocl.KernelArg.Flags.ReadOnlyNoSize, umat))
                using (Ocl.KernelArg kaDst = new Ocl.KernelArg(Ocl.KernelArg.Flags.ReadWrite, umatDst))
                {
                    float scale = 1.0f;

                    // FIX: the original requested a non-existent "myshift"
                    // kernel; the name must match the function in sourceStr.
                    bool success = kernel.Create("magnutude_filter_8u", ps, buildOpts, errorMsg);
                    EmguAssert.IsTrue(success, errorMsg.ToString());
                    int idx = 0;
                    idx = kernel.Set(idx, kaSrc);
                    idx = kernel.Set(idx, kaDst);
                    idx = kernel.Set(idx, ref scale);
                    // One work-item per destination pixel; blocking run (true).
                    IntPtr[] globalThreads = new IntPtr[] { new IntPtr(umat.Cols), new IntPtr(umat.Rows), new IntPtr(1) };
                    success = kernel.Run(globalThreads, null, true);
                    EmguAssert.IsTrue(success, "Failed to run the kernel");
                    // Result is already 8-bit; save it directly.
                    using (Mat matDst = umatDst.GetMat(AccessType.Read))
                    {
                        matDst.Save("tmp.jpg");
                    }
                }
            }
        }