Example #1
        public CVImage[] Split()
        {
            List<CVImage> channels = new List<CVImage>();

            int w = RegionOfInterest.Width;
            int h = RegionOfInterest.Height;

            CVImage c0 = Channels >= 1 ? new CVImage(w, h, this.Depth, 1) : null;
            CVImage c1 = Channels >= 2 ? new CVImage(w, h, this.Depth, 1) : null;
            CVImage c2 = Channels >= 3 ? new CVImage(w, h, this.Depth, 1) : null;
            CVImage c3 = Channels >= 4 ? new CVImage(w, h, this.Depth, 1) : null;

            Split(c0, c1, c2, c3);

            if (c0 != null)
            {
                channels.Add(c0);
            }
            if (c1 != null)
            {
                channels.Add(c1);
            }
            if (c2 != null)
            {
                channels.Add(c2);
            }
            if (c3 != null)
            {
                channels.Add(c3);
            }
            return(channels.ToArray());
        }
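A hedged round-trip sketch using Split together with the Merge and Release methods shown later in this listing (the frame variable is illustrative; for a BGR image the planes come back in blue, green, red order):

        CVImage[] planes = frame.Split();
        frame.Merge(planes[0], planes[1], planes[2]);   // recombine the blue, green and red planes
        foreach (CVImage plane in planes)
            plane.Release();                            // free the per-channel copies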
Example #2
        /// <summary>
        /// Finds the global minimum and maximum in an array or subarray.
        /// </summary>
        public static void CVMinMaxLoc(CVImage image,
                                       out double minVal,
                                       out double maxVal,
                                       out System.Drawing.Point minLocation,
                                       out System.Drawing.Point maxLocation,
                                       CVArr mask)
        {
            // Prepare out parameters:
            __CvPoint min_loc = new __CvPoint();
            __CvPoint max_loc = new __CvPoint();
            double    min_val = -1;
            double    max_val = -1;


            //CvArr tempMask = 0;
            //if (mask != nullptr) {
            //    tempMask = mask->Array;
            //}
            //CVArr tempMask = mask.Array;

            minLocation = new System.Drawing.Point(0, 0);
            maxLocation = new System.Drawing.Point(0, 0);

            // Native call to openCV cvMinMaxLoc:
            PInvoke.cvMinMaxLoc(
                new __CvArrPtr(image),
                out min_val, out max_val,
                out min_loc, out max_loc, new __CvArrPtr(mask));
            CVUtils.CheckLastError();
            minVal      = min_val;
            maxVal      = max_val;
            minLocation = new System.Drawing.Point(min_loc.x, min_loc.y);
            maxLocation = new System.Drawing.Point(max_loc.x, max_loc.y);
        }
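A minimal usage sketch, assuming the call is made from the class that declares CVMinMaxLoc and that passing a null mask means "no mask" (grayImage is illustrative):

        double minVal, maxVal;
        System.Drawing.Point minLoc, maxLoc;
        CVMinMaxLoc(grayImage, out minVal, out maxVal, out minLoc, out maxLoc, null);
        // minLoc / maxLoc now hold the coordinates of the darkest / brightest pixels.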
Example #3
        public CVHistogram CalcHistogram(int[] binSizes, CVPair[] binRanges, CVImage mask)
        {
            CVHistogram h = new CVHistogram(binSizes, binRanges);

            __IplImagePtr[] images = new __IplImagePtr[this.Channels];
            if (this.Channels == 1)
            {
                images[0] = this.Internal;
            }
            else
            {
                CVImage[] planes = this.Split();
                for (int i = 0; i < planes.Length; ++i)
                {
                    images[i] = planes[i].Internal;
                }
            }

            __CvArrPtr maskArr = IntPtr.Zero;

            if (mask != null)
            {
                maskArr = mask.Array;
            }

            PInvoke.cvCalcHist(images, h.Internal, 0, maskArr);
            CVUtils.CheckLastError();
            return(h);
        }
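A hedged usage sketch for a three-channel (BGR) image: 32 bins per channel over the full 0..255 range, with no mask (frame is illustrative):

        int[] bins = { 32, 32, 32 };
        CVPair[] ranges = { new CVPair(0, 255), new CVPair(0, 255), new CVPair(0, 255) };
        CVHistogram hist = frame.CalcHistogram(bins, ranges, null);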
Example #4
        public CVImage CopyRegion(System.Drawing.Rectangle rect)
        {
            CVImage roi = new CVImage(this);

            roi.RegionOfInterest = rect;
            return(roi);
        }
Example #5
        public CVImage CalcBackProject(CVHistogram histogram)
        {
            CVImage[] planes = Split();

            CVImage backProjection =
                new CVImage(
                    planes[0].RegionOfInterest.Width,
                    planes[0].RegionOfInterest.Height,
                    planes[0].Depth,
                    planes[0].Channels);

            __IplImagePtr[] iplImages = new __IplImagePtr[planes.Length];
            for (int i = 0; i < planes.Length; ++i)
            {
                iplImages[i] = planes[i].Internal;
            }

            PInvoke.cvCalcBackProject(iplImages, backProjection.Internal, histogram.Internal);
            CVUtils.CheckLastError();

            for (int i = 0; i < planes.Length; ++i)
            {
                planes[i].Release();
            }

            return(backProjection);
        }
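A hedged sketch of how back projection is typically combined with the other methods in this listing: build a histogram from a sample region and project it back onto the full frame (frame and selection are illustrative):

        CVImage patch = frame.CopyRegion(selection);       // region whose color distribution we want to find
        CVHistogram hist = patch.CalcHistogram(32, null);   // 32 bins per channel, no mask
        CVImage backProjection = frame.CalcBackProject(hist);
        // Bright pixels in backProjection mark areas of frame whose colors match the patch.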
Example #6
        /// <summary>
        /// Finds the global minimum and maximum in an array or subarray.
        /// </summary>
        public static void CVMinMaxLoc(CVImage image,
            out double minVal,
            out double maxVal,
            out System.Drawing.Point minLocation,
            out System.Drawing.Point maxLocation,
            CVArr mask)
        {
            // Prepare out parameters:
            __CvPoint min_loc = new __CvPoint();
            __CvPoint max_loc = new __CvPoint();
            double min_val = -1;
            double max_val = -1;

            //CvArr tempMask = 0;
            //if (mask != nullptr) {
            //    tempMask = mask->Array;
            //}
            //CVArr tempMask = mask.Array;

            minLocation = new System.Drawing.Point(0, 0);
            maxLocation = new System.Drawing.Point(0, 0);

            // Native call to openCV cvMinMaxLoc:
            PInvoke.cvMinMaxLoc(
                new __CvArrPtr(image),
                ref min_val, ref max_val,
                ref min_loc, ref max_loc, new __CvArrPtr(mask));

            minVal = min_val;
            maxVal = max_val;
            minLocation = new System.Drawing.Point(min_loc.x, min_loc.y);
            maxLocation = new System.Drawing.Point(max_loc.x, max_loc.y);
        }
Example #7
        public CVImage DrawContours()
        {
            CVImage grayscaled = (this.Channels == 1 ? this : this.ToGrayscale());

            __CvMemStoragePtr storage = PInvoke.cvCreateMemStorage(0);
            __CvSeqPtr        first_contour;
            CVImage           result = new CVImage(this.Width, this.Height, CVDepth.Depth8U, 3);

            unsafe
            {
                int num_contours = PInvoke.cvFindContours(
                    grayscaled.Internal,
                    storage,
                    out first_contour,
                    sizeof(__CvContour),
                    CV_RETR.CV_RETR_EXTERNAL,
                    CV_CHAIN.CV_CHAIN_APPROX_SIMPLE,
                    new __CvPoint(0, 0)
                    );

                // Draw every contour found onto the output image by walking the
                // h_next links of the contour sequence returned by cvFindContours.
                for (__CvSeq* cont = first_contour.ToPointer();
                     cont != null;
                     cont = cont->_cvSequenceFields.__cvTreeNodeFields.h_next.ToPointer())
                {
                    PInvoke.cvDrawContours(result.Array, new __CvSeqPtr(cont), new __CvScalar(255, 0, 0), new __CvScalar(0, 0, 0), 0, (int)CVGlobalConsts.CV_FILLED);
                    CVUtils.CheckLastError();
                }
            }

            PInvoke.cvReleaseMemStorage(ref storage);
            CVUtils.CheckLastError();
            return(result);
        }
        public CVImage Smooth(SmoothType smoothtype, int param1, int param2, double param3, double param4)
        {
            CVImage dst = this.Clone();

            PInvoke.cvSmooth(new __CvArrPtr(this), new __CvArrPtr(dst), (int)smoothtype, param1, param2, param3, param4);
            return(dst);
        }
        /// <summary>
        /// Convolves the image with the kernel
        /// </summary>
        public CVImage Filter2D(CVMat kernel, Point anchor)
        {
            CVImage dst = this.Clone();

            PInvoke.cvFilter2D(new __CvArrPtr(this), new __CvArrPtr(dst), new __CvMatPtr(kernel), new __CvPoint(anchor));
            return(dst);
        }
        /// <summary>
        /// Compares a template against overlapped image regions.
        /// </summary>
        /// <param name="image">
        /// Image where the search is running.
        /// It should be 8-bit or 32-bit floating-point.
        /// </param>
        /// <param name="templ">
        /// Searched template;
        /// must be not greater than the source image
        /// and of the same data type as the image.
        /// </param>
        /// <param name="result">
        /// A map of comparison results; single-channel
        /// 32-bit floating-point.
        /// If image is W×H and templ is w×h then result must be (W-w+1)×(H-h+1).
        /// </param>
        /// <param name="method">
        /// Specifies the way the template must be compared with
        /// image regions (see below).
        /// </param>
        /// <remarks>
        /// The function cvMatchTemplate is similar to cvCalcBackProjectPatch.
        /// It slides through image, compares overlapped patches of size w×h with
        /// templ using the specified method and stores the comparison results to
        /// result. The formulas for the different comparison methods are
        /// (I denotes the image, T the template, R the result; the summation runs
        /// over the template and/or the image patch: x'=0..w-1, y'=0..h-1):
        ///
        /// method=CV_TM_SQDIFF:
        /// R(x,y) = sum_{x',y'} [T(x',y') - I(x+x',y+y')]^2
        ///
        /// method=CV_TM_SQDIFF_NORMED:
        /// R(x,y) = sum_{x',y'} [T(x',y') - I(x+x',y+y')]^2 / sqrt(sum_{x',y'} T(x',y')^2 * sum_{x',y'} I(x+x',y+y')^2)
        ///
        /// method=CV_TM_CCORR:
        /// R(x,y) = sum_{x',y'} [T(x',y') * I(x+x',y+y')]
        ///
        /// method=CV_TM_CCORR_NORMED:
        /// R(x,y) = sum_{x',y'} [T(x',y') * I(x+x',y+y')] / sqrt(sum_{x',y'} T(x',y')^2 * sum_{x',y'} I(x+x',y+y')^2)
        ///
        /// method=CV_TM_CCOEFF:
        /// R(x,y) = sum_{x',y'} [T'(x',y') * I'(x+x',y+y')],
        /// where T'(x',y')     = T(x',y')      - 1/(w*h) * sum_{x'',y''} T(x'',y'')
        ///       I'(x+x',y+y') = I(x+x',y+y')  - 1/(w*h) * sum_{x'',y''} I(x+x'',y+y'')
        ///
        /// method=CV_TM_CCOEFF_NORMED:
        /// R(x,y) = sum_{x',y'} [T'(x',y') * I'(x+x',y+y')] / sqrt(sum_{x',y'} T'(x',y')^2 * sum_{x',y'} I'(x+x',y+y')^2)
        ///
        /// After the function finishes the comparison, the best matches can be found
        /// as global minimums (CV_TM_SQDIFF*) or maximums (CV_TM_CCORR* and
        /// CV_TM_CCOEFF*) using the cvMinMaxLoc function; see the usage sketch after
        /// this method. For a color image and template, the summation in the
        /// numerator and each sum in the denominator are done over all the channels
        /// (and separate mean values are used for each channel).
        /// </remarks>
        public static CVImage MatchTemplate(CVImage image,
                                            CVImage templateToSearch,
                                            TemplateMatchMethod method)
        {
            //specify the size needed by the match function
            int resultW = image.Width - templateToSearch.Width + 1;
            int resultH = image.Height - templateToSearch.Height + 1;

            if (image.Channels > 1)
            {
                throw new CVException("CVMatchTemplate supports only one channel image format.");
            }
            if (!(image.Depth == CVDepth.Depth32F || image.Depth == CVDepth.Depth8U))
            {
                throw new CVException("CVMatchTemplate supports only 32F or 8U image format.");
            }
            if (image.Depth != templateToSearch.Depth || image.Channels != templateToSearch.Channels)
            {
                throw new CVException("image and template should be of the same type format.");
            }

            CVImage result = new CVImage(resultW, resultH, CVDepth.Depth32F, 1);

            // Native call to openCV cvMatchTemplate function:
            PInvoke.cvMatchTemplate(new __CvArrPtr(image), new __CvArrPtr(templateToSearch), new __CvArrPtr(result), (int)method);

            return(result);
        }
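As the remarks describe, the best match is located with cvMinMaxLoc once the comparison map has been computed. A hedged sketch, assuming the calls are made from the class that declares these static helpers; the TemplateMatchMethod member name is a placeholder and a null mask is assumed to mean "no mask":

        CVImage scene = sceneColor.ToGrayscale();
        CVImage templ = templateColor.ToGrayscale();
        CVImage scores = MatchTemplate(scene, templ, TemplateMatchMethod.CV_TM_CCOEFF_NORMED); // member name assumed

        double minVal, maxVal;
        System.Drawing.Point minLoc, maxLoc;
        CVMinMaxLoc(scores, out minVal, out maxVal, out minLoc, out maxLoc, null);
        // For the CV_TM_SQDIFF* methods the best match is the global minimum (minLoc);
        // for CV_TM_CCORR* and CV_TM_CCOEFF* it is the global maximum (maxLoc).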
Example #11
        public void Open(string filename)
        {
            Release();

            capture.ptr = IntPtr.Zero;
            asImage     = null;

            string ext = System.IO.Path.GetExtension(filename);

            // if the extension of the filename is not AVI, try opening as an image.
            if (ext.ToUpper().CompareTo(".AVI") != 0)
            {
                asImage = new CVImage(filename);
            }
            else
            {
                capture = PInvoke.cvCreateFileCapture(filename);
                CVUtils.CheckLastError();
                if (capture.ptr == IntPtr.Zero)
                {
                    throw new CVException(
                              string.Format("Unable to open file	'{0}' for capture.", filename));
                }
            }

            this.filename = filename;
        }
Example #12
        public CVImage Clone()
        {
            CVImage n = new CVImage((__IplImagePtr)IntPtr.Zero);

            n.image = PInvoke.cvCloneImage(this.Internal);
            CVUtils.CheckLastError();
            return(n);
        }
        /// <summary>
        /// Converts input array pixels from one color space to another.
        /// </summary>
        public static CVImage ConvertColorSpace(CVImage image, ColorSpace colorSpace)
        {
            int     numberOfChannels = (colorSpace.ToString().EndsWith("GRAY") ? 1 : 3);
            CVImage dst = new CVImage(image.Width, image.Height, image.Depth, numberOfChannels);

            PInvoke.cvCvtColor(new __CvArrPtr(image), new __CvArrPtr(dst), (int)colorSpace);
            return(dst);
        }
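A minimal sketch, assuming the call is made from the declaring class; the ColorSpace member name below is a placeholder (any member whose name ends in "GRAY" yields a single-channel destination):

        CVImage gray = ConvertColorSpace(source, ColorSpace.BGR2GRAY); // member name assumed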
Example #14
        public void Merge(CVImage blue, CVImage green, CVImage red)
        {
            __IplImagePtr c0 = blue != null ? blue.image : IntPtr.Zero;
            __IplImagePtr c1 = green != null ? green.image : IntPtr.Zero;
            __IplImagePtr c2 = red != null ? red.image : IntPtr.Zero;

            PInvoke.cvMerge(c0, c1, c2, IntPtr.Zero, image);
            CVUtils.CheckLastError();
        }
Example #15
        private static void TestDrawContours()
        {
            CVImage image = new CVImage(100, 100, CVDepth.Depth8U, 1);

            image.DrawRectangle(new System.Drawing.Rectangle(10, 10, 20, 30), System.Drawing.Color.Red, 3);
            new BitmapViewer(image.ToBitmap()).ShowDialog();
            CVImage res = image.DrawContours();
            new BitmapViewer(res.ToBitmap()).ShowDialog();
        }
Example #16
        public void Split(CVImage ch0, CVImage ch1, CVImage ch2, CVImage ch3)
        {
            __IplImagePtr d0 = ch0 != null ? ch0.image : IntPtr.Zero;
            __IplImagePtr d1 = ch1 != null ? ch1.image : IntPtr.Zero;
            __IplImagePtr d2 = ch2 != null ? ch2.image : IntPtr.Zero;
            __IplImagePtr d3 = ch3 != null ? ch3.image : IntPtr.Zero;

            PInvoke.cvSplit(image, d0, d1, d2, d3);
            CVUtils.CheckLastError();
        }
Example #17
        public CVHistogram CalcHistogram(int binsSize, CVImage mask)
        {
            Int32[]  binSizes  = new Int32[3];
            CVPair[] binRanges = new CVPair[3];

            binSizes[0]  = binSizes[1] = binSizes[2] = binsSize;
            binRanges[0] = binRanges[1] = binRanges[2] = new CVPair(0, 255);

            return(CalcHistogram(binSizes, binRanges, mask));
        }
Example #18
        public void Resize(int newWidth, int newHeight, CVInterpolation interpolation)
        {
            CVImage newImage = new CVImage(newWidth, newHeight, Depth, Channels);

            PInvoke.cvResize(this.image, newImage.image, (int)interpolation);
            CVUtils.CheckLastError();
            Release();
            this.image       = newImage.image;
            newImage.created = false;
        }
Example #19
        public CVImage ToGrayscale()
        {
            CVImage gs = new CVImage(Width, Height, Depth, 1);

            System.Drawing.Rectangle prevRoi = this.RegionOfInterest;
            this.ResetROI();
            PInvoke.cvConvertImage(this.Internal, gs.Internal, (int)CVConvertImageFlags.Default);
            CVUtils.CheckLastError();
            this.RegionOfInterest = prevRoi;
            gs.RegionOfInterest   = prevRoi;

            return(gs);
        }
        public static CVImage CannyEdgeDetection(CVImage image, double threshold1, double threshold2, int aperture_size)
        {
            if (image.Channels != 1)
            {
                throw new CVException("Canny edge detection supports only one channel image format.");
            }

            CVImage result = new CVImage(image.Width, image.Height, CVDepth.Depth8U, 1);

            // Native call to openCV canny algorithm:
            PInvoke.cvCanny(new __CvArrPtr(image), new __CvArrPtr(result), threshold1, threshold2, aperture_size);

            return(result);
        }
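Since CannyEdgeDetection only accepts single-channel input, a color image is typically grayscaled first. A minimal sketch with illustrative threshold and aperture values, assuming the call is made from the declaring class:

        CVImage edges = CannyEdgeDetection(colorImage.ToGrayscale(), 50, 150, 3);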
Example #21
        public void Release()
        {
            if (asImage != null)
            {
                asImage.Release();
                asImage = null;
            }

            if (capture.ptr != IntPtr.Zero)
            {
                PInvoke.cvReleaseCapture(ref capture);
                CVUtils.CheckLastError();
            }
        }
Example #22
        public CVImage QueryFrame()
        {
            if (asImage != null)
            {
                return(asImage.Clone());
            }

            __IplImagePtr frame = PInvoke.cvQueryFrame(capture);

            CVUtils.CheckLastError();
            if (frame.ptr == IntPtr.Zero)
            {
                return(null);
            }
            CVImage newImage = new CVImage(new CVImage(frame));

            return(newImage);
        }
Example #23
        void DrawHistogram(PictureBox window, CVHistogram histo, int channelIdx)
        {
            int imageWidth = window.Width;
            int imageHeight = window.Height;

            int bins = histo.BinSizes[0];
            int binWidth = imageWidth / bins;
            if (binWidth <= 0) binWidth = 1;

            CVPair minMax = histo.MinMaxValue;
            CVImage outputImage = new CVImage(imageWidth, imageHeight, CVDepth.Depth8U, 3);
            outputImage.Zero();

            for (int bin = 0; bin < bins; bin++)
            {
                double binValue = histo[bin];
                byte level = (byte)CVUtils.Round(binValue * 255 / minMax.Second);
                byte binHeight = (byte)CVUtils.Round(binValue * imageHeight / minMax.Second);

                byte[] color = new byte[3];
                color[channelIdx] = (byte) (((double) bin / (double) bins) * 255);

                byte[] markerColor = new byte[3];
                markerColor[channelIdx] = level;

                Color colColor = Color.FromArgb(color[2], color[1], color[0]);
                Color colMarker = Color.FromArgb(markerColor[2], markerColor[1], markerColor[0]);

                outputImage.DrawRectangle(new Rectangle(bin * binWidth, imageHeight - binHeight, binWidth - 1, binHeight), colColor);
                outputImage.DrawRectangle(new Rectangle(bin * binWidth, imageHeight - binHeight, binWidth - 1, binHeight), colMarker);
                outputImage.DrawRectangle(new Rectangle(bin * binWidth, imageHeight - binHeight, 1, binHeight), colMarker);
            }

            window.Image = outputImage.ToBitmap();
            outputImage.Release();
        }
 /// <summary>
 /// Copies source 2D array inside of the larger destination array and 
 /// makes a border of the specified type (IPL_BORDER_*) around the copied area.
 /// </summary>
 public CVImage CopyMakeBorder(CVImage dst, Point offset, int bordertype, CVScalar value)
 {
     PInvoke.cvCopyMakeBorder(new __CvArrPtr(this), new __CvArrPtr(dst), new __CvPoint(offset), bordertype, new __CvScalar(value));
     CVUtils.CheckLastError();
     return dst;
 }
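A hedged sketch: pad an image by 10 pixels on every side. The destination must be pre-allocated at the padded size; the border-type value 0 (IPL_BORDER_CONSTANT in the IPL headers) and the CVScalar construction are assumptions here, and src is illustrative:

     CVImage padded = new CVImage(src.Width + 20, src.Height + 20, src.Depth, src.Channels);
     src.CopyMakeBorder(padded, new Point(10, 10), 0 /* assumed IPL_BORDER_CONSTANT */, new CVScalar());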
Example #25
        public void Split(CVImage ch0, CVImage ch1, CVImage ch2, CVImage ch3)
        {
            __IplImagePtr d0 = ch0 != null ? ch0.image : IntPtr.Zero;
            __IplImagePtr d1 = ch1 != null ? ch1.image : IntPtr.Zero;
            __IplImagePtr d2 = ch2 != null ? ch2.image : IntPtr.Zero;
            __IplImagePtr d3 = ch3 != null ? ch3.image : IntPtr.Zero;

            PInvoke.cvSplit(image, d0, d1, d2, d3);
            CVUtils.CheckLastError();
        }
 /// <summary>
 /// Splits a color or grayscale image into multiple connected components
 /// of nearly the same color/brightness using a modification of the Burt algorithm.
 /// comp will contain a pointer to a sequence (CvSeq)
 /// of connected components (CvConnectedComp).
 /// TODO: Remove 'dst' argument and return a CVImage object
 /// </summary>
 public void PyrSegmentation(CVImage dst, IntPtr storage, IntPtr comp, int level, double threshold1, double threshold2)
 {
     PInvoke.cvPyrSegmentation(new __CvImagePtr(this), new __CvImagePtr(dst), new __CvMemStoragePtr(storage), new __CvSeqPtr(comp), level, threshold1, threshold2);
 }
 /// <summary>
 /// Splits a color or grayscale image into multiple connected components
 /// of nearly the same color/brightness using a modification of the Burt algorithm.
 /// comp will contain a pointer to a sequence (CvSeq)
 /// of connected components (CvConnectedComp).
 /// TODO: Remove 'dst' argument and return a CVImage object
 /// </summary>
 public void PyrSegmentation(CVImage dst, IntPtr storage, IntPtr comp, int level, double threshold1, double threshold2)
 {
     PInvoke.cvPyrSegmentation(new __CvImagePtr(this), new __CvImagePtr(dst), new __CvMemStoragePtr(storage), new __CvSeqPtr(comp), level, threshold1, threshold2);
     CVUtils.CheckLastError();
 }
Example #28
 /// <summary>
 /// Up-samples image and smoothes the result with gaussian kernel.
 ///   dst_width = src_width*2,
 ///   dst_height = src_height*2
 /// TODO: Remove 'dst' argument and return a CVImage object
 /// </summary>
 public void PyrUp(CVImage dst, PyrFilter filter)
 {
     PInvoke.cvPyrUp(new __CvImagePtr(this), new __CvImagePtr(dst), (int)filter);
     CVUtils.CheckLastError();
 }
 /// <summary>
 /// Up-samples image and smoothes the result with gaussian kernel.
 ///   dst_width = src_width*2,
 ///   dst_height = src_height*2
 /// TODO: Remove 'dst' argument and return a CVImage object
 /// </summary>
 public void PyrUp(CVImage dst, PyrFilter filter)
 {
     PInvoke.cvPyrUp(new __CvImagePtr(this), new __CvImagePtr(dst), (int)filter);
 }
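A minimal sketch: the destination must be exactly twice the source size, and the filter argument matches the CV_GAUSSIAN_5x5 default used by the single-argument overload shown later in this listing (src is illustrative):

      CVImage doubled = new CVImage(src.Width * 2, src.Height * 2, src.Depth, src.Channels);
      src.PyrUp(doubled, PyrFilter.CV_GAUSSIAN_5x5);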
Example #30
        public CVHistogram CalcHistogram(int[] binSizes, CVPair[] binRanges, CVImage mask)
        {
            CVHistogram h = new CVHistogram(binSizes, binRanges);

            __IplImagePtr[] images = new __IplImagePtr[this.Channels];
            if (this.Channels == 1)
            {
                images[0] = this.Internal;
            }
            else
            {
                CVImage[] planes = this.Split();
                for (int i = 0; i < planes.Length; ++i)
                {
                    images[i] = planes[i].Internal;
                }
            }

            __CvArrPtr maskArr = IntPtr.Zero;
            if (mask != null) maskArr = mask.Array;

            PInvoke.cvCalcHist(images, h.Internal, 0, maskArr);
            CVUtils.CheckLastError();
            return h;
        }
 public CVImageWithCapabilities(CVImage clone) : base(clone)
 {
 }
Example #32
 public unsafe CVImage(CVImage clone)
 {
     Create(clone.Width, clone.Height, clone.Depth, clone.Channels);
     PInvoke.cvConvertImage(clone.Array, this.image, clone.Internal.ToPointer()->origin == 1 ? (int)CVConvertImageFlags.Flip : 0);
     CVUtils.CheckLastError();
 }
        public static CVImage CannyEdgeDetection(CVImage image, double threshold1, double threshold2, int aperture_size)
        {
            if (image.Channels != 1)
            {
                throw new CVException("Canny edge detection supports only one channel image format.");
            }

            CVImage result = new CVImage(image.Width, image.Height, CVDepth.Depth8U, 1);

            // Native call to openCV canny algorithm:
            PInvoke.cvCanny(new __CvArrPtr(image), new __CvArrPtr(result), threshold1, threshold2, aperture_size);
            CVUtils.CheckLastError();
            return result;
        }
Example #34
 private static void TestBitmapConversion()
 {
     CVImage image = new CVImage(new System.Drawing.Bitmap(@"D:\Users\Yoav HaCohen\Pictures\temp\hair_res.bmp"));
     new BitmapViewer(image.ToBitmap()).ShowDialog();
 }
Example #35
 public __CvImagePtr(CVImage img)
 {
     ptr = img.Ptr;
 }
Example #36
        private void videoPlayer_Opening(VideoPlayer sender, OpeningEventArgs args)
        {
            if (bgFrame != null) bgFrame.Dispose();

            // create accumulator image when a new video is opened.
            bgAccum = new double[args.NewCapture.Height, args.NewCapture.Width];
            for (int row = 0; row < args.NewCapture.Height; ++row)
                for (int col = 0; col < args.NewCapture.Width; ++col)
                    bgAccum[row, col] = -1.0;

            bgFrame = args.NewCapture.CreateCompatibleImage();
        }
 public void PyrUp(CVImage dst)
 {
     PyrUp(dst, PyrFilter.CV_GAUSSIAN_5x5);
 }
 /// <summary>
 /// Up-samples image and smoothes the result with gaussian kernel.
 ///   dst_width = src_width*2,
 ///   dst_height = src_height*2
 /// TODO: Remove 'dst' argument and return a CVImage object
 /// </summary>
 public void PyrUp(CVImage dst, PyrFilter filter)
 {
     PInvoke.cvPyrUp(new __CvImagePtr(this), new __CvImagePtr(dst), (int)filter);
     CVUtils.CheckLastError();
 }
Example #39
 public CVImage Clone()
 {
     CVImage n = new CVImage((__IplImagePtr)IntPtr.Zero);
     n.image = PInvoke.cvCloneImage(this.Internal);
     CVUtils.CheckLastError();
     return n;
 }
Example #40
 public CVCapture(int cameraId)
 {
     asImage = null;
     capture = PInvoke.cvCreateCameraCapture(cameraId);
     CVUtils.CheckLastError();
 }
 /// <summary>
 /// Converts input array pixels from one color space to another.
 /// </summary>
 public static CVImage ConvertColorSpace(CVImage image, ColorSpace colorSpace)
 {
     int numberOfChannels = (colorSpace.ToString().EndsWith("GRAY") ? 1 : 3);
     CVImage dst = new CVImage(image.Width, image.Height, image.Depth, numberOfChannels);
     PInvoke.cvCvtColor(new __CvArrPtr(image), new __CvArrPtr(dst), (int)colorSpace);
     CVUtils.CheckLastError();
     return dst;
 }
Example #42
 public void Resize(int newWidth, int newHeight, CVInterpolation interpolation)
 {
     CVImage newImage = new CVImage(newWidth, newHeight, Depth, Channels);
     PInvoke.cvResize(this.image, newImage.image, (int) interpolation);
     CVUtils.CheckLastError();
     Release();
     this.image = newImage.image;
     newImage.created = false;
 }
Example #43
 public void WriteFrame(CVImage image)
 {
     PInvoke.cvWriteFrame(vw_, image.Internal);
 }
Example #44
 public void Merge(CVImage[] rbgChannels)
 {
     System.Diagnostics.Debug.Assert(rbgChannels.Length == 3, "rgbChannels array must be of length 3.");
     Merge(rbgChannels[0], rbgChannels[1], rbgChannels[2]);
 }
 /// <summary>
 /// Copies source 2D array inside of the larger destination array and
 /// makes a border of the specified type (IPL_BORDER_*) around the copied area.
 /// </summary>
 public CVImage CopyMakeBorder(CVImage dst, Point offset, int bordertype, CVScalar value)
 {
     PInvoke.cvCopyMakeBorder(new __CvArrPtr(this), new __CvArrPtr(dst), new __CvPoint(offset), bordertype, new __CvScalar(value));
     return(dst);
 }
 public CVImageWithCapabilities(CVImage clone)
     : base(clone)
 {
 }
 public void PyrUp(CVImage dst)
 {
     PyrUp(dst, PyrFilter.CV_GAUSSIAN_5x5);
 }
Example #48
        private void openVideoToolStripMenuItem_Click(object sender, EventArgs e)
        {
            OpenFileDialog openFile = new OpenFileDialog();
            openFile.Title = "Select Video File";
            openFile.Filter = "AVI Files|*.avi";
            if (openFile.ShowDialog() != DialogResult.OK)
                return;

            cap.Open(openFile.FileName);
            image = cap.QueryFrame();

            UpdateHistogram();
            Track();
        }
Example #49
        private void videoTimer_Tick(object sender, EventArgs e)
        {
            image = cap.QueryFrame();

            if (image == null)
            {
                cap.Restart();
            }

            Track();
        }
Example #50
        private void openVideoToolStripMenuItem_Click(object sender, EventArgs e)
        {
            OpenFileDialog openFile = new OpenFileDialog();
            openFile.Title = "Select Video File";
            if (openFile.ShowDialog() != DialogResult.OK)
                return;

            CVCapture cap = new CVCapture(openFile.FileName);
            image = cap.QueryFrame();
            cap.Dispose();

            UpdateHistogram();
        }
Example #51
 public void Merge(CVImage blue, CVImage green, CVImage red)
 {
     __IplImagePtr c0 = blue != null ? blue.image : IntPtr.Zero;
     __IplImagePtr c1 = green != null ? green.image : IntPtr.Zero;
     __IplImagePtr c2 = red != null ? red.image : IntPtr.Zero;
     PInvoke.cvMerge(c0, c1, c2, IntPtr.Zero, image);
     CVUtils.CheckLastError();
 }
        /// <summary>
        /// Compares a template against overlapped image regions.
        /// </summary>
        /// <param name="image">
        /// Image where the search is running.
        /// It should be 8-bit or 32-bit floating-point.
        /// </param>
        /// <param name="templ">
        /// Searched template;
        /// must be not greater than the source image
        /// and of the same data type as the image.
        /// </param>
        /// <param name="result">
        /// A map of comparison results; single-channel
        /// 32-bit floating-point.
        /// If image is W×H and templ is w×h then result must be (W-w+1)×(H-h+1).
        /// </param>
        /// <param name="method">
        /// Specifies the way the template must be compared with
        /// image regions (see below).
        /// </param>
        /// <remarks>
        /// The function cvMatchTemplate is similar to cvCalcBackProjectPatch.
        /// It slides through image, compares overlapped patches of size w×h with
        /// templ using the specified method and stores the comparison results to
        /// result. The formulas for the different comparison methods are
        /// (I denotes the image, T the template, R the result; the summation runs
        /// over the template and/or the image patch: x'=0..w-1, y'=0..h-1):
        ///
        /// method=CV_TM_SQDIFF:
        /// R(x,y) = sum_{x',y'} [T(x',y') - I(x+x',y+y')]^2
        ///
        /// method=CV_TM_SQDIFF_NORMED:
        /// R(x,y) = sum_{x',y'} [T(x',y') - I(x+x',y+y')]^2 / sqrt(sum_{x',y'} T(x',y')^2 * sum_{x',y'} I(x+x',y+y')^2)
        ///
        /// method=CV_TM_CCORR:
        /// R(x,y) = sum_{x',y'} [T(x',y') * I(x+x',y+y')]
        ///
        /// method=CV_TM_CCORR_NORMED:
        /// R(x,y) = sum_{x',y'} [T(x',y') * I(x+x',y+y')] / sqrt(sum_{x',y'} T(x',y')^2 * sum_{x',y'} I(x+x',y+y')^2)
        ///
        /// method=CV_TM_CCOEFF:
        /// R(x,y) = sum_{x',y'} [T'(x',y') * I'(x+x',y+y')],
        /// where T'(x',y')     = T(x',y')      - 1/(w*h) * sum_{x'',y''} T(x'',y'')
        ///       I'(x+x',y+y') = I(x+x',y+y')  - 1/(w*h) * sum_{x'',y''} I(x+x'',y+y'')
        ///
        /// method=CV_TM_CCOEFF_NORMED:
        /// R(x,y) = sum_{x',y'} [T'(x',y') * I'(x+x',y+y')] / sqrt(sum_{x',y'} T'(x',y')^2 * sum_{x',y'} I'(x+x',y+y')^2)
        ///
        /// After the function finishes the comparison, the best matches can be found
        /// as global minimums (CV_TM_SQDIFF*) or maximums (CV_TM_CCORR* and
        /// CV_TM_CCOEFF*) using the cvMinMaxLoc function. For a color image and
        /// template, the summation in the numerator and each sum in the denominator
        /// are done over all the channels (and separate mean values are used for
        /// each channel).
        /// </remarks>
        public static CVImage MatchTemplate(CVImage image,
            CVImage templateToSearch,
            TemplateMatchMethod method)
        {
            //specify the size needed by the match function
            int resultW = image.Width - templateToSearch.Width + 1;
            int resultH = image.Height - templateToSearch.Height + 1;

            if (image.Channels > 1)
            {
                throw new CVException("CVMatchTemplate supports only one channel image format.");
            }
            if (!(image.Depth == CVDepth.Depth32F || image.Depth == CVDepth.Depth8U))
            {
                throw new CVException("CVMatchTemplate supports only 32F or 8U image format.");
            }
            if (image.Depth != templateToSearch.Depth || image.Channels != templateToSearch.Channels)
            {
                throw new CVException("image and template should be of the same type format.");
            }

            CVImage result = new CVImage(resultW, resultH, CVDepth.Depth32F, 1);

            // Native call to openCV cvMatchTemplate function:
            PInvoke.cvMatchTemplate(new __CvArrPtr(image), new __CvArrPtr(templateToSearch), new __CvArrPtr(result), (int)method);
            CVUtils.CheckLastError();
            return result;
        }
Example #53
 private void ResetSegmentation()
 {
     segmentationImage = image.Clone();
     segPoints = new List<Point>();
 }
Example #54
        public CVImage DrawContours()
        {
            CVImage grayscaled = (this.Channels == 1 ? this : this.ToGrayscale());

            __CvMemStoragePtr storage = PInvoke.cvCreateMemStorage(0);
            __CvSeqPtr first_contour;
            CVImage result = new CVImage(this.Width, this.Height, CVDepth.Depth8U, 3);
            unsafe
            {
                int num_contours = PInvoke.cvFindContours(
                    grayscaled.Internal,
                    storage,
                    out first_contour,
                    sizeof(__CvContour),
                    CV_RETR.CV_RETR_EXTERNAL,
                    CV_CHAIN.CV_CHAIN_APPROX_SIMPLE,
                    new __CvPoint(0, 0)
                );

                // Draw every contour found onto the output image by walking the
                // h_next links of the contour sequence returned by cvFindContours.
                for (__CvSeq* cont = first_contour.ToPointer();
                     cont != null;
                     cont = cont->_cvSequenceFields.__cvTreeNodeFields.h_next.ToPointer())
                {
                    PInvoke.cvDrawContours(result.Array, new __CvSeqPtr(cont), new __CvScalar(255, 0, 0), new __CvScalar(0, 0, 0), 0, (int)CVGlobalConsts.CV_FILLED);
                    CVUtils.CheckLastError();
                }
            }

            PInvoke.cvReleaseMemStorage(ref storage);
            CVUtils.CheckLastError();
            return result;
        }
Example #55
 internal struct __CvImagePtr
 {
     public IntPtr ptr;

     public __CvImagePtr(CVImage img)
     {
         ptr = img.Ptr;
     }
 }
Example #56
 public unsafe CVImage(CVImage clone)
 {
     Create(clone.Width, clone.Height, clone.Depth, clone.Channels);
     PInvoke.cvConvertImage(clone.Array, this.image, clone.Internal.ToPointer()->origin == 1 ? (int)CVConvertImageFlags.Flip : 0);
     CVUtils.CheckLastError();
 }
Example #57
        private void UpdateHistogram()
        {
            int numberOfBins;
            if (!int.TryParse(binSize.Text, out numberOfBins))
            {
                statusBar.Text = string.Format("Number of bins '{0}' is not an integer.", binSize.Text);
                return;
            }

            if (image == null) return;

            //image.RegionOfInterest = originalImage.SelectionRect;

            // split image into channels (b,g,r)
            CVImage[] planes = image.Split();

            // we will create a 1D histogram for every channel. each histogram will have
            // 'numberOfBins' bins for its single dimension (ranged from 0 to 255).
            int[] bins = { numberOfBins };
            CVPair[] ranges = { new CVPair(0, 255) };

            // calculate a histogram for the blue, green and red channels (separately).
            // the planes come back in OpenCV's BGR order, so planes[0] is blue.
            CVHistogram histoBlue = planes[0].CalcHistogram(bins, ranges);
            CVHistogram histoGreen = planes[1].CalcHistogram(bins, ranges);
            CVHistogram histoRed = planes[2].CalcHistogram(bins, ranges);

            // draw the three histograms (channel index 0 = blue, 1 = green, 2 = red).
            DrawHistogram(bluePanel, histoBlue, 0);
            DrawHistogram(greenPanel, histoGreen, 1);
            DrawHistogram(redPanel, histoRed, 2);

            // resize & put original image onto form.
            CVImage output = new CVImage(image.Width, image.Height, CVDepth.Depth8U, 3);
            CVImage emptyPlane = new CVImage(image.Width, image.Height, CVDepth.Depth8U, 1);
            emptyPlane.Zero();

            CVImage[] images = new CVImage[3];
            images[0] = images[1] = images[2] = emptyPlane;

            if (blueCheck.Checked) images[0] = planes[0];
            if (greenCheck.Checked) images[1] = planes[1];
            if (redCheck.Checked) images[2] = planes[2];

            output.Merge(images);
            originalImage.Image = output.ToBitmap();

            // dispose of plane images.
            foreach (CVImage img in planes)
                img.Dispose();

            statusBar.Text = "Ready";
        }
Example #58
 public CVImage CopyRegion(System.Drawing.Rectangle rect)
 {
     CVImage roi = new CVImage(this);
     roi.RegionOfInterest = rect;
     return roi;
 }
Example #59
        private void openToolStripMenuItem_Click(object sender, EventArgs e)
        {
            OpenFileDialog openFile = new OpenFileDialog();
            openFile.Title = "Select Image File";
            if (openFile.ShowDialog() != DialogResult.OK)
                return;

            image = new CVImage(openFile.FileName);
            UpdateHistogram();
        }
Example #60
        public CVImage ToGrayscale()
        {
            CVImage gs = new CVImage(Width, Height, Depth, 1);
            System.Drawing.Rectangle prevRoi = this.RegionOfInterest;
            this.ResetROI();
            PInvoke.cvConvertImage(this.Internal, gs.Internal, (int)CVConvertImageFlags.Default);
            CVUtils.CheckLastError();
            this.RegionOfInterest = prevRoi;
            gs.RegionOfInterest = prevRoi;

            return gs;
        }