/// <summary>
/// Process the filter on the specified image.
/// </summary>
/// <param name="sourceData">Source image data.</param>
/// <param name="destinationData">Destination (8 bpp grayscale) image data.</param>
protected override unsafe void ProcessFilter(UnmanagedImage sourceData, UnmanagedImage destinationData)
{
    var bytesPerPixel = Image.GetPixelFormatSize(sourceData.PixelFormat) / 8;

    // image dimensions
    var imageWidth = sourceData.Width;
    var imageHeight = sourceData.Height;

    // per-row padding bytes in source and destination
    var sourcePadding = sourceData.Stride - imageWidth * bytesPerPixel;
    var destinationPadding = destinationData.Stride - imageWidth;

    var sourcePtr = (byte*)sourceData.ImageData.ToPointer();
    var destinationPtr = (byte*)destinationData.ImageData.ToPointer();

    // walk every pixel, writing one grayscale byte per source pixel
    for (var row = 0; row < imageHeight; row++)
    {
        for (var col = 0; col < imageWidth; col++, sourcePtr += bytesPerPixel, destinationPtr++)
        {
            // normalize each channel to [0, 1]
            var red = sourcePtr[RGB.R] / 255f;
            var green = sourcePtr[RGB.G] / 255f;
            var blue = sourcePtr[RGB.B] / 255f;

            // weighted luminance, scaled back to a byte
            var luminance = (float)(0.2989 * red + 0.5866 * green + 0.1145 * blue);
            *destinationPtr = (byte)(luminance * 255);
        }

        sourcePtr += sourcePadding;
        destinationPtr += destinationPadding;
    }
}
/// <summary>
/// Get rectangle containing the tracked object in the current frame.
/// </summary>
/// <param name="templateInfo">Tracking template information (per-channel statistics).</param>
/// <param name="source">Frame to search (not modified).</param>
/// <returns>Rectangle containing the object, or <see cref="Rectangle.Empty"/> if no blob was found.</returns>
public static Rectangle TemplateColorTracking(ImageStatistics templateInfo, ref UnmanagedImage source)
{
    // work on a copy so the caller's frame is not modified
    UnmanagedImage image = source.Clone();
    UnmanagedImage gray = null;
    try
    {
        // keep only pixels whose color lies within 'Radius' (Euclidean distance)
        // of the template's mean color; everything else becomes black
        EuclideanColorFiltering filter = new EuclideanColorFiltering();
        filter.CenterColor = new RGB(
            (byte)templateInfo.Red.Mean,
            (byte)templateInfo.Green.Mean,
            (byte)templateInfo.Blue.Mean);
        filter.Radius = 30; // color-distance tolerance
        filter.ApplyInPlace(image);

        // binarize: grayscale conversion followed by Otsu thresholding
        gray = Grayscale.CommonAlgorithms.BT709.Apply(image);
        OtsuThreshold threshold = new OtsuThreshold();
        threshold.ApplyInPlace(gray);

        // find blobs; the largest one (ObjectsOrder.Size) is assumed to be the object
        BlobCounter blobCounter = new BlobCounter();
        blobCounter.ObjectsOrder = ObjectsOrder.Size;
        blobCounter.ProcessImage(gray);

        return blobCounter.ObjectsCount > 0
            ? blobCounter.GetObjectsRectangles()[0]
            : Rectangle.Empty;
    }
    finally
    {
        // the original version leaked both intermediate unmanaged images
        image.Dispose();
        if (gray != null)
            gray.Dispose();
    }
}
/// <summary>
/// Process the filter on the specified image.
/// </summary>
///
/// <param name="image">Source image data; each pixel is replaced in place by its
/// normalized chromaticity coordinates scaled to bytes.</param>
///
protected unsafe override void ProcessFilter(UnmanagedImage image)
{
    int imageWidth = image.Width;
    int imageHeight = image.Height;
    int bytesPerPixel = System.Drawing.Image.GetPixelFormatSize(image.PixelFormat) / 8;
    int padding = image.Stride - imageWidth * bytesPerPixel;

    byte* ptr = (byte*)image.ImageData.ToPointer();

    for (int row = 0; row < imageHeight; row++)
    {
        for (int col = 0; col < imageWidth; col++, ptr += bytesPerPixel)
        {
            double channelSum = ptr[RGB.R] + ptr[RGB.G] + ptr[RGB.B];
            if (channelSum == 0)
            {
                channelSum = 1; // avoid division by zero for pure black pixels
            }

            double normalizedRed = ptr[RGB.R] / channelSum;
            double normalizedGreen = ptr[RGB.G] / channelSum;
            // the three coordinates always sum to 1, so blue is implied
            double normalizedBlue = 1 - normalizedRed - normalizedGreen;

            ptr[RGB.R] = (byte)(normalizedRed * 255);
            ptr[RGB.G] = (byte)(normalizedGreen * 255);
            ptr[RGB.B] = (byte)(normalizedBlue * 255);
        }

        ptr += padding;
    }
}
/// <summary>
/// Lets the user pick one or more image files, loads the (last selected) image into
/// GrayScaleImage and shows it in the imgGray control.
/// </summary>
private void BtnLoadimage_Click(object sender, RoutedEventArgs e)
{
    OpenFileDialog openFileDialog = new OpenFileDialog
    {
        Multiselect = true,
        Filter = "Image files (*.png;*.jpeg;*.bmp)|*.png;*.jpeg;*.bmp|All files (*.*)|*.*",
        // NOTE(review): hard-coded developer path; consider
        // Environment.GetFolderPath(Environment.SpecialFolder.MyDocuments) instead
        InitialDirectory = "C:\\Users\\kai23\\Projects\\ABI\\EnglishMuffinVision_AForge\\Images\\English Muffin\\Batch 1\\All Top"
    };

    if (openFileDialog.ShowDialog() == true)
    {
        foreach (string filename in openFileDialog.FileNames)
        {
            lblfilename.Content = filename;

            // Derive the folder label from the path segment between the scan marker and
            // the 'æ' separator. BUG FIX: the original threw ArgumentOutOfRangeException
            // whenever either marker was missing from the file name; both lookups are
            // now guarded and the label is simply left unchanged in that case.
            const string marker = "SK Foods On-Site Scan";
            int markerPos = filename.LastIndexOf(marker);
            int separatorPos = filename.IndexOf("æ");
            if (markerPos >= 0 && separatorPos >= 0)
            {
                int startPos = markerPos + marker.Length + 1;
                int length = separatorPos - startPos - 1;
                if (length >= 0 && startPos + length <= filename.Length)
                {
                    lblFolder.Content = filename.Substring(startPos, length);
                }
            }

            // NOTE(review): with Multiselect enabled only the LAST file survives this
            // loop, and previously loaded bitmaps are never disposed — confirm intent.
            GrayScaleImage = AForge.Imaging.Image.FromFile(filename);
            imageLoaded = true;
        }
    }

    if (imageLoaded)
    {
        // the original leaked this unmanaged copy; ToManagedImage() returns a managed
        // copy, so disposing the unmanaged image afterwards is safe
        using (AForge.Imaging.UnmanagedImage unmanagedImage1 =
            AForge.Imaging.UnmanagedImage.FromManagedImage(GrayScaleImage))
        {
            Bitmap managedImage = unmanagedImage1.ToManagedImage();
            BitmapImage GrayImage_temp = ToBitmapImage(managedImage);
            imgGray.Source = GrayImage_temp;
        }
    }
}
/// <summary>
/// Process the filter on the specified image: stamps every configured rectangle
/// onto the image in the marker color.
/// </summary>
/// <param name="image">Image to draw on.</param>
protected override void ProcessFilter(UnmanagedImage image)
{
    foreach (Rectangle region in rectangles)
    {
        Drawing.Rectangle(image, region, markerColor);
    }
}
/// <summary>
/// Looks for the brightest pixel after applying a redness filter. Narrows search first using
/// a resampled copy of the image to eliminate edge dots, then refines the hit at full resolution.
/// Expects an image that is already cropped to the interested area for faster processing.
/// </summary>
/// <param name="img">Cropped image to search (not disposed by this method).</param>
/// <param name="mouse">Mouse location, in the coordinates of <paramref name="img"/>.</param>
/// <param name="maxDistanceFromMouse">Maximum allowed distance from the mouse to a candidate pixel.</param>
/// <returns>Coordinates of the most red pixel, in the coordinates of <paramref name="img"/>.</returns>
public unsafe Point FindMaxPixel(UnmanagedImage img, PointF mouse, float maxDistanceFromMouse)
{
    // target size for the coarse search pass (15 px wide, height keeps aspect ratio)
    int width = 15;
    int height = (int)Math.Ceiling((double)img.Height / (double)img.Width * width);

    // NOTE(review): this condition looks inverted — as written it resets width/height to the
    // full image size whenever the image is at least 15 px wide, which disables the
    // downsampling pass almost always. Likely intent was `width >= img.Width` (skip the
    // resample only for images already smaller than the target). Confirm before changing.
    if (width <= img.Width && height <= img.Height + 1) { width = img.Width; height = img.Height; }

    // ratio between source and coarse-search coordinates (1 when no resampling happened)
    double scale = (double)img.Width / (double)width;

    UnmanagedImage lowRed = null;
    try
    {
        if (width != img.Width && height != img.Height)
        {
            // high-quality downscale of the source before running the redness filter
            using (Bitmap reduced = new Bitmap(width, height, PixelFormat.Format24bppRgb))
            using (Graphics g = Graphics.FromImage(reduced))
            using (ImageAttributes ia = new ImageAttributes())
            {
                g.CompositingMode = System.Drawing.Drawing2D.CompositingMode.SourceCopy;
                g.CompositingQuality = System.Drawing.Drawing2D.CompositingQuality.HighQuality;
                g.InterpolationMode = System.Drawing.Drawing2D.InterpolationMode.HighQualityBicubic;
                g.PixelOffsetMode = System.Drawing.Drawing2D.PixelOffsetMode.HighQuality;
                g.SmoothingMode = System.Drawing.Drawing2D.SmoothingMode.HighQuality;
                ia.SetWrapMode(System.Drawing.Drawing2D.WrapMode.TileFlipXY);
                g.DrawImage(img.ToManagedImage(false), new Rectangle(0, 0, width, height), 0, 0, img.Width, img.Height, GraphicsUnit.Pixel, ia);
                //TODO: Not sure if ToManagedImage will stick around after the underying image is disposed. I know that the bitmap data will be gone, guess that's most of it.
                using (UnmanagedImage rui = UnmanagedImage.FromManagedImage(reduced))
                {
                    lowRed = new RedEyeFilter(2).Apply(rui); // Make an copy using the red eye filter
                }
            }
        }
        else
        {
            //Don't resample unless needed
            lowRed = new RedEyeFilter(2).Apply(img);
        }

        // coarse hit in (possibly) downscaled coordinates
        Point max = GetMax(lowRed, new PointF(mouse.X / (float)scale, mouse.Y / (float)scale), maxDistanceFromMouse / scale);

        //We weren't scaling things? OK, cool...
        // NOTE(review): scale can never be 0 here (it is img.Width / width); this guard was
        // probably meant to be `scale == 1` (no resampling), so the refinement below always runs.
        if (scale == 0) return max;

        //Otherwise, let's get the unscaled pixel.
        //Calculate the rectangle surrounding the selected pixel, but in source coordinates.
        int tinySize = (int)Math.Ceiling(scale) + 1;
        Rectangle tinyArea = new Rectangle((int)Math.Floor(scale * (double)max.X), (int)Math.Floor(scale * (double)max.Y), tinySize, tinySize);

        // NOTE(review): the clamp sign looks wrong — when Right >= Width the subtracted term
        // `img.Width - tinyArea.Right + 1` is <= 1, so the rectangle is barely (or wrongly)
        // shrunk; the overflow is `tinyArea.Right - img.Width + 1`. Same for Bottom/Height.
        if (tinyArea.Right >= img.Width) tinyArea.Width -= img.Width - tinyArea.Right + 1;
        if (tinyArea.Bottom >= img.Height) tinyArea.Height -= img.Height - tinyArea.Bottom + 1;

        //Filter it and look
        using (UnmanagedImage tiny = new Crop(tinyArea).Apply(img))
        {
            using (UnmanagedImage tinyRed = new RedEyeFilter(2).Apply(tiny))
            {
                // refine within the small full-resolution patch, then translate back
                max = GetMax(tinyRed);
                max.X += tinyArea.X;
                max.Y += tinyArea.Y;
            }
        }
        return max;
    }
    finally
    {
        if (lowRed != null) lowRed.Dispose();
    }
}
/// <summary>
/// Draws a filled square of roughly <paramref name="nbPixel"/> pixels per side,
/// centered on the given point, in the given color.
/// No explicit bounds check is done here; assumes UnmanagedImage.SetPixel ignores
/// out-of-range coordinates — verify against the AForge version in use.
/// </summary>
/// <param name="point">Center of the square.</param>
/// <param name="img">Image to draw on.</param>
/// <param name="nbPixel">Approximate side length in pixels.</param>
/// <param name="col">Fill color.</param>
public void dessinePoint(IntPoint point, UnmanagedImage img, int nbPixel, Color col)
{
    int halfSide = nbPixel / 2;

    for (int x = point.X - halfSide; x <= point.X + halfSide; x++)
    {
        for (int y = point.Y - halfSide; y <= point.Y + halfSide; y++)
        {
            img.SetPixel(x, y, col);
        }
    }
}
/// <summary>
/// Locks the image for editing and returns the display image.
/// </summary>
/// <returns>Returns the current display image.</returns>
public Bitmap Lock()
{
    // editing is finished: reject further edit operations
    CanEdit = false;

    // replace the old display bitmap with a managed snapshot of the working image
    display.Dispose();
    display = workingImage.ToManagedImage();

    // the unmanaged working buffer is no longer needed; assumes ToManagedImage()
    // returned an independent copy — verify against the AForge version in use
    workingImage.Dispose();
    workingImage = null;

    return display;
}
/// <summary>
/// Converts the current image (UnImgReel) to an 8 bpp grayscale image (BT.709 weights)
/// and stores the result in imgNB.
/// </summary>
public void ColeurVersNB()
{
    // Deprecated: too slow.
    // Converts the image to black and white (grayscale).
    UnmanagedImage grayImage = UnmanagedImage.Create(UnImgReel.Width, UnImgReel.Height, PixelFormat.Format8bppIndexed);
    Grayscale.CommonAlgorithms.BT709.Apply(UnImgReel, grayImage);
    imgNB = grayImage;
}
/// <summary>
/// Writes a grayscale version of <paramref name="source"/> into <paramref name="destination"/>.
/// Sources that are already 8 bpp indexed are copied as-is; everything else is converted
/// using BT.709 luminance weights.
/// </summary>
/// <param name="source">Image to convert.</param>
/// <param name="destination">Target image receiving the grayscale data.</param>
public static void ConvertToGrayscale(UnmanagedImage source, UnmanagedImage destination)
{
    if (source.PixelFormat == PixelFormat.Format8bppIndexed)
    {
        // already grayscale: a straight copy is enough
        source.Copy(destination);
    }
    else
    {
        Grayscale.CommonAlgorithms.BT709.Apply(source, destination);
    }
}
/// <summary>
/// Draws an UnmanagedImage onto a Graphics surface via raw GDI StretchBlt,
/// stretching it to fill rectangle <paramref name="r"/>.
/// </summary>
/// <param name="graphics">Target drawing surface.</param>
/// <param name="image">Source unmanaged image.</param>
/// <param name="r">Destination rectangle on the surface.</param>
public static void GdiDrawImage(this Graphics graphics, UnmanagedImage image, Rectangle r)
{
    IntPtr hdc = graphics.GetHdc();
    IntPtr memdc = GdiInterop.CreateCompatibleDC(hdc);
    // NOTE(review): ImageData is the raw pixel buffer pointer, not an HBITMAP handle.
    // Passing it to SelectObject/DeleteObject looks wrong (DeleteObject on a real GDI
    // handle would free it; on a non-handle it should fail silently). Verify what
    // GdiInterop actually expects here before relying on this method.
    IntPtr bmp = image.ImageData;
    GdiInterop.SelectObject(memdc, bmp);
    // 0x04 = HALFTONE stretch mode
    GdiInterop.SetStretchBltMode(hdc, 0x04);
    GdiInterop.StretchBlt(hdc, r.Left, r.Top, r.Width, r.Height, memdc, 0, 0, image.Width, image.Height, GdiInterop.TernaryRasterOperations.SRCCOPY);
    GdiInterop.DeleteObject(bmp);
    GdiInterop.DeleteDC(memdc);
    // always release the HDC obtained from GetHdc, otherwise the Graphics is unusable
    graphics.ReleaseHdc(hdc);
}
/// <summary>
/// In-place vignette-style alpha ramp: scales the alpha channel of every pixel by
/// InnerAlpha inside the border band, by OuterAlpha outside it, and by a smoothstep
/// blend of the two across a band of Width pixels along the rectangle edges.
/// Only 32 bpp images are supported (the alpha byte is addressed via RGB.A).
/// </summary>
/// <param name="image">Image to modify in place.</param>
/// <param name="rect">Region of the image to process.</param>
/// <exception cref="InvalidImagePropertiesException">Image is not 4 bytes per pixel.</exception>
protected unsafe override void ProcessFilter(UnmanagedImage image, Rectangle rect)
{
    int pixelSize = (image.PixelFormat == PixelFormat.Format8bppIndexed) ? 1 : (image.PixelFormat == PixelFormat.Format24bppRgb) ? 3 : 4;
    // alpha manipulation requires a 32 bpp image
    if (pixelSize != 4)
        throw new InvalidImagePropertiesException();

    int startX = rect.Left;
    int startY = rect.Top;
    int stopX = startX + rect.Width;
    int stopY = startY + rect.Height;
    int stride = image.Stride;
    int offset = stride - rect.Width * pixelSize;
    int numberOfPixels = (stopX - startX) * (stopY - startY); // NOTE(review): unused

    // color image
    byte* ptr = (byte*)image.ImageData.ToPointer();
    // allign pointer to the first pixel to process
    ptr += (startY * stride + startX * pixelSize);

    double width = Width;           // band width in pixels
    double inner = InnerAlpha;      // alpha multiplier inside the band
    double outer = OuterAlpha;      // alpha multiplier outside the band
    double diff = OuterAlpha - InnerAlpha;
    const short a = RGB.A;          // offset of the alpha byte within a pixel
    int w = (int)Math.Round(width);

    for (int y = startY; y < stopY; y++)
    {
        // distance of this row into the top/bottom band (0 when outside the band)
        int ydist = Math.Max(0, Math.Max(startY + w - y, y - (stopY - 1 - w)));
        for (int x = startX; x < stopX; x++, ptr += pixelSize)
        {
            // distance of this column into the left/right band (0 when outside the band)
            int xdist = Math.Max(0, Math.Max(startX + w - x, x - (stopX - 1 - w)));
            // corner pixels use the Euclidean distance; edge pixels the single-axis distance
            double dist = xdist > 0 && ydist > 0 ? Math.Round(Math.Sqrt(xdist * xdist + ydist * ydist)): Math.Max(xdist,ydist);
            if (dist <= 0 || w == 0)
            {
                // fully inside the border band (or no band at all)
                ptr[a] = (byte)Math.Round((double)ptr[a] * inner);
            }
            else if (dist > w){
                // fully outside the band
                ptr[a] = (byte)Math.Round((double)ptr[a] * outer);
            }
            else
            {
                // inside the transition: smoothstep blend between inner and outer
                double t = dist / width;
                //t = Math.Sin(Math.PI * t / 2);   // alternative easing curves considered:
                t = 3 * t * t - 2 * t * t * t;
                //t = 6 * Math.Pow(t, 5) - 15 * Math.Pow(t, 4) + 10 * Math.Pow(t, 3);
                ptr[a] = (byte)Math.Round((double)ptr[a] * (inner + diff * t));
            }
        }
        ptr += offset;
    }
}
/// <summary>
/// Thresholded average brightness of an image: each pixel whose RGB mean is below
/// <paramref name="forceZeroBelow"/> counts as 0, every other pixel counts as 255;
/// the return value is the average of those per-pixel values.
/// </summary>
/// <param name="img">Image to inspect.</param>
/// <param name="forceZeroBelow">Brightness cutoff below which a pixel counts as black.</param>
/// <returns>Average thresholded brightness in the range [0, 255].</returns>
public static int AvgColor(System.Drawing.Image img, int forceZeroBelow)
{
    long total = 0;

    // BUG FIX: the original leaked both the temporary Bitmap and the unmanaged copy
    using (Bitmap bitmap = new Bitmap(img))
    using (AForge.Imaging.UnmanagedImage umimg = AForge.Imaging.UnmanagedImage.FromManagedImage(bitmap))
    {
        for (int y = 0; y < umimg.Height; y++)
        {
            for (int x = 0; x < umimg.Width; x++)
            {
                Color c = umimg.GetPixel(x, y);
                // count the pixel as pure black or pure white depending on the cutoff
                total += forceZeroBelow > ((c.R + c.G + c.B) / 3) ? 0 : 255;
            }
        }

        return (int)(total / ((long)umimg.Width * (long)umimg.Height));
    }
}
/// <summary>
/// Process the filter on the specified image: convolves the source with every kernel
/// in <c>masks</c> and keeps, for each byte of the destination, the maximum response
/// over all kernels.
/// </summary>
///
/// <param name="sourceData">Source image data.</param>
/// <param name="destinationData">Destination image data.</param>
///
protected unsafe override void ProcessFilter(UnmanagedImage sourceData, UnmanagedImage destinationData)
{
    int width = sourceData.Width;
    int height = sourceData.Height;
    PixelFormat format = sourceData.PixelFormat;
    int pixelSize = System.Drawing.Bitmap.GetPixelFormatSize(format) / 8;

    // Scratch image for intermediate convolution results.
    // BUG FIX: the original also called sourceData.Clone() and discarded the result
    // (leaking the clone), and never disposed this temporary image.
    using (UnmanagedImage temp = UnmanagedImage.Create(width, height, format))
    {
        int lineWidth = width * pixelSize;
        int srcOffset = temp.Stride - lineWidth;
        int dstOffset = destinationData.Stride - lineWidth;
        byte* srcStart = (byte*)temp.ImageData.ToPointer();
        byte* dstStart = (byte*)destinationData.ImageData.ToPointer();

        // first kernel initializes the destination
        Convolution c = new Convolution(masks[0]);
        c.Apply(sourceData, destinationData);

        // remaining kernels: keep the per-byte maximum of all responses
        for (int i = 1; i < masks.Length; i++)
        {
            c.Kernel = masks[i];
            c.Apply(sourceData, temp);

            byte* src = srcStart;
            byte* dst = dstStart;

            for (int y = 0; y < height; y++)
            {
                for (int x = 0; x < lineWidth; x++, src++, dst++)
                {
                    if (*src > *dst)
                        *dst = *src;
                }

                dst += dstOffset;
                src += srcOffset;
            }
        }
    }
}
/// <summary>
/// Process the filter on the specified image: writes a per-pixel "redness" value to the
/// 8 bpp destination, computed by one of five selectable algorithms (Algorithm 0..4).
/// NOTE(review): pixels whose R channel is 0 are skipped via 'continue', which leaves the
/// corresponding destination byte unmodified — this assumes the destination starts zeroed.
/// </summary>
/// <param name="sourceData">Source image data (color).</param>
/// <param name="destinationData">Destination image data (one byte per pixel).</param>
protected unsafe override void ProcessFilter(UnmanagedImage sourceData, UnmanagedImage destinationData)
{
    // get width and height
    int width = sourceData.Width;
    int height = sourceData.Height;
    int pixelSize = System.Drawing.Image.GetPixelFormatSize(sourceData.PixelFormat) / 8;
    int sum;
    var algorithm = Algorithm;
    if (pixelSize <= 4)
    {
        int srcOffset = sourceData.Stride - width * pixelSize;
        int dstOffset = destinationData.Stride - width;
        // do the job
        byte* src = (byte*)sourceData.ImageData.ToPointer();
        byte* dst = (byte*)destinationData.ImageData.ToPointer();
        for (int y = 0; y < height; y++)
        {
            for (int x = 0; x < width; x++, src += pixelSize, dst++)
            {
                // R == 0 can never be "red"; also guards the divisions in algorithms 2/3
                if (src[RGB.R] == 0)
                    continue;
                if (algorithm == 0)
                {
                    // held: redness = R minus the smaller of G/B
                    *dst = (byte)Math.Max(src[RGB.R] - Math.Min(src[RGB.G], src[RGB.B]), 0);
                }
                else if (algorithm == 1)
                {
                    // normalized r channel: R as a fraction of R+G+B
                    sum = (src[RGB.R] + src[RGB.G] + src[RGB.B]);
                    *dst = (sum != 0) ? (byte)(255 * src[RGB.R] / sum) : (byte)0;
                }
                else if (algorithm == 2)
                {
                    // Smolka: (R - max(G,B)) / R, clamped to [0, 255]
                    *dst = src[RGB.R] == 0 ? (byte)0 : (byte)Math.Min(255, Math.Max(0, ((float)(src[RGB.R] - Math.Max(src[RGB.G], src[RGB.B])) * 255.0F / (float)src[RGB.R])));
                }
                else if (algorithm == 3)
                {
                    // GS: squared excess of 2R over G+B, relative to R.
                    // NOTE(review): this is INTEGER division before Math.Pow, so the ratio
                    // is truncated to 0/1/2 — confirm that is the intended formula.
                    *dst = (byte)Math.Pow((Math.Max(0, (src[RGB.R] * 2 - src[RGB.G] - src[RGB.B]) / src[RGB.R])), 2);
                }
                else if (algorithm == 4)
                {
                    // Gabautz: R² / (G² + B² + 14), clamped to 255
                    *dst = (byte)Math.Min(255, (src[RGB.R] * src[RGB.R] / (src[RGB.G] * src[RGB.G] + src[RGB.B] * src[RGB.B] + 14)));
                }
            }
            src += srcOffset;
            dst += dstOffset;
        }
    }
    else
        throw new NotImplementedException();
}
/// <summary>
/// Process the filter on the specified image: stretches each color channel so that its
/// maximum value maps to 255 (simple per-channel white balance), in place.
/// </summary>
///
/// <param name="image">Source image data.</param>
///
protected unsafe override void ProcessFilter(UnmanagedImage image)
{
    int width = image.Width;
    int height = image.Height;
    int pixelSize = System.Drawing.Image.GetPixelFormatSize(image.PixelFormat) / 8;
    int stride = image.Stride;
    int offset = stride - image.Width * pixelSize;

    byte* src = (byte*)image.ImageData.ToPointer();

    // First pass: find the per-channel maxima.
    // BUG FIX: the original never advanced 'src' during this pass, so it repeatedly
    // inspected only the first pixel of the image and the gains were wrong.
    int maxR = 0, maxG = 0, maxB = 0;
    for (int i = 0; i < height; i++)
    {
        for (int j = 0; j < width; j++, src += pixelSize)
        {
            if (src[RGB.R] > maxR) maxR = src[RGB.R];
            if (src[RGB.G] > maxG) maxG = src[RGB.G];
            if (src[RGB.B] > maxB) maxB = src[RGB.B];
        }
        src += offset;
    }

    // gain factors stretching each channel's maximum to 255 (0 for an all-zero channel)
    double kr = maxR > 0 ? (255.0 / maxR) : 0;
    double kg = maxG > 0 ? (255.0 / maxG) : 0;
    double kb = maxB > 0 ? (255.0 / maxB) : 0;

    // Second pass: apply the per-channel gains, clamping at 255.
    src = (byte*)image.ImageData.ToPointer();
    for (int y = 0; y < height; y++)
    {
        for (int x = 0; x < width; x++, src += pixelSize)
        {
            double r = kr * src[RGB.R];
            double g = kg * src[RGB.G];
            double b = kb * src[RGB.B];

            src[RGB.R] = (byte)(r > 255 ? 255 : r);
            src[RGB.G] = (byte)(g > 255 ? 255 : g);
            src[RGB.B] = (byte)(b > 255 ? 255 : b);
        }
        src += offset;
    }
}
/// <summary>
/// Local-contrast metric: sums the absolute differences between each pixel and its
/// right, lower, and both diagonal neighbours, then normalizes by image area and
/// the maximum squared intensity (255²). Higher values indicate more local detail.
/// </summary>
/// <param name="image">Image to evaluate.</param>
/// <returns>Normalized aggregate of neighbour differences.</returns>
public static double EvaluateLocallyWAbs(UnmanagedImage image)
{
    byte[,] pixels = image.GetPixels();
    int width = pixels.GetLength(0);
    int height = pixels.GetLength(1);

    double aggregate = 0;

    // horizontal neighbours (x, y) vs (x+1, y)
    for (int x = 0; x < width - 1; x++)
        for (int y = 0; y < height; y++)
            aggregate += Math.Abs(pixels[x, y] - pixels[x + 1, y]);

    // vertical neighbours (x, y) vs (x, y+1)
    for (int x = 0; x < width; x++)
        for (int y = 0; y < height - 1; y++)
            aggregate += Math.Abs(pixels[x, y] - pixels[x, y + 1]);

    // "\" diagonal neighbours (x, y) vs (x+1, y+1)
    for (int x = 0; x < width - 1; x++)
        for (int y = 0; y < height - 1; y++)
            aggregate += Math.Abs(pixels[x, y] - pixels[x + 1, y + 1]);

    // "/" diagonal neighbours (x, y) vs (x+1, y-1)
    for (int x = 0; x < width - 1; x++)
        for (int y = 1; y < height; y++)
            aggregate += Math.Abs(pixels[x, y] - pixels[x + 1, y - 1]);

    // scale so the result is comparable across image sizes
    return (1D / (width * height * Math.Pow(255, 2))) * aggregate;
}
/// <summary>
/// Builds a 256-bin brightness histogram ((R+G+B)/3 per pixel) of the image, then
/// rescales the bins so the fullest bin equals 255 (convenient for drawing).
/// </summary>
/// <param name="img">Image to analyse.</param>
/// <returns>Array of 256 bin values, each scaled to [0, 255].</returns>
public static int[] Histogram(System.Drawing.Image img)
{
    int[] ans = new int[256];

    // BUG FIX: the original leaked both the temporary Bitmap and the unmanaged copy
    using (Bitmap bitmap = new Bitmap(img))
    using (AForge.Imaging.UnmanagedImage umimg = AForge.Imaging.UnmanagedImage.FromManagedImage(bitmap))
    {
        for (int y = 0; y < umimg.Height; y++)
        {
            for (int x = 0; x < umimg.Width; x++)
            {
                Color c = umimg.GetPixel(x, y);
                ans[(c.R + c.G + c.B) / 3]++;
            }
        }
    }

    // normalize bins to 0..255 (max is >= 1 for any non-empty image)
    int max = ans.Max();
    for (int i = 0; i < 256; i++)
    {
        ans[i] = (int)Math.Round((double)ans[i] / (double)max * 255D, 0);
    }

    return ans;
}
/// <summary>
/// Applies lens-distortion correction to the image in place, using a cached pixel
/// remapping table (_map). The table is rebuilt via Init whenever any correction
/// parameter or the image size differs from the cached values.
/// </summary>
/// <param name="img">Image to correct in place.</param>
/// <param name="aFocalLinPixels">Focal length in pixels used to build the map.</param>
/// <param name="limit">Fisheye limit parameter.</param>
/// <param name="scale">Fisheye size scale.</param>
/// <param name="offx">Horizontal offset.</param>
/// <param name="offy">Vertical offset.</param>
public void Correct(UnmanagedImage img, double aFocalLinPixels, int limit, double scale, int offx, int offy)
{
    // rebuild the remap table only when the parameters differ from the cached ones
    if (Math.Abs(_aFocalLinPixels - aFocalLinPixels) > Double.Epsilon || limit != _mFeLimit ||
        Math.Abs(scale - _mScaleFeSize) > Double.Epsilon || img.Width != _w || img.Height != _h ||
        _offsetx != offx || _offsety != offy)
    {
        Init(aFocalLinPixels, limit, scale, img.Width, img.Height, offx, offy);
    }

    // Snapshot the source pixels, then write each destination pixel from its mapped
    // source location. BUG FIX: 'using' guarantees the snapshot is disposed even if
    // the remapping throws (the original only disposed on the success path).
    using (var correctImage = UnmanagedImage.Create(img.Width, img.Height, img.PixelFormat))
    {
        img.Copy(correctImage);
        int c = 0;
        for (int x = 0; x < _w; x++)
        {
            for (int y = 0; y < _h; y++)
            {
                img.SetPixel(x, y, correctImage.GetPixel(_map[c, 0], _map[c, 1]));
                c++;
            }
        }
    }
}
// Gather statistics for the specified image.
//
// Builds 256-bin saturation and luminance histograms over all pixels, plus a second
// pair that excludes pure-black pixels (luminance == 0). When a mask pointer is
// supplied, only pixels with a non-zero mask byte are counted.
private unsafe void ProcessImage( UnmanagedImage image, byte* mask, int maskLineSize )
{
    // get image dimension
    int width = image.Width;
    int height = image.Height;

    pixels = pixelsWithoutBlack = 0;

    // raw bins: s/l count every pixel, swb/lwb skip black pixels
    int[] s   = new int[256];
    int[] l   = new int[256];
    int[] swb = new int[256];
    int[] lwb = new int[256];
    // conversion buffers reused for every pixel (avoids per-pixel allocation)
    RGB   rgb = new RGB( );
    HSL   hsl = new HSL( );

    int pixelSize = ( image.PixelFormat == PixelFormat.Format24bppRgb ) ? 3 : 4;
    int offset = image.Stride - width * pixelSize;   // per-row padding in the image
    int maskOffset = maskLineSize - width;           // per-row padding in the mask

    // do the job
    byte * p = (byte*) image.ImageData.ToPointer( );

    if ( mask == null )
    {
        // unmasked variant: every pixel contributes
        // for each line
        for ( int y = 0; y < height; y++ )
        {
            // for each pixel
            for ( int x = 0; x < width; x++, p += pixelSize )
            {
                rgb.Red   = p[RGB.R];
                rgb.Green = p[RGB.G];
                rgb.Blue  = p[RGB.B];

                // convert to HSL color space
                AForge.Imaging.HSL.FromRGB( rgb, hsl );

                s[(int) ( hsl.Saturation * 255 )]++;
                l[(int) ( hsl.Luminance  * 255 )]++;
                pixels++;

                if ( hsl.Luminance != 0.0 )
                {
                    swb[(int) ( hsl.Saturation * 255 )]++;
                    lwb[(int) ( hsl.Luminance  * 255 )]++;
                    pixelsWithoutBlack++;
                }
            }
            p += offset;
        }
    }
    else
    {
        // masked variant (duplicated loop keeps the unmasked hot path branch-free)
        // for each line
        for ( int y = 0; y < height; y++ )
        {
            // for each pixel
            for ( int x = 0; x < width; x++, p += pixelSize, mask++ )
            {
                // zero mask byte: exclude this pixel from all statistics
                if ( *mask == 0 )
                    continue;

                rgb.Red   = p[RGB.R];
                rgb.Green = p[RGB.G];
                rgb.Blue  = p[RGB.B];

                // convert to HSL color space
                AForge.Imaging.HSL.FromRGB( rgb, hsl );

                s[(int) ( hsl.Saturation * 255 )]++;
                l[(int) ( hsl.Luminance  * 255 )]++;
                pixels++;

                if ( hsl.Luminance != 0.0 )
                {
                    swb[(int) ( hsl.Saturation * 255 )]++;
                    lwb[(int) ( hsl.Luminance  * 255 )]++;
                    pixelsWithoutBlack++;
                }
            }
            p += offset;
            mask += maskOffset;
        }
    }

    // create histograms; the [0, 1] range mirrors the HSL value range even though
    // the bins themselves are indexed 0..255
    saturation = new ContinuousHistogram( s, new Range( 0, 1 ) );
    luminance  = new ContinuousHistogram( l, new Range( 0, 1 ) );

    saturationWithoutBlack = new ContinuousHistogram( swb, new Range( 0, 1 ) );
    luminanceWithoutBlack  = new ContinuousHistogram( lwb, new Range( 0, 1 ) );
}
/// <summary>
/// Initializes a new instance of the <see cref="ImageStatisticsHSL"/> class.
/// </summary>
///
/// <param name="image">Image to gather statistics about.</param>
/// <param name="mask">Mask array which specifies areas to collect statistics for.</param>
///
/// <remarks><para>The mask array must be of the same size as the specified source image, where 0 values
/// correspond to areas which should be excluded from processing. So statistics is calculated only for pixels,
/// which have none zero corresponding value in the mask.
/// </para></remarks>
///
/// <exception cref="UnsupportedImageFormatException">Source pixel format is not supported.</exception>
/// <exception cref="ArgumentException">Mask must have the same size as the source image to get statistics for.</exception>
///
public ImageStatisticsHSL( UnmanagedImage image, byte[,] mask )
{
    CheckSourceFormat( image.PixelFormat );
    // the managed 2D array is [rows, columns], so dimension 1 is the width
    CheckMaskProperties( PixelFormat.Format8bppIndexed,
        new Size( mask.GetLength( 1 ), mask.GetLength( 0 ) ), new Size( image.Width, image.Height ) );

    unsafe
    {
        // pin the managed mask array so ProcessImage can walk it with a raw pointer;
        // row length of the array doubles as the mask's line size (no padding)
        fixed ( byte* maskPtr = mask )
        {
            ProcessImage( image, maskPtr, mask.GetLength( 1 ) );
        }
    }
}
/// <summary>
/// Initializes a new instance of the <see cref="ImageStatisticsHSL"/> class.
/// </summary>
///
/// <param name="image">Image to gather statistics about.</param>
/// <param name="mask">Mask image which specifies areas to collect statistics for.</param>
///
/// <remarks><para>The mask image must be a grayscale/binary (8bpp) image of the same size as the
/// specified source image, where black pixels (value 0) correspond to areas which should be excluded
/// from processing. So statistics is calculated only for pixels, which are none black in the mask image.
/// </para></remarks>
///
/// <exception cref="UnsupportedImageFormatException">Source pixel format is not supported.</exception>
/// <exception cref="ArgumentException">Mask image must be 8 bpp grayscale image.</exception>
/// <exception cref="ArgumentException">Mask must have the same size as the source image to get statistics for.</exception>
///
public ImageStatisticsHSL( UnmanagedImage image, UnmanagedImage mask )
{
    CheckSourceFormat( image.PixelFormat );
    CheckMaskProperties( mask.PixelFormat,
        new Size( mask.Width, mask.Height ), new Size( image.Width, image.Height ) );

    unsafe
    {
        // the mask's stride is passed so ProcessImage can skip its per-row padding
        ProcessImage( image, (byte*) mask.ImageData.ToPointer( ), mask.Stride );
    }
}
/// <summary>
/// Process images matching blocks between them.
/// </summary>
///
/// <param name="sourceImage">Source unmanaged image with reference points.</param>
/// <param name="coordinates">List of reference points to be matched.</param>
/// <param name="searchImage">Unmanaged image in which the reference points will be looked for.</param>
///
/// <returns>Returns list of found block matches. The list is sorted by similarity
/// of found matches in descending order.</returns>
///
/// <exception cref="InvalidImagePropertiesException">Source and search images sizes must match.</exception>
/// <exception cref="UnsupportedImageFormatException">Source images can be grayscale (8 bpp indexed) or color (24 bpp) image only.</exception>
/// <exception cref="ArgumentException">Source and search images must have same pixel format.</exception>
///
public List<BlockMatch> ProcessImage(UnmanagedImage sourceImage, List<IntPoint> coordinates, UnmanagedImage searchImage)
{
    // source images sizes must match.
    if ((sourceImage.Width != searchImage.Width) || (sourceImage.Height != searchImage.Height))
    {
        throw new InvalidImagePropertiesException("Source and search images sizes must match");
    }

    // source images must be grayscale or color.
    if ((sourceImage.PixelFormat != PixelFormat.Format8bppIndexed) && (sourceImage.PixelFormat != PixelFormat.Format24bppRgb))
    {
        throw new UnsupportedImageFormatException("Source images can be grayscale (8 bpp indexed) or color (24 bpp) image only");
    }

    // source images must have the same pixel format.
    if (sourceImage.PixelFormat != searchImage.PixelFormat)
    {
        throw new InvalidImagePropertiesException("Source and search images must have same pixel format");
    }

    int pointsCount = coordinates.Count;

    // found matches
    List<BlockMatch> matchingsList = new List<BlockMatch>();

    // get source image size
    int width = sourceImage.Width;
    int height = sourceImage.Height;
    int stride = sourceImage.Stride;
    int pixelSize = (sourceImage.PixelFormat == PixelFormat.Format8bppIndexed) ? 1 : 3;

    // pre-compute some values to avoid doing it in the loops
    int blockRadius = blockSize / 2;
    int searchWindowSize = 2 * searchRadius;
    int blockLineSize = blockSize * pixelSize;
    int blockOffset = stride - (blockSize * pixelSize);

    // maximum possible difference of blocks
    int maxDiff = blockSize * blockSize * pixelSize * 255;

    // integer similarity threshold
    int threshold = (int)(similarityThreshold * maxDiff);

    // do the job
    unsafe
    {
        byte* ptrSource = (byte*)sourceImage.ImageData.ToPointer();
        byte* ptrSearch = (byte*)searchImage.ImageData.ToPointer();

        // for each point fed
        for (int iPoint = 0; iPoint < pointsCount; iPoint++)
        {
            int refPointX = coordinates[iPoint].X;
            int refPointY = coordinates[iPoint].Y;

            // skip points whose reference block does not fit inside the source image
            if (((refPointX - blockRadius < 0) || (refPointX + blockRadius >= width)) ||
                ((refPointY - blockRadius < 0) || (refPointY + blockRadius >= height)))
            {
                continue;
            }

            // starting search point (upper-left corner of the search window)
            int searchStartX = refPointX - blockRadius - searchRadius;
            int searchStartY = refPointY - blockRadius - searchRadius;

            // best match defaults to the reference point itself
            int bestMatchX = refPointX;
            int bestMatchY = refPointY;

            // Exhaustive Search Algorithm - test each location within the search window
            int minError = int.MaxValue;

            // for each search window's row
            for (int searchWindowRow = 0; searchWindowRow < searchWindowSize; searchWindowRow++)
            {
                // skip rows where the candidate block would fall outside the image
                if ((searchStartY + searchWindowRow < 0) || (searchStartY + searchWindowRow + blockSize >= height))
                {
                    continue;
                }

                // for each search window's column
                for (int searchWindowCol = 0; searchWindowCol < searchWindowSize; searchWindowCol++)
                {
                    // tested block location in search image
                    int blockSearchX = searchStartX + searchWindowCol;
                    int blockSearchY = searchStartY + searchWindowRow;

                    // Skip columns where the candidate block would fall outside the image.
                    // BUG FIX: the original tested 'blockSearchY + blockSize >= width',
                    // mixing the vertical coordinate with the horizontal bound; the block's
                    // X extent must be checked against the image width.
                    if ((blockSearchX < 0) || (blockSearchX + blockSize >= width))
                    {
                        continue;
                    }

                    // get memory location of the block's upper left point in source and search images
                    byte* ptrSourceBlock = ptrSource + ((refPointY - blockRadius) * stride) + ((refPointX - blockRadius) * pixelSize);
                    byte* ptrSearchBlock = ptrSearch + (blockSearchY * stride) + (blockSearchX * pixelSize);

                    // navigate this block, accumulating the sum of absolute differences
                    int error = 0;
                    for (int blockRow = 0; blockRow < blockSize; blockRow++)
                    {
                        for (int blockCol = 0; blockCol < blockLineSize; blockCol++, ptrSourceBlock++, ptrSearchBlock++)
                        {
                            int diff = *ptrSourceBlock - *ptrSearchBlock;
                            if (diff > 0)
                            {
                                error += diff;
                            }
                            else
                            {
                                error -= diff;
                            }
                        }

                        // move to the next row of the block
                        ptrSourceBlock += blockOffset;
                        ptrSearchBlock += blockOffset;
                    }

                    // check if the sum of error is minimal
                    if (error < minError)
                    {
                        minError = error;

                        // keep best match so far
                        bestMatchX = blockSearchX + blockRadius;
                        bestMatchY = blockSearchY + blockRadius;
                    }
                }
            }

            // calculate blocks' similarity and compare it with threshold
            int blockSimilarity = maxDiff - minError;
            if (blockSimilarity >= threshold)
            {
                matchingsList.Add(new BlockMatch(
                    new IntPoint(refPointX, refPointY), new IntPoint(bestMatchX, bestMatchY),
                    (float)blockSimilarity / maxDiff));
            }
        }
    }

    // sort in descending order of similarity
    matchingsList.Sort(new MatchingsSorter());

    return matchingsList;
}
/// <summary>
/// Estimates the document skew angle (in degrees) within the given rectangle, using a
/// Hough transform over vertical black-to-white pixel transitions (text baseline edges).
/// </summary>
/// <param name="image">8 bpp grayscale source image.</param>
/// <param name="rect">Region of the image to analyse.</param>
/// <returns>Estimated skew angle in degrees (0 means no skew).</returns>
/// <exception cref="UnsupportedImageFormatException">The source image is not 8 bpp grayscale.</exception>
public unsafe double GetSkewAngle(UnmanagedImage image, Rectangle rect)
{
    if (image.PixelFormat != PixelFormat.Format8bppIndexed)
    {
        throw new UnsupportedImageFormatException("Unsupported pixel format of the source image.");
    }

    this.InitHoughMap();

    int width = image.Width;
    int height = image.Height;
    int halfWidth = width / 2;
    int halfHeight = height / 2;

    // clip the rectangle to the image and express its bounds in centered coordinates
    rect.Intersect(new Rectangle(0, 0, width, height));
    int startX = -halfWidth + rect.Left;
    int startY = -halfHeight + rect.Top;
    int stopX = (width - halfWidth) - (width - rect.Right);
    int stopY = ((height - halfHeight) - (height - rect.Bottom)) - 1;
    int offset = image.Stride - rect.Width;

    // Hough accumulator: one row per probed angle, columns spanning the radius range
    int halfHoughWidth = (int)Math.Sqrt((double)((halfWidth * halfWidth) + (halfHeight * halfHeight)));
    int houghWidth = halfHoughWidth * 2;
    this.houghMap = new short[this.houghHeight, houghWidth];

    byte* src = (byte*)image.ImageData.ToPointer() + (rect.Top * image.Stride) + rect.Left;
    byte* srcBelow = src + image.Stride;

    // vote for every dark pixel with a bright pixel directly below it
    for (int y = startY; y < stopY; y++)
    {
        for (int x = startX; x < stopX; x++, src++, srcBelow++)
        {
            if ((*src < 0x80) && (*srcBelow >= 0x80))
            {
                for (int theta = 0; theta < this.houghHeight; theta++)
                {
                    int radius = ((int)((this.cosMap[theta] * x) - (this.sinMap[theta] * y))) + halfHoughWidth;
                    if ((radius >= 0) && (radius < houghWidth))
                    {
                        // BUG FIX: the original (decompiled) code copied the accumulator
                        // cell into a short local and tried to index it, which does not
                        // even compile; the intent is to increment the cell itself.
                        this.houghMap[theta, radius]++;
                    }
                }
            }
        }
        src += offset;
        srcBelow += offset;
    }

    // track the maximum vote count (consumed by CollectLines for relative intensities)
    this.maxMapIntensity = 0;
    for (int theta = 0; theta < this.houghHeight; theta++)
    {
        for (int radius = 0; radius < houghWidth; radius++)
        {
            if (this.houghMap[theta, radius] > this.maxMapIntensity)
            {
                this.maxMapIntensity = this.houghMap[theta, radius];
            }
        }
    }

    this.CollectLines((short)(width / 10));

    // average the strongest lines' angles, weighted by their relative intensity
    HoughLine[] mostIntensiveLines = this.GetMostIntensiveLines(5);
    double weightedThetaSum = 0.0;
    double weightSum = 0.0;
    foreach (HoughLine line in mostIntensiveLines)
    {
        if (line.RelativeIntensity > 0.5)
        {
            weightedThetaSum += line.Theta * line.RelativeIntensity;
            weightSum += line.RelativeIntensity;
        }
    }

    // BUG FIX: guard on the accumulated weight instead of the line count — the original
    // divided by zero (yielding NaN) when lines existed but none exceeded 0.5 intensity
    if (weightSum > 0)
    {
        weightedThetaSum /= weightSum;
    }

    return weightedThetaSum - 90.0;
}
/// <summary>
/// Estimates the document skew angle over the whole image.
/// </summary>
/// <param name="image">8 bpp grayscale source image.</param>
/// <returns>Estimated skew angle in degrees.</returns>
public double GetSkewAngle(UnmanagedImage image)
{
    // delegate to the rectangle overload, covering the full image area
    Rectangle fullImage = new Rectangle(0, 0, image.Width, image.Height);
    return this.GetSkewAngle(image, fullImage);
}
/// <summary>
/// Runs the motion detector against the supplied frame (every Nth frame, per camera
/// configuration) and decides whether the alarm threshold band has been crossed.
/// </summary>
/// <param name="lfu">Latest frame to analyse; may be null, in which case nothing is processed.</param>
/// <returns>true when the measured motion level falls inside the alarm band; otherwise false.</returns>
private bool ApplyMotionDetector(UnmanagedImage lfu)
{
    if (Alarm != null && lfu != null)
    {
        _processFrameCount++;
        // only analyse every Nth frame, unless the camera is currently calibrating
        if (_processFrameCount >= CW.Camobject.detector.processeveryframe || CW.Calibrating)
        {
            _processFrameCount = 0;
            try
            {
                // optionally pre-filter the frame before measuring motion
                MotionLevel = _motionDetector.ProcessFrame(Filter != null ? Filter.Apply(lfu) : lfu);
            }
            catch (Exception ex)
            {
                // BUG FIX: keep the original exception as InnerException instead of
                // discarding it (the message alone loses the stack trace and type)
                throw new Exception("Error processing motion: " + ex.Message, ex);
            }

            MotionLevel = MotionLevel * CW.Camobject.detector.gain;

            if (MotionLevel >= _alarmLevel)
            {
                // alarm only while the motion level stays below the configured ceiling
                if (Math.Min(MotionLevel, 0.99) <= _alarmLevelMax)
                {
                    return true;
                }
            }
            else
            {
                MotionDetected = false;
            }
        }
    }
    else
    {
        MotionDetected = false;
    }
    return false;
}
/// <summary>
/// Initializes a new instance of the <see cref="ImageStatisticsYCbCr"/> class,
/// gathering statistics only for image pixels with a non-zero mask value.
/// </summary>
/// <param name="image">Image to gather statistics about.</param>
/// <param name="mask">8 bpp mask image of the same size as the source image.</param>
public unsafe ImageStatisticsYCbCr(UnmanagedImage image, UnmanagedImage mask)
{
    // validate the source pixel format and that the mask is compatible in format and size
    CheckSourceFormat(image.PixelFormat);
    CheckMaskProperties(mask.PixelFormat, new Size(mask.Width, mask.Height), new Size(image.Width, image.Height));

    // the mask's stride is passed so ProcessImage can skip its per-row padding
    ProcessImage(image, (byte *)mask.ImageData.ToPointer(), mask.Stride);
}
/// <summary>
/// Initializes a new instance of the <see cref="ImageStatisticsYCbCr"/> class,
/// gathering statistics over the entire image.
/// </summary>
/// <param name="image">Image to gather statistics about.</param>
public ImageStatisticsYCbCr(UnmanagedImage image)
{
    CheckSourceFormat(image.PixelFormat);
    // null mask: every pixel contributes to the statistics
    ProcessImage(image, null, 0);
}
/// <summary>
/// Reset motion detector to initial state.
/// </summary>
///
/// <remarks><para>Resets internal state and variables of motion detection algorithm.
/// Usually this is required to be done before processing new video source, but
/// may be also done at any time to restart motion detection algorithm.</para>
/// </remarks>
///
public void Reset( )
{
    // sync guards against Reset racing with concurrent frame processing
    lock ( sync )
    {
        // release every cached frame and null the reference so stale state
        // cannot leak into the next processing run
        if ( previousFrame != null )
        {
            previousFrame.Dispose( );
            previousFrame = null;
        }

        if ( motionFrame != null )
        {
            motionFrame.Dispose( );
            motionFrame = null;
        }

        if ( tempFrame != null )
        {
            tempFrame.Dispose( );
            tempFrame = null;
        }
    }
}
/// <summary>
/// Process the filter on the specified image.
/// </summary>
///
/// <param name="sourceData">Source image data.</param>
/// <param name="destinationData">Destination image data.</param>
///
protected override void ProcessFilter(UnmanagedImage sourceData, UnmanagedImage destinationData)
{
    // all the work is delegated to the pre-configured convolution filter
    convolution.Apply(sourceData, destinationData);
}
/// <summary>
/// Process image looking for matchings with specified template.
/// </summary>
///
/// <param name="image">Unmanaged source image to process.</param>
/// <param name="template">Unmanaged template image to search for.</param>
///
/// <returns>Returns array of found template matches. The array is sorted by similarity
/// of found matches in descending order.</returns>
///
/// <exception cref="UnsupportedImageFormatException">The source image has incorrect pixel format.</exception>
/// <exception cref="InvalidImagePropertiesException">Template image is bigger than source image.</exception>
///
public TemplateMatch[] ProcessImage( UnmanagedImage image, UnmanagedImage template )
{
    // delegate to the rectangle overload, searching the entire source image
    Rectangle searchZone = new Rectangle( 0, 0, image.Width, image.Height );
    return ProcessImage( image, template, searchZone );
}
/// <summary>
/// Gather vertical intensity statistics for specified image.
/// </summary>
///
/// <param name="image">Source image.</param>
///
/// <remarks>Fills the <c>gray</c> histogram field for grayscale images, or the
/// <c>red</c>/<c>green</c>/<c>blue</c> histogram fields for color images. Each
/// histogram bin holds the sum of channel values over one image row.</remarks>
private void ProcessImage(UnmanagedImage image)
{
    var pixelFormat = image.PixelFormat;

    // get image dimension
    var width = image.Width;
    var height = image.Height;

    // discard any histograms left over from a previous run
    red = green = blue = gray = null;

    // do the job
    unsafe
    {
        // check pixel format
        if (pixelFormat == PixelFormat.Format8bppIndexed)
        {
            // 8 bpp grayscale image
            var p = (byte *)image.ImageData.ToPointer();
            var offset = image.Stride - width; // padding bytes at the end of each row

            // histogram array - one entry (row sum) per image row
            var g = new int[height];

            // for each row
            for (var y = 0; y < height; y++)
            {
                var lineSum = 0;

                // for each pixel
                for (var x = 0; x < width; x++, p++)
                {
                    lineSum += *p;
                }
                g[y] = lineSum;

                p += offset; // skip the row padding
            }

            // create histogram for gray level
            gray = new Histogram(g);
        }
        else if (pixelFormat == PixelFormat.Format16bppGrayScale)
        {
            // 16 bpp grayscale image - re-derive each row pointer from the
            // stride, since stride is expressed in bytes, not ushorts
            var basePtr = (byte *)image.ImageData.ToPointer();
            var stride = image.Stride;

            // histogram array - one entry (row sum) per image row
            var g = new int[height];

            // for each row
            for (var y = 0; y < height; y++)
            {
                var p = (ushort *)(basePtr + stride * y);
                var lineSum = 0;

                // for each pixel
                for (var x = 0; x < width; x++, p++)
                {
                    lineSum += *p;
                }
                g[y] = lineSum;
            }

            // create histogram for gray level
            gray = new Histogram(g);
        }
        else if (
            (pixelFormat == PixelFormat.Format24bppRgb) ||
            (pixelFormat == PixelFormat.Format32bppRgb) ||
            (pixelFormat == PixelFormat.Format32bppArgb))
        {
            // 24/32 bpp color image
            var p = (byte *)image.ImageData.ToPointer();
            var pixelSize = (pixelFormat == PixelFormat.Format24bppRgb) ? 3 : 4;
            var offset = image.Stride - width * pixelSize;

            // histogram arrays - per-row channel sums
            var r = new int[height];
            var g = new int[height];
            var b = new int[height];

            // for each line
            for (var y = 0; y < height; y++)
            {
                var lineRSum = 0;
                var lineGSum = 0;
                var lineBSum = 0;

                // for each pixel
                for (var x = 0; x < width; x++, p += pixelSize)
                {
                    lineRSum += p[RGB.R];
                    lineGSum += p[RGB.G];
                    lineBSum += p[RGB.B];
                }
                r[y] = lineRSum;
                g[y] = lineGSum;
                b[y] = lineBSum;

                p += offset; // skip the row padding
            }

            // create histograms
            red = new Histogram(r);
            green = new Histogram(g);
            blue = new Histogram(b);
        }
        else if (
            (pixelFormat == PixelFormat.Format48bppRgb) ||
            (pixelFormat == PixelFormat.Format64bppArgb))
        {
            // 48/64 bpp color image - 16 bits per channel, so rows are
            // re-derived from the byte stride and walked as ushorts
            var basePtr = (byte *)image.ImageData.ToPointer();
            var stride = image.Stride;
            var pixelSize = (pixelFormat == PixelFormat.Format48bppRgb) ? 3 : 4;

            // histogram arrays - per-row channel sums
            var r = new int[height];
            var g = new int[height];
            var b = new int[height];

            // for each line
            for (var y = 0; y < height; y++)
            {
                var p = (ushort *)(basePtr + stride * y);
                var lineRSum = 0;
                var lineGSum = 0;
                var lineBSum = 0;

                // for each pixel
                for (var x = 0; x < width; x++, p += pixelSize)
                {
                    lineRSum += p[RGB.R];
                    lineGSum += p[RGB.G];
                    lineBSum += p[RGB.B];
                }
                r[y] = lineRSum;
                g[y] = lineGSum;
                b[y] = lineBSum;
            }

            // create histograms
            red = new Histogram(r);
            green = new Histogram(g);
            blue = new Histogram(b);
        }
        // NOTE(review): any other pixel format silently leaves all histograms
        // null - confirm callers handle that case
    }
}
/// <summary>
/// Process image looking for matchings with specified template.
/// </summary>
///
/// <param name="image">Unmanaged source image to process.</param>
/// <param name="template">Unmanaged template image to search for.</param>
/// <param name="searchZone">Rectangle in source image to search template for.</param>
///
/// <returns>Returns array of found template matches. The array is sorted by similarity
/// of found matches in descending order.</returns>
///
/// <exception cref="UnsupportedImageFormatException">The source image has incorrect pixel format.</exception>
/// <exception cref="InvalidImagePropertiesException">Template image is bigger than search zone.</exception>
///
public TemplateMatch[] ProcessImage( UnmanagedImage image, UnmanagedImage template, Rectangle searchZone )
{
    // check image format: only 8 bpp grayscale and 24 bpp color are supported,
    // and source and template formats must match
    if ( ( ( image.PixelFormat != PixelFormat.Format8bppIndexed ) &&
           ( image.PixelFormat != PixelFormat.Format24bppRgb ) ) ||
         ( image.PixelFormat != template.PixelFormat ) )
    {
        throw new UnsupportedImageFormatException( "Unsupported pixel format of the source or template image." );
    }

    // clip search zone to the image bounds
    Rectangle zone = searchZone;
    zone.Intersect( new Rectangle( 0, 0, image.Width, image.Height ) );

    // search zone's starting point
    int startX = zone.X;
    int startY = zone.Y;

    // get source and template image size
    int sourceWidth = zone.Width;
    int sourceHeight = zone.Height;
    int templateWidth = template.Width;
    int templateHeight = template.Height;

    // check template's size
    if ( ( templateWidth > sourceWidth ) || ( templateHeight > sourceHeight ) )
    {
        throw new InvalidImagePropertiesException( "Template's size should be smaller or equal to search zone." );
    }

    int pixelSize = ( image.PixelFormat == PixelFormat.Format8bppIndexed ) ? 1 : 3;
    int sourceStride = image.Stride;

    // similarity map. its size is increased by 4 from each side (2-cell border)
    // to simplify the 5x5 window of the non-maximum suppression pass below
    int mapWidth = sourceWidth - templateWidth + 1;
    int mapHeight = sourceHeight - templateHeight + 1;
    int[,] map = new int[mapHeight + 4, mapWidth + 4];

    // maximum possible difference with template (every byte differs by 255)
    int maxDiff = templateWidth * templateHeight * pixelSize * 255;

    // integer similarity threshold
    int threshold = (int) ( similarityThreshold * maxDiff );

    // width of template in bytes
    int templateWidthInBytes = templateWidth * pixelSize;

    // do the job
    unsafe
    {
        byte* baseSrc = (byte*) image.ImageData.ToPointer( );
        byte* baseTpl = (byte*) template.ImageData.ToPointer( );

        // bytes to skip to get from the end of a template-wide row to the
        // start of the next row in each image
        int sourceOffset = image.Stride - templateWidth * pixelSize;
        int templateOffset = template.Stride - templateWidth * pixelSize;

        // for each row of the source image
        for ( int y = 0; y < mapHeight; y++ )
        {
            // for each pixel of the source image
            for ( int x = 0; x < mapWidth; x++ )
            {
                byte* src = baseSrc + sourceStride * ( y + startY ) + pixelSize * ( x + startX );
                byte* tpl = baseTpl;

                // compare template with source image starting from current X,Y;
                // dif accumulates the sum of absolute byte differences
                int dif = 0;

                // for each row of the template
                for ( int i = 0; i < templateHeight; i++ )
                {
                    // for each pixel of the template
                    for ( int j = 0; j < templateWidthInBytes; j++, src++, tpl++ )
                    {
                        int d = *src - *tpl;
                        if ( d > 0 )
                        {
                            dif += d;
                        }
                        else
                        {
                            dif -= d;
                        }
                    }
                    src += sourceOffset;
                    tpl += templateOffset;
                }

                // templates similarity (higher is better)
                int sim = maxDiff - dif;

                // record only candidates at or above the threshold; +2 offsets
                // place the value inside the map's suppression border
                if ( sim >= threshold )
                    map[y + 2, x + 2] = sim;
            }
        }
    }

    // collect interesting points - only those points, which are local maximums
    List<TemplateMatch> matchingsList = new List<TemplateMatch>( );

    // for each row
    for ( int y = 2, maxY = mapHeight + 2; y < maxY; y++ )
    {
        // for each pixel
        for ( int x = 2, maxX = mapWidth + 2; x < maxX; x++ )
        {
            int currentValue = map[y, x];

            // for each window's row; the outer loop condition re-checks
            // currentValue, so zeroing it below terminates the whole scan
            for ( int i = -2; ( currentValue != 0 ) && ( i <= 2 ); i++ )
            {
                // for each window's pixel
                for ( int j = -2; j <= 2; j++ )
                {
                    if ( map[y + i, x + j] > currentValue )
                    {
                        currentValue = 0;
                        break;
                    }
                }
            }

            // check if this point is really interesting (a local maximum)
            if ( currentValue != 0 )
            {
                matchingsList.Add( new TemplateMatch(
                    new Rectangle( x - 2 + startX, y - 2 + startY, templateWidth, templateHeight ),
                    (float) currentValue / maxDiff ) );
            }
        }
    }

    // convert list to array
    TemplateMatch[] matchings = new TemplateMatch[matchingsList.Count];
    matchingsList.CopyTo( matchings );

    // sort in descending order of similarity
    Array.Sort( matchings, new MatchingsSorter( ) );

    return matchings;
}
/// <summary>
/// Apply rectangle marker
/// </summary>
/// <param name="marker">Accord.Imaging.Filters.RectanglesMarker</param>
/// <param name="um">AForge.Imaging.UnmanagedImage</param>
/// <param name="rect">Rectangle</param>
public static void Set(this Accord.Imaging.Filters.RectanglesMarker marker, ref AForge.Imaging.UnmanagedImage um, Rectangle rect)
{
    // mark exactly one rectangle, then draw it onto the image in place
    marker.Rectangles = new[] { rect };
    marker.ApplyInPlace(um);
}
/// <summary>
/// Handles a new frame arriving from the video source: throttles to the target
/// frame rate, resizes/rotates/masks the frame, runs the plugin and motion
/// detector, applies fisheye correction and digital zoom, then raises the
/// <c>NewFrame</c> event and, if motion was detected, triggers detection.
/// </summary>
private void VideoNewFrame(object sender, NewFrameEventArgs e)
{
    var nf = NewFrame;
    var f = e.Frame;
    // nothing to do when stopping, nobody listens, or the frame is missing
    if (_requestedToStop || nf == null || f == null)
        return;
    // frame-rate throttle: drop frames arriving before the next target time
    if (_lastframeEvent > DateTime.MinValue)
    {
        if ((Helper.Now < _nextFrameTarget))
        {
            return;
        }
        CalculateFramerates();
    }
    _lastframeEvent = Helper.Now;

    Bitmap bmOrig = null;
    bool bMotion = false;
    lock (_sync)
    {
        try
        {
            bmOrig = ResizeBmOrig(f);

            if (RotateFlipType != RotateFlipType.RotateNoneFlipNone)
            {
                bmOrig.RotateFlip(RotateFlipType);
            }

            _width = bmOrig.Width;
            _height = bmOrig.Height;

            // default the zoom/fisheye center to the middle of the frame
            if (ZPoint == Point.Empty)
            {
                ZPoint = new Point(bmOrig.Width / 2, bmOrig.Height / 2);
            }

            if (CW.NeedMotionZones)
                CW.NeedMotionZones = !SetMotionZones(CW.Camobject.detector.motionzones);

            if (Mask != null)
            {
                ApplyMask(bmOrig);
            }

            // run the alert plugin only when alerts are active and wired up
            if (CW.Camobject.alerts.active && Plugin != null && Alarm != null)
            {
                bmOrig = RunPlugin(bmOrig);
            }

            var bmd = bmOrig.LockBits(new Rectangle(0, 0, bmOrig.Width, bmOrig.Height), ImageLockMode.ReadWrite, bmOrig.PixelFormat);

            //this converts the image into a windows displayable image so do it regardless
            using (var lfu = new UnmanagedImage(bmd))
            {
                if (_motionDetector != null)
                {
                    bMotion = ApplyMotionDetector(lfu);
                }
                else
                {
                    MotionDetected = false;
                }

                if (CW.Camobject.settings.FishEyeCorrect)
                {
                    _feCorrect.Correct(lfu, CW.Camobject.settings.FishEyeFocalLengthPX,
                        CW.Camobject.settings.FishEyeLimit, CW.Camobject.settings.FishEyeScale,
                        ZPoint.X, ZPoint.Y);
                }

                // digital zoom: crop the view rectangle and scale it back up in place
                if (ZFactor > 1)
                {
                    var f1 = new ResizeNearestNeighbor(lfu.Width, lfu.Height);
                    var f2 = new Crop(ViewRectangle);
                    try
                    {
                        using (var imgTemp = f2.Apply(lfu))
                        {
                            f1.Apply(imgTemp, lfu);
                        }
                    }
                    catch (Exception ex)
                    {
                        ErrorHandler?.Invoke(ex.Message);
                    }
                }
            }
            bmOrig.UnlockBits(bmd);
            PiP(bmOrig);
            AddTimestamp(bmOrig);
        }
        catch (UnsupportedImageFormatException ex)
        {
            // flag the source as broken so the UI can report it
            CW.VideoSourceErrorState = true;
            CW.VideoSourceErrorMessage = ex.Message;
            bmOrig?.Dispose();
            return;
        }
        catch (Exception ex)
        {
            bmOrig?.Dispose();
            ErrorHandler?.Invoke(ex.Message);
            return;
        }

        // PTZ auto-tracking only applies to the blob-counting algorithm and
        // must not fight manual navigation or calibration
        if (MotionDetector != null && !CW.Calibrating &&
            MotionDetector.MotionProcessingAlgorithm is BlobCountingObjectsProcessing &&
            !CW.PTZNavigate && CW.Camobject.settings.ptzautotrack)
        {
            try
            {
                ProcessAutoTracking();
            }
            catch (Exception ex)
            {
                ErrorHandler?.Invoke(ex.Message);
            }
        }
    }

    // publish the processed frame outside the lock
    if (!_requestedToStop)
    {
        nf.Invoke(this, new NewFrameEventArgs(bmOrig));
    }
    if (bMotion)
    {
        TriggerDetect(this);
    }
}
/// <summary>
/// Allocate new image in unmanaged memory.
/// </summary>
///
/// <param name="width">Image width.</param>
/// <param name="height">Image height.</param>
/// <param name="pixelFormat">Image pixel format.</param>
///
/// <returns>Return image allocated in unmanaged memory.</returns>
///
/// <remarks><para>Allocate new image with specified attributes in unmanaged memory.</para>
///
/// <para><note>The method supports only
/// <see cref="System.Drawing.Imaging.PixelFormat">Format8bppIndexed</see>,
/// <see cref="System.Drawing.Imaging.PixelFormat">Format16bppGrayScale</see>,
/// <see cref="System.Drawing.Imaging.PixelFormat">Format24bppRgb</see>,
/// <see cref="System.Drawing.Imaging.PixelFormat">Format32bppRgb</see>,
/// <see cref="System.Drawing.Imaging.PixelFormat">Format32bppArgb</see>,
/// <see cref="System.Drawing.Imaging.PixelFormat">Format32bppPArgb</see>,
/// <see cref="System.Drawing.Imaging.PixelFormat">Format48bppRgb</see>,
/// <see cref="System.Drawing.Imaging.PixelFormat">Format64bppArgb</see> and
/// <see cref="System.Drawing.Imaging.PixelFormat">Format64bppPArgb</see> pixel formats.
/// In the case if <see cref="System.Drawing.Imaging.PixelFormat">Format8bppIndexed</see>
/// format is specified, palette is not created for the image (supposed that it is
/// 8 bpp grayscale image).
/// </note></para>
/// </remarks>
///
/// <exception cref="UnsupportedImageFormatException">Unsupported pixel format was specified.</exception>
/// <exception cref="InvalidImagePropertiesException">Invalid image size was specified.</exception>
///
public static UnmanagedImage Create( int width, int height, PixelFormat pixelFormat )
{
    int bytesPerPixel;

    // map the pixel format to its storage size; anything else is rejected
    switch ( pixelFormat )
    {
        case PixelFormat.Format8bppIndexed:
            bytesPerPixel = 1;
            break;
        case PixelFormat.Format16bppGrayScale:
            bytesPerPixel = 2;
            break;
        case PixelFormat.Format24bppRgb:
            bytesPerPixel = 3;
            break;
        case PixelFormat.Format32bppRgb:
        case PixelFormat.Format32bppArgb:
        case PixelFormat.Format32bppPArgb:
            bytesPerPixel = 4;
            break;
        case PixelFormat.Format48bppRgb:
            bytesPerPixel = 6;
            break;
        case PixelFormat.Format64bppArgb:
        case PixelFormat.Format64bppPArgb:
            bytesPerPixel = 8;
            break;
        default:
            throw new UnsupportedImageFormatException( "Can not create image with specified pixel format." );
    }

    // check image size
    if ( ( width <= 0 ) || ( height <= 0 ) )
    {
        throw new InvalidImagePropertiesException( "Invalid image size specified." );
    }

    // calculate stride: row size padded up to a multiple of 4 bytes
    int stride = width * bytesPerPixel;
    int remainder = stride % 4;
    if ( remainder != 0 )
    {
        stride += ( 4 - remainder );
    }

    // allocate memory for the image and zero it out
    IntPtr imageData = System.Runtime.InteropServices.Marshal.AllocHGlobal( stride * height );
    AForge.SystemTools.SetUnmanagedMemory( imageData, 0, stride * height );

    // the returned image owns its buffer and must free it on dispose
    UnmanagedImage image = new UnmanagedImage( imageData, width, height, stride, pixelFormat );
    image.mustBeDisposed = true;

    return image;
}
/// <summary>
/// Process new video frame.
/// </summary>
///
/// <param name="videoFrame">Video frame to process (detect motion in).</param>
///
/// <remarks><para>Processes new frame from video source and detects motion in it.</para>
///
/// <para>Check <see cref="MotionLevel"/> property to get information about amount of motion
/// (changes) in the processed frame.</para>
/// </remarks>
///
public unsafe void ProcessFrame( UnmanagedImage videoFrame )
{
    lock ( sync )
    {
        // first frame ever: allocate buffers, store it as "previous" and return
        if ( previousFrame == null )
        {
            // save image dimension
            width = videoFrame.Width;
            height = videoFrame.Height;

            // allocate memory for previous and current frames
            previousFrame = UnmanagedImage.Create( width, height, PixelFormat.Format8bppIndexed );
            motionFrame = UnmanagedImage.Create( width, height, PixelFormat.Format8bppIndexed );

            frameSize = motionFrame.Stride * height;

            // temporary buffer used by the noise-suppression erosion pass
            if ( suppressNoise )
            {
                tempFrame = UnmanagedImage.Create( width, height, PixelFormat.Format8bppIndexed );
            }

            // convert source frame to grayscale
            Tools.ConvertToGrayscale( videoFrame, previousFrame );

            return;
        }

        // ignore frames whose dimensions differ from the first frame
        if ( ( videoFrame.Width != width ) || ( videoFrame.Height != height ) )
            return;

        // convert current image to grayscale
        Tools.ConvertToGrayscale( videoFrame, motionFrame );

        // pointers to previous and current frames
        byte* prevFrame = (byte*) previousFrame.ImageData.ToPointer( );
        byte* currFrame = (byte*) motionFrame.ImageData.ToPointer( );
        // difference value
        int diff;

        // single pass over every byte of the frame:
        // 1 - get difference between frames
        // 2 - threshold the difference (255 = motion, 0 = no motion)
        // 3 - copy current frame to previous frame
        for ( int i = 0; i < frameSize; i++, prevFrame++, currFrame++ )
        {
            // difference
            diff = (int) *currFrame - (int) *prevFrame;
            // copy current frame to previous
            *prevFrame = *currFrame;
            // threshold
            *currFrame = ( ( diff >= differenceThreshold ) || ( diff <= differenceThresholdNeg ) ) ? (byte) 255 : (byte) 0;
        }

        if ( suppressNoise )
        {
            // suppress noise: erode a copy of the motion frame back into it
            AForge.SystemTools.CopyUnmanagedMemory( tempFrame.ImageData, motionFrame.ImageData, frameSize );
            erosionFilter.Apply( tempFrame, motionFrame );
        }

        // calculate amount of motion pixels (each motion byte is 255, so
        // the lowest bit is 1 exactly for motion pixels)
        pixelsChanged = 0;
        byte* motion = (byte*) motionFrame.ImageData.ToPointer( );

        for ( int i = 0; i < frameSize; i++, motion++ )
        {
            pixelsChanged += ( *motion & 1 );
        }
    }
}
/// <summary>
/// Clone the unmanaged images.
/// </summary>
///
/// <returns>Returns clone of the unmanaged image.</returns>
///
/// <remarks><para>The method does complete cloning of the object.</para></remarks>
///
public UnmanagedImage Clone( )
{
    int imageSize = stride * height;

    // allocate a fresh unmanaged buffer and copy the raw pixel data into it
    IntPtr clonedData = System.Runtime.InteropServices.Marshal.AllocHGlobal( imageSize );
    AForge.SystemTools.CopyUnmanagedMemory( clonedData, imageData, imageSize );

    // the clone owns its buffer and must free it when disposed
    UnmanagedImage clone = new UnmanagedImage( clonedData, width, height, stride, pixelFormat );
    clone.mustBeDisposed = true;

    return clone;
}
/// <summary>
/// Batch-analyzes muffin images from six fixed batch folders: for each image it
/// counts blobs, computes per-blob variance statistics, renders an annotated
/// overlay into <c>imgGray</c>, and appends one row per image to Data.csv.
/// </summary>
/// <remarks>
/// NOTE(review): the <c>Graphics</c> object and each <c>new Font("Arial", 5)</c> are
/// never disposed — verify whether this leaks GDI handles over a long batch run.
/// NOTE(review): when <c>bc.ObjectsCount</c> is 0, <c>blobPt</c> becomes -1 and the
/// later <c>breadBlob1[blobPt]</c> accesses will throw — confirm inputs always
/// contain at least one blob.
/// NOTE(review): paths are hard-coded to one developer's machine.
/// </remarks>
private void btnMassAnalysis_Click(object sender, RoutedEventArgs e)
{
    Stopwatch stopwatch = new Stopwatch();
    stopwatch.Start();

    // six fixed batch folders: three "Top" and three "Bottom"
    for (int index = 0; index < 6; index++)
    {
        string folderSettingText = "";
        switch (index)
        {
            case 0:
                folderSettingText = "Batch 1\\All Top";
                break;
            case 1:
                folderSettingText = "Batch 2\\All Top";
                break;
            case 2:
                folderSettingText = "Batch 3\\All Top";
                break;
            case 3:
                folderSettingText = "Batch 1\\All Bottom";
                break;
            case 4:
                folderSettingText = "Batch 2\\All Bottom";
                break;
            case 5:
                folderSettingText = "Batch 3\\All Bottom";
                break;
            default:
                folderSettingText = "";
                break;
        }

        string[] filename = { "" };
        //string path = @"E:\Brian\Project 3 - English Muffin Onsite Data Gathering\SK Foods On-Site Scan\English Muffin\Batch 3\All Bottom";
        string path = "C:\\Users\\kai23\\Projects\\ABI\\EnglishMuffinVision_AForge\\Images\\English Muffin\\" + folderSettingText;
        string searchPattern = "æKatanaScoring_CameraImageGray1*";

        try
        {
            filename = Directory.GetFiles(path, searchPattern, SearchOption.AllDirectories);
        }
        catch (UnauthorizedAccessException)
        {
            // best effort: skip folders this process is not allowed to read
        }

        foreach (string f in filename)
        {
            if (f != null)
            {
                GrayScaleImage = AForge.Imaging.Image.FromFile(f);

                // extract the sub-folder portion of the path for display
                int startPos = f.LastIndexOf("SK Foods On-Site Scan") + "SK Foods On-Site Scan".Length + 1;
                int length = f.IndexOf("æ") - startPos - 1;
                string sub = f.Substring(startPos, length);
                lblFolder.Content = sub;

                //AForge.Imaging.UnmanagedImage unmanagedImage1 = AForge.Imaging.UnmanagedImage.FromManagedImage(GrayScaleImage);
                //Bitmap managedImage = unmanagedImage1.ToManagedImage();
                //BitmapImage GrayImage_temp = ToBitmapImage(managedImage);
                //imgGray.Source = GrayImage_temp;
                //Stopwatch stopwatch = new Stopwatch();
                //stopwatch.Start();

                AForge.Imaging.UnmanagedImage unmanagedImage1 = AForge.Imaging.UnmanagedImage.FromManagedImage(GrayScaleImage);

                // blob detection constrained to 30x30 .. 100x100 blobs
                AForge.Imaging.BlobCounter bc = new AForge.Imaging.BlobCounter
                {
                    CoupledSizeFiltering = true,
                    FilterBlobs = true,
                    MinHeight = 30,
                    MinWidth = 30,
                    MaxHeight = 100,
                    MaxWidth = 100
                };
                bc.ProcessImage(GrayScaleImage);
                lblBlobCount.Content = bc.ObjectsCount;

                // index map: a copy of the image blacked out, used as a canvas
                // for the blob index labels
                Bitmap indexMap = AForge.Imaging.Image.Clone(GrayScaleImage);
                for (int x = 0; x < indexMap.Width; x++)
                {
                    for (int y = 0; y < indexMap.Height; y++)
                    {
                        indexMap.SetPixel(x, y, System.Drawing.Color.Black);
                    }
                }

                System.Drawing.Rectangle[] rects = bc.GetObjectsRectangles();

                // process blobs
                BreadBlob[] breadBlob1 = new BreadBlob[bc.ObjectsCount];
                int blobArrayIndex = 0;
                int blobPt = Convert.ToInt16(txbBlobNum.Text);
                int blobThreshold = Convert.ToInt16(txbBlobThreshold.Text);

                // clamp the selected blob index to the number of blobs found
                // (becomes -1 when no blobs were found - see NOTE(review) above)
                if (blobPt >= bc.ObjectsCount)
                {
                    blobPt = bc.ObjectsCount - 1;
                }

                StaticsCalculator MuffinStatistics = new StaticsCalculator();
                Graphics g = Graphics.FromImage(indexMap);

                foreach (System.Drawing.Rectangle rect in rects)
                {
                    //initialize Object
                    breadBlob1[blobArrayIndex] = new BreadBlob();
                    breadBlob1[blobArrayIndex].TopDownThreshold = blobThreshold;

                    // copy the blob's pixels (green channel of the grayscale
                    // image) into a 2D byte array for analysis
                    byte[,] blobArray = new byte[rect.Width, rect.Height];
                    for (int x = rect.Left; x < rect.Right; x++)
                    {
                        for (int y = rect.Top; y < rect.Bottom; y++)
                        {
                            System.Drawing.Color tempPixelColor = GrayScaleImage.GetPixel(x, y);
                            blobArray[x - rect.Left, y - rect.Top] = tempPixelColor.G;
                        }
                    }

                    breadBlob1[blobArrayIndex].PixelArray = blobArray;
                    breadBlob1[blobArrayIndex].X = rect.X;
                    breadBlob1[blobArrayIndex].Y = rect.Y;
                    MuffinStatistics.Add(breadBlob1[blobArrayIndex].Variance.QAverage);

                    // highlight the user-selected blob with a slightly
                    // enlarged yellow rectangle
                    if (blobArrayIndex == blobPt)
                    {
                        System.Drawing.Rectangle tempRect = rect;
                        tempRect.X -= 1;
                        tempRect.Y -= 1;
                        tempRect.Width += 2;
                        tempRect.Height += 2;
                        AForge.Imaging.Drawing.Rectangle(unmanagedImage1, tempRect, System.Drawing.Color.Yellow);
                    }

                    // green = classified as "top", red = "bottom"
                    if (breadBlob1[blobArrayIndex].IsTop())
                    {
                        AForge.Imaging.Drawing.Rectangle(unmanagedImage1, rect, System.Drawing.Color.Green);
                    }
                    else
                    {
                        AForge.Imaging.Drawing.Rectangle(unmanagedImage1, rect, System.Drawing.Color.Red);
                    }

                    // draw the blob's index number onto the index map
                    RectangleF rectf = new RectangleF(rect.X, rect.Y, rect.Width, rect.Height);
                    g.SmoothingMode = SmoothingMode.AntiAlias;
                    g.InterpolationMode = InterpolationMode.HighQualityBicubic;
                    g.PixelOffsetMode = PixelOffsetMode.HighQuality;
                    g.DrawString(Convert.ToString(blobArrayIndex), new Font("Arial", 5), System.Drawing.Brushes.White, rectf);

                    lblBlobHeight.Content = rect.Height;
                    lblBlobWidth.Content = rect.Width;
                    blobArrayIndex++;
                }

                BitmapImage indexMap_temp = ToBitmapImage(indexMap);
                g.Flush();

                // convert to managed image if it is required to display it at some point of time
                Bitmap managedImage = unmanagedImage1.ToManagedImage();

                // create filter: overlay the index-number map onto the annotated image
                Add filter = new Add(indexMap);
                // apply the filter
                Bitmap resultImage = filter.Apply(managedImage);
                BitmapImage GrayImage_temp = ToBitmapImage(resultImage);
                imgGray.Source = GrayImage_temp;

                lblLib.Content = "AForge";
                lblVariance.Content = breadBlob1[blobPt].GetVariance(BreadBlob.VarianceType.All);
                lblX.Content = breadBlob1[blobPt].X;
                lblY.Content = breadBlob1[blobPt].Y;
                lblQ1Variance.Content = breadBlob1[blobPt].GetVariance(BreadBlob.VarianceType.Q1);
                lblQ2Variance.Content = breadBlob1[blobPt].GetVariance(BreadBlob.VarianceType.Q2);
                lblQ3Variance.Content = breadBlob1[blobPt].GetVariance(BreadBlob.VarianceType.Q3);
                lblQ4Variance.Content = breadBlob1[blobPt].GetVariance(BreadBlob.VarianceType.Q4);
                lblQAverage.Content = breadBlob1[blobPt].GetVariance(BreadBlob.VarianceType.QAverage);
                lblAllMuffinStat.Content = MuffinStatistics.StandardDeviation;

                // System.IO.File.WriteAllLines(@"C:\Users\Public\TestFolder\Histogram.txt", GrayImage1Histogram_str);
                // E:\Brian\Project 3 - English Muffin Onsite Data Gathering\Data Analysis
                //System.IO.File.WriteAllLines(@"E:\Brian\Project 3 - English Muffin Onsite Data Gathering\Data Analysis\Histogram.txt", GrayImage1Histogram_str);

                // append one CSV row per image; write the header only when the
                // file does not exist yet
                bool fileExist = File.Exists("C:\\Users\\kai23\\Projects\\ABI\\EnglishMuffinVision_AForge\\Data Analysis\\Data.csv");
                using (System.IO.StreamWriter file = new System.IO.StreamWriter(@"C:\Users\kai23\Projects\ABI\EnglishMuffinVision_AForge\Data Analysis\Data.csv", true))
                {
                    if (!fileExist)
                    {
                        file.WriteLine("File Info," + "Variance All," + "Vari Q1:," + "Vari Q2:," + "Vari Q3:," + "Vari Q4:," +
                            "Variance Average:," + "S1," + "S2," + "S3," + "S4," + "S5," + "S6," + "S7," + "S8," + "S9," + "Savg," +
                            "L1," + "L2," + "L3," + "L4," + "L5," + "L6," + "L7," + "L8," + "L9," + "L10," + "L11," + "L12," +
                            "L13," + "L14," + "L15," + "L16," + "Lavg");
                    }
                    file.WriteLine(Convert.ToString(lblFolder.Content) + "," +
                        Convert.ToString(breadBlob1[blobPt].GetVariance(BreadBlob.VarianceType.All)) + "," +
                        Convert.ToString(breadBlob1[blobPt].GetVariance(BreadBlob.VarianceType.Q1)) + "," +
                        Convert.ToString(breadBlob1[blobPt].GetVariance(BreadBlob.VarianceType.Q2)) + "," +
                        Convert.ToString(breadBlob1[blobPt].GetVariance(BreadBlob.VarianceType.Q3)) + "," +
                        Convert.ToString(breadBlob1[blobPt].GetVariance(BreadBlob.VarianceType.Q4)) + "," +
                        Convert.ToString(breadBlob1[blobPt].GetVariance(BreadBlob.VarianceType.QAverage)) + "," +
                        Convert.ToString(breadBlob1[blobPt].GetVariance(BreadBlob.VarianceType.S1)) + "," +
                        Convert.ToString(breadBlob1[blobPt].GetVariance(BreadBlob.VarianceType.S2)) + "," +
                        Convert.ToString(breadBlob1[blobPt].GetVariance(BreadBlob.VarianceType.S3)) + "," +
                        Convert.ToString(breadBlob1[blobPt].GetVariance(BreadBlob.VarianceType.S4)) + "," +
                        Convert.ToString(breadBlob1[blobPt].GetVariance(BreadBlob.VarianceType.S5)) + "," +
                        Convert.ToString(breadBlob1[blobPt].GetVariance(BreadBlob.VarianceType.S6)) + "," +
                        Convert.ToString(breadBlob1[blobPt].GetVariance(BreadBlob.VarianceType.S7)) + "," +
                        Convert.ToString(breadBlob1[blobPt].GetVariance(BreadBlob.VarianceType.S8)) + "," +
                        Convert.ToString(breadBlob1[blobPt].GetVariance(BreadBlob.VarianceType.S9)) + "," +
                        Convert.ToString(breadBlob1[blobPt].GetVariance(BreadBlob.VarianceType.Savg)) + "," +
                        Convert.ToString(breadBlob1[blobPt].GetVariance(BreadBlob.VarianceType.L1)) + "," +
                        Convert.ToString(breadBlob1[blobPt].GetVariance(BreadBlob.VarianceType.L2)) + "," +
                        Convert.ToString(breadBlob1[blobPt].GetVariance(BreadBlob.VarianceType.L3)) + "," +
                        Convert.ToString(breadBlob1[blobPt].GetVariance(BreadBlob.VarianceType.L4)) + "," +
                        Convert.ToString(breadBlob1[blobPt].GetVariance(BreadBlob.VarianceType.L5)) + "," +
                        Convert.ToString(breadBlob1[blobPt].GetVariance(BreadBlob.VarianceType.L6)) + "," +
                        Convert.ToString(breadBlob1[blobPt].GetVariance(BreadBlob.VarianceType.L7)) + "," +
                        Convert.ToString(breadBlob1[blobPt].GetVariance(BreadBlob.VarianceType.L8)) + "," +
                        Convert.ToString(breadBlob1[blobPt].GetVariance(BreadBlob.VarianceType.L9)) + "," +
                        Convert.ToString(breadBlob1[blobPt].GetVariance(BreadBlob.VarianceType.L10)) + "," +
                        Convert.ToString(breadBlob1[blobPt].GetVariance(BreadBlob.VarianceType.L11)) + "," +
                        Convert.ToString(breadBlob1[blobPt].GetVariance(BreadBlob.VarianceType.L12)) + "," +
                        Convert.ToString(breadBlob1[blobPt].GetVariance(BreadBlob.VarianceType.L13)) + "," +
                        Convert.ToString(breadBlob1[blobPt].GetVariance(BreadBlob.VarianceType.L14)) + "," +
                        Convert.ToString(breadBlob1[blobPt].GetVariance(BreadBlob.VarianceType.L15)) + "," +
                        Convert.ToString(breadBlob1[blobPt].GetVariance(BreadBlob.VarianceType.L16)) + "," +
                        Convert.ToString(breadBlob1[blobPt].GetVariance(BreadBlob.VarianceType.Lavg)));
                }
                //lblFolder.Content = "";
            }
        }
    }

    stopwatch.Stop();
    lblTime.Content = stopwatch.ElapsedMilliseconds;
}
/// <summary>
/// Builds Y/Cb/Cr histograms for the image, optionally restricted to pixels
/// whose mask byte is non-zero. Fills the six histogram fields and the
/// pixels/pixelsWithoutBlack counters.
/// </summary>
/// <param name="image">Source 24/32 bpp color image.</param>
/// <param name="mask">Pointer to mask data, or null to process every pixel.</param>
/// <param name="maskLineSize">Stride (bytes per row) of the mask data.</param>
private unsafe void ProcessImage(UnmanagedImage image, byte *mask, int maskLineSize)
{
    int width = image.Width;
    int height = image.Height;

    pixels = (pixelsWithoutBlack = 0);

    // histogram bins: array/array2/array3 = Y/Cb/Cr over all counted pixels,
    // array4/array5/array6 = the same excluding pure-black pixels
    int[] array = new int[256];
    int[] array2 = new int[256];
    int[] array3 = new int[256];
    int[] array4 = new int[256];
    int[] array5 = new int[256];
    int[] array6 = new int[256];

    // reusable conversion scratch objects
    RGB rGB = new RGB();
    YCbCr yCbCr = new YCbCr();

    // num = bytes per pixel; num2/num3 = row padding of image and mask
    int num = (image.PixelFormat == PixelFormat.Format24bppRgb) ? 3 : 4;
    int num2 = image.Stride - width * num;
    int num3 = maskLineSize - width;

    byte *ptr = (byte *)image.ImageData.ToPointer();

    if (mask == null)
    {
        // unmasked path: every pixel contributes
        for (int i = 0; i < height; i++)
        {
            int num4 = 0;
            while (num4 < width)
            {
                // pixel layout is BGR in memory
                rGB.Red = ptr[2];
                rGB.Green = ptr[1];
                rGB.Blue = *ptr;

                YCbCr.FromRGB(rGB, yCbCr);

                // Y is in [0,1]; Cb/Cr are in [-0.5,0.5], so shift before scaling
                array[(int)(yCbCr.Y * 255f)]++;
                array2[(int)(((double)yCbCr.Cb + 0.5) * 255.0)]++;
                array3[(int)(((double)yCbCr.Cr + 0.5) * 255.0)]++;
                pixels++;

                // second set of histograms ignores pure black (Y=Cb=Cr=0)
                if ((double)yCbCr.Y != 0.0 || (double)yCbCr.Cb != 0.0 || (double)yCbCr.Cr != 0.0)
                {
                    array4[(int)(yCbCr.Y * 255f)]++;
                    array5[(int)(((double)yCbCr.Cb + 0.5) * 255.0)]++;
                    array6[(int)(((double)yCbCr.Cr + 0.5) * 255.0)]++;
                    pixelsWithoutBlack++;
                }

                num4++;
                ptr += num;
            }
            ptr += num2; // skip row padding
        }
    }
    else
    {
        // masked path: only pixels with a non-zero mask byte contribute
        for (int j = 0; j < height; j++)
        {
            int num5 = 0;
            while (num5 < width)
            {
                if (*mask != 0)
                {
                    rGB.Red = ptr[2];
                    rGB.Green = ptr[1];
                    rGB.Blue = *ptr;

                    YCbCr.FromRGB(rGB, yCbCr);

                    array[(int)(yCbCr.Y * 255f)]++;
                    array2[(int)(((double)yCbCr.Cb + 0.5) * 255.0)]++;
                    array3[(int)(((double)yCbCr.Cr + 0.5) * 255.0)]++;
                    pixels++;

                    if ((double)yCbCr.Y != 0.0 || (double)yCbCr.Cb != 0.0 || (double)yCbCr.Cr != 0.0)
                    {
                        array4[(int)(yCbCr.Y * 255f)]++;
                        array5[(int)(((double)yCbCr.Cb + 0.5) * 255.0)]++;
                        array6[(int)(((double)yCbCr.Cr + 0.5) * 255.0)]++;
                        pixelsWithoutBlack++;
                    }
                }
                num5++;
                ptr += num;
                mask++;
            }
            ptr += num2;  // skip image row padding
            mask += num3; // skip mask row padding
        }
    }

    // publish the histograms with their natural value ranges
    yHistogram = new ContinuousHistogram(array, new Range(0f, 1f));
    cbHistogram = new ContinuousHistogram(array2, new Range(-0.5f, 0.5f));
    crHistogram = new ContinuousHistogram(array3, new Range(-0.5f, 0.5f));
    yHistogramWithoutBlack = new ContinuousHistogram(array4, new Range(0f, 1f));
    cbHistogramWithoutBlack = new ContinuousHistogram(array5, new Range(-0.5f, 0.5f));
    crHistogramWithoutBlack = new ContinuousHistogram(array6, new Range(-0.5f, 0.5f));
}
/// <summary>
/// Create unmanaged image from the specified managed image.
/// </summary>
///
/// <param name="imageData">Source locked image data.</param>
///
/// <returns>Returns new unmanaged image, which is a copy of source managed image.</returns>
///
/// <remarks><para>The method creates an exact copy of specified managed image, but allocated
/// in unmanaged memory. This means that managed image may be unlocked right after call to this
/// method.</para></remarks>
///
/// <exception cref="UnsupportedImageFormatException">Unsupported pixel format of source image.</exception>
///
public static UnmanagedImage FromManagedImage( BitmapData imageData )
{
    PixelFormat pixelFormat = imageData.PixelFormat;

    // check source pixel format - only these formats are supported
    switch ( pixelFormat )
    {
        case PixelFormat.Format8bppIndexed:
        case PixelFormat.Format16bppGrayScale:
        case PixelFormat.Format24bppRgb:
        case PixelFormat.Format32bppRgb:
        case PixelFormat.Format32bppArgb:
        case PixelFormat.Format32bppPArgb:
        case PixelFormat.Format48bppRgb:
        case PixelFormat.Format64bppArgb:
        case PixelFormat.Format64bppPArgb:
            break;
        default:
            throw new UnsupportedImageFormatException( "Unsupported pixel format of the source image." );
    }

    int imageSize = imageData.Stride * imageData.Height;

    // allocate unmanaged memory and duplicate the locked pixel data into it
    IntPtr dstImageData = System.Runtime.InteropServices.Marshal.AllocHGlobal( imageSize );
    AForge.SystemTools.CopyUnmanagedMemory( dstImageData, imageData.Scan0, imageSize );

    // the copy owns its buffer and must free it when disposed
    UnmanagedImage image = new UnmanagedImage( dstImageData, imageData.Width, imageData.Height, imageData.Stride, pixelFormat );
    image.mustBeDisposed = true;

    return image;
}
/// <summary>
/// Runs blob detection on the loaded grayscale image, renders per-blob overlays,
/// updates the statistics labels, and appends the selected blob's variance
/// measurements to the Data.csv log file.
/// </summary>
/// <param name="sender">Event source (Calculate button).</param>
/// <param name="e">Routed event arguments.</param>
private void BtnCalculate_Click(object sender, RoutedEventArgs e)
{
    if (imageLoaded)
    {
        Stopwatch stopwatch = new Stopwatch();
        stopwatch.Start();

        AForge.Imaging.UnmanagedImage unmanagedImage1 = AForge.Imaging.UnmanagedImage.FromManagedImage(GrayScaleImage);

        AForge.Imaging.BlobCounter bc = new AForge.Imaging.BlobCounter
        {
            CoupledSizeFiltering = true,
            FilterBlobs = true,
            MinHeight = 30,
            MinWidth = 30,
            MaxHeight = 100,
            MaxWidth = 100
        };
        // NOTE(review): blobs are counted on the managed GrayScaleImage while the
        // rectangles below are drawn on unmanagedImage1 — confirm this is intentional.
        bc.ProcessImage(GrayScaleImage);
        lblBlobCount.Content = bc.ObjectsCount;

        // black canvas used to draw the blob index numbers
        Bitmap indexMap = AForge.Imaging.Image.Clone(GrayScaleImage);
        for (int x = 0; x < indexMap.Width; x++)
        {
            for (int y = 0; y < indexMap.Height; y++)
            {
                indexMap.SetPixel(x, y, System.Drawing.Color.Black);
            }
        }

        System.Drawing.Rectangle[] rects = bc.GetObjectsRectangles();

        // process blobs
        BreadBlob[] breadBlob1 = new BreadBlob[bc.ObjectsCount];
        int blobArrayIndex = 0;
        int blobPt = Convert.ToInt16(txbBlobNum.Text);
        int blobThreshold = Convert.ToInt16(txbBlobThreshold.Text);
        // clamp the selected blob index to the number of detected blobs
        // NOTE(review): if ObjectsCount == 0 this leaves blobPt == -1 and the
        // breadBlob1[blobPt] accesses below will throw — verify an empty frame is impossible here.
        if (blobPt >= bc.ObjectsCount)
        {
            blobPt = bc.ObjectsCount - 1;
        }

        StaticsCalculator MuffinStatistics = new StaticsCalculator();

        // FIX: Graphics and Font are GDI resources — dispose them instead of leaking
        // one Font per blob and the Graphics object itself.
        using (Graphics g = Graphics.FromImage(indexMap))
        using (Font indexFont = new Font("Arial", 5))
        {
            g.SmoothingMode = SmoothingMode.AntiAlias;
            g.InterpolationMode = InterpolationMode.HighQualityBicubic;
            g.PixelOffsetMode = PixelOffsetMode.HighQuality;

            foreach (System.Drawing.Rectangle rect in rects)
            {
                // initialize object
                breadBlob1[blobArrayIndex] = new BreadBlob();
                breadBlob1[blobArrayIndex].TopDownThreshold = blobThreshold;

                // copy the blob's pixels (green channel of the grayscale image) into a 2-D array
                byte[,] blobArray = new byte[rect.Width, rect.Height];
                for (int x = rect.Left; x < rect.Right; x++)
                {
                    for (int y = rect.Top; y < rect.Bottom; y++)
                    {
                        System.Drawing.Color tempPixelColor = GrayScaleImage.GetPixel(x, y);
                        blobArray[x - rect.Left, y - rect.Top] = tempPixelColor.G;
                    }
                }

                breadBlob1[blobArrayIndex].PixelArray = blobArray;
                breadBlob1[blobArrayIndex].X = rect.X;
                breadBlob1[blobArrayIndex].Y = rect.Y;
                MuffinStatistics.Add(breadBlob1[blobArrayIndex].Variance.QAverage);

                // highlight the user-selected blob with a slightly larger yellow box
                if (blobArrayIndex == blobPt)
                {
                    System.Drawing.Rectangle tempRect = rect;
                    tempRect.X -= 1;
                    tempRect.Y -= 1;
                    tempRect.Width += 2;
                    tempRect.Height += 2;
                    AForge.Imaging.Drawing.Rectangle(unmanagedImage1, tempRect, System.Drawing.Color.Yellow);
                }

                // green = classified as top, red = otherwise
                if (breadBlob1[blobArrayIndex].IsTop())
                {
                    AForge.Imaging.Drawing.Rectangle(unmanagedImage1, rect, System.Drawing.Color.Green);
                }
                else
                {
                    AForge.Imaging.Drawing.Rectangle(unmanagedImage1, rect, System.Drawing.Color.Red);
                }

                // draw the blob's index number onto the overlay canvas
                RectangleF rectf = new RectangleF(rect.X, rect.Y, rect.Width, rect.Height);
                g.DrawString(Convert.ToString(blobArrayIndex), indexFont, System.Drawing.Brushes.White, rectf);

                lblBlobHeight.Content = rect.Height;
                lblBlobWidth.Content = rect.Width;
                blobArrayIndex++;
            }

            g.Flush();
        }

        // convert to managed image if it is required to display it at some point of time
        Bitmap managedImage = unmanagedImage1.ToManagedImage();

        // merge the index-number overlay into the annotated image
        Add filter = new Add(indexMap);
        Bitmap resultImage = filter.Apply(managedImage);

        BitmapImage GrayImage_temp = ToBitmapImage(resultImage);
        imgGray.Source = GrayImage_temp;

        stopwatch.Stop();
        lblTime.Content = stopwatch.ElapsedMilliseconds;

        lbl9var_1.Content = breadBlob1[blobPt].GetVariance(BreadBlob.VarianceType.S1);
        lbl9var_2.Content = breadBlob1[blobPt].GetVariance(BreadBlob.VarianceType.S2);
        lbl9var_3.Content = breadBlob1[blobPt].GetVariance(BreadBlob.VarianceType.S3);
        lbl9var_4.Content = breadBlob1[blobPt].GetVariance(BreadBlob.VarianceType.S4);
        lbl9var_5.Content = breadBlob1[blobPt].GetVariance(BreadBlob.VarianceType.S5);
        lbl9var_6.Content = breadBlob1[blobPt].GetVariance(BreadBlob.VarianceType.S6);
        lbl9var_7.Content = breadBlob1[blobPt].GetVariance(BreadBlob.VarianceType.S7);
        lbl9var_8.Content = breadBlob1[blobPt].GetVariance(BreadBlob.VarianceType.S8);
        lbl9var_9.Content = breadBlob1[blobPt].GetVariance(BreadBlob.VarianceType.S9);
        lbl9var_avg.Content = breadBlob1[blobPt].GetVariance(BreadBlob.VarianceType.Savg);
        lblLib.Content = "AForge";
        lblVariance.Content = breadBlob1[blobPt].GetVariance(BreadBlob.VarianceType.All);
        lblX.Content = breadBlob1[blobPt].X;
        lblY.Content = breadBlob1[blobPt].Y;
        lblQ1Variance.Content = breadBlob1[blobPt].GetVariance(BreadBlob.VarianceType.Q1);
        lblQ2Variance.Content = breadBlob1[blobPt].GetVariance(BreadBlob.VarianceType.Q2);
        lblQ3Variance.Content = breadBlob1[blobPt].GetVariance(BreadBlob.VarianceType.Q3);
        lblQ4Variance.Content = breadBlob1[blobPt].GetVariance(BreadBlob.VarianceType.Q4);
        lblQAverage.Content = breadBlob1[blobPt].GetVariance(BreadBlob.VarianceType.QAverage);
        lblAllMuffinStat.Content = MuffinStatistics.StandardDeviation;

        // System.IO.File.WriteAllLines(@"C:\Users\Public\TestFolder\Histogram.txt", GrayImage1Histogram_str);
        // E:\Brian\Project 3 - English Muffin Onsite Data Gathering\Data Analysis
        //System.IO.File.WriteAllLines(@"E:\Brian\Project 3 - English Muffin Onsite Data Gathering\Data Analysis\Histogram.txt", GrayImage1Histogram_str);

        // FIX: the existence check and the writer previously used two different paths
        // (@"...\Data Analysis\Data.csv" vs @"...\Data AnalysisData.csv"), so the
        // header logic never matched the file actually written. Use one path for both.
        string csvPath = @"C:\Users\kai23\Projects\ABI\EnglishMuffinVision_AForge\Data Analysis\Data.csv";
        bool fileExist = File.Exists(csvPath);

        using (System.IO.StreamWriter file = new System.IO.StreamWriter(csvPath, true))
        {
            if (!fileExist)
            {
                // FIX: header was missing the comma after "File Info", mis-aligning
                // the 7 data columns written below.
                file.WriteLine("File Info," +
                               "Variance All," +
                               "Vari Q1:," +
                               "Vari Q2:," +
                               "Vari Q3:," +
                               "Vari Q4:," +
                               "Variance Average:");
            }

            file.WriteLine(Convert.ToString(lblFolder.Content) + "," +
                           Convert.ToString(breadBlob1[blobPt].GetVariance(BreadBlob.VarianceType.All)) + "," +
                           Convert.ToString(breadBlob1[blobPt].GetVariance(BreadBlob.VarianceType.Q1)) + "," +
                           Convert.ToString(breadBlob1[blobPt].GetVariance(BreadBlob.VarianceType.Q2)) + "," +
                           Convert.ToString(breadBlob1[blobPt].GetVariance(BreadBlob.VarianceType.Q3)) + "," +
                           Convert.ToString(breadBlob1[blobPt].GetVariance(BreadBlob.VarianceType.Q4)) + "," +
                           Convert.ToString(breadBlob1[blobPt].GetVariance(BreadBlob.VarianceType.QAverage)));
        }
    }
}
/// <summary> /// Copy unmanaged image. /// </summary> /// /// <param name="destImage">Destination image to copy this image to.</param> /// /// <remarks><para>The method copies current unmanaged image to the specified image. /// Size and pixel format of the destination image must be exactly the same.</para></remarks> /// /// <exception cref="InvalidImagePropertiesException">Destination image has different size or pixel format.</exception> /// public void Copy( UnmanagedImage destImage ) { if ( ( width != destImage.width ) || ( height != destImage.height ) || ( pixelFormat != destImage.pixelFormat ) ) { throw new InvalidImagePropertiesException( "Destination image has different size or pixel format." ); } if ( stride == destImage.stride ) { // copy entire image AForge.SystemTools.CopyUnmanagedMemory( destImage.imageData, imageData, stride * height ); } else { unsafe { int dstStride = destImage.stride; int copyLength = ( stride < dstStride ) ? stride : dstStride; byte* src = (byte*) imageData.ToPointer( ); byte* dst = (byte*) destImage.imageData.ToPointer( ); // copy line by line for ( int i = 0; i < height; i++ ) { AForge.SystemTools.CopyUnmanagedMemory( dst, src, copyLength ); dst += dstStride; src += stride; } } } }
/// <summary>
/// Allocate new image in unmanaged memory.
/// </summary>
///
/// <param name="width">Image width.</param>
/// <param name="height">Image height.</param>
/// <param name="pixelFormat">Image pixel format.</param>
///
/// <returns>Return image allocated in unmanaged memory.</returns>
///
/// <remarks><para>Allocate new image with specified attributes in unmanaged memory.</para>
///
/// <para><note>The method supports only
/// <see cref="System.Drawing.Imaging.PixelFormat">Format8bppIndexed</see>,
/// <see cref="System.Drawing.Imaging.PixelFormat">Format16bppGrayScale</see>,
/// <see cref="System.Drawing.Imaging.PixelFormat">Format24bppRgb</see>,
/// <see cref="System.Drawing.Imaging.PixelFormat">Format32bppRgb</see>,
/// <see cref="System.Drawing.Imaging.PixelFormat">Format32bppArgb</see>,
/// <see cref="System.Drawing.Imaging.PixelFormat">Format32bppPArgb</see>,
/// <see cref="System.Drawing.Imaging.PixelFormat">Format48bppRgb</see>,
/// <see cref="System.Drawing.Imaging.PixelFormat">Format64bppArgb</see> and
/// <see cref="System.Drawing.Imaging.PixelFormat">Format64bppPArgb</see> pixel formats.
/// In the case if <see cref="System.Drawing.Imaging.PixelFormat">Format8bppIndexed</see>
/// format is specified, pallete is not not created for the image (supposed that it is
/// 8 bpp grayscale image).
/// </note></para>
/// </remarks>
///
/// <exception cref="UnsupportedImageFormatException">Unsupported pixel format was specified.</exception>
/// <exception cref="InvalidImagePropertiesException">Invalid image size was specified.</exception>
///
public static UnmanagedImage Create(int width, int height, PixelFormat pixelFormat)
{
    int bytesPerPixel;

    // map the pixel format to its storage size; reject anything unsupported
    if (pixelFormat == PixelFormat.Format8bppIndexed)
    {
        bytesPerPixel = 1;
    }
    else if (pixelFormat == PixelFormat.Format16bppGrayScale)
    {
        bytesPerPixel = 2;
    }
    else if (pixelFormat == PixelFormat.Format24bppRgb)
    {
        bytesPerPixel = 3;
    }
    else if ((pixelFormat == PixelFormat.Format32bppRgb) ||
             (pixelFormat == PixelFormat.Format32bppArgb) ||
             (pixelFormat == PixelFormat.Format32bppPArgb))
    {
        bytesPerPixel = 4;
    }
    else if (pixelFormat == PixelFormat.Format48bppRgb)
    {
        bytesPerPixel = 6;
    }
    else if ((pixelFormat == PixelFormat.Format64bppArgb) ||
             (pixelFormat == PixelFormat.Format64bppPArgb))
    {
        bytesPerPixel = 8;
    }
    else
    {
        throw new UnsupportedImageFormatException("Can not create image with specified pixel format.");
    }

    // reject non-positive dimensions
    if ((width <= 0) || (height <= 0))
    {
        throw new InvalidImagePropertiesException("Invalid image size specified.");
    }

    // round the stride up to the next multiple of 4 bytes
    int stride = (width * bytesPerPixel + 3) & ~3;

    // allocate the buffer and zero it out
    int bufferSize = stride * height;
    IntPtr imageData = System.Runtime.InteropServices.Marshal.AllocHGlobal(bufferSize);
    AForge.SystemTools.SetUnmanagedMemory(imageData, 0, bufferSize);

    // the new image owns its buffer and must free it on dispose
    UnmanagedImage image = new UnmanagedImage(imageData, width, height, stride, pixelFormat);
    image.mustBeDisposed = true;

    return (image);
}