/// <summary>
/// Snapshots the current "number" bitmap into the shared unmanaged-image
/// slot, synchronized with the other image-processing work via balanceLock.
/// </summary>
public void presave()
{
    lock (balanceLock)
    {
        sav = AForge.Imaging.UnmanagedImage.FromManagedImage(number);
    }
}
/// <summary>
/// Binarizes bitmap <c>b</c> (BT.709 grayscale + fixed threshold 128),
/// displays the result, then scans the top-left Bsize x Bsize region and
/// counts "dark" pixels per row and per column, appending the tallies to
/// the text boxes and to the <c>h</c>/<c>w</c>/<c>result</c> summaries.
/// </summary>
private void button2_Click(object sender, EventArgs e)
{
    // Threshold the source image via an unmanaged view of its pixel data.
    System.Drawing.Imaging.BitmapData objectsData = b.LockBits(
        new Rectangle(0, 0, b.Width, b.Height),
        System.Drawing.Imaging.ImageLockMode.ReadOnly,
        b.PixelFormat);
    AForge.Imaging.UnmanagedImage grayImage =
        AForge.Imaging.Filters.Grayscale.CommonAlgorithms.BT709.Apply(
            new AForge.Imaging.UnmanagedImage(objectsData));
    AForge.Imaging.Filters.Threshold th = new AForge.Imaging.Filters.Threshold(128);
    th.ApplyInPlace(grayImage);
    // Unlock before touching the bitmap through GDI+ again.
    b.UnlockBits(objectsData);

    pictureBox1.Image = grayImage.ToManagedImage();
    b = grayImage.ToManagedImage();
    grayImage.Dispose(); // fix: the unmanaged image was leaked

    int count = 0;
    b2 = ResizeImage(b, new Size(size, size));
    pictureBox2.Image = b2;

    // fix: build the tallies with StringBuilders instead of repeated string
    // concatenation on TextBox properties (the original was O(n^2) in
    // characters and re-rendered the controls on every iteration).
    var bits = new System.Text.StringBuilder();
    var rowText = new System.Text.StringBuilder(richTextBox2.Text);
    var colText = new System.Text.StringBuilder(richTextBox1.Text);
    for (int i = 0; i < Bsize; i++)
    {
        int k = 0; // dark pixels found scanning (i, j)
        int t = 0; // dark pixels found scanning (j, i)
        for (int j = 0; j < Bsize; j++)
        {
            // fix: cache pixel reads — GetPixel is expensive and was
            // called up to six times per iteration in the original.
            Color rowPixel = b.GetPixel(i, j);
            if (rowPixel.R > 100 && rowPixel.G > 100 && rowPixel.B > 100)
            {
                bits.Append('0');
            }
            else
            {
                bits.Append('1');
                count++;
                k++;
            }
            Color colPixel = b.GetPixel(j, i);
            if (!(colPixel.R > 100 && colPixel.G > 100 && colPixel.B > 100))
            {
                t++;
            }
        }
        rowText.Append(k);
        colText.Append(t).Append('\n');
        h = h + "," + k;
        w = w + "," + t;
        bits.Append('\n');
        // fix: the original's `result = string.Join("", h)` was dead code —
        // immediately overwritten by the assignment below.
        result = count + "\n" + h + "\n" + w;
    }
    richTextBox2.Text = rowText.ToString();
    richTextBox1.Text = colText.ToString();
    String s = bits.ToString();
    label1.Text = "Total 1 is " + count;
    // System.IO.File.WriteAllText( "test.txt", s);
}
/// <summary>
/// Per-frame handler: keeps only red-ish pixels, finds the largest blob,
/// draws a rectangle around it, logs its geometry into <c>s</c>, and
/// overlays crosshair lines through the center of the frame.
/// </summary>
private void videoSourcePlayer1_NewFrame(object sender, ref Bitmap image)
{
    i++;
    // Keep only pixels within the given color radius of pure red.
    AForge.Imaging.Filters.EuclideanColorFiltering filter =
        new AForge.Imaging.Filters.EuclideanColorFiltering();
    AForge.Imaging.RGB f = new AForge.Imaging.RGB(Color.Red);
    filter.CenterColor = f;
    filter.Radius = (short)100;
    Bitmap objectsImage = image;
    filter.ApplyInPlace(image);

    // Lock the filtered frame and grayscale it for blob counting.
    System.Drawing.Imaging.BitmapData objectsData = objectsImage.LockBits(
        new Rectangle(0, 0, image.Width, image.Height),
        System.Drawing.Imaging.ImageLockMode.ReadOnly,
        image.PixelFormat);
    AForge.Imaging.UnmanagedImage grayImage =
        AForge.Imaging.Filters.Grayscale.CommonAlgorithms.BT709.Apply(
            new AForge.Imaging.UnmanagedImage(objectsData));
    objectsImage.UnlockBits(objectsData);

    AForge.Imaging.BlobCounter blobCounter = new AForge.Imaging.BlobCounter();
    blobCounter.ProcessImage(grayImage);
    grayImage.Dispose(); // fix: the unmanaged image was leaked
    blobCounter.ObjectsOrder = AForge.Imaging.ObjectsOrder.Size;
    Rectangle[] rects = blobCounter.GetObjectsRectangles();
    if (rects.Length > 0)
    {
        Rectangle objectRect = rects[0];
        s = s + "\n" + i + " - top = " + objectRect.Top + " bottom " + objectRect.Bottom
            + " left " + objectRect.Left + " right " + objectRect.Right + " " + objectRect.X
            + " " + objectRect.Location.X + " " + objectRect.Location.Y;
        // Outline the detected object.
        using (Graphics g = Graphics.FromImage(image))
        using (Pen pen = new Pen(Color.FromArgb(160, 255, 160), 5))
        {
            g.DrawRectangle(pen, objectRect);
        }
        // Object center offset from the frame center (positive X = right,
        // positive Y = up). Currently unused; kept for the tracking code
        // that is commented out below.
        int objectX = objectRect.X + objectRect.Width / 2 - image.Width / 2;
        int objectY = image.Height / 2 - (objectRect.Y + objectRect.Height / 2);
        // label1.Text = label1.Text+objectRect.X;
        // ParameterizedThreadStart t = new ParameterizedThreadStart(p);
        // Thread aa = new Thread(t);
        // aa.Start(rects[0]);
    }
    // Crosshair through the frame center.
    // fix: pen1 was never disposed (leaked one GDI pen per frame).
    using (Graphics g1 = Graphics.FromImage(image))
    using (Pen pen1 = new Pen(Color.FromArgb(160, 255, 160), 3))
    {
        // fix: the vertical line's bottom endpoint used image.Width as a
        // y-coordinate; it must span the full image height.
        g1.DrawLine(pen1, image.Width / 2, 0, image.Width / 2, image.Height);
        g1.DrawLine(pen1, image.Width, image.Height / 2, 0, image.Height / 2);
    }
}
/// <summary>
/// Flattens a grayscale image into a brightness vector, one entry per
/// pixel, laid out column-major: element [i * Height + j] holds the
/// brightness of pixel (i, j).
/// </summary>
private double[] imgToData(AForge.Imaging.UnmanagedImage img)
{
    double[] res = new double[img.Width * img.Height];
    for (int i = 0; i < img.Width; i++)
    {
        for (int j = 0; j < img.Height; j++)
        {
            // fix: the original indexed res[i * img.Width + j], which for
            // non-square images collides and overruns the array (i runs
            // over Width, j over Height, so the stride along i must be
            // img.Height).
            res[i * img.Height + j] = img.GetPixel(i, j).GetBrightness(); // maybe threshold
        }
    }
    return res;
}
/// <summary>
/// Processes one camera frame under the shared lock: crops a centered
/// square, grayscales + thresholds + inverts it, locates the largest blob,
/// and estimates its tilt from the offset between the blob's center of
/// gravity and its geometric center. Results are published through the
/// Recongnised/Angle/AngleRad/BlobCount/number members.
/// </summary>
public void ProcessImage(Bitmap input_image)
{
    lock (balanceLock)
    {
        int side = Math.Min(input_image.Height, input_image.Width);
        // Square region representing the camera feed.
        Rectangle cropRect = new Rectangle(0, 0, side, side);
        // Place it on the original bitmap.
        g.DrawImage(input_image, new Rectangle(0, 0, input_image.Width, input_image.Height), cropRect, GraphicsUnit.Pixel);

        // Drop the previous frame's processed image.
        if (processed != null)
        {
            processed.Dispose();
        }
        // Convert to grayscale, then apply the threshold filter (the limit
        // comes from the settings and can be changed on the form), then invert.
        processed = grayFilter.Apply(AForge.Imaging.UnmanagedImage.FromManagedImage(original));
        threshldFilter.PixelBrightnessDifferenceLimit = ThresholdValue;
        threshldFilter.ApplyInPlace(processed);
        InvertFilter.ApplyInPlace(processed);

        Blober.ProcessImage(processed);
        AForge.Imaging.Blob[] blobs = Blober.GetObjectsInformation();
        BlobCount = blobs.Length;
        if (blobs.Length > 0)
        {
            var BiggestBlob = blobs[0];
            Recongnised = true;
            Blober.ExtractBlobsImage(processed, BiggestBlob, false);
            // fix: dispose the intermediate thresholded image before
            // replacing it with the extracted blob image — the original
            // leaked it on every recognized frame.
            processed.Dispose();
            processed = BiggestBlob.Image;
            AForge.Point mc = BiggestBlob.CenterOfGravity;
            AForge.Point ic = new AForge.Point((float)BiggestBlob.Image.Width / 2, (float)BiggestBlob.Image.Height / 2);
            // Slope of the COG offset, converted to degrees. When
            // ic.X == mc.X the float division yields ±Infinity and Atan
            // maps it to ±90° — no crash, acceptable here.
            AngleRad = (ic.Y - mc.Y) / (ic.X - mc.X);
            Angle = (float)(Math.Atan(AngleRad) * 180 / Math.PI);
        }
        else
        {
            // No blobs: report "not recognised" with sentinel values.
            Recongnised = false;
            Angle = 0;
            AngleRad = -1;
        }
        // Publish the processed frame as a managed bitmap.
        if (number != null)
        {
            number.Dispose();
        }
        number = processed.ToManagedImage();
    }
}
/// <summary>
/// Initializes the lookup tables and loads the preprocessed number
/// template images (Images/1.jpg .. Images/13.jpg) as unmanaged images.
/// </summary>
public MagicEye()
{
    // Build a power-of-two -> exponent lookup: powers[2^i] = i.
    // NOTE(review): the last write is powers[65536] = 16, so the powers
    // array must hold at least 65537 entries — confirm its declaration.
    powers[0] = 0;
    int pwr = 1;
    for (int i = 0; i < 17; ++i)
    {
        powers[pwr] = i;
        pwr *= 2;
    }

    samples = new Dictionary<int, AForge.Imaging.UnmanagedImage>();
    // Load the number samples. (The original also constructed an unused
    // Grayscale filter here; removed as dead code.)
    for (int i = 1; i < 14; ++i)
    {
        string fname = "Images/" + i.ToString() + ".jpg";
        // fix: dispose the temporary managed bitmap — FromManagedImage
        // copies the pixel data, and the original leaked one Bitmap per file.
        using (Bitmap managed = new Bitmap(fname))
        {
            samples.Add(i, AForge.Imaging.UnmanagedImage.FromManagedImage(managed));
        }
    }
}
/// <summary>
/// Builds a 400-element feature vector from a 200x200 binary image:
/// entries 0..199 hold per-row counts of dark pixels (brightness &lt; 0.5)
/// and entries 200..399 hold per-column counts. The figure type is left
/// as Undef for the classifier to determine.
/// </summary>
public static Sample GenerateFigure(AForge.Imaging.UnmanagedImage img)
{
    // fix: C# arrays of double are zero-initialized on allocation; the
    // original's explicit zero-fill loop was redundant and removed.
    double[] input = new double[400];
    FigureType type = FigureType.Undef;
    for (int i = 0; i < 200; i++)
    {
        for (int j = 0; j < 200; j++)
        {
            if (img.GetPixel(i, j).GetBrightness() < 0.5)
            {
                input[i] += 1;       // projection onto the row axis
                input[200 + j] += 1; // projection onto the column axis
            }
        }
    }
    return new Sample(input, 4, type);
}
/// <summary>
/// "Saws" the board image into 16 tiles (4x4) for per-tile recognition:
/// crops a centered square (minus a configurable border), draws the tile
/// grid on the original, and either just redraws the previously
/// recognized digits (justShow) or thresholds the image and classifies
/// every tile via processSample.
/// </summary>
public void ProcessImage(Bitmap bitmap, bool justShow)
{
    // Camera frames are expected to be landscape (width >= height).
    if (bitmap.Height > bitmap.Width)
    {
        throw new Exception("К такой забавной камере меня жизнь не готовила!");
    }
    // The smaller side of the image (normally the height).
    int side = bitmap.Height;
    // Trim the borders, but never more than half of the image.
    if (side < 4 * settings.border)
    {
        settings.border = side / 4;
    }
    side -= 2 * settings.border;
    // Centered square crop region.
    Rectangle cropRect = new Rectangle((bitmap.Width - bitmap.Height) / 2 + settings.border, settings.border, side, side);
    original = new Bitmap(cropRect.Width, cropRect.Height);
    // Draw the frames on the original.
    Graphics g = Graphics.FromImage(original);
    g.DrawImage(bitmap, new Rectangle(0, 0, original.Width, original.Height), cropRect, GraphicsUnit.Pixel);
    Pen p = new Pen(Color.Red);
    // Clamp the margin setting so tiles keep a positive size.
    if (side < 10 * settings.margin)
    {
        settings.margin = side / 10;
    }
    // Side of the block to extract (sz) and of the full grid cell (blockSide).
    int sz = side / 4 - 2 * settings.margin;
    int blockSide = side / 4;
    for (int r = 0; r < 4; ++r)
    {
        for (int c = 0; c < 4; ++c)
        {
            g.DrawRectangle(p, new Rectangle(settings.margin + c * blockSide, settings.margin + r * blockSide, sz, sz));
        }
    }
    // Draw the digits recognized on the previous step?
    if (justShow)
    {
        DrawNumbersOnOriginalBitmap(g, blockSide);
    }
    // Convert the image to grayscale.
    AForge.Imaging.Filters.Grayscale grayFilter = new AForge.Imaging.Filters.Grayscale(0.2125, 0.7154, 0.0721);
    processed = grayFilter.Apply(AForge.Imaging.UnmanagedImage.FromManagedImage(original));
    // Scale the image up to 500x500 — that is enough.
    // NOTE(review): this replaces `original` AFTER `g` was bound to the old
    // bitmap, so all later drawing through `g` (including the final
    // DrawNumbersOnOriginalBitmap below) lands on the discarded pre-scale
    // bitmap, not the one now held in `original` — confirm this is intended.
    AForge.Imaging.Filters.ResizeBilinear scaleFilter = new AForge.Imaging.Filters.ResizeBilinear(500, 500);
    original = scaleFilter.Apply(original);
    // If recognition is not planned — just exit.
    if (justShow)
    {
        return;
    }
    // Reset the recognition state.
    for (int i = 0; i < 16; ++i)
    {
        currentDeskState[i] = 0;
    }
    // Apply the threshold filter. The threshold value comes from the
    // settings and can be changed on the form.
    AForge.Imaging.Filters.BradleyLocalThresholding threshldFilter = new AForge.Imaging.Filters.BradleyLocalThresholding();
    threshldFilter.PixelBrightnessDifferenceLimit = settings.differenceLim;
    threshldFilter.ApplyInPlace(processed);
    for (int r = 0; r < 4; ++r)
    {
        for (int c = 0; c < 4; ++c)
        {
            // Take the next fragment — inset a little further, since we
            // drew rectangles on the tile borders above.
            AForge.Imaging.Filters.Crop cropFilter = new AForge.Imaging.Filters.Crop(new Rectangle(2 + settings.margin + c * blockSide, 2 + settings.margin + r * blockSide, sz - 4, sz - 4));
            arrayPics[r, c] = cropFilter.Apply(processed);
            //arrayPics[r, c] = processed.Clone(new Rectangle(2+settings.margin + c * blockSide, 2+settings.margin + r * blockSide, sz-4, sz-4), processed.PixelFormat);
            // And perform the matching.
            processSample(r, c);
        }
    }
    DrawNumbersOnOriginalBitmap(g, blockSide);
}
/// <summary>
/// Applies the filter selected in the UI (threshold, Gaussian blur,
/// sharpen, erosion, invert, Canny edges, median, or dilation) to the
/// image in the Image 1 box and displays the result.
/// </summary>
private void Go_Click(object sender, RoutedEventArgs e)
{
    try
    {
        System.Windows.Media.Imaging.BitmapImage imagesource = imageBox.Source as System.Windows.Media.Imaging.BitmapImage;
        if (imageBox.Source == null)
        {
            System.Windows.MessageBox.Show("Add your image to the Image 1 box!");
            return;
        }
        System.Drawing.Bitmap image = BmpImage2Bmp(imagesource);
        System.Windows.Media.Imaging.BitmapImage final = new System.Windows.Media.Imaging.BitmapImage();
        if (!AForge.Imaging.Image.IsGrayscale(image))
        {
            // NOTE(review): ExtractChannel(0) keeps a single channel rather
            // than doing a weighted grayscale conversion — confirm intended.
            AForge.Imaging.Filters.ExtractChannel Grayer = new AForge.Imaging.Filters.ExtractChannel(0);
            image = Grayer.Apply(image);
        }
        if (Threshold.IsChecked == true)
        {
            imageBox.Source = null;
            AForge.Imaging.Filters.Threshold threshold = new AForge.Imaging.Filters.Threshold((int)slider.Value);
            threshold.ApplyInPlace(image);
            final = Bmp2BmpImage(image);
        }
        else if (GaussianFilter.IsChecked == true)
        {
            AForge.Imaging.Filters.GaussianBlur Gauss = new AForge.Imaging.Filters.GaussianBlur();
            Gauss.Sigma = GaussSigma_Slide.Value;
            Gauss.Size = (int)GaussSize_Slide.Value;
            // fix: both unmanaged images were leaked in the original;
            // dispose them once the result is converted back.
            using (AForge.Imaging.UnmanagedImage unmanagedImage = AForge.Imaging.UnmanagedImage.FromManagedImage(image))
            using (AForge.Imaging.UnmanagedImage Dst = unmanagedImage.Clone())
            {
                Gauss.Apply(unmanagedImage, Dst);
                final = Bmp2BmpImage(Dst.ToManagedImage());
            }
        }
        else if (HiPass.IsChecked == true)
        {
            AForge.Imaging.Filters.Sharpen filter = new AForge.Imaging.Filters.Sharpen();
            filter.ApplyInPlace(image);
            final = Bmp2BmpImage(image);
        }
        else if (Erode.IsChecked == true)
        {
            AForge.Imaging.Filters.Erosion filter = new AForge.Imaging.Filters.Erosion();
            filter.ApplyInPlace(image);
            final = Bmp2BmpImage(image);
        }
        else if (Invert.IsChecked == true)
        {
            AForge.Imaging.Filters.Invert filter = new AForge.Imaging.Filters.Invert();
            filter.ApplyInPlace(image);
            final = Bmp2BmpImage(image);
        }
        else if (EdgeDetector.IsChecked == true)
        {
            AForge.Imaging.Filters.CannyEdgeDetector filter = new AForge.Imaging.Filters.CannyEdgeDetector();
            filter.ApplyInPlace(image);
            final = Bmp2BmpImage(image);
        }
        else if (Median.IsChecked == true)
        {
            AForge.Imaging.Filters.Median filter = new AForge.Imaging.Filters.Median();
            filter.Size = (int)GaussSize_Slide.Value;
            filter.ApplyInPlace(image);
            final = Bmp2BmpImage(image);
        }
        else if (More.IsChecked == true)
        {
            if (Dilate.IsSelected)
            {
                // "Dilatation" is AForge's spelling of the dilation filter.
                AForge.Imaging.Filters.Dilatation filter = new AForge.Imaging.Filters.Dilatation();
                filter.ApplyInPlace(image);
                final = Bmp2BmpImage(image);
            }
        }
        // If no option was checked, `final` stays an empty BitmapImage —
        // preserved from the original behavior.
        imageBox.Source = final;
        TransformImage = image;
        boxWidth = imageBox.RenderSize.Width;
        boxHeight = imageBox.RenderSize.Height;
    }
    catch (Exception exc)
    {
        System.Windows.MessageBox.Show(exc.ToString());
    }
}
/// <summary>
/// Camera-buffer callback: binarizes the incoming frame
/// (grayscale -> threshold -> RGB), runs the NyAR marker detector on it,
/// then marshals to the UI thread to display the mirrored frame and draw
/// polygons over every detected marker.
/// </summary>
void CaptureListener.OnBuffer(CaptureDevice i_sender, double i_sample_time, IntPtr i_buffer, int i_buffer_len)
{
    // Frame geometry: stride = width * bytes-per-pixel.
    int w = i_sender.video_width;
    int h = i_sender.video_height;
    int s = w * (i_sender.video_bit_count / 8);
    // Binarization pipeline; GrayscaleToRGB restores the 3-channel layout.
    AForge.Imaging.Filters.FiltersSequence seq = new AForge.Imaging.Filters.FiltersSequence();
    seq.Add(new AForge.Imaging.Filters.Grayscale(0.2125, 0.7154, 0.0721));
    seq.Add(new AForge.Imaging.Filters.Threshold(127));
    seq.Add(new AForge.Imaging.Filters.GrayscaleToRGB());
    AForge.Imaging.UnmanagedImage srcImg = new AForge.Imaging.UnmanagedImage(i_buffer, w, h, s, System.Drawing.Imaging.PixelFormat.Format32bppRgb);
    AForge.Imaging.UnmanagedImage outputImg = seq.Apply(srcImg);
    // Copy the filtered pixels into a managed buffer for the AR raster.
    byte[] destArr = new byte[outputImg.Stride * outputImg.Height];
    System.Runtime.InteropServices.Marshal.Copy(outputImg.ImageData, destArr, 0, outputImg.Stride * outputImg.Height);
    this.m_raster.wrapBuffer(destArr);
    try
    {
        //this.m_ar.
        int detectedMkrs = this.m_ar.detectMarkerLite(this.m_raster, m_threshold);
        // Corner sets for every detected marker; `points` keeps marker 0.
        NyARDoublePoint2d[] points = null;
        List <NyARDoublePoint2d[]> ltPoints = new List <NyARDoublePoint2d[]>();
        if (detectedMkrs > 0)
        {
            points = m_ar.getCorners(0);
            ltPoints.Add(points);
            for (int i = 1; i < detectedMkrs; i++)
            {
                NyARDoublePoint2d[] oMarkerPoints = m_ar.getCorners(i);
                ltPoints.Add(oMarkerPoints);
            }
        }
        // UI work must happen on the dispatcher thread.
        Dispatcher.BeginInvoke(new Action(delegate()
        {
            try
            {
                // Show the raw frame, flipped on both axes via ScaleTransform(-1, -1).
                TransformedBitmap b = new TransformedBitmap();
                b.BeginInit();
                b.Source = BitmapSource.Create(w, h, 96.0, 96.0, PixelFormats.Bgr32, BitmapPalettes.WebPalette, i_buffer, i_buffer_len, s);
                b.SetValue(TransformedBitmap.TransformProperty, new ScaleTransform(-1, -1));
                b.EndInit();
                image1.SetValue(Image.SourceProperty, b);
                // Outline marker 0 with the dedicated polygon; coordinates
                // are mirrored to match the flipped frame.
                if (points != null && points.Length == 4)
                {
                    recognizedTag.Points = new PointCollection(new Point[] {
                        new Point(cameraResX - points[0].x, cameraResY - points[0].y),
                        new Point(cameraResX - points[1].x, cameraResY - points[1].y),
                        new Point(cameraResX - points[2].x, cameraResY - points[2].y),
                        new Point(cameraResX - points[3].x, cameraResY - points[3].y) });
                    recognizedTag.Visibility = System.Windows.Visibility.Visible;
                }
                else
                {
                    recognizedTag.Visibility = System.Windows.Visibility.Hidden;
                }
                // Draw the remaining markers (index 0 is already shown above).
                this.CvMainZm.Children.Clear();
                for (int i = 1; i < ltPoints.Count; i++)
                {
                    NyARDoublePoint2d[] oMarkerPoints = ltPoints[i];
                    Polygon oPolygon = new Polygon() { SnapsToDevicePixels = true, Fill = new SolidColorBrush(Colors.Violet), Opacity = 0.8, Stroke = new SolidColorBrush(Colors.Red) };
                    oPolygon.Points = new PointCollection(new Point[] {
                        new Point(cameraResX - oMarkerPoints[0].x, cameraResY - oMarkerPoints[0].y),
                        new Point(cameraResX - oMarkerPoints[1].x, cameraResY - oMarkerPoints[1].y),
                        new Point(cameraResX - oMarkerPoints[2].x, cameraResY - oMarkerPoints[2].y),
                        new Point(cameraResX - oMarkerPoints[3].x, cameraResY - oMarkerPoints[3].y) });
                    this.CvMainZm.Children.Add(oPolygon);
                }
            }
            catch { } // NOTE(review): swallows all UI errors silently — consider logging.
        }), null);
    }
    catch { } // NOTE(review): swallows all detector errors silently — consider logging.
}
/// <summary>
/// Camera-buffer callback: binarizes the frame, detects the first marker,
/// and marshals to the UI thread to show the mirrored frame and outline
/// the marker (or hide the outline if none was found).
/// </summary>
void CaptureListener.OnBuffer(CaptureDevice i_sender, double i_sample_time, IntPtr i_buffer, int i_buffer_len)
{
    // calculate size of the frame bitmap
    int w = i_sender.video_width;
    int h = i_sender.video_height;
    int s = w * (i_sender.video_bit_count / 8); // stride
    // Binarization pipeline: grayscale -> threshold -> back to RGB.
    AForge.Imaging.Filters.FiltersSequence seq = new AForge.Imaging.Filters.FiltersSequence();
    seq.Add(new AForge.Imaging.Filters.Grayscale(0.2125, 0.7154, 0.0721));
    seq.Add(new AForge.Imaging.Filters.Threshold(127));
    seq.Add(new AForge.Imaging.Filters.GrayscaleToRGB());
    AForge.Imaging.UnmanagedImage srcImg = new AForge.Imaging.UnmanagedImage(i_buffer, w, h, s, System.Drawing.Imaging.PixelFormat.Format32bppRgb);
    AForge.Imaging.UnmanagedImage outputImg = seq.Apply(srcImg);
    // Copy the filtered pixels into a managed buffer for the AR raster.
    byte[] destArr = new byte[outputImg.Stride * outputImg.Height];
    System.Runtime.InteropServices.Marshal.Copy(outputImg.ImageData, destArr, 0, outputImg.Stride * outputImg.Height);
    this.m_raster.wrapBuffer(destArr);
    try
    {
        int detectedMkrs = this.m_ar.detectMarkerLite(this.m_raster, m_threshold);
        NyARSquare square = null;
        if (detectedMkrs > 0)
        {
            // NOTE(review): transMat is created but never used — dead local?
            NyARTransMatResult transMat = new NyARTransMatResult();
            NyARDoublePoint2d[] points = m_ar.getCorners(0); // RichF added this method
            square = new NyARSquare();
            square.sqvertex = points;
        }
        // UI updates must run on the dispatcher thread.
        Dispatcher.BeginInvoke(new Action(delegate()
        {
            // Show the raw frame flipped on both axes (mirrored camera).
            TransformedBitmap b = new TransformedBitmap();
            b.BeginInit();
            b.Source = BitmapSource.Create(w, h, dpiX, dpiY, PixelFormats.Bgr32, BitmapPalettes.WebPalette, i_buffer, i_buffer_len, s);
            b.SetValue(TransformedBitmap.TransformProperty, new ScaleTransform(-1, -1));
            b.EndInit();
            image1.SetValue(Image.SourceProperty, b);
            // Outline the detected marker, mirroring its coordinates to
            // match the flipped frame.
            if (square != null)
            {
                recognizedTag.Points = new PointCollection(new Point[] {
                    new Point(cameraResX - square.sqvertex[0].x, cameraResY - square.sqvertex[0].y),
                    new Point(cameraResX - square.sqvertex[1].x, cameraResY - square.sqvertex[1].y),
                    new Point(cameraResX - square.sqvertex[2].x, cameraResY - square.sqvertex[2].y),
                    new Point(cameraResX - square.sqvertex[3].x, cameraResY - square.sqvertex[3].y) });
                recognizedTag.Visibility = System.Windows.Visibility.Visible;
            }
            else
            {
                recognizedTag.Visibility = System.Windows.Visibility.Hidden;
            }
        }), null);
    }
    catch { } // NOTE(review): swallows all errors silently — consider logging.
}
/// <summary>
/// Decodes a custom 2-D binary code from bitmap <c>b</c>: thresholds the
/// image, measures the side of one "bit" cell from the first white pixel
/// down column 1, then samples cell columns/rows into the `horijontal` and
/// `vertical` bit strings and shows the decoded text.
/// </summary>
private void button1_Click(object sender, EventArgs e)
{
    richTextBox1.Text = "";
    // Binarize: BT.709 grayscale + fixed threshold at 128.
    System.Drawing.Imaging.BitmapData objectsData = b.LockBits(new Rectangle(0, 0, b.Width, b.Height), System.Drawing.Imaging.ImageLockMode.ReadOnly, b.PixelFormat);
    AForge.Imaging.UnmanagedImage grayImage = AForge.Imaging.Filters.Grayscale.CommonAlgorithms.BT709.Apply(new AForge.Imaging.UnmanagedImage(objectsData));
    AForge.Imaging.Filters.Threshold th = new AForge.Imaging.Filters.Threshold(128);
    th.ApplyInPlace(grayImage);
    // unlock image
    b.UnlockBits(objectsData);
    pictureBox2.Image = grayImage.ToManagedImage();;
    Pen pen = new Pen(Color.Red, 2);
    Bitmap img = grayImage.ToManagedImage();
    Graphics g = Graphics.FromImage(b);
    string text = "0"; // NOTE(review): dead local — never read after init.
    int z = 0;
    {
        // Find the first white pixel down column 1; its row index becomes
        // the side length of one bit cell.
        for (int j = 2; j < img.Height; j++)
        {
            int pixel = img.GetPixel(1, j).R;
            if (pixel == 255)
            {
                z = j;
                break;
            }
            // g.DrawString(j.ToString(), new System.Drawing.Font("Tahoma", 12, FontStyle.Bold), Brushes.Red, i, j);
            // g.DrawLine(pen, i, j, i+1, j+1 );
        }
        // if (z!=0)
        // break;
        // text = text + "\n";
    }
    // Mark the measured cell on the source bitmap.
    g.DrawRectangle(pen, 0, 0, z, z);
    bitsize = z;
    int l = bitsize % 3; // NOTE(review): dead local — never read.
    int lastblack = 0;   // NOTE(review): dead local — never read.
    int lastwhite = 0;   // NOTE(review): dead local — never read.
    // Horizontal pass: walk columns in bit-cell steps; within each column,
    // tally white/black run lengths per cell and emit one 0/1 per cell.
    for (int i = bitsize + bitsize / 3; i < img.Width;)
    {
        int ptb = 0; // run length of white (255) pixels, despite the name
        int ptw = 0; // run length of black (0) pixels
        int bitsizecount = 0;
        for (int j = bitsize; j < img.Height; j++)
        {
            bitsizecount++;
            // NOTE(review): start point is (i, i) — probably meant (i, j); confirm.
            g.DrawLine(pen, i, i, i + 1, j);
            int pixel = img.GetPixel(i, j).R;
            if (pixel == 255)
            {
                ptb++;
                ptw = 0;
            }
            if (pixel == 0)
            {
                ptb = 0;
                ptw++;
            }
            if (bitsize == bitsizecount)
            {
                // One full cell sampled: the longer final run decides the bit.
                if (ptb >= ptw)
                {
                    horijontal = horijontal + 0;
                }
                else
                {
                    horijontal = horijontal + 1;
                }
                bitsizecount = 0;
                ptb = 0;
                ptw = 0;
            }
        }
        // Advance to the next cell column; step depends on i mod 3/5.
        if (i % 3 == 0)
        {
            i = i + bitsize + 1;
        }
        else if (i % 5 == 0)
        {
            i = i + bitsize + 1;
        }
        else
        {
            i = i + bitsize + bitsize % 3;
        }
    }
    // Vertical pass: same sampling along rows.
    for (int j = bitsize + bitsize / 3; j < img.Height;)
    {
        int ptb = 0;
        int ptw = 0;
        int bitsizecount = 0;
        // NOTE(review): the bound uses img.Height although i indexes the
        // x axis — probably should be img.Width; confirm.
        for (int i = bitsize; i < img.Height; i++)
        {
            bitsizecount++;
            g.DrawLine(pen, i, j, i, j + 1);
            int pixel = img.GetPixel(i, j).R;
            if (pixel == 255)
            {
                ptb++;
                ptw = 0;
            }
            if (pixel == 0)
            {
                ptb = 0;
                ptw++;
            }
            if (bitsize == bitsizecount)
            {
                // NOTE(review): uses > here but >= in the horizontal pass —
                // confirm which comparison is intended.
                if (ptb > ptw)
                {
                    vertical = vertical + 0;
                }
                else
                {
                    vertical = vertical + 1;
                }
                bitsizecount = 0;
                ptb = 0;
                ptw = 0;
            }
        }
        // j = j + bitsize+2 ;
        if (j % 3 == 0)
        {
            j = j + bitsize + 1;
        }
        else if (j % 5 == 0)
        {
            j = j + bitsize + 1;
        }
        else
        {
            j = j + bitsize + bitsize % 3;
        }
    }
    pictureBox2.Image = b;
    // Trim both bit strings after the last '1' (drops trailing content;
    // LastIndexOf returns -1 and Substring throws if no '1' is present).
    int la = horijontal.LastIndexOf('1');
    horijontal = horijontal.Substring(0, la);
    int lb = vertical.LastIndexOf('1');
    vertical = vertical.Substring(0, lb);
    String s1 = horijontal + "" + vertical;
    String s2 = vertical + "" + horijontal;
    // Pad to a whole number of 8-bit groups.
    int f = 8 - s2.Length % 8;
    for (int k = 0; k < f; k++)
    {
        s2 = s2 + "0";
        s1 = s1 + "0";
    }
    richTextBox1.Text = "Result Binarycode: " + vertical + horijontal;
    label1.Text = "Result TEXT: " + BinaryToString(vertical + horijontal);// +" - " + BinaryToString(horijontal);
    label2.Text = "Bitsize: " + bitsize + " Datalength " + s2.Length;
}
/// <summary>
/// Get spectrogram image: renders the matrix as a grayscale bitmap, high
/// amplitude mapping to dark pixels, drawn bottom-up.
/// </summary>
/// <param name="audioData">Audio data, indexed [x, y] (time, frequency).</param>
/// <param name="height">Spectrogram height.</param>
/// <param name="width">Spectrogram width.</param>
/// <returns>Spectrogram image.</returns>
public static Bitmap GetImage(double[,] audioData, int height, int width)
{
    AForge.Imaging.UnmanagedImage image;
    using (var managedImage = new Bitmap(width, height, PixelFormat.Format24bppRgb))
    {
        // FromManagedImage copies the pixel data, so the template bitmap
        // can be disposed immediately (the original leaked it).
        image = AForge.Imaging.UnmanagedImage.FromManagedImage(managedImage);
    }
    try
    {
        int pixelSize = Image.GetPixelFormatSize(image.PixelFormat) / 8;
        // image dimension
        int imageWidth = image.Width;
        int imageHeight = image.Height;
        int stride = image.Stride;
        const int StartX = 0;
        int stopX = imageWidth - 1;
        // spectrogram is drawn from the bottom
        const int StartY = 0;
        int stopY = imageHeight - 1;
        // min, max, range — used to normalise values into [0, 1]
        double min;
        double max;
        audioData.MinMax(out min, out max);
        double range = max - min;
        // Per-row padding in bytes between the last pixel and the next row.
        int offset = stride - ((stopX - StartX + 1) * pixelSize);
        int heightOffset = imageHeight;
        unsafe
        {
            // do the job
            byte *ptr = (byte *)image.ImageData.ToPointer() + (StartY * stride) + (StartX * pixelSize);
            for (int y = StartY; y <= stopY; y++)
            {
                // required to render spectrogram correct way up
                int spectrogramY = heightOffset - 1;
                for (int x = StartX; x <= stopX; x++, ptr += pixelSize)
                {
                    // Normalise and bound the amplitude.
                    // fix: guard range == 0 (completely flat data) — the
                    // original divided by zero, producing NaN and an
                    // OverflowException from Convert.ToByte.
                    double value = range > 0 ? (audioData[x, spectrogramY] - min) / range : 0.0;
                    double colour = 255.0 - Math.Floor(255.0 * value);
                    colour = Math.Min(colour, 255);
                    colour = Math.Max(colour, 0);
                    byte paintColour = Convert.ToByte(colour);
                    // set colour — equal R/G/B gives grayscale
                    ptr[AForge.Imaging.RGB.R] = paintColour;
                    ptr[AForge.Imaging.RGB.G] = paintColour;
                    ptr[AForge.Imaging.RGB.B] = paintColour;
                }
                ptr += offset;
                heightOffset--;
            }
        }
        return image.ToManagedImage();
    }
    finally
    {
        // fix: the unmanaged image was leaked in the original.
        image.Dispose();
    }
}