public void ConvertTest3()
{
    // A 4x4 grayscale image: black border around a white 2x2 square.
    byte[] pixels =
    {
        0,   0,   0, 0,
        0, 255, 255, 0,
        0, 255, 255, 0,
        0,   0,   0, 0,
    };

    // Convert the array into a WPF BitmapSource...
    var toBitmapSource = new ArrayToBitmapSource(width: 4, height: 4);
    BitmapSource actual;
    toBitmapSource.Convert(pixels, out actual);

    // ...and into a GDI+ Bitmap, which serves as the reference result.
    var toBitmap = new ArrayToImage(width: 4, height: 4);
    Bitmap expected;
    toBitmap.Convert(pixels, out expected);

    // Both conversion paths must yield identical channel-0 matrices.
    Assert.AreEqual(expected.ToMatrix(0), actual.ToMatrix(0));
}
public void ConvertTest3()
{
    // 4x4 image with a bright 2x2 square in the middle (values in [0, 1]).
    double[] pixels =
    {
        0, 0, 0, 0,
        0, 1, 1, 0,
        0, 1, 1, 0,
        0, 0, 0, 0,
    };

    // Round-trip: array -> BitmapSource -> array.
    var toBitmapSource = new ArrayToBitmapSource(width: 4, height: 4);
    BitmapSource bitmapSource;
    toBitmapSource.Convert(pixels, out bitmapSource);

    var toArray = new BitmapSourceToArray();
    double[] array;
    toArray.Convert(bitmapSource, out array);

    // Reference path: the same array rendered through a GDI+ Bitmap.
    var toBitmap = new ArrayToImage(width: 4, height: 4);
    Bitmap bitmap;
    toBitmap.Convert(pixels, out bitmap);

    Assert.AreEqual(0, array.Min());
    Assert.AreEqual(1, array.Max());
    Assert.AreEqual(16, array.Length);

    // The round-tripped array must equal the Bitmap's channel-0 vector.
    var expected = bitmap.ToVector(0);
    Assert.AreEqual(array, expected);
}
/// <summary>
///   Runs the K-Means algorithm on the sample leaf image and shows
///   the color-quantized result in the picture box.
/// </summary>
private void runKMeans()
{
    // Number of clusters requested in the UI.
    int k = (int)numClusters.Value;

    Bitmap image = Properties.Resources.leaf;

    // Converters between Bitmap pixels and double[] vectors in [-1, +1].
    ImageToArray imageToArray = new ImageToArray(min: -1, max: +1);
    ArrayToImage arrayToImage = new ArrayToImage(image.Width, image.Height, min: -1, max: +1);

    double[][] pixels;
    imageToArray.Convert(image, out pixels);

    // K-Means with squared Euclidean distance; iterate until the centroid
    // displacement between two iterations drops below 0.05.
    KMeans kmeans = new KMeans(k, Distance.SquareEuclidean);
    int[] idx = kmeans.Compute(pixels, 0.05);

    // Posterize: replace each pixel with the centroid of its cluster.
    pixels.ApplyInPlace((x, i) => kmeans.Clusters.Centroids[idx[i]]);

    Bitmap result;
    arrayToImage.Convert(pixels, out result);
    pictureBox.Image = result;
}
public void ConvertTest4()
{
    // 4x4 image: black frame around a 2x2 block of distinct colors.
    Color[] pixels =
    {
        Color.Black, Color.Black,       Color.Black, Color.Black,
        Color.Black, Color.Transparent, Color.Red,   Color.Black,
        Color.Black, Color.Green,       Color.Blue,  Color.Black,
        Color.Black, Color.Black,       Color.Black, Color.Black,
    };

    // Build a Bitmap from the color array.
    ArrayToImage conv = new ArrayToImage(width: 4, height: 4);
    Bitmap image;
    conv.Convert(pixels, out image);

    // Upscale so it would be visible on screen.
    image = new ResizeNearestNeighbor(320, 320).Apply(image);
    // Accord.Controls.ImageBox.Show(image, PictureBoxSizeMode.Zoom);

    Assert.AreEqual(0, conv.Min);
    Assert.AreEqual(1, conv.Max);
    Assert.AreEqual(320, image.Height);
    Assert.AreEqual(320, image.Width);
}
private void Recall(bool reconstruct)
{
    // Expect UI input of the form "<neuron>,<layer>".
    string[] parts = textBox1.Text.Split(',');
    if (parts.Length != 2)
    {
        label1.Text = "You need to enter <neuron>,<layer>!";
        label1.Refresh();
        return;
    }

    try
    {
        int neuron = int.Parse(parts[0]);
        int layer = int.Parse(parts[1]);

        // Only neurons of the final layer map to named categories.
        string category;
        if (layer == LAYERS.Length - 1)
            category = listBox1.Items[neuron].ToString();
        else
            category = "(not a category)";

        // One-hot activation vector for the chosen neuron.
        double[] activation;
        if (reconstruct)
            activation = new double[LAYERS[layer]];
        else
            activation = new double[NUM_CATEGORIES];
        activation[neuron] = 1;

        // Either reconstruct from the given layer, or generate an input
        // image for the category vector.
        double[] reconstruction;
        if (reconstruct)
            reconstruction = _network.Reconstruct(activation, layer);
        else
            reconstruction = _network.GenerateInput(activation);

        // Render the reconstruction vector as a bitmap and display it.
        Bitmap bm;
        _atoi.Convert(reconstruction, out bm);

        label1.Text = "Reconstructing " + category + ", length of reconstruction: " + reconstruction.Length;
        label1.Refresh();
        pictureBox1.Image = bm;
        pictureBox1.Refresh();
    }
    catch (Exception ex)
    {
        // Surface the failure in the label rather than crashing the UI.
        label1.Text = ex.Message + "\n" + ex.StackTrace + "\n";
        label1.Text += "Reconstruction input params invalid. neuron should be < size of layer.";
        label1.Refresh();
    }
}
void cluster(ref Bitmap bitmap)
{
    // Clustering is optional; bail out when the checkbox is unchecked.
    if (!PreCluster.Checked)
    {
        return;
    }

    int clusterCount = (int)PartCount.Value;
    Bitmap source = realImage;

    // Converters between Bitmap pixels and double[] vectors in [-1, +1].
    ImageToArray toArray = new ImageToArray(min: -1, max: +1);
    ArrayToImage toImage = new ArrayToImage(source.Width, source.Height, min: -1, max: +1);

    double[][] pixels;
    toArray.Convert(source, out pixels);

    // Cluster pixel colors with K-Means (squared Euclidean metric).
    KMeans kmeans = new KMeans(clusterCount, new SquareEuclidean())
    {
        Tolerance = 0.05
    };
    int[] labels = kmeans.Learn(pixels).Decide(pixels);

    // Replace each pixel with its cluster centroid (in place).
    pixels.Apply((x, i) => kmeans.Clusters.Centroids[labels[i]], result: pixels);

    Bitmap clustered;
    toImage.Convert(pixels, out clustered);
    bitmap = clustered;
}
/// <summary>
///   Image K-Means: finds the k dominant colors of an image.
/// </summary>
/// <param name="image">Input image.</param>
/// <param name="k">Number of colors to extract.</param>
/// <returns>The k dominant colors.</returns>
public static Color[] GetDominanteColors(Bitmap image, int k)
{
    // Converters between Bitmap pixels and double[] vectors in [-1, +1].
    // The output image is a 1-pixel-wide column holding the k centroids.
    ImageToArray imageToArray = new ImageToArray(min: -1, max: +1);
    ArrayToImage arrayToImage = new ArrayToImage(1, k, min: -1, max: +1);

    double[][] pixels;
    imageToArray.Convert(image, out pixels);

    // K-Means with squared Euclidean distance; stop when centroids move
    // less than 0.05 between two iterations.
    KMeans kmeans = new KMeans(k, new SquareEuclidean())
    {
        Tolerance = 0.05
    };
    kmeans.Learn(pixels);

    // Render the centroids as a k-pixel image and read the colors back.
    var centroids = kmeans.Clusters.Centroids;
    Bitmap centroidImage;
    arrayToImage.Convert(centroids, out centroidImage);

    Color[] dominant = new Color[k];
    for (int i = 0; i < k; i++)
    {
        dominant[i] = centroidImage.GetPixel(0, i);
    }

    return dominant;
}
public Tile(int width, int height, double[][] pixels)
{
    this.pixels = pixels;

    // Render the pixel vectors (values in [-1, +1]) into the tile's image.
    ArrayToImage renderer = new ArrayToImage(width, height, min: -1, max: +1);
    renderer.Convert(pixels, out tileImage);
}
private void kmeans()
{
    // Number of clusters chosen in the UI.
    int k = (int)numClusters.Value;

    Bitmap source = Properties.Resources.leaf;

    // Converters between Bitmap pixels and double[] vectors in [-1, +1].
    ImageToArray toArray = new ImageToArray(min: -1, max: +1);
    ArrayToImage toImage = new ArrayToImage(source.Width, source.Height, min: -1, max: +1);

    double[][] pixels;
    toArray.Convert(source, out pixels);

    // K-Means with squared Euclidean distance, run until the centroid
    // displacement between two iterations drops below 0.05.
    KMeans kmeans = new KMeans(k, Distance.SquareEuclidean);
    int[] assignments = kmeans.Compute(pixels, 0.05);

    // Posterize the image: every pixel becomes its cluster centroid.
    pixels.ApplyInPlace((x, i) => kmeans.Clusters.Centroids[assignments[i]]);

    Bitmap result;
    toImage.Convert(pixels, out result);
    pictureBox.Image = result;
}
public BitmapImage ConvertByteArrayToBitMapImage(byte[] imageByteArray)
{
    // Converter that interprets the flat byte array as a 1024x768 image.
    ArrayToImage conv = new ArrayToImage(width: 1024, height: 768);

    // Convert allocates the output Bitmap itself via the out parameter;
    // the previous code pre-allocated a 24bpp Bitmap that was immediately
    // overwritten and never disposed (a GDI handle leak).
    Bitmap image;
    conv.Convert(imageByteArray, out image);

    return ToBitmapImage(image);
}
public static MeanShiftClusteringResult MeanShiftAccord(Image <Bgr, Byte> image, MeanShiftClusteringAcordParams msParams)
{
    int pixelSize = 3; // RGB color pixel
    int kernel = msParams.Kernel;
    double sigma = msParams.Sigma; // kernel bandwidth

    Bitmap msImage = image.Bitmap;

    // Converters between Bitmap pixels and double[] vectors in [-1, +1].
    ImageToArray imageToArray = new ImageToArray(min: -1, max: +1);
    ArrayToImage arrayToImage = new ArrayToImage(msImage.Width, msImage.Height, min: -1, max: +1);

    double[][] pixels;
    imageToArray.Convert(msImage, out pixels);

    // Mean-shift over the pixel vectors using a Gaussian density kernel.
    var meanShift = new Accord.MachineLearning.MeanShift(pixelSize, new GaussianKernel(kernel), sigma);

    // Iterate until the means move less than the tolerance, with a cap
    // on the number of iterations.
    meanShift.Tolerance = 0.05;
    meanShift.MaxIterations = 10;

    // Learn the clusters, then label every pixel.
    var clustering = meanShift.Learn(pixels);
    int[] labels = clustering.Decide(pixels);
    int regionCount = labels.DistinctCount();

    // Replace every pixel with the mode of its cluster (in place).
    pixels.ApplyInPlace((x, i) => meanShift.Clusters.Modes[labels[i]]);

    // Convert the quantized pixels back into an Emgu image.
    Bitmap msResult;
    arrayToImage.Convert(pixels, out msResult);
    Image <Bgr, byte> result = new Image <Bgr, byte>(msResult);
    //EmguCvWindowManager.Display(result, "msResult");

    return new MeanShiftClusteringResult()
    {
        Image = result,
        Labels = labels,
        RegionCount = regionCount
    };
}
BitmapSource CreateColoredImgVector(byte [] byteMatrix, int width, int height, ColorCovMode colormod)
{
    // Map the raw bytes to colors using the selected conversion mode.
    ColorConvertMethod cv = new ColorConvertMethod();
    Color[] colorArr = cv.ConvertColor(colormod)(byteMatrix);

    // Convert allocates the Bitmap through the out parameter, so the
    // old pre-allocated Bitmap was dead weight (and was never disposed).
    // The redundant flatMatrix alias of byteMatrix is removed as well.
    ArrayToImage convertor = new ArrayToImage(width, height);
    System.Drawing.Bitmap imgbit;
    convertor.Convert(colorArr, out imgbit);

    return CreateBitmapSourceClass.ToWpfBitmap(imgbit);
}
BitmapSource Arr2Source(byte[,] input, ColorCovMode colomod)
{
    // Flatten the 2D byte matrix and map it to colors using the mode.
    ColorConvertMethod cv = new ColorConvertMethod();
    byte[] flatMatrix = input.Flatten <byte>();
    Color[] rainbowArr = cv.ConvertColor(colomod)(flatMatrix);

    // Width = columns (dimension 1), height = rows (dimension 0).
    // Convert allocates the Bitmap through the out parameter; the old
    // pre-allocated Bitmap was immediately overwritten and leaked.
    ArrayToImage convertor = new ArrayToImage(input.GetLength(1), input.GetLength(0));
    System.Drawing.Bitmap imgbit;
    convertor.Convert(rainbowArr, out imgbit);

    return CreateBitmapSourceClass.ToWpfBitmap(imgbit);
}
public void ConvertTest3()
{
    // 4x4 image with a bright 2x2 square in the middle.
    double[] pixels =
    {
        0, 0, 0, 0,
        0, 1, 1, 0,
        0, 1, 1, 0,
        0, 0, 0, 0,
    };

    ArrayToImage toImage = new ArrayToImage(width: 4, height: 4);
    Bitmap image;
    toImage.Convert(pixels, out image);

    // Blow the image up to 16x16 before converting it back.
    image = new ResizeNearestNeighbor(16, 16).Apply(image);

    // Converter producing a matrix with values scaled into [0, 1].
    ImageToMatrix toMatrix = new ImageToMatrix(min: 0, max: 1);
    double[,] matrix;
    toMatrix.Convert(image, out matrix);

    Assert.AreEqual(0, matrix.Min());
    Assert.AreEqual(1, matrix.Max());
    Assert.AreEqual(16 * 16, matrix.Length);
}
public void ConvertTest1()
{
    ArrayToImage target = new ArrayToImage(16, 16);

    // 16x16 image: four isolated white pixels at (2,2), (2,13), (13,2), (13,13).
    double[] pixels =
    {
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 0
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 1
        0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, // 2
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 3
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 4
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 5
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 6
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 7
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 8
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 9
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 10
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 11
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 12
        0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, // 13
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 14
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 15
    };

    // Convert the array to a Bitmap and back to an array.
    Bitmap imageActual;
    target.Convert(pixels, out imageActual);

    double[] actual;
    ImageToArray back = new ImageToArray();
    back.Convert(imageActual, out actual);

    // Reference: the stored resource image, inverted and thresholded.
    double[] expected;
    Bitmap imageExpected = Properties.Resources.image1;
    new Invert().ApplyInPlace(imageExpected);
    new Threshold().ApplyInPlace(imageExpected);
    back.Convert(imageExpected, out expected);

    // The round-tripped pixels must match the reference exactly.
    for (int i = 0; i < pixels.Length; i++)
    {
        Assert.AreEqual(actual[i], expected[i]);
    }
}
public void meanShift()
{
    // Working directory for the sample images used by this example.
    string basePath = Path.Combine(NUnit.Framework.TestContext.CurrentContext.TestDirectory, "kmeans");
    Directory.CreateDirectory(basePath);

    #region doc_meanshift
    // Load a test image (shown in a picture box below)
    var sampleImages = new TestImages(path: basePath);
    Bitmap image = sampleImages.GetImage("airplane.png");

    // ImageBox.Show("Original", image).Hold();

    // Create converters to turn Bitmap images into double[] arrays and back
    var imageToArray = new ImageToArray(min: -1, max: +1);
    var arrayToImage = new ArrayToImage(image.Width, image.Height, min: -1, max: +1);

    // Transform the image into an array of pixel values
    double[][] pixels;
    imageToArray.Convert(image, out pixels);

    // Create a MeanShift algorithm using a Gaussian density kernel
    MeanShift meanShift = new MeanShift()
    {
        Kernel = new GaussianKernel(3),
        Bandwidth = 0.06,

        // Iterate until the means change less than the tolerance,
        // with an upper bound on the number of iterations
        Tolerance = 0.05,
        MaxIterations = 10
    };

    // Learn the clusters from the data
    var clusters = meanShift.Learn(pixels);

    // Use the clusters to assign a label to every pixel
    int[] labels = clusters.Decide(pixels);

    // Replace every pixel with the mode of its cluster
    double[][] replaced = pixels.Apply((x, i) => clusters.Modes[labels[i]]);

    // Retrieve the resulting image (shown in a picture box)
    Bitmap result;
    arrayToImage.Convert(replaced, out result);

    // ImageBox.Show("Mean-Shift clustering", result).Hold();
    #endregion
}
public void kmeans()
{
    // Working directory for the sample images used by this example.
    string basePath = Path.Combine(NUnit.Framework.TestContext.CurrentContext.TestDirectory, "kmeans");
    Directory.CreateDirectory(basePath);

    #region doc_kmeans
    // Load a test image (shown in a picture box below)
    var sampleImages = new TestImages(path: basePath);
    Bitmap image = sampleImages.GetImage("airplane.png");

    // ImageBox.Show("Original", image).Hold();

    // Create converters to turn Bitmap images into double[] arrays and back
    var imageToArray = new ImageToArray(min: -1, max: +1);
    var arrayToImage = new ArrayToImage(image.Width, image.Height, min: -1, max: +1);

    // Transform the image into an array of pixel values
    double[][] pixels;
    imageToArray.Convert(image, out pixels);

    // Create a K-Means algorithm with k clusters and a
    // squared Euclidean distance as the distance metric
    KMeans kmeans = new KMeans(k: 5)
    {
        Distance = new SquareEuclidean(),

        // Iterate until the cluster centroids change less
        // than the tolerance between two iterations
        Tolerance = 0.05
    };

    // Learn the clusters from the data
    var clusters = kmeans.Learn(pixels);

    // Use the clusters to assign a label to every pixel
    int[] labels = clusters.Decide(pixels);

    // Replace every pixel with its cluster centroid
    double[][] replaced = pixels.Apply((x, i) => clusters.Centroids[labels[i]]);

    // Retrieve the resulting image (shown in a picture box)
    Bitmap result;
    arrayToImage.Convert(replaced, out result);

    // ImageBox.Show("k-Means clustering", result).Hold();
    #endregion
}
void btnCompute_Click(object sender, EventArgs e)
{
    dataGridView2.Rows.Clear();

    // Feature vectors for every hand image.
    double[][] hands = extract();

    // Mean-centered PCA keeping enough components to explain
    // 95% of the variance.
    pca = new PrincipalComponentAnalysis()
    {
        Method = PrincipalComponentMethod.Center,
        ExplainedVariance = 0.95
    };
    pca.Learn(hands);

    // Render each eigenvector as a 32x32 "eigenhand" image.
    ArrayToImage reverse = new ArrayToImage(32, 32);
    for (int i = 0; i < pca.Components.Count; i++)
    {
        double[] eigenvector = pca.Components[i].Eigenvector;

        // Stretch the converter's range to the vector's actual extremes.
        reverse.Max = eigenvector.Max();
        reverse.Min = eigenvector.Min();

        Bitmap eigenHand;
        reverse.Convert(eigenvector, out eigenHand);

        dataGridView2.Rows.Add(eigenHand, pca.Components[i].Proportion);
    }

    // Populate the overview grids with the analysis data.
    dgvPrincipalComponents.DataSource = pca.Components;
    distributionView.DataSource = pca.Components;
    cumulativeView.DataSource = pca.Components;

    btnCreateProjection.Enabled = true;
}
/// <summary>
///   Runs the Mean-Shift algorithm on the sample leaf image and shows
///   the segmented result in the picture box.
/// </summary>
private void runMeanShift()
{
    int pixelSize = 3;

    // Kernel bandwidth selected in the UI.
    double sigma = (double)numBandwidth.Value;

    Bitmap image = Properties.Resources.leaf;

    // Converters between Bitmap pixels and double[] vectors in [-1, +1].
    ImageToArray imageToArray = new ImageToArray(min: -1, max: +1);
    ArrayToImage arrayToImage = new ArrayToImage(image.Width, image.Height, min: -1, max: +1);

    double[][] pixels;
    imageToArray.Convert(image, out pixels);

    // Mean-shift with a Gaussian density kernel; iterate until the shift
    // drops below the tolerance, with a hard iteration cap.
    IRadiallySymmetricKernel kernel = new GaussianKernel(pixelSize);
    var meanShift = new MeanShift(pixelSize, kernel, sigma)
    {
        Tolerance = 0.05,
        MaxIterations = 10
    };

    int[] idx = meanShift.Compute(pixels);

    // Replace each pixel with the mode of its cluster.
    pixels.ApplyInPlace((x, i) => meanShift.Clusters.Modes[idx[i]]);

    Bitmap result;
    arrayToImage.Convert(pixels, out result);
    pictureBox.Image = result;
}
void btnCompute_Click(object sender, EventArgs e)
{
    dataGridView2.Rows.Clear();

    // Feature vectors for every hand image.
    double[][] hands = extract();

    // Mean-centered PCA over the feature vectors (legacy API).
    pca = new PrincipalComponentAnalysis(hands, AnalysisMethod.Center);
    pca.Compute();

    // Render each eigenvector as a 32x32 "eigenhand" image.
    ArrayToImage reverse = new ArrayToImage(32, 32);
    for (int i = 0; i < pca.Components.Count; i++)
    {
        double[] eigenvector = pca.Components[i].Eigenvector;

        // Stretch the converter's range to the vector's actual extremes.
        reverse.Max = eigenvector.Max();
        reverse.Min = eigenvector.Min();

        Bitmap eigenHand;
        reverse.Convert(eigenvector, out eigenHand);

        dataGridView2.Rows.Add(eigenHand, pca.Components[i].Proportion);
    }

    // Populate the component overview and the distribution graphs.
    dgvPrincipalComponents.DataSource = pca.Components;
    CreateComponentCumulativeDistributionGraph(graphCurve);
    CreateComponentDistributionGraph(graphShare);

    btnCreateProjection.Enabled = true;
}
static void TestMeanShift()
{
    // Download the sample image to segment.
    Bitmap image = Accord.Imaging.Image.FromUrl("https://c1.staticflickr.com/4/3209/2527630511_fae07530c2_b.jpg");
    //ImageBox.Show("Original", image).Hold();

    // Converters between Bitmap pixels and double[] vectors in [-1, +1].
    var imageToArray = new ImageToArray(min: -1, max: +1);
    var arrayToImage = new ArrayToImage(image.Width, image.Height, min: -1, max: +1);

    double[][] pixels;
    imageToArray.Convert(image, out pixels);

    // Mean-shift with an Epanechnikov kernel and 0.1 bandwidth; iterate
    // until the means move less than 0.05, at most 10 times.
    MeanShift meanShift = new MeanShift()
    {
        Kernel = new EpanechnikovKernel(),
        Bandwidth = 0.1,
        Tolerance = 0.05,
        MaxIterations = 10
    };

    // Learn the clusters, then label every pixel.
    var clusters = meanShift.Learn(pixels);
    int[] labels = clusters.Decide(pixels);

    // Replace every pixel with the mode of its cluster.
    double[][] replaced = pixels.Apply((x, i) => clusters.Modes[labels[i]]);

    Bitmap result;
    arrayToImage.Convert(replaced, out result);
    //ImageBox.Show("Mean-Shift clustering", result).Hold();
}
public void ConvertTest4()
{
    // 4x4 image defined with WPF (System.Windows.Media) colors.
    System.Windows.Media.Color[] pixels =
    {
        Colors.Red,   Colors.Lime,        Colors.Blue,  Colors.Black,
        Colors.Black, Colors.Transparent, Colors.Red,   Colors.Black,
        Colors.Black, Colors.Lime,        Colors.Blue,  Colors.Black,
        Colors.Black, Colors.Black,       Colors.Black, Colors.Black,
    };

    // Convert the WPF colors into a BitmapSource.
    var toBitmapSource = new ArrayToBitmapSource(width: 4, height: 4);
    BitmapSource image;
    toBitmapSource.Convert(pixels, out image);

    // The same image defined with GDI+ (System.Drawing) colors.
    System.Drawing.Color[] pixels2 =
    {
        Color.Red,   Color.Lime,        Color.Blue,  Color.Black,
        Color.Black, Color.Transparent, Color.Red,   Color.Black,
        Color.Black, Color.Lime,        Color.Blue,  Color.Black,
        Color.Black, Color.Black,       Color.Black, Color.Black,
    };

    ArrayToImage toBitmap = new ArrayToImage(width: 4, height: 4);
    Bitmap image2;
    toBitmap.Convert(pixels2, out image2);

    // Both conversion paths must produce identical pixel matrices.
    var actual = image.ToMatrix();
    var expected = image2.ToMatrix();
    Assert.AreEqual(expected, actual);
}
public void ConvertTest3()
{
    // 4x4 image with a bright 2x2 square in the middle.
    double[] pixels =
    {
        0, 0, 0, 0,
        0, 1, 1, 0,
        0, 1, 1, 0,
        0, 0, 0, 0,
    };

    ArrayToImage toImage = new ArrayToImage(width: 4, height: 4);
    Bitmap image;
    toImage.Convert(pixels, out image);

    // Scale up to a 16x16 bitmap before converting back.
    image = new ResizeNearestNeighbor(16, 16).Apply(image);

    // Converter producing an array with values scaled into [0, 1].
    ImageToArray toArray = new ImageToArray(min: 0, max: 1);
    double[] array;
    toArray.Convert(image, out array);

    Assert.AreEqual(0, array.Min());
    Assert.AreEqual(1, array.Max());
    Assert.AreEqual(16 * 16, array.Length);
}
private void cluster(ushort j)
{
    cvsbmp = UtilFn.BitmapImage2Bitmap(images[j]);

    // Converters between Bitmap pixels and double[] vectors in [-1, +1].
    var imageToArray = new ImageToArray(min: -1, max: +1);
    var arrayToImage = new ArrayToImage(cvsbmp.Width, cvsbmp.Height, min: -1, max: +1);

    double[][] pixels;
    imageToArray.Convert(cvsbmp, out pixels);

    // Parse the requested cluster count. TryParse replaces the old
    // Int16.Parse wrapped in a broad catch(Exception) — invalid input is
    // an expected case and should not be handled via exceptions. Overflow
    // and malformed text both yield false, matching the old bail-out.
    short parsed;
    if (!short.TryParse(kCluster.Text, out parsed))
    {
        return;
    }
    int kk = parsed;
    if (kk < 1)
    {
        return;
    }

    // K-Means with squared Euclidean distance.
    KMeans kmeans = new KMeans(k: kk)
    {
        Distance = new SquareEuclidean(),
        Tolerance = 0.05
    };

    // Learn the clusters, then label every pixel.
    var clusters = kmeans.Learn(pixels);
    int[] labels = clusters.Decide(pixels);

    // Replace every pixel with its cluster centroid.
    double[][] replaced = pixels.Apply((x, i) => clusters.Centroids[labels[i]]);

    Bitmap result;
    arrayToImage.Convert(replaced, out result);
    imagesEdited.Add(converter.Convert(result, Type.GetType("BitmapImage"), null, null) as BitmapImage);
}
private void runKMeans()
{
    // Number of clusters selected in the UI.
    int k = (int)numClusters.Value;

    Bitmap source = img;

    // Converters between Bitmap pixels and double[] vectors in [-1, +1].
    ImageToArray toArray = new ImageToArray(min: -1, max: +1);
    ArrayToImage toImage = new ArrayToImage(source.Width, source.Height, min: -1, max: +1);

    double[][] pixels;
    toArray.Convert(source, out pixels);

    // K-Means with squared Euclidean distance.
    KMeans kmeans = new KMeans(k, new SquareEuclidean())
    {
        Tolerance = 0.05
    };
    int[] labels = kmeans.Learn(pixels).Decide(pixels);

    // Replace each pixel with its cluster centroid (in place).
    pixels.Apply((x, i) => kmeans.Clusters.Centroids[labels[i]], result: pixels);

    Bitmap result;
    toImage.Convert(pixels, out result);
    pictureBox1.Image = result;
}
static void Main(string[] args)
{
    List <Bitmap> trainingFaces = new List <Bitmap>();
    List <Bitmap> testingFaces = new List <Bitmap>();

    int imageWidth = 192;
    int imageHeight = 168;
    int trainingImageNumber = 44;
    int testingImageNumber = 21;

    // Load the yaleB03 subject: the first 44 images train the model,
    // the remaining 21 are held out for testing.
    for (int i = 1; i <= trainingImageNumber + testingImageNumber; i++)
    {
        string path = string.Format(@"yaleB03\subject3 ({0}).bmp", i);
        Bitmap face = new Bitmap(path);
        if (i <= trainingImageNumber)
        {
            trainingFaces.Add(face);
        }
        else
        {
            testingFaces.Add(face);
        }
    }

    // Convert every face bitmap into a double[] with values in [-1, +1].
    ImageToArray converter = new ImageToArray(-1, +1);

    List <double[]> trainingOutputList = new List <double[]>();
    foreach (Bitmap bitmap in trainingFaces)
    {
        double[] vector;
        converter.Convert(bitmap, out vector);
        trainingOutputList.Add(vector);
    }

    List <double[]> testingOutputList = new List <double[]>();
    foreach (Bitmap bitmap in testingFaces)
    {
        double[] vector;
        converter.Convert(bitmap, out vector);
        testingOutputList.Add(vector);
    }

    // Stack the training vectors into one row-per-face matrix.
    int size = imageHeight * imageWidth;
    double[,] data = new double[trainingFaces.Count, size];
    for (int i = 0; i < trainingOutputList.Count; i++)
    {
        data.SetRow(i, trainingOutputList[i]);
    }

    // Run the PCA over the face matrix and record the acceptance
    // threshold (MaxValue) over the training projections.
    ObjectPCA obj = new ObjectPCA(data);
    obj.take2();
    obj.setMaxValue();
    var finalData = obj.W;

    Console.WriteLine("max " + obj.MaxValue);

    // Count the test faces whose projection exceeds the learned maximum
    // (i.e. faces the model considers "bad" / unrecognized).
    int bad = 0;
    double x;
    foreach (var row in testingOutputList)
    {
        x = obj.projectImage(row.Transpose());
        if (x > obj.MaxValue)
        {
            Console.WriteLine(testingOutputList.IndexOf(row) + trainingImageNumber + " " + x);
            bad++;
        }
    }
    Console.WriteLine("eroare " + bad / (testingImageNumber + 0.0) * 100);

    // Save every eigenface (column of W) as a bitmap on disk.
    finalData = obj.W;
    ArrayToImage ati = new ArrayToImage(imageHeight, imageWidth);
    ati.Min = finalData.Min();
    ati.Max = finalData.Max();
    Bitmap eigenface = new Bitmap(imageHeight, imageWidth);
    for (int i = 0; i < finalData.Columns(); i++)
    {
        string path = string.Format(@"eigenfaces result\image{0}.bmp", i);
        ati.Convert(finalData.GetColumn(i), out eigenface);
        eigenface.Save(path);
    }

    // Release the GDI handles of the training bitmaps.
    foreach (Bitmap bitmap in trainingFaces)
    {
        bitmap.Dispose();
    }
}
// Renders the measurement channels stored in `hash` into a
// COMPACTSIZE x COMPACTSIZE color buffer and displays it in the given
// picture box.
//
// param == -1 draws a plain preview (valid pixels dark gray) and marks the
// pArea corners with small white crosses; param == 3 additionally marks
// the this.top/left/right/bottom corners with large crosses; any other
// param renders channel `param` through InterpolateColor, normalized by
// the cached per-channel range entries ranges["<param>n"] (min) and
// ranges["<param>x"] (max).
private void Draw(PictureBox picture, int param)
{
    ArrayToImage imageConverter = new ArrayToImage(COMPACTSIZE, COMPACTSIZE);
    // Nothing to draw until the channel hash has been populated.
    if (hash.Keys.Count == 0)
    {
        return;
    }
    Bitmap bitmap = new Bitmap(COMPACTSIZE, COMPACTSIZE);
    Color[] d = new Color[COMPACTSIZE * COMPACTSIZE];
    int z = 0; // index into the compact output buffer d
    int t = 0; // index into the full-resolution source channels
    for (int i = 0; i < IMAGESIZE; i++)
    {
        for (int j = 0; j < IMAGESIZE; j++)
        {
            // Only the window i, j in [140, 840) is rendered into d.
            if (i >= 140 && j >= 140 && i < 840 && j < 840)
            {
                // Channels 3/6/7 — negative values mark invalid pixels.
                // NOTE(review): names suggest L, u, v components — confirm.
                float Lv = ((float[])hash[3])[t];
                float u = ((float[])hash[6])[t];
                float v = ((float[])hash[7])[t];
                if (Lv >= 0 && u >= 0 && v >= 0)
                {
                    // White pixels are corner markers — never overwrite them.
                    if (d[z] != Color.White)
                    {
                        if (param == -1)
                        {
                            d[z] = Color.DarkGray;
                        }
                        else
                        {
                            // Normalize the channel value into [0, 1] using
                            // the cached min ("n") / max ("x") for this channel.
                            float val = ((float[])hash[param])[t];
                            float min = ranges[param.ToString() + "n"];
                            float max = ranges[param.ToString() + "x"];
                            d[z] = InterpolateColor((val - min) / (max - min));
                        }
                    }
                }
                else
                {
                    // Invalid pixel: paint black (unless it is a marker).
                    if (d[z] != Color.White)
                    {
                        d[z] = Color.Black;
                    }
                }
                // Mark the corners if this is just gray image
                if (param == -1)
                {
                    int x1 = t % IMAGESIZE - pArea.left;
                    int x2 = t % IMAGESIZE - pArea.right;
                    int y1 = (int)(t / IMAGESIZE) - pArea.top;
                    int y2 = (int)(t / IMAGESIZE) - pArea.bottom;
                    if ((x1 == 0 && y1 == 0) || (x1 == 0 && y2 == 0) || (x2 == 0 && y1 == 0) || (x2 == 0 && y2 == 0))
                    {
                        // Draw a small white cross (+/-20 cells) at the corner.
                        for (int g = -20; g < 20; g++)
                        {
                            if (z + g >= 0 && z + g < d.Length)
                            {
                                d[z + g] = Color.White;
                            }
                            if (z - COMPACTSIZE + g >= 0 && z - COMPACTSIZE + g < d.Length)
                            {
                                d[z - COMPACTSIZE + g] = Color.White;
                            }
                            if (z + g * COMPACTSIZE >= 0 && z + g * COMPACTSIZE < d.Length)
                            {
                                d[z + g * COMPACTSIZE] = Color.White;
                            }
                            if (z - 1 + g * COMPACTSIZE >= 0 && z - 1 + g * COMPACTSIZE < d.Length)
                            {
                                d[z - 1 + g * COMPACTSIZE] = Color.White;
                            }
                        }
                    }
                }
                // Mark the user-selected rectangle corners on channel 3.
                if (param == 3 && ((this.top > 0 && this.left > 0) || (this.right > 0 && this.bottom > 0)))
                {
                    int x1 = t % IMAGESIZE - this.left;
                    int x2 = t % IMAGESIZE - this.right;
                    int y1 = (int)(t / IMAGESIZE) - this.top;
                    int y2 = (int)(t / IMAGESIZE) - this.bottom;
                    if ((x1 == 0 && y1 == 0) || (x1 == 0 && y2 == 0) || (x2 == 0 && y1 == 0) || (x2 == 0 && y2 == 0))
                    {
                        // Larger white cross (+/-200 cells) for these corners.
                        for (int g = -200; g < 200; g++)
                        {
                            if (z + g >= 0 && z + g < d.Length)
                            {
                                d[z + g] = Color.White;
                            }
                            if (z - COMPACTSIZE + g >= 0 && z - COMPACTSIZE + g < d.Length)
                            {
                                d[z - COMPACTSIZE + g] = Color.White;
                            }
                            if (z + g * COMPACTSIZE >= 0 && z + g * COMPACTSIZE < d.Length)
                            {
                                d[z + g * COMPACTSIZE] = Color.White;
                            }
                            if (z - 1 + g * COMPACTSIZE >= 0 && z - 1 + g * COMPACTSIZE < d.Length)
                            {
                                d[z - 1 + g * COMPACTSIZE] = Color.White;
                            }
                        }
                    }
                }
                z++;
            }
            t++;
        }
    }
    imageConverter.Convert(d, out bitmap);
    picture.Image = bitmap;
}
public void ConvertTest3()
{
    // A 4x4 image whose central 2x2 block is set to full intensity.
    double[] pixels =
    {
        0, 0, 0, 0,
        0, 1, 1, 0,
        0, 1, 1, 0,
        0, 0, 0, 0,
    };

    ArrayToImage arrayConverter = new ArrayToImage(width: 4, height: 4);
    Bitmap image;
    arrayConverter.Convert(pixels, out image);

    // Enlarge the bitmap to 16x16 before the reverse conversion.
    image = new ResizeNearestNeighbor(16, 16).Apply(image);

    // Convert back into a matrix whose values are scaled into [0, 1].
    ImageToMatrix matrixConverter = new ImageToMatrix(min: 0, max: 1);
    double[,] matrix;
    matrixConverter.Convert(image, out matrix);

    Assert.AreEqual(0, matrix.Min());
    Assert.AreEqual(1, matrix.Max());
    Assert.AreEqual(16 * 16, matrix.Length);
}
public void ConvertTest1()
{
    ArrayToImage target = new ArrayToImage(16, 16);

    // 16x16 image: four isolated white pixels at (2,2), (2,13), (13,2), (13,13).
    double[] pixels =
    {
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 0
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 1
        0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, // 2
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 3
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 4
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 5
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 6
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 7
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 8
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 9
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 10
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 11
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 12
        0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, // 13
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 14
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 15
    };

    // Convert the array to a Bitmap and back to an array.
    Bitmap imageActual;
    target.Convert(pixels, out imageActual);

    double[] actual;
    ImageToArray back = new ImageToArray();
    back.Convert(imageActual, out actual);

    // Reference: a clone of the resource image, inverted and thresholded
    // (cloning avoids mutating the shared resource bitmap).
    double[] expected;
    Bitmap imageExpected = Accord.Imaging.Image.Clone(Properties.Resources.image1);
    new Invert().ApplyInPlace(imageExpected);
    new Threshold().ApplyInPlace(imageExpected);
    back.Convert(imageExpected, out expected);

    // The round-tripped pixels must match the reference exactly.
    for (int i = 0; i < pixels.Length; i++)
    {
        Assert.AreEqual(actual[i], expected[i]);
    }
}