private void btnRun_Click(object sender, EventArgs e)
{
    // Number of clusters requested by the user.
    int clusterCount = (int)numClusters.Value;

    // Source image from the embedded resources.
    Bitmap sourceImage = Properties.Resources.leaf;

    // Convert the bitmap into a jagged array of pixel values.
    double[][] pixelData = sourceImage.ToDoubleArray();

    // K-Means with the squared Euclidean distance as the metric.
    KMeans clustering = new KMeans(clusterCount, Distance.SquareEuclidean);

    // Iterate until the centroid movement between two iterations drops below 0.05.
    int[] assignments = clustering.Compute(pixelData, 0.05);

    // Quantize: overwrite each pixel with the centroid of its cluster.
    pixelData.ApplyInPlace((x, i) => clustering.Clusters.Centroids[assignments[i]]);

    // Display the quantized image.
    pictureBox.Image = pixelData.ToBitmap(sourceImage.Width, sourceImage.Height);
}
/// <summary>
/// Clusters the rows of <paramref name="objCoords"/> into <paramref name="K"/>
/// groups using K-Means and returns, for each cluster id, the list of row
/// indices assigned to that cluster.
/// </summary>
/// <param name="objCoords">Matrix whose rows are the observations to cluster.</param>
/// <param name="K">Number of clusters.</param>
/// <param name="depth">Unused; kept for interface compatibility.</param>
/// <returns>Map from cluster id to the row indices belonging to it.</returns>
public Dictionary<int, List<int>> apply(Matrix<double> objCoords, int K, int depth)
{
    var dict = new Dictionary<int, List<int>>();

    // Copy the matrix into the jagged-array layout expected by KMeans.Compute.
    double[][] objects = new double[objCoords.RowCount][];
    for (int rowIdx = 0; rowIdx < objCoords.RowCount; rowIdx++)
    {
        objects[rowIdx] = new double[objCoords.ColumnCount];
        for (int columnIdx = 0; columnIdx < objCoords.ColumnCount; columnIdx++)
            objects[rowIdx][columnIdx] = objCoords[rowIdx, columnIdx];
    }

    KMeans kmeans = new KMeans(K);
    int[] clusterIDs = kmeans.Compute(objects);

    // Group the row indices by their assigned cluster id.
    for (int objIdx = 0; objIdx < clusterIDs.Length; objIdx++)
    {
        int clusterID = clusterIDs[objIdx];
        List<int> currentElements;
        if (!dict.TryGetValue(clusterID, out currentElements))
        {
            currentElements = new List<int>();
            dict[clusterID] = currentElements;
        }
        // List<int> is a reference type: appending to the retrieved list
        // updates the dictionary entry in place, so the original's
        // re-assignment back into the dictionary was redundant.
        currentElements.Add(objIdx);
    }

    return dict;
}
public void KMeansConstructorTest()
{
    Accord.Math.Tools.SetupGenerator(0);

    // Three well-separated groups of three-dimensional points.
    double[][] observations =
    {
        new double[] { -5, -2, -1 },
        new double[] { -5, -5, -6 },
        new double[] {  2,  1,  1 },
        new double[] {  1,  1,  2 },
        new double[] {  1,  2,  2 },
        new double[] {  3,  1,  2 },
        new double[] { 11,  5,  4 },
        new double[] { 15,  5,  6 },
        new double[] { 10,  5,  6 },
    };

    // Keep a copy so we can verify the input is left untouched.
    double[][] orig = observations.MemberwiseClone();

    // Fit a 3-cluster K-Means and collect one label per observation.
    var kmeans = new KMeans(3);
    int[] labels = kmeans.Compute(observations);

    // Observations 0-1, 2-5 and 6-8 must each share a label...
    Assert.AreEqual(labels[0], labels[1]);
    Assert.AreEqual(labels[2], labels[3]);
    Assert.AreEqual(labels[2], labels[4]);
    Assert.AreEqual(labels[2], labels[5]);
    Assert.AreEqual(labels[6], labels[7]);
    Assert.AreEqual(labels[6], labels[8]);

    // ...and the three groups must carry pairwise different labels.
    Assert.AreNotEqual(labels[0], labels[2]);
    Assert.AreNotEqual(labels[2], labels[6]);
    Assert.AreNotEqual(labels[0], labels[6]);

    // Nearest() must reproduce exactly the labels produced by Compute().
    int[] labels2 = kmeans.Clusters.Nearest(observations);
    Assert.IsTrue(labels.IsEqual(labels2));

    // the data must not have changed!
    Assert.IsTrue(orig.IsEqual(observations));
}
/// <summary>
/// Clusters the feature table and returns the cluster index of each row.
/// </summary>
public int[] Classify()
{
    var clusterData = features.Select();

    // Estimate the number of clusters with BSAS before running K-Means.
    var theta = MinMaxTheta(clusterData);
    var bsasArgs = new BsasArgs(theta, m_iterations, clusterData, m_thetaStepNum);
    var scheme = new BasicSequentialAlgorithmicScheme(bsasArgs);
    var clusterCount = scheme.CalculateClasses();

    // Run K-Means with the BSAS-derived cluster count.
    var model = new KMeans(clusterCount, Distance.Euclidean);
    return model.Compute(clusterData);
}
// Verifies that K-Means is deterministic for a fixed initialization and
// non-deterministic (over many runs) for random initializations.
public void KMeansConstructorTest2()
{
    // Fixed seed so the first Randomize() call is reproducible.
    Accord.Math.Tools.SetupGenerator(0);

    // Declare some observations
    double[][] observations = 
    {
        new double[] { -5, -2, -1 },
        new double[] { -5, -5, -6 },
        new double[] {  2,  1,  1 },
        new double[] {  1,  1,  2 },
        new double[] {  1,  2,  2 },
        new double[] {  3,  1,  2 },
        new double[] { 11,  5,  4 },
        new double[] { 15,  5,  6 },
        new double[] { 10,  5,  6 },
    };

    double error, e;

    // Create a new algorithm
    KMeans kmeans = new KMeans(3);
    kmeans.Randomize(observations);

    // Save the first initialization.
    // NOTE(review): this saves the reference returned by Clusters.Centroids,
    // not a deep copy — the test only holds if Compute/the Centroids setter
    // do not mutate this array in place; confirm against the library version.
    double[][] initial = kmeans.Clusters.Centroids;

    // Compute the first K-Means
    kmeans.Compute(observations, out error);

    // Create more K-Means algorithms
    // with the same initializations: the final error must be identical.
    for (int i = 0; i < 1000; i++)
    {
        kmeans = new KMeans(3);
        kmeans.Clusters.Centroids = initial;
        kmeans.Compute(observations, out e);

        Assert.AreEqual(error, e);
    }

    // Create more K-Means algorithms
    // without the same initialization: at least one run out of 1000 is
    // expected to converge to a different error.
    bool differ = false;
    for (int i = 0; i < 1000; i++)
    {
        kmeans = new KMeans(3);
        kmeans.Compute(observations, out e);

        if (error != e)
            differ = true;
    }

    Assert.IsTrue(differ);
}
/// <summary>
/// Runs the K-Means algorithm.
/// </summary>
///
private void runKMeans()
{
    // Cluster count selected by the user.
    int clusterCount = (int)numClusters.Value;

    // Source bitmap from the embedded resources.
    Bitmap source = Properties.Resources.leaf;

    // Converters between bitmap pixels and [-1, +1] value arrays.
    ImageToArray toArray = new ImageToArray(min: -1, max: +1);
    ArrayToImage toImage = new ArrayToImage(source.Width, source.Height, min: -1, max: +1);

    // Extract the pixel values.
    double[][] pixels;
    toArray.Convert(source, out pixels);

    // K-Means over pixel values with the squared Euclidean metric, stopping
    // once the centroid change between two iterations falls below 0.05.
    KMeans clustering = new KMeans(clusterCount, Distance.SquareEuclidean)
    {
        Tolerance = 0.05
    };
    int[] assignments = clustering.Compute(pixels);

    // Replace each pixel by its cluster centroid (color quantization).
    pixels.ApplyInPlace((x, i) => clustering.Clusters.Centroids[assignments[i]]);

    // Convert back to a bitmap and display it.
    Bitmap quantized;
    toImage.Convert(pixels, out quantized);
    pictureBox.Image = quantized;
}
/// <summary>
/// Builds the clustering model selected by cType and fills the cluster labels.
/// </summary>
public void buildModel()
{
    // Lazily load the input matrix on first use.
    if (inputMatrix == null)
        getMatrix();

    switch (cType)
    {
        case clusterType.KMEANS:
            KMeans km = new KMeans(k);
            km.Compute(inputMatrix, precision);
            clusterCollection = km.Clusters;
            model = km;
            break;

        case clusterType.BINARY:
            BinarySplit split = new BinarySplit(k);
            split.Compute(inputMatrix, precision);
            clusterCollection = split.Clusters;
            model = split;
            break;

        case clusterType.GAUSSIANMIXTURE:
            GaussianMixtureModel gmm = new GaussianMixtureModel(k);
            gmm.Compute(inputMatrix, precision);
            clusterCollection = gmm.Gaussians;
            model = gmm;
            break;

        default:
            break;
    }

    // One textual label per cluster: "0" .. "k-1".
    lbl = new List<string>();
    for (int index = 0; index < k; index++)
        lbl.Add(index.ToString());
}
/// <summary>
/// Extracts LoCATe descriptors from every image, builds (or loads) a K-Means
/// visual-word code book, computes a bag-of-visual-words vector per image and
/// persists the resulting data set to disk.
/// </summary>
/// <param name="imageFiles">Images to index.</param>
/// <param name="IndexBgWorker">Worker used to report per-image progress.</param>
/// <param name="logWriter">Callback receiving progress messages.</param>
/// <param name="locateSetting">Indexing settings; must not be null.</param>
/// <exception cref="ArgumentNullException">locateSetting is null.</exception>
/// <exception cref="InvalidOperationException">
/// Code-book loading was requested but no code-book file exists.
/// </exception>
public void IndexFiles(FileInfo[] imageFiles, System.ComponentModel.BackgroundWorker IndexBgWorker,
    Action<string> logWriter, LocateSettings locateSetting = null)
{
    // The settings object is dereferenced unconditionally below; fail fast
    // with a clear exception instead of a NullReferenceException mid-run.
    if (locateSetting == null)
        throw new ArgumentNullException("locateSetting");

    // For time profiling
    long extractingTime, kMeanTime = 0, calcBagOfVisualTime = 0;
    Stopwatch sw1;

    SimpleSurfSift.LoCATe descriptorExtractor = new SimpleSurfSift.LoCATe();

    sw1 = Stopwatch.StartNew();
    logWriter("Index started, extracting Descriptors...");

    List<double[]> ListofDescriptorsForCookBook = new List<double[]>();
    List<LoCATeRecord> ListOfAllImageDescriptors = new List<LoCATeRecord>();

    int totalFileCount = imageFiles.Length;
    if (totalFileCount == 0)
    {
        logWriter("No files to index");
        return;
    }

    for (int i = 0; i < totalFileCount; i++)
    {
        var fi = imageFiles[i];
        using (Bitmap observerImage = (Bitmap)Image.FromFile(fi.FullName))
        {
            List<double[]> locateDescriptors = descriptorExtractor.extract(observerImage, "SURF");
            ListOfAllImageDescriptors.Add(new LoCATeRecord
            {
                Id = i,
                ImageName = fi.Name,
                ImagePath = fi.FullName,
                LoCATeDescriptors = locateDescriptors
            });

            if (locateSetting.IsCodeBookNeedToBeCreated)
            {
                if (locateDescriptors.Count > 4)
                {
                    // Sample a subset of this image's descriptors for
                    // code-book training (10d is the sampling argument;
                    // exact semantics live in RandomHelper).
                    RandomHelper randNumGenerator = new RandomHelper();
                    List<int> randIndexes = randNumGenerator.GetRandomNumberInRange(0, locateDescriptors.Count, 10d);
                    foreach (int index in randIndexes)
                        ListofDescriptorsForCookBook.Add(locateDescriptors[index]);
                }
                else
                {
                    Debug.WriteLine(fi.Name + " skip from index, because it didn't have significant feature");
                }
            }
        }
        IndexBgWorker.ReportProgress(i);
    }
    sw1.Stop();
    extractingTime = Convert.ToInt32(sw1.Elapsed.TotalSeconds);

    double[][] codeBook = null;
    if (locateSetting.IsCodeBookNeedToBeCreated)
    {
        logWriter("Indexing, Calculating Mean...");
        sw1.Restart(); // Restart() is equivalent to Reset() + Start()

        // Cluster the sampled descriptors; the centroids become the code book.
        KMeans kMeans = new KMeans(locateSetting.SizeOfCodeBook);
        kMeans.Compute(ListofDescriptorsForCookBook.ToArray());
        codeBook = kMeans.Clusters.Centroids;

        //------------Save CookBook
        string fullFileName = locateSetting.CodeBookFullPath;
        if (File.Exists(fullFileName))
            File.Delete(fullFileName);
        using (FileStream fs = new FileStream(fullFileName, FileMode.Create, FileAccess.Write, FileShare.None))
        {
            // SECURITY NOTE: BinaryFormatter is obsolete and unsafe for
            // untrusted streams; kept for compatibility with existing
            // code-book files, but consider migrating to a safer serializer.
            System.Runtime.Serialization.Formatters.Binary.BinaryFormatter bf =
                new System.Runtime.Serialization.Formatters.Binary.BinaryFormatter();
            bf.Serialize(fs, codeBook);
            fs.Close();
        }
        sw1.Stop();
        kMeanTime = Convert.ToInt32(sw1.Elapsed.TotalSeconds);
    }
    else
    {
        // Load a previously saved code book.
        string fullFileName = locateSetting.CodeBookFullPath;
        if (!File.Exists(fullFileName))
        {
            string msg = string.Format("Couldn't find {0}, Please Index before querying with Locate", fullFileName);
            throw new InvalidOperationException(msg);
        }
        using (FileStream fs = new FileStream(fullFileName, FileMode.Open, FileAccess.Read, FileShare.None))
        {
            System.Runtime.Serialization.Formatters.Binary.BinaryFormatter bf =
                new System.Runtime.Serialization.Formatters.Binary.BinaryFormatter();
            codeBook = (double[][])bf.Deserialize(fs);
            fs.Close();
        }
    }

    logWriter("Indexing, Calculating Bag of Visual Words...");
    sw1.Restart();

    // One bag-of-visual-words vector per image, quantized against the code book.
    List<LoCaTeBoWRecord> ListOfImageVisualBagOfWorks = new List<LoCaTeBoWRecord>();
    for (int i = 0; i < ListOfAllImageDescriptors.Count; i++)
    {
        double[] visualWordForImage = createVisualWord(ListOfAllImageDescriptors[i].LoCATeDescriptors, codeBook);
        LoCaTeBoWRecord rec = new LoCaTeBoWRecord
        {
            Id = ListOfAllImageDescriptors[i].Id,
            ImageName = ListOfAllImageDescriptors[i].ImageName,
            ImagePath = ListOfAllImageDescriptors[i].ImagePath,
            VisaulWord = visualWordForImage
        };
        ListOfImageVisualBagOfWorks.Add(rec);
        IndexBgWorker.ReportProgress(i);
    }

    logWriter("Indexing, Calculating ltcData...");
    //------------Creating sum histogram of all words
    double[][] AllDatas = ListOfImageVisualBagOfWorks.Select(des => des.VisaulWord).ToArray();
    int[] histogramSumOfAllVisualWords = createIndex(AllDatas);

    //------------Creating Image Records Data
    LoCaTeDataSet locateDS = new LoCaTeDataSet
    {
        AllImageRecordSet = ListOfImageVisualBagOfWorks,
        HistogramSumOfAllVisualWords = histogramSumOfAllVisualWords
    };

    logWriter("Indexing, Saving Image Data...");
    //------------Save image records (same BinaryFormatter caveat as above)
    string ImageRecordName = Path.Combine(DirectoryHelper.SaveDirectoryPath, "LoCATeImageRecords.bin");
    if (File.Exists(ImageRecordName))
        File.Delete(ImageRecordName);
    using (FileStream fs = new FileStream(ImageRecordName, FileMode.Create, FileAccess.Write, FileShare.None))
    {
        System.Runtime.Serialization.Formatters.Binary.BinaryFormatter bf =
            new System.Runtime.Serialization.Formatters.Binary.BinaryFormatter();
        bf.Serialize(fs, locateDS);
        fs.Close();
    }
    sw1.Stop();
    calcBagOfVisualTime = Convert.ToInt32(sw1.Elapsed.TotalSeconds);

    logWriter(string.Format("Extracting: {0} sec, KMeanTime: {1} sec, CalcBagOfVisalTime: {2} sec",
        extractingTime, kMeanTime, calcBagOfVisualTime));
}
/// <summary>
/// Returns up to 10 songs similar to the given descriptor vector: clusters the
/// stored (normalized) descriptors with K-Means, finds the query's cluster and
/// picks the nearest rows (Chebyshev distance) within that cluster.
/// </summary>
/// <param name="descriptoresEntrada">Raw descriptor values of the query song.</param>
/// <returns>Spotify ids (with the 14-char URI prefix stripped) of similar songs.</returns>
private List<String> GetSimilaresDatabaseKmeans(List<Double> descriptoresEntrada)
{
    ModeloSimilitudEntities db = new ModeloSimilitudEntities();
    List<canciones> ListaCanciones = db.canciones.ToList();

    // Normalize the query the same way the stored descriptors were normalized.
    Double[] vectorEntrada = descriptoresEntrada.ToArray();
    vectorEntrada = Normalizar(vectorEntrada);
    Double[][] matriz = csvtoMatrix("descriptoresNormalizados");

    int Nclusters = 7;
    KMeans kmeans = new KMeans(Nclusters, Accord.Math.Distance.Chebyshev);
    int[] indices = kmeans.Compute(matriz);

    // Cluster the query vector falls into. Uses Clusters.Nearest for
    // consistency with the other K-Means call sites in this code base.
    int Cluster = kmeans.Clusters.Nearest(vectorEntrada);

    int nroSimilares = 10;
    int[] indiceSimilar = new int[nroSimilares];
    // BUG FIX: the array previously defaulted to 0, so row 0 was treated as
    // "already chosen" by the Contains check and could never be selected.
    // Use -1 as the "empty slot" sentinel instead.
    for (int j = 0; j < nroSimilares; j++)
        indiceSimilar[j] = -1;

    // Greedily pick the nearest not-yet-chosen row from the query's cluster.
    for (int j = 0; j < nroSimilares; j++)
    {
        // BUG FIX: the fixed threshold of 1000 silently dropped matches whose
        // distance exceeded it; start from the largest possible distance.
        Double distancia = Double.MaxValue;
        for (int i = 0; i < indices.Length; i++)
        {
            if (!indiceSimilar.Contains(i) && Cluster == indices[i])
            {
                Double distanciatemp = Accord.Math.Distance.Chebyshev(vectorEntrada, matriz[i]);
                if (distanciatemp < distancia)
                {
                    distancia = distanciatemp;
                    indiceSimilar[j] = i;
                }
            }
        }
    }

    List<String> listaSimilares = new List<String>();
    foreach (int i in indiceSimilar)
    {
        // Skip slots that never received a match (cluster held fewer
        // than nroSimilares songs).
        if (i >= 0)
            listaSimilares.Add(ListaCanciones[i].id_spotify.Substring(14));
    }
    return listaSimilares;
}
/// <summary>
/// Clusters the data with K-Means (cluster count estimated via BSAS)
/// and shows how many distinct clusters were produced.
/// </summary>
private void RunKMeans(Double[][] data)
{
    // Derive features and a distance threshold for BSAS.
    var features = BSASFeatures(data);
    var theta = MinMaxTheta(features);
    var iterations = (int)iterationsNum.Value;

    // BSAS estimates how many clusters K-Means should use.
    var clusterCount = BSAS(theta, iterations, features);

    var model = new KMeans(clusterCount, Distance.Euclidean);
    var assignments = model.Compute(features);

    // Report the number of distinct cluster labels actually assigned.
    ClustersTextUpdate(assignments.Distinct().Length.ToString());
}
public void KMeansMoreClustersThanSamples()
{
    Accord.Math.Tools.SetupGenerator(0);

    // Nine observations — fewer than the fifteen clusters requested below.
    double[][] observations =
    {
        new double[] { -5, -2, -1 },
        new double[] { -5, -5, -6 },
        new double[] {  2,  1,  1 },
        new double[] {  1,  1,  2 },
        new double[] {  1,  2,  2 },
        new double[] {  3,  1,  2 },
        new double[] { 11,  5,  4 },
        new double[] { 15,  5,  6 },
        new double[] { 10,  5,  6 },
    };

    double[][] orig = observations.MemberwiseClone();

    var kmeans = new KMeans(15);

    // Requesting more clusters than samples must raise ArgumentException.
    bool thrown = false;
    try
    {
        kmeans.Compute(observations);
    }
    catch (ArgumentException)
    {
        thrown = true;
    }

    Assert.IsTrue(thrown);
}
/// <summary>
/// Fits a K-Means model on the input matrix and fills the cluster labels.
/// </summary>
private void buildModel()
{
    // Lazily load the input matrix on first use.
    if (inputMatrix == null)
        getMatrix();

    // Run K-Means with a convergence threshold of 1e-4.
    kmeans = new KMeans(k);
    kmeans.Compute(inputMatrix, 0.0001);

    // Numeric labels "0" .. "k-1", one per cluster.
    lbl = new List<string>();
    for (int index = 0; index < k; index++)
        lbl.Add(index.ToString());
}
/// <summary>
/// Computes a K-Means clustering of the mixture data and plots the
/// resulting per-sample classifications.
/// </summary>
private void btnInitialize_Click(object sender, EventArgs e)
{
    // Creates and computes a new K-Means clustering algorithm:
    kmeans = new KMeans(k);
    kmeans.Compute(mixture);

    // Classify all instances in mixture data. Uses Clusters.Nearest for
    // consistency with the other call sites in this code base (the
    // KMeans.Classify alias is not used anywhere else here).
    int[] classifications = kmeans.Clusters.Nearest(mixture);

    // Draw the classifications
    updateGraph(classifications);
}
public void KMeansMoreClustersThanSamples()
{
    Accord.Math.Tools.SetupGenerator(0);

    // Nine observations, fewer than the fifteen clusters requested below.
    double[][] observations =
    {
        new double[] { -5, -2, -1 },
        new double[] { -5, -5, -6 },
        new double[] {  2,  1,  1 },
        new double[] {  1,  1,  2 },
        new double[] {  1,  2,  2 },
        new double[] {  3,  1,  2 },
        new double[] { 11,  5,  4 },
        new double[] { 15,  5,  6 },
        new double[] { 10,  5,  6 },
    };

    // This variant of the test passes as long as Compute completes without
    // throwing. (The unused `orig` copy and `labels` assignment from the
    // original were removed; nothing asserted on them.)
    KMeans kmeans = new KMeans(15);
    kmeans.Compute(observations);
}
/// <summary>
/// Codifies the table, clusters its rows into two groups with K-Means, and
/// returns the cluster id nearest to the user-supplied input vector.
/// </summary>
/// <param name="tbl">Table containing the nine feature columns listed below.</param>
/// <returns>The cluster label ("0" or "1") nearest to the current input.</returns>
public string kmeans(DataTable tbl)
{
    // Translate categorical cell values into integer symbols.
    Codification codebook = new Codification(tbl);
    DataTable symbols = codebook.Apply(tbl);

    double[][] inputs = symbols.ToIntArray("Clump Thickness", "Uniformity of Cell Size",
        "Uniformity of Cell Shape", "Marginal Adhesion", "Single Epithelial Cell Size",
        "Bare Nuclei", "Bland Chromatin", "Normal Nucleoli", "Mitoses").ToDouble();

    // Cluster into two groups. (The unused `sayac` counter and the unused
    // "Class" column extraction from the original were removed.)
    KMeans kmeans = new KMeans(2);
    kmeans.Compute(inputs);

    // Find the cluster nearest to the user-entered record (inputlar holds
    // the nine feature values as strings).
    int c = kmeans.Clusters.Nearest(new double[]
    {
        Convert.ToInt32(inputlar[0]), Convert.ToInt32(inputlar[1]),
        Convert.ToInt32(inputlar[2]), Convert.ToInt32(inputlar[3]),
        Convert.ToInt32(inputlar[4]), Convert.ToInt32(inputlar[5]),
        Convert.ToInt32(inputlar[6]), Convert.ToInt32(inputlar[7]),
        Convert.ToInt32(inputlar[8])
    });

    return c.ToString();
}
/// <summary>
/// Initializes the Gaussian Mixture Models using K-Means
/// parameters as an initial parameter guess.
/// </summary>
///
private void btnInitialize_Click(object sender, EventArgs e)
{
    // Fit a fresh K-Means model on the mixture data.
    kmeans = new KMeans(k);
    kmeans.Compute(mixture);

    // Assign every sample in the mixture to its nearest cluster.
    int[] labels = kmeans.Clusters.Nearest(mixture);

    // Plot the resulting assignments.
    updateGraph(labels);
}
/// <summary>
/// Splits the VRP benchmark encoded in _problemData into partial problems by
/// K-Means-clustering the visits for every cluster count k = 1..5, and returns
/// each partial benchmark serialized as a byte array (1+2+3+4+5 = 15 entries).
/// </summary>
/// <param name="threadCount">Unused by this implementation.</param>
public override byte[][] DivideProblem(int threadCount)
{
    /****************** DESERIALIZE ************************/
    // NOTE(review): BinaryFormatter is obsolete and unsafe for untrusted
    // input; acceptable only if _problemData comes from a trusted source.
    BinaryFormatter formatter = new BinaryFormatter();
    VRPParser benchmark = (VRPParser)formatter.Deserialize(new MemoryStream(_problemData));

    /******************* DIVIDE *************************/
    // Combine coords (x, y) and time_avail (z) into one 3-D point per visit.
    List<Point> data = new List<Point>();
    for (int i = 0; i < benchmark.Num_Visits; i++)
    {
        List<double> point_coords = new List<double>();
        // does not include depots - which is what we want
        int loc_index = benchmark.Visit_Location[i];
        point_coords.Add(benchmark.Location_Coord[loc_index][0]);
        point_coords.Add(benchmark.Location_Coord[loc_index][1]);
        // z = availability + service duration; the "- 1" suggests Time_Avail
        // and Duration are indexed without the depot entry — TODO confirm.
        point_coords.Add(benchmark.Time_Avail[loc_index - 1] + benchmark.Duration[loc_index - 1]);
        data.Add(new Point(point_coords));
    }

    // get optimal number of clusters
    //PredictionStrength ps = new PredictionStrength(data);
    //ps.Compute(true);
    //int k = ps.BestK;
    int max_k = 5;
    int start_k = 1;

    // prepare byte array for all partial solutions:
    // one slot per cluster for every k, i.e. start_k + ... + max_k entries.
    int solutionsSize = 0;
    for (int i = start_k; i <= max_k; i++)
    {
        solutionsSize += i;
    }

    int temporarySolutionIndex = 0;
    byte[][] temporarySolution = new byte[solutionsSize][];

    for (int k = start_k; k <= max_k; k++)
    {
        // compute clusters
        KMeans clusters = new KMeans(data, k);
        clusters.Compute();

        // create k benchmarks for k solvers
        VRPParser[] partial_benchmarks = new VRPParser[k];
        for (int i = 0; i < k; i++)
        {
            VRPParser partial_benchmark = new VRPParser();
            List<int> cluster_indecies = clusters.GetCluterIndecies(i);
            // NOTE(review): ID is set to the cluster count k, not the cluster
            // index i — confirm this is intended.
            partial_benchmark.ID = k;

            /************ COMMON ****************/
            int num_depots = benchmark.Num_Depots;
            int num_visits = cluster_indecies.Count;
            int num_locations = cluster_indecies.Count + num_depots;

            partial_benchmark.Num_Visits = num_visits;
            partial_benchmark.Num_Depots = num_depots;
            partial_benchmark.Name = benchmark.Name;
            partial_benchmark.Num_Capacities = benchmark.Num_Capacities;
            // Each partial problem is solved with a single vehicle.
            partial_benchmark.Num_Vehicles = 1;
            partial_benchmark.Capacites = benchmark.Capacites;

            // Copy the depot arrays element-by-element so the partial
            // benchmark does not share these arrays with the original.
            partial_benchmark.Depots_IDs = new int[benchmark.Depots_IDs.Length];
            benchmark.Depots_IDs.CopyTo(partial_benchmark.Depots_IDs, 0);
            partial_benchmark.Depot_Location = new int[benchmark.Depot_Location.Length];
            benchmark.Depot_Location.CopyTo(partial_benchmark.Depot_Location, 0);
            partial_benchmark.Depot_Time_Window = new int[benchmark.Depot_Time_Window.Length][];
            for (int p = 0; p < partial_benchmark.Depot_Time_Window.Length; p++)
            {
                partial_benchmark.Depot_Time_Window[p] = new int[benchmark.Depot_Time_Window[p].Length];
                benchmark.Depot_Time_Window[p].CopyTo(partial_benchmark.Depot_Time_Window[p], 0);
            }

            /************ LOCATION_COORD ****************/
            partial_benchmark.Num_Locations = num_locations;
            int[][] location_coord = new int[num_locations][];
            // get all depots locations
            for (int j = 0; j < num_depots; j++)
            {
                location_coord[j] = new int[2];
                location_coord[j][0] = benchmark.Location_Coord[j][0];
                location_coord[j][1] = benchmark.Location_Coord[j][1];
            }
            // get all partial clients locations
            for (int j = num_depots; j < num_locations; j++)
            {
                location_coord[j] = new int[2];
                int clientNodeIndex = benchmark.Visit_Location[cluster_indecies[j - num_depots]];
                location_coord[j][0] = benchmark.Location_Coord[clientNodeIndex][0];
                location_coord[j][1] = benchmark.Location_Coord[clientNodeIndex][1];
            }
            partial_benchmark.Location_Coord = location_coord;

            /************ DEMAND ****************/
            int[] demands = new int[num_visits];
            for (int j = 0; j < num_visits; j++)
            {
                int clientNodeIndex = benchmark.Visit_Location[cluster_indecies[j]];
                // "- num_depots" suggests Demands excludes depot entries — TODO confirm.
                demands[j] = benchmark.Demands[clientNodeIndex - num_depots];
            }
            partial_benchmark.Demands = demands;

            /************ VISIT_LOCATION ****************/
            int[] visit_location = new int[num_visits];
            for (int j = 0; j < num_visits; j++)
            {
                int clientNodeIndex = benchmark.Visit_Location[cluster_indecies[j]] - num_depots;
                visit_location[j] = clientNodeIndex;
                //visit_location[j] = j + num_depots;
            }
            partial_benchmark.Visit_Location = visit_location;

            /************ DURATION ****************/
            int[] duration = new int[num_visits];
            for (int j = 0; j < num_visits; j++)
            {
                int clientNodeIndex = benchmark.Visit_Location[cluster_indecies[j]];
                duration[j] = benchmark.Duration[clientNodeIndex - num_depots];
            }
            partial_benchmark.Duration = duration;

            /************ TIME_AVAIL ****************/
            int[] time_avail = new int[num_visits];
            for (int j = 0; j < num_visits; j++)
            {
                int clientNodeIndex = benchmark.Visit_Location[cluster_indecies[j]];
                time_avail[j] = benchmark.Time_Avail[clientNodeIndex - num_depots];
            }
            partial_benchmark.Time_Avail = time_avail;

            partial_benchmarks[i] = partial_benchmark;
        }

        /************ SERIALIZATION ******************/
        for (int i = 0; i < partial_benchmarks.Count(); i++)
        {
            temporarySolution[temporarySolutionIndex++] = DataSerialization.ObjectToByteArray(partial_benchmarks[i]);
        }
    }
    return (temporarySolution);
}