void K_Means(int K)
{
    #region Create Output layer
    for (int i = 0; i < 5; i++) // Construct the output layer: 5 perceptrons, each with K inputs
    {
        Perceptron per = new Perceptron(K);
        OutPut.Add(per);
    }
    #endregion

    #region Get_Centres
    // Pick K random training samples as the initial cluster centers.
    // Copy them, so that updating a center later never mutates the training sample itself.
    Random rd = new Random();
    for (int i = 0; i < K; i++)
    {
        cluster temp = new cluster();
        sample center = new sample();
        center.copysample(Training_data[rd.Next(0, Training_data.Count)]);
        temp.center = center;
        Clusters.Add(temp);

        sample s = new sample();
        s.copysample(temp.center);
        old_centers.Add(s);
    }
    #endregion

    // Perturb one old center so the first convergence check sees a difference
    // and the loop body runs at least once.
    old_centers[0].features[0] = -0.0002f;

    while (check())
    {
        #region Updating_old_centers
        for (int i = 0; i < K; i++)
        {
            Clusters[i].cluster_samples.Clear();
            old_centers[i].copysample(Clusters[i].center);
        }
        #endregion

        #region Clustering
        // Assign every training sample to the cluster with the nearest center.
        List<double> _out = new List<double>();
        for (int sampleIndx = 0; sampleIndx < Training_data.Count; sampleIndx++)
        {
            _out.Clear();
            for (int clusterIndx = 0; clusterIndx < Clusters.Count; clusterIndx++)
            {
                _out.Add(distance(Training_data[sampleIndx].features, Clusters[clusterIndx].center.features));
            }
            Clusters[get_min(_out)].cluster_samples.Add(Training_data[sampleIndx]);
        }
        #endregion

        #region Get new centers
        // Move every center to the mean of the samples assigned to it.
        for (int clusterIndx = 0; clusterIndx < Clusters.Count; clusterIndx++)
        {
            if (Clusters[clusterIndx].cluster_samples.Count == 0)
                continue; // keep an empty cluster's center where it is (avoids division by zero)

            int featureCount = Clusters[clusterIndx].center.features.Count();
            double[] x = new double[featureCount];
            for (int sampleIndx = 0; sampleIndx < Clusters[clusterIndx].cluster_samples.Count; sampleIndx++)
            {
                for (int featureIndx = 0; featureIndx < featureCount; featureIndx++)
                {
                    x[featureIndx] += Clusters[clusterIndx].cluster_samples[sampleIndx].features[featureIndx];
                }
            }
            for (int featureIndx = 0; featureIndx < featureCount; featureIndx++)
            {
                Clusters[clusterIndx].center.features[featureIndx] = (float)(x[featureIndx] / Clusters[clusterIndx].cluster_samples.Count);
            }
        }
        #endregion
    }
}
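// The K_Means routine above calls three helpers that are defined elsewhere in the class:
// distance(), get_min() and check() (the second version below calls its convergence test
// chek()). Their bodies are not shown here, so the following is only a minimal sketch of
// the behaviour the routine assumes: Euclidean distance between feature vectors, the index
// of the nearest center, and a "did any center move since the last pass?" test. The
// IList<float> parameter type is an assumption about how `features` is declared.

double distance(IList<float> a, IList<float> b)
{
    // Euclidean distance between two feature vectors of equal length
    double sum = 0;
    for (int i = 0; i < a.Count; i++)
        sum += (a[i] - b[i]) * (a[i] - b[i]);
    return Math.Sqrt(sum);
}

int get_min(List<double> distances)
{
    // Index of the smallest distance, i.e. the nearest cluster
    int best = 0;
    for (int i = 1; i < distances.Count; i++)
        if (distances[i] < distances[best])
            best = i;
    return best;
}

bool check()
{
    // Keep iterating while any center differs from its value on the previous pass
    for (int c = 0; c < Clusters.Count; c++)
        for (int f = 0; f < old_centers[c].features.Count(); f++)
            if (old_centers[c].features[f] != Clusters[c].center.features[f])
                return true;
    return false;
}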
private void kMeans(int k)
{
    #region Create Output layer
    for (int i = 0; i < 5; i++) // Construct the output layer: 5 perceptrons, each with k inputs
    {
        Perceptron per = new Perceptron(k);
        OutPut.Add(per);
    }
    #endregion

    #region Get_Centres
    // Pick k random training samples as the initial cluster centers.
    // Copy them, so that updating a center later never mutates the training sample itself.
    Random rd = new Random();
    for (int i = 0; i < k; i++)
    {
        cluster temp = new cluster();
        sample center = new sample();
        center.copysample(Training_data[rd.Next(0, Training_data.Count)]);
        temp.center = center;
        Cluster.Add(temp);

        sample s = new sample();
        s.copysample(temp.center);
        old_centers.Add(s);
    }
    #endregion

    // Perturb one old center so the first convergence check sees a difference
    // and the loop body runs at least once.
    old_centers[0].features[0] = -0.0002f;

    while (chek())
    {
        List<double> _out = new List<double>();

        #region Updating_old_centers
        for (int i = 0; i < k; i++)
        {
            Cluster[i].cluster_samples.Clear();
            old_centers[i].copysample(Cluster[i].center);
        }
        #endregion

        #region Clustering
        // Assign every training sample to the cluster with the nearest center.
        for (int i = 0; i < Training_data.Count; i++)
        {
            _out.Clear();
            for (int j = 0; j < k; j++)
            {
                _out.Add(distance(Training_data[i].features, Cluster[j].center.features));
            }
            Cluster[get_min(_out)].cluster_samples.Add(Training_data[i]);
        }
        #endregion

        #region Get new centers
        // Move every non-empty cluster's center to the mean of its assigned samples
        // (feature vectors are hard-coded to 128 dimensions here).
        for (int c = 0; c < k; c++)
        {
            double[] x = new double[128];
            for (int i = 0; i < Cluster[c].cluster_samples.Count; i++)
            {
                for (int j = 0; j < 128; j++)
                {
                    x[j] += Cluster[c].cluster_samples[i].features[j];
                }
            }
            if (Cluster[c].cluster_samples.Count != 0) // skip empty clusters (avoids division by zero)
            {
                for (int j = 0; j < 128; j++)
                {
                    Cluster[c].center.features[j] = (float)(x[j] / Cluster[c].cluster_samples.Count);
                }
            }
        }
        #endregion
    }
}
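// Both versions above rely on the small data classes used throughout (sample, cluster),
// which are defined elsewhere in the project. The sketch below is only an inference from
// how the code uses them: a sample holds a 128-element feature vector (the dimension
// hard-coded in kMeans) plus a copy helper, and a cluster holds its current center and
// the samples assigned to it on the current pass. The concrete container types are
// assumptions.

class sample
{
    public float[] features = new float[128]; // feature vector (assumed to be a float array)

    // Copy another sample's feature values into this one
    public void copysample(sample source)
    {
        for (int i = 0; i < source.features.Length; i++)
            features[i] = source.features[i];
    }
}

class cluster
{
    public sample center = new sample();                      // current centroid of the cluster
    public List<sample> cluster_samples = new List<sample>(); // samples assigned on the current pass
}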