// Trains a SOM (self-organising map) over all loaded images.
// Pipeline: load -> preprocess -> flatten to [0,1] -> PCA (centering) -> SOM.
public void somTraining()
{
    // NOTE(review): `images` and `Data.instance.images` are used
    // interchangeably here — confirm they refer to the same collection.
    double[][] input_data = new double[Data.instance.images.Count][];
    for (int i = 0; i < images.Count; i++)
    {
        Bitmap image = new Bitmap(images[i]);
        image = preProcess(image);

        // Flatten pixels into a feature vector scaled to [0, 1].
        ImageToArray converter = new ImageToArray(0, 1);
        converter.Convert(image, out input_data[i]);
    }

    // Dimensionality reduction before clustering (centering only).
    pca = new PrincipalComponentAnalysis(PrincipalComponentMethod.Center);
    pca.Learn(input_data);
    double[][] input_pca = pca.Transform(input_data);

    // Clusters (groups) count: 4.
    // Use Length instead of LINQ Count() — input_pca[0] is an array.
    som_network = new DistanceNetwork(input_pca[0].Length, 4);
    som_learning = new SOMLearning(som_network);

    // Error goal: 0.001; max epochs: 100000.
    int maxIter = 100000;
    double maxError = 0.001;
    for (int i = 0; i < maxIter; i++)
    {
        double error = som_learning.RunEpoch(input_pca);
        if (error < maxError)
        {
            break;
        }
    }

    System.Windows.Forms.MessageBox.Show("SOM Training Complete");
}
// Clusters the input images: PCA projection followed by SOM training,
// then persists the trained network to disk.
void Clustering()
{
    pca = new PrincipalComponentAnalysis(); //pca
    var samples = inputImgArray.ToArray();
    pca.Learn(samples);
    var projected = pca.Transform(samples);

    // One network input per PCA component; grid size derived from the data count.
    dn = new DistanceNetwork(projected[0].Length, mainForm.closestSquareNumber(totalData));
    som = new SOMLearning(dn);

    Console.WriteLine("Learning");
    var epoch = 0;
    while (epoch < maxEpoch)
    {
        var epochError = som.RunEpoch(projected);
        if (epochError < errorGoal)
        {
            break;
        }
        // progress report every 10 epochs
        if (epoch % 10 == 0)
        {
            Console.WriteLine($"Report Cluster error {epoch} : {epochError}");
        }
        epoch++;
    }

    // Save the trained network for later prediction runs.
    dn.Save(savedDNNetwork);
}
// Builds and trains a SOM over the recent history of one tag.
// Each training vector is: hour, day-of-week, value at t, and the three
// previous values (retrospective window of 3).
static DistanceNetwork CreateSOM(string tagname)
{
    int samples = 2 * 31 * 24;   // roughly two months of hourly samples
    int retro = 3;               // retrospective window length

    var context = new TagDbContext();
    var data = context.TagValues
        .Where(t => t.Tag.TagName == tagname)
        .OrderByDescending(v => v.DateTime)
        .Select(v => new double[] { (double)v.DateTime.Hour, (double)v.DateTime.DayOfWeek, v.Value })
        // was `retrospective` (not defined in this method) — use the local
        // window size so the query is self-contained; TODO confirm no field
        // of that name was intended.
        .Take(samples + retro + 3)
        .AsEnumerable()
        .Reverse()
        .ToArray();

    // Size by `samples`, not data.Length: only `samples` rows are filled
    // below, and trailing null rows would crash RunEpoch.
    double[][] trainingSet = new double[samples][];
    for (int index = 0; index < samples; index++)
    {
        trainingSet[index] = new double[]
        {
            data[index + 3][0], // hour
            data[index + 3][1], // day of week
            data[index + 3][2], // value at t
            data[index + 2][2], // value at t-1
            data[index + 1][2], // value at t-2
            data[index][2]      // value at t-3
        };
    }

    var networkSize = 15;
    var iterations = 500;
    var learningRate = 0.3;
    var learningRadius = 3;

    Neuron.RandRange = new Range(0, 255);

    // create network — input count must match the 6-element training vectors
    // (was 2, apparently copied from a 2-D point example)
    DistanceNetwork network = new DistanceNetwork(6, networkSize * networkSize);

    // create learning algorithm
    SOMLearning trainer = new SOMLearning(network, networkSize, networkSize);

    double fixedLearningRate = learningRate / 10;
    double driftingLearningRate = fixedLearningRate * 9;

    // training loop: learning rate and radius decay linearly per iteration
    for (int i = 0; i < iterations; i++)
    {
        trainer.LearningRate = driftingLearningRate * (iterations - i) / iterations + fixedLearningRate;
        trainer.LearningRadius = (double)learningRadius * (iterations - i) / iterations;

        // run training epoch
        trainer.RunEpoch(trainingSet);
    }

    return (network);
}
// Round-trip test: a trained DistanceNetwork saved with Serializer must
// deserialize with the same topology and identical weights.
public void DistanceNetworkTest1()
{
    string basePath = Path.Combine(NUnit.Framework.TestContext.CurrentContext.TestDirectory, "dn");

    // Serializer.Save does not create missing directories — create it up
    // front so the test cannot fail with DirectoryNotFoundException.
    Directory.CreateDirectory(basePath);

    #region doc_example
    // Assure results are reproducible
    Accord.Math.Tools.SetupGenerator(0);

    int numberOfInputs = 3;
    int hiddenNeurons = 25;

    // Create some example inputs
    double[][] input =
    {
        new double[] { -1, -1, -1 },
        new double[] { -1, 1, -1 },
        new double[] { 1, -1, -1 },
        new double[] { 1, 1, -1 },
        new double[] { -1, -1, 1 },
        new double[] { -1, 1, 1 },
        new double[] { 1, -1, 1 },
        new double[] { 1, 1, 1 },
    };

    // Create a new network
    var network = new DistanceNetwork(numberOfInputs, hiddenNeurons);

    // Create a teaching algorithm
    var teacher = new SOMLearning(network);

    // Use the teacher to learn the network
    double error = Double.PositiveInfinity;
    for (int i = 0; i < 10; i++)
    {
        error = teacher.RunEpoch(input);
    }

    string fileName = Path.Combine(basePath, "ann.bin");

    // Save the network to a file path:
    Serializer.Save(network, fileName);

    // Load the network back from the stream
    DistanceNetwork target = Serializer.Load <DistanceNetwork>(fileName);
    #endregion

    // Make sure the network we loaded is exactly the same
    Assert.AreEqual(network.InputsCount, target.InputsCount);
    for (int i = 0; i < network.Layers.Length; i++)
    {
        Assert.AreEqual(network.Layers[i].InputsCount, target.Layers[i].InputsCount);
        for (int j = 0; j < network.Layers[i].Neurons.Length; j++)
        {
            Assert.AreEqual(network.Layers[i].Neurons[j].InputsCount, target.Layers[i].Neurons[j].InputsCount);
            Assert.AreEqual(network.Layers[i].Neurons[j].Weights, target.Layers[i].Neurons[j].Weights);
        }
    }
}
// Worker thread: trains a 2-D SOM over the point set and redraws the map
// after every epoch until stopped or the iteration budget is exhausted.
void SearchSolution()
{
    // networkSize x networkSize neurons, two inputs (x, y)
    DistanceNetwork network = new DistanceNetwork(2, networkSize * networkSize);

    // initial weights are randomised over the visible panel area
    var weightRange = new Range(0, Math.Max(pointsPanel.ClientRectangle.Width, pointsPanel.ClientRectangle.Height));
    foreach (var layer in network.Layers)
    {
        foreach (var neuron in layer.Neurons)
        {
            neuron.RandGenerator = new UniformContinuousDistribution(weightRange);
        }
    }

    SOMLearning trainer = new SOMLearning(network, networkSize, networkSize);

    // colour map rendered by UpdateMap
    map = new int[networkSize, networkSize, 3];

    double fixedLearningRate = learningRate / 10;
    double driftingLearningRate = fixedLearningRate * 9;

    int epoch = 0;
    while (!needToStop)
    {
        // rate and radius decay linearly toward their minimums
        trainer.LearningRate = driftingLearningRate * (iterations - epoch) / iterations + fixedLearningRate;
        trainer.LearningRadius = (double)learningRadius * (iterations - epoch) / iterations;

        trainer.RunEpoch(trainingSet);
        UpdateMap(network);

        epoch++;
        SetText(currentIterationBox, epoch.ToString());

        if (epoch >= iterations)
        {
            break;
        }
    }

    // restore the settings controls
    EnableControls(true);
}
// Constructor: seeds the RNG, clears the graph/cluster grids, builds the SOM
// with centre-biased initial weights, and wires the training timer.
public Form()
{
    InitializeComponent();
    rbCheck();
    chartType();
    Accord.Math.Random.Generator.Seed = 0;

    // Clear the 101x101 graph/cluster grids.
    // Fixed off-by-one: the inner bound was 100, leaving the last column
    // untouched (harmless only because bool/int default to false/0).
    for (int i = 0; i < 101; i++)
    {
        Graph[i] = new Boolean[101];
        Clusters[i] = new int[101];
        for (int j = 0; j < 101; j++)
        {
            Graph[i][j] = false;
            Clusters[i][j] = 0;
        }
    }

    #region Perceptron initialisation (disabled)
    /*
     * netAtt = new ActivationNetwork(new SigmoidFunction(0.2), Configuration[0], Configuration.Skip(1).ToArray());
     * netAtt.ForEachWeight(z => rnd.NextDouble());
     * teachAtt = new BackPropagationLearning(netAtt);
     * teachAtt.LearningRate = 1;
     *
     * netMed = new ActivationNetwork(new SigmoidFunction(0.2), Configuration[0], Configuration.Skip(1).ToArray());
     * netMed.ForEachWeight(z => rnd.NextDouble());
     * teachMed = new BackPropagationLearning(netMed);
     * teachMed.LearningRate = 1;
     *
     * network = new ActivationNetwork(new SigmoidFunction(6), Configuration[0], Configuration.Skip(1).ToArray());
     * network.ForEachWeight(z => rnd.NextDouble()*2-1);
     * teacher = new BackPropagationLearning(network);
     * teacher.LearningRate = 1;
     */
    #endregion

    // SOM grid; weights start in a narrow band around the centre (0.4 .. 0.6)
    // so the map unfolds outward during training.
    network = new DistanceNetwork(2, NetworkWidth * NetworkHeight);
    for (int x = 0; x < NetworkWidth; x++)
    {
        for (int y = 0; y < NetworkHeight; y++)
        {
            var n = network.Layers[0].Neurons[x * NetworkHeight + y];
            n.Weights[0] = Rnd.NextDouble() * 0.2 + 0.4;
            n.Weights[1] = Rnd.NextDouble() * 0.2 + 0.4;
        }
    }

    learning = new SOMLearning(network, NetworkWidth, NetworkHeight);
    learning.LearningRadius = LearningRadius;
    learning.LearningRate = LearningRate;

    // One training step per second, then repaint.
    Timer = new System.Windows.Forms.Timer();
    Timer.Tick += (sender, args) => { Learning(); this.Invalidate(true); };
    Timer.Interval = 1000;
}
// Trains the SOM clustering network from the loaded image set:
// preprocess -> flatten -> PCA (centering) -> SOM epochs.
private void btnTraining_Click(object sender, EventArgs e)
{
    double[][] input_data = new double[Data.getInstance().images.Count][];
    double[][] output_data = new double[Data.getInstance().images.Count][];

    // Class indexes are normalised into [0, 1].
    int max = Data.getInstance().classes.Count - 1;
    int min = 0;

    for (int i = 0; i < Data.getInstance().images.Count; i++)
    {
        Bitmap image = Data.getInstance().preprocessing(Data.getInstance().images[i]);
        ImageToArray converter = new ImageToArray(0, 1);
        converter.Convert(image, out input_data[i]);

        output_data[i] = new double[1];
        double classIndex = Data.getInstance().class_indexes[i];
        // Guard the single-class case: max == min would divide by zero
        // (producing NaN/Infinity); map everything to 0 instead.
        output_data[i][0] = (max > min) ? (classIndex - min) * (1 - 0) / (max - min) : 0;
    }

    pca = new PrincipalComponentAnalysis();
    pca.Method = PrincipalComponentMethod.Center;
    pca.Learn(input_data);
    double[][] input_from_pca = pca.Transform(input_data);

    // Output neuron count ends up as (classCount - 1)^2.
    // NOTE(review): this looks like it was meant to be classCount^2 — confirm
    // before changing, as the cluster layout depends on it.
    int a = 0;
    int output_count = 0;
    while (a < Data.getInstance().classes.Count)
    {
        output_count = a * a;
        a++;
    }

    // Use Length instead of LINQ Count() — input_from_pca[0] is an array.
    som_network = new DistanceNetwork(input_from_pca[0].Length, output_count);
    som_learning = new SOMLearning(som_network);

    // Train until the error goal or the epoch budget is reached.
    int max_iteration = 10000;
    double max_error = 0.0001;
    for (int i = 0; i < max_iteration; i++)
    {
        double error = som_learning.RunEpoch(input_from_pca);
        if (error < max_error)
        {
            break;
        }
    }

    btnBrowseClustering.Enabled = true;
    btnTraining.Enabled = false;
}
//initialize
// Constructs the find form around an already-trained distance network:
// wires a SOM learner to it, then loads the trained data and class map.
public findForm(DistanceNetwork dn)
{
    InitializeComponent();
    this.dn = dn;
    listView1.LargeImageList = imgList;
    //Predicting Method
    som = new SOMLearning(this.dn);
    init();                  // form-local initialisation
    getTrainedDataArray();   // NOTE(review): presumably loads stored feature vectors — confirm
    getClassMap();           // neuron -> class mapping
    classMapChecker();
}
// Registers an additional input modality and rebuilds the SOM, since the
// network's input size depends on the area of every registered modality.
public override void addModality(Signal s, string label = null)
{
    base.addModality(s, label);

    // Total input count = sum of width*height over all modalities.
    int totalInputs = 0;
    foreach (Signal modality in modalities)
    {
        totalInputs = totalInputs + modality.Width * modality.Height;
    }

    // Recreate the network and its teacher with the new dimensions.
    network = new DistanceNetwork(totalInputs, output.Width * output.Height);
    teacher = new SOMLearning(network, output.Width, output.Height);
}
// Worker thread
// Trains a colour SOM by feeding one random RGB triple per iteration.
void SearchSolution()
{
    // create learning algorithm
    SOMLearning trainer = new SOMLearning(network);

    // input: one RGB sample per training step
    double[] input = new double[3];

    // learning rate decays from (fixed + drifting) down to the fixed floor
    double fixedLearningRate = learningRate / 10;
    double driftingLearningRate = fixedLearningRate * 9;

    // iterations
    int i = 0;

    // loop
    while (!needToStop)
    {
        trainer.LearningRate = driftingLearningRate * (iterations - i) / iterations + fixedLearningRate;
        trainer.LearningRadius = (double)radius * (iterations - i) / iterations;

        // random colour sample
        input[0] = rand.Next(256);
        input[1] = rand.Next(256);
        input[2] = rand.Next(256);

        trainer.Run(input);

        // update map once per 10 iterations (comment previously said 50)
        if ((i % 10) == 9)
        {
            UpdateMap();
        }

        // increase current iteration
        i++;

        // set current iteration's info
        SetText(currentIterationBox, i.ToString());

        // stop ?
        if (i >= iterations)
        {
            break;
        }
    }

    // enable settings controls
    EnableControls(true);
}
// Worker thread
// Trains a colour SOM by feeding one random RGB triple per iteration.
void SearchSolution( )
{
    // create learning algorithm
    var trainer = new SOMLearning(this.network);

    // input: one RGB sample per training step
    var input = new double[3];

    // learning rate decays from (fixed + drifting) down to the fixed floor
    var fixedLearningRate = this.learningRate / 10;
    var driftingLearningRate = fixedLearningRate * 9;

    // iterations
    var i = 0;

    // loop
    while (!this.needToStop)
    {
        trainer.LearningRate = driftingLearningRate * (this.iterations - i) / this.iterations + fixedLearningRate;
        trainer.LearningRadius = (double)this.radius * (this.iterations - i) / this.iterations;

        // random colour sample
        input[0] = this.rand.Next(256);
        input[1] = this.rand.Next(256);
        input[2] = this.rand.Next(256);

        trainer.Run(input);

        // update map once per 10 iterations (comment previously said 50)
        if ((i % 10) == 9)
        {
            UpdateMap( );
        }

        // increase current iteration
        i++;

        // set current iteration's info
        this.currentIterationBox.Text = i.ToString( );

        // stop ?
        if (i >= this.iterations)
        {
            break;
        }
    }

    // enable settings controls
    EnableControls(true);
}
// Worker thread: trains the 2-D SOM over the training set and redraws the
// map after every epoch until stopped or the iteration budget is used up.
void SearchSolution( )
{
    // weight initialisation range covers the visible panel
    Neuron.RandRange = new DoubleRange(0, Math.Max(this.pointsPanel.ClientRectangle.Width, this.pointsPanel.ClientRectangle.Height));

    var network = new DistanceNetwork(2, this.networkSize * this.networkSize);
    var trainer = new SOMLearning(network, this.networkSize, this.networkSize);

    // colour map rendered by UpdateMap
    this.map = new int[this.networkSize, this.networkSize, 3];

    var fixedLearningRate = this.learningRate / 10;
    var driftingLearningRate = fixedLearningRate * 9;

    var epoch = 0;
    while (!this.needToStop)
    {
        // rate and radius decay linearly toward their minimums
        trainer.LearningRate = driftingLearningRate * (this.iterations - epoch) / this.iterations + fixedLearningRate;
        trainer.LearningRadius = (double)this.learningRadius * (this.iterations - epoch) / this.iterations;

        trainer.RunEpoch(this.trainingSet);
        UpdateMap(network);

        epoch++;
        this.currentIterationBox.Text = epoch.ToString( );

        if (epoch >= this.iterations)
        {
            break;
        }
    }

    // restore the settings controls
    EnableControls(true);
}
/// <summary>
/// Trains Network for Clustering (Distance Network, Self Organizing Map)
/// </summary>
/// <param name="epoch">Number of epoch</param>
/// <param name="errorGoal">Stop early once the epoch error drops below this value.</param>
/// <returns>Neural network Error</returns>
// TODO: Build the correct clustering network
public double TrainClusteringNetwork(int epoch = 10000, double errorGoal = 0.0001)
{
    _clusteringNetwork = CreateNewDistanceNetwork();

    // Cluster on the PCA projection of the category-stripped samples.
    var pcaResult = ComputePca(RemoveFromCategory(_allData.Values.ToList()));
    var trainer = new SOMLearning(_clusteringNetwork);

    var error = 0d;
    for (var i = 0; i < epoch; i++)
    {
        error = trainer.RunEpoch(pcaResult);
        // early exit once the configurable error goal is reached
        // (was a hard-coded 0.0001)
        if (error < errorGoal)
        {
            break;
        }
    }

    return (error);
}
// Worker thread
// Trains the colour SOM by feeding random RGB samples each iteration.
void SearchSolution()
{
    SOMLearning trainer = new SOMLearning(network);

    // one RGB sample per training step
    double[] input = new double[3];

    // learning rate decays from (fixed + drifting) down to the fixed floor
    double fixedLearningRate = learningRate / 10;
    double driftingLearningRate = fixedLearningRate * 9;

    int i = 0;
    while (!needToStop)
    {
        trainer.LearningRate = driftingLearningRate * (iterations - i) / iterations + fixedLearningRate;
        trainer.LearningRadius = radius * (iterations - i) / iterations;

        // NOTE(review): when rand is null the previous sample (initially all
        // zeros) is fed again — confirm that is intended rather than skipping
        // the training step entirely.
        if (rand != null)
        {
            input[0] = rand.Next(256);
            input[1] = rand.Next(256);
            input[2] = rand.Next(256);
        }

        trainer.Run(input);

        // update map once per 10 iterations (comment previously said 50)
        if ((i % 10) == 9)
        {
            UpdateMap();
        }

        i++;
        SetText(currentIterationBox, i.ToString());

        if (i >= iterations)
        {
            break;
        }
    }

    EnableControls(true);
}
// Trains the SOM wrapped in X.NetworkToTrain until the epoch error reaches
// X.WantedERROR, raising a progress event every Settings.LearningUpdateTick
// epochs. Returns the (now trained) network wrapper.
public NeuralNetwork Train(SOMTrainerArgs X)
{
    // Record which module performed the training.
    if (!Info.Moduls.Contains("NeuralNetworks/Learning/SOMLearnig.cs"))
    {
        Info.Moduls.Add("NeuralNetworks/Learning/SOMLearnig.cs");
    }

    SOMLearning Trainer = new SOMLearning(X.NetworkToTrain.DNetwork);
    // Non-positive arguments fall back to defaults (radius 5, rate 0.01).
    Trainer.LearningRadius = (X.LearningRadius <= 0) ? (5) : (X.LearningRadius);
    Trainer.LearningRate = (X.LearningRate <= 0) ? (0.01) : (X.LearningRate);

    uint tick = Settings.LearningUpdateTick;
    // NOTE: loops indefinitely if the error goal is never reached.
    while (true)
    {
        double Error = Trainer.RunEpoch(X.Inputs);
        if (Error <= X.WantedERROR)
        {
            break;
        }

        if (tick == 0)
        {
            Settings.OnUpdateEvent(this, new LearningUpdateArgs()
            {
                ERROR = Error,
                Inputs = X.Inputs,
                Outputs = null,
                NetworkState = X.NetworkToTrain,
            });
            // Bug fix: restart the countdown. Previously tick stayed at 0
            // after the first update, firing the event on every epoch.
            tick = Settings.LearningUpdateTick;
        }
        else
        {
            tick--;
        }
    }

    return (X.NetworkToTrain);
}
// Scaffold for a colour SOM experiment: builds the network and trainer; the
// training loop itself is still to be written (see the commented sketch).
public static void RunNetwork()
{
    //TODO: wrap genetic algorithm around this for input counts?
    //ActivationNetwork net = new ActivationNetwork(new ThresholdFunction(), 100, new int[] { 10, 15, 20 });

    // set neurons weights randomization range
    Neuron.RandRange = new Range(0f, 1f);

    // create network: 100 inputs, 100x100 output grid
    // (removed unused local `inputRange`)
    DistanceNetwork network = new DistanceNetwork(100, 100 * 100);

    // create learning algorithm
    SOMLearning trainer = new SOMLearning(network);

    // input buffer for the (future) training loop
    double[] input = new double[100];

    //// loop
    //while ( ... )
    //{
    //    // update learning rate and radius
    //    // ...
    //    trainer.
    //    // prepare network input
    //    input[0] = rand.Next( 256 );
    //    input[1] = rand.Next( 256 );
    //    input[2] = rand.Next( 256 );
    //    // run learning iteration
    //    trainer.Run( input );
    //    ...
    //}
}
// On "Start" button click
// Reads the settings fields (TryParse with defaults + clamping, instead of
// try/catch around Parse), builds the training set, and prepares the SOM.
private void startButton_Click(object sender, System.EventArgs e)
{
    // get network size (default 15, clamped to [5, 50])
    if (!int.TryParse(sizeBox.Text, out networkSize))
    {
        networkSize = 15;
    }
    networkSize = Math.Max(5, Math.Min(50, networkSize));

    // get iterations count (default 500, clamped to [10, 1000000])
    if (!int.TryParse(iterationsBox.Text, out iterations))
    {
        iterations = 500;
    }
    iterations = Math.Max(10, Math.Min(1000000, iterations));

    // get learning rate (default 0.3, clamped to [0.00001, 1.0])
    if (!double.TryParse(rateBox.Text, out learningRate))
    {
        learningRate = 0.3;
    }
    learningRate = Math.Max(0.00001, Math.Min(1.0, learningRate));

    // get radius (default 3, clamped to [1, 30])
    if (!int.TryParse(radiusBox.Text, out learningRadius))
    {
        learningRadius = 3;
    }
    learningRadius = Math.Max(1, Math.Min(30, learningRadius));

    // update settings controls
    UpdateSettings( );

    // disable all settings controls except "Stop" button
    EnableControls(false);

    // generate training set from the collected points
    for (int i = 0; i < pointsCount; i++)
    {
        // create new training sample
        trainingSet[i] = new double[2] { points[i, 0], points[i, 1] };
    }

    //// run worker thread
    //needToStop = false;
    //workerThread = new Thread( new ThreadStart( SearchSolution ) );
    //workerThread.Start( );

    // weight range covers the visible panel
    Neuron.RandRange = new DoubleRange(0, Math.Max(pointsPanel.ClientRectangle.Width, pointsPanel.ClientRectangle.Height));

    // create network
    network = new DistanceNetwork(2, networkSize * networkSize);

    // create learning algorithm
    trainer = new SOMLearning(network, networkSize, networkSize);

    // create map
    map = new int[networkSize, networkSize, 3];

    fixedLearningRate = learningRate / 10;
    driftingLearningRate = fixedLearningRate * 9;

    // iterations
    _i = 0;
}
// Trains a colour SOM over random pixels of the source image, then renders
// the learned neuron weights as an _nx x _ny map image.
private void StartCommandHandler(object sender, ExecutedRoutedEventArgs e)
{
    PrepareParams();
    lblStatus.Text = "Обучение";
    this.Cursor = Cursors.Wait;

    BitmapSource bmp = (BitmapSource)imgOrig.Source;
    int H = bmp.PixelHeight,
        W = bmp.PixelWidth,
        stride = (W * bmp.Format.BitsPerPixel + 7) / 8,
        len = H * stride;
    byte[] pixels = new byte[len];
    bmp.CopyPixels(pixels, stride, 0);

    // one BGRA pixel per training sample
    // (removed a dead loop that read b/g/r/a into discarded locals)
    double[] input = new double[4];

    Neuron.RandRange = new Range <double>(0, 255);
    nt = new DistanceNetwork(4, _nx * _ny);
    SOMLearning trainer = new SOMLearning(nt, _nx, _ny);

    double fixedLearningRate = _rate / 10;
    double driftingLearningRate = fixedLearningRate * 9;

    // iterations
    int k = 0;
    Random rand = new Random();

    // training loop: rate and radius decay linearly per iteration
    while (true)
    {
        trainer.LearningRate = driftingLearningRate * (_iterations - k) / _iterations + fixedLearningRate;
        trainer.LearningRadius = (double)_radius * (_iterations - k) / _iterations;

        // Pick a random PIXEL and convert to its byte offset (4 bytes/pixel).
        // Bug fix: the random value was previously used directly as a byte
        // index, sampling misaligned B/G/R/A tuples.
        // NOTE(review): assumes a 32-bpp format (stride == 4*W) — confirm.
        int i = rand.Next(H * W) * 4;
        input[0] = pixels[i];
        input[1] = pixels[i + 1];
        input[2] = pixels[i + 2];
        input[3] = pixels[i + 3];

        trainer.Run(input);

        // increase current iteration
        k++;

        // stop ?
        if (k >= _iterations)
        {
            break;
        }
    }

    // Render neuron weights back into a BGRA bitmap (one pixel per neuron).
    stride = 4 * _nx;
    byte[] array = new byte[_ny * stride];
    Layer layer = nt[0];
    for (int y = 0, i = 0; y < _ny; y++)
    {
        // for all pixels
        for (int x = 0; x < stride; i++, x += 4)
        {
            Neuron neuron = layer[i];
            // clamp weights into the valid byte range
            array[stride * y + x] = (byte)Math.Max(0, Math.Min(255, neuron[0]));
            array[stride * y + x + 1] = (byte)Math.Max(0, Math.Min(255, neuron[1]));
            array[stride * y + x + 2] = (byte)Math.Max(0, Math.Min(255, neuron[2]));
            array[stride * y + x + 3] = (byte)Math.Max(0, Math.Min(255, neuron[3]));
        }
    }

    try
    {
        System.Drawing.Graphics g = System.Drawing.Graphics.FromHwnd(IntPtr.Zero);
        WriteableBitmap bm1 = new WriteableBitmap(_nx, _ny, g.DpiX, g.DpiY, bmp.Format, null);
        bm1.WritePixels(new Int32Rect(0, 0, _nx, _ny), array, stride, 0);
        imgMap.Source = bm1;
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message);
    }

    lblStatus.Text = "";
    this.Cursor = Cursors.Arrow;
}
// Trains the SOM on the PCA-reduced image features, reusing a previously
// saved network when one exists and no new art has been added.
private void somLearningProcessing()
{
    // PCA.GetLength(1) means that first dimension is column and zero dimension is row
    // Map side: smallest square grid that fits all images (at least 2x2).
    int sqrt = (int)Math.Ceiling(Math.Sqrt(listImages.Count));
    if (sqrt <= 1)
    {
        sqrt = 2;
    }

    // Build a fresh network unless a saved one exists AND no new art was
    // added. (The original had three identical "build new" branches, and the
    // reuse branch tested loadActivationNetwork — the wrong field — which
    // could leave distanceNetwork unassigned.)
    if (Network.loadDistanceNetwork == null || Network.checkAddArt)
    {
        Network.distanceNetwork = new DistanceNetwork(principalComponentAnalysis.Result.GetLength(1), (int)Math.Pow(sqrt, 2));
        somLearning = new SOMLearning(Network.distanceNetwork);
    }
    else
    {
        Network.distanceNetwork = Network.loadDistanceNetwork;
    }

    // change double[,] to double[][]
    double[][] data = new double[listImages.Count][];
    for (int i = 0; i < listImages.Count; i++)
    {
        data[i] = new double[principalComponentAnalysis.Result.GetLength(1)];
        for (int j = 0; j < principalComponentAnalysis.Result.GetLength(1); j++)
        {
            data[i][j] = principalComponentAnalysis.Result[i, j];
        }
    }

    // Train only when a learner was built (reused networks skip training).
    if (somLearning != null)
    {
        double maxEpochs = 10000, errorRate = 0.0000001;
        for (int i = 0; i < maxEpochs; i++)
        {
            double error = somLearning.RunEpoch(data);
            if (error < errorRate)
            {
                break;
            }
        }
    }
}
// Entry point: builds the SOM and its learner, seeds the neuron weights near
// the centre of the unit square, wires up the WinForms UI (points / network /
// graph panels + pause button), and starts a timer that drives training.
static void Main()
{
    network = new DistanceNetwork(2, task.NetworkWidth * task.NetworkHeight);
    // start all weights in a narrow band around 0.5 so the map unfolds outward
    for (int x = 0; x < task.NetworkWidth; x++)
    {
        for (int y = 0; y < task.NetworkHeight; y++)
        {
            var n = network.Layers[0].Neurons[x * task.NetworkHeight + y];
            n.Weights[0] = rnd.NextDouble() * 0.2 + 0.4;
            n.Weights[1] = rnd.NextDouble() * 0.2 + 0.4;
        }
    }
    learning = new SOMLearning(network, task.NetworkWidth, task.NetworkHeight);
    learning.LearningRadius = task.LearningRadius;
    learning.LearningRate = task.LearningRate;
    Inputs = task.GenerateInputs().ToArray();

    // UI: 2x2 grid — points, network view, graph view, and a pause button
    pointsPanel = new MyUserControl() { Dock = DockStyle.Fill };
    pointsPanel.Paint += DrawPoints;
    networkPanel = new MyUserControl() { Dock = DockStyle.Fill };
    networkPanel.Paint += DrawNetwork;
    networkGraphControl = new MyUserControl { Dock = DockStyle.Fill };
    networkGraphControl.Paint += DrawGraph;
    var pauseButton = new Button { Text = "Pause/Resume" };
    pauseButton.Click += PauseResume;
    var table = new TableLayoutPanel() { Dock = DockStyle.Fill, RowCount = 2, ColumnCount = 2 };
    table.Controls.Add(pointsPanel, 0, 0);
    table.Controls.Add(networkPanel, 0, 1);
    table.Controls.Add(networkGraphControl, 1, 0);
    table.Controls.Add(pauseButton, 1, 1);
    // table.Controls.Add(pause, 1, 1);
    table.RowStyles.Add(new RowStyle(SizeType.Percent, 50f));
    table.RowStyles.Add(new RowStyle(SizeType.Percent, 50f));
    table.ColumnStyles.Add(new ColumnStyle(SizeType.Percent, 50f));
    table.ColumnStyles.Add(new ColumnStyle(SizeType.Percent, 50f));
    form = new Form() { ClientSize = new Size(600, 600), Controls = { table } };

    // one training step per timer tick (100 ms), then repaint everything
    timer = new System.Windows.Forms.Timer();
    timer.Tick += (sender, args) => { Learning(); form.Invalidate(true); };
    timer.Interval = 100;
    timer.Start();
    Application.Run(form);
}
// Lets the user pick images, clusters them with PCA + SOM, and groups the
// list view entries by the winning neuron of each image.
public Browse()
{
    InitializeComponent();
    if (listView1.Items.Count == 0)
    {
        MessageBox.Show("Add some art first");
    }
    else
    {
        OpenFileDialog dialog = new OpenFileDialog();
        dialog.Multiselect = true;
        List <double[]> PCAInput = new List <double[]>();
        if (dialog.ShowDialog() == DialogResult.OK)
        {
            for (int i = 0; i < dialog.FileNames.Length; i++)
            {
                Bitmap image = new Bitmap(dialog.FileNames.ElementAt(i));
                image = preprocessImage(image);
                double[] output;
                ImageToArray converter = new ImageToArray();
                converter.Convert(image, out output);
                PCAInput.Add(output);
            }
            // Compute the PCA once over the full set — it was previously
            // re-created and re-computed on every loop iteration.
            pca = new PrincipalComponentAnalysis(PCAInput.ToArray());
            pca.Compute();
        }

        // Dialog cancelled / nothing selected -> nothing to cluster
        // (previously dereferenced a null pca).
        if (pca == null)
        {
            return;
        }

        int sampleCount = pca.Result.GetLength(0);     // rows = images
        int componentCount = pca.Result.GetLength(1);  // columns = components

        // Network input size must be the component count (columns), not the
        // number of samples (rows).
        dn = new DistanceNetwork(componentCount, 4);
        som = new SOMLearning(dn);

        // copy the double[,] PCA result into jagged rows
        // (the row loop previously used the column bound)
        double[][] inputSOM = new double[sampleCount][];
        for (int i = 0; i < sampleCount; i++)
        {
            inputSOM[i] = new double[componentCount];
            for (int j = 0; j < componentCount; j++)
            {
                inputSOM[i][j] = pca.Result[i, j];
            }
        }

        //training som
        int epoch = 10000;
        double minError = 0.0000001;
        for (int i = 0; i < epoch; i++)
        {
            double error = som.RunEpoch(inputSOM);
            if (error < minError)
            {
                break;
            }
        }

        //clustering: assign each image to its winning neuron's group
        for (int i = 0; i < sampleCount; i++)
        {
            dn.Compute(inputSOM[i]);
            int winner = dn.GetWinner();
            ListViewGroup group;
            if (listView1.Groups[winner.ToString()] == null)
            {
                // create a new group for this neuron
                group = new ListViewGroup(winner.ToString(), "" + winner);
            }
            else
            {
                // reuse the existing group
                group = listView1.Groups[winner.ToString()];
            }
            listView1.Groups.Add(group);
            //listView1.Items.Add(new ListViewItem(dialog.SafeFileNames[i], i, group));
            //imageList1.Images.Add(new Bitmap(dialog.FileNames.ElementAt(i)));
        }
    }
}