/// <summary>
/// Builds SOM training input vectors from minutia features and initializes
/// the network, teacher and learning-rate schedule.
/// </summary>
/// <param name="features">Minutiae to encode as training vectors.</param>
/// <param name="somParams">SOM configuration (multipliers, radius, rate).</param>
/// <exception cref="ArgumentOutOfRangeException">
/// Thrown when <c>somParams.CharacteristicsCount</c> is less than 5, since the
/// encoding below always writes 5 characteristics per feature.
/// </exception>
public NetworkHelper(IList<Minutia> features, SOMParams somParams)
{
    var trainingFeaturesCount = features.Count;
    var featureCharacteristicsCount = somParams.CharacteristicsCount;

    // The loop below writes exactly slots [0..4]; a smaller configured count
    // would previously surface as an opaque IndexOutOfRangeException.
    if (featureCharacteristicsCount < 5)
        throw new ArgumentOutOfRangeException(
            "somParams",
            "CharacteristicsCount must be at least 5 to hold angle, type, X, Y and hash.");

    var inputs = new double[trainingFeaturesCount][];
    for (var i = 0; i < trainingFeaturesCount; i++)
    {
        var feature = features[i];
        inputs[i] = new double[featureCharacteristicsCount];
        inputs[i][0] = feature.Angle * somParams.AngleMultiplier;
        inputs[i][1] = (double)feature.MinutiaType * somParams.TypeMultiplier;
        inputs[i][2] = feature.X * somParams.CoordsMultiplier;
        inputs[i][3] = feature.Y * somParams.CoordsMultiplier;
        // NOTE(review): GetHashCode() is not guaranteed stable across runs or
        // processes; confirm it is intended as a training characteristic.
        inputs[i][4] = feature.GetHashCode() * somParams.HashCodeMultiplier;
    }

    _networkSize = CreateRectangleFromFeatures(trainingFeaturesCount);
    _som = new DistanceNetwork(featureCharacteristicsCount, trainingFeaturesCount);
    _teacher = new SOMLearning(_som, _networkSize[0], _networkSize[1]);
    _inputs = inputs;
    _learningRadius = somParams.LearningRadius;

    // Split the configured rate: 10% fixed floor + 90% drifting component.
    _fixedLearningRate = somParams.LearningRate / 10;
    _driftingLearningRate = _fixedLearningRate * 9;
}
/// <summary>
/// Initializes the SOM network and teacher from precomputed feature vectors.
/// </summary>
/// <param name="features">Training vectors; one row per feature.</param>
/// <param name="somParams">SOM configuration (radius, rate, characteristic count).</param>
public NetworkHelper(double[][] features, SOMParams somParams)
{
    // A jagged array's row count is its Length; GetLength(0) is the API
    // intended for multidimensional arrays (equivalent here, but unidiomatic).
    var trainingFeaturesCount = features.Length;
    var featureCharacteristicsCount = somParams.CharacteristicsCount;

    _networkSize = CreateRectangleFromFeatures(trainingFeaturesCount);
    _som = new DistanceNetwork(featureCharacteristicsCount, trainingFeaturesCount);
    _teacher = new SOMLearning(_som, _networkSize[0], _networkSize[1]);
    _inputs = features;
    _learningRadius = somParams.LearningRadius;

    // Split the configured rate: 10% fixed floor + 90% drifting component.
    _fixedLearningRate = somParams.LearningRate / 10;
    _driftingLearningRate = _fixedLearningRate * 9;
}
// Application entry point: builds the SOM, wires up the visualization
// panels and the animation timer, then runs the message loop.
static void Main()
{
    network = new DistanceNetwork(2, task.NetworkWidth * task.NetworkHeight);

    // Seed every neuron's two weights with small random values around 0.5.
    // Linear index walks x-major, matching x * NetworkHeight + y ordering.
    var neuronCount = task.NetworkWidth * task.NetworkHeight;
    for (var index = 0; index < neuronCount; index++)
    {
        var neuron = network.Layers[0].Neurons[index];
        neuron.Weights[0] = rnd.NextDouble() * 0.2 + 0.4;
        neuron.Weights[1] = rnd.NextDouble() * 0.2 + 0.4;
    }

    learning = new SOMLearning(network, task.NetworkWidth, task.NetworkHeight);
    learning.LearningRadius = task.LearningRadius;
    learning.LearningRate = task.LearningRate;

    Inputs = task.GenerateInputs().ToArray();

    // One paint handler per visualization surface.
    pointsPanel = new MyUserControl { Dock = DockStyle.Fill };
    pointsPanel.Paint += DrawPoints;
    networkPanel = new MyUserControl { Dock = DockStyle.Fill };
    networkPanel.Paint += DrawNetwork;
    networkGraphControl = new MyUserControl { Dock = DockStyle.Fill };
    networkGraphControl.Paint += DrawGraph;

    var pauseButton = new Button { Text = "Pause/Resume" };
    pauseButton.Click += PauseResume;

    // 2x2 grid: points / network on the left, graph / pause on the right,
    // each cell taking 50% of its row and column.
    var layout = new TableLayoutPanel { Dock = DockStyle.Fill, RowCount = 2, ColumnCount = 2 };
    layout.Controls.Add(pointsPanel, 0, 0);
    layout.Controls.Add(networkPanel, 0, 1);
    layout.Controls.Add(networkGraphControl, 1, 0);
    layout.Controls.Add(pauseButton, 1, 1);
    layout.RowStyles.Add(new RowStyle(SizeType.Percent, 50f));
    layout.RowStyles.Add(new RowStyle(SizeType.Percent, 50f));
    layout.ColumnStyles.Add(new ColumnStyle(SizeType.Percent, 50f));
    layout.ColumnStyles.Add(new ColumnStyle(SizeType.Percent, 50f));

    form = new Form { ClientSize = new Size(600, 600), Controls = { layout } };

    // Drive one learning step plus a full repaint at 10 Hz.
    timer = new System.Windows.Forms.Timer();
    timer.Tick += (sender, args) =>
    {
        Learning();
        form.Invalidate(true);
    };
    timer.Interval = 100;
    timer.Start();

    Application.Run(form);
}
// Worker thread: trains the SOM on random RGB samples until the iteration
// budget is reached or a stop is requested.
void SearchSolution( )
{
    // create learning algorithm
    SOMLearning trainer = new SOMLearning( network );

    // input vector, reused for every sample (3 color components)
    double[] input = new double[3];

    // split the configured rate: 10% fixed floor plus a 90% component
    // that decays linearly to zero over the run
    double fixedLearningRate = learningRate / 10;
    double driftingLearningRate = fixedLearningRate * 9;

    // iterations
    int i = 0;

    // loop
    while ( !needToStop )
    {
        // anneal learning rate and neighborhood radius as training progresses
        trainer.LearningRate = driftingLearningRate * ( iterations - i ) / iterations + fixedLearningRate;
        trainer.LearningRadius = (double) radius * ( iterations - i ) / iterations;

        // train on one random color sample
        input[0] = rand.Next( 256 );
        input[1] = rand.Next( 256 );
        input[2] = rand.Next( 256 );

        trainer.Run( input );

        // update map once per 10 iterations (when i = 9, 19, 29, ...)
        if ( ( i % 10 ) == 9 )
        {
            UpdateMap( );
        }

        // increase current iteration
        i++;

        // set current iteration's info (marshalled onto the UI thread;
        // the lambda captures i, so it shows the post-increment value)
        this.BeginInvoke ( new MethodInvoker (() => currentIterationBox.Text = i.ToString()));

        // stop ?
        if ( i >= iterations )
            break;
    }

    // enable settings controls
    EnableControls( true );
}
/// <summary>
/// Teaches the kanji patterns to the SOM network on a background thread.
/// </summary>
/// <param name="sender">The <see cref="BackgroundWorker"/> running this job.</param>
/// <param name="e">Carries the list of kanjis to learn in <c>e.Argument</c>; <c>e.Cancel</c> is set if a stop was requested.</param>
/// <exception cref="Exception">Thrown when two kanjis classify into the same SOM class.</exception>
protected override void teachingWorker_DoWork(object sender, DoWorkEventArgs e)
{
    // Extract the kanjis passed by the caller.
    var allKanjis = (List<Kanji>)e.Argument;

    // Build one pattern per kanji, according to the configured generation method.
    var allPatterns = new List<Pattern>(allKanjis.Count);
    foreach (var kanji in allKanjis)
    {
        string imageHash = string.Empty;
        Pattern pattern = null;
        switch (Method)
        {
            case GenerationMethod.Normal:
            case GenerationMethod.Heightmap:
                generatePattern_Normal(kanji.sourceImage, out pattern, out imageHash);
                break;
            case GenerationMethod.Hashing:
                generatePattern_Hashing(kanji.sourceImage, out pattern, out imageHash);
                break;
            default:
                // Previously an unknown method silently added a null pattern;
                // fail fast instead.
                throw new InvalidOperationException("Unsupported generation method: " + Method);
        }
        allPatterns.Add(pattern);
    }

    // Create a trainer over a square map of sqrt(OutputSize) x sqrt(OutputSize).
    var sqrtOutputSize = (int)Math.Sqrt(OutputSize);
    SOMLearning trainer = new SOMLearning(somNN, sqrtOutputSize, sqrtOutputSize);

    // Build the training input set from the generated patterns.
    var trainingSet = generateInputSet(allPatterns);

    // Learning iterations: rate and radius are linearly interpolated from
    // their initial to their ending values over the run.
    for (int i = 0; i < LearningIterations; i++)
    {
        var completedRatio = (float)i / (LearningIterations - 1);
        trainer.LearningRate = completedRatio * LearningEndingRate + (1 - completedRatio) * LearningInitialRate;
        trainer.LearningRadius = completedRatio * LearningEndingRadius + (1 - completedRatio) * LearningInitialRadius;

        trainer.RunEpoch(trainingSet);

        // Report progress, capped just below 100% (classification follows).
        ((BackgroundWorker)sender).ReportProgress((int)(completedRatio * 100 * .99f));

        // Honor a pending cancellation request.
        if (needToStop)
        {
            e.Cancel = true;
            break;
        }
    }

    // Classify every learned pattern; each kanji must land in a distinct class.
    var classes = new List<int>(allPatterns.Count);
    foreach (var pattern in allPatterns)
        classes.Add(classifyPattern(pattern));

    // Any() short-circuits on the first duplicate instead of materializing
    // all duplicate groups just to count them.
    if (classes.GroupBy(c => c).Any(g => g.Count() > 1))
        throw new Exception("No se pudo distinguir la diferencia de clase entre dos kanjis.");

    // Store each kanji in the dictionary under its winning class id.
    for (int i = 0; i < allKanjis.Count; i++)
    {
        learnedKanjis.Add(classes[i].ToString(), allKanjis[i]);
    }
}
// Worker thread: builds a fresh SOM over the 2-D point set and trains it,
// refreshing the visual map after every epoch.
void SearchSolution( )
{
    // Randomize initial weights within the panel's larger pixel dimension.
    var weightRange = Math.Max( pointsPanel.ClientRectangle.Width, pointsPanel.ClientRectangle.Height );
    Neuron.RandRange = new DoubleRange( 0, weightRange );

    // Square SOM of networkSize x networkSize neurons, 2 inputs (x, y).
    DistanceNetwork som = new DistanceNetwork( 2, networkSize * networkSize );
    SOMLearning teacher = new SOMLearning( som, networkSize, networkSize );

    // RGB visualization map, one cell per neuron.
    map = new int[networkSize, networkSize, 3];

    // 10% of the configured rate is a fixed floor; the remaining 90%
    // decays linearly to zero over the run.
    double fixedLearningRate = learningRate / 10;
    double driftingLearningRate = fixedLearningRate * 9;

    int iteration = 0;

    // UI callbacks, marshalled onto the form's thread via Invoke below.
    // refreshIterationsBox captures 'iteration', so it shows the live value.
    RefreshDelegate refreshIterationsBox = delegate( ) { currentIterationBox.Text = iteration.ToString( ); };
    RefreshDelegate enableControls = delegate( ) { EnableControls( true ); };

    while ( !needToStop )
    {
        // Anneal learning rate and neighborhood radius toward the end of training.
        teacher.LearningRate = driftingLearningRate * ( iterations - iteration ) / iterations + fixedLearningRate;
        teacher.LearningRadius = (double) learningRadius * ( iterations - iteration ) / iterations;

        // One full pass over the training set, then redraw.
        teacher.RunEpoch( trainingSet );
        UpdateMap( som );

        iteration++;
        this.Invoke( refreshIterationsBox );

        if ( iteration >= iterations )
            break;
    }

    // Re-enable the settings controls on the UI thread.
    this.Invoke( enableControls );
}
/// <summary>
/// Trains the Kohonen (SOM) network on the prepared training data,
/// annealing the learning rate and neighborhood radius each cycle.
/// </summary>
void KohonenTeach()
{
    Neuron.RandRange = new Range(0.0f, 1.0f);
    kohonenNetwork = new DistanceNetwork(liczba_wejsc, liczba_neuronow_pion * liczba_neuronow_poziom);
    kohonenNetwork.Randomize();

    SOMLearning teacher = new SOMLearning(kohonenNetwork);

    double driftingLearningRate = this.wps_zmiany_wsp;
    double fixedLearningRate = this.pocz_wart_wsp_nauki;
    double learningRadius = this.pocz_rozmiar_sasiedz;
    // NOTE(review): wsp_zmian_rozm_sasiedz (radius drift coefficient) was read
    // into a local here but never used; the radius anneal below only uses the
    // initial radius. Removed the dead local — confirm whether the drift
    // coefficient was supposed to participate in the radius schedule.

    int iteration = 1;
    while (!needToStop)
    {
        // Linear anneal: rate decays from (drifting + fixed) toward fixed,
        // radius decays from its initial value toward zero.
        teacher.LearningRate = driftingLearningRate * (numberOfCycles - iteration) / numberOfCycles + fixedLearningRate;
        teacher.LearningRadius = (double)learningRadius * (numberOfCycles - iteration) / numberOfCycles;

        teacher.RunEpoch(trainingKData);

        SetIterationsCount(iteration++);
        if (iteration > numberOfCycles)
            break;
    }

    teachingDone = true;
    needToStop = false;
    UpdateLabels();
}
// On "Start" button click: parse and clamp the settings, rebuild the
// network/trainer/map, and reset the iteration counter for the timer loop.
private void startButton_Click(object sender, System.EventArgs e)
{
    // Parse each setting with TryParse (expected bad input is not an
    // exceptional condition): clamp on success, use the default on failure —
    // same outcome as the previous try/catch-around-Parse version.
    int parsedSize;
    networkSize = int.TryParse( sizeBox.Text, out parsedSize )
        ? Math.Max( 5, Math.Min( 50, parsedSize ) ) : 15;

    int parsedIterations;
    iterations = int.TryParse( iterationsBox.Text, out parsedIterations )
        ? Math.Max( 10, Math.Min( 1000000, parsedIterations ) ) : 500;

    double parsedRate;
    learningRate = double.TryParse( rateBox.Text, out parsedRate )
        ? Math.Max( 0.00001, Math.Min( 1.0, parsedRate ) ) : 0.3;

    int parsedRadius;
    learningRadius = int.TryParse( radiusBox.Text, out parsedRadius )
        ? Math.Max( 1, Math.Min( 30, parsedRadius ) ) : 3;

    // update settings controls
    UpdateSettings( );

    // disable all settings controls except "Stop" button
    EnableControls( false );

    // generate training set from the collected points
    for ( int i = 0; i < pointsCount; i++ )
    {
        trainingSet[i] = new double[2] { points[i, 0], points[i, 1] };
    }

    // randomize initial weights within the panel's larger pixel dimension
    Neuron.RandRange = new DoubleRange( 0, Math.Max( pointsPanel.ClientRectangle.Width, pointsPanel.ClientRectangle.Height ) );

    // create network
    network = new DistanceNetwork( 2, networkSize * networkSize );

    // create learning algorithm
    trainer = new SOMLearning( network, networkSize, networkSize );

    // create RGB visualization map, one cell per neuron
    map = new int[networkSize, networkSize, 3];

    // 10% fixed floor + 90% drifting component for the anneal schedule
    fixedLearningRate = learningRate / 10;
    driftingLearningRate = fixedLearningRate * 9;

    // reset the iteration counter; training itself is driven elsewhere
    _i = 0;
}