public RunEpoch ( double[][] input ) : double
input | double[][] | Array of input vectors. |
return | double | Returns summary learning error for the epoch. |
// Worker thread: trains the self-organizing map until all iterations
// complete or the user requests a stop.
void SearchSolution( )
{
    // Constrain random weight initialization to the panel's dimensions
    Neuron.RandRange = new DoubleRange( 0, Math.Max( pointsPanel.ClientRectangle.Width, pointsPanel.ClientRectangle.Height ) );

    // Build the SOM network (2 inputs, square grid of outputs) and its trainer
    DistanceNetwork som = new DistanceNetwork( 2, networkSize * networkSize );
    SOMLearning teacher = new SOMLearning( som, networkSize, networkSize );

    // Allocate the RGB map buffer shared with the display code
    map = new int[networkSize, networkSize, 3];

    // Split the configured rate into a fixed floor (1/10th) plus a
    // portion (9/10ths) that linearly decays to zero over the run
    double baseRate = learningRate / 10;
    double decayingRate = baseRate * 9;

    // Current iteration, captured by the UI-refresh delegates below
    int iteration = 0;

    RefreshDelegate showIteration = delegate( )
    {
        currentIterationBox.Text = iteration.ToString( );
    };
    RefreshDelegate restoreControls = delegate( )
    {
        EnableControls( true );
    };

    while ( !needToStop )
    {
        // Anneal rate and radius linearly as iterations progress
        teacher.LearningRate = decayingRate * ( iterations - iteration ) / iterations + baseRate;
        teacher.LearningRadius = (double) learningRadius * ( iterations - iteration ) / iterations;

        // Run one training epoch and redraw the map
        teacher.RunEpoch( trainingSet );
        UpdateMap( som );

        iteration++;

        // Marshal the iteration counter update onto the UI thread
        this.Invoke( showIteration );

        if ( iteration >= iterations )
            break;
    }

    // Re-enable settings controls on the UI thread
    this.Invoke( restoreControls );
}
/// <summary>
/// Teaches the kanji patterns to the network on a background thread.
/// </summary>
/// <param name="sender">The <see cref="BackgroundWorker"/> running this handler.</param>
/// <param name="e">Carries the kanji list in <c>e.Argument</c>; <c>e.Cancel</c> is set on stop requests.</param>
/// <exception cref="Exception">Thrown when two kanjis map to the same class (the network cannot distinguish them).</exception>
protected override void teachingWorker_DoWork(object sender, DoWorkEventArgs e)
{
    // Extract the kanjis handed to the worker
    var allKanjis = (List<Kanji>)e.Argument;

    // Build one pattern per kanji
    var allPatterns = new List<Pattern>(allKanjis.Count);
    foreach (var kanji in allKanjis)
    {
        // The generation mode decides how the pattern is produced
        string imageHash = string.Empty;
        Pattern pattern = null;
        switch (Method)
        {
            case GenerationMethod.Normal:
            case GenerationMethod.Heightmap:
                generatePattern_Normal(kanji.sourceImage, out pattern, out imageHash);
                break;
            case GenerationMethod.Hashing:
                generatePattern_Hashing(kanji.sourceImage, out pattern, out imageHash);
                break;
        }

        // Queue the pattern for learning
        allPatterns.Add(pattern);
    }

    // Create a trainer; the output layer is assumed to be a square grid
    var sqrtOutputSize = (int)Math.Sqrt(OutputSize);
    SOMLearning trainer = new SOMLearning(somNN, sqrtOutputSize, sqrtOutputSize);

    // Build the training data set
    var trainingSet = generateInputSet(allPatterns);

    // Learning iterations
    for (int i = 0; i < LearningIterations; i++)
    {
        // Interpolate rate and radius from initial to ending values.
        // Guard against 0/0 (NaN) when only a single iteration is configured.
        var completedRatio = LearningIterations > 1 ? (float)i / (LearningIterations - 1) : 1f;
        trainer.LearningRate = completedRatio * LearningEndingRate + (1 - completedRatio) * LearningInitialRate;
        trainer.LearningRadius = completedRatio * LearningEndingRadius + (1 - completedRatio) * LearningInitialRadius;

        // Run the epoch
        trainer.RunEpoch(trainingSet);

        // Report progress, capped just below 100% until classification finishes
        ((BackgroundWorker)sender).ReportProgress((int)(completedRatio * 100 * .99f));

        // Honor a cancellation request
        if (needToStop)
        {
            e.Cancel = true;
            break;
        }
    }

    // Classify every learned pattern
    var classes = new List<int>(allPatterns.Count);
    foreach (var pattern in allPatterns)
        classes.Add(classifyPattern(pattern));

    // Two kanjis mapped to the same class means the network cannot tell them apart
    if (classes.GroupBy(c => c).Any(g => g.Count() > 1))
        throw new Exception("No se pudo distinguir la diferencia de clase entre dos kanjis.");

    // Record each kanji under its class id
    for (int i = 0; i < allKanjis.Count; i++)
    {
        learnedKanjis.Add(classes[i].ToString(), allKanjis[i]);
    }
}
// Trains the Kohonen (SOM) network for numberOfCycles epochs, or until an
// external stop is requested, then updates the UI state flags and labels.
void KohonenTeach()
{
    // Initialize weights in [0, 1] and build the distance network
    Neuron.RandRange = new Range(0.0f, 1.0f);
    kohonenNetwork = new DistanceNetwork(liczba_wejsc, liczba_neuronow_pion * liczba_neuronow_poziom);
    kohonenNetwork.Randomize();

    SOMLearning teacher = new SOMLearning(kohonenNetwork);

    // Copy the learning parameters configured on the form
    double driftingLearningRate = this.wps_zmiany_wsp;
    double fixedLearningRate = this.pocz_wart_wsp_nauki;
    double learningRadius = this.pocz_rozmiar_sasiedz;
    // NOTE(review): the original read this.wsp_zmian_rozm_sasiedz (radius drift
    // factor) into a local that was never used; the radius below simply decays
    // linearly to zero. Confirm whether the radius was meant to drift the way
    // the learning rate does. The unused local has been removed.

    int iteration = 1;
    while (!needToStop)
    {
        // Learning rate decays linearly toward the fixed floor;
        // neighborhood radius decays linearly toward zero.
        teacher.LearningRate = driftingLearningRate * (numberOfCycles - iteration) / numberOfCycles + fixedLearningRate;
        teacher.LearningRadius = learningRadius * (numberOfCycles - iteration) / numberOfCycles;

        // One training epoch over the Kohonen data set
        teacher.RunEpoch(trainingKData);

        // Report progress and advance the counter
        SetIterationsCount(iteration++);
        if (iteration > numberOfCycles)
            break;
    }

    // Mark training finished and refresh the UI
    teachingDone = true;
    needToStop = false;
    UpdateLabels();
}