// Configures the network's training phase to use classic back-propagation.
// Pulls the stop criteria (target error, epoch cap) from the build settings
// and the gradient parameters (learning rate, beta) from the propagation
// settings, then attaches the statistics listener so training can be observed.
public void CreateBackPropagationTrainingAlgorithm(AnnBuild annComp, BackPropagation prop)
{
    var target = annComp.ErrorTarget;
    errorTarget = target;

    network.InstantiateBackPropagationAlgorithm(
        target,
        annComp.MaxEpochs,
        prop.LearningRate,
        prop.Beta);

    network.BuildStatsListener();
}
// Trains a small MLP on the parity problem with resilient propagation (rprop)
// and logs progress plus a validation pass to test.log. Appends to the log;
// any exception is caught and written to the log rather than propagated.
public void TestMlpTrainingWthResilientPropPerformance()
{
    using (wr = new StreamWriter(@"test.log", true, System.Text.Encoding.ASCII, 1024))
    {
        try
        {
            // Build a 3-input, 1-output network with a single hidden layer of 4 neurons.
            uint[] hlayers = new uint[1];
            hlayers[0] = 4;
            Build(3, 1, hlayers, 1);
            wr.WriteLine("-> Test network performance for parity problem with rprop");

            double targetError = 0.05;
            uint epochs = 10000;
            mlpNetwork.InstantiateResilientPropagationAlgorithm(targetError, epochs);
            mlpNetwork.BuildStatsListener();

            Task<bool> trainResult = mlpNetwork.TrainingAsync(parityInput, parityOutput, targetError);
            DateTime start = DateTime.Now; // kept for the (commented) periodic progress log below

            // Busy-wait until training finishes, sampling outputs every 2 ms.
            // GetOutputValues is polled so the progress line below can be
            // re-enabled without restructuring the loop.
            while (!trainResult.IsCompleted)
            {
                List<double[][]> output = mlpNetwork.GetOutputValues();
                //wr.WriteLine("[" + (DateTime.Now-start).Milliseconds.ToString() + " ms] Output {0}", string.Join(";", output));
                Thread.Sleep(2);
            }

            // Safe to read .Result here: the loop above guarantees completion,
            // so this cannot block or deadlock.
            if (!trainResult.Result)
            {
                wr.WriteLine("-> Network training fail");
            }
            else
            {
                wr.WriteLine("-> Network trained! [Error: {0}, Epochs: {1}]",
                    mlpNetwork.CurrentError, mlpNetwork.EpochsTraining);
                wr.WriteLine("-> Validation:");
                int i = 0;
                foreach (double[] pattern in parityInputValidation)
                {
                    double[] output = mlpNetwork.Exec(pattern);
                    // BUG FIX: the network is built with 3 inputs, but the original
                    // line logged only pattern[0] and pattern[1], dropping the third
                    // input. Log every component of the pattern instead.
                    wr.WriteLine("-> Input: " + string.Join(" ", pattern));
                    // NOTE(review): ideals are read from parityOutput while inputs come
                    // from parityInputValidation — confirm the two arrays are index-aligned.
                    wr.WriteLine("-> Output: " + output[0] + " (ideal: " + parityOutput[i++][0] + ")");
                }
                wr.WriteLine("-> Validation end");
            }
        }
        catch (Exception e)
        {
            // Best-effort diagnostics: record the failure in the log and swallow,
            // so a broken run still leaves a readable trace in test.log.
            wr.WriteLine(e.Message + Environment.NewLine + e.StackTrace);
        }
    }
}