public double EvaluateMPROP(BasicNetwork network, IMLDataSet data)
{
    var train = new ResilientPropagation(network, data);
    long start = DateTime.Now.Ticks;
    Console.WriteLine(@"Training 20 Iterations with MPROP");

    for (int i = 1; i <= 20; i++)
    {
        train.Iteration();
        Console.WriteLine("Iteration #" + i + " Error:" + train.Error);
    }
    //train.FinishTraining();

    long stop = DateTime.Now.Ticks;
    // Use TotalSeconds: TimeSpan.Seconds only returns the seconds component (0-59).
    double diff = new TimeSpan(stop - start).TotalSeconds;
    Console.WriteLine("MPROP Result:" + diff + " seconds.");
    Console.WriteLine("Final MPROP error: " + network.CalculateError(data));
    return diff;
}
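// Illustrative caller (an assumption, not from the source): RunMpropBenchmark is a
// hypothetical method that builds an XOR-style network and data set with the Encog 3.x
// C# API and passes them to EvaluateMPROP to time the training run.
public void RunMpropBenchmark()
{
    var network = new BasicNetwork();
    network.AddLayer(new BasicLayer(null, true, 2));
    network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 4));
    network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
    network.Structure.FinalizeStructure();
    network.Reset();

    double[][] input = { new[] { 0.0, 0.0 }, new[] { 0.0, 1.0 }, new[] { 1.0, 0.0 }, new[] { 1.0, 1.0 } };
    double[][] ideal = { new[] { 0.0 }, new[] { 1.0 }, new[] { 1.0 }, new[] { 0.0 } };
    IMLDataSet data = new BasicMLDataSet(input, ideal);

    double seconds = EvaluateMPROP(network, data);
    Console.WriteLine("Elapsed: " + seconds + " seconds");
}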
public double Evaluate(BasicNetwork network, IMLDataSet training)
{
    var rprop = new ResilientPropagation(network, training);
    double startingError = network.CalculateError(training);

    // ITERATIONS is a constant defined elsewhere in the class.
    for (int i = 0; i < ITERATIONS; i++)
    {
        rprop.Iteration();
    }

    double finalError = network.CalculateError(training);
    // Return the improvement in error achieved over the training run.
    return startingError - finalError;
}
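// Illustrative caller (an assumption, not from the source): ITERATIONS is assumed to be
// a class-level constant, and ReportImprovement is a hypothetical helper. A larger value
// returned by Evaluate means RPROP reduced the error more over the same iteration budget.
private const int ITERATIONS = 50;

public void ReportImprovement(BasicNetwork network, IMLDataSet training)
{
    double improvement = Evaluate(network, training);
    Console.WriteLine("Error improvement after " + ITERATIONS + " iterations: " + improvement);
}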
public void Train()
{
    TrainingErrorData.Clear();
    TestingIdealData.Clear();
    TestingResultsData.Clear();

    _network = ConstructNetwork(TrainingSet.InputSize, TrainingSet.IdealSize);
    //var trainer = new Backpropagation(_network, TrainingSet, LearningRate, Momentum);
    var trainer = new ResilientPropagation(_network, TrainingSet);

    double[] resultsArray = new double[TrainingSet.Count];

    IsBusy = true;
    for (int iteration = 0; iteration < NumberOfIterations; iteration++)
    {
        trainer.Iteration();
        // Record the error after each iteration for charting.
        TrainingErrorData.Add(new Tuple<int, double>(iteration, trainer.Error));
    }
    IsBusy = false;

    // Classify each training sample with the trained network.
    for (int i = 0; i < TrainingSet.Count; i++)
    {
        resultsArray[i] = _network.Classify(TrainingSet[i].Input);
    }

    TrainingErrorValue = _network.CalculateError(TrainingSet);
    Stage = Stage.Trained;
}
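// Hypothetical sketch of ConstructNetwork (its body is not shown in the source): assumes a
// feedforward Encog network with a single sigmoid hidden layer. HiddenNeuronCount is an
// illustrative property name, not taken from the original code.
private BasicNetwork ConstructNetwork(int inputSize, int idealSize)
{
    var network = new BasicNetwork();
    network.AddLayer(new BasicLayer(null, true, inputSize));                            // input layer with bias
    network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, HiddenNeuronCount)); // hidden layer
    network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, idealSize));        // output layer
    network.Structure.FinalizeStructure();
    network.Reset(); // randomize the initial weights
    return network;
}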