/// <summary>
/// Trains <paramref name="network"/> on <paramref name="trainingSet"/> with the
/// requested primary trainer, hybridized with simulated annealing, until the
/// stop strategy reports convergence.
/// </summary>
/// <param name="what">Label used in the progress output.</param>
/// <param name="network">The network to train (modified in place).</param>
/// <param name="trainingSet">Supervised training data.</param>
/// <param name="Method">"Leven" selects Levenberg-Marquardt; any other value selects backpropagation.</param>
/// <returns>The final training error.</returns>
private double TrainNetwork(String what, BasicNetwork network, IMLDataSet trainingSet, string Method)
{
    // Simulated annealing acts as the secondary trainer inside the hybrid strategy.
    ICalculateScore score = new TrainingSetScore(trainingSet);
    IMLTrain trainAlt = new NeuralSimulatedAnnealing(network, score, 10, 2, 100);

    // FIX: compare with the literal as the receiver (ordinal) so a null Method
    // falls through to backpropagation instead of throwing NullReferenceException.
    IMLTrain trainMain;
    if ("Leven".Equals(Method, StringComparison.Ordinal))
    {
        Console.WriteLine("Using LevenbergMarquardtTraining");
        trainMain = new LevenbergMarquardtTraining(network, trainingSet);
    }
    else
    {
        trainMain = new Backpropagation(network, trainingSet);
    }

    var stop = new StopTrainingStrategy();
    trainMain.AddStrategy(new Greedy());
    trainMain.AddStrategy(new HybridStrategy(trainAlt));
    trainMain.AddStrategy(stop);

    int epoch = 0;
    while (!stop.ShouldStop())
    {
        trainMain.Iteration();
        app.WriteLine("Training " + what + ", Epoch #" + epoch + " Error:" + trainMain.Error);
        epoch++;
    }
    return (trainMain.Error);
}
/// <summary>
/// Trains an SVM on the given market data, iterating until the error plateaus,
/// then reports the elapsed wall-clock time and final error.
/// </summary>
/// <param name="network">The support vector machine to train.</param>
/// <param name="training">Market training data.</param>
/// <returns>The final training error.</returns>
public static double TrainNetworks(SupportVectorMachine network, MarketMLDataSet training)
{
    SVMTrain trainMain = new SVMTrain(network, training);
    // Stop once improvement drops below 0.0001 over a 200-iteration window.
    StopTrainingStrategy stop = new StopTrainingStrategy(0.0001, 200);
    trainMain.AddStrategy(stop);

    var sw = new Stopwatch();
    sw.Start();
    while (!stop.ShouldStop())
    {
        // NOTE(review): Pre/PostIteration are invoked explicitly here — confirm
        // Iteration() does not already call them, otherwise strategies are
        // notified twice per pass.
        trainMain.PreIteration();
        trainMain.Iteration();
        trainMain.PostIteration();
        Console.WriteLine(@"Iteration #:" + trainMain.IterationNumber + @" Error:" + trainMain.Error);
    }
    sw.Stop();
    // FIX: the original summary ran its fields together ("...in :123For error:...");
    // added separators and a time unit so the log line is readable.
    Console.WriteLine("SVM Trained in: " + sw.ElapsedMilliseconds + " ms, for error: " + trainMain.Error + ", iterated: " + trainMain.IterationNumber);
    return (trainMain.Error);
}
/// <summary>
/// Trains the network with backpropagation hybridized with simulated annealing
/// (greedy acceptance), iterating until the error improvement drops below the
/// stop strategy's threshold.
/// </summary>
/// <param name="network">The network to train (modified in place).</param>
/// <param name="minis">Training data.</param>
/// <returns>The final training error.</returns>
public static double TrainNetworks(BasicNetwork network, IMLDataSet minis)
{
    // Simulated annealing acts as the secondary trainer inside the hybrid strategy.
    ICalculateScore score = new TrainingSetScore(minis);
    IMLTrain trainAlt = new NeuralSimulatedAnnealing(network, score, 10, 2, 100);
    IMLTrain trainMain = new Backpropagation(network, minis, 0.0001, 0.01);

    // Stop once improvement drops below 0.0001 over a 200-iteration window.
    StopTrainingStrategy stop = new StopTrainingStrategy(0.0001, 200);
    trainMain.AddStrategy(new Greedy());
    trainMain.AddStrategy(new HybridStrategy(trainAlt));
    trainMain.AddStrategy(stop);

    // FIX: removed a Stopwatch that was started and stopped but whose elapsed
    // time was never read (dead code).
    while (!stop.ShouldStop())
    {
        trainMain.Iteration();
        Console.WriteLine(@"Iteration #:" + trainMain.IterationNumber + @" Error:" + trainMain.Error + @" Genetic Iteration:" + trainAlt.IterationNumber);
    }
    return (trainMain.Error);
}
/// <summary>
/// Trains the underlying Encog network on the supplied data with
/// Levenberg-Marquardt hybridized with simulated annealing, iterating until
/// the per-iteration error improvement falls below 1e-7.
/// </summary>
/// <param name="info">Source of input/output training pairs.</param>
/// <param name="lr">Unused — Levenberg-Marquardt does not take a learning rate.</param>
/// <param name="mom">Unused — Levenberg-Marquardt does not take momentum.</param>
/// <returns>The final training error.</returns>
public override double Train(Data info, float lr, float mom)
{
    IMLDataSet data = new BasicMLDataSet(info.InputData, info.OutputData);

    // Simulated annealing acts as the secondary trainer inside the hybrid strategy.
    ICalculateScore score = new TrainingSetScore(data);
    IMLTrain trainAlt = new NeuralSimulatedAnnealing(EncogNetwork, score, 10, 2, 1000);
    IMLTrain learner = new LevenbergMarquardtTraining(EncogNetwork, data);

    // FIX: removed a StopTrainingStrategy that was constructed but never
    // attached to the trainer (dead code); the do/while below is the actual
    // stopping condition.
    learner.AddStrategy(new Greedy());
    learner.AddStrategy(new HybridStrategy(trainAlt));

    // Iterate until the error stops improving by more than 1e-7. Error is 0
    // before the first iteration, so it is only captured as the previous error
    // once it becomes non-zero.
    double lastError = double.PositiveInfinity;
    do
    {
        if (learner.Error != 0)
        {
            lastError = learner.Error;
        }
        learner.Iteration();
    } while (lastError - learner.Error > 0.0000001);

    return (learner.Error);
}
/// <summary>
/// Runs hybrid backpropagation / simulated-annealing training on the given
/// network, logging the error each epoch, until the stop strategy signals
/// convergence. Returns the final training error.
/// </summary>
private double TrainNetwork(String what, BasicNetwork network, IMLDataSet trainingSet)
{
    // Annealing serves as the secondary trainer inside the hybrid strategy.
    var annealing = new NeuralSimulatedAnnealing(
        network, new TrainingSetScore(trainingSet), 10, 2, 100);

    IMLTrain backprop = new Backpropagation(network, trainingSet, 0.00001, 0.0);
    var stop = new StopTrainingStrategy();
    backprop.AddStrategy(new Greedy());
    backprop.AddStrategy(new HybridStrategy(annealing));
    backprop.AddStrategy(stop);

    for (int epoch = 0; !stop.ShouldStop(); epoch++)
    {
        backprop.Iteration();
        app.WriteLine("Training " + what + ", Epoch #" + epoch + " Error:" + backprop.Error);
    }
    return (backprop.Error);
}
/// <summary>
/// Trains the member network with a hybrid of backpropagation, simulated
/// annealing and NEAT, looping until the stop strategy reports convergence.
/// Reads the member fields input/ideal for training data and INPUT_SIZE /
/// OUTPUT_SIZE for the NEAT population dimensions.
/// </summary>
private void trainNetworkBackprop()
{
    IMLDataSet aset = new BasicMLDataSet(input, ideal);
    int epoch = 1;

    // Secondary trainers fed into the hybrid strategies below.
    ICalculateScore score = new TrainingSetScore(aset);
    IMLTrain trainAlt = new NeuralSimulatedAnnealing(network, score, 10, 2, 100);
    IMLTrain trainMain = new Backpropagation(network, aset, 0.001, 0.0);
    StopTrainingStrategy stop = new StopTrainingStrategy();

    // NEAT population whose output activation is a step function centered at 0.5.
    var pop = new NEATPopulation(INPUT_SIZE, OUTPUT_SIZE, 1000);
    var step = new ActivationStep();
    step.Center = 0.5;
    pop.OutputActivationFunction = step;
    var train = new NEATTraining(score, pop);

    trainMain.AddStrategy(new Greedy());
    trainMain.AddStrategy(new HybridStrategy(trainAlt));
    trainMain.AddStrategy(stop);
    trainMain.AddStrategy(new HybridStrategy(train));

    network.ClearContext();
    while (!stop.ShouldStop())
    {
        trainMain.Iteration();
        // NOTE(review): NEAT is both wrapped in a HybridStrategy above and
        // iterated explicitly here — confirm the double-stepping is intended.
        train.Iteration();
        // FIX: added the missing space before the NEAT counter so the two
        // iteration numbers no longer run together in the log output.
        Console.WriteLine(@"Training " + @"Epoch #" + epoch + @" Error:" + trainMain.Error +
                          @" Genetic iteration:" + trainAlt.IterationNumber +
                          @" neat iteration:" + train.IterationNumber);
        epoch++;
    }
}
/// <summary>
/// Demo: trains an Elman recurrent network to predict the next sample of a
/// sine wave from a sliding window of preceding samples, then prints the
/// network's output against the ideal values for the full training set.
/// </summary>
static void Main(string[] args)
{
    const int DEGREES = 360;
    const int WINDOW_SIZE = 16;

    // Build the sliding-window data: row i holds WINDOW_SIZE consecutive sine
    // samples starting at degree i, and the ideal value is the sample after.
    var windows = new double[DEGREES][];
    var targets = new double[DEGREES][];
    for (int deg = 0; deg < DEGREES; deg++)
    {
        var window = new double[WINDOW_SIZE];
        for (int offset = 0; offset < WINDOW_SIZE; offset++)
        {
            window[offset] = Math.Sin(DegreeToRad(deg + offset));
        }
        windows[deg] = window;
        targets[deg] = new[] { Math.Sin(DegreeToRad(deg + WINDOW_SIZE)) };
    }
    IMLDataSet trainingSet = new BasicMLDataSet(windows, targets);

    // Elman simple recurrent network: one hidden layer of WINDOW_SIZE neurons.
    var pattern = new ElmanPattern
    {
        InputNeurons = WINDOW_SIZE,
        ActivationFunction = new ActivationSigmoid(),
        OutputNeurons = 1
    };
    pattern.AddHiddenLayer(WINDOW_SIZE);
    var network = (BasicNetwork)pattern.Generate();

    // Train with greedy backpropagation until the stop strategy fires.
    IMLTrain train = new Backpropagation(network, trainingSet);
    var stop = new StopTrainingStrategy();
    train.AddStrategy(new Greedy());
    train.AddStrategy(stop);

    for (int epoch = 0; !stop.ShouldStop(); epoch++)
    {
        train.Iteration();
        Console.WriteLine($"Training Epoch #{epoch} Error:{train.Error}");
    }

    // Evaluate on the training data.
    foreach (IMLDataPair pair in trainingSet)
    {
        IMLData output = network.Compute(pair.Input);
        Console.WriteLine($"actual={output[0]}, ideal={pair.Ideal[0]}");
    }
}
/// <summary>
/// Runs an SVM parameter search for up to 30 iterations, or until the stop
/// strategy detects that the error has plateaued, then finalizes training and
/// returns the (now trained) machine.
/// </summary>
public static SupportVectorMachine SVMSearch(SupportVectorMachine anetwork, IMLDataSet training)
{
    var search = new SVMSearchTrain(anetwork, training);
    var stop = new StopTrainingStrategy(0.00000000001, 1);
    search.AddStrategy(stop);

    while (search.IterationNumber < 30 && !stop.ShouldStop())
    {
        search.Iteration();
        Console.WriteLine("Iteration #" + search.IterationNumber + " Error :" + search.Error);
    }
    search.FinishTraining();
    return (anetwork);
}
/// <summary>
/// Iterates the supplied SVM trainer until its stop strategy fires, logging
/// error and gamma each pass, then reports the wall-clock training time.
/// The <c>machine</c> parameter is not referenced inside this method.
/// </summary>
public static double TrainSVM(SVMTrain train, SupportVectorMachine machine)
{
    var stop = new StopTrainingStrategy(0.0001, 200);
    train.AddStrategy(stop);

    var timer = Stopwatch.StartNew();
    while (!stop.ShouldStop())
    {
        train.PreIteration();
        train.Iteration();
        train.PostIteration();
        Console.WriteLine(@"Iteration #:" + train.IterationNumber + @" Error:" + train.Error + " Gamma:" + train.Gamma);
    }
    timer.Stop();
    Console.WriteLine(@"SVM Trained in :" + timer.ElapsedMilliseconds);
    return (train.Error);
}
/// <summary>
/// Builds a hybrid freeform-backpropagation / simulated-annealing trainer and
/// delegates to EncogUtility.TrainToError to train the network down to a 1%
/// error. Returns the final training error. The <c>what</c> label is not
/// referenced inside this method.
/// </summary>
public static double TrainNetwork(String what, FreeformNetwork network, IMLDataSet trainingSet)
{
    // Annealing serves as the secondary trainer inside the hybrid strategy.
    var annealing = new NeuralSimulatedAnnealing(
        network, new TrainingSetScore(trainingSet), 10, 2, 100);

    IMLTrain trainer = new FreeformBackPropagation(network, trainingSet, 0.00001, 0.0);
    trainer.AddStrategy(new Greedy());
    trainer.AddStrategy(new HybridStrategy(annealing));
    trainer.AddStrategy(new StopTrainingStrategy());

    // TrainToError drives the iteration loop until the error reaches 0.01.
    EncogUtility.TrainToError(trainer, 0.01);
    return (trainer.Error);
}
/// <summary>
/// Trains the network with Levenberg-Marquardt hybridized with simulated
/// annealing, capped at 1500 iterations or until the stop strategy fires,
/// logging the error each epoch.
/// </summary>
public void Train(BasicNetwork network, IMLDataSet training)
{
    IMLTrain trainer = new LevenbergMarquardtTraining(network, training);

    var stop = new StopTrainingStrategy();
    // Annealing serves as the secondary trainer inside the hybrid strategy;
    // its score is computed over the trainer's own training set.
    var annealing = new NeuralSimulatedAnnealing(
        network, new TrainingSetScore(trainer.Training), 10, 2, 100);
    trainer.AddStrategy(new HybridStrategy(annealing));
    trainer.AddStrategy(stop);

    for (var epoch = 0; !stop.ShouldStop() && trainer.IterationNumber < 1500; epoch++)
    {
        trainer.Iteration();
        Console.WriteLine("Training " + ", Epoch #" + epoch + " Error:" + trainer.Error);
    }
}
/// <summary>
/// Trains the network with multithreaded backpropagation hybridized with
/// simulated annealing: first via EncogUtility.TrainConsole (up to 15.2
/// minutes), then iterating manually until the stop strategy detects a
/// plateau. Reports the total elapsed time of the manual loop.
/// </summary>
/// <param name="network">The network to train (modified in place).</param>
/// <param name="minis">Training data.</param>
/// <returns>The final training error.</returns>
public static double TrainNetworks(BasicNetwork network, IMLDataSet minis)
{
    Backpropagation trainMain = new Backpropagation(network, minis, 0.0001, 0.6);
    // 0 lets Encog determine the thread count automatically.
    trainMain.ThreadCount = 0;

    // Simulated annealing acts as the secondary trainer inside the hybrid strategy.
    ICalculateScore score = new TrainingSetScore(minis);
    IMLTrain trainAlt = new NeuralSimulatedAnnealing(network, score, 10, 2, 100);

    // Stop once improvement drops below 0.0001 over a 200-iteration window.
    StopTrainingStrategy stop = new StopTrainingStrategy(0.0001, 200);
    trainMain.AddStrategy(new Greedy());
    trainMain.AddStrategy(new HybridStrategy(trainAlt));
    trainMain.AddStrategy(stop);

    // NOTE(review): this console-driven training pass runs before the manual
    // loop below, so the network is effectively trained twice — confirm that
    // is intended.
    EncogUtility.TrainConsole(trainMain, network, minis, 15.2);

    var sw = new Stopwatch();
    sw.Start();
    while (!stop.ShouldStop())
    {
        trainMain.Iteration();
        Console.WriteLine(@"Iteration #:" + trainMain.IterationNumber + @" Error:" + trainMain.Error + @" Genetic Iteration:" + trainAlt.IterationNumber);
    }
    sw.Stop();
    // FIX: TimeSpan.Seconds yields only the 0-59 seconds component; use
    // Elapsed.TotalSeconds for the true elapsed time. Also removed
    // commented-out prune-strategy dead code.
    Console.WriteLine(@"Total elapsed time in seconds:" + sw.Elapsed.TotalSeconds);
    return (trainMain.Error);
}
/// <summary>
/// Perform an individual job unit, which is a single network to train and
/// evaluate.
/// </summary>
///
/// <param name="context">Contains information about the job unit.</param>
public override sealed void PerformJobUnit(JobUnitContext context)
{
    var network = (BasicNetwork)context.JobUnit;
    BufferedMLDataSet buffer = null;
    IMLDataSet useTraining = _training;
    // When the shared training set is buffered, open an additional view for
    // this job so it does not share a handle with other workers.
    if (_training is BufferedMLDataSet)
    {
        buffer = (BufferedMLDataSet)_training;
        useTraining = (buffer.OpenAdditional());
    }

    // train the neural network
    // Try several random weight initializations (_weightTries) and keep the
    // lowest error achieved across all attempts.
    double error = Double.PositiveInfinity;
    for (int z = 0; z < _weightTries; z++)
    {
        network.Reset();
        Propagation train = new ResilientPropagation(network, useTraining);
        // Stop an attempt early once improvement drops below 0.001 over 5 iterations.
        var strat = new StopTrainingStrategy(0.001d, 5);

        train.AddStrategy(strat);
        train.ThreadCount = 1; // force single thread mode

        // Also bail out if the overall job (ShouldStop) or this attempt's
        // strategy asks to stop, or the iteration budget is exhausted.
        for (int i = 0; (i < _iterations) && !ShouldStop && !strat.ShouldStop(); i++)
        {
            train.Iteration();
        }

        error = Math.Min(error, train.Error);
    }

    // Close the per-job view opened above, if any.
    if (buffer != null)
    {
        buffer.Close();
    }

    if (!ShouldStop)
    {
        // update min and max
        _high = Math.Max(_high, error);
        _low = Math.Min(_low, error);

        if (_hidden1Size > 0)
        {
            int networkHidden1Count;
            int networkHidden2Count;

            // More than 3 layers means input + two hidden layers + output;
            // otherwise there is only one hidden layer.
            if (network.LayerCount > 3)
            {
                networkHidden2Count = network.GetLayerNeuronCount(2);
                networkHidden1Count = network.GetLayerNeuronCount(1);
            }
            else
            {
                networkHidden2Count = 0;
                networkHidden1Count = network.GetLayerNeuronCount(1);
            }

            int row, col;

            // Map the hidden-layer sizes onto the results grid, offset by the
            // minimum layer size being searched (_hidden[n].Min).
            if (_hidden2Size == 0)
            {
                row = networkHidden1Count - _hidden[0].Min;
                col = 0;
            }
            else
            {
                row = networkHidden1Count - _hidden[0].Min;
                col = networkHidden2Count - _hidden[1].Min;
            }

            // NOTE(review): a negative row/col only logs "STOP" and then still
            // indexes _results below, which would throw — confirm whether this
            // branch should skip the write instead.
            if ((row < 0) || (col < 0))
            {
                Console.Out.WriteLine("STOP");
            }
            _results[row][col] = error;
        }

        // report status
        _currentTry++;

        UpdateBest(network, error);
        ReportStatus(
            context,
            "Current: " + NetworkToString(network) + "; Best: " + NetworkToString(_bestNetwork));
    }
}
/// <summary>
/// Trains the given network with backpropagation plus a simulated-annealing
/// hybrid strategy, logging the error each epoch until the stop strategy
/// reports convergence. Returns the final training error.
/// </summary>
private double TrainNetwork(String what, BasicNetwork network, IMLDataSet trainingSet)
{
    // Score function shared by the annealing trainer.
    ICalculateScore fitness = new TrainingSetScore(trainingSet);
    IMLTrain secondary = new NeuralSimulatedAnnealing(network, fitness, 10, 2, 100);

    IMLTrain primary = new Backpropagation(network, trainingSet, 0.00001, 0.0);
    var stop = new StopTrainingStrategy();
    primary.AddStrategy(new Greedy());
    primary.AddStrategy(new HybridStrategy(secondary));
    primary.AddStrategy(stop);

    int epoch = 0;
    while (!stop.ShouldStop())
    {
        primary.Iteration();
        app.WriteLine("Training " + what + ", Epoch #" + epoch + " Error:" + primary.Error);
        epoch += 1;
    }
    return primary.Error;
}