/// <summary>
/// Create a simulated-annealing trainer for a BasicNetwork.
/// </summary>
/// <param name="method">The method to train; must be a BasicNetwork.</param>
/// <param name="training">The training data used to score candidate networks.</param>
/// <param name="argsStr">Argument string holding start/stop temperature and cycle count.</param>
/// <returns>The newly created trainer.</returns>
/// <exception cref="TrainingError">If the method is not a BasicNetwork.</exception>
public IMLTrain Create(IMLMethod method, IMLDataSet training, String argsStr)
{
    var network = method as BasicNetwork;
    if (network == null)
    {
        throw new TrainingError(
            "Invalid method type, requires BasicNetwork");
    }

    // Parse the architecture arguments, falling back to defaults when absent.
    IDictionary<String, String> args = ArchitectureParse.ParseParams(argsStr);
    var holder = new ParamsHolder(args);
    double startTemp = holder.GetDouble(MLTrainFactory.PropertyTemperatureStart, false, 10);
    double stopTemp = holder.GetDouble(MLTrainFactory.PropertyTemperatureStop, false, 2);
    int cycles = holder.GetInt(MLTrainFactory.Cycles, false, 100);

    ICalculateScore score = new TrainingSetScore(training);
    return new NeuralSimulatedAnnealing(network, score, startTemp, stopTemp, cycles);
}
/// <summary>
/// Create a genetic-algorithm trainer for a BasicNetwork.
/// </summary>
/// <param name="method">The method to train; must be a BasicNetwork.</param>
/// <param name="training">The training data used to score candidate networks.</param>
/// <param name="argsStr">Argument string holding population size, mutation and mate rates.</param>
/// <returns>The newly created trainer.</returns>
/// <exception cref="TrainingError">If the method is not a BasicNetwork.</exception>
public IMLTrain Create(IMLMethod method, IMLDataSet training, String argsStr)
{
    if (!(method is BasicNetwork))
    {
        throw new TrainingError(
            "Invalid method type, requires BasicNetwork");
    }

    ICalculateScore score = new TrainingSetScore(training);

    // Parse the architecture arguments, falling back to defaults when absent.
    IDictionary<String, String> args = ArchitectureParse.ParseParams(argsStr);
    var holder = new ParamsHolder(args);
    int populationSize = holder.GetInt(
        MLTrainFactory.PropertyPopulationSize, false, 5000);
    double mutation = holder.GetDouble(
        MLTrainFactory.PropertyMutation, false, 0.1d);
    double mate = holder.GetDouble(MLTrainFactory.PropertyMate, false, 0.25d);

    // Weights of new genomes are randomized into [-1, 1].
    IMLTrain train = new NeuralGeneticAlgorithm((BasicNetwork) method,
        new RangeRandomizer(-1, 1), score, populationSize, mutation, mate);
    return train;
}
/// <summary>
/// Create a genetic-algorithm trainer for an arbitrary resettable IMLMethod.
/// </summary>
/// <param name="method">The method to train; cloned (deep copy) and reset for each new genome.</param>
/// <param name="training">The training data used to score candidate methods.</param>
/// <param name="argsStr">Argument string holding the population size.</param>
/// <returns>The newly created trainer.</returns>
/// <exception cref="TrainingError">If the method is not a BasicNetwork.</exception>
public IMLTrain Create(IMLMethod method, IMLDataSet training, String argsStr)
{
    if (!(method is BasicNetwork))
    {
        throw new TrainingError(
            "Invalid method type, requires BasicNetwork");
    }

    ICalculateScore score = new TrainingSetScore(training);

    // Parse the architecture arguments, falling back to defaults when absent.
    IDictionary<String, String> args = ArchitectureParse.ParseParams(argsStr);
    var holder = new ParamsHolder(args);
    int populationSize = holder.GetInt(
        MLTrainFactory.PropertyPopulationSize, false, 5000);

    // The factory lambda deep-copies the template method and resets it so
    // each population member starts from fresh random state.
    IMLTrain train = new MLMethodGeneticAlgorithm(
        () =>
        {
            IMLMethod result = (IMLMethod) ObjectCloner.DeepCopy(method);
            ((IMLResettable) result).Reset();
            return result;
        }, score, populationSize);
    return train;
}
// Verify simulated annealing can train an untrained XOR network to a low error.
public void TestAnneal()
{
    var trainingData = new BasicMLDataSet(XOR.XORInput, XOR.XORIdeal);
    var network = NetworkUtil.CreateXORNetworkUntrained();

    // Anneal from temperature 10 down to 2 over 100 cycles.
    var anneal = new NeuralSimulatedAnnealing(
        network, new TrainingSetScore(trainingData), 10, 2, 100);

    NetworkUtil.TestTraining(anneal, 0.01);
}
/// <summary>
/// Create a NEAT GA trainer.
/// </summary>
/// <param name="method">The method to use; must be a NEATPopulation.</param>
/// <param name="training">The training data used to score genomes.</param>
/// <param name="argsStr">The arguments to use (unused by this factory).</param>
/// <returns>The newly created trainer.</returns>
public IMLTrain Create(IMLMethod method, IMLDataSet training, String argsStr)
{
    var population = (NEATPopulation) method;
    ICalculateScore scoreFunction = new TrainingSetScore(training);
    return NEATUtil.ConstructNEATTrainer(population, scoreFunction);
}
// Verify the neural genetic algorithm can train an untrained XOR network.
public void TestGenetic()
{
    var trainingData = new BasicMLDataSet(XOR.XORInput, XOR.XORIdeal);
    BasicNetwork network = NetworkUtil.CreateXORNetworkUntrained();

    // Population of 500, 10% mutation, 25% mating; weights randomized in [-1, 1].
    var genetic = new NeuralGeneticAlgorithm(
        network,
        new RangeRandomizer(-1, 1),
        new TrainingSetScore(trainingData),
        500, 0.1, 0.25);

    NetworkUtil.TestTraining(genetic, 0.00001);
}
// Verify simulated annealing trains the XOR network below the target error.
public void TestAnneal()
{
    IMLDataSet xorData = new BasicMLDataSet(XOR.XORInput, XOR.XORIdeal);
    BasicNetwork net = NetworkUtil.CreateXORNetworkUntrained();
    ICalculateScore fitness = new TrainingSetScore(xorData);

    // Temperature schedule: 10 -> 2 over 100 cycles per iteration.
    var trainer = new NeuralSimulatedAnnealing(net, fitness, 10, 2, 100);
    NetworkUtil.TestTraining(trainer, 0.01);
}
// Verify the method-level genetic algorithm trains XOR from factory-created networks.
public void TestGenetic()
{
    IMLDataSet trainingData = new BasicMLDataSet(XOR.XORInput, XOR.XORIdeal);
    ICalculateScore score = new TrainingSetScore(trainingData);

    // Each genome is seeded from a freshly created untrained XOR network.
    var genetic = new MLMethodGeneticAlgorithm(
        () => NetworkUtil.CreateXORNetworkUntrained(), score, 500);

    NetworkUtil.TestTraining(genetic, 0.00001);
}
/// <summary>
/// Build a small NEAT population (2 inputs, 1 output, 10 genomes) by
/// constructing a NEAT trainer for the XOR task and returning its population.
/// </summary>
/// <returns>The generated population.</returns>
private NEATPopulation Generate()
{
    IMLDataSet trainingSet = new BasicMLDataSet(XOR.XORInput, XOR.XORIdeal);
    ICalculateScore score = new TrainingSetScore(trainingSet);

    // NOTE: a previous version constructed an ActivationStep here and set
    // its Center, but never passed it to anything; those dead locals were removed.
    NEATTraining train = new NEATTraining(score, 2, 1, 10);
    return (NEATPopulation) train.Population;
}
/// <summary>
/// Create a PSO trainer.
/// </summary>
/// <param name="method">The method to use; must be a BasicNetwork.</param>
/// <param name="training">The training data used to score particles.</param>
/// <param name="argsStr">Argument string holding the particle count.</param>
/// <returns>The newly created trainer.</returns>
/// <exception cref="TrainingError">If the method is not a BasicNetwork.</exception>
public IMLTrain Create(IMLMethod method, IMLDataSet training, String argsStr)
{
    // Validate the method type up front, matching the other trainer factories;
    // previously a bare cast produced an unhelpful InvalidCastException.
    if (!(method is BasicNetwork))
    {
        throw new TrainingError(
            "Invalid method type, requires BasicNetwork");
    }

    IDictionary<String, String> args = ArchitectureParse.ParseParams(argsStr);
    ParamsHolder holder = new ParamsHolder(args);
    int particles = holder.GetInt(
        MLTrainFactory.PropertyParticles, false, 20);

    ICalculateScore score = new TrainingSetScore(training);
    IRandomizer randomizer = new NguyenWidrowRandomizer();

    IMLTrain train = new NeuralPSO((BasicNetwork) method, randomizer, score, particles);
    return train;
}
/// <summary>
/// Create an EPL GA trainer.
/// </summary>
/// <param name="method">The method to use; must be a PrgPopulation.</param>
/// <param name="training">The training data used to score programs.</param>
/// <param name="argsStr">The arguments to use (unused by this factory).</param>
/// <returns>The newly created trainer.</returns>
public IMLTrain Create(IMLMethod method, IMLDataSet training, String argsStr)
{
    var population = (PrgPopulation) method;
    ICalculateScore fitness = new TrainingSetScore(training);

    var trainer = new TrainEA(population, fitness);
    trainer.CODEC = new PrgCODEC();

    // Algebraic rewrite rules simplify evolved program trees.
    trainer.Rules.AddRewriteRule(new RewriteConstants());
    trainer.Rules.AddRewriteRule(new RewriteAlgebraic());

    // Operator mix: 80% crossover, 10% subtree mutation, 10% constant mutation.
    trainer.AddOperation(0.8, new SubtreeCrossover());
    trainer.AddOperation(0.1, new SubtreeMutation(population.Context, 4));
    trainer.AddOperation(0.1, new ConstMutation(population.Context, 0.5, 1.0));

    // Penalize overly complex programs and speciate by program structure.
    trainer.AddScoreAdjuster(new ComplexityAdjustedScore());
    trainer.Speciation = new PrgSpeciation();

    return trainer;
}
/// <summary>
/// Program entry point: train a NEAT population on XOR and evaluate the result.
/// </summary>
/// <param name="app">Holds arguments and other info.</param>
public void Execute(IExampleInterface app)
{
    IMLDataSet trainingSet = new BasicMLDataSet(XORInput, XORIdeal);

    var pop = new NEATPopulation(2, 1, 1000);
    pop.Reset();
    // Fully connected initial genomes; not required, but speeds processing.
    pop.InitialConnectionDensity = 1.0;

    ICalculateScore score = new TrainingSetScore(trainingSet);
    TrainEA train = NEATUtil.ConstructNEATTrainer(pop, score);
    EncogUtility.TrainToError(train, 0.01);

    // Decode the best genome into a runnable network and evaluate it.
    var network = (NEATNetwork) train.CODEC.Decode(train.BestGenome);
    Console.WriteLine(@"Neural Network Results:");
    EncogUtility.Evaluate(network, trainingSet);
}
// Train a freeform network with backpropagation, using simulated annealing as a
// hybrid fallback strategy; returns the final training error.
// NOTE(review): the "what" parameter is currently unused here - kept for signature
// compatibility with callers.
public static double TrainNetwork(String what, FreeformNetwork network, IMLDataSet trainingSet)
{
    ICalculateScore score = new TrainingSetScore(trainingSet);
    IMLTrain annealing = new NeuralSimulatedAnnealing(network, score, 10, 2, 100);
    IMLTrain backprop = new FreeformBackPropagation(network, trainingSet, 0.00001, 0.0);

    var stop = new StopTrainingStrategy();
    backprop.AddStrategy(new Greedy());
    backprop.AddStrategy(new HybridStrategy(annealing));
    backprop.AddStrategy(stop);

    EncogUtility.TrainToError(backprop, 0.01);
    return backprop.Error;
}
/// <summary>
/// Program entry point: train a feedforward XOR network with particle swarm
/// optimization and evaluate the result.
/// </summary>
/// <param name="app">Holds arguments and other info.</param>
public void Execute(IExampleInterface app)
{
    IMLDataSet trainingSet = new BasicMLDataSet(XORInput, XORIdeal);
    var network = EncogUtility.SimpleFeedForward(2, 2, 0, 1, false);

    ICalculateScore score = new TrainingSetScore(trainingSet);
    IRandomizer randomizer = new NguyenWidrowRandomizer();

    // 20 particles explore the weight space.
    IMLTrain train = new NeuralPSO(network, randomizer, score, 20);
    EncogUtility.TrainToError(train, 0.01);

    network = (BasicNetwork) train.Method;
    Console.WriteLine("Neural Network Results:");
    EncogUtility.Evaluate(network, trainingSet);

    EncogFramework.Instance.Shutdown();
}
/// <summary>
/// Create a simulated-annealing trainer for a BasicNetwork.
/// </summary>
/// <param name="method">The method to train; must be a BasicNetwork.</param>
/// <param name="training">The training data used to score candidate networks.</param>
/// <param name="argsStr">Argument string holding "startTemp", "stopTemp" and "cycles".</param>
/// <returns>The newly created trainer.</returns>
/// <exception cref="TrainingError">If the method is not a BasicNetwork.</exception>
public IMLTrain Create(IMLMethod method, IMLDataSet training, string argsStr)
{
    // This body was previously decompiler output containing an unreachable
    // branch that read unassigned locals and meaningless unsigned arithmetic;
    // it has been rewritten to express the actual logic directly.
    if (!(method is BasicNetwork))
    {
        throw new TrainingError("Invalid method type, requires BasicNetwork");
    }

    ICalculateScore calculateScore = new TrainingSetScore(training);
    ParamsHolder holder = new ParamsHolder(ArchitectureParse.ParseParams(argsStr));

    double startTemp = holder.GetDouble("startTemp", false, 10.0);
    double stopTemp = holder.GetDouble("stopTemp", false, 2.0);
    int cycles = holder.GetInt("cycles", false, 100);

    return new NeuralSimulatedAnnealing(
        (BasicNetwork) method, calculateScore, startTemp, stopTemp, cycles);
}
/// <summary>
/// Program entry point: train a NEAT population (legacy NEATTraining API) on XOR
/// and evaluate the result.
/// </summary>
/// <param name="app">Holds arguments and other info.</param>
public void Execute(IExampleInterface app)
{
    IMLDataSet trainingSet = new BasicMLDataSet(XORInput, XORIdeal);
    var pop = new NEATPopulation(2, 1, 1000);
    ICalculateScore score = new TrainingSetScore(trainingSet);

    // Use a step activation centered at 0.5 for the output layer.
    var step = new ActivationStep();
    step.Center = 0.5;
    pop.OutputActivationFunction = step;

    var train = new NEATTraining(score, pop);
    EncogUtility.TrainToError(train, 0.01);

    var network = (NEATNetwork) train.Method;
    network.ClearContext();

    Console.WriteLine(@"Neural Network Results:");
    EncogUtility.Evaluate(network, trainingSet);
}
/// <summary>
/// Create a genetic-algorithm trainer for a BasicNetwork.
/// </summary>
/// <param name="method">The method to train; must be a BasicNetwork.</param>
/// <param name="training">The training data used to score candidate networks.</param>
/// <param name="argsStr">Argument string holding "population", "mutate" and "mate".</param>
/// <returns>The newly created trainer.</returns>
/// <exception cref="TrainingError">If the method is not a BasicNetwork.</exception>
public IMLTrain Create(IMLMethod method, IMLDataSet training, string argsStr)
{
    // This body was previously decompiler output with a bogus do/while whose
    // unsigned condition could never be true (the loop ran exactly once);
    // it has been rewritten as straight-line code with the same behavior.
    if (!(method is BasicNetwork))
    {
        throw new TrainingError("Invalid method type, requires BasicNetwork");
    }

    ICalculateScore calculateScore = new TrainingSetScore(training);
    ParamsHolder holder = new ParamsHolder(ArchitectureParse.ParseParams(argsStr));

    int populationSize = holder.GetInt("population", false, 5000); // was 0x1388
    double mutationPercent = holder.GetDouble("mutate", false, 0.1);
    double matePercent = holder.GetDouble("mate", false, 0.25);

    return new NeuralGeneticAlgorithm((BasicNetwork) method,
        new RangeRandomizer(-1.0, 1.0), calculateScore,
        populationSize, mutationPercent, matePercent);
}
// Train with backpropagation plus a simulated-annealing hybrid strategy,
// logging the error each epoch; returns the final training error.
private double TrainNetwork(String what, BasicNetwork network, IMLDataSet trainingSet)
{
    ICalculateScore score = new TrainingSetScore(trainingSet);
    IMLTrain annealing = new NeuralSimulatedAnnealing(network, score, 10, 2, 100);
    IMLTrain backprop = new Backpropagation(network, trainingSet, 0.00001, 0.0);

    var stop = new StopTrainingStrategy();
    backprop.AddStrategy(new Greedy());
    backprop.AddStrategy(new HybridStrategy(annealing));
    backprop.AddStrategy(stop);

    for (int epoch = 0; !stop.ShouldStop(); epoch++)
    {
        backprop.Iteration();
        app.WriteLine("Training " + what + ", Epoch #" + epoch
            + " Error:" + backprop.Error);
    }

    return backprop.Error;
}
// Verify the method-level genetic algorithm trains XOR to the target error.
public void TestGenetic()
{
    IMLDataSet xorData = new BasicMLDataSet(XOR.XORInput, XOR.XORIdeal);
    ICalculateScore fitness = new TrainingSetScore(xorData);

    // Genome factory produces fresh untrained XOR networks; population of 500.
    var trainer = new MLMethodGeneticAlgorithm(
        () => NetworkUtil.CreateXORNetworkUntrained(), fitness, 500);

    NetworkUtil.TestTraining(trainer, 0.00001);
}
// Train a network with backpropagation plus a simulated-annealing hybrid
// strategy, then iterate until the stop strategy fires; returns the final error.
// NOTE(review): EncogUtility.TrainConsole is invoked (for 15.2 minutes) BEFORE
// the manual while-loop, so the network is effectively trained twice - confirm
// this is intentional.
public static double TrainNetworks(BasicNetwork network, IMLDataSet minis)
{
    Backpropagation trainMain = new Backpropagation(network, minis, 0.0001, 0.6);
    // ThreadCount of 0 requests automatic thread-count selection - verify
    // against the Backpropagation documentation.
    trainMain.ThreadCount = 0;

    // Score candidates against the training set for the annealing fallback.
    ICalculateScore score = new TrainingSetScore(minis);
    IMLTrain trainAlt = new NeuralSimulatedAnnealing(network, score, 10, 2, 100);
    // IMLTrain trainMain = new Backpropagation(network, minis, 0.0001, 0.01);

    // Stop when improvement falls below 0.0001 over 200 iterations.
    StopTrainingStrategy stop = new StopTrainingStrategy(0.0001, 200);
    trainMain.AddStrategy(new Greedy());
    trainMain.AddStrategy(new HybridStrategy(trainAlt));
    trainMain.AddStrategy(stop);

    //prune strategy not in GIT!...Removing it.
    //PruneStrategy strategypruning = new PruneStrategy(0.91d, 0.001d, 10, network,minis, 0, 20);
    //trainMain.AddStrategy(strategypruning);

    // Console training phase: up to 15.2 minutes.
    EncogUtility.TrainConsole(trainMain, network, minis, 15.2);

    var sw = new Stopwatch();
    sw.Start();
    while (!stop.ShouldStop())
    {
        trainMain.Iteration();
        Console.WriteLine(@"Iteration #:" + trainMain.IterationNumber
            + @" Error:" + trainMain.Error
            + @" Genetic Iteration:" + trainAlt.IterationNumber);
    }
    sw.Stop();
    Console.WriteLine(@"Total elapsed time in seconds:"
        + TimeSpan.FromMilliseconds(sw.ElapsedMilliseconds).Seconds);

    return trainMain.Error;
}
// Verify particle swarm optimization trains the XOR network to the target error.
public void TestParticleSwarmOptimization()
{
    var trainingData = new BasicMLDataSet(XOR.XORInput, XOR.XORIdeal);
    var network = NetworkUtil.CreateXORNetworkUntrained();
    ICalculateScore score = new TrainingSetScore(trainingData);

    // Particle positions are randomized into [-1, 1].
    var pso = new ParticleSwarmOptimizationAlgorithm(
        network, new RangeRandomizer(-1, 1), score);

    NetworkUtil.TestTraining(pso, 0.00001);
}
// Train with Levenberg-Marquardt plus a simulated-annealing hybrid strategy,
// capped at 1500 iterations or until the stop strategy fires.
public void Train(BasicNetwork network, IMLDataSet training)
{
    IMLTrain lma = new LevenbergMarquardtTraining(network, training);

    ICalculateScore score = new TrainingSetScore(lma.Training);
    IMLTrain annealing = new NeuralSimulatedAnnealing(network, score, 10, 2, 100);

    var stop = new StopTrainingStrategy();
    lma.AddStrategy(new HybridStrategy(annealing));
    lma.AddStrategy(stop);

    for (int epoch = 0; !stop.ShouldStop() && lma.IterationNumber < 1500; epoch++)
    {
        lma.Iteration();
        Console.WriteLine("Training " + ", Epoch #" + epoch + " Error:" + lma.Error);
    }
}
// Train the network with backpropagation (annealing hybrid) while ALSO stepping
// a separate NEAT trainer each epoch.
// NOTE(review): the NEAT trainer is both added as a HybridStrategy on trainMain
// AND iterated manually in the loop; confirm this double-advancement is
// intentional. The backprop network and the NEAT population are distinct models
// trained side by side here.
private void trainNetworkBackprop()
{
    // IMLTrain train = new Backpropagation(this.network, this.input,this.ideal, 0.000001, 0.1);

    IMLDataSet aset = new BasicMLDataSet(input, ideal);
    int epoch = 1;

    // Score candidates against the training set.
    ICalculateScore score = new TrainingSetScore(aset);
    IMLTrain trainAlt = new NeuralSimulatedAnnealing(network, score, 10, 2, 100);
    IMLTrain trainMain = new Backpropagation(network, aset, 0.001, 0.0);
    StopTrainingStrategy stop = new StopTrainingStrategy();

    var pop = new NEATPopulation(INPUT_SIZE, OUTPUT_SIZE, 1000);
    // Step activation centered at 0.5 for NEAT outputs.
    var step = new ActivationStep();
    step.Center = 0.5;
    pop.OutputActivationFunction = step;
    var train = new NEATTraining(score, pop);

    trainMain.AddStrategy(new Greedy());
    trainMain.AddStrategy(new HybridStrategy(trainAlt));
    trainMain.AddStrategy(stop);
    trainMain.AddStrategy(new HybridStrategy(train));

    network.ClearContext();

    while (!stop.ShouldStop())
    {
        trainMain.Iteration();
        train.Iteration();
        Console.WriteLine(@"Training " + @"Epoch #" + epoch + @" Error:" + trainMain.Error
            + @" Genetic iteration:" + trainAlt.IterationNumber
            + @"neat iteration:" + train.IterationNumber);
        epoch++;
    }
}
// Verify the neural genetic algorithm trains the XOR network to the target error.
public void TestGenetic()
{
    IMLDataSet xorData = new BasicMLDataSet(XOR.XORInput, XOR.XORIdeal);
    BasicNetwork net = NetworkUtil.CreateXORNetworkUntrained();
    ICalculateScore fitness = new TrainingSetScore(xorData);

    // 500 genomes, 10% mutation, 25% mating; weights randomized in [-1, 1].
    var trainer = new NeuralGeneticAlgorithm(
        net, new RangeRandomizer(-1, 1), fitness, 500, 0.1, 0.25);

    NetworkUtil.TestTraining(trainer, 0.00001);
}
// Train a network with either Levenberg-Marquardt or backpropagation (selected
// by the Method argument), with a simulated-annealing hybrid strategy; logs the
// error each epoch and returns the final training error.
// "Method" selects the main trainer: "Leven" for LevenbergMarquardtTraining,
// anything else (including null) for Backpropagation.
private double TrainNetwork(String what, BasicNetwork network, IMLDataSet trainingSet, string Method)
{
    ICalculateScore score = new TrainingSetScore(trainingSet);
    IMLTrain trainAlt = new NeuralSimulatedAnnealing(network, score, 10, 2, 100);

    IMLTrain trainMain;
    // Use string.Equals with Ordinal: the previous Method.Equals("Leven") threw
    // NullReferenceException when Method was null, and ordinal comparison is the
    // right choice for a non-linguistic selector token.
    if (string.Equals(Method, "Leven", StringComparison.Ordinal))
    {
        Console.WriteLine("Using LevenbergMarquardtTraining");
        trainMain = new LevenbergMarquardtTraining(network, trainingSet);
    }
    else
    {
        trainMain = new Backpropagation(network, trainingSet);
    }

    var stop = new StopTrainingStrategy();
    trainMain.AddStrategy(new Greedy());
    trainMain.AddStrategy(new HybridStrategy(trainAlt));
    trainMain.AddStrategy(stop);

    int epoch = 0;
    while (!stop.ShouldStop())
    {
        trainMain.Iteration();
        app.WriteLine("Training " + what + ", Epoch #" + epoch
            + " Error:" + trainMain.Error);
        epoch++;
    }

    return trainMain.Error;
}
// Train with backpropagation plus a simulated-annealing hybrid strategy until
// the stop strategy fires; logs each iteration and returns the final error.
public static double TrainNetworks(BasicNetwork network, IMLDataSet minis)
{
    ICalculateScore score = new TrainingSetScore(minis);
    IMLTrain annealing = new NeuralSimulatedAnnealing(network, score, 10, 2, 100);
    IMLTrain backprop = new Backpropagation(network, minis, 0.0001, 0.01);

    // Stop when improvement falls below 0.0001 over 200 iterations.
    var stop = new StopTrainingStrategy(0.0001, 200);
    backprop.AddStrategy(new Greedy());
    backprop.AddStrategy(new HybridStrategy(annealing));
    backprop.AddStrategy(stop);

    var timer = Stopwatch.StartNew();
    while (!stop.ShouldStop())
    {
        backprop.Iteration();
        Console.WriteLine(@"Iteration #:" + backprop.IterationNumber
            + @" Error:" + backprop.Error
            + @" Genetic Iteration:" + annealing.IterationNumber);
    }
    timer.Stop();

    return backprop.Error;
}
// Assert a deserialized population's parameters survived persistence, then
// confirm it is still usable by running one NEAT training iteration.
private void Validate(NEATPopulation pop)
{
    Assert.AreEqual(0.3, pop.OldAgePenalty);
    Assert.AreEqual(50, pop.OldAgeThreshold);
    Assert.AreEqual(10, pop.PopulationSize);
    Assert.AreEqual(0.2, pop.SurvivalRate);
    Assert.AreEqual(10, pop.YoungBonusAgeThreshold);
    Assert.AreEqual(0.3, pop.YoungScoreBonus);

    // Smoke-test: the population must still be trainable on XOR.
    IMLDataSet xorData = new BasicMLDataSet(XOR.XORInput, XOR.XORIdeal);
    var trainer = new NEATTraining(new TrainingSetScore(xorData), pop);
    trainer.Iteration();
}
/// <summary>
/// Builds and trains a NEAT network down to the requested error level.
/// </summary>
/// <param name="aset">The IMLDataSet to train against.</param>
/// <param name="inputcounts">Number of network inputs.</param>
/// <param name="outputcounts">Number of network outputs.</param>
/// <param name="populationsize">Size of the NEAT population.</param>
/// <param name="ToErrorTraining">The error rate to train down to.</param>
/// <returns>A trained NEAT network.</returns>
public static NEATNetwork BuildTrainNeatNetwork(IMLDataSet aset, int inputcounts,
    int outputcounts, int populationsize, double ToErrorTraining)
{
    var pop = new NEATPopulation(inputcounts, outputcounts, populationsize);
    ICalculateScore score = new TrainingSetScore(aset);

    // Step activation centered at 0.5 for the output layer.
    var step = new ActivationStep();
    step.Center = 0.5;
    pop.OutputActivationFunction = step;

    var train = new NEATTraining(score, pop);
    EncogUtility.TrainToError(train, ToErrorTraining);

    return (NEATNetwork) train.Method;
}
// Assert a deserialized population's parameters survived persistence, then
// confirm it is still usable by running one evolutionary iteration.
private void Validate(NEATPopulation pop)
{
    Assert.AreEqual(10, pop.PopulationSize);
    Assert.AreEqual(0.2, pop.SurvivalRate);

    // Smoke-test: the population must still be trainable on XOR.
    IMLDataSet xorData = new BasicMLDataSet(XOR.XORInput, XOR.XORIdeal);
    ICalculateScore fitness = new TrainingSetScore(xorData);
    IEvolutionaryAlgorithm trainer = NEATUtil.ConstructNEATTrainer(pop, fitness);
    trainer.Iteration();
}
/// <summary>
/// Build a small NEAT population (2 inputs, 1 output, 10 genomes) by
/// constructing a NEAT trainer for the XOR task and returning its population.
/// </summary>
/// <returns>The generated population.</returns>
private NEATPopulation Generate()
{
    IMLDataSet trainingSet = new BasicMLDataSet(XOR.XORInput, XOR.XORIdeal);
    ICalculateScore score = new TrainingSetScore(trainingSet);

    // NOTE: a previous version constructed an ActivationStep here and set its
    // Center, but never passed it to the trainer; those dead locals were removed.
    IEvolutionaryAlgorithm train = NEATUtil.ConstructNEATTrainer(score, 2, 1, 10);
    return (NEATPopulation) train.Population;
}