/// <summary>
/// Applies one weight-decay step to a connection: the weight is nudged by
/// Factor * w * w^4 / (w^4 + Cutoff4), i.e. decay ramps up smoothly once |w|
/// passes the cutoff (a negative Factor shrinks large weights toward zero).
/// </summary>
/// <param name="conn">Connection whose <c>Weight</c> is read and updated in place.</param>
/// <param name="rule">Decay rule supplying <c>Factor</c> and <c>Cutoff4</c> (presumably cutoff^4 precomputed — TODO confirm).</param>
private static void Update(INeuralConnection conn, WeightDecayRule rule)
{
    double pow4 = Math.Pow(conn.Weight, 4.0);
    double denominator = pow4 + rule.Cutoff4;

    // Guard the 0/0 case (weight and cutoff both zero): nothing to decay.
    if (denominator == 0.0)
    {
        return;
    }

    conn.Weight += (pow4 / denominator) * rule.Factor * conn.Weight;
}
/// <summary>
/// Drives an interactive training session: builds the data providers, learning
/// rules, network and optimization epoch, then steps the epoch in a loop until
/// the user presses Escape. Console keys: Esc = stop, S = save the current
/// network, V = save the best validated network (if one exists yet).
/// </summary>
private static void Begin()
{
    // Data: create 20000 samples and split by index — first 5000 for
    // validation, the remaining 15000 for training.
    Console.WriteLine("Creating data providers ...");
    var rootDataProv = CreateDataProvider(20000);
    var valDataProv = rootDataProv.GetObjectSubsetProvider((index, values) => index < 5000);
    var trainingDataProv = rootDataProv.GetObjectSubsetProvider((index, values) => index >= 5000);

    // Feature selection: "intelligent" strategy for training (Gaussian selection
    // plus MTP elimination — semantics of the numeric arguments live in the
    // strategy types, not visible here), plain Monte Carlo blocks for validation.
    var trainingSelStrat = new IntelligentDataFeatureSelectionStrategy(250, new GaussianSelectionAlgorithm(0.3), 2500, new MTPEliminationParameters(5, 10));
    //var trainingSelStrat = new MonteCarloDataFeatureSelectionStrategy(250, MonteCarloMode.NewBlock);
    var valSelStrat = new MonteCarloDataFeatureSelectionStrategy(250, MonteCarloMode.NewBlock);

    // Matrix providers pair each subset with the NIP input/output feature IDs.
    var trainingMP = new SupervisedDataFeatureMatrixProvider(trainingSelStrat, trainingDataProv, NIPDataProvider.InputFeatureIDs, NIPDataProvider.OutputFeatureIDs);
    var valMP = new SupervisedDataFeatureMatrixProvider(valSelStrat, valDataProv, NIPDataProvider.InputFeatureIDs, NIPDataProvider.OutputFeatureIDs);

    Console.WriteLine("Samples found: " + rootDataProv.ItemCount);
    Console.WriteLine("Training samples: " + trainingDataProv.ItemCount);
    Console.WriteLine("Validation samples: " + valDataProv.ItemCount);

    // Rules: noisy weight init, weak weight decay (negative factor shrinks
    // weights), and SCG as the active learning rule. The commented-out lines
    // are alternative rules kept for quick experimentation.
    Console.WriteLine("Creating learning rules ...");
    var weightInitRule = new NoisedWeightInitializationRule { Noise = 1.0, IsEnabled = true };
    var decayRule = new WeightDecayRule { Factor = -0.00001, IsEnabled = true };
    //var learningRule = new QuickpropRule { StepSize = 0.001 };
    var learningRule = new SCGRule();
    //var learningRule = new LMRule();
    //var learningRule = new MetaQSARule { Mode = LearningMode.Stochastic, Momentum = 0.8, StepSizeRange = new DoubleRange(0.0, 0.005), StepSize = 0.001, StochasticAdaptiveStateUpdate = true };
    //var learningRule = new SuperSABRule { Mode = LearningMode.Batch, Momentum = 0.8, StepSizeRange = new DoubleRange(0.0, 0.5), StepSize = 0.1, StochasticAdaptiveStateUpdate = false };
    //var learningRule = new SignChangesRule { Mode = LearningMode.Batch, Momentum = 0.8, StepSizeRange = new DoubleRange(0.0, 0.05), StepSize = 0.01, StochasticAdaptiveStateUpdate = true };
    //var learningRule = new GradientDescentRule { Mode = LearningMode.Batch, Momentum = 0.8, StepSize = 0.1 };
    //var learningRule = new QSARule();
    //var learningRule = new MAQRule();
    //var learningRule = new RpropRule { Momentum = 0.01, StepSize = 0.01 };
    //var learningRule = new CrossEntropyRule { PopulationSize = 400, NumberOfElites = 100 };
    //var learningRule = new GARule { };

    // Net:
    Console.WriteLine("Creating Neural Network ...");
    var network = CreateNetwork(trainingMP, weightInitRule, decayRule, learningRule);
    var learning = new Learning(network);

    // Epoch: wires training + validation matrix providers together and
    // subscribes to best-validation updates (OnBestResultUpdated presumably
    // records the best net into trainedNet — defined elsewhere, verify).
    Console.WriteLine("Initializing optimization epoch ...");
    var epoch = new OptimizationEpoch(learning, trainingMP, valMP, 1);
    epoch.Initialize();
    epoch.BestValidationResult.Updated += OnBestResultUpdated;

    // Training loop: one epoch step per iteration, report progress, then poll
    // the keyboard without blocking so training keeps running between keys.
    Console.WriteLine("Starting ...");
    bool done = false;
    do
    {
        //CodeBench.By("Epoch").Do = () =>
        //{
        //    epoch.Step();
        //};
        //CodeBench.By("Epoch").WriteToConsole();
        epoch.Step();
        WriteResult(epoch, trainedNet != null);
        if (Console.KeyAvailable)
        {
            var key = Console.ReadKey();
            switch (key.Key)
            {
                case ConsoleKey.Escape:
                    // Stop after finishing the current iteration.
                    done = true;
                    break;
                case ConsoleKey.S:
                    // Save the network as it currently stands.
                    Save(network, trainingMP);
                    break;
                case ConsoleKey.V:
                    // Save the best-validated network, if one has been captured yet.
                    if (trainedNet != null) Save(trainedNet, trainingMP);
                    break;
            }
        }
    } while (!done);
}