/// <summary>
/// No-op implementation of SaveConfigs: accepts all four configuration sections
/// and discards them without persisting anything.
/// </summary>
/// <param name="dataProviderConfiguration">Data-provider section; ignored.</param>
/// <param name="globalTrainerConfiguration">Global trainer section; ignored.</param>
/// <param name="training1Parameters">Training phase 1 parameters; ignored.</param>
/// <param name="training2Parameters">Training phase 2 parameters; ignored.</param>
// NOTE(review): presumably this belongs to a dummy/in-memory provider that is
// deliberately non-persisting — confirm against the enclosing class; a persisting
// counterpart with the same signature exists elsewhere in this file.
public void SaveConfigs(
    DataProviderConfiguration dataProviderConfiguration,
    GlobalTrainerConfiguration globalTrainerConfiguration,
    Training1Parameters training1Parameters,
    Training2Parameters training2Parameters)
{
}
/// <summary>
/// Builds a <see cref="Training1Parameters"/> instance populated from the
/// persisted application settings (<c>Settings.Default</c>).
/// </summary>
/// <returns>The phase-1 (unsupervised) training parameters last stored in settings.</returns>
public Training1Parameters GetTraining1Parameters()
{
    // Read every field from the same settings snapshot.
    var settings = Settings.Default;

    return new Training1Parameters
    {
        Momentum = settings.Training1Parameters_Momentum,
        Decay = settings.Training1Parameters_Decay,
        LearningRate = settings.Training1Parameters_LearningRate,
        UnsupervisedEpochs = settings.Training1Parameters_UnsupervisedEpochs,
    };
}
/// <summary>
/// Copies the given phase-1 training parameters into the application settings
/// object. Note: this method does not call <c>Settings.Default.Save()</c>;
/// flushing to disk is the caller's responsibility.
/// </summary>
/// <param name="training1Parameters">Parameters to store; a null argument is silently ignored.</param>
private static void SaveTraining1Parameters(Training1Parameters training1Parameters)
{
    // Nothing to persist — keep whatever is already in settings.
    if (training1Parameters is null)
    {
        return;
    }

    var settings = Settings.Default;
    settings.Training1Parameters_Momentum = training1Parameters.Momentum;
    settings.Training1Parameters_Decay = training1Parameters.Decay;
    settings.Training1Parameters_LearningRate = training1Parameters.LearningRate;
    settings.Training1Parameters_UnsupervisedEpochs = training1Parameters.UnsupervisedEpochs;
}
/// <summary>
/// Produces the built-in default parameter set for training phase 1
/// (unsupervised pre-training).
/// </summary>
/// <returns>A fresh <see cref="Training1Parameters"/> carrying the hard-coded defaults.</returns>
public Training1Parameters GetTraining1Parameters()
{
    // Named defaults instead of inline magic numbers.
    const double defaultMomentum = 0.5;
    const double defaultDecay = 0.001;
    const double defaultLearningRate = 0.1;
    const int defaultUnsupervisedEpochs = 200;

    return new Training1Parameters
    {
        Momentum = defaultMomentum,
        Decay = defaultDecay,
        LearningRate = defaultLearningRate,
        UnsupervisedEpochs = defaultUnsupervisedEpochs,
    };
}
/// <summary>
/// Persists all four configuration sections into the application settings and
/// then flushes them to disk with a single <c>Save()</c> call.
/// </summary>
/// <param name="dataProviderConfiguration">Data-provider section to persist.</param>
/// <param name="globalTrainerConfiguration">Global trainer section to persist.</param>
/// <param name="training1Parameters">Training phase 1 parameters to persist.</param>
/// <param name="training2Parameters">Training phase 2 parameters to persist.</param>
public void SaveConfigs(
    DataProviderConfiguration dataProviderConfiguration,
    GlobalTrainerConfiguration globalTrainerConfiguration,
    Training1Parameters training1Parameters,
    Training2Parameters training2Parameters)
{
    // Stage each section into Settings.Default first...
    SaveDataProviderConfiguration(dataProviderConfiguration);
    SaveGlobalTrainerConfiguration(globalTrainerConfiguration);
    SaveTraining1Parameters(training1Parameters);
    SaveTraining2Parameters(training2Parameters);

    // ...then write everything out in one shot.
    Settings.Default.Save();
}
/// <summary>
/// Runs training phase 1: layer-wise unsupervised pre-training of the deep
/// belief network using contrastive divergence (Accord.NET
/// <c>DeepBeliefNetworkLearning</c>). Every hidden layer except the output
/// layer is trained for <c>parameters.UnsupervisedEpochs</c> epochs.
/// </summary>
/// <param name="parameters">Learning rate, momentum, decay and epoch count for this phase.</param>
public void RunTraining1(Training1Parameters parameters)
{
    LogInfoUsingBothLoggers("Started unsupervised training.");

    // One teacher for the whole phase; the Algorithm factory creates a
    // contrastive-divergence learner per (hidden, visible) layer pair,
    // configured from the supplied parameters.
    var teacher = new DeepBeliefNetworkLearning(NeuralNetwork)
    {
        Algorithm = (hiddenLayer, visibleLayer, i) =>
            new ContrastiveDivergenceLearning(hiddenLayer, visibleLayer)
            {
                LearningRate = parameters.LearningRate,
                Momentum = parameters.Momentum,
                Decay = parameters.Decay,
            }
    };

    var inputs = _configuration.InputsOutputsData.Inputs;

    // Setup batches of input for learning.
    // Target batch size is ~100 samples; Math.Max guards against a zero
    // batch count when there are fewer than 100 inputs.
    var batchCount = Math.Max(1, inputs.Length / 100);

    // Create mini-batches to speed learning.
    // NOTE(review): RandomGroups presumably assigns samples to batches at
    // random, making batch composition non-deterministic across runs — confirm
    // whether a seeded/reproducible split is wanted.
    var groups = Accord.Statistics.Tools.RandomGroups(inputs.Length, batchCount);
    var batches = inputs.Subgroups(groups);

    // Unsupervised learning on each hidden layer, except for the output layer.
    var guiLogIntensity = GetGuiLogIntensity(parameters.UnsupervisedEpochs);

    for (int layerIndex = 0; layerIndex < NeuralNetwork.Machines.Count - 1; layerIndex++)
    {
        teacher.LayerIndex = layerIndex;
        // Propagate the batches up to the current layer's input representation.
        var layerData = teacher.GetLayerInput(batches);

        // Epochs are 1-based for logging purposes.
        foreach (int i in Enumerable.Range(1, parameters.UnsupervisedEpochs))
        {
            // RunEpoch returns a total error; dividing by the input count
            // normalizes it to a per-sample figure for logging.
            var error = teacher.RunEpoch(layerData) / inputs.Length;
            var message = $"Layer: {layerIndex} Epoch: {i}, Error: {error}";
            LogCurrentEpochResult(message, guiLogIntensity, i, parameters.UnsupervisedEpochs);

            // Cooperative skip: checked once per epoch.
            // NOTE(review): 'break' only exits the epoch loop, so a skip
            // request aborts the CURRENT layer's remaining epochs but the
            // outer loop proceeds to train the next layer — confirm that a
            // per-layer (rather than whole-phase) skip is intended.
            if (_skipPhaseRequest.RequestedAndUnhandled)
            {
                // NOTE(review): helper name is misspelled ("Skippnig"); it is
                // defined elsewhere, so fix it at the declaration site and all
                // call sites together.
                LogPhaseSkippnigAndNotifyHandled(i, parameters.UnsupervisedEpochs);
                break;
            }
        }
    }
}