/// <summary>
/// Handles the Start button: reads the network topology and learning settings
/// from the UI, builds the network, runs backpropagation and shows the
/// error/regression/classification result windows.
/// </summary>
private void StartButtonClick(object sender, RoutedEventArgs e)
{
    // Hidden-layer sizes, e.g. "5,3" or "5 3"; the separators are interchangeable.
    string[] separators = { ",", " ", "-", "_", "." };
    var layersVal = new List<int>();
    foreach (string token in LayersTextBox.Text.Split(separators, StringSplitOptions.RemoveEmptyEntries))
    {
        layersVal.Add(int.Parse(token));
    }

    var bias = (YesNo)BiasCombobox.SelectedItem;
    var activation = (ActivationFunction)ActivationCombobox.SelectedItem;
    int iterations = int.Parse(Iterations.Text);
    double learningRate = LearningRateSlider.Value;
    double momentum = MomentumSlider.Value;
    var runMode = (BackpropagationRunMode)RunCombobox.SelectedItem;
    var problem = (ProblemType)ProblemCombobox.SelectedItem;
    var appendTestFile = (YesNo)AppendTestCombobox.SelectedItem;

    // NOTE(review): input width is taken from the test data and output width from
    // the training data minus input — this assumes test rows carry only the
    // input columns while training rows carry input + output; confirm with the
    // file-loading code.
    int input = testData.First().Count();
    int output = trainingData.First().Count() - input;

    CasesData trainingCases;
    CasesData testCases;
    CasesData.InitializeAllData(trainingData, testData, problem, out trainingCases, out testCases);

    // Surround the hidden layers with the input layer size and the output layer
    // size (regression: raw output width; classification: one unit per class).
    layersVal.Insert(0, input);
    layersVal.Add(problem == ProblemType.Regression ? output : trainingCases.ClassCount);

    var network = new NeuralNetwork(
        activation == ActivationFunction.Bipolar
            ? (IActivation)new BipolarTanhActivation()
            : new UnipolarSigmoidActivation(),
        bias == YesNo.Yes,
        layersVal.ToArray());

    LearningResult learningResult = BackpropagationManager.Run(
        network, trainingCases, testCases, runMode, iterations, learningRate, momentum);

    if (appendTestFile == YesNo.Yes)
    {
        AppendCSVile(testDataPath, testCases);
    }

    ShowNetworkErrorWindow(learningResult);

    if (problem == ProblemType.Regression)
    {
        ShowRegressionWindow(network, trainingCases, testCases);
    }
    else if (problem == ProblemType.Classification)
    {
        ShowClassificationWindow(network, trainingCases, testCases);
    }
}
/// <summary>
/// Direction-guessed ratio (ideal vs. network) should come out at 0.2 for a
/// strictly falling ideal series where only part of the network answers move
/// in the same direction.
/// </summary>
public void TestDirectionGuessed20PercentIdealToNetFalling()
{
    // Arrange: 101 patterns with a falling ideal output and a perturbed answer.
    var patterns = new List<Pattern>();
    for (int i = 0; i < 101; i++)
    {
        var pattern = new Pattern(i);
        pattern.IdealOutput = new DenseVector(new double[] { (double)-i });
        pattern.NetworkAnswer = new DenseVector(new double[] { (double)-(i - 10) - (i % 5) * 2.26 });
        patterns.Add(pattern);
    }

    // Act.
    double directionGuessed = BackpropagationManager.CalculateDirIdealToNet(patterns);

    // Assert: exactly 20% of the directions match, within a small tolerance.
    Assert.AreEqual(0.2, directionGuessed, 0.0001);
}
/// <summary>
/// Direction-guessed ratio (network vs. network) should be 1.0 when the network
/// answer is the ideal output shifted by a constant — every step moves in the
/// same direction as the ideal series.
/// </summary>
public void TestDirectionGuessedPerfectlyNetToNet()
{
    // Arrange: 20 patterns; the answer tracks the rising ideal with an offset.
    var patterns = new List<Pattern>();
    for (int i = 0; i < 20; i++)
    {
        var pattern = new Pattern(i);
        pattern.IdealOutput = new DenseVector(new double[] { (double)i });
        pattern.NetworkAnswer = new DenseVector(new double[] { (double)i + 20 });
        patterns.Add(pattern);
    }

    // Act.
    double directionGuessed = BackpropagationManager.CalculateDirNetToNet(patterns);

    // Assert: all directions match.
    Assert.AreEqual(1.0, directionGuessed, 0.0001);
}
/// <summary>
/// Runs every configured learning setting <c>eid.RunsPerSettings</c> times,
/// collecting one <see cref="SingleRunReport"/> per run, then discards the
/// worst runs per setting before returning the aggregated result.
/// </summary>
public EngineResult Run()
{
    foreach (LearningSettings settings in eid.SettingsToRun)
    {
        var reports = new List<SingleRunReport>();
        resultsBySettings[settings] = reports;

        for (int run = 0; run < eid.RunsPerSettings; run++)
        {
            runCounter++;

            List<int> layersVal = BuildLayersVal(settings);
            BuildDataSet(layersVal);
            INetwork network = CreateNetwork(settings, layersVal);
            NormalizeData(network, trainSet, testSet);
            CheckIfPerformPCA(network);

            learningStrategy = new VSetLearningStrategy(settings);
            var backpropManager = new BackpropagationManager(network, trainSet, testSet);
            var learningResult = backpropManager.Run(learningStrategy, settings, this);
            NormalizeDataBack(network, trainSet, testSet);

            reports.Add(new SingleRunReport(
                network, DateTime.Now, learningResult, trainSet, testSet));
        }

        // Use train error as the discard criterion to simulate real life
        // ("we cannot use test set results yet - test set is the future").
        reports.RemoveHighestValues(r => r.LearningResult.FinalTrainError, discardCount);
    }

    var result = new EngineResult();
    result.ResultsBySettings = resultsBySettings;
    result.Eid = eid;
    result.WorstDiscardedCount = discardCount;
    return result;
}