/// <summary>
/// Loads the sample bitmap, prepares an equally-sized output bitmap, and
/// creates one neural network per image row shared by all pixels in that row.
/// </summary>
public MainViewModel()
{
    // TODO(review): hard-coded sample path — consider making this configurable.
    _sourceBitmap = new Bitmap("C:/sample.jpg");
    _neuralNetworkBitmap = new Bitmap(_sourceBitmap.Width, _sourceBitmap.Height);

    // One 3-3-4-3 network is built per row and registered for every pixel of
    // that row. NOTE(review): if the intent was one independent network per
    // pixel, the builder call must move inside the inner (x) loop — confirm.
    _networks = new Dictionary<Point, NeuralNetwork>();
    for (int y = 0; y < _sourceBitmap.Height; y++)
    {
        var network = new NeuralNetworkBuilder()
            .AddInputLayer(3)
            .AddHiddenLayer(3)
            .AddHiddenLayer(4)
            .AddOutputLayer(3)
            .Build();

        for (int x = 0; x < _sourceBitmap.Width; x++)
        {
            _networks.Add(new Point(x, y), network);
        }
    }

    SourceImage = _sourceBitmap.ToBitmapSource();
    NeuralNetworkImage = _neuralNetworkBitmap.ToBitmapSource();
}
/// <summary>
/// Trains a 2-3-4-2 network to drive input (1, 1) toward target (0, 0),
/// printing the two output neuron values every iteration.
/// </summary>
static void DoubleOutput()
{
    var network = new NeuralNetworkBuilder()
        .AddInputLayer(2)
        .AddHiddenLayer(3)
        .AddHiddenLayer(4)
        .AddOutputLayer(2)
        .Build();

    // Same printed iteration numbers (1..1000000) as the original
    // `while (i++ < 1000000)` loop, but with the intent made explicit.
    for (int i = 1; i <= 1000000; i++)
    {
        Console.Clear();
        network.Evaluate(new List<double> { 1, 1 });          // forward pass
        network.PropagateBackward(new List<double> { 0, 0 }); // backprop toward (0, 0)

        var outputNeurons = network.Layers.Last().Neurons;
        var output = outputNeurons[0].Value;
        var output2 = outputNeurons[1].Value;
        Console.WriteLine($"iteration: {i}\noutput:\n{output}\n{output2}");
    }
}
/// <summary>
/// Builds a 2-N-1 sigmoid network per the supplied test config, trains it on
/// the XNOR truth table, and returns the resulting training statistics.
/// </summary>
private TrainStats ExecuteSingleIteration(ArchitectureTestConfig config)
{
    var builder = new NeuralNetworkBuilder()
        .SetActivationFunction(new SigmoidActivationFunction())
        .SetErrorFunction(new MeanSquaredErrorFunction(1))
        .SetNumberOfInputNeurons(2)
        .SetNumberOfHiddenNeurons(config.NumberOfHiddenNeurons)
        .SetNumberOfOutputNeurons(1);

    if (config.EnableBias)
    {
        builder.AddBiasConnections();
    }

    var network = builder.Build();
    var trainer = new Trainer(neuralNetwork: network, learningRate: 0.01, logger: new DummyLogger());

    // XNOR: output is 1 exactly when both inputs agree.
    var samples = new[]
    {
        new TrainData(new double[] { 0, 0 }, new double[] { 1 }),
        new TrainData(new double[] { 1, 0 }, new double[] { 0 }),
        new TrainData(new double[] { 0, 1 }, new double[] { 0 }),
        new TrainData(new double[] { 1, 1 }, new double[] { 1 }),
    };

    return trainer.Train(samples, numberOfEpochs: _maxNumberOfEpochs, terminalEpochError: _terminalEpochError);
}
/// <summary>
/// Trains a 2-2-1 sigmoid network on the XOR truth table and verifies that
/// training converges and each corner evaluates to its XOR value.
/// </summary>
public void TrainXor()
{
    NeuralNetworkBuilder builder = new NeuralNetworkBuilder();
    NeuralNetwork network = builder.CreateNew()
        .AddInputLayer(2, ActivationFunctions.Sigmoid, false)
        .AddHiddenLayer(2, ActivationFunctions.Sigmoid, false)
        .AddOutputLayer(1, ActivationFunctions.Sigmoid)
        .GetNetwork();

    BackPropagationTrainer trainer = new BackPropagationTrainer(network);
    TrainingData[] data = new TrainingData[]
    {
        new TrainingData(new double[] { 1, 1 }, new double[] { 0 }),
        new TrainingData(new double[] { 1, 0 }, new double[] { 1 }),
        new TrainingData(new double[] { 0, 1 }, new double[] { 1 }),
        new TrainingData(new double[] { 0, 0 }, new double[] { 0 }),
    };

    // NOTE(review): argument meanings inferred from values (epochs, learning
    // rate, momentum, target error) — confirm against the trainer API.
    var result = trainer.Train(data, 100000, 0.7, 0.0, 0.005);
    string csv = result.Errors.ToCsvString(); // kept for debugging: error curve as CSV

    // Fixed typo in the failure message ("traing" -> "train").
    Assert.IsTrue(result.IsSuccessful, "could not train against expected error");

    double[] result1 = network.Run(new double[] { 1.0, 1.0 });
    double[] result2 = network.Run(new double[] { 1.0, 0.0 });
    double[] result3 = network.Run(new double[] { 0.0, 1.0 });
    double[] result4 = network.Run(new double[] { 0.0, 0.0 });

    Assert.IsTrue(result1[0].IsEqual(0, 0.0005));
    Assert.IsTrue(result2[0].IsEqual(1, 0.0005));
    Assert.IsTrue(result3[0].IsEqual(1, 0.0005));
    Assert.IsTrue(result4[0].IsEqual(0, 0.0005));
}
public void When_querying_the_network_after_training_it_should_yield_the_correct_output()
{
    // Arrange: train an XOR-layout network on the full truth table.
    var network = new NeuralNetworkBuilder()
        .Using(new XORNetworkLayout())
        .Build();

    var truthTable = new[]
    {
        new TrainingExample(new[] { 0.0, 0.0 }, new[] { 0.0 }),
        new TrainingExample(new[] { 0.0, 1.0 }, new[] { 1.0 }),
        new TrainingExample(new[] { 1.0, 0.0 }, new[] { 1.0 }),
        new TrainingExample(new[] { 1.0, 1.0 }, new[] { 0.0 })
    };
    network.Train(truthTable, 10000, 0.3, 0.1);

    // Act
    double output_0_0 = network.Query(new[] { 0.0, 0.0 }).Single();
    double output_0_1 = network.Query(new[] { 0.0, 1.0 }).Single();
    double output_1_0 = network.Query(new[] { 1.0, 0.0 }).Single();
    double output_1_1 = network.Query(new[] { 1.0, 1.0 }).Single();

    // Assert: each truth-table corner within 0.05 of its XOR value.
    output_0_0.Should().BeApproximately(0.0, 0.05);
    output_0_1.Should().BeApproximately(1.0, 0.05);
    output_1_0.Should().BeApproximately(1.0, 0.05);
    output_1_1.Should().BeApproximately(0.0, 0.05);
}
public void When_querying_the_network_after_training_for_the_XOR_problem_it_should_return_the_correct_output()
{
    // Arrange
    const int numberOfEpochs = 10000;
    const double learningRate = 0.3;
    const double momentum = 0.1;

    var network = new NeuralNetworkBuilder()
        .Using(new XORNetworkLayout())
        .Build();

    // The four XOR truth-table rows are the complete training set.
    var truthTable = new[]
    {
        new TrainingExample(new[] { 0.0, 0.0 }, new[] { 0.0 }),
        new TrainingExample(new[] { 0.0, 1.0 }, new[] { 1.0 }),
        new TrainingExample(new[] { 1.0, 0.0 }, new[] { 1.0 }),
        new TrainingExample(new[] { 1.0, 1.0 }, new[] { 0.0 })
    };
    network.Train(truthTable, numberOfEpochs, learningRate, momentum);

    // Act
    double output_0_0 = network.Query(new[] { 0.0, 0.0 }).Single();
    double output_0_1 = network.Query(new[] { 0.0, 1.0 }).Single();
    double output_1_0 = network.Query(new[] { 1.0, 0.0 }).Single();
    double output_1_1 = network.Query(new[] { 1.0, 1.0 }).Single();

    // Assert
    output_0_0.Should().BeApproximately(0.0, 0.05);
    output_0_1.Should().BeApproximately(1.0, 0.05);
    output_1_0.Should().BeApproximately(1.0, 0.05);
    output_1_1.Should().BeApproximately(0.0, 0.05);
}
private static void Main(string[] args)
{
    // Build a randomly-initialised 2-2-1 sigmoid network for the XOR problem.
    NeuralNetwork network = NeuralNetworkBuilder.StartBuild()
        .SetInitMethod(InitializationFunction.Random)
        .CreateInputLayer(2)
        .AddHiddenLayer(2, new Sigmoid())
        .CreateOutputLayer(1, new Sigmoid())
        .Build(new Random());

    // XOR truth table; the same arrays are reused as the validation set below.
    double[][] expectedOutputs = { new double[] { 0 }, new double[] { 1 }, new double[] { 1 }, new double[] { 0 } };
    double[][] inputs =
    {
        new double[] { 0, 0 },
        new double[] { 1, 0 },
        new double[] { 0, 1 },
        new double[] { 1, 1 }
    };

    // Run 8000 epochs of backpropagation; the final epoch error is stored but
    // otherwise unused (nothing is printed).
    double error = 0;
    Backpropagation trainer = new Backpropagation(network);
    while (trainer.EpochCount < 8000)
    {
        error = trainer.TrainEpoch(inputs, expectedOutputs, inputs, expectedOutputs);
    }
}
/// <summary>
/// Trains a 2-3-1 tanh network (with bias connections) on the bipolar XNOR
/// table, then evaluates it via TestNetwork.
/// </summary>
public static void BipolarNXORExample(ILogger logger)
{
    var network = new NeuralNetworkBuilder()
        .AddBiasConnections()
        .SetActivationFunction(new TANHActivationFunction())
        .SetErrorFunction(new MeanSquaredErrorFunction(1))
        .SetNumberOfInputNeurons(2)
        .SetNumberOfHiddenNeurons(3)
        .SetNumberOfOutputNeurons(1)
        .Build();

    var trainer = new Trainer(neuralNetwork: network, learningRate: 0.01, logger: logger);

    // Bipolar XNOR: +1 when the inputs agree, -1 when they differ.
    var samples = new[]
    {
        new TrainData(new double[] { -1, -1 }, new double[] { 1 }),
        new TrainData(new double[] { 1, -1 }, new double[] { -1 }),
        new TrainData(new double[] { -1, 1 }, new double[] { -1 }),
        new TrainData(new double[] { 1, 1 }, new double[] { 1 }),
    };

    trainer.Train(samples, numberOfEpochs: 100000, terminalEpochError: 0.01);
    TestNetwork(network, samples, logger);
}
public void When_training_the_network_for_a_single_epoch_it_should_update_the_weights_and_biases_correctly()
{
    // Arrange
    const int numberOfEpochs = 1;
    const double learningRate = 0.5;
    const double momentum = 0.0;
    const double tolerance = 0.000000005;

    var network = new NeuralNetworkBuilder()
        .Using(new TwoLayerNetworkProvider())
        .Build();

    // Act: one epoch over a single example.
    network.Train(new[] { new TrainingExample(new[] { 0.1, 0.2, 0.3 }, new[] { 1.0, 0.0 }) },
        numberOfEpochs, learningRate, momentum);

    // Assert: compare every weight/bias (row-major order) against the
    // precomputed expected values.
    Layer hiddenLayer = network.GetLayers().First();
    Layer outputLayer = network.GetLayers().Last();

    double[] expectedOutputWeights =
    {
        0.18612817, 0.28933822, 0.39242441, 0.49533644,
        0.49296848, 0.58790113, 0.68302931, 0.77843241
    };
    var updatedOutputWeights = outputLayer.Weights.ToRowMajorArray();
    for (int i = 0; i < expectedOutputWeights.Length; i++)
    {
        updatedOutputWeights[i].Should().BeApproximately(expectedOutputWeights[i], tolerance);
    }

    double[] expectedOutputBiases = { 0.10570742, -0.05372498 };
    var updatedOutputBiases = outputLayer.Biases.ToArray();
    for (int i = 0; i < expectedOutputBiases.Length; i++)
    {
        updatedOutputBiases[i].Should().BeApproximately(expectedOutputBiases[i], tolerance);
    }

    double[] expectedHiddenWeights =
    {
        0.09883190, 0.19766381, 0.29649571, 0.39877481,
        0.49754961, 0.59632442, 0.69874635, 0.79749270,
        0.89623905, 0.99874653, 1.09749307, 1.19623960
    };
    var updatedHiddenWeights = hiddenLayer.Weights.ToRowMajorArray();
    for (int i = 0; i < expectedHiddenWeights.Length; i++)
    {
        updatedHiddenWeights[i].Should().BeApproximately(expectedHiddenWeights[i], tolerance);
    }

    double[] expectedHiddenBiases = { 0.04831904, 0.06774805, 0.08746351, 0.10746534 };
    var updatedHiddenBiases = hiddenLayer.Biases.ToArray();
    for (int i = 0; i < expectedHiddenBiases.Length; i++)
    {
        updatedHiddenBiases[i].Should().BeApproximately(expectedHiddenBiases[i], tolerance);
    }
}
public void When_training_the_network_for_a_single_epoch_it_should_update_the_weights_and_biases_correctly()
{
    // Arrange
    const double learningRate = 0.5;
    const double momentum = 0.0;
    const double tolerance = 0.000000005;

    var network = new NeuralNetworkBuilder()
        .Using(new TwoLayerNetworkProvider())
        .Build();

    // Act: one epoch over a single example.
    network.Train(new[] { new TrainingExample(new[] { 1.0, -2.0, 3.0 }, new[] { 0.1234, 0.8766 }) },
        1, learningRate, momentum);

    // Assert: compare every weight/bias (column-major order) against the
    // precomputed expected values.
    Layer hiddenLayer = network.GetLayers().First();
    Layer outputLayer = network.GetLayers().Last();

    double[] expectedOutputWeights =
    {
        -0.00791776, -0.00595456, -0.00399135, -0.00202815,
        0.04078488, 0.04281853, 0.04485217, 0.04688582
    };
    var updatedOutputWeights = outputLayer.Weights.ToColumnMajorArray();
    for (int i = 0; i < expectedOutputWeights.Length; i++)
    {
        updatedOutputWeights[i].Should().BeApproximately(expectedOutputWeights[i], tolerance);
    }

    double[] expectedOutputBiases = { -0.02407493, 0.07087427 };
    var updatedOutputBiases = outputLayer.Biases.ToArray();
    for (int i = 0; i < expectedOutputBiases.Length; i++)
    {
        updatedOutputBiases[i].Should().BeApproximately(expectedOutputBiases[i], tolerance);
    }

    double[] expectedHiddenWeights =
    {
        0.00099337, 0.00501327, 0.00898010,
        0.00199127, 0.00601746, 0.00997380,
        0.00298917, 0.00702166, 0.01096751,
        0.00398707, 0.00802586, 0.01196121
    };
    var updatedHiddenWeights = hiddenLayer.Weights.ToColumnMajorArray();
    for (int i = 0; i < expectedHiddenWeights.Length; i++)
    {
        updatedHiddenWeights[i].Should().BeApproximately(expectedHiddenWeights[i], tolerance);
    }

    double[] expectedHiddenBiases = { 0.01299337, 0.01399127, 0.01498917, 0.01598707 };
    var updatedHiddenBiases = hiddenLayer.Biases.ToArray();
    for (int i = 0; i < expectedHiddenBiases.Length; i++)
    {
        updatedHiddenBiases[i].Should().BeApproximately(expectedHiddenBiases[i], tolerance);
    }
}
public void When_querying_a_network_that_has_known_weights_and_biases_it_should_yield_the_expected_output()
{
    // Arrange: the provider supplies a fixed, known set of weights and biases.
    var network = new NeuralNetworkBuilder()
        .Using(new TwoLayerNetworkProvider())
        .Build();

    // Act
    double[] output = network.Query(new[] { 1.0, -2.0, 3.0 });

    // Assert: values precomputed for the known parameters.
    output[0].Should().BeApproximately(0.5164, 0.00005);
    output[1].Should().BeApproximately(0.5172, 0.00005);
}
public void When_querying_a_network_with_known_weights_and_biases_it_should_return_the_expected_output()
{
    // Arrange: the provider supplies a fixed, known set of weights and biases.
    var network = new NeuralNetworkBuilder()
        .Using(new TwoLayerNetworkProvider())
        .Build();

    // Act
    double[] output = network.Query(new[] { 0.1, 0.2, 0.3 });

    // Assert: two output neurons, values precomputed for the known parameters.
    output.Should().HaveCount(2);
    output[0].Should().BeApproximately(0.6918, 0.00005);
    output[1].Should().BeApproximately(0.8596, 0.00005);
}
/// <summary>
/// Builds a neural network from the layer controls on the form, loads the
/// user-selected training-data file, and returns both packaged in the shared
/// trainerParams. (Removed: an unused Trainer instance, two unused locals,
/// and several blocks of commented-out experiment code.)
/// </summary>
public static trainerParams BuildNet(FlowLayoutPanel layers, FileDialog LoadData_dlg, Result r, Telerik.WinControls.UI.RadDiagram radDiagram1)
{
    NeuralNetworkBuilder b = new NeuralNetworkBuilder();
    // NOTE(review): a single FunctionApplier is shared by every layer and its
    // ActivatorFunction is overwritten per iteration — confirm b.Layer copies
    // it, otherwise all layers end up with the last selected activation.
    FunctionApplier functionApplier = new FunctionApplier();
    IActivatorFunction AF = null;

    foreach (var layer in layers.Controls)
    {
        // Each row control describes one layer: neuron count, activation, learning rate.
        if (!(layer is L_ctrl_mat temp))
        {
            continue; // defensively skip controls that are not layer rows
        }

        int neuronsnumber = Convert.ToInt16(temp.NN_drpdn.Value);
        string selectedFunction = temp.AF_drpdn.SelectedItem.Text;

        if (ActivatorFunctions.FunctionName.SIGMOID.ToString() == selectedFunction)
        {
            AF = new SigmoidFunction();
            imgs.Add(Resources.Layer__Sigmoid);
        }
        else if (ActivatorFunctions.FunctionName.TANH.ToString() == selectedFunction)
        {
            AF = new TanhFunction();
            imgs.Add(Resources.Layer_Tan_H);
        }

        functionApplier.ActivatorFunction = AF;
        b.Layer(neuronsnumber, functionApplier, (double)temp.Lr_drpdn.Value);
    }

    NeuralNetwork.NeuralNetwork nn = b.Build();

    // Load the (inputs, outputs) tuple from the selected file.
    var tuples = DataReader.DataReader.Instance.ReadFromFile(LoadData_dlg.FileName);

    Params = new trainerParams();
    Params.nn = nn;
    Params.Tuple = tuples;

    NetGraph(nn, radDiagram1);
    return Params;
}
public void When_training_the_network_for_a_single_epoch_it_should_return_the_cost()
{
    // Arrange
    const double learningRate = 0.5;
    const double momentum = 0.0;

    var network = new NeuralNetworkBuilder()
        .Using(new TwoLayerNetworkProvider())
        .Build();

    var example = new TrainingExample(new[] { 1.0, -2.0, 3.0 }, new[] { 0.1234, 0.8766 });

    // Act: a single epoch over a single example.
    double cost = network.Train(new[] { example }, 1, learningRate, momentum);

    // Assert
    cost.Should().BeApproximately(0.1418, 0.00005);
}
public void When_training_the_network_for_a_single_epoch_it_should_return_the_cost()
{
    // Arrange
    const int numberOfEpochs = 1;
    const double learningRate = 0.5;
    const double momentum = 0.0;

    // Network with known parameters and an explicit quadratic cost function.
    var network = new NeuralNetworkBuilder()
        .Using(new TwoLayerNetworkProvider())
        .Using(new QuadraticCost())
        .Build();

    var example = new TrainingExample(new[] { 0.1, 0.2, 0.3 }, new[] { 1.0, 0.0 });

    // Act
    double cost = network.Train(new[] { example }, numberOfEpochs, learningRate, momentum);

    // Assert
    cost.Should().BeApproximately(0.83395, 0.000005);
}
public void When_querying_the_network_after_training_it_should_return_the_correct_output()
{
    // Arrange
    const int numberOfEpochs = 20000;
    const double learningRate = 0.3;
    const double momentum = 0.1;

    var network = new NeuralNetworkBuilder()
        .Using(new TwoLayerNetworkProvider())
        .Build();

    var example = new TrainingExample(new[] { 0.1, 0.2, 0.3 }, new[] { 1.0, 0.0 });
    network.Train(new[] { example }, numberOfEpochs, learningRate, momentum);

    // Act: query with the same input the network was trained on.
    double[] output = network.Query(new[] { 0.1, 0.2, 0.3 });

    // Assert: the network should have memorised the single example.
    output[0].Should().BeApproximately(1.0, 0.005);
    output[1].Should().BeApproximately(0.0, 0.005);
}
public void When_training_the_network_for_a_single_epoch_it_should_calculate_the_gradients()
{
    // Arrange
    const int numberOfEpochs = 1;
    const double learningRate = 0.5;
    const double momentum = 0.0;

    var network = new NeuralNetworkBuilder()
        .Using(new TwoLayerNetworkProvider())
        .Build();

    // Act: one epoch over a single example.
    network.Train(new[] { new TrainingExample(new[] { 0.1, 0.2, 0.3 }, new[] { 1.0, 0.0 }) },
        numberOfEpochs, learningRate, momentum);

    // Assert: gradients read in row-major order, compared per group against
    // precomputed expected values (tolerances preserved from the original).
    Layer hiddenLayer = network.GetLayers().First();
    Layer outputLayer = network.GetLayers().Last();

    double[] expectedOutputWeightGradients =
    {
        -0.07225635, -0.07867644, -0.08484882, -0.09067289,
        0.11406304, 0.12419773, 0.13394138, 0.14313518
    };
    double[] outputWeightGradients = outputLayer.WeightGradients.ToRowMajorArray();
    for (int i = 0; i < expectedOutputWeightGradients.Length; i++)
    {
        outputWeightGradients[i].Should().BeApproximately(expectedOutputWeightGradients[i], 0.000000005);
    }

    double[] expectedOutputBiasGradients = { -0.131415, 0.207450 };
    var outputBiasGradients = outputLayer.BiasGradients.ToArray();
    for (int i = 0; i < expectedOutputBiasGradients.Length; i++)
    {
        outputBiasGradients[i].Should().BeApproximately(expectedOutputBiasGradients[i], 0.0000005);
    }

    double[] expectedOutputInputGradients = { 0.09438525, 0.10198876, 0.10959228, 0.11719579 };
    var outputInputGradients = outputLayer.PreviousLayerActivationGradients.ToArray();
    for (int i = 0; i < expectedOutputInputGradients.Length; i++)
    {
        outputInputGradients[i].Should().BeApproximately(expectedOutputInputGradients[i], 0.000000005);
    }

    double[] expectedHiddenWeightGradients =
    {
        0.00233619, 0.00467238, 0.00700857,
        0.00245039, 0.00490078, 0.00735117,
        0.00250730, 0.00501460, 0.00752190,
        0.00250693, 0.00501386, 0.00752079
    };
    double[] hiddenWeightGradients = hiddenLayer.WeightGradients.ToRowMajorArray();
    for (int i = 0; i < expectedHiddenWeightGradients.Length; i++)
    {
        hiddenWeightGradients[i].Should().BeApproximately(expectedHiddenWeightGradients[i], 0.0000005);
    }

    double[] expectedHiddenBiasGradients = { 0.02336191, 0.02450390, 0.02507299, 0.02506932 };
    double[] hiddenBiasGradients = hiddenLayer.BiasGradients.ToArray();
    for (int i = 0; i < expectedHiddenBiasGradients.Length; i++)
    {
        hiddenBiasGradients[i].Should().BeApproximately(expectedHiddenBiasGradients[i], 0.000000005);
    }

    // The first layer has no predecessor, so no activation gradients exist for it.
    hiddenLayer.PreviousLayerActivationGradients.Should().BeNull();
}
public void When_training_the_network_for_a_single_epoch_it_should_calculate_the_gradient_vectors()
{
    // Arrange
    const double learningRate = 0.5;
    const double momentum = 0.0;

    var network = new NeuralNetworkBuilder()
        .Using(new TwoLayerNetworkProvider())
        .Build();

    // Act: one epoch over a single example.
    network.Train(new[] { new TrainingExample(new[] { 1.0, -2.0, 3.0 }, new[] { 0.1234, 0.8766 }) },
        1, learningRate, momentum);

    // Assert: gradients read in column-major order, compared per group against
    // precomputed expected values (tolerances preserved from the original).
    Layer hiddenLayer = network.GetLayers().First();
    Layer outputLayer = network.GetLayers().Last();

    double[] expectedOutputWeightGradients =
    {
        0.04983553, 0.04990912, 0.04998271, 0.05005629,
        -0.04556976, -0.04563706, -0.04570435, -0.04577163
    };
    double[] outputWeightGradients = outputLayer.WeightGradients.ToColumnMajorArray();
    for (int i = 0; i < expectedOutputWeightGradients.Length; i++)
    {
        outputWeightGradients[i].Should().BeApproximately(expectedOutputWeightGradients[i], 0.000000005);
    }

    double[] expectedOutputBiasGradients = { 0.098150, -0.089749 };
    var outputBiasGradients = outputLayer.BiasGradients.ToArray();
    for (int i = 0; i < expectedOutputBiasGradients.Length; i++)
    {
        outputBiasGradients[i].Should().BeApproximately(expectedOutputBiasGradients[i], 0.0000005);
    }

    double[] expectedOutputInputGradients = { 0.00005307, 0.00006988, 0.00008668, 0.00010348 };
    var outputInputGradients = outputLayer.PreviousLayerActivationGradients.ToArray();
    for (int i = 0; i < expectedOutputInputGradients.Length; i++)
    {
        outputInputGradients[i].Should().BeApproximately(expectedOutputInputGradients[i], 0.000000005);
    }

    double[] expectedHiddenWeightGradients =
    {
        0.00001326, -0.00002653, 0.00003980,
        0.00001746, -0.00003493, 0.00005239,
        0.00002166, -0.00004332, 0.00006499,
        0.00002586, -0.00005172, 0.00007758
    };
    double[] hiddenWeightGradients = hiddenLayer.WeightGradients.ToColumnMajorArray();
    for (int i = 0; i < expectedHiddenWeightGradients.Length; i++)
    {
        hiddenWeightGradients[i].Should().BeApproximately(expectedHiddenWeightGradients[i], 0.0000005);
    }

    double[] expectedHiddenBiasGradients = { 0.000013265, 0.000017464, 0.000021662, 0.000025860 };
    double[] hiddenBiasGradients = hiddenLayer.BiasGradients.ToArray();
    for (int i = 0; i < expectedHiddenBiasGradients.Length; i++)
    {
        hiddenBiasGradients[i].Should().BeApproximately(expectedHiddenBiasGradients[i], 0.0000000005);
    }

    // The first layer has no predecessor, so no activation gradients exist for it.
    hiddenLayer.PreviousLayerActivationGradients.Should().BeNull();
}
/// <summary>
/// Trains a layered network to map the four bipolar input sign-combinations
/// onto one-hot outputs, looping until a full pass of 10000 random samples is
/// classified correctly. (Removed: a `makeLoop` flag that was always true.)
/// </summary>
public static int Main()
{
    var hiddenLayers = new List<int>();
    var networkBlueprint = new LayeredNeuralNetworkBlueprint
    {
        inputNeuronAmount = 2,
        outputNeuronAmount = 4,
        hiddenLayersNeuronAmounts = hiddenLayers
    };
    var network = NeuralNetworkBuilder.buildLayeredNetwork(networkBlueprint);

    // Each row holds the two bipolar inputs for class i; the expected output
    // is the corresponding one-hot vector (true only at index i). Order and
    // values match the original hand-written sets exactly.
    double[][] inputPatterns =
    {
        new double[] { -1, 1 },
        new double[] { 1, 1 },
        new double[] { 1, -1 },
        new double[] { -1, -1 }
    };

    var sets = new List<TrainingData>();
    for (int i = 0; i < inputPatterns.Length; i++)
    {
        var set = new TrainingData();
        set.inputs.Add(inputPatterns[i][0]);
        set.inputs.Add(inputPatterns[i][1]);
        for (int j = 0; j < 4; j++)
        {
            set.expectedOutputs.Add(j == i); // one-hot encoding
        }
        sets.Add(set);
    }

    int numberOfCycles = 0;
    int good = 0;
    int all = 0;
    // Repeat 10000-sample passes until every sample in a pass is learned.
    while (good != all || all == 0)
    {
        numberOfCycles++;
        good = 0;
        all = 0;
        for (int i = 0; i < 10000; i++)
        {
            int setNumber = RandGen.NextInt() % 4;
            if (network.Teach(sets[setNumber].inputs, sets[setNumber].expectedOutputs))
            {
                good++;
            }
            all++;
        }
        Console.WriteLine(numberOfCycles + ". good: " + good + "/" + all);
        // TODO(review): hard-coded user-specific path — make configurable.
        SaveToFile(@"C:\Users\Majek\Desktop\WriteText.txt", good, all);
    }
    return 0;
}
// Console demo: trains the same 5-feature/1-output animal-classification
// problem with two algorithms (backpropagation vs. genetics) for a
// user-supplied wall-clock budget, then prints and compares their predictions.
static void Main(string[] args)
{
    //Set Default Appearance Variables
    Random randy = new Random(26); // fixed seed -> reproducible runs
    Console.CursorVisible = false;
    Console.SetWindowSize(100, 50);
    Console.SetBufferSize(100, 60);
    Console.Title = "Neural Net Classification Test";
    while (true)
    {
        //Ask For Amount Of Epochs/Generations
        Console.ForegroundColor = ConsoleColor.Green;
        Console.Write("Train Time (in milliseconds): ");
        Console.ForegroundColor = ConsoleColor.White;
        int Milliseconds = int.Parse(Console.ReadLine());
        Console.CursorVisible = false;
        Console.Clear();

        #region Data Set
        // NOTE(review): despite its name, NewData is used below as the
        // *evaluation* set, while TestData is what the networks are trained
        // on — confirm the naming is intentional.
        Dictionary <double[], string> NewData = new Dictionary <double[], string>
        {
            // Input Format:
            // # of Legs, Has Scales, Is Cold Blooded, Water Breathing, Has Fins
            { new double[] { 4, 0, 1, 1, 0 }, "Frog" },
            { new double[] { 4, 0, 0, 0, 0 }, "Corgi" },
            { new double[] { 4, 0, 0, 0, 0 }, "Tiger" },
            { new double[] { 4, 0, 0, 0, 0 }, "Stoat" },
            { new double[] { 4, 1, 1, 0, 0 }, "Gecko" },
            { new double[] { 2, 0, 0, 0, 0 }, "Human" },
            { new double[] { 0, 1, 1, 1, 1 }, "Salmon" },
            { new double[] { 0, 0, 0, 0, 1 }, "Dolphin" },
            { new double[] { 5, 0, 1, 1, 0 }, "Starfish" },
            { new double[] { 4, 1, 1, 0, 0 }, "Chameleon" },
            { new double[] { 2, 1, 1, 0, 0 }, "Sea Turtle" },
            { new double[] { 8, 0, 1, 0, 0 }, "Black Widow" },
            { new double[] { 4, 1, 1, 0, 0 }, "Komodo Dragon" }
        };
        // Expected label per NewData row, in the dictionary's insertion order
        // (the 1-labelled animals appear to be the reptiles — TODO confirm).
        double[] NewDataCorrectOutputs = new double[]
        {
            0, //Frog
            0, //Corgi
            0, //Tiger
            0, //Stoat
            1, //Gecko
            0, //Human
            0, //Salmon
            0, //Dolphin
            0, //Starfish
            1, //Chameleon
            1, //Sea Turtle
            0, //Black Widow
            1  //Komodo Dragon
        };
        Dictionary <double[], string> TestData = new Dictionary <double[], string>
        {
            // Input Format:
            // # of Legs, Has Scales, Is Cold Blooded, Water Breathing, Has Fins
            { new double[] { 0, 1, 1, 1, 1 }, "Tuna" },
            { new double[] { 2, 0, 0, 0, 0 }, "Seagull" },
            { new double[] { 0, 0, 1, 1, 0 }, "Jelly Fish" },
            { new double[] { 0, 1, 1, 0, 0 }, "Burmese Python" },
            { new double[] { 4, 1, 1, 0, 0 }, "Nile Crocodile" },
        };
        // Inputs are copied out of the dictionary keys; outputs listed in the
        // same insertion order.
        double[][] TestDataInputs = new double[TestData.Keys.Count][];
        double[][] TestDataOutputs = new double[][]
        {
            new double[] { 0 }, //Tuna
            new double[] { 0 }, //Seagull
            new double[] { 0 }, //Jelly Fish
            new double[] { 1 }, //Burmese Python
            new double[] { 1 }, //Nile Crocodile
        };
        TestData.Keys.CopyTo(TestDataInputs, 0);
        #endregion Data Set

        //Create Neural Network for Backprop
        // Both networks are 5 inputs wired straight to 1 sigmoid output (no hidden layer).
        NeuralNetwork ModelNetwork = new NeuralNetworkBuilder(InitializationFunction.Random)
            .CreateInputLayer(5)
            .CreateOutputLayer(1, new Sigmoid())
            .Build(randy);
        NeuralNetwork ModelGeneticNetwork = new NeuralNetworkBuilder(InitializationFunction.Random)
            .CreateInputLayer(5)
            .CreateOutputLayer(1, new Sigmoid())
            .Build(randy);

        //Create Backpropagation Trainer
        Backpropagation BackpropTrainer = new Backpropagation(ModelNetwork, 0.035);
        double BackpropError = 1;

        //Create Genetics Trainer
        Genetics GeneticsTrainer = new Genetics(randy, ModelGeneticNetwork, 500);

        //Create Timers
        TimeSpan GeneticsLearnTime = new TimeSpan();
        TimeSpan BackpropLearnTime = new TimeSpan();

        #region Training Loop
        // Each trainer gets the same wall-clock budget and stops individually
        // once its accumulated training time exceeds it.
        while (BackpropLearnTime.TotalMilliseconds < Milliseconds || GeneticsLearnTime.TotalMilliseconds < Milliseconds)
        {
            //Train Neural Networks Only Until It Reaches The Time Limit
            if (BackpropLearnTime.TotalMilliseconds < Milliseconds)
            {
                DateTime StartTime = DateTime.Now;
                BackpropError = BackpropTrainer.TrainEpoch(TestDataInputs, TestDataOutputs);
                BackpropLearnTime += DateTime.Now - StartTime;
            }
            if (GeneticsLearnTime.TotalMilliseconds < Milliseconds)
            {
                DateTime StartTime = DateTime.Now;
                GeneticsTrainer.TrainGeneration(TestDataInputs, TestDataOutputs);
                GeneticsLearnTime += DateTime.Now - StartTime;
            }
            //Write Out The Values Of The Backpropagation Neural Network
            Console.SetCursorPosition(0, 0); // redraw in place instead of scrolling
            PrintBackpropHeader(BackpropTrainer, BackpropError);
            //Separator
            Console.ForegroundColor = ConsoleColor.White;
            Console.WriteLine($"{Environment.NewLine}-------------------------------------{Environment.NewLine}");
            //Write Out The Values Of The Genetic Neural Network
            PrintGeneticsHeader(GeneticsTrainer);
        }
        #endregion Training Loop

        //Show Results
        Console.Clear();
        Console.SetCursorPosition(0, 0);

        #region Printing Backpropagation Results
        //Print Backpropagation Training Data Results
        PrintBackpropHeader(BackpropTrainer, BackpropError);
        Console.Write(Environment.NewLine);
        foreach (KeyValuePair <double[], string> SingleTestData in TestData)
        {
            Console.ForegroundColor = ConsoleColor.White;
            Console.Write($"{SingleTestData.Value} => ");
            Console.ForegroundColor = ConsoleColor.Yellow;
            Console.WriteLine($"{Math.Round(BackpropTrainer.Network.Compute(SingleTestData.Key)[0], 0)}");
        }
        //Print Separator
        Console.ForegroundColor = ConsoleColor.Magenta;
        Console.WriteLine($"----------New Data----------");
        //Print Backpropagation Test Data Results
        int BackpropQuestionIndex = 0;
        int BackpropCorrectQuestions = 0;
        foreach (KeyValuePair <double[], string> SingleTestData in NewData)
        {
            // Round the sigmoid output to 0/1 and compare with the expected label;
            // wrong answers are printed in red.
            int NeuralNetComputedValue = (int)Math.Round(BackpropTrainer.Network.Compute(SingleTestData.Key)[0], 0);
            bool CorrectAnswer = NeuralNetComputedValue == NewDataCorrectOutputs[BackpropQuestionIndex];
            Console.ForegroundColor = CorrectAnswer ? ConsoleColor.White : ConsoleColor.Red;
            BackpropCorrectQuestions += CorrectAnswer ? 1 : 0;
            Console.Write($"{SingleTestData.Value} => ");
            Console.ForegroundColor = ConsoleColor.Yellow;
            Console.WriteLine(NeuralNetComputedValue);
            BackpropQuestionIndex++;
        }
        Console.ForegroundColor = ConsoleColor.Green;
        Console.Write("Backpropagation Prediction (% Correct): ");
        Console.ForegroundColor = ConsoleColor.Red;
        Console.WriteLine(Math.Round(BackpropCorrectQuestions / (double)NewDataCorrectOutputs.Length * 100, 2));
        #endregion Printing Backpropagation Results

        //Learning Algorithm Separator
        Console.ForegroundColor = ConsoleColor.White;
        Console.WriteLine($"{Environment.NewLine}-------------------------------------{Environment.NewLine}");
        PrintGeneticsHeader(GeneticsTrainer);
        Console.Write(Environment.NewLine);

        #region Printing Genetics Results
        //Print Genetics Training Data Results
        foreach (KeyValuePair <double[], string> SingleTestData in TestData)
        {
            Console.ForegroundColor = ConsoleColor.White;
            Console.Write($"{SingleTestData.Value} => ");
            Console.ForegroundColor = ConsoleColor.Yellow;
            Console.WriteLine($"{Math.Round(GeneticsTrainer.BestNetwork.Compute(SingleTestData.Key)[0], 0)}");
        }
        Console.ForegroundColor = ConsoleColor.Magenta;
        Console.WriteLine($"----------New Data----------");
        //Print Genetics Test Data Results
        int GeneticQuestionIndex = 0;
        int GeneticCorrectQuestions = 0;
        foreach (KeyValuePair <double[], string> SingleTestData in NewData)
        {
            int NeuralNetComputedValue = (int)Math.Round(GeneticsTrainer.BestNetwork.Compute(SingleTestData.Key)[0], 0);
            bool CorrectAnswer = NeuralNetComputedValue == NewDataCorrectOutputs[GeneticQuestionIndex];
            Console.ForegroundColor = CorrectAnswer ? ConsoleColor.White : ConsoleColor.Red;
            GeneticCorrectQuestions += CorrectAnswer ? 1 : 0;
            Console.Write($"{SingleTestData.Value} => ");
            Console.ForegroundColor = ConsoleColor.Yellow;
            Console.WriteLine(NeuralNetComputedValue);
            GeneticQuestionIndex++;
        }
        Console.ForegroundColor = ConsoleColor.Green;
        Console.Write("Genetics Prediction (% Correct): ");
        Console.ForegroundColor = ConsoleColor.Red;
        Console.WriteLine(Math.Round(GeneticCorrectQuestions / (double)NewDataCorrectOutputs.Length * 100, 2));
        #endregion Printing Genetics Results

        //Pause Until Key Press
        Console.ReadKey();
        Console.Clear();
    }
}
/// <summary>
/// Console demo: trains the same XOR problem with a backpropagation trainer and a
/// genetic-algorithm trainer side by side, redrawing both result panels each pass
/// until the user terminates the process (the loop never exits on its own).
/// </summary>
static void Main(string[] args)
{
    // Shared RNG and the XOR network architecture (2 inputs -> 2 hidden -> 1 output, sigmoid).
    Random randy = new Random();
    NeuralNetwork XORNeuralNetwork = new NeuralNetworkBuilder(InitializationFunction.Random)
        .CreateInputLayer(2)
        .AddHiddenLayer(2, new Sigmoid())
        .CreateOutputLayer(1, new Sigmoid())
        .Build(randy);

    // Create Trainers.
    // NOTE(review): both trainers are handed the SAME network instance — presumably
    // Genetics uses it only as an architecture template; confirm it clones internally.
    Backpropagation BackpropTrainer = new Backpropagation(XORNeuralNetwork);
    Genetics GeneticsTrainer = new Genetics(randy, XORNeuralNetwork, 500);

    // Training stops (per trainer) once its error/fitness drops to this threshold.
    double NeuralNetworkTargetError = 0.05;
    double BackpropError = 0;

    // XOR truth table: inputs and their expected outputs, row-aligned.
    double[][] TestDataOutputs = new double[][]
    {
        new double[] { 0 },
        new double[] { 1 },
        new double[] { 1 },
        new double[] { 0 }
    };
    double[][] TestDataInputs = new double[][]
    {
        new double[] { 0, 0 },
        new double[] { 1, 0 },
        new double[] { 0, 1 },
        new double[] { 1, 1 }
    };

    // Accumulated training time per trainer.
    TimeSpan GeneticsLearnTime = new TimeSpan();
    TimeSpan BackpropLearnTime = new TimeSpan();
    // Use a Stopwatch for elapsed-time measurement: DateTime.Now subtraction has
    // coarse (~15 ms) resolution and is not monotonic (system clock changes skew it).
    var LearnStopwatch = new System.Diagnostics.Stopwatch();

    Console.CursorVisible = false;

    // Train Both At Least Once so the first redraw has data to show.
    BackpropError = BackpropTrainer.TrainEpoch(TestDataInputs, TestDataOutputs);
    GeneticsTrainer.TrainGeneration(TestDataInputs, TestDataOutputs);

    while (true)
    {
        // Train Backpropagation Neural Network until it reaches the target error.
        if (BackpropError > NeuralNetworkTargetError)
        {
            LearnStopwatch.Restart();
            BackpropError = BackpropTrainer.TrainEpoch(TestDataInputs, TestDataOutputs);
            LearnStopwatch.Stop();
            BackpropLearnTime += LearnStopwatch.Elapsed;
        }

        // Train Genetics Neural Network until its best fitness reaches the target.
        if (GeneticsTrainer.BestNetworkFitness > NeuralNetworkTargetError)
        {
            LearnStopwatch.Restart();
            GeneticsTrainer.TrainGeneration(TestDataInputs, TestDataOutputs);
            LearnStopwatch.Stop();
            GeneticsLearnTime += LearnStopwatch.Elapsed;
        }

        // Redraw in place (cursor reset instead of Clear to avoid flicker):
        // backpropagation panel first.
        Console.SetCursorPosition(0, 0);
        Console.ForegroundColor = ConsoleColor.White;
        Console.WriteLine($"Backpropagation Results: ");
        Console.WriteLine($"Epoch Count: {BackpropTrainer.EpochCount}");
        Console.WriteLine($"Learning Time: {BackpropLearnTime.TotalMilliseconds}{Environment.NewLine}");
        WriteNeuralNetSingleValue(BackpropTrainer.Network, new double[] { 1, 1 });
        WriteNeuralNetSingleValue(BackpropTrainer.Network, new double[] { 0, 1 });
        WriteNeuralNetSingleValue(BackpropTrainer.Network, new double[] { 1, 0 });
        WriteNeuralNetSingleValue(BackpropTrainer.Network, new double[] { 0, 0 });
        Console.ForegroundColor = ConsoleColor.Green;
        Console.Write("Error: ");
        Console.ForegroundColor = ConsoleColor.Red;
        Console.WriteLine($"{BackpropError:0.000000000}");
        Console.ForegroundColor = ConsoleColor.White;
        Console.WriteLine($"{Environment.NewLine}-------------------------------------{Environment.NewLine}");

        // Genetics panel.
        Console.WriteLine($"Genetics Results: ");
        Console.WriteLine($"Generation Count: {GeneticsTrainer.GenerationCount}");
        Console.WriteLine($"Learning Time: {GeneticsLearnTime.TotalMilliseconds}{Environment.NewLine}");
        WriteNeuralNetSingleValue(GeneticsTrainer.BestNetwork, new double[] { 1, 1 });
        WriteNeuralNetSingleValue(GeneticsTrainer.BestNetwork, new double[] { 0, 1 });
        WriteNeuralNetSingleValue(GeneticsTrainer.BestNetwork, new double[] { 1, 0 });
        WriteNeuralNetSingleValue(GeneticsTrainer.BestNetwork, new double[] { 0, 0 });
        Console.ForegroundColor = ConsoleColor.Green;
        Console.Write("Error: ");
        Console.ForegroundColor = ConsoleColor.Red;
        Console.WriteLine($"{GeneticsTrainer.BestNetworkFitness:0.000000000}");
    }
}
/// <summary>
/// Form-load handler: builds sine-wave training/test data, constructs the two
/// networks and trainers (backpropagation and genetics), plots the true sine
/// curve on series 2, and starts both background-worker training loops.
/// </summary>
/// <param name="sender">Standard WinForms event source (unused).</param>
/// <param name="e">Standard WinForms event args (unused).</param>
private void SineTestTable_Load(object sender, EventArgs e)
{
    // Clamp the chart's Y axis slightly beyond sine's [-1, 1] range.
    MainGraph.ChartAreas[0].AxisY.Maximum = 1.25;
    MainGraph.ChartAreas[0].AxisY.Minimum = -1.25;

    // Fixed seed for reproducible runs. Both networks share the same architecture:
    // 5 inputs (a sliding window of x values) -> 20 hidden -> 1 output.
    Random Rand = new Random(26);
    NeuralNetwork BackpropNeuralNetwork = new NeuralNetworkBuilder(InitializationFunction.Random)
        .CreateInputLayer(5)
        .AddHiddenLayer(20, new SoftSine())
        .CreateOutputLayer(1, new SoftSine())
        .Build(Rand);
    NeuralNetwork GeneticsModelNetwork = new NeuralNetworkBuilder(InitializationFunction.Random)
        .CreateInputLayer(5)
        .AddHiddenLayer(20, new SoftSine())
        .CreateOutputLayer(1, new SoftSine())
        .Build(Rand);

    // Create Trainers (population 500 / mutation 0.025; learning rate / momentum
    // constants for backprop were presumably hand-tuned — no source for them here).
    GeneticsTrainer = new Genetics(Rand, GeneticsModelNetwork, 500, 0.025);
    BackpropTrainer = new Backpropagation(BackpropNeuralNetwork, 4.425E-4, 2.5E-14);

    // 20% of the points are earmarked as "test" data, the rest as "training".
    const int TestDataCount = (int)(TotalDataCount * 0.2);
    const int TrainingDataCount = TotalDataCount - TestDataCount;

    // Allocate the data arrays.
    TestDataInputs = new double[TestDataCount][];
    TestDataOutputs = new double[TestDataCount][];
    TotalDataInputs = new double[TotalDataCount][];
    TotalDataOutputs = new double[TotalDataCount][];
    TrainingDataInputs = new double[TrainingDataCount][];
    TrainingDataOutputs = new double[TrainingDataCount][];

    // Create All Data: each sample's input is the current x plus the four
    // preceding x values; the target is sin(x) at the current point.
    // NOTE(review): for i < 4 the "previous" x values go negative — presumably
    // acceptable as a window warm-up; confirm this is intended.
    for (int i = 0; i < TotalDataCount; i++)
    {
        double pointXValue = GraphDomain / TotalDataCount * i;
        double previousXValue = GraphDomain / TotalDataCount * (i - 1);
        double previousXValue2 = GraphDomain / TotalDataCount * (i - 2);
        double previousXValue3 = GraphDomain / TotalDataCount * (i - 3);
        double previousXValue4 = GraphDomain / TotalDataCount * (i - 4);
        TotalDataInputs[i] = new double[] { previousXValue4, previousXValue3, previousXValue2, previousXValue, pointXValue };
        TotalDataOutputs[i] = new double[] { Math.Sin(pointXValue) };
        MainGraph.Series[2].Points.AddXY(pointXValue, TotalDataOutputs[i][0]);
    }

    // Randomly sample the test set. Random.Next's upper bound is EXCLUSIVE, so the
    // original Rand.Next(0, TotalDataCount - 1) could never pick the last point;
    // use TotalDataCount so every index is reachable.
    // NOTE(review): sampling is with replacement and test/training sets can
    // overlap — looks intentional for this demo, but verify.
    for (int j = 0; j < TestDataCount; j++)
    {
        int SelectedIndex = Rand.Next(0, TotalDataCount);
        TestDataInputs[j] = TotalDataInputs[SelectedIndex];
        TestDataOutputs[j] = TotalDataOutputs[SelectedIndex];
    }

    // Randomly sample the training set (same exclusive-bound fix as above).
    for (int k = 0; k < TrainingDataCount; k++)
    {
        int SelectedIndex = Rand.Next(0, TotalDataCount);
        TrainingDataInputs[k] = TotalDataInputs[SelectedIndex];
        TrainingDataOutputs[k] = TotalDataOutputs[SelectedIndex];
    }

    // Start Training: flag checked by the worker loops, then kick both workers off.
    KeepWorking = true;
    BackpropWorker.DoWork += BackpropWorker_DoWork;
    GeneticsWorker.DoWork += GeneticsWorker_DoWork;
    BackpropWorker.RunWorkerAsync();
    GeneticsWorker.RunWorkerAsync();
}