public void TestAndGate()
{
    // Build a 2-2-1 network for the AND truth table.
    NetworkModel model = new NetworkModel();
    model.Layers.Add(new NeuralLayer(2, "INPUT"));
    model.Layers.Add(new NeuralLayer(2, "HIDDEN"));
    model.Layers.Add(new NeuralLayer(1, "OUTPUT"));
    model.Build();

    // All four input combinations of a two-input gate.
    NeuralData X = new NeuralData(4);
    X.Add(0, 0);
    X.Add(0, 1);
    X.Add(1, 0);
    X.Add(1, 1);

    // AND targets: only (1,1) maps to 1.
    NeuralData Y = new NeuralData(4);
    Y.Add(0);
    Y.Add(0);
    Y.Add(0);
    Y.Add(1);

    // NOTE(review): the training call below is disabled, so this test only
    // exercises construction/Build — confirm whether it should run Train.
    // model.Train(X, Y, iterations: 10, learningRate: 0.1);
}
/// <summary>
/// Trains the network: for each epoch, runs a forward pass over every sample,
/// measures accuracy against <paramref name="referenceOutput"/>, stores it in
/// <c>Accuracy</c>, and hands the score to <c>OptimizeWeights</c>.
/// </summary>
/// <param name="InputData">Training samples; one double[] of features per sample.</param>
/// <param name="referenceOutput">Expected output per sample (first element is used).</param>
/// <param name="iterations">Number of training epochs to run.</param>
/// <param name="learningRate">Accepted for API compatibility.
/// NOTE(review): never used in this method — presumably OptimizeWeights should
/// consume it; confirm.</param>
public void Train(NeuralData InputData, NeuralData referenceOutput, int iterations, double learningRate = 0.1)
{
    // FIX: the original compared doubles with `==`, which almost never matches
    // once activations are non-integral, silently zeroing the accuracy.
    const double Tolerance = 1e-9;

    int epoch = 1;
    while (iterations >= epoch)
    {
        NeuralLayer inputLayer = Layers[0];
        List<double> outputs = new List<double>();

        // Forward pass over every training sample.
        for (int i = 0; i < InputData.Data.Count; i++)
        {
            // Load the sample's features into the input layer.
            for (int y = 0; y < InputData.Data[i].Length; y++)
            {
                inputLayer.Neurons[y].OutPulse.Value = InputData.Data[i][y];
            }

            ComputeOutput();
            // Single-output network: read the first neuron of the last layer.
            outputs.Add(Layers.Last().Neurons.First().OutPulse.Value);
        }

        // FIX: guard against 0/0 producing a NaN Accuracy on empty input.
        if (outputs.Count == 0)
        {
            epoch++;
            continue;
        }

        // Count predictions that match the reference output within tolerance.
        int correct = 0;
        for (int i = 0; i < outputs.Count; i++)
        {
            if (Math.Abs(outputs[i] - referenceOutput.Data[i].First()) < Tolerance)
            {
                correct++;
            }
        }

        Accuracy = (double)correct / outputs.Count;
        OptimizeWeights(Accuracy);
        epoch++;
    }
}
public static void DoTraining()
{
    // 3-input / 1-output network trained on a fixed truth table.
    NeuralNetwork TestNetwork = new NeuralNetwork();
    TestNetwork.AddLayer(new NeuralLayer(3, 0.1));
    TestNetwork.AddLayer(new NeuralLayer(1, 0.1));
    TestNetwork.Build();

    // Every combination of three binary inputs, in ascending order.
    double[][] inputs =
    {
        new double[] { 0, 0, 0 },
        new double[] { 1, 0, 0 },
        new double[] { 0, 1, 0 },
        new double[] { 1, 1, 0 },
        new double[] { 0, 0, 1 },
        new double[] { 1, 0, 1 },
        new double[] { 0, 1, 1 },
        new double[] { 1, 1, 1 },
    };
    NeuralData InputData = new NeuralData();
    foreach (double[] row in inputs)
    {
        InputData.Data.Add(row);
    }

    // Target output for each input row above.
    double[] targets = { 0, 0, 0, 1, 0, 1, 1, 1 };
    NeuralData RefOutput = new NeuralData();
    foreach (double target in targets)
    {
        RefOutput.Data.Add(new double[] { target });
    }

    TestNetwork.Train(InputData, RefOutput, iterations: 10, learningRate: 0.1);
    Console.WriteLine($"CURRENT ACCURACY: {TestNetwork.Accuracy}");
}
static void Main(string[] args)
{
    // Minimal 2-input / 1-output network trained on the AND truth table.
    Network model = new Network();
    model.Layers.Add(new Layer(2, 0.1, "INPUT"));
    model.Layers.Add(new Layer(1, 0.1, "OUTPUT"));
    model.Build();

    Console.WriteLine("----Before Training------------");
    model.Print();
    Console.WriteLine();

    // Inputs: all four combinations of two binary values.
    int[,] truthTable = { { 0, 0 }, { 0, 1 }, { 1, 0 }, { 1, 1 } };
    NeuralData X = new NeuralData(4);
    for (int row = 0; row < 4; row++)
    {
        X.Add(truthTable[row, 0], truthTable[row, 1]);
    }

    // Targets: AND — only (1,1) yields 1.
    int[] targets = { 0, 0, 0, 1 };
    NeuralData Y = new NeuralData(4);
    foreach (int target in targets)
    {
        Y.Add(target);
    }

    model.Train(X, Y, iterations: 10, learningRate: 0.1);

    Console.WriteLine();
    Console.WriteLine("----After Training------------");
    model.Print();
}
public List <double> UpdateOne(List <double> inputs)
{
    // Feed-forward pass: each layer's activations become the next layer's inputs.
    List<double> activations = new List<double>();

    for (int layerIdx = 0; layerIdx < mNeuralLayerDataList.Count; layerIdx++)
    {
        if (layerIdx > 0)
        {
            // Snapshot the previous layer's outputs before reusing the buffer.
            inputs = new List<double>(activations);
        }
        activations.Clear();

        NeuralLayerData layer = mNeuralLayerDataList[layerIdx];
        for (int neuronIdx = 0; neuronIdx < layer.mNeuralDataList.Count; neuronIdx++)
        {
            NeuralData neuron = layer.mNeuralDataList[neuronIdx];
            int weightCount = neuron._Code.Count;

            // Weighted sum of inputs; the final weight is the bias term (scaled by BISDP).
            double net = 0;
            for (int w = 0; w < weightCount - 1; w++)
            {
                net += neuron._Code[w] * inputs[w];
            }
            net += neuron._Code[weightCount - 1] * BISDP;

            neuron.mActivation = GetSigmoid(net);
            activations.Add(neuron.mActivation);
        }
    }

    // Activations of the final layer are the network's output.
    return activations;
}
// Builds a layer of `neuralNum` neurons, each sized for `intputNum` inputs.
// (Parameter spelling "intputNum" kept: it is caller-visible via named arguments.)
public NeuralLayerData(int neuralNum, int intputNum)
{
    _intputNum = intputNum;

    int created = 0;
    while (created < neuralNum)
    {
        mNeuralDataList.Add(new NeuralData(intputNum));
        created++;
    }
}
// Loads a flat weight vector into the network: each neuron consumes the next
// _Code.Count entries of `weights`, layer by layer, neuron by neuron.
// NOTE(review): `startIndex` is mutated inside the closure, so correctness
// relies on FEngineManager.SetList invoking the callback sequentially over
// the layers — confirm against its implementation.
// NOTE(review): assumes `weights` holds at least as many entries as the sum
// of all neurons' _Code counts; GetRange throws otherwise — verify callers.
public void SetWeights(List <double> weights)
{
    int startIndex = 0;
    FEngineManager.SetList(mNeuralLayerDataList, (f, index) =>
    {
        for (int i = 0; i < f.mNeuralDataList.Count; i++)
        {
            NeuralData nd = f.mNeuralDataList[i];
            // Slice out this neuron's weights and advance the shared cursor.
            nd.SetCode(weights.GetRange(startIndex, nd._Code.Count));
            startIndex += nd._Code.Count;
        }
    });
}
// Builds one random training sample: binary inputs plus binary target outputs,
// sized by the XML configuration.
private NeuralData GenerateTrainingData(Random rand)
{
    NeuralData neuralData = new NeuralData();

    // One random 0/1 value per configured input node.
    for (int i = 0; i < Globals.XmlData.Inputs; i++)
    {
        neuralData.Input.Add(rand.Next(0, 2));
    }

    // One random 0/1 target per configured output node.
    for (int i = 0; i < Globals.XmlData.Outputs; i++)
    {
        neuralData.TargetOutput.Add(rand.Next(0, 2));
    }

    return neuralData;
}
// Adjust the weights of this neuron's incoming conections, trying to reduce
// neuralData.Error below its starting value. Nudges each conection's weight
// by learningRate (down if the source fired but shouldn't have, up if it
// didn't fire but should have), recursing into the source neurons, and
// re-evaluates the network after every conection. Gives up after 10 passes
// and restores the original weights.
// NOTE(review): the loop condition uses `>=`, so it runs even when the error
// already equals the initial error — presumably intentional (try to improve
// on a tie); confirm.
public void AdjustWeights(NeuralNetwork neuralNetwork, int targetOutput, NeuralData neuralData, double learningRate)
{
    double initialError = neuralData.Error; //Store the initial Error
    // Snapshot current weights so they can be rolled back on failure.
    Queue <double> initialWeights = new Queue <double>();
    GetWeights(initialWeights);
    int count = 10; // Maximum number of full passes before giving up.
    while (neuralData.Error >= initialError)
    {
        foreach (Conection conection in Conections) //Loop through every conection
        {
            if ((conection.ConectedFrom.Output > 0 && targetOutput == 0))
            {
                //If the conected neuron fired and we wanted the neuron to not fire
                conection.weight -= learningRate;
                // Propagate the correction upstream to the source neuron.
                conection.ConectedFrom.AdjustWeights(targetOutput, learningRate);
            }
            else if ((conection.ConectedFrom.Output) == 0 && targetOutput == 1)
            {
                //If the conected neuron didn't fire and we wanted it to fire.
                conection.weight += learningRate;
                conection.ConectedFrom.AdjustWeights(targetOutput, learningRate);
            }
            // Re-run the network and re-measure after every single tweak.
            neuralData = neuralNetwork.CalculateOutputs(neuralData);
            neuralData.CalculateError();
            if (neuralData.Error < initialError)
            {
                // Improvement achieved — keep the new weights and stop.
                return;
            }
        }
        count--;
        if (count < 0)
        {
            //we were unable to improve the error, we must fix the weights
            SetWeights(initialWeights);
            neuralNetwork.CalculateOutputs(neuralData);
            neuralData.CalculateError();
            break;
        }
    }
}
// Runs one forward pass: loads the sample's inputs into the first layer,
// fires every subsequent layer, and rewrites neuralData.ActualOutput with
// the final layer's results (non-fired neurons recorded as 0).
public NeuralData CalculateOutputs(NeuralData neuralData)
{
    // Copy the sample's inputs straight onto the input layer's outputs.
    NeuralLayer inputLayer = NeuralLayers[0];
    for (int i = 0; i < inputLayer.Neurons.Count; i++)
    {
        inputLayer.Neurons[i].Output = neuralData.Input[i];
    }

    // Propagate forward through every layer after the input layer.
    for (int layer = 1; layer < NeuralLayers.Count; layer++)
    {
        NeuralLayers[layer].FireNodes();
    }

    // This method is called repeatedly, so rebuild the list from scratch.
    neuralData.ActualOutput.Clear();
    foreach (Neuron neuron in NeuralLayers.Last().Neurons)
    {
        // Positive output means the neuron fired; otherwise record 0.
        neuralData.ActualOutput.Add(neuron.Output > 0 ? neuron.Output : 0);
    }

    return neuralData;
}
// Trains the network on freshly generated random samples for the configured
// number of iterations, printing the error before and after each adjustment.
public void Train()
{
    Random rand = new Random();
    for (int i = 0; i < Globals.XmlData.TrainingIterations; i++)
    {
        // Build a random sample and run it through the network.
        NeuralData neuralData = CalculateOutputs(GenerateTrainingData(rand));

        // Report the starting state for this sample.
        double initialError = neuralData.CalculateError();
        neuralData.PrintInput();
        neuralData.PrintTarget();
        neuralData.PrintActual();
        Console.WriteLine("Initial error: {0}", Math.Round(initialError, 3));

        // Only touch the weights when this sample produced some error.
        if (initialError > 0)
        {
            Console.WriteLine("Adjusting weights.");

            // Compare each actual output against its target.
            for (int j = 0; j < neuralData.TargetOutput.Count; j++)
            {
                if (neuralData.ActualOutput[j] == neuralData.TargetOutput[j])
                {
                    continue; // This output already matches — nothing to adjust.
                }

                // The squared difference doubles as the learning rate for the fix-up.
                double rate = Math.Pow(neuralData.ActualOutput[j] - neuralData.TargetOutput[j], 2);
                NeuralLayers.Last().Neurons[j].AdjustWeights(this, neuralData.TargetOutput[j], neuralData, rate);
            }

            Console.WriteLine("New error: {0}", Math.Round(neuralData.CalculateError(), 3));
            neuralData.PrintActual();
        }

        Console.WriteLine("");
    }
}