/// <summary>
/// Runs one full pass over <c>dataSet</c>, feeding each sample forward through the
/// four network layers (layer numbers 0-2 hidden with LeakyReLU, layer 3 output with
/// BinaryStep) and handing the final activations to <c>Assess</c>.
/// </summary>
private void Train()
{
    for (int i = 0; i < dataSet.Count; i++)
    {
        // NOTE(review): the input is dataSet[i].PublicAddressDouble — presumably a
        // pre-encoded numeric vector; confirm its length matches the layer-0 weight count.
        double[] hidden1 = ComputeLayer(0, dataSet[i].PublicAddressDouble, 32, activationFunctions.LeakyReLU);
        double[] hidden2 = ComputeLayer(1, hidden1, 64, activationFunctions.LeakyReLU);
        double[] hidden3 = ComputeLayer(2, hidden2, 128, activationFunctions.LeakyReLU);
        double[] output = ComputeLayer(3, hidden3, 256, activationFunctions.BinaryStep);

        Assess(output, i);
    }
}

/// <summary>
/// Computes one layer of the forward pass: gathers the neurons whose
/// <c>LayerNumber</c> equals <paramref name="layerNumber"/>, evaluates each
/// perceptron against <paramref name="inputs"/>, then applies
/// <paramref name="activation"/> element-wise to the whole output buffer.
/// </summary>
/// <param name="layerNumber">Layer index used to select neurons from <c>neuralNetwork</c>.</param>
/// <param name="inputs">Activations from the previous layer (or the raw sample for layer 0).</param>
/// <param name="outputSize">Fixed buffer size for this layer's output (kept identical to the
/// original hard-coded sizes 32/64/128/256 to preserve behavior).</param>
/// <param name="activation">Activation applied to every slot of the output buffer.</param>
/// <returns>The activated output vector of length <paramref name="outputSize"/>.</returns>
private double[] ComputeLayer(int layerNumber, double[] inputs, int outputSize, System.Func<double, double> activation)
{
    // The lookup stays inside the per-sample loop (called per sample) because Assess
    // is not visible from here and may mutate the network between samples — hoisting
    // it would be an unverified behavior change. TODO(review): hoist if Assess only
    // updates weights in place.
    List<NeuralNetwork> layer = neuralNetwork.FindAll(x => x.LayerNumber == layerNumber);

    double[] weightedSums = new double[outputSize];
    for (int j = 0; j < layer.Count; j++)
    {
        weightedSums[j] = perceptron.Execute(layer[j].Weights, inputs, layer[j].Bias);
    }

    // NOTE(review): activation runs over the full buffer, not just layer.Count slots,
    // matching the original code. If layer.Count < outputSize the trailing zeros are
    // also activated (BinaryStep(0) may produce 1s) — confirm counts always match the
    // hard-coded sizes.
    for (int k = 0; k < weightedSums.Length; k++)
    {
        weightedSums[k] = activation(weightedSums[k]);
    }

    return weightedSums;
}