Example #1
        private void Train()
        {
            // Layer 0: hidden neurons tagged LayerNumber == 0 (the array sizes below
            // assume 20, 64, 128 and 256 neurons per layer, in order).
            List<NeuralNetwork> hiddenLayer1 = neuralNetwork.FindAll(x => x.LayerNumber == 0);

            // Weighted sum (weights · input + bias) for each neuron in the layer.
            double[] weightedSum1 = new double[20];
            for (int j = 0; j < hiddenLayer1.Count; j++)
            {
                weightedSum1[j] = perceptron.Execute(analyserEntry.Layer1Weights[j], publicAddressDouble, hiddenLayer1[j].Bias);
            }

            // Apply Leaky ReLU element-wise to the layer's outputs.
            for (int k = 0; k < weightedSum1.Length; k++)
            {
                weightedSum1[k] = activationFunctions.LeakyReLU(weightedSum1[k]);
            }

            // Layer 1: fed by the activated outputs of layer 0.
            List<NeuralNetwork> hiddenLayer2 = neuralNetwork.FindAll(x => x.LayerNumber == 1);

            double[] weightedSum2 = new double[64];
            for (int j = 0; j < hiddenLayer2.Count; j++)
            {
                weightedSum2[j] = perceptron.Execute(analyserEntry.Layer2Weights[j], weightedSum1, hiddenLayer2[j].Bias);
            }

            for (int k = 0; k < weightedSum2.Length; k++)
            {
                weightedSum2[k] = activationFunctions.LeakyReLU(weightedSum2[k]);
            }

            // Layer 2
            List<NeuralNetwork> hiddenLayer3 = neuralNetwork.FindAll(x => x.LayerNumber == 2);

            double[] weightedSum3 = new double[128];
            for (int j = 0; j < hiddenLayer3.Count; j++)
            {
                weightedSum3[j] = perceptron.Execute(analyserEntry.Layer3Weights[j], weightedSum2, hiddenLayer3[j].Bias);
            }

            for (int k = 0; k < weightedSum3.Length; k++)
            {
                weightedSum3[k] = activationFunctions.LeakyReLU(weightedSum3[k]);
            }

            // Output layer: a binary step squashes each output to 0 or 1 before assessment.
            List<NeuralNetwork> outputLayer = neuralNetwork.FindAll(x => x.LayerNumber == 3);

            double[] weightedSum4 = new double[256];
            for (int j = 0; j < outputLayer.Count; j++)
            {
                weightedSum4[j] = perceptron.Execute(analyserEntry.Layer4Weights[j], weightedSum3, outputLayer[j].Bias);
            }

            for (int k = 0; k < weightedSum4.Length; k++)
            {
                weightedSum4[k] = activationFunctions.BinaryStep(weightedSum4[k]);
            }

            Assess(weightedSum4);
        }
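
The perceptron.Execute and activationFunctions helpers are not shown in this example. Below is a minimal sketch of what they plausibly look like, inferred from the call sites above; the class names, the optional leak slope, and the 0/1 step threshold are assumptions, not taken from the source.

        // Hypothetical reconstruction; names and signatures are inferred from the call sites.
        public class Perceptron
        {
            // weights · inputs + bias for a single neuron.
            public double Execute(double[] weights, double[] inputs, double bias)
            {
                double sum = bias;
                for (int i = 0; i < weights.Length; i++)
                {
                    sum += weights[i] * inputs[i];
                }
                return sum;
            }
        }

        public class ActivationFunctions
        {
            // Leaky ReLU: pass positives through, damp negatives by a small slope.
            public double LeakyReLU(double x, double slope = 0.01)
            {
                return x >= 0 ? x : slope * x;
            }

            // Binary step: hard threshold at zero, producing a 0/1 output.
            public double BinaryStep(double x)
            {
                return x >= 0 ? 1.0 : 0.0;
            }
        }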
Example #2
        public float activation(bool derivate = false)
        {
            // Input and bias perceptrons pass their state through unchanged.
            if (type == PerceptronType.Type.input || type == PerceptronType.Type.bias)
            {
                return state;
            }

            // Dispatch on the configured activation; sigmoid is the fallback.
            switch (activation_type)
            {
                case ActivationType.Type.relu:
                    return ActivationFunctions.RelU(state, derivate);
                case ActivationType.Type.tanh:
                    return ActivationFunctions.TanH(state, derivate);
                case ActivationType.Type.identity:
                    return ActivationFunctions.Identity(state, derivate);
                case ActivationType.Type.lrelu:
                    return ActivationFunctions.LeakyReLU(state, derivate);
                case ActivationType.Type.sigmoid:
                default:
                    return ActivationFunctions.Sigmoid(state, derivate);
            }
        }
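
A sketch of the Sigmoid helper this dispatch assumes. The derivate convention used here (returning s * (1 - s) from an already-activated state) is a common backprop shortcut, but it is an assumption; the real ActivationFunctions implementation is not shown on this page.

        public static class ActivationFunctions
        {
            // Sigmoid activation. With derivate == true this assumes `state`
            // already holds the sigmoid output s, and returns s * (1 - s).
            public static float Sigmoid(float state, bool derivate = false)
            {
                if (derivate)
                {
                    return state * (1f - state);
                }
                return 1f / (1f + (float)System.Math.Exp(-state));
            }
        }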