// Esempio n. 1 (Example no. 1) — 0
        /// <summary>
        /// Evaluates this perceptron's activation function on its current state.
        /// </summary>
        /// <param name="derivate">When true, the derivative of the activation function is returned instead of its value.</param>
        /// <returns>The activated state (or its derivative), or the raw state for input/bias perceptrons.</returns>
        public float activation(bool derivate = false)
        {
            // Input and bias perceptrons pass their state through untouched.
            if (type == PerceptronType.Type.input || type == PerceptronType.Type.bias)
            {
                return(state);
            }

            // Dispatch on the configured activation type; any unrecognised
            // value falls back to the sigmoid, same as the explicit case.
            switch (activation_type)
            {
                case ActivationType.Type.relu:
                    return(ActivationFunctions.RelU(state, derivate));

                case ActivationType.Type.tanh:
                    return(ActivationFunctions.TanH(state, derivate));

                case ActivationType.Type.identity:
                    return(ActivationFunctions.Identity(state, derivate));

                case ActivationType.Type.lrelu:
                    return(ActivationFunctions.LeakyReLU(state, derivate));

                case ActivationType.Type.sigmoid:
                default:
                    return(ActivationFunctions.Sigmoid(state, derivate));
            }
        }
        /// <summary>
        /// Computes this neuron's output: the dot product of the inputs with the
        /// weights (plus an optional trailing bias weight), passed through the
        /// identity activation.
        /// </summary>
        /// <param name="input">Input values; at least <c>InputCount</c> elements are read.</param>
        /// <returns>The activated weighted sum.</returns>
        public override double Process(double[] input)
        {
            // Accumulate the weighted sum of the inputs.
            double accumulator = 0;
            int index = 0;
            while (index < InputCount)
            {
                accumulator += input[index] * Weights[index];
                index++;
            }

            // The bias is stored as one extra weight past the inputs.
            if (HasConstant)
            {
                accumulator += Weights[InputCount]; //Constant Neuron
            }

            return(ActivationFunctions.Identity(accumulator));
        }