        public static void RecalculateWeights(NeuronForManipulation neuron, double eta, IList<double> inputs)
        {
            // Delta rule: w_i += eta * delta * x_i, and bias += eta * delta.
            double etaDelta = eta * neuron.Delta; // cache eta * Delta once instead of recomputing it in the loop

            for (int i = 0; i < neuron.Weights.Count; i++)
            {
                neuron.Weights[i].Weight += etaDelta * inputs[i];
            }
            neuron.Bias += etaDelta;
        }
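        // Worked example with illustrative numbers (not taken from the original code):
        // with eta = 0.5, neuron.Delta = 0.2 and inputs[i] = 1.0, the weight at index i
        // grows by 0.5 * 0.2 * 1.0 = 0.1, and the bias grows by the same 0.1.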
        public static void CalculateNeuronOutput(NeuronForManipulation neuron, ActivationFunctionType activationFunction, double Alpha, IList<double> inputs)
        {
            // Sort the weights by index so that Weights[i] is applied to inputs[i].
            neuron.Weights = neuron.Weights.OrderBy(a => a.Index).ToList();

            double outputSum = 0;

            // Weighted sum of the inputs, plus the bias.
            for (int i = 0; i < neuron.Weights.Count; i++)
            {
                outputSum += inputs[i] * neuron.Weights[i].Weight;
            }
            outputSum += neuron.Bias;

            // Apply the selected activation function to the net sum.
            neuron.Output = NeuronActiveFunctionResult(activationFunction, Alpha, outputSum);
        }
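        // In formula form: Output = f( sum_i Weights[i].Weight * inputs[i] + Bias ),
        // where f is the activation selected by activationFunction and applied by
        // NeuronActiveFunctionResult (defined elsewhere, not shown in this snippet).
        // Illustrative check: weights {0.5, -0.25}, inputs {1.0, 2.0} and bias 0.1
        // give a net sum of 0.5*1.0 + (-0.25)*2.0 + 0.1 = 0.1 before activation.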
        public static double NeuronDiffActiveFunctionResult(NeuronForManipulation neuron, ActivationFunctionType activationFunction, double Alpha)
        {
            // Derivative of the activation function, expressed through the neuron's output y.
            var diffOutput = 1d;

            switch (activationFunction)
            {
                case ActivationFunctionType.Lineal:
                    // Linear activation: f'(x) = 1.
                    break;

                case ActivationFunctionType.Tangential:
                    // Hyperbolic tangent: f'(x) = 1 - y^2 = (1 - y) * (1 + y).
                    diffOutput = (1d - neuron.Output) * (1d + neuron.Output);
                    break;

                case ActivationFunctionType.Sigmoid:
                    // Sigmoid with slope Alpha: f'(x) = Alpha * y * (1 - y).
                    diffOutput = Alpha * neuron.Output * (1d - neuron.Output);
                    break;
            }
            return diffOutput;
        }
        public static void RecalculateDelta(NeuronForManipulation neuron, ActivationFunctionType activationFunction, double Alpha, double value)
        {
            // For a hidden neuron, value is the back-propagated gradient; for an output neuron, it is the error.
            neuron.Delta = value * NeuronDiffActiveFunctionResult(neuron, activationFunction, Alpha);
        }
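        // Minimal sketch of how these helpers fit together for a single training step on one
        // output neuron. The method name is illustrative only; it assumes the neuron's weights
        // and bias were already initialized elsewhere, and uses the common
        // "error = target - output" convention, which the methods above do not fix themselves.
        public static void TrainOutputNeuronOnce(NeuronForManipulation neuron, IList<double> inputs, double target, double eta, double Alpha)
        {
            // 1. Forward pass: weighted sum plus bias, passed through the sigmoid activation.
            CalculateNeuronOutput(neuron, ActivationFunctionType.Sigmoid, Alpha, inputs);

            // 2. Output-layer error; for a hidden neuron this would be the back-propagated gradient instead.
            double error = target - neuron.Output;

            // 3. Delta = error * f'(net), then apply the delta rule to the weights and bias.
            RecalculateDelta(neuron, ActivationFunctionType.Sigmoid, Alpha, error);
            RecalculateWeights(neuron, eta, inputs);
        }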