Example #1
        public double Gradient(double? target = null)
        {
            // Hidden neuron: back-propagate the weighted gradients of downstream neurons.
            if (target == null)
            {
                return gradient = Axons.Sum(a => a.targetNeuron.gradient * a.Weight) * Activate.sigmoidDerivative(value);
            }

            // Output neuron: gradient is the output error scaled by the sigmoid derivative.
            return gradient = (target.Value - value) * Activate.sigmoidDerivative(value);
        }
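The Activate helper referenced above is not part of this example. A minimal sketch consistent with these calls; since sigmoidDerivative is applied to the neuron's already-activated value, it is assumed here to take the sigmoid's output y rather than the raw input:

    // Hypothetical sketch of the Activate helper these examples assume.
    // sigmoidDerivative takes the already-activated output y = Sigmoid(x),
    // as the calls above imply.
    using System;

    public static class Activate
    {
        public static double Sigmoid(double x) => 1.0 / (1.0 + Math.Exp(-x));

        // Derivative of the sigmoid expressed in terms of its output y.
        public static double sigmoidDerivative(double y) => y * (1.0 - y);
    }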
Example #2
        public double activate(double x, ActivatorFunc? _tF)
        {
            // Fall back to the neuron's configured activator when none is passed in.
            ActivatorFunc fn = _tF ?? tF;

            switch (fn)
            {
            case ActivatorFunc.Sigmoid:
                return Activate.Sigmoid(x);

            case ActivatorFunc.TanHyperbolic:
                return Activate.Tanh(x);

            case ActivatorFunc.Identity:
                return Activate.Identity(x);

            case ActivatorFunc.ReLU:
                return Activate.ReLU(x);

            case ActivatorFunc.SoftStep:
                return Activate.SoftStep(x);

            default:
                return -1; // Sentinel for an unrecognized activator.
            }
        }
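The ActivatorFunc enum is not shown on this page; a declaration inferred from the switch cases above would be:

    // Hypothetical declaration inferred from the switch cases above.
    public enum ActivatorFunc
    {
        Sigmoid,
        TanHyperbolic,
        Identity,
        ReLU,
        SoftStep
    }

Passing null for _tF uses the neuron's configured activator, while passing an explicit value such as ActivatorFunc.ReLU overrides it for a single call.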
Example #3
        public double Value()
        {
            // Forward pass: weighted sum of incoming values plus bias, squashed through the sigmoid.
            return value = Activate.Sigmoid(Dendrites.Sum(a => a.Weight * a.sourceNeuron.value) + bias);
        }
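Taken together, these methods implement a standard sigmoid neuron: Value() computes Sigmoid(sum(w_i * x_i) + bias) on the forward pass, and Gradient() computes the delta used by backpropagation. A standalone sketch of the same arithmetic, using hypothetical inputs, weights, bias, and target:

    // Standalone sketch of the forward/backward arithmetic above; the
    // inputs, weights, bias, and target are hypothetical example values.
    using System;
    using System.Linq;

    class NeuronMathSketch
    {
        static double Sigmoid(double x) => 1.0 / (1.0 + Math.Exp(-x));

        static void Main()
        {
            double[] inputs  = { 0.5, -0.2 };
            double[] weights = { 0.8,  0.4 };
            double bias = 0.1;

            // Forward pass: value = Sigmoid(sum(w_i * x_i) + bias).
            double value = Sigmoid(inputs.Zip(weights, (x, w) => x * w).Sum() + bias);

            // Output-neuron gradient: (target - value) * value * (1 - value).
            double target = 1.0;
            double gradient = (target - value) * value * (1.0 - value);

            Console.WriteLine($"value={value:F4}, gradient={gradient:F4}");
        }
    }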