/// <summary>
/// Computes and caches this neuron's error gradient for backpropagation.
/// With no <paramref name="target"/>, treats the neuron as hidden and sums
/// downstream gradients weighted by the connecting axons; with a target,
/// treats it as an output neuron and uses the error against the target.
/// </summary>
/// <param name="target">Expected output value, or null for a hidden neuron.</param>
/// <returns>The freshly computed gradient (also stored in <c>gradient</c>).</returns>
public double Gradient(double? target = null)
{
    // Output-layer case: gradient is the error scaled by the sigmoid slope.
    if (target is not null)
    {
        gradient = (target.Value - value) * Activate.sigmoidDerivative(value);
        return gradient;
    }

    // Hidden-layer case: accumulate gradient contributions from downstream neurons.
    double downstream = Axons.Sum(a => a.targetNeuron.gradient * a.Weight);
    gradient = downstream * Activate.sigmoidDerivative(value);
    return gradient;
}
/// <summary>
/// Applies the requested activation function to <paramref name="x"/>.
/// </summary>
/// <param name="x">The raw (pre-activation) input value.</param>
/// <param name="_tF">Activator to use; when null, falls back to the neuron's own <c>tF</c>.</param>
/// <returns>The activated value, or -1 when the activator is unrecognized.</returns>
public double activate(double x, ActivatorFunc? _tF)
{
    // Per-call override wins; otherwise use the configured default activator.
    ActivatorFunc chosen = _tF ?? tF;
    return chosen switch
    {
        ActivatorFunc.Sigmoid => Activate.Sigmoid(x),
        ActivatorFunc.TanHyperbolic => Activate.Tanh(x),
        ActivatorFunc.Identity => Activate.Identity(x),
        ActivatorFunc.ReLU => Activate.ReLU(x),
        ActivatorFunc.SoftStep => Activate.SoftStep(x),
        _ => -1, // unknown activator: preserve the original -1 sentinel
    };
}
/// <summary>
/// Forward pass for this neuron: sums the weighted values of all upstream
/// (dendrite-connected) neurons plus the bias, squashes the result through
/// the sigmoid, caches it in <c>value</c>, and returns it.
/// </summary>
/// <returns>The neuron's new activation value.</returns>
public double Value()
{
    // Net input = bias + sum of (upstream activation * connection weight).
    double net = bias + Dendrites.Sum(d => d.sourceNeuron.value * d.Weight);
    value = Activate.Sigmoid(net);
    return value;
}