/// <summary>
/// Factory that instantiates the activation, error and optimizer strategy
/// implementations selected by the caller and bundles them into a
/// supervised-operations object.
/// </summary>
/// <param name="act">Which activation function implementation to create.</param>
/// <param name="err">Which error (loss) function implementation to create.</param>
/// <param name="opt">Which optimizer function implementation to create.</param>
/// <returns>An <see cref="AnnBasicOperations"/> wrapping the three strategies.</returns>
/// <exception cref="System.ArgumentOutOfRangeException">
/// Thrown when an enum value has no matching implementation. Previously the
/// unmatched strategy was silently left null, deferring the failure to a
/// NullReferenceException far from the cause.
/// </exception>
public ISupervisedOperations SupervisedOperations(EActivationFunction act, EErrorFunction err, EOptimizerFunction opt)
{
    IActivationFunction activationFunction;
    switch (act)
    {
        case EActivationFunction.Sigmoid:
            activationFunction = new SigmoidFunction();
            break;
        case EActivationFunction.LeakRelu:
            activationFunction = new LeakReluFunction();
            break;
        default:
            // Fail fast instead of returning a null strategy.
            throw new System.ArgumentOutOfRangeException("act", "Unsupported activation function: " + act);
    }

    IErrorFunction errorFunction;
    switch (err)
    {
        case EErrorFunction.Dense:
            errorFunction = new DenseErrorFunction();
            break;
        case EErrorFunction.Desired:
            errorFunction = new DesiredErrorFunction();
            break;
        default:
            throw new System.ArgumentOutOfRangeException("err", "Unsupported error function: " + err);
    }

    IOptimizerFunction optimizerFunction;
    switch (opt)
    {
        case EOptimizerFunction.SGD:
            optimizerFunction = new SGDOptimizerFunction();
            break;
        default:
            throw new System.ArgumentOutOfRangeException("opt", "Unsupported optimizer function: " + opt);
    }

    return new AnnBasicOperations(activationFunction, errorFunction, optimizerFunction);
}
/// <summary>
/// Creates a layer optimizer by forwarding every argument unchanged to the
/// base-class constructor; this type adds no construction logic of its own.
/// </summary>
/// <param name="size">Number of neurons in the layer (forwarded to base).</param>
/// <param name="connections">Number of incoming connections per neuron (forwarded to base).</param>
/// <param name="learningRate">Learning-rate hyperparameter (forwarded to base).</param>
/// <param name="momentum">Momentum hyperparameter (forwarded to base).</param>
/// <param name="activationFunction">Activation function selector (forwarded to base).</param>
/// <param name="errorFunction">Error function selector (forwarded to base).</param>
/// <param name="optimizerFunction">Optimizer function selector (forwarded to base).</param>
/// <param name="learningMethod">Learning method selector (forwarded to base).</param>
public LayerOptimizer(int size, int connections, float learningRate, float momentum, EActivationFunction activationFunction, EErrorFunction errorFunction, EOptimizerFunction optimizerFunction, ELearningMethod learningMethod) : base(size, connections, learningRate, momentum, activationFunction, errorFunction, optimizerFunction, learningMethod) { }
/// <summary>
/// Resolves the derivative delegate for the given activation function.
/// Softmax is the only activation with a dedicated derivative here; every
/// other value — including Sigmoid itself — maps to the sigmoid derivative,
/// matching the original switch whose default branch aliased Sigmoid.
/// </summary>
/// <param name="DerivativeActivationFunctionName">Activation function whose derivative is requested.</param>
/// <returns>A delegate of shape (values, index) -> derivatives.</returns>
public static System.Func<decimal[], int, decimal[]> GetAppropriateDerivativeActivationFunction(EActivationFunction DerivativeActivationFunctionName)
{
    if (DerivativeActivationFunctionName == EActivationFunction.Softmax)
    {
        return Softmax_Derivative;
    }

    // Sigmoid and any unrecognized value fall back to the sigmoid derivative.
    return Sigmoid_Derivative;
}