Example #1
 // Selects this node's activation function from its string name.
 // Unrecognised names fall through silently and leave Activation unchanged.
 public void SetActivation(string activationType)
 {
     if (activationType == "tanh")
     {
         Activation = Activations.Tanh();
     }
     else if (activationType == "tanhAbs")
     {
         Activation = Activations.TanhAbs();
     }
     else if (activationType == "sigmoid")
     {
         Activation = Activations.Sigmoid();
     }
     else if (activationType == "sin")
     {
         Activation = Activations.Sin();
     }
     else if (activationType == "fract")
     {
         Activation = Activations.Fract();
     }
     else if (activationType == "rescale")
     {
         Activation = Activations.Rescale();
     }
     else if (activationType == "downscale")
     {
         Activation = Activations.Downscale();
     }
     else if (activationType == "gaussian")
     {
         Activation = Activations.Gaussian();
     }
     else if (activationType == "square")
     {
         Activation = Activations.Square();
     }
     else if (activationType == "abs")
     {
         Activation = Activations.Abs();
     }
     else if (activationType == "cos")
     {
         Activation = Activations.Cos();
     }
     else if (activationType == "linear")
     {
         Activation = Activations.Linear();
     }
     else if (activationType == "random")
     {
         Activation = Activations.Random();
     }
 }
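
The chain of string comparisons above can also be expressed as a lookup table that maps each name to a factory. The sketch below is a minimal, self-contained illustration of that pattern; the IActivation interface, the concrete activation classes, and the Node class are placeholders invented for this sketch, not types from the project above.

 using System;
 using System.Collections.Generic;

 // Placeholder activation abstraction, for this sketch only.
 public interface IActivation
 {
     double Apply(double x);
 }

 public sealed class TanhActivation : IActivation
 {
     public double Apply(double x) => Math.Tanh(x);
 }

 public sealed class SigmoidActivation : IActivation
 {
     public double Apply(double x) => 1.0 / (1.0 + Math.Exp(-x));
 }

 public class Node
 {
     // Maps each activation name to a factory that builds the matching object.
     private static readonly Dictionary<string, Func<IActivation>> Factories =
         new Dictionary<string, Func<IActivation>>(StringComparer.Ordinal)
         {
             ["tanh"] = () => new TanhActivation(),
             ["sigmoid"] = () => new SigmoidActivation(),
             // Remaining activations would be registered the same way.
         };

     public IActivation Activation { get; private set; } = new TanhActivation();

     public void SetActivation(string activationType)
     {
         // Unknown names leave the current activation unchanged,
         // matching the behaviour of the if/else chain above.
         if (Factories.TryGetValue(activationType, out var factory))
         {
             Activation = factory();
         }
     }
 }
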
Example #2
private KerasSymbol _Call(KerasSymbol x)
{
    // Dispatch on the configured activation name.
    switch (activation)
    {
        case "elu":
            return Activations.Elu(x);

        case "exp":
            return Activations.Exponential(x);

        case "hard_sigmoid":
            return Activations.HardSigmoid(x);

        case "linear":
            return Activations.Linear(x);

        case "relu":
            return Activations.Relu(x);

        case "selu":
            return Activations.Selu(x);

        case "sigmoid":
            return Activations.Sigmoid(x);

        case "softmax":
            return Activations.Softmax(x);

        case "softplus":
            return Activations.Softplus(x);

        case "softsign":
            return Activations.Softsign(x);

        case "tanh":
            return Activations.Tanh(x);

        default:
            // Unrecognised names fall back to the linear (identity) activation.
            return Activations.Linear(x);
    }
}
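
The same dispatch can also be written as a table of delegates, which keeps the name-to-function mapping data-driven and makes the linear fallback explicit. The sketch below reuses the KerasSymbol type, the activation field, and the Activations.* methods exactly as they appear in the snippet above; the _byName field, the use of a dictionary, and the assumption that System and System.Collections.Generic are imported are illustrative choices, not the library's actual implementation.

// Sketch only: assumes the surrounding class declares the `activation`
// string field and that each Activations.* method accepts a single
// KerasSymbol argument, as shown in the snippet above.
private static readonly Dictionary<string, Func<KerasSymbol, KerasSymbol>> _byName =
    new Dictionary<string, Func<KerasSymbol, KerasSymbol>>
    {
        ["elu"] = s => Activations.Elu(s),
        ["exp"] = s => Activations.Exponential(s),
        ["hard_sigmoid"] = s => Activations.HardSigmoid(s),
        ["linear"] = s => Activations.Linear(s),
        ["relu"] = s => Activations.Relu(s),
        ["selu"] = s => Activations.Selu(s),
        ["sigmoid"] = s => Activations.Sigmoid(s),
        ["softmax"] = s => Activations.Softmax(s),
        ["softplus"] = s => Activations.Softplus(s),
        ["softsign"] = s => Activations.Softsign(s),
        ["tanh"] = s => Activations.Tanh(s),
    };

private KerasSymbol _Call(KerasSymbol x)
{
    // Unknown names fall back to the linear (identity) activation,
    // matching the default branch of the switch above.
    return _byName.TryGetValue(activation, out var f) ? f(x) : Activations.Linear(x);
}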