        // Maps an ActivationType value to a new instance of the corresponding activation layer.
        internal static BaseLayer Get(ActivationType activationType)
        {
            BaseLayer act = null;

            switch (activationType)
            {
            case ActivationType.ReLU:
                act = new Relu();
                break;

            case ActivationType.Sigmoid:
                act = new Sigmoid();
                break;

            case ActivationType.Tanh:
                act = new Tanh();
                break;

            case ActivationType.Elu:
                act = new Elu();
                break;

            case ActivationType.Exp:
                act = new Exp();
                break;

            case ActivationType.HardSigmoid:
                act = new HardSigmoid();
                break;

            case ActivationType.LeakyReLU:
                act = new LeakyRelu();
                break;

            case ActivationType.PReLU:
                act = new PRelu();
                break;

            case ActivationType.SeLU:
                act = new Selu();
                break;

            case ActivationType.Softmax:
                act = new Softmax();
                break;

            case ActivationType.Softplus:
                act = new Softplus();
                break;

            case ActivationType.SoftSign:
                act = new Softsign();
                break;

            default:
                // Unrecognized activation types fall through, so the method returns null.
                break;
            }

            return act;
        }
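For comparison, here is a minimal standalone sketch of the same lookup written as a switch expression (C# 8+). The ActivationType, BaseLayer, Relu, and Sigmoid definitions below are hypothetical stand-ins for the SiaNet types, included only so the sketch compiles on its own; unknown values return null, matching the default branch above.

using System;

// Hypothetical stand-ins for the SiaNet types, so this sketch compiles on its own.
public enum ActivationType { ReLU, Sigmoid }
public abstract class BaseLayer { }
public class Relu : BaseLayer { }
public class Sigmoid : BaseLayer { }

public static class ActivationFactorySketch
{
    // Same mapping as the switch statement above; unknown values yield null.
    public static BaseLayer Get(ActivationType activationType) =>
        activationType switch
        {
            // The cast gives the switch expression its BaseLayer result type.
            ActivationType.ReLU    => (BaseLayer)new Relu(),
            ActivationType.Sigmoid => new Sigmoid(),
            _ => null
        };

    public static void Main()
    {
        BaseLayer act = Get(ActivationType.ReLU);
        Console.WriteLine(act?.GetType().Name);   // prints "Relu"
    }
}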
Example #2
File: PRelu.cs  Project: jeason0813/SiaNet
 public PRelu(BaseInitializer alphaInitializer = null, BaseRegularizer alphaRegularizer = null, BaseConstraint alphaConstraint = null, params long[] sharedAxes)
     : base("prelu")
 {
     // Default the alpha initializer to zeros when the caller supplies none.
     AlphaInitializer = alphaInitializer ?? new Zeros();
     AlphaRegularizer = alphaRegularizer;
     AlphaConstraint  = alphaConstraint;
     // PReLU is typically computed as relu(x) - alpha * relu(-x), hence the two ReLU helpers.
     pos_relu         = new Relu();
     neg_relu         = new Relu();
 }
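The key line in this constructor is alphaInitializer ?? new Zeros(): callers that pass no initializer get an all-zeros alpha by default. Below is a self-contained sketch of that null-coalescing default, using hypothetical stand-in classes rather than SiaNet's actual hierarchy.

using System;

// Hypothetical stand-ins; not SiaNet's actual class hierarchy.
public abstract class BaseInitializer { }
public class Zeros : BaseInitializer { }
public class GlorotUniform : BaseInitializer { }

public class PReluSketch
{
    public BaseInitializer AlphaInitializer { get; }

    // Mirrors the constructor above: a null argument falls back to Zeros.
    public PReluSketch(BaseInitializer alphaInitializer = null)
    {
        AlphaInitializer = alphaInitializer ?? new Zeros();
    }

    public static void Main()
    {
        Console.WriteLine(new PReluSketch().AlphaInitializer.GetType().Name);                    // Zeros
        Console.WriteLine(new PReluSketch(new GlorotUniform()).AlphaInitializer.GetType().Name); // GlorotUniform
    }
}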