/// <summary>
/// Initializes a new instance of the <see cref="PRelu"/> class.
/// </summary>
/// <param name="alphaInitializer">Initializer for the learnable alpha (negative-slope) weights. Defaults to <see cref="Zeros"/> when null.</param>
/// <param name="alphaRegularizer">Optional regularizer applied to the alpha weights.</param>
/// <param name="alphaConstraint">Optional constraint applied to the alpha weights.</param>
/// <param name="sharedAxes">Axes along which the alpha parameters are shared.</param>
public PRelu(BaseInitializer alphaInitializer = null, BaseRegularizer alphaRegularizer = null, BaseConstraint alphaConstraint = null, params long[] sharedAxes)
    : base("prelu")
{
    AlphaInitializer = alphaInitializer ?? new Zeros();
    AlphaRegularizer = alphaRegularizer;
    AlphaConstraint = alphaConstraint;
    pos_relu = new Relu();
    neg_relu = new Relu();
}
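// Usage sketch (illustrative only, not part of the original source): constructing PRelu layers
// with the defaults and with explicit arguments. Only types that already appear above
// (Zeros, PRelu) are used; the axis values are arbitrary examples.
//
//     var prelu  = new PRelu();                               // alpha weights start at zero
//     var shared = new PRelu(new Zeros(), null, null, 1, 2);  // share alpha across axes 1 and 2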
/// <summary>
/// Maps an <see cref="ActType"/> value to a new instance of the corresponding activation layer.
/// Returns null when the value has no matching layer.
/// </summary>
internal static BaseLayer Get(ActType activationType)
{
    BaseLayer act = null;
    switch (activationType)
    {
        case ActType.ReLU: act = new Relu(); break;
        case ActType.Sigmoid: act = new Sigmoid(); break;
        case ActType.Tanh: act = new Tanh(); break;
        case ActType.Elu: act = new Elu(); break;
        case ActType.Exp: act = new Exp(); break;
        case ActType.HardSigmoid: act = new HardSigmoid(); break;
        case ActType.LeakyReLU: act = new LeakyRelu(); break;
        case ActType.PReLU: act = new PRelu(); break;
        case ActType.SeLU: act = new Selu(); break;
        case ActType.Softmax: act = new Softmax(); break;
        case ActType.Softplus: act = new Softplus(); break;
        case ActType.SoftSign: act = new Softsign(); break;
        case ActType.Linear: act = new Linear(); break;
        default: break;
    }

    return act;
}
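// Usage sketch (illustrative only): resolving a layer from the enum. Unmapped enum values fall
// through the switch above and yield null, so callers should guard against a null result.
//
//     BaseLayer act  = Get(ActType.LeakyReLU);   // returns a LeakyRelu instance
//     BaseLayer none = Get((ActType)(-1));       // no matching case -> null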