public void ActivationTest()
{
    // Build a ReLU activation layer and materialize its Python-side object.
    Activation act = new Activation("relu");
    var obj = act.ToPython();

    // Apply softmax directly to a 2x2 array.
    var act1 = Activations.Softmax(np.array<float>(1, 2, 3, 4).reshape(2, 2));
}
public override KerasSymbol[] Invoke(KerasSymbol[] inputs, FuncArgs kwargs = null)
{
    var result = new List<KerasSymbol>();
    foreach (var input in inputs)
    {
        // Apply softmax along the configured axis to each input symbol.
        result.Add(Activations.Softmax(input, axis));
    }

    return result.ToArray();
}
private KerasSymbol _Call(KerasSymbol x)
{
    switch (activation)
    {
        case "elu":
            return Activations.Elu(x);
        case "exp":
            return Activations.Exponential(x);
        case "hard_sigmoid":
            return Activations.HardSigmoid(x);
        case "linear":
            return Activations.Linear(x);
        case "relu":
            return Activations.Relu(x);
        case "selu":
            return Activations.Selu(x);
        case "sigmoid":
            return Activations.Sigmoid(x);
        case "softmax":
            return Activations.Softmax(x);
        case "softplus":
            return Activations.Softplus(x);
        case "softsign":
            return Activations.Softsign(x);
        case "tanh":
            return Activations.Tanh(x);
        default:
            // Unknown names fall back to the linear (identity) activation.
            return Activations.Linear(x);
    }
}
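For reference, here is a minimal, self-contained sketch of what the "softmax" case computes, using plain C# arrays instead of the library's symbol types. The max-subtraction for numerical stability and the row-wise (last-axis) application are assumptions for illustration, not taken from the snippets above.

using System;
using System.Linq;

static class SoftmaxSketch
{
    // Softmax over one vector: exp(x_i - max(x)) / sum_j exp(x_j - max(x)).
    // Subtracting the max leaves the result unchanged but avoids overflow in Exp.
    public static double[] Softmax(double[] x)
    {
        double max = x.Max();
        double[] exps = x.Select(v => Math.Exp(v - max)).ToArray();
        double sum = exps.Sum();
        return exps.Select(v => v / sum).ToArray();
    }

    static void Main()
    {
        // Row-wise softmax of the 2x2 example from ActivationTest ([[1, 2], [3, 4]]):
        // the entries in each row differ by 1, so both rows give roughly [0.269, 0.731].
        foreach (var row in new[] { new double[] { 1, 2 }, new double[] { 3, 4 } })
            Console.WriteLine(string.Join(", ", Softmax(row).Select(v => v.ToString("F3"))));
    }
}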
public Tensor Forward(Tensor inputs)
{
    Tensor weights = BuildWeightTensor();
    Tensor biases = BuildBiases();

    // Linear step: inputs · Wᵀ plus the bias vector.
    Tensor transposed = weights.Transpose();
    Tensor result = inputs.Dot(transposed).VecSum(biases);

    // Apply the configured activation to the linear output.
    switch (_activation)
    {
        case ActivationType.ReLU:
            Activations.ReLU(result);
            break;
        case ActivationType.Softmax:
            Activations.Softmax(result);
            break;
    }

    return result;
}
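As a rough illustration of what this Forward pass does (inputs · Wᵀ + b followed by the activation), here is a self-contained sketch on plain double arrays; the shapes, names, and ReLU-only path are assumptions for illustration and do not reflect the library's Tensor API.

using System;

static class DenseForwardSketch
{
    // y[j] = sum_k inputs[k] * weights[j, k] + biases[j], then ReLU applied element-wise.
    public static double[] Forward(double[] inputs, double[,] weights, double[] biases)
    {
        int outDim = weights.GetLength(0);
        int inDim = weights.GetLength(1);
        var result = new double[outDim];

        for (int j = 0; j < outDim; j++)
        {
            double sum = biases[j];
            for (int k = 0; k < inDim; k++)
                sum += inputs[k] * weights[j, k];

            // ReLU: clamp negative pre-activations to zero.
            result[j] = Math.Max(0.0, sum);
        }

        return result;
    }

    static void Main()
    {
        var weights = new double[,] { { 1.0, -1.0 }, { 0.5, 0.5 } };
        var y = Forward(new[] { 2.0, 3.0 }, weights, new[] { 0.0, 1.0 });
        Console.WriteLine(string.Join(", ", y)); // -1 becomes 0 after ReLU, 3.5 stays
    }
}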