Code example #1
0
        public void ActivationTest()
        {
            // Build a ReLU activation layer from its string name and export it
            // to its Python-side representation.
            var reluLayer = new Activation("relu");
            var pythonObject = reluLayer.ToPython();

            // Apply softmax via the functional API to a 2x2 tensor
            // (four scalars reshaped into two rows of two).
            var softmaxOutput = Activations.Softmax(np.array<float>(1, 2, 3, 4).reshape(2, 2));
        }
Code example #2
0
File: Softmax.cs  Project: quakemaster/MxNet.Sharp
        /// <summary>
        /// Applies softmax (along the layer's configured <c>axis</c> field) to every
        /// input symbol, producing one output symbol per input in the same order.
        /// </summary>
        /// <param name="inputs">Symbols to transform.</param>
        /// <param name="kwargs">Unused here; accepted for interface compatibility.</param>
        /// <returns>Array of softmax-activated symbols, same length as <paramref name="inputs"/>.</returns>
        public override KerasSymbol[] Invoke(KerasSymbol[] inputs, FuncArgs kwargs = null)
        {
            // Preallocate the output array — the result count always equals the input count.
            var outputs = new KerasSymbol[inputs.Length];

            for (int i = 0; i < inputs.Length; i++)
            {
                outputs[i] = Activations.Softmax(inputs[i], axis);
            }

            return outputs;
        }
Code example #3
0
        /// <summary>
        /// Maps the layer's <c>activation</c> name string to the corresponding
        /// function in <see cref="Activations"/> and applies it to <paramref name="x"/>.
        /// Any unrecognized name falls back to the linear (identity) activation,
        /// matching the original switch's default path.
        /// </summary>
        /// <param name="x">Symbol to pass through the activation function.</param>
        /// <returns>The activated symbol.</returns>
        private KerasSymbol _Call(KerasSymbol x)
        {
            return activation switch
            {
                "elu" => Activations.Elu(x),
                "exp" => Activations.Exponential(x),
                "hard_sigmoid" => Activations.HardSigmoid(x),
                "linear" => Activations.Linear(x),
                "relu" => Activations.Relu(x),
                "selu" => Activations.Selu(x),
                "sigmoid" => Activations.Sigmoid(x),
                "softmax" => Activations.Softmax(x),
                "softplus" => Activations.Softplus(x),
                "softsign" => Activations.Softsign(x),
                "tanh" => Activations.Tanh(x),
                _ => Activations.Linear(x),
            };
        }
Code example #4
0
File: Layer.cs  Project: Skinz3/CNet
        /// <summary>
        /// Forward pass of the layer: affine transform (inputs · Wᵀ + b) followed by
        /// the configured activation, if any.
        /// </summary>
        /// <param name="inputs">Input tensor for this layer.</param>
        /// <returns>The activated output tensor.</returns>
        public Tensor Forward(Tensor inputs)
        {
            // Build weights first, then biases — preserving the original call order
            // in case either builder has side effects on layer state.
            Tensor weightTensor = BuildWeightTensor();
            Tensor biasTensor = BuildBiases();

            // Affine transform: output = inputs · Wᵀ + b
            Tensor output = inputs.Dot(weightTensor.Transpose()).VecSum(biasTensor);

            // The activation helpers' return values are ignored, so they presumably
            // modify the tensor in place — TODO confirm against Activations' implementation.
            // Any other ActivationType passes the tensor through unchanged.
            if (_activation == ActivationType.ReLU)
            {
                Activations.ReLU(output);
            }
            else if (_activation == ActivationType.Softmax)
            {
                Activations.Softmax(output);
            }

            return output;
        }