Example #1
        public static KerasSymbol MeanSquaredLogarithmicError(KerasSymbol y_true, KerasSymbol y_pred)
        {
            var first_log  = K.Log(K.Clip(y_pred, K.Epsilon(), null) + 1);
            var second_log = K.Log(K.Clip(y_true, K.Epsilon(), null) + 1);

            return(K.Mean(K.Square(first_log - second_log), axis: -1));
        }
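For reference, this is the mean squared logarithmic error: predictions and targets are clipped from below at K.Epsilon() before the logarithm, and the squared difference of the shifted logs is averaged over the last axis. A sketch of the formula the code implements:

    \mathrm{MSLE}(y, \hat{y}) = \frac{1}{n} \sum_{i} \big( \log(\hat{y}_i + 1) - \log(y_i + 1) \big)^2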
Example #2
        public override KerasSymbol[] Invoke(KerasSymbol[] inputs, FuncArgs kwargs = null)
        {
            List <KerasSymbol> result = new List <KerasSymbol>();

            foreach (var input in inputs)
            {
                if (this.data_format == "channels_first")
                {
                    // Ensure works for any dim
                    var permutation = new List <int> {
                        0
                    };
                    permutation.AddRange((from i in Enumerable.Range(2, K.NDim(input) - 2)
                                          select i).ToList());
                    permutation.Add(1);
                    result.Add(K.PermuteDimensions(input, new Shape(permutation)));
                }
                else
                {
                    result.Add(K.BatchFlatten(input));
                }
            }

            return(result.ToArray());
        }
Example #3
        public static KerasSymbol Logcosh(KerasSymbol y_true, KerasSymbol y_pred)
        {
            var x        = y_pred - y_true;
            var _logcosh = x + K.Softplus(-2 * x) - (float)Math.Log(2);

            return(K.Mean(_logcosh, axis: -1));
        }
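The expression above uses the standard log-cosh identity, which avoids overflow for large |x| by routing the exponential through softplus:

    \log\cosh(x) = x + \log\!\left(1 + e^{-2x}\right) - \log 2 = x + \mathrm{softplus}(-2x) - \log 2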
Example #4
        public override KerasSymbol Call(Shape shape, DType dtype = null)
        {
            var(fan_in, fan_out) = _compute_fans(shape);
            var scale = this.scale;

            if (this.mode == "fan_in")
            {
                scale /= Math.Max(1, fan_in);
            }
            else if (this.mode == "fan_out")
            {
                scale /= Math.Max(1, fan_out);
            }
            else
            {
                scale /= Math.Max(1, (fan_in + fan_out) / 2);
            }
            if (this.distribution == "normal")
            {
                // 0.879... = scipy.stats.truncnorm.std(a=-2, b=2, loc=0., scale=1.)
                float stddev = (float)Math.Sqrt(scale) / 0.8796256610342398f;
                return(K.TruncatedNormal(shape, 0, stddev, dtype: dtype, seed: this.seed));
            }
            else
            {
                float limit = (float)Math.Sqrt(3.0 * scale);
                return(K.RandomUniform(shape, -limit, limit, dtype: dtype, seed: this.seed));
            }
        }
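As the inline comment notes, 0.8796... is the standard deviation of a unit normal truncated to [-2, 2]; dividing by it makes the truncated draw come out with an effective standard deviation of sqrt(scale). Roughly:

    \sigma_{\text{draw}} = \frac{\sqrt{\mathrm{scale}}}{\sigma_{\mathcal{N}_{[-2,2]}}}, \qquad \sigma_{\mathcal{N}_{[-2,2]}} \approx 0.87963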
Example #5
        internal override Shape GetNoiseShape(KerasSymbol inputs)
        {
            var input_shape = K.Shape(inputs);
            var noise_shape = new Shape(input_shape[0], 1, input_shape[2]);

            return(noise_shape);
        }
Example #6
        public static KerasSymbol Selu(KerasSymbol x)
        {
            var alpha = 1.6732632423543772f;
            var scale = 1.0507009873554805f;

            return(scale * K.Elu(x, alpha));
        }
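The two constants are the fixed SELU parameters from the self-normalizing networks literature. Assuming K.Elu follows the usual ELU definition (x for x > 0, alpha*(e^x - 1) otherwise), the activation amounts to:

    \mathrm{selu}(x) = \lambda \cdot \begin{cases} x & x > 0 \\ \alpha (e^{x} - 1) & x \le 0 \end{cases}, \qquad \alpha \approx 1.67326,\ \lambda \approx 1.05070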
Example #7
        public static KerasSymbol CategoricalHinge(KerasSymbol y_true, KerasSymbol y_pred)
        {
            var pos = K.Sum(y_true * y_pred, axis: -1);
            var neg = K.Max((1 - y_true) * y_pred, axis: -1);

            return(K.Maximum(0, neg - pos + 1));
        }
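Written out for one-hot targets y, the categorical hinge loss computed above is:

    L(y, \hat{y}) = \max\!\Big(0,\ 1 + \max_{j}\big((1 - y_j)\,\hat{y}_j\big) - \sum_{j} y_j\,\hat{y}_j\Big)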
Example #8
        public static KerasSymbol[] CreateInput(
            Shape shape        = null,
            Shape batch_shape  = null,
            string name        = null,
            DType dtype        = null,
            bool sparse        = false,
            KerasSymbol tensor = null)
        {
            if (batch_shape == null && tensor == null)
            {
                Debug.Assert(shape != null, "Please provide to Input either a `shape` or a `batch_shape` argument. Note that `shape` does not include the batch dimension.");
            }

            if (shape != null && batch_shape != null)
            {
                var list = shape.Data.ToList();
                list.Insert(0, 0);
                batch_shape = new Shape(list);
            }

            if (dtype == null)
            {
                dtype = K.FloatX();
            }

            var input_layer = new InputLayer(batch_input_shape: batch_shape, name: name, dtype: dtype, sparse: sparse, input_tensor: tensor);
            // Return tensor including _keras_shape and _keras_history.
            // Note that in this case train_output and test_output are the same pointer.
            var outputs = input_layer._inbound_nodes[0].output_tensors;

            return(outputs);
        }
Example #9
        public override KerasSymbol Call(KerasSymbol w)
        {
            var norms   = K.Sqrt(K.Sum(K.Square(w), axis: this.axis, keepdims: true));
            var desired = K.Clip(norms, 0, this.max_value);

            w *= desired / (K.Epsilon() + norms);
            return(w);
        }
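This is a max-norm constraint: the L2 norm along `axis` is clipped to `max_value` (c below) and the weights are rescaled accordingly, roughly:

    w \leftarrow w \cdot \frac{\mathrm{clip}(\lVert w \rVert_2,\ 0,\ c)}{\lVert w \rVert_2 + \varepsilon}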
Example #10
        public override KerasSymbol[] Invoke(KerasSymbol[] inputs, FuncArgs kwargs = null)
        {
            List <KerasSymbol> result = new List <KerasSymbol>();

            foreach (var input in inputs)
            {
                result.Add(input * K.Cast(K.Greater(input, this.theta), K.FloatX()));
            }

            return(result.ToArray());
        }
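With theta the threshold, the layer passes a value through only where it exceeds theta and zeroes it otherwise:

    f(x) = x \cdot \mathbf{1}[x > \theta]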
Example #11
        public override KerasSymbol[] Invoke(KerasSymbol[] inputs, FuncArgs kwargs = null)
        {
            List <KerasSymbol> result = new List <KerasSymbol>();

            foreach (var input in inputs)
            {
                result.Add(K.Relu(input, alpha));
            }

            return(result.ToArray());
        }
Example #12
        public override KerasSymbol[] Invoke(KerasSymbol[] inputs, FuncArgs kwargs = null)
        {
            List <KerasSymbol> result = new List <KerasSymbol>();

            foreach (var input in inputs)
            {
                var maskValue    = K.Constant(mask_value, input.DType, input.Shape);
                var boolean_mask = K.Any(K.NotEqual(input, maskValue), axis: -1, keepdims: true);
                result.Add(input * K.Cast(boolean_mask, K.DataType(input)));
            }

            return(result.ToArray());
        }
Example #13
        public override KerasSymbol[] Invoke(KerasSymbol[] inputs, FuncArgs kwargs = null)
        {
            List <KerasSymbol> result = new List <KerasSymbol>();

            foreach (var input in inputs)
            {
                result.Add(
                    K.Relu(input, alpha: this.negative_slope, max_value: this.max_value, threshold: this.threshold)
                    );
            }

            return(result.ToArray());
        }
Example #14
        public override KerasSymbol[] ComputeMask(KerasSymbol[] inputs, KerasSymbol[] mask = null)
        {
            List <KerasSymbol> result = new List <KerasSymbol>();

            foreach (var input in inputs)
            {
                var maskValue   = K.Constant(mask_value, input.DType, input.Shape);
                var output_mask = K.Any(K.NotEqual(input, maskValue), axis: -1);
                result.Add(output_mask);
            }

            return(result.ToArray());
        }
Example #15
        public override KerasSymbol[] Invoke(KerasSymbol[] inputs, FuncArgs kwargs = null)
        {
            var shape = this.dims.Data.ToList();

            shape.Insert(0, 0);
            List <KerasSymbol> result = new List <KerasSymbol>();

            foreach (var input in inputs)
            {
                result.Add(K.PermuteDimensions(input, new Shape(shape)));
            }

            return(result.ToArray());
        }
Example #16
        public override KerasSymbol Call(KerasSymbol x)
        {
            KerasSymbol regularization = null;

            if (this.l1 > 0)
            {
                regularization += K.Sum(this.l1 * K.Abs(x), null);
            }
            if (this.l2 > 0)
            {
                regularization += K.Sum(this.l2 * K.Square(x), null);
            }

            return(regularization);
        }
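The accumulated penalty is the usual combined L1/L2 regularization term, with l1 and l2 the layer's coefficients:

    R(x) = l_1 \sum_i \lvert x_i \rvert + l_2 \sum_i x_i^2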
Example #17
        public override KerasSymbol[] Invoke(KerasSymbol[] inputs, FuncArgs kwargs = null)
        {
            List <KerasSymbol> result = new List <KerasSymbol>();

            foreach (var input in inputs)
            {
                var pos = K.Relu(input);
                var neg = -this.alpha * K.Relu(-input);

                result.Add(pos + neg);
            }

            return(result.ToArray());
        }
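Splitting the input into its positive and negative parts gives the familiar leaky/parametric ReLU form:

    f(x) = \mathrm{relu}(x) - \alpha\,\mathrm{relu}(-x) = \begin{cases} x & x > 0 \\ \alpha x & x \le 0 \end{cases}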
Example #18
        internal override Shape GetNoiseShape(KerasSymbol inputs)
        {
            Shape noise_shape = null;
            var   input_shape = K.Shape(inputs);

            if (this.data_format == "channels_first")
            {
                noise_shape = new Shape(input_shape[0], input_shape[1], 1, 1);
            }
            else
            {
                noise_shape = new Shape(input_shape[0], 1, 1, input_shape[3]);
            }

            return(noise_shape);
        }
Example #19
        public Shape ComputeOutputShape(Shape input_shape)
        {
            int num_samples;

            if (this._output_shape == null)
            {
                var x = K.Placeholder(shape: input_shape);
                x = this.Invoke(new KerasSymbol[] { x }, null)[0];
                return(x.Shape);
            }
            else
            {
                num_samples = input_shape[0];
                var shape = _output_shape;
                shape.Insert(0, num_samples);
                return(shape);
            }
        }
Example #20
        internal virtual Shape GetNoiseShape(KerasSymbol inputs)
        {
            if (this.noise_shape == null)
            {
                return(this.noise_shape);
            }

            var symbolic_shape = K.Shape(inputs);

            for (int axis = 0; axis < noise_shape.Dimension; axis++)
            {
                var shape = noise_shape[axis];
                if (shape <= 0)
                {
                    noise_shape[axis] = symbolic_shape[axis];
                }
            }

            return(noise_shape);
        }
Example #21
        public Sequential(Layer[] layers = null, string name = "", Context context = null, string kvstore = "device")
            : base(name, context, kvstore)
        {
            if (string.IsNullOrWhiteSpace(name))
            {
                var prefix = this.GetType().Name.ToLower();
                this.name = prefix + "_" + K.GetUid(prefix).ToString();
            }

            this.batch_input_shape = null;
            // Add to the model any layers passed to the constructor.

            if (layers != null)
            {
                foreach (var layer in layers)
                {
                    this.Add(layer);
                }
            }
        }
Example #22
        public override KerasSymbol[] Invoke(KerasSymbol[] inputs, FuncArgs kwargs = null)
        {
            List <KerasSymbol> result = new List <KerasSymbol>();
            bool training             = kwargs.Get <bool>("training");

            foreach (var input in inputs)
            {
                if ((0 < this.rate) && (this.rate < 1.0))
                {
                    // Resolve the noise shape before building the closure so the
                    // dropout function captures the computed shape for this input.
                    var noise_shape = this.GetNoiseShape(input);
                    Func <KerasSymbol> dropped_inputs = () => {
                        return(K.Dropout(input, this.rate, noise_shape, seed: this.seed));
                    };

                    result.Add(K.InTrainPhase(dropped_inputs, input, training: training));
                }
                else
                {
                    // A rate outside (0, 1) leaves the input unchanged.
                    result.Add(input);
                }
            }

            return(result.ToArray());
        }
Example #23
        public static NDArray StandardizeSingleArray(NDArray x)
        {
            if (x == null)
            {
                return(null);
            }
            else if (K.IsTensor(x))
            {
                var shape = x.Shape;
                if (shape == null || shape[0] == 0)
                {
                    throw new Exception(String.Format("When feeding symbolic tensors to a model, we expect the tensors to have a static batch size. Got tensor with shape: {0}", shape.ToString()));
                }
                return(x);
            }
            else if (x.Dimension == 1)
            {
                x = nd.ExpandDims(x, 1);
            }

            return(x);
        }
Example #24
        public override KerasSymbol[] Invoke(KerasSymbol[] inputs, FuncArgs kwargs = null)
        {
            List <KerasSymbol> result = new List <KerasSymbol>();

            foreach (var input in inputs)
            {
                var output = K.Dot(input, this.kernel);
                if (this.use_bias)
                {
                    output = K.BiasAdd(output, this.bias, data_format: "channels_last");
                }

                result.Add(output);
            }

            if (this.activation != null)
            {
                return(this.activation.Invoke(result.ToArray()));
            }

            return(result.ToArray());
        }
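This is the standard dense-layer computation: a dot product with the kernel, an optional bias add, then the optional activation:

    \mathrm{output} = \sigma(x W + b)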
Example #25
        public override void Build(Shape input_shape = null)
        {
            if (input_shape != null && (this.inputs == null || this.inputs.Count == 0))
            {
                var batch_shape = input_shape;
                var dtype       = K.FloatX();
                var x           = InputLayer.CreateInput(batch_shape: batch_shape, dtype: dtype, name: this.name + "_input");
                this.inputs = x.ToList();
                foreach (var layer in this._layers)
                {
                    x = layer.Invoke(x, null);
                }

                this.outputs            = x.ToList();
                this._build_input_shape = input_shape;
            }

            if (this.inputs != null && this.inputs.Count > 0)
            {
                this.InitGraphNetwork(this.inputs.ToArray(), this.outputs.ToArray(), name: this.name);
                this.built = true;
            }
        }
Example #26
        public static KerasSymbol Softmax(KerasSymbol x, int axis = -1)
        {
            var ndim = K.NDim(x);

            if (ndim == 2)
            {
                return(K.Softmax(x));
            }
            else if (ndim > 2)
            {
                var e = K.Exp(x - K.Max(x, axis: axis, keepdims: true));
                var s = K.Sum(e, axis: axis, keepdims: true);
                return(e / s);
            }
            else if (ndim == 0)
            {
                // x dim is not inferred yet
                return(K.Softmax(x));
            }
            else
            {
                throw new Exception($"Cannot apply softmax to a tensor that is 1D. Received input: {x}");
            }
        }
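For ndim > 2 the code applies the numerically stable softmax along `axis`, subtracting the per-axis maximum before exponentiating:

    \mathrm{softmax}(x)_i = \frac{e^{x_i - \max_j x_j}}{\sum_k e^{x_k - \max_j x_j}}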
Example #27
 public static KerasSymbol Exponential(KerasSymbol x)
 {
     return(K.Exp(x));
 }
Example #28
 public static KerasSymbol HardSigmoid(KerasSymbol x)
 {
     return(K.HardSigmoid(x));
 }
Example #29
 public static KerasSymbol Sigmoid(KerasSymbol x)
 {
     return(K.Sigmoid(x));
 }
Example #30
 public static KerasSymbol Tanh(KerasSymbol x)
 {
     return(K.Tanh(x));
 }