// Noise shape for 1D spatial dropout: broadcast the dropout mask across
// the time-step axis (axis 1) so each channel is kept or dropped whole.
internal override Shape GetNoiseShape(KerasSymbol inputs)
{
    var input_shape = K.Shape(inputs);
    // CONSISTENCY FIX: construct the Shape explicitly instead of returning
    // a ValueTuple and relying on an implicit conversion — matches the
    // sibling channels-last branch of the 2D variant (`new Shape(...)`).
    return new Shape(input_shape[0], 1, input_shape[2]);
}
/// <summary>
/// Instantiates a Keras input tensor (functional-API `Input`).
/// Creates an <see cref="InputLayer"/> and returns its output tensors,
/// which carry `_keras_shape` and `_keras_history`.
/// </summary>
public static KerasSymbol[] CreateInput( Shape shape = null, Shape batch_shape = null, string name = null, DType dtype = null, bool sparse = false, KerasSymbol tensor = null)
{
    if (batch_shape == null && tensor == null)
    {
        Debug.Assert(shape != null, "Please provide to Input either a `shape` or a `batch_shape` argument. Note that `shape` does not include the batch dimension.");
    }

    // BUG FIX: derive batch_shape from shape only when batch_shape was NOT
    // supplied. The original condition (`batch_shape != null`) overwrote an
    // explicitly-passed batch_shape and never built one when it was missing.
    if (shape != null && batch_shape == null)
    {
        var list = shape.Data.ToList();
        list.Insert(0, 0); // 0 marks an unknown (variable) batch dimension
        batch_shape = new Shape(list);
    }

    if (dtype == null)
    {
        dtype = K.FloatX();
    }

    var input_layer = new InputLayer(batch_input_shape: batch_shape, name: name, dtype: dtype, sparse: sparse, input_tensor: tensor);

    // Return tensor including _keras_shape and _keras_history.
    // Note that in this case train_output and test_output are the same pointer.
    var outputs = input_layer._inbound_nodes[0].output_tensors;
    return outputs;
}
// Max-norm constraint: rescales w so that its L2 norm along `axis` never
// exceeds max_value (epsilon guards against division by zero).
public override KerasSymbol Call(KerasSymbol w)
{
    var squared_sum = K.Sum(K.Square(w), axis: this.axis, keepdims: true);
    var norms = K.Sqrt(squared_sum);
    var clipped = K.Clip(norms, 0, this.max_value);
    return w * (clipped / (K.Epsilon() + norms));
}
// Wraps an arbitrary symbol-to-symbol function as a layer. A non-null mask
// enables masking support; arguments default to an empty FuncArgs.
public Lambda(Func<KerasSymbol, FuncArgs, KerasSymbol> function, Shape output_shape = null, KerasSymbol mask = null, FuncArgs arguments = null)
{
    this.function = function;
    this.arguments = arguments ?? new FuncArgs();
    this.mask = mask;
    this._output_shape = output_shape;
    if (mask != null)
    {
        this.supports_masking = true;
    }
}
// Computes the combined L1/L2 regularization penalty for tensor x.
// Each active term is summed over all elements of x (axis: null).
public override KerasSymbol Call(KerasSymbol x)
{
    KerasSymbol regularization = null;
    // NOTE(review): accumulation starts from null — this relies on the
    // KerasSymbol '+' operator tolerating a null left operand, and the
    // method returns null when both l1 and l2 are <= 0. Confirm callers
    // handle a null penalty.
    if (this.l1 > 0)
    {
        regularization += K.Sum(this.l1 * K.Abs(x), null);
    }
    if (this.l2 > 0)
    {
        regularization += K.Sum(this.l2 * K.Square(x), null);
    }
    return (regularization);
}
// Noise shape for 2D spatial dropout: the mask is broadcast over both
// spatial axes so entire feature maps are dropped together; the channel
// axis position depends on the data format.
internal override Shape GetNoiseShape(KerasSymbol inputs)
{
    var dims = K.Shape(inputs);
    return this.data_format == "channels_first"
        ? new Shape(dims[0], dims[1], 1, 1)
        : new Shape(dims[0], 1, 1, dims[3]);
}
/// <summary>
/// Walks a tensor's `_keras_history` graph backwards and returns the
/// Input-layer tensors it ultimately derives from, de-duplicated.
/// </summary>
public static KerasSymbol[] GetSourceInputs(KerasSymbol tensor, Layer layer = null, int? node_index = null)
{
    if (tensor._keras_history == null)
    {
        return new KerasSymbol[] { tensor };
    }

    // BUG FIX: fall back to the tensor's recorded history only when the
    // caller did NOT supply a node index. The original tested
    // `node_index.HasValue` (no negation), which overwrote the
    // caller-provided layer/node_index on every recursive call.
    if (layer == null || !node_index.HasValue)
    {
        (layer, node_index, _) = tensor._keras_history.Value;
    }

    if (layer._inbound_nodes == null || layer._inbound_nodes.Count == 0)
    {
        return new KerasSymbol[] { tensor };
    }

    var node = layer._inbound_nodes[node_index.Value];
    if (node.inbound_layers == null || node.inbound_layers.Length == 0)
    {
        // Reached an Input layer, stop recursion.
        return node.input_tensors;
    }

    var source_tensors = new List<KerasSymbol>();
    foreach (var i in Enumerable.Range(0, node.inbound_layers.Length))
    {
        var x = node.input_tensors[i];
        layer = node.inbound_layers[i];
        node_index = node.node_indices[i];
        var previous_sources = GetSourceInputs(x, layer, node_index);
        // Avoid input redundancy.
        foreach (var ps in previous_sources)
        {
            if (!source_tensors.Contains(ps))
            {
                source_tensors.Add(ps);
            }
        }
    }

    return source_tensors.ToArray();
}
// Applies the activation named by this.activation to x. Unknown names fall
// through to the identity (linear) activation, matching the default branch
// of the original switch.
private KerasSymbol _Call(KerasSymbol x)
{
    var dispatch = new Dictionary<string, Func<KerasSymbol, KerasSymbol>>
    {
        { "elu", Activations.Elu },
        { "exp", Activations.Exponential },
        { "hard_sigmoid", Activations.HardSigmoid },
        { "linear", Activations.Linear },
        { "relu", Activations.Relu },
        { "selu", Activations.Selu },
        { "sigmoid", Activations.Sigmoid },
        { "softmax", Activations.Softmax },
        { "softplus", Activations.Softplus },
        { "softsign", Activations.Softsign },
        { "tanh", Activations.Tanh },
    };

    return dispatch.TryGetValue(activation, out var fn)
        ? fn(x)
        : Activations.Linear(x);
}
// Resolves the configured dropout noise shape against the symbolic shape
// of `inputs`: any non-positive entry in this.noise_shape is replaced by
// the corresponding symbolic dimension. Returns null when no noise shape
// was configured.
internal virtual Shape GetNoiseShape(KerasSymbol inputs)
{
    if (this.noise_shape == null)
    {
        return (this.noise_shape);
    }
    var symbolic_shape = K.Shape(inputs);
    for (int axis = 0; axis < noise_shape.Dimension; axis++)
    {
        var shape = noise_shape[axis];
        if (shape <= 0)
        {
            // NOTE(review): this writes the resolved dimension back into
            // the shared this.noise_shape field, so the <= 0 placeholder
            // entries are lost after the first call — confirm this
            // in-place mutation is intended.
            noise_shape[axis] = symbolic_shape[axis];
        }
    }
    return (noise_shape);
}
// Builds the dense layer's weights: a (input_dim, units) kernel, an
// optional bias of length `units`, and an input spec pinning the last
// input dimension.
public override void Build(Shape input_shape)
{
    Debug.Assert(input_shape.Dimension >= 2);
    var last_dim = input_shape[-1];

    this.kernel = this.AddWeight(
        shape: (last_dim, this.units),
        initializer: this.kernel_initializer,
        name: "kernel",
        regularizer: this.kernel_regularizer,
        constraint: this.kernel_constraint,
        sparse_weight: this.sparse_weight);

    this.bias = this.use_bias
        ? this.AddWeight(
            shape: new Shape(this.units),
            initializer: this.bias_initializer,
            name: "bias",
            regularizer: this.bias_regularizer,
            constraint: this.bias_constraint)
        : null;

    var last_axis_spec = new Dictionary<int, int> { { -1, last_dim } };
    this.input_spec = new InputSpec[] { new InputSpec(min_ndim: 2, axes: last_axis_spec) };
    this.built = true;
}
// Builds the PReLU layer: one trainable `alpha` per input element (batch
// axis excluded); axes listed in shared_axes collapse to size 1 and are
// broadcast. Also records an input spec pinning every non-shared axis.
public override void Build(Shape input_shape)
{
    var param_shape = input_shape.Data.Skip(1).ToArray();
    this.param_broadcast = new List<bool>();
    for (int i = 0; i < param_shape.Length; i++)
    {
        param_broadcast.Add(false);
    }

    if (this.shared_axes != null)
    {
        foreach (var i in this.shared_axes.Data)
        {
            // shared_axes is 1-based relative to the full input shape;
            // param_shape has the batch axis stripped, hence i - 1.
            param_shape[i - 1] = 1;
            this.param_broadcast[i - 1] = true;
        }
    }

    this.alpha = this.AddWeight(shape: new Shape(param_shape), name: "alpha", initializer: this.alpha_initializer, regularizer: this.alpha_regularizer, constraint: this.alpha_constraint);

    // Set input spec
    var axes = new Dictionary<int, int>();
    // BUG FIX: the original read this.shared_axes.Dimension without a null
    // check (the earlier block has one), throwing NullReferenceException
    // whenever no shared axes were configured.
    if (this.shared_axes != null && this.shared_axes.Dimension > 0)
    {
        foreach (var i in Enumerable.Range(1, input_shape.Dimension - 1))
        {
            if (!this.shared_axes.Data.Contains(i))
            {
                axes[i] = input_shape[i];
            }
        }
    }

    this.input_spec = new InputSpec[] { new InputSpec(ndim: input_shape.Dimension, axes: axes) };
    this.built = true;
}
/// <summary>
/// Applies this constraint to the weight tensor <paramref name="w"/> and
/// returns the constrained tensor. Implemented by each concrete constraint.
/// </summary>
public abstract KerasSymbol Call(KerasSymbol w);
/// <summary>
/// Placeholder override; this constraint is not implemented yet and
/// always throws <see cref="NotImplementedException"/>.
/// </summary>
public override KerasSymbol Call(KerasSymbol w)
{
    throw new NotImplementedException();
}
/// <summary>
/// Computes output mask tensors for the given inputs. Not supported at
/// this level; always throws <see cref="NotImplementedException"/> —
/// overriding layers provide the actual masking behavior.
/// </summary>
public virtual KerasSymbol[] ComputeMask(KerasSymbol[] inputs, KerasSymbol mask = null)
{
    throw new NotImplementedException();
}
/// <summary>
/// Default layer call: the identity transform — inputs pass through
/// unchanged unless a subclass overrides this.
/// </summary>
public virtual KerasSymbol Call(KerasSymbol x) => x;
/// <summary>
/// Stub for retrieving the Input-layer source tensors that
/// <paramref name="tensor"/> derives from; always throws
/// <see cref="NotImplementedException"/>.
/// </summary>
public static KerasSymbol[] GetSourceInputs(KerasSymbol tensor, Layer layer = null, int? node_index = null)
{
    throw new NotImplementedException();
}
/// <summary>
/// Entry layer of a model: wraps (or creates) a placeholder tensor and
/// records the input node so downstream layers can trace their history.
/// Provide exactly one of <paramref name="input_shape"/> or
/// <paramref name="batch_input_shape"/> (or an inferable
/// <paramref name="input_tensor"/>).
/// </summary>
public InputLayer( Shape input_shape = null, int? batch_size = null, Shape batch_input_shape = null, DType dtype = null, KerasSymbol input_tensor = null, bool sparse = false, string name = null)
{
    // BUG FIX: the original auto-generated a name only when the caller HAD
    // provided one (`name != null`), discarding the user's name and never
    // naming unnamed layers. Generate a name only when none was given.
    // NOTE(review): the computed name is never assigned to a field here;
    // confirm the base Layer picks it up (this.name is read below).
    if (name == null)
    {
        var prefix = "input";
        name = prefix + "_" + K.GetUid(prefix).ToString();
    }

    this.trainable = false;
    this.built = true;
    this.sparse = sparse;
    this.supports_masking = true;

    if (input_shape != null && batch_input_shape != null)
    {
        throw new Exception("Only provide the input_shape OR batch_input_shape argument to InputLayer, not both at the same time.");
    }

    if (input_tensor != null && batch_input_shape == null)
    {
        // If input_tensor is set, and batch_input_shape is not set:
        // Attempt automatic input shape inference.
        try
        {
            batch_input_shape = input_tensor.Shape;
        }
        catch (Exception)
        {
            if (input_shape == null && batch_input_shape == null)
            {
                throw new Exception("InputLayer was provided an input_tensor argument, but its input shape cannot be automatically inferred. You should pass an input_shape or batch_input_shape argument.");
            }
        }
    }

    if (batch_input_shape == null)
    {
        if (input_shape == null)
        {
            throw new Exception("An Input layer should be passed either a `batch_input_shape` or an `input_shape`.");
        }
        else
        {
            var batchShapeData = input_shape.Data.ToList();
            // BUG FIX: batch_size is optional; the original called
            // batch_size.Value unconditionally and threw when it was null.
            // 0 marks an unknown (variable) batch dimension.
            batchShapeData.Insert(0, batch_size ?? 0);
            batch_input_shape = new Shape(batchShapeData);
        }
    }
    else
    {
        batch_input_shape = new Shape(batch_input_shape);
    }

    if (dtype == null)
    {
        dtype = input_tensor == null ? K.FloatX() : K.DataType(input_tensor);
    }

    this.batch_input_shape = batch_input_shape;
    this.dtype = dtype;

    if (input_tensor == null)
    {
        this.is_placeholder = true;
        input_tensor = K.Placeholder(shape: batch_input_shape, dtype: dtype, sparse: this.sparse, name: this.name);
    }
    else
    {
        this.is_placeholder = false;
        input_tensor._keras_shape = batch_input_shape;
    }

    // Create an input node to add to this.outbound_node
    // and set output_tensors' _keras_history.
    input_tensor._uses_learning_phase = false;
    input_tensor._keras_history = (this, 0, 0);
    var node = new Node(this,
        inbound_layers: new Layer[0],
        node_indices: new int[0],
        tensor_indices: new int[0],
        input_tensors: new KerasSymbol[] { input_tensor },
        output_tensors: new KerasSymbol[] { input_tensor },
        input_masks: new KerasSymbol[] { null },
        output_masks: new KerasSymbol[] { null },
        input_shapes: new Shape[] { batch_input_shape },
        output_shapes: new Shape[] { batch_input_shape });
}
// Non-negativity constraint: zeroes out negative entries by multiplying w
// with an indicator mask of (w >= 0).
public override KerasSymbol Call(KerasSymbol w)
{
    var non_negative_mask = K.Cast(K.GreaterEqual(w, 0), K.FloatX());
    return w * non_negative_mask;
}
// Unit-norm constraint: projects w onto the unit sphere along `axis`
// (epsilon keeps the division well-defined for zero vectors).
public override KerasSymbol Call(KerasSymbol w)
{
    var l2_norm = K.Sqrt(K.Sum(K.Square(w), axis: this.axis, keepdims: true));
    return w / (K.Epsilon() + l2_norm);
}