/// <summary>
/// Serializes this LSTM layer's configuration to a JObject, mirroring the
/// Keras LSTM constructor arguments. The "op" entry identifies the layer type.
/// </summary>
/// <returns>A JObject describing the layer; key order follows insertion order.</returns>
public override JObject ToJObject()
{
    var result = new JObject();

    result["units"] = _units;

    // input_shape is only emitted for the first layer of a model.
    if (_inputShape != null)
    {
        result["input_shape"] = new JArray(_inputShape);
    }

    result["use_bias"] = _useBias;

    // Activations and initializers/regularizers may be plain strings or
    // nested config objects; KerasUtils handles both representations.
    KerasUtils.AddActivation(result, _activation);
    KerasUtils.AddActivation(result, "recurrent_activation", _recurrentActivation);
    KerasUtils.AddStringOrObject(result, "kernel_initializer", _kernelInitializer);
    KerasUtils.AddStringOrObject(result, "recurrent_initializer", _recurrentInitializer);
    KerasUtils.AddStringOrObject(result, "bias_initializer", _biasInitializer);

    result["unit_forget_bias"] = _unitForgetBias;

    KerasUtils.AddStringOrObject(result, "kernel_regularizer", _kernelRegularizer);
    KerasUtils.AddStringOrObject(result, "recurrent_regularizer", _recurrentRegularizer);
    KerasUtils.AddStringOrObject(result, "bias_regularizer", _biasRegularizer);
    KerasUtils.AddStringOrObject(result, "activity_regularizer", _activityRegularizer);

    result["dropout"] = _dropout;
    result["recurrent_dropout"] = _recurrentDropout;
    result["op"] = "LSTM";

    return result;
}
/// <summary>
/// Serializes this Embedding layer's configuration to a JObject, mirroring the
/// Keras Embedding constructor arguments. The "op" entry identifies the layer type.
/// </summary>
/// <returns>A JObject describing the layer; key order follows insertion order.</returns>
public override JObject ToJObject()
{
    var result = new JObject();

    result["input_dim"] = _inputDim;
    result["output_dim"] = _outputDim;
    result["mask_zero"] = _maskZero;

    // Initializer/regularizer may be a plain string name or a nested config
    // object; KerasUtils emits whichever representation applies.
    KerasUtils.AddStringOrObject(result, "embedding_initializer", _embeddingInitializer);
    KerasUtils.AddStringOrObject(result, "embedding_regularizer", _embeddingRegularizer);

    // Optional fields are only written when a value was provided.
    if (_inputLength.HasValue)
    {
        result.Add("input_length", _inputLength.Value);
    }
    if (_inputShape != null)
    {
        result["input_shape"] = new JArray(_inputShape);
    }

    result["op"] = "Embedding";

    return result;
}