/** @copydoc LayerParameterBase::Clone */
public override LayerParameterBase Clone()
{
    // Allocate a fresh parameter and deep-copy this instance's state into it.
    RecurrentParameter pClone = new RecurrentParameter();
    pClone.Copy(this);
    return pClone;
}
/// <summary>
/// Parses the parameter from a RawProto.
/// </summary>
/// <param name="rp">Specifies the RawProto to parse.</param>
/// <returns>A new instance of the parameter is returned.</returns>
public static new RecurrentParameter FromProto(RawProto rp)
{
    RecurrentParameter p = new RecurrentParameter();
    string strValue;

    // Parse the base engine settings first, then layer-specific values.
    ((EngineParameter)p).Copy(EngineParameter.FromProto(rp));

    if ((strValue = rp.FindValue("num_output")) != null)
        p.num_output = uint.Parse(strValue);

    RawProto rpWtFiller = rp.FindChild("weight_filler");
    if (rpWtFiller != null)
        p.weight_filler = FillerParameter.FromProto(rpWtFiller);

    RawProto rpBsFiller = rp.FindChild("bias_filler");
    if (rpBsFiller != null)
        p.bias_filler = FillerParameter.FromProto(rpBsFiller);

    if ((strValue = rp.FindValue("debug_info")) != null)
        p.debug_info = bool.Parse(strValue);

    if ((strValue = rp.FindValue("expose_hidden")) != null)
        p.expose_hidden = bool.Parse(strValue);

    if ((strValue = rp.FindValue("dropout_ratio")) != null)
        p.dropout_ratio = ParseDouble(strValue);

    if ((strValue = rp.FindValue("dropout_seed")) != null)
        p.dropout_seed = long.Parse(strValue);

    if ((strValue = rp.FindValue("num_layers")) != null)
        p.num_layers = uint.Parse(strValue);

    if ((strValue = rp.FindValue("cudnn_enable_tensor_cores")) != null)
        p.cudnn_enable_tensor_cores = bool.Parse(strValue);

    return p;
}
/** @copydoc LayerParameterBase::Copy */
public override void Copy(LayerParameterBase src)
{
    RecurrentParameter p = (RecurrentParameter)src;
    m_nNumOutput = p.num_output;
    // Guard the deep copies: FromProto leaves the fillers null when the
    // proto omits 'weight_filler'/'bias_filler', and Clone() on a null
    // filler would throw a NullReferenceException.
    m_weight_filler = (p.weight_filler == null) ? null : p.weight_filler.Clone();
    m_bias_filler = (p.bias_filler == null) ? null : p.bias_filler.Clone();
    m_bDebugInfo = p.debug_info;
    m_bExposeHidden = p.expose_hidden;
}
/** @copydoc LayerParameterBase::Load */
public override object Load(BinaryReader br, bool bNewInstance = true)
{
    // The parameter is persisted as proto text; read it back and re-parse.
    string strProto = br.ReadString();
    RecurrentParameter p = FromProto(RawProto.Parse(strProto));

    // When reusing this instance, pull the loaded state into it as well.
    if (!bNewInstance)
        Copy(p);

    return p;
}
/** @copydoc LayerParameterBase::Copy */
public override void Copy(LayerParameterBase src)
{
    base.Copy(src);

    if (src is RecurrentParameter)
    {
        RecurrentParameter p = (RecurrentParameter)src;
        m_nNumOutput = p.num_output;
        // Guard the deep copies: FromProto leaves the fillers null when the
        // proto omits 'weight_filler'/'bias_filler', and Clone() on a null
        // filler would throw a NullReferenceException.
        m_weight_filler = (p.weight_filler == null) ? null : p.weight_filler.Clone();
        m_bias_filler = (p.bias_filler == null) ? null : p.bias_filler.Clone();
        m_bDebugInfo = p.debug_info;
        m_bExposeHidden = p.expose_hidden;
        m_dfDropoutRatio = p.dropout_ratio;
        m_lDropoutSeed = p.dropout_seed;
        m_nNumLayers = p.num_layers;
    }
}
/** @copydoc LayerParameterBase::Copy */
public override void Copy(LayerParameterBase src)
{
    base.Copy(src);

    if (src is RecurrentParameter)
    {
        RecurrentParameter p = (RecurrentParameter)src;
        m_nNumOutput = p.num_output;
        // Guard the deep copies: FromProto leaves the fillers null when the
        // proto omits 'weight_filler'/'bias_filler', and Clone() on a null
        // filler would throw a NullReferenceException.
        m_weight_filler = (p.weight_filler == null) ? null : p.weight_filler.Clone();
        m_bias_filler = (p.bias_filler == null) ? null : p.bias_filler.Clone();
        m_bDebugInfo = p.debug_info;
        m_bExposeHiddenInput = p.expose_hidden_input;
        m_bExposeHiddenOutput = p.expose_hidden_output;
        m_dfDropoutRatio = p.dropout_ratio;
        m_lDropoutSeed = p.dropout_seed;
        m_nNumLayers = p.num_layers;
        m_bBidirectional = p.bidirectional;
        // Read through the public property for consistency with every other
        // member above (the original reached into the private backing field
        // p.m_bCudnnEnableTensorCores directly).
        m_bCudnnEnableTensorCores = p.cudnn_enable_tensor_cores;
    }
}
/// <summary>
/// Parses the parameter from a RawProto.
/// </summary>
/// <param name="rp">Specifies the RawProto to parse.</param>
/// <returns>A new instance of the parameter is returned.</returns>
public static RecurrentParameter FromProto(RawProto rp)
{
    RecurrentParameter p = new RecurrentParameter();
    string strValue;

    if ((strValue = rp.FindValue("num_output")) != null)
        p.num_output = uint.Parse(strValue);

    RawProto rpWtFiller = rp.FindChild("weight_filler");
    if (rpWtFiller != null)
        p.weight_filler = FillerParameter.FromProto(rpWtFiller);

    RawProto rpBsFiller = rp.FindChild("bias_filler");
    if (rpBsFiller != null)
        p.bias_filler = FillerParameter.FromProto(rpBsFiller);

    if ((strValue = rp.FindValue("debug_info")) != null)
        p.debug_info = bool.Parse(strValue);

    if ((strValue = rp.FindValue("expose_hidden")) != null)
        p.expose_hidden = bool.Parse(strValue);

    return p;
}