/// <summary>
        /// Re-initialize the parameters of the layer.
        /// </summary>
        /// <param name="target">Specifies the weights to target (e.g. weights, bias or both).</param>
        /// <returns>When handled, this method returns <i>true</i>, otherwise <i>false</i>.</returns>
        public override bool ReInitializeParameters(WEIGHT_TARGET target)
        {
            base.ReInitializeParameters(target);

            // Cascade the re-initialization down to the internal inner product layers.
            m_ipUa.ReInitializeParameters(target);
            m_ipWa.ReInitializeParameters(target);

            return true;
        }
        /// <summary>
        /// Re-initialize the parameters of the layer.
        /// </summary>
        /// <param name="target">Specifies the weights to target (e.g. weights, bias or both).</param>
        /// <returns>When handled, this method returns <i>true</i>, otherwise <i>false</i>.</returns>
        public override bool ReInitializeParameters(WEIGHT_TARGET target)
        {
            base.ReInitializeParameters(target);

            if (target == WEIGHT_TARGET.BOTH || target == WEIGHT_TARGET.WEIGHTS)
            {
                // Reset each of the layer's three parameter blobs to zero.
                for (int i = 0; i < 3; i++)
                {
                    m_colBlobs[i].SetData(0);
                }
            }

            return true;
        }
        /// <summary>
        /// Re-initialize the parameters of the layer.
        /// </summary>
        /// <param name="target">Specifies the weights to target (e.g. weights, bias or both).</param>
        /// <returns>When handled, this method returns <i>true</i>, otherwise <i>false</i>.</returns>
        public override bool ReInitializeParameters(WEIGHT_TARGET target)
        {
            base.ReInitializeParameters(target);

            if (target == WEIGHT_TARGET.BOTH || target == WEIGHT_TARGET.BIAS)
            {
                // Use the configured bias filler, falling back to a constant(0)
                // filler when none is specified.
                FillerParameter fp = m_param.bias_param.filler;
                if (fp == null)
                {
                    fp = new FillerParameter("constant", 0.0);
                }

                Filler<T> filler = Filler<T>.Create(m_cuda, m_log, fp);
                filler.Fill(m_colBlobs[0]);
            }

            return true;
        }
        /// <summary>
        /// Re-initialize the parameters of the layer.
        /// </summary>
        /// <param name="target">Specifies the weights to target (e.g. weights, bias or both).</param>
        /// <returns>When handled, this method returns <i>true</i>, otherwise <i>false</i>.</returns>
        public override bool ReInitializeParameters(WEIGHT_TARGET target)
        {
            base.ReInitializeParameters(target);

            if (target == WEIGHT_TARGET.WEIGHTS || target == WEIGHT_TARGET.BOTH)
            {
                // Re-fill the weight blob using the configured weight filler.
                Filler<T> filler = Filler<T>.Create(m_cuda, m_log, m_param.convolution_param.weight_filler);
                filler.Fill(m_colBlobs[0]);
            }

            if (m_param.convolution_param.bias_term && m_colBlobs.Count > 1 && (target == WEIGHT_TARGET.BOTH || target == WEIGHT_TARGET.BIAS))
            {
                // Re-fill the bias blob (when present) using the configured bias filler.
                Filler<T> fillerBias = Filler<T>.Create(m_cuda, m_log, m_param.convolution_param.bias_filler);
                fillerBias.Fill(m_colBlobs[1]);
            }

            return true;
        }
        /// <summary>
        /// Re-initialize the parameters of the layer.
        /// </summary>
        /// <param name="target">Specifies the weights to target (e.g. weights, bias or both).</param>
        /// <returns>When handled, this method returns <i>true</i>, otherwise <i>false</i>.</returns>
        public override bool ReInitializeParameters(WEIGHT_TARGET target)
        {
            base.ReInitializeParameters(target);

            if (target == WEIGHT_TARGET.BOTH || target == WEIGHT_TARGET.WEIGHTS)
            {
                // Re-fill the weight blob using the configured weight filler.
                Filler<T> weight_filler = Filler<T>.Create(m_cuda, m_log, m_param.inner_product_param.weight_filler);
                weight_filler.Fill(m_colBlobs[0]);
            }

            if (m_param.inner_product_param.bias_term && m_colBlobs.Count > 1 && (target == WEIGHT_TARGET.BOTH || target == WEIGHT_TARGET.BIAS))
            {
                // Re-fill the bias blob (when present) using the configured bias filler.
                Filler<T> bias_filler = Filler<T>.Create(m_cuda, m_log, m_param.inner_product_param.bias_filler);
                bias_filler.Fill(m_colBlobs[1]);
            }

            return true;
        }
        /// <summary>
        /// Re-initialize the parameters of the layer.
        /// </summary>
        /// <param name="target">Specifies the weights to target (e.g. weights, bias or both).</param>
        /// <returns>When handled, this method returns <i>true</i>, otherwise <i>false</i>.</returns>
        public override bool ReInitializeParameters(WEIGHT_TARGET target)
        {
            base.ReInitializeParameters(target);

            if (target == WEIGHT_TARGET.BOTH || target == WEIGHT_TARGET.WEIGHTS)
            {
                // Fill both weight blobs with the same weight filler.
                Filler<T> weight_filler = Filler<T>.Create(m_cuda, m_log, m_param.lstm_simple_param.weight_filler);
                weight_filler.Fill(m_colBlobs[0]);
                weight_filler.Fill(m_colBlobs[1]);
            }

            if (target == WEIGHT_TARGET.BOTH || target == WEIGHT_TARGET.BIAS)
            {
                Filler<T> bias_filler = Filler<T>.Create(m_cuda, m_log, m_param.lstm_simple_param.bias_filler);
                bias_filler.Fill(m_colBlobs[2]);

                // Initialize the bias for the forget gate to 5.0 as described in the
                // Clockwork RNN paper:
                // [1] Koutnik, J., Greff, K., Gomez, F., Schmidhuber, J., 'A Clockwork RNN', 2014.
                if (m_param.lstm_simple_param.enable_clockwork_forgetgate_bias)
                {
                    // The forget-gate biases occupy the second block of m_nH entries.
                    double[] rgBias = convertD(m_colBlobs[2].mutable_cpu_data);

                    for (int i = m_nH; i < 2 * m_nH; i++)
                    {
                        rgBias[i] = 5.0;
                    }

                    m_colBlobs[2].mutable_cpu_data = convert(rgBias);
                }
            }

            return true;
        }
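        Usage is the same across all of these overrides. A minimal sketch, assuming an already-constructed Layer<T> instance (the 'layer' variable below is hypothetical, not part of the snippets above):

        // Hypothetical 'layer' instance; re-initialize only the weight blobs,
        // leaving any bias blobs untouched.
        layer.ReInitializeParameters(WEIGHT_TARGET.WEIGHTS);

        // Re-initialize both the weights and the biases in one call.
        layer.ReInitializeParameters(WEIGHT_TARGET.BOTH);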