Example #1
0
        /// <summary>
        /// Setup the layer: read the gradient scale parameters, validate them, and
        /// compute the initial adaptation coefficient.
        /// </summary>
        /// <param name="colBottom">Specifies the collection of bottom (input) Blobs.</param>
        /// <param name="colTop">Specifies the collection of top (output) Blobs.</param>
        public override void LayerSetUp(BlobCollection <T> colBottom, BlobCollection <T> colTop)
        {
            base.LayerSetUp(colBottom, colTop);

            GetIterationArgs args = getCurrentIteration();

            // The adaptation coefficient only matters during training; skip setup otherwise.
            if (args != null && args.CurrentPhase != Phase.TRAIN)
            {
                return;
            }

            m_log.CHECK(args != null, "WARNING: The OnGetIteration event is not connected!");

            m_dfLowerBound = m_param.gradient_scale_param.lower_bound;
            m_dfUpperBound = m_param.gradient_scale_param.upper_bound;
            m_dfAlpha      = m_param.gradient_scale_param.alpha;
            m_dfMaxIter    = m_param.gradient_scale_param.max_iter;
            m_dfCoeff      = 1.0; // default adaptation coefficient.

            m_log.CHECK_LE(m_dfLowerBound, m_dfUpperBound, "The lower bound must be <= the upper bound.");
            m_log.CHECK_GE(m_dfAlpha, 0, "The alpha value must be >= 0.0");
            // BUGFIX: previously checked m_dfCoeff (just set to 1.0 above), which made this
            // validation a no-op; the message and intent clearly target max_iter.
            m_log.CHECK_GE(m_dfMaxIter, 1, "The max_iter must be >= 1.0");

            int    nIteration = (args == null) ? 1 : args.Iteration;
            double dfProgress = Math.Min(1.0, (double)nIteration / m_dfMaxIter);
            double dfHeight   = m_dfUpperBound - m_dfLowerBound;

            // Sigmoid schedule: 2h / (1 + e^(-alpha*p)) - h + lower ramps the coefficient
            // from lower_bound (progress = 0) toward upper_bound (progress -> 1).
            m_dfCoeff = 2.0 * dfHeight / (1.0 + Math.Exp(-m_dfAlpha * dfProgress)) - dfHeight + m_dfLowerBound;
            m_log.WriteLine("iter = " + nIteration.ToString() + " progress = " + dfProgress.ToString() + " coeff = " + m_dfCoeff.ToString());
            m_swOutput.Start();
        }
Example #2
0
        /// <summary>
        /// Scales the error gradient w.r.t. the GRADIENTSCALER value inputs.
        /// </summary>
        /// <param name="colTop">top output blob vector, providing the error gradient
        /// with respect to outputs
        /// </param>
        /// <param name="rgbPropagateDown">propagate_down see Layer::Backward.</param>
        /// <param name="colBottom">bottom input blob vector
        /// </param>
        protected override void backward(BlobCollection <T> colTop, List <bool> rgbPropagateDown, BlobCollection <T> colBottom)
        {
            // Nothing to do when no gradient flows down to the bottom blob.
            if (!rgbPropagateDown[0])
            {
                return;
            }

            GetIterationArgs args = getCurrentIteration();

            // Gradient scaling is a training-time behavior only.
            if (args != null && args.CurrentPhase != Phase.TRAIN)
            {
                return;
            }

            int    nIter   = (args == null) ? 1 : args.Iteration;
            double dfPct   = Math.Min(1.0, (double)nIter / m_dfMaxIter);
            double dfRange = m_dfUpperBound - m_dfLowerBound;

            // Sigmoid ramp of the adaptation coefficient from lower_bound toward upper_bound.
            m_dfCoeff = 2.0 * dfRange / (1.0 + Math.Exp(-m_dfAlpha * dfPct)) - dfRange + m_dfLowerBound;

            // Throttle status output to at most once per second.
            if (m_swOutput.Elapsed.TotalMilliseconds > 1000)
            {
                m_log.WriteLine("iter = " + nIter.ToString() + " progress = " + dfPct.ToString() + " coeff = " + m_dfCoeff.ToString());
                m_swOutput.Restart();
            }

            // Copy the (negated) scaled top gradient into the bottom gradient.
            long hTop = colTop[0].gpu_diff;
            long hBtm = colBottom[0].mutable_gpu_diff;
            m_cuda.scale(colTop[0].count(), -m_dfCoeff, hTop, hBtm);
        }
Example #3
0
        /// <summary>
        /// Fires the OnGetIteration event to query the current iteration.
        /// </summary>
        /// <returns>The GetIterationArgs is returned if the event is connected, otherwise <i>null</i> is returned.</returns>
        protected GetIterationArgs getCurrentIteration()
        {
            // Without a subscriber there is no iteration source to query.
            if (OnGetIteration == null)
            {
                return null;
            }

            GetIterationArgs args = new GetIterationArgs();
            OnGetIteration(this, args);

            return args;
        }