Code Example #1
        /// <summary>
        /// Updates the specified iteration.
        /// </summary>
        /// <param name="iteration">The iteration.</param>
        /// <param name="layer">The layer.</param>
        internal override void Update(int iteration, BaseLayer layer)
        {
            // Time-based learning-rate decay, if configured.
            if (DecayRate > 0)
            {
                LearningRate = LearningRate * (1 / (1 + DecayRate * iteration));
            }

            // Bias-corrected learning rate for step t (Adamax, Kingma & Ba 2014).
            float t    = iteration + 1;
            float lr_t = Convert.ToSingle(LearningRate / (1f - Math.Pow(Beta1, t)));

            foreach (var item in layer.Params)
            {
                var param = item.Value;

                // Lazily initialize the first-moment (ms) and infinity-norm (us) accumulators.
                if (!ms.ContainsKey(param.Name))
                {
                    ms[param.Name] = SuperArray.Constant(0f, param.Data.Shape);
                    us[param.Name] = SuperArray.Constant(0f, param.Data.Shape);
                }

                // Exponential moving average of the gradient, and the exponentially
                // weighted infinity norm of the gradient.
                var m_t = (Beta1 * ms[param.Name]) + (1 - Beta1) * param.Grad;
                var u_t = Ops.Maximum((Beta2 * us[param.Name]), Ops.Abs(param.Grad));

                // Step with the bias-corrected rate lr_t computed above.
                param.Data     = param.Data - lr_t * m_t / (u_t + Ops.EPSILON);
                ms[param.Name] = m_t;
                us[param.Name] = u_t;

                param.ApplyConstraint();
            }
        }
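For reference, the update above is the Adamax rule from Kingma & Ba's Adam paper: m_t = Beta1*m + (1 - Beta1)*g, u_t = max(Beta2*u, |g|), and a step of lr_t * m_t / (u_t + eps) with the bias-corrected rate lr_t. A minimal scalar sketch of one such step, independent of SuperArray (the learning rate, beta, and epsilon values are common defaults assumed for illustration):

using System;

class AdamaxSketch
{
    static void Main()
    {
        // One scalar parameter with gradient of f(w) = w^2; beta1 = 0.9,
        // beta2 = 0.999, lr = 0.002, eps = 1e-7 are assumed defaults.
        double w = 1.0, lr = 0.002, beta1 = 0.9, beta2 = 0.999, eps = 1e-7;
        double m = 0.0, u = 0.0;

        for (int t = 1; t <= 3; t++)
        {
            double g = 2.0 * w;                           // gradient of w^2
            double lrT = lr / (1.0 - Math.Pow(beta1, t)); // bias-corrected rate, as lr_t above
            m = beta1 * m + (1 - beta1) * g;              // first moment, as m_t above
            u = Math.Max(beta2 * u, Math.Abs(g));         // infinity norm, as u_t above
            w -= lrT * m / (u + eps);
            Console.WriteLine($"t={t} w={w:F6}");
        }
    }
}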
Code Example #2
        /// <summary>
        /// Backpropagation method to calculate gradient of the loss function
        /// </summary>
        /// <param name="preds">The predicted result.</param>
        /// <param name="labels">The true result.</param>
        /// <returns></returns>
        public override SuperArray Backward(SuperArray preds, SuperArray labels)
        {
            // Gradient of the cosine-proximity loss with respect to preds; the
            // _cossine_sim helper is defined elsewhere in the class.
            var y_true = Ops.Max(Ops.Sum(labels, 1), 1) / (Ops.Abs(preds * Ops.Abs(labels)));
            var y_pred = Ops.Max(Ops.Sum(preds, 1), 1) / Ops.Square(Ops.Abs(preds));

            return y_true + _cossine_sim(preds, labels) * y_pred;
        }
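The _cossine_sim helper is not part of this snippet. A plausible reading, consistent with a cosine-proximity loss, is the row-wise cosine similarity sketched below; this is a guess at the missing helper, not the library's code, and Ops.Sqrt is assumed to exist alongside the Ops calls already used above.

        // Hypothetical sketch of the missing helper: per-sample cosine similarity,
        // sum(preds * labels) / (||preds|| * ||labels||), with epsilon for stability.
        private SuperArray _cossine_sim(SuperArray preds, SuperArray labels)
        {
            var dot   = Ops.Sum(preds * labels, 1);
            var norms = Ops.Sqrt(Ops.Sum(Ops.Square(preds), 1)) * Ops.Sqrt(Ops.Sum(Ops.Square(labels), 1));

            return dot / (norms + Ops.EPSILON);
        }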
Code Example #3
        /// <summary>
        /// Calculates the grad.
        /// </summary>
        /// <param name="x">The x.</param>
        /// <returns></returns>
        internal override SuperArray CalcGrad(SuperArray x)
        {
            SuperArray grad = null;

            // L1 term: d/dx of L1 * |x| is L1 * sign(x); the epsilon guards x == 0.
            if (L1 > 0)
            {
                grad = (L1 * x) / (Ops.Abs(x) + Ops.EPSILON);
            }

            // L2 term: d/dx of L2 * x^2 is 2 * L2 * x. Accumulated rather than
            // assigned, so combined L1 + L2 regularization mirrors Call() below.
            if (L2 > 0)
            {
                var l2Grad = 2 * L2 * x;
                grad = grad == null ? l2Grad : grad + l2Grad;
            }

            return grad;
        }
Code Example #4
        /// <summary>
        /// Calls the specified x.
        /// </summary>
        /// <param name="x">The x.</param>
        /// <returns></returns>
        internal override float Call(SuperArray x)
        {
            float result = 0;

            // L1 penalty: L1 * sum(|x|).
            if (L1 > 0)
            {
                result += (float)Ops.Sum(L1 * Ops.Abs(x));
            }

            // L2 penalty: L2 * sum(x^2).
            if (L2 > 0)
            {
                result += (float)Ops.Sum(L2 * Ops.Square(x));
            }

            return result;
        }
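Taken together, Call computes the penalty L1 * sum(|x|) + L2 * sum(x^2) and CalcGrad its derivative. A self-contained scalar finite-difference check of that pairing, using plain double arithmetic rather than SuperArray (the sample values are arbitrary):

using System;

class RegularizerCheck
{
    static void Main()
    {
        double l1 = 0.01, l2 = 0.001, x = 0.5, h = 1e-6;

        // Penalty and analytic gradient for a single scalar weight.
        Func<double, double> penalty = w => l1 * Math.Abs(w) + l2 * w * w;
        double analytic = l1 * Math.Sign(x) + 2 * l2 * x;

        // A central finite difference should agree closely away from x = 0.
        double numeric = (penalty(x + h) - penalty(x - h)) / (2 * h);
        Console.WriteLine($"analytic={analytic:F8} numeric={numeric:F8}");
    }
}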
Code Example #5
        /// <summary>
        /// Backpropagation method to calculate gradient of the loss function
        /// </summary>
        /// <param name="preds">The predicted result.</param>
        /// <param name="labels">The true result.</param>
        /// <returns></returns>
        public override SuperArray Backward(SuperArray preds, SuperArray labels)
        {
            // d/dpreds of mean |preds - labels|: sign(preds - labels), scaled by the batch size.
            return (preds - labels) / ((float)preds.Shape[0] * Ops.Abs(preds - labels));
        }
Code Example #6
        /// <summary>
        /// Forwards the inputs and calculates the loss.
        /// </summary>
        /// <param name="preds">The predicted result.</param>
        /// <param name="labels">The true result.</param>
        /// <returns></returns>
        public override SuperArray Forward(SuperArray preds, SuperArray labels)
        {
            // Mean absolute error per sample, returned as a single row vector.
            return Ops.Mean(Ops.Abs(preds - labels), 1).Reshape(1, -1);
        }
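This Forward/Backward pair implements mean absolute error: the forward pass averages |preds - labels| per sample, and the backward expression reduces to sign(preds - labels) divided by the normalizing count. A scalar sanity check, treating preds.Shape[0] as that count (arbitrary values; an illustration, not library code):

using System;

class MaeCheck
{
    static void Main()
    {
        double pred = 2.0, label = 3.0, n = 4.0, h = 1e-6;

        // Contribution of one element when n elements are averaged.
        Func<double, double> loss = p => Math.Abs(p - label) / n;

        // Backward above: (p - l) / (n * |p - l|), i.e. sign(p - l) / n.
        double analytic = (pred - label) / (n * Math.Abs(pred - label));
        double numeric  = (loss(pred + h) - loss(pred - h)) / (2 * h);
        Console.WriteLine($"analytic={analytic:F8} numeric={numeric:F8}");
    }
}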
Code Example #7
        /// <summary>
        /// Backpropagation method to calculate gradient of the loss function
        /// </summary>
        /// <param name="preds">The predicted result.</param>
        /// <param name="labels">The true result.</param>
        /// <returns></returns>
        public override SuperArray Backward(SuperArray preds, SuperArray labels)
        {
            // d/dpreds of 100 * |preds - labels| / |labels|: the sign term divided by
            // |labels|, with the product clipped away from zero for stability.
            var diff = (preds - labels) / Ops.Clip(Ops.Abs(labels) * Ops.Abs(labels - preds), Ops.EPSILON, float.MaxValue);

            return 100 * diff / preds.Shape[0];
        }
Code Example #8
        /// <summary>
        /// Forwards the inputs and calculates the loss.
        /// </summary>
        /// <param name="preds">The predicted result.</param>
        /// <param name="labels">The true result.</param>
        /// <returns></returns>
        public override SuperArray Forward(SuperArray preds, SuperArray labels)
        {
            // Absolute percentage error per element, with |labels| clipped away
            // from zero; the mean over axis 1 gives the per-sample MAPE.
            var diff = Ops.Abs(preds - labels) / Ops.Clip(Ops.Abs(labels), Ops.EPSILON, float.MaxValue);

            return 100 * Ops.Mean(diff, 1).Reshape(1, -1);
        }
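These two methods implement mean absolute percentage error, 100 * |preds - labels| / |labels| averaged per sample, with clipping to guard against division by zero. A scalar check that the Backward expression is the derivative of the Forward one (arbitrary nonzero label, so the clipping never triggers):

using System;

class MapeCheck
{
    static void Main()
    {
        double pred = 2.0, label = 4.0, n = 1.0, h = 1e-6;

        // One-element MAPE term: 100 * |p - l| / |l|.
        Func<double, double> loss = p => 100.0 * Math.Abs(p - label) / Math.Abs(label);

        // Backward above: 100 * (p - l) / (|l| * |l - p| * n) = 100 * sign(p - l) / (|l| * n).
        double analytic = 100.0 * (pred - label) / (Math.Abs(label) * Math.Abs(label - pred) * n);
        double numeric  = (loss(pred + h) - loss(pred - h)) / (2 * h);
        Console.WriteLine($"analytic={analytic:F8} numeric={numeric:F8}");
    }
}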
Code Example #9
        /// <summary>
        /// Calculates the gradient of this layer function
        /// </summary>
        /// <param name="outputgrad">The calculated output grad from previous layer.</param>
        public override void Backward(SuperArray outputgrad)
        {
            // Softsign derivative: d/dx of x / (1 + |x|) is 1 / (1 + |x|)^2.
            Input.Grad = outputgrad / Ops.Square(Ops.Abs(Input.Data) + 1);
        }
Code Example #10
        /// <summary>
        /// Forwards the inputs and computes the output
        /// </summary>
        /// <param name="x">The input SuperArray for this layer.</param>
        public override void Forward(SuperArray x)
        {
            base.Forward(x);

            // Softsign activation: x / (1 + |x|), squashing values into (-1, 1).
            Output = x / (Ops.Abs(x) + 1);
        }
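Examples #9 and #10 form the softsign activation pair: Forward applies f(x) = x / (1 + |x|) and Backward scales the incoming gradient by f'(x) = 1 / (1 + |x|)^2. A standalone finite-difference check of that derivative at an arbitrary point:

using System;

class SoftsignCheck
{
    static void Main()
    {
        double x = 1.5, h = 1e-6;

        // Softsign as in Forward above: f(x) = x / (1 + |x|).
        Func<double, double> f = v => v / (1.0 + Math.Abs(v));

        // Derivative as in Backward above: f'(x) = 1 / (1 + |x|)^2.
        double analytic = 1.0 / Math.Pow(1.0 + Math.Abs(x), 2);
        double numeric  = (f(x + h) - f(x - h)) / (2 * h);
        Console.WriteLine($"analytic={analytic:F8} numeric={numeric:F8}");
    }
}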
        }