Example #1
        /// <summary>
        /// Numerically stable shortcut for Log(Sum(Exp(a), axis, keepDims)).
        /// </summary>
        public static Array<Real> LogSumExp(Array<Real> a, int axis = -1, bool keepDims = false, Array<Real> result = null)
        {
            if (axis < 0)
            {
                axis += a.NDim;
            }
            // Shift by the per-axis maximum so the exponentials cannot overflow.
            var b   = NN.Max(a, axis: axis, keepDims: true);
            var sum = NN.Exp(a - b).Sum(axis: axis, keepDims: true, result: result);

            // Undo the shift: Log(Sum(Exp(a))) = b + Log(Sum(Exp(a - b))).
            result = Apply(sum, b, (x, b_) => b_ + (Real)Math.Log(x), result: result);
            return keepDims ? result : result.Reshape(GetAggregatorResultShape(a, axis, keepDims));
        }
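
The trick above relies on the identity log(Σᵢ exp(aᵢ)) = b + log(Σᵢ exp(aᵢ − b)) with b = max(a): subtracting the maximum keeps every exponent at or below zero, so Exp cannot overflow. A minimal standalone C# sketch of the same trick on a flat double[] (illustration only, independent of the NN library used above):

    using System;
    using System.Linq;

    static class LogSumExpDemo
    {
        // Numerically stable log-sum-exp over a flat array.
        // Shifting by the maximum keeps every exponent <= 0, so Exp never overflows.
        static double LogSumExp(double[] a)
        {
            double b = a.Max();
            double sum = a.Sum(x => Math.Exp(x - b));
            return b + Math.Log(sum);
        }

        static void Main()
        {
            // Naive Math.Log(a.Sum(Math.Exp)) overflows to +Infinity for these inputs;
            // the shifted version returns 1000 + log(3) ≈ 1001.0986.
            var a = new[] { 1000.0, 1000.0, 1000.0 };
            Console.WriteLine(LogSumExp(a));
        }
    }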
Example #2
        /// <summary>
        /// Numerically stable Softmax. See http://aelag.com/translation-of-theano-softmax-function
        /// </summary>
        /// <param name="a">The input array</param>
        /// <param name="axis">The axis to compute the Softmax along, like in the Max function. Default value mimics Theano behavior</param>
        /// <param name="result">Optional buffer to hold the result; allocated if null</param>
        /// <param name="buffer">Optional scratch buffer, reused for the per-axis maxima and then for the normalizing sum</param>
        /// <returns>The softmax of <paramref name="a"/> along <paramref name="axis"/></returns>
        public static Array<Real> Softmax(Array<Real> a, int axis = -1, Array<Real> result = null, Array<Real> buffer = null)
        {
            // Shift by the per-axis maximum so the exponentials cannot overflow.
            var maxes   = NN.Max(a, axis: axis, keepDims: true, result: buffer);
            var shifted = a.Sub(maxes, result: result);

            result = NN.Exp(shifted, result: result);
            // Reuse the maxes buffer to hold the normalizing sum.
            var sum = NN.Sum(result, axis: axis, keepDims: true, result: maxes);

            //result = result.Apply(sum, (x, s) => Math.Max(x / s, 0.000001f), result: result);
            result = result.Div(sum, result: result);
            return result;
        }
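
Softmax is invariant to subtracting a constant from its input: exp(aᵢ − b) / Σⱼ exp(aⱼ − b) gives the same result for any b, so shifting by the per-axis maximum (as above) prevents overflow without changing the output. A minimal standalone C# sketch of the same computation on a flat double[] (illustration only, independent of the NN library used above):

    using System;
    using System.Linq;

    static class SoftmaxDemo
    {
        // Stable softmax over a flat array: shift by the max, exponentiate, normalize.
        static double[] Softmax(double[] a)
        {
            double max = a.Max();
            var exps = a.Select(x => Math.Exp(x - max)).ToArray();
            double sum = exps.Sum();
            return exps.Select(e => e / sum).ToArray();
        }

        static void Main()
        {
            var p = Softmax(new[] { 1.0, 2.0, 3.0 });
            Console.WriteLine(string.Join(", ", p)); // ≈ 0.0900, 0.2447, 0.6652
        }
    }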