Example #1
0
        /// <summary>
        /// Numerically stable shortcut of Log(Sum(Exp(a), axis, keepsDim).
        /// </summary>
        /// <summary>
        /// Numerically stable equivalent of Log(Sum(Exp(a), axis, keepDims)):
        /// the per-axis maximum is factored out before exponentiating so Exp cannot overflow.
        /// </summary>
        public static Array <Real> LogSumExp(Array <Real> a, int axis = -1, bool keepDims = false, Array <Real> result = null)
        {
            // Normalize negative axes (e.g. -1 means the last dimension).
            if (axis < 0)
            {
                axis += a.NDim;
            }

            // Shift by the max along the reduction axis, then sum the exponentials.
            var shift = NN.Max(a, axis: axis, keepDims: true);
            var total = NN.Exp(a - shift).Sum(axis: axis, keepDims: true, result: result);

            // log(sum(exp(a - shift))) + shift == log(sum(exp(a)))
            result = Apply(total, shift, (x, s) => s + (Real)Math.Log(x), result: result);
            return keepDims ? result : result.Reshape(GetAggregatorResultShape(a, axis, keepDims));
        }
Example #2
0
        /// <summary>
        /// http://aelag.com/translation-of-theano-softmax-function
        /// </summary>
        /// <param name="a"></param>
        /// <param name="axis">The axis to compute the Softmax along, like in the Max function. Default value mimics Theano behavior</param>
        /// <param name="result"></param>
        /// <returns></returns>
        /// <summary>
        /// Softmax along <paramref name="axis"/>, computed in a numerically stable way
        /// (the per-axis maximum is subtracted before exponentiation).
        /// http://aelag.com/translation-of-theano-softmax-function
        /// </summary>
        /// <param name="a">Input array.</param>
        /// <param name="axis">The axis to compute the Softmax along, like in the Max function. Default value mimics Theano behavior</param>
        /// <param name="result">Optional output buffer; allocated by the callees when null.</param>
        /// <param name="buffer">Optional scratch buffer holding the per-axis maxima; it is reused for the normalization sums.</param>
        /// <returns>The softmax of <paramref name="a"/> along <paramref name="axis"/>.</returns>
        public static Array <Real> Softmax(Array <Real> a, int axis = -1, Array <Real> result = null, Array <Real> buffer = null)
        {
            // Subtract the per-axis maximum so Exp cannot overflow.
            var axisMax  = NN.Max(a, axis: axis, keepDims: true, result: buffer);
            var centered = a.Sub(axisMax, result: result);

            result = NN.Exp(centered, result: result);

            // Reuse the max buffer for the sums — both are keepDims reductions
            // along the same axis, so the shapes match.
            var norm = NN.Sum(result, axis: axis, keepDims: true, result: axisMax);

            result = result.Div(norm, result: result);
            return result;
        }
Example #3
0
        // "fast" softmax for 1D and 2D arrays
        public static Array <Real> Softmax_(Array <Real> a, Array <Real> result = null)
        {
            if (a.Shape.Length > 2)
            {
                throw new RankException(string.Format("Must be 1-d or 2-d tensor, got {0}-d with shape ({1}).", a.Shape.Length, string.Join(", ", a.Shape)));
            }
            if (result == null)
            {
                result = Zeros <Real>(a.Shape);
            }
            else
            {
                result.AssertOfShape(a);
            }

            if (a.Shape.Length == 1)
            {
                var max = a.Max();
                result = NN.Exp(a - max, result: result);
            }
            else
            {
                var  maxes = NN.Zeros <Real>(a.Shape[0], 1);
                var  vMax = maxes.Values;
                int  off = a.Offset, offX;
                int  incX = a.Stride[0], incY = a.Stride[1];
                int  nX = a.Shape[0], nY = a.Shape[1];
                Real max = Real.NegativeInfinity;
                var  v   = a.Values;
                for (int i = 0; i < nX; ++i)
                {
                    offX = off;
                    max  = Real.NegativeInfinity;
                    for (int j = 0; j < nY; ++j)
                    {
                        max  = Math.Max(v[off], max);
                        off += incY;
                    }
                    off     = offX + incX;
                    vMax[i] = max;
                }
                result = NN.Exp(a - maxes, result: result);
            }
            var sum = NN.Sum(result, axis: a.Shape.Length - 1, keepDims: true);

            result = result.Div(sum, result: result);
            //result = result.Apply(sum, (x, s) => Math.Max(x / s, 0.000001f), result: result);
            return(result);
        }