Example #1
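        /// <summary>
        /// Creates a function that estimates, by central finite difference, the derivative of
        /// <paramref name="output"/> with respect to a single entry of the shared tensor <paramref name="x"/>.
        /// The created function expects one argument for each given input, one int per dimension
        /// of x (selecting the entry to perturb) and one float for "epsilon".
        /// </summary>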
        public static FunctionBinder.ParamsFunction <X> FiniteDifference_ <X>(IList <IVar> inputs, Scalar <X> output, Tensor <X> .Shared x, IDictionary givens = null)
        {
            var epsilon = Scalar <X>("epsilon");
            var i_      = Scalar <int>("i");
            var j_      = Scalar <int>("j");
            var k_      = Scalar <int>("k");
            var indexes = new Scalar <int> .Var[0];

            // One index variable per dimension of x (ranks 1 to 3 are supported).
            if (x.NDim == 1)
            {
                indexes = new[] { i_ };
            }
            if (x.NDim == 2)
            {
                indexes = new[] { i_, j_ };
            }
            if (x.NDim == 3)
            {
                indexes = new[] { i_, j_, k_ };
            }

            var inputSet = new List <IVar>();

            foreach (var i in inputs)
            {
                inputSet.Add(i);
            }
            foreach (var i in indexes)
            {
                inputSet.Add(i);
            }
            inputSet.Add(epsilon);

            // eps is a one-hot tensor: epsilon at the indexed entry of x, zero elsewhere.
            var eps       = Op.OneHot(x.Shape, indexes, epsilon);
            var out_m_eps = (Scalar <X>)output.Patch(new Patch {
                [x] = x - eps
            });
            var out_p_eps = (Scalar <X>)output.Patch(new Patch {
                [x] = x + eps
            });
            // Central difference: (f(x + eps) - f(x - eps)) / (2 * epsilon).
            var delta = (out_p_eps - out_m_eps) / (Numeric <X> .Two * epsilon);

            return(Function(inputSet, delta, givens: givens));
        }
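A hypothetical usage sketch (the names loss, W, v, vValue and the delegate-style invocation of ParamsFunction are assumptions, not part of the snippet above):

            // Assuming a scalar loss built from a 2-D shared tensor W and an input v:
            var check = FiniteDifference_(new IVar[] { v }, loss, W);
            // Pass one value per input, one int per dimension of W, then epsilon
            // (the call syntax assumes ParamsFunction can be invoked like a delegate):
            var approx = check(vValue, 0, 1, 1e-4f);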
Example #2
        /// <summary>
        /// Creates a function that helps check the gradient backpropagated to a shared variable.
        /// The created function expects one argument for each given input and one float for "epsilon".
        /// It returns both the gradient estimated by finite differences and the one given by the <paramref name="computed"/> expression.
        /// </summary>
        /// <typeparam name="X">float</typeparam>
        /// <param name="inputs">The inputs of the graph. The created function will expect a value for each of these inputs</param>
        /// <param name="output">The function to derive</param>
        /// <param name="x">The gradient of x will be checked</param>
        /// <param name="computed">The gradient to be checked. By default the "Grad" operator is used.</param>
        /// <param name="givens">Optional substitutions applied when building the function</param>
        /// <returns>The test function</returns>
        public static FunctionBinder.ParamsFunction <X, X> RandomGradientCheck <X>(IList <IVar> inputs, Scalar <X> output, Scalar <X> x, Scalar <X> computed = null, IDictionary givens = null)
        {
            var eps      = Scalar <X>("epsilon");
            var inputSet = new List <IVar>();

            foreach (var i in inputs)
            {
                inputSet.Add(i);
            }
            inputSet.Add(eps);

            var x_m_eps = x - eps;
            var x_p_eps = x + eps;

            if (x.Name != null)
            {
                x_m_eps.Name = x.Name + "_m_eps";
                x_p_eps.Name = x.Name + "_p_eps";
            }

            var out_m_eps = (Scalar <X>)output.Patch(new Patch {
                [x] = x_m_eps
            });
            var out_p_eps = (Scalar <X>)output.Patch(new Patch {
                [x] = x_p_eps
            });

            if (output.Name != null && out_m_eps != output)
            {
                out_m_eps.Name = output.Name + "_m_eps";
                out_p_eps.Name = output.Name + "_p_eps";
            }

            var finite = (out_p_eps - out_m_eps) / (Numeric <X> .Two * eps);

            if (finite.IsZero)
            {
                Trace.WriteLine($"The given output {output} doesn't depend on {x}", "Warning");
            }
            finite.Name = nameof(finite);

            computed = computed ?? Grad(output, x);
            return(Function(inputSet, output: (finite, computed), givens: givens));
        }
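A hypothetical usage sketch for this scalar overload (the names loss, a, aValue and the call syntax are assumptions):

            // Compare the finite-difference estimate against Grad(loss, a):
            var check = RandomGradientCheck(new IVar[] { a }, loss, a);
            // The two results should agree up to O(epsilon^2) for a smooth loss
            // (assuming ParamsFunction returns the (finite, computed) pair as a tuple):
            var (finite, computed) = check(aValue, 1e-4f);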
Example #3
        /// <summary>
        /// Creates a function that helps check the gradient backpropagated to a shared variable.
        /// The created function expects one argument for each given input and one float for "epsilon".
        /// It returns both the gradient estimated by finite differences and the one given by the <paramref name="computed"/> expression.
        /// </summary>
        /// <remarks>
        /// "epsilon" is the length of the step used for the finite difference.
        /// Each call checks the gradient along a different random direction.
        /// </remarks>
        /// <typeparam name="X">float</typeparam>
        /// <param name="inputs">The inputs of the graph. The created function will expect a value for each of these inputs</param>
        /// <param name="output">The function to derive</param>
        /// <param name="x">The gradient of x will be checked</param>
        /// <param name="computed">The gradient to be checked. By default the "Grad" operator is used.</param>
        /// <param name="givens">Optional substitutions applied when building the function</param>
        /// <returns>The test function</returns>
        public static FunctionBinder.ParamsFunction <X, X> RandomGradientCheck <X>(IList <IVar> inputs, Scalar <X> output, Tensor <X> x, Tensor <X> computed = null, IDictionary givens = null)
        {
            var epsilon  = Scalar <X>("epsilon");
            var inputSet = new List <IVar>();

            foreach (var i in inputs)
            {
                inputSet.Add(i);
            }
            inputSet.Add(epsilon);

            var eps     = Random.Uniform(-epsilon, epsilon, x.Shape); eps.Name = nameof(eps);
            var x_m_eps = x - eps;
            var x_p_eps = x + eps;

            if (x.Name != null)
            {
                x_m_eps.Name = x.Name + "_m_eps";
                x_p_eps.Name = x.Name + "_p_eps";
            }

            var out_m_eps = (Scalar <X>)output.Patch(new Patch {
                [x] = x_m_eps
            });

            out_m_eps.Name = output.Name + "_m_eps";

            var out_p_eps = (Scalar <X>)output.Patch(new Patch {
                [x] = x_p_eps
            });

            out_p_eps.Name = output.Name + "_p_eps";

            // Along the random direction eps, f(x + eps) - f(x - eps) ≈ 2 · <eps, grad f(x)>,
            // so "finite" should match "backpropagated" below for small epsilon.
            var finite = (out_p_eps - out_m_eps); finite.Name = nameof(finite);

            computed = computed ?? Grad(output, x);
            var backpropagated = Numeric <X> .Two * Sum(eps * computed);

            return(Function(inputSet, output: (finite, backpropagated), givens: givens));
        }
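A hypothetical usage sketch for this tensor overload (the names loss, W, v, vValue and the helper AssertClose are assumptions):

            // finite ≈ f(x + eps) - f(x - eps), backpropagated = 2 * Sum(eps * Grad(f, W));
            // for small epsilon the two returned values should nearly match.
            var check = RandomGradientCheck(new IVar[] { v }, loss, W);
            var (finite, backpropagated) = check(vValue, 1e-3f);
            AssertClose(finite, backpropagated);   // hypothetical test helper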
            // Clones this n-ary node: fresh bound variables are created and substituted
            // into the abstraction through a Patch, so the clone shares no Var with the original.
            public override NAry Clone(IReadOnlyList <IExpr> inputs)
            {
                var newVars = Vars.Select(v => new Scalar <Type> .Var(v.Name)).ToArray();
                var patch   = new Patch();

                for (int i = 0; i < Vars.Length; ++i)
                {
                    patch[Vars[i]] = newVars[i];
                }

                var abstraction = (Scalar <Type>)Abstraction.Patch(patch);

                return((NAry)Create(inputs.Cast <Tensor <Type> >().ToArray(), newVars, abstraction));
            }
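For context, a minimal sketch of the Patch-based substitution that Clone relies on (the float instantiation and the variable names are illustrative assumptions):

                // Build f(v) = v * v, then rebind v to a fresh variable.
                var v     = new Scalar <float> .Var("v");
                var f     = v * v;
                var fresh = new Scalar <float> .Var("v2");
                // Patch replaces every occurrence of v inside f with fresh:
                var g     = (Scalar <float>)f.Patch(new Patch { [v] = fresh });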