Example No. 1
0
        /// <summary>
        /// Computes the gradient of the compiled term at the given point.
        /// </summary>
        /// <param name="term">The compiled term to differentiate.</param>
        /// <param name="arg">The point at which to differentiate.</param>
        /// <returns>A tuple whose first item is the gradient at <paramref name="arg"/> and whose
        /// second item is the value at <paramref name="arg"/> — the same value produced by running
        /// <see cref="Evaluate"/> on <paramref name="arg"/>.</returns>
        /// <remarks>The number at <c>arg[i]</c> is the value assigned to the variable <c>Variables[i]</c>.</remarks>
        public static Tuple <double[], double> Differentiate(this ICompiledTerm term, IReadOnlyList <double> arg)
        {
            // One gradient slot per variable; the two-argument overload fills it in
            // and returns the term's value at the same time.
            var gradient = new double[term.Variables.Count];
            var value    = term.Differentiate(arg, gradient);

            return Tuple.Create(gradient, value);
        }
        /// <summary>
        /// Differentiates the underlying compiled term with respect to the variables only,
        /// treating <paramref name="parameters"/> as fixed values appended after the variables.
        /// </summary>
        /// <param name="arg">Values for the variables.</param>
        /// <param name="parameters">Values for the parameters (held constant).</param>
        /// <returns>A tuple of (gradient w.r.t. the variables, value at the point).</returns>
        public Tuple <double[], double> Differentiate(double[] arg, double[] parameters)
        {
            // The compiled term expects variables followed by parameters in one flat vector.
            var fullInput = arg.Concat(parameters).ToArray();
            var result    = compiledTerm.Differentiate(fullInput);

            // Keep only the gradient entries that correspond to the variables.
            var gradient = new double[arg.Length];
            Array.Copy(result.Item1, gradient, gradient.Length);

            return Tuple.Create(gradient, result.Item2);
        }
        /// <summary>
        /// Runs a fixed number of gradient-descent steps on the given compiled term.
        /// </summary>
        /// <param name="func">The compiled term to minimize.</param>
        /// <param name="init">The starting point (not modified).</param>
        /// <param name="stepSize">The step size applied at each iteration.</param>
        /// <param name="iterations">How many descent steps to take.</param>
        /// <returns>The point reached after the last iteration.</returns>
        static double[] GradientDescent(ICompiledTerm func, double[] init, double stepSize, int iterations)
        {
            // work on a copy so the caller's starting point stays intact
            var point = (double[])init.Clone();

            for (var iter = 0; iter < iterations; ++iter)
            {
                // gradient at the current point (fresh array per iteration)
                var grad = func.Differentiate(point).Item1;

                // step against the gradient
                for (var k = 0; k < point.Length; ++k)
                {
                    point[k] -= stepSize * grad[k];
                }
            }

            return point;
        }
        /// <summary>
        /// Differentiates the underlying compiled term at <paramref name="arg"/> with the given
        /// <paramref name="parameters"/> held fixed, writing the gradient with respect to the
        /// variables into <paramref name="grad"/> and returning the term's value.
        /// </summary>
        /// <param name="arg">Values for the variables; must match <c>Variables.Count</c>.</param>
        /// <param name="parameters">Values for the parameters; must match <c>Parameters.Count</c>.</param>
        /// <param name="grad">Receives the gradient; must match <c>Variables.Count</c>.</param>
        /// <returns>The value of the term at the given point.</returns>
        public double Differentiate(IReadOnlyList <double> arg, IReadOnlyList <double> parameters, IList <double> grad)
        {
            // Validate in the same order as before so callers observe identical failures.
            Guard.NotNull(arg, nameof(arg));
            Guard.MustHold(arg.Count == Variables.Count, ErrorMessages.ArgLength);
            Guard.NotNull(grad, nameof(grad));
            Guard.MustHold(grad.Count == Variables.Count, ErrorMessages.GradLength);
            Guard.NotNull(parameters, nameof(parameters));
            Guard.MustHold(parameters.Count == Parameters.Count, ErrorMessages.ParamLength);

            // The compiled term takes variables followed by parameters in one flat vector.
            var fullInput = arg.Concat(parameters).ToArray();
            var fullGrad  = new double[fullInput.Length];
            var value     = compiledTerm.Differentiate(fullInput, fullGrad);

            // Copy back only the gradient entries that belong to the variables.
            for (var idx = 0; idx < arg.Count; ++idx)
            {
                grad[idx] = fullGrad[idx];
            }

            return value;
        }
Example No. 5
0
        /// <summary>
        /// Runs gradient descent for a fixed number of iterations, reusing a single
        /// gradient buffer to avoid per-iteration allocations.
        /// </summary>
        /// <param name="func">The compiled term to minimize.</param>
        /// <param name="init">The starting point (not modified).</param>
        /// <param name="stepSize">The step size applied at each iteration.</param>
        /// <param name="iterations">How many descent steps to take.</param>
        /// <returns>The point reached after the last iteration.</returns>
        static double[] GradientDescent(ICompiledTerm func, double[] init, double stepSize, int iterations)
        {
            // copy the starting point so the caller's array is left untouched
            var point  = (double[])init.Clone();
            var buffer = new double[point.Length];

            for (var iter = 0; iter < iterations; ++iter)
            {
                // fill the preallocated gradient buffer in place
                func.Differentiate(point, buffer);

                // step against the gradient
                for (var k = 0; k < point.Length; ++k)
                {
                    point[k] -= stepSize * buffer[k];
                }
            }

            return point;
        }
Example No. 6
0
        /// <summary>
        /// Runs the evaluation/differentiation benchmark over a range of problem sizes and
        /// writes one CSV row per size to <paramref name="resultWriter"/>, logging progress
        /// to <paramref name="logWriter"/>.
        /// </summary>
        /// <param name="resultWriter">Destination for the CSV benchmark results.</param>
        /// <param name="logWriter">Destination for human-readable progress messages.</param>
        static void RunBenchmark(TextWriter resultWriter, TextWriter logWriter)
        {
            var fac = new CsvHelper.CsvFactory();

            using (var csvWriter = fac.CreateWriter(resultWriter))
            {
                // NOTE(review): recent CsvHelper versions require NextRecord() after
                // WriteHeader/WriteRecord to terminate each row — confirm against the
                // CsvHelper version this project references.
                csvWriter.WriteHeader <BenchmarkResult>();

                // Each size is used for both the number of terms and the number of variables.
                int[] sizes = { 1000, 2000, 3000, 4000, 5000, 6000, 7000, 8000, 9000, 10000 };
                foreach (int sz in sizes)
                {
                    var row = new BenchmarkResult();

                    var termsCount = sz; row.NumberOfTerms = termsCount;
                    var varsCount  = sz; row.NumberOfVars = varsCount;
                    // gradient buffer reused by the differentiation benchmark below
                    var grad       = new double[sz];


                    logWriter.WriteLine("Benchmark for {0} terms and {1} variables", termsCount, varsCount);

                    logWriter.Write("\tConstructing coefficients ...");
                    var coefficients = GenerateCoefficients(termsCount, varsCount);
                    logWriter.WriteLine(" done");

                    // generate variables
                    var vars = new Variable[varsCount];
                    for (var j = 0; j < sz; ++j)
                    {
                        vars[j] = new Variable();
                    }


                    // 1000 random input vectors, one value per variable each
                    logWriter.Write("\tGenerating input data ...");
                    var inputData = new double[1000][];
                    for (var j = 0; j < inputData.Length; ++j)
                    {
                        inputData[j] = RandomDoubles(varsCount);
                    }
                    logWriter.WriteLine(" done");
                    // full blocking collection so GC pauses don't pollute the timings below
                    GC.Collect(GC.MaxGeneration, GCCollectionMode.Default, true);


                    // compile once, then time evaluation/differentiation over all input vectors
                    ICompiledTerm compiledTerm = null;
                    row.CompileMilliseconds = MeasureMsec("Constructing compiled term", logWriter,
                                                          () => compiledTerm = ConstructTerm(coefficients, vars));

                    row.MillisecondsPerManualEval = MeasureMsecPerOp("manual evaluation",
                                                                     logWriter, inputData.Length, () => inputData.Sum(array => NativeEvaluate(coefficients, array)));

                    row.MillisecondsPerCompiledEval = MeasureMsecPerOp("AutoDiff compiled evaluation",
                                                                       logWriter, inputData.Length, () => inputData.Sum(array => compiledTerm.Evaluate(array)));

                    row.MillisecondsPerCompiledDiff = MeasureMsecPerOp("compiled differentiation",
                                                                       logWriter, inputData.Length, () =>
                    {
                        // accumulate value + gradient entries so the work cannot be
                        // dead-code-eliminated by the JIT
                        var sum = 0.0;
                        foreach (var array in inputData)
                        {
                            var val = compiledTerm.Differentiate(array, grad);
                            sum    += val + grad.Sum();
                        }
                        return(sum);
                    });

                    csvWriter.WriteRecord(row);
                }
            }
        }