Example #1
        /// <summary>
        /// Computes the gradient of the compiled term at the given point.
        /// </summary>
        /// <param name="arg">The point at which to differentiate.</param>
        /// <returns>A tuple, where the first item is the gradient at <paramref name="arg"/> and the second item is
        /// the value at <paramref name="arg"/>. That is, the second value is the same as running <see cref="Evaluate"/> on
        /// <paramref name="arg"/>.</returns>
        /// <remarks>The number at <c>arg[i]</c> is the value assigned to the variable <c>Variables[i]</c>.</remarks>
        public static Tuple<double[], double> Differentiate(this ICompiledTerm term, IReadOnlyList<double> arg)
        {
            var grad = new double[term.Variables.Count];
            var val  = term.Differentiate(arg, grad);

            return Tuple.Create(grad, val);
        }
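
For context, here is a minimal usage sketch of this extension. It assumes the AutoDiff-style API that the surrounding examples rely on (Variable, TermBuilder, operator overloads on Term, and a Compile extension); treat those names as assumptions rather than a documented contract.

    using System;
    using AutoDiff;

    class DifferentiateSketch
    {
        static void Main()
        {
            // f(x, y) = x^2 + 2*x*y
            var x = new Variable();
            var y = new Variable();
            var f = TermBuilder.Power(x, 2) + 2 * x * y;

            // compile the term over (x, y); Compile is assumed to be the
            // library's extension that fixes the variable ordering
            var compiled = f.Compile(x, y);

            // gradient and value at (x, y) = (3, 4)
            var result   = compiled.Differentiate(new double[] { 3, 4 });
            var gradient = result.Item1;   // (2*3 + 2*4, 2*3) = (14, 6)
            var value    = result.Item2;   // 3*3 + 2*3*4 = 33

            Console.WriteLine("value = {0}, gradient = ({1}, {2})", value, gradient[0], gradient[1]);
        }
    }
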
Example #2
        static double[] GradientDescent(ICompiledTerm func, double[] init, double stepSize, int iterations)
        {
            // clone the initial argument
            var x = (double[])init.Clone();

            // perform the iterations
            for (int i = 0; i < iterations; ++i)
            {
                // compute the gradient
                var gradient = func.Differentiate(x).Item1;

                // perform a descent step
                for (int j = 0; j < x.Length; ++j)
                    x[j] -= stepSize * gradient[j];
            }

            return x;
        }
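
A possible way to drive this helper, again assuming the AutoDiff-style Variable/TermBuilder/Compile API and that the GradientDescent method above is in scope; the target function, step size, and iteration count are illustrative only.

    using System;
    using AutoDiff;

    class GradientDescentSketch
    {
        static void Main()
        {
            // minimize f(x, y) = x^2 + y^2, whose minimum is at the origin
            var x = new Variable();
            var y = new Variable();
            var f = TermBuilder.Power(x, 2) + TermBuilder.Power(y, 2);

            var compiled = f.Compile(x, y);

            // start at (3, 4) and take 100 fixed-size steps
            var solution = GradientDescent(compiled, new double[] { 3, 4 }, stepSize: 0.1, iterations: 100);

            Console.WriteLine("x = {0:E2}, y = {1:E2}", solution[0], solution[1]);  // both close to zero
        }
    }
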
Example #3
        static double[] GradientDescent(ICompiledTerm func, double[] init, double stepSize, int iterations)
        {
            // clone the initial argument
            var x        = (double[])init.Clone();
            var gradient = new double[x.Length];

            // perform the iterations
            for (int i = 0; i < iterations; ++i)
            {
                // compute the gradient - fill the gradient array
                func.Differentiate(x, gradient);

                // perform a descent step
                for (int j = 0; j < x.Length; ++j)
                {
                    x[j] -= stepSize * gradient[j];
                }
            }

            return x;
        }
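
Compared with Example #2, this variant allocates the gradient buffer once, outside the loop, and lets Differentiate(x, gradient) fill it in place, so no new array is created per iteration. The iterates themselves are identical; it only trades the convenience of the tuple-returning overload for lower allocation pressure on long runs or high-dimensional problems.
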
Example #4
 public ParametricCompiledTerm(Term term, Variable[] variables, Variable[] parameters)
 {
     compiledTerm = term.Compile(variables.Concat(parameters).ToArray());
     Variables    = Array.AsReadOnly(variables.ToArray());
     Parameters   = Array.AsReadOnly(parameters.ToArray());
 }
Example #5
 public ParametricCompiledTerm(Term term, Variable[] variables, Variable[] parameters)
 {
     compiledTerm = term.Compile(variables.Concat(parameters).ToArray());
     Variables = Array.AsReadOnly(variables.ToArray());
     Parameters = Array.AsReadOnly(parameters.ToArray());
 }
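
The two constructors above wrap a term compiled over the concatenation of variables and parameters. Assuming the library exposes this through a Compile(variables, parameters) overload that returns a parametric term whose Evaluate and Differentiate take the variable values and the parameter values separately (an assumption inferred from this constructor, not a documented signature), usage might look like this:

    using System;
    using AutoDiff;

    class ParametricSketch
    {
        static void Main()
        {
            // f(x; a, b) = a*x^2 + b*x, with a and b supplied per call as parameters
            var x = new Variable();
            var a = new Variable();
            var b = new Variable();
            var f = a * TermBuilder.Power(x, 2) + b * x;

            // hypothetical parametric compile: x is the variable, (a, b) are parameters
            var parametric = f.Compile(new[] { x }, new[] { a, b });

            // evaluate and differentiate at x = 2 with a = 1, b = 3
            var value = parametric.Evaluate(new double[] { 2 }, new double[] { 1, 3 });        // 1*4 + 3*2 = 10
            var diff  = parametric.Differentiate(new double[] { 2 }, new double[] { 1, 3 });   // df/dx = 2*1*2 + 3 = 7

            Console.WriteLine("f = {0}, df/dx = {1}", value, diff.Item1[0]);
        }
    }
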
Example #6
        static void RunBenchmark(TextWriter resultWriter, TextWriter logWriter)
        {
            var fac = new CsvHelper.CsvFactory();

            using (var csvWriter = fac.CreateWriter(resultWriter))
            {
                csvWriter.WriteHeader<BenchmarkResult>();

                int[] sizes = { 1000, 2000, 3000, 4000, 5000, 6000, 7000, 8000, 9000, 10000 };
                foreach (int sz in sizes)
                {
                    var row = new BenchmarkResult();

                    var termsCount = sz;
                    var varsCount  = sz;
                    row.NumberOfTerms = termsCount;
                    row.NumberOfVars  = varsCount;
                    var grad       = new double[sz];


                    logWriter.WriteLine("Benchmark for {0} terms and {1} variables", termsCount, varsCount);

                    logWriter.Write("\tConstructing coefficients ...");
                    var coefficients = GenerateCoefficients(termsCount, varsCount);
                    logWriter.WriteLine(" done");

                    // generate variables
                    var vars = new Variable[varsCount];
                    for (var j = 0; j < sz; ++j)
                    {
                        vars[j] = new Variable();
                    }


                    logWriter.Write("\tGenerating input data ...");
                    var inputData = new double[1000][];
                    for (var j = 0; j < inputData.Length; ++j)
                    {
                        inputData[j] = RandomDoubles(varsCount);
                    }
                    logWriter.WriteLine(" done");
                    GC.Collect(GC.MaxGeneration, GCCollectionMode.Default, true);


                    ICompiledTerm compiledTerm = null;
                    row.CompileMilliseconds = MeasureMsec("Constructing compiled term", logWriter,
                                                          () => compiledTerm = ConstructTerm(coefficients, vars));

                    row.MillisecondsPerManualEval = MeasureMsecPerOp("manual evaluation",
                                                                     logWriter, inputData.Length, () => inputData.Sum(array => NativeEvaluate(coefficients, array)));

                    row.MillisecondsPerCompiledEval = MeasureMsecPerOp("AutoDiff compiled evaluation",
                                                                       logWriter, inputData.Length, () => inputData.Sum(array => compiledTerm.Evaluate(array)));

                    row.MillisecondsPerCompiledDiff = MeasureMsecPerOp("compiled differentiation",
                                                                       logWriter, inputData.Length, () =>
                    {
                        var sum = 0.0;
                        foreach (var array in inputData)
                        {
                            var val = compiledTerm.Differentiate(array, grad);
                            sum    += val + grad.Sum();
                        }
                        return sum;
                    });

                    csvWriter.WriteRecord(row);
                }
            }
        }
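
The benchmark depends on helpers that are not shown here (BenchmarkResult, GenerateCoefficients, ConstructTerm, NativeEvaluate, RandomDoubles, MeasureMsec, MeasureMsecPerOp). Purely as an illustration of the shape the call sites above imply, a MeasureMsecPerOp along these lines would fit; the body is a guess, not the benchmark's actual code.

    using System;
    using System.Diagnostics;
    using System.IO;

    static class BenchmarkHelpersSketch
    {
        // Hypothetical helper matching the call sites above: runs the workload once,
        // logs the elapsed time, and reports milliseconds per individual operation.
        public static double MeasureMsecPerOp(string name, TextWriter logWriter, int opCount, Func<double> workload)
        {
            logWriter.Write("\tMeasuring {0} ...", name);
            var stopwatch = Stopwatch.StartNew();
            var checksum  = workload();   // the returned sum keeps the work from being optimized away
            stopwatch.Stop();
            logWriter.WriteLine(" done ({0} ms, checksum {1})", stopwatch.ElapsedMilliseconds, checksum);
            return (double)stopwatch.ElapsedMilliseconds / opCount;
        }
    }
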
Example #7
 /// <summary>
 /// Computes the gradient of the compiled term at the given point.
 /// </summary>
 /// <param name="arg">The point at which to differentiate.</param>
 /// <returns>A tuple, where the first item is the gradient at <paramref name="arg"/> and the second item is
 /// the value at <paramref name="arg"/>. That is, the second value is the same as running <see cref="Evaluate"/> on
 /// <paramref name="arg"/>.</returns>
 /// <remarks>The number at <c>arg[i]</c> is the value assigned to the variable <c>Variables[i]</c>.</remarks>
 public static Tuple<double[], double> Differentiate(this ICompiledTerm term, params double[] arg)
 {
     return Differentiate(term, (IReadOnlyList<double>)arg);
 }
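
This overload only forwards to the IReadOnlyList version from Example #1, so its value is call-site convenience. With the compiled term from the earlier sketch in scope, the call reduces to:

    // 'compiled' is the ICompiledTerm from the Example #1 sketch
    var result = compiled.Differentiate(3.0, 4.0);   // same as passing new double[] { 3.0, 4.0 }
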
Example #8
 public ParametricCompiledTerm(Term term, IReadOnlyList<Variable> variables, IReadOnlyList<Variable> parameters)
 {
     compiledTerm = term.Compile(variables.Concat(parameters).ToArray());
     Variables    = variables.AsReadOnly();
     Parameters   = parameters.AsReadOnly();
 }
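
This final overload differs from Examples #4 and #5 only in accepting IReadOnlyList<Variable> instead of arrays. Since IReadOnlyList<T> has no built-in AsReadOnly method, the AsReadOnly() calls here presumably rely on an extension method defined elsewhere in the library.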