Example #1
        public static void Test()
        {
            // Objective f(x) = cos(x) + 1, which has its minimum f(pi) = 0 on [0, 6].
            GSLFunctions.DToD dtodFnc = delegate(double x)
            {
                return Math.Cos(x) + 1.0d;
            };

            // Brent solver: xValue starts at 2 and receives the located minimum.
            Minimization mini = new Minimization(Minimization.SolverType.Brent);
            double xValue = 2;
            mini.Solve(dtodFnc, 0, 6, 0.001, 0, 100, ref xValue);
            Console.WriteLine(xValue + " : " + mini.Iteration());

            // Golden-section solver on the same function, for comparison.
            mini = new Minimization(Minimization.SolverType.GoldenSection);
            xValue = 2;
            mini.Solve(dtodFnc, 0, 6, 0.001, 0, 100, ref xValue);
            Console.WriteLine(xValue + " : " + mini.Iteration());
        }
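
The same pattern should work for any smooth single-variable function. Below is a minimal sketch under the assumption that the positional arguments of Solve are, as in the calls above, the search interval, the tolerances, and an iteration cap, with xValue acting as the initial guess and receiving the result; the TestQuadratic method and the quadratic objective are illustrative, not part of the original example.

        public static void TestQuadratic()
        {
            // Hypothetical variation of the example above: f(x) = (x - 3)^2, minimum at x = 3.
            GSLFunctions.DToD f = delegate(double x)
            {
                return (x - 3.0d) * (x - 3.0d);
            };

            Minimization mini = new Minimization(Minimization.SolverType.Brent);
            double xValue = 1;
            mini.Solve(f, 0, 6, 0.001, 0, 100, ref xValue);
            Console.WriteLine(xValue + " : " + mini.Iteration());
        }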
Example #2
        private void SteepestDescent()
        {
            // maxIter, multifunc, _point and alpha are fields of the enclosing class.
            Minimization minimization = new Minimization()
            {
                MaxIter = maxIter,
            };
            int      n  = multifunc.N;
            Single1D p  = new Single1D(n);   // current point
            Single1D df = new Single1D(n);   // buffer for the derivatives

            // Start the descent from the stored point.
            for (int i = 0; i < n; i++)
            {
                p[i] = _point[i];
            }
            // The callback returns the function value at p and is expected to fill df
            // with the derivatives; alpha is the step size.
            minimization.GradientDescent(p, df, (iter) =>
            {
                return multifunc.Calculate(p, df);
            }, alpha);
            Console.WriteLine();
        }
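
Example #2 depends on fields of its enclosing class (maxIter, multifunc, _point, alpha) that the snippet does not show. A minimal sketch of what that surrounding state could look like follows; the QuadraticBowl type and the field values are hypothetical stand-ins, not part of the original class.

        // Hypothetical stand-ins for the state Example #2 expects on its class.
        private class QuadraticBowl
        {
            public int N { get { return 2; } }

            // Returns f(p) = p0^2 + p1^2 and writes the gradient into df.
            public float Calculate(Single1D p, Single1D df)
            {
                df[0] = 2f * p[0];
                df[1] = 2f * p[1];
                return p[0] * p[0] + p[1] * p[1];
            }
        }

        private readonly QuadraticBowl multifunc = new QuadraticBowl();
        private readonly float[] _point  = { 3f, -4f };  // starting point
        private readonly float   alpha   = 0.1f;         // step size (learning rate)
        private readonly int     maxIter = 100;          // iteration cap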
Example #3
        public void Learn(
            SampleList samples, CalculationArguments arguments)
        // samples = yjks
        {
            arguments.reporter?.WriteStart($"Learning the network using a subset of {samples.Count} random samples...");
            Stopwatch timer = new Stopwatch();

            timer.Start();

            int nSamples      = samples.Count; // number of sample rows
            int nCoefficients = CoefficientCount();
            // Current biases and weights of the neurons in this network:
            Single1D coefficients = new Single1D(nCoefficients);
            // The derivatives of the cost with respect to the biases and weights:
            Single1D derivatives = new Single1D(nCoefficients);
            Single1D velocities  = new Single1D(nCoefficients);

            velocities.Clear();
            MeasurementList measurements = new MeasurementList(nSamples, Last.Count);

            GetCoefficients(coefficients, 0);
            Minimization minimization = new Minimization()
            {
                MaxIter = arguments.settings.MaxIter,
                Eps     = arguments.settings.Epsilon,
                Tol     = arguments.settings.Tolerance,
            };
            float finalCost = minimization.MomentumBasedGradientDescent(coefficients, derivatives, velocities,
                                                                        (iter) =>
            {
                SetCoefficients(coefficients, 0);
                arguments.reporter?.ReportCoefficients(coefficients);
                float cost = GetCostAndDerivatives(samples, derivatives, measurements, arguments);
                arguments.reporter?.ReportCostAndDerivatives(cost, derivatives, measurements);
                return cost;
            }, arguments.settings.LearningRate, arguments.settings.MomentumCoefficient);

            arguments.reporter?.WriteEnd($"The network has learned in {timer.Elapsed.TotalSeconds} s, and the final cost value is {finalCost:F4}.");
        }
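
The update that MomentumBasedGradientDescent applies to coefficients is not shown in this example. Below is a minimal sketch of one classical momentum step, assuming the conventional rule v ← μ·v − η·∂C/∂w followed by w ← w + v; the MomentumStep helper is purely illustrative and is not the library's verified implementation.

        // Hypothetical helper illustrating one classical momentum step; the actual
        // Minimization.MomentumBasedGradientDescent implementation may differ.
        private static void MomentumStep(
            float[] coefficients, float[] derivatives, float[] velocities,
            float learningRate, float momentumCoefficient)
        {
            for (int i = 0; i < coefficients.Length; i++)
            {
                // v <- momentum * v - learningRate * dC/dw;  w <- w + v
                velocities[i]    = momentumCoefficient * velocities[i]
                                   - learningRate * derivatives[i];
                coefficients[i] += velocities[i];
            }
        }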