Example #1
        public void RelativeConvergenceConstructorTest()
        {
            var criteria = new RelativeConvergence(iterations: 0, tolerance: 0.1);

            int progress = 1;

            do
            {
                // Do some processing...


                // Update current iteration information:
                criteria.NewValue = 12345.6 / progress++;
            } while (!criteria.HasConverged);


            // The method will converge after 11 iterations, once the
            // relative change (1/11 ≈ 0.09) first drops below the 0.1
            // tolerance, with a final value of 1234.56:

            int    iterations = criteria.CurrentIteration; // 11
            double value      = criteria.OldValue;         // 1234.56


            Assert.AreEqual(11, criteria.CurrentIteration);
            Assert.AreEqual(1234.56, criteria.OldValue);
        }
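The comments above can be checked by hand: with values 12345.6 / k, the relative change between consecutive iterations is 1 / (k + 1), which first drops below the 0.1 tolerance when moving from iteration 10 to 11. A minimal standalone sketch of that check follows; the formula |old - new| <= tolerance * |old| is an assumption inferred from the asserted values, not the library's verbatim implementation.

    // Hypothetical relative-change test matching the behavior asserted above:
    static bool HasConverged(double oldValue, double newValue, double tolerance)
    {
        return Math.Abs(oldValue - newValue) <= tolerance * Math.Abs(oldValue);
    }

    // At iteration 11: old = 12345.6 / 10 = 1234.56, new = 12345.6 / 11,
    // so the relative change is 1 / 11 ≈ 0.0909 <= 0.1 and the loop stops.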
Example #2
 /// <summary>
 ///   Creates a new <see cref="ExpectationMaximization{TObservation}"/> algorithm.
 /// </summary>
 ///
 /// <param name="coefficients">The initial coefficient values.</param>
 /// <param name="distributions">The initial component distributions.</param>
 ///
 public ExpectationMaximization(double[] coefficients,
                                IFittableDistribution<TObservation>[] distributions)
 {
     Coefficients  = coefficients;
     Distributions = distributions;
     Convergence   = new RelativeConvergence(0, 1e-3);
     Gamma         = new double[coefficients.Length][];
 }
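A minimal usage sketch for this constructor; it assumes Accord.NET's NormalDistribution (which implements IFittableDistribution<double>), and the weights and component parameters are illustrative placeholders only.

    // Two-component Gaussian mixture with equal starting weights:
    var em = new ExpectationMaximization<double>(
        coefficients:  new[] { 0.5, 0.5 },
        distributions: new IFittableDistribution<double>[]
        {
            new NormalDistribution(mean: -1, stdDev: 1),
            new NormalDistribution(mean: +1, stdDev: 1)
        });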
Example #3
        private static TrainingSessionResult Optimize(
            SequentialNetwork network,
            BatchesCollection miniBatches,
            int epochs, float dropout,
            [NotNull] WeightsUpdater updater,
            [CanBeNull] IProgress<BatchProgress> batchProgress,
            [CanBeNull] IProgress<TrainingProgressEventArgs> trainingProgress,
            [CanBeNull] ValidationDataset validationDataset,
            [CanBeNull] TestDataset testDataset,
            CancellationToken token)
        {
            // Setup
            DateTime startTime = DateTime.Now;
            List<DatasetEvaluationResult> validationReports = new List<DatasetEvaluationResult>();
            List<DatasetEvaluationResult> testReports = new List<DatasetEvaluationResult>();

            TrainingSessionResult PrepareResult(TrainingStopReason reason, int loops)
            {
                return new TrainingSessionResult(reason, loops, DateTime.Now.Subtract(startTime).RoundToSeconds(), validationReports, testReports);
            }

            // Convergence manager for the validation dataset
            RelativeConvergence convergence = validationDataset == null
                ? null
                : new RelativeConvergence(validationDataset.Tolerance, validationDataset.EpochsInterval);

            // Optional batch monitor
            BatchProgressMonitor batchMonitor = batchProgress == null ? null : new BatchProgressMonitor(miniBatches.Count, batchProgress);

            // Create the training batches
            for (int i = 0; i < epochs; i++)
            {
                // Shuffle the training set
                miniBatches.CrossShuffle();

                // Gradient descent over the current batches
                for (int j = 0; j < miniBatches.BatchesCount; j++)
                {
                    if (token.IsCancellationRequested)
                    {
                        return PrepareResult(TrainingStopReason.TrainingCanceled, i);
                    }
                    network.Backpropagate(miniBatches.Batches[j], dropout, updater);
                    batchMonitor?.NotifyCompletedBatch(miniBatches.Batches[j].X.GetLength(0));
                }
                batchMonitor?.Reset();

                // Check for overflows
                if (!Parallel.For(0, network._Layers.Length, (j, state) =>
                {
                    if (network._Layers[j] is WeightedLayerBase layer && !layer.ValidateWeights())
                    {
                        state.Break();
                    }
                }).IsCompleted)
                {
                    return PrepareResult(TrainingStopReason.NumericOverflow, i);
                }
Example #4
        public void RelativeConvergenceConstructorTest4()
        {
            var criteria = new RelativeConvergence(iterations: 0, tolerance: 1e-5, startValue: 1);

            criteria.CurrentIteration = -2;

            criteria.NewValue /= 10.0;
            Assert.AreEqual(0.9, criteria.Delta, 1e-10);
            Assert.AreEqual(0.9, criteria.RelativeDelta, 1e-10);
            Assert.AreEqual(0.1, criteria.NewValue, 1e-10);
            Assert.AreEqual(1, criteria.OldValue, 1e-10);
            Assert.IsFalse(criteria.HasConverged);

            criteria.NewValue /= 10.0;
            Assert.AreEqual(0.09, criteria.Delta, 1e-10);
            Assert.AreEqual(0.9, criteria.RelativeDelta, 1e-10);
            Assert.AreEqual(0.01, criteria.NewValue, 1e-10);
            Assert.AreEqual(0.1, criteria.OldValue, 1e-10);
            Assert.IsFalse(criteria.HasConverged);

            criteria.NewValue /= 10.0;
            Assert.AreEqual(0.009, criteria.Delta, 1e-10);
            Assert.AreEqual(0.9, criteria.RelativeDelta, 1e-10);
            Assert.AreEqual(0.001, criteria.NewValue, 1e-10);
            Assert.AreEqual(0.01, criteria.OldValue, 1e-10);
            Assert.IsFalse(criteria.HasConverged);

            criteria.NewValue = criteria.NewValue * 1e-3;
            Assert.AreEqual(0.000999, criteria.Delta, 1e-10);
            Assert.AreEqual(0.999, criteria.RelativeDelta, 1e-10);
            Assert.AreEqual(1E-06, criteria.NewValue, 1e-10);
            Assert.AreEqual(0.001, criteria.OldValue, 1e-10);
            Assert.IsFalse(criteria.HasConverged);

            criteria.NewValue = criteria.NewValue - (criteria.NewValue * 1e-3);
            Assert.AreEqual(1e-9, criteria.Delta, 1e-10);
            Assert.AreEqual(1e-3, criteria.RelativeDelta, 1e-10);
            Assert.AreEqual(9.9899999999999988E-07, criteria.NewValue, 1e-10);
            Assert.AreEqual(1E-06, criteria.OldValue, 1e-10);
            Assert.IsFalse(criteria.HasConverged);

            criteria.NewValue = criteria.NewValue - (criteria.NewValue * 1e-5);
            Assert.AreEqual(9.999999999E-11, criteria.Delta, 1e-10);
            Assert.AreEqual(1e-5, criteria.RelativeDelta, 1e-10);
            Assert.AreEqual(9.9899000999999985E-07, criteria.NewValue, 1e-10);
            Assert.AreEqual(9.9899999999999988E-07, criteria.OldValue, 1e-10);
            Assert.IsFalse(criteria.HasConverged);

            criteria.NewValue = criteria.NewValue - (criteria.NewValue * 1e-6);
            Assert.AreEqual(9.999999999E-11, criteria.Delta, 1e-10);
            Assert.AreEqual(1.0000000000115289E-06, criteria.RelativeDelta, 1e-10);
            Assert.AreEqual(9.9898901100998984E-07, criteria.NewValue, 1e-10);
            Assert.AreEqual(9.9899000999999985E-07, criteria.OldValue, 1e-10);
            Assert.IsTrue(criteria.HasConverged);
        }
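Every assertion in this test is consistent with Delta being the absolute change and RelativeDelta that change taken relative to the previous value; a sketch under that assumption:

    // Assumed definitions, reproducing each step asserted above:
    static (double delta, double relativeDelta) Deltas(double oldValue, double newValue)
    {
        double delta = Math.Abs(oldValue - newValue);   // e.g. |0.001 - 1e-6| = 0.000999
        return (delta, delta / Math.Abs(oldValue));     // e.g. 0.000999 / 0.001 = 0.999
    }

    // In the final step the relative delta is about 1e-6 <= tolerance (1e-5),
    // which is when HasConverged first becomes true.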
Example #5
        private void init(int numberOfVariables)
        {
            convergence = new RelativeConvergence();

            gradient         = new double[numberOfVariables];
            previousGradient = new double[numberOfVariables];
            weightsUpdates   = new double[numberOfVariables];

            // Initialize steps
            Reset(initialStep);
        }
        /// <summary>
        ///   Creates a new <see cref="ResilientBackpropagation"/> function optimizer.
        /// </summary>
        /// 
        /// <param name="numberOfVariables">The number of parameters in the function to be optimized.</param>
        /// 
        public ResilientBackpropagation(int numberOfVariables)
            : base(numberOfVariables)
        {
            convergence = new RelativeConvergence();

            gradient = new double[numberOfVariables];
            previousGradient = new double[numberOfVariables];
            weightsUpdates = new double[numberOfVariables];

            // Initialize steps
            Reset(initialStep);
        }
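The three arrays initialized above are exactly what the Rprop update rule needs: it compares the signs of the current and previous partial derivatives to grow or shrink a per-weight step size. A compact sketch of that rule (the Rprop- variant), with the customary factors 1.2 and 0.5 and the step bounds taken as assumptions:

    // One Rprop step for a single weight (hypothetical helper):
    static double RpropStep(double gradient, double previousGradient, ref double stepSize)
    {
        double sign = gradient * previousGradient;
        if (sign > 0)
            stepSize = Math.Min(stepSize * 1.2, 50.0);   // same direction: accelerate
        else if (sign < 0)
            stepSize = Math.Max(stepSize * 0.5, 1e-6);   // sign flipped: back off
        return -Math.Sign(gradient) * stepSize;          // move against the gradient
    }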
Example #8
        /// <summary>
        /// Called when the <see cref="IOptimizationMethod{TInput, TOutput}.NumberOfVariables" /> property has changed.
        /// </summary>
        ///
        /// <param name="numberOfVariables">The number of variables.</param>
        ///
        protected override void OnNumberOfVariablesChanged(int numberOfVariables)
        {
            base.OnNumberOfVariablesChanged(numberOfVariables);

            convergence = new RelativeConvergence();

            gradient         = new double[numberOfVariables];
            previousGradient = new double[numberOfVariables];
            weightsUpdates   = new double[numberOfVariables];

            // Initialize steps
            Reset(initialStep);
        }
Example #9
        public void RelativeConvergenceConstructorTest3()
        {
            var criteria = new RelativeConvergence(iterations: 1, tolerance: 1e-5, startValue: 1);

            criteria.CurrentIteration = -2;
            do
            {
                criteria.NewValue /= 10.0;
            } while (!criteria.HasConverged);

            Assert.AreEqual(1, criteria.CurrentIteration);
            Assert.AreEqual(-2, Math.Log10(criteria.OldValue));
            Assert.AreEqual(-3, Math.Log10(criteria.NewValue));
        }
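Here the stop is driven by the iteration limit rather than the tolerance: each assignment to NewValue appears to advance CurrentIteration (-2, -1, 0, 1) and shift the previous value into OldValue, so the criterion halts after three updates at its limit of 1. A hypothetical trace of that bookkeeping:

    int iteration = -2;                          // as set by the test
    double oldValue = double.NaN, newValue = 1;  // startValue: 1
    while (iteration < 1)                        // iterations: 1 acts as the limit
    {
        oldValue = newValue;
        newValue /= 10.0;                        // 0.1, then 0.01, then 0.001
        iteration++;
    }
    // iteration == 1, Math.Log10(oldValue) == -2, Math.Log10(newValue) == -3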
Example #10
 /// <summary>
 ///   Initializes a new instance of the <see cref="AdaBoost&lt;TModel&gt;"/> class.
 /// </summary>
 ///
 /// <param name="model">The model to be learned.</param>
 /// <param name="creationFunction">The model fitting function.</param>
 ///
 public AdaBoost(Boost<TModel> model, ModelConstructor<TModel> creationFunction)
 {
     this.classifier  = model;
     this.Creation    = creationFunction;
     this.convergence = new RelativeConvergence();
 }
Example #11
 /// <summary>
 ///   Initializes a new instance of the <see cref="AdaBoost&lt;TModel&gt;"/> class.
 /// </summary>
 ///
 /// <param name="model">The model to be learned.</param>
 ///
 public AdaBoost(Boost<TModel> model)
 {
     this.classifier  = model;
     this.convergence = new RelativeConvergence();
 }
 /// <summary>
 ///   Creates a new instance of the Viterbi learning algorithm.
 /// </summary>
 ///
 protected BaseViterbiLearning()
 {
     this.convergence = new RelativeConvergence();
 }
 /// <summary>
 /// Initializes a new instance of the <see cref="BaseLeastSquaresMethod"/> class.
 /// </summary>
 ///
 public BaseLeastSquaresMethod()
 {
     this.convergence = new RelativeConvergence(0, 1e-5);
 }
Example #14
        private static TrainingSessionResult Optimize(
            NeuralNetworkBase network,
            BatchesCollection miniBatches,
            int epochs, float dropout,
            [NotNull] WeightsUpdater updater,
            [CanBeNull] IProgress<BatchProgress> batchProgress,
            [CanBeNull] IProgress<TrainingProgressEventArgs> trainingProgress,
            [CanBeNull] ValidationDataset validationDataset,
            [CanBeNull] TestDataset testDataset,
            CancellationToken token)
        {
            // Setup
            DateTime startTime = DateTime.Now;
            List<DatasetEvaluationResult> validationReports = new List<DatasetEvaluationResult>();
            List<DatasetEvaluationResult> testReports = new List<DatasetEvaluationResult>();

            TrainingSessionResult PrepareResult(TrainingStopReason reason, int loops)
            {
                return new TrainingSessionResult(reason, loops, DateTime.Now.Subtract(startTime).RoundToSeconds(), validationReports, testReports);
            }

            // Convergence manager for the validation dataset
            RelativeConvergence convergence = validationDataset == null
                ? null
                : new RelativeConvergence(validationDataset.Tolerance, validationDataset.EpochsInterval);

            // Optional batch monitor
            BatchProgressMonitor batchMonitor = batchProgress == null ? null : new BatchProgressMonitor(miniBatches.Count, batchProgress);

            // Create the training batches
            for (int i = 0; i < epochs; i++)
            {
                // Shuffle the training set
                miniBatches.CrossShuffle();

                // Gradient descent over the current batches
                BackpropagationInProgress = true;
                for (int j = 0; j < miniBatches.BatchesCount; j++)
                {
                    if (token.IsCancellationRequested)
                    {
                        BackpropagationInProgress = false;
                        return PrepareResult(TrainingStopReason.TrainingCanceled, i);
                    }
                    network.Backpropagate(miniBatches.Batches[j], dropout, updater);
                    batchMonitor?.NotifyCompletedBatch(miniBatches.Batches[j].X.GetLength(0));
                }
                BackpropagationInProgress = false;
                batchMonitor?.Reset();
                if (network.IsInNumericOverflow)
                {
                    return PrepareResult(TrainingStopReason.NumericOverflow, i);
                }

                // Check the training progress
                if (trainingProgress != null)
                {
                    (float cost, _, float accuracy) = network.Evaluate(miniBatches);
                    trainingProgress.Report(new TrainingProgressEventArgs(i + 1, cost, accuracy));
                }

                // Check the validation dataset
                if (convergence != null)
                {
                    (float cost, _, float accuracy) = network.Evaluate(validationDataset.Dataset);
                    validationReports.Add(new DatasetEvaluationResult(cost, accuracy));
                    convergence.Value = accuracy;
                    if (convergence.HasConverged)
                    {
                        return PrepareResult(TrainingStopReason.EarlyStopping, i);
                    }
                }

                // Report progress if necessary
                if (testDataset != null)
                {
                    (float cost, _, float accuracy) = network.Evaluate(testDataset.Dataset);
                    testReports.Add(new DatasetEvaluationResult(cost, accuracy));
                    testDataset.ThreadSafeProgressCallback?.Report(new TrainingProgressEventArgs(i + 1, cost, accuracy));
                }
            }
            return PrepareResult(TrainingStopReason.EpochsCompleted, epochs);
        }