public TrainingWorker(AbstractBatchOptimizer _enclosing, T[] dataset, AbstractDifferentiableFunction<T> fn, ConcatVector initialWeights, double l2regularization, double convergenceDerivativeNorm, bool quiet)
 {
     this._enclosing        = _enclosing;
     this.optimizationState = this._enclosing.GetFreshOptimizationState(initialWeights);
     this.weights           = initialWeights.DeepClone();
     this.dataset           = dataset;
     this.fn = fn;
     this.l2regularization          = l2regularization;
     this.convergenceDerivativeNorm = convergenceDerivativeNorm;
     this.quiet = quiet;
 }
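Usage note: this constructor is normally invoked from inside AbstractBatchOptimizer.Optimize(), which runs the worker until the derivative norm falls below convergenceDerivativeNorm; the worker deep-clones initialWeights, so the caller's vector is never mutated. A minimal calling sketch, assuming the standard Optimize entry point and the BacktrackingAdaGradOptimizer subclass from the same package:

    AbstractBatchOptimizer optimizer = new BacktrackingAdaGradOptimizer();
    AbstractDifferentiableFunction<GraphicalModel> fn = new LogLikelihoodDifferentiableFunction();
    ConcatVector finalWeights = optimizer.Optimize(
        trainingModels,   // GraphicalModel[] assumed to be built elsewhere
        fn,
        initialWeights,   // starting point; cloned internally
        0.1,              // l2regularization
        1.0e-9,           // convergenceDerivativeNorm
        true);            // quiet: suppress per-iteration logging

The test below drives this same entry point end to end.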
        public virtual void TestOptimizeLogLikelihood(AbstractBatchOptimizer optimizer, GraphicalModel[] dataset, ConcatVector initialWeights, double l2regularization)
        {
            AbstractDifferentiableFunction<GraphicalModel> ll = new LogLikelihoodDifferentiableFunction();
            ConcatVector finalWeights = optimizer.Optimize(dataset, ll, initialWeights, l2regularization, 1.0e-9, true);

            System.Console.Error.WriteLine("Finished optimizing");
            double logLikelihood = GetValueSum(dataset, finalWeights, ll, l2regularization);
            // Probe a large number of random directions in a tiny neighborhood of the final
            // weights to verify that no nearby point has a higher log-likelihood
            Random r = new Random(42);

            for (int i = 0; i < 1000; i++)
            {
                int          size            = finalWeights.GetNumberOfComponents();
                ConcatVector randomDirection = new ConcatVector(size);
                for (int j = 0; j < size; j++)
                {
                    // Size the perturbation to match component j, whether it is stored sparsely or densely
                    double[] dense = new double[finalWeights.IsComponentSparse(j) ? finalWeights.GetSparseIndex(j) + 1 : finalWeights.GetDenseComponent(j).Length];
                    for (int k = 0; k < dense.Length; k++)
                    {
                        dense[k] = (r.NextDouble() - 0.5) * 1.0e-3;
                    }
                    randomDirection.SetDenseComponent(j, dense);
                }
                ConcatVector randomPerturbation = finalWeights.DeepClone();
                randomPerturbation.AddVectorInPlace(randomDirection, 1.0);
                double randomPerturbedLogLikelihood = GetValueSum(dataset, randomPerturbation, ll, l2regularization);
                // Allow roughly three decimal places of slack, scaled by the magnitude of the
                // log-likelihood, before treating the perturbed point as genuinely better
                if (logLikelihood < randomPerturbedLogLikelihood - (1.0e-3 * Math.Max(1.0, Math.Abs(logLikelihood))))
                {
                    System.Console.Error.WriteLine("Thought optimal point was: " + logLikelihood);
                    System.Console.Error.WriteLine("Discovered better point: " + randomPerturbedLogLikelihood);
                }
                NUnit.Framework.Assert.IsTrue(logLikelihood >= randomPerturbedLogLikelihood - (1.0e-3 * Math.Max(1.0, Math.Abs(logLikelihood))));
            }
        }
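The assertion above uses a relative tolerance, 1.0e-3 * Math.Max(1.0, Math.Abs(logLikelihood)), so the permitted slack grows with the magnitude of the objective. The same random-perturbation pattern can be checked in isolation; the sketch below applies it to a toy concave objective (all names here are illustrative and not part of the library):

    using System;

    internal static class LocalOptimumCheckSketch
    {
        // Toy concave objective, maximized at the origin: f(x) = -||x||^2
        private static double Objective(double[] x)
        {
            double sum = 0.0;
            foreach (double xi in x)
            {
                sum -= xi * xi;
            }
            return sum;
        }

        internal static void Main()
        {
            double[] candidate = new double[5];  // the claimed optimum (all zeros)
            double best = Objective(candidate);
            Random r = new Random(42);
            for (int i = 0; i < 1000; i++)
            {
                // Perturb every coordinate by at most 5e-4, mirroring the test above
                double[] perturbed = (double[])candidate.Clone();
                for (int j = 0; j < perturbed.Length; j++)
                {
                    perturbed[j] += (r.NextDouble() - 0.5) * 1.0e-3;
                }
                double tolerance = 1.0e-3 * Math.Max(1.0, Math.Abs(best));
                if (Objective(perturbed) > best + tolerance)
                {
                    throw new InvalidOperationException("Found a better nearby point");
                }
            }
            Console.WriteLine("Candidate survived 1000 random perturbations");
        }
    }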
 // Carries optimizer-specific state for a single training run; created via
 // GetFreshOptimizationState() in the TrainingWorker constructor above
 internal OptimizationState(AbstractBatchOptimizer _enclosing)
 {
     this._enclosing = _enclosing;
 }
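Concrete optimizers presumably extend OptimizationState to carry whatever bookkeeping they need between gradient steps and hand it back from GetFreshOptimizationState(). An illustrative sketch only; the class and field names below are assumptions, not actual members of the library:

    internal class AdaGradState : AbstractBatchOptimizer.OptimizationState
    {
        // Hypothetical field: an AdaGrad-style per-coordinate gradient accumulator
        internal ConcatVector sumOfSquaredGradients;

        internal AdaGradState(AbstractBatchOptimizer enclosing, ConcatVector initialWeights)
            : base(enclosing)
        {
            // Hypothetical: start from an empty vector and let AddVectorInPlace
            // grow it as gradients arrive (assumed ConcatVector behavior)
            sumOfSquaredGradients = new ConcatVector(0);
        }
    }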