/// <summary>
/// Checks that <see cref="LogLikelihoodDifferentiableFunction.GetSummaryForInstance"/> agrees with a
/// gold log-likelihood and a definition-of-derivative (finite difference) gradient for every model
/// in the dataset.
/// </summary>
/// <param name="dataset">Graphical models to evaluate the function against.</param>
/// <param name="weights">Weight vector at which the likelihood and gradient are computed.</param>
public virtual void TestGetSummaryForInstance(GraphicalModel[] dataset, ConcatVector weights)
        {
            LogLikelihoodDifferentiableFunction fn = new LogLikelihoodDifferentiableFunction();

            foreach (GraphicalModel model in dataset)
            {
                double       goldLogLikelihood = LogLikelihood(model, weights);
                ConcatVector goldGradient      = DefinitionOfDerivative(model, weights);
                ConcatVector gradient          = new ConcatVector(0);
                double       logLikelihood     = fn.GetSummaryForInstance(model, weights, gradient);
                // BUGFIX: arguments were rotated — the tolerance expression was being passed as the
                // "actual" value and the gold value as the delta. Correct NUnit order is
                // AreEqual(expected, actual, delta). Math.Abs is needed because log-likelihoods are
                // generally negative, and a negative delta would defeat the percentage tolerance.
                NUnit.Framework.Assert.AreEqual(goldLogLikelihood, logLikelihood, Math.Max(1.0e-3, Math.Abs(goldLogLikelihood) * 1.0e-2));
                // Our check for gradient similarity involves distance between endpoints of vectors, instead of elementwise
                // similarity, b/c it can be controlled as a percentage
                ConcatVector difference = goldGradient.DeepClone();
                difference.AddVectorInPlace(gradient, -1);
                double distance = Math.Sqrt(difference.DotProduct(difference));
                // The tolerance here is pretty large, since the gold gradient is computed approximately
                // 5% still tells us whether everything is working or not though
                if (distance > 5.0e-2)
                {
                    System.Console.Error.WriteLine("Definitional and calculated gradient differ!");
                    System.Console.Error.WriteLine("Definition approx: " + goldGradient);
                    System.Console.Error.WriteLine("Calculated: " + gradient);
                }
                // BUGFIX: was AreEqual(distance, 5.0e-2, 0.0), which demands distance == 0.05 exactly
                // and contradicts the diagnostic branch above; the intent is distance <= 5.0e-2.
                NUnit.Framework.Assert.IsTrue(distance <= 5.0e-2);
            }
        }
// Example #2
        public virtual void TestOptimizeLogLikelihood(AbstractBatchOptimizer optimizer, GraphicalModel[] dataset, ConcatVector initialWeights, double l2regularization)
        {
            AbstractDifferentiableFunction <GraphicalModel> ll = new LogLikelihoodDifferentiableFunction();
            ConcatVector finalWeights = optimizer.Optimize((GraphicalModel[])dataset, ll, (ConcatVector)initialWeights, (double)l2regularization, 1.0e-9, true);

            System.Console.Error.WriteLine("Finished optimizing");
            double logLikelihood = GetValueSum((GraphicalModel[])dataset, finalWeights, ll, (double)l2regularization);
            // Check in a whole bunch of random directions really nearby that there is no nearby point with a higher log
            // likelihood
            Random r = new Random(42);

            for (int i = 0; i < 1000; i++)
            {
                int          size            = finalWeights.GetNumberOfComponents();
                ConcatVector randomDirection = new ConcatVector(size);
                for (int j = 0; j < size; j++)
                {
                    double[] dense = new double[finalWeights.IsComponentSparse(j) ? finalWeights.GetSparseIndex(j) + 1 : finalWeights.GetDenseComponent(j).Length];
                    for (int k = 0; k < dense.Length; k++)
                    {
                        dense[k] = (r.NextDouble() - 0.5) * 1.0e-3;
                    }
                    randomDirection.SetDenseComponent(j, dense);
                }
                ConcatVector randomPerturbation = finalWeights.DeepClone();
                randomPerturbation.AddVectorInPlace(randomDirection, 1.0);
                double randomPerturbedLogLikelihood = GetValueSum((GraphicalModel[])dataset, randomPerturbation, ll, (double)l2regularization);
                // Check that we're within a very small margin of error (around 3 decimal places) of the randomly
                // discovered value
                if (logLikelihood < randomPerturbedLogLikelihood - (1.0e-3 * Math.Max(1.0, Math.Abs(logLikelihood))))
                {
                    System.Console.Error.WriteLine("Thought optimal point was: " + logLikelihood);
                    System.Console.Error.WriteLine("Discovered better point: " + randomPerturbedLogLikelihood);
                }
                NUnit.Framework.Assert.IsTrue(logLikelihood >= randomPerturbedLogLikelihood - (1.0e-3 * Math.Max(1.0, Math.Abs(logLikelihood))));
            }
        }