        public void GetWaldTestTest()
        {
            // Model estimated with standard Hessian-based standard errors,
            // matching the commented-out reference values in RegressTest2 below
            MultinomialLogisticRegression target = createExample1();

            double[][] inputs;
            int[]      outputs;

            CreateInputOutputsExample1(out inputs, out outputs);

            WaldTest actual;

            actual = target.GetWaldTest(0, 0);
            Assert.AreEqual(-6.6351, actual.Statistic, 1e-4);
            Assert.AreEqual(3.244e-11, actual.PValue, 1e-14);

            actual = target.GetWaldTest(0, 1);
            Assert.AreEqual(2.6966, actual.Statistic, 1e-4);
            Assert.AreEqual(0.007004, actual.PValue, 1e-5);

            actual = target.GetWaldTest(0, 2);
            Assert.AreEqual(6.6943, actual.Statistic, 1e-4);
            Assert.AreEqual(2.167e-11, actual.PValue, 1e-14);

            actual = target.GetWaldTest(1, 0);
            Assert.AreEqual(-11.0404, actual.Statistic, 1e-4);
            Assert.AreEqual(0.0, actual.PValue, 1e-25);

            actual = target.GetWaldTest(1, 1);
            Assert.AreEqual(2.0609, actual.Statistic, 1e-4);
            Assert.AreEqual(0.039315, actual.PValue, 1e-6);

            actual = target.GetWaldTest(1, 2);
            Assert.AreEqual(10.9524, actual.Statistic, 1e-3);
            Assert.AreEqual(0.0, actual.PValue, 1e-25);
        }
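The statistics asserted above follow the usual Wald construction: each one is the estimated coefficient divided by its standard error (for instance, -11.774655 / 1.047378 ≈ -11.2420 in the lower-bound figures of RegressTest2 below). A minimal sketch of that relationship, using only the Coefficients, StandardErrors, and GetWaldTest members that already appear in these examples; the helper name CheckWaldStatistic is hypothetical:

        // Hypothetical helper (not part of the original tests): recompute the
        // Wald z-statistic by hand and compare it with GetWaldTest's value.
        private static void CheckWaldStatistic(MultinomialLogisticRegression model,
            int category, int coefficient)
        {
            double estimate = model.Coefficients[category][coefficient];
            double stdError = model.StandardErrors[category][coefficient];

            // Wald statistic: estimate divided by its estimated standard error
            double z = estimate / stdError;

            WaldTest test = model.GetWaldTest(category, coefficient);
            Assert.AreEqual(z, test.Statistic, 1e-8);
        }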
Example #2
        private void computeInformation()
        {
            // Store model information
            this.results = regression.Compute(inputData);

            this.deviance      = regression.GetDeviance(inputData, outputData);
            this.logLikelihood = regression.GetLogLikelihood(inputData, outputData);
            this.chiSquare     = regression.ChiSquare(inputData, outputData);

            this.coefficients   = regression.Coefficients;
            this.standardErrors = regression.StandardErrors;

            // Store coefficient information
            for (int i = 0; i < waldTests.Length; i++)
            {
                this.waldTests[i]   = regression.GetWaldTest(i);
                this.confidences[i] = regression.GetConfidenceInterval(i);
                this.oddsRatios[i]  = regression.GetOddsRatio(i);
            }
        }
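The loop above fills one Wald test, confidence interval, and odds ratio per coefficient. A hedged sketch of a reporting step that consumes those fields, assuming coefficients and standardErrors hold one scalar value per parameter as the per-index loop suggests; the method name printSummary is hypothetical and not part of the original class:

        // Hypothetical reporting step: dump the per-coefficient information
        // gathered by computeInformation as a simple text table.
        private void printSummary()
        {
            for (int i = 0; i < waldTests.Length; i++)
            {
                System.Console.WriteLine(
                    "b[{0}] = {1:F4}  SE = {2:F4}  Wald = {3:F4} (p = {4:F4})  OR = {5:F4}  CI = {6}",
                    i, coefficients[i], standardErrors[i],
                    waldTests[i].Statistic, waldTests[i].PValue,
                    oddsRatios[i], confidences[i]);
            }
        }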
Example #3
        private MultinomialLogisticRegression buildModel()
        {
            // Prepare the design matrices if they have not been built yet
            if (independent == null)
            {
                formatData();
            }

            // Create the multinomial model and its lower-bound Newton-Raphson learner
            mlr = new MultinomialLogisticRegression(nvars, ncat);
            LowerBoundNewtonRaphson lbn = new LowerBoundNewtonRaphson(mlr);

            // Iterate until the relative change in the parameters drops below
            // the convergence threshold or the iteration limit is reached
            do
            {
                delta = lbn.Run(independent, dependent);
                iteration++;
            } while (iteration < totit && delta > converg);

            // Per-category coefficient tables (one row per non-reference category)
            coefficients  = mlr.Coefficients;
            standarderror = new double[ncat - 1][];
            waldstat      = new double[ncat - 1][];
            waldpvalue    = new double[ncat - 1][];
            for (int i = 0; i < coefficients.Length; i++)
            {
                // One entry per input variable plus the intercept term
                double[] steArr        = new double[nvars + 1];
                double[] waldStatArr   = new double[nvars + 1];
                double[] waldPvalueArr = new double[nvars + 1];
                for (int j = 0; j < nvars + 1; j++)
                {
                    // Wald test for the j-th coefficient of the i-th category
                    Accord.Statistics.Testing.WaldTest wt = mlr.GetWaldTest(i, j);
                    steArr[j]        = wt.StandardError;
                    waldStatArr[j]   = wt.Statistic;
                    waldPvalueArr[j] = wt.PValue;
                }
                waldstat[i]      = waldStatArr;
                waldpvalue[i]    = waldPvalueArr;
                standarderror[i] = steArr;
            }
            // Overall goodness-of-fit measures
            loglikelihood = mlr.GetLogLikelihood(independent, dependent);
            deviance      = mlr.GetDeviance(independent, dependent);

            // Compute the chi-square test once and read off both results
            var chi = mlr.ChiSquare(independent, dependent);
            x2 = chi.Statistic;
            pv = chi.PValue;
            return mlr;
        }
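For completeness, a hedged sketch of how the tables filled by buildModel might be consumed by the surrounding class; reportModel is a hypothetical name, and the only members it touches are the fields assigned above:

        // Hypothetical caller: fit the model, then print the coefficient table
        // with standard errors, Wald statistics and p-values per category.
        private void reportModel()
        {
            buildModel();   // populates coefficients, waldstat, waldpvalue, ...

            for (int i = 0; i < coefficients.Length; i++)
            {
                for (int j = 0; j < coefficients[i].Length; j++)
                {
                    System.Console.WriteLine(
                        "category {0}, b[{1}] = {2:F4}  SE = {3:F4}  z = {4:F4}  p = {5:F4}",
                        i + 1, j, coefficients[i][j], standarderror[i][j],
                        waldstat[i][j], waldpvalue[i][j]);
                }
            }

            System.Console.WriteLine(
                "log-likelihood = {0:F2}, deviance = {1:F2}, chi-square = {2:F2} (p = {3:F4})",
                loglikelihood, deviance, x2, pv);
        }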
        public void RegressTest2()
        {
            double[][] inputs;
            int[]      outputs;

            CreateInputOutputsExample1(out inputs, out outputs);

            // Create a new Multinomial Logistic Regression for 3 categories
            var mlr = new MultinomialLogisticRegression(inputs: 2, categories: 3);

            // Create an estimation algorithm to estimate the regression
            LowerBoundNewtonRaphson lbnr = new LowerBoundNewtonRaphson(mlr);

            // Now, we will iteratively estimate our model. The Run method returns
            // the maximum relative change in the model parameters, which we will
            // use as the convergence criterion.

            double delta;
            int    iteration = 0;

            do
            {
                // Perform an iteration
                delta = lbnr.Run(inputs, outputs);
                iteration++;
            } while (iteration < 100 && delta > 1e-6);

            Assert.AreEqual(52, iteration);
            Assert.IsFalse(double.IsNaN(mlr.Coefficients[0][0]));
            Assert.IsFalse(double.IsNaN(mlr.Coefficients[0][1]));
            Assert.IsFalse(double.IsNaN(mlr.Coefficients[0][2]));
            Assert.IsFalse(double.IsNaN(mlr.Coefficients[1][0]));
            Assert.IsFalse(double.IsNaN(mlr.Coefficients[1][1]));
            Assert.IsFalse(double.IsNaN(mlr.Coefficients[1][2]));


            // This is the same example given in R Data Analysis Examples for
            // Multinomial Logistic Regression: http://www.ats.ucla.edu/stat/r/dae/mlogit.htm

            // brand 2
            Assert.AreEqual(-11.774655, mlr.Coefficients[0][0], 1e-4); // intercept
            Assert.AreEqual(0.523814, mlr.Coefficients[0][1], 1e-4);   // female
            Assert.AreEqual(0.368206, mlr.Coefficients[0][2], 1e-4);   // age

            // brand 3
            Assert.AreEqual(-22.721396, mlr.Coefficients[1][0], 1e-4); // intercept
            Assert.AreEqual(0.465941, mlr.Coefficients[1][1], 1e-4);   // female
            Assert.AreEqual(0.685908, mlr.Coefficients[1][2], 1e-4);   // age


            Assert.IsFalse(double.IsNaN(mlr.StandardErrors[0][0]));
            Assert.IsFalse(double.IsNaN(mlr.StandardErrors[0][1]));
            Assert.IsFalse(double.IsNaN(mlr.StandardErrors[0][2]));
            Assert.IsFalse(double.IsNaN(mlr.StandardErrors[1][0]));
            Assert.IsFalse(double.IsNaN(mlr.StandardErrors[1][1]));
            Assert.IsFalse(double.IsNaN(mlr.StandardErrors[1][2]));

            /*
             * // Using the standard Hessian estimation
             * Assert.AreEqual(1.774612, mlr.StandardErrors[0][0], 1e-6);
             * Assert.AreEqual(0.194247, mlr.StandardErrors[0][1], 1e-6);
             * Assert.AreEqual(0.055003, mlr.StandardErrors[0][2], 1e-6);
             *
             * Assert.AreEqual(2.058028, mlr.StandardErrors[1][0], 1e-6);
             * Assert.AreEqual(0.226090, mlr.StandardErrors[1][1], 1e-6);
             * Assert.AreEqual(0.062627, mlr.StandardErrors[1][2], 1e-6);
             */

            // Using the lower-bound approximation
            Assert.AreEqual(1.047378039787443, mlr.StandardErrors[0][0], 1e-6);
            Assert.AreEqual(0.153150051082552, mlr.StandardErrors[0][1], 1e-6);
            Assert.AreEqual(0.031640507386863, mlr.StandardErrors[0][2], 1e-6);

            Assert.AreEqual(1.047378039787443, mlr.StandardErrors[1][0], 1e-6);
            Assert.AreEqual(0.153150051082552, mlr.StandardErrors[1][1], 1e-6);
            Assert.AreEqual(0.031640507386863, mlr.StandardErrors[1][2], 1e-6);

            double ll = mlr.GetLogLikelihood(inputs, outputs);

            Assert.AreEqual(-702.97, ll, 1e-2);
            Assert.IsFalse(double.IsNaN(ll));

            var chi = mlr.ChiSquare(inputs, outputs);

            Assert.AreEqual(185.85, chi.Statistic, 1e-2);
            Assert.IsFalse(double.IsNaN(chi.Statistic));

            var wald00 = mlr.GetWaldTest(0, 0);
            var wald01 = mlr.GetWaldTest(0, 1);
            var wald02 = mlr.GetWaldTest(0, 2);

            var wald10 = mlr.GetWaldTest(1, 0);
            var wald11 = mlr.GetWaldTest(1, 1);
            var wald12 = mlr.GetWaldTest(1, 2);

            Assert.IsFalse(double.IsNaN(wald00.Statistic));
            Assert.IsFalse(double.IsNaN(wald01.Statistic));
            Assert.IsFalse(double.IsNaN(wald02.Statistic));

            Assert.IsFalse(double.IsNaN(wald10.Statistic));
            Assert.IsFalse(double.IsNaN(wald11.Statistic));
            Assert.IsFalse(double.IsNaN(wald12.Statistic));

            /*
             * // Using standard Hessian estimation
             * Assert.AreEqual(-6.6351, wald00.Statistic, 1e-4);
             * Assert.AreEqual( 2.6966, wald01.Statistic, 1e-4);
             * Assert.AreEqual( 6.6943, wald02.Statistic, 1e-4);
             *
             * Assert.AreEqual(-11.0404, wald10.Statistic, 1e-4);
             * Assert.AreEqual( 2.0609, wald11.Statistic, 1e-4);
             * Assert.AreEqual(10.9524, wald12.Statistic, 1e-4);
             */

            // Using the lower-bound approximation
            Assert.AreEqual(-11.241995503283842, wald00.Statistic, 1e-4);
            Assert.AreEqual(3.4202662152119889, wald01.Statistic, 1e-4);
            Assert.AreEqual(11.637150673342207, wald02.Statistic, 1e-4);

            Assert.AreEqual(-21.693553825772664, wald10.Statistic, 1e-4);
            Assert.AreEqual(3.0423802097069097, wald11.Statistic, 1e-4);
            Assert.AreEqual(21.678124991086548, wald12.Statistic, 1e-4);
        }
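One more relation worth noting: the p-values reported by GetWaldTest behave like two-sided standard-normal tail probabilities of the statistic (2 * Phi(-6.6351) ≈ 3.244e-11, exactly the pair asserted at the top of GetWaldTestTest). A minimal sketch of that check, reusing the createExample1 helper from the first example and assuming Accord.NET's NormalDistribution with its DistributionFunction (CDF) method; the method name WaldPValueSketch and the 1e-6 tolerance are illustrative only:

        public void WaldPValueSketch()
        {
            MultinomialLogisticRegression target = createExample1();

            WaldTest wald = target.GetWaldTest(0, 1);

            // Two-sided tail probability of the Wald z-statistic under the
            // standard normal reference distribution (assumed here).
            var normal = new Accord.Statistics.Distributions.Univariate.NormalDistribution(0, 1);
            double p = 2 * normal.DistributionFunction(-System.Math.Abs(wald.Statistic));

            Assert.AreEqual(wald.PValue, p, 1e-6);
        }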