Example No. 1
        private void computeInner()
        {
            if (inputCount <= 2)
            {
                return;
            }

            // Perform likelihood-ratio tests against diminished nested models
            ProportionalHazards innerModel            = new ProportionalHazards(inputCount - 1);
            ProportionalHazardsNewtonRaphson learning = new ProportionalHazardsNewtonRaphson(innerModel);

            for (int i = 0; i < inputCount; i++)
            {
                // Create a diminished inner model without the current variable
                double[][] data = inputData.RemoveColumn(i);

                System.Diagnostics.Trace.Assert(data[0].Length > 0);

                Array.Clear(innerModel.Coefficients, 0, inputCount - 1);

                learning.Iterations = Iterations;
                learning.Tolerance  = Tolerance;

                learning.Run(data, timeData, censorData);


                double ratio = 2.0 * (logLikelihood - innerModel.GetPartialLogLikelihood(data, timeData, censorData));
                ratioTests[i] = new ChiSquareTest(ratio, 1);
            }

            innerComputed = true;
        }
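
// Note: the statistic computed in the loop above is the standard likelihood-ratio
// test between the full model and the nested model with one variable removed. As a
// sketch in standard notation (my notation, not the library's):
//
//     LR_i = 2 (l_full - l_nested,i)  ~  chi^2(1),
//
// where l denotes the partial log-likelihood and the single degree of freedom
// corresponds to the one removed variable, matching the ChiSquareTest(ratio, 1) call.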
        public void RunTest()
        {
            // Data from: http://www.sph.emory.edu/~cdckms/CoxPH/prophaz2.html

            double[,] data =
            {
                { 50,  1, 0 },
                { 70,  2, 1 },
                { 45,  3, 0 },
                { 35,  5, 0 },
                { 62,  7, 1 },
                { 50, 11, 0 },
                { 45,  4, 0 },
                { 57,  6, 0 },
                { 32,  8, 0 },
                { 57,  9, 1 },
                { 60, 10, 1 },
            };

            ProportionalHazards regression = new ProportionalHazards(1);

            double[][] inputs = data.GetColumn(0).ToArray();
            double[]   time   = data.GetColumn(1);
            int[]      output = data.GetColumn(2).ToInt32();

            ProportionalHazardsNewtonRaphson target = new ProportionalHazardsNewtonRaphson(regression);

            double error = target.Run(inputs, time, output);

            double log = -2 * regression.GetPartialLogLikelihood(inputs, time, output);


            Assert.AreEqual(0.3770, regression.Coefficients[0], 1e-4);
            Assert.IsFalse(Double.IsNaN(regression.Coefficients[0]));

            Assert.AreEqual(0.2542, regression.StandardErrors[0], 1e-4);
            Assert.IsFalse(Double.IsNaN(regression.StandardErrors[0]));


            double[] actual = new double[inputs.Length];
            for (int i = 0; i < actual.Length; i++)
            {
                actual[i] = regression.Compute(inputs[i]);
            }

            double[] expected =
            {
                // Computed using R's predict(fit,type="risk")
                0.640442743,  1206.226657448, 0.097217211, 0.002240107,
                59.081223025,    0.640442743, 0.097217211, 8.968345353,
                0.000722814,     8.968345353, 27.794227993
            };

            for (int i = 0; i < actual.Length; i++)
            {
                Assert.AreEqual(expected[i], actual[i], 1e-3);
                Assert.IsFalse(Double.IsNaN(actual[i]));
            }
        }
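
// A sanity check on the expected risks above: since the learner centers the inputs
// at their mean before training (see the Offsets assignment in Example No. 10 below),
// each predicted risk should equal the exponential of the coefficient times the
// centered input. Assuming the sample mean xbar = 563/11 ~ 51.18 (computed here,
// not asserted by the test):
//
//     r_i = exp( beta * (x_i - xbar) ),   exp( 0.3770 * (50 - 51.18) ) ~ 0.640,
//
// which agrees with the first entry of the R-computed expected vector.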
        public void RunTest2()
        {
            // Data from: http://www.sph.emory.edu/~cdckms/CoxPH/prophaz2.html

            double[,] data =
            {
                { 50, 30,  1, 0 },
                { 70, 22,  2, 1 },
                { 45, 12,  3, 0 },
                { 35, 22,  5, 0 },
                { 62, 54,  7, 1 },
                { 50, 12, 11, 0 },
                { 45, 11,  4, 0 },
                { 57, 62,  6, 0 },
                { 32, 16,  8, 0 },
                { 57, 14,  9, 1 },
                { 60, 12, 10, 1 },
            };

            ProportionalHazards regression = new ProportionalHazards(2);

            double[][] inputs = data.Submatrix(null, 0, 1).ToArray();
            double[]   time   = data.GetColumn(2);
            int[]      output = data.GetColumn(3).ToInt32();


            ProportionalHazardsNewtonRaphson target = new ProportionalHazardsNewtonRaphson(regression);


            double error = target.Run(inputs, time, output);

            double log = -2 * regression.GetPartialLogLikelihood(inputs, time, output);

            Assert.AreEqual(3.4261, log, 1e-4);
            Assert.IsFalse(Double.IsNaN(log));

            double actual = regression.Coefficients[0];

            Assert.AreEqual(0.3909, regression.Coefficients[0], 1e-4);
            Assert.IsFalse(Double.IsNaN(regression.Coefficients[0]));

            Assert.AreEqual(0.0424, regression.Coefficients[1], 1e-4);
            Assert.IsFalse(Double.IsNaN(regression.Coefficients[1]));

            Assert.AreEqual(0.2536, regression.StandardErrors[0], 1e-4);
            Assert.IsFalse(Double.IsNaN(regression.StandardErrors[0]));

            Assert.AreEqual(0.0624, regression.StandardErrors[1], 1e-4);
            Assert.IsFalse(Double.IsNaN(regression.StandardErrors[1]));
        }
Example No. 4
        private void computeInformation()
        {
            // Store model information
            this.result        = regression.Compute(inputData, timeData);
            this.deviance      = regression.GetDeviance(inputData, timeData, censorData);
            this.logLikelihood = regression.GetPartialLogLikelihood(inputData, timeData, censorData);
            this.chiSquare     = regression.ChiSquare(inputData, timeData, censorData);

            // Store coefficient information
            for (int i = 0; i < regression.Coefficients.Length; i++)
            {
                this.standardErrors[i] = regression.StandardErrors[i];

                this.waldTests[i]    = regression.GetWaldTest(i);
                this.coefficients[i] = regression.Coefficients[i];
                this.confidences[i]  = regression.GetConfidenceInterval(i);
                this.hazardRatios[i] = regression.GetHazardRatio(i);
            }
        }
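
// The per-coefficient quantities stored above follow the usual asymptotic formulas;
// as a brief sketch in standard notation (assumed, not quoted from the library's
// documentation):
//
//     z_i      = beta_i / SE(beta_i)                     (Wald test statistic)
//     HR_i     = exp(beta_i)                             (hazard ratio)
//     CI(95%)  = exp( beta_i +/- 1.96 * SE(beta_i) )     (confidence interval)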
        public void RunTest4()
        {
            // Data from: http://www.sph.emory.edu/~cdckms/CoxPH/prophaz2.html
            // with added tied times

            double[,] data =
            {
                { 50,  1, 1 },
                { 60,  1, 1 },
                { 40,  1, 1 },
                { 51,  1, 1 },
                { 70,  2, 1 },
                { 45,  3, 0 },
                { 35,  5, 0 },
                { 62,  7, 1 },
                { 50, 11, 0 },
                { 45,  4, 0 },
                { 57,  6, 0 },
                { 32,  8, 0 },
                { 57,  9, 1 },
                { 60, 10, 1 },
            };

            ProportionalHazards regression = new ProportionalHazards(1);

            double[][] inputs = data.GetColumn(0).ToArray();
            double[]   time   = data.GetColumn(1);
            int[]      output = data.GetColumn(2).ToInt32();

            ProportionalHazardsNewtonRaphson target = new ProportionalHazardsNewtonRaphson(regression);

            double error = target.Run(inputs, time, output);

            double log = -2 * regression.GetPartialLogLikelihood(inputs, time, output);


            Assert.AreEqual(0.04863, regression.Coefficients[0], 1e-4);
            Assert.IsFalse(Double.IsNaN(regression.Coefficients[0]));

            Assert.AreEqual(0.04186, regression.StandardErrors[0], 1e-4);
            Assert.IsFalse(Double.IsNaN(regression.StandardErrors[0]));
        }
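
// With four tied failure times added at t = 1, the partial likelihood needs a tie
// correction. Example No. 12 below validates against R with ties="breslow", so
// presumably the Breslow approximation is in effect; its per-event-time contribution
// is (standard notation, an assumption on my part):
//
//     l(beta) = sum_k [ s_k' beta  -  d_k * log sum_{j in R(t_k)} exp(x_j' beta) ],
//
// where d_k is the number of events at time t_k, s_k the sum of their covariates,
// and R(t_k) the risk set.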
        public void RunTest5()
        {
            double[,] inputs =
            {
                {   1,   1,   1, 1,   1,   0,   1,   1,   0,   0,   1,   0, 0, 1,   1,   1,   1,   0, 0,   1,   1,   1,   1,   0,   1,   1,   0,   1, 1,   1, 0,   0,   1,   0, 1,   0,   0,   0,   1,   1,   1,   0,   1,   0,   1,   0,   1,   1,   1,   1,   1,   1,   1,   1,   1,   1,   0,   1,   1,   1,   0,   1,   1,   1, 1,   0,   1, 1,   1,   0,   0,   1,   1,   0,   0,   0,   0,   1, 1,   0,   1,   0,   1,   0,   1,   0,   1, 0,   0,   1,   0,   1,   1,   1,   1,   0, 1, 0, 1,   1,   1,   1,   1,   0,   0,   0,   1,   0,   0,   1,   1,   1, 1,   1,   1,   1,   0,   0, 1,   0,   1,   1,   1, 1, 1,   1,   1,   1,   1,   0,   1,   1,   1,   1,   0, 1,   0,   1,   1,   0,   1,   0,   0,   1,   1,   0,   1, 1,   1,   0, 1,   1,   1,   1,   0,   1,   1,   1, 1,   0,   1,   0,   1,   1,   1,   1,   1, 1,   1,   1,   1,   1,   1,   1,   0,   1,   0,   0,   1,   1,   0, 0,   0,   0,   1, 0,   1, 1,   1,   0, 0,   0,   1,   1,   1,   1,   1,   1,   1, 1,   1,   0,   1,   1,   0,   1,   1,   1,   1,   0,   1,   1, 1, 1,   0,   0, 1, 1,   1,   1,   1, 0,   1,   1,   1,   1,   0,   0, 1,   1,   1,   0,   0, 0,   1, 0,   1,   1,   1, 1, 1,   0,   1,   0, 1,   1,   0,   0,   1, 0,   0, 1,   0,   1,   0,   1,   1, 1,   1,   1,   0,   0,   1,   0,   1,   1,   1, 1,   1,   1, 1,   0,   1,   1,   1,   1,   0, 0,   1,   1,   1,   1, 0,   1, 1,   0,   1,   0,   1,   0,   1,   1, 0,   0,   1,   1,   1, 1,   0,   0,   0,   0,   0,   1,   0,   1,   1,   0, 0,   1, 1,   0,   0,   1,   1,   0,   1, 1,   1,   1,   1,   1,   1, 0,   1,   1, 0,   1,   1,   1,   1,   0,   0,   1,   0,   0,   1,   1,   1, 1,   1,   1,   0,   1,   1,   0,   0,   1,   1,   0, 1,   1,   1,   1,   1,   0,   1,   1,   1,   1, 1,   1,   1, 1,   1,   1,   0,   0,   0,   0, 1,   1,   1,   1,   0,   1,   1,   1,   1, 0,   1,   0,   1,   1,   1,   1,   1,   0,   1,   0,   1,   1,   1,   1,   1,   1,   1,   1, 0,   1,   1,   1, 0,   0,   1,   1,   1,   1,   1,   1,   1, 1,   0,   1,   1,   1,   0,   1,   1,   0,   0,   1,   1,   1,   1, 1,   1,   1,   0,   0,   0,   0, 0,   1,   1,   0,   1,   1,   1,   1,   0,   1,   1,   1,   1,   1,   1,   1,   1,   1, 0,   1,   1,   1, 0,   1,   0,   1,   1,   0,   1,   1,   1, 1,   1,   1,   0,   1,   0,   1,   1,   0,   1,   1,   0,   1, 0,   1,   0,   0,   1,   1,   1,   0,   0,   1,   1,   0, 1, 1,   1,   1,   1,   1,   1, 0,   0, 1,   1,   1,   0, 0,   1,   1,   1,   1, 1,   0,   1,   1,   1,   0,   1,   0,   1,   1,   1,   1,   0, 0,   0,   1,   1,   1,   1,   0,   0,   1,   1,   1,   0,   1,   1,   0,   0,   1, 1,   1, 1,   1,   0,   0,   0,   0,   1,   1,   1,   1, 1,   1,   0,   0,   1,   1,   0, 1,   1,   1,   0,   1,   1, 0, 0,   1, 0, 0,   0,   1,   1,   1,   0, 1, 1, 1,   1,   1,   0, 0,   0,   0,   1,   0, 1,   1,   1,   1,   1,   1,   1,   1, 1,   0,   0,   1,   0,   1,   1,   1, 0,   0,   0, 1,   1,   1,   1,   1,   0,   1,   0,   1,   1, 0,   1,   1,   0,   1,   0,   1,   1,   1,   0,   1,   1,   0,   1,   1,   1,   1,   1,   1, 1,   0,   1,   0,   1,   1,   1, 0,   1,   0,   1,   0,   1,   1, 1,   1,   1,   1,   1, 1,   1, 1,   1,   1,   0, 1,   0,   1,   1,   1,   1,   1,   1,   1,   1,   1,   1,   0,   1,   0,   1, 1, 0,   1,   0,   1,   0,   1,   1,   0,   1, 0,   1,   1,   0,   1,   1,   0,   1,   0,   0,   1,   1,   0,   0, 1, 1,   0,   1,   0,   0,   1,   1,   0, 1,   0,   1,   1,   1,   1,   0,   0,   1, 1, 0,   1,   1,   1,   0,   1,   0,   1,   1,   1,   0,   1,   1,   0,   0,   1,   1,   1,   1,   0,   1,   1,   0,   1,   1,   1,   1,   1, 0,   1,   0,   1,   1,   1,   0,   0,   
1,   0,   1,   1,   1,   0,   0,   0,   1,   1, 1,   1,   1,   1,   1,   1,   1,   0,   1,   1,   1,   0,   0, 0, 1,   0,   0,   1,   1,   1,   0,   1,   1,   0,   1, 1,   1,   1,   0, 1,   1,   0,   1,   1,   0,   1, 1,   1,   1,   1,   1,   0,   1,   1,   0,   1,   1,   0,   1,   0,   1,   1,   0,   1,   0,   1,   1,   1,   1,   1,   1,   0,   1,   0,   1,   1,   1,   1,   1,   1,   0,   1,   1,   0, 1,   1,   0, 0,   1,   1,   1,   0, 1,   1,   1, 1, 0,   0,   1,   1,   1,   1,   1,   0,   1,   1, 1,   1,   1,   1,   1,   1,   1,   1, 1,   1,   1,   1,   1,   0,   0,   1,   0,   1,   1,   0,   1,   0,   1,   1,   0,   1,   1,   1,   0,   1, 1,   1,   0,   0,   1,   1,   1,   1,   1, 0,   1,   0,   0, 1,   1,   1,   1,   0,   1,   0,   1,   1,   0, 1,   0,   1,   1,   1,   0,   1,   1, 1, 1,   1, 1 },
                { 0.9, 1.3, 1.5, 1, 2.4, 1.1, 1.3, 1.1, 0.8, 0.7, 1.1, 0.8, 1, 1, 1.4, 1.4, 1.2, 1.1, 1, 1.4, 2.5, 1.6, 1.2, 0.7, 0.8, 0.7, 0.7, 1.3, 1, 1.7, 1, 1.1, 1.3, 0.9, 1, 1.1, 0.7, 0.9, 1.3, 1.3, 0.9, 1.3, 0.9, 0.6, 1.2, 1.1, 1.1, 0.9, 1.1, 1.1, 1.9, 1.1, 0.9, 1.3, 1.1, 1.2, 0.8, 0.9, 1.7, 1.2, 0.7, 1.2, 1.6, 2.9, 1, 0.9, 0.8, 1, 1.5, 0.8, 0.7, 1.1, 1.2, 0.5, 0.9, 0.9, 0.9, 1.2, 1, 0.9, 1.3, 0.6, 1.2, 0.6, 1.7, 0.8, 1.1, 1, 1.1, 1.2, 0.6, 1.3, 1.7, 1.2, 2.7, 0.8, 1, 1, 1, 1.6, 1.2, 1.2, 2.4, 0.7, 0.8, 0.7, 1.3, 0.9, 1.1, 1.2, 1.2, 1.7, 1, 1.1, 2.1, 1.8, 2.2, 0.5, 1, 1.6, 1.2, 1.5, 1.1, 1, 1, 1.4, 1.2, 1.2, 1.1, 1.5, 0.9, 0.9, 1.2, 1.4, 1.2, 1, 0.9, 1.9, 1.7, 1.3, 1.2, 1.4, 1.7, 0.9, 1.6, 0.7, 0.9, 1, 1.1, 1.4, 1, 1.1, 1.4, 1.3, 1.1, 1.8, 1.6, 1.2, 1, 0.8, 1.1, 0.7, 1.4, 1.1, 1.8, 0.7, 1.7, 1, 1.2, 1.5, 1.1, 1.6, 1.7, 1.3, 0.8, 1.7, 1.5, 0.8, 1.4, 0.9, 4.7, 1, 0.7, 0.9, 0.9, 1, 2.6, 1, 1.1, 1.4, 1, 0.9, 1.3, 1.6, 1.8, 0.9, 1.6, 0.9, 1.2, 1, 1.2, 0.7, 1.4, 1.6, 1.2, 1.1, 1.3, 0.9, 1.3, 0.7, 1.2, 1.1, 1, 1, 0.9, 0.8, 1, 1, 1.5, 1.3, 1.2, 1, 1.2, 1.1, 1.6, 1.7, 1.1, 0.9, 1, 0.9, 1.3, 0.8, 0.8, 1, 0.9, 1, 1.2, 1.4, 0.8, 1, 1, 1.2, 1.3, 1.1, 1, 1.6, 1.2, 0.8, 2.1, 1, 1.5, 1, 1.5, 1.2, 1.1, 1.3, 0.8, 1, 1.1, 1.5, 0.8, 1.2, 1.1, 0.8, 0.9, 1.4, 1.5, 1, 1.3, 1.1, 1, 1.2, 2.2, 1.2, 1.3, 1.3, 0.7, 1, 1.1, 1.1, 1.7, 1.3, 1, 1.1, 1, 1.8, 1.2, 1.2, 1.3, 1.1, 1.6, 4.1, 1, 0.9, 1.5, 1.5, 1.4, 1, 0.5, 0.9, 0.8, 0.6, 1.3, 0.7, 0.7, 1.1, 1.4, 0.9, 1, 4.2, 1, 0.9, 1.2, 2.1, 2.4, 1.2, 2.5, 1, 1.2, 1.3, 0.9, 1.1, 1.6, 1, 1.5, 1.1, 1, 2.6, 0.9, 1.3, 1.4, 0.8, 1.4, 2.7, 1.1, 0.8, 1.2, 1.4, 1.2, 1, 1.1, 1.2, 1.2, 1.3, 1.2, 1.2, 1.2, 1.1, 1.3, 0.9, 1, 1.9, 1.4, 1.2, 1.1, 0.7, 0.9, 1.1, 1.1, 1.4, 1, 1.2, 1.4, 1, 2.8, 1.1, 0.7, 0.7, 1.2, 0.7, 1, 1.5, 1.5, 1.7, 0.9, 1.6, 0.9, 1.7, 0.8, 1, 0.8, 1.1, 1.3, 1.2, 0.9, 1.1, 1.1, 0.9, 0.9, 0.7, 1.1, 1.8, 0.8, 0.9, 1.4, 0.9, 1.2, 1.4, 1, 0.8, 1.4, 1.4, 1, 0.7, 0.6, 0.8, 1.4, 1.4, 0.8, 1.9, 1.3, 1, 1.3, 0.9, 0.9, 1.5, 1.5, 0.8, 1.4, 1.1, 0.8, 0.8, 1.4, 1.3, 1.1, 1, 2.2, 1.6, 1.7, 1.2, 0.9, 1.1, 1, 2.6, 1.6, 0.9, 0.9, 0.6, 1.6, 1.1, 1.3, 1.7, 1.2, 1.2, 1.6, 0.9, 0.9, 1.3, 2.2, 1.3, 1, 0.8, 1.5, 1.3, 1, 1.2, 1.4, 1.3, 1.3, 1.4, 1.3, 1.4, 1.2, 1, 1.2, 0.8, 1.1, 1.2, 1.1, 1.2, 1.8, 1.1, 1.6, 1.1, 0.8, 1.2, 1, 1.2, 0.7, 1.9, 1.6, 1.5, 0.9, 1.1, 0.8, 1.3, 1.1, 0.7, 1, 2, 1.2, 1.1, 1.2, 0.9, 0.9, 1, 1.2, 1, 1.2, 1.1, 1.1, 1, 1.4, 0.8, 1.3, 0.8, 1, 1.4, 1.2, 1.1, 1.3, 0.7, 1.2, 0.6, 1.2, 1.4, 1.5, 1.7, 0.9, 1, 0.6, 0.9, 1.4, 1.5, 1.1, 1.3, 0.9, 1.3, 2.1, 1.2, 1.3, 1.1, 1.4, 0.9, 1.4, 1.5, 1, 1.5, 1, 1.3, 0.9, 1.2, 1.1, 0.9, 1.1, 1.4, 1.4, 0.8, 1, 1.2, 1.1, 0.8, 1.2, 4.1, 0.7, 2, 0.8, 1.2, 3.8, 1.2, 1.4, 1, 1, 1.7, 1, 1, 0.9, 1.6, 0.7, 1.2, 1.1, 1, 1, 1, 0.7, 1.5, 1.1, 1, 0.9, 0.8, 0.9, 1.9, 1, 1.4, 1.3, 1.5, 0.9, 1.1, 1.2, 0.8, 1, 1.4, 4.2, 2.5, 1.2, 1.6, 0.9, 1.1, 1, 0.9, 0.6, 1, 1.1, 1.2, 1.2, 1.2, 0.9, 1.2, 1.1, 1.2, 0.8, 1, 1.6, 0.9, 1.2, 1.4, 0.7, 1.6, 1.1, 1.5, 0.8, 1.4, 0.9, 0.9, 1.5, 1.1, 9.1, 1.6, 1.3, 0.8, 4, 0.7, 1.4, 0.8, 3.6, 1.3, 1.3, 1, 1.1, 0.8, 1.2, 0.8, 0.9, 1.5, 1, 0.7, 6.6, 1.2, 1.8, 1, 1.3, 1, 1.3, 1.1, 1.1, 1, 0.9, 0.9, 1.1, 1.1, 0.9, 1.5, 4.5, 1.1, 0.8, 0.7, 0.9, 0.8, 1.6, 0.8, 0.8, 2, 2, 1.5, 1.2, 1.2, 0.9, 1.7, 1.4, 0.8, 1.5, 1, 1.1, 1.1, 0.7, 1.1, 1.2, 0.9, 1.5, 0.7, 1.5, 1.3, 0.8, 0.7, 1.1, 1, 1, 1.2, 0.8, 0.9, 0.8, 1.6, 2.4, 1.1, 2, 0.9, 1.4, 1.2, 1.1, 1.6, 0.9, 0.9, 1.3, 2, 1, 1.4, 1.3, 1.3, 0.7, 0.9, 1.1, 1.7, 1.1, 1.2, 0.9, 0.9, 1.1, 0.9, 5.2, 1.3, 0.7, 1.4, 1.4, 0.8, 1.2, 1.4, 0.9, 1.1, 1.1, 1.3, 1.7, 0.8, 1, 1.2, 1.9, 1.1, 1.3, 1.8, 0.6, 0.8, 
1.4, 0.7, 0.9, 0.9, 1.2, 1.5, 0.7, 4.2, 1.1, 1.1, 1, 1.3, 0.8, 1.3, 1.1, 1.1, 0.8, 1.1, 1.5, 1.2, 1.2, 1.1, 1.7, 1, 1, 0.9, 0.9, 0.8, 1.2, 1.1, 0.7, 0.8, 1.1, 0.9, 1.4, 1, 1.1, 1.4, 0.9, 1, 1.7, 0.9, 1.3, 1.3, 0.8, 2.1, 1, 0.9, 1.2, 0.9, 1.1, 1.1, 1.2, 1.2, 0.9, 1.4, 1.2, 0.8, 1.1, 1.3, 1.1, 1.1, 0.8, 0.9, 0.9, 1.2, 1.1, 1.7, 1.3, 1.1, 1.7, 0.8, 0.9, 1.5, 1.1, 1.4, 1.4, 1.5, 1.1, 1.3, 0.9, 1.1, 1.1, 0.7, 1, 1.1, 0.6, 1, 1.2, 1.4, 1.1, 0.8, 1, 1.3, 0.9, 1, 1, 0.9, 1.5, 1.1, 1.5, 6.3, 1.4, 1.1, 5.2, 1.6, 1, 1.2, 1.3, 0.6, 1.1, 1.2, 1.1, 1.2, 1, 1.1, 1.9, 1.1, 1.2, 1.1, 0.7, 1.4, 2.2, 1.1, 1.5, 0.9, 1.2, 1.1, 0.8, 1.3, 1.1, 1.3, 1.8, 1.1, 1.1, 1.1, 1, 0.8, 1.7, 1.2, 1.4, 1.1, 1.4, 1.1, 0.8, 1, 1.1, 1.2, 1.4, 1, 1.3, 1.1, 2.3, 0.7, 1.3, 0.7, 0.9, 0.9, 1.2, 2, 0.7, 1.2, 1.6, 1.3, 1.4, 2.7, 1.5, 1, 1, 1.5, 1 }
            };

            double[,] outputs =
            {
                { 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 9, 30, 30, 30, 30, 30, 2, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 2, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 13, 30, 30, 30, 30, 30, 30, 0, 30, 13, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 1, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 16, 30, 30, 30, 30, 30, 30, 0, 30, 30, 30, 30, 30, 30, 30, 30, 5, 30, 30, 30, 30, 30, 30, 30, 30, 1, 30, 21, 30, 30, 30, 30, 30, 30, 30, 30, 1, 30, 30, 30, 16, 30, 30, 30, 30, 30, 30, 30, 30, 2, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 16, 30, 30, 30, 30, 30, 3, 30, 18, 30, 30, 30, 30, 30, 28, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 0, 30, 30, 30, 30, 30, 1, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 17, 30, 1, 30, 30, 1, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 9, 30, 30, 30, 30, 1, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 9, 30, 30, 30, 27, 30, 30, 30, 30, 30, 30, 30, 30, 30, 0, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 16, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 20, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 1, 30, 30, 4, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 26, 30, 30, 30, 30, 12, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 21, 30, 1, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 9, 30, 30, 30, 30, 30, 30, 30, 1, 30, 1, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 0, 30, 25, 30, 30, 12, 30, 30, 30, 30, 30, 30, 10, 30, 30, 30, 30, 3, 30, 11, 30, 30, 30, 30, 30, 11, 30, 30, 30, 30, 30, 30, 4, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 1, 30, 30, 30, 30, 30, 1, 0, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 11, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 2, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 1, 30, 10, 30, 30, 30, 0, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 3, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 19, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 29, 30, 30, 30, 30, 30, 30, 2, 30, 30, 30, 30, 15, 30, 30, 30, 30, 30, 30, 30, 30, 3, 30, 30, 0, 30, 30, 30, 2, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 9, 30, 30, 30, 2, 30, 30, 30, 2, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 8, 30, 30, 30, 30, 30, 30, 0, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 14, 30, 30, 30, 9, 30, 30, 30, 30, 30, 13, 30, 30, 30, 4, 30, 30, 30, 30, 1, 30, 30, 30, 30, 30, 30, 10, 30, 30, 30, 30, 30, 30, 
30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 1, 30, 30, 2, 30, 30, 30, 30, 30 },
                {  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0, 1,  0,  0,  0,  0,  0, 1,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0, 1,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  1,  0,  0,  0,  0,  0,  0, 1,  0,  1,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0, 1,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  1,  0,  0,  0,  0,  0,  0, 1,  0,  0,  0,  0,  0,  0,  0,  0, 1,  0,  0,  0,  0,  0,  0,  0,  0, 1,  0,  1,  0,  0,  0,  0,  0,  0,  0,  0, 1,  0,  0,  0,  1,  0,  0,  0,  0,  0,  0,  0,  0, 1,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  1,  0,  0,  0,  0,  0, 1,  0,  1,  0,  0,  0,  0,  0,  1,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0, 1,  0,  0,  0,  0,  0, 1,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  1,  0, 1,  0,  0, 1,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0, 1,  0,  0,  0,  0, 1,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0, 1,  0,  0,  0,  1,  0,  0,  0,  0,  0,  0,  0,  0,  0, 1,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  1,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  1,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0, 1,  0,  0, 1,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  1,  0,  0,  0,  0,  1,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  1,  0, 1,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0, 1,  0,  0,  0,  0,  0,  0,  0, 1,  0, 1,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0, 1,  0,  1,  0,  0,  1,  0,  0,  0,  0,  0,  0,  1,  0,  0,  0,  0, 1,  0,  1,  0,  0,  0,  0,  0,  1,  0,  0,  0,  0,  0,  0, 1,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0, 1,  0,  0,  0,  0,  0, 1, 1,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  1,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0, 1,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0, 1,  0,  1,  0,  0,  0, 1,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0, 1,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  1,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  1,  0,  0,  0,  0,  0,  0, 1,  0,  0,  0,  0,  1,  0,  0,  0,  0,  0,  0,  0,  0, 1,  0,  0, 1,  0,  0,  0, 1,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0, 1,  0,  0,  0, 1,  0,  0,  0, 1,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0, 1,  0,  0,  0,  0,  0,  0, 1,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  1,  0,  0,  0, 1,  0,  0,  0,  0,  0,  1,  0,  0,  0, 1,  0,  0,  0,  0, 1,  0,  0,  0,  0,  0,  0,  1,  0,  0,  0,  0,  0,  0, 
 0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0, 1,  0,  0, 1,  0,  0,  0,  0,  0 }
            };


            double[][] covariates = inputs.Transpose().ToArray();
            double[]   time       = outputs.GetRow(0);
            int[]      censor     = outputs.GetRow(1).ToInt32();


            string inputStr  = inputs.Transpose().ToString(Accord.Math.DefaultMatrixFormatProvider.InvariantCulture);
            string outputStr = outputs.Transpose().ToString(Accord.Math.DefaultMatrixFormatProvider.InvariantCulture);

            ProportionalHazards regression          = new ProportionalHazards(2);
            ProportionalHazardsNewtonRaphson target = new ProportionalHazardsNewtonRaphson(regression);

            double error = target.Run(covariates, time, censor);

            double log = -2 * regression.GetPartialLogLikelihood(covariates, time, censor);

            Assert.AreEqual(-0.270, regression.Coefficients[0], 1e-4);
            Assert.AreEqual(0.463, regression.Coefficients[1], 1e-2);
            Assert.IsFalse(Double.IsNaN(regression.Coefficients[0]));
            Assert.IsFalse(Double.IsNaN(regression.Coefficients[1]));

            Assert.AreEqual(0.2454, regression.StandardErrors[0], 1e-4);
            Assert.AreEqual(0.0671, regression.StandardErrors[1], 1e-4);
            Assert.IsFalse(Double.IsNaN(regression.StandardErrors[0]));
            Assert.IsFalse(Double.IsNaN(regression.StandardErrors[1]));
        }
Example No. 7
        private void computeInner()
        {
            if (inputCount <= 2)
            {
                return;
            }

            // Perform likelihood-ratio tests against diminished nested models
            var innerModel = new ProportionalHazards(inputCount - 1);
            var learning   = createLearner(innerModel);

            for (int i = 0; i < inputCount; i++)
            {
                // Create a diminished inner model without the current variable
                double[][] data = inputData.RemoveColumn(i);

#if DEBUG
                if (data[0].Length == 0)
                {
                    throw new Exception("Removing the variable produced an empty input matrix.");
                }
#endif

                Array.Clear(innerModel.Coefficients, 0, inputCount - 1);

                learning.MaxIterations = Iterations;
                learning.Tolerance     = Tolerance;

                learning.Learn(data, timeData, censorData);


                double ratio = 2.0 * (logLikelihood - innerModel.GetPartialLogLikelihood(data, timeData, censorData));
                ratioTests[i] = new ChiSquareTest(ratio, 1);
            }

            innerComputed = true;
        }
        public void RunTest()
        {
            // Data from: http://www.sph.emory.edu/~cdckms/CoxPH/prophaz2.html

            double[,] data =
            {
                { 50,  1, 0 },
                { 70,  2, 1 },
                { 45,  3, 0 },
                { 35,  5, 0 },
                { 62,  7, 1 },
                { 50, 11, 0 },
                { 45,  4, 0 },
                { 57,  6, 0 },
                { 32,  8, 0 },
                { 57,  9, 1 },
                { 60, 10, 1 },
            };

            ProportionalHazards regression = new ProportionalHazards(1);

            regression.Coefficients[0]   = 0.37704239281494084;
            regression.StandardErrors[0] = 0.25415755113043753;

            double[][]        inputs = data.GetColumn(0).ToJagged();
            double[]          time   = data.GetColumn(1);
            SurvivalOutcome[] output = data.GetColumn(2).To<SurvivalOutcome[]>();


            {
                double actual   = -2 * regression.GetPartialLogLikelihood(inputs, time, output);
                double expected = 4.0505;
                Assert.AreEqual(expected, actual, 1e-4);
                Assert.IsFalse(Double.IsNaN(actual));
            }

            {
                var test = regression.GetWaldTest(0);
                Assert.AreEqual(0.1379, test.PValue, 1e-4);
            }

            {
                var ci = regression.GetConfidenceInterval(0);
                Assert.AreEqual(0.8859, ci.Min, 1e-4);
                Assert.AreEqual(2.3993, ci.Max, 1e-4);
            }


            {
                double actual   = regression.GetHazardRatio(0);
                double expected = 1.4580;
                Assert.AreEqual(expected, actual, 1e-4);
            }

            {
                var chi = regression.ChiSquare(inputs, time, output);
                Assert.AreEqual(7.3570, chi.Statistic, 1e-4);
                Assert.AreEqual(1, chi.DegreesOfFreedom);
                Assert.AreEqual(0.0067, chi.PValue, 1e-3);
            }
        }
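
// The three quantities asserted above are mutually consistent under the usual
// formulas: with beta = 0.37704 and SE = 0.25416,
//
//     exp(0.37704)                  ~ 1.4580
//     exp(0.37704 - 1.96 * 0.25416) ~ 0.8859
//     exp(0.37704 + 1.96 * 0.25416) ~ 2.3993
//
// matching the hazard ratio and the 95% confidence interval bounds checked by the test.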
        public void BaselineHazardTest()
        {
            double[,] data =
            {
                // t   c  in
                {  8, 0, 13 },
                {  4, 1, 56 },
                { 12, 0, 25 },
                {  6, 0, 64 },
                { 10, 0, 38 },
                {  8, 1, 80 },
                {  5, 0,  0 },
                {  5, 0, 81 },
                {  3, 1, 81 },
                { 14, 1, 38 },
                {  8, 0, 23 },
                { 11, 0, 99 },
                {  7, 0, 12 },
                {  7, 1, 36 },
                { 12, 0, 63 },
                {  8, 0, 92 },
                {  7, 0, 38 },
            };

            double[]   time   = data.GetColumn(0);
            int[]      censor = data.GetColumn(1).ToInt32();
            double[][] inputs = data.GetColumn(2).ToArray();

            ProportionalHazards regression = new ProportionalHazards(1);

            ProportionalHazardsNewtonRaphson target = new ProportionalHazardsNewtonRaphson(regression);

            target.Normalize = false;

            double error = target.Run(inputs, time, censor);
            double log   = -2 * regression.GetPartialLogLikelihood(inputs, time, censor);

            EmpiricalHazardDistribution baseline = regression.BaselineHazard as EmpiricalHazardDistribution;

            double[] actual = new double[(int)baseline.Support.Max];
            for (int i = (int)baseline.Support.Min; i < baseline.Support.Max; i++)
            {
                actual[i] = baseline.CumulativeHazardFunction(i);
            }

            Assert.AreEqual(14, actual.Length);

            double[] expected =
            {
                0,                                       0,                    0,
                0.025000345517572315, 0.052363663484639708, 0.052363663484639708, 0.052363663484639708,
                0.16317880290786446,
                0.34217461190678861,   0.34217461190678861,  0.34217461190678861,
                0.34217461190678861,   0.34217461190678861, 0.34217461190678861
            };

            for (int i = 0; i < actual.Length; i++)
            {
                Assert.AreEqual(expected[i], actual[i], 0.025);
            }
        }
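
// The cumulative hazard queried above is, in the usual Cox setup, a Breslow-type
// baseline estimator (sketched from standard references, not from the library's
// documentation):
//
//     H0(t) = sum_{t_k <= t}  d_k / sum_{j in R(t_k)} exp(x_j' beta),
//
// a step function that jumps only at observed event times, which is why consecutive
// entries of the expected vector repeat between events.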
Example No. 10
        /// <summary>
        ///   Runs the Newton-Raphson update for Cox's hazards learning until convergence.
        /// </summary>
        ///
        /// <param name="inputs">The input data.</param>
        /// <param name="censor">The output (event) associated with each input vector.</param>
        /// <param name="time">The time-to-event for the non-censored training samples.</param>
        ///
        /// <returns>The maximum relative change in the parameters after the iteration.</returns>
        ///
        public double Run(double[][] inputs, double[] time, SurvivalOutcome[] censor)
        {
            if (inputs.Length != time.Length || time.Length != censor.Length)
            {
                throw new DimensionMismatchException("time",
                                                     "The inputs, time and output vector must have the same length.");
            }


            // Sort data by time to accelerate performance
            EmpiricalHazardDistribution.Sort(ref time, ref censor, ref inputs);


            double[] means = new double[parameterCount];
            double[] sdev  = new double[parameterCount];
            for (int i = 0; i < sdev.Length; i++)
            {
                sdev[i] = 1;
            }

            if (normalize)
            {
                // Store means as regression centers
                means = inputs.Mean();
                for (int i = 0; i < means.Length; i++)
                {
                    regression.Offsets[i] = means[i];
                }

                // Convert to unit scores for increased accuracy
                sdev   = BestCS.Statistics.Tools.StandardDeviation(inputs);
                inputs = inputs.Subtract(means, 0).ElementwiseDivide(sdev, 0, inPlace: true);

                for (int i = 0; i < regression.Coefficients.Length; i++)
                {
                    regression.Coefficients[i] *= sdev[i];
                }
            }



            // Compute actual outputs
            double[] output = new double[inputs.Length];
            for (int i = 0; i < output.Length; i++)
            {
                double sum = 0;
                for (int j = 0; j < regression.Coefficients.Length; j++)
                {
                    sum += regression.Coefficients[j] * inputs[i][j];
                }
                output[i] = Math.Exp(sum);
            }

            // Compute ties
            int[] ties = new int[inputs.Length];
            for (int i = 0; i < inputs.Length; i++)
            {
                for (int j = 0; j < time.Length; j++)
                {
                    if (time[j] == time[i])
                    {
                        ties[i]++;
                    }
                }
            }

            if (parameterCount == 0)
            {
                createBaseline(time, censor, output);
                return regression.GetPartialLogLikelihood(inputs, time, censor);
            }

            CurrentIteration = 0;
            double smooth = Lambda;

            do
            {
                // learning iterations until convergence
                // or maximum number of iterations reached

                CurrentIteration++;

                // Reset Hessian matrix and gradient
                Array.Clear(gradient, 0, gradient.Length);
                Array.Clear(hessian, 0, hessian.Length);

                // For each observation instance
                for (int i = 0; i < inputs.Length; i++)
                {
                    // Check if we should censor
                    if (censor[i] == SurvivalOutcome.Censored)
                    {
                        continue;
                    }

                    // Compute partials
                    double den = 0;
                    Array.Clear(partialGradient, 0, partialGradient.Length);
                    Array.Clear(partialHessian, 0, partialHessian.Length);

                    for (int j = 0; j < inputs.Length; j++)
                    {
                        if (time[j] >= time[i])
                        {
                            den += output[j];
                        }
                    }

                    for (int j = 0; j < inputs.Length; j++)
                    {
                        if (time[j] >= time[i])
                        {
                            // Compute partial gradient
                            for (int k = 0; k < partialGradient.Length; k++)
                            {
                                partialGradient[k] += inputs[j][k] * output[j] / den;
                            }

                            // Compute partial Hessian
                            for (int ii = 0; ii < inputs[j].Length; ii++)
                            {
                                for (int jj = 0; jj < inputs[j].Length; jj++)
                                {
                                    partialHessian[ii, jj] += inputs[j][ii] * inputs[j][jj] * output[j] / den;
                                }
                            }
                        }
                    }

                    // Compute gradient vector
                    for (int j = 0; j < gradient.Length; j++)
                    {
                        gradient[j] += inputs[i][j] - partialGradient[j];
                    }

                    // Compute Hessian matrix
                    for (int j = 0; j < partialGradient.Length; j++)
                    {
                        for (int k = 0; k < partialGradient.Length; k++)
                        {
                            hessian[j, k] -= partialHessian[j, k] - partialGradient[j] * partialGradient[k];
                        }
                    }
                }


                // Decompose to solve the linear system. Usually the Hessian will
                // be invertible and LU will succeed. However, sometimes the Hessian
                // may be singular and a Singular Value Decomposition may be needed.

                // The SVD is very stable, but is quite expensive, being on average
                // about 10-15 times more expensive than LU decomposition. There are
                // other ways to avoid a singular Hessian. For a very interesting
                // reading on the subject, please see:
                //
                //  - Jeff Gill & Gary King, "What to Do When Your Hessian Is Not Invertible",
                //    Sociological Methods & Research, Vol 33, No. 1, August 2004, 54-87.
                //    Available in: http://gking.harvard.edu/files/help.pdf
                //

                decomposition = new SingularValueDecomposition(hessian);
                double[] deltas = decomposition.Solve(gradient);

                if (convergence.Iterations > 0 || convergence.Tolerance > 0)
                {
                    // Update coefficients using the calculated deltas
                    for (int i = 0; i < regression.Coefficients.Length; i++)
                    {
                        regression.Coefficients[i] -= smooth * deltas[i];
                    }
                }

                smooth += Lambda;
                if (smooth > 1)
                {
                    smooth = 1;
                }

                // Check relative maximum parameter change
                convergence.NewValues = regression.Coefficients;


                if (convergence.HasDiverged)
                {
                    // Restore previous coefficients
                    for (int i = 0; i < regression.Coefficients.Length; i++)
                    {
                        regression.Coefficients[i] = convergence.OldValues[i];
                    }
                }

                // Recompute current outputs
                for (int i = 0; i < output.Length; i++)
                {
                    double sum = 0;
                    for (int j = 0; j < regression.Coefficients.Length; j++)
                    {
                        sum += regression.Coefficients[j] * inputs[i][j];
                    }
                    output[i] = Math.Exp(sum);
                }
            } while (!convergence.HasConverged);


            for (int i = 0; i < regression.Coefficients.Length; i++)
            {
                regression.Coefficients[i] /= sdev[i];
            }

            if (computeStandardErrors)
            {
                // Grab the regression information matrix
                double[,] inverse = decomposition.Inverse();

                // Calculate coefficients' standard errors
                double[] standardErrors = regression.StandardErrors;
                for (int i = 0; i < standardErrors.Length; i++)
                {
                    standardErrors[i] = Math.Sqrt(Math.Abs(inverse[i, i])) / sdev[i];
                }
            }

            if (computeBaselineFunction)
            {
                createBaseline(time, censor, output);
            }

            return regression.GetPartialLogLikelihood(inputs, time, censor);
        }
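
// In summary, each pass of the loop above performs one damped Newton-Raphson step on
// Cox's partial log-likelihood. Writing R_i = { j : t_j >= t_i } for the risk set,
// the quantities accumulated in the code correspond to (standard notation, a sketch
// rather than the library's own derivation):
//
//     grad l(beta) = sum_{i: d_i = 1} ( x_i - xbar_i ),
//     xbar_i       = sum_{j in R_i} x_j exp(x_j' beta) / sum_{j in R_i} exp(x_j' beta),
//
//     H(beta) = -sum_{i: d_i = 1} [ sum_{j in R_i} x_j x_j' exp(x_j' beta) / den_i
//                                   - xbar_i xbar_i' ],
//
//     beta  <-  beta - lambda * H(beta)^{-1} grad l(beta),
//
// where lambda is the smoothing factor that ramps from Lambda up to 1, and the linear
// system is solved via SVD for robustness, as the comments in the code explain.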
Example No. 11
 public double Run(double[][] inputs, double[] time, SurvivalOutcome[] censor)
 {
     Learn(inputs, time, censor, null);
     return regression.GetPartialLogLikelihood(inputs, time, censor);
 }
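
// A minimal usage sketch of this compatibility wrapper, with inputs, time and censor
// assumed to be prepared as in Example No. 12 (i.e. jagged inputs and a
// SurvivalOutcome[] censoring vector):

        var regression = new ProportionalHazards(1);
        var learning   = new ProportionalHazardsNewtonRaphson(regression);

        // Run delegates to the newer Learn API and reports
        // the fitted model's partial log-likelihood.
        double logLikelihood = learning.Run(inputs, time, censor);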
Example No. 12
        public void BaselineHazardTestR()
        {
            double[,] data =
            {
                // t   c  in
                {  8, 0, 13 },
                {  4, 1, 56 },
                { 12, 0, 25 },
                {  6, 0, 64 },
                { 10, 0, 38 },
                {  8, 1, 80 },
                {  5, 0,  0 },
                {  5, 0, 81 },
                {  3, 1, 81 },
                { 14, 1, 38 },
                {  8, 0, 23 },
                { 11, 0, 99 },
                {  7, 0, 12 },
                {  7, 1, 36 },
                { 12, 0, 63 },
                {  8, 0, 92 },
                {  7, 0, 38 },
            };


            double[]          time   = data.GetColumn(0);
            SurvivalOutcome[] censor = data.GetColumn(1).To<SurvivalOutcome[]>();
            double[][]        inputs = data.GetColumn(2).ToJagged();

            var regression = new ProportionalHazards(1);

            var target = new ProportionalHazardsNewtonRaphson(regression);

            double error = target.Run(inputs, time, censor);

            // Assert.AreEqual(-10.257417973830666, error, 1e-8);

            /*
             * library('survival')
             * options(digits=17)
             * time <- c(8, 4, 12, 6, 10, 8, 5, 5, 3, 14, 8, 11, 7, 7, 12, 8, 7)
             * x <- c(13, 56, 25, 64, 38, 80, 0, 81, 81, 38, 23, 99, 12, 36, 63, 92, 38)
             * c <- c(0, 1, 0, 0, 0, 1, 0, 0, 1, 1, 0, 0, 0, 1, 0, 0, 0)
             *
             * fit <- coxph(Surv(time, c) ~ x, ties="breslow")
             *
             * predict(fit,type="risk")
             *
             * fit$loglik
             *
             *      coef           exp(coef)          se(coef)               z              p
             * x 0.01633097532122  1.016465054586   0.01711960930183    0.9539338797573   0.340117112635
             *
             * Likelihood ratio test=0.94  on 1 df, p=0.332836850925  n= 17, number of events= 5
             */

            // Tested against GNU R
            Assert.AreEqual(49.352941176470587, regression.Offsets[0]);
            Assert.AreEqual(0.01633097532122, regression.Coefficients[0], 1e-10);
            Assert.AreEqual(0.01711960930183, regression.StandardErrors[0], 1e-10);
            Assert.AreEqual(0.340117112635, regression.GetWaldTest(0).PValue, 1e-5);
            Assert.AreEqual(-10.2879332934202168, regression.GetPartialLogLikelihood(time, censor));
            Assert.AreEqual(-9.8190189050165948, regression.GetPartialLogLikelihood(inputs, time, censor));

            double[] actual = inputs.Apply(x => regression.Compute(x));

            /*
             * predict(r,type="risk")
             *  [1] 0.55229166964915244 1.11466393245000361 0.67185866444081555 1.27023351821156782 0.83076808526813917 1.64953983529334769 0.44664925161695829 1.67669959872327912
             *  [9] 1.67669959872327912 0.83076808526813917 0.65026895029003673 2.24967304521214029 0.54334545703992021 0.80407192663266613 1.24965783376477391 2.00665280971219540
             *  [17] 0.83076808526813917
             */

            double[] expected =
            {
                0.55229166964915244, 1.11466393245000361, 0.67185866444081555, 1.27023351821156782,
                0.83076808526813917, 1.64953983529334769, 0.44664925161695829, 1.67669959872327912,
                1.67669959872327912, 0.83076808526813917, 0.65026895029003673, 2.24967304521214029,
                0.54334545703992021, 0.80407192663266613, 1.24965783376477391, 2.00665280971219540,
                0.83076808526813917
            };

            for (int i = 0; i < actual.Length; i++)
            {
                Assert.AreEqual(expected[i], actual[i], 0.025);
            }
        }