Example #1
0
        public void KaplanMeierTest()
        {
            // Survival table: first column is the time-to-event, second column
            // flags whether the subject was censored (1) or failed (0).
            double[,] data =
            {
                //  time censor
                {  1, 0 },        // died at time 1
                {  2, 1 },        // lost at time 2
                {  3, 0 },        // died at time 3
                {  5, 0 },        // died at time 5
                {  7, 1 },        // lost at time 7
                { 11, 0 },        // ...
                {  4, 0 },
                {  6, 0 },
                {  8, 0 },
                {  9, 1 },
                { 10, 1 },
            };

            double[] times    = data.GetColumn(0);
            int[]    outcomes = data.GetColumn(1).ToInt32();

            // Intercept-only model (no covariates), fitted with Kaplan-Meier
            var model = new ProportionalHazards(inputs: 0);

            var teacher = new ProportionalHazardsNewtonRaphson(model)
            {
                Estimator = HazardEstimator.KaplanMeier
            };

            double logLikelihood = teacher.Run(times, outcomes);

            // Reference value for this dataset
            Assert.AreEqual(-5.7037824746562009, logLikelihood);
        }
Example #2
0
        public void PredictTest1()
        {
            // Data from: http://www.sph.emory.edu/~cdckms/CoxPH/prophaz2.html

            double[,] data =
            {
                { 50,  1, 0 },
                { 70,  2, 1 },
                { 45,  3, 0 },
                { 35,  5, 0 },
                { 62,  7, 1 },
                { 50, 11, 0 },
                { 45,  4, 0 },
                { 57,  6, 0 },
                { 32,  8, 0 },
                { 57,  9, 1 },
                { 60, 10, 1 },
            };

            // Pre-computed baseline hazard values, indexed by the times below
            double[] distHazards =
            {
                0, 0.0351683340828711, 0.0267358118285064, 0,
                0.0103643094219679, 0, 0, 0, 0, 0.000762266794052363, 0
            };

            double[] distTimes =
            {
                11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1
            };

            // Build a model whose baseline function and coefficients are fixed
            // in advance rather than estimated from data
            var regression = new ProportionalHazards(1,
                new EmpiricalHazardDistribution(distTimes, distHazards));

            regression.Coefficients[0]   = 0.37704239281494084;
            regression.StandardErrors[0] = 0.25415755113043753;
            regression.Offsets[0]        = 51.181818;

            double[][] inputs = data.GetColumn(0).ToArray();
            double[]   time   = data.GetColumn(1);

            // Reference predictions for every sample
            double[] expected =
            {
                0.000000000000, 0.919466527073, 0.000074105451, 0.000001707560,
                0.657371730925, 0.046771996036, 0.000074105451, 0.006836271860,
                0.000008042445, 0.339562971888, 2.029832541310
            };

            for (int i = 0; i < inputs.Length; i++)
            {
                double predicted = regression.Compute(inputs[i], time[i]);

                Assert.AreEqual(expected[i], predicted, 1e-6);
                Assert.IsFalse(Double.IsNaN(predicted));
            }
        }
Example #3
0
        /// <summary>
        ///   Computes likelihood-ratio tests by fitting nested models that
        ///   each leave one of the input variables out of the regression.
        /// </summary>
        private void computeInner()
        {
            // There is nothing meaningful to compare with too few variables
            if (inputCount <= 2)
            {
                return;
            }

            // Perform likelihood-ratio tests against diminished nested models
            ProportionalHazards innerModel            = new ProportionalHazards(inputCount - 1);
            ProportionalHazardsNewtonRaphson learning = new ProportionalHazardsNewtonRaphson(innerModel);

            // These settings do not depend on the loop variable; set them once
            // instead of on every iteration
            learning.Iterations = Iterations;
            learning.Tolerance  = Tolerance;

            for (int i = 0; i < inputCount; i++)
            {
                // Create a diminished inner model without the current variable
                double[][] data = inputData.RemoveColumn(i);

                System.Diagnostics.Trace.Assert(data[0].Length > 0);

                // Reset coefficients so the previous iteration's fit does not
                // leak into this one (the learner is reused across iterations)
                Array.Clear(innerModel.Coefficients, 0, inputCount - 1);

                learning.Run(data, timeData, censorData);

                // Likelihood-ratio statistic: twice the log-likelihood gap
                // between the full model and the diminished model, chi-square
                // distributed with one degree of freedom
                double ratio = 2.0 * (logLikelihood - innerModel.GetPartialLogLikelihood(data, timeData, censorData));
                ratioTests[i] = new ChiSquareTest(ratio, 1);
            }

            innerComputed = true;
        }
        public void RunTest()
        {
            // Data from: http://www.sph.emory.edu/~cdckms/CoxPH/prophaz2.html

            double[,] data =
            {
                { 50,  1, 0 },
                { 70,  2, 1 },
                { 45,  3, 0 },
                { 35,  5, 0 },
                { 62,  7, 1 },
                { 50, 11, 0 },
                { 45,  4, 0 },
                { 57,  6, 0 },
                { 32,  8, 0 },
                { 57,  9, 1 },
                { 60, 10, 1 },
            };

            var regression = new ProportionalHazards(1);

            double[][] inputs = data.GetColumn(0).ToArray();
            double[]   time   = data.GetColumn(1);
            int[]      output = data.GetColumn(2).ToInt32();

            var teacher = new ProportionalHazardsNewtonRaphson(regression);

            double error = teacher.Run(inputs, time, output);

            // Deviance of the fitted model
            double log = -2 * regression.GetPartialLogLikelihood(inputs, time, output);

            // Coefficient and standard error should match the reference values
            Assert.AreEqual(0.3770, regression.Coefficients[0], 1e-4);
            Assert.IsFalse(Double.IsNaN(regression.Coefficients[0]));

            Assert.AreEqual(0.2542, regression.StandardErrors[0], 1e-4);
            Assert.IsFalse(Double.IsNaN(regression.StandardErrors[0]));

            double[] expected =
            {
                // Computed using R's predict(fit,type="risk")
                0.640442743,  1206.226657448, 0.097217211, 0.002240107,
                59.081223025,    0.640442743, 0.097217211, 8.968345353,
                0.000722814,     8.968345353, 27.794227993
            };

            for (int i = 0; i < inputs.Length; i++)
            {
                double risk = regression.Compute(inputs[i]);

                Assert.AreEqual(expected[i], risk, 1e-3);
                Assert.IsFalse(Double.IsNaN(risk));
            }
        }
Example #5
0
        /// <summary>
        ///   Computes the analysis for an already-fitted regression model.
        /// </summary>
        ///
        /// <param name="regression">The fitted model to analyze.</param>
        ///
        public void Compute(ProportionalHazards regression)
        {
            this.regression = regression;

            computeInformation();

            // Nested-model (inner) statistics become stale for the new model
            // and must be recomputed on demand
            innerComputed = false;
        }
Example #6
0
        /// <summary>
        /// Learns a model that can map the given inputs to the given outputs.
        /// </summary>
        /// <param name="inputs">The model inputs.</param>
        /// <param name="time">The time-to-event for the non-censored training samples.</param>
        /// <param name="censor">The output (event) associated with each input vector.</param>
        /// <param name="weights">The weight of importance for each input-output pair (if supported by the learning algorithm).</param>
        /// <returns>
        /// A model that has learned how to produce <paramref name="censor" /> given <paramref name="inputs" /> and <paramref name="time" />.
        /// </returns>
        public ProportionalHazards Learn(double[][] inputs, double[] time, int[] censor, double[] weights = null)
        {
            // Delegate the actual fitting to a freshly configured teacher
            var teacher = createLearner(regression);
            this.regression = teacher.Learn(inputs, time, censor, weights);

            // Record the training data before handing the model back
            initialize(inputs, time, censor.To<SurvivalOutcome[]>());
            return store();
        }
Example #7
0
        /// <summary>
        /// Learns a model that can map the given inputs to the given outputs.
        /// </summary>
        /// <param name="x">The model inputs.</param>
        /// <param name="y">The desired outputs associated with each <paramref name="x">inputs</paramref>.</param>
        /// <param name="weights">The weight of importance for each input-output pair (if supported by the learning algorithm).</param>
        /// <returns>
        /// A model that has learned how to produce <paramref name="y" /> given <paramref name="x" />.
        /// </returns>
        public ProportionalHazards Learn(Tuple<double[], double>[] x, SurvivalOutcome[] y, double[] weights = null)
        {
            // Delegate the actual fitting to a freshly configured teacher
            var teacher = createLearner(regression);
            this.regression = teacher.Learn(x, y, weights);

            // Split the (features, time) tuples back apart before initializing
            double[][] features = x.Apply(pair => pair.Item1);
            double[] times = x.Apply(pair => pair.Item2);
            initialize(features, times, y);

            return store();
        }
Example #8
0
 /// <summary>
 ///   Creates a Newton-Raphson teacher configured with this
 ///   instance's current settings.
 /// </summary>
 private ProportionalHazardsNewtonRaphson createLearner(ProportionalHazards model)
 {
     var teacher = new ProportionalHazardsNewtonRaphson();
     teacher.Model = model;
     teacher.MaxIterations = Iterations;
     teacher.Tolerance = Tolerance;
     teacher.Token = Token;
     return teacher;
 }
Example #9
0
        /// <summary>
        ///   Stores the model and allocates the gradient and Hessian
        ///   buffers sized to the model's coefficient count.
        /// </summary>
        private void init(ProportionalHazards hazards)
        {
            this.regression = hazards;
            this.parameterCount = hazards.Coefficients.Length;

            int n = parameterCount;

            this.gradient = new double[n];
            this.hessian = new double[n, n];

            this.partialGradient = new double[n];
            this.partialHessian = new double[n, n];
        }
Example #10
0
        private static void coxProportionalHazards()
        {
            // Survival analysis example: every row in the table below describes
            // one patient under care in a hospital. The first column is the only
            // input feature (age; there could be many more, like height or
            // weight), the second is how long the patient was observed, and the
            // third says what happened at the end of that period: a failure
            // event (e.g. death) or censoring (we simply lost track of them).

            object[,] data =
            {
                //    input         time until           outcome
                // (features)     event happened     (what happened?)
                { 50,  1, SurvivalOutcome.Censored },
                { 70,  2, SurvivalOutcome.Failed   },
                { 45,  3, SurvivalOutcome.Censored },
                { 35,  5, SurvivalOutcome.Censored },
                { 62,  7, SurvivalOutcome.Failed   },
                { 50, 11, SurvivalOutcome.Censored },
                { 45,  4, SurvivalOutcome.Censored },
                { 57,  6, SurvivalOutcome.Censored },
                { 32,  8, SurvivalOutcome.Censored },
                { 57,  9, SurvivalOutcome.Failed   },
                { 60, 10, SurvivalOutcome.Failed   },
            }; // Note: a censored sample only tells us things were fine up to
               // the recorded time; what happened afterwards is unknown.

            // Split the table into the three arrays the teacher expects
            double[][]        features = data.GetColumn(0).ToDouble().ToJagged();
            double[]          times    = data.GetColumn(1).ToDouble();
            SurvivalOutcome[] outcomes = data.GetColumn(2).To<SurvivalOutcome[]>();

            // Create a new PH Newton-Raphson learning algorithm
            var teacher = new ProportionalHazardsNewtonRaphson()
            {
                ComputeBaselineFunction = true,
                ComputeStandardErrors   = true,
                MaxIterations           = 100
            };

            // Use the learning algorithm to infer a Proportional Hazards model
            ProportionalHazards regression = teacher.Learn(features, times, outcomes);

            // Use the regression to make predictions (problematic)
            SurvivalOutcome[] prediction = regression.Decide(features);

            // Use the regression to make score estimates
            double[] score = regression.Score(features);

            // Use the regression to make probability estimates
            double[] probability = regression.Probability(features);
        }
Example #11
0
        /// <summary>
        ///   Computes the Proportional Hazards Analysis for an already computed regression.
        /// </summary>
        ///
        public void Compute(ProportionalHazards regression, double limit = 1e-4, int maxIterations = 50)
        {
            this.regression = regression;

            computeInformation();

            // Nested likelihood-ratio tests only apply when there are input
            // variables that can be dropped from the model
            if (inputCount > 0)
            {
                computeInner(limit, maxIterations);
            }
        }
        public void RunTest2()
        {
            // Data from: http://www.sph.emory.edu/~cdckms/CoxPH/prophaz2.html

            double[,] data =
            {
                { 50, 30,  1, 0 },
                { 70, 22,  2, 1 },
                { 45, 12,  3, 0 },
                { 35, 22,  5, 0 },
                { 62, 54,  7, 1 },
                { 50, 12, 11, 0 },
                { 45, 11,  4, 0 },
                { 57, 62,  6, 0 },
                { 32, 16,  8, 0 },
                { 57, 14,  9, 1 },
                { 60, 12, 10, 1 },
            };

            var regression = new ProportionalHazards(2);

            // First two columns are covariates; the others are time and censoring
            double[][] inputs = data.Submatrix(null, 0, 1).ToArray();
            double[]   time   = data.GetColumn(2);
            int[]      output = data.GetColumn(3).ToInt32();

            var teacher = new ProportionalHazardsNewtonRaphson(regression);

            double error = teacher.Run(inputs, time, output);

            // Deviance of the fitted two-variable model
            double log = -2 * regression.GetPartialLogLikelihood(inputs, time, output);

            Assert.AreEqual(3.4261, log, 1e-4);
            Assert.IsFalse(Double.IsNaN(log));

            double actual = regression.Coefficients[0];

            // Both coefficients and both standard errors should match R
            Assert.AreEqual(0.3909, regression.Coefficients[0], 1e-4);
            Assert.IsFalse(Double.IsNaN(regression.Coefficients[0]));

            Assert.AreEqual(0.0424, regression.Coefficients[1], 1e-4);
            Assert.IsFalse(Double.IsNaN(regression.Coefficients[1]));

            Assert.AreEqual(0.2536, regression.StandardErrors[0], 1e-4);
            Assert.IsFalse(Double.IsNaN(regression.StandardErrors[0]));

            Assert.AreEqual(0.0624, regression.StandardErrors[1], 1e-4);
            Assert.IsFalse(Double.IsNaN(regression.StandardErrors[1]));
        }
        public void PredictTest1()
        {
            // Data from: http://www.sph.emory.edu/~cdckms/CoxPH/prophaz2.html

            double[,] data =
            {
                { 50,  1, 0 },
                { 70,  2, 1 },
                { 45,  3, 0 },
                { 35,  5, 0 },
                { 62,  7, 1 },
                { 50, 11, 0 },
                { 45,  4, 0 },
                { 57,  6, 0 },
                { 32,  8, 0 },
                { 57,  9, 1 },
                { 60, 10, 1 },
            };

            var regression = new ProportionalHazards(1);

            double[][] inputs = data.GetColumn(0).ToArray();
            double[]   time   = data.GetColumn(1);
            int[]      output = data.GetColumn(2).ToInt32();

            // Unlike the fixed-baseline variant of this test, fit the model
            // from the data before asking it for predictions
            var teacher = new ProportionalHazardsNewtonRaphson(regression);

            double error = teacher.Run(inputs, time, output);

            // Reference predictions for every sample
            double[] expected =
            {
                0.000000000000, 0.919466527073, 0.000074105451, 0.000001707560,
                0.657371730925, 0.046771996036, 0.000074105451, 0.006836271860,
                0.000008042445, 0.339562971888, 2.029832541310
            };

            for (int i = 0; i < inputs.Length; i++)
            {
                double predicted = regression.Compute(inputs[i], time[i]);

                Assert.AreEqual(expected[i], predicted, 1e-6);
                Assert.IsFalse(Double.IsNaN(predicted));
            }
        }
        /// <summary>
        ///   Constructs a new Newton-Raphson learning algorithm
        ///   for Cox's Proportional Hazards models.
        /// </summary>
        ///
        /// <param name="hazards">The model to estimate.</param>
        ///
        public ProportionalHazardsNewtonRaphson(ProportionalHazards hazards)
        {
            this.regression     = hazards;
            this.parameterCount = hazards.Coefficients.Length;

            // Buffers for the Newton-Raphson step, sized to the number of
            // model coefficients
            this.hessian  = new double[parameterCount, parameterCount];
            this.gradient = new double[parameterCount];

            // Scratch buffers reused while accumulating the full gradient
            // and Hessian
            this.partialHessian  = new double[parameterCount, parameterCount];
            this.partialGradient = new double[parameterCount];

            // NOTE(review): Iterations = 0 presumably means "no fixed cap,
            // stop on tolerance alone" — confirm against
            // RelativeParameterConvergence's documentation
            this.convergence = new RelativeParameterConvergence()
            {
                Iterations = 0,
                Tolerance  = 1e-5
            };
        }
        public void RunTest4()
        {
            // Data from: http://www.sph.emory.edu/~cdckms/CoxPH/prophaz2.html
            // with added tied times

            double[,] data =
            {
                { 50,  1, 1 },
                { 60,  1, 1 },
                { 40,  1, 1 },
                { 51,  1, 1 },
                { 70,  2, 1 },
                { 45,  3, 0 },
                { 35,  5, 0 },
                { 62,  7, 1 },
                { 50, 11, 0 },
                { 45,  4, 0 },
                { 57,  6, 0 },
                { 32,  8, 0 },
                { 57,  9, 1 },
                { 60, 10, 1 },
            };

            var regression = new ProportionalHazards(1);

            double[][] inputs = data.GetColumn(0).ToArray();
            double[]   time   = data.GetColumn(1);
            int[]      output = data.GetColumn(2).ToInt32();

            var teacher = new ProportionalHazardsNewtonRaphson(regression);

            double error = teacher.Run(inputs, time, output);

            double log = -2 * regression.GetPartialLogLikelihood(inputs, time, output);

            // Tied event times must still yield finite, correct estimates
            Assert.AreEqual(0.04863, regression.Coefficients[0], 1e-4);
            Assert.IsFalse(Double.IsNaN(regression.Coefficients[0]));

            Assert.AreEqual(0.04186, regression.StandardErrors[0], 1e-4);
            Assert.IsFalse(Double.IsNaN(regression.StandardErrors[0]));
        }
Example #16
0
        /// <summary>
        ///   Constructs a new Newton-Raphson learning algorithm
        ///   for Cox's Proportional Hazards models.
        /// </summary>
        ///
        /// <param name="hazards">The model to estimate.</param>
        ///
        public ProportionalHazardsNewtonRaphson(ProportionalHazards hazards)
        {
            this.regression     = hazards;
            this.parameterCount = hazards.Coefficients.Length;

            // Buffers for the Newton-Raphson step, sized to the number of
            // model coefficients
            this.hessian  = new double[parameterCount, parameterCount];
            this.gradient = new double[parameterCount];

            // Scratch buffers reused while accumulating the full gradient
            // and Hessian
            this.partialHessian  = new double[parameterCount, parameterCount];
            this.partialGradient = new double[parameterCount];

            // NOTE(review): Iterations = 0 presumably means "no fixed cap,
            // stop on tolerance alone" — confirm against
            // RelativeParameterConvergence's documentation
            this.convergence = new RelativeParameterConvergence()
            {
                Iterations = 0,
                Tolerance  = 1e-5
            };

            // Default configuration: Breslow-Nelson-Aalen baseline estimator,
            // Efron's method for tied event times, and ridge strength 0.1
            this.Estimator = HazardEstimator.BreslowNelsonAalen;
            this.Ties      = HazardTiesMethod.Efron;
            this.Lambda    = 0.1;
        }
Example #17
0
        public void KaplanMeierTest2()
        {
            // Expand the per-year (died, censored) counts into one row per subject
            int[][] data = Classes.Expand(
                new[] { 1, 2, 3, 4, 5, 6 },     // years
                new[] { 3, 3, 3, 3, 3, 0 },     // died
                new[] { 5, 10, 15, 20, 25, 10 } // censored
                );

            double[] time   = data.GetColumn(0).ToDouble();
            int[]    censor = data.GetColumn(1);

            ProportionalHazards kp;
            ProportionalHazards km;

            double errkm, errp;

            // Fit an intercept-only model using the Kaplan-Meier estimator
            {
                km = new ProportionalHazards(inputs: 0);

                var teacher = new ProportionalHazardsNewtonRaphson(km)
                {
                    Estimator = HazardEstimator.KaplanMeier
                };

                errkm = teacher.Run(time, censor);
                Assert.AreEqual(-63.734599918211551, errkm);
            }

            // With no covariates, Breslow-Nelson-Aalen should agree exactly
            {
                kp = new ProportionalHazards(inputs: 0);

                var teacher = new ProportionalHazardsNewtonRaphson(kp)
                {
                    Estimator = HazardEstimator.BreslowNelsonAalen
                };

                errp = teacher.Run(time, censor);
                Assert.AreEqual(errkm, errp);
            }
        }
        public void RunTest5()
        {
            double[,] inputs =
            {
                {   1,   1,   1, 1,   1,   0,   1,   1,   0,   0,   1,   0, 0, 1,   1,   1,   1,   0, 0,   1,   1,   1,   1,   0,   1,   1,   0,   1, 1,   1, 0,   0,   1,   0, 1,   0,   0,   0,   1,   1,   1,   0,   1,   0,   1,   0,   1,   1,   1,   1,   1,   1,   1,   1,   1,   1,   0,   1,   1,   1,   0,   1,   1,   1, 1,   0,   1, 1,   1,   0,   0,   1,   1,   0,   0,   0,   0,   1, 1,   0,   1,   0,   1,   0,   1,   0,   1, 0,   0,   1,   0,   1,   1,   1,   1,   0, 1, 0, 1,   1,   1,   1,   1,   0,   0,   0,   1,   0,   0,   1,   1,   1, 1,   1,   1,   1,   0,   0, 1,   0,   1,   1,   1, 1, 1,   1,   1,   1,   1,   0,   1,   1,   1,   1,   0, 1,   0,   1,   1,   0,   1,   0,   0,   1,   1,   0,   1, 1,   1,   0, 1,   1,   1,   1,   0,   1,   1,   1, 1,   0,   1,   0,   1,   1,   1,   1,   1, 1,   1,   1,   1,   1,   1,   1,   0,   1,   0,   0,   1,   1,   0, 0,   0,   0,   1, 0,   1, 1,   1,   0, 0,   0,   1,   1,   1,   1,   1,   1,   1, 1,   1,   0,   1,   1,   0,   1,   1,   1,   1,   0,   1,   1, 1, 1,   0,   0, 1, 1,   1,   1,   1, 0,   1,   1,   1,   1,   0,   0, 1,   1,   1,   0,   0, 0,   1, 0,   1,   1,   1, 1, 1,   0,   1,   0, 1,   1,   0,   0,   1, 0,   0, 1,   0,   1,   0,   1,   1, 1,   1,   1,   0,   0,   1,   0,   1,   1,   1, 1,   1,   1, 1,   0,   1,   1,   1,   1,   0, 0,   1,   1,   1,   1, 0,   1, 1,   0,   1,   0,   1,   0,   1,   1, 0,   0,   1,   1,   1, 1,   0,   0,   0,   0,   0,   1,   0,   1,   1,   0, 0,   1, 1,   0,   0,   1,   1,   0,   1, 1,   1,   1,   1,   1,   1, 0,   1,   1, 0,   1,   1,   1,   1,   0,   0,   1,   0,   0,   1,   1,   1, 1,   1,   1,   0,   1,   1,   0,   0,   1,   1,   0, 1,   1,   1,   1,   1,   0,   1,   1,   1,   1, 1,   1,   1, 1,   1,   1,   0,   0,   0,   0, 1,   1,   1,   1,   0,   1,   1,   1,   1, 0,   1,   0,   1,   1,   1,   1,   1,   0,   1,   0,   1,   1,   1,   1,   1,   1,   1,   1, 0,   1,   1,   1, 0,   0,   1,   1,   1,   1,   1,   1,   1, 1,   0,   1,   1,   1,   0,   1,   1,   0,   0,   
1,   1,   1,   1, 1,   1,   1,   0,   0,   0,   0, 0,   1,   1,   0,   1,   1,   1,   1,   0,   1,   1,   1,   1,   1,   1,   1,   1,   1, 0,   1,   1,   1, 0,   1,   0,   1,   1,   0,   1,   1,   1, 1,   1,   1,   0,   1,   0,   1,   1,   0,   1,   1,   0,   1, 0,   1,   0,   0,   1,   1,   1,   0,   0,   1,   1,   0, 1, 1,   1,   1,   1,   1,   1, 0,   0, 1,   1,   1,   0, 0,   1,   1,   1,   1, 1,   0,   1,   1,   1,   0,   1,   0,   1,   1,   1,   1,   0, 0,   0,   1,   1,   1,   1,   0,   0,   1,   1,   1,   0,   1,   1,   0,   0,   1, 1,   1, 1,   1,   0,   0,   0,   0,   1,   1,   1,   1, 1,   1,   0,   0,   1,   1,   0, 1,   1,   1,   0,   1,   1, 0, 0,   1, 0, 0,   0,   1,   1,   1,   0, 1, 1, 1,   1,   1,   0, 0,   0,   0,   1,   0, 1,   1,   1,   1,   1,   1,   1,   1, 1,   0,   0,   1,   0,   1,   1,   1, 0,   0,   0, 1,   1,   1,   1,   1,   0,   1,   0,   1,   1, 0,   1,   1,   0,   1,   0,   1,   1,   1,   0,   1,   1,   0,   1,   1,   1,   1,   1,   1, 1,   0,   1,   0,   1,   1,   1, 0,   1,   0,   1,   0,   1,   1, 1,   1,   1,   1,   1, 1,   1, 1,   1,   1,   0, 1,   0,   1,   1,   1,   1,   1,   1,   1,   1,   1,   1,   0,   1,   0,   1, 1, 0,   1,   0,   1,   0,   1,   1,   0,   1, 0,   1,   1,   0,   1,   1,   0,   1,   0,   0,   1,   1,   0,   0, 1, 1,   0,   1,   0,   0,   1,   1,   0, 1,   0,   1,   1,   1,   1,   0,   0,   1, 1, 0,   1,   1,   1,   0,   1,   0,   1,   1,   1,   0,   1,   1,   0,   0,   1,   1,   1,   1,   0,   1,   1,   0,   1,   1,   1,   1,   1, 0,   1,   0,   1,   1,   1,   0,   0,   1,   0,   1,   1,   1,   0,   0,   0,   1,   1, 1,   1,   1,   1,   1,   1,   1,   0,   1,   1,   1,   0,   0, 0, 1,   0,   0,   1,   1,   1,   0,   1,   1,   0,   1, 1,   1,   1,   0, 1,   1,   0,   1,   1,   0,   1, 1,   1,   1,   1,   1,   0,   1,   1,   0,   1,   1,   0,   1,   0,   1,   1,   0,   1,   0,   1,   1,   1,   1,   1,   1,   0,   1,   0,   1,   1,   1,   1,   1,   1,   0,   1,   1,   0, 1,   1,   0, 0,   1,   1,   1,   0, 1, 
  1,   1, 1, 0,   0,   1,   1,   1,   1,   1,   0,   1,   1, 1,   1,   1,   1,   1,   1,   1,   1, 1,   1,   1,   1,   1,   0,   0,   1,   0,   1,   1,   0,   1,   0,   1,   1,   0,   1,   1,   1,   0,   1, 1,   1,   0,   0,   1,   1,   1,   1,   1, 0,   1,   0,   0, 1,   1,   1,   1,   0,   1,   0,   1,   1,   0, 1,   0,   1,   1,   1,   0,   1,   1, 1, 1,   1, 1 },
                { 0.9, 1.3, 1.5, 1, 2.4, 1.1, 1.3, 1.1, 0.8, 0.7, 1.1, 0.8, 1, 1, 1.4, 1.4, 1.2, 1.1, 1, 1.4, 2.5, 1.6, 1.2, 0.7, 0.8, 0.7, 0.7, 1.3, 1, 1.7, 1, 1.1, 1.3, 0.9, 1, 1.1, 0.7, 0.9, 1.3, 1.3, 0.9, 1.3, 0.9, 0.6, 1.2, 1.1, 1.1, 0.9, 1.1, 1.1, 1.9, 1.1, 0.9, 1.3, 1.1, 1.2, 0.8, 0.9, 1.7, 1.2, 0.7, 1.2, 1.6, 2.9, 1, 0.9, 0.8, 1, 1.5, 0.8, 0.7, 1.1, 1.2, 0.5, 0.9, 0.9, 0.9, 1.2, 1, 0.9, 1.3, 0.6, 1.2, 0.6, 1.7, 0.8, 1.1, 1, 1.1, 1.2, 0.6, 1.3, 1.7, 1.2, 2.7, 0.8, 1, 1, 1, 1.6, 1.2, 1.2, 2.4, 0.7, 0.8, 0.7, 1.3, 0.9, 1.1, 1.2, 1.2, 1.7, 1, 1.1, 2.1, 1.8, 2.2, 0.5, 1, 1.6, 1.2, 1.5, 1.1, 1, 1, 1.4, 1.2, 1.2, 1.1, 1.5, 0.9, 0.9, 1.2, 1.4, 1.2, 1, 0.9, 1.9, 1.7, 1.3, 1.2, 1.4, 1.7, 0.9, 1.6, 0.7, 0.9, 1, 1.1, 1.4, 1, 1.1, 1.4, 1.3, 1.1, 1.8, 1.6, 1.2, 1, 0.8, 1.1, 0.7, 1.4, 1.1, 1.8, 0.7, 1.7, 1, 1.2, 1.5, 1.1, 1.6, 1.7, 1.3, 0.8, 1.7, 1.5, 0.8, 1.4, 0.9, 4.7, 1, 0.7, 0.9, 0.9, 1, 2.6, 1, 1.1, 1.4, 1, 0.9, 1.3, 1.6, 1.8, 0.9, 1.6, 0.9, 1.2, 1, 1.2, 0.7, 1.4, 1.6, 1.2, 1.1, 1.3, 0.9, 1.3, 0.7, 1.2, 1.1, 1, 1, 0.9, 0.8, 1, 1, 1.5, 1.3, 1.2, 1, 1.2, 1.1, 1.6, 1.7, 1.1, 0.9, 1, 0.9, 1.3, 0.8, 0.8, 1, 0.9, 1, 1.2, 1.4, 0.8, 1, 1, 1.2, 1.3, 1.1, 1, 1.6, 1.2, 0.8, 2.1, 1, 1.5, 1, 1.5, 1.2, 1.1, 1.3, 0.8, 1, 1.1, 1.5, 0.8, 1.2, 1.1, 0.8, 0.9, 1.4, 1.5, 1, 1.3, 1.1, 1, 1.2, 2.2, 1.2, 1.3, 1.3, 0.7, 1, 1.1, 1.1, 1.7, 1.3, 1, 1.1, 1, 1.8, 1.2, 1.2, 1.3, 1.1, 1.6, 4.1, 1, 0.9, 1.5, 1.5, 1.4, 1, 0.5, 0.9, 0.8, 0.6, 1.3, 0.7, 0.7, 1.1, 1.4, 0.9, 1, 4.2, 1, 0.9, 1.2, 2.1, 2.4, 1.2, 2.5, 1, 1.2, 1.3, 0.9, 1.1, 1.6, 1, 1.5, 1.1, 1, 2.6, 0.9, 1.3, 1.4, 0.8, 1.4, 2.7, 1.1, 0.8, 1.2, 1.4, 1.2, 1, 1.1, 1.2, 1.2, 1.3, 1.2, 1.2, 1.2, 1.1, 1.3, 0.9, 1, 1.9, 1.4, 1.2, 1.1, 0.7, 0.9, 1.1, 1.1, 1.4, 1, 1.2, 1.4, 1, 2.8, 1.1, 0.7, 0.7, 1.2, 0.7, 1, 1.5, 1.5, 1.7, 0.9, 1.6, 0.9, 1.7, 0.8, 1, 0.8, 1.1, 1.3, 1.2, 0.9, 1.1, 1.1, 0.9, 0.9, 0.7, 1.1, 1.8, 0.8, 0.9, 1.4, 0.9, 1.2, 1.4, 1, 0.8, 1.4, 1.4, 1, 0.7, 0.6, 0.8, 1.4, 1.4, 0.8, 1.9, 1.3, 1, 1.3, 0.9, 0.9, 1.5, 1.5, 0.8, 1.4, 1.1, 0.8, 
0.8, 1.4, 1.3, 1.1, 1, 2.2, 1.6, 1.7, 1.2, 0.9, 1.1, 1, 2.6, 1.6, 0.9, 0.9, 0.6, 1.6, 1.1, 1.3, 1.7, 1.2, 1.2, 1.6, 0.9, 0.9, 1.3, 2.2, 1.3, 1, 0.8, 1.5, 1.3, 1, 1.2, 1.4, 1.3, 1.3, 1.4, 1.3, 1.4, 1.2, 1, 1.2, 0.8, 1.1, 1.2, 1.1, 1.2, 1.8, 1.1, 1.6, 1.1, 0.8, 1.2, 1, 1.2, 0.7, 1.9, 1.6, 1.5, 0.9, 1.1, 0.8, 1.3, 1.1, 0.7, 1, 2, 1.2, 1.1, 1.2, 0.9, 0.9, 1, 1.2, 1, 1.2, 1.1, 1.1, 1, 1.4, 0.8, 1.3, 0.8, 1, 1.4, 1.2, 1.1, 1.3, 0.7, 1.2, 0.6, 1.2, 1.4, 1.5, 1.7, 0.9, 1, 0.6, 0.9, 1.4, 1.5, 1.1, 1.3, 0.9, 1.3, 2.1, 1.2, 1.3, 1.1, 1.4, 0.9, 1.4, 1.5, 1, 1.5, 1, 1.3, 0.9, 1.2, 1.1, 0.9, 1.1, 1.4, 1.4, 0.8, 1, 1.2, 1.1, 0.8, 1.2, 4.1, 0.7, 2, 0.8, 1.2, 3.8, 1.2, 1.4, 1, 1, 1.7, 1, 1, 0.9, 1.6, 0.7, 1.2, 1.1, 1, 1, 1, 0.7, 1.5, 1.1, 1, 0.9, 0.8, 0.9, 1.9, 1, 1.4, 1.3, 1.5, 0.9, 1.1, 1.2, 0.8, 1, 1.4, 4.2, 2.5, 1.2, 1.6, 0.9, 1.1, 1, 0.9, 0.6, 1, 1.1, 1.2, 1.2, 1.2, 0.9, 1.2, 1.1, 1.2, 0.8, 1, 1.6, 0.9, 1.2, 1.4, 0.7, 1.6, 1.1, 1.5, 0.8, 1.4, 0.9, 0.9, 1.5, 1.1, 9.1, 1.6, 1.3, 0.8, 4, 0.7, 1.4, 0.8, 3.6, 1.3, 1.3, 1, 1.1, 0.8, 1.2, 0.8, 0.9, 1.5, 1, 0.7, 6.6, 1.2, 1.8, 1, 1.3, 1, 1.3, 1.1, 1.1, 1, 0.9, 0.9, 1.1, 1.1, 0.9, 1.5, 4.5, 1.1, 0.8, 0.7, 0.9, 0.8, 1.6, 0.8, 0.8, 2, 2, 1.5, 1.2, 1.2, 0.9, 1.7, 1.4, 0.8, 1.5, 1, 1.1, 1.1, 0.7, 1.1, 1.2, 0.9, 1.5, 0.7, 1.5, 1.3, 0.8, 0.7, 1.1, 1, 1, 1.2, 0.8, 0.9, 0.8, 1.6, 2.4, 1.1, 2, 0.9, 1.4, 1.2, 1.1, 1.6, 0.9, 0.9, 1.3, 2, 1, 1.4, 1.3, 1.3, 0.7, 0.9, 1.1, 1.7, 1.1, 1.2, 0.9, 0.9, 1.1, 0.9, 5.2, 1.3, 0.7, 1.4, 1.4, 0.8, 1.2, 1.4, 0.9, 1.1, 1.1, 1.3, 1.7, 0.8, 1, 1.2, 1.9, 1.1, 1.3, 1.8, 0.6, 0.8, 1.4, 0.7, 0.9, 0.9, 1.2, 1.5, 0.7, 4.2, 1.1, 1.1, 1, 1.3, 0.8, 1.3, 1.1, 1.1, 0.8, 1.1, 1.5, 1.2, 1.2, 1.1, 1.7, 1, 1, 0.9, 0.9, 0.8, 1.2, 1.1, 0.7, 0.8, 1.1, 0.9, 1.4, 1, 1.1, 1.4, 0.9, 1, 1.7, 0.9, 1.3, 1.3, 0.8, 2.1, 1, 0.9, 1.2, 0.9, 1.1, 1.1, 1.2, 1.2, 0.9, 1.4, 1.2, 0.8, 1.1, 1.3, 1.1, 1.1, 0.8, 0.9, 0.9, 1.2, 1.1, 1.7, 1.3, 1.1, 1.7, 0.8, 0.9, 1.5, 1.1, 1.4, 1.4, 1.5, 1.1, 1.3, 0.9, 1.1, 1.1, 0.7, 1, 1.1, 0.6, 1, 1.2, 1.4, 1.1, 0.8, 
1, 1.3, 0.9, 1, 1, 0.9, 1.5, 1.1, 1.5, 6.3, 1.4, 1.1, 5.2, 1.6, 1, 1.2, 1.3, 0.6, 1.1, 1.2, 1.1, 1.2, 1, 1.1, 1.9, 1.1, 1.2, 1.1, 0.7, 1.4, 2.2, 1.1, 1.5, 0.9, 1.2, 1.1, 0.8, 1.3, 1.1, 1.3, 1.8, 1.1, 1.1, 1.1, 1, 0.8, 1.7, 1.2, 1.4, 1.1, 1.4, 1.1, 0.8, 1, 1.1, 1.2, 1.4, 1, 1.3, 1.1, 2.3, 0.7, 1.3, 0.7, 0.9, 0.9, 1.2, 2, 0.7, 1.2, 1.6, 1.3, 1.4, 2.7, 1.5, 1, 1, 1.5, 1 }
            };

            double[,] outputs =
            {
                { 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 9, 30, 30, 30, 30, 30, 2, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 2, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 13, 30, 30, 30, 30, 30, 30, 0, 30, 13, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 1, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 16, 30, 30, 30, 30, 30, 30, 0, 30, 30, 30, 30, 30, 30, 30, 30, 5, 30, 30, 30, 30, 30, 30, 30, 30, 1, 30, 21, 30, 30, 30, 30, 30, 30, 30, 30, 1, 30, 30, 30, 16, 30, 30, 30, 30, 30, 30, 30, 30, 2, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 16, 30, 30, 30, 30, 30, 3, 30, 18, 30, 30, 30, 30, 30, 28, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 0, 30, 30, 30, 30, 30, 1, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 17, 30, 1, 30, 30, 1, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 9, 30, 30, 30, 30, 1, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 9, 30, 30, 30, 27, 30, 30, 30, 30, 30, 30, 30, 30, 30, 0, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 16, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 20, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 1, 30, 30, 4, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 26, 30, 30, 30, 30, 12, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 
30, 21, 30, 1, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 9, 30, 30, 30, 30, 30, 30, 30, 1, 30, 1, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 0, 30, 25, 30, 30, 12, 30, 30, 30, 30, 30, 30, 10, 30, 30, 30, 30, 3, 30, 11, 30, 30, 30, 30, 30, 11, 30, 30, 30, 30, 30, 30, 4, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 1, 30, 30, 30, 30, 30, 1, 0, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 11, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 2, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 1, 30, 10, 30, 30, 30, 0, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 3, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 19, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 29, 30, 30, 30, 30, 30, 30, 2, 30, 30, 30, 30, 15, 30, 30, 30, 30, 30, 30, 30, 30, 3, 30, 30, 0, 30, 30, 30, 2, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 9, 30, 30, 30, 2, 30, 30, 30, 2, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 8, 30, 30, 30, 30, 30, 30, 0, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 14, 30, 30, 30, 9, 30, 30, 30, 30, 30, 13, 30, 30, 30, 4, 30, 30, 30, 30, 1, 30, 30, 30, 30, 30, 30, 10, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 1, 30, 30, 2, 30, 30, 30, 30, 30 },
                {  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0, 1,  0,  0,  0,  0,  0, 1,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0, 1,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  1,  0,  0,  0,  0,  0,  0, 1,  0,  1,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0, 1,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  1,  0,  0,  0,  0,  0,  0, 1,  0,  0,  0,  0,  0,  0,  0,  0, 1,  0,  0,  0,  0,  0,  0,  0,  0, 1,  0,  1,  0,  0,  0,  0,  0,  0,  0,  0, 1,  0,  0,  0,  1,  0,  0,  0,  0,  0,  0,  0,  0, 1,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  1,  0,  0,  0,  0,  0, 1,  0,  1,  0,  0,  0,  0,  0,  1,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0, 1,  0,  0,  0,  0,  0, 1,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  1,  0, 1,  0,  0, 1,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0, 1,  0,  0,  0,  0, 1,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0, 1,  0,  0,  0,  1,  0,  0,  0,  0,  0,  0,  0,  0,  0, 1,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  1,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  1,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0, 1,  0,  0, 1,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  1,  0,  0,  0,  0,  1,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  
0,  1,  0, 1,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0, 1,  0,  0,  0,  0,  0,  0,  0, 1,  0, 1,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0, 1,  0,  1,  0,  0,  1,  0,  0,  0,  0,  0,  0,  1,  0,  0,  0,  0, 1,  0,  1,  0,  0,  0,  0,  0,  1,  0,  0,  0,  0,  0,  0, 1,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0, 1,  0,  0,  0,  0,  0, 1, 1,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  1,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0, 1,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0, 1,  0,  1,  0,  0,  0, 1,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0, 1,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  1,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  1,  0,  0,  0,  0,  0,  0, 1,  0,  0,  0,  0,  1,  0,  0,  0,  0,  0,  0,  0,  0, 1,  0,  0, 1,  0,  0,  0, 1,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0, 1,  0,  0,  0, 1,  0,  0,  0, 1,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0, 1,  0,  0,  0,  0,  0,  0, 1,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  1,  0,  0,  0, 1,  0,  0,  0,  0,  0,  1,  0,  0,  0, 1,  0,  0,  0,  0, 1,  0,  0,  0,  0,  0,  0,  1,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0, 1,  0,  0, 1,  0,  0,  0,  0,  0 }
            };


            double[][] covariates = inputs.Transpose().ToArray();
            double[]   time       = outputs.GetRow(0);
            int[]      censor     = outputs.GetRow(1).ToInt32();


            string inputStr  = inputs.Transpose().ToString(Accord.Math.DefaultMatrixFormatProvider.InvariantCulture);
            string outputStr = outputs.Transpose().ToString(Accord.Math.DefaultMatrixFormatProvider.InvariantCulture);

            ProportionalHazards regression          = new ProportionalHazards(2);
            ProportionalHazardsNewtonRaphson target = new ProportionalHazardsNewtonRaphson(regression);

            double error = target.Run(covariates, time, censor);

            double log = -2 * regression.GetPartialLogLikelihood(covariates, time, censor);

            Assert.AreEqual(-0.270, regression.Coefficients[0], 1e-4);
            Assert.AreEqual(0.463, regression.Coefficients[1], 1e-2);
            Assert.IsFalse(Double.IsNaN(regression.Coefficients[0]));
            Assert.IsFalse(Double.IsNaN(regression.Coefficients[1]));

            Assert.AreEqual(0.2454, regression.StandardErrors[0], 1e-4);
            Assert.AreEqual(0.0671, regression.StandardErrors[1], 1e-4);
            Assert.IsFalse(Double.IsNaN(regression.StandardErrors[0]));
            Assert.IsFalse(Double.IsNaN(regression.StandardErrors[1]));
        }
Example #19
0
        /// <summary>
        ///   Constructs a new Cox's Proportional Hazards Analysis.
        /// </summary>
        ///
        /// <param name="inputs">The input data for the analysis.</param>
        /// <param name="times">The output data for the analysis.</param>
        /// <param name="censor">The right-censoring indicative values.</param>
        ///
        /// <exception cref="ArgumentNullException">
        ///   Thrown when <paramref name="inputs"/>, <paramref name="times"/>
        ///   or <paramref name="censor"/> is null.</exception>
        /// <exception cref="ArgumentException">
        ///   Thrown when the arrays do not all have the same length.</exception>
        ///
        public ProportionalHazardsAnalysis(double[][] inputs, double[] times, int[] censor)
        {
            // Initial argument checking
            if (inputs == null)
            {
                throw new ArgumentNullException("inputs");
            }
            if (times == null)
            {
                throw new ArgumentNullException("times");
            }
            if (censor == null)
            {
                throw new ArgumentNullException("censor");
            }

            if (inputs.Length != times.Length)
            {
                throw new ArgumentException("The number of rows in the input array must match the number of given outputs.");
            }

            // Without this check a shorter censor array would fail later,
            // deep inside initialize(), with a much less helpful error.
            if (times.Length != censor.Length)
            {
                throw new ArgumentException("The number of censoring values must match the number of given times.");
            }

            initialize(inputs, times, censor);

            // Start regression using the Null Model
            this.regression = new ProportionalHazards(inputCount);
        }
Example #20
0
        /// <summary>
        ///   Computes a likelihood-ratio test for each input variable by fitting
        ///   a nested model with that variable removed and comparing its partial
        ///   log-likelihood against the full model's.
        /// </summary>
        private void computeInner()
        {
            // NOTE(review): this skips models with two or fewer variables; with
            // "<= 2" a two-variable model gets no ratio tests — confirm the
            // threshold is intentional (a "< 2" guard would also be plausible).
            if (inputCount <= 2)
            {
                return;
            }

            // Perform likelihood-ratio tests against diminished nested models
            var innerModel = new ProportionalHazards(inputCount - 1);
            var learning   = createLearner(innerModel);

            for (int i = 0; i < inputCount; i++)
            {
                // Create a diminished inner model without the current variable
                double[][] data = inputData.RemoveColumn(i);

#if DEBUG
                if (data[0].Length == 0)
                {
                    throw new Exception();
                }
#endif

                // Reset the shared inner model so the previous iteration's fit
                // does not seed this one.
                Array.Clear(innerModel.Coefficients, 0, inputCount - 1);

                learning.MaxIterations = Iterations;
                learning.Tolerance     = Tolerance;

                learning.Learn(data, timeData, censorData);


                // 2 * (LL_full - LL_nested) is asymptotically chi-square with
                // 1 degree of freedom (one variable was removed).
                double ratio = 2.0 * (logLikelihood - innerModel.GetPartialLogLikelihood(data, timeData, censorData));
                ratioTests[i] = new ChiSquareTest(ratio, 1);
            }

            innerComputed = true;
        }
Example #21
0
        /// <summary>
        ///   Fits a one-variable Cox model with Newton-Raphson and validates the
        ///   coefficient, standard error, Wald p-value, partial log-likelihoods
        ///   and per-observation risk predictions against GNU R's survival
        ///   package (see the embedded R script below).
        /// </summary>
        public void BaselineHazardTestR()
        {
            // Each row is: survival time, censoring indicator, input variable.
            double[,] data =
            {
                // t   c  in
                {  8, 0, 13 },
                {  4, 1, 56 },
                { 12, 0, 25 },
                {  6, 0, 64 },
                { 10, 0, 38 },
                {  8, 1, 80 },
                {  5, 0,  0 },
                {  5, 0, 81 },
                {  3, 1, 81 },
                { 14, 1, 38 },
                {  8, 0, 23 },
                { 11, 0, 99 },
                {  7, 0, 12 },
                {  7, 1, 36 },
                { 12, 0, 63 },
                {  8, 0, 92 },
                {  7, 0, 38 },
            };


            double[]          time   = data.GetColumn(0);
            SurvivalOutcome[] censor = data.GetColumn(1).To <SurvivalOutcome[]>();
            double[][]        inputs = data.GetColumn(2).ToJagged();

            var regression = new ProportionalHazards(1);

            var target = new ProportionalHazardsNewtonRaphson(regression);

            double error = target.Run(inputs, time, censor);

            // Assert.AreEqual(-10.257417973830666, error, 1e-8);

            /*
             * library('survival')
             * options(digits=17)
             * time <- c(8, 4, 12, 6, 10, 8, 5, 5, 3, 14, 8, 11, 7, 7, 12, 8, 7)
             * x <- c(13, 56, 25, 64, 38, 80, 0, 81, 81, 38, 23, 99, 12, 36, 63, 92, 38)
             * c <- c(0, 1, 0, 0, 0, 1, 0, 0, 1, 1, 0, 0, 0, 1, 0, 0, 0)
             *
             * fit <- coxph(Surv(time, c) ~ x, ties="breslow")
             *
             * predict(fit,type="risk")
             *
             * fit$loglik
             *
             *      coef           exp(coef)          se(coef)               z              p
             * x 0.01633097532122  1.016465054586   0.01711960930183    0.9539338797573   0.340117112635
             *
             * Likelihood ratio test=0.94  on 1 df, p=0.332836850925  n= 17, number of events= 5
             */

            // Tested against GNU R
            // The offset is the mean of the input column (sum(x)/17 = 49.3529...).
            Assert.AreEqual(49.352941176470587, regression.Offsets[0]);
            Assert.AreEqual(0.01633097532122, regression.Coefficients[0], 1e-10);
            Assert.AreEqual(0.01711960930183, regression.StandardErrors[0], 1e-10);
            Assert.AreEqual(0.340117112635, regression.GetWaldTest(0).PValue, 1e-5);
            Assert.AreEqual(-10.2879332934202168, regression.GetPartialLogLikelihood(time, censor));
            Assert.AreEqual(-9.8190189050165948, regression.GetPartialLogLikelihood(inputs, time, censor));

            // Per-observation relative risks, compared against R's
            // predict(fit, type="risk") output quoted below.
            double[] actual = inputs.Apply(x => regression.Compute(x));

            /*
             * predict(r,type="risk")
             *  [1] 0.55229166964915244 1.11466393245000361 0.67185866444081555 1.27023351821156782 0.83076808526813917 1.64953983529334769 0.44664925161695829 1.67669959872327912
             *  [9] 1.67669959872327912 0.83076808526813917 0.65026895029003673 2.24967304521214029 0.54334545703992021 0.80407192663266613 1.24965783376477391 2.00665280971219540
             *  [17] 0.83076808526813917
             */

            double[] expected =
            {
                0.55229166964915244, 1.11466393245000361, 0.67185866444081555, 1.27023351821156782,
                0.83076808526813917, 1.64953983529334769, 0.44664925161695829, 1.67669959872327912,
                1.67669959872327912, 0.83076808526813917, 0.65026895029003673, 2.24967304521214029,
                0.54334545703992021, 0.80407192663266613, 1.24965783376477391, 2.00665280971219540,
                0.83076808526813917
            };

            for (int i = 0; i < actual.Length; i++)
            {
                Assert.AreEqual(expected[i], actual[i], 0.025);
            }
        }
Example #22
0
        /// <summary>
        ///   Validates the baseline survivor function of a fitted Cox model
        ///   (via <c>regression.Survival</c>) against reference values from
        ///   statpages.org, and checks that a standalone
        ///   <see cref="EmpiricalHazardDistribution"/> estimate produces the
        ///   same baseline.
        /// </summary>
        public void BaselineHazardTest()
        {
            // Each row is: survival time, censoring indicator, input variable.
            // The input column is a standardized version of the raw values
            // listed in the statpages.org comment block further below.
            double[,] data =
            {
                // t   c  in
                {  8, 0,  -1.2372626521865966 },
                {  4, 1,  0.22623087329625477 },
                { 12, 0,  -0.8288458543774289 },
                {  6, 0,  0.49850873850236665 },
                { 10, 0, -0.38639432341749696 },
                {  8, 1,   1.0430644689145904 },
                {  5, 0,  -1.6797141831465285 },
                {  5, 0,   1.0770992020653544 },
                {  3, 1,   1.0770992020653544 },
                { 14, 1, -0.38639432341749696 },
                {  8, 0,  -0.8969153206789568 },
                { 11, 0,   1.6897243987791061 },
                {  7, 0,  -1.2712973853373605 },
                {  7, 0, -0.38639432341749696 },
                {  7, 1, -0.45446378971902495 },
                { 12, 0,   0.4644740053516027 },
                {  8, 0,   1.4514812667237584 },
            };

            double[]          time   = data.GetColumn(0);
            SurvivalOutcome[] censor = data.GetColumn(1).To <SurvivalOutcome[]>();
            double[][]        inputs = data.GetColumn(2).ToJagged();

            var regression = new ProportionalHazards(1);

            var target = new ProportionalHazardsNewtonRaphson(regression);

            // Inputs are already standardized, so disable internal normalization
            // and regularization, and seed the coefficient at its known optimum.
            target.Normalize           = false;
            target.Lambda              = 0;
            regression.Coefficients[0] = 0.47983261821350764;

            double error = target.Run(inputs, time, censor);

            /* Tested against http://statpages.org/prophaz2.html
             *  13, 8,  0
             *  56, 4,  1
             *  25, 12, 0
             *  64, 6,  0
             *  38, 10, 0
             *  80, 8,  1
             *  0 , 5,  0
             *  81, 5,  0
             *  81, 3,  1
             *  38, 14, 1
             *  23, 8,  0
             *  99, 11, 0
             *  12, 7,  0
             *  38, 7,  0
             *  36, 7,  1
             *  63, 12, 0
             *  92, 8,  0
             */

            // Baseline survivor function sampled at the observed event times.
            double[] baseline =
            {
                regression.Survival(3),  // 0.9465
                regression.Survival(4),  // 0.8919
                regression.Survival(7),  // 0.8231
                regression.Survival(8),  // 0.7436
                regression.Survival(12), // 0.7436
                regression.Survival(14), // 0.0000
            };

            Assert.AreEqual(0.9465, baseline[0], 1e-4);
            Assert.AreEqual(0.8919, baseline[1], 1e-4);
            Assert.AreEqual(0.8231, baseline[2], 1e-4);
            Assert.AreEqual(0.7436, baseline[3], 1e-4);
            Assert.AreEqual(0.7436, baseline[4], 1e-4);
            Assert.AreEqual(0.0000, baseline[5], 1e-4);

            // The value of the baseline must be exact the same if it was computed
            // after the Newton-Raphson or in a standalone EmpiricalHazard computation
            double[] outputs   = inputs.Apply(x => regression.Compute(x));
            var      empirical = EmpiricalHazardDistribution.Estimate(time, censor, outputs);

            baseline = new[]
            {
                empirical.ComplementaryDistributionFunction(3),  // 0.9465
                empirical.ComplementaryDistributionFunction(4),  // 0.8919
                empirical.ComplementaryDistributionFunction(7),  // 0.8231
                empirical.ComplementaryDistributionFunction(8),  // 0.7436
                empirical.ComplementaryDistributionFunction(12), // 0.7436
                empirical.ComplementaryDistributionFunction(14), // 0.0000
            };

            Assert.AreEqual(0.9465, baseline[0], 1e-4);
            Assert.AreEqual(0.8919, baseline[1], 1e-4);
            Assert.AreEqual(0.8231, baseline[2], 1e-4);
            Assert.AreEqual(0.7436, baseline[3], 1e-4);
            Assert.AreEqual(0.7436, baseline[4], 1e-4);
            Assert.AreEqual(0.0000, baseline[5], 1e-4);
        }
Example #23
0
 /// <summary>
 ///   Gets the Log-Likelihood Ratio between this model and another model.
 /// </summary>
 ///
 /// <param name="model">Another proportional hazards model.</param>
 ///
 /// <returns>The Likelihood-Ratio between the two models.</returns>
 ///
 public double GetLikelihoodRatio(ProportionalHazards model)
 {
     // Delegates to the fitted regression, evaluated on the data
     // this analysis was trained with.
     double ratio = regression.GetLogLikelihoodRatio(inputData, timeData, censorData, model);
     return ratio;
 }
Example #24
0
        /// <summary>
        ///   Runs the proportional hazards analysis on the currently loaded
        ///   spreadsheet: extracts the time, censor and covariate columns
        ///   selected in the UI, fits the model, and populates the statistics,
        ///   coefficients and projection views.
        /// </summary>
        private void btnSampleRunAnalysis_Click(object sender, EventArgs e)
        {
            // Check requirements
            if (sourceTable == null)
            {
                // NOTE(review): MessageBox.Show(text, caption) — here the short
                // instruction is passed as the caption and the longer hint as
                // the message body; confirm the arguments are not swapped.
                MessageBox.Show("A sample spreadsheet can be found in the " +
                                "Resources folder in the same directory as this application.",
                                "Please load some data before attempting an analysis");
                return;
            }



            // Finishes and save any pending changes to the given data
            dgvAnalysisSource.EndEdit();
            sourceTable.AcceptChanges();

            // Gets the column of the dependent variable
            String    dependentName = (string)cbTimeName.SelectedItem;
            String    censorName    = (string)cbEventName.SelectedItem;
            DataTable timeTable     = sourceTable.DefaultView.ToTable(false, dependentName);
            DataTable censorTable   = sourceTable.DefaultView.ToTable(false, censorName);

            // Gets the columns of the independent variables
            List <string> names = new List <string>();

            foreach (string name in checkedListBox1.CheckedItems)
            {
                names.Add(name);
            }

            String[] independentNames = names.ToArray();

            // Creates the input and output matrices from the source data table
            this.time   = timeTable.Columns[dependentName].ToArray();
            this.censor = censorTable.Columns[censorName].ToArray().ToInt32();

            if (independentNames.Length == 0)
            {
                // No covariates selected: fit an intercept-free null model
                // over an empty input matrix with one (empty) row per subject.
                this.inputs = Jagged.Zeros(time.Length, 0);
            }
            else
            {
                DataTable independent = sourceTable.DefaultView.ToTable(false, independentNames);
                this.inputs = independent.ToJagged();
            }


            String[] sourceColumns;
            this.sourceMatrix = sourceTable.ToJagged(out sourceColumns);

            // Creates the Simple Descriptive Analysis of the given source
            var sda = new DescriptiveAnalysis(sourceColumns).Learn(sourceMatrix);

            // Populates statistics overview tab with analysis data
            dgvDistributionMeasures.DataSource = sda.Measures;


            // Creates the Logistic Regression Analysis of the given source
            pha = new ProportionalHazardsAnalysis(independentNames, dependentName, censorName);


            // Compute the Logistic Regression Analysis
            ProportionalHazards model = pha.Learn(inputs, time, censor);

            // Populates coefficient overview with analysis data
            dgvLogisticCoefficients.DataSource = pha.Coefficients;

            // Populate details about the fitted model
            tbChiSquare.Text     = pha.ChiSquare.Statistic.ToString("N5");
            tbPValue.Text        = pha.ChiSquare.PValue.ToString("N5");
            checkBox1.Checked    = pha.ChiSquare.Significant;
            tbDeviance.Text      = pha.Deviance.ToString("N5");
            tbLogLikelihood.Text = pha.LogLikelihood.ToString("N5");


            // Populate projection source table: the selected covariates plus
            // the time and censor columns (appended only if not already chosen).
            string[] cols = independentNames;
            if (!independentNames.Contains(dependentName))
            {
                cols = cols.Concatenate(dependentName);
            }

            if (!independentNames.Contains(censorName))
            {
                cols = cols.Concatenate(censorName);
            }

            DataTable projSource = sourceTable.DefaultView.ToTable(false, cols);

            dgvProjectionSource.DataSource = projSource;
        }
        /// <summary>
        ///   Checks the derived statistics of a one-variable Cox model whose
        ///   coefficient and standard error have been fixed to known fitted
        ///   values: deviance, Wald test, confidence interval, hazard ratio
        ///   and the model chi-square test.
        /// </summary>
        public void RunTest()
        {
            // Data from: http://www.sph.emory.edu/~cdckms/CoxPH/prophaz2.html
            // Columns: covariate, survival time, censoring indicator.
            double[,] data =
            {
                { 50,  1, 0 },
                { 70,  2, 1 },
                { 45,  3, 0 },
                { 35,  5, 0 },
                { 62,  7, 1 },
                { 50, 11, 0 },
                { 45,  4, 0 },
                { 57,  6, 0 },
                { 32,  8, 0 },
                { 57,  9, 1 },
                { 60, 10, 1 },
            };

            ProportionalHazards hazards = new ProportionalHazards(1);

            // Pre-set the parameters to known fitted values so the statistics
            // below can be checked without running the learning algorithm.
            hazards.Coefficients[0]   = 0.37704239281494084;
            hazards.StandardErrors[0] = 0.25415755113043753;

            double[][]        covariates = data.GetColumn(0).ToJagged();
            double[]          times      = data.GetColumn(1);
            SurvivalOutcome[] outcomes   = data.GetColumn(2).To <SurvivalOutcome[]>();

            // Deviance: -2 times the partial log-likelihood.
            double deviance = -2 * hazards.GetPartialLogLikelihood(covariates, times, outcomes);
            Assert.AreEqual(4.0505, deviance, 1e-4);
            Assert.IsFalse(Double.IsNaN(deviance));

            // Wald test for the single coefficient.
            var wald = hazards.GetWaldTest(0);
            Assert.AreEqual(0.1379, wald.PValue, 1e-4);

            // Confidence interval for the hazard ratio.
            var interval = hazards.GetConfidenceInterval(0);
            Assert.AreEqual(0.8859, interval.Min, 1e-4);
            Assert.AreEqual(2.3993, interval.Max, 1e-4);

            // Hazard ratio associated with the coefficient.
            double hazardRatio = hazards.GetHazardRatio(0);
            Assert.AreEqual(1.4580, hazardRatio, 1e-4);

            // Chi-square test for the whole model.
            var chiSquare = hazards.ChiSquare(covariates, times, outcomes);
            Assert.AreEqual(7.3570, chiSquare.Statistic, 1e-4);
            Assert.AreEqual(1, chiSquare.DegreesOfFreedom);
            Assert.AreEqual(0.0067, chiSquare.PValue, 1e-3);
        }
        /// <summary>
        ///   End-to-end documentation example: learns a one-variable Cox model
        ///   through ProportionalHazardsAnalysis and verifies coefficients,
        ///   standard errors, hazard ratios, likelihoods, hypothesis tests and
        ///   predictions (both probability and log-likelihood forms).
        /// </summary>
        public void learn_test()
        {
            #region doc_learn_part1
            // Consider the following example data, adapted from John C. Pezzullo's
            // example for his great Cox's proportional hazards model example in
            // JavaScript (http://statpages.org/prophaz2.html).

            // In this data, we have three columns. The first column denotes the
            // input variables for the problem. The second column, the survival
            // times. And the last one is the output of the experiment (if the
            // subject has died [1] or has survived [0]).

            double[][] example =
            {
                //             input  time censor
                new double[] { 50,  1, 0 },
                new double[] { 70,  2, 1 },
                new double[] { 45,  3, 0 },
                new double[] { 35,  5, 0 },
                new double[] { 62,  7, 1 },
                new double[] { 50, 11, 0 },
                new double[] { 45,  4, 0 },
                new double[] { 57,  6, 0 },
                new double[] { 32,  8, 0 },
                new double[] { 57,  9, 1 },
                new double[] { 60, 10, 1 },
            };

            // First we will extract the input, times and outputs
            double[][]        inputs = example.Get(null, 0, 1);
            double[]          times  = example.GetColumn(1);
            SurvivalOutcome[] output = example.GetColumn(2).To <SurvivalOutcome[]>();

            // Now we can proceed and create the analysis (giving optional variable names)
            var cox = new ProportionalHazardsAnalysis(new[] { "input" }, "time", "censor");

            // Then compute the analysis, learning a regression in the process:
            ProportionalHazards regression = cox.Learn(inputs, times, output);

            // Now we can show an analysis summary
            // Accord.Controls.DataGridBox.Show(cox.Coefficients);
            #endregion

            #region doc_learn_part2

            // We can also investigate all parameters individually. For
            // example the coefficients values will be available at

            double[] coef = cox.CoefficientValues;     // should be { 0.37704239281490765 }
            double[] stde = cox.StandardErrors;        // should be { 0.25415746361167235 }

            // We can also obtain the hazards ratios
            double[] ratios = cox.HazardRatios;        // should be { 1.4579661153488215 }

            // And other information such as the partial
            // likelihood, the deviance and also make
            // hypothesis tests on the parameters

            double partialL = cox.LogLikelihood;       // should be -2.0252666205735466
            double deviance = cox.Deviance;            // should be 4.0505332411470931

            // Chi-Square for whole model
            ChiSquareTest chi = cox.ChiSquare;         // should be 7.3570 (p=0.0067)

            // Wald tests for individual parameters
            WaldTest wald = cox.Coefficients[0].Wald;  // should be 1.4834 (p=0.1379)


            // Finally, we can also use the model to predict
            // scores for new observations (without considering time)

            double y1 = cox.Regression.Probability(new double[] { 63 }); // should be 86.138421225296526
            double y2 = cox.Regression.Probability(new double[] { 32 }); // should be 0.00072281400325299814

            // Those scores can be interpreted by comparing then
            // to 1. If they are greater than one, the odds are
            // the patient will not survive. If the value is less
            // than one, the patient is likely to survive.

            // The first value, y1, gives approximately 86.138,
            // while the second value, y2, gives about 0.00072.


            // We can also consider instant estimates for a given time:
            double p1 = cox.Regression.Probability(new double[] { 63 }, 2);   // should be 0.17989138010770425
            double p2 = cox.Regression.Probability(new double[] { 63 }, 10);  // should be 15.950244161356357

            // Here, p1 is the score after 2 time instants, with a
            // value of about 0.1799. The second value, p2, is the
            // score after 10 time instants, with a value of about 15.9502.

            // In addition, if we would like a higher precision when
            // computing very small probabilities using the methods
            // above, we can use the LogLikelihood methods instead:

            double log_y1 = cox.Regression.LogLikelihood(new double[] { 63 });      // should be  4.4559555514489091
            double log_y2 = cox.Regression.LogLikelihood(new double[] { 32 });      // should be -7.2323586258132284
            double log_p1 = cox.Regression.LogLikelihood(new double[] { 63 }, 2);   // should be -1.7154020540835324
            double log_p2 = cox.Regression.LogLikelihood(new double[] { 63 }, 10);  // should be  2.7694741370357177
            #endregion

            Assert.AreEqual(86.138421225296526, y1, 1e-10);
            Assert.AreEqual(0.00072281400325299814, y2, 1e-10);

            Assert.AreEqual(0.17989138010770425, p1, 1e-10);
            Assert.AreEqual(15.950244161356357, p2, 1e-10);

            Assert.AreEqual(4.4559555514489091, log_y1, 1e-10);
            Assert.AreEqual(-7.2323586258132284, log_y2, 1e-10);

            Assert.AreEqual(-1.7154020540835324, log_p1, 1e-10);
            Assert.AreEqual(2.7694741370357177, log_p2, 1e-10);

            // Consistency: the LogLikelihood variants must be the exact
            // logarithms of the Probability results.
            Assert.AreEqual(System.Math.Log(y1), log_y1, 1e-10);
            Assert.AreEqual(System.Math.Log(y2), log_y2, 1e-10);

            Assert.AreEqual(System.Math.Log(p1), log_p1, 1e-10);
            Assert.AreEqual(System.Math.Log(p2), log_p2, 1e-10);

            Assert.AreEqual(1, coef.Length);
            Assert.AreEqual(0.37704239281490765, coef[0]);
            Assert.AreEqual(0.25415746361167235, stde[0]);
            Assert.AreEqual(1.4579661153488215, ratios[0]);

            Assert.AreEqual(-2.0252666205735466, partialL, 1e-6);
            Assert.AreEqual(4.0505332411470931, deviance, 1e-6);

            Assert.AreEqual(1.4834991955655938, wald.Statistic, 1e-4);
            Assert.AreEqual(0.13794183001851756, wald.PValue, 1e-4);

            Assert.AreEqual(1, chi.DegreesOfFreedom);
            Assert.AreEqual(7.3570, chi.Statistic, 1e-4);
            Assert.AreEqual(0.0067, chi.PValue, 1e-3);
        }
Example #27
0
 /// <summary>
 ///   Constructs a new Newton-Raphson learning algorithm
 ///   for Cox's Proportional Hazards models.
 /// </summary>
 ///
 /// <param name="hazards">The model to estimate.</param>
 ///
 public ProportionalHazardsNewtonRaphson(ProportionalHazards hazards)
     : this()
 {
     // Delegate to the parameterless constructor for default settings,
     // then bind the algorithm to the given model.
     init(hazards);
 }
        /// <summary>
        ///   Fits a one-variable Cox model (without normalization) and checks
        ///   the cumulative baseline hazard function at each integer time
        ///   point against expected values.
        /// </summary>
        public void BaselineHazardTest()
        {
            // Each row is: survival time, censoring indicator, input variable.
            double[,] data =
            {
                // t   c  in
                {  8, 0, 13 },
                {  4, 1, 56 },
                { 12, 0, 25 },
                {  6, 0, 64 },
                { 10, 0, 38 },
                {  8, 1, 80 },
                {  5, 0,  0 },
                {  5, 0, 81 },
                {  3, 1, 81 },
                { 14, 1, 38 },
                {  8, 0, 23 },
                { 11, 0, 99 },
                {  7, 0, 12 },
                {  7, 1, 36 },
                { 12, 0, 63 },
                {  8, 0, 92 },
                {  7, 0, 38 },
            };

            double[]   time   = data.GetColumn(0);
            int[]      censor = data.GetColumn(1).ToInt32();
            // NOTE(review): other examples in this file convert the input
            // column with ToJagged(); confirm ToArray() produces the same
            // double[][] here.
            double[][] inputs = data.GetColumn(2).ToArray();

            ProportionalHazards regression = new ProportionalHazards(1);

            ProportionalHazardsNewtonRaphson target = new ProportionalHazardsNewtonRaphson(regression);

            target.Normalize = false;

            double error = target.Run(inputs, time, censor);
            double log   = -2 * regression.GetPartialLogLikelihood(inputs, time, censor);

            EmpiricalHazardDistribution baseline = regression.BaselineHazard as EmpiricalHazardDistribution;

            // Sample the cumulative hazard at every integer time point in the
            // distribution's support.
            double[] actual = new double[(int)baseline.Support.Max];
            for (int i = (int)baseline.Support.Min; i < baseline.Support.Max; i++)
            {
                actual[i] = baseline.CumulativeHazardFunction(i);
            }

            Assert.AreEqual(14, actual.Length);

            // The cumulative hazard is a step function: it only increases at
            // observed event times (t = 3, 4, 7, 8) and stays flat elsewhere.
            double[] expected =
            {
                0,                                       0,                    0,
                0.025000345517572315, 0.052363663484639708, 0.052363663484639708, 0.052363663484639708,
                0.16317880290786446,
                0.34217461190678861,   0.34217461190678861,  0.34217461190678861,
                0.34217461190678861,   0.34217461190678861, 0.34217461190678861
            };

            for (int i = 0; i < actual.Length; i++)
            {
                Assert.AreEqual(expected[i], actual[i], 0.025);
            }
        }
        /// <summary>
        ///   Constructs a new Newton-Raphson learning algorithm
        ///   for Cox's Proportional Hazards models.
        /// </summary>
        /// 
        /// <param name="hazards">The model to estimate.</param>
        /// 
        public ProportionalHazardsNewtonRaphson(ProportionalHazards hazards)
        {
            this.regression = hazards;
            this.parameterCount = hazards.Coefficients.Length;

            // Workspace for the full Newton-Raphson step.
            this.hessian = new double[parameterCount, parameterCount];
            this.gradient = new double[parameterCount];

            // Per-observation accumulators folded into the totals above.
            this.partialHessian = new double[parameterCount, parameterCount];
            this.partialGradient = new double[parameterCount];

            // Convergence is detected through relative change in parameters.
            // NOTE(review): Iterations = 0 presumably means "no iteration
            // limit" for RelativeParameterConvergence — confirm.
            this.convergence = new RelativeParameterConvergence()
            {
                Iterations = 0,
                Tolerance = 1e-5
            };
        }
Example #30
0
        /// <summary>
        ///   Fits a one-variable Cox model and validates the coefficient,
        ///   standard error, offset, baseline survivor function and
        ///   time-dependent predictions against statpages.org.
        /// </summary>
        public void PredictTest1()
        {
            // Data from: http://statpages.org/prophaz2.html
            // Columns: input variable, survival time, censoring indicator.
            double[,] data =
            {
                { 50,  1, 0 },
                { 70,  2, 1 },
                { 45,  3, 0 },
                { 35,  5, 0 },
                { 62,  7, 1 },
                { 50, 11, 0 },
                { 45,  4, 0 },
                { 57,  6, 0 },
                { 32,  8, 0 },
                { 57,  9, 1 },
                { 60, 10, 1 },
            };

            var regression = new ProportionalHazards(1);

            double[][] inputs = data.GetColumn(0).ToJagged();
            double[]   time   = data.GetColumn(1);
            int[]      censor = data.GetColumn(2).ToInt32();


            var target = new ProportionalHazardsNewtonRaphson(regression);

            double error = target.Run(inputs, time, censor);

            // Tested against http://statpages.org/prophaz2.html
            // The offset is the mean of the input column (563/11 = 51.1818...).
            Assert.AreEqual(0.3770, regression.Coefficients[0], 1e-4);
            Assert.AreEqual(0.2542, regression.StandardErrors[0], 1e-4);
            Assert.AreEqual(51.18181818181818, regression.Offsets[0]);

            double mean = regression.Offsets[0];

            // Baseline survivor function at predictor means
            double[] baseline =
            {
                regression.Survival(2),
                regression.Survival(7),
                regression.Survival(9),
                regression.Survival(10),
            };

            // Tested against http://statpages.org/prophaz2.html
            Assert.AreEqual(0.9979, baseline[0], 1e-4);
            Assert.AreEqual(0.9820, baseline[1], 1e-4);
            Assert.AreEqual(0.9525, baseline[2], 1e-4);
            Assert.AreEqual(0.8310, baseline[3], 1e-4);

            // Expected time-dependent predictions for each observation,
            // computed by Compute(input, time) below.
            double[] expected =
            {
                0,                     2.51908236823927, 0.000203028311170645, 4.67823782106946E-06, 1.07100164957025,
                0.118590728553659, 0.000203028311170645,   0.0187294821517496, 1.31028937819308E-05,
                0.436716853556834, 5.14665484304978
            };

            double[] actual = new double[inputs.Length];
            for (int i = 0; i < inputs.Length; i++)
            {
                double a = actual[i] = regression.Compute(inputs[i], time[i]);
                double e = expected[i];

                Assert.AreEqual(e, a, 1e-3);
            }
            // string exStr = actual.ToString(CSharpArrayFormatProvider.InvariantCulture);
        }
        /// <summary>
        ///   Constructs a new Newton-Raphson learning algorithm
        ///   for Cox's Proportional Hazards models.
        /// </summary>
        /// 
        /// <param name="hazards">The model to estimate.</param>
        /// 
        public ProportionalHazardsNewtonRaphson(ProportionalHazards hazards)
        {
            this.regression = hazards;

            // All derivative buffers are dimensioned by the coefficient count.
            int p = hazards.Coefficients.Length;
            this.parameterCount = p;

            // Aggregate and per-observation (partial) gradient/Hessian storage.
            this.gradient = new double[p];
            this.hessian = new double[p, p];
            this.partialGradient = new double[p];
            this.partialHessian = new double[p, p];

            // NOTE(review): Iterations = 0 presumably disables the iteration
            // cap so only the tolerance criterion applies — confirm.
            var criterion = new RelativeParameterConvergence();
            criterion.Iterations = 0;
            criterion.Tolerance = 1e-5;
            this.convergence = criterion;

            // Default estimation settings for the learning algorithm.
            this.Estimator = HazardEstimator.BreslowNelsonAalen;
            this.Ties = HazardTiesMethod.Efron;
            this.Lambda = 0.1;
        }
Example #32
0
        /// <summary>
        ///   Documentation example: learns a Cox Proportional Hazards model
        ///   from (covariate, time, outcome) survival data and uses it for
        ///   prediction, scoring and probability estimation.
        /// </summary>
        public void doc_learn()
        {
            // Data from: http://www.sph.emory.edu/~cdckms/CoxPH/prophaz2.html / http://statpages.info/prophaz2.html

            #region doc_learn
            // Let's say we have the following survival problem. Each row in the
            // table below represents a patient under care in a hospital. The first
            // column represents their age (a single feature, but there could have
            // been many like age, height, weight, etc), the time until an event
            // has happened (like, for example, unfortunately death) and the event
            // outcome (i.e. what has exactly happened after this amount of time,
            // has the patient died or did he simply leave the hospital and we
            // couldn't get more data about him?)

            object[,] data =
            {
                //    input         time until           outcome
                // (features)     event happened     (what happened?)
                { 50,  1, SurvivalOutcome.Censored },
                { 70,  2, SurvivalOutcome.Failed   },
                { 45,  3, SurvivalOutcome.Censored },
                { 35,  5, SurvivalOutcome.Censored },
                { 62,  7, SurvivalOutcome.Failed   },
                { 50, 11, SurvivalOutcome.Censored },
                { 45,  4, SurvivalOutcome.Censored },
                { 57,  6, SurvivalOutcome.Censored },
                { 32,  8, SurvivalOutcome.Censored },
                { 57,  9, SurvivalOutcome.Failed   },
                { 60, 10, SurvivalOutcome.Failed   },
            }; // Note: Censored means that we stopped recording data for that person,
               // so we do not know what actually happened to them, except that things
               // were going fine until the point in time appointed by "time to event"

            // Parse the data above
            double[][]        inputs = data.GetColumn(0).ToDouble().ToJagged();
            double[]          time   = data.GetColumn(1).ToDouble();
            SurvivalOutcome[] output = data.GetColumn(2).To <SurvivalOutcome[]>();

            // Create a new PH Newton-Raphson learning algorithm
            var teacher = new ProportionalHazardsNewtonRaphson()
            {
                ComputeBaselineFunction = true,
                ComputeStandardErrors   = true,
                MaxIterations           = 100
            };

            // Use the learning algorithm to infer a Proportional Hazards model
            ProportionalHazards regression = teacher.Learn(inputs, time, output);

            // Use the regression to make predictions (problematic)
            SurvivalOutcome[] prediction = regression.Decide(inputs);

            // Use the regression to make score estimates
            double[] score = regression.Score(inputs);

            // Use the regression to make probability estimates
            double[] probability = regression.Probability(inputs);
            #endregion

            // Check the probability estimates against previously recorded values
            // (relative tolerance, since these are floating-point results).
            string   str      = probability.ToCSharp();
            double[] expected = { 0.640442743460877, 1206.22665747906, 0.0972172106179122, 0.00224010744584941, 59.0812230260151, 0.640442743460877, 0.0972172106179122, 8.9683453534747, 0.000722814003252998, 8.9683453534747, 27.7942279934438 };
            Assert.IsTrue(expected.IsEqual(probability, rtol: 1e-8));
        }