Example #1
        /// <summary>
        ///   Estimates the baseline hazard function for a model with no
        ///   covariates, given the survival times and censoring labels.
        /// </summary>
        ///
        /// <param name="time">The time-to-event for the non-censored training samples.</param>
        /// <param name="censor">The output (event) associated with each input vector.</param>
        ///
        /// <returns>The partial log-likelihood of the estimated model.</returns>
        ///
        public double Run(double[] time, SurvivalOutcome[] censor)
        {
            if (time.Length != censor.Length)
            {
                throw new DimensionMismatchException("time",
                                                     "The time and output vectors must have the same length.");
            }

            // Sort the samples by time to speed up the computations below
            EmpiricalHazardDistribution.Sort(ref time, ref censor);

            createBaseline(time, censor);

            return regression.GetPartialLogLikelihood(time, censor);
        }
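
A minimal usage sketch for this overload follows. The teacher and model type names (ProportionalHazardsNewtonRaphson, ProportionalHazards) are assumptions based on the Accord.NET-style API this code appears to derive from; the actual names and constructors in this codebase may differ.

        double[] times = { 1, 2, 3, 4, 5 };
        SurvivalOutcome[] censor =
        {
            SurvivalOutcome.Failed,   // event observed at t = 1
            SurvivalOutcome.Censored, // subject left the study at t = 2
            SurvivalOutcome.Failed,
            SurvivalOutcome.Failed,
            SurvivalOutcome.Censored
        };

        // Hypothetical type names; adjust to the actual classes in this project
        var regression = new ProportionalHazards(inputs: 0);
        var teacher = new ProportionalHazardsNewtonRaphson(regression);

        // With no covariates, Run estimates only the baseline hazard and
        // returns the partial log-likelihood of the fitted model
        double logLikelihood = teacher.Run(times, censor);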
Example #2
        /// <summary>
        ///   Runs the Newton-Raphson update for Cox's hazards learning until convergence.
        /// </summary>
        ///
        /// <param name="inputs">The input data.</param>
        /// <param name="time">The time-to-event for the non-censored training samples.</param>
        /// <param name="censor">The output (event) associated with each input vector.</param>
        ///
        /// <returns>The partial log-likelihood of the model after the final iteration.</returns>
        ///
        public double Run(double[][] inputs, double[] time, SurvivalOutcome[] censor)
        {
            if (inputs.Length != time.Length || time.Length != censor.Length)
            {
                throw new DimensionMismatchException("time",
                                                     "The inputs, time and output vectors must have the same length.");
            }


            // Sort the samples by time to speed up the computations below
            EmpiricalHazardDistribution.Sort(ref time, ref censor, ref inputs);


            // Default to zero means and unit standard deviations,
            // which leave the inputs unchanged when normalization is off
            double[] means = new double[parameterCount];
            double[] sdev  = new double[parameterCount];
            for (int i = 0; i < sdev.Length; i++)
            {
                sdev[i] = 1;
            }

            if (normalize)
            {
                // Store means as regression centers
                means = inputs.Mean();
                for (int i = 0; i < means.Length; i++)
                {
                    regression.Offsets[i] = means[i];
                }

                // Convert the inputs to standard scores for improved numerical accuracy
                sdev   = BestCS.Statistics.Tools.StandardDeviation(inputs);
                inputs = inputs.Subtract(means, 0).ElementwiseDivide(sdev, 0, inPlace: true);

                // Rescale any existing coefficients to match the standardized inputs
                for (int i = 0; i < regression.Coefficients.Length; i++)
                {
                    regression.Coefficients[i] *= sdev[i];
                }
            }



            // Compute each sample's partial hazard, exp(x · b)
            double[] output = new double[inputs.Length];
            for (int i = 0; i < output.Length; i++)
            {
                double sum = 0;
                for (int j = 0; j < regression.Coefficients.Length; j++)
                {
                    sum += regression.Coefficients[j] * inputs[i][j];
                }
                output[i] = Math.Exp(sum);
            }

            // Count how many samples share each sample's event time (ties).
            // Note: this count is not used in the remainder of this method.
            int[] ties = new int[inputs.Length];
            for (int i = 0; i < inputs.Length; i++)
            {
                for (int j = 0; j < time.Length; j++)
                {
                    if (time[j] == time[i])
                    {
                        ties[i]++;
                    }
                }
            }

            if (parameterCount == 0)
            {
                // Without covariates there is nothing to optimize: just
                // estimate the baseline hazard and report the likelihood
                createBaseline(time, censor, output);
                return regression.GetPartialLogLikelihood(inputs, time, censor);
            }

            CurrentIteration = 0;
            double smooth = Lambda;

            do
            {
                // Learning iterations run until convergence or until
                // the maximum number of iterations is reached

                CurrentIteration++;

                // Reset Hessian matrix and gradient
                Array.Clear(gradient, 0, gradient.Length);
                Array.Clear(hessian, 0, hessian.Length);

                // For each observation instance
                for (int i = 0; i < inputs.Length; i++)
                {
                    // Skip censored observations
                    if (censor[i] == SurvivalOutcome.Censored)
                    {
                        continue;
                    }

                    // Compute the partial sums over this event's risk set
                    double den = 0;
                    Array.Clear(partialGradient, 0, partialGradient.Length);
                    Array.Clear(partialHessian, 0, partialHessian.Length);

                    // Accumulate the partial hazards of every sample still
                    // at risk at time[i] (the risk-set normalizer)
                    for (int j = 0; j < inputs.Length; j++)
                    {
                        if (time[j] >= time[i])
                        {
                            den += output[j];
                        }
                    }

                    for (int j = 0; j < inputs.Length; j++)
                    {
                        if (time[j] >= time[i])
                        {
                            // Compute partial gradient
                            for (int k = 0; k < partialGradient.Length; k++)
                            {
                                partialGradient[k] += inputs[j][k] * output[j] / den;
                            }

                            // Compute partial Hessian
                            for (int ii = 0; ii < inputs[j].Length; ii++)
                            {
                                for (int jj = 0; jj < inputs[j].Length; jj++)
                                {
                                    partialHessian[ii, jj] += inputs[j][ii] * inputs[j][jj] * output[j] / den;
                                }
                            }
                        }
                    }

                    // Compute gradient vector
                    for (int j = 0; j < gradient.Length; j++)
                    {
                        gradient[j] += inputs[i][j] - partialGradient[j];
                    }

                    // Compute Hessian matrix
                    for (int j = 0; j < partialGradient.Length; j++)
                    {
                        for (int k = 0; k < partialGradient.Length; k++)
                        {
                            hessian[j, k] -= partialHessian[j, k] - partialGradient[j] * partialGradient[k];
                        }
                    }
                }
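
                // At this point, gradient holds the score vector of Cox's
                // partial log-likelihood, and hessian holds the negative of
                // the observed information matrix (note the -= above).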


                // Decompose to solve the linear system. Usually the Hessian will
                // be invertible and LU will succeed. However, sometimes the Hessian
                // may be singular and a Singular Value Decomposition may be needed.

                // The SVD is very stable, but is quite expensive, being on average
                // about 10-15 times more expensive than LU decomposition. There are
                // other ways to avoid a singular Hessian. For a very interesting
                // reading on the subject, please see:
                //
                //  - Jeff Gill & Gary King, "What to Do When Your Hessian Is Not Invertible",
                //    Sociological Methods & Research, Vol 33, No. 1, August 2004, 54-87.
                //    Available in: http://gking.harvard.edu/files/help.pdf
                //

                decomposition = new SingularValueDecomposition(hessian);
                double[] deltas = decomposition.Solve(gradient);
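                // Since hessian is the negative information matrix, the solved
                // deltas point downhill; subtracting them below is therefore a
                // (damped) Newton ascent step on the partial log-likelihood.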

                if (convergence.Iterations > 0 || convergence.Tolerance > 0)
                {
                    // Update coefficients using the calculated deltas
                    for (int i = 0; i < regression.Coefficients.Length; i++)
                    {
                        regression.Coefficients[i] -= smooth * deltas[i];
                    }
                }

                // Gradually increase the step size up to a full Newton step
                smooth += Lambda;
                if (smooth > 1)
                {
                    smooth = 1;
                }

                // Check relative maximum parameter change
                convergence.NewValues = regression.Coefficients;


                if (convergence.HasDiverged)
                {
                    // Restore previous coefficients
                    for (int i = 0; i < regression.Coefficients.Length; i++)
                    {
                        regression.Coefficients[i] = convergence.OldValues[i];
                    }
                }

                // Recompute the partial hazards using the updated coefficients
                for (int i = 0; i < output.Length; i++)
                {
                    double sum = 0;
                    for (int j = 0; j < regression.Coefficients.Length; j++)
                    {
                        sum += regression.Coefficients[j] * inputs[i][j];
                    }
                    output[i] = Math.Exp(sum);
                }
            } while (!convergence.HasConverged);


            // Convert the coefficients back to the original input scale
            // (sdev contains only ones when normalization was disabled)
            for (int i = 0; i < regression.Coefficients.Length; i++)
            {
                regression.Coefficients[i] /= sdev[i];
            }

            if (computeStandardErrors)
            {
                // Obtain the inverse of the information matrix (the
                // decomposition holds its negative, hence the Abs below)
                double[,] inverse = decomposition.Inverse();

                // Calculate coefficients' standard errors
                double[] standardErrors = regression.StandardErrors;
                for (int i = 0; i < standardErrors.Length; i++)
                {
                    standardErrors[i] = Math.Sqrt(Math.Abs(inverse[i, i])) / sdev[i];
                }
            }

            if (computeBaselineFunction)
            {
                createBaseline(time, censor, output);
            }

            return regression.GetPartialLogLikelihood(inputs, time, censor);
        }
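
A usage sketch for the full overload, under the same assumptions about the surrounding types (a ProportionalHazardsNewtonRaphson teacher wrapping a ProportionalHazards model; both names are hypothetical here, and the appropriate using directives for this library are assumed):

        // One row per subject; e.g. { age, treatment indicator }
        double[][] inputs =
        {
            new double[] { 50, 1 },
            new double[] { 70, 0 },
            new double[] { 45, 1 },
            new double[] { 35, 0 },
            new double[] { 60, 1 }
        };

        double[] times = { 1, 2, 3, 5, 6 };
        SurvivalOutcome[] censor =
        {
            SurvivalOutcome.Failed,
            SurvivalOutcome.Censored,
            SurvivalOutcome.Failed,
            SurvivalOutcome.Failed,
            SurvivalOutcome.Censored
        };

        var regression = new ProportionalHazards(inputs: 2);  // hypothetical constructor
        var teacher = new ProportionalHazardsNewtonRaphson(regression);

        double logLikelihood = teacher.Run(inputs, times, censor);

        // Each fitted coefficient b translates into a hazard ratio exp(b)
        foreach (double b in regression.Coefficients)
            Console.WriteLine(Math.Exp(b));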