private void computeInformation(double[][] inputData, double[] outputData, double[] weights)
{
    // Store model information
#pragma warning disable 612, 618
    result = regression.Compute(inputData);
#pragma warning restore 612, 618

    // Use the weighted versions of the goodness-of-fit measures
    // when sample weights have been supplied
    if (weights == null)
    {
        this.deviance = regression.GetDeviance(inputData, outputData);
        this.logLikelihood = regression.GetLogLikelihood(inputData, outputData);
        this.chiSquare = regression.ChiSquare(inputData, outputData);
    }
    else
    {
        this.deviance = regression.GetDeviance(inputData, outputData, weights);
        this.logLikelihood = regression.GetLogLikelihood(inputData, outputData, weights);
        this.chiSquare = regression.ChiSquare(inputData, outputData, weights);
    }

    // Store coefficient information
    for (int i = 0; i < regression.Coefficients.Length; i++)
    {
        this.standardErrors[i] = regression.StandardErrors[i];

        this.waldTests[i] = regression.GetWaldTest(i);
        this.coefficients[i] = regression.Coefficients[i];
        this.confidences[i] = regression.GetConfidenceInterval(i);
        this.oddsRatios[i] = regression.GetOddsRatio(i);
    }
}
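// A minimal illustrative sketch (not part of this class) of how the per-coefficient
// statistics stored above relate to one another under the usual normal approximation:
// the Wald statistic is the coefficient divided by its standard error, the odds ratio
// is the exponential of the coefficient, and an approximate 95% confidence interval for
// the odds ratio is obtained by exponentiating the coefficient plus or minus 1.96
// standard errors. The helper name and return layout are hypothetical.
private static double[] coefficientSummarySketch(double coefficient, double standardError)
{
    double wald = coefficient / standardError;                            // Wald z-statistic
    double oddsRatio = System.Math.Exp(coefficient);                      // effect on the odds scale
    double lower = System.Math.Exp(coefficient - 1.96 * standardError);   // approx. 95% CI lower bound
    double upper = System.Math.Exp(coefficient + 1.96 * standardError);   // approx. 95% CI upper bound

    return new double[] { wald, oddsRatio, lower, upper };
}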
/// <summary>
///   Computes the Logistic Regression Analysis.
/// </summary>
/// 
/// <remarks>
///   The likelihood surface for logistic regression learning is convex,
///   so there is only one peak. Any local maximum is also the global maximum.
/// </remarks>
/// 
/// <param name="limit">
///   The convergence criterion: the algorithm stops when the largest absolute
///   parameter change between two iterations of the regression falls below this
///   value. If not specified, the value of 10e-4 will be used.
/// </param>
/// <param name="maxIterations">
///   The maximum number of iterations to be performed by the regression algorithm.
/// </param>
/// 
/// <returns>
///   True if the model converged, false otherwise.
/// </returns>
/// 
public bool Compute(double limit, int maxIterations)
{
    double delta;
    int iteration = 0;

    do // learning iterations until convergence
    {
        delta = regression.Regress(inputData, outputData);
        iteration++;

    } while (delta > limit && iteration < maxIterations);

    // Check if the full model has converged, i.e. the parameter change
    // dropped below the limit rather than the iteration budget running out
    bool converged = delta <= limit;

    // Store model information
    this.result = regression.Compute(inputData);
    this.deviance = regression.GetDeviance(inputData, outputData);
    this.logLikelihood = regression.GetLogLikelihood(inputData, outputData);
    this.chiSquare = regression.ChiSquare(inputData, outputData);

    // Store coefficient information
    for (int i = 0; i < regression.Coefficients.Length; i++)
    {
        this.waldTests[i] = regression.GetWaldTest(i);
        this.standardErrors[i] = regression.GetStandardError(i);
        this.coefficients[i] = regression.Coefficients[i];
        this.confidences[i] = regression.GetConfidenceInterval(i);
        this.oddsRatios[i] = regression.GetOddsRatio(i);
    }

    // Perform likelihood-ratio tests against diminished nested models
    for (int i = 0; i < inputCount; i++)
    {
        // Create a diminished inner model without the current variable
        double[][] data = inputData.RemoveColumn(i);
        LogisticRegression inner = new LogisticRegression(inputCount - 1);

        iteration = 0;

        do // learning iterations until convergence
        {
            delta = inner.Regress(data, outputData);
            iteration++;

        } while (delta > limit && iteration < maxIterations);

        // Likelihood-ratio statistic: twice the difference between the
        // log-likelihoods of the full and the reduced model, asymptotically
        // chi-square distributed with one degree of freedom
        double ratio = 2.0 * (logLikelihood - inner.GetLogLikelihood(data, outputData));
        ratioTests[i + 1] = new ChiSquareTest(ratio, 1);
    }

    // Returns true if the full model has converged, false otherwise.
    return converged;
}
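// A minimal usage sketch, shown only for illustration: it assumes the enclosing analysis
// class (here called LogisticRegressionAnalysis) can be constructed from the input matrix
// and output vector that back the inputData and outputData fields used above. The
// constructor shape and this helper are assumptions, not part of this file.
private static bool computeUsageSketch(double[][] inputs, double[] outputs)
{
    // Hypothetical construction of the analysis over the given samples
    // (one row per sample, one column per variable; outputs coded as 0 or 1)
    var analysis = new LogisticRegressionAnalysis(inputs, outputs);

    // Iterate until the largest absolute parameter change drops below 1e-4,
    // or at most 100 iterations have been performed
    return analysis.Compute(1e-4, 100);
}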
private void computeInformation()
{
    // Store model information
    this.result = regression.Compute(inputData);
    this.deviance = regression.GetDeviance(inputData, outputData);
    this.logLikelihood = regression.GetLogLikelihood(inputData, outputData);
    this.chiSquare = regression.ChiSquare(inputData, outputData);

    // Store coefficient information
    for (int i = 0; i < regression.Coefficients.Length; i++)
    {
        this.standardErrors[i] = regression.StandardErrors[i];

        this.waldTests[i] = regression.GetWaldTest(i);
        this.coefficients[i] = regression.Coefficients[i];
        this.confidences[i] = regression.GetConfidenceInterval(i);
        this.oddsRatios[i] = regression.GetOddsRatio(i);
    }
}