/// <summary>
///   Gathers the fitted model's statistics (possibly sample-weighted)
///   and copies the per-coefficient measures into the analysis arrays.
/// </summary>
/// <param name="inputData">The input vectors used to fit the model.</param>
/// <param name="outputData">The observed binary outcomes.</param>
/// <param name="weights">Optional per-sample weights; pass null for an unweighted fit.</param>
private void computeInformation(double[][] inputData, double[] outputData, double[] weights)
{
    // Model predictions for every input vector
#pragma warning disable 612, 618
    result = regression.Compute(inputData);
#pragma warning restore 612, 618

    // Goodness-of-fit statistics, using the weighted overloads when weights were given
    if (weights != null)
    {
        this.deviance = regression.GetDeviance(inputData, outputData, weights);
        this.logLikelihood = regression.GetLogLikelihood(inputData, outputData, weights);
        this.chiSquare = regression.ChiSquare(inputData, outputData, weights);
    }
    else
    {
        this.deviance = regression.GetDeviance(inputData, outputData);
        this.logLikelihood = regression.GetLogLikelihood(inputData, outputData);
        this.chiSquare = regression.ChiSquare(inputData, outputData);
    }

    // Per-coefficient statistics (intercept included)
    int count = regression.Coefficients.Length;
    for (int index = 0; index < count; index++)
    {
        this.coefficients[index] = regression.Coefficients[index];
        this.standardErrors[index] = regression.StandardErrors[index];
        this.waldTests[index] = regression.GetWaldTest(index);
        this.confidences[index] = regression.GetConfidenceInterval(index);
        this.oddsRatios[index] = regression.GetOddsRatio(index);
    }
}
/// <summary>
///   Computes the Logistic Regression Analysis.
/// </summary>
/// <remarks>The likelihood surface for the
///   logistic regression learning is convex, so there will be only one
///   peak. Any local maxima will be also a global maxima.
/// </remarks>
/// <param name="limit">
///   The difference between two iterations of the regression algorithm
///   when the algorithm should stop. If not specified, the value of
///   10e-4 will be used. The difference is calculated based on the largest
///   absolute parameter change of the regression.
/// </param>
/// <param name="maxIterations">
///   The maximum number of iterations to be performed by the regression
///   algorithm.
/// </param>
/// <returns>
///   True if the model converged, false otherwise.
/// </returns>
///
public bool Compute(double limit, int maxIterations)
{
    double delta;
    int iteration = 0;

    do // learning iterations until convergence
    {
        delta = regression.Regress(inputData, outputData);
        iteration++;
    } while (delta > limit && iteration < maxIterations);

    // The model converged only if the largest parameter change dropped
    // below the limit. (The previous check, "iteration <= maxIterations",
    // was always true after the loop — iteration can never exceed
    // maxIterations — so convergence was reported unconditionally.)
    bool converged = delta <= limit;

    // Store model information
    this.result = regression.Compute(inputData);
    this.deviance = regression.GetDeviance(inputData, outputData);
    this.logLikelihood = regression.GetLogLikelihood(inputData, outputData);
    this.chiSquare = regression.ChiSquare(inputData, outputData);

    // Store coefficient information (intercept included)
    for (int i = 0; i < regression.Coefficients.Length; i++)
    {
        this.waldTests[i] = regression.GetWaldTest(i);
        this.standardErrors[i] = regression.GetStandardError(i);
        this.coefficients[i] = regression.Coefficients[i];
        this.confidences[i] = regression.GetConfidenceInterval(i);
        this.oddsRatios[i] = regression.GetOddsRatio(i);
    }

    // Perform likelihood-ratio tests against diminished nested models,
    // each fitted with one input variable removed.
    for (int i = 0; i < inputCount; i++)
    {
        // Create a diminished inner model without the current variable
        double[][] data = inputData.RemoveColumn(i);
        LogisticRegression inner = new LogisticRegression(inputCount - 1);

        iteration = 0;
        do // learning iterations until convergence
        {
            delta = inner.Regress(data, outputData);
            iteration++;
        } while (delta > limit && iteration < maxIterations);

        // Likelihood-ratio statistic: 2 * (LL_full - LL_nested),
        // asymptotically chi-square with 1 degree of freedom.
        // Index i + 1 skips the intercept slot at ratioTests[0].
        double ratio = 2.0 * (logLikelihood - inner.GetLogLikelihood(data, outputData));
        ratioTests[i + 1] = new ChiSquareTest(ratio, 1);
    }

    // Returns true if the full model has converged, false otherwise.
    return converged;
}
/// <summary>
///   Constructs a new Logistic regression model.
/// </summary>
///
internal StepwiseLogisticRegressionModel(StepwiseLogisticRegressionAnalysis analysis, LogisticRegression regression, int[] variables, ChiSquareTest chiSquare, ChiSquareTest[] tests)
{
    this.Analysis = analysis;
    this.Regression = regression;

    // One coefficient per input variable, plus the intercept term.
    int coefficientCount = regression.NumberOfInputs + 1;

    this.Inputs = analysis.Inputs.Get(variables);
    this.ChiSquare = chiSquare;
    this.LikelihoodRatioTests = tests;
    this.Variables = variables;

    this.StandardErrors = new double[coefficientCount];
    this.WaldTests = new WaldTest[coefficientCount];
    this.CoefficientValues = new double[coefficientCount];
    this.Confidences = new DoubleRange[coefficientCount];
    this.OddsRatios = new double[coefficientCount];

    // Store coefficient information. Uses coefficientCount consistently
    // instead of re-deriving regression.NumberOfInputs + 1 in the bound.
    for (int i = 0; i < coefficientCount; i++)
    {
        this.StandardErrors[i] = regression.StandardErrors[i];
        this.WaldTests[i] = regression.GetWaldTest(i);
        this.CoefficientValues[i] = regression.GetCoefficient(i);
        this.Confidences[i] = regression.GetConfidenceInterval(i);
        this.OddsRatios[i] = regression.GetOddsRatio(i);
    }

    // Human-readable, comma-separated list of the selected input names
    // (replaces the hand-rolled StringBuilder loop with String.Join).
    this.Names = String.Join(", ", Inputs);

    var logCoefs = new List<NestedLogisticCoefficient>(coefficientCount);
    for (int i = 0; i < coefficientCount; i++)
        logCoefs.Add(new NestedLogisticCoefficient(this, i));

    this.Coefficients = new NestedLogisticCoefficientCollection(logCoefs);
}
/// <summary>
///   Gathers the fitted model's statistics and copies the
///   per-coefficient measures into the analysis arrays.
/// </summary>
private void computeInformation()
{
    // Model predictions and goodness-of-fit statistics
    this.result = regression.Compute(inputData);
    this.deviance = regression.GetDeviance(inputData, outputData);
    this.logLikelihood = regression.GetLogLikelihood(inputData, outputData);
    this.chiSquare = regression.ChiSquare(inputData, outputData);

    // Per-coefficient statistics (intercept included)
    int count = regression.Coefficients.Length;
    for (int index = 0; index < count; index++)
    {
        this.coefficients[index] = regression.Coefficients[index];
        this.standardErrors[index] = regression.StandardErrors[index];
        this.waldTests[index] = regression.GetWaldTest(index);
        this.confidences[index] = regression.GetConfidenceInterval(index);
        this.oddsRatios[index] = regression.GetOddsRatio(index);
    }
}