public double GetLikelihoodRatio(GeneralizedLinearRegression model)
{
#pragma warning disable 612, 618
    return regression.GetLogLikelihoodRatio(inputData, outputData, model);
#pragma warning restore 612, 618
}
private void constructor(GeneralizedLinearRegression regression)
{
    this.regression = regression;
    this.parameterCount = regression.Coefficients.Length;
    this.hessian = new double[parameterCount, parameterCount];
    this.gradient = new double[parameterCount];
}
private void constructor(GeneralizedLinearRegression regression)
{
    if (regression == null)
    {
        throw new ArgumentNullException("regression");
    }

    this.regression = regression;
    this.parameterCount = regression.Coefficients.Length;
    this.hessian = new double[parameterCount, parameterCount];
    this.gradient = new double[parameterCount];
}
public void ComputeTest2()
{
    double[][] input =
    {
        new double[] { 55, 0 }, // 0 - no cancer
        new double[] { 28, 0 }, // 0
        new double[] { 65, 1 }, // 0
        new double[] { 46, 0 }, // 1 - have cancer
        new double[] { 86, 1 }, // 1
        new double[] { 56, 1 }, // 1
        new double[] { 85, 0 }, // 0
        new double[] { 33, 0 }, // 0
        new double[] { 21, 1 }, // 0
        new double[] { 42, 1 }, // 1
    };

    double[] output = { 0, 0, 0, 1, 1, 1, 0, 0, 0, 1 };

    var regression = new GeneralizedLinearRegression(new ProbitLinkFunction(), inputs: 2);
    var teacher = new IterativeReweightedLeastSquares(regression);

    double delta = 0;
    do
    {
        // Perform an iteration
        delta = teacher.Run(input, output);
    } while (delta > 0.001);

    Assert.AreEqual(3, regression.Coefficients.Length);
    Assert.AreEqual(-1.4807594445304693, regression.Coefficients[0], 1e-10);
    Assert.AreEqual(0.012417175632016827, regression.Coefficients[1], 1e-10);
    Assert.AreEqual(1.072665379969842, regression.Coefficients[2], 1e-10);
    Assert.IsFalse(regression.Coefficients.HasNaN());

    Assert.AreEqual(3, regression.StandardErrors.Length);
    Assert.AreEqual(1.6402037052797314, regression.StandardErrors[0], 1e-10);
    Assert.AreEqual(0.026119425452145524, regression.StandardErrors[1], 1e-10);
    Assert.AreEqual(1.1297252500874606, regression.StandardErrors[2], 1e-10);
    Assert.IsFalse(regression.StandardErrors.HasNaN());
}
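// Illustrative sketch (assumption, not part of the test above): once the IRLS loop has
// converged, the fitted probit model can be queried for probabilities. This assumes the
// classic Compute(double[]) API is available on GeneralizedLinearRegression; the method
// name and input vector below are purely illustrative.
public void PredictionSketch(GeneralizedLinearRegression regression)
{
    // Probability of the positive class for an input vector shaped like the test data
    double probability = regression.Compute(new double[] { 46, 1 });

    // A simple 0.5 threshold turns the probability into a class decision
    bool predicted = probability > 0.5;
}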
protected override void EndProcessing()
{
    double[][] inputs;
    double[] outputs;

    if (ParameterSetName == "XY")
    {
        // Inputs and outputs passed directly through the X and Y parameters
        inputs = Converter.ToDoubleJaggedArray(X);
        outputs = Converter.ToDoubleArray(Y);
    }
    else
    {
        // Otherwise, split the named output column away from the data table
        outputs = _data.GetColumn(OutputName).ToDoubleArray();
        _data.RemoveColumn(OutputName);
        inputs = _data.ToDoubleJaggedArray();
    }

    double[] w = null;
    if (Weights != null)
    {
        w = Converter.ToDoubleArray(Weights);
    }

    var model = new GeneralizedLinearRegression(LinkFunctionConvert.Get(LinkFunction))
    {
        NumberOfInputs = inputs[0].Length
    };

    var learner = new IterativeReweightedLeastSquares(model)
    {
        MaxIterations = 200,
        Regularization = 0
    };

    learner.Learn(inputs, outputs, w);

    WriteObject(model);
}
public void ComputeTest()
{
    // Example from http://bayes.bgsu.edu/bcwr/vignettes/probit_regression.pdf

    double[][] input =
    {
        new double[] { 525 }, new double[] { 533 }, new double[] { 545 },
        new double[] { 582 }, new double[] { 581 }, new double[] { 576 },
        new double[] { 572 }, new double[] { 609 }, new double[] { 559 },
        new double[] { 543 }, new double[] { 576 }, new double[] { 525 },
        new double[] { 574 }, new double[] { 582 }, new double[] { 574 },
        new double[] { 471 }, new double[] { 595 }, new double[] { 557 },
        new double[] { 557 }, new double[] { 584 }, new double[] { 599 },
        new double[] { 517 }, new double[] { 649 }, new double[] { 584 },
        new double[] { 463 }, new double[] { 591 }, new double[] { 488 },
        new double[] { 563 }, new double[] { 553 }, new double[] { 549 }
    };

    double[] output =
    {
        0, 0, 1, 0, 1, 1, 1, 1, 1, 1,
        1, 1, 1, 1, 1, 0, 1, 0, 0, 1,
        1, 0, 1, 1, 0, 1, 0, 1, 1, 1
    };

    var regression = new GeneralizedLinearRegression(new ProbitLinkFunction(), inputs: 1);
    var teacher = new IterativeReweightedLeastSquares(regression);

    double delta = 0;
    do
    {
        // Perform an iteration
        delta = teacher.Run(input, output);
    } while (delta > 1e-6);

    Assert.AreEqual(2, regression.Coefficients.Length);
    Assert.AreEqual(-17.6984, regression.Coefficients[0], 1e-4);
    Assert.AreEqual(0.03293, regression.Coefficients[1], 1e-4);

    Assert.AreEqual(2, regression.StandardErrors.Length);
    Assert.AreEqual(9.2731983954911374, regression.StandardErrors[0], 1e-6);
    Assert.AreEqual(0.016768779446085, regression.StandardErrors[1], 1e-6);
}
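// Minimal sketch (assumed equivalent, not code from this file): the same kind of probit
// fit expressed through the Learn(...) API used by the cmdlet in this section, instead of
// the explicit Run(...) loop above. The data is a small subset of the test values and the
// method name is hypothetical.
public void LearnApiSketch()
{
    double[][] input =
    {
        new double[] { 525 }, new double[] { 533 }, new double[] { 545 },
        new double[] { 582 }, new double[] { 581 }, new double[] { 576 }
    };
    double[] output = { 0, 0, 1, 0, 1, 1 };

    var model = new GeneralizedLinearRegression(new ProbitLinkFunction(), inputs: 1);
    var learner = new IterativeReweightedLeastSquares(model)
    {
        MaxIterations = 200,
        Regularization = 0
    };

    // Learn(...) iterates internally and updates 'model' in place
    learner.Learn(input, output);
}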
//---------------------------------------------

#region Public Methods

/// <summary>
///   Gets the Log-Likelihood Ratio between this model and another model.
/// </summary>
///
/// <param name="model">Another logistic regression model.</param>
/// <returns>The Log-Likelihood Ratio between the two models.</returns>
///
public double GetLikelihoodRatio(GeneralizedLinearRegression model)
{
    return regression.GetLogLikelihoodRatio(inputData, outputData, model);
}
/// <summary>
///   Constructs a new Iterative Reweighted Least Squares.
/// </summary>
///
/// <param name="regression">The regression to estimate.</param>
///
public IterativeReweightedLeastSquares(GeneralizedLinearRegression regression)
{
    constructor(regression);
}
/// <summary>
///   Constructs a new Iterative Reweighted Least Squares.
/// </summary>
///
/// <param name="regression">The regression to estimate.</param>
///
public IterativeReweightedLeastSquares(LogisticRegression regression)
{
    var glm = GeneralizedLinearRegression.FromLogisticRegression(regression, makeCopy: false);
    constructor(glm);
}
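// Illustrative sketch (assumed usage, not code from this file): both constructors end up
// in the same private initializer; a LogisticRegression is first adapted through
// GeneralizedLinearRegression.FromLogisticRegression before the IRLS state is built.
// Method name and data below are hypothetical.
public static void ConstructorOverloadSketch()
{
    var logistic = new LogisticRegression(2);
    var irls = new IterativeReweightedLeastSquares(logistic);

    double[][] inputs =
    {
        new double[] { 55, 0 }, new double[] { 28, 0 },
        new double[] { 46, 0 }, new double[] { 65, 1 }
    };
    double[] outputs = { 0, 0, 1, 0 };

    // One IRLS update step; callers typically repeat until the returned change is small
    double delta = irls.Run(inputs, outputs);
}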
private void constructor(GeneralizedLinearRegression regression)
{
    if (regression == null)
        throw new ArgumentNullException("regression");

    this.regression = regression;
    this.parameterCount = regression.Coefficients.Length;
    this.hessian = new double[parameterCount, parameterCount];
    this.gradient = new double[parameterCount];
}