public void PairedTTest()
{
    // Paired t-test: the second series adds a 10% increment to each value
    // of the first, so the mean difference is strictly positive.
    var values = Enumerable.Range(1, 8);
    var stat = TTest.PairedTTest(values, i => i, i => i + (i / 10.0));

    Assert.Equal(5.1962, stat.T, 3);
    Assert.Equal(7.0, stat.DegreesOfFreedom, 5);
}
/// <summary>
/// Runs a two-sample t-test against another measure (equal sample sizes)
/// and reports whether the difference is significant at the given p-value.
/// Note: the typo in the method name is preserved for caller compatibility.
/// </summary>
public bool IsStatiscallyDifferentFrom(TestMeasure testMeasure, int n, double minPvalue)
{
    // CalcTTest expects variances, so square each standard deviation.
    double thisVariance = this.StdDev * this.StdDev;
    double otherVariance = testMeasure.StdDev * testMeasure.StdDev;

    var ttestResult = TTest.CalcTTest(this.Value, testMeasure.Value, thisVariance, otherVariance, n, n);
    return ttestResult.PValue <= minPvalue;
}
public void TwoSampleTest()
{
    // Two-sample t-test from summary statistics: (mean, sd, n) per group.
    var result = TTest.TwoSample(5.5, 3.0277, 10, 5.97, 3.0782, 10);

    Assert.Equal(-0.3442, result.T, 3);
    Assert.Equal(18, result.DegreesOfFreedom, 2);
}
// Recomputes and redraws the A-vs-B comparison (table + chart) from the two
// activity charts. Bails out while a test is already running or when either
// series is missing or too short to test.
private void ComputeComparisonStats()
{
    comparisonTable.Clear();
    comparisonChart.Clear();
    var activityA = chartA.Activity;
    var activityB = chartB.Activity;
    // A t-test needs at least two points per series.
    if (TTest.Busy || activityA == null || activityB == null || activityA.Count <= 1 || activityB.Count <= 1)
    {
        return;
    }
    var firstValue = activityA[0].Y;
    // True only when every paired point matches its counterpart AND equals the
    // very first value — i.e. both series are one flat, identical line (zero
    // variance, which would make the t-test degenerate).
    // NOTE(review): Zip truncates to the shorter series; confirm that is the
    // intended behavior when the two activity sets differ in length.
    var allSame = activityA.Zip(activityB, (a, b) => a.Y == b.Y && a.Y == firstValue).All(i => i);
    if (allSame)
    {
        MessageBox.Show("The selected activity sets are identical and of singular value.");
        return;
    }
    //Compute T-test
    var tTest = TTest.Perform
    (
        activityA.Select(p => p.Y).ToList(),
        activityB.Select(p => p.Y).ToList()
    );
    //Update table
    comparisonTable.lblAvgA.Content = tTest.TTest.FirstSeriesMean.ToString("N2");
    comparisonTable.lblAvgB.Content = tTest.TTest.SecondSeriesMean.ToString("N2");
    comparisonTable.lblNA.Content = tTest.FirstSeriesCount.ToString("N0");
    comparisonTable.lblNB.Content = tTest.SecondSeriesCount.ToString("N0");
    comparisonTable.lblStDevA.Content = tTest.FirstSeriesStandardDeviation.ToString("N2");
    comparisonTable.lblStDevB.Content = tTest.SecondSeriesStandardDeviation.ToString("N2");
    // Prefix positive differences with an explicit '+'.
    comparisonTable.lblAvgDiff.Content = (tTest.MeanDifference > 0 ? "+" : "") + tTest.MeanDifference.ToString("N2");
    // Percent difference is undefined when the first series' mean is zero.
    if (tTest.TTest.FirstSeriesMean != 0)
    {
        comparisonTable.lblAvgPercent.Content = tTest.PercentMeanDifference.ToString("P2");
    }
    else
    {
        comparisonTable.lblAvgPercent.Content = "-";
    }
    //Update Chart
    comparisonChart.UpdateChart
    (
        tTest.TTest.FirstSeriesMean,
        tTest.FirstSeries95ConfidenceBound,
        tTest.TTest.SecondSeriesMean,
        tTest.SecondSeries95ConfidenceBound
    );
}
public void TTestConstructorTest()
{
    // Sample drawn from a Gaussian with mean 0.5 and unit variance.
    double[] sample =
    {
        -0.849886940156521, 3.53492346633185, 1.22540422494611, 0.436945126810344,
        1.21474290382610, 0.295033941700225, 0.375855651783688, 1.98969760778547,
        1.90903448980048, 1.91719241342961
    };

    double hypothesizedMean = 0;

    // Two-tailed: mean differs from the hypothesized value.
    TTest test = new TTest(sample, hypothesizedMean, TTestHypotesis.MeanIsDifferentThanHypothesis);
    Assert.AreEqual(3.1254485381338246, test.Statistic);
    Assert.AreEqual(Hypothesis.TwoTail, test.Hypothesis);
    Assert.AreEqual(0.012210924322697769, test.PValue);

    // Right tail: mean is greater than the hypothesized value.
    test = new TTest(sample, hypothesizedMean, TTestHypotesis.MeanIsGreaterThanHypothesis);
    Assert.AreEqual(3.1254485381338246, test.Statistic);
    Assert.AreEqual(Hypothesis.OneUpper, test.Hypothesis); // right tail
    Assert.AreEqual(0.0061054621613488846, test.PValue);

    // Left tail: mean is smaller than the hypothesized value.
    test = new TTest(sample, hypothesizedMean, TTestHypotesis.MeanIsSmallerThanHypothesis);
    Assert.AreEqual(3.1254485381338246, test.Statistic);
    Assert.AreEqual(Hypothesis.OneLower, test.Hypothesis); // left tail
    Assert.AreEqual(0.99389453783865112, test.PValue);
}
/// <summary>
/// Builds a confidence interval around the transformed prediction for a
/// single input, using a t-distribution with the model's degrees of freedom.
/// </summary>
private DoubleRange createInterval(double input, double[] inputs, double percent, double se)
{
    double predicted = Transform(input);
    double degreesOfFreedom = GetDegreesOfFreedom(inputs.Length);
    var tTest = new TTest(estimatedValue: predicted, standardError: se, degreesOfFreedom: degreesOfFreedom);
    return tTest.GetConfidenceInterval(percent);
}
public void TwoSampleTestFromSamples()
{
    // Second sample is the first shifted up by a constant 0.5.
    var first = new[] { 1.0, 2, 3, 4, 5, 6, 7, 8, 9, 10 };
    var second = new[] { 1.0, 2, 3, 4, 5, 6, 7, 8, 9, 10 }.Select(i => i + 0.5);

    var result = TTest.TwoSample(first, second);

    Assert.Equal(-0.3693, result.T, 3);
    Assert.Equal(18, result.DegreesOfFreedom, 2);
}
/// <summary>
/// Constructs a fresh instance of <typeparamref name="TTest"/>, wraps it in a
/// mocked test method, and returns the results of executing the fuzz-test
/// attribute against it.
/// </summary>
private TestResult[] TestMethodInvocationResults <TTest>() where TTest : notnull, new()
{
    var instance = new TTest();
    var methodMock = CreateTestMethodMock(instance);
    var fuzzAttribute = new FuzzTestMethodAttribute();
    return fuzzAttribute.Execute(methodMock);
}
public void TTestConstructorTest()
{
    // Sample generated from a Gaussian with mean 0.5 and unit variance.
    double[] sample =
    {
        -0.849886940156521, 3.53492346633185, 1.22540422494611, 0.436945126810344,
        1.21474290382610, 0.295033941700225, 0.375855651783688, 1.98969760778547,
        1.90903448980048, 1.91719241342961
    };

    double hypothesizedMean = 0;

    // Null hypothesis: values are equal.
    // Alternative   : values are different (two-tailed).
    var test = new TTest(sample, hypothesizedMean, OneSampleHypothesis.ValueIsDifferentFromHypothesis);
    Assert.AreEqual(3.1254485381338246, test.Statistic);
    Assert.AreEqual(OneSampleHypothesis.ValueIsDifferentFromHypothesis, test.Hypothesis);
    Assert.AreEqual(DistributionTail.TwoTail, test.Tail);
    Assert.AreEqual(0.012210924322697769, test.PValue);
    Assert.IsTrue(test.Significant);

    // Null hypothesis: value is smaller than hypothesis.
    // Alternative   : value is greater than hypothesis (right tail).
    // When the null states the parameter is less than a constant, the score
    // that rejects it is always positive and beyond the critical value.
    test = new TTest(sample, hypothesizedMean, OneSampleHypothesis.ValueIsGreaterThanHypothesis);
    Assert.AreEqual(3.1254485381338246, test.Statistic); // positive statistic
    Assert.AreEqual(OneSampleHypothesis.ValueIsGreaterThanHypothesis, test.Hypothesis); // right tail
    Assert.AreEqual(DistributionTail.OneUpper, test.Tail); // right tail
    Assert.AreEqual(0.0061054621613488846, test.PValue);
    Assert.IsTrue(test.Significant); // null should be rejected

    // Null hypothesis: value is greater than hypothesis.
    // Alternative   : value is smaller than hypothesis (left tail).
    // When the null states the parameter is greater than a constant, the score
    // that rejects it is always negative; this statistic is positive.
    test = new TTest(sample, hypothesizedMean, OneSampleHypothesis.ValueIsSmallerThanHypothesis);
    Assert.AreEqual(3.1254485381338246, test.Statistic); // positive statistic
    Assert.AreEqual(OneSampleHypothesis.ValueIsSmallerThanHypothesis, test.Hypothesis); // left tail
    Assert.AreEqual(DistributionTail.OneLower, test.Tail); // left tail
    Assert.AreEqual(0.99389453783865112, test.PValue);
    Assert.IsFalse(test.Significant); // null cannot be rejected
}
public void OneSampleRaw()
{
    // One-sample t-test from summary statistics with sd 10 over 30 observations.
    // NOTE: T comes out negative even though 142.1 > 140.5, so the first argument
    // appears to be the hypothesized mean — confirm against the API docs.
    var result = TTest.OneSample(142.1, 140.5, 10, 30);

    Assert.Equal(-0.87635609, result.T, 4);
    Assert.Equal(29, result.DegreesOfFreedom);
    Assert.Equal(0.80598089, result.OneSidedGreaterP, 6);
    Assert.Equal(0.19401911, result.OneSidedLessP, 6);
    Assert.Equal(0.38803823, result.TwoTailedP, 7);
}
/// <summary>
/// Computes the confidence interval for a prediction on the linear scale and
/// maps its bounds back through the link function's inverse to the natural scale.
/// </summary>
private DoubleRange computeInterval(double[] input, int numberOfSamples, double percent, double se)
{
    double linearPrediction = linear.Transform(input);
    double degreesOfFreedom = GetDegreesOfFreedom(numberOfSamples);

    var tTest = new TTest(estimatedValue: linearPrediction, standardError: se, degreesOfFreedom: degreesOfFreedom);
    DoubleRange linearInterval = tTest.GetConfidenceInterval(percent);

    // Invert the link to express the interval on the response scale.
    return new DoubleRange(linkFunction.Inverse(linearInterval.Min), linkFunction.Inverse(linearInterval.Max));
}
public void PValueToStatisticTest()
{
    double df = 2.6;
    double t = 0;

    // Verifies the tail associated with each hypothesis and the inversion of a
    // p-value back to its t-statistic, for two different p-values.
    void Check(OneSampleHypothesis hypothesis, DistributionTail tail, double p, double expected)
    {
        TTest target = new TTest(t, df, hypothesis);
        Assert.AreEqual(tail, target.Tail);
        double actual = target.PValueToStatistic(p);
        Assert.AreEqual(expected, actual, 1e-4);
    }

    // p = 0.05
    Check(OneSampleHypothesis.ValueIsDifferentFromHypothesis, DistributionTail.TwoTail, 0.05, 3.4782);
    Check(OneSampleHypothesis.ValueIsSmallerThanHypothesis, DistributionTail.OneLower, 0.05, -2.5086);
    Check(OneSampleHypothesis.ValueIsGreaterThanHypothesis, DistributionTail.OneUpper, 0.05, 2.5086);

    // p = 0.95: one-tailed statistics flip sign relative to p = 0.05.
    Check(OneSampleHypothesis.ValueIsDifferentFromHypothesis, DistributionTail.TwoTail, 0.95, 0.0689);
    Check(OneSampleHypothesis.ValueIsSmallerThanHypothesis, DistributionTail.OneLower, 0.95, 2.5086);
    Check(OneSampleHypothesis.ValueIsGreaterThanHypothesis, DistributionTail.OneUpper, 0.95, -2.5086);
}
public void StatisticToPValueTest()
{
    double df = 2.2;

    // Verifies the tail associated with each hypothesis and the p-value
    // obtained from a given t-statistic.
    void Check(double t, OneSampleHypothesis hypothesis, DistributionTail tail, double expected)
    {
        TTest target = new TTest(t, df, hypothesis);
        Assert.AreEqual(tail, target.Tail);
        double actual = target.StatisticToPValue(t);
        Assert.AreEqual(expected, actual, 1e-4);
    }

    // Positive statistic.
    Check(1.96, OneSampleHypothesis.ValueIsDifferentFromHypothesis, DistributionTail.TwoTail, 0.1773);
    Check(1.96, OneSampleHypothesis.ValueIsSmallerThanHypothesis, DistributionTail.OneLower, 0.9113);
    Check(1.96, OneSampleHypothesis.ValueIsGreaterThanHypothesis, DistributionTail.OneUpper, 0.0887);

    // Negative statistic: two-tailed p is unchanged; one-tailed p-values swap.
    Check(-1.96, OneSampleHypothesis.ValueIsDifferentFromHypothesis, DistributionTail.TwoTail, 0.1773);
    Check(-1.96, OneSampleHypothesis.ValueIsSmallerThanHypothesis, DistributionTail.OneLower, 0.0887);
    Check(-1.96, OneSampleHypothesis.ValueIsGreaterThanHypothesis, DistributionTail.OneUpper, 0.9113);
}
static void Main(string[] args)
{
    // Bind delegates to the readGeo/readName methods of each entity type.
    city a = new city();
    FTest ReadGeoF = new FTest(a.readGeo);

    mega b = new mega();
    STest ReadGeoS = new STest(b.readGeo);
    STest ReadNameS = new STest(b.readName);

    region c = new region();
    TTest ReadGeoT = new TTest(c.readGeo);
    TTest ReadNameT = new TTest(c.readName);

    // Delegate combination: invoking Sum invokes both targets in order.
    TTest Sum = ReadGeoT + ReadNameT;
}
// Fits an ordinary least squares model (no intercept) of yVal on xVals and
// packages fit quality, coefficients, and per-coefficient significance
// statistics into a RegressionResult.
private static RegressionResult PerformOls(double[] yVal, string[] xVars, double[][] xVals)
{
    var ols = new OrdinaryLeastSquares()
    {
        // intercept should represent the return if no impact from inputs. (mean daily return)
        // forcing to 0 as we assume that all explanation of price move should be due to independenet variables.
        // in reality there is likely an unexplainable drift. For example daily bleed (MER, funding etc).
        UseIntercept = false
    };
    MultipleLinearRegression regression = ols.Learn(xVals, yVal);
    // NOTE(review): 'predicted' is only referenced by the commented-out RSquared
    // line below; consider removing both together.
    double[] predicted = regression.Transform(xVals);
    RegressionResult r = new RegressionResult();
    r.ModelType = "OLS";
    r.StandardError = regression.GetStandardError(xVals, yVal);
    // r.RSquared = new RSquaredLoss(xVals.Length, yVal, ).Loss(predicted);
    r.RSquared = regression.CoefficientOfDetermination(xVals, yVal, false);
    r.AdjRSquared = regression.CoefficientOfDetermination(xVals, yVal, true);
    r.Betas = SgtStringUtils.DoubleArrayToRoundedDelimitedString(regression.Weights);
    r.xVars = string.Join(";", xVars);
    // TODO need to validate the below section. add p-values?
    double[] coeffStandardErrors = regression.GetStandardErrors(r.StandardError, ols.GetInformationMatrix());
    // Per-coefficient t-scores: weight / standard error.
    double[] coeffTScores = DoubleDivide(regression.Weights, coeffStandardErrors);
    double[] coeffPVals = new double[coeffTScores.Length];
    // NOTE(review): df = n - 1 is only the usual residual df for a single
    // predictor without intercept; for k predictors it is normally n - k —
    // confirm before relying on these p-values.
    double df = xVals.Length - 1;
    for (int i = 0; i < coeffTScores.Length; i++)
    {
        // Two-tailed test of each coefficient against zero.
        TTest tTest = new TTest(coeffTScores[i], df, OneSampleHypothesis.ValueIsDifferentFromHypothesis);
        coeffPVals[i] = tTest.PValue;
    }
    r.CoeffStandardErrors = SgtStringUtils.DoubleArrayToRoundedDelimitedString(coeffStandardErrors);
    r.CoeffTScores = SgtStringUtils.DoubleArrayToRoundedDelimitedString(coeffTScores);
    r.CoeffPVals = SgtStringUtils.DoubleArrayToRoundedDelimitedString(coeffPVals);
    // /TODO
    r.xVarCount = regression.NumberOfInputs;
    r.SamplesCount = yVal.Length;
    // NOTE(review): Intercept is stored as Mean even though UseIntercept = false
    // above — confirm this is the intended value.
    r.Mean = regression.Intercept;
    return(r);
}
// Recomputes the A-vs-B comparison table and chart from the two activity
// series. Bails out while a test is already running or when either series is
// missing or too short to test.
private void ComputeComparisonStats()
{
    // A t-test needs at least two points per series.
    if (TTest.Busy || chartA.Activity == null || chartB.Activity == null || chartA.Activity.Count <= 1 || chartB.Activity.Count <= 1)
    {
        return;
    }
    //Compute T-test
    var tTest = TTest.Perform
    (
        chartA.Activity.Select(p => p.Y).ToList(),
        chartB.Activity.Select(p => p.Y).ToList()
    );
    //Update table
    comparisonTable.lblAvgA.Content = tTest.TTest.FirstSeriesMean.ToString("N2");
    comparisonTable.lblAvgB.Content = tTest.TTest.SecondSeriesMean.ToString("N2");
    comparisonTable.lblNA.Content = tTest.FirstSeriesCount.ToString("N0");
    comparisonTable.lblNB.Content = tTest.SecondSeriesCount.ToString("N0");
    comparisonTable.lblStDevA.Content = tTest.FirstSeriesStandardDeviation.ToString("N2");
    comparisonTable.lblStDevB.Content = tTest.SecondSeriesStandardDeviation.ToString("N2");
    // Prefix positive differences with an explicit '+'.
    comparisonTable.lblAvgDiff.Content = (tTest.MeanDifference > 0 ? "+" : "") + tTest.MeanDifference.ToString("N2");
    // Percent difference is undefined when the first series' mean is zero.
    if (tTest.TTest.FirstSeriesMean != 0)
    {
        comparisonTable.lblAvgPercent.Content = tTest.PercentMeanDifference.ToString("P2");
    }
    else
    {
        comparisonTable.lblAvgPercent.Content = "-";
    }
    //Update Chart
    comparisonChart.UpdateChart
    (
        tTest.TTest.FirstSeriesMean,
        tTest.FirstSeries95ConfidenceBound,
        tTest.TTest.SecondSeriesMean,
        tTest.SecondSeries95ConfidenceBound
    );
}
public void TTestConstructorTest2()
{
    // Sample generated from a Gaussian distribution with mean 0.5 and unit
    // variance.
    double[] sample =
    {
        -0.849886940156521, 3.53492346633185, 1.22540422494611, 0.436945126810344,
        1.21474290382610, 0.295033941700225, 0.375855651783688, 1.98969760778547,
        1.90903448980048, 1.91719241342961
    };

    // Hypothesis under scrutiny: the sample mean of ~0.5 might not be
    // significantly different from zero (i.e. could be due to chance).
    double hypothesizedMean = 0;

    // Two-tailed: is the mean significantly different from zero?
    TTest different = new TTest(sample, hypothesizedMean, OneSampleHypothesis.ValueIsDifferentFromHypothesis);
    Assert.AreEqual(true, different.Significant);

    // Right tail: is the mean significantly greater than zero?
    TTest greater = new TTest(sample, hypothesizedMean, OneSampleHypothesis.ValueIsGreaterThanHypothesis);
    Assert.AreEqual(true, greater.Significant);

    // Left tail: the mean is NOT significantly smaller than zero.
    TTest smaller = new TTest(sample, hypothesizedMean, OneSampleHypothesis.ValueIsSmallerThanHypothesis);
    Assert.AreEqual(false, smaller.Significant);
}
/// <summary>
/// Builds the one-sample t-test requested by the active parameter set and
/// writes it to the pipeline.
/// </summary>
protected override void EndProcessing()
{
    var hypo = TestingHelper.GetOneSampleHypothesis(Alternate);
    TTest test;
    switch (ParameterSetName)
    {
        case "Samples":
            // Raw observations.
            test = new TTest(_data.ToArray(), HypothesizedMean, hypo);
            break;
        case "Mean":
            // Summary statistics: mean, standard deviation, sample count.
            test = new TTest(Mean, StdDev, Samples, HypothesizedMean, hypo);
            break;
        case "SE":
            // Estimated value with its standard error and degrees of freedom.
            test = new TTest(Value, StandardError, (double)DegreesOfFreedom, HypothesizedMean, hypo);
            break;
        default:
            // Previously an unmatched parameter set left 'test' null and the
            // 'test.Size = Size' below threw a NullReferenceException.
            throw new System.InvalidOperationException(
                "Unknown parameter set: " + ParameterSetName);
    }
    test.Size = Size;
    WriteObject(test);
}
// Parses two comma-separated number lists from the pop1/pop2 text boxes,
// chooses the alternative hypothesis from the clicked button's caption, and
// displays t-test and Wilcoxon results in the results text box.
private void Button_Click(object sender, RoutedEventArgs e)
{
    List <double> vpop1 = new List <double>();
    List <double> vpop2 = new List <double>();
    try
    {
        //Try comma separated instead
        var TextScores = pop1.Text.Replace(" ", string.Empty).Split(',').ToList();
        foreach (var S in TextScores)
        {
            vpop1.Add(double.Parse(S));
        }
        TextScores = pop2.Text.Replace(" ", string.Empty).Split(',').ToList();
        foreach (var S in TextScores)
        {
            vpop2.Add(double.Parse(S));
        }
    }
    catch
    {
        // Best-effort: malformed input silently aborts the computation.
        return;
    }
    // Map the button caption to the alternative hypothesis; unrecognized
    // captions fall back to "first is greater".
    var Hypo = TwoSampleHypothesis.FirstValueIsGreaterThanSecond;
    switch (((Button)sender).Content.ToString())
    {
        case "Dif":
            Hypo = TwoSampleHypothesis.ValuesAreDifferent;
            break;

        case "Less":
            Hypo = TwoSampleHypothesis.FirstValueIsSmallerThanSecond;
            break;

        case "More":
            Hypo = TwoSampleHypothesis.FirstValueIsGreaterThanSecond;
            break;

        default:
            break;
    }
    dynamic test, testWilcoxon;
    if (vpop1.Count == vpop2.Count)
    {
        // Equal sizes: treat the two series as paired observations.
        test = new PairedTTest(vpop1.ToArray(), vpop2.ToArray(), Hypo);
        testWilcoxon = new TwoSampleWilcoxonSignedRankTest(vpop1.ToArray(), vpop2.ToArray(), Hypo);
    }
    else
    {
        // Unequal sizes: one-sample tests of the first series against the
        // first value of the second series.
        // NOTE(review): the hypothesis here is hard-coded to
        // ValueIsSmallerThanHypothesis, ignoring the button selection above —
        // confirm this is intended.
        test = new TTest(
            vpop1.ToArray(),
            vpop2[0],
            OneSampleHypothesis.ValueIsSmallerThanHypothesis);
        testWilcoxon = new WilcoxonSignedRankTest(
            vpop1.ToArray(),
            vpop2[0],
            OneSampleHypothesis.ValueIsSmallerThanHypothesis);
    }
    // NOTE(review): the displayed label says "MannWhitneyWilcoxon Test" but the
    // objects constructed above are Wilcoxon signed-rank tests; the runtime
    // string is left untouched here.
    results.Text = "T-Test:\n Significant: " + test.Significant +
                   "\n p-value: " + test.PValue +
                   "\nMannWhitneyWilcoxon Test:\n Significant: " + testWilcoxon.Significant +
                   "\n p-value: " + testWilcoxon.PValue;
}
// Fits the multiple linear regression and derives the full analysis: sums of
// squares, R², the ANOVA table, coefficient standard errors, and per-model /
// per-coefficient hypothesis tests. Statement order matters: later statistics
// reuse quantities computed earlier (MSe, DFe, standardErrors, ...).
private void compute(double[][] x, double[] y)
{
    int n = x.Length;           // number of samples
    int p = NumberOfInputs;     // number of input variables
    SSt = 0;
    SSe = 0;
    outputMean = 0.0;
    NumberOfSamples = x.Length;

    // Compute the regression
    OrdinaryLeastSquares.Token = Token;
    regression = OrdinaryLeastSquares.Learn(x, y);
    informationMatrix = OrdinaryLeastSquares.GetInformationMatrix();

    // Calculate mean of the expected outputs
    outputMean = y.Mean();

    // Calculate actual outputs (results)
#pragma warning disable 612, 618
    results = regression.Transform(x);

    // Calculate SSe (residual sum of squares) and SSt (total sum of squares)
    for (int i = 0; i < x.Length; i++)
    {
        double d;
        d = y[i] - results[i];
        SSe += d * d;
        d = y[i] - outputMean;
        SSt += d * d;
    }

    // Calculate SSr (regression sum of squares)
    SSr = SSt - SSe;

    // Calculate R-Squared; a constant output (SSt == 0) is treated as a
    // perfect fit by convention.
    rSquared = (SSt != 0) ? 1.0 - (SSe / SSt) : 1.0;

    // Calculated Adjusted R-Squared; undefined (NaN) when n - p == 1 because
    // the adjustment divides by n - p - 1.
    if (rSquared == 1)
    {
        rAdjusted = 1;
    }
    else
    {
        if (n - p == 1)
        {
            rAdjusted = double.NaN;
        }
        else
        {
            rAdjusted = 1.0 - (1.0 - rSquared) * ((n - 1.0) / (n - p - 1.0));
        }
    }

    // Calculate Degrees of Freedom (the +1 accounts for the intercept term)
    DFr = p;
    DFe = n - (p + 1);
    DFt = DFr + DFe;

    // Calculate Sum of Squares Mean
    MSe = SSe / DFe;
    MSr = SSr / DFr;
    MSt = SSt / DFt;

    // Calculate the F statistic for overall model significance
    ftest = new FTest(MSr / MSe, DFr, DFe);
    stdError = Math.Sqrt(MSe);

    // Create the ANOVA table
    List <AnovaVariationSource> table = new List <AnovaVariationSource>();
    table.Add(new AnovaVariationSource(this, "Regression", SSr, DFr, MSr, ftest));
    table.Add(new AnovaVariationSource(this, "Error", SSe, DFe, MSe, null));
    table.Add(new AnovaVariationSource(this, "Total", SSt, DFt, MSt, null));
    this.anovaTable = new AnovaSourceCollection(table);

    // Compute coefficient standard errors;
    standardErrors = new double[NumberOfInputs + 1];
    for (int i = 0; i < informationMatrix.Length; i++)
    {
        standardErrors[i] = Math.Sqrt(MSe * informationMatrix[i][i]);
    }

    // Compute coefficient tests: a t-test per coefficient, the equivalent
    // F-test (t²), and the confidence interval at confidencePercent.
    for (int i = 0; i < CoefficientValues.Length; i++)
    {
        double tStatistic = CoefficientValues[i] / standardErrors[i];
        ttests[i] = new TTest(estimatedValue: CoefficientValues[i],
                              standardError: standardErrors[i], degreesOfFreedom: DFe);
        ftests[i] = new FTest(tStatistic * tStatistic, 1, DFe);
        confidences[i] = ttests[i].GetConfidenceInterval(confidencePercent);
    }

    // Compute model performance tests against the output mean.
    ttest = new TTest(results, outputMean);
    ztest = new ZTest(results, outputMean);
    chiSquareTest = new ChiSquareTest(y, results, n - p - 1);
#pragma warning restore 612, 618
}
// Fits one candidate regression per unique (heating, cooling) balance-point
// pair, keeps only the statistically acceptable fits, and returns the
// candidate with the highest R² among those with a non-negative intercept.
// Returns null (FirstOrDefault) when no candidate qualifies.
private AccordResult CalculateLinearRegression(List <BalancePointPair> allBalancePointPairs, WthNormalParams normalParamsKey)
{
    var allBalancePointGroups = allBalancePointPairs.GroupBy(s => new { s.CoolingBalancePoint, s.HeatingBalancePoint });
    List <AccordResult> accordResults = new List <AccordResult>();
    foreach (var group in allBalancePointGroups)
    {
        try
        {
            List <BalancePointPair> IdenticalBalancePointPairsFromAllReadings = group.ToList();
            BalancePointPair _pointPair = IdenticalBalancePointPairsFromAllReadings.First();
            int readingsCount = IdenticalBalancePointPairsFromAllReadings.Count;

            double[] fullYData = new double[readingsCount];
            double[] fullYDataDailyAvg = new double[readingsCount];
            double[][] hcddMatrix = new double[readingsCount][];
            // NOTE(review): hcddMatrixNonDaily is allocated but never populated or read.
            double[][] hcddMatrixNonDaily = new double[readingsCount][];

            // Build per-reading daily averages of usage and degree days.
            foreach (BalancePointPair balancePointPair in IdenticalBalancePointPairsFromAllReadings)
            {
                fullYData[IdenticalBalancePointPairsFromAllReadings.IndexOf(balancePointPair)] = (balancePointPair.ActualUsage);
                fullYDataDailyAvg[IdenticalBalancePointPairsFromAllReadings.IndexOf(balancePointPair)]
                    = (balancePointPair.ActualUsage / balancePointPair.DaysInReading);
                hcddMatrix[IdenticalBalancePointPairsFromAllReadings.IndexOf(balancePointPair)] = new double[]
                {
                    (balancePointPair.HeatingDegreeDays / balancePointPair.DaysInReading),
                    (balancePointPair.CoolingDegreeDays / balancePointPair.DaysInReading)
                };
            }

            // Split the matrix into separate HDD / CDD predictor vectors.
            double[] avgHddsForEachReadingInYear = new double[readingsCount];
            double[] avgCddsForEachReadingInYear = new double[readingsCount];
            for (int i = 0; i < readingsCount; i++)
            {
                avgHddsForEachReadingInYear[i] = hcddMatrix[i][0];
                avgCddsForEachReadingInYear[i] = hcddMatrix[i][1];
            }

            double[] modelParams = new double[3];
            modelParams[0] = 0;
            modelParams[1] = 0;
            modelParams[2] = 0;

            if (_pointPair.HeatingBalancePoint == 0 && _pointPair.CoolingBalancePoint == 0)
            {
                // No balance points: intercept-only model fitted through a
                // ones vector (i.e. the mean daily usage).
                double[] onesVector = new double[readingsCount];
                for (int i = 0; i < readingsCount; i++)
                {
                    onesVector[i] = 1;
                }
                modelParams[0] = Fit.LineThroughOrigin(onesVector, fullYDataDailyAvg);
                // NOTE(review): this OLS instance is created but never used in this branch.
                OrdinaryLeastSquares ols = new OrdinaryLeastSquares()
                {
                    UseIntercept = false
                };
                double r2 = MathNet.Numerics.GoodnessOfFit.CoefficientOfDetermination(
                    onesVector.Select(x => x * modelParams[0]), fullYDataDailyAvg);
                AccordResult accordResult = new AccordResult()
                {
                    IsSimpleSingleRegression = true,
                    HeatingBP = _pointPair.HeatingBalancePoint,
                    CoolingBP = _pointPair.CoolingBalancePoint,
                    Intercept = modelParams[0],
                    R2Accord = r2,
                };
                accordResults.Add(accordResult);
            }
            else if (_pointPair.CoolingBalancePoint != 0 && _pointPair.HeatingBalancePoint != 0)
            {
                // Both balance points: two-predictor multiple regression; kept
                // only when every coefficient's t-test is significant.
                try
                {
                    MultipleLinearRegressionAnalysis mlra = new MultipleLinearRegressionAnalysis(intercept: true);
                    mlra.Learn(hcddMatrix, fullYDataDailyAvg);
                    var regressionAccord = mlra.Regression;
                    double[] predicted = regressionAccord.Transform(hcddMatrix);
                    double r2Accord = new RSquaredLoss(numberOfInputs: 2, expected: fullYDataDailyAvg)
                    {
                        Adjust = false
                    }.Loss(predicted);
                    // NOTE(review): r2Coeff is computed but never used.
                    double r2Coeff = regressionAccord.CoefficientOfDetermination(hcddMatrix, fullYDataDailyAvg, adjust: false);
                    bool FTestFailed = !mlra.FTest.Significant;
                    AccordResult accordResult = new AccordResult()
                    {
                        IsMultipleLinearRegression = true,
                        HeatingBP = _pointPair.HeatingBalancePoint,
                        CoolingBP = _pointPair.CoolingBalancePoint,
                        Intercept = regressionAccord.Intercept,
                        B2 = regressionAccord.Weights[0],
                        B4 = regressionAccord.Weights[1],
                        R2Accord = r2Accord,
                        FTestFailed = FTestFailed
                    };
                    if (mlra.Coefficients.All(x => x.TTest.Significant))
                    {
                        accordResults.Add(accordResult);
                    }
                }
                catch (Exception e)
                {
                    Log.Debug(normalParamsKey.AccID + " " + normalParamsKey.UtilID + " " + normalParamsKey.UnitID + " "
                              + e.Message + " " + e.StackTrace);
                }
            }
            else if (_pointPair.HeatingBalancePoint > 0)
            {
                // Heating-only simple regression; the slope's significance is
                // tested manually via a hand-computed standard error.
                OrdinaryLeastSquares ols = new OrdinaryLeastSquares()
                {
                    UseIntercept = true
                };
                SimpleLinearRegression regressionAccord = ols.Learn(avgHddsForEachReadingInYear, fullYDataDailyAvg);
                double[] predictedAccord = regressionAccord.Transform(avgHddsForEachReadingInYear);
                double r2Accord = new RSquaredLoss(1, fullYDataDailyAvg).Loss(predictedAccord);
                // NOTE(review): df comes from normalParamsKey.MoCt, not from
                // readingsCount — confirm these always agree.
                int degreesOfFreedom = normalParamsKey.MoCt - 2;
                double ssx = Math.Sqrt((avgHddsForEachReadingInYear.Subtract(avgHddsForEachReadingInYear.Mean())).Pow(2).Sum());
                double s = Math.Sqrt(((fullYDataDailyAvg.Subtract(predictedAccord).Pow(2)).Sum()) / degreesOfFreedom);
                // NOTE(review): 'error' is computed but never used.
                double error = regressionAccord.GetStandardError(avgHddsForEachReadingInYear, fullYDataDailyAvg);
                double seSubB = s / ssx;
                double hypothesizedValue = 0;
                // Two-tailed t-test of the slope against zero.
                TTest tTest = new TTest(
                    estimatedValue: regressionAccord.Slope, standardError: seSubB, degreesOfFreedom: degreesOfFreedom,
                    hypothesizedValue: hypothesizedValue, alternate: OneSampleHypothesis.ValueIsDifferentFromHypothesis
                    );
                AccordResult accordResult = new AccordResult()
                {
                    IsSimpleSingleRegression = true,
                    HeatingBP = _pointPair.HeatingBalancePoint,
                    Intercept = regressionAccord.Intercept,
                    B2 = regressionAccord.Slope,
                    R2Accord = r2Accord
                };
                if (tTest.Significant)
                {
                    accordResults.Add(accordResult);
                }
            }
            else if (_pointPair.CoolingBalancePoint > 0)
            {
                // Cooling-only simple regression, mirroring the heating branch.
                OrdinaryLeastSquares ols = new OrdinaryLeastSquares()
                {
                    UseIntercept = true
                };
                SimpleLinearRegression regressionAccord = ols.Learn(avgCddsForEachReadingInYear, fullYDataDailyAvg);
                double[] predictedAccord = regressionAccord.Transform(avgCddsForEachReadingInYear);
                double rAccord = new RSquaredLoss(1, fullYDataDailyAvg).Loss(predictedAccord);
                int degreesOfFreedom = normalParamsKey.MoCt - 2;
                double ssx = Math.Sqrt(avgCddsForEachReadingInYear.Subtract(avgCddsForEachReadingInYear.Mean()).Pow(2).Sum());
                double s = Math.Sqrt(((fullYDataDailyAvg.Subtract(predictedAccord).Pow(2)).Sum()) / degreesOfFreedom);
                double seSubB = s / ssx;
                double hypothesizedValue = 0;
                // NOTE(review): 'myT' is unused, and the ratio looks inverted —
                // a t-statistic is normally Slope / seSubB, not seSubB / Slope.
                double myT = seSubB / regressionAccord.Slope;
                TTest tTest = new TTest(
                    estimatedValue: regressionAccord.Slope, standardError: seSubB, degreesOfFreedom: degreesOfFreedom,
                    hypothesizedValue: hypothesizedValue, alternate: OneSampleHypothesis.ValueIsDifferentFromHypothesis
                    );
                AccordResult accordResult = new AccordResult()
                {
                    IsSimpleSingleRegression = true,
                    CoolingBP = _pointPair.CoolingBalancePoint,
                    Intercept = regressionAccord.Intercept,
                    B4 = regressionAccord.Slope,
                    R2Accord = rAccord
                };
                if (tTest.Significant)
                {
                    accordResults.Add(accordResult);
                }
            }
            ;
        }
        catch (Exception e)
        {
            // Best-effort per group: a failed fit is logged and skipped.
            Log.Debug(normalParamsKey.AccID + " " + normalParamsKey.UtilID + " " + normalParamsKey.UnitID + " "
                      + e.Message + e.StackTrace);
        }
    }
    // Best candidate: non-negative intercept, then highest R².
    AccordResult accordWinner = accordResults
                                .Where(s => s.Intercept >= 0)
                                .OrderByDescending(s => s.R2Accord).ToList().FirstOrDefault();
    return(accordWinner);
}
// Documentation example + regression test: builds a two-predictor multiple
// linear regression analysis, then pins coefficients, standard errors, R²,
// the model/coefficient hypothesis tests, and the confidence/prediction
// intervals to known-good values.
public void learn_Test()
{
    #region doc_learn_part1
    // Consider the following data. An experimenter would
    // like to infer a relationship between two variables
    // A and B and a corresponding outcome variable R.
    double[][] example =
    {
        //                A    B      R
        new double[] { 6.41, 10.11, 26.1 },
        new double[] { 6.61, 22.61, 33.8 },
        new double[] { 8.45, 11.11, 52.7 },
        new double[] { 1.22, 18.11, 16.2 },
        new double[] { 7.42, 12.81, 87.3 },
        new double[] { 4.42, 10.21, 12.5 },
        new double[] { 8.61, 11.94, 77.5 },
        new double[] { 1.73, 13.13, 12.1 },
        new double[] { 7.47, 17.11, 86.5 },
        new double[] { 6.11, 15.13, 62.8 },
        new double[] { 1.42, 16.11, 17.5 },
    };

    // For this, we first extract the input and output
    // pairs. The first two columns have values for the
    // input variables, and the last for the output:
    double[][] inputs = example.GetColumns(new[] { 0, 1 });
    double[] output = example.GetColumn(2);

    // We can create a new multiple linear analysis for the variables
    var mlra = new MultipleLinearRegressionAnalysis(intercept: true);

    // Compute the analysis and obtain the estimated regression
    MultipleLinearRegression regression = mlra.Learn(inputs, output);
    #endregion

    // We can also show a summary ANOVA
    // Accord.Controls.DataGridBox.Show(regression.Table);

    #region doc_learn_part2
    // And also extract other useful information, such
    // as the linear coefficients' values and std errors:
    double[] coef = mlra.CoefficientValues;
    double[] stde = mlra.StandardErrors;

    // Coefficients of performance, such as r²
    double rsquared = mlra.RSquared; // 0.62879

    // Hypothesis tests for the whole model
    ZTest ztest = mlra.ZTest; // 0.99999
    FTest ftest = mlra.FTest; // 0.01898

    // and for individual coefficients
    TTest ttest0 = mlra.Coefficients[0].TTest; // 0.00622
    TTest ttest1 = mlra.Coefficients[1].TTest; // 0.53484

    // and also extract confidence intervals
    DoubleRange ci = mlra.Coefficients[0].Confidence; // [3.2616, 14.2193]

    // We can use the analysis to predict an output for a sample
    double y = mlra.Regression.Transform(new double[] { 10, 15 });

    // We can also obtain confidence intervals for the prediction:
    DoubleRange pci = mlra.GetConfidenceInterval(new double[] { 10, 15 });

    // and also prediction intervals for the same prediction:
    DoubleRange ppi = mlra.GetPredictionInterval(new double[] { 10, 15 });
    #endregion

    // Coefficients: [slope A, slope B, intercept] — intercept is last.
    Assert.AreEqual(3, coef.Length);
    Assert.AreEqual(8.7405051051757816, coef[0]);
    Assert.AreEqual(1.1198079243314365, coef[1], 1e-10);
    Assert.AreEqual(-19.604474518407862, coef[2], 1e-10);
    Assert.IsFalse(coef.HasNaN());

    Assert.AreEqual(2.375916659234715, stde[0], 1e-10);
    Assert.AreEqual(1.7268508921418664, stde[1], 1e-10);
    Assert.AreEqual(30.989640986710953, stde[2], 1e-10);
    Assert.IsFalse(coef.HasNaN());

    Assert.AreEqual(0.62879941171298936, rsquared, 1e-10);
    Assert.AreEqual(0.99999999999999822, ztest.PValue, 1e-10);
    Assert.AreEqual(0.018986050133298293, ftest.PValue, 1e-10);

    Assert.AreEqual(0.0062299844256985537, ttest0.PValue, 1e-10);
    Assert.AreEqual(0.53484850318449118, ttest1.PValue, 1e-14);
    Assert.IsFalse(Double.IsNaN(ttest1.PValue));

    Assert.AreEqual(3.2616314640800566, ci.Min, 1e-10);
    Assert.AreEqual(14.219378746271506, ci.Max, 1e-10);

    // Cross-check: the analysis' confidence interval must match the one
    // computed directly from the regression and its information matrix.
    double[][] im = mlra.InformationMatrix;
    double mse = regression.GetStandardError(inputs, output);
    DoubleRange epci = regression.GetConfidenceInterval(new double[] { 10, 15 }, mse, inputs.Length, im);
    Assert.AreEqual(epci.Min, pci.Min, 1e-10);
    Assert.AreEqual(epci.Max, pci.Max, 1e-10);

    Assert.AreEqual(55.27840511658215, pci.Min, 1e-10);
    Assert.AreEqual(113.91698568006086, pci.Max, 1e-10);

    // Prediction interval is wider than the confidence interval.
    Assert.AreEqual(28.783074454641557, ppi.Min, 1e-10);
    Assert.AreEqual(140.41231634200145, ppi.Max, 1e-10);
}
// Same dataset and expected values as learn_Test, but exercised through the
// older constructor + Compute() API instead of Learn(); both code paths must
// produce identical statistics.
public void ComputeTest2()
{
    // Consider the following data. An experimenter would
    // like to infer a relationship between two variables
    // A and B and a corresponding outcome variable R.
    double[][] example =
    {
        //                A      B      R
        new double[] { 6.41, 10.11, 26.1 },
        new double[] { 6.61, 22.61, 33.8 },
        new double[] { 8.45, 11.11, 52.7 },
        new double[] { 1.22, 18.11, 16.2 },
        new double[] { 7.42, 12.81, 87.3 },
        new double[] { 4.42, 10.21, 12.5 },
        new double[] { 8.61, 11.94, 77.5 },
        new double[] { 1.73, 13.13, 12.1 },
        new double[] { 7.47, 17.11, 86.5 },
        new double[] { 6.11, 15.13, 62.8 },
        new double[] { 1.42, 16.11, 17.5 },
    };

    // For this, we first extract the input and output
    // pairs. The first two columns have values for the
    // input variables, and the last for the output:
    double[][] inputs = example.GetColumns(0, 1);
    double[] output = example.GetColumn(2);

    // Next, we can create a new multiple linear regression for the variables
    var regression = new MultipleLinearRegressionAnalysis(inputs, output, intercept: true);

    regression.Compute(); // compute the analysis

    // Now we can show a summary of analysis
    // Accord.Controls.DataGridBox.Show(regression.Coefficients);

    // We can also show a summary ANOVA
    // Accord.Controls.DataGridBox.Show(regression.Table);

    // And also extract other useful information, such
    // as the linear coefficients' values and std errors:
    double[] coef = regression.CoefficientValues;
    double[] stde = regression.StandardErrors;

    // Coefficients of performance, such as r²
    double rsquared = regression.RSquared;

    // Hypothesis tests for the whole model
    ZTest ztest = regression.ZTest;
    FTest ftest = regression.FTest;

    // and for individual coefficients
    TTest ttest0 = regression.Coefficients[0].TTest;
    TTest ttest1 = regression.Coefficients[1].TTest;

    // and also extract confidence intervals
    DoubleRange ci = regression.Coefficients[0].Confidence;

    // Three coefficients: A, B, and the intercept.
    Assert.AreEqual(3, coef.Length);
    Assert.AreEqual(8.7405051051757816, coef[0]);
    Assert.AreEqual(1.1198079243314365, coef[1], 1e-10);
    Assert.AreEqual(-19.604474518407862, coef[2], 1e-10);
    Assert.IsFalse(coef.HasNaN());

    Assert.AreEqual(2.375916659234715, stde[0], 1e-10);
    Assert.AreEqual(1.7268508921418664, stde[1], 1e-10);
    Assert.AreEqual(30.989640986710953, stde[2], 1e-10);
    Assert.IsFalse(coef.HasNaN());

    Assert.AreEqual(0.62879941171298936, rsquared);
    Assert.AreEqual(0.99999999999999822, ztest.PValue);
    Assert.AreEqual(0.018986050133298293, ftest.PValue, 1e-10);
    Assert.AreEqual(0.0062299844256985537, ttest0.PValue);
    Assert.AreEqual(0.53484850318449118, ttest1.PValue, 1e-14);
    Assert.IsFalse(Double.IsNaN(ttest1.PValue));
    Assert.AreEqual(3.2616314640800566, ci.Min);
    Assert.AreEqual(14.219378746271506, ci.Max);
}
/// <summary>
///   Fits a regression candidate for every distinct (heating, cooling)
///   balance-point combination found in <paramref name="allBalancePointPairs"/>
///   and returns the candidates that pass the significance screening.
/// </summary>
/// <param name="allBalancePointPairs">All readings, one entry per reading per balance-point combination.</param>
/// <param name="normalParamsKey">Identifies the account/utility/unit; used only for error logging.</param>
/// <returns>Accepted regression candidates; rejected ones are discarded.</returns>
private List<AccordResult> CalculateLinearRegression(List<BalancePointPair> allBalancePointPairs, WthNormalParams normalParamsKey)
{
    // One regression candidate per distinct balance-point combination.
    var allBalancePointGroups = allBalancePointPairs.GroupBy(s => new { s.CoolingBalancePoint, s.HeatingBalancePoint });

    List<AccordResult> accordResults = new List<AccordResult>();
    List<AccordResult> rejectedAccords = new List<AccordResult>();

    foreach (var group in allBalancePointGroups)
    {
        try
        {
            List<BalancePointPair> readings = group.ToList();
            BalancePointPair _pointPair = readings.First();
            int readingsCount = readings.Count;

            double[] fullYData = new double[readingsCount];
            double[] fullYDataDailyAvg = new double[readingsCount];
            double[][] hcddMatrix = new double[readingsCount][];

            // Fill all arrays in a single indexed pass. (The previous version
            // called IndexOf(...) per element, which is O(n^2) and maps any
            // duplicate readings onto the same slot.)
            for (int i = 0; i < readingsCount; i++)
            {
                BalancePointPair reading = readings[i];
                fullYData[i] = reading.ActualUsage;
                fullYDataDailyAvg[i] = reading.ActualUsage / reading.DaysInReading;
                hcddMatrix[i] = new double[]
                {
                    reading.HeatingDegreeDays / reading.DaysInReading,
                    reading.CoolingDegreeDays / reading.DaysInReading
                };
            }

            // Column views of the degree-day matrix for the single-variable fits.
            double[] avgHddsForEachReadingInYear = new double[readingsCount];
            double[] avgCddsForEachReadingInYear = new double[readingsCount];
            for (int i = 0; i < readingsCount; i++)
            {
                avgHddsForEachReadingInYear[i] = hcddMatrix[i][0];
                avgCddsForEachReadingInYear[i] = hcddMatrix[i][1];
            }

            if (fullYData.Sum() == 0)
            {
                // No usage at all: record an empty candidate so this
                // balance-point combination is still represented downstream.
                accordResults.Add(new AccordResult { bpPair = _pointPair });
            }
            else if (_pointPair.HeatingBalancePoint == 0 && _pointPair.CoolingBalancePoint == 0)
            {
                // Weather-independent model: regress daily average usage on a
                // constant regressor through the origin, so the fitted "slope"
                // is the mean daily usage.
                double[] onesVector = new double[readingsCount];
                for (int i = 0; i < readingsCount; i++)
                {
                    onesVector[i] = 1;
                }

                double slope = Fit.LineThroughOrigin(onesVector, fullYDataDailyAvg);

                OrdinaryLeastSquares ols = new OrdinaryLeastSquares()
                {
                    UseIntercept = false
                };
                SimpleLinearRegression regressionAccord = ols.Learn(onesVector, fullYDataDailyAvg);

                double r2 = MathNet.Numerics.GoodnessOfFit.CoefficientOfDetermination(
                    onesVector.Select(x => x * slope), fullYDataDailyAvg);

                AccordResult accordResult = new AccordResult()
                {
                    SimpleLinearRegression = regressionAccord,
                    R2Accord = r2,
                    IsSimpleSingleRegression = true,
                    HeatingBP = _pointPair.HeatingBalancePoint,
                    CoolingBP = _pointPair.CoolingBalancePoint,
                    // For the through-origin fit the Slope is the constant term.
                    Intercept = regressionAccord.Slope,
                    bpPair = _pointPair
                };
                accordResults.Add(accordResult);
            }
            else if (_pointPair.CoolingBalancePoint != 0 && _pointPair.HeatingBalancePoint != 0)
            {
                // Both balance points in play: multiple regression on
                // (HDD/day, CDD/day) with intercept.
                try
                {
                    MultipleLinearRegressionAnalysis mlra = new MultipleLinearRegressionAnalysis(intercept: true);
                    mlra.Learn(hcddMatrix, fullYDataDailyAvg);
                    var regressionAccord = mlra.Regression;

                    double[] predicted = regressionAccord.Transform(hcddMatrix);
                    double r2Accord = new RSquaredLoss(numberOfInputs: 2, expected: fullYDataDailyAvg)
                    {
                        Adjust = false
                    }.Loss(predicted);
                    double r2Coeff = regressionAccord.CoefficientOfDetermination(hcddMatrix, fullYDataDailyAvg, adjust: false);

                    AccordResult accordResult = new AccordResult()
                    {
                        R2Accord = r2Accord,
                        R2Coeff = r2Coeff,
                        HeatingBP = _pointPair.HeatingBalancePoint,
                        CoolingBP = _pointPair.CoolingBalancePoint,
                        IsSimpleSingleRegression = false,
                        MLRA = mlra,
                        Intercept = regressionAccord.Intercept,
                        bpPair = _pointPair,
                        IsMultipleLinearRegression = true
                    };

                    // Keep the model only when every coefficient is significant.
                    if (mlra.Coefficients.All(x => x.TTest.Significant))
                    {
                        accordResults.Add(accordResult);
                    }
                    else
                    {
                        rejectedAccords.Add(accordResult);
                    }
                }
                catch (Exception e)
                {
                    // Best-effort: a failed multiple regression should not stop
                    // the other balance-point candidates from being evaluated.
                    Console.WriteLine(e.Message + " " + e.StackTrace);
                }
            }
            else if (_pointPair.HeatingBalancePoint > 0)
            {
                // Heating-only model: usage/day vs HDD/day.
                EvaluateSimpleRegression(avgHddsForEachReadingInYear, fullYDataDailyAvg,
                    _pointPair, accordResults, rejectedAccords);
            }
            else if (_pointPair.CoolingBalancePoint > 0)
            {
                // Cooling-only model: usage/day vs CDD/day.
                EvaluateSimpleRegression(avgCddsForEachReadingInYear, fullYDataDailyAvg,
                    _pointPair, accordResults, rejectedAccords);
            }
        }
        catch (Exception e)
        {
            Console.WriteLine(normalParamsKey.AccID + " " + normalParamsKey.UtilID + " " + normalParamsKey.UnitID
                + " " + e.Message + e.StackTrace);
        }
    }

    return accordResults;
}

/// <summary>
///   Fits y on x by OLS with intercept, t-tests the slope against zero, and
///   files the candidate into <paramref name="accepted"/> or
///   <paramref name="rejected"/>. Shared by the heating-only and cooling-only
///   branches of CalculateLinearRegression.
/// </summary>
private void EvaluateSimpleRegression(double[] x, double[] y, BalancePointPair pointPair,
    List<AccordResult> accepted, List<AccordResult> rejected)
{
    OrdinaryLeastSquares ols = new OrdinaryLeastSquares()
    {
        UseIntercept = true
    };
    SimpleLinearRegression regression = ols.Learn(x, y);
    double[] predicted = regression.Transform(x);
    double r2 = new RSquaredLoss(1, y).Loss(predicted);

    // Standard error of the slope: s / sqrt(Sxx), where s is the residual
    // standard error on (n - 2) degrees of freedom. NOTE(review): degrees of
    // freedom come from ReadingsInNormalYear, not from x.Length — confirm this
    // is intentional when the two differ.
    int degreesOfFreedom = pointPair.ReadingsInNormalYear - 2;
    double ssx = Math.Sqrt(x.Subtract(x.Mean()).Pow(2).Sum());
    double s = Math.Sqrt(y.Subtract(predicted).Pow(2).Sum() / degreesOfFreedom);
    double seSubB = s / ssx;

    // Two-sided test of H0: slope == 0.
    TTest tTest = new TTest(
        estimatedValue: regression.Slope, standardError: seSubB, degreesOfFreedom: degreesOfFreedom,
        hypothesizedValue: 0, alternate: OneSampleHypothesis.ValueIsDifferentFromHypothesis
        );

    AccordResult result = new AccordResult()
    {
        SimpleLinearRegression = regression,
        R2Accord = r2,
        IsSimpleSingleRegression = true,
        HeatingBP = pointPair.HeatingBalancePoint,
        CoolingBP = pointPair.CoolingBalancePoint,
        TTest = tTest,
        Intercept = regression.Intercept,
        bpPair = pointPair
    };

    if (tTest.Significant)
    {
        accepted.Add(result);
    }
    else
    {
        rejected.Add(result);
    }
}
/// <summary>
///   Computes the Multiple Linear Regression Analysis: fits the regression,
///   then derives the ANOVA decomposition (SSt/SSe/SSr), R² and adjusted R²,
///   the model F-test, per-coefficient standard errors, t-/F-tests and
///   confidence intervals, and overall model tests.
/// </summary>
///
public void Compute()
{
    int n = inputData.Length; // number of observations
    int p = inputCount;       // number of input variables (excluding intercept)

    SSt = SSe = outputMean = 0.0;

    // Compute the regression
    // (informationMatrix is produced by Regress; presumably (X'X)^-1 — it is
    // used below to scale MSe into coefficient variances.)
    double[,] informationMatrix;
    regression.Regress(inputData, outputData, out informationMatrix);

    // Calculate mean of the expected outputs
    for (int i = 0; i < outputData.Length; i++)
    {
        outputMean += outputData[i];
    }
    outputMean /= outputData.Length;

    // Calculate actual outputs (results)
    results = new double[inputData.Length];
    for (int i = 0; i < inputData.Length; i++)
    {
        results[i] = regression.Compute(inputData[i]);
    }

    // Calculate SSe (residual sum of squares) and SSt (total sum of squares)
    for (int i = 0; i < inputData.Length; i++)
    {
        double d;

        d = outputData[i] - results[i];
        SSe += d * d;

        d = outputData[i] - outputMean;
        SSt += d * d;
    }

    // Calculate SSr (regression sum of squares)
    SSr = SSt - SSe;

    // Calculate R-Squared (defined as 1 when the output is constant, SSt == 0)
    rSquared = (SSt != 0) ? 1.0 - (SSe / SSt) : 1.0;

    // Calculate Adjusted R-Squared
    if (rSquared == 1)
    {
        rAdjusted = 1;
    }
    else
    {
        if (n - p == 1)
        {
            // Adjustment denominator (n - p - 1) would be zero.
            rAdjusted = double.NaN;
        }
        else
        {
            rAdjusted = 1.0 - (1.0 - rSquared) * ((n - 1.0) / (n - p - 1.0));
        }
    }

    // Calculate Degrees of Freedom
    DFr = p;
    DFe = n - (p + 1);
    DFt = DFr + DFe;

    // Calculate Sum of Squares Mean (mean squares)
    MSe = SSe / DFe;
    MSr = SSr / DFr;
    MSt = SSt / DFt;

    // Calculate the F statistic for the overall model
    ftest = new FTest(MSr / MSe, DFr, DFe);
    stdError = Math.Sqrt(MSe);

    // Create the ANOVA table
    List <AnovaVariationSource> table = new List <AnovaVariationSource>();
    table.Add(new AnovaVariationSource(this, "Regression", SSr, DFr, MSr, ftest));
    table.Add(new AnovaVariationSource(this, "Error", SSe, DFe, MSe, null));
    table.Add(new AnovaVariationSource(this, "Total", SSt, DFt, MSt, null));
    this.anovaTable = new AnovaSourceCollection(table);

    // Compute coefficient standard errors: se_i = sqrt(MSe * [info]_ii)
    standardErrors = new double[coefficientCount];
    for (int i = 0; i < standardErrors.Length; i++)
    {
        standardErrors[i] = Math.Sqrt(MSe * informationMatrix[i, i]);
    }

    // Compute per-coefficient tests (t-test against 0, equivalent F = t²,
    // and the confidence interval at confidencePercent)
    for (int i = 0; i < regression.Coefficients.Length; i++)
    {
        double tStatistic = regression.Coefficients[i] / standardErrors[i];

        ttests[i] = new TTest(estimatedValue: regression.Coefficients[i], standardError: standardErrors[i], degreesOfFreedom: DFe);
        ftests[i] = new FTest(tStatistic * tStatistic, 1, DFe);
        confidences[i] = ttests[i].GetConfidenceInterval(confidencePercent);
    }

    // Compute model performance tests on the fitted values
    ttest = new TTest(results, outputMean);
    ztest = new ZTest(results, outputMean);
    chiSquareTest = new ChiSquareTest(outputData, results, n - p - 1);
}
public void PowerTest() { int samples = 5; double stdDev = 1; double mean = 0.2; { TTest test = new TTest(mean, stdDev: stdDev, samples: samples, alternate: OneSampleHypothesis.ValueIsSmallerThanHypothesis); Assert.AreEqual(4, test.StatisticDistribution.DegreesOfFreedom); Assert.AreEqual(0.02138791, test.Analysis.Power, 1e-6); Assert.AreEqual(0.2, test.Analysis.Effect); Assert.AreEqual(5, test.Analysis.Samples); TTestPowerAnalysis target = (TTestPowerAnalysis)test.Analysis; target.Power = 0.6; target.ComputeSamples(); Assert.IsTrue(Double.IsNaN(target.Samples)); Assert.AreEqual(0.6, target.Power, 1e-6); Assert.AreEqual(0.2, target.Effect); } { TTest test = new TTest(mean, stdDev: stdDev, samples: samples, alternate: OneSampleHypothesis.ValueIsGreaterThanHypothesis); Assert.AreEqual(4, test.StatisticDistribution.DegreesOfFreedom); Assert.AreEqual(0.2, test.Analysis.Effect); Assert.AreEqual(0.102444276600, test.Analysis.Power, 1e-6); Assert.AreEqual(5, test.Analysis.Samples, 1e-4); TTestPowerAnalysis target = (TTestPowerAnalysis)test.Analysis; target.Power = 0.6; target.ComputeSamples(); Assert.AreEqual(91.444828012, target.Samples, 1e-6); Assert.AreEqual(0.6, target.Power, 1e-6); Assert.AreEqual(0.2, target.Effect); } { TTest test = new TTest(mean, stdDev: stdDev, samples: samples, alternate: OneSampleHypothesis.ValueIsDifferentFromHypothesis); Assert.AreEqual(4, test.StatisticDistribution.DegreesOfFreedom); Assert.AreEqual(0.2, test.Analysis.Effect); Assert.AreEqual(0.06426957, test.Analysis.Power, 1e-6); Assert.AreEqual(5, test.Analysis.Samples, 1e-4); TTestPowerAnalysis target = (TTestPowerAnalysis)test.Analysis; target.Power = 0.6; target.ComputeSamples(); Assert.AreEqual(124.3957558, target.Samples, 1e-6); Assert.AreEqual(0.6, target.Power, 1e-6); Assert.AreEqual(0.2, target.Effect); } }
public void OneSampleFromList() { var result = TTest.OneSample(7, new[] { 1.0, 2, 3, 4, 5, 6, 7, 8, 9, 10 }); Assert.Equal(-1.566699, result.T, 4); }