public void InverseDistributionFunctionTest()
{
    TDistribution target;
    double[] expected;

    target = new TDistribution(1);
    expected = new double[] { 6.3138, 3.0777, 1.9626, 1.3764, 1, 0.7265, 0.5095, 0.3249, 0.1584, 0 };
    for (int i = 1; i <= 10; i++)
    {
        double percent = i / 10.0;
        double actual = target.InverseDistributionFunction(1.0 - percent / 2);
        Assert.AreEqual(expected[i - 1], actual, 1e-4);
        Assert.IsFalse(Double.IsNaN(actual));
    }

    target = new TDistribution(4.2);
    expected = new double[] { 2.103, 1.5192, 1.1814, 0.9358, 0.7373, 0.5664, 0.4127, 0.2699, 0.1334, 0 };
    for (int i = 1; i <= 10; i++)
    {
        double percent = i / 10.0;
        double actual = target.InverseDistributionFunction(1.0 - percent / 2);
        Assert.AreEqual(expected[i - 1], actual, 1e-4);
        Assert.IsFalse(Double.IsNaN(actual));
    }
}
protected override void EndProcessing()
{
    var dist = new TDistribution(DegreesOfFreedom);
    var obj = DistributionHelper.AddConvinienceMethods(dist);
    WriteObject(obj);
}
public void ProbabilityDensityFunctionTest()
{
    TDistribution target = new TDistribution(1);
    double expected = 0.31830988618379075;
    double actual = target.ProbabilityDensityFunction(0);
    Assert.AreEqual(expected, actual);

    expected = 0.017076710632177614;
    actual = target.ProbabilityDensityFunction(4.2);
    Assert.AreEqual(expected, actual);

    target = new TDistribution(2);
    expected = 0.35355339059327379;
    actual = target.ProbabilityDensityFunction(0);
    Assert.AreEqual(expected, actual);

    expected = 0.011489146700777093;
    actual = target.ProbabilityDensityFunction(4.2);
    Assert.AreEqual(expected, actual);

    target = new TDistribution(3);
    expected = 0.36755259694786141;
    actual = target.ProbabilityDensityFunction(0);
    Assert.AreEqual(expected, actual);

    expected = 0.0077650207237835792;
    actual = target.ProbabilityDensityFunction(4.2);
    Assert.AreEqual(expected, actual);
}
public void ConstructorTest()
{
    var t = new TDistribution(degreesOfFreedom: 4.2);

    double mean = t.Mean;     // 0.0
    double median = t.Median; // 0.0
    double var = t.Variance;  // 1.9090909090909089

    double cdf = t.DistributionFunction(x: 1.4);               // 0.88456136730659074
    double pdf = t.ProbabilityDensityFunction(x: 1.4);         // 0.13894002185341031
    double lpdf = t.LogProbabilityDensityFunction(x: 1.4);     // -1.9737129364307417
    double ccdf = t.ComplementaryDistributionFunction(x: 1.4); // 0.11543863269340926
    double icdf = t.InverseDistributionFunction(p: cdf);       // 1.4000000000000012
    double hf = t.HazardFunction(x: 1.4);                      // 1.2035833984833988
    double chf = t.CumulativeHazardFunction(x: 1.4);           // 2.1590162088918525

    string str = t.ToString(CultureInfo.InvariantCulture);     // T(x; df = 4.2)

    Assert.AreEqual(0.0, mean);
    Assert.AreEqual(0.0, median);
    Assert.AreEqual(1.9090909090909089, var);
    Assert.AreEqual(2.1590162088918525, chf);
    Assert.AreEqual(0.88456136730659074, cdf);
    Assert.AreEqual(0.13894002185341031, pdf);
    Assert.AreEqual(-1.9737129364307417, lpdf);
    Assert.AreEqual(1.2035833984833988, hf);
    Assert.AreEqual(0.11543863269340926, ccdf);
    Assert.AreEqual(1.4000000000000012, icdf);
    Assert.AreEqual("T(x; df = 4.2)", str);
}
public ActionResult Contrast(FormCollection form)
{
    ViewBag.SSA = form.GetValues("ssa").Select(x => double.Parse(x)).FirstOrDefault();
    ViewBag.SSE = form.GetValues("sse").Select(x => double.Parse(x)).FirstOrDefault();
    ViewBag.Ftest = form.GetValues("ftest").Select(x => double.Parse(x)).FirstOrDefault();
    ViewBag.Sc = form.GetValues("Sc").Select(x => double.Parse(x)).FirstOrDefault();
    ViewBag.n = form.GetValues("n").Select(x => int.Parse(x)).FirstOrDefault();
    ViewBag.k = form.GetValues("k").Select(x => int.Parse(x)).FirstOrDefault();
    ViewBag.Alt1 = form.GetValues("alt1").Select(x => int.Parse(x)).FirstOrDefault();
    ViewBag.Alt2 = form.GetValues("alt2").Select(x => int.Parse(x)).FirstOrDefault();
    ViewBag.Certainty = form.GetValues("certainty").Select(x => double.Parse(x)).FirstOrDefault();

    string alts = form.GetValues("alts").FirstOrDefault();
    ViewBag.alts = alts.Split(',').Select(x => double.Parse(x)).ToArray();

    // Contrast between the two selected treatment means
    double c = ViewBag.alts[ViewBag.Alt1 - 1] - ViewBag.alts[ViewBag.Alt2 - 1];

    // t distribution with k * (n - 1) error degrees of freedom
    var tdist = new TDistribution(ViewBag.k * (ViewBag.n - 1));
    double alpha = 1 - ViewBag.Certainty;
    double div = tdist.InverseCDF(1 - alpha / 2) * ViewBag.Sc;

    ViewBag.LowLimit = c - div;
    ViewBag.HighLimit = c + div;

    return View("Result");
}
public void LogProbabilityDensityFunctionTest()
{
    TDistribution target = new TDistribution(1);
    double expected = System.Math.Log(0.31830988618379075);
    double actual = target.LogProbabilityDensityFunction(0);
    Assert.AreEqual(expected, actual);

    expected = System.Math.Log(0.017076710632177614);
    actual = target.LogProbabilityDensityFunction(4.2);
    Assert.AreEqual(expected, actual, 1e-6);

    target = new TDistribution(2);
    expected = System.Math.Log(0.35355339059327379);
    actual = target.LogProbabilityDensityFunction(0);
    Assert.AreEqual(expected, actual, 1e-6);

    expected = System.Math.Log(0.011489146700777093);
    actual = target.LogProbabilityDensityFunction(4.2);
    Assert.AreEqual(expected, actual, 1e-6);

    target = new TDistribution(3);
    expected = System.Math.Log(0.36755259694786141);
    actual = target.LogProbabilityDensityFunction(0);
    Assert.AreEqual(expected, actual, 1e-6);

    expected = System.Math.Log(0.0077650207237835792);
    actual = target.LogProbabilityDensityFunction(4.2);
    Assert.AreEqual(expected, actual, 1e-6);
}
/// <summary>
///   Tests the null hypothesis that the population mean is equal to a specified value.
/// </summary>
///
public TTest(double[] sample, double hypothesizedMean, TTestHypotesis type)
{
    int n = sample.Length;
    double x = Accord.Statistics.Tools.Mean(sample);
    double s = Accord.Statistics.Tools.StandardDeviation(sample, x);

    StatisticDistribution = new TDistribution(n - 1);
    Statistic = (x - hypothesizedMean) / (s / Math.Sqrt(n));

    if (type == TTestHypotesis.MeanIsDifferentThanHypothesis)
    {
        // Two-tailed p-value; taking the absolute value keeps the result in [0, 1]
        // when the statistic is negative.
        PValue = 2.0 * StatisticDistribution.SurvivalFunction(Math.Abs(Statistic));
        Hypothesis = Testing.Hypothesis.TwoTail;
    }
    else if (type == TTestHypotesis.MeanIsGreaterThanHypothesis)
    {
        PValue = StatisticDistribution.SurvivalFunction(Statistic);
        Hypothesis = Testing.Hypothesis.OneUpper;
    }
    else if (type == TTestHypotesis.MeanIsSmallerThanHypothesis)
    {
        PValue = StatisticDistribution.DistributionFunction(Statistic);
        Hypothesis = Testing.Hypothesis.OneLower;
    }
}
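A minimal usage sketch for the constructor above; the sample values and the 0.05 threshold are illustrative and not taken from the original source:

// Hypothetical data; tests H0: mean = 5.0 against a two-sided alternative.
double[] sample = { 5.2, 4.9, 5.4, 5.1, 4.8, 5.3 };

var test = new TTest(sample, hypothesizedMean: 5.0,
    TTestHypotesis.MeanIsDifferentThanHypothesis);

// Reject H0 at the 5% level when the p-value falls below alpha.
bool significant = test.PValue < 0.05;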
public void TestCriticalPointCalculation()
{
    foreach (int key in s_T_Coeff_95.Keys)
    {
        var coeff = TDistribution.CalculateCriticalValue(key, (1 - 0.95), 0.0001);
        Assert.AreEqual(s_T_Coeff_95[key], coeff, 0.002);
    }
}
public void FitTest()
{
    bool thrown = false;
    TDistribution target = new TDistribution(1);

    try
    {
        target.Fit(null, null, null);
    }
    catch (NotSupportedException)
    {
        thrown = true;
    }

    Assert.IsTrue(thrown);
}
public void InverseDistributionFunctionTest2()
{
    TDistribution target = new TDistribution(24);

    double expected = 1.710882023;
    double actual = target.InverseDistributionFunction(0.95);

    Assert.AreEqual(expected, actual, 1e-06);
}
public double GetSignificance()
{
    if (this.n < 3L)
    {
        return Double.NaN;
    }

    TDistribution distribution = new TDistribution(this.n - 2L);
    return 2.0D * (1.0D - distribution.cumulativeProbability(Math.Abs(GetSlope()) / GetSlopeStdErr()));
}
public void CloneTest()
{
    int degreesOfFreedom = 5;
    TDistribution target = new TDistribution(degreesOfFreedom);
    TDistribution clone = (TDistribution)target.Clone();

    Assert.AreNotSame(target, clone);
    Assert.AreEqual(target.DegreesOfFreedom, clone.DegreesOfFreedom);
    Assert.AreEqual(target.Mean, clone.Mean);
    Assert.AreEqual(target.Variance, clone.Variance);
}
public void ConstructorTest()
{
    var t = new TDistribution(degreesOfFreedom: 4.2);

    double mean = t.Mean;     // 0.0
    double median = t.Median; // 0.0
    double var = t.Variance;  // 1.9090909090909089
    double mode = t.Mode;

    double cdf = t.DistributionFunction(x: 1.4);               // 0.88456136730659074
    double pdf = t.ProbabilityDensityFunction(x: 1.4);         // 0.13894002185341031
    double lpdf = t.LogProbabilityDensityFunction(x: 1.4);     // -1.9737129364307417
    double ccdf = t.ComplementaryDistributionFunction(x: 1.4); // 0.11543863269340926
    double icdf = t.InverseDistributionFunction(p: cdf);       // 1.4000000000000012
    double hf = t.HazardFunction(x: 1.4);                      // 1.2035833984833988
    double chf = t.CumulativeHazardFunction(x: 1.4);           // 2.1590162088918525

    string str = t.ToString(CultureInfo.InvariantCulture);     // T(x; df = 4.2)

    Assert.AreEqual(double.NegativeInfinity, t.Support.Min);
    Assert.AreEqual(double.PositiveInfinity, t.Support.Max);

    double icdf0 = t.InverseDistributionFunction(0);
    double icdf1 = t.InverseDistributionFunction(1);
    Assert.AreEqual(icdf0, t.Support.Min);
    Assert.AreEqual(icdf1, t.Support.Max);

    Assert.AreEqual(0.0, mean);
    Assert.AreEqual(0.0, median);
    Assert.AreEqual(0.0, mode);
    Assert.AreEqual(1.9090909090909089, var);
    Assert.AreEqual(2.1590162088918525, chf);
    Assert.AreEqual(0.88456136730659074, cdf);
    Assert.AreEqual(0.13894002185341031, pdf);
    Assert.AreEqual(-1.9737129364307417, lpdf);
    Assert.AreEqual(1.2035833984833988, hf);
    Assert.AreEqual(0.11543863269340926, ccdf);
    Assert.AreEqual(1.4000000000000012, icdf);
    Assert.AreEqual("T(x; df = 4.2)", str);

    var range1 = t.GetRange(0.95);
    var range2 = t.GetRange(0.99);
    var range3 = t.GetRange(0.01);

    Assert.AreEqual(-2.1030107450099362, range1.Min);
    Assert.AreEqual(2.1030107450099362, range1.Max);
    Assert.AreEqual(-3.6502571302187774, range2.Min);
    Assert.AreEqual(3.6502571302187774, range2.Max);
    Assert.AreEqual(-3.6502571302187792, range3.Min);
    Assert.AreEqual(3.6502571302187774, range3.Max);
}
public StudentUniformDistribution(double uniformLowerBound, double uniformUpperBound, double mean, double tStd, double degreesOfFreedom)
{
    ua = uniformLowerBound;
    ub = uniformUpperBound;
    df = degreesOfFreedom;
    tm = mean;
    ts = tStd;

    baseDistribution = new TDistribution(df);

    this.mean = ((ua + ub) / 2d) + tm;
    variance = (Math.Pow(ts, 2) * df / (df - 2)) + (Math.Pow(ub - ua, 2) / 12d);
}
/// <summary>
///   Confidence interval for the difference between the means of two factor levels.
/// </summary>
/// <param name="levelA">The first factor level.</param>
/// <param name="levelB">The second factor level.</param>
/// <param name="confidenceLevel">The confidence level (e.g. 0.95).</param>
/// <returns>The confidence interval for the mean difference.</returns>
public ClosedNeighborhood<double> meanDifferenceConfidenceInterval(int levelA, int levelB, double confidenceLevel)
{
    return new ClosedNeighborhood<double>(
        observationsAverageByLevel(levelA) - observationsAverageByLevel(levelB),
        TDistribution.UpperDividePoint((1 - confidenceLevel) / 2, this.degreesOfFreedomError)
            * ((errorSquareAverage
                * (1.0 / observationsCountByLevel(levelA) + 1.0 / observationsCountByLevel(levelB))
               ).Power(.5)));
}
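In the notation of the method above, and assuming UpperDividePoint(p, df) returns the upper-tail p quantile of the t distribution, the interval computed is the standard ANOVA pairwise comparison (a restatement of what the code does, not new material):

$$\bar{y}_A - \bar{y}_B \;\pm\; t_{\alpha/2,\;\nu_E}\,\sqrt{MS_E\left(\tfrac{1}{n_A} + \tfrac{1}{n_B}\right)}, \qquad \alpha = 1 - \text{confidenceLevel},$$

where $MS_E$ is the error mean square (errorSquareAverage) and $\nu_E$ the error degrees of freedom.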
public void InverseDistributionFunctionLeftTailTest()
{
    // Upper-tail probabilities corresponding to the value columns of the table below
    double[] a = { 0.1, 0.05, 0.025, 0.01, 0.005, 0.001, 0.0005 };

    // First column: degrees of freedom; remaining columns: critical values t such that P(T > t) = a
    double[,] expected =
    {
        {   1, 3.078, 6.314, 12.706, 31.821, 63.656, 318.289, 636.578 },
        {   2, 1.886, 2.920,  4.303,  6.965,  9.925,  22.328,  31.600 },
        {   3, 1.638, 2.353,  3.182,  4.541,  5.841,  10.214,  12.924 },
        {   4, 1.533, 2.132,  2.776,  3.747,  4.604,   7.173,   8.610 },
        {   5, 1.476, 2.015,  2.571,  3.365,  4.032,   5.894,   6.869 },
        {   6, 1.440, 1.943,  2.447,  3.143,  3.707,   5.208,   5.959 },
        {   7, 1.415, 1.895,  2.365,  2.998,  3.499,   4.785,   5.408 },
        {   8, 1.397, 1.860,  2.306,  2.896,  3.355,   4.501,   5.041 },
        {   9, 1.383, 1.833,  2.262,  2.821,  3.250,   4.297,   4.781 },
        {  10, 1.372, 1.812,  2.228,  2.764,  3.169,   4.144,   4.587 },
        {  11, 1.363, 1.796,  2.201,  2.718,  3.106,   4.025,   4.437 },
        {  12, 1.356, 1.782,  2.179,  2.681,  3.055,   3.930,   4.318 },
        {  13, 1.350, 1.771,  2.160,  2.650,  3.012,   3.852,   4.221 },
        {  14, 1.345, 1.761,  2.145,  2.624,  2.977,   3.787,   4.140 },
        {  15, 1.341, 1.753,  2.131,  2.602,  2.947,   3.733,   4.073 },
        {  16, 1.337, 1.746,  2.120,  2.583,  2.921,   3.686,   4.015 },
        {  17, 1.333, 1.740,  2.110,  2.567,  2.898,   3.646,   3.965 },
        {  18, 1.330, 1.734,  2.101,  2.552,  2.878,   3.610,   3.922 },
        {  19, 1.328, 1.729,  2.093,  2.539,  2.861,   3.579,   3.883 },
        {  20, 1.325, 1.725,  2.086,  2.528,  2.845,   3.552,   3.850 },
        {  21, 1.323, 1.721,  2.080,  2.518,  2.831,   3.527,   3.819 },
        {  22, 1.321, 1.717,  2.074,  2.508,  2.819,   3.505,   3.792 },
        {  23, 1.319, 1.714,  2.069,  2.500,  2.807,   3.485,   3.768 },
        {  24, 1.318, 1.711,  2.064,  2.492,  2.797,   3.467,   3.745 },
        {  25, 1.316, 1.708,  2.060,  2.485,  2.787,   3.450,   3.725 },
        {  26, 1.315, 1.706,  2.056,  2.479,  2.779,   3.435,   3.707 },
        {  27, 1.314, 1.703,  2.052,  2.473,  2.771,   3.421,   3.689 },
        {  28, 1.313, 1.701,  2.048,  2.467,  2.763,   3.408,   3.674 },
        {  29, 1.311, 1.699,  2.045,  2.462,  2.756,   3.396,   3.660 },
        {  30, 1.310, 1.697,  2.042,  2.457,  2.750,   3.385,   3.646 },
        {  60, 1.296, 1.671,  2.000,  2.390,  2.660,   3.232,   3.460 },
        { 120, 1.289, 1.658,  1.980,  2.358,  2.617,   3.160,   3.373 },
    };

    for (int i = 0; i < expected.GetLength(0); i++)
    {
        int df = (int)expected[i, 0];
        TDistribution target = new TDistribution(df);

        for (int j = 1; j < expected.GetLength(1); j++)
        {
            double actual = target.InverseDistributionFunction(1.0 - a[j - 1]);
            Assert.IsTrue(Math.Abs(expected[i, j] / actual - 1) < 1e-3);
        }
    }
}
public StudentUniformDistribution(double n, double degreesOfFreedom)
{
    ts = Math.Sqrt(1d / (Math.Pow(n, 2) + 1d));
    double a = n * ts * Math.Sqrt(3);

    df = degreesOfFreedom;
    ua = -a;
    ub = a;
    tm = 0;

    baseDistribution = new TDistribution(df);

    mean = 0;
    variance = (Math.Pow(ts, 2) * df / (df - 2)) + (Math.Pow(a, 2) / 3d);
}
public double GetSlopeConfidenceInterval(double alpha)
{
    if (this.n < 3L)
    {
        return Double.NaN;
    }

    if ((alpha >= 1.0D) || (alpha <= 0.0D))
    {
        // Originally: throw new OutOfRangeException(LocalizedFormats.SIGNIFICANCE_LEVEL, alpha, 0, 1);
        throw new ArgumentOutOfRangeException(nameof(alpha), "The significance level must lie strictly between 0 and 1.");
    }

    TDistribution distribution = new TDistribution(this.n - 2L);
    return GetSlopeStdErr() * distribution.inverseCumulativeProbability(1.0D - alpha / 2.0D);
}
public void VarianceTest()
{
    TDistribution target = new TDistribution(3);
    double actual = target.Variance;
    double expected = 3;
    Assert.AreEqual(expected, actual);

    target = new TDistribution(2);
    actual = target.Variance;
    expected = Double.PositiveInfinity;
    Assert.AreEqual(expected, actual);

    target = new TDistribution(1);
    actual = target.Variance;
    Assert.IsTrue(Double.IsNaN(actual));
}
public void TDistributionConstructorTest()
{
    int degreesOfFreedom = 4;
    TDistribution target = new TDistribution(degreesOfFreedom);
    Assert.AreEqual(degreesOfFreedom, target.DegreesOfFreedom);

    bool thrown = false;
    try
    {
        target = new TDistribution(0);
    }
    catch (ArgumentOutOfRangeException)
    {
        thrown = true;
    }
    Assert.IsTrue(thrown);

    thrown = false;
    try
    {
        target = new TDistribution(-1);
    }
    catch (ArgumentOutOfRangeException)
    {
        thrown = true;
    }
    Assert.IsTrue(thrown);
}
public StudentGeneralizedDistribution(double mean, double std, double degreesOfFreedom)
{
    ScaleCoefficient = std;
    this.mean = mean;
    DegreesOfFreedom = degreesOfFreedom;

    baseT = new TDistribution(degreesOfFreedom);
    baseNormal = new NormalDistribution(0, 1);
    gammaDistr = new GammaDistribution(2.0, 0.5 * degreesOfFreedom);

    if (degreesOfFreedom > 2)
    {
        variance = Math.Pow(ScaleCoefficient, 2) * degreesOfFreedom / (degreesOfFreedom - 2);
    }
    else
    {
        variance = double.NaN;
    }
}
public void MeanTest()
{
    TDistribution target;
    double actual;

    target = new TDistribution(1);
    actual = target.Mean;
    Assert.IsTrue(Double.IsNaN(actual));

    target = new TDistribution(2);
    actual = target.Mean;
    double expected = 0;
    Assert.AreEqual(expected, actual);

    target = new TDistribution(3);
    actual = target.Mean;
    expected = 0;
    Assert.AreEqual(expected, actual);
}
private LeastSquaresRegressionResult getResultWithStatistics(double[][] x, double[] y, double[] betas, double[] yModel, DoubleMatrix transpose, DoubleMatrix matrix, bool useIntercept)
{
    double yMean = 0.0;
    foreach (double y1 in y)
    {
        yMean += y1;
    }
    yMean /= y.Length;

    double totalSumOfSquares = 0.0;
    double errorSumOfSquares = 0.0;
    int n = x.Length;
    int k = betas.Length;
    double[] residuals = new double[n];
    double[] stdErrorBetas = new double[k];
    double[] tStats = new double[k];
    double[] pValues = new double[k];

    for (int i = 0; i < n; i++)
    {
        totalSumOfSquares += (y[i] - yMean) * (y[i] - yMean);
        residuals[i] = y[i] - yModel[i];
        errorSumOfSquares += residuals[i] * residuals[i];
    }

    double regressionSumOfSquares = totalSumOfSquares - errorSumOfSquares;
    double[][] covarianceBetas = convertArray(_algebra.getInverse(_algebra.multiply(transpose, matrix)).toArray());
    double rSquared = regressionSumOfSquares / totalSumOfSquares;
    double adjustedRSquared = 1.0 - (1 - rSquared) * (n - 1.0) / (n - k);
    double meanSquareError = errorSumOfSquares / (n - k);

    TDistribution studentT = new TDistribution(n - k);
    for (int i = 0; i < k; i++)
    {
        stdErrorBetas[i] = Math.Sqrt(meanSquareError * covarianceBetas[i][i]);
        tStats[i] = betas[i] / stdErrorBetas[i];
        pValues[i] = 1 - studentT.cumulativeProbability(Math.Abs(tStats[i]));
    }

    return new LeastSquaresRegressionResult(betas, residuals, meanSquareError, stdErrorBetas, rSquared, adjustedRSquared, tStats, pValues, useIntercept);
}
/// <summary>
///   Computes the power for a test with given values of the
///   <see cref="IPowerAnalysis.Effect">effect size</see> and
///   <see cref="IPowerAnalysis.Samples">number of samples</see>
///   under <see cref="IPowerAnalysis.Size"/>.
/// </summary>
///
/// <returns>
///   The power for the test under the given conditions.
/// </returns>
///
public override void ComputePower()
{
    double delta = Effect / Math.Sqrt(1.0 / Samples1 + 1.0 / Samples2);
    double df = Samples1 + Samples2 - 2;

    TDistribution td = new TDistribution(df);
    NoncentralTDistribution nt = new NoncentralTDistribution(df, delta);

    switch (Tail)
    {
        case DistributionTail.TwoTail:
        {
            double Ta = td.InverseDistributionFunction(1.0 - Size / 2);
            double pa = nt.ComplementaryDistributionFunction(+Ta);
            double pb = nt.DistributionFunction(-Ta);
            Power = pa + pb;
            break;
        }

        case DistributionTail.OneLower:
        {
            double Ta = td.InverseDistributionFunction(Size);
            Power = nt.DistributionFunction(Ta);
            break;
        }

        case DistributionTail.OneUpper:
        {
            double Ta = td.InverseDistributionFunction(1.0 - Size);
            Power = nt.ComplementaryDistributionFunction(Ta);
            break;
        }

        default:
            throw new InvalidOperationException();
    }
}
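The two-tailed branch above can also be exercised directly against the distribution classes; a standalone sketch in which the effect size, group sizes and significance level are made-up illustrative inputs:

// Illustrative inputs, not from the original source
double effect = 0.5, samples1 = 30.0, samples2 = 30.0, size = 0.05;

double delta = effect / Math.Sqrt(1.0 / samples1 + 1.0 / samples2);
double df = samples1 + samples2 - 2;

var td = new TDistribution(df);
var nt = new NoncentralTDistribution(df, delta);

// Two-tailed power: probability mass of the noncentral t beyond the central critical values
double Ta = td.InverseDistributionFunction(1.0 - size / 2);
double power = nt.ComplementaryDistributionFunction(+Ta) + nt.DistributionFunction(-Ta);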
public void DistributionFunctionTest()
{
    TDistribution target = new TDistribution(1);

    double expected = 0.5;
    double actual = target.DistributionFunction(0);
    Assert.IsFalse(Double.IsNaN(actual));
    Assert.AreEqual(expected, actual, 1e-15);

    expected = 0.92559723470138278;
    actual = target.DistributionFunction(4.2);
    Assert.AreEqual(expected, actual);

    target = new TDistribution(2);

    expected = 0.5;
    actual = target.DistributionFunction(0);
    Assert.AreEqual(expected, actual);

    expected = 0.97385836652685043;
    actual = target.DistributionFunction(4.2);
    Assert.AreEqual(expected, actual);

    target = new TDistribution(3);

    expected = 0.5;
    actual = target.DistributionFunction(0);
    Assert.IsFalse(Double.IsNaN(actual));
    Assert.AreEqual(expected, actual, 1e-15);

    expected = 0.98768396091153043;
    actual = target.DistributionFunction(4.2);
    Assert.AreEqual(expected, actual);

    expected = 0.16324737815131229;
    actual = target.DistributionFunction(-1.17);
    Assert.AreEqual(expected, actual);
}
public RealMatrix GetCorrelationPValues()
{
    TDistribution tDistribution = new TDistribution(this.nObs - 2);
    int nVars = this.correlationMatrix.getColumnDimension();
    double[][] outVar = new double[nVars][];

    for (int i = 0; i < nVars; i++)
    {
        // Allocate each row of the jagged array before writing into it.
        outVar[i] = new double[nVars];

        for (int j = 0; j < nVars; j++)
        {
            if (i == j)
            {
                outVar[i][j] = 0.0D;
            }
            else
            {
                double r = this.correlationMatrix.getEntry(i, j);
                double t = Math.Abs(r * Math.Sqrt((this.nObs - 2) / (1.0D - r * r)));
                outVar[i][j] = 2.0D * tDistribution.cumulativeProbability(-t);
            }
        }
    }

    return new BlockRealMatrix(outVar);
}
/// <summary>
///   Rejection region for the test of H0: b = 0.
///   If H0 is rejected, the linear relationship is significant.
/// </summary>
public ISet<IReal> conspicuousDomain(double alpha)
{
    return (ISet<IReal>)(
        new IntervalLeftOpenRightInfinite<IReal>(
            (Real<double>)TDistribution.AStudT(alpha, pointsCount - 2))
        + new IntervalLeftInfiniteRightOpen<IReal>(
            (Real<double>)TDistribution.AStudT(alpha, pointsCount - 2)));
}
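For reference, the domain returned above appears to encode the usual two-sided t test for the slope, with $\hat{b}$ the fitted slope and $\operatorname{se}(\hat{b})$ its standard error (a restatement of standard theory, not taken from the original source):

$$\text{reject } H_0\colon b = 0 \quad\text{when}\quad \left|\frac{\hat{b}}{\operatorname{se}(\hat{b})}\right| > t_{\alpha/2,\;n-2}.$$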
public static double Student(double significance_level, int degrees_of_freedom)
{
    var td = new TDistribution(degrees_of_freedom);
    return Math.Abs(td.InverseDistributionFunction(significance_level));
}
/// <summary>
/// Returns a new line chart plotting the specified function of the given distribution for 0.0001 <= p <= 0.9999.
/// </summary>
/// <param name="dist">The distribution.</param>
/// <param name="function">The distribution function to plot.</param>
/// <param name="numInterpolatedValues">The number of interpolated values.</param>
/// <returns>A new chart.</returns>
public static ChartControl ToChart(TDistribution dist, DistributionFunction function = DistributionFunction.PDF, int numInterpolatedValues = 100)
{
    ChartControl chart = GetDefaultChart();
    Update(ref chart, dist, function, numInterpolatedValues);
    return chart;
}
/// <summary>
/// Updates the given chart with the specified distribution.
/// </summary>
/// <param name="chart">A chart.</param>
/// <param name="dist">The distribution.</param>
/// <param name="function">The distribution function to plot.</param>
/// <param name="numInterpolatedValues">The number of interpolated values.</param>
/// <remarks>
/// Plots the specified function of the given distribution for 0.0001 <= p <= 0.9999.
/// <br/>
/// Titles are added only if chart does not currently contain any titles.
/// <br/>
/// chart.Series[0] is replaced, or added if necessary.
/// </remarks>
public static void Update(ref ChartControl chart, TDistribution dist, DistributionFunction function = DistributionFunction.PDF, int numInterpolatedValues = 100)
{
    List<string> titles = new List<string>()
    {
        "TDistribution",
        String.Format("df={0}", dist.DegreesOfFreedom)
    };
    UpdateContinuousDistribution(ref chart, dist, titles, function, numInterpolatedValues);
}
/// <summary>
/// Shows a new chart in a default form.
/// </summary>
/// <param name="dist">The distribution.</param>
/// <param name="function">The distribution function to plot.</param>
/// <param name="numInterpolatedValues">The number of interpolated values.</param>
/// <remarks>
/// Equivalent to:
/// <code>
/// NMathStatsChart.Show( ToChart( dist, function, numInterpolatedValues ) );
/// </code>
/// </remarks>
public static void Show(TDistribution dist, DistributionFunction function = DistributionFunction.PDF, int numInterpolatedValues = 100)
{
    Show(ToChart(dist, function, numInterpolatedValues));
}
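A small usage sketch tying the three chart helpers above together; it assumes, as the remarks suggest, that they are static members of NMathStatsChart, and the degrees-of-freedom value is illustrative:

// Plot the PDF of a t distribution with 5 degrees of freedom using the default settings.
TDistribution dist = new TDistribution(5);
ChartControl chart = NMathStatsChart.ToChart(dist);   // DistributionFunction.PDF, 100 interpolated values
NMathStatsChart.Show(chart);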
public ProcessingReturnValues FitAndPlotSlowMotion(
    Dictionary<int, SingleMultiFrameMeasurement> measurements,
    FlybyMeasurementContext meaContext,
    GetProcessingValueCallback getValueCallback,
    Graphics g,
    FlybyPlottingContext plottingContext,
    float xScale,
    float yScale,
    int imageWidth)
{
    // Compute median, use median based exclusion rules
    // Report the median position for the time at the middle of the measured interval
    // Do not expect elongated image (no corrections from the exposure)
    // May apply instrumental delay corrections for the frame time

    var rv = new ProcessingReturnValues();

    double sum = 0;
    double userSum = 0;
    double stdDevUserSum = 0;
    int numFramesUser = 0;

    double userMidFrom = meaContext.UserMidValue - meaContext.MaxStdDev;
    double userMidTo = meaContext.UserMidValue + meaContext.MaxStdDev;

    rv.EarliestFrame = int.MaxValue;
    rv.LatestFrame = int.MinValue;

    List<double> medianList = new List<double>();
    List<double> medianWeightsList = new List<double>();

    var minPosUncertaintyArcSec = TangraConfig.Settings.Astrometry.AssumedPositionUncertaintyPixels * meaContext.ArsSecsInPixel;

    foreach (SingleMultiFrameMeasurement measurement in measurements.Values)
    {
        float x = (measurement.FrameNo - meaContext.MinFrameNo) * xScale + 5;

        ProcessingValues val = getValueCallback(measurement);

        double valueFrom = val.Value - val.StdDev;
        double valueTo = val.Value + val.StdDev;

        float yFrom = (float)(valueFrom - plottingContext.MinValue) * yScale + 5;
        float yTo = (float)(valueTo - plottingContext.MinValue) * yScale + 5;

        sum += val.Value;

        Pen mPen = plottingContext.IncludedPen;
        if (!double.IsNaN(meaContext.UserMidValue))
        {
            if ((valueFrom >= userMidFrom && valueFrom <= userMidTo) ||
                (valueTo >= userMidFrom && valueTo <= userMidTo))
            {
                numFramesUser++;
                userSum += val.Value;
                medianList.Add(val.Value);
                medianWeightsList.Add(ComputePositionWeight(val.StdDev, measurement, minPosUncertaintyArcSec, WeightingMode.SNR));
                stdDevUserSum += val.StdDev * val.StdDev;

                if (rv.EarliestFrame > measurement.FrameNo)
                {
                    rv.EarliestFrame = measurement.FrameNo;
                }
                if (rv.LatestFrame < measurement.FrameNo)
                {
                    rv.LatestFrame = measurement.FrameNo;
                }
            }
            else
            {
                mPen = plottingContext.ExcludedPen;
            }
        }

        g.DrawLine(mPen, x, yFrom, x, yTo);
        g.DrawLine(mPen, x - 1, yFrom, x + 1, yFrom);
        g.DrawLine(mPen, x - 1, yTo, x + 1, yTo);
    }

    if (!double.IsNaN(meaContext.UserMidValue) && numFramesUser > 0)
    {
        double average = userSum / numFramesUser;
        double err = Math.Sqrt(stdDevUserSum) / (numFramesUser - 1);

        float yAve = (float)(average - plottingContext.MinValue) * yScale + 5;
        g.DrawLine(plottingContext.AveragePen, 5, yAve - 1, imageWidth - 5, yAve - 1);
        g.DrawLine(plottingContext.AveragePen, 5, yAve, imageWidth - 5, yAve);
        g.DrawLine(plottingContext.AveragePen, 5, yAve + 1, imageWidth - 5, yAve + 1);

        float yMin = (float)(userMidFrom - plottingContext.MinValue) * yScale + 5;
        float yMax = (float)(userMidTo - plottingContext.MinValue) * yScale + 5;
        g.DrawLine(plottingContext.AveragePen, 5, yMin, imageWidth - 5, yMin);
        g.DrawLine(plottingContext.AveragePen, 5, yMax, imageWidth - 5, yMax);

        double median;
        double medianWeight;
        WeightedMedian(Tuple.Create(medianList, medianWeightsList), out median, out medianWeight);
        double standardMedian = medianList.Median();

        Trace.WriteLine(string.Format("{0}; Included: {1}; Average: {2}; Weighted Median: {3}; Standard Median: {4}",
            meaContext.UserMidValue.ToString("0.00000"), numFramesUser,
            AstroConvert.ToStringValue(average, "+HH MM SS.TTT"),
            AstroConvert.ToStringValue(median, "+HH MM SS.TTT"),
            AstroConvert.ToStringValue(standardMedian, "+HH MM SS.TTT")));

        rv.FittedValue = median;

        var stdDevArcSec = 3600 * Math.Sqrt(medianList.Sum(x => (x - median) * (x - median)) / (medianList.Count - 1));
        var tCoeff95 = TDistribution.CalculateCriticalValue(medianList.Count, (1 - 0.95), 0.0001);
        var error95 = 1.253 * tCoeff95 * stdDevArcSec / Math.Sqrt(medianList.Count);

        rv.FittedValueUncertaintyArcSec = error95;
        rv.IsVideoNormalPosition = false;
    }
    else
    {
        double average = sum / measurements.Count;
        float yAve = (float)(average - plottingContext.MinValue) * yScale + 5;
        g.DrawLine(Pens.WhiteSmoke, 5, yAve, imageWidth - 5, yAve);

        rv.FittedValue = double.NaN;
    }

    return rv;
}
public void MedianTest()
{
    TDistribution target = new TDistribution(7.6);
    Assert.AreEqual(target.Median, target.InverseDistributionFunction(0.5));
}