public void KolmogorovSmirnovTestConstructorTest()
{
    // Sample tested against the standard Uniform distribution.
    // Reference: http://www.math.nsysu.edu.tw/~lomn/homepage/class/92/kstest/kolmogorov.pdf
    double[] observations =
    {
        0.621, 0.503, 0.203, 0.477, 0.710,
        0.581, 0.329, 0.480, 0.554, 0.382
    };

    var uniform = UniformContinuousDistribution.Standard;

    // Null hypothesis: the sample comes from a standard uniform distribution.
    // Alternate hypothesis: the sample is not from a standard uniform distribution.
    var kstest = new KolmogorovSmirnovTest(observations, uniform);

    Assert.AreEqual(uniform, kstest.TheoreticalDistribution);
    Assert.AreEqual(KolmogorovSmirnovTestHypothesis.SampleIsDifferent, kstest.Hypothesis);
    Assert.AreEqual(DistributionTail.TwoTail, kstest.Tail);

    Assert.AreEqual(0.29, kstest.Statistic, 1e-16);
    Assert.AreEqual(0.3067, kstest.PValue, 1e-4);
    Assert.IsFalse(Double.IsNaN(kstest.Statistic));

    // The null hypothesis fails to be rejected:
    // the sample can be from a uniform distribution.
    Assert.IsFalse(kstest.Significant);
}
static void weightRun()
{
    DinamicTimeWrapping DTW = new DinamicTimeWrapping();
    KolmogorovSmirnovTest KST = new KolmogorovSmirnovTest();
    Duration D = new Duration();

    // Result tables, indexed as [feature column, signature row, compared signature].
    // BUG FIX: the original code indexed these arrays as
    // [user * 40 + signId, signId2, column], which (a) put a row index of up
    // to 1620 into the size-3 first dimension and (b) used 1-based indices
    // that overflow the 1600/40 bounds — an unconditional
    // IndexOutOfRangeException. The zero-based indices below match the
    // declared [3, 1600, 40] shapes.
    double[,,] dtwRes = new double[3, 1600, 40];
    double[,,] kstRes = new double[3, 1600, 40];
    double[,] durRes = new double[1600, 40];

    for (int user = 1; user <= 40; user++)
    {
        for (int signId = 1; signId <= 20; signId++)
        {
            List<List<double>> sign = InPut.readSample.GetDataFromFile(user, signId);

            // Zero-based row for this (user, signature) pair; stays below 1600.
            int row = (user - 1) * 40 + (signId - 1);

            for (int signId2 = 1; signId2 <= 40; signId2++)
            {
                List<List<double>> sign2 = InPut.readSample.GetDataFromFile(user, signId2);
                int col = signId2 - 1;

                durRes[row, col] = D.calculate(sign, sign2);

                for (int column = 0; column < 2; column++)
                {
                    kstRes[column, row, col] = KST.Calculate(sign[column], sign2[column]);
                    dtwRes[column, row, col] = DTW.Calculate(sign[column], sign2[column]);
                }
            }
        }
    }

    // TODO: for lm in user, sign
    //          for i in user, sign
    //              count value for KST, DTW, Dur, PenUp
    // TODO: PLOT
}
public void MutationDistributionDoesNotDepartFromNormalDistribution()
{
    // Use an unbounded continuous domain.
    this._continuousDomain = new ContinuousDomain();

    // Fix the allele to mutate and derive the variance percentage.
    // The halving avoids overflow while still expressing 0.5 / (max - min).
    var valueToMutate = new Allele<double>(3.4);
    double halfRange = (this._continuousDomain.Maximum / 2) - (this._continuousDomain.Minimum / 2);
    double variancePercentage = (0.5 / 2) / halfRange;

    // Gather the results of a large number of mutations.
    var mutations = new double[ContinuousDomainTest.TriesForRandomTests];
    for (int i = 0; i < ContinuousDomainTest.TriesForRandomTests; i++)
    {
        mutations[i] = (double)this._continuousDomain
            .MutateGeneValue(valueToMutate, variancePercentage)
            .GetValue();
    }

    // Check normality via the Kolmogorov-Smirnov test.
    double stdDev = Math.Sqrt(0.5);
    var normalityTest = new KolmogorovSmirnovTest(
        sample: mutations,
        hypothesizedDistribution: new NormalDistribution(mean: valueToMutate.GetValue(), stdDev: stdDev));

    Assert.False(
        double.IsNaN(normalityTest.PValue) || normalityTest.Significant,
        $"Mutation was found to be not normal by the Kolmogorov-Smirnov test with significance level of {normalityTest.Size}.");
}
protected virtual void RunTestOfFit()
{
    // Score the candidate distribution by the p-value of a one-sample
    // Kolmogorov-Smirnov goodness-of-fit test against the data.
    // (An AndersonDarlingTest could be substituted here as an alternative.)
    var goodnessOfFit = new KolmogorovSmirnovTest(this.data.ToArray(), testDistribution);
    this.score = goodnessOfFit.PValue;
}
public void MutationDistributionDoesNotDepartFromNormalDistributionInLogSpace()
{
    // Use an effectively unbounded, log-scaled domain.
    var domain = new LogDomain(1.0 / double.MaxValue, double.MaxValue);

    // Fix the allele to mutate and a small variance percentage.
    var valueToMutate = new Allele<double>(3.4);
    double variancePercentage = 0.000001;

    // Gather the logarithms of many mutation results.
    int numberRuns = 1000;
    var mutationsInLogSpace = new double[numberRuns];
    for (int i = 0; i < numberRuns; i++)
    {
        double mutated = (double)domain.MutateGeneValue(valueToMutate, variancePercentage).GetValue();
        mutationsInLogSpace[i] = Math.Log(mutated);
    }

    // Check normality in log space via the Kolmogorov-Smirnov test.
    double stdDev = Math.Sqrt(variancePercentage * (Math.Log(domain.Maximum) - Math.Log(domain.Minimum)));
    var normalityTest = new KolmogorovSmirnovTest(
        sample: mutationsInLogSpace,
        hypothesizedDistribution: new NormalDistribution(mean: Math.Log(valueToMutate.GetValue()), stdDev: stdDev));

    Assert.False(
        double.IsNaN(normalityTest.PValue) || normalityTest.Significant,
        $"Mutation was found to be not normal by the Kolmogorov-Smirnov test with significance level of {normalityTest.Size}.");
}
public void SampleFromTruncatedNormalDoesNotDepartFromNormalDistributionForNoTruncation()
{
    // Fix the target mean and variance.
    double mean = 3.4;
    double variance = 0.2;

    // Draw a large sample with the truncation bounds effectively disabled.
    int numberRuns = 10000;
    var results = new double[numberRuns];
    for (int i = 0; i < numberRuns; i++)
    {
        results[i] = Randomizer.Instance.SampleFromTruncatedNormal(
            mean,
            standardDeviation: Math.Sqrt(variance),
            minimum: double.MinValue,
            maximum: double.MaxValue);
    }

    // Compare against the plain normal distribution via the Kolmogorov-Smirnov test.
    var expected = new NormalDistribution(mean, stdDev: Math.Sqrt(variance));
    var normalityTest = new KolmogorovSmirnovTest(results, expected);

    Assert.False(
        double.IsNaN(normalityTest.PValue) || normalityTest.Significant,
        $"Truncated normal without truncation was identified as non normal by the Kolmogorov-Smirnov test with significance level of {normalityTest.Size}.");
}
public void TheoreticalDistributionTest()
{
    // The distribution handed to the test must be exposed
    // unchanged through TheoreticalDistribution.
    double[] sample = { 1, 5, 3, 1, 5, 2, 1 };
    UnivariateContinuousDistribution expected = NormalDistribution.Standard;

    var test = new KolmogorovSmirnovTest(sample, expected);

    Assert.AreEqual(expected, test.TheoreticalDistribution);
}
protected override void EndProcessing()
{
    // Translate the cmdlet's alternate-hypothesis parameter, run the
    // one-sample Kolmogorov-Smirnov test, and emit the result object.
    var hypothesis = TestingHelper.GetKolmogorovSmirnovTestHypothesis(Alternate);
    var test = new KolmogorovSmirnovTest(_data.ToArray(), HypothesizedDistribution, hypothesis)
    {
        Size = Size
    };
    WriteObject(test);
}
public void RNGVentura_KolmogorovSmirnovTest_Approaches_Uniform_Continuous_Distribution()
{
    // The generator's output should look uniform on [0, 1] and clearly non-normal.
    double[] sample = GenerateFloatingPointNumberArray();

    var uniformTest = new KolmogorovSmirnovTest(sample, UniformContinuousDistribution.Standard);
    var normalTest = new KolmogorovSmirnovTest(sample, NormalDistribution.Standard);

    // No significant deviation from a uniform continuous distribution.
    uniformTest.Significant.Should().BeFalse();

    // Significant deviation from a normal distribution.
    normalTest.Significant.Should().BeTrue();
}
public void KolmogorovSmirnovTestConstructorTest2()
{
    // The same (roughly uniform) sample tested against the standard Normal.
    double[] observations =
    {
        0.621, 0.503, 0.203, 0.477, 0.710,
        0.581, 0.329, 0.480, 0.554, 0.382
    };
    var normal = NormalDistribution.Standard;

    var test = new KolmogorovSmirnovTest(observations, normal);

    Assert.AreEqual(normal, test.TheoreticalDistribution);
    Assert.AreEqual(Hypothesis.TwoTail, test.Hypothesis);

    Assert.AreEqual(0.580432, test.Statistic, 1e-5);
    Assert.AreEqual(0.000999, test.PValue, 1e-5);
    Assert.IsFalse(Double.IsNaN(test.Statistic));
}
public void KolmogorovSmirnovTestConstructorTest4()
{
    // One-sided test: is the sample's distribution smaller than a Normal one?
    double[] observations =
    {
        0.621, 0.503, 0.203, 0.477, 0.710,
        0.581, 0.329, 0.480, 0.554, 0.382
    };
    var normal = NormalDistribution.Standard;

    var test = new KolmogorovSmirnovTest(observations, normal,
        KolmogorovSmirnovTestHypothesis.SampleIsSmaller);

    Assert.AreEqual(normal, test.TheoreticalDistribution);
    Assert.AreEqual(Hypothesis.OneLower, test.Hypothesis);

    Assert.AreEqual(0.580432, test.Statistic, 1e-5);
    Assert.AreEqual(0.000499, test.PValue, 1e-5);
    Assert.IsFalse(Double.IsNaN(test.Statistic));
}
public void KolmogorovSmirnovTestConstructorTest()
{
    // Sample tested against the standard continuous Uniform distribution.
    // Reference: http://www.math.nsysu.edu.tw/~lomn/homepage/class/92/kstest/kolmogorov.pdf
    double[] observations =
    {
        0.621, 0.503, 0.203, 0.477, 0.710,
        0.581, 0.329, 0.480, 0.554, 0.382
    };
    var uniform = ContinuousUniformDistribution.Standard;

    var test = new KolmogorovSmirnovTest(observations, uniform);

    Assert.AreEqual(uniform, test.TheoreticalDistribution);
    Assert.AreEqual(Hypothesis.TwoTail, test.Hypothesis);

    Assert.AreEqual(0.29, test.Statistic, 1e-16);
    Assert.AreEqual(0.3067, test.PValue, 1e-4);
    Assert.IsFalse(Double.IsNaN(test.Statistic));
}
public void KolmogorovSmirnovTestConstructorTest3()
{
    // One-sided test: is the sample's distribution greater than a
    // standard Normal distribution?
    double[] observations =
    {
        0.621, 0.503, 0.203, 0.477, 0.710,
        0.581, 0.329, 0.480, 0.554, 0.382
    };
    var normal = NormalDistribution.Standard;

    var test = new KolmogorovSmirnovTest(observations, normal,
        KolmogorovSmirnovTestHypothesis.SampleIsGreater);

    Assert.AreEqual(normal, test.TheoreticalDistribution);
    Assert.AreEqual(KolmogorovSmirnovTestHypothesis.SampleIsGreater, test.Hypothesis);
    Assert.AreEqual(DistributionTail.OneUpper, test.Tail);

    Assert.AreEqual(0.238852, test.Statistic, 1e-5);
    Assert.AreEqual(0.275544, test.PValue, 1e-5);
    Assert.IsFalse(Double.IsNaN(test.Statistic));
}
public void KolmogorovSmirnovTestConstructorTest()
{
    // Test against a standard Uniform distribution.
    // Reference: http://www.math.nsysu.edu.tw/~lomn/homepage/class/92/kstest/kolmogorov.pdf

    // Given a new sample, we would like to know whether it could have
    // originated from a uniform continuous distribution.
    double[] sample =
    {
        0.621, 0.503, 0.203, 0.477, 0.710,
        0.581, 0.329, 0.480, 0.554, 0.382
    };

    // The distribution we would like to test against:
    var distribution = UniformContinuousDistribution.Standard;

    // Null hypothesis: the sample comes from a standard uniform distribution.
    // Alternate hypothesis: the sample is not from a standard uniform distribution.
    var kstest = new KolmogorovSmirnovTest(sample, distribution);

    double statistic = kstest.Statistic;   // 0.29
    double pvalue = kstest.PValue;         // 0.3067
    bool significant = kstest.Significant; // false

    // The null hypothesis could not be rejected, so the sample may well be
    // from a uniform distribution. Note that this does not prove the sample
    // *is* uniform — it only means the possibility could not be ruled out.
    Assert.AreEqual(distribution, kstest.TheoreticalDistribution);
    Assert.AreEqual(KolmogorovSmirnovTestHypothesis.SampleIsDifferent, kstest.Hypothesis);
    Assert.AreEqual(DistributionTail.TwoTail, kstest.Tail);

    Assert.AreEqual(0.29, statistic, 1e-16);
    Assert.AreEqual(0.3067, pvalue, 1e-4);
    Assert.IsFalse(Double.IsNaN(pvalue));

    Assert.IsFalse(kstest.Significant);
}
public void EmpiricalDistributionTest()
{
    // The empirical distribution must hold a sorted copy of the sample,
    // not the original array instance.
    double[] sample = { 1, 5, 3, 1, 5, 2, 1 };
    UnivariateContinuousDistribution distribution = NormalDistribution.Standard;
    var test = new KolmogorovSmirnovTest(sample, distribution);

    EmpiricalDistribution empirical = test.EmpiricalDistribution;
    Assert.AreNotSame(sample, empirical.Samples);

    Array.Sort(sample);
    for (int i = 0; i < sample.Length; i++)
    {
        Assert.AreEqual(sample[i], empirical.Samples[i]);
    }
}
public void KolmogorovSmirnovTestConstructorTest2()
{
    // Test against a Normal distribution.

    // Could the same sample from the previous example have originated
    // from a standard Normal (Gaussian) distribution?
    double[] sample =
    {
        0.621, 0.503, 0.203, 0.477, 0.710,
        0.581, 0.329, 0.480, 0.554, 0.382
    };

    // Earlier we could not rule out that this sample was uniform, meaning it
    // is not far from uniform — and therefore should be far from what a
    // Normal distribution would produce.
    NormalDistribution distribution = NormalDistribution.Standard;

    var kstest = new KolmogorovSmirnovTest(sample, distribution);

    double statistic = kstest.Statistic;   // 0.580432
    double pvalue = kstest.PValue;         // 0.000999
    bool significant = kstest.Significant; // true

    // The test rejects the null hypothesis — a strong indication that the
    // sample does not come from a Normal distribution, just as expected.
    Assert.AreEqual(distribution, kstest.TheoreticalDistribution);
    Assert.AreEqual(KolmogorovSmirnovTestHypothesis.SampleIsDifferent, kstest.Hypothesis);
    Assert.AreEqual(DistributionTail.TwoTail, kstest.Tail);

    Assert.AreEqual(0.580432, kstest.Statistic, 1e-5);
    Assert.AreEqual(0.000999, kstest.PValue, 1e-5);
    Assert.IsFalse(Double.IsNaN(kstest.Statistic));

    // The null hypothesis can be rejected:
    // the sample is not from a standard Normal distribution.
    Assert.IsTrue(kstest.Significant);
}
public static IHypothesisTestingTwoSample CreateTests(HypothesisTests test)
{
    // Factory: map the requested test kind to a concrete two-sample test.
    // Unknown kinds yield null, matching the original fall-through behavior.
    switch (test)
    {
        case HypothesisTests.TTest:
            return new StudentTTest();

        case HypothesisTests.MannWhitneyU:
            return new MannWhitneyTest();

        case HypothesisTests.KolmogorovSmirnov:
            return new KolmogorovSmirnovTest();

        default:
            return null;
    }
}
public void KolmogorovSmirnovTestConstructorTest2()
{
    // Test against a standard Normal distribution.
    double[] observations =
    {
        0.621, 0.503, 0.203, 0.477, 0.710,
        0.581, 0.329, 0.480, 0.554, 0.382
    };

    // The sample is most likely from a uniform distribution, so it would
    // most likely differ from a standard normal distribution.
    var normal = NormalDistribution.Standard;

    var test = new KolmogorovSmirnovTest(observations, normal);

    Assert.AreEqual(normal, test.TheoreticalDistribution);
    Assert.AreEqual(KolmogorovSmirnovTestHypothesis.SampleIsDifferent, test.Hypothesis);
    Assert.AreEqual(DistributionTail.TwoTail, test.Tail);

    Assert.AreEqual(0.580432, test.Statistic, 1e-5);
    Assert.AreEqual(0.000999, test.PValue, 1e-5);
    Assert.IsFalse(Double.IsNaN(test.Statistic));

    // The null hypothesis can be rejected:
    // the sample is not from a standard Normal distribution.
    Assert.IsTrue(test.Significant);
}
public void KolmogorovSmirnovTestConstructorTest3()
{
    // One-sided test: is the sample's distribution greater than a Normal one?
    double[] observations =
    {
        0.621, 0.503, 0.203, 0.477, 0.710,
        0.581, 0.329, 0.480, 0.554, 0.382
    };
    var normal = NormalDistribution.Standard;

    var test = new KolmogorovSmirnovTest(observations, normal,
        KolmogorovSmirnovTestHypothesis.SampleIsGreater);

    Assert.AreEqual(normal, test.TheoreticalDistribution);
    Assert.AreEqual(Hypothesis.OneUpper, test.Hypothesis);

    Assert.AreEqual(0.238852, test.Statistic, 1e-5);
    Assert.AreEqual(0.275544, test.PValue, 1e-5);
    Assert.IsFalse(Double.IsNaN(test.Statistic));
}
public void EmpiricalDistributionTest()
{
    // The empirical distribution should expose a sorted copy of the
    // sample rather than the original array instance.
    double[] sample = { 1, 5, 3, 1, 5, 2, 1 };
    UnivariateContinuousDistribution distribution = NormalDistribution.Standard;
    var test = new KolmogorovSmirnovTest(sample, distribution);

    EmpiricalDistribution empirical = test.EmpiricalDistribution;
    Assert.AreNotSame(sample, empirical.Samples);

    Array.Sort(sample);
    for (int i = 0; i < sample.Length; i++)
    {
        Assert.AreEqual(sample[i], empirical.Samples[i]);
    }
}
public void KolmogorovSmirnovTestConstructorTest4()
{
    // One-sided test: is the sample's distribution smaller than a
    // standard Normal distribution?
    double[] observations =
    {
        0.621, 0.503, 0.203, 0.477, 0.710,
        0.581, 0.329, 0.480, 0.554, 0.382
    };
    var normal = NormalDistribution.Standard;

    var test = new KolmogorovSmirnovTest(observations, normal,
        KolmogorovSmirnovTestHypothesis.SampleIsSmaller);

    Assert.AreEqual(normal, test.TheoreticalDistribution);
    Assert.AreEqual(KolmogorovSmirnovTestHypothesis.SampleIsSmaller, test.Hypothesis);
    Assert.AreEqual(DistributionTail.OneLower, test.Tail);

    Assert.AreEqual(0.580432, test.Statistic, 1e-5);
    Assert.AreEqual(0.000499, test.PValue, 1e-5);
    Assert.IsFalse(Double.IsNaN(test.Statistic));
}
static void MainRun(string[] args)
{
    // Extremes of the fused score across all users.
    double FINAL_maxbad = 0;
    double FINAL_mingood = 1;

    int userCount = 40;

    // Error counters for the fused (Bayes) decision and each single feature.
    double falsePozitive = 0;
    double falseNegative = 0;
    double kstFP = 0, kstFN = 0;
    double dtwFP = 0, dtwFN = 0;
    double durFP = 0, durFN = 0;

    List<List<List<double>>> teacherMatrix;
    List<List<double>> testedMatrix;

    DinamicTimeWrapping DTW = new DinamicTimeWrapping();
    KolmogorovSmirnovTest KST = new KolmogorovSmirnovTest();
    Duration D = new Duration();

    // BUG FIX: the original loop iterated over "i" but the body read the
    // undefined variable "userId"; the loop variable itself is the user id.
    for (int userId = 1; userId <= userCount; userId++)
    {
        // Train all three classifiers on this user's reference signatures.
        teacherMatrix = InPut.readSample.GetTeachers(userId);
        KST.Teach(teacherMatrix, 2);
        DTW.Teach(teacherMatrix, 2);
        D.Teach(teacherMatrix);

        // Test 30 further samples; j <= 10 are treated as genuine,
        // the rest as forgeries (see the branches below).
        for (int j = 1; j < 31; j++)
        {
            double kstRET = 0.5;
            double dtwRET = 0.5;
            double durRET = 0.5;
            double bayes = 0.5;

            testedMatrix = InPut.readSample.GetDataFromFile(userId, 10 + j);

            kstRET = KST.Test(teacherMatrix, testedMatrix[2], 2);
            dtwRET = DTW.Test(teacherMatrix, testedMatrix[2], 2);
            durRET = D.TestMethod(testedMatrix);

            // Fuse the three likelihoods into a single decision value.
            bayes = LikelihoodFusion.BayesTrap(kstRET, LikelihoodFusion.BayesTrap(dtwRET, durRET));

            if (j <= 10)
            {
                // Genuine sample: a score below 0.5 is a false negative.
                if (FINAL_mingood > bayes) { FINAL_mingood = bayes; }
                if (kstRET < 0.5) { kstFN++; }
                if (dtwRET < 0.5) { dtwFN++; }
                if (durRET < 0.5) { durFN++; }
                if (bayes < 0.5) { falseNegative++; }
            }
            else
            {
                // Forged sample: a score at or above 0.5 is a false positive.
                if (FINAL_maxbad < bayes) { FINAL_maxbad = bayes; }
                if (kstRET >= 0.5) { kstFP++; }
                if (dtwRET >= 0.5) { dtwFP++; }
                if (durRET >= 0.5) { durFP++; }
                if (bayes >= 0.5) { falsePozitive++; }
            }

            Console.WriteLine("U:" + userId + " S:" + j + " KST: " + kstRET + " DTW: " + dtwRET + " Dur: " + durRET + " FINAL: " + bayes);

            // Blank line between the genuine and forged groups.
            if (j == 10) { Console.WriteLine(); }
        }

        Console.WriteLine("\n");
    }

    // Summary: 10 genuine and 20 forged samples per user.
    Console.WriteLine("KST FN: " + kstFN / (10 * userCount));
    Console.WriteLine("KST FP: " + kstFP / (20 * userCount));
    Console.WriteLine("KST sum: " + (2 * kstFN + kstFP) / (40 * userCount) + "\n");

    Console.WriteLine("DTW FN: " + dtwFN / (10 * userCount));
    Console.WriteLine("DTW FP: " + dtwFP / (20 * userCount));
    Console.WriteLine("DTW sum: " + (2 * dtwFN + dtwFP) / (40 * userCount) + "\n");

    // BUG FIX: these three duration statistics were mislabeled "KST"
    // in the original (copy-paste error).
    Console.WriteLine("Dur FN: " + durFN / (10 * userCount));
    Console.WriteLine("Dur FP: " + durFP / (20 * userCount));
    Console.WriteLine("Dur sum: " + (2 * durFN + durFP) / (40 * userCount) + "\n");

    Console.WriteLine("____BAYES_____");
    Console.WriteLine("FN: " + falseNegative / (10 * userCount));
    Console.WriteLine("FP: " + falsePozitive / (20 * userCount));
    Console.WriteLine("sum: " + (2 * falseNegative + falsePozitive) / (40 * userCount) + "\n");

    Console.WriteLine("________________________________________________");
    Console.WriteLine("FINAL_mingood: " + FINAL_mingood);
    Console.WriteLine("FINAL_maxbad: " + FINAL_maxbad);
    Console.ReadKey();
}
/// <summary>
/// Computes the analysis: fits every candidate distribution to the data,
/// scores each fit with Kolmogorov-Smirnov, Chi-Square and Anderson-Darling
/// tests, and ranks the candidates by each criterion.
/// </summary>
///
public void Compute()
{
    // Tracks which candidate distributions failed to fit.
    bool[] fail = new bool[Distributions.Length];

    // Step 1. Fit all candidate distributions to the data.
    for (int i = 0; i < Distributions.Length; i++)
    {
        var distribution = Distributions[i];

        try
        {
            distribution.Fit(data);
        }
        catch
        {
            // TODO: Maybe revisit the decision to swallow exceptions here.
            // A distribution that cannot be fitted is simply skipped below.
            fail[i] = true;
        }
    }

    // Step 2. Use statistical tests to see how well each
    // distribution was able to model the data.
    KolmogorovSmirnov = new KolmogorovSmirnovTest[Distributions.Length];
    ChiSquare = new ChiSquareTest[Distributions.Length];
    AndersonDarling = new AndersonDarlingTest[Distributions.Length];
    DistributionNames = new string[Distributions.Length];

    // Per-candidate scores used for ranking. KS and chi-square statistics are
    // negated (so a smaller statistic scores higher).
    // NOTE(review): the Anderson-Darling statistic is stored un-negated below,
    // unlike the other two — confirm this asymmetry is intentional.
    double[] ks = new double[Distributions.Length];
    double[] cs = new double[Distributions.Length];
    double[] ad = new double[Distributions.Length];

    var measures = new List <GoodnessOfFit>();

    for (int i = 0; i < Distributions.Length; i++)
    {
        // Default to -infinity so failed/skipped candidates rank last.
        ks[i] = Double.NegativeInfinity;
        cs[i] = Double.NegativeInfinity;
        ad[i] = Double.NegativeInfinity;

        var d = this.Distributions[i] as IUnivariateDistribution;

        // Skip non-univariate candidates and those that failed to fit.
        if (d == null || fail[i])
        {
            continue;
        }

        this.DistributionNames[i] = GetName(d.GetType());

        // Budget passed to run() for each test — presumably a timeout in
        // milliseconds; confirm against run()'s definition.
        int ms = 5000;

        run(() =>
        {
            this.KolmogorovSmirnov[i] = new KolmogorovSmirnovTest(data, d);
            ks[i] = -KolmogorovSmirnov[i].Statistic;
        }, ms);

        run(() =>
        {
            this.ChiSquare[i] = new ChiSquareTest(data, d);
            cs[i] = -ChiSquare[i].Statistic;
        }, ms);

        run(() =>
        {
            this.AndersonDarling[i] = new AndersonDarlingTest(data, d);
            ad[i] = AndersonDarling[i].Statistic;
        }, ms);

        // NaN scores would poison the ranking; demote them to -infinity.
        if (Double.IsNaN(ks[i]))
        {
            ks[i] = Double.NegativeInfinity;
        }
        if (Double.IsNaN(cs[i]))
        {
            cs[i] = Double.NegativeInfinity;
        }
        if (Double.IsNaN(ad[i]))
        {
            ad[i] = Double.NegativeInfinity;
        }

        measures.Add(new GoodnessOfFit(this, i));
    }

    // Step 3. Rank the candidates per criterion and expose the
    // goodness-of-fit measures in sorted order.
    this.KolmogorovSmirnovRank = getRank(ks);
    this.ChiSquareRank = getRank(cs);
    this.AndersonDarlingRank = getRank(ad);

    measures.Sort();
    this.GoodnessOfFit = new GoodnessOfFitCollection(measures);
}