Inheritance: UnivariateContinuousDistribution
        public void TwoSampleKolmogorovSmirnovTestConstructorTest()
        {
            Accord.Math.Tools.SetupGenerator(0);

            // Create a K-S test to verify if two samples have been
            // drawn from different populations. In this example, we
            // will first generate a number of samples from different
            // distributions, and then check if the K-S test can indeed
            // see the difference:

            // Generate 25 points from a Normal distribution with mean 5 and standard deviation 1
            double[] sample1 = new NormalDistribution(mean: 5, stdDev: 1).Generate(25);

            // Generate 25 points from a uniform distribution between 0 and 10
            double[] sample2 = new UniformContinuousDistribution(a: 0, b: 10).Generate(25);

            // Now we can create a K-S test and test the unequal hypothesis:
            var test = new TwoSampleKolmogorovSmirnovTest(sample1, sample2,
                TwoSampleKolmogorovSmirnovTestHypothesis.SamplesDistributionsAreUnequal);

            bool significant = test.Significant; // outputs true

            Assert.IsTrue(test.Significant);
            Assert.AreEqual(0.44, test.Statistic, 1e-15);
            Assert.IsFalse(Double.IsNaN(test.Statistic));
            Assert.AreEqual(0.00826, test.PValue, 1e-5);
        }
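The statistic asserted above is the largest vertical distance between the two samples' empirical distribution functions. As a point of reference, here is a minimal hand-rolled sketch (a hypothetical helper, not part of Accord.NET, assuming System and System.Linq are in scope) that computes the same quantity:

        // Computes the two-sample Kolmogorov-Smirnov statistic D by evaluating both
        // empirical CDFs at every observed point and keeping the largest absolute gap.
        private static double TwoSampleKSStatistic(double[] sample1, double[] sample2)
        {
            double d = 0;
            foreach (double x in sample1.Concat(sample2))
            {
                double cdf1 = sample1.Count(v => v <= x) / (double)sample1.Length;
                double cdf2 = sample2.Count(v => v <= x) / (double)sample2.Length;
                d = Math.Max(d, Math.Abs(cdf1 - cdf2));
            }
            return d; // for the samples generated above, this should match test.Statistic (0.44)
        }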
        /// <summary>
        ///   Creates a new Shapiro-Wilk distribution.
        /// </summary>
        /// 
        /// <param name="samples">The number of samples.</param>
        /// 
        public ShapiroWilkDistribution(int samples)
        {
            if (samples < 4)
            {
                throw new ArgumentOutOfRangeException("samples",
                    "The number of samples must be higher than 3.");
            }

            this.NumberOfSamples = samples;

            // Small-sample case (4 <= n <= 11): normalizing transformation with
            // polynomial approximations in n (Royston, 1992).
            if (samples <= 11)
            {
                double n = samples;
                double n2 = n * n;
                double n3 = n2 * n;

                this.g = w => -Math.Log(0.459 * n - 2.273 - Math.Log(1 - w));
                double mean = -0.0006714 * n3 + 0.0250540 * n2 - 0.39978 * n + 0.54400;
                double sigma = Math.Exp(-0.0020322 * n3 + 0.0627670 * n2 - 0.77857 * n + 1.38220);

                this.normal = new NormalDistribution(mean, sigma);
            }
            else
            {
                // Large-sample case (n >= 12): normalizing transformation
                // g(w) = ln(1 - w) with polynomial approximations in ln(n).
                double u = Math.Log(samples);
                double u2 = u * u;
                double u3 = u2 * u;

                this.g = w => Math.Log(1.0 - w);
                double mean = 0.00389150 * u3 - 0.083751 * u2 - 0.31082 * u - 1.5861;
                double sigma = Math.Exp(0.00303020 * u2 - 0.082676 * u - 0.48030);

                this.normal = new NormalDistribution(mean, sigma);
            }
        }
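For context, this distribution underlies the Shapiro-Wilk normality test elsewhere in Accord.NET. A hedged usage sketch of the test class (the same API is used in the console example near the end of this collection):

        // Hedged sketch: run a Shapiro-Wilk normality test on a synthetic sample and
        // read the W statistic, p-value and significance flag from the test object.
        double[] data = new NormalDistribution(mean: 0, stdDev: 1).Generate(30);
        var sw = new Accord.Statistics.Testing.ShapiroWilkTest(data);
        Console.WriteLine("W = " + sw.Statistic + ", p = " + sw.PValue + ", significant = " + sw.Significant);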
        public static HiddenMarkovClassifier<NormalDistribution> CreateModel1()
        {
            // Create a Continuous density Hidden Markov Model Sequence Classifier
            // to detect a univariate sequence and the same sequence backwards.
            double[][] sequences = new double[][] 
            {
                new double[] { 0,1,2,3,4 }, // This is the first  sequence with label = 0
                new double[] { 4,3,2,1,0 }, // This is the second sequence with label = 1
            };

            // Labels for the sequences
            int[] labels = { 0, 1 };

            // Creates a sequence classifier containing 2 hidden Markov Models
            //  with 2 states and an underlying Normal distribution as density.
            NormalDistribution density = new NormalDistribution();
            var classifier = new HiddenMarkovClassifier<NormalDistribution>(2, new Ergodic(2), density);

            // Configure the learning algorithms to train the sequence classifier
            var teacher = new HiddenMarkovClassifierLearning<NormalDistribution>(classifier,

                // Train each model until the log-likelihood changes by less than 0.0001
                modelIndex => new BaumWelchLearning<NormalDistribution>(classifier.Models[modelIndex])
                {
                    Tolerance = 0.0001,
                    Iterations = 0
                }
            );

            // Train the sequence classifier using the algorithm
            double logLikelihood = teacher.Run(sequences, labels);


            return classifier;
        }
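A hedged usage sketch of the classifier built above, assuming the classic Compute overload that returns the most likely class label together with its likelihood (the probe sequence is made up for illustration):

        // Hedged sketch: ask the trained classifier which of its two models better
        // explains a new sequence (Compute(sequence, out likelihood) is assumed here).
        var classifier = CreateModel1();
        double likelihood;
        int label = classifier.Compute(new double[] { 0, 1, 2, 3, 4 }, out likelihood);
        // label should come out as 0, since the probe matches the first training sequence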
 public ObservationGenerationModel(NormalDistribution foveaPeripheryOperatingCharacteristic,
     int fixation,
     int location)
 {
     _foveaPeripheryOperatingCharacteristic = foveaPeripheryOperatingCharacteristic;
     _fixation = fixation;
     _location = location;
 }
 public void Initialise()
 {
     // Initialise the belief state uniformly over its seven entries.
     State = new double[7];
     for (var i = 0; i < State.Length; i++)
     {
         State[i] = 1d / 7d;
     }

     _foveaPeripheryOperatingCharacteristic = NormalDistribution.Standard;
 }
 private double[] GenerateObservations(int fixation, int[] visualArray)
 {
     _observationValues = new double[visualArray.Length];
     for (var location = 0; location < visualArray.Length; location++)
     {
         if (visualArray[location] == 0)
         {
             _observationValues[location] = _standardDistribution.Generate();
         }
         else
         {
              // Discriminability is explained in Butko and Movellan (2008); it is determined
              // from the FPOC and the eccentricity of this location from the current fixation.
             var discriminability =
                 new ObservationGenerationModel(_standardDistribution, fixation, location)
                     .GenerateDiscriminabilityValue();
             _observationValues[location] = new NormalDistribution(discriminability, 1.0).Generate();
         }
     }
     return _observationValues;
 }
        /// <summary>
        ///   Generates a random vector of observations from the
        ///   Inverse Gaussian distribution with the given parameters.
        /// </summary>
        ///
        /// <param name="mean">The mean parameter mu.</param>
        /// <param name="shape">The shape parameter lambda.</param>
        /// <param name="samples">The number of samples to generate.</param>
        /// <param name="result">The location where to store the samples.</param>
        /// <param name="source">The random number generator to use as a source of randomness.
        ///   Default is to use <see cref="Accord.Math.Random.Generator.Random"/>.</param>
        ///
        /// <returns>An array of double values sampled from the inverse Gaussian distribution.</returns>
        ///
        public static double[] Random(double mean, double shape, int samples, double[] result, Random source)
        {
            // Sampling via the transformation method of Michael, Schucany and Haas (1976):
            // start from standard Normal draws, apply the quadratic transformation below,
            // and pick between the two roots x and mean^2/x with probability mean / (mean + x).
            NormalDistribution.Random(samples, result, source);

            for (int i = 0; i < result.Length; i++)
            {
                double v = result[i];
                double y = v * v;
                double x = mean + (mean * mean * y) / (2 * shape) - (mean / (2 * shape)) * Math.Sqrt(4 * mean * shape * y + mean * mean * y * y);

                double t = source.NextDouble();

                if (t <= (mean) / (mean + x))
                {
                    result[i] = x;
                }
                else
                {
                    result[i] = (mean * mean) / x;
                }
            }

            return result;
        }
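A brief usage sketch of the sampler above (parameters and buffer size are arbitrary), using the shared framework generator mentioned in the XML documentation:

        // Hedged sketch: draw 1000 inverse Gaussian samples with mu = 1 and lambda = 2
        // into a caller-supplied buffer.
        double[] buffer = new double[1000];
        Random(mean: 1.0, shape: 2.0, samples: 1000, result: buffer,
            source: Accord.Math.Random.Generator.Random);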
        private void init(int n, double[] ranks, bool? exact)
        {
            if (n <= 0)
            {
                throw new ArgumentOutOfRangeException("n", "The number of samples must be positive.");
            }

            this.n = n;

            double mean   = n * (n + 1.0) / 4.0;
            double stdDev = Math.Sqrt((n * (n + 1.0) * (2.0 * n + 1.0)) / 24.0);

            bool hasVectors = ranks != null;

            // For small samples (n <= 12) the distribution can be computed exactly
            this.exact = hasVectors && (n <= 12);

            if (exact.HasValue)
            {
                if (exact.Value && !hasVectors)
                {
                    throw new ArgumentException("exact", "Cannot use exact method if rank vectors are not specified.");
                }
                this.exact = exact.Value; // force
            }

            if (hasVectors)
            {
                if (this.exact)
                {
                    initExactMethod(ranks);
                }
            }

            this.approximation = new NormalDistribution(mean, stdDev);
        }
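To make the Normal approximation above concrete: for n = 10 paired observations the mean is 10*11/4 = 27.5 and the standard deviation is sqrt(10*11*21/24) ≈ 9.81, so an observed signed-rank statistic can be standardized directly. A small hedged sketch with a made-up statistic:

        // Hedged sketch: z-score of an observed Wilcoxon signed-rank statistic W
        // under the Normal approximation constructed in init() above.
        int n = 10;
        double mean   = n * (n + 1.0) / 4.0;                                  // 27.5
        double stdDev = Math.Sqrt((n * (n + 1.0) * (2.0 * n + 1.0)) / 24.0);  // ~9.81
        double W = 8;                                                         // hypothetical statistic
        double z = (W - mean) / stdDev;                                       // ~ -1.99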
        private void init(int n1, int n2, double[] ranks, bool? exact)
        {
            if (n1 <= 0)
            {
                throw new ArgumentOutOfRangeException("n1", "The number of observations in the first sample (n1) must be higher than zero.");
            }

            if (n2 <= 0)
            {
                throw new ArgumentOutOfRangeException("n2", "The number of observations in the second sample (n2) must be higher than zero.");
            }

            if (n1 > n2)
            {
                Trace.TraceWarning("Creating a MannWhitneyDistribution where the first sample is larger than the second sample. If possible, please re-organize your samples such that the first sample is smaller than the second sample.");
            }

            if (ranks != null)
            {
                if (ranks.Length <= 1)
                {
                    throw new ArgumentOutOfRangeException("ranks", "The rank vector must contain a minimum of 2 elements.");
                }

                for (int i = 0; i < ranks.Length; i++)
                {
                    if (ranks[i] < 0)
                    {
                        throw new ArgumentOutOfRangeException("The rank values cannot be negative.");
                    }
                }
            }

            int n = n1 + n2;

            this.n1 = n1;
            this.n2 = n2;

            // From https://en.wikipedia.org/wiki/Mann%E2%80%93Whitney_U_test: For large samples, U
            // is approximately normally distributed. In that case, the standardized value is given
            // by z = (U - mean) / stdDev where:
            double mean   = ((double)n1 * n2) / 2.0;
            double stdDev = Math.Sqrt(((double)n1 * n2 * (n + 1)) / 12.0);

            bool hasVectors = ranks != null;

            // For small samples (n1 <= 30 and n2 <= 30) the distribution can be computed exactly
            this.exact = hasVectors && (n1 <= 30 && n2 <= 30);

            if (exact.HasValue)
            {
                if (exact.Value && !hasVectors)
                {
                    throw new ArgumentException("exact", "Cannot use exact method if rank vectors are not specified.");
                }
                this.exact = exact.Value; // force
            }

            if (hasVectors)
            {
                // Apply correction to the variance
                double correction = MannWhitneyDistribution.correction(ranks);
                double a          = ((double)n1 * n2) / 12.0;
                double b          = (n + 1.0) - correction;
                stdDev = Math.Sqrt(a * b);

                if (this.exact)
                {
                    initExactMethod(ranks);
                }
            }

            this.approximation = new NormalDistribution(mean, stdDev);
        }
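Following the formula quoted in the comments above, the standardized value for an observed U can be computed directly. A small hedged sketch with arbitrary sample sizes and no tie correction:

        // Hedged sketch: z-score of an observed Mann-Whitney U under the plain
        // Normal approximation (n1 = 10, n2 = 12, U chosen for illustration).
        int n1 = 10, n2 = 12;
        double mean   = ((double)n1 * n2) / 2.0;                              // 60
        double stdDev = Math.Sqrt(((double)n1 * n2 * (n1 + n2 + 1)) / 12.0);  // ~15.17
        double U = 30;
        double z = (U - mean) / stdDev;                                       // ~ -1.98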
        public static double BaumWelchLearning(double[][] data)
        {
            // Specify an initial Normal distribution for the samples.
            NormalDistribution density = new NormalDistribution();          

            // Creates a continuous hidden Markov Model with two states organized in a forward
            //  topology and an underlying univariate Normal distribution as probability density.
            var model = new HiddenMarkovModel<NormalDistribution>(new Ergodic(2), density);

            // Configure the learning algorithm to train the model until the
            // change in the average log-likelihood is smaller than 0.001
            var teacher = new BaumWelchLearning<NormalDistribution>(model)
            {
                Tolerance = 0.001,
                Iterations = 0,
            };

            // Fit the model
            double likelihood = teacher.Run(data);

            // See the log-probability of the sequences learned
            double a1 = model.Evaluate(new[] { 0.999999999999928, 0 , 0.999999999999988 , 0 , 0.999999999999988 }); // -0.12799388666109757
            return a1;
        }
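As a follow-up, the method above can be called with any set of univariate sequences; the Evaluate call inside it returns a log-probability, so larger (less negative) values indicate sequences that the trained model explains better. A hedged sketch with made-up data:

        // Hedged sketch: train on two alternating sequences and inspect the returned
        // log-probability of the fixed probe sequence evaluated inside the method.
        double[][] data =
        {
            new double[] { 1, 0, 1, 0, 1 },
            new double[] { 1, 0, 1, 0, 1, 0 },
        };
        double logProbability = BaumWelchLearning(data);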
        public void NormalTest()
        {
            var target = GeneralizedNormalDistribution.Normal(mean: 0.42, stdDev: 4.2);
            var normal = new NormalDistribution(mean: 0.42, stdDev: 4.2);

            test(target, normal);
        }
 public static Bitmap AWGNNoise(Bitmap bmp, int intensity)
 {
     Bitmap img = new Bitmap(bmp.Width, bmp.Height);
     var normal = new NormalDistribution(mean: 0, stdDev: 1);
     double[] noise = normal.Generate(img.Width*img.Height);
     int l = 0;
     for (int i = 0; i < bmp.Height; i++)
     {
          for (int j = 0; j < bmp.Width; j++)
         {
             Color c = bmp.GetPixel(j, i);
              // Add scaled Gaussian noise to each color channel.
              int c1 = c.R + ((int)Math.Ceiling(noise[l]) * intensity);
              int c2 = c.G + ((int)Math.Ceiling(noise[l]) * intensity);
              int c3 = c.B + ((int)Math.Ceiling(noise[l]) * intensity);

              // Clamp each channel to the valid [0, 255] range.
              c1 = c1 > 255 ? 255 : c1;
             c1 = c1 < 0 ? 0 : c1;
             c2 = c2 > 255 ? 255 : c2;
             c2 = c2 < 0 ? 0 : c2;
             c3 = c3 > 255 ? 255 : c3;
             c3 = c3 < 0 ? 0 : c3;
             Color newC = Color.FromArgb(c1, c2, c3);
             img.SetPixel(j, i, newC);
             l++;
         }
     }
     return img;
 }
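A hedged usage sketch of the method above; the file names are placeholders:

 // Hedged sketch: read an image, add Gaussian noise with intensity 10, and save the result.
 using (var source = new Bitmap("input.png"))
 using (var noisy = AWGNNoise(source, intensity: 10))
 {
     noisy.Save("noisy.png");
 }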
        public void DistributionFunctionPerComponent()
        {
            NormalDistribution[] components = new NormalDistribution[2];
            components[0] = new NormalDistribution(2, 1);
            components[1] = new NormalDistribution(5, 1);

            double[] coefficients = { 0.4, 0.5 };

            var mixture = new Mixture<NormalDistribution>(coefficients, components);

            double expected = mixture.DistributionFunction(0, 0.42) +
                              mixture.DistributionFunction(1, 0.42);

            double actual = mixture.DistributionFunction(0.42);

            Assert.AreEqual(expected, actual);
        }
        public void ProbabilityDensityFunctionTest2()
        {
            double expected, actual;

            // Test for small variance
            NormalDistribution target = new NormalDistribution(4.2, double.Epsilon);

            expected = 0;
            actual = target.ProbabilityDensityFunction(0);
            Assert.AreEqual(expected, actual);

            expected = double.PositiveInfinity;
            actual = target.ProbabilityDensityFunction(4.2);
            Assert.AreEqual(expected, actual);
        }
        public void FitTest()
        {
            double[] coefficients = { 0.50, 0.50 };

            NormalDistribution[] components = new NormalDistribution[2];
            components[0] = new NormalDistribution(2, 1);
            components[1] = new NormalDistribution(5, 1);

            var target = new Mixture<NormalDistribution>(coefficients, components);

            double[] values = { 0, 1, 1, 0, 1, 6, 6, 5, 7, 5 };
            double[] part1 = values.Submatrix(0, 4);
            double[] part2 = values.Submatrix(5, 9);

            MixtureOptions options = new MixtureOptions() { Threshold = 1e-10 };

            target.Fit(values, options);
            var actual = target;

            var mean1 = Accord.Statistics.Tools.Mean(part1);
            var var1 = Accord.Statistics.Tools.Variance(part1);
            Assert.AreEqual(mean1, actual.Components[0].Mean, 1e-6);
            Assert.AreEqual(var1, actual.Components[0].Variance, 1e-6);

            var mean2 = Accord.Statistics.Tools.Mean(part2);
            var var2 = Accord.Statistics.Tools.Variance(part2);
            Assert.AreEqual(mean2, actual.Components[1].Mean, 1e-6);
            Assert.AreEqual(var2, actual.Components[1].Variance, 1e-5);

            var expectedMean = Accord.Statistics.Tools.Mean(values);
            var actualMean = actual.Mean;
            Assert.AreEqual(expectedMean, actualMean, 1e-7);

            var expectedVar = Accord.Statistics.Tools.Variance(values, false);
            var actualVar = actual.Variance;
            Assert.AreEqual(expectedVar, actualVar, 0.15);
        }
        public void LogProbabilityDensityFunction()
        {
            NormalDistribution[] components = new NormalDistribution[2];
            components[0] = new NormalDistribution(2, 1);
            components[1] = new NormalDistribution(5, 1);

            double[] coefficients = { 0.4, 0.5 };

            var mixture = new Mixture<NormalDistribution>(coefficients, components);

            double expected = System.Math.Log(
                0.4 * components[0].ProbabilityDensityFunction(0.42) +
                0.5 * components[1].ProbabilityDensityFunction(0.42));

            double actual = mixture.LogProbabilityDensityFunction(0.42);

            Assert.AreEqual(expected, actual);
        }
        public void LogProbabilityDensityFunctionTest()
        {
            double x = 3;
            double mean = 7;
            double dev = 5;

            NormalDistribution target = new NormalDistribution(mean, dev);

            double expected = System.Math.Log(0.0579383105522966);
            double actual = target.LogProbabilityDensityFunction(x);

            Assert.IsFalse(double.IsNaN(actual));
            Assert.AreEqual(expected, actual, 1e-15);
        }
        public void ConstructorTest0()
        {
            var original = new NormalDistribution(mean: 4, stdDev: 4.2);

            var normal = new GeneralContinuousDistribution(
                original.Support,
                original.ProbabilityDensityFunction,
                original.DistributionFunction);

            testNormal(normal, 1);
        }
        public void DistributionFunctionTest()
        {
            double x = 3;
            double mean = 7;
            double dev = 5;

            NormalDistribution target = new NormalDistribution(mean, dev);

            double expected = 0.211855398583397;
            double actual = target.DistributionFunction(x);

            Assert.IsFalse(double.IsNaN(actual));
            Assert.AreEqual(expected, actual, 1e-15);
        }
        public void MixtureFitTest()
        {
            var samples1 = new NormalDistribution(mean: -2, stdDev: 0.5).Generate(100000);
            var samples2 = new NormalDistribution(mean: +4, stdDev: 0.5).Generate(100000);

            // Mix the samples from both distributions
            var samples = samples1.Concatenate(samples2);

            // Create a new mixture distribution with two Normal components
            var mixture = new Mixture<NormalDistribution>(new[] { 0.2, 0.8 },
                new NormalDistribution(-1),
                new NormalDistribution(+1));

            // Estimate the distribution
            mixture.Fit(samples, new MixtureOptions
            {
                Iterations = 50,
                Threshold = 0
            });

            var result = mixture.ToString("N2", System.Globalization.CultureInfo.InvariantCulture);

            Assert.AreEqual("Mixture(x; 0.50*N(x; μ = -2.00, σ² = 0.25) + 0.50*N(x; μ = 4.00, σ² = 0.25))", result);
        }
        public void DistributionFunctionTest3()
        {
            double expected, actual;

            // Test small variance
            NormalDistribution target = new NormalDistribution(1.0, double.Epsilon);

            expected = 0;
            actual = target.DistributionFunction(0);
            Assert.AreEqual(expected, actual);

            expected = 0.5;
            actual = target.DistributionFunction(1.0);
            Assert.AreEqual(expected, actual);

            expected = 1.0;
            actual = target.DistributionFunction(1.0 + 1e-15);
            Assert.AreEqual(expected, actual);

            expected = 0.0;
            actual = target.DistributionFunction(1.0 - 1e-15);
            Assert.AreEqual(expected, actual);
        }
        public void DistributionFunctionTest1()
        {
            var target = GeneralizedNormalDistribution.Normal(mean: 0.42, stdDev: 4.2);
            var normal = new NormalDistribution(mean: 0.42, stdDev: 4.2);

            for (double x = -10; x < 10; x += 0.0001)
            {
                double actual = target.DistributionFunction(x);
                double expected = normal.DistributionFunction(x);
                Assert.AreEqual(expected, actual, 1e-10);
                Assert.IsFalse(Double.IsNaN(actual));
            }
        }
        public void ZScoreTest()
        {
            double x = 5;
            double mean = 3;
            double dev = 6;

            NormalDistribution target = new NormalDistribution(mean, dev);

            double expected = (x - 3) / 6;
            double actual = target.ZScore(x);

            Assert.AreEqual(expected, actual);
        }
        public void NormalTest2()
        {
            var target = GeneralizedNormalDistribution.Normal(mean: 0.0, stdDev: 2 / Constants.Sqrt2);
            var normal = new NormalDistribution(mean: 0.0, stdDev: 2 / Constants.Sqrt2);

            test(target, normal);

            var support = target.Support;
            Assert.AreEqual(normal.Support.Min, support.Min);
            Assert.AreEqual(normal.Support.Max, support.Max);

            for (double i = 0.01; i <= 1; i += 0.01)
            {
                var actual = target.GetRange(i);
                var expected = normal.GetRange(i);

                Assert.AreEqual(expected.Min, actual.Min);
                Assert.AreEqual(expected.Max, actual.Max);
            }
        }
        public void CloneTest()
        {
            NormalDistribution target = new NormalDistribution(0.5, 4.2);

            NormalDistribution clone = (NormalDistribution)target.Clone();

            Assert.AreNotSame(target, clone);
            Assert.AreEqual(target.Entropy, clone.Entropy);
            Assert.AreEqual(target.Mean, clone.Mean);
            Assert.AreEqual(target.StandardDeviation, clone.StandardDeviation);
            Assert.AreEqual(target.Variance, clone.Variance);
        }
 public Activation(NormalDistribution standardDistribution)
 {
     _standardDistribution = standardDistribution;
 }
        public void FitTest()
        {
            double expectedMean = 1.125;
            double expectedSigma = 1.01775897605147;

            NormalDistribution target;

            target = new NormalDistribution();
            double[] observations = { 0.10, 0.40, 2.00, 2.00 };
            double[] weights = { 0.25, 0.25, 0.25, 0.25 };
            target.Fit(observations, weights);

            Assert.AreEqual(expectedMean, target.Mean);
            Assert.AreEqual(expectedSigma, target.StandardDeviation, 1e-6);


            target = new NormalDistribution();
            double[] observations2 = { 0.10, 0.10, 0.40, 2.00 };
            double[] weights2 = { 0.125, 0.125, 0.25, 0.50 };
            target.Fit(observations2, weights2);

            Assert.AreEqual(expectedMean, target.Mean);
            // Assert.AreEqual(expectedSigma, target.StandardDeviation, 1e-6);
        }
 /// <summary>
 ///   Generates a random observation from the
 ///   Lognormal distribution with the given parameters.
 /// </summary>
 ///
 /// <param name="location">The distribution's location value.</param>
 /// <param name="shape">The distribution's shape deviation.</param>
 /// <param name="source">The random number generator to use as a source of randomness.
 ///   Default is to use <see cref="Accord.Math.Random.Generator.Random"/>.</param>
 ///
 /// <returns>A random double value sampled from the specified Lognormal distribution.</returns>
 ///
 public static double Random(double location, double shape, Random source)
 {
      return Math.Exp(location + shape * NormalDistribution.Random(source));
 }
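A brief hedged sketch of calling the sampler above; since the draw is exp(location + shape * Z) with Z standard Normal, location and shape are the mean and standard deviation of the logarithm of the generated value:

 // Hedged sketch: one Lognormal draw with ln-mean 0 and ln-standard-deviation 0.5,
 // using the shared framework generator.
 double x = Random(location: 0.0, shape: 0.5, source: Accord.Math.Random.Generator.Random);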
        public void ConstructorTest2()
        {
            var original = new NormalDistribution(mean: 4, stdDev: 4.2);

            var normal = GeneralContinuousDistribution.FromDensityFunction(
                original.Support, original.ProbabilityDensityFunction);

            for (double i = -10; i < +10; i += 0.1)
            {
                double expected = original.DistributionFunction(i);
                double actual = normal.DistributionFunction(i);

                double diff = Math.Abs(expected - actual) / expected;
                Assert.IsTrue(diff < 1e-7);
            }

            testNormal(normal, 1);
        }
        public void ConstructorTest1()
        {
            NormalDistribution[] components = new NormalDistribution[2];
            components[0] = new NormalDistribution(2, 1);
            components[1] = new NormalDistribution(5, 1);

            var mixture = new Mixture<NormalDistribution>(components);

            double[] expected = { 0.5, 0.5 };

            Assert.IsTrue(expected.IsEqual(mixture.Coefficients));
            Assert.AreEqual(components, mixture.Components);
        }
        public void InverseDistributionFunctionTest()
        {
            double[] expected =
            {
                Double.NegativeInfinity, -4.38252, -2.53481, -1.20248,
                -0.0640578, 1.0, 2.06406, 3.20248, 4.53481, 6.38252,
                Double.PositiveInfinity
            };

            NormalDistribution original = new NormalDistribution(1.0, 4.2);
            var target = GeneralContinuousDistribution.FromDistributionFunction(
               original.Support, original.DistributionFunction);

            for (int i = 0; i < expected.Length; i++)
            {
                double x = i / 10.0;
                double actual = target.InverseDistributionFunction(x);
                Assert.AreEqual(expected[i], actual, 1e-5);
                Assert.IsFalse(Double.IsNaN(actual));
            }
        }
        public void FitTest2()
        {
            double[] coefficients = { 0.50, 0.50 };

            NormalDistribution[] components = new NormalDistribution[2];
            components[0] = new NormalDistribution(2, 1);
            components[1] = new NormalDistribution(5, 1);

            var target = new Mixture<NormalDistribution>(coefficients, components);

            double[] values =  { 12512, 1, 1, 0, 1, 6, 6, 5, 7, 5 };
            double[] weights = {     0, 1, 1, 1, 1, 1, 1, 1, 1, 1 };
            weights = weights.Divide(weights.Sum());
            double[] part1 = values.Submatrix(1, 4);
            double[] part2 = values.Submatrix(5, 9);

            MixtureOptions opt = new MixtureOptions()
            {
                Threshold = 0.000001
            };

            target.Fit(values, weights, opt);

            var mean1 = Accord.Statistics.Tools.Mean(part1);
            var var1 = Accord.Statistics.Tools.Variance(part1);
            Assert.AreEqual(mean1, target.Components[0].Mean, 1e-5);
            Assert.AreEqual(var1, target.Components[0].Variance, 1e-5);

            var mean2 = Accord.Statistics.Tools.Mean(part2);
            var var2 = Accord.Statistics.Tools.Variance(part2);
            Assert.AreEqual(mean2, target.Components[1].Mean, 1e-5);
            Assert.AreEqual(var2, target.Components[1].Variance, 1e-5);

            var expectedMean = Accord.Statistics.Tools.WeightedMean(values, weights);
            var actualMean = target.Mean;
            Assert.AreEqual(expectedMean, actualMean, 1e-5);
        }
        public void MedianTest2()
        {
            NormalDistribution original = new NormalDistribution(0.4, 2.2);

            var target = GeneralContinuousDistribution.FromDistributionFunction(
               original.Support, original.DistributionFunction);

            Assert.AreEqual(target.Median, target.InverseDistributionFunction(0.5));
            Assert.AreEqual(target.Median, original.Median, 1e-10);

            target = GeneralContinuousDistribution.FromDensityFunction(
               original.Support, original.ProbabilityDensityFunction);

            Assert.AreEqual(target.Median, target.InverseDistributionFunction(0.5), 1e-10);
            Assert.AreEqual(target.Median, original.Median, 1e-10);
        }
        public void MixtureWeightsFitTest()
        {
            // Randomly initialize some mixture components
            NormalDistribution[] components = new NormalDistribution[2];
            components[0] = new NormalDistribution(2, 1);
            components[1] = new NormalDistribution(5, 1);

            // Create an initial mixture
            Mixture<NormalDistribution> mixture = new Mixture<NormalDistribution>(components);

            // Now, suppose we have a weighted data
            // set. Those will be the input points:

            double[] points = { 0, 3, 1, 7, 3, 5, 1, 2, -1, 2, 7, 6, 8, 6 }; // (14 points)

            // And these are their respective unnormalized weights:
            double[] weights = { 1, 1, 1, 2, 2, 1, 1, 1, 2, 1, 2, 3, 1, 1 }; // (14 weights)

            // Let's normalize the weights so they sum up to one:
            weights = weights.Divide(weights.Sum());

            // Now we can fit our model to the data:
            mixture.Fit(points, weights);   // done!

            // Our model will be:
            double mean1 = mixture.Components[0].Mean; // 1.41126
            double mean2 = mixture.Components[1].Mean; // 6.53301

            // If we need the GaussianMixtureModel functionality, we can
            // use the estimated mixture to initialize a new model:
            GaussianMixtureModel gmm = new GaussianMixtureModel(mixture);

            Assert.AreEqual(mean1, gmm.Gaussians[0].Mean[0]);
            Assert.AreEqual(mean2, gmm.Gaussians[1].Mean[0]);

            Assert.AreEqual(mean1, 1.4112610766836404, 1e-15);
            Assert.AreEqual(mean2, 6.5330177004151082, 1e-14);

            Assert.AreEqual(mixture.Coefficients[0], gmm.Gaussians[0].Proportion);
            Assert.AreEqual(mixture.Coefficients[1], gmm.Gaussians[1].Proportion);
        }
        static void Main(string[] args)
        {
            Process _process       = Process.GetCurrentProcess();
            bool    _takearguments = false;

            // Check whether any command-line arguments were supplied.
            if (args.Length > 0)
            {
                Console.WriteLine("Arguments found.");
                _takearguments = true;
                HandleArguments(args);
            }

            string HostName = "localhost";

            if (!_takearguments)
            {
                Console.WriteLine("Host name:");
                HostName = Console.ReadLine();

                Console.Clear();
                Console.WriteLine("How many games do you wish to simulate?");
                string _count  = Console.ReadLine();
                int    _result = 0;

                // Make sure the value that has been entered is
                // in fact a valid integer.
                while (!int.TryParse(_count, out _result))
                {
                    Console.Clear();
                    Console.WriteLine("Please try again: ");
                    _count = Console.ReadLine();
                }

                // Set the new count of games that we want to simulate.
                gamesToPlay = _result;

                Console.Clear();
                string _consoleoutput = "";

                while (_consoleoutput != "n" && _consoleoutput != "y")
                {
                    // Ask whether log output should be silenced during the run
                    Console.WriteLine("Silence output?");
                    _consoleoutput = Console.ReadLine();
                }

                if (_consoleoutput == "n")
                {
                    m_RemainQuiet = false;
                }
                else if (_consoleoutput == "y")
                {
                    m_RemainQuiet = true;
                }
            }

            //RunGamesParallel();

            //return;

            // Get some strange invocation error here.
            // tryLoadController(_agentName);

            gs           = new GameState(125);
            gs.GameOver += new EventHandler(GameOverHandler);
            gs.StartPlay();

            BasePacman controller = new LucPacScripted();

            Console.WriteLine("Choose an AI agent to control Pacman:");
            Console.WriteLine(" 1 - LucPacScripted");
            Console.WriteLine(" 2 - LucPac (MCTS)");
            Console.WriteLine(" 3 - MMLocPac (Evolved Neural Network) from .nn file");
            Console.WriteLine(" 5 - SimRandom");
            int Selection = int.Parse(Console.ReadKey().KeyChar.ToString());

            switch (Selection)
            {
            case 1:
                controller = new LucPacScripted();
                break;

            case 2:
                controller = new LucPac();
                break;

            case 3:
                controller = new MMPac.MMLocPac("NeuralNetworkLocPac.nn");
                break;

            default:
                controller = new RandomPac();
                break;
            }


            var GR = new GameRunner();

            var Base = new double[9] {
                3.0, 2.8, 2.8, 2.8, 2.8, 1.5, 1.5, 1.5, 1.5
            };
            var Params = new double[9] {
                0.07, 0.01, 0.02, -0.16, 0.06, -0.05, 0, 0.06, -0.09
            };
            //var Params = new double[9] { -0.17, 0.01, 0.02, -0.16, 0.06, -0.05, 0, 0.06, -0.09 };

            // Params = Params.Add(Base);

            string TestAgent = "PacmanAI.UncertainAgent,PacmanAI";

            var GRR = GR.RunGamesOnline(HostName, gamesToPlay,
                                        controller.GetType().Name
                                        //TestAgent
                                        , new Random().Next(),
                                        //null
                                        new List <double>(Params)
                                        );

            var NewScores = new List <double>();

            NewScores.AddRange(GRR.scores);
            for (int i = 0; i < NewScores.Count; i++)
            {
                NewScores[i] += 9000;
            }
            NewScores.AddRange(GRR.scores);

            var ZeroScores = new List <double>();

            for (int i = 0; i < 100; i++)
            {
                ZeroScores.Add(0);
            }

            Console.WriteLine("Done - " + GRR.scores.Average() + " " + GRR.gamesPlayed);
            Console.WriteLine("Scores over 1600: " + GRR.scores.Where(s => s >= 1600).Count());

            Console.WriteLine("Done (Altered) - " + NewScores.Average() + " " + GRR.gamesPlayed);
            Console.WriteLine("Scores over 1500 (Altered): " + NewScores.Where(s => s >= 1500).Count());

            /*Console.WriteLine("Evaluation score via distribution evaluation: " + new DistributionWeightEvaluation(null).CalculateFitnessScore(GRR.scores, 5000, 1));
             * Console.WriteLine("Evaluation score via average evaluation: " + new AccurateThresholdEvaluation(null).CalculateFitnessScore(GRR.scores, 5000, 1));
             *
             *
             * Console.WriteLine("Evaluation score via distribution evaluation (All zeroes): " + new DistributionWeightEvaluation(null).CalculateFitnessScore(ZeroScores, 1500, 1));
             * Console.WriteLine("Evaluation score via average evaluation (All zeroes): " + new AccurateThresholdEvaluation(null).CalculateFitnessScore(ZeroScores, 1500, 1));
             *
             * Console.WriteLine("Evaluation score via distribution evaluation (Altered scores): " + new DistributionWeightEvaluation(null).CalculateFitnessScore(NewScores, 5000, 1));
             * Console.WriteLine("Evaluation score via average evaluation (Altered scores): " + new AccurateThresholdEvaluation(null).CalculateFitnessScore(NewScores, 5000, 1));
             *
             * var gaussianDist = new Accord.Statistics.Distributions.Univariate.NormalDistribution(2000, 10);
             *
             * Console.WriteLine("Evaluation score via distribution evaluation (Gaussian scores): " + new DistributionWeightEvaluation(null).CalculateFitnessScore(gaussianDist.Generate(100).ToList(), 2000, 1));
             * Console.WriteLine("Evaluation score via average evaluation (Gaussian scores): " + new AccurateThresholdEvaluation(null).CalculateFitnessScore(gaussianDist.Generate(100).ToList(), 2000, 1));
             */
            Accord.Statistics.Distributions.Univariate.EmpiricalDistribution rdb = new Accord.Statistics.Distributions.Univariate.EmpiricalDistribution(GRR.scores.ToArray(), 25);
            Accord.Controls.DataSeriesBox.Show("Pacman score distribution", rdb.ProbabilityDensityFunction, new Accord.DoubleRange(-2000, 12000));
            Accord.Statistics.Distributions.Univariate.EmpiricalDistribution rdb2 = new Accord.Statistics.Distributions.Univariate.EmpiricalDistribution(NewScores.ToArray());
            Accord.Controls.DataSeriesBox.Show("Pacman score distribution (Altered)", rdb2.ProbabilityDensityFunction, new Accord.DoubleRange(-2000, 12000));

            double[] coef       = { 4, 1 };
            var      skewNormal = new Mixture <NormalDistribution>(coef, new NormalDistribution(2000, 1500), new NormalDistribution(7000, 1500));// new SkewNormalDistribution(4500, 3000, 7.2);

            Accord.Controls.DataSeriesBox.Show("Skew Normal", skewNormal.ProbabilityDensityFunction, new Accord.DoubleRange(-2000, 12000));

            Console.ReadKey();

            return;

            // DEFINE CONTROLLER //
            //BasePacman controller = new MMMCTSCode.MMMCTS();
            //BasePacman controller = new RandomPac();
            //BasePacman controller = new LucPacScripted();
            //BasePacman controller = new LucPac();
            //BasePacman controller = new MMPac.MMPac("NeuralNetwork.nn");
            //BasePacman controller = new MMPac.MMPac(Weights);
            //BasePacman controller = new MMPac.MMLocPac("NeuralNetworkLocPac.nn");

            // Turn off the logging
            if (controller.GetType() == typeof(LucPac) && m_RemainQuiet)
            {
                LucPac.REMAIN_QUIET = true;
            }

            if (controller.GetType() == typeof(LucPacScripted) && m_RemainQuiet)
            {
                LucPacScripted.REMAIN_QUIET = true;
            }

            //BasePacman controller = new SmartDijkstraPac();
            gs.Controller = controller;

            Stopwatch watch      = new Stopwatch();
            int       percentage = -1;
            int       lastUpdate = 0;

            watch.Start();
            while (gamesPlayed < gamesToPlay)
            {
                int newPercentage = (int)Math.Floor(((float)gamesPlayed / gamesToPlay) * 100);
                if (newPercentage != percentage || gamesPlayed - lastUpdate >= 100)
                {
                    lastUpdate = gamesPlayed;
                    percentage = newPercentage;
                    Console.Clear();
                    Console.WriteLine("Simulating ... " + percentage + "% (" + gamesPlayed + " : " + gamesToPlay + ")");
                    Console.WriteLine(" - Elapsed: " + (watch.ElapsedMilliseconds / 1000.0) + "ms");
                    Console.WriteLine(" - Current best: " + highestScore);
                    Console.WriteLine(" - Current worst: " + lowestScore);
                    if (gamesPlayed > 0)
                    {
                        Console.WriteLine(" - Current avg.: " + (totalScore / gamesPlayed));
                    }
                }
                // update gamestate
                Direction direction = controller.Think(gs);
                gs.Pacman.SetDirection(direction);

                // update game
                gs.Update();
                ms += GameState.MSPF;
            }
            watch.Stop();

            // shut down controller
            controller.SimulationFinished();

            // output results
            Console.Clear();
            long seconds = ms / 1000;

            Console.WriteLine("Games played: " + gamesPlayed);
            Console.WriteLine("Avg. score: " + (totalScore / gamesPlayed));
            Console.WriteLine("Highest score: " + highestScore + " points");
            Console.WriteLine("Lowest score: " + lowestScore + " points");
            Console.WriteLine("Max Pills Eaten: " + maxPillsEaten);
            Console.WriteLine("Min Pills Eaten: " + minPillsEaten);
            Console.WriteLine("Average Pills Eaten: " + pillsEatenTotal / gamesPlayed);
            Console.WriteLine("Max Ghosts Eaten: " + maxGhostsEaten);
            Console.WriteLine("Min Ghosts Eaten: " + minGhostsEaten);
            Console.WriteLine("Average Ghosts Eaten: " + totalGhostsEaten / gamesPlayed);
            Console.WriteLine("Longest game: " + ((float)longestGame / 1000.0f) + " seconds");
            Console.WriteLine("Total simulated time: " + (seconds / 60 / 60 / 24) + "d " + ((seconds / 60 / 60) % 24) + "h " + ((seconds / 60) % 60) + "m " + (seconds % 60) + "s");
            Console.WriteLine("Avg. simulated time pr. game: " + ((float)ms / 1000.0f / gamesPlayed) + " seconds");
            Console.WriteLine("Simulation took: " + (watch.ElapsedMilliseconds / 1000.0f) + " seconds");
            Console.WriteLine("Speed: " + (ms / watch.ElapsedMilliseconds) + " (" + ((ms / watch.ElapsedMilliseconds) / 60) + "m " + ((ms / watch.ElapsedMilliseconds) % 60) + " s) simulated seconds pr. second");
            Console.WriteLine("For a total of: " + gamesPlayed / (watch.ElapsedMilliseconds / 1000.0f) + " games pr. second");
            Console.WriteLine();

            //Calculate standard deviation
            double mean       = totalScore / gamesPlayed;
            double totalsqdif = 0;

            foreach (var val in scores)
            {
                totalsqdif += (val - mean) * (val - mean);
            }
            double variance = totalsqdif / gamesPlayed;
            double stddev   = Math.Sqrt(variance);

            Console.WriteLine("Standard deviation of: " + stddev);

            Console.WriteLine("Standard deviation of (Accord): " + scores.ToArray().StandardDeviation());

            //Generates a distribution from existing data
            Accord.Statistics.Distributions.Univariate.EmpiricalDistribution db = new Accord.Statistics.Distributions.Univariate.EmpiricalDistribution(scores.ToArray());

            //Calculates standard deviation
            Console.WriteLine("Standard deviation of (Accord 2): " + db.StandardDeviation);


            double[] sample =
                //{ 1000, 960, 1000, 960, 1000, 600, 100, 1000, 1500};
            { 2000, 2500, 2100, 9000, 1900, 2000, 150, 2100 };
            //{ 60000, 70000, 80000, 90000, 40000, 100000, 200000, 15000, 500000, 44444 };
            //scores.Take(20).ToArray();

            double[] sample2 =
                scores.Take(100).ToArray();

            //Shapiro-Wilk test to check whether the score distribution is normal
            var swT = new Accord.Statistics.Testing.ShapiroWilkTest(scores.ToArray());

            Console.WriteLine("Shapiro Wilk Test on all scores: Statistic - " + swT.Statistic + " , PValue - " + swT.PValue + " , Significant - " + swT.Significant);

            var normalDist = new Accord.Statistics.Distributions.Univariate.NormalDistribution(950, 1200);
            var swT2       = new Accord.Statistics.Testing.ShapiroWilkTest(normalDist.Generate(1000));

            Console.WriteLine("Shapiro Wilk Test on normal dist: Statistic - " + swT2.Statistic + " , PValue - " + swT2.PValue + " , Significant - " + swT2.Significant);


            //Accord.Statistics.Testing.KolmogorovSmirnovTest ks = new Accord.Statistics.Testing.KolmogorovSmirnovTest(sample, db);

            //Console.WriteLine("KS Test: Statistic - " + ks.Statistic + " , PValue - " + ks.PValue + " , Significant - " + ks.Significant);

            //Z-test of whether the sample mean differs from the overall average score per game
            Accord.Statistics.Testing.ZTest ts = new Accord.Statistics.Testing.ZTest(sample, totalScore / gamesPlayed);

            /*sample.Average(),
             * db.StandardDeviation,
             * sample.Length,
             * totalScore / gamesPlayed);
             */
            Console.WriteLine("Z Test: Statistic - " + ts.Statistic + " , PValue - " + ts.PValue + " , Significant - " + ts.Significant);


            Accord.Statistics.Testing.ZTest ts2 = new Accord.Statistics.Testing.ZTest(sample2, totalScore / gamesPlayed);

            /*sample2.Average(),
             * //Accord.Statistics.Tools.StandardDeviation(sample2.ToArray()),
             * db.StandardDeviation,
             * sample2.Length,
             * totalScore / gamesPlayed);
             */
            Console.WriteLine("Z Test 2: Statistic - " + ts2.Statistic + " , PValue - " + ts2.PValue + " , Significant - " + ts2.Significant);

            //% of values that are between given ranges
            Console.WriteLine("Distribution function 0 - 1000: " + db.DistributionFunction(0, 1000));
            Console.WriteLine("Distribution function 1000 - 11000: " + db.DistributionFunction(1000, 11000));
            Console.WriteLine("Distribution function 0 - 500: " + db.DistributionFunction(0, 500));
            Console.WriteLine("Distribution function 1500 - 11000: " + db.DistributionFunction(1500, 11000));

            //Mann-Whitney-Wilcoxon test on whether the two samples come from the same distribution - a high p-value gives no evidence that they differ
            Accord.Statistics.Testing.MannWhitneyWilcoxonTest mwTest = new Accord.Statistics.Testing.MannWhitneyWilcoxonTest(scores.ToArray(), sample2);
            Console.WriteLine("MWW Test: Statistic - " + mwTest.Statistic + " , PValue - " + mwTest.PValue + " , Significant - " + mwTest.Significant);


            Accord.Statistics.Testing.MannWhitneyWilcoxonTest mwTest2 = new Accord.Statistics.Testing.MannWhitneyWilcoxonTest(normalDist.Generate(1000), scores.ToArray());
            Console.WriteLine("MWW Test 2 (actual scores versus normal dist): Statistic - " + mwTest2.Statistic + " , PValue - " + mwTest2.PValue + " , Significant - " + mwTest2.Significant);

            //Accord.Controls.HistogramBox.Show(scores.ToArray());


            //Estimate which known distribution best fits the scores
            var analysis = new Accord.Statistics.Analysis.DistributionAnalysis(scores.ToArray());

            // Compute the analysis
            analysis.Compute();

            // Get the most likely distribution (first)
            var mostLikely = analysis.GoodnessOfFit[0];

            var result = mostLikely.Distribution.ToString();

            Console.WriteLine(result);

            //Plots the distributions
            Accord.Controls.DataSeriesBox.Show("Pacman score distribution", db.ProbabilityDensityFunction, new Accord.DoubleRange(-2000, highestScore));

            Accord.Controls.DataSeriesBox.Show("Normal distribution", normalDist.ProbabilityDensityFunction, new Accord.DoubleRange(-2000, highestScore));

            Accord.Controls.DataSeriesBox.Show("Gamma distribution", mostLikely.Distribution.ProbabilityFunction, new Accord.DoubleRange(-2000, highestScore));



            //Calculate some CDF related malarkey
            //top 20 scores - 1 empirical
            //next 20 scores - 2nd empirical
            //calculate cdf of both
            //calculate cdf of cumulative

            int games1 = 80;
            int games2 = 20;

            Accord.Statistics.Distributions.Univariate.EmpiricalDistribution edb  = new Accord.Statistics.Distributions.Univariate.EmpiricalDistribution(scores.GetRange(0, games1).ToArray());
            Accord.Statistics.Distributions.Univariate.EmpiricalDistribution edb2 = new Accord.Statistics.Distributions.Univariate.EmpiricalDistribution(scores.GetRange(games1, games2).ToArray());
            Accord.Statistics.Distributions.Univariate.EmpiricalDistribution edbC = new Accord.Statistics.Distributions.Univariate.EmpiricalDistribution(scores.GetRange(0, games1 + games2).ToArray());

            var cdf1 = edb.DistributionFunction(800);
            var cdf2 = edb2.DistributionFunction(800);
            var cdfC = edbC.DistributionFunction(800);

            Console.WriteLine("CDF1 = " + cdf1 + ", CDF2 = " + cdf2 + ", Guess = " + (cdf1 * games1 + cdf2 * games2) / (games1 + games2) + ", Actual = " + cdfC);


            //Convolution
            var ScoresA = scores.GetRange(0, games1).ToArray();
            var ScoresB = scores.GetRange(games1, games1).ToArray();

            //var Convolution = ScoresA.Convolve(ScoresB);
            double[] Convolution = new double[games1];
            Accord.Math.Transforms.FourierTransform2.Convolve(ScoresA, ScoresB, Convolution);

            Console.ReadLine();
        }