public void EmpiricalDistributionConstructorTest2()
{
    double[] samples = { 5, 5, 1, 4, 1, 2, 2, 3, 3, 3, 4, 3, 3, 3, 4, 3, 2, 3 };

    EmpiricalDistribution target = new EmpiricalDistribution(samples);

    Assert.AreEqual(samples, target.Samples);
    Assert.AreEqual(0.67595864392399474, target.Smoothing);
}
public void EmpiricalDistributionConstructorTest3()
{
    double[] samples = { 5, 5, 1, 4, 1, 2, 2, 3, 3, 3, 4, 3, 3, 3, 4, 3, 2, 3 };

    EmpiricalDistribution distribution = new EmpiricalDistribution(samples);

    double mean = distribution.Mean;     // 3
    double median = distribution.Median; // 2.9999993064186787
    double var = distribution.Variance;  // 1.2941176470588236

    double chf = distribution.CumulativeHazardFunction(x: 4.2);           // 2.1972245773362191
    double cdf = distribution.DistributionFunction(x: 4.2);               // 0.88888888888888884
    double pdf = distribution.ProbabilityDensityFunction(x: 4.2);         // 0.15552784414141974
    double lpdf = distribution.LogProbabilityDensityFunction(x: 4.2);     // -1.8609305013898356
    double hf = distribution.HazardFunction(x: 4.2);                      // 1.3997505972727771
    double ccdf = distribution.ComplementaryDistributionFunction(x: 4.2); // 0.11111111111111116
    double icdf = distribution.InverseDistributionFunction(p: cdf);       // 4.1999999999999993

    double smoothing = distribution.Smoothing; // 1.9144923416414432

    string str = distribution.ToString(); // Fn(x; S)

    Assert.AreEqual(samples, distribution.Samples);
    Assert.AreEqual(1.9144923416414432, smoothing);
    Assert.AreEqual(3.0, mean);
    Assert.AreEqual(2.9999993064186787, median);
    Assert.AreEqual(1.2941176470588236, var);
    Assert.AreEqual(2.1972245773362191, chf);
    Assert.AreEqual(0.88888888888888884, cdf);
    Assert.AreEqual(0.15552784414141974, pdf);
    Assert.AreEqual(-1.8609305013898356, lpdf);
    Assert.AreEqual(1.3997505972727771, hf);
    Assert.AreEqual(0.11111111111111116, ccdf);
    Assert.AreEqual(4.1999999999999993, icdf);
    Assert.AreEqual("Fn(x; S)", str);
}
public void EmpiricalDistributionConstructorTest2()
{
    double[] samples = { 5, 5, 1, 4, 1, 2, 2, 3, 3, 3, 4, 3, 3, 3, 4, 3, 2, 3 };

    EmpiricalDistribution target = new EmpiricalDistribution(samples);

    Assert.AreEqual(samples, target.Samples);
    Assert.AreEqual(1.9144923416414432, target.Smoothing);
}
public void EmpiricalDistributionConstructorTest3()
{
    double[] samples = { 5, 5, 1, 4, 1, 2, 2, 3, 3, 3, 4, 3, 3, 3, 4, 3, 2, 3 };

    EmpiricalDistribution distribution = new EmpiricalDistribution(samples);

    double mean = distribution.Mean;     // 3
    double median = distribution.Median; // 2.9999993064186787
    double var = distribution.Variance;  // 1.2941176470588236

    double chf = distribution.CumulativeHazardFunction(x: 4.2);           // 2.1972245773362191
    double cdf = distribution.DistributionFunction(x: 4.2);               // 0.88888888888888884
    double pdf = distribution.ProbabilityDensityFunction(x: 4.2);         // 0.181456280142802
    double lpdf = distribution.LogProbabilityDensityFunction(x: 4.2);     // -1.7067405350495708
    double hf = distribution.HazardFunction(x: 4.2);                      // 1.6331065212852196
    double ccdf = distribution.ComplementaryDistributionFunction(x: 4.2); // 0.11111111111111116
    double icdf = distribution.InverseDistributionFunction(p: cdf);       // 4.1999999999999993

    double smoothing = distribution.Smoothing; // 0.67595864392399474

    string str = distribution.ToString(); // Fn(x; S)

    Assert.AreEqual(samples, distribution.Samples);
    Assert.AreEqual(0.67595864392399474, smoothing);
    Assert.AreEqual(3.0, mean);
    Assert.AreEqual(2.9999993064186787, median);
    Assert.AreEqual(1.2941176470588236, var);
    Assert.AreEqual(2.1972245773362191, chf);
    Assert.AreEqual(0.88888888888888884, cdf);
    Assert.AreEqual(0.18145628014280227, pdf);
    Assert.AreEqual(-1.7067405350495708, lpdf);
    Assert.AreEqual(1.6331065212852196, hf);
    Assert.AreEqual(0.11111111111111116, ccdf);
    Assert.AreEqual(4.1999999999999993, icdf);
    Assert.AreEqual("Fn(x; S)", str);
}
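// Note on the two Smoothing defaults asserted above: the 0.67595864392399474 value matches
// Silverman's rule of thumb, h = sigma * (4 / (3 * n))^(1/5), applied to these 18 samples
// (sigma ~ 1.1376, n = 18), while the 1.9144923416414432 value appears to come from the older
// rule exercised as FaultySmoothingRule in FitTest2 further below. The following is only a
// sketch for reproducing the default by hand; BandwidthSketch.SilvermanBandwidth is a
// hypothetical helper, not part of the Accord.NET API.
using System;
using System.Linq;

static class BandwidthSketch
{
    public static double SilvermanBandwidth(double[] samples)
    {
        int n = samples.Length;
        double mean = samples.Average();

        // Sample standard deviation (n - 1 denominator), matching target.StandardDeviation above.
        double sigma = Math.Sqrt(samples.Sum(x => (x - mean) * (x - mean)) / (n - 1));

        return sigma * Math.Pow(4.0 / (3.0 * n), 1.0 / 5.0);
    }
}

// BandwidthSketch.SilvermanBandwidth(new double[] { 5, 5, 1, 4, 1, 2, 2, 3, 3, 3, 4, 3, 3, 3, 4, 3, 2, 3 })
// evaluates to roughly 0.67596, in line with the expected Smoothing above.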
/// <summary>
/// Creates a new object that is a copy of the current instance.
/// </summary>
/// <returns>
/// A new object that is a copy of this instance.
/// </returns>
///
public override object Clone()
{
    EmpiricalDistribution e = new EmpiricalDistribution();

    e.samples = (double[])samples.Clone();
    e.smoothing = smoothing;

    return e;
}
public void EmpiricalDistributionConstructorTest1()
{
    double[] samples = { 5, 5, 1, 4, 1, 2, 2, 3, 3, 3, 4, 3, 3, 3, 4, 3, 2, 3 };
    double smoothing = 0.5;

    EmpiricalDistribution target = new EmpiricalDistribution(samples, smoothing);

    Assert.AreEqual(samples, target.Samples);
    Assert.AreEqual(smoothing, target.Smoothing);

    Assert.AreEqual(3, target.Mean);
    Assert.AreEqual(1.1375929179890421, target.StandardDeviation);

    // The original assertion compared target.Variance with itself, which always passes;
    // compare against the expected value instead (the square of the standard deviation above).
    Assert.AreEqual(1.2941176470588236, target.Variance);
}
/// <summary>
/// Creates a new object that is a copy of the current instance.
/// </summary>
/// <returns>
/// A new object that is a copy of this instance.
/// </returns>
///
public override object Clone()
{
    var clone = new EmpiricalDistribution();

    clone.type = type;
    clone.sumOfWeights = sumOfWeights;
    clone.numberOfSamples = numberOfSamples;
    clone.smoothing = smoothing;
    clone.constant = constant;

    clone.samples = (double[])samples.Clone();

    if (weights != null)
    {
        clone.weights = (double[])weights.Clone();
    }

    if (repeats != null)
    {
        clone.repeats = (int[])repeats.Clone();
    }

    return clone;
}
public void CloneTest()
{
    double[] samples = { 4, 2 };
    EmpiricalDistribution target = new EmpiricalDistribution(samples);

    EmpiricalDistribution clone = (EmpiricalDistribution)target.Clone();

    Assert.AreNotSame(target, clone);
    Assert.AreEqual(target.Entropy, clone.Entropy);
    Assert.AreEqual(target.Mean, clone.Mean);
    Assert.AreNotSame(target.Samples, clone.Samples);
    Assert.AreEqual(target.StandardDeviation, clone.StandardDeviation);
    Assert.AreEqual(target.Variance, clone.Variance);

    for (int i = 0; i < clone.Samples.Length; i++)
        Assert.AreEqual(target.Samples[i], clone.Samples[i]);
}
public void WeightedEmpiricalDistributionConstructorTest3()
{
    double[] weights = { 2, 1, 1, 1, 2, 3, 1, 3, 1, 1, 1, 1 };
    double[] samples = { 5, 1, 4, 1, 2, 3, 4, 3, 4, 3, 2, 3 };

    weights = weights.Divide(weights.Sum());

    var target = new EmpiricalDistribution(samples, weights);

    Assert.AreEqual(1.2377597081667415, target.Smoothing);
}
public void WeightedEmpiricalDistributionConstructorTest2()
{
    double[] original = { 5, 5, 1, 4, 1, 2, 2, 3, 3, 3, 4, 3, 3, 3, 4, 3, 2, 3 };
    var distribution = new EmpiricalDistribution(original);

    double[] weights = { 2, 1, 1, 1, 2, 3, 1, 3, 1, 1, 1, 1 };
    double[] samples = { 5, 1, 4, 1, 2, 3, 4, 3, 4, 3, 2, 3 };

    weights = weights.Divide(weights.Sum());

    var target = new EmpiricalDistribution(samples, weights, distribution.Smoothing);

    Assert.AreEqual(distribution.Mean, target.Mean);
    Assert.AreEqual(distribution.Median, target.Median);
    Assert.AreEqual(distribution.Mode, target.Mode);
    Assert.AreEqual(distribution.Quartiles.Min, target.Quartiles.Min);
    Assert.AreEqual(distribution.Quartiles.Max, target.Quartiles.Max);
    Assert.AreEqual(distribution.Smoothing, target.Smoothing);
    Assert.AreEqual(1.1685534824642432, target.StandardDeviation);
    Assert.AreEqual(distribution.Support.Min, target.Support.Min);
    Assert.AreEqual(distribution.Support.Max, target.Support.Max);
    Assert.AreEqual(1.3655172413793104, target.Variance);
    Assert.AreEqual(target.Weights, weights);
    Assert.AreEqual(target.Samples, samples);

    for (double x = 0; x < 6; x += 0.1)
    {
        double actual, expected;

        expected = distribution.ComplementaryDistributionFunction(x);
        actual = target.ComplementaryDistributionFunction(x);
        Assert.AreEqual(expected, actual, 1e-15);

        expected = distribution.CumulativeHazardFunction(x);
        actual = target.CumulativeHazardFunction(x);
        Assert.AreEqual(expected, actual, 1e-15);

        expected = distribution.DistributionFunction(x);
        actual = target.DistributionFunction(x);
        Assert.AreEqual(expected, actual, 1e-15);

        expected = distribution.HazardFunction(x);
        actual = target.HazardFunction(x);
        Assert.AreEqual(expected, actual, 1e-14);

        expected = distribution.InverseDistributionFunction(Accord.Math.Tools.Scale(0, 6, 0, 1, x));
        actual = target.InverseDistributionFunction(Accord.Math.Tools.Scale(0, 6, 0, 1, x));
        Assert.AreEqual(expected, actual, 1e-14);

        expected = distribution.LogProbabilityDensityFunction(x);
        actual = target.LogProbabilityDensityFunction(x);
        Assert.AreEqual(expected, actual, 1e-15);

        expected = distribution.ProbabilityDensityFunction(x);
        actual = target.ProbabilityDensityFunction(x);
        Assert.AreEqual(expected, actual, 1e-15);

        expected = distribution.QuantileDensityFunction(Accord.Math.Tools.Scale(0, 6, 0, 1, x));
        actual = target.QuantileDensityFunction(Accord.Math.Tools.Scale(0, 6, 0, 1, x));
        Assert.AreEqual(expected, actual, 1e-10);
    }
}
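// The test above relies on the fact that a weighted kernel density estimate whose weights are
// the normalized repeat counts is algebraically identical to the unweighted estimate over the
// expanded sample: sum_j w_j * K_h(x - s_j) equals (1/n) * sum_i K_h(x - x_i) when
// w_j = count_j / n. A minimal sketch of the two sums for checking that equivalence by hand;
// a Gaussian kernel is assumed, and both methods are hypothetical helpers, not library code.
using System;

static class WeightedKdeSketch
{
    static double Kernel(double u)
    {
        return Math.Exp(-0.5 * u * u) / Math.Sqrt(2.0 * Math.PI); // standard normal kernel
    }

    // Weighted estimate; the weights are assumed to sum to one.
    public static double Weighted(double[] samples, double[] weights, double h, double x)
    {
        double sum = 0;
        for (int i = 0; i < samples.Length; i++)
            sum += weights[i] * Kernel((x - samples[i]) / h);
        return sum / h;
    }

    // Unweighted estimate over the expanded (repeated) sample.
    public static double Unweighted(double[] samples, double h, double x)
    {
        double sum = 0;
        foreach (double xi in samples)
            sum += Kernel((x - xi) / h);
        return sum / (samples.Length * h);
    }
}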
public void ProbabilityDensityFunctionTest()
{
    double[] samples = { 1, 5, 2, 5, 1, 7, 1, 9, 4, 2 };
    EmpiricalDistribution target = new EmpiricalDistribution(samples, 1);

    Assert.AreEqual(1.0, target.Smoothing);

    double actual;

    actual = target.ProbabilityDensityFunction(1);
    Assert.AreEqual(0.16854678051819402, actual);

    actual = target.ProbabilityDensityFunction(2);
    Assert.AreEqual(0.15866528844260089, actual);

    actual = target.ProbabilityDensityFunction(3);
    Assert.AreEqual(0.0996000842425018, actual);

    actual = target.ProbabilityDensityFunction(4);
    Assert.AreEqual(0.1008594542833362, actual);

    actual = target.ProbabilityDensityFunction(6);
    Assert.AreEqual(0.078460710909263, actual);

    actual = target.ProbabilityDensityFunction(8);
    Assert.AreEqual(0.049293898826709738, actual);
}
public void LogProbabilityDensityFunctionTest()
{
    double[] samples = { 1, 5, 2, 5, 1, 7, 1, 9, 4, 2 };
    EmpiricalDistribution target = new EmpiricalDistribution(samples, 1);

    Assert.AreEqual(1.0, target.Smoothing);

    double actual;
    double expected;

    actual = target.LogProbabilityDensityFunction(1);
    expected = System.Math.Log(0.16854678051819402);
    Assert.AreEqual(expected, actual, 1e-6);

    actual = target.LogProbabilityDensityFunction(2);
    expected = System.Math.Log(0.15866528844260089);
    Assert.AreEqual(expected, actual, 1e-6);

    actual = target.LogProbabilityDensityFunction(3);
    expected = System.Math.Log(0.0996000842425018);
    Assert.AreEqual(expected, actual, 1e-6);

    actual = target.LogProbabilityDensityFunction(4);
    expected = System.Math.Log(0.1008594542833362);
    Assert.AreEqual(expected, actual, 1e-6);

    actual = target.LogProbabilityDensityFunction(6);
    expected = System.Math.Log(0.078460710909263);
    Assert.AreEqual(expected, actual, 1e-6);

    actual = target.LogProbabilityDensityFunction(8);
    expected = System.Math.Log(0.049293898826709738);
    Assert.AreEqual(expected, actual, 1e-6);
}
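// The expected densities in the two tests above are consistent with a Gaussian kernel density
// estimate, f(x) = (1 / (n * h)) * sum_i K((x - x_i) / h), where K is the standard normal
// density and h is the Smoothing bandwidth (here h = 1). A minimal sketch for reproducing the
// numbers by hand; KdeSketch.GaussianKde is a hypothetical helper, not the library implementation.
using System;

static class KdeSketch
{
    public static double GaussianKde(double[] samples, double h, double x)
    {
        double sum = 0;
        foreach (double xi in samples)
        {
            double u = (x - xi) / h;
            sum += Math.Exp(-0.5 * u * u) / Math.Sqrt(2.0 * Math.PI); // standard normal kernel
        }
        return sum / (samples.Length * h);
    }
}

// KdeSketch.GaussianKde(new double[] { 1, 5, 2, 5, 1, 7, 1, 9, 4, 2 }, 1.0, 1.0) is roughly
// 0.168547, matching the first expected value in ProbabilityDensityFunctionTest above.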
public void FitTest2()
{
    EmpiricalDistribution target = new EmpiricalDistribution(new double[] { 0 });

    double[] observations = { 5, 5, 1, 4, 1, 2, 2, 3, 3, 3, 4, 3, 3, 3, 4, 3, 2, 3 };
    double[] weights = null;
    IFittingOptions options = new EmpiricalOptions { SmoothingRule = FaultySmoothingRule };

    target.Fit(observations, weights, options);

    Assert.AreEqual(1.9144923416414432, target.Smoothing);
    Assert.AreNotSame(observations, target.Samples);
    CollectionAssert.AreEqual(observations, target.Samples);
}
public void FitTest1()
{
    EmpiricalDistribution target = new EmpiricalDistribution(new double[] { 0 });

    double[] observations = { 1, 5, 2, 5, 1, 7, 1, 9, 4, 2 };
    double[] weights = null;
    IFittingOptions options = null;

    target.Fit(observations, weights, options);

    Assert.AreEqual(1.8652004071576875, target.Smoothing);
    Assert.AreNotSame(observations, target.Samples);
    CollectionAssert.AreEqual(observations, target.Samples);
}
public void FitTest()
{
    EmpiricalDistribution target = new EmpiricalDistribution(new double[] { 0 });

    double[] observations = { 1, 5, 2, 5, 1, 7, 1, 9, 4, 2 };
    double[] weights = null;
    IFittingOptions options = null;

    target.Fit(observations, weights, options);

    Assert.AreNotSame(observations, target.Samples);
    for (int i = 0; i < observations.Length; i++)
        Assert.AreEqual(observations[i], target.Samples[i]);
}
public void MedianTest()
{
    double[] samples = { 1, 5, 2, 5, 1, 7, 1, 9 };
    EmpiricalDistribution target = new EmpiricalDistribution(samples);

    Assert.AreEqual(target.Median, target.InverseDistributionFunction(0.5));
}
public void DistributionFunctionTest()
{
    double[] samples = { 1, 5, 2, 5, 1, 7, 1, 9 };
    EmpiricalDistribution target = new EmpiricalDistribution(samples);

    Assert.AreEqual(0.000, target.DistributionFunction(0));
    Assert.AreEqual(0.375, target.DistributionFunction(1));
    Assert.AreEqual(0.500, target.DistributionFunction(2));
    Assert.AreEqual(0.750, target.DistributionFunction(5));
    Assert.AreEqual(0.875, target.DistributionFunction(7));
    Assert.AreEqual(1.000, target.DistributionFunction(9));
}
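// The expected values above follow directly from the definition of the empirical distribution
// function, Fn(x) = (number of samples <= x) / n: of the eight samples, three are <= 1 (0.375),
// four are <= 2 (0.500), six are <= 5 (0.750), seven are <= 7 (0.875), and all eight are <= 9.
// A minimal counting sketch; EcdfSketch.Ecdf is a hypothetical helper, not the library implementation.
using System.Linq;

static class EcdfSketch
{
    public static double Ecdf(double[] samples, double x)
    {
        // Proportion of samples at or below x.
        return samples.Count(s => s <= x) / (double)samples.Length;
    }
}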
static void Main(string[] args)
{
    Process _process = Process.GetCurrentProcess();

    // Whether command-line arguments were supplied and should be used instead of the prompts below.
    bool _takearguments = false;

    if (args.Length > 0)
    {
        Console.WriteLine("Arguments found.");
        _takearguments = true;
        HandleArguments(args);
    }

    string HostName = "localhost";

    if (!_takearguments)
    {
        Console.WriteLine("Host name:");
        HostName = Console.ReadLine();
        Console.Clear();

        Console.WriteLine("How many games do you wish to simulate?");
        string _count = Console.ReadLine();
        int _result = 0;

        // Make sure the value that has been entered is in fact a valid integer.
        while (!int.TryParse(_count, out _result))
        {
            Console.Clear();
            Console.WriteLine("Please try again: ");
            _count = Console.ReadLine();
        }

        // Set the new count of games that we want to simulate.
        gamesToPlay = _result;
        Console.Clear();

        string _consoleoutput = "";
        while (_consoleoutput != "n" && _consoleoutput != "y")
        {
            // Ask whether log output should be silenced while the simulation runs.
            Console.WriteLine("Silence output?");
            _consoleoutput = Console.ReadLine();
        }

        if (_consoleoutput == "n")
        {
            m_RemainQuiet = false;
        }
        else if (_consoleoutput == "y")
        {
            m_RemainQuiet = true;
        }
    }

    //RunGamesParallel();
    //return;

    // Get some strange invocation error here.
    // tryLoadController(_agentName);

    gs = new GameState(125);
    gs.GameOver += new EventHandler(GameOverHandler);
    gs.StartPlay();

    BasePacman controller = new LucPacScripted();

    Console.WriteLine("Choose an AI agent to control Pacman:");
    Console.WriteLine(" 1 - LucPacScripted");
    Console.WriteLine(" 2 - LucPac (MCTS)");
    Console.WriteLine(" 3 - MMLocPac (Evolved Neural Network) from .nn file");
    Console.WriteLine(" 5 - SimRandom");

    int Selection = int.Parse(Console.ReadKey().KeyChar.ToString());

    switch (Selection)
    {
        case 1:
            controller = new LucPacScripted();
            break;
        case 2:
            controller = new LucPac();
            break;
        case 3:
            controller = new MMPac.MMLocPac("NeuralNetworkLocPac.nn");
            break;
        default:
            controller = new RandomPac();
            break;
    }

    var GR = new GameRunner();

    var Base = new double[9] { 3.0, 2.8, 2.8, 2.8, 2.8, 1.5, 1.5, 1.5, 1.5 };
    var Params = new double[9] { 0.07, 0.01, 0.02, -0.16, 0.06, -0.05, 0, 0.06, -0.09 };
    //var Params = new double[9] { -0.17, 0.01, 0.02, -0.16, 0.06, -0.05, 0, 0.06, -0.09 };
    // Params = Params.Add(Base);

    string TestAgent = "PacmanAI.UncertainAgent,PacmanAI";

    var GRR = GR.RunGamesOnline(HostName, gamesToPlay, controller.GetType().Name, //TestAgent
        new Random().Next(), //null
        new List<double>(Params));

    var NewScores = new List<double>();
    NewScores.AddRange(GRR.scores);
    for (int i = 0; i < NewScores.Count; i++)
    {
        NewScores[i] += 9000;
    }
    NewScores.AddRange(GRR.scores);

    var ZeroScores = new List<double>();
    for (int i = 0; i < 100; i++)
    {
        ZeroScores.Add(0);
    }

    Console.WriteLine("Done - " + GRR.scores.Average() + " " + GRR.gamesPlayed);
    Console.WriteLine("Scores over 1600: " + GRR.scores.Where(s => s >= 1600).Count());
    Console.WriteLine("Done (Altered) - " + NewScores.Average() + " " + GRR.gamesPlayed);
    Console.WriteLine("Scores over 1500 (Altered): " + NewScores.Where(s => s >= 1500).Count());

    /*Console.WriteLine("Evaluation score via distribution evaluation: " + new DistributionWeightEvaluation(null).CalculateFitnessScore(GRR.scores, 5000, 1));
    Console.WriteLine("Evaluation score via average evaluation: " + new AccurateThresholdEvaluation(null).CalculateFitnessScore(GRR.scores, 5000, 1));

    Console.WriteLine("Evaluation score via distribution evaluation (All zeroes): " + new DistributionWeightEvaluation(null).CalculateFitnessScore(ZeroScores, 1500, 1));
    Console.WriteLine("Evaluation score via average evaluation (All zeroes): " + new AccurateThresholdEvaluation(null).CalculateFitnessScore(ZeroScores, 1500, 1));

    Console.WriteLine("Evaluation score via distribution evaluation (Altered scores): " + new DistributionWeightEvaluation(null).CalculateFitnessScore(NewScores, 5000, 1));
    Console.WriteLine("Evaluation score via average evaluation (Altered scores): " + new AccurateThresholdEvaluation(null).CalculateFitnessScore(NewScores, 5000, 1));

    var gaussianDist = new Accord.Statistics.Distributions.Univariate.NormalDistribution(2000, 10);

    Console.WriteLine("Evaluation score via distribution evaluation (Gaussian scores): " + new DistributionWeightEvaluation(null).CalculateFitnessScore(gaussianDist.Generate(100).ToList(), 2000, 1));
    Console.WriteLine("Evaluation score via average evaluation (Gaussian scores): " + new AccurateThresholdEvaluation(null).CalculateFitnessScore(gaussianDist.Generate(100).ToList(), 2000, 1));
    */

    Accord.Statistics.Distributions.Univariate.EmpiricalDistribution rdb =
        new Accord.Statistics.Distributions.Univariate.EmpiricalDistribution(GRR.scores.ToArray(), 25);
    Accord.Controls.DataSeriesBox.Show("Pacman score distribution", rdb.ProbabilityDensityFunction, new Accord.DoubleRange(-2000, 12000));

    Accord.Statistics.Distributions.Univariate.EmpiricalDistribution rdb2 =
        new Accord.Statistics.Distributions.Univariate.EmpiricalDistribution(NewScores.ToArray());
    Accord.Controls.DataSeriesBox.Show("Pacman score distribution (Altered)", rdb2.ProbabilityDensityFunction, new Accord.DoubleRange(-2000, 12000));

    double[] coef = { 4, 1 };
    var skewNormal = new Mixture<NormalDistribution>(coef, new NormalDistribution(2000, 1500), new NormalDistribution(7000, 1500)); // new SkewNormalDistribution(4500, 3000, 7.2);
    Accord.Controls.DataSeriesBox.Show("Skew Normal", skewNormal.ProbabilityDensityFunction, new Accord.DoubleRange(-2000, 12000));

    Console.ReadKey();
    return;

    // DEFINE CONTROLLER //
    //BasePacman controller = new MMMCTSCode.MMMCTS();
    //BasePacman controller = new RandomPac();
    //BasePacman controller = new LucPacScripted();
    //BasePacman controller = new LucPac();
    //BasePacman controller = new MMPac.MMPac("NeuralNetwork.nn");
    //BasePacman controller = new MMPac.MMPac(Weights);
    //BasePacman controller = new MMPac.MMLocPac("NeuralNetworkLocPac.nn");

    // Turn off the logging
    if (controller.GetType() == typeof(LucPac) && m_RemainQuiet)
    {
        LucPac.REMAIN_QUIET = true;
    }
    if (controller.GetType() == typeof(LucPacScripted) && m_RemainQuiet)
    {
        LucPacScripted.REMAIN_QUIET = true;
    }

    //BasePacman controller = new SmartDijkstraPac();

    gs.Controller = controller;

    Stopwatch watch = new Stopwatch();
    int percentage = -1;
    int lastUpdate = 0;
    watch.Start();

    while (gamesPlayed < gamesToPlay)
    {
        int newPercentage = (int)Math.Floor(((float)gamesPlayed / gamesToPlay) * 100);
        if (newPercentage != percentage || gamesPlayed - lastUpdate >= 100)
        {
            lastUpdate = gamesPlayed;
            percentage = newPercentage;
            Console.Clear();
            Console.WriteLine("Simulating ... " + percentage + "% (" + gamesPlayed + " : " + gamesToPlay + ")");
            Console.WriteLine(" - Elapsed: " + (watch.ElapsedMilliseconds / 1000.0) + " s");
            Console.WriteLine(" - Current best: " + highestScore);
            Console.WriteLine(" - Current worst: " + lowestScore);
            if (gamesPlayed > 0)
            {
                Console.WriteLine(" - Current avg.: " + (totalScore / gamesPlayed));
            }
        }

        // Update the game state with the controller's next move.
        Direction direction = controller.Think(gs);
        gs.Pacman.SetDirection(direction);

        // Update the game.
        gs.Update();
        ms += GameState.MSPF;
    }
    watch.Stop();

    // Shut down the controller.
    controller.SimulationFinished();

    // Output the results.
    Console.Clear();
    long seconds = ms / 1000;
    Console.WriteLine("Games played: " + gamesPlayed);
    Console.WriteLine("Avg. score: " + (totalScore / gamesPlayed));
    Console.WriteLine("Highest score: " + highestScore + " points");
    Console.WriteLine("Lowest score: " + lowestScore + " points");
    Console.WriteLine("Max Pills Eaten: " + maxPillsEaten);
    Console.WriteLine("Min Pills Eaten: " + minPillsEaten);
    Console.WriteLine("Average Pills Eaten: " + pillsEatenTotal / gamesPlayed);
    Console.WriteLine("Max Ghosts Eaten: " + maxGhostsEaten);
    Console.WriteLine("Min Ghosts Eaten: " + minGhostsEaten);
    Console.WriteLine("Average Ghosts Eaten: " + totalGhostsEaten / gamesPlayed);
    Console.WriteLine("Longest game: " + ((float)longestGame / 1000.0f) + " seconds");
    Console.WriteLine("Total simulated time: " + (seconds / 60 / 60 / 24) + "d " + ((seconds / 60 / 60) % 24) + "h " + ((seconds / 60) % 60) + "m " + (seconds % 60) + "s");
    Console.WriteLine("Avg. simulated time pr. game: " + ((float)ms / 1000.0f / gamesPlayed) + " seconds");
    Console.WriteLine("Simulation took: " + (watch.ElapsedMilliseconds / 1000.0f) + " seconds");
    Console.WriteLine("Speed: " + (ms / watch.ElapsedMilliseconds) + " (" + ((ms / watch.ElapsedMilliseconds) / 60) + "m " + ((ms / watch.ElapsedMilliseconds) % 60) + " s) simulated seconds pr. second");
    Console.WriteLine("For a total of: " + gamesPlayed / (watch.ElapsedMilliseconds / 1000.0f) + " games pr. second");
    Console.WriteLine();

    // Calculate the standard deviation by hand.
    double mean = totalScore / gamesPlayed;
    double totalsqdif = 0;
    foreach (var val in scores)
    {
        totalsqdif += (val - mean) * (val - mean);
    }
    double variance = totalsqdif / gamesPlayed;
    double stddev = Math.Sqrt(variance);
    Console.WriteLine("Standard deviation of: " + stddev);
    Console.WriteLine("Standard deviation of (Accord): " + scores.ToArray().StandardDeviation());

    // Generate an empirical distribution from the existing score data.
    Accord.Statistics.Distributions.Univariate.EmpiricalDistribution db =
        new Accord.Statistics.Distributions.Univariate.EmpiricalDistribution(scores.ToArray());

    // Its standard deviation should agree with the values above.
    Console.WriteLine("Standard deviation of (Accord 2): " + db.StandardDeviation);

    double[] sample =
        //{ 1000, 960, 1000, 960, 1000, 600, 100, 1000, 1500};
        { 2000, 2500, 2100, 9000, 1900, 2000, 150, 2100 };
        //{ 60000, 70000, 80000, 90000, 40000, 100000, 200000, 15000, 500000, 44444 };
        //scores.Take(20).ToArray();

    double[] sample2 = scores.Take(100).ToArray();

    // Shapiro-Wilk test to see whether the score distribution is normal.
    var swT = new Accord.Statistics.Testing.ShapiroWilkTest(scores.ToArray());
    Console.WriteLine("Shapiro Wilk Test on all scores: Statistic - " + swT.Statistic + " , PValue - " + swT.PValue + " , Significant - " + swT.Significant);

    var normalDist = new Accord.Statistics.Distributions.Univariate.NormalDistribution(950, 1200);
    var swT2 = new Accord.Statistics.Testing.ShapiroWilkTest(normalDist.Generate(1000));
    Console.WriteLine("Shapiro Wilk Test on normal dist: Statistic - " + swT2.Statistic + " , PValue - " + swT2.PValue + " , Significant - " + swT2.Significant);

    //Accord.Statistics.Testing.KolmogorovSmirnovTest ks = new Accord.Statistics.Testing.KolmogorovSmirnovTest(sample, db);
    //Console.WriteLine("KS Test: Statistic - " + ks.Statistic + " , PValue - " + ks.PValue + " , Significant - " + ks.Significant);

    // Probability that the given scores were sampled from the previous distribution.
    Accord.Statistics.Testing.ZTest ts = new Accord.Statistics.Testing.ZTest(sample, totalScore / gamesPlayed);
        /*sample.Average(),
        db.StandardDeviation,
        sample.Length,
        totalScore / gamesPlayed); */
    Console.WriteLine("Z Test: Statistic - " + ts.Statistic + " , PValue - " + ts.PValue + " , Significant - " + ts.Significant);

    Accord.Statistics.Testing.ZTest ts2 = new Accord.Statistics.Testing.ZTest(sample2, totalScore / gamesPlayed);
        /*sample2.Average(),
        //Accord.Statistics.Tools.StandardDeviation(sample2.ToArray()),
        db.StandardDeviation,
        sample2.Length,
        totalScore / gamesPlayed); */
    Console.WriteLine("Z Test 2: Statistic - " + ts2.Statistic + " , PValue - " + ts2.PValue + " , Significant - " + ts2.Significant);

    // Proportion of scores that fall between the given ranges.
    Console.WriteLine("Distribution function 0 - 1000: " + db.DistributionFunction(0, 1000));
    Console.WriteLine("Distribution function 1000 - 11000: " + db.DistributionFunction(1000, 11000));
    Console.WriteLine("Distribution function 0 - 500: " + db.DistributionFunction(0, 500));
    Console.WriteLine("Distribution function 1500 - 11000: " + db.DistributionFunction(1500, 11000));

    // Mann-Whitney-Wilcoxon test on whether two samples come from the same distribution
    // (a high p-value means they likely do).
    Accord.Statistics.Testing.MannWhitneyWilcoxonTest mwTest = new Accord.Statistics.Testing.MannWhitneyWilcoxonTest(scores.ToArray(), sample2);
    Console.WriteLine("MWW Test: Statistic - " + mwTest.Statistic + " , PValue - " + mwTest.PValue + " , Significant - " + mwTest.Significant);
    Accord.Statistics.Testing.MannWhitneyWilcoxonTest mwTest2 = new Accord.Statistics.Testing.MannWhitneyWilcoxonTest(normalDist.Generate(1000), scores.ToArray());
    Console.WriteLine("MWW Test 2 (actual scores versus normal dist): Statistic - " + mwTest2.Statistic + " , PValue - " + mwTest2.PValue + " , Significant - " + mwTest2.Significant);

    //Accord.Controls.HistogramBox.Show(scores.ToArray());

    // Guess what distribution this is.
    var analysis = new Accord.Statistics.Analysis.DistributionAnalysis(scores.ToArray());

    // Compute the analysis.
    analysis.Compute();

    // Get the most likely distribution (the first entry).
    var mostLikely = analysis.GoodnessOfFit[0];
    var result = mostLikely.Distribution.ToString();
    Console.WriteLine(result);

    // Plot the distributions.
    Accord.Controls.DataSeriesBox.Show("Pacman score distribution", db.ProbabilityDensityFunction, new Accord.DoubleRange(-2000, highestScore));
    Accord.Controls.DataSeriesBox.Show("Normal distribution", normalDist.ProbabilityDensityFunction, new Accord.DoubleRange(-2000, highestScore));
    Accord.Controls.DataSeriesBox.Show("Gamma distribution", mostLikely.Distribution.ProbabilityFunction, new Accord.DoubleRange(-2000, highestScore));

    // Calculate some CDF-related quantities:
    // first batch of scores  -> 1st empirical distribution
    // next batch of scores   -> 2nd empirical distribution
    // calculate the CDF of both, then the CDF of the combined batch.
    int games1 = 80;
    int games2 = 20;

    Accord.Statistics.Distributions.Univariate.EmpiricalDistribution edb =
        new Accord.Statistics.Distributions.Univariate.EmpiricalDistribution(scores.GetRange(0, games1).ToArray());
    Accord.Statistics.Distributions.Univariate.EmpiricalDistribution edb2 =
        new Accord.Statistics.Distributions.Univariate.EmpiricalDistribution(scores.GetRange(games1, games2).ToArray());
    Accord.Statistics.Distributions.Univariate.EmpiricalDistribution edbC =
        new Accord.Statistics.Distributions.Univariate.EmpiricalDistribution(scores.GetRange(0, games1 + games2).ToArray());

    var cdf1 = edb.DistributionFunction(800);
    var cdf2 = edb2.DistributionFunction(800);
    var cdfC = edbC.DistributionFunction(800);

    Console.WriteLine("CDF1 = " + cdf1 + ", CDF2 = " + cdf2 + ", Guess = " + (cdf1 * games1 + cdf2 * games2) / (games1 + games2) + ", Actual = " + cdfC);

    // Convolution
    var ScoresA = scores.GetRange(0, games1).ToArray();
    var ScoresB = scores.GetRange(games1, games1).ToArray();
    //var Convolution = ScoresA.Convolve(ScoresB);
    double[] Convolution = new double[games1];
    Accord.Math.Transforms.FourierTransform2.Convolve(ScoresA, ScoresB, Convolution);

    Console.ReadLine();
}
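// Aside on the "Guess" versus "Actual" comparison printed above: when DistributionFunction is
// the plain step ECDF (as the DistributionFunctionTest earlier suggests), the pooled CDF is
// exactly the count-weighted average of the two subgroup CDFs, (cdf1 * games1 + cdf2 * games2)
// / (games1 + games2), because the pooled count of scores at or below the threshold is just the
// sum of the two subgroup counts. A minimal check of that identity on raw arrays, independent of
// Accord; PooledEcdfCheck is a hypothetical helper.
using System;
using System.Linq;

static class PooledEcdfCheck
{
    static double Ecdf(double[] xs, double t)
    {
        return xs.Count(v => v <= t) / (double)xs.Length;
    }

    public static bool IdentityHolds(double[] a, double[] b, double t)
    {
        double pooled = Ecdf(a.Concat(b).ToArray(), t);
        double guess = (Ecdf(a, t) * a.Length + Ecdf(b, t) * b.Length) / (a.Length + b.Length);
        return Math.Abs(pooled - guess) < 1e-12;
    }
}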