public void GetRandomItemReturnsUnequallyProbableItemsWithExpectedProbability()
{
    // Arrange: four items, with "Village" given extra weight, and one stubbed
    // random number per expected draw (5 draws total).
    var items = new[] { "Moat", "Smithy", "Village", "Market" };
    var randomNumbers = new[] { 0, 1, 2, 3, 4 };
    var distribution = new ProbabilityDistribution(new RandomNumberProviderStub(randomNumbers), items);
    distribution.IncreaseLikelihood("Village");

    var occurrences = new Dictionary<string, int>();
    foreach (var item in items)
    {
        occurrences[item] = 0;
    }

    // Act: draw once per stubbed random number and tally each result.
    for (int draw = 0; draw < randomNumbers.Length; draw++)
    {
        occurrences[distribution.RandomItem(items)]++;
    }

    // Assert: the boosted item comes back twice as often as the others.
    var expected = new Dictionary<string, int>
    {
        { "Moat", 1 },
        { "Smithy", 1 },
        { "Village", 2 },
        { "Market", 1 },
    };
    CollectionAssert.AreEquivalent(expected, occurrences);
}
/// <summary>
/// Asserts that querying either the CDF or the PDF of <paramref name="dist"/>
/// at <paramref name="limit"/> (a value outside the distribution's support)
/// is rejected with an <see cref="ArgumentException"/>.
/// </summary>
/// <param name="dist">The distribution under test.</param>
/// <param name="limit">The out-of-support argument expected to be rejected.</param>
private void assertLimit(ProbabilityDistribution<double> dist, double limit)
{
    try
    {
        dist.getCDF(limit);
        Assert.fail();
    }
    // BUGFIX: the Java-to-C# converter emitted the non-existent type
    // "legalArgumentException" (truncated IllegalArgumentException); the C#
    // equivalent thrown for invalid arguments is ArgumentException.
    catch (ArgumentException)
    {
        // Expected
    }

    try
    {
        dist.getPDF(limit);
        Assert.fail();
    }
    catch (ArgumentException)
    {
        // Expected
    }
}
public void TestExample()
{
    // Joint table from a textbook example; marginals are derived from it.
    p_xy = new[,] { { 1 / 4f, 1 / 4f }, { 1 / 2f, 0f } };
    p_x = ProbabilityDistribution.MarginalX(p_xy);
    p_y = ProbabilityDistribution.MarginalY(p_xy);

    Assert.True(ProbabilityDistribution.IsValid(p_x));
    Assert.True(ProbabilityDistribution.IsValid(p_y));
    Assert.True(ProbabilityDistribution.IsValid(p_xy));

    // Assert.Equal(2f, ProbabilityDistribution.Entropy(p_x, 2));
    // Assert.Equal(7/4f, ProbabilityDistribution.Entropy(p_y, 2));

    // Joint entropy in base 2, base |p_xy|, and the default base.
    Assert.Equal(3 / 2f, ProbabilityDistribution.JointEntropy(p_xy, 2));
    Assert.Equal(3 / 4f, ProbabilityDistribution.JointEntropy(p_xy, p_xy.Length));
    Assert.Equal(1.04f, ProbabilityDistribution.JointEntropy(p_xy), 2);

    // Conditional entropies with the same variety of log bases.
    Assert.Equal(1 / 2f, ProbabilityDistribution.ConditionalEntropyYX(p_xy, p_x, 2));
    Assert.NotEqual(1 / 4f, ProbabilityDistribution.ConditionalEntropyYX(p_xy, p_x));
    Assert.Equal(1 / 4f, ProbabilityDistribution.ConditionalEntropyYX(p_xy, p_x, p_xy.Length), 2);
    Assert.Equal(1 / 2f, ProbabilityDistribution.ConditionalEntropyYX(p_xy, p_x, p_x.Length), 2);
    Assert.Equal(0.35f, ProbabilityDistribution.ConditionalEntropyYX(p_xy, p_x), 2);
    // NOTE(review): these two pass p_y to ConditionalEntropyYX (not XY) —
    // possibly intentional, but worth confirming against the API's contract.
    Assert.Equal(0.29f, ProbabilityDistribution.ConditionalEntropyYX(p_xy, p_y, 2), 2);
    Assert.Equal(0.2f, ProbabilityDistribution.ConditionalEntropyYX(p_xy, p_y), 2);

    // Assert.Equal(3/8f, ProbabilityDistribution.MutualInformation(p_x, p_y, p_xy, 2));
}
/// <summary>
/// Rolls tomorrow's non-system weather from a weighted distribution
/// (default "sunny", plus "rain" and optionally "debris") and hands the
/// result to SetWeatherTomorrow.
/// </summary>
internal static void SetWeatherNonSystemForTomorrow(MersenneTwister Dice, FerngillClimate GameClimate, double rainDays, double stormDays, double windyDays, RangePair TmrwTemps)
{
    // "sunny" is the fallback outcome of the distribution.
    var WeatherDist = new ProbabilityDistribution<string>("sunny");
    WeatherDist.AddNewEndPoint(rainDays, "rain");
    // NOTE(review): "debris" is only added when DisableHighRainWind is set —
    // confirm this gate is intended rather than inverted.
    if (ClimatesOfFerngill.WeatherOpt.DisableHighRainWind)
    {
        WeatherDist.AddNewCappedEndPoint(windyDays, "debris");
    }

    double distOdd = Dice.NextDoublePositive();

    if (ClimatesOfFerngill.WeatherOpt.Verbose)
    {
        ClimatesOfFerngill.Logger.Log(WeatherDist.ToString());
        ClimatesOfFerngill.Logger.Log($"Distribution odds is {distOdd}");
    }

    // Fall back to sunny if the roll fails to map onto an entry.
    if (!WeatherDist.GetEntryFromProb(distOdd, out string Result))
    {
        Result = "sunny";
        ClimatesOfFerngill.Logger.Log("The weather has failed to process in some manner. Falling back to [sunny]", LogLevel.Info);
    }

    if (ClimatesOfFerngill.WeatherOpt.Verbose)
    {
        ClimatesOfFerngill.Logger.Log($"Weather result is {Result}");
    }

    SetWeatherTomorrow(Result, Dice, GameClimate, stormDays, TmrwTemps);
}
/// <summary>
/// Verifies enumeration over the classic ToothAche/Cavity/Catch full joint
/// distribution: P(Cavity | ToothAche = true) should be &lt;0.6, 0.4&gt;.
/// </summary>
public void testBasicUsage()
{
    // Full joint distribution over (ToothAche, Cavity, Catch):
    // P[T, T, T] = 0.108; P[T, T, F] = 0.012; P[F, T, T] = 0.072; P[F, T, F] = 0.008
    // P[T, F, T] = 0.016; P[T, F, F] = 0.064; P[F, F, T] = 0.144; P[F, F, F] = 0.576
    ProbabilityDistribution jp = new ProbabilityDistribution("ToothAche", "Cavity", "Catch");
    jp.set(0.108, true, true, true);
    jp.set(0.012, true, true, false);
    jp.set(0.072, false, true, true);
    jp.set(0.008, false, true, false);
    jp.set(0.016, true, false, true);
    jp.set(0.064, true, false, false);
    jp.set(0.144, false, false, true);
    // BUGFIX: was 0.008, contradicting the table above (P[F, F, F] = 0.576)
    // and leaving the joint summing to 0.432 instead of 1. The asserted query
    // result is unaffected (it only involves ToothAche = true cells).
    jp.set(0.576, false, false, false);

    Query q = new Query("Cavity", new String[] { "ToothAche" }, new bool[] { true });
    double[] probs = EnumerateJointAsk.ask(q, jp);
    Assert.AreEqual(0.6, probs[0], 0.001);
    Assert.AreEqual(0.4, probs[1], 0.001);
}
/// <summary>
/// Asserts that the inverse CDF round-trips the CDF at each point in
/// <paramref name="x"/> (within EPS), and that probabilities outside [0, 1]
/// are rejected with an <see cref="ArgumentException"/>.
/// </summary>
/// <param name="x">Sample points inside the distribution's support.</param>
/// <param name="dist">The distribution under test.</param>
protected internal virtual void assertInverseCDF(double[] x, ProbabilityDistribution<double> dist)
{
    assertInverseCDFWithNull(dist);
    foreach (double d in x)
    {
        assertEquals(dist.getInverseCDF(dist.getCDF(d)), d, EPS);
    }
    try
    {
        dist.getInverseCDF(3.4); // probability > 1 is invalid
        Assert.fail();
    }
    // BUGFIX: the converter emitted the non-existent "legalArgumentException"
    // (truncated Java IllegalArgumentException); use C#'s ArgumentException.
    catch (ArgumentException)
    {
        // Expected
    }
    try
    {
        dist.getInverseCDF(-0.2); // negative probability is invalid
        Assert.fail();
    }
    catch (ArgumentException)
    {
        // Expected
    }
}
public void TestReadmeExample0()
{
    int binCount = 3;
    Tally<float> tally = new Tally<float>(binCount, x => (int)(x * binCount));

    // In real use this would be fed repeatedly, e.g. tally.Add(neuron.value);
    // here we supply fake values for demonstration purposes.
    tally.Add(0.2f);
    tally.Add(0.1f);
    tally.Add(0.4f);
    tally.Add(0.5f);

    // Analyze the tallied distribution.
    float[] p = tally.probability;
    Assert.Equal(new[] { 2 / 4f, 2 / 4f, 0f }, p);

    // Entropy without any normalization.
    float H = ProbabilityDistribution.Entropy(p);
    Assert.Equal(0.7f, H, 1);

    // With a base of 2 the entropy is measured in bits.
    float Hbits = ProbabilityDistribution.Entropy(p, 2);
    Assert.Equal(1f, Hbits, 1);
    // So this neuron's value carries one bit of information: it lands in the
    // first or second bin with equal probability and never in the third.
}
public void TestThreeIndependentCoinFlips()
{
    // Uniform 3x3 joint: X and Y are independent and each uniform over 3 values.
    p_xy = new[,] { { 1 / 9f, 1 / 9f, 1 / 9f }, { 1 / 9f, 1 / 9f, 1 / 9f }, { 1 / 9f, 1 / 9f, 1 / 9f } };
    p_x = ProbabilityDistribution.MarginalX(p_xy);
    p_y = ProbabilityDistribution.MarginalY(p_xy);

    Assert.True(ProbabilityDistribution.IsValid(p_x));
    Assert.True(ProbabilityDistribution.IsValid(p_y));
    Assert.True(ProbabilityDistribution.IsValid(p_xy));

    // Marginal entropies: log2(3) ~= 1.58 bits; 1 when normalized by base 3.
    Assert.Equal(1.58f, ProbabilityDistribution.Entropy(p_x, 2), 2);
    Assert.Equal(1.58f, ProbabilityDistribution.Entropy(p_y, 2), 2);
    Assert.Equal(1f, ProbabilityDistribution.Entropy(p_x, p_x.Length), 2);
    Assert.Equal(1f, ProbabilityDistribution.Entropy(p_y, p_y.Length), 2);
    Assert.Equal(1.1f, ProbabilityDistribution.Entropy(p_x), 2);
    Assert.Equal(1.1f, ProbabilityDistribution.Entropy(p_y), 2);

    // Joint entropy: log2(9) ~= 3.17 bits; 1 in base |p_xy| = 9.
    Assert.Equal(3.17f, ProbabilityDistribution.JointEntropy(p_xy, 2), 2);
    Assert.Equal(1f, ProbabilityDistribution.JointEntropy(p_xy, p_xy.Length), 2);
    Assert.Equal(2.2f, ProbabilityDistribution.JointEntropy(p_xy), 2);

    // Conditioning on an independent variable changes nothing: H(Y|X) = H(Y).
    Assert.Equal(1.58f, ProbabilityDistribution.ConditionalEntropyYX(p_xy, p_x, 2), 2);
    Assert.Equal(0.5f, ProbabilityDistribution.ConditionalEntropyYX(p_xy, p_x, p_xy.Length), 2);
    Assert.Equal(1f, ProbabilityDistribution.ConditionalEntropyYX(p_xy, p_x, p_x.Length), 2);
    Assert.Equal(1.1f, ProbabilityDistribution.ConditionalEntropyYX(p_xy, p_x), 2);
    Assert.Equal(1.58f, ProbabilityDistribution.ConditionalEntropyXY(p_xy, p_y, 2), 2);
    Assert.Equal(1.1f, ProbabilityDistribution.ConditionalEntropyXY(p_xy, p_y), 2);

    // Independence means zero mutual information in every base.
    Assert.Equal(0f, ProbabilityDistribution.MutualInformation(p_x, p_y, p_xy, 2), 2);
    Assert.Equal(0f, ProbabilityDistribution.MutualInformation(p_x, p_y, p_xy), 2);
    Assert.Equal(0f, ProbabilityDistribution.MutualInformation(p_x, p_y, p_xy, p_xy.Length), 2);
    Assert.Equal(0f, ProbabilityDistribution.MutualInformation(p_x, p_y, p_xy, p_x.Length), 2);
}
public void TestMarginalDistributionX()
{
    // Joint table from Cover &amp; Thomas-style worked example.
    p_xy = new[,] {
        { 1 / 8f, 1 / 16f, 1 / 32f, 1 / 32f },
        { 1 / 16f, 1 / 8f, 1 / 32f, 1 / 32f },
        { 1 / 16f, 1 / 16f, 1 / 16f, 1 / 16f },
        { 1 / 4f, 0f, 0f, 0f }
    };
    p_x = ProbabilityDistribution.MarginalX(p_xy);
    p_y = ProbabilityDistribution.MarginalY(p_xy);

    Assert.True(ProbabilityDistribution.IsValid(p_x));
    Assert.True(ProbabilityDistribution.IsValid(p_y));
    Assert.True(ProbabilityDistribution.IsValid(p_xy));

    // Spot-check the first column of the joint table.
    Assert.Equal(1 / 8f, p_xy[0, 0]);
    Assert.Equal(1 / 16f, p_xy[1, 0]);
    Assert.Equal(1 / 16f, p_xy[2, 0]);
    // Assert.Equal(1/8f, p_xy[0]);
    // Assert.Equal(1/16f, p_xy[1]);
    // Assert.Equal(1/32f, p_xy[2]);

    // X is uniform; Y is the dyadic (1/2, 1/4, 1/8, 1/8) distribution.
    Assert.Equal(new[] { 1 / 4f, 1 / 4f, 1 / 4f, 1 / 4f }, p_x);
    Assert.Equal(new[] { 1 / 2f, 1 / 4f, 1 / 8f, 1 / 8f }, p_y);

    // The textbook values: H(X)=2, H(Y)=7/4, H(X,Y)=27/8,
    // H(Y|X)=11/8 and I(X;Y)=3/8 bits.
    Assert.Equal(2f, ProbabilityDistribution.Entropy(p_x, 2));
    Assert.Equal(7 / 4f, ProbabilityDistribution.Entropy(p_y, 2));
    Assert.Equal(27 / 8f, ProbabilityDistribution.JointEntropy(p_xy, 2));
    Assert.Equal(11 / 8f, ProbabilityDistribution.ConditionalEntropyYX(p_xy, p_x, 2));
    Assert.Equal(3 / 8f, ProbabilityDistribution.MutualInformation(p_x, p_y, p_xy, 2));
}
/// <summary>
/// Creates a circle centered at (<paramref name="x"/>, <paramref name="y"/>)
/// whose distance measurements follow <paramref name="dist"/> with the given
/// standard deviation.
/// </summary>
public ContinuousCircle(int ID, double x, double y, ProbabilityDistribution dist, double std)
{
    this.ID = ID;
    X = x;
    Y = y;
    StandardDeviation = std;
    distanceDistribution = dist;
}
/// <summary>
/// Asserts that dist.getPDF maps each x[i] to z[i] (within EPS), after first
/// checking that a null PDF argument is rejected.
/// </summary>
/// <param name="z">Expected PDF values, parallel to <paramref name="x"/>.</param>
/// <param name="x">PDF arguments to evaluate.</param>
/// <param name="dist">The distribution under test.</param>
protected internal virtual void assertPDF(double[] z, double[] x, ProbabilityDistribution<double> dist)
{
    assertPDFWithNull(dist);
    for (int idx = 0; idx < z.Length; ++idx)
    {
        assertEquals(dist.getPDF(x[idx]), z[idx], EPS);
    }
}
/// <summary>
/// Cross-checks TallyAlphabet's probability/entropy accessors against the
/// static ProbabilityDistribution helpers for a uniform 3x3 joint table.
/// </summary>
public void TestCompareWithProbability()
{
    // Nine observations: every (symbol, value) pair exactly once => uniform joint.
    var fc = new TallyAlphabet<int>(new[] { "a", "b", "c" }, 3, y => y);
    fc.Add("a", 2);
    fc.Add("a", 1);
    fc.Add("a", 0);
    fc.Add("b", 2);
    fc.Add("b", 1);
    fc.Add("b", 0);
    fc.Add("c", 2);
    fc.Add("c", 1);
    fc.Add("c", 0);

    // Marginals, joint, and conditionals are all uniform.
    Assert.Equal(1 / 3f, fc.ProbabilityX("a"), 2);
    Assert.Equal(1 / 3f, fc.ProbabilityX("b"), 2);
    Assert.Equal(1 / 3f, fc.ProbabilityX("c"), 2);
    Assert.Equal(1 / 3f, fc.ProbabilityY(0));
    Assert.Equal(1 / 3f, fc.ProbabilityY(1));
    Assert.Equal(1 / 9f, fc.ProbabilityXY("a", 0));
    Assert.Equal(1 / 3f, fc.ProbabilityYGivenX(0, "a"), 2);
    Assert.Equal(1 / 3f, fc.ProbabilityYGivenX(1, "a"), 2);
    Assert.Equal(1 / 3f, fc.ProbabilityYGivenX(0, "b"), 2);
    Assert.Equal(1 / 3f, fc.ProbabilityYGivenX(1, "b"), 2);
    Assert.Equal(1f, fc.EntropyYGivenX(3), 1);
    Assert.Equal(9, fc.probabilityXY.Length);
    Assert.Equal(new[] { 1 / 3f, 1 / 3f, 1 / 3f }, fc.probabilityX);
    Assert.Equal(new[] { 1 / 3f, 1 / 3f, 1 / 3f }, fc.probabilityY);
    Assert.Equal(new[,] { { 1 / 9f, 1 / 9f, 1 / 9f }, { 1 / 9f, 1 / 9f, 1 / 9f }, { 1 / 9f, 1 / 9f, 1 / 9f }, }, fc.probabilityXY);

    // Conditional/joint entropies via the static helpers, with varying log bases.
    Assert.Equal(1 / 2f, ProbabilityDistribution.ConditionalEntropyYX(fc.probabilityXY, fc.probabilityX, fc.probabilityXY.Length));
    Assert.Equal(1f, ProbabilityDistribution.ConditionalEntropyYX(fc.probabilityXY, fc.probabilityX, fc.probabilityX.Length));
    Assert.Equal(1f, fc.EntropyXGivenY(3), 1);
    Assert.Equal(1 / 2f, ProbabilityDistribution.ConditionalEntropyXY(fc.probabilityXY, fc.probabilityY, fc.probabilityXY.Length));
    Assert.Equal(1f, ProbabilityDistribution.ConditionalEntropyXY(fc.probabilityXY, fc.probabilityY, fc.probabilityY.Length));
    Assert.Equal(2f, fc.EntropyXY(3), 1);
    Assert.Equal(2f, ProbabilityDistribution.JointEntropy(fc.probabilityXY, fc.probabilityX.Length), 1);
    Assert.Equal(1f, ProbabilityDistribution.JointEntropy(fc.probabilityXY, fc.probabilityXY.Length), 1);
    Assert.Equal(3.2f, ProbabilityDistribution.JointEntropy(fc.probabilityXY, 2), 1);
    Assert.Equal(1f, fc.EntropyX(3), 2);
    Assert.Equal(1f, ProbabilityDistribution.Entropy(fc.probabilityX, fc.probabilityX.Length), 2);
    Assert.Equal(0.5f, ProbabilityDistribution.Entropy(fc.probabilityX, fc.probabilityXY.Length), 2);
    Assert.Equal(1f, fc.EntropyY(3), 1);
    Assert.Equal(1f, ProbabilityDistribution.Entropy(fc.probabilityY, fc.probabilityY.Length), 2);
    Assert.Equal(0.5f, ProbabilityDistribution.Entropy(fc.probabilityY, fc.probabilityXY.Length), 2);

    // H(X|Y) = H(X,Y) - H(Y) — this identity should always hold.
    Assert.Equal(0f, fc.EntropyXGivenY() - fc.EntropyXY() + fc.EntropyY(), 1);
    // BUGFIX: the last term used ConditionalEntropyXY a second time, which does
    // not express the identity above; it must be the marginal entropy H(Y).
    Assert.Equal(0f, ProbabilityDistribution.ConditionalEntropyXY(fc.probabilityXY, fc.probabilityY) - ProbabilityDistribution.JointEntropy(fc.probabilityXY) + ProbabilityDistribution.Entropy(fc.probabilityY), 1);
    Assert.Equal(0f, fc.MutualInformationXY(3), 2);
}
public void TestOneSample()
{
    // A single sample puts all probability mass in one bin.
    var fc = new TallySingle(10, -1f, 1f);
    fc.Add(0f);

    Assert.Equal(1f, fc.Probability(0f));
    Assert.Equal(0f, fc.Probability(1f));
    // A certain outcome carries zero entropy.
    Assert.Equal(0f, ProbabilityDistribution.Entropy(fc.probability));
}
public void TestAlphabet()
{
    // Two symbols observed once each: a uniform two-symbol distribution.
    var fc = new TallyAlphabet(new[] { "a", "b" });
    fc.Add("a");
    fc.Add("b");

    Assert.Equal(0.5f, fc.Probability("a"));
    Assert.Equal(0.5f, fc.Probability("b"));
    // Uniform over the alphabet gives maximal (unit) normalized entropy.
    Assert.Equal(1f, ProbabilityDistribution.Entropy(fc.probability, fc.binCount), 2);
}
/// <summary>
/// Exercises ArrayTallyAlphabet with two parallel channels: channel 0 sees a
/// uniform 3x3 joint, channel 1 always sees symbol "b" with value 1.
/// </summary>
public void TestArrayTallyAlphabet2()
{
    var fc = new ArrayTallyAlphabet<int>(new[] { "a", "b", "c" }, 3, y => y);
    fc.Add(new[] { "a", "b" }, new[] { 2, 1 });
    fc.Add(new[] { "a", "b" }, new[] { 1, 1 });
    fc.Add(new[] { "a", "b" }, new[] { 0, 1 });
    fc.Add(new[] { "b", "b" }, new[] { 2, 1 });
    fc.Add(new[] { "b", "b" }, new[] { 1, 1 });
    fc.Add(new[] { "b", "b" }, new[] { 0, 1 });
    fc.Add(new[] { "c", "b" }, new[] { 2, 1 });
    fc.Add(new[] { "c", "b" }, new[] { 1, 1 });
    fc.Add(new[] { "c", "b" }, new[] { 0, 1 });

    // Channel 0 is uniform; channel 1 is concentrated on ("b", 1).
    Assert.Equal(9, fc.probabilityXY[0, 0].Length);
    Assert.Equal(new[] { 1 / 3f, 1 / 3f, 1 / 3f }, fc.probabilityX[0]);
    Assert.Equal(new[,] { { 1 / 9f, 1 / 9f, 1 / 9f }, { 1 / 9f, 1 / 9f, 1 / 9f }, { 1 / 9f, 1 / 9f, 1 / 9f }, }, fc.probabilityXY[0, 0]);
    Assert.Equal(new[,] { { 0f, 0f, 0f }, { 1 / 3f, 1 / 3f, 1 / 3f }, { 0f, 0f, 0f }, }, fc.probabilityXY[1, 0]);
    Assert.Equal(new[,] { { 0f, 0f, 0f }, { 0f, 1f, 0f }, { 0f, 0f, 0f }, }, fc.probabilityXY[1, 1]);
    Assert.Equal(new[] { 1 / 3f, 1 / 3f, 1 / 3f }, ProbabilityDistribution.MarginalY(fc.probabilityXY[0, 0]));
    Assert.Equal(new[] { 1 / 3f, 1 / 3f, 1 / 3f }, ProbabilityDistribution.MarginalY(fc.probabilityXY[1, 0]));
    Assert.Equal(new[] { 0f, 1f, 0f }, ProbabilityDistribution.MarginalX(fc.probabilityXY[1, 0]));
    Assert.Equal(new[] { 0f, 1f, 0f }, fc.probabilityX[1]);
    Assert.Equal(new[] { 1 / 3f, 1 / 3f, 1 / 3f }, fc.probabilityY[0]);
    Assert.Equal(new[] { 0f, 1f, 0f }, fc.probabilityY[1]);

    // Entropy checks on channel 0, with varying log bases.
    Assert.Equal(1 / 2f, ProbabilityDistribution.ConditionalEntropyYX(fc.probabilityXY[0, 0], fc.probabilityX[0], fc.probabilityXY[0, 0].Length));
    Assert.Equal(1f, ProbabilityDistribution.ConditionalEntropyYX(fc.probabilityXY[0, 0], fc.probabilityX[0], fc.probabilityX[0].Length));
    Assert.Equal(1 / 2f, ProbabilityDistribution.ConditionalEntropyXY(fc.probabilityXY[0, 0], fc.probabilityY[0], fc.probabilityXY[0, 0].Length));
    Assert.Equal(1f, ProbabilityDistribution.ConditionalEntropyXY(fc.probabilityXY[0, 0], fc.probabilityY[0], fc.probabilityY[0].Length));
    Assert.Equal(2f, ProbabilityDistribution.JointEntropy(fc.probabilityXY[0, 0], fc.probabilityX[0].Length), 1);
    Assert.Equal(1f, ProbabilityDistribution.JointEntropy(fc.probabilityXY[0, 0], fc.probabilityXY[0, 0].Length), 1);
    Assert.Equal(3.2f, ProbabilityDistribution.JointEntropy(fc.probabilityXY[0, 0], 2), 1);
    Assert.Equal(1f, ProbabilityDistribution.Entropy(fc.probabilityX[0], fc.probabilityX[0].Length), 2);
    Assert.Equal(0.5f, ProbabilityDistribution.Entropy(fc.probabilityX[0], fc.probabilityXY[0, 0].Length), 2);
    Assert.Equal(1f, ProbabilityDistribution.Entropy(fc.probabilityY[0], fc.probabilityY[0].Length), 2);
    Assert.Equal(0.5f, ProbabilityDistribution.Entropy(fc.probabilityY[0], fc.probabilityXY[0, 0].Length), 2);

    // H(X|Y) = H(X,Y) - H(Y) — this identity should always hold.
    // BUGFIX: the last term used ConditionalEntropyXY a second time, which does
    // not express the identity above; it must be the marginal entropy H(Y).
    Assert.Equal(0f, ProbabilityDistribution.ConditionalEntropyXY(fc.probabilityXY[0, 0], fc.probabilityY[0]) - ProbabilityDistribution.JointEntropy(fc.probabilityXY[0, 0]) + ProbabilityDistribution.Entropy(fc.probabilityY[0]), 1);
}
public void TestTwoSamples()
{
    // Two samples in distinct bins: probability 1/2 each.
    var fc = new TallySingle(10, -1f, 1f);
    fc.Add(0f);
    fc.Add(0.5f);

    Assert.Equal(0.5f, fc.Probability(0f));
    Assert.Equal(0f, fc.Probability(1f));
    Assert.Equal(0.5f, fc.Probability(0.5f));
    // log_10(2) ~= 0.301: fair-coin entropy in base binCount = 10.
    Assert.Equal(0.301f, ProbabilityDistribution.Entropy(fc.probability, fc.binCount), 3);
}
/// <summary>
/// Answers a boolean query against the full joint distribution by enumeration:
/// looks up the joint probability with the query variable set to true, then to
/// false, and normalizes the resulting pair.
/// </summary>
/// <param name="q">The query (variable plus evidence assignments).</param>
/// <param name="pd">The full joint probability distribution.</param>
/// <returns>Normalized [P(true), P(false)] for the query variable.</returns>
public static double[] ask(Query q, ProbabilityDistribution pd)
{
    Dictionary<String, bool> evidence = q.getEvidenceVariables();

    evidence[q.getQueryVariable()] = true;
    double whenTrue = pd.probabilityOf(evidence);

    evidence[q.getQueryVariable()] = false;
    double whenFalse = pd.probabilityOf(evidence);

    return Util.normalize(new double[] { whenTrue, whenFalse });
}
/// <summary>
/// Checks that the Chebyshev inequality holds true for the mean of a sample
/// drawn from the specified distribution.
/// </summary>
/// <param name="distribution">The distribution from which the sample has been
/// drawn.</param>
/// <param name="sampleMean">The observed sample mean.</param>
/// <param name="sampleSize">The sample size.</param>
/// <param name="delta">A positive, less-than-unity quantity such that the
/// probability of observing the sample mean close to the population mean is at
/// least <c>1 - delta</c>.</param>
/// <remarks>
/// <para>
/// It is assumed that the <see cref="ProbabilityDistribution.Mean"/> and the
/// <see cref="ProbabilityDistribution.Variance"/> of
/// <paramref name="distribution"/> are finite, and <paramref name="delta"/> is
/// in the open interval having extremes <c>0</c> and <c>1</c>.
/// Then the event
/// </para>
/// <para>
/// { | sampleMean - distribution.Mean() | &lt; epsilon },
/// </para>
/// <para>
/// where epsilon = sampleStdDev / Math.Sqrt( sampleSize * delta ), is expected
/// to happen with probability greater than or equal to 1 - delta. Hence this
/// method asserts that the event under study holds true for the observed
/// sample mean.
/// </para>
/// </remarks>
/// <exception cref="AssertFailedException">
/// <paramref name="distribution"/> has infinite mean.<br/>
/// -or-<br/>
/// <paramref name="distribution"/> has infinite variance.<br/>
/// -or-<br/>
/// <paramref name="delta"/> is not in the open interval having extremes
/// <c>0</c> and <c>1</c>.<br/>
/// -or-<br/>
/// The event did not happen for the mean of the observed sample.
/// </exception>
/// <seealso href="https://en.wikipedia.org/wiki/Chebyshev%27s_inequality"/>
public static void CheckChebyshevInequality(
    ProbabilityDistribution distribution,
    double sampleMean,
    int sampleSize,
    double delta)
{
    // Delegate to the scalar overload using this distribution's population
    // mean and variance.
    RandomNumberGeneratorTest.CheckChebyshevInequality(
        distribution.Mean(),
        distribution.Variance(),
        sampleMean,
        sampleSize,
        delta);
}
/// <summary>
/// Asserts that passing a null probability to getInverseCDF is rejected with
/// an <see cref="ArgumentException"/>.
/// </summary>
/// <param name="dist">The distribution under test.</param>
protected internal virtual void assertInverseCDFWithNull(ProbabilityDistribution<double> dist)
{
    try
    {
        dist.getInverseCDF(null);
        Assert.fail();
    }
    // BUGFIX: the converter emitted the non-existent "legalArgumentException"
    // (truncated Java IllegalArgumentException); use C#'s ArgumentException.
    catch (ArgumentException)
    {
        // Expected
    }
}
/// <summary>Initializes a new instance of the
/// <see cref="TestableProbabilityDistribution"/> class.</summary>
/// <param name="probabilityDistribution">The probability distribution to
/// test.</param>
/// <param name="mean">The expected mean.</param>
/// <param name="variance">The expected variance.</param>
/// <param name="pdfPartialGraph">A dictionary in which keys represent PDF
/// arguments, and the corresponding values contain the expected images of
/// such arguments.</param>
/// <param name="cdfPartialGraph">A dictionary in which keys represent CDF
/// arguments, and the corresponding values contain the expected images of
/// such arguments.</param>
/// <param name="canInvertCdf"><c>true</c> if the target distribution can
/// invert the CDF; otherwise <c>false</c>.</param>
/// <param name="inverseCdfPartialGraph">A dictionary in which keys represent
/// inverse CDF arguments, and the corresponding values contain the expected
/// images of such arguments.</param>
public TestableProbabilityDistribution(
    ProbabilityDistribution probabilityDistribution,
    double mean,
    double variance,
    Dictionary<TestableDoubleMatrix, DoubleMatrix> pdfPartialGraph,
    Dictionary<TestableDoubleMatrix, DoubleMatrix> cdfPartialGraph,
    bool canInvertCdf = false,
    Dictionary<TestableDoubleMatrix, DoubleMatrix> inverseCdfPartialGraph = null)
{
    // Store the target plus every expected statistic / partial graph verbatim.
    this.probabilityDistribution = probabilityDistribution;
    this.mean = mean;
    this.variance = variance;
    this.pdfPartialGraph = pdfPartialGraph;
    this.cdfPartialGraph = cdfPartialGraph;
    this.canInvertCdf = canInvertCdf;
    this.inverseCdfPartialGraph = inverseCdfPartialGraph;
}
public void TestTwoIndependentCoinFlipsBadSetup()
{
    // This joint table sums to 2, so none of the distributions are valid...
    p_xy = new[,] { { 1 / 2f, 1 / 2f }, { 1 / 2f, 1 / 2f } };
    p_x = ProbabilityDistribution.MarginalX(p_xy);
    p_y = ProbabilityDistribution.MarginalY(p_xy);
    Assert.False(ProbabilityDistribution.IsValid(p_x));
    Assert.False(ProbabilityDistribution.IsValid(p_y));
    Assert.False(ProbabilityDistribution.IsValid(p_xy));

    // ...but normalizing repairs all three in place.
    ProbabilityDistribution.Normalize(p_x);
    ProbabilityDistribution.Normalize(p_y);
    ProbabilityDistribution.Normalize(p_xy);
    Assert.True(ProbabilityDistribution.IsValid(p_x));
    Assert.True(ProbabilityDistribution.IsValid(p_y));
    Assert.True(ProbabilityDistribution.IsValid(p_xy));
}
public void TestReadmeExample1()
{
    int binCount = 4;
    Tally<float, float> tally = new Tally<float, float>(binCount, x => (int)(x * binCount),
                                                        binCount, y => (int)(y * binCount));

    // In real use this would be fed repeatedly, e.g.
    // tally.Add(sensor.value, effector.value); here we supply fake values
    // for demonstration purposes.
    tally.Add(0.6f, 0.1f);
    tally.Add(0.5f, 0.5f);
    tally.Add(0.7f, 0.9f);
    tally.Add(0.7f, 0.3f);

    // Analyze the tallied distributions.
    float[] px = tally.probabilityX;
    Assert.Equal(new[] { 0f, 0f, 1f, 0f }, px);
    float[] py = tally.probabilityY;
    Assert.Equal(new[] { 1 / 4f, 1 / 4f, 1 / 4f, 1 / 4f }, py);
    float[,] pxy = tally.probabilityXY;
    Assert.Equal(new[,] {
        { 0f, 0f, 0f, 0f },
        { 0f, 0f, 0f, 0f },
        { 1 / 4f, 1 / 4f, 1 / 4f, 1 / 4f },
        { 0f, 0f, 0f, 0f },
    }, pxy);

    float Hsensor = ProbabilityDistribution.Entropy(px, 2);
    float Heffector = ProbabilityDistribution.Entropy(py, 2);
    // H(effector | sensor)
    float Heffector_sensor = ProbabilityDistribution.ConditionalEntropyYX(pxy, px, 2);

    Assert.Equal(0f, Hsensor, 1);
    // The sensor carries no information: it always lands in the same bin.
    Assert.Equal(2f, Heffector, 1);
    // The effector carries 2 bits: it shows up in any of the four bins with
    // equal probability, so two bits are needed to describe which bin.
    Assert.Equal(2f, Heffector_sensor, 1);
    // Knowing the sensor does not reduce the effector's randomness at all.
    // Since H(effector) = H(effector|sensor), the two are fully independent.
}
/// <summary>
/// Applies one-off adjustments to today's rain: thunderstorms force a heavier
/// baseline amount, and there is a configurable chance to re-roll the rain
/// into a different intensity category.
/// </summary>
internal static void CheckForStaticRainChanges(WeatherConditions curr, MersenneTwister Dice, double ChanceForNonNormalRain)
{
    // Thunderstorms force a heavier baseline rain amount.
    if (Game1.isLightning && Game1.isRaining)
    {
        curr.SetRainAmt(130);
    }

    double rainOdds = Dice.NextDouble();
    double newRainOdds = Dice.NextDouble();

    // Random chance to move today's rain to a non-normal intensity category.
    if (rainOdds < ChanceForNonNormalRain)
    {
        // Weighted spread over the rain levels, defaulting to Normal.
        var RainSpread = new ProbabilityDistribution<RainLevels>(RainLevels.Normal);
        RainSpread.AddNewCappedEndPoint(.12, RainLevels.Sunshower);
        RainSpread.AddNewCappedEndPoint(.28, RainLevels.Light);
        RainSpread.AddNewCappedEndPoint(.351, RainLevels.Normal);
        RainSpread.AddNewCappedEndPoint(.1362, RainLevels.Moderate);
        RainSpread.AddNewCappedEndPoint(.09563, RainLevels.Heavy);
        RainSpread.AddNewCappedEndPoint(.0382, RainLevels.Severe);
        RainSpread.AddNewCappedEndPoint(.0094, RainLevels.Torrential);
        RainSpread.AddNewCappedEndPoint(.0049, RainLevels.Typhoon);
        RainSpread.AddNewCappedEndPoint(.00287, RainLevels.NoahsFlood);

        if (!RainSpread.GetEntryFromProb(newRainOdds, out RainLevels Result))
        {
            Result = RainLevels.Normal;
            ClimatesOfFerngill.Logger.Log("The rain probablity spread has failed to find an rain level. Falling back to normal rain", LogLevel.Error);
        }

        curr.SetRainAmt(WeatherUtilities.ReturnRndRainAmtInLevel(Dice, Result));

        if (ClimatesOfFerngill.WeatherOpt.Verbose)
        {
            ClimatesOfFerngill.Logger.Log($"We've set the rain to a non normal value - with roll {rainOdds} for setting non normal, and {newRainOdds} for category {Result}, resulting in new rain target {curr.AmtOfRainDrops} in category {WeatherUtilities.GetRainCategory(curr.AmtOfRainDrops)}");
        }
    }

    curr.TodayRain += curr.AmtOfRainDrops;
    curr.RefreshRainAmt();
}
/// <summary>
/// Wires up the behaviour chain, sharing <paramref name="buyDistribution"/>
/// between the probabilistic buy behaviour and its learning counterpart.
/// </summary>
public ProbabilityAI(ProbabilityDistribution buyDistribution)
{
    // Created up front so the same instance can be registered later in the chain.
    var buyBehaviour = new ProbabilisticBuyBehaviour(buyDistribution);

    Behaviours.Add(new ProbabilisticBuyBehaviour.LearnFromGameResultBehaviour(buyDistribution));
    Behaviours.Add(new DefaultDiscardOrRedrawCardsBehaviour());
    Behaviours.Add(new DefaultMakeChoiceBehaviour());
    Behaviours.Add(new DefaultSelectFixedNumberOfCardsToPassOrTrashBehaviour());
    Behaviours.Add(new DefaultSelectFromRevealedBehaviour());
    Behaviours.Add(new DefaultSelectFixedNumberOfCardsForPlayBehaviour());
    Behaviours.Add(new DefaultSelectUpToNumberOfCardsToTrashBehaviour());
    Behaviours.Add(new PlaySimpleActionsBehaviour());
    Behaviours.Add(new SkipActionsBehaviour());
    Behaviours.Add(new BuyPointsBehaviour(6));
    Behaviours.Add(buyBehaviour);
    Behaviours.Add(new SkipBuyBehaviour());
}
// Builds a time distribution over [min, max] around the given mode, coercing
// inconsistent inputs into one of three shapes: Constant, Exponential, or
// Triangular. Note: min/mode/max may be rewritten below before being stored.
public TimeDistribution(TimeUnit timeUnit, SimulationTime min, SimulationTime mode, SimulationTime max)
{
    this.min = min;
    this.mode = mode;
    this.max = max;
    if (this.mode <= SimulationTime.Zero)
    {
        // Non-positive mode: degenerate to a constant zero-time distribution.
        this.probabilityDistribution = ProbabilityDistribution.Constant;
        this.min = this.max = this.mode = SimulationTime.Zero;
    }
    else if (this.max <= SimulationTime.Zero)
    {
        // Positive mode but non-positive max: treat as exponential; min/max
        // are zeroed while the mode is kept as the distribution's parameter.
        this.probabilityDistribution = ProbabilityDistribution.Exponential;
        this.min = this.max = SimulationTime.Zero;
    }
    else
    {
        // Clamp min into [0, mode].
        if (this.min > this.mode)
        {
            this.min = this.mode;
        }
        else if (this.min < SimulationTime.Zero)
        {
            this.min = SimulationTime.Zero;
        }
        // Ensure max is at least the mode.
        if (this.max < this.mode)
        {
            this.max = this.mode;
        }
        // Any spread around the mode makes the shape triangular; otherwise all
        // three endpoints coincide and the distribution is constant.
        if ((this.min < this.mode) || (this.mode < this.max))
        {
            this.probabilityDistribution = ProbabilityDistribution.Triangular;
        }
        else
        {
            this.probabilityDistribution = ProbabilityDistribution.Constant;
        }
    }
    // The working value starts at the (possibly coerced) mode.
    this.xValue = this.mode;
    this.Unit = timeUnit;
}
/// <summary>
/// Performs initial cluster seeding by choosing clusters randomly from data points.
/// </summary>
/// <param name="x">The data points <paramref name="x"/> to clusterize.</param>
/// <param name="weights">The <c>weight</c> of importance for each data point;
/// may be <c>null</c> for uniform sampling.</param>
/// <param name="cancellationToken">The cancellation token used to notify the
/// clusterizer that the operation should be canceled.</param>
/// <returns>The array that contains indexes of data points chosen as centroids.</returns>
internal int[] RandomSeeding(IList<IVector<float>> x, IList<float> weights, CancellationToken cancellationToken)
{
    Random random = new Random(0); // fixed seed keeps seeding deterministic
    int k = this.Count;
    int samples = x.Count;

    // Weighted sampling when per-point weights are provided; uniform otherwise.
    ProbabilityDistribution distribution = weights != null ? new ProbabilityDistribution(weights, random) : null;

    int[] indexes = new int[k];

    // 1. Choose one center uniformly at random from among the data points.
    int idx = Next();
    // BUGFIX: record the chosen index. Previously nothing was ever written to
    // 'indexes', so the duplicate check below compared against zeros and the
    // method returned an all-zero array.
    indexes[0] = idx;
    x[idx].Copy(this[0].Centroid, 0);

    // 2. Choose the remaining centers from among the data points, making sure
    //    each choice is distinct (by index and by value) from earlier ones.
    for (int centroid = 1; centroid < k; centroid++)
    {
        cancellationToken.ThrowIfCancellationRequested();

        idx = Next();
        while (indexes.Take(centroid).Any(i => idx == i || x[idx].Equals(x[i])))
        {
            idx = Next();
        }

        indexes[centroid] = idx; // BUGFIX: record this centroid's index too
        x[idx].Copy(this[centroid].Centroid, 0);
    }

    return indexes;

    int Next()
    {
        return distribution?.Next() ?? random.Next(0, samples);
    }
}
public void GetRandomItemReturnsEquallyLikelyItemsWithEqualProbability()
{
    // Arrange: four equally weighted items and one stubbed draw per item.
    var items = new[] { "Moat", "Smithy", "Village", "Market" };
    var randomNumbers = new[] { 0, 1, 2, 3 };
    var distribution = new ProbabilityDistribution(new RandomNumberProviderStub(randomNumbers), items);

    var occurrences = new Dictionary<string, int>();
    foreach (var item in items)
    {
        occurrences[item] = 0;
    }

    // Act: draw once per stubbed random number and tally each result.
    for (int draw = 0; draw < randomNumbers.Length; draw++)
    {
        occurrences[distribution.RandomItem(items)]++;
    }

    // Assert: every item was returned exactly once.
    var expected = new Dictionary<string, int>
    {
        { "Moat", 1 },
        { "Smithy", 1 },
        { "Village", 1 },
        { "Market", 1 },
    };
    CollectionAssert.AreEquivalent(expected, occurrences);
}
public void TestExample2()
{
    // Same joint as TestExample but padded with an all-zero row/column.
    p_xy = new[,] { { 1 / 4f, 1 / 4f, 0f }, { 1 / 2f, 0f, 0f }, { 0f, 0f, 0f } };
    p_x = ProbabilityDistribution.MarginalX(p_xy);
    p_y = ProbabilityDistribution.MarginalY(p_xy);

    // Assert.Equal(2f, ProbabilityDistribution.Entropy(p_x, 2));
    // Assert.Equal(7/4f, ProbabilityDistribution.Entropy(p_y, 2));

    // Joint entropy in base 2, base |p_xy|, and the default base.
    Assert.Equal(3 / 2f, ProbabilityDistribution.JointEntropy(p_xy, 2));
    Assert.Equal(0.47f, ProbabilityDistribution.JointEntropy(p_xy, p_xy.Length), 2);
    Assert.Equal(1.04f, ProbabilityDistribution.JointEntropy(p_xy), 2);

    // Conditional entropies H(Y|X) and H(X|Y) with varying log bases.
    Assert.Equal(1 / 2f, ProbabilityDistribution.ConditionalEntropyYX(p_xy, p_x, 2));
    Assert.Equal(0.16f, ProbabilityDistribution.ConditionalEntropyYX(p_xy, p_x, p_xy.Length), 2);
    Assert.Equal(0.32f, ProbabilityDistribution.ConditionalEntropyYX(p_xy, p_x, p_x.Length), 2);
    Assert.Equal(0.35f, ProbabilityDistribution.ConditionalEntropyYX(p_xy, p_x), 2);
    Assert.Equal(0.69f, ProbabilityDistribution.ConditionalEntropyXY(p_xy, p_y, 2), 2);
    Assert.Equal(0.22f, ProbabilityDistribution.ConditionalEntropyXY(p_xy, p_y, p_xy.Length), 2);
    Assert.Equal(0.43f, ProbabilityDistribution.ConditionalEntropyXY(p_xy, p_y, p_y.Length), 2);
    Assert.Equal(0.48f, ProbabilityDistribution.ConditionalEntropyXY(p_xy, p_y), 2);

    // Assert.Equal(3/8f, ProbabilityDistribution.MutualInformation(p_x, p_y, p_xy, 2));
}
/// <summary>
/// Seeds the AI's buy distribution with all supported actions plus basic treasure.
/// </summary>
public AIFactory() =>
    ProbabilityAIDistribution = new ProbabilityDistribution(AISupportedActions.All, Treasure.Basic);
/// <summary>Captures the distribution that drives probabilistic buy decisions.</summary>
public ProbabilisticBuyBehaviour(ProbabilityDistribution distribution) =>
    _distribution = distribution;
/// <summary>Captures the shared distribution this behaviour updates from game results.</summary>
public LearnFromGameResultBehaviour(ProbabilityDistribution distribution) =>
    _distribution = distribution;
// Serializes this TDDIOutputFailure onto the given Thrift protocol.
// NOTE: Thrift-generated code — do not hand-edit; field IDs (1..11) and write
// order are part of the wire contract. Each optional field is emitted only when
// its __isset flag is set (and, for reference-typed fields, the value is non-null).
// Recursion depth is incremented/decremented around the write to guard against
// cyclic object graphs.
public void Write(TProtocol oprot) { oprot.IncrementRecursionDepth(); try { TStruct struc = new TStruct("TDDIOutputFailure"); oprot.WriteStructBegin(struc); TField field = new TField(); if (__isset.Id) { field.Name = "Id"; field.Type = TType.I64; field.ID = 1; oprot.WriteFieldBegin(field); oprot.WriteI64(Id); oprot.WriteFieldEnd(); } if (Name != null && __isset.Name) { field.Name = "Name"; field.Type = TType.String; field.ID = 2; oprot.WriteFieldBegin(field); oprot.WriteString(Name); oprot.WriteFieldEnd(); } if (Description != null && __isset.Description) { field.Name = "Description"; field.Type = TType.String; field.ID = 3; oprot.WriteFieldBegin(field); oprot.WriteString(Description); oprot.WriteFieldEnd(); } if (__isset.IsCitation) { field.Name = "IsCitation"; field.Type = TType.Bool; field.ID = 4; oprot.WriteFieldBegin(field); oprot.WriteBool(IsCitation); oprot.WriteFieldEnd(); } if (__isset.IsAbstract) { field.Name = "IsAbstract"; field.Type = TType.Bool; field.ID = 5; oprot.WriteFieldBegin(field); oprot.WriteBool(IsAbstract); oprot.WriteFieldEnd(); } if (KeyValueMaps != null && __isset.KeyValueMaps) { field.Name = "KeyValueMaps"; field.Type = TType.List; field.ID = 6; oprot.WriteFieldBegin(field); { oprot.WriteListBegin(new TList(TType.Struct, KeyValueMaps.Count)); foreach (TDDIKeyValueMapRef _iter454 in KeyValueMaps) { _iter454.Write(oprot); } oprot.WriteListEnd(); } oprot.WriteFieldEnd(); } if (CitedElement != null && __isset.CitedElement) { field.Name = "CitedElement"; field.Type = TType.Struct; field.ID = 7; oprot.WriteFieldBegin(field); CitedElement.Write(oprot); oprot.WriteFieldEnd(); } if (__isset.FailureRate) { field.Name = "FailureRate"; field.Type = TType.Double; field.ID = 8; oprot.WriteFieldBegin(field); oprot.WriteDouble(FailureRate); oprot.WriteFieldEnd(); } if (FailureMode != null && __isset.FailureMode) { field.Name = "FailureMode"; field.Type = TType.Struct; field.ID = 9; oprot.WriteFieldBegin(field); FailureMode.Write(oprot); 
oprot.WriteFieldEnd(); } if (ProbabilityDistribution != null && __isset.ProbabilityDistribution) { field.Name = "ProbabilityDistribution"; field.Type = TType.Struct; field.ID = 10; oprot.WriteFieldBegin(field); ProbabilityDistribution.Write(oprot); oprot.WriteFieldEnd(); } if (MinimalCutsets != null && __isset.MinimalCutsets) { field.Name = "MinimalCutsets"; field.Type = TType.List; field.ID = 11; oprot.WriteFieldBegin(field); { oprot.WriteListBegin(new TList(TType.Struct, MinimalCutsets.Count)); foreach (TDDIMinimalCutset _iter455 in MinimalCutsets) { _iter455.Write(oprot); } oprot.WriteListEnd(); } oprot.WriteFieldEnd(); } oprot.WriteFieldStop(); oprot.WriteStructEnd(); } finally { oprot.DecrementRecursionDepth(); } }
/// <summary>
/// Rebuilds <paramref name="chart"/> with a column series that samples either the
/// probability mass function or the cumulative distribution function of a discrete
/// distribution over its 0.01%–99.99% quantile range.
/// </summary>
/// <param name="chart">Chart control to update in place.</param>
/// <param name="dist">Discrete distribution to plot.</param>
/// <param name="titles">Chart titles forwarded to <c>Update</c>.</param>
/// <param name="function">Whether to plot the PDF (mass function) or CDF.</param>
protected static void UpdateDiscreteDistribution( ref ChartControl chart, ProbabilityDistribution dist, List<string> titles, DistributionFunction function )
{
    // Integer support covering essentially all of the probability mass.
    int lower = (int)Math.Floor( dist.InverseCDF( 0.0001 ) );
    int upper = (int)Math.Ceiling( dist.InverseCDF( 0.9999 ) );
    DoubleVector abscissae = new DoubleVector( upper - lower + 1, lower, 1 );

    bool plotMass = function == DistributionFunction.PDF;
    string yTitle = plotMass ? "Probability Mass Function" : "Cumulative Distribution Function";
    DoubleVector ordinates = plotMass
        ? abscissae.Apply( new Func<double, double>( dist.PDF ) )
        : abscissae.Apply( new Func<double, double>( dist.CDF ) );

    ChartSeries series = BindXY( abscissae, ordinates, ChartSeriesType.Column, ChartSymbolShape.None );
    Update( ref chart, series, titles, "x", yTitle );
}
// Human-readable rendering of this TDDIOutputFailure for diagnostics/logging.
// NOTE: Thrift-generated code — do not hand-edit. Only fields whose __isset flag
// is set (and that are non-null, for reference types) are included; __first
// tracks whether a ", " separator is needed. The inner `== null ? "<null>"` checks
// are redundant after the outer null guards but are kept as generated.
public override string ToString() { StringBuilder __sb = new StringBuilder("TDDIOutputFailure("); bool __first = true; if (__isset.Id) { if (!__first) { __sb.Append(", "); } __first = false; __sb.Append("Id: "); __sb.Append(Id); } if (Name != null && __isset.Name) { if (!__first) { __sb.Append(", "); } __first = false; __sb.Append("Name: "); __sb.Append(Name); } if (Description != null && __isset.Description) { if (!__first) { __sb.Append(", "); } __first = false; __sb.Append("Description: "); __sb.Append(Description); } if (__isset.IsCitation) { if (!__first) { __sb.Append(", "); } __first = false; __sb.Append("IsCitation: "); __sb.Append(IsCitation); } if (__isset.IsAbstract) { if (!__first) { __sb.Append(", "); } __first = false; __sb.Append("IsAbstract: "); __sb.Append(IsAbstract); } if (KeyValueMaps != null && __isset.KeyValueMaps) { if (!__first) { __sb.Append(", "); } __first = false; __sb.Append("KeyValueMaps: "); __sb.Append(KeyValueMaps); } if (CitedElement != null && __isset.CitedElement) { if (!__first) { __sb.Append(", "); } __first = false; __sb.Append("CitedElement: "); __sb.Append(CitedElement == null ? "<null>" : CitedElement.ToString()); } if (__isset.FailureRate) { if (!__first) { __sb.Append(", "); } __first = false; __sb.Append("FailureRate: "); __sb.Append(FailureRate); } if (FailureMode != null && __isset.FailureMode) { if (!__first) { __sb.Append(", "); } __first = false; __sb.Append("FailureMode: "); __sb.Append(FailureMode == null ? "<null>" : FailureMode.ToString()); } if (ProbabilityDistribution != null && __isset.ProbabilityDistribution) { if (!__first) { __sb.Append(", "); } __first = false; __sb.Append("ProbabilityDistribution: "); __sb.Append(ProbabilityDistribution == null ? 
"<null>" : ProbabilityDistribution.ToString()); } if (MinimalCutsets != null && __isset.MinimalCutsets) { if (!__first) { __sb.Append(", "); } __first = false; __sb.Append("MinimalCutsets: "); __sb.Append(MinimalCutsets); } __sb.Append(")"); return(__sb.ToString()); }
/// <summary>
/// Rebuilds <paramref name="chart"/> with a line series interpolating either the
/// probability density function or the cumulative distribution function of a
/// continuous distribution over its 0.01%–99.99% quantile range.
/// </summary>
/// <param name="chart">Chart control to update in place.</param>
/// <param name="dist">Continuous distribution to plot.</param>
/// <param name="titles">Chart titles forwarded to <c>Update</c>.</param>
/// <param name="function">Whether to plot the PDF or CDF.</param>
/// <param name="numInterpolatedValues">Number of sample points for the line series.</param>
protected static void UpdateContinuousDistribution( ref ChartControl chart, ProbabilityDistribution dist, List<string> titles, DistributionFunction function, int numInterpolatedValues )
{
    string xTitle = "x";
    string yTitle;
    // Plot range covering essentially all of the probability mass.
    double xmin = dist.InverseCDF( 0.0001 );
    double xmax = dist.InverseCDF( 0.9999 );
    OneVariableFunction f;
    if( function == DistributionFunction.PDF )
    {
        yTitle = "Probability Density Function";
        // Method-group conversion instead of an anonymous-delegate wrapper:
        // identical behavior, and consistent with UpdateDiscreteDistribution.
        f = new OneVariableFunction( new Func<double, double>( dist.PDF ) );
    }
    else
    {
        yTitle = "Cumulative Distribution Function";
        f = new OneVariableFunction( new Func<double, double>( dist.CDF ) );
    }
    ChartSeries series = BindXY( f, xmin, xmax, numInterpolatedValues, ChartSeriesType.Line, ChartSymbolShape.None );
    Update( ref chart, series, titles, xTitle, yTitle );
}