public void TestMarginalDistributionX()
 {
     // A dyadic 4x4 joint distribution: every cell is a power of 1/2 (or 0),
     // so all entropies come out as exact fractions in bits (base 2).
     p_xy = new [, ] {
         { 1 / 8f, 1 / 16f, 1 / 32f, 1 / 32f },
         { 1 / 16f, 1 / 8f, 1 / 32f, 1 / 32f },
         { 1 / 16f, 1 / 16f, 1 / 16f, 1 / 16f },
         { 1 / 4f, 0f, 0f, 0f }
     };
     p_x = ProbabilityDistribution.MarginalX(p_xy);
     p_y = ProbabilityDistribution.MarginalY(p_xy);

     // All three distributions must be valid (non-negative, summing to 1).
     Assert.True(ProbabilityDistribution.IsValid(p_x));
     Assert.True(ProbabilityDistribution.IsValid(p_y));
     Assert.True(ProbabilityDistribution.IsValid(p_xy));

     // Spot-check the first column of the joint distribution.
     Assert.Equal(1 / 8f, p_xy[0, 0]);
     Assert.Equal(1 / 16f, p_xy[1, 0]);
     Assert.Equal(1 / 16f, p_xy[2, 0]);

     // Row sums give a uniform X marginal; column sums give a dyadic Y marginal.
     Assert.Equal(new [] { 1 / 4f, 1 / 4f, 1 / 4f, 1 / 4f }, p_x);
     Assert.Equal(new [] { 1 / 2f, 1 / 4f, 1 / 8f, 1 / 8f }, p_y);

     // H(X) = 2 bits, H(Y) = 7/4 bits, H(X,Y) = 27/8 bits.
     Assert.Equal(2f, ProbabilityDistribution.Entropy(p_x, 2));
     Assert.Equal(7 / 4f, ProbabilityDistribution.Entropy(p_y, 2));
     Assert.Equal(27 / 8f, ProbabilityDistribution.JointEntropy(p_xy, 2));
     // H(Y|X) = H(X,Y) - H(X) = 27/8 - 2 = 11/8 bits.
     Assert.Equal(11 / 8f, ProbabilityDistribution.ConditionalEntropyYX(p_xy, p_x, 2));
     // I(X;Y) = H(X) + H(Y) - H(X,Y) = 2 + 7/4 - 27/8 = 3/8 bits.
     Assert.Equal(3 / 8f, ProbabilityDistribution.MutualInformation(p_x, p_y, p_xy, 2));
 }
// Exemplo n.º 2
// 0
        public void TestReadmeExample0()
        {
            const int binCount = 3;
            var tally = new Tally <float>(binCount, x => (int)(x * binCount));

            // Somewhere, this is called repeatedly:
            // tally.Add(neuron.value);
            // Here we feed in some fake values for demonstration purposes.
            foreach (var sample in new [] { 0.2f, 0.1f, 0.4f, 0.5f })
                tally.Add(sample);

            // Now analyze the accumulated distribution.
            float[] distribution = tally.probability;
            Assert.Equal(new [] { 2 / 4f, 2 / 4f, 0f }, distribution);

            // Entropy without any normalization (no explicit base).
            float entropy = ProbabilityDistribution.Entropy(distribution);
            Assert.Equal(0.7f, entropy, 1);

            // With a base of 2 the entropy is measured in bits.
            float entropyBits = ProbabilityDistribution.Entropy(distribution, 2);
            Assert.Equal(1f, entropyBits, 1);
            // So this neuron's value carries one bit of information: it lands
            // in the first or second bin with equal probability and never in
            // the third.
        }
        // NOTE(review): despite the name, this sets up a uniform 3x3 joint —
        // two independent variables, each uniform over THREE outcomes — not
        // coin flips; confirm the intended name against the original source.
        public void TestThreeIndependentCoinFlips()
        {
            // Uniform joint: every (x, y) cell has probability 1/9, which
            // makes X and Y independent with uniform 1/3 marginals.
            p_xy = new [, ] {
                { 1 / 9f, 1 / 9f, 1 / 9f },
                { 1 / 9f, 1 / 9f, 1 / 9f },
                { 1 / 9f, 1 / 9f, 1 / 9f }
            };
            p_x = ProbabilityDistribution.MarginalX(p_xy);
            p_y = ProbabilityDistribution.MarginalY(p_xy);
            Assert.True(ProbabilityDistribution.IsValid(p_x));
            Assert.True(ProbabilityDistribution.IsValid(p_y));
            Assert.True(ProbabilityDistribution.IsValid(p_xy));
            // H(X) = H(Y) = log2(3) ~= 1.58 bits.
            Assert.Equal(1.58f, ProbabilityDistribution.Entropy(p_x, 2), 2);
            Assert.Equal(1.58f, ProbabilityDistribution.Entropy(p_y, 2), 2);
            // In base 3 (the alphabet size) a uniform marginal has entropy 1.
            Assert.Equal(1f, ProbabilityDistribution.Entropy(p_x, p_x.Length), 2);
            Assert.Equal(1f, ProbabilityDistribution.Entropy(p_y, p_y.Length), 2);
            // Default base (natural log): ln(3) ~= 1.1 nats.
            Assert.Equal(1.1f, ProbabilityDistribution.Entropy(p_x), 2);
            Assert.Equal(1.1f, ProbabilityDistribution.Entropy(p_y), 2);
            // H(X,Y) = log2(9) ~= 3.17 bits.
            Assert.Equal(3.17f, ProbabilityDistribution.JointEntropy(p_xy, 2), 2);
            // For a 2D array, Length is the total cell count (9), so this is
            // log base 9 of the uniform 9-cell joint: exactly 1.
            Assert.Equal(1f, ProbabilityDistribution.JointEntropy(p_xy, p_xy.Length), 2);
            // ln(9) ~= 2.2 nats.
            Assert.Equal(2.2f, ProbabilityDistribution.JointEntropy(p_xy), 2);
            // By independence, H(Y|X) = H(Y) in every base:
            // log2(3) ~= 1.58, log9(3) = 0.5, log3(3) = 1, ln(3) ~= 1.1.
            Assert.Equal(1.58f, ProbabilityDistribution.ConditionalEntropyYX(p_xy, p_x, 2), 2);
            Assert.Equal(0.5f, ProbabilityDistribution.ConditionalEntropyYX(p_xy, p_x, p_xy.Length), 2);
            Assert.Equal(1f, ProbabilityDistribution.ConditionalEntropyYX(p_xy, p_x, p_x.Length), 2);
            Assert.Equal(1.1f, ProbabilityDistribution.ConditionalEntropyYX(p_xy, p_x), 2);
            Assert.Equal(1.58f, ProbabilityDistribution.ConditionalEntropyXY(p_xy, p_y, 2), 2);
            Assert.Equal(1.1f, ProbabilityDistribution.ConditionalEntropyXY(p_xy, p_y), 2);

            // Independent variables share no information: I(X;Y) = 0 in any base.
            Assert.Equal(0f, ProbabilityDistribution.MutualInformation(p_x, p_y, p_xy, 2), 2);
            Assert.Equal(0f, ProbabilityDistribution.MutualInformation(p_x, p_y, p_xy), 2);
            Assert.Equal(0f, ProbabilityDistribution.MutualInformation(p_x, p_y, p_xy, p_xy.Length), 2);
            Assert.Equal(0f, ProbabilityDistribution.MutualInformation(p_x, p_y, p_xy, p_x.Length), 2);
        }
// Exemplo n.º 4
// 0
        public void TestCompareWithProbability()
        {
            // Three symbols x three integer outcomes, every (x, y) pair added
            // exactly once => uniform joint distribution of 1/9 per cell,
            // hence X and Y are independent with uniform 1/3 marginals.
            var fc = new TallyAlphabet <int>(new[] { "a", "b", "c" }, 3, y => y);

            // Same additions as the original hand-unrolled list:
            // ("a",2), ("a",1), ("a",0), ("b",2), ... ("c",0).
            foreach (var x in new [] { "a", "b", "c" })
                for (int y = 2; y >= 0; y--)
                    fc.Add(x, y);

            Assert.Equal(1 / 3f, fc.ProbabilityX("a"), 2);
            Assert.Equal(1 / 3f, fc.ProbabilityX("b"), 2);
            Assert.Equal(1 / 3f, fc.ProbabilityX("c"), 2);
            Assert.Equal(1 / 3f, fc.ProbabilityY(0));
            Assert.Equal(1 / 3f, fc.ProbabilityY(1));
            Assert.Equal(1 / 9f, fc.ProbabilityXY("a", 0));
            // By independence, P(Y|X) equals P(Y) = 1/3.
            Assert.Equal(1 / 3f, fc.ProbabilityYGivenX(0, "a"), 2);
            Assert.Equal(1 / 3f, fc.ProbabilityYGivenX(1, "a"), 2);
            Assert.Equal(1 / 3f, fc.ProbabilityYGivenX(0, "b"), 2);
            Assert.Equal(1 / 3f, fc.ProbabilityYGivenX(1, "b"), 2);
            Assert.Equal(1f, fc.EntropyYGivenX(3), 1);

            Assert.Equal(9, fc.probabilityXY.Length);
            Assert.Equal(new [] { 1 / 3f, 1 / 3f, 1 / 3f }, fc.probabilityX);
            Assert.Equal(new [] { 1 / 3f, 1 / 3f, 1 / 3f }, fc.probabilityY);
            Assert.Equal(new [, ] {
                { 1 / 9f, 1 / 9f, 1 / 9f },
                { 1 / 9f, 1 / 9f, 1 / 9f },
                { 1 / 9f, 1 / 9f, 1 / 9f },
            }, fc.probabilityXY);

            // Entropies in several bases. Note that for a 2D array, Length is
            // the total cell count (9), so those overloads compute log base 9.
            Assert.Equal(1 / 2f, ProbabilityDistribution.ConditionalEntropyYX(fc.probabilityXY, fc.probabilityX, fc.probabilityXY.Length));
            Assert.Equal(1f, ProbabilityDistribution.ConditionalEntropyYX(fc.probabilityXY, fc.probabilityX, fc.probabilityX.Length));
            Assert.Equal(1f, fc.EntropyXGivenY(3), 1);
            Assert.Equal(1 / 2f, ProbabilityDistribution.ConditionalEntropyXY(fc.probabilityXY, fc.probabilityY, fc.probabilityXY.Length));
            Assert.Equal(1f, ProbabilityDistribution.ConditionalEntropyXY(fc.probabilityXY, fc.probabilityY, fc.probabilityY.Length));
            Assert.Equal(2f, fc.EntropyXY(3), 1);
            Assert.Equal(2f, ProbabilityDistribution.JointEntropy(fc.probabilityXY, fc.probabilityX.Length), 1);
            Assert.Equal(1f, ProbabilityDistribution.JointEntropy(fc.probabilityXY, fc.probabilityXY.Length), 1);
            Assert.Equal(3.2f, ProbabilityDistribution.JointEntropy(fc.probabilityXY, 2), 1);
            Assert.Equal(1f, fc.EntropyX(3), 2);
            Assert.Equal(1f, ProbabilityDistribution.Entropy(fc.probabilityX, fc.probabilityX.Length), 2);
            Assert.Equal(0.5f, ProbabilityDistribution.Entropy(fc.probabilityX, fc.probabilityXY.Length), 2);
            Assert.Equal(1f, fc.EntropyY(3), 1);
            Assert.Equal(1f, ProbabilityDistribution.Entropy(fc.probabilityY, fc.probabilityY.Length), 2);
            Assert.Equal(0.5f, ProbabilityDistribution.Entropy(fc.probabilityY, fc.probabilityXY.Length), 2);

            // H(X|Y) = H(X,Y) - H(Y)
            // This should always be true.
            Assert.Equal(0f, fc.EntropyXGivenY() - fc.EntropyXY() + fc.EntropyY(), 1);
            // BUG FIX: the last term used to be ConditionalEntropyXY, i.e.
            // H(X|Y) - H(X,Y) + H(X|Y). That only passed by coincidence here
            // because H(X|Y) == H(Y) for an independent uniform distribution.
            // The identity requires H(Y), i.e. Entropy of the Y marginal.
            Assert.Equal(0f, ProbabilityDistribution.ConditionalEntropyXY(fc.probabilityXY, fc.probabilityY)
                         - ProbabilityDistribution.JointEntropy(fc.probabilityXY)
                         + ProbabilityDistribution.Entropy(fc.probabilityY), 1);
            // Independent variables share no mutual information.
            Assert.Equal(0f, fc.MutualInformationXY(3), 2);
        }
// Exemplo n.º 5
// 0
        public void TestOneSample()
        {
            // Ten bins covering the range [-1, 1].
            var tally = new TallySingle(10, -1f, 1f);

            // A single observation puts all probability mass in one bin.
            tally.Add(0f);

            Assert.Equal(1f, tally.Probability(0f));
            Assert.Equal(0f, tally.Probability(1f));
            // A certain outcome carries no information: entropy is zero.
            Assert.Equal(0f, ProbabilityDistribution.Entropy(tally.probability));
        }
// Exemplo n.º 6
// 0
        public void TestAlphabet()
        {
            // Two-symbol alphabet with one observation of each symbol.
            var tally = new TallyAlphabet(new[] { "a", "b" });
            tally.Add("a");
            tally.Add("b");

            // Each symbol is equally likely.
            Assert.Equal(0.5f, tally.Probability("a"));
            Assert.Equal(0.5f, tally.Probability("b"));
            // Uniform over two symbols in base binCount (2) => exactly 1.
            Assert.Equal(1f, ProbabilityDistribution.Entropy(tally.probability, tally.binCount), 2);
        }
// Exemplo n.º 7
// 0
        public void TestArrayTallyAlphabet2()
        {
            // Array-valued tally: element 0 of X cycles over "a"/"b"/"c" with
            // every outcome 2..0, while element 1 of X is always "b" and
            // element 1 of Y is always 1.
            var fc = new ArrayTallyAlphabet <int>(new[] { "a", "b", "c" }, 3, y => y);

            // Same additions as the original hand-unrolled list:
            // (["a","b"],[2,1]), (["a","b"],[1,1]), ... (["c","b"],[0,1]).
            foreach (var x in new [] { "a", "b", "c" })
                for (int y = 2; y >= 0; y--)
                    fc.Add(new [] { x, "b" }, new [] { y, 1 });

            Assert.Equal(9, fc.probabilityXY[0, 0].Length);
            Assert.Equal(new [] { 1 / 3f, 1 / 3f, 1 / 3f }, fc.probabilityX[0]);
            // Element pair (0, 0): uniform joint, 1/9 per cell.
            Assert.Equal(new [, ] {
                { 1 / 9f, 1 / 9f, 1 / 9f },
                { 1 / 9f, 1 / 9f, 1 / 9f },
                { 1 / 9f, 1 / 9f, 1 / 9f },
            }, fc.probabilityXY[0, 0]);
            // Element pair (1, 0): X element 1 is always "b" (row 1).
            Assert.Equal(new [, ] {
                { 0f, 0f, 0f },
                { 1 / 3f, 1 / 3f, 1 / 3f },
                { 0f, 0f, 0f },
            }, fc.probabilityXY[1, 0]);

            // Element pair (1, 1): both components are constant.
            Assert.Equal(new [, ] {
                { 0f, 0f, 0f },
                { 0f, 1f, 0f },
                { 0f, 0f, 0f },
            }, fc.probabilityXY[1, 1]);
            // Marginals recovered from the joints match the stored marginals.
            Assert.Equal(new [] { 1 / 3f, 1 / 3f, 1 / 3f }, ProbabilityDistribution.MarginalY(fc.probabilityXY[0, 0]));
            Assert.Equal(new [] { 1 / 3f, 1 / 3f, 1 / 3f }, ProbabilityDistribution.MarginalY(fc.probabilityXY[1, 0]));
            Assert.Equal(new [] { 0f, 1f, 0f }, ProbabilityDistribution.MarginalX(fc.probabilityXY[1, 0]));
            Assert.Equal(new [] { 0f, 1f, 0f }, fc.probabilityX[1]);
            Assert.Equal(new [] { 1 / 3f, 1 / 3f, 1 / 3f }, fc.probabilityY[0]);
            Assert.Equal(new [] { 0f, 1f, 0f }, fc.probabilityY[1]);
            // Entropies in several bases; for a 2D array, Length is the total
            // cell count (9), so those overloads compute log base 9.
            Assert.Equal(1 / 2f, ProbabilityDistribution.ConditionalEntropyYX(fc.probabilityXY[0, 0], fc.probabilityX[0], fc.probabilityXY[0, 0].Length));
            Assert.Equal(1f, ProbabilityDistribution.ConditionalEntropyYX(fc.probabilityXY[0, 0], fc.probabilityX[0], fc.probabilityX[0].Length));
            Assert.Equal(1 / 2f, ProbabilityDistribution.ConditionalEntropyXY(fc.probabilityXY[0, 0], fc.probabilityY[0], fc.probabilityXY[0, 0].Length));
            Assert.Equal(1f, ProbabilityDistribution.ConditionalEntropyXY(fc.probabilityXY[0, 0], fc.probabilityY[0], fc.probabilityY[0].Length));
            Assert.Equal(2f, ProbabilityDistribution.JointEntropy(fc.probabilityXY[0, 0], fc.probabilityX[0].Length), 1);
            Assert.Equal(1f, ProbabilityDistribution.JointEntropy(fc.probabilityXY[0, 0], fc.probabilityXY[0, 0].Length), 1);
            Assert.Equal(3.2f, ProbabilityDistribution.JointEntropy(fc.probabilityXY[0, 0], 2), 1);
            Assert.Equal(1f, ProbabilityDistribution.Entropy(fc.probabilityX[0], fc.probabilityX[0].Length), 2);
            Assert.Equal(0.5f, ProbabilityDistribution.Entropy(fc.probabilityX[0], fc.probabilityXY[0, 0].Length), 2);
            Assert.Equal(1f, ProbabilityDistribution.Entropy(fc.probabilityY[0], fc.probabilityY[0].Length), 2);
            Assert.Equal(0.5f, ProbabilityDistribution.Entropy(fc.probabilityY[0], fc.probabilityXY[0, 0].Length), 2);

            // H(X|Y) = H(X,Y) - H(Y)
            // This should always be true.
            // BUG FIX: the last term used to be ConditionalEntropyXY, i.e.
            // H(X|Y) - H(X,Y) + H(X|Y). That only passed by coincidence here
            // because H(X|Y) == H(Y) for an independent uniform distribution.
            // The identity requires H(Y), i.e. Entropy of the Y marginal.
            Assert.Equal(0f, ProbabilityDistribution.ConditionalEntropyXY(fc.probabilityXY[0, 0], fc.probabilityY[0])
                         - ProbabilityDistribution.JointEntropy(fc.probabilityXY[0, 0])
                         + ProbabilityDistribution.Entropy(fc.probabilityY[0]), 1);
        }
// Exemplo n.º 8
// 0
        public void TestTwoSamples()
        {
            // Ten bins covering the range [-1, 1].
            var tally = new TallySingle(10, -1f, 1f);
            tally.Add(0f);
            tally.Add(0.5f);

            // Probability mass splits evenly between the two occupied bins.
            Assert.Equal(0.5f, tally.Probability(0f));
            Assert.Equal(0f, tally.Probability(1f));
            Assert.Equal(0.5f, tally.Probability(0.5f));
            // Entropy in base binCount (10): log10(2) ~= 0.301.
            Assert.Equal(0.301f, ProbabilityDistribution.Entropy(tally.probability, tally.binCount), 3);
        }
// Exemplo n.º 9
// 0
        public void TestReadmeExample1()
        {
            int binCount = 4;
            var tally = new Tally <float, float>(binCount, x => (int)(x * binCount),
                                                 binCount, y => (int)(y * binCount));

            // Somewhere, this is called repeatedly:
            // tally.Add(sensor.value, effector.value);
            // Here we supply fake (sensor, effector) pairs for demonstration.
            var samples = new [, ] {
                { 0.6f, 0.1f },
                { 0.5f, 0.5f },
                { 0.7f, 0.9f },
                { 0.7f, 0.3f },
            };
            for (int i = 0; i < samples.GetLength(0); i++)
                tally.Add(samples[i, 0], samples[i, 1]);

            // Now analyze the accumulated distributions.
            float[] pSensor = tally.probabilityX;
            Assert.Equal(new [] { 0f, 0f, 1f, 0f }, pSensor);
            float[] pEffector = tally.probabilityY;
            Assert.Equal(new [] { 1 / 4f, 1 / 4f, 1 / 4f, 1 / 4f }, pEffector);
            float[,] pJoint = tally.probabilityXY;
            Assert.Equal(new [, ] {
                { 0f, 0f, 0f, 0f },
                { 0f, 0f, 0f, 0f },
                { 1 / 4f, 1 / 4f, 1 / 4f, 1 / 4f },
                { 0f, 0f, 0f, 0f },
            }, pJoint);

            float hSensor   = ProbabilityDistribution.Entropy(pSensor, 2);
            float hEffector = ProbabilityDistribution.Entropy(pEffector, 2);
            // H(effector | sensor)
            float hEffectorGivenSensor = ProbabilityDistribution.ConditionalEntropyYX(pJoint, pSensor, 2);

            // The sensor carries no information: every observation lands in
            // the same bin (index 2) based on what's been seen.
            Assert.Equal(0f, hSensor, 1);
            // The effector carries 2 bits: it lands in any of the four bins
            // with equal probability, and two bits name one of four bins.
            Assert.Equal(2f, hEffector, 1);
            // Knowing the sensor reduces the effector's randomness not at all.
            // Since H(effector) = H(effector|sensor), the sensor and effector
            // are entirely independent of one another.
            Assert.Equal(2f, hEffectorGivenSensor, 1);
        }
// Exemplo n.º 10
// 0
        public void TestReadmeExample2()
        {
            int binCount = 4;
            ArrayTally <float, float> tally
                = new ArrayTally <float, float>(binCount, x => (int)(x * binCount),
                                                binCount, y => (int)(y * binCount));

            // Somewhere, this is called repeatedly:
            // tally.Add(sensor.value, effector.value);
            // But let's supply some fake values for demonstration purposes.
            // Note: Y's second element is always 0f, and X's second element
            // copies Y's first element.
            tally.Add(new [] { 0.6f, 0.1f }, new [] { 0.1f, 0f });
            tally.Add(new [] { 0.5f, 0.5f }, new [] { 0.5f, 0f });
            tally.Add(new [] { 0.7f, 0.9f }, new [] { 0.9f, 0f });
            tally.Add(new [] { 0.7f, 0.3f }, new [] { 0.3f, 0f });


            // float[] px = tally.probabilityX[0];
            // float[] py = tally.probabilityY[0];
            // float[,] pxy = tally.probabilityXY[0, 0];
            // If we analyze the first element of X and Y, we'll get the same
            // results as example 1 (TestReadmeExample1 — the first elements
            // are the same sample values). However, if we look at the second
            // element of X, which is the same as the first element of Y,
            // we'll get something different.

            // Finally we analyze it.

            float[] px = tally.probabilityX[1];
            Assert.Equal(new [] { 1 / 4f, 1 / 4f, 1 / 4f, 1 / 4f }, px);
            float[] py = tally.probabilityY[0];
            Assert.Equal(new [] { 1 / 4f, 1 / 4f, 1 / 4f, 1 / 4f }, py);
            // The joint is diagonal: X element 1 always equals Y element 0.
            float[,] pxy = tally.probabilityXY[1, 0];
            Assert.Equal(new [, ] {
                { 1 / 4f, 0f, 0f, 0f },
                { 0f, 1 / 4f, 0f, 0f },
                { 0f, 0f, 1 / 4f, 0f },
                { 0f, 0f, 0f, 1 / 4f },
            }, pxy);
            float Hsensor   = ProbabilityDistribution.Entropy(px, 2);
            float Heffector = ProbabilityDistribution.Entropy(py, 2);
            // H(effector | sensor)
            float Heffector_sensor = ProbabilityDistribution.ConditionalEntropyYX(pxy, px, 2);

            Assert.Equal(2f, Hsensor, 1);
            // So the sensor carries 2 bits of information. It's a copy of what
            // the first effector's producing.
            Assert.Equal(2f, Heffector, 1);
            // The effector carries 2 bits of information. It could show up in any of
            // the bins with equal probability.  It would take two bits to describe which bin.
            Assert.Equal(0f, Heffector_sensor, 1);
            // Given that we know the sensor, there's zero information required to know
            // how the effector will behave. H(effector|sensor) = 0 means the effector
            // is completely determined by the sensor, which makes sense since they're
            // the same values.


            // Footnote: I had a typo where I ran the computation against [0, 0]
            // instead of [1, 0], but I got the right result, which surprised
            // me. Why the same result? Because for both the matrix above and
            // the one below, summing the elements weighted by px yields the
            // same value.

            Assert.Equal(new [, ] {
                { 0f, 0f, 0f, 0f },
                { 0f, 0f, 0f, 0f },
                { 1 / 4f, 1 / 4f, 1 / 4f, 1 / 4f },
                { 0f, 0f, 0f, 0f },
            }, tally.probabilityXY[0, 0]);
        }