Example #1
        protected virtual Feedback SamplePosFeedback()
        {
            switch (PosSampler)
            {
            case PosSampler.UniformUser:
                string userIdOrg       = UsersMap.ToOriginalID(SampleUser());
                var    userPosFeedback = UserPosFeedback[userIdOrg];
                return(userPosFeedback[random.Next(userPosFeedback.Count)]);

            case PosSampler.UniformLevel:
                int level = PosLevels[random.Next(PosLevels.Count)];
                // uniform feedback sampling from a level
                int index = random.Next(LevelPosFeedback[level].Count);
                return(LevelPosFeedback[level][index]);

            case PosSampler.DynamicLevel:
            case PosSampler.AdaptedWeight:
                int l = PosLevels[_posLevelSampler.Sample()];
                int i = random.Next(LevelPosFeedback[l].Count);
                return(LevelPosFeedback[l][i]);

            case PosSampler.UniformFeedback:
                int level2 = PosLevels[random.Next(PosLevels.Count)];
                int index2 = random.Next(LevelPosFeedback[level2].Count);
                return(LevelPosFeedback[level2][index2]);

            case PosSampler.LeastPopular:
                return(SampleLeastPopularPosFeedback());

            default:
                return(null);
            }
        }
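Example #1 draws most strategies with `random.Next`, but the `DynamicLevel` and `AdaptedWeight` branches go through a pre-built `_posLevelSampler` that exposes a `Sample()` method. A minimal sketch of one plausible way to build such a weighted level sampler with Math.NET's `Categorical` (the level counts below are hypothetical, not taken from the source):

    // Sketch only: a weighted level sampler similar to what _posLevelSampler appears to be.
    using System;
    using System.Linq;
    using MathNet.Numerics.Distributions;

    static class LevelSamplerSketch
    {
        static void Main()
        {
            int[] levelCounts = { 120, 45, 10 };    // hypothetical number of positive feedback items per level

            // Categorical normalizes its weight vector, so raw counts can be passed directly.
            var levelSampler = new Categorical(levelCounts.Select(c => (double)c).ToArray());

            int levelIndex = levelSampler.Sample(); // plays the role of _posLevelSampler.Sample()
            Console.WriteLine($"sampled level index: {levelIndex}");
        }
    }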
Example #2
        public static T uniformFromArray <T>(T[] items)
        {
            double[] weights = (from i in Enumerable.Range(1, items.Length) select 1.0d).ToArray();
            int      idx     = Categorical.Sample(weights);

            return(items[idx]);
        }
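Example #2 picks a uniform index by handing `Categorical.Sample` a vector of equal weights; since the distribution normalizes its weight vector, this is equivalent to `random.Next(items.Length)`. A small self-contained sketch of the same idea (the item array is invented):

    // Sketch: equal weights make Categorical.Sample behave like a uniform pick.
    using System;
    using MathNet.Numerics.Distributions;

    static class UniformPickSketch
    {
        static void Main()
        {
            string[] items = { "a", "b", "c", "d" };    // hypothetical items

            double[] weights = { 1.0, 1.0, 1.0, 1.0 };  // equal weights, normalized internally
            int idx = Categorical.Sample(new Random(), weights);

            Console.WriteLine(items[idx]);
        }
    }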
Example #3
        public KMeans(int k, IReadOnlyList <IVector> data, DistanceMetric distanceMetric = DistanceMetric.Euclidean, int?randomSeed = null)
        {
            _k = k;
            _distanceMetric = distanceMetric;
            _cluster        = new ClusterData();
            _data           = data;

            // use kmeans++ to find best initial positions
            // https://normaldeviate.wordpress.com/2012/09/30/the-remarkable-k-means/
            var rand  = randomSeed.HasValue ? new Random(randomSeed.Value) : new Random();
            var data2 = data.ToList();

            // pick the first at random
            var firstIndex = rand.Next(0, data2.Count);

            _cluster.Add(data2[firstIndex]);
            data2.RemoveAt(firstIndex);

            // create a categorical distribution for each subsequent pick
            for (var i = 1; i < _k && data2.Count > 0; i++)
            {
                var probabilityList = new List <double>();
                foreach (var item in data2)
                {
                    using (var distance = _cluster.CalculateDistance(item, _distanceMetric)) {
                        var minIndex = distance.MinimumIndex();
                        probabilityList.Add(distance.AsIndexable()[minIndex]);
                    }
                }
                var distribution = new Categorical(probabilityList.ToArray());
                var nextIndex    = distribution.Sample();
                _cluster.Add(data2[nextIndex]);
                data2.RemoveAt(nextIndex);
            }
        }
Example #4
        public void TrainModel2()
        {
            var trainer = BrightWireProvider.CreateMarkovTrainer2 <string>();

            _Train(trainer);

            // test serialisation/deserialisation
            using (var buffer = new MemoryStream()) {
                trainer.SerialiseTo(buffer);
                buffer.Position = 0;
                trainer.DeserialiseFrom(buffer, true);
            }
            var dictionary = trainer.Build().AsDictionary;

            // generate some text
            var    rand = new Random();
            string prev = default(string), curr = default(string);
            var    output = new List <string>();

            for (var i = 0; i < 1024; i++)
            {
                var transitions  = dictionary.GetTransitions(prev, curr);
                var distribution = new Categorical(transitions.Select(d => Convert.ToDouble(d.Probability)).ToArray());
                var next         = transitions[distribution.Sample()].NextState;
                output.Add(next);
                if (SimpleTokeniser.IsEndOfSentence(next))
                {
                    break;
                }
                prev = curr;
                curr = next;
            }
            Assert.IsTrue(output.Count < 1024);
        }
Example #5
    // Update is called once per frame
    void Update()
    {
        timeToNextMonster -= Time.deltaTime;

        if (timeToNextMonster < 0 && !monster.active)
        {
            cenario = GenerateRandoms.cenarioSelecionado;
            //monster.GetComponent<Animator>().Play(cenario.ToString());
            timeToNextMonster = cosine(4, 8);

            double[] probs = new double[4];
            probs[0]      = 0.4;
            probs[1]      = 0.3;
            probs[2]      = 0.2;
            probs[3]      = 0.1;
            typeOfMonster = Categorical.Sample(probs);

            monster.SetActive(true);
            monster.transform.position = new Vector3(Camera.main.transform.position.x + 5, transform.position.y, transform.position.z);
            // monster.GetComponent<SpriteRenderer>().sprite = Resources.Load<Sprite>("inimigos/" + cenario.ToString() + "/monstro" + typeOfMonster);

            monster.GetComponent <Animator>().Play(cenario.ToString() + typeOfMonster.ToString());
            Debug.Log(cenario.ToString() + typeOfMonster.ToString());
        }
    }
Example #6
        public void TrainModel3()
        {
            var trainer = BrightWireProvider.CreateMarkovTrainer3 <string>();

            _Train(trainer);
            var model = trainer.Build().AsDictionary;

            // generate some text
            var    rand = new Random();
            string prevPrev = default(string), prev = default(string), curr = default(string);
            var    output = new List <string>();

            for (var i = 0; i < 1024; i++)
            {
                var transitions  = model.GetTransitions(prevPrev, prev, curr);
                var distribution = new Categorical(transitions.Select(d => Convert.ToDouble(d.Probability)).ToArray());
                var next         = transitions[distribution.Sample()].NextState;
                output.Add(next);
                if (SimpleTokeniser.IsEndOfSentence(next))
                {
                    break;
                }
                prevPrev = prev;
                prev     = curr;
                curr     = next;
            }
            Assert.IsTrue(output.Count < 1024);
        }
Example #7
        public void StartEpoch()
        {
            var distribution = new Categorical(_weightFunc().Select(d => Convert.ToDouble(d)).ToArray());

            _rowMap.Clear();
            for (int i = 0, len = _dataProvider.Count; i < len; i++)
            {
                _rowMap[i] = distribution.Sample();
            }
        }
Example #8
        public RandomProjection(ILinearAlgebraProvider lap, int fixedSize, int reducedSize, int s = 3)
        {
            LinearAlgebraProvider = lap;
            _fixedSize            = fixedSize;
            Size = reducedSize;
            var c1           = Math.Sqrt(3);
            var distribution = new Categorical(new[] { 1.0 / (2 * s), 1 - (1.0 / s), 1.0 / (2 * s) });

            Matrix = LinearAlgebraProvider.CreateMatrix(fixedSize, reducedSize,
                                                        (i, j) => Convert.ToSingle((distribution.Sample() - 1) * c1));
        }
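The `RandomProjection` constructor above draws each matrix entry from a three-point distribution with probabilities 1/(2s), 1 - 1/s and 1/(2s); subtracting 1 from the sample and scaling by √3 yields entries of -√3, 0 or +√3, i.e. a sparse random projection in the style of Achlioptas. A standalone sketch (not from the library) that checks the entry frequencies:

    // Sketch: verify that the three-point distribution produces entries -sqrt(3), 0, +sqrt(3)
    // with frequencies roughly 1/6, 2/3, 1/6 for s = 3.
    using System;
    using MathNet.Numerics.Distributions;

    static class SparseProjectionSketch
    {
        static void Main()
        {
            const int s = 3;
            double c1 = Math.Sqrt(3);

            // Same three-point distribution as in the RandomProjection constructor.
            var distribution = new Categorical(new[] { 1.0 / (2 * s), 1 - (1.0 / s), 1.0 / (2 * s) });

            var counts = new int[3];                           // buckets for -sqrt(3), 0, +sqrt(3)
            for (var i = 0; i < 100000; i++)
            {
                double entry = (distribution.Sample() - 1) * c1;   // -c1, 0 or +c1
                counts[Math.Sign(entry) + 1]++;
            }

            Console.WriteLine($"-sqrt(3): {counts[0]}, 0: {counts[1]}, +sqrt(3): {counts[2]}");
        }
    }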
Example #9
        IReadOnlyList <int> _GetNextSamples()
        {
            var distribution = new Categorical(_rowWeight.Select(d => Convert.ToDouble(d)).ToArray());
            var ret          = new List <int>();

            for (int i = 0, len = _rowWeight.Length; i < len; i++)
            {
                ret.Add(distribution.Sample());
            }
            return(ret.OrderBy(v => v).ToList());
        }
Example #10
        /// <summary>
        /// Builds an n-gram based language model and generates new text from the model
        /// </summary>
        public static void MarkovChains()
        {
            // tokenise the novel "The Beautiful and the Damned" by F. Scott Fitzgerald
            List <IReadOnlyList <string> > sentences;

            using (var client = new WebClient()) {
                var data = client.DownloadString("http://www.gutenberg.org/cache/epub/9830/pg9830.txt");
                var pos  = data.IndexOf("CHAPTER I");
                sentences = SimpleTokeniser.FindSentences(SimpleTokeniser.Tokenise(data.Substring(pos))).ToList();
            }

            // create a markov trainer that uses a window of size 3
            var trainer = BrightWireProvider.CreateMarkovTrainer3 <string>();

            foreach (var sentence in sentences)
            {
                trainer.Add(sentence);
            }
            var model = trainer.Build().AsDictionary;

            // generate some text
            var rand = new Random();

            for (var i = 0; i < 50; i++)
            {
                var    sb = new StringBuilder();
                string prevPrev = default(string), prev = default(string), curr = default(string);
                for (var j = 0; j < 256; j++)
                {
                    var transitions  = model.GetTransitions(prevPrev, prev, curr);
                    var distribution = new Categorical(transitions.Select(d => Convert.ToDouble(d.Probability)).ToArray());
                    var next         = transitions[distribution.Sample()].NextState;
                    if (Char.IsLetterOrDigit(next[0]) && sb.Length > 0)
                    {
                        var lastChar = sb[sb.Length - 1];
                        if (lastChar != '\'' && lastChar != '-')
                        {
                            sb.Append(' ');
                        }
                    }
                    sb.Append(next);

                    if (SimpleTokeniser.IsEndOfSentence(next))
                    {
                        break;
                    }
                    prevPrev = prev;
                    prev     = curr;
                    curr     = next;
                }
                Console.WriteLine(sb.ToString());
            }
        }
Example #11
        protected virtual Feedback SampleNegFeedback(Feedback posFeedback)
        {
            int observedOrUnobserved = _unobservedOrNegativeSampler.Sample();

            if (observedOrUnobserved == 1)
            {
                return(SampleUnobservedNegFeedback(posFeedback));
            }

            var toSampleLevels = GetNegSampleLevels(posFeedback);

            // not possible to sample observed
            if (toSampleLevels.Count == 0)
            {
                return(SampleUnobservedNegFeedback(posFeedback));
            }

            // sample observed
            // the level is chosen in proportion to the number of items it holds, so items are sampled uniformly across all candidate levels
            var cdf = new List <int>();

            cdf.Add(0);

            for (int i = 0; i < toSampleLevels.Count; i++)
            {
                cdf.Add(cdf[i] + UserLevelFeedback[posFeedback.User.Id][toSampleLevels[i]].Count);
            }

            int sampleIndex = random.Next(cdf.Last());

            int levelIndex = -1, sampleOffset = -1;

            for (int i = 1; i < cdf.Count; i++)
            {
                if (sampleIndex < cdf[i])
                {
                    sampleOffset = sampleIndex - cdf[i - 1];
                    levelIndex   = i - 1;
                    break;
                }
            }

            NumObservedNeg++;
            return(UserLevelFeedback[posFeedback.User.Id][toSampleLevels[levelIndex]][sampleOffset]);
        }
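Example #11 samples an observed negative by building a cumulative count over the candidate levels and drawing one uniform integer, so every item across those levels is equally likely. The same effect can be sketched with `Categorical` by weighting each level by its item count and then picking an item uniformly inside the sampled level (the level sizes below are made up):

    // Sketch: level-proportional sampling equivalent to the manual CDF in Example #11.
    using System;
    using System.Linq;
    using MathNet.Numerics.Distributions;

    static class LevelProportionalSketch
    {
        static void Main()
        {
            var random = new Random();
            int[] levelSizes = { 8, 3, 15 };            // hypothetical item counts per candidate level

            // Weight levels by size, then pick an item uniformly inside the chosen level.
            var levelDist = new Categorical(levelSizes.Select(n => (double)n).ToArray());
            int level = levelDist.Sample();
            int offset = random.Next(levelSizes[level]);

            Console.WriteLine($"level {level}, item offset {offset}");
        }
    }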
Example #12
        protected virtual string SampleNegItemDynamic(Feedback posFeedback)
        {
            // sample r
            int r;

            do
            {
                r = _rankSampler.Sample();
            } while (r >= AllItems.Count);

            int user_id = UsersMap.ToInternalID(posFeedback.User.Id);
            var u       = user_factors.GetRow(user_id);

            // sample f from p(f|c)
            double sum = 0;

            for (int i = 0; i < NumFactors; i++)
            {
                sum += Math.Abs(u[i]) * _itemFactorsStdev[i];
            }

            double[] probs = new double[NumFactors];
            for (int i = 0; i < NumFactors; i++)
            {
                probs[i] = Math.Abs(u[i]) * _itemFactorsStdev[i] / sum;
            }

            int f = new Categorical(probs).Sample();

            // take item j (negItemId) from position r of the ranked list for the sampled factor f
            string negItemId;

            if (Math.Sign(user_factors[user_id, f]) > 0)
            {
                negItemId = _factorBasedRank[f][r];
            }
            else
            {
                negItemId = _factorBasedRank[f][AllItems.Count - r - 1];
            }

            return(negItemId);
        }
Example #13
        public IList <NeuralNetwork> ApplySelection(IList <GeneticAlgorithmAgent> agents)
        {
            int   populationCount = agents.Count;
            float fitnessSum      = agents.Select(agent => agent.Fitness).Sum();
            var   probabilities   = new double[populationCount];

            for (int i = 0; i < populationCount; i++)
            {
                probabilities[i] = (double)agents[i].Fitness / (double)fitnessSum;
            }

            var distribution  = new Categorical(probabilities);
            var newPopulation = new List <NeuralNetwork>();

            for (int i = 0; i < populationCount; i++)
            {
                newPopulation.Add(agents[distribution.Sample()].Network.Clone());
            }

            return(newPopulation);
        }
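The `ApplySelection` above is roulette-wheel (fitness-proportional) selection: each agent's probability is its share of the total fitness, and the new population is drawn with replacement. A minimal sketch of just the weighting and sampling step, with invented fitness values:

    // Sketch: fitness-proportional (roulette-wheel) selection of indices.
    using System;
    using System.Linq;
    using MathNet.Numerics.Distributions;

    static class RouletteWheelSketch
    {
        static void Main()
        {
            double[] fitness = { 1.0, 4.0, 5.0 };       // hypothetical agent fitness values

            // Each agent's probability is its share of the total fitness.
            double sum = fitness.Sum();
            var selection = new Categorical(fitness.Select(f => f / sum).ToArray());

            // Draw a new population of the same size, with replacement.
            int[] picks = Enumerable.Range(0, fitness.Length).Select(_ => selection.Sample()).ToArray();
            Console.WriteLine(string.Join(", ", picks));
        }
    }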
Example #14
    // Update is called once per frame
    void Update()
    {
        if (!item.active)
        {
            timeToNextItem -= Time.deltaTime;
            if (timeToNextItem < 0)
            {
                timeToNextItem = cosine(30, 45);
                double[] probs = new double[4];
                probs[0] = 0.4;
                probs[1] = 0.3;
                probs[2] = 0.2;
                probs[3] = 0.1;
                int    typeOfItem = Categorical.Sample(probs);
                Camera cam        = Camera.main;
                float  height     = 2f * cam.orthographicSize;
                float  width      = height * cam.aspect;
                item.SetActive(true);
                item.transform.position = new Vector3(cam.transform.position.x + 4, transform.position.y, transform.position.z);

                if (typeOfItem == 0)
                {
                    item.GetComponent <SpriteRenderer>().sprite = Resources.Load <Sprite>("Itens/potion");
                }
                else if (typeOfItem == 1)
                {
                    item.GetComponent <SpriteRenderer>().sprite = Resources.Load <Sprite>("Itens/pill");
                }
                else if (typeOfItem == 2)
                {
                    item.GetComponent <SpriteRenderer>().sprite = Resources.Load <Sprite>("Itens/medicine");
                }
                else if (typeOfItem == 3)
                {
                    item.GetComponent <SpriteRenderer>().sprite = Resources.Load <Sprite>("Itens/backpack");
                }
            }
        }
    }
Example #15
        static void Main(string[] args)
        {
            int increment         = 10;      // how much the inverse probabilities are incremented per sample
            int guidanceParameter = 1000000; // small value: consecutive draws are strongly steered away from recent outcomes; large value: close to uniform sampling

            int[]    invprob       = new int [6];
            double[] probabilities = new double [6];

            int[] counter = new int [] { 0, 0, 0, 0, 0, 0 };
            int[] repeat  = new int [] { 0, 0, 0, 0, 0, 0 };
            int   prev    = -1;

            for (int k = 0; k != 100000; ++k)
            {
                if (k % 60 == 0)        // drop accumulation, important for low guidance
                {
                    for (int i = 0; i != 6; ++i)
                    {
                        invprob[i] = guidanceParameter;
                    }
                }
                for (int i = 0; i != 6; ++i)
                {
                    probabilities[i] = 1.0 / (double)invprob[i];
                }
                var cat = new Categorical(probabilities);
                var q   = cat.Sample();
                counter[q] += 1;
                invprob[q] += increment;
                if (q == prev)
                {
                    repeat[q] += 1;
                }
                prev = q;
            }
            counter.ToList().ForEach(Console.WriteLine);
            repeat.ToList().ForEach(Console.WriteLine);
        }
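Example #15 steers consecutive draws away from recently sampled outcomes by incrementing the inverse probability of whatever was just drawn; a large `guidanceParameter` keeps the draws close to uniform, while a small one penalizes repeats strongly. If such a run needs to be reproducible, the static `Categorical.Sample(Random, double[])` overload used elsewhere on this page accepts a seeded `System.Random` (a sketch, not part of the original example):

    // Sketch: seeding the random source for repeatable categorical draws.
    using System;
    using MathNet.Numerics.Distributions;

    static class SeededSamplingSketch
    {
        static void Main()
        {
            var rng = new Random(42);                   // fixed seed for a repeatable sequence
            double[] probabilities = { 1.0, 1.0, 1.0, 1.0, 1.0, 1.0 };

            for (var k = 0; k < 5; k++)
            {
                Console.Write(Categorical.Sample(rng, probabilities) + " ");
            }
            Console.WriteLine();
        }
    }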
Example #16
        public IList <NeuralNetwork> ApplySelection(IList <GeneticAlgorithmAgent> agents)
        {
            IList <GeneticAlgorithmAgent> agentsSorted = agents
                                                         .OrderBy(agent => agent.Fitness)
                                                         .ToList();

            int populationCount = agentsSorted.Count;

            double[] probabilities = Enumerable
                                     .Range(0, populationCount)
                                     .Select(i => (double)(2 * i) / (populationCount * (populationCount - 1)))
                                     .ToArray();

            var distribution  = new Categorical(probabilities);
            var newPopulation = new List <NeuralNetwork>();

            for (int i = 0; i < populationCount; i++)
            {
                newPopulation.Add(agentsSorted[distribution.Sample()].Network.Clone());
            }

            return(newPopulation);
        }
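Example #16 is rank-based selection: after sorting by fitness, the agent at zero-based rank i receives probability 2i / (n(n - 1)), an arithmetic series that sums to 1 and gives the worst agent no chance of being copied. A quick check of that weight vector for an arbitrary population size:

    // Sketch: inspect the rank-based selection weights used in Example #16.
    using System;
    using System.Linq;

    static class RankWeightsSketch
    {
        static void Main()
        {
            int n = 5;                                  // hypothetical population size

            // Same formula as Example #16: probability of rank i is 2i / (n * (n - 1)).
            double[] probabilities = Enumerable.Range(0, n)
                                               .Select(i => (double)(2 * i) / (n * (n - 1)))
                                               .ToArray();

            Console.WriteLine(string.Join(", ", probabilities));  // 0, 0.1, 0.2, 0.3, 0.4
            Console.WriteLine(probabilities.Sum());               // 1
        }
    }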
Example #17
 public void FailSampleStatic()
 {
     Assert.Throws <ArgumentOutOfRangeException>(() => Categorical.Sample(new Random(), _badP));
 }
Example #18
 public void CanSample()
 {
     var n = new Categorical(largeP);
     var d = n.Sample();
 }
Example #19
        public void CanSample()
        {
            var n = new Categorical(_largeP);

            n.Sample();
        }
Example #20
 public void FailSampleStatic()
 {
     Assert.That(() => Categorical.Sample(new System.Random(0), _badP), Throws.ArgumentException);
 }
Example #21
 public void CanSampleStatic()
 {
     Categorical.Sample(new System.Random(0), _largeP);
 }
Example #22
 public void CanSample()
 {
     var n = new Categorical(_largeP);
     n.Sample();
 }
Example #23
        public static void MarkovChains()
        {
            var lines = res.trump.Split('\n');

            StringBuilder sb = new StringBuilder();

            foreach (var l in lines)
            {
                if (string.IsNullOrWhiteSpace(l))
                {
                    continue;
                }

                var spl = l.Split(',');
                if (spl.Length > 1)
                {
                    sb.Append(spl[1]);
                    if (spl[1].Last() == '!' || spl[1].Last() == '?' || spl[1].Last() == '.')
                    {
                        continue;
                    }
                    sb.Append(". ");
                }
            }

            List <IReadOnlyList <string> > sentences;

            sentences = SimpleTokeniser.FindSentences(SimpleTokeniser.Tokenise(sb.ToString()))
                        .ToList();

            var sentencesRW = sentences
                              .Select(m => m.ToList())
                              .Where(m => m.Count > 1)
                              .Where(m => !((m.Contains("https") || m.Contains("http")) && m.Count < 10))
                              .Where(m => !(m[0] == "co"))
                              .ToList();

            var trainer = BrightWireProvider.CreateMarkovTrainer3 <string>();

            foreach (var sentence in sentencesRW)
            {
                trainer.Add(sentence);
            }
            var model = trainer.Build().AsDictionary;

            // generate some text
            for (var i = 0; i < 5000000; i++)
            {
                sb = new StringBuilder();
                string prevPrev = default(string), prev = default(string), curr = default(string);
                for (var j = 0; j < 256; j++)
                {
                    var transitions  = model.GetTransitions(prevPrev, prev, curr);
                    var distribution = new Categorical(transitions.Select(d => Convert.ToDouble(d.Probability)).ToArray());
                    var next         = transitions[distribution.Sample()].NextState;
                    if (Char.IsLetterOrDigit(next[0]) && sb.Length > 0)
                    {
                        var lastChar = sb[sb.Length - 1];
                        if (lastChar != '\'' && lastChar != '-')
                        {
                            sb.Append(' ');
                        }
                    }
                    sb.Append(next);

                    if (SimpleTokeniser.IsEndOfSentence(next))
                    {
                        break;
                    }
                    prevPrev = prev;
                    prev     = curr;
                    curr     = next;
                }

                if (sb.Length < 10)
                {
                    continue;
                }

                if (i % 10000 == 0)
                {
                    Console.WriteLine($"Writing line {i}");
                }

                File.AppendAllText("sts.txt", sb.ToString() + Environment.NewLine);
            }
        }
Example #24
 public void CanSampleStatic()
 {
     Categorical.Sample(new Random(), _largeP);
 }
Example #25
        /// <summary>
        /// Run example
        /// </summary>
        /// <a href="http://en.wikipedia.org/wiki/Categorical_distribution">Categorical distribution</a>
        public void Run()
        {
            // 1. Initialize the new instance of the Categorical distribution class with parameters P = (0.1, 0.2, 0.25, 0.45)
            var binomial = new Categorical(new[] { 0.1, 0.2, 0.25, 0.45 });

            Console.WriteLine(@"1. Initialize the new instance of the Categorical distribution class with parameters P = (0.1, 0.2, 0.25, 0.45)");
            Console.WriteLine();

            // 2. Distribution properties:
            Console.WriteLine(@"2. {0} distribution properties:", binomial);

            // Cumulative distribution function
            Console.WriteLine(@"{0} - Cumulative distribution at location '3'", binomial.CumulativeDistribution(3).ToString(" #0.00000;-#0.00000"));

            // Probability density
            Console.WriteLine(@"{0} - Probability mass at location '3'", binomial.Probability(3).ToString(" #0.00000;-#0.00000"));

            // Log probability density
            Console.WriteLine(@"{0} - Log probability mass at location '3'", binomial.ProbabilityLn(3).ToString(" #0.00000;-#0.00000"));

            // Entropy
            Console.WriteLine(@"{0} - Entropy", binomial.Entropy.ToString(" #0.00000;-#0.00000"));

            // Largest element in the domain
            Console.WriteLine(@"{0} - Largest element in the domain", binomial.Maximum.ToString(" #0.00000;-#0.00000"));

            // Smallest element in the domain
            Console.WriteLine(@"{0} - Smallest element in the domain", binomial.Minimum.ToString(" #0.00000;-#0.00000"));

            // Mean
            Console.WriteLine(@"{0} - Mean", binomial.Mean.ToString(" #0.00000;-#0.00000"));

            // Median
            Console.WriteLine(@"{0} - Median", binomial.Median.ToString(" #0.00000;-#0.00000"));

            // Variance
            Console.WriteLine(@"{0} - Variance", binomial.Variance.ToString(" #0.00000;-#0.00000"));

            // Standard deviation
            Console.WriteLine(@"{0} - Standard deviation", binomial.StdDev.ToString(" #0.00000;-#0.00000"));

            // 3. Generate 10 samples of the Categorical distribution
            Console.WriteLine(@"3. Generate 10 samples of the Categorical distribution");
            for (var i = 0; i < 10; i++)
            {
                Console.Write(binomial.Sample().ToString("N05") + @" ");
            }

            Console.WriteLine();
            Console.WriteLine();

            // 4. Generate 100000 samples of the Categorical(new []{ 0.1, 0.2, 0.25, 0.45 }) distribution and display histogram
            Console.WriteLine(@"4. Generate 100000 samples of the Categorical(0.1, 0.2, 0.25, 0.45) distribution and display histogram");
            var data = new int[100000];

            Categorical.Samples(data, new[] { 0.1, 0.2, 0.25, 0.45 });
            ConsoleHelper.DisplayHistogram(data);
            Console.WriteLine();

            // 5. Generate 100000 samples of the Categorical(new []{ 0.6, 0.2, 0.1, 0.1 }) distribution and display histogram
            Console.WriteLine(@"5. Generate 100000 samples of the Categorical(0.6, 0.2, 0.1, 0.1) distribution and display histogram");
            Categorical.Samples(data, new[] { 0.6, 0.2, 0.1, 0.1 });
            ConsoleHelper.DisplayHistogram(data);
        }
Example #26
 public void CanSample()
 {
     var n = new Categorical(largeP);
     var d = n.Sample();
 }
Example #27
        public override void ExecuteExample()
        {
            // <a href="http://en.wikipedia.org/wiki/Binomial_distribution">Binomial distribution</a>
            MathDisplay.WriteLine("<b>Binomial distribution</b>");
            // 1. Initialize the new instance of the Binomial distribution class with parameters P = 0.2, N = 20
            var binomial = new Binomial(0.2, 20);

            MathDisplay.WriteLine(@"1. Initialize the new instance of the Binomial distribution class with parameters P = {0}, N = {1}", binomial.P, binomial.N);
            MathDisplay.WriteLine();

            // 2. Distribution properties:
            MathDisplay.WriteLine(@"2. {0} distribution properties:", binomial);

            // Cumulative distribution function
            MathDisplay.WriteLine(@"{0} - Cumulative distribution at location '3'", binomial.CumulativeDistribution(3).ToString(" #0.00000;-#0.00000"));

            // Probability density
            MathDisplay.WriteLine(@"{0} - Probability mass at location '3'", binomial.Probability(3).ToString(" #0.00000;-#0.00000"));

            // Log probability density
            MathDisplay.WriteLine(@"{0} - Log probability mass at location '3'", binomial.ProbabilityLn(3).ToString(" #0.00000;-#0.00000"));

            // Entropy
            MathDisplay.WriteLine(@"{0} - Entropy", binomial.Entropy.ToString(" #0.00000;-#0.00000"));

            // Largest element in the domain
            MathDisplay.WriteLine(@"{0} - Largest element in the domain", binomial.Maximum.ToString(" #0.00000;-#0.00000"));

            // Smallest element in the domain
            MathDisplay.WriteLine(@"{0} - Smallest element in the domain", binomial.Minimum.ToString(" #0.00000;-#0.00000"));

            // Mean
            MathDisplay.WriteLine(@"{0} - Mean", binomial.Mean.ToString(" #0.00000;-#0.00000"));

            // Median
            MathDisplay.WriteLine(@"{0} - Median", binomial.Median.ToString(" #0.00000;-#0.00000"));

            // Mode
            MathDisplay.WriteLine(@"{0} - Mode", binomial.Mode.ToString(" #0.00000;-#0.00000"));

            // Variance
            MathDisplay.WriteLine(@"{0} - Variance", binomial.Variance.ToString(" #0.00000;-#0.00000"));

            // Standard deviation
            MathDisplay.WriteLine(@"{0} - Standard deviation", binomial.StdDev.ToString(" #0.00000;-#0.00000"));

            // Skewness
            MathDisplay.WriteLine(@"{0} - Skewness", binomial.Skewness.ToString(" #0.00000;-#0.00000"));
            MathDisplay.WriteLine();

            // 3. Generate 10 samples of the Binomial distribution
            MathDisplay.WriteLine(@"3. Generate 10 samples of the Binomial distribution");
            for (var i = 0; i < 10; i++)
            {
                MathDisplay.Write(binomial.Sample().ToString("N05") + @" ");
            }
            MathDisplay.FlushBuffer();
            MathDisplay.WriteLine();
            MathDisplay.WriteLine();


            // <a href="http://en.wikipedia.org/wiki/Bernoulli_distribution">Bernoulli distribution</a>
            MathDisplay.WriteLine("<b>Bernoulli distribution</b>");
            // 1. Initialize the new instance of the Bernoulli distribution class with parameter P = 0.2
            var bernoulli = new Bernoulli(0.2);

            MathDisplay.WriteLine(@"1. Initialize the new instance of the Bernoulli distribution class with parameter P = {0}", bernoulli.P);
            MathDisplay.WriteLine();

            // 2. Distribution properties:
            MathDisplay.WriteLine(@"2. {0} distribution properties:", bernoulli);

            // Cumulative distribution function
            MathDisplay.WriteLine(@"{0} - Cumulative distribution at location '3'", bernoulli.CumulativeDistribution(3).ToString(" #0.00000;-#0.00000"));

            // Probability density
            MathDisplay.WriteLine(@"{0} - Probability mass at location '3'", bernoulli.Probability(3).ToString(" #0.00000;-#0.00000"));

            // Log probability density
            MathDisplay.WriteLine(@"{0} - Log probability mass at location '3'", bernoulli.ProbabilityLn(3).ToString(" #0.00000;-#0.00000"));

            // Entropy
            MathDisplay.WriteLine(@"{0} - Entropy", bernoulli.Entropy.ToString(" #0.00000;-#0.00000"));

            // Largest element in the domain
            MathDisplay.WriteLine(@"{0} - Largest element in the domain", bernoulli.Maximum.ToString(" #0.00000;-#0.00000"));

            // Smallest element in the domain
            MathDisplay.WriteLine(@"{0} - Smallest element in the domain", bernoulli.Minimum.ToString(" #0.00000;-#0.00000"));

            // Mean
            MathDisplay.WriteLine(@"{0} - Mean", bernoulli.Mean.ToString(" #0.00000;-#0.00000"));

            // Mode
            MathDisplay.WriteLine(@"{0} - Mode", bernoulli.Mode.ToString(" #0.00000;-#0.00000"));

            // Variance
            MathDisplay.WriteLine(@"{0} - Variance", bernoulli.Variance.ToString(" #0.00000;-#0.00000"));

            // Standard deviation
            MathDisplay.WriteLine(@"{0} - Standard deviation", bernoulli.StdDev.ToString(" #0.00000;-#0.00000"));

            // Skewness
            MathDisplay.WriteLine(@"{0} - Skewness", bernoulli.Skewness.ToString(" #0.00000;-#0.00000"));
            MathDisplay.WriteLine();

            // 3. Generate 10 samples of the Bernoulli distribution
            MathDisplay.WriteLine(@"3. Generate 10 samples of the Bernoulli distribution");
            for (var i = 0; i < 10; i++)
            {
                MathDisplay.Write(bernoulli.Sample().ToString("N05") + @" ");
            }
            MathDisplay.FlushBuffer();
            MathDisplay.WriteLine();
            MathDisplay.WriteLine();


            // <a href="http://en.wikipedia.org/wiki/Categorical_distribution">Categorical distribution</a>
            MathDisplay.WriteLine("<b>Categorical distribution</b>");
            // 1. Initialize the new instance of the Categorical distribution class with parameters P = (0.1, 0.2, 0.25, 0.45)
            var binomialC = new Categorical(new[] { 0.1, 0.2, 0.25, 0.45 });

            MathDisplay.WriteLine(@"1. Initialize the new instance of the Categorical distribution class with parameters P = (0.1, 0.2, 0.25, 0.45)");
            MathDisplay.WriteLine();

            // 2. Distribution properties:
            MathDisplay.WriteLine(@"2. {0} distribution properties:", binomialC);

            // Cumulative distribution function
            MathDisplay.WriteLine(@"{0} - Cumulative distribution at location '3'", binomialC.CumulativeDistribution(3).ToString(" #0.00000;-#0.00000"));

            // Probability density
            MathDisplay.WriteLine(@"{0} - Probability mass at location '3'", binomialC.Probability(3).ToString(" #0.00000;-#0.00000"));

            // Log probability density
            MathDisplay.WriteLine(@"{0} - Log probability mass at location '3'", binomialC.ProbabilityLn(3).ToString(" #0.00000;-#0.00000"));

            // Entropy
            MathDisplay.WriteLine(@"{0} - Entropy", binomialC.Entropy.ToString(" #0.00000;-#0.00000"));

            // Largest element in the domain
            MathDisplay.WriteLine(@"{0} - Largest element in the domain", binomialC.Maximum.ToString(" #0.00000;-#0.00000"));

            // Smallest element in the domain
            MathDisplay.WriteLine(@"{0} - Smallest element in the domain", binomialC.Minimum.ToString(" #0.00000;-#0.00000"));

            // Mean
            MathDisplay.WriteLine(@"{0} - Mean", binomialC.Mean.ToString(" #0.00000;-#0.00000"));

            // Median
            MathDisplay.WriteLine(@"{0} - Median", binomialC.Median.ToString(" #0.00000;-#0.00000"));

            // Variance
            MathDisplay.WriteLine(@"{0} - Variance", binomialC.Variance.ToString(" #0.00000;-#0.00000"));

            // Standard deviation
            MathDisplay.WriteLine(@"{0} - Standard deviation", binomialC.StdDev.ToString(" #0.00000;-#0.00000"));

            // 3. Generate 10 samples of the Categorical distribution
            MathDisplay.WriteLine(@"3. Generate 10 samples of the Categorical distribution");
            for (var i = 0; i < 10; i++)
            {
                MathDisplay.Write(binomialC.Sample().ToString("N05") + @" ");
            }
            MathDisplay.FlushBuffer();
            MathDisplay.WriteLine();
            MathDisplay.WriteLine();


            // <a href="http://en.wikipedia.org/wiki/Conway%E2%80%93Maxwell%E2%80%93Poisson_distribution">ConwayMaxwellPoisson distribution</a>
            MathDisplay.WriteLine("<b>Conway Maxwell Poisson distribution</b>");
            // 1. Initialize the new instance of the ConwayMaxwellPoisson distribution class with parameters Lambda = 2, Nu = 1
            var conwayMaxwellPoisson = new ConwayMaxwellPoisson(2, 1);

            MathDisplay.WriteLine(@"1. Initialize the new instance of the ConwayMaxwellPoisson distribution class with parameters Lambda = {0}, Nu = {1}", conwayMaxwellPoisson.Lambda, conwayMaxwellPoisson.Nu);
            MathDisplay.WriteLine();

            // 2. Distribution properties:
            MathDisplay.WriteLine(@"2. {0} distribution properties:", conwayMaxwellPoisson);

            // Cumulative distribution function
            MathDisplay.WriteLine(@"{0} - Cumulative distribution at location '3'", conwayMaxwellPoisson.CumulativeDistribution(3).ToString(" #0.00000;-#0.00000"));

            // Probability density
            MathDisplay.WriteLine(@"{0} - Probability mass at location '3'", conwayMaxwellPoisson.Probability(3).ToString(" #0.00000;-#0.00000"));

            // Log probability density
            MathDisplay.WriteLine(@"{0} - Log probability mass at location '3'", conwayMaxwellPoisson.ProbabilityLn(3).ToString(" #0.00000;-#0.00000"));

            // Smallest element in the domain
            MathDisplay.WriteLine(@"{0} - Smallest element in the domain", conwayMaxwellPoisson.Minimum.ToString(" #0.00000;-#0.00000"));

            // Mean
            MathDisplay.WriteLine(@"{0} - Mean", conwayMaxwellPoisson.Mean.ToString(" #0.00000;-#0.00000"));

            // Variance
            MathDisplay.WriteLine(@"{0} - Variance", conwayMaxwellPoisson.Variance.ToString(" #0.00000;-#0.00000"));

            // Standard deviation
            MathDisplay.WriteLine(@"{0} - Standard deviation", conwayMaxwellPoisson.StdDev.ToString(" #0.00000;-#0.00000"));
            MathDisplay.WriteLine();

            // 3. Generate 10 samples of the ConwayMaxwellPoisson distribution
            MathDisplay.WriteLine(@"3. Generate 10 samples of the ConwayMaxwellPoisson distribution");
            for (var i = 0; i < 10; i++)
            {
                MathDisplay.Write(conwayMaxwellPoisson.Sample().ToString("N05") + @" ");
            }
            MathDisplay.FlushBuffer();
            MathDisplay.WriteLine();
            MathDisplay.WriteLine();


            // <a href="http://en.wikipedia.org/wiki/Discrete_uniform">DiscreteUniform distribution</a>
            MathDisplay.WriteLine("<b>Discrete Uniform distribution</b>");
            // 1. Initialize the new instance of the DiscreteUniform distribution class with parameters LowerBound = 2, UpperBound = 10
            var discreteUniform = new DiscreteUniform(2, 10);

            MathDisplay.WriteLine(@"1. Initialize the new instance of the DiscreteUniform distribution class with parameters LowerBound = {0}, UpperBound = {1}", discreteUniform.LowerBound, discreteUniform.UpperBound);
            MathDisplay.WriteLine();

            // 2. Distribution properties:
            MathDisplay.WriteLine(@"2. {0} distribution properties:", discreteUniform);

            // Cumulative distribution function
            MathDisplay.WriteLine(@"{0} - Cumulative distribution at location '3'", discreteUniform.CumulativeDistribution(3).ToString(" #0.00000;-#0.00000"));

            // Probability density
            MathDisplay.WriteLine(@"{0} - Probability mass at location '3'", discreteUniform.Probability(3).ToString(" #0.00000;-#0.00000"));

            // Log probability density
            MathDisplay.WriteLine(@"{0} - Log probability mass at location '3'", discreteUniform.ProbabilityLn(3).ToString(" #0.00000;-#0.00000"));

            // Entropy
            MathDisplay.WriteLine(@"{0} - Entropy", discreteUniform.Entropy.ToString(" #0.00000;-#0.00000"));

            // Largest element in the domain
            MathDisplay.WriteLine(@"{0} - Largest element in the domain", discreteUniform.Maximum.ToString(" #0.00000;-#0.00000"));

            // Smallest element in the domain
            MathDisplay.WriteLine(@"{0} - Smallest element in the domain", discreteUniform.Minimum.ToString(" #0.00000;-#0.00000"));

            // Mean
            MathDisplay.WriteLine(@"{0} - Mean", discreteUniform.Mean.ToString(" #0.00000;-#0.00000"));

            // Median
            MathDisplay.WriteLine(@"{0} - Median", discreteUniform.Median.ToString(" #0.00000;-#0.00000"));

            // Mode
            MathDisplay.WriteLine(@"{0} - Mode", discreteUniform.Mode.ToString(" #0.00000;-#0.00000"));

            // Variance
            MathDisplay.WriteLine(@"{0} - Variance", discreteUniform.Variance.ToString(" #0.00000;-#0.00000"));

            // Standard deviation
            MathDisplay.WriteLine(@"{0} - Standard deviation", discreteUniform.StdDev.ToString(" #0.00000;-#0.00000"));

            // Skewness
            MathDisplay.WriteLine(@"{0} - Skewness", discreteUniform.Skewness.ToString(" #0.00000;-#0.00000"));
            MathDisplay.WriteLine();

            // 3. Generate 10 samples of the DiscreteUniform distribution
            MathDisplay.WriteLine(@"3. Generate 10 samples of the DiscreteUniform distribution");
            for (var i = 0; i < 10; i++)
            {
                MathDisplay.Write(discreteUniform.Sample().ToString("N05") + @" ");
            }
            MathDisplay.FlushBuffer();
            MathDisplay.WriteLine();
            MathDisplay.WriteLine();


            // <a href="http://en.wikipedia.org/wiki/Geometric_distribution">Geometric distribution</a>
            MathDisplay.WriteLine("<b>Geometric distribution</b>");
            // 1. Initialize the new instance of the Geometric distribution class with parameter P = 0.2
            var geometric = new Geometric(0.2);

            MathDisplay.WriteLine(@"1. Initialize the new instance of the Geometric distribution class with parameter P = {0}", geometric.P);
            MathDisplay.WriteLine();

            // 2. Distribution properties:
            MathDisplay.WriteLine(@"2. {0} distribution properties:", geometric);

            // Cumulative distribution function
            MathDisplay.WriteLine(@"{0} - Cumulative distribution at location '3'", geometric.CumulativeDistribution(3).ToString(" #0.00000;-#0.00000"));

            // Probability density
            MathDisplay.WriteLine(@"{0} - Probability mass at location '3'", geometric.Probability(3).ToString(" #0.00000;-#0.00000"));

            // Log probability density
            MathDisplay.WriteLine(@"{0} - Log probability mass at location '3'", geometric.ProbabilityLn(3).ToString(" #0.00000;-#0.00000"));

            // Entropy
            MathDisplay.WriteLine(@"{0} - Entropy", geometric.Entropy.ToString(" #0.00000;-#0.00000"));

            // Largest element in the domain
            MathDisplay.WriteLine(@"{0} - Largest element in the domain", geometric.Maximum.ToString(" #0.00000;-#0.00000"));

            // Smallest element in the domain
            MathDisplay.WriteLine(@"{0} - Smallest element in the domain", geometric.Minimum.ToString(" #0.00000;-#0.00000"));

            // Mean
            MathDisplay.WriteLine(@"{0} - Mean", geometric.Mean.ToString(" #0.00000;-#0.00000"));

            // Median
            MathDisplay.WriteLine(@"{0} - Median", geometric.Median.ToString(" #0.00000;-#0.00000"));

            // Mode
            MathDisplay.WriteLine(@"{0} - Mode", geometric.Mode.ToString(" #0.00000;-#0.00000"));

            // Variance
            MathDisplay.WriteLine(@"{0} - Variance", geometric.Variance.ToString(" #0.00000;-#0.00000"));

            // Standard deviation
            MathDisplay.WriteLine(@"{0} - Standard deviation", geometric.StdDev.ToString(" #0.00000;-#0.00000"));

            // Skewness
            MathDisplay.WriteLine(@"{0} - Skewness", geometric.Skewness.ToString(" #0.00000;-#0.00000"));
            MathDisplay.WriteLine();

            // 3. Generate 10 samples of the Geometric distribution
            MathDisplay.WriteLine(@"3. Generate 10 samples of the Geometric distribution");
            for (var i = 0; i < 10; i++)
            {
                MathDisplay.Write(geometric.Sample().ToString("N05") + @" ");
            }
            MathDisplay.FlushBuffer();
            MathDisplay.WriteLine();
            MathDisplay.WriteLine();


            // <a href="http://en.wikipedia.org/wiki/Hypergeometric_distribution">Hypergeometric distribution</a>
            MathDisplay.WriteLine("<b>Hypergeometric distribution</b>");
            // 1. Initialize the new instance of the Hypergeometric distribution class with parameters Population = 30, Success = 15, Draws = 10
            var hypergeometric = new Hypergeometric(30, 15, 10);

            MathDisplay.WriteLine(@"1. Initialize the new instance of the Hypergeometric distribution class with parameters Population = {0}, Success = {1}, Draws = {2}", hypergeometric.Population, hypergeometric.Success, hypergeometric.Draws);
            MathDisplay.WriteLine();

            // 2. Distribution properties:
            MathDisplay.WriteLine(@"2. {0} distribution properties:", hypergeometric);

            // Cumulative distribution function
            MathDisplay.WriteLine(@"{0} - Cumulative distribution at location '3'", hypergeometric.CumulativeDistribution(3).ToString(" #0.00000;-#0.00000"));

            // Probability density
            MathDisplay.WriteLine(@"{0} - Probability mass at location '3'", hypergeometric.Probability(3).ToString(" #0.00000;-#0.00000"));

            // Log probability density
            MathDisplay.WriteLine(@"{0} - Log probability mass at location '3'", hypergeometric.ProbabilityLn(3).ToString(" #0.00000;-#0.00000"));

            // Largest element in the domain
            MathDisplay.WriteLine(@"{0} - Largest element in the domain", hypergeometric.Maximum.ToString(" #0.00000;-#0.00000"));

            // Smallest element in the domain
            MathDisplay.WriteLine(@"{0} - Smallest element in the domain", hypergeometric.Minimum.ToString(" #0.00000;-#0.00000"));

            // Mean
            MathDisplay.WriteLine(@"{0} - Mean", hypergeometric.Mean.ToString(" #0.00000;-#0.00000"));

            // Mode
            MathDisplay.WriteLine(@"{0} - Mode", hypergeometric.Mode.ToString(" #0.00000;-#0.00000"));

            // Variance
            MathDisplay.WriteLine(@"{0} - Variance", hypergeometric.Variance.ToString(" #0.00000;-#0.00000"));

            // Standard deviation
            MathDisplay.WriteLine(@"{0} - Standard deviation", hypergeometric.StdDev.ToString(" #0.00000;-#0.00000"));

            // Skewness
            MathDisplay.WriteLine(@"{0} - Skewness", hypergeometric.Skewness.ToString(" #0.00000;-#0.00000"));
            MathDisplay.WriteLine();

            // 3. Generate 10 samples of the Hypergeometric distribution
            MathDisplay.WriteLine(@"3. Generate 10 samples of the Hypergeometric distribution");
            for (var i = 0; i < 10; i++)
            {
                MathDisplay.Write(hypergeometric.Sample().ToString("N05") + @" ");
            }
            MathDisplay.FlushBuffer();
            MathDisplay.WriteLine();
            MathDisplay.WriteLine();


            // <a href="http://en.wikipedia.org/wiki/Negative_binomial">NegativeBinomial distribution</a>
            MathDisplay.WriteLine("<b>Negative Binomial distribution</b>");
            // 1. Initialize the new instance of the NegativeBinomial distribution class with parameters P = 0.2, R = 20
            var negativeBinomial = new NegativeBinomial(20, 0.2);

            MathDisplay.WriteLine(@"1. Initialize the new instance of the NegativeBinomial distribution class with parameters P = {0}, N = {1}", negativeBinomial.P, negativeBinomial.R);
            MathDisplay.WriteLine();

            // 2. Distribution properties:
            MathDisplay.WriteLine(@"2. {0} distribution properties:", negativeBinomial);

            // Cumulative distribution function
            MathDisplay.WriteLine(@"{0} - Cumulative distribution at location '3'", negativeBinomial.CumulativeDistribution(3).ToString(" #0.00000;-#0.00000"));

            // Probability density
            MathDisplay.WriteLine(@"{0} - Probability mass at location '3'", negativeBinomial.Probability(3).ToString(" #0.00000;-#0.00000"));

            // Log probability density
            MathDisplay.WriteLine(@"{0} - Log probability mass at location '3'", negativeBinomial.ProbabilityLn(3).ToString(" #0.00000;-#0.00000"));

            // Largest element in the domain
            MathDisplay.WriteLine(@"{0} - Largest element in the domain", negativeBinomial.Maximum.ToString(" #0.00000;-#0.00000"));

            // Smallest element in the domain
            MathDisplay.WriteLine(@"{0} - Smallest element in the domain", negativeBinomial.Minimum.ToString(" #0.00000;-#0.00000"));

            // Mean
            MathDisplay.WriteLine(@"{0} - Mean", negativeBinomial.Mean.ToString(" #0.00000;-#0.00000"));

            // Mode
            MathDisplay.WriteLine(@"{0} - Mode", negativeBinomial.Mode.ToString(" #0.00000;-#0.00000"));

            // Variance
            MathDisplay.WriteLine(@"{0} - Variance", negativeBinomial.Variance.ToString(" #0.00000;-#0.00000"));

            // Standard deviation
            MathDisplay.WriteLine(@"{0} - Standard deviation", negativeBinomial.StdDev.ToString(" #0.00000;-#0.00000"));

            // Skewness
            MathDisplay.WriteLine(@"{0} - Skewness", negativeBinomial.Skewness.ToString(" #0.00000;-#0.00000"));
            MathDisplay.WriteLine();

            // 3. Generate 10 samples of the NegativeBinomial distribution
            MathDisplay.WriteLine(@"3. Generate 10 samples of the NegativeBinomial distribution");
            for (var i = 0; i < 10; i++)
            {
                MathDisplay.Write(negativeBinomial.Sample().ToString("N05") + @" ");
            }
            MathDisplay.FlushBuffer();
            MathDisplay.WriteLine();
            MathDisplay.WriteLine();


            // <a href="http://en.wikipedia.org/wiki/Poisson_distribution">Poisson distribution</a>
            MathDisplay.WriteLine("<b>Poisson distribution</b>");
            // 1. Initialize the new instance of the Poisson distribution class with parameter Lambda = 1
            var poisson = new Poisson(1);

            MathDisplay.WriteLine(@"1. Initialize the new instance of the Poisson distribution class with parameter Lambda = {0}", poisson.Lambda);
            MathDisplay.WriteLine();

            // 2. Distribution properties:
            MathDisplay.WriteLine(@"2. {0} distribution properties:", poisson);

            // Cumulative distribution function
            MathDisplay.WriteLine(@"{0} - Cumulative distribution at location '3'", poisson.CumulativeDistribution(3).ToString(" #0.00000;-#0.00000"));

            // Probability density
            MathDisplay.WriteLine(@"{0} - Probability mass at location '3'", poisson.Probability(3).ToString(" #0.00000;-#0.00000"));

            // Log probability density
            MathDisplay.WriteLine(@"{0} - Log probability mass at location '3'", poisson.ProbabilityLn(3).ToString(" #0.00000;-#0.00000"));

            // Entropy
            MathDisplay.WriteLine(@"{0} - Entropy", poisson.Entropy.ToString(" #0.00000;-#0.00000"));

            // Largest element in the domain
            MathDisplay.WriteLine(@"{0} - Largest element in the domain", poisson.Maximum.ToString(" #0.00000;-#0.00000"));

            // Smallest element in the domain
            MathDisplay.WriteLine(@"{0} - Smallest element in the domain", poisson.Minimum.ToString(" #0.00000;-#0.00000"));

            // Mean
            MathDisplay.WriteLine(@"{0} - Mean", poisson.Mean.ToString(" #0.00000;-#0.00000"));

            // Median
            MathDisplay.WriteLine(@"{0} - Median", poisson.Median.ToString(" #0.00000;-#0.00000"));

            // Mode
            MathDisplay.WriteLine(@"{0} - Mode", poisson.Mode.ToString(" #0.00000;-#0.00000"));

            // Variance
            MathDisplay.WriteLine(@"{0} - Variance", poisson.Variance.ToString(" #0.00000;-#0.00000"));

            // Standard deviation
            MathDisplay.WriteLine(@"{0} - Standard deviation", poisson.StdDev.ToString(" #0.00000;-#0.00000"));

            // Skewness
            MathDisplay.WriteLine(@"{0} - Skewness", poisson.Skewness.ToString(" #0.00000;-#0.00000"));
            MathDisplay.WriteLine();

            // 3. Generate 10 samples of the Poisson distribution
            MathDisplay.WriteLine(@"3. Generate 10 samples of the Poisson distribution");
            for (var i = 0; i < 10; i++)
            {
                MathDisplay.Write(poisson.Sample().ToString("N05") + @" ");
            }
            MathDisplay.FlushBuffer();
            MathDisplay.WriteLine();
            MathDisplay.WriteLine();


            // <a href="http://en.wikipedia.org/wiki/Zipf_distribution">Zipf distribution</a>
            MathDisplay.WriteLine("<b>Zipf distribution</b>");
            // 1. Initialize the new instance of the Zipf distribution class with parameters S = 5, N = 10
            var zipf = new Zipf(5, 10);

            MathDisplay.WriteLine(@"1. Initialize the new instance of the Zipf distribution class with parameters S = {0}, N = {1}", zipf.S, zipf.N);
            MathDisplay.WriteLine();

            // 2. Distribution properties:
            MathDisplay.WriteLine(@"2. {0} distribution properties:", zipf);

            // Cumulative distribution function
            MathDisplay.WriteLine(@"{0} - Cumulative distribution at location '3'", zipf.CumulativeDistribution(3).ToString(" #0.00000;-#0.00000"));

            // Probability density
            MathDisplay.WriteLine(@"{0} - Probability mass at location '3'", zipf.Probability(3).ToString(" #0.00000;-#0.00000"));

            // Log probability density
            MathDisplay.WriteLine(@"{0} - Log probability mass at location '3'", zipf.ProbabilityLn(3).ToString(" #0.00000;-#0.00000"));

            // Entropy
            MathDisplay.WriteLine(@"{0} - Entropy", zipf.Entropy.ToString(" #0.00000;-#0.00000"));

            // Largest element in the domain
            MathDisplay.WriteLine(@"{0} - Largest element in the domain", zipf.Maximum.ToString(" #0.00000;-#0.00000"));

            // Smallest element in the domain
            MathDisplay.WriteLine(@"{0} - Smallest element in the domain", zipf.Minimum.ToString(" #0.00000;-#0.00000"));

            // Mean
            MathDisplay.WriteLine(@"{0} - Mean", zipf.Mean.ToString(" #0.00000;-#0.00000"));

            // Mode
            MathDisplay.WriteLine(@"{0} - Mode", zipf.Mode.ToString(" #0.00000;-#0.00000"));

            // Variance
            MathDisplay.WriteLine(@"{0} - Variance", zipf.Variance.ToString(" #0.00000;-#0.00000"));

            // Standard deviation
            MathDisplay.WriteLine(@"{0} - Standard deviation", zipf.StdDev.ToString(" #0.00000;-#0.00000"));

            // Skewness
            MathDisplay.WriteLine(@"{0} - Skewness", zipf.Skewness.ToString(" #0.00000;-#0.00000"));
            MathDisplay.WriteLine();

            // 3. Generate 10 samples of the Zipf distribution
            MathDisplay.WriteLine(@"3. Generate 10 samples of the Zipf distribution");
            for (var i = 0; i < 10; i++)
            {
                MathDisplay.Write(zipf.Sample().ToString("N05") + @" ");
            }
            MathDisplay.FlushBuffer();
            MathDisplay.WriteLine();
            MathDisplay.WriteLine();
        }
Example #28
        static void ReberPrediction()
        {
            // generate 500 extended reber grammar training examples
            var grammar   = new ReberGrammar();
            var sequences = grammar.GetExtended(10, 16).Take(500).ToList();

            // split the data into training and test sets
            var data = ReberGrammar.GetOneHot(sequences).Split(0);

            using var lap = BrightWireProvider.CreateLinearAlgebra();
            var graph = new GraphFactory(lap);

            // binary classification rounds each output to either 0 or 1
            var errorMetric = graph.ErrorMetric.BinaryClassification;

            // configure the network properties
            graph.CurrentPropertySet.Use(graph.GradientDescent.RmsProp).
            Use(graph.WeightInitialisation.Xavier);

            // create the engine
            var trainingData = graph.CreateDataSource(data.Training);
            var testData     = trainingData.CloneWith(data.Test);
            var engine       = graph.CreateTrainingEngine(trainingData, learningRate: 0.1f, batchSize: 32);

            // build the network
            const int HIDDEN_LAYER_SIZE = 32, TRAINING_ITERATIONS = 30;

            graph.Connect(engine).AddGru(HIDDEN_LAYER_SIZE).AddFeedForward(engine.DataSource.OutputSize).
            Add(graph.SigmoidActivation()).AddBackpropagationThroughTime(errorMetric);
            engine.Train(TRAINING_ITERATIONS, testData, errorMetric);

            // generate a sample sequence using the learned state transitions
            var networkGraph    = engine.Graph;
            var executionEngine = graph.CreateEngine(networkGraph);

            Console.WriteLine("Generating new reber sequences from the observed state probabilities...");
            for (var z = 0; z < 3; z++)
            {
                // prepare the first input
                var input = new float[ReberGrammar.Size];
                input[ReberGrammar.GetIndex('B')] = 1f;
                Console.Write("B");
                int index = 0, eCount = 0;
                using var executionContext = graph.CreateExecutionContext();
                var result = executionEngine.ExecuteSequential(index++, input, executionContext,
                                                               MiniBatchSequenceType.SequenceStart);
                for (var i = 0; i < 32; i++)
                {
                    var next = result.Output[0].Data.Select((v, j) => ((double)v, j)).
                               Where(d => d.Item1 >= 0.1f).ToList();
                    var distribution = new Categorical(next.Select(d => d.Item1).ToArray());
                    var nextIndex    = next[distribution.Sample()].Item2;
                    Console.Write(ReberGrammar.GetChar(nextIndex));
                    if (nextIndex == ReberGrammar.GetIndex('E') && ++eCount == 2)
                    {
                        break;
                    }
                    Array.Clear(input, 0, ReberGrammar.Size);
                    input[nextIndex] = 1f;
                    result           = executionEngine.ExecuteSequential(index++, input, executionContext,
                                                                         MiniBatchSequenceType.Standard);
                }

                Console.WriteLine();
            }
        }
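In the Reber sequence generator above, the outputs with activation of at least 0.1 are passed to `Categorical` without renormalizing; this works because the distribution rescales its weight vector, so only the relative magnitudes matter. A tiny sketch of that filter-then-sample step with invented activations:

    // Sketch: filter candidates by a threshold, then sample among them with Categorical.
    using System;
    using System.Linq;
    using MathNet.Numerics.Distributions;

    static class FilterThenSampleSketch
    {
        static void Main()
        {
            float[] output = { 0.02f, 0.45f, 0.30f, 0.05f, 0.18f };   // hypothetical network outputs

            // Keep candidates above the threshold, remembering their original indices.
            var next = output.Select((v, j) => ((double)v, j)).Where(d => d.Item1 >= 0.1f).ToList();

            // Categorical rescales the remaining weights, so no manual renormalization is needed.
            var distribution = new Categorical(next.Select(d => d.Item1).ToArray());
            int nextIndex = next[distribution.Sample()].Item2;

            Console.WriteLine($"chose original index {nextIndex}");
        }
    }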
Example #29
 public void FailSampleStatic()
 {
     var d = Categorical.Sample(new Random(), badP);
 }