Code example #1
        public void GenerateTest()
        {
            // Example taken from http://en.wikipedia.org/wiki/Viterbi_algorithm

            // State transition probabilities (2 hidden states)
            double[,] transition =
            {
                { 0.7, 0.3 },
                { 0.4, 0.6 }
            };

            // Symbol emission probabilities (3 observable symbols per state)
            double[,] emission =
            {
                { 0.1, 0.4, 0.5 },
                { 0.6, 0.3, 0.1 }
            };

            // Initial state probabilities
            double[] initial =
            {
                0.6, 0.4
            };

            HiddenMarkovModel hmm = new HiddenMarkovModel(transition, emission, initial);

            // Sample a random sequence of 10 observations, recording the
            // hidden state path that produced it and its log-likelihood
            double logLikelihood;

            int[] path;
            int[] samples = hmm.Generate(10, out path, out logLikelihood);

            // Evaluating the sampled observations along that same path
            // should reproduce the log-likelihood reported by Generate
            double expected = hmm.Evaluate(samples, path);

            Assert.AreEqual(expected, logLikelihood);
        }
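The test above checks that Generate reports the log-likelihood of the state path it sampled. The same classic API also exposes Viterbi decoding, which recovers the most likely hidden path for a given observation sequence. A minimal sketch reusing the model from the test; the method name and the observation sequence are illustrative, not part of the original test:

        public void DecodeSketch()
        {
            // Same Wikipedia model as in GenerateTest above
            double[,] transition =
            {
                { 0.7, 0.3 },
                { 0.4, 0.6 }
            };

            double[,] emission =
            {
                { 0.1, 0.4, 0.5 },
                { 0.6, 0.3, 0.1 }
            };

            double[] initial = { 0.6, 0.4 };

            var hmm = new HiddenMarkovModel(transition, emission, initial);

            // An arbitrary observation sequence over the 3 emission symbols
            int[] observations = { 0, 1, 2, 1 };

            // Viterbi decoding: the most likely hidden state path and its
            // log-likelihood under the model
            double logLikelihood;
            int[] path = hmm.Decode(observations, out logLikelihood);
        }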
Code example #2
        public static void Generate()
        {
            // Fix the random seed so the generated phrase is reproducible
            MathHelper.SetupGenerator(42);

            // Consider some phrases:
            //
            string[][] phrases =
            {
                new[] { "those",  "are",   "sample", "words", "from", "a", "dictionary" },
                new[] { "those",  "are",   "sample", "words" },
                new[] { "sample", "words", "are",    "words" },
                new[] { "those",  "words" },
                new[] { "those",  "are",   "words" },
                new[] { "words",  "from",  "a",      "dictionary" },
                new[] { "those",  "are",   "words",  "from",  "a",    "dictionary" }
            };

            // Let's begin by transforming them to sequences of
            // integer labels using a codification codebook:
            var codebook = new Codification(phrases);

            // Now we can create the training data for the models:
            int[][] sequence = codebook.Translate(phrases);

            // To create the models, we will specify a forward topology,
            // as the sequences have definite start and ending points.
            //
            var topology = new Forward(states: 4);
            int symbols  = codebook.SymbolCount; // We have 7 different words

            Console.WriteLine("Symbol Count: {0}", symbols);

            // Create the hidden Markov model
            HiddenMarkovModel hmm = new HiddenMarkovModel(topology, symbols);

            // Create the learning algorithm
            BaumWelchLearning teacher = new BaumWelchLearning(hmm);

            // Teach the model about the phrases
            double error = teacher.Run(sequence);

            // Now, we can ask the model to generate new samples
            // from the word distributions it has just learned:
            //
            int[] sample = hmm.Generate(3);

            // And the result will be: "those", "are", "words".
            string[] result = codebook.Translate(sample);

            foreach (string word in result)
            {
                Console.WriteLine(word);
            }
        }
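Once the model has been trained as above, the classic Evaluate call can score how plausible a phrase is under it. A short hedged sketch, assuming the hmm and codebook variables from Generate() are passed in; the method name is hypothetical:

        public static void EvaluateSketch(HiddenMarkovModel hmm, Codification codebook)
        {
            // Translate a phrase with the same codebook used for training
            int[][] observed = codebook.Translate(new[]
            {
                new[] { "those", "are", "words" }
            });

            // Evaluate returns the log-likelihood of the observation
            // sequence under the learned model (closer to zero = more likely)
            double logLikelihood = hmm.Evaluate(observed[0]);

            Console.WriteLine("log-likelihood: {0}", logLikelihood);
        }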
Code example #3
        public void GenerateTest2()
        {
            #region doc_generate
            // Fix the random seed so the generated phrase is reproducible
            Accord.Math.Random.Generator.Seed = 42;

            // Let's say we have the following set of sequences
            string[][] phrases =
            {
                new[] { "those",  "are",   "sample", "words", "from", "a", "dictionary" },
                new[] { "those",  "are",   "sample", "words" },
                new[] { "sample", "words", "are",    "words" },
                new[] { "those",  "words" },
                new[] { "those",  "are",   "words" },
                new[] { "words",  "from",  "a",      "dictionary" },
                new[] { "those",  "are",   "words",  "from",  "a",    "dictionary" }
            };

            // Let's begin by transforming them to sequences of
            // integer labels using a codification codebook:
            var codebook = new Codification("Words", phrases);

            // Now we can create the training data for the models:
            int[][] sequence = codebook.Translate("Words", phrases);

            // To create the models, we will specify a forward topology,
            // as the sequences have definite start and ending points.
            //
            var topology = new Forward(states: 4);
            int symbols  = codebook["Words"].Symbols; // We have 7 different words

            // Create the hidden Markov model
            var hmm = new HiddenMarkovModel(topology, symbols);

            // Create the learning algorithm
            var teacher = new BaumWelchLearning(hmm);

            // Teach the model
            teacher.Learn(sequence);

            // Now, we can ask the model to generate new samples
            // from the word distributions it has just learned:
            //
            int[] sample = hmm.Generate(3);

            // And the result will be: "those", "are", "words".
            string[] result = codebook.Translate("Words", sample);
            #endregion

            Assert.AreEqual("those", result[0]);
            Assert.AreEqual("are", result[1]);
            Assert.AreEqual("words", result[2]);
        }
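Generate can also be called repeatedly to sample phrases of different lengths from the same trained model. A hedged sketch, assuming the hmm and codebook built in GenerateTest2 are passed in; the loop bounds and method name are illustrative:

        public static void SampleSketch(HiddenMarkovModel hmm, Codification codebook)
        {
            for (int length = 2; length <= 4; length++)
            {
                // Sample a phrase of the given length and map the integer
                // labels back to words through the codebook
                int[] sample = hmm.Generate(length);
                string[] words = codebook.Translate("Words", sample);

                Console.WriteLine(string.Join(" ", words));
            }
        }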
Code example #4
        public override List<Note> Generate()
        {
            if (model == null)
            {
                return new List<Note>();
            }

            // Sample 64 symbols from the trained model
            int[] sample = model.Generate(64);

            List<Note> result = new List<Note>();

            foreach (int note in sample)
            {
                // Each symbol packs a note id and a length index in base 5:
                // note % 5 selects a power-of-two note length (1, 2, 4, 8
                // or 16) and note / 5 holds the note id offset by addToNote
                int noteLength = (int)Math.Pow(2.0, note % 5);
                int noteId     = (note / 5) - addToNote;

                result.Add(new Note(noteId, (NoteLength)noteLength));
            }

            return result;
        }
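The decoding in Generate() implies that each symbol packs a note id and a length index in base 5. A hedged sketch of the inverse mapping that training data would have to follow; Encode is a hypothetical helper, and note.Id, note.Length and addToNote are assumptions inferred from the decoding arithmetic above, not code from the original project:

        // Hypothetical inverse of the decoding in Generate(): pack a note id
        // and a power-of-two note length into one integer symbol
        private int Encode(Note note)
        {
            int lengthIndex = (int)Math.Log((int)note.Length, 2.0); // 0..4
            return (note.Id + addToNote) * 5 + lengthIndex;
        }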