Example #1
        public void ProbabilityDensityFunctionTest()
        {
            MultivariateNormalDistribution[] components = new MultivariateNormalDistribution[2];
            components[0] = new MultivariateNormalDistribution(new double[] { 2 }, new double[, ] {
                { 1 }
            });
            components[1] = new MultivariateNormalDistribution(new double[] { 5 }, new double[, ] {
                { 1 }
            });

            double[] coefficients = { 0.3, 0.7 };
            var      mixture      = new MultivariateMixture <MultivariateNormalDistribution>(coefficients, components);

            double[] x = { 1.2 };

            double expected =
                0.3 * components[0].ProbabilityDensityFunction(x) +
                0.7 * components[1].ProbabilityDensityFunction(x);

            double actual = mixture.ProbabilityDensityFunction(x);

            Assert.AreEqual(expected, actual);
        }
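This test verifies the defining property of a mixture density: the mixture PDF is the coefficient-weighted sum of the component PDFs,

$$p(x) = \sum_{i=1}^{k} \pi_i \, p_i(x), \qquad \sum_{i=1}^{k} \pi_i = 1,$$

where the $\pi_i$ are the mixing coefficients (0.3 and 0.7 above) and the $p_i$ are the component Normal densities.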
Example #2
        public void DistributionFunctionTestPerComponent()
        {
            MultivariateNormalDistribution[] components = new MultivariateNormalDistribution[2];
            components[0] = new MultivariateNormalDistribution(new double[] { 2 }, new double[, ] {
                { 1 }
            });
            components[1] = new MultivariateNormalDistribution(new double[] { 5 }, new double[, ] {
                { 1 }
            });

            double[] coefficients = { 0.3, 0.7 };
            var      mixture      = new MultivariateMixture <MultivariateNormalDistribution>(coefficients, components);

            double[] x = { 1.2 };

            double expected =
                mixture.DistributionFunction(0, x) +
                mixture.DistributionFunction(1, x);

            double actual = mixture.DistributionFunction(x);

            Assert.AreEqual(expected, actual);
        }
Example #3
        public void GenerateTest()
        {
            Accord.Math.Tools.SetupGenerator(0);

            var normal = new MultivariateNormalDistribution(
                new double[] { 2, 6 },
                new double[, ] {
                { 2, 1 }, { 1, 5 }
            });

            double[][] sample = normal.Generate(1000000);

            double[] mean = sample.Mean();
            double[,] cov = sample.Covariance();

            Assert.AreEqual(2, mean[0], 1e-2);
            Assert.AreEqual(6, mean[1], 1e-2);

            Assert.AreEqual(2, cov[0, 0], 1e-2);
            Assert.AreEqual(1, cov[0, 1], 1e-2);
            Assert.AreEqual(1, cov[1, 0], 1e-2);
            Assert.AreEqual(5, cov[1, 1], 2e-2);
        }
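The tolerances above are consistent with Monte Carlo error: with $n = 10^6$ samples, the standard error of a sample mean whose coordinate variance is $\sigma^2$ is

$$\mathrm{SE} = \sqrt{\sigma^2 / n} = \sqrt{5 / 10^6} \approx 2.2 \times 10^{-3}$$

for the noisiest coordinate, so tolerances of $10^{-2}$ (and $2 \times 10^{-2}$ for the variance estimate, whose sampling error is larger) leave a comfortable margin.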
Example #4
        private static double[, ][] calculateZXMatrix <TFilter, TState, TMeasurement>(this IList <TFilter> kalmanFilters,
                                                                                      IList <TMeasurement> measurements,
                                                                                      out double[,] probsZX)
        where TFilter : KalmanFilter <TState, TMeasurement>
        {
            probsZX = new double[measurements.Count, kalmanFilters.Count];
            var innovZX = new double[measurements.Count, kalmanFilters.Count][];

            for (int tIdx = 0; tIdx < kalmanFilters.Count; tIdx++)
            {
                var kalman         = kalmanFilters[tIdx];
                var zeroCoordinate = new double[kalman.MeasurementVectorDimension];

                var mvnPDF = new MultivariateNormalDistribution(zeroCoordinate, kalman.ResidualCovariance);
                var mulCorrectionFactor = (double)1 / mvnPDF.ProbabilityDensityFunction(zeroCoordinate);

                for (int mIdx = 0; mIdx < measurements.Count; mIdx++)
                {
                    var measurement = measurements[mIdx];

                    // delta' * S^-1 * delta; this expression follows a chi-square distribution (squared Mahalanobis distance)
                    double[] delta; double mahalanobisDistance; // not used
                    var      isInsideGate = kalman.IsMeasurementInsideGate(measurement, out delta, out mahalanobisDistance);

                    innovZX[mIdx, tIdx] = delta;

                    if (isInsideGate)
                    {
                        probsZX[mIdx, tIdx] = mvnPDF.ProbabilityDensityFunction(delta) * mulCorrectionFactor; //modification (added mul correction factor)
                    }
                    //else probsZX[mIdx, tIdx] = 0 (by default)
                }
            }

            return(innovZX);
        }
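The multiplicative correction factor divides out the Gaussian normalization constant, so the stored probability reduces to the exponential of the squared Mahalanobis distance:

$$\frac{p(\delta)}{p(0)} = \exp\!\left(-\tfrac{1}{2}\, \delta^\top S^{-1} \delta\right),$$

which equals 1 for a perfect measurement-to-track match ($\delta = 0$) and decays toward 0 as the innovation $\delta$ grows.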
Example #5
        public void LogProbabilityDensityFunctionTestPerComponent()
        {
            MultivariateNormalDistribution[] components = new MultivariateNormalDistribution[2];
            components[0] = new MultivariateNormalDistribution(new double[] { 2 }, new double[, ] {
                { 1 }
            });
            components[1] = new MultivariateNormalDistribution(new double[] { 5 }, new double[, ] {
                { 1 }
            });

            double[] coefficients = { 0.3, 0.7 };
            var      mixture      = new MultivariateMixture <MultivariateNormalDistribution>(coefficients, components);

            double[] x = { 1.2 };

            double expected = System.Math.Log(
                mixture.ProbabilityDensityFunction(0, x) +
                mixture.ProbabilityDensityFunction(1, x));

            double actual = mixture.LogProbabilityDensityFunction(x);

            Assert.AreEqual(expected, actual, 1e-10);
            Assert.IsFalse(double.IsNaN(actual));
        }
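The expected value here is computed by summing densities in the linear domain, which can underflow when the component densities are very small. Log-domain implementations typically rely on the log-sum-exp trick instead; a minimal sketch (a hypothetical helper for illustration, not part of Accord.NET):

    using System;
    using System.Linq;

    // Hypothetical helper illustrating the standard log-sum-exp trick.
    static double LogSumExp(double[] logValues)
    {
        double max = logValues.Max();                       // factor out the largest exponent
        double sum = logValues.Sum(v => Math.Exp(v - max)); // every term is <= 1, so no overflow
        return max + Math.Log(sum);
    }

With such a helper, a mixture's log-density is the log-sum-exp of the per-component log-coefficient plus log-density terms.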
Example #6
        public Cluster(List <Point> points, KMeans kmeans = null, int index = 0)
        {
            Dimensions = points.First().Coordinates.Length;

            K          = Dimensions * (Dimensions + 3) / 2;
            Points     = points;
            Centroid   = kmeans?.Clusters[index].Centroid;
            Covariance = kmeans?.Clusters[index].Covariance;
            Minimums   = new double[Dimensions];
            Maximums   = new double[Dimensions];

            var pointsMatrix = points.Select(p => p.Coordinates).ToArray();

            Means = pointsMatrix.Mean(pointsMatrix.Transpose().Select(x => x.Sum()).ToArray());
            var covariance = pointsMatrix.Covariance(Means);

            for (var i = 0; i < Dimensions; i++)
            {
                Minimums[i] = points.Min(p => p[i]);
                Maximums[i] = points.Max(p => p[i]);
            }

            _multivariateNormalDistribution = new MultivariateNormalDistribution(Means, covariance);
        }
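The expression K = Dimensions * (Dimensions + 3) / 2 is the number of free parameters of a D-dimensional Gaussian: D mean components plus the D(D+1)/2 independent entries of the symmetric covariance matrix,

$$K = D + \frac{D(D+1)}{2} = \frac{D(D+3)}{2}.$$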
Example #7
        private static double[, ][] calculateZXMatrix <TFilter, TState, TMeasurement>(this IList <TFilter> kalmanFilters,
                                                                                      IList <TMeasurement> measurements,
                                                                                      out double[,] probsZX)
        where TFilter : KalmanFilter <TState, TMeasurement>
        {
            probsZX = new double[measurements.Count, kalmanFilters.Count];
            var innovZX = new double[measurements.Count, kalmanFilters.Count][];

            for (int tIdx = 0; tIdx < kalmanFilters.Count; tIdx++)
            {
                var kalman         = kalmanFilters[tIdx];
                var zeroCoordinate = new double[kalman.MeasurementVectorDimension];

                var mvnPDF = new MultivariateNormalDistribution(zeroCoordinate, kalman.CovarianceMatrix);
                var mulCorrectionFactor = (double)1 / mvnPDF.ProbabilityDensityFunction(zeroCoordinate);

                for (int mIdx = 0; mIdx < measurements.Count; mIdx++)
                {
                    var measurement = measurements[mIdx];

                    var delta = kalman.CalculatePredictionError(measurement);
                    innovZX[mIdx, tIdx] = delta;

                    // delta' * S^-1 * delta; this expression follows a chi-square distribution (squared Mahalanobis distance)
                    var gate = delta.Multiply(kalman.CovarianceMatrixInv).Multiply(delta.Transpose()).Sum();

                    if (gate < gateThreshold)
                    {
                        probsZX[mIdx, tIdx] = mvnPDF.ProbabilityDensityFunction(delta) * mulCorrectionFactor; //modification (added mul correction factor)
                    }
                    //else probsZX[mIdx, tIdx] = 0 (by default)
                }
            }

            return(innovZX);
        }
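The gateThreshold constant is not defined in this snippet. Since the gate statistic follows a chi-square distribution with as many degrees of freedom as the measurement vector, a conventional choice is a chi-square quantile; a hedged sketch, assuming Accord.NET's ChiSquareDistribution and a 2-D measurement vector:

    using Accord.Statistics.Distributions.Univariate;

    // Gate at 99% of the chi-square probability mass; the 2 degrees of
    // freedom assume a 2-D measurement vector.
    var chiSquare = new ChiSquareDistribution(degreesOfFreedom: 2);
    double gateThreshold = chiSquare.InverseDistributionFunction(0.99); // ≈ 9.21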
Example #8
        private void btnGenerateRandom_Click(object sender, EventArgs e)
        {
            k = (int)numClusters.Value;

            // Generate data with n Gaussian distributions
            double[][][] data = new double[k][][];

            for (int i = 0; i < k; i++)
            {
                // Create random centroid to place the Gaussian distribution
                var mean = Matrix.Random(2, -6.0, +6.0);

                // Create random covariance matrix for the distribution
                double[,] covariance;
                do
                {
                    covariance = Matrix.Random(2, true, 0.0, 3.0);
                }while (!covariance.IsPositiveDefinite());


                // Create the Gaussian distribution
                var gaussian = new MultivariateNormalDistribution(mean, covariance);

                int samples = Accord.Math.Tools.Random.Next(150, 250);
                data[i] = gaussian.Generate(samples);
            }

            // Join the generated data
            mixture = Matrix.Stack(data);

            // Update the scatterplot
            CreateScatterplot(graph, mixture, k);

            // Forget previous initialization
            kmeans = null;
        }
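The do/while loop above draws random matrices until one happens to be positive-definite, which can retry an unbounded number of times. A hedged alternative sketch that constructs a positive-definite matrix directly (the exact Accord.Math extension-method overloads used here are an assumption):

    using Accord.Math;

    // A * A' is always positive semi-definite; a small diagonal ridge
    // makes it strictly positive-definite.
    double[,] a = Matrix.Random(2, 2, -1.0, 1.0);
    double[,] covariance = a.DotWithTransposed(a).Add(Matrix.Diagonal(2, 1e-6));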
Example #9
        public void FittingOptionsTest()
        {
            // Create a degenerate problem
            double[][] sequences = new double[][]
            {
                new double[] { 1, 1, 1, 1, 1, 0, 1, 1, 1, 1 },
                new double[] { 1, 1, 1, 1, 0, 1, 1, 1, 1, 1 },
                new double[] { 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 },
                new double[] { 1, 1, 1, 1, 1, 1 },
                new double[] { 1, 1, 1, 1, 1, 1, 1 },
                new double[] { 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 },
                new double[] { 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 },
            };

            // Creates a continuous hidden Markov model with two states organized in an ergodic
            //  topology and an underlying multivariate Normal distribution as density.
            var density = new MultivariateNormalDistribution(1);

            var model = new HiddenMarkovModel <MultivariateNormalDistribution>(new Ergodic(2), density);

            // Configure the learning algorithms to train the sequence classifier
            var teacher = new BaumWelchLearning <MultivariateNormalDistribution>(model)
            {
                Tolerance  = 0.0001,
                Iterations = 0,

                // Configure options for fitting the normal distribution
                FittingOptions = new NormalOptions()
                {
                    Regularization = 0.0001,
                }
            };

            // Fit the model. No exceptions will be thrown
            double logLikelihood = teacher.Run(sequences);
            double likelihood    = Math.Exp(logLikelihood);

            Assert.AreEqual(47.434837528491286, logLikelihood, 1e-15);
            Assert.IsFalse(double.IsNaN(logLikelihood));

            Assert.AreEqual(0.0001, (teacher.FittingOptions as NormalOptions).Regularization);



            // Try without a regularization constant to get an exception
            bool thrown;

            thrown  = false;
            density = new MultivariateNormalDistribution(1);
            model   = new HiddenMarkovModel <MultivariateNormalDistribution>(new Ergodic(2), density);
            teacher = new BaumWelchLearning <MultivariateNormalDistribution>(model)
            {
                Tolerance = 0.0001, Iterations = 0,
            };
            Assert.IsNull(teacher.FittingOptions);
            try { teacher.Run(sequences); }
            catch { thrown = true; }
            Assert.IsTrue(thrown);

            thrown  = false;
            density = new Accord.Statistics.Distributions.Multivariate.MultivariateNormalDistribution(1);
            model   = new HiddenMarkovModel <MultivariateNormalDistribution>(new Ergodic(2), density);
            teacher = new BaumWelchLearning <MultivariateNormalDistribution>(model)
            {
                Tolerance      = 0.0001,
                Iterations     = 0,
                FittingOptions = new NormalOptions()
                {
                    Regularization = 0
                }
            };
            Assert.IsNotNull(teacher.FittingOptions);
            try { teacher.Run(sequences); }
            catch { thrown = true; }
            Assert.IsTrue(thrown);
        }
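NormalOptions.Regularization works by adding a small constant to the covariance diagonal until the fitted matrix becomes positive-definite again, effectively replacing $\Sigma$ with $\Sigma + \lambda I$. With the degenerate, almost-constant sequences above, fitting without it (or with $\lambda = 0$) leaves a singular covariance matrix and throws, which is exactly what the two try/catch blocks assert.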
Example #10
        public void sequence_parsing_test()
        {
            #region doc_learn_fraud_analysis

            // Ensure results are reproducible
            Accord.Math.Random.Generator.Seed = 0;

            // Let's say we have the following data about credit card transactions,
            // where the data is organized in order of transaction, per credit card
            // holder. Every time the "Time" column restarts at zero, the sequence
            // of observations that follows corresponds to transactions of the
            // same person:

            double[,] data =
            {
                // "Time", "V1",   "V2",  "V3", "V4", "V5", "Amount",  "Fraud"
                { 0, 0.521, 0.124, 0.622, 15.2, 25.6,  2.70, 0 },              // first person, ok
                { 1, 0.121, 0.124, 0.822, 12.2, 25.6,  42.0, 0 },              // first person, ok

                { 0, 0.551, 0.124, 0.422, 17.5, 25.6,  20.0, 0 },              // second person, ok
                { 1, 0.136, 0.154, 0.322, 15.3, 25.6,  50.0, 0 },              // second person, ok
                { 2, 0.721, 0.240, 0.422, 12.2, 25.6, 100.0, 1 },              // second person, fraud!
                { 3, 0.222, 0.126, 0.722, 18.1, 25.8,  10.0, 0 },              // second person, ok
            };

            // Transform the above data into a jagged matrix
            double[][][] input;
            int[][]      states;
            transform(data, out input, out states);

            // Determine here the number of dimensions in the observations (in this case, 6)
            int observationDimensions = 6; // 6 columns: "V1", "V2", "V3", "V4", "V5", "Amount"

            // Create some prior distributions to help initialize our parameters
            var priorC = new WishartDistribution(dimension: observationDimensions, degreesOfFreedom: 10); // this 10 is just an arbitrary starting value; you may have to tune it as if it were a hyperparameter
            var priorM = new MultivariateNormalDistribution(dimension: observationDimensions);

            // Configure the learning algorithms to train the sequence classifier
            var teacher = new MaximumLikelihoodLearning <MultivariateNormalDistribution, double[]>()
            {
                // Their emissions will be multivariate Normal distributions initialized using the prior distributions
                Emissions = (j) => new MultivariateNormalDistribution(mean: priorM.Generate(), covariance: priorC.Generate()),

                // We will prevent our covariance matrices from becoming degenerate by adding a small
                // regularization value to their diagonal until they become positive-definite again:
                FittingOptions = new NormalOptions()
                {
                    Regularization = 1e-6
                },
            };

            // Use the teacher to learn a new HMM
            var hmm = teacher.Learn(input, states);

            // Use the HMM to predict whether the transactions were fraudulent or not:
            int[] firstPerson = hmm.Decide(input[0]);  // predict the first person, output should be: 0, 0

            int[] secondPerson = hmm.Decide(input[1]); // predict the second person, output should be: 0, 0, 1, 0
            #endregion


            Assert.AreEqual(new[] { 0, 0 }, firstPerson);
            Assert.AreEqual(new[] { 0, 0, 1, 0 }, secondPerson);
        }
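The transform helper used above is not shown in this example. A minimal sketch of what it could look like, assuming sequences break whenever the "Time" column resets to zero, columns 1-6 form the observation vector, and the last column is the per-transaction state label:

    using System.Collections.Generic;
    using System.Linq;

    static void transform(double[,] data, out double[][][] input, out int[][] states)
    {
        var sequences = new List<List<double[]>>();
        var labels = new List<List<int>>();

        for (int i = 0; i < data.GetLength(0); i++)
        {
            if (data[i, 0] == 0) // "Time" restarted: a new card holder begins
            {
                sequences.Add(new List<double[]>());
                labels.Add(new List<int>());
            }

            var observation = new double[6];
            for (int j = 0; j < 6; j++)
                observation[j] = data[i, j + 1]; // columns "V1".."V5" and "Amount"

            sequences[sequences.Count - 1].Add(observation);
            labels[labels.Count - 1].Add((int)data[i, 7]); // the "Fraud" column
        }

        input  = sequences.Select(s => s.ToArray()).ToArray();
        states = labels.Select(l => l.ToArray()).ToArray();
    }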
Example #11
        public void LearnTest9()
        {
            var observations = new double[][][]
            {
                #region example
                new double[][]
                {
                    new double[] { 2.58825719356537, -6.10018078957452, -3.51826652951428, },
                    new double[] { 1.5637531876564, -8.92844874836103, -9.09330631370717, },
                    new double[] { 2.12242007255554, -14.8117769726059, -9.04211363915664, },
                    new double[] { 0.39045587182045, -10.3548189544216, -7.69608701297759, },
                    new double[] { -0.553155690431595, -34.9185135663671, 14.6941023804174, },
                    new double[] { -0.923129916191101, -6.06337512248124, 8.28106954197084, },
                    new double[] { 0.478342920541763, -4.93066650122859, 3.1120912556361, },
                },
                new double[][]
                {
                    new double[] { 1.89824998378754, -8.21581113387553, -7.88790716806936, },
                    new double[] { 2.24453508853912, -10.281886698766, -9.67846789539227, },
                    new double[] { 0.946296751499176, -22.0276392511088, -6.52238763834787, },
                    new double[] { -0.251136720180511, -13.3010653290676, 8.47499524273859, },
                    new double[] { -2.35625505447388, -18.1542111199742, 6.25564428645639, },
                    new double[] { 0.200483202934265, -5.48215328147925, 5.88811639894938, },
                },
                new double[][]
                {
                    new double[] { 2.7240589261055, -3.71720542338046, -3.75092324997593, },
                    new double[] { 2.19917744398117, -7.18434871865373, -4.92539999824263, },
                    new double[] { 1.40723958611488, -11.5545592998714, -5.14780194932221, },
                    new double[] { 1.61909088492393, -12.5262932665595, -6.34366687651826, },
                    new double[] { -2.54745036363602, -8.64924529565274, 4.15127988308386, },
                    new double[] { 0.815489888191223, -33.8531051237431, 4.3954106953589, },
                    new double[] { -2.2090271115303, -7.17818258102413, 8.9117419130814, },
                    new double[] { -1.9000232219696, -2.4331659041997, 6.91224717766923, },
                },
                new double[][]
                {
                    new double[] { 4.88746017217636, -4.36384651224969, -5.45526891285354, },
                    new double[] { 1.07786506414413, -12.9399071692788, -5.88248026843442, },
                    new double[] { 2.28888094425201, -15.4017823367163, -9.36490649113217, },
                    new double[] { -1.16468518972397, -35.4200913138333, 5.44735305966353, },
                    new double[] { -1.1483296751976, -13.5454911068913, 7.83577905727326, },
                    new double[] { -2.58188247680664, -1.10149600205281, 10.5928750605715, },
                    new double[] { -0.277529656887054, -6.96828661824016, 4.59381106840823, },
                },
                new double[][]
                {
                    new double[] { 3.39118540287018, -2.9173207268871, -5.66795398530988, },
                    new double[] { 1.44856870174408, -9.21319243840922, -5.74986260778932, },
                    new double[] { 1.45215392112732, -10.3989582187704, -7.06932768129103, },
                    new double[] { 0.640938431024551, -15.319525165245, -7.68866476960221, },
                    new double[] { -0.77500119805336, -20.8335910793105, -1.56702420087282, },
                    new double[] { -3.48337143659592, -18.0461677940976, 12.3393172987974, },
                    new double[] { -1.17014795541763, -5.59624373275155, 6.09176828712909, },
                },
                new double[][]
                {
                    new double[] { -3.984335064888, -6.2406475893692, -8.13815178201645, },
                    new double[] { -2.12110131978989, -5.60649378910647, -7.69551693188544, },
                    new double[] { -1.62762850522995, -24.1160212319193, -14.9683354815265, },
                    new double[] { -1.15231424570084, -17.1336790735458, -5.70731951079186, },
                    new double[] { 0.00514835119247437, -35.4256585588532, 11.0357975880744, },
                    new double[] { 0.247226655483246, -4.87705331087666, 8.47028869639136, },
                    new double[] { -1.28729045391083, -4.4684855254196, 4.45432778840328, },
                },
                new double[][]
                {
                    new double[] { -5.14926165342331, -14.4168633009146, -14.4808205022332, },
                    new double[] { -3.93681302666664, -13.6040611430423, -9.52852874304709, },
                    new double[] { -4.0200162678957, -17.9772444010218, -10.9145425003168, },
                    new double[] { 2.99205146729946, -11.3995995445577, 10.0112700536762, },
                    new double[] { -1.80960297584534, -25.9626088707583, 3.84153700324761, },
                    new double[] { -0.47445073723793, -3.15995343875038, 3.81288679772555, },
                },
                new double[][]
                {
                    new double[] { -3.10730338096619, -4.90623566171983, -7.71155001801384, },
                    new double[] { -2.58265435695648, -12.8249488039327, -7.81701695282102, },
                    new double[] { -3.70455086231232, -10.9642675851383, -10.3474496036822, },
                    new double[] { 2.34457105398178, -22.575668228196, -4.00681935468317, },
                    new double[] { -0.137023627758026, -22.8846781066673, 6.49448229892285, },
                    new double[] { -1.04487389326096, -10.8106353197974, 6.89123118904132, },
                    new double[] { -0.807777792215347, -6.72485967042486, 6.44026679233423, },
                    new double[] { -0.0864192843437195, -1.82784244477527, 5.21446167464657, },
                },
                new double[][]
                {
                    new double[] { -3.68375554680824, -8.91158395500054, -9.35894038244743, },
                    new double[] { -3.42774018645287, -8.90966793048099, -12.0502934183779, },
                    new double[] { -2.21796408295631, -20.1283824753482, -9.3404551995806, },
                    new double[] { 0.275979936122894, -24.8898254667703, -1.95441472953041, },
                    new double[] { 2.8757631778717, -25.5929744730134, 15.9213204397452, },
                    new double[] { -0.0532664358615875, -5.41014381829368, 7.0702071664098, },
                    new double[] { -0.523447245359421, -2.21351362388411, 5.47910029515575, },
                },
                new double[][]
                {
                    new double[] { -2.87790596485138, -4.67335526533981, -5.23215633615683, },
                    new double[] { -2.4156779050827, -3.99829080603495, -4.85576151355235, },
                    new double[] { -2.6987336575985, -7.76589206730162, -5.81054787011341, },
                    new double[] { -2.65482440590858, -10.5628263066491, -5.60468502395908, },
                    new double[] { -2.54620611667633, -13.0387387107748, -5.36223367466908, },
                    new double[] { -0.349991768598557, -6.54244110985515, -4.35843018634009, },
                    new double[] { 1.43021196126938, -14.1423935327282, 11.3171592025544, },
                    new double[] { -0.248833745718002, -25.6880129237476, 3.6943247495434, },
                    new double[] { -0.191526114940643, -7.40986142342928, 5.01053017361167, },
                    new double[] { 0.0262223184108734, -2.32355649224634, 5.02960958030255, },
                },
                new double[][]
                {
                    new double[] { -0.491838902235031, -6.14010393559236, 0.827477332024586, },
                    new double[] { -0.806065648794174, -7.15029676810841, -1.19623376104369, },
                    new double[] { -0.376655906438828, -8.79062775480082, -1.90518908829517, },
                    new double[] { 0.0747844576835632, -8.78933441325732, -1.96265207353993, },
                    new double[] { -0.375023484230042, 3.89681155173501, 9.01643231817069, },
                    new double[] { -2.8106614947319, -11.460008093918, 2.27801912994775, },
                    new double[] { 8.87353122234344, -36.8569805718597, 6.36432395690119, },
                    new double[] { 2.17160433530808, -6.57312981892095, 6.99683358454453, },
                },
                new double[][]
                {
                    new double[] { -2.59969010949135, -3.67992698430228, 1.09594294144671, },
                    new double[] { -1.09673067927361, -5.84256216502719, -0.576662929456575, },
                    new double[] { -1.31642892956734, -7.75851355520771, -2.38379618379558, },
                    new double[] { -0.119869410991669, -8.5749576027529, -1.84393133510667, },
                    new double[] { 1.6157403588295, -8.50491836461337, 1.75083250596366, },
                    new double[] { 1.66225507855415, -26.4882911957686, 1.98153904369032, },
                    new double[] { 2.55657434463501, -10.5098938623168, 11.632377227365, },
                    new double[] { 1.91832333803177, -9.98753621777953, 7.38483383044985, },
                    new double[] { 2.16058492660522, -2.7784029746222, 7.8378896386686, },
                },
                #endregion
            };

            var density = new MultivariateNormalDistribution(3);
            var model   = new HiddenMarkovModel <MultivariateNormalDistribution>(new Forward(5), density);

            var learning = new ViterbiLearning <MultivariateNormalDistribution>(model)
            {
                Tolerance      = 0.0001,
                Iterations     = 0,
                FittingOptions = new NormalOptions()
                {
                    Regularization = 0.0001
                }
            };

            double logLikelihood = learning.Run(observations);

            Assert.IsFalse(Double.IsNaN(logLikelihood));

            foreach (double value in model.Transitions)
            {
                Assert.IsFalse(Double.IsNaN(value));
            }

            foreach (double value in model.Probabilities)
            {
                Assert.IsFalse(Double.IsNaN(value));
            }
        }
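Unlike the Baum-Welch examples elsewhere on this page, ViterbiLearning trains by segmental k-means: each iteration decodes the single most likely state path and re-estimates the parameters from that hard assignment, so it maximizes the joint likelihood of the observations and the best path rather than the marginal likelihood over all paths.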
Example #12
        public void learn_pendigits_normalization()
        {
            Console.WriteLine("Starting NormalQuasiNewtonHiddenLearningTest.learn_pendigits_normalization");

            using (var travis = new KeepTravisAlive())
            {
                #region doc_learn_pendigits
                // Ensure we get reproducible results
                Accord.Math.Random.Generator.Seed = 0;

                // Download the PENDIGITS dataset from UCI ML repository
                var pendigits = new Pendigits(path: Path.GetTempPath());

                // Get and pre-process the training set
                double[][][] trainInputs  = pendigits.Training.Item1;
                int[]        trainOutputs = pendigits.Training.Item2;

                // Pre-process the digits so each of them is centered and scaled
                trainInputs = trainInputs.Apply(Accord.Statistics.Tools.ZScores);
                trainInputs = trainInputs.Apply((x) => x.Subtract(x.Min())); // make them positive

                // Create some prior distributions to help initialize our parameters
                var priorC = new WishartDistribution(dimension: 2, degreesOfFreedom: 5);
                var priorM = new MultivariateNormalDistribution(dimension: 2);

                // Create a new learning algorithm for creating continuous hidden Markov model classifiers
                var teacher1 = new HiddenMarkovClassifierLearning <MultivariateNormalDistribution, double[]>()
                {
                    // This tells the generative algorithm how to train each of the component models. Note: The learning
                    // algorithm is more efficient if all generic parameters are specified, including the fitting options
                    Learner = (i) => new BaumWelchLearning <MultivariateNormalDistribution, double[], NormalOptions>()
                    {
                        Topology = new Forward(5), // Each model will have a forward topology with 5 states

                        // Their emissions will be multivariate Normal distributions initialized using the prior distributions
                        Emissions = (j) => new MultivariateNormalDistribution(mean: priorM.Generate(), covariance: priorC.Generate()),

                        // We will train until the relative change in the average log-likelihood is less than 1e-6 between iterations
                        Tolerance     = 1e-6,
                        MaxIterations = 1000, // or until we perform 1000 iterations (which is unlikely for this dataset)

                        // We will prevent our covariance matrices from becoming degenerate by adding a small
                        // regularization value to their diagonal until they become positive-definite again:
                        FittingOptions = new NormalOptions()
                        {
                            Regularization = 1e-6
                        }
                    }
                };

                // The following line is only needed to ensure reproducible results. Please remove it to enable full parallelization
                teacher1.ParallelOptions.MaxDegreeOfParallelism = 1; // (Remove, comment, or change this line to enable full parallelism)

                // Use the learning algorithm to create a classifier
                var hmmc = teacher1.Learn(trainInputs, trainOutputs);


                // Create a new learning algorithm for creating HCRFs
                var teacher2 = new HiddenQuasiNewtonLearning <double[]>()
                {
                    Function = new MarkovMultivariateFunction(hmmc),

                    MaxIterations = 10
                };

                // The following line is only needed to ensure reproducible results. Please remove it to enable full parallelization
                teacher2.ParallelOptions.MaxDegreeOfParallelism = 1; // (Remove, comment, or change this line to enable full parallelism)

                // Use the learning algorithm to create a classifier
                var hcrf = teacher2.Learn(trainInputs, trainOutputs);

                // Compute predictions for the training set
                int[] trainPredicted = hcrf.Decide(trainInputs);

                // Check the performance of the classifier by comparing with the ground-truth:
                var    m1       = new GeneralConfusionMatrix(predicted: trainPredicted, expected: trainOutputs);
                double trainAcc = m1.Accuracy; // should be 0.66523727844482561


                // Prepare the testing set
                double[][][] testInputs  = pendigits.Testing.Item1;
                int[]        testOutputs = pendigits.Testing.Item2;

                // Apply the same normalizations
                testInputs = testInputs.Apply(Accord.Statistics.Tools.ZScores);
                testInputs = testInputs.Apply((x) => x.Subtract(x.Min())); // make them positive

                // Compute predictions for the test set
                int[] testPredicted = hcrf.Decide(testInputs);

                // Check the performance of the classifier by comparing with the ground-truth:
                var    m2      = new GeneralConfusionMatrix(predicted: testPredicted, expected: testOutputs);
                double testAcc = m2.Accuracy; // should be 0.66506538564184681
                #endregion

                Assert.AreEqual(0.66523727844482561, trainAcc, 1e-10);
                Assert.AreEqual(0.66506538564184681, testAcc, 1e-10);
            }
        }
Example #13
        public void learn_pendigits_normalization()
        {
            #region doc_learn_pendigits
            // Ensure we get reproducible results
            Accord.Math.Random.Generator.Seed = 0;

            // Download the PENDIGITS dataset from UCI ML repository
            var pendigits = new Pendigits(path: Path.GetTempPath());

            // Get and pre-process the training set
            double[][][] trainInputs  = pendigits.Training.Item1;
            int[]        trainOutputs = pendigits.Training.Item2;

            // Pre-process the digits so each of them is centered and scaled
            trainInputs = trainInputs.Apply(Accord.Statistics.Tools.ZScores);
            trainInputs = trainInputs.Apply((x) => x.Subtract(x.Min())); // make them positive

            // Create some prior distributions to help initialize our parameters
            var priorC = new WishartDistribution(dimension: 2, degreesOfFreedom: 5);
            var priorM = new MultivariateNormalDistribution(dimension: 2);

            // Create a template Markov classifier that we can use as a base for the HCRF
            var hmmc = new HiddenMarkovClassifier <MultivariateNormalDistribution, double[]>(
                classes: pendigits.NumberOfClasses, topology: new Forward(5),
                initial: (i, j) => new MultivariateNormalDistribution(mean: priorM.Generate(), covariance: priorC.Generate()));

            // Create a new learning algorithm for creating HCRFs
            var teacher = new HiddenQuasiNewtonLearning <double[]>()
            {
                Function = new MarkovMultivariateFunction(hmmc),

                MaxIterations = 10
            };

            // The following line is only needed to ensure reproducible results. Please remove it to enable full parallelization
            teacher.ParallelOptions.MaxDegreeOfParallelism = 1; // (Remove, comment, or change this line to enable full parallelism)

            // Use the learning algorithm to create a classifier
            var hcrf = teacher.Learn(trainInputs, trainOutputs);

            // Compute predictions for the training set
            int[] trainPredicted = hcrf.Decide(trainInputs);

            // Check the performance of the classifier by comparing with the ground-truth:
            var    m1       = new GeneralConfusionMatrix(predicted: trainPredicted, expected: trainOutputs);
            double trainAcc = m1.Accuracy; // should be 0.89594053744997137


            // Prepare the testing set
            double[][][] testInputs  = pendigits.Testing.Item1;
            int[]        testOutputs = pendigits.Testing.Item2;

            // Apply the same normalizations
            testInputs = testInputs.Apply(Accord.Statistics.Tools.ZScores);
            testInputs = testInputs.Apply((x) => x.Subtract(x.Min())); // make them positive

            // Compute predictions for the test set
            int[] testPredicted = hcrf.Decide(testInputs);

            // Check the performance of the classifier by comparing with the ground-truth:
            var    m2      = new GeneralConfusionMatrix(predicted: testPredicted, expected: testOutputs);
            double testAcc = m2.Accuracy; // should be 0.896050173472111
            #endregion

            Assert.AreEqual(0.89594053744997137, trainAcc, 1e-10);
            Assert.AreEqual(0.896050173472111, testAcc, 1e-10);
        }
Example #14
 /// <summary>
 ///   Bhattacharyya distance between two Gaussian distributions.
 /// </summary>
 ///
 /// <param name="a">The first Normal distribution.</param>
 /// <param name="b">The second Normal distribution.</param>
 ///
 /// <returns>The Bhattacharyya distance between the two distributions.</returns>
 ///
 public double Distance(MultivariateNormalDistribution a, MultivariateNormalDistribution b)
 {
     return(Distance(a.Mean, a.Covariance, b.Mean, b.Covariance));
 }
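For two Gaussians $\mathcal{N}(\mu_1, \Sigma_1)$ and $\mathcal{N}(\mu_2, \Sigma_2)$, the Bhattacharyya distance this overload delegates to has the closed form

$$D_B = \frac{1}{8} (\mu_1 - \mu_2)^\top \Sigma^{-1} (\mu_1 - \mu_2) + \frac{1}{2} \ln \frac{\det \Sigma}{\sqrt{\det \Sigma_1 \det \Sigma_2}}, \qquad \Sigma = \frac{\Sigma_1 + \Sigma_2}{2}.$$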
Example #15
File: Form1.cs Project: sbreed/HMM
        public Form1()
        {
            InitializeComponent();

            //double[][][] sequences = new double[][][]
            //{
            //    new double[][]
            //    {
            //        // This is the first  sequence with label = 0
            //        new double[] { 0, 1 },
            //        new double[] { 1, 2 },
            //        new double[] { 2, 3 },
            //        new double[] { 3, 4 },
            //        new double[] { 4, 5 },
            //    },

            //    new double[][]
            //    {
            //            // This is the second sequence with label = 1
            //        new double[] { 4,  3 },
            //        new double[] { 3,  2 },
            //        new double[] { 2,  1 },
            //        new double[] { 1,  0 },
            //        new double[] { 0, -1 },
            //    }
            //};

            // Labels for the sequences
            //int[] labels = { 0, 1 };

            #region Old

            //Dictionary<string, List<List<double[]>>> dictAll;
            //using (System.IO.FileStream fs = new System.IO.FileStream(@".\SkeletonsAsDouble.serialized", System.IO.FileMode.Open))
            //{
            //    BinaryFormatter bf = new BinaryFormatter();
            //    dictAll = (Dictionary<string, List<List<double[]>>>)bf.Deserialize(fs);
            //}

            //#region Export to CSV

            //using (System.IO.StreamWriter sw = new System.IO.StreamWriter(@".\SkeletonsAsCSV.csv"))
            //{
            //    double[] rgFirstFrame = dictAll[dictAll.Keys.ElementAt(0)][0][0];

            //    string strHeader = string.Empty;
            //    for (int i = 0; i < rgFirstFrame.Length; i++)
            //    {
            //        strHeader += string.Format("F{0},", i);
            //    }
            //    strHeader += "Label";
            //    sw.WriteLine(strHeader);

            //    foreach (KeyValuePair<string, List<List<double[]>>> kvp in dictAll)
            //    {
            //        foreach (List<double[]> lstGesture in kvp.Value)
            //        {
            //            foreach (double[] frame in lstGesture)
            //            {
            //                sw.WriteLine(string.Format("{0},{1}", string.Join(",", frame), kvp.Key));
            //            }
            //        }
            //    }
            //}

            #endregion

            #region CHANGE THESE PARAMETERS

            const int nStartHiddenCount = 2;
            const int nEndHiddenCount   = 12;
            const int nFeatureCount     = 8; // Both DataOrig.dat and Data.dat have dimensionality of 8

            #endregion

            string strFile;
            int    nClasses;

#if STRHC1
            // Data.dat = Second STRHC (10 classes but not as noisy)
            strFile  = @".\Data.dat";
            nClasses = 10;
#else
            // Data1.dat = First STRHC (4 classes but very noisy)
            strFile  = @".\DataOrig.dat";
            nClasses = 4;
#endif

            Dictionary <int, Tuple <Dictionary <string, List <List <double[]> > >, Dictionary <string, List <List <double[]> > > > > dict;
            using (System.IO.FileStream fs = new System.IO.FileStream(strFile, System.IO.FileMode.Open, System.IO.FileAccess.Read))
            {
                BinaryFormatter bf = new BinaryFormatter();
                dict = bf.Deserialize(fs) as Dictionary <int, Tuple <Dictionary <string, List <List <double[]> > >, Dictionary <string, List <List <double[]> > > > >;
            }

            using (System.IO.StreamWriter sw = new System.IO.StreamWriter(@"HMMOutput.txt"))
            {
                for (int nHiddenCount = nStartHiddenCount; nHiddenCount <= nEndHiddenCount; nHiddenCount++)
                {
                    double fAverage = 0.0;

                    int nEpoch = 0;

                    List <int> lstTrainTimes = new List <int>();
                    List <int> lstRecogTimes = new List <int>();

                    //for (int i = 1; i <= nEpochs; i++)
                    foreach (KeyValuePair <int, Tuple <Dictionary <string, List <List <double[]> > >, Dictionary <string, List <List <double[]> > > > > kvpTT in dict)
                    {
                        nEpoch++;

                        #region Old

                        //Dictionary<string, List<List<double[]>>> dictTrain = new Dictionary<string, List<List<double[]>>>();
                        //Dictionary<string, List<List<double[]>>> dictTest = new Dictionary<string, List<List<double[]>>>();

                        //#region Divide the vectors into training and testing sets

                        //foreach (KeyValuePair<string, List<List<double[]>>> kvp in dictAll)
                        //{
                        //    int nGroupSize = kvp.Value.Count / nEpochs;

                        //    List<List<double[]>> lstTrain;
                        //    List<List<double[]>> lstTest;
                        //    dictTrain.Add(kvp.Key, (lstTrain = new List<List<double[]>>()));
                        //    dictTest.Add(kvp.Key, (lstTest = new List<List<double[]>>()));

                        //    for (int j = 0; j < kvp.Value.Count; j++)
                        //    {
                        //        if (j < i * nGroupSize && j >= (i * nGroupSize) - nGroupSize)
                        //        {
                        //            lstTest.Add(kvp.Value[j]);
                        //        }
                        //        else
                        //        {
                        //            lstTrain.Add(kvp.Value[j]);
                        //        }
                        //    }
                        //}

                        //#endregion

                        #endregion

                        Dictionary <string, List <List <double[]> > > dictTrain = kvpTT.Value.Item1;
                        Dictionary <string, List <List <double[]> > > dictTest  = kvpTT.Value.Item2;

                        double[][][] sequences = new double[dictTrain.Sum(kvp => kvp.Value.Count)][][];
                        int[]        labels    = new int[sequences.Length];

                        #region The Sequences

                        int nIndex = 0;
                        foreach (KeyValuePair <string, List <List <double[]> > > kvp in dictTrain)
                        {
                            foreach (List <double[]> lst in kvp.Value)
                            {
                                sequences[nIndex] = lst.ToArray();
                                labels[nIndex]    = Array.IndexOf(dictTrain.Keys.ToArray(), kvp.Key);
                                nIndex++;
                            }
                        }

                        #endregion

                        var initialDensity = new MultivariateNormalDistribution(nFeatureCount);

                        // Creates a sequence classifier containing one hidden Markov model per class,
                        // each with a forward topology and an underlying multivariate Normal distribution as density.
                        var classifier = new HiddenMarkovClassifier <MultivariateNormalDistribution>(
                            classes: nClasses, topology: new Forward(nHiddenCount), initial: initialDensity);

                        // Configure the learning algorithms to train the sequence classifier
                        var teacher = new HiddenMarkovClassifierLearning <MultivariateNormalDistribution>(
                            classifier,

                            // Train each model until the log-likelihood changes less than 0.0001
                            modelIndex => new BaumWelchLearning <MultivariateNormalDistribution>(
                                classifier.Models[modelIndex])
                        {
                            Tolerance  = 0.0001,
                            Iterations = 0,

                            FittingOptions = new NormalOptions()
                            {
                                Diagonal       = true,    // only diagonal covariance matrices
                                Regularization = 1e-5     // avoid non-positive definite errors
                            }
                        }
                            );

                        // Train the sequence classifier using the algorithm

                        System.Diagnostics.Stopwatch watchTrain = new System.Diagnostics.Stopwatch();
                        watchTrain.Start();
                        double logLikelihood = teacher.Run(sequences, labels);
                        watchTrain.Stop();

                        lstTrainTimes.Add((int)watchTrain.ElapsedMilliseconds);

                        //// Calculate the probability that the given
                        ////  sequences originated from the model
                        //double likelihood, likelihood2;

                        //// Try to classify the 1st sequence (output should be 0)
                        //int c1 = classifier.Compute(sequences[0], out likelihood);

                        //// Try to classify the 2nd sequence (output should be 1)
                        //int c2 = classifier.Compute(sequences[1], out likelihood2);

                        sw.WriteLine("Epoch: {0} -- Hidden State: {1}\t\tTotal Train Time: {2}", nEpoch, nHiddenCount, watchTrain.ElapsedMilliseconds.ToString());

                        int nCorrect   = 0;
                        int nIncorrect = 0;

                        foreach (KeyValuePair <string, List <List <double[]> > > kvp in dictTest)
                        {
                            foreach (List <double[]> lst in kvp.Value)
                            {
                                System.Diagnostics.Stopwatch watch = new System.Diagnostics.Stopwatch();
                                watch.Start();
                                int nClassIndex = classifier.Compute(lst.ToArray());
                                watch.Stop();

                                sw.WriteLine(string.Format("Should be: {0}\tRecognized: {1}\t\tTime: {2} ms", kvp.Key, dictTest.Keys.ElementAt(nClassIndex), watch.ElapsedMilliseconds));
                                lstRecogTimes.Add((int)watch.ElapsedMilliseconds);

                                if (dictTest.Keys.ElementAt(nClassIndex) == kvp.Key)
                                {
                                    nCorrect++;
                                }
                                else
                                {
                                    nIncorrect++;
                                }
                            }
                        }

                        fAverage += (double)nCorrect / (nCorrect + nIncorrect);

                        sw.WriteLine(string.Format("Correct: {0} of {1} ({2:P3})", nCorrect, nCorrect + nIncorrect, (double)nCorrect / (nCorrect + nIncorrect)));

                        sw.WriteLine();
                        sw.WriteLine();
                    }

                    sw.WriteLine("Average Correct for {0} Hidden: {1:P3}", nHiddenCount, fAverage / 5);

                    sw.WriteLine("Average Train Time for {0}: {1:F2}", nHiddenCount, lstTrainTimes.Select(v => (double)v).Average());
                    sw.WriteLine("Std. Dev. Train Time for {0}: {1:F2}", nHiddenCount, lstTrainTimes.Select(v => (double)v).StandardDeviation());

                    sw.WriteLine("Average Recog. Time for {0}: {1:F2}", nHiddenCount, lstRecogTimes.Select(v => (double)v).Average());
                    sw.WriteLine("Std. Dev. Recog. Time for {0}: {1:F2}", nHiddenCount, lstRecogTimes.Select(v => (double)v).StandardDeviation());

                    sw.WriteLine();
                    sw.WriteLine();
                    sw.WriteLine();
                    sw.WriteLine();
                }
            }
        }
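One caveat in the reporting above: fAverage is divided by a hard-coded 5, which is only correct if the serialized dictionary contains exactly five train/test splits; dividing by nEpoch instead would make the average robust to any number of folds.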
Example #16
 /// <summary>
 ///   Bhattacharyya distance between two Gaussian distributions.
 /// </summary>
 ///
 /// <param name="x">The first Normal distribution.</param>
 /// <param name="y">The second Normal distribution.</param>
 ///
 /// <returns>The Bhattacharyya distance between the two distributions.</returns>
 ///
 public double Distance(MultivariateNormalDistribution x, MultivariateNormalDistribution y)
 {
     return(Distance(x.Mean, x.Covariance, y.Mean, y.Covariance));
 }
Example #17
        public void LearnTest6()
        {
            // Create a Continuous density Hidden Markov Model Sequence Classifier
            // to detect a multivariate sequence and the same sequence backwards.
            double[][][] sequences = new double[][][]
            {
                new double[][]
                {
                    // This is the first  sequence with label = 0
                    new double[] { 0, 1 },
                    new double[] { 1, 2 },
                    new double[] { 2, 3 },
                    new double[] { 3, 4 },
                    new double[] { 4, 5 },
                },

                new double[][]
                {
                    // This is the second sequence with label = 1
                    new double[] { 4, 3 },
                    new double[] { 3, 2 },
                    new double[] { 2, 1 },
                    new double[] { 1, 0 },
                    new double[] { 0, -1 },
                }
            };

            // Labels for the sequences
            int[] labels = { 0, 1 };


            var density = new MultivariateNormalDistribution(2);

            // Creates a sequence classifier containing 2 hidden Markov Models with 2 states
            // and an underlying multivariate Normal distribution as density.
            var classifier = new HiddenMarkovClassifier <MultivariateNormalDistribution>(
                2, new Custom(new double[2, 2], new double[2]), density);

            // Configure the learning algorithms to train the sequence classifier
            var teacher = new HiddenMarkovClassifierLearning <MultivariateNormalDistribution>(
                classifier,

                // Train each model until the log-likelihood changes less than 0.0001
                modelIndex => new BaumWelchLearning <MultivariateNormalDistribution>(
                    classifier.Models[modelIndex])
            {
                Tolerance  = 0.0001,
                Iterations = 0,

                FittingOptions = new NormalOptions()
                {
                    Diagonal = true
                }
            }
                );

            // Train the sequence classifier using the algorithm
            double logLikelihood = teacher.Run(sequences, labels);


            // Calculate the probability that the given
            //  sequences originated from the model
            double response1, response2;

            // Try to classify the 1st sequence (output should be 0)
            int c1 = classifier.Compute(sequences[0], out response1);

            // Try to classify the 2nd sequence (output should be 1)
            int c2 = classifier.Compute(sequences[1], out response2);

            Assert.AreEqual(double.NegativeInfinity, logLikelihood);
            Assert.AreEqual(0, response1);
            Assert.AreEqual(0, response2);

            Assert.IsFalse(double.IsNaN(logLikelihood));
            Assert.IsFalse(double.IsNaN(response1));
            Assert.IsFalse(double.IsNaN(response2));
        }
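The Custom(new double[2, 2], new double[2]) topology initializes both the transition matrix and the initial state probabilities to all zeros, so every state path has probability zero: training converges to a log-likelihood of negative infinity, and the likelihoods returned through the out parameters are 0, which is exactly what the assertions verify.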
Example #18
        public void LearnTest3()
        {
            // Create a Continuous density Hidden Markov Model Sequence Classifier
            // to detect a multivariate sequence and the same sequence backwards.
            double[][][] sequences = new double[][][]
            {
                new double[][]
                {
                    // This is the first  sequence with label = 0
                    new double[] { 0 },
                    new double[] { 1 },
                    new double[] { 2 },
                    new double[] { 3 },
                    new double[] { 4 },
                },

                new double[][]
                {
                    // This is the second sequence with label = 1
                    new double[] { 4 },
                    new double[] { 3 },
                    new double[] { 2 },
                    new double[] { 1 },
                    new double[] { 0 },
                }
            };

            // Labels for the sequences
            int[] labels = { 0, 1 };

            // Creates a sequence classifier containing 2 hidden Markov Models
            //  with 2 states and an underlying Normal distribution as density.
            MultivariateNormalDistribution density = new MultivariateNormalDistribution(1);
            var classifier = new HiddenMarkovClassifier <MultivariateNormalDistribution>(2, new Ergodic(2), density);

            // Configure the learning algorithms to train the sequence classifier
            var teacher = new HiddenMarkovClassifierLearning <MultivariateNormalDistribution>(classifier,

                                                                                              // Train each model until the log-likelihood changes less than 0.0001
                                                                                              modelIndex => new BaumWelchLearning <MultivariateNormalDistribution>(classifier.Models[modelIndex])
            {
                Tolerance  = 0.0001,
                Iterations = 0
            }
                                                                                              );

            // Train the sequence classifier using the algorithm
            double logLikelihood = teacher.Run(sequences, labels);


            // Calculate the probability that the given
            //  sequences originated from the model
            double likelihood1, likelihood2;

            // Try to classify the first sequence (output should be 0)
            int c1 = classifier.Compute(sequences[0], out likelihood1);

            // Try to classify the second sequence (output should be 1)
            int c2 = classifier.Compute(sequences[1], out likelihood2);

            Assert.AreEqual(0, c1);
            Assert.AreEqual(1, c2);

            Assert.AreEqual(-13.271981026832929, logLikelihood, 1e-14);
            Assert.AreEqual(0.99999791320102149, likelihood1, 1e-15);
            Assert.AreEqual(0.99999791320102149, likelihood2, 1e-15);

            Assert.IsFalse(double.IsNaN(logLikelihood));
            Assert.IsFalse(double.IsNaN(likelihood1));
            Assert.IsFalse(double.IsNaN(likelihood2));
        }
Example #19
    public void LearnGesture(int valuesUsed, int modeUsed, int statesUsed)
    {
        double[][][] inputs  = new double[storedGestures.Count][][];
        int[]        outputs = new int[storedGestures.Count];

        for (int i = 0; i < inputs.Length; i++)
        {
            double[][] points = new double[storedGestures[i].points.Length][];
            switch (modeUsed)
            {
            case 3:
                for (int j = 0; j < storedGestures[i].points.Length; j++)
                {
                    points[j] = new double[3] {
                        storedGestures[i].points[j][0], storedGestures[i].points[j][1], storedGestures[i].points[j][2]
                    };
                }
                break;

            case 33:
                for (int j = 0; j < storedGestures[i].points.Length; j++)
                {
                    points[j] = new double[6] {
                        storedGestures[i].points[j][0], storedGestures[i].points[j][1], storedGestures[i].points[j][2],
                        storedGestures[i].points[j][6], storedGestures[i].points[j][7], storedGestures[i].points[j][8]
                    };
                }
                break;

            case 6:
                for (int j = 0; j < storedGestures[i].points.Length; j++)
                {
                    points[j] = new double[6] {
                        storedGestures[i].points[j][0], storedGestures[i].points[j][1], storedGestures[i].points[j][2],
                        storedGestures[i].points[j][3], storedGestures[i].points[j][4], storedGestures[i].points[j][5]
                    };
                }
                break;

            case 66:
                for (int j = 0; j < storedGestures[i].points.Length; j++)
                {
                    points[j] = new double[12] {
                        storedGestures[i].points[j][0], storedGestures[i].points[j][1], storedGestures[i].points[j][2],
                        storedGestures[i].points[j][3], storedGestures[i].points[j][4], storedGestures[i].points[j][5],
                        storedGestures[i].points[j][6], storedGestures[i].points[j][7], storedGestures[i].points[j][8],
                        storedGestures[i].points[j][9], storedGestures[i].points[j][10], storedGestures[i].points[j][11]
                    };
                }
                break;
            }

            inputs[i]  = points;
            outputs[i] = storedGestures[i].index;
        }

        List <String> classes = new List <String>();

        int states = gestureIndex.Count;

        MultivariateNormalDistribution dist = new MultivariateNormalDistribution(valuesUsed);

        hmm = new HiddenMarkovClassifier <MultivariateNormalDistribution, double[]>
                  (states, new Forward(statesUsed), dist);

        var teacher = new HiddenMarkovClassifierLearning <MultivariateNormalDistribution, double[]>(hmm)
        {
            Learner = i => new BaumWelchLearning <MultivariateNormalDistribution, double[]>(hmm.Models[i])
            {
                Tolerance     = 0.01,
                MaxIterations = 0,

                FittingOptions = new NormalOptions()
                {
                    Regularization = 1e-5
                }
            }
        };

        teacher.Empirical = true;
        teacher.Rejection = false;

        teacher.Learn(inputs, outputs);

        Debug.Log("Sequence Learned!");
    }
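The four switch cases above differ only in which source indices they copy. A hedged refactor sketch (the indicesByMode dictionary is illustrative, not part of the original code) that removes the duplication:

    using System.Collections.Generic;
    using System.Linq;

    // Map each mode to the indices of the features it keeps.
    var indicesByMode = new Dictionary<int, int[]>
    {
        [3]  = new[] { 0, 1, 2 },
        [33] = new[] { 0, 1, 2, 6, 7, 8 },
        [6]  = new[] { 0, 1, 2, 3, 4, 5 },
        [66] = new[] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11 },
    };

    int[] indices = indicesByMode[modeUsed];
    for (int j = 0; j < storedGestures[i].points.Length; j++)
        points[j] = indices.Select(k => storedGestures[i].points[j][k]).ToArray();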
Example #20
 private static double M(double x, double y, double rho)
 {
     return(MultivariateNormalDistribution.Bivariate(0, 0, 1, 1, rho).DistributionFunction(new double[] { x, y }));
 }
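MultivariateNormalDistribution.Bivariate(0, 0, 1, 1, rho) builds a standard bivariate Normal with correlation $\rho$, so $M$ evaluates the joint lower-tail probability

$$M(x, y, \rho) = P(X \le x,\ Y \le y), \qquad (X, Y) \sim \mathcal{N}\!\left(\begin{pmatrix}0\\0\end{pmatrix}, \begin{pmatrix}1 & \rho\\ \rho & 1\end{pmatrix}\right).$$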
Example #21
        /// <summary>
        ///   Generates a random vector of observations from a distribution with the given parameters.
        /// </summary>
        ///
        /// <param name="samples">The number of samples to generate.</param>
        /// <param name="mean">The mean vector μ (mu) for the distribution.</param>
        /// <param name="covariance">The covariance matrix Σ (sigma) for the distribution.</param>
        ///
        /// <returns>A random vector of observations drawn from this distribution.</returns>
        ///
        public static double[][] Generate(int samples, double[] mean, double[,] covariance)
        {
            var normal = new MultivariateNormalDistribution(mean, covariance);

            return(normal.Generate(samples));
        }
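A usage sketch for this helper (the mean and covariance values are illustrative):

    double[] mean = { 0.0, 0.0 };
    double[,] covariance = { { 1.0, 0.5 }, { 0.5, 2.0 } };
    double[][] samples = Generate(1000, mean, covariance);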