Example #1
        public void PerformLocalSearch(Ant<ClusterExampleAssignment> ant)
        {
            Solution<ClusterExampleAssignment> originalSolution = ant.Solution;

            // Refine the ant's current assignment with a k-means pass.
            _kmeans.SetAssignment(originalSolution.ToList());
            ClusteringSolution clusteringSolution = _kmeans.CreateClusters();

            // Rebuild the solution and trail from the refined clusters.
            Solution<ClusterExampleAssignment> optimizedSolution = new Solution<ClusterExampleAssignment>();
            List<int> optimizedTrail = new List<int>();

            foreach (Cluster cluster in clusteringSolution.Clusters)
            {
                foreach (DataMining.Data.Example example in cluster.Examples)
                {
                    int componentIndex = (example.Index * ClustersNumber) + cluster.Label;
                    optimizedSolution.Components.Add(new DecisionComponent<ClusterExampleAssignment>(componentIndex, new ClusterExampleAssignment(example.Index, cluster.Label)));
                    optimizedTrail.Add(componentIndex);
                }
            }

            this.SolutionQualityEvaluator.EvaluateSolutionQuality(optimizedSolution);

            // Keep the refined solution only if it improves on the original.
            if (optimizedSolution.Quality > originalSolution.Quality)
            {
                ant.Solution = optimizedSolution;
                ant.Trail    = optimizedTrail;
            }
        }
Example #2
        private void Initialize(double[][] observations)
        {
            var algo = new KMeans();

            // Uniform initial state distribution.
            for (var j = 0; j < NumberOfStates; j++)
            {
                _pi[j] = 1d / NumberOfStates;
            }

            // Each row of the transition probability matrix starts as a copy of pi.
            for (var j = 0; j < NumberOfStates; j++)
            {
                _tpm[j] = (double[])_pi.Clone();
            }

            // One k-means cluster per mixture component per state.
            var k          = _pi.Length * NumberOfComponents;
            var dimensions = observations[0].Length;

            algo.CreateClusters(observations, k, KMeans.KMeansDefaultIterations, (k > 3) ? InitialClusterSelectionMethod.Furthest : InitialClusterSelectionMethod.Random);
            _emission = new Mixture<IMultivariateDistribution>[_pi.Length];

            // Build each state's Gaussian mixture from the cluster statistics.
            for (int i = 0; i < _pi.Length; i++)
            {
                _emission[i] = new Mixture<IMultivariateDistribution>(NumberOfComponents, dimensions);
                for (int j = 0; j < NumberOfComponents; j++)
                {
                    var mean       = algo.ClusterCenters[j + NumberOfComponents * i];
                    var covariance = algo.ClusterCovariances[j + NumberOfComponents * i];

                    _emission[i].Components[j] = new NormalDistribution(mean, covariance);
                }
            }
        }
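The flat index j + NumberOfComponents * i above gives each state its own contiguous run of NumberOfComponents clusters out of the k = states * components clusters requested from k-means. A minimal illustration of that mapping, with the state and component counts chosen here purely for the example:

            // Illustration only: with 3 states and 2 components per state,
            // state i, component j reads cluster j + components * i,
            // so state 0 uses clusters {0, 1}, state 1 uses {2, 3}, state 2 uses {4, 5}.
            const int states = 3, components = 2;
            for (int i = 0; i < states; i++)
            {
                for (int j = 0; j < components; j++)
                {
                    Console.WriteLine($"state {i}, component {j} -> cluster {j + components * i}");
                }
            }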
Example #3
        public static ClusteringSolution CreateKMeansClusters(int seed, Dataset dataset, int clustersNumber, ISimilarityMeasure similarityMeasure, int maxIterations, bool fireEvents)
        {
            // Seed the shared random source so repeated runs are reproducible.
            DataMining.Utilities.RandomUtility.Initialize(seed);
            KMeans kmeans = new KMeans(dataset, clustersNumber, similarityMeasure, maxIterations, fireEvents);

            kmeans.Initialize();
            kmeans.OnPostIteration += new EventHandler(kmeans_OnPostIteration);
            return kmeans.CreateClusters();
        }
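A minimal, hypothetical caller sketch for the helper above. Only the CreateKMeansClusters signature comes from the snippet itself; the dataset loader (DatasetLoader.LoadFromFile) and the concrete similarity measure (EuclideanSimilarityMeasure) are assumed names for illustration.

            // Hypothetical usage sketch: DatasetLoader and EuclideanSimilarityMeasure are assumed
            // names, not APIs shown above; the call is unqualified because the helper is assumed
            // to be a static member of the calling class.
            Dataset dataset = DatasetLoader.LoadFromFile("iris.data");
            ISimilarityMeasure measure = new EuclideanSimilarityMeasure();
            ClusteringSolution solution = CreateKMeansClusters(1, dataset, 3, measure, 100, false);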
Example #4
        static void Main(string[] args)
        {
            var stopwatch = Stopwatch.StartNew();

            KMeans kMeans = new KMeans(40, 4);

            kMeans.CreateClusters();
            kMeans.PrintClusters();

            stopwatch.Stop();
            Console.WriteLine($"Elapsed time: {stopwatch.Elapsed.TotalSeconds} seconds.");

            Console.Read();
        }
Example #5
        public void CreateClusters_FTSEObservations_4Clusters()
        {
            var util   = new TestDataUtils();
            var series = util.GetSvcData(util.FTSEFilePath, new DateTime(2010, 12, 18), new DateTime(2011, 12, 18));

            var algo = new KMeans();

            algo.CreateClusters(series, 4, 200, InitialClusterSelectionMethod.Random);

            Assert.AreEqual(4, algo.Clusters.Count);
            for (int i = 0; i < algo.Clusters.Count; i++)
            {
                Assert.IsTrue(algo.Clusters[i].Length > 0);
            }
        }
Example #6
        private void Initialize(double[][] observations)
        {
            // Create initial emissions; the TPM and pi are already created.
            var algo = new KMeans();

            algo.CreateClusters(observations, _pi.Length, KMeans.KMeansDefaultIterations, (_pi.Length > 3) ? InitialClusterSelectionMethod.Furthest : InitialClusterSelectionMethod.Random);
            _emission = new IMultivariateDistribution[_pi.Length];

            // One Gaussian emission per state, built from the matching cluster.
            for (int i = 0; i < _pi.Length; i++)
            {
                var mean       = algo.ClusterCenters[i];
                var covariance = algo.ClusterCovariances[i];

                _emission[i] = new NormalDistribution(mean, covariance);
            }
        }
Example #7
        private void Initialize(double[][] observations)
        {
            // Create new empty components; the initial mean and covariance buffers are
            // replaced by the k-means cluster statistics below.
            var covariance = new double[_mixture.Dimension, _mixture.Dimension];
            var mean       = new double[_mixture.Dimension];
            var algo       = new KMeans();

            algo.CreateClusters(observations, _mixture.Components.Length, KMeans.KMeansDefaultIterations, (_mixture.Components.Length > 3) ? InitialClusterSelectionMethod.Furthest : InitialClusterSelectionMethod.Random);

            for (int i = 0; i < _mixture.Components.Length; i++)
            {
                mean       = algo.ClusterCenters[i];
                covariance = algo.ClusterCovariances[i];

                _mixture.Components[i] = new NormalDistribution(mean, covariance);
            }
        }
Example #8
        private NormalDistribution[] CreateEmissions(double[][] observations, int numberOfEmissions)
        {
            var emissions = new NormalDistribution[numberOfEmissions];
            // Create initial emissions; the TPM and pi are already created.
            var algo = new KMeans();

            algo.CreateClusters(observations, numberOfEmissions, KMeans.KMeansDefaultIterations, (numberOfEmissions > 3) ? InitialClusterSelectionMethod.Furthest : InitialClusterSelectionMethod.Random);

            // One Gaussian emission per requested state, built from the matching cluster.
            for (int i = 0; i < numberOfEmissions; i++)
            {
                var mean       = algo.ClusterCenters[i];
                var covariance = algo.ClusterCovariances[i];

                emissions[i] = new NormalDistribution(mean, covariance);
            }

            return emissions;
        }
Example #9
        private void Initialize(double[][] observations)
        {
            var algo       = new KMeans();
            // One k-means cluster per mixture component per state.
            var k          = _pi.Length * _numberOfComponents;
            var dimensions = observations[0].Length;

            algo.CreateClusters(observations, k, KMeans.KMeansDefaultIterations, (k > 3) ? InitialClusterSelectionMethod.Furthest : InitialClusterSelectionMethod.Random);
            _emission = new Mixture<IMultivariateDistribution>[_pi.Length];

            for (int i = 0; i < _pi.Length; i++)
            {
                _emission[i] = new Mixture<IMultivariateDistribution>(_numberOfComponents, dimensions);
                for (int j = 0; j < _numberOfComponents; j++)
                {
                    var mean       = algo.ClusterCenters[j + _numberOfComponents * i];
                    var covariance = algo.ClusterCovariances[j + _numberOfComponents * i];

                    _emission[i].Components[j] = new NormalDistribution(mean, covariance);
                    Debug.WriteLine("[i,j]=[{0},{1}] Mean Vector {2}", i, j, new Vector(mean));
                }
            }
        }
Example #10
        private Mixture<IMultivariateDistribution>[] CreateEmissions(double[][] observations, int numberOfStates, int numberOfComponents)
        {
            // Create initial emissions; the TPM and pi are already created.
            var algo = new KMeans();

            algo.CreateClusters(observations, numberOfStates * numberOfComponents, KMeans.KMeansDefaultIterations, (numberOfStates * numberOfComponents > 3) ? InitialClusterSelectionMethod.Furthest : InitialClusterSelectionMethod.Random);

            var emissions = new Mixture<IMultivariateDistribution>[numberOfStates];

            // Each state gets a mixture of numberOfComponents Gaussians, one per assigned cluster.
            for (int i = 0; i < numberOfStates; i++)
            {
                emissions[i] = new Mixture<IMultivariateDistribution>(numberOfComponents, observations[0].Length);
                for (int j = 0; j < numberOfComponents; j++)
                {
                    var mean       = algo.ClusterCenters[j + numberOfComponents * i];
                    var covariance = algo.ClusterCovariances[j + numberOfComponents * i];

                    emissions[i].Components[j] = new NormalDistribution(mean, covariance);
                }
            }

            return emissions;
        }