Code example #1
        /// <summary>
        /// Create clustering based on the density
        /// </summary>
        /// <param name="numOfClusters">Needed by <see cref="Kmeans"/></param>
        /// <param name="initialMeans">Needed by <see cref="Kmeans"/></param>
        /// <returns>
        /// Clustering as <see cref="MultidimensionalArray"/>
        /// [0]: x, [1]: y, [2]: data, [3]: cellToCluster (e.g. cell 0 is in cluster 1), [4]: local cell index
        /// </returns>
        public MultidimensionalArray CreateClustering_Density(int numOfClusters, double[] initialMeans)
        {
            Console.WriteLine("CreateClustering_Density: START");

            double[] data   = ShockFindingExtensions.GetFinalFunctionValues(input, inputExtended.ExtractSubArrayShallow(-1, 0));
            Kmeans   kmeans = new Kmeans(data, numOfClusters, initialMeans);

            int[] cellToCluster = kmeans.Cluster();

            MultidimensionalArray clustering = MultidimensionalArray.Create(data.Length, 5);

            for (int i = 0; i < input.Lengths[0]; i++)
            {
                clustering[i, 0] = input[i, (int)inputExtended[i, 0] - 1, 0];      // x
                clustering[i, 1] = input[i, (int)inputExtended[i, 0] - 1, 1];      // y
                clustering[i, 2] = data[i];                                        // data value
                clustering[i, 3] = cellToCluster[i];                               // cellToCluster (e.g. cell 0 is in cluster 1)
                clustering[i, 4] = inputExtended[i, 2];                            // local cell index
            }
            _clusterings.Add(clustering);

            Console.WriteLine("CreateClustering_Density: END");

            return clustering;
        }
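This example (like example #12 further down) builds a Kmeans from a one-dimensional data array, a cluster count, and user-supplied initial means, then calls Cluster() to obtain the cell-to-cluster map. The Kmeans implementation itself is not reproduced on this page; the following is a minimal, self-contained sketch of a 1-D k-means with the same call shape, given purely for orientation (the class name MiniKmeans1D and all of its internals are assumptions, not the project's code):

using System;
using System.Linq;

// Hypothetical 1-D k-means mirroring the "new Kmeans(data, k, initialMeans); Cluster()" pattern above.
public class MiniKmeans1D
{
    private readonly double[] data;
    private readonly double[] means;

    public MiniKmeans1D(double[] data, int numOfClusters, double[] initialMeans)
    {
        if (initialMeans.Length != numOfClusters)
            throw new ArgumentException("One initial mean per cluster is required.");
        this.data  = data;
        this.means = (double[])initialMeans.Clone();
    }

    /// <summary>Returns, for every data point, the index of its cluster.</summary>
    public int[] Cluster()
    {
        int[] assignment = new int[data.Length];
        bool  changed    = true;

        while (changed)
        {
            changed = false;

            // Assignment step: nearest mean (1-D data, so plain absolute distance).
            for (int i = 0; i < data.Length; i++)
            {
                int best = 0;
                for (int c = 1; c < means.Length; c++)
                {
                    if (Math.Abs(data[i] - means[c]) < Math.Abs(data[i] - means[best]))
                        best = c;
                }
                if (assignment[i] != best)
                {
                    assignment[i] = best;
                    changed = true;
                }
            }

            // Update step: each mean becomes the average of its assigned points;
            // empty clusters keep their previous mean.
            for (int c = 0; c < means.Length; c++)
            {
                double[] members = data.Where((_, i) => assignment[i] == c).ToArray();
                if (members.Length > 0)
                    means[c] = members.Average();
            }
        }

        return assignment;
    }
}

With initial means that roughly bracket the data range, such a routine converges in a handful of iterations; the returned array plays the same role as the cellToCluster column stored in the clustering array above.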
Code example #2
        private async Task GetAllTrails()
        {
            trails = new List <Trail>();

            var query = ParseObject.GetQuery("Trail").WhereNotEqualTo("objectId", "toto");
            IEnumerable <ParseObject> results = await query.FindAsync();

            foreach (var item in results)
            {
                Trail trail = new Trail();
                trail.Distance = item.Get <float>("distance");
                trail.Duration = (int)item.Get <double>("duration");
                trail.Id       = item.ObjectId;
                trails.Add(trail);
            }

            Kmeans.Init(trails);
            int[] clusters = Kmeans.Cluster(Kmeans.rawData, 3);

            List <ParseObject> listResult = new List <ParseObject>();

            for (int i = 0; i < results.Count(); i++)
            {
                ParseObject pObject = results.ElementAt(i);
                pObject["clusterId"] = clusters[i];
                listResult.Add(pObject);
                await pObject.SaveAsync();
            }

            GetX(listResult);
        }
Code example #3
        public override void Initialize(object target, string targetInputFile)
        {
            base.Initialize(target, targetInputFile);

            var targetBitmap = target as Bitmap;

            initColors = Kmeans.GetDominanteColors(targetBitmap, k);
        }
Code example #4
        public override void Initialize(string targetInputFile)
        {
            base.Initialize(targetInputFile);

            var targetBitmap = base.InititializeSame(targetInputFile);

            initColors = Kmeans.GetDominanteColors(targetBitmap, k);
        }
Code example #5
File: UnitTest1.cs Project: SujuM/PaintPalette
        public void TestNumberColors()
        {
            Kmeans kmeans = new Kmeans();

            List <Color> colors = Palette.GetColors(5, PATH, kmeans);

            Assert.AreEqual(5, colors.Count);

            colors = Palette.GetColors(7, PATH, kmeans);

            Assert.AreEqual(7, colors.Count);
        }
Code example #6
        protected void Page_Load(object sender, EventArgs e)
        {
            Response.Write("\n开始 k-means 聚类(clustering)\n");

            // real data likely to come from a text file or SQL
            // the data to cluster is two-dimensional: a person's height and weight
            // each entry describes one person's height (inches) and weight (kilograms)
            double[][] rawData = new double[20][];
            rawData[0]  = new double[] { 65.0, 220.0 };
            rawData[1]  = new double[] { 73.0, 160.0 };
            rawData[2]  = new double[] { 59.0, 110.0 };
            rawData[3]  = new double[] { 61.0, 120.0 };
            rawData[4]  = new double[] { 75.0, 150.0 };
            rawData[5]  = new double[] { 67.0, 240.0 };
            rawData[6]  = new double[] { 68.0, 230.0 };
            rawData[7]  = new double[] { 70.0, 220.0 };
            rawData[8]  = new double[] { 62.0, 130.0 };
            rawData[9]  = new double[] { 66.0, 210.0 };
            rawData[10] = new double[] { 77.0, 190.0 };
            rawData[11] = new double[] { 75.0, 180.0 };
            rawData[12] = new double[] { 74.0, 170.0 };
            rawData[13] = new double[] { 70.0, 210.0 };
            rawData[14] = new double[] { 61.0, 110.0 };
            rawData[15] = new double[] { 58.0, 100.0 };
            rawData[16] = new double[] { 66.0, 230.0 };
            rawData[17] = new double[] { 59.0, 120.0 };
            rawData[18] = new double[] { 68.0, 210.0 };
            rawData[19] = new double[] { 61.0, 130.0 };

            Response.Write("需要聚类的数据如下:</br>");
            Response.Write("&nbsp;&nbsp&nbsp&nbsp身高&nbsp;&nbsp&nbsp&nbsp体重</br>");
            Response.Write("-------------------</br>");
            ShowData(rawData, 2, true);
            // first, decide how many clusters to partition the data into
            int numClusters = 3;

            Response.Write("需要聚类的目标簇数: " + numClusters + "<br>");

            int[] clustering = Kmeans.Cluster(rawData, numClusters); // this is it

            Response.Write("K-means 聚类结束e<br>");

            //Response.Write("最终划分的结果:"+"<br>");
            ///ShowVector(clustering, true);

            Response.Write("原始数据被聚类之后的结果:<br>");
            ShowClustered(rawData, clustering, numClusters, 1);

            //Response.Write("\nEnd k-means clustering demo\n");
        }
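The demo above only shows the call site, Kmeans.Cluster(rawData, numClusters), on jagged double[][] data; the clustering routine itself lives elsewhere in that project. For orientation, here is a minimal, self-contained sketch of a static method with that call shape, implementing standard Lloyd-style k-means (the KmeansSketch class name, the fixed random seed, and the handling of empty clusters are illustrative assumptions, not the project's code):

using System;

public static class KmeansSketch
{
    // Hypothetical stand-in for the Kmeans.Cluster(rawData, numClusters) call above:
    // random initial assignment, then alternate mean-update and reassignment until stable.
    public static int[] Cluster(double[][] rawData, int numClusters)
    {
        var rnd = new Random(0);           // fixed seed for reproducibility (an assumption)
        int n   = rawData.Length;
        int dim = rawData[0].Length;

        // Initial assignment: the first numClusters points seed one cluster each,
        // the remaining points are assigned at random.
        int[] clustering = new int[n];
        for (int i = 0; i < n; i++)
        {
            clustering[i] = i < numClusters ? i : rnd.Next(numClusters);
        }

        double[][] means = new double[numClusters][];
        bool changed = true;

        while (changed)
        {
            changed = false;

            // Update step: each mean becomes the centroid of its assigned points.
            for (int c = 0; c < numClusters; c++)
            {
                means[c] = new double[dim];
                int count = 0;
                for (int i = 0; i < n; i++)
                {
                    if (clustering[i] != c) continue;
                    for (int d = 0; d < dim; d++) means[c][d] += rawData[i][d];
                    count++;
                }
                // Empty clusters are simply left at the origin in this sketch.
                if (count > 0)
                {
                    for (int d = 0; d < dim; d++) means[c][d] /= count;
                }
            }

            // Assignment step: move each point to its nearest mean (squared Euclidean distance).
            for (int i = 0; i < n; i++)
            {
                int best = 0;
                double bestDist = double.MaxValue;
                for (int c = 0; c < numClusters; c++)
                {
                    double dist = 0.0;
                    for (int d = 0; d < dim; d++)
                    {
                        double diff = rawData[i][d] - means[c][d];
                        dist += diff * diff;
                    }
                    if (dist < bestDist)
                    {
                        bestDist = dist;
                        best = c;
                    }
                }
                if (clustering[i] != best)
                {
                    clustering[i] = best;
                    changed = true;
                }
            }
        }

        return clustering;
    }
}

Applied to the 20 height/weight rows above with numClusters = 3, it returns an int[] of the same shape that ShowClustered consumes; the exact grouping depends on the initial assignment.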
Code example #7
        static void Main(string[] args)
        {
            var parser = new Parser();
            var data   = parser.Parse(',', @"./Data/WineData.csv");

            var iterations = 100;
            var clusters   = 4;

            var kMeans = new Kmeans(data, iterations, clusters);

            kMeans.Run();
            kMeans.Print();

            Console.Read();
        }
Code example #8
File: MainForm.cs Project: aiex718/ML_Learning
        public MainForm(Knn <RawImage28x28> knn, Kmeans <RawImage28x28> kmeansTrainResult)
        {
            InitializeComponent();

            if (knn != null)
            {
                Knn = knn;
                SetupKnn();
            }

            if (kmeansTrainResult != null)
            {
                Kmeans = kmeansTrainResult;
                SetupKmeans();
            }
        }
Code example #9
        public void execute()
        {
            List <string> documents = FileUtils.read(String.Concat(System.AppDomain.CurrentDomain.BaseDirectory, @"..\..\Data\input.txt"));

            Kmeans kmeans = new Kmeans();
            List <CentroidVector> centroids = kmeans.start(documents, 3);

            foreach (CentroidVector centroid in centroids)
            {
                foreach (DocumentVector dv in centroid.getVectorCluster())
                {
                    Console.WriteLine(dv.getLabel());
                }
                Console.WriteLine("\n-------------{0}-------------------\n", centroid.getVectorCluster().Count);
                Thread.Sleep(100);
            }
        }
Code example #10
    static void Main(string[] args)
    {
        int n = 2;
        List <List <float> > data = new List <List <float> >();

        data.Add(new List <float> {
            1f, 2f
        });
        data.Add(new List <float> {
            1f, 4f
        });
        data.Add(new List <float> {
            1f, 0f
        });
        data.Add(new List <float> {
            4f, 2f
        });
        data.Add(new List <float> {
            4f, 4f
        });
        data.Add(new List <float> {
            4f, 0f
        });
        data.Add(new List <float> {
            5f, 6f
        });

        Kmeans kc = new Kmeans();

        kc.KmeansCluster(n, data);
        Console.WriteLine("Here's the cluster centers");
        foreach (var c in kc.clusterCenter)
        {
            Console.WriteLine(string.Join(",", c));
        }

        Console.WriteLine("Here's the cluster assignment.");
        for (int i = 0; i < data.Count; ++i)
        {
            Console.WriteLine(string.Format("{0},{1} - {2}",
                                            data[i][0],
                                            data[i][1],
                                            kc.assignedCluster[i]));
        }
    }
Code example #11
File: KmeansVisualize.cs Project: a3geek/Clusterings
        void Awake()
        {
            var nodes = new List <Vector3>();

            var center = this.box.center;
            var size   = this.box.size;

            for (var i = 0; i < this.nodes; i++)
            {
                nodes.Add(new Vector3(
                              Random.Range(center.x - size.x / 2f, center.x + size.x / 2f),
                              Random.Range(center.y - size.y / 2f, center.y + size.y / 2f),
                              Random.Range(center.z - size.z / 2f, center.z + size.z / 2f)
                              ));
            }

            this.kmeans      = new Kmeans(nodes, this.clusters, this.threshold, this.initIteration /*, this.box.center, this.box.size*/);
            this.Calculating = false;

            for (var i = 0; i < this.clusters; i++)
            {
                this.colors.Add(Random.ColorHSV());
            }
        }
Code example #12
        /// <summary>
        /// Create clustering based on the artificial viscosity (mean values)
        /// </summary>
        /// <param name="inputClustering">Input data which has to be a previous clustering</param>
        /// <param name="numOfClusters">Needed by <see cref="Kmeans"/></param>
        /// <param name="initialMeans">Needed by <see cref="Kmeans"/></param>
        /// <returns>
        /// Clustering as <see cref="MultidimensionalArray"/>
        /// [0]: x, [1]: y, [2]: data, [3]: cellToCluster (e.g. cell 0 is in cluster 1), [4]: local cell index
        /// </returns>
        public MultidimensionalArray CreateClustering_AV(MultidimensionalArray inputClustering, int numOfClusters, double[] initialMeans)
        {
            Console.WriteLine("CreateClustering_AV: START");

            // Get AV values
            var avField     = this.Session.Timesteps.Last().Fields.Where(f => f.Identification == "artificialViscosity").SingleOrDefault();
            int numOfPoints = inputClustering.Lengths[0];

            double[] data = new double[numOfPoints];
            for (int i = 0; i < data.Length; i++)
            {
                data[i] = avField.GetMeanValue((int)inputClustering[i, 4]);
            }

            // Kmeans
            Kmeans kmeans = new Kmeans(data, numOfClusters, initialMeans);

            int[] cellToCluster = kmeans.Cluster();

            // Store values
            MultidimensionalArray clustering = MultidimensionalArray.Create(data.Length, inputClustering.Lengths[1]);

            for (int i = 0; i < numOfPoints; i++)
            {
                clustering[i, 0] = inputClustering[i, 0];      // x
                clustering[i, 1] = inputClustering[i, 1];      // y
                clustering[i, 2] = data[i];                    // data value
                clustering[i, 3] = cellToCluster[i];           // cellToCluster (e.g. cell 0 is in cluster 1)
                clustering[i, 4] = inputClustering[i, 4];      // local cell index
            }
            _clusterings.Add(clustering);

            Console.WriteLine("CreateClustering_AV: END");

            return clustering;
        }
Code example #13
File: Program.cs Project: aiex718/ML_Learning
        static void Main(string[] args)
        {
            VectorCollection <RawImage28x28> Dataset = null;
            Knn <RawImage28x28> knn = null;
            KmeansTrainResult <RawImage28x28> KmeansTrainResult = null;
            Kmeans <RawImage28x28>            kmeans            = null;

            //CheckFiles
            if (File.Exists(MnistPixelFilePath) == false)
            {
                MnistPixelFilePath = Environment.CurrentDirectory + "\\train-images.idx3-ubyte";
            }

            if (File.Exists(MnistLabelFilePath) == false)
            {
                MnistLabelFilePath = Environment.CurrentDirectory + "\\train-labels.idx1-ubyte";
            }

            if (File.Exists(KmeansTrainResultFilePath) == false)
            {
                KmeansTrainResultFilePath = Environment.CurrentDirectory + "\\KmeansTrainResult.json";
            }


            //LoadFiles
            if (File.Exists(MnistPixelFilePath) && File.Exists(MnistLabelFilePath))
            {
                var MnistDataSet = MnistDataSetLoader.LoadData(MnistPixelFilePath, MnistLabelFilePath);
                Dataset = new VectorCollection <RawImage28x28>(MnistDataSet);
            }

            if (File.Exists(KmeansTrainResultFilePath))
            {
                KmeansTrainResult = new KmeansTrainResult <RawImage28x28>(KmeansTrainResultFilePath);
            }

            if (Dataset != null)
            {
                var knnTrainer = new KnnTrainer <RawImage28x28>(Dataset);
                knn = new Knn <RawImage28x28>(knnTrainer.Train());

                if (KmeansTrainResult == null)
                {
                    Console.WriteLine("Can't find Kmeans train data.");
                    KmeansTrainResult = AskKmeansTrainData(false);
                }
                else if (KmeansTrainResult != null)
                {
                    Console.WriteLine("Found Kmeans train data.");
                    Console.WriteLine("[U]se trained data or [t]rain again? [u/t]");
                    if (Console.ReadLine().ToLower().Contains('t'))
                    {
                        KmeansTrainResult = null;
                        KmeansTrainResult = AskKmeansTrainData(true);
                    }
                }
            }
            else
            {
                Console.WriteLine("Can't find Knn train data, ignored");
                Console.WriteLine("Press any key to continue.");
                Console.ReadKey();
            }

            if (KmeansTrainResult != null)
            {
                kmeans = new Kmeans <RawImage28x28>(KmeansTrainResult);
            }

            if (kmeans == null && knn == null)
            {
                Console.WriteLine("Fatal error: Can not find any trained data.");
                Console.WriteLine("Press any key to exit...");
                Console.ReadKey();
                return;
            }

            MainForm form = new MainForm(knn, kmeans);

            Application.EnableVisualStyles();
            Application.Run(form);
        }
Code example #14
        public void Run()
        {
            DateTime expSt = DateTime.Now;

            Console.WriteLine("========================================================================");
            Console.WriteLine(" Metoda porównania: " + comparators[comparatorId].GetType());
            Console.WriteLine("========================================================================");
            WordsStats stats = new WordsStats(Words.ComputeWords(DataStore.Instance.GetAllNews()));

            stats.Compute();

            List <string> experimentTopics;

            if (topics != null)
            {
                experimentTopics = new List <string>();
                experimentTopics.AddRange(topics);
            }
            else
            {
                experimentTopics = GetRandomTopics(topicCount);
            }


            Group initialGroup = GroupFactory.CreateGroupWithNewsFromTopics(experimentTopics);


            Console.WriteLine("========================================================================");
            Console.WriteLine("Topiki w grupie początkowej:");
            foreach (string topic in experimentTopics)
            {
                Console.WriteLine(topic + " [" + Util.topicCountInGroup(topic, initialGroup) + "]");
            }

            Console.WriteLine("Rozmiar grupy: " + initialGroup.Count);
            ;

            DateTime start;
            TimeSpan t1, t2;


            Hierarchical hr = new Hierarchical(comparators[comparatorId], stats, maxLen);
            Kmeans       km = new Kmeans(comparators[comparatorId], stats, maxLen);

            Console.WriteLine("========================================================================");

            if (!onlyKMeans)
            {
                start = DateTime.Now;
                List <Group> hierarchicalResult = hr.Compute(initialGroup, topicCount != 0 ? topicCount : (uint)topics.Length,
                                                             Hierarchical.Distance.AVG);
                t1 = (DateTime.Now - start);
                PrintStats("Hierachical", t1, hierarchicalResult, 0);
            }

            start = DateTime.Now;
            List <Group> kMeansResult = km.Compute(initialGroup, topicCount != 0 ? topicCount : (uint)topics.Length, kMeansIterations);

            t2 = (DateTime.Now - start);
            PrintStats("KMeans", t2, kMeansResult, kMeansIterations);


            Console.WriteLine("========================================================================");
            Console.WriteLine("Czas działania: " + (DateTime.Now - expSt));
        }