Example #1
        public static void AddClusterInfo(ref SortedDictionary <double, RunOutput> dict)
        {
            if (dict.Count <= 3)
            {
                return;
            }

            Accord.Math.Random.Generator.Seed = 0;

            double[][] metrics = new double[dict.Count][];
            int        i       = 0;

            foreach (RunOutput t in dict.Values)
            {
                metrics[i++] = t.metricDepths.ToArray();
            }

            // Create a new K-Means algorithm
            KMeans kmeans = new KMeans(k: dict.Count / 3);

            // Compute and retrieve the data centroids
            KMeansClusterCollection clusters = kmeans.Learn(metrics);

            // Use the centroids to partition all the data
            int[] labels = clusters.Decide(metrics);

            int j = 0;

            foreach (RunOutput v in dict.Values)
            {
                v.cluster = labels[j++];
            }
        }
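The Learn/Decide pattern above recurs in nearly every example on this page. Below is a minimal, self-contained sketch of that pattern (the sample values are invented for illustration; it assumes the Accord.MachineLearning and Accord.Math packages used throughout these examples):

        using Accord.MachineLearning;

        class KMeansSketch
        {
            static void Main()
            {
                // Fix the seed so the random centroid initialization is reproducible.
                Accord.Math.Random.Generator.Seed = 0;

                // Two obvious groups of 2-D points (made-up data).
                double[][] observations =
                {
                    new double[] { 1.0, 2.0 },
                    new double[] { 1.1, 1.9 },
                    new double[] { 8.0, 8.2 },
                    new double[] { 7.9, 8.1 },
                };

                // Fit two centroids, then assign each observation to its nearest centroid.
                KMeans kmeans = new KMeans(k: 2);
                KMeansClusterCollection clusters = kmeans.Learn(observations);
                int[] labels = clusters.Decide(observations);
            }
        }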
Example #2
        static void runKMeans(DistanceObj[] groups)
        {
            int numGroups = groups.Length;

            // Declare and initialize the observation array for K-Means
            double[][] observations = new double[numGroups][];

            for (int i = 0; i < observations.Length; i++)
            {
                observations[i]    = new double[2];
                observations[i][0] = groups[i].coords[0];
                observations[i][1] = groups[i].coords[1];
            }

            KMeans km = new KMeans(7);

            KMeansClusterCollection clusters = km.Learn(observations);

            int[] labels = clusters.Decide(observations);

            for (int i = 0; i < labels.Length; i++)
            {
                Console.WriteLine(groups[i].address + ": " + labels[i]);
            }
        }
Example #3
        private double ComputeWk(double[][][] clusterData, KMeansClusterCollection clusters)
        {
            // sum of pairwise distances
            double Wk = 0;

            double[] D = new double[clusterData.Length];

            for (int r = 0; r < clusterData.Length; r++)
            {
                if (clusterData[r] != null)
                {
                    D[r] = 0;
                    for (int i = 0; i < clusterData[r].Length; i++)
                    {
                        for (int j = 0; j < i; j++)
                        {
                            D[r] += clusters.Distance.Distance(clusterData[r][i], clusterData[r][j]);
                        }
                    }

                    if (clusterData[r].Length > 0)
                    {
                        Wk += D[r] / (2 * clusterData[r].Length);
                    }
                }
            }

            return(Wk);
        }
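A note on what ComputeWk accumulates (this interpretation is an editorial addition, not stated in the original source): summing the pairwise distances inside each cluster and scaling by 1/(2 n_r) matches, up to a constant factor, the pooled within-cluster dispersion used by the gap statistic,

        W_k = \sum_{r=1}^{k} \frac{D_r}{2 n_r}, \qquad D_r = \sum_{i < j,\; i, j \in C_r} d(x_i, x_j)

where C_r is cluster r, n_r = |C_r|, and d is the metric exposed by clusters.Distance; the inner double loop over j < i counts each unordered pair exactly once.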
Example #4
 public void writeModel(string outPath)
 {
     if (kmeans == null)
     {
         buildModel();
     }
     outmodelpath = outPath;
     using (System.IO.StreamWriter sw = new System.IO.StreamWriter(outmodelpath))
     {
         sw.WriteLine(dataPrepBase.modelTypes.StrataCovCorr.ToString());
         sw.WriteLine(InPath);
         sw.WriteLine(String.Join(",", VariableFieldNames));
         sw.WriteLine(n.ToString());
         sw.WriteLine(prop.ToString());
         sw.WriteLine(k.ToString());
         sw.WriteLine(String.Join(",", lbl.ToArray()));
         KMeansClusterCollection gCol = kmeans.Clusters;
         for (int i = 0; i < gCol.Count; i++)
         {
             KMeansCluster gClust = gCol[i];
             sw.WriteLine(String.Join(",", (from double d in gClust.Mean select d.ToString()).ToArray()));
             sw.WriteLine(String.Join(",", (from double d in gClust.Covariance select d.ToString()).ToArray()));
             sw.WriteLine(gClust.Proportion.ToString());
         }
         sw.Close();
     }
 }
Example #5
        private void kmeansReport(Forms.RunningProcess.frmRunningProcessDialog rd)
        {
            KMeansClusterCollection gCol = kmeans.Clusters;

            for (int i = 0; i < gCol.Count; i++)
            {
                KMeansCluster gClust = gCol[i];
                double[]      mns    = gClust.Mean;
                double[,] cov  = gClust.Covariance;
                double[,] corr = getCorr(cov);
                rd.addMessage("\n\nStratum " + Labels[i] + ":\nMeans: " + String.Join(", ", (from double d in mns select d.ToString()).ToArray()) + "\nCovariance:");
                for (int j = 0; j < VariableFieldNames.Length; j++)
                {
                    string[] covStrArr = new string[VariableFieldNames.Length];
                    for (int l = 0; l < covStrArr.Length; l++)
                    {
                        covStrArr[l] = cov[l, j].ToString();
                    }
                    rd.addMessage("\n" + String.Join(",", covStrArr));
                }
                rd.addMessage("\nCorr:");
                for (int j = 0; j < VariableFieldNames.Length; j++)
                {
                    string[] corrStrArr = new string[VariableFieldNames.Length];
                    for (int l = 0; l < corrStrArr.Length; l++)
                    {
                        corrStrArr[l] = corr[l, j].ToString();
                    }
                    rd.addMessage("\n" + String.Join(",", corrStrArr));
                }
            }
        }
Example #6
        private void setKMeansCluster(System.IO.StreamReader sr)
        {
            KMeans kmeans = new KMeans(k);
            KMeansClusterCollection kmeansColl = kmeans.Clusters;

            for (int i = 0; i < k; i++)
            {
                double[] mns       = (from s in (sr.ReadLine().Split(new char[] { ',' })) select System.Convert.ToDouble(s)).ToArray();
                string[] covVlsStr = sr.ReadLine().Split(new char[] { ',' });
                double   p         = System.Convert.ToDouble(sr.ReadLine());
                double[,] cov = new double[VariableFieldNames.Length, VariableFieldNames.Length];
                for (int j = 0; j < VariableFieldNames.Length; j++)
                {
                    for (int l = 0; l < VariableFieldNames.Length; l++)
                    {
                        int indexVl = (j * VariableFieldNames.Length) + l;
                        cov[l, j] = System.Convert.ToDouble(covVlsStr[indexVl]);
                    }
                }

                KMeansCluster kc = new KMeansCluster(kmeansColl, i);
                kc.Mean       = mns;
                kc.Covariance = cov;
                kc.Proportion = p;
            }
            clusterCollection = kmeansColl;
            model             = kmeans;
        }
Example #7
        private void test2ToolStripMenuItem_Click(object sender, EventArgs e)
        {
            ResponseObservation obs = new ResponseObservation();

            obs.Load(@"NewObservation.bin");

            List <Unit> allUnits  = obs.Observation.RawData.Units.ToList();
            ImageData   heightMap = new ImageData();

            heightMap.Load(@"TerrainHeight.bin");
            ImageData placeMap = new ImageData();

            placeMap.Load(@"PlacementGrid.bin");
            placeMap = SC2Bot.CreatePlaceableImageData(placeMap, allUnits);
            // collect base expansion
            List <Unit> gas = allUnits.GetUnits(UNIT_TYPEID.NEUTRAL_VESPENEGEYSER);

            //List<Point2D> points = allUnits.FindBaseLocation();
            Console.WriteLine("" + gas.Count);
            double[][] data = new double[gas.Count][];
            for (int i = 0; i < gas.Count; i++)
            {
                data[i] = new double[] { gas[i].Pos.X, gas[i].Pos.Y };
            }
            KMeans kmeans = new KMeans(k: gas.Count / 2);
            KMeansClusterCollection clusters = kmeans.Learn(data);
        }
Example #8
        public void Train(List <Person> trainingPeople, int skillSetSize)
        {
            double[][] inputs = _dataPointService.GenerateDataPointsFromPeople(trainingPeople, skillSetSize);
            KMeans     kMeans = new KMeans(2);

            _clustersCollection = kMeans.Learn(inputs);

            trainingPredictions = _clustersCollection.Decide(inputs);
        }
Example #9
        private int[] clusterKMeans(double[][] observations)
        {
            Accord.Math.Random.Generator.Seed = 0;
            KMeans kmeans = new KMeans(9);
            KMeansClusterCollection clusters = kmeans.Learn(observations);

            int[] labels = clusters.Decide(observations);

            return(labels);
        }
Example #10
        private void binaryReport(Forms.RunningProcess.frmRunningProcessDialog rd)
        {
            KMeansClusterCollection gCol = (KMeansClusterCollection)clusterCollection;

            for (int i = 0; i < gCol.Count; i++)
            {
                KMeansCluster gClust = gCol[i];
                double[]      mns    = gClust.Mean;
                rd.addMessage("\n\nCluster " + Labels[i] + ":\nMeans: " + String.Join(", ", (from double d in mns select d.ToString()).ToArray()));
            }
        }
Example #11
        private int[] CreateKClusters(int k, double[][] locations)
        {
            Accord.Math.Random.Generator.Seed = 0;

            // Create a new K-Means algorithm with k clusters
            KMeans kmeans = new KMeans(k);

            KMeansClusterCollection clusters = kmeans.Learn(locations);

            int[] labels = clusters.Decide(locations);
            return(labels);
        }
Example #12
        private void writeKmeansData(System.IO.StreamWriter sw)
        {
            KMeansClusterCollection gCol = (KMeansClusterCollection)clusterCollection;

            for (int i = 0; i < gCol.Count; i++)
            {
                KMeansCluster gClust = gCol[i];
                sw.WriteLine(String.Join(",", (from double d in gClust.Mean select d.ToString()).ToArray()));
                sw.WriteLine(String.Join(",", (from double d in gClust.Covariance select d.ToString()).ToArray()));
                sw.WriteLine(gClust.Proportion.ToString());
            }
        }
Example #13
    // Called by LandMap.GetZones (), returns number of subregions
    public int ClusterLocationsAccordKMeans(MapPoint[,] points, TerrainVerticesDatabase vertDatabase)
    {
        // K-means cluster algorithm to separate locations in the regions

        int regionId = 0;

        for (int isleId = 0; isleId < regions.Count; isleId++)
        {
            MapRegion region = regions[isleId];

            double[][] tileLocations = new double[region.turf.Count][];

            for (int i = 0; i < tileLocations.Length; i++)
            {
                tileLocations[i] = new double[3];

                TerrainVertData vertData = vertDatabase.GetVertDataFromRegionTile(region.turf[i], isleId);
                //LoggerTool.Post ("Requesting " + region.turf[i].ToString ());
                if (vertData != null)
                {
                    tileLocations[i][0] = region.turf[i].x;
                    tileLocations[i][1] = vertData.inlandPosition;
                    tileLocations[i][2] = region.turf[i].y;
                }
                else
                {
                    LoggerTool.Post("Null from VertDB for " + region.turf[i].ToString());
                    tileLocations[i][0] = 0;
                    tileLocations[i][1] = 0;
                    tileLocations[i][2] = 0;
                }
            }

            int k = InitializeNumOfK(tileLocations.Length);
            Debug.Log(k + " centroid(s)");

            KMeans kmeans = new KMeans(k);
            KMeansClusterCollection clusters = kmeans.Learn(tileLocations);

            int[] labels = clusters.Decide(tileLocations);

            Debug.Log("Number of labels (clusters) = " + labels.Length);
            for (int i = 0; i < labels.Length; i++)
            {
                points[(int)tileLocations[i][0], (int)tileLocations[i][2]].areaValue = regionId + labels[i];
            }

            regionId += k;
        }

        return(regionId);
    }
Example #14
        public double[] computeNew(object category)
        {
            string cat      = category.ToString();
            int    catIndex = lbl.IndexOf(cat);
            int    np       = ((VariableFieldNames.Length * VariableFieldNames.Length) - VariableFieldNames.Length) / 2;

            double[] pValues = new double[np];
            if (catIndex == -1)
            {
                return(pValues);
            }
            KMeansClusterCollection gCol   = kmeans.Clusters;
            KMeansCluster           gClust = gCol[catIndex];

            double[] mns      = gClust.Mean;
            double[] var      = new double[mns.Length];
            double   nSample  = gClust.Proportion * N;
            double   seAdjust = Math.Sqrt(2 * 1 / nSample);

            for (int j = 0; j < mns.Length; j++)
            {
                var[j] = gClust.Covariance[j, j];
            }
            int cnt  = 1;
            int pCnt = 0;

            for (int j = 0; j < mns.Length - 1; j++)
            {
                for (int k = cnt; k < mns.Length; k++)
                {
                    double mD    = mns[j] - mns[k];
                    double pSD   = Math.Sqrt((var[j] + var[k]) / 2);
                    double se    = pSD * seAdjust;
                    double tStat = mD / se;
                    Accord.Statistics.Distributions.Univariate.TDistribution tDist = new Accord.Statistics.Distributions.Univariate.TDistribution(2 * nSample - 2);
                    double cdf    = tDist.DistributionFunction(tStat);
                    double pValue = 0;
                    if (tStat > 0)
                    {
                        pValue = (1 - cdf) * 2;
                    }
                    else
                    {
                        pValue = (cdf * 2);
                    }
                    pValues[pCnt] = pValue;
                    pCnt++;
                }
                cnt += 1;
            }
            return(pValues);
        }
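Reading the loop above back into formula form may help: for each pair of variables (j, k) within the chosen cluster, the code performs a two-sided, equal-sample-size t-test on the difference of means, using the cluster's diagonal covariance entries as variances and n = Proportion * N as the per-variable sample size,

        s_p = \sqrt{(s_j^2 + s_k^2)/2}, \qquad
        t_{jk} = \frac{m_j - m_k}{s_p \sqrt{2/n}}, \qquad
        p_{jk} = 2\,\bigl(1 - F_{2n-2}(\lvert t_{jk} \rvert)\bigr)

where F_{2n-2} is the CDF of Student's t distribution with 2n - 2 degrees of freedom, matching the TDistribution(2 * nSample - 2) constructed inside the loop.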
Example #15
        public void Engine(double[][] observations, int k, ref int[] labels)
        {
            Accord.Math.Random.Generator.Seed = 0;
            KMeans kmeans = new KMeans(k);

            kmeans.UseSeeding    = Seeding.Uniform;
            kmeans.MaxIterations = 0; // no limit
            KMeansClusterCollection clusters = kmeans.Learn(observations);

            double[][] centroids = kmeans.Centroids;
            labels = clusters.Decide(observations);
            double err = kmeans.Error;
        }
Example #16
        public void binary_split_new_method()
        {
            #region doc_sample1
            // Use a fixed seed for reproducibility
            Accord.Math.Random.Generator.Seed = 0;

            // Declare some data to be clustered
            double[][] input = 
            {
                new double[] { -5, -2, -1 },
                new double[] { -5, -5, -6 },
                new double[] {  2,  1,  1 },
                new double[] {  1,  1,  2 },
                new double[] {  1,  2,  2 },
                new double[] {  3,  1,  2 },
                new double[] { 11,  5,  4 },
                new double[] { 15,  5,  6 },
                new double[] { 10,  5,  6 },
            };

            // Create a new binary split with 3 clusters 
            BinarySplit binarySplit = new BinarySplit(3);

            // Learn a data partitioning using the Binary Split algorithm
            KMeansClusterCollection clustering = binarySplit.Learn(input);

            // Predict group labels for each point
            int[] output = clustering.Decide(input);

            // As a result, the first two observations should belong to the
            //  same cluster (thus having the same label). The same should
            //  happen to the next four observations and to the last three.
            #endregion

            Assert.AreEqual(output[0], output[1]);

            Assert.AreEqual(output[2], output[3]);
            Assert.AreEqual(output[2], output[4]);
            Assert.AreEqual(output[2], output[5]);

            Assert.AreEqual(output[6], output[7]);
            Assert.AreEqual(output[6], output[8]);

            Assert.AreNotEqual(output[0], output[2]);
            Assert.AreNotEqual(output[2], output[6]);
            Assert.AreNotEqual(output[0], output[6]);

            int[] labels2 = binarySplit.Clusters.Nearest(input);

            Assert.IsTrue(output.IsEqual(labels2));
        }
Example #17
            public KMeans(int k, Func <double[], double[], double> distance)
            {
                if (k <= 0)
                {
                    throw new ArgumentOutOfRangeException("k");
                }
                if (distance == null)
                {
                    throw new ArgumentNullException("distance");
                }


                this.clusters = new KMeansClusterCollection(k, distance);
            }
Example #18
        public static object ClusterParameterSets(double[,] Data, double[] Scores)
        {
            int nRows = Data.GetLength(0), nCols = Data.GetLength(1);
            int numClusters = (int)Math.Ceiling(Math.Log(nRows));

            double[][] data = new double[nRows][];
            for (int r = 0; r < nRows; ++r)
            {
                data[r] = new double[nCols];
                for (int c = 0; c < nCols; ++c)
                {
                    data[r][c] = Data[r, c];
                }
            }

            KMeans km = new KMeans(numClusters);
            KMeansClusterCollection kcc = km.Learn(data);

            // Calculate the averages and stdevs of the cluster scores.
            SummaryContainer[] summaries = new SummaryContainer[numClusters];
            // Assign every observation to its cluster, then accumulate each row's score
            // in that cluster's summary (assumed intent of the original loop, which only
            // visited the first numClusters scores and ignored the cluster assignments).
            int[] assignments = kcc.Decide(data);
            for (int i = 0; i < numClusters; ++i)
            {
                summaries[i] = new SummaryContainer();
            }

            for (int r = 0; r < nRows; ++r)
            {
                summaries[assignments[r]].Add(Scores[r]);
            }

            // TODO - 19Apr20 - Changed this from old version of Accord, not sure if it's right. Needs testing.
            double[,] ret = new double[numClusters, nCols + 6];
            for (int r = 0; r < numClusters; ++r)
            {
                for (int c = 0; c < nCols; ++c)
                {
                    ret[r, c] = kcc[r].Centroid[c];
                }

                ret[r, nCols]     = summaries[r].Count;
                ret[r, nCols + 1] = summaries[r].Min;
                ret[r, nCols + 2] = summaries[r].Max;
                ret[r, nCols + 3] = summaries[r].Average;
                ret[r, nCols + 4] = summaries[r].Stdev;
                ret[r, nCols + 5] = summaries[r].Z;
            }

            return(ret);
        }
Example #19
        /// <summary>
        ///   Initializes the Gaussian Mixture Models using K-Means
        ///   parameters as an initial parameter guess.
        /// </summary>
        ///
        private void btnInitialize_Click(object sender, EventArgs e)
        {
            // Creates and computes a new
            // K-Means clustering algorithm:

            kmeans = new KMeans(k);

            KMeansClusterCollection clustering = kmeans.Learn(observations);

            // Classify all instances in mixture data
            int[] classifications = clustering.Decide(observations);

            // Draw the classifications
            updateGraph(classifications);
        }
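The XML comment above says this k-means run seeds a Gaussian mixture model. As an illustrative follow-up (not part of the original handler; it assumes the GaussianMixtureModel(KMeans) constructor and Learn method available in recent Accord.NET versions), the handoff would look roughly like:

            // Sketch only: refine the k-means solution with expectation-maximization,
            // using the k-means clusters as the initial parameter guess.
            var gmm = new Accord.MachineLearning.GaussianMixtureModel(kmeans);
            var mixture = gmm.Learn(observations);            // EM starting from the k-means result
            int[] memberships = mixture.Decide(observations); // posterior cluster assignments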
Example #20
        private void makeKMeans()
        {
            kmeans = new KMeans(k);
            KMeansClusterCollection kmeansColl = kmeans.Clusters;

            for (int i = 0; i < k; i++)
            {
                double[] mns = meansLst[i];
                double   p   = proportionsLst[i];
                double[,] scov = scovLst[i];
                KMeansCluster kc = new KMeansCluster(kmeansColl, i);
                kc.Mean       = mns;
                kc.Covariance = scov;
                kc.Proportion = p;
            }
        }
Example #21
        public void AccordKMeans()
        {
            Stopwatch stopwatch = Stopwatch.StartNew();

            KMeans kmeans = new KMeans(50);

            double[][] v = Input.ToRowArrays();
            KMeansClusterCollection clusters = kmeans.Learn(v);

            AssignedClusters = clusters.Decide(v).ToList();
            BestClustering   = AssignedClusters;
            Centroids        = clusters.Centroids.Select(x => CreateVector.Dense(x)).ToList();
            stopwatch.Stop();
            timer = stopwatch.ElapsedTicks;
        }
Example #22
        private void setBinaryCluster(System.IO.StreamReader sr)
        {
            BinarySplit             bSplit     = new BinarySplit(k);
            KMeansClusterCollection kmeansColl = bSplit.Clusters;

            for (int i = 0; i < k; i++)
            {
                double[] mns = (from s in (sr.ReadLine().Split(new char[] { ',' })) select System.Convert.ToDouble(s)).ToArray();
                sr.ReadLine();
                double        p  = System.Convert.ToDouble(sr.ReadLine());
                KMeansCluster kc = new KMeansCluster(kmeansColl, i);
                kc.Mean       = mns;
                kc.Proportion = p;
            }
            clusterCollection = kmeansColl;
            model             = bSplit;
        }
Example #23
        private static double[][][] ClusterPoints(double[][] dataset, int k, KMeansClusterCollection clusters)
        {
            // points in each cluster
            double[][][] clusterData = new double[k][][];
            for (int i = 0; i < dataset.Length; i++)
            {
                int decision = clusters.Decide(dataset[i]);

                if (clusterData[decision] == null)
                {
                    clusterData[decision] = new double[][] { };
                }
                Array.Resize(ref clusterData[decision], clusterData[decision].Length + 1);
                clusterData[decision][clusterData[decision].Length - 1] = dataset[i];
            }

            return(clusterData);
        }
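Array.Resize inside the loop reallocates a cluster's array once per added point, which is quadratic in cluster size. Here is a sketch (not from the original source) of the same grouping using List<double[]> buckets instead; it assumes using System.Collections.Generic and System.Linq, and it returns empty arrays rather than null for empty clusters:

        private static double[][][] ClusterPointsWithLists(double[][] dataset, int k, KMeansClusterCollection clusters)
        {
            // One growable bucket per cluster.
            var buckets = new List<double[]>[k];
            for (int i = 0; i < k; i++)
            {
                buckets[i] = new List<double[]>();
            }

            foreach (double[] point in dataset)
            {
                // Decide(double[]) returns the index of the nearest cluster, as in the original.
                buckets[clusters.Decide(point)].Add(point);
            }

            // Convert back to the jagged-array shape the callers expect.
            return buckets.Select(b => b.ToArray()).ToArray();
        }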
Example #24
        /* Perceptron training
         * public void Learn()
         * {
         *
         *  int counter = 0;
         *  while (true)
         *  {
         *      var watch = new Stopwatch();
         *      watch.Start();
         *      while (watch.ElapsedMilliseconds < 200)
         *      {
         *          LearningIteration();
         *          AccountError();
         *          counter++;
         *          if (counter > IterationsCount) break;
         *
         *      }
         *      watch.Stop();
         *      //LearningEnds();
         *      if (counter > IterationsCount) break;
         *
         *  }
         * }
         *
         * protected virtual void LearningIteration()
         * {
         *  teacher.RunEpoch(Inputs,Outputs);
         *  /*
         *  teachAtt.RunEpoch(LearnMed, LearnAtt);
         *  teachMed.RunEpoch(LearnAtt, LearnMed);
         *
         * }
         *
         * protected virtual void AccountError()
         * {
         *  //LearningErrors.Enqueue(GetError(LearningInputs, LearningAnswers));
         * }
         *
         * protected virtual void LearningEnds()
         * {
         *
         *  EndLearnAtt = LearnMed.Select(z => netAtt.Compute(z)[0]).ToArray();
         *  EndLearnMed = LearnAtt.Select(z => netMed.Compute(z)[0]).ToArray();
         *
         * }
         *
         * protected Double GetError(Double[][] Inputs, Double[][] Answers,ActivationNetwork network)
         * {
         *  Double sum = 0;
         *  for (int i = 0; i < Inputs.Length; i++)
         *  {
         *      sum += Math.Abs(network.Compute(Inputs[i])[0] - Answers[i][0]);
         *  }
         *  sum /= Inputs.Length;
         *  return sum;
         * }
         */

        private void Clusterisation()
        {
            Console.WriteLine("CL");
            Data = new Double[sizeFor][];
            for (int i = 0; i < sizeFor; i++)
            {
                Data[i]      = new Double[2];
                Data[i][Att] = dataAtt[i];
                Data[i][Med] = dataMed[i];
            }
            clusters = kmeans.Learn(Data);
            for (int k = 0; k < sizeFor; k++)
            {
                addPoint((int)dataAtt[k], (int)dataMed[k]);
            }
            Inputs = GenerateInputs().ToArray();
            Refr   = true;
        }
Example #25
        public void buildModel(string modelPath)
        {
            outmodelpath = modelPath;
            using (System.IO.StreamReader sr = new System.IO.StreamReader(outmodelpath))
            {
                dataPrepBase.modelTypes mType = (dataPrepBase.modelTypes)Enum.Parse(typeof(dataPrepBase.modelTypes), sr.ReadLine());
                if (mType != dataPrepBase.modelTypes.TTEST)
                {
                    System.Windows.Forms.MessageBox.Show("Not a TTest Model!!", "Error", System.Windows.Forms.MessageBoxButtons.OK, System.Windows.Forms.MessageBoxIcon.Error);
                    return;
                }
                inpath             = sr.ReadLine();
                stratafld          = sr.ReadLine();
                VariableFieldNames = sr.ReadLine().Split(new char[] { ',' });
                n      = System.Convert.ToInt32(sr.ReadLine());
                prop   = System.Convert.ToDouble(sr.ReadLine());
                k      = System.Convert.ToInt32(sr.ReadLine());
                lbl    = sr.ReadLine().Split(new char[] { ',' }).ToList();
                kmeans = new KMeans(k);
                KMeansClusterCollection kmeansColl = kmeans.Clusters;
                for (int i = 0; i < k; i++)
                {
                    double[] mns       = (from s in (sr.ReadLine().Split(new char[] { ',' })) select System.Convert.ToDouble(s)).ToArray();
                    string[] covVlsStr = sr.ReadLine().Split(new char[] { ',' });
                    double   p         = System.Convert.ToDouble(sr.ReadLine());
                    double[,] cov = new double[VariableFieldNames.Length, VariableFieldNames.Length];
                    for (int j = 0; j < VariableFieldNames.Length; j++)
                    {
                        for (int l = 0; l < VariableFieldNames.Length; l++)
                        {
                            int indexVl = (j * VariableFieldNames.Length) + l;
                            cov[l, j] = System.Convert.ToDouble(covVlsStr[indexVl]);
                        }
                    }

                    KMeansCluster kc = new KMeansCluster(kmeansColl, i);
                    kc.Mean       = mns;
                    kc.Covariance = cov;
                    kc.Proportion = p;
                }
                sr.Close();
            }
        }
Example #26
        private static void splitCluster(int index)
        {
            //k-means with k = 2 on clusters[index].members
            //if clustArr[i] == 1, add the group to a temp list for the new cluster and delete it from clusters[index]
            //create a new Cluster from the temp list and append it to clusters (the for loop in findLargeClusters will adapt and check it later for > 15)
            int numGroups = clusters[index].members.Count;

            double[][] observations = new double[numGroups][];

            for (int i = 0; i < observations.Length; i++)
            {
                observations[i]    = new double[2];
                observations[i][0] = clusters[index].members[i].destination.coords[0];
                observations[i][1] = clusters[index].members[i].destination.coords[1];
            }

            KMeans km = new KMeans(2);

            KMeansClusterCollection clust = km.Learn(observations);

            int[] clustArr = clust.Decide(observations);

            //if a group is in the second of the two clusters, we will put it in a new List and delete it from the old one
            List <Group> forNewCluster = new List <Group>();

            for (int i = clustArr.Length - 1; i >= 0; i--)
            {
                if (clustArr[i] == 1)
                {
                    forNewCluster.Add(clusters[index].members[i]);
                    clusters[index].members.RemoveAt(i);
                }
            }

            Cluster newCluster = new Cluster(forNewCluster);

            //update the cluster attributes in each group for the new cluster
            clusters.Add(newCluster);
            foreach (Group g in clusters[clusters.Count() - 1].members)
            {
                g.cluster = clusters.Count() - 1;
            }
        }
Example #27
        static void Main(string[] args)
        {
            // sample input
            var sampleSet = new double[][]
            {
                new double[] { 1, 9 },
                new double[] { 2, 8 },
                new double[] { 3, 7 },
                new double[] { 4, 6 },
                new double[] { 5, 5 }
            };

            KMeans kmeans = new KMeans(2);
            KMeansClusterCollection clusters = kmeans.Learn(sampleSet);

            Console.WriteLine("\n\n* Clusters: {0}", String.Join(",", clusters.Decide(sampleSet)));

            Console.WriteLine("\n\n\n\nDONE!!");
            Console.ReadKey();
        }
Example #28
 void train()
 {
     clusters = kmeans.Learn(inputList.ToArray());
     // make an empty picture
     {
         Bitmap mask = new Bitmap(300, 300);
         using (Graphics g = Graphics.FromImage(mask))
         {
             g.FillRectangle(new SolidBrush(Color.Black), new Rectangle(0, 0, mask.Width, mask.Height));
         }
         //Bitmap g = Grayscale.CommonAlgorithms.BT709.Apply(mask);
         ImageStatistics stat = new ImageStatistics(Grayscale.CommonAlgorithms.BT709.Apply(mask));
         double[][]      ds   = { new double[] { stat.Gray.Mean, stat.Gray.Median, stat.Gray.StdDev } };
         //Program.logIt(string.Format("{0},{1},{2}", ds[0][0], ds[0][1], ds[0][2]));
         int[] res = clusters.Decide(ds);
         output[0]      = true;
         output[1]      = true;
         output[res[0]] = false;
     }
 }
Example #29
        public static List <Point2D> FindBaseLocation(this List <Unit> self)
        {
            List <Point2D> ret = new List <Point2D>();
            List <Unit>    gas = self.GetUnits(UNIT_TYPEID.NEUTRAL_VESPENEGEYSER);

            double[][] data = new double[gas.Count][];
            for (int i = 0; i < gas.Count; i++)
            {
                data[i] = new double[] { gas[i].Pos.X, gas[i].Pos.Y };
            }
            KMeans kmeans = new KMeans(k: gas.Count / 2);
            KMeansClusterCollection clusters = kmeans.Learn(data);

            for (int i = 0; i < kmeans.Centroids.Length; i++)
            {
                ret.Add(new Point2D {
                    X = (float)kmeans.Centroids[i][0], Y = (float)kmeans.Centroids[i][1]
                });
            }
            return(ret);
        }
Example #30
        private async Task RetrainAsync(Expression <Func <Leg, bool> > predicate)
        {
            await _geocodingDbSync.UpdateAllAsync();

            KMeans kMeans = new KMeans(NumberOfClusters)
            {
                Distance = new GeographicDistance()
            };

            double[][] dataset = GetDataset(predicate == null ?
                                            await _legRepository.ListAsync()
                : await _legRepository.ListAsync(predicate));

            ClusterCollection = await Task.Run(() => kMeans.Learn(dataset));

            IEnumerable <DateTime> startTimes = (await _legRepository.ListAsync(predicate)).Select(leg => leg.StartTime);

            _RetrainedEarliestDate = startTimes.Min();
            _RetrainedLatestDate   = startTimes.Max();
            _LastRetrained         = DateTime.Now;
        }
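GeographicDistance above is presumably a project-specific metric; the only requirement Accord.NET places on it (assuming recent versions, where the Distance property takes an IDistance<double[]>) is implementing that interface. A hypothetical stand-in, not the original class, using the haversine formula over { latitude, longitude } pairs given in degrees:

        using System;
        using Accord.Math.Distances;

        // Illustrative replacement for the project's GeographicDistance.
        public class HaversineDistance : IDistance<double[]>
        {
            private const double EarthRadiusKm = 6371.0;

            public double Distance(double[] x, double[] y)
            {
                double ToRad(double degrees) => degrees * Math.PI / 180.0;

                double dLat = ToRad(y[0] - x[0]);
                double dLon = ToRad(y[1] - x[1]);
                double a = Math.Sin(dLat / 2) * Math.Sin(dLat / 2) +
                           Math.Cos(ToRad(x[0])) * Math.Cos(ToRad(y[0])) *
                           Math.Sin(dLon / 2) * Math.Sin(dLon / 2);

                // Great-circle distance in kilometers.
                return 2 * EarthRadiusKm * Math.Asin(Math.Sqrt(a));
            }
        }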
Example #31
        /// <summary>
        ///     Computes the silhouette width for the given set of clusters and observations.
        /// </summary>
        /// <param name="clusters">The clusters in the dataset.</param>
        /// <param name="observations">The observation vectors.</param>
        /// <returns>The silhouette width for the given set of clusters and observations.</returns>
        private static double ComputeSilhouetteWidth(KMeansClusterCollection clusters, double[][] observations)
        {
            Object lockObj = new object();
            double totalSilhouetteWidth = 0;

            // Get cluster assignments for all of the observations
            int[] clusterAssignments = clusters.Decide(observations);

            Parallel.For(0, observations.Length, observationIdx =>
            {
                double obsIntraclusterDissimilarity = 0;
                double obsInterClusterDissimilarity = 0;

                // Sum the distance between current observation and every other observation in the same cluster
                for (int caIdx = 0; caIdx < clusterAssignments.Length; caIdx++)
                {
                    if (clusterAssignments[caIdx] == clusterAssignments[observationIdx])
                    {
                        obsIntraclusterDissimilarity +=
                            ComputeEuclideanTrajectoryDifference(observations[observationIdx], observations[caIdx]);
                    }
                }

                // Compute the average intracluster dissimilarity (local variance)
                obsIntraclusterDissimilarity = obsIntraclusterDissimilarity/
                                               clusterAssignments.Where(ca => ca == clusterAssignments[observationIdx])
                                                   .Count();

                // Set up a list to hold the distance from the current observation to every other cluster centroid
                List<double> centroidDistances = new List<double>(clusters.Count);

                // Sum the distance between current observation and cluster centroids to which the current
                // observation is NOT assigned
                for (int idx = 0; idx < clusters.Count; idx++)
                {
                    // Only compute distance when observation is not assigned to the current cluster
                    if (idx != clusterAssignments[observationIdx])
                    {
                        centroidDistances.Add(ComputeEuclideanTrajectoryDifference(observations[observationIdx],
                            clusters[idx].Centroid));
                    }
                }

                // Get the minimum intercluster dissimilarity (0 if there are no centroid differences)
                obsInterClusterDissimilarity = centroidDistances.Any() ? centroidDistances.Min() : 0;

                // Add the silhouette width for the current observation
                var curSilhouetteWidth = (Math.Abs(obsIntraclusterDissimilarity) < 0.0000001 &&
                                          Math.Abs(obsInterClusterDissimilarity) < 0.0000001)
                    ? 0
                    : (obsInterClusterDissimilarity -
                       obsIntraclusterDissimilarity)/
                      Math.Max(obsIntraclusterDissimilarity,
                          obsInterClusterDissimilarity);

                lock (lockObj)
                {
                    totalSilhouetteWidth += curSilhouetteWidth;
                }
            });

            // Return the average silhouette width across all observations
            return totalSilhouetteWidth/observations.Length;
        }
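For reference (an editorial note, not part of the original source), the quantity averaged here is the standard silhouette width: for an observation i with mean intra-cluster dissimilarity a(i) and dissimilarity b(i) to the closest other cluster,

        s(i) = \frac{b(i) - a(i)}{\max\{a(i),\, b(i)\}}, \qquad \text{overall width} = \frac{1}{N} \sum_{i=1}^{N} s(i)

Note that this implementation approximates b(i) by the minimum distance from the observation to the other clusters' centroids, rather than the mean dissimilarity to the members of the nearest other cluster.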