Example #1
        static int[] Cluster(IEnumerable <int> integers, int bandwidth)
        {
            #if DEBUG
            var stopwatch = new Stopwatch();
            stopwatch.Start();
            #endif

            var kernel    = new GaussianKernel(1);
            var meanshift = new MeanShift(1, kernel, bandwidth);
            meanshift.UseParallelProcessing = false;

            var points = integers.Select(i => new[] { Convert.ToDouble(i) }).ToArray();

            // Compute the clustering; the per-point labels are not needed here,
            // since only the cluster modes are returned below.
            meanshift.Compute(points);

            #if DEBUG
            stopwatch.Stop();
            Console.WriteLine($"Performed meanshift on {points.Length} points in {stopwatch.ElapsedMilliseconds}ms");
            #endif

            return(meanshift.Clusters.Modes.Select(m => Convert.ToInt32(m[0])).ToArray());
        }
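
A minimal call-site sketch for the helper above; the integer values and the bandwidth of 3 below are illustrative, not taken from the original source. With well separated groups like these, the method should return roughly one mode per group.

        // Cluster a handful of integers; the helper returns the cluster modes rounded to int.
        int[] values = { 1, 2, 3, 48, 50, 52 };
        int[] modes  = Cluster(values, bandwidth: 3);

        foreach (int mode in modes)
        {
            Console.WriteLine(mode);   // expected: one mode near 2 and one near 50
        }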
Example #2
        public void meanshift_new_method_no_ctor_args()
        {
            #region doc_sample1
            // Use a fixed seed for reproducibility
            Accord.Math.Random.Generator.Seed = 0;

            // Declare some data to be clustered
            double[][] input =
            {
                new double[] { -5, -2, -4 },
                new double[] { -5, -5, -6 },
                new double[] {  2,  1,  1 },
                new double[] {  1,  1,  2 },
                new double[] {  1,  2,  2 },
                new double[] {  3,  1,  2 },
                new double[] { 11,  5,  4 },
                new double[] { 15,  5,  6 },
                new double[] { 10,  5,  6 },
            };

            // Create a new Mean-Shift algorithm for 3 dimensional samples
            MeanShift meanShift = new MeanShift()
            {
                // Use a uniform kernel density
                Kernel    = new UniformKernel(),
                Bandwidth = 2.0
            };

            // Learn a data partitioning using the Mean Shift algorithm
            MeanShiftClusterCollection clustering = meanShift.Learn(input);

            // Predict group labels for each point
            int[] labels = clustering.Decide(input);

            // As a result, the first two observations should belong to the
            //  same cluster (thus having the same label). The same should
            //  happen to the next four observations and to the last three.
            #endregion

            Assert.AreEqual(labels[0], labels[1]);

            Assert.AreEqual(labels[2], labels[3]);
            Assert.AreEqual(labels[2], labels[4]);
            Assert.AreEqual(labels[2], labels[5]);

            Assert.AreEqual(labels[6], labels[7]);
            Assert.AreEqual(labels[6], labels[8]);

            Assert.AreNotEqual(labels[0], labels[2]);
            Assert.AreNotEqual(labels[2], labels[6]);
            Assert.AreNotEqual(labels[0], labels[6]);


            int[] labels2 = meanShift.Clusters.Decide(input);
            Assert.IsTrue(labels.IsEqual(labels2));

            Assert.AreEqual(3 / 9.0, meanShift.Clusters.Proportions[labels[6]], 1e-6);
            Assert.AreEqual(2 / 9.0, meanShift.Clusters.Proportions[labels[0]], 1e-6);
            Assert.AreEqual(4 / 9.0, meanShift.Clusters.Proportions[labels[2]], 1e-6);
        }
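
A hedged follow-up to the sample above: the learned clustering can also label points that were not part of the training set. The lines below would sit inside the same test method; the coordinates are made up for illustration.

            // Classify one new observation with the clustering learned above; for this data
            // the point should receive the same label as the last three observations.
            int newLabel = clustering.Decide(new double[] { 12, 5, 5 });
            Assert.AreEqual(labels[6], newLabel);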
Example #3
        public static List <List <Vector2> > ClusterPoints(List <Vector2> points)//delete "outliers"
        {
            double[][] input = new double[points.Count][];
            for (int i = 0; i < points.Count; i++)
            {
                input[i] = new double[] { points[i].x, points[i].y };
            }
            UniformKernel kernel    = new UniformKernel();
            MeanShift     meanShift = new MeanShift(dimension: 2, kernel: kernel, bandwidth: 1e-2);
            MeanShiftClusterCollection clustering = meanShift.Learn(input);

            int[] labels = clustering.Decide(input);

            List <List <Vector2> > classedPoints = new List <List <Vector2> >();

            for (int i = 0; i <= Mathf.Max(labels); i++)
            {
                List <Vector2> iClass = new List <Vector2>();
                for (int j = 0; j < points.Count; j++)
                {
                    // only keep the points whose label matches the current cluster index
                    if (labels[j] == i)
                    {
                        iClass.Add(points[j]);
                    }
                }
                classedPoints.Add(iClass);
            }
            return(classedPoints);
        }
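
A hypothetical call site for the method above (UnityEngine's Vector2 and Debug are assumed, as in the original snippet; the coordinates are made up). With the hard-coded bandwidth of 1e-2, only points within roughly 0.01 units of each other should land in the same group.

        var pts = new List <Vector2>
        {
            new Vector2(0f, 0f),
            new Vector2(0.002f, 0.001f),   // close to the first point
            new Vector2(1f, 1f)            // far from the other two
        };

        List <List <Vector2> > groups = ClusterPoints(pts);
        Debug.Log("cluster count: " + groups.Count);   // expected: 2 for this data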
Example #4
        public void MeanShiftConstructorTest2()
        {
            Accord.Math.Tools.SetupGenerator(1);

            // Declare some observations
            double[][] observations =
            {
                new double[] { -5, -2, -4 },
                new double[] { -5, -5, -6 },
                new double[] {  2,  1,  1 },
                new double[] {  1,  1,  2 },
                new double[] {  1,  2,  2 },
                new double[] {  3,  1,  2 },
                new double[] { 11,  5,  4 },
                new double[] { 15,  5,  6 },
                new double[] { 10,  5,  6 },
            };

            double[][] orig = observations.MemberwiseClone();

            // Create a uniform kernel density function
            UniformKernel kernel = new UniformKernel();

            // Create a new Mean-Shift algorithm for 3 dimensional samples
            MeanShift meanShift = new MeanShift(dimension: 3, kernel: kernel, bandwidth: 2);

            // Compute the algorithm, retrieving an integer array
            //  containing the labels for each of the observations
            int[] labels = meanShift.Compute(observations);

            // As a result, the first two observations should belong to the
            //  same cluster (thus having the same label). The same should
            //  happen to the next four observations and to the last three.

            Assert.AreEqual(labels[0], labels[1]);

            Assert.AreEqual(labels[2], labels[3]);
            Assert.AreEqual(labels[2], labels[4]);
            Assert.AreEqual(labels[2], labels[5]);

            Assert.AreEqual(labels[6], labels[7]);
            Assert.AreEqual(labels[6], labels[8]);

            Assert.AreNotEqual(labels[0], labels[2]);
            Assert.AreNotEqual(labels[2], labels[6]);
            Assert.AreNotEqual(labels[0], labels[6]);


            int[] labels2 = meanShift.Clusters.Decide(observations);
            Assert.IsTrue(labels.IsEqual(labels2));

            // the data must not have changed!
            Assert.IsTrue(orig.IsEqual(observations));

            Assert.AreEqual(3 / 9.0, meanShift.Clusters.Proportions[labels[6]], 1e-6);
            Assert.AreEqual(2 / 9.0, meanShift.Clusters.Proportions[labels[0]], 1e-6);
            Assert.AreEqual(4 / 9.0, meanShift.Clusters.Proportions[labels[2]], 1e-6);
        }
Example #5
        public void YinYangMeanShiftTest()
        {
            Accord.Math.Random.Generator.Seed = 1;
            double[][] inputs  = yinyang.Submatrix(null, 0, 1).ToJagged();
            int[]      outputs = yinyang.GetColumn(2).ToInt32();

            MeanShift ms = new MeanShift(2, new GaussianKernel(dimension: 2), 0.55);

            int[] labels = ms.Compute(inputs);
        }
Example #6
        public void MeanShiftConstructorTest()
        {
            Accord.Math.Tools.SetupGenerator(0);

            // Test Samples
            double[][] samples =
            {
                new double[] { 0, 1 },
                new double[] { 1, 2 },
                new double[] { 1, 1 },
                new double[] { 0, 7 },
                new double[] { 1, 1 },
                new double[] { 6, 2 },
                new double[] { 6, 5 },
                new double[] { 5, 1 },
                new double[] { 7, 1 },
                new double[] { 5, 1 }
            };


            var       kernel    = new GaussianKernel(dimension: 2);
            MeanShift meanShift = new MeanShift(2, kernel, 3);

            // Compute the model (estimate)
            int[] labels = meanShift.Compute(samples);

            int a = 0;
            int b = 1;

            if (0.2358896594197982.IsRelativelyEqual(meanShift.Clusters.Modes[1][0], 1e-10))
            {
                a = 1;
                b = 0;
            }

            for (int i = 0; i < 5; i++)
            {
                Assert.AreEqual(a, labels[i]);
            }

            for (int i = 5; i < samples.Length; i++)
            {
                Assert.AreEqual(b, labels[i]);
            }

            Assert.AreEqual(0.2358896594197982, meanShift.Clusters.Modes[a][0], 1e-10);
            Assert.AreEqual(1.0010865560750339, meanShift.Clusters.Modes[a][1], 1e-10);

            Assert.AreEqual(6.7284908155626031, meanShift.Clusters.Modes[b][0], 1e-10);
            Assert.AreEqual(1.2713970467590967, meanShift.Clusters.Modes[b][1], 1e-10);

            Assert.AreEqual(2, meanShift.Clusters.Count);
            Assert.AreEqual(2, meanShift.Clusters.Modes.Length);
        }
Example #7
        public void MeanShiftConstructorTest()
        {
            Accord.Math.Random.Generator.Seed = 0;

            // Test Samples
            double[][] samples =
            {
                new double[] { 0, 1 },
                new double[] { 1, 2 },
                new double[] { 1, 1 },
                new double[] { 0, 7 },
                new double[] { 1, 1 },
                new double[] { 6, 2 },
                new double[] { 6, 5 },
                new double[] { 5, 1 },
                new double[] { 7, 1 },
                new double[] { 5, 1 }
            };


            var       kernel    = new GaussianKernel(dimension: 2);
            MeanShift meanShift = new MeanShift(2, kernel, 2.0);

            meanShift.UseParallelProcessing = false;

            // Compute the model (estimate)
            int[] labels = meanShift.Compute(samples);

            int a = labels[0];
            int b = (a == 0) ? 1 : 0;

            for (int i = 0; i < 5; i++)
            {
                Assert.AreEqual(a, labels[i]);
            }

            for (int i = 5; i < samples.Length; i++)
            {
                Assert.AreEqual(b, labels[i]);
            }

            Assert.AreEqual(1.1922811512028066, meanShift.Clusters.Modes[a][0], 1e-3);
            Assert.AreEqual(1.2567196159235963, meanShift.Clusters.Modes[a][1], 1e-3);

            Assert.AreEqual(5.2696337859175868, meanShift.Clusters.Modes[b][0], 1e-3);
            Assert.AreEqual(1.4380326532534968, meanShift.Clusters.Modes[b][1], 1e-3);

            Assert.AreEqual(2, meanShift.Clusters.Count);
            Assert.AreEqual(2, meanShift.Clusters.Modes.Length);

            Assert.AreEqual(0.5, meanShift.Clusters.Proportions[0]);
            Assert.AreEqual(0.5, meanShift.Clusters.Proportions[1]);
        }
Example #8
        public override void Resolve(double[] weightedColors, int startIndex, int colorsAmount, byte componentsAmount, double k,
                                     double[] resultColor, int resultIndex)
        {
            var els  = componentsAmount + 1;
            var offs = startIndex * els;

            var colors = weightedColors
                         .Skip(offs)
                         .Take(colorsAmount * els)
                         .ToArray();

            var observations = new double[colorsAmount][];

            for (int colorIndex = 0; colorIndex < colorsAmount; colorIndex++)
            {
                var weightedColor = new double[els];
                observations[colorIndex] = weightedColor;

                for (int i = 0; i < els; i++)
                {
                    weightedColor[i] = weightedColors[offs + colorIndex * els + i];
                }
            }

            // Create a uniform kernel density function
            UniformKernel kernel = new UniformKernel();

            //calculate sigma
            double sigma = CalculateSigma(colors, (byte)(componentsAmount + 1));

            //calc bandwidth
            double bandwidth = k * sigma;

            // Create a new Mean-Shift algorithm for (componentsAmount + 1)-dimensional samples
            MeanShift meanShift = new MeanShift(kernel: kernel, bandwidth: bandwidth);

            // Compute the algorithm, retrieving an integer array
            //  containing the labels for each of the observations
            int[] labels          = meanShift.Compute(observations);
            var   mostCommonColor = labels
                                    .GroupBy(item => item)
                                    .OrderByDescending(gg => gg.Count())
                                    .Select(gg => gg.Key)
                                    .First();

            // Keep only the observations whose label matches the most common cluster.
            // (Filtering before SelectMany would re-base the indices, so pair each label
            // with its original index first.)
            var labeledWeightedColors = labels
                                        .Select((label, index) => new { label, index })
                                        .Where(x => x.label == mostCommonColor)
                                        .SelectMany(x => observations[x.index])
                                        .ToArray();


            _colorResolver.Resolve(labeledWeightedColors, 0, labeledWeightedColors.Length / els, componentsAmount, k, resultColor, resultIndex);
        }
Example #9
        private object calcMeanShift(IPixelBlock3 vPb, IPixelBlock3 pb3)
        {
            double[][]    jaArr  = pixelBlockToJaggedArray(vPb);
            int           bands  = vPb.Planes;
            UniformKernel kernel = new UniformKernel();
            //GaussianKernel kernel = new GaussianKernel(bands);
            MeanShift ms = new MeanShift(bands, kernel, radius);

            int[] vls = ms.Compute(jaArr, 0.05, 10);
            NumClusters = ms.Clusters.Count;
            Console.WriteLine(NumClusters);
            return(splitArray(vls, pb3));
        }
Example #10
        public void MeanShiftConstructorTest()
        {
            Accord.Math.Tools.SetupGenerator(0);

            // Test Samples
            double[][] samples =
            {
                new double[] { 0, 1 },
                new double[] { 1, 2 }, 
                new double[] { 1, 1 },
                new double[] { 0, 7 },
                new double[] { 1, 1 },
                new double[] { 6, 2 },
                new double[] { 6, 5 },
                new double[] { 5, 1 },
                new double[] { 7, 1 },
                new double[] { 5, 1 }
            };


            var kernel = new GaussianKernel(dimension: 2);
            MeanShift meanShift = new MeanShift(2, kernel, 3);

            // Compute the model (estimate)
            int[] labels = meanShift.Compute(samples);

            int a = 0;
            int b = 1;

            if (0.2358896594197982.IsRelativelyEqual(meanShift.Clusters.Modes[1][0], 1e-10))
            {
                a = 1;
                b = 0;
            }

            for (int i = 0; i < 5; i++)
                Assert.AreEqual(a, labels[i]);

            for (int i = 5; i < samples.Length; i++)
                Assert.AreEqual(b, labels[i]);

            Assert.AreEqual(0.2358896594197982, meanShift.Clusters.Modes[a][0], 1e-10);
            Assert.AreEqual(1.0010865560750339, meanShift.Clusters.Modes[a][1], 1e-10);

            Assert.AreEqual(6.7284908155626031, meanShift.Clusters.Modes[b][0], 1e-10);
            Assert.AreEqual(1.2713970467590967, meanShift.Clusters.Modes[b][1], 1e-10);

            Assert.AreEqual(2, meanShift.Clusters.Count);
            Assert.AreEqual(2, meanShift.Clusters.Modes.Length);
        }
Example #11
        public void meanShift()
        {
            string basePath = Path.Combine(NUnit.Framework.TestContext.CurrentContext.TestDirectory, "kmeans");

            Directory.CreateDirectory(basePath);

            #region doc_meanshift
            // Load a test image (shown in a picture box below)
            var    sampleImages = new TestImages(path: basePath);
            Bitmap image        = sampleImages.GetImage("airplane.png");

            // ImageBox.Show("Original", image).Hold();

            // Create converters to convert between Bitmap images and double[] arrays
            var imageToArray = new ImageToArray(min: -1, max: +1);
            var arrayToImage = new ArrayToImage(image.Width, image.Height, min: -1, max: +1);

            // Transform the image into an array of pixel values
            double[][] pixels; imageToArray.Convert(image, out pixels);

            // Create a MeanShift algorithm using given bandwidth
            //   and a Gaussian density kernel as kernel function.
            MeanShift meanShift = new MeanShift()
            {
                Kernel    = new GaussianKernel(3),
                Bandwidth = 0.06,

                // We will compute the mean-shift algorithm until the means
                // change less than 0.05 between two iterations of the algorithm
                Tolerance     = 0.05,
                MaxIterations = 10
            };

            // Learn the clusters from the data
            var clusters = meanShift.Learn(pixels);

            // Use clusters to decide class labels
            int[] labels = clusters.Decide(pixels);

            // Replace every pixel with its corresponding centroid
            double[][] replaced = pixels.Apply((x, i) => clusters.Modes[labels[i]]);

            // Retrieve the resulting image (shown in a picture box)
            Bitmap result; arrayToImage.Convert(replaced, out result);

            // ImageBox.Show("Mean-Shift clustering", result).Hold();
            #endregion
        }
Example #12
        public void MeanShiftConstructorTest()
        {
            Accord.Math.Random.Generator.Seed = 0;

            // Test Samples
            double[][] samples =
            {
                new double[] { 0, 1 },
                new double[] { 1, 2 }, 
                new double[] { 1, 1 },
                new double[] { 0, 7 },
                new double[] { 1, 1 },
                new double[] { 6, 2 },
                new double[] { 6, 5 },
                new double[] { 5, 1 },
                new double[] { 7, 1 },
                new double[] { 5, 1 }
            };


            var kernel = new GaussianKernel(dimension: 2);
            MeanShift meanShift = new MeanShift(2, kernel, 2.0);
            meanShift.UseParallelProcessing = false;

            // Compute the model (estimate)
            int[] labels = meanShift.Compute(samples);

            int a = labels[0];
            int b = (a == 0) ? 1 : 0;

            for (int i = 0; i < 5; i++)
                Assert.AreEqual(a, labels[i]);

            for (int i = 5; i < samples.Length; i++)
                Assert.AreEqual(b, labels[i]);

            Assert.AreEqual(1.1922811512028066, meanShift.Clusters.Modes[a][0], 1e-3);
            Assert.AreEqual(1.2567196159235963, meanShift.Clusters.Modes[a][1], 1e-3);

            Assert.AreEqual(5.2696337859175868, meanShift.Clusters.Modes[b][0], 1e-3);
            Assert.AreEqual(1.4380326532534968, meanShift.Clusters.Modes[b][1], 1e-3);

            Assert.AreEqual(2, meanShift.Clusters.Count);
            Assert.AreEqual(2, meanShift.Clusters.Modes.Length);

            Assert.AreEqual(0.5, meanShift.Clusters.Proportions[0]);
            Assert.AreEqual(0.5, meanShift.Clusters.Proportions[1]);
        }
Example #13
 public void WriteXml(XmlWriter writer)
 {
     writer.WriteAttributeString("name", Name);
     writer.WriteAttributeString("desc1", Description1);
     writer.WriteAttributeString("desc2", Description2);
     writer.WriteAttributeString("partName", PartName);
     writer.WriteAttributeString("featureName", FeatureName);
     writer.WriteAttributeString("toleranceValue", ToleranceValue.ToString(CultureInfo.InvariantCulture));
     writer.WriteAttributeString("toleranceEffective",
                                 ToleranceEffective.ToString(CultureInfo.InvariantCulture));
     writer.WriteAttributeString("effect", Effect.ToString(CultureInfo.InvariantCulture));
     writer.WriteAttributeString("sensitivity", Sensitivity.ToString(CultureInfo.InvariantCulture));
     writer.WriteAttributeString("meanShift", MeanShift.ToString(CultureInfo.InvariantCulture));
     writer.WriteAttributeString("src", Src);
     writer.WriteAttributeString("weight", Weight.ToString(CultureInfo.InvariantCulture));
 }
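
For symmetry, here is a sketch of a matching ReadXml. It assumes the attribute names written above, that the properties have setters, and that the numeric properties are doubles; none of this is part of the original class, and a production IXmlSerializable implementation would also advance the reader.

 public void ReadXml(XmlReader reader)
 {
     // Assumption: settable properties, numeric ones typed as double.
     Name               = reader.GetAttribute("name");
     Description1       = reader.GetAttribute("desc1");
     Description2       = reader.GetAttribute("desc2");
     PartName           = reader.GetAttribute("partName");
     FeatureName        = reader.GetAttribute("featureName");
     ToleranceValue     = double.Parse(reader.GetAttribute("toleranceValue"), CultureInfo.InvariantCulture);
     ToleranceEffective = double.Parse(reader.GetAttribute("toleranceEffective"), CultureInfo.InvariantCulture);
     Effect             = double.Parse(reader.GetAttribute("effect"), CultureInfo.InvariantCulture);
     Sensitivity        = double.Parse(reader.GetAttribute("sensitivity"), CultureInfo.InvariantCulture);
     MeanShift          = double.Parse(reader.GetAttribute("meanShift"), CultureInfo.InvariantCulture);
     Src                = reader.GetAttribute("src");
     Weight             = double.Parse(reader.GetAttribute("weight"), CultureInfo.InvariantCulture);
 }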
Example #14
        /// <summary>
        ///   Runs the Mean-Shift algorithm.
        /// </summary>
        ///
        private void runMeanShift()
        {
            int pixelSize = 3;

            // Retrieve the kernel bandwidth
            double sigma = (double)numBandwidth.Value;

            // Load original image
            Bitmap image = Properties.Resources.leaf;

            // Create converters
            ImageToArray imageToArray = new ImageToArray(min: -1, max: +1);
            ArrayToImage arrayToImage = new ArrayToImage(image.Width, image.Height, min: -1, max: +1);

            // Transform the image into an array of pixel values
            double[][] pixels; imageToArray.Convert(image, out pixels);


            // Create a MeanShift algorithm using the given bandwidth
            // and a Gaussian density kernel as the kernel function:

            IRadiallySymmetricKernel kernel = new GaussianKernel(pixelSize);

            var meanShift = new MeanShift(pixelSize, kernel, sigma)
            {
                Tolerance     = 0.05,
                MaxIterations = 10
            };


            // Compute the mean-shift algorithm until the difference
            // in shift vectors between two iterations is below 0.05

            int[] idx = meanShift.Compute(pixels);


            // Replace every pixel with its corresponding centroid
            pixels.ApplyInPlace((x, i) => meanShift.Clusters.Modes[idx[i]]);

            // Show resulting image in the picture box
            Bitmap result; arrayToImage.Convert(pixels, out result);

            pictureBox.Image = result;
        }
Example #15
        private static void meanShift(double[][] inputs)
        {
            // Create a mean-shift algorithm
            var meanShift = new MeanShift()
            {
                Bandwidth     = 0.1,
                Kernel        = new EpanechnikovKernel(),
                Distance      = new Euclidean(),
                MaxIterations = 1000
            };

            // Use it to learn a data model
            var model = meanShift.Learn(inputs);

            // Use the model to group new instances
            int[] prediction = model.Decide(inputs);

            // Plot the results
            ScatterplotBox.Show("Mean-Shift's answer", inputs, prediction).Hold();
        }
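
A small driver sketch for the method above, written as a hypothetical Main in the same class; the synthetic data is illustrative and not from the original source. It builds two noisy blobs on the unit square, which a bandwidth of 0.1 should separate into two groups.

        static void Main()
        {
            // Generate two clouds of 100 points each, centred near (0.2, 0.2) and (0.8, 0.8).
            var rand = new Random(0);
            double[][] inputs = new double[200][];
            for (int i = 0; i < inputs.Length; i++)
            {
                double c = i < 100 ? 0.2 : 0.8;
                inputs[i] = new[] { c + 0.05 * rand.NextDouble(), c + 0.05 * rand.NextDouble() };
            }

            meanShift(inputs);   // plots the predicted groups via ScatterplotBox
        }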
Example #16
        static void TestMeanShift()
        {
            Bitmap image = Accord.Imaging.Image.FromUrl("https://c1.staticflickr.com/4/3209/2527630511_fae07530c2_b.jpg");

            //ImageBox.Show("Original", image).Hold();

            // Create converters to convert between Bitmap images and double[] arrays
            var imageToArray = new ImageToArray(min: -1, max: +1);
            var arrayToImage = new ArrayToImage(image.Width, image.Height, min: -1, max: +1);

            // Transform the image into an array of pixel values
            double[][] pixels; imageToArray.Convert(image, out pixels);


            // Create a MeanShift algorithm using given bandwidth
            //   and a Gaussian density kernel as kernel function.
            MeanShift meanShift = new MeanShift()
            {
                Kernel    = new EpanechnikovKernel(),
                Bandwidth = 0.1,

                // We will compute the mean-shift algorithm until the means
                // change less than 0.05 between two iterations of the algorithm
                Tolerance     = 0.05,
                MaxIterations = 10
            };

            // Learn the clusters from the data
            var clusters = meanShift.Learn(pixels);

            // Use clusters to decide class labels
            int[] labels = clusters.Decide(pixels);

            // Replace every pixel with its corresponding centroid
            double[][] replaced = pixels.Apply((x, i) => clusters.Modes[labels[i]]);

            // Retrieve the resulting image (shown in a picture box)
            Bitmap result; arrayToImage.Convert(replaced, out result);

            //ImageBox.Show("Mean-Shift clustering", result).Hold();
        }
Example #17
        public override void Initialize()
        {
            SetStartDate(2016, 1, 1);
            SetEndDate(2016, 7, 1);
            SetCash(10000);

            AddSecurity(SecurityType.Equity, symbol, Resolution.Hour);

            var tradeBarHistory = History <TradeBar>(symbol, TimeSpan.FromDays(7), Resolution.Hour);

            // we can loop over the return value from these functions and we get TradeBars
            // we can use these TradeBars to initialize indicators or perform other math
            var closes = new double[][] { tradeBarHistory.Select((tb) => tb.Close).ToDoubleArray() };

            IRadiallySymmetricKernel kernel = new GaussianKernel(1);

            var meanShift = new MeanShift(kernel, 1)
            {
                //Tolerance = 0.05,
                //MaxIterations = 10
            };


            // Learn the clusters and assign each observation a label
            // (Tolerance and MaxIterations are left at their defaults here)

            int[] idx = meanShift.Learn(closes).Decide(closes);


            // Replace every observation with its corresponding cluster mode
            result = closes.Apply((x, i) => meanShift.Clusters.Modes[idx[i]], result: closes);

            foreach (var rr in result)
            {
                foreach (var r in rr)
                {
                    Debug("" + r);
                }
            }
        }
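
Note that the example above packs every close into a single observation, so MeanShift sees one point whose dimension equals the number of bars. If the intent were instead to cluster the individual closing prices (one 1-D observation per bar), the history could be reshaped as in this sketch; the bandwidth of 1.0 is an arbitrary placeholder, not a value from the original.

            // One observation per trade bar; tb.Close is a decimal in QuantConnect, hence the cast.
            double[][] closesPerBar = tradeBarHistory
                                      .Select(tb => new[] { (double)tb.Close })
                                      .ToArray();

            var ms = new MeanShift(1, new GaussianKernel(1), 1.0);
            int[] perBarLabels = ms.Learn(closesPerBar).Decide(closesPerBar);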
Example #18
        public void TestClusterOrder()
        {
            // set of points with 4 obvious clusters
            double[][] points =
            {
                // plus,minus points
                new double[] {    11, -10 },
                new double[] {    11, -12 },
                new double[] {    10, -13 },
                // plus,plus points
                new double[] {    10,  10 },
                new double[] {    11,  13 },
                new double[] {    10,  12 },
                new double[] {    11,  10 },
                // minus,plus points
                new double[] {   -10,  10 },
                new double[] {   -10,  11 },
                new double[] {   -11,  10 },
                new double[] {   -11,  11 },
                // minus,minus points
                new double[] {   -10, -10 },
                new double[] { -11.5, -10 },
                new double[] {   -13, -10 }
            };

            // for use in tests, count points in each cluster
            double minusMinusPointCount = 0;
            double minusPlusPointCount  = 0;
            double plusMinusPointCount  = 0;
            double plusPlusPointCount   = 0;

            for (int i = 0; i < points.Length; ++i)
            {
                if (points[i][0] < 0 && points[i][1] < 0)
                {
                    minusMinusPointCount += 1;
                }
                else if (points[i][0] < 0 && points[i][1] > 0)
                {
                    minusPlusPointCount += 1;
                }
                else if (points[i][0] > 0 && points[i][1] < 0)
                {
                    plusMinusPointCount += 1;
                }
                else if (points[i][0] > 0 && points[i][1] > 0)
                {
                    plusPlusPointCount += 1;
                }
            }


            // MeanShift calculations
            Accord.Math.Random.Generator.Seed = 1;
            var meanShift = new MeanShift()
            {
                // Use a Gaussian kernel density
                Kernel    = new Accord.Statistics.Distributions.DensityKernels.GaussianKernel(2),
                Bandwidth = 2
            };

            meanShift.UseParallelProcessing = false;

            var clustering = meanShift.Learn(points);

            int[] labels = clustering.Decide(points);


            // Test results

            // we should get 4 clusters
            Assert.True(clustering.Count == 4);

            // proportions of clusters should match, and point labels should assign them to modes
            // that make sense
            for (int i = 0; i < clustering.Count; ++i)
            {
                if (clustering.Modes[i][0] < 0 && clustering.Modes[i][1] < 0)
                {
                    Assert.Equal(clustering.Proportions[i], minusMinusPointCount / points.Length, 5);
                    for (int j = 0; j < points.Length; ++j)
                    {
                        if (labels[j] == i)
                        {
                            Assert.True(points[j][0] < 0 && points[j][1] < 0);
                        }
                    }
                }
                if (clustering.Modes[i][0] < 0 && clustering.Modes[i][1] > 0)
                {
                    Assert.Equal(clustering.Proportions[i], minusPlusPointCount / points.Length, 5);
                    for (int j = 0; j < points.Length; ++j)
                    {
                        if (labels[j] == i)
                        {
                            Assert.True(points[j][0] < 0 && points[j][1] > 0);
                        }
                    }
                }
                if (clustering.Modes[i][0] > 0 && clustering.Modes[i][1] < 0)
                {
                    Assert.Equal(clustering.Proportions[i], plusMinusPointCount / points.Length, 5);
                    for (int j = 0; j < points.Length; ++j)
                    {
                        if (labels[j] == i)
                        {
                            Assert.True(points[j][0] > 0 && points[j][1] < 0);
                        }
                    }
                }
                if (clustering.Modes[i][0] > 0 && clustering.Modes[i][1] > 0)
                {
                    Assert.Equal(clustering.Proportions[i], plusPlusPointCount / points.Length, 5);
                    for (int j = 0; j < points.Length; ++j)
                    {
                        if (labels[j] == i)
                        {
                            Assert.True(points[j][0] > 0 && points[j][1] > 0);
                        }
                    }
                }
            }
        }
Example #19
        public void WeightedMeanShiftConstructorTest()
        {
            MeanShift ms1, ms2, ms3;

            Accord.Math.Tools.SetupGenerator(0);

            double[][] samples1 =
            {
                new double[] { 0, 1 },
                new double[] { 1, 2 },
                new double[] { 1, 1 },
                new double[] { 0, 1 },
                new double[] { 1, 1 },
                new double[] { 6, 2 },
                new double[] { 6, 5 },
                new double[] { 5, 1 },
                new double[] { 7, 1 },
                new double[] { 5, 1 }
            };

            int[] weights1 = Vector.Ones <int>(samples1.Length);

            ms1 = new MeanShift(2, new GaussianKernel(dimension: 2), 2.0);
            ms1.Compute(samples1);


            Accord.Math.Tools.SetupGenerator(0);

            double[][] samples2 =
            {
                new double[] { 0, 1 },
                new double[] { 1, 2 },
                new double[] { 1, 1 },
                new double[] { 0, 1 },
                new double[] { 6, 2 },
                new double[] { 6, 5 },
                new double[] { 5, 1 },
                new double[] { 7, 1 },
            };

            int[] weights = { 1, 1, 2, 1, 1, 1, 2, 1 };

            ms2 = new MeanShift(2, new GaussianKernel(dimension: 2), 2.0);
            ms2.Compute(samples2, weights);

            ms3 = new MeanShift(2, new GaussianKernel(dimension: 2), 2.0);
            ms3.Compute(samples2);


            int[] labels1 = ms1.Clusters.Decide(samples1);
            int[] labels2 = ms2.Clusters.Decide(samples1);
            int[] labels3 = ms3.Clusters.Decide(samples1);

            Assert.IsTrue(Matrix.IsEqual(labels1, labels2));

            Assert.IsTrue(Matrix.IsEqual(ms1.Clusters.Modes, ms2.Clusters.Modes, 1e-3));
            Assert.IsFalse(Matrix.IsEqual(ms1.Clusters.Modes, ms3.Clusters.Modes, 1e-2));
            Assert.IsFalse(Matrix.IsEqual(ms2.Clusters.Modes, ms3.Clusters.Modes, 1e-2));

            Assert.IsTrue(Matrix.IsEqual(ms1.Clusters.Proportions, ms2.Clusters.Proportions));
            Assert.IsTrue(Matrix.IsEqual(ms1.Clusters.Proportions, ms3.Clusters.Proportions));

            Assert.AreEqual(0.5, ms1.Clusters.Proportions[0]);
            Assert.AreEqual(0.5, ms1.Clusters.Proportions[1]);
        }
Example #20
        /// <summary>
        ///   Runs the Mean-Shift algorithm.
        /// </summary>
        /// 
        private void runMeanShift()
        {
            int pixelSize = 3;

            // Retrieve the kernel bandwidth
            double sigma = (double)numBandwidth.Value;

            // Load original image
            Bitmap image = Properties.Resources.leaf;

            // Create converters
            ImageToArray imageToArray = new ImageToArray(min: -1, max: +1);
            ArrayToImage arrayToImage = new ArrayToImage(image.Width, image.Height, min: -1, max: +1);

            // Transform the image into an array of pixel values
            double[][] pixels; imageToArray.Convert(image, out pixels);


            // Create a MeanShift algorithm using the given bandwidth
            // and a Gaussian density kernel as the kernel function:

            IRadiallySymmetricKernel kernel = new GaussianKernel(pixelSize);
            
            var meanShift = new MeanShift(pixelSize, kernel, sigma)
            {
                Tolerance = 0.05,
                MaxIterations = 10
            };

            
            // Compute the mean-shift algorithm until the difference 
            // in shift vectors between two iterations is below 0.05
            
            int[] idx = meanShift.Compute(pixels);


            // Replace every pixel with its corresponding centroid
            pixels.ApplyInPlace((x, i) => meanShift.Clusters.Modes[idx[i]]);

            // Show resulting image in the picture box
            Bitmap result; arrayToImage.Convert(pixels, out result);

            pictureBox.Image = result;
        }
Example #21
        public void WeightedMeanShiftConstructorTest()
        {
            MeanShift ms1, ms2, ms3;

            Accord.Math.Tools.SetupGenerator(0);

            double[][] samples1 =
            {
                new double[] { 0, 1 },
                new double[] { 1, 2 }, 
                new double[] { 1, 1 },
                new double[] { 0, 1 },
                new double[] { 1, 1 },
                new double[] { 6, 2 },
                new double[] { 6, 5 },
                new double[] { 5, 1 },
                new double[] { 7, 1 },
                new double[] { 5, 1 }
            };

            int[] weights1 = Vector.Ones<int>(samples1.Length);

            ms1 = new MeanShift(2, new GaussianKernel(dimension: 2), 2.0);
            ms1.Compute(samples1);


            Accord.Math.Tools.SetupGenerator(0);

            double[][] samples2 =
            {
                new double[] { 0, 1 },
                new double[] { 1, 2 }, 
                new double[] { 1, 1 },
                new double[] { 0, 1 },
                new double[] { 6, 2 },
                new double[] { 6, 5 },
                new double[] { 5, 1 },
                new double[] { 7, 1 },
            };

            int[] weights = { 1, 1, 2, 1, 1, 1, 2, 1 };

            ms2 = new MeanShift(2, new GaussianKernel(dimension: 2), 2.0);
            ms2.Compute(samples2, weights);

            ms3 = new MeanShift(2, new GaussianKernel(dimension: 2), 2.0);
            ms3.Compute(samples2);


            int[] labels1 = ms1.Clusters.Nearest(samples1);
            int[] labels2 = ms2.Clusters.Nearest(samples1);
            int[] labels3 = ms3.Clusters.Nearest(samples1);

            Assert.IsTrue(Matrix.IsEqual(labels1, labels2));

            Assert.IsTrue(Matrix.IsEqual(ms1.Clusters.Modes, ms2.Clusters.Modes, 1e-3));
            Assert.IsFalse(Matrix.IsEqual(ms1.Clusters.Modes, ms3.Clusters.Modes, 1e-2));
            Assert.IsFalse(Matrix.IsEqual(ms2.Clusters.Modes, ms3.Clusters.Modes, 1e-2));

            Assert.IsTrue(Matrix.IsEqual(ms1.Clusters.Proportions, ms2.Clusters.Proportions));
            Assert.IsTrue(Matrix.IsEqual(ms1.Clusters.Proportions, ms3.Clusters.Proportions));

            Assert.AreEqual(0.5, ms1.Clusters.Proportions[0]);
            Assert.AreEqual(0.5, ms1.Clusters.Proportions[1]);
        }
Example #22
        public void MeanShiftConstructorTest2()
        {

            Accord.Math.Tools.SetupGenerator(1);


            // Declare some observations
            double[][] observations = 
            {
                new double[] { -5, -2, -1 },
                new double[] { -5, -5, -6 },
                new double[] {  2,  1,  1 },
                new double[] {  1,  1,  2 },
                new double[] {  1,  2,  2 },
                new double[] {  3,  1,  2 },
                new double[] { 11,  5,  4 },
                new double[] { 15,  5,  6 },
                new double[] { 10,  5,  6 },
            };

            double[][] orig = observations.MemberwiseClone();

            // Create a uniform kernel density function
            UniformKernel kernel = new UniformKernel();

            // Create a new Mean-Shift algorithm for 3 dimensional samples
            MeanShift meanShift = new MeanShift(dimension: 3, kernel: kernel, bandwidth: 1.5 );

            // Compute the algorithm, retrieving an integer array
            //  containing the labels for each of the observations
            int[] labels = meanShift.Compute(observations);

            // As a result, the first two observations should belong to the
            //  same cluster (thus having the same label). The same should
            //  happen to the next four observations and to the last three.

            Assert.AreEqual(labels[0], labels[1]);

            Assert.AreEqual(labels[2], labels[3]);
            Assert.AreEqual(labels[2], labels[4]);
            Assert.AreEqual(labels[2], labels[5]);

            Assert.AreEqual(labels[6], labels[7]);
            Assert.AreEqual(labels[6], labels[8]);

            Assert.AreNotEqual(labels[0], labels[2]);
            Assert.AreNotEqual(labels[2], labels[6]);
            Assert.AreNotEqual(labels[0], labels[6]);


            int[] labels2 = meanShift.Clusters.Nearest(observations);
            Assert.IsTrue(labels.IsEqual(labels2));

            // the data must not have changed!
            Assert.IsTrue(orig.IsEqual(observations));
        }
Example #23
        public void RunProcess(double[][] inputDataMS, bool displayResult = false)
        {
            Stopwatch sw = new Stopwatch();

            sw.Start();

            MeanShift clusterMS = new MeanShift(dataDimension, new UniformKernel(), msSearchRadius);

            clusterMS.Distance = new myDistanceClass();

            MeanShiftClusterCollection clustering = clusterMS.Learn(inputDataMS);

            pointLabels = clustering.Decide(inputDataMS);

            clusteringPlaneRec = new List <pointPlaneClass>();
            for (int i = 0; i < clustering.Count; i++)
            {
                clusteringPlaneRec.Add(new pointPlaneClass(i, 0));
            }
            for (int i = 0; i < h * w; i++)
            {
                MyVector3 vector3T = new MyVector3(inputDataMS[i][6], inputDataMS[i][7], inputDataMS[i][8]);
                if (vector3T.x == 0 && vector3T.y == 0 && vector3T.z == 0)
                {
                    continue;
                }

                int idx = pointLabels[i];
                clusteringPlaneRec[idx].Points.Add(vector3T);
                clusteringPlaneRec[idx].PointsIdx.Add(i);
                clusteringPlaneRec[idx].Value++;
            }
            clusteringPlaneRec.Sort((x, y) => y.Value.CompareTo(x.Value));
            #region visualization
            if (displayResult)
            {
                int loop = 0;
                Image <Bgr, byte> image2 = new Image <Bgr, byte>(w, h);
                image2.SetZero();
                for (int i = 0; i < h; i++)
                {
                    for (int j = 0; j < w; j++)
                    {
                        if (pointLabels[loop] >= 0)
                        {
                            byte r = (byte)(Utils.ColorMall[pointLabels[loop] % 30].R);
                            byte g = (byte)(Utils.ColorMall[pointLabels[loop] % 30].G);
                            byte b = (byte)(Utils.ColorMall[pointLabels[loop] % 30].B);
                            image2[i, j] = new Bgr(b, g, r);
                        }
                        loop++;
                    }
                }
                new ImageViewer(image2, "2 - MeanShiftClustering").Show();
            }
            #endregion
            sw.Stop();
            Console.WriteLine(clusteringPlaneRec.Count + " labels\tin" + sw.ElapsedMilliseconds / 1000 + "s");
            sw.Restart();

            // extract planes from clustered data
            SceondPlaneExtraction();
            #region visualization
            if (displayResult)
            {
                int loop = 0;
                Image <Bgr, byte> image3 = new Image <Bgr, byte>(w, h);
                image3.SetZero();
                for (int i = 0; i < h; i++)
                {
                    for (int j = 0; j < w; j++)
                    {
                        if (pointLabels[loop] >= 0)
                        {
                            byte r = (byte)(Utils.ColorMall[pointLabels[loop] % 30].R);
                            byte g = (byte)(Utils.ColorMall[pointLabels[loop] % 30].G);
                            byte b = (byte)(Utils.ColorMall[pointLabels[loop] % 30].B);
                            image3[i, j] = new Bgr(b, g, r);
                        }
                        loop++;
                    }
                }
                new ImageViewer(image3, "3 - PlaneExtraction").Show();
            }
            #endregion
            sw.Stop();
            Console.WriteLine(extractionPlaneRec.Count + " labels\tin" + sw.ElapsedMilliseconds / 1000 + "s");
            sw.Restart();

            // merge planes if necessary
            MergePlanes();
            #region visualization
            if (displayResult)
            {
                int loop = 0;
                Image <Bgr, byte> image4 = new Image <Bgr, byte>(w, h);
                image4.SetZero();
                for (int i = 0; i < h; i++)
                {
                    for (int j = 0; j < w; j++)
                    {
                        if (pointLabels[loop] >= 0)
                        {
                            byte r = (byte)(Utils.ColorMall[pointLabels[loop] % 30].R);
                            byte g = (byte)(Utils.ColorMall[pointLabels[loop] % 30].G);
                            byte b = (byte)(Utils.ColorMall[pointLabels[loop] % 30].B);
                            image4[i, j] = new Bgr(b, g, r);
                        }
                        loop++;
                    }
                }
                new ImageViewer(image4, "4 - MergedPlanes").Show();
            }
            #endregion

            sw.Stop();
            Console.WriteLine(mergedPlaneRec.Count + " labels\tin" + sw.ElapsedMilliseconds / 1000 + "s");
        }
Example #24
        public void WeightedMeanShiftConstructorTest2()
        {
            Accord.Math.Tools.SetupGenerator(1);

            // Declare some observations
            double[][] observations1 =
            {
                new double[] { -5, -2, -4 },
                new double[] { -5, -5, -6 },
                new double[] {  1,  1,  2 },
                new double[] {  1,  1,  2 },
                new double[] {  1,  1,  2 },
                new double[] {  1,  1,  2 },
                new double[] { 11,  5,  4 },
                new double[] { 15,  5,  6 },
                new double[] { 10,  5,  6 },
            };

            int[] weights1 = { 1, 1, 1, 1, 1, 1, 1, 1, 1 };

            // Declare some observations
            double[][] observations2 =
            {
                new double[] { -5, -2, -4 },
                new double[] { -5, -5, -6 },
                new double[] {  1,  1,  2 },
                // new double[] {  1,  1,  2 },
                // new double[] {  1,  1,  2 },
                // new double[] {  1,  1,  2 },
                new double[] { 11,  5,  4 },
                new double[] { 15,  5,  6 },
                new double[] { 10,  5,  6 },
            };

            int[] weights2 = { 1, 1, 4, 1, 1, 1 };


            Accord.Math.Random.Generator.Seed = 1;
            var ms1 = new MeanShift(dimension: 3, kernel: new UniformKernel(), bandwidth: 2);

            ms1.UseParallelProcessing = false;
            ms1.Compute(observations1);

            Accord.Math.Random.Generator.Seed = 1;
            var ms2 = new MeanShift(dimension: 3, kernel: new UniformKernel(), bandwidth: 2);

            ms2.UseParallelProcessing = false;
            ms2.Compute(observations1, weights1);

            Accord.Math.Random.Generator.Seed = 1;
            var ms3 = new MeanShift(dimension: 3, kernel: new UniformKernel(), bandwidth: 2);

            ms3.UseParallelProcessing = false;
            ms3.Compute(observations2, weights2);

            int[] labels1 = ms1.Clusters.Decide(observations1);
            int[] labels2 = ms2.Clusters.Decide(observations1);
            int[] labels3 = ms3.Clusters.Decide(observations1);

            Assert.IsTrue(Matrix.IsEqual(labels1, labels2));
            Assert.IsTrue(Matrix.IsEqual(labels1, labels3));

            Assert.IsTrue(Matrix.IsEqual(ms1.Clusters.Modes, ms2.Clusters.Modes, 1e-3));
            Assert.IsTrue(Matrix.IsEqual(ms1.Clusters.Modes, ms3.Clusters.Modes, 1e-3));

            Assert.IsTrue(Matrix.IsEqual(ms1.Clusters.Proportions, ms2.Clusters.Proportions));
            Assert.IsTrue(Matrix.IsEqual(ms1.Clusters.Proportions, ms3.Clusters.Proportions));

            Assert.AreEqual(3 / 9.0, ms3.Clusters.Proportions[labels1[6]], 1e-6);
            Assert.AreEqual(2 / 9.0, ms3.Clusters.Proportions[labels1[0]], 1e-6);
            Assert.AreEqual(4 / 9.0, ms3.Clusters.Proportions[labels1[2]], 1e-6);
        }
Example #25
        public void YinYangMeanShiftTest()
        {
            Accord.Math.Random.Generator.Seed = 1;
            double[][] inputs = yinyang.Submatrix(null, 0, 1).ToJagged();
            int[] outputs = yinyang.GetColumn(2).ToInt32();

            MeanShift ms = new MeanShift(2, new GaussianKernel(dimension: 2), 0.55);

            int[] labels = ms.Compute(inputs);
        }
Example #26
        public void WeightedMeanShiftConstructorTest2()
        {
            Accord.Math.Tools.SetupGenerator(1);

            // Declare some observations
            double[][] observations1 = 
            {
                new double[] { -5, -2, -4 },
                new double[] { -5, -5, -6 },
                new double[] {  1,  1,  2 },
                new double[] {  1,  1,  2 },
                new double[] {  1,  1,  2 },
                new double[] {  1,  1,  2 },
                new double[] { 11,  5,  4 },
                new double[] { 15,  5,  6 },
                new double[] { 10,  5,  6 },
            };

            int[] weights1 = { 1, 1, 1, 1, 1, 1, 1, 1, 1 };

            // Declare some observations
            double[][] observations2 = 
            {
                new double[] { -5, -2, -4 },
                new double[] { -5, -5, -6 },
                new double[] {  1,  1,  2 },
                // new double[] {  1,  1,  2 },
                // new double[] {  1,  1,  2 },
                // new double[] {  1,  1,  2 },
                new double[] { 11,  5,  4 },
                new double[] { 15,  5,  6 },
                new double[] { 10,  5,  6 },
            };

            int[] weights2 = { 1, 1, 4, 1, 1, 1 };


            Accord.Math.Random.Generator.Seed = 1;
            var ms1 = new MeanShift(dimension: 3, kernel: new UniformKernel(), bandwidth: 2);
            ms1.UseParallelProcessing = false;
            ms1.Compute(observations1);

            Accord.Math.Random.Generator.Seed = 1;
            var ms2 = new MeanShift(dimension: 3, kernel: new UniformKernel(), bandwidth: 2);
            ms2.UseParallelProcessing = false;
            ms2.Compute(observations1, weights1);

            Accord.Math.Random.Generator.Seed = 1;
            var ms3 = new MeanShift(dimension: 3, kernel: new UniformKernel(), bandwidth: 2);
            ms3.UseParallelProcessing = false;
            ms3.Compute(observations2, weights2);

            int[] labels1 = ms1.Clusters.Nearest(observations1);
            int[] labels2 = ms2.Clusters.Nearest(observations1);
            int[] labels3 = ms3.Clusters.Nearest(observations1);

            Assert.IsTrue(Matrix.IsEqual(labels1, labels2));
            Assert.IsTrue(Matrix.IsEqual(labels1, labels3));

            Assert.IsTrue(Matrix.IsEqual(ms1.Clusters.Modes, ms2.Clusters.Modes, 1e-3));
            Assert.IsTrue(Matrix.IsEqual(ms1.Clusters.Modes, ms3.Clusters.Modes, 1e-3));

            Assert.IsTrue(Matrix.IsEqual(ms1.Clusters.Proportions, ms2.Clusters.Proportions));
            Assert.IsTrue(Matrix.IsEqual(ms1.Clusters.Proportions, ms3.Clusters.Proportions));

            Assert.AreEqual(3 / 9.0, ms3.Clusters.Proportions[labels1[6]], 1e-6);
            Assert.AreEqual(2 / 9.0, ms3.Clusters.Proportions[labels1[0]], 1e-6);
            Assert.AreEqual(4 / 9.0, ms3.Clusters.Proportions[labels1[2]], 1e-6);
        }
Example #27
        public void Main()
        {
            List<string> abstracts = PullAbstracts(@"ExampleData\references.txt");

            Stopwords sw = new Stopwords(@"ExampleData\Stopwords.txt");

            StandardAnalyzer analyzer = new StandardAnalyzer(Net.Util.Version.LUCENE_30);
            using (IndexWriter writer = new IndexWriter(FSDirectory.Open(@"ExampleData\Index"),
                analyzer, IndexWriter.MaxFieldLength.LIMITED))
            {
                foreach (string ab in abstracts)
                {
                    string trimmedAb = sw.TrimStopwords(ab);
                    if (trimmedAb.Contains("here "))
                        return;
                    Document document = new Document();
                    Field field = new Field("content", trimmedAb, Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS);
                    document.Add(field);

                    writer.AddDocument(document);
                }
                writer.Optimize();
            }

            List<Dictionary<string, int>> vectors = new List<Dictionary<string, int>>();
            using (IndexReader reader = IndexReader.Open(FSDirectory.Open(@"ExampleData\Index"), true))
            {
                // get all terms.
                TermEnum termsEnum = reader.Terms();
                List<string> termStrings = new List<string>();
                while (termsEnum.Next())
                {
                    string term = termsEnum.Term.Text;
                    string processed = null;
                    if (!sw.IsStopWord(term, out processed))
                    {
                        termStrings.Add(processed);
                    }
                }

                // create vectors
                for (int c = 0; c < reader.NumDocs(); c++)
                {
                    Dictionary<string, int> vector = new Dictionary<string, int>();
                    foreach (string term in termStrings)
                    {
                        vector.Add(term, 0);
                    }

                    ITermFreqVector freqVector = reader.GetTermFreqVector(c, "content");
                    string[] terms = freqVector.GetTerms();
                    foreach (string term in terms)
                    {
                        int count = vector[term];
                        vector[term] = count + 1;
                    }
                    vectors.Add(vector);
                }
            }

            double summation = 0.0;
            string[] keys = vectors[0].Keys.ToArray();
            for (int c = 0; c < keys.Length; c++)
            {
                double val = (vectors[10][keys[c]] - vectors[11][keys[c]]);
                summation += val * val;
            }
            double distance = Math.Sqrt(summation);

            double[][] arr = new double[vectors.Count][];
            for (int c = 0; c < vectors.Count; c++)
            {
                arr[c] = vectors[c].Values.Select(m => (double)m).ToArray();
            }
            }

            int dimension = vectors[0].Values.Count;
            double sigma = 14.0;
            MeanShift meanShiftClustering = new MeanShift(dimension, new GaussianKernel(dimension), sigma);

            int[] indices = meanShiftClustering.Compute(arr, .05, 100);
            MeanShiftClusterCollection clusters = meanShiftClustering.Clusters;
            return;
        }
Example #28
 private object calcMeanShift(IPixelBlock3 vPb,IPixelBlock3 pb3)
 {
     double[][] jaArr = pixelBlockToJaggedArray(vPb);
     int bands = vPb.Planes;
     UniformKernel kernel = new UniformKernel();
     //GaussianKernel kernel = new GaussianKernel(bands);
     MeanShift ms = new MeanShift(bands, kernel, radius);
     int[] vls = ms.Compute(jaArr, 0.05, 10);
     NumClusters = ms.Clusters.Count;
     Console.WriteLine(NumClusters);
     return splitArray(vls, pb3);
 }
Example #29
        public int ComputeClustering(Vertex[] points)
        {
            int pixelSize = 3;

            // Retrieve the kernel bandwidth
            // double sigma = (double)numBandwidth.Value;

            // Create a MeanShift algorithm using the given bandwidth
            // and a Gaussian density kernel as the kernel function:

            Accord.Compat.ParallelOptions opt = new Accord.Compat.ParallelOptions();
            opt.MaxDegreeOfParallelism = 1;

            var meanShift = new MeanShift()
            {
                Kernel        = new GaussianKernel((int)PARAM_2),
                Bandwidth     = PARAM_1,
                ComputeLabels = true,

                // Run sequentially: opt above limits MaxDegreeOfParallelism to 1.
                ParallelOptions = opt

                //Tolerance     = 0.05,
                //MaxIterations = (int)ITERATION
            };

            double[][] input = new double[points.Length][];

            for (int i = 0; i < points.Length; ++i)
            {
                input[i] = points[i].Coords;
            }

            // Compute the mean-shift algorithm until the difference
            // in shift vectors between two iterations is below 0.05

            int[] classification = meanShift.Learn(input).Decide(input);

            Hashtable ret = new Hashtable();
            Hashtable wut = new Hashtable();

            for (int i = 0; i < input.Length; ++i)
            {
                int idCluster = classification[i];

                if (ret[idCluster] == null)
                {
                    Facility newFac = new Facility();
                    newFac.VertexIndex = i;
                    newFac.Coords      = meanShift.Clusters.Modes[idCluster];

                    ret.Add(idCluster, newFac);

                    points[i].IsFacility = true;
                    points[i].Facility   = newFac;
                }
                else
                {
                    ((Facility)ret[idCluster]).AddVertex(i, 0);
                }
            }

            facilities.Clear();

            Console.WriteLine("===============================");

            // WTF
            foreach (Facility f in ret.Values)
            {
                //f.Coords = new double[points[0].Dimension];
                //foreach (int v in f.VertexIndices)
                //{

                //    for (int i = 0; i < f.Coords.Length; ++i)
                //    {
                //        f.Coords[i] += points[v][i];
                //    }

                //    for (int i = 0; i < f.Coords.Length; ++i)
                //    {
                //        f.Coords[i] /= f.VertexIndices.Count;
                //    }
                //}

                facilities.Add(f);

                Console.WriteLine("Clients: {0}", f.VertexIndices.Count);
            }

            return(0);
        }
Example #30
    private List <Line2> Skeletonize(out bool iscurve)
    {
        Image <Gray, byte> img2   = body_img.Copy();
        Image <Gray, byte> eroded = new Image <Gray, byte>(img2.Size);
        Image <Gray, byte> temp   = new Image <Gray, byte>(img2.Size);
        Image <Gray, byte> skel   = new Image <Gray, byte>(img2.Size);

        body_img.Save("test.png");


        #region with matlab
        string argument1 = "\"" + "test.png" + "\"";
        System.Diagnostics.Process process = new System.Diagnostics.Process();
        process.StartInfo.FileName               = System.Environment.CurrentDirectory + "\\Assets\\frommatlab\\skeleton.exe";
        process.StartInfo.Arguments              = argument1;
        process.StartInfo.UseShellExecute        = false;
        process.StartInfo.CreateNoWindow         = true;
        process.StartInfo.RedirectStandardOutput = true;
        // Launch the external skeletonization tool and wait for it to finish
        process.Start();
        process.WaitForExit();
        #endregion

        // Read back the skeleton images produced by the external tool
        skel          = new Image <Gray, byte>("prune.png");
        ori_thin_img  = new Image <Gray, byte>("thin.png");
        ori_prune_img = skel;

        #region thinning (disabled)
        //skel.SetValue(0);
        //CvInvoke.Threshold(img2, temp, 127, 256, 0);
        //var element = CvInvoke.GetStructuringElement(ElementShape.Cross, new Size(3, 3), new Point(-1, -1));
        //bool done = false;

        ////skeleton
        //int itr = 0;
        //while (!done)
        //{
        //    CvInvoke.Erode(img2, eroded, element, new Point(-1, -1), 1, BorderType.Reflect, default(MCvScalar));
        //    CvInvoke.Dilate(eroded, temp, element, new Point(-1, -1), 1, BorderType.Reflect, default(MCvScalar));
        //    CvInvoke.Subtract(img2, temp, temp);
        //    CvInvoke.BitwiseOr(skel, temp, skel);
        //    eroded.CopyTo(img2);
        //    itr++;
        //    if (CvInvoke.CountNonZero(img2) == 0) done = true;
        //}
        //Image<Gray, Byte> cannyimg = body_img.Canny(60, 100);
        //CvInvoke.Dilate(cannyimg, cannyimg, element, new Point(-1, -1), 3, BorderType.Reflect, default(MCvScalar));
        //CvInvoke.Subtract(skel, cannyimg, skel);
        //ori_skel_img = skel.Copy();

        ////thinning
        //if (!noface)
        //{
        //    #region thinning
        //    List<Mat> cs = new List<Mat>();
        //    List<Mat> ds = new List<Mat>();
        //    for (int i = 0; i < 8; i++)
        //    {
        //        cs.Add(CvInvoke.GetStructuringElement(ElementShape.Cross, new Size(3, 3), new Point(-1, -1)));
        //        ds.Add(CvInvoke.GetStructuringElement(ElementShape.Cross, new Size(3, 3), new Point(-1, -1)));
        //    }

        //    cs[0].SetTo(new int[] { 0, 0, 0, 0, 1, 0, 1, 1, 1 });
        //    cs[1].SetTo(new int[] { 1, 0, 0, 1, 1, 0, 1, 0, 0 });
        //    cs[2].SetTo(new int[] { 1, 1, 1, 0, 1, 0, 0, 0, 0 });
        //    cs[3].SetTo(new int[] { 0, 0, 1, 0, 1, 1, 0, 0, 1 });

        //    ds[0].SetTo(new int[] { 1, 1, 1, 0, 0, 0, 0, 0, 0 });
        //    ds[1].SetTo(new int[] { 0, 0, 1, 0, 0, 1, 0, 0, 1 });
        //    ds[2].SetTo(new int[] { 0, 0, 0, 0, 0, 0, 1, 1, 1 });
        //    ds[3].SetTo(new int[] { 1, 0, 0, 1, 0, 0, 1, 0, 0 });

        //    cs[4].SetTo(new int[] { 0, 0, 0, 1, 1, 0, 1, 1, 0 });
        //    cs[5].SetTo(new int[] { 1, 1, 0, 1, 1, 0, 0, 0, 0 });
        //    cs[6].SetTo(new int[] { 0, 1, 1, 0, 1, 1, 0, 0, 0 });
        //    cs[7].SetTo(new int[] { 0, 0, 0, 0, 1, 1, 0, 1, 1 });

        //    ds[4].SetTo(new int[] { 0, 1, 1, 0, 0, 1, 0, 0, 0 });
        //    ds[5].SetTo(new int[] { 0, 0, 0, 0, 0, 1, 0, 1, 1 });
        //    ds[6].SetTo(new int[] { 0, 0, 0, 1, 0, 0, 1, 1, 0 });
        //    ds[7].SetTo(new int[] { 1, 1, 0, 1, 0, 0, 0, 0, 0 });

        //    Image<Gray, byte> img3 = skel.Copy();
        //    Image<Gray, byte> temp2 = skel.CopyBlank();
        //    Image<Gray, byte> lastimg3 = skel.Copy();

        //    done = false;
        //    while (!done)
        //    {
        //        for (int i = 0; i < 8; i++)
        //        {
        //            temp = this.HitOrMiss(img3, cs[i], ds[i]);
        //            CvInvoke.Subtract(img3, temp, img3);
        //        }

        //        CvInvoke.Subtract(lastimg3, img3, temp2);
        //        lastimg3 = img3.Copy();
        //        if (CvInvoke.CountNonZero(temp2) == 0) done = true;
        //    }

        //    //img3.Save("thining.png");
        //    #endregion
        //    skel = img3.Copy();
        //    ori_thinning_img = img3.Copy();
        //}
        ////// remove noise
        ////for (int i = 0; i < img3.Height; i++)
        ////{
        ////    for (int j = 0; j < img3.Width; j++)
        ////    {
        ////        if (img3[i, j].Equals(new Gray(255)))
        ////        {
        ////            bool change = false;
        ////            for (int pad = 1; pad < 3; pad++)
        ////            {
        ////                if (i >= pad && i < img3.Height - pad && j >= pad && j < img3.Width - pad)
        ////                {
        ////                    if (img3[i - pad, j].Equals(new Gray(0)) &&
        ////                        img3[i - pad, j - pad].Equals(new Gray(0)) &&
        ////                        img3[i - pad, j + pad].Equals(new Gray(0)) &&
        ////                        img3[i + pad, j].Equals(new Gray(0)) &&
        ////                        img3[i + pad, j - pad].Equals(new Gray(0)) &&
        ////                        img3[i + pad, j + pad].Equals(new Gray(0)) &&
        ////                        img3[i, j - pad].Equals(new Gray(0)) &&
        ////                        img3[i, j + pad].Equals(new Gray(0)))
        ////                        change = true;
        ////                }
        ////            }
        ////            if (change)
        ////                img3[i, j] = new Gray(0);
        ////        }
        ////    }
        ////}
        ////img3.Save("thiningdenoise.png");
        #endregion

        // get line
        // consider both straight line and curve
        LineSegment2D[] lines = skel.HoughLinesBinary(
            1,               //Distance resolution in pixel-related units
            Math.PI / 180.0, //Angle resolution measured in radians.
            3,               //threshold
            4,               //min Line width
            1                //gap between lines
            )[0];            //Get the lines from the first channel

        Image <Gray, byte> lineimg    = skel.CopyBlank();
        List <Line2>       skel_lines = new List <Line2>();
        foreach (LineSegment2D line in lines)
        {
            //remove image boundaries
            //if (line.P1.X > 10 && line.P1.Y > 10 && line.P1.X < body_img.Height - 10 && line.P1.Y < body_img.Width &&
            //   line.P2.X > 10 && line.P2.Y > 10 && line.P2.X < body_img.Height - 10 && line.P2.Y < body_img.Width - 10)
            //{
            skel_lines.Add(new Line2(new Vector2(line.P1.X, line.P1.Y), new Vector2(line.P2.X, line.P2.Y)));
            lineimg.Draw(line, new Gray(255), 2);
            //}
        }
        if (debug)
        {
            lineimg.Save("skel-line.png");
        }


        // Cluster the skeleton line segments by direction and relative distance;
        // too many clusters indicates a curved (rather than straight) axis.
        IMGSIZE = Math.Min(body_img.Width, body_img.Height);
        if (skel_lines.Count > 0)
        {
            double[][] xy = new double[skel_lines.Count][];
            for (int i = 0; i < skel_lines.Count; i++)
            {
                xy[i] = new double[] { skel_lines[i].start.x, skel_lines[i].start.y,
                                       skel_lines[i].end.x, skel_lines[i].end.y };
            }

            MeanShift clusterMS = new MeanShift(4, new UniformKernel(), 0.02);
            clusterMS.Distance = new myDistanceClass();
            MeanShiftClusterCollection clustering = clusterMS.Learn(xy);
            var lineLabels   = clustering.Decide(xy);
            int clustercount = lineLabels.DistinctCount();
            //Debug.Log("cluster count: " + clustercount);

            if (debug)
            {
                Image <Rgb, byte> lineimg_rgb = lineimg.Convert <Rgb, byte>();
                System.Random     rnd         = new System.Random();
                Rgb[]             colortable  = new Rgb[clustering.Count];
                for (int i = 0; i < clustering.Count; i++)
                {
                    colortable[i] = new Rgb(rnd.Next(255), rnd.Next(255), rnd.Next(255));
                }

                for (int i = 0; i < skel_lines.Count; i++)
                {
                    int label = lineLabels[i];
                    lineimg_rgb.Draw(skel_lines[i].ToLineSegment2D(), colortable[label], 2);
                }
                lineimg_rgb.Save("skel-line-cluster.png");
            }


            if (noface)
            {
                thred = 2;
            }

            // More clusters than the threshold means the skeleton is curved
            iscurve = clustercount > thred;
        }
        else
        {
            iscurve = false;

            // No Hough lines were found: fall back to fitting a single line with RANSAC
            NumericalRecipes.RansacLine2d rcl = new NumericalRecipes.RansacLine2d();
            List <Vector2> linepoints = IExtension.GetMaskPoints(skel);
            Line2 bestline = rcl.Estimate(linepoints);
            skel_lines.Add(bestline);
        }

        return(skel_lines);
    }
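
The custom metric myDistanceClass plugged into the MeanShift above is not shown either. Below is a sketch of what such a segment metric could look like, assuming Accord's Accord.Math.Distances.IMetric<double[]> interface and the [x1, y1, x2, y2] encoding built for each line; the weighting of direction against midpoint distance is an assumption for illustration, not the original implementation:

using System;
using Accord.Math.Distances;

// Hypothetical segment metric: combines the angle between two line
// directions with the distance between the segment midpoints, the
// latter normalized by the image size so both terms are roughly [0, 1].
public class SegmentDistance : IMetric<double[]>
{
    private readonly double imageSize;

    public SegmentDistance(double imageSize)
    {
        this.imageSize = imageSize;
    }

    public double Distance(double[] a, double[] b)
    {
        // Direction difference: 0 for parallel segments, 1 for perpendicular
        double angA = Math.Atan2(a[3] - a[1], a[2] - a[0]);
        double angB = Math.Atan2(b[3] - b[1], b[2] - b[0]);
        double dAngle = Math.Abs(Math.Sin(angA - angB));

        // Midpoint distance, normalized by the image size
        double mxA = (a[0] + a[2]) / 2.0, myA = (a[1] + a[3]) / 2.0;
        double mxB = (b[0] + b[2]) / 2.0, myB = (b[1] + b[3]) / 2.0;
        double dMid = Math.Sqrt((mxA - mxB) * (mxA - mxB) + (myA - myB) * (myA - myB)) / imageSize;

        return dAngle + dMid;
    }
}

Such a metric could then be assigned in place of myDistanceClass, e.g. clusterMS.Distance = new SegmentDistance(IMGSIZE), assuming the Distance property accepts an IMetric<double[]> as it appears to here.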
Beispiel #31
-1
        public void meanshift_new_method()
        {
            #region doc_sample1
            // Use a fixed seed for reproducibility
            Accord.Math.Random.Generator.Seed = 0;

            // Declare some data to be clustered
            double[][] input = 
            {
                new double[] { -5, -2, -4 },
                new double[] { -5, -5, -6 },
                new double[] {  2,  1,  1 },
                new double[] {  1,  1,  2 },
                new double[] {  1,  2,  2 },
                new double[] {  3,  1,  2 },
                new double[] { 11,  5,  4 },
                new double[] { 15,  5,  6 },
                new double[] { 10,  5,  6 },
            };

            // Create a uniform kernel density function
            UniformKernel kernel = new UniformKernel();

            // Create a new Mean-Shift algorithm for 3 dimensional samples
            MeanShift meanShift = new MeanShift(dimension: 3, kernel: kernel, bandwidth: 2);

            // Learn a data partitioning using the Mean Shift algorithm
            MeanShiftClusterCollection clustering = meanShift.Learn(input);

            // Predict group labels for each point
            int[] labels = clustering.Decide(input);

            // As a result, the first two observations should belong to the
            //  same cluster (thus having the same label). The same should
            //  happen to the next four observations and to the last three.
            #endregion

            Assert.AreEqual(labels[0], labels[1]);

            Assert.AreEqual(labels[2], labels[3]);
            Assert.AreEqual(labels[2], labels[4]);
            Assert.AreEqual(labels[2], labels[5]);

            Assert.AreEqual(labels[6], labels[7]);
            Assert.AreEqual(labels[6], labels[8]);

            Assert.AreNotEqual(labels[0], labels[2]);
            Assert.AreNotEqual(labels[2], labels[6]);
            Assert.AreNotEqual(labels[0], labels[6]);


            int[] labels2 = meanShift.Clusters.Nearest(input);
            Assert.IsTrue(labels.IsEqual(labels2));

            Assert.AreEqual(3 / 9.0, meanShift.Clusters.Proportions[labels[6]], 1e-6);
            Assert.AreEqual(2 / 9.0, meanShift.Clusters.Proportions[labels[0]], 1e-6);
            Assert.AreEqual(4 / 9.0, meanShift.Clusters.Proportions[labels[2]], 1e-6);
        }
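
As a small follow-up to the two tests (a sketch, not part of the original listing): besides labelling the training data, the learned collection can also be asked to classify observations it has not seen, which is how the clustering is typically used after learning.

// Learn a clustering as in the tests above, then label a new point.
Accord.Math.Random.Generator.Seed = 0;

double[][] data =
{
    new double[] { 1, 1, 1 },
    new double[] { 1, 2, 1 },
    new double[] { 9, 9, 9 },
    new double[] { 9, 8, 9 },
};

var ms = new MeanShift(dimension: 3, kernel: new UniformKernel(), bandwidth: 2);
MeanShiftClusterCollection clusters = ms.Learn(data);

// Classify an observation that was not part of the training set
int label = clusters.Decide(new double[] { 2, 1, 2 });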