Example #1
0
        /// <summary>
        /// Demonstrates the nearest-k-neighbours classifier: trains k via leave-one-out,
        /// prints margins and the error distribution for k = 1..4, then runs the visualizer.
        /// </summary>
        private void doNearestKNeighboursAlgorithmTest()
        {
            var metric = new EuclideanMetric();
            var alg    = new NearestKNeighboursAlgorithm(Data.TrainingSample, metric, 1);

            // Leave-one-out optimization selects the best k and stores it in alg.K
            alg.Train_LOO();
            var optK = alg.K;

            // fixed typo in output message: "Neigbour" -> "Neighbour"
            Console.WriteLine("Nearest K Neighbour: optimal k is {0}", optK);
            Console.WriteLine();

            // Margins
            Console.WriteLine("Margins:");
            calculateMargin(alg);
            Console.WriteLine();

            // Error distribution over a small range of k values
            Console.WriteLine("Errors:");
            for (int k = 1; k < 5; k++)
            {
                alg.K = k;
                var errors = alg.GetErrors(Data.Data);
                var ec     = errors.Count();
                var dc     = Data.Data.Count;
                // double arithmetic: the original 100.0F computed the ratio in single
                // precision before rounding, losing accuracy for no benefit
                var pct    = Math.Round(100.0D * ec / dc, 2);
                Console.WriteLine("{0}:\t{1} of {2}\t({3}%) {4}", k, ec, dc, pct, k == optK ? "<-LOO optimal" : string.Empty);
            }
            Console.WriteLine();

            Visualizer.Run(alg);
        }
Example #2
0
        /// <summary>
        /// Leave-one-out optimization: finds the k in [minK, maxK] with the fewest
        /// LOO misclassifications over the algorithm's training sample and assigns it to <c>alg.K</c>.
        /// Ties are broken in favour of the smallest k.
        /// </summary>
        /// <param name="alg">Algorithm to tune; its training sample is temporarily masked per point and always restored.</param>
        /// <param name="minK">Lower bound for k; defaults to (and is clamped to at least) 1.</param>
        /// <param name="maxK">Upper bound for k; clamped to sample count - 1, the size of each LOO sample.</param>
        public static void OptimizeLOO(NearestKNeighboursAlgorithm alg, int?minK = null, int?maxK = null)
        {
            if (!minK.HasValue || minK.Value < 1)
            {
                minK = 1;
            }
            // BUGFIX (off-by-one): each LOO sample holds Count-1 points, so k may not
            // exceed Count-1. The original guard compared against Count, letting a
            // caller-supplied maxK == Count slip through unclamped.
            if (!maxK.HasValue || maxK.Value > alg.TrainingSample.Count - 1)
            {
                maxK = alg.TrainingSample.Count - 1;
            }

            var kOpt      = int.MaxValue;
            var minErrCnt = int.MaxValue;

            for (int k = minK.Value; k <= maxK.Value; k++)
            {
                var errCnt = 0;
                alg.K = k;

                var initSample = alg.TrainingSample;

                foreach (var pData in initSample)
                {
                    // mask out the current point and predict it from the remaining sample
                    var looSample = initSample.ApplyMask((p, c, idx) => p != pData.Key);

                    try
                    {
                        alg.TrainingSample = looSample;

                        var predClass = alg.Predict(pData.Key);
                        var realClass = pData.Value;
                        if (!predClass.Equals(realClass))
                        {
                            errCnt++;
                        }
                    }
                    finally
                    {
                        // restore the full sample even if Predict throws
                        alg.TrainingSample = initSample;
                    }
                }

                // strict '<' keeps the smallest k on ties
                if (errCnt < minErrCnt)
                {
                    minErrCnt = errCnt;
                    kOpt      = k;
                }
            }

            // BUGFIX: if the k-range was empty (minK > maxK) no candidate was evaluated;
            // leave alg.K untouched instead of assigning the int.MaxValue sentinel.
            if (minErrCnt != int.MaxValue)
            {
                alg.K = kOpt;
            }
        }