Ejemplo n.º 1
1
    static void Main(string[] argv)
    {
        // Shogun multiclass KNN demo: train a k-NN classifier on real-valued
        // features and print the raw predictions for the test set.
        modshogun.init_shogun_with_defaults();
        const int k = 3;

        // Load the train/test feature matrices and the multiclass training labels.
        DoubleMatrix trainMatrix = Load.load_numbers("../data/fm_train_real.dat");
        DoubleMatrix testMatrix = Load.load_numbers("../data/fm_test_real.dat");
        DoubleMatrix labelMatrix = Load.load_labels("../data/label_train_multiclass.dat");

        RealFeatures trainFeatures = new RealFeatures(trainMatrix);
        RealFeatures testFeatures = new RealFeatures(testMatrix);
        // Distance is computed among the training points themselves; KNN reuses it at query time.
        EuclidianDistance distance = new EuclidianDistance(trainFeatures, trainFeatures);
        Labels labels = new Labels(labelMatrix);

        KNN knn = new KNN(k, distance, labels);
        knn.train();

        DoubleMatrix predictions = knn.apply(testFeatures).get_labels();
        Console.WriteLine(predictions.ToString());

        modshogun.exit_shogun();
    }
Ejemplo n.º 2
0
        public void TestSaveAndLoadKNNModel()
        {
            // Round-trip test: save a trained KNN model to disk, load it into a
            // fresh instance, and verify data points and label mappings survive.
            TestTrainValidData(); //Sets up a KNN with data
            string modelFileName = "KNNModel.txt";

            // BUGFIX: the original created StreamWriter/StreamReader instances and
            // never disposed them, so the writer was never flushed (risking a
            // truncated model file) and both handles stayed open, which can make
            // the File.Delete below fail. `using` guarantees flush + close.
            using (var writer = new StreamWriter(modelFileName))
            {
                KnnModel.Save(writer.BaseStream);
            }
            List <Dictionary <object, int> > labelMappingDict = KnnModel.CopyLabelMappingDictionary();
            List <KNNDataPoint> dataPoints = KnnModel.CopyDataPoints();

            KnnModel = new KNN();
            using (var reader = new StreamReader(modelFileName))
            {
                KnnModel.Load(reader.BaseStream);
            }
            List <KNNDataPoint> loadedDataPoints = KnnModel.CopyDataPoints();
            var loadedMappingDicts = KnnModel.CopyLabelMappingDictionary();

            // Every saved data point must equal its loaded counterpart, in order.
            for (int i = 0; i < dataPoints.Count; i++)
            {
                Assert.IsTrue(dataPoints[i].Equals(loadedDataPoints[i]));
            }
            // Every label-mapping entry must survive the round trip.
            for (int i = 0; i < labelMappingDict.Count; i++)
            {
                var savedMappingDict  = labelMappingDict[i];
                var loadedMappingDict = loadedMappingDicts[i];
                foreach (object key in savedMappingDict.Keys)
                {
                    Assert.IsTrue(savedMappingDict[key].Equals(loadedMappingDict[key]));
                }
            }
            File.Delete(modelFileName);
        }
Ejemplo n.º 3
0
	public static void Main() {
		// Shogun KNN demo using plain double arrays for features and labels.
		modshogun.init_shogun_with_defaults();
		const int k = 3;

		double[,] trainMatrix = Load.load_numbers("../data/fm_train_real.dat");
		double[,] testMatrix = Load.load_numbers("../data/fm_test_real.dat");
		double[] labelVector = Load.load_labels("../data/label_train_multiclass.dat");

		RealFeatures trainFeatures = new RealFeatures(trainMatrix);
		RealFeatures testFeatures = new RealFeatures(testMatrix);
		// Distances are computed among the training points themselves.
		EuclidianDistance distance = new EuclidianDistance(trainFeatures, trainFeatures);
		Labels labels = new Labels(labelVector);

		KNN knn = new KNN(k, distance, labels);
		knn.train();

		// Classify the test set and dump the raw predictions to stdout.
		double[] predictions = knn.apply(testFeatures).get_labels();
		foreach (double prediction in predictions) {
			Console.Write(prediction);
		}

		modshogun.exit_shogun();
	}
Ejemplo n.º 4
0
        public void KNN_Test()
        {
            Logger.Info("KNN_Test");

            // Build the chess-board two-class benchmark problem.
            var problem = ProblemFactory.CreateClassificationProblem(ClassificationProblemType.ChessBoard);

            Logger.Info("Loading training data.");

            var trainingSet   = problem.TrainingSet;
            var validationSet = problem.ValidationSet;

            // Train a 7-nearest-neighbour classifier on the training split.
            var classifier = new KNN
            {
                KNN_K    = 7,
                TrainSet = trainingSet
            };

            classifier.Train();

            Logger.Info("Doing cross-validation.");

            // Count validation examples whose predicted label matches the true one.
            var hitCount = 0;
            foreach (var example in validationSet.Examples)
            {
                if (example.Label.Id == classifier.Predict(example.X))
                {
                    hitCount++;
                }
            }

            var correctRatio = 1.0 * hitCount / validationSet.Count;

            Logger.Info("CorrectRatio: {0}", correctRatio);

            Assert.IsTrue(correctRatio > 0.900, string.Format("KNN (2-class) Correct Ratio, expected: greater than 0.900, actual: {0}.", correctRatio));
        }
Ejemplo n.º 5
0
    public static void Main()
    {
        // Shogun multiclass KNN demo: trains on real-valued features and prints
        // predictions extracted via MulticlassLabels.obtain_from_generic.
        modshogun.init_shogun_with_defaults();
        const int k = 3;

        double[,] trainMatrix = Load.load_numbers("../data/fm_train_real.dat");
        double[,] testMatrix = Load.load_numbers("../data/fm_test_real.dat");
        double[] labelVector = Load.load_labels("../data/label_train_multiclass.dat");

        RealFeatures trainFeatures = new RealFeatures(trainMatrix);
        RealFeatures testFeatures = new RealFeatures(testMatrix);
        // Distances are computed among the training points themselves.
        EuclidianDistance distance = new EuclidianDistance(trainFeatures, trainFeatures);
        MulticlassLabels labels = new MulticlassLabels(labelVector);

        KNN knn = new KNN(k, distance, labels);
        knn.train();

        // Downcast the generic result labels to multiclass labels, then print them.
        double[] predictions = MulticlassLabels.obtain_from_generic(knn.apply(testFeatures)).get_labels();
        foreach (double prediction in predictions)
        {
            Console.Write(prediction);
        }

        modshogun.exit_shogun();
    }
Ejemplo n.º 6
0
 public void EuclideanDistance_2dim()
 {
     // Points (1,1) and (0,1) differ only in the first coordinate,
     // so their Euclidean distance is exactly 1.
     var a = new List <double> { 1.0, 1.0 };
     var b = new List <double> { 0.0, 1.0 };

     Assert.AreEqual(1, KNN.EuclideanDistance(a, b));
 }
Ejemplo n.º 7
0
        public void CalculetaAnsBasedOnData(long viewCount, long uploadCount, int dataInterval)
        {
            // Bucket the channel data by the given interval, then run KNN (using
            // the same bucketed data for both arguments) on the query counts.
            var bucketedData = DivideData(youtubeChannel, dataInterval);
            var knn = new KNN(bucketedData, bucketedData, dataInterval);
            knn.GetResultBasedOnData(viewCount, uploadCount);
        }
Ejemplo n.º 8
0
        static void Main(string[] args)
        {
            // Evaluate a 3-NN classifier on the iris data set with an 80/20 holdout split.
            var dataset = new Dataset(@"C:\datasets\iris.csv", null);
            var classifier = new KNN(3);
            IValidation validation = new HoldoutValidation(0.8);

            var acr = validation.Validate(classifier, dataset);

            Console.WriteLine($"{acr}");
        }
Ejemplo n.º 9
0
        public void VoteCheck()
        {
            // Given these four distance classes, the vote is expected to pick 'A'.
            var candidates = new List <DistanceClass>();
            candidates.Add(new DistanceClass('A', 3));
            candidates.Add(new DistanceClass('B', 10));
            candidates.Add(new DistanceClass('C', 30));
            candidates.Add(new DistanceClass('D', 6));

            Assert.AreEqual('A', KNN.Vote(candidates));
        }
Ejemplo n.º 10
0
 public bool Load(Stream fileStream)
 {
     // Replace the current model with one deserialized from the stream.
     // Returns false on an I/O failure; other exception types propagate,
     // matching the original behavior.
     Model = new KNN();
     try
     {
         Model.Load(fileStream);
     }
     catch (IOException)
     {
         return false;
     }
     return true;
 }
Ejemplo n.º 11
0
        static void Main(string[] args)
        {
            // Load handwriting feature vectors and labels, split the data, then
            // select the best K in [1..4] by validation performance and run the
            // final test with it.
            var dataHandler = new DataHandler();

            dataHandler.ReadFeatureVector(@"D:\Projects\NN_CPP_Handwriting\ML_Nist\ML_Nist\Data\train-images.idx3-ubyte");
            dataHandler.ReadFeatureLabels(@"D:\Projects\NN_CPP_Handwriting\ML_Nist\ML_Nist\Data\train-labels.idx1-ubyte");

            dataHandler.SplitData();
            dataHandler.CountClasses();

            var knn = new KNN()
            {
                TrainingData   = dataHandler.TrainingData,
                TestData       = dataHandler.TestData,
                ValidationData = dataHandler.ValidationData
            };

            double bestPerformance = 0.0;
            int bestK = 1;

            for (int k = 1; k <= 4; k++)
            {
                knn.SetK(k);
                double performance = knn.ValidatePerformance();

                // The first candidate seeds the running best; later candidates
                // replace it only on a strict improvement (same rule as before,
                // with the k == 1 special case folded into one condition).
                if (k == 1 || performance > bestPerformance)
                {
                    bestPerformance = performance;
                    bestK           = k;
                }
            }

            // Re-run with the winning K against the held-out test split.
            knn.SetK(bestK);
            knn.TestPerformance();
        }
Ejemplo n.º 12
0
    /// <summary>
    /// Main program entry point: runs KNN over the digits data set.
    /// </summary>
    /// <param name="args">Expects exactly three values: k, feature file, label file.</param>
    public static void Main(String[] args)
    {
        // Bail out with usage help unless exactly three arguments were supplied.
        if (args.Length != 3)
        {
            Console.WriteLine("Usage: [program].exe k feature_file label_file");
            return;
        }

        int k = Convert.ToInt32(args[0]);
        string outputFile = "digitsOutput" + k + ".csv";
        var dataStructure = new DataStructure(@"hw12data\digitsDataset", args[1], args[2], outputFile, 1);

        new KNN().run(k, dataStructure);
    }
Ejemplo n.º 13
0
        private void Button_Click_Start_Clasification(object sender, RoutedEventArgs e)
        {
            // Runs the full KNN classification experiment: splits the articles into
            // training/testing sets, bootstraps with a cold-start subset, classifies
            // the test set, and reports accuracy in the UI (status text is Polish).
            int assignedProperly    = 0;
            int assingedNotProperly = 0;

            // Split ChosenSet into TrainingSet/TestingSet using the percentage typed in the UI.
            Utils.DistributeArticles(ChosenSet, ref TrainingSet, ref TestingSet, int.Parse(trainingDataPercentageInput.Text));
            for (int j = 0; j < int.Parse(countOfLoopsInput.Text); ++j)
            {
                Dictionary <string, List <Article> > labelArticlesMap = new Dictionary <string, List <Article> >();
                SetExtractor();
                PrepareLabelArticleMap(ref labelArticlesMap);

                //Taking some Articles from TrainingSet to resolve problem of cold start:
                // roughly 10% of the per-label share of the training set, capped at the
                // number of articles actually available for that label.
                List <Article> coldStart = new List <Article>();
                foreach (var pair in labelArticlesMap)
                {
                    int amountToTake = TrainingSet.Count / labelArticlesMap.Count / 10;
                    coldStart.AddRange(pair.Value.Take(amountToTake > pair.Value.Count ? pair.Value.Count : amountToTake).ToList());
                }

                //Creating mapping from coldstart Articles to characteristics values, assigning actual labels
                Dictionary <Article, List <double> > ColdStart = new Dictionary <Article, List <double> >();
                foreach (var item in coldStart)
                {
                    ColdStart.Add(item, characteristicExtractor.GetWeights(item));
                    item.AssignedLabel = item.ActualLabel;
                }

                //Computing characteristics for first run of KNN - resolving problem of cold start
                Dictionary <Article, List <double> > KnnMap = new Dictionary <Article, List <double> >();
                foreach (var item in TestingSet)
                {
                    KnnMap.Add(item, characteristicExtractor.GetWeights(item));
                }
                KNN.ColdStart(ref KnnMap, ref ColdStart, int.Parse(kInput.Text), AssignMetric());

                //Actual KNN algorithm
                KNN.Testing(ref KnnMap, int.Parse(kInput.Text), Metrics.EuclideanMetricDistance);

                // Tally hits and misses across iterations (AssignedLabel vs. ActualLabel).
                assignedProperly    += KnnMap.Count(i => i.Key.AssignedLabel == i.Key.ActualLabel);
                assingedNotProperly += KnnMap.Count(i => i.Key.AssignedLabel != i.Key.ActualLabel);
            }
            // "Klasyfikacja zakończona" = "Classification finished" (UI status message).
            UpdateStatus("Klasyfikacja zakończona");
            succededDisplay.Text   = assignedProperly.ToString();
            failedDisplay.Text     = assingedNotProperly.ToString();
            accuracityDisplay.Text = (100.0 * assignedProperly / (assignedProperly + assingedNotProperly)).ToString() + "%";
        }
Ejemplo n.º 14
0
        public KNNUnitTest()
        {
            // Fixture setup: four training points — two labelled 'A' along the
            // x-axis, two labelled 'B' along the y-axis — feeding a 3-NN model.
            var trainingData = new List <TrainingData>();
            trainingData.Add(new TrainingData(new List <double> { 1.0, 0.0 }, 'A'));
            trainingData.Add(new TrainingData(new List <double> { 2.0, 0.0 }, 'A'));
            trainingData.Add(new TrainingData(new List <double> { 0.0, 1.0 }, 'B'));
            trainingData.Add(new TrainingData(new List <double> { 0.0, 2.0 }, 'B'));

            kNN = new KNN(trainingData, 3);
        }
Ejemplo n.º 15
0
        public void CrossValidation(int howManyValidationFolds)
        {
            // K-fold cross-validation over the channel data: each iteration holds
            // out one contiguous slice of `range` records as the test set, trains
            // Bayes on the rest, runs KNN, and prints each fold's result.
            // NOTE(review): integer division means the last dataCount % folds
            // records are never used as a test slice — confirm this is intended.
            var dataCount = youtubeChannel.Count;
            var range     = dataCount / howManyValidationFolds;
            var values    = youtubeChannel.Values.ToList();

            Console.WriteLine("Full Data: " + dataCount + " inputs. For testing we take 1/" + howManyValidationFolds + " of data. That is: " + range + "\n");
            int           start        = 0;
            Bayes         bayes        = new Bayes();
            List <string> knnResults   = new List <string>();
            List <string> bayesResults = new List <string>();

            for (int i = 0; i < howManyValidationFolds; i++)
            {
                var testData  = values.GetRange(start, range).ToDictionary(x => x.channelName);                                                               // take `range` records for testing
                var trainData = values.GetRange(0, start).Concat(values.GetRange(start + range, dataCount - start - range)).ToDictionary(x => x.channelName); // the remaining records are used for training
                start += range;

                var fullData    = DivideData(youtubeChannel, howManyValidationFolds);
                var dividedData = DivideData(trainData, howManyValidationFolds);
                // run both classifiers on this fold
                KNN knn = new KNN(fullData, dividedData, howManyValidationFolds);
                bayes.Train(dividedData);
                bayesResults.Add(bayes.Test(fullData, testData));
                knnResults.Add(knn.Test(testData));
            }
            // Print the per-fold results, separated by horizontal rules.
            Console.WriteLine(new string('-', 40));
            foreach (var item in knnResults)
            {
                Console.WriteLine(item);
            }
            Console.WriteLine(new string('-', 40));
            foreach (var item in bayesResults)
            {
                Console.WriteLine(item);
            }
            Console.WriteLine(new string('-', 40));
        }
Ejemplo n.º 16
0
        private void KNN_Button_Click(object sender, RoutedEventArgs e)
        {
            // WPF handler: assigns synthetic like/dislike/unrated labels to the
            // global GIST feature list, runs KNN over the unrated items, then
            // rebuilds the image list view ordered by similarity and distance.
            imgrid.Source = null;
            KNN knn = new KNN();

            List <GistData> trainSamples = new List <GistData>();
            List <GistData> testSamples  = new List <GistData>();
            List <GistData> trainClasses = new List <GistData>();

            int fakeSimilar = 0;

            // Fake ratings round-robin by index: 0 -> dislike (-1), 1 -> like (1),
            // 2 -> unrated (0). Unrated items become KNN test samples.
            foreach (var item in globalgistdatalist)
            {
                if (fakeSimilar % 3 == 2)
                {
                    item.IsSimilar = 0;   //no comment, will go to testSamples
                }
                else if (fakeSimilar % 3 == 1)
                {
                    item.IsSimilar = 1;  //like, Will go to trainSamples
                }
                else if (fakeSimilar % 3 == 0)
                {
                    item.IsSimilar = -1;  //dislike, Will go to trainSamples
                }
                Console.WriteLine("Before KNN: " + item.filename + " " + item.IsSimilar);

                fakeSimilar++;

                if (item.IsSimilar == 0)
                {
                    testSamples.Add(item);
                }
                else
                {
                    trainSamples.Add(item);
                }
            }


            // Classify the test samples (k = 3, threshold 0.5) and merge the
            // classified items back with the training items.
            trainClasses = knn.TestKnnCase(trainSamples, testSamples, 3, 0.5);
            globalgistdatalist.Clear();
            globalgistdatalist.AddRange(trainClasses);
            globalgistdatalist.AddRange(trainSamples);
            Console.WriteLine(globalgistdatalist.Count());

            //sp1.Children.Clear();
            lv1.Items.Clear();

            // Most-similar first; ties broken by ascending distance.
            globalgistdatalist = globalgistdatalist.OrderByDescending(o => o.IsSimilar).ThenBy(o => o.distance).ToList();

            //Now load the files in the stack panel
            for (int j = 0; j < globalgistdatalist.Count; j++)
            {
                // Skip consecutive duplicates (same file at the same distance).
                if (j > 0)
                {
                    if (globalgistdatalist[j].filename == globalgistdatalist[j - 1].filename && globalgistdatalist[j].distance == globalgistdatalist[j - 1].distance)
                    {
                        continue;
                    }
                }

                Image img = new Image();

                // Decode the thumbnail eagerly (OnLoad) at 200x200.
                BitmapImage source = new BitmapImage();
                source.BeginInit();
                source.UriSource         = new Uri(globalgistdatalist[j].filename, UriKind.Relative);
                source.CacheOption       = BitmapCacheOption.OnLoad;
                source.DecodePixelWidth  = 200;
                source.DecodePixelHeight = 200;
                source.EndInit();
                img.Source  = source;
                img.Stretch = Stretch.Uniform;
                int qq = source.PixelHeight;        // Image loads here (forces the eager decode)
                System.Windows.Controls.ToolTip tooltip = new System.Windows.Controls.ToolTip();
                tooltip.Content = globalgistdatalist[j].metadata;
                img.ToolTip     = tooltip;
                //sp1.Children.Add(img);



                // Yes/No radio buttons overlaid on each image.
                System.Windows.Controls.RadioButton radio1like    = new System.Windows.Controls.RadioButton();
                System.Windows.Controls.RadioButton radio2dislike = new System.Windows.Controls.RadioButton();
                radio1like.VerticalAlignment      = VerticalAlignment.Bottom;
                radio1like.Content                = "Yes";
                radio2dislike.VerticalAlignment   = VerticalAlignment.Bottom;
                radio2dislike.HorizontalAlignment = System.Windows.HorizontalAlignment.Right;
                radio2dislike.Content             = "No";
                var grid = new Grid();
                grid.Children.Add(img);
                grid.Children.Add(radio1like);
                grid.Children.Add(radio2dislike);
                lv1.Items.Add(grid);

                //if (checkbox.IsChecked == true)
                //{
                //    globalgistdatalist[j].IsSimilar = 1;
                //    imgrid.Source = source;
                //}
                //else
                //    globalgistdatalist[j].IsSimilar = -1;

                // NOTE(review): these checks run immediately after the buttons are
                // created, so IsChecked is always false here — the user has had no
                // chance to click. Confirm whether this belongs in an event handler.
                if (radio1like.IsChecked == true)
                {
                    globalgistdatalist[j].IsSimilar = 1;
                }
                else if (radio2dislike.IsChecked == true)
                {
                    globalgistdatalist[j].IsSimilar = -1;
                }


                Console.WriteLine("After KNN: " + globalgistdatalist[j].filename + " " + globalgistdatalist[j].IsSimilar + " distance :" + globalgistdatalist[j].distance);
            }

            lv1.Items.Refresh();
        }
Ejemplo n.º 17
0
 // SWIG interop helper: expose the native pointer wrapper for a KNN proxy,
 // or a null handle when the proxy itself is null.
 internal static HandleRef getCPtr(KNN obj)
 {
     if (obj == null)
     {
         return new HandleRef(null, IntPtr.Zero);
     }
     return obj.swigCPtr;
 }
Ejemplo n.º 18
0
        public void TestKNN()
        {
            // (height, weight, label) training corpus: 17 samples of people
            // labelled "Thin" or "Fat".
            var corpus = new[]
            {
                new { Height = 1.5, Weight = 40.0, Label = "Thin" },
                new { Height = 1.5, Weight = 50.0, Label = "Fat" },
                new { Height = 1.5, Weight = 60.0, Label = "Fat" },
                new { Height = 1.6, Weight = 40.0, Label = "Thin" },
                new { Height = 1.6, Weight = 50.0, Label = "Thin" },
                new { Height = 1.6, Weight = 60.0, Label = "Fat" },
                new { Height = 1.6, Weight = 70.0, Label = "Fat" },
                new { Height = 1.7, Weight = 50.0, Label = "Thin" },
                new { Height = 1.7, Weight = 60.0, Label = "Thin" },
                new { Height = 1.7, Weight = 70.0, Label = "Fat" },
                new { Height = 1.7, Weight = 80.0, Label = "Fat" },
                new { Height = 1.8, Weight = 60.0, Label = "Thin" },
                new { Height = 1.8, Weight = 70.0, Label = "Thin" },
                new { Height = 1.8, Weight = 80.0, Label = "Fat" },
                new { Height = 1.8, Weight = 90.0, Label = "Fat" },
                new { Height = 1.9, Weight = 80.0, Label = "Thin" },
                new { Height = 1.9, Weight = 90.0, Label = "Fat" }
            };

            KNN <double, string> algo = new KNN <double, string>(NumericCalculation.EuclideanDistance);

            // Height range: [1.5 - 2.0] — maps onto [0, 1].
            algo.AddNormalizer(o => (o - 1.5) * 2.0);

            // Weight range: [20.0 - 100.0] — maps onto [0, 1].
            algo.AddNormalizer(o => (o - 20.0) * 0.0125);

            // Train on every sample.
            foreach (var sample in corpus)
            {
                algo.AddTrainingData(new[] { sample.Height, sample.Weight }, sample.Label);
            }

            // Re-classify the training corpus with k = 5 and count self-consistent predictions.
            var sameCount = 0;
            foreach (var sample in corpus)
            {
                var predicted = algo.Perform(new[] { sample.Height, sample.Weight }, 5);
                if (sample.Label.Equals(predicted))
                {
                    sameCount++;
                }
            }

            Assert.IsTrue(sameCount * 1.0 / corpus.Length > 0.88);
        }
        static void Main(string[] args)
        {
            // Console driver for a KNN text classifier over a Reuters-style SGML
            // corpus (identifiers, prompts, and file paths are Polish):
            //   1. read K and the metric choice (Euclidean / Manhattan) from stdin,
            //   2. load the allowed labels and parse the test-set articles,
            //   3. classify up to 150 articles whose PLACES tag matches an allowed
            //      label, writing "<actual>  <predicted>" pairs plus the match
            //      ratio to Wyniki/<metric>_<K>.txt.
            List <string> wczytane             = new List <string>();  // actual labels read from the corpus
            List <string> wyznaczone           = new List <string>();  // labels predicted by KNN
            int           K                    = 0;
            string        wybraneEtykiety      = "";
            string        artykulDoWyznaczenia = "";

            // "Wczytaj K" = "Enter K".
            Console.WriteLine("Wczytaj K");
            K = Convert.ToInt32(Console.ReadLine());
            // "Wprowadz metryke" = "Enter the metric" (1 = Euclidean, 2 = Manhattan).
            Console.WriteLine("Wprowadz metryke");
            Console.WriteLine("Metryka euklidesa:1  ,  Mannhattan:2");
            int metryka = Convert.ToInt32(Console.ReadLine());

            // NOTE(review): any input other than 1 or 2 leaves `metryki` null and
            // will fail later — consider validating.
            Metryki.Metryki metryki = null;
            if (metryka == 1)
            {
                metryki = new Metryki.MetrykaEuklidesowa();
            }
            if (metryka == 2)
            {
                metryki = new Metryki.MetrykaManhattan();
            }

            // Load the set of labels to classify against ("ZadaneEtykiety" = given labels).
            wybraneEtykiety = System.IO.File.ReadAllText(@"ZbiorTreningowy/ZadaneEtykiety.txt");
            WybierzEtykiety(wybraneEtykiety);
            var       wcz       = new Wczytanie();
            var       docId     = 0;
            var       arr       = wcz.wczytaj("zbiorTestowy");  // read the test-set files
            var       artykulNr = 0;
            int       licznik   = 0;                            // count of classified articles (capped at 150)
            MaciezWag mw;

            for (var i = 0; i < arr.Length; i++)
            {
                var s = arr[i];
                if (s != null)
                {
                    // Parse the SGML chunk (presumably via HtmlAgilityPack — confirm)
                    // and walk every <REUTERS> article it contains.
                    var dots = new HtmlDocument();
                    dots.LoadHtml(s);
                    foreach (var nodes in dots.DocumentNode.Descendants("REUTERS"))
                    {
                        if (licznik < 150)
                        {
                            bool jedziemy = false;  // "go ahead": article has an allowed PLACES label and a BODY
                            foreach (var node in nodes.Descendants("PLACES"))
                            {
                                if (node != null && node.InnerText != "" && _wybraneEtykiety.Find(x => x == node.InnerText) != null)
                                {
                                    foreach (var nodeb in nodes.Descendants("BODY"))
                                    {
                                        wczytane.Add(node.InnerText);
                                        jedziemy = true;
                                    }
                                }
                            }

                            if (jedziemy)
                            {
                                // Build the weight matrix for each article body and classify it.
                                foreach (var node in nodes.Descendants("BODY"))
                                {
                                    mw = new MaciezWag(node.InnerText, wybraneEtykiety);
                                    KNN knn = new KNN(mw, K, metryki);
                                    wyznaczone.Add(knn.Knn());
                                }

                                licznik++;
                            }
                        }
                    }
                }
            }

            // Build the report: one "<actual>  <predicted>" line per article, then
            // the fraction of matches ("procentZbieznosci" counts agreements).
            string wynik             = "";
            int    procentZbieznosci = 0;

            for (int i = 0; i < wyznaczone.Count; i++)
            {
                wynik += wczytane[i] + "  " + wyznaczone[i] + System.Environment.NewLine;
                if (wczytane[i] == wyznaczone[i])
                {
                    procentZbieznosci++;
                }
            }

            wynik += Convert.ToString((double)procentZbieznosci / wyznaczone.Count);
            string nazwaPliku = @"Wyniki/" + metryki.ToString() + "_" + Convert.ToString(K) + ".txt";

            System.IO.File.WriteAllText(nazwaPliku, wynik);



            //  Console.WriteLine(knn.Knn());
            Console.ReadKey();
            Console.ReadLine();
        }
Ejemplo n.º 20
0
 // Use this for initialization
 void Start()
 {
     // Unity lifecycle hook: create the _targetKnn instance before first use.
     _targetKnn = new KNN();
 }
Ejemplo n.º 21
0
        static void Main(string[] args)
        {
            // Train and evaluate four classifiers (KNN, Naive Bayes, SVM, and
            // multinomial logistic regression) on the same CSV train/test split,
            // saving each trained model plus its predictions and accuracy.
            // (The original's large commented-out hard-coded sample block was removed.)
            DataTable dataTrain = new CsvReader(@"H:\Documents\Visual Studio 2015\Projects\ML\ML\CSV\train\train.csv", true).ToTable();
            DataTable dataTest  = new CsvReader(@"H:\Documents\Visual Studio 2015\Projects\ML\ML\CSV\test\testWithLabels.csv", true).ToTable();

            // Split each table into a label vector (outputs) and feature matrix (inputs).
            int[] trainOutputs = dataTrain.Columns["label"].ToArray <int>();
            dataTrain.Columns.Remove("label");
            double[][] trainInputs = dataTrain.ToJagged <double>();

            int[] testOutputs = dataTest.Columns["label"].ToArray <int>();
            dataTest.Columns.Remove("label");
            double[][] testInputs = dataTest.ToJagged <double>();

            // K-nearest neighbours (k = 4).
            var knn        = new KNN(trainInputs, trainOutputs, 4);
            var machineKNN = knn.MachineLearning();

            int[] predictedKNN = machineKNN.Decide(testInputs);
            machineKNN.Save(@"H:\Documents\Visual Studio 2015\Projects\ML\ML\models\knn.bin");

            OutputResultsСlassifier showKNN = new OutputResultsСlassifier(machineKNN, testInputs, testOutputs);

            showKNN.SavePredicted(predictedKNN);
            showKNN.SaveAccuracy();

            // Naive Bayes.
            var nb        = new NB(trainInputs, trainOutputs);
            var machineNB = nb.MachineLearning();

            int[] predictedNB = machineNB.Decide(testInputs);
            machineNB.Save(@"H:\Documents\Visual Studio 2015\Projects\ML\ML\models\nb.bin");

            OutputResultsСlassifier show_nb = new OutputResultsСlassifier(machineNB, testInputs, testOutputs);

            show_nb.SavePredicted(predictedNB);
            show_nb.SaveAccuracy();

            // Support vector machine.
            var svm        = new SVM(trainInputs, trainOutputs);
            var machineSVM = svm.MachineLearning();

            int[] predictedSVM = machineSVM.Decide(testInputs);
            machineSVM.Save(@"H:\Documents\Visual Studio 2015\Projects\ML\ML\models\svm.bin");

            OutputResultsСlassifier showSVM = new OutputResultsСlassifier(machineSVM, testInputs, testOutputs);

            showSVM.SavePredicted(predictedSVM);
            showSVM.SaveAccuracy();

            // Multinomial logistic regression.
            var mlr        = new MLR(trainInputs, trainOutputs);
            var machineMLR = mlr.MachineLearning();

            int[] predictedMLR = machineMLR.Decide(testInputs);
            machineMLR.Save(@"H:\Documents\Visual Studio 2015\Projects\ML\ML\models\mlr.bin");

            OutputResultsСlassifier showMLR = new OutputResultsСlassifier(machineMLR, testInputs, testOutputs);

            // BUGFIX: the original passed predictedSVM here, so the MLR results
            // file contained the SVM's predictions. Save the MLR predictions.
            showMLR.SavePredicted(predictedMLR);
            showMLR.SaveAccuracy();
            showMLR.SaveProbabilities(machineMLR);
        }
Ejemplo n.º 22
0
 // Classify `unknown` by delegating to the shared regression-style KNN helper
 // over the CSV-loaded point list.
 public override object Knn(double[] unknown) => KNN.ClassifyReg(unknown, CSVläsare.pointList);
Ejemplo n.º 23
0
 // Use this for initialization
 void Start()
 {
     // Unity lifecycle hook: create the _modulatorKnn instance before first use.
     _modulatorKnn = new KNN();
 }
Ejemplo n.º 24
0
 public KNNProblemPredictor()
 {
     // Initialize the predictor with a fresh (untrained) KNN model.
     Model = new KNN();
 }
Ejemplo n.º 25
0
 // Use this for initialization
 void Start()
 {
     // Unity lifecycle hook: create the _knn instance before first use.
     _knn = new KNN();
 }
Ejemplo n.º 26
0
 // Use this for initialization
 void Start()
 {
     // Unity lifecycle hook: create the _knn instance before first use.
     _knn = new KNN();
 }
Ejemplo n.º 27
0
 public void TestInitialize()
 {
     // Test setup: give each test a fresh KNN instance.
     KnnModel = new KNN();
 }
Ejemplo n.º 28
0
 // Use this for initialization
 void Start()
 {
     // Unity lifecycle hook: create the _targetKnn instance before first use.
     _targetKnn = new KNN();
 }
Ejemplo n.º 29
0
 // Use this for initialization
 void Start()
 {
     // Unity lifecycle hook: create the _modulatorKnn instance before first use.
     _modulatorKnn = new KNN();
 }