Example #1
        public void weights_test_tree_2()
        {
            KNearestNeighbors a;
            KNearestNeighbors b;

            {
                double[][] inputs  = Jagged.ColumnVector(4.2, 0.7, 0.7, 0.7, 1.3, 9.4, 9.4, 12);
                int[]      outputs = { 0, 0, 0, 1, 1, 2, 2, 2 };
                var        knn     = new KNearestNeighbors(k: inputs.Length);
                a = knn.Learn(inputs, outputs);
            }

            {
                double[][] inputs  = Jagged.ColumnVector(4.2, 0.7, 0.7, 1.3, 9.4, 12);
                int[]      outputs = { 0, 0, 1, 1, 2, 2 };
                double[]   weights = { 1, 2, 1, 1, 2, 1 };
                var        knn     = new KNearestNeighbors(k: inputs.Length);
                b = knn.Learn(inputs, outputs, weights);
            }

            {
                double[] x        = { 9.4 };
                double[] expected = a.Scores(x);
                double[] actual   = b.Scores(x);
                Assert.IsTrue(expected.IsEqual(actual, 1e-4));
            }
            {
                double[][] x        = Jagged.ColumnVector(4.2, 0.7, 1.3, 9.4, 12);
                double[][] expected = a.Scores(x);
                double[][] actual   = b.Scores(x);
                Assert.IsTrue(expected.IsEqual(actual, 1e-4));
            }
        }
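A note on the test above: it relies on the equivalence that an integer sample weight w behaves like repeating that sample w times. A minimal sketch of that expansion (hypothetical helper; assumes System.Collections.Generic is imported):

        // Hypothetical helper illustrating the equivalence weights_test_tree_2 checks:
        // an integer weight w on a sample acts like repeating that sample w times.
        static void ExpandWeights(double[][] inputs, int[] outputs, double[] weights,
                                  out double[][] xs, out int[] ys)
        {
            var xList = new List<double[]>();
            var yList = new List<int>();
            for (int i = 0; i < inputs.Length; i++)
            {
                for (int r = 0; r < (int)weights[i]; r++)
                {
                    xList.Add(inputs[i]);
                    yList.Add(outputs[i]);
                }
            }
            xs = xList.ToArray();
            ys = yList.ToArray();
        }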
Example #2
        // Train a model for the user; returns null on error.
        private static KNearestNeighbors TrainUser(DataTable userData)
        {
            // Convert the DataTable to input and output vectors
            double[][] inputs  = userData.ToJagged <double>("category", "difftime");
            int[]      outputs = userData.Columns["classification"].ToArray <int>();


            // Create a KNN learning algorithm
            var teacher = new KNearestNeighbors(k: 1); // use only the single nearest neighbor

            // Use the learning algorithm to learn

            KNearestNeighbors userModelKNN = null;

            try
            {
                userModelKNN = teacher.Learn(inputs, outputs);
            }
            catch (Exception e)
            {
                Debug.WriteLine(e.Message);
            }

            return(userModelKNN);
        }
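Because TrainUser returns null on failure, callers need a guard before using the model. A minimal, hypothetical calling sketch (the query vector layout must match the "category", "difftime" columns used for training):

        KNearestNeighbors model = TrainUser(userData);
        if (model != null)
        {
            int predicted = model.Decide(new double[] { 1.0, 42.5 }); // illustrative values
        }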
Example #3
        public string CalculateKNN(double [] data)
        {
            string answer = "";
            var    knn    = new KNearestNeighbors(k: 3);

            knn.Learn(Property.inputs, Property.outputs);

            try
            {
                if (knn.Decide(data) == 1)
                {
                    answer = "False";
                }
                else
                {
                    answer = "True";
                }
            }
            catch
            {
                MessageBox.Show("데이터 포멧 확인");
            }

            return(answer);
        }
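This method assumes a static Property class that holds the training set. A sketch of the minimal shape it would need (names taken from the snippet; values are illustrative):

        static class Property
        {
            // One row per training sample; the layout must match the vectors later passed to Decide.
            public static double[][] inputs =
            {
                new double[] { 0.2, 1.1 },
                new double[] { 3.4, 0.9 },
                new double[] { 3.1, 1.2 }
            };
            public static int[] outputs = { 0, 1, 1 };
        }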
Example #4
        public void foo()
        {
            String path = Environment.CurrentDirectory + "\\example.xlsx";

            // Read the Excel worksheet into a DataTable
            DataTable table = new ExcelReader(path).GetWorksheet("T1");

            // Read the input strings from the "Output" column
            String[] trainingInputs = table.Columns["Output"].ToArray <String>();

            // Create a new codification codebook to
            //convert strings into discrete symbols
            Codification codebook = new Codification(table,
                                                     "GeneratedByProgram");

            // Extract input and output pairs to train
            DataTable symbols = codebook.Apply(table);

            int[] trainingOutputs = symbols.ToArray <int>("GeneratedByProgram");

            // In order to compare strings, we will be using Levenshtein's string distance
            var knn = new KNearestNeighbors <string>(k: 1, distance: new Levenshtein());

            // We learn the algorithm:
            knn.Learn(trainingInputs, trainingOutputs);

            int answer = knn.Decide("Chars");
        }
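Since the codebook maps the "GeneratedByProgram" strings to integer symbols, the predicted integer can be translated back to its original string. A one-line sketch that could follow the Decide call inside foo() (verify the Revoke overload against your Accord version):

            string label = codebook.Revoke("GeneratedByProgram", answer);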
Example #5
        public void weights_test_2()
        {
            KNearestNeighbors <string> a;
            KNearestNeighbors <string> b;

            {
                string[] inputs  = { "Car", "Bar", "Bar", "Bar", "Jar", "Charm", "Charm", "Chair" };
                int[]    outputs = { 0, 0, 0, 1, 1, 2, 2, 2 };
                var      knn     = new KNearestNeighbors <string>(k: inputs.Length, distance: new Levenshtein());
                a = knn.Learn(inputs, outputs);
            }

            {
                string[] inputs  = { "Car", "Bar", "Bar", "Jar", "Charm", "Chair" };
                int[]    outputs = { 0, 0, 1, 1, 2, 2 };
                double[] weights = { 1, 2, 1, 1, 2, 1 };
                var      knn     = new KNearestNeighbors <string>(k: inputs.Length, distance: new Levenshtein());
                b = knn.Learn(inputs, outputs, weights);
            }

            {
                string   x        = "Bar";
                double[] expected = a.Scores(x);
                double[] actual   = b.Scores(x);
                Assert.AreEqual(expected, actual);
            }
            {
                string[]   x        = new[] { "Car", "Bar", "Jar", "Charm", "Chair" };
                double[][] expected = a.Scores(x);
                double[][] actual   = b.Scores(x);
                Assert.AreEqual(expected, actual);
            }
        }
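The Levenshtein distance that both models rely on can also be called directly, which helps when sanity-checking why a particular neighbor was chosen. A small sketch:

            var levenshtein = new Levenshtein();
            double d1 = levenshtein.Distance("Charm", "Chair"); // 2 edits
            double d2 = levenshtein.Distance("Bar", "Jar");     // 1 edit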
Example #6
        public void learn_test()
        {
            #region doc_learn_distance
            // Create some sample learning data. In this data,
            // the first two instances belong to a class, the
            // four next belong to another class and the last
            // three to yet another.

            double[][] inputs =
            {
                // The first two are from class 0
                new double[] { -5, -2, -1 },
                new double[] { -5, -5, -6 },

                // The next four are from class 1
                new double[] {  2,  1,  1 },
                new double[] {  1,  1,  2 },
                new double[] {  1,  2,  2 },
                new double[] {  3,  1,  2 },

                // The last three are from class 2
                new double[] { 11,  5,  4 },
                new double[] { 15,  5,  6 },
                new double[] { 10,  5,  6 },
            };

            int[] outputs =
            {
                0, 0,       // First two from class 0
                1, 1, 1, 1, // Next four from class 1
                2, 2, 2     // Last three from class 2
            };


            // Now we will create the K-Nearest Neighbors algorithm. For this
            // example, we will be choosing k = 4. This means that, for a given
            // instance, its nearest 4 neighbors will be used to cast a decision.
            var knn = new KNearestNeighbors <double[]>(k: 4, distance: new SquareEuclidean());

            // We learn the algorithm:
            knn.Learn(inputs, outputs);

            // After the algorithm has been created, we can classify a new instance:
            int answer = knn.Decide(new double[] { 11, 5, 4 }); // answer will be 2.

            // Let's say we would like to compute the error matrix for the classifier:
            var cm = GeneralConfusionMatrix.Estimate(knn, inputs, outputs);

            // We can use it to estimate measures such as
            double error = cm.Error;    // should be 0
            double acc   = cm.Accuracy; // should be 1
            double kappa = cm.Kappa;    // should be 1
            #endregion

            Assert.AreEqual(2, answer);
            Assert.AreEqual(0, error);
            Assert.AreEqual(1, acc);
            Assert.AreEqual(1, kappa);
        }
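Beyond the scalar measures read above, GeneralConfusionMatrix also exposes the raw count matrix, which is often more informative for multi-class problems. A sketch continuing from the cm variable above (ColumnTotals is the same property Example #9 uses):

            int[,] counts = cm.Matrix;       // raw per-class count matrix
            int[]  totals = cm.ColumnTotals; // per-class totals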
Example #7
        public void Learn()
        {
            var inputs  = GetLearnInputs();
            var outputs = GetOutputs();

            KNearestNeighbors = new KNearestNeighbors(k: 4);
            KNearestNeighbors.Learn(inputs, outputs);
        }
        /// <summary>
        /// .NET Accord kNN classifier for k-fold cross-validation of AU27
        /// </summary>
        /// <param name="AUs"></param>
        /// <param name="counts"></param>
        /// <param name="kfold"></param>
        /// <param name="kValue"></param>
        public Accord.MachineLearning.KNearestNeighbors <double[]> crossValidateAU27(List <string> AUs, int counts, int kfold, int kValue)
        {
            int count = 0;

            double[][] inputs  = new double[balanceLibraries.balanceLibrary(AUs).Count() - kfold][]; // 88 arrays in the database
            int[]      outputs = new int[balanceLibraries.balanceLibrary(AUs).Count() - kfold];

            for (int i = 0; i < balanceLibraries.balanceLibrary(AUs).Count(); i++)
            {
                XmlSerializer serializer = new XmlSerializer(typeof(getDsitanceToNoseTipInOneFrame));
                if (i < (counts - 1) * kfold || i >= counts * kfold)
                {
                    TextReader reader = new StreamReader(balanceLibraries.balanceLibrary(AUs)[i]);
                    try
                    {
                        inputs[count] = new double[28];
                        var read = (getDsitanceToNoseTipInOneFrame)serializer.Deserialize(reader);

                        for (int m = 0; m < 28; m++)
                        {
                            //27
                            if (m == 0 || m == 1 || m == 3 || m == 10 || m == 14 || m == 26 || m == 27 || m == 21)
                            {
                                inputs[count][m] = read.Distances[m];

                                if (balanceLibraries.balanceLibrary(AUs)[i].Contains("Smile"))
                                {
                                    outputs[count] = 0;
                                }
                                else if (balanceLibraries.balanceLibrary(AUs)[i].Contains("Laugh"))
                                {
                                    outputs[count] = 0;
                                }
                                else if (balanceLibraries.balanceLibrary(AUs)[i].Contains("Shock"))
                                {
                                    outputs[count] = 1;
                                }
                                else if (balanceLibraries.balanceLibrary(AUs)[i].Contains("Sad"))
                                {
                                    outputs[count] = 0;
                                }
                            }
                        }


                        count++;
                    }
                    catch (Exception ex)
                    {
                        MessageBox.Show(ex.ToString());
                    }
                    reader.Close();
                }
            }
            var teacher = new KNearestNeighbors <double[]>(k: kValue, distance: new SquareEuclidean());

            return(teacher.Learn(inputs, outputs));
        }
Example #9
        public static Dictionary <int, string> KnnCreate(Dictionary <List <string>, double[][]> trainingSet)
        {
            // Create some sample learning data.
            int        labelCounter           = -1;
            List <int> classesList            = new List <int>();
            Dictionary <int, string> labelMap = new Dictionary <int, string>();

            foreach (string label in trainingSet.First().Key.ToArray())
            {
                if (!labelMap.ContainsValue(label))
                {
                    labelCounter++;
                    classesList.Add(labelCounter);
                    labelMap.Add(labelCounter, label);
                    Console.WriteLine(labelCounter + ": " + label);
                }
                else
                {
                    classesList.Add(labelCounter);
                }
            }

            int[]      classes = classesList.ToArray();
            double[][] inputs  = trainingSet.First().Value;


            // Now we will create the K-Nearest Neighbors algorithm.
            // It's possible to switch around the k: 5 for the possibility of better accuracy
            var knn = new KNearestNeighbors(k: 5);

            // We train the algorithm:
            knn.Learn(inputs, classes);

            // Let's say we would like to compute the error matrix for the classifier:
            var cm = GeneralConfusionMatrix.Estimate(knn, inputs, classes);

            // We can use it to estimate measures (computed here on the training data itself)
            double error = cm.Error;
            double acc   = cm.Accuracy;
            double kappa = cm.Kappa;

            Console.WriteLine("error: " + error);
            Console.WriteLine("accuracy: " + acc);
            Console.WriteLine("kappa: " + kappa);
            Console.WriteLine("pearson: " + cm.Pearson);
            for (int i = 0; i < cm.ColumnErrors.Length; i++)
            {
                if (cm.ColumnErrors[i] != 0)
                {
                    double columnerror = (double)cm.ColumnErrors[i] / cm.ColumnTotals[i];
                    Console.WriteLine("Error of " + labelMap[i] + ": " + columnerror);
                }
            }
            SaveKnn(knn);
            Fingerprinting.WriteLabelMap(labelMap);
            return(labelMap);
        }
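KnnCreate calls a SaveKnn helper that is not shown here. A minimal sketch of what it could look like, using the same Save extension method Examples #16 and #30 use (the file name is hypothetical):

        static void SaveKnn(KNearestNeighbors knn)
        {
            // Persist the trained model next to the executable (path is illustrative).
            knn.Save(Path.Combine(Environment.CurrentDirectory, "knn.bin"));
        }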
        public void Train(DatasetRepresentation datasetRepresentation)
        {
            ArgumentValidator.ValidateObject(datasetRepresentation);

            datasetUsedForTraining = datasetRepresentation;
            var inputsOutputsPair = GetInputsAndOutputsForDataset(datasetRepresentation);
            var k = 61;

            kNearestNeighbors = new KNearestNeighbors(k);
            kNearestNeighbors.Learn(inputsOutputsPair.Inputs, inputsOutputsPair.Outputs);
        }
Example #11
        public void KNearestNeighbors(out int answer)
        {
            int[] y = { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                        1, 1, 1, 1, 1, 1, 1, 1, 1, 1 };
            int   kn  = Convert.ToInt32(textBox1.Text);
            var   knn = new KNearestNeighbors(k: kn);

            knn.Learn(train_emfcc, y);
            answer = knn.Decide(mfcc_earr);

            label6.Text += answer.ToString();
        }
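Convert.ToInt32 throws on non-numeric input, and a k larger than the 20-sample training set is not meaningful. A defensive sketch for the parsing step above:

            if (!int.TryParse(textBox1.Text, out int kn) || kn < 1 || kn > 20)
            {
                kn = 3; // fall back to a small default
            }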
Example #12
        public void runKNN()
        {
            // k = 1 means only the nearest neighbour will be used.
            // In order to compare strings, we will be using Levenshtein's string distance.
            var knn = new KNearestNeighbors <string>(k: 1, distance: new Levenshtein());

            String[]  trainingInput   = trainingData.ToArray <String>("Output");
            DataTable trainingsymbols = convertStringDataToDiscreteSymbol();

            int[] trainingOutput = trainingsymbols.ToArray <int>("GeneratedByProgram");
            // We learn the algorithm:
            knn.Learn(trainingInput, trainingOutput);

            // After the algorithm has been created, we can use it:
            int answer = knn.Decide("Chars"); // answer should be 1.

            DataTable testdata = new DataTable("Sample Data");

            testdata.Columns.Add("Output", "GeneratedByProgram");

            testdata.Rows.Add("a8", "Yes");
            testdata.Rows.Add("b5", "Yes");
            testdata.Rows.Add("This is real", "No");
            testdata.Rows.Add("a9", "Yes");
            testdata.Rows.Add("b15", "Yes");
            testdata.Rows.Add("b15", "Yes");
            testdata.Rows.Add("b18", "Yes");
            testdata.Rows.Add("b200", "Yes");
            testdata.Rows.Add("b17", "Yes");
            testdata.Rows.Add("b62", "Yes");
            testdata.Rows.Add("b90", "Yes");
            testdata.Rows.Add("b123", "Yes");
            testdata.Rows.Add("This is Ok", "Yes");
            testdata.Rows.Add("b1", "Yes");
            testdata.Rows.Add("b64", "Yes");
            testdata.Rows.Add("I am god", "No");
            testdata.Rows.Add("b14", "Yes");
            testdata.Rows.Add("b1", "Yes");
            testdata.Rows.Add("b64", "Yes");
            testdata.Rows.Add("b100000000000", "Yes");

            testForInstance(knn, "b15", "Yes");

            DataTable testsymbols = codebook.Apply(testdata);

            String[] testInput  = testdata.ToArray <String>("Output");
            int[]    testOutput = testsymbols.ToArray <int>("GeneratedByProgram");
            int[]    answers    = knn.Decide(testInput); // one predicted class per test string


            Console.WriteLine("\n Accuracy (Tested on 20 data set): " + calculateAccuracy(answers, testOutput));
        }
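runKNN depends on a calculateAccuracy helper that is not shown. A plausible reconstruction matching the call site (hypothetical; any implementation must compare predictions to expected labels element-wise):

        private static double calculateAccuracy(int[] predicted, int[] expected)
        {
            int correct = 0;
            for (int i = 0; i < predicted.Length; i++)
            {
                if (predicted[i] == expected[i])
                {
                    correct++;
                }
            }
            return (double)correct / predicted.Length;
        }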
        public override void DoTraining(DocumentSetCaseCollectionSet trainingSet, classifierTools tools, ILogBuilder logger)
        {
            var state = states.SetState(trainingSet, GetExperimentSufix());

            _distance = new SquareEuclidean();
            var kNearest = new KNearestNeighbors <Double[]>(k: setup.kNN_k, distance: _distance);

            kNearest.Learn(state.data.inputs, state.data.outputs);
            state.machine = kNearest;

            state.SaveState();
        }
Example #14
        public void learn_string()
        {
            #region doc_learn_text
            // The k-Nearest Neighbors algorithm can be used with
            // any kind of data. In this example, we will see how
            // it can be used to compare, for example, Strings.

            string[] inputs =
            {
                "Car",     // class 0
                "Bar",     // class 0
                "Jar",     // class 0

                "Charm",   // class 1
                "Chair"    // class 1
            };

            int[] outputs =
            {
                0, 0, 0,  // First three are from class 0
                1, 1,     // And next two are from class 1
            };


            // Now we will create the K-Nearest Neighbors algorithm. For this
            // example, we will be choosing k = 1. This means that, for a given
            // instance, only its nearest neighbor will be used to cast a new
            // decision.

            // In order to compare strings, we will be using Levenshtein's string distance
            var knn = new KNearestNeighbors <string>(k: 1, distance: new Levenshtein());

            // We learn the algorithm:
            knn.Learn(inputs, outputs);

            // After the algorithm has been created, we can use it:
            int answer = knn.Decide("Chars"); // answer should be 1.

            // Let's say we would like to compute the error matrix for the classifier:
            var cm = ConfusionMatrix.Estimate(knn, inputs, outputs);

            // We can use it to estimate measures such as
            double error = cm.Error;    // should be 0
            double acc   = cm.Accuracy; // should be 1
            double kappa = cm.Kappa;    // should be 1
            #endregion

            Assert.AreEqual(1, answer);
            Assert.AreEqual(0, error);
            Assert.AreEqual(1, acc);
            Assert.AreEqual(1, kappa);
        }
        private Events CalcRecommendedEventForUser()
        {
            const int kNeighbors = 1;

            var knn = new KNearestNeighbors <double[]>(kNeighbors, distance: new Accord.Math.Distances.SquareEuclidean());

            if (!HttpContext.Session.TryGetValue("UserName", out var userNameNotEncoded))
            {
                return(null);
            }

            var userName = System.Text.Encoding.UTF8.GetString(userNameNotEncoded);

            var usersEvents = _context.EventToUser.Include(evToUser => evToUser.Event).Include(evToUser => evToUser.EventUserNameNavigation).OrderBy(s => s.EventUserNameNavigation.BlogUserAge);

            LinkedList <double[]> usersAge = new LinkedList <double[]>();
            LinkedList <int>      eventIds = new LinkedList <int>();

            foreach (var userEvent in usersEvents.OrderBy(userEv => userEv.EventId))
            {
                usersAge.AddLast(new double[] { Convert.ToDouble(userEvent.EventUserNameNavigation.BlogUserAge) });
                eventIds.AddLast(userEvent.EventId);
            }

            var inputs = usersAge.Select(user => user.ToArray()).ToArray();

            if (inputs.Length <= 1)
            {
                return(null);
            }
            var outputs = eventIds.ToArray();

            knn.Learn(inputs, outputs);

            var currUserObj = _context.Users.First(users => users.BlogUserName == userName);

            var currUserAge = new double[] { currUserObj.BlogUserAge };
            int decision;

            try
            {
                decision = knn.Decide(currUserAge);
            }
            catch (Exception)
            {
                return(null);
            }
            var decidedEvent = _context.Events.First(someEvent => someEvent.Id == decision);

            return(decidedEvent);
        }
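CalcRecommendedEventForUser reduces to 1-NN over a single feature (user age), with event ids used directly as class labels, exactly as in the code above. A distilled sketch of that core (values are illustrative):

            double[][] ages     = Jagged.ColumnVector(25.0, 32.0, 47.0);
            int[]      eventIds = { 101, 102, 103 }; // event ids as class labels
            var knn = new KNearestNeighbors<double[]>(k: 1, distance: new SquareEuclidean());
            knn.Learn(ages, eventIds);
            int recommended = knn.Decide(new double[] { 30 }); // nearest age is 32 -> 102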
Example #16
        /// <summary>
        /// Classify our data using a k-nearest neighbors classifier and save the model.
        /// </summary>
        /// <param name="train_data">Frame objects that we will use to train classifiers.</param>
        /// <param name="test_data">Frame objects that we will use to test classifiers.</param>
        /// <param name="train_label">Labels of the train data.</param>
        /// <param name="test_label">Labels of the test data.</param>
        /// <param name="Classifier_Path">Path where we want to save the classifier on the disk.</param>
        /// <param name="Classifier_Name">Name of the classifier we want to save.</param>
        /// <returns></returns>
        public void Knn(double[][] train_data, double[][] test_data, int[] train_label, int[] test_label, String Classifier_Path, String Classifier_Name)
        {
            KNearestNeighbors knn = new KNearestNeighbors(k: 5);

            knn.Learn(train_data, train_label);

            int    answer = knn.Decide(new double[] { 117.07004523277283, 119.9104585647583 });
            var    cm     = GeneralConfusionMatrix.Estimate(knn, test_data, test_label);
            double error  = cm.Error;

            Console.WriteLine(error);

            knn.Save(Path.Combine(Classifier_Path, Classifier_Name));
        }
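The saved model can be loaded back with Accord's Serializer, the same API Example #30 uses. A short sketch continuing from the parameters above:

            var loaded = Serializer.Load<KNearestNeighbors>(Path.Combine(Classifier_Path, Classifier_Name));
            int check  = loaded.Decide(new double[] { 117.07004523277283, 119.9104585647583 });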
Example #17
        public string knn()
        {
            double[][] inputs =
            {
                // One sample from class 0
                new double[] { 10 },

                // One sample from class 1
                new double[] { 30 },

                // One sample from class 2
                new double[] { 50 },
            };

            int[] outputs =
            {
                0,     // Class 0
                1,     // Class 1
                2,     // Class 2
            };


            // Now we will create the K-Nearest Neighbors algorithm. For this
            // example, we will be choosing k = 1. This means that, for a given
            // instance, only its nearest neighbor will be used to cast a decision.
            var knn = new KNearestNeighbors(k: 1);

            // We learn the algorithm:
            knn.Learn(inputs, outputs);
            // Not authenticated: return a default video
            if (!User.Identity.IsAuthenticated)
            {
                return("instegram.mp4");
            }
            int customerId = int.Parse(User.Claims.FirstOrDefault(claim => claim.Type == ClaimTypes.Sid).Value);
            int birthYear  = _context.Customers.Where(a => a.ID == customerId).Select(a => a.BirthDate.Year).Single();
            // After the algorithm has been created, we can classify a new instance:
            int answer = knn.Decide(new double[] { (DateTime.Now.Year - birthYear) }); // 0, 1 or 2, depending on age

            if (answer == 0)
            {
                return("bracletRing.mp4");
            }
            if (answer == 1)
            {
                return("instegram.mp4");
            }
            return("bracletRing2.mp4");
        }
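With k = 1 and a single one-dimensional sample per class, the decision reduces to "nearest age bucket". A quick sketch of how the three buckets above behave:

            int a = knn.Decide(new double[] { 18 }); // nearest to 10 -> class 0
            int b = knn.Decide(new double[] { 35 }); // nearest to 30 -> class 1
            int c = knn.Decide(new double[] { 55 }); // nearest to 50 -> class 2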
        public void learn_test()
        {
            #region doc_learn_distance
            // Create some sample learning data. In this data,
            // the first two instances belong to a class, the
            // four next belong to another class and the last
            // three to yet another.

            double[][] inputs =
            {
                // The first two are from class 0
                new double[] { -5, -2, -1 },
                new double[] { -5, -5, -6 },

                // The next four are from class 1
                new double[] {  2,  1,  1 },
                new double[] {  1,  1,  2 },
                new double[] {  1,  2,  2 },
                new double[] {  3,  1,  2 },

                // The last three are from class 2
                new double[] { 11,  5,  4 },
                new double[] { 15,  5,  6 },
                new double[] { 10,  5,  6 },
            };

            int[] outputs =
            {
                0, 0,       // First two from class 0
                1, 1, 1, 1, // Next four from class 1
                2, 2, 2     // Last three from class 2
            };


            // Now we will create the K-Nearest Neighbors algorithm. For this
            // example, we will be choosing k = 4. This means that, for a given
            // instance, its nearest 4 neighbors will be used to cast a decision.
            var knn = new KNearestNeighbors <double[]>(k: 4, distance: new SquareEuclidean());

            // We learn the algorithm:
            knn.Learn(inputs, outputs);

            // After the algorithm has been created, we can classify a new instance:
            int answer = knn.Decide(new double[] { 11, 5, 4 }); // answer will be 2.
            #endregion

            Assert.AreEqual(2, answer);
        }
Example #19
        private void KNN_Load(object sender, EventArgs e)
        {
            int numFeatures = 5, numSamples = 5000;

            inputs  = new double[numSamples * nums.Length][];
            outputs = new int[numSamples * nums.Length];

            loadData(ref inputs, ref outputs, numFeatures, numSamples);

            knn = new KNearestNeighbors(k: nums.Length);
            knn.Learn(inputs, outputs);

            //var cm = GeneralConfusionMatrix.Estimate(knn, inputs, outputs);

            //double error = cm.Error;
            //double acc = cm.Accuracy;
            //double kappa = cm.Kappa;
        }
        public void learn_string()
        {
            #region doc_learn_text
            // The k-Nearest Neighbors algorithm can be used with
            // any kind of data. In this example, we will see how
            // it can be used to compare, for example, Strings.

            string[] inputs =
            {
                "Car",     // class 0
                "Bar",     // class 0
                "Jar",     // class 0

                "Charm",   // class 1
                "Chair"    // class 1
            };

            int[] outputs =
            {
                0, 0, 0,  // First three are from class 0
                1, 1,     // And next two are from class 1
            };


            // Now we will create the K-Nearest Neighbors algorithm. For this
            // example, we will be choosing k = 1. This means that, for a given
            // instance, only its nearest neighbor will be used to cast a new
            // decision.

            // In order to compare strings, we will be using Levenshtein's string distance
            var knn = new KNearestNeighbors <string>(k: 1, distance: new Levenshtein());

            // We learn the algorithm:
            knn.Learn(inputs, outputs);

            // After the algorithm has been created, we can use it:
            int answer = knn.Decide("Chars"); // answer should be 1.
            #endregion

            Assert.AreEqual(1, answer);
        }
Example #21
        private static void knn(double[][] inputs, int[] outputs)
        {
            // Create a new k-NN algorithm:
            var knn = new KNearestNeighbors()
            {
                K        = 3,              // base a decision on the class labels of the three nearest neighbors of the query point
                Distance = new Euclidean() // actually the default
            };

            // Learn a k-NN classifier
            knn = knn.Learn(inputs, outputs);

            // Get predictions according to kNN
            int[] predicted = knn.Decide(inputs);

            // Create a confusion matrix to check the quality of the predictions:
            var cm = new ConfusionMatrix(predicted: predicted, expected: outputs);

            // Check the accuracy measure:
            double accuracy = cm.Accuracy; // (should be 1.0 or 100%)
        }
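The binary ConfusionMatrix exposes more than Accuracy. A sketch of other measures available on the cm instance above (for genuinely multi-class outputs, GeneralConfusionMatrix is the better fit):

            double sensitivity = cm.Sensitivity; // true positive rate
            double specificity = cm.Specificity; // true negative rate
            double fscore      = cm.FScore;      // harmonic mean of precision and recall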
Example #22
        public static KeyValuePair <Dictionary <int, string>, KNearestNeighbors> KnnCreateWithLabelMap(Dictionary <List <string>, double[][]> trainingSet)
        {
            int        labelCounter           = -1;
            List <int> classesList            = new List <int>();
            Dictionary <int, string> labelMap = new Dictionary <int, string>();

            /* Since the kNN algorithm generates a model with int values instead of string values for the label,
             * it is imperative to generate a map for reference.
             */
            foreach (string label in trainingSet.First().Key.ToArray())
            {
                if (!labelMap.ContainsValue(label))
                {
                    labelCounter++;
                    classesList.Add(labelCounter);
                    labelMap.Add(labelCounter, label);
                    //Console.WriteLine(labelCounter + ": " + label);
                }
                else
                {
                    classesList.Add(labelCounter);
                }
            }

            int[]      classes = classesList.ToArray();
            double[][] inputs  = trainingSet.First().Value;


            // Now we will create the K-Nearest Neighbors algorithm.
            // It's possible to switch around the k: 5 for the possibility of better accuracy
            var knn = new KNearestNeighbors(k: 5);

            // We train the algorithm:
            knn.Learn(inputs, classes);

            // Generate the result.
            KeyValuePair <Dictionary <int, string>, KNearestNeighbors> result = new KeyValuePair <Dictionary <int, string>, KNearestNeighbors>(labelMap, knn);

            return(result);
        }
Example #23
        public void Main()
        {
            WriteLine("Execution begins...");

            var fn   = @"c:\DEMO\Data\train.csv";
            var f    = File.ReadLines(fn);
            var data = from z in f.Skip(1)
                       let zz = z.Split(',').Select(int.Parse)
                       select new Digit
                       {
                           Label = zz.First(),
                           Image = zz.Skip(1).ToArray()
                       };
            var train = data.Take(10000).ToArray();
            var test  = data.Skip(10000).Take(1000).ToArray();

            var classifier = new KNearestNeighbors(1);

            classifier.Learn(
                (from x in train select x.Image.Select(z => (double)z).ToArray()).ToArray(),
                (from x in train select x.Label).ToArray());

            int count = 0, correct = 0;

            foreach (var z in test)
            {
                var n = classifier.Decide(z.Image.Select(t => (double)t).ToArray());
                WriteLine("{0} => {1}", z.Label, n);
                if (z.Label == n)
                {
                    correct++;
                }
                count++;
            }

            WriteLine("Done, {0} of {1} correct ({2}%)", correct, count, (double)correct / (double)count * 100);
            ReadKey();
        }
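On raw pixel intensities, 1-NN with the default distance already works, but scaling features to a common range is a cheap safeguard if heterogeneous features are ever added. A sketch scaling the images above to [0, 1]:

            var scaledTrain = train
                              .Select(x => x.Image.Select(z => z / 255.0).ToArray())
                              .ToArray();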
Example #24
        public int DoSimplePrediction(double[][] inputs, int output)
        {
            Dataset simpleTrainedDataset = new Dataset(Constants.Constants.SimpleTrainedDataFilePath);

            double[][] simpleTrainedDatasetInputs  = simpleTrainedDataset.Instances;
            int[]      simpleTrainedDatasetOutputs = simpleTrainedDataset.ClassLabels;

            var knn = new KNearestNeighbors()
            {
                K        = 5,
                Distance = new Euclidean()
            };


            knn = knn.Learn(simpleTrainedDatasetInputs, simpleTrainedDatasetOutputs);
            int[] predicted = knn.Decide(inputs);

            // Return the majority class among the per-row predictions:
            return(predicted
                   .GroupBy(_ => _)
                   .OrderByDescending(_ => _.Count())
                   .Select(_ => _.Key)
                   .First());
        }
Example #25
        public void weights_test_tree_1()
        {
            KNearestNeighbors a;
            KNearestNeighbors b;

            {
                double[][] inputs  = Jagged.ColumnVector(4.2, 0.7, 0.7, 0.7, 1.3, 9.4, 9.4, 12);
                int[]      outputs = { 0, 0, 0, 1, 1, 2, 2, 2 };
                double[]   weights = { 1, 1, 0, 0, 1, 1, 0, 1 };
                var        knn     = new KNearestNeighbors(k: inputs.Length);
                a = knn.Learn(inputs, outputs, weights);
            }

            {
                double[][] inputs  = Jagged.ColumnVector(4.2, 0.7, 1.3, 9.4, 12);
                int[]      outputs = { 0, 0, 1, 2, 2 };
                var        knn     = new KNearestNeighbors(k: inputs.Length);
                b = knn.Learn(inputs, outputs);
            }

            double[][] x = Jagged.ColumnVector(4.2, 0.7, 1.3, 9.4, 12);
            Assert.AreEqual(a.Scores(x), b.Scores(x));
        }
Example #26
        private static void KNNCompute(int K, double[][] inputs, int[] outputs, double[][] test, int[] answer, List <string> testData, System.IO.StreamWriter fw)
        {
            var knn = new KNearestNeighbors(K);

            knn.Learn(inputs, outputs);

            // Test the model on the held-out set
            int    i = 0;
            double accuracy;
            int    correctCount = 0;

            foreach (var testDetail in test)
            {
                var predict = knn.Decide(testDetail);
                //fw.WriteLine($"歌曲:{testData[i].Split(',')[0]}, 正确答案是{answer[i]}, KNN(K={K}认为):{predict}");
                if (answer[i] == predict)
                {
                    correctCount++;
                }
                i++;
            }
            accuracy = (double)correctCount / (double)test.Count();
            fw.WriteLine($"KNN(K={K})的正确率:" + accuracy);
        }
        public void Learn()
        {
            KnnModel.Learn(TrainingModelInputs, TrainingModelOutputs);
        }
Example #28
        public JsonResult PredictPossibleProducts()
        {
            var userId       = 0;
            int knnNum       = 5;
            int clusterNum   = 4;
            var userIdString = "";

            if (HttpContext.Session["userid"] == null)
            {
                return(Json(new { errorCode = 1, errorMessage = "Invalid user" }));
            }

            userIdString = HttpContext.Session["userid"].ToString();
            var didParsed = Int32.TryParse(userIdString, out userId);

            if (!didParsed)
            {
                return(Json(new { errorCode = 1, errorMessage = "Invalid user" }));
            }

            var userGender = _context.Users
                             .Where(x => x.Id == userId)
                             .Select(x => x.Gender)
                             .SingleOrDefault();

            var trainData = _context.Purchases
                            .OrderBy(x => x.UserId)
                            .Where(x => x.Product != null)
                            .Select(x => new
            {
                userId     = x.UserId.Value,
                size       = x.Product.Size,
                type       = x.Product.ProductTypeId,
                gender     = x.Product.ProductType.Gender,
                genderUser = x.User.Gender
            })
                            .ToList();

            if (trainData.Count < knnNum || trainData.Count < clusterNum)
            {
                return(Json(new { errorCode = 2, errorMessage = "Not enough data" }));
            }
            var inputs = trainData.Select(x =>
            {
                double[] res = new double[]
                {
                    Convert.ToInt32(x.gender),
                    Convert.ToInt32(x.genderUser),
                    x.type.Value,
                    x.size
                };

                return(res);
            })
                         .ToArray();

            var codification = new Codification <double>()
            {
                CodificationVariable.Categorical,
                CodificationVariable.Categorical,
                CodificationVariable.Categorical,
                CodificationVariable.Discrete
            };

            // Learn the codification from observations
            var model = codification.Learn(inputs);

            // Transform the mixed observations into only continuous:
            double[][] newInputs = model.ToDouble().Transform(inputs);

            KMedoids kmedoids = new KMedoids(k: clusterNum);
            var      clusters = kmedoids.Learn(newInputs);

            int[] labels = clusters.Decide(newInputs);

            var knn5 = new KNearestNeighbors(k: knnNum);

            knn5.Learn(newInputs, labels);

            var purchasesById = _context.Purchases
                                .Where(x => x.Product != null)
                                .Select(x => new
            {
                userId     = x.UserId.Value,
                size       = x.Product.Size,
                type       = x.Product.ProductTypeId,
                gender     = x.Product.ProductType.Gender,
                genderUser = x.User.Gender
            })
                                .GroupBy(x => x.userId)
                                .ToList();

            IList <Tuple <int, int[]> > labelsForUsers = new List <Tuple <int, int[]> >();

            for (int i = 0; i < purchasesById.Count; i++)
            {
                var userInputs = purchasesById[i].
                                 Select(x =>
                {
                    double[] res = new double[]
                    {
                        Convert.ToInt32(x.gender),
                        Convert.ToInt32(x.genderUser),
                        x.type.Value,
                        x.size
                    };

                    return(res);
                })
                                 .ToArray();

                double[][] newUserInputs = model.ToDouble().Transform(userInputs);
                labelsForUsers.Add(new Tuple <int, int[]>(purchasesById[i].Key, clusters.Decide(newUserInputs).Distinct().ToArray()));
            }

            var productIdsUserBought = _context.Purchases
                                       .Where(x => x.UserId == userId)
                                       .Select(x => x.ProductId)
                                       .Distinct()
                                       .ToList();

            var validProductTypeIds = _context.Purchases
                                      .Where(x => x.UserId == userId)
                                      .Select(x => x.Product.ProductTypeId)
                                      .Distinct()
                                      .ToList();

            var productsToPredict = _context.Products
                                    .Where(x => !productIdsUserBought.Contains(x.Id))
                                    .Where(x => validProductTypeIds.Contains(x.ProductTypeId))
                                    .Select(x => new
            {
                id         = x.Id,
                size       = x.Size,
                type       = x.ProductTypeId,
                gender     = x.ProductType.Gender,
                genderUser = userGender
            })
                                    .ToList();

            var predInputs = productsToPredict.Select(x =>
            {
                double[] res = new double[]
                {
                    Convert.ToInt32(x.gender),
                    Convert.ToInt32(x.genderUser),
                    x.type.Value,
                    x.size
                };

                return(res);
            })
                             .ToArray();

            double[][] newPredInputs = model.ToDouble().Transform(predInputs);

            int[] newLabels = knn5.Decide(newPredInputs);

            IList <int> productIdsPrediction = new List <int>();
            var         userLabels           = labelsForUsers.FirstOrDefault(x => x.Item1 == userId)?.Item2 ?? new int[0];

            for (int i = 0; i < newLabels.Length; i++)
            {
                if (userLabels.Contains(newLabels[i]))
                {
                    productIdsPrediction.Add(productsToPredict[i].id);
                }
            }

            var predictedProduct = _context.Products
                                   .Where(x => productIdsPrediction.Contains(x.Id))
                                   .Select(x => new
            {
                Id          = x.Id,
                Name        = x.Name,
                Price       = x.Price,
                Size        = x.Size,
                PictureName = x.PictureName
            })
                                   .ToList();

            return(Json(new { products = predictedProduct }, JsonRequestBehavior.AllowGet));
        }
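The core pipeline above is: codify the mixed features, cluster purchases with KMedoids, then train kNN to assign unseen products to those clusters. A miniature sketch of that chain (values are illustrative):

            double[][] data =
            {
                new double[] { 0, 1, 2, 40 },
                new double[] { 1, 0, 3, 38 },
                new double[] { 0, 1, 2, 42 }
            };
            var clusters = new KMedoids(k: 2).Learn(data);
            int[] labels = clusters.Decide(data);

            var knn = new KNearestNeighbors(k: 1);
            knn.Learn(data, labels);
            int cluster = knn.Decide(new double[] { 0, 1, 2, 41 });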
Example #29
    public void trainClassifier()
    {
        List <FeatureVector> featureVectors = dataService.getAllFeatureVectors();

        double [][] inputs  = new double[featureVectors.Count][];
        int[]       outputs = new int[featureVectors.Count];

        createInputsAndOutputs(inputs, outputs, featureVectors);


        // Code For creating a MulticlassSupportVectorMachine
        // Create the multi-class learning algorithm for the machine

        /*
         * var teacher = new MulticlassSupportVectorLearning<Gaussian>()
         * {
         *  // Configure the learning algorithm to use SMO to train the
         *  //  underlying SVMs in each of the binary class subproblems.
         *  Learner = (param) => new SequentialMinimalOptimization<Gaussian>()
         *  {
         *      // Estimate a suitable guess for the Gaussian kernel's parameters.
         *      // This estimate can serve as a starting point for a grid search.
         *      UseKernelEstimation = true
         *  }
         * };
         *
         * // Learn a machine
         * var machine = teacher.Learn(inputs, outputs);
         *
         *
         * // Create the multi-class learning algorithm for the machine
         * var calibration = new MulticlassSupportVectorLearning<Gaussian>()
         * {
         *  Model = machine, // We will start with an existing machine
         *
         *  // Configure the learning algorithm to use Platt's calibration
         *  Learner = (param) => new ProbabilisticOutputCalibration<Gaussian>()
         *  {
         *      Model = param.Model // Start with an existing machine
         *  }
         * };
         *
         * classifier = calibration.Learn(inputs, outputs);*/

        // Code for creating a KNN classifier

        int K = (int)(Mathf.Sqrt(inputs.GetLength(0)) / 2.0f);

        classifier = new KNearestNeighbors(k: K, distance: new Euclidean());
        classifier.Learn(inputs, outputs);


        // Code For creating a random forest classifier.

        // Create the forest learning algorithm

        /*
         * var teacher = new RandomForestLearning()
         * {
         *  NumberOfTrees = 50,
         * };
         *
         * classifier = teacher.Learn(inputs, outputs);
         */

        saveModel();
        TrainingFinished = true;
    }
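The sqrt(n)/2 heuristic used for K above can evaluate to zero on very small datasets. A guarded variant (a sketch; inputs.Length equals the sample count):

        int K = Mathf.Max(1, (int)(Mathf.Sqrt(inputs.Length) / 2.0f));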
Example #30
        public void learn_string()
        {
            string basePath = NUnit.Framework.TestContext.CurrentContext.TestDirectory;

            #region doc_learn_text
            // The k-Nearest Neighbors algorithm can be used with
            // any kind of data. In this example, we will see how
            // it can be used to compare, for example, Strings.

            string[] inputs =
            {
                "Car",     // class 0
                "Bar",     // class 0
                "Jar",     // class 0

                "Charm",   // class 1
                "Chair"    // class 1
            };

            int[] outputs =
            {
                0, 0, 0,  // First three are from class 0
                1, 1,     // And next two are from class 1
            };


            // Now we will create the K-Nearest Neighbors algorithm. For this
            // example, we will be choosing k = 1. This means that, for a given
            // instance, only its nearest neighbor will be used to cast a new
            // decision.

            // In order to compare strings, we will be using Levenshtein's string distance
            var knn = new KNearestNeighbors <string>(k: 1, distance: new Levenshtein());

            // We learn the algorithm:
            knn.Learn(inputs, outputs);

            // After the algorithm has been created, we can use it:
            int answer = knn.Decide("Chars"); // answer should be 1.

            // Let's say we would like to compute the error matrix for the classifier:
            var cm = ConfusionMatrix.Estimate(knn, inputs, outputs);

            // We can use it to estimate measures such as
            double error = cm.Error;    // should be 0
            double acc   = cm.Accuracy; // should be 1
            double kappa = cm.Kappa;    // should be 1
            #endregion

            Assert.AreEqual(1, answer);
            Assert.AreEqual(0, error);
            Assert.AreEqual(1, acc);
            Assert.AreEqual(1, kappa);

#if !NO_BINARY_SERIALIZATION
            knn.Save(Path.Combine(basePath, "string_knn.bin"));

            var loaded_knn = Serializer.Load <KNearestNeighbors <string> >(Path.Combine(basePath, "string_knn.bin"));

            Assert.AreEqual(1, loaded_knn.Decide("Chars"));
            cm = ConfusionMatrix.Estimate(loaded_knn, inputs, outputs);
            Assert.AreEqual(0, cm.Error);
            Assert.AreEqual(1, cm.Accuracy);
            Assert.AreEqual(1, cm.Kappa);

            Assert.AreEqual(knn.ClassCount, loaded_knn.ClassCount);
            Assert.AreEqual(knn.Distance, loaded_knn.Distance);
            Assert.AreEqual(knn.K, loaded_knn.K);
            Assert.AreEqual(knn.NumberOfClasses, loaded_knn.NumberOfClasses);
            Assert.AreEqual(knn.NumberOfInputs, loaded_knn.NumberOfInputs);
            Assert.AreEqual(knn.NumberOfOutputs, loaded_knn.NumberOfOutputs);
            Assert.AreEqual(knn.Outputs, loaded_knn.Outputs);
            Assert.AreEqual(knn.Token, loaded_knn.Token);
#endif
        }