Code example #1
0
        //public static string GetRoom(KNearestNeighbors knn, double[] coordinates)
        //{
        //    // After the algorithm has been created, we can classify a new instance:
        //    Console.WriteLine("Room: " + getRoomname(knn.Decide(coordinates)));
        //    return getRoomname(knn.Decide(coordinates));
        //}
        /// <summary>
        /// Classifies <paramref name="coordinates"/> with k-NN and returns, for each room
        /// name seen among the nearest neighbors, the fraction of neighbors voting for it.
        /// </summary>
        /// <param name="coordinates">Feature vector to classify.</param>
        /// <param name="Knn">Trained k-nearest-neighbors model.</param>
        /// <param name="labelMap">Maps integer class labels to room names (used via getRoomname).</param>
        /// <returns>Room name -> vote share; values sum to 1 when at least one neighbor exists.</returns>
        public static Dictionary <string, double> getOptions(double[] coordinates, KNearestNeighbors Knn, Dictionary <int, string> labelMap)
        {
            Dictionary <string, int> optionDict = new Dictionary <string, int>();

            // Single query: the original called GetNearestNeighbors twice (once only to
            // read .Length), running the neighbor search redundantly. The out labels are
            // always overwritten by the callee, so pre-filling them from labelMap.Keys
            // (as the original did) was dead work as well.
            int[] labels;
            double[][] neighbors = Knn.GetNearestNeighbors(coordinates, out labels);
            int length = neighbors.Length;

            foreach (double[] g in neighbors)
            {
                // Each neighbor point is re-classified to obtain its room name.
                string roomname = getRoomname(Knn.Decide(g), labelMap);

                // TryGetValue avoids the Keys.Contains + indexer double lookup.
                int count;
                optionDict.TryGetValue(roomname, out count);
                optionDict[roomname] = count + 1;
            }

            Dictionary <string, double> options = new Dictionary <string, double>();

            foreach (KeyValuePair <string, int> option in optionDict)
            {
                // Convert the raw vote count into a fraction of all returned neighbors.
                // (length is only used when optionDict is non-empty, so no div-by-zero.)
                options.Add(option.Key, option.Value / (double)length);
            }
            return(options);
        }
Code example #2
0
        /// <summary>
        /// Classifies the text in tesTxt with one of two k-NN models (string-based or
        /// vector-based), listing each nearest neighbor with its class name in knnTxt
        /// and showing the winning class in label5.
        /// </summary>
        private void computebtn_Click(object sender, EventArgs e)
        {
            int[] output;
            knnTxt.Text = "";
            // "Levenshtein" mode: knnStr operates on strings directly; otherwise the
            // text is first vectorized via wv.transform and fed to the numeric knn.
            // NOTE(review): SelectedItem.ToString() throws when no item is selected —
            // confirm the combo boxes always have a selection before this handler runs.
            if (comboBox2.SelectedItem.ToString() == "Levenshtein")
            {
                var answer = knnStr.GetNearestNeighbors(tesTxt.Text.ToUpper(), out output);
                for (int i = 0; i < answer.Length; ++i)
                {
                    // One line per neighbor: "<neighbor string>,<class name>".
                    knnTxt.Text += answer[i] + "," + inverseClassList[output[i]] + Environment.NewLine;
                }

                // Compute re-runs the classification on the same input to get the class.
                int classInt = knnStr.Compute(tesTxt.Text.ToUpper());
                label5.Text = inverseClassList[classInt];
            }
            else
            {
                // comboBox4 selects the word-vector transform used for encode/decode.
                var answer = knn.GetNearestNeighbors(wv.transform(tesTxt.Text.ToUpper(), comboBox4.SelectedItem.ToString()), out output);
                for (int i = 0; i < answer.Length; ++i)
                {
                    // Neighbors are numeric vectors; map them back to text for display.
                    knnTxt.Text += wv.transformInverse(answer[i], comboBox4.SelectedItem.ToString()) + "," + inverseClassList[output[i]] + Environment.NewLine;
                }

                int classInt = knn.Compute(wv.transform(tesTxt.Text.ToUpper(), comboBox4.SelectedItem.ToString()));
                label5.Text = inverseClassList[classInt];
                //answer = knn.Compute(wv.transform(GetString(fields, " "), comboBox4.SelectedItem.ToString()));
            }
        }
Code example #3
0
        /// <summary>
        /// Predicts a value for <paramref name="query"/> by taking the
        /// inverse-distance-weighted average over its nearest neighbors.
        /// </summary>
        /// <param name="query">Feature vector to predict for.</param>
        /// <returns>The weighted-average prediction.</returns>
        public double Predict(double[] query)
        {
            int[] neighborLabels;
            _predictor.GetNearestNeighbors(query, out neighborLabels);
            return InverseDistanceWeightedAverage(neighborLabels);
        }
Code example #4
0
        /// <summary>
        /// Returns the labels of the nearest neighbors of <paramref name="input"/>.
        /// </summary>
        /// <param name="input">Feature vector to look up.</param>
        /// <returns>Labels of the K nearest neighbors (K = _k, or 1 when _k is not positive).</returns>
        public int[] Predict(double[] input)
        {
            // Guard against an unconfigured k: the model needs at least one neighbor.
            _loadedKnn.K = _k > 0 ? _k : 1;

            // The out parameter is always assigned by GetNearestNeighbors, so no
            // pre-allocation is needed. The original new'd an int[_k] that was
            // immediately overwritten (and would throw for negative _k even though
            // the guard above handles that case), and kept the returned neighbor
            // matrix in an unused local.
            int[] result;
            _loadedKnn.GetNearestNeighbors(input, out result);

            return(result);
        }
Code example #5
0
File: KnnExample.cs  Project: dklomp1/BeaconExample
        /// <summary>
        /// Returns the room-name classification of each of the k nearest neighbors of
        /// <paramref name="coordinates"/>, one entry per neighbor (duplicates included).
        /// </summary>
        /// <param name="coordinates">Feature vector to classify.</param>
        /// <param name="knn">Trained k-nearest-neighbors model.</param>
        /// <returns>Room names of the nearest neighbors.</returns>
        public static List <string> getOptions(double[] coordinates, KNearestNeighbors knn)
        {
            List <string> options = new List <string>();

            // Fix: the original loaded the label map from disk and copied its keys into
            // the out argument, but an out parameter is always overwritten by the
            // callee and the map was never used afterwards — both were dead work
            // (including the Fingerprinting.ReadLabelMap file read).
            int[] labels;
            foreach (double[] g in knn.GetNearestNeighbors(coordinates, out labels))
            {
                // Each neighbor point is re-classified to obtain its room name.
                options.Add(getRoomname(knn.Decide(g)));
            }
            return(options);
        }
Code example #6
0
        /// <summary>
        /// Returns the class labels of the clusters nearest to the given descriptor,
        /// ordered from closest to farthest.
        /// </summary>
        public int[] getMatchingClusters(Descriptor descriptor)
        {
            // get the nearest 3 classes
            // (k itself is configured on the knn model; labels receives one class
            // label per returned neighbor point.)
            int[]      labels;
            double[][] point = knn.GetNearestNeighbors(descriptor.getArray(), out labels);

            // Recompute each neighbor's Euclidean distance to the query so the labels
            // can be sorted by proximity below.
            // NOTE(review): assumes point.Length == GlobalConstant.numberOfKNearestNeighbour;
            // if the model ever returns fewer neighbors, point[i] throws — confirm the
            // constant matches knn's configured k.
            double[] distance = new double[GlobalConstant.numberOfKNearestNeighbour];
            for (int i = 0; i < GlobalConstant.numberOfKNearestNeighbour; i++)
            {
                distance[i] = Accord.Math.Distance.Euclidean(point[i], descriptor.getArray());
                //Console.WriteLine(distance[i]);
            }


            // sort 2 arrays related to each other
            // Array.Sort(keys, items): sorts distance ascending and permutes labels in step.
            Array.Sort(distance, labels);

            return(labels);
        }
Code example #7
0
        /// <summary>
        /// Finds the 10 songs in the normalized-descriptor matrix nearest to the input
        /// descriptors and returns their Spotify ids (first 14 characters stripped).
        /// </summary>
        /// <param name="descriptoresEntrada">Raw audio descriptors of the query song.</param>
        /// <returns>Spotify ids of the nearest songs, in database order.</returns>
        private List<String> GetSimilaresDatabaseKNN(List<Double> descriptoresEntrada)
        {
            Double[] vectorEntrada = Normalizar(descriptoresEntrada.ToArray());
            Double[][] matriz = csvtoMatrix("descriptoresNormalizados");

            // KNN requires at least two classes: everything is class 1 except one
            // arbitrary row forced to class 2. NOTE(review): this looks like a
            // workaround for the library's class-count validation — confirm intent,
            // and that the matrix always has more than 23 rows.
            int[] pertenencia = new int[matriz.Length];
            for (int i = 0; i < pertenencia.Length; i++)
            {
                pertenencia[i] = 1;
            }
            pertenencia[23] = 2;

            KNearestNeighbors knn = new KNearestNeighbors(k: 10, inputs: matriz, outputs: pertenencia);

            // The original also called knn.Compute(vectorEntrada) and discarded the
            // result; that dead call is removed here.
            int[] labels;
            Double[][] cercanos = knn.GetNearestNeighbors(vectorEntrada, out labels);

            // HashSet membership replaces the original O(n*k) linear Contains scan.
            // Reference equality is intentional: cercanos holds the same row array
            // instances as matriz.
            var cercanosSet = new HashSet<Double[]>(cercanos);

            List<String> listaSimilares = new List<String>();

            // Dispose the EF context deterministically; the original never disposed it.
            using (ModeloSimilitudEntities db = new ModeloSimilitudEntities())
            {
                List<canciones> dbcanciones = db.canciones.ToList();
                for (int i = 0; i < matriz.Length; i++)
                {
                    if (cercanosSet.Contains(matriz[i]))
                    {
                        listaSimilares.Add(dbcanciones[i].id_spotify.Substring(14));
                    }
                }
            }

            return listaSimilares;
        }
Code example #8
0
        /// <summary>
        /// Searches k in [50, 200] using 5-fold cross-validation and returns the k of
        /// the model with the lowest combined training+validation error (fallback: 80).
        /// </summary>
        private int OptimizeK()
        {
            // Best (model, combined-error) pair seen across all candidate k values.
            Tuple <KNearestNeighbors, double> bestModel = null;

            for (int k = 50; k <= 200; k++)
            {
                var crossvalidation = new CrossValidation <KNearestNeighbors>(_input.Length, 5);

                // Fitting is invoked once per fold with disjoint train/validation index sets.
                crossvalidation.Fitting = delegate(int fold, int[] indicesTrain, int[] indicesValidation)
                {
                    var trainingInputs  = _input.Submatrix(indicesTrain);
                    var trainingOutputs = _output.Submatrix(indicesTrain);

                    var validationInputs  = _input.Submatrix(indicesValidation);
                    var validationOutputs = _output.Submatrix(indicesValidation);

                    var predictor = new KNearestNeighbors(k, _classes, trainingInputs, trainingOutputs);

                    // Create a training algorithm and learn the training data

                    var trainingError = 0.0;

                    // A prediction counts as correct when its sign matches the target
                    // (or it is exactly equal); anything else increments the error count.
                    for (int i = 0; i < trainingInputs.Length; i++)
                    {
                        int[] nearest;
                        predictor.GetNearestNeighbors(trainingInputs[i], out nearest);

                        var prediction = InverseDistanceWeightedAverage(nearest);

                        if (prediction > 0 && trainingOutputs[i] > 0 ||
                            prediction < 0 && trainingOutputs[i] < 0 ||
                            prediction.Equals(trainingOutputs[i]))
                        {
                            continue;
                        }

                        trainingError++;
                    }

                    double validationError = 0.0;

                    // Same sign-agreement scoring, on the held-out fold.
                    for (int i = 0; i < validationInputs.Length; i++)
                    {
                        int[] nearest;
                        predictor.GetNearestNeighbors(validationInputs[i], out nearest);

                        var prediction = InverseDistanceWeightedAverage(nearest);

                        if (prediction > 0 && validationOutputs[i] > 0 ||
                            prediction < 0 && validationOutputs[i] < 0 ||
                            prediction.Equals(validationOutputs[i]))
                        {
                            continue;
                        }

                        validationError++;
                    }

                    // Convert raw counts to error rates.
                    trainingError   /= trainingInputs.Length;
                    validationError /= validationInputs.Length;

                    return(new CrossValidationValues <KNearestNeighbors>(predictor, trainingError, validationError));
                };

                var result = crossvalidation.Compute();

                //var minError = result.Models.Select(y => y.ValidationValue).Min();
                // Combined metric: |training + validation| error (sqrt of the square).
                var minError = result.Models.Select(y => Math.Sqrt(Math.Pow(y.TrainingValue + y.ValidationValue, 2.0))).Min();

                if (bestModel == null || minError < bestModel.Item2)
                {
                    // NOTE(review): the best fold is looked up by ValidationValue while
                    // minError is the combined metric, so the lookup can miss (bestFit
                    // null) and keep the previous bestModel — confirm this is intended.
                    var bestFit = result.Models.FirstOrDefault(x => minError.Equals(x.ValidationValue))?.Model;
                    bestModel = bestFit == null ? bestModel : new Tuple <KNearestNeighbors, double>(bestFit, minError);
                }
            }

            // Default to k = 80 when no candidate ever produced a model.
            return(bestModel?.Item1.K ?? 80);
        }