public async Task <IHttpActionResult> PostKnnTrainingFile()
        {
            // Receives a multipart upload containing a KNN training file,
            // generates the label-map/kNN template from its contents and
            // stores the resulting Knn entity against the storey that owns
            // the space referenced by the generated template.
            StoreysHandler SH       = new StoreysHandler(db);
            SpacesHandler  SpH      = new SpacesHandler(db);
            KnnsHandler    KH       = new KnnsHandler(db);
            var            provider = new MultipartMemoryStreamProvider();
            await Request.Content.ReadAsMultipartAsync(provider);

            var file = provider.Contents[0];

            // BUGFIX: was file.ReadAsByteArrayAsync().Result — blocking on an
            // async call inside an async action risks thread-pool deadlock.
            byte[] TrainingSet = await file.ReadAsByteArrayAsync();

            string trainingString = System.Text.Encoding.UTF8.GetString(TrainingSet);

            // KVP<KVP<kNN bytes, label-map bytes>, space GUID string>
            KeyValuePair <KeyValuePair <byte[], byte[]>, string> LabelMapKnn = KnnGenerate.GenerateTemplateTrainingFile(trainingString);

            // The generator stores the source Space GUID in the string part;
            // resolve it to the owning storey.
            Guid   storeyId = SpH.GetSpaceStorey(Guid.Parse(LabelMapKnn.Value)).ID;
            Storey storey   = SH.GetStorey(storeyId);

            byte[] labelMap = LabelMapKnn.Key.Value;
            byte[] knnBytes = LabelMapKnn.Key.Key;   // renamed from 'Knn' — it shadowed the type name
            Knn    knn      = new Knn(storey, TrainingSet, labelMap, knnBytes);

            KH.PostKnn(knn);
            return(Ok(storey));
        }
        public void ShouldReturnExpectedDistance2()
        {
            // Every corner of the (±10, ±10) square is the same Euclidean
            // distance (√200 ≈ 14.142) from the origin.
            var origin  = new List <double> { 0, 0 };
            var corners = new List <List <double> >
            {
                new List <double> { 10, 10 },
                new List <double> { 10, -10 },
                new List <double> { -10, 10 },
                new List <double> { -10, -10 }
            };

            const double expected = 14.142;

            foreach (var corner in corners)
            {
                Assert.AreEqual(expected, Knn.EuclideanDist(origin, corner), 0.1);
            }
        }
    double[] CombinedOldAndNewTargets()
    {
        // Returns the stored targets, optionally extended with a classified
        // target for every user-added point.
        double[] loadedTargets = ValfriStatic.Targets;

        if (ValfriStatic.AddedPoints == null)
        {
            return loadedTargets;
        }

        double[][] addedPoints = ListToMatrix(ValfriStatic.AddedPoints);
        double[]   combined    = new double[loadedTargets.Length + addedPoints.Length];

        // Copy the already-labelled targets first…
        for (int j = 0; j < loadedTargets.Length; j++)
        {
            combined[j] = loadedTargets[j];
        }
        // …then classify each newly added point to obtain its target.
        for (int j = 0; j < addedPoints.Length; j++)
        {
            combined[loadedTargets.Length + j] = Knn.Classify(addedPoints[j], ValfriStatic.MatrixOfPoints, Dataset.NumberOfFeatures, Dataset.K);
        }
        return combined;
    }
        public void ShouldReturnExpectedManhattanDistance()
        {
            // Every corner of the (±10, ±10) square is Manhattan distance
            // 20 from the origin.
            var origin  = new List <double> { 0, 0 };
            var corners = new List <List <double> >
            {
                new List <double> { 10, 10 },
                new List <double> { 10, -10 },
                new List <double> { -10, 10 },
                new List <double> { -10, -10 }
            };

            const double expected = 20.0;

            foreach (var corner in corners)
            {
                Assert.AreEqual(expected, Knn.ManhattanDist(origin, corner), 0.1);
            }
        }
Beispiel #5
0
        public void ShouldPredictRedValues()
        {
            // These points must be classified as R for k = 1..3 when using
            // Euclidean distance against the shared training data.
            var redPoints = new List <List <double> >
            {
                new List <double> { 2, 7 },
                new List <double> { 4.1, 6.9 },
                new List <double> { 6, 7 },
                new List <double> { 5.9, 3.9 }
            };

            for (var k = 1; k <= 3; k++)
            {
                foreach (var point in redPoints)
                {
                    var result = Knn.Predict(Knn.EuclideanDist, _trainingData, k, point);
                    Assert.AreEqual(R, result);
                }
            }
        }
        public void ShouldReturnZeroForTheSamePointTwice()
        {
            // The distance from any point to itself must be exactly zero.
            var samePoint = new List <double> { 3, 4, 5 };

            Assert.AreEqual(0, Knn.EuclideanDist(samePoint, samePoint), 0.0001);
        }
        public Knn GetKnn(Guid StoreyID)
        {
            // Fetches the Knn entity belonging to the given storey and
            // returns a new Knn built from its fields (mirrors the original
            // copy-construction behaviour).
            // PERF: was two round-trips — a query selecting the ID followed
            // by Find(). A single First() with a predicate does the same and
            // still throws InvalidOperationException when no row matches.
            Knn knn = db.Knn.First(b => b.Storey.ID == StoreyID);

            return new Knn(knn.Storey, knn.TrainingSet, knn.LabelMap, knn.kNN);
        }
    List <GameObject> CreatePies(List <List <double> > matrix, bool mark = false)
    {
        // Builds one pie GameObject per data row; each row is classified
        // so the pie can reflect its predicted class.
        var pies = new List <GameObject>();

        foreach (var row in matrix)
        {
            int label = Knn.Classify(row.ToArray(), ValfriStatic.MatrixOfPoints, Dataset.NumberOfFeatures, Dataset.K);
            pies.Add(CreatePie(row.ToArray(), label, mark));
        }
        return pies;
    }
Beispiel #9
0
 public MainForm(SURFFeatureExtractor featureExtractor, KnnTrainResult <SURFFeature> knnTrainResult)
 {
     // Wires the form up with the feature extractor and a pre-trained KNN
     // model, plus a palette of brushes used to colour-code tags.
     InitializeComponent();

     FeatureExtractor = featureExtractor;
     TrainResult      = knnTrainResult;
     knn              = new Knn <SURFFeature>(knnTrainResult);
     TagToBrush       = new Dictionary <string, Brush>();
     AllBrush         = new List <Brush>
     {
         Brushes.Aqua,
         Brushes.Red,
         Brushes.Green,
         Brushes.Yellow,
         Brushes.Gray
     };
 }
Beispiel #10
0
        static void Main(string[] args)
        {
            // start/target were consumed by the disabled path-finding demos
            // (DFS, BFS, IDS, A*, N-Queens, knapsack) that used to live here;
            // they are kept so those demos can be re-enabled easily.
            var start = new APoint {
                X = 0, Y = 0
            };
            var target = new APoint {
                X = 5, Y = 4
            };

            // KNN classification demo on the initialized data sets.
            var knn = new Knn();
            knn.InitializeSets();

            const int k = 12;
            knn.Classify(k);

            // NOTE(review): 20 appears to be the test-set size; integer
            // arithmetic truncates the accuracy to a whole percent — confirm.
            Console.WriteLine("Accuracy: " + knn.predictedAnswers * 100 / 20 + "%");
        }
Beispiel #11
0
    // Start is called before the first frame update
    void Start()
    {
        // Wires up the KNN settings UI (K value, weighting toggle, algorithm
        // toggle), reflects the current settings in the labels, then hides
        // the feature-input and KNN-settings panels. (A disabled block that
        // generated per-feature input fields was removed; see history.)

        Button     kButton = GameObject.Find("ChangeK").GetComponent <Button>();
        InputField kField  = GameObject.Find("KInputField").GetComponent <InputField>();

        kButton.onClick.AddListener(() => Dataset.ChangeKValue(kField.text));

        Button weightButton = GameObject.Find("ChangeWeight").GetComponent <Button>();

        weightButton.onClick.AddListener(() => Knn.ChangeKnnState());

        Button algButton = GameObject.Find("ChangeKnnAlg").GetComponent <Button>();

        algButton.onClick.AddListener(() => Knn.ChangeKnnAlgorithm());

        // Show the current configuration in the status labels.
        GameObject.Find("CurrentK").GetComponent <Text>().text        = "K = " + Dataset.K.ToString();
        GameObject.Find("CurrentKnnState").GetComponent <Text>().text = "Weight =  " + Knn.currentState.ToString();
        GameObject.Find("CurrentKnnAlg").GetComponent <Text>().text   = "Algorithm =  " + Knn.currentAlg.ToString();

        // Both auxiliary panels start hidden and non-interactable.
        CanvasGroup featurePanel = featureNameHolder.GetComponent <CanvasGroup>();

        featurePanel.interactable = false;
        featurePanel.alpha        = 0;

        CanvasGroup knnPanel = KNNSettings.GetComponent <CanvasGroup>();

        knnPanel.interactable = false;
        knnPanel.alpha        = 0;
    }
        public void TestKnn()
        {
            // After L-ary → binary conversion, 3-NN must reproduce the
            // expected resubstitution error rates on car and tic-tac-toe.
            var classifier = new Knn();
            var parameter  = new KnnParameter(1, 3, new EuclidianDistance());

            new LaryToBinary(car).Convert();
            classifier.Train(car.GetInstanceList(), parameter);
            Assert.AreEqual(4.74, 100 * classifier.Test(car.GetInstanceList()).GetErrorRate(), 0.01);

            new LaryToBinary(tictactoe).Convert();
            classifier.Train(tictactoe.GetInstanceList(), parameter);
            Assert.AreEqual(5.64, 100 * classifier.Test(tictactoe.GetInstanceList()).GetErrorRate(), 0.01);
        }
        public void ShouldReturnExpectedDistance1()
        {
            // Points differing in a single coordinate: the Euclidean
            // distance equals the absolute difference (|10 - 1| = 9).
            var a = new List <double> { 0, 1 };
            var b = new List <double> { 0, 10 };

            Assert.AreEqual(9.0, Knn.EuclideanDist(a, b), 0.0001);
        }
        public void TestKnn()
        {
            // After discrete → continuous conversion, 3-NN must reproduce
            // the expected error rates on car and tic-tac-toe.
            var classifier = new Knn();
            var parameter  = new KnnParameter(1, 3, new EuclidianDistance());

            new DiscreteToContinuous(car).Convert();
            classifier.Train(car.GetInstanceList(), parameter);
            Assert.AreEqual(4.75, 100 * classifier.Test(car.GetInstanceList()).GetErrorRate(), 0.01);

            new DiscreteToContinuous(tictactoe).Convert();
            classifier.Train(tictactoe.GetInstanceList(), parameter);
            Assert.AreEqual(5.64, 100 * classifier.Test(tictactoe.GetInstanceList()).GetErrorRate(), 0.01);
        }
        public List <Product> KnnRecommandation()
        {
            // Recommends products for the authenticated customer based on up
            // to 5 of their most recent orders, using Knn.Distance over the
            // (Title, Color, price) vectors of the purchased products.
            // Returns an empty list for anonymous users or users without orders.
            List <Product> products            = _context.Products.ToList();
            List <Product> recommendedProducts = new List <Product>();
            List <Product> purchaseProducts    = new List <Product>();

            if (User.Identity.IsAuthenticated)
            {
                // NOTE(review): assumes a Sid claim is always present for
                // authenticated users — FirstOrDefault(...).Value throws an
                // NRE otherwise; confirm against the auth pipeline.
                int customerId = int.Parse(User.Claims.FirstOrDefault(claim => claim.Type == ClaimTypes.Sid).Value);

                // PERF: materialize once — the original re-ran the query via
                // repeated .ToList() calls. Ordering + Take(5) yields the same
                // set whether or not the customer has more than 5 orders.
                var recentOrders = _context.Orders
                                   .Where(order => order.CustomerID == customerId)
                                   .Include(o => o.customer)
                                   .Include(o => o.PurchaseProducts)
                                   .ThenInclude(p => p.Product)
                                   .OrderByDescending(a => a.OrderDate)
                                   .Take(5)
                                   .ToList();
                if (recentOrders.Count == 0)
                {
                    return(recommendedProducts);
                }

                foreach (var order in recentOrders)
                {
                    foreach (var purchase in order.PurchaseProducts)
                    {
                        purchaseProducts.Add(purchase.Product);
                    }
                }

                int count = purchaseProducts.Count;
                int[,] vectors = new int[count, 3];
                // PERF: single pass instead of three separate loops.
                for (int i = 0; i < count; i++)
                {
                    vectors[i, 0] = (int)purchaseProducts[i].Title;
                    vectors[i, 1] = (int)purchaseProducts[i].Color;
                    vectors[i, 2] = (int)purchaseProducts[i].price;
                }

                // Exclude products the customer has already bought.
                products.RemoveAll(p => purchaseProducts.Any(p2 => p.ID == p2.ID));

                List <int> recommanded = Knn.Distance(vectors, products);
                foreach (int id in recommanded)
                {
                    recommendedProducts.Add(products.Single(a => a.ID == id));
                }
            }
            return(recommendedProducts);
        }
        public void TestTrain()
        {
            // Training on each raw data set must reproduce the expected
            // 3-NN resubstitution error rates.
            var classifier = new Knn();
            var parameter  = new KnnParameter(1, 3, new EuclidianDistance());

            classifier.Train(iris.GetInstanceList(), parameter);
            Assert.AreEqual(4.00, 100 * classifier.Test(iris.GetInstanceList()).GetErrorRate(), 0.01);
            classifier.Train(bupa.GetInstanceList(), parameter);
            Assert.AreEqual(19.42, 100 * classifier.Test(bupa.GetInstanceList()).GetErrorRate(), 0.01);
            classifier.Train(dermatology.GetInstanceList(), parameter);
            Assert.AreEqual(3.01, 100 * classifier.Test(dermatology.GetInstanceList()).GetErrorRate(), 0.01);
            classifier.Train(car.GetInstanceList(), parameter);
            Assert.AreEqual(4.75, 100 * classifier.Test(car.GetInstanceList()).GetErrorRate(), 0.01);
            classifier.Train(tictactoe.GetInstanceList(), parameter);
            Assert.AreEqual(5.64, 100 * classifier.Test(tictactoe.GetInstanceList()).GetErrorRate(), 0.01);
        }
Beispiel #17
0
        public MainForm(Knn <RawImage28x28> knn, Kmeans <RawImage28x28> kmeansTrainResult)
        {
            // Builds the form; either model may be null, in which case the
            // corresponding setup step is skipped.
            InitializeComponent();

            bool hasKnn    = knn != null;
            bool hasKmeans = kmeansTrainResult != null;

            if (hasKnn)
            {
                Knn = knn;
                SetupKnn();
            }

            if (hasKmeans)
            {
                Kmeans = kmeansTrainResult;
                SetupKmeans();
            }
        }
Beispiel #18
0
        public void Run()
        {
            // FLANN demo: builds a KD-tree index over 10,000 random 2-D
            // points and runs a 1-NN search for a fixed query point, printing
            // the nearest neighbour's index, distance and coordinates.
            Console.WriteLine("===== FlannTest =====");

            // creates data set
            using (Mat features = new Mat(10000, 2, MatType.CV_32FC1))
            {
                Random rand = new Random();
                for (int i = 0; i < features.Rows; i++)
                {
                    features.Set <float>(i, 0, rand.Next(10000));
                    features.Set <float>(i, 1, rand.Next(10000));
                }

                // query
                Point2f queryPoint = new Point2f(7777, 7777);
                // BUGFIX: the query Mat was never disposed.
                using (Mat queries = new Mat(1, 2, MatType.CV_32FC1))
                {
                    queries.Set <float>(0, 0, queryPoint.X);
                    queries.Set <float>(0, 1, queryPoint.Y);
                    Console.WriteLine("query:({0}, {1})", queryPoint.X, queryPoint.Y);
                    Console.WriteLine("-----");

                    // knnSearch
                    using (Index nnIndex = new Index(features, new KDTreeIndexParams(4)))
                    {
                        const int Knn = 1;   // number of neighbours to retrieve
                        int[]     indices;
                        float[]   dists;
                        nnIndex.KnnSearch(queries, out indices, out dists, Knn, new SearchParams(32));

                        for (int i = 0; i < Knn; i++)
                        {
                            int     index = indices[i];
                            float   dist  = dists[i];
                            Point2f pt    = new Point2f(features.Get <float>(index, 0), features.Get <float>(index, 1));
                            Console.Write("No.{0}\t", i);
                            Console.Write("index:{0}", index);
                            Console.Write(" distance:{0}", dist);
                            Console.Write(" data:({0}, {1})", pt.X, pt.Y);
                            Console.WriteLine();
                        }
                        // BUGFIX: removed a stray `Knn.ToString();` — its
                        // result was discarded and the call had no effect.
                    }
                }
            }
            Console.Read();
        }
Beispiel #19
0
        public static void KnnMain()
        {
            // Interactive KNN demo: optionally reads a configuration from the
            // console, generates grouped random 2-D points, trains a KNN
            // classifier and repeatedly classifies freshly generated points.
            Console.WriteLine("Using demo setup?[y/n]");
            string str = Console.ReadLine();

            // BUGFIX: Console.ReadLine() returns null on EOF, which crashed
            // the original with an NRE; null now falls back to demo defaults.
            if (str != null && str.ToLower().Contains("y") == false)
            {
                // NOTE(review): Convert.ToInt32/ToSingle throw FormatException
                // on non-numeric input; kept as-is to preserve behaviour.
                Console.WriteLine("Input node maximum value:(Default:10000)");
                NodesMaxValueSet = Convert.ToInt32(Console.ReadLine());
                Console.WriteLine("Input node count:(Default:150)");
                NodesSet = Convert.ToInt32(Console.ReadLine());
                Console.WriteLine("Input groups count for random points to divid into:(Default:3, Min:1)");
                DataGroupCount = Convert.ToInt32(Console.ReadLine());
                Console.WriteLine("Input fluctuation ratio:(Default:0.5, Min:0.1)");
                FluctuationRatio = Convert.ToSingle(Console.ReadLine());

                Console.WriteLine("Input new node count:(Default:10)");
                NewNodesCount = Convert.ToInt32(Console.ReadLine());
                Console.WriteLine("Input k set:(Default:7)");
                k = Convert.ToInt32(Console.ReadLine());
            }

            //Gen grouped random nodes
            Random2DPoints.OnGeneratePointGroups += Random2DPoints_OnGeneratePointGroups;
            var ClassifiedPoints = Random2DPoints.GenerateRandomPointsGroup(NodesMaxValueSet, NodesSet, DataGroupCount, FluctuationRatio);
            var Dataset          = new VectorCollection <Point2D>(ClassifiedPoints);

            Dataset.Print();

            var           knnTrainer = new KnnTrainer <Point2D>(Dataset);
            Knn <Point2D> knn        = new Knn <Point2D>(knnTrainer.Train());

            knn.OnClassify += Knn_OnClassify;

            // Keep classifying batches of new random points until the user
            // opts out via InputCommand().
            while (Retry)
            {
                var NewNodes = Random2DPoints.GenerateRandomPoints(NodesMaxValueSet, NewNodesCount);

                foreach (var NewNode in NewNodes)
                {
                    knn.Classify(k, NewNode);
                }

                InputCommand();
            }
        }
Beispiel #20
0
        public static byte[] ClassifyTemplate(List <double> coordinates, Knn knn)
        {
            // Classifies the given coordinate vector against the stored KNN
            // model and returns the resulting per-label option dictionary
            // serialized to a byte array.
            Dictionary <int, string> LabelMap = ReadLabelMap(knn.LabelMap);
            var loaded_knn = Serializer.Load <KNearestNeighbors>(knn.kNN);
            Dictionary <string, double> optionsDict = getOptions(coordinates.ToArray(), loaded_knn, LabelMap);

            // SECURITY(review): BinaryFormatter is obsolete and unsafe when
            // deserializing untrusted data — consumers of this payload should
            // migrate to a safer format (e.g. System.Text.Json). Kept here
            // because existing readers expect this wire format.
            var binFormatter = new BinaryFormatter();

            // BUGFIX: the MemoryStream was closed but never disposed.
            using (var mStream = new MemoryStream())
            {
                binFormatter.Serialize(mStream, optionsDict);
                // ToArray() remains valid after the stream is disposed.
                return(mStream.ToArray());
            }
        }
        public void TestKnn()
        {
            // After normalization, 3-NN must reproduce the expected error
            // rates on the iris, bupa and dermatology data sets.
            var classifier = new Knn();
            var parameter  = new KnnParameter(1, 3, new EuclidianDistance());

            new Normalize(iris).Convert();
            classifier.Train(iris.GetInstanceList(), parameter);
            Assert.AreEqual(4.67, 100 * classifier.Test(iris.GetInstanceList()).GetErrorRate(), 0.01);

            new Normalize(bupa).Convert();
            classifier.Train(bupa.GetInstanceList(), parameter);
            Assert.AreEqual(16.52, 100 * classifier.Test(bupa.GetInstanceList()).GetErrorRate(), 0.01);

            new Normalize(dermatology).Convert();
            classifier.Train(dermatology.GetInstanceList(), parameter);
            Assert.AreEqual(1.91, 100 * classifier.Test(dermatology.GetInstanceList()).GetErrorRate(), 0.01);
        }
        public void TestKnn()
        {
            // After PCA transformation, 3-NN must reproduce the expected
            // error rates on the iris, bupa and dermatology data sets.
            var classifier = new Knn();
            var parameter  = new KnnParameter(1, 3, new EuclidianDistance());

            new Pca(iris).Convert();
            classifier.Train(iris.GetInstanceList(), parameter);
            Assert.AreEqual(4.00, 100 * classifier.Test(iris.GetInstanceList()).GetErrorRate(), 0.01);

            new Pca(bupa).Convert();
            classifier.Train(bupa.GetInstanceList(), parameter);
            Assert.AreEqual(19.13, 100 * classifier.Test(bupa.GetInstanceList()).GetErrorRate(), 0.01);

            new Pca(dermatology).Convert();
            classifier.Train(dermatology.GetInstanceList(), parameter);
            Assert.AreEqual(3.28, 100 * classifier.Test(dermatology.GetInstanceList()).GetErrorRate(), 0.01);
        }
        public async Task <IHttpActionResult> PostLocation()
        {
            // Parses a tracker location payload, classifies the coordinates
            // against the matching KNN model and stores the resulting
            // location options for the tracker.
            DateTime                time = DateTime.Now;
            TrackersHandler         TH   = new TrackersHandler(db);
            TrackerLocationsHandler TLH  = new TrackerLocationsHandler(db);
            KnnsHandler             KH   = new KnnsHandler(db);

            // BUGFIX: was ReadAsStringAsync().Result — blocking inside an
            // async action risks deadlock / thread-pool starvation.
            string resultString = (await Request.Content.ReadAsStringAsync()).Trim();

            // KVP<tracker id, KVP<knn/storey key, coordinate vector>>
            KeyValuePair <int, KeyValuePair <Guid, List <double> > > coordinates = LocationParser(resultString);
            Knn knn = KH.GetKnn(coordinates.Value.Key);   // local renamed — 'Knn' shadowed the type

            // BUGFIX: classification previously ran twice (once for storage,
            // once for the response); compute once and reuse.
            byte[]          options = Classify.ClassifyTemplate(coordinates.Value.Value, knn);
            Tracker         tracker = TH.GetTracker(coordinates.Key);
            TrackerLocation loc     = new TrackerLocation(time, options, tracker);

            TLH.PostTrackerLocation(loc);
            return(Ok(options));
        }
        public async Task <IHttpActionResult> PostStoreyKnn()
        {
            // Regenerates the KNN template for a storey from its stored
            // training values and persists the resulting Knn entity.
            // BUGFIX: was ReadAsStringAsync().Result — await instead of
            // blocking inside an async action.
            Guid storeyID = JsonConvert.DeserializeObject <Guid>(await Request.Content.ReadAsStringAsync());

            // Unused TrackersHandler/SpacesHandler locals removed.
            StoreysHandler        SH  = new StoreysHandler(db);
            TrainingValuesHandler TVH = new TrainingValuesHandler(db);
            KnnsHandler           KH  = new KnnsHandler(db);

            IQueryable <TrainingValue> values = TVH.GetTrainingValuesFromStorey(storeyID);

            // KVP<KVP<kNN bytes, label-map bytes>, space GUID string>
            KeyValuePair <KeyValuePair <byte[], byte[]>, string> LabelMapKnn = KnnGenerate.GenerateTemplateTrainingValues(values);
            Storey storey = SH.GetStorey(storeyID);

            byte[] labelMap = LabelMapKnn.Key.Value;
            byte[] knnBytes = LabelMapKnn.Key.Key;   // renamed from 'Knn' — it shadowed the type name
            Knn    knn      = new Knn(storey, labelMap, knnBytes);

            KH.PostKnn(knn);
            return(Ok());
        }
Beispiel #25
0
        public static void Main(string[] args)
        {
            // Dispatches to one of three learning algorithms by name.
            // BUGFIX: guard against a missing argument — the original threw
            // IndexOutOfRangeException when launched with no arguments.
            if (args.Length == 0)
            {
                return;   // no algorithm requested; nothing to run
            }

            string algorithm = args[0];

            // all three algorithms process data very differently
            // they exist in different classes
            switch (algorithm)
            {
            case "knn":
                Knn.Run();
                break;

            case "linear":
                OnlineLms.Run();
                break;

            case "nb":
                NaiveBayes.Run();
                break;
            }
        }
Beispiel #26
0
    public static void NewPrediction()
    {
        // Copies the three newly entered prediction values (newPred) into
        // the first entry of NewDataPoints, then classifies the resulting
        // object with KNN.
        // NOTE(review): this method appears truncated in this view — its
        // closing brace is not visible after the early return below.
        if (usedFeatures.Count > 0)
        {
            // Specific features were selected: write the values under those
            // feature keys.
            NewDataPoints[0][usedFeatures[0]] = newPred[0];
            NewDataPoints[0][usedFeatures[1]] = newPred[1];
            NewDataPoints[0][usedFeatures[2]] = newPred[2];
        }
        else
        {
            // No explicit selection: use the data point's keys 1..3.
            // NOTE(review): presumably key 0 is an id/label column — confirm.
            NewDataPoints[0][NewDataPoints[0].Keys.ElementAt(1)] = newPred[0];
            NewDataPoints[0][NewDataPoints[0].Keys.ElementAt(2)] = newPred[1];
            NewDataPoints[0][NewDataPoints[0].Keys.ElementAt(3)] = newPred[2];
        }

        var obj        = CreateNewObject(NewDataPoints[0].Keys.Count() - 2, NewDataPoints[0]);
        var classified = Knn.Classify(obj, DictToMatrix.DictionaryListToMatrix(ListOfPoints), GetNumberOfTargets(GetDataSetTargets()), K);

        // NOTE(review): -1 looks like a sentinel from Classify; nothing
        // further happens in the visible lines when it is returned.
        if (classified is - 1)
        {
            return;
        }
        // Calculate button: runs the KNN calculation from the form inputs.
        private void CalculateToolStripButton_Click(object sender, EventArgs e)
        {
            // Read the column selection, class column and K from the form,
            // validate them, then compute and print the answer.
            var selectColumns = SelectTextBox.Text.Replace(" ", string.Empty).Split(',');
            var classColumn   = ClassTextBox.Text.Replace(" ", string.Empty);
            var kElement      = KtextBox.Text.ToString().Replace(" ", string.Empty);

            bool missingInput = string.IsNullOrEmpty(classColumn) || selectColumns == null ||
                                selectColumns.Length == 0 || string.IsNullOrEmpty(kElement) ||
                                _newTestObject == null;

            if (missingInput)
            {
                MessageBox.Show(@"Please add data");
                return;
            }

            ToConsole(@"Calculating");

            var knn    = new Knn();
            var answer = knn.Calculate(_newTestObject, int.Parse(kElement), _fileData, selectColumns, classColumn);

            ToConsole(@"Answer -> " + answer);
        }
    public void Save()
    {
        // Reads the user's input vector, classifies it, creates a pie for
        // it, projects it with kernel PCA and displays it on the canvas.
        List <double> inputs = scrollScript.GetInputs();

        Debug.Log(inputs.Count);

        double[] features = inputs.ToArray();

        GameObject initiator       = GameObject.Find("DataInitiator");
        var        initiatorScript = (PieController)initiator.GetComponent(typeof(PieController));

        int predictedClass = Knn.Classify(features, ValfriStatic.MatrixOfPoints, Dataset.NumberOfFeatures, Dataset.K);

        var canvas = GameObject.Find("Canvas");
        var pie    = initiatorScript.CreatePie(features, predictedClass);
        var coords = KernalPca.Transform(features);

        Display.Pie(pie, canvas);
        Display.SetPosition(pie, (float)coords[0], (float)coords[1], 0, true);

        Delete();
    }
Beispiel #29
0
        public void ShouldPredictGreenValues()
        {
            // These points must be classified as G for k = 1..3 when using
            // Manhattan distance against the shared training data.
            var greenPoints = new List <List <double> >
            {
                new List <double> { 4, 6 },
                new List <double> { 3.5, 6 },
                new List <double> { 5.2, 5 }
            };

            for (var k = 1; k <= 3; k++)
            {
                foreach (var point in greenPoints)
                {
                    var result = Knn.Predict(Knn.ManhattanDist, _trainingData, k, point);
                    Assert.AreEqual(G, result);
                }
            }
        }
Beispiel #30
0
        static void Main(string[] args)
        {
            // Loads the MNIST training data and any saved K-means result,
            // builds a KNN classifier and/or K-means model, and launches the
            // main form with whichever models are available.
            VectorCollection <RawImage28x28>  Dataset           = null;
            Knn <RawImage28x28>               knn               = null;
            KmeansTrainResult <RawImage28x28> KmeansTrainResult = null;
            Kmeans <RawImage28x28>            kmeans            = null;

            // CheckFiles: fall back to files next to the executable when the
            // configured paths do not exist.
            if (File.Exists(MnistPixelFilePath) == false)
            {
                MnistPixelFilePath = Environment.CurrentDirectory + "\\train-images.idx3-ubyte";
            }

            if (File.Exists(MnistLabelFilePath) == false)
            {
                MnistLabelFilePath = Environment.CurrentDirectory + "\\train-labels.idx1-ubyte";
            }

            if (File.Exists(KmeansTrainResultFilePath) == false)
            {
                KmeansTrainResultFilePath = Environment.CurrentDirectory + "\\KmeansTrainResult.json";
            }

            // LoadFiles
            if (File.Exists(MnistPixelFilePath) && File.Exists(MnistLabelFilePath))
            {
                var MnistDataSet = MnistDataSetLoader.LoadData(MnistPixelFilePath, MnistLabelFilePath);
                Dataset = new VectorCollection <RawImage28x28>(MnistDataSet);
            }

            if (File.Exists(KmeansTrainResultFilePath))
            {
                KmeansTrainResult = new KmeansTrainResult <RawImage28x28>(KmeansTrainResultFilePath);
            }

            if (Dataset != null)
            {
                var knnTrainer = new KnnTrainer <RawImage28x28>(Dataset);
                knn = new Knn <RawImage28x28>(knnTrainer.Train());

                if (KmeansTrainResult == null)
                {
                    Console.WriteLine("Can't find Kmeans train data.");
                    KmeansTrainResult = AskKmeansTrainData(false);
                }
                else   // BUGFIX: was a redundant `else if (KmeansTrainResult != null)`
                {
                    Console.WriteLine("Found Kmeans train data.");
                    Console.WriteLine("[U]se trained data or [t]rain again? [u/t]");
                    // BUGFIX: Console.ReadLine() can return null on EOF —
                    // guard before dereferencing. Also removed a pointless
                    // `KmeansTrainResult = null;` immediately before reassignment.
                    string answer = Console.ReadLine();
                    if (answer != null && answer.ToLower().Contains('t'))
                    {
                        KmeansTrainResult = AskKmeansTrainData(true);
                    }
                }
            }
            else
            {
                Console.WriteLine("Can't find Knn train data, ignored");
                Console.WriteLine("Press any key to continue.");
                Console.ReadKey();
            }

            if (KmeansTrainResult != null)
            {
                kmeans = new Kmeans <RawImage28x28>(KmeansTrainResult);
            }

            // With neither model available the application cannot do anything.
            if (kmeans == null && knn == null)
            {
                Console.WriteLine("Fatal error: Can not find any trained data.");
                Console.WriteLine("Press any key to exit...");
                Console.ReadKey();
                return;
            }

            MainForm form = new MainForm(knn, kmeans);

            Application.EnableVisualStyles();
            Application.Run(form);
        }