/// <summary>
/// Reads the KNN prediction parameters from the form, validates them, runs the
/// k-nearest-neighbor prediction for the requested horizon, and shows the result
/// in a line chart.
/// </summary>
/// <param name="sender">Button that raised the event.</param>
/// <param name="e">Routed event data (unused).</param>
private void btn_KNN(object sender, RoutedEventArgs e)
{
    // Parse user input. NOTE(review): int.Parse/float.Parse will throw on
    // non-numeric input and are culture-sensitive — consider TryParse with
    // CultureInfo.InvariantCulture if the text boxes are free-form.
    var lp = int.Parse(txt_LenghtOfPattern.Text);
    var kNN = int.Parse(txt_KNN.Text);
    var rate = float.Parse(txt_rateIP.Text);
    var numPre = int.Parse(txt_NumOfPredic.Text);
    var dataPrediction = txt_SelectedData.Text;

    // BUG FIX: the original assigned each Check* result to the same variable,
    // overwriting the previous one — only CheckDataInput could ever block the
    // run. Chain the checks so the first failure (non-zero result, presumably —
    // the original treated 0 as success) stops further validation.
    int checkResult = CheckDataLenghPattern(lp);
    if (checkResult == 0) { checkResult = CheckValueKNN(kNN); }
    if (checkResult == 0) { checkResult = CheckRateImportantPoint(rate); }
    if (checkResult == 0) { checkResult = CheckPredictionHorizon(numPre); }
    if (checkResult == 0) { checkResult = CheckDataInput(dataPrediction); }

    if (checkResult == 0)
    {
        KNearestNeighbor knn = new KNearestNeighbor(lp, kNN, rate);

        // First prediction step from the CSV data, then repeat until the
        // requested horizon (numPre steps in total) is reached.
        List<DataPrediction> dataNeedPre = Function.ReadCSVFile(dataPrediction);
        List<DataPrediction> dataAfterPre = knn.PredictWithKNN(dataNeedPre);
        for (int i = 1; i < numPre; i++)
        {
            Function.RefeshData(dataAfterPre);
            dataAfterPre = knn.PredictWithKNN(dataAfterPre);
        }

        LineChart chart = new LineChart(dataAfterPre);
        chart.Show();
    }
}
/// <summary>
/// Creates a KNN-backed bot for the given color. The learner is seeded with a
/// single neutral example (label 0.5 on an empty board) because it requires at
/// least one example before it can be used.
/// </summary>
public KnnBot(Checker myColor) : base(myColor)
{
    learner = new KNearestNeighbor(5, 1000);

    Example seed = MakeExample(new Board(), Checker.Blue);
    seed.Labels.Add(0.5);
    LearnOneExample(seed);
}
public void Can_classify_with_euclidean_distance()
{
    string[] labels;
    var featureSpace = GetDataSet(out labels);

    // With k = 3, a point at the origin should be assigned class "B".
    var originLabel = KNearestNeighbor.Classify(new Vector(0, 0), featureSpace, labels, 3);
    Assert.AreEqual("B", originLabel);

    // ...while the point (1, 1) should be assigned class "A".
    var unitLabel = KNearestNeighbor.Classify(new Vector(1.0, 1.0), featureSpace, labels, 3);
    Assert.AreEqual("A", unitLabel);
}
/// <summary>
/// Builds a Glass sample from the numeric inputs on the form and reports the
/// type suggested by each trained model (neural network, KNN, Bayes).
/// </summary>
private void button3_Click(object sender, EventArgs e)
{
    // First and last constructor arguments are -1: presumably placeholders for
    // the unknown sample id and unknown group/type — TODO confirm against the
    // Glass constructor.
    Glass newGlass = new Glass(-1, (double)riNum.Value, (double)naNum.Value, (double)mgNum.Value, (double)alNum.Value, (double)siNum.Value, (double)kNum.Value, (double)caNum.Value, (double)baNum.Value, (double)feNum.Value, -1);
    var neuralOutput = network.GetOutputs(newGlass);
    // NOTE(review): the "******" runs below appear to be redacted/garbled
    // source (likely the string concatenation joining the three model outputs,
    // e.g. "+ neuralOutput + ... + knn.classify(newGlass) + ..."). This
    // statement will not compile as-is — recover the original from version
    // control. ("Siūloma tipas" is Lithuanian for "suggested type".)
    output(("[BACK PROPOGATION] Siūloma tipas: "******"[K-Nearest-Neighbors] Siūloma tipas: "******"[Bayes] Siūloma tipas: " + bayes.test(newGlass)));
}
/// <summary>
/// Finds the best "previous" transaction in account <paramref name="a"/> to use
/// as a template for transaction <paramref name="t"/>, matching on amount
/// proximity (via k-nearest-neighbor over category amounts) or, when the amount
/// is zero, on date proximity. Returns a Transaction, a split-related object
/// from the neighbor result, or null when nothing matches.
/// </summary>
/// <param name="a">Account whose transaction history is searched.</param>
/// <param name="t">The new transaction needing a template.</param>
/// <param name="payeeOrTransferCaption">Payee/transfer caption used to filter candidates.</param>
private object FindPreviousTransactionByPayee(Account a, Transaction t, string payeeOrTransferCaption)
{
    MyMoney money = t.MyMoney;
    IList <Transaction> list = money.Transactions.GetTransactionsFrom(a);
    int len = list.Count;
    if (len == 0)
    {
        // Nothing to do here
        return(null);
    }

    // Tally of how close the current transaction is to the amounts for a given category or split.
    // Two separate accumulators: one for split transactions, one for normal ones.
    singleNeighbors = new KNearestNeighbor <Category>();
    splitNeighbors = new KNearestNeighbor <Category>();
    splitCount = normalCount = 0;

    Transaction closestByDate = null;
    long ticks = 0;
    decimal amount = t.Amount;

    for (int i = 0; i < len; i++)
    {
        Transaction u = list[i] as Transaction;
        if (amount == 0)
        {
            // we can't use the probabilities when the amount is zero, so we just return
            // the closest transaction by date because in the case of something like a paycheck
            // the most recent paycheck usually has the closest numbers on the splits.
            long newTicks = Math.Abs((u.Date - t.Date).Ticks);
            if (closestByDate == null || newTicks < ticks)
            {
                closestByDate = u;
                ticks = newTicks;
            }
        }
        else
        {
            // Non-zero amount: feed this candidate into the KNN accumulators
            // (AddPossibility presumably updates single/splitNeighbors and the
            // split/normal counters — confirm in its definition).
            AddPossibility(t, u, payeeOrTransferCaption);
        }
    }

    // closestByDate is only ever set on the amount == 0 path, so this early
    // return fires exactly when we took the date-proximity route.
    if (closestByDate != null)
    {
        return(closestByDate);
    }

    // Query whichever accumulator saw more candidates for the single nearest
    // neighbor by amount.
    IEnumerable <Tuple <object, Category> > result = null;
    if (splitCount > normalCount)
    {
        result = splitNeighbors.GetNearestNeighbors(1, t.Amount);
    }
    else
    {
        result = singleNeighbors.GetNearestNeighbors(1, t.Amount);
    }

    if (result != null && result.Any())
    {
        var first = result.First();
        var it = first.Item1 as Transaction;
        if (it != null && it.IsSplit)
        {
            closestByDate = null;
            ticks = 0;
            // if this is a "Split" transaction, then we should grab the closest date
            // so that the copied splits have the best chance of matching.
            // (e.g. in a split paycheck scenario)
            foreach (var u in list)
            {
                if (u.IsSplit && u.PayeeOrTransferCaption == t.PayeeOrTransferCaption)
                {
                    long newTicks = Math.Abs((u.Date - t.Date).Ticks);
                    if (closestByDate == null || newTicks < ticks)
                    {
                        closestByDate = u;
                        ticks = newTicks;
                    }
                }
            }
            return(closestByDate);
        }
        return(first.Item1);
    }
    return(null);
}
public void KNearestNeighborConstructorTest()
{
    // Three well-separated clusters in R^3, labelled 0, 1 and 2.
    double[][] inputs =
    {
        new double[] { -5, -2, -1 },
        new double[] { -5, -5, -6 },
        new double[] { 2, 1, 1 },
        new double[] { 1, 1, 2 },
        new double[] { 1, 2, 2 },
        new double[] { 3, 1, 2 },
        new double[] { 11, 5, 4 },
        new double[] { 15, 5, 6 },
        new double[] { 10, 5, 6 },
    };
    int[] outputs = { 0, 0, 1, 1, 1, 1, 2, 2, 2 };

    KNearestNeighbor target = new KNearestNeighbor(3, inputs, outputs);

    // With k = 3 the classifier must reproduce the label of every training point.
    for (int i = 0; i < inputs.Length; i++)
    {
        Assert.AreEqual(outputs[i], target.Compute(inputs[i]));
    }

    // Unseen points near each cluster should be assigned that cluster's label.
    double[][] test =
    {
        new double[] { -4, -3, -1 },
        new double[] { -5, -4, -4 },
        new double[] { 5, 3, 4 },
        new double[] { 3, 1, 6 },
        new double[] { 10, 5, 4 },
        new double[] { 13, 4, 5 },
    };
    int[] expectedOutputs = { 0, 0, 1, 1, 2, 2, };

    for (int i = 0; i < test.Length; i++)
    {
        Assert.AreEqual(expectedOutputs[i], target.Compute(test[i]));
    }
}
/// <summary>
/// Runs k-fold cross-validation (NUMBER_OF_SEGMENTS folds) of a 7-nearest-neighbor
/// classifier over the glass data set, writing per-fold and average accuracy to the
/// rich text box. radioButton1 selects raw data vs. the outlier-cleaned list.
/// </summary>
private void Knn_start_Click(object sender, EventArgs e)
{
    double averageAccuracy = 0;

    // Choose raw data or the outlier-filtered copy.
    List<Glass> glasses = radioButton1.Checked
        ? dataList
        : clearDistantValuesFromList(dataList);

    int testingFileCount = glasses.Count / NUMBER_OF_SEGMENTS;
    for (int segment = 0; segment < NUMBER_OF_SEGMENTS; segment++)
    {
        richTextBox1.AppendText(String.Format("Crosscheck segment {0} \n ", segment + 1));

        int correctGuesses = 0;
        int guessCounter = 0;
        int testingDataFromIndex = segment * testingFileCount;
        int testingDataToIndex = (segment + 1) * testingFileCount;

        // Partition into a test fold [from, to) and training data (everything else).
        var trainingData = new List<Glass>();
        var testingData = new List<Glass>();
        for (int i = 0; i < glasses.Count; i++)
        {
            // BUG FIX: the original condition was "i <= testingDataToIndex",
            // which put one extra element into each test fold so adjacent
            // folds overlapped. The upper bound must be exclusive.
            if (i >= testingDataFromIndex && i < testingDataToIndex)
            {
                testingData.Add(glasses[i]);
            }
            else
            {
                trainingData.Add(glasses[i]);
            }
        }

        // Classify every held-out sample with k = 7.
        var knn = new KNearestNeighbor(7, trainingData);
        foreach (Glass item in testingData)
        {
            var ans = knn.classify(item);
            guessCounter++;
            if (ans == item.group_type)
            {
                correctGuesses++;
            }
        }

        var totalGlasses = glasses.Count;
        var totalTrain = trainingData.Count;
        var totalTest = testingData.Count;
        double trainProc = Math.Round(((double)totalTrain / totalGlasses) * 100, 2);
        double testProc = Math.Round(((double)totalTest / totalGlasses) * 100, 2);
        double accuracy = Math.Round(((double)correctGuesses / guessCounter) * 100, 2);
        averageAccuracy += accuracy;

        richTextBox1.AppendText(string.Format("Total: {0, 3}; Training: {1, 3}({2, 4}%); Testing: {3, 3}({4, 4}%)\n", totalGlasses, totalTrain, trainProc, totalTest, testProc));
        richTextBox1.AppendText(string.Format("Correct guesses: {0, 3} ; Total guesses: {1, 3} ; Accuracy: {2, 4}%\n", correctGuesses, guessCounter, accuracy));
    }
    richTextBox1.AppendText(string.Format("Average accurasy of all segments - {0}%\n", averageAccuracy / NUMBER_OF_SEGMENTS));
}