/// <summary>
/// Builds a k-nearest-neighbours classifier over <paramref name="trainingSet"/>.
/// </summary>
/// <param name="k">Number of neighbours consulted per query.</param>
/// <param name="trainingSet">Instances the classifier compares against.</param>
/// <param name="useWeightedVote">Whether neighbour votes are weighted.</param>
/// <returns>A configured <see cref="KNearestNeighbours"/> instance.</returns>
public static KNearestNeighbours CreateKNNClassifier(int k, Dataset trainingSet, bool useWeightedVote)
{
    // Distance measure constructed with parameter 2, matching the other
    // factory methods in this file (presumably a power/norm setting — confirm
    // against DefaultDistanceMeasure's constructor).
    var distance = new DefaultDistanceMeasure(2);
    return new KNearestNeighbours(k, distance, trainingSet, useWeightedVote);
}
/// <summary>
/// Wraps a trained <see cref="KNearestNeighbours"/> model, converting each of
/// its stored instances into a vector backed by the given linear-algebra
/// provider up front.
/// </summary>
public KNNClassifier(ILinearAlgebraProvider lap, KNearestNeighbours model, int k, DistanceMetric distanceMetric = DistanceMetric.Euclidean)
{
    _k = k;
    _lap = lap;
    _model = model;
    _distanceMetric = distanceMetric;

    // Materialise every model instance as a provider vector once, so
    // classification does not pay the conversion cost per query.
    foreach (var instance in model.Instance)
    {
        _instance.Add(lap.Create(instance.Data));
    }
}
static void Main(string[] args)
{
    // Demo driver: run both recommendation strategies for one hard-coded user.
    const int userId = 186;
    var ratings = CsvReader.ReadData();

    // User-item collaborative filtering: 25 neighbours, constructor argument
    // 0.35 (presumably a similarity threshold — confirm against the ctor).
    var knn = new KNearestNeighbours(0.35);
    var neighbours = knn.GetNearestNeighbours(ratings, userId, 25);
    var topRatings = GetTopXAmountOfRecommendations.GetTopXAmountOfRatings(8, neighbours, 3);

    // Item-item collaborative filtering for the same user.
    var top5 = ItemItemLogic.RunItemItemMethods(ratings, userId);
}
/// <summary>
/// For every dataset listed in datasets.txt and every fold, trains an
/// ACO-KNN classifier with class-based weights on the training fold and
/// saves the learned weights. No accuracy testing is performed here.
/// </summary>
public static void RunACOIBL_WeightOutputs()
{
    AccuracyMeasure accuracyMeasure = new AccuracyMeasure(); // kept for parity with sibling runners; unused here

    foreach (string dataset in GetDatasetFolds("datasets.txt"))
    {
        Console.WriteLine("Data Table:" + dataset);

        for (_currentFold = 0; _currentFold < _folds; _currentFold++)
        {
            Console.WriteLine("Fold:" + _currentFold.ToString());

            DataMining.Data.Dataset[] tables = LoadTrainingAndTestingData(dataset, _currentFold);
            DataMining.Data.Dataset trainingSet = tables[0];
            DataMining.Data.Dataset testingSet = tables[1]; // loaded but not used by this runner

            KNearestNeighbours knnclassifier =
                SingleTest.CreateKNNAntIBMinerClassifier_ClassBasedWeights(trainingSet, false);

            Console.WriteLine("ACO-KNN-CB: " + dataset);
            SaveWeights(trainingSet, knnclassifier);
        }

        Console.WriteLine("-------------------------------------------");
        Console.WriteLine("-------------------------------------------");
        Console.WriteLine("-------------------------------------------");
    }
}
/// <summary>
/// Sets up the optional heuristic calculators. (Method name spelling kept
/// as-is — external callers depend on it.)
/// </summary>
/// <param name="dataset">Dataset used to build the calculators.</param>
/// <param name="useAttributes">Build the entropy (attribute) calculator.</param>
/// <param name="useInstances">Build the IBL (instance) calculator.</param>
public void InitilizeHeuristicInformation(Dataset dataset, bool useAttributes, bool useInstances)
{
    if (useAttributes)
    {
        this._entropyCalculator = new EntropyHeuristicsCalculator(dataset);
    }

    if (useInstances)
    {
        // NOTE(review): the KNN is built over the _dataset field while the IBL
        // calculator receives the dataset parameter — confirm this mix is
        // intentional and that the two always refer to the same data.
        var measure = new DefaultDistanceMeasure(2);
        var knn = new KNearestNeighbours(measure, _dataset, false);
        this._iblCalculator = new IBLHeuristicsCalculator(dataset, knn);
    }
}
/// <summary>
/// Trains an ensemble KNN classifier whose class-based attribute weights are
/// optimised by particle-swarm optimisation (PSOIB). The solution has one
/// component per (attribute, class) pair plus one extra component.
/// </summary>
public static EnsembleClassifier CreateKNNPSOIBMinerClassifier_ClassBasedWeights_Ensemble(Dataset trainingSet, bool useWeightedVote)
{
    int classCount = trainingSet.Metadata.Target.Values.Length;
    int attributesCount = trainingSet.Metadata.Attributes.Length;

    // One weight per (attribute, class) pair, plus one leading component.
    int problemSize = (attributesCount * classCount) + 1;

    var knn = new KNearestNeighbours(new DefaultDistanceMeasure(2), trainingSet, useWeightedVote);

    // NOTE(review): learning set and validation set are the same, so particle
    // fitness is measured on the training data itself.
    var evaluator = new ContinuousACO.ProblemSpecifics.IBClassificationQualityEvaluator(knn, new AccuracyMeasure())
    {
        LearningSet = trainingSet,
        ValidationSet = trainingSet
    };

    var psoIB = new PSOIB(problemSize, archive, maxIterations / archive, convergenceIterations, evaluator);
    psoIB.OnPostSwarmIteration += OnPostColonyIteration;

    return psoIB.CreateEnsembleClassifier();
}
/// <summary>
/// Demo: prints the user-item nearest neighbours and the top predicted
/// ratings for one hard-coded user.
/// </summary>
/// <param name="dictionary">Ratings keyed by user id, then item id.</param>
private static void RunUserItemMethods(Dictionary <int, Dictionary <int, double> > dictionary)
{
    const int userId = 186;
    var itemId = 514; // retained for the commented-out single-item prediction below

    var knn = new KNearestNeighbours();
    var neighbours = knn.GetNearestNeighbours(userId, dictionary[userId], dictionary, 25, 0.35);

    foreach (var neighbour in neighbours)
    {
        Console.WriteLine("Id: {0} Similarity: {1}", neighbour.Key, neighbour.Similarity);
    }

    //PredictedRatingCalculations predictedRatingCalculations = new PredictedRatingCalculations();
    //var predRating = predictedRatingCalculations.CalculatePredictedRating(itemId, nearestNeigbours, dictionary);
    //Console.WriteLine("Predicted Rating: {0}" , predRating);

    foreach (var rating in GetTopXAmountOfRatings(8, neighbours, dictionary))
    {
        Console.WriteLine("Predicted Rating: {0}", rating);
    }
}
/// <summary>
/// Trains a KNN classifier whose class-based attribute weights are optimised
/// by the continuous-ACO miner (AntIBMiner). The solution has one component
/// per (attribute, class) pair plus one extra component.
/// </summary>
public static KNearestNeighbours CreateKNNAntIBMinerClassifier_ClassBasedWeights(Dataset trainingSet, bool useWeightedVote)
{
    int classCount = trainingSet.Metadata.Target.Values.Length;
    int attributesCount = trainingSet.Metadata.Attributes.Length;

    // One weight per (attribute, class) pair, plus one leading component.
    int problemSize = (attributesCount * classCount) + 1;

    var knn = new KNearestNeighbours(new DefaultDistanceMeasure(2), trainingSet, useWeightedVote);

    // NOTE(review): learning set and validation set are the same, so ant
    // fitness is measured on the training data itself.
    var evaluator = new ContinuousACO.ProblemSpecifics.IBClassificationQualityEvaluator(knn, new AccuracyMeasure())
    {
        LearningSet = trainingSet,
        ValidationSet = trainingSet
    };

    var problem = new Problem <double>(null, null, evaluator, null);
    var antminer = new AntIBMiner(maxIterations, colonySize, convergenceIterations, problem, problemSize, archive, q, segma, trainingSet);
    antminer.OnPostColonyIteration += OnPostColonyIteration;

    return antminer.CreateClassifier() as KNearestNeighbours;
}
//private void LoadSimilarityCache()
//{
//    if (this._similarityCache == null)
//        this._similarityCache = new double[this._IBClassifier.Database.Size, this._IBClassifier.Database.Size];
//    //for(int i=0; i< this._IBClassifier.Metadata.Size; i++)
//    //    for(int j=0; j< this._IBClassifier.Metadata.Size; j++)
//    //        this._similarityCache[i,j]=this._IBClassifier.distanceMeasure.CalculateSimilarity(this._IBClassifier.Database[i],this._IBClassifier.Database[j]);
//}
#endregion

/// <summary>
/// Decodes a candidate solution into classifier parameters (kernel parameter,
/// similarity threshold, or k, depending on the classifier type) plus
/// per-class attribute weights, applies them, and scores the classifier on
/// the validation set, storing the score in <paramref name="solution"/>.
/// Solution layout: component 0 is the classifier-specific parameter;
/// components 1.. hold the attribute weights.
/// </summary>
public override void EvaluateSolutionQuality(Solution <double> solution)
{
    int classCount = this.LearningSet.Metadata.Target.Values.Length;
    int attributesCount = this.LearningSet.Metadata.Attributes.Length;

    if (this._IBClassifier is GaussianKernelEstimator)
    {
        double kernelParameter = solution.Components[0].Element;
        double[][] classBasedWeights = ExtractClassBasedWeights(solution, classCount, attributesCount);

        GaussianKernelEstimator GKClassfier = this._IBClassifier as GaussianKernelEstimator;
        GKClassfier.KernelParameter = kernelParameter / 10; // scale component 0 down by 10 (original behaviour)
        GKClassfier.SetWeights(classBasedWeights);
    }
    else if (this._IBClassifier is NearestClassClassifier)
    {
        double similarityTheshold = solution.Components[0].Element;
        double[][] classBasedWeights = ExtractClassBasedWeights(solution, classCount, attributesCount);

        NearestClassClassifier NNClassfier = this._IBClassifier as NearestClassClassifier;
        NNClassfier.SimilarityThreshold = similarityTheshold;
        NNClassfier.SetWeights(classBasedWeights);
    }
    else if (this._IBClassifier is KNearestNeighbours)
    {
        // Component 0 is scaled into an integer neighbour count k >= 1.
        int k = ((int)(Math.Round((solution.Components[0].Element) * 20, 0))) + 1;
        double[][] classBasedWeights = ExtractClassBasedWeights(solution, classCount, attributesCount);

        KNearestNeighbours KNNClassifier = this._IBClassifier as KNearestNeighbours;
        KNNClassifier.KNeighbours = k;
        this._IBClassifier.SetWeights(classBasedWeights);
    }

    this._IBClassifier.Database = this.LearningSet;
    double quality = _measure.CalculateMeasure(this._IBClassifier, ValidationSet);
    solution.Quality = quality;
}

/// <summary>
/// Reads the per-class attribute weights out of a solution. When the solution
/// carries more than attributesCount + 1 components, every class has its own
/// weight row (components consumed sequentially from index 1); otherwise the
/// single weight row at components 1..attributesCount is replicated for every
/// class.
/// BUGFIX(review): the original KNearestNeighbours branch reset the cursor to
/// 0 (instead of 1) in the shared-weights case, which made classes after the
/// first re-read component 0 (the k parameter) as a weight and shifted all
/// remaining weights by one. All three classifier branches now reset to 1,
/// consistent with the GaussianKernelEstimator and NearestClassClassifier
/// branches of the original code.
/// </summary>
private static double[][] ExtractClassBasedWeights(Solution <double> solution, int classCount, int attributesCount)
{
    double[][] weights = new double[classCount][];
    bool classBased = solution.Components.Count > attributesCount + 1;

    int counter = 1; // component 0 is the classifier parameter, not a weight
    for (int i = 0; i < classCount; i++)
    {
        weights[i] = new double[attributesCount];
        for (int j = 0; j < attributesCount; j++)
        {
            weights[i][j] = solution.Components[counter].Element;
            counter++;
        }

        if (!classBased)
        {
            counter = 1; // shared weights: every class re-reads components 1..attributesCount
        }
    }

    return weights;
}
/// <summary>
/// "Classify" button handler: converts the model's learning/test data into a
/// DataSet, runs KNN (k = 5) over it, plots the training points coloured by
/// class plus the still-unclassified test points, and prints the result
/// distances to the on-screen console.
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private void Button_Click_2(object sender, RoutedEventArgs e)
{
    // Instantiate: convert the typed lists into DataTables and bundle them.
    DataTable datos = HelperDataTable.ConvertToDataTable <Persona>(knnmodel.Datos);
    DataTable datosTest = HelperDataTable.ConvertToDataTable <Persona>(knnmodel.DatosTest);
    DataSet dataSet = new DataSet();
    dataSet.Tables.Add(datos);
    dataSet.Tables.Add(datosTest);

    // k is the second parameter.
    KNearestNeighbours <double, DBNull> knn = new KNearestNeighbours <double, DBNull>(dataSet, 5, true);

    // Get the categories (class labels) discovered by the KNN.
    List <string> categorias = knn.categories;

    // Create a list of colours...
    List <SolidColorBrush> colores = new List <SolidColorBrush> { Brushes.Blue, Brushes.Red, Brushes.Yellow, Brushes.Green };

    // ...and build a category -> colour dictionary.
    // NOTE(review): only four colours are available, so a fifth category would
    // fail on the colores[i] lookup here — confirm the category count is bounded.
    Dictionary <string, SolidColorBrush> catcolor = new Dictionary <string, SolidColorBrush>();
    for (int i = 0; i < categorias.Count; i++)
    {
        catcolor.Add(categorias[i], colores[i]);
    }

    // For each element of the data model... (original author's note: this will
    // probably have to change since the model can vary a lot, but there will
    // always be a CLASS member.)
    int contador = 0;
    foreach (var item in knnmodel.Datos)
    {
        foreach (KeyValuePair <string, SolidColorBrush> item2 in catcolor)
        {
            if (item.CLASS == item2.Key)
            {
                contador++;
                grafico.CreatePoint(item.estatura, item.pelo, item2.Value, item2.Key, contador);
            }
        }
    }

    // Plot the unclassified test point(s). (Original comment said "black
    // point", but the brush actually used is White.)
    foreach (var item in knnmodel.DatosTest)
    {
        grafico.CreatePoint(item.estatura, item.pelo, Brushes.White, "???", 999);
    }

    // Run the classification and dump each (position, distance) pair.
    Result <int, double> result = knn.Execute();
    foreach (KeyValuePair <int, double> item in result.DataDictionary)
    {
        TBConsole.Text = TBConsole.Text + item.Key.ToString() + ": " + item.Value.ToString() + " \r\n";
    }
}
/// <summary>
/// Benchmarks the full classifier line-up — plain KNN, ACO-tuned KNN, NCC and
/// GKC variants, with and without weighted voting and class-based weights —
/// over every dataset fold listed in datasets.txt, printing and saving each
/// accuracy.
/// NOTE(review): every classifier is built from AND tested on the same merged
/// dataset (training + testing folds), so these are resubstitution accuracies,
/// not held-out ones — confirm this is intentional.
/// </summary>
public static void RunACOIBL()
{
    int k = 9; // neighbour count used by every KNN variant below
    AccuracyMeasure accuracyMeasure = new AccuracyMeasure();
    foreach (string dataset in GetDatasetFolds("datasets.txt"))
    {
        //----------------------------------------
        Console.WriteLine("Data Table:" + dataset);
        //----------------------------------------
        for (_currentFold = 0; _currentFold < _folds; _currentFold++)
        {
            //----------------------------------------
            //Console.WriteLine("Fold:" + _currentFold.ToString());
            //----------------------------------------
            DataMining.Data.Dataset[] tables = LoadTrainingAndTestingData(dataset, _currentFold);
            DataMining.Data.Dataset trainingSet = tables[0];
            DataMining.Data.Dataset testingSet = tables[1];
            // Merge both folds back together; every run below uses the full set.
            Dataset datasetFull = Dataset.Merge(trainingSet, testingSet);
            double quality = 0;
            try
            {
                // Plain KNN, majority vote.
                {
                    KNearestNeighbours knn = SingleTest.CreateKNNClassifier(k, datasetFull, false);
                    quality = SingleTest.TestClassifier(knn, datasetFull, accuracyMeasure);
                    quality = Math.Round(quality * 100, 2);
                    //------------------------------------------------------------------
                    Console.WriteLine("KNN: " + dataset + " - Accuracy=" + quality);
                    SaveResults(dataset, "KNN", k.ToString(), quality.ToString());
                    Console.WriteLine("-------------------------------------------");
                    Console.WriteLine("-------------------------------------------");
                    Console.WriteLine("-------------------------------------------");
                }
                // Plain KNN, weighted vote.
                {
                    KNearestNeighbours knnWV = SingleTest.CreateKNNClassifier(k, datasetFull, true);
                    quality = SingleTest.TestClassifier(knnWV, datasetFull, accuracyMeasure);
                    quality = Math.Round(quality * 100, 2);
                    //------------------------------------------------------------------
                    Console.WriteLine("KNN-WV: " + dataset + " - Accuracy=" + quality);
                    SaveResults(dataset, "KNN-WV", k.ToString(), quality.ToString());
                    Console.WriteLine("-------------------------------------------");
                    Console.WriteLine("-------------------------------------------");
                    Console.WriteLine("-------------------------------------------");
                }
                // Nearest-class classifier (no ACO tuning).
                {
                    NearestClassClassifier ncc = SingleTest.CreateNCClassifier(datasetFull);
                    quality = SingleTest.TestClassifier(ncc, datasetFull, accuracyMeasure);
                    quality = Math.Round(quality * 100, 2);
                    //------------------------------------------------------------------
                    Console.WriteLine("NNC: " + dataset + " - Accuracy=" + quality);
                    SaveResults(dataset, "NNC", quality.ToString());
                    Console.WriteLine("-------------------------------------------");
                    Console.WriteLine("-------------------------------------------");
                    Console.WriteLine("-------------------------------------------");
                }
                // ACO-tuned KNN, majority vote.
                {
                    KNearestNeighbours knn = SingleTest.CreateKNNAntIBMinerClassifier(k, datasetFull, false);
                    quality = SingleTest.TestClassifier(knn, datasetFull, accuracyMeasure);
                    quality = Math.Round(quality * 100, 2);
                    //------------------------------------------------------------------
                    Console.WriteLine("ACO-KNN: " + dataset + " - Accuracy=" + quality);
                    SaveResults(dataset, "ACO-KNN", k.ToString(), quality.ToString());
                    Console.WriteLine("-------------------------------------------");
                    Console.WriteLine("-------------------------------------------");
                    Console.WriteLine("-------------------------------------------");
                }
                // ACO-tuned KNN, weighted vote.
                {
                    KNearestNeighbours knn = SingleTest.CreateKNNAntIBMinerClassifier(k, datasetFull, true);
                    quality = SingleTest.TestClassifier(knn, datasetFull, accuracyMeasure);
                    quality = Math.Round(quality * 100, 2);
                    //------------------------------------------------------------------
                    Console.WriteLine("ACO-KNN-WV: " + dataset + " - Accuracy=" + quality);
                    SaveResults(dataset, "ACO-KNN-WV", k.ToString(), quality.ToString());
                    Console.WriteLine("-------------------------------------------");
                    Console.WriteLine("-------------------------------------------");
                    Console.WriteLine("-------------------------------------------");
                }
                // ACO-tuned KNN with class-based weights, majority vote.
                {
                    KNearestNeighbours knn = SingleTest.CreateKNNAntIBMinerClassifier_ClassBasedWeights(k, datasetFull, false);
                    quality = SingleTest.TestClassifier(knn, datasetFull, accuracyMeasure);
                    quality = Math.Round(quality * 100, 2);
                    //------------------------------------------------------------------
                    Console.WriteLine("ACO-KNN-CB: " + dataset + " - Accuracy=" + quality);
                    SaveResults(dataset, "ACO-KNN-CB", k.ToString(), quality.ToString());
                    Console.WriteLine("-------------------------------------------");
                    Console.WriteLine("-------------------------------------------");
                    Console.WriteLine("-------------------------------------------");
                }
                // ACO-tuned KNN with class-based weights, weighted vote.
                {
                    KNearestNeighbours knn = SingleTest.CreateKNNAntIBMinerClassifier_ClassBasedWeights(k, datasetFull, true);
                    quality = SingleTest.TestClassifier(knn, datasetFull, accuracyMeasure);
                    quality = Math.Round(quality * 100, 2);
                    //------------------------------------------------------------------
                    Console.WriteLine("ACO-KNN-CB-WV: " + dataset + " - Accuracy=" + quality);
                    SaveResults(dataset, "ACO-KNN-CB-WV", k.ToString(), quality.ToString());
                    Console.WriteLine("-------------------------------------------");
                    Console.WriteLine("-------------------------------------------");
                    Console.WriteLine("-------------------------------------------");
                }
                // ACO-tuned nearest-class classifier.
                {
                    NearestClassClassifier ncc = SingleTest.CreateNCCAntIBMinerClassifier(datasetFull);
                    quality = SingleTest.TestClassifier(ncc, datasetFull, accuracyMeasure);
                    quality = Math.Round(quality * 100, 2);
                    //------------------------------------------------------------------
                    Console.WriteLine("ACO-NCC: " + dataset + " - Accuracy=" + quality);
                    SaveResults(dataset, "ACO-NCC", quality.ToString());
                    Console.WriteLine("-------------------------------------------");
                    Console.WriteLine("-------------------------------------------");
                    Console.WriteLine("-------------------------------------------");
                }
                // ACO-tuned nearest-class classifier with class-based weights.
                {
                    NearestClassClassifier ncc = SingleTest.CreateNCCAntIBMinerClassifier_ClassBasedWeights(datasetFull);
                    quality = SingleTest.TestClassifier(ncc, datasetFull, accuracyMeasure);
                    quality = Math.Round(quality * 100, 2);
                    //------------------------------------------------------------------
                    Console.WriteLine("ACO-NCC-CB: " + dataset + " - Accuracy=" + quality);
                    SaveResults(dataset, "ACO-NCC-CB", quality.ToString());
                    Console.WriteLine("-------------------------------------------");
                    Console.WriteLine("-------------------------------------------");
                    Console.WriteLine("-------------------------------------------");
                }
                // ACO-tuned Gaussian kernel estimator.
                {
                    GaussianKernelEstimator GKC = SingleTest.CreateGKAntIBMinerClassifier(datasetFull);
                    quality = SingleTest.TestClassifier(GKC, datasetFull, accuracyMeasure);
                    quality = Math.Round(quality * 100, 2);
                    //------------------------------------------------------------------
                    Console.WriteLine("ACO-GKC: " + dataset + " - Accuracy=" + quality);
                    SaveResults(dataset, "ACO-GKC", quality.ToString());
                    Console.WriteLine("-------------------------------------------");
                    Console.WriteLine("-------------------------------------------");
                    Console.WriteLine("-------------------------------------------");
                }
                // ACO-tuned Gaussian kernel estimator with class-based weights.
                {
                    GaussianKernelEstimator GKC = SingleTest.CreateGKAntIBMinerClassifier_ClassBaseWeights(datasetFull);
                    quality = SingleTest.TestClassifier(GKC, datasetFull, accuracyMeasure);
                    quality = Math.Round(quality * 100, 2);
                    //------------------------------------------------------------------
                    Console.WriteLine("ACO-GKC-CB: " + dataset + " - Accuracy=" + quality);
                    SaveResults(dataset, "ACO-GKC-CB", quality.ToString());
                    Console.WriteLine("-------------------------------------------");
                    Console.WriteLine("-------------------------------------------");
                    Console.WriteLine("-------------------------------------------");
                }
            }
            catch (Exception ex)
            {
                // Log and continue with the next fold rather than aborting the run.
                LogError(ex);
                // Console.WriteLine(ex.Message);
            }
        }
    }
}
/// <summary>
/// Benchmarks the conventional (non-ACO) classifiers — KNN with k = 1/11/21,
/// the nearest-class classifier at three similarity thresholds, and the
/// Gaussian kernel estimator at three kernel parameters — averaging accuracy
/// across all folds of every dataset, then printing and saving the results.
/// </summary>
public static void RunConventional()
{
    AccuracyMeasure accuracyMeasure = new AccuracyMeasure();
    foreach (string dataset in GetDatasetFolds(DatasetNamesFile))
    {
        //----------------------------------------
        Console.WriteLine("Data Table:" + dataset);
        //----------------------------------------
        //try
        {
            // Accumulators, one per classifier configuration (summed over folds).
            double quality1 = 0;
            double quality2 = 0;
            double quality3 = 0;
            double quality4 = 0;
            double quality5 = 0;
            double quality6 = 0;
            double quality7 = 0;
            double quality8 = 0;
            double quality9 = 0;
            for (_currentFold = 0; _currentFold < _folds; _currentFold++)
            {
                //----------------------------------------
                //Console.WriteLine("Fold:" + _currentFold.ToString());
                //----------------------------------------
                DataMining.Data.Dataset[] tables = LoadTrainingAndTestingData(dataset, _currentFold);
                DataMining.Data.Dataset trainingSet = tables[0];
                DataMining.Data.Dataset testingSet = tables[1];
                // KNN at three neighbour counts.
                KNearestNeighbours knn1 = SingleTest.CreateKNNClassifier(1, trainingSet, false);
                quality1 += SingleTest.TestClassifier(knn1, testingSet, accuracyMeasure);
                //------------------------------------------------------------------
                KNearestNeighbours knn11 = SingleTest.CreateKNNClassifier(11, trainingSet, false);
                quality2 += SingleTest.TestClassifier(knn11, testingSet, accuracyMeasure);
                //------------------------------------------------------------------
                KNearestNeighbours knn21 = SingleTest.CreateKNNClassifier(21, trainingSet, false);
                quality3 += SingleTest.TestClassifier(knn21, testingSet, accuracyMeasure);
                //------------------------------------------------------------------
                //------------------------------------------------------------------
                //------------------------------------------------------------------
                // Nearest-class classifier at three similarity thresholds.
                NearestClassClassifier ncc0 = SingleTest.CreateNCClassifier(trainingSet, 0);
                quality4 += SingleTest.TestClassifier(ncc0, testingSet, accuracyMeasure);
                //------------------------------------------------------------------
                NearestClassClassifier ncc5 = SingleTest.CreateNCClassifier(trainingSet, 0.5);
                quality5 += SingleTest.TestClassifier(ncc5, testingSet, accuracyMeasure);
                //------------------------------------------------------------------
                // NOTE(review): this configuration uses threshold 0.9 but is
                // labelled/saved as "NCC-1" below — confirm which is intended.
                NearestClassClassifier ncc1 = SingleTest.CreateNCClassifier(trainingSet, 0.9);
                quality6 += SingleTest.TestClassifier(ncc1, testingSet, accuracyMeasure);
                ////------------------------------------------------------------------
                ////------------------------------------------------------------------
                ////------------------------------------------------------------------
                // Gaussian kernel estimator at three kernel parameters.
                GaussianKernelEstimator gcc0 = SingleTest.CreateGKClassifier(trainingSet, 0);
                quality7 += SingleTest.TestClassifier(gcc0, testingSet, accuracyMeasure);
                //------------------------------------------------------------------
                GaussianKernelEstimator gcc5 = SingleTest.CreateGKClassifier(trainingSet, 0.25);
                quality8 += SingleTest.TestClassifier(gcc5, testingSet, accuracyMeasure);
                //------------------------------------------------------------------
                GaussianKernelEstimator gcc1 = SingleTest.CreateGKClassifier(trainingSet, 0.5);
                quality9 += SingleTest.TestClassifier(gcc1, testingSet, accuracyMeasure);
            }
            // Average over folds and convert to a percentage with 2 decimals.
            quality1 = Math.Round((quality1 / _folds) * 100, 2);
            quality2 = Math.Round((quality2 / _folds) * 100, 2);
            quality3 = Math.Round((quality3 / _folds) * 100, 2);
            quality4 = Math.Round((quality4 / _folds) * 100, 2);
            quality5 = Math.Round((quality5 / _folds) * 100, 2);
            quality6 = Math.Round((quality6 / _folds) * 100, 2);
            quality7 = Math.Round((quality7 / _folds) * 100, 2);
            quality8 = Math.Round((quality8 / _folds) * 100, 2);
            quality9 = Math.Round((quality9 / _folds) * 100, 2);
            Console.WriteLine("1NN: " + dataset + " - Accuracy=" + quality1);
            SaveResults(dataset, "1NN", quality1.ToString());
            Console.WriteLine("11NN: " + dataset + " - Accuracy=" + quality2);
            SaveResults(dataset, "11NN", quality2.ToString());
            Console.WriteLine("21NN: " + dataset + " - Accuracy=" + quality3);
            SaveResults(dataset, "21NN", quality3.ToString());
            Console.WriteLine("NCC-0: " + dataset + " - Accuracy=" + quality4);
            SaveResults(dataset, "NCC-0", quality4.ToString());
            Console.WriteLine("NCC-0.5: " + dataset + " - Accuracy=" + quality5);
            SaveResults(dataset, "NCC-0.5", quality5.ToString());
            Console.WriteLine("NCC-1: " + dataset + " - Accuracy=" + quality6);
            SaveResults(dataset, "NCC-1", quality6.ToString());
            Console.WriteLine("GKE-0: " + dataset + " - Accuracy=" + quality7);
            SaveResults(dataset, "GKE-0", quality7.ToString());
            Console.WriteLine("GKE-0.25: " + dataset + " - Accuracy=" + quality8);
            SaveResults(dataset, "GKE-0.25", quality8.ToString());
            Console.WriteLine("GKE-0.5: " + dataset + " - Accuracy=" + quality9);
            SaveResults(dataset, "GKE-0.5", quality9.ToString());
            Console.WriteLine("-------------------------------------------");
            Console.WriteLine("-------------------------------------------");
            Console.WriteLine("-------------------------------------------");
        }
        //catch (Exception ex)
        {
            //LogError(ex);
            // Console.WriteLine(ex.Message);
        }
    }
}
/// <summary>
/// Demo program: runs a tiny Iris-style KNN classification, exercises the
/// HelperMath statistics helpers over several numeric types, then builds a
/// small city graph and runs a recursive depth-first traversal over it.
/// </summary>
static void Main(string[] args)
{
    // --- KNN over a hand-built Iris-style DataSet -------------------------
    DataSet dataSet = new DataSet("Data");
    DataTable data = new DataTable("DataLearning");
    DataTable dataTest = new DataTable("DataTest");
    data.Columns.Add("Atributo1", typeof(double));
    data.Columns.Add("Atributo2", typeof(double));
    data.Columns.Add("Atributo3", typeof(double));
    data.Columns.Add("Atributo4", typeof(double));
    data.Columns.Add("CLASS", typeof(string));
    data.Rows.Add(5.1, 3.5, 1.4, 0.2, "Iris-Setosa");
    data.Rows.Add(7.0, 3.2, 4.7, 1.4, "Iris-Versicolor");
    data.Rows.Add(5.1, 3.5, 1.4, 2.5, "Iris-Virginica");
    data.Rows.Add(5.1, 2.5, 3.4, 1.5, "Iris-Virginica");
    dataTest.Columns.Add("Atributo1", typeof(double));
    dataTest.Columns.Add("Atributo2", typeof(double));
    dataTest.Columns.Add("Atributo3", typeof(double));
    dataTest.Columns.Add("Atributo4", typeof(double));
    dataTest.Columns.Add("CLASS", typeof(string)); // this column will always be named like this
    dataTest.Rows.Add(5.4, 3.7, 1.5, 0.2, "?");
    //dataAnalyze.Rows.Add(4.0, 0.2, 4.7, 3.4, "?");
    dataSet.Tables.Add(data);
    dataSet.Tables.Add(dataTest);
    KNearestNeighbours <double, DBNull> knn = new KNearestNeighbours <double, DBNull>(dataSet, 1, true);
    Result <int, double> result = knn.Execute(); // returns a dictionary: key = position, value = distance

    // --- HelperMath: decimal ---------------------------------------------
    List <decimal> num = new List <decimal> { 1.3m, 4.0m, 5.5m, 6.8m, 9.4m };
    Result <decimal, DBNull> resultado = HelperMath <decimal, DBNull> .Media(num);
    Result <decimal, DBNull> resultado11 = HelperMath <decimal, DBNull> .Mediana(num);
    Console.Write("la media es:" + resultado.Data + resultado.Message + " ");
    Console.WriteLine("la mediana es:" + resultado11.Data + resultado11.Message);

    // --- HelperMath: float ------------------------------------------------
    List <float> num2 = new List <float> { 1.00f, 4.1f, 5.4f, 6.5f, 9.7f };
    Result <float, DBNull> resultado2 = HelperMath <float, DBNull> .Media(num2);
    Result <float, DBNull> resultado22 = HelperMath <float, DBNull> .Mediana(num2);
    Console.Write("la media es:" + resultado2.Data + resultado2.Message + " ");
    Console.WriteLine("la mediana es:" + resultado22.Data + resultado22.Message);

    // --- HelperMath: double -----------------------------------------------
    List <double> num3 = new List <double> { 1.00, 4, 4, 5, 6, 9, 9, 10.5, 4, 5, 3, 4, 6, 7, 4, 4, 4.0 };
    Result <double, DBNull> resultado3 = HelperMath <double, DBNull> .Media(num3);
    Result <double, DBNull> resultado32 = HelperMath <double, DBNull> .Mediana(num3);
    Console.Write("la media es:" + resultado3.Data + resultado3.Message + " ");
    Console.WriteLine("la mediana es:" + resultado32.Data + resultado32.Message);

    // --- HelperMath: int --------------------------------------------------
    List <int> num4 = new List <int> { 2, 4, 6, 1, 9, 100, 8, 7 };
    Result <int, DBNull> resultado4 = HelperMath <int, DBNull> .Media(num4);
    Result <int, DBNull> resultado5 = HelperMath <int, DBNull> .Mediana(num4);
    Console.Write("la media es:" + resultado4.Data + resultado4.Message + " ");
    Console.WriteLine("la mediana es:" + resultado5.Data + resultado5.Message);

    // Compute maxima, minima and spread statistics over the double list.
    Console.WriteLine("el maximo es " + HelperMath <double, DBNull> .Maximo(num3).Data);
    Console.WriteLine("el minimo es " + HelperMath <double, DBNull> .Minimo(num3).Data);
    Console.WriteLine("la varianza es " + HelperMath <double, DBNull> .Varianza(num3).Data);
    Console.WriteLine("la desviacion tipica es " + HelperMath <double, DBNull> .DesviacionTipica(num3).Data);
    Console.WriteLine("el coeficiente de variación es " + HelperMath <double, DBNull> .CoeficienteVariacion(num3).Data);
    Dictionary <double, int> r = HelperMath <double, int> .Moda(num3).DataDictionary;
    Console.WriteLine("la moda vale " + r.First().Value + " para el elemento de la lista de valor " + r.First().Key);

    // --- Graph demo: four cities, weighted arcs, recursive DFS ------------
    Graph <string> graph = new Graph <string>();
    Node <string> nodo1 = new Node <string>("Sevilla");
    Node <string> nodo2 = new Node <string>("Cordoba");
    Node <string> nodo3 = new Node <string>("Madrid");
    Node <string> nodo4 = new Node <string>("Valencia");
    graph.InsertarNodo(nodo1);
    graph.InsertarNodo(nodo2);
    graph.InsertarNodo(nodo3);
    graph.InsertarNodo(nodo4);
    graph.InsertarArco(nodo1, nodo2, 10);
    graph.InsertarArco(nodo2, nodo3, 6);
    graph.InsertarArco(nodo3, nodo4, 8);
    graph.InsertarArco(nodo3, nodo1, 8);
    graph.InsertarArco(nodo4, nodo1, 4);
    //Console.WriteLine(graph.isAdyacente(nodo2, nodo3));
    List <Node <string> > nodos = graph.getListaNodos();
    //Console.WriteLine(graph.getListaNodos().Count());
    //graph.MostrarMatrizdeAdyacencia();
    BusquedaProfundidadRecursiva <string> bpr = new BusquedaProfundidadRecursiva <string>();
    bpr.RecorridoProfundidad(graph);
    //bool[] resultado=bpr.Resultado();
    //for(int i = 0; i<resultado.Length; i++)
    //{
    //    Console.WriteLine(resultado[i]);
    //}
    // Keep the console window open until the user presses Enter.
    Console.ReadLine();
}