Example No. 1
        public string[] Predict(string nodeName)
        {
            try
            {
                switch (TrainModel.TrainingMode)
                {
                case "bayes":
                    var bayesModel = _loadedModel as NaiveBayes;
                    var instance   = _loadedCodebook.Transform(nodeName);
                    var c          = bayesModel.Decide(instance);
                    var result     = _loadedCodebook.Revert("output", c);

                    return(new[] { result });

                case "markov":
                    var markovModel   = _loadedModel as HiddenMarkovModel;
                    var code          = _loadedCodebook.Transform("Nodes", nodeName);
                    var predictSample = markovModel.Predict(new[] { code }, 1);
                    var predictResult = _loadedCodebook.Revert("Nodes", predictSample);
                    return(predictResult);
                }

                return(new string[] { });
            }
            catch
            {
                return(new string[] { });
            }
        }
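        // Hypothetical loading sketch (not part of the original snippet): the _loadedModel and
        // _loadedCodebook fields used by Predict could have been deserialized with
        // Accord.IO.Serializer, mirroring the Serializer.Save call that appears in a later
        // example. The file paths and the object-typed _loadedModel field are assumptions.
        private void LoadTrainedModel(string modelPath, string codebookPath)
        {
            _loadedCodebook = Accord.IO.Serializer.Load<Codification>(codebookPath);

            if (TrainModel.TrainingMode == "bayes")
                _loadedModel = Accord.IO.Serializer.Load<NaiveBayes>(modelPath);
            else
                _loadedModel = Accord.IO.Serializer.Load<HiddenMarkovModel>(modelPath);
        }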
        public void Probe()
        {
            pb = csv.ImportFromCsvFile(PROBE);
            for (int i = 0; i < pb.Rows.Count; i++)
            {
                int[] query = codebook.Transform(new[, ]
                {
                    { "Buying", pb.Rows[i].ItemArray[0].ToString() },
                    { "Maint", pb.Rows[i].ItemArray[1].ToString() },
                    { "doors", pb.Rows[i].ItemArray[2].ToString() },
                    { "persons", pb.Rows[i].ItemArray[3].ToString() },
                    { "Lug_boot", pb.Rows[i].ItemArray[4].ToString() },
                    { "Safety", pb.Rows[i].ItemArray[5].ToString() }
                });


                int predicted = tree.Decide(query);
                if (predicted != -1)
                {
                    string answer = codebook.Revert("CarType", predicted);
                    result += "BUYING--" + pb.Rows[i].ItemArray[0].ToString() + "-->" + "MAINT--" + pb.Rows[i].ItemArray[1].ToString() + "-->" + "DOORS--"
                              + pb.Rows[i].ItemArray[2].ToString() + "-->" + "PERSONS--" + pb.Rows[i].ItemArray[3].ToString() + "-->" + "LUG_BOOT--" + pb.Rows[i].ItemArray[4].ToString() + "-->" + "SAFETY--" + pb.Rows[i].ItemArray[5].ToString() + "-->" + answer.ToUpper();
                    result += "\r\n";
                }
                else
                {
                    result += "BUYING--" + pb.Rows[i].ItemArray[0].ToString() + "-->" + "MAINT--" + pb.Rows[i].ItemArray[1].ToString() + "-->" + "DOORS--"
                              + pb.Rows[i].ItemArray[2].ToString() + "-->" + "PERSONS--" + pb.Rows[i].ItemArray[3].ToString() + "-->" + "LUG_BOOT--" + pb.Rows[i].ItemArray[4].ToString() + "-->" + "SAFETY--" + pb.Rows[i].ItemArray[5].ToString() + "-->" + "No Answer Found";
                    result += "\r\n";
                }
            }
        }
Example No. 3
        /*
         * Query asks the tree to predict based on an input given in an arraylist
         * Returns the answer
         */
        public string Query(ArrayList values)
        {
            string[] keys = inputNamesArr;

            // Debug.WriteLine(inputNamesArr.Length);
            // Debug.WriteLine(values.Count);

            // Transform the two arrays inputNamesArr and values into a string[,]
            var queryArr = new String[inputNamesArr.Length, 2];

            for (int i = 0; i < values.Count; i++)
            {
                queryArr[i, 0] = keys.ElementAt(i);
                queryArr[i, 1] = (string)values[i];
            }

            int[] query = codebook.Transform(queryArr);                                             // Transforms the query to a query of ints

            int    predictedInt    = tree.Decide(query);                                            // Get the answer in ints
            string predictedString = codebook.Revert(toPredict, predictedInt);                      // convert the answer back to a string

            // Debug.Write(predictedString);

            return(predictedString);
        }
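        // Hypothetical usage sketch (not part of the original snippet): values must be supplied
        // in the same order as inputNamesArr, e.g. for tennis-style columns
        // "Outlook", "Temperature", "Humidity", "Wind":
        //
        //     var values = new ArrayList { "Sunny", "Hot", "High", "Strong" };
        //     string answer = Query(values);   // e.g. "No"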
Example No. 4
        public void Predict(params string[] args)
        {
            int[] instance;
            try
            {
                instance = codeBook.Transform(args);
            }
            catch (Exception e)
            {
                System.Console.WriteLine(e.Message);
                return;
            }

            int    c      = nativeBayes.Decide(instance);
            string result = codeBook.Revert(headerToPredict, c);

            System.Console.WriteLine(result);

            double[] probs = nativeBayes.Probabilities(instance);

            foreach (var item in probs)
            {
                System.Console.WriteLine(item);
            }
        }
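        // Hypothetical training sketch (not part of the original snippet): one way the codeBook,
        // nativeBayes and headerToPredict fields used by Predict could be prepared with
        // Accord.NET. The DataTable and its column names ("Feature1", "Feature2", "Label")
        // are placeholders, not values from the original code.
        private void Train(DataTable data)
        {
            headerToPredict = "Label";                  // assumed output column
            codeBook        = new Codification(data);   // learn the string-to-integer mapping

            DataTable symbols = codeBook.Apply(data);
            int[][]   inputs  = symbols.ToJagged<int>("Feature1", "Feature2"); // assumed input columns
            int[]     outputs = symbols.ToArray<int>(headerToPredict);

            // NaiveBayesLearning builds a NaiveBayes classifier from the integer symbols.
            nativeBayes = new NaiveBayesLearning().Learn(inputs, outputs);
        }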
Example No. 5
        private string toString(Codification codebook)
        {
            if (IsRoot)
            {
                return("Root");
            }

            String name = Owner.Attributes[Parent.Branches.AttributeIndex].Name;

            if (String.IsNullOrEmpty(name))
            {
                name = "x" + Parent.Branches.AttributeIndex;
            }

            String op = ComparisonExtensions.ToString(Comparison);

            String value;

            if (codebook != null && Value.HasValue && codebook.Columns.Contains(name))
            {
                value = codebook.Revert(name, (int)Value.Value);
            }

            else
            {
                value = Value.ToString();
            }


            return(String.Format("{0} {1} {2}", name, op, value));
        }
Example No. 6
        private string toString(Antecedent antecedent, Codification codebook, CultureInfo culture)
        {
            int    index = antecedent.Index;
            String name  = Variables[index].Name;

            if (String.IsNullOrEmpty(name))
            {
                name = "x[" + index + "]";
            }

            String op = ComparisonExtensions.ToString(antecedent.Comparison);

            String value;

            if (codebook != null && codebook.Columns.Contains(name))
            {
                value = codebook.Revert(name, (int)antecedent.Value);
            }
            else
            {
                value = antecedent.Value.ToString(culture);
            }

            return(String.Format(culture, "{0} {1} {2}", name, op, value));
        }
Example No. 7
        public string getRecommendationsByUsers(string id = "user4")
        {
            DataTable data = new DataTable("dataTable");

            PopulateHead(data);
            PopulateTable(data, id);

            Codification codification = new Codification(data);
            DataTable    codifiedData = codification.Apply(data);

            int[][] input = codifiedData.ToJagged <int>("Age", "Gender");

            int[] predictions = codifiedData.ToArray <int>("Best Genre");

            ID3Learning decisionTreeLearningAlgorithm = new ID3Learning {
            };

            try
            {
                var   customer = _context.Customers.Where(c => c.Username == id).FirstOrDefault();
                int[] query;
                if (customer.Age <= 12)
                {
                    query = codification.Transform(new[, ] {
                        { "Age", "0-12" }, { "Gender", customer.Gender.ToString() }
                    });
                }
                else if (12 < customer.Age && customer.Age <= 25)
                {
                    query = codification.Transform(new[, ] {
                        { "Age", "13-25" }, { "Gender", customer.Gender.ToString() }
                    });
                }
                else if (25 < customer.Age && customer.Age < 40)
                {
                    query = codification.Transform(new[, ] {
                        { "Age", "26-39" }, { "Gender", customer.Gender.ToString() }
                    });
                }
                else
                {
                    query = codification.Transform(new[, ] {
                        { "Age", "40+" }, { "Gender", customer.Gender.ToString() }
                    });
                }

                DecisionTree decisionTree = decisionTreeLearningAlgorithm.Learn(input, predictions);
                int          result       = decisionTree.Decide(query);
                string       diagnosis    = codification.Revert("Best Genre", result);
                return(diagnosis);
            }
            catch (Exception)
            {
                return("Unfortunatly No Matches Were Found");

                throw;
            }
        }
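        // Hypothetical refactoring sketch (not in the original): the four Transform calls above
        // differ only in the age bucket, so a small helper could collapse the if/else chain.
        private static string AgeBucket(int age)
        {
            if (age <= 12) return "0-12";
            if (age <= 25) return "13-25";
            if (age < 40)  return "26-39";
            return "40+";
        }

        // Usage: query = codification.Transform(new[,] {
        //     { "Age", AgeBucket(customer.Age) }, { "Gender", customer.Gender.ToString() }
        // });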
Example No. 8
        private List <productos> revisarProductos(DataTable data)
        {
            var codebook = new Codification(data);

            int numCategorias = db.categorias.Count();

            DecisionVariable[] attributes =
            {
                new DecisionVariable("Categoria", numCategorias),  // 3 possible values (Sunny, overcast, rain)
                                new DecisionVariable("Precio", 5), // 3 possible values (Hot, mild, cool)  
                 
            };

            int classCount = 2; // 2 possible output values for "recomendar": yes or no

            DecisionTree tree = new DecisionTree(attributes, classCount);

            // Create a new instance of the ID3 algorithm
            ID3Learning id3learning = new ID3Learning(tree);

            // Translate our training data into integer symbols using our codebook:
            DataTable symbols = codebook.Apply(data);

            int[][] inputs  = symbols.ToIntArray("Categoria", "Precio");
            int[]   outputs = symbols.ToIntArray("recomendar").GetColumn(0);

            // Learn the training instances!
            id3learning.Run(inputs, outputs);

            // Compute the training error when predicting training instances
            double error = new ZeroOneLoss(outputs).Loss(tree.Decide(inputs));

            // The tree can now be queried for new examples through
            // its decide method. For example, we can create a query

            List <productos> product = db.productos.ToList();

            foreach (productos item in db.productos.ToList())
            {
                int[] query = codebook.Transform(new[, ] {
                    { "Categoria", Convert.ToString(item.fkCategoria) },
                    { "Precio", devolverTipoPrecio(item.precio) }
                });

                // And then predict the label using
                int predicted = tree.Decide(query);  // result will be 0

                // We can translate it back to strings using
                string answer = codebook.Revert("recomendar", predicted); // Answer will be: "No"
                if (answer.Equals("no"))
                {
                    product.Remove(item);
                }
            }
            return(product);
        }
Example No. 9
        public void testForInstance(KNearestNeighbors <string> knn, String input, String Output)
        {
            // Obtain the numeric output that represents the answer
            int c = knn.Decide(input); // answer will be 0

            // Now let us convert the numeric output to an actual "Yes" or "No" answer
            string result = codebook.Revert("GeneratedByProgram", c); // answer will be "No"

            Console.WriteLine("Test Data Input :  " + input + "\nExpectation: " + Output + "\nResult: " + result);
        }
Example No. 10
        public string[] Predict(string nodeName)
        {
            try
            {
                switch (TrainModel.TrainingMode)
                {
                case "bayes":
                    var bayesModel = _loadedModel as NaiveBayes;
                    var instance   = _loadedCodebook.Transform(nodeName);

                    // This is for retrieving only one prediction
                    // var c = bayesModel.Decide(instance);
                    // var result = _loadedCodebook.Revert("output", c);

                    double[] probs      = bayesModel.Probabilities(instance);
                    int[]    sortedKeys = SortAndIndex(probs);

                    int      numOfPreds  = 4;
                    string[] predictions = new string[numOfPreds];

                    for (int i = 0; i < 4; i++)
                    {
                        var prediction = _loadedCodebook.Revert("output", sortedKeys[i]);
                        predictions[i] = prediction;
                    }
                    return(predictions);

                case "markov":
                    var markovModel   = _loadedModel as HiddenMarkovModel;
                    var code          = _loadedCodebook.Transform("Nodes", nodeName);
                    var predictSample = markovModel.Predict(new[] { code }, 1);
                    var predictResult = _loadedCodebook.Revert("Nodes", predictSample);

                    return(predictResult);
                }

                return(new string[] { });
            }
            catch
            {
                return(new string[] { });
            }
        }
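        // The Predict method above relies on a SortAndIndex helper that is not shown in this
        // snippet. A minimal sketch of what it might do, assuming it should return the class
        // indices ordered by descending probability (requires System.Linq):
        private static int[] SortAndIndex(double[] values)
        {
            // Pair each probability with its original index, order by probability
            // descending, and keep only the indices.
            return values
                   .Select((value, index) => new { value, index })
                   .OrderByDescending(pair => pair.value)
                   .Select(pair => pair.index)
                   .ToArray();
        }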
Example No. 11
        public static void PrintTree(DecisionNode node, String indent, bool last)
        {
            Console.WriteLine();
            string[] spstr     = { " == " };
            var      values    = node.ToString().Split(spstr, StringSplitOptions.None);
            string   nodeValue = node.ToString();

            if (values.Count() == 2)
            {
                nodeValue = values[0] + " == " + codebook.Revert(values[0].Trim(), Int32.Parse(values[1].Trim())).ToString();
            }
            Console.Write(indent + "+- " + nodeValue);
            indent += last ? "   " : "|  ";

            if (last)
            {
                Console.Write(" => " + codebook.Revert("Activity", (int)node.Output));
            }

            for (int i = 0; i < node.Branches.Count; i++)
            {
                PrintTree(node.Branches[i], indent, node.Branches[i].IsLeaf);
            }
        }
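        // Hypothetical usage sketch (not in the original): printing a learned tree from its root,
        // assuming codebook is the Codification used during training.
        //
        //     PrintTree(tree.Root, "", tree.Root.IsLeaf);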
Example No. 12
        public string Predict(DataTable dataTable)
        {
            Codification codification = new Codification(dataTable);

            int[] codewords = this.Model.Decide(codification.Apply(dataTable).ToArray <int>("v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v8", "v9", "v10", "v11", "v12", "v13", "v14", "v15"));
            if (codewords[0] == -1)
            {
                return(null);
            }
            string moveString = codification.Revert(codewords)[0];

            var firstDirectionInString = GetFirstString(moveString);

            return(firstDirectionInString);
        }
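        // GetFirstString is not shown here, but Example No. 16 below performs the equivalent step
        // inline by splitting on the move names. A sketch along those lines, assuming the reverted
        // codeword contains one of the four direction names:
        private static string GetFirstString(string moveString)
        {
            // Return the direction name that appears earliest in the reverted string.
            string[] directions = { "Up", "Right", "Down", "Left" };
            string   first      = null;
            int      firstIndex = int.MaxValue;

            foreach (string direction in directions)
            {
                int index = moveString.IndexOf(direction, StringComparison.Ordinal);
                if (index >= 0 && index < firstIndex)
                {
                    firstIndex = index;
                    first      = direction;
                }
            }
            return first;
        }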
Example No. 13
    void Update()
    {
        if (GameOver.reset)                                                                     // Reset the variables if the game is restarted
        {
            answer             = null;
            firstKeyCollected  = null;
            secondKeyCollected = null;
            thirdKeyCollected  = null;
            exit1.GetComponent <MeshRenderer> ().enabled = true;
            exit2.GetComponent <MeshRenderer> ().enabled = true;
            firstExitOpen                   = false;
            secondExitOpen                  = false;
            PlayerControl.score             = 0;
            CollisionBehaviors.keyCollected = null;

            GameOver.reset = false;
        }


        if (firstKeyCollected != null &&                                                // Once all three keys have been collected, the condition is met
            secondKeyCollected != null &&
            thirdKeyCollected != null)
        {
            int[] query = codebook.Transform(new[, ] {                          // Table whose values will be transformed
                { "First key", firstKeyCollected },
                { "Second key", secondKeyCollected },
                { "Third key", thirdKeyCollected }
            });

            // Pass the table to be evaluated as an argument to the trained tree | tree.Decide(tableToEvaluate)
            // The predicted result will be an integer symbol
            int predicted = tree.Decide(query);
            answer = codebook.Revert("Exit", predicted);                        // Traduzindo o integer symbol para string
            Debug.Log(answer);

            if (answer == "First")
            {
                exit1.GetComponent <MeshRenderer> ().enabled = false;
                firstExitOpen = true;
            }
            else if (answer == "Second")
            {
                exit2.GetComponent <MeshRenderer> ().enabled = false;
                secondExitOpen = true;
            }
        }
    }
Example No. 14
        public void testForInstance(NaiveBayes nb, String[] testInstance, String Output)
        {
            try
            {
                // Obtain the numeric output that represents the answer
                int c = nb.Decide(codebook.Transform(testInstance)); // answer will be 0

                // Now let us convert the numeric output to an actual "Yes" or "No" answer
                string result = codebook.Revert("GeneratedByProgram", c); // answer will be "No"

                Console.WriteLine("Test Data Input :  " + testInstance[0] + "," + testInstance[1] + "\nExpectation: " + Output + "\nResult: " + result);
            }
            catch (Exception e)
            {
                Console.WriteLine("Test Data Input :  " + testInstance[0] + "," + testInstance[1] + "\nExpectation: " + Output + "\nResult: " + "No");
            }
        }
        public string Evaluate(string branch, string cType, string gender, string payment)
        {
            // The tree can now be queried for new examples through
            // its decide method. For example, we can create a query
            int[] query = codebook.Transform(new[, ]
            {
                { "Branch", branch },
                { "Customer type", cType },
                { "Gender", gender },
                { "Payment", payment }
            });

            // And then predict the label using
            int predicted = tree.Decide(query);

            // We can translate it back to strings using
            return(codebook.Revert("Product line", predicted)); // Answer will be: "No"
        }
Example No. 16
        public string Predict(Board board)
        {
            DataTable    dataTable    = this.CreateDataTable(board);
            Codification codification = new Codification(dataTable);
            var          inputs       = codification.Apply(dataTable)
                                        .ToArray <int>("v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v8", "v9", "v10", "v11", "v12", "v13", "v14",
                                                       "v15");

            int[] codewords = this.Model.Decide(inputs);
            if (codewords[0] == -1)
            {
                return(null);
            }
            string moveString = codification.Revert(codewords)[0];

            string[] stringSeparators = new string[] { "Up", "Right", "Down", "Left" };
            var      result           = moveString.Split(stringSeparators, StringSplitOptions.None);

            return(result[0]);
        }
Example No. 17
        private string toString(Codification codebook, string outputColumn, CultureInfo culture)
        {
            StringBuilder sb = new StringBuilder();

            var expr = expressions.ToArray();

            for (int i = 0; i < expr.Length - 1; i++)
            {
                sb.AppendFormat("({0}) && ", toString(expr[i], codebook, culture));
            }
            sb.AppendFormat("({0})", toString(expr[expr.Length - 1], codebook, culture));

            if (String.IsNullOrEmpty(outputColumn))
            {
                return(String.Format(culture, "{0} =: {1}", Output, sb));
            }

            string name = codebook.Revert(outputColumn, (int)Output);

            return(String.Format(culture, "{0} =: {1}", name, sb));
        }
Example No. 18
        private TreeNode convert(DecisionNode node)
        {
            TreeNode treeNode = (codebook == null) ?
                                new TreeNode(node.ToString()) :
                                new TreeNode(node.ToString(codebook));


            if (!node.IsLeaf)
            {
                foreach (var child in node.Branches)
                {
                    treeNode.Nodes.Add(convert(child));
                }

                return(treeNode);
            }


            if (codebook == null || !node.Output.HasValue)
            {
                treeNode.Nodes.Add(new TreeNode(node.Output.ToString()));
                return(treeNode);
            }

            int index  = node.Parent.Branches.AttributeIndex;
            var attrib = treeSource.Attributes[index];

            if (attrib.Nature != DecisionVariableKind.Discrete)
            {
                treeNode.Nodes.Add(new TreeNode(node.Output.ToString()));
                return(treeNode);
            }

            string value = codebook.Revert(attrib.Name, node.Output.Value);

            treeNode.Nodes.Add(new TreeNode(value));
            return(treeNode);
        }
Example No. 19
        public string simulate(int year, string generation, string sex)
        {
            var id3learning = new ID3Learning()
            {
                new DecisionVariable("year", 2016 - 1985),
                new DecisionVariable("generation", 6),
                new DecisionVariable("sex", 2),
            };

            tree = id3learning.Learn(inputs, outputs);

            int[] query = codebook.Transform(new[, ]
            {
                { "year", year.ToString() },
                { "generation", generation },
                { "sex", sex },
            });

            int    predicted = tree.Decide(query);
            string answer    = codebook.Revert("risk", predicted);

            return(answer);
        }
Example No. 20
        public string[] DecisionTreeClassifyLib(DataTable data)
        {
            DataTable symbols = codebook.Apply(data);

            int[][] inputs = DataTableToMatrix(symbols, new string[] { "CAP SHAPE", "CAP SURFACE", "CAP COLOR",
                                                                       "BRUISES", "ODOR", "GILL ATTACHMENT",
                                                                       "GILL SPACING", "GILL SIZE", "GILL COLOR",
                                                                       "STALK SHAPE", "STALK ROOT", "STALK SURFACE ABOVE RING",
                                                                       "STALK SURFACE BELOW RING", "STALK COLOR ABOVE RING", "STALK COLOR BELOW RING",
                                                                       "VEIL TYPE", "VEIL COLOR", "RING NUMBER",
                                                                       "RING TYPE", "SPORE PRINT COLOR", "POPULATION",
                                                                       "HABITAT" });

            int[] predicted = decisionTreeLib.Decide(inputs);

            string[] predictedString = new string[predicted.Length];
            for (int i = 0; i < predicted.Length; i++)
            {
                predictedString[i] = codebook.Revert("TYPE", predicted[i]);
                // This step can take a long time
            }

            return(predictedString);
        }
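        // DataTableToMatrix is not shown in this snippet. A minimal sketch under the assumption
        // that it extracts the listed columns of the codified DataTable as a jagged int array
        // (Accord's ToJagged<int> extension produces the same shape, so the real helper may just
        // wrap it):
        private static int[][] DataTableToMatrix(DataTable table, string[] columns)
        {
            var matrix = new int[table.Rows.Count][];

            for (int i = 0; i < table.Rows.Count; i++)
            {
                matrix[i] = new int[columns.Length];
                for (int j = 0; j < columns.Length; j++)
                {
                    // Each cell of the codified table holds an integer symbol.
                    matrix[i][j] = Convert.ToInt32(table.Rows[i][columns[j]]);
                }
            }
            return matrix;
        }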
Example No. 21
    private Vector2 Rank(string presidentLife, string towers, string meliants, string time, string playerMoney)
    {
        try {
            int[] query = codebook.Transform(new[, ]
            {
                { "LIFE", presidentLife },
                { "TOWERS", towers },
                { "MELIANTS", meliants },
                { "TIME", time },
                { "ENEMY_COINS", playerMoney },
            });

            int predicted = tree.Decide(query);

            string   answer  = codebook.Revert("POSITION", predicted);
            string[] splited = answer.Split('x');
            return(new Vector2(
                       float.Parse(splited[0], CultureInfo.InvariantCulture.NumberFormat),
                       float.Parse(splited[1], CultureInfo.InvariantCulture.NumberFormat)
                       ));
        } catch (Exception) {
            return(Vector2.zero);
        }
    }
Example No. 22
 private string DecodeFolder(int codedFolder)
 {
     return(_outputCodeBook?.Revert(new[] { codedFolder })[0]);
 }
        public void output_labels_test()
        {
            #region doc_learn_codification
            // Let's say we have the following data to be classified
            // into three possible classes. Those are the samples:
            //
            double[][] inputs =
            {
                //               input         output
                new double[] { 0, 1, 1, 0 }, //  0
                new double[] { 0, 1, 0, 0 }, //  0
                new double[] { 0, 0, 1, 0 }, //  0
                new double[] { 0, 1, 1, 0 }, //  0
                new double[] { 0, 1, 0, 0 }, //  0
                new double[] { 1, 0, 0, 0 }, //  1
                new double[] { 1, 0, 0, 0 }, //  1
                new double[] { 1, 0, 0, 1 }, //  1
                new double[] { 0, 0, 0, 1 }, //  1
                new double[] { 0, 0, 0, 1 }, //  1
                new double[] { 1, 1, 1, 1 }, //  2
                new double[] { 1, 0, 1, 1 }, //  2
                new double[] { 1, 1, 0, 1 }, //  2
                new double[] { 0, 1, 1, 1 }, //  2
                new double[] { 1, 1, 1, 1 }, //  2
            };

            // Now, suppose that our class labels are not contiguous. We
            // have 3 classes, but they have the class labels 5, 1, and 8
            // respectively. In this case, we can use a Codification filter
            // to obtain a contiguous zero-indexed labeling before learning
            int[] output_labels =
            {
                5, 5, 5, 5, 5,
                1, 1, 1, 1, 1,
                8, 8, 8, 8, 8,
            };

            // Create a codification object to obtain an output mapping
            var codebook = new Codification <int>().Learn(output_labels);

            // Transform the original labels using the codebook
            int[] outputs = codebook.Transform(output_labels);

            // Create the multi-class learning algorithm for the machine
            var teacher = new MulticlassSupportVectorLearning <Gaussian>()
            {
                // Configure the learning algorithm to use SMO to train the
                //  underlying SVMs in each of the binary class subproblems.
                Learner = (param) => new SequentialMinimalOptimization <Gaussian>()
                {
                    // Estimate a suitable guess for the Gaussian kernel's parameters.
                    // This estimate can serve as a starting point for a grid search.
                    UseKernelEstimation = true
                }
            };

            // Configure parallel execution options
            teacher.ParallelOptions.MaxDegreeOfParallelism = 1;

            // Learn a machine
            var machine = teacher.Learn(inputs, outputs);

            // Obtain class predictions for each sample
            int[] predicted = machine.Decide(inputs);

            // Translate the integers back to the original labels
            int[] predicted_labels = codebook.Revert(predicted);
            #endregion

            Assert.IsTrue(predicted_labels.IsEqual(output_labels));
        }
        public void learn_new_method()
        {
            #region doc_learn_1
            // Suppose we have a data table relating the age of
            // a person and its categorical classification, as
            // in "child", "adult" or "elder".

            // The Codification filter is able to extract those
            // string labels and transform them into discrete
            // symbols, assigning integer labels to each of them
            // such as "child" = 0, "adult" = 1, and "elder" = 3.

            // Create the aforementioned sample table
            DataTable table = new DataTable("Sample data");
            table.Columns.Add("Age", typeof(int));
            table.Columns.Add("Label", typeof(string));

            //            age   label
            table.Rows.Add(10, "child");
            table.Rows.Add(07, "child");
            table.Rows.Add(04, "child");
            table.Rows.Add(21, "adult");
            table.Rows.Add(27, "adult");
            table.Rows.Add(12, "child");
            table.Rows.Add(79, "elder");
            table.Rows.Add(40, "adult");
            table.Rows.Add(30, "adult");


            // Now, let's say we need to translate those text labels
            // into integer symbols. Let's use a Codification filter:

            var codebook = new Codification(table);


            // After that, we can use the codebook to "translate"
            // the text labels into discrete symbols, such as:

            int a = codebook.Transform(columnName: "Label", value: "child"); // returns 0
            int b = codebook.Transform(columnName: "Label", value: "adult"); // returns 1
            int c = codebook.Transform(columnName: "Label", value: "elder"); // returns 2

            // We can also do the reverse:
            string labela = codebook.Revert(columnName: "Label", codeword: 0); // returns "child"
            string labelb = codebook.Revert(columnName: "Label", codeword: 1); // returns "adult"
            string labelc = codebook.Revert(columnName: "Label", codeword: 2); // returns "elder"
            #endregion

            #region doc_learn_2
            // We can also process an entire data table at once:
            DataTable result = codebook.Apply(table);

            // The resulting table can be transformed to jagged array:
            double[][] matrix = Matrix.ToJagged(result);

            // and the resulting matrix will be given by
            string str = matrix.ToCSharp();
            #endregion

            // str == new double[][]
            // {
            //     new double[] { 10, 0 },
            //     new double[] {  7, 0 },
            //     new double[] {  4, 0 },
            //     new double[] { 21, 1 },
            //     new double[] { 27, 1 },
            //     new double[] { 12, 0 },
            //     new double[] { 79, 2 },
            //     new double[] { 40, 1 },
            //     new double[] { 30, 1 }
            // };


            #region doc_learn_3
            // Now we will be able to feed this matrix to any machine learning
            // algorithm without having to worry about text labels in our data:

            // Use the first column as input variables,
            // and the second column as outputs classes
            //
            double[][] inputs  = matrix.GetColumns(0);
            int[]      outputs = matrix.GetColumn(1).ToInt32();

            // Create a Multi-class learning algorithm for the machine
            var teacher = new MulticlassSupportVectorLearning <Linear>()
            {
                Learner = (p) => new SequentialMinimalOptimization <Linear>()
                {
                    Complexity = 1
                }
            };

            // Run the learning algorithm
            var svm = teacher.Learn(inputs, outputs);

            // Compute the classification error (should be 0)
            double error = new ZeroOneLoss(outputs).Loss(svm.Decide(inputs));


            // After we have learned the machine, we can use it to classify
            // new data points, and use the codebook to translate the machine
            // outputs to the original text labels:

            string result1 = codebook.Revert("Label", svm.Decide(new double[] { 10 })); // child
            string result2 = codebook.Revert("Label", svm.Decide(new double[] { 40 })); // adult
            string result3 = codebook.Revert("Label", svm.Decide(new double[] { 70 })); // elder
            #endregion

            Assert.AreEqual(0, a);
            Assert.AreEqual(1, b);
            Assert.AreEqual(2, c);
            Assert.AreEqual("child", labela);
            Assert.AreEqual("adult", labelb);
            Assert.AreEqual("elder", labelc);

            Assert.AreEqual("child", result1);
            Assert.AreEqual("adult", result2);
            Assert.AreEqual("elder", result3);
        }
        public List <Meal> ID3(dynamic food, List <User> users)
        {
            //dtable();
            // Dictionary<string,List<Category>> FoodCategory = new Dictionary<string,List<Category>>();
            List <Meal> category = new List <Meal> {
            };

            foreach (User u in users)
            {
                BMI  = u.bmi;
                User = u.user;
            }


            //Iterate through food and add each food with range of macronutrients to category list
            foreach (var nw in food)
            {
                var value = (nw.Category.Predicted).Split(",");
                category.Add(new Meal {
                    food = nw.Food.Food, protein = value[0], carb = value[1], fat = value[2], calorie = value[3], fiber = value[4], nprotein = nw.Food.Total_Protein, ncarb = nw.Food.Total_Carb, nfat = nw.Food.Total_Fat, ncalorie = nw.Food.Num_Calorie, nfiber = nw.Food.Fiber, serving = nw.Food.Serving, img = nw.Food.img, fgroup = nw.Food.fgroup, PrefID = nw.Food.ID, servingQty = nw.Food.ServingQty.ToString(), decision = ""
                });
            }

            var csv             = new CsvReader(File.OpenText(path));
            var myCustomObjects = csv.GetRecords <MealData>();

            DataTable dt = new DataTable("FoodDBSample");
            DataRow   row;

            dt.Columns.Add("Category", "Carb", "Protein", "Fat", "Calorie", "Fiber", "Decision");
            foreach (var record in myCustomObjects)
            {
                row = dt.NewRow();


                row["Category"] = record.Category;
                row["Carb"]     = record.Carb;
                row["Protein"]  = record.Protein;
                row["Fat"]      = record.Fat;
                row["Calorie"]  = record.Calorie;
                row["Fiber"]    = record.Fiber;
                row["Decision"] = record.Outcome;

                dt.Rows.Add(row);
            }
            var codebook = new Codification(dt);

            DataTable symbols = codebook.Apply(dt);

            int[][] inputs  = symbols.ToJagged <int>("Category", "Carb", "Protein", "Fat", "Calorie", "Fiber");
            int[]   outputs = symbols.ToArray <int>("Decision");

            //specify which columns to use for making decisions
            var id3learning = new ID3Learning()
            {
                new DecisionVariable("Category", 4),
                new DecisionVariable("Carb", 2),
                new DecisionVariable("Protein", 2),
                new DecisionVariable("Fat", 2),
                new DecisionVariable("Calorie", 2),
                new DecisionVariable("Fiber", 2)
            };


            // Learn the training instances!
            DecisionTree tree = id3learning.Learn(inputs, outputs);

            // Compute the training error when predicting training instances
            double error = new ZeroOneLoss(outputs).Loss(tree.Decide(inputs));



            List <dynamic> predict = new List <dynamic> {
            };

            //iterate through data structure category and make prediction for each food
            foreach (var item in category)
            {
                predict.Add(codebook.Transform(new[, ]
                {
                    { "Category", $"{BMI}" },
                    { "Carb", $"{item.carb}" },
                    { "Protein", $"{item.protein}" },
                    { "Fat", $"{item.fat}" },
                    { "Calorie", $"{item.calorie}" },
                    { "Fiber", $"{item.fiber}" }
                }));
            }

            //Accord.IO.Serializer.Save(tree, Path.Combine(basePath, "ID3TreeModel.bin"));
            //int predicted = tree.Decide(query);
            List <string> q = new List <string>();

            foreach (var i in predict)
            {
                q.Add(codebook.Revert("Decision", tree.Decide(i)));
            }
            // We can translate it back to strings using
            //string answer = codebook.Revert("Decision", predicted);
            //foreach (var item in q)
            //{
            //    System.Diagnostics.Debug.WriteLine(item);
            //}
            var         foods    = category.Zip(q, (n, w) => new { Food = n, Decision = w });
            List <Meal> positive = new List <Meal> {
            };
            List <Meal> negative = new List <Meal> {
            };

            foreach (var nw in foods)
            {
                nw.Food.decision = nw.Decision;
            }
            foreach (var item in foods)
            {
                //System.Diagnostics.Debug.WriteLine(item.Food.food + "\n" + item.Food.nprotein + "\n" + item.Food.ncarb + "\n" + item.Food.nfat + "\n" + item.Food.ncalorie + "\n" + item.Food.serving + "\n" + item.Food.img + "\n" + item.Food.fgroup+"\n"+item.Decision);
                //System.Diagnostics.Debug.WriteLine(item.Food.food+"\n"+item.Decision);
                if (item.Decision.Equals("positive"))
                {
                    positive.Add(new Meal {
                        CurrentUser = User, food = item.Food.food, nprotein = item.Food.nprotein, ncarb = item.Food.ncarb, nfat = item.Food.nfat, ncalorie = item.Food.ncalorie, nfiber = item.Food.nfiber, img = item.Food.img, serving = item.Food.serving, fgroup = item.Food.fgroup, PrefID = item.Food.PrefID, servingQty = item.Food.servingQty, decision = item.Decision
                    });
                }
                if (item.Decision.Equals("negative"))
                {
                    negative.Add(new Meal {
                        CurrentUser = User, food = item.Food.food, nprotein = item.Food.nprotein, ncarb = item.Food.ncarb, nfat = item.Food.nfat, ncalorie = item.Food.ncalorie, nfiber = item.Food.nfiber, img = item.Food.img, serving = item.Food.serving, fgroup = item.Food.fgroup, PrefID = item.Food.PrefID, servingQty = item.Food.servingQty, decision = item.Decision
                    });
                }
            }
            //foreach (var item in positive)
            //{

            //    //System.Diagnostics.Debug.WriteLine(item.food + "\n" + item.nprotein + "\n" + item.ncarb + "\n" + item.nfat + "\n" + item.ncalorie + "\n" + item.serving + "\n" + item.img + "\n" + item.fgroup + "\n" + item.decision);
            //    System.Diagnostics.Debug.WriteLine(item.food + "\n" + "Protein: "+item.nprotein + "\n" + "Carb: " + item.ncarb + "\n" + "Fat: " + item.nfat + "\n" + "Calorie: " + item.ncalorie+"\n"+ "Fiber: "+item.nfiber+"\n"+ "Outcome: " + item.decision);

            //}
            //foreach (var item in negative)
            //{

            //    //System.Diagnostics.Debug.WriteLine(item.food + "\n" + item.nprotein + "\n" + item.ncarb + "\n" + item.nfat + "\n" + item.ncalorie + "\n" + item.serving + "\n" + item.img + "\n" + item.fgroup + "\n" + item.decision);
            //    System.Diagnostics.Debug.WriteLine(item.food + "\n" + "Protein: " + item.nprotein + "\n" + "Carb: " + item.ncarb + "\n" + "Fat: " + item.nfat + "\n" + "Calorie: " + item.ncalorie + "\n" + "Fiber: " + item.nfiber + "\n" + "Outcome: " + item.decision);

            //}


            // Validation purposes only
            //var cm = GeneralConfusionMatrix.Estimate(tree, inputs, outputs);
            //double err = cm.Error;
            //double acc = cm.Accuracy;
            //double kappa = cm.Kappa;
            //validation();
            //System.Diagnostics.Debug.WriteLine("error: " + err);
            //System.Diagnostics.Debug.WriteLine("accuracy: " + acc);
            DisplayDiet diet = new DisplayDiet();

            diet.GetFoods(positive);
            diet.GetUser(users);
            //diet.CreateDiet(positive);
            return(positive);
        }
Example No. 26
        public void learn_doc2()
        {
            #region doc_learn_mitchell
            // In this example, we will be using the famous Play Tennis example by Tom Mitchell (1998).
            // In Mitchell's example, one would like to infer if a person would play tennis or not
            // based solely on four input variables. Those variables are all categorical, meaning that
            // there is no order between the possible values for the variable (i.e. there is no order
            // relationship between Sunny and Rain, one is not bigger nor smaller than the other, but are
            // just distinct). Moreover, the rows, or instances presented above represent days on which the
            // behavior of the person has been registered and annotated, pretty much building our set of
            // observation instances for learning:

            // Note: this example uses DataTables to represent the input data, but this is not required.
            DataTable data = new DataTable("Mitchell's Tennis Example");

            data.Columns.Add("Day", "Outlook", "Temperature", "Humidity", "Wind", "PlayTennis");
            data.Rows.Add("D1", "Sunny", "Hot", "High", "Weak", "No");
            data.Rows.Add("D2", "Sunny", "Hot", "High", "Strong", "No");
            data.Rows.Add("D3", "Overcast", "Hot", "High", "Weak", "Yes");
            data.Rows.Add("D4", "Rain", "Mild", "High", "Weak", "Yes");
            data.Rows.Add("D5", "Rain", "Cool", "Normal", "Weak", "Yes");
            data.Rows.Add("D6", "Rain", "Cool", "Normal", "Strong", "No");
            data.Rows.Add("D7", "Overcast", "Cool", "Normal", "Strong", "Yes");
            data.Rows.Add("D8", "Sunny", "Mild", "High", "Weak", "No");
            data.Rows.Add("D9", "Sunny", "Cool", "Normal", "Weak", "Yes");
            data.Rows.Add("D10", "Rain", "Mild", "Normal", "Weak", "Yes");
            data.Rows.Add("D11", "Sunny", "Mild", "Normal", "Strong", "Yes");
            data.Rows.Add("D12", "Overcast", "Mild", "High", "Strong", "Yes");
            data.Rows.Add("D13", "Overcast", "Hot", "Normal", "Weak", "Yes");
            data.Rows.Add("D14", "Rain", "Mild", "High", "Strong", "No");

            // In order to learn a decision tree, we will first convert this problem to a simpler
            // representation. Since all variables are categorical, it does not matter whether they are
            // represented as strings or numbers, since both are just symbols for the events they represent.
            // Since numbers are more easily representable than text strings, we will convert the problem
            // to use a discrete alphabet through the use of an Accord.Statistics.Filters.Codification codebook.

            // A codebook effectively transforms any distinct possible value for a variable into an integer
            // symbol. For example, “Sunny” could just as well be represented by the integer label 0, “Overcast”
            // by 1, “Rain” by 2, and the same goes for the other variables. So:

            // Create a new codification codebook to
            // convert strings into integer symbols
            var codebook = new Codification(data);

            // Translate our training data into integer symbols using our codebook:
            DataTable symbols = codebook.Apply(data);
            int[][]   inputs  = symbols.ToJagged <int>("Outlook", "Temperature", "Humidity", "Wind");
            int[]     outputs = symbols.ToArray <int>("PlayTennis");

            // For this task, in which we have only categorical variables, the simplest choice
            // to induce a decision tree is to use the ID3 algorithm by Quinlan. Let’s do it:

            // Create a teacher ID3 algorithm
            var id3learning = new ID3Learning()
            {
                // Now that we already have our learning input/output pairs, we should specify our
                // decision tree. We will be trying to build a tree to predict the last column, entitled
                // “PlayTennis”. For this, we will be using the “Outlook”, “Temperature”, “Humidity” and
                // “Wind” as predictors (variables which we will use for our decision). Since those
                // are categorical, we must specify, at the moment of creation of our tree, the
                // characteristics of each of those variables. So:

                new DecisionVariable("Outlook", 3),     // 3 possible values (Sunny, overcast, rain)
                new DecisionVariable("Temperature", 3), // 3 possible values (Hot, mild, cool)
                new DecisionVariable("Humidity", 2),    // 2 possible values (High, normal)
                new DecisionVariable("Wind", 2)         // 2 possible values (Weak, strong)

                // Note: It is also possible to create a DecisionVariable[] from a codebook:
                // DecisionVariable[] attributes = DecisionVariable.FromCodebook(codebook);
            };

            // Learn the training instances!
            DecisionTree tree = id3learning.Learn(inputs, outputs);

            // Compute the training error when predicting training instances
            double error = new ZeroOneLoss(outputs).Loss(tree.Decide(inputs));

            // The tree can now be queried for new examples through
            // its decide method. For example, we can create a query

            int[] query = codebook.Transform(new[, ]
            {
                { "Outlook", "Sunny" },
                { "Temperature", "Hot" },
                { "Humidity", "High" },
                { "Wind", "Strong" }
            });

            // And then predict the label using
            int predicted = tree.Decide(query);  // result will be 0

            // We can translate it back to strings using
            string answer = codebook.Revert("PlayTennis", predicted); // Answer will be: "No"
            #endregion

            Assert.AreEqual(0, predicted);
            Assert.AreEqual("No", answer);
            Assert.AreEqual(0, error);
        }
Example No. 27
        //***First Attempt: This decision tree only handled input values that already existed when the tree was created.
        //That does not suit this situation, because we need to supply unknown values that can be predicted based on their
        //proximity to the existing input values in the tree.
        //This tree also requires a fixed number of potential inputs, which in this case is very high for each feature.
        //TO DO: Research which kind of tree suits this situation (look at the iris example)
        public void DecisionTree()
        {
            // In this example, we will process the famous Fisher's Iris dataset in
            // which the task is to classify whether the features of an Iris flower
            // belong to an Iris setosa, an Iris versicolor, or an Iris virginica:
            //
            //  - https://en.wikipedia.org/wiki/Iris_flower_data_set
            //

            DataTable data = new DataTable("Mitchell's Tennis Example");

            data.Columns.Add("Name", "Acousticness", "Danceability", "Energy", "Instrumentalness", "Liveness", "Loudness", "Speechiness", "Tempo", "Valence", "Target");

            foreach (var item in TargetList)
            {
                data.Rows.Add(item.ToString());
            }

            for (int i = 0; i < audioFeatures.Count; i++)
            {
                data.Rows[i]["Acousticness"]     = audioFeatures[i].Acousticness;
                data.Rows[i]["Danceability"]     = audioFeatures[i].Danceability;
                data.Rows[i]["Energy"]           = audioFeatures[i].Energy;
                data.Rows[i]["Instrumentalness"] = audioFeatures[i].Instrumentalness;
                data.Rows[i]["Liveness"]         = audioFeatures[i].Liveness;
                data.Rows[i]["Loudness"]         = audioFeatures[i].Loudness;
                data.Rows[i]["Speechiness"]      = audioFeatures[i].Speechiness;
                data.Rows[i]["Tempo"]            = audioFeatures[i].Tempo;
                data.Rows[i]["Valence"]          = audioFeatures[i].Valence;
            }

            for (int i = 0; i < TargetValues.Count; i++)
            {
                data.Rows[i]["Target"] = TargetValues[i].ToString();
            }



            var codebook = new Codification(data);

            //// Translate our training data into integer symbols using our codebook:
            DataTable symbols = codebook.Apply(data);

#pragma warning disable CS0618 // Type or member is obsolete
            double[][] inputs = symbols.ToArray <double>("Acousticness", "Danceability", "Energy", "Instrumentalness", "Liveness", "Loudness", "Speechiness", "Tempo", "Valence");
#pragma warning restore CS0618 // Type or member is obsolete
            int[] outputs = symbols.ToArray <int>("Target");

            // Create a teaching algorithm:
            var teacher = new C45Learning()
            {
                new DecisionVariable("Acousticness", DecisionVariableKind.Continuous),
                new DecisionVariable("Danceability", DecisionVariableKind.Continuous),
                new DecisionVariable("Energy", DecisionVariableKind.Continuous),
                new DecisionVariable("Instrumentalness", DecisionVariableKind.Continuous),
                new DecisionVariable("Liveness", DecisionVariableKind.Continuous),
                new DecisionVariable("Loudness", DecisionVariableKind.Continuous),
                new DecisionVariable("Speechiness", DecisionVariableKind.Continuous),
                new DecisionVariable("Tempo", DecisionVariableKind.Continuous),
                new DecisionVariable("Valence", DecisionVariableKind.Continuous)
            };

            // Use the learning algorithm to induce a new tree:
            DecisionTree tree = teacher.Learn(inputs, outputs);

            // To get the estimated class labels, we can use
            //int[] predicted = tree.Decide(inputs);

            // The classification error (0.0266) can be computed as
            //double error = new ZeroOneLoss(outputs).Loss(predicted);

            int[] query = codebook.Transform(new[, ]
            {
                { "Valence", "0.37" },
                { "Acousticness", "0.00187" },
                { "Danceability", "0.808" },
                { "Energy", "0.626" },
                { "Instrumentalness", "0.159" },
                { "Liveness", "0.376" },
                { "Loudness", "-12.733" },
                { "Speechiness", "0.168" },
                { "Tempo", "123.99" }
            });

            // And then predict the label using
            int predicted = tree.Decide(query);

            // We can translate it back to strings using
            string answer = codebook.Revert("Target", predicted); // Answer will be: "No"

            Console.WriteLine(predicted);
            //// Moreover, we may decide to convert our tree to a set of rules:
            //DecisionSet rules = tree.ToRules();

            //// And using the codebook, we can inspect the tree reasoning:
            //string ruleText = rules.ToString(codebook, "Output",
            //    System.Globalization.CultureInfo.InvariantCulture);



            //var id3learning = new ID3Learning()
            //{
            //    new DecisionVariable("Acousticness",     TargetList.Count),
            //    new DecisionVariable("Danceability", TargetList.Count),
            //    new DecisionVariable("Energy",    TargetList.Count),
            //    new DecisionVariable("Instrumentalness",        TargetList.Count),
            //    new DecisionVariable("Liveness",        TargetList.Count),
            //    new DecisionVariable("Loudness",        TargetList.Count),
            //    new DecisionVariable("Speechiness",        TargetList.Count),
            //    new DecisionVariable("Tempo",        TargetList.Count),
            //    new DecisionVariable("Valence",        TargetList.Count)
            //};

            //// Learn the training instances!
            //DecisionTree tree = id3learning.Learn(inputs, outputs);

            //// Compute the training error when predicting training instances
            //double error = new ZeroOneLoss(outputs).Loss(tree.Decide(inputs));

            //int[] query = codebook.Transform(new[,]
            //{
            //    { "Acousticness", "0.011"},
            //    { "Danceability", "0.905"},
            //    { "Energy", "0.905"},
            //    { "Instrumentalness", "0.000905"},
            //    { "Liveness", "0.302"},
            //    { "Loudness", "-2.743"},
            //    { "Speechiness", "0.103"},
            //    { "Tempo", "114.944"},
            //    { "Valence", "0.625"}
            //});

            //// And then predict the label using
            //int predicted = tree.Decide(query);  // result will be 0

            //// We can translate it back to strings using
            //string answer = codebook.Revert("Target", predicted); // Answer will be: "No"

            //Console.WriteLine(predicted);
        }
Example No. 28
        public static void Run()
        {
            // This example follows the structure of the famous Play Tennis example by Tom Mitchell (1998),
            // in which one would like to infer a decision based solely on a handful of categorical input
            // variables, but applies it to an "Overtake" scenario. Categorical means that there is no order
            // between the possible values of a variable (i.e. one value is not bigger nor smaller than
            // another; they are just distinct).

            // Note: this example uses DataTables to represent the input data, but this is not required.
            var example = "Overtake";

            Console.WriteLine(example);

            DataTable data = new DataTable(example);

            data.Columns.Add("Separation", typeof(String));
            data.Columns.Add("Speed", typeof(String));
            data.Columns.Add("OncomingSpeed", typeof(String));
            data.Columns.Add("Result", typeof(String));
            var shuffledInputs = GetInputs(100);

            for (int index = 0; index < shuffledInputs.Length; index++)
            {
                data.Rows.Add(shuffledInputs[index][0], shuffledInputs[index][1], shuffledInputs[index][2], shuffledInputs[index][3]);
            }



            // In order to learn a decision tree, we will first convert this problem to a simpler
            // representation. Since all variables are categorical, it does not matter whether they are
            // represented as strings or numbers, since both are just symbols for the events they represent.
            // Since numbers are more easily representable than text strings, we will convert the problem
            // to use a discrete alphabet through the use of an Accord.Statistics.Filters.Codification codebook.

            // A codebook effectively transforms any distinct possible value for a variable into an integer
            // symbol. For example, “Sunny” could just as well be represented by the integer label 0, “Overcast”
            // by 1, “Rain” by 2, and the same goes for the other variables. So:

            // Create a new codification codebook to convert strings into integer symbols
            var codebook = new Codification(data);

            // Translate our training data into integer symbols using our codebook:
            DataTable symbols = codebook.Apply(data);

            int[][]  inputs     = symbols.ToJagged <int>(new string[] { "Separation", "Speed", "OncomingSpeed", "Result" });
            int[]    outputs    = symbols.ToArray <int>("Overtake");
            string[] classNames = new string[] { "success", "fail" };

            // For this task, in which we have only categorical variables, the simplest choice
            // to induce a decision tree is to use the ID3 algorithm by Quinlan. Let’s do it:

            // Create an ID3 algorithm
            var id3learning = new ID3Learning()
            {
                // Now that we already have our learning input/output pairs, we should specify our
                // decision tree. We will be trying to build a tree to predict the output column. For this,
                // we will be using "Separation", "Speed", "OncomingSpeed" and "Result" as predictors
                // (variables which we will use for our decision). Since those are categorical, we must
                // specify, at the moment of creation of our tree, the characteristics of each of those
                // variables. So:

                new DecisionVariable("Separation", 150),    // 3 possible values (Sunny, overcast, rain)
                new DecisionVariable("Speed", 150),         // 3 possible values (Hot, mild, cool)
                new DecisionVariable("OncomingSpeed", 150), // 2 possible values (High, normal)
                new DecisionVariable("Result", 2)           // 2 possible values (Weak, strong)
            };

            // Learn the training instances!
            DecisionTree tree = id3learning.Learn(inputs, outputs);

            // The tree can now be queried for new examples through
            // its Decide method. For example, we can create a query

            int[] query = codebook.Transform(new[,]
            {
                { "Separation", "150" },
                { "Speed", "150" },
                { "OncomingSpeed", "150" }
            });

            // And then predict the label using
            int predicted = tree.Decide(query);

            var answer = codebook.Revert("Result", predicted);

            Console.WriteLine("");

            Console.WriteLine(answer);

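            // ZeroOneLoss measures the fraction of misclassified samples, so the value printed
            // below is the training (resubstitution) error expressed as a percentage.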
            double error = new ZeroOneLoss(outputs).Loss(tree.Decide(inputs));

            Console.WriteLine($"{error * 100:F10}");

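            // The learned tree can also be converted into an equivalent set of if-then decision
            // rules, which is often easier to inspect than the tree itself.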
            DecisionSet rules        = tree.ToRules();
            var         encodedRules = rules.ToString();

            Console.WriteLine(encodedRules);



            Console.ReadKey(); // Keep the window open till a key is pressed
        }
Exemplo n.º 29
0
        static void Main(string[] args)
        {
            // Create a new reader, opening a given path
            ExcelReader excel     = new ExcelReader("Intake Inf Cohort 2017 - Training Set.xlsx");
            ExcelReader excelTest = new ExcelReader("Intake Inf Cohort 2017 - Test Set.xlsx");

            // Afterwards, we can query the file for all
            // worksheets within the specified workbook:
            string[] sheets     = excel.GetWorksheetList();
            string[] sheetsTest = excelTest.GetWorksheetList();

            // Finally, we can request an specific sheet:
            DataTable data     = excel.GetWorksheet(sheets[0]);
            DataTable dataTest = excelTest.GetWorksheet(sheetsTest[0]);

            // Loop through each column in data
            foreach (DataColumn column in data.Columns)
            {
                // Replace empty with underscore
                column.ColumnName = column.ColumnName.Replace(" ", "_");
            }

            // Create a new codification codebook to
            // convert strings into integer symbols
            Codification codibook = new Codification(data);

            // Set codibook
            Codification = codibook;

            // Translate our training data into integer symbols using our codebook:
            DataTable symbols = codibook.Apply(data);

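            // The predictor columns are picked from the codified table by index, so this mapping
            // depends entirely on the layout of the source workbook; column 6 (reverted as "advies"
            // further below) serves as the output.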
            int[][] inputs = symbols.ToJagged <int>(
                codibook.Columns[5].ColumnName,
                codibook.Columns[7].ColumnName,
                codibook.Columns[8].ColumnName,
                codibook.Columns[9].ColumnName,
                codibook.Columns[12].ColumnName,
                codibook.Columns[13].ColumnName,
                codibook.Columns[14].ColumnName,
                codibook.Columns[15].ColumnName,
                codibook.Columns[16].ColumnName,
                codibook.Columns[20].ColumnName,
                codibook.Columns[29].ColumnName,
                codibook.Columns[30].ColumnName,
                codibook.Columns[34].ColumnName
                );
            int[] outputs = symbols.ToMatrix <int>(codibook.Columns[6].ColumnName).GetColumn(0);

            // Create a teacher ID3 algorithm
            var id3 = new ID3Learning()
            {
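                // Each DecisionVariable pairs a column name with the number of distinct symbols it
                // can take. Where NumberOfSymbols is not used, the hard-coded counts (2, 3, 10, ...)
                // are assumptions about the workbook and must cover every value present in the data.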
                new DecisionVariable(codibook.Columns[5].ColumnName, 2),
                new DecisionVariable(codibook.Columns[7].ColumnName, codibook.Columns[7].NumberOfSymbols),
                new DecisionVariable(codibook.Columns[8].ColumnName, codibook.Columns[8].NumberOfSymbols),
                new DecisionVariable(codibook.Columns[9].ColumnName, 3),
                new DecisionVariable(codibook.Columns[12].ColumnName, 10),
                new DecisionVariable(codibook.Columns[13].ColumnName, 10),
                new DecisionVariable(codibook.Columns[14].ColumnName, 10),
                new DecisionVariable(codibook.Columns[15].ColumnName, 10),
                new DecisionVariable(codibook.Columns[16].ColumnName, 2),
                new DecisionVariable(codibook.Columns[20].ColumnName, 2),
                new DecisionVariable(codibook.Columns[29].ColumnName, 2),
                new DecisionVariable(codibook.Columns[30].ColumnName, 2),
                new DecisionVariable(codibook.Columns[34].ColumnName, 2),
            };

            // Learn the training instances!
            Accord.MachineLearning.DecisionTrees.DecisionTree tree = id3.Learn(inputs, outputs);

            // Create a console table for display
            ConsoleTable table = new ConsoleTable("Studentnumber", "Advice", "Conclusion");

            // Loop through each row in data
            foreach (DataRow row in dataTest.Rows)
            {
                // The tree can now be queried for new examples through
                // its decide method. For example, we can create a query
                int[] query = null;

                try
                {
                    query = codibook.Transform(new[, ]
                    {
                        { codibook.Columns[5].ColumnName, row.ItemArray[5].ToString() },
                        { codibook.Columns[7].ColumnName, row.ItemArray[7].ToString() },
                        { codibook.Columns[8].ColumnName, row.ItemArray[8].ToString() },
                        { codibook.Columns[9].ColumnName, row.ItemArray[9].ToString() },
                        { codibook.Columns[12].ColumnName, row.ItemArray[12].ToString() },
                        { codibook.Columns[13].ColumnName, row.ItemArray[13].ToString() },
                        { codibook.Columns[14].ColumnName, row.ItemArray[14].ToString() },
                        { codibook.Columns[15].ColumnName, row.ItemArray[15].ToString() },
                        { codibook.Columns[16].ColumnName, row.ItemArray[16].ToString() },
                        { codibook.Columns[20].ColumnName, row.ItemArray[20].ToString() },
                        { codibook.Columns[29].ColumnName, row.ItemArray[29].ToString() },
                        { codibook.Columns[30].ColumnName, row.ItemArray[30].ToString() },
                        { codibook.Columns[34].ColumnName, row.ItemArray[34].ToString() },
                    });
                }
                catch (Exception)
                {
                    // Show the result for skipped students ("Twijfel" is Dutch for doubt/undecided)
                    var studentnumber = row.ItemArray[0].ToString();
                    var advice        = row.ItemArray[6].ToString();
                    var conclusion    = "(Twijfel)";
                    table.AddRow(studentnumber, advice, conclusion);

                    continue;
                }

                // And then predict the label using
                int predicted = tree.Decide(query);

                // If the tree could not reach a decision, Decide returns -1
                if (predicted != -1)
                {
                    // We can translate it back to strings using
                    string answer = codibook.Revert("advies", predicted);

                    // Show the result in the output
                    var studentnumber = row.ItemArray[0].ToString();
                    var advice        = row.ItemArray[6].ToString();
                    var conclusion    = answer;
                    table.AddRow(studentnumber, advice, conclusion);
                }
                else
                {
                    // Show the result of skipped students
                    var studentnumber = row.ItemArray[0].ToString();
                    var advice        = row.ItemArray[6].ToString();
                    var conclusion    = "(Twijfel)";
                    table.AddRow(studentnumber, advice, conclusion);
                }
            }

            // Write the table in console
            table.Write();

            // Read Key
            Console.ReadKey();
        }
Exemplo n.º 30
0
        public string Decide(Accord.MachineLearning.DecisionTrees.DecisionTree tree, Codification codebook, params double[] query)
        {
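            // Classify a single, already-numerical query with the given tree and translate the
            // predicted class index back into its original string label using the codebook.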
            int predicted = tree.Decide(query);

            return(codebook.Revert(Constants.RESULT_COLUMN_NAME, predicted));
        }
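
        // A minimal usage sketch (illustrative only; the tree, the codebook and the
        // Constants.RESULT_COLUMN_NAME constant are assumed to have been prepared elsewhere,
        // and the feature values below are placeholders, not taken from the original project):
        //
        //     double[] query  = { 1, 0, 2 };   // already-codified feature values
        //     string   answer = Decide(tree, codebook, query);
        //     Console.WriteLine(answer);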