Example #1
        private static void decisionTree(double[][] inputs, int[] outputs)
        {
            // In our problem, we have 2 classes (samples can be either
            // positive or negative), and 2 continuous-valued inputs.
            DecisionTree tree = new DecisionTree(inputs: new[] 
            {
                DecisionVariable.Continuous("X"),
                DecisionVariable.Continuous("Y")
            }, classes: 2);

            C45Learning teacher = new C45Learning(tree);

            // The C4.5 algorithm expects the class labels to
            // range from 0 to k-1, so we convert the -1 labels to zero:
            //
            outputs = outputs.Apply(x => x < 0 ? 0 : x);

            double error = teacher.Run(inputs, outputs);

            // Classify the samples using the model
            int[] answers = inputs.Apply(tree.Compute);

            // Plot the results
            ScatterplotBox.Show("Expected results", inputs, outputs);
            ScatterplotBox.Show("Decision Tree results", inputs, answers)
                .Hold();
        }
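A minimal driver for the snippet above (not part of the original source): the data below is made up, and the call assumes it runs from another method of the same class, e.g. Main. The ±1 labels are remapped to {0, 1} inside decisionTree before C4.5 runs.

            // Hypothetical calling code with a tiny synthetic dataset
            double[][] inputs =
            {
                new double[] { 0.2, 0.3 },
                new double[] { 0.8, 0.9 },
                new double[] { 0.1, 0.7 },
                new double[] { 0.9, 0.1 },
            };

            int[] outputs = { -1, 1, -1, 1 }; // negative / positive samples

            decisionTree(inputs, outputs);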
Example #2
        /// <summary>
        ///   Creates a C# code for the tree.
        /// </summary>
        /// 
        public void Write(DecisionTree tree, string className)
        {
            writer.WriteLine("// This file has been automatically generated by the");
            writer.WriteLine("//");
            writer.WriteLine("// Accord Machine Learning Library");
            writer.WriteLine("// The Accord.NET Framework");
            writer.WriteLine("// http://accord-framework.net");
            writer.WriteLine("//");
            writer.WriteLine();
            writer.WriteLine("namespace DecisionTrees");
            writer.WriteLine("{");
            writer.WriteLine("    using System.CodeDom.Compiler;");
            writer.WriteLine("    using System.Collections.Generic;");
            writer.WriteLine();
            writer.WriteLine("    /// <summary>");
            writer.WriteLine("    ///   Automatically generated decision tree.");
            writer.WriteLine("    /// </summary>");
            writer.WriteLine("    /// ");
            writer.WriteLine("    [GeneratedCode(\"Accord.NET DecisionTree\", \"2.8\")]");
            writer.WriteLine("    public static class {0}", className);
            writer.WriteLine("    {");
            writer.WriteLine();
            writer.WriteLine("        /// <summary>");
            writer.WriteLine("        ///   Assigns a class label to a given input.");
            writer.WriteLine("        /// </summary>");
            writer.WriteLine("        /// ");
            writer.WriteLine("        public static int Function(double[] input)");
            writer.WriteLine("        {");

            create(tree.Root, 3);

            writer.WriteLine("        }");
            writer.WriteLine("    }");
            writer.WriteLine("}");
        }
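A possible way to drive Write, assuming the surrounding class holds the writer field as a TextWriter supplied through its constructor and that a trained DecisionTree named tree is available; the wrapper name DecisionTreeWriter, the StringWriter target, and the generated class name are illustrative only.

            // Hypothetical usage: capture the generated source into a string
            var output = new System.IO.StringWriter();
            var generator = new DecisionTreeWriter(output); // assumed wrapper exposing Write(tree, className)
            generator.Write(tree, "MyGeneratedTree");
            string source = output.ToString();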
Example #3
        public static TrainingSet[] GenerateTrainingSets(IEnumerable<KeyValuePair<User, double[]>> studentsAndMarks, string[] normalRecords, string[] anomalies)
        {
            var countOfEntries = normalRecords.Length + anomalies.Length;
            var inputData = new double[countOfEntries][];
            var outputData = new int[countOfEntries];
            var counter = 0;

            foreach (var studentAndMarks in studentsAndMarks)
            {
                if (normalRecords.Contains(studentAndMarks.Key.OpenId))
                {
                    inputData[counter] = studentAndMarks.Value;
                    outputData[counter++] = 1;
                }

                if (!anomalies.Contains(studentAndMarks.Key.OpenId))
                {
                    continue;
                }

                inputData[counter] = studentAndMarks.Value;
                outputData[counter++] = 0;
            }

            var countOfFeatures = studentsAndMarks.ElementAt(0).Value.Length;
            var features = new DecisionVariable[countOfFeatures];
            features[0] = new DecisionVariable("0", DecisionAttributeKind.Continuous, new AForge.DoubleRange(80, 1200));
            
            for (var i = 1; i < countOfFeatures; i++)
            {
                features[i] = new DecisionVariable(i.ToString(), DecisionAttributeKind.Continuous, new AForge.DoubleRange(0, 10));
            }

            // Create the Decision tree with only 2 result values
            var tree = new DecisionTree(features, 2);

            // Creates a new instance of the C4.5 learning algorithm
            var c45 = new C45Learning(tree);

            // Learn the decision tree
            var error = c45.Run(inputData, outputData);

            // Split all data into normal and anomalies
            var setOfNormalRecords = studentsAndMarks.Where(x => tree.Compute(x.Value) == 1);
            var setOfAnomalies = studentsAndMarks.Where(x => tree.Compute(x.Value) == 0);
                        
            // Split normal records into 2 groups (one for the training set and one for anomaly-occurrence detection)
            var setOfNormalRecordsList = setOfNormalRecords.ToList();
            var splitCount = setOfNormalRecordsList.Count * 2 / 3;
            var setOfNormalRecordsTr1 = setOfNormalRecordsList.GetRange(0, splitCount);
            var setOfNormalRecordsTr2 = setOfNormalRecordsList.GetRange(splitCount, setOfNormalRecordsList.Count - splitCount);
            // Create Training Sets
            var trSetNormalFirst = CreateTrainingSetFromResources(setOfNormalRecordsTr1);
            var trSetNormalSecond = CreateTrainingSetFromResources(setOfNormalRecordsTr2);
            var trSetAnomalies = CreateTrainingSetFromResources(setOfAnomalies);

            return new[] { trSetNormalFirst, trSetNormalSecond, trSetAnomalies };
        }
Example #4
        public Codification GenerateDecisionTreeLib(DataTable data)
        {
            Codification b = new Codification(data);

            DataTable symbols = b.Apply(data);

            int[][] inputs = DataTableToMatrix(symbols, new string[] { "CAP SHAPE", "CAP SURFACE", "CAP COLOR",
                                                                       "BRUISES", "ODOR", "GILL ATTACHMENT",
                                                                       "GILL SPACING", "GILL SIZE", "GILL COLOR",
                                                                       "STALK SHAPE", "STALK ROOT", "STALK SURFACE ABOVE RING",
                                                                       "STALK SURFACE BELOW RING", "STALK COLOR ABOVE RING", "STALK COLOR BELOW RING",
                                                                       "VEIL TYPE", "VEIL COLOR", "RING NUMBER",
                                                                       "RING TYPE", "SPORE PRINT COLOR", "POPULATION",
                                                                       "HABITAT" });

            int[][] mOutputs = DataTableToMatrix(symbols, new string[] { "TYPE" });
            int[]   outputs  = new int[mOutputs.Length];
            for (int i = 0; i < mOutputs.Length; i++)
            {
                outputs[i] = mOutputs[i][0];
            }

            ID3Learning id3learning = new ID3Learning()
            {
                new DecisionVariable("CAP SHAPE", Mushroom.CAP_SHAPE.Length),                               //1
                new DecisionVariable("CAP SURFACE", Mushroom.CAP_SURFACE.Length),                           //2
                new DecisionVariable("CAP COLOR", Mushroom.CAP_COLOR.Length),                               //3

                new DecisionVariable("BRUISES", Mushroom.BRUISES.Length),                                   //4
                new DecisionVariable("ODOR", Mushroom.ODOR.Length),                                         //5

                new DecisionVariable("GILL ATTACHMENT", Mushroom.GILL_ATTACHMENT.Length),                   //6
                new DecisionVariable("GILL SPACING", Mushroom.GILL_SPACING.Length),                         //7
                new DecisionVariable("GILL SIZE", Mushroom.GILL_SIZE.Length),                               //8
                new DecisionVariable("GILL COLOR", Mushroom.GILL_COLOR.Length),                             //9

                new DecisionVariable("STALK SHAPE", Mushroom.STALK_SHAPE.Length),                           //10
                new DecisionVariable("STALK ROOT", Mushroom.STALK_ROOT.Length),                             //11
                new DecisionVariable("STALK SURFACE ABOVE RING", Mushroom.STALK_SURFACE_ABOVE_RING.Length), //12
                new DecisionVariable("STALK SURFACE BELOW RING", Mushroom.STALK_SURFACE_BELOW_RING.Length), //13
                new DecisionVariable("STALK COLOR ABOVE RING", Mushroom.STALK_COLOR_ABOVE_RING.Length),     //14
                new DecisionVariable("STALK COLOR BELOW RING", Mushroom.STALK_COLOR_BELOW_RING.Length),     //15

                new DecisionVariable("VEIL TYPE", Mushroom.VEIL_TYPE.Length),                               //16
                new DecisionVariable("VEIL COLOR", Mushroom.VEIL_COLOR.Length),                             //17

                new DecisionVariable("RING NUMBER", Mushroom.RING_NUMBER.Length),                           //18
                new DecisionVariable("RING TYPE", Mushroom.RING_TYPE.Length),                               //19

                new DecisionVariable("SPORE PRINT COLOR", Mushroom.SPORE_PRINT_COLOR.Length),               //20
                new DecisionVariable("POPULATION", Mushroom.POPULATION.Length),                             //21
                new DecisionVariable("HABITAT", Mushroom.HABITAT.Length)                                    //22
            };

            decisionTreeLib = id3learning.Learn(inputs, outputs);

            return(b);
        }
Example #5
        public string getAnswer(Accord.MachineLearning.DecisionTrees.DecisionTree tree, DataTable trainingData, DataTable executionData, List <string> codFieldListComDsFlowResult, List <string> fieldValuesList)
        {
            trainingData.Merge(executionData);
            Codification codebookExecution = new Codification(trainingData, codFieldListComDsFlowResult.ToArray());

            // The first codebook here should be the decision tree's original codebook; the second codebook is the one for the execution.
            // Validate with different items filled in during the execution.
            return(codebookExecution.Translate("DsFlowResult", tree.Compute(codebookExecution.Translate(fieldValuesList.ToArray()))));
        }
Example #6
        public FormTreeRule(DecisionTree tree, Codification codification, string rule)
            : this()
        {
            this.codification = codification;

            // Show the learned tree in the view            
            decisionTreeView.SetTree(tree, codification);
            decisionTreeView.viewRule(rule);
            
        }
Example #7
        public FormTreeView(DecisionTree tree, Codification codification)
            : this()
        {
            this.codification = codification;

            // Show the learned tree in the view            
            decisionTreeView.SetTree(tree, codification);
            
            if (tree != null && tree.Root != null)
                CreateRuleList(tree.Root, "");
        }
Example #8
        // Create tree
        public void SetTree(DecisionTree tree, Codification codification)
        {
            this.treeSource = tree;            
            this.codification = codification;

            treeView1.Nodes.Clear();

            if (treeSource != null && treeSource.Root != null)
                treeView1.Nodes.Add(convert(TreeSource.Root));
            
        }
Example #9
        /// <summary>
        ///   Creates a new C4.5 learning algorithm.
        /// </summary>
        /// 
        /// <param name="tree">The decision tree to be generated.</param>
        /// 
        public c45(DecisionTree tree)
        {
            this.tree = tree;
            this.attributes = new bool[tree.InputCount];
            this.inputRanges = new IntRange[tree.InputCount];
            this.outputClasses = tree.OutputClasses;
            this.maxHeight = attributes.Length;

            for (int i = 0; i < inputRanges.Length; i++)
                inputRanges[i] = tree.Attributes[i].Range.ToIntRange(false);
        }
Example #10
        /***************************** Constructor *********************************/
        public Classification(SortedList columnList, int classLabelCount)
        {
            // Initialize DecisionTree
            decisionAttributes = new DecisionVariable[columnList.Count];
            for (int i = 0; i < decisionAttributes.Length; i++)
            {
                decisionAttributes[i] = new DecisionVariable((string)columnList.GetByIndex(i), DecisionVariableKind.Continuous);
            }

            int classCount = classLabelCount;
            descisionTree = new DecisionTree(decisionAttributes, classCount);
        }
Example #11
        public static void CreateMitchellExample(out DecisionTree tree, out double[][] inputs, out int[] outputs)
        {
            DataTable data = new DataTable("Mitchell's Tennis Example");

            data.Columns.Add("Day", typeof(string));
            data.Columns.Add("Outlook", typeof(string));
            data.Columns.Add("Temperature", typeof(double));
            data.Columns.Add("Humidity", typeof(double));
            data.Columns.Add("Wind", typeof(string));
            data.Columns.Add("PlayTennis", typeof(string));

            data.Rows.Add("D1", "Sunny", 85, 85, "Weak", "No");
            data.Rows.Add("D2", "Sunny", 80, 90, "Strong", "No");
            data.Rows.Add("D3", "Overcast", 83, 78, "Weak", "Yes");
            data.Rows.Add("D4", "Rain", 70, 96, "Weak", "Yes");
            data.Rows.Add("D5", "Rain", 68, 80, "Weak", "Yes");
            data.Rows.Add("D6", "Rain", 65, 70, "Strong", "No");
            data.Rows.Add("D7", "Overcast", 64, 65, "Strong", "Yes");
            data.Rows.Add("D8", "Sunny", 72, 95, "Weak", "No");
            data.Rows.Add("D9", "Sunny", 69, 70, "Weak", "Yes");
            data.Rows.Add("D10", "Rain", 75, 80, "Weak", "Yes");
            data.Rows.Add("D11", "Sunny", 75, 70, "Strong", "Yes");
            data.Rows.Add("D12", "Overcast", 72, 90, "Strong", "Yes");
            data.Rows.Add("D13", "Overcast", 81, 75, "Weak", "Yes");
            data.Rows.Add("D14", "Rain", 71, 80, "Strong", "No");

            // Create a new codification codebook to
            // convert strings into integer symbols
            Codification codebook = new Codification(data);

            DecisionVariable[] attributes =
            {
               new DecisionVariable("Outlook",     codebook["Outlook"].Symbols),      // 3 possible values (Sunny, overcast, rain)
               new DecisionVariable("Temperature", DecisionVariableKind.Continuous), // continuous values
               new DecisionVariable("Humidity",    DecisionVariableKind.Continuous), // continuous values
               new DecisionVariable("Wind",        codebook["Wind"].Symbols)          // 2 possible values (Weak, strong)
            };

            int classCount = codebook["PlayTennis"].Symbols; // 2 possible values (yes, no)

            tree = new DecisionTree(attributes, classCount);
            C45Learning c45 = new C45Learning(tree);

            // Extract symbols from data and train the classifier
            DataTable symbols = codebook.Apply(data);
            inputs = symbols.ToArray("Outlook", "Temperature", "Humidity", "Wind");
            outputs = symbols.ToArray<int>("PlayTennis");

            double error = c45.Run(inputs, outputs);
        }
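A sketch of a follow-up query that could sit at the end of the method above, while tree and codebook are still in scope; the day values are illustrative.

            // Encode a new day (categorical columns via the codebook, continuous ones as-is)
            double[] query =
            {
                codebook.Translate("Outlook", "Sunny"),
                72,  // Temperature
                95,  // Humidity
                codebook.Translate("Wind", "Strong")
            };

            int prediction = tree.Compute(query);

            // Translate the numeric answer back into a string label
            string answer = codebook.Revert("PlayTennis", prediction); // likely "No" for this combination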
Example #12
            /// <summary>
            ///   Creates a new ID3 learning algorithm.
            /// </summary>
            /// 
            /// <param name="tree">The decision tree to be generated.</param>
            /// 
            public ID3Learning(DecisionTree tree)
            {
                this.tree = tree;
                this.inputRanges = new IntRange[tree.InputCount];
                this.outputClasses = tree.OutputClasses;
                this.attributes = new bool[tree.InputCount];
                this.maxHeight = attributes.Length;

                for (int i = 0; i < tree.Attributes.Count; i++)
                    if (tree.Attributes[i].Nature != DecisionVariableKind.Discrete)
                        throw new ArgumentException("The ID3 inputs are invalid: all input attributes must be discrete.");

                for (int i = 0; i < inputRanges.Length; i++)
                    inputRanges[i] = tree.Attributes[i].Range.ToIntRange(false);
            }
Example #13
        // 
        //You can use the following additional attributes as you write your tests:
        //
        //Use ClassInitialize to run code before running the first test in the class
        //[ClassInitialize()]
        //public static void MyClassInitialize(TestContext testContext)
        //{
        //}
        //
        //Use ClassCleanup to run code after all tests in a class have run
        //[ClassCleanup()]
        //public static void MyClassCleanup()
        //{
        //}
        //
        //Use TestInitialize to run code before running each test
        //[TestInitialize()]
        //public void MyTestInitialize()
        //{
        //}
        //
        //Use TestCleanup to run code after each test has run
        //[TestCleanup()]
        //public void MyTestCleanup()
        //{
        //}
        //
        #endregion


        public static void CreateMitchellExample(out DecisionTree tree, out int[][] inputs, out int[] outputs)
        {
            DataTable data = new DataTable("Mitchell's Tennis Example");

            data.Columns.Add("Day", "Outlook", "Temperature", "Humidity", "Wind", "PlayTennis");

            data.Rows.Add("D1", "Sunny",     "Hot",  "High",   "Weak",   "No");
            data.Rows.Add("D2", "Sunny",     "Hot",  "High",   "Strong", "No");
            data.Rows.Add("D3", "Overcast",  "Hot",  "High",   "Weak",   "Yes");
            data.Rows.Add("D4", "Rain",      "Mild", "High",   "Weak",   "Yes");
            data.Rows.Add("D5", "Rain",      "Cool", "Normal", "Weak",   "Yes");
            data.Rows.Add("D6", "Rain",      "Cool", "Normal", "Strong", "No");
            data.Rows.Add("D7", "Overcast",  "Cool", "Normal", "Strong", "Yes");
            data.Rows.Add("D8", "Sunny",     "Mild", "High",   "Weak",   "No");
            data.Rows.Add("D9", "Sunny",     "Cool", "Normal", "Weak",   "Yes");
            data.Rows.Add("D10", "Rain",     "Mild", "Normal", "Weak",   "Yes");
            data.Rows.Add("D11", "Sunny",    "Mild", "Normal", "Strong", "Yes");
            data.Rows.Add("D12", "Overcast", "Mild", "High",   "Strong", "Yes");
            data.Rows.Add("D13", "Overcast", "Hot",  "Normal", "Weak",   "Yes");
            data.Rows.Add("D14", "Rain",     "Mild", "High",   "Strong", "No");

            // Create a new codification codebook to
            // convert strings into integer symbols
            Codification codebook = new Codification(data);

            DecisionVariable[] attributes =
            {
               new DecisionVariable("Outlook",     codebook["Outlook"].Symbols),     // 3 possible values (Sunny, overcast, rain)
               new DecisionVariable("Temperature", codebook["Temperature"].Symbols), // 3 possible values (Hot, mild, cool)
               new DecisionVariable("Humidity",    codebook["Humidity"].Symbols),    // 2 possible values (High, normal)
               new DecisionVariable("Wind",        codebook["Wind"].Symbols)         // 2 possible values (Weak, strong)
            };

            int classCount = codebook["PlayTennis"].Symbols; // 2 possible values (yes, no)

            tree = new DecisionTree(attributes, classCount);
            ID3Learning id3 = new ID3Learning(tree);

            // Extract symbols from data and train the classifier
            DataTable symbols = codebook.Apply(data);
            inputs = symbols.ToIntArray("Outlook", "Temperature", "Humidity", "Wind");
            outputs = symbols.ToIntArray("PlayTennis").GetColumn(0);

            id3.Run(inputs, outputs);
        }
Example #14
        //public static C45Model CreateC45Model(Codification codification)
        //{
        //    int lastIndex = codification.Columns.Count - 1;

        //    List<DecisionVariable> attributes = new List<DecisionVariable>();

        //    for (int indexColumn = 0; indexColumn < lastIndex; indexColumn++)
        //    {
        //        attributes.Add(new DecisionVariable(codification.Columns[indexColumn].ColumnName,
        //            codification[indexColumn].Symbols));
        //    }

        //    C45Model model = new C45Model(new DecisionTree(attributes.ToArray(), 2));

        //    return model;
        //}

        //public C45Model(DecisionTree tree)
        //{
        //    this.Tree = tree;
        //}

        // Training the decision tree with the C4.5 algorithm
        public override void TrainningModel(TrainningData trainningData)
        {
            // Get the data for training the tree
            Codification codification = trainningData.CodificationData;
            double[][] inputs = trainningData.TrainningAttributes;
            int[] outputs = trainningData.ClassificationAttribute;

            // Create tree
            this.Tree = this.CreateDecisionTree(codification); 
            //var attributes = DecisionVariable.FromCodebook(codification, inputColumns);
            //DecisionTree tree = new DecisionTree(attributes, outputClasses: 5);

           
            // Creates a new instance of the C4.5 learning algorithm
            C45Learning c45 = new C45Learning(this.Tree);

            // Learn the decision tree
            double error = c45.Run(inputs, outputs);        
        }
Example #15
        public void LargeRunTest2()
        {
            Accord.Math.Random.Generator.Seed = 0;

            int[,] random = Matrix.Random(1000, 10, 0.0, 10.0).ToInt32();

            int[][] samples = random.ToJagged();
            int[] outputs = new int[1000];

            for (int i = 0; i < samples.Length; i++)
            {
                if (samples[i][0] > 5 || Tools.Random.NextDouble() > 0.85)
                    outputs[i] = 1;
            }

            DecisionVariable[] vars = new DecisionVariable[10];
            for (int i = 0; i < vars.Length; i++)
                vars[i] = new DecisionVariable("x" + i, 10);

            DecisionTree tree = new DecisionTree(vars, 2);

            var teacher = new ID3Learning(tree);

            double error = teacher.Run(samples, outputs);

            Assert.AreEqual(0, error);

            var rules = DecisionSet.FromDecisionTree(tree);

            Simplification simpl = new Simplification(rules)
            {
                Alpha = 0.05
            };

            error = simpl.ComputeError(samples.ToDouble(), outputs);
            Assert.AreEqual(0, error);

            double newError = simpl.Compute(samples.ToDouble(), outputs);

            Assert.AreEqual(0.097, newError);
        }
Example #16
        public Accord.MachineLearning.DecisionTrees.DecisionTree Learn(string[][] records, string[] columnNamesWithoutResult, ref Codification codebook)
        {
            DataTable data = new DataTable();

            foreach (var columnName in records[0])
            {
                data.Columns.Add(columnName);
            }

            int rowsAdderCounter = 0;

            foreach (var record in records)
            {
                if (rowsAdderCounter == 0)
                {
                    rowsAdderCounter++;
                    continue;
                }

                data.Rows.Add(record);
            }

            double[][] inputs = data.ToJagged(columnNamesWithoutResult);
            string[]   labels = data.ToArray <string>(Constants.RESULT_COLUMN_NAME);

            int[] outputs = codebook.Translate(Constants.RESULT_COLUMN_NAME, labels);

            var teacher = new C45Learning();

            foreach (var columnName in columnNamesWithoutResult)
            {
                DecisionVariable decVar = new DecisionVariable(columnName, DecisionVariableKind.Continuous);
                teacher.Add(decVar);
            }

            Accord.MachineLearning.DecisionTrees.DecisionTree tree = teacher.Learn(inputs, outputs);

            return(tree);
        }
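The records argument above is expected to carry a header row followed by the data rows; a hypothetical shape could look like the block below, where the column names and the "Result" stand-in for Constants.RESULT_COLUMN_NAME are assumptions.

            // Hypothetical input for Learn: first record = column names, rest = rows
            string[][] records =
            {
                new[] { "Feature1", "Feature2", "Result" },
                new[] { "1.0", "2.5", "Yes" },
                new[] { "0.3", "4.1", "No" }
            };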
Example #17
        public Data.DecisionTree runDecisionSupport(int codTask, int totalExecutions, int classCount)
        {
            Data.DecisionTree decisionTree = new Data.DecisionTree();

            // Used when a decision tree has already been created previously; allows reusing some of its parameters
            Data.DecisionTree serializedTree = null;

            // Check whether a tree has already been generated previously
            bool hasSerializedTree = decisionTree.Serialization.hasSerializedTree(codTask);

            // Load the existing tree
            if (hasSerializedTree)
            {
                serializedTree = decisionTree.deserializeTree(codTask);
            }

            // Total number of executions of the task
            //int totalExecutions = getTotalExecutions(codTask);

            Dictionary <dynamic, int> fieldSymbols = new Dictionary <dynamic, int>();

            // Fill the dictionary with the list of process fields and the number of symbols for each field
            fillSymbolsCount(codTask, ref fieldSymbols);

            // Set the relevance of each field according to its variation and null rates
            setSymbolsRelevance(ref fieldSymbols, totalExecutions, codTask);

            StringBuilder CodFieldListSB       = new StringBuilder();
            StringBuilder CodFieldListSBIsNull = new StringBuilder();
            StringBuilder DsFieldNameList      = new StringBuilder();
            List <string> codFieldList         = new List <string>();

            // Prepare the field list
            List <string> codFieldListComDsFlowResult = prepareFieldList(ref fieldSymbols, ref CodFieldListSB, ref CodFieldListSBIsNull, ref DsFieldNameList, ref codFieldList);

            // Query the training data
            DataTable data = getData(codTask, CodFieldListSB, CodFieldListSBIsNull);

            decisionTree.Data = data;

            // Create the TRAINING object
            decisionTree.Training.setTrainingData(data);

            // Fill the DecisionTree's VALIDATION object
            decisionTree.Validation.setValidationData(data);

            // ############################################# Moved to the Training class.

            // Pass the data in to create the codebook
            // Converts the strings into integer symbols
            Codification codebook = new Codification(data, codFieldListComDsFlowResult.ToArray());

            // Build the list of decision variables
            List <DecisionVariable> decisionVariableList = new List <DecisionVariable>();

            foreach (KeyValuePair <dynamic, int> entry in fieldSymbols.Where(p => ((DecisionSupportField)p.Key).relevante == true))
            {
                DecisionSupportField c = entry.Key;
                c.simbolos = entry.Value;
                decisionVariableList.Add(new DecisionVariable(c.codigo.ToString(), c.simbolos + 1)); // Adding +1 for the possible null value
            }

            int qtdCamposRelevantes = fieldSymbols.Count(i => ((DecisionSupportField)i.Key).relevante == true);

            DecisionVariable[] attributes = new DecisionVariable[qtdCamposRelevantes];

            // Table of variables that influence the decision
            attributes = decisionVariableList.ToArray();

            //List<string> DsFlowResults = getTaskResults(codTask);

            // Number of possible outcomes for the task being analyzed
            //int classCount = DsFlowResults.Count;

            // Create the decision tree
            Accord.MachineLearning.DecisionTrees.DecisionTree tree = new Accord.MachineLearning.DecisionTrees.DecisionTree(attributes, classCount);

            // Create an instance of the learning algorithm used, ID3
            ID3Learning id3learning = new ID3Learning(tree);

            // Translate the training data into integer symbols using the codebook
            DataTable symbols = codebook.Apply(decisionTree.Training.TrainingData);

            // Input columns
            // *** The number of input columns must match the number of DecisionVariables (attributes)
            int[][] inputs = symbols.ToArray <int>(codFieldList.ToArray());

            // Column with the output
            int[] outputs = symbols.ToArray <int>("DsFlowResult");

            // Learn from the training instances
            id3learning.Run(inputs, outputs);

            // ############################################# Moved to the Training class.

            decisionTree.Tree = tree;

            // Assign the lists of field codes and names to the decisionTree object
            // Kept this way to preserve the use of the 'ref' keyword
            decisionTree.CodFieldListSB              = CodFieldListSB;
            decisionTree.CodFieldListSBIsNull        = CodFieldListSBIsNull;
            decisionTree.DsFieldNameList             = DsFieldNameList;
            decisionTree.codFieldList                = codFieldList;
            decisionTree.codFieldListComDsFlowResult = codFieldListComDsFlowResult;
            decisionTree.fieldSymbols                = fieldSymbols;
            decisionTree.Codebook = codebook;

            decisionTree.serializeTree(codTask);

            return(decisionTree);
        }
Example #18
        public static DecisionTree createNurseryExample(out double[][] inputs, out int[] outputs, int first)
        {
            string nurseryData = Resources.nursery;

            string[] inputColumns = 
            {
                "parents", "has_nurs", "form", "children",
                "housing", "finance", "social", "health"
            };

            string outputColumn = "output";

            DataTable table = new DataTable("Nursery");
            table.Columns.Add(inputColumns);
            table.Columns.Add(outputColumn);

            string[] lines = nurseryData.Split(
                new[] { Environment.NewLine }, StringSplitOptions.None);

            foreach (var line in lines)
                table.Rows.Add(line.Split(','));

            Codification codebook = new Codification(table);
            DataTable symbols = codebook.Apply(table);
            inputs = symbols.ToArray(inputColumns);
            outputs = symbols.ToArray<int>(outputColumn);

            var attributes = DecisionVariable.FromCodebook(codebook, inputColumns);
            var tree = new DecisionTree(attributes, outputClasses: 5);

            C45Learning c45 = new C45Learning(tree);
            double error = c45.Run(inputs.Submatrix(first), outputs.Submatrix(first));

            Assert.AreEqual(0, error);

            return tree;
        }
Example #19
        public void ArgumentCheck1()
        {
            double[][] samples =
            {
                new [] { 0, 2, 4.0 },
                new [] { 1, 5, 2.0 },
                null,
                new [] { 1, 5, 6.0 },
            };

            int[] outputs = 
            {
                1, 1, 0, 0
            };

            DecisionVariable[] vars = new DecisionVariable[3];
            for (int i = 0; i < vars.Length; i++)
                vars[i] = DecisionVariable.Continuous(i.ToString());

            DecisionTree tree = new DecisionTree(vars, 2);
            var teacher = new C45Learning(tree);

            bool thrown = false;

            try { double error = teacher.Run(samples, outputs); }
            catch (ArgumentNullException) { thrown = true; }

            Assert.IsTrue(thrown);
        }
Example #20
        static void Main(string[] args)
        {
            // Create a new reader, opening a given path
            ExcelReader excel     = new ExcelReader("Intake Inf Cohort 2017 - Training Set.xlsx");
            ExcelReader excelTest = new ExcelReader("Intake Inf Cohort 2017 - Test Set.xlsx");

            // Afterwards, we can query the file for all
            // worksheets within the specified workbook:
            string[] sheets     = excel.GetWorksheetList();
            string[] sheetsTest = excelTest.GetWorksheetList();

            // Finally, we can request a specific sheet:
            DataTable data     = excel.GetWorksheet(sheets[0]);
            DataTable dataTest = excelTest.GetWorksheet(sheetsTest[0]);

            // Loop through each column in data
            foreach (DataColumn column in data.Columns)
            {
                // Replace empty with underscore
                column.ColumnName = column.ColumnName.Replace(" ", "_");
            }

            // Create a new codification codebook to
            // convert strings into integer symbols
            Codification codibook = new Codification(data);

            // Set codibook
            Codification = codibook;

            // Translate our training data into integer symbols using our codebook:
            DataTable symbols = codibook.Apply(data);

            int[][] inputs = symbols.ToJagged <int>(
                codibook.Columns[5].ColumnName,
                codibook.Columns[7].ColumnName,
                codibook.Columns[8].ColumnName,
                codibook.Columns[9].ColumnName,
                codibook.Columns[12].ColumnName,
                codibook.Columns[13].ColumnName,
                codibook.Columns[14].ColumnName,
                codibook.Columns[15].ColumnName,
                codibook.Columns[16].ColumnName,
                codibook.Columns[20].ColumnName,
                codibook.Columns[29].ColumnName,
                codibook.Columns[30].ColumnName,
                codibook.Columns[34].ColumnName
                );
            int[] outputs = symbols.ToMatrix <int>(codibook.Columns[6].ColumnName).GetColumn(0);

            // Create a teacher ID3 algorithm
            var id3 = new ID3Learning()
            {
                new DecisionVariable(codibook.Columns[5].ColumnName, 2),
                new DecisionVariable(codibook.Columns[7].ColumnName, codibook.Columns[7].NumberOfSymbols),
                new DecisionVariable(codibook.Columns[8].ColumnName, codibook.Columns[8].NumberOfSymbols),
                new DecisionVariable(codibook.Columns[9].ColumnName, 3),
                new DecisionVariable(codibook.Columns[12].ColumnName, 10),
                new DecisionVariable(codibook.Columns[13].ColumnName, 10),
                new DecisionVariable(codibook.Columns[14].ColumnName, 10),
                new DecisionVariable(codibook.Columns[15].ColumnName, 10),
                new DecisionVariable(codibook.Columns[16].ColumnName, 2),
                new DecisionVariable(codibook.Columns[20].ColumnName, 2),
                new DecisionVariable(codibook.Columns[29].ColumnName, 2),
                new DecisionVariable(codibook.Columns[30].ColumnName, 2),
                new DecisionVariable(codibook.Columns[34].ColumnName, 2),
            };

            // Learn the training instances!
            Accord.MachineLearning.DecisionTrees.DecisionTree tree = id3.Learn(inputs, outputs);

            // Create a console table for display
            ConsoleTable table = new ConsoleTable("Studentnumber", "Advice", "Conclusion");

            // Loop through each row in data
            foreach (DataRow row in dataTest.Rows)
            {
                // The tree can now be queried for new examples through
                // its decide method. For example, we can create a query
                int[] query = null;

                try
                {
                    query = codibook.Transform(new[, ]
                    {
                        { codibook.Columns[5].ColumnName, row.ItemArray[5].ToString() },
                        { codibook.Columns[7].ColumnName, row.ItemArray[7].ToString() },
                        { codibook.Columns[8].ColumnName, row.ItemArray[8].ToString() },
                        { codibook.Columns[9].ColumnName, row.ItemArray[9].ToString() },
                        { codibook.Columns[12].ColumnName, row.ItemArray[12].ToString() },
                        { codibook.Columns[13].ColumnName, row.ItemArray[13].ToString() },
                        { codibook.Columns[14].ColumnName, row.ItemArray[14].ToString() },
                        { codibook.Columns[15].ColumnName, row.ItemArray[15].ToString() },
                        { codibook.Columns[16].ColumnName, row.ItemArray[16].ToString() },
                        { codibook.Columns[20].ColumnName, row.ItemArray[20].ToString() },
                        { codibook.Columns[29].ColumnName, row.ItemArray[29].ToString() },
                        { codibook.Columns[30].ColumnName, row.ItemArray[30].ToString() },
                        { codibook.Columns[34].ColumnName, row.ItemArray[34].ToString() },
                    });
                }
                catch (Exception)
                {
                    // Show the result of skipped students
                    var studentnumber = row.ItemArray[0].ToString();
                    var advice        = row.ItemArray[6].ToString();
                    var conclusion    = "(Twijfel)";
                    table.AddRow(studentnumber, advice, conclusion);

                    continue;
                }

                // And then predict the label using
                int predicted = tree.Decide(query);

                // Any prediction that comes back as -1 is ignored for consistency
                if (predicted != -1)
                {
                    // We can translate it back to strings using
                    string answer = codibook.Revert("advies", predicted);

                    // Show the result in the output
                    var studentnumber = row.ItemArray[0].ToString();
                    var advice        = row.ItemArray[6].ToString();
                    var conclusion    = answer;
                    table.AddRow(studentnumber, advice, conclusion);
                }
                else
                {
                    // Show the result of skipped students
                    var studentnumber = row.ItemArray[0].ToString();
                    var advice        = row.ItemArray[6].ToString();
                    var conclusion    = "(Twijfel)";
                    table.AddRow(studentnumber, advice, conclusion);
                }
            }

            // Write the table in console
            table.Write();

            // Read Key
            Console.ReadKey();
        }
Example #21
        public void IrisDatasetTest()
        {
            #region doc_iris
            // In this example, we will process the famous Fisher's Iris dataset, in
            // which the task is to classify whether the features of an Iris flower
            // belong to an Iris setosa, an Iris versicolor, or an Iris virginica:
            //
            //  - https://en.wikipedia.org/wiki/Iris_flower_data_set
            //

            // First, let's load the dataset into an array of text that we can process
            string[][] text = Resources.iris_data.Split(new[] { '\n' }, StringSplitOptions.RemoveEmptyEntries).Apply(x => x.Split(','));

            // The first four columns contain the flower features
            double[][] inputs = text.GetColumns(0, 1, 2, 3).To<double[][]>();

            // The last column contains the expected flower type
            string[] labels = text.GetColumn(4);

            // Since the labels are represented as text, the first step is to convert
            // those text labels into integer class labels, so we can process them
            // more easily. For this, we will create a codebook to encode class labels:
            //
            var codebook = new Codification("Output", labels);

            // With the codebook, we can convert the labels:
            int[] outputs = codebook.Translate("Output", labels);

            // Let's declare the names of our input variables:
            DecisionVariable[] features =
            {
                new DecisionVariable("sepal length", DecisionVariableKind.Continuous), 
                new DecisionVariable("sepal width", DecisionVariableKind.Continuous), 
                new DecisionVariable("petal length", DecisionVariableKind.Continuous), 
                new DecisionVariable("petal width", DecisionVariableKind.Continuous), 
            };

            // Now, we can finally create our tree for the 3 classes:
            var tree = new DecisionTree(inputs: features, classes: 3);

            // And we can use the C4.5 for learning:
            var teacher = new C45Learning(tree);

            // And finally induce the tree:
            teacher.Learn(inputs, outputs);

            // To get the estimated class labels, we can use
            int[] predicted = tree.Decide(inputs);
            
            // And the classification error can be computed as 
            double error = new ZeroOneLoss(outputs) // 0.0266
            {
                Mean = true
            }.Loss(tree.Decide(inputs));

            // Moreover, we may decide to convert our tree to a set of rules:
            DecisionSet rules = tree.ToRules();

            // And using the codebook, we can inspect the tree reasoning:
            string ruleText = rules.ToString(codebook, "Output",
                System.Globalization.CultureInfo.InvariantCulture);

            // The output is:
            string expected = @"Iris-setosa =: (petal length <= 2.45)
Iris-versicolor =: (petal length > 2.45) && (petal width <= 1.75) && (sepal length <= 7.05) && (sepal width <= 2.85)
Iris-versicolor =: (petal length > 2.45) && (petal width <= 1.75) && (sepal length <= 7.05) && (sepal width > 2.85)
Iris-versicolor =: (petal length > 2.45) && (petal width > 1.75) && (sepal length <= 5.95) && (sepal width > 3.05)
Iris-virginica =: (petal length > 2.45) && (petal width <= 1.75) && (sepal length > 7.05)
Iris-virginica =: (petal length > 2.45) && (petal width > 1.75) && (sepal length > 5.95)
Iris-virginica =: (petal length > 2.45) && (petal width > 1.75) && (sepal length <= 5.95) && (sepal width <= 3.05)
";
            #endregion

            Assert.AreEqual(0.026666666666666668, error, 1e-10);
            Assert.AreEqual(4, tree.NumberOfInputs);
            Assert.AreEqual(3, tree.NumberOfOutputs);

            double newError = ComputeError(rules, inputs, outputs);
            Assert.AreEqual(0.026666666666666668, newError, 1e-10);
            Assert.AreEqual(expected, ruleText);
        }
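A single new measurement could also be classified with the tree learned above; the values are illustrative and, by the first rule shown (petal length <= 2.45), should map back to "Iris-setosa".

            // Sketch: classify one new flower and revert the label with the codebook
            int c = tree.Decide(new double[] { 5.1, 3.5, 1.4, 0.2 });
            string label = codebook.Revert("Output", c); // "Iris-setosa" per the rule above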
Example #22
        public void ConsistencyTest1()
        {
            int[,] random = Matrix.Random(1000, 10, 0, 10).ToInt32();

            int[][] samples = random.ToArray();
            int[] outputs = new int[1000];

            for (int i = 0; i < samples.Length; i++)
            {
                if (samples[i][0] > 8)
                    outputs[i] = 1;
            }

            DecisionVariable[] vars = new DecisionVariable[10];
            for (int i = 0; i < vars.Length; i++)
                vars[i] = new DecisionVariable(i.ToString(), new IntRange(0,10));

            DecisionTree tree = new DecisionTree(vars, 2);

            ID3Learning teacher = new ID3Learning(tree);

            double error = teacher.Run(samples, outputs);

            Assert.AreEqual(0, error);

            Assert.AreEqual(11, tree.Root.Branches.Count);
            for (int i = 0; i < tree.Root.Branches.Count; i++)
                Assert.IsTrue(tree.Root.Branches[i].IsLeaf);    
        }
Example #23
        public void ConstantDiscreteVariableTest()
        {
            DecisionTree tree;
            int[][] inputs;
            int[] outputs;

            DataTable data = new DataTable("Degenerated Tennis Example");

            data.Columns.Add("Day", "Outlook", "Temperature", "Humidity", "Wind", "PlayTennis");

            data.Rows.Add("D1", "Sunny", "Hot", "High", "Weak", "No");
            data.Rows.Add("D2", "Sunny", "Hot", "High", "Strong", "No");
            data.Rows.Add("D3", "Overcast", "Hot", "High", "Weak", "Yes");
            data.Rows.Add("D4", "Rain", "Hot", "High", "Weak", "Yes");
            data.Rows.Add("D5", "Rain", "Hot", "Normal", "Weak", "Yes");
            data.Rows.Add("D6", "Rain", "Hot", "Normal", "Strong", "No");
            data.Rows.Add("D7", "Overcast", "Hot", "Normal", "Strong", "Yes");
            data.Rows.Add("D8", "Sunny", "Hot", "High", "Weak", "No");
            data.Rows.Add("D9", "Sunny", "Hot", "Normal", "Weak", "Yes");
            data.Rows.Add("D10", "Rain", "Hot", "Normal", "Weak", "Yes");
            data.Rows.Add("D11", "Sunny", "Hot", "Normal", "Strong", "Yes");
            data.Rows.Add("D12", "Overcast", "Hot", "High", "Strong", "Yes");
            data.Rows.Add("D13", "Overcast", "Hot", "Normal", "Weak", "Yes");
            data.Rows.Add("D14", "Rain", "Hot", "High", "Strong", "No");

            // Create a new codification codebook to
            // convert strings into integer symbols
            Codification codebook = new Codification(data);

            DecisionVariable[] attributes =
            {
               new DecisionVariable("Outlook",     codebook["Outlook"].Symbols),     // 3 possible values (Sunny, overcast, rain)
               new DecisionVariable("Temperature", codebook["Temperature"].Symbols), // 1 constant value (Hot)
               new DecisionVariable("Humidity",    codebook["Humidity"].Symbols),    // 2 possible values (High, normal)
               new DecisionVariable("Wind",        codebook["Wind"].Symbols)         // 2 possible values (Weak, strong)
            };

            int classCount = codebook["PlayTennis"].Symbols; // 2 possible values (yes, no)


            bool thrown = false;
            try
            {
                tree = new DecisionTree(attributes, classCount);
            }
            catch
            {
                thrown = true;
            }

            Assert.IsTrue(thrown);


            attributes[1] = new DecisionVariable("Temperature", 2);
            tree = new DecisionTree(attributes, classCount);
            ID3Learning id3 = new ID3Learning(tree);

            // Extract symbols from data and train the classifier
            DataTable symbols = codebook.Apply(data);
            inputs = symbols.ToArray<int>("Outlook", "Temperature", "Humidity", "Wind");
            outputs = symbols.ToArray<int>("PlayTennis");

            double error = id3.Run(inputs, outputs);

            for (int i = 0; i < inputs.Length; i++)
            {
                int y = tree.Compute(inputs[i]);
                Assert.AreEqual(outputs[i], y);
            }
        }
Example #24
        public static void CreateXORExample(out DecisionTree tree, out int[][] inputs, out int[] outputs)
        {
            inputs = new int[][]
            {
                new int[] { 1, 0, 0, 1 },
                new int[] { 0, 1, 0, 0 },
                new int[] { 0, 0, 0, 0 },
                new int[] { 1, 1, 0, 0 },
                new int[] { 0, 1, 1, 1 },
                new int[] { 0, 0, 1, 1 },
                new int[] { 1, 0, 1, 1 }
            };

            outputs = new int[]
            {
                1, 1, 0, 0, 1, 0, 1
            };

            DecisionVariable[] attributes =
            {
               new DecisionVariable("a1", 2), 
               new DecisionVariable("a2", 2), 
               new DecisionVariable("a3", 2), 
               new DecisionVariable("a4", 2)  
            };

            int classCount = 2;

            tree = new DecisionTree(attributes, classCount);
            ID3Learning id3 = new ID3Learning(tree);


            double error = id3.Run(inputs, outputs);
        }
Example #25
        public string Decide(Accord.MachineLearning.DecisionTrees.DecisionTree tree, Codification codebook, params double[] query)
        {
            int predicted = tree.Decide(query);

            return(codebook.Revert(Constants.RESULT_COLUMN_NAME, predicted));
        }
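A hedged calling sketch for the helper above, assuming an instance named classifier, plus a tree and codebook produced by the Learn method of Example #16; the feature values are purely illustrative.

        // Hypothetical usage of Decide
        string answer = classifier.Decide(tree, codebook, 1.0, 2.5);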
Example #26
        public void same_input_different_output()
        {
            double[][] inputs = new double[][] {
                new double[] { 1 },
                new double[] { 0 },
                new double[] { 2 },
                new double[] { 3 },
                new double[] { 0 },
            };

            int[] outputs = new int[] {
                11,
                00,
                22,
                33,
                01
            };


            DecisionVariable[] variables = { new DecisionVariable("x", DecisionVariableKind.Continuous) };

            DecisionTree decisionTree = new DecisionTree(variables, 34);
            C45Learning c45Learning = new C45Learning(decisionTree)
            {
                Join = 10,
                MaxHeight = 10
            };
            c45Learning.Run(inputs, outputs); // System.AggregateException thrown here

            int[] actual = decisionTree.Decide(inputs);

            Assert.AreEqual(11, actual[0]);
            Assert.AreEqual(00, actual[1]);
            Assert.AreEqual(22, actual[2]);
            Assert.AreEqual(33, actual[3]);
            Assert.AreEqual(00, actual[4]);
        }
Example #27
        public void same_input_different_output_minimal()
        {
            double[][] inputs = new double[][] {
                new double[] { 0 },
                new double[] { 0 }
            };

            int[] outputs = new int[] {
                1,
                0
            };


            DecisionVariable[] variables = { new DecisionVariable("x", DecisionVariableKind.Continuous) };

            DecisionTree decisionTree = new DecisionTree(variables, 2);
            C45Learning c45Learning = new C45Learning(decisionTree);
            c45Learning.Run(inputs, outputs); // System.AggregateException thrown here

            Assert.AreEqual(decisionTree.Decide(new[] { 0 }), 0);
        }
Example #28
        public void AttributeReuseTest1()
        {
            string[][] text = Resources.iris_data.Split(
                new[] { '\n' }, StringSplitOptions.RemoveEmptyEntries)
                .Apply(x => x.Split(','));

            double[][] inputs = new double[text.Length][];
            for (int i = 0; i < inputs.Length; i++)
                inputs[i] = text[i].First(4).Convert(s => Double.Parse(s, System.Globalization.CultureInfo.InvariantCulture));

            string[] labels = text.GetColumn(4);

            Codification codebook = new Codification("Label", labels);

            int[] outputs = codebook.Translate("Label", labels);


            DecisionVariable[] features =
            {
                new DecisionVariable("sepal length", DecisionVariableKind.Continuous), 
                new DecisionVariable("sepal width", DecisionVariableKind.Continuous), 
                new DecisionVariable("petal length", DecisionVariableKind.Continuous), 
                new DecisionVariable("petal width", DecisionVariableKind.Continuous), 
            };


            DecisionTree tree = new DecisionTree(features, codebook.Columns[0].Symbols);

            C45Learning teacher = new C45Learning(tree);

            teacher.Join = 3;

            double error = teacher.Run(inputs, outputs);
            Assert.AreEqual(0.02, error, 1e-10);

            DecisionSet rules = tree.ToRules();

            double newError = ComputeError(rules, inputs, outputs);
            Assert.AreEqual(0.02, newError, 1e-10);

            string ruleText = rules.ToString(codebook,
                System.Globalization.CultureInfo.InvariantCulture);

            // TODO: implement this assertion properly, actually checking
            // the text contents once the feature is completely finished.
            Assert.AreEqual(600, ruleText.Length);
        }
Example #29
        public void RunTest()
        {
            int[][] inputs =
            {
                new int[] { 0, 0 },
                new int[] { 0, 1 },
                new int[] { 1, 0 },
                new int[] { 1, 1 },
            };

            int[] outputs = // xor
            {
                0,
                1,
                1,
                0
            };

            DecisionVariable[] attributes = 
            {
                new DecisionVariable("x", DecisionVariableKind.Discrete),
                new DecisionVariable("y", DecisionVariableKind.Discrete),
            };


            DecisionTree tree = new DecisionTree(attributes, 2);

            ID3Learning teacher = new ID3Learning(tree);

            double error = teacher.Run(inputs, outputs);

            Assert.AreEqual(0, error);

            Assert.AreEqual(0, tree.Root.Branches.AttributeIndex); // x
            Assert.AreEqual(2, tree.Root.Branches.Count);
            Assert.IsNull(tree.Root.Value);

            Assert.AreEqual(0.0, tree.Root.Branches[0].Value); // x = [0]
            Assert.AreEqual(1.0, tree.Root.Branches[1].Value); // x = [1]

            Assert.AreEqual(tree.Root, tree.Root.Branches[0].Parent);
            Assert.AreEqual(tree.Root, tree.Root.Branches[1].Parent);

            Assert.AreEqual(2, tree.Root.Branches[0].Branches.Count);
            Assert.AreEqual(2, tree.Root.Branches[1].Branches.Count);

            Assert.IsTrue(tree.Root.Branches[0].Branches[0].IsLeaf);
            Assert.IsTrue(tree.Root.Branches[0].Branches[1].IsLeaf);

            Assert.IsTrue(tree.Root.Branches[1].Branches[0].IsLeaf);
            Assert.IsTrue(tree.Root.Branches[1].Branches[1].IsLeaf);

            Assert.AreEqual(0.0, tree.Root.Branches[0].Branches[0].Value); // y = [0]
            Assert.AreEqual(1.0, tree.Root.Branches[0].Branches[1].Value); // y = [1]

            Assert.AreEqual(0.0, tree.Root.Branches[1].Branches[0].Value); // y = [0]
            Assert.AreEqual(1.0, tree.Root.Branches[1].Branches[1].Value); // y = [1]

            Assert.AreEqual(0, tree.Root.Branches[0].Branches[0].Output); // 0 ^ 0 = 0
            Assert.AreEqual(1, tree.Root.Branches[0].Branches[1].Output); // 0 ^ 1 = 1
            Assert.AreEqual(1, tree.Root.Branches[1].Branches[0].Output); // 1 ^ 0 = 1
            Assert.AreEqual(0, tree.Root.Branches[1].Branches[1].Output); // 1 ^ 1 = 0
        }
Example #30
        public void LargeSampleTest1()
        {
            Accord.Math.Tools.SetupGenerator(0);

            int[][] dataSamples = Matrix.Random(500, 3, 0, 10).ToInt32().ToArray();
            int[] target = Matrix.Random(500, 1, 0, 2).ToInt32().GetColumn(0);
            DecisionVariable[] features =
            {
                new DecisionVariable("Outlook",      10), 
                new DecisionVariable("Temperature",  10), 
                new DecisionVariable("Humidity",     10), 
            };


            DecisionTree tree = new DecisionTree(features, 2);
            ID3Learning id3Learning = new ID3Learning(tree);

            double error = id3Learning.Run(dataSamples, target);

            Assert.IsTrue(error < 0.2);

            var code = tree.ToCode("MyTree");


            Assert.IsNotNull(code);
            Assert.IsTrue(code.Length > 0);
        }
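The generated source could then be persisted and compiled into another project; a small sketch with an arbitrary output path.

            // Sketch: save the generated class produced by ToCode above
            System.IO.File.WriteAllText("MyTree.cs", code);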
Example #31
        public void IncompleteDiscreteVariableTest()
        {
            DecisionTree tree;
            int[][] inputs;
            int[] outputs;

            DataTable data = new DataTable("Degenerated Tennis Example");

            data.Columns.Add("Day", "Outlook", "Temperature", "Humidity", "Wind", "PlayTennis");

            data.Rows.Add("D1", "Sunny", "Hot", "High", "Weak", "No");
            data.Rows.Add("D2", "Sunny", "Hot", "High", "Strong", "No");
            data.Rows.Add("D3", "Overcast", "Hot", "High", "Weak", "Yes");
            data.Rows.Add("D4", "Rain", "Mild", "High", "Weak", "Yes");
            data.Rows.Add("D5", "Rain", "Cool", "Normal", "Weak", "Yes");
            data.Rows.Add("D6", "Rain", "Cool", "Normal", "Strong", "No");
            data.Rows.Add("D7", "Overcast", "Cool", "Normal", "Strong", "Yes");
            data.Rows.Add("D8", "Sunny", "Mild", "High", "Weak", "No");
            data.Rows.Add("D9", "Sunny", "Cool", "Normal", "Weak", "Yes");
            data.Rows.Add("D10", "Rain", "Mild", "Normal", "Weak", "Yes");
            data.Rows.Add("D11", "Sunny", "Mild", "Normal", "Strong", "Yes");
            data.Rows.Add("D12", "Overcast", "Mild", "High", "Strong", "Yes");
            data.Rows.Add("D13", "Overcast", "Hot", "Normal", "Weak", "Yes");
            data.Rows.Add("D14", "Rain", "Mild", "High", "Strong", "No");

            // Create a new codification codebook to
            // convert strings into integer symbols
            Codification codebook = new Codification(data);

            DecisionVariable[] attributes =
            {
               new DecisionVariable("Outlook",     codebook["Outlook"].Symbols+200), // 203 possible values, 200 undefined
               new DecisionVariable("Temperature", codebook["Temperature"].Symbols), // 3 possible values (Hot, mild, cool)
               new DecisionVariable("Humidity",    codebook["Humidity"].Symbols),    // 2 possible values (High, normal)
               new DecisionVariable("Wind",        codebook["Wind"].Symbols)         // 2 possible values (Weak, strong)
            };

            int classCount = codebook["PlayTennis"].Symbols; // 2 possible values (yes, no)

            tree = new DecisionTree(attributes, classCount);
            ID3Learning id3 = new ID3Learning(tree);

            // Extract symbols from data and train the classifier
            DataTable symbols = codebook.Apply(data);
            inputs = symbols.ToArray<int>("Outlook", "Temperature", "Humidity", "Wind");
            outputs = symbols.ToArray<int>("PlayTennis");

            double error = id3.Run(inputs, outputs);

            Assert.AreEqual(203, tree.Root.Branches.Count);
            Assert.IsTrue(tree.Root.Branches[100].IsLeaf);
            Assert.IsNull(tree.Root.Branches[100].Output);

            for (int i = 0; i < inputs.Length; i++)
            {
                int y = tree.Compute(inputs[i]);
                Assert.AreEqual(outputs[i], y);
            }
        }
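
The test above declares 200 symbol values for Outlook that never occur in the data, so the corresponding branches become leaves with a null Output. A minimal sketch, using only the Branches, IsLeaf and Output members shown above, of counting how many of the 203 root branches ended up undefined; the helper name is arbitrary.

        // Illustrative only: count root branches that are leaves without a class
        // label, i.e. declared values that were never observed during training.
        private static int CountUndefinedBranches(DecisionTree tree)
        {
            int undefined = 0;
            foreach (var branch in tree.Root.Branches)
                if (branch.IsLeaf && branch.Output == null)
                    undefined++;
            return undefined;
        }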
Ejemplo n.º 32
0
        public void LargeSampleTest_WithRepetition()
        {
            Accord.Math.Tools.SetupGenerator(0);

            int[][] dataSamples = Matrix.Random(500, 3, 0, 10).ToInt32().ToArray();
            int[] target = Matrix.Random(500, 1, 0, 2).ToInt32().GetColumn(0);
            DecisionVariable[] features =
            {
                new DecisionVariable("Outlook",      10), 
                new DecisionVariable("Temperature",  10), 
                new DecisionVariable("Humidity",     10), 
            };


            DecisionTree tree = new DecisionTree(features, 2);
            ID3Learning id3Learning = new ID3Learning(tree)
            {
                Rejection = false,
                Join = 2 // each variable may appear up to two times along a decision path
            };

            double error = id3Learning.Run(dataSamples, target);

            int height = tree.GetHeight();
            Assert.AreEqual(6, height);

            foreach (var node in tree)
            {
                if (node.IsLeaf)
                    Assert.IsNotNull(node.Output);
            }

            Assert.IsTrue(error < 0.15);
        }
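
A minimal sketch of the kind of structural inspection the assertions above perform, counting leaf and decision nodes while reusing GetHeight and the node enumeration from the test; the Console reporting and method name are only for illustration, and `using System;` is assumed.

        // Illustrative only: summarize the structure of a trained tree.
        private static void DescribeTree(DecisionTree tree)
        {
            int leaves = 0, decisions = 0;
            foreach (var node in tree)
            {
                if (node.IsLeaf) leaves++;
                else decisions++;
            }

            Console.WriteLine("height = {0}, leaf nodes = {1}, decision nodes = {2}",
                tree.GetHeight(), leaves, decisions);
        }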
Ejemplo n.º 33
0


        public static void CreateMitchellExample(out DecisionTree tree, out int[][] inputs, out int[] outputs)
        {
            DataTable data = new DataTable("Mitchell's Tennis Example");

            data.Columns.Add("Day", "Outlook", "Temperature", "Humidity", "Wind", "PlayTennis");

            data.Rows.Add("D1", "Sunny", "Hot", "High", "Weak", "No");
            data.Rows.Add("D2", "Sunny", "Hot", "High", "Strong", "No");
            data.Rows.Add("D3", "Overcast", "Hot", "High", "Weak", "Yes");
            data.Rows.Add("D4", "Rain", "Mild", "High", "Weak", "Yes");
            data.Rows.Add("D5", "Rain", "Cool", "Normal", "Weak", "Yes");
            data.Rows.Add("D6", "Rain", "Cool", "Normal", "Strong", "No");
            data.Rows.Add("D7", "Overcast", "Cool", "Normal", "Strong", "Yes");
            data.Rows.Add("D8", "Sunny", "Mild", "High", "Weak", "No");
            data.Rows.Add("D9", "Sunny", "Cool", "Normal", "Weak", "Yes");
            data.Rows.Add("D10", "Rain", "Mild", "Normal", "Weak", "Yes");
            data.Rows.Add("D11", "Sunny", "Mild", "Normal", "Strong", "Yes");
            data.Rows.Add("D12", "Overcast", "Mild", "High", "Strong", "Yes");
            data.Rows.Add("D13", "Overcast", "Hot", "Normal", "Weak", "Yes");
            data.Rows.Add("D14", "Rain", "Mild", "High", "Strong", "No");

            // Create a new codification codebook to
            // convert strings into integer symbols
            Codification codebook = new Codification(data);

            DecisionVariable[] attributes =
            {
               new DecisionVariable("Outlook",     codebook["Outlook"].Symbols),     // 3 possible values (Sunny, overcast, rain)
               new DecisionVariable("Temperature", codebook["Temperature"].Symbols), // 3 possible values (Hot, mild, cool)
               new DecisionVariable("Humidity",    codebook["Humidity"].Symbols),    // 2 possible values (High, normal)
               new DecisionVariable("Wind",        codebook["Wind"].Symbols)         // 2 possible values (Weak, strong)
            };

            int classCount = codebook["PlayTennis"].Symbols; // 2 possible values (yes, no)

            tree = new DecisionTree(attributes, classCount);
            ID3Learning id3 = new ID3Learning(tree);

            // Extract symbols from data and train the classifier
            DataTable symbols = codebook.Apply(data);
            inputs = symbols.ToArray<int>("Outlook", "Temperature", "Humidity", "Wind");
            outputs = symbols.ToArray<int>("PlayTennis");

            double error = id3.Run(inputs, outputs);
            Assert.AreEqual(0, error);


            foreach (DataRow row in data.Rows)
            {
                var x = codebook.Translate(row, "Outlook", "Temperature", "Humidity", "Wind");

                int y = tree.Compute(x);

                string actual = codebook.Translate("PlayTennis", y);
                string expected = row["PlayTennis"] as string;

                Assert.AreEqual(expected, actual);
            }

            {
                string answer = codebook.Translate("PlayTennis",
                    tree.Compute(codebook.Translate("Sunny", "Hot", "High", "Strong")));

                Assert.AreEqual("No", answer);
            }
        }
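
A minimal sketch of how a test could consume the helper above: the out parameters hand back the trained tree together with the encoded training set, and since the reported error is zero the tree reproduces every training label. The method name is a hypothetical example.

        // Illustrative only: call the helper and re-classify its own training set.
        public void MitchellExampleUsage()
        {
            DecisionTree tree;
            int[][] inputs;
            int[] outputs;

            CreateMitchellExample(out tree, out inputs, out outputs);

            for (int i = 0; i < inputs.Length; i++)
                Assert.AreEqual(outputs[i], tree.Compute(inputs[i]));
        }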
Ejemplo n.º 34
0
 /// <summary>
 ///   Creates a new decision node.
 /// </summary>
 /// 
 /// <param name="owner">The owner tree for this node.</param>
 /// 
 public DecisionNode(DecisionTree owner)
 {
     Owner = owner;
     Comparison = ComparisonKind.None;
     Branches = new DecisionBranchNodeCollection(this);
 }
Ejemplo n.º 35
0
        public void ArgumentCheck1()
        {
            int[][] samples =
            {
                new [] { 0, 2, 4 },
                new [] { 1, 5, 2 },
                null,
                new [] { 1, 5, 6 },
            };

            int[] outputs = 
            {
                1, 1, 0, 0
            };

            DecisionVariable[] vars = new DecisionVariable[3];
            for (int i = 0; i < vars.Length; i++)
                vars[i] = DecisionVariable.Discrete(i.ToString(), new IntRange(0, 10));

            DecisionTree tree = new DecisionTree(vars, 2);
            ID3Learning teacher = new ID3Learning(tree);

            bool thrown = false;

            try { double error = teacher.Run(samples, outputs); }
            catch (ArgumentNullException) { thrown = true; }

            Assert.IsTrue(thrown);
        }
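
The test above only checks that ID3Learning.Run rejects a null sample with an ArgumentNullException. A hypothetical pre-validation helper (not part of the library) that surfaces the same problem, with the offending index, before training starts might look like this:

        // Illustrative only: fail fast when the training set contains a null sample.
        private static void CheckSamples(int[][] samples)
        {
            if (samples == null)
                throw new ArgumentNullException("samples");

            for (int i = 0; i < samples.Length; i++)
                if (samples[i] == null)
                    throw new ArgumentNullException("samples",
                        "The sample at index " + i + " is null.");
        }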
Ejemplo n.º 36
0
 /// <summary>
 ///   Creates a new decision node.
 /// </summary>
 ///
 /// <param name="owner">The owner tree for this node.</param>
 ///
 public DecisionNode(DecisionTree owner)
 {
     Owner      = owner;
     Comparison = ComparisonKind.None;
 }
Ejemplo n.º 37
0
 /// <summary>
 ///   Creates a new decision node.
 /// </summary>
 ///
 /// <param name="owner">The owner tree for this node.</param>
 ///
 public DecisionNode(DecisionTree owner)
 {
     Owner      = owner;
     Comparison = ComparisonKind.None;
     Branches   = new DecisionBranchNodeCollection(this);
 }
Ejemplo n.º 38
0
 /// <summary>
 ///   Initializes a new instance of the <see cref="DecisionTreeExpressionCreator"/> class.
 /// </summary>
 ///
 /// <param name="tree">The decision tree.</param>
 ///
 internal DecisionTreeExpressionCreator(DecisionTree tree)
 {
     this.tree = tree;
 }
Ejemplo n.º 39
-1
        /// <summary>
        ///   Creates and learns a Decision Tree to recognize the
        ///   previously loaded dataset using the current settings.
        /// </summary>
        /// 
        private void btnCreate_Click(object sender, EventArgs e)
        {
            if (dgvLearningSource.DataSource == null)
            {
                MessageBox.Show("Please load some data first.");
                return;
            }

            // Finish and save any pending edits in the data grid
            dgvLearningSource.EndEdit();



            // Creates a matrix from the entire source data table
            double[,] table = (dgvLearningSource.DataSource as DataTable).ToMatrix(out columnNames);

            // Get only the input vector values (first two columns)
            double[][] inputs = table.GetColumns(0, 1).ToArray();

            // Get only the output labels (last column)
            int[] outputs = table.GetColumn(2).ToInt32();


            // Specify the input variables
            DecisionVariable[] variables = 
            {
                new DecisionVariable("x", DecisionVariableKind.Continuous),
                new DecisionVariable("y", DecisionVariableKind.Continuous),
            };

            // Create the discrete Decision tree
            tree = new DecisionTree(variables, 2);

            // Create the C4.5 learning algorithm
            C45Learning c45 = new C45Learning(tree);

            // Learn the decision tree using C4.5
            double error = c45.Run(inputs, outputs);

            // Show the learned tree in the view
            decisionTreeView1.TreeSource = tree;


            // Get the ranges for each variable (X and Y)
            DoubleRange[] ranges = Matrix.Range(table, 0);

            // Generate a grid of points covering the input space
            // (the Cartesian product of the two value ranges)
            double[][] map = Matrix.CartesianProduct(
                Matrix.Interval(ranges[0], 0.05),
                Matrix.Interval(ranges[1], 0.05));

            // Classify each point in the Cartesian coordinate system
            double[] result = map.Apply(tree.Compute).ToDouble();
            double[,] surface = map.ToMatrix().InsertColumn(result);

            CreateScatterplot(zedGraphControl2, surface);

            lbStatus.Text = "Learning finished! Click the other tabs to explore results!";
        }
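
A minimal sketch, assuming the same trained `tree` field over the two continuous inputs x and y, of classifying a single point exactly as each grid cell of the decision surface is classified above; the method name and coordinates are arbitrary examples.

        // Illustrative only: classify one (x, y) point with the trained tree,
        // the same call used per grid point when building the decision surface.
        private int ClassifyPoint(double x, double y)
        {
            double[] point = { x, y };   // hypothetical coordinates
            return tree.Compute(point);
        }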