Example #1
0
 /// <summary>
 /// Recursively appends a textual dump of the decision tree rooted at
 /// <paramref name="root"/> to <paramref name="output"/>, one line per node.
 /// </summary>
 /// <param name="root">Node whose subtree is printed.</param>
 /// <param name="output">Builder to append to; created when null (top-level call).</param>
 /// <param name="indentation">Tab prefix for the current depth.</param>
 /// <param name="level">Recursion depth (informational; not printed).</param>
 /// <returns>The builder passed in (or freshly created), for chaining.</returns>
 private static StringBuilder RecursiveTreePrintBuilder(DecisionTree.DecisionNode root, StringBuilder output, string indentation = "", int level = 0)
 {
     if (output == null)
     {
         output = new StringBuilder();
     }
     else
     {
         // Non-root call: one more level of indentation per recursion step.
         indentation += "\t";
     }
     if (root.Actor == null)
     {
         output.AppendLine("ROOT");
         indentation += "\t";
     }
     if (root.DaughterNodes == null)
     {
         // Leaf: no daughters to iterate, print the node itself.
         indentation += "\t";
         output.AppendLine(indentation + "Leaf " + root.FeatureType + " " + root.Operator + " " + root.incomingEgde + " " + root.Action.StateName + " " + root.Actor);
     }
     else
     {
         foreach (DecisionTree.DecisionNode d in root.DaughterNodes)
         {
             output.AppendLine(indentation + Array.IndexOf(root.DaughterNodes, d).ToString() + " " + d.FeatureType + " " + d.Operator + " " + d.incomingEgde + " " + d.Action.StateName + " " + d.Actor);
             // BUG FIX: was ++level, which mutated the shared counter so each
             // successive sibling was recorded one level deeper than its peers.
             RecursiveTreePrintBuilder(d, output, indentation, level + 1);
         }
     }
     return(output);
 }
Example #2
0
        /// <summary>
        /// Builds a decision tree from the given training examples: stores the
        /// supplied node as the root, remembers the feature set, then learns
        /// the tree below the root.
        /// </summary>
        public DecisionTree(TrainingExample[] examples, Feature[] features, DecisionNode decisionNode)
        {
            Features = features;
            rootNode = decisionNode;

            TreeLearner(examples, features, decisionNode);
        }
Example #3
0
        /// <summary>
        /// Depth-first walk from <paramref name="startNode"/>: recurses through
        /// every non-null daughter, and for a leaf with a probability table
        /// logs the node plus each state/probability pair.
        /// </summary>
        public void DecisionTreePrinter(DecisionNode startNode)
        {
            if (startNode.DaughterNodes == null)
            {
                // Leaf: report its class-probability table, if it has one.
                if (startNode.Probability == null)
                {
                    return;
                }
                Debug.Log("Node: " + startNode.ToString());
                foreach (var pair in startNode.Probability)
                {
                    Debug.Log("Probability of State: " + pair.Key + " : " + pair.Value);
                }
                return;
            }

            // Internal node: visit every populated daughter.
            foreach (DecisionNode daughter in startNode.DaughterNodes)
            {
                if (daughter != null)
                {
                    DecisionTreePrinter(daughter);
                }
            }
        }
Example #4
0
 /// <summary>
 /// Creates a decision node. Internal nodes carry a question plus two
 /// branches; calling with all defaults produces a placeholder node.
 /// Predictions is always reset to null here.
 /// </summary>
 public DecisionNode(
     Question question        = null,
     DecisionNode trueBranch  = null,
     DecisionNode falseBranch = null)
 {
     Predictions = null;
     Question    = question;
     FalseBranch = falseBranch;
     TrueBranch  = trueBranch;
 }
Example #5
0
        /// <summary>
        /// Initializes the ID3 tree: records every column name of the training
        /// table, then builds the tree and keeps its root.
        /// </summary>
        public DecisionTreeId3(DataTable trainingData)
        {
            // Collect column names first, then publish the finished list.
            var names = new List <string>();
            foreach (DataColumn col in trainingData.Columns)
            {
                names.Add(col.ColumnName);
            }
            ColumnsName = names;

            Root = BuildTree(trainingData);
        }
Example #6
0
        /// <summary>
        /// Refreshes the feature data, dumps the tree to disk for inspection,
        /// then walks the tree and returns the probability table of the
        /// matching leaf — or null when no leaf is found.
        /// </summary>
        public Dictionary <string, float> Query()
        {
            // Refresh the data in the features array before searching.
            GetUpdatedData();
            // Dump the tree so the query can be inspected offline.
            Debug.Log("Printing Tree to file: " + Application.persistentDataPath);
            XMLManager.PrintTree(this);

            DecisionNode leaf = Search(Features.ToList(), rootNode);
            return leaf?.Probability;
        }
Example #7
0
        /// <summary>
        /// Routes a data row down the tree: at each internal node the node's
        /// question is matched against the row to choose the true- or
        /// false-branch; recursion stops at a leaf, which is returned.
        /// </summary>
        private static DecisionNode Classify(Dictionary <string, string> dataRow, DecisionNode node)
        {
            if (node.IsLeaf())
            {
                return(node);
            }

            // Pick the branch selected by this node's question and recurse.
            DecisionNode next = node.Question.IsMatch(dataRow)
                ? node.TrueBranch
                : node.FalseBranch;
            return(Classify(dataRow, next));
        }
Example #8
0
        /// <summary>
        /// Recursively writes the tree to the debug log: leaves in green, each
        /// internal node as its question followed by the true and false
        /// subtrees, indented two extra spaces per level.
        /// </summary>
        public static void PrintTree(DecisionNode node, string spacing = "")
        {
            if (node.IsLeaf())
            {
                DebugLog(spacing + node, ConsoleColor.Green);
                return;
            }

            DebugLog(spacing + node.Question);

            // Children are printed one indent level deeper.
            string childSpacing = spacing + "  ";

            DebugLog(spacing + "--> True:");
            PrintTree(node.TrueBranch, childSpacing);

            DebugLog(spacing + "--> False:");
            PrintTree(node.FalseBranch, childSpacing);
        }
Example #9
0
        /// <summary>
        /// Formats a leaf's predictions as a brace-wrapped, comma-separated
        /// list, each class shown as its share of the leaf's total counts in
        /// percent.
        /// </summary>
        private static string PrintLeaf(DecisionNode leaf)
        {
            var predictions = leaf.Predictions;
            // * 1.0 promotes the sum to double so the shares below are fractional.
            var total       = predictions.Values.Sum() * 1.0;

            // One entry per predicted class, e.g.  'Apple: 60%
            var entries = predictions.Select(
                prediction => "\'" + prediction.Key + ": " + (((double)prediction.Value / total) * 100) + "% ");

            return("{" + string.Join(",", entries) + "}");
        }
        /// <summary>
        /// Click handler: builds the fuzzy decision tree over the loaded data
        /// set and renders it into treeView1, with a per-node image showing
        /// the class distribution.
        /// </summary>
        private void btnBuildTree_Click(object sender, EventArgs e)
        {
            // The root covers every training row, so its index set is 0..N-1.
            List <int> listIndexSet = new List <int>();

            for (int i = 0; i < outputs.Length; i++)
            {
                listIndexSet.Add(i);
            }
            // Class distribution and entropy of the full data set.
            Dictionary <string, double> probability = Utilities.ProbabilityOfClass(new HashSet <string>(outputs).ToList(), listIndexSet, outputs);
            double infoSet = Utilities.Info(probability.Values.ToList());

            // Icon visualising the root's class percentages.
            Bitmap pic = Utilities.CreatePicturePercent(colorForNodes, probability);

            // NOTE(review): a fresh ImageList is created on every click; prior
            // images are discarded — presumably intentional for a rebuild.
            treeView1.ImageList = new ImageList();
            treeView1.ImageList.Images.Add(pic);

            DecisionNode root = new DecisionNode("ALL", "", listIndexSet, infoSet, pictureIndex, probability);

            pictureIndex++;
            BuildSubTree(root, justificationOfFuzzySet);

            treeView1.Nodes.Add(root);
            treeView1.Update();
        }
Example #11
0
        /*
         * FIXME We do not check each tree for each feature but find it in a first fit method
         * This results in inaccurate behaviour but avoids exponential running time...
         */

        /// <summary>
        /// Walks the decision tree by matching features against daughter-node
        /// edges (first fit — see FIXME above). Each feature used for a
        /// descent is removed from <paramref name="features"/>. Returns the
        /// node reached when no further descent is possible.
        /// </summary>
        private DecisionNode Search(List <Feature> features, DecisionNode node)
        {
            // Descend only while features remain and the node has daughters.
            if (features.Count >= 1 && node.DaughterNodes != null && node.DaughterNodes.Count() >= 1)
            {
                foreach (Feature f in features)
                {
                    //if(node.TestValue.ID == f.ID)
                    //{
                    for (int i = 0; i < node.DaughterNodes.Count(); ++i)
                    {
                        if (f.TypeOfFeature == node.DaughterNodes[i].FeatureType && f.Actor == node.DaughterNodes[i].Actor)
                        {
                            //f.OperatorSign
                            // Re-point i at the daughter whose edge value is
                            // numerically closest to the feature value.
                            // NOTE(review): float.Parse uses the current culture;
                            // '.'-decimal edge strings may fail or misparse on
                            // ','-decimal locales — consider InvariantCulture.
                            for (int j = 0; j < node.DaughterNodes.Count(); ++j)
                            {
                                if (Math.Abs(float.Parse(node.DaughterNodes[j].incomingEgde) - float.Parse(f.FeatureValue)) < Math.Abs(float.Parse(node.DaughterNodes[i].incomingEgde) - float.Parse(f.FeatureValue)) && i != j)
                                {
                                    i = j;
                                }
                            }
                            // Apply the chosen daughter's operator; on a match,
                            // consume the feature and descend. Removing from
                            // `features` inside the foreach is safe only because
                            // we return immediately afterwards.
                            switch (node.DaughterNodes[i].Operator)
                            {
                            case ">":
                                if (float.Parse(f.FeatureValue) > float.Parse(node.DaughterNodes[i].incomingEgde))
                                {
                                    features.Remove(f);
                                    return(Search(features, node.DaughterNodes[i]));
                                }
                                break;

                            case "<":
                                if (float.Parse(f.FeatureValue) < float.Parse(node.DaughterNodes[i].incomingEgde))
                                {
                                    features.Remove(f);
                                    return(Search(features, node.DaughterNodes[i]));
                                }
                                break;

                            case ">=":
                                if (float.Parse(f.FeatureValue) >= float.Parse(node.DaughterNodes[i].incomingEgde))
                                {
                                    features.Remove(f);
                                    return(Search(features, node.DaughterNodes[i]));
                                }
                                break;

                            case "<=":
                                if (float.Parse(f.FeatureValue) <= float.Parse(node.DaughterNodes[i].incomingEgde))
                                {
                                    features.Remove(f);
                                    return(Search(features, node.DaughterNodes[i]));
                                }
                                break;

                            case "==":
                                // NOTE(review): exact float equality — matches
                                // only when both strings parse to the identical
                                // value; confirm this is intended.
                                if (float.Parse(f.FeatureValue) == float.Parse(node.DaughterNodes[i].incomingEgde))
                                {
                                    features.Remove(f);
                                    return(Search(features, node.DaughterNodes[i]));
                                }
                                break;
                            }
                        }
                    }
                    //}
                }
            }
            else
            {
                // Dead end (no features left or a leaf): current node is the answer.
                return(node);
            }
            Debug.Log("WARNING - Undefined Behaviour, defaulting to current State");
            return(node);
        }
Example #12
0
        /// <summary>
        /// ID3-style learner: picks the feature with the highest information
        /// gain, splits the examples on it, fills in this node's daughters and
        /// recurses into each non-empty subset with the used feature removed.
        /// </summary>
        /// <param name="examples">Training examples reaching this node.</param>
        /// <param name="features">Features still available for splitting.</param>
        /// <param name="decisionNode">Node to populate.</param>
        private void TreeLearner(TrainingExample[] examples, Feature[] features, DecisionNode decisionNode)
        {
            // Nothing to learn from: no examples or no features left.
            if (examples.Length == 0)
            {
                return;
            }
            if (features.Length == 0)
            {
                return;
            }

            double entropy = Entropy(examples);
            // A pure node (entropy 0) needs no further splitting.
            if (entropy <= 0)
            {
                return;
            }

            int     exampleCount        = examples.Length;
            double  bestInformationGain = 0;
            Feature bestSplitFeature    = null;
            List <TrainingExample>[] bestSets = null;
            foreach (Feature f in features)
            {
                List <TrainingExample>[] sets = SplitByAttribute(examples, f);
                double overallEntropy         = EntropyOfSets(sets, exampleCount);
                double informationGain        = entropy - overallEntropy;
                if (informationGain > bestInformationGain)
                {
                    bestInformationGain = informationGain;
                    bestSplitFeature    = f;
                    bestSets            = sets;
                }
            }

            // BUG FIX: when no feature yields positive information gain,
            // bestSplitFeature stays null and the dereferences below would
            // throw NullReferenceException. Treat the node as a leaf instead.
            if (bestSplitFeature == null)
            {
                return;
            }
            decisionNode.TestValue = bestSplitFeature;

            // Remaining features = all features minus the one we split on.
            // (Removed the original's throwaway Feature[] allocation that was
            // immediately overwritten by ToArray().)
            List <Feature> lst = features.OfType <Feature>().ToList();
            lst.Remove(bestSplitFeature);
            Feature[] newFeatures = lst.ToArray();

            // Instantiate the daughter array of the current decisionNode to be
            // equal in size to the split feature's domain.
            decisionNode.CreateDaughterNodes(bestSplitFeature.FeatureDomain.Count);

            for (int i = 0; i < bestSets.Count(); i++)
            {
                if (bestSets[i].Count == 0)
                {
                    // No training examples for this domain value: create a
                    // leaf that inherits the parent's action.
                    decisionNode.DaughterNodes[i] = new DecisionNode(decisionNode);
                    decisionNode.DaughterNodes[i].incomingEgde = bestSplitFeature.FeatureDomain[i];
                    decisionNode.DaughterNodes[i].Action       = decisionNode.Action;
                    decisionNode.DaughterNodes[i].Operator     = bestSplitFeature.OperatorSign;
                    decisionNode.DaughterNodes[i].FeatureType  = bestSplitFeature.TypeOfFeature;
                    decisionNode.DaughterNodes[i].Actor        = bestSplitFeature.Actor;
                }
                else
                {
                    // Create a daughter labelled with the subset's most common
                    // action, then recurse into the subset.
                    decisionNode.DaughterNodes[i] = new DecisionNode(decisionNode);
                    decisionNode.DaughterNodes[i].incomingEgde = bestSplitFeature.FeatureDomain[i];

                    List <State> actions = new List <State>();
                    foreach (TrainingExample t in bestSets[i])
                    {
                        actions.Add(t.Action);
                    }

                    // Majority vote over the subset's actions.
                    State mostCommonAction = (from j in actions
                                              group j by j into grp
                                              orderby grp.Count() descending
                                              select grp.Key).First();
                    decisionNode.DaughterNodes[i].Action      = mostCommonAction;
                    decisionNode.DaughterNodes[i].FeatureType = bestSplitFeature.TypeOfFeature;
                    decisionNode.DaughterNodes[i].Probability = GetProbability(actions);
                    decisionNode.DaughterNodes[i].Operator    = bestSplitFeature.OperatorSign;
                    decisionNode.DaughterNodes[i].Actor       = bestSplitFeature.Actor;
                    TreeLearner(bestSets[i].ToArray(), newFeatures, decisionNode.DaughterNodes[i]);
                }
            }
        }
Example #13
0
 /// <summary>
 /// Creates an empty node attached to the given parent. Daughters are
 /// allocated later. (The original carried a HACK note that the domain
 /// size is only 2.)
 /// </summary>
 public DecisionNode(DecisionNode parent)
 {
     Parent = parent;
 }
        /// <summary>
        /// Recursively grows the fuzzy decision tree below <paramref name="rt"/>:
        /// computes the gain ratio of every remaining fuzzy attribute, attaches
        /// the children of the best-scoring attribute (subject to a pre-pruning
        /// check), then recurses into each impure child with that attribute
        /// removed.
        /// </summary>
        /// <param name="rt">Node to expand; its ListIndexElements are the data rows it covers.</param>
        /// <param name="fuzzySets">attribute -> rank -> (crisp value -> membership degree).</param>
        private void BuildSubTree(DecisionNode rt, Dictionary <string, Dictionary <string, Dictionary <double, double> > > fuzzySets)
        {
            List <DecisionNode> ListNodeXk = new List <DecisionNode>();
            List <double>       GainRatio  = new List <double>();
            List <double>       Gain       = new List <double>();
            List <double>       SplitInfo  = new List <double>();

            // One accumulator slot per attribute.
            for (int i = 0; i < fuzzySets.Count; i++)
            {
                GainRatio.Add(0.0);
                Gain.Add(0.0);
                SplitInfo.Add(0.0);
            }
            int k = 0;

            foreach (var atribut in fuzzySets)
            {
                int j = Array.FindIndex(inputs, s => s.Equals(atribut.Key));

                foreach (var rank in atribut.Value)
                {
                    // Candidate child: the rows of rt whose membership in this
                    // rank's fuzzy set is at least 0.5.
                    DecisionNode tmp = new DecisionNode(atribut.Key, rank.Key, new List <int>(), 0.0, -1, new Dictionary <string, double>());
                    foreach (var i in rt.ListIndexElements)
                    {
                        double val = conformityStringToDouble[inputs[j]][data[i, j]];
                        if (rank.Value[val] >= 0.5)
                        {
                            tmp.ListIndexElements.Add(i);
                        }
                    }
                    tmp.ProbabilityClasses = Utilities.ProbabilityOfClass(new HashSet <string>(outputs).ToList(), tmp.ListIndexElements, outputs);
                    tmp.Entropy            = Utilities.Info(tmp.ProbabilityClasses.Values.ToList());

                    // Child's weight = fraction of the parent's rows it covers.
                    double fraq = (double)tmp.ListIndexElements.Count / (double)rt.ListIndexElements.Count;
                    Gain[k] += fraq * tmp.Entropy;
                    // BUG FIX: skip empty children — fraq == 0 made Math.Log(fraq)
                    // -Infinity and poisoned SplitInfo with NaN. By the standard
                    // 0*log(0) == 0 convention their contribution is zero.
                    // (Also removed the original's unused local `lg`.)
                    if (fraq > 0)
                    {
                        SplitInfo[k] -= fraq * (Math.Log(fraq) / Math.Log(2));
                    }
                    ListNodeXk.Add(tmp);
                }
                // Turn the accumulated weighted child entropy into information
                // gain, then normalise by split info to get the gain ratio.
                Gain[k]      = rt.Entropy - Gain[k];
                GainRatio[k] = Gain[k] / SplitInfo[k];
                k++;
            }
            // Pre-pruning: split only if more than N candidate children each
            // hold more than E elements.
            int N             = 2;
            int E             = 2;
            int elemMoreThenE = 0;

            foreach (var item in ListNodeXk)
            {
                if (item.ListIndexElements.Count > E)
                {
                    elemMoreThenE++;
                }
            }
            if (elemMoreThenE > N)
            {
                // Attribute with the best gain ratio wins the split.
                int Xmax = inputs.ToList().IndexOf(fuzzySets.Keys.ToList()[GainRatio.IndexOf(GainRatio.Max())]);
                foreach (var nd in ListNodeXk)
                {
                    // Attach only the children belonging to the winning attribute.
                    if (nd.Atribute == inputs[Xmax])
                    {
                        Bitmap pic = Utilities.CreatePicturePercent(colorForNodes, nd.ProbabilityClasses);
                        treeView1.ImageList.Images.Add(pic);
                        nd.ImageIndex = pictureIndex;
                        pictureIndex++;
                        rt.Nodes.Add(nd);
                    }
                }
                // Remaining attributes = all minus the one just used.
                Dictionary <string, Dictionary <string, Dictionary <double, double> > > newFuzzySet = new Dictionary <string, Dictionary <string, Dictionary <double, double> > >();
                foreach (var item in fuzzySets)
                {
                    if (item.Key != inputs[Xmax])
                    {
                        newFuzzySet[item.Key] = item.Value;
                    }
                }
                if (newFuzzySet.Count > 0)
                {
                    // Recurse only into children that are not already pure
                    // (a class probability of exactly 1.0 marks purity).
                    foreach (DecisionNode curNod in rt.Nodes)
                    {
                        if (!curNod.ProbabilityClasses.Values.ToList().Contains(1.0))
                        {
                            BuildSubTree(curNod, newFuzzySet);
                        }
                    }
                }
            }
        }