public void SetDecisionSet(DecisionSet set)
    {
        // Remember the active decision set and align this action's
        // trigger time with the time specified by the set itself.
        decisionSet = set;
        SetTimeOfAction(set.Time);
    }
示例#2
0
        public void IrisDatasetTest()
        {
            // Load the raw iris data: one sample per line, comma-separated fields.
            string[][] rows = Resources.iris_data.Split(
                new[] { '\n' }, StringSplitOptions.RemoveEmptyEntries)
                              .Apply(line => line.Split(','));

            // The first four fields of each row are the flower measurements.
            double[][] inputs = new double[rows.Length][];
            for (int r = 0; r < inputs.Length; r++)
            {
                inputs[r] = rows[r].First(4).Convert(s => Double.Parse(s, System.Globalization.CultureInfo.InvariantCulture));
            }

            // The fifth field is the species name; encode it as integer labels.
            string[] labels = rows.GetColumn(4);
            Codification codebook = new Codification("Label", labels);
            int[] outputs = codebook.Translate("Label", labels);

            // All four input variables are continuous measurements.
            DecisionVariable[] features =
            {
                new DecisionVariable("sepal length", DecisionVariableKind.Continuous),
                new DecisionVariable("sepal width",  DecisionVariableKind.Continuous),
                new DecisionVariable("petal length", DecisionVariableKind.Continuous),
                new DecisionVariable("petal width",  DecisionVariableKind.Continuous),
            };

            // Induce a C4.5 tree for as many classes as the codebook found.
            DecisionTree tree = new DecisionTree(features, codebook.Columns[0].Symbols);
            C45Learning teacher = new C45Learning(tree);

            double error = teacher.Run(inputs, outputs);
            Assert.AreEqual(0.026666666666666668, error, 1e-10);

            // The rule set extracted from the tree must classify just as well.
            DecisionSet rules = tree.ToRules();
            double newError = ComputeError(rules, inputs, outputs);
            Assert.AreEqual(0.026666666666666668, newError, 1e-10);

            string ruleText = rules.ToString(codebook,
                                             System.Globalization.CultureInfo.InvariantCulture);

            // TODO: implement this assertion properly, actually checking
            // the text contents once the feature is completely finished.
            Assert.AreEqual(596, ruleText.Length);

            string expected = @"0 =: (petal length <= 2.45)
1 =: (petal length > 2.45) && (petal width <= 1.75) && (sepal length <= 7.05) && (sepal width <= 2.85)
1 =: (petal length > 2.45) && (petal width <= 1.75) && (sepal length <= 7.05) && (sepal width > 2.85)
1 =: (petal length > 2.45) && (petal width > 1.75) && (sepal length <= 5.95) && (sepal width > 3.05)
2 =: (petal length > 2.45) && (petal width <= 1.75) && (sepal length > 7.05)
2 =: (petal length > 2.45) && (petal width > 1.75) && (sepal length > 5.95)
2 =: (petal length > 2.45) && (petal width > 1.75) && (sepal length <= 5.95) && (sepal width <= 3.05)
";

            Assert.AreEqual(expected, ruleText);
        }
    protected void btnchangepassword0_Click(object sender, EventArgs e)
    {
        // Trains a C4.5 decision tree on the "dataset" table and shows the
        // resubstitution accuracy (in percent) in Label1.
        // Fix: the original allocated a DataTable only to overwrite it
        // immediately, and computed a rule string it never used.
        DataTable data = f1.getrecord1("select * from dataset");
        if (data.Rows.Count > 0)
        {
            // Feature columns are the numeric soil measurements.
            double[][] inputs = data.ToJagged <double>("n", "p", "k", "ph", "ec");

            // The class label is the "fertility" column, read as text.
            string[] labels = new string[data.Rows.Count];
            for (int i = 0; i < data.Rows.Count; i++)
            {
                labels[i] = data.Rows[i]["fertility"].ToString();
            }

            // Encode the text labels as integer class indices.
            var   codebook = new Codification("fertility", labels);
            int[] outputs  = codebook.Translate("fertility", labels);

            // Learn the tree and evaluate it on the training data itself.
            C45Learning teacher   = new C45Learning();
            var         tree      = teacher.Learn(inputs, outputs);
            int[]       predicted = tree.Decide(inputs);

            // NOTE(review): classes is hard-coded to 3 — confirm the
            // fertility column always has exactly three distinct labels.
            var    cm1             = new GeneralConfusionMatrix(classes: 3, expected: outputs, predicted: predicted);
            double accuracyPercent = cm1.Accuracy * 100;
            Label1.Text = accuracyPercent.ToString();
        }
    }
示例#4
0
        private void ComputeInference()
        {
            // Builds a C4.5 decision tree from the trade table and displays
            // the induced rule set in the inferred-rules text box.
            // Fix: the original also materialized codebook.Apply(tradeTable)
            // into an unused local; that dead work has been removed.
            var codebook = new Codification();
            codebook.Learn(tradeTable);

            string[]   inputNames = new[] { "Strike", "MarketPrice", "Notional" };
            double[][] inputs     = tradeTable.ToJagged(inputNames);
            int[]      outputs    = tradeTable.ToArray <int>("Result");

            // Describe the input variables to the learner via the codebook.
            var teacher = new C45Learning()
            {
                Attributes = DecisionVariable.FromCodebook(codebook, inputNames)
            };

            DecisionTree tree = teacher.Learn(inputs, outputs);

            // Resubstitution error — kept for inspection under a debugger.
            int[]  predicted = tree.Decide(inputs);
            double error     = new ZeroOneLoss(outputs).Loss(predicted);

            DecisionSet rules = tree.ToRules();
            textBoxInferredRules.Text = rules.ToString();
        }
        public void Learn(List <BaseAttribute <T> > attributeColumns, BaseAttribute <T> outputAttributeColumn)
        {
            // Train the ID3 tree on the current Data table, using the given
            // attribute columns as inputs and outputAttributeColumn as label.
            ID3Learning id3learning = new ID3Learning(DecisionTree);

            // Encode the categorical training data into integer symbols.
            DataTable symbols = Codebook.Apply(Data);

            string[] columnNames = attributeColumns
                                   .Select(attribute => attribute.Name)
                                   .ToArray();

            int[][] inputs  = symbols.ToJagged <int>(columnNames);
            int[]   outputs = symbols.ToJagged <int>(outputAttributeColumn.Name).GetColumn(0);

            DecisionTree = id3learning.Learn(inputs, outputs);

            // Resubstitution error, written to the debug log for inspection.
            double error = new ZeroOneLoss(outputs).Loss(DecisionTree.Decide(inputs));
            Debug.WriteLine(error);

            // Convert the tree into an equivalent rule set and log a readable
            // version, decoding symbols back to their original labels.
            DecisionSet rules = DecisionTree.ToRules();
            string ruleText = rules.ToString(Codebook as Codification <string>, outputAttributeColumn.Name,
                                             System.Globalization.CultureInfo.InvariantCulture);
            Debug.WriteLine(ruleText);
        }
示例#6
0
 public void StartDecisionProcess(DecisionSet decisionSet)
 {
     // Guard clause: warn and bail when no set was supplied,
     // otherwise hand the set over to the decision pipeline.
     if (decisionSet == null)
     {
         Debug.LogWarning("Decision set is NULL");
         return;
     }

     OnDecisionProcessStart(decisionSet);
 }
示例#7
0
        /*
         * Takes a DataTable with the training data, translates it to integer
         * symbols, and trains an ID3 decision tree on it.
         * The column at position `index` is the one to be predicted (the
         * original comment wrongly said it was always the last column).
         */
        public void Train(int index)
        {
            DataTable dataTable = this.theData;

            // Collect every column name; the one at `index` becomes the
            // prediction target, the rest are the input attributes.
            // (Fix: replaced the non-generic ArrayList with a typed list,
            // removing the casts; fully qualified so no new using is needed.)
            var inputNames = new System.Collections.Generic.List <string>();
            foreach (DataColumn column in dataTable.Columns)
            {
                inputNames.Add(column.ColumnName);
            }
            this.toPredict = inputNames[index];                                              // The column to predict
            inputNames.RemoveAt(index);                                                      // the input columns (predict column removed)
            this.inputNamesArr = inputNames.ToArray();

            // Using Accord.Statistics.Filters to present the data as integers,
            // as integers are more efficient; null/missing values default to 0.
            this.codebook = new Codification(dataTable)
            {
                DefaultMissingValueReplacement = 0
            };
            DataTable symbols = codebook.Apply(dataTable);                                   // applying our data to the codebook

            int[][] inputs  = symbols.ToJagged <int>(inputNamesArr);                         // The conversion to ints
            int[]   outputs = symbols.ToArray <int>(toPredict);                              // The conversion to ints

            // The ID3 algorithm, with one decision attribute per input column,
            // derived from the codebook.
            var id3 = new ID3Learning()
            {
                Attributes = DecisionVariable.FromCodebook(codebook, inputNamesArr)
            };

            this.tree = id3.Learn(inputs, outputs);                                          // Learn using the inputs and outputs defined above

            // Render the learned tree as a readable set of rules and log it.
            DecisionSet treeRules = tree.ToRules();

            ruleText = treeRules.ToString(codebook, toPredict,
                                          System.Globalization.CultureInfo.InvariantCulture);
            Debug.WriteLine(ruleText);
        }
示例#8
0
        public void new_method_create_tree()
        {
            // Load the iris samples: four numeric features plus a class label.
            string[][] text = Resources.iris_data.Split(new[] { '\n' }, StringSplitOptions.RemoveEmptyEntries).Apply(x => x.Split(','));

            double[][] inputs = text.GetColumns(0, 1, 2, 3).To <double[][]>();

            string[] labels = text.GetColumn(4);

            var codebook = new Codification("Output", labels);

            int[] outputs = codebook.Translate("Output", labels);

            // And we can use the C4.5 for learning:
            var teacher = new C45Learning();

            // And finally induce the tree:
            var tree = teacher.Learn(inputs, outputs);

            // To get the estimated class labels, we can use
            int[] predicted = tree.Decide(inputs);

            // And the classification error can be computed as
            // (fix: reuse the predictions above instead of calling Decide again)
            double error = new ZeroOneLoss(outputs) // 0.0266
            {
                Mean = true
            }.Loss(predicted);

            // Moreover, we may decide to convert our tree to a set of rules:
            DecisionSet rules = tree.ToRules();

            // And using the codebook, we can inspect the tree reasoning:
            string ruleText = rules.ToString(codebook, "Output",
                                             System.Globalization.CultureInfo.InvariantCulture);

            // The output is:
            string expected = @"Iris-setosa =: (2 <= 2.45)
Iris-versicolor =: (2 > 2.45) && (3 <= 1.75) && (0 <= 7.05) && (1 <= 2.85)
Iris-versicolor =: (2 > 2.45) && (3 <= 1.75) && (0 <= 7.05) && (1 > 2.85)
Iris-versicolor =: (2 > 2.45) && (3 > 1.75) && (0 <= 5.95) && (1 > 3.05)
Iris-virginica =: (2 > 2.45) && (3 <= 1.75) && (0 > 7.05)
Iris-virginica =: (2 > 2.45) && (3 > 1.75) && (0 > 5.95)
Iris-virginica =: (2 > 2.45) && (3 > 1.75) && (0 <= 5.95) && (1 <= 3.05)
";

            Assert.AreEqual(0.026666666666666668, error, 1e-10);

            double newError = ComputeError(rules, inputs, outputs);

            Assert.AreEqual(0.026666666666666668, newError, 1e-10);
            Assert.AreEqual(expected, ruleText);
        }
示例#9
0
    private void CleanupDecisionVisuals()
    {
        // Tear down the decision UI, resume the simulation, and forget
        // the decision set that was being displayed.
        UIController ui = Controller.GetSimulationComponent <UIController>();
        ui.ClearActiveButtons();

        Controller.ResumeSimulation();

        activeDecisionSet = null;
    }
示例#10
0
        public double ComputeError(DecisionSet rules, double[][] inputs, int[] outputs)
        {
            // Fraction of samples whose rule-based decision disagrees with
            // the expected label.
            // Fix: an empty sample set now returns 0 instead of NaN (0.0/0).
            if (inputs.Length == 0)
            {
                return(0.0);
            }

            int miss = 0;

            for (int i = 0; i < inputs.Length; i++)
            {
                if (rules.Compute(inputs[i]) != outputs[i])
                {
                    miss++;
                }
            }

            return((double)miss / inputs.Length);
        }
示例#11
0
        public void LargeRunTest()
        {
            // Build a reference tree together with its training data, then
            // check that the extracted rule set classifies it identically,
            // and that simplification trades a known amount of accuracy.
            double[][]   trainInputs;
            int[]        trainOutputs;
            DecisionTree tree = createTree(out trainInputs, out trainOutputs);

            var rules      = DecisionSet.FromDecisionTree(tree);
            var simplifier = new Simplification(rules);

            double initialError = simplifier.ComputeError(trainInputs, trainOutputs);
            Assert.AreEqual(0, initialError);

            double simplifiedError = simplifier.Compute(trainInputs, trainOutputs);
            Assert.AreEqual(0.067515432098765427, simplifiedError);
        }
示例#12
0
    private void OnDecisionProcessStart(DecisionSet decisionSet)
    {
        Debug.Log("Decision Process started");

        // Track the set being decided on, run the decision flow, then halt
        // the simulation until a choice has been made.
        activeDecisionSet = decisionSet;
        DecisionProcess();
        Controller.PauseSimulation();

        // Let every other component know a decision is now in progress.
        Message startMessage = new Message((int)MessageDestination.DECISION_START, "", activeDecisionSet);
        Controller.PropagateMessage(startMessage);
    }
示例#13
0
        public void LargeRunTest2()
        {
            // Fix the RNG seed so the random matrix and the label noise
            // below are reproducible across runs.
            Accord.Math.Random.Generator.Seed = 0;

            // 1000 samples of 10 discrete attributes, each in [0, 10).
            int[,] random = Matrix.Random(1000, 10, 0.0, 10.0).ToInt32();

            int[][] samples = random.ToJagged();
            int[]   outputs = new int[1000];

            // Label: class 1 when the first attribute exceeds 5, plus random
            // noise (NextDouble() > 0.85). NOTE: short-circuit order matters —
            // the RNG only advances when samples[i][0] <= 5, so do not
            // rearrange this condition.
            for (int i = 0; i < samples.Length; i++)
            {
                if (samples[i][0] > 5 || Tools.Random.NextDouble() > 0.85)
                {
                    outputs[i] = 1;
                }
            }

            // Ten discrete decision variables, 10 symbol values each.
            DecisionVariable[] vars = new DecisionVariable[10];
            for (int i = 0; i < vars.Length; i++)
            {
                vars[i] = new DecisionVariable("x" + i, 10);
            }

            DecisionTree tree = new DecisionTree(vars, 2);

            var teacher = new ID3Learning(tree);

            // ID3 is expected to fit the training data perfectly.
            double error = teacher.Run(samples, outputs);

            Assert.AreEqual(0, error);

            var rules = DecisionSet.FromDecisionTree(tree);

            // Simplify at the 5% significance level; pruning is expected to
            // reintroduce a small, known amount of training error.
            Simplification simpl = new Simplification(rules)
            {
                Alpha = 0.05
            };

            error = simpl.ComputeError(samples.ToDouble(), outputs);
            Assert.AreEqual(0, error);

            double newError = simpl.Compute(samples.ToDouble(), outputs);

            Assert.AreEqual(0.097, newError);
        }
示例#14
0
	// Lays out the decision set's newspapers, memos and docets under their
	// respective root transforms (found lazily by name), fanning each pile
	// out in fixed per-category increments.
	public void Populate(DecisionSet decisions) {

		Debug.Log("Populating...");

		decisionSet = decisions;

		if (null == newspaperRoot) {
			newspaperRoot = GameObject.Find ("NewspaperRoot").transform;
		}
		Arrange (decisions.newspapers, newspaperRoot, newspaperDelta, true);

		if (null == memoRoot) {
			memoRoot = GameObject.Find ("MemoRoot").transform;
		}
		// Memos did not copy the root rotation in the original code,
		// so matchRotation stays false here.
		Arrange (decisions.memos, memoRoot, memoDelta, false);

		if (null == docetRoot) {
			docetRoot = GameObject.Find ("DocetRoot").transform;
		}
		Arrange (decisions.docets, docetRoot, docetDelta, true);
	}

	// Parents each item under `root` and stacks it at offsets delta, 2*delta,
	// ... from the root position; optionally copies the root's local rotation.
	private void Arrange(System.Collections.IEnumerable items, Transform root, Vector3 delta, bool matchRotation) {
		Vector3 d = delta;
		foreach (GameObject item in items) {
			item.transform.SetParent (root);
			item.transform.position = root.position;
			if (matchRotation) {
				item.transform.localRotation = root.localRotation;
			}
			item.transform.position += d;
			d += delta;
		}
	}
示例#15
0
        // Trains an ID3 tree (max height 7) predicting the "Cannabis" class
        // from the demographic/personality columns and returns the error on
        // the separate test file. When `show` is set, the induced rules and
        // the error are printed to the console.
        static double Decision_Tree(bool show)
        {
            DataTable    data       = DataController.MakeDataTable("../../drug_consumption.txt");
            DataTable    entireData = DataController.MakeDataTable("../../drug_consumption.txt");
            DataTable    tests      = DataController.MakeDataTable("../../drug_consumption_test2.txt");
            Codification codebook   = new Codification(entireData);

            DecisionVariable[] attributes = DataController.GetAttributes();
            int classCount = 7; // (7) "Never Used", "Used over a Decade Ago", "Used in Last Decade", "Used in Last Year", "Used in Last Month", "Used in Last Week", and "Used in Last Day"

            DecisionTree tree        = new DecisionTree(attributes, classCount);
            ID3Learning  id3learning = new ID3Learning(tree);

            id3learning.MaxHeight = 7;
            DataTable symbols    = codebook.Apply(data);
            string    LookingFor = "Cannabis";

            // Single source of truth for the feature columns, used for both
            // the training and the test set (previously duplicated inline).
            string[] featureColumns =
            {
                "Age", "Gender", "Education", "Country", "Eticnity", "Nscore",
                "Escore", "Oscore", "Ascore", "Cscore", "Impulsive", "SS"
            };

            int[][] inputs  = symbols.ToJagged <int>(featureColumns);
            int[]   outputs = symbols.ToArray <int>(LookingFor);

            id3learning.Learn(inputs, outputs);
            DataTable testSymbols = codebook.Apply(tests);

            int[][]     testIn   = testSymbols.ToJagged <int>(featureColumns);
            int[]       testOut  = testSymbols.ToArray <int>(LookingFor);
            DecisionSet rules    = tree.ToRules();
            string      ruleText = rules.ToString(codebook, LookingFor, System.Globalization.CultureInfo.InvariantCulture);
            double      error    = new ZeroOneLoss(testOut).Loss(tree.Decide(testIn));

            if (show == true)
            {
                Console.WriteLine(LookingFor);
                Console.WriteLine();
                Console.WriteLine(ruleText);
                Console.ReadKey();
                // NOTE(review): ZeroOneLoss yields a fraction in [0, 1], so
                // the "%" suffix below is misleading — confirm intent before
                // changing the displayed text.
                Console.WriteLine("Blad - " + Math.Round(error, 4) + "%");
                Console.ReadKey();
            }
            return(error);
        }
示例#16
0
        // Learns a C4.5 tree over the sensor columns of `dt` and renders the
        // induced decision rules as text, decoding class indices back to
        // their "class-N" labels via the codebook.
        public string Rules2String()
        {
            // Feature columns are 0..29; column 30 holds the class label.
            // (Fix: the magic number 30 was repeated throughout.)
            const int SensorCount = 30;

            int count = dt.Rows.Count;

            int[][]  inputs = new int [count][];
            string[] labels = new string[count];
            int      num    = 0;

            foreach (DataRow dr in dt.Rows)
            {
                int res = Convert.ToInt32(dr[SensorCount]);
                inputs[num] = new int[SensorCount];
                for (int sensor_i = 0; sensor_i < SensorCount; sensor_i++)
                {
                    inputs[num][sensor_i] = Convert.ToInt32(dr[sensor_i]);
                }
                labels[num] = "class-" + res.ToString();
                num++;
            }

            // Encode the textual class labels as integer outputs.
            var codebook = new Codification("Output", labels);

            int[] outputs = codebook.Transform("Output", labels);

            // One continuous decision variable per sensor.
            DecisionVariable[] dv = new DecisionVariable[SensorCount];
            for (int i = 0; i < SensorCount; i++)
            {
                string name = "sensor_" + (i + 1).ToString();
                dv[i] = new DecisionVariable(name, DecisionVariableKind.Continuous);
            }

            // Use the C4.5 spanning-tree algorithm.
            var          C45  = new C45Learning(dv);
            DecisionTree tree = C45.Learn(inputs, outputs);

            DecisionSet rules = tree.ToRules();

            return(rules.ToString(codebook, "Output", System.Globalization.CultureInfo.InvariantCulture));
        }
示例#17
0
        public void runTest()
        {
            // Streams the 2015 collision CSV in 10000-row batches; for each
            // batch it learns a C4.5 tree predicting P_ISEV, converts the tree
            // to rules, decodes the rule symbols back to their original values,
            // and accumulates per-rule frequencies plus timing statistics.
            List <DataHolder>        dataHolders = new List <DataHolder>();
            DateTime                 previousTime;
            Dictionary <string, int> ruleQuatityDict = new Dictionary <string, int>();

            bool         done = false;
            int          amountToReadPerIncrement = 10000;
            int          currentAmount            = 0;
            StreamReader reader    = File.OpenText("../../../../ncdb_2015.csv");
            DataTable    data      = new DataTable("2015 Collision Data");
            string       firstLine = reader.ReadLine();

            string[] firstItems = firstLine.Split(',');

            // Set up the columns only at the start: C_CASE first, then every
            // header except the excluded ones, with the P_ISEV target last.
            data.Columns.Add("C_CASE");
            for (int i = 0; i < firstItems.Length; i++)
            {
                if (!firstItems[i].Equals("P_ISEV") && !firstItems[i].Equals("C_CASE") && !firstItems[i].Equals("C_YEAR") && !firstItems[i].Equals("C_SEV"))
                {
                    data.Columns.Add(firstItems[i]);
                }
            }
            data.Columns.Add("P_ISEV");

            // Tokens stripped when parsing a rule's string form back apart.
            string[] stringSplitStuff = { "=:", "&&", "(", ")", "==", " " };

            while (!done)
            {
                currentAmount += amountToReadPerIncrement;
                Console.WriteLine("Currently on: " + currentAmount);

                previousTime = DateTime.Now;
                DataHolder dataHolder = new DataHolder(amountToReadPerIncrement);
                dataHolders.Add(dataHolder);

                int amountOfDataRead = -1;

                string line;
                // Clear the rows to make way for the new batch of data.
                data.Rows.Clear();

                // NOTE(review): the column order added here must match the
                // Columns.Add order above — confirm against the CSV layout.
                while ((line = reader.ReadLine()) != null && amountOfDataRead < amountToReadPerIncrement)
                {
                    string[] items = line.Split(',');
                    data.Rows.Add(items[22], items[1], items[2], items[3], items[5], items[6], items[7], items[8]
                                  , items[9], items[10], items[11], items[12], items[13], items[14], items[15], items[16], items[17],
                                  items[18], items[20], items[21], items[19]);
                    amountOfDataRead++;
                }


                // Ignore the last bit of data as it will be shorter than 10000 elements
                if (amountOfDataRead < amountToReadPerIncrement)
                {
                    Console.WriteLine("Finished, writing output: " + currentAmount);
                    done = true;
                    continue;
                }

                dataHolder.readTime = DateTime.Now.Subtract(previousTime).TotalSeconds;
                previousTime        = DateTime.Now;

                // Build the symbol codebook from this batch only.
                Codification codebook = new Codification(data);

                dataHolder.codeTime = DateTime.Now.Subtract(previousTime).TotalSeconds;
                previousTime        = DateTime.Now;

                // Discrete decision variables with their symbol counts.
                DecisionVariable[] attributes =
                {
                    new DecisionVariable("C_MNTH",  14),
                    new DecisionVariable("C_WDAY",   9),
                    new DecisionVariable("C_HOUR",  25),
                    //new DecisionVariable("C_SEV", 4),
                    new DecisionVariable("C_VEHS", 101),
                    new DecisionVariable("C_CONF",  44),
                    new DecisionVariable("C_RCFG",  15),
                    new DecisionVariable("C_WTHR",  10),
                    new DecisionVariable("C_RSUR",  12),
                    new DecisionVariable("C_RALN",   9),
                    new DecisionVariable("C_TRAF",  21),
                    new DecisionVariable("V_ID",   100),
                    new DecisionVariable("V_TYPE",  27),
                    new DecisionVariable("V_YEAR", 118),
                    new DecisionVariable("P_ID",   101),
                    new DecisionVariable("P_SEX",    5),
                    new DecisionVariable("P_AGE",  103),
                    new DecisionVariable("P_PSN",   35),
                    new DecisionVariable("P_SAFE",  11),
                    new DecisionVariable("P_USER", 6)
                };
                int          classCount = 6;        // number of P_ISEV severity classes — TODO confirm against dataset codes
                DecisionTree tree       = new DecisionTree(attributes, classCount);

                // Create a new instance of the C4.5 algorithm
                C45Learning c45learnig = new C45Learning(tree);

                // Translate our training data into integer symbols using our codebook:
                DataTable symbols = codebook.Apply(data);
                int[][]   inputs  = symbols.ToIntArray("C_MNTH", "C_WDAY", "C_HOUR", "C_VEHS", "C_CONF", "C_RCFG", "C_WTHR", "C_RSUR", "C_RALN", "C_TRAF", "V_ID", "V_TYPE", "V_YEAR", "P_ID", "P_SEX", "P_AGE", "P_PSN", "P_SAFE", "P_USER");
                int[]     outputs = symbols.ToIntArray("P_ISEV").GetColumn(0);

                // Learn the training instances!
                c45learnig.Learn(inputs, outputs);

                dataHolder.treeLearningTime = DateTime.Now.Subtract(previousTime).TotalSeconds;
                previousTime = DateTime.Now;

                DecisionSet ds = tree.ToRules();

                // Decode each rule back to original symbol values and count
                // how often each decoded rule appears across batches.
                foreach (DecisionRule dr in ds)
                {
                    // Tokens after splitting: [severity, attr, value, attr, value, ...]
                    string[] splitString = dr.ToString().Split(stringSplitStuff, System.StringSplitOptions.RemoveEmptyEntries);
                    int      severity    = int.Parse(splitString[0].Trim());

                    string translatedKey = codebook.Revert("P_ISEV", severity) + " =: ";
                    translatedKey += "(" + splitString[1] + " == " + codebook.Revert(splitString[1], int.Parse(splitString[2])) + ")";

                    for (int i = 3; i < splitString.Length; i += 2)
                    {
                        translatedKey += " && (" + splitString[i] + " == " + codebook.Revert(splitString[i], int.Parse(splitString[i + 1])) + ")";
                    }

                    if (!ruleQuatityDict.ContainsKey(translatedKey))
                    {
                        ruleQuatityDict.Add(translatedKey, 1);
                    }
                    else
                    {
                        ruleQuatityDict[translatedKey]++;
                    }
                }

                dataHolder.ruleFetchTime = DateTime.Now.Subtract(previousTime).TotalSeconds;
                dataHolder.totalRules    = ds.Count;
                string rules = dataHolder.ToString() + "\n" + " Rules: " + dataHolder.totalRules + "\n" + ds.ToString();

                System.IO.File.WriteAllText("../../../../rules" + amountToReadPerIncrement + ".txt", rules);
                Console.WriteLine(amountToReadPerIncrement + " generated " + dataHolder.totalRules + " rules\n" + dataHolder.ToString());
            }

            reader.Close();

            // Print out a csv with the time and rule data
            string csvData = "Data_Amount,Read_Time,Codify_Time,Learn_Time,Fetch_Rule_Time,Total_Time,Total_Rules\n";

            foreach (DataHolder dh in dataHolders)
            {
                csvData += dh.dataAmount + "," + dh.readTime + "," + dh.codeTime + "," + dh.treeLearningTime + "," + dh.ruleFetchTime + "," + dh.getTotalTime() + "," + dh.totalRules + "\n";
            }

            System.IO.File.WriteAllText("../../../../All2015-10000Data.csv", csvData);

            // Write every decoded rule, most frequent first.
            Console.WriteLine("Sorting rule dictionary...");
            var myList = ruleQuatityDict.ToList();

            myList.Sort((pair1, pair2) => pair2.Value.CompareTo(pair1.Value));
            StringBuilder sb    = new StringBuilder();
            int           total = myList.Count;

            Console.WriteLine("Writing rules...");
            for (int i = 0; i < myList.Count; i++)
            {
                sb.Append(myList[i].Value + ": " + myList[i].Key + "\n");
                if (i % 100 == 0)
                {
                    Console.WriteLine("Rules written: " + i);
                }
            }

            System.IO.File.WriteAllText("../../../../ruleQuantityData100000.txt", sb.ToString());
        }
示例#18
0
        public void IrisDatasetTest()
        {
            #region doc_iris
            // In this example, we will process the famous Fisher's Iris dataset in
            // which the task is to classify weather the features of an Iris flower
            // belongs to an Iris setosa, an Iris versicolor, or an Iris virginica:
            //
            //  - https://en.wikipedia.org/wiki/Iris_flower_data_set
            //

            // First, let's load the dataset into an array of text that we can process
            string[][] text = Resources.iris_data.Split(new[] { '\n' }, StringSplitOptions.RemoveEmptyEntries).Apply(x => x.Split(','));

            // The first four columns contain the flower features
            double[][] inputs = text.GetColumns(0, 1, 2, 3).To <double[][]>();

            // The last column contains the expected flower type
            string[] labels = text.GetColumn(4);

            // Since the labels are represented as text, the first step is to convert
            // those text labels into integer class labels, so we can process them
            // more easily. For this, we will create a codebook to encode class labels:
            //
            var codebook = new Codification("Output", labels);

            // With the codebook, we can convert the labels:
            int[] outputs = codebook.Translate("Output", labels);

            // Let's declare the names of our input variables:
            DecisionVariable[] features =
            {
                new DecisionVariable("sepal length", DecisionVariableKind.Continuous),
                new DecisionVariable("sepal width",  DecisionVariableKind.Continuous),
                new DecisionVariable("petal length", DecisionVariableKind.Continuous),
                new DecisionVariable("petal width",  DecisionVariableKind.Continuous),
            };

            // Now, we can finally create our tree for the 3 classes:
            var tree = new DecisionTree(inputs: features, classes: 3);

            // And we can use the C4.5 for learning:
            var teacher = new C45Learning(tree);

            // And finally induce the tree:
            teacher.Learn(inputs, outputs);

            // To get the estimated class labels, we can use
            int[] predicted = tree.Decide(inputs);

            // And the classification error can be computed as
            // (fix: reuse the predictions above instead of calling Decide again)
            double error = new ZeroOneLoss(outputs) // 0.0266
            {
                Mean = true
            }.Loss(predicted);

            // Moreover, we may decide to convert our tree to a set of rules:
            DecisionSet rules = tree.ToRules();

            // And using the codebook, we can inspect the tree reasoning:
            string ruleText = rules.ToString(codebook, "Output",
                                             System.Globalization.CultureInfo.InvariantCulture);

            // The output is:
            string expected = @"Iris-setosa =: (petal length <= 2.45)
Iris-versicolor =: (petal length > 2.45) && (petal width <= 1.75) && (sepal length <= 7.05) && (sepal width <= 2.85)
Iris-versicolor =: (petal length > 2.45) && (petal width <= 1.75) && (sepal length <= 7.05) && (sepal width > 2.85)
Iris-versicolor =: (petal length > 2.45) && (petal width > 1.75) && (sepal length <= 5.95) && (sepal width > 3.05)
Iris-virginica =: (petal length > 2.45) && (petal width <= 1.75) && (sepal length > 7.05)
Iris-virginica =: (petal length > 2.45) && (petal width > 1.75) && (sepal length > 5.95)
Iris-virginica =: (petal length > 2.45) && (petal width > 1.75) && (sepal length <= 5.95) && (sepal width <= 3.05)
";
            #endregion

            Assert.AreEqual(0.026666666666666668, error, 1e-10);
            Assert.AreEqual(4, tree.NumberOfInputs);
            Assert.AreEqual(3, tree.NumberOfOutputs);

            double newError = ComputeError(rules, inputs, outputs);
            Assert.AreEqual(0.026666666666666668, newError, 1e-10);
            Assert.AreEqual(expected, ruleText);
        }
示例#19
0
        public static void Run()
        {
            // This example infers whether an overtaking manoeuvre will succeed
            // based on three categorical input variables: the separation to the
            // oncoming vehicle, our speed, and the oncoming vehicle's speed.
            // The expected outcome is stored in the "Result" column.

            // Note: this example uses DataTables to represent the input data, but this is not required.
            var example = "Overtake";

            Console.WriteLine(example);

            DataTable data = new DataTable(example);

            data.Columns.Add("Separation", typeof(String));
            data.Columns.Add("Speed", typeof(String));
            data.Columns.Add("OncomingSpeed", typeof(String));
            data.Columns.Add("Result", typeof(String));

            var shuffledInputs = GetInputs(100);

            for (int index = 0; index < shuffledInputs.Length; index++)
            {
                data.Rows.Add(shuffledInputs[index][0], shuffledInputs[index][1], shuffledInputs[index][2], shuffledInputs[index][3]);
            }

            // Since all variables are categorical, convert the strings to integer
            // symbols through a Codification codebook so they are easier to process.
            var codebook = new Codification(data);

            // Translate our training data into integer symbols using our codebook:
            DataTable symbols = codebook.Apply(data);

            // BUG FIX: the predictors must not include the label column "Result"
            // (that would leak the answer into the inputs), and the outputs must
            // be read from "Result" — the table has no column named "Overtake",
            // so the original ToArray<int>("Overtake") call could never succeed.
            int[][]  inputs     = symbols.ToJagged <int>(new string[] { "Separation", "Speed", "OncomingSpeed" });
            int[]    outputs    = symbols.ToArray <int>("Result");
            string[] classNames = new string[] { "success", "fail" };

            // For a problem with only categorical variables, the simplest choice
            // to induce a decision tree is Quinlan's ID3 algorithm. Declare one
            // DecisionVariable per predictor (the label is not a predictor):
            var id3learning = new ID3Learning()
            {
                new DecisionVariable("Separation", 150),    // up to 150 distinct symbols
                new DecisionVariable("Speed", 150),         // up to 150 distinct symbols
                new DecisionVariable("OncomingSpeed", 150)  // up to 150 distinct symbols
            };

            // Learn the training instances!
            DecisionTree tree = id3learning.Learn(inputs, outputs);

            // The tree can now be queried for new examples through
            // its Decide method. For example, we can create a query:
            int[] query = codebook.Transform(new[, ]
            {
                { "Separation", "150" },
                { "Speed", "150" },
                { "OncomingSpeed", "150" }
            });

            // And then predict the label using
            int predicted = tree.Decide(query);

            // Translate the numeric prediction back into its original text label.
            var answer = codebook.Revert("Result", predicted);

            Console.WriteLine("");

            Console.WriteLine(answer);

            // Training error: fraction of training samples the tree misclassifies.
            double error = new ZeroOneLoss(outputs).Loss(tree.Decide(inputs));

            Console.WriteLine($"{error * 100:F10}");

            // The induced tree can also be inspected as human-readable rules.
            DecisionSet rules        = tree.ToRules();
            var         encodedRules = rules.ToString();

            Console.WriteLine(encodedRules);

            Console.ReadKey(); // Keep the window open till a key is pressed
        }
示例#20
0
        /// <summary>
        /// Builds the decision tree: samples grid cells, trains a C4.5 tree on
        /// half of the samples, prunes it with the other half, and stores the
        /// compiled decision function in <c>this.func</c>.
        /// </summary>
        public void BuildTree()
        {
            // Draw the training sample cells from the grid.
            List <Cell> samplePoints = getSample();

            updateConsoleEvent("-----------");
            updateConsoleEvent("起始城市栅格数目:" + this.BeginCityCnt);
            updateConsoleEvent("目标城市栅格数目:" + this.EndCityCnt);
            updateConsoleEvent("-----------");
            updateConsoleEvent("-----------开始训练----------");

            // Number of samples.
            int COUNT = samplePoints.Count;

            // Build the input and output datasets: one row per sampled cell.
            double[][] inputs  = new double[COUNT][];
            int[]      outputs = new int[COUNT];
            for (int i = 0; i < COUNT; i++)
            {
                Cell          cell  = samplePoints[i];
                int           pos   = cell.row * width + cell.col;
                // One feature per driver layer, read at this cell's linear index.
                List <double> input = (from buffer in driveBufferList
                                       select buffer[pos]).ToList <double>();
                // Extra feature: neighbourhood effect in a 3-cell window —
                // assumes GetNeighbourAffect yields one scalar; TODO confirm.
                input.Add(GetNeighbourAffect(beginBuffer, width, height, cell.row, cell.col, 3));
                inputs[i] = input.ToArray <double>();
                // Label is 1 when the cell's starting value equals the urban
                // land-use code, 0 otherwise.
                if (this.landInfo.UrbanInfos[0].LandUseTypeValue == (int)beginBuffer[pos])
                {
                    outputs[i] = 1;
                }
                else
                {
                    outputs[i] = 0;
                }
            }


            // Training set: first half of the samples.
            var trainingInputs = inputs.Submatrix(0, COUNT / 2 - 1);
            var trainingOutput = outputs.Submatrix(0, COUNT / 2 - 1);

            // Validation (pruning) set: second half of the samples.
            var pruningInputs = inputs.Submatrix(COUNT / 2, COUNT - 1);
            var pruningOutput = outputs.Submatrix(COUNT / 2, COUNT - 1);

            // Declare one continuous decision variable per driver layer name.
            List <DecisionVariable> featuresList = (from column in this.driveLayerNames
                                                    select new DecisionVariable(column, DecisionVariableKind.Continuous)).ToList <DecisionVariable>();

            featuresList.Add(new DecisionVariable("affectofneighbour", DecisionVariableKind.Continuous));

            // Train a binary-classification tree with C4.5.
            var tree    = new DecisionTree(inputs: featuresList, classes: 2);
            var teacher = new C45Learning(tree);

            teacher.Learn(trainingInputs, trainingOutput);

            // Error-based pruning on the held-out half; iterate until a
            // pruning pass stops reducing the error.
            ErrorBasedPruning prune = new ErrorBasedPruning(tree, pruningInputs, pruningOutput);

            prune.Threshold = 0.1;// pruning threshold (original note: "Gain threshold ?")
            double lastError;
            double error = Double.PositiveInfinity;

            do
            {
                lastError = error;
                error     = prune.Run();
            } while (error < lastError);

            updateConsoleEvent("错误率:" + error);

            // Compile the pruned tree into a callable decision function.
            this.func = tree.ToExpression().Compile();
            //UpdateUi("错误率" + error);

            // Report the tree as a set of human-readable rules.
            DecisionSet rules    = tree.ToRules();
            string      ruleText = rules.ToString();

            //consolePad.addLineToInfo(ruleText);
            updateConsoleEvent("规则:");
            updateConsoleEvent(ruleText);
            updateConsoleEvent("-----------训练结束----------");
        }
示例#21
0
        public void Run()
        {
            // Decision-tree demo: train a C4.5 tree on overtaking scenarios,
            // then predict fresh scenarios and report accuracy.
            WriteLine("Decision Tree - C45 Learning");

            // How many scenarios should the tree be trained on?
            int trainAmount = GetUserInput("Amount of data to train");

            double[][] trainInputs  = new double[trainAmount][];
            int[]      trainOutputs = new int[trainAmount];

            // Pull training scenarios from OvertakeAI: three numeric features
            // plus a boolean success flag used as the class label.
            for (int i = 0; i < trainAmount; i++)
            {
                Library.Overtake sample = OvertakeData.GetData();

                trainInputs[i] = new double[3]
                {
                    sample.InitialSeparationM,
                    sample.OvertakingSpeedMPS,
                    sample.OncomingSpeedMPS
                };

                trainOutputs[i] = ToInt32(sample.Success);
            }

            // Induce the decision tree with the C4.5 algorithm.
            var          learner = new C45Learning();
            DecisionTree tree    = learner.Learn(trainInputs, trainOutputs);

            // How many scenarios should be predicted against the tree?
            int testAmount = GetUserInput("Amount of data to predict");

            string[] possibleOutcomes = { "Won't Pass", "Will Pass" };
            var      scoreCard        = new List <bool>();

            // Table header for the per-scenario report.
            WriteLine($"\n{"Initial Seperation (m)",21}" +
                      $"{"Overtaking Speed (m/s)",28}" +
                      $"{"Oncoming Speed (m/s)",26}" +
                      $"{"Outcome",14}" +
                      $"{"Prediction",17}");

            for (int i = 0; i < testAmount; i++)
            {
                // Fetch a fresh scenario from OvertakeAI.
                Library.Overtake sample = OvertakeData.GetData();

                double[] features = new double[3] {
                    sample.InitialSeparationM,
                    sample.OvertakingSpeedMPS,
                    sample.OncomingSpeedMPS
                };

                string actualOutcome = sample.Success ? "Will Pass" : "Won't Pass";

                // Ask the tree for its verdict and record whether it matched.
                int outcomeIndex = tree.Decide(features);

                bool correct = actualOutcome == possibleOutcomes[outcomeIndex];
                scoreCard.Add(correct);

                string verdict = correct ? "Correct" : "Incorrect";
                WriteLine($"{Round(features[0], 2).ToString("F"),14}" +
                          $"{Round(features[1], 2).ToString("F"),27}" +
                          $"{Round(features[2], 2).ToString("F"),27}" +
                          $"{actualOutcome,22}" +
                          $"{verdict,17}");
            }

            // Accuracy: proportion of correct predictions on the test scenarios.
            WriteLine($"\nAccuracy: {Round((scoreCard.Count(x => x) / ToDouble(scoreCard.Count)) * 100, 2)}%");

            // Training error of the tree on its own training data.
            int[]  predicted = tree.Decide(trainInputs);
            double error     = new ZeroOneLoss(trainOutputs).Loss(predicted);

            WriteLine($"Training Error: {Round(error, 2)}\n");

            // Render the induced rules, decoding class indices via a codebook.
            WriteLine("Decision Tree Rules:");
            DecisionSet rules        = tree.ToRules();
            var         codebook     = new Codification("Possible Results", possibleOutcomes);
            var         encodedRules = rules.ToString(codebook, "Possible Results", CultureInfo.InvariantCulture);

            WriteLine($"{encodedRules}");
        }
示例#22
0
 /// <summary>
 ///   Initializes a new instance of the <see cref="Simplification"/> class.
 /// </summary>
 /// 
 /// <param name="list">The decision set to be simplified. Cannot be null.</param>
 /// 
 /// <exception cref="ArgumentNullException">
 ///   Thrown when <paramref name="list"/> is null.</exception>
 /// 
 public Simplification(DecisionSet list)
 {
     // Fail fast: a null set would otherwise only surface later as a
     // NullReferenceException when the simplification is actually run.
     if (list == null)
         throw new ArgumentNullException(nameof(list));

     this.decisionList = list;
 }
示例#23
0
        public string DecisionTreeDecisionsLib()
        {
            // Convert the library-built tree into decision rules and render them
            // through the codebook so the "TYPE" symbols appear as text labels.
            var rules = decisionTreeLib.ToRules();
            string text = rules.ToString(codebook, "TYPE", System.Globalization.CultureInfo.InvariantCulture);
            return text;
        }
示例#24
0
        public void iris_new_method_create_tree()
        {
            // Parse the embedded Iris dataset: each CSV row holds four numeric
            // measurements followed by a textual class label.
            string[][] rows = Resources.iris_data.Split(new[] { "\r\n" },
                                                        StringSplitOptions.RemoveEmptyEntries).Apply(x => x.Split(','));

            double[][] inputs = rows.GetColumns(0, 1, 2, 3).To <double[][]>();

            string[] labels = rows.GetColumn(4);

            // Encode the textual class labels as integer symbols.
            var codebook = new Codification("Output", labels);

            int[] outputs = codebook.Translate("Output", labels);

            // Describe the four continuous input attributes by name.
            DecisionVariable[] features =
            {
                new DecisionVariable("sepal length", DecisionVariableKind.Continuous),
                new DecisionVariable("sepal width",  DecisionVariableKind.Continuous),
                new DecisionVariable("petal length", DecisionVariableKind.Continuous),
                new DecisionVariable("petal width",  DecisionVariableKind.Continuous),
            };

            // New-style API: the teacher itself builds and returns the tree.
            var teacher = new C45Learning(features);

            var tree = teacher.Learn(inputs, outputs);

            Assert.AreEqual(4, tree.NumberOfInputs);
            Assert.AreEqual(3, tree.NumberOfOutputs);

            // Classify the training set.
            int[] predicted = tree.Decide(inputs);

            // Mean zero-one loss over the training set (expected 0.0266).
            double error = new ZeroOneLoss(outputs)
            {
                Mean = true
            }.Loss(tree.Decide(inputs));

            // Convert the tree into an equivalent rule set and render it using
            // the codebook so the classes appear by name.
            DecisionSet rules = tree.ToRules();

            string ruleText = rules.ToString(codebook, "Output",
                                             System.Globalization.CultureInfo.InvariantCulture);

            // The rules the tree is expected to produce:
            string expected = @"Iris-setosa =: (petal length <= 2.45)
Iris-versicolor =: (petal length > 2.45) && (petal width <= 1.75) && (sepal length <= 7.05) && (sepal width <= 2.85)
Iris-versicolor =: (petal length > 2.45) && (petal width <= 1.75) && (sepal length <= 7.05) && (sepal width > 2.85)
Iris-versicolor =: (petal length > 2.45) && (petal width > 1.75) && (sepal length <= 5.95) && (sepal width > 3.05)
Iris-virginica =: (petal length > 2.45) && (petal width <= 1.75) && (sepal length > 7.05)
Iris-virginica =: (petal length > 2.45) && (petal width > 1.75) && (sepal length > 5.95)
Iris-virginica =: (petal length > 2.45) && (petal width > 1.75) && (sepal length <= 5.95) && (sepal width <= 3.05)
";

            expected = expected.Replace("\r\n", Environment.NewLine);

            Assert.AreEqual(0.026666666666666668, error, 1e-10);

            // The rule set must classify exactly as well as the tree it came from.
            double newError = ComputeError(rules, inputs, outputs);

            Assert.AreEqual(0.026666666666666668, newError, 1e-10);
            Assert.AreEqual(expected, ruleText);
        }
示例#25
0
        public Learn()
        {
            try
            {
                //http://accord-framework.net/docs/html/T_Accord_MachineLearning_DecisionTrees_Learning_C45Learning.htm
                using (var db = new DatabaseEntities())
                {
                    // Flatten every record's detail rows into one table of
                    // (Id, Age, L1, Word, Synonym) training examples.
                    DataTable data = new DataTable("e-Tracker Values");

                    data.Columns.Add("Id", typeof(int));
                    data.Columns.Add("Age", typeof(string));
                    data.Columns.Add("L1", typeof(string));
                    data.Columns.Add("Word", typeof(string));
                    data.Columns.Add("Synonym", typeof(string));

                    foreach (var record in db.Records.ToList())
                    {
                        foreach (var detail in record.DetailRecords.ToList())
                        {
                            data.Rows.Add(detail.Id, record.Age, record.L1, detail.UnknownWord, detail.SelectedSynonism);
                        }
                    }

                    // Codebook mapping every distinct string onto an integer
                    // symbol; missing values are encoded as NaN.
                    CodeBook = new Codification()
                    {
                        DefaultMissingValueReplacement = Double.NaN
                    };

                    CodeBook.Learn(data);

                    // Encode the whole table with the learned codebook.
                    DataTable symbols = CodeBook.Apply(data);

                    // Training inputs/outputs as integer symbols.
                    int[][] inputs  = symbols.ToJagged <int>(InputNames);
                    int[]   outputs = symbols.ToArray <int>("Synonym");

                    // Induce a C4.5 tree over the codebook-declared attributes.
                    var teacher = new C45Learning()
                    {
                        Attributes = DecisionVariable.FromCodebook(CodeBook, InputNames),
                    };

                    Tree = teacher.Learn(inputs, outputs);

                    // Training error, kept for inspection while debugging.
                    int[]  predicted = Tree.Decide(inputs);
                    double error     = new ZeroOneLoss(outputs).Loss(predicted);

                    // Expose the tree both as human-readable rules and as C# code.
                    DecisionSet rules = Tree.ToRules();

                    Rules = rules.ToString(CodeBook, "Synonym",
                                           System.Globalization.CultureInfo.InvariantCulture);

                    Code = Tree.ToCode("Rules");
                }
            }
            catch (Exception e)
            {
                MessageBox.Show(e.Message);
            }
        }
示例#26
0
        public void AttributeReuseTest1()
        {
            // Load the raw Iris CSV and sanity-check its dimensions and corners.
            string[][] rows = Resources.iris_data
                              .Split(new[] { "\r\n" }, StringSplitOptions.RemoveEmptyEntries)
                              .Apply(x => x.Split(','));

            Assert.AreEqual(150, rows.Rows());
            Assert.AreEqual(5, rows.Columns());
            Assert.AreEqual("Iris-setosa", rows[0].Get(-1));
            Assert.AreEqual("Iris-virginica", rows.Get(-1).Get(-1));

            // Parse the four feature columns using the invariant culture.
            double[][] inputs = new double[rows.Length][];
            for (int row = 0; row < inputs.Length; row++)
            {
                inputs[row] = rows[row].First(4).Convert(s => Double.Parse(s, System.Globalization.CultureInfo.InvariantCulture));
            }

            string[] labels = rows.GetColumn(4);

            // Encode the class labels as integer symbols.
            Codification codebook = new Codification("Label", labels);

            int[] outputs = codebook.Translate("Label", labels);

            DecisionVariable[] features =
            {
                new DecisionVariable("sepal length", DecisionVariableKind.Continuous),
                new DecisionVariable("sepal width",  DecisionVariableKind.Continuous),
                new DecisionVariable("petal length", DecisionVariableKind.Continuous),
                new DecisionVariable("petal width",  DecisionVariableKind.Continuous),
            };

            // Old-style API: construct the tree first, then run the teacher on it.
            DecisionTree tree = new DecisionTree(features, codebook.Columns[0].Symbols);

            C45Learning teacher = new C45Learning(tree);

            // Join relates to attribute reuse along a path (per this test's
            // name) — confirm exact semantics against C45Learning docs.
            teacher.Join = 3;

            double error = teacher.Run(inputs, outputs);

            Assert.AreEqual(0.02, error, 1e-10);

            // The rule set must classify exactly as well as the tree it came from.
            DecisionSet rules = tree.ToRules();

            double newError = ComputeError(rules, inputs, outputs);

            Assert.AreEqual(0.02, newError, 1e-10);

            string ruleText = rules.ToString(codebook,
                                             System.Globalization.CultureInfo.InvariantCulture);

            // Note that attributes (e.g. petal length) may appear more than
            // once along a single rule:
            string expected = @"0 =: (petal length <= 2.45)
1 =: (petal length > 2.45) && (petal width <= 1.75) && (sepal length <= 7.05) && (petal length <= 4.95)
1 =: (petal length > 2.45) && (petal width > 1.75) && (petal length <= 4.85) && (sepal length <= 5.95)
2 =: (petal length > 2.45) && (petal width <= 1.75) && (sepal length > 7.05)
2 =: (petal length > 2.45) && (petal width > 1.75) && (petal length > 4.85)
2 =: (petal length > 2.45) && (petal width <= 1.75) && (sepal length <= 7.05) && (petal length > 4.95)
2 =: (petal length > 2.45) && (petal width > 1.75) && (petal length <= 4.85) && (sepal length > 5.95)
";

            expected = expected.Replace("\r\n", Environment.NewLine);

            Assert.AreEqual(expected, ruleText);
        }
示例#27
0
        public void new_method_create_tree()
        {
            #region doc_simplest
            // Fisher's Iris dataset: classify flower measurements as one of
            // three species (setosa, versicolor, or virginica). See:
            //
            //  - https://en.wikipedia.org/wiki/Iris_flower_data_set
            //

            // Load the embedded CSV into a jagged array of strings:
            string[][] text = Resources.iris_data.Split(new[] { "\r\n" },
                                                        StringSplitOptions.RemoveEmptyEntries).Apply(x => x.Split(','));

            // Columns 0-3 are the numeric flower features:
            double[][] inputs = text.GetColumns(0, 1, 2, 3).To <double[][]>();

            // Column 4 holds the expected flower type as text:
            string[] labels = text.GetColumn(4);

            // Encode those text labels into integer class labels
            // so they are easier to process:
            var codebook = new Codification("Output", labels);

            int[] outputs = codebook.Translate("Output", labels);

            // Induce the tree with C4.5 using default settings:
            C45Learning teacher = new C45Learning();

            var tree = teacher.Learn(inputs, outputs);

            // Query the tree for the training samples:
            int[] predicted = tree.Decide(inputs);

            // Classification error over the training set (0.0266):
            double error = new ZeroOneLoss(outputs).Loss(tree.Decide(inputs));

            // The tree can also be expressed as a set of decision rules:
            DecisionSet rules = tree.ToRules();

            // Render the rules; since no variable names were supplied, the
            // conditions refer to the features by their column index:
            string ruleText = rules.ToString(codebook, "Output",
                                             System.Globalization.CultureInfo.InvariantCulture);

            // The output is:
            string expected = @"Iris-setosa =: (2 <= 2.45)
Iris-versicolor =: (2 > 2.45) && (3 <= 1.75) && (0 <= 7.05) && (1 <= 2.85)
Iris-versicolor =: (2 > 2.45) && (3 <= 1.75) && (0 <= 7.05) && (1 > 2.85)
Iris-versicolor =: (2 > 2.45) && (3 > 1.75) && (0 <= 5.95) && (1 > 3.05)
Iris-virginica =: (2 > 2.45) && (3 <= 1.75) && (0 > 7.05)
Iris-virginica =: (2 > 2.45) && (3 > 1.75) && (0 > 5.95)
Iris-virginica =: (2 > 2.45) && (3 > 1.75) && (0 <= 5.95) && (1 <= 3.05)
";
            #endregion

            expected = expected.Replace("\r\n", Environment.NewLine);

            Assert.AreEqual(0.026666666666666668, error, 1e-10);

            // The rule set must classify exactly as well as the tree it came from.
            double newError = ComputeError(rules, inputs, outputs);
            Assert.AreEqual(0.026666666666666668, newError, 1e-10);
            Assert.AreEqual(expected, ruleText);
        }
示例#28
0
        private void button_Click(object sender, RoutedEventArgs e)
        {
            // Train a C4.5 decision tree on the bundled custom Iris data, then
            // show the induced rules plus per-class prediction counts in the UI.
            string[][] data = DataSet.CustomIris.iris_values.Split(new[] { "\r\n" },
                                                                   StringSplitOptions.RemoveEmptyEntries).Apply(x => x.Split(','));

            // Feature columns (0-3):
            double[][] inputs = data.GetColumns(0, 1, 2, 3).To <double[][]>();

            // Label column (4):
            string[] labels = data.GetColumn(4);

            // The codebook turns each distinct label into an integer symbol.
            var cb = new Codification("Output", labels);

            int[] outputs = cb.Transform("Output", labels);

            DecisionVariable[] features =
            {
                new DecisionVariable("sepal length", DecisionVariableKind.Continuous),
                new DecisionVariable("sepal width",  DecisionVariableKind.Continuous),
                new DecisionVariable("petal length", DecisionVariableKind.Continuous),
                new DecisionVariable("petal width",  DecisionVariableKind.Continuous),
            };

            var decisionTree = new DecisionTree(inputs: features, classes: 3);
            var c45learner = new C45Learning(decisionTree);

            c45learner.Learn(inputs, outputs);

            int[] estimated = decisionTree.Decide(inputs);

            double error = new ZeroOneLoss(outputs).Loss(decisionTree.Decide(inputs));

            // Rules give a readable explanation of the tree's reasoning.
            DecisionSet decisionSet = decisionTree.ToRules();

            string ruleText = decisionSet.ToString(cb, "Output",
                                                   System.Globalization.CultureInfo.InvariantCulture);

            #region UI
            // Show the rules in the output text box.
            tb_output.Text = ruleText;

            // Tally how many samples were assigned to each class -> TODO Bindings
            var setosaCount = 0;
            var versicolorCount = 0;
            var virginicaCount = 0;

            foreach (int label in estimated)
            {
                switch (label)
                {
                    case 0: setosaCount++; break;
                    case 1: versicolorCount++; break;
                    case 2: virginicaCount++; break;
                }
            }

            tb_setosa.Text = setosaCount.ToString();
            tb_versi.Text = versicolorCount.ToString();
            tb_virgi.Text = virginicaCount.ToString();
            #endregion UI
        }
示例#29
0
 /// <summary>
 ///   Transforms the tree into a set of <see cref="DecisionSet">decision rules</see>.
 /// </summary>
 ///
 /// <returns>A <see cref="DecisionSet"/> created from this tree.</returns>
 ///
 public DecisionSet ToRules() => DecisionSet.FromDecisionTree(this);
示例#30
0
        public double ComputeError(DecisionSet rules, double[][] inputs, int[] outputs)
        {
            // Misclassification rate of the rule set: the fraction of samples
            // whose computed class differs from the expected label.
            int mistakes = 0;
            for (int sample = 0; sample < inputs.Length; sample++)
            {
                bool hit = rules.Compute(inputs[sample]) == outputs[sample];
                if (!hit)
                    mistakes++;
            }

            return mistakes / (double)inputs.Length;
        }
        public void Run()
        {
            // Tom Mitchell's classic "Play Tennis" example: predict the
            // PlayTennis label from four categorical weather attributes
            // using the ID3 algorithm.
            DataTable data = new DataTable("Mitchell's Tennis Example");

            data.Columns.Add("Day");
            data.Columns.Add("Outlook");
            data.Columns.Add("Temperature");
            data.Columns.Add("Humidity");
            data.Columns.Add("Wind");
            data.Columns.Add("PlayTennis");

            data.Rows.Add("D1", "Sunny", "Hot", "High", "Weak", "No");
            data.Rows.Add("D2", "Sunny", "Hot", "High", "Strong", "No");
            data.Rows.Add("D3", "Overcast", "Hot", "High", "Weak", "Yes");
            data.Rows.Add("D4", "Rain", "Mild", "High", "Weak", "Yes");
            data.Rows.Add("D5", "Rain", "Cool", "Normal", "Weak", "Yes");
            data.Rows.Add("D6", "Rain", "Cool", "Normal", "Strong", "No");
            data.Rows.Add("D7", "Overcast", "Cool", "Normal", "Strong", "Yes");
            data.Rows.Add("D8", "Sunny", "Mild", "High", "Weak", "No");
            data.Rows.Add("D9", "Sunny", "Cool", "Normal", "Weak", "Yes");
            data.Rows.Add("D10", "Rain", "Mild", "Normal", "Weak", "Yes");
            data.Rows.Add("D11", "Sunny", "Mild", "Normal", "Strong", "Yes");
            data.Rows.Add("D12", "Overcast", "Mild", "High", "Strong", "Yes");
            data.Rows.Add("D13", "Overcast", "Hot", "Normal", "Weak", "Yes");
            data.Rows.Add("D14", "Rain", "Mild", "High", "Strong", "No");

            // Codebook converting the categorical strings to integer symbols.
            Codification codebook = new Codification(data, "Outlook", "Temperature", "Humidity", "Wind", "PlayTennis");

            // Translate the training data into integer symbols:
            DataTable symbols = codebook.Apply(data);

            // Build the per-column symbol dictionaries consumed by
            // GetIndex/GetCount below.
            CreateDic("Outlook", symbols);
            CreateDic("Temperature", symbols);
            CreateDic("Humidity", symbols);
            CreateDic("Wind", symbols);
            CreateDic("PlayTennis", symbols);

            // Inputs: one int[4] per row in (Outlook, Temperature, Humidity, Wind) order.
            int[][] inputs = symbols.AsEnumerable()
                .Select(p => new int[]
                {
                    GetIndex("Outlook", p["Outlook"].ToString()),
                    GetIndex("Temperature", p["Temperature"].ToString()),
                    GetIndex("Humidity", p["Humidity"].ToString()),
                    GetIndex("Wind", p["Wind"].ToString())
                })
                .ToArray();

            // Outputs: the encoded PlayTennis label for each row.
            int[] outputs = symbols.AsEnumerable()
                .Select(p => GetIndex("PlayTennis", p["PlayTennis"].ToString()))
                .ToArray();

            // Describe each attribute by its number of distinct symbols:
            DecisionVariable[] attributes =
            {
                new DecisionVariable("Outlook", GetCount("Outlook")),         // Sunny, Overcast, Rain
                new DecisionVariable("Temperature", GetCount("Temperature")), // Hot, Mild, Cool
                new DecisionVariable("Humidity", GetCount("Humidity")),       // High, Normal
                new DecisionVariable("Wind", GetCount("Wind"))                // Weak, Strong
            };

            int classCount = GetCount("PlayTennis"); // 2 possible outputs: yes or no

            // Create the decision tree and train it with ID3.
            DecisionTree tree = new DecisionTree(attributes, classCount);

            ID3Learning id3learning = new ID3Learning(tree);

            id3learning.Run(inputs, outputs);

            // Query the tree for a new day and decode the numeric answer:
            string answer = codebook.Translate("PlayTennis",
                                               tree.Compute(codebook.Translate("Sunny", "Hot", "High", "Strong")));

            Console.WriteLine("Calculate for: Sunny, Hot, High, Strong");
            Console.WriteLine("Answer: " + answer);

            // The tree can also be exported as an expression, as C# code,
            // or as a set of human-readable rules:
            var expression = tree.ToExpression();

            Console.WriteLine(tree.ToCode("ClassTest"));

            DecisionSet s = tree.ToRules();

            Console.WriteLine(s.ToString());

            // Compiles the expression to IL
            var func = expression.Compile();
        }
示例#32
0
        /// <summary>
        /// Reads a dataset from <paramref name="filename"/>, encodes it with a
        /// codification codebook, trains an ID3 decision tree on the configured
        /// input/output columns, and prints the generated code and rule set.
        /// </summary>
        /// <param name="filename">Path of the data file consumed by <c>ReadFile</c>.</param>
        public void Run(String filename)
        {
            ReadFile(filename);

            // Build a codebook that maps the string values of every input
            // column onto integer symbols.
            Codification codebook = new Codification(data, inputColumns.ToArray());

            // Re-express the training table in those integer symbols.
            DataTable symbols = codebook.Apply(data);

            // Populate the value->index dictionaries for each input column
            // and for the output column.
            foreach (String column in inputColumns)
            {
                CreateDic(column, symbols);
            }

            CreateDic(outputColumn, symbols);

            // Encode every row as an array of feature indices.
            int[][] inputs = symbols.AsEnumerable()
                                    .Select(row => GetInputRow(row))
                                    .Cast<int[]>()
                                    .ToArray();

            // Encode the class label of every row as a single index.
            int[] outputs = symbols.AsEnumerable()
                                   .Select(row => GetIndex(outputColumn, row[outputColumn].ToString()))
                                   .Cast<int>()
                                   .ToArray();

            // Describe each input column as a decision variable.
            DecisionVariable[] attributes = GetDecisionVariables();

            // Number of distinct values the output column can take.
            int classCount = GetCount(outputColumn);

            // Build the tree over those attributes and classes, then
            // induce it from the training data with ID3.
            DecisionTree tree = new DecisionTree(attributes, classCount);

            ID3Learning id3learning = new ID3Learning(tree);

            id3learning.Run(inputs, outputs);

            // Emit the learned tree as an expression, as C# source,
            // and as a human-readable rule set.
            var expression = tree.ToExpression();

            Console.WriteLine(tree.ToCode("ClassTest"));

            DecisionSet rules = tree.ToRules();

            Console.WriteLine(rules.ToString());

            // Compiles the expression to IL
            var func = expression.Compile();
        }
示例#33
0
	// Builds the decision set for a turn: one newspaper and one docket
	// per city, each based on a randomly selected topic. Cities with no
	// matching definition for the drawn topic are skipped.
	DecisionSet GenerateDecisionSet() {
		DecisionSet result = new DecisionSet ();

		// Newspapers: one randomly-themed article per city.
		foreach (City city in gameState.cities) {
			Topic topic = TopicUtil.Random ();
			DecisionDefinition articleDef = NewspaperDecisions.GetRandomForTopic (topic);
			if (articleDef == null) {
				Debug.Log ("Could not find article for topic: " + TopicUtil.ToString (topic));
				continue;
			}
			GameObject paper = Instantiate (prefabs.newspaper);
			// Template substitutions consumed by the decision prefab.
			Dictionary<string, string> substitutions = new Dictionary<string, string> {
				{ "CityName", city.name },
				{ "Topic", TopicUtil.ToString (topic) }
			};
			paper.GetComponent<NewspaperDecision> ().Define (articleDef, substitutions);
			result.newspapers.Add (paper);
		}

		// Dockets: one randomly-themed docket per city.
		foreach (City city in gameState.cities) {
			Topic topic = TopicUtil.Random ();
			DocketDecisionDefinition docketDef = DocketDecisions.GetRandomForTopic (topic);
			if (docketDef == null) {
				Debug.Log ("Could not find docket for topic: " + TopicUtil.ToString (topic));
				continue;
			}
			GameObject docket = Instantiate (prefabs.docet);
			Dictionary<string, string> substitutions = new Dictionary<string, string> {
				{ "CityName", city.name },
				{ "CharacterType", "" + docketDef.personType },
				{ "Topic", TopicUtil.ToString (topic) }
			};
			// NOTE(review): this mutates the shared definition object, not a
			// per-docket copy — confirm definitions are not reused across sets.
			docketDef.org = city;
			docket.GetComponent<DocketDecision> ().Define (docketDef, substitutions);
			result.docets.Add (docket);
		}
		//TODO memos
		return result;
	}
示例#34
0
        /// <summary>
        /// Trains a C4.5 decision tree on <paramref name="data"/>, converts the
        /// learned tree into a rule set, and publishes the numbered rules to
        /// <c>datarules</c> and the <c>GridSourceRule</c> data context.
        /// </summary>
        /// <param name="labelnya">Unused; kept for caller compatibility.</param>
        /// <param name="data">Rows whose leading columns are numeric features and
        /// whose last column is the class label.</param>
        public void DecisinTree(string[] labelnya, List <string[]> data)
        {
            datarules = new List <Rule>();
            string[][] text = data.ToArray();

            // All columns except the last are features; the last is the label.
            double[][] inputs  = new double[data.Count][];
            string[]   targets = new string[data.Count];

            for (int i = 0; i < data.Count; ++i)
            {
                int featureCount = text[i].Length - 1;
                // Parse features with the invariant culture so '.' is always the
                // decimal separator, matching the rest of this file's parsing.
                inputs[i] = text[i].Take(featureCount)
                                   .Select(s => double.Parse(s, System.Globalization.CultureInfo.InvariantCulture))
                                   .ToArray();
                // Take the label from the last column instead of a hard-coded
                // index so rows with more than four columns are handled too.
                targets[i] = text[i][featureCount];
            }

            string[] labels = targets;

            // Encode the textual class labels as integer class indices.
            var codebook = new Codification("Output", labels);

            int[] outputs = codebook.Translate("Output", labels);

            // Induce the tree from the data with C4.5.
            C45Learning teacher = new C45Learning();

            tree = teacher.Learn(inputs, outputs);

            // Estimated class labels on the training set itself.
            int[] predicted = tree.Decide(inputs);

            // Training (resubstitution) classification error.
            double error = new ZeroOneLoss(outputs).Loss(predicted);

            // Convert the tree to rules and render them through the codebook,
            // yielding one "class : condition" line per rule.
            DecisionSet rules = tree.ToRules();
            string ruleText = rules.ToString(codebook, "Output",
                                             System.Globalization.CultureInfo.InvariantCulture);
            List <string> rule = ruleText.Split(new[] { "\r\n" }, StringSplitOptions.None)
                                 .ToList();
            List <string[]> rows = rule.Select(x => x.Split(':')).ToList();

            int no = 1;

            // Keep only well-formed "class : condition" lines and number them.
            foreach (var item in rows)
            {
                if (item.Length >= 2)
                {
                    var r = new Rule();
                    r.id    = no++;
                    r.kelas = item[0].Replace('=', ' ');
                    r.rule  = item[1].ToString();
                    datarules.Add(r);
                }
            }

            GridSourceRule.DataContext = datarules;
        }