Example #1
        public void ArgumentCheck1()
        {
            int[][] samples =
            {
                new [] { 0, 2, 4 },
                new [] { 1, 5, 2 },
                null,
                new [] { 1, 5, 6 },
            };

            int[] outputs =
            {
                1, 1, 0, 0
            };

            DecisionVariable[] vars = new DecisionVariable[3];
            for (int i = 0; i < vars.Length; i++)
            {
                vars[i] = DecisionVariable.Discrete(i.ToString(), new IntRange(0, 10));
            }

            DecisionTree tree    = new DecisionTree(vars, 2);
            ID3Learning  teacher = new ID3Learning(tree);

            bool thrown = false;

            try { double error = teacher.Run(samples, outputs); }
            catch (ArgumentNullException) { thrown = true; }

            Assert.IsTrue(thrown);
        }
Example #2
        public void ConsistencyTest1()
        {
            int[,] random = Matrix.Random(1000, 10, 0.0, 10.0).ToInt32();

            int[][] samples = random.ToJagged();
            int[]   outputs = new int[1000];

            for (int i = 0; i < samples.Length; i++)
            {
                if (samples[i][0] > 8)
                {
                    outputs[i] = 1;
                }
            }

            DecisionVariable[] vars = new DecisionVariable[10];
            for (int i = 0; i < vars.Length; i++)
            {
                vars[i] = new DecisionVariable(i.ToString(), new IntRange(0, 10));
            }

            DecisionTree tree = new DecisionTree(vars, 2);

            ID3Learning teacher = new ID3Learning(tree);

            double error = teacher.Run(samples, outputs);

            Assert.AreEqual(0, error);

            Assert.AreEqual(11, tree.Root.Branches.Count);
            for (int i = 0; i < tree.Root.Branches.Count; i++)
            {
                Assert.IsTrue(tree.Root.Branches[i].IsLeaf);
            }
        }
Example #3
File: Program.cs Project: volend/ML
        ID3Learner LoadDecisionTree(List <Record> trainingSet, ReferenceTable table, int depth)
        {
            int[][] inputs;
            int[]   outputs;
            var     codebook = BuildCodebook(trainingSet, table, out inputs, out outputs);

            var attributes = new DecisionVariable[table.Columns.Length - 1];

            for (int i = 0; i < attributes.Length; i++)
            {
                attributes[i] = new DecisionVariable(table.Columns[i], table.GetValues(i).Length);
            }

            int classCount = 2;

            DecisionTree  tree        = new DecisionTree(attributes, classCount);
            ID3LearningEx id3Learning = new ID3LearningEx(tree)
            {
                MaxHeight = depth
            };

            id3Learning.Run(inputs, outputs);

            return(new ID3Learner(this, tree, codebook, table.Columns.Last()));
        }
Example #4
        // Loads up the Decision Tree
        public void CreateDecisionTree()
        {
            string[] cols       = { "Array Size", "Runs" };
            var      attributes = DecisionVariable.FromCodebook(codebook, cols);

            tree = new DecisionTree(attributes, ClassCount);
        }
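Example #4 only constructs the tree from the codebook; a minimal sketch of how such a tree might then be trained is shown below. The `TrainDecisionTree` method, its `table` argument, and the "Algorithm" output column are assumptions used purely for illustration (they are not part of the original project), but they follow the codify-then-learn pattern used in Examples #14 and #23.
        // Hypothetical continuation of CreateDecisionTree(): learn the tree from a DataTable.
        // 'table' and the "Algorithm" output column are illustrative assumptions only.
        public void TrainDecisionTree(DataTable table)
        {
            string[] cols = { "Array Size", "Runs" };

            DataTable  symbols = codebook.Apply(table);              // translate strings into integer symbols
            double[][] inputs  = symbols.ToJagged<double>(cols);     // codified feature columns
            int[]      outputs = symbols.ToArray<int>("Algorithm");  // codified class column (assumed name)

            var teacher = new C45Learning(tree);
            teacher.Learn(inputs, outputs);
        }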
Example #5
        public double CheckAccuracy(int trees, double ratio)
        {
            var variables = new DecisionVariable[Data.FeatureCount];

            for (int i = 0; i < Data.FeatureCount; i++)
            {
                variables[i] = new DecisionVariable(i.ToString(), DecisionVariableKind.Continuous);
            }
            var options = new ParallelOptions();

            options.MaxDegreeOfParallelism = 4;
            double accuracy = 0;

            for (int k = 0; k < Folds; k++)
            {
                RandomForestLearning teacher = new RandomForestLearning(variables);
                //teacher.ParallelOptions = options;
                teacher.SampleRatio   = ratio;
                teacher.NumberOfTrees = trees;
                var model   = teacher.Learn(TrainInput[k], TrainOutput[k]);
                int correct = 0;
                for (int i = 0; i < Data.InstancesPerFold; i++)
                {
                    var label = model.Decide(TestInput[k][i]);
                    if (label == TestOutput[k][i])
                    {
                        correct++;
                    }
                }
                accuracy += (double)correct / Data.InstancesPerFold;
            }

            return(accuracy);
        }
Example #6
        public void ConsistencyTest1()
        {
            double[,] random = Matrix.Random(1000, 10, 0.0, 1.0);

            double[][] samples = random.ToJagged();
            int[]      outputs = new int[1000];

            for (int i = 0; i < samples.Length; i++)
            {
                if (samples[i][0] > 0.8)
                {
                    outputs[i] = 1;
                }
            }

            DecisionVariable[] vars = new DecisionVariable[10];
            for (int i = 0; i < vars.Length; i++)
            {
                vars[i] = new DecisionVariable(i.ToString(), DecisionVariableKind.Continuous);
            }

            DecisionTree tree = new DecisionTree(vars, 2);

            C45Learning teacher = new C45Learning(tree);

            double error = teacher.Run(samples, outputs);

            Assert.AreEqual(0, error);

            Assert.AreEqual(2, tree.Root.Branches.Count);
            Assert.IsTrue(tree.Root.Branches[0].IsLeaf);
            Assert.IsTrue(tree.Root.Branches[1].IsLeaf);
        }
Example #7
        public void serialize_batch_models()
        {
#if !MONO
            test(new NaiveBayes(classes: 3, symbols: new[] { 1, 2, 3 }));
            test(new NaiveBayes <NormalDistribution>(classes: 4, inputs: 2, initial: (i, j) => new NormalDistribution(i, j + 1)));
            test(new NaiveBayes <NormalDistribution, double>(classes: 5, inputs: 3, initial: (i, j) => new NormalDistribution(i, j + 1)));
#endif
            test(new LogisticRegression());
            test(new SimpleLinearRegression());
            test(new MultivariateLinearRegression());
            test(new MultipleLinearRegression());

            test(new SupportVectorMachine(inputs: 3));
            test(new SupportVectorMachine <Gaussian>(inputs: 3, kernel: new Gaussian(0.5)));
            test(new SupportVectorMachine <Gaussian, double[]>(inputs: 3, kernel: new Gaussian(0.5)));

            test(new MulticlassSupportVectorMachine(inputs: 3, kernel: new Gaussian(0.5), classes: 2));
            test(new MulticlassSupportVectorMachine <Gaussian, double[]>(inputs: 3, kernel: new Gaussian(0.5), classes: 2));

            test(new MultilabelSupportVectorMachine <Gaussian>(inputs: 3, kernel: new Gaussian(0.5), classes: 2));
            test(new MultilabelSupportVectorMachine <Gaussian, double[]>(inputs: 3, kernel: new Gaussian(0.5), classes: 2));

            test(new DecisionTree(new[] { DecisionVariable.Continuous("test") }, classes: 2));
            test(new HiddenMarkovModel(states: 2, symbols: 3));
            test(new HiddenConditionalRandomField <double>());
        }
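The `test(...)` helper used above is not part of the snippet; a minimal sketch of what such a round-trip check might look like, assuming Accord's `Accord.IO.Serializer` and a throw-away temporary file, is:
        // Sketch of a possible test(...) helper (an assumption; the original helper is not shown):
        // serialize the freshly constructed model and verify that it can be loaded back.
        private static void test<T>(T model) where T : class
        {
            string path = System.IO.Path.GetTempFileName();   // hypothetical temporary location
            Serializer.Save(model, path);                     // Accord.IO binary serialization
            T reloaded = Serializer.Load<T>(path);
            Assert.IsNotNull(reloaded);                       // at minimum, the round trip must succeed
        }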
Example #8
        public DecisionTree GenerateDecisionTree(int inputsCount, ref double[][] inputs, ref int[] outputs,
                                                 int outputClassNum, List <string> listVariablesName, int neiWindowSize, int landuseTypesCount)
        {
            DecisionVariable[] variable = new DecisionVariable[inputsCount];

            for (int i = 0; i < inputsCount - 2; i++)
            {
                DecisionVariable v = new DecisionVariable(listVariablesName[i], DecisionVariableKind.Continuous);
                variable[i] = v;
            }
            DecisionVariable dv = new DecisionVariable(listVariablesName[inputsCount - 2], neiWindowSize * neiWindowSize + 1);

            variable[inputsCount - 2] = dv;
            DecisionVariable dv2 = new DecisionVariable(listVariablesName[inputsCount - 1], landuseTypesCount);

            variable[inputsCount - 1] = dv2;

            DecisionTree tree = new DecisionTree(variable, outputClassNum);
            C45Learning  c45  = new C45Learning(tree);

            //double error = c45.Run(inputs, outputs);
            tree = c45.Learn(inputs, outputs);

            return(tree);
        }
Example #9
        /// <summary>
        /// Learns a model, wrapper for
        /// <see cref="Accord.MachineLearning.DecisionTrees.Learning.C45Learning.Learn(double[][], int[], double[])"/>
        /// </summary>
        public void Learn()
        {
            var features = new DecisionVariable[GlobalVariables.Dimensions];

            // Adding DecisionVariables (features)
            for (var i = 0; i < GlobalVariables.Dimensions; i++)
            {
                features[i] = new DecisionVariable($"x{i}", DecisionVariableKind.Continuous);
            }

            // Create 2 class tree object
            DecisionTree = new DecisionTree(features, 2);

            var c45 = new C45Learning(DecisionTree)
            {
                Join            = Join,
                MaxHeight       = MaxHeight,
                ParallelOptions = new ParallelOptions {
                    MaxDegreeOfParallelism = 1
                }                                                                     // Only 1 thread will be used by the learning algorithm
            };

            c45.Learn(Inputs, Outputs);

            // Getting rules from tree and saving them to file
            using (var sw = new StreamWriter(OutputPath))
            {
                OutputRules = DecisionTree.ToRules().ToString().Replace(",", ".");
                sw.Write(OutputRules);
            }
        }
Example #10
        public void RandomForestBuild(List <train> datalist)
        {
            int length = datalist.Count;
            int d      = datalist[0].d;

            forest = new List <DecisionTree>();

            int n = datalist.Count;
            int k = d; //(int)Math.Sqrt(d);
            int m = 100;

            for (int i = 0; i < m; ++i)
            {
                double[][] inputs;
                int[]      outputs;
                int[]      indexs;
                GetData(out inputs, out outputs, datalist, n, k, out indexs);

                DecisionVariable[] variables = new DecisionVariable[k];
                for (int j = 0; j < k; ++j)
                {
                    variables[j] = new DecisionVariable("attribute" + (indexs[j] + 1), DecisionVariableKind.Continuous);
                }

                // Create the C4.5 learning algorithm
                var c45 = new C45Learning(variables);

                // Learn the decision tree using C4.5
                DecisionTree dtmp = c45.Learn(inputs, outputs);
                forest.Add(dtmp);
            }
            log("The random forest model has been trained");
        }
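`RandomForestBuild` trains the individual trees but does not show prediction; a majority-vote sketch over the `forest` list might look like the following (hypothetical, and assuming each query vector is expressed over the same k attributes that were selected for training):
        // Hypothetical companion to RandomForestBuild(): majority vote over the trained trees.
        // Assumes 'query' uses the same k attributes selected during training (an assumption).
        public int Predict(double[] query)
        {
            var votes = new Dictionary<int, int>();
            foreach (DecisionTree t in forest)
            {
                int label = t.Decide(query);
                votes[label] = votes.TryGetValue(label, out int c) ? c + 1 : 1;
            }

            // return the class label with the most votes
            return votes.OrderByDescending(kv => kv.Value).First().Key;
        }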
Example #11
        public void ArgumentCheck1()
        {
            double[][] samples =
            {
                new [] { 0, 2, 4.0 },
                new [] { 1, 5, 2.0 },
                null,
                new [] { 1, 5, 6.0 },
            };

            int[] outputs =
            {
                1, 1, 0, 0
            };

            DecisionVariable[] vars = new DecisionVariable[3];
            for (int i = 0; i < vars.Length; i++)
            {
                vars[i] = DecisionVariable.Continuous(i.ToString());
            }

            DecisionTree tree    = new DecisionTree(vars, 2);
            var          teacher = new C45Learning(tree);

            bool thrown = false;

            try { double error = teacher.Run(samples, outputs); }
            catch (ArgumentNullException) { thrown = true; }

            Assert.IsTrue(thrown);
        }
Example #12
        static void Main(string[] args)
        {
            DataTable table = new Accord.IO.CsvReader("C:\\Users\\michael\\Downloads\\JulyToOct2015Test.csv", true).ToTable();

            // Convert the DataTable to input and output vectors
            double[][] inputs  = table.ToJagged <double>("BookToPrice", "DividendYield", "DebtToEquity", "MarketBeta", "SectorID");
            int[]      outputs = table.Columns["MonthlyReturn"].ToArray <int>();


            //SecurityID BookToPrice DividendYield EarningsYield   SalesGrowth AssetsToEquity  MarketCap MarketBeta  DebtToEquity    1YrVol  5YrVol  3YrVol ExposureToCurrencyGain  SectorID countryID

            DecisionTree tree = new DecisionTree(
                inputs: new List <DecisionVariable>
            {
                DecisionVariable.Continuous("BookToPrice"),
                DecisionVariable.Continuous("DividendYield"),
                DecisionVariable.Continuous("DebtToEquity"),
                DecisionVariable.Continuous("MarketBeta"),
                DecisionVariable.Discrete("SectorID", 11)
            },
                classes: 2);

            C45Learning teacher = new C45Learning(tree);

            teacher.Learn(inputs, outputs);
            int[] answers = tree.Decide(inputs);
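
            // Possible follow-up (an assumption, not in the original program): measure the
            // resubstitution error of the answers above, mirroring the ZeroOneLoss usage in Example #14.
            double error = new ZeroOneLoss(outputs).Loss(answers);
            Console.WriteLine("Training error: " + error);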


            // Plot the results
            // ScatterplotBox.Show("Expected results", inputs, outputs);
            //ScatterplotBox.Show("Ans", inputs, answers)
            //    .Hold();
        }
Example #13
        private static void decisionTree(double[][] inputs, int[] outputs)
        {
            // In our problem, we have 2 classes (samples can be either
            // positive or negative), and 2 continuous-valued inputs.
            DecisionTree tree = new DecisionTree(inputs: new[]
            {
                DecisionVariable.Continuous("X"),
                DecisionVariable.Continuous("Y")
            }, classes: 2);

            C45Learning teacher = new C45Learning(tree);

            // The C4.5 algorithm expects the class labels to
            // range from 0 to k, so we convert -1 to be zero:
            //
            outputs = outputs.Apply(x => x < 0 ? 0 : x);

            double error = teacher.Run(inputs, outputs);

            // Classify the samples using the model
            int[] answers = inputs.Apply(tree.Compute);

            // Plot the results
            ScatterplotBox.Show("Expected results", inputs, outputs);
            ScatterplotBox.Show("Decision Tree results", inputs, answers)
            .Hold();
        }
Example #14
        private void ComputeInference()
        {
            var codebook = new Codification();

            codebook.Learn(tradeTable);

            DataTable symbols = codebook.Apply(tradeTable);

            string[]   inputNames = new[] { "Strike", "MarketPrice", "Notional" };
            double[][] inputs     = tradeTable.ToJagged(inputNames);
            int[]      outputs    = tradeTable.ToArray <int>("Result");


            var teacher = new C45Learning()
            {
                Attributes = DecisionVariable.FromCodebook(codebook, inputNames)
            };


            DecisionTree tree = teacher.Learn(inputs, outputs);

            int[]       predicted = tree.Decide(inputs);
            double      error     = new ZeroOneLoss(outputs).Loss(predicted);
            DecisionSet rules     = tree.ToRules();

            var str = rules.ToString();

            textBoxInferredRules.Text = str;
        }
Example #15
        public static TrainingSet[] GenerateTrainingSets(IEnumerable <KeyValuePair <User, double[]> > studentsAndMarks, string[] normalRecords, string[] anomalies)
        {
            var countOfEntries = normalRecords.Length + anomalies.Length;
            var inputData      = new double[countOfEntries][];
            var outputData     = new int[countOfEntries];
            var counter        = 0;

            foreach (var studentAndMarks in studentsAndMarks)
            {
                if (normalRecords.Contains(studentAndMarks.Key.OpenId))
                {
                    inputData[counter]    = studentAndMarks.Value;
                    outputData[counter++] = 1;
                }

                if (!anomalies.Contains(studentAndMarks.Key.OpenId))
                {
                    continue;
                }

                inputData[counter]    = studentAndMarks.Value;
                outputData[counter++] = 0;
            }

            var countOfFeatures = studentsAndMarks.ElementAt(0).Value.Length;
            var features        = new DecisionVariable[countOfFeatures];

            features[0] = new DecisionVariable("0", DecisionAttributeKind.Continuous, new AForge.DoubleRange(80, 1200));

            for (var i = 1; i < countOfFeatures; i++)
            {
                features[i] = new DecisionVariable(i.ToString(), DecisionAttributeKind.Continuous, new AForge.DoubleRange(0, 10));
            }

            // Create the Decision tree with only 2 result values
            var tree = new DecisionTree(features, 2);

            // Creates a new instance of the C4.5 learning algorithm
            var c45 = new C45Learning(tree);

            // Learn the decision tree
            var error = c45.Run(inputData, outputData);

            // Split all data into normal and anomalies
            var setOfNormalRecords = studentsAndMarks.Where(x => tree.Compute(x.Value) == 1);
            var setOfAnomalies     = studentsAndMarks.Where(x => tree.Compute(x.Value) == 0);

            // Split normal records into 2 groups (one for the training set and one for anomaly occurrence detection)
            var setOfNormalRecordsList = setOfNormalRecords.ToList();
            var splitCount             = setOfNormalRecordsList.Count * 2 / 3;
            var setOfNormalRecordsTr1  = setOfNormalRecordsList.GetRange(0, splitCount);
            var setOfNormalRecordsTr2  = setOfNormalRecordsList.GetRange(splitCount, setOfNormalRecordsList.Count - splitCount);
            // Create Training Sets
            var trSetNormalFirst  = CreateTrainingSetFromResources(setOfNormalRecordsTr1);
            var trSetNormalSecond = CreateTrainingSetFromResources(setOfNormalRecordsTr2);
            var trSetAnomalies    = CreateTrainingSetFromResources(setOfAnomalies);

            return(new[] { trSetNormalFirst, trSetNormalSecond, trSetAnomalies });
        }
Example #17
        public static void Main(string[] args)
        {
            //getting example data
            Iris iris = new Iris();

            //we are creating training data arrays
            double[][] input  = new double[147][];
            int[]      output = new int[147];

            //we process the 'Iris' data, holding out one sample of each class for later testing
            int j = 0;

            for (int i = 0; i < 150; i++)
            {
                if (i != 0 && i != 50 && i != 100)
                {
                    input[j]  = new double[4];
                    output[j] = iris.ClassLabels[i];
                    for (int k = 0; k < 4; k++)
                    {
                        input[j][k] = iris.Instances[i][k];
                    }
                    j++;
                }
            }

            //learning algorithm for decision tree
            C45Learning teacher = new C45Learning(new[] {
                DecisionVariable.Continuous(iris.VariableNames[0]),
                DecisionVariable.Continuous(iris.VariableNames[1]),
                DecisionVariable.Continuous(iris.VariableNames[2]),
                DecisionVariable.Continuous(iris.VariableNames[3]),
            });

            //model learning
            DecisionTree tree = teacher.Learn(input, output);

            //If we had other irises we could simply write
            //DecisionTree tree = teacher.Learn(iris.Instances, iris.ClassLabels);
            //but we prefer to hold some back for testing (to check that our program works correctly)

            //testing our model
            double[][] test    = { iris.Instances[0], iris.Instances[50], iris.Instances[100] };
            int[]      answers = tree.Decide(test);

            Console.WriteLine("Answer should be as follow:\n0,1,2,\nAnswer is:");

            foreach (int ans in answers)
            {
                Console.Write(ans + ",");
            }

            Console.Write("\nPress any key to continue . . . ");
            Console.ReadKey(true);
        }
Example #18
        /// <summary>
        ///   Creates a new <see cref="DecisionTree"/> to process
        ///   the given <paramref name="attributes"/> and the given
        ///   number of possible <paramref name="outputClasses"/>.
        /// </summary>
        /// 
        /// <param name="attributes">An array specifying the attributes to be processed by this tree.</param>
        /// <param name="outputClasses">The number of possible output classes for the given atributes.</param>
        /// 
        public DecisionTree(DecisionVariable[] attributes, int outputClasses)
        {
            if (outputClasses <= 0)
                throw new ArgumentOutOfRangeException("outputClasses");
            if (attributes == null)
                throw new ArgumentNullException("attributes");

            this.Attributes = new DecisionAttributeCollection(attributes);
            this.InputCount = attributes.Length;
            this.OutputClasses = outputClasses;
        }
Example #19
        public void Classification_Train(double[,] train_docrule, int[] label, string algorithm)
        {
            string classmodelpath;
            int    attrSize     = eclatlitems.Count;
            int    attrSizeTest = eclatlitems.Count;

            // Specify the input variables
            DecisionVariable[] variables = new DecisionVariable[attrSize];
            for (int i = 0; i < attrSize; i++)
            {
                variables[i] = new DecisionVariable((i + 1).ToString(), DecisionVariableKind.Discrete);
            }

            if (algorithm == "Tree")
            {
                classmodelpath = algorithm + ".model";
                //RandomForest tree2 = new RandomForest(2, variables);
                DecisionTree tree    = new DecisionTree(variables, 2);
                C45Learning  teacher = new C45Learning(tree);
                var          model   = teacher.Learn(train_docrule.ToJagged(), label);
                //save model
                teacher.Save(Path.Combine("", classmodelpath));
            }
            if (algorithm == "SVM")
            {
                classmodelpath = algorithm + ".model";
                var learn = new SequentialMinimalOptimization()
                {
                    UseComplexityHeuristic = true,
                    UseKernelEstimation    = false
                };
                SupportVectorMachine teacher = learn.Learn(train_docrule.ToJagged(), label);
                //save model
                teacher.Save(Path.Combine("", classmodelpath));
            }

            if (algorithm == "Logistic")
            {
                classmodelpath = algorithm + ".model";
                var learner = new IterativeReweightedLeastSquares <LogisticRegression>()
                {
                    Tolerance      = 1e-4, // Let's set some convergence parameters
                    Iterations     = 1,    // maximum number of iterations to perform
                    Regularization = 0
                };
                LogisticRegression teacher = learner.Learn(train_docrule.ToJagged(), label);
                teacher.Save(Path.Combine("", classmodelpath));
            }

            if (algorithm == "GA")
            {
                weights_ga_matlab();
            }
        }
Example #20
        /// <summary>
        ///   Learns a model that can map the given inputs to the given outputs.
        /// </summary>
        ///
        /// <param name="x">The model inputs.</param>
        /// <param name="y">The desired outputs associated with each <paramref name="x">inputs</paramref>.</param>
        /// <param name="weights">The weight of importance for each input-output pair.</param>
        ///
        /// <returns>A model that has learned how to produce <paramref name="y"/> given <paramref name="x"/>.</returns>
        ///
        public DecisionTree Learn(double[][] x, int[] y, double[] weights = null)
        {
            if (tree == null)
            {
                var variables = DecisionVariable.FromData(x);
                int classes   = y.DistinctCount();
                init(new DecisionTree(variables, classes));
            }

            this.Run(x, y);
            return(tree);
        }
Example #22
        /// <summary>
        ///   Learns a model that can map the given inputs to the given outputs.
        /// </summary>
        ///
        /// <param name="x">The model inputs.</param>
        /// <param name="y">The desired outputs associated with each <paramref name="x">inputs</paramref>.</param>
        /// <param name="weights">The weight of importance for each input-output pair.</param>
        ///
        /// <returns>A model that has learned how to produce <paramref name="y"/> given <paramref name="x"/>.</returns>
        ///
        public DecisionTree Learn(int[][] x, int[] y, double[] weights = null)
        {
            if (tree == null)
            {
                var variables = DecisionVariable.FromData(x);
                int classes   = y.Max() + 1;
                init(new DecisionTree(variables, classes));
            }

            this.run(x.ToDouble(), y);
            return(tree);
        }
Example #23
        public static DecisionTree Learn(DataTable data, string[] inputColumns, string outputColumn)
        {
            var codebook = new Codification(data);
            var symbols  = codebook.Apply(data);

            double[][] inputs  = symbols.ToJagged(inputColumns);
            int[]      outputs = symbols.ToArray <int>(outputColumn);

            var attributes = DecisionVariable.FromCodebook(codebook, inputColumns);
            var c45        = new C45Learning(attributes);

            return(c45.Learn(inputs, outputs));
        }
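A possible call site for the helper above; the `weather` table, `LoadWeatherTable()` and the column names are assumptions used only for illustration:
            // Hypothetical usage of the Learn(...) helper above; the DataTable and the
            // column names are assumptions and do not come from the original project.
            DataTable weather = LoadWeatherTable();          // assumed to be defined elsewhere
            DecisionTree tree = Learn(weather,
                inputColumns: new[] { "Outlook", "Temperature", "Humidity", "Wind" },
                outputColumn: "PlayTennis");
Note that the `Codification` created inside `Learn` is not returned, so a caller wanting to map `tree.Decide` results back to the original string labels would have to rebuild the same codebook from the source table.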
Example #24
        /***************************** Constructor *********************************/
        public Classification(SortedList columnList, int classLabeCount)
        {
            // Initialize DecisionTree
            decisionAttributes = new DecisionVariable[columnList.Count];
            for (int i = 0; i < decisionAttributes.Length; i++)
            {
                decisionAttributes[i] = new DecisionVariable((string)columnList.GetByIndex(i), DecisionVariableKind.Continuous);
            }

            int classCount = classLabeCount;

            descisionTree = new DecisionTree(decisionAttributes, classCount);
        }
Example #25
        public void buildModel()
        {
            var attributes = DecisionVariable.FromData(inputs);
            // Now, let's create the forest learning algorithm
            var teacher = new RandomForestLearning(attributes)
            {
                NumberOfTrees = 1,
                SampleRatio   = 1.0
            };

            // Finally, learn a random forest from data
            this.forest = teacher.Learn(inputs, outputs);
        }
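A small companion sketch (not part of the original class) showing how the learned forest could be evaluated on the same `inputs` and `outputs` fields, reusing the `ZeroOneLoss` pattern from Example #14:
        // Hypothetical companion to buildModel(): resubstitution error of the learned forest.
        // Assumes the same 'inputs', 'outputs' and 'forest' members used by the method above.
        public double TrainingError()
        {
            int[] predicted = this.forest.Decide(inputs);
            return new ZeroOneLoss(outputs).Loss(predicted);
        }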
Example #26
        /*
         * Takes a Datatable with the training data
         * translates the data to ints
         * trains using the training data
         * The last col of the datatable input is the thing to predicted
         */
        public void Train(int index)
        {
            DataTable dataTable = this.theData;

            // Debug.Write("DataTable size: ");
            // Debug.Write("Rows: " + dataTable.Rows.Count);
            // Debug.Write("Cols: " + dataTable.Columns.Count);

            ArrayList inputNames = new ArrayList();

            foreach (DataColumn column in dataTable.Columns)
            {
                inputNames.Add(column.ColumnName);
            }
            this.toPredict = (string)inputNames[index];                                         // The column to predict
            inputNames.RemoveAt(index);                                                         // the input columns (the prediction column removed)
            this.inputNamesArr = (string[])inputNames.ToArray(typeof(string));

            // Debug.Write("Input arr size: " + inputNamesArr.Length);

            // Using Accord.Statistics.Filters to present the data as integers,
            // as integers are more efficient
            this.codebook = new Codification(dataTable)
            {
                DefaultMissingValueReplacement = 0
            };                                                                                   // codebook object that can convert  strings to ints, null/missing value will be defaulted to 0
            DataTable symbols = codebook.Apply(dataTable);                                       // applying our data to the codebook

            int[][] inputs  = symbols.ToJagged <int>(inputNamesArr);                             // The conversion to ints
            int[]   outputs = symbols.ToArray <int>(toPredict);                                  // The conversion to ints

            // Debug.Write("Array size: ");
            // Debug.Write("inputs: " + inputs.Length);
            // Debug.Write("outputs: " + outputs.Length);

            // Debug.Write("Test");

            var id3 = new ID3Learning()                                                          // the id3 algo
            {
                Attributes = DecisionVariable.FromCodebook(codebook, inputNamesArr)              // the tree's decision attributes/headers from the spreadsheet; the second argument limits which columns are used
            };

            this.tree = id3.Learn(inputs, outputs);                                              // Learn using the inputs and output defined above

            // transform the rules of the tree into a string
            DecisionSet treeRules = tree.ToRules();

            ruleText = treeRules.ToString(codebook, toPredict,
                                          System.Globalization.CultureInfo.InvariantCulture);
            Debug.WriteLine(ruleText);
        }
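A minimal prediction sketch to pair with `Train` above (hypothetical, not in the original class), showing how the stored `codebook`, `tree`, `inputNamesArr` and `toPredict` could classify one new row of raw string values:
        // Hypothetical companion to Train(): classify a single row of raw string values.
        // The dictionary keys are assumed to match the column names used during training.
        public string Predict(Dictionary<string, string> row)
        {
            // run the new values through the same codebook that was learned in Train()
            int[] instance = inputNamesArr
                .Select(name => codebook.Transform(name, row[name]))
                .ToArray();

            int label = tree.Decide(instance);               // integer class predicted by the ID3 tree
            return codebook.Revert(toPredict, label);        // translate the class code back to its string symbol
        }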
Example #27
        public RandomForestClassifier(FeatureValueTypes featureDefaultsValueTypes, FeatureGranularities featureGranularities, string serializedClassifierPath = null)
        {
            var featureKinds = new List <DecisionVariable>();

            foreach (var entry in featureDefaultsValueTypes)
            {
                var featureName = entry.Key.ToString();
                var featureType = entry.Value;

                var featureGranularity = featureGranularities[entry.Key];

                if (featureGranularity == FeatureGranularity.Continuous)
                {
                    featureKinds.Add(new DecisionVariable(featureName, DecisionVariableKind.Continuous));
                }
                else if (featureGranularity == FeatureGranularity.Discrete)
                {
                    var decisionVar = new DecisionVariable(featureName, DecisionVariableKind.Discrete);

                    // TODO: Fix uint, there is no Accord.UIntRange
                    if (featureType == (typeof(int)) || featureType == (typeof(int?)) || featureType == (typeof(uint)) || featureType == (typeof(uint?)))
                    {
                        decisionVar.Range = new Accord.IntRange(min: int.MinValue, max: int.MaxValue);
                    }
                    else if (featureType == (typeof(byte)) || featureType == (typeof(byte?)))
                    {
                        decisionVar.Range = new Accord.IntRange(min: byte.MinValue, max: byte.MaxValue);
                    }

                    featureKinds.Add(decisionVar);
                }
                else
                {
                    throw new ArgumentException("Unknown feature granularity");
                }
            }

            var featureKindsArr = featureKinds.ToArray <DecisionVariable>();

            this.RandomForestLearner = new RandomForestLearning(featureKindsArr)
            {
                NumberOfTrees = 10
            };

            if (serializedClassifierPath != null)
            {
                this.RandomForest = Serializer.Load <RandomForest>(serializedClassifierPath);
            }
        }
Example #28
        public void Learn()
        {
            var inputs  = GetLearnInputs();
            var outputs = GetOutputs();
            var teacher = new C45Learning {
                Join = 0
            };

            foreach (var controllerOutputProperty in GetControllerOutputProperties())
            {
                teacher.Attributes.Add(DecisionVariable.Continuous(controllerOutputProperty));
            }

            DecisionTree = teacher.Learn(inputs, outputs);
        }
Example #29
        /// <summary>
        ///   Learns a model that can map the given inputs to the given outputs.
        /// </summary>
        ///
        /// <param name="x">The model inputs.</param>
        /// <param name="y">The desired outputs associated with each <paramref name="x">inputs</paramref>.</param>
        /// <param name="weights">The weight of importance for each input-output pair.</param>
        ///
        /// <returns>A model that has learned how to produce <paramref name="y"/> given <paramref name="x"/>.</returns>
        ///
        public DecisionTree Learn(double[][] x, int[] y, double[] weights = null)
        {
            if (tree == null)
            {
                if (this.attributes == null)
                {
                    this.attributes = DecisionVariable.FromData(x);
                }
                int classes = y.Max() + 1;
                init(new DecisionTree(this.attributes, classes));
            }

            this.run(x, y);
            return(tree);
        }
Example #30
    // Decision Tree
    public DecisionTree DecisionThrust(double[][] inputs, int[] outputs)
    {
        C45Learning teacher = new C45Learning(new[] {
            DecisionVariable.Continuous("X"),
            DecisionVariable.Continuous("Y"),
            DecisionVariable.Continuous("Z"),
            DecisionVariable.Continuous("W")
        });

        teacher.ParallelOptions.MaxDegreeOfParallelism = 1;

        // Use the learning algorithm to induce the tree
        DecisionTree tree = teacher.Learn(inputs, outputs);

        return(tree);
    }
Example #31
        private static DecisionTree createTree(out double[][] inputs, out int[] outputs)
        {
            string nurseryData = Resources.nursery;

            string[] inputColumns =
            {
                "parents", "has_nurs", "form",   "children",
                "housing", "finance",  "social", "health"
            };


            string outputColumn = "output";


            DataTable table = new DataTable("Nursery");

            table.Columns.Add(inputColumns);
            table.Columns.Add(outputColumn);

            string[] lines = nurseryData.Split(
                new[] { Environment.NewLine }, StringSplitOptions.None);

            foreach (var line in lines)
            {
                table.Rows.Add(line.Split(','));
            }


            Codification codebook = new Codification(table);


            DataTable symbols = codebook.Apply(table);

            inputs  = symbols.ToArray(inputColumns);
            outputs = symbols.ToArray <int>(outputColumn);


            var attributes = DecisionVariable.FromCodebook(codebook, inputColumns);
            var tree       = new DecisionTree(attributes, classes: 5);


            C45Learning c45 = new C45Learning(tree);

            c45.Run(inputs, outputs);

            return(tree);
        }
Example #32
        public void  Train(List <TrainingValue> trainingData)
        {
            List <DecisionVariable> trainingVariables = new List <DecisionVariable>();

            for (int i = 0; i < featureSize; i++)
            {
                trainingVariables.Add(DecisionVariable.Continuous(i.ToString()));
            }

            tree = new DecisionTree(inputs: trainingVariables, classes: 2);


            double[][] featuresArray = new double[trainingData.Count][];
            int[]      labels        = new int[trainingData.Count];

            for (int i = 0; i < featuresArray.Length; i++)
            {
                featuresArray[i] = trainingData[i].Features;
                labels[i]        = Convert.ToInt32(trainingData[i].State);
            }

            switch (type)
            {
            case ClassifierType.DecisionTree:
                C45Learning teacher = new C45Learning(tree);
                teacher.Learn(featuresArray, labels);
                break;

            case ClassifierType.LDA:
                LinearDiscriminantAnalysis lda = new LinearDiscriminantAnalysis();
                pipeline = lda.Learn(featuresArray, labels);
                break;

            case ClassifierType.SVM:
                LinearCoordinateDescent svmLearner = new LinearCoordinateDescent();
                svm = svmLearner.Learn(featuresArray, labels);
                break;

            case ClassifierType.Bayes:
                NaiveBayesLearning <NormalDistribution> learner = new NaiveBayesLearning <NormalDistribution>();
                bayes = learner.Learn(featuresArray, labels);
                break;
            }

            Trained = true;
        }
Example #33
        public static DecisionTree createNurseryExample(out double[][] inputs, out int[] outputs, int first)
        {
            string nurseryData = Resources.nursery;

            string[] inputColumns =
            {
                "parents", "has_nurs", "form",   "children",
                "housing", "finance",  "social", "health"
            };

            string outputColumn = "output";

            DataTable table = new DataTable("Nursery");

            table.Columns.Add(inputColumns);
            table.Columns.Add(outputColumn);

            string[] lines = nurseryData.Split(
                new[] { "\r\n" }, StringSplitOptions.RemoveEmptyEntries);

            Assert.AreEqual(12960, lines.Length);
            Assert.AreEqual("usual,proper,complete,1,convenient,convenient,nonprob,recommended,recommend", lines[0]);
            Assert.AreEqual("great_pret,very_crit,foster,more,critical,inconv,problematic,not_recom,not_recom", lines[lines.Length - 1]);

            foreach (var line in lines)
            {
                table.Rows.Add(line.Split(','));
            }

            Codification codebook = new Codification(table);
            DataTable    symbols  = codebook.Apply(table);

            inputs  = symbols.ToArray(inputColumns);
            outputs = symbols.ToArray <int>(outputColumn);

            var attributes = DecisionVariable.FromCodebook(codebook, inputColumns);
            var tree       = new DecisionTree(attributes, classes: 5);

            C45Learning c45   = new C45Learning(tree);
            double      error = c45.Run(inputs.First(first), outputs.First(first));

            Assert.AreEqual(0, error);

            return(tree);
        }
Example #34
        public void LargeRunTest2()
        {
            Accord.Math.Random.Generator.Seed = 0;

            int[,] random = Matrix.Random(1000, 10, 0.0, 10.0).ToInt32();

            int[][] samples = random.ToJagged();
            int[] outputs = new int[1000];

            for (int i = 0; i < samples.Length; i++)
            {
                if (samples[i][0] > 5 || Tools.Random.NextDouble() > 0.85)
                    outputs[i] = 1;
            }

            DecisionVariable[] vars = new DecisionVariable[10];
            for (int i = 0; i < vars.Length; i++)
                vars[i] = new DecisionVariable("x" + i, 10);

            DecisionTree tree = new DecisionTree(vars, 2);

            var teacher = new ID3Learning(tree);

            double error = teacher.Run(samples, outputs);

            Assert.AreEqual(0, error);

            var rules = DecisionSet.FromDecisionTree(tree);

            Simplification simpl = new Simplification(rules)
            {
                Alpha = 0.05
            };

            error = simpl.ComputeError(samples.ToDouble(), outputs);
            Assert.AreEqual(0, error);

            double newError = simpl.Compute(samples.ToDouble(), outputs);

            Assert.AreEqual(0.097, newError);
        }
Example #35
	public void Play( string text )
	{
		List<string> commands = GetKeyValuePairs(text);
	
		// set end time to start time
		endtime = Time.time;
		
		// init string to build
		outputString = "";
		
		foreach( string command in commands )
		{
			if ( command.Contains("audio") )
			{
				string audio;
				GetToken(command,"audio",out audio);
				// add audio
				AddAudioBite(audio);
				// build string
				outputString += (GetClipString(audio));
				// add gap
				endtime += biteGap;
			}
			if ( command.Contains("variable") )
			{
				// have to build a number here, from a decision variable
				string numString="";
				GetToken(command,"variable",out numString);
				// this is a decision variable
				DecisionVariable var = new DecisionVariable(numString.ToLower(), true);
				if ( var.Valid == true )
				{
					PlayNumber(var.Get());
					// convert number to string
					float number = Convert.ToSingle(var.Get());
					number = (float)Math.Round(number,(decimals==true)?1:0);
					outputString += (number.ToString());
				}
			}
			if ( command.Contains("number") )
			{
				// have to build a number here, from a decision variable
				string numString="";
				GetToken(command,"number",out numString);
				PlayNumber(numString);
				outputString += (numString);
			}
			if ( command.Contains("decimals") )
			{
				string decimals="";
				GetToken(command,"decimals", out decimals);
				this.decimals = Convert.ToBoolean(decimals);
			}
			if ( command.Contains("gap") )
			{
				string gap="";
				GetToken(command,"gap", out gap);
				endtime += Convert.ToSingle(gap);
				outputString += ".  ";
			}
			if ( command.Contains("text") )
			{
				string textStr="";
				GetToken(command,"text", out textStr);
				if ( textStr == "space" )
					outputString += " ";
				else
					outputString += textStr;
			}
		}
	}	
Example #36
 public static DecisionVariable GetVar(string condition, string var)
 {
     string arg = GetArg(condition, var);
     if (arg != null)
     {
         DecisionVariable variable = new DecisionVariable(arg);
         // return var
         return variable;
     }
     return null;
 }
Example #37
    public override bool Parse(string strval)
    {
        input = strval;

        // get var1
        variable = GetVar(strval, "var");
        if (variable == null)
            return false;

        // get constant
        string tmp = GetArg(strval, "constant");
        if (tmp != null)
            constant = tmp;
        else
            return false;

        // get condition
        condition = GetArg(strval, "condition");
        if (condition == null)
            return false;

        return true;
    }
Example #41
        public void ConsistencyTest1()
        {
            int[,] random = Matrix.Random(1000, 10, 0, 10).ToInt32();

            int[][] samples = random.ToArray();
            int[] outputs = new int[1000];

            for (int i = 0; i < samples.Length; i++)
            {
                if (samples[i][0] > 8)
                    outputs[i] = 1;
            }

            DecisionVariable[] vars = new DecisionVariable[10];
            for (int i = 0; i < vars.Length; i++)
                vars[i] = new DecisionVariable(i.ToString(), new IntRange(0,10));

            DecisionTree tree = new DecisionTree(vars, 2);

            ID3Learning teacher = new ID3Learning(tree);

            double error = teacher.Run(samples, outputs);

            Assert.AreEqual(0, error);

            Assert.AreEqual(11, tree.Root.Branches.Count);
            for (int i = 0; i < tree.Root.Branches.Count; i++)
                Assert.IsTrue(tree.Root.Branches[i].IsLeaf);    
        }
Example #42
        public void ConstantDiscreteVariableTest()
        {
            DecisionTree tree;
            int[][] inputs;
            int[] outputs;

            DataTable data = new DataTable("Degenerated Tennis Example");

            data.Columns.Add("Day", "Outlook", "Temperature", "Humidity", "Wind", "PlayTennis");

            data.Rows.Add("D1", "Sunny", "Hot", "High", "Weak", "No");
            data.Rows.Add("D2", "Sunny", "Hot", "High", "Strong", "No");
            data.Rows.Add("D3", "Overcast", "Hot", "High", "Weak", "Yes");
            data.Rows.Add("D4", "Rain", "Hot", "High", "Weak", "Yes");
            data.Rows.Add("D5", "Rain", "Hot", "Normal", "Weak", "Yes");
            data.Rows.Add("D6", "Rain", "Hot", "Normal", "Strong", "No");
            data.Rows.Add("D7", "Overcast", "Hot", "Normal", "Strong", "Yes");
            data.Rows.Add("D8", "Sunny", "Hot", "High", "Weak", "No");
            data.Rows.Add("D9", "Sunny", "Hot", "Normal", "Weak", "Yes");
            data.Rows.Add("D10", "Rain", "Hot", "Normal", "Weak", "Yes");
            data.Rows.Add("D11", "Sunny", "Hot", "Normal", "Strong", "Yes");
            data.Rows.Add("D12", "Overcast", "Hot", "High", "Strong", "Yes");
            data.Rows.Add("D13", "Overcast", "Hot", "Normal", "Weak", "Yes");
            data.Rows.Add("D14", "Rain", "Hot", "High", "Strong", "No");

            // Create a new codification codebook to
            // convert strings into integer symbols
            Codification codebook = new Codification(data);

            DecisionVariable[] attributes =
            {
               new DecisionVariable("Outlook",     codebook["Outlook"].Symbols),     // 3 possible values (Sunny, overcast, rain)
               new DecisionVariable("Temperature", codebook["Temperature"].Symbols), // 1 constant value (Hot)
               new DecisionVariable("Humidity",    codebook["Humidity"].Symbols),    // 2 possible values (High, normal)
               new DecisionVariable("Wind",        codebook["Wind"].Symbols)         // 2 possible values (Weak, strong)
            };

            int classCount = codebook["PlayTennis"].Symbols; // 2 possible values (yes, no)


            bool thrown = false;
            try
            {
                tree = new DecisionTree(attributes, classCount);
            }
            catch
            {
                thrown = true;
            }

            Assert.IsTrue(thrown);


            attributes[1] = new DecisionVariable("Temperature", 2);
            tree = new DecisionTree(attributes, classCount);
            ID3Learning id3 = new ID3Learning(tree);

            // Extract symbols from data and train the classifier
            DataTable symbols = codebook.Apply(data);
            inputs = symbols.ToArray<int>("Outlook", "Temperature", "Humidity", "Wind");
            outputs = symbols.ToArray<int>("PlayTennis");

            double error = id3.Run(inputs, outputs);

            for (int i = 0; i < inputs.Length; i++)
            {
                int y = tree.Compute(inputs[i]);
                Assert.AreEqual(outputs[i], y);
            }
        }
Example #43
	public string Parse( string input )
	{
		string output="";
		
		if ( input.Contains("%") == true )
		{
			string[] words = input.Split(' ');
			foreach( string word in words )
			{
				if ( word.Contains("%") )
				{
					// remove %
					string tmp = word.Replace("%","");
					// get DV
					DecisionVariable dv = new DecisionVariable(tmp,true);
					if ( dv != null )
					{
						int val = (int)(Math.Round(dv.GetFloat(),0));
						output += val.ToString() + " ";
					}
					else
						output += "(<" + word + "> not found)" + " ";
				}
				else
					output += word + " ";
			}
			return output;
		}
		
		return input;
	}	
Example #44
 bool AddDecisionVariable(string key)
 {
     // dictionary doesn't contain this key, so try to make one
     // from the name; the decision variable needs the object name
     DecisionVariable v = new DecisionVariable(this.Name + "." + key.Substring(1), true);
     if ( v != null && v.Valid == true )
     {
         decisionVariables.Add(key,v);
         return true;
     }
     else
     {
         UnityEngine.Debug.LogError("Object.GetAttribute(" + key + ") : can't create decision variable");
         return false;
     }
 }
Example #45
    public bool Test(DecisionVariable var2, string condition)
    {
        if (GetType() == typeof(System.Single))
        {
            switch (condition)
            {
			case "=":
            case "equal":
                return (this.GetFloat() == var2.GetFloat());
			case "!=":
            case "notequal":
                return (this.GetFloat() != var2.GetFloat());
			case "<":
            case "less":
                return (this.GetFloat() < var2.GetFloat());
			case ">":
            case "greater":
                return (this.GetFloat() > var2.GetFloat());
            default:
                return false;
            }
        }
        if (GetType() == typeof(System.String))
        {
            return (this.GetString() == var2.GetString());
        }
        return false;
    }
Example #46
        /// <summary>
        /// Builds the decision tree
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e"></param>
        private void btnCreate_Click(object sender, EventArgs e)
        {
            try
            {
                if (dgvLearningSource.DataSource == null)
                {
                    MessageBox.Show("Load the data first");
                    return;
                }
                if (Tree_property.Property_View == true)
                {
                    Tree_property.ShowDialog();
                }
                // Commit the pending edit on the DataGridView
                dgvLearningSource.EndEdit();

                #region C4.5 algorithm
                ///
                ///C4.5 algorithm
                ///
                if (Tree_property.Alg == "C4.5")
                {
                    // create a matrix from the data table
                    double[,] sourceMatrix = (dgvLearningSource.DataSource as DataTable).ToMatrix(out sourceColumns);

                    C45Learning c45;

                    // get the input values
                    double[][] inputs = sourceMatrix.Submatrix(null, 0, Tree_property.Coun_In - 1).ToArray();

                    // get the output values
                    int[] outputs = sourceMatrix.GetColumn(Tree_property.Coun_Out - 1).ToInt32();

                    DecisionVariable[] attributes = new DecisionVariable[Tree_property.Coun_In];

                    for (int j = 0; j < Tree_property.Coun_In; j++)
                    {
                        attributes[j] = new DecisionVariable(dgvLearningSource.Columns[j].Name, DecisionAttributeKind.Continuous);
                    }

                    // create the decision tree
                    tree = new DecisionTree(attributes, 60);

                    c45 = new C45Learning(tree);
                    double error = c45.Run(inputs, outputs);
                }
                #endregion

                #region ID3 algorithm
                ///
                ///ID3 algorithm
                ///
                if (Tree_property.Alg == "ID3")
                {
                    // create a matrix from the DataTable
                    int[][] arr = (dgvLearningSource.DataSource as DataTable).ToIntArray(sourceColumns);
                    int[,] sourceMatrix = arr.ToMatrix();

                    //// get the input values

                    int[][] inputs = sourceMatrix.Submatrix(null, 0, Tree_property.Coun_In - 1).ToArray();

                    //// get the output values
                    int[] outputs = sourceMatrix.GetColumn(Tree_property.Coun_In - 1);

                    DecisionVariable[] attributes = new DecisionVariable[Tree_property.Coun_In];

                    for (int j = 0; j < Tree_property.Coun_In; j++)
                    {
                        attributes[j] = new DecisionVariable(j.ToString(), DecisionAttributeKind.Continuous);
                    }

                    // create the decision tree
                    tree = new DecisionTree(attributes, 60);

                    ID3Learning id3learning = new ID3Learning(tree);

                    double error = id3learning.Run(inputs, outputs);

                }
                #endregion

                asd.Dispose();
                asd.Close();

                Drawing dr = new Drawing();

                dr.recursion(tree.Root, tree.Root.Branches, 0);
                dr.Save_();

                asd = new Tree_View();
                asd.userControl11.Load_f(Application.StartupPath);

                System.Linq.Expressions.Expression df = tree.ToExpression();

                // select the tab page used to view the tree
                tabControl.SelectTab(tabOverview);

                // display the constructed decision tree
                decisionTreeView1.TreeSource = tree;

                try
                {
                    File.Copy(@".\Resources\recursion.png", @".\Resources\recursion2.png", true);
                }
                catch
                {
                }
                using (Stream s = File.OpenRead(@".\Resources\recursion2.png"))
                {
                    pictureBox1.Image = Image.FromStream(s);
                }

            }
            catch (Exception t)
            {
                MessageBox.Show(t.Message);
            }
        }
Example #47
    public override bool Parse(string strval)
    {
        input = strval;

        // get var1
        var1 = GetVar(strval, "var1");
        var2 = GetVar(strval, "var2");
        if (var1 == null || var2 == null)
            return false;

        // get condition
        condition = GetArg(strval, "condition");

        return true;
    }