Code Example #1
        /// <summary>
        /// Learns a model, wrapper for
        /// <see cref="Accord.MachineLearning.DecisionTrees.Learning.C45Learning.Learn(double[][], int[], double[])"/>
        /// </summary>
        public void Learn()
        {
            var features = new DecisionVariable[GlobalVariables.Dimensions];

            // Adding DecisionVariables (features)
            for (var i = 0; i < GlobalVariables.Dimensions; i++)
            {
                features[i] = new DecisionVariable($"x{i}", DecisionVariableKind.Continuous);
            }

            // Create 2 class tree object
            DecisionTree = new DecisionTree(features, 2);

            var c45 = new C45Learning(DecisionTree)
            {
                Join            = Join,
                MaxHeight       = MaxHeight,
                // Only 1 thread will be used by the learning algorithm
                ParallelOptions = new ParallelOptions {
                    MaxDegreeOfParallelism = 1
                }
            };

            c45.Learn(Inputs, Outputs);

            // Getting rules from tree and saving them to file
            using (var sw = new StreamWriter(OutputPath))
            {
                OutputRules = DecisionTree.ToRules().ToString().Replace(",", ".");
                sw.Write(OutputRules);
            }
        }
Code Example #2
        public void Learn()
        {
            var stopWatch = new Stopwatch();

            stopWatch.Start();

            var variables = new List <DecisionVariable>();

            foreach (var vector in LearningData.TrainingData.First().ToVectorArray(Metadata, PropertiesToSkip))
            {
                variables.Add(new DecisionVariable(variables.Count.ToString(), new DoubleRange(-1, 1)));
            }
            Tree = new DecisionTree(variables, 2);
            var learner = new C45Learning(Tree);

            learner.Learn(LearningData.TrainingData.Select(data => data.ToVectorArray(Metadata, PropertiesToSkip)).ToArray(),
                          LearningData.TrainingData.Select(data => data.PercentMatch > 0 ? 1 : 0).ToArray());

            var matcher = new LoggingDecisionTreeMatcher(LearningData.TrainingData);

            matcher.LogMatchCount($"{Name} TrainingData", Tree, Metadata, PropertiesToSkip);
            matcher = new LoggingDecisionTreeMatcher(LearningData.TestData);
            matcher.LogMatchCount($"{Name} TestData", Tree, Metadata, PropertiesToSkip);

            stopWatch.Stop();
            Logger.InfoFormat("DecisionTreeLearning took {0}", stopWatch.Elapsed);
        }
Code Example #3
        public void RandomForestBuild(List <train> datalist)
        {
            int length = datalist.Count;
            int d      = datalist[0].d;

            forest = new List <DecisionTree>();

            int n = datalist.Count;
            int k = d; //(int)Math.Sqrt(d);
            int m = 100;

            for (int i = 0; i < m; ++i)
            {
                double[][] inputs;
                int[]      outputs;
                int[]      indexs;
                GetData(out inputs, out outputs, datalist, n, k, out indexs);

                DecisionVariable[] variables = new DecisionVariable[k];
                for (int j = 0; j < k; ++j)
                {
                    variables[j] = new DecisionVariable("attribute" + (indexs[j] + 1), DecisionVariableKind.Continuous);
                }

                // Create the C4.5 learning algorithm
                var c45 = new C45Learning(variables);

                // Learn the decision tree using C4.5
                DecisionTree dtmp = c45.Learn(inputs, outputs);
                forest.Add(dtmp);
            }
            log("The random forest model has been trained");
        }
Code Example #4
        static void Main(string[] args)
        {
            DataTable table = new Accord.IO.CsvReader("C:\\Users\\michael\\Downloads\\JulyToOct2015Test.csv", true).ToTable();

            // Convert the DataTable to input and output vectors
            double[][] inputs  = table.ToJagged <double>("BookToPrice", "DividendYield", "DebtToEquity", "MarketBeta", "SectorID");
            int[]      outputs = table.Columns["MonthlyReturn"].ToArray <int>();


            //SecurityID BookToPrice DividendYield EarningsYield   SalesGrowth AssetsToEquity  MarketCap MarketBeta  DebtToEquity    1YrVol  5YrVol  3YrVol ExposureToCurrencyGain  SectorID countryID

            DecisionTree tree = new DecisionTree(
                inputs: new List <DecisionVariable>
            {
                DecisionVariable.Continuous("BookToPrice"),
                DecisionVariable.Continuous("DividendYield"),
                DecisionVariable.Continuous("DebtToEquity"),
                DecisionVariable.Continuous("MarketBeta"),
                DecisionVariable.Discrete("SectorID", 11)
            },
                classes: 2);

            C45Learning teacher = new C45Learning(tree);

            teacher.Learn(inputs, outputs);
            int[] answers = tree.Decide(inputs);


            // Plot the results
            // ScatterplotBox.Show("Expected results", inputs, outputs);
            //ScatterplotBox.Show("Ans", inputs, answers)
            //    .Hold();
        }
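The Main method above trains the tree and computes answers for the training rows, but never checks them against the expected labels. A minimal follow-up sketch (assuming the outputs and answers arrays from the snippet and a using Accord.Math.Optimization.Losses; directive; this is not part of the original program) could measure the in-sample error with ZeroOneLoss, as several other examples on this page do:

            // Sketch only: compare the tree's answers against the expected outputs.
            double error = new ZeroOneLoss(outputs)
            {
                Mean = true   // report the fraction of misclassified samples
            }.Loss(answers);

            Console.WriteLine("In-sample misclassification rate: {0:P2}", error);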
Code Example #5
File: Form1.cs Project: arunIITB/InferenceRule
        private void ComputeInference()
        {
            var codebook = new Codification();

            codebook.Learn(tradeTable);

            DataTable symbols = codebook.Apply(tradeTable);

            string[]   inputNames = new[] { "Strike", "MarketPrice", "Notional" };
            double[][] inputs     = tradeTable.ToJagged(inputNames);
            int[]      outputs    = tradeTable.ToArray <int>("Result");


            var teacher = new C45Learning()
            {
                Attributes = DecisionVariable.FromCodebook(codebook, inputNames)
            };


            DecisionTree tree = teacher.Learn(inputs, outputs);

            int[]       predicted = tree.Decide(inputs);
            double      error     = new ZeroOneLoss(outputs).Loss(predicted);
            DecisionSet rules     = tree.ToRules();

            var str = rules.ToString();

            textBoxInferredRules.Text = str;
        }
Code Example #6
        public DecisionTree GenerateDecisionTree(int inputsCount, ref double[][] inputs, ref int[] outputs,
                                                 int outputClassNum, List <string> listVariablesName, int neiWindowSize, int landuseTypesCount)
        {
            DecisionVariable[] variable = new DecisionVariable[inputsCount];

            for (int i = 0; i < inputsCount - 2; i++)
            {
                DecisionVariable v = new DecisionVariable(listVariablesName[i], DecisionVariableKind.Continuous);
                variable[i] = v;
            }

            DecisionVariable dv = new DecisionVariable(listVariablesName[inputsCount - 2], neiWindowSize * neiWindowSize + 1);

            variable[inputsCount - 2] = dv;
            DecisionVariable dv2 = new DecisionVariable(listVariablesName[inputsCount - 1], landuseTypesCount);

            variable[inputsCount - 1] = dv2;

            DecisionTree tree = new DecisionTree(variable, outputClassNum);
            C45Learning  c45  = new C45Learning(tree);

            //double error = c45.Run(inputs, outputs);
            tree = c45.Learn(inputs, outputs);

            return(tree);
        }
Code Example #7
        private double run(double[][] inputs, int[] output)
        {
            int rows = inputs.Length;
            int cols = inputs[0].Length;

            int colsPerTree = 0;

            if (CoverageRatio == 0)
            {
                colsPerTree = (int)(System.Math.Sqrt(cols));
            }
            else
            {
                colsPerTree = (int)(cols * CoverageRatio);
            }

            var trees = forest.Trees;

            Parallel.For(0, trees.Length, ParallelOptions, i =>
            {
                int[] idx = Vector.Sample(SampleRatio, output.Length);
                var x     = inputs.Get(idx);
                var y     = output.Get(idx);

                var c45 = new C45Learning(forest.Trees[i])
                {
                    MaxVariables = colsPerTree,
                    Join         = 100
                };

                c45.Learn(x, y);
            });

            return(0);
        }
Code Example #8
        public void DecisionTreeLearning(List <string> list)
        {
            // Specify the input variables
            DecisionVariable[] variables =
            {
                new DecisionVariable("duration",           DecisionVariableKind.Continuous),
                new DecisionVariable("blood",              DecisionVariableKind.Continuous),
                new DecisionVariable("tower",              DecisionVariableKind.Continuous),
                new DecisionVariable("inhibitor",          DecisionVariableKind.Continuous),
                new DecisionVariable("baron",              DecisionVariableKind.Continuous),
                new DecisionVariable("dragon",             DecisionVariableKind.Continuous),
                new DecisionVariable("herald",             DecisionVariableKind.Continuous),
                new DecisionVariable("towerAdvantage",     DecisionVariableKind.Continuous),
                new DecisionVariable("inhibitorAdvantage", DecisionVariableKind.Continuous),
                new DecisionVariable("baronAdvantage",     DecisionVariableKind.Continuous),
                new DecisionVariable("DragonAdvantage",    DecisionVariableKind.Continuous),
                new DecisionVariable("HeraldAdvantage",    DecisionVariableKind.Continuous),
            };

            // Create the C4.5 learning algorithm
            var c45 = new C45Learning(variables);

            var inputs = GetC45Data(list);

            // Learn the decision tree using C4.5
            // (C45Learning.Learn builds and returns the tree, so a separate
            // DecisionTree construction is not needed here)
            tree = c45.Learn(inputs, outputsC45);
        }
Code Example #9
    protected void btnchangepassword0_Click(object sender, EventArgs e)
    {
        DataTable data = new DataTable();

        data = f1.getrecord1("select * from dataset");
        if (data.Rows.Count > 0)
        {
            double[][] inputs = data.ToJagged <double>("n", "p", "k", "ph", "ec");
            string[]   labels = new string[data.Rows.Count];
            for (int i = 0; i < data.Rows.Count; i++)
            {
                labels[i] = data.Rows[i]["fertility"].ToString();
            }
            var codebook = new Codification("fertility", labels);
            // With the codebook, we can convert the labels:
            int[]       outputs   = codebook.Translate("fertility", labels);
            C45Learning teacher   = new C45Learning();
            var         tree      = teacher.Learn(inputs, outputs);
            int[]       predicted = tree.Decide(inputs);
            DecisionSet rules     = tree.ToRules();
            string      ruleText  = rules.ToString(codebook, "fertility", System.Globalization.CultureInfo.InvariantCulture);
            var         cm1       = new GeneralConfusionMatrix(classes: 3, expected: outputs, predicted: predicted);
            //int[,] matrix = cm.Matrix;
            double cm  = cm1.Accuracy;
            double cm2 = cm * 100;
            Label1.Text = cm2.ToString();
        }
    }
Code Example #10
        private static DecisionTree TrainModel(DataTable dt)
        {
            DecisionTree tree = new DecisionTree((IList <DecisionVariable>) new DecisionVariable[16]
            {
                new DecisionVariable("v0", 2048),
                new DecisionVariable("v1", 2048),
                new DecisionVariable("v2", 2048),
                new DecisionVariable("v3", 2048),
                new DecisionVariable("v4", 2048),
                new DecisionVariable("v5", 2048),
                new DecisionVariable("v6", 2048),
                new DecisionVariable("v7", 2048),
                new DecisionVariable("v8", 2048),
                new DecisionVariable("v9", 2048),
                new DecisionVariable("v10", 2048),
                new DecisionVariable("v11", 2048),
                new DecisionVariable("v12", 2048),
                new DecisionVariable("v13", 2048),
                new DecisionVariable("v14", 2048),
                new DecisionVariable("v15", 2048)
            }, 1000);
            Codification codification = new Codification(dt);
            //ID3Learning id3Learning = new ID3Learning(tree);
            DataTable table = codification.Apply(dt);

            int[][]      array1  = table.ToArray <int>("v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v8", "v9", "v10", "v11", "v12", "v13", "v14", "v15");
            int[]        array2  = table.ToArray <int>("classification");
            C45Learning  teacher = new C45Learning();
            DecisionTree model   = teacher.Learn(array1, array2, null);

            //DecisionTree model = id3Learning.Learn(array1, array2, (double[]) null);
            return(model);
        }
Code Example #11
        private void trainingC45lib()
        {
            Accord.Math.Random.Generator.Seed = 0;
            c45Learning = new C45Learning()
            {
                Join      = 2,
                MaxHeight = 5
            };
            int size = trainingSets.Count;

            double[][] inputs1  = new double[size][];
            int[]      outputs1 = new int[size];
            int        i        = 0;

            foreach (Patient patient in trainingSets)
            {
                double[] aux = new double[9];
                for (int j = 1; j <= 9; j++)
                {
                    if (j == 1)
                    {
                        aux[j - 1] = patient.get(j) < 30 ? 0 : patient.get(j) < 60 ? 1 : 2;
                    }
                    else
                    {
                        aux[j - 1] = patient.get(j);
                    }
                }
                inputs1[i]  = aux;
                outputs1[i] = patient.get(10);
                i++;
            }

            var crossValidation = CrossValidation.Create(
                k: 5,
                learner: (p) => new C45Learning()
                {
                    Join      = 2,
                    MaxHeight = 5
                },
                loss: (actual, expected, p) => new ZeroOneLoss(expected).Loss(actual),
                fit: (teacher, x, y, w) => teacher.Learn(x, y, w),
                x: inputs1, y: outputs1);

            decisionTreeLib = c45Learning.Learn(inputs1, outputs1);
            var result = crossValidation.Learn(inputs1, outputs1);

            GeneralConfusionMatrix gcm = result.ToConfusionMatrix(inputs1, outputs1);

            accuracyC45lib = Math.Round(gcm.Accuracy, 3);
        }
Code Example #12
        /// <summary>
        ///   Creates and learns a Decision Tree to recognize the
        ///   previously loaded dataset using the current settings.
        /// </summary>
        ///
        private void btnCreate_Click(object sender, EventArgs e)
        {
            if (dgvLearningSource.DataSource == null)
            {
                MessageBox.Show("Please load some data first.");
                return;
            }

            // Finishes and save any pending changes to the given data
            dgvLearningSource.EndEdit();



            // Creates a matrix from the entire source data table
            double[,] table = (dgvLearningSource.DataSource as DataTable).ToMatrix(out columnNames);

            // Get only the input vector values (first two columns)
            double[][] inputs = table.GetColumns(0, 1).ToArray();

            // Get only the output labels (last column)
            int[] outputs = table.GetColumn(2).ToInt32();


            // Specify the input variables
            DecisionVariable[] variables =
            {
                new DecisionVariable("x", DecisionVariableKind.Continuous),
                new DecisionVariable("y", DecisionVariableKind.Continuous),
            };

            // Create the C4.5 learning algorithm
            var c45 = new C45Learning(variables);

            // Learn the decision tree using C4.5
            tree = c45.Learn(inputs, outputs);

            // Show the learned tree in the view
            decisionTreeView1.TreeSource = tree;


            // Get the ranges for each variable (X and Y)
            DoubleRange[] ranges = table.GetRange(0);

            // Generate a Cartesian coordinate system
            double[][] map = Matrix.Cartesian(
                Vector.Interval(ranges[0], 0.05),
                Vector.Interval(ranges[1], 0.05));

            // Classify each point in the Cartesian coordinate system
            double[] result = map.Apply(tree.Compute).ToDouble();
            double[,] surface = map.ToMatrix().InsertColumn(result);

            CreateScatterplot(zedGraphControl2, surface);

            lbStatus.Text = "Learning finished! Click the other tabs to explore results!";
        }
Code Example #13
File: Program.cs Project: heya10/Blog
        public static void Main(string[] args)
        {
            //getting example data
            Iris iris = new Iris();

            //create the training data arrays (Iris has 150 instances; we hold out 3 for testing)
            double[][] input  = new double[147][];
            int[]      output = new int[147];

            //process the 'Iris' data, leaving out one instance of each class for later testing
            int j = 0;

            for (int i = 0; i < 150; i++)
            {
                if (i != 0 && i != 50 && i != 100)
                {
                    input[j]  = new double[4];
                    output[j] = iris.ClassLabels[i];
                    for (int k = 0; k < 4; k++)
                    {
                        input[j][k] = iris.Instances[i][k];
                    }
                    j++;
                }
            }

            //learning algorithm for decision tree
            C45Learning teacher = new C45Learning(new[] {
                DecisionVariable.Continuous(iris.VariableNames[0]),
                DecisionVariable.Continuous(iris.VariableNames[1]),
                DecisionVariable.Continuous(iris.VariableNames[2]),
                DecisionVariable.Continuous(iris.VariableNames[3]),
            });

            //model learning
            DecisionTree tree = teacher.Learn(input, output);

            //If we wanted to use all the irises, we could simply have written
            //DecisionTree tree = teacher.Learn(iris.Instances, iris.ClassLabels);
            //but we prefer to leave some out for testing (to check that the program is working correctly)

            //testing our model
            double[][] test    = { iris.Instances[0], iris.Instances[50], iris.Instances[100] };
            int[]      answers = tree.Decide(test);

            Console.WriteLine("Answer should be as follow:\n0,1,2,\nAnswer is:");

            foreach (int ans in answers)
            {
                Console.Write(ans + ",");
            }

            Console.Write("\nPress any key to continue . . . ");
            Console.ReadKey(true);
        }
Code Example #14
        public override Task <List <GeneralConfusionMatrix> > ComputeFoldedConfusionMatrixAsync(ClassificationModel classificationModel, int folds)
        {
            return(Task.Factory.StartNew(() =>
            {
                int numFeatures = classificationModel.FeatureVectors.Count;
                DecisionVariable[] decisionVariables = Enumerable.ToArray(classificationModel.Bands.Select(b => DecisionVariable.Continuous(b.ToString())));

                double[][] input = new double[numFeatures][];
                int[] responses = new int[numFeatures];

                for (int featureIndex = 0; featureIndex < classificationModel.FeatureVectors.Count; ++featureIndex)
                {
                    var featureVector = classificationModel.FeatureVectors[featureIndex];

                    input[featureIndex] = Array.ConvertAll(featureVector.FeatureVector.BandIntensities, s => (double)s / ushort.MaxValue);
                    responses[featureIndex] = featureVector.FeatureClass;
                }

                List <GeneralConfusionMatrix> confusionMatrices = new List <GeneralConfusionMatrix>();

                // Create a new Cross-validation algorithm passing the data set size and the number of folds
                var crossvalidation = new CrossValidation(input.Length, folds);

                crossvalidation.Fitting = delegate(int k, int[] indicesTrain, int[] indicesValidation)
                {
                    // Lets now grab the training data:
                    var trainingInputs = input.Get(indicesTrain);
                    var trainingOutputs = responses.Get(indicesTrain);

                    // And now the validation data:
                    var validationInputs = input.Get(indicesValidation);
                    var validationOutputs = responses.Get(indicesValidation);

                    var tree = new DecisionTree(decisionVariables, Enum.GetValues(typeof(LandcoverTypeViewModel)).Length);
                    C45Learning c45Learning = new C45Learning(tree);
                    c45Learning.Learn(trainingInputs, trainingOutputs);

                    var predictedTraining = tree.Decide(trainingInputs);
                    var predictedValidation = tree.Decide(validationInputs);

                    double trainingError = new ZeroOneLoss(trainingOutputs).Loss(predictedTraining);
                    double validationError = new ZeroOneLoss(validationOutputs).Loss(predictedValidation);

                    GeneralConfusionMatrix confusionMatrix = new GeneralConfusionMatrix(Enum.GetValues(typeof(LandcoverTypeViewModel)).Length - 1, validationOutputs, predictedValidation);
                    confusionMatrices.Add(confusionMatrix);

                    // Return a new information structure containing the model and the errors achieved.
                    return new CrossValidationValues(trainingError, validationError);
                };

                var result = crossvalidation.Compute();

                return confusionMatrices;
            }));
        }
Code Example #15
        public void Classification_Train(double[,] train_docrule, int[] label, string algorithm)
        {
            string classmodelpath;
            int    attrSize     = eclatlitems.Count;
            int    attrSizeTest = eclatlitems.Count;

            // Specify the input variables
            DecisionVariable[] variables = new DecisionVariable[attrSize];
            for (int i = 0; i < attrSize; i++)
            {
                variables[i] = new DecisionVariable((i + 1).ToString(), DecisionVariableKind.Discrete);
            }

            if (algorithm == "Tree")
            {
                classmodelpath = algorithm + ".model";
                //RandomForest tree2 = new RandomForest(2, variables);
                DecisionTree tree    = new DecisionTree(variables, 2);
                C45Learning  teacher = new C45Learning(tree);
                var          model   = teacher.Learn(train_docrule.ToJagged(), label);
                //save model
                teacher.Save(Path.Combine("", classmodelpath));
            }
            if (algorithm == "SVM")
            {
                classmodelpath = algorithm + ".model";
                var learn = new SequentialMinimalOptimization()
                {
                    UseComplexityHeuristic = true,
                    UseKernelEstimation    = false
                };
                SupportVectorMachine teacher = learn.Learn(train_docrule.ToJagged(), label);
                //save model
                teacher.Save(Path.Combine("", classmodelpath));
            }

            if (algorithm == "Logistic")
            {
                classmodelpath = algorithm + ".model";
                var learner = new IterativeReweightedLeastSquares <LogisticRegression>()
                {
                    Tolerance      = 1e-4, // Let's set some convergence parameters
                    Iterations     = 1,    // maximum number of iterations to perform
                    Regularization = 0
                };
                LogisticRegression teacher = learner.Learn(train_docrule.ToJagged(), label);
                teacher.Save(Path.Combine("", classmodelpath));
            }

            if (algorithm == "GA")
            {
                weights_ga_matlab();
            }
        }
Code Example #16
        public override Task TrainAsync(ClassificationModel classificationModel)
        {
            int numFeatures = classificationModel.FeatureVectors.Count;

            DecisionVariable[] decisionVariables = Enumerable.ToArray(classificationModel.Bands.Select(b => DecisionVariable.Continuous(b.ToString())));

            double[][] input     = new double[numFeatures][];
            int[]      responses = new int[numFeatures];

            for (int featureIndex = 0; featureIndex < classificationModel.FeatureVectors.Count; ++featureIndex)
            {
                var featureVector = classificationModel.FeatureVectors[featureIndex];
                input[featureIndex]     = Array.ConvertAll(featureVector.FeatureVector.BandIntensities, s => (double)s / ushort.MaxValue);
                responses[featureIndex] = featureVector.FeatureClass;
            }

            if (Boosting)
            {
                return(Task.Factory.StartNew(() =>
                {
                    var classifier = new Boost <Weak <DecisionTree> >();

                    var teacher = new AdaBoostM1 <Weak <DecisionTree> >(classifier)
                    {
                        Creation = (weights) =>
                        {
                            var tree = new DecisionTree(decisionVariables, Enum.GetValues(typeof(LandcoverTypeViewModel)).Length);
                            var c45Learning = new C45Learning(tree);
                            c45Learning.Learn(input, responses, weights);
                            return new Weak <DecisionTree>(tree, (s, x) => s.Decide(x));
                        },

                        Iterations = Iterations,
                        Tolerance = 1e-2
                    };

                    teacher.Run(input, responses);
                    _tree = Either.Right <DecisionTree, Boost <Weak <DecisionTree> > >(classifier);
                }));
            }
            else
            {
                return(Task.Factory.StartNew(() =>
                {
                    var tree = new DecisionTree(decisionVariables, Enum.GetValues(typeof(LandcoverTypeViewModel)).Length);
                    C45Learning c45Learning = new C45Learning(tree);
                    c45Learning.Learn(input, responses);

                    _tree = Either.Left <DecisionTree, Boost <Weak <DecisionTree> > >(tree);
                }));
            }
        }
Code Example #17
        public static DecisionTree Learn(DataTable data, string[] inputColumns, string outputColumn)
        {
            var codebook = new Codification(data);
            var symbols  = codebook.Apply(data);

            double[][] inputs  = symbols.ToJagged(inputColumns);
            int[]      outputs = symbols.ToArray <int>(outputColumn);

            var attributes = DecisionVariable.FromCodebook(codebook, inputColumns);
            var c45        = new C45Learning(attributes);

            return(c45.Learn(inputs, outputs));
        }
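Code Example #17 wraps the whole Codification-plus-C4.5 pipeline into one static helper. A hedged usage sketch follows; the table, its columns, and its rows are invented purely for illustration and are not part of the original project:

        // Hypothetical caller of the Learn(DataTable, string[], string) helper above.
        // The table contents below are made up for illustration only.
        DataTable weather = new DataTable("weather");
        weather.Columns.Add("Outlook");
        weather.Columns.Add("Temperature");
        weather.Columns.Add("PlayTennis");
        weather.Rows.Add("Sunny",    "Hot",  "No");
        weather.Rows.Add("Overcast", "Mild", "Yes");
        weather.Rows.Add("Rain",     "Cool", "Yes");
        weather.Rows.Add("Sunny",    "Mild", "No");

        DecisionTree tree = Learn(weather, new[] { "Outlook", "Temperature" }, "PlayTennis");

        // The learned tree can then be inspected or queried as in the other examples.
        string rules = tree.ToRules().ToString();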
Code Example #18
        private void btnSampleRunAnalysis_Click(object sender, EventArgs e)
        {
            if (!isTrainingDataLoaded)
            {
                MessageBox.Show("Please load your training data first");
                return;
            }

            // Creates a matrix from the entire source data table
            double[,] table = (dgvLearningSource.DataSource as DataTable).ToMatrix(out columnNames);

            // Get the input vector values from the training data table
            double[][] inputs = ConvertDataTableToMatrix(TrainingData.Tables["InterestedTrainingDataValues"]);

            // Get only the output labels (last column)
            int[] outputs = table.GetColumn(2).ToInt32();


            // Specify the input variables
            DecisionVariable[] variables =
             {
                new DecisionVariable("x", DecisionVariableKind.Continuous),
                new DecisionVariable("y", DecisionVariableKind.Continuous),
            };


            // Create the C4.5 learning algorithm
            var c45 = new C45Learning(variables);

            // Learn the decision tree using C4.5
            tree = c45.Learn(inputs, outputs);

            // Show the learned tree in the view
            decisionTreeView1.TreeSource = tree;


            // Get the ranges for each variable (X and Y)
            DoubleRange[] ranges = table.GetRange(0);

            // Generate a Cartesian coordinate system
            double[][] map = Matrix.Cartesian(
                Vector.Interval(ranges[0], 0.05),
                Vector.Interval(ranges[1], 0.05));

            // Classify each point in the Cartesian coordinate system
            double[] result = map.Apply(tree.Compute).ToDouble();
            double[,] surface = map.ToMatrix().InsertColumn(result);
            
            MessageBox.Show("Training Complete");

        }
Code Example #19
File: C45LearningTest.cs Project: xadxxadx/framework
        public void new_method_create_tree()
        {
            string[][] text = Resources.iris_data.Split(new[] { '\n' }, StringSplitOptions.RemoveEmptyEntries).Apply(x => x.Split(','));

            double[][] inputs = text.GetColumns(0, 1, 2, 3).To <double[][]>();

            string[] labels = text.GetColumn(4);

            var codebook = new Codification("Output", labels);

            int[] outputs = codebook.Translate("Output", labels);

            // And we can use the C4.5 for learning:
            var teacher = new C45Learning();

            // And finally induce the tree:
            var tree = teacher.Learn(inputs, outputs);

            // To get the estimated class labels, we can use
            int[] predicted = tree.Decide(inputs);

            // And the classification error can be computed as
            double error = new ZeroOneLoss(outputs) // 0.0266
            {
                Mean = true
            }.Loss(tree.Decide(inputs));

            // Moreover, we may decide to convert our tree to a set of rules:
            DecisionSet rules = tree.ToRules();

            // And using the codebook, we can inspect the tree reasoning:
            string ruleText = rules.ToString(codebook, "Output",
                                             System.Globalization.CultureInfo.InvariantCulture);

            // The output is:
            string expected = @"Iris-setosa =: (2 <= 2.45)
Iris-versicolor =: (2 > 2.45) && (3 <= 1.75) && (0 <= 7.05) && (1 <= 2.85)
Iris-versicolor =: (2 > 2.45) && (3 <= 1.75) && (0 <= 7.05) && (1 > 2.85)
Iris-versicolor =: (2 > 2.45) && (3 > 1.75) && (0 <= 5.95) && (1 > 3.05)
Iris-virginica =: (2 > 2.45) && (3 <= 1.75) && (0 > 7.05)
Iris-virginica =: (2 > 2.45) && (3 > 1.75) && (0 > 5.95)
Iris-virginica =: (2 > 2.45) && (3 > 1.75) && (0 <= 5.95) && (1 <= 3.05)
";

            Assert.AreEqual(0.026666666666666668, error, 1e-10);

            double newError = ComputeError(rules, inputs, outputs);

            Assert.AreEqual(0.026666666666666668, newError, 1e-10);
            Assert.AreEqual(expected, ruleText);
        }
Code Example #20
        public void Learn()
        {
            var inputs  = GetLearnInputs();
            var outputs = GetOutputs();
            var teacher = new C45Learning {
                Join = 0
            };

            foreach (var controllerOutputProperty in GetControllerOutputProperties())
            {
                teacher.Attributes.Add(DecisionVariable.Continuous(controllerOutputProperty));
            }

            DecisionTree = teacher.Learn(inputs, outputs);
        }
Code Example #21
    protected void btnshow_Click(object sender, EventArgs e)
    {
        quality.Visible = true;
        DataTable _dataTable = new DataTable();

        Panel1.Visible = true;
        n          = Convert.ToDouble(txtN.Text);
        p          = Convert.ToDouble(txtP.Text);
        k          = Convert.ToDouble(txtK.Text);
        ph         = Convert.ToDouble(txtPh.Text);
        ec         = Convert.ToDouble(txtec.Text);
        _dataTable = f1.getrecord1("select * from dataset");

        double[][] inputs = _dataTable.ToJagged <double>("n", "p", "k", "ph", "ec");
        string[]   labels = new string[_dataTable.Rows.Count];
        for (int i = 0; i < _dataTable.Rows.Count; i++)
        {
            labels[i] = _dataTable.Rows[i]["fertility"].ToString();
        }
        var codebook = new Codification("fertility", labels);

        // With the codebook, we can convert the labels:
        int[]       outputs = codebook.Translate("fertility", labels);
        C45Learning teacher = new C45Learning();
        var         tree    = teacher.Learn(inputs, outputs);

        double[][] input_test =
        {
            new double[] { n, p, k, ph, ec },
        };



        int[] val = tree.Decide(input_test);

        if (val[0] == 0)
        {
            quality.Text = "Low";
        }
        else if (val[0] == 1)
        {
            quality.Text = "Medium";
        }
        else
        {
            quality.Text = "High";
        }
    }
Code Example #22
    // Decision Tree
    public DecisionTree DecisionThrust(double[][] inputs, int[] outputs)
    {
        C45Learning teacher = new C45Learning(new[] {
            DecisionVariable.Continuous("X"),
            DecisionVariable.Continuous("Y"),
            DecisionVariable.Continuous("Z"),
            DecisionVariable.Continuous("W")
        });

        teacher.ParallelOptions.MaxDegreeOfParallelism = 1;

        // Use the learning algorithm to induce the tree
        DecisionTree tree = teacher.Learn(inputs, outputs);

        return(tree);
    }
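Code Example #22 builds a four-feature tree with the learner restricted to a single thread. A hedged usage sketch follows; the sample vectors and labels are invented for illustration and are not taken from the original project:

    // Hypothetical call to DecisionThrust above; the data is made up.
    double[][] samples =
    {
        new double[] { 0.1, 0.2, 0.3, 0.4 },
        new double[] { 0.9, 0.8, 0.7, 0.6 },
        new double[] { 0.2, 0.1, 0.4, 0.3 },
        new double[] { 0.8, 0.9, 0.6, 0.7 },
    };
    int[] classes = { 0, 1, 0, 1 };

    DecisionTree tree = DecisionThrust(samples, classes);

    // On such a tiny, separable set the tree is expected to reproduce the labels.
    int[] predictions = tree.Decide(samples);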
Code Example #23
File: Trainer.cs Project: node9909/unity-biosignals
        public void  Train(List <TrainingValue> trainingData)
        {
            List <DecisionVariable> trainingVariables = new List <DecisionVariable>();

            for (int i = 0; i < featureSize; i++)
            {
                trainingVariables.Add(DecisionVariable.Continuous(i.ToString()));
            }

            tree = new DecisionTree(inputs: trainingVariables, classes: 2);


            double[][] featuresArray = new double[trainingData.Count][];
            int[]      labels        = new int[trainingData.Count];

            for (int i = 0; i < featuresArray.Length; i++)
            {
                featuresArray[i] = trainingData[i].Features;
                labels[i]        = Convert.ToInt32(trainingData[i].State);
            }

            switch (type)
            {
            case ClassifierType.DecisionTree:
                C45Learning teacher = new C45Learning(tree);
                teacher.Learn(featuresArray, labels);
                break;

            case ClassifierType.LDA:
                LinearDiscriminantAnalysis lda = new LinearDiscriminantAnalysis();
                pipeline = lda.Learn(featuresArray, labels);
                break;

            case ClassifierType.SVM:
                LinearCoordinateDescent svmLearner = new LinearCoordinateDescent();
                svm = svmLearner.Learn(featuresArray, labels);
                break;

            case ClassifierType.Bayes:
                NaiveBayesLearning <NormalDistribution> learner = new NaiveBayesLearning <NormalDistribution>();
                bayes = learner.Learn(featuresArray, labels);
                break;
            }

            Trained = true;
        }
Code Example #24
File: MainForm.cs Project: tranntn/DoAn3
        /// <summary>
        ///   Creates and learns a Decision Tree to recognize the
        ///   previously loaded dataset using the current settings.
        /// </summary>
        ///
        private void btnCreate_Click(object sender, EventArgs e)
        {
            if (dgvLearningSource.DataSource == null)
            {
                MessageBox.Show("Please load some data first.");
                return;
            }

            // Finishes and save any pending changes to the given data
            dgvLearningSource.EndEdit();

            // Creates a matrix from the entire source data table
            double[,] table = (dgvLearningSource.DataSource as DataTable).ToMatrix(out columnNames);

            // Get only the input vector values (first six columns)
            double[][] inputs = table.GetColumns(0, 1, 2, 3, 4, 5).ToJagged();

            // Get the expected output labels (last column)
            int[] outputs = table.GetColumn(6).ToInt32();

            // Specify the input variables
            DecisionVariable[] variables =
            {
                new DecisionVariable("Pclass",   DecisionVariableKind.Continuous),
                new DecisionVariable("Sex",      DecisionVariableKind.Continuous),
                new DecisionVariable("Parch",    DecisionVariableKind.Continuous),
                new DecisionVariable("Fare",     DecisionVariableKind.Continuous),
                new DecisionVariable("Age",      DecisionVariableKind.Continuous),
                new DecisionVariable("Embarked", DecisionVariableKind.Continuous),
            };

            // Create the C4.5 learning algorithm
            var c45 = new C45Learning(variables);

            // Learn the decision tree using C4.5
            tree = c45.Learn(inputs, outputs);

            // Show the learned tree in the view
            decisionTreeView1.TreeSource = tree;

            // Get the ranges for each variable (X and Y)
            DoubleRange[] ranges = table.GetRange(0);

            CreateScatterplot(zedGraphControl2, table);
            lbStatus.Text = "Learning finished! Click the other tabs to explore results!";
        }
Code Example #25
        public void TreeLearning()
        {
            Console.WriteLine("SottoProgramma chiamato: TreeLearning.");
            this.clock = DateTime.Now;

            var teacher = new C45Learning();

            DecisionTree TreeAlgorithm = teacher.Learn(DataSets[1].ItemsFeatures, DataSets[1].CatIDs);

            int[] predicted = TreeAlgorithm.Decide(DataSets[0].ItemsFeatures);

            double error = new ZeroOneLoss(DataSets[0].CatIDs).Loss(predicted);

            PrintReport(predicted, error, "Tree");

            Console.WriteLine("SottoProgramma TreeLearning terminato.\nErrore: {0}", error);
            Console.WriteLine("Tempo richiesto per l'operazione: " + (DateTime.Now - clock).TotalSeconds + " secondi.");
        }
Code Example #26
        private void ParameterLearning()
        {
            int[][] inputs  = new int[trainingVects.Length * trainingVects[0].Count][];
            int[]   outputs = new int[trainingVects.Length * trainingVects[0].Count];
            for (int i = 0; i < trainingVects.Length; i++)
            {
                for (int j = 0; j < trainingVects[i].Count; j++)
                {
                    inputs[i * trainingVects[i].Count + j] = new int[] {
                        (int)trainingVects[i][j][0],
                        (int)trainingVects[i][j][1]
                    };
                    outputs[i * trainingVects[i].Count + j] = (int)trainingVects[i][j][2] - 1;
                }
            }

            // Create a C4.5 learning algorithm
            C45Learning      teacher = new C45Learning();
            DecisionVariable var1    = new DecisionVariable("A", new Accord.DoubleRange(0, 100));
            DecisionVariable var2    = new DecisionVariable("B", new Accord.DoubleRange(0, 100));

            var1.Nature = DecisionVariableKind.Continuous;
            var2.Nature = DecisionVariableKind.Continuous;
            teacher.Attributes.Add(var1);
            teacher.Attributes.Add(var2);
            var tree = teacher.Learn(inputs, outputs);

            var r = tree.ToRules();

            for (int i = 0; i < r.Count; i++)
            {
                double o     = r.ElementAt(i).Output;
                string name1 = r.ElementAt(i).Variables.ElementAt(0).Name;
                string name2 = r.ElementAt(i).Variables.ElementAt(1).Name;
            }
            double error = new ZeroOneLoss(outputs).Loss(tree.Decide(inputs));

            int[]   predicted = tree.Decide(inputs);
            int[][] inputs2   = new int[1][];
            inputs2[0] = new int[2] {
                80, 81
            };
            var tmp = tree.Decide(inputs2);
        }
Code Example #27
        public void DecisionTreeBuild(string Filename)
        {
            StreamReader sr = new StreamReader(Filename);
            string       str;

            irislist = new List <Iris>();

            do
            {
                str = sr.ReadLine();
                if (str == null || str == "")
                {
                    break;
                }
                string[] ss        = str.Split(',');
                int      ss_length = ss.Length;
                for (int i = 0; i < ss_length; ++i)
                {
                    ss[i] = ss[i].Trim();
                }
                Iris iris = new Iris(ss, ss_length);
                irislist.Add(iris);
            }while (str != null && str != "");

            sr.Close();

            double[][] inputs; int[] outputs;
            GetData(out inputs, out outputs);

            // Specify the input variables
            DecisionVariable[] variables =
            {
                new DecisionVariable("SepalLength", DecisionVariableKind.Continuous),
                new DecisionVariable("SepalWidth",  DecisionVariableKind.Continuous),
                new DecisionVariable("PetalLength", DecisionVariableKind.Continuous),
                new DecisionVariable("PetalWidth",  DecisionVariableKind.Continuous),
            };

            // Create the C4.5 learning algorithm
            var c45 = new C45Learning(variables);

            // Learn the decision tree using C4.5
            tree = c45.Learn(inputs, outputs);
        }
Code Example #28
        public static void training()
        {
            string filepath = Path.Combine(HostingEnvironment.ApplicationPhysicalPath + @"Content\files\indian_liver_patient.xls");

            DataTable table = new ExcelReader(filepath).GetWorksheet("indian_liver_patient");

            double[][] inputs = table.ToJagged <double>("Age", "Gender", "Total_Bilirubin", "Direct_Bilirubin", "Alkaline_Phosphotase"
                                                        , "Alamine_Aminotransferase", "Aspartate_Aminotransferase"
                                                        , "Total_Protiens", "Albumin", "Albumin_and_Globulin_Ratio");
            int[] outputs = table.ToArray <int>("Dataset");

            for (int i = 0; i < outputs.Length; i++)
            {
                outputs[i] = outputs[i] - 1;
            }

            DecisionVariable[] var =
            {
                new DecisionVariable("A",   DecisionVariableKind.Continuous),
                new DecisionVariable("G",   DecisionVariableKind.Continuous),
                new DecisionVariable("TB",  DecisionVariableKind.Continuous),
                new DecisionVariable("DB",  DecisionVariableKind.Continuous),
                new DecisionVariable("AP",  DecisionVariableKind.Continuous),
                new DecisionVariable("AA",  DecisionVariableKind.Continuous),
                new DecisionVariable("AS",  DecisionVariableKind.Continuous),
                new DecisionVariable("TP",  DecisionVariableKind.Continuous),
                new DecisionVariable("ALB", DecisionVariableKind.Continuous),
                new DecisionVariable("AGR", DecisionVariableKind.Continuous)
            };

            tree = new DecisionTree(var, 2);

            C45Learning teacher = new C45Learning(tree);

            teacher.Learn(inputs, outputs);
            var learner = new IterativeReweightedLeastSquares <LogisticRegression>()
            {
                Tolerance      = 1e-6, // Let's set some convergence parameters
                MaxIterations  = 1000, // maximum number of iterations to perform
                Regularization = 0
            };

            regression = learner.Learn(inputs, outputs);
        }
Code Example #29
        static void Main(string[] args)
        {
            // In this example, we will learn a decision tree directly from integer
            // matrices that define the inputs and outputs of our learning problem.

            int[][] inputs =
            {
                new int[] { 0, 0 },
                new int[] { 0, 1 },
                new int[] { 1, 0 },
                new int[] { 1, 1 },
            };

            int[] outputs = // xor between inputs[0] and inputs[1]
            {
                0, 1, 1, 0
            };

            // Create a C4.5 learning algorithm
            C45Learning      teacher = new C45Learning();
            DecisionVariable var1    = new DecisionVariable("0", new Accord.DoubleRange(0, 999));
            DecisionVariable var2    = new DecisionVariable("1", new Accord.DoubleRange(0, 999));

            var1.Nature = DecisionVariableKind.Continuous;
            var2.Nature = DecisionVariableKind.Continuous;
            teacher.Attributes.Add(var1);
            teacher.Attributes.Add(var2);

            // Learn a decision tree for the XOR problem
            var tree = teacher.Learn(inputs, outputs);
            var r    = tree.ToRules();

            for (int i = 0; i < r.Count; i++)
            {
                double o     = r.ElementAt(i).Output;
                string name1 = r.ElementAt(i).Variables.ElementAt(0).Name;
                string name2 = r.ElementAt(i).Variables.ElementAt(1).Name;
            }
            // Compute the error in the learning
            double error = new ZeroOneLoss(outputs).Loss(tree.Decide(inputs));

            // The tree can now be queried for new examples:
            int[] predicted = tree.Decide(inputs); // should be { 0, 1, 1, 0 }
        }
Code Example #30
        public double Learn(double[][] observations, int[] labels)
        {
            int max = observations[0].Length;

            DecisionVariable[] a = new DecisionVariable[max];
            for (int i = 0; i < max; i++)
            {
                a[i] = DecisionVariable.Continuous(i.ToString());
            }
            C45Learning teacher = new C45Learning(a);

            // Use the learning algorithm to induce the tree
            machine = teacher.Learn(observations, labels);

            // Classify the samples using the model
            int[]  predicted = machine.Decide(observations);
            double error     = new AccuracyLoss(labels).Loss(predicted);

            return(1 - error);
        }