Example #1
0
        /// <summary>
        /// Trains a decision tree on the iris data set and logs the
        /// classification accuracy obtained by re-classifying the same data.
        /// NOTE(review): the classifier is evaluated on its own training set,
        /// so the logged ratio is resubstitution accuracy, not a measure of
        /// generalization — consider a held-out split.
        /// </summary>
        public void IrisDecisionTreeTest()
        {
            DataSetLoader dataSetLoader = new DataSetLoader();

            Console.WriteLine(" Reading DataSet.. ");
            var irises = dataSetLoader.SelectIrises();
            DecisionTreeClassifier decisionTreeClassifier =
                new DecisionTreeClassifier(irises, new ShannonEntropySplitter());

            decisionTreeClassifier.Train();
            // Renamed from "animalsTest" (copy/paste leftover from another test) —
            // this is iris data, loaded a second time for the evaluation pass.
            var irisesTest  = dataSetLoader.SelectIrises();
            var trueCounter = 0;
            var counter     = 0;

            foreach (var item in irisesTest)
            {
                // item.Item1 = feature vector, item.Item2 = expected class label.
                var outputValue = decisionTreeClassifier.Classify(item.Item1);
                if (outputValue == item.Item2)
                {
                    trueCounter++;
                }
                Debug.WriteLine(string.Format("Value {0} - Predicted {1} = {2}",
                                              item.Item2, outputValue, (outputValue == item.Item2) ? "true" : "false"));
                counter++;
            }
            Debug.WriteLine(string.Format("Data {0} - True {1} Verhältnis: {2}",
                                          counter.ToString(), trueCounter.ToString(), (Convert.ToDouble(trueCounter) / Convert.ToDouble(counter)).ToString()));
        }
Example #2
0
        /// <summary>
        /// Smoke test: trains a decision tree on a tiny hand-made data set of
        /// three feature vectors and classifies a few sample inputs.
        /// </summary>
        public void DecisionTreeTest()
        {
            // Training samples: (feature vector, class label).
            var data = new List <Tuple <double[], double> >
            {
                Tuple.Create(new double[] { 0.0, 3.0, 0.0, 0.0 }, 40.0),
                Tuple.Create(new double[] { 0.0, 3.0, 1.0, 1.0 }, 50.0),
                Tuple.Create(new double[] { 1.0, 3.0, 1.0, 1.0 }, 50.0),
            };
            DecisionTreeClassifier decisionTreeClassifier =
                new DecisionTreeClassifier(data, new ShannonEntropySplitter());

            decisionTreeClassifier.Train();

            // Classify two seen and two unseen vectors; results are only
            // inspected in the debugger, no assertions are made.
            var result = decisionTreeClassifier.Classify(new double[] { 0.0, 3.0, 0.0, 0.0 });

            result = decisionTreeClassifier.Classify(new double[] { 0.0, 3.0, 1.0, 1.0 });
            result = decisionTreeClassifier.Classify(new double[] { 1.0, 3.0, 0.0, 0.0 });
            result = decisionTreeClassifier.Classify(new double[] { 1.0, 0.0, 0.0, 0.0 });
        }
Example #3
0
        /// <summary>
        /// Trains a decision tree from raw windowed sensor data: reads
        /// "data.txt" in sliding windows, generates a feature vector per
        /// window, derives one label per window from "label.txt" by majority
        /// vote, then trains a C4.5 decision tree and saves it as
        /// "DecisionTreeWSDM". Training happens inside nested reader
        /// callbacks, so it runs only after both files are consumed.
        /// </summary>
        public override void Train()
        {
            List <double[]>         FeaturSpace    = new List <double[]>();
            List <int>              FeaturLabel    = new List <int>();
            List <List <double[]> > DataWindows    = new List <List <double[]> >();
            List <List <int> >      LabelWindows   = new List <List <int> >();
            FeatureManager          featureManager = new FeatureManager();

            // Features computed per window; the integer arguments are column
            // indices into each data row — presumably sensor axes (TODO confirm).
            featureManager.Add(new Mean(1, 2, 3));
            featureManager.Add(new StandardDeviation(1, 2, 3));
            featureManager.Add(new MeanAbsoluteDeviation(1, 2, 3));
            featureManager.Add(new AverageTimeBetweenPeaks(0, 1, 2, 3));
            featureManager.Add(new ResultantAcceleration());
            featureManager.Add(new BinDistribution(10, 1, 2, 3));
            //featureManager.Add(new PreProcessData());

            // Data is delivered as windows of 200 rows (SlidingWindow(200, 0);
            // second argument presumably the overlap — TODO confirm).
            var dataReader = new CSVReader <double[]>("data.txt", false, (dynamic output) =>
            {
                DataWindows = output;
                DataWindows.ForEach(window =>
                {
                    // One feature vector per window.
                    FeaturSpace.Add(featureManager.Generate(window).ToArray());
                });

                // Labels are read with the same windowing so that window i of
                // the labels corresponds to window i of the data.
                var labelReader = new CSVReader <int>("label.txt", false, (dynamic outputLabels) =>
                {
                    LabelWindows = outputLabels;
                    LabelWindows.ForEach(row =>
                    {
                        // Majority vote: the most frequent label in the window.
                        FeaturLabel.Add(row.GroupBy(x => x).OrderByDescending(g => g.Count()).Take(1).Select(i => i.Key).First());
                    });

                    DecisionTreeClassifier dt = new DecisionTreeClassifier(FusionFramework.Classifiers.DecisionTreeLearningAlgorithms.C45Learning);
                    dt.Train(FeaturSpace, FeaturLabel, true);
                    dt.Save("DecisionTreeWSDM");
                }, new SlidingWindow <int>(200, 0));
                labelReader.Start();
            }, new SlidingWindow <double[]>(200, 0));

            // Normalization is applied to the data stream before windowing;
            // labels are intentionally left unnormalized.
            dataReader.Add(new Normalize());
            dataReader.Start();
        }
Example #4
0
        /// <summary>
        /// Trains a C4.5 decision tree from pre-computed feature vectors:
        /// reads the feature matrix from "MyoGymFeatures.csv" and the matching
        /// labels from "MyoGymFeaturesLabels.csv", then trains and saves the
        /// model as "LDAMyoGYM". Training happens inside nested reader
        /// callbacks, so it runs only after both files are consumed.
        /// </summary>
        public void TrainFromFeatures()
        {
            // Removed unused locals DataWindows/LabelWindows (copy/paste
            // leftovers from the windowed Train() variant).
            List <double[]> FeaturSpace = new List <double[]>();
            List <int>      FeaturLabel = new List <int>();
            var dataReader = new CSVReader <double[]>("MyoGymFeatures.csv", false, (dynamic output) =>
            {
                // Features are already one row per sample — no windowing needed.
                FeaturSpace     = output;
                var labelReader = new CSVReader <int>("MyoGymFeaturesLabels.csv", false, (dynamic outputLabels) =>
                {
                    FeaturLabel = outputLabels;

                    DecisionTreeClassifier dt = new DecisionTreeClassifier(FusionFramework.Classifiers.DecisionTreeLearningAlgorithms.C45Learning);
                    dt.Train(FeaturSpace, FeaturLabel, true);
                    dt.Save("LDAMyoGYM");
                });

                labelReader.Start();
            });

            dataReader.Start();
        }
Example #5
0
        /// <summary>
        /// Trains a C4.5 decision tree from raw MyoGym sensor data: reads
        /// "MyoGym.csv" in sliding windows of 200 rows (overlap parameter 25 —
        /// presumably step or overlap, TODO confirm), computes a large fixed
        /// feature vector per window, derives one label per window from
        /// "MyoGymLabel.csv" by majority vote, persists the computed features
        /// and labels to CSV (consumed by TrainFromFeatures), then trains and
        /// saves the model as "LDAMyoGYM".
        /// </summary>
        public override void Train()
        {
            List <double[]>         FeaturSpace  = new List <double[]>();
            List <int>              FeaturLabel  = new List <int>();
            List <List <double[]> > DataWindows  = new List <List <double[]> >();
            List <List <int> >      LabelWindows = new List <List <int> >();
            var dataReader = new CSVReader <double[]>("MyoGym.csv", false, (dynamic output) =>
            {
                DataWindows = output;
                DataWindows.ForEach(window =>
                {
                    // Feature list order defines the feature-vector layout;
                    // do not reorder without retraining all saved models.
                    // The integer arguments are column indices into each row
                    // (10–12 and 14–16 look like two 3-axis sensors, 1–8 an
                    // 8-channel block, presumably EMG — TODO confirm).
                    FeaturSpace.Add(FeatureManager.Generate(window, new List <IFeature>()
                    {
                        new HjorthParameters(10, 11, 12),
                        new StandardDeviation(10, 11, 12),
                        new Mean(10, 11, 12),
                        new Min(10, 11, 12),
                        new Max(10, 11, 12),
                        new Percentile(5, 10, 11, 12),
                        new Percentile(10, 10, 11, 12),
                        new Percentile(25, 10, 11, 12),
                        new Percentile(50, 10, 11, 12),
                        new Percentile(75, 10, 11, 12),
                        new Percentile(90, 10, 11, 12),
                        new Percentile(95, 10, 11, 12),
                        new ZeroCrossing(10, 11, 12),
                        new MeanCrossing(10, 11, 12),
                        // NOTE(review): every sibling feature in this group uses
                        // columns (10, 11, 12); Entropy uses (11, 12, 13) — looks
                        // like an off-by-one, verify against the column layout.
                        new Entropy(11, 12, 13),
                        new Correlation(10, 11),
                        new Correlation(10, 12),
                        new Correlation(11, 12),

                        // Second 3-axis sensor block, columns 14–16.
                        new HjorthParameters(14, 15, 16),
                        new StandardDeviation(14, 15, 16),
                        new Mean(14, 15, 16),
                        new Min(14, 15, 16),
                        new Max(14, 15, 16),
                        new Percentile(5, 14, 15, 16),
                        new Percentile(10, 14, 15, 16),
                        new Percentile(25, 14, 15, 16),
                        new Percentile(50, 14, 15, 16),
                        new Percentile(75, 14, 15, 16),
                        new Percentile(90, 14, 15, 16),
                        new Percentile(95, 14, 15, 16),
                        new ZeroCrossing(14, 15, 16),
                        new MeanCrossing(14, 15, 16),
                        new Entropy(14, 15, 16),

                        // 8-channel block, columns 1–8.
                        new StandardDeviation(1, 2, 3, 4, 5, 6, 7, 8),
                        new Mean(1, 2, 3, 4, 5, 6, 7, 8),
                        new Min(1, 2, 3, 4, 5, 6, 7, 8),
                        new Max(1, 2, 3, 4, 5, 6, 7, 8),
                        new Median(1, 2, 3, 4, 5, 6, 7, 8),
                        new Percentile(5, 1, 2, 3, 4, 5, 6, 7, 8),
                        new Percentile(10, 1, 2, 3, 4, 5, 6, 7, 8),
                        new Percentile(25, 1, 2, 3, 4, 5, 6, 7, 8),
                        new Percentile(50, 1, 2, 3, 4, 5, 6, 7, 8),
                        new Percentile(75, 1, 2, 3, 4, 5, 6, 7, 8),
                        new Percentile(90, 1, 2, 3, 4, 5, 6, 7, 8),
                        new Percentile(95, 1, 2, 3, 4, 5, 6, 7, 8),

                        new SumLargerThan(25, 1, 2, 3, 4, 5, 6, 7, 8),
                        new SumLargerThan(50, 1, 2, 3, 4, 5, 6, 7, 8),
                        new SumLargerThan(100, 1, 2, 3, 4, 5, 6, 7, 8),
                    }).ToArray());
                });

                // Labels use the same windowing parameters so that label
                // window i corresponds to data window i.
                var labelReader = new CSVReader <int>("MyoGymLabel.csv", false, (dynamic outputLabels) =>
                {
                    LabelWindows = outputLabels;
                    LabelWindows.ForEach(row =>
                    {
                        // Majority vote: the most frequent label in the window.
                        FeaturLabel.Add(row.GroupBy(x => x).OrderByDescending(g => g.Count()).Take(1).Select(i => i.Key).First());
                    });
                    // Cache features/labels so later runs can skip extraction
                    // (see TrainFromFeatures, which reads these files).
                    SaveArrayAsCSV(FeaturSpace.ToArray(), "MyoGymFeatures.csv");
                    SaveArrayAsCSV(FeaturLabel.ToArray(), "MyoGymFeaturesLabels.csv");
                    DecisionTreeClassifier dt = new DecisionTreeClassifier(FusionFramework.Classifiers.DecisionTreeLearningAlgorithms.C45Learning);
                    dt.Train(FeaturSpace, FeaturLabel, true);
                    dt.Save("LDAMyoGYM");
                }, new SlidingWindow <int>(200, 25));
                labelReader.Start();
            }, new SlidingWindow <double[]>(200, 25));

            dataReader.Start();
        }