示例#1
0
        public void AnimalClassifyMethod()
        {
            // Load the animal data set and split it 50/50 into a training part
            // (Item1) and a verification part (Item2).
            var loader  = new DataSetLoader();
            var animals = loader.SelectAnimals();
            var split   = loader.CalculatePercent(50, animals);

            // Build one classifier of each supported kind.
            var decisionTree = new DecisionTreeClassifier(split.Item1, new ShannonEntropySplitter());
            var bayes        = new NaiveBayesClassifier(split.Item1);
            var svm          = new SVMClassifier(animals, new LinearKernel(), 0.001, 10.0);
            var neuronalNet  = new NeuronalNetworkClassifier(
                loader.SelectNeuronalNetworkAnimals(), 16, 7, 16, 500, 0.1);

            var classifications = new List <NetML.Classification>
            {
                decisionTree,
                bayes,
                svm,
                neuronalNet
            };

            // Evaluate every classifier against the held-out portion.
            new Classifier().Classify(classifications, split.Item2);
        }
示例#2
0
        public void IrisDecisionTreeTest()
        {
            // Train a decision tree on the iris data set, then replay the data
            // set through the trained tree and log the prediction accuracy.
            var loader = new DataSetLoader();

            Console.WriteLine(" Reading DataSet.. ");
            var trainingIrises = loader.SelectIrises();
            var tree = new DecisionTreeClassifier(trainingIrises, new ShannonEntropySplitter());

            tree.Train();

            var testIrises  = loader.SelectIrises();
            var trueCounter = 0;
            var counter     = 0;

            foreach (var item in testIrises)
            {
                // Item1 = feature vector, Item2 = expected class value.
                var outputValue = tree.Classify(item.Item1);
                if (outputValue == item.Item2)
                {
                    trueCounter++;
                }
                Debug.WriteLine(string.Format("Value {0} - Predicted {1} = {2}",
                                              item.Item2, outputValue, (outputValue == item.Item2) ? "true" : "false"));
                counter++;
            }

            // "Verhältnis" = ratio of correct predictions to total samples.
            Debug.WriteLine(string.Format("Data {0} - True {1} Verhältnis: {2}",
                                          counter.ToString(), trueCounter.ToString(), (Convert.ToDouble(trueCounter) / Convert.ToDouble(counter)).ToString()));
        }
示例#3
0
        public void CreditDataClassifyMethod()
        {
            // Load the credit data set; CalculatePercent(100, ...) keeps the full
            // set in Item1, so training and verification use the same data here.
            var loader     = new DataSetLoader();
            var creditData = loader.SelectCreditData();
            var split      = loader.CalculatePercent(100, creditData);

            var decisionTree = new DecisionTreeClassifier(split.Item1, new ShannonEntropySplitter());
            var bayes        = new NaiveBayesClassifier(split.Item1);
            var svm          = new SVMClassifier(creditData, new LinearKernel(), 0.001, 10.0);
            // Constructed but deliberately not added to the evaluation list below.
            var neuronalNet  = new NeuronalNetworkClassifier(
                loader.SelectNeuronalNetworksCreditData(), 20, 2, 20, 5000, 0.1);

            var classifications = new List <NetML.Classification>
            {
                decisionTree,
                bayes,
                svm
            };

            new Classifier().Classify(classifications, creditData);
        }
示例#4
0
        static void Cmacc_mqtt()
        {
            // Classifies live accelerometer windows arriving over MQTT with a
            // pre-trained C4.5 decision tree and prints the recognized activity.
            // Fix: the original also constructed a local FeatureManager instance
            // that was never used (the lambda calls the static
            // FeatureManager.Generate); the dead local has been removed.
            var classifier = new DecisionTreeClassifier(FusionFramework.Classifiers.DecisionTreeLearningAlgorithms.C45Learning);

            // NOTE(review): hard-coded absolute model path; consider making this configurable.
            classifier.Load("C:\\Users\\riz\\Desktop\\WISDM\\WisdnDT");

            var dataReader = new MQTTReader <double[]>("/i5/mobileMotion/accelerometer", (dynamic output) =>
            {
                // Feature vector computed over the current window.
                var featureSpace = FeatureManager.Generate(output, new List <IFeature>()
                {
                    new Mean(),
                    new StandardDeviation(),
                    new MeanAbsoluteDeviation(),
                    new ResultantAcceleration(),
                    new BinDistribution(10),

                    new Variance(),
                    new Median(),
                    new Range(),
                    new Min(),
                    new Max(),
                    new RootMeanSquare()
                });

                Console.WriteLine("HAR(" + classifier.Classify(featureSpace) + ")");
            });

            // 200-sample windows, no overlap.
            dataReader.Add(new SlidingWindow <double[]>(200, 0));
            dataReader.Start();
        }
示例#5
0
        public List <EmergingPattern> ExtractPatterns(DecisionTreeClassifier tree,
                                                      Feature classFeature)
        {
            // Convenience overload: collect every pattern emitted by the
            // callback-based overload into a list.
            var result = new List <EmergingPattern>();

            ExtractPatterns(tree, result.Add, classFeature);
            return result;
        }
示例#6
0
        public void ExtractPatterns(DecisionTreeClassifier tree,
                                    Action <EmergingPattern> patternFound,
                                    Feature classFeature)
        {
            // Walk the classifier's tree from its root with an initially empty
            // selector context, reporting each pattern through the callback.
            DoExtractPatterns(tree.DecisionTree.TreeRootNode,
                              new List <SelectorContext>(),
                              tree.Model, patternFound, classFeature);
        }
        public IEnumerable <IEmergingPattern> Mine(InstanceModel model, IEnumerable <Instance> instances, Feature classFeature)
        {
            // Mines emerging patterns by building TreeCount unsupervised decision
            // trees, extracting the patterns of each tree that pass EPTester, and
            // finally recomputing every pattern's counts/supports over all instances.
            EmergingPatternCreator     EpCreator  = new EmergingPatternCreator();
            IEmergingPatternSimplifier simplifier = new EmergingPatternSimplifier(new ItemComparer());

            // All features except the class feature are split candidates.
            List <Feature> featuresToConsider = model.Features.Where(f => f != classFeature).ToList();
            //int featureCount = (FeatureCount != -1) ? FeatureCount : Convert.ToInt32(Math.Max((int)Math.Log(featuresToConsider.Count, 2) + 1, 0.63* featuresToConsider.Count));
            // Default feature-subset size per split: log2(|features|) + 1 (random-forest style).
            int featureCount   = (FeatureCount != -1) ? FeatureCount : (int)Math.Log(featuresToConsider.Count, 2) + 1;
            var resultPatterns = new List <IEmergingPattern>();

            // NOTE(review): instances is enumerated here and again per pattern below;
            // a deferred IEnumerable would be re-evaluated many times — confirm callers
            // pass a materialized collection.
            var instanceCount = instances.Count();

            for (int i = 0; i < TreeCount; i++)
            {
                // At every tree level, sample a random feature subset without repetition.
                unsupervisedDecisionTreeBuilder.OnSelectingFeaturesToConsider =
                    (features, level) => _sampler.SampleWithoutRepetition(featuresToConsider, featureCount);

                DecisionTree           tree           = unsupervisedDecisionTreeBuilder.Build(model, instances, classFeature);
                DecisionTreeClassifier treeClassifier = new DecisionTreeClassifier(tree);

                // A tree that never split (single leaf) yields no patterns.
                if (treeClassifier.DecisionTree.Leaves > 1)
                {
                    EpCreator.ExtractPatterns(treeClassifier,
                                              delegate(EmergingPattern p)
                    {
                        // Keep only patterns whose counts pass the quality test.
                        if (EPTester.Test(p.Counts, model, classFeature))
                        {
                            resultPatterns.Add(simplifier.Simplify(p));
                        }
                    },
                                              classFeature);
                }

                // NOTE(review): a null entry is appended after EVERY tree, so the
                // returned sequence contains TreeCount nulls interleaved with the
                // patterns — presumably per-tree separators; confirm before removing.
                resultPatterns.Add(null);
            }

            // Second pass: recompute each pattern's raw match count and support
            // over the full instance set (overwriting whatever Counts held before).
            foreach (var ep in resultPatterns)
            {
                if (ep != null)
                {
                    ep.Counts = new double[1];
                    foreach (var instance in instances)
                    {
                        if (ep.IsMatch(instance))
                        {
                            ep.Counts[0]++;
                        }
                    }

                    ep.Supports    = new double[1];
                    ep.Supports[0] = ep.Counts[0] / instanceCount;
                }
            }

            return(resultPatterns);
        }
示例#8
0
 public void PreConfig()
 {
     // Wires up the HAR pipeline: loads a pre-trained C4.5 decision tree and
     // connects the accelerometer feature extractor to the decision stage.
     Classifier = new DecisionTreeClassifier(FusionFramework.Classifiers.DecisionTreeLearningAlgorithms.C45Learning);
     Classifier.Load("Modules/HAR/WisdnDT");
     // Feature extraction driven by the accelerometer's reader/feature configuration.
     Feature1 = new DataInFeatureOut(AccelerometerSensor.GetConfiguration().Reader, AccelerometerSensor.GetConfiguration().Features);
     // Decision stage: fuse the single feature stream and apply the classifier.
     Decision = new FeaturesInDecisionOut(new List <IFusionStrategy>()
     {
         Feature1
     }, Classifier);
 }
示例#9
0
        public void PreConfig()
        {
            // Wires up the multi-sensor (accelerometer + gyroscope + EMG) fusion
            // pipeline: loads a pre-trained C4.5 tree, builds one feature extractor
            // per sensor, merges them, and feeds the result into the decision stage.
            Classifier = new DecisionTreeClassifier(FusionFramework.Classifiers.DecisionTreeLearningAlgorithms.C45Learning);
            Classifier.Load("LDAMyoGYM");
            // One feature extractor per sensor, driven by that sensor's configuration.
            AccelerometerFeatures = new DataInFeatureOut(AccelerometerSensor.GetConfiguration().Reader, AccelerometerSensor.GetConfiguration().Features);
            GryoFeatures          = new DataInFeatureOut(GyroscopeSensor.GetConfiguration().Reader, GyroscopeSensor.GetConfiguration().Features);
            EMGFeatures           = new DataInFeatureOut(EmgSensor.GetConfiguration().Reader, EmgSensor.GetConfiguration().Features);
            // Merge the three feature streams into a single combined feature vector.
            var CombinedFeatures = new FeaturesInFeatureOut(new List <IFusionStrategy>()
            {
                AccelerometerFeatures, GryoFeatures, EMGFeatures
            });

            Decision = new FeaturesInDecisionOut(new List <IFusionStrategy>()
            {
                CombinedFeatures
            }, Classifier);
        }
示例#10
0
        public void DecisionTreeTest()
        {
            // Three tiny labelled samples: 4-element feature vector -> class value.
            var data = new List <Tuple <double[], double> >
            {
                new Tuple <double[], double>(new double[] { 0.0, 3.0, 0.0, 0.0 }, 40.0),
                new Tuple <double[], double>(new double[] { 0.0, 3.0, 1.0, 1.0 }, 50.0),
                new Tuple <double[], double>(new double[] { 1.0, 3.0, 1.0, 1.0 }, 50.0)
            };

            var tree = new DecisionTreeClassifier(data, new ShannonEntropySplitter());

            tree.Train();

            // Classify a mix of seen and unseen vectors (results are not asserted here).
            var result = tree.Classify(new double[] { 0.0, 3.0, 0.0, 0.0 });

            result = tree.Classify(new double[] { 0.0, 3.0, 1.0, 1.0 });
            result = tree.Classify(new double[] { 1.0, 3.0, 0.0, 0.0 });
            result = tree.Classify(new double[] { 1.0, 0.0, 0.0, 0.0 });
        }
示例#11
0
        public override void Train()
        {
            // Trains a C4.5 decision tree from windowed raw data ("data.txt") and
            // windowed labels ("label.txt"), then persists the model.
            var featureSpace   = new List <double[]>();
            var featureLabels  = new List <int>();
            var featureManager = new FeatureManager();

            // Features computed over columns 1-3 (column 0 included for peak timing).
            featureManager.Add(new Mean(1, 2, 3));
            featureManager.Add(new StandardDeviation(1, 2, 3));
            featureManager.Add(new MeanAbsoluteDeviation(1, 2, 3));
            featureManager.Add(new AverageTimeBetweenPeaks(0, 1, 2, 3));
            featureManager.Add(new ResultantAcceleration());
            featureManager.Add(new BinDistribution(10, 1, 2, 3));
            //featureManager.Add(new PreProcessData());

            var dataReader = new CSVReader <double[]>("data.txt", false, (dynamic output) =>
            {
                // One feature vector per 200-sample window.
                List <List <double[]> > dataWindows = output;
                dataWindows.ForEach(window =>
                {
                    featureSpace.Add(featureManager.Generate(window).ToArray());
                });

                var labelReader = new CSVReader <int>("label.txt", false, (dynamic outputLabels) =>
                {
                    // Majority vote: the most frequent label inside each window.
                    List <List <int> > labelWindows = outputLabels;
                    labelWindows.ForEach(row =>
                    {
                        featureLabels.Add(row.GroupBy(x => x).OrderByDescending(g => g.Count()).Take(1).Select(i => i.Key).First());
                    });

                    var dt = new DecisionTreeClassifier(FusionFramework.Classifiers.DecisionTreeLearningAlgorithms.C45Learning);
                    dt.Train(featureSpace, featureLabels, true);
                    dt.Save("DecisionTreeWSDM");
                }, new SlidingWindow <int>(200, 0));
                labelReader.Start();
            }, new SlidingWindow <double[]>(200, 0));

            dataReader.Add(new Normalize());
            dataReader.Start();
        }
示例#12
0
        public void TrainFromFeatures()
        {
            // Trains a C4.5 decision tree directly from pre-computed feature
            // vectors and labels stored in CSV files, then persists the model.
            // Fix: removed the unused DataWindows/LabelWindows locals the original
            // declared but never read or assigned.
            List <double[]> featureSpace  = new List <double[]>();
            List <int>      featureLabels = new List <int>();

            var dataReader = new CSVReader <double[]>("MyoGymFeatures.csv", false, (dynamic output) =>
            {
                featureSpace    = output;
                var labelReader = new CSVReader <int>("MyoGymFeaturesLabels.csv", false, (dynamic outputLabels) =>
                {
                    featureLabels = outputLabels;

                    // Train on the loaded feature/label pairs and save the model.
                    DecisionTreeClassifier dt = new DecisionTreeClassifier(FusionFramework.Classifiers.DecisionTreeLearningAlgorithms.C45Learning);
                    dt.Train(featureSpace, featureLabels, true);
                    dt.Save("LDAMyoGYM");
                });

                labelReader.Start();
            });

            dataReader.Start();
        }
        //public void CreateDatasetAndTestset(
        //    AppIdentDataSource appIdentDataSource,
        //    double trainingToClassifyingRatio,
        //    out List<FeatureVector> trainingSet,
        //    out List<FeatureVector> verificationSet)
        //{
        //    trainingSet = new List<FeatureVector>();
        //    verificationSet = new List<FeatureVector>();

        //    var groupedFeatureVectors = from featureVector in appIdentDataSource.FeatureVectors
        //        group featureVector by featureVector.Label
        //        into featureVectors
        //        orderby featureVectors.Key
        //        select featureVectors;
        //    //todo this can me managed more randomly
        //    foreach(var gc in groupedFeatureVectors)
        //    {
        //        var conves = gc.ToList();
        //        var ratioIndex = (int) (conves.Count * trainingToClassifyingRatio);
        //        var testingDataCount = conves.Count - ratioIndex;
        //        trainingSet.AddRange(conves.GetRange(0, ratioIndex));
        //        verificationSet.AddRange(conves.GetRange(ratioIndex, testingDataCount));
        //    }
        //}

        public ApplicationProtocolClassificationStatisticsMeter DecisionTreeClassify(
            AppIdentDataSource appIdentDataSource,
            double trainingToVerificationRatio,
            double precisionTrashHold)
        {
            // Trains a decision tree on the data source's training set and scores
            // every verification feature vector, recording statistics only for
            // predictions whose precision exceeds precisionTrashHold.
            // NOTE(review): trainingToVerificationRatio is currently unused because
            // the dataset/testset split below is commented out.
            var precMeasure = new ApplicationProtocolClassificationStatisticsMeter();

            //this.CreateDatasetAndTestset(appIdentDataSource, trainingToVerificationRatio, out var trainingSet, out var verificationSet);
            var classifier = new DecisionTreeClassifier(appIdentDataSource.TrainingSet);

            foreach (var feature in appIdentDataSource.VerificationSet)
            {
                // Hide the true label from the classifier but keep it for scoring.
                var appTag = feature.Label;
                feature.Label = "Unknown";
                classifier.Normalizator.Normalize(feature);
                var cl = classifier.ClassifierModel.Predict(feature);
                if (cl.Precision > precisionTrashHold)
                {
                    // NOTE(review): feature.Label was set to "Unknown" above, so this
                    // relies on Predict() writing the predicted label back into the
                    // feature — confirm; otherwise the statistics compare "Unknown"
                    // against the true tag instead of the prediction (cl).
                    precMeasure.UpdateStatistics(feature.Label, appTag);
                }
            }
            return(precMeasure);
        }
示例#14
0
        public override void Train()
        {
            // Trains a C4.5 decision tree for MyoGym activity recognition: windows
            // the raw sensor CSV, extracts a large per-window feature vector, takes
            // the majority label per window, saves the intermediate feature CSVs,
            // then trains and persists the model.
            List <double[]>         FeaturSpace  = new List <double[]>();
            List <int>              FeaturLabel  = new List <int>();
            List <List <double[]> > DataWindows  = new List <List <double[]> >();
            List <List <int> >      LabelWindows = new List <List <int> >();
            var dataReader = new CSVReader <double[]>("MyoGym.csv", false, (dynamic output) =>
            {
                DataWindows = output;
                DataWindows.ForEach(window =>
                {
                    // Column groups: 10-12 and 14-16 look like two 3-axis sensor
                    // triplets; 1-8 an 8-channel block (presumably EMG) — TODO confirm.
                    FeaturSpace.Add(FeatureManager.Generate(window, new List <IFeature>()
                    {
                        new HjorthParameters(10, 11, 12),
                        new StandardDeviation(10, 11, 12),
                        new Mean(10, 11, 12),
                        new Min(10, 11, 12),
                        new Max(10, 11, 12),
                        new Percentile(5, 10, 11, 12),
                        new Percentile(10, 10, 11, 12),
                        new Percentile(25, 10, 11, 12),
                        new Percentile(50, 10, 11, 12),
                        new Percentile(75, 10, 11, 12),
                        new Percentile(90, 10, 11, 12),
                        new Percentile(95, 10, 11, 12),
                        new ZeroCrossing(10, 11, 12),
                        new MeanCrossing(10, 11, 12),
                        // NOTE(review): Entropy(11,12,13) differs from the 10,11,12
                        // columns used by the rest of this group — confirm intended.
                        new Entropy(11, 12, 13),
                        new Correlation(10, 11),
                        new Correlation(10, 12),
                        new Correlation(11, 12),

                        new HjorthParameters(14, 15, 16),
                        new StandardDeviation(14, 15, 16),
                        new Mean(14, 15, 16),
                        new Min(14, 15, 16),
                        new Max(14, 15, 16),
                        new Percentile(5, 14, 15, 16),
                        new Percentile(10, 14, 15, 16),
                        new Percentile(25, 14, 15, 16),
                        new Percentile(50, 14, 15, 16),
                        new Percentile(75, 14, 15, 16),
                        new Percentile(90, 14, 15, 16),
                        new Percentile(95, 14, 15, 16),
                        new ZeroCrossing(14, 15, 16),
                        new MeanCrossing(14, 15, 16),
                        new Entropy(14, 15, 16),

                        new StandardDeviation(1, 2, 3, 4, 5, 6, 7, 8),
                        new Mean(1, 2, 3, 4, 5, 6, 7, 8),
                        new Min(1, 2, 3, 4, 5, 6, 7, 8),
                        new Max(1, 2, 3, 4, 5, 6, 7, 8),
                        new Median(1, 2, 3, 4, 5, 6, 7, 8),
                        new Percentile(5, 1, 2, 3, 4, 5, 6, 7, 8),
                        new Percentile(10, 1, 2, 3, 4, 5, 6, 7, 8),
                        new Percentile(25, 1, 2, 3, 4, 5, 6, 7, 8),
                        new Percentile(50, 1, 2, 3, 4, 5, 6, 7, 8),
                        new Percentile(75, 1, 2, 3, 4, 5, 6, 7, 8),
                        new Percentile(90, 1, 2, 3, 4, 5, 6, 7, 8),
                        new Percentile(95, 1, 2, 3, 4, 5, 6, 7, 8),

                        new SumLargerThan(25, 1, 2, 3, 4, 5, 6, 7, 8),
                        new SumLargerThan(50, 1, 2, 3, 4, 5, 6, 7, 8),
                        new SumLargerThan(100, 1, 2, 3, 4, 5, 6, 7, 8),
                    }).ToArray());
                });

                var labelReader = new CSVReader <int>("MyoGymLabel.csv", false, (dynamic outputLabels) =>
                {
                    LabelWindows = outputLabels;
                    LabelWindows.ForEach(row =>
                    {
                        // Majority vote: the most frequent label inside each window.
                        FeaturLabel.Add(row.GroupBy(x => x).OrderByDescending(g => g.Count()).Take(1).Select(i => i.Key).First());
                    });
                    // Persist intermediate features/labels so TrainFromFeatures()
                    // can retrain without re-extracting.
                    SaveArrayAsCSV(FeaturSpace.ToArray(), "MyoGymFeatures.csv");
                    SaveArrayAsCSV(FeaturLabel.ToArray(), "MyoGymFeaturesLabels.csv");
                    DecisionTreeClassifier dt = new DecisionTreeClassifier(FusionFramework.Classifiers.DecisionTreeLearningAlgorithms.C45Learning);
                    dt.Train(FeaturSpace, FeaturLabel, true);
                    dt.Save("LDAMyoGYM");
                }, new SlidingWindow <int>(200, 25));
                labelReader.Start();
            }, new SlidingWindow <double[]>(200, 25));  // 200-sample windows, 25 overlap

            dataReader.Start();
        }
        public IEnumerable <IEmergingPattern> MineTest(InstanceModel model, IEnumerable <Instance> instances, Feature classFeature)
        {
            // Variant of Mine() that biases the per-tree feature sampling towards
            // features used LESS often by previously accepted patterns (tracked in
            // featureUseCount), instead of sampling uniformly without repetition.
            EmergingPatternCreator     EpCreator  = new EmergingPatternCreator();
            IEmergingPatternSimplifier simplifier = new EmergingPatternSimplifier(new ItemComparer());

            List <Feature> featuresToConsider = model.Features.Where(f => f != classFeature).ToList();
            // Subset size: max(log2(n) + 1, 0.63 * n) — larger than Mine()'s default.
            int            featureCount       = (FeatureCount != -1) ? FeatureCount : (int)Math.Max(Math.Log(featuresToConsider.Count, 2) + 1, 0.63 * featuresToConsider.Count);
            var            resultPatterns     = new List <IEmergingPattern>();

            // Every candidate feature starts with zero recorded uses.
            featureUseCount = new Dictionary <Feature, int>();
            foreach (var feature in featuresToConsider)
            {
                featureUseCount.Add(feature, 0);
            }

            allFeaturesUseCount = 0;
            // NOTE(review): instances is enumerated here and again per pattern below;
            // confirm callers pass a materialized collection.
            var instanceCount = instances.Count();

            for (int i = 0; i < TreeCount; i++)
            {
                // Build a cumulative sampling distribution: a feature's weight is
                // proportional to (max - itsUseCount), so rarely-used features get
                // higher probability. First iteration (no uses yet) is uniform.
                cumulativeProbabilities = new List <double>();
                double max = 0;
                for (int j = 0; j < featuresToConsider.Count; j++)
                {
                    if (featureUseCount[featuresToConsider[j]] > max)
                    {
                        max = featureUseCount[featuresToConsider[j]];
                    }
                }
                double sum = 0;
                for (int j = 0; j < featuresToConsider.Count; j++)
                {
                    // NOTE(review): if allFeaturesUseCount > 0 while max == 0 this
                    // divides by zero, and if every feature has useCount == max the
                    // normalisation below divides sum == 0 — confirm inputs preclude this.
                    cumulativeProbabilities.Add(allFeaturesUseCount == 0
                        ? 1.0 / featuresToConsider.Count
                        : 1.0 * (max - featureUseCount[featuresToConsider[j]]) / max);

                    //cumulativeProbabilities.Add(allFeaturesUseCount == 0
                    //    ? 1.0 / featuresToConsider.Count
                    //    : 1.0 * (featureUseCount[featuresToConsider[j]]) / allFeaturesUseCount);


                    sum += cumulativeProbabilities[j];

                    if (j > 0)
                    {
                        cumulativeProbabilities[j] += cumulativeProbabilities[j - 1];
                    }

                    // Sanity check: the running sum and the cumulative entry are built
                    // from the same additions in the same order, so they must agree.
                    if (sum != cumulativeProbabilities[j])
                    {
                        throw new Exception("Error computing cumalitive probabilities!");
                    }
                }
                // Normalise so the cumulative distribution ends at 1.
                for (int j = 0; j < featuresToConsider.Count; j++)
                {
                    cumulativeProbabilities[j] /= sum;
                }

                // Sample feature subsets according to the distribution built above.
                unsupervisedDecisionTreeBuilder.OnSelectingFeaturesToConsider =
                    (features, level) => SampleWithDistribution(featuresToConsider, featureCount);

                DecisionTree           tree           = unsupervisedDecisionTreeBuilder.Build(model, instances, classFeature);
                DecisionTreeClassifier treeClassifier = new DecisionTreeClassifier(tree);

                // A tree that never split (single leaf) yields no patterns.
                if (treeClassifier.DecisionTree.Leaves > 1)
                {
                    EpCreator.ExtractPatterns(treeClassifier,
                                              delegate(EmergingPattern p)
                    {
                        if (EPTester.Test(p.Counts, model, classFeature))
                        {
                            // Record which features the accepted pattern uses; this
                            // feeds the next iteration's sampling distribution.
                            foreach (Item item in p.Items)
                            {
                                featureUseCount[item.Feature]++;
                                allFeaturesUseCount++;
                            }

                            resultPatterns.Add(simplifier.Simplify(p));
                        }
                    },
                                              classFeature);
                }

                // NOTE(review): one null marker is appended per tree (same as Mine()).
                resultPatterns.Add(null);
            }

            // Second pass: recompute each pattern's raw match count and support
            // over the full instance set.
            foreach (var ep in resultPatterns)
            {
                if (ep != null)
                {
                    ep.Counts = new double[1];
                    foreach (var instance in instances)
                    {
                        if (ep.IsMatch(instance))
                        {
                            ep.Counts[0]++;
                        }
                    }

                    ep.Supports    = new double[1];
                    ep.Supports[0] = ep.Counts[0] / instanceCount;
                }
            }

            return(resultPatterns);
        }
示例#16
0
        public void TestSampleWeight()
        {
            // Part 1: zero-weighted samples must be ignored entirely by the fit.
            var features = Enumerable.Range(0, 100).ToColumnMatrix();
            var labels   = Enumerable.Repeat(1, 100).ToArray();

            Array.Clear(labels, 0, 50);

            var weights = Enumerable.Repeat(1, 100).ToVector();

            weights.SetSubVector(0, 50, Enumerable.Repeat(0, 50).ToVector());

            var tree = new DecisionTreeClassifier <int>(random: new Random(0));

            tree.Fit(features, labels, sampleWeight: weights);
            AssertExt.ArrayEqual(tree.Predict(features), Enumerable.Repeat(1, 100).ToArray());

            // Part 2: low-weighted samples should not dominate a depth-1 split.
            features = Enumerable.Range(0, 200).ToColumnMatrix();
            labels   = new int[200];
            Array.Copy(Enumerable.Repeat(1, 50).ToArray(), 0, labels, 50, 50);
            Array.Copy(Enumerable.Repeat(2, 100).ToArray(), 0, labels, 100, 100);
            features.SetSubMatrix(100, 100, 0, 1, Enumerable.Repeat(200, 100).ToColumnMatrix());

            weights = Enumerable.Repeat(1, 200).ToVector();

            weights.SetSubVector(100, 100, Enumerable.Repeat(0.51, 100).ToVector());
            // Samples of class '2' are still weightier
            tree = new DecisionTreeClassifier <int>(maxDepth: 1, random: new Random(0));
            tree.Fit(features, labels, sampleWeight: weights);
            Assert.AreEqual(149.5, tree.Tree.Threshold[0]);

            weights.SetSubVector(100, 100, Enumerable.Repeat(0.50, 100).ToVector());
            // Samples of class '2' are no longer weightier
            tree = new DecisionTreeClassifier <int>(maxDepth: 1, random: new Random(0));
            tree.Fit(features, labels, sampleWeight: weights);
            Assert.AreEqual(49.5, tree.Tree.Threshold[0]); // Threshold should have moved

            // Part 3: weighting a sample n times must match duplicating it n times.
            features = iris.Data;
            labels   = iris.Target;

            var rng        = new Random(0);
            var dupIndices = new int[200];

            for (int i = 0; i < dupIndices.Length; i++)
            {
                dupIndices[i] = rng.Next(features.RowCount);
            }

            tree = new DecisionTreeClassifier <int>(random: new Random(1));
            tree.Fit(features.RowsAt(dupIndices), labels.ElementsAt(dupIndices));

            // Weight each row by how many times it was duplicated.
            weights = Np.BinCount(dupIndices, minLength: features.RowCount).ToVector();
            var weightedTree = new DecisionTreeClassifier <int>(random: new Random(1));

            weightedTree.Fit(features, labels, sampleWeight: weights);

            // Compare split thresholds at internal (non-leaf) nodes only.
            var internalNodes = tree.Tree.ChildrenLeft.Indices(v => v != Tree._TREE_LEAF);

            AssertExt.AlmostEqual(tree.Tree.Threshold.ElementsAt(internalNodes),
                                  weightedTree.Tree.Threshold.ElementsAt(internalNodes));
        }
示例#17
0
        static void CMacc()
        {
            // Classifies live Myo sensor windows arriving over MQTT with a
            // pre-trained C4.5 decision tree and prints the predicted class.
            // NOTE(review): featureManager below is constructed but never used —
            // the lambda calls the static FeatureManager.Generate instead.
            FeatureManager featureManager = new FeatureManager();
            var            Classifier     = new DecisionTreeClassifier(FusionFramework.Classifiers.DecisionTreeLearningAlgorithms.C45Learning);

            // NOTE(review): hard-coded absolute model path; consider making it configurable.
            Classifier.Load("C:\\Users\\riz\\Desktop\\MyoGymDT");
            var dataReader = new MQTTReader <double[]>("/i5/myo/full", (dynamic output) =>
            {
                // Feature vector per window. Column groups: 8-10 and 11-13 look like
                // two 3-axis sensor triplets; 0-7 an 8-channel block — TODO confirm.
                var FeaturSpace = FeatureManager.Generate(output, new List <IFeature>()
                {
                    new HjorthParameters(8, 9, 10),
                    new StandardDeviation(8, 9, 10),
                    new Mean(8, 9, 10),
                    new Max(8, 9, 10),
                    new Min(8, 9, 10),
                    new Percentile(5, 8, 9, 10),
                    new Percentile(10, 8, 9, 10),
                    new Percentile(25, 8, 9, 10),
                    new Percentile(50, 8, 9, 10),
                    new Percentile(75, 8, 9, 10),
                    new Percentile(90, 8, 9, 10),
                    new Percentile(95, 8, 9, 10),
                    new ZeroCrossing(8, 9, 10),
                    new MeanCrossing(8, 9, 10),
                    // NOTE(review): Entropy(9,10,11) and Correlation(9,11)/(10,11)
                    // reach into column 11, unlike the 8,9,10 columns used by the
                    // rest of this group — confirm intended.
                    new Entropy(9, 10, 11),
                    new Correlation(9, 10),
                    new Correlation(9, 11),
                    new Correlation(10, 11),

                    new HjorthParameters(11, 12, 13),
                    new StandardDeviation(11, 12, 13),
                    new Mean(11, 12, 13),
                    new Max(11, 12, 13),
                    new Min(11, 12, 13),
                    new Percentile(5, 11, 12, 13),
                    new Percentile(10, 11, 12, 13),
                    new Percentile(25, 11, 12, 13),
                    new Percentile(50, 11, 12, 13),
                    new Percentile(75, 11, 12, 13),
                    new Percentile(90, 11, 12, 13),
                    new Percentile(95, 11, 12, 13),
                    new ZeroCrossing(11, 12, 13),
                    new MeanCrossing(11, 12, 13),
                    new Entropy(11, 12, 13),

                    new StandardDeviation(0, 1, 2, 3, 4, 5, 6, 7),
                    new Mean(0, 1, 2, 3, 4, 5, 6, 7),
                    new Max(0, 1, 2, 3, 4, 5, 6, 7),
                    new Min(0, 1, 2, 3, 4, 5, 6, 7),
                    new Percentile(5, 0, 1, 2, 3, 4, 5, 6, 7),
                    new Percentile(10, 0, 1, 2, 3, 4, 5, 6, 7),
                    new Percentile(25, 0, 1, 2, 3, 4, 5, 6, 7),
                    new Percentile(50, 0, 1, 2, 3, 4, 5, 6, 7),
                    new Percentile(75, 0, 1, 2, 3, 4, 5, 6, 7),
                    new Percentile(90, 0, 1, 2, 3, 4, 5, 6, 7),
                    new Percentile(95, 0, 1, 2, 3, 4, 5, 6, 7),

                    new SumLargerThan(25, 0, 1, 2, 3, 4, 5, 6, 7),
                    new SumLargerThan(50, 0, 1, 2, 3, 4, 5, 6, 7),
                    new SumLargerThan(100, 0, 1, 2, 3, 4, 5, 6, 7)
                });

                Console.WriteLine(Classifier.Classify(FeaturSpace));
            });

            // 200-sample windows, no overlap.
            dataReader.Add(new SlidingWindow <double[]>(200, 0));
            dataReader.Start();
        }