Beispiel #1
0
        /// <summary>
        /// Live human-activity recognition over MQTT: subscribes to the
        /// accelerometer topic, computes a feature vector per 200-sample
        /// sliding window, and prints the class predicted by a pre-trained
        /// C4.5 decision tree loaded from disk.
        /// </summary>
        static void Cmacc_mqtt()
        {
            // NOTE(review): the original also constructed a FeatureManager instance
            // that was never used (FeatureManager.Generate below is a static call),
            // so it has been removed.
            var classifier = new DecisionTreeClassifier(FusionFramework.Classifiers.DecisionTreeLearningAlgorithms.C45Learning);

            classifier.Load("C:\\Users\\riz\\Desktop\\WISDM\\WisdnDT");
            var dataReader = new MQTTReader <double[]>("/i5/mobileMotion/accelerometer", (dynamic output) =>
            {
                // Feature order defines the feature-vector layout; it must match
                // the layout the saved model was trained with — do not reorder.
                var featureSpace = FeatureManager.Generate(output, new List <IFeature>()
                {
                    new Mean(),
                    new StandardDeviation(),
                    new MeanAbsoluteDeviation(),
                    new ResultantAcceleration(),
                    new BinDistribution(10),

                    new Variance(),
                    new Median(),
                    new Range(),
                    new Min(),
                    new Max(),
                    new RootMeanSquare()
                });

                Console.WriteLine("HAR(" + classifier.Classify(featureSpace) + ")");
            });

            // 200-sample window, second argument 0 — presumably no overlap and
            // matching the training configuration; TODO confirm.
            dataReader.Add(new SlidingWindow <double[]>(200, 0));
            dataReader.Start();
        }
Beispiel #2
0
        /// <summary>
        /// Runs the fusion pipeline over the current data: pre-processes the raw
        /// data in place, generates the configured feature set from it,
        /// post-processes the data in place, and finally raises the
        /// fusion-finished event with the freshly generated feature vector.
        /// </summary>
        public void Fuse()
        {
            PreProcess(ref Data);

            var generatedFeatures = FeatureManager.Generate(Data, Features);

            PostProcess(ref Data);
            OnFusionFinished(generatedFeatures);
        }
Beispiel #3
0
        /// <summary>
        /// Live classification of the Myo sensor stream using a pre-trained
        /// multiclass SVM (the "NOEMG" model): subscribes to the full Myo topic,
        /// computes a feature vector per 200-sample window over data columns 0-2
        /// and 3-5, and prints the predicted class.
        /// </summary>
        static void MyoGym_no_acc()
        {
            // NOTE(review): the original also constructed a FeatureManager instance
            // that was never used (FeatureManager.Generate below is a static call),
            // so it has been removed.
            var classifier = new MulticlassSupportVectorMachineClassifier();

            classifier.Load("C:\\Users\\riz\\Desktop\\MyoGYm\\MyoGym_NOEMG_SVM");
            var dataReader = new MQTTReader <double[]>("/i5/myo/full", (dynamic output) =>
            {
                // Feature order and column indices define the feature-vector layout;
                // they must match the layout the saved model was trained with.
                var featureSpace = FeatureManager.Generate(output, new List <IFeature>()
                {
                    // Columns 0-2: first three-axis group.
                    new HjorthParameters(0, 1, 2),
                    new StandardDeviation(0, 1, 2),
                    new Mean(0, 1, 2),
                    new Max(0, 1, 2),
                    new Min(0, 1, 2),
                    new Percentile(5, 0, 1, 2),
                    new Percentile(10, 0, 1, 2),
                    new Percentile(25, 0, 1, 2),
                    new Percentile(50, 0, 1, 2),
                    new Percentile(75, 0, 1, 2),
                    new Percentile(90, 0, 1, 2),
                    new Percentile(95, 0, 1, 2),
                    new ZeroCrossing(0, 1, 2),
                    new MeanCrossing(0, 1, 2),
                    new Entropy(0, 1, 2),
                    new Correlation(0, 1),
                    new Correlation(0, 2),
                    new Correlation(1, 2),

                    // Columns 3-5: second three-axis group.
                    new HjorthParameters(3, 4, 5),
                    new StandardDeviation(3, 4, 5),
                    new Mean(3, 4, 5),
                    new Max(3, 4, 5),
                    new Min(3, 4, 5),
                    new Percentile(5, 3, 4, 5),
                    new Percentile(10, 3, 4, 5),
                    new Percentile(25, 3, 4, 5),
                    new Percentile(50, 3, 4, 5),
                    new Percentile(75, 3, 4, 5),
                    new Percentile(90, 3, 4, 5),
                    new Percentile(95, 3, 4, 5),
                    new ZeroCrossing(3, 4, 5),
                    new MeanCrossing(3, 4, 5),
                    new Entropy(3, 4, 5)
                });

                Console.WriteLine(classifier.Classify(featureSpace));
            });

            // 200-sample window, second argument 0 — presumably no overlap and
            // matching the training configuration; TODO confirm.
            dataReader.Add(new SlidingWindow <double[]>(200, 0));
            dataReader.Start();
        }
Beispiel #4
0
        /// <summary>
        /// Trains a C4.5 decision tree from windowed CSV data: reads raw sample
        /// windows from "data.txt", generates one feature vector per window,
        /// pairs each window with the majority label of the corresponding window
        /// in "label.txt", then trains the tree and saves it as
        /// "DecisionTreeWSDM".
        /// </summary>
        public override void Train()
        {
            List <double[]> featureSpace   = new List <double[]>();
            List <int>      featureLabels  = new List <int>();
            FeatureManager  featureManager = new FeatureManager();

            // Feature set applied to every window. The integer arguments select the
            // data columns each feature operates on — assumes column 0 is a
            // timestamp and 1-3 are the signal axes; TODO confirm against the
            // "data.txt" format.
            featureManager.Add(new Mean(1, 2, 3));
            featureManager.Add(new StandardDeviation(1, 2, 3));
            featureManager.Add(new MeanAbsoluteDeviation(1, 2, 3));
            featureManager.Add(new AverageTimeBetweenPeaks(0, 1, 2, 3));
            featureManager.Add(new ResultantAcceleration());
            featureManager.Add(new BinDistribution(10, 1, 2, 3));

            var dataReader = new CSVReader <double[]>("data.txt", false, (dynamic output) =>
            {
                List <List <double[]> > dataWindows = output;
                dataWindows.ForEach(window =>
                {
                    featureSpace.Add(featureManager.Generate(window).ToArray());
                });

                // Labels are read only after the data windows are featurized so the
                // two lists line up index-for-index; both use the same window size.
                var labelReader = new CSVReader <int>("label.txt", false, (dynamic outputLabels) =>
                {
                    List <List <int> > labelWindows = outputLabels;
                    labelWindows.ForEach(row =>
                    {
                        // Majority vote: the most frequent label within the window.
                        featureLabels.Add(row.GroupBy(x => x).OrderByDescending(g => g.Count()).First().Key);
                    });

                    DecisionTreeClassifier dt = new DecisionTreeClassifier(FusionFramework.Classifiers.DecisionTreeLearningAlgorithms.C45Learning);
                    dt.Train(featureSpace, featureLabels, true);
                    dt.Save("DecisionTreeWSDM");
                }, new SlidingWindow <int>(200, 0));
                labelReader.Start();
            }, new SlidingWindow <double[]>(200, 0));

            dataReader.Add(new Normalize());
            dataReader.Start();
        }
Beispiel #5
0
        /// <summary>
        /// Trains a C4.5 decision tree for the MyoGym data set: reads windowed
        /// samples from "MyoGym.csv", generates one feature vector per window,
        /// pairs each window with the majority label of the matching window in
        /// "MyoGymLabel.csv", dumps the features and labels to CSV, then trains
        /// and saves the classifier.
        /// </summary>
        public override void Train()
        {
            List <double[]>         FeaturSpace  = new List <double[]>();
            List <int>              FeaturLabel  = new List <int>();
            List <List <double[]> > DataWindows  = new List <List <double[]> >();
            List <List <int> >      LabelWindows = new List <List <int> >();
            var dataReader = new CSVReader <double[]>("MyoGym.csv", false, (dynamic output) =>
            {
                DataWindows = output;
                DataWindows.ForEach(window =>
                {
                    // Feature order and column indices define the feature-vector
                    // layout; anything trained here must be consumed with the same
                    // layout. Do not reorder.
                    FeaturSpace.Add(FeatureManager.Generate(window, new List <IFeature>()
                    {
                        // Columns 10-12: first three-axis group.
                        new HjorthParameters(10, 11, 12),
                        new StandardDeviation(10, 11, 12),
                        new Mean(10, 11, 12),
                        new Min(10, 11, 12),
                        new Max(10, 11, 12),
                        new Percentile(5, 10, 11, 12),
                        new Percentile(10, 10, 11, 12),
                        new Percentile(25, 10, 11, 12),
                        new Percentile(50, 10, 11, 12),
                        new Percentile(75, 10, 11, 12),
                        new Percentile(90, 10, 11, 12),
                        new Percentile(95, 10, 11, 12),
                        new ZeroCrossing(10, 11, 12),
                        new MeanCrossing(10, 11, 12),
                        // NOTE(review): Entropy(11, 12, 13) breaks the 10-12 column
                        // pattern of this group — possibly a copy-paste slip. Left
                        // unchanged because the saved model depends on this exact
                        // layout; confirm against the intended column map.
                        new Entropy(11, 12, 13),
                        new Correlation(10, 11),
                        new Correlation(10, 12),
                        new Correlation(11, 12),

                        // Columns 14-16: second three-axis group.
                        new HjorthParameters(14, 15, 16),
                        new StandardDeviation(14, 15, 16),
                        new Mean(14, 15, 16),
                        new Min(14, 15, 16),
                        new Max(14, 15, 16),
                        new Percentile(5, 14, 15, 16),
                        new Percentile(10, 14, 15, 16),
                        new Percentile(25, 14, 15, 16),
                        new Percentile(50, 14, 15, 16),
                        new Percentile(75, 14, 15, 16),
                        new Percentile(90, 14, 15, 16),
                        new Percentile(95, 14, 15, 16),
                        new ZeroCrossing(14, 15, 16),
                        new MeanCrossing(14, 15, 16),
                        new Entropy(14, 15, 16),

                        // Columns 1-8: eight-channel group.
                        new StandardDeviation(1, 2, 3, 4, 5, 6, 7, 8),
                        new Mean(1, 2, 3, 4, 5, 6, 7, 8),
                        new Min(1, 2, 3, 4, 5, 6, 7, 8),
                        new Max(1, 2, 3, 4, 5, 6, 7, 8),
                        new Median(1, 2, 3, 4, 5, 6, 7, 8),
                        new Percentile(5, 1, 2, 3, 4, 5, 6, 7, 8),
                        new Percentile(10, 1, 2, 3, 4, 5, 6, 7, 8),
                        new Percentile(25, 1, 2, 3, 4, 5, 6, 7, 8),
                        new Percentile(50, 1, 2, 3, 4, 5, 6, 7, 8),
                        new Percentile(75, 1, 2, 3, 4, 5, 6, 7, 8),
                        new Percentile(90, 1, 2, 3, 4, 5, 6, 7, 8),
                        new Percentile(95, 1, 2, 3, 4, 5, 6, 7, 8),

                        new SumLargerThan(25, 1, 2, 3, 4, 5, 6, 7, 8),
                        new SumLargerThan(50, 1, 2, 3, 4, 5, 6, 7, 8),
                        new SumLargerThan(100, 1, 2, 3, 4, 5, 6, 7, 8),
                    }).ToArray());
                });

                var labelReader = new CSVReader <int>("MyoGymLabel.csv", false, (dynamic outputLabels) =>
                {
                    LabelWindows = outputLabels;
                    LabelWindows.ForEach(row =>
                    {
                        // Majority vote: the most frequent label within the window.
                        FeaturLabel.Add(row.GroupBy(x => x).OrderByDescending(g => g.Count()).Take(1).Select(i => i.Key).First());
                    });
                    // Persist the intermediate training matrices for inspection/reuse.
                    SaveArrayAsCSV(FeaturSpace.ToArray(), "MyoGymFeatures.csv");
                    SaveArrayAsCSV(FeaturLabel.ToArray(), "MyoGymFeaturesLabels.csv");
                    // NOTE(review): the save name says "LDA" but this trains a
                    // decision tree — confirm the intended model/file name.
                    DecisionTreeClassifier dt = new DecisionTreeClassifier(FusionFramework.Classifiers.DecisionTreeLearningAlgorithms.C45Learning);
                    dt.Train(FeaturSpace, FeaturLabel, true);
                    dt.Save("LDAMyoGYM");
                }, new SlidingWindow <int>(200, 25));
                labelReader.Start();
            }, new SlidingWindow <double[]>(200, 25));

            dataReader.Start();
        }
Beispiel #6
0
        /// <summary>
        /// Live classification of the Myo sensor stream using a pre-trained C4.5
        /// decision tree: subscribes to the full Myo topic, computes a feature
        /// vector per 200-sample window, and prints the predicted class.
        /// </summary>
        static void CMacc()
        {
            // NOTE(review): the original also constructed a FeatureManager instance
            // that was never used (FeatureManager.Generate below is a static call),
            // so it has been removed.
            var classifier = new DecisionTreeClassifier(FusionFramework.Classifiers.DecisionTreeLearningAlgorithms.C45Learning);

            classifier.Load("C:\\Users\\riz\\Desktop\\MyoGymDT");
            var dataReader = new MQTTReader <double[]>("/i5/myo/full", (dynamic output) =>
            {
                // Feature order and column indices define the feature-vector layout;
                // they must match the layout the loaded model was trained with.
                var featureSpace = FeatureManager.Generate(output, new List <IFeature>()
                {
                    // Columns 8-10: first three-axis group.
                    new HjorthParameters(8, 9, 10),
                    new StandardDeviation(8, 9, 10),
                    new Mean(8, 9, 10),
                    new Max(8, 9, 10),
                    new Min(8, 9, 10),
                    new Percentile(5, 8, 9, 10),
                    new Percentile(10, 8, 9, 10),
                    new Percentile(25, 8, 9, 10),
                    new Percentile(50, 8, 9, 10),
                    new Percentile(75, 8, 9, 10),
                    new Percentile(90, 8, 9, 10),
                    new Percentile(95, 8, 9, 10),
                    new ZeroCrossing(8, 9, 10),
                    new MeanCrossing(8, 9, 10),
                    // NOTE(review): Entropy(9, 10, 11) and the Correlation pairs
                    // below break the 8-10 column pattern of this group (index 11
                    // belongs to the next group) — looks like a copy-paste slip,
                    // but the loaded model was trained with exactly this layout,
                    // so it is left unchanged. Verify against the training code.
                    new Entropy(9, 10, 11),
                    new Correlation(9, 10),
                    new Correlation(9, 11),
                    new Correlation(10, 11),

                    // Columns 11-13: second three-axis group.
                    new HjorthParameters(11, 12, 13),
                    new StandardDeviation(11, 12, 13),
                    new Mean(11, 12, 13),
                    new Max(11, 12, 13),
                    new Min(11, 12, 13),
                    new Percentile(5, 11, 12, 13),
                    new Percentile(10, 11, 12, 13),
                    new Percentile(25, 11, 12, 13),
                    new Percentile(50, 11, 12, 13),
                    new Percentile(75, 11, 12, 13),
                    new Percentile(90, 11, 12, 13),
                    new Percentile(95, 11, 12, 13),
                    new ZeroCrossing(11, 12, 13),
                    new MeanCrossing(11, 12, 13),
                    new Entropy(11, 12, 13),

                    // Columns 0-7: eight-channel group.
                    new StandardDeviation(0, 1, 2, 3, 4, 5, 6, 7),
                    new Mean(0, 1, 2, 3, 4, 5, 6, 7),
                    new Max(0, 1, 2, 3, 4, 5, 6, 7),
                    new Min(0, 1, 2, 3, 4, 5, 6, 7),
                    new Percentile(5, 0, 1, 2, 3, 4, 5, 6, 7),
                    new Percentile(10, 0, 1, 2, 3, 4, 5, 6, 7),
                    new Percentile(25, 0, 1, 2, 3, 4, 5, 6, 7),
                    new Percentile(50, 0, 1, 2, 3, 4, 5, 6, 7),
                    new Percentile(75, 0, 1, 2, 3, 4, 5, 6, 7),
                    new Percentile(90, 0, 1, 2, 3, 4, 5, 6, 7),
                    new Percentile(95, 0, 1, 2, 3, 4, 5, 6, 7),

                    new SumLargerThan(25, 0, 1, 2, 3, 4, 5, 6, 7),
                    new SumLargerThan(50, 0, 1, 2, 3, 4, 5, 6, 7),
                    new SumLargerThan(100, 0, 1, 2, 3, 4, 5, 6, 7)
                });

                Console.WriteLine(classifier.Classify(featureSpace));
            });

            // 200-sample window, second argument 0 — presumably no overlap and
            // matching the training configuration; TODO confirm.
            dataReader.Add(new SlidingWindow <double[]>(200, 0));
            dataReader.Start();
        }
Beispiel #7
0
        /// <summary>
        /// Live gym-exercise recognition on the Myo sensor stream using a
        /// pre-trained multiclass SVM: subscribes to the full Myo topic, computes
        /// a feature vector per 200-sample window, and prints the predicted class
        /// wrapped as "HGR(...)".
        /// </summary>
        static void MyoGym()
        {
            // NOTE(review): the original also constructed a FeatureManager instance
            // that was never used (FeatureManager.Generate below is a static call),
            // so it has been removed.
            var classifier = new MulticlassSupportVectorMachineClassifier();

            classifier.Load("C:\\Users\\riz\\Desktop\\MyoGYm\\MyoGym_SVM");
            var dataReader = new MQTTReader <double[]>("/i5/myo/full", (dynamic output) =>
            {
                // Feature order and column indices define the feature-vector layout;
                // they must match the layout the loaded model was trained with.
                var featureSpace = FeatureManager.Generate(output, new List <IFeature>()
                {
                    // Columns 8-10: first three-axis group.
                    new HjorthParameters(8, 9, 10),
                    new StandardDeviation(8, 9, 10),
                    new Mean(8, 9, 10),
                    new Max(8, 9, 10),
                    new Min(8, 9, 10),
                    new Percentile(5, 8, 9, 10),
                    new Percentile(10, 8, 9, 10),
                    new Percentile(25, 8, 9, 10),
                    new Percentile(50, 8, 9, 10),
                    new Percentile(75, 8, 9, 10),
                    new Percentile(90, 8, 9, 10),
                    new Percentile(95, 8, 9, 10),
                    new ZeroCrossing(8, 9, 10),
                    new MeanCrossing(8, 9, 10),
                    new Entropy(8, 9, 10),
                    // NOTE(review): Correlation(9, 11) and (10, 11) reference column
                    // 11 from the next group instead of completing the 8-10 pairs —
                    // looks like a copy-paste slip, but the loaded model was trained
                    // with exactly this layout, so it is left unchanged. Verify
                    // against the training code.
                    new Correlation(9, 10),
                    new Correlation(9, 11),
                    new Correlation(10, 11),

                    // Columns 11-13: second three-axis group.
                    new HjorthParameters(11, 12, 13),
                    new StandardDeviation(11, 12, 13),
                    new Mean(11, 12, 13),
                    new Max(11, 12, 13),
                    new Min(11, 12, 13),
                    new Percentile(5, 11, 12, 13),
                    new Percentile(10, 11, 12, 13),
                    new Percentile(25, 11, 12, 13),
                    new Percentile(50, 11, 12, 13),
                    new Percentile(75, 11, 12, 13),
                    new Percentile(90, 11, 12, 13),
                    new Percentile(95, 11, 12, 13),
                    new ZeroCrossing(11, 12, 13),
                    new MeanCrossing(11, 12, 13),
                    new Entropy(11, 12, 13),

                    // Columns 0-7: eight-channel group.
                    new StandardDeviation(0, 1, 2, 3, 4, 5, 6, 7),
                    new Mean(0, 1, 2, 3, 4, 5, 6, 7),
                    new Max(0, 1, 2, 3, 4, 5, 6, 7),
                    new Min(0, 1, 2, 3, 4, 5, 6, 7),
                    new Percentile(5, 0, 1, 2, 3, 4, 5, 6, 7),
                    new Percentile(10, 0, 1, 2, 3, 4, 5, 6, 7),
                    new Percentile(25, 0, 1, 2, 3, 4, 5, 6, 7),
                    new Percentile(50, 0, 1, 2, 3, 4, 5, 6, 7),
                    new Percentile(75, 0, 1, 2, 3, 4, 5, 6, 7),
                    new Percentile(90, 0, 1, 2, 3, 4, 5, 6, 7),
                    new Percentile(95, 0, 1, 2, 3, 4, 5, 6, 7),

                    new SumLargerThan(25, 0, 1, 2, 3, 4, 5, 6, 7),
                    new SumLargerThan(50, 0, 1, 2, 3, 4, 5, 6, 7),
                    new SumLargerThan(100, 0, 1, 2, 3, 4, 5, 6, 7)
                });

                Console.WriteLine("HGR(" + classifier.Classify(featureSpace) + ")");
            });

            // 200-sample window, second argument 0 — presumably no overlap and
            // matching the training configuration; TODO confirm.
            dataReader.Add(new SlidingWindow <double[]>(200, 0));
            dataReader.Start();
        }