Example #1
        // Constructs the dynamic detector: loads the saved recognizer network
        // from the given file and resets all per-frame state buffers.
        public dynamicDetection(String s)
        {
            this.recog = new recognizer(s);

            this.iter = 0;
            this.frame = 0;
            this.feature = new _feature(0.0);

            // Four-element state buffers used by the feature computation.
            this.wDist = new double[4];
            this.wDistLeg = new double[4];
            this.prevAccel = new double[4];
            this.prevAccelLeg = new double[4];

            this.prevSpeed = new double[4];
            this.prevSpeedLeg = new double[4];
            this.totJI = new double[4];

            this.wprevLeg = new _qbit[4];
            this.wprev = new _qbit[4];

            this.featureList = new List<double[]>();

            refreshVars();
        }
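A minimal usage sketch; the file name below is a placeholder for whatever saved network the training step produced, not a path from the original code:

        // Hypothetical file name; pass the path of any saved recognizer network.
        dynamicDetection detector = new dynamicDetection("movementANN.dat");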
Example #2
        // Constructs the confidence detector with default category, threshold,
        // and confidence-band settings, and loads the saved confidence network.
        public confidence(String filename = "confidenceNet.dat")
        {
            this.emoCategory = 10;
            this.emoThreshold = 0.1;
            this.ConfidenceLowThreshold = 0.4;
            this.ConfidenceHighThreshold = 0.6;
            this.confiRecog = new recognizer(filename);
        }
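The two thresholds suggest a three-way reading of the confidence score. A hedged sketch of how they could be applied; bucketConfidence is an illustrative helper, not a method shown in the original class:

        // Illustrative helper (hypothetical): maps a confidence score to a band
        // using the thresholds set in the constructor above.
        private string bucketConfidence(double score)
        {
            if (score < this.ConfidenceLowThreshold)
                return "low";
            if (score > this.ConfidenceHighThreshold)
                return "high";
            return "medium";
        }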
Example #3
        private double sendToANN(double[] feat, recognizer r)
        {
            try
            {
                double[] output;
                double val = 0;

                output = r.recognizeEmotion(feat);

                if (r.net.Output.Length == 2)
                {
                    // Two-output network: map the rounded output pair to a
                    // four-level score (100, 75, 50, 25).
                    if (Math.Round(output[0]) == 1 && Math.Round(output[1]) == 1)
                        val = 100;
                    else if (Math.Round(output[0]) == 1 && Math.Round(output[1]) == 0)
                        val = 75;
                    else if (Math.Round(output[0]) == 0 && Math.Round(output[1]) == 1)
                        val = 50;
                    else if (Math.Round(output[0]) == 0 && Math.Round(output[1]) == 0)
                        val = 25;
                    Console.WriteLine("output: " + output[0] + ", " + output[1]);
                }
                else
                {
                    // Single-output network: scale the output to a percentage,
                    // guarding against NaN and infinite values.
                    if (double.IsNaN(output[0]) || double.IsInfinity(output[0]))
                        val = -1.0;
                    else
                        val = output[0] * 100;

                    Console.WriteLine("output: " + output[0]);
                }

                return val;
            }
            catch
            {
                System.Windows.MessageBox.Show("Detection module --" + r.name + "-- failed; most likely the wrong network file was selected.",
                    "detection error", MessageBoxButton.OK, MessageBoxImage.Error);
                return -2;
            }
        }
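The single-output branch can be exercised on its own. The helper below is a standalone illustration; OutputScaling and ScaleSingleOutput are hypothetical names, not part of the original code:

        // Mirrors the single-output mapping used in sendToANN.
        static class OutputScaling
        {
            public static double ScaleSingleOutput(double o)
            {
                if (double.IsNaN(o) || double.IsInfinity(o))
                    return -1.0;     // invalid network output is flagged
                return o * 100;      // e.g. 0.42 -> 42.0
            }
        }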
Example #4
        public int pollFeatures(SkeletonData s, userContext poseofUser)
        {
            iter++; //frame++;
            double[][] ans = globalVars.fExtract.getRawDataStream(s);

            /* Bail out if the feature extractor failed to produce data. */
            if (ans[0] == null || ans[1] == null)
            {
                return -3;
            }

            if (movement == null)
                movement = new double[ans[0].Length];
            for (int i = 0; i < ans[0].Length; i++)
                movement[i] += ans[0][i];
            movementTick++;

            if (position == null)
                position = new double[ans[1].Length];
            for (int i = 0; i < ans[1].Length; i++)
                position[i] += ans[1][i];
            positionTick++;

            //System.Windows.MessageBox.Show("iter: "+iter, "ANN init error", MessageBoxButton.OK, MessageBoxImage.Error);

            /* Here the detection starts */

            if (iter % 30 == 0 && iter != 0)
            {
                /*this awesome part automates the recognizer choice for Sitting or Standing context*/
                if (poseofUser == userContext.Sitting)
                {
                    mRecog = arousalSittingRecognizer;
                    posRecog = valenceSittingRecognizer;
                }
                else if (poseofUser == userContext.Standing)
                {
                    mRecog = arousalStandingRecognizer;
                    posRecog = valenceStandingRecognizer;
                }
                else
                {
                    System.Windows.MessageBox.Show("there is no saved network for this pose. Please try standing or sitting only.",
                           "detection error", MessageBoxButton.OK, MessageBoxImage.Error);
                    return -1;
                }

                double[] temp1, temp2;
                temp1 = new double[ans[0].Length];
                temp2 = new double[ans[1].Length];

                /* Average the accumulated movement and position features over the
                   elapsed ticks, then reset the accumulators. */

                for (int i = 0; i < movement.Length; i++)
                {
                    temp1[i] = movement[i] / movementTick;
                    movement[i] = 0;
                    Console.Write(temp1[i] + " ");
                }
                movementTick = 0;
                Console.WriteLine();
                for (int i = 0; i < position.Length; i++)
                {
                    temp2[i] = position[i] / positionTick;
                    //Console.Write(temp2[i] + " ");
                    position[i] = 0;
                }
                positionTick = 0;
                Console.WriteLine();

                if (slidingWindow)
                {
                    /* Add the new averages to the sliding-window lists. */
                    movementFeatureList.Add(temp1);
                    positionFeatureList.Add(temp2);

                    double[] finalFeature = new double[movement.Length];
                    for (int i = 0; i < movement.Length; i++)
                    {
                        foreach (double[] d in this.movementFeatureList)
                        {
                            finalFeature[i] += d[i];
                            //Console.Write(d[i]+", ");
                        }
                        finalFeature[i] /= movementFeatureList.Count;
                        //Console.WriteLine("ffm: "+finalFeature[i]);
                    }

                    double[] finalFeature1 = new double[position.Length];

                    for (int i = 0; i < position.Length; i++)
                    {
                        foreach (double[] d in this.positionFeatureList)
                        {
                            finalFeature1[i] += d[i];
                            //Console.Write(d[i]+", ");
                        }
                        finalFeature1[i] /= positionFeatureList.Count;
                        //Console.WriteLine("ffp: " + finalFeature1[i]);
                    }


                    // The sliding-window length is fixed at six averaged feature
                    // vectors here; it could instead be controlled through an
                    // "interval" variable.
                    if (movementFeatureList.Count == 6)
                    {
                        movementFeatureList.RemoveAt(0);
                        positionFeatureList.RemoveAt(0);
                    }
                    detect(finalFeature, finalFeature1);
                }

                else
                {
                    detect(temp1, temp2);
                }
            }
            return iter;
        }
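The sliding-window averaging inside pollFeatures can be factored into a small reusable helper. This is a hedged sketch under the assumption that an element-wise mean over the last few averaged vectors is all that is needed; SlidingWindowAverager is an illustrative name, not a class from the original code, and it requires System.Collections.Generic:

        // Hypothetical helper mirroring the sliding-window averaging above:
        // keeps at most `windowSize` feature vectors and returns their element-wise mean.
        class SlidingWindowAverager
        {
            private readonly List<double[]> window = new List<double[]>();
            private readonly int windowSize;

            public SlidingWindowAverager(int windowSize)
            {
                this.windowSize = windowSize;
            }

            public double[] Push(double[] feature)
            {
                window.Add(feature);
                if (window.Count > windowSize)
                    window.RemoveAt(0);              // drop the oldest entry

                double[] mean = new double[feature.Length];
                foreach (double[] d in window)
                    for (int i = 0; i < mean.Length; i++)
                        mean[i] += d[i];
                for (int i = 0; i < mean.Length; i++)
                    mean[i] /= window.Count;
                return mean;
            }
        }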
Example #5
        // Loads the arousal network (s) and the valence network (s1) for both the
        // sitting and standing contexts, then resets the detection state.
        public newDynamicDetection(String s, String s1)
        {
            try
            {
                mRecog = new recognizer(s);
                posRecog = new recognizer(s1);

                arousalSittingRecognizer = new recognizer(s);
                arousalStandingRecognizer = new recognizer(s);
                valenceSittingRecognizer = new recognizer(s1);
                valenceStandingRecognizer = new recognizer(s1);

                this.iter = 0;
                movementFeatureList = new List<double[]>();
                positionFeatureList = new List<double[]>();
                globalVars.detectorOn = true;
                globalVars.fExtract.frames = 0;
            }
            catch
            {
                System.Windows.MessageBox.Show("Some problem with the Saved Networks. CHeck your files and try again.", "ANN init error", MessageBoxButton.OK, MessageBoxImage.Error);
                this.stopDetection();
                globalVars.detectorOn = false;
            }
        }
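A minimal usage sketch; the file names are placeholders, with the first argument feeding the arousal recognizers and the second the valence recognizers, as in the assignments above:

        // Hypothetical file names for the two saved networks.
        newDynamicDetection detector =
            new newDynamicDetection("arousalANN.dat", "valenceANN.dat");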