Example #1
        static void Main(string[] args)
        {
            Model model = new Model();

            RSModule.Init(model);
            model.AddModule(new FaceTrackerModule(null));

            // Add ActionUnits (MicroExpression modules)
            model.AddModule(new ME_BrowShift());
            model.AddModule(new ME_EyelidTight());
            model.AddModule(new ME_LipsTightened());
            model.AddModule(new ME_JawDrop());
            model.AddModule(new ME_LipCorner());
            model.AddModule(new ME_LipLine());
            model.AddModule(new ME_LipStretched());
            model.AddModule(new ME_NoseWrinkled());
            model.AddModule(new ME_LowerLipLowered());
            model.AddModule(new ME_UpperLipRaised());
            model.AddModule(new ME_LowerLipRaised());
            //  model.AddModule(new ME_BearTeeth());

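            // Add Emotion modules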
            model.AddModule(new EM_Joy());
            model.AddModule(new EM_Anger());
            model.AddModule(new EM_Contempt());
            model.AddModule(new EM_Disgust());
            model.AddModule(new EM_Fear());
            model.AddModule(new EM_Sadness());
            model.AddModule(new EM_Surprise());

            // Default Modules
            model.AddModule(new Gauge_Module());
            Application.Run(new CameraView(model, false));
        }
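
All of the ME_ and EM_ classes registered above extend RSModule and are hooked into the pipeline via model.AddModule. As a rough illustration, the sketch below shows what a custom module could look like; the base-class details (a Work(Graphics) override and the Debug flag) are only inferred from how the modules are used in the later examples, not taken from the real RSModule source, and ME_Example is a made-up name.

        // Hypothetical module, for illustration only.
        // Assumes RSModule declares a virtual Work(Graphics) method and a Debug flag,
        // and that RSModule.Init(model) has already been called.
        public class ME_Example : RSModule
        {
            public override void Work(Graphics g)
            {
                if (Debug)
                {
                    Console.WriteLine("ME_Example.Work called");
                }
                // Read the tracked face from the shared model and store a result, e.g.:
                // model.AU_Values["ME_Example.SomeValue"] = computedValue;
            }
        }

        // Registered exactly like the built-in modules:
        // model.AddModule(new ME_Example());
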
Example #2
        static void Main(string[] args)
        {
            Model model = new Model(true);

            RSModule.Init(model);

            // Add ActionUnits
            model.AddModule(new AU_BrowShift());
            model.AddModule(new AU_EyelidTight());
            model.AddModule(new AU_LipsTightened());
            model.AddModule(new AU_JawDrop());
            model.AddModule(new AU_LipCorner());
            model.AddModule(new AU_LipLine());
            model.AddModule(new AU_LipStretched());
            model.AddModule(new AU_NoseWrinkled());
            model.AddModule(new AU_LowerLipLowered());
            model.AddModule(new AU_UpperLipRaised());
            model.AddModule(new AU_LowerLipRaised());

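            // Add Emotion modules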
            model.AddModule(new EM_Joy());
            model.AddModule(new EM_Anger());
            model.AddModule(new EM_Contempt());
            model.AddModule(new EM_Disgust());
            model.AddModule(new EM_Fear());
            model.AddModule(new EM_Sadness());
            model.AddModule(new EM_Surprise());

            // Default Modules
            model.AddModule(new Gauge_Module());
            model.AddModule(new FaceRecorder());

            //Start Application
            Application.Run(new CameraView(model, Model.MODE.TEST));
        }
Example #3
            /**
             * Initializes all the Emotion- and ActionUnit-Modules for the recording.
             * Every recording requires its own modules since they do not share the same data (model).
             */
            private void initModules()
            {
                model = new RealSense.Model(false);
                RSModule.Init(model);
                microExpressionModules[1]  = new AU_BrowShift();
                microExpressionModules[2]  = new AU_EyelidTight();
                microExpressionModules[3]  = new AU_JawDrop();
                microExpressionModules[4]  = new AU_LipCorner();
                microExpressionModules[5]  = new AU_LipLine();
                microExpressionModules[6]  = new AU_LipsTightened();
                microExpressionModules[7]  = new AU_LipStretched();
                microExpressionModules[8]  = new AU_LowerLipLowered();
                microExpressionModules[9]  = new AU_LowerLipRaised();
                microExpressionModules[10] = new AU_NoseWrinkled();
                microExpressionModules[0]  = new AU_UpperLipRaised();

                foreach (RSModule m in microExpressionModules)
                {
                    m.Debug = false;
                }

                emotionModules[0] = new EM_Anger();
                emotionModules[1] = new EM_Joy();
                emotionModules[2] = new EM_Fear();
                emotionModules[3] = new EM_Contempt();
                emotionModules[4] = new EM_Sadness();
                emotionModules[5] = new EM_Disgust();
                emotionModules[6] = new EM_Surprise();

                foreach (RSModule m in emotionModules)
                {
                    m.Debug = false;
                }
            }
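
The two arrays filled in above are declared elsewhere in the class. A plausible declaration, sized to match the indices used here (11 ActionUnit modules and 7 emotion modules, mirroring the module lists in Examples #1 and #2), would be:

            // Assumed field declarations, not shown in the snippet above.
            private RSModule[] microExpressionModules = new RSModule[11];
            private RSModule[] emotionModules = new RSModule[7];
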
Example #4
        static void Main(string[] args)
        {
            Model model = new Model();

            RSModule.Init(model);
            // Create modules beforehand
            model.AddModule(new FaceTrackerModule());
            model.AddModule(new HandTrackerModule());
            model.AddModule(new SmileModule());
            model.AddModule(new FaceTrackerModule_Tobi());
            model.AddModule(new FaceTrackModule_Anton());

            Application.Run(new CameraView(model));
        }
Example #5
            /**
             * Updates the modules with the data of the current frame and refreshes the UI
             * to display the resulting ActionUnit and Emotion values.
             */
            public void udpateAndVisualizeData()
            {
                if (vlcControl.IsPlaying)
                {
                    vlcControl.Pause();
                }
                model.CurrentFace = faceRecording.getFace(currentFrame);
                model.NullFace    = faceRecording.getNullFace();
                RSModule.Init(model);

                foreach (RSModule rsm in microExpressionModules)
                {
                    rsm.Work(null);
                }

                foreach (RSModule rsm in emotionModules)
                {
                    rsm.Work(null);
                }

                monitors[0].currentValue = (int)model.Emotions[Model.Emotion.ANGER];
                monitors[1].currentValue = (int)model.Emotions[Model.Emotion.JOY];
                monitors[2].currentValue = (int)model.Emotions[Model.Emotion.FEAR];
                monitors[3].currentValue = (int)model.Emotions[Model.Emotion.CONTEMPT];
                monitors[4].currentValue = (int)model.Emotions[Model.Emotion.SADNESS];
                monitors[5].currentValue = (int)model.Emotions[Model.Emotion.DISGUST];
                monitors[6].currentValue = (int)model.Emotions[Model.Emotion.SURPRISE];

                //Bitmap newImage = new Bitmap(Width, viewHeight);
                Graphics g = Graphics.FromImage(dataImage);

                g.Clear(System.Drawing.SystemColors.MenuBar);
                g.TextRenderingHint = TextRenderingHint.AntiAlias;

                foreach (FriggnAweseomeGraphix.MEMonitor monitor in monitors)
                {
                    FriggnAweseomeGraphix.DrawMEMontior(g, monitor, false);
                }

                int yPos = (int)(gap * 1.5), yPos2 = yPos;
                int idx = 0;

                if (viewHeight != VIEW_TINY)
                {
                    foreach (KeyValuePair<string, double> entry in model.AU_Values)
                    {
                        int xBase, yBase;
                        if (idx++ > 7 && viewHeight != VIEW_LARGE)
                        {
                            yBase  = yPos2;
                            xBase  = monitors[0].radius * 4 + gap + 420;
                            yPos2 += gap;
                        }
                        else
                        {
                            yBase = yPos;
                            yPos += gap;
                            xBase = monitors[0].radius * 4 + gap + 100;
                        }

                        g.DrawString(entry.Key.Substring(entry.Key.IndexOf(".") + 1), textFont, textBrush, xBase, yBase - 5);
                        g.DrawString((int)entry.Value + "", textFont, textBrush, xBase + 250, yBase - 5);
                    }
                }

                g.DrawString(shortName, textFont, textBrush, monitors[0].radius * 8 + gap, 0);
                g.DrawString("Frame: " + currentFrame, textFont, textBrush, monitors[0].radius * 8 + gap + 250, 0);
                using (Pen borderPen = new Pen(textBrush))
                {
                    g.DrawLine(borderPen, 0, Height - 1, Width, Height - 1);
                }
                // Release the Graphics object created for this frame before handing the image to the UI thread
                g.Dispose();
                Invoke(pictureUpdate);
            }
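
The closing Invoke(pictureUpdate) hands the refresh back to the WinForms UI thread, since this method may run off the UI thread while stepping through a recording. The declaration of pictureUpdate is not part of the snippet; the sketch below shows one plausible way it and dataImage could be wired up (the PictureBox field, the initVisualization method, and the use of Action are assumptions; only dataImage, pictureUpdate, Width and viewHeight appear in the code above).

            // Hypothetical wiring, for illustration only.
            private Bitmap dataImage;
            private PictureBox dataPictureBox;
            private Action pictureUpdate;

            private void initVisualization()
            {
                dataImage = new Bitmap(Width, viewHeight);
                pictureUpdate = () =>
                {
                    // Executed on the UI thread via Invoke: display the freshly drawn frame
                    dataPictureBox.Image = dataImage;
                    dataPictureBox.Refresh();
                };
            }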