Example #1
 /// <summary>
 /// Default setup for the Affectiva (Affdex) video detector: configures the data and
 /// license paths, enables all detectors, registers the listeners, and stores the output path.
 /// </summary>
 /// <param name="path">Path retained by the tool (stored in mpath).</param>
 /// <param name="processrate">Processing rate passed to the Affdex VideoDetector.</param>
 /// <param name="_Processer">Listener registered via setProcessStatusListener.</param>
 public PostAffectivaEvaltionTool(string path, double processrate, Affdex.ProcessStatusListener _Processer)
 {
     try
     {
         setValenceOfEmotion(4.0f);
         setoldValues();
         setTimeStep(.1f);
         setDataDirectory(Environment.CurrentDirectory + "\\AffectivaFiles\\data");
         setLincenseDirectory(Environment.CurrentDirectory + "\\AffectivaFiles\\[email protected]");
         mVideoDetector = new VideoDetector(processrate);
         setLicensePath(getLincenseDirectory());
         setClassiferFolderPath(getDataDirectory());
         // enable all emotion and expression detectors by default
         mVideoDetector.setDetectAllEmotions(true);
         mVideoDetector.setDetectAllExpressions(true);
         // register the Affdex listeners (face, image, and process status)
         mVideoDetector.setFaceListener(this);
         mVideoDetector.setImageListener(this);
         mVideoDetector.setProcessStatusListener(_Processer);
         maffectData = new AffectivaDataRecordingEmotionsandExpressions();
         this.mpath = path;
     }
     catch (Exception ex)
     {
         System.Windows.Forms.MessageBox.Show(ex.ToString());
     }
 }
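Below is a minimal call-site sketch for the constructor above; it is not part of the original source. The path and processing rate are placeholders, ConsoleStatusListener and CreateDefaultTool are hypothetical names, and the listener member names (onProcessingFinished, onProcessingException) are assumed from the Affdex C# sample apps and should be verified against the SDK version in use.

 // Hypothetical status listener; member names assumed from the Affdex C# samples.
 class ConsoleStatusListener : Affdex.ProcessStatusListener
 {
     public void onProcessingFinished() { Console.WriteLine("Affdex processing finished."); }
     public void onProcessingException(Affdex.AffdexException ex) { Console.WriteLine(ex.ToString()); }
 }

 // Hypothetical call site; path and processing rate are placeholders.
 static PostAffectivaEvaltionTool CreateDefaultTool()
 {
     return new PostAffectivaEvaltionTool(
         @"C:\AffectivaOutput",         // placeholder path stored by the tool
         30.0,                          // placeholder processing rate for the VideoDetector
         new ConsoleStatusListener());  // forwarded to setProcessStatusListener
 }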
Example #2
 /// <summary>
 /// Parameterized setup: configures the valence threshold, time step, paths, detection
 /// options, and listeners. The camera id and camera capture rate are accepted but not
 /// used in this constructor as shown (see the commented-out camera detector lines).
 /// </summary>
 public PostAffectivaEvaltionTool(float _valenceEmotion, float _timeStep, int _cameraId, double _cameraCaptureRate, double _processRate)
 {
     try
     {
         setValenceOfEmotion(_valenceEmotion);
         setoldValues();
         setTimeStep(_timeStep);
         setDataDirectory(Environment.CurrentDirectory + "\\AffectivaFiles\\data");
         setLincenseDirectory(Environment.CurrentDirectory + "\\AffectivaFiles\\[email protected]");
         mVideoDetector = new VideoDetector(_processRate);
         setLicensePath(getLincenseDirectory());
         setClassiferFolderPath(getDataDirectory());
         // enable all emotion and expression detectors by default
         mVideoDetector.setDetectAllEmotions(true);
         mVideoDetector.setDetectAllExpressions(true);
         // register the Affdex listeners (face and image)
         mVideoDetector.setFaceListener(this);
         mVideoDetector.setImageListener(this);
         //  mVideoDetector.setProcessStatusListener(this);
         //   mcamDetector.setProcessStatusListener(this);
         maffectData = new AffectivaDataRecordingEmotionsandExpressions();
     }
     catch (Exception ex)
     {
         System.Windows.Forms.MessageBox.Show(ex.ToString());
     }
 }
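A corresponding call-site sketch for the parameterized constructor, also not from the original source; every argument is a placeholder and CreateCustomTool is a hypothetical name. Note that the camera id and capture rate are accepted but not consumed in the constructor as shown.

 // Hypothetical call site; every argument is a placeholder.
 static PostAffectivaEvaltionTool CreateCustomTool()
 {
     return new PostAffectivaEvaltionTool(
         4.0f,    // _valenceEmotion: same valence threshold the default constructor uses
         0.1f,    // _timeStep: same time step the default constructor uses
         0,       // _cameraId: accepted but not used by the constructor as shown
         30.0,    // camera capture rate: accepted but not used by the constructor as shown
         30.0);   // _processRate: processing rate for the VideoDetector
 }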
Example #3
        /// <summary>
        /// Builds the video-processing form: prompts for an input file via Form1,
        /// registers this form as the detector's image and process-status listener,
        /// and initializes the UI.
        /// </summary>
        public ProcessVideo(Affdex.VideoDetector detector)
        {
            using (Form1 form2 = new Form1())
            {
                // Form1 acts as a modal file picker; when the dialog returns DialogResult.Cancel
                // the selected file name is read back via the static Form1.getFilename().
                // The using block disposes the form, so no explicit Close/Dispose is needed.
                if (form2.ShowDialog() == DialogResult.Cancel)
                {
                    inputfile = Form1.getFilename();
                    Console.WriteLine(inputfile);
                }
            }

            //Thread camThread = new  Thread(camStart);
            //camThread.Start();
            detector.setImageListener(this);
            detector.setProcessStatusListener(this);
            InitializeComponent();
            // label2.Text = generateSentence();
            //mciSendString("open new Type waveaudio alias recsound", null, 0, IntPtr.Zero);
            //button1.Click += new EventHandler(this.button1_Click_1);

            /*var dir = new DirectoryInfo(@"C:\Users\Rana\Desktop\video\enterface database");
             * foreach (var file in dir.EnumerateFiles("*.avi", SearchOption.AllDirectories))
             * {
             *  inputfile = file.FullName;
             *  changed = !changed;
             * }*/
            label11.Visible = false;
        }
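A hedged sketch of how a caller might wire this form to a detector; it is not from the original source, RunProcessingForm is a hypothetical name, the processing rate is a placeholder, and the comment about detector.start() reflects the usual Affdex sample pattern rather than anything shown above.

        // Hypothetical caller; the processing rate is a placeholder (cf. Example #1).
        static void RunProcessingForm()
        {
            var detector = new Affdex.VideoDetector(30.0);
            var processingForm = new ProcessVideo(detector);

            // Starting the detector and handing it the selected video is assumed to follow
            // the Affdex sample pattern (detector.start(), then processing the chosen file);
            // verify the exact calls against the installed SDK version.
            System.Windows.Forms.Application.Run(processingForm);
        }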