Example #1
 public PostAffectivaEvaltionTool(float _valenceEmotion, float _timeStep, int _cameraId, double _camperaCaptureRate, double _processRate)
 {
     try
     {
         setValenceOfEmotion(_valenceEmotion);
         setoldValues();
         setTimeStep(_timeStep);
         setDataDirectory(Environment.CurrentDirectory + "\\AffectivaFiles\\data");
         setLincenseDirectory(Environment.CurrentDirectory + "\\AffectivaFiles\\[email protected]");
         mVideoDetector = new VideoDetector();
         setLicensePath(getLincenseDirectory());
         setClassiferFolderPath(getDataDirectory());
         // turn on all emotion and expression detectors by default
         mVideoDetector.setDetectAllEmotions(true);
         mVideoDetector.setDetectAllExpressions(true);
         // register this instance as the face and image listener for Affdex
         mVideoDetector.setFaceListener(this);
         mVideoDetector.setImageListener(this);
         //  mVideoDetector.setProcessStatusListener(this);
         //   mcamDetector.setProcessStatusListener(this);
         maffectData = new AffectivaDataRecordingEmotionsandExpressions();
     }
     catch (Exception ex)
     {
         System.Windows.Forms.MessageBox.Show(ex.ToString());
     }
 }
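
A minimal usage sketch (not from the source): the argument values below are illustrative placeholders for evaluating a default webcam at 30 fps.

 // Hypothetical call site; all argument values here are examples, not defaults from the class.
 var tool = new PostAffectivaEvaltionTool(
     _valenceEmotion: 4.0f,       // valence threshold for the evaluation
     _timeStep: 0.1f,             // sampling interval in seconds
     _cameraId: 0,                // default webcam
     _camperaCaptureRate: 30.0,   // camera capture rate (frames per second)
     _processRate: 30.0);         // Affdex processing rate (frames per second)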
Example #2
 /// <summary>
 /// Default setup for Affectiva with a caller-supplied video path, process rate, and process status listener.
 /// </summary>
 public PostAffectivaEvaltionTool(string path, double processrate, Affdex.ProcessStatusListener _Processer)
 {
     try
     {
         setValenceOfEmotion(4.0f);
         setoldValues();
         setTimeStep(.1f);
         setDataDirectory(Environment.CurrentDirectory + "\\AffectivaFiles\\data");
         setLincenseDirectory(Environment.CurrentDirectory + "\\AffectivaFiles\\[email protected]");
         mVideoDetector = new VideoDetector(processrate);
         setLicensePath(getLincenseDirectory());
         setClassiferFolderPath(getDataDirectory());
         // turn on all emotion and expression detectors by default
         mVideoDetector.setDetectAllEmotions(true);
         mVideoDetector.setDetectAllExpressions(true);
         // register this instance as the face and image listener for Affdex
         mVideoDetector.setFaceListener(this);
         mVideoDetector.setImageListener(this);
         mVideoDetector.setProcessStatusListener(_Processer);
         maffectData = new AffectivaDataRecordingEmotionsandExpressions(); // [maxAmountOfFaces];
         this.mpath = path;
     }
     catch (Exception ex)
     {
         System.Windows.Forms.MessageBox.Show(ex.ToString());
     }
 }
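
A minimal sketch of calling this overload, assuming a caller-defined class (here the hypothetical MyProcessStatusListener) implements Affdex.ProcessStatusListener; the video path and process rate are placeholders.

 // Hypothetical call site; MyProcessStatusListener is an assumed implementation of Affdex.ProcessStatusListener.
 Affdex.ProcessStatusListener statusListener = new MyProcessStatusListener();
 string videoPath = System.IO.Path.Combine(Environment.CurrentDirectory, "session.mp4"); // placeholder video file
 var tool = new PostAffectivaEvaltionTool(videoPath, 30.0, statusListener);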