Example #1
        /// <summary>
        /// Stops the manual recording: pulls the tagged data from the tagging window, then closes it.
        /// </summary>
        public override bool stopRecording()
        {
            AffCamFacRec = ManuelWindow.getAffData();
            ManuelWindow.Close();

            return true;
        }
Example #2
 /// <summary>
 /// Creates the manual tagging window, sharing the caller's stopwatch so both use the same time base.
 /// </summary>
 public ManuelTagWindow(Stopwatch _stopWatch)
 {
     InitializeComponent();
     mAffCamFacRec = new AffectivaDataRecordingEmotionsandExpressions();
     stopWatch     = _stopWatch;
     EmotionComboBox.SelectedIndex = 0;   // default to the first emotion in the combo box
 }
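A minimal sketch of how the manual-tagging flow in Examples #1 and #2 might be driven by a caller, assuming ManuelTagWindow is a standard WPF/WinForms window (it calls InitializeComponent) so Show() and Close() are available, and that getAffData() from Example #1 is exposed on it; the surrounding names are placeholders.

    using System.Diagnostics;

    // share one stopwatch so the tag timestamps use the same time base as the recording
    var stopWatch = new Stopwatch();
    stopWatch.Start();

    var manuelWindow = new ManuelTagWindow(stopWatch);
    manuelWindow.Show();

    // ... later, mirroring stopRecording() in Example #1:
    var taggedData = manuelWindow.getAffData();   // pull the manually tagged emotion/expression data
    manuelWindow.Close();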
Example #3
 /// <summary>
 /// Sets up Affectiva with an explicit emotion valence threshold, time step, and camera/processing parameters.
 /// </summary>
 public PostAffectivaEvaltionTool(float _valenceEmotion, float _timeStep, int _cameraId, double _cameraCaptureRate, double _processRate)
 {
     try
     {
         setValenceOfEmotion(_valenceEmotion);
         setoldValues();
         setTimeStep(_timeStep);
         setDataDirectory(Environment.CurrentDirectory + "\\AffectivaFiles\\data");
         setLincenseDirectory(Environment.CurrentDirectory + "\\AffectivaFiles\\[email protected]");
         mVideoDetector = new VideoDetector();
         setLicensePath(getLincenseDirectory());
         setClassiferFolderPath(getDataDirectory());
         // turn on all emotion and expression detectors by default
         mVideoDetector.setDetectAllEmotions(true);
         mVideoDetector.setDetectAllExpressions(true);
         // register this object as the Affdex face and image listener
         mVideoDetector.setFaceListener(this);
         mVideoDetector.setImageListener(this);
         // mVideoDetector.setProcessStatusListener(this);
         // mcamDetector.setProcessStatusListener(this);
         maffectData = new AffectivaDataRecordingEmotionsandExpressions();
     }
     catch (Exception ex)
     {
         System.Windows.Forms.MessageBox.Show(ex.ToString());
     }
 }
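A brief usage sketch for the constructor above; all argument values are placeholders (valence 4.0f and time step 0.1f mirror the defaults in Example #4, camera id 0, and 30 fps capture/process rates), and whatever call actually starts the detector is not shown in these examples.

    // valence threshold, time step, camera id, camera capture rate, process rate
    var liveTool = new PostAffectivaEvaltionTool(4.0f, 0.1f, 0, 30.0, 30.0);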
Example #4
 /// <summary>
 /// Default setup for Affectiva: post-processes a recorded video at the given process rate.
 /// </summary>
 /// <param name="path">Path to the recorded video to process.</param>
 /// <param name="processrate">Detector process rate passed to the VideoDetector.</param>
 /// <param name="_Processer">Listener notified of processing status by Affdex.</param>
 public PostAffectivaEvaltionTool(string path, double processrate, Affdex.ProcessStatusListener _Processer)
 {
     try
     {
         setValenceOfEmotion(4.0f);
         setoldValues();
         setTimeStep(0.1f);
         setDataDirectory(Environment.CurrentDirectory + "\\AffectivaFiles\\data");
         setLincenseDirectory(Environment.CurrentDirectory + "\\AffectivaFiles\\[email protected]");
         mVideoDetector = new VideoDetector(processrate);
         setLicensePath(getLincenseDirectory());
         setClassiferFolderPath(getDataDirectory());
         // turn on all emotion and expression detectors by default
         mVideoDetector.setDetectAllEmotions(true);
         mVideoDetector.setDetectAllExpressions(true);
         // register this object as the Affdex face and image listener
         mVideoDetector.setFaceListener(this);
         mVideoDetector.setImageListener(this);
         mVideoDetector.setProcessStatusListener(_Processer);
         maffectData = new AffectivaDataRecordingEmotionsandExpressions(); // [maxAmountOfFaces]
         this.mpath  = path;
     }
     catch (Exception ex)
     {
         System.Windows.Forms.MessageBox.Show(ex.ToString());
     }
 }
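A hedged sketch of driving the default-setup constructor above from a hosting class. It assumes the Affdex C# SDK's ProcessStatusListener interface exposes onProcessingFinished() and onProcessingException(Affdex.AffdexException); the class name, video path, and 30.0 process rate are placeholders.

    public class PostProcessingHost : Affdex.ProcessStatusListener
    {
        private PostAffectivaEvaltionTool mTool;

        public void BeginPostProcessing()
        {
            // the constructor wires up the VideoDetector and registers this host for status callbacks
            mTool = new PostAffectivaEvaltionTool(@"C:\recordings\session01.mp4", 30.0, this);
        }

        public void onProcessingFinished()
        {
            // called by Affdex once the whole video has been analysed
        }

        public void onProcessingException(Affdex.AffdexException ex)
        {
            System.Windows.Forms.MessageBox.Show(ex.ToString());
        }
    }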
Example #5
        /// <summary>
        /// Sets up the review window for post-processing.
        /// </summary>
        /// <param name="_AFCAMandVIdeo">Recorded emotion and expression data to chart.</param>
        /// <param name="_PostProcesses">Whether the review is run as part of post-processing.</param>
        public RecordingAffectivaReview(AffectivaDataRecordingEmotionsandExpressions _AFCAMandVIdeo, bool _PostProcesses)
        {
            InitializeComponent();
            comboBoxEmotionSelect.SelectedIndex = 0;   // default to the first emotion in the selector
            AffData = _AFCAMandVIdeo;

            SetupChart(_AFCAMandVIdeo);
        }
Example #6
 /// <summary>
 /// Pushes in data from another recording tool, merging it into the existing chart
 /// or creating the chart if none has been set up yet.
 /// </summary>
 /// <param name="_MergeIn">Recording whose chart series are merged in.</param>
 public void MergeInData(AffectivaDataRecordingEmotionsandExpressions _MergeIn)
 {
     if (chart1.Series.Count > 0)
     {
         MergeInSeriesOfList(_MergeIn.GetChartSeriesOfData());
     }
     else
     {
         SetupChart(_MergeIn);
     }
 }
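A minimal sketch of overlaying two recordings in one review window, combining the constructor from Example #5 with MergeInData above; Show() is assumed to be the standard window call, and the two recordings are placeholder instances filled elsewhere.

    var firstRun  = new AffectivaDataRecordingEmotionsandExpressions();
    var secondRun = new AffectivaDataRecordingEmotionsandExpressions();

    var review = new RecordingAffectivaReview(firstRun, true);   // chart is seeded from the first recording
    review.MergeInData(secondRun);                               // second recording's series merge into the same chart
    review.Show();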
Example #7
 /// <summary>
 /// Initializes the chart from a recording tool's data series.
 /// </summary>
 /// <param name="AfCFRAVR">Recording whose chart series seed the chart.</param>
 private void SetupChart(AffectivaDataRecordingEmotionsandExpressions AfCFRAVR)
 {
     SetupChartFromSeries(AfCFRAVR.GetChartSeriesOfData());
 }