public override bool stopRecording()
{
    // Pull the tagged data out of the manual tagging window before closing it.
    AffCamFacRec = ManuelWindow.getAffData();
    ManuelWindow.Close();
    return true;
}
public ManuelTagWindow(Stopwatch _stopWatch)
{
    InitializeComponent();
    mAffCamFacRec = new AffectivaDataRecordingEmotionsandExpressions();
    stopWatch = _stopWatch;
    EmotionComboBox.SelectedIndex = 0;
}
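// Usage sketch (hypothetical, not in the original source): the tag window shares the
// session's Stopwatch so manual tags are stamped on the same timeline as the recording.
//
//     var sessionClock = new Stopwatch();   // System.Diagnostics.Stopwatch
//     sessionClock.Start();
//     var tagWindow = new ManuelTagWindow(sessionClock);
//     tagWindow.Show();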
/// <summary>
/// Sets up Affectiva post-processing with a caller-supplied valence threshold and time step.
/// </summary>
public PostAffectivaEvaltionTool(float _valenceEmotion, float _timeStep, int _cameraId, double _camperaCaptureRate, double _processRate)
{
    try
    {
        setValenceOfEmotion(_valenceEmotion);
        setoldValues();
        setTimeStep(_timeStep);
        setDataDirectory(Environment.CurrentDirectory + "\\AffectivaFiles\\data");
        setLincenseDirectory(Environment.CurrentDirectory + "\\AffectivaFiles\\[email protected]");

        mVideoDetector = new VideoDetector();
        setLicensePath(getLincenseDirectory());
        setClassiferFolderPath(getDataDirectory());

        // Turn on all emotion and expression detectors by default.
        mVideoDetector.setDetectAllEmotions(true);
        mVideoDetector.setDetectAllExpressions(true);

        // Register this instance as the Affdex face and image listener.
        mVideoDetector.setFaceListener(this);
        mVideoDetector.setImageListener(this);
        // mVideoDetector.setProcessStatusListener(this);
        // mcamDetector.setProcessStatusListener(this);

        maffectData = new AffectivaDataRecordingEmotionsandExpressions();
    }
    catch (Exception ex)
    {
        System.Windows.Forms.MessageBox.Show(ex.ToString());
    }
}
/// <summary>
/// Default setup for Affectiva post-processing of a recorded video.
/// </summary>
public PostAffectivaEvaltionTool(string path, double processrate, Affdex.ProcessStatusListener _Processer)
{
    try
    {
        setValenceOfEmotion(4.0f);
        setoldValues();
        setTimeStep(.1f);
        setDataDirectory(Environment.CurrentDirectory + "\\AffectivaFiles\\data");
        setLincenseDirectory(Environment.CurrentDirectory + "\\AffectivaFiles\\[email protected]");

        mVideoDetector = new VideoDetector(processrate);
        setLicensePath(getLincenseDirectory());
        setClassiferFolderPath(getDataDirectory());

        // Turn on all emotion and expression detectors by default.
        mVideoDetector.setDetectAllEmotions(true);
        mVideoDetector.setDetectAllExpressions(true);

        // Register the Affdex listeners; processing status goes to the caller's listener.
        mVideoDetector.setFaceListener(this);
        mVideoDetector.setImageListener(this);
        mVideoDetector.setProcessStatusListener(_Processer);

        maffectData = new AffectivaDataRecordingEmotionsandExpressions(); //[maxAmountOfFaces];
        this.mpath = path;
    }
    catch (Exception ex)
    {
        System.Windows.Forms.MessageBox.Show(ex.ToString());
    }
}
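// Usage sketch (hypothetical wiring, not in the original source): "videoFilePath" and
// "statusListener" are assumed to come from elsewhere in the project, with statusListener
// implementing Affdex.ProcessStatusListener.
//
//     var postTool = new PostAffectivaEvaltionTool(videoFilePath, 30.0, statusListener);
//
// This overload keeps the defaults baked in above (valence threshold 4.0f, 0.1 s time step,
// and the bundled AffectivaFiles data/license folders), so the caller only supplies the
// video path, the processing frame rate, and a process-status listener.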
/// <summary>
/// Sets up the review window for post-processing a recording.
/// </summary>
/// <param name="_AFCAMandVIdeo">Recorded emotion/expression data to chart.</param>
/// <param name="_PostProcesses"></param>
public RecordingAffectivaReview(AffectivaDataRecordingEmotionsandExpressions _AFCAMandVIdeo, bool _PostProcesses)
{
    InitializeComponent();
    comboBoxEmotionSelect.SelectedIndex = 0;
    AffData = _AFCAMandVIdeo;
    SetupChart(_AFCAMandVIdeo);
}
/// <summary>
/// Pushes in data from another recording tool, merging it into the existing chart.
/// </summary>
/// <param name="_MergeIn">Recording data whose series are merged into the chart.</param>
public void MergeInData(AffectivaDataRecordingEmotionsandExpressions _MergeIn)
{
    if (chart1.Series.Count > 0)
    {
        // The chart already has series; merge the incoming series into them.
        MergeInSeriesOfList(_MergeIn.GetChartSeriesOfData());
    }
    else
    {
        // Nothing charted yet; build the chart from the incoming data.
        SetupChart(_MergeIn);
    }
}
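// Usage sketch (hypothetical, not in the original source): reviewing one recording and
// merging a second one into the same chart. "firstRun" and "secondRun" are assumed to be
// AffectivaDataRecordingEmotionsandExpressions instances produced by the recording tools.
//
//     var review = new RecordingAffectivaReview(firstRun, _PostProcesses: true);
//     review.Show();
//     review.MergeInData(secondRun);   // appends secondRun's series to the existing chart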
/// <summary>
/// Builds the chart from a recording tool's data.
/// </summary>
/// <param name="AfCFRAVR">Recording data whose series populate the chart.</param>
private void SetupChart(AffectivaDataRecordingEmotionsandExpressions AfCFRAVR)
{
    SetupChartFromSeries(AfCFRAVR.GetChartSeriesOfData());
}