/// <summary>
/// Creates, configures, and starts an Affdex camera detector for recording
/// emotion/expression data from a webcam.
/// </summary>
/// <param name="_valenceEmotion">Valence threshold/value forwarded to <c>setValenceOfEmotion</c>.</param>
/// <param name="_timeStep">Sampling time step forwarded to <c>setTimeStep</c>.</param>
/// <param name="_cameraId">Index of the camera device the detector should open.</param>
/// <param name="_camperaCaptureRate">Camera capture rate (frames per second). Name typo kept for named-argument compatibility.</param>
/// <param name="_processRate">Frame processing rate (frames per second).</param>
public AffectivaCameraRecordingTool(float _valenceEmotion, float _timeStep, int _cameraId, double _camperaCaptureRate, double _processRate)
{
    // Maximum number of faces the detector will track simultaneously.
    uint maxAmountOfFaces = 2;
    try
    {
        setValenceOfEmotion(_valenceEmotion);
        setoldValues();
        setTimeStep(_timeStep);
        setDataDirectory(Environment.CurrentDirectory + "\\AffectivaFiles\\data");
        setLincenseDirectory(Environment.CurrentDirectory + "\\AffectivaFiles\\[email protected]");

        // BUG FIX: the camera id was previously hard-coded to 0, silently
        // ignoring the _cameraId argument; pass the caller-supplied id instead.
        mcamDetector = new CameraDetector(_cameraId, _camperaCaptureRate, _processRate, maxAmountOfFaces, FaceDetectorMode.SMALL_FACES);

        setLicensePath(getLincenseDirectory());
        setClassiferFolderPath(getDataDirectory());

        // Turn on all emotion and expression classifiers by default.
        mcamDetector.setDetectAllEmotions(true);
        mcamDetector.setDetectAllExpressions(true);

        // Register this instance to receive Affdex face/image callbacks.
        mcamDetector.setFaceListener(this);
        mcamDetector.setImageListener(this);
        // mcamDetector.setProcessStatusListener(this);

        maffectData = new AffectivaDataRecordingEmotionsandExpressions();
        mcamDetector.start();
    }
    catch (Exception ex)
    {
        // NOTE(review): catching Exception and only showing a dialog hides
        // initialization failures from callers (mcamDetector may remain null).
        // Consider logging and rethrowing; preserved here to avoid changing
        // behavior callers may rely on.
        System.Windows.Forms.MessageBox.Show(ex.ToString());
    }
}
/// <summary>
/// (Re)configures the Affdex camera detector, disposing any detector that
/// was created by a previous call.
/// </summary>
/// <remarks>
/// Must be invoked before the detector is started via <see cref="StartDetector()"/>.
/// </remarks>
/// <param name="cameraFPS">Camera frame rate in frames per second (default 15).</param>
/// <param name="processFPS">Frame rate at which frames are processed (default 15).</param>
/// <param name="numberOfFaces">Maximum number of faces to detect (default 10).</param>
/// <param name="cameraID">Identifier of the camera device to use (default 0).</param>
public void ConfigureDetector(double cameraFPS = 15, double processFPS = 15, uint numberOfFaces = 10, int cameraID = 0)
{
    // Release the previous detector, if any, before building a new one.
    Detector?.Dispose();

    // Persist the new configuration on the instance.
    CameraFPS = cameraFPS;
    ProcessFPS = processFPS;
    NumberOfFaces = numberOfFaces;
    CameraID = cameraID;

    Detector = new Affdex.CameraDetector(CameraID, CameraFPS, ProcessFPS, NumberOfFaces, Affdex.FaceDetectorMode.LARGE_FACES);
}