/// <summary> /// Get the current windowing type for band power calculation /// </summary> /// <param name="userId">user ID</param> /// <param name="type">windowing type enum from IEE_WindowingTypes (default: IEE_HANNING)</param> public void IEE_FFTGetWindowingType(UInt32 userId, EdkDll.IEE_WindowingTypes type) { errorHandler(EdkDll.IEE_FFTGetWindowingType(userId, type)); }
/// <summary> /// Query the number of channels of available sensor contact quality data /// </summary> /// <returns>number of channels for which contact quality data is available</returns> public Int32 GetNumContactQualityChannels() { return(EdkDll.ES_GetNumContactQualityChannels(hEmoState)); }
/// <summary> /// Set the training control flag for Cognitiv training /// </summary> /// <param name="userId">user ID</param> /// <param name="control">pre-defined Cognitiv training control</param> public void CognitivSetTrainingControl(UInt32 userId, EdkDll.EE_CognitivTrainingControl_t control) { errorHandler(EdkDll.EE_CognitivSetTrainingControl(userId, control)); }
/// <summary> /// Set the current facial expression for Expressiv training /// </summary> /// <param name="userId">user ID</param> /// <param name="action">the facial expression to be trained</param> public void ExpressivSetTrainingAction(UInt32 userId, EdkDll.EE_ExpressivAlgo_t action) { errorHandler(EdkDll.EE_ExpressivSetTrainingAction(userId, action)); }
/// <summary> /// Query the eyelids state of the user /// </summary> /// <remarks> /// The left and right eyelid state are stored in the parameter leftEye and rightEye /// respectively. They are floating point values ranging from 0.0 to 1.0. /// 0.0 indicates that the eyelid is fully opened while 1.0 indicates that the /// eyelid is fully closed. /// </remarks> /// <param name="leftEye">the left eyelid state (0.0 to 1.0)</param> /// <param name="rightEye">the right eyelid state (0.0 to 1.0)</param> public void FacialExpressionGetEyelidState(out Single leftEye, out Single rightEye) { EdkDll.IS_FacialExpressionGetEyelidState(hEmoState, out leftEye, out rightEye); }
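// Example (hedged sketch): reading the eyelid state from a current EmoState.
// "state" is an assumed EmoState instance obtained elsewhere (for example inside a
// state-updated callback); the name is illustrative only.
Single leftEye, rightEye;
state.FacialExpressionGetEyelidState(out leftEye, out rightEye);
// 0.0 = fully open, 1.0 = fully closed
if (leftEye > 0.9f && rightEye > 0.9f)
{
    Console.WriteLine("Both eyes appear closed");
}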
/// <summary> /// Get the list of vital algorithms of a specific suite from the optimization parameter /// </summary> /// <param name="suite">suite that you are interested in</param> /// <returns>bit vector of vital algorithms composed of EE_ExpressivAlgo_t, EE_AffectivAlgo_t or EE_CognitivAction_t values, depending on the suite parameter</returns> public UInt32 GetVitalAlgorithm(EdkDll.EE_EmotivSuite_t suite) { UInt32 vitalAlgorithmBitVectorOut = 0; EmoEngine.errorHandler(EdkDll.EE_OptimizationGetVitalAlgorithm(hOptimizationParam, suite, out vitalAlgorithmBitVectorOut)); return vitalAlgorithmBitVectorOut; }
/// <summary> /// Gets the current skill rating for a particular MentalCommand action of the user /// </summary> /// <param name="userId">user ID</param> /// <param name="action">a particular action of the IEE_MentalCommandAction_t constant</param> /// <returns>the action skill rating (0.0 to 1.0)</returns> public Single MentalCommandGetActionSkillRating(UInt32 userId, EdkDll.IEE_MentalCommandAction_t action) { Single actionSkillRatingOut = 0.0F; errorHandler(EdkDll.IEE_MentalCommandGetActionSkillRating(userId, action, out actionSkillRatingOut)); return actionSkillRatingOut; }
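// Example (hedged sketch): checking how well a MentalCommand action has been trained
// before relying on it. "engine" and "userId" are assumed to exist; MC_PUSH is an
// assumed member name of IEE_MentalCommandAction_t.
Single pushRating = engine.MentalCommandGetActionSkillRating(userId, EdkDll.IEE_MentalCommandAction_t.MC_PUSH);
if (pushRating < 0.5f)
{
    Console.WriteLine("Push is weakly trained (skill rating " + pushRating + "); consider re-training.");
}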
/// <summary> /// Query whether the eyes of the user are opened at the time the EmoState is captured. /// </summary> /// <returns>eye open status (true: eyes open, false: eyes closed)</returns> public Boolean FacialExpressionIsEyesOpen() { return(EdkDll.IS_FacialExpressionIsEyesOpen(hEmoState)); }
/// <summary> /// Set the current facial expression for FacialExpression training /// </summary> /// <param name="userId">user ID</param> /// <param name="action">the facial expression to be trained</param> public void FacialExpressionSetTrainingAction(UInt32 userId, EdkDll.IEE_FacialExpressionAlgo_t action) { errorHandler(EdkDll.IEE_FacialExpressionSetTrainingAction(userId, action)); }
/// <summary> /// Set the control flag for FacialExpression training /// </summary> /// <param name="userId">user ID</param> /// <param name="control">pre-defined control command</param> public void FacialExpressionSetTrainingControl(UInt32 userId, EdkDll.IEE_FacialExpressionTrainingControl_t control) { errorHandler(EdkDll.IEE_FacialExpressionSetTrainingControl(userId, control)); }
/// <summary> /// Set threshold for FacialExpression algorithms /// </summary> /// <param name="userId">user ID</param> /// <param name="algoName">FacialExpression algorithm type</param> /// <param name="thresholdName">FacialExpression threshold type</param> /// <param name="value">threshold value (min: 0 max: 1000)</param> public void FacialExpressionSetThreshold(UInt32 userId, EdkDll.IEE_FacialExpressionAlgo_t algoName, EdkDll.IEE_FacialExpressionThreshold_t thresholdName, Int32 value) { errorHandler(EdkDll.IEE_FacialExpressionSetThreshold(userId, algoName, thresholdName, value)); }
/// <summary> /// Configures the FacialExpression suite to use either the built-in, universal signature or a personal, trained signature /// </summary> /// <remarks> /// FacialExpression defaults to use its universal signature. This function will fail if IEE_FacialExpressionGetTrainedSignatureAvailable returns false. /// </remarks> /// <param name="userId">user ID</param> /// <param name="sigType">signature type to use</param> public void FacialExpressionSetSignatureType(UInt32 userId, EdkDll.IEE_FacialExpressionSignature_t sigType) { errorHandler(EdkDll.IEE_FacialExpressionSetSignatureType(userId, sigType)); }
/// <summary> /// Get threshold from FacialExpression algorithms /// </summary> /// <param name="userId">user ID</param> /// <param name="algoName">FacialExpression algorithm type</param> /// <param name="thresholdName">FacialExpression threshold type</param> /// <returns>threshold value</returns> public Int32 FacialExpressionGetThreshold(UInt32 userId, EdkDll.IEE_FacialExpressionAlgo_t algoName, EdkDll.IEE_FacialExpressionThreshold_t thresholdName) { Int32 valueOut = 0; errorHandler(EdkDll.IEE_FacialExpressionGetThreshold(userId, algoName, thresholdName, out valueOut)); return valueOut; }
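// Example (hedged sketch): reading a FacialExpression threshold and writing back a higher
// value, clamped to the documented 0..1000 range. "engine" and "userId" are assumed to
// exist; FE_SMILE and FE_SENSITIVITY are assumed member names of IEE_FacialExpressionAlgo_t
// and IEE_FacialExpressionThreshold_t.
Int32 current = engine.FacialExpressionGetThreshold(userId, EdkDll.IEE_FacialExpressionAlgo_t.FE_SMILE, EdkDll.IEE_FacialExpressionThreshold_t.FE_SENSITIVITY);
Int32 raised = Math.Min(current + 100, 1000);
engine.FacialExpressionSetThreshold(userId, EdkDll.IEE_FacialExpressionAlgo_t.FE_SMILE, EdkDll.IEE_FacialExpressionThreshold_t.FE_SENSITIVITY, raised);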
/// <summary> /// Query the contact quality of all the electrodes in one single call /// </summary> /// <remarks> /// The contact quality is returned as an array. /// The value stored in contactQuality[0] is identical to the result returned by /// IS_GetContactQuality(state, 0), the value stored in contactQuality[1] is identical to the result returned by /// IS_GetContactQuality(state, 1), etc. /// The ordering of the array is consistent with the ordering of the logical input /// channels in IEE_InputChannels_enum. /// </remarks> /// <returns>Array of contact quality values, one per logical input channel.</returns> public EdkDll.IEE_EEG_ContactQuality_t[] GetContactQualityFromAllChannels() { EdkDll.IEE_EEG_ContactQuality_t[] contactQuality; EdkDll.IS_GetContactQualityFromAllChannels(hEmoState, out contactQuality); return(contactQuality); }
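// Example (hedged sketch): listing the contact quality of every channel in one call.
// "state" is an assumed EmoState instance; the enum values are printed by name, so no
// specific IEE_EEG_ContactQuality_t member names are assumed.
EdkDll.IEE_EEG_ContactQuality_t[] quality = state.GetContactQualityFromAllChannels();
for (int i = 0; i < quality.Length; ++i)
{
    Console.WriteLine("Channel " + i + ": " + quality[i]);
}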
/// <summary> /// Set the type of MentalCommand action to be trained /// </summary> /// <param name="userId">user ID</param> /// <param name="action">the action to be trained</param> public void MentalCommandSetTrainingAction(UInt32 userId, EdkDll.IEE_MentalCommandAction_t action) { errorHandler(EdkDll.IEE_MentalCommandSetTrainingAction(userId, action)); }
/// <summary> /// Get the level of charge remaining in the headset battery /// </summary> /// <param name="chargeLevel">the current level of charge in the headset battery</param> /// <param name="maxChargeLevel">the maximum level of charge in the battery</param> public void GetBatteryChargeLevel(out Int32 chargeLevel, out Int32 maxChargeLevel) { EdkDll.IS_GetBatteryChargeLevel(hEmoState, out chargeLevel, out maxChargeLevel); }
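// Example (hedged sketch): reporting the remaining battery charge as a percentage.
// "state" is an assumed EmoState instance; the guard avoids a division by zero if the
// headset reports no maximum charge level.
Int32 charge, maxCharge;
state.GetBatteryChargeLevel(out charge, out maxCharge);
if (maxCharge > 0)
{
    Console.WriteLine("Battery: " + (100 * charge / maxCharge) + "%");
}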
/// <summary> /// Set the training control flag for MentalCommand training /// </summary> /// <param name="userId">user ID</param> /// <param name="control">pre-defined MentalCommand training control</param> public void MentalCommandSetTrainingControl(UInt32 userId, EdkDll.IEE_MentalCommandTrainingControl_t control) { errorHandler(EdkDll.IEE_MentalCommandSetTrainingControl(userId, control)); }
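// Example (hedged sketch): a minimal MentalCommand training sequence: choose the action,
// then start a training session. "engine" and "userId" are assumed to exist; MC_PUSH and
// MC_START are assumed member names of the respective enums. Training progress and
// completion are normally reported asynchronously through EmoEngine events, not shown here.
engine.MentalCommandSetTrainingAction(userId, EdkDll.IEE_MentalCommandAction_t.MC_PUSH);
engine.MentalCommandSetTrainingControl(userId, EdkDll.IEE_MentalCommandTrainingControl_t.MC_START);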
/// <summary> /// Query whether the user is looking down at the time the EmoState is captured. /// </summary> /// <returns>eyes position (true: looking down, false: not looking down)</returns> public Boolean FacialExpressionIsLookingDown() { return(EdkDll.IS_FacialExpressionIsLookingDown(hEmoState)); }
/// <summary> /// Returns the clench extent of the user (Obsolete function) /// </summary> /// <returns>clench extent value (0.0 to 1.0)</returns> public Single ExpressivGetClenchExtent() { return(EdkDll.ES_ExpressivGetClenchExtent(hEmoState)); }
/// <summary> /// Query whether the signal is too noisy for Expressiv detection to be active /// </summary> /// <param name="type">Expressiv detection type</param> /// <returns>detection state (false: Not Active, true: Active)</returns> public Boolean ExpressivIsActive(EdkDll.EE_ExpressivAlgo_t type) { return EdkDll.ES_ExpressivIsActive(hEmoState, type); }
/// <summary> /// Constructor /// </summary> public EmoState() { hEmoState = EdkDll.ES_Create(); }
/// <summary> /// Gets the current skill rating for a particular Cognitiv action of the user /// </summary> /// <param name="userId">user ID</param> /// <param name="action">a particular action of the EE_CognitivAction_t constant</param> /// <returns>the action skill rating (0.0 to 1.0)</returns> public Single CognitivGetActionSkillRating(UInt32 userId, EdkDll.EE_CognitivAction_t action) { Single actionSkillRatingOut = 0.0F; errorHandler(EdkDll.EE_CognitivGetActionSkillRating(userId, action, out actionSkillRatingOut)); return actionSkillRatingOut; }
/// <summary> /// Returns the detected lower face Expressiv action of the user /// </summary> /// <returns>pre-defined Expressiv action types</returns> public EdkDll.EE_ExpressivAlgo_t ExpressivGetLowerFaceAction() { return(EdkDll.ES_ExpressivGetLowerFaceAction(hEmoState)); }
/// <summary> /// Configures the Expressiv suite to use either the built-in, universal signature or a personal, trained signature /// </summary> /// <remarks> /// Expressiv defaults to use its universal signature. This function will fail if EE_ExpressivGetTrainedSignatureAvailable returns false. /// </remarks> /// <param name="userId">user ID</param> /// <param name="sigType">signature type to use</param> public void ExpressivSetSignatureType(UInt32 userId, EdkDll.EE_ExpressivSignature_t sigType) { errorHandler(EdkDll.EE_ExpressivSetSignatureType(userId, sigType)); }
/// <summary> /// Returns the detected lower face Expressiv action power of the user /// </summary> /// <returns>power value (0.0 to 1.0)</returns> public Single ExpressivGetLowerFaceActionPower() { return(EdkDll.ES_ExpressivGetLowerFaceActionPower(hEmoState)); }
/// <summary> /// Returns a struct containing details about the specified EEG channel's headset /// </summary> /// <param name="channelId">channel identifier</param> /// <returns>provides detailed sensor location and other info</returns> public EdkDll.InputSensorDescriptor_t HeadsetGetSensorDetails(EdkDll.EE_InputChannels_t channelId) { EdkDll.InputSensorDescriptor_t descriptorOut; errorHandler(EdkDll.EE_HeadsetGetSensorDetails(channelId, out descriptorOut)); return descriptorOut; }
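// Example (hedged sketch): querying where a sensor sits on the head. "engine" is assumed
// to exist; EE_CHAN_AF3 is an assumed EE_InputChannels_t member, and the xLoc/yLoc/zLoc
// field names of InputSensorDescriptor_t are assumptions about the descriptor layout.
EdkDll.InputSensorDescriptor_t sensor = engine.HeadsetGetSensorDetails(EdkDll.EE_InputChannels_t.EE_CHAN_AF3);
Console.WriteLine("AF3 location: " + sensor.xLoc + ", " + sensor.yLoc + ", " + sensor.zLoc);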
/// <summary> /// Return whether the headset has been put on correctly or not /// </summary> /// <remarks> /// If the headset cannot be detected on the head, then signal quality will not report /// any results for any of the channels /// </remarks> /// <returns>int (1: On, 0: Off)</returns> public Int32 GetHeadsetOn() { return(EdkDll.ES_GetHeadsetOn(hEmoState)); }
/// <summary> /// Returns short term excitement level of the user /// </summary> /// <returns>excitement level (0.0 to 1.0)</returns> public Single AffectivGetExcitementShortTermScore() { return(EdkDll.ES_AffectivGetExcitementShortTermScore(hEmoState)); }
/// <summary> /// Query the contact quality of a specific EEG electrode /// </summary> /// <param name="electroIdx">The index of the electrode for query</param> /// <returns>Enumerated value that characterizes the EEG electrode contact for the specified input channel</returns> public EdkDll.IEE_EEG_ContactQuality_t GetContactQuality(Int32 electroIdx) { return(EdkDll.IS_GetContactQuality(hEmoState, electroIdx)); }
/// <summary> /// Query whether the signal is too noisy for Affectiv detection to be active /// </summary> /// <param name="type">Affectiv detection type</param> /// <returns>detection state (false: Not Active, true: Active)</returns> public Boolean AffectivIsActive(EdkDll.EE_AffectivAlgo_t type) { return(EdkDll.ES_AffectivIsActive(hEmoState, type)); }
/// <summary> /// Query the current wireless signal strength /// </summary> /// <returns> /// wireless signal strength [No Signal, Bad, Fair, Good, Excellent]. /// </returns> public EdkDll.IEE_SignalStrength_t GetWirelessSignalStatus() { return(EdkDll.IS_GetWirelessSignalStatus(hEmoState)); }
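// Example (hedged sketch): a quick pre-flight check before trusting detection results.
// "state" is an assumed EmoState instance; GetHeadsetOn and GetWirelessSignalStatus are
// the wrapper methods above, and the signal strength is simply reported by name so no
// specific IEE_SignalStrength_t members need to be assumed.
Boolean headsetOn = state.GetHeadsetOn() == 1;
EdkDll.IEE_SignalStrength_t signal = state.GetWirelessSignalStatus();
Console.WriteLine("Headset on: " + headsetOn + ", wireless signal: " + signal);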
/// <summary> /// Returns frustration level of the user /// </summary> /// <returns>frustration level (0.0 to 1.0)</returns> public Single AffectivGetFrustrationScore() { return(EdkDll.ES_AffectivGetFrustrationScore(hEmoState)); }
/// <summary> /// Query whether the user is winking right at the time the EmoState is captured. /// </summary> /// <returns>right wink status (true: wink, false: not wink)</returns> public Boolean FacialExpressionIsRightWink() { return(EdkDll.IS_FacialExpressionIsRightWink(hEmoState)); }
/// <summary> /// Returns engagement/boredom level of the user /// </summary> /// <returns>engagement/boredom level (0.0 to 1.0)</returns> public Single AffectivGetEngagementBoredomScore() { return(EdkDll.ES_AffectivGetEngagementBoredomScore(hEmoState)); }
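// Example (hedged sketch): sampling several Affectiv scores from the same EmoState so that
// the values describe a single moment in time. "state" is an assumed EmoState instance;
// the getters used are the ones defined in this listing.
Single excitement = state.AffectivGetExcitementShortTermScore();
Single frustration = state.AffectivGetFrustrationScore();
Single engagement = state.AffectivGetEngagementBoredomScore();
Console.WriteLine("Excitement: " + excitement + ", frustration: " + frustration + ", engagement: " + engagement);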
/// <summary> /// Copy constructor /// </summary> /// <param name="es">EmoState to be copied</param> public EmoState(EmoState es) { this.hEmoState = EdkDll.IS_Create(); EdkDll.IS_Copy(this.hEmoState, es.GetHandle()); }
/// <summary> /// Returns the detected Cognitiv action of the user /// </summary> /// <returns>Cognitiv action type</returns> public EdkDll.EE_CognitivAction_t CognitivGetCurrentAction() { return(EdkDll.ES_CognitivGetCurrentAction(hEmoState)); }
/// <summary> /// Query whether the user is looking right at the time the EmoState is captured with EPOC/EPOC+ headset. /// </summary> /// <returns>return eye position (1: looking right, 0: not looking right)</returns> public Int32 FacialExpressionIsLookingRight() { return(EdkDll.IS_FacialExpressionIsLookingRight(hEmoState)); }
/// <summary> /// Returns the detected Cognitiv action power of the user /// </summary> /// <returns>Cognitiv action power (0.0 to 1.0)</returns> public Single CognitivGetCurrentActionPower() { return(EdkDll.ES_CognitivGetCurrentActionPower(hEmoState)); }
/// <summary> /// Query whether the signal is too noisy for Cognitiv detection to be active /// </summary> /// <returns>detection state (false: Not Active, true: Active)</returns> public Boolean CognitivIsActive() { return(EdkDll.ES_CognitivIsActive(hEmoState)); }
/// <summary> /// Resets all settings and user-specific profile data for the specified detection suite /// </summary> /// <param name="userId">user ID</param> /// <param name="suite">detection suite (Expressiv, Affectiv, or Cognitiv)</param> /// <param name="detectionBitVector">identifies specific detections. Set to zero for all detections.</param> public void ResetDetection(UInt32 userId, EdkDll.EE_EmotivSuite_t suite, UInt32 detectionBitVector) { errorHandler(EdkDll.EE_ResetDetection(userId, suite, detectionBitVector)); }
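// Example (hedged sketch): wiping all Cognitiv training data for a user. Passing zero as
// the detection bit vector targets every detection in the suite, as described above.
// "engine" and "userId" are assumed to exist; EE_COGNITIV is an assumed member name of
// EE_EmotivSuite_t.
engine.ResetDetection(userId, EdkDll.EE_EmotivSuite_t.EE_COGNITIV, 0);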
/// <summary> /// Check whether two states have identical Affectiv state /// </summary> /// <param name="state">EmoState</param> /// <returns>true: Equal, false: Different</returns> public Boolean AffectivEqual(EmoState state) { return(EdkDll.ES_AffectivEqual(GetHandle(), state.GetHandle())); }
/// <summary> /// Set a list of vital algorithms of a specific suite on the optimization parameter /// </summary> /// <param name="suite">suite that you are interested in</param> /// <param name="vitalAlgorithmBitVector">a bit vector of vital algorithms composed of EE_ExpressivAlgo_t, EE_AffectivAlgo_t or EE_CognitivAction_t values, depending on the suite parameter passed in</param> public void SetVitalAlgorithm(EdkDll.EE_EmotivSuite_t suite, UInt32 vitalAlgorithmBitVector) { EmoEngine.errorHandler(EdkDll.EE_OptimizationSetVitalAlgorithm(hOptimizationParam, suite, vitalAlgorithmBitVector)); }
/// <summary> /// Check whether two states have identical Expressiv state, i.e. whether both states represent the same facial expression /// </summary> /// <param name="state">EmoState</param> /// <returns>true: Equal, false: Different</returns> public Boolean ExpressivEqual(EmoState state) { return(EdkDll.ES_ExpressivEqual(GetHandle(), state.GetHandle())); }
/// <summary> /// Set the type of Cognitiv action to be trained /// </summary> /// <param name="userId">user ID</param> /// <param name="action">the action to be trained</param> public void CognitivSetTrainingAction(UInt32 userId, EdkDll.EE_CognitivAction_t action) { errorHandler(EdkDll.EE_CognitivSetTrainingAction(userId, action)); }
/// <summary> /// Check whether two states have identical Cognitiv state /// </summary> /// <param name="state">EmoState</param> /// <returns>true: Equal, false: Different</returns> public Boolean CognitivEqual(EmoState state) { return(EdkDll.ES_CognitivEqual(GetHandle(), state.GetHandle())); }
/// <summary> /// Get threshold from Expressiv algorithms /// </summary> /// <param name="userId">user ID</param> /// <param name="algoName">Expressiv algorithm type</param> /// <param name="thresholdName">Expressiv threshold type</param> /// <returns>threshold value</returns> public Int32 ExpressivGetThreshold(UInt32 userId, EdkDll.EE_ExpressivAlgo_t algoName, EdkDll.EE_ExpressivThreshold_t thresholdName) { Int32 valueOut = 0; errorHandler(EdkDll.EE_ExpressivGetThreshold(userId, algoName, thresholdName, out valueOut)); return valueOut; }
/// <summary> /// Check whether two states have identical EmoEngine state. /// </summary> /// <remarks> /// This function compares the time since EmoEngine start, /// the wireless signal strength and the signal quality of the different channels /// </remarks> /// <param name="state">EmoState</param> /// <returns>true: Equal, false: Different</returns> public Boolean EmoEngineEqual(EmoState state) { return(EdkDll.ES_EmoEngineEqual(GetHandle(), state.GetHandle())); }
/// <summary> /// Set threshold for Expressiv algorithms /// </summary> /// <param name="userId">user ID</param> /// <param name="algoName">Expressiv algorithm type</param> /// <param name="thresholdName">Expressiv threshold type</param> /// <param name="value">threshold value (min: 0 max: 1000)</param> public void ExpressivSetThreshold(UInt32 userId, EdkDll.EE_ExpressivAlgo_t algoName, EdkDll.EE_ExpressivThreshold_t thresholdName, Int32 value) { errorHandler(EdkDll.EE_ExpressivSetThreshold(userId, algoName, thresholdName, value)); }
/// <summary> /// Check whether two EmoStateHandles are identical /// </summary> /// <param name="a">EmoState</param> /// <param name="b">EmoState</param> /// <returns>true: Equal, false: Different</returns> public Boolean Equals(EmoState a, EmoState b) { return(EdkDll.ES_Equal(a.GetHandle(), b.GetHandle())); }
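// Example (hedged sketch): detecting facial-expression changes by keeping a snapshot of the
// previous EmoState. "current" is an assumed EmoState delivered by the engine; the copy
// constructor above duplicates it so the two handles can be compared on the next update.
EmoState previous = new EmoState(current);
// ... later, when a newer state has been assigned to "current":
if (!previous.ExpressivEqual(current))
{
    Console.WriteLine("Facial expression changed");
}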
/// <summary> /// Set the control flag for Expressiv training /// </summary> /// <param name="userId">user ID</param> /// <param name="control">pre-defined control command</param> public void ExpressivSetTrainingControl(UInt32 userId, EdkDll.EE_ExpressivTrainingControl_t control) { errorHandler(EdkDll.EE_ExpressivSetTrainingControl(userId, control)); }
/// <summary> /// Return the time since EmoEngine has been successfully connected to the headset /// </summary> /// <remarks> /// If the headset is disconnected from EmoEngine due to low battery or weak /// wireless signal, the time will be reset to zero. /// </remarks> /// <returns>time in seconds</returns> public Single GetTimeFromStart() { return(EdkDll.ES_GetTimeFromStart(hEmoState)); }
/// <summary> /// Query whether the signal is too noisy for FacialExpression detection to be active /// </summary> /// <param name="type">FacialExpression detection type</param> /// <returns>detection state (false: Not Active, true: Active)</returns> public Boolean FacialExpressionIsActive(EdkDll.IEE_FacialExpressionAlgo_t type) { return EdkDll.IS_FacialExpressionIsActive(hEmoState, type); }
/// <summary> /// Get average band power values for a channel /// </summary> /// <param name="userId">user ID</param> /// <param name="channel">channel of interest</param> /// <param name="theta">theta band value (4-8 Hz)</param> /// <param name="alpha">alpha band value (8-12 Hz)</param> /// <param name="low_beta">low-beta value (12-16 Hz)</param> /// <param name="high_beta">high-beta value (16-25 Hz)</param> /// <param name="gamma">gamma value (25-45 Hz)</param> /// <returns>EDK_ERROR_CODE</returns> public Int32 IEE_GetAverageBandPowers(UInt32 userId, EdkDll.IEE_DataChannel_t channel, Double[] theta, Double[] alpha, Double[] low_beta, Double[] high_beta, Double[] gamma) { return EdkDll.IEE_GetAverageBandPowers(userId, channel, theta, alpha, low_beta, high_beta, gamma); }
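// Example (hedged sketch): polling the average band powers for one channel. The wrapper
// above forwards Double[] buffers to the native call, so single-element arrays are used
// here as output slots, an assumption about the intended calling convention. "engine"
// and "userId" are assumed to exist; IED_AF3 is an assumed IEE_DataChannel_t member, and
// 0 is assumed to be the EDK_OK success code.
Double[] theta = new Double[1], alpha = new Double[1], lowBeta = new Double[1], highBeta = new Double[1], gamma = new Double[1];
Int32 result = engine.IEE_GetAverageBandPowers(userId, EdkDll.IEE_DataChannel_t.IED_AF3, theta, alpha, lowBeta, highBeta, gamma);
if (result == 0) // 0 == EDK_OK (assumed success code)
{
    Console.WriteLine("AF3 theta: " + theta[0] + ", alpha: " + alpha[0]);
}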