// Update is called once per frame
void Update()
{
    //Debug.Log("1");
    PXCMEmotion ft = SenseToolkitManager.Instance.Emotion;
    //int numfaces = ft.QueryNumFaces();
    //Debug.Log(numfaces.ToString());
    if (ft != null)
    {
        //GZ DisplayPicture(pp.QueryImageByType(PXCMImage.ImageType.IMAGE_TYPE_COLOR));
        //PXCMCapture.Sample sample = stkm.SenseManager.QueryEmotionSample();
        //if (sample == null)
        //{
        //    stkm.SenseManager.ReleaseFrame();
        //    Debug.Log("3");
        //    return;
        //}
        //Debug.Log("4");

        // Query the emotion data for face 0.
        // (The out parameter re-allocates the array, so the pre-allocation
        // here is redundant but harmless.)
        PXCMEmotion.EmotionData[] arrData = new PXCMEmotion.EmotionData[NUM_EMOTIONS];
        if (ft.QueryAllEmotionData(0, out arrData) >= pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            DrawLocation(arrData);
        }
    }
}
public PXCMEmotion.EmotionData[] AdaptData(PXCMEmotion.EmotionData[] _arrData)
{
    PXCMEmotion.EmotionData[] new_arrData = new PXCMEmotion.EmotionData[3];
    new_arrData[0] = _arrData[0]; // anger
    new_arrData[1] = _arrData[4]; // joy
    new_arrData[2] = _arrData[5]; // sadness
    return new_arrData;
}
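Across these snippets, the 10-element array returned by QueryAllEmotionData is indexed as seven primary emotions (anger, contempt, disgust, fear, joy, sadness, surprise) followed by three sentiments (positive, negative, neutral); that mapping is consistent with the index comments above (0 = anger, 4 = joy, 5 = sadness) and with the Camera-steering DisplayLocation later in this section (1 = contempt, 6 = surprise, 9 = neutral). A small lookup table capturing that assumed order (the array name and label strings are ours, not the SDK's):

// Index-to-name table under the assumed QueryAllEmotionData ordering:
// 0..6 primary emotions, 7..9 sentiments.
static readonly string[] AllEmotionLabels =
{
    "anger", "contempt", "disgust", "fear", "joy", "sadness", "surprise",
    "positive", "negative", "neutral"
};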
private void DisplayLocation(PXCMEmotion ft)
{
    int numFaces = ft.QueryNumFaces();
    for (int i = 0; i < numFaces; i++)
    {
        /* Retrieve emotionDet location data */
        PXCMEmotion.EmotionData[] arrData = new PXCMEmotion.EmotionData[form.NUM_EMOTIONS];
        if (ft.QueryAllEmotionData(i, out arrData) >= pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            form.DrawLocation(arrData);
        }
    }
}
static void ProcessEmotions(PXCMEmotion emotionModule)
{
    // Query how many faces were detected
    int numberOfFaces = emotionModule.QueryNumFaces();
    Console.WriteLine("{0} face(s) were detected.", numberOfFaces);

    // Query the emotion information for each detected face
    for (int i = 0; i < numberOfFaces; i++)
    {
        PXCMEmotion.EmotionData[] allEmotionData;
        emotionModule.QueryAllEmotionData(i, out allEmotionData);

        // Print each detected emotion and its intensity
        foreach (PXCMEmotion.EmotionData emotionData in allEmotionData)
        {
            if (emotionData.intensity > 0.6)
            {
                if (emotionData.eid.ToString().Equals("EMOTION_PRIMARY_JOY"))
                {
                    if (currentEmotion != 1)
                    {
                        String target = (String)iot_form.comboTarget.SelectedItem;
                        String hopen = (String)iot_form.comboSmile.SelectedItem;
                        //SendMqttMessage(target + "/" + hopen);
                        SendMqttMessage(hopen);
                        currentEmotion = 1;
                        //System.Diagnostics.Process.Start("http://www.globalcode.com.br");
                        //SendMqttMessage("surfboard2/relay?1");
                    }
                }
                if (emotionData.eid.ToString().Equals("EMOTION_PRIMARY_SURPRISE"))
                {
                    if (currentEmotion != 2)
                    {
                        String target = (String)iot_form.comboTarget.SelectedItem;
                        String hopen = (String)iot_form.comboSurprise.SelectedItem;
                        //SendMqttMessage(target + "/" + hopen);
                        SendMqttMessage(hopen);
                        currentEmotion = 2;
                        //System.Diagnostics.Process.Start("http://www.globalcode.com.br");
                        //SendMqttMessage("surfboard2/relay?1");
                    }
                }
            }
            Console.WriteLine("{0} - Intensity {1}", emotionData.eid, emotionData.intensity);
        }
    }
}
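A small robustness note: matching eid by ToString() works but is fragile against renames. The enum can be compared directly; a later snippet in this section already references PXCMEmotion.Emotion.EMOTION_SENTIMENT_POSITIVE the same way. A sketch of the equivalent check:

// Equivalent, type-safe comparison (avoids string matching on enum names):
if (emotionData.eid == PXCMEmotion.Emotion.EMOTION_PRIMARY_JOY &&
    emotionData.intensity > 0.6f)
{
    // react to joy...
}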
/// <summary>
/// Called when an emotion is detected.
/// </summary>
/// <param name="module">The module.</param>
private void OnEmotionCallback(PXCMEmotion module)
{
    PXCMEmotion.EmotionData[] emotions;
    int faces = module.QueryNumFaces();
    // Debug.WriteLine("{0} Faces detected: {1}", Time(), faces);
    for (int face = 0; face < faces; face++)
    {
        module.QueryAllEmotionData(face, out emotions);
        foreach (var emotion in emotions)
        {
            if (emotion.evidence <= 0)
            {
                continue;
            }
            Debug.WriteLine("{0} Face #{1} has {2} with evidence {3} and intensity {4} at rectangle {5},{6},{7},{8}",
                Time(), emotion.fid, emotion.eid, emotion.evidence, emotion.intensity,
                emotion.rectangle.x, emotion.rectangle.y, emotion.rectangle.w, emotion.rectangle.h);
        }
    }
}
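The snippet does not show how this callback gets invoked. A minimal wiring sketch, assuming the SenseManager handler-callback style that another snippet below hints at (its Handler initializer mentions onModuleQueryProfile); the CUID check and cast are our assumptions about the wrapper:

// Hypothetical wiring: invoke OnEmotionCallback whenever the emotion
// module finishes processing a frame.
PXCMSenseManager sm = PXCMSenseManager.CreateInstance();
sm.EnableEmotion();

PXCMSenseManager.Handler handler = new PXCMSenseManager.Handler
{
    onModuleProcessedFrame = (mid, module, sample) =>
    {
        if (mid == PXCMEmotion.CUID) // assumed module-id constant
        {
            OnEmotionCallback(module.QueryInstance<PXCMEmotion>());
        }
        return pxcmStatus.PXCM_STATUS_NO_ERROR; // keep streaming
    }
};

if (sm.Init(handler) >= pxcmStatus.PXCM_STATUS_NO_ERROR)
{
    sm.StreamFrames(true); // blocking loop; frames are delivered to the handler
}
sm.Dispose();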
public void ProcessMyEmoData(PXCMEmotion.EmotionData[] data)
{
    lock (this)
    {
        // Pick the entry whose evidence and intensity are jointly maximal
        int epidx = -1;
        int maxscoreE = -3;
        float maxscoreI = 0;
        for (int i = 0; i < data.Length; i++)
        {
            if (data[i].evidence < maxscoreE) continue;
            if (data[i].intensity < maxscoreI) continue;
            maxscoreE = data[i].evidence;
            maxscoreI = data[i].intensity;
            epidx = i;
        }
        if ((epidx != -1) && (maxscoreI > 0.4))
        {
            form_module3.patientEmotion = myEmotionLabels[epidx];
        }
        else
        {
            form_module3.patientEmotion = "pNeutral";
        }
    }
}
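myEmotionLabels is not shown. Given that GetEmoData (at the end of this section) feeds this method the three-element array produced by AdaptData, it presumably holds three labels in the same anger/joy/sadness order. A hypothetical definition (the "p" prefix mirrors the "pNeutral" fallback above):

// Hypothetical labels matching AdaptData's output order (anger, joy, sadness).
private readonly string[] myEmotionLabels = { "pAnger", "pJoy", "pSadness" };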
public static void start(IoT_RealSense_Surfing frm)
{
    keepLooping = true;
    iot_form = frm;
    InitializeMqqtClient();

    PXCMSession session = PXCMSession.CreateInstance();
    Console.WriteLine(frm.comboTarget.SelectedText);
    if (session != null)
    {
        // Optional step to send feedback to Intel Corporation to understand
        // how often each SDK sample is used.
        PXCMMetadata md = session.QueryInstance<PXCMMetadata>();
        if (md != null)
        {
            string sample_name = "Emotion Viewer CS";
            md.AttachBuffer(1297303632, System.Text.Encoding.Unicode.GetBytes(sample_name));
        }
        //Application.Run(new MainForm(session));
        //session.Dispose();
    }
    //PXCMSession.ImplVersion version = session.QueryVersion();
    //Console.WriteLine("RealSense SDK Version {0}.{1}", version.major, version.minor);
    session.CreateImpl<PXCMRotation>(out rotationHelper);

    // Create the SenseManager
    PXCMSenseManager senseManager = session.CreateSenseManager();
    if (senseManager == null)
    {
        Console.WriteLine("Failed to create the SenseManager object.");
        return;
    }

    // Enable the Emotion module
    pxcmStatus enablingModuleStatus = senseManager.EnableEmotion();
    if (enablingModuleStatus != pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        Console.WriteLine("Failed to enable the Emotion Module");
        return;
    }

    // Get the instance of the Emotion module
    PXCMEmotion emotionModule = senseManager.QueryEmotion();
    if (emotionModule == null)
    {
        Console.WriteLine("Failed to query the emotion module");
        return;
    }

    // Initialize the camera.
    // NOTE: this first Init() looks premature - the Hand module is enabled
    // below, after the pipeline has already been initialized, and Init() is
    // then called a second time.
    if (senseManager.Init() < pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        Console.WriteLine("Failed to initialize the SenseManager");
        return;
    }

    // Enable the Hand module
    pxcmStatus enablingModuleStatus1 = senseManager.EnableHand("Hand Module");
    if (enablingModuleStatus1 != pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        Console.WriteLine("Failed to enable the Hand Module");
        return;
    }

    // Get the instance of the Hand module
    PXCMHandModule handModule = senseManager.QueryHand();
    if (handModule == null)
    {
        Console.WriteLine("Failed to get the HandModule object.");
        return;
    }

    // Create an active configuration
    PXCMHandConfiguration handConfiguration = handModule.CreateActiveConfiguration();
    if (handConfiguration == null)
    {
        Console.WriteLine("Failed to create the HandConfiguration object.");
        return;
    }

    // List the available gestures
    int supportedGesturesCount = handConfiguration.QueryGesturesTotalNumber();
    if (supportedGesturesCount > 0)
    {
        Console.WriteLine("Supported gestures:");
        for (int i = 0; i < supportedGesturesCount; i++)
        {
            string gestureName = string.Empty;
            if (handConfiguration.QueryGestureNameByIndex(i, out gestureName) == pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                Console.WriteLine("\t" + gestureName);
            }
        }
    }

    // Enable a subset of gestures
    String[] enabledGestures = { GESTURE_CLICK, GESTURE_VSIGN, GESTURE_FIST, GESTURE_SPREADFINGERS };
    foreach (String gesture in enabledGestures)
    {
        if (!handConfiguration.IsGestureEnabled(gesture))
        {
            handConfiguration.EnableGesture(gesture);
        }
    }
    handConfiguration.ApplyChanges();

    // Create a data output object
    PXCMHandData handData = handModule.CreateOutput();
    if (handData == null)
    {
        Console.WriteLine("Failed to create the HandData object.");
        return;
    }

    // Initialize the SenseManager (second call; see note above)
    if (senseManager.Init() != pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        Console.WriteLine(senseManager.Init());
        return;
    }

    // Loop to query the hand and emotion information
    while (keepLooping)
    {
        // Acquire a frame
        if (senseManager.AcquireFrame(true) < pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            break;
        }

        // Update the hand data
        if (handData != null)
        {
            handData.Update();
        }

        //ProcessHands(handData);
        ProcessGestures(handData);
        ProcessEmotions(emotionModule);

        // Release the acquired frame
        senseManager.ReleaseFrame();

        /* using another frame to process different stuff? may be...
        // Acquire a frame
        if (senseManager.AcquireFrame(true) < pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            break;
        }

        // Process emotions
        ProcessEmotions(emotionModule);

        // Release the acquired frame
        senseManager.ReleaseFrame();
        */
    }

    // Release resources
    if (handData != null) handData.Dispose();
    if (handConfiguration != null) handConfiguration.Dispose();
    rotationHelper.Dispose();
    senseManager.Close();
    senseManager.Dispose();
    session.Dispose();
    client.Disconnect();
}
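InitializeMqqtClient() and SendMqttMessage() are called here and in ProcessEmotions but never shown. A minimal sketch of what they might look like with the M2Mqtt library (the library choice, broker address, and topic are assumptions; the client field name matches the client.Disconnect() call above):

using System;
using System.Text;
using uPLibrary.Networking.M2Mqtt;

static MqttClient client;

// Hypothetical: connect to an assumed broker address.
static void InitializeMqqtClient()
{
    client = new MqttClient("192.168.0.10");
    client.Connect(Guid.NewGuid().ToString());
}

// Hypothetical: publish the message to an assumed topic.
static void SendMqttMessage(string message)
{
    client.Publish("realsense/emotion", Encoding.UTF8.GetBytes(message));
}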
public override void OnEmotionSetup(ref PXCMEmotion.ProfileInfo finfo)
{
    PXCMEmotion ftl = QueryEmotion();
    ftl.QueryProfile(profileIndex, out finfo);
}
void OnEnable()
{
    Initialized = false;

    /* Create a SenseManager instance */
    SenseManager = PXCMSenseManager.CreateInstance();
    if (SenseManager == null)
    {
        print("Unable to create the pipeline instance");
        return;
    }

    if (_speechCommandsRef.Count != 0)
    {
        SetSenseOption(SenseOption.SenseOptionID.Speech);
    }

    int numberOfEnabledModalities = 0;

    // Set mode according to RunMode - play from file / record / live stream
    if (RunMode == MCTTypes.RunModes.PlayFromFile)
    {
        // Check that the file exists
        if (!System.IO.File.Exists(FilePath))
        {
            Debug.LogWarning("No Filepath Set Or File Doesn't Exist, Run Mode Will Be Changed to Live Stream");
            RunMode = MCTTypes.RunModes.LiveStream;
        }
        else
        {
            PXCMCaptureManager cManager = SenseManager.QueryCaptureManager();
            cManager.SetFileName(FilePath, false);
            Debug.Log("SenseToolkitManager: Playing from file: " + FilePath);
        }
    }

    if (RunMode == MCTTypes.RunModes.RecordToFile)
    {
        // Check that the target directory exists
        string PathOnly = FilePath;
        while (!PathOnly[PathOnly.Length - 1].Equals('\\'))
        {
            PathOnly = PathOnly.Remove(PathOnly.Length - 1, 1);
        }
        if (!System.IO.Directory.Exists(PathOnly))
        {
            Debug.LogWarning("No Filepath Set Or Path Doesn't Exist, Run Mode Will Be Changed to Live Stream");
            RunMode = MCTTypes.RunModes.LiveStream;
        }
        else
        {
            PXCMCaptureManager cManager = SenseManager.QueryCaptureManager();
            cManager.SetFileName(FilePath, true);
            Debug.Log("SenseToolkitManager: Recording to file: " + FilePath);
        }
    }

    /* Enable modalities according to the set options */
    if (IsSenseOptionSet(SenseOption.SenseOptionID.Face, true))
    {
        SenseManager.EnableFace();
        SenseManager.EnableEmotion();
        _senseOptions.Find(i => i.ID == SenseOption.SenseOptionID.Face).Initialized = true;
        _senseOptions.Find(i => i.ID == SenseOption.SenseOptionID.Face).Enabled = true;
        SetSenseOption(SenseOption.SenseOptionID.VideoColorStream);
        numberOfEnabledModalities++;
    }

    if (IsSenseOptionSet(SenseOption.SenseOptionID.Hand, true))
    {
        _sts = SenseManager.EnableHand();
        _senseOptions.Find(i => i.ID == SenseOption.SenseOptionID.Hand).Initialized = true;
        _senseOptions.Find(i => i.ID == SenseOption.SenseOptionID.Hand).Enabled = true;
        numberOfEnabledModalities++;
    }

    if (IsSenseOptionSet(SenseOption.SenseOptionID.Object, true))
    {
        _sts = SenseManager.EnableTracker();
        _senseOptions.Find(i => i.ID == SenseOption.SenseOptionID.Object).Initialized = true;
        _senseOptions.Find(i => i.ID == SenseOption.SenseOptionID.Object).Enabled = true;
        numberOfEnabledModalities++;
    }

    if (IsSenseOptionSet(SenseOption.SenseOptionID.Speech, true))
    {
        if (!SpeechManager.IsInitialized)
        {
            if (SpeechManager.InitalizeSpeech())
            {
                _senseOptions.Find(i => i.ID == SenseOption.SenseOptionID.Speech).Initialized = true;
                _senseOptions.Find(i => i.ID == SenseOption.SenseOptionID.Speech).Enabled = true;
                numberOfEnabledModalities++;
            }
            else
            {
                UnsetSenseOption(SenseOption.SenseOptionID.Speech);
            }
        }
        else
        {
            _senseOptions.Find(i => i.ID == SenseOption.SenseOptionID.Speech).Initialized = true;
            _senseOptions.Find(i => i.ID == SenseOption.SenseOptionID.Speech).Enabled = true;
            numberOfEnabledModalities++;
        }
    }

    if (IsSenseOptionSet(SenseOption.SenseOptionID.VideoDepthStream, true) ||
        IsSenseOptionSet(SenseOption.SenseOptionID.PointCloud, true))
    {
        SenseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_DEPTH, 0, 0, 0);
        _senseOptions.Find(i => i.ID == SenseOption.SenseOptionID.VideoDepthStream).Initialized = true;
        _senseOptions.Find(i => i.ID == SenseOption.SenseOptionID.VideoDepthStream).Enabled = true;
        numberOfEnabledModalities++;
    }

    if (IsSenseOptionSet(SenseOption.SenseOptionID.VideoIRStream, true))
    {
        SenseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_IR, 0, 0, 0);
        _senseOptions.Find(i => i.ID == SenseOption.SenseOptionID.VideoIRStream).Initialized = true;
        _senseOptions.Find(i => i.ID == SenseOption.SenseOptionID.VideoIRStream).Enabled = true;
        numberOfEnabledModalities++;
    }

    if (IsSenseOptionSet(SenseOption.SenseOptionID.VideoColorStream, true))
    {
        if (ColorImageQuality == MCTTypes.RGBQuality.FullHD)
        {
            SenseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 1920, 1080, 0);
        }
        else if (ColorImageQuality == MCTTypes.RGBQuality.HD)
        {
            SenseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 1280, 720, 0);
        }
        else if (ColorImageQuality == MCTTypes.RGBQuality.HalfHD)
        {
            SenseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 960, 540, 0);
        }
        else
        {
            SenseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 640, 480, 0);
        }
        _senseOptions.Find(i => i.ID == SenseOption.SenseOptionID.VideoColorStream).Initialized = true;
        _senseOptions.Find(i => i.ID == SenseOption.SenseOptionID.VideoColorStream).Enabled = true;
        numberOfEnabledModalities++;
    }

    if (IsSenseOptionSet(SenseOption.SenseOptionID.VideoSegmentation, true))
    {
        if (ColorImageQuality == MCTTypes.RGBQuality.FullHD)
        {
            SenseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 1920, 1080, 0);
        }
        else if (ColorImageQuality == MCTTypes.RGBQuality.HD)
        {
            SenseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 1280, 720, 0);
        }
        else if (ColorImageQuality == MCTTypes.RGBQuality.HalfHD)
        {
            SenseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 960, 540, 0);
        }
        else
        {
            SenseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 640, 480, 0);
        }
        SenseManager.Enable3DSeg();
        _senseOptions.Find(i => i.ID == SenseOption.SenseOptionID.VideoSegmentation).Initialized = true;
        _senseOptions.Find(i => i.ID == SenseOption.SenseOptionID.VideoSegmentation).Enabled = true;
        numberOfEnabledModalities++;
    }

    /* Initialize the execution */
    _sts = SenseManager.Init();
    if (_sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        if (numberOfEnabledModalities > 0)
        {
            Debug.LogError("Unable to initialize all modalities");
        }
        return;
    }

    // Set the different configurations:

    // Face
    if (IsSenseOptionSet(SenseOption.SenseOptionID.Face, true))
    {
        var faceModule = SenseManager.QueryFace();
        var faceConfiguration = faceModule.CreateActiveConfiguration();
        if (faceConfiguration == null)
        {
            throw new UnityException("CreateActiveConfiguration returned null");
        }
        faceConfiguration.Update();

        faceConfiguration.detection.isEnabled = true;
        faceConfiguration.detection.smoothingLevel = PXCMFaceConfiguration.SmoothingLevelType.SMOOTHING_DISABLED;
        faceConfiguration.landmarks.isEnabled = true;
        faceConfiguration.landmarks.smoothingLevel = PXCMFaceConfiguration.SmoothingLevelType.SMOOTHING_DISABLED;
        faceConfiguration.pose.isEnabled = true;
        faceConfiguration.pose.smoothingLevel = PXCMFaceConfiguration.SmoothingLevelType.SMOOTHING_DISABLED;
        faceConfiguration.DisableAllAlerts();
        faceConfiguration.strategy = PXCMFaceConfiguration.TrackingStrategyType.STRATEGY_APPEARANCE_TIME;

        if ((NumberOfDetectedFaces < 1) || (NumberOfDetectedFaces > 15))
        {
            Debug.Log("Illegal value for Number Of Detected Faces, value is set to 1");
            NumberOfDetectedFaces = 1;
        }
        faceConfiguration.detection.maxTrackedFaces = NumberOfDetectedFaces;
        faceConfiguration.landmarks.maxTrackedFaces = NumberOfDetectedFaces;
        faceConfiguration.pose.maxTrackedFaces = NumberOfDetectedFaces;

        PXCMFaceConfiguration.ExpressionsConfiguration expressionConfig = faceConfiguration.QueryExpressions();
        expressionConfig.Enable();
        expressionConfig.EnableAllExpressions();
        faceConfiguration.ApplyChanges();
        faceConfiguration.Dispose();

        FaceModuleOutput = faceModule.CreateOutput();
        Emotion = SenseManager.QueryEmotion();
        UnsetSenseOption(SenseOption.SenseOptionID.VideoColorStream);
    }

    // Hand
    if (IsSenseOptionSet(SenseOption.SenseOptionID.Hand, true))
    {
        PXCMHandModule handAnalysis = SenseManager.QueryHand();
        PXCMHandConfiguration handConfiguration = handAnalysis.CreateActiveConfiguration();
        if (handConfiguration == null)
        {
            throw new UnityException("CreateActiveConfiguration returned null");
        }
        handConfiguration.Update();
        //handConfiguration.EnableAllGestures();
        handConfiguration.EnableGesture("v_sign");
        handConfiguration.EnableGesture("thumb_up");
        handConfiguration.EnableGesture("thumb_down");
        //handConfiguration.EnableGesture("two_fingers_pinch_open");
        handConfiguration.EnableGesture("spreadfingers");
        handConfiguration.EnableGesture("fist");
        handConfiguration.EnableStabilizer(true);
        handConfiguration.DisableAllAlerts();
        handConfiguration.EnableSegmentationImage(false);
        handConfiguration.ApplyChanges();
        handConfiguration.Dispose();
        HandDataOutput = handAnalysis.CreateOutput();
    }

    if (IsSenseOptionSet(SenseOption.SenseOptionID.Object, true))
    {
        if (_senseOptions.Find(i => i.ID == SenseOption.SenseOptionID.Object).Enabled != true)
        {
            _senseOptions.Find(i => i.ID == SenseOption.SenseOptionID.Object).Enabled = true;
            OnDisable();
            OnEnable();
        }
    }

    if (IsSenseOptionSet(SenseOption.SenseOptionID.Speech, true))
    {
        UpdateSpeechCommands();
        SpeechManager.Start();
    }

    // Create an instance for the projection & blob extractor
    if (Projection == null)
    {
        Projection = SenseManager.QueryCaptureManager().QueryDevice().CreateProjection();
    }
    if (BlobExtractor == null)
    {
        SenseManager.session.CreateImpl<PXCMBlobExtractor>(out BlobExtractor);
    }

    // Set initialization flag
    Initialized = true;
}
// Update processing for the face frame
private void updateFaceFrame()
{
    // Acquire the frame data
    PXCMCapture.Sample sample = senseManager.QuerySample();
    if (sample != null)
    {
        UpdateColorImage(sample.color);
    }

    this.emotionDet = this.senseManager.QueryEmotion();
    if (this.emotionDet != null)
    {
        // Update the SenseManager module's face data
        this.faceData.Update();

        // Fetch and draw information for each detected face
        for (int index = 0; index <= this.faceData.QueryNumberOfDetectedFaces() - 1; index++)
        {
            var face = this.faceData.QueryFaceByIndex(index);
            if (face != null)
            {
                // Get the face position from the color stream
                var detection = face.QueryDetection();
                if (detection != null)
                {
                    PXCMRectI32 faceRect;
                    detection.QueryBoundingRect(out faceRect);

                    // Move the rectangle to match the face position
                    TranslateTransform transform = new TranslateTransform(faceRect.x, faceRect.y);
                    rect[index].Width = faceRect.w;
                    rect[index].Height = faceRect.h;
                    rect[index].Stroke = Brushes.Blue;
                    rect[index].StrokeThickness = 3;
                    rect[index].RenderTransform = transform;

                    // Get the expression data for the face
                    var expressionData = face.QueryExpressions();
                    if (expressionData != null)
                    {
                        PXCMFaceData.ExpressionsData.FaceExpressionResult expressionResult;

                        // Place the expression readouts under the face
                        expression_tb[index, 0].RenderTransform = new TranslateTransform(transform.X, transform.Y + faceRect.h + 15);
                        expression_tb[index, 1].RenderTransform = new TranslateTransform(transform.X, transform.Y + faceRect.h + 30);
                        expression_tb[index, 2].RenderTransform = new TranslateTransform(transform.X, transform.Y + faceRect.h + 45);

                        // How far the mouth is open
                        if (expressionData.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_MOUTH_OPEN, out expressionResult))
                        {
                            expression_tb[index, 0].Text = "MOUTH_OPEN:" + expressionResult.intensity;
                        }
                        // How far the tongue is out
                        if (expressionData.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_TONGUE_OUT, out expressionResult))
                        {
                            expression_tb[index, 1].Text = "TONGUE_OUT:" + expressionResult.intensity;
                        }
                        // Degree of smiling
                        if (expressionData.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_SMILE, out expressionResult))
                        {
                            expression_tb[index, 2].Text = "SMILE:" + expressionResult.intensity;
                        }
                        // Expression (face-data) detection ends here

                        //////////////////////////////////////
                        // Added: emotion recognition starts here

                        // Added: get the emotion data
                        PXCMEmotion.EmotionData[] datas = new PXCMEmotion.EmotionData[NUM_PRIMARY_EMOTIONS + NUM_SENTIMENT_EMOTIONS];
                        emotionDet.QueryAllEmotionData(index, out datas);

                        // Added: estimate the primary emotion
                        int maxscoreE = -3;
                        float maxscoreI = 0;
                        int idx_outstanding_emotion = -1; // the finally chosen emotion
                        for (int emotionIndex = 0; emotionIndex <= NUM_PRIMARY_EMOTIONS - 1; emotionIndex++)
                        {
                            if (datas != null)
                            {
                                if (datas[emotionIndex].evidence >= maxscoreE &&
                                    datas[emotionIndex].intensity >= maxscoreI)
                                {
                                    // Keep the entry whose evidence and intensity are both largest so far
                                    maxscoreE = datas[emotionIndex].evidence;   // compare the evidence
                                    maxscoreI = datas[emotionIndex].intensity;  // compare the intensity
                                    //primaryData = datas[emotionIndex];
                                    idx_outstanding_emotion = emotionIndex;
                                }
                            }
                        }

                        if (idx_outstanding_emotion != -1)
                        {
                            emotion_tb[index, 0].RenderTransform = new TranslateTransform(faceRect.x, faceRect.y - 30);
                            emotion_tb[index, 0].Text = "Emotion_PRIMARY:" + EmotionLabels[idx_outstanding_emotion];
                        }

                        // When the intensity exceeds the threshold, assume an emotion is present
                        if (maxscoreI > 0.4)
                        {
                            // Added: estimate the sentiment
                            // (same approach as the primary-emotion estimation above)
                            //PXCMEmotion.EmotionData primarySent = null;
                            int idx_sentiment_emotion = -1;
                            int s_maxscoreE = -3;
                            float s_maxscoreI = 0.0f;
                            for (int sentimentIndex = 0; sentimentIndex < NUM_SENTIMENT_EMOTIONS; sentimentIndex++)
                            {
                                if (datas != null)
                                {
                                    // The sentiments follow the primary emotions in the array, so the
                                    // index must be offset. (The original compared datas[sentimentIndex]
                                    // here, which reads primary-emotion entries instead of sentiments.)
                                    if (datas[NUM_PRIMARY_EMOTIONS + sentimentIndex].evidence > s_maxscoreE &&
                                        datas[NUM_PRIMARY_EMOTIONS + sentimentIndex].intensity > s_maxscoreI)
                                    {
                                        s_maxscoreE = datas[NUM_PRIMARY_EMOTIONS + sentimentIndex].evidence;
                                        s_maxscoreI = datas[NUM_PRIMARY_EMOTIONS + sentimentIndex].intensity;
                                        //primarySent = datas[sentimentIndex];
                                        idx_sentiment_emotion = sentimentIndex;
                                    }
                                }
                            }
                            if (idx_sentiment_emotion != -1)
                            {
                                emotion_tb[index, 1].RenderTransform = new TranslateTransform(faceRect.x, faceRect.y - 60);
                                emotion_tb[index, 1].Text = "Emo_SENTIMENT:" + EmotionLabels[idx_sentiment_emotion];
                            }
                        }
                    }
                }
            }
        }
    }
}
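This snippet assumes NUM_PRIMARY_EMOTIONS, NUM_SENTIMENT_EMOTIONS, and EmotionLabels are defined elsewhere in the class. Plausible definitions, given the seven-primary/three-sentiment layout used throughout this section (the label strings are illustrative):

// Assumed companion fields (illustrative names/values):
const int NUM_PRIMARY_EMOTIONS = 7;   // anger .. surprise
const int NUM_SENTIMENT_EMOTIONS = 3; // positive, negative, neutral
readonly string[] EmotionLabels =
{
    "ANGER", "CONTEMPT", "DISGUST", "FEAR", "JOY", "SADNESS", "SURPRISE"
};

Note that indexing EmotionLabels with a sentiment index (0..2), as the "Emo_SENTIMENT" line above does, prints a primary-emotion name; a separate SentimentLabels table, as in the DrawLocation snippets below, avoids that.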
static void ProcessEmotions(PXCMEmotion emotionModule)
{
    // Query how many faces were detected
    int numberOfFaces = emotionModule.QueryNumFaces();
    Console.WriteLine("{0} face(s) were detected.", numberOfFaces);

    // Query the emotion information for each detected face
    for (int i = 0; i < numberOfFaces; i++)
    {
        PXCMEmotion.EmotionData[] allEmotionData;
        emotionModule.QueryAllEmotionData(i, out allEmotionData);

        // Print each detected emotion and its intensity
        foreach (PXCMEmotion.EmotionData emotionData in allEmotionData)
        {
            if (emotionData.intensity > 0.6)
            {
                if (emotionData.eid.ToString().Equals("EMOTION_PRIMARY_JOY"))
                {
                    if (currentEmotion != 1)
                    {
                        String target = (String)iot_form.comboTarget.SelectedItem;
                        String hopen = (String)iot_form.comboSmile.SelectedItem;
                        SendMqttMessage(target + "/" + hopen);
                        SendMqttMessage(hopen);
                        currentEmotion = 1;
                        //System.Diagnostics.Process.Start("http://www.globalcode.com.br");
                        //SendMqttMessage("surfboard2/relay?1");
                    }
                }
                if (emotionData.eid.ToString().Equals("EMOTION_PRIMARY_SURPRISE"))
                {
                    if (currentEmotion != 2)
                    {
                        String target = (String)iot_form.comboTarget.SelectedItem;
                        String hopen = (String)iot_form.comboSurprise.SelectedItem;
                        SendMqttMessage(target + "/" + hopen);
                        SendMqttMessage(hopen);
                        currentEmotion = 2;
                        //System.Diagnostics.Process.Start("http://www.globalcode.com.br");
                        //SendMqttMessage("surfboard2/relay?1");
                    }
                }
            }
            Console.WriteLine("{0} - Intensity {1}", emotionData.eid, emotionData.intensity);
        }
    }
}
public void SimplePipeline()
{
    bool sts = true;
    PXCMSenseManager pp = form.session.CreateSenseManager();
    if (pp == null)
    {
        throw new Exception("Failed to create sense manager");
    }
    disconnected = false;

    /* Set Source & Profile Index */
    PXCMCapture.DeviceInfo info = null;
    if (this.form.GetRecordState())
    {
        pp.captureManager.SetFileName(this.form.GetFileName(), true);
        form.PopulateDeviceMenu();
        if (this.form.Devices.TryGetValue(this.form.GetCheckedDevice(), out info))
        {
            pp.captureManager.FilterByDeviceInfo(info);
        }
    }
    else if (this.form.GetPlaybackState())
    {
        pp.captureManager.SetFileName(this.form.GetFileName(), false);
    }
    else
    {
        if (this.form.Devices.TryGetValue(this.form.GetCheckedDevice(), out info))
        {
            pp.captureManager.FilterByDeviceInfo(info);
        }
    }

    /* Set Module */
    pp.EnableEmotion(form.GetCheckedModule());

    /* Initialization */
    form.UpdateStatus("Init Started");
    PXCMSenseManager.Handler handler = new PXCMSenseManager.Handler()
    {
        //GZ onModuleQueryProfile = OnModuleQueryProfile
    };

    if (pp.Init(handler) >= pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        form.UpdateStatus("Streaming");
        this.timer = new FPSTimer(form);

        PXCMCaptureManager captureManager = pp.QueryCaptureManager();
        if (captureManager == null)
        {
            throw new Exception("Failed to query capture manager");
        }
        PXCMCapture.Device device = captureManager.QueryDevice();
        if (device != null && !this.form.GetPlaybackState())
        {
            device.SetDepthConfidenceThreshold(7);
        }
        //GZ device.SetProperty(PXCMCapture.Device.Property.PROPERTY_DEPTH_CONFIDENCE_THRESHOLD, 7);

        while (!form.stop)
        {
            if (pp.AcquireFrame(true) < pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                break;
            }
            if (!DisplayDeviceConnection(!pp.IsConnected()))
            {
                /* Display Results */
                PXCMEmotion ft = pp.QueryEmotion();
                if (ft == null)
                {
                    pp.ReleaseFrame();
                    continue;
                }
                //GZ DisplayPicture(pp.QueryImageByType(PXCMImage.ImageType.IMAGE_TYPE_COLOR));
                PXCMCapture.Sample sample = pp.QueryEmotionSample();
                if (sample == null)
                {
                    pp.ReleaseFrame();
                    continue;
                }
                DisplayPicture(sample.color);
                DisplayLocation(ft);
                form.UpdatePanel();
            }
            pp.ReleaseFrame();
        }
    }
    else
    {
        form.UpdateStatus("Init Failed");
        sts = false;
    }

    pp.Close();
    pp.Dispose();
    if (sts)
    {
        form.UpdateStatus("Stopped");
    }
}
public void DrawLocation(PXCMEmotion.EmotionData[] data)
{
    lock (this)
    {
        if (bitmap == null) return;
        Graphics g = Graphics.FromImage(bitmap);
        Pen red = new Pen(Color.Red, 3.0f);
        Brush brush = new SolidBrush(Color.Red);
        Font font = new Font(Font.FontFamily, 11, FontStyle.Bold);
        Brush brushTxt = new SolidBrush(Color.Cyan);

        if (LocationCheckBox.Checked)
        {
            Point[] points4 = new Point[]
            {
                new Point((int)data[0].rectangle.x, (int)data[0].rectangle.y),
                new Point((int)data[0].rectangle.x + (int)data[0].rectangle.w, (int)data[0].rectangle.y),
                new Point((int)data[0].rectangle.x + (int)data[0].rectangle.w, (int)data[0].rectangle.y + (int)data[0].rectangle.h),
                new Point((int)data[0].rectangle.x, (int)data[0].rectangle.y + (int)data[0].rectangle.h),
                new Point((int)data[0].rectangle.x, (int)data[0].rectangle.y)
            };
            g.DrawLines(red, points4);
            //g.DrawString(data[0].fid.ToString(), font, brushTxt, (float)(data[0].rectangle.x + data[0].rectangle.w), (float)(data[0].rectangle.y));
        }

        bool emotionPresent = false;
        int epidx = -1;
        int maxscoreE = -3;
        float maxscoreI = 0;
        float maxscoreI1 = 0;
        for (int i = 0; i < NUM_PRIMARY_EMOTIONS; i++)
        {
            if (data[i].evidence < maxscoreE) continue;
            if (data[i].intensity < maxscoreI) continue;
            maxscoreE = data[i].evidence;
            maxscoreI = data[i].intensity;
            epidx = i;
        }
        if ((epidx != -1) && (maxscoreI > 0.4))
        {
            // Here is where it detects the emotion to display
            maxscoreI1 = maxscoreI;
            g.DrawString(EmotionLabels[epidx], font, brushTxt,
                (float)(data[0].rectangle.x + data[0].rectangle.w),
                (float)data[0].rectangle.y);
            emotionPresent = true;
        }

        int spidx = -1;
        if (emotionPresent)
        {
            maxscoreE = -3;
            maxscoreI = 0;
            for (int i = 0; i < (NUM_EMOTIONS - NUM_PRIMARY_EMOTIONS); i++)
            {
                if (data[NUM_PRIMARY_EMOTIONS + i].evidence < maxscoreE) continue;
                if (data[NUM_PRIMARY_EMOTIONS + i].intensity < maxscoreI) continue;
                maxscoreE = data[NUM_PRIMARY_EMOTIONS + i].evidence;
                maxscoreI = data[NUM_PRIMARY_EMOTIONS + i].intensity;
                spidx = i;
            }
            if ((spidx != -1))
            {
                // Here is where it detects the valence
                g.DrawString(SentimentLabels[spidx], font, brushTxt,
                    (float)(data[0].rectangle.x + data[0].rectangle.w),
                    (float)data[0].rectangle.y + font.GetHeight());
                getSender().update(EmotionLabels[epidx], maxscoreI1, SentimentLabels[spidx], maxscoreI);
            }
        }

        brush.Dispose();
        brushTxt.Dispose();
        red.Dispose();
        font.Dispose();
        g.Dispose();
    }
}
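The evidence-then-intensity scan above reappears in several snippets in this section. A standalone sketch of that selection logic extracted into a helper (the name and signature are ours, not the original author's):

// Hypothetical helper: returns the index (relative to offset) of the entry
// whose evidence and intensity are jointly maximal in
// data[offset .. offset+count), or -1 if none qualifies.
static int SelectDominant(PXCMEmotion.EmotionData[] data, int offset, int count)
{
    int best = -1;
    int bestEvidence = -3;
    float bestIntensity = 0f;
    for (int i = 0; i < count; i++)
    {
        PXCMEmotion.EmotionData d = data[offset + i];
        if (d.evidence < bestEvidence) continue;
        if (d.intensity < bestIntensity) continue;
        bestEvidence = d.evidence;
        bestIntensity = d.intensity;
        best = i;
    }
    return best;
}

With that helper, the primary pass above becomes SelectDominant(data, 0, NUM_PRIMARY_EMOTIONS) and the sentiment pass becomes SelectDominant(data, NUM_PRIMARY_EMOTIONS, NUM_EMOTIONS - NUM_PRIMARY_EMOTIONS).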
public void DrawLocation(PXCMEmotion.EmotionData[] data)
{
    lock (this)
    {
        if (bitmap == null) return;
        Graphics g = Graphics.FromImage(bitmap);
        Pen red = new Pen(Color.Red, 3.0f);
        Brush brush = new SolidBrush(Color.Red);
        Font font = new Font(Font.FontFamily, 11, FontStyle.Bold);
        Brush brushTxt = new SolidBrush(Color.Cyan);

        if (Location.Checked)
        {
            Point[] points4 = new Point[]
            {
                new Point((int)data[0].rectangle.x, (int)data[0].rectangle.y),
                new Point((int)data[0].rectangle.x + (int)data[0].rectangle.w, (int)data[0].rectangle.y),
                new Point((int)data[0].rectangle.x + (int)data[0].rectangle.w, (int)data[0].rectangle.y + (int)data[0].rectangle.h),
                new Point((int)data[0].rectangle.x, (int)data[0].rectangle.y + (int)data[0].rectangle.h),
                new Point((int)data[0].rectangle.x, (int)data[0].rectangle.y)
            };
            try
            {
                g.DrawLines(red, points4);
            }
            catch
            {
                brushTxt.Dispose();
            }
            //g.DrawString(data[0].fid.ToString(), font, brushTxt, (float)(data[0].rectangle.x + data[0].rectangle.w), (float)(data[0].rectangle.y));
        }

        bool emotionPresent = false;
        int epidx = -1;
        int maxscoreE = -3;
        float maxscoreI = 0;
        for (int i = 0; i < NUM_PRIMARY_EMOTIONS; i++)
        {
            if (data[i].evidence < maxscoreE) continue;
            if (data[i].intensity < maxscoreI) continue;
            maxscoreE = data[i].evidence;
            maxscoreI = data[i].intensity;
            epidx = i;
        }
        if ((epidx != -1) && (maxscoreI > 0.4))
        {
            try
            {
                // (This snippet's EmotionData exposes an 'emotion' field;
                // the stock wrapper calls the same field 'eid'.)
                if (data[0].emotion.Equals(PXCMEmotion.Emotion.EMOTION_SENTIMENT_POSITIVE))
                {
                    LedOn();
                }
                else
                {
                    LedOff();
                }
                this.Text = data[0].emotion.ToString();
                g.DrawString(EmotionLabels[epidx], font, brushTxt,
                    (float)(data[0].rectangle.x + data[0].rectangle.w),
                    data[0].rectangle.y > 0
                        ? (float)data[0].rectangle.y
                        : (float)data[0].rectangle.h - 2 * font.GetHeight());
            }
            catch
            {
                brush.Dispose();
            }
            emotionPresent = true;
        }

        int spidx = -1;
        if (emotionPresent)
        {
            maxscoreE = -3;
            maxscoreI = 0;
            for (int i = 0; i < (NUM_EMOTIONS - NUM_PRIMARY_EMOTIONS); i++)
            {
                if (data[NUM_PRIMARY_EMOTIONS + i].evidence < maxscoreE) continue;
                if (data[NUM_PRIMARY_EMOTIONS + i].intensity < maxscoreI) continue;
                maxscoreE = data[NUM_PRIMARY_EMOTIONS + i].evidence;
                maxscoreI = data[NUM_PRIMARY_EMOTIONS + i].intensity;
                spidx = i;
            }
            if ((spidx != -1))
            {
                try
                {
                    this.Text = data[0].emotion.ToString();
                    // MessageBox.Show(data[0].emotion.ToString());
                    g.DrawString(SentimentLabels[spidx], font, brushTxt,
                        (float)(data[0].rectangle.x + data[0].rectangle.w),
                        data[0].rectangle.y > 0
                            ? (float)data[0].rectangle.y + font.GetHeight()
                            : (float)data[0].rectangle.h - font.GetHeight());
                }
                catch
                {
                    red.Dispose();
                }
            }
        }

        brush.Dispose();
        brushTxt.Dispose();
        try
        {
            red.Dispose();
        }
        finally
        {
            font.Dispose();
        }
        g.Dispose();
    }
}
public void SimplePipeline()
{
    bool sts = true;
    PXCMSenseManager pp = form.session.CreateSenseManager();
    if (pp == null)
    {
        throw new Exception("Failed to create sense manager");
    }
    disconnected = false;

    /* Set Source & Profile Index */
    PXCMCapture.DeviceInfo info = null;
    if (this.form.GetRecordState())
    {
        pp.captureManager.SetFileName(this.form.GetFileName(), true);
        form.PopulateDeviceMenu();
        if (this.form.Devices.TryGetValue(this.form.GetCheckedDevice(), out info))
        {
            pp.captureManager.FilterByDeviceInfo(info);
        }
    }
    else if (this.form.GetPlaybackState())
    {
        pp.captureManager.SetFileName(this.form.GetFileName(), false);
    }
    else
    {
        if (this.form.Devices.TryGetValue(this.form.GetCheckedDevice(), out info))
        {
            pp.captureManager.FilterByDeviceInfo(info);
        }
    }

    /* Set Module */
    pp.EnableEmotion(form.GetCheckedModule());

    /* Initialization */
    form.UpdateStatus("Init Started");
    PXCMSenseManager.Handler handler = new PXCMSenseManager.Handler()
    {
        //GZ onModuleQueryProfile = OnModuleQueryProfile
    };

    if (pp.Init(handler) >= pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        form.UpdateStatus("Streaming");
        this.timer = new FPSTimer(form);

        PXCMCaptureManager captureManager = pp.QueryCaptureManager();
        if (captureManager == null)
        {
            throw new Exception("Failed to query capture manager");
        }
        PXCMCapture.Device device = captureManager.QueryDevice();
        if (device != null && !this.form.GetPlaybackState())
        {
            device.SetDepthConfidenceThreshold(7);
        }
        //GZ device.SetProperty(PXCMCapture.Device.Property.PROPERTY_DEPTH_CONFIDENCE_THRESHOLD, 7);

        while (!form.stop)
        {
            if (pp.AcquireFrame(true) < pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                break;
            }
            if (!DisplayDeviceConnection(!pp.IsConnected()))
            {
                /* Display Results */
                PXCMEmotion ft = pp.QueryEmotion();
                //GZ DisplayPicture(pp.QueryImageByType(PXCMImage.ImageType.IMAGE_TYPE_COLOR));
                PXCMCapture.Sample sample = pp.QuerySample();

                /* Start of modified code */
                // Grab the first BMP in the folder; assume there is one for now
                string folder = Path.GetDirectoryName(Process.GetCurrentProcess().MainModule.FileName);
                string[] files = Directory.GetFiles(folder, "*.bmp");
                Bitmap bitmap = new Bitmap(files[0]);

                // Create a PXCMImage from the BMP
                PXCMImage.ImageInfo iinfo = new PXCMImage.ImageInfo();
                iinfo.width = bitmap.Width;
                iinfo.height = bitmap.Height;
                iinfo.format = PXCMImage.PixelFormat.PIXEL_FORMAT_RGB32;
                PXCMImage imageTEST = form.session.CreateImage(iinfo);

                PXCMImage.ImageData idata;
                imageTEST.AcquireAccess(PXCMImage.Access.ACCESS_WRITE, out idata);
                BitmapData bdata = new BitmapData();
                bdata.Scan0 = idata.planes[0];
                bdata.Stride = idata.pitches[0];
                bdata.PixelFormat = PixelFormat.Format32bppRgb;
                bdata.Width = bitmap.Width;
                bdata.Height = bitmap.Height;
                BitmapData bdata2 = bitmap.LockBits(
                    new Rectangle(0, 0, bitmap.Width, bitmap.Height),
                    ImageLockMode.ReadOnly | ImageLockMode.UserInputBuffer,
                    PixelFormat.Format32bppRgb,
                    bdata);
                bitmap.UnlockBits(bdata2);
                imageTEST.ReleaseAccess(idata);

                // Save the BMP
                // NOTE: idata is used here after ReleaseAccess; moving this
                // call before ReleaseAccess would be safer.
                Bitmap savebmp = idata.ToBitmap(0, bitmap.Width, bitmap.Height);
                //savebmp.Save(@"O:\unix\projects\instr\production5\research\Francis\result.bmp");

                // Put my own PXCMImage into the sample
                PXCMCapture.Sample smp = new PXCMCapture.Sample();
                smp.color = imageTEST;

                // Get the video module from the emotion instance
                PXCMVideoModule module = ft.QueryInstance<PXCMVideoModule>();
                PXCMSyncPoint sp;

                // Process the sample
                module.ProcessImageAsync(smp, out sp);

                // Synchronize, then get emotion data etc.
                sp.Synchronize();
                /* End of modified code */

                DisplayPicture(sample.color);
                DisplayLocation(ft);
                form.UpdatePanel();
            }
            pp.ReleaseFrame();
        }
    }
    else
    {
        form.UpdateStatus("Init Failed");
        sts = false;
    }

    pp.Close();
    pp.Dispose();
    if (sts)
    {
        form.UpdateStatus("Stopped");
    }
}
private void DisplayLocation(PXCMEmotion ft)
{
    uint numFaces = ft.QueryNumFaces();
    for (uint i = 0; i < numFaces; i++)
    {
        /* Retrieve emotionDet location data */
        PXCMEmotion.EmotionData[] arrData = new PXCMEmotion.EmotionData[form.NUM_EMOTIONS];
        if (ft.QueryAllEmotionData(i, arrData) >= pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            form.DrawLocation(arrData);
        }
    }
}
void OnDisable()
{
    // Disposes all modules
    Initialized = false;
    if (SenseManager == null)
    {
        return;
    }
    DisposeFunctions.ForEach(i => i.DynamicInvoke());
    if (FaceModuleOutput != null)
    {
        FaceModuleOutput.Dispose();
        FaceModuleOutput = null;
    }
    if (Emotion != null)
    {
        Emotion.Dispose();
        Emotion = null;
    }
    if (HandDataOutput != null)
    {
        SenseManager.PauseHand(true);
        HandDataOutput.Dispose();
        HandDataOutput = null;
    }
    if (ImageRgbOutput != null)
    {
        ImageRgbOutput.Dispose();
        ImageRgbOutput = null;
    }
    if (ImageDepthOutput != null)
    {
        ImageDepthOutput.Dispose();
        ImageDepthOutput = null;
    }
    if (ImageIROutput != null)
    {
        ImageIROutput.Dispose();
        ImageIROutput = null;
    }
    if (Image3DSegmentationOutput != null)
    {
        Image3DSegmentationOutput.Dispose();
        Image3DSegmentationOutput = null;
    }
    if (Projection != null)
    {
        Projection.Dispose();
        Projection = null;
    }
    if (BlobExtractor != null)
    {
        BlobExtractor.Dispose();
        BlobExtractor = null;
    }
    UvMap = null;
    PointCloud = null;
    SenseManager.Dispose();
    SenseManager = null;
}
private void DisplayLocation(PXCMEmotion ft)
{
    uint numFaces = ft.QueryNumFaces();
    for (uint i = 0; i < numFaces; i++)
    {
        /* Retrieve emotionDet location data */
        PXCMEmotion.EmotionData[] arrData = new PXCMEmotion.EmotionData[form.NUM_EMOTIONS];
        if (ft.QueryAllEmotionData(i, arrData) >= pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            // Widen the detection rectangle by the configured near/far margins
            PXCMEmotion.EmotionData[] copyData = new PXCMEmotion.EmotionData[form.NUM_EMOTIONS];
            if (Camera.nearMode)
            {
                copyData[0].rectangle.x = (uint)Camera.centerX - (uint)Camera.rightNear;
                copyData[0].rectangle.y = (uint)Camera.centerY - (uint)Camera.upNear;
                copyData[0].rectangle.w = arrData[0].rectangle.w + (uint)(2 * Camera.rightNear);
                copyData[0].rectangle.h = arrData[0].rectangle.h + (uint)(2 * Camera.upNear);
            }
            else
            {
                copyData[0].rectangle.x = (uint)Camera.centerX - (uint)Camera.rightFar;
                copyData[0].rectangle.y = (uint)Camera.centerY - (uint)Camera.upFar;
                copyData[0].rectangle.w = arrData[0].rectangle.w + (uint)(2 * Camera.rightFar);
                copyData[0].rectangle.h = arrData[0].rectangle.h + (uint)(2 * Camera.upFar);
            }
            if (!Camera.configureMode)
            {
                form.DrawLocation(copyData);
            }
            form.DrawLocation(arrData);
            //Console.WriteLine("x: " + arrData[0].rectangle.x + " " + "y: " + arrData[0].rectangle.y);
            Camera.x = arrData[0].rectangle.x;
            Camera.y = arrData[0].rectangle.y;

            // Compare the intensities of contempt (index 1), joy (4),
            // surprise (6), and the neutral sentiment (9)
            float maxIntense = arrData[1].intensity;
            int maxEmote = 1;
            if (arrData[4].intensity > maxIntense)
            {
                maxIntense = arrData[4].intensity;
                maxEmote = 4;
            }
            if (arrData[6].intensity > maxIntense)
            {
                maxIntense = arrData[6].intensity;
                maxEmote = 6;
            }
            if (arrData[9].intensity > maxIntense)
            {
                maxIntense = arrData[9].intensity;
                maxEmote = 9;
            }
            // Contempt
            if (maxEmote == 1)
            {
                Camera.shouldContempt = true;
                Camera.shouldSmile = false;
                Camera.shouldSurprise = false;
                Camera.shouldNeutral = false;
            }
            // Smile
            else if (maxEmote == 4)
            {
                Camera.shouldContempt = false;
                Camera.shouldSmile = true;
                Camera.shouldSurprise = false;
                Camera.shouldNeutral = false;
            }
            // Surprise
            else if (maxEmote == 6)
            {
                Camera.shouldContempt = false;
                Camera.shouldSmile = false;
                Camera.shouldSurprise = true;
                Camera.shouldNeutral = false;
            }
            // Neutral
            else
            {
                Camera.shouldContempt = false;
                Camera.shouldSmile = false;
                Camera.shouldSurprise = false;
                Camera.shouldNeutral = true;
            }
            // Not strong enough to decide
            if (maxIntense < 0.7)
            {
                Camera.shouldContempt = false;
                Camera.shouldSmile = false;
                Camera.shouldSurprise = false;
                Camera.shouldNeutral = true;
            }
        }
        else
        {
            // (The original assigns stopY to both axes.)
            Camera.x = Camera.stopY;
            Camera.y = Camera.stopY;
        }
    }
}
private void ProcessingThread()
{
    // Start AcquireFrame/ReleaseFrame loop - MAIN PROCESSING LOOP
    while (senseManager.AcquireFrame(true) >= pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        if (firstFrame == true)
        {
            firstFrame = false;
            //pipeClient.SendMessage(CAMERA_CONNECTED_MESSAGE);
        }

        // Get a sample from the SenseManager to convert to a bitmap and show
        PXCMCapture.Sample sample = senseManager.QuerySample();
        Bitmap colorBitmap;
        PXCMImage.ImageData colorData = null;

        // Get color/IR image data
        if (cameraMode == "Color")
            sample.color.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB24, out colorData);
        else if (cameraMode == "IR")
            sample.ir.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB24, out colorData);
        else if (cameraMode == "Depth")
            ; // -> broken!
        // sample.depth.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_DEPTH, out colorData);
        else
            sample.color.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB24, out colorData);

        // Convert it to a bitmap
        colorBitmap = colorData.ToBitmap(0, sample.color.info.width, sample.color.info.height);

        // Retrieve hand, face, and emotion data
        hand = senseManager.QueryHand();
        face = senseManager.QueryFace();
        emotion = senseManager.QueryEmotion();

        // Process hand data
        if (hand != null)
        {
            // Retrieve the most recent processed data
            handData = hand.CreateOutput();
            handData.Update();
            handWaving = handData.IsGestureFired("wave", out gestureData);
        }

        // Process face data
        if (face != null)
        {
            // Retrieve the most recent processed data
            faceData = face.CreateOutput();
            faceData.Update();
            numFacesDetected = faceData.QueryNumberOfDetectedFaces();
            if (numFacesDetected > 0)
            {
                // for (Int32 i = 0; i < numFacesDetected; i++) --> MULTIPLE FACE DETECTION DISABLED, UNCOMMENT TO INCLUDE
                // {
                //     PXCMFaceData.Face singleFace = faceData.QueryFaceByIndex(i); --> FOR MULTIPLE FACE DETECTION

                // Get all possible data from the frame
                PXCMFaceData.Face singleFaceData = faceData.QueryFaceByIndex(0); // only getting the first face!
                PXCMFaceData.ExpressionsData singleExprData = singleFaceData.QueryExpressions();
                PXCMFaceData.DetectionData detectionData = singleFaceData.QueryDetection();
                PXCMFaceData.LandmarksData landmarksData = singleFaceData.QueryLandmarks();
                PXCMFaceData.PoseData poseData = singleFaceData.QueryPose();

                // Work on face location data from detectionData
                if (detectionData != null)
                {
                    // vars are defined globally
                    detectionData.QueryBoundingRect(out boundingRect);
                    detectionData.QueryFaceAverageDepth(out averageDepth);
                }

                // Work on getting landmark data
                if (landmarksData != null)
                {
                    // var is defined globally
                    landmarksData.QueryPoints(out landmarkPoints);
                }

                // Work on getting Euler angles for the face pose data
                if (poseData != null)
                {
                    // vars are defined globally
                    poseData.QueryPoseAngles(out eulerAngles);
                    poseData.QueryPoseQuaternion(out quaternionAngles);
                }

                // Do work on all face expression data from singleExprData
                if (singleExprData != null)
                {
                    // Get scores and intensities for right and left eye closing -
                    // 22 possible expressions --> put into hashtable
                    PXCMFaceData.ExpressionsData.FaceExpressionResult score;

                    // This gets a list of enum names as strings
                    var enumNames = Enum.GetNames(typeof(PXCMFaceData.ExpressionsData.FaceExpression));

                    // For all enum names, query the intensity
                    for (int j = 0; j < enumNames.Length; j++)
                    {
                        PXCMFaceData.ExpressionsData.FaceExpressionResult innerScore;
                        singleExprData.QueryExpression((PXCMFaceData.ExpressionsData.FaceExpression)(j), out innerScore);
                        //Console.WriteLine((PXCMFaceData.ExpressionsData.FaceExpression)(j));
                        exprTable[enumNames[j]] = innerScore.intensity;
                    }

                    // Attempt to write to file if there are any significant events
                    /*
                    // Check if everything is 0
                    bool significantEntry = false;
                    foreach (DictionaryEntry entry in exprTable)
                    {
                        if (Convert.ToInt32(entry.Value.ToString()) != 0)
                        {
                            significantEntry = true;
                            break;
                        }
                    }
                    if (significantEntry)
                    */
                    writeSignificantToFile(exprTable, boundingRect, averageDepth, landmarkPoints, eulerAngles, quaternionAngles);

                    singleExprData.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_EYES_CLOSED_LEFT, out score);
                    lEyeClosedIntensity = score.intensity;
                    singleExprData.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_EYES_CLOSED_RIGHT, out score);
                    rEyeClosedIntensity = score.intensity;

                    // Eye-closed logic -> will be reset in the UI thread after some number of frames
                    if (lEyeClosedIntensity >= EYE_CLOSED_DETECT_THRESHOLD)
                        lEyeClosed = true;
                    if (rEyeClosedIntensity >= EYE_CLOSED_DETECT_THRESHOLD)
                        rEyeClosed = true;
                }
                // }
            }
        }

        // Process emotion data
        if (emotion != null)
        {
            int numFaces = emotion.QueryNumFaces();
            for (int fid = 0; fid < numFaces; fid++)
            {
                // TODO - MULTIPLE FACE IMPLEMENTATION?

                // Retrieve all estimation data
                PXCMEmotion.EmotionData[] arrData = new PXCMEmotion.EmotionData[10];
                emotion.QueryAllEmotionData(fid, out arrData);

                // Find the primary emotion with maximum evidence
                int idx_outstanding_emotion = 0;
                int max_evidence = arrData[0].evidence;
                for (int k = 1; k < 7; k++)
                {
                    if (arrData[k].evidence >= max_evidence)
                    {
                        max_evidence = arrData[k].evidence;
                        idx_outstanding_emotion = k;
                    }
                }
                currentEmotion = arrData[idx_outstanding_emotion].eid;
                //Console.WriteLine(currentEmotion.ToString());
                emotionEvidence = max_evidence;
                // Console.WriteLine(currentEmotion.ToString() + ":" + emotionEvidence.ToString());
            }
        }

        // Update the user interface
        UpdateUI(colorBitmap);

        // Release the frame
        if (handData != null) handData.Dispose();
        // colorBitmap.Dispose();
        sample.color.ReleaseAccess(colorData);
        senseManager.ReleaseFrame();
    }
}
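ProcessingThread runs off the UI thread, so UpdateUI (not shown) has to marshal the bitmap back onto the UI thread before touching any controls. A hypothetical WinForms version (the pictureBox control is an assumption):

// Hypothetical: marshal the new frame onto the UI thread before touching controls.
private void UpdateUI(Bitmap colorBitmap)
{
    if (pictureBox.InvokeRequired)
    {
        pictureBox.Invoke(new Action<Bitmap>(UpdateUI), colorBitmap);
        return;
    }
    pictureBox.Image = new Bitmap(colorBitmap); // copy, since the caller may dispose its bitmap
}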
public void SimplePipeline()
{
    // (UtilMPipeline-style code written against the older Perceptual Computing SDK)
    bool sts = true;
    MyUtilMPipeline pp = null;
    disconnected = false;

    /* Set Source & Landmark Profile Index */
    if (form.GetRecordState())
    {
        pp = new MyUtilMPipeline(0, form.GetFileName(), true);
        pp.QueryCapture().SetFilter(form.GetCheckedDevice());
    }
    else if (form.GetPlaybackState())
    {
        pp = new MyUtilMPipeline(0, form.GetFileName(), false);
    }
    else
    {
        pp = new MyUtilMPipeline(0);
        pp.QueryCapture().SetFilter(form.GetCheckedDevice());
    }

    /* Set Module */
    pp.EnableEmotion(form.GetCheckedModule());

    /* Initialization */
    form.UpdateStatus("Init Started");
    if (pp.Init())
    {
        form.UpdateStatus("Streaming");
        while (!form.stop)
        {
            if (!pp.AcquireFrame(true))
            {
                break;
            }
            if (!DisplayDeviceConnection(pp.IsDisconnected()))
            {
                /* Display Results */
                PXCMEmotion ft = pp.QueryEmotion();
                DisplayPicture(pp.QueryImage(PXCMImage.ImageType.IMAGE_TYPE_COLOR));
                DisplayLocation(ft);
                form.UpdatePanel();
            }
            pp.ReleaseFrame();
        }
    }
    else
    {
        form.UpdateStatus("Init Failed");
        sts = false;
    }
    pp.Close();
    pp.Dispose();
    if (sts)
    {
        form.UpdateStatus("Stopped");
    }
}
private void GetEmoData(PXCMEmotion ft)
{
    uint numFaces = ft.QueryNumFaces();
    if (numFaces == 0)
        warningNoFaceDetected = true;
    else if (numFaces > 1)
        warningNumFaces = true;
    else
    {
        warningNumFaces = false;
        warningNoFaceDetected = false;
    }

    PXCMEmotion.EmotionData[] arrData = new PXCMEmotion.EmotionData[NUM_EMOTIONS];
    if (ft.QueryAllEmotionData(0, arrData) >= pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        //ProcessEmoData(arrData); // original function
        ProcessMyEmoData(AdaptData(arrData));
    }
}