Example #1
    // Update is called once per frame
    void Update()
    {
        //Debug.Log ("1");

        PXCMEmotion ft = SenseToolkitManager.Instance.Emotion;

        //int numfaces = ft.QueryNumFaces ();
        //Debug.Log (numfaces.ToString);
        if (ft != null)
        {
            //			//GZ DisplayPicture(pp.QueryImageByType(PXCMImage.ImageType.IMAGE_TYPE_COLOR));
            //			PXCMCapture.Sample sample = stkm.SenseManager.QueryEmotionSample ();
            //			if (sample == null) {
            //				stkm.SenseManager.ReleaseFrame ();
            //				Debug.Log ("3");
            //				return;
            //			}

            //Debug.Log ("4");
            PXCMEmotion.EmotionData[] arrData = new PXCMEmotion.EmotionData[NUM_EMOTIONS];
            if (ft.QueryAllEmotionData(0, out arrData) >= pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                DrawLocation(arrData);
            }
        }
    }
 public PXCMEmotion.EmotionData[] AdaptData(PXCMEmotion.EmotionData[] _arrData)
 {
     PXCMEmotion.EmotionData[] new_arrData = new PXCMEmotion.EmotionData[3];
     new_arrData[0] = _arrData[0]; // anger
     new_arrData[1] = _arrData[4]; // joy
     new_arrData[2] = _arrData[5]; // sadness
     return new_arrData;
 }
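AdaptData above keeps only the anger, joy and sadness slots of the full emotion array. As a hedged illustration (DominantEmotion, TrackedLabels and the 0.4 threshold are hypothetical and not part of the original sample), the adapted array could be reduced to a single label by comparing intensities:

 // Hypothetical consumer of AdaptData's output: picks the strongest of the
 // three tracked emotions by intensity, falling back to "Neutral" when weak.
 private static readonly string[] TrackedLabels = { "Anger", "Joy", "Sadness" };

 public string DominantEmotion(PXCMEmotion.EmotionData[] adapted, float minIntensity = 0.4f)
 {
     int best = 0;
     for (int i = 1; i < adapted.Length; i++)
     {
         if (adapted[i].intensity > adapted[best].intensity)
         {
             best = i;
         }
     }
     return adapted[best].intensity >= minIntensity ? TrackedLabels[best] : "Neutral";
 }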
Example #3
        private void DisplayLocation(PXCMEmotion ft)
        {
            int numFaces = ft.QueryNumFaces();

            for (int i = 0; i < numFaces; i++)
            {
                /* Retrieve emotionDet location data */
                PXCMEmotion.EmotionData[] arrData = new PXCMEmotion.EmotionData[form.NUM_EMOTIONS];
                if (ft.QueryAllEmotionData(i, out arrData) >= pxcmStatus.PXCM_STATUS_NO_ERROR)
                {
                    form.DrawLocation(arrData);
                }
            }
        }
Example #4
        private void DisplayLocation(PXCMEmotion ft)
        {
            uint numFaces = ft.QueryNumFaces();

            for (uint i = 0; i < numFaces; i++)
            {
                /* Retrieve emotionDet location data */
                PXCMEmotion.EmotionData[] arrData = new PXCMEmotion.EmotionData[form.NUM_EMOTIONS];
                if (ft.QueryAllEmotionData(i, arrData) >= pxcmStatus.PXCM_STATUS_NO_ERROR)
                {
                    PXCMEmotion.EmotionData[] copyData = new PXCMEmotion.EmotionData[form.NUM_EMOTIONS];
                    if (Camera.nearMode)
                    {
                        copyData[0].rectangle.x = (uint)Camera.centerX - (uint)Camera.rightNear;
                        copyData[0].rectangle.y = (uint)Camera.centerY - (uint)Camera.upNear;
                        copyData[0].rectangle.w = arrData[0].rectangle.w + (uint)(2 * Camera.rightNear);
                        copyData[0].rectangle.h = arrData[0].rectangle.h + (uint)(2 * Camera.upNear);
                    }
                    else
                    {
                        copyData[0].rectangle.x = (uint)Camera.centerX - (uint)Camera.rightFar;
                        copyData[0].rectangle.y = (uint)Camera.centerY - (uint)Camera.upFar;
                        copyData[0].rectangle.w = arrData[0].rectangle.w + (uint)(2 * Camera.rightFar);
                        copyData[0].rectangle.h = arrData[0].rectangle.h + (uint)(2 * Camera.upFar);
                    }
                    if (!Camera.configureMode)
                    {
                        form.DrawLocation(copyData);
                    }
                    form.DrawLocation(arrData);

                    //Console.WriteLine("x: " + arrData[0].rectangle.x + " " + "y: " + arrData[0].rectangle.y);
                    Camera.x = arrData[0].rectangle.x;
                    Camera.y = arrData[0].rectangle.y;
                    float maxIntense = arrData[1].intensity;
                    int   maxEmote   = 1;

                    if (arrData[4].intensity > maxIntense)
                    {
                        maxIntense = arrData[4].intensity;
                        maxEmote   = 4;
                    }

                    if (arrData[6].intensity > maxIntense)
                    {
                        maxIntense = arrData[6].intensity;
                        maxEmote   = 6;
                    }
                    if (arrData[9].intensity > maxIntense)
                    {
                        maxIntense = arrData[9].intensity;
                        maxEmote   = 9;
                    }
                    //Contempt
                    if (maxEmote == 1)
                    {
                        Camera.shouldContempt = true;
                        Camera.shouldSmile    = false;
                        Camera.shouldSurprise = false;
                        Camera.shouldNeutral  = false;
                    }
                    //Smile
                    else if (maxEmote == 4)
                    {
                        Camera.shouldContempt = false;
                        Camera.shouldSmile    = true;
                        Camera.shouldSurprise = false;
                        Camera.shouldNeutral  = false;
                    }
                    //Surprise
                    else if (maxEmote == 6)
                    {
                        Camera.shouldContempt = false;
                        Camera.shouldSmile    = false;
                        Camera.shouldSurprise = true;
                        Camera.shouldNeutral  = false;
                    }
                    //Neutral
                    else
                    {
                        Camera.shouldContempt = false;
                        Camera.shouldSmile    = false;
                        Camera.shouldSurprise = false;
                        Camera.shouldNeutral  = true;
                    }
                    //Not strong enough to say
                    if (maxIntense < 0.7)
                    {
                        Camera.shouldContempt = false;
                        Camera.shouldSmile    = false;
                        Camera.shouldSurprise = false;
                        Camera.shouldNeutral  = true;
                    }
                }
                else
                {
                    Camera.x = Camera.stopY;
                    Camera.y = Camera.stopY;
                }
            }
        }
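The dominant-emotion branching above repeats the same four flag assignments in every case. A minimal sketch, assuming the same static Camera fields and the 0.7 threshold used in this example (SetEmotionFlags is a hypothetical helper, not part of the original), could collapse it into one method:

        // Hypothetical helper: clears all flags, then enables the one matching the
        // winning emotion index; weak detections fall back to neutral, as in Example #4.
        private static void SetEmotionFlags(int maxEmote, float maxIntense)
        {
            Camera.shouldContempt = false;
            Camera.shouldSmile    = false;
            Camera.shouldSurprise = false;
            Camera.shouldNeutral  = false;

            if (maxIntense < 0.7f)       // not strong enough to say
            {
                Camera.shouldNeutral = true;
            }
            else if (maxEmote == 1)      // contempt
            {
                Camera.shouldContempt = true;
            }
            else if (maxEmote == 4)      // smile
            {
                Camera.shouldSmile = true;
            }
            else if (maxEmote == 6)      // surprise
            {
                Camera.shouldSurprise = true;
            }
            else                         // neutral
            {
                Camera.shouldNeutral = true;
            }
        }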
Example #5
 private void DisplayLocation(PXCMEmotion ft)
 {
     uint numFaces = ft.QueryNumFaces();
     for (uint i = 0; i < numFaces; i++)
     {
         /* Retrieve emotionDet location data */
         PXCMEmotion.EmotionData[] arrData = new PXCMEmotion.EmotionData[form.NUM_EMOTIONS];
         if (ft.QueryAllEmotionData(i, arrData) >= pxcmStatus.PXCM_STATUS_NO_ERROR)
         {
             form.DrawLocation(arrData);
         }
     }
 }
 private void GetEmoData(PXCMEmotion ft)
 {
     uint numFaces = ft.QueryNumFaces();
     if (numFaces == 0) warningNoFaceDetected = true;
     else if (numFaces > 1) warningNumFaces = true;
     else
     {
         warningNumFaces = false;
         warningNoFaceDetected = false;
     }
     PXCMEmotion.EmotionData[] arrData = new PXCMEmotion.EmotionData[NUM_EMOTIONS];
     if (ft.QueryAllEmotionData(0, arrData) >= pxcmStatus.PXCM_STATUS_NO_ERROR)
     {
         //ProcessEmoData(arrData); // Original Function
         ProcessMyEmoData(AdaptData(arrData));
     }
 }
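GetEmoData relies on an AdaptData helper that is not included in this snippet. A version consistent with Example #1, which keeps only the anger, joy and sadness entries of the full array, might look like this (the slot indices are taken from that example):

 // Sketch of the AdaptData helper referenced above, mirroring Example #1:
 // keeps only the anger, joy and sadness slots of the full emotion array.
 private PXCMEmotion.EmotionData[] AdaptData(PXCMEmotion.EmotionData[] _arrData)
 {
     PXCMEmotion.EmotionData[] new_arrData = new PXCMEmotion.EmotionData[3];
     new_arrData[0] = _arrData[0]; // anger
     new_arrData[1] = _arrData[4]; // joy
     new_arrData[2] = _arrData[5]; // sadness
     return new_arrData;
 }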
Example #7
        private void ProcessingThread()
        {
            // Start AcquireFrame/ReleaseFrame loop - MAIN PROCESSING LOOP
            while (senseManager.AcquireFrame(true) >= pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                if (firstFrame)
                {
                    firstFrame = false;
                    //pipeClient.SendMessage(CAMERA_CONNECTED_MESSAGE);
                }

                //Get sample from the sensemanager to convert to bitmap and show
                PXCMCapture.Sample sample = senseManager.QuerySample();
                Bitmap colorBitmap;
                PXCMImage.ImageData colorData = null;

                // Get color/ir image data
                if (cameraMode == "Color")
                    sample.color.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB24, out colorData);
                else if (cameraMode == "IR")
                    sample.ir.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB24, out colorData);
                else if (cameraMode == "Depth")
                    ;// -> broken! // sample.depth.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_DEPTH, out colorData);
                else
                    sample.color.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB24, out colorData);

                //convert it to bitmap
                colorBitmap = colorData.ToBitmap(0, sample.color.info.width, sample.color.info.height);

                // Retrieve hand and face data AND EMOTION DATA
                hand = senseManager.QueryHand();
                face = senseManager.QueryFace();
                emotion = senseManager.QueryEmotion();

                //Process hand data
                if (hand != null)
                {
                    // Retrieve the most recent processed data
                    handData = hand.CreateOutput();
                    handData.Update();
                    handWaving = handData.IsGestureFired("wave", out gestureData);
                }

                //Process face data
                if (face != null)
                {
                    // Retrieve the most recent processed data
                    faceData = face.CreateOutput();
                    faceData.Update();
                    numFacesDetected = faceData.QueryNumberOfDetectedFaces();
                    if (numFacesDetected > 0)
                    {
                        // for (Int32 i = 0; i < numFacesDetected; i++) --> MULTIPLE FACE DETECTION DISABLED, UNCOMMENT TO INCLUDE
                        // {
                        // PXCMFaceData.Face singleFace = faceData.QueryFaceByIndex(i); --> FOR MULTIPLE FACE DETECTION

                        //get all possible data from frame
                        PXCMFaceData.Face singleFaceData = faceData.QueryFaceByIndex(0); //only getting first face!
                        PXCMFaceData.ExpressionsData singleExprData = singleFaceData.QueryExpressions();
                        PXCMFaceData.DetectionData detectionData = singleFaceData.QueryDetection();
                        PXCMFaceData.LandmarksData landmarksData = singleFaceData.QueryLandmarks();
                        PXCMFaceData.PoseData poseData = singleFaceData.QueryPose();

                        //Work on face location data from detectionData
                        if (detectionData != null)
                        {
                            // vars are defined globally
                            detectionData.QueryBoundingRect(out boundingRect);
                            detectionData.QueryFaceAverageDepth(out averageDepth);
                        }

                        //Work on getting landmark data
                        if (landmarksData != null)
                        {
                            //var is defined globally
                            landmarksData.QueryPoints(out landmarkPoints);
                        }

                        //Work on getting euler angles for face pose data
                        if (poseData != null)
                        {

                            //var is defined globally
                            poseData.QueryPoseAngles(out eulerAngles);
                            poseData.QueryPoseQuaternion(out quaternionAngles);

                        }

                        //Do work on all face location data from singleExprData
                        if (singleExprData != null)
                        {
                            //get scores and intensities for right and left eye closing - 22 possible expressions --> put into hashtable
                            PXCMFaceData.ExpressionsData.FaceExpressionResult score;

                            //this gets a list of enum names as strings
                            var enumNames = Enum.GetNames(typeof(PXCMFaceData.ExpressionsData.FaceExpression));
                            //for each expression enum value, query its intensity and store it in the table
                            for (int j = 0; j < enumNames.Length; j++)
                            {
                                PXCMFaceData.ExpressionsData.FaceExpressionResult innerScore;
                                singleExprData.QueryExpression((PXCMFaceData.ExpressionsData.FaceExpression)(j), out innerScore);

                                //Console.WriteLine((PXCMFaceData.ExpressionsData.FaceExpression)(j));
                                exprTable[enumNames[j]] = innerScore.intensity;

                            }

                            //Write the expression table and face data to file
                            //(the original "only write when some entry is non-zero" check is left out,
                            // so the call is currently unconditional)
                            writeSignificantToFile(exprTable, boundingRect, averageDepth, landmarkPoints, eulerAngles, quaternionAngles);

                            singleExprData.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_EYES_CLOSED_LEFT, out score);
                            lEyeClosedIntensity = score.intensity;

                            singleExprData.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_EYES_CLOSED_RIGHT, out score);
                            rEyeClosedIntensity = score.intensity;

                            //eye closed logic -> will be reset in UI thread after some number of frames
                            if (lEyeClosedIntensity >= EYE_CLOSED_DETECT_THRESHOLD)
                                lEyeClosed = true;

                            if (rEyeClosedIntensity >= EYE_CLOSED_DETECT_THRESHOLD)
                                rEyeClosed = true;
                        }

                        // }
                    }

                }

                if (emotion != null)
                {
                    int numFaces = emotion.QueryNumFaces();
                    for (int fid = 0; fid < numFaces; fid++)
                    {
                        //TODO - MULTIPLE FACE IMPLEMENTATION?
                        //retrieve all est data
                        PXCMEmotion.EmotionData[] arrData = new PXCMEmotion.EmotionData[10];
                        emotion.QueryAllEmotionData(fid, out arrData);

                        //find emotion with maximum evidence
                        int idx_outstanding_emotion = 0;
                        int max_evidence = arrData[0].evidence;
                        for (int k = 1; k < 7; k++)
                        {
                            if (arrData[k].evidence >= max_evidence)
                            {
                                max_evidence = arrData[k].evidence;
                                idx_outstanding_emotion = k;
                            }
                        }

                        currentEmotion = arrData[idx_outstanding_emotion].eid;
                        //Console.WriteLine(currentEmotion.ToString());
                        emotionEvidence = max_evidence;

                       // Console.WriteLine(currentEmotion.ToString() + ":" + emotionEvidence.ToString());

                    }
                }

                // Update the user interface
                UpdateUI(colorBitmap);

                // Release the frame
                if (handData != null) handData.Dispose();
               // colorBitmap.Dispose();
                sample.color.ReleaseAccess(colorData);
                senseManager.ReleaseFrame();

            }
        }
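        // Sketch only (not part of the original sample): the max-evidence search from the
        // emotion block in ProcessingThread, factored into a hypothetical helper. Assumes
        // evidence is an int field on PXCMEmotion.EmotionData, as used above.
        private static int IndexOfMaxEvidence(PXCMEmotion.EmotionData[] arrData, int count)
        {
            int best = 0;
            for (int k = 1; k < count; k++)
            {
                // ties go to the later index, matching the >= comparison above
                if (arrData[k].evidence >= arrData[best].evidence)
                {
                    best = k;
                }
            }
            return best;
        }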
        //Update processing for the face frame
        private void updateFaceFrame()
        {
            // Get the frame data
            PXCMCapture.Sample sample = senseManager.QuerySample();
            if (sample != null)
            {
                UpdateColorImage(sample.color);
            }

            this.emotionDet = this.senseManager.QueryEmotion();
            if (this.emotionDet != null)
            {
                //Update the face data from the SenseManager module
                this.faceData.Update();

                //Retrieve information and draw for each detected face
                for (int index = 0; index <= this.faceData.QueryNumberOfDetectedFaces() - 1; index++)
                {
                    var face = this.faceData.QueryFaceByIndex(index);
                    if (face != null)
                    {
                        // Get the face position here, from the color stream
                        var detection = face.QueryDetection();
                        if (detection != null)
                        {
                            PXCMRectI32 faceRect;
                            detection.QueryBoundingRect(out faceRect);

                            //Adjust the rectangle to match the face position
                            TranslateTransform transform = new TranslateTransform(faceRect.x, faceRect.y);
                            rect[index].Width = faceRect.w;
                            rect[index].Height = faceRect.h;
                            rect[index].Stroke = Brushes.Blue;
                            rect[index].StrokeThickness = 3;
                            rect[index].RenderTransform = transform;

                            //Get the facial-expression data from the face data
                            var expressionData = face.QueryExpressions();
                            if (expressionData != null)
                            {
                                PXCMFaceData.ExpressionsData.FaceExpressionResult expressionResult;
                                //Position the expression labels relative to the face
                                expression_tb[index, 0].RenderTransform = new TranslateTransform(transform.X, transform.Y + faceRect.h + 15);
                                expression_tb[index, 1].RenderTransform = new TranslateTransform(transform.X, transform.Y + faceRect.h + 30);
                                expression_tb[index, 2].RenderTransform = new TranslateTransform(transform.X, transform.Y + faceRect.h + 45);

                                //How far the mouth is open
                                if (expressionData.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_MOUTH_OPEN, out expressionResult))
                                {
                                    expression_tb[index, 0].Text = "MOUTH_OPEN:" + expressionResult.intensity;
                                }

                                //How far the tongue is out
                                if (expressionData.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_TONGUE_OUT, out expressionResult))
                                {
                                    expression_tb[index, 1].Text = "TONGUE_OUT:" + expressionResult.intensity;
                                }

                                //Degree of smiling
                                if (expressionData.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_SMILE, out expressionResult))
                                {
                                    expression_tb[index, 2].Text = "SMILE:" + expressionResult.intensity;
                                }

                                //End of the expression-detection features
                                //////////////////////////////////////

                                //////////////////////////////////////
                                //Added: emotion recognition starts here
                                //Added: get the emotion data
                                PXCMEmotion.EmotionData[] datas = new PXCMEmotion.EmotionData[NUM_PRIMARY_EMOTIONS+NUM_SENTIMENT_EMOTIONS];
                                emotionDet.QueryAllEmotionData(index, out datas);

                                //Added: estimate the primary emotion
                                int maxscoreE = -3;
                                float maxscoreI = 0;
                                int idx_outstanding_emotion = -1;       //the emotion index that is finally selected

                                for (int emotionIndex = 0; emotionIndex <= NUM_PRIMARY_EMOTIONS-1; emotionIndex++)
                                {
                                    if (datas != null) {
                                        if (datas[emotionIndex].evidence >= maxscoreE
                                        && datas[emotionIndex].intensity >= maxscoreI)
                                        {
                                            //Update both values when both are the largest so far
                                            maxscoreE = datas[emotionIndex].evidence;//compare the emotion evidence
                                            maxscoreI = datas[emotionIndex].intensity;//compare the emotion intensity
                                            //primaryData = datas[emotionIndex];
                                            idx_outstanding_emotion = emotionIndex;
                                        }
                                    }

                                }

                                if (idx_outstanding_emotion != -1)
                                {
                                    emotion_tb[index, 0].RenderTransform = new TranslateTransform(faceRect.x, faceRect.y - 30);
                                    emotion_tb[index, 0].Text = "Emotion_PRIMARY:" + EmotionLabels[idx_outstanding_emotion];
                                }

                                //Judge that an emotion is present when the intensity exceeds a threshold
                                if (maxscoreI > 0.4)
                                {
                                    //Added: estimate the sentiment
                                    //Same procedure as the primary-emotion estimation, so comments are omitted
                                    //PXCMEmotion.EmotionData primarySent = null;
                                    int idx_sentiment_emotion = -1;
                                    int s_maxscoreE = -3;
                                    float s_maxscoreI = 0.0f;
                                    for (int sentimentIndex = 0; sentimentIndex < NUM_SENTIMENT_EMOTIONS; sentimentIndex++)
                                    {
                                        if (datas != null)
                                        {
                                            //Sentiment entries follow the primary emotions in the array, hence the offset
                                            if (datas[NUM_PRIMARY_EMOTIONS + sentimentIndex].evidence > s_maxscoreE && datas[NUM_PRIMARY_EMOTIONS + sentimentIndex].intensity > s_maxscoreI)
                                            {
                                                s_maxscoreE = datas[NUM_PRIMARY_EMOTIONS + sentimentIndex].evidence;
                                                s_maxscoreI = datas[NUM_PRIMARY_EMOTIONS + sentimentIndex].intensity;
                                                //primarySent = datas[sentimentIndex];
                                                idx_sentiment_emotion = sentimentIndex;
                                            }
                                        }
                                    }
                                    if (idx_sentiment_emotion != -1)
                                    {
                                        emotion_tb[index, 1].RenderTransform = new TranslateTransform(faceRect.x, faceRect.y - 60);
                                        emotion_tb[index, 1].Text = "Emo_SENTIMENT:" + EmotionLabels[idx_sentiment_emotion];
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
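In updateFaceFrame the primary emotions occupy the first NUM_PRIMARY_EMOTIONS entries of the array and the sentiments follow them. As a hedged sketch only (FindOutstanding is a hypothetical helper, not part of the sample, and it uses >= comparisons for both passes), the two search loops above could share one routine that scans a slice and returns the winning local index:

        // Hypothetical helper: scans count entries starting at offset and returns the
        // local index of the entry with the highest evidence and intensity seen so far,
        // or -1 when the array is null or no entry beats the initial thresholds.
        private static int FindOutstanding(PXCMEmotion.EmotionData[] datas, int offset, int count)
        {
            if (datas == null) return -1;

            int   bestIdx       = -1;
            int   bestEvidence  = -3;
            float bestIntensity = 0.0f;
            for (int i = 0; i < count; i++)
            {
                if (datas[offset + i].evidence >= bestEvidence && datas[offset + i].intensity >= bestIntensity)
                {
                    bestEvidence  = datas[offset + i].evidence;
                    bestIntensity = datas[offset + i].intensity;
                    bestIdx       = i;
                }
            }
            return bestIdx;
        }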