Example #1
        private void TrackPose(PXCMFaceData.Face faceDataFace)
        {
            PXCMFaceData.PoseData poseData = faceDataFace.QueryPose();

            PXCMFaceData.HeadPosition    headPosition;
            PXCMFaceData.PoseEulerAngles poseAngles;
            //TODO: Investigate the quaternion pose representation (PXCMFaceData.PoseQuaternion)
            //PXCMFaceData.PoseQuaternion pose;

            if (poseData != null && poseData.QueryHeadPosition(out headPosition) && poseData.QueryPoseAngles(out poseAngles))
            {
                UserHeadPose.Tracked = true;
                UserHeadPose.Center  = new Point()
                {
                    X = headPosition.headCenter.x,
                    Y = headPosition.headCenter.y,
                    Z = headPosition.headCenter.z
                };
                UserHeadPose.Yaw   = poseAngles.yaw;
                UserHeadPose.Pitch = poseAngles.pitch;
                UserHeadPose.Roll  = poseAngles.roll;
            }
            else
            {
                UserHeadPose.Reset();
            }
        }
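For reference, the quaternion representation mentioned in the TODO above is exposed by the same PoseData object via QueryPoseQuaternion (it is used in Example #10 below). A minimal guarded sketch; the helper name here is hypothetical, not part of the SDK:

        private static bool TryGetPoseQuaternion(PXCMFaceData.Face face, out PXCMFaceData.PoseQuaternion poseQuaternion)
        {
            poseQuaternion = default(PXCMFaceData.PoseQuaternion);
            PXCMFaceData.PoseData poseData = face != null ? face.QueryPose() : null;
            // QueryPoseQuaternion reports the head orientation as a quaternion instead of Euler angles
            return poseData != null && poseData.QueryPoseQuaternion(out poseQuaternion);
        }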
Example #2
        /// <summary>
        /// Saves the data for the selected 32 landmark points
        /// </summary>
        /// <param name="qface"></param>
        /// <param name="frameCount"></param>
        private void Savedata_less(PXCMFaceData.Face qface, int frameCount)
        {

            PXCMFaceData.PoseData        posedata = qface.QueryPose();
            PXCMFaceData.LandmarksData   Idata    = qface.QueryLandmarks();
            PXCMFaceData.LandmarkPoint[] points;
            PXCMFaceData.PoseEulerAngles angles;
            PXCMFaceData.HeadPosition    headpostion;

            posedata.QueryPoseAngles(out angles);
            posedata.QueryHeadPosition(out headpostion);
            Idata.QueryPoints(out points);

            string time = DateTime.Now.ToString("yyyy-MM-dd") + " " + DateTime.Now.ToString("hh:mm:ss:fff"); // note: "hh" is the 12-hour clock; "HH" would give 24-hour timestamps

            sw_less.Write(frameCount.ToString().PadRight(5) + '\t' + time + '\t' + expressionNumber.ToString() + '\t');

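            // Indices (into the 78 detected landmarks) of the 32 points exported per frame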
            int[] a = new int[32] {
                76, 77, 12, 16, 14, 10, 20, 24, 18, 22, 71, 0, 4, 74, 5, 9, 29, 26, 31, 30, 32, 39, 33, 47, 46, 48, 51, 52, 50, 56, 66, 61
            };
            int t = 0;

            for (int i = 0; i < 32; i++)
            {
                //string LandmarkPointName = "points[a[t]].source.index";
                string LandmarkPointName = MarkPointName(points[a[t]].source.index);
                float  Positionworld_x   = points[a[t]].world.x;
                float  Positionworld_y   = points[a[t]].world.y;
                float  Positionworld_z   = points[a[t]].world.z;

                float PositionImage_x = points[a[t]].image.x;
                float PositionImage_y = points[a[t++]].image.y;

                sw_less.Write((a[i]).ToString() + '\t'
                              + LandmarkPointName.ToString() + '\t'
                              + Positionworld_x.ToString().PadRight(25) + '\t'
                              + Positionworld_y.ToString().PadRight(25) + '\t'
                              + Positionworld_z.ToString().PadRight(25) + '\t'
                              + PositionImage_x.ToString().PadRight(25) + '\t'
                              + PositionImage_y.ToString().PadRight(25) + '\t');
            }

            float HeadCenter_x = headpostion.headCenter.x;
            float HeadCenter_y = headpostion.headCenter.y;
            float HeadCenter_z = headpostion.headCenter.z;

            float PoseEulerAngles_pitch = angles.pitch;
            float PoseEulerAngles_roll  = angles.roll;
            float PoseEulerAngles_yaw   = angles.yaw;

            sw_less.Write(HeadCenter_x.ToString().PadRight(25) + '\t'
                          + HeadCenter_y.ToString().PadRight(25) + '\t'
                          + HeadCenter_z.ToString().PadRight(25) + '\t'
                          + PoseEulerAngles_pitch.ToString().PadRight(25) + '\t'
                          + PoseEulerAngles_roll.ToString().PadRight(25) + '\t'
                          + PoseEulerAngles_yaw.ToString().PadRight(25) + '\t');

            sw_less.WriteLine();
        }
Example #3
        private void TrackUserPosition(PXCMFaceData.Face faceDataFace)
        {
            if (faceDataFace == null)
            {
                ResetUserTrackData();
                return;
            }

            PXCMFaceData.PoseData        poseData = faceDataFace.QueryPose();
            PXCMFaceData.HeadPosition    headPosition;
            PXCMFaceData.PoseEulerAngles poseAngles;

            if (poseData != null &&
                poseData.QueryHeadPosition(out headPosition) && poseData.QueryPoseAngles(out poseAngles))
            {
                IsUserTracked = true;

                UserHeadPositionX = headPosition.headCenter.x;
                UserHeadPositionY = headPosition.headCenter.y;
                UserHeadPositionZ = headPosition.headCenter.z;

                UserHeadPositionYaw   = poseAngles.yaw;
                UserHeadPositionPitch = poseAngles.pitch;
                UserHeadPositionRoll  = poseAngles.roll;
            }
            else
            {
                ResetUserTrackData();
            }
        }
Example #4
        // Update processing for a face frame
        private void updateFaceFrame()
        {
            // Get the frame data
            PXCMCapture.Sample sample = senceManager.QuerySample();
            UpdateColorImage(sample.color);

            // Update the face data from the SenseManager module
            faceData.Update();

            // Get the number of detected faces
            int numFaces = faceData.QueryNumberOfDetectedFaces();

            // Added: prepare a variable to hold the face pose information
            PXCMFaceData.PoseEulerAngles[] poseAngle = new PXCMFaceData.PoseEulerAngles[POSE_MAXFACES];



            if (senceManager != null)
            {
                // Retrieve information and draw for each detected face
                for (int i = 0; i < numFaces; ++i)
                {
                    // Get the face information
                    PXCMFaceData.Face face = faceData.QueryFaceByIndex(i);

                    // Get the face position (via the depth stream)
                    var detection = face.QueryDetection();
                    if (detection != null)
                    {
                        PXCMRectI32 faceRect;
                        detection.QueryBoundingRect(out faceRect);

                        // Move and resize the rectangle to match the face position
                        TranslateTransform transform = new TranslateTransform(faceRect.x, faceRect.y);
                        rect[i].Width           = faceRect.w;
                        rect[i].Height          = faceRect.h;
                        rect[i].Stroke          = Brushes.Blue;
                        rect[i].StrokeThickness = 3;
                        rect[i].RenderTransform = transform;

                        // Added: pose (face orientation), only available when depth is used
                        PXCMFaceData.PoseData pose = face.QueryPose();
                        if (pose != null)
                        {
                            // Display the pose information next to the face
                            tb[i, 0].RenderTransform = new TranslateTransform(transform.X, transform.Y - 30);
                            tb[i, 1].RenderTransform = new TranslateTransform(transform.X, transform.Y - 60);
                            tb[i, 2].RenderTransform = new TranslateTransform(transform.X, transform.Y - 90);

                            // Added: face pose angles (yaw, pitch, roll)
                            pose.QueryPoseAngles(out poseAngle[i]);
                            tb[i, 0].Text = "pitch:" + poseAngle[i].pitch;
                            tb[i, 1].Text = "roll:" + poseAngle[i].roll;
                            tb[i, 2].Text = "yaw:" + poseAngle[i].yaw;
                        }
                    }
                }
            }
        }
Example #5
        public void DrawPose(PXCMFaceData.Face face)
        {
            Debug.Assert(face != null);
            PXCMFaceData.PoseEulerAngles poseAngles;
            PXCMFaceData.PoseData        pdata = face.QueryPose();
            if (pdata == null)
            {
                return;
            }
            if (!Pose.Checked || !pdata.QueryPoseAngles(out poseAngles))
            {
                return;
            }

            if (isRegisteredDetected(face)) // Main condition for fixed head
            {
                //SendFaceValue(FaceData);
                lock (m_bitmapLock)
                {
                    using (Graphics graphics = Graphics.FromImage(m_bitmap))
                        using (var brush = new SolidBrush(m_faceTextOrganizer.Colour))
                            using (var font = new Font(FontFamily.GenericMonospace, m_faceTextOrganizer.FontSize, FontStyle.Bold))
                            {
                                string yawText = String.Format("Yaw = {0}",
                                                               Convert.ToInt32(poseAngles.yaw).ToString(CultureInfo.InvariantCulture));
                                graphics.DrawString(yawText, font, brush, m_faceTextOrganizer.PoseLocation.X,
                                                    m_faceTextOrganizer.PoseLocation.Y);

                                string pitchText = String.Format("Pitch = {0}",
                                                                 Convert.ToInt32(poseAngles.pitch).ToString(CultureInfo.InvariantCulture));
                                graphics.DrawString(pitchText, font, brush, m_faceTextOrganizer.PoseLocation.X,
                                                    m_faceTextOrganizer.PoseLocation.Y + m_faceTextOrganizer.FontSize);

                                string rollText = String.Format("Roll = {0}",
                                                                Convert.ToInt32(poseAngles.roll).ToString(CultureInfo.InvariantCulture));
                                graphics.DrawString(rollText, font, brush, m_faceTextOrganizer.PoseLocation.X,
                                                    m_faceTextOrganizer.PoseLocation.Y + 2 * m_faceTextOrganizer.FontSize);

                                /*
                                 * Invoking and Sending Value to the Viewer Window form
                                 * */
                                //SendFaceValue(poseAngles.yaw.ToString());
                                FaceDataValue = poseAngles.yaw.ToString();

                                //SendFaceData sfdd = new SendFaceData(DrawPose);
                                //this.Invoke(sfdd, new object[] { poseAngles.yaw.ToString() });
                                Invoke(new SendFaceData(SendFaceValue), new Object[] { (poseAngles.yaw), (poseAngles.pitch) });
                                Thread.Sleep(1);
                            }
                }
            }
        }
Example #6
        private void GetFacePose(PXCMFaceData.Face face, out float pitch, out float yaw, out float roll)
        {
            pitch = -99.0f; yaw = -99.0f; roll = -99.0f;
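            // -99 serves as a sentinel meaning "no pose data available"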

            PXCMFaceData.PoseData poseData = face.QueryPose();
            if (poseData != null)
            {
                PXCMFaceData.PoseEulerAngles angles;
                poseData.QueryPoseAngles(out angles);

                pitch = angles.pitch;
                yaw   = angles.yaw;
                roll  = angles.roll;
            }
        }
Example #7
        private void ElaborateSample(PXCMCapture.Sample sample, PXCMFaceData.Face face)
        {
            if (sample == null)
            {
                return;
            }

            WriteableBitmap imageRGB = null;
            bool            isPoseVisible = false;
            float           pitchValue = 0, yawValue = 0, rollValue = 0;

            if (sample.color != null)
            {
                imageRGB = sample.color.GetImage();
            }

            if (face != null)
            {
                PXCMFaceData.PoseData poseData = face.QueryPose();

                PXCMFaceData.PoseEulerAngles poseAngles;
                if (poseData != null && poseData.QueryPoseAngles(out poseAngles))
                {
                    isPoseVisible = true;
                    pitchValue    = poseAngles.pitch;
                    yawValue      = poseAngles.yaw;
                    rollValue     = poseAngles.roll;
                }
            }

            if (imageRGB != null)
            {
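                // Freeze the bitmap so it can be safely read from the UI thread in the Dispatcher call below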
                imageRGB.Freeze();
            }

            Dispatcher.Invoke(() =>
            {
                this.ImageRGB      = imageRGB;
                this.IsPoseVisible = isPoseVisible;
                this.Pitch         = pitchValue;
                this.Roll          = rollValue;
                this.Yaw           = yawValue;
            });
        }
Example #8
        private static void UpdateFace()
        {
            landmarksData = null;
            if (faceData == null)
            {
                return;
            }

            faceData.Update();

            PXCMFaceData.Face face = faceData.QueryFaceByIndex(0);
            if (face != null)
            {
                landmarksData  = face.QueryLandmarks();
                expressionData = face.QueryExpressions();
                poseData       = face.QueryPose();

                UpdateFaceOrientation(poseData);
                UpdateFacePosition(poseData);
                UpdateFaceGeometry(landmarksData);
            }
        }
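The helpers UpdateFaceOrientation, UpdateFacePosition and UpdateFaceGeometry are not shown in this example. A minimal sketch of what UpdateFaceOrientation might do with the pose data, modeled on the queries in Examples #1 and #6 (the field names here are assumptions, not part of the original code):

        // Hypothetical fields used only for this sketch
        private static float faceYaw, facePitch, faceRoll;

        private static void UpdateFaceOrientation(PXCMFaceData.PoseData poseData)
        {
            // Guard against a missing pose (QueryPose can return null) and a failed angle query
            PXCMFaceData.PoseEulerAngles angles;
            if (poseData != null && poseData.QueryPoseAngles(out angles))
            {
                faceYaw   = angles.yaw;
                facePitch = angles.pitch;
                faceRoll  = angles.roll;
            }
        }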
Example #9
        public void DrawPose(PXCMFaceData.Face face)
        {
            Debug.Assert(face != null);
            PXCMFaceData.PoseEulerAngles poseAngles;
            PXCMFaceData.PoseData        pdata = face.QueryPose();
            if (pdata == null)
            {
                return;
            }
            if (!Pose.Checked || !pdata.QueryPoseAngles(out poseAngles))
            {
                return;
            }

            lock (m_bitmapLock)
            {
                using (Graphics graphics = Graphics.FromImage(m_bitmap))
                    using (var brush = new SolidBrush(m_faceTextOrganizer.Colour))
                        using (var font = new Font(FontFamily.GenericMonospace, m_faceTextOrganizer.FontSize, FontStyle.Bold))
                        {
                            string yawText = String.Format("Yaw = {0}",
                                                           Convert.ToInt32(poseAngles.yaw).ToString(CultureInfo.InvariantCulture));
                            graphics.DrawString(yawText, font, brush, m_faceTextOrganizer.PoseLocation.X,
                                                m_faceTextOrganizer.PoseLocation.Y);

                            string pitchText = String.Format("Pitch = {0}",
                                                             Convert.ToInt32(poseAngles.pitch).ToString(CultureInfo.InvariantCulture));
                            graphics.DrawString(pitchText, font, brush, m_faceTextOrganizer.PoseLocation.X,
                                                m_faceTextOrganizer.PoseLocation.Y + m_faceTextOrganizer.FontSize);

                            string rollText = String.Format("Roll = {0}",
                                                            Convert.ToInt32(poseAngles.roll).ToString(CultureInfo.InvariantCulture));
                            graphics.DrawString(rollText, font, brush, m_faceTextOrganizer.PoseLocation.X,
                                                m_faceTextOrganizer.PoseLocation.Y + 2 * m_faceTextOrganizer.FontSize);
                        }
            }
        }
Example #10
    /// <summary>
    /// Update is called every frame, if the MonoBehaviour is enabled.
    /// </summary>
    void Update()
    {
        /* Make sure PXCMSenseManager Instance is Initialized */
        if (psm == null)
        {
            return;
        }

        /* Wait until any frame data is available true(aligned) false(unaligned) */
        if (psm.AcquireFrame(true) != pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            return;
        }

        /* Retrieve face tracking data if ready */
        faceAnalyzer = psm.QueryFace();
        if (faceAnalyzer != null)
        {
            PXCMFaceData _outputData = faceAnalyzer.CreateOutput();
            if (_outputData != null)
            {
                _outputData.Update();

                for (int i = 0; i < _outputData.QueryNumberOfDetectedFaces(); i++)
                {
                    PXCMFaceData.Face _iFace = _outputData.QueryFaceByIndex(i);
                    if (_iFace != null)
                    {
                        /* Retrieve 78 Landmark Points */
                        PXCMFaceData.LandmarksData LandmarkData = _iFace.QueryLandmarks();
                        if (LandmarkData != null)
                        {
                            PXCMFaceData.LandmarkPoint[] landmarkPoints = new PXCMFaceData.LandmarkPoint[MaxPoints];
                            if (LandmarkData.QueryPoints(out landmarkPoints))
                            {
                                faceRenderer.DisplayJoints2D(landmarkPoints);
                            }
                        }

                        /* Retrieve Detection Data */
                        PXCMFaceData.DetectionData detectionData = _iFace.QueryDetection();
                        if (detectionData != null)
                        {
                            PXCMRectI32 rect;
                            if (detectionData.QueryBoundingRect(out rect))
                            {
                                faceRenderer.SetDetectionRect(rect);
                            }
                        }

                        /* Retrieve Pose Data */
                        PXCMFaceData.PoseData poseData = _iFace.QueryPose();
                        if (poseData != null)
                        {
                            PXCMFaceData.PoseQuaternion poseQuaternion;
                            if (poseData.QueryPoseQuaternion(out poseQuaternion))
                            {
                                faceRenderer.DisplayPoseQuaternion(poseQuaternion);
                            }
                        }

                        /* Retrieve Expression Data */
                        PXCMFaceData.ExpressionsData expressionData = _iFace.QueryExpressions();
                        if (expressionData != null)
                        {
                            PXCMFaceData.ExpressionsData.FaceExpressionResult expressionResult;
                            if (expressionData.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_MOUTH_OPEN, out expressionResult))
                            {
                                faceRenderer.DisplayExpression(expressionResult, PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_MOUTH_OPEN);
                            }
                        }
                    }
                }

                /* Retrieve Alert Data */
                PXCMFaceData.AlertData _alertData;
                for (int i = 0; i < _outputData.QueryFiredAlertsNumber(); i++)
                {
                    if (_outputData.QueryFiredAlertData(i, out _alertData) == pxcmStatus.PXCM_STATUS_NO_ERROR)
                    {
                        faceRenderer.DisplayAlerts(_alertData);
                    }
                }
                _outputData.Dispose();
            }
        }

        /* Retrieve a sample from the camera */
        PXCMCapture.Sample sample = psm.QueryFaceSample();
        if (sample != null)
        {
            colorImage = sample.color;
            if (colorImage != null)
            {
                if (colorTexture2D == null)
                {
                    /* If not allocated, allocate a Texture2D */
                    colorTexture2D = new Texture2D(colorImage.info.width, colorImage.info.height, TextureFormat.ARGB32, false);

                    /* Associate the Texture2D with a gameObject */
                    colorPlane.renderer.material.mainTexture = colorTexture2D;
                    //colorPlane.renderer.material.mainTextureScale = new Vector2(-1f, 1f);
                }

                /* Retrieve the image data in Texture2D */
                PXCMImage.ImageData colorImageData;
                colorImage.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB32, out colorImageData);
                colorImageData.ToTexture2D(0, colorTexture2D);
                colorImage.ReleaseAccess(colorImageData);

                /* Apply the texture to the GameObject to display on */
                colorTexture2D.Apply();
            }
        }

        /* Release the frame to process the next frame */
        psm.ReleaseFrame();
    }
Example #11
        private void updateFaceFrame()
        {
            PXCMCapture.Sample sample = this.senseManager.QuerySample();
            this.image = sample.color;

            if (this.image != null)
            {
                this.invalidate = true;
            }

            this.faceData.Update();

            // get number of faces
            FOutFaceLandmarkPoints.SliceCount    = 0;
            FOutFaceExpressionsResult.SliceCount = 0;
            int numFaces = this.faceData.QueryNumberOfDetectedFaces();

            for (int i = 0; i < numFaces; ++i)
            {
                // get faces info
                PXCMFaceData.Face face = this.faceData.QueryFaceByIndex(i);

                // get face position by Depth
                var detection = face.QueryDetection();
                if (detection != null)
                {
                    // detection
                    PXCMRectI32 faceRect;
                    detection.QueryBoundingRect(out faceRect);
                    int sliceCount = i + 1;
                    FOutFacePosition.SliceCount = sliceCount;
                    FOutFacePosition[i]         = new Vector2D(faceRect.x, faceRect.y);
                    FOutFaceWidth.SliceCount    = sliceCount;
                    FOutFaceWidth[i]            = faceRect.w;
                    FOutFaceHeight.SliceCount   = sliceCount;
                    FOutFaceHeight[i]           = faceRect.h;

                    // pose (only available when using the depth stream)
                    PXCMFaceData.PoseData pose = face.QueryPose();
                    if (pose != null)
                    {
                        // face pose angles
                        PXCMFaceData.PoseEulerAngles poseAngle = new PXCMFaceData.PoseEulerAngles();
                        pose.QueryPoseAngles(out poseAngle);
                        FOutFacePose.SliceCount = sliceCount;
                        FOutFacePose[i]         = new Vector3D(poseAngle.pitch, poseAngle.yaw, poseAngle.roll);
                    }

                    // landmarks
                    PXCMFaceData.LandmarksData landmarks = face.QueryLandmarks();
                    FOutFaceLandmarkBinSize.SliceCount = sliceCount;
                    if (landmarks != null)
                    {
                        // number of feature points from landmarks
                        int numPoints = landmarks.QueryNumPoints();
                        FOutFaceLandmarkBinSize[i] = numPoints;

                        PXCMFaceData.LandmarkPoint[] landmarkPoints = new PXCMFaceData.LandmarkPoint[numPoints];
                        int prevSliceCount = FOutFaceLandmarkPoints.SliceCount;
                        FOutFaceLandmarkPoints.SliceCount = prevSliceCount + numPoints;

                        if (landmarks.QueryPoints(out landmarkPoints))
                        {
                            for (int j = 0; j < numPoints; j++)
                            {
                                int index = prevSliceCount + j;
                                FOutFaceLandmarkPoints[index] = new Vector2D(landmarkPoints[j].image.x, landmarkPoints[j].image.y);
                            }
                        }
                    }
                    else
                    {
                        FOutFaceLandmarkBinSize[i]        = 0;
                        FOutFaceLandmarkPoints.SliceCount = 0;
                    }

                    PXCMFaceData.ExpressionsData expressionData = face.QueryExpressions();
                    if (expressionData != null)
                    {
                        for (int j = 0; j < FInExpressions.SliceCount; j++)
                        {
                            PXCMFaceData.ExpressionsData.FaceExpressionResult expressionResult;
                            if (expressionData.QueryExpression(FInExpressions[j], out expressionResult))
                            {
                                FOutFaceExpressionsResult.SliceCount++;
                                FOutFaceExpressionsResult[j] = expressionResult.intensity;
                            }
                            else
                            {
                                // do nothing
                            }
                        }
                    }
                    else
                    {
                        FOutFaceExpressionsResult.SliceCount = 0;
                    }

                    PXCMFaceData.PulseData pulseData = face.QueryPulse();
                    if (pulseData != null)
                    {
                        FOutPulse.SliceCount = sliceCount;
                        FOutPulse[i]         = pulseData.QueryHeartRate();
                    }
                }
            }
        }
Example #12
        public override bool Process(Trigger trigger)
        {
            trigger.ErrorDetected = false;

            if (!SenseToolkitManager.Instance.IsSenseOptionSet(SenseOption.SenseOptionID.Face))
            {
                trigger.ErrorDetected = true;
                return(false);
            }

            if (!(trigger is TrackTrigger))
            {
                trigger.ErrorDetected = true;
                return(false);
            }

            // make sure we have valid values
            if (RealWorldBoxDimensions.x <= 0)
            {
                RealWorldBoxDimensions.x = 1;
            }

            if (RealWorldBoxDimensions.y <= 0)
            {
                RealWorldBoxDimensions.y = 1;
            }

            if (RealWorldBoxDimensions.z <= 0)
            {
                RealWorldBoxDimensions.z = 1;
            }

            if (SenseToolkitManager.Instance.Initialized
                &&
                SenseToolkitManager.Instance.FaceModuleOutput != null)
            {
                if (SenseToolkitManager.Instance.FaceModuleOutput.QueryNumberOfDetectedFaces() == 0)
                {
                    ((TrackTrigger)trigger).Position = Vector3.zero;
                    return(false);
                }

                PXCMFaceData.Face singleFaceOutput = null;

                singleFaceOutput = SenseToolkitManager.Instance.FaceModuleOutput.QueryFaceByIndex(FaceIndex);

                bool success = false;
                if (singleFaceOutput != null && singleFaceOutput.QueryUserID() >= 0)
                {
                    // Process Tracking
                    if (trigger is TrackTrigger)
                    {
                        TrackTrigger specificTrigger = (TrackTrigger)trigger;

                        var  landmarksData = singleFaceOutput.QueryLandmarks();
                        bool hasLandmarks  = false;

                        if (landmarksData != null)
                        {
                            PXCMFaceData.LandmarkPoint outpt = null;
                            bool hasPoint = landmarksData.QueryPoint(landmarksData.QueryPointIndex(LandmarkToTrack), out outpt);
                            if (hasPoint)
                            {
                                hasLandmarks = outpt.confidenceWorld != 0;
                            }
                        }

                        if (!hasLandmarks && useBoundingBox)
                        {
                            PXCMRectI32 rect = new PXCMRectI32();
                            if (singleFaceOutput.QueryDetection() != null && singleFaceOutput.QueryDetection().QueryBoundingRect(out rect))
                            {
                                float depth;
                                singleFaceOutput.QueryDetection().QueryFaceAverageDepth(out depth);
                                float bbCenterX = (rect.x + rect.w / 2);
                                float bbCenterY = (rect.y + rect.h / 2);

                                Vector3 vec = new Vector3();

                                if (_pos_ijz == null)
                                {
                                    _pos_ijz = new PXCMPoint3DF32[1] {
                                        new PXCMPoint3DF32()
                                    };
                                }
                                _pos_ijz[0].x = bbCenterX;
                                _pos_ijz[0].y = bbCenterY;
                                _pos_ijz[0].z = depth;

                                if (_pos3d == null)
                                {
                                    _pos3d = new PXCMPoint3DF32[1] {
                                        new PXCMPoint3DF32()
                                    };
                                }

                                SenseToolkitManager.Instance.Projection.ProjectDepthToCamera(_pos_ijz, _pos3d);

                                vec.x = _pos3d[0].x / 10f;
                                vec.y = _pos3d[0].y / 10f;
                                vec.z = _pos3d[0].z / 10f;

                                // Clamp and normalize to the Real World Box
                                TrackingUtilityClass.ClampToRealWorldInputBox(ref vec, RealWorldBoxCenter, RealWorldBoxDimensions);
                                TrackingUtilityClass.Normalize(ref vec, RealWorldBoxCenter, RealWorldBoxDimensions);

                                if (!float.IsNaN(vec.x) && !float.IsNaN(vec.y) && !float.IsNaN(vec.z))
                                {
                                    specificTrigger.Position = vec;
                                    return(true);
                                }
                            }
                            else
                            {
                                specificTrigger.Position = Vector3.zero;
                                return(false);
                            }
                        }
                        else if (landmarksData == null && !useBoundingBox)
                        {
                            specificTrigger.Position = Vector3.zero;
                            return(false);
                        }
                        else
                        {
                            int landmarkId = landmarksData.QueryPointIndex(LandmarkToTrack);

                            PXCMFaceData.LandmarkPoint point = null;

                            landmarksData.QueryPoint(landmarkId, out point);

                            // Translation
                            if (point != null)
                            {
                                Vector3 vec = new Vector3();
                                vec.x = -point.world.x * 100f;
                                vec.y = point.world.y * 100f;
                                vec.z = point.world.z * 100f;

                                if (vec.x + vec.y + vec.z == 0)
                                {
                                    specificTrigger.Position = Vector3.zero;
                                    return(false);
                                }

                                // Clamp and normalize to the Real World Box
                                TrackingUtilityClass.ClampToRealWorldInputBox(ref vec, RealWorldBoxCenter, RealWorldBoxDimensions);
                                TrackingUtilityClass.Normalize(ref vec, RealWorldBoxCenter, RealWorldBoxDimensions);

                                if (!float.IsNaN(vec.x) && !float.IsNaN(vec.y) && !float.IsNaN(vec.z))
                                {
                                    specificTrigger.Position = vec;
                                    success = true;
                                }
                            }

                            //Rotation
                            PXCMFaceData.PoseData poseData = singleFaceOutput.QueryPose();
                            if (success && poseData != null)
                            {
                                PXCMFaceData.PoseEulerAngles angles;
                                if (poseData.QueryPoseAngles(out angles))
                                {
                                    if (!float.IsNaN(angles.pitch) && !float.IsNaN(angles.yaw) && !float.IsNaN(angles.roll))
                                    {
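                                        // The sign flips on pitch and roll presumably convert the SDK's camera-space angles into Unity's coordinate convention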
                                        Quaternion q = Quaternion.Euler(-angles.pitch, angles.yaw, -angles.roll);

                                        specificTrigger.RotationQuaternion = q;

                                        return(true);
                                    }
                                }
                            }
                        }
                    }
                }
            }
            return(false);
        }
Example #13
        // Update is called once per frame
        void Update()
        {
            if (session == null)
            {
                return;
            }

            // For accessing hand data
            handAnalyzer = session.QueryHand();
            faceAnalyzer = session.QueryFace();


            if (handAnalyzer != null)
            {
                PXCMHandData handData = handAnalyzer.CreateOutput();
                if (handData != null)
                {
                    handData.Update();

                    PXCMHandData.IHand IHAND; // IHand instance for accessing hand data later
                    //   Int32 IhandData; // for QueryOpenness Value
                    //    PXCMPoint3DF32 location; // Stores hand tracking position

                    //Fills IHAND with information to later be grabbed and used for tracking + openness
                    handData.QueryHandData(PXCMHandData.AccessOrderType.ACCESS_ORDER_NEAR_TO_FAR, 0, out IHAND);


                    // If there is data in Ihand
                    if (IHAND != null)
                    {
                        // Debug.DrawLine(transform.position, hit.point, Color.red);


                        // Inits hand tracking from the center of the hand.
                        //        location = IHAND.QueryMassCenterWorld();
                        //      if (mCurrentDart != null)
                        //    {
                        //      Vector3 locationUnity = new Vector3(location.x, location.y, location.z);
                        //      mCurrentDart.transform.localPosition = locationUnity * RSScale;
                        // }
                    }
                }
                handAnalyzer.Dispose();
                session.ReleaseFrame();
            }


            if (faceAnalyzer != null)
            {
                PXCMFaceData facedata = faceAnalyzer.CreateOutput();
                if (facedata != null)
                {
                    Int32 nfaces = facedata.QueryNumberOfDetectedFaces();
                    for (Int32 i = 0; i < nfaces; i++)
                    {
                        // Retrieve the face landmark data instance

                        PXCMFaceData.Face face = facedata.QueryFaceByIndex(i);

                        PXCMFaceData.PoseData pdata = face.QueryPose();



                        // retrieve the pose information

                        PXCMFaceData.PoseEulerAngles angles;

                        pdata.QueryPoseAngles(out angles);
                        Debug.Log("Euler Angles yaw: " + angles.yaw);
                        Debug.Log("Euler Angles pitch: " + angles.pitch);
                        Debug.Log("Euler Angles roll: " + angles.roll);
                        angles.pitch = gameObject.transform.rotation.z;
                        angles.yaw   = gameObject.transform.rotation.y;
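                        // Note: transform.rotation.y/.z are quaternion components rather than Euler degrees, and the local 'angles' is not read again after these assignments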
                    }

                    // device is a PXCMCapture.Device instance
                }
            }
        }
Example #14
        public override bool Process(Trigger trigger)
        {
            trigger.ErrorDetected = false;

            if (!SenseToolkitManager.Instance.IsSenseOptionSet(SenseOption.SenseOptionID.Face))
            {
                trigger.ErrorDetected = true;
                return(false);
            }

            if (!(trigger is TrackTrigger))
            {
                trigger.ErrorDetected = true;
                return(false);
            }

            bool success = false;

            // make sure we have valid values
            if (RealWorldBoxDimensions.x <= 0)
            {
                RealWorldBoxDimensions.x = 1;
            }

            if (RealWorldBoxDimensions.y <= 0)
            {
                RealWorldBoxDimensions.y = 1;
            }

            if (RealWorldBoxDimensions.z <= 0)
            {
                RealWorldBoxDimensions.z = 1;
            }

            if (SenseToolkitManager.Instance.Initialized
                &&
                SenseToolkitManager.Instance.FaceModuleOutput != null)
            {
                if (SenseToolkitManager.Instance.FaceModuleOutput.QueryNumberOfDetectedFaces() == 0)
                {
                    return(false);
                }

                PXCMFaceData.Face singleFaceOutput = null;

                singleFaceOutput = SenseToolkitManager.Instance.FaceModuleOutput.QueryFaceByIndex(FaceIndex);


                if (singleFaceOutput != null && singleFaceOutput.QueryUserID() >= 0)
                {
                    // Process Tracking
                    if (trigger is TrackTrigger)
                    {
                        TrackTrigger specificTrigger = (TrackTrigger)trigger;

                        var landmarksData = singleFaceOutput.QueryLandmarks();
                        if (landmarksData == null)
                        {
                            return(false);
                        }

                        int landmarkId = landmarksData.QueryPointIndex(LandmarkToTrack);

                        PXCMFaceData.LandmarkPoint point = null;

                        landmarksData.QueryPoint(landmarkId, out point);

                        // Translation
                        if (point != null)
                        {
                            Vector3 vec = new Vector3();
                            vec.x = point.world.x * 100;
                            vec.y = point.world.y * 100;
                            vec.z = point.world.z * 100;

                            if (vec.x + vec.y + vec.z == 0)
                            {
                                return(false);
                            }

                            // Clamp and normalize to the Real World Box
                            TrackingUtilityClass.ClampToRealWorldInputBox(ref vec, RealWorldBoxCenter, RealWorldBoxDimensions);
                            TrackingUtilityClass.Normalize(ref vec, RealWorldBoxCenter, RealWorldBoxDimensions);

                            if (!float.IsNaN(vec.x) && !float.IsNaN(vec.y) && !float.IsNaN(vec.z))
                            {
                                specificTrigger.Position = vec;
                                success = true;
                            }
                        }

                        //Rotation
                        PXCMFaceData.PoseData poseData = singleFaceOutput.QueryPose();
                        if (poseData != null)
                        {
                            PXCMFaceData.PoseEulerAngles angles;
                            if (poseData.QueryPoseAngles(out angles))
                            {
                                if (!float.IsNaN(angles.pitch) && !float.IsNaN(angles.yaw) && !float.IsNaN(angles.roll))
                                {
                                    Quaternion q = Quaternion.Euler(-angles.pitch, angles.yaw, -angles.roll);

                                    specificTrigger.RotationQuaternion = q;

                                    success = true;
                                }
                            }
                        }
                    }
                }

                return(success);
            }

            return(success);
        }
Example #15
        private void Savedata(PXCMFaceData faceOutput)
        {
            PXCMFaceData.Face            qface    = faceOutput.QueryFaceByIndex(0);
            PXCMFaceData.PoseData        posedata = qface.QueryPose();
            PXCMFaceData.LandmarksData   Idata    = qface.QueryLandmarks();
            PXCMFaceData.LandmarkPoint[] points;
            PXCMFaceData.PoseEulerAngles angles;
            PXCMFaceData.HeadPosition    headpostion;

            posedata.QueryPoseAngles(out angles);
            posedata.QueryHeadPosition(out headpostion);
            Idata.QueryPoints(out points);


            string connSting;

            connSting = "server=localhost;database=RealSense;Integrated Security=True ";
            SqlConnection sConn = new SqlConnection(connSting);

            try
            {
                sConn.Open();
            }
            catch (Exception ex)
            {
                Console.WriteLine("Connection error: " + ex.Message);
            }

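            // Indices of the 32 landmarks stored per row; note this set differs slightly from the one used in Savedata_less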
            int[] a = new int[32] {
                76, 77, 12, 16, 14, 10, 20, 24, 18, 22, 70, 0, 4, 7, 5, 9, 29, 26, 31, 30, 32, 39, 33, 47, 46, 48, 51, 52, 50, 56, 65, 61
            };
            int t = 0;

            for (int i = 0; i < 32; i++)
            {
                string sql_Insert;

                string times = DateTime.Now.ToString("yyyy-MM-dd") + " " + DateTime.Now.ToString("hh:mm:ss");

                string LandmarkPointName = MarkPointName(points[a[t]].source.index);

                float Positionworld_x = points[a[t]].world.x;
                float Positionworld_y = points[a[t]].world.y;
                float Positionworld_z = points[a[t]].world.z;

                float PositionImage_x = points[a[t]].image.x;
                float PositionImage_y = points[a[t++]].image.y;

                float HeadCenter_x = headpostion.headCenter.x;
                float HeadCenter_y = headpostion.headCenter.y;
                float HeadCenter_z = headpostion.headCenter.z;

                float PoseEulerAngles_pitch = angles.pitch;
                float PoseEulerAngles_roll  = angles.roll;
                float PoseEulerAngles_yaw   = angles.yaw;

                sql_Insert = "insert into FaceData(time,LandmarkPointName,[Positionworld.x],[Positionworld.y],[Positionworld.z],[PositionImage.x],[PositionImage.y],[HeadCenter.x],[HeadCenter.y],[HeadCenter.z],[PoseEulerAngles.pitch],[PoseEulerAngles.roll],[PoseEulerAngles.yaw])values('"
                             + times + "','"
                             + LandmarkPointName + "','"
                             + Positionworld_x + "','"
                             + Positionworld_y + "','"
                             + Positionworld_z + "','"
                             + PositionImage_x + "','"
                             + PositionImage_y + "','"
                             + HeadCenter_x + "','"
                             + HeadCenter_y + "','"
                             + HeadCenter_z + "','"
                             + PoseEulerAngles_pitch + "','"
                             + PoseEulerAngles_roll + "','"
                             + PoseEulerAngles_yaw + "')";

                SqlCommand sCmd = new SqlCommand(sql_Insert, sConn);
                sCmd.ExecuteNonQuery();
            }


            sConn.Close();
        }
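Building the INSERT statement by string concatenation, as above, is fragile (quoting, culture-dependent float formatting) and open to SQL injection. A minimal parameterized sketch of the per-landmark insert, assuming the same FaceData table and the variables already in scope inside the loop above (only the first few columns are shown):

                string sql_Insert = "insert into FaceData(time, LandmarkPointName, [Positionworld.x], [Positionworld.y], [Positionworld.z]) "
                                    + "values(@time, @name, @wx, @wy, @wz)";

                using (SqlCommand sCmd = new SqlCommand(sql_Insert, sConn))
                {
                    sCmd.Parameters.AddWithValue("@time", times);
                    sCmd.Parameters.AddWithValue("@name", LandmarkPointName);
                    sCmd.Parameters.AddWithValue("@wx", Positionworld_x);
                    sCmd.Parameters.AddWithValue("@wy", Positionworld_y);
                    sCmd.Parameters.AddWithValue("@wz", Positionworld_z);
                    sCmd.ExecuteNonQuery();
                }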
Example #16
        private void ProcessingThread()
        {
            string videoName, nameColor, nameDepth, nameIr;
            int    width  = 640;
            int    height = 480;

            int       frameIndex      = 0;
            string    formatImageFile = ".png";
            int       nframes         = 0;
            int       lostFrames      = 0;
            string    landmarks       = null;
            long      frameTimeStamp  = 0;
            PXCMImage color;
            PXCMImage depth;
            PXCMImage ir;

            PXCMCapture.Sample    sample;
            PXCMImage.ImageData   imageColor;
            PXCMImage.ImageData   imageDepth;
            PXCMImage.ImageData   imageIr;
            WriteableBitmap       wbm1, wbm2, wbm3;
            Int32Rect             rect2crop;
            PXCMFaceModule        faceModule;
            PXCMFaceConfiguration faceConfig;
            PXCMFaceData          faceData = null;
            // Offset applied to the cropped face rectangle
            Offset offset = new Offset(0, 0, 0, 0);

            // For each directory, extract all landmarks and image streams from all videos
            foreach (var dir in dirsSource)
            {
                //If the folder is not empty
                if (Directory.EnumerateFileSystemEntries(dir).Any())
                {
                    dictPaths.TryGetValue(dir, out paths); //This dict contains all source and output dirs
                    List <string> fileList = new List <string>(Directory.GetFiles(dir, "*.rssdk"));
                    //For each video
                    foreach (var inputFile in fileList)
                    {
                        lostFrames = 0;
                        videoName  = inputFile.Split('\\').Last().Split('.')[0];
                        // Create a SenseManager instance
                        sm = PXCMSenseManager.CreateInstance();
                        // Recording mode: true
                        // Playback mode: false
                        // Settings for playback mode (read rssdk files and extract frames)
                        sm.captureManager.SetFileName(inputFile, false);
                        sm.captureManager.SetRealtime(false);
                        nframes = sm.captureManager.QueryNumberOfFrames();

                        //Update in realtime the current extraction
                        Application.Current.Dispatcher.BeginInvoke(new Action(() =>
                        {
                            textBox2.Text = nframes.ToString();
                            textBox3.Text = String.Format("Record: {0}\nVideo: {1}", paths.root, videoName);
                        }));

                        sm.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, width, height, 0);
                        sm.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_DEPTH, width, height);
                        sm.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_IR, width, height);

                        //Extract Landmarks
                        sm.EnableFace();
                        faceModule = sm.QueryFace();
                        faceConfig = faceModule.CreateActiveConfiguration();
                        faceConfig.landmarks.maxTrackedFaces = 1;
                        faceConfig.landmarks.isEnabled       = true;
                        faceConfig.detection.maxTrackedFaces = 1;
                        faceConfig.detection.isEnabled       = true;
                        faceConfig.EnableAllAlerts();
                        faceConfig.ApplyChanges();

                        sm.Init();

                        // This string stores all data before saving to csv file
                        landmarks = null;
                        // Start AcquireFrame/ReleaseFrame loop
                        var stopwatch = new Stopwatch();
                        stopwatch.Start();

                        while (sm.AcquireFrame(true) >= pxcmStatus.PXCM_STATUS_NO_ERROR)
                        {
                            // Retrieve face data
                            faceModule = sm.QueryFace();
                            frameIndex = sm.captureManager.QueryFrameIndex();
                            if (faceModule != null)
                            {
                                // Retrieve the most recent processed data
                                faceData = faceModule.CreateOutput();
                                faceData.Update();
                            }
                            if (faceData != null)
                            {
                                Int32 nfaces = faceData.QueryNumberOfDetectedFaces();

                                frameTimeStamp = sm.captureManager.QueryFrameTimeStamp();
                                //PXCMCapture.Sample sample = senseManager.QuerySample();
                                if (nfaces == 0) // If no face was detected, count this frame as a "lost frame"
                                {
                                    lostFrames += 1;
                                }
                                for (Int32 i = 0; i < nfaces; i++)
                                {
                                    //Retrieve the image
                                    sample = sm.QuerySample();
                                    // Work on the images
                                    color = sample.color;
                                    depth = sample.depth;
                                    ir    = sample.ir;

                                    PXCMFaceData.Face          face         = faceData.QueryFaceByIndex(i);
                                    PXCMFaceData.LandmarksData landmarkData = face.QueryLandmarks();
                                    PXCMFaceData.DetectionData ddata        = face.QueryDetection();
                                    PXCMFaceData.PoseData      poseData     = face.QueryPose();
                                    poseData.QueryHeadPosition(out PXCMFaceData.HeadPosition headPosition);
                                    poseData.QueryPoseAngles(out PXCMFaceData.PoseEulerAngles poseEulerAngles);
                                    Debug.WriteLine(headPosition.headCenter.x + " " + headPosition.headCenter.y + " " + headPosition.headCenter.z + " " + poseEulerAngles.pitch + " " + poseEulerAngles.roll + " " + poseEulerAngles.yaw);

                                    //Rectangle coordenates from detected face
                                    ddata.QueryBoundingRect(out PXCMRectI32 rect);

                                    //See the offset struct to define the values
                                    rect2crop = new Int32Rect(rect.x + offset.x, rect.y + offset.y, rect.w + offset.w, rect.h + offset.h);
                                    ddata.QueryFaceAverageDepth(out Single depthDistance);

                                    color.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB32, out imageColor);
                                    depth.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_DEPTH_RAW, out imageDepth);
                                    ir.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB24, out imageIr);

                                    //Convert it to Bitmap
                                    wbm1 = imageColor.ToWritableBitmap(0, color.info.width, color.info.height, 100.0, 100.0);
                                    wbm2 = imageDepth.ToWritableBitmap(0, depth.info.width, depth.info.height, 100.0, 100.0);
                                    wbm3 = imageIr.ToWritableBitmap(0, ir.info.width, ir.info.height, 100.0, 100.0);

                                    color.ReleaseAccess(imageColor);
                                    depth.ReleaseAccess(imageDepth);
                                    ir.ReleaseAccess(imageIr);

                                    nameColor = paths.rgbFolder + "\\" + videoName + "\\" + videoName + "_color_" + frameIndex + formatImageFile;
                                    nameDepth = paths.depthFolder + "\\" + videoName + "\\" + videoName + "_depth_" + frameIndex + formatImageFile;
                                    nameIr    = paths.irFolder + "\\" + videoName + "\\" + videoName + "_ir_" + frameIndex + formatImageFile;

                                    //Crops the face images!
                                    CreateThumbnail(nameColor, new CroppedBitmap(wbm1, rect2crop));
                                    CreateThumbnail(nameDepth, new CroppedBitmap(wbm2, rect2crop));
                                    CreateThumbnail(nameIr, new CroppedBitmap(wbm3, rect2crop));

                                    //Debug.WriteLine((depthDistance /1000 ) + " m" + " " + rect.x + " " + rect.y + " " + rect.w + " " + rect.h);

                                    /*
                                     * x - The horizontal coordinate of the top left pixel of the rectangle.
                                     * y - The vertical coordinate of the top left pixel of the rectangle.
                                     * w - The rectangle width in pixels.
                                     * h - The rectangle height in pixels. */

                                    if (landmarkData != null)
                                    {
                                        PXCMFaceData.LandmarkPoint[] landmarkPoints;
                                        landmarkData.QueryPoints(out landmarkPoints);

                                        Application.Current.Dispatcher.BeginInvoke(new Action(() => textBox1.Text = frameIndex.ToString()));

                                        landmarks += inputFile.Split('\\').Last() + ";" + frameIndex + ";" + nameColor + ";" + nameDepth + ";" + nameIr + ";" + frameTimeStamp + ";" + depthDistance.ToString("F") + ";" + poseEulerAngles.yaw.ToString("F") + ";" + poseEulerAngles.pitch.ToString("F") + ";" + poseEulerAngles.roll.ToString("F") + ";"; // Begin line with frame info

                                        for (int j = 0; j < landmarkPoints.Length; j++)                                                                                                                                                                                                                                                                    // Writes landmarks coordinates along the line
                                        {
                                            //get world coordinates
                                            landmarks += /*landmarkPoints[j].source.index + ";" +*/ (landmarkPoints[j].world.x * 1000).ToString("F") + ";" + (landmarkPoints[j].world.y * 1000).ToString("F") + ";" + (landmarkPoints[j].world.z * 1000).ToString("F") + ";";
                                        }
                                        for (int j = 0; j < landmarkPoints.Length; j++)
                                        {
                                            //get coordinate of the image pixel
                                            landmarks += /*landmarkPoints[j].confidenceImage + ";" + */ landmarkPoints[j].image.x.ToString("F") + ";" + landmarkPoints[j].image.y.ToString("F") + ";";
                                        }
                                        landmarks += '\n'; // Breaks line after the end of the frame coordinates
                                    }
                                }
                            }
                            // Release the frame
                            if (faceData != null)
                            {
                                faceData.Dispose();
                            }
                            sm.ReleaseFrame();

                            WriteToFile(paths.csvFile, landmarks);
                            landmarks = null;
                        }
                        sm.Dispose();
                        stopwatch.Stop();
                        //Update in realtime the current extraction
                        Application.Current.Dispatcher.BeginInvoke(new Action(() =>
                        {
                            elapsedLabel.Content = String.Format("Elapsed Time: {0} (s)", stopwatch.Elapsed.TotalSeconds.ToString("F"));
                        }));
                    }
                }
            }
        }
Example #17
        private void ProcessingThread()
        {
            // Start AcquireFrame/ReleaseFrame loop
            while (senseManager.AcquireFrame(true) >= pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                // Acquire the color image data
                PXCMCapture.Sample  sample = senseManager.QuerySample();
                Bitmap              colorBitmap;
                PXCMImage.ImageData colorData;
                int            topScore   = 0;
                FaceExpression expression = FaceExpression.None;

                sample.color.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB24, out colorData);
                colorBitmap = colorData.ToBitmap(0, sample.color.info.width, sample.color.info.height);

                try
                {
                    IBarcodeReader reader = new BarcodeReader();
                    // load a bitmap
                    //var barcodeBitmap = (Bitmap)Bitmap.LoadFrom("C:\\sample-barcode-image.png");
                    // detect and decode the barcode inside the bitmap
                    var result = reader.Decode(colorBitmap);
                    // do something with the result
                    if (result != null)
                    {
                        MessageBox.Show(result.BarcodeFormat.ToString());
                        MessageBox.Show(result.Text);
                    }
                }
                catch (Exception)
                {
                    // Barcode decoding is best-effort; ignore frames that fail to decode
                }

                // Get face data
                if (faceData != null)
                {
                    faceData.Update();
                    numFacesDetected = faceData.QueryNumberOfDetectedFaces();

                    if (numFacesDetected > 0)
                    {
                        // Get the first face detected (index 0)
                        PXCMFaceData.Face face = faceData.QueryFaceByIndex(0);

                        // Retrieve face location data
                        PXCMFaceData.DetectionData faceDetectionData = face.QueryDetection();
                        if (faceDetectionData != null)
                        {
                            PXCMRectI32 faceRectangle;
                            faceDetectionData.QueryBoundingRect(out faceRectangle);
                            if ((faceRectangle.h > 90) || (faceRectangle.w > 90))
                            {
                                faceRectangleHeight = faceRectangle.h * 3 / 2;
                                faceRectangleWidth  = faceRectangle.w * 3 / 2;
                            }
                            else if (((faceRectangle.h < 90) || (faceRectangle.w < 90)) && ((faceRectangle.h > 70) || (faceRectangle.w > 70)))
                            {
                                faceRectangleHeight = faceRectangle.h * 2;
                                faceRectangleWidth  = faceRectangle.w * 2;
                            }
                            else
                            {
                                faceRectangleHeight = faceRectangle.h * 5 / 2;
                                faceRectangleWidth  = faceRectangle.w * 5 / 2;
                            }
                            faceRectangleX = faceRectangle.x;
                            faceRectangleY = faceRectangle.y;
                        }

                        // Retrieve pose estimation data
                        PXCMFaceData.PoseData facePoseData = face.QueryPose();
                        if (facePoseData != null)
                        {
                            PXCMFaceData.PoseEulerAngles headAngles;
                            facePoseData.QueryPoseAngles(out headAngles);
                            headRoll  = headAngles.roll;
                            headPitch = headAngles.pitch;
                            headYaw   = headAngles.yaw;
                        }

                        // Retrieve expression data
                        PXCMFaceData.ExpressionsData expressionData = face.QueryExpressions();

                        if (expressionData != null)
                        {
                            PXCMFaceData.ExpressionsData.FaceExpressionResult score;

                            expressionData.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_KISS, out score);
                            expressionScore[Convert.ToInt32(FaceExpression.Kiss)] = score.intensity;

                            expressionData.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_MOUTH_OPEN, out score);
                            expressionScore[Convert.ToInt32(FaceExpression.Open)] = score.intensity;

                            expressionData.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_SMILE, out score);
                            expressionScore[Convert.ToInt32(FaceExpression.Smile)] = score.intensity;

                            expressionData.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_TONGUE_OUT, out score);
                            expressionScore[Convert.ToInt32(FaceExpression.Tongue)] = score.intensity;

                            // Determine the highest scoring expression
                            for (int i = 1; i < TotalExpressions; i++)
                            {
                                if (expressionScore[i] > topScore)
                                {
                                    topScore   = expressionScore[i];
                                    expression = (FaceExpression)i;
                                }
                            }
                        }

                        // Process face recognition data
                        if (face != null)
                        {
                            // Retrieve the recognition data instance
                            recognitionData = face.QueryRecognition();

                            // Set the user ID and process register/unregister logic
                            if (recognitionData.IsRegistered())
                            {
                                userId = Convert.ToString(recognitionData.QueryUserID());

                                if (doUnregister)
                                {
                                    recognitionData.UnregisterUser();
                                    SaveDatabaseToFile();
                                    doUnregister = false;
                                    if (_persistentDict.ContainsKey(userId) == true)
                                    {
                                        _persistentDict.Remove(userId);
                                    }
                                }
                            }
                            else
                            {
                                if (doRegister)
                                {
                                    int uId = recognitionData.RegisterUser();
                                    SaveDatabaseToFile();

                                    if (newUserName != "")
                                    {
                                        if (_persistentDict.ContainsKey(uId.ToString()) == false)
                                        {
                                            _persistentDict.Add(uId.ToString(), newUserName);
                                            _persistentDict.Flush();
                                            newUserName = "";
                                        }
                                    }

                                    // Capture a jpg image of registered user
                                    colorBitmap.Save("image.jpg", System.Drawing.Imaging.ImageFormat.Jpeg);

                                    doRegister = false;
                                }
                                else
                                {
                                    userId = "New User";
                                }
                            }
                        }
                    }
                    else
                    {
                        userId = "No users in view";
                    }
                }

                //hand = senseManager.QueryHand();

                //if (hand != null)
                //{

                //    // Retrieve the most recent processed data
                //    handData = hand.CreateOutput();
                //    handData.Update();

                //    // Get number of tracked hands
                //    nhands = handData.QueryNumberOfHands();

                //    if (nhands > 0)
                //    {
                //        // Retrieve hand identifier
                //        handData.QueryHandId(PXCMHandData.AccessOrderType.ACCESS_ORDER_BY_TIME, 0, out handId);

                //        // Retrieve hand data
                //        handData.QueryHandDataById(handId, out ihand);

                //        PXCMHandData.BodySideType bodySideType = ihand.QueryBodySide();
                //        if (bodySideType == PXCMHandData.BodySideType.BODY_SIDE_LEFT)
                //        {
                //            leftHand = true;
                //        }
                //        else if (bodySideType == PXCMHandData.BodySideType.BODY_SIDE_RIGHT)
                //        {
                //            leftHand = false;
                //        }



                //        // Retrieve all hand joint data
                //        for (int i = 0; i < nhands; i++)
                //        {
                //            for (int j = 0; j < 0x20; j++)
                //            {
                //                PXCMHandData.JointData jointData;
                //                ihand.QueryTrackedJoint((PXCMHandData.JointType)j, out jointData);
                //                nodes[i][j] = jointData;
                //            }
                //        }

                //        // Get world coordinates for tip of middle finger on the first hand in camera range
                //        handTipX = nodes[0][Convert.ToInt32(PXCMHandData.JointType.JOINT_MIDDLE_TIP)].positionWorld.x;
                //        handTipY = nodes[0][Convert.ToInt32(PXCMHandData.JointType.JOINT_MIDDLE_TIP)].positionWorld.y;
                //        handTipZ = nodes[0][Convert.ToInt32(PXCMHandData.JointType.JOINT_MIDDLE_TIP)].positionWorld.z;


                //        swipehandTipX = nodes[0][Convert.ToInt32(PXCMHandData.JointType.JOINT_MIDDLE_TIP)].positionImage.x;
                //        swipehandTipY = nodes[0][Convert.ToInt32(PXCMHandData.JointType.JOINT_MIDDLE_TIP)].positionImage.y;
                //        swipehandTipZ = nodes[0][Convert.ToInt32(PXCMHandData.JointType.JOINT_MIDDLE_TIP)].positionImage.z;

                //        //Console.Out.WriteLine("Before x={0}", swipehandTipX);
                //        //Console.Out.WriteLine("Before speed={0}", nodes[0][Convert.ToInt32(PXCMHandData.JointType.JOINT_MIDDLE_TIP)].speed.x);

                //        // Retrieve gesture data
                //        if (handData.IsGestureFired("spreadfingers", out gestureData)) { gesture = Gesture.FingerSpread; }
                //        else if (handData.IsGestureFired("two_fingers_pinch_open", out gestureData)) { gesture = Gesture.Pinch; }
                //        else if (handData.IsGestureFired("wave", out gestureData)) { gesture = Gesture.Wave; }
                //        else if (handData.IsGestureFired("swipe_left", out gestureData)) { gesture = Gesture.SwipeLeft; }
                //        else if (handData.IsGestureFired("swipe_right", out gestureData)) { gesture = Gesture.SwipeRight; }
                //        else if (handData.IsGestureFired("fist", out gestureData)) { gesture = Gesture.Fist; }
                //        else if (handData.IsGestureFired("thumb_up", out gestureData)) { gesture = Gesture.Thumb; }

                //    }
                //    else
                //    {
                //        gesture = Gesture.Undefined;
                //    }

                //    //UpdateUI();
                //    if (handData != null) handData.Dispose();
                //}

                // Display the color stream and other UI elements
                //UpdateUI(colorBitmap, expression, gesture);

                UpdateUI(colorBitmap, expression);

                // Release resources
                colorBitmap.Dispose();
                sample.color.ReleaseAccess(colorData);
                sample.color.Dispose();

                // Release the frame
                senseManager.ReleaseFrame();
            }
        }
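
The expression block in the method above issues the same QueryExpression call for each of the four tracked expressions and then scans expressionScore for the winner. A table-driven variant is sketched below; the TrackedExpressions dictionary and the QueryTopExpression helper are illustrative assumptions (assuming a System.Collections.Generic using), while the FaceExpression enum values and the SDK calls mirror the ones already used above.

        // Sketch: query a fixed set of SDK expressions and return the app-level expression
        // with the highest intensity. Assumes the FaceExpression enum from the example above.
        private static readonly Dictionary<PXCMFaceData.ExpressionsData.FaceExpression, FaceExpression> TrackedExpressions =
            new Dictionary<PXCMFaceData.ExpressionsData.FaceExpression, FaceExpression>
        {
            { PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_KISS,       FaceExpression.Kiss   },
            { PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_MOUTH_OPEN, FaceExpression.Open   },
            { PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_SMILE,      FaceExpression.Smile  },
            { PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_TONGUE_OUT, FaceExpression.Tongue }
        };

        private static FaceExpression QueryTopExpression(PXCMFaceData.ExpressionsData expressionData)
        {
            FaceExpression top      = FaceExpression.None;
            int            topScore = 0;

            foreach (var pair in TrackedExpressions)
            {
                PXCMFaceData.ExpressionsData.FaceExpressionResult score;
                if (expressionData.QueryExpression(pair.Key, out score) && score.intensity > topScore)
                {
                    topScore = score.intensity;
                    top      = pair.Value;
                }
            }
            return top;
        }
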
Exemplo n.º 18
0
        private void ProcessingThread()
        {
            // Start AcquireFrame/ReleaseFrame loop
            while (senseManager.AcquireFrame(true) >= pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                // Acquire the color image data
                PXCMCapture.Sample  sample = senseManager.QuerySample();
                Bitmap              colorBitmap;
                PXCMImage.ImageData colorData;
                sample.color.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB24, out colorData);
                colorBitmap = colorData.ToBitmap(0, sample.color.info.width, sample.color.info.height);

                // Get face data
                if (faceData != null)
                {
                    faceData.Update();
                    numFacesDetected = faceData.QueryNumberOfDetectedFaces();

                    if (numFacesDetected > 0)
                    {
                        // Get the first face detected (index 0)
                        PXCMFaceData.Face face = faceData.QueryFaceByIndex(0);
                        face.QueryExpressions();
                        PXCMFaceData.PoseData poseData = face.QueryPose();
                        //  PXCMPoint3DF32 outHeadPosition = new PXCMPoint3DF32(); //F200 has added confidence into struct
                        PXCMFaceData.HeadPosition outHeadPosition = new PXCMFaceData.HeadPosition();

                        // Process the head pose data to find the head center position
                        poseData.QueryHeadPosition(out outHeadPosition);
                        System.Windows.Media.Media3D.Point3DCollection points = new System.Windows.Media.Media3D.Point3DCollection();
                        points.Add(new System.Windows.Media.Media3D.Point3D(outHeadPosition.headCenter.x,
                                                                            outHeadPosition.headCenter.y, outHeadPosition.headCenter.z));

                        Console.WriteLine("head center position: " + points);
                        // poseData.QueryHeadPosition(out outHeadPosition);
                        PXCMFaceData.PoseEulerAngles outPoseEulerAngles = new PXCMFaceData.PoseEulerAngles();
                        poseData.QueryPoseAngles(out outPoseEulerAngles);
                        roll  = (int)outPoseEulerAngles.roll;
                        pitch = (int)outPoseEulerAngles.pitch;
                        yaw   = (int)outPoseEulerAngles.yaw;
                        // PXCMFaceData.LandmarkType.LANDMARK_EYE_LEFT_CENTER what can I do with this?
                        if (pitch + 12 > 10)
                        {
                            headUp = true;
                        }
                        else
                        {
                            headUp = false;
                        }
                        if (pitch < -10)
                        {
                            headDown = true;
                        }
                        else
                        {
                            headDown = false;
                        }
                        if (roll > 5)
                        {
                            headTiltLeft = true;
                        }
                        else
                        {
                            headTiltLeft = false;
                        }
                        if (roll < -5)
                        {
                            headTiltRight = true;
                        }
                        else
                        {
                            headTiltRight = false;
                        }
                        if (yaw > 5)
                        {
                            headTurnLeft = true;
                        }
                        else
                        {
                            headTurnLeft = false;
                        }
                        if (yaw < -5)
                        {
                            headTurnRight = true;
                        }
                        else
                        {
                            headTurnRight = false;
                        }

                        //Console.WriteLine("Rotation: " + outPoseEulerAngles.roll + " " + outPoseEulerAngles.pitch + " " + outPoseEulerAngles.yaw);
                        PXCMFaceData.ExpressionsData edata = face.QueryExpressions();
                        // retrieve the expression information
                        PXCMFaceData.ExpressionsData.FaceExpressionResult smileScore;
                        PXCMFaceData.ExpressionsData.FaceExpressionResult eyesUpScore;
                        PXCMFaceData.ExpressionsData.FaceExpressionResult eyesDownScore;
                        PXCMFaceData.ExpressionsData.FaceExpressionResult eyesTurnLeftScore;
                        PXCMFaceData.ExpressionsData.FaceExpressionResult eyesTurnRightScore;
                        PXCMFaceData.ExpressionsData.FaceExpressionResult headTiltedLeftScore;
                        PXCMFaceData.ExpressionsData.FaceExpressionResult headTurnedLeftScore;
                        // PXCMFaceData.ExpressionsData.FaceExpressionResult headUpScore;
                        //PXCMFaceData.ExpressionsData.FaceExpressionResult headDownScore;
                        edata.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_SMILE, out smileScore);
                        edata.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_EYES_UP, out eyesUpScore);
                        edata.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_EYES_DOWN, out eyesDownScore);
                        edata.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_EYES_TURN_LEFT, out eyesTurnLeftScore);
                        edata.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_EYES_TURN_RIGHT, out eyesTurnRightScore);
                        edata.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_HEAD_TILT_LEFT, out headTiltedLeftScore);
                        edata.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_HEAD_TURN_LEFT, out headTurnedLeftScore);
                        // edata.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_HEAD_UP, out headUpScore);
                        //edata.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_HEAD_DOWN, out headDownScore);
                        smile  = smileScore.intensity;
                        eyesUp = eyesUpScore.intensity;
                        if (eyesUp == 100)
                        {
                            eyeIsUp = true;
                        }
                        else
                        {
                            eyeIsUp = false;
                        }
                        eyesDown = eyesDownScore.intensity;
                        if (eyesDown == 100)
                        {
                            eyeIsDown = true;
                        }
                        else
                        {
                            eyeIsDown = false;
                        }

                        eyesTurnLeft  = eyesTurnLeftScore.intensity;
                        eyesTurnRight = eyesTurnRightScore.intensity;
                        //  headTiltLeft = headTiltedLeftScore.intensity;
                        // headTurnLeft= headTurnedLeftScore.intensity;
                        // headUp = headUpScore.intensity;
                        //headDown = headDownScore.intensity;
                        PXCMCapture.Device device = senseManager.captureManager.device;
                        device.SetIVCAMAccuracy(PXCMCapture.Device.IVCAMAccuracy.IVCAM_ACCURACY_FINEST);
                        // eyeIsUP= CheckFaceExpression(edata, FaceExpression.EXPRESSION_EYES_UP, 15);
                        if ((headTiltLeft || headTurnLeft) && headUp && (eyesTurnLeft == 100) && !eyeIsDown)
                        {
                            looksForward = true;
                        }

                        else if ((headTiltRight || headTurnRight) && headUp && (eyesTurnRight == 100) && !eyeIsDown)
                        {
                            looksForward = true;
                        }


                        /* else if (headTiltRight & (headDown|headUp) & (!headTurnRight) & (eyesTurnRight==100))
                         *   looksForward = true;
                         * else if (headTiltLeft & (headDown|headUp) &  (!headTurnLeft) & (eyesTurnLeft == 100))
                         *   looksForward = true;
                         * */
                        else
                        {
                            looksForward = eyeIsUp;
                        }
                        //  headTiltLeftThreshold = CheckFaceExpression(edata, FaceExpression.EXPRESSION_HEAD_TILT_LEFT, 15);

                        //csv mona
                        // var csv = new StringBuilder();
                        // outputs 10:00 PM
                        //    var newLine = string.Format("{0},{1},{2},{3},{4}{5}", DateTime.Now.ToString("dd-MM-yyyy-hh:mm:ss:fff"), roll, pitch, yaw, eyesUp, Environment.NewLine);
                        //     csv.Append(newLine);
                        // string pathString = System.IO.Path.Combine(filePath, fileName);

                        //   File.AppendAllText(pathString, csv.ToString());



                        // Retrieve face location data
                        PXCMFaceData.DetectionData faceDetectionData = face.QueryDetection();
                        if (faceDetectionData != null)
                        {
                            PXCMRectI32 faceRectangle;
                            faceDetectionData.QueryBoundingRect(out faceRectangle);
                            faceRectangleHeight = faceRectangle.h;
                            faceRectangleWidth  = faceRectangle.w;
                            faceRectangleX      = faceRectangle.x;
                            faceRectangleY      = faceRectangle.y;
                        }


                        // Process face recognition data
                        if (face != null)
                        {
                            // Retrieve the recognition data instance
                            recognitionData = face.QueryRecognition();

                            // Set the user ID and process register/unregister logic
                            if (recognitionData.IsRegistered())
                            {
                                userId = Convert.ToString(recognitionData.QueryUserID());

                                if (doUnregister)
                                {
                                    recognitionData.UnregisterUser();
                                    doUnregister = false;
                                }
                            }
                            else
                            {
                                if (doRegister)
                                {
                                    recognitionData.RegisterUser();

                                    // Capture a jpg image of registered user
                                    colorBitmap.Save("image.jpg", System.Drawing.Imaging.ImageFormat.Jpeg);

                                    doRegister = false;
                                }
                                else
                                {
                                    userId = "Unrecognized";
                                }
                            }
                        }
                    }
                    else
                    {
                        userId = "No users in view";
                    }
                }

                // Display the color stream and other UI elements
                UpdateUI(colorBitmap);

                // Release resources
                colorBitmap.Dispose();
                sample.color.ReleaseAccess(colorData);
                sample.color.Dispose();

                // Release the frame
                senseManager.ReleaseFrame();
            }
        }
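
The pose handling above converts the Euler angles into six head-orientation flags with inline thresholds (including a +12 degree pitch offset). A small helper that keeps those thresholds in one place is sketched below; the HeadPoseFlags type and the parameter defaults are assumptions chosen to mirror the values used in the example.

        // Sketch: classify head orientation from the pose Euler angles.
        // The default thresholds and the +12 degree pitch offset mirror the example above.
        private struct HeadPoseFlags
        {
            public bool Up, Down, TiltLeft, TiltRight, TurnLeft, TurnRight;
        }

        private static HeadPoseFlags ClassifyHeadPose(float pitch, float roll, float yaw,
                                                      float pitchOffset = 12f, float pitchThreshold = 10f,
                                                      float rollThreshold = 5f, float yawThreshold = 5f)
        {
            return new HeadPoseFlags
            {
                Up        = pitch + pitchOffset > pitchThreshold,
                Down      = pitch < -pitchThreshold,
                TiltLeft  = roll > rollThreshold,
                TiltRight = roll < -rollThreshold,
                TurnLeft  = yaw > yawThreshold,
                TurnRight = yaw < -yawThreshold
            };
        }
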
Exemplo n.º 19
0
        private void CaptureProcess()
        {
            Sender = new UDPSender(CurrentIpAdress, Convert.ToInt32(currentPort));
            while (senseManager.AcquireFrame(true).IsSuccessful())
            {
                PXCMCapture.Sample  sample = senseManager.QuerySample();
                PXCMImage.ImageData colorImageData;
                Bitmap colorBitmap;

                sample.color.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB32, out colorImageData);
                colorBitmap = colorImageData.ToBitmap(0, sample.color.info.width, sample.color.info.height);

                if (faceModule != null)
                {
                    PXCMFaceData faceData = faceModule.CreateOutput();
                    faceData.Update();
                    numberTrackedFace = faceData.QueryNumberOfDetectedFaces();

                    PXCMFaceData.Face faceDataFace = faceData.QueryFaceByIndex(0);

                    if (faceDataFace != null)
                    {
                        PXCMFaceData.DetectionData faceDetectionData = faceDataFace.QueryDetection();
                        PXCMFaceData.LandmarksData landMarksData     = faceDataFace.QueryLandmarks();


                        if (faceDetectionData != null) // Store the face detection variables
                        {
                            PXCMRectI32 faceRectangle;
                            faceDetectionData.QueryFaceAverageDepth(out faceAverageDepth);
                            faceDetectionData.QueryBoundingRect(out faceRectangle);
                            faceRectangleHeight = faceRectangle.h;
                            faceRectangleWidth  = faceRectangle.w;
                            faceRectangleX      = faceRectangle.x;
                            faceRectangleY      = faceRectangle.y;

                            PXCMFaceData.LandmarkPoint[] points; // Draw the landmark points onto the face
                            if (landMarksData != null)
                            {
                                bool res = landMarksData.QueryPoints(out points);

                                Graphics graphics = Graphics.FromImage(colorBitmap);
                                Font     font     = new Font(System.Drawing.FontFamily.GenericMonospace, 12, System.Drawing.FontStyle.Bold);

                                foreach (PXCMFaceData.LandmarkPoint landmark in points)
                                {
                                    point.X = landmark.image.x + LandMarkAlingment;
                                    point.Y = landmark.image.y + LandMarkAlingment;
                                    //Console.WriteLine(point.X);

                                    if (landmark.confidenceImage == 0)
                                    {
                                        graphics.DrawString("X", font, System.Drawing.Brushes.Brown, point);
                                    }
                                    else
                                    {
                                        graphics.DrawString("*", font, System.Drawing.Brushes.CornflowerBlue, point);
                                    }
                                    Connect = Math.Min(landmark.confidenceImage, 1);
                                }
                            }
                        }

                        var connectMessage = new SharpOSC.OscMessage("/expressions/connectMessage", Connect);
                        Sender.Send(connectMessage);

                        PXCMFaceData.PoseData facePoseData = faceDataFace.QueryPose(); // Rotation variables for animating the head
                        if (facePoseData != null)
                        {
                            PXCMFaceData.PoseEulerAngles headAngles;
                            facePoseData.QueryPoseAngles(out headAngles);
                            headRoll  = headAngles.roll;
                            headYaw   = headAngles.yaw;
                            headPitch = headAngles.pitch;
                        }

                        PXCMFaceData.ExpressionsData expressionData = faceDataFace.QueryExpressions();

                        if (expressionData != null)
                        {
                            PXCMFaceData.ExpressionsData.FaceExpressionResult score;

                            expressionData.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_KISS, out score);
                            Dispatcher.Invoke(() => kissExpression.Text = Convert.ToString(score.intensity));
                            var kissMessage = new SharpOSC.OscMessage("/expressions/kiss", score.intensity);
                            Sender.Send(kissMessage);

                            expressionData.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_MOUTH_OPEN, out score);
                            Dispatcher.Invoke(() => mouthExpression.Text = Convert.ToString(score.intensity));
                            var mouthMessage = new SharpOSC.OscMessage("/expressions/mouth", score.intensity);
                            Sender.Send(mouthMessage);


                            expressionData.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_SMILE, out score);
                            Dispatcher.Invoke(() => smileExpression.Text = Convert.ToString(score.intensity));
                            var smileMessage = new SharpOSC.OscMessage("/expressions/smile", score.intensity);
                            Sender.Send(smileMessage);

                            expressionData.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_TONGUE_OUT, out score);
                            Dispatcher.Invoke(() => tongueExpression.Text = Convert.ToString(score.intensity));
                            var tongueOutMessage = new SharpOSC.OscMessage("/expressions/tongueout", score.intensity);
                            Sender.Send(tongueOutMessage);


                            expressionData.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_BROW_LOWERER_LEFT, out score);
                            Dispatcher.Invoke(() => leftBrowLowExpression.Text = Convert.ToString(score.intensity));
                            var leftBrowLowMessage = new SharpOSC.OscMessage("/expressions/leftBrowLow", score.intensity);
                            Sender.Send(leftBrowLowMessage);


                            expressionData.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_BROW_LOWERER_RIGHT, out score);
                            Dispatcher.Invoke(() => rightBrowLowExpression.Text = Convert.ToString(score.intensity));
                            var rightBrowLowMessage = new SharpOSC.OscMessage("/expressions/rightBrowLow", score.intensity);
                            Sender.Send(rightBrowLowMessage);


                            expressionData.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_BROW_RAISER_LEFT, out score);
                            Dispatcher.Invoke(() => leftBrowRaiseExpression.Text = Convert.ToString(score.intensity));
                            var leftBrowRaiseMessage = new SharpOSC.OscMessage("/expressions/leftBrowRaise", score.intensity);
                            Sender.Send(leftBrowRaiseMessage);

                            expressionData.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_BROW_RAISER_RIGHT, out score);
                            Dispatcher.Invoke(() => rightBrowRaiseExpression.Text = Convert.ToString(score.intensity));
                            var rightBrowRaiseMessage = new SharpOSC.OscMessage("/expressions/rightBrowRaise", score.intensity);
                            Sender.Send(rightBrowRaiseMessage);


                            expressionData.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_EYES_CLOSED_LEFT, out score);
                            Dispatcher.Invoke(() => leftEyeClosedExpression.Text = Convert.ToString(score.intensity));
                            var leftEyeClosedMessage = new SharpOSC.OscMessage("/expressions/leftEyeClosed", score.intensity);
                            Sender.Send(leftEyeClosedMessage);


                            expressionData.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_EYES_CLOSED_RIGHT, out score);
                            Dispatcher.Invoke(() => rightEyeClosedExpression.Text = Convert.ToString(score.intensity));
                            var rightEyeClosedMessage = new SharpOSC.OscMessage("/expressions/rightEyeClosed", score.intensity);
                            Sender.Send(rightEyeClosedMessage);


                            expressionData.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_EYES_TURN_LEFT, out score);
                            Dispatcher.Invoke(() => eyesTurnLeftExpression.Text = Convert.ToString(score.intensity));
                            var eyesTurnLeftMessage = new SharpOSC.OscMessage("/expressions/eyesTurnLeft", score.intensity);
                            Sender.Send(eyesTurnLeftMessage);


                            expressionData.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_EYES_TURN_RIGHT, out score);
                            Dispatcher.Invoke(() => eyesTurnRightExpression.Text = Convert.ToString(score.intensity));
                            var eyesTurnRightMessage = new SharpOSC.OscMessage("/expressions/eyesTurnRight", score.intensity);
                            Sender.Send(eyesTurnRightMessage);



                            expressionData.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_EYES_DOWN, out score);
                            Dispatcher.Invoke(() => eyesDownExpression.Text = Convert.ToString(score.intensity));
                            var eyesDownMessage = new SharpOSC.OscMessage("/expressions/eyesDown", score.intensity);
                            Sender.Send(eyesDownMessage);


                            expressionData.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_EYES_UP, out score);
                            Dispatcher.Invoke(() => eyesUpExpression.Text = Convert.ToString(score.intensity));
                            var eyesUpMessage = new SharpOSC.OscMessage("/expressions/eyesUp", score.intensity);
                            Sender.Send(eyesUpMessage);


                            expressionData.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_PUFF_LEFT, out score);
                            Dispatcher.Invoke(() => puffLeftExpression.Text = Convert.ToString(score.intensity));
                            var leftPuffMessage = new SharpOSC.OscMessage("/expressions/leftPuff", score.intensity);
                            Sender.Send(leftPuffMessage);



                            expressionData.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_PUFF_RIGHT, out score);
                            Dispatcher.Invoke(() => puffRightExpression.Text = Convert.ToString(score.intensity));
                            var rightPuffMessage = new SharpOSC.OscMessage("/expressions/rightPuff", score.intensity);
                            Sender.Send(rightPuffMessage);
                        }
                    }
                    faceData.Dispose();
                }
                UpdateUI(colorBitmap);

                colorBitmap.Dispose();
                sample.color.ReleaseAccess(colorImageData);
                senseManager.ReleaseFrame();
            }
        }
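
Every expression in CaptureProcess follows the same three steps: query the intensity, push it into a text control through the Dispatcher, and send it as an OSC message. A table-driven sketch of that pattern is shown below; the ExpressionChannel type and the SendExpressions helper are illustrative assumptions (assuming the usual System and System.Collections.Generic usings), while the SharpOSC calls mirror the ones already used in the example.

        // Sketch: drive the query / display / send pattern from a single table.
        // Assumes the same Sender (SharpOSC.UDPSender) as the example above; a channel entry
        // might look like, for instance:
        //   new ExpressionChannel {
        //       Expression = PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_SMILE,
        //       OscAddress = "/expressions/smile",
        //       Display    = v => Dispatcher.Invoke(() => smileExpression.Text = v.ToString())
        //   }
        private sealed class ExpressionChannel
        {
            public PXCMFaceData.ExpressionsData.FaceExpression Expression;
            public string OscAddress;
            public Action<int> Display;
        }

        private void SendExpressions(PXCMFaceData.ExpressionsData expressionData, IEnumerable<ExpressionChannel> channels)
        {
            foreach (ExpressionChannel channel in channels)
            {
                PXCMFaceData.ExpressionsData.FaceExpressionResult score;
                if (!expressionData.QueryExpression(channel.Expression, out score))
                {
                    continue; // Expression not reported for this frame
                }
                channel.Display(score.intensity);
                Sender.Send(new SharpOSC.OscMessage(channel.OscAddress, score.intensity));
            }
        }
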