Example #1
        private bool UpdateFace(AnimationTrigger animationTrigger)
        {
            animationTrigger.IsAnimationDataValid = false;

            // Get the closest face
            PXCMFaceData.Face pxcmFace = SenseToolkitManager.Instance.FaceModuleOutput.QueryFaceByIndex(0);

            if (pxcmFace == null)
            {
                return(false);
            }

            if (SenseToolkitManager.Instance.FaceModuleOutput.QueryNumberOfDetectedFaces() <= 0)
            {
                // No faces detected.
                return(false);
            }

            PXCMFaceData.LandmarksData pxcmLandmarksData = pxcmFace.QueryLandmarks();
            if (pxcmLandmarksData == null)
            {
                // PXCMFaceData.Face.QueryLandmarks() failed.
                return(false);
            }

            Dictionary <PXCMFaceData.LandmarkType, PXCMFaceData.LandmarkPoint> landmarkPoints = new Dictionary <PXCMFaceData.LandmarkType, PXCMFaceData.LandmarkPoint>();

            PXCMFaceData.LandmarkType[] landmarkTypes = new PXCMFaceData.LandmarkType[]
            {
                PXCMFaceData.LandmarkType.LANDMARK_NOSE_TIP,
                PXCMFaceData.LandmarkType.LANDMARK_NOSE_TOP,
                PXCMFaceData.LandmarkType.LANDMARK_EYEBROW_LEFT_CENTER,
                PXCMFaceData.LandmarkType.LANDMARK_EYE_LEFT_CENTER,
                PXCMFaceData.LandmarkType.LANDMARK_EYEBROW_RIGHT_CENTER,
                PXCMFaceData.LandmarkType.LANDMARK_EYE_RIGHT_CENTER,
                PXCMFaceData.LandmarkType.LANDMARK_UPPER_LIP_CENTER,
                PXCMFaceData.LandmarkType.LANDMARK_LOWER_LIP_CENTER,
                PXCMFaceData.LandmarkType.LANDMARK_EYELID_LEFT_TOP,
                PXCMFaceData.LandmarkType.LANDMARK_EYELID_LEFT_BOTTOM,
                PXCMFaceData.LandmarkType.LANDMARK_EYELID_RIGHT_TOP,
                PXCMFaceData.LandmarkType.LANDMARK_EYELID_RIGHT_BOTTOM
            };

            foreach (PXCMFaceData.LandmarkType landmarkType in landmarkTypes)
            {
                PXCMFaceData.LandmarkPoint landmarkPoint = GetLandmarkPoint(pxcmLandmarksData, landmarkType);

                if (landmarkPoint == null)
                {
                    // PXCMFaceData.LandmarksData.QueryPointIndex() failed.
                    return(false);
                }

                landmarkPoints.Add(landmarkType, landmarkPoint);
            }

            PXCMFaceData.ExpressionsData pxcmExpressionsData = pxcmFace.QueryExpressions();
            if (pxcmExpressionsData == null)
            {
                // PXCMFaceData.Face.QueryExpressions() failed.
                return(false);
            }

            animationTrigger.IsAnimationDataValid = true;

            PXCMCapture.Device.MirrorMode mirrorMode = PXCMCapture.Device.MirrorMode.MIRROR_MODE_DISABLED;

            PXCMCaptureManager pxcmCaptureManager = SenseToolkitManager.Instance.SenseManager.QueryCaptureManager();

            if (pxcmCaptureManager != null)
            {
                PXCMCapture.Device pxcmCaptureDevice = pxcmCaptureManager.QueryDevice();
                if (pxcmCaptureDevice != null)
                {
                    mirrorMode = pxcmCaptureDevice.QueryMirrorMode();
                }
            }

            animationTrigger.Animations.Clear();

            float faceHeight = landmarkPoints[PXCMFaceData.LandmarkType.LANDMARK_NOSE_TIP].image.y - landmarkPoints[PXCMFaceData.LandmarkType.LANDMARK_NOSE_TOP].image.y;

            float leftEyebrowUp  = GetNormalizedLandmarksDistance(landmarkPoints[PXCMFaceData.LandmarkType.LANDMARK_EYE_LEFT_CENTER].image.y, landmarkPoints[PXCMFaceData.LandmarkType.LANDMARK_EYEBROW_LEFT_CENTER].image.y, OPTIMAL_RELATIVE_FACE_HEIGHT, faceHeight, EYEBROW_UP_INITIAL_DISTANCE, OPTIMAL_EYEBROW_UP_MAX_DISTANCE, NORMALIZE_MAX_FACIAL_EXPRESSION_VALUE, true);
            float rightEyebrowUp = GetNormalizedLandmarksDistance(landmarkPoints[PXCMFaceData.LandmarkType.LANDMARK_EYE_RIGHT_CENTER].image.y, landmarkPoints[PXCMFaceData.LandmarkType.LANDMARK_EYEBROW_RIGHT_CENTER].image.y, OPTIMAL_RELATIVE_FACE_HEIGHT, faceHeight, EYEBROW_UP_INITIAL_DISTANCE, OPTIMAL_EYEBROW_UP_MAX_DISTANCE, NORMALIZE_MAX_FACIAL_EXPRESSION_VALUE, true);

            float leftEyeClose  = GetNormalizedLandmarksDistance(landmarkPoints[PXCMFaceData.LandmarkType.LANDMARK_EYELID_LEFT_BOTTOM].image.y, landmarkPoints[PXCMFaceData.LandmarkType.LANDMARK_EYELID_LEFT_TOP].image.y, OPTIMAL_RELATIVE_FACE_HEIGHT, faceHeight, EYE_CLOSE_INITIAL_DISTANCE, OPTIMAL_EYE_CLOSE_MAX_DISTANCE, NORMALIZE_MAX_FACIAL_EXPRESSION_VALUE, true);
            float rightEyeClose = GetNormalizedLandmarksDistance(landmarkPoints[PXCMFaceData.LandmarkType.LANDMARK_EYELID_RIGHT_BOTTOM].image.y, landmarkPoints[PXCMFaceData.LandmarkType.LANDMARK_EYELID_RIGHT_TOP].image.y, OPTIMAL_RELATIVE_FACE_HEIGHT, faceHeight, EYE_CLOSE_INITIAL_DISTANCE, OPTIMAL_EYE_CLOSE_MAX_DISTANCE, NORMALIZE_MAX_FACIAL_EXPRESSION_VALUE, true);

            if (mirrorMode == PXCMCapture.Device.MirrorMode.MIRROR_MODE_HORIZONTAL)
            {
                animationTrigger.Animations.Add(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_BROW_RAISER_LEFT.ToString(), rightEyebrowUp);
                animationTrigger.Animations.Add(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_BROW_RAISER_RIGHT.ToString(), leftEyebrowUp);

                animationTrigger.Animations.Add(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_EYES_CLOSED_LEFT.ToString(), rightEyeClose);
                animationTrigger.Animations.Add(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_EYES_CLOSED_RIGHT.ToString(), leftEyeClose);
            }
            else
            {
                animationTrigger.Animations.Add(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_BROW_RAISER_LEFT.ToString(), leftEyebrowUp);
                animationTrigger.Animations.Add(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_BROW_RAISER_RIGHT.ToString(), rightEyebrowUp);

                animationTrigger.Animations.Add(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_EYES_CLOSED_LEFT.ToString(), leftEyeClose);
                animationTrigger.Animations.Add(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_EYES_CLOSED_RIGHT.ToString(), rightEyeClose);
            }

            // Instead of LANDMARK_LOWER_LIP_CENTER, we need landmark 51 (lower lip upper center)
            // Instead of LANDMARK_UPPER_LIP_CENTER, we need landmark 47 (upper lip lower center)
            animationTrigger.Animations.Add(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_MOUTH_OPEN.ToString(), GetNormalizedLandmarksDistance(landmarkPoints[PXCMFaceData.LandmarkType.LANDMARK_LOWER_LIP_CENTER].image.y, landmarkPoints[PXCMFaceData.LandmarkType.LANDMARK_UPPER_LIP_CENTER].image.y, OPTIMAL_RELATIVE_FACE_HEIGHT, faceHeight, MOUTH_OPEN_INITIAL_DISTANCE, OPTIMAL_MOUTH_OPEN_MAX_DISTANCE, NORMALIZE_MAX_FACIAL_EXPRESSION_VALUE, true));

            PXCMFaceData.ExpressionsData.FaceExpressionResult pxcmFaceExpressionResult;

            if (pxcmExpressionsData.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_SMILE, out pxcmFaceExpressionResult))
            {
                animationTrigger.Animations.Add(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_SMILE.ToString(), (float)pxcmFaceExpressionResult.intensity);
            }
            else
            {
                // Error querying expression: EXPRESSION_SMILE.
                return(false);
            }

            return(true);
        }
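
This example depends on two helpers that are not part of the listing, GetLandmarkPoint and GetNormalizedLandmarksDistance, plus several OPTIMAL_*/INITIAL_*/NORMALIZE_* constants defined elsewhere in the class. A minimal sketch of what such helpers could look like in the same Unity class; the normalization formula and the meaning of the final bool parameter are assumptions, not the toolkit's actual implementation:

        // Hypothetical helper: resolves a landmark type to its point, returning null on failure.
        private PXCMFaceData.LandmarkPoint GetLandmarkPoint(PXCMFaceData.LandmarksData landmarksData, PXCMFaceData.LandmarkType landmarkType)
        {
            PXCMFaceData.LandmarkPoint landmarkPoint = null;

            int pointIndex = landmarksData.QueryPointIndex(landmarkType);
            if (pointIndex >= 0)
            {
                landmarksData.QueryPoint(pointIndex, out landmarkPoint);
            }

            return landmarkPoint;
        }

        // Hypothetical helper: converts the vertical distance between two landmarks into a
        // normalized expression weight. The current face height compensates for the user's
        // distance to the camera; the exact formula used by the toolkit may differ.
        private float GetNormalizedLandmarksDistance(float point1Y, float point2Y, float optimalFaceHeight, float faceHeight, float initialDistance, float optimalMaxDistance, float normalizeMaxValue, bool clamp)
        {
            float scale = faceHeight / optimalFaceHeight;
            float distance = Mathf.Abs(point1Y - point2Y) - initialDistance * scale;
            float normalized = distance / (optimalMaxDistance * scale) * normalizeMaxValue;

            return clamp ? Mathf.Clamp(normalized, 0f, normalizeMaxValue) : normalized;
        }
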
Example #2
        public override bool Process(Trigger trigger)
        {
            trigger.ErrorDetected = false;

            if (!SenseToolkitManager.Instance.IsSenseOptionSet(SenseOption.SenseOptionID.Face))
            {
                trigger.ErrorDetected = true;
                return(false);
            }

            if (!(trigger is TrackTrigger))
            {
                trigger.ErrorDetected = true;
                return(false);
            }

            bool success = false;

            // make sure we have valid values
            if (RealWorldBoxDimensions.x <= 0)
            {
                RealWorldBoxDimensions.x = 1;
            }

            if (RealWorldBoxDimensions.y <= 0)
            {
                RealWorldBoxDimensions.y = 1;
            }

            if (RealWorldBoxDimensions.z <= 0)
            {
                RealWorldBoxDimensions.z = 1;
            }

            if (SenseToolkitManager.Instance.Initialized
                &&
                SenseToolkitManager.Instance.FaceModuleOutput != null)
            {
                if (SenseToolkitManager.Instance.FaceModuleOutput.QueryNumberOfDetectedFaces() == 0)
                {
                    return(false);
                }

                PXCMFaceData.Face singleFaceOutput = SenseToolkitManager.Instance.FaceModuleOutput.QueryFaceByIndex(FaceIndex);

                if (singleFaceOutput != null && singleFaceOutput.QueryUserID() >= 0)
                {
                    // Process Tracking
                    if (trigger is TrackTrigger)
                    {
                        TrackTrigger specificTrigger = (TrackTrigger)trigger;

                        var landmarksData = singleFaceOutput.QueryLandmarks();
                        if (landmarksData == null)
                        {
                            return(false);
                        }

                        int landmarkId = landmarksData.QueryPointIndex(LandmarkToTrack);

                        PXCMFaceData.LandmarkPoint point = null;

                        landmarksData.QueryPoint(landmarkId, out point);

                        // Translation
                        if (point != null)
                        {
                            Vector3 vec = new Vector3();
                            vec.x = point.world.x * 100;
                            vec.y = point.world.y * 100;
                            vec.z = point.world.z * 100;

                            // Clamp and normalize to the Real World Box
                            TrackingUtilityClass.ClampToRealWorldInputBox(ref vec, RealWorldBoxCenter, RealWorldBoxDimensions);
                            TrackingUtilityClass.Normalize(ref vec, RealWorldBoxCenter, RealWorldBoxDimensions);

                            if (!float.IsNaN(vec.x) && !float.IsNaN(vec.y) && !float.IsNaN(vec.z))
                            {
                                specificTrigger.Position = vec;
                                success = true;
                            }
                        }

                        //Rotation
                        PXCMFaceData.PoseData poseData = singleFaceOutput.QueryPose();
                        if (poseData != null)
                        {
                            PXCMFaceData.PoseEulerAngles angles;
                            if (poseData.QueryPoseAngles(out angles))
                            {
                                if (!float.IsNaN(angles.pitch) && !float.IsNaN(angles.yaw) && !float.IsNaN(angles.roll))
                                {
                                    Quaternion q = Quaternion.Euler(-angles.pitch, angles.yaw, -angles.roll);

                                    specificTrigger.RotationQuaternion = q;

                                    success = true;
                                }
                            }
                        }
                    }
                }

                return(success);
            }

            return(success);
        }
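
The clamping and normalization steps above are delegated to TrackingUtilityClass.ClampToRealWorldInputBox and TrackingUtilityClass.Normalize, which ship with the RealSense Unity Toolkit and are not shown here. A rough sketch of what these helpers could do, assuming an axis-aligned box around RealWorldBoxCenter and a -1..1 output range (the toolkit's real mapping may differ):

// Hypothetical sketch of the clamping/normalization helpers used by the trigger code above.
public static class TrackingUtilityClass
{
    // Clamps the point to the axis-aligned box given by its center and dimensions.
    public static void ClampToRealWorldInputBox(ref Vector3 point, Vector3 boxCenter, Vector3 boxDimensions)
    {
        point.x = Mathf.Clamp(point.x, boxCenter.x - boxDimensions.x / 2f, boxCenter.x + boxDimensions.x / 2f);
        point.y = Mathf.Clamp(point.y, boxCenter.y - boxDimensions.y / 2f, boxCenter.y + boxDimensions.y / 2f);
        point.z = Mathf.Clamp(point.z, boxCenter.z - boxDimensions.z / 2f, boxCenter.z + boxDimensions.z / 2f);
    }

    // Maps the clamped point into a normalized range relative to the box (here -1..1;
    // the toolkit may use a different convention).
    public static void Normalize(ref Vector3 point, Vector3 boxCenter, Vector3 boxDimensions)
    {
        point.x = (point.x - boxCenter.x) / (boxDimensions.x / 2f);
        point.y = (point.y - boxCenter.y) / (boxDimensions.y / 2f);
        point.z = (point.z - boxCenter.z) / (boxDimensions.z / 2f);
    }
}
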
Example #3
        private void StartTrackingLoop()
        {
            PXCMFaceData FaceData = FaceModule.CreateOutput();

            while (!_shouldStop)
            {
                pxcmStatus acquireFrameStatus = SenseManager.AcquireFrame(true);
                if (acquireFrameStatus < pxcmStatus.PXCM_STATUS_NO_ERROR)
                {
                    ResetTrackData();
                    Console.WriteLine("SenseManager.AcquireFrame(true) error: " + acquireFrameStatus.ToString());
                    continue;
                }

                PXCMCapture.Sample captureSample = SenseManager.QueryFaceSample();
                if (captureSample == null)
                {
                    ResetTrackData();
                    SenseManager.ReleaseFrame();
                    continue;
                }
                // TODO: fetch the image data
                TrackImageData(captureSample);

                FaceData.Update();

                int numberOfDetectedFaces = FaceData.QueryNumberOfDetectedFaces();
                if (numberOfDetectedFaces != 1)
                {
                    ResetTrackData();
                    SenseManager.ReleaseFrame();
                    continue;
                }

                PXCMFaceData.Face faceDataFace = FaceData.QueryFaceByIndex(0);

                TrackPose(faceDataFace);
                TrackExpressions(faceDataFace);
                //TrackLandmarks(faceDataFace);
                //TrackGaze();


                //FaceData.QueryRecognitionModule();

                // the next object contains the bounding rectangle and the average depth
                //PXCMFaceData.DetectionData faceDataDetectionData = faceDataFace.QueryDetection();
                //faceDataDetectionData.QueryFaceAverageDepth();
                //faceDataDetectionData.QueryBoundingRect();

                SenseManager.ReleaseFrame();

                Thread.Sleep(250);
            }

            if (FaceData != null)
            {
                FaceData.Dispose();
            }

            FaceConfiguration.Dispose();
            SenseManager.Close();
            SenseManager.Dispose();
        }
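
StartTrackingLoop hands each frame to helpers that are not included in the listing (ResetTrackData, TrackImageData, TrackPose, TrackExpressions). A minimal sketch of one of them, TrackExpressions, assuming a _smileIntensity field to hold the latest result; the field name and the choice of expression are illustrative only:

        // Assumed field that stores the most recent smile intensity for consumers of the track data.
        private float _smileIntensity;

        // Hypothetical sketch of an expression-tracking helper like the TrackExpressions call above;
        // only the smile expression is read here.
        private void TrackExpressions(PXCMFaceData.Face faceDataFace)
        {
            PXCMFaceData.ExpressionsData expressionsData = faceDataFace.QueryExpressions();
            if (expressionsData == null)
            {
                return;
            }

            PXCMFaceData.ExpressionsData.FaceExpressionResult expressionResult;
            if (expressionsData.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_SMILE, out expressionResult))
            {
                _smileIntensity = expressionResult.intensity;
            }
        }
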
Example #4

        public override bool Process(Trigger trigger)
        {
            trigger.ErrorDetected = false;

            if (!SenseToolkitManager.Instance.IsSenseOptionSet(SenseOption.SenseOptionID.Face))
            {
                trigger.ErrorDetected = true;
                return(false);
            }

            if (!(trigger is TrackTrigger))
            {
                trigger.ErrorDetected = true;
                return(false);
            }

            // make sure we have valid values
            if (RealWorldBoxDimensions.x <= 0)
            {
                RealWorldBoxDimensions.x = 1;
            }

            if (RealWorldBoxDimensions.y <= 0)
            {
                RealWorldBoxDimensions.y = 1;
            }

            if (RealWorldBoxDimensions.z <= 0)
            {
                RealWorldBoxDimensions.z = 1;
            }

            if (SenseToolkitManager.Instance.Initialized
                &&
                SenseToolkitManager.Instance.FaceModuleOutput != null)
            {
                if (SenseToolkitManager.Instance.FaceModuleOutput.QueryNumberOfDetectedFaces() == 0)
                {
                    ((TrackTrigger)trigger).Position = Vector3.zero;
                    return(false);
                }

                PXCMFaceData.Face singleFaceOutput = SenseToolkitManager.Instance.FaceModuleOutput.QueryFaceByIndex(FaceIndex);

                bool success = false;
                if (singleFaceOutput != null && singleFaceOutput.QueryUserID() >= 0)
                {
                    // Process Tracking
                    if (trigger is TrackTrigger)
                    {
                        TrackTrigger specificTrigger = (TrackTrigger)trigger;

                        var  landmarksData = singleFaceOutput.QueryLandmarks();
                        bool hasLandmarks  = false;

                        if (landmarksData != null)
                        {
                            PXCMFaceData.LandmarkPoint outpt = null;
                            bool hasPoint = landmarksData.QueryPoint(landmarksData.QueryPointIndex(LandmarkToTrack), out outpt);
                            if (hasPoint)
                            {
                                hasLandmarks = outpt.confidenceWorld != 0;
                            }
                        }

                        if (!hasLandmarks && useBoundingBox)
                        {
                            PXCMRectI32 rect = new PXCMRectI32();
                            if (singleFaceOutput.QueryDetection() != null && singleFaceOutput.QueryDetection().QueryBoundingRect(out rect))
                            {
                                float depth;
                                singleFaceOutput.QueryDetection().QueryFaceAverageDepth(out depth);
                                float bbCenterX = (rect.x + rect.w / 2);
                                float bbCenterY = (rect.y + rect.h / 2);

                                Vector3 vec = new Vector3();

                                if (_pos_ijz == null)
                                {
                                    _pos_ijz = new PXCMPoint3DF32[1] {
                                        new PXCMPoint3DF32()
                                    };
                                }
                                _pos_ijz[0].x = bbCenterX;
                                _pos_ijz[0].y = bbCenterY;
                                _pos_ijz[0].z = depth;

                                if (_pos3d == null)
                                {
                                    _pos3d = new PXCMPoint3DF32[1] {
                                        new PXCMPoint3DF32()
                                    };
                                }

                                SenseToolkitManager.Instance.Projection.ProjectDepthToCamera(_pos_ijz, _pos3d);

                                vec.x = _pos3d[0].x / 10f;
                                vec.y = _pos3d[0].y / 10f;
                                vec.z = _pos3d[0].z / 10f;

                                // Clamp and normalize to the Real World Box
                                TrackingUtilityClass.ClampToRealWorldInputBox(ref vec, RealWorldBoxCenter, RealWorldBoxDimensions);
                                TrackingUtilityClass.Normalize(ref vec, RealWorldBoxCenter, RealWorldBoxDimensions);

                                if (!float.IsNaN(vec.x) && !float.IsNaN(vec.y) && !float.IsNaN(vec.z))
                                {
                                    specificTrigger.Position = vec;
                                    return(true);
                                }
                            }
                            else
                            {
                                specificTrigger.Position = Vector3.zero;
                                return(false);
                            }
                        }
                        else if (landmarksData == null && !useBoundingBox)
                        {
                            specificTrigger.Position = Vector3.zero;
                            return(false);
                        }
                        else
                        {
                            int landmarkId = landmarksData.QueryPointIndex(LandmarkToTrack);

                            PXCMFaceData.LandmarkPoint point = null;

                            landmarksData.QueryPoint(landmarkId, out point);

                            // Translation
                            if (point != null)
                            {
                                Vector3 vec = new Vector3();
                                vec.x = -point.world.x * 100f;
                                vec.y = point.world.y * 100f;
                                vec.z = point.world.z * 100f;

                                if (vec.x == 0 && vec.y == 0 && vec.z == 0)
                                {
                                    specificTrigger.Position = Vector3.zero;
                                    return(false);
                                }

                                // Clamp and normalize to the Real World Box
                                TrackingUtilityClass.ClampToRealWorldInputBox(ref vec, RealWorldBoxCenter, RealWorldBoxDimensions);
                                TrackingUtilityClass.Normalize(ref vec, RealWorldBoxCenter, RealWorldBoxDimensions);

                                if (!float.IsNaN(vec.x) && !float.IsNaN(vec.y) && !float.IsNaN(vec.z))
                                {
                                    specificTrigger.Position = vec;
                                    success = true;
                                }
                            }

                            //Rotation
                            PXCMFaceData.PoseData poseData = singleFaceOutput.QueryPose();
                            if (success && poseData != null)
                            {
                                PXCMFaceData.PoseEulerAngles angles;
                                if (poseData.QueryPoseAngles(out angles))
                                {
                                    if (!float.IsNaN(angles.pitch) && !float.IsNaN(angles.yaw) && !float.IsNaN(angles.roll))
                                    {
                                        Quaternion q = Quaternion.Euler(-angles.pitch, angles.yaw, -angles.roll);

                                        specificTrigger.RotationQuaternion = q;

                                        return(true);
                                    }
                                }
                            }
                        }
                    }
                }
            }
            return(false);
        }
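
The Process method above reads several members that are not shown: FaceIndex, LandmarkToTrack, useBoundingBox, the Real World Box settings and the _pos_ijz/_pos3d projection buffers. Hypothetical declarations for these members; the default values are placeholders, not the values used by the original action class:

        // Hypothetical declarations assumed by Process(); defaults are placeholders.
        public int FaceIndex = 0;
        public PXCMFaceData.LandmarkType LandmarkToTrack = PXCMFaceData.LandmarkType.LANDMARK_NOSE_TIP;
        public bool useBoundingBox = true;
        public Vector3 RealWorldBoxCenter = new Vector3(0f, 0f, 50f);
        public Vector3 RealWorldBoxDimensions = new Vector3(100f, 100f, 100f);

        // Reusable buffers for projecting the bounding-box center from depth to camera space.
        private PXCMPoint3DF32[] _pos_ijz;
        private PXCMPoint3DF32[] _pos3d;
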
Example #5

        //---------------------------------------------------------ProcessingThread-----------------------------------------------------------------------------------------------



        private void ProcessingThread()
        {
            /* Start the AcquireFrame/ReleaseFrame loop:
             * check the return value of 'AcquireFrame(true)' against 'pxcmStatus.PXCM_STATUS_NO_ERROR'.
             * Frame processing is paused between the 'AcquireFrame' call and the next 'ReleaseFrame' call.
             * 'AcquireFrame(true)' pauses frame processing, reads the current frame and stores it where 'QuerySample()' can access it.
             * Further down, frame processing is resumed by calling 'ReleaseFrame()'.
             */
            try
            {
                while (senseManager.AcquireFrame(true) >= pxcmStatus.PXCM_STATUS_NO_ERROR)
                {
                    /* Acquire the color image data:
                     * query the sample (the image stored by the 'AcquireFrame(true)' call above)
                     * and assign it to the 'sample' variable (raw data).
                     */
                    PXCMCapture.Sample sample = senseManager.QuerySample();

                    /* Declare a structure suitable for holding the image data: the raw image
                     * acquired by 'AcquireFrame()' and converted by 'color.AcquireAccess()'.
                     */
                    PXCMImage.ImageData colorData;

                    /* Convert the raw image held in the sample and store it in the ImageData
                     * structure via the last parameter of 'color.AcquireAccess(out colorData)'.
                     */
                    sample.color.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB24, out colorData);

                    /* Convert to a Bitmap */
                    Bitmap colorBitmap = colorData.ToBitmap(0, sample.color.info.width, sample.color.info.height);

                    // Get face data
                    if (faceData != null)
                    {
                        faceData.Update();
                        numFacesDetected = faceData.QueryNumberOfDetectedFaces();

                        if (numFacesDetected > 0)
                        {
                            // Get the first face detected (index 0)
                            PXCMFaceData.Face face = faceData.QueryFaceByIndex(0);

                            // Retrieve face location data
                            PXCMFaceData.DetectionData faceDetectionData = face.QueryDetection();
                            if (faceDetectionData != null)
                            {
                                PXCMRectI32 faceRectangle;
                                faceDetectionData.QueryBoundingRect(out faceRectangle);
                                faceRectangleHeight = faceRectangle.h;
                                faceRectangleWidth  = faceRectangle.w;
                                faceRectangleX      = faceRectangle.x;
                                faceRectangleY      = faceRectangle.y;

                                //int faceRectangleX2 = (faceRectangleX - 510) * -1;
                            }

                            // Process face recognition data
                            if (face != null)
                            {
                                // Retrieve the recognition data instance
                                recognitionData = face.QueryRecognition();

                                // Set the user ID and process register/unregister logic
                                if (recognitionData.IsRegistered())
                                {
                                    userId = Convert.ToString(recognitionData.QueryUserID());

                                    if (flagUserId != userId)
                                    {
                                        Actions.LoadUser(Convert.ToInt16(userId));
                                        flagUserId = userId;
                                    }

                                    if (doUnregister)
                                    {
                                        recognitionData.UnregisterUser();
                                        doUnregister = false;
                                    }
                                }
                                else
                                {
                                    if (doRegister)
                                    {
                                        recognitionData.RegisterUser();

                                        // Capture a jpg image of registered user
                                        colorBitmap.Save("image.jpg", System.Drawing.Imaging.ImageFormat.Jpeg);

                                        doRegister = false;
                                    }
                                    else
                                    {
                                        userId = "Unrecognized";
                                    }
                                }
                            }
                        }
                        else
                        {
                            userId = "No users in view";
                        }
                    }

                    // Display the color stream and other UI elements
                    UpdateUI(colorBitmap);

                    // Release resources
                    colorBitmap.Dispose();
                    sample.color.ReleaseAccess(colorData);
                    sample.color.Dispose();

                    /* Release the frame:
                     * 'ReleaseFrame' releases the lock on the current frame so frame processing continues.
                     */
                    senseManager.ReleaseFrame();

                    coords = faceRectangleX.ToString() + " " + faceRectangleY.ToString() + " " + faceRectangleWidth.ToString() + " " + faceRectangleHeight.ToString();
                    Server.sendMsg(255, "rect", coords, userId);
                }
            }
            catch (Exception e)
            {
                Console.WriteLine("ProcessingThread error: " + e.Message);
            }
        }
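
ProcessingThread assumes that the senseManager and faceData fields were configured beforehand with a color stream, the face module and recognition enabled. A possible initialization sketch; the stream resolution and the recognition-enable call are assumptions based on the SDK face samples rather than code taken from this example:

        // Hypothetical setup for the fields used by ProcessingThread (senseManager, faceData).
        private void ConfigureRealSense()
        {
            senseManager = PXCMSenseManager.CreateInstance();

            // Color stream that ProcessingThread converts to a Bitmap for the UI (assumed resolution).
            senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 640, 480, 30);

            // Face module with detection and recognition enabled.
            senseManager.EnableFace();
            PXCMFaceModule faceModule = senseManager.QueryFace();
            PXCMFaceConfiguration faceConfiguration = faceModule.CreateActiveConfiguration();
            faceConfiguration.detection.isEnabled = true;
            faceConfiguration.QueryRecognition().Enable();   // assumed recognition-enable call
            faceConfiguration.ApplyChanges();
            faceConfiguration.Dispose();

            faceData = faceModule.CreateOutput();

            senseManager.Init();
        }
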
Example #6
    /// <summary>
    /// Update is called every frame, if the MonoBehaviour is enabled.
    /// </summary>
    void Update()
    {
        /* Make sure PXCMSenseManager Instance is Initialized */
        if (psm == null)
        {
            return;
        }

        /* Wait until any frame data is available true(aligned) false(unaligned) */
        if (psm.AcquireFrame(true) != pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            return;
        }

        /* Retrieve face tracking data if ready */
        faceAnalyzer = psm.QueryFace();
        if (faceAnalyzer != null)
        {
            PXCMFaceData _outputData = faceAnalyzer.CreateOutput();
            if (_outputData != null)
            {
                _outputData.Update();

                for (int i = 0; i < _outputData.QueryNumberOfDetectedFaces(); i++)
                {
                    PXCMFaceData.Face _iFace = _outputData.QueryFaceByIndex(i);
                    if (_iFace != null)
                    {
                        /* Retrieve 78 Landmark Points */
                        PXCMFaceData.LandmarksData LandmarkData = _iFace.QueryLandmarks();
                        if (LandmarkData != null)
                        {
                            PXCMFaceData.LandmarkPoint[] landmarkPoints = new PXCMFaceData.LandmarkPoint[MaxPoints];
                            if (LandmarkData.QueryPoints(out landmarkPoints))
                            {
                                faceRenderer.DisplayJoints2D(landmarkPoints);
                            }
                        }

                        /* Retrieve Detection Data */
                        PXCMFaceData.DetectionData detectionData = _iFace.QueryDetection();
                        if (detectionData != null)
                        {
                            PXCMRectI32 rect;
                            if (detectionData.QueryBoundingRect(out rect))
                            {
                                faceRenderer.SetDetectionRect(rect);
                            }
                        }

                        /* Retrieve Pose Data */
                        PXCMFaceData.PoseData poseData = _iFace.QueryPose();
                        if (poseData != null)
                        {
                            PXCMFaceData.PoseQuaternion poseQuaternion;
                            if (poseData.QueryPoseQuaternion(out poseQuaternion))
                            {
                                faceRenderer.DisplayPoseQuaternion(poseQuaternion);
                            }
                        }

                        /* Retrieve Expression Data */
                        PXCMFaceData.ExpressionsData expressionData = _iFace.QueryExpressions();
                        if (expressionData != null)
                        {
                            PXCMFaceData.ExpressionsData.FaceExpressionResult expressionResult;
                            if (expressionData.QueryExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_MOUTH_OPEN, out expressionResult))
                            {
                                faceRenderer.DisplayExpression(expressionResult, PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_MOUTH_OPEN);
                            }
                        }
                    }
                }

                /* Retrieve Alert Data */
                PXCMFaceData.AlertData _alertData;
                for (int i = 0; i < _outputData.QueryFiredAlertsNumber(); i++)
                {
                    if (_outputData.QueryFiredAlertData(i, out _alertData) == pxcmStatus.PXCM_STATUS_NO_ERROR)
                    {
                        faceRenderer.DisplayAlerts(_alertData);
                    }
                }
                _outputData.Dispose();
            }
        }

        /* Retrieve a sample from the camera */
        PXCMCapture.Sample sample = psm.QueryFaceSample();
        if (sample != null)
        {
            colorImage = sample.color;
            if (colorImage != null)
            {
                if (colorTexture2D == null)
                {
                    /* If not allocated, allocate a Texture2D */
                    colorTexture2D = new Texture2D(colorImage.info.width, colorImage.info.height, TextureFormat.ARGB32, false);

                    /* Associate the Texture2D with a gameObject */
                    colorPlane.renderer.material.mainTexture = colorTexture2D;
                    //colorPlane.renderer.material.mainTextureScale = new Vector2(-1f, 1f);
                }

                /* Retrieve the image data in Texture2D */
                PXCMImage.ImageData colorImageData;
                colorImage.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB32, out colorImageData);
                colorImageData.ToTexture2D(0, colorTexture2D);
                colorImage.ReleaseAccess(colorImageData);

                /* Apply the texture to the GameObject to display on */
                colorTexture2D.Apply();
            }
        }

        /* Release the frame to process the next frame */
        psm.ReleaseFrame();
    }
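
Update() expects psm to be a PXCMSenseManager created and initialized elsewhere, typically in Start(). A minimal sketch of that setup and the matching cleanup; the original sample's configuration may differ:

    /// <summary>
    /// Hypothetical initialization sketch: creates the SenseManager used by Update().
    /// </summary>
    void Start()
    {
        /* Create a SenseManager instance and enable the face module */
        psm = PXCMSenseManager.CreateInstance();
        if (psm == null)
        {
            Debug.LogError("SenseManager Initialization Failed");
            return;
        }

        psm.EnableFace();

        /* Initialize the execution pipeline */
        if (psm.Init() != pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            Debug.LogError("SenseManager Init Failed");
        }
    }

    void OnDisable()
    {
        /* Release the SenseManager when the behaviour is disabled or destroyed */
        if (psm != null)
        {
            psm.Dispose();
            psm = null;
        }
    }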