private void FaceFrameReader_FrameArrived(object sender, FaceFrameArrivedEventArgs e)
        {
            try
            {
                using (FaceFrame faceFrame = e.FrameReference.AcquireFrame())
                {
                    if (faceFrame == null)
                    {
                        return;
                    }
                    bool tracked = faceFrame.IsTrackingIdValid;
                    if (!tracked)
                    {
                        return;
                    }

                    FaceFrameResult faceResult = faceFrame.FaceFrameResult;
                    int             index      = GetFaceSourceIndex(faceFrame.FaceFrameSource);
                    faceFrameResults[index] = faceResult;
                }
            }
            catch (Exception exception)
            {
                MessageBox.Show(exception.Message);
                Close();
            }
        }
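GetFaceSourceIndex is referenced above but not shown; a minimal sketch, assuming the class keeps a faceFrameSources array with one FaceFrameSource per possible body (as in the Kinect FaceBasics sample):
        private int GetFaceSourceIndex(FaceFrameSource faceFrameSource)
        {
            // Walk the per-body face sources and return the slot that raised this frame
            int index = -1;

            for (int i = 0; i < bodyCount; i++)
            {
                if (faceFrameSources[i] == faceFrameSource)
                {
                    index = i;
                    break;
                }
            }

            return index;
        }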
Example 2
        //check if someone is talking
        private void CheckStartRecording(ColorFrame colorframe)
        {
            bool isTalking = false;

            for (int f = 0; f < 6; f++)
            {
                if (_faceFrameResults[f] != null)
                {
                    FaceFrameResult face  = _faceFrameResults[f];
                    bool            moved = face.FaceProperties[FaceProperty.MouthMoved] == DetectionResult.Yes || face.FaceProperties[FaceProperty.MouthMoved] == DetectionResult.Maybe;
                    bool            open  = face.FaceProperties[FaceProperty.MouthOpen] == DetectionResult.Yes || face.FaceProperties[FaceProperty.MouthOpen] == DetectionResult.Maybe;
                    isTalking = moved || open; //talking condition: mouth moved or mouth open (Yes or Maybe)
                    if (isTalking)
                    {
                        break;
                    }
                }
            }
            if (isTalking) //start to record now
            {
                if (!_audioSource.IsRecording())
                {
                    _audioSource.Start();
                }
            }
        }
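The _audioSource above is not a Kinect SDK type; the snippet only assumes a recorder with roughly this shape (a hypothetical interface, e.g. wrapping WASAPI or NAudio capture):
        public interface IAudioRecorder
        {
            // True while a recording session is in progress
            bool IsRecording();

            // Begin capturing audio
            void Start();

            // Stop capturing and finalize the recording
            void Stop();
        }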
        void DrawFaceFrameResult(int faceIndex, FaceFrameResult faceResult, DrawingContext drawingContext)
        {
            //Brush/Pen
            Brush drawingBrush = faceBrush[0];

            if (faceIndex < bodyCount)
            {
                drawingBrush = faceBrush[faceIndex];
            }
            Pen drawingPen = new Pen(drawingBrush, 5);

            //Face Points
            var facePoints = faceResult.FacePointsInColorSpace;

            // The bounding box is the same for every face point, so size the Target element once
            RectI box = faceResult.FaceBoundingBoxInColorSpace;

            Target.Width  = box.Right - box.Left;
            Target.Height = box.Bottom - box.Top;

            foreach (PointF pointF in facePoints.Values)
            {
                Point point = new Point(pointF.X, pointF.Y);

                // Position the Target element, scaling color-space coordinates down to the canvas
                // (the divide-by-4 appears to be this sample's display scale)
                Canvas.SetLeft(Target, (point.X / 4) - Target.Width / 2);
                Canvas.SetTop(Target, (point.Y / 4) - Target.Height / 2);
            }
        }
Example 4
        private void OnFaceReaderFrameArrived(object sender, FaceFrameArrivedEventArgs e)
        {
            using (var frame = e.FrameReference.AcquireFrame())
            {
                if (frame != null)
                {
                    FaceFrameResult result = frame.FaceFrameResult;

                    if (result != null)
                    {
                        _faceState.IsHappy          = result.FaceProperties[FaceProperty.Happy] == DetectionResult.Yes;
                        _faceState.IsLeftEyeClosed  = result.FaceProperties[FaceProperty.LeftEyeClosed] == DetectionResult.Yes;
                        _faceState.IsRightEyeClosed = result.FaceProperties[FaceProperty.RightEyeClosed] == DetectionResult.Yes;
                        _faceState.IsMouthMoved     = result.FaceProperties[FaceProperty.MouthMoved] == DetectionResult.Yes;
                        _faceState.IsMouthOpen      = result.FaceProperties[FaceProperty.MouthOpen] == DetectionResult.Yes;
                        _faceState.IsWearingGlasses = result.FaceProperties[FaceProperty.WearingGlasses] == DetectionResult.Yes;

                        if (this.OnFaceChanged != null)
                        {
                            this.OnFaceChanged(sender, _faceState);
                        }
                    }
                }
            }
        }
Example 5
        //check whether someone is engaged in this frame
        private void EngageOnFrame(ColorFrame colorFrame)
        {
            bool engage = false;

            if (colorFrame != null)
            {
                for (int f = 0; f < 6; f++)
                {
                    if (_faceFrameResults[f] != null && _bodies[f] != null)
                    {
                        FaceFrameResult face = _faceFrameResults[f];
                        bool            eng  = face.FaceProperties[FaceProperty.Engaged] == DetectionResult.Yes ||
                                               face.FaceProperties[FaceProperty.Engaged] == DetectionResult.Maybe;
                        float dist = _bodies[f].Joints[JointType.Head].Position.Z;
                        engage = eng && dist < _distanceEngaged; //engage condition (eyes engaged and distance)
                        if (engage)
                        {
                            break;
                        }
                    }
                }
            }
            //count engaged and not-engaged frames; once one counter exceeds 5, the other is reset
            if (engage)
            {
                _frameEngaged++;
                _frameNotEngaged = _frameEngaged > 5 ? (short)0 : _frameNotEngaged;
            }
            else
            {
                _frameNotEngaged++;
                _frameEngaged = _frameNotEngaged > 5 ? (short)0 : _frameEngaged;
            }
        }
        /// <summary>
        /// Initialize the Kinect sensor, body reader, and face tracking pipelines
        /// </summary>
        private void InitializeHDFace()
        {
            this.CurrentBuilderStatus = "Ready To Start Capture";

            this.sensor     = KinectSensor.GetDefault();
            this.bodySource = this.sensor.BodyFrameSource;

            this.bodyReader = this.bodySource.OpenReader();
            this.bodyReader.FrameArrived += this.BodyReader_FrameArrived;
            this.bodyCount = this.sensor.BodyFrameSource.BodyCount;

            this.highDefinitionFaceFrameSource = new HighDefinitionFaceFrameSource(this.sensor);
            this.highDefinitionFaceFrameSource.TrackingIdLost += this.HdFaceSource_TrackingIdLost;

            this.highDefinitionFaceFrameReader = this.highDefinitionFaceFrameSource.OpenReader();
            this.highDefinitionFaceFrameReader.FrameArrived += this.HdFaceReader_FrameArrived;

            this.currentFaceModel     = new FaceModel();
            this.currentFaceAlignment = new FaceAlignment();

            this.coordinateMapper = this.sensor.CoordinateMapper;

            FaceFrameFeatures faceFrameFeatures =
                FaceFrameFeatures.BoundingBoxInColorSpace
                | FaceFrameFeatures.PointsInColorSpace
                | FaceFrameFeatures.RotationOrientation
                | FaceFrameFeatures.FaceEngagement
                | FaceFrameFeatures.Glasses
                | FaceFrameFeatures.Happy
                | FaceFrameFeatures.LeftEyeClosed
                | FaceFrameFeatures.RightEyeClosed
                | FaceFrameFeatures.LookingAway
                | FaceFrameFeatures.MouthMoved
                | FaceFrameFeatures.MouthOpen;


            // create the face frame source with the required face frame features and an initial tracking Id of 0
            this.faceFrameSource = new FaceFrameSource(this.sensor, 0, faceFrameFeatures);

            // open the corresponding reader
            this.faceFrameReader = this.faceFrameSource.OpenReader();


            this.faceFrameResult = null;

            // wire handler for face frame arrival
            if (this.faceFrameReader != null)
            {
                this.faceFrameReader.FrameArrived += this.Reader_FaceFrameArrived;
            }

            this.InitializeMesh();
            this.UpdateMesh();

            this.sensor.Open();
        }
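The handlers wired above (BodyReader_FrameArrived in particular) are not shown in this section; the face sources only produce results once their TrackingId is set from a tracked body. A sketch of what that handler typically does in the Kinect HDFace samples, using the field names assumed above:
        private void BodyReader_FrameArrived(object sender, BodyFrameArrivedEventArgs e)
        {
            using (BodyFrame frame = e.FrameReference.AcquireFrame())
            {
                if (frame == null)
                {
                    return;
                }

                Body[] bodies = new Body[this.bodyCount];
                frame.GetAndRefreshBodyData(bodies);

                foreach (Body body in bodies)
                {
                    if (body != null && body.IsTracked)
                    {
                        // Route both face pipelines to the same tracked body
                        this.faceFrameSource.TrackingId               = body.TrackingId;
                        this.highDefinitionFaceFrameSource.TrackingId = body.TrackingId;
                        break;
                    }
                }
            }
        }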
Example 7
        private void DrawFace(int index, FaceFrameResult faceFrameResult, DrawingContext drawingContext)
        {
            Brush drawingBrush = faceBrush[0];

            if (index < 6)
            {
                drawingBrush = faceBrush[index];
            }

            Pen drawingPen = new Pen(drawingBrush, 4);

            var  faceBoxSource = faceFrameResult.FaceBoundingBoxInInfraredSpace;
            Rect faceBox       = new Rect(faceBoxSource.Left, faceBoxSource.Top, faceBoxSource.Right - faceBoxSource.Left, faceBoxSource.Bottom - faceBoxSource.Top);

            drawingContext.DrawRectangle(null, drawingPen, faceBox);

            if (faceFrameResult.FacePointsInInfraredSpace != null)
            {
                foreach (PointF pointF in faceFrameResult.FacePointsInInfraredSpace.Values)
                {
                    drawingContext.DrawEllipse(null, drawingPen, new Point(pointF.X, pointF.Y), 0.4, 0.4);
                }
            }

            string faceText = string.Empty;

            if (faceFrameResult.FaceProperties != null)
            {
                if (faceFrameResult.FaceProperties[FaceProperty.Happy] == DetectionResult.Yes)
                {
                    Point nosePoint = new Point(faceFrameResult.FacePointsInInfraredSpace[FacePointType.Nose].X,
                                                faceFrameResult.FacePointsInInfraredSpace[FacePointType.Nose].Y);
                    drawingContext.DrawText(new FormattedText(
                                                "☺",
                                                System.Globalization.CultureInfo.GetCultureInfo("en-us"),
                                                FlowDirection.RightToLeft,
                                                new Typeface("Segoe UI"),
                                                68,
                                                drawingBrush),
                                            nosePoint);
                }
            }

        }
Example 8
 /// <summary>
 /// Handles the face frame data arriving from the sensor
 /// </summary>
 /// <param name="sender">object sending the event</param>
 /// <param name="e">event arguments</param>
 private void Reader_FaceFrameArrived(object sender, FaceFrameArrivedEventArgs e)
 {
     using (FaceFrame faceFrame = e.FrameReference.AcquireFrame())
     {
         if (faceFrame != null)
         {
             if (faceFrameSource != faceFrame.FaceFrameSource)
             {
                 return;
             }
             // store this face frame result
             FaceFrameResult faceFrameResult = faceFrame.FaceFrameResult;
             if (faceFrameResult != null && faceFrameResult.FaceProperties != null)
             {
                 // MouthOpen counts as open when the detection is Yes or Maybe
                 DetectionResult mouthOpen = faceFrameResult.FaceProperties[FaceProperty.MouthOpen];
                 isMouthOpen = mouthOpen == DetectionResult.Yes || mouthOpen == DetectionResult.Maybe;
                 mouthCornerLeft  = faceFrameResult.FacePointsInInfraredSpace[FacePointType.MouthCornerLeft];
                 mouthCornerRight = faceFrameResult.FacePointsInInfraredSpace[FacePointType.MouthCornerRight];
                 mouthCenterY     = (int)((mouthCornerLeft.Y + mouthCornerRight.Y) / 2f);
                 mouthLeft        = (int)mouthCornerLeft.X;
                 mouthWidth       = (int)(mouthCornerRight.X - mouthCornerLeft.X);
                 mouthHeight      = mouthWidth / 2;
                 mouthTop         = mouthCenterY - mouthHeight / 2;
             }
         }
     }
 }
Example 9
        private void AddToFaceCaptureQueue(FaceFrameResult frameResult, RectF colorBoundingBox, byte[] face)
        {
            Joint head = _LastKnownJoints[JointType.Head];

            if (head.IsTracked() && _HighQualityFaceCaptures.Count < MaxHighQualityFaceCaptures && _FaceCaptureQueue.Count < MaxFaceQueueSize)
            {
                int pitch;
                int yaw;
                int roll;

                frameResult.FaceRotationQuaternion.ExtractFaceRotationInDegrees(out pitch, out yaw, out roll);

                _Yaw   = yaw;
                _Roll  = roll;
                _Pitch = pitch;

                FaceCapture capture = new FaceCapture(face, Convert.ToInt32(colorBoundingBox.Width), Convert.ToInt32(colorBoundingBox.Height))
                {
                    Distance = Convert.ToInt32(head.Position.DistanceToCamera() * 1000),
                    Pitch    = pitch,
                    Yaw      = yaw,
                    Roll     = roll,
                    Left     = Convert.ToInt32(colorBoundingBox.X),
                    Top      = Convert.ToInt32(colorBoundingBox.Y)
                };

                //Console.WriteLine("Face Capture Created! Current Face Capture Count : {0}", _HighQualityFaceCaptures.Count);
                _FaceCaptureQueue.Enqueue(capture);
            }
        }
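The IsTracked and DistanceToCamera calls above are extension methods that are not shown; a plausible sketch (the exact originals may differ):
        public static class KinectJointExtensions
        {
            // A joint counts as tracked only when the sensor reports full (not inferred) tracking
            public static bool IsTracked(this Joint joint)
            {
                return joint.TrackingState == TrackingState.Tracked;
            }

            // Straight-line distance from the sensor to a camera-space point, in meters
            public static double DistanceToCamera(this CameraSpacePoint position)
            {
                return Math.Sqrt((position.X * position.X) +
                                 (position.Y * position.Y) +
                                 (position.Z * position.Z));
            }
        }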
Example 10
        /// <summary>
        /// Process Face Frames
        /// </summary>
        private void OnFaceFrameArrived(object sender, FaceFrameArrivedEventArgs e)
        {
            // Retrieve the face reference
            FaceFrameReference faceRef = e.FrameReference;

            if (faceRef == null)
            {
                return;
            }

            // Acquire the face frame
            using (FaceFrame faceFrame = faceRef.AcquireFrame())
            {
                if (faceFrame == null)
                {
                    return;
                }

                // Retrieve the face frame result
                FaceFrameResult frameResult = faceFrame.FaceFrameResult;

                if (frameResult != null)
                {
                    // Update trackers
                    UpdateTrackers(frameResult);
                }
            }
        }
Example 11
        /// <summary>
        /// Writes the face frame results to the debug output window.
        /// </summary>
        /// <param name="faceResult">container of all face frame results</param>
        private void DrawFaceFrameResults(FaceFrameResult faceResult)
        {
            Debug.WriteLine("");

            // Output the information for the configured face frame properties
            if (faceResult.FaceProperties != null)
            {
                foreach (var item in faceResult.FaceProperties)
                {
                    // Report Maybe results as No
                    if (item.Value == DetectionResult.Maybe)
                    {
                        Debug.WriteLine(item.Key.ToString() + " : " + DetectionResult.No);
                    }
                    else
                    {
                        Debug.WriteLine(item.Key.ToString() + " : " + item.Value.ToString());
                    }
                }
            }

            // Display the face rotation as Euler angles
            if (!faceResult.FaceRotationQuaternion.Equals(null))
            {
                int pitch, yaw, roll;
                ExtractFaceRotationInDegrees(faceResult.FaceRotationQuaternion, out pitch, out yaw, out roll);
                Debug.WriteLine("FaceYaw : " + yaw);
                Debug.WriteLine("FacePitch : " + pitch);
                Debug.WriteLine("FacenRoll : " + roll);
            }
        }
Example 12
        /// <summary>
        /// Process the face frame
        /// </summary>
        private void OnFaceFrameArrived(object sender, FaceFrameArrivedEventArgs e)
        {
            // Retrieve the face reference
            FaceFrameReference faceRef = e.FrameReference;

            if (faceRef == null)
            {
                return;
            }

            // Acquire the face frame
            using (FaceFrame faceFrame = faceRef.AcquireFrame())
            {
                if (faceFrame == null)
                {
                    return;
                }

                // Retrieve the face frame result
                FaceFrameResult frameResult = faceFrame.FaceFrameResult;

                if (frameResult == null)
                {
                    return;
                }

                // Display the values
                HappyResult.Text       = frameResult.FaceProperties[FaceProperty.Happy].ToString();
                EngagedResult.Text     = frameResult.FaceProperties[FaceProperty.Engaged].ToString();
                GlassesResult.Text     = frameResult.FaceProperties[FaceProperty.WearingGlasses].ToString();
                LeftEyeResult.Text     = frameResult.FaceProperties[FaceProperty.LeftEyeClosed].ToString();
                RightEyeResult.Text    = frameResult.FaceProperties[FaceProperty.RightEyeClosed].ToString();
                MouthOpenResult.Text   = frameResult.FaceProperties[FaceProperty.MouthOpen].ToString();
                MouthMovedResult.Text  = frameResult.FaceProperties[FaceProperty.MouthMoved].ToString();
                LookingAwayResult.Text = frameResult.FaceProperties[FaceProperty.LookingAway].ToString();
            }
        }
Example 13
        private void FaceReader_FrameArrived(object sender, FaceFrameArrivedEventArgs e)
        {
            using (var frame = e.FrameReference.AcquireFrame())
            {
                if (frame != null)
                {
                    // Get the face frame result
                    FaceFrameResult result = frame.FaceFrameResult;

                    if (result != null)
                    {
                        // Get the face points, mapped in the color space
                        //var eyeLeft = result.FacePointsInColorSpace[FacePointType.EyeLeft];
                        //var eyeRight = result.FacePointsInColorSpace[FacePointType.EyeRight];
                        var nose = result.FacePointsInColorSpace[FacePointType.Nose];
                        //var mouthLeft = result.FacePointsInColorSpace[FacePointType.MouthCornerLeft];
                        //var mouthRight = result.FacePointsInColorSpace[FacePointType.MouthCornerRight];

                        // Get the face characteristics
                        //var eyeLeftClosed = result.FaceProperties[FaceProperty.LeftEyeClosed];
                        //var eyeRightClosed = result.FaceProperties[FaceProperty.RightEyeClosed];
                        //var mouthOpen = result.FaceProperties[FaceProperty.MouthOpen];

                        SerialPortHelper.SendBytesOverCom(sp, nose.X.ToString() + ',' + nose.Y.ToString());
                    }
                }
            }
        }
Example 14
 public void Update(Windows.Kinect.Vector4 floor, Body body, FaceFrameResult face)
 {
     this.Floor      = floor.ToUnityVector4();
     this.Body       = body;
     this.Face       = face;
     this.IsTracked  = body.IsTracked;
     this.TrackingId = body.TrackingId;
 }
Example 15
 /// <summary>
 /// Update the FaceFeatureTrackers
 /// </summary>
 /// <param name="frameResult">Face tracking frame</param>
 private void UpdateTrackers(FaceFrameResult frameResult)
 {
     // Loop all trackers
     foreach (FaceProperty feature in _featureAnalytics.Keys)
     {
         // Track the detection results
         _featureAnalytics[feature].Track(frameResult.FaceProperties[feature]);
     }
 }
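The _featureAnalytics dictionary above maps each FaceProperty to a tracker exposing a Track method; a minimal sketch of what such a tracker could look like (this class is an assumption, not shown in the source):
 public class FaceFeatureTracker
 {
     // Number of frames in which the feature was positively detected
     public int YesCount { get; private set; }

     // Total number of frames observed
     public int TotalCount { get; private set; }

     // Fraction of observed frames with a positive detection
     public double DetectionRate
     {
         get { return this.TotalCount == 0 ? 0.0 : (double)this.YesCount / this.TotalCount; }
     }

     public void Track(DetectionResult result)
     {
         this.TotalCount++;

         if (result == DetectionResult.Yes)
         {
             this.YesCount++;
         }
     }
 }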
Example 16
 public KinectActor(UnityEngine.Vector3 origin)
 {
     this.CoordinateOrigin = origin;
     this.Floor            = UnityEngine.Vector4.zero;
     this.IsTracked        = false;
     this.TrackingId       = 0;
     this.Body             = null;
     this.Face             = null;
 }
Example 17
        //Main fatigue scheduler
        public double Scheduler(FaceFrameResult faceFrameResult)
        {
            //Record abnormal frames
            Note(faceFrameResult);

            this.NumeFaigute = calculatFaigute();

            return(this.NumeFaigute);
        }
Example 18
        private void DrawFaceFrameResults(int faceIndex, FaceFrameResult faceResult)
        {
            // draw the face bounding box
            var faceBoxSource = faceResult.FaceBoundingBoxInColorSpace;

            if (previousResults[faceIndex] != null)
            {
                var    previousFaceBox = previousResults[faceIndex].FaceBoundingBoxInColorSpace;
                double movementX       = faceBoxSource.Left - previousFaceBox.Left;
                double movementY       = faceBoxSource.Top - previousFaceBox.Top;
                if (drawingSegmentsFaces.ContainsKey(faceIndex))
                {
                    for (int j = 0; j < drawingSegmentsFaces[faceIndex].Count; j++)
                    {
                        // Console.WriteLine(drawingSegmentsFaces[faceIndex][j].Points);
                        if (movementX != 0 || movementY != 0)
                        {
                            PointCollection polyline = drawingSegmentsFaces[faceIndex][j].Points;

                            for (int i = 0; i < polyline.Count; i++)
                            {
                                polyline[i] = new Point(polyline[i].X + movementX, polyline[i].Y + movementY);
                            }
                        }
                    }
                }
            }
            previousResults[faceIndex] = faceResult;


            Rectangle faceBox = new Rectangle()
            {
                HorizontalAlignment = HorizontalAlignment.Left,
                Height          = faceBoxSource.Bottom - faceBoxSource.Top,
                Width           = faceBoxSource.Right - faceBoxSource.Left,
                StrokeThickness = 5,
                Stroke          = Brushes.Blue
            };

            Canvas.SetLeft(faceBox, faceBoxSource.Left);
            Canvas.SetTop(faceBox, faceBoxSource.Top);
            // drawArea.Children.Add(faceBox);

            string faceText = string.Empty;

            // extract face rotation in degrees as Euler angles
            if (faceResult.FaceRotationQuaternion != null)
            {
                int pitch, yaw, roll;
                ExtractFaceRotationInDegrees(faceResult.FaceRotationQuaternion, out pitch, out yaw, out roll);
                faceText += "FaceYaw : " + yaw + "\n" +
                            "FacePitch : " + pitch + "\n" +
                            "FacenRoll : " + roll + "\n";
            }
            //Console.WriteLine(faceText);
        }
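Several snippets in this collection call ExtractFaceRotationInDegrees without showing it; a sketch of the usual quaternion-to-Euler conversion (as in Microsoft's FaceBasics sample, minus the rotation-increment rounding that sample applies):
        private static void ExtractFaceRotationInDegrees(Vector4 rotQuaternion, out int pitch, out int yaw, out int roll)
        {
            double x = rotQuaternion.X;
            double y = rotQuaternion.Y;
            double z = rotQuaternion.Z;
            double w = rotQuaternion.W;

            // Convert the face rotation quaternion to Euler angles in degrees
            pitch = (int)(Math.Atan2(2 * ((y * z) + (w * x)), (w * w) - (x * x) - (y * y) + (z * z)) / Math.PI * 180.0);
            yaw   = (int)(Math.Asin(2 * ((w * y) - (x * z))) / Math.PI * 180.0);
            roll  = (int)(Math.Atan2(2 * ((x * y) + (w * z)), (w * w) + (x * x) - (y * y) - (z * z)) / Math.PI * 180.0);
        }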
Example 19
        /// <summary>
        /// Adds to the history collection keeping an up-to-date buffer of body state datapoints.
        /// Used for moving average values of attributes.
        /// </summary>
        /// <param name="frameResult">Push in a frame result</param>
        private void AddBodyAttributeHistory(FaceFrameResult frameResult)
        {
            if (_BodyAttributesHistory.Count >= MaxAttributesHistory)
            {
                IReadOnlyDictionary <FaceProperty, DetectionResult> attributes;
                _BodyAttributesHistory.TryDequeue(out attributes);
            }

            _BodyAttributesHistory.Enqueue(frameResult.FaceProperties);
        }
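A sketch of how the history buffer above could feed the moving average mentioned in the summary, given that _BodyAttributesHistory is a queue of FaceProperty-to-DetectionResult dictionaries (the method name here is hypothetical):
        private double GetAttributeDetectionRate(FaceProperty property)
        {
            int samples = 0;
            int hits = 0;

            // Walk the buffered frames and count positive detections of the requested property
            foreach (IReadOnlyDictionary<FaceProperty, DetectionResult> attributes in _BodyAttributesHistory)
            {
                DetectionResult value;
                if (attributes.TryGetValue(property, out value))
                {
                    samples++;

                    if (value == DetectionResult.Yes)
                    {
                        hits++;
                    }
                }
            }

            return samples == 0 ? 0.0 : (double)hits / samples;
        }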
Example 20
        /// <summary>
        /// Constructor
        /// </summary>
        /// <param name="trackingId">Tracking ID</param>
        /// <param name="frameResult">Frame result</param>
        public FaceFrameResultEventArgs(ulong trackingId, FaceFrameResult frameResult)
        {
            if (frameResult == null)
            {
                throw new ArgumentNullException("frameResult");
            }

            this.trackingId  = trackingId;
            this.frameResult = frameResult;
        }
Example 21
 public KinectBodyFrame()
 {
     this.trackingId           = 0;
     this.lean                 = Vector2.zero;
     this.body                 = null;
     this.face                 = null;
     this.joints               = new KinectJoint[KinectHelper.jointTypeCount];
     this.rawJoints            = new Dictionary <JointType, Windows.Kinect.Joint>(KinectHelper.jointTypeCount);
     this.rawJointOrientations = new Dictionary <JointType, JointOrientation>(KinectHelper.jointTypeCount);
 }
Example 22
 private void FaceReader_FrameArrived(object sender, FaceFrameArrivedEventArgs e)
 {
     using (var frame = e.FrameReference.AcquireFrame())
     {
         if (frame != null)
         {
             _faceResult = frame.FaceFrameResult;
         }
     }
 }
Example 23
        /// <summary>
        /// Handles the face frame data arriving from the sensor
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        private void Reader_FaceFrameArrived(object sender, FaceFrameArrivedEventArgs e)
        {
            using (FaceFrame faceFrame = e.FrameReference.AcquireFrame())
            {
                if (faceFrame != null)
                {
                    // get the index of the face source from the face source array
                    //int index = this.GetFaceSourceIndex(faceFrame.FaceFrameSource);

                    // check if this face frame has valid face frame results
                    if (this.ValidateFaceBoxAndPoints(faceFrame.FaceFrameResult))
                    {
                        // store this face frame result to draw later
                        //this.faceFrameResults[index] = faceFrame.FaceFrameResult;
                        this.faceFrameResult = faceFrame.FaceFrameResult;

                        string faceText = string.Empty;

                        if (this.faceFrameResult.FaceProperties != null)
                        {
                            foreach (var item in faceFrameResult.FaceProperties)
                            {
                                faceText += item.Key.ToString() + " : ";

                                if (item.Value == DetectionResult.Maybe)
                                {
                                    faceText += DetectionResult.Yes + "\n";
                                }
                                else
                                {
                                    faceText += item.Value.ToString() + "\n";
                                }

                                //Check the person's expression
                                if (item.Key == FaceProperty.Happy)
                                {
                                    //if (item.Value == DetectionResult.Yes || item.Value == DetectionResult.Maybe)
                                    //    faceHappy = true;
                                    //else
                                    //    faceHappy = false;
                                }
                            }
                        }

                        //this.StatusText = faceText;
                    }
                    else
                    {
                        // indicates that the latest face frame result from this reader is invalid
                        //this.faceFrameResults[index] = null;
                        this.faceFrameResult = null;
                    }
                }
            }
        }
Example 24
        internal void OnUpdateTrackingData(KinectManager manager, Body body, FaceFrameResult face, long frame)
        {
            this.updatedAtFrame = frame;
            this.m_BodyFrame.RefreshFrameData(body, face, manager.floorClipPlane);

            KinectJoint.TransformJointData(this.m_BodyFrame.joints, this.m_Joints, manager.transform);

            RecalculatePositionAndBounds();

            this.onTrackingDataUpdated?.Invoke();
        }
 /// <summary>
 /// Face frame event
 /// </summary>
 private void _faceReader_FrameArrived(object sender, FaceFrameArrivedEventArgs e)
 {
     using (var face_frame = e.FrameReference.AcquireFrame())
     {
         if (face_frame != null)
         {
             // Get the face frame result. Get the face points, mapped in the color space. in the Posetures.cs
             face_result = face_frame.FaceFrameResult;
         }
     }
 }
Example 26
        /// <summary>
        /// Refreshes preallocated buffers for frame and joint data.
        /// The goal is to avoid per frame allocations in the <see cref="Windows.Kinect.Body.Joints"/>
        /// and <see cref="Windows.Kinect.Body.JointOrientations"/> properties.
        /// </summary>
        public void RefreshFrameData(Body body, FaceFrameResult face, UnityEngine.Vector4 floorClipPlane)
        {
            this.body         = body;
            this.face         = face;
            this.trackingId   = this.body.GetTrackingIdFast();
            this.lean         = this.body.GetLeanDirection();
            this.faceRotation = this.face == null ? Quaternion.identity : KinectHelper.FaceRotationToRealSpace(face.FaceRotationQuaternion);
            body.RefreshJointsFast(this.rawJoints);
            body.RefreshJointOrientationsFast(this.rawJointOrientations);

            KinectJoint.RefreshJointData(this.joints, floorClipPlane, this.rawJoints, this.rawJointOrientations);
        }
Example 27
 private void OnFaceFrameArrived(object sender, FaceFrameArrivedEventArgs e)
 {
     if (e.FrameReference != null)
     {
         using (FaceFrame faceFrame = e.FrameReference.AcquireFrame())
         {
             if (faceFrame != null)
             {
                 _CurrentFaceFrameResult = faceFrame.FaceFrameResult;
             }
         }
     }
 }
Example 28
        void NormalFaceReader_FrameArrived(object sender, FaceFrameArrivedEventArgs e)
        {
            using (var frame = e.FrameReference.AcquireFrame())
            {
                if (frame != null)
                {
                    // 4) Get the face frame result
                    FaceFrameResult result = frame.FaceFrameResult;

                    if (result != null)
                    {
                        // 5) Do magic!
                        var f = new FaceNormal(result);
                        _faceData.addNormalData(f);
                        infoNormal.Text  = f.dump_str();
                        infoNormal.Text += _faceData.dump_str();

                        // Get the face points, mapped in the color space.

                        var eyeLeft  = result.FacePointsInColorSpace[FacePointType.EyeLeft];
                        var eyeRight = result.FacePointsInColorSpace[FacePointType.EyeRight];

                        // Position the canvas UI elements
                        Canvas.SetLeft(ellipseEyeLeft, eyeLeft.X - ellipseEyeLeft.Width / 2.0);
                        Canvas.SetTop(ellipseEyeLeft, eyeLeft.Y - ellipseEyeLeft.Height / 2.0);

                        Canvas.SetLeft(ellipseEyeRight, eyeRight.X - ellipseEyeRight.Width / 2.0);
                        Canvas.SetTop(ellipseEyeRight, eyeRight.Y - ellipseEyeRight.Height / 2.0);

                        // Display or hide the ellipses
                        if (f.eyeLeftClosed == DetectionResult.Yes || f.eyeLeftClosed == DetectionResult.Maybe)
                        {
                            ellipseEyeLeft.Visibility = Visibility.Collapsed;
                        }
                        else
                        {
                            ellipseEyeLeft.Visibility = Visibility.Visible;
                        }

                        if (f.eyeRightClosed == DetectionResult.Yes || f.eyeRightClosed == DetectionResult.Maybe)
                        {
                            ellipseEyeRight.Visibility = Visibility.Collapsed;
                        }
                        else
                        {
                            ellipseEyeRight.Visibility = Visibility.Visible;
                        }
                    }
                }
            }
        }
Example 29
        public ClosestFaceFrame(FaceFrameResult faceFrameResult, ClosestBodyFrame closestBodyFrame)
        {
            Type = FrameType.ClosestFace;

            if (faceFrameResult != null && closestBodyFrame != null && faceFrameResult.TrackingId == closestBodyFrame.TrackingId)
            {
                this.closestBodyFrame = closestBodyFrame;
                this.faceFrameResult  = faceFrameResult;
                FaceFound             = true;
            }
            else
            {
                FaceFound = false;
            }
        }
Example 30
        //Checks if a face has a valid bounding box
        private bool ValidateFaceBoundingBox(FaceFrameResult faceFrameResult)
        {
            bool isFaceValid = faceFrameResult != null;

            if (isFaceValid)
            {
                RectI boundingBox = faceFrameResult.FaceBoundingBoxInColorSpace;
                if (boundingBox != null)
                {
                    isFaceValid = (boundingBox.Right - boundingBox.Left) > 0 &&
                                  (boundingBox.Bottom - boundingBox.Top) > 0 &&
                                  boundingBox.Right <= _kinect.ColorFrameSource.FrameDescription.Width &&
                                  boundingBox.Bottom <= _kinect.ColorFrameSource.FrameDescription.Height;
                }
            }
            return(isFaceValid);
        }