Example #1
        /// <summary>
        /// Attempt to get a frame & track bodies each frame
        /// </summary>
        private bool GetAndRefreshBodyData(BodyFrameArrivedEventArgs e, bool dataReceived)
        {
            // Represents a frame that contains all the computed real-time tracking information
            // about people that are in view of the sensor.
            using (BodyFrame bodyFrame = e.FrameReference.AcquireFrame())
            {
                // Not able to get a frame?
                //if (bodyFrame == null) return !dataReceived;
                //TODO: Can probably get rid of this
                if (bodyFrame == null)
                {
                    kinectState = KinectState.notTracked;
                    Debug.WriteLine("Cannot get Frame Reference");
                }

                // Able to get a frame, but bodies is currently null
                if (bodyFrame != null && this.bodies == null)
                {
                    this.bodies = new Body[bodyFrame.BodyCount];
                    // TODO: Determine the correct Kinect State
                    Debug.WriteLine("Current _body count: " + bodyFrame.BodyCount);
                }
                // Refresh the body data and mark the sensor as tracking only when a frame was actually acquired
                if (bodyFrame != null)
                {
                    bodyFrame.GetAndRefreshBodyData(this.bodies);
                    kinectState = KinectState.bodyTrackedAndIsTracking;
                }
            }
            return false;
        }
        public Body Select(BodyFrame frame)
        {
            if (frame == null || frame.BodyCount == 0)
            {
                this.trackingId = 0;
                return null;
            }

            var bodies = new Body[frame.BodyCount];
            frame.GetAndRefreshBodyData(bodies);

            var trackedBody = bodies.FirstOrDefault(b =>
                b.TrackingId == this.trackingId &&
                b.Joints[JointType.HandLeft].Position.Y > b.Joints[JointType.HipLeft].Position.Y + 0.05);

            if (trackedBody != null)
            {
                return trackedBody;
            }

            var activeBody = bodies.FirstOrDefault(b =>
                b.IsTracked &&
                b.HandLeftState == HandState.Open &&
                b.Joints[JointType.HandLeft].Position.Y > b.Joints[JointType.ShoulderLeft].Position.Y);

            if (activeBody != null)
            {
                this.trackingId = activeBody.TrackingId;
                return activeBody;
            }

            this.trackingId = 0;
            return null;
        }
        /// <summary>
        /// Body (skeleton) frame arrived event
        /// </summary>
        private void BodyFrameReader_FrameArrived(object sender, BodyFrameArrivedEventArgs e)
        {
            using (BodyFrame bodyFrame = e.FrameReference.AcquireFrame())
            {
                if (bodyFrame == null)
                {
                    return;
                }
                bodies = new Body[bodyFrame.BodyCount];
                bodyFrame.GetAndRefreshBodyData(bodies);

                Body body = bodies.Where(b => b.IsTracked).FirstOrDefault();
                if (!_faceSource.IsTrackingIdValid)
                {
                    if (body != null)
                    {
                        //Assign a tracking ID to the face source
                        _faceSource.TrackingId = body.TrackingId;
                    }
                }

                //Evaluate each skeleton and pass in the parameters
                for (int i = 0; i < bodies.Length; i++)
                {
                    //Only run when this skeleton is actually being tracked
                    if (bodies[i].IsTracked)
                    {
                        if (!StopDetect)
                        {
                            //Body class ,posture_number ,face class
                            //poseture.Poseture_Detected(bodies[i], vocabulary.Give_Vocabulary(posture_number), face_result);

                            //Bail out if there are no task words
                            if (TaskWords == null)
                            {
                                return;
                            }
                            //Feed each vocabulary word taken from the sentence into the posture detection
                            for (int j = 0; j < TaskWords.Count; j++)
                            {
                                poseture.Poseture_Detected(bodies[i], TaskWords[j], face_result);
                            }
                        }
                    }
                }
            }
        }
Example #4
        private void OnBodyFrameArrived(object sender, BodyFrameArrivedEventArgs e)
        {
            // Get frame reference
            BodyFrameReference refer = e.FrameReference;

            if (refer == null)
            {
                return;
            }
            {
                //Get body frame
                BodyFrame frame = refer.AcquireFrame();

                if (frame == null)
                {
                    StatusText.Text       = "BodyOff";
                    StatusText.Visibility = Visibility.Visible;
                    return;
                }

                else
                {
                    StatusText.Text       = "BodyOn";
                    StatusText.Visibility = Visibility.Visible;
                }

                //Process it
                using (frame)
                {
                    //Aquire body data
                    frame.GetAndRefreshBodyData(_bodies);

                    //Clear Skeleton Canvas
                    SkeletonCanvas.Children.Clear();

                    //Loop all bodies
                    foreach (Body body in _bodies)
                    {
                        //Only process tracked bodies
                        if (body.IsTracked)
                        {
                            DrawBody(body);
                        }
                    }
                }
            }
        }
Example #5
        /// <summary>
        /// Handles the body frame data arriving from the sensor
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        private void Reader_FrameArrived(object sender, BodyFrameArrivedEventArgs e)
        {
            bool dataReceived = false;

            using (BodyFrame bodyFrame = e.FrameReference.AcquireFrame())
            {
                if (bodyFrame != null)
                {
                    if (this.bodies == null)
                    {
                        this.bodies = new Body[bodyFrame.BodyCount];
                    }

                    // The first time GetAndRefreshBodyData is called, Kinect will allocate each Body in the array.
                    // As long as those body objects are not disposed and not set to null in the array,
                    // those body objects will be re-used.
                    bodyFrame.GetAndRefreshBodyData(this.bodies);
                    dataReceived = true;
                }
            }

            if (dataReceived)
            {
                foreach (Body body in this.bodies)
                {
                    if (body.IsTracked)
                    {
                        IReadOnlyDictionary <JointType, Joint> joints = body.Joints;

                        foreach (JointType jointType in joints.Keys)
                        {
                            // sometimes the depth(Z) of an inferred joint may show as negative
                            // clamp down to 0.1f to prevent coordinatemapper from returning (-Infinity, -Infinity)
                            CameraSpacePoint position = joints[jointType].Position;
                            if (position.Z < 0)
                            {
                                position.Z = InferredZPositionClamp;
                            }
                        }
                        SetBodyFrameValues(body);

                        string data = "";
                        OnKinectDataReceived(new clsKinectArgs(e, data, DateTime.Now));
                    }
                }
            }
        }
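In the loop above the clamped position is computed but not consumed; in the SDK BodyBasics sample the clamped point is then handed to the coordinate mapper. A minimal sketch of that step, assuming access to the sensor's CoordinateMapper and the sample's 0.1f clamp constant:

        // Sketch only: maps a joint to depth space after clamping negative depth values,
        // as the SDK sample does. Assumes this.coordinateMapper = kinectSensor.CoordinateMapper.
        private const float InferredZPositionClamp = 0.1f;

        private DepthSpacePoint MapJointToDepthSpace(Joint joint)
        {
            CameraSpacePoint position = joint.Position;
            if (position.Z < 0)
            {
                // A negative Z makes the mapper return (-Infinity, -Infinity), so clamp first.
                position.Z = InferredZPositionClamp;
            }

            return this.coordinateMapper.MapCameraPointToDepthSpace(position);
        }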
Example #6
 public void Refresh()
 {
     if (this._Reader != null)
     {
         BodyFrame bodyFrame = this._Reader.AcquireLatestFrame();
         if (bodyFrame != null)
         {
             if (this._Data == null)
             {
                 this._Data = new Body[this._Sensor.BodyFrameSource.BodyCount];
             }
             bodyFrame.GetAndRefreshBodyData(this._Data);
             this.FloorClipPlane = bodyFrame.FloorClipPlane;
             bodyFrame.Dispose();
         }
     }
 }
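The Refresh above disposes the frame manually; a using block does the same work and guarantees disposal even if GetAndRefreshBodyData throws. A minimal sketch assuming the same _Reader, _Sensor and _Data fields:

 public void Refresh()
 {
     if (this._Reader == null)
     {
         return;
     }

     // AcquireLatestFrame returns null when no new frame is ready yet.
     using (BodyFrame bodyFrame = this._Reader.AcquireLatestFrame())
     {
         if (bodyFrame != null)
         {
             if (this._Data == null)
             {
                 this._Data = new Body[this._Sensor.BodyFrameSource.BodyCount];
             }

             bodyFrame.GetAndRefreshBodyData(this._Data);
             this.FloorClipPlane = bodyFrame.FloorClipPlane;
         }
     }
 }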
Example #7
        private void ShowBodyJoints(BodyFrame bodyFrame)
        {
            Body[] bodies = new Body[
                this.kinectSensor.BodyFrameSource.BodyCount];
            bool dataReceived = false;

            if (bodyFrame != null)
            {
                bodyFrame.GetAndRefreshBodyData(bodies);
                dataReceived = true;
            }

            if (dataReceived)
            {
                this.bodiesManager.UpdateBodiesAndEdges(bodies);
            }
        }
Example #8
        /// <summary>
        /// this method extracts a single InfraredFrame from the FrameReference in the event args,
        /// checks that the frame is not null and that its dimensions match the initialized bitmap
        /// </summary>
        //    private void Reader_InfraredFrameArrived(object sender, InfraredFrameArrivedEventArgs e)
        //{
        //    bool infraredFrameProcessed = false;

        //    // InfraredFrame is IDisposable
        //    using (InfraredFrame infraredFrame = e.FrameReference.AcquireFrame())
        //    {
        //        if (infraredFrame != null)
        //        {
        //            FrameDescription infraredFrameDescription = infraredFrame.FrameDescription;

        //            // verify data and write the new infrared frame data to the display bitmap
        //            if (((infraredFrameDescription.Width * infraredFrameDescription.Height) == this.infraredFrameData.Length) &&
        //                (infraredFrameDescription.Width == this.bitmap.PixelWidth) &&
        //                (infraredFrameDescription.Height == this.bitmap.PixelHeight))
        //            {
        //                // copy the infrared frame into the infraredFrameData array class variable which is used in the next stage
        //                infraredFrame.CopyFrameDataToArray(this.infraredFrameData);

        //                infraredFrameProcessed = true;
        //            }
        //        }
        //    }

        //    // if a frame is received successfully, convert and render
        //    if (infraredFrameProcessed)
        //    {
        //        ConvertInfraredDataToPixels();
        //        RenderPixelArray(this.infraredPixels);
        //    }
        //}

        private void Reader_MultiSourceFrameArrived(MultiSourceFrameReader sender, MultiSourceFrameArrivedEventArgs e)
        {
            MultiSourceFrame reference = e.FrameReference.AcquireFrame();

            // If the Frame has expired by the time we process this event, return.
            if (reference == null)
            {
                return;
            }

            ColorFrame colorFrame = null;
            BodyFrame  bodyFrame  = null;

            //InfraredFrame infraredFrame = null;
            //BodyIndexFrame bodyIndexFrame = null;
            //IBuffer bodyIndexFrameData = null;

            // Open color frame
            using (colorFrame = reference.ColorFrameReference.AcquireFrame())
            {
                if (colorFrame != null)
                {
                    ShowColorFrame(colorFrame);
                }
            }

            // Gesture detection and joints overlay
            using (bodyFrame = reference.BodyFrameReference.AcquireFrame())
            {
                if (bodyFrame != null)
                {
                    var bodies = new Body[bodyFrame.BodyCount];
                    bodyFrame.GetAndRefreshBodyData(bodies);

                    foreach (Body body in bodies)
                    {
                        if (body.IsTracked)
                        {
                            RegisterGesture(bodyFrame);
                            ShowBodyJoints(bodyFrame);
                            PrintJointAngles(bodyFrame);
                        }
                    }
                }
            }
        }
Example #9
        private void Reader_BodyFrameArrived(object sender, BodyFrameArrivedEventArgs e)
        {
            bool dataReceived = false;

            using (BodyFrame bodyFrame = e.FrameReference.AcquireFrame()) {
                if (bodyFrame != null)
                {
                    if (this.bodies == null)
                    {
                        // creates an array of 6 bodies, which is the max number of bodies that Kinect can track simultaneously
                        this.bodies = new Body[bodyFrame.BodyCount];
                    }

                    // The first time GetAndRefreshBodyData is called, Kinect will allocate each Body in the array.
                    // As long as those body objects are not disposed and not set to null in the array,
                    // those body objects will be re-used.
                    bodyFrame.GetAndRefreshBodyData(this.bodies);
                    dataReceived = true;
                }
            }

            if (dataReceived)
            {
                // we may have lost/acquired bodies, so update the corresponding gesture detectors
                if (this.bodies != null)
                {
                    // loop through all bodies to see if any of the gesture detectors need to be updated
                    int maxBodies = this.kinectSensor.BodyFrameSource.BodyCount;
                    for (int i = 0; i < maxBodies; ++i)
                    {
                        Body  body       = this.bodies[i];
                        ulong trackingId = body.TrackingId;

                        // if the current body TrackingId changed, update the corresponding gesture detector with the new value
                        if (trackingId != this.gestureDetectorList[i].TrackingId)
                        {
                            this.gestureDetectorList[i].TrackingId = trackingId;

                            // if the current body is tracked, unpause its detector to get VisualGestureBuilderFrameArrived events
                            // if the current body is not tracked, pause its detector so we don't waste resources trying to get invalid gesture results
                            this.gestureDetectorList[i].IsPaused = trackingId == 0;
                        }
                    }
                }
            }
        }
Example #10
        /// <summary>
        /// Handles the body frame data arriving from the sensor
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        private void Reader_BodyFrameArrived(object sender, BodyFrameArrivedEventArgs e)
        {
            bool dataReceived   = false;
            bool hasTrackedBody = false;

            using (BodyFrame bodyFrame = e.FrameReference.AcquireFrame())
            {
                if (bodyFrame != null)
                {
                    bodyFrame.GetAndRefreshBodyData(this.bodies);
                    dataReceived = true;
                }
            }

            if (dataReceived)
            {
                this.BeginBodiesUpdate();

                // iterate through each body
                for (int bodyIndex = 0; bodyIndex < this.bodies.Length; bodyIndex++)
                {
                    Body body = this.bodies[bodyIndex];

                    if (body.IsTracked)
                    {
                        // check if this body clips an edge
                        this.UpdateClippedEdges(body, hasTrackedBody);

                        this.UpdateBody(body, bodyIndex);

                        hasTrackedBody = true;
                    }
                    else
                    {
                        // collapse this body from canvas as it goes out of view
                        this.ClearBody(bodyIndex);
                    }
                }

                if (!hasTrackedBody)
                {
                    // clear clipped edges if no bodies are tracked
                    this.ClearClippedEdges();
                }
            }
        }
Example #11
    // Update is called once per frame
    void Update()
    {
        IsAvailable = _sensor.IsAvailable;

        if (_bodyFrameReader != null)
        {
            BodyFrame frame = _bodyFrameReader.AcquireLatestFrame();

            if (frame != null)
            {
                frame.GetAndRefreshBodyData(_bodies);

                foreach (Body body in _bodies)
                {
                    if (!body.IsTracked)
                    {
                        continue;
                    }

                    IsAvailable = true;

                    if (body.HandRightConfidence == TrackingConfidence.High && body.HandRightState == HandState.Lasso)
                    {
                        GameManager.instance.StartNewGame();
                    }

                    float x = 0.0f;
                    float y = 0.0f;

                    Windows.Kinect.Joint RightHand = body.Joints[JointType.HandRight];
                    x            = RescalingToRangesB(-1, 1, -8, 8, RightHand.Position.X);
                    y            = RescalingToRangesB(-1, 1, -8, 8, RightHand.Position.Y);
                    RightHandPos = new Vector3(x, y, 0);

                    Windows.Kinect.Joint LeftHand = body.Joints[JointType.HandLeft];
                    x           = RescalingToRangesB(-1, 1, -8, 8, LeftHand.Position.X);
                    y           = RescalingToRangesB(-1, 1, -8, 8, LeftHand.Position.Y);
                    LeftHandPos = new Vector3(x, y, 0);
                }

                frame.Dispose();
                frame = null;
            }
        }
    }
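RescalingToRangesB is not shown in this example; a minimal sketch under the assumption that it linearly remaps a value from one range to another (here camera-space metres to Unity world units):

    // Assumed helper (sketch): linearly remap value from [oldMin, oldMax] to [newMin, newMax].
    private float RescalingToRangesB(float oldMin, float oldMax, float newMin, float newMax, float value)
    {
        float t = (value - oldMin) / (oldMax - oldMin);
        return newMin + t * (newMax - newMin);
    }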
Example #12
        void lecteurBody_FrameArrived(object sender, BodyFrameArrivedEventArgs e)
        {
            BodyFrameReference refer = e.FrameReference;

            if (refer == null)
            {
                return;
            }

            // Get the body frame
            BodyFrame frame = refer.AcquireFrame();

            if (frame == null)
            {
                return;
            }

            using (frame)
            {
                // Get the players' body data
                frame.GetAndRefreshBodyData(bodies);

                // Clear Skeleton Canvas
                CanvasCameraPrincipale.Children.Clear();

                // Loop all bodies
                foreach (Body body in bodies)
                {
                    // Only process tracked bodies and if the timer is over
                    if (body.IsTracked && tempsFini)
                    {
                        if (body.HandLeftState == HandState.Closed)                        //If the left hand is closed, show the previous image
                        {
                            indexPhoto--;
                            afficherPhoto();
                        }
                        else if (body.HandRightState == HandState.Closed)                         //If the right hand is closed, show the next image
                        {
                            indexPhoto++;
                            afficherPhoto();
                        }
                    }
                }
            }
        }
Example #13
        private void BodyFrameReader_FrameArrived(object sender, BodyFrameArrivedEventArgs e)
        {
            using (BodyFrame bodyFrame = e.FrameReference.AcquireFrame())
            {
                if (bodyFrame == null)
                {
                    return;
                }
                bodies = new Body[bodyFrame.BodyCount];
                bodyFrame.GetAndRefreshBodyData(bodies);

                Body body = bodies.Where(b => b.IsTracked).FirstOrDefault();
                if (!_faceSource.IsTrackingIdValid)
                {
                    if (body != null)
                    {
                        // 4) Assign a tracking ID to the face source
                        _faceSource.TrackingId = body.TrackingId;
                    }
                }

                //Evaluate each skeleton and pass in the parameters
                for (int i = 0; i < bodies.Length; i++)
                {
                    if (bodies[i].IsTracked)   //Only run when this skeleton is actually being tracked

                    /*
                     * poseture.Poseture_Detected(
                     *  bodies[i].Joints[JointType.Head],
                     *  bodies[i].Joints[JointType.HandRight],
                     *  bodies[i].Joints[JointType.ElbowRight],
                     *  bodies[i].Joints[JointType.HandLeft],
                     *  bodies[i].Joints[JointType.ElbowLeft],
                     *  bodies[i].Joints[JointType.ShoulderRight],
                     *  bodies[i].Joints[JointType.ShoulderLeft],
                     *  bodies[i].Joints[JointType.SpineMid],
                     *  posture_number,//posture_number: the value received from the server, passed into the check
                     *  face_result);//the face-features object is passed directly so its properties can be used
                     */
                    {
                        poseture.Poseture_Detected(bodies[i], vocabulary.Give_Vocabulary(posture_number), face_result);
                    }
                }
            }
        }
Example #14
    /// <summary>
    /// Handles the body frame data arriving from the sensor
    /// </summary>
    /// <param name="sender">object sending the event</param>
    /// <param name="e">event arguments</param>
    private void Reader_FrameArrived(object sender, BodyFrameArrivedEventArgs e)
    {
        bool dataReceived = false;

        using (BodyFrame bodyFrame = e.FrameReference.AcquireFrame()) {
            if (bodyFrame != null)
            {
                if (this.bodies == null)
                {
                    this.bodies = new Body[bodyFrame.BodyCount];
                }

                // The first time GetAndRefreshBodyData is called, Kinect will allocate each Body in the array.
                // As long as those body objects are not disposed and not set to null in the array,
                // those body objects will be re-used.
                bodyFrame.GetAndRefreshBodyData(this.bodies);
                dataReceived = true;
            }
        }

        if (dataReceived)
        {
            activeBodies = new List <Body>();
            for (int i = 0; i < this.bodies.Length; i++)
            {
                Body body = this.bodies[i];

                if (body.IsTracked)
                {
                    /*Dictionary<JointType, Windows.Kinect.Joint> joints = body.Joints;
                     *
                     * foreach (JointType jointType in joints.Keys) {
                     *  // sometimes the depth(Z) of an inferred joint may show as negative
                     *  // clamp down to 0.1f to prevent coordinatemapper from returning (-Infinity, -Infinity)
                     *  CameraSpacePoint position = joints[jointType].Position;
                     *  if (position.Z < 0) {
                     *      position.Z = InferredZPositionClamp;
                     *  }
                     * }*/
                    activeBodies.Add(body);
                    //Debug.Log(joints[JointType.HandLeft].Position.X + " " + joints[JointType.HandLeft].Position.Y + " " + joints[JointType.HandLeft].Position.Z);
                }
            }
        }
    }
Example #15
        /// <summary>
        /// Handles the body frame data arriving from the sensor
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        private void Reader_FrameArrived(object sender, BodyFrameArrivedEventArgs e)
        {
            bool dataReceived = false;

            using (BodyFrame bodyFrame = e.FrameReference.AcquireFrame())
            {
                if (bodyFrame != null)
                {
                    if (this.bodies == null)
                    {
                        this.bodies = new Body[bodyFrame.BodyCount];
                    }

                    // The first time GetAndRefreshBodyData is called, Kinect will allocate each Body in the array.
                    // As long as those body objects are not disposed and not set to null in the array,
                    // those body objects will be re-used.
                    bodyFrame.GetAndRefreshBodyData(this.bodies);
                    dataReceived = true;
                }
            }

            if (dataReceived)
            {
                for (int i = 0; i < bodies.Length; i++)
                {
                    Body body = bodies[i];

                    if (body.IsTracked && body.TrackingId == engagedBodyId)
                    {
                        if (engagedHandType == HandType.LEFT)
                        {
                            processHandState(body.HandLeftState);
                        }
                        else if (engagedHandType == HandType.RIGHT)
                        {
                            processHandState(body.HandRightState);
                        }
                        else
                        {
                            processHandStateClosed(false);
                        }
                    }
                }
            }
        }
Example #16
        /// <summary>
        /// Handle the new body frames
        /// </summary>
        private async void OnBodiesArrive(object sender, BodyFrameArrivedEventArgs e)
        {
            // Retrieve the body reference
            BodyFrameReference bodyRef = e.FrameReference;

            if (bodyRef == null)
            {
                return;
            }

            // Acquire the body frame
            using (BodyFrame frame = bodyRef.AcquireFrame())
            {
                if (frame == null)
                {
                    return;
                }

                // Create a new collection when required
                if (_bodies == null || _bodies.Count() != frame.BodyCount)
                {
                    _bodies = new Body[frame.BodyCount];
                }

                // Refresh the bodies
                frame.GetAndRefreshBodyData(_bodies);

                // Start tracking faces
                foreach (Body body in _bodies)
                {
                    if (body.IsTracked)
                    {
                        // Create a new tracker if required
                        if (!_trackers.ContainsKey(body.TrackingId))
                        {
                            FaceTracker tracker = new FaceTracker(body.TrackingId, _faceFrameFeatures, _kinect);
                            tracker.FaceAnalyticsAvailable += OnFaceAnalyticsAvailable;

                            // Add to dictionary
                            _trackers.Add(body.TrackingId, tracker);
                        }
                    }
                }
            }
        }
        void multiReader_MultiSourceFrameArrived(object sender,
                                                 MultiSourceFrameArrivedEventArgs e)
        {
            var multiFrame = e.FrameReference.AcquireFrame(); // Acquire the multi-source frame

            if (multiFrame == null)
            {
                return;
            }

            //////////////////////////////////////////////////// Retrieve each data stream ////////////////////////////////////////////////////
            UpdateColorFrame(multiFrame);
            UpdateBodyIndexFrame(multiFrame);
            UpdateDepthFrame(multiFrame);
            UpdateBodyFrame(multiFrame);

            if (BODY_CAPTURE != 0)
            {
                TextBlock7.Visibility = System.Windows.Visibility.Visible;
                TextBlock7.Text       = BODY_CAPTURE.ToString() + "人検出";
            }

            if (!gestureFrameSource.IsTrackingIdValid)
            {
                using (BodyFrame bodyFrame = multiFrame.BodyFrameReference.AcquireFrame())
                {
                    if (bodyFrame != null)
                    {
                        bodyFrame.GetAndRefreshBodyData(bodies);
                        foreach (var body in bodies)
                        {
                            if (body != null && body.IsTracked)
                            {
                                // Select this body as the gesture detection target
                                gestureFrameSource.TrackingId = body.TrackingId;
                                // Start gesture detection
                                gestureFrameReader.IsPaused = false;
                                break;
                            }
                        }
                    }
                }
            }
            draw();
        }
Example #18
    // Update is called once per frame
    void Update()
    {
        if (_bodyFrameReader != null)
        {
            using (BodyFrame frame = _bodyFrameReader.AcquireLatestFrame())
            {
                if (frame != null)
                {
                    if (_bodyData == null)
                    {
                        _bodyData = new Body[_sensor.BodyFrameSource.BodyCount];
                    }

                    frame.GetAndRefreshBodyData(_bodyData);
                }
            }
        }
    }
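The Update above only refreshes _bodyData; a short sketch of how a caller might read a joint from the refreshed array afterwards (field names as above, the helper itself is hypothetical):

    // Sketch only: returns the first tracked body's right-hand position, or null if none is tracked.
    private Vector3? GetRightHandPosition()
    {
        if (_bodyData == null)
        {
            return null;
        }

        foreach (Body body in _bodyData)
        {
            if (body != null && body.IsTracked)
            {
                Windows.Kinect.Joint hand = body.Joints[JointType.HandRight];
                return new Vector3(hand.Position.X, hand.Position.Y, hand.Position.Z);
            }
        }

        return null;
    }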
Example #19
        private static void Reader_FrameArrived(object sender, BodyFrameArrivedEventArgs e)
        {
            // get total number of bodies from BodyFrameSource
            var bodies = new Body[(sender as BodyFrameReader).BodyFrameSource.BodyCount];

            using (BodyFrame bodyFrame = e.FrameReference.AcquireFrame())
            {
                if (bodyFrame != null)
                {
                    bodyFrame.GetAndRefreshBodyData(bodies);

                    if (bodies.ToList().Any(x => x.IsTracked))
                    {
                        var body = bodies.ToList().Where(x => x.IsTracked).First();
                    }
                }
            }
        }
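The handler above enumerates the array twice (Any, then Where/First); the same lookup can be done in one pass with FirstOrDefault. A minimal sketch (requires System.Linq):

                    // One-pass variant of the lookup above; null when no body is tracked.
                    Body trackedBody = bodies.FirstOrDefault(x => x.IsTracked);
                    if (trackedBody != null)
                    {
                        // use trackedBody ...
                    }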
Example #20
        private void SkeletonReady(object sender, BodyFrameArrivedEventArgs e)
        {
            using (BodyFrame skeletonFrame = e.FrameReference.AcquireFrame())
            {
                if (skeletonFrame != null)
                {
                    skeletonFrame.GetAndRefreshBodyData(this.lastframe);

                    bool  found    = false;
                    float minZ     = float.MaxValue;
                    ulong cloestId = 0;

                    for (int i = 0; i < this.lastframe.Length; i++)
                    {
                        if (this.lastframe[i].IsTracked)
                        {
                            found = true;
                            var z = this.lastframe[i].Joints[JointType.Head].Position.Z;
                            if (z < minZ)
                            {
                                minZ     = z;
                                cloestId = this.lastframe[i].TrackingId;
                            }
                        }
                    }

                    if (found)
                    {
                        this.faceFrameSource.TrackingId = cloestId;
                        this.FOuTrackingId[0]           = this.faceFrameSource.TrackingId;
                        this.faceFrameReader.IsPaused   = false;
                    }
                    else
                    {
                        this.faceFrameReader.IsPaused = true;
                        this.FOuTrackingId[0]         = 0;
                    }
                }
            }
        }
        void myBodyReader_FrameArrived(object sender, BodyFrameArrivedEventArgs e)
        {
            if (this.IsPaused)
            {
                return;
            }

            PreposeGesturesFrame retFrame = null;
            PreposeGesturesFrameArrivedEventArgs upArgs = new PreposeGesturesFrameArrivedEventArgs();

            using (BodyFrame bodyFrame = e.FrameReference.AcquireFrame())
            {
                if (bodyFrame != null)
                {
                    // Perform the gesture matching on this frame
                    var z3body = new Z3Body();
                    bodyFrame.GetAndRefreshBodyData(this.bodies);

                    foreach (var body in this.bodies)
                    {
                        if (body.TrackingId == this.mySource.TrackingId)
                        {
                            // We are at the correct body - go ahead and feed it to the BodyMatcher
                            IReadOnlyDictionary <Microsoft.Kinect.JointType, Joint> joints = body.Joints;
                            z3body = Z3KinectConverter.CreateZ3Body(joints);
                            var result = this.PreposeGesturesFrameSource.myMatcher.TestBody(z3body);

                            // Fill in the gesture results for this frame
                            retFrame         = new PreposeGesturesFrame();
                            retFrame.results = result;
                            break;
                        }
                    }
                }
            }

            // TODO: revisit the way the PreposeGesturesFrameReference is implemented to avoid keeping around massive amounts of frames
            PreposeGesturesFrameReference retFrameReference = new PreposeGesturesFrameReference(retFrame);

            upArgs.FrameReference = retFrameReference;

            // Signal that we have a new PreposeGesturesFrame arrived
            FrameArrived(this, upArgs);
        }
Example #22
    // Update is called once per frame
    void Update()
    {
        // ensure the readers are valid
        if (this.bodyFrameReader != null)
        {
            // process bodies
            bool newBodyData = false;
            using (BodyFrame bodyFrame = this.bodyFrameReader.AcquireLatestFrame())
            {
                if (bodyFrame != null)
                {
                    bodyFrame.GetAndRefreshBodyData(this.bodies);
                    newBodyData = true;
                }
            }

            if (newBodyData)
            {
                // update gesture detectors with the correct tracking id
                for (int bodyIndex = 0; bodyIndex < this.bodyCount; bodyIndex++)
                {
                    var body = this.bodies[bodyIndex];
                    if (body != null)
                    {
                        var trackingId = body.TrackingId;

                        // if the current body TrackingId changed, update the corresponding gesture detector with the new value
                        for (int gestureIndex = 0; gestureIndex < dataList.Count; gestureIndex++) // TODO: not complete
                        {
                            if (trackingId != this.gestureList[gestureIndex][bodyIndex].TrackingId)
                            {
                                this.gestureList[gestureIndex][bodyIndex].TrackingId = trackingId;

                                // if the current body is tracked, unpause its detector to get VisualGestureBuilderFrameArrived events
                                // if the current body is not tracked, pause its detector so we don't waste resources trying to get invalid gesture results
                                this.gestureList[gestureIndex][bodyIndex].IsPaused           = (trackingId == 0);
                                this.gestureList[gestureIndex][bodyIndex].OnGestureDetected += CreateOnGestureHandler(bodyIndex, gestureList[gestureIndex][bodyIndex]);
                            }
                        }
                    }
                }
            }
        }
    }
Example #23
        /// <summary>
        /// Process body frames
        /// </summary>
        private void OnBodyFrameReceived(object sender, BodyFrameArrivedEventArgs e)
        {
            // Get Frame ref
            BodyFrameReference bodyRef = e.FrameReference;

            if (bodyRef == null)
            {
                return;
            }

            // Get body frame
            using (BodyFrame frame = bodyRef.AcquireFrame())
            {
                if (frame == null)
                {
                    return;
                }

                // Allocate array when required
                if (_bodies == null)
                {
                    _bodies = new Body[frame.BodyCount];
                }

                // Refresh bodies
                frame.GetAndRefreshBodyData(_bodies);

                foreach (Body body in _bodies)
                {
                    if (body.IsTracked && _faceSource == null)
                    {
                        // Create new sources with body TrackingId
                        _faceSource = new FaceFrameSource(_kinect, body.TrackingId, _faceFrameFeatures);

                        // Create new reader
                        _faceReader = _faceSource.OpenReader();

                        // Wire events
                        _faceReader.FrameArrived   += OnFaceFrameArrived;
                        _faceSource.TrackingIdLost += OnTrackingIdLost;
                    }
                }
            }
        }
Example #24
        /// <summary>
        /// Write Body Frame
        /// </summary>
        /// <param name="multiFrame">MultiSourceFrame retrieved from Kinect.</param>
        private void WriteBody(MultiSourceFrame multiFrame)
        {
            if (multiFrame == null)
            {
                return;
            }

            using (BodyFrame bodyFrame = multiFrame.BodyFrameReference.AcquireFrame())
            {
                if (bodyFrame == null)
                {
                    return;
                }

                bodyFrame.GetAndRefreshBodyData(bodies);

                string time = bodyFrame.RelativeTime.ToString();
                //string time = this.time.ToString("yyyy/MM/dd HH:mm:ss.fff", CultureInfo.CurrentUICulture.DateTimeFormat);

                foreach (Body body in bodies.Where(body => body != null))
                {
                    if (!body.IsTracked)
                    {
                        continue;
                    }

                    csv.Write($"{time},{body.TrackingId}");
                    foreach (var joint in body.Joints)
                    {
                        if (joint.Value.TrackingState == TrackingState.Tracked)
                        {
                            csv.Write($",{joint.Value.Position.X}");
                            csv.Write($",{joint.Value.Position.Y}");
                            csv.Write($",{joint.Value.Position.Z}");
                        }
                        else
                        {
                            csv.Write(",,,");
                        }
                    }
                    csv.Write("\n");
                }
            }
        }
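WriteBody emits three columns per joint after the time and tracking id; a hedged sketch of a matching header-row writer follows (the csv field is the same writer used above, and the column order is an assumption, since body.Joints is a dictionary and not guaranteed to enumerate in JointType order):

        // Sketch only: writes a header row matching the per-joint X/Y/Z columns written above.
        private void WriteCsvHeader()
        {
            csv.Write("Time,TrackingId");
            foreach (JointType jointType in Enum.GetValues(typeof(JointType)))
            {
                csv.Write($",{jointType}_X,{jointType}_Y,{jointType}_Z");
            }

            csv.Write("\n");
        }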
        /// <summary>
        /// Handles the body frame data arriving from the sensor and updates the associated gesture detector object for each body
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        private void Reader_BodyFrameArrived(object sender, BodyFrameArrivedEventArgs e)
        {
            bool dataReceived = false;

            using (BodyFrame bodyFrame = e.FrameReference.AcquireFrame())
            {
                if (bodyFrame != null)
                {
                    if (bodies == null)
                    {
                        bodies = new List <Body>(new Body[bodyFrame.BodyCount].ToList());
                    }

                    bodyFrame.GetAndRefreshBodyData(bodies);
                    dataReceived = true;
                }
            }

            if (dataReceived)
            {
                if (bodies != null)
                {
                    int maxBodies = this.kinectRegion.KinectSensor.BodyFrameSource.BodyCount;
                    for (int i = 0; i < maxBodies; ++i)
                    {
                        if (i < bodies.Count)
                        {
                            Body  body       = this.bodies[i];
                            ulong trackingId = body.TrackingId;

                            if (maxBodies < this.gestureDetectorList.Count)
                            {
                                if (trackingId != this.gestureDetectorList[i].TrackingId)
                                {
                                    gestureDetectorList[i].TrackingId = trackingId;

                                    gestureDetectorList[i].IsPaused = trackingId == 0;
                                }
                            }
                        }
                    }
                }
            }
        }
        void reader_FrameArrived(object sender, BodyFrameArrivedEventArgs e)
        {
            BodyFrameReference BFReference = e.FrameReference;

            try
            {
                BodyFrame frame = BFReference.AcquireFrame();
                if (frame != null)
                {
                    using (frame)
                    {
                        using (DrawingContext dc = SkeletonDrawing.Open())
                        {
                            dc.DrawRectangle(Brushes.Black, null, new Rect(0.0, 0.0, DisplayWidth, DisplayHeight));
                            if (Bodies == null)
                            {
                                Bodies = new Body[frame.BodyCount];
                            }
                            frame.GetAndRefreshBodyData(Bodies);
                            foreach (Body b in Bodies)
                            {
                                if (b.IsTracked)
                                {
                                    this.DrawClippedEdges(b, dc);
                                    IReadOnlyDictionary <JointType, Joint> joints = b.Joints;

                                    Dictionary <JointType, Point> jointsPoints = new Dictionary <JointType, Point>();
                                    foreach (JointType jt in joints.Keys)
                                    {
                                        DepthSpacePoint dsp = CordMapper.MapCameraPointToDepthSpace(joints[jt].Position);
                                        jointsPoints[jt] = new Point(dsp.X, dsp.Y);
                                    }
                                    this.DrawBody(joints, jointsPoints, dc);
                                    DrawHand(b.HandLeftState, jointsPoints[JointType.HandLeft], dc);
                                    DrawHand(b.HandRightState, jointsPoints[JointType.HandRight], dc);
                                }
                                this.SkeletonDrawing.ClipGeometry = new RectangleGeometry(new Rect(0.0, 0.0, DisplayWidth, DisplayHeight));
                            }
                        }
                    }
                }
            }
            catch (Exception) { }
        }
Example #27
    // Update is called once per frame
    void Update()
    {
        if (_reader != null)
        {
            BodyFrame frame = _reader.AcquireLatestFrame();
            if (frame != null)
            {
                if (_data == null)
                {
                    _data = new Body[_sensor.BodyFrameSource.BodyCount];
                }

                frame.GetAndRefreshBodyData(_data);

                frame.Dispose();
                frame = null;
            }
        }
    }
Example #28
        private void Reader_FrameArrived(object sender, BodyFrameArrivedEventArgs e)
        {
            bool dataReceived = false;

            using (BodyFrame bodyFrame = e.FrameReference.AcquireFrame())
            {
                if (bodyFrame != null)
                {
                    if (this.bodies == null)
                    {
                        bodies = new Body[bodyFrame.BodyCount];
                    }
                    // The first time GetAndRefreshBodyData is called, Kinect will allocate each Body in the array.
                    // As long as those body objects are not disposed and not set to null in the array,
                    // those body objects will be re-used.
                    bodyFrame.GetAndRefreshBodyData(this.bodies);
                    dataReceived = true;
                }
            }

            if (dataReceived)
            {
                var trackedBodies = bodies.Where(b => b.IsTracked);
                if (!trackedBodies.Any())
                {
                    return;
                }
                var activeBody = trackedBodies.OrderBy(b => b.Joints[JointType.FootLeft].Position.Z).First();
                if (activeBody != null)
                {
                    _gestureController.UpdateAllGestures(activeBody);
                }

                //foreach (Body body in this.bodies)
                //{
                //    if (body.IsTracked)
                //    {
                //        //LogHelper.GetInstance().ShowMsg(body.Joints[JointType.HandLeft].Position.X + "," + body.Joints[JointType.ElbowLeft].Position.X);
                //        _gestureController.UpdateAllGestures(body);
                //    }
                //}
            }
        }
Example #29
        private void multiFrameReader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
        {
            MultiSourceFrame multiFrame = e.FrameReference.AcquireFrame();

            // Acquire and display the color frame
            using (var colorFrame = multiFrame.ColorFrameReference.AcquireFrame())
            {
                if (colorFrame == null)
                {
                    return;
                }

                // Display the RGB image
                colorBuffer = new byte[colorFrameDescription.Width * colorFrameDescription.Height * colorFrameDescription.BytesPerPixel];
                colorFrame.CopyConvertedFrameDataToArray(colorBuffer, ColorImageFormat.Bgra);

                ImageColor.Source = BitmapSource.Create(colorFrameDescription.Width, colorFrameDescription.Height, 96, 96,
                                                        PixelFormats.Bgra32, null, colorBuffer, colorFrameDescription.Width * (int)colorFrameDescription.BytesPerPixel);
            }

            // Find one body and set it as the gesture detection target
            if (!gestureFrameSource.IsTrackingIdValid)
            {
                using (BodyFrame bodyFrame = multiFrame.BodyFrameReference.AcquireFrame())
                {
                    if (bodyFrame != null)
                    {
                        bodyFrame.GetAndRefreshBodyData(bodies);

                        foreach (var body in bodies)
                        {
                            if (body != null && body.IsTracked)
                            {
                                // Select this body as the gesture detection target
                                gestureFrameSource.TrackingId = body.TrackingId;
                                // Start gesture detection
                                gestureFrameReader.IsPaused = false;
                            }
                        }
                    }
                }
            }
        }
Example #30
        /// <summary>
        /// Body frame capture
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="args"></param>
        private void bodyReader_FrameArrived(BodyFrameReader sender, BodyFrameArrivedEventArgs args)
        {
            CoreWindow c = Window.Current.CoreWindow;

            Debug.WriteLine(DateTime.Now + " IS Body FRame NUll ");
            if (args.FrameReference != null)
            {
                using (BodyFrame bodyFrame = args.FrameReference.AcquireFrame())
                {
                    if (bodyFrame != null)
                    {
                        this.bodies = new Body[bodyFrame.BodyCount];

                        bodyFrame.GetAndRefreshBodyData(this.bodies);


                        foreach (Body body in this.bodies)
                        {
                            if (body.IsTracked)
                            {
                                if (app.kinectRegion.KinectEngagementManager != null && app.kinectRegion.KinectEngagementManager.KinectManualEngagedHands != null)
                                {
                                    Debug.WriteLine(app.kinectRegion.KinectEngagementManager.KinectManualEngagedHands);
                                }
                                else
                                {
                                    Debug.WriteLine("IS NULL --- KINECT REGION");
                                }

                                if (body.HandRightState == HandState.Open)
                                {
                                    btnColor.Background = new SolidColorBrush(Colors.Red);
                                }
                                else if (body.HandRightState == HandState.Closed)
                                {
                                    btnColor.Background = new SolidColorBrush(Colors.Blue);
                                }
                            }
                        }
                    }
                }
            }
        }
Example #31
        private void AcquireBodyFrames()
        {
            MultiSourceFrame multiFrame = this.m_MultiSourceFrameReader.AcquireLatestFrame();

            if (multiFrame == null)
            {
                return;
            }

            using (BodyFrame bodyFrame = multiFrame.BodyFrameReference.AcquireFrame())
            {
                if (bodyFrame != null && bodyFrame.RelativeTime > this.m_RelativeTime)
                {
                    bodyFrame.GetAndRefreshBodyData(this.m_Bodies);

                    this.m_BodyCount = 0;

                    // Count tracked bodies and move them to the
                    // start of the array.
                    for (int i = 0, j = this.m_Bodies.Length - 1; i < this.m_Bodies.Length && i < j; ++i)
                    {
                        if (this.m_Bodies[i] == null || !this.m_Bodies[i].GetIsTrackedFast())
                        {
                            var temp = this.m_Bodies[i];
                            this.m_Bodies[i--] = this.m_Bodies[j];
                            this.m_Bodies[j--] = temp;
                            continue;
                        }

                        ++this.m_BodyCount;
                    }

                    this.m_RelativeTime   = bodyFrame.RelativeTime;
                    this.m_FloorClipPlane = bodyFrame.FloorClipPlane;
                    ++this.m_Frame;
                }
            }

            // According to the documentation MultiSourceFrame implements IDisposable,
            // but this is not true for the provided scripts. Instead the finalizer
            // needs to run to clean up the resources.
            multiFrame = null;
        }
Example #32
        void BodyFrameReady(BodyFrame frame)
        {
            frame.GetAndRefreshBodyData(m_bodies);

            // now update the player-index-to-body mapping
            for (int i = 0; i < PlayerCount; i++) {
                UpdatePlayerMapping(i);
            }

            for (int i = 0; i < PlayerCount; i++) {
                if (m_eventSinkArray[i] != null)
                {
                    // Update the bodies with the latest pose state.  This may fire events and state machine actions.
                    m_holofunkBodies[i].Update(this,
                        m_eventSinkArray[i].OnLeftHand,
                        m_eventSinkArray[i].OnLeftArm,
                        m_eventSinkArray[i].OnRightHand,
                        m_eventSinkArray[i].OnRightArm);
                }
            }

            if (m_bodyFrameUpdateAction != null) {
                m_bodyFrameUpdateAction(this);
            }
        }
Example #33
        /// <summary>
        /// Finds the closest body from the sensor if any
        /// </summary>
        /// <param name="bodyFrame">A body frame</param>
        /// <returns>Closest body, null of none</returns>
        private static Body FindClosestBody(BodyFrame bodyFrame)
        {
            Body result = null;
            double closestBodyDistance = double.MaxValue;

            Body[] bodies = new Body[bodyFrame.BodyCount];
            bodyFrame.GetAndRefreshBodyData(bodies);

            foreach (var body in bodies)
            {
                if (body.IsTracked)
                {
                    var currentLocation = body.Joints[JointType.SpineBase].Position;

                    var currentDistance = VectorLength(currentLocation);

                    if (result == null || currentDistance < closestBodyDistance)
                    {
                        result = body;
                        closestBodyDistance = currentDistance;
                    }
                }
            }

            return result;
        }
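VectorLength is not shown in this example; a minimal sketch assuming it returns the Euclidean distance of a CameraSpacePoint from the sensor origin:

        // Assumed helper (sketch): straight-line distance of a camera-space point from the sensor, in metres.
        private static double VectorLength(CameraSpacePoint point)
        {
            return Math.Sqrt((point.X * point.X) + (point.Y * point.Y) + (point.Z * point.Z));
        }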
Example #34
        /// <summary>
        /// Find if there is a body tracked with the given trackingId
        /// </summary>
        /// <param name="bodyFrame">A body frame</param>
        /// <param name="trackingId">The tracking Id</param>
        /// <returns>The body object, null of none</returns>
        private static Body FindBodyWithTrackingId(BodyFrame bodyFrame, ulong trackingId)
        {
            Body result = null;

            Body[] bodies = new Body[bodyFrame.BodyCount];
            bodyFrame.GetAndRefreshBodyData(bodies);

            foreach (var body in bodies)
            {
                if (body.IsTracked)
                {
                    if (body.TrackingId == trackingId)
                    {
                        result = body;
                        break;
                    }
                }
            }

            return result;
        }
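The same lookup can also be written as a single LINQ query; a minimal equivalent sketch (requires System.Linq):

        // Sketch only: LINQ equivalent of FindBodyWithTrackingId; returns null when no match is found.
        private static Body FindBodyWithTrackingIdLinq(BodyFrame bodyFrame, ulong trackingId)
        {
            Body[] bodies = new Body[bodyFrame.BodyCount];
            bodyFrame.GetAndRefreshBodyData(bodies);

            return bodies.FirstOrDefault(b => b.IsTracked && b.TrackingId == trackingId);
        }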
        private void ProcessFrames(DepthFrame depthFrame, ColorFrame colorFrame, BodyIndexFrame bodyIndexFrame, BodyFrame bodyFrame)
        {
            FrameDescription depthFrameDescription = depthFrame.FrameDescription;
            FrameDescription colorFrameDescription = colorFrame.FrameDescription;
            FrameDescription bodyIndexFrameDescription = bodyIndexFrame.FrameDescription;

            int bodyIndexWidth = bodyIndexFrameDescription.Width;
            int bodyIndexHeight = bodyIndexFrameDescription.Height;

            // The ImageModel object is used to transfer Kinect data into the DataFlow routines.
            ImageModel imageModel = new ImageModel()
            {
                DepthWidth = depthFrameDescription.Width,
                DepthHeight = depthFrameDescription.Height,
                ColorWidth = colorFrameDescription.Width,
                ColorHeight = colorFrameDescription.Height,
                ShowTrails = _vm.LeaveTrails,
                PersonFill = _vm.PersonFill,
                MaxDistance = _vm.BackgroundDistance
            };
            imageModel.ColorFrameData = new byte[imageModel.ColorWidth * imageModel.ColorHeight * this.bytesPerPixel];

            imageModel.DisplayPixels = new byte[_PreviousFrameDisplayPixels.Length];
            imageModel.BodyIndexFrameData = new byte[imageModel.DepthWidth * imageModel.DepthHeight];
            imageModel.ColorPoints = new ColorSpacePoint[imageModel.DepthWidth * imageModel.DepthHeight];
            imageModel.BytesPerPixel = bytesPerPixel;
            imageModel.Bodies = new Body[this.kinectSensor.BodyFrameSource.BodyCount];
            bodyFrame.GetAndRefreshBodyData(imageModel.Bodies);
            imageModel.DepthData = new ushort[imageModel.DepthWidth * imageModel.DepthHeight];
            
            depthFrame.CopyFrameDataToArray(imageModel.DepthData);
            depthFrame.CopyFrameDataToArray(this.DepthFrameData);
            
            if (colorFrame.RawColorImageFormat == ColorImageFormat.Bgra)
            {
                colorFrame.CopyRawFrameDataToArray(imageModel.ColorFrameData);
            }
            else
            {
                colorFrame.CopyConvertedFrameDataToArray(imageModel.ColorFrameData, ColorImageFormat.Bgra);
            }
            imageModel.PixelFormat = PixelFormats.Bgra32;

            _ColorBitmap.WritePixels(new Int32Rect(0, 0, imageModel.ColorWidth, imageModel.ColorHeight),
                                          imageModel.ColorFrameData,
                                          imageModel.ColorWidth * imageModel.BytesPerPixel,
                                          0);

            //RenderTargetBitmap renderBitmap = new RenderTargetBitmap((int)CompositeImage.ActualWidth, (int)CompositeImage.ActualHeight, 96.0, 96.0, PixelFormats.Pbgra32);
            //DrawingVisual dv = new DrawingVisual();
            //VisualBrush brush = new VisualBrush(CompositeImage);

            //foreach(Body body in _bodies)
            //{
            //    if (body.IsTracked)
            //    {
            //        Joint joint = body.Joints[JointType.HandRight];
            //        using (DrawingContext dc = dv.RenderOpen())
            //        {

            //            dc.DrawRectangle(brush, null, new Rect(new Point(), new Size(CompositeImage.ActualWidth, CompositeImage.ActualHeight)));
            //            ImageBrush brush2 = new ImageBrush(_pointerBitmap);
            //            brush2.Opacity = 1.0;
            //            dc.DrawRectangle(brush2, null, new Rect(new Point(0, CompositeImage.ActualHeight - _Overlay.Height), new Size(_pointerBitmap.Width, _pointerBitmap.Height)));
            //        }
            //    }
            //}

            //ConvertIRDataToByte();

            ImagePreview.Source = _ColorBitmap;

            bodyIndexFrame.CopyFrameDataToArray(imageModel.BodyIndexFrameData);

            this.coordinateMapper.MapDepthFrameToColorSpace(DepthFrameData, imageModel.ColorPoints);

            if (_vm.LeaveTrails)
            {
                Array.Copy(this._PreviousFrameDisplayPixels, imageModel.DisplayPixels, this._PreviousFrameDisplayPixels.Length);
            }


            try
            {
                //Send the imageModel to the DataFlow transformer
                _ImageTransformer.Post(imageModel);
            }
            catch (Exception ex)
            {
#if DEBUG
                Console.WriteLine(ex);
#endif
            }
        }
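The _ImageTransformer.Post(imageModel) call above implies a TPL Dataflow block that does the heavy per-frame work off the UI thread. A minimal sketch of how such a block could be declared is shown below; the ActionBlock choice, the ComposeDisplayPixels helper, and the RenderImage callback are assumptions for illustration, not the original pipeline.

        // Sketch only: assumes System.Threading.Tasks.Dataflow and WPF's Dispatcher.
        // ComposeDisplayPixels and RenderImage are hypothetical helpers.
        private ActionBlock<ImageModel> _ImageTransformer;

        private void InitializeImagePipeline()
        {
            _ImageTransformer = new ActionBlock<ImageModel>(imageModel =>
            {
                // Heavy work (body-index masking, color mapping, compositing) runs off the UI thread.
                byte[] displayPixels = ComposeDisplayPixels(imageModel);

                // Marshal the finished pixels back to the UI thread for display.
                Application.Current.Dispatcher.Invoke(() => RenderImage(displayPixels, imageModel));
            },
            new ExecutionDataflowBlockOptions { BoundedCapacity = 2 }); // when full, Post returns false and the frame is skipped
        }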
Example #36
0
        private void ShowBodyJoints(BodyFrame bodyFrame)
        {
            Body[] bodies = new Body[this.kinectSensor.BodyFrameSource.BodyCount];
            bool dataReceived = false;
            if (bodyFrame != null)
            {
                bodyFrame.GetAndRefreshBodyData(bodies);
                dataReceived = true;
            }

            if (dataReceived)
            {
                this.bodiesManager.UpdateBodiesAndEdges(bodies);
            }
        }
Example #37
0
        private void updateBodies(BodyFrame frame)
        {
            if (this.bodies == null)
            {
                this.bodies = new Body[frame.BodyCount];
            }

            // The first time GetAndRefreshBodyData is called, Kinect will allocate each Body in the array.
            // As long as those body objects are not disposed and not set to null in the array,
            // those body objects will be re-used.
            frame.GetAndRefreshBodyData(this.bodies);
        }
        private void RenderBodyFrame(BodyFrame bodyFrame)
        {
            bool dataReceived = false;

            Body[] bodies = null;
            if (bodyFrame != null)
            {
                if (bodies == null)
                {
                    bodies = new Body[bodyFrame.BodyCount];
                }

                // The first time GetAndRefreshBodyData is called, Kinect will allocate each Body in the array.
                // As long as those body objects are not disposed and not set to null in the array,
                // those body objects will be re-used.
                bodyFrame.GetAndRefreshBodyData(bodies);
                dataReceived = true;
            }

            if (dataReceived)
            {

                using (DrawingContext dc = this.drawingGroup.Open())
                {
                    // Draw a transparent background to set the render size
                    dc.DrawRectangle(Brushes.Black, null, new Rect(0.0, 0.0, Instance._displayWidth, Instance._displayHeight));

                    int penIndex = 0;
                    foreach (Body body in bodies)
                    {
                        Pen drawPen = this.bodyColors[penIndex++];

                        if (body.IsTracked)
                        {

                            IReadOnlyDictionary<JointType, Joint> joints = body.Joints;

                            // convert the joint points to depth (display) space
                            Dictionary<JointType, Point> jointPoints = new Dictionary<JointType, Point>();

                            foreach (JointType jointType in joints.Keys)
                            {
                                // sometimes the depth(Z) of an inferred joint may show as negative
                                // clamp down to 0.1f to prevent coordinatemapper from returning (-Infinity, -Infinity)
                                CameraSpacePoint position = joints[jointType].Position;
                                if (position.Z < 0)
                                {
                                    position.Z = InferredZPositionClamp;
                                }

                                DepthSpacePoint depthSpacePoint = this.coordinateMapper.MapCameraPointToDepthSpace(position);
                                jointPoints[jointType] = new Point(depthSpacePoint.X, depthSpacePoint.Y);
                            }

                            this.DrawBody(joints, jointPoints, dc, drawPen, body.JointOrientations);
                            if (Instance.DrawOrientationVectors == true)
                            {
                                this.DrawLocalCoordinates(body.JointOrientations, joints, jointPoints, dc);
                            }
                        }
                    }

                    // prevent drawing outside of our render area
                    this.drawingGroup.ClipGeometry = new RectangleGeometry(new Rect(0.0, 0.0, Instance._displayWidth, Instance._displayHeight));
                }

            }
        }
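Both skeleton renderers in these examples clamp negative joint depths to InferredZPositionClamp before coordinate mapping. Its declaration is not shown here; a minimal sketch, using the 0.1f value named in the comments above, would be:

        // Assumed declaration; the comments above name 0.1f as the clamp value.
        private const float InferredZPositionClamp = 0.1f;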
        public void processBodyFrame(BodyFrame frame)
        {
            frame.GetAndRefreshBodyData(bodies);

            foreach (var body in Bodies)
            {
                if (!eManager.users.ContainsKey(body.TrackingId))
                {
                    eManager.users[body.TrackingId] = new MyHuman(body);
                    eManager.holdTime[body.TrackingId] = 0;
                }

                // Multithreading maybe
                drawer.currentCanvasName = "body";
                drawer.drawSkeleton(body);
            }
        }
Example #40
0
        private void ShowInfraredFrame(InfraredFrame infraredFrame, BodyFrame bodyFrame)
        {
            bool infraredFrameProcessed = false;

            if (infraredFrame != null)
            {
                FrameDescription infraredFrameDescription = infraredFrame.FrameDescription;

                // verify data and write the new infrared frame data to the display bitmap
                if (((infraredFrameDescription.Width * infraredFrameDescription.Height)
                    == this.infraredFrameData.Length) &&
                    (infraredFrameDescription.Width == this.bitmap.PixelWidth) &&
                    (infraredFrameDescription.Height == this.bitmap.PixelHeight))
                {

                    //Debug.WriteLine("Width is " + infraredFrameDescription.Width);
                    //Debug.WriteLine("Height is " + infraredFrameDescription.Height);

                    infraredWidth = infraredFrameDescription.Width;
                    infraredHeight = infraredFrameDescription.Height;

                    // Copy the pixel data from the image to a temporary array
                    infraredFrame.CopyFrameDataToArray(this.infraredFrameData);

                    infraredFrameProcessed = true;
                }
            }

         
            if (bodyFrame != null && bodyFrame.BodyCount > 0)
            {
                myBodies = new Body[this.kinectSensor.BodyFrameSource.BodyCount];
                bodyFrame.GetAndRefreshBodyData(myBodies);
            }

            // we got a frame, convert and render
            if (infraredFrameProcessed)
            {
                this.ConvertInfraredDataToPixels(myBodies);
                this.RenderPixelArray(this.infraredPixels);
            }
        }
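ConvertInfraredDataToPixels is not included in this example. One common approach, sketched below under assumed field names and illustrative scaling constants (the SDK samples use a similar normalization), turns each 16-bit infrared value into a grayscale BGRA pixel; the original method also receives the bodies, presumably to highlight them, which this sketch omits.

        // Sketch only: normalize each 16-bit IR sample into [0, 1], clamp, and expand to BGRA.
        // The scale constant and method/field names are assumptions for illustration.
        private const float InfraredOutputScale = 0.75f;

        private void ConvertInfraredDataToPixelsSketch(ushort[] infraredFrameData, byte[] infraredPixels)
        {
            int pixelIndex = 0;
            for (int i = 0; i < infraredFrameData.Length; ++i)
            {
                // Normalize the raw value, boost it slightly, and clamp into [0, 1].
                float intensityRatio = infraredFrameData[i] / (float)ushort.MaxValue;
                intensityRatio = Math.Min(1.0f, intensityRatio / InfraredOutputScale);

                byte intensity = (byte)(intensityRatio * 255.0f);
                infraredPixels[pixelIndex++] = intensity; // B
                infraredPixels[pixelIndex++] = intensity; // G
                infraredPixels[pixelIndex++] = intensity; // R
                infraredPixels[pixelIndex++] = 255;       // A
            }
        }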
        private void processBodyFrame(BodyFrame bodyFrame, bool showSkeleton, bool showHandStates)
        {
            if (this.bodies == null)
            {
                this.bodies = new Body[bodyFrame.BodyCount];
            }

            // The first time GetAndRefreshBodyData is called, Kinect will allocate each Body in the array.
            // As long as those body objects are not disposed and not set to null in the array,
            // those body objects will be re-used.
            bodyFrame.GetAndRefreshBodyData(this.bodies);

            players[0] = null;
            players[1] = null;
            Body[] tempBodies = new Body[2];
            int playerCount = 0;
            foreach (Body body in this.bodies)
            {
                if (body.IsTracked && playerCount < 2)
                {
                    tempBodies[playerCount++] = body;
                }
            }

            if (playerCount == 2)
            {
                CameraSpacePoint player0Pos = tempBodies[0].Joints[JointType.SpineMid].Position;
                CameraSpacePoint player1Pos = tempBodies[1].Joints[JointType.SpineMid].Position;

                if (player0Pos.X < player1Pos.X)
                {
                    players[0] = new Player(tempBodies[0]);
                    players[1] = new Player(tempBodies[1]);
                }
                else
                {
                    players[1] = new Player(tempBodies[0]);
                    players[0] = new Player(tempBodies[1]);
                }
            }

            using (DrawingContext dc = this.drawingGroup.Open())
            {
                // Draw a transparent background to set the render size
                dc.DrawRectangle(Brushes.Transparent, null, new Rect(0.0, 0.0, this.displayWidth, this.displayHeight));

                int penIndex = 0;
                foreach (Player player in players)
                {
                    if (player == null) continue;
                    Body body = player.body;

                    Pen drawPen = this.bodyColors[penIndex++];

                    if (body.IsTracked)
                    {
                        //this.DrawClippedEdges(body, dc);

                        IReadOnlyDictionary<JointType, Joint> joints = body.Joints;

                        // convert the joint points to depth (display) space
                        Dictionary<JointType, Point> jointPoints = new Dictionary<JointType, Point>();
                        Dictionary<JointType, float> jointPointDepths = new Dictionary<JointType, float>();

                        foreach (JointType jointType in joints.Keys)
                        {
                            // sometimes the depth(Z) of an inferred joint may show as negative
                            // clamp down to 0.1f to prevent coordinatemapper from returning (-Infinity, -Infinity)
                            CameraSpacePoint position = joints[jointType].Position;
                            if (position.Z < 0)
                            {
                                position.Z = InferredZPositionClamp;
                            }

                            ColorSpacePoint colorSpacePoint = this.coordinateMapper.MapCameraPointToColorSpace(position);
                            jointPoints[jointType] = new Point(colorSpacePoint.X, colorSpacePoint.Y);
                            jointPointDepths[jointType] = position.Z;
                        }

                        // Maps the actual joint points to the player
                        player.jointPoints = jointPoints;
                        player.jointPointDepths = jointPointDepths;

                        if (showSkeleton)
                        {
                            this.DrawBody(joints, jointPoints, dc, drawPen);
                        }

                        if (showHandStates)
                        {
                            this.DrawHand(body.HandLeftState, jointPoints[JointType.HandLeft], dc);
                            this.DrawHand(body.HandRightState, jointPoints[JointType.HandRight], dc);
                        }

                        // THIS IS WHERE THE UPDATES GO. CAN'T MOVE IT BECAUSE OF THE GC OF DRAWING CONTEXT
                        /*
                        this.targetCircleL.update(
                            body.HandLeftState,
                            body.HandRightState,
                            jointPoints[JointType.HandLeft],
                            jointPoints[JointType.HandRight],
                            jointPointDepths[JointType.HandLeft],
                            jointPointDepths[JointType.HandLeft]
                        );
                        this.targetCircleL.draw(dc);
                         */
                        //basketballManager.update(player[0], player[1]);
                        //basketballManager.draw(dc);
                        //processBasketballManager(players[0], players[1], dc);
                    }
                }
                if (players[0] != null && players[1] != null)
                    processBasketballManager(players[0], players[1], dc);

                // prevent drawing outside of our render area
                this.drawingGroup.ClipGeometry = new RectangleGeometry(new Rect(0.0, 0.0, this.displayWidth, this.displayHeight));
            }

            bodyFrame.Dispose();
        }
Example #42
0
        private void ShowBody( BodyFrame bodyFrame )
        {
            frameCount++;
            if ( sw.ElapsedMilliseconds >= 1000 ) {
                scratch.AddSensorValue( "FrameCount", frameCount.ToString() );

                sw.Restart();
                frameCount = 0;
            }

            var mapper = kinect.CoordinateMapper;

            CanvasBody.Children.Clear();

            bodyFrame.GetAndRefreshBodyData( bodies );
            var trackedBody = bodies.FirstOrDefault( b => b.IsTracked );
            if ( trackedBody != null ) {
                var body = trackedBody;
                foreach ( var jointType in body.Joints.Keys ) {
                    var joint = body.Joints[jointType];
                    if ( joint.TrackingState != TrackingState.NotTracked ) {
                        ShowJoint( mapper, joint );

                        int scale = 200;
                        AddSensorValue( jointType, "X", (int)(joint.Position.X * scale) );
                        AddSensorValue( jointType, "Y", (int)(joint.Position.Y * scale) );
                    }
                }

                if ( body.HandLeftConfidence == TrackingConfidence.High ) {
                    AddSensorValue( "HandLeftState", body.HandLeftState.ToString() );
                }
                else {
                    AddSensorValue( "HandLeftState", HandState.Unknown.ToString() );
                }

                if ( body.HandRightConfidence == TrackingConfidence.High ) {
                    AddSensorValue( "HandRightState", body.HandRightState.ToString() );
                }
                else {
                    AddSensorValue( "HandRightState", HandState.Unknown.ToString() );
                }
            }
            else {
                foreach ( JointType jointType in Enum.GetValues(typeof(JointType) ) ) {
                    AddSensorValue( jointType, "X", 0 );
                    AddSensorValue( jointType, "Y", 0 );
                }

                AddSensorValue( "HandLeftState", HandState.Unknown.ToString() );
                AddSensorValue( "HandRightState", HandState.Unknown.ToString() );
            }

            scratch.UpdateSensor();
        }
Example #43
0
        void DoThingsWithBodyFrame(BodyFrame frame)
        {
            canvas.Children.Clear();

            bodies = new Body[frame.BodyFrameSource.BodyCount];

            frame.GetAndRefreshBodyData(bodies);

            foreach (Body body in bodies)
            {
                if (body != null)
                {
                    //Console.WriteLine("Body not null");
                    //canvas.DrawBody(body);

                    if (body.IsTracked)
                    {
                        Joint[] joints = {
                                             body.Joints[JointType.ThumbLeft],
                                             body.Joints[JointType.ThumbRight],
                                             body.Joints[JointType.HandLeft],
                                             body.Joints[JointType.HandRight]
                                         };
                        foreach (Joint joint in joints)
                        {
                            CameraSpacePoint jointPosition = joint.Position;

                            Point point = new Point();

                            if (joint.TrackingState == TrackingState.Tracked)
                            {
                                ColorSpacePoint colorPoint = sensor.CoordinateMapper.MapCameraPointToColorSpace(jointPosition);
                                point.X = float.IsInfinity(colorPoint.X) ? 0 : colorPoint.X;
                                point.Y = float.IsInfinity(colorPoint.Y) ? 0 : colorPoint.Y;
                            }

                            Ellipse ellipse = new Ellipse
                            {
                                Fill = Brushes.IndianRed,
                                Width = 15,
                                Height = 15
                            };

                            Canvas.SetLeft(ellipse, point.X - ellipse.Width / 2);
                            Canvas.SetTop(ellipse, point.Y - ellipse.Height / 2);

                            canvas.Children.Add(ellipse);
                        }
                    }
                    #region oldTest
                    //if (body.IsTracked)
                    //{

                    //    Joint handRight = body.Joints[JointType.HandRight];
                    //    CameraSpacePoint camHandRight = handRight.Position;
                    //    ColorSpacePoint hrPoint = sensor.CoordinateMapper.MapCameraPointToColorSpace(camHandRight);

                    //    Joint thumbRight = body.Joints[JointType.ThumbRight];
                    //    CameraSpacePoint camThumbRight = thumbRight.Position;
                    //    ColorSpacePoint trPoint = sensor.CoordinateMapper.MapCameraPointToColorSpace(camThumbRight);

                    //    Joint handLeft = body.Joints[JointType.HandLeft];
                    //    CameraSpacePoint camHandLeft = handLeft.Position;
                    //    ColorSpacePoint hlPoint = sensor.CoordinateMapper.MapCameraPointToColorSpace(camHandLeft);

                    //    Joint thumbLeft = body.Joints[JointType.ThumbLeft];
                    //    CameraSpacePoint camThumbLeft = thumbLeft.Position;
                    //    ColorSpacePoint tlPoint = sensor.CoordinateMapper.MapCameraPointToColorSpace(camThumbLeft);
                    //    if (handLeft.TrackingState != TrackingState.Tracked) return;
                    //    if (handRight.TrackingState != TrackingState.Tracked) return;
                    //    if (thumbLeft.TrackingState != TrackingState.Tracked) return;
                    //    if (thumbRight.TrackingState != TrackingState.Tracked) return;
                    //    canvas.DrawPoint(hrPoint, 0);
                    //    canvas.DrawPoint(trPoint, 1);
                    //    canvas.DrawPoint(hlPoint, 0);
                    //    canvas.DrawPoint(tlPoint, 1);

                    //}
                    #endregion
                }
            }
        }
Example #44
0
        //lab 13
        private void RegisterGesture(BodyFrame bodyFrame)
        {
            bool dataReceived = false;
            Body[] bodies = null;

            if (bodyFrame != null)
            {
                if (bodies == null)
                {
                    // Creates an array of 6 bodies, which is the max number of bodies that Kinect can track simultaneously
                    bodies = new Body[bodyFrame.BodyCount];
                }

                // The first time GetAndRefreshBodyData is called, Kinect will allocate each Body in the array.
                // As long as those body objects are not disposed and not set to null in the array,
                // those body objects will be re-used.
                bodyFrame.GetAndRefreshBodyData(bodies);
                dataReceived = true;
            }

            if (dataReceived)
            {
                // We may have lost/acquired bodies, so update the corresponding gesture detectors
                if (bodies != null)
                {
                    // Loop through all bodies to see if any of the gesture detectors need to be updated
                    for (int i = 0; i < bodyFrame.BodyCount; ++i)
                    {
                        Body body = bodies[i];
                        ulong trackingId = body.TrackingId;

                        // If the current body TrackingId changed, update the corresponding gesture detector with the new value
                        if (trackingId != this.gestureDetectorList[i].TrackingId)
                        {
                            this.gestureDetectorList[i].TrackingId = trackingId;

                            // If the current body is tracked, unpause its detector to get VisualGestureBuilderFrameArrived events
                            // If the current body is not tracked, pause its detector so we don't waste resources trying to get invalid gesture results
                            this.gestureDetectorList[i].IsPaused = trackingId == 0;
                        }
                    }
                }
            }
        }
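RegisterGesture assumes gestureDetectorList was filled once with one detector per body slot, so that bodies[i] always pairs with gestureDetectorList[i]. A minimal setup sketch is shown below; the GestureDetector constructor signature is an assumption modeled loosely on the SDK's gesture samples, not code from this example.

        // Setup sketch (assumed GestureDetector wrapper exposing TrackingId and IsPaused,
        // as used by RegisterGesture above).
        private List<GestureDetector> gestureDetectorList = new List<GestureDetector>();

        private void InitializeGestureDetectors(KinectSensor kinectSensor)
        {
            int maxBodies = kinectSensor.BodyFrameSource.BodyCount;
            for (int i = 0; i < maxBodies; ++i)
            {
                GestureDetector detector = new GestureDetector(kinectSensor); // hypothetical constructor
                detector.IsPaused = true; // stay paused until a tracked body is assigned
                this.gestureDetectorList.Add(detector);
            }
        }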
        private void ProcessFrames(ColorFrame colorFrame, DepthFrame depthFrame, BodyIndexFrame bodyIndexFrame, BodyFrame bodyFrame, byte [] psBytes0, byte [] psBytes1)
        {            
            // create multiframe to process
            long ticksCopyData = DateTime.Now.Ticks;

            MultiFrame multiFrame = new MultiFrame();
            multiFrame.FrameNb = Interlocked.Increment(ref frameNb);

            // color
            long ticksCreateColorData = DateTime.Now.Ticks;
            byte[] colorData = new byte[colorByteSize];
            Utils.UpdateTimer("CreateColorData", ticksCreateColorData);

            long ticksCopyColorData = DateTime.Now.Ticks;
            colorFrame.CopyConvertedFrameDataToArray(colorData, ColorImageFormat.Bgra);
            Utils.UpdateTimer("CopyColorData", ticksCopyColorData);

            // depth
            long ticksCreateDepthData = DateTime.Now.Ticks;
            ushort[] depthData = new ushort[depthPixelSize];
            depthFrame.CopyFrameDataToArray(depthData);            
            Utils.UpdateTimer("CreateDepthData", ticksCreateDepthData);

            // body index
            long ticksCreateBodyIndexData = DateTime.Now.Ticks;
            byte[] bodyIndexData = new byte[depthPixelSize];
            bodyIndexFrame.CopyFrameDataToArray(bodyIndexData);
            Utils.UpdateTimer("CreateBodyIndexData", ticksCreateBodyIndexData);

            // bodies
            long ticksCreateBodiesData = DateTime.Now.Ticks;
            Body[] bodies = new Body[bodyFrame.BodyCount];
            bodyFrame.GetAndRefreshBodyData(bodies);
            Utils.UpdateTimer("CreateBodiesData", ticksCreateBodiesData);

            // ps3eye
            byte[] psBytes = null;
            if (psBytes0 != null && psBytes1 != null)
            {
                long ticksCreatePS3EyeData = DateTime.Now.Ticks;
                psBytes = new byte[psByteSize * 2];
                Utils.UpdateTimer("CreatePS3EyeData", ticksCreatePS3EyeData);

                long ticksCopyPS3EyeData = DateTime.Now.Ticks;
                CopyPS3EyeDataMirror(psBytes, psBytes0, psBytes1);
                Utils.UpdateTimer("CopyPS3EyeData", ticksCopyPS3EyeData);
            }

            // multiFrame
            long ticksMultiFrame = DateTime.Now.Ticks;
            multiFrame.DepthData = depthData;
            multiFrame.ColorData = colorData;
            multiFrame.BodyIndexData = bodyIndexData;
            multiFrame.Bodies = bodies;
            multiFrame.PS3EyeData = psBytes;
            multiFrame.HasKinectData = true;
            multiFrame.HasPS3EyeData = psBytes != null ? true : false;
            Utils.UpdateTimer("MultiFrame", ticksMultiFrame);

            long ticksEnqueue = DateTime.Now.Ticks;
            ProcessingManager.Instance.EnqueueMultiFrame(multiFrame);
            Utils.UpdateTimer("Enqueue", ticksEnqueue);

            Utils.UpdateTimer("CopyFramesData", ticksCopyData);

            // display timers & queues
            Context.GUI.DisplayPerformance();
        }
Example #46
0
        private void RegisterGesture(BodyFrame bodyFrame)
        {
            bool dataReceived = false;
            Body[] bodies = null;
            if (bodyFrame != null)
            {
                if (bodies == null)
                {
                    bodies = new Body[bodyFrame.BodyCount];
                }
                bodyFrame.GetAndRefreshBodyData(bodies);
                dataReceived = true;
            }
            if (dataReceived)
            {
                if (bodies != null)
                {
                    for (int i = 0; i < bodyFrame.BodyCount; i++)
                    {
                        Body body = bodies[i];
                        ulong trackingId = body.TrackingId;
                        if (trackingId != gestureDetectorList[i].TrackingId)
                        {
                            gestureDetectorList[i].TrackingId = trackingId;
                            gestureDetectorList[i].IsPaused = trackingId == 0;
                        }
                    }
                }
            }

        }
Example #47
0
        // Reads in the bodyFrame (if it contains bodies)
        private void GetBodyJoints(BodyFrame bodyFrame)
        {
            // Makes sure the bodyFrame contains bodies, and if so, puts the data in bodies
            if (bodyFrame != null)
            {
                if (this.bodies == null)
                {
                    this.bodies = new Body[bodyFrame.BodyCount];
                }

                // The first time GetAndRefreshBodyData is called, Kinect will allocate each Body in the array.
                // As long as those body objects are not disposed and not set to null in the array,
                // those body objects will be re-used.
                bodyFrame.GetAndRefreshBodyData(this.bodies);
                dataReceived = true;
            }
        }
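Once GetBodyJoints has refreshed this.bodies, a caller can read individual joints from the array; the method below is an illustrative sketch with an assumed name, not part of the original example.

        // Sketch: return the right-hand position of the first tracked body, if any.
        private CameraSpacePoint? GetFirstTrackedRightHand()
        {
            if (this.bodies == null)
            {
                return null;
            }

            foreach (Body body in this.bodies)
            {
                if (body != null && body.IsTracked)
                {
                    Joint handRight = body.Joints[JointType.HandRight];
                    if (handRight.TrackingState != TrackingState.NotTracked)
                    {
                        return handRight.Position;
                    }
                }
            }

            return null;
        }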