/// <summary>
/// Handles the depth/color/body index frame data arriving from the sensor
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    DepthFrame depthFrame = null;
    ColorFrame colorFrame = null;
    BodyIndexFrame bodyIndexFrame = null;
    BodyFrame bodyFrame = null;

    _isBitmapLocked = false;

    var multiSourceFrame = e.FrameReference.AcquireFrame();

    // If the Frame has expired by the time we process this event, return.
    if (multiSourceFrame == null)
    {
        return;
    }

    // We use a try/finally to ensure that we clean up before we exit the function.
    // This includes calling Dispose on any Frame objects that we may have and
    // unlocking the bitmap back buffer.
    try
    {
        depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame();
        colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame();
        bodyIndexFrame = multiSourceFrame.BodyIndexFrameReference.AcquireFrame();
        bodyFrame = multiSourceFrame.BodyFrameReference.AcquireFrame();

        // If any frame has expired by the time we process this event, return.
        // The "finally" statement will Dispose any that are not null.
        if ((depthFrame == null) || (colorFrame == null) || (bodyIndexFrame == null) || (bodyFrame == null))
        {
            return;
        }

        ProcessBackgroundOld(depthFrame, colorFrame, bodyIndexFrame);
        ProcessBody(bodyFrame, false);
    }
    finally
    {
        if (_isBitmapLocked)
        {
            _bitmap.Unlock();
        }

        depthFrame?.Dispose();
        colorFrame?.Dispose();
        bodyIndexFrame?.Dispose();
        bodyFrame?.Dispose();
    }
}
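// The try/finally pattern above can also be expressed with C# 8 `using`
// declarations, which dispose each frame automatically at the end of the
// scope (disposing a null frame reference is a no-op). A minimal sketch,
// assuming `using Microsoft.Kinect;` and a ProcessFrames helper
// (hypothetical name):
private void OnMultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    MultiSourceFrame multiSourceFrame = e.FrameReference.AcquireFrame();
    if (multiSourceFrame == null)
    {
        return; // frame expired before we handled the event
    }

    using DepthFrame depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame();
    using ColorFrame colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame();
    using BodyFrame bodyFrame = multiSourceFrame.BodyFrameReference.AcquireFrame();

    if (depthFrame == null || colorFrame == null || bodyFrame == null)
    {
        return; // `using` still disposes whichever frames were acquired
    }

    ProcessFrames(depthFrame, colorFrame, bodyFrame); // hypothetical helper
}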
// Update is called once per frame
void Update()
{
    if (_reader != null)
    {
        BodyFrame frame = _reader.AcquireLatestFrame();
        if (frame != null)
        {
            if (_data == null)
            {
                _data = new Body[_sensor.BodyFrameSource.BodyCount];
            }

            frame.GetAndRefreshBodyData(_data);

            frame.Dispose();
            frame = null;
        }
    }
}
public void FreeMultiSourceFrame(KinectInterop.SensorData sensorData)
{
    // release all frames
    if (msBodyFrame != null)
    {
        msBodyFrame.Dispose();
        msBodyFrame = null;
    }

    if (msBodyIndexFrame != null)
    {
        msBodyIndexFrame.Dispose();
        msBodyIndexFrame = null;
    }

    if (msColorFrame != null)
    {
        msColorFrame.Dispose();
        msColorFrame = null;
    }

    if (msDepthFrame != null)
    {
        msDepthFrame.Dispose();
        msDepthFrame = null;
    }

    if (msInfraredFrame != null)
    {
        msInfraredFrame.Dispose();
        msInfraredFrame = null;
    }

    // drop the reference to the multi-source frame itself
    multiSourceFrame = null;
}
void Update()
{
    if (_Reader != null)
    {
        _Frame = _Reader.AcquireLatestFrame();
        if (_Frame != null)
        {
            //_vec4 = _Frame.FloorClipPlane;
            //Vector3 normalVector = new Vector3(_vec4.X, _vec4.Y, _vec4.Z);

            if (_Data == null)
            {
                _Data = new Body[_Sensor.BodyFrameSource.BodyCount];
            }

            _Frame.GetAndRefreshBodyData(_Data);
            NewFrame(this, _Frame);

            _Frame.Dispose();
            _Frame = null;
        }
    }
}
private void Update()
{
    if (frameReader != null)
    {
        MultiSourceFrame multiSourceFrame = frameReader.AcquireLatestFrame();
        if (multiSourceFrame != null)
        {
            BodyFrame bodyFrame = multiSourceFrame.BodyFrameReference.AcquireFrame();
            if (bodyFrame != null)
            {
                if (bodyData == null)
                {
                    bodyData = new Body[kinectSensor.BodyFrameSource.BodyCount];
                }

                bodyFrame.GetAndRefreshBodyData(bodyData);
                floor = new Floor(bodyFrame.FloorClipPlane);

                bodyFrame.Dispose();
                bodyFrame = null;
            }

            DetectCenterBody();
            multiSourceFrame = null;
            isNewFrame = true;
        }
        else
        {
            isNewFrame = false;
        }
    }
}
void FixedUpdate()
{
    if (_bfReader != null)
    {
        //Text_connect.text = "Connect Success";

        // acquire one frame
        BodyFrame frame = _bfReader.AcquireLatestFrame();
        if (frame != null)
        {
            // refresh the body data
            frame.GetAndRefreshBodyData(KinectAction.BodyArray);
            KinectAction.KinectActionUpdate();

            // release the frame
            frame.Dispose();
            frame = null;
        }
    }
    else
    {
        //Text_connect.text = "Connect Fail";
    }
}
/// <summary>
/// Event handler executed when the Kinect has acquired several kinds of frames.
/// </summary>
/// <param name="sender">
/// The object that raised the event; here, the Kinect.
/// </param>
/// <param name="e">
/// The data passed when the event occurs.
/// </param>
void MultiSourceFrameReader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    MultiSourceFrame frames = this.multiSourceFrameReader.AcquireLatestFrame();
    if (frames == null)
    {
        return;
    }

    ColorFrame colorFrame = frames.ColorFrameReference.AcquireFrame();
    if (colorFrame == null)
    {
        return;
    }

    BodyFrame bodyFrame = frames.BodyFrameReference.AcquireFrame();
    if (bodyFrame == null)
    {
        colorFrame.Dispose();
        return;
    }

    // Clear the canvas and update it.
    this.canvas.Background = new ImageBrush(GetBitmapSource(colorFrame, colorFrameDescription));
    this.canvas.Children.Clear();

    Body[] bodies = new Body[bodyFrame.BodyCount];
    bodyFrame.GetAndRefreshBodyData(bodies);

    foreach (Body body in bodies)
    {
        if (body.IsTracked == false)
        {
            continue;
        }

        // Visualize the joints to be drawn.
        IReadOnlyDictionary<JointType, Joint> joints = body.Joints;
        foreach (KeyValuePair<JointType, Joint> joint in joints) //foreach (var joint in joints) also works.
        {
            if (joint.Value.TrackingState == TrackingState.Tracked)
            {
                DrawJointEllipseInColorSpace(joint.Value, 10, Colors.Aqua);
            }
            else if (joint.Value.TrackingState == TrackingState.Inferred)
            {
                DrawJointEllipseInColorSpace(joint.Value, 10, Colors.Yellow);
            }
        }

        // Visualize the state of the left hand.
        switch (body.HandLeftState)
        {
            // Closed (a fist).
            case HandState.Closed:
            {
                DrawJointEllipseInColorSpace(joints[JointType.HandLeft], 20, Colors.Blue);
                break;
            }

            // Lasso ("scissors"; in practice, given the tracking accuracy, a single raised finger also triggers this).
            case HandState.Lasso:
            {
                DrawJointEllipseInColorSpace(joints[JointType.HandLeft], 20, Colors.Green);
                break;
            }

            // Open (a flat hand).
            case HandState.Open:
            {
                DrawJointEllipseInColorSpace(joints[JointType.HandLeft], 20, Colors.Red);
                break;
            }
        }
    }

    colorFrame.Dispose();
    bodyFrame.Dispose();
}
private void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    int depthWidth = 0;
    int depthHeight = 0;

    DepthFrame depthFrame = null;
    ColorFrame colorFrame = null;
    BodyIndexFrame bodyIndexFrame = null;
    BodyFrame bodyFrame = null;
    bool isBitmapLocked = false;

    MultiSourceFrame multiSourceFrame = e.FrameReference.AcquireFrame();

    // If the frame has already expired, there is nothing to do.
    if (multiSourceFrame == null)
    {
        return;
    }

    try
    {
        depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame();
        colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame();
        bodyIndexFrame = multiSourceFrame.BodyIndexFrameReference.AcquireFrame();
        bodyFrame = multiSourceFrame.BodyFrameReference.AcquireFrame();

        if ((depthFrame == null) || (colorFrame == null) || (bodyIndexFrame == null) || (bodyFrame == null))
        {
            return;
        }

        if (this.bodies == null)
        {
            this.bodies = new Body[bodyFrame.BodyCount];
        }

        bodyFrame.GetAndRefreshBodyData(this.bodies);
        bodyFrame.Dispose();
        bodyFrame = null;

        bool isBodyTracked = false;
        foreach (Body body in this.bodies)
        {
            if (body.IsTracked)
            {
                isBodyTracked = true;
                break; // one tracked body is enough
            }
        }

        hasTrackedBodies = isBodyTracked;
        if (hasTrackedBodies && !isCapturing)
        {
            BeginCountdown();
        }

        FrameDescription depthFrameDescription = depthFrame.FrameDescription;
        depthWidth = depthFrameDescription.Width;
        depthHeight = depthFrameDescription.Height;

        using (KinectBuffer depthFrameData = depthFrame.LockImageBuffer())
        {
            this.coordinateMapper.MapColorFrameToDepthSpaceUsingIntPtr(
                depthFrameData.UnderlyingBuffer,
                depthFrameData.Size,
                this.colorMappedToDepthPoints);
        }

        depthFrame.Dispose();
        depthFrame = null;

        this.liveBitmap.Lock();
        isBitmapLocked = true;

        colorFrame.CopyConvertedFrameDataToIntPtr(this.liveBitmap.BackBuffer, this.bitmapBackBufferSize, ColorImageFormat.Bgra);
        colorFrame.Dispose();
        colorFrame = null;

        using (KinectBuffer bodyIndexData = bodyIndexFrame.LockImageBuffer())
        {
            unsafe
            {
                byte* bodyIndexDataPointer = (byte*)bodyIndexData.UnderlyingBuffer;
                int colorMappedToDepthPointCount = this.colorMappedToDepthPoints.Length;

                fixed (DepthSpacePoint* colorMappedToDepthPointsPointer = this.colorMappedToDepthPoints)
                {
                    uint* bitmapPixelsPointer = (uint*)this.liveBitmap.BackBuffer;

                    for (int colorIndex = 0; colorIndex < colorMappedToDepthPointCount; ++colorIndex)
                    {
                        float colorMappedToDepthX = colorMappedToDepthPointsPointer[colorIndex].X;
                        float colorMappedToDepthY = colorMappedToDepthPointsPointer[colorIndex].Y;

                        if (!float.IsNegativeInfinity(colorMappedToDepthX) && !float.IsNegativeInfinity(colorMappedToDepthY))
                        {
                            int depthX = (int)(colorMappedToDepthX + 0.5f);
                            int depthY = (int)(colorMappedToDepthY + 0.5f);

                            if ((depthX >= 0) && (depthX < depthWidth) && (depthY >= 0) && (depthY < depthHeight))
                            {
                                int depthIndex = (depthY * depthWidth) + depthX;
                                if (bodyIndexDataPointer[depthIndex] != 0xff)
                                {
                                    continue; // pixel belongs to a body; keep it
                                }
                            }
                        }

                        bitmapPixelsPointer[colorIndex] = 0;
                    }
                }

                this.liveBitmap.AddDirtyRect(new Int32Rect(0, 0, this.liveBitmap.PixelWidth, this.liveBitmap.PixelHeight));
            }
        }
    }
    finally
    {
        if (isBitmapLocked)
        {
            this.liveBitmap.Unlock();
        }

        if (depthFrame != null)
        {
            depthFrame.Dispose();
        }

        if (colorFrame != null)
        {
            colorFrame.Dispose();
        }

        if (bodyIndexFrame != null)
        {
            bodyIndexFrame.Dispose();
        }

        if (bodyFrame != null)
        {
            bodyFrame.Dispose();
        }
    }
}
private void processBodyFrame(BodyFrame bodyFrame, bool showSkeleton, bool showHandStates)
{
    if (this.bodies == null)
    {
        this.bodies = new Body[bodyFrame.BodyCount];
    }

    // The first time GetAndRefreshBodyData is called, Kinect will allocate each Body in the array.
    // As long as those body objects are not disposed and not set to null in the array,
    // those body objects will be re-used.
    bodyFrame.GetAndRefreshBodyData(this.bodies);

    players[0] = null;
    players[1] = null;

    Body[] tempBodies = new Body[2];
    int playerCount = 0;
    foreach (Body body in this.bodies)
    {
        if (body.IsTracked && playerCount < 2)
        {
            tempBodies[playerCount++] = body;
        }
    }

    if (playerCount == 2)
    {
        CameraSpacePoint player0Pos = tempBodies[0].Joints[JointType.SpineMid].Position;
        CameraSpacePoint player1Pos = tempBodies[1].Joints[JointType.SpineMid].Position;

        if (player0Pos.X < player1Pos.X)
        {
            players[0] = new Player(tempBodies[0]);
            players[1] = new Player(tempBodies[1]);
        }
        else
        {
            players[1] = new Player(tempBodies[0]);
            players[0] = new Player(tempBodies[1]);
        }
    }

    using (DrawingContext dc = this.drawingGroup.Open())
    {
        // Draw a transparent background to set the render size
        dc.DrawRectangle(Brushes.Transparent, null, new Rect(0.0, 0.0, this.displayWidth, this.displayHeight));

        int penIndex = 0;
        foreach (Player player in players)
        {
            if (player == null)
            {
                continue;
            }

            Body body = player.body;
            Pen drawPen = this.bodyColors[penIndex++];

            if (body.IsTracked)
            {
                //this.DrawClippedEdges(body, dc);

                IReadOnlyDictionary<JointType, Joint> joints = body.Joints;

                // convert the joint points to depth (display) space
                Dictionary<JointType, Point> jointPoints = new Dictionary<JointType, Point>();
                Dictionary<JointType, float> jointPointDepths = new Dictionary<JointType, float>();

                foreach (JointType jointType in joints.Keys)
                {
                    // sometimes the depth (Z) of an inferred joint may show as negative;
                    // clamp down to 0.1f to prevent the coordinate mapper from returning (-Infinity, -Infinity)
                    CameraSpacePoint position = joints[jointType].Position;
                    if (position.Z < 0)
                    {
                        position.Z = InferredZPositionClamp;
                    }

                    ColorSpacePoint colorSpacePoint = this.coordinateMapper.MapCameraPointToColorSpace(position);
                    jointPoints[jointType] = new Point(colorSpacePoint.X, colorSpacePoint.Y);
                    jointPointDepths[jointType] = position.Z;
                }

                // Maps the actual joint points to the player
                player.jointPoints = jointPoints;
                player.jointPointDepths = jointPointDepths;

                if (showSkeleton)
                {
                    this.DrawBody(joints, jointPoints, dc, drawPen);
                }

                if (showHandStates)
                {
                    this.DrawHand(body.HandLeftState, jointPoints[JointType.HandLeft], dc);
                    this.DrawHand(body.HandRightState, jointPoints[JointType.HandRight], dc);
                }

                // THIS IS WHERE THE UPDATES GO. CAN'T MOVE IT BECAUSE OF THE GC OF DRAWING CONTEXT
                /*
                this.targetCircleL.update(
                    body.HandLeftState, body.HandRightState,
                    jointPoints[JointType.HandLeft], jointPoints[JointType.HandRight],
                    jointPointDepths[JointType.HandLeft], jointPointDepths[JointType.HandLeft]);
                this.targetCircleL.draw(dc);
                */
                //basketballManager.update(player[0], player[1]);
                //basketballManager.draw(dc);
                //processBasketballManager(players[0], players[1], dc);
            }
        }

        if (players[0] != null && players[1] != null)
        {
            processBasketballManager(players[0], players[1], dc);
        }

        // prevent drawing outside of our render area
        this.drawingGroup.ClipGeometry = new RectangleGeometry(new Rect(0.0, 0.0, this.displayWidth, this.displayHeight));
    }

    bodyFrame.Dispose();
}
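// The comment above describes the allocate-once contract of
// GetAndRefreshBodyData: create the Body array a single time, then pass it
// back in on every frame so the SDK refreshes the same Body objects in
// place. A minimal sketch of that pattern (assumes `using Microsoft.Kinect;`;
// the class and field names here are illustrative, not from the original):
public sealed class BodySource
{
    private readonly KinectSensor sensor = KinectSensor.GetDefault();
    private Body[] bodies; // allocated once, then re-used every frame

    public void OnBodyFrame(BodyFrame frame)
    {
        if (bodies == null)
        {
            bodies = new Body[sensor.BodyFrameSource.BodyCount];
        }

        // Refreshes the existing Body objects instead of allocating new ones,
        // which avoids per-frame garbage.
        frame.GetAndRefreshBodyData(bodies);
    }
}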
/// <summary>
/// Update to get a new frame.
/// This code is similar to the code in the Kinect SDK samples.
/// </summary>
private static void Update()
{
    if (!isConnected)
    {
        return;
    }

    dataAvailable.WaitOne();

    MultiSourceFrame multiSourceFrame = null;
    DepthFrame depthFrame = null;
    InfraredFrame irFrame = null;
    BodyFrame bodyFrame = null;

    lock (updateLock)
    {
        try
        {
            if (frameReference != null)
            {
                multiSourceFrame = frameReference.AcquireFrame();
                if (multiSourceFrame != null)
                {
                    DepthFrameReference depthFrameReference = multiSourceFrame.DepthFrameReference;
                    InfraredFrameReference irFrameReference = multiSourceFrame.InfraredFrameReference;
                    BodyFrameReference bodyFrameReference = multiSourceFrame.BodyFrameReference;

                    depthFrame = depthFrameReference.AcquireFrame();
                    irFrame = irFrameReference.AcquireFrame();

                    if ((depthFrame != null) && (irFrame != null))
                    {
                        FrameDescription depthFrameDescription = depthFrame.FrameDescription;
                        FrameDescription irFrameDescription = irFrame.FrameDescription;

                        int depthWidth = depthFrameDescription.Width;
                        int depthHeight = depthFrameDescription.Height;
                        int irWidth = irFrameDescription.Width;
                        int irHeight = irFrameDescription.Height;

                        // verify data and write the new registered frame data to the display bitmap
                        if (((depthWidth * depthHeight) == depthFrameData.Length) &&
                            ((irWidth * irHeight) == irFrameData.Length))
                        {
                            depthFrame.CopyFrameDataToArray(depthFrameData);
                            irFrame.CopyFrameDataToArray(irFrameData);
                        }

                        if (bodyFrameReference != null)
                        {
                            bodyFrame = bodyFrameReference.AcquireFrame();
                            if (bodyFrame != null)
                            {
                                if (bodies == null || bodies.Length < bodyFrame.BodyCount)
                                {
                                    bodies = new Body[bodyFrame.BodyCount];
                                }

                                // the frame is disposed once, in the finally block below
                                bodyFrame.GetAndRefreshBodyData(bodies);
                            }
                        }
                    }
                }
            }
        }
        catch (Exception)
        {
            // ignore if the frame is no longer available
        }
        finally
        {
            if (depthFrame != null)
            {
                depthFrame.Dispose();
                depthFrame = null;
            }

            if (irFrame != null)
            {
                irFrame.Dispose();
                irFrame = null;
            }

            if (bodyFrame != null)
            {
                bodyFrame.Dispose();
                bodyFrame = null;
            }

            multiSourceFrame = null;
        }
    }
}
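// The dataAvailable.WaitOne() call above implies a producer somewhere that
// stores the frame reference and signals the wait handle. A hedged sketch of
// what that event handler might look like, assuming `frameReference`,
// `updateLock`, and `dataAvailable` (an AutoResetEvent) are the same static
// fields used by Update():
private static void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    lock (updateLock)
    {
        // Save the reference; Update() acquires the actual frame from it.
        frameReference = e.FrameReference;
    }

    // Wake up the thread blocked in Update().
    dataAvailable.Set();
}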
public void UpdateBodyFrame(BodyFrame frame)
{
    if (frame == null)
    {
        return;
    }

    Windows.Kinect.Body[] data = new Body[_Sensor.BodyFrameSource.BodyCount];
    frame.GetAndRefreshBodyData(data);
    frame.Dispose();
    frame = null;

    trackedUsers = data;
    bool driverExist = false;

    foreach (var body in data)
    {
        if (body == null)
        {
            continue;
        }

        if (body.IsTracked)
        {
            if (driver == 0)
            {
                setDriver(body.TrackingId);
            }

            if (driver == body.TrackingId)
            {
                driverExist = true;

                DepthSpacePoint oldRightHandPos = rightHandPos;
                CalculateJointPositions(body, Windows.Kinect.JointType.HandRight);
                CalculateJointPositions(body, Windows.Kinect.JointType.HandLeft);
                DepthSpacePoint headPos = CalculateJointPositions(body, Windows.Kinect.JointType.Head);
                DepthSpacePoint leftHandPos = CalculateJointPositions(body, Windows.Kinect.JointType.HandLeft);

                menuGestureOk = (leftHandPos.Y - 20) < headPos.Y &&
                                (body.HandLeftState == HandState.Closed || body.HandLeftState == HandState.Unknown);

                float diffX = oldRightHandPos.X - rightHandPos.X;
                float diffY = oldRightHandPos.Y - rightHandPos.Y;
                float diffZ = oldRightHandDepth - rightHandDepth;

                if (body.HandRightState == HandState.Closed) // drive or menu
                {
                    float moveInfluence = Acceleration * 0.1f * MoveScale * MoveScaleMultiplier;

                    Quaternion ort = (HmdRotatesY) ? CameraController.centerEyeAnchor.rotation : transform.rotation;
                    Vector3 ortEuler = ort.eulerAngles;
                    ortEuler.z = ortEuler.x = 0f;
                    ort = Quaternion.Euler(ortEuler);

                    Vector3 direction = Vector3.zero;
                    direction.x = CalculateMovement(-diffX, 2) * 2f; // direction is different so we have to invert the sign
                    direction.y = CalculateMovement(diffY, 2) * 2f;
                    direction.z = CalculateMovement(diffZ, 5);

                    if (!menu.enabled) // move if menu is disabled
                    {
                        if (body.HandLeftState == HandState.Lasso) // rotate
                        {
                            if (Math.Abs(diffX) > Math.Abs(diffY))
                            {
                                addAtoms.rotateScene(Vector3.back * (diffX / Math.Abs(diffX)), addAtoms.getTargetPosition());
                            }
                            else
                            {
                                addAtoms.rotateScene(Vector3.right * (diffY / Math.Abs(diffY)), addAtoms.getTargetPosition());
                            }
                        }
                        else if (body.HandLeftState == HandState.Closed)
                        {
                            addAtoms.translateScene(direction);
                        }
                        else
                        {
                            MoveThrottle += ort * (moveInfluence * direction);
                        }
                    }
                    else // menu is showing, move mouse instead
                    {
                        menu.moveCursor(new Vector2(diffX * 2, diffY * 2));
                    }
                }
                else if (body.HandRightState == HandState.Lasso)
                {
                    if (body.HandLeftState == HandState.Lasso) // select atom
                    {
                        RaycastHit hit;
                        Quaternion ort = (HmdRotatesY) ? CameraController.centerEyeAnchor.rotation : transform.rotation;
                        if (Physics.Raycast(transform.position, ort * Vector3.forward, out hit, 60))
                        {
                            addAtoms.setTarget(hit.transform.gameObject);
                        }
                    }
                    else // zoom
                    {
                        if (diffX - 1 > 0)
                        {
                            Camera leftCamera = CameraController.leftEyeAnchor.GetComponent<Camera>();
                            Camera rightCamera = CameraController.rightEyeAnchor.GetComponent<Camera>();
                            if (leftCamera.fieldOfView > 2)
                            {
                                leftCamera.fieldOfView -= 2;
                            }
                            if (rightCamera.fieldOfView > 2)
                            {
                                rightCamera.fieldOfView -= 2;
                            }
                        }

                        if (diffX + 1 < 0)
                        {
                            Camera leftCamera = CameraController.leftEyeAnchor.GetComponent<Camera>();
                            Camera rightCamera = CameraController.rightEyeAnchor.GetComponent<Camera>();
                            if (leftCamera.fieldOfView < 99)
                            {
                                leftCamera.fieldOfView += 2;
                            }
                            if (rightCamera.fieldOfView < 99)
                            {
                                rightCamera.fieldOfView += 2;
                            }
                        }
                    }
                }

                if (body.HandLeftState == HandState.Lasso)
                {
                    if (menu.enabled)
                    {
                        menu.Click();
                    }
                }

                oldRightHandDepth = rightHandDepth;
            }
        }
    }

    if (!driverExist)
    {
        driver = 0;
    }
}
/// <summary>
/// Returns the skeletons which will be tracked. It adds any skeleton that raises a hand
/// (hand above head and elbow above shoulder) to the given list.
/// </summary>
/// <param name="bodies">Skeletons which will be checked for a raised hand.</param>
/// <param name="trackedSkeletons">Currently tracked skeletons.</param>
/// <param name="id">Id which is meant to be (re-)activated.</param>
/// <param name="reader">BodyFrameReader used to poll fresh body frames.</param>
/// <returns>Returns the tracked skeletons.</returns>
public override List<TrackedSkeleton> Filter(Body[] bodies, List<TrackedSkeleton> trackedSkeletons, int id, BodyFrameReader reader)
{
    if (trackedSkeletons == null)
    {
        return trackedSkeletons;
    }

    int i = 0;
    foreach (Body t in bodies.Where(t => (int)t.TrackingId == id))
    {
        trackedSkeletons.Add(new TrackedSkeleton((int)t.TrackingId));
        return trackedSkeletons;
    }

    foreach (Body s in bodies.Where(s => s.TrackingId != 0))
    {
        Debug.WriteLine(s.TrackingId);

        Body[] tempSkeletons = new Body[6];
        bool quit = false;

        // Poll frames until this skeleton shows up as tracked.
        while (!quit)
        {
            BodyFrame f = reader.AcquireLatestFrame();
            i++;
            if (f != null)
            {
                f.GetAndRefreshBodyData(tempSkeletons);

                // Dispose the frame right away, otherwise the next AcquireLatestFrame call cannot succeed.
                f.Dispose();

                foreach (Body tracked in tempSkeletons.Where(tracked => tracked.TrackingId == s.TrackingId && tracked.IsTracked))
                {
                    quit = true;
                }
            }
        }

        // checks if an arm is raised
        foreach (Body skeleton in tempSkeletons)
        {
            if (skeleton.TrackingId == s.TrackingId && skeleton.IsTracked)
            {
                // check left hand
                if ((skeleton.Joints[JointType.Head].Position.Y < skeleton.Joints[JointType.WristLeft].Position.Y) &&
                    (skeleton.Joints[JointType.ShoulderLeft].Position.Y < skeleton.Joints[JointType.ElbowLeft].Position.Y))
                {
                    trackedSkeletons.Add(new TrackedSkeleton((int)skeleton.TrackingId, false));
                }

                // check right hand
                if ((skeleton.Joints[JointType.Head].Position.Y < skeleton.Joints[JointType.WristRight].Position.Y) &&
                    (skeleton.Joints[JointType.ShoulderRight].Position.Y < skeleton.Joints[JointType.ElbowRight].Position.Y))
                {
                    trackedSkeletons.Add(new TrackedSkeleton((int)skeleton.TrackingId, true));
                }
            }
        }
    }

    return trackedSkeletons;
}
/// <summary>
/// Store body (skeleton) information
/// </summary>
/// <param name="bodyFrame">body (skeleton) information to be stored</param>
/// <param name="frameNumber">frame number</param>
public static void Handle_BodyFrame(BodyFrame bodyFrame, String frameNumber)
{
    String skeletonInfoPath = FramesAndPaths.GetSkeletonFilePath(FramesAndPaths.FileType.SkeletonInfo, "SkeletonInfo.txt");

    try
    {
        using (StreamWriter skeletonWriter = new StreamWriter(skeletonInfoPath, true))
        {
            Body[] bodies = new Body[bodyFrame.BodyCount];
            bodyFrame.GetAndRefreshBodyData(bodies);

            // A string to store all the skeletons' information in this frame
            String peopleInfo = String.Empty;

            foreach (Body body in bodies)
            {
                if (body.IsTracked)
                {
                    IReadOnlyDictionary<JointType, Joint> joints = body.Joints;
                    IReadOnlyDictionary<JointType, JointOrientation> jointOrientations = body.JointOrientations;

                    // Acquire the 3D coordinates of the joints in camera space
                    int jointIndex = 0;
                    foreach (JointType jointType in joints.Keys)
                    {
                        // sometimes the depth (Z) of an inferred joint may show as negative;
                        // clamp down to 0.1f to prevent the coordinate mapper from returning (-Infinity, -Infinity)
                        cameraSpacePositions[jointIndex] = joints[jointType].Position;
                        if (cameraSpacePositions[jointIndex].Z < 0)
                        {
                            cameraSpacePositions[jointIndex].Z = InferredZPositionClamp;
                        }
                        ++jointIndex;
                    }

                    // Acquire the 2D coordinates of the 25 joints in color space
                    MainWindow.coordinateMapper.MapCameraPointsToColorSpace(cameraSpacePositions, colorSpacePositions);

                    // Acquire the 2D coordinates of the 25 joints in depth space
                    MainWindow.coordinateMapper.MapCameraPointsToDepthSpace(cameraSpacePositions, depthSpacePositions);

                    // Acquire the joint orientation information
                    jointIndex = 0;
                    foreach (JointType jointType in jointOrientations.Keys)
                    {
                        JointOrientation tmpOrientation = jointOrientations[jointType];
                        orientations[jointIndex++] = tmpOrientation.Orientation;
                    }

                    // frame number, tracking id % 6, floor
                    ulong resizeId = body.TrackingId % 6;
                    Vector4 floor = bodyFrame.FloorClipPlane;
                    //String personInfo = String.Format("{0}, id = {1}, color = {2},\r\n", GlobalData.FrameNumberIncrement(GlobalData.FileType.SkeletonInfo), resizeId, bodyColors[resizeId]);

                    // A string for the current skeleton's information in this frame, appended to peopleInfo below
                    String personInfo = String.Format("{0}, id = {1}, floor = {2} {3} {4} {5}",
                        frameNumber, resizeId, floor.W, floor.X, floor.Y, floor.Z);
                    personInfo += "\r\n";

                    // Append the 3D coordinates in camera space
                    personInfo += "\tcamera_space_coordinates =";
                    personInfo += String.Format(" {0} {1} {2}", cameraSpacePositions[0].X, cameraSpacePositions[0].Y, cameraSpacePositions[0].Z);
                    for (int i = 1; i < cameraSpacePositions.Length; ++i)
                    {
                        personInfo += String.Format(", {0} {1} {2}", cameraSpacePositions[i].X, cameraSpacePositions[i].Y, cameraSpacePositions[i].Z);
                    }
                    personInfo += "\r\n";

                    // Append the 2D skeleton coordinates in the color (RGB) image
                    personInfo += "\tcolor_skeleton_coordinates =";
                    personInfo += String.Format(" {0} {1}", colorSpacePositions[0].X, colorSpacePositions[0].Y);
                    for (int i = 1; i < colorSpacePositions.Length; ++i)
                    {
                        personInfo += String.Format(", {0} {1}", colorSpacePositions[i].X, colorSpacePositions[i].Y);
                    }
                    personInfo += "\r\n";

                    // Append the 2D skeleton coordinates in the depth image
                    personInfo += "\tdepth_skeleton_coordinates =";
                    personInfo += String.Format(" {0} {1}", depthSpacePositions[0].X, depthSpacePositions[0].Y);
                    for (int i = 1; i < depthSpacePositions.Length; ++i)
                    {
                        personInfo += String.Format(", {0} {1}", depthSpacePositions[i].X, depthSpacePositions[i].Y);
                    }
                    personInfo += "\r\n";

                    // Append the joint orientations
                    personInfo += "\tskeleton_orientations =";
                    personInfo += String.Format(" {0} {1} {2} {3}", orientations[0].W, orientations[0].X, orientations[0].Y, orientations[0].Z);
                    for (int i = 1; i < orientations.Length; ++i)
                    {
                        personInfo += String.Format(", {0} {1} {2} {3}", orientations[i].W, orientations[i].X, orientations[i].Y, orientations[i].Z);
                    }

                    peopleInfo += personInfo;
                    peopleInfo += "\r\n";
                }
            }

            skeletonWriter.Write(peopleInfo);
        }
    }
    catch (System.IO.IOException)
    {
    }

    // Release bodyFrame
    bodyFrame.Dispose();
}
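// Several of these snippets clamp a negative joint Z before calling the
// coordinate mapper, because MapCameraPointTo*Space returns
// (-Infinity, -Infinity) for camera-space points at or behind the sensor
// plane. A minimal helper sketch of that idea; the helper name and the 0.1f
// default are assumptions, matching the InferredZPositionClamp convention
// used above:
private static CameraSpacePoint ClampInferredZ(CameraSpacePoint position, float minZ = 0.1f)
{
    // CameraSpacePoint is a struct, so we mutate and return a local copy.
    if (position.Z < 0)
    {
        position.Z = minZ;
    }

    return position;
}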
private void ProcessMultiFrame(MultiSourceFrameArrivedEventArgs e)
{
    long ticksAcqTotal = DateTime.Now.Ticks;

    // frames
    DepthFrame depthFrame = null;
    ColorFrame colorFrame = null;
    BodyIndexFrame bodyIndexFrame = null;
    BodyFrame bodyFrame = null;
    MultiSourceFrame multiSourceFrame = e.FrameReference.AcquireFrame();

    // ps3eye
    byte[] psBytes0 = null;
    byte[] psBytes1 = null;

    // if the frame has expired by the time we process this event, return (this actually never happens)
    if (multiSourceFrame == null)
    {
        return;
    }

    try
    {
        // get kinect frames
        depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame();
        colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame();
        bodyIndexFrame = multiSourceFrame.BodyIndexFrameReference.AcquireFrame();
        bodyFrame = multiSourceFrame.BodyFrameReference.AcquireFrame();

        // optionally get ps3eye frames
        if (isPs3EyeRunning)
        {
            psBytes0 = frames[0];
            psBytes1 = frames[1];
        }

        // if any frame has expired by the time we process this event, return (dispose the others in finally);
        // psBytes0 and psBytes1 may be null in the beginning (when the ps3eye takes longer to start than the kinect)
        if (colorFrame == null || depthFrame == null || bodyIndexFrame == null || bodyFrame == null ||
            (isPs3EyeRunning && (psBytes0 == null || psBytes1 == null)))
        {
            Utils.UpdateCounter("Expired");
            Utils.IncrementTotalLost();
            return;
        }
        else
        {
            Utils.UpdateCounter("Expired", false);
        }

        // performance
        Utils.UpdateTimer("Acquire", ticksAcqTotal);
        Utils.UpdateCounter("Acquired");

        // process
        ProcessFrames(colorFrame, depthFrame, bodyIndexFrame, bodyFrame, psBytes0, psBytes1);
    }
    catch
    {
        // swallow frame-processing errors
    }
    finally
    {
        if (depthFrame != null)
        {
            depthFrame.Dispose();
        }

        if (colorFrame != null)
        {
            colorFrame.Dispose();
        }

        if (bodyIndexFrame != null)
        {
            bodyIndexFrame.Dispose();
        }

        if (bodyFrame != null)
        {
            bodyFrame.Dispose();
        }
    }
}
private void UpdateKinect()
{
    if (bodyReader != null)
    {
        BodyFrame frame = bodyReader.AcquireLatestFrame();
        if (frame == null)
        {
            return;
        }

        // You need to use the same body data array in each frame.
        frame.GetAndRefreshBodyData(bodies);

        // You need to make sure to dispose of the frame, otherwise you can't get the next one.
        frame.Dispose();
        frame = null;

        // Get the ids of the bodies that are currently being tracked.
        // Bodies are not guaranteed to be loaded in the same location frame to frame.
        List<ulong> trackedIDs = new List<ulong>();
        foreach (Body body in bodies)
        {
            if (body == null)
            {
                continue;
            }

            if (body.IsTracked)
            {
                trackedIDs.Add(body.TrackingId);
            }
        }

        List<ulong> knownIDs = new List<ulong>(persons.Keys);

        // Delete persons that are no longer tracked
        foreach (ulong trackingID in knownIDs)
        {
            if (trackedIDs.Contains(trackingID) == false)
            {
                Destroy(persons[trackingID]);
                persons.Remove(trackingID);
            }
        }

        foreach (Body body in bodies)
        {
            if (body == null)
            {
                continue;
            }

            if (body.IsTracked)
            {
                if (persons.ContainsKey(body.TrackingId) == false)
                {
                    persons.Add(body.TrackingId, CreateNewPerson(body.TrackingId));
                }

                if (persons.ContainsKey(body.TrackingId))
                {
                    RefreshPerson(body, persons[body.TrackingId]);
                }
            }
        }
    }
}
// Update is called once per frame
void Update()
{
    if (multiReader != null)
    {
        var frame = multiReader.AcquireLatestFrame();
        if (frame != null)
        {
            BodyFrame bodyFrame = frame.BodyFrameReference.AcquireFrame();
            if (bodyFrame != null)
            {
                if (bodyData == null)
                {
                    bodyData = new Body[bodyFrame.BodyFrameSource.BodyCount];
                }

                bodyFrame.GetAndRefreshBodyData(bodyData);
                bodyFrame.Dispose();
                bodyFrame = null;
            }

            frame = null;
        }
    }

    if (bodyData == null)
    {
        return;
    }

    int index = -1;
    for (int i = 0; i < sensor.BodyFrameSource.BodyCount; i++)
    {
        if (bodyData[i] == null)
        {
            continue;
        }

        if (bodyData[i].IsTracked)
        {
            index = i;
        }
    }

    if (index > -1)
    {
        // float yPos = bodyData[index].Joints[JointType.HandLeft].Position.Y;
        // this.transform.position += transform.forward * yPos * Time.deltaTime * speed;

        float shoulderLeft = bodyData[index].Joints[JointType.ShoulderLeft].Position.Z;
        float shoulderRight = bodyData[index].Joints[JointType.ShoulderRight].Position.Z;
        float finalRotation = shoulderLeft > shoulderRight ? shoulderLeft * -1 : shoulderRight;
        if (Mathf.Abs(shoulderLeft - shoulderRight) > 0.1)
        {
            this.transform.Rotate(new Vector3(0, finalRotation, 0));
        }
        //Debug.Log(shoulderLeft + " left");
        //Debug.Log(shoulderRight + " right");

        float head = bodyData[index].Joints[JointType.Head].Position.Y;
        float spineBase = bodyData[index].Joints[JointType.SpineBase].Position.Y;
        if (Mathf.Abs(head - spineBase) < 0.7)
        {
            this.transform.position += transform.forward * Time.deltaTime * speed;
        }
        //Debug.Log(head + " head");
        //Debug.Log(spineBase + " spineBase");

        if (startHead == 0)
        {
            startHead = head;
        }

        if (head > startHead + 0.1)
        {
            GetComponent<Rigidbody>().AddForce(Vector3.up * jumpPower);
        }

        Debug.Log(head + " head");
        Debug.Log(startHead + " startHead");
    }
}
public bool PollBodyFrame(KinectInterop.SensorData sensorData, ref KinectInterop.BodyFrameData bodyFrame,
                          ref Matrix4x4 kinectToWorld, bool bIgnoreJointZ)
{
    bool bNewFrame = false;

    if ((multiSourceFrameReader != null && multiSourceFrame != null) || bodyFrameReader != null)
    {
        BodyFrame frame = multiSourceFrame != null ? msBodyFrame : bodyFrameReader.AcquireLatestFrame();

        if (frame != null)
        {
            frame.GetAndRefreshBodyData(bodyData);

            bodyFrame.liPreviousTime = bodyFrame.liRelativeTime;
            bodyFrame.liRelativeTime = frame.RelativeTime.Ticks;

            frame.Dispose();
            frame = null;

            for (int i = 0; i < sensorData.bodyCount; i++)
            {
                Body body = bodyData[i];

                if (body == null)
                {
                    bodyFrame.bodyData[i].bIsTracked = 0;
                    continue;
                }

                bodyFrame.bodyData[i].bIsTracked = (short)(body.IsTracked ? 1 : 0);

                if (body.IsTracked)
                {
                    // transfer body and joints data
                    bodyFrame.bodyData[i].liTrackingID = (long)body.TrackingId;

                    // cache the body joints (following the advice of Brian Chasalow)
                    Dictionary<Windows.Kinect.JointType, Windows.Kinect.Joint> bodyJoints = body.Joints;

                    for (int j = 0; j < sensorData.jointCount; j++)
                    {
                        Windows.Kinect.Joint joint = bodyJoints[(Windows.Kinect.JointType)j];
                        KinectInterop.JointData jointData = bodyFrame.bodyData[i].joint[j];

                        //jointData.jointType = (KinectInterop.JointType)j;
                        jointData.trackingState = (KinectInterop.TrackingState)joint.TrackingState;

                        if ((int)joint.TrackingState != (int)TrackingState.NotTracked)
                        {
                            float jPosZ = (bIgnoreJointZ && j > 0) ? bodyFrame.bodyData[i].joint[0].kinectPos.z : joint.Position.Z;
                            jointData.kinectPos = new Vector3(joint.Position.X, joint.Position.Y, joint.Position.Z);
                            jointData.position = kinectToWorld.MultiplyPoint3x4(new Vector3(joint.Position.X, joint.Position.Y, jPosZ));
                        }

                        jointData.orientation = Quaternion.identity;
                        //Windows.Kinect.Vector4 vQ = body.JointOrientations[jointData.jointType].Orientation;
                        //jointData.orientation = new Quaternion(vQ.X, vQ.Y, vQ.Z, vQ.W);

                        if (j == 0)
                        {
                            bodyFrame.bodyData[i].position = jointData.position;
                            bodyFrame.bodyData[i].orientation = jointData.orientation;
                        }

                        bodyFrame.bodyData[i].joint[j] = jointData;
                    }

                    // transfer hand states
                    bodyFrame.bodyData[i].leftHandState = (KinectInterop.HandState)body.HandLeftState;
                    bodyFrame.bodyData[i].leftHandConfidence = (KinectInterop.TrackingConfidence)body.HandLeftConfidence;
                    bodyFrame.bodyData[i].rightHandState = (KinectInterop.HandState)body.HandRightState;
                    bodyFrame.bodyData[i].rightHandConfidence = (KinectInterop.TrackingConfidence)body.HandRightConfidence;
                }
            }

            bNewFrame = true;
        }
    }

    return bNewFrame;
}
void UpdateKinect()
{
    if (_Reader != null)
    {
        MultiSourceFrame frame = _Reader.AcquireLatestFrame();
        if (frame != null)
        {
            // color processing with depth
            ColorFrame colorFrame = frame.ColorFrameReference.AcquireFrame();
            if (colorFrame != null)
            {
                DepthFrame depthFrame = frame.DepthFrameReference.AcquireFrame();
                if (depthFrame != null)
                {
                    colorFrame.CopyConvertedFrameDataToArray(_ColorRawData, ColorImageFormat.Rgba);
                    _ColorTexture.LoadRawTextureData(_ColorRawData);
                    _ColorTexture.Apply();

                    depthFrame.CopyFrameDataToArray(_DepthData);

                    depthFrame.Dispose();
                    depthFrame = null;
                }

                colorFrame.Dispose();
                colorFrame = null;
            }

            // ir processing
            InfraredFrame irFrame = frame.InfraredFrameReference.AcquireFrame();
            if (irFrame != null)
            {
                irFrame.CopyFrameDataToArray(_IRData);

                int index = 0;
                foreach (ushort ir in _IRData)
                {
                    byte intensity = (byte)(ir >> 8);
                    _IRRawData[index++] = intensity;
                    _IRRawData[index++] = intensity;
                    _IRRawData[index++] = intensity;
                    _IRRawData[index++] = 255; // Alpha
                }

                // load raw data
                _IRTexture.LoadRawTextureData(_IRRawData);
                _IRTexture.Apply();

                irFrame.Dispose();
                irFrame = null;
            }

            // body processing
            BodyFrame bodyFrame = frame.BodyFrameReference.AcquireFrame();
            if (bodyFrame != null)
            {
                if (_BodyData == null)
                {
                    _BodyData = new Body[_Sensor.BodyFrameSource.BodyCount];
                }

                bodyFrame.GetAndRefreshBodyData(_BodyData);

                bodyFrame.Dispose();
                bodyFrame = null;
            }

            frame = null;
        }
    }
}
private unsafe void FrameReader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    _Stopwatch.Restart();

    MultiSourceFrame multiSourceFrame = e.FrameReference.AcquireFrame();
    if (multiSourceFrame != null)
    {
        ColorFrame colorFrame = null;
        DepthFrame depthFrame = null;
        BodyFrame bodyFrame = null;
        BodyIndexFrame bodyIndexFrame = null;

        try
        {
            bool allRequiredDataReceived = true;

            if (_FrameSourceTypes.HasFlag(FrameSourceTypes.Color))
            {
                colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame();
                if (colorFrame != null)
                {
                    fixed (byte* colorBytesPointer = ColorPixels)
                    {
                        IntPtr colorPtr = (IntPtr)colorBytesPointer;
                        uint size = (uint)(_ColorFrameDescription.Width * _ColorFrameDescription.Height * _ColorFrameDescription.BytesPerPixel);
                        if (colorFrame.RawColorImageFormat == ImageFormat)
                        {
                            colorFrame.CopyRawFrameDataToIntPtr(colorPtr, size);
                        }
                        else
                        {
                            colorFrame.CopyConvertedFrameDataToIntPtr(colorPtr, size, ImageFormat);
                        }
                    }
                }
                else
                {
                    allRequiredDataReceived = false;
                }
            }

            if (_FrameSourceTypes.HasFlag(FrameSourceTypes.Depth) && allRequiredDataReceived)
            {
                depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame();
                if (depthFrame != null)
                {
                    fixed (ushort* depthBytesPointer = DepthPixels)
                    {
                        IntPtr depthPtr = (IntPtr)depthBytesPointer;
                        depthFrame.CopyFrameDataToIntPtr(depthPtr, (uint)(_DepthFrameDescription.Width * _DepthFrameDescription.Height * _DepthFrameDescription.BytesPerPixel));
                    }
                }
                else
                {
                    allRequiredDataReceived = false;
                }
            }

            if (_FrameSourceTypes.HasFlag(FrameSourceTypes.Body) && allRequiredDataReceived)
            {
                bodyFrame = multiSourceFrame.BodyFrameReference.AcquireFrame();
                if (bodyFrame != null)
                {
                    bodyFrame.GetAndRefreshBodyData(Bodies);
                }
                else
                {
                    allRequiredDataReceived = false;
                }
            }

            if (_FrameSourceTypes.HasFlag(FrameSourceTypes.BodyIndex) && allRequiredDataReceived)
            {
                bodyIndexFrame = multiSourceFrame.BodyIndexFrameReference.AcquireFrame();
                if (bodyIndexFrame != null)
                {
                    fixed (byte* bodyIndexBytesPointer = BodyIndexPixels)
                    {
                        IntPtr bodyIndexPtr = (IntPtr)bodyIndexBytesPointer;
                        bodyIndexFrame.CopyFrameDataToIntPtr(bodyIndexPtr, (uint)(_BodyIndexFrameDescription.Width * _BodyIndexFrameDescription.Height * _BodyIndexFrameDescription.BytesPerPixel));
                    }
                }
                else
                {
                    allRequiredDataReceived = false;
                }
            }

            if (allRequiredDataReceived)
            {
                _KinectFrameArrivedEventArgs.ColorPixels = ColorPixels;
                _KinectFrameArrivedEventArgs.DepthPixels = DepthPixels;
                _KinectFrameArrivedEventArgs.Bodies = Bodies;
                _KinectFrameArrivedEventArgs.BodyIndexPixels = BodyIndexPixels;
                _KinectFrameArrivedEventArgs.KinectSensor = multiSourceFrame.KinectSensor;
                _KinectFrameArrivedEventArgs.FrameNumber = _FrameNumber;

                EventHandler<KinectFrameArrivedEventArgs> handler = FrameArrived;
                if (handler != null)
                {
                    handler(this, _KinectFrameArrivedEventArgs);
                }
            }
        }
        finally
        {
            if (colorFrame != null)
            {
                colorFrame.Dispose();
            }

            if (depthFrame != null)
            {
                depthFrame.Dispose();
            }

            if (bodyFrame != null)
            {
                bodyFrame.Dispose();
            }

            if (bodyIndexFrame != null)
            {
                bodyIndexFrame.Dispose();
            }
        }
    }

    _Stopwatch.Stop();
    RaiseKinectFrameComplete(_Stopwatch.Elapsed);
    _FrameNumber++;
}
public bool GetMultiSourceFrame(KinectInterop.SensorData sensorData)
{
    if (multiSourceFrameReader != null)
    {
        multiSourceFrame = multiSourceFrameReader.AcquireLatestFrame();

        if (multiSourceFrame != null)
        {
            // try to get all frames at once
            msBodyFrame = (sensorFlags & KinectInterop.FrameSource.TypeBody) != 0 ?
                multiSourceFrame.BodyFrameReference.AcquireFrame() : null;
            msBodyIndexFrame = (sensorFlags & KinectInterop.FrameSource.TypeBodyIndex) != 0 ?
                multiSourceFrame.BodyIndexFrameReference.AcquireFrame() : null;

            bool bAllSet =
                ((sensorFlags & KinectInterop.FrameSource.TypeBody) == 0 || msBodyFrame != null) &&
                ((sensorFlags & KinectInterop.FrameSource.TypeBodyIndex) == 0 || msBodyIndexFrame != null);

            if (!bAllSet)
            {
                // release all frames
                if (msBodyFrame != null)
                {
                    msBodyFrame.Dispose();
                    msBodyFrame = null;
                }

                if (msBodyIndexFrame != null)
                {
                    msBodyIndexFrame.Dispose();
                    msBodyIndexFrame = null;
                }

                if (msColorFrame != null)
                {
                    msColorFrame.Dispose();
                    msColorFrame = null;
                }

                if (msDepthFrame != null)
                {
                    msDepthFrame.Dispose();
                    msDepthFrame = null;
                }

                if (msInfraredFrame != null)
                {
                    msInfraredFrame.Dispose();
                    msInfraredFrame = null;
                }
            }
            // else
            // {
            //     bool bNeedBody = (sensorFlags & KinectInterop.FrameSource.TypeBody) != 0;
            //     bool bNeedBodyIndex = (sensorFlags & KinectInterop.FrameSource.TypeBodyIndex) != 0;
            //     bool bNeedColor = (sensorFlags & KinectInterop.FrameSource.TypeColor) != 0;
            //     bool bNeedDepth = (sensorFlags & KinectInterop.FrameSource.TypeDepth) != 0;
            //     bool bNeedInfrared = (sensorFlags & KinectInterop.FrameSource.TypeInfrared) != 0;
            //
            //     bAllSet = true;
            // }
        }

        return multiSourceFrame != null;
    }

    return false;
}
private void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    // All frame counter
    this.frameCount++;
    if (this.frameCount % this.framesToCapture != 0)
    {
        return;
    }

    ColorFrame colorFrame = null;
    DepthFrame depthFrame = null;
    BodyFrame bodyFrame = null;
    BodyIndexFrame bodyIndexFrame = null;
    Body body = null;
    SkeletonOfBody skel_up = new SkeletonOfBody(Constants.SKEL_UP_TOTAL_JOINTS);

    try
    {
        var frameReference = e.FrameReference.AcquireFrame();

        colorFrame = frameReference.ColorFrameReference.AcquireFrame();
        depthFrame = frameReference.DepthFrameReference.AcquireFrame();
        bodyFrame = frameReference.BodyFrameReference.AcquireFrame();
        bodyIndexFrame = frameReference.BodyIndexFrameReference.AcquireFrame();

        if (colorFrame == null || depthFrame == null || bodyFrame == null || bodyIndexFrame == null)
        {
            return;
        }

        //--------------------------------------------
        // Get the color frame
        //--------------------------------------------
        using (KinectBuffer colorBuffer = colorFrame.LockRawImageBuffer())
        {
            colorFrame.CopyConvertedFrameDataToArray(this.buffColor32, ColorImageFormat.Bgra);
        } //End ColorFrame

        //--------------------------------------------
        // Get the depth frame
        //--------------------------------------------
        using (KinectBuffer depthBuffer = depthFrame.LockImageBuffer())
        {
            depthFrame.CopyFrameDataToArray(this.buffDepth16);
            //depthFrame.CopyFrameDataToArray(this.buffDepth16Copy);

            // Multiplication by 20 only to make the depth visually more perceptible
            //int i = 0;
            //Array.ForEach(this.buffDepth16Copy, (x) => { this.buffDepth16Copy[i++] = (ushort)(x * 20); });
        } //End DepthFrame

        //--------------------------------------------
        // Get the body index frame
        //--------------------------------------------
        using (KinectBuffer bodyIndexBuffer = bodyIndexFrame.LockImageBuffer())
        {
            bodyIndexFrame.CopyFrameDataToArray(this.buffBodyIndex8);
        }

        //--------------------------------------------
        // Get the body frame
        //--------------------------------------------
        bodyFrame.GetAndRefreshBodyData(this.listBodies);
        //bodyFrame.FloorClipPlane

        //--------------------------------------------
        // Map the depth frame to its color frame
        //--------------------------------------------
        {
            Array.Clear(this.buffColorSpacePoints, 0, this.buffColorSpacePoints.Length);
            Array.Clear(this.buffMapDepthToColor32, 0, this.buffMapDepthToColor32.Length);

            // Coordinate mapping
            this.coordinateMapper.MapDepthFrameToColorSpace(this.buffDepth16, this.buffColorSpacePoints);

            unsafe
            {
                fixed (ColorSpacePoint* depthMappedToColorPointsPointer = buffColorSpacePoints)
                {
                    // Loop over each row and column of the color image.
                    // Zero out any pixels that don't correspond to a body index.
                    for (int idxDepth = 0; idxDepth < buffColorSpacePoints.Length; ++idxDepth)
                    {
                        float depthMappedToColorX = depthMappedToColorPointsPointer[idxDepth].X;
                        float depthMappedToColorY = depthMappedToColorPointsPointer[idxDepth].Y;

                        // The sentinel value is (-inf, -inf), meaning that no depth pixel corresponds to this color pixel.
                        if (!float.IsNegativeInfinity(depthMappedToColorX) && !float.IsNegativeInfinity(depthMappedToColorY))
                        {
                            // Make sure the depth pixel maps to a valid point in color space
                            int colorX = (int)(depthMappedToColorX + 0.5f);
                            int colorY = (int)(depthMappedToColorY + 0.5f);

                            // If the point is not valid, there is no body index there.
                            if ((colorX >= 0) && (colorX < this.colorImageSize.Width) && (colorY >= 0) && (colorY < this.colorImageSize.Height))
                            {
                                int idxColor = (colorY * this.colorImageSize.Width) + colorX;

                                // If we are tracking a body for the current pixel, save the depth data
                                if (this.buffBodyIndex8[idxDepth] != 0xff)
                                {
                                    this.buffMapDepthToColor32[idxDepth * 4] = this.buffColor32[idxColor * 4];         // B
                                    this.buffMapDepthToColor32[idxDepth * 4 + 1] = this.buffColor32[idxColor * 4 + 1]; // G
                                    this.buffMapDepthToColor32[idxDepth * 4 + 2] = this.buffColor32[idxColor * 4 + 2]; // R
                                }
                            }
                        }
                    }
                }
            } //End Unsafe
        } //End Mapping

        //--------------------------------------------
        // Process the face of the default body
        //--------------------------------------------
        // Variable to save the detected face parameters
        this.faceData = new FaceData(new BoxFace(0, 0, 0, 0), new BoxFace(0, 0, 0, 0));

#if FACE_DETECTION
        // Get the default body
        //Body body = this.listBodies.Where(b => b.IsTracked).FirstOrDefault();
        if (this.faceFrameSource.IsActive)
        {
            // In our experiment we get the closest body
            body = Util.GetClosestBody(this.listBodies);

            if (body != null && body.IsTracked)
            {
                // Get the first skeleton
                skel_up = Util.GetSkeletonUpperBody(this.Mapper, body);

                // Draw skeleton joints
                if (this.drawingDepthMarkups)
                {
                    Util.WriteSkeletonOverFrame(this, VisTypes.Depth, skel_up, 2, ref this.buffMapDepthToColor32);
                    //Util.WriteSkeletonOverFrame(this, VisTypes.Depth, skeleton, 2, ref this.buffDepth16);
                }

                // Assign a tracking ID to the face source
                this.faceFrameSource.TrackingId = body.TrackingId;

                if (this.faceFrameResults != null)
                {
                    var boxColor = this.faceFrameResults.FaceBoundingBoxInColorSpace;
                    var boxDepth = this.faceFrameResults.FaceBoundingBoxInInfraredSpace;

                    // If there are face results, then save data.
                    // We save in a format of rectangle [x, y, width, height].
                    this.faceData.boxColor = new BoxFace(boxColor.Left, boxColor.Top, (boxColor.Right - boxColor.Left), (boxColor.Bottom - boxColor.Top));
                    this.faceData.boxDepth = new BoxFace(boxDepth.Left, boxDepth.Top, (boxDepth.Right - boxDepth.Left), (boxDepth.Bottom - boxDepth.Top));

                    // Draw the face
                    if (this.drawingDepthMarkups)
                    {
                        Util.WriteFaceOverFrame(this, VisTypes.Depth, faceData.boxDepth, 1, ref this.buffMapDepthToColor32);
                        //Util.WriteFaceOverFrame(this, VisTypes.Depth, faceData.boxDepth, 1, ref this.buffDepth16);
                    } //End Drawing
                } //End FaceResult
            } //End Body
        }
#endif

        // Update the data handler
        this.frameHandler(this.buffColor32, this.buffDepth16, this.buffBodyIndex8, this.buffMapDepthToColor32, this.listBodies, this.faceData);

        // Recording state true
        byte[] _colorData = null;
        ushort[] _depthData = null;
        byte[] _bodyIndexData = null;
        IList<Body> _bodies = null;

        //--------------------------------------------
        // Record the data
        //--------------------------------------------
        if (this.stateOfRecording)
        {
            // 25-09-15
            // Discard frames where the hand is not correctly tracked (i.e., the hand has a zero coordinate),
            // to discard failures in hand tracking
            if (skel_up.jointDepthSpace[(int)JointUpType.HandLeft].X == 0 || skel_up.jointDepthSpace[(int)JointUpType.HandLeft].Y == 0 ||
                skel_up.jointDepthSpace[(int)JointUpType.HandRight].X == 0 || skel_up.jointDepthSpace[(int)JointUpType.HandRight].Y == 0)
            {
                Console.WriteLine("Neglect frame {0}", this.recordedFrames);
                return;
            }

            // Storage data
            _colorData = new byte[this.buffColor32.Length];
            _depthData = new ushort[this.buffDepth16.Length];
            _bodyIndexData = new byte[this.buffBodyIndex8.Length];
            _bodies = new Body[this.listBodies.Count];

            colorFrame.CopyConvertedFrameDataToArray(_colorData, ColorImageFormat.Bgra);
            depthFrame.CopyFrameDataToArray(_depthData);
            bodyIndexFrame.CopyFrameDataToArray(_bodyIndexData);
            bodyFrame.GetAndRefreshBodyData(_bodies);

            // Increase the counter
            this.recordedFrames++;

            this.dataContainer.AddColor = _colorData;
            this.dataContainer.AddDepth = _depthData;
            this.dataContainer.AddBodyIndex = _bodyIndexData;
            this.dataContainer.AddListOfBodies = _bodies;
            this.dataContainer.AddFaceData = this.faceData;

            // If the user only requires a fixed number of frames to be saved
            if (this.fixedFrames == this.recordedFrames)
            {
                this.stateOfRecording = false;
            }
        }

        // Notice:
        // Array.Copy()       --> how many elements to copy
        // Buffer.BlockCopy() --> how many bytes to copy

        // Update Frame Rate
        UpdateGrabberFrameRate();
    }
    finally
    {
        if (this.frameCount > 100000000)
        {
            this.frameCount = 0;
        }

        if (colorFrame != null)
        {
            colorFrame.Dispose();
        }

        if (depthFrame != null)
        {
            depthFrame.Dispose();
        }

        if (bodyFrame != null)
        {
            bodyFrame.Dispose();
        }

        if (bodyIndexFrame != null)
        {
            bodyIndexFrame.Dispose();
        }
    }
}
private void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    var reference = e.FrameReference;

    MultiSourceFrame multiSourceFrame = null;
    ColorFrame colorFrame = null;
    DepthFrame depthFrame = null;
    BodyFrame bodyFrame = null;
    BodyIndexFrame bodyIndexFrame = null;

    try
    {
        using (_frameCounter.Increment())
        {
            multiSourceFrame = reference.AcquireFrame();
            if (multiSourceFrame == null)
            {
                return;
            }

            colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame();
            depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame();
            bodyFrame = multiSourceFrame.BodyFrameReference.AcquireFrame();
            bodyIndexFrame = multiSourceFrame.BodyIndexFrameReference.AcquireFrame();

            // short-circuit || so we stop at the first missing frame
            if (colorFrame == null || depthFrame == null || bodyFrame == null || bodyIndexFrame == null)
            {
                return;
            }

            var colorDesc = colorFrame.FrameDescription;
            int colorWidth = colorDesc.Width;
            int colorHeight = colorDesc.Height;

            if (_colorFrameData == null)
            {
                int size = colorDesc.Width * colorDesc.Height;
                _colorFrameData = new byte[size * bytesPerPixel];
                _displayFrame = new byte[size * bytesPerPixel];
            }

            var depthDesc = depthFrame.FrameDescription;
            uint depthSize = depthDesc.LengthInPixels;
            _depthFrameData = new ushort[depthSize];
            _colorSpacePoints = new ColorSpacePoint[depthSize];

            FrameDescription bodyIndexFrameDescription = bodyIndexFrame.FrameDescription;
            int bodyIndexWidth = bodyIndexFrameDescription.Width;
            int bodyIndexHeight = bodyIndexFrameDescription.Height;
            if ((bodyIndexWidth * bodyIndexHeight) == bodyIndexFrameData.Length)
            {
                bodyIndexFrame.CopyFrameDataToArray(bodyIndexFrameData);
            }

            Array.Clear(_displayFrame, 0, _displayFrame.Length);

            colorFrame.CopyConvertedFrameDataToArray(_colorFrameData, ColorImageFormat.Bgra);
            depthFrame.CopyFrameDataToArray(_depthFrameData);

            kinectSensor.CoordinateMapper.MapDepthFrameToColorSpace(_depthFrameData, _colorSpacePoints);
            kinectSensor.CoordinateMapper.MapDepthFrameToCameraSpace(_depthFrameData, _cameraPoints);

            for (int depthIndex = 0; depthIndex < _depthFrameData.Length; ++depthIndex)
            {
                byte player = bodyIndexFrameData[depthIndex];
                bool? c = OnlyPlayersMenuItem.IsChecked;
                bool val = c ?? false;

                if (!val || player != 0xff)
                {
                    ColorSpacePoint point = _colorSpacePoints[depthIndex];
                    CameraSpacePoint p = this._cameraPoints[depthIndex];

                    int colorX = (int)Math.Floor(point.X + 0.5);
                    int colorY = (int)Math.Floor(point.Y + 0.5);
                    int colorImageIndex = ((colorWidth * colorY) + colorX) * bytesPerPixel;

                    if ((colorX >= 0) && (colorX < colorWidth) && (colorY >= 0) && (colorY < colorHeight))
                    {
                        if (p.Z > 0)
                        {
                            _displayFrame[colorImageIndex] = _colorFrameData[colorImageIndex];         // b
                            _displayFrame[colorImageIndex + 1] = _colorFrameData[colorImageIndex + 1]; // g
                            _displayFrame[colorImageIndex + 2] = _colorFrameData[colorImageIndex + 2]; // r
                            _displayFrame[colorImageIndex + 3] = _colorFrameData[colorImageIndex + 3]; // a
                        }
                    }
                }
            }

            colorBitmap.WritePixels(
                new Int32Rect(0, 0, colorDesc.Width, colorDesc.Height),
                _displayFrame, //_colorFrameData,
                colorDesc.Width * bytesPerPixel,
                0);

            if (calibratingSurface)
            {
                if (_pointsToDepth.Count > 0)
                {
                    foreach (Point p in _pointsToDepth)
                    {
                        int depthIndex = Convert.ToInt32(p.Y) * depthDesc.Width + Convert.ToInt32(p.X);
                        try
                        {
                            CameraSpacePoint cameraPoint = _cameraPoints[depthIndex];

                            // accept the point only when Z is finite and positive
                            // (the original condition mis-parenthesized this check)
                            if (!Double.IsInfinity(cameraPoint.X) && !Double.IsInfinity(cameraPoint.Y) &&
                                !Double.IsInfinity(cameraPoint.Z) && cameraPoint.Z > 0)
                            {
                                Console.WriteLine("" + p.X + " " + p.Y + " ---> " + cameraPoint.X + " " + cameraPoint.Y + " " + cameraPoint.Z);
                                _calibrationPoints.Add(cameraPoint);
                                drawEllipse(p.X, p.Y);
                            }
                        }
                        catch
                        {
                        }
                    }

                    _pointsToDepth = new List<Point>();
                }

                // deliberately disabled branch, kept for reference
                if (false && _calibrationPoints.Count == 3)
                {
                    canvas.Children.Clear();

                    CameraSpacePoint a = VectorTools.subPoint(_calibrationPoints[0], _calibrationPoints[1]);
                    CameraSpacePoint b = VectorTools.subPoint(_calibrationPoints[2], _calibrationPoints[1]);
                    CameraSpacePoint up = VectorTools.cross(a, b);
                    CameraSpacePoint c1 = VectorTools.cross(b, up);
                    CameraSpacePoint c2 = VectorTools.mult(c1, -1f);
                    CameraSpacePoint c;

                    if (VectorTools.distance(_calibrationPoints[2], VectorTools.addPoint(_calibrationPoints[1], c1)) <
                        VectorTools.distance(_calibrationPoints[2], VectorTools.addPoint(_calibrationPoints[1], c2)))
                    {
                        c = VectorTools.mult(VectorTools.normalize(c1), 9.0f / 16.0f * VectorTools.norm(a) /*norm(b)*/);
                    }
                    else
                    {
                        c = VectorTools.mult(VectorTools.normalize(c2), 9.0f / 16.0f * VectorTools.norm(a) /*norm(b)*/);
                    }

                    CameraSpacePoint BL = _calibrationPoints[0];
                    CameraSpacePoint BR = _calibrationPoints[1];
                    CameraSpacePoint TR = VectorTools.addPoint(BR, c);
                    CameraSpacePoint TL = VectorTools.addPoint(BL, c);

                    VectorTools.DebugPoint(BL);
                    VectorTools.DebugPoint(BR);
                    VectorTools.DebugPoint(TR);
                    VectorTools.DebugPoint(TL);

                    //_drawSurface(coordinateMapper.MapCameraPointToColorSpace(BL),
                    //             coordinateMapper.MapCameraPointToColorSpace(BR),
                    //             coordinateMapper.MapCameraPointToColorSpace(TR),
                    //             coordinateMapper.MapCameraPointToColorSpace(TL));

                    _calibrationPoints.Clear();
                    calibratingSurface = false;
                }
            }
        }
    }
    catch (Exception ex)
    {
        Console.WriteLine(ex.Message);
        Console.WriteLine(ex.StackTrace);
        Console.WriteLine(ex.Source);
    }
    finally
    {
        if (colorFrame != null)
        {
            colorFrame.Dispose();
        }

        if (depthFrame != null)
        {
            depthFrame.Dispose();
        }

        if (bodyFrame != null)
        {
            bodyFrame.Dispose();
        }

        if (bodyIndexFrame != null)
        {
            bodyIndexFrame.Dispose();
        }
    }
}
public override void ProcessRequest(JToken allAnnotations)
{
    KinectSensor kinectSensor = SensorHandler.GetSensor();

    int ms_slept = 0;
    while (!kinectSensor.IsAvailable)
    {
        Thread.Sleep(500);
        ms_slept += 500;
        System.Diagnostics.Debug.WriteLine("Waiting on sensor...");
        if (ms_slept >= CONNECT_TIMEOUT_MS)
        {
            System.Environment.Exit(-1);
        }
    }

    CoordinateMapper coordinateMapper = kinectSensor.CoordinateMapper;
    FrameDescription frameDescription = kinectSensor.DepthFrameSource.FrameDescription;

    List<Tuple<JointType, JointType>> bones = new List<Tuple<JointType, JointType>>();
    bones.Add(new Tuple<JointType, JointType>(JointType.HandRight, JointType.HandTipRight));

    bool dataReceived = false;
    Body[] bodies = null;
    Body body = null;

    ms_slept = 0;
    while (!dataReceived)
    {
        BodyFrame bodyFrame = null;
        System.Diagnostics.Debug.WriteLine("Waiting on body frame...");
        while (bodyFrame == null)
        {
            bodyFrame = SensorHandler.GetBodyFrame();
        }

        bodies = new Body[bodyFrame.BodyCount];
        bodyFrame.GetAndRefreshBodyData(bodies);

        System.Diagnostics.Debug.WriteLine("Checking if body is detected in frame...");
        System.Diagnostics.Debug.WriteLine(bodyFrame.BodyCount + " bodies detected");

        int count = 0;
        if (bodyFrame.BodyCount > 0)
        {
            foreach (Body b in bodies)
            {
                if (b.IsTracked)
                {
                    System.Diagnostics.Debug.WriteLine("Found body frame.");
                    body = b;
                    dataReceived = true;
                    count++;
                }
            }
        }

        System.Diagnostics.Debug.WriteLine(count + " bodies tracked");
        Thread.Sleep(100);
        ms_slept += 100;
        if (ms_slept >= POINTING_TIMEOUT_MS)
        {
            System.Environment.Exit(-1);
        }

        bodyFrame.Dispose();
    }

    // convert the joint points to depth (display) space
    IReadOnlyDictionary<JointType, Joint> joints = body.Joints;
    Dictionary<JointType, CameraSpacePoint> jointPoints = new Dictionary<JointType, CameraSpacePoint>();
    foreach (JointType jointType in joints.Keys)
    {
        // sometimes the depth (Z) of an inferred joint may show as negative;
        // clamp down to 0.1f to prevent the coordinate mapper from returning (-Infinity, -Infinity)
        CameraSpacePoint position = joints[jointType].Position;
        if (position.Z < 0)
        {
            position.Z = 0.1f;
        }

        // the depth-space projection is computed here, but only the clamped camera-space point is stored
        DepthSpacePoint depthSpacePoint = coordinateMapper.MapCameraPointToDepthSpace(position);
        jointPoints[jointType] = position;
    }

    Tuple<JointType, JointType> bone = bones.First();
    List<BlockData> blocks = this.GetBlocks(allAnnotations);
    this.ComputeConfidenceScores(bone, jointPoints, blocks);
}
protected override void RunBackgroundThreadAsync(int id)
{
    // Attempt to open the Kinect 2 sensor.
    try
    {
        BackgroundData currentFrameData = new BackgroundData();

        sensor = KinectSensor.GetDefault();
        if (sensor != null)
        {
            // Open bodyFrameReader (part of Kinect 2 Sensor)
            bodyFrameReader = sensor.BodyFrameSource.OpenReader();

            if (!sensor.IsOpen)
            {
                // Open Kinect V2 Sensor.
                sensor.Open();
                UnityEngine.Debug.Log("Open Kinect2 device successful.");

                // Get Frames
                BodyFrame frame = null;
                while (m_runBackgroundThread)
                {
                    if (bodyFrameReader != null)
                    {
                        // Get latest frame from Kinect V2
                        frame = bodyFrameReader.AcquireLatestFrame();
                        if (frame != null)
                        {
                            // Ensure that the thread is set to be running
                            IsRunning = true;

                            if (bodies == null)
                            {
                                bodies = new Windows.Kinect.Body[sensor.BodyFrameSource.BodyCount];
                            }

                            // Extract frame data
                            currentFrameData.NumOfBodies = (ulong)frame.BodyCount;
                            frame.GetAndRefreshBodyData(bodies);

                            foreach (var body in bodies)
                            {
                                if (body == null)
                                {
                                    continue;
                                }

                                if (body.IsTracked)
                                {
                                    // Copy this body to the 'currentFrameData'
                                    currentFrameData.Bodies[0].CopyFromBodyTrackingSdk(body, sensor.CoordinateMapper);
                                    continue;
                                }
                            }

                            // Time stamp information copied from K4A file
                            if (!readFirstFrame)
                            {
                                readFirstFrame = true;
                                initialTimestamp = frame.RelativeTime;
                            }

                            currentFrameData.TimestampInMs = (float)(frame.RelativeTime - initialTimestamp).TotalMilliseconds;
                            currentFrameData.DepthImageHeight = 100; // filler
                            currentFrameData.DepthImageWidth = 100;  // filler

                            // Discard frame now that we are finished with it.
                            frame.Dispose();
                            frame = null;

                            // Update data variable that is being read in the UI thread.
                            SetCurrentFrameData(ref currentFrameData);
                        }
                    }
                }

                // Close resources once the loop is exited.
                sensor.Close();
                if (RawDataLoggingFile != null)
                {
                    RawDataLoggingFile.Close();
                }
            }
        }
    }
    catch (Exception e)
    {
        UnityEngine.Debug.Log(e);
    }
}
public void Dispose()
{
    ColorFrame?.Dispose();
    BodyFrame?.Dispose();
    DepthFrame?.Dispose();
}
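// A hedged usage sketch for a Dispose method like the one above: if the
// three frame properties live on a small bundle class (here given the
// hypothetical name FrameBundle, with a hypothetical AcquireBundle factory),
// wrapping it in `using` guarantees the Color, Body and Depth frames are all
// released even when processing throws.
using (FrameBundle bundle = AcquireBundle()) // AcquireBundle is hypothetical
{
    ProcessFrames(bundle); // hypothetical processing step
} // bundle.Dispose() runs here, releasing all three frames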
private void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    if (!(KinectStreamerConfig.ProvideBodyData || KinectStreamerConfig.ProvideColorData || KinectStreamerConfig.ProvideDepthData))
    {
        return;
    }

    depthFrame = null;
    colorFrame = null;
    bodyFrame = null;

    multiSourceFrame = e.FrameReference.AcquireFrame();

    // If the Frame has expired by the time we process this event, return.
    if (multiSourceFrame == null)
    {
        return;
    }

    // We use a try/finally to ensure that we clean up before we exit the function.
    // This includes calling Dispose on any Frame objects that we may have and unlocking the bitmap back buffer.
    try
    {
        depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame();
        colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame();
        bodyFrame = multiSourceFrame.BodyFrameReference.AcquireFrame();

        // If any frame has expired by the time we process this event, return.
        // The "finally" statement will Dispose any that are not null.
        if ((depthFrame == null) || (colorFrame == null) || (bodyFrame == null))
        {
            return;
        }

        // Process color stream if needed
        if (KinectStreamerConfig.ProvideColorData)
        {
            ProcessColorData();
        }

        // Process depth frame if needed
        if (KinectStreamerConfig.ProvideDepthData)
        {
            ProcessDepthData();
        }

        // Process body data if needed
        if (KinectStreamerConfig.ProvideBodyData)
        {
            ProcessBodyData();
        }
    }
    finally
    {
        if (depthFrame != null)
        {
            depthFrame.Dispose();
        }

        if (colorFrame != null)
        {
            colorFrame.Dispose();
        }

        if (bodyFrame != null)
        {
            bodyFrame.Dispose();
        }
    }
}
private void msfr_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    // read the input state from the client
    inputState = Client.inputState;

    // declare frames
    BodyFrame bodyFrame = null;
    DepthFrame depthFrame = null;

    // acquire and validate the frame
    MultiSourceFrame multiSourceFrame = e.FrameReference.AcquireFrame();
    if (multiSourceFrame == null)
    {
        return;
    }

    try
    {
        depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame();

        // guard against an expired depth frame before touching it
        if (inputState == "d" && depthFrame != null)
        {
            var depthDesc = depthFrame.FrameDescription;
            ushort[] depthData = new ushort[depthDesc.LengthInPixels];
            depthFrame.CopyFrameDataToArray(depthData);
        }

        bodyFrame = multiSourceFrame.BodyFrameReference.AcquireFrame();
        if (bodyFrame == null)
        {
            return;
        }

        // process body data
        this.bodies = new Body[bodyFrame.BodyCount];

        // refresh body data
        bodyFrame.GetAndRefreshBodyData(this.bodies);

        foreach (Body body in this.bodies)
        {
            if (body != null)
            {
                if (body.IsTracked)
                {
                    Dictionary<JointType, Point3D> tdPoints = new Dictionary<JointType, Point3D>();
                    List<ColorSpacePoint> csPoints = new List<ColorSpacePoint>();

                    foreach (JointType type in body.Joints.Keys)
                    {
                        Joint joint = body.Joints[type];
                        Point3D point = new Point3D(joint.Position.X, joint.Position.Y, joint.Position.Z);
                        ColorSpacePoint csp = this.kinect.CoordinateMapper.MapCameraPointToColorSpace(joint.Position);

                        // get the list of joint positions
                        tdPoints.Add(type, point);

                        // cannot be sure there will be data unless the joint state is "Tracked"
                        if (joint.TrackingState == TrackingState.Tracked)
                        {
                            // calculate the position to draw the point
                            csPoints.Add(csp);
                        }
                    }

                    DrawPoints(csPoints);

                    // transfer data to the server
                    if (inputState == "t")
                    {
                        this.clientHelper.AddBodyData(BiometricID(tdPoints), tdPoints, csPoints);
                    }
                }
            }
        }
    }
    finally
    {
        if (inputState == "t")
        {
            this.clientHelper.SendBodyData();
        }

        Client.inputState = "z";

        // dispose
        if (bodyFrame != null)
        {
            bodyFrame.Dispose();
        }

        if (depthFrame != null)
        {
            depthFrame.Dispose();
        }
    }
}
private void MultiSourceFrameReader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    ColorFrame colorFrame = null;
    BodyFrame bodyFrame = null;
    try
    {
        MultiSourceFrame parentFrame = e.FrameReference.AcquireFrame();

        // The frame may have expired before this event was handled
        if (parentFrame == null)
        {
            return;
        }

        colorFrame = parentFrame.ColorFrameReference.AcquireFrame();
        bodyFrame = parentFrame.BodyFrameReference.AcquireFrame();
        if (colorFrame != null && bodyFrame != null)
        {
            FPS = (int)(1.0 / colorFrame.ColorCameraSettings.FrameInterval.TotalSeconds);
            IgnoreFrame = (FPS > 16 && Force15FPS) ? !IgnoreFrame : false;

            if (ROI == null)
            {
                SetROI(colorFrame.FrameDescription);
            }

            // bodyFrame is already known to be non-null here
            if (PositionHelper.HasValue)
            {
                Skeleton skeleton = bodyFrame.ToSkeleton();
                if (skeleton != null)
                {
                    if (Gait.POV == POV.Unknown)
                    {
                        if (Math.Abs(PositionHelper.Value) < FPS * 4)
                        {
                            // Accumulate reference frames and vote on which hand is farther from the camera
                            ReferenceSkeletonSequence.Add(skeleton);
                            PositionHelper += (skeleton.Joints[JointType.HandLeft].DistanceFromCamera() >
                                               skeleton.Joints[JointType.HandRight].DistanceFromCamera()) ? 1 : -1;
                        }
                        else
                        {
                            POV inferredPosition = PositionHelper < 0 ? POV.Left : POV.Right;
                            Gait = new Gait(ReferenceSkeletonSequence.AvgSkeleton(), inferredPosition);
                        }
                    }
                    else
                    {
                        Gait.Refresh(skeleton);
                    }
                }
            }

            BitmapSource newBitmap = null;
            if (!IgnoreFrame)
            {
                newBitmap = colorFrame.ToCroppedBitmap(ROI);
            }
            OnFrameProcessed(newBitmap, DateTime.UtcNow.Ticks, FPS);
        }
    }
    finally
    {
        colorFrame?.Dispose();
        bodyFrame?.Dispose();
    }
}
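// The POV inference above works by voting: each frame adds +1 or -1 depending
// on which hand is farther from the camera, and once roughly FPS * 4 frames of
// net evidence have accumulated, the side is decided. A hedged sketch of that
// logic in isolation (all names here are illustrative, not from the original):
using System;

public class PovVote
{
    private int vote;                      // plays the role of PositionHelper
    private readonly int framesToObserve;  // roughly FPS * 4

    public PovVote(int fps)
    {
        framesToObserve = fps * 4;
    }

    // Returns true once enough net evidence has accumulated;
    // leftSide then tells which way the vote fell.
    public bool Add(float leftHandDistance, float rightHandDistance, out bool leftSide)
    {
        vote += leftHandDistance > rightHandDistance ? 1 : -1;
        leftSide = vote < 0;
        return Math.Abs(vote) >= framesToObserve;
    }
}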
private async void OnMultipleFramesArrivedHandler(MultiSourceFrame e)
{
    BodyFrame bodyFrame = null;
    ColorFrame colorFrame = null;
    InfraredFrame infraredFrame = null;

    if (e == null)
    {
        return;
    }

    try
    {
        bodyFrame = e.BodyFrameReference.AcquireFrame();
        colorFrame = e.ColorFrameReference.AcquireFrame();
        infraredFrame = e.InfraredFrameReference.AcquireFrame();

        if (bodyFrame == null || colorFrame == null || infraredFrame == null)
        {
            return;
        }

        // Process the body frame against the current color frame.
        // The finally block disposes all frames, so no extra using block is needed here.
        await ProcessBodyFrame(bodyFrame, colorFrame);

        // InfraredFrame (currently disabled)
        //if (infraredFrame != null)
        //{
        //    // the fastest way to process the infrared frame data is to directly access
        //    // the underlying buffer
        //    using (Microsoft.Kinect.KinectBuffer infraredBuffer = infraredFrame.LockImageBuffer())
        //    {
        //        // verify data and write the new infrared frame data to the display bitmap
        //        if (((this.infraredFrameDescription.Width * this.infraredFrameDescription.Height) == (infraredBuffer.Size / this.infraredFrameDescription.BytesPerPixel)) &&
        //            (this.infraredFrameDescription.Width == this.infraredBitmap.PixelWidth) && (this.infraredFrameDescription.Height == this.infraredBitmap.PixelHeight))
        //        {
        //            this.ProcessInfraredFrameData(infraredBuffer.UnderlyingBuffer, infraredBuffer.Size);
        //        }
        //    }
        //}
    }
    finally
    {
        infraredFrame?.Dispose();
        colorFrame?.Dispose();
        bodyFrame?.Dispose();
    }
}
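// The disabled infrared block above hands the raw buffer to ProcessInfraredFrameData.
// A hedged sketch of what such a routine typically does (the scaling constants follow
// the official InfraredBasics sample; this version takes a ushort[] copy rather than
// the locked buffer pointer, so it needs no unsafe code):
using System;

public static class InfraredHelper
{
    private const float InfraredSourceValueMaximum = (float)ushort.MaxValue;
    private const float InfraredSourceScale = 0.75f;        // scale down so the output is not too bright
    private const float InfraredOutputValueMinimum = 0.01f; // lower clamp for the displayed intensity
    private const float InfraredOutputValueMaximum = 1.0f;  // upper clamp for the displayed intensity

    public static float[] Normalize(ushort[] frameData)
    {
        var intensities = new float[frameData.Length];
        for (int i = 0; i < frameData.Length; i++)
        {
            // Map each 16-bit IR value into [min, 1.0] for display
            intensities[i] = Math.Min(InfraredOutputValueMaximum,
                (frameData[i] / InfraredSourceValueMaximum * InfraredSourceScale) *
                (1.0f - InfraredOutputValueMinimum) + InfraredOutputValueMinimum);
        }
        return intensities;
    }
}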
private static void UpdateBody()
{
    if (reader == null)
    {
        return;
    }

    BodyFrame frame = reader.AcquireLatestFrame();
    if (frame == null)
    {
        return;
    }

    if (data == null)
    {
        data = new Body[sensor.BodyFrameSource.BodyCount];
    }
    frame.GetAndRefreshBodyData(data);

    for (int bodyNr = 0; bodyNr < data.Length; bodyNr++)
    {
        if (data[bodyNr] == null)
        {
            continue;
        }

        if (data[bodyNr].IsTracked)
        {
            status = Status.Tracking;
            if (bodyID == -1)
            {
                // The first tracked body becomes the active player
                bodyID = bodyNr;
#if hFACE
                if (FaceFrameSource != null)
                {
                    FaceFrameSource.TrackingId = data[bodyID].TrackingId;
                }
#endif
                if (onPlayerAppears != null)
                {
                    onPlayerAppears(bodyID);
                }
            }
        }
        else if (bodyNr == bodyID)
        {
            // The active player lost tracking
            if (onPlayerDisappears != null)
            {
                onPlayerDisappears(bodyID);
            }
            bodyID = -1;
            status = Status.Present;
#if hFACE
            if (FaceFrameSource != null)
            {
                FaceFrameSource.TrackingId = 0;
            }
#endif
        }
    }

    bodyData = bodyID > -1 ? data[bodyID] : null;

    frame.Dispose();
}
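// UpdateBody above raises onPlayerAppears / onPlayerDisappears when the tracked
// body slot is gained or lost. A hedged usage sketch, assuming the delegates take
// the body index and live on a class here called KinectBodyTracker (the class
// name is illustrative, not from the original code):
using UnityEngine;

public class PlayerLogger : MonoBehaviour
{
    void OnEnable()
    {
        KinectBodyTracker.onPlayerAppears += id => Debug.Log("Player " + id + " appeared");
        KinectBodyTracker.onPlayerDisappears += id => Debug.Log("Player " + id + " disappeared");
    }
}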
/// <summary>
/// Handles the depth/color/body frame data arriving from the sensor
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void OnMultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    DepthFrame depthFrame = null;
    ColorFrame colorFrame = null;
    BodyFrame bodyFrame = null;

    MultiSourceFrame multiSourceFrame = e.FrameReference.AcquireFrame();

    // If the frame has expired by the time we process this event, return.
    if (multiSourceFrame == null)
    {
        return;
    }

    // We use a try/finally to ensure that we clean up before we exit the function,
    // calling Dispose on any Frame objects that we may have acquired.
    try
    {
        depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame();
        colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame();
        bodyFrame = multiSourceFrame.BodyFrameReference.AcquireFrame();

        // If any frame has expired by the time we process this event, return.
        // The finally block will Dispose any that are not null.
        if (depthFrame == null || colorFrame == null || bodyFrame == null)
        {
            return;
        }

        // Copy color data (using Bgra format)
        colorFrame.CopyConvertedFrameDataToIntPtr(colorPixels, COLOR_PIXEL_BYTES, ColorImageFormat.Bgra);
        if (ColorDataCheckBox.Checked)
        {
            // Copy into colorFrameData starting at offset 8; the leading 8 bytes are left untouched
            Marshal.Copy(colorPixels, colorFrameData, 8, (int)COLOR_PIXEL_BYTES);
            colorFramePublisher.Send(new ZFrame(colorFrameData));
        }

        if (BodyDataCheckBox.Checked)
        {
            // Copy data for body tracking
            bodyArray = new Body[bodyFrame.BodyCount];
            bodyFrame.GetAndRefreshBodyData(bodyArray);

            // Remove old bodies
            bodyFrameData.Clear();

            // Reserve 4 bytes for 'bodyCount'; the value is patched in below
            AddArrayToList(ref bodyFrameData, new byte[4] { 0, 0, 0, 0 });

            int bodyCount = 0;
            foreach (Body body in bodyArray)
            {
                if (!body.IsTracked)
                {
                    continue;
                }

                AddArrayToList(ref bodyFrameData, BitConverter.GetBytes(body.TrackingId));   // 8 bytes: ulong TrackingId
                AddArrayToList(ref bodyFrameData, BitConverter.GetBytes(ALL_JOINTS.Length)); // 4 bytes: int joint count

                foreach (JointType jointType in ALL_JOINTS)
                {
                    var joint = body.Joints[jointType];
                    AddArrayToList(ref bodyFrameData, BitConverter.GetBytes((int)joint.TrackingState)); // 4 bytes: int TrackingState
                    AddArrayToList(ref bodyFrameData, BitConverter.GetBytes((int)joint.JointType));     // 4 bytes: int JointType
                    AddArrayToList(ref bodyFrameData, BitConverter.GetBytes(joint.Position.X));         // 4 bytes: float X
                    AddArrayToList(ref bodyFrameData, BitConverter.GetBytes(joint.Position.Y));         // 4 bytes: float Y
                    AddArrayToList(ref bodyFrameData, BitConverter.GetBytes(joint.Position.Z));         // 4 bytes: float Z
                }
                bodyCount++;
            }

            // Patch the reserved 4 bytes with the actual body count
            var bodyCountBytes = BitConverter.GetBytes(bodyCount);
            UpdateList(bodyCountBytes, ref bodyFrameData);

            bodyFramePublisher.Send(new ZFrame(bodyFrameData.ToArray()));
        }

        if (PointCloudDataCheckBox.Checked)
        {
            depthFrame.CopyFrameDataToIntPtr(depthFrameData, DEPTH_FRAME_BYTES);
            coordinateMapper.MapDepthFrameToCameraSpaceUsingIntPtr(depthFrameData, DEPTH_FRAME_BYTES, camerSpacePoints, CAMERA_SPACE_BYTES);
            coordinateMapper.MapDepthFrameToColorSpaceUsingIntPtr(depthFrameData, DEPTH_FRAME_BYTES, colorSpacePoints, COLOR_SPACE_BYTES);

            // Remove old points
            ClearPointCloud();

            // Reserve 4 bytes for 'pointCloudSize'; the value is patched in later
            AddPointsToCloud(new byte[4] { 0, 0, 0, 0 });
            ComposePointCloud();

            GetNonEmptyPointCloud(out byte[] pointCloud);
            pointCloudPublisher.Send(new ZFrame(pointCloud));
        }
    }
    finally
    {
        depthFrame?.Dispose();
        colorFrame?.Dispose();
        bodyFrame?.Dispose();
    }
}
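// The body message assembled above has a fixed binary layout: a 4-byte body count,
// then per tracked body an 8-byte TrackingId and a 4-byte joint count, followed per
// joint by two 4-byte ints (TrackingState, JointType) and three 4-byte floats (X, Y, Z).
// A hedged sketch of a matching reader for the subscriber side (type and method
// names here are illustrative, not part of the original code):
using System.Collections.Generic;
using System.IO;

public static class BodyFrameDecoder
{
    public class JointRecord
    {
        public int TrackingState;
        public int JointType;
        public float X, Y, Z;
    }

    public static Dictionary<ulong, List<JointRecord>> Decode(byte[] payload)
    {
        var bodies = new Dictionary<ulong, List<JointRecord>>();
        using (var reader = new BinaryReader(new MemoryStream(payload)))
        {
            int bodyCount = reader.ReadInt32();
            for (int b = 0; b < bodyCount; b++)
            {
                ulong trackingId = reader.ReadUInt64();
                int jointCount = reader.ReadInt32();
                var joints = new List<JointRecord>(jointCount);
                for (int j = 0; j < jointCount; j++)
                {
                    joints.Add(new JointRecord
                    {
                        TrackingState = reader.ReadInt32(),
                        JointType = reader.ReadInt32(),
                        X = reader.ReadSingle(),
                        Y = reader.ReadSingle(),
                        Z = reader.ReadSingle()
                    });
                }
                bodies[trackingId] = joints;
            }
        }
        return bodies;
    }
}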