private void HandsController_HandsDetected(object sender, HandCollection e)
{
    // Draw a yellow marker on every detected fingertip of the body whose
    // tracking ID matches the event.
    // (Removed dead code: the original mapped the wrist into color space and
    // built an unused 'CenterPosition' on EVERY finger iteration.)
    foreach (Body body in bodies)
    {
        if (body.TrackingId != e.TrackingId)
        {
            continue;
        }

        if (e.HandLeft != null)
        {
            foreach (var finger in e.HandLeft.Fingers)
            {
                Point point = new Point(finger.ColorPoint.X, finger.ColorPoint.Y);
                DrawEllipse(point, Brushes.Yellow, 20.0);
            }
        }

        if (e.HandRight != null)
        {
            foreach (var finger in e.HandRight.Fingers)
            {
                Point point = new Point(finger.ColorPoint.X, finger.ColorPoint.Y);
                DrawEllipse(point, Brushes.Yellow, 20.0);
            }
        }
    }
}
internal static void DrawBone(DrawingContext drawingContext, System.Windows.Media.Brush brush, JointType startJoint, JointType endJoint, IReadOnlyDictionary <JointType, Joint> joints, System.Windows.Rect rect, CoordinateMapper coordinateMapper, bool useDepthSpace = true, double line = 0.8F) { if (joints[startJoint].TrackingState != TrackingState.Tracked && joints[endJoint].TrackingState != TrackingState.Tracked) { return; } System.Windows.Point startPoint; System.Windows.Point endPoint; if (useDepthSpace) { startPoint = coordinateMapper.MapCameraPointToDepthSpace(joints[startJoint].Position).GetPoint(); endPoint = coordinateMapper.MapCameraPointToDepthSpace(joints[endJoint].Position).GetPoint(); } else { startPoint = coordinateMapper.MapCameraPointToColorSpace(joints[startJoint].Position).GetPoint(); endPoint = coordinateMapper.MapCameraPointToColorSpace(joints[endJoint].Position).GetPoint(); } if (rect.Contains(startPoint) && rect.Contains(endPoint)) { drawingContext.DrawLine(new System.Windows.Media.Pen(brush, line), startPoint, endPoint); } }
// Draws a tracker texture over every joint of every tracked body, tinted by
// tracking state (green = tracked, red = inferred).
private void DrawJoints(Rect areaContent)
{
    GUILayout.BeginArea(areaContent);
    Color originalColor = GUI.color;

    foreach (Dictionary <JointType, Windows.Kinect.Joint> jointDictionary in trackedBodyJoints) // each dictionary is a different body
    {
        foreach (Windows.Kinect.Joint joint in jointDictionary.Values)
        {
            ColorSpacePoint screenspace = coordinateMapper.MapCameraPointToColorSpace(joint.Position);

            // BUGFIX: reset to the default color every iteration so a joint in
            // any other state (e.g. NotTracked) no longer inherits the tint of
            // the previously drawn joint.
            GUI.color = originalColor;
            if (joint.TrackingState == Windows.Kinect.TrackingState.Tracked)
            {
                GUI.color = Color.green;
            }
            else if (joint.TrackingState == Windows.Kinect.TrackingState.Inferred)
            {
                GUI.color = Color.red;
            }

            // 16x16 marker centered on the joint, scaled from color-frame
            // coordinates into the GUI area.
            var jointRect = new Rect(
                -8 + (screenspace.X / ColorWidth) * areaContent.width,
                -8 + (screenspace.Y / ColorHeight) * areaContent.height,
                16, 16);
            GUI.DrawTexture(jointRect, jointTrackerImage);
        }
    }

    GUI.color = originalColor;
    GUILayout.EndArea();
}
/// <summary>
/// Crops the face region out of a raw BGRA color-frame buffer. The region is
/// the bounding box of the given camera-space points projected to color space.
/// </summary>
/// <param name="points">Camera-space face points.</param>
/// <param name="mapper">Coordinate mapper used to project the points.</param>
/// <param name="pixels">Raw BGRA color-frame pixels (assumed 1920 px wide — TODO confirm).</param>
/// <param name="grayScale">True for an 8bpp grayscale crop, false for 32bpp RGB.</param>
/// <returns>The cropped bitmap.</returns>
public static Bitmap CropFaceBitmap(IReadOnlyList <CameraSpacePoint> points, CoordinateMapper mapper, byte[] pixels, bool grayScale)
{
    // Bounding box of all projected face points.
    int minX = (int)points.Min(p => mapper.MapCameraPointToColorSpace(p).X);
    int maxX = (int)points.Max(p => mapper.MapCameraPointToColorSpace(p).X);
    int minY = (int)points.Min(p => mapper.MapCameraPointToColorSpace(p).Y);
    int maxY = (int)points.Max(p => mapper.MapCameraPointToColorSpace(p).Y);
    int width = maxX - minX;
    int height = maxY - minY;

    PixelFormat targetFormat = grayScale ? PixelFormat.Format8bppIndexed : PixelFormat.Format32bppRgb;
    Bitmap target = new Bitmap(width, height, targetFormat);
    BitmapData bmapdata = target.LockBits(new System.Drawing.Rectangle(0, 0, width, height), ImageLockMode.WriteOnly, targetFormat);

    lock (pixels)
    {
        // Source row length in bytes: 1920 px * 4 bytes (BGRA).
        const int rowLengthBytes = 7680;

        if (targetFormat == PixelFormat.Format32bppRgb)
        {
            // Fast path: copy whole cropped rows, advancing by the bitmap stride.
            IntPtr rowPtr = bmapdata.Scan0;
            int buffColStart = minX * 4;
            int cropWidthBytes = width * 4;
            for (int r = minY; r < minY + height; r++)
            {
                Marshal.Copy(pixels, buffColStart + (rowLengthBytes * r), rowPtr, cropWidthBytes);
                rowPtr = IntPtr.Add(rowPtr, bmapdata.Stride);
            }
        }
        else
        {
            // Grayscale path.
            // BUGFIX: the original wrote a 4-byte Int32 per pixel into the 8bpp
            // bitmap while advancing the pointer by only 1 byte, overrunning the
            // locked buffer and corrupting the image. Write exactly ONE byte per
            // pixel and advance rows by Stride (8bpp rows are stride-padded).
            for (int row = 0; row < height; row++)
            {
                IntPtr rowPtr = IntPtr.Add(bmapdata.Scan0, row * bmapdata.Stride);
                for (int col = 0; col < width; col++)
                {
                    int srcIndex = ((minX + col) * 4) + (rowLengthBytes * (minY + row));
                    int b = pixels[srcIndex];
                    int g = pixels[srcIndex + 1];
                    int r = pixels[srcIndex + 2];
                    // BT.709 luma weights; alpha is ignored.
                    byte gray = (byte)(r * 0.2126 + g * 0.7152 + b * 0.0722);
                    Marshal.WriteByte(rowPtr, col, gray);
                }
            }
        }
    }

    target.UnlockBits(bmapdata);
    return target;
}
// Maps each camera-space vector into its color-space (X, Y) counterpart,
// writing results in place into colorPoints and stopping at the shorter array.
public void MapCameraPointsToColorSpace(Vector3[] cameraPoints, Vector2[] colorPoints)
{
    for (int index = 0; index < cameraPoints.Length && index < colorPoints.Length; index++)
    {
        CameraSpacePoint source = new CameraSpacePoint()
        {
            X = cameraPoints[index].X,
            Y = cameraPoints[index].Y,
            Z = cameraPoints[index].Z
        };

        ColorSpacePoint mapped = _coordinateMapper.MapCameraPointToColorSpace(source);
        colorPoints[index] = new Vector2(mapped.X, mapped.Y);
    }
}
// Projects a joint's 3-D camera position into the 2-D space matching the
// current visualization mode; raw camera X/Y are used as a fallback.
private Point ToPoint(Joint joint)
{
    switch (Mode)
    {
        case VisualizationMode.Color:
        {
            ColorSpacePoint mapped = CoordinateMapper.MapCameraPointToColorSpace(joint.Position);
            return new Point(mapped.X, mapped.Y);
        }
        case VisualizationMode.Depth:
        case VisualizationMode.Infrared:
        {
            DepthSpacePoint mapped = CoordinateMapper.MapCameraPointToDepthSpace(joint.Position);
            return new Point(mapped.X, mapped.Y);
        }
        default:
            return new Point(joint.Position.X, joint.Position.Y);
    }
}
// Converts a joint into coordinates on the RGB (color) image, then into a
// Unity world-space position via the conversion camera.
private Vector3 GetVector3FromJoint(Windows.Kinect.Joint joint)
{
    var valid = joint.TrackingState != Windows.Kinect.TrackingState.NotTracked;

    // BUGFIX: the original condition was 'ConvertCamera != null || valid',
    // which dereferenced ConvertCamera (NullReferenceException) when it was
    // null but the joint was tracked, and also mapped untracked joints.
    // Both a camera AND a tracked joint are required.
    if (ConvertCamera != null && valid)
    {
        // Map the Kinect camera-space (3-D) point into color space (2-D).
        var point = _CoordinateMapper.MapCameraPointToColorSpace(joint.Position);
        var point2 = new Vector3(point.X, point.Y, 0);

        if ((0 <= point2.x) && (point2.x < _KinectWidth) && (0 <= point2.y) && (point2.y < _KinectHeight))
        {
            // Rescale from the Kinect color frame to the Unity screen.
            point2.x = point2.x * Screen.width / _KinectWidth;
            point2.y = point2.y * Screen.height / _KinectHeight;

            // Convert the screen point into Unity world coordinates.
            var colorPoint3 = ConvertCamera.ScreenToWorldPoint(point2);

            // Flip Y (screen Y grows downward) and flatten Z.
            // (Mirroring may also require flipping X, per the original note.)
            colorPoint3.y *= -1;
            colorPoint3.z = 0;
            return colorPoint3;
        }
    }

    // Fallback: scaled raw camera-space coordinates.
    return new Vector3(joint.Position.X * 10, joint.Position.Y * 10, 0);
}
/// <summary>
/// Maps the 3D point to its corresponding 2D point.
/// </summary>
/// <param name="position">The 3D space point.</param>
/// <returns>The X, Y coordinates of the point; (0, 0) for an unsupported
/// visualization or an unmappable (infinite) coordinate.</returns>
public Point GetPoint(CameraSpacePoint position)
{
    Point point = new Point();

    if (Visualization == Visualization.Color)
    {
        ColorSpacePoint mapped = CoordinateMapper.MapCameraPointToColorSpace(position);
        // Infinity means the mapper could not resolve the point; clamp to 0.
        point.X = float.IsInfinity(mapped.X) ? 0.0 : mapped.X;
        point.Y = float.IsInfinity(mapped.Y) ? 0.0 : mapped.Y;
    }
    else if (Visualization == Visualization.Depth || Visualization == Visualization.Infrared)
    {
        DepthSpacePoint mapped = CoordinateMapper.MapCameraPointToDepthSpace(position);
        point.X = float.IsInfinity(mapped.X) ? 0.0 : mapped.X;
        point.Y = float.IsInfinity(mapped.Y) ? 0.0 : mapped.Y;
    }

    return point;
}
// Updates every controller joint from the Kinect body: maps each joint into
// color or depth space, clamps unmappable coordinates, projects onto the frame
// view, and (optionally) smooths toward the new position.
public void UpdateBody(Body body, FrameView frameView, CoordinateMapper coordinateMapper, Visualization visualization)
{
    // Nothing to do until the controller has been initialized.
    if (!Initialized)
    {
        return;
    }

    Mirrored = frameView.MirroredView;

    for (int jointIndex = 0; jointIndex < controller.JointCount(); jointIndex++)
    {
        CameraSpacePoint cameraPosition = body.Joints[controller.GetJointType(jointIndex)].Position;

        // Pick the mapping that matches the requested visualization.
        Vector2 position = visualization == Visualization.Color
            ? coordinateMapper.MapCameraPointToColorSpace(cameraPosition).ToPoint()
            : coordinateMapper.MapCameraPointToDepthSpace(cameraPosition).ToPoint();

        // Unmappable joints come back at infinity; clamp them to the origin.
        if (float.IsInfinity(position.x))
        {
            position.x = 0f;
        }
        if (float.IsInfinity(position.y))
        {
            position.y = 0f;
        }

        frameView.SetPositionOnFrame(ref position);

        controller.SetJointPosition(
            jointIndex,
            smoothJoints
                ? Vector3.Lerp(controller.GetJointPosition(jointIndex), position, smoothness)
                : (Vector3)position);
    }

    UpdateLines();
}
/// <summary>
/// Maps camera-space body joints into 2-D points in either color or depth space.
/// </summary>
/// <param name="bodyJoints">The body's camera-space joints.</param>
/// <param name="isMapColor">True to map into color space; false for depth space.</param>
/// <returns>2-D joints keyed by joint type.</returns>
public Dictionary <JointType, Joint2D> JointToJoint2Ds(IReadOnlyDictionary <JointType, Joint> bodyJoints, bool isMapColor = true)
{
    Dictionary <JointType, Joint2D> joints2 = new Dictionary <JointType, Joint2D>();

    foreach (KeyValuePair <JointType, Joint> pair in bodyJoints)
    {
        // Inferred joints can occasionally report a negative depth (Z);
        // clamp so the coordinate mapper does not return -infinity.
        CameraSpacePoint position = pair.Value.Position;
        if (position.Z < 0)
        {
            position.Z = InferredZPositionClamp;
        }

        // PERF: only perform the mapping that is actually requested — the
        // original always computed BOTH color- and depth-space points and
        // discarded one of them.
        Point point;
        if (isMapColor)
        {
            ColorSpacePoint colorSpacePoint = CoordinateMapper.MapCameraPointToColorSpace(position);
            point = new Point { X = colorSpacePoint.X, Y = colorSpacePoint.Y };
        }
        else
        {
            DepthSpacePoint depthSpacePoint = CoordinateMapper.MapCameraPointToDepthSpace(position);
            point = new Point { X = depthSpacePoint.X, Y = depthSpacePoint.Y };
        }

        joints2[pair.Key] = new Joint2D()
        {
            Joint2DType = pair.Key,
            Position = point,
            TrackingState = pair.Value.TrackingState
        };
    }

    return joints2;
}
/// <summary>
/// Converts the specified CameraSpacePoint into a 2-D point.
/// </summary>
/// <param name="position">The CameraSpacePoint to convert.</param>
/// <param name="visualization">The type of the conversion (color, depth, or infrared).</param>
/// <param name="coordinateMapper">The CoordinateMapper to make the conversion.</param>
/// <returns>The corresponding 2-D point, truncated to integer pixels;
/// unmappable (infinite) coordinates become 0.</returns>
public static Point ToPoint(this CameraSpacePoint position, Visualization visualization, CoordinateMapper coordinateMapper)
{
    Point point = new Point();

    if (visualization == Visualization.Color)
    {
        ColorSpacePoint mapped = coordinateMapper.MapCameraPointToColorSpace(position);
        point.X = (int)(float.IsInfinity(mapped.X) ? 0.0 : mapped.X);
        point.Y = (int)(float.IsInfinity(mapped.Y) ? 0.0 : mapped.Y);
    }
    else if (visualization == Visualization.Depth || visualization == Visualization.Infrared)
    {
        DepthSpacePoint mapped = coordinateMapper.MapCameraPointToDepthSpace(position);
        point.X = (int)(float.IsInfinity(mapped.X) ? 0.0 : mapped.X);
        point.Y = (int)(float.IsInfinity(mapped.Y) ? 0.0 : mapped.Y);
    }

    return point;
}
// --- body
// Flattens the first tracked body into a float buffer of 12 values per joint:
// trackingState, posX, posY, posZ, depthX, depthY, colorX, colorY,
// orientX, orientY, orientZ, orientW.
// NOTES (from the original, unverified): assumes a single body and a stable
// joint order; TrackingState enum values presumed tracked=2, notTracked=0, inferred=1.
private void ProcessBodyData(MultiFrame multiFrame)
{
    const int ValuesPerJoint = 12;

    // Find the first tracked body, if any.
    Body trackedBody = null;
    foreach (var candidate in multiFrame.Bodies)
    {
        if (candidate.IsTracked)
        {
            trackedBody = candidate;
            break;
        }
    }

    if (trackedBody == null)
    {
        // No tracked body: return an all-zero buffer sized for 25 joints.
        multiFrame.BodyData = new float[25 * ValuesPerJoint];
        return;
    }

    float[] bodyData = new float[trackedBody.Joints.Count * ValuesPerJoint];
    int idx = 0;

    foreach (var kvp in trackedBody.Joints)
    {
        var joint = kvp.Value;
        var orientation = trackedBody.JointOrientations[kvp.Key].Orientation;

        // Sometimes Z < 0 for inferred joints and the mapping would return
        // -infinity (per the Kinect code sample), so clamp to a small depth.
        CameraSpacePoint position = joint.Position;
        if (position.Z < 0)
        {
            position.Z = 0.1f;
        }

        DepthSpacePoint depthSpacePoint = coordMapper.MapCameraPointToDepthSpace(position);
        ColorSpacePoint colorSpacePoint = coordMapper.MapCameraPointToColorSpace(position);

        bodyData[idx++] = (int)joint.TrackingState;
        bodyData[idx++] = joint.Position.X;
        bodyData[idx++] = joint.Position.Y;
        bodyData[idx++] = joint.Position.Z;
        bodyData[idx++] = depthSpacePoint.X;
        bodyData[idx++] = depthSpacePoint.Y;
        bodyData[idx++] = colorSpacePoint.X;
        bodyData[idx++] = colorSpacePoint.Y;
        bodyData[idx++] = orientation.X;
        bodyData[idx++] = orientation.Y;
        bodyData[idx++] = orientation.Z;
        bodyData[idx++] = orientation.W;
    }

    multiFrame.BodyData = bodyData;
}
//public static byte[] ReflectImage(byte[] bitMap, int width, int height)
//{
//    byte[] reflection = new byte[width*height];
//    int imagePosition = 0;
//    // repeat for each row
//    for (int row = 0; row < height; row++)
//    {
//        // read from the left edge
//        int fromPos = imagePosition + (row * width);
//        // write to the right edge
//        int toPos = fromPos + width - 1;
//        while (fromPos < width)
//        {
//            reflection[toPos] = bitMap[fromPos]; //copy the pixel
//            fromPos++; // move towards the middle
//            toPos--;   // move back from the right edge
//        }
//    }
//    return reflection;
//}
#endregion

#region CoordinateMapper

/// <summary>
/// Maps a camera-space point onto a canvas: the point is projected into color
/// space, normalized by the frame dimensions registered for
/// <paramref name="spaceMode"/>, then scaled to the canvas size.
/// </summary>
/// <param name="coorMap">Coordinate mapper used for the projection.</param>
/// <param name="camSP">Camera-space point to map.</param>
/// <param name="canvas">Target canvas.</param>
/// <param name="spaceMode">Space mode whose frame dimensions normalize the point.</param>
/// <returns>The corresponding point on the canvas.</returns>
public static Point MapCameraSpacePointToPointOnCanvas(this CoordinateMapper coorMap, CameraSpacePoint camSP, Canvas canvas, SpaceMode spaceMode)
{
    ColorSpacePoint colorSP = coorMap.MapCameraPointToColorSpace(camSP);
    // NOTE(review): no infinity guard here, and this uses canvas.Width/Height
    // (which can be NaN in WPF when not explicitly set) while the sibling
    // overload uses ActualWidth/ActualHeight — confirm which is intended.
    double normedColorSPX = colorSP.X / FrameDimensions[spaceMode].Item1;
    double normedColorSPY = colorSP.Y / FrameDimensions[spaceMode].Item2;
    return(new Point(normedColorSPX * canvas.Width, normedColorSPY * canvas.Height));
}
// Draws a joint as a point on the canvas: the joint is mapped into the
// requested 2-D space, scaled to the canvas, and drawn. Returns the created
// shape, or null when the joint is untracked or could not be mapped.
public static Shape DrawPoint(this Canvas canvas, Joint joint, CoordinateMapper mapper, SpaceMode mode, Brush brush)
{
    // Untracked joints produce no shape.
    if (joint.TrackingState == TrackingState.NotTracked)
    {
        return null;
    }

    // Map the camera-space joint into depth space, or color space for
    // SpaceMode.Color and any other mode (matching the original default case).
    SpacePointBase spacePoint = mode == SpaceMode.Depth
        ? new SpacePointBase(mapper.MapCameraPointToDepthSpace(joint.Position))
        : new SpacePointBase(mapper.MapCameraPointToColorSpace(joint.Position));

    #region Joint Mapping Messages
    if (spacePoint.X == float.NegativeInfinity || spacePoint.Y == float.NegativeInfinity)
    {
        // Mapping failed for this joint (coordinates at -infinity).
        //Debug.WriteLine($"Joint Mapping Error: Joint[{joint.JointType.ToString()}] ( {spacePoint.X} , {spacePoint.Y} )");
    }
    else if (spacePoint.X < 0 || spacePoint.Y < 0 ||
             spacePoint.X > FrameDimensions[mode].Item1 || spacePoint.Y > FrameDimensions[mode].Item2)
    {
        // Joint mapped outside the frame bounds.
        //Debug.WriteLine($"Joint Mapping Overflow: Joint[{joint.JointType.ToString()}] ( {spacePoint.X} , {spacePoint.Y} )");
    }
    #endregion

    Shape shapeToReturn = null;

    // A valid (finite) point is scaled to the canvas and drawn.
    if (spacePoint.IsValid)
    {
        spacePoint = spacePoint.ScaleTo(canvas.ActualWidth, canvas.ActualHeight, mode);
        shapeToReturn = spacePoint.DrawPoint(canvas, brush);
    }

    return shapeToReturn;
}
// Maps a joint into a color-space 2-D point (infinite coordinates clamp to 0)
// and returns it together with the joint's camera-space depth (Z).
public static (Point point, float depth) ToCoordinatePoint(this Joint joint, CoordinateMapper coordinateMapper)
{
    CameraSpacePoint cameraPosition = joint.Position;
    ColorSpacePoint mapped = coordinateMapper.MapCameraPointToColorSpace(cameraPosition);

    Point point = new Point
    {
        X = float.IsInfinity(mapped.X) ? 0 : mapped.X,
        Y = float.IsInfinity(mapped.Y) ? 0 : mapped.Y
    };

    return (point, cameraPosition.Z);
}
// Draws a joint as an ellipse on the Skeleton canvas, mapped into the 2-D
// space that matches the current visualization.
private void DrawJoint(Joint joint, double radius, SolidColorBrush fill, double borderWidth, SolidColorBrush border)
{
    //if (joint.TrackingState != TrackingState.Tracked) return;

    // Map the CameraPoint into the matching 2-D space.
    Point spacePoint = new Point();
    if (Visualization == Visualization.Color)
    {
        ColorSpacePoint colorPoint = _coordinateMapper.MapCameraPointToColorSpace(joint.Position);
        spacePoint.X = colorPoint.X;
        spacePoint.Y = colorPoint.Y;
    }
    else if (Visualization == Visualization.Depth || Visualization == Visualization.Infrared)
    {
        DepthSpacePoint depthPoint = _coordinateMapper.MapCameraPointToDepthSpace(joint.Position);
        spacePoint.X = depthPoint.X;
        spacePoint.Y = depthPoint.Y;
    }

    // BUGFIX: validate the mapped point BEFORE adding the ellipse. The original
    // added the ellipse to the canvas first and then returned on bad tracking,
    // leaving an orphaned, unpositioned ellipse among the canvas children.
    if (float.IsInfinity((float)(spacePoint.X)) || float.IsInfinity((float)(spacePoint.Y)))
    {
        return;
    }

    // Build the UI element from the given parameters.
    Ellipse el = new Ellipse
    {
        Fill = fill,
        Stroke = border,
        StrokeThickness = borderWidth
    };
    el.Width = el.Height = radius;

    Skeleton.Children.Add(el);

    // Center the ellipse on the mapped point.
    Canvas.SetLeft(el, (spacePoint.X - el.Width / 2));
    Canvas.SetTop(el, (spacePoint.Y - el.Height / 2));
}
// Draws a bone between two joints as a tagged Line on the canvas. Each point
// starts at the display ratio and is multiplied by the mapped coordinate
// (an infinite coordinate collapses that axis to 0).
public void DrawBone(Joint first, Joint second, Brush brush, double thickness)
{
    // Both joints must be at least partially tracked.
    if (first.TrackingState == TrackingState.NotTracked || second.TrackingState == TrackingState.NotTracked)
    {
        return;
    }

    Point firstPoint = new Point(_ratioX, _ratioY);
    Point secondPoint = new Point(_ratioX, _ratioY);

    if (FrameType == VisualizationMode.Color)
    {
        ColorSpacePoint mappedFirst = CoordinateMapper.MapCameraPointToColorSpace(first.Position);
        firstPoint.X *= float.IsInfinity(mappedFirst.X) ? 0.0 : mappedFirst.X;
        firstPoint.Y *= float.IsInfinity(mappedFirst.Y) ? 0.0 : mappedFirst.Y;

        ColorSpacePoint mappedSecond = CoordinateMapper.MapCameraPointToColorSpace(second.Position);
        secondPoint.X *= float.IsInfinity(mappedSecond.X) ? 0.0 : mappedSecond.X;
        secondPoint.Y *= float.IsInfinity(mappedSecond.Y) ? 0.0 : mappedSecond.Y;
    }
    else if (FrameType == VisualizationMode.Depth || FrameType == VisualizationMode.Infrared)
    {
        DepthSpacePoint mappedFirst = CoordinateMapper.MapCameraPointToDepthSpace(first.Position);
        firstPoint.X *= float.IsInfinity(mappedFirst.X) ? 0.0 : mappedFirst.X;
        firstPoint.Y *= float.IsInfinity(mappedFirst.Y) ? 0.0 : mappedFirst.Y;

        DepthSpacePoint mappedSecond = CoordinateMapper.MapCameraPointToDepthSpace(second.Position);
        secondPoint.X *= float.IsInfinity(mappedSecond.X) ? 0.0 : mappedSecond.X;
        secondPoint.Y *= float.IsInfinity(mappedSecond.Y) ? 0.0 : mappedSecond.Y;
    }

    canvas.Children.Add(new Line
    {
        Tag = TAG,
        X1 = firstPoint.X,
        Y1 = firstPoint.Y,
        X2 = secondPoint.X,
        Y2 = secondPoint.Y,
        StrokeThickness = thickness,
        Stroke = brush
    });
}
// Converts a Kinect joint into coordinates on the RGB (color) image.
private Vector3 GetVector3FromJoint(Kinect.Joint joint)
{
    ColorSpacePoint colorPoint = _Mapper.MapCameraPointToColorSpace(joint.Position);

    // Convert color-space coordinates to screen coordinates.
    // NOTE(review): this multiplies by the FRAME size and divides by the SCREEN
    // size; a frame->screen rescale would normally be (x * SCREEN) / FRAME —
    // confirm the intended direction of this conversion.
    colorPoint.X = (int)((colorPoint.X * FRAMEWIDTH) / SCREENWIDTH);
    colorPoint.Y = (int)((colorPoint.Y * FRAMEHEIGH) / SCREENHEIGH);

    // Return the adjusted coordinates: mirrored (negated) and re-centered on
    // the frame, with Z fixed to the configured target depth.
    return(new Vector3(-colorPoint.X + (FRAMEWIDTH / 2.0f), -colorPoint.Y + (FRAMEHEIGH / 2.0f), _targetDepth));
}
//TODO: remove, unless want to get a 3D point to image...?
/*
 * public Vector2 GetImagePoint(Vector3 position) {
 *
 *     CameraSpacePoint cameraPoint = new CameraSpacePoint();
 *     cameraPoint.X = position.x;
 *     cameraPoint.Y = position.y;
 *     cameraPoint.Z = -position.z;
 *
 *     Vector2 point = new Vector2();
 *
 *     ColorSpacePoint colorPoint = coordinateMapper.MapCameraPointToColorSpace(cameraPoint);
 *     point.x = float.IsInfinity(colorPoint.X) ? 0 : colorPoint.X;
 *     point.y = float.IsInfinity(colorPoint.Y) ? 0 : colorPoint.Y;
 *
 *     return point;
 * }
 */

// Projects a camera-space point onto the color image; failed mappings
// (infinite coordinates) collapse to the origin.
public Vector2 GetImagePoint(CameraSpacePoint cameraPoint)
{
    ColorSpacePoint mapped = coordinateMapper.MapCameraPointToColorSpace(cameraPoint);
    return new Vector2(
        float.IsInfinity(mapped.X) ? 0 : mapped.X,
        float.IsInfinity(mapped.Y) ? 0 : mapped.Y);
}
// Maps a camera-space point into a color-space 2-D point; unmappable
// (infinite) coordinates are clamped to 0.
public static Point ToPoint(this Microsoft.Kinect.CameraSpacePoint position, CoordinateMapper coordinateMapper)
{
    ColorSpacePoint mapped = coordinateMapper.MapCameraPointToColorSpace(position);

    double x = float.IsInfinity(mapped.X) ? 0.0 : mapped.X;
    double y = float.IsInfinity(mapped.Y) ? 0.0 : mapped.Y;
    return new Point { X = x, Y = y };
}
// Maps a joint into a color-space 2-D point. (Despite the name, no extra
// scaling is applied — only the camera-to-color projection; infinite
// coordinates are clamped to 0.)
public static Point Scale(this Joint joint, CoordinateMapper mapper)
{
    ColorSpacePoint mapped = mapper.MapCameraPointToColorSpace(joint.Position);

    return new Point
    {
        X = float.IsInfinity(mapped.X) ? 0.0 : mapped.X,
        Y = float.IsInfinity(mapped.Y) ? 0.0 : mapped.Y
    };
}
// Draws a bone (line) between two joints on the canvas after mapping both
// into the requested 2-D space and scaling to the canvas size.
public static void DrawLine(this Canvas canvas, Joint first, Joint second, CoordinateMapper mapper, SpaceMode mode)
{
    // Skip bones with an untracked endpoint.
    if (first.TrackingState == TrackingState.NotTracked || second.TrackingState == TrackingState.NotTracked)
    {
        return;
    }

    SpacePointBase myFirstPoint;
    SpacePointBase mySecondPoint;

    switch (mode)
    {
        case SpaceMode.Color:
        default:
            myFirstPoint = new SpacePointBase(mapper.MapCameraPointToColorSpace(first.Position));
            mySecondPoint = new SpacePointBase(mapper.MapCameraPointToColorSpace(second.Position));
            break;
        case SpaceMode.Depth:
            myFirstPoint = new SpacePointBase(mapper.MapCameraPointToDepthSpace(first.Position));
            mySecondPoint = new SpacePointBase(mapper.MapCameraPointToDepthSpace(second.Position));
            break;
    }

    // Both points that the line joins must be mapped correctly.
    // BUGFIX: the original combined the two endpoint checks with '||', drawing
    // the line when only ONE endpoint was valid — contradicting its own comment
    // and the sibling overload, which requires IsValid on both points.
    if ((!float.IsNegativeInfinity(myFirstPoint.X) && !float.IsNegativeInfinity(myFirstPoint.Y)) &&
        (!float.IsNegativeInfinity(mySecondPoint.X) && !float.IsNegativeInfinity(mySecondPoint.Y)))
    {
        myFirstPoint = myFirstPoint.ScaleTo(canvas.ActualWidth, canvas.ActualHeight, mode);
        mySecondPoint = mySecondPoint.ScaleTo(canvas.ActualWidth, canvas.ActualHeight, mode);

        // Delegate the actual drawing to SpacePointBase.
        SpacePointBase.DrawLine(canvas, myFirstPoint, mySecondPoint);
    }
}
// Draws a bone (line) between two joints with the given thickness and brush.
// Returns the created shape, or null when an endpoint is untracked or either
// point failed to map.
public static Shape DrawLine(this Canvas canvas, Joint first, Joint second, CoordinateMapper mapper, SpaceMode mode, double thickness, Brush brush)
{
    // Skip bones with an untracked endpoint.
    if (first.TrackingState == TrackingState.NotTracked || second.TrackingState == TrackingState.NotTracked)
    {
        return null;
    }

    // Map both joints into depth space, or color space for SpaceMode.Color
    // and any other mode (matching the original default case).
    SpacePointBase start;
    SpacePointBase end;
    if (mode == SpaceMode.Depth)
    {
        start = new SpacePointBase(mapper.MapCameraPointToDepthSpace(first.Position));
        end = new SpacePointBase(mapper.MapCameraPointToDepthSpace(second.Position));
    }
    else
    {
        start = new SpacePointBase(mapper.MapCameraPointToColorSpace(first.Position));
        end = new SpacePointBase(mapper.MapCameraPointToColorSpace(second.Position));
    }

    // Both points that the line joins must be mapped correctly.
    if (!start.IsValid || !end.IsValid)
    {
        return null;
    }

    start = start.ScaleTo(canvas.ActualWidth, canvas.ActualHeight, mode);
    end = end.ScaleTo(canvas.ActualWidth, canvas.ActualHeight, mode);

    // Delegate the drawing to SpacePointBase.
    return SpacePointBase.DrawLine(canvas, start, end, thickness, brush);
}
/// <summary>
/// Serializes the tracked bodies — including their joints mapped into the
/// requested 2-D space — to a JSON string.
/// </summary>
/// <param name="skeletons">Bodies to serialize; untracked bodies are skipped.</param>
/// <param name="mapper">Coordinate mapper used to project the joints.</param>
/// <param name="mode">Target space for the mapped coordinates (color or depth).</param>
/// <returns>JSON for the collection of tracked skeletons.</returns>
public static string Serialize(this List <Body> skeletons, CoordinateMapper mapper, Mode mode)
{
    JSONSkeletonCollection jsonSkeletons = new JSONSkeletonCollection { Skeletons = new List <JSONSkeleton>() };
    foreach (Body skeleton in skeletons)
    {
        JSONSkeleton jsonSkeleton = new JSONSkeleton();
        if (skeleton.IsTracked)
        {
            jsonSkeleton.command = "bodyData";
            jsonSkeleton.trackingID = skeleton.TrackingId.ToString();
            jsonSkeleton.Joints = new List <JSONJoint>();
            jsonSkeleton.HandLeftState = skeleton.HandLeftState;
            jsonSkeleton.HandRightState = skeleton.HandRightState;
            foreach (var joint in skeleton.Joints)
            {
                // Project the joint into the requested 2-D space; an unhandled
                // mode leaves the mapped point at (0, 0).
                Point point = new Point();
                switch (mode)
                {
                    case Mode.Color:
                        ColorSpacePoint colorPoint = mapper.MapCameraPointToColorSpace(joint.Value.Position);
                        point.X = colorPoint.X;
                        point.Y = colorPoint.Y;
                        break;
                    case Mode.Depth:
                        DepthSpacePoint depthPoint = mapper.MapCameraPointToDepthSpace(joint.Value.Position);
                        point.X = depthPoint.X;
                        point.Y = depthPoint.Y;
                        break;
                    default:
                        break;
                }
                // Emit both the raw camera-space position (X/Y/Z) and the
                // mapped 2-D point for each joint.
                jsonSkeleton.Joints.Add(new JSONJoint
                {
                    Name = joint.Key.ToString().ToLower(),
                    X = joint.Value.Position.X,
                    Y = joint.Value.Position.Y,
                    mappedX = point.X,
                    mappedY = point.Y,
                    Z = joint.Value.Position.Z
                });
            }
            jsonSkeletons.Skeletons.Add(jsonSkeleton);
        }
    }
    return(JsonConvert.SerializeObject(jsonSkeletons));
}
// Acquires the latest body frame and draws a red circle around the left hand
// and a blue circle around the right hand of every tracked body.
private void UpdateBodyFrame(MultiSourceFrame multiFrame)
{
    using (var bodyFrame = multiFrame.BodyFrameReference.AcquireFrame())
    {
        // No frame ready this tick.
        if (bodyFrame == null)
        {
            return;
        }
        bodyFrame.GetAndRefreshBodyData(bodies);
    }

    CanvasBody.Children.Clear();

    foreach (Body b in bodies)
    {
        if (!b.IsTracked)
        {
            continue;
        }

        ColorSpacePoint cspL = mapper.MapCameraPointToColorSpace(b.Joints[JointType.HandLeft].Position);
        ColorSpacePoint cspR = mapper.MapCameraPointToColorSpace(b.Joints[JointType.HandRight].Position);

        // BUGFIX(consistency): both circles are now centered on their hand
        // (offset by half the ellipse size). The original translated the left
        // ellipse by (-100, -100) and the right by (-150, -100) — inconsistent,
        // and neither centered the 100px circle on the mapped point.
        Ellipse elpL = new Ellipse() { Width = 100, Height = 100, StrokeThickness = 5, Stroke = Brushes.Red };
        elpL.RenderTransform = new TranslateTransform(cspL.X - elpL.Width / 2, cspL.Y - elpL.Height / 2);

        Ellipse elpR = new Ellipse() { Width = 100, Height = 100, StrokeThickness = 5, Stroke = Brushes.Blue };
        elpR.RenderTransform = new TranslateTransform(cspR.X - elpR.Width / 2, cspR.Y - elpR.Height / 2);

        CanvasBody.Children.Add(elpL);
        CanvasBody.Children.Add(elpR);
    }
}
/// <summary>
/// Project the camera space point using the given sensor coordinate mapper.
/// </summary>
/// <param name="inPoint">Camera-space point to project.</param>
/// <param name="coordinateMapper">Sensor coordinate mapper.</param>
/// <returns>The projected 2-D point, in depth space when the depth mapper is
/// configured, otherwise in color space.</returns>
public Point ProjectCameraPoint(CameraSpacePoint inPoint, CoordinateMapper coordinateMapper)
{
    if (!_useDepthMapper)
    {
        ColorSpacePoint colorPoint = coordinateMapper.MapCameraPointToColorSpace(inPoint);
        return new Point(colorPoint.X, colorPoint.Y);
    }

    DepthSpacePoint depthPoint = coordinateMapper.MapCameraPointToDepthSpace(inPoint);
    return new Point(depthPoint.X, depthPoint.Y);
}
// Copies each arriving color frame into the display bitmap; optionally samples
// the skin color around the current hand positions first.
private void colorFrameReader_FrameArrived(object sender, ColorFrameArrivedEventArgs e)
{
    // ColorFrame is IDisposable
    using (ColorFrame colorFrame = e.FrameReference.AcquireFrame())
    {
        if (colorFrame != null)
        {
            FrameDescription colorFrameDescription = colorFrame.FrameDescription;
            using (KinectBuffer colorBuffer = colorFrame.LockRawImageBuffer())
            {
                // Reserve the bitmap back buffer for writing.
                this.colorBitmap.Lock();
                // verify data and write the new color frame data to the display bitmap
                if ((colorFrameDescription.Width == this.colorBitmap.PixelWidth) && (colorFrameDescription.Height == this.colorBitmap.PixelHeight))
                {
                    if ((bool)chk_skin.IsChecked)
                    {
                        // Map both hand positions into color space and sample
                        // the skin color there from a CPU-side copy of the frame.
                        // NOTE(review): leftHandPostition/rightHandPostition are
                        // fields set elsewhere (presumably by a body reader) —
                        // confirm they are current when this frame arrives.
                        var handLeft = coordinateMapper.MapCameraPointToColorSpace(leftHandPostition);
                        var handRight = coordinateMapper.MapCameraPointToColorSpace(rightHandPostition);
                        colorFrame.CopyConvertedFrameDataToArray(colorFrameData, ColorImageFormat.Bgra);
                        getSkinColor(handLeft, handRight);
                    }
                    // Copy the converted BGRA frame straight into the back buffer.
                    colorFrame.CopyConvertedFrameDataToIntPtr(
                        this.colorBitmap.BackBuffer,
                        (uint)(colorFrameDescription.Width * colorFrameDescription.Height * BytesPerPixel),
                        ColorImageFormat.Bgra); //32Bit(4Byte)
                    this.colorBitmap.AddDirtyRect(new Int32Rect(0, 0, this.colorBitmap.PixelWidth, this.colorBitmap.PixelHeight));
                }
                // NOTE(review): an exception between Lock() and here would skip
                // Unlock() — consider a try/finally.
                this.colorBitmap.Unlock();
            }
        }
    }
}
/// <summary>
/// Constructor: pre-maps every joint of the body into color space, keyed by joint type.
/// </summary>
/// <param name="body">Kinect body</param>
/// <param name="coordinateMapper">Coordinate mapper</param>
public ColorSpaceKinectJoints(KinectBody body, CoordinateMapper coordinateMapper)
{
    if (body == null)
    {
        throw new ArgumentNullException("body");
    }
    if (coordinateMapper == null)
    {
        throw new ArgumentNullException("coordinateMapper");
    }

    this.jointPositions = new Dictionary<JointType, ColorSpacePoint>();
    foreach (Joint joint in body.Joints.Values)
    {
        ColorSpacePoint mapped = coordinateMapper.MapCameraPointToColorSpace(joint.Position);
        this.jointPositions.Add(joint.JointType, mapped);
    }
}
// Computes a square crop region (stored in _Key as BX, TX, BY, TY) around the
// head in color-image coordinates, sized by the head-neck offset but at least
// 150 px. NOTE(review): GameObject.Find on every call is costly in Unity —
// consider caching the component reference.
private void MapCameraToImage()
{
    // Latest camera-space joints from the body view (index 0 = head, 1 = neck).
    CameraSpacePoint[] cameraPoints = GameObject.Find("BodyView").GetComponent <MineBodySourceView> ().getCameraPoints();

    // An exact zero on any head axis means "no body data yet".
    if (cameraPoints.Length == 0 || cameraPoints[0].X == 0 || cameraPoints[0].Y == 0 || cameraPoints[0].Z == 0)
    {
        return;
    }

    // Project head and neck into color-image coordinates.
    head = m_pCoordinateMapper.MapCameraPointToColorSpace(cameraPoints[0]);
    neck = m_pCoordinateMapper.MapCameraPointToColorSpace(cameraPoints[1]);

    // Half-size of the crop box: head-neck offset, clamped to at least 150 px.
    int distance = (int)Mathf.Max(Mathf.Ceil(Mathf.Abs((head.X - neck.X) + (head.Y - neck.Y))), 150);

    // Box corners around the head (B = bottom-left, T = top-right).
    int BX = (int)Mathf.Ceil(head.X - distance);
    int BY = (int)Mathf.Ceil(head.Y - distance);
    int TX = (int)Mathf.Ceil(head.X + distance);
    int TY = (int)Mathf.Ceil(head.Y + distance);

    _Key[0] = BX;
    _Key[1] = TX;
    _Key[2] = BY;
    _Key[3] = TY;
}
/// <summary>
/// Maps a camera-space point onto a canvas: the point is projected into color
/// space, normalized by the frame dimensions registered for
/// <paramref name="spaceMode"/>, then scaled to the canvas' actual size.
/// </summary>
/// <param name="coorMap">Coordinate mapper used for the projection.</param>
/// <param name="camSP">Camera-space point to map.</param>
/// <param name="canvas">Target canvas.</param>
/// <param name="spaceMode">Space mode whose frame dimensions normalize the point.</param>
/// <returns>The corresponding point on the canvas.</returns>
/// <exception cref="InvalidOperationException">The mapper could not resolve the point.</exception>
public static Point MapCameraSpacePointToPointOnCanvas(this CoordinateMapper coorMap, CameraSpacePoint camSP, Canvas canvas, SpaceMode spaceMode)
{
    ColorSpacePoint colorSP = coorMap.MapCameraPointToColorSpace(camSP);

    // The mapper returns infinite coordinates when the point cannot be resolved.
    // FIX(idiom): throw a specific exception type rather than the base Exception
    // (existing 'catch (Exception)' handlers still catch it).
    if (float.IsInfinity(colorSP.X) || float.IsInfinity(colorSP.Y))
    {
        throw new InvalidOperationException("Error when calling MapCameraSpacePointToPointOnCanvas(): " + Environment.NewLine + "Resultant ColorSpacePoint at negative infinity.");
    }

    double normedColorSPX = colorSP.X / FrameDimensions[spaceMode].Item1;
    double normedColorSPY = colorSP.Y / FrameDimensions[spaceMode].Item2;
    return new Point(normedColorSPX * canvas.ActualWidth, normedColorSPY * canvas.ActualHeight);
}
// Computes a square crop region around the head (stored in _Key as BX, TX,
// BY, TY) and fills _Mask with 1 for every color pixel strictly inside the
// box, 0 elsewhere.
private void MapCameraToImage()
{
    // Latest camera-space joints from the body view (index 0 = head, 1 = neck).
    CameraSpacePoint[] cameraPoints = GameObject.Find("BodyView").GetComponent <MineBodySourceView> ().getCameraPoints();

    // An exact zero on any head axis means "no body data yet".
    if (cameraPoints.Length == 0 || cameraPoints[0].X == 0 || cameraPoints[0].Y == 0 || cameraPoints[0].Z == 0)
    {
        return;
    }

    // Project head and neck into color-image coordinates.
    head = m_pCoordinateMapper.MapCameraPointToColorSpace(cameraPoints[0]);
    neck = m_pCoordinateMapper.MapCameraPointToColorSpace(cameraPoints[1]);

    // Half-size of the crop box, derived from the head-neck offset.
    int distance = (int)Mathf.Ceil(Mathf.Abs((head.X - neck.X) + (head.Y - neck.Y)));

    // Box corners around the head (B = bottom-left, T = top-right).
    int TX = (int)Mathf.Ceil(head.X + distance);
    int TY = (int)Mathf.Ceil(head.Y + distance);
    int BX = (int)Mathf.Ceil(head.X - distance);
    int BY = (int)Mathf.Ceil(head.Y - distance);

    _Key[0] = BX;
    _Key[1] = TX;
    _Key[2] = BY;
    _Key[3] = TY;

    // Build the per-pixel mask for the full color frame.
    for (int i = 0; i < cColorWidth * cColorHeight; i++)
    {
        int x = i % cColorWidth;
        int y = i / cColorWidth;
        if (x > BX && x < TX && y > BY && y < TY)
        {
            _Mask[i] = 1;
        }
        else
        {
            _Mask[i] = 0;
        }
    }
}
/// <summary>
/// Project the camera space point using the given sensor coordinate mapper.
/// </summary>
/// <param name="inPoint">Camera-space point to project.</param>
/// <param name="coordinateMapper">Sensor coordinate mapper.</param>
/// <returns>The projected 2-D point (depth space when the depth mapper is
/// configured, otherwise color space).</returns>
public Point ProjectCameraPoint(CameraSpacePoint inPoint, CoordinateMapper coordinateMapper)
{
    if (_useDepthMapper)
    {
        var mapped = coordinateMapper.MapCameraPointToDepthSpace(inPoint);
        return new Point(mapped.X, mapped.Y);
    }
    else
    {
        var mapped = coordinateMapper.MapCameraPointToColorSpace(inPoint);
        return new Point(mapped.X, mapped.Y);
    }
}
/// <summary>
/// Translates between kinect and drawing points.
/// </summary>
/// <param name="point">Camera-space point to translate.</param>
/// <param name="mapper">Coordinate mapper used for the projection.</param>
/// <returns>The corresponding integer drawing point; unmappable coordinates map to 0.</returns>
private System.Drawing.Point TranslatePoint(CameraSpacePoint point, CoordinateMapper mapper)
{
    var colorPoint = mapper.MapCameraPointToColorSpace(point);

    // BUGFIX: the mapper returns infinity for unresolvable points; the original
    // cast that straight to int (yielding a saturated garbage coordinate).
    // Clamp to 0 first, matching the infinity handling used by the other
    // mapping helpers in this file.
    int x = float.IsInfinity(colorPoint.X) ? 0 : (int)colorPoint.X;
    int y = float.IsInfinity(colorPoint.Y) ? 0 : (int)colorPoint.Y;
    return new System.Drawing.Point(x, y);
}
/// <summary>
/// Initializes a new instance of the MainWindow class: acquires the default
/// Kinect sensor, wires up the depth and body frame readers, allocates the
/// depth display buffers, opens the sensor, and sets up the view model.
/// </summary>
public MainWindow()
{
    // NOTE(review): 'writer' is assumed to be an already-open field-initialized
    // stream — confirm, since writing here happens before any sensor setup.
    writer.WriteLine("START");

    // get the kinectSensor object
    this.kinectSensor = KinectSensor.GetDefault();

    // get the coordinate mapper
    coordinateMapper = this.kinectSensor.CoordinateMapper;

    // open the readers for the depth and body frames
    this.depthFrameReader = this.kinectSensor.DepthFrameSource.OpenReader();
    this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();

    // wire handlers for frame arrival
    this.depthFrameReader.FrameArrived += this.Depth_Reader_FrameArrived;
    if (this.bodyFrameReader != null)
    {
        this.bodyFrameReader.FrameArrived += this.Body_Reader_FrameArrived;
    }

    // FIX: the original opened a SECOND body frame reader further down,
    // replacing the reader whose FrameArrived handler was just wired, so body
    // events were silently lost. The duplicate OpenReader call was removed,
    // along with an unused 'windir' lookup and a leftover debug Console.Write.

    // get FrameDescription from DepthFrameSource
    this.depthFrameDescription = this.kinectSensor.DepthFrameSource.FrameDescription;

    // get size of joint space
    this.displayWidth = depthFrameDescription.Width;
    this.displayHeight = depthFrameDescription.Height;

    // allocate space to put the pixels being received and converted
    this.depthPixels = new byte[this.depthFrameDescription.Width * this.depthFrameDescription.Height];
    this.hist = new int[this.depthPixels.Length];

    // create the bitmap to display
    this.depthBitmap = new WriteableBitmap(this.depthFrameDescription.Width, this.depthFrameDescription.Height, 96.0, 96.0, PixelFormats.Gray8, null);

    // set IsAvailableChanged event notifier
    this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

    // open the sensor
    this.kinectSensor.Open();

    // set the status text
    this.StatusText = this.kinectSensor.IsAvailable ? Properties.Resources.RunningStatusText : Properties.Resources.NoSensorStatusText;

    // use the window object as the view model in this simple example
    this.DataContext = this;
    this.ctr = 0;

    // FIX: the original mapped 'lefthandpoint' to color space BEFORE assigning
    // it from this.lefthand.Position, so 'handpoint' was computed from a
    // default (zero) point. Assign first, then map.
    // NOTE(review): this.lefthand is assumed to be initialized by a field
    // initializer before the constructor body runs — confirm.
    this.lefthandpoint = this.lefthand.Position;
    this.handpoint = coordinateMapper.MapCameraPointToColorSpace(lefthandpoint);

    // initialize the components (controls) of the window
    this.InitializeComponent();
}
/// <summary>
/// Maps a joint's camera-space position to a 2-D point in color-image space.
/// Coordinates the mapper reports as infinite (unmappable) are zeroed.
/// </summary>
/// <param name="joint">Joint whose position is mapped.</param>
/// <param name="mapper">Coordinate mapper performing the projection.</param>
/// <returns>The color-space point, with infinities replaced by 0.</returns>
public static Point Scale(this Joint joint, CoordinateMapper mapper)
{
    ColorSpacePoint mapped = mapper.MapCameraPointToColorSpace(joint.Position);

    Point result = new Point();
    result.X = float.IsInfinity(mapped.X) ? 0.0 : mapped.X;
    result.Y = float.IsInfinity(mapped.Y) ? 0.0 : mapped.Y;
    return result;
}
/// <summary>
/// Converts the specified CameraSpacePoint into a 2-D point.
/// </summary>
/// <param name="position">The CameraSpacePoint to convert.</param>
/// <param name="visualization">The type of the conversion (color, depth, or infrared).</param>
/// <param name="coordinateMapper">The CoordinateMapper to make the conversion.</param>
/// <returns>The corresponding 2-D point; components are 0 for infinite (unmappable)
/// coordinates, and the point is (0, 0) for an unknown visualization.</returns>
public static Point ToPoint(this CameraSpacePoint position, Visualization visualization, CoordinateMapper coordinateMapper)
{
    Point result = new Point();

    if (visualization == Visualization.Color)
    {
        ColorSpacePoint mapped = coordinateMapper.MapCameraPointToColorSpace(position);
        result.X = float.IsInfinity(mapped.X) ? 0.0 : mapped.X;
        result.Y = float.IsInfinity(mapped.Y) ? 0.0 : mapped.Y;
    }
    else if (visualization == Visualization.Depth || visualization == Visualization.Infrared)
    {
        // Depth and infrared share the same (depth-space) projection.
        DepthSpacePoint mapped = coordinateMapper.MapCameraPointToDepthSpace(position);
        result.X = float.IsInfinity(mapped.X) ? 0.0 : mapped.X;
        result.Y = float.IsInfinity(mapped.Y) ? 0.0 : mapped.Y;
    }

    return result;
}
//--------------------------------------------------------------------------------------
// Helper functions for imaging and debugging
//--------------------------------------------------------------------------------------

/// <summary>
/// Debug visualization: draws the right-PHIZ coordinate frame onto the color
/// image — an aqua circle at the frame origin and red/green/blue lines along
/// the frame's X/Y/Z axis directions (each scaled to half length).
/// </summary>
/// <param name="coordinateMapper">Mapper projecting camera-space points into color space.</param>
/// <param name="drawingContext">Target drawing context.</param>
public void DrawAxis(CoordinateMapper coordinateMapper, DrawingContext drawingContext)
{
    Matrix3D frame = RightPHIZTransform();

    // Project the frame origin and the tips of the three half-length axes.
    ColorSpacePoint origin = coordinateMapper.MapCameraPointToColorSpace(PHIZTracker.ToVectorCS(GetOffset(frame)));
    ColorSpacePoint xTip = coordinateMapper.MapCameraPointToColorSpace(ToVectorCS(Vector3D.Add(GetOffset(frame), Vector3D.Multiply(.5, GetAxisDir(new Vector3D(1, 0, 0), frame)))));
    ColorSpacePoint yTip = coordinateMapper.MapCameraPointToColorSpace(ToVectorCS(Vector3D.Add(GetOffset(frame), Vector3D.Multiply(.5, GetAxisDir(new Vector3D(0, 1, 0), frame)))));
    ColorSpacePoint zTip = coordinateMapper.MapCameraPointToColorSpace(ToVectorCS(Vector3D.Add(GetOffset(frame), Vector3D.Multiply(.5, GetAxisDir(new Vector3D(0, 0, 1), frame)))));

    Point center = new Point(origin.X, origin.Y);
    drawingContext.DrawEllipse(Brushes.Aqua, null, center, 30, 30);
    drawingContext.DrawLine(new Pen(Brushes.Red, 15), center, new Point(xTip.X, xTip.Y));
    drawingContext.DrawLine(new Pen(Brushes.Green, 15), center, new Point(yTip.X, yTip.Y));
    drawingContext.DrawLine(new Pen(Brushes.Blue, 15), center, new Point(zTip.X, zTip.Y));
}
/// <summary>
/// Draws the bone between two joints onto the drawing context, projecting the
/// joint positions into depth space (default) or color space. Nothing is drawn
/// when neither joint is tracked, or when either projected endpoint falls
/// outside the given rectangle.
/// </summary>
/// <param name="drawingContext">Target drawing context.</param>
/// <param name="brush">Brush used for the bone line.</param>
/// <param name="startJoint">Joint at one end of the bone.</param>
/// <param name="endJoint">Joint at the other end of the bone.</param>
/// <param name="joints">Joint dictionary for the body being drawn.</param>
/// <param name="rect">Clip rectangle; both endpoints must lie inside it.</param>
/// <param name="coordinateMapper">Mapper projecting camera-space positions.</param>
/// <param name="useDepthSpace">True to project into depth space, false for color space.</param>
/// <param name="line">Line thickness.</param>
internal static void DrawBone(DrawingContext drawingContext, System.Windows.Media.Brush brush, JointType startJoint, JointType endJoint, IReadOnlyDictionary<JointType, Joint> joints, System.Windows.Rect rect, CoordinateMapper coordinateMapper, bool useDepthSpace = true, double line = 0.8F)
{
    Joint start = joints[startJoint];
    Joint end = joints[endJoint];

    // Skip only when BOTH joints are untracked; a single inferred joint still draws.
    bool anyTracked = start.TrackingState == TrackingState.Tracked || end.TrackingState == TrackingState.Tracked;
    if (!anyTracked)
    {
        return;
    }

    System.Windows.Point startPoint;
    System.Windows.Point endPoint;
    if (useDepthSpace)
    {
        startPoint = coordinateMapper.MapCameraPointToDepthSpace(start.Position).GetPoint();
        endPoint = coordinateMapper.MapCameraPointToDepthSpace(end.Position).GetPoint();
    }
    else
    {
        startPoint = coordinateMapper.MapCameraPointToColorSpace(start.Position).GetPoint();
        endPoint = coordinateMapper.MapCameraPointToColorSpace(end.Position).GetPoint();
    }

    if (rect.Contains(startPoint) && rect.Contains(endPoint))
    {
        drawingContext.DrawLine(new System.Windows.Media.Pen(brush, line), startPoint, endPoint);
    }
}