private async void Window_Loaded(object sender, RoutedEventArgs e)
{
    while (true)
    {
        // Get Capture Frame
        using (K4A.Capture capture = await Task.Run(() => this.device.GetCapture()))
        {
            // Enqueue Capture
            tracker.EnqueueCapture(capture);

            // Pop Result
            using (K4ABT.Frame frame = tracker.PopResult())
            // Get Color Image
            using (K4A.Image color_image = frame.Capture.Color)
            {
                // Get Color Buffer and Write Bitmap
                byte[] color_buffer = color_image.Memory.ToArray();
                color_bitmap.WritePixels(color_rect, color_buffer, color_stride, 0, 0);

                // Clear All Ellipses from Canvas
                Canvas_Body.Children.Clear();

                // Draw Skeleton
                for (uint body_index = 0; body_index < frame.NumberOfBodies; body_index++)
                {
                    // Get Skeleton and ID
                    K4ABT.Skeleton skeleton = frame.GetBodySkeleton(body_index);
                    uint id = frame.GetBodyId(body_index);

                    // Draw Joints
                    for (int joint_index = 0; joint_index < (int)K4ABT.JointId.Count; joint_index++)
                    {
                        // Get Joint and its 2D Position in the Color Image
                        K4ABT.Joint joint = skeleton.GetJoint(joint_index);
                        Vector2? position = calibration.TransformTo2D(joint.Position, K4A.CalibrationDeviceType.Depth, K4A.CalibrationDeviceType.Color);
                        if (!position.HasValue)
                        {
                            continue;
                        }

                        // Create Ellipse (filled only when the joint confidence is at least Medium)
                        const int radius = 10;
                        SolidColorBrush stroke_color = new SolidColorBrush(colors[id % colors.Length]);
                        SolidColorBrush fill_color = new SolidColorBrush((joint.ConfidenceLevel >= K4ABT.JointConfidenceLevel.Medium) ? colors[id % colors.Length] : Colors.Transparent);
                        Ellipse ellipse = new Ellipse() { Width = radius, Height = radius, StrokeThickness = 1, Stroke = stroke_color, Fill = fill_color };

                        // Add Ellipse to Canvas
                        Canvas.SetLeft(ellipse, position.Value.X - (radius / 2));
                        Canvas.SetTop(ellipse, position.Value.Y - (radius / 2));
                        Canvas_Body.Children.Add(ellipse);
                    }
                }
            }
        }
    }
}
private async void Window_Loaded(object sender, RoutedEventArgs e)
{
    while (true)
    {
        // Get Capture Frame
        using (K4A.Capture capture = await Task.Run(() => this.device.GetCapture()))
        {
            // Enqueue Capture
            tracker.EnqueueCapture(capture);

            // Pop Result
            using (K4ABT.Frame frame = tracker.PopResult())
            // Get Color Image
            using (K4A.Image color_image = frame.Capture.Color)
            {
                // Get Color Buffer and Write Bitmap
                byte[] color_buffer = color_image.Memory.ToArray();
                color_bitmap.WritePixels(color_rect, color_buffer, color_stride, 0, 0);

                // Transform Body Index Map to Color Camera
                (K4A.Image _, K4A.Image body_index_map) result = transformation.DepthImageToColorCameraCustom(
                    frame.Capture.Depth,
                    frame.BodyIndexMap,
                    K4A.TransformationInterpolationType.Nearest,
                    K4ABT.Frame.BodyIndexMapBackground
                );

                // Get Body Index Map Buffer
                byte[] body_index_map_buffer = result.body_index_map.Memory.ToArray();

                // Colorize Body Index Map and Write Bitmap (background pixels become opaque black)
                Parallel.For(0, body_index_map_buffer.Length, i =>
                {
                    uint index = (uint)body_index_map_buffer[i];
                    if (index != K4ABT.Frame.BodyIndexMapBackground)
                    {
                        Color color = colors[index % colors.Length];
                        colorized_body_index_map_buffer[i * 4 + 0] = color.B;
                        colorized_body_index_map_buffer[i * 4 + 1] = color.G;
                        colorized_body_index_map_buffer[i * 4 + 2] = color.R;
                        colorized_body_index_map_buffer[i * 4 + 3] = color.A;
                    }
                    else
                    {
                        colorized_body_index_map_buffer[i * 4 + 0] = 0;
                        colorized_body_index_map_buffer[i * 4 + 1] = 0;
                        colorized_body_index_map_buffer[i * 4 + 2] = 0;
                        colorized_body_index_map_buffer[i * 4 + 3] = 255;
                    }
                });
                body_index_map_bitmap.WritePixels(body_index_map_rect, colorized_body_index_map_buffer, body_index_map_stride, 0, 0);

                // Release Body Index Map
                result.body_index_map.Dispose();
            }
        }
    }
}
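Both Window_Loaded handlers above rely on fields initialized elsewhere: the opened device, the body tracker, the calibration/transformation pair, the WriteableBitmap plumbing, and a color palette. A minimal sketch of that setup, assuming the same K4A/K4ABT namespace aliases; the field names are taken from the snippets, while the initialization itself is illustrative:

// Assumed aliases, as in the snippets above:
// using K4A = Microsoft.Azure.Kinect.Sensor;
// using K4ABT = Microsoft.Azure.Kinect.BodyTracking;
private K4A.Device device;
private K4ABT.Tracker tracker;
private K4A.Calibration calibration;
private K4A.Transformation transformation;
private WriteableBitmap color_bitmap;
private Int32Rect color_rect;
private int color_stride;
private readonly Color[] colors = { Colors.Red, Colors.Green, Colors.Blue, Colors.Yellow };

private void InitializeDevice()
{
    // Open the device and start both cameras; body tracking requires a depth mode.
    device = K4A.Device.Open(0);
    device.StartCameras(new K4A.DeviceConfiguration
    {
        ColorFormat = K4A.ImageFormat.ColorBGRA32,
        ColorResolution = K4A.ColorResolution.R720p,
        DepthMode = K4A.DepthMode.NFOV_Unbinned,
        SynchronizedImagesOnly = true
    });

    // The calibration feeds both the tracker and the 3D-to-2D transforms.
    calibration = device.GetCalibration();
    transformation = calibration.CreateTransformation();
    tracker = K4ABT.Tracker.Create(calibration, new K4ABT.TrackerConfiguration
    {
        ProcessingMode = K4ABT.TrackerProcessingMode.Gpu,
        SensorOrientation = K4ABT.SensorOrientation.Default
    });

    // Back buffer for the color image.
    int width = calibration.ColorCameraCalibration.ResolutionWidth;
    int height = calibration.ColorCameraCalibration.ResolutionHeight;
    color_bitmap = new WriteableBitmap(width, height, 96.0, 96.0, PixelFormats.Bgra32, null);
    color_rect = new Int32Rect(0, 0, width, height);
    color_stride = width * 4;
}

The body-index-map bitmap and its rect/stride/buffer fields in the second handler follow the same pattern at the color resolution.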
private async void StartCamera()
{
    // Start body tracking and the sensor camera
    using (Tracker tracker = Tracker.Create(this.kinect.GetCalibration(), new TrackerConfiguration() { ProcessingMode = TrackerProcessingMode.Gpu, SensorOrientation = SensorOrientation.Default }))
    using (Microsoft.Azure.Kinect.Sensor.Image transformedDepth = new Microsoft.Azure.Kinect.Sensor.Image(Microsoft.Azure.Kinect.Sensor.ImageFormat.Depth16, colorWidth, colorHeight, colorWidth * sizeof(UInt16)))
    {
        // Local variables
        //
        // photo for prediction
        float[,] actualPhoto;
        // hand position in the color camera
        Vector3 handPositionColor = new Vector3();
        // hand position in the depth camera
        Vector3 HandPositionDepth = Vector3.Zero;
        // elbow position in the depth camera
        Vector3 ElbowPositionDepth = Vector3.Zero;
        // tracked body id
        uint trackedBodyId = 0;
        // hand reading confidence level
        JointConfidenceLevel handConf = JointConfidenceLevel.Low;
        // elbow reading confidence level
        JointConfidenceLevel elbowConf = JointConfidenceLevel.Low;
        int x, y;
        // body skeleton
        Skeleton body;

        while (running)
        {
            // create a fresh prediction photo; 255 marks an empty cell
            actualPhoto = new float[actualPhotoWidht, actualPhotoHeight];
            for (int i = 0; i < 30; i++)
            {
                for (int j = 0; j < 30; j++)
                {
                    actualPhoto[i, j] = 255;
                }
            }

            // get capture
            using (Capture capture = await Task.Run(() => this.kinect.GetCapture()))
            {
                // transform the depth image to the color camera
                this.transform.DepthImageToColorCamera(capture, transformedDepth);

                // Queue latest frame from the sensor.
                tracker.EnqueueCapture(capture);

                // get the position of the hand
                using (Microsoft.Azure.Kinect.BodyTracking.Frame frame = tracker.PopResult(TimeSpan.Zero, throwOnTimeout: false))
                {
                    // check if we have bodies
                    if (frame != null && frame.NumberOfBodies >= 1)
                    {
                        if (trackedBodyId <= 0)
                        {
                            trackedBodyId = frame.GetBody(0).Id;
                        }

                        // get body
                        body = frame.GetBodySkeleton(0);

                        // check confidence: we only use the data if confidence is medium or high
                        handConf = body.GetJoint(JointId.HandRight).ConfidenceLevel;

                        // skip if confidence is not high or medium
                        if (handConf == JointConfidenceLevel.Low || handConf == JointConfidenceLevel.None)
                        {
                            continue;
                        }

                        // get hand position
                        HandPositionDepth = body.GetJoint(JointId.HandRight).Position;

                        // elbow position and confidence
                        ElbowPositionDepth = body.GetJoint(JointId.ElbowRight).Position;
                        elbowConf = body.GetJoint(JointId.ElbowRight).ConfidenceLevel;

                        // get the hand position in the color camera
                        var handPositionColorQ = kinect.GetCalibration().TransformTo2D(HandPositionDepth, CalibrationDeviceType.Depth, CalibrationDeviceType.Color);
                        handPositionColor.X = handPositionColorQ.Value.X;
                        handPositionColor.Y = handPositionColorQ.Value.Y;
                        handPositionColor.Z = HandPositionDepth.Z;

                        // loop through all color pixels and select the depths inside the required square
                        var color = capture.Color;

                        // we need unsafe to use a pointer to the buffer
                        unsafe
                        {
                            // get a memory handle
                            using (var pin = color.Memory.Pin())
                            {
                                // pointer to the transformed depth capture; this is basically an array
                                ushort* depthPixels = (ushort*)transformedDepth.Memory.Pin().Pointer;

                                // starting corner of the hand square
                                bool isFirst = true;
                                int xref = 0, yref = 0;
                                x = 0;
                                y = 0;

                                // loop through the depth pixel array
                                for (int i = 0; i < this.colorHeight * this.colorWidth; i++)
                                {
                                    // keep count of x and y for the 2D array
                                    x++;
                                    if (i % 1280 == 0)
                                    {
                                        x = 0;
                                        y++;
                                    }

                                    // inside the square
                                    if ((y - squareRadious) < ((int)handPositionColor.Y) && (y + squareRadious) > ((int)handPositionColor.Y) &&
                                        (x - squareRadious) < ((int)handPositionColor.X) && (x + squareRadious) > ((int)handPositionColor.X))
                                    {
                                        // remember the corner of the square on the first hit
                                        if (isFirst)
                                        {
                                            xref = x;
                                            yref = y;
                                            isFirst = false;
                                        }

                                        // select pixels inside the selected depth band
                                        if (depthPixels[i] < handPositionColor.Z + d2 && depthPixels[i] > handPositionColor.Z - d1 && depthPixels[i] != 0)
                                        {
                                            // scale the actual depth
                                            float deep = (((int)depthPixels[i] - ((int)handPositionColor.Z - d1)) * 250) / 200;
                                            uint xs = (uint)((x - xref) / 10);
                                            uint ys = (uint)((y - yref) / 10);

                                            // keep only the max value, or the first one (cells still at 255 are empty)
                                            if ((uint)deep > actualPhoto[ys, xs] || actualPhoto[ys, xs] == 255)
                                            {
                                                actualPhoto[ys, xs] = deep;
                                            }
                                        }
                                    }
                                }
                            }
                        } // unsafe
                    }
                }
            }

            // add to the queues
            images.Add(actualPhoto);
            bodyData.Add(new BodyData() { HandConfidence = handConf, HandPosition = HandPositionDepth, ElvowPosition = ElbowPositionDepth });
        } // while (running)
    }
}
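The depth-to-gray scaling in the inner loop above recurs across these recorders: a raw millimeter reading inside the band [handZ − d1, handZ + d2] is remapped to a 0–250 gray level, and the ×250/200 factor assumes the band is 200 mm wide (the later snippets hard-code −150/+50). A minimal sketch of that mapping in isolation; the helper name and default arguments are illustrative:

// Hypothetical helper isolating the normalization used above: maps a depth
// sample (mm) inside [handZMm - near, handZMm + far] onto a 0..250 gray value.
static float NormalizeDepth(ushort depthMm, float handZMm, int near = 150, int far = 50)
{
    // near + far is the band width in mm (200 in the snippets above)
    return ((depthMm - (handZMm - near)) * 250.0f) / (near + far);
}

For example, a pixel exactly at the hand's depth with the default band maps to (150 × 250) / 200 = 187.5.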
private async void StartCamera()
{
    // body tracking
    using (Tracker tracker = Tracker.Create(this.kinect.GetCalibration(), new TrackerConfiguration() { ProcessingMode = TrackerProcessingMode.Gpu, SensorOrientation = SensorOrientation.Default }))
    // sensor camera
    using (Microsoft.Azure.Kinect.Sensor.Image transformedDepth = new Microsoft.Azure.Kinect.Sensor.Image(
               Microsoft.Azure.Kinect.Sensor.ImageFormat.Depth16, colorWidth, colorHeight, colorWidth * sizeof(UInt16)))
    {
        // local variables
        // photo for prediction
        float[,] lastImage;
        // for showing lastImage
        Bitmap bm = null;
        uint trackedBodyId = 0;
        JointConfidenceLevel conf = JointConfidenceLevel.Low;

        while (running)
        {
            using (Capture capture = await Task.Run(() => this.kinect.GetCapture()))
            {
                bm = new Bitmap(30, 30);

                // for the final image
                uint[,] foto = new uint[300, 300];
                lastImage = new float[30, 30];
                for (int i = 0; i < 30; i++)
                {
                    for (int j = 0; j < 30; j++)
                    {
                        lastImage[i, j] = 1;
                        bm.SetPixel(i, j, System.Drawing.Color.Red);
                    }
                }

                // get depth
                this.transform.DepthImageToColorCamera(capture, transformedDepth);

                this.bitmapColorCamera.Lock();

                // show the normal camera
                var color = capture.Color;
                var region = new Int32Rect(0, 0, color.WidthPixels, color.HeightPixels);
                var region1 = new Int32Rect(0, 0, 30, 30);

                // Queue latest frame from the sensor.
                tracker.EnqueueCapture(capture);

                // get the position of the hand
                using (Microsoft.Azure.Kinect.BodyTracking.Frame frame = tracker.PopResult(TimeSpan.Zero, throwOnTimeout: false))
                {
                    if (frame != null && frame.NumberOfBodies >= 1)
                    {
                        if (trackedBodyId <= 0)
                        {
                            trackedBodyId = frame.GetBody(0).Id;
                        }

                        var numOfSkeletons = frame.NumberOfBodies;
                        var body = frame.GetBodySkeleton(0);
                        var pos = body.GetJoint(JointId.HandRight).Position;
                        conf = body.GetJoint(JointId.HandRight).ConfidenceLevel;

                        var handposition = kinect.GetCalibration().TransformTo2D(pos, CalibrationDeviceType.Depth, CalibrationDeviceType.Color);
                        handpositionGlobal.X = handposition.Value.X;
                        handpositionGlobal.Y = handposition.Value.Y;
                        // z is the same
                        handpositionGlobal.Z = pos.Z;
                    }
                }

                unsafe
                {
                    using (var pin = color.Memory.Pin())
                    {
                        // put the color camera image into the bitmap
                        this.bitmapColorCamera.WritePixels(region, (IntPtr)pin.Pointer, (int)color.Size, color.StrideBytes);

                        // pixels from the color camera and the transformed depth
                        uint* colorPixels = (uint*)this.bitmapColorCamera.BackBuffer;
                        ushort* depthPixels = (ushort*)transformedDepth.Memory.Pin().Pointer;

                        int closest = 501;
                        int x = 0;
                        int y = 0;
                        int cx = 0;
                        int cy = 0;
                        bool isFirst = true;
                        int xref = 0, yref = 0;
                        int finalcount = 0;

                        for (int i = 0; i < this.colorHeight * this.colorWidth; i++)
                        {
                            x++;
                            if (i % 1280 == 0)
                            {
                                x = 0;
                                y++;
                            }

                            // draw a square around the hand
                            int color_data = 255 << 16; // R
                            color_data |= 128 << 8;     // G
                            color_data |= 255 << 0;     // B
                            int ss = 150;
                            if (x - ss == ((int)handpositionGlobal.X) && (y - ss) < ((int)handpositionGlobal.Y) && (y + ss) > ((int)handpositionGlobal.Y))
                            {
                                colorPixels[i] = (uint)color_data;
                            }
                            if (x + ss == ((int)handpositionGlobal.X) && (y - ss) < ((int)handpositionGlobal.Y) && (y + ss) > ((int)handpositionGlobal.Y))
                            {
                                colorPixels[i] = (uint)color_data;
                            }
                            if (y + ss == ((int)handpositionGlobal.Y) && (x - ss) < ((int)handpositionGlobal.X) && (x + ss) > ((int)handpositionGlobal.X))
                            {
                                colorPixels[i] = (uint)color_data;
                            }
                            if (y - ss == ((int)handpositionGlobal.Y) && (x - ss) < ((int)handpositionGlobal.X) && (x + ss) > ((int)handpositionGlobal.X))
                            {
                                colorPixels[i] = (uint)color_data;
                            }

                            // inside the square
                            if ((y - ss) < ((int)handpositionGlobal.Y) && (y + ss) > ((int)handpositionGlobal.Y) &&
                                (x - ss) < ((int)handpositionGlobal.X) && (x + ss) > ((int)handpositionGlobal.X))
                            {
                                if (isFirst)
                                {
                                    xref = x;
                                    yref = y;
                                    isFirst = false;
                                }

                                // select pixels inside the selected depth band
                                if (depthPixels[i] < handpositionGlobal.Z + 50 && depthPixels[i] > handpositionGlobal.Z - 150 && depthPixels[i] != 0)
                                {
                                    // scale the actual depth
                                    float deep = (((int)depthPixels[i] - ((int)handpositionGlobal.Z - 150)) * 250) / 200;
                                    float deepF = deep / 255.0f;
                                    foto[y - yref, x - xref] = (uint)deep;
                                    uint xs = (uint)((x - xref) / 10);
                                    uint ys = (uint)((y - yref) / 10);
                                    if (deepF > lastImage[ys, xs] || lastImage[ys, xs] == 1)
                                    {
                                        lastImage[ys, xs] = deepF;
                                        var temp = System.Drawing.Color.FromArgb((int)deep, (int)deep, (int)deep);
                                        bm.SetPixel((int)ys, (int)xs, temp);
                                    }
                                }
                                else
                                {
                                    foto[y - yref, x - xref] = 1;
                                }
                            }
                        }
                    }
                }

                this.bitmapColorCamera.AddDirtyRect(region);
                this.bitmapColorCamera.Unlock();

                // save the cropped image when a recording was requested and the joint was tracked
                if (saveNext && actual > 0 && conf != JointConfidenceLevel.Low)
                {
                    WriteToFile(lastImage, status);
                    if (actual == 1)
                    {
                        saveNext = false;
                        _main.RecordFedbackFill = new SolidColorBrush(Colors.Green);
                    }
                    actual--;
                }
            }
        }
    }
}
private async void Recorder_Loaded()
{
    // body tracking
    using (Tracker tracker = Tracker.Create(this.kinect.GetCalibration(), new TrackerConfiguration() { ProcessingMode = TrackerProcessingMode.Gpu, SensorOrientation = SensorOrientation.Default }))
    // sensor camera
    using (Microsoft.Azure.Kinect.Sensor.Image transformedDepth = new Microsoft.Azure.Kinect.Sensor.Image(Microsoft.Azure.Kinect.Sensor.ImageFormat.Depth16, colorWidth, colorHeight, colorWidth * sizeof(UInt16)))
    {
        uint trackedBodyId = 0;
        JointConfidenceLevel conf = JointConfidenceLevel.Low;

        while (running)
        {
            // get capture
            using (Capture capture = await Task.Run(() => this.kinect.GetCapture()))
            {
                // create the output array that represents the image; 255 marks an empty cell
                float[,] actualPhoto = new float[30, 30];
                for (int i = 0; i < 30; i++)
                {
                    for (int j = 0; j < 30; j++)
                    {
                        actualPhoto[i, j] = 255;
                    }
                }

                this.transform.DepthImageToColorCamera(capture, transformedDepth);

                // Queue latest frame from the sensor.
                tracker.EnqueueCapture(capture);

                // get the position of the hand
                using (Microsoft.Azure.Kinect.BodyTracking.Frame frame = tracker.PopResult(TimeSpan.Zero, throwOnTimeout: false))
                {
                    if (frame != null && frame.NumberOfBodies >= 1)
                    {
                        if (trackedBodyId <= 0)
                        {
                            trackedBodyId = frame.GetBody(0).Id;
                        }

                        var numOfSkeletons = frame.NumberOfBodies;
                        var body = frame.GetBodySkeleton(0);
                        var pos = body.GetJoint(JointId.HandRight).Position;
                        var fpos = body.GetJoint(JointId.HandTipRight).Position;
                        var rpost = body.GetJoint(JointId.WristRight).Position;
                        conf = body.GetJoint(JointId.HandRight).ConfidenceLevel;
                        Quaternion orin = body.GetJoint(JointId.HandTipRight).Quaternion;

                        // transform the position to the color camera
                        var oldHand = HandPositionColor;

                        // set the hand for the prediction square
                        var handposition = kinect.GetCalibration().TransformTo2D(pos, CalibrationDeviceType.Depth, CalibrationDeviceType.Color);
                        HandPositionColor.X = handposition.Value.X;
                        HandPositionColor.Y = handposition.Value.Y;
                        // z is the same
                        HandPositionColor.Z = pos.Z;

                        if (conf == JointConfidenceLevel.Medium || conf == JointConfidenceLevel.High)
                        {
                            manager.SetPosition(pos);
                        }
                    }
                }

                // show the normal camera
                var color = capture.Color;
                unsafe
                {
                    using (var pin = color.Memory.Pin())
                    {
                        // we only use depth now
                        ushort* depthPixels = (ushort*)transformedDepth.Memory.Pin().Pointer;

                        int closest = 501;
                        int x = 0;
                        int y = 0;
                        int cx = 0;
                        int cy = 0;
                        bool isFirst = true;
                        int xref = 0, yref = 0;
                        int squareRadious = 150;

                        for (int i = 0; i < this.colorHeight * this.colorWidth; i++)
                        {
                            x++;
                            if (i % 1280 == 0)
                            {
                                x = 0;
                                y++;
                            }

                            // inside the square
                            if ((y - squareRadious) < ((int)HandPositionColor.Y) && (y + squareRadious) > ((int)HandPositionColor.Y) &&
                                (x - squareRadious) < ((int)HandPositionColor.X) && (x + squareRadious) > ((int)HandPositionColor.X))
                            {
                                if (isFirst)
                                {
                                    xref = x;
                                    yref = y;
                                    isFirst = false;
                                }

                                // track the closest pixel in the square
                                if (depthPixels[i] < closest)
                                {
                                    if (ox == 0)
                                    {
                                        ox = cx;
                                        oy = cy;
                                    }
                                    closest = depthPixels[i];
                                    cx = x;
                                    cy = y;
                                }

                                // select pixels inside the selected depth band
                                if (depthPixels[i] < HandPositionColor.Z + 50 && depthPixels[i] > HandPositionColor.Z - 150 && depthPixels[i] != 0)
                                {
                                    // scale the actual depth
                                    float deep = (((int)depthPixels[i] - ((int)HandPositionColor.Z - 150)) * 250) / 200;
                                    uint xs = (uint)((x - xref) / 10);
                                    uint ys = (uint)((y - yref) / 10);
                                    if ((uint)deep > actualPhoto[ys, xs] || actualPhoto[ys, xs] == 255)
                                    {
                                        actualPhoto[ys, xs] = deep;
                                    }
                                }
                            }
                        }
                        // manager.SetPosition(new Vector3(cx, cy, 0));
                    }
                }

                // queue the image for prediction when the joint was tracked
                if (conf != JointConfidenceLevel.Low)
                {
                    images.Add(actualPhoto);
                }
            }
        }
    }
}
// Show skeleton image
private async Task<BitmapSource> ShowSkeletonImage(Capture capture, Tracker tracker)
{
    tracker.EnqueueCapture(capture);

    // Try getting the latest tracker frame.
    using (Microsoft.Azure.Kinect.BodyTracking.Frame frame = tracker.PopResult(TimeSpan.Zero, throwOnTimeout: false))
    {
        if (frame != null)
        {
            // Save this frame for visualization in Renderer.
            // One can access frame data here and extract e.g. tracked bodies from it for the needed purpose.
            // Instead, for simplicity, we transfer the frame object to the rendering background thread.
            // This example shows that a frame popped from the tracker should be disposed. Since here it is
            // used on a different thread, we use the Reference method to prolong the lifetime of the frame object.
            // For reference on how to read frame data, please take a look at Renderer.NativeWindow_Render().
            this.visualFrameData.Frame = frame.Reference();
        }
        else
        {
            return null;
        }
    }

    float dpi = MyCanvas.getDpi();
    double Width = skeletonImageViewPane.ActualWidth;
    double Height = skeletonImageViewPane.ActualHeight;

    Task<BitmapSource> DrawSkeletonDataTask = Task.Run(() =>
    {
        var lastFrame = this.visualFrameData.TakeFrameWithOwnership();
        if (lastFrame == null || lastFrame.NumberOfBodies == 0)
        {
            return null;
        }

        // Get the first skeleton
        var skeleton = lastFrame.GetBodySkeleton(0);

        // Update the last post skeleton data
        UpdateLastPostSkeletonData(skeleton);

        // Draw the visual
        List<float> jointPosList = new List<float>();
        for (int jointId = 0; jointId < (int)JointId.Count; jointId++)
        {
            var joint = skeleton.GetJoint(jointId);
            jointPosList.Add(joint.Position.X);
            jointPosList.Add(joint.Position.Y);
        }

        float MarginX = 200.0f;
        float MarginY = 100.0f;
        jointPosList = this.utils.SkeletonDataAbs2Rel(jointPosList, (int)Width, (int)Height, MarginX, MarginY);

        DrawingVisual visual = new DrawingVisual();
        using (DrawingContext context = visual.RenderOpen())
        {
            const float radius = 3.0f;
            var boneColor = Color.FromRgb(255, 255, 255);
            Brush brush = new SolidColorBrush(boneColor);
            Pen whitePen = new Pen(Brushes.White, radius);
            Pen greenPen = new Pen(Brushes.LightGreen, radius);

            for (int jointId = 0; jointId < jointPosList.Count / 2; jointId++)
            {
                // Render the joint as an ellipse.
                var jointPoint = new Point(jointPosList[2 * jointId], jointPosList[2 * jointId + 1]);
                context.DrawEllipse(brush, whitePen, jointPoint, radius, radius);

                // Render the bone as a line.
                if (JointConnections.JointParent.TryGetValue((JointId)jointId, out JointId parentId))
                {
                    var startPos = new Point(jointPosList[2 * jointId], jointPosList[2 * jointId + 1]);
                    int parId = Convert.ToInt32(parentId);
                    var endPos = new Point(jointPosList[2 * parId], jointPosList[2 * parId + 1]);
                    if (SLRPostJoint.PostJointsList.Contains((JointId)jointId) && SLRPostJoint.PostJointsList.Contains(parentId))
                    {
                        context.DrawLine(greenPen, startPos, endPos);
                    }
                    else
                    {
                        context.DrawLine(whitePen, startPos, endPos);
                    }
                }
            }
        }

        BitmapSource skeletonBitmapSource = utils.ConvertVisual2BitmapSource(visual, Width, Height, dpi);
        skeletonBitmapSource.Freeze();
        return skeletonBitmapSource;
    });

    return await DrawSkeletonDataTask.ConfigureAwait(false);
}
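ShowSkeletonImage enqueues one capture and returns the rendered bitmap, so it is meant to be awaited once per capture. A minimal sketch of such a caller, assuming device/tracker/running fields like those in the other snippets; SkeletonImageView is a hypothetical Image control:

// Hypothetical driver loop for ShowSkeletonImage; 'device', 'tracker', 'running'
// and the SkeletonImageView Image control are assumed to exist elsewhere.
private async Task RunSkeletonViewAsync()
{
    while (running)
    {
        using (Capture capture = await Task.Run(() => device.GetCapture()))
        {
            BitmapSource skeleton = await ShowSkeletonImage(capture, tracker);
            if (skeleton != null)
            {
                SkeletonImageView.Source = skeleton;
            }
        }
    }
}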
private async void PictureRecorder_Loaded(object sender, RoutedEventArgs e)
{
    using (Tracker tracker = Tracker.Create(this.kinect.GetCalibration(), new TrackerConfiguration() { ProcessingMode = TrackerProcessingMode.Gpu, SensorOrientation = SensorOrientation.Default }))
    {
        while (true)
        {
            uint[,] foto = new uint[300, 300];
            bm = new Bitmap(300, 300);

            using (Microsoft.Azure.Kinect.Sensor.Image transformedDepth = new Microsoft.Azure.Kinect.Sensor.Image(
                       Microsoft.Azure.Kinect.Sensor.ImageFormat.Depth16, colorWidth, colorHeight, colorWidth * sizeof(UInt16)))
            using (Capture capture = await Task.Run(() => this.kinect.GetCapture()))
            {
                Text.Content = this.StatusText;
                this.transform.DepthImageToColorCamera(capture, transformedDepth);
                this.bitmap.Lock();

                var color = capture.Color;
                var region = new Int32Rect(0, 0, color.WidthPixels, color.HeightPixels);

                // Queue latest frame from the sensor.
                tracker.EnqueueCapture(capture);

                unsafe
                {
                    using (Microsoft.Azure.Kinect.BodyTracking.Frame frame = tracker.PopResult(TimeSpan.Zero, throwOnTimeout: false))
                    using (var pin = color.Memory.Pin())
                    {
                        // get the hand position
                        if (frame != null && frame.NumberOfBodies >= 1)
                        {
                            var body = frame.GetBodySkeleton(0);
                            var pos = body.GetJoint(JointId.HandRight).Position;
                            var temp = kinect.GetCalibration().TransformTo2D(pos, CalibrationDeviceType.Depth, CalibrationDeviceType.Color);
                            if (temp != null)
                            {
                                this.bitmap.WritePixels(region, (IntPtr)pin.Pointer, (int)color.Size, color.StrideBytes);
                            }

                            uint* colorPixels = (uint*)this.bitmap.BackBuffer;
                            ushort* depthPixels = (ushort*)transformedDepth.Memory.Pin().Pointer;

                            int closest = 501;
                            int x = 0;
                            int y = 0;
                            int cx = 0;
                            int cy = 0;
                            bool isFirst = true;
                            int xref = 0, yref = 0;

                            for (int i = 0; i < this.colorHeight * this.colorWidth; i++)
                            {
                                x++;
                                if (i % 1280 == 0)
                                {
                                    x = 0;
                                    y++;
                                }

                                // Compute the border pixel's color.
                                int color_data = 255 << 16; // R
                                color_data |= 128 << 8;     // G
                                color_data |= 255 << 0;     // B
                                int ss = 150;

                                // draw the square around the hand
                                if (x - ss == ((int)temp.Value.X) && (y - ss) < ((int)temp.Value.Y) && (y + ss) > ((int)temp.Value.Y))
                                {
                                    colorPixels[i] = (uint)color_data;
                                }
                                if (x + ss == ((int)temp.Value.X) && (y - ss) < ((int)temp.Value.Y) && (y + ss) > ((int)temp.Value.Y))
                                {
                                    colorPixels[i] = (uint)color_data;
                                }
                                if (y + ss == ((int)temp.Value.Y) && (x - ss) < ((int)temp.Value.X) && (x + ss) > ((int)temp.Value.X))
                                {
                                    colorPixels[i] = (uint)color_data;
                                }
                                if (y - ss == ((int)temp.Value.Y) && (x - ss) < ((int)temp.Value.X) && (x + ss) > ((int)temp.Value.X))
                                {
                                    colorPixels[i] = (uint)color_data;
                                }

                                // inside the square
                                if ((y - ss) < ((int)temp.Value.Y) && (y + ss) > ((int)temp.Value.Y) &&
                                    (x - ss) < ((int)temp.Value.X) && (x + ss) > ((int)temp.Value.X))
                                {
                                    if (isFirst)
                                    {
                                        xref = x;
                                        yref = y;
                                        isFirst = false;
                                    }

                                    if (depthPixels[i] < pos.Z + 100 && depthPixels[i] > pos.Z - 100 && depthPixels[i] != 0)
                                    {
                                        foto[y - yref, x - xref] = colorPixels[i];
                                        int deep = (((int)depthPixels[i] - ((int)pos.Z - 100)) * 250) / 200;
                                        bm.SetPixel(x - xref, y - yref, System.Drawing.Color.FromArgb(deep, deep, deep));
                                    }
                                    else
                                    {
                                        foto[y - yref, x - xref] = 0;
                                        bm.SetPixel(x - xref, y - yref, System.Drawing.Color.White);
                                    }
                                }

                                // track the closest pixel inside the depth band
                                if (depthPixels[i] < pos.Z + 100 && depthPixels[i] > pos.Z - 100 && depthPixels[i] != 0)
                                {
                                    if (depthPixels[i] < closest)
                                    {
                                        if (ox == 0)
                                        {
                                            ox = cx;
                                            oy = cy;
                                        }
                                        closest = depthPixels[i];
                                        cx = x;
                                        cy = y;
                                    }
                                    continue;
                                }
                            }

                            MoveEllipse(cx, cy);
                        }
                    }
                }

                this.bitmap.AddDirtyRect(region);
                this.bitmap.Unlock();

                Bitmap bitmap1;
                unsafe
                {
                    fixed (uint* intPtr = &foto[0, 0])
                    {
                        bitmap1 = new Bitmap(300, 300, 300 * 4, System.Drawing.Imaging.PixelFormat.Format32bppRgb, new IntPtr(intPtr));
                    }

                    MyIm.Source = this.bitmap;
                    var bitmapSource = System.Windows.Interop.Imaging.CreateBitmapSourceFromHBitmap(bitmap1.GetHbitmap(), IntPtr.Zero, Int32Rect.Empty, BitmapSizeOptions.FromEmptyOptions());
                    MyIm1.Source = bitmapSource;
                    var bitmapSource1 = System.Windows.Interop.Imaging.CreateBitmapSourceFromHBitmap(bm.GetHbitmap(), IntPtr.Zero, Int32Rect.Empty, BitmapSizeOptions.FromEmptyOptions());
                    MyIm2.Source = bitmapSource1;
                }
            }
        }
    }
}
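A caveat on the HBitmap calls above: GetHbitmap allocates a GDI handle that CreateBitmapSourceFromHBitmap does not free, so converting twice per frame leaks handles. A common workaround is to release the handle explicitly; DeleteObject is the standard gdi32 P/Invoke, while the helper name here is illustrative:

[System.Runtime.InteropServices.DllImport("gdi32.dll")]
private static extern bool DeleteObject(IntPtr hObject);

// Convert a System.Drawing.Bitmap to a WPF BitmapSource without leaking the HBITMAP.
private static BitmapSource ToBitmapSource(System.Drawing.Bitmap bitmap)
{
    IntPtr hBitmap = bitmap.GetHbitmap();
    try
    {
        return System.Windows.Interop.Imaging.CreateBitmapSourceFromHBitmap(
            hBitmap, IntPtr.Zero, Int32Rect.Empty, BitmapSizeOptions.FromEmptyOptions());
    }
    finally
    {
        DeleteObject(hBitmap); // release the GDI handle
    }
}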
private async void Window_Loaded(object sender, RoutedEventArgs e)
{
    Tracker tracker = Tracker.Create(this.kinect.GetCalibration(), new TrackerConfiguration() { ProcessingMode = TrackerProcessingMode.Gpu, SensorOrientation = SensorOrientation.Default });

    while (running)
    {
        using (Capture capture = await Task.Run(() => this.kinect.GetCapture()))
        {
            this.StatusText = "Received Capture: " + capture.Depth.DeviceTimestamp;
            this.bitmap.Lock();

            var color = capture.Color;
            var region = new Int32Rect(0, 0, color.WidthPixels, color.HeightPixels);

            unsafe
            {
                using (var pin = color.Memory.Pin())
                {
                    this.bitmap.WritePixels(region, (IntPtr)pin.Pointer, (int)color.Size, color.StrideBytes);

                    var bmpSource = BitmapSource.Create(color.WidthPixels, color.HeightPixels, 96.0, 96.0, PixelFormats.Bgr32, null, color.Memory.ToArray(), color.StrideBytes);

                    // JpegBitmapEncoder to save the BitmapSource to file;
                    // imageSerial is the serial of the sequential image
                    JpegBitmapEncoder encoder = new JpegBitmapEncoder();
                    encoder.Frames.Add(BitmapFrame.Create(bmpSource));
                    using (var fs = new FileStream("./img/" + (imageSerial++) + ".jpeg", FileMode.Create, FileAccess.Write))
                    {
                        encoder.Save(fs);
                    }
                }
            }

            this.bitmap.AddDirtyRect(region);
            this.bitmap.Unlock();

            tracker.EnqueueCapture(capture);
        }

        // Try getting the latest tracker frame.
        using (Frame lastFrame = tracker.PopResult(TimeSpan.Zero, throwOnTimeout: false))
        {
            if (lastFrame == null)
            {
                continue;
            }

            lock (lockObject)
            {
                List<Vector3> joints = new List<Vector3>();
                for (uint i = 0; i < lastFrame.NumberOfBodies; ++i)
                {
                    Skeleton skeleton = lastFrame.GetBodySkeleton(i);
                    var bodyId = lastFrame.GetBodyId(i);
                    for (int jointId = 0; jointId < (int)JointId.Count; ++jointId)
                    {
                        var joint = skeleton.GetJoint(jointId);
                        joints.Add(joint.Position / 1000);
                    }
                }
                jointsList.Add(joints);
            }
        }
    }
}
async private void WindowLoaded(object sender, RoutedEventArgs e)
{
    Device device;
    try
    {
        using (device = Device.Open(0))
        {
            device.StartCameras(new DeviceConfiguration
            {
                ColorFormat = ImageFormat.ColorBGRA32,
                ColorResolution = ColorResolution.R720p,
                DepthMode = DepthMode.NFOV_Unbinned,
                SynchronizedImagesOnly = true,
                CameraFPS = FPS.FPS30,
            });

            int colorWidth = device.GetCalibration().ColorCameraCalibration.ResolutionWidth;
            int colorHeight = device.GetCalibration().ColorCameraCalibration.ResolutionHeight;

            var calibration = device.GetCalibration(DepthMode.NFOV_Unbinned, ColorResolution.R720p);
            var trackerConfig = new TrackerConfiguration();
            trackerConfig.ProcessingMode = TrackerProcessingMode.Gpu;
            trackerConfig.SensorOrientation = SensorOrientation.Default;

            using (var tracker = Tracker.Create(calibration, trackerConfig))
            using (Transformation transform = device.GetCalibration().CreateTransformation())
            {
                while (true)
                {
                    using (Capture capture = await Task.Run(() => device.GetCapture()).ConfigureAwait(true))
                    {
                        Task<BitmapSource> createInputColorBitmapTask = Task.Run(() =>
                        {
                            Image color = capture.Color;
                            BitmapSource source = BitmapSource.Create(color.WidthPixels, color.HeightPixels, 96, 96, PixelFormats.Bgra32, null, color.Memory.ToArray(), color.StrideBytes);
                            source.Freeze();
                            return source;
                        });
                        this.inputColorBitmap = await createInputColorBitmapTask.ConfigureAwait(true);
                        this.InputColorImageViewPane.Source = inputColorBitmap;

                        tracker.EnqueueCapture(capture);
                        using (Microsoft.Azure.Kinect.BodyTracking.Frame frame = tracker.PopResult(TimeSpan.Zero, throwOnTimeout: false))
                        {
                            if (frame != null)
                            {
                                Console.WriteLine("Number Body: " + frame.NumberOfBodies);
                                if (frame.NumberOfBodies > 0)
                                {
                                    await SaveFile(this.inputColorBitmap);
                                    if (await callDetectDarknessServices().ConfigureAwait(true))
                                    {
                                        speech("The room is too dark, please turn on the light.");
                                        this.DarknessAlertCount = this.DarknessAlertCount + 1;
                                    }
                                    else if (await callDetectionObjectServices().ConfigureAwait(true))
                                    {
                                        speech("Please beware of objects on the floor. Please beware of objects on the floor.");
                                        this.ObjectAlertCount = this.ObjectAlertCount + 1;
                                    }
                                }
                            }
                        }
                    }

                    // Reset the daily alert counters shortly after midnight.
                    switch (DateTime.Now.ToString("HH:mm", System.Globalization.DateTimeFormatInfo.InvariantInfo))
                    {
                        case "00:00":
                        case "00:01":
                        case "00:02":
                            this.ObjectAlertCount = 0;
                            this.DarknessAlertCount = 0;
                            break;
                        default:
                            break;
                    }

                    await CallLineApiService().ConfigureAwait(true);
                }
            }
        }
    }
    catch (Exception err)
    {
        Console.WriteLine(err);
    }
}
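None of the loops above show teardown, and the snippets that create the tracker outside a using block never release it. On exit, the tracker and device should be shut down explicitly. A minimal sketch, assuming fields named tracker and device and the running flag some of the loops use:

// Hypothetical cleanup handler; 'tracker', 'device', and 'running' are the
// fields assumed by the snippets above.
private void Window_Closing(object sender, System.ComponentModel.CancelEventArgs e)
{
    running = false;       // let the capture loop drain out

    tracker?.Shutdown();   // stop the body tracking worker threads
    tracker?.Dispose();

    device?.StopCameras();
    device?.Dispose();
}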