/// <summary>
/// Depth stream callback: renders the newest depth frame into a shared bitmap
/// and asynchronously pushes a clone of it to the <c>p_depth</c> PictureBox.
/// Runs on the sensor's callback thread, not the UI thread.
/// </summary>
/// <param name="vStream">Stream that raised the new-frame event.</param>
private void depthStream_onNewFrame(VideoStream vStream)
{
    if (vStream.isValid && vStream.isFrameAvailable())
    {
        using (VideoFrameRef frame = vStream.readFrame())
        {
            if (frame.isValid)
            {
                // Lazily create a placeholder; updateBitmap resizes/replaces it below.
                if (this.depthBitmap == null)
                {
                    this.depthBitmap = new Bitmap(1, 1);
                }

                // NOTE(review): locking on a mutable field is fragile — the catch
                // branch reassigns this.depthBitmap while the lock still guards the
                // OLD object, so the UI delegate may briefly lock a different
                // instance. Consider a dedicated readonly gate object.
                lock (this.depthBitmap)
                {
                    try
                    {
                        // In-place update avoids a per-frame Bitmap allocation.
                        frame.updateBitmap(
                            this.depthBitmap,
                            VideoFrameRef.copyBitmapOptions.Force24BitRGB | VideoFrameRef.copyBitmapOptions.DepthInvert | VideoFrameRef.copyBitmapOptions.DepthFillShadow | VideoFrameRef.copyBitmapOptions.DepthHistogramEqualize | VideoFrameRef.copyBitmapOptions.DepthFillRigthBlack);
                    }
                    catch (Exception)
                    {
                        // updateBitmap throws when the existing bitmap is not
                        // compatible with the frame (e.g. size/format change);
                        // fall back to allocating a fresh bitmap.
                        this.depthBitmap = frame.toBitmap(
                            VideoFrameRef.copyBitmapOptions.Force24BitRGB | VideoFrameRef.copyBitmapOptions.DepthInvert | VideoFrameRef.copyBitmapOptions.DepthFillShadow | VideoFrameRef.copyBitmapOptions.DepthHistogramEqualize | VideoFrameRef.copyBitmapOptions.DepthFillRigthBlack);
                    }
                }

                // Marshal to the UI thread; clone the bitmap so the UI owns its copy.
                this.BeginInvoke(
                    (Action)delegate
                    {
                        lock (this.depthBitmap)
                        {
                            // Dispose the previous image to avoid GDI handle leaks.
                            if (this.p_depth.Image != null)
                            {
                                this.p_depth.Image.Dispose();
                            }

                            this.p_depth.Image = new Bitmap(this.depthBitmap);
                            this.p_depth.Refresh();
                        }
                    });
            }
        }
    }
}
/// <summary>
/// Get a frame from the device and process it.
/// </summary>
/// <param name="depthframe">Out. Sensor depth output.</param>
/// <param name="colorframe">Out. Sensor color output.</param>
/// <param name="interest">Out. List of interest points extracted from the map.</param>
private void ReadProcessFrame(out float[][] depthframe, out Color[] colorframe, out List<SparseItem> interest)
{
    // Grab one depth frame, convert it and release the native buffer immediately.
    using (VideoFrameRef depthRef = depth.ReadFrame())
    {
        depthframe = DepthFrameToArray(depthRef);
    }

    // Same for the color stream.
    using (VideoFrameRef colorRef = color.ReadFrame())
    {
        colorframe = ColorFrameToArray(colorRef);
    }

    // Keypoint extraction works on the converted arrays, not the raw frames.
    interest = ExtractKeypoints(colorframe, depthframe, (int)ResX, (int)ResY, 20);
}
/// <summary>
/// Get a grayscale image from the sensor internal data in float format.
/// Downsamples the depth map by averaging Delta x Delta pixel neighborhoods,
/// producing a [subwidth][subheight] jagged array.
/// </summary>
/// <param name="image">Original frame reference. Data is read as 16-bit depth samples.</param>
/// <returns>Image data array (column-major: array[x][y]).</returns>
private float[][] DepthFrameToArray(VideoFrameRef image)
{
    IntPtr data = image.Data;
    int width = image.VideoMode.Resolution.Width;
    int height = image.VideoMode.Resolution.Height;
    // NOTE(review): assumes width and height are exact multiples of Delta;
    // otherwise the flat index h drifts out of alignment — TODO confirm callers.
    int subwidth = width / Delta;
    int subheight = height / Delta;

    short[] copy = new short[width * height];
    Marshal.Copy(data, copy, 0, copy.Length);

    float[][] array = new float[subwidth][];
    for (int i = 0; i < array.Length; i++) {
        array[i] = new float[subheight];
    }

    // calculate averages in Delta x Delta neighborhoods.
    // h walks the source row-major; the k/m/i/n nesting visits, for each output
    // row k, Delta source rows (m), and within each row Delta samples (n) per
    // output column i — together covering each Delta x Delta cell exactly once.
    int h = 0;
    for (int k = 0; k < array[0].Length; k++) {
        for (int m = 0; m < Delta; m++) {
            for (int i = 0; i < array.Length; i++) {
                for (int n = 0; n < Delta; n++) {
                    // A zero sample means "no depth reading"; adding NaN poisons
                    // the whole cell average so invalid cells come out NaN —
                    // presumably intentional as a missing-data marker.
                    array[i][k] += (copy[h] == 0) ? float.NaN : copy[h];
                    h++;
                }
            }
        }
    }

    // Divide accumulated sums by the neighborhood size to get the mean.
    float alpha = 1.0f / (Delta * Delta);
    for (int i = 0; i < array.Length; i++) {
        for (int k = 0; k < array[0].Length; k++) {
            array[i][k] *= alpha;
        }
    }

    return(array);
}
/// <summary>
/// Get a color image from the sensor internal data in Color format.
/// Downsamples the RGB frame by averaging Delta x Delta pixel neighborhoods
/// into a flat row-major array of subwidth * subheight colors.
/// </summary>
/// <param name="image">Original frame reference. Data is read as packed 3-byte RGB.</param>
/// <returns>Image data array.</returns>
private Color[] ColorFrameToArray(VideoFrameRef image)
{
    IntPtr data = image.Data;
    int width = image.VideoMode.Resolution.Width;
    int height = image.VideoMode.Resolution.Height;
    // NOTE(review): assumes width and height divide evenly by Delta — TODO confirm.
    int subwidth = width / Delta;
    int subheight = height / Delta;

    byte[] copy = new byte[3 * width * height];
    Marshal.Copy(data, copy, 0, copy.Length);

    // Accumulate channel sums in float vectors to avoid byte overflow.
    Vector3[] lpvector = new Vector3[subwidth * subheight];
    Color[] lowpass = new Color[subwidth * subheight];

    // h walks the interleaved RGB source; m re-targets the same output row for
    // each of the Delta source scanlines (p) feeding it, so each output cell
    // receives Delta x Delta samples per channel.
    int h = 0;
    int m = 0;
    for (int k = 0; k < subheight; k++) {
        for (int p = 0; p < Delta; p++) {
            m = subwidth * k;
            for (int i = 0; i < subwidth; i++, m++) {
                for (int n = 0; n < Delta; n++) {
                    lpvector[m].X += copy[h++];
                    lpvector[m].Y += copy[h++];
                    lpvector[m].Z += copy[h++];
                }
            }
        }
    }

    // Normalize sums to means and pack into opaque Color values.
    float alpha = 1.0f / (Delta * Delta);
    for (m = 0; m < lowpass.Length; m++) {
        lowpass[m].R = (byte)(alpha * lpvector[m].X);
        lowpass[m].G = (byte)(alpha * lpvector[m].Y);
        lowpass[m].B = (byte)(alpha * lpvector[m].Z);
        lowpass[m].A = (byte)255;
    }

    return(lowpass);
}
/// <summary>
/// Depth stream callback: caches the newest valid frame in <c>depthFrame</c>
/// under <c>depthMutex</c> for a consumer to pick up.
/// </summary>
/// <param name="vStream">Stream that raised the new-frame event.</param>
void depthSensor_onNewFrame(VideoStream vStream)
{
    if (!vStream.isValid || !vStream.isFrameAvailable())
    {
        return;
    }

    VideoFrameRef frame = vStream.readFrame();
    if (!frame.isValid)
    {
        // Fix: release the rejected frame; it wraps a native buffer and
        // previously leaked on this path.
        frame.Dispose();
        return;
    }

    lock (depthMutex)
    {
        // Fix: dispose the previously cached frame before overwriting the
        // reference — otherwise every superseded frame leaks its native buffer.
        // NOTE(review): assumes consumers take ownership by nulling depthFrame
        // under this same lock — TODO confirm against the reader side.
        if (depthFrame != null)
        {
            depthFrame.Dispose();
        }

        depthFrame = frame;
    }
}
/// <summary>
/// Color stream callback: caches the newest valid frame in <c>colorFrame</c>
/// under <c>colorMutex</c> for a consumer to pick up.
/// </summary>
/// <param name="vStream">Stream that raised the new-frame event.</param>
void colorSensor_onNewFrame(VideoStream vStream)
{
    if (!vStream.isValid || !vStream.isFrameAvailable())
    {
        return;
    }

    VideoFrameRef frame = vStream.readFrame();
    if (!frame.isValid)
    {
        // Fix: release the rejected frame; it wraps a native buffer and
        // previously leaked on this path.
        frame.Dispose();
        return;
    }

    lock (colorMutex)
    {
        // Fix: dispose the previously cached frame before overwriting the
        // reference — otherwise every superseded frame leaks its native buffer.
        // NOTE(review): assumes consumers take ownership by nulling colorFrame
        // under this same lock — TODO confirm against the reader side.
        if (colorFrame != null)
        {
            colorFrame.Dispose();
        }

        colorFrame = frame;
    }
}
/// <summary>
/// Color stream callback: renders the newest color frame into a shared bitmap
/// and asynchronously pushes a clone of it to the <c>p_image</c> PictureBox.
/// Runs on the sensor's callback thread, not the UI thread.
/// </summary>
/// <param name="vStream">Stream that raised the new-frame event.</param>
private void colorStream_onNewFrame(VideoStream vStream)
{
    if (vStream.isValid && vStream.isFrameAvailable())
    {
        using (VideoFrameRef frame = vStream.readFrame())
        {
            if (frame.isValid)
            {
                // Lazily create a placeholder; updateBitmap resizes/replaces it below.
                if (this.colorBitmap == null)
                {
                    this.colorBitmap = new Bitmap(1, 1);
                }

                // NOTE(review): locking on a mutable field is fragile — the catch
                // branch reassigns this.colorBitmap while the lock still guards the
                // OLD object. Consider a dedicated readonly gate object.
                lock (this.colorBitmap)
                {
                    try
                    {
                        // In-place update avoids a per-frame Bitmap allocation.
                        frame.updateBitmap(this.colorBitmap, VideoFrameRef.copyBitmapOptions.Force24BitRGB);
                    }
                    catch (Exception)
                    {
                        // Thrown when the existing bitmap is incompatible with the
                        // frame (size/format change); allocate a fresh one instead.
                        this.colorBitmap = frame.toBitmap(VideoFrameRef.copyBitmapOptions.Force24BitRGB);
                    }
                }

                // Marshal to the UI thread; clone so the UI owns its own copy.
                this.BeginInvoke(
                    (Action)delegate
                    {
                        lock (this.colorBitmap)
                        {
                            // Dispose the previous image to avoid GDI handle leaks.
                            if (this.p_image.Image != null)
                            {
                                this.p_image.Image.Dispose();
                            }

                            this.p_image.Image = new Bitmap(this.colorBitmap);
                            this.p_image.Refresh();
                        }
                    });
            }
        }
    }
}
/// <summary>
/// Depth stream callback (WPF): converts the frame to a GDI+ bitmap, re-encodes
/// it as a BMP in memory, wraps it in a frozen <see cref="BitmapImage"/> and
/// assigns it to <c>ImgDepth</c> on the dispatcher thread.
/// </summary>
/// <param name="stream">Stream that raised the new-frame event.</param>
private void OpenNI2_OnNewFrame(VideoStream stream)
{
    if (stream != null && stream.IsValid && stream.IsFrameAvailable())
    {
        using (VideoFrameRef frame = stream.ReadFrame())
        {
            if (frame.IsValid)
            {
                VideoFrameRef.CopyBitmapOptions options = VideoFrameRef.CopyBitmapOptions.Force24BitRgb | VideoFrameRef.CopyBitmapOptions.DepthFillShadow;
                try
                {
                    // In-place update avoids allocating a Bitmap per frame.
                    // NOTE(review): throws if mBitmap is still null on the first
                    // frame — the catch below then allocates it, so this works,
                    // but an explicit null check would be cleaner.
                    frame.UpdateBitmap(this.mBitmap, options);
                }
                catch (Exception)
                {
                    // Happens when Bitmap object is not compatible with returned Frame
                    this.mBitmap = frame.ToBitmap(options);
                }

                // Bridge GDI+ -> WPF: serialize to an in-memory BMP and decode it.
                // BitmapCacheOption.OnLoad fully reads the stream during EndInit,
                // so disposing the MemoryStream afterwards is safe.
                using (MemoryStream memory = new MemoryStream())
                {
                    this.mBitmap.Save(memory, ImageFormat.Bmp);
                    memory.Position = 0;
                    this.mImage = new BitmapImage();
                    this.mImage.BeginInit();
                    this.mImage.StreamSource = memory;
                    this.mImage.CacheOption = BitmapCacheOption.OnLoad;
                    this.mImage.EndInit();
                    // Freeze so the image can cross from this callback thread
                    // to the dispatcher thread.
                    this.mImage.Freeze();
                }

                // Synchronous hop to the UI thread to publish the new image.
                this.Dispatcher.Invoke((Action)(() => { this.ImgDepth.Source = this.mImage; }));
            }
        }
    }
}
/// <summary>
/// Uploads the latest 8-bit grayscale frame into the mapped Direct3D texture,
/// expanding each sample to opaque RGBA (gray, gray, gray, 255).
/// </summary>
public void UpdateTexture()
{
    int width = 640;
    int height = 480;
    DataBox mapSubresource = context.DeviceManager.Context.MapSubresource(TextureResource, 0, MapMode.WriteDiscard, MapFlags.None);

    // Fix: VideoFrameRef wraps a native buffer and must be disposed;
    // previously it leaked once per rendered frame.
    using (VideoFrameRef vidRef = videoStream.ReadFrame())
    {
        byte[] bits = new byte[width * height];
        Marshal.Copy(vidRef.Data, bits, 0, bits.Length);

        // Fix: preallocate the RGBA buffer instead of growing a List<byte>
        // element-by-element (1.2M Add calls per frame).
        byte[] rgba = new byte[width * height * 4];
        int o = 0;
        for (int i = 0; i < width * height; i++)
        {
            byte gray = bits[i];
            rgba[o++] = gray;
            rgba[o++] = gray;
            rgba[o++] = gray;
            rgba[o++] = 255; // fully opaque
        }

        mapSubresource.Data.Seek(0, SeekOrigin.Begin);
        mapSubresource.Data.WriteRange(rgba);
    }

    context.DeviceManager.Context.UnmapSubresource(TextureResource, 0);
}
/// <summary>
/// Uploads the latest 16-bit depth frame into the mapped Direct3D texture,
/// scaling each depth sample by MaxDistance into an opaque grayscale RGBA pixel.
/// </summary>
public void UpdateTexture()
{
    int width = 640;
    int height = 480;
    DataBox mapSubresource = context.DeviceManager.Context.MapSubresource(TextureResource, 0, MapMode.WriteDiscard, MapFlags.None);

    // Fix: VideoFrameRef wraps a native buffer and must be disposed;
    // previously it leaked once per rendered frame.
    using (VideoFrameRef vidRef = videoStream.readFrame())
    {
        byte[] bits = new byte[width * height * 2];
        Marshal.Copy(vidRef.Data, bits, 0, bits.Length);

        // Fix: preallocate the RGBA buffer instead of growing a List<byte>
        // element-by-element (1.2M Add calls per frame).
        byte[] rgba = new byte[width * height * 4];
        int o = 0;
        for (int i = 0; i < width * height; i++)
        {
            // Map depth (millimeters, presumably) to 0..255, clamped at MaxDistance.
            UInt16 col = BitConverter.ToUInt16(bits, i * 2);
            byte color = (byte)Math.Min(col * 255 / MaxDistance, 255);
            rgba[o++] = color;
            rgba[o++] = color;
            rgba[o++] = color;
            rgba[o++] = 255; // fully opaque
        }

        mapSubresource.Data.Seek(0, SeekOrigin.Begin);
        mapSubresource.Data.WriteRange(rgba);
    }

    context.DeviceManager.Context.UnmapSubresource(TextureResource, 0);
}
/// <summary>
/// Uploads the latest color frame into the mapped Direct3D texture, tinting the
/// pixels belonging to the cursor user by skeleton state (yellow = calibrating,
/// green = tracked, cyan = none) and overlaying skeleton bone lines for all
/// tracked users, colored by joint confidence.
/// </summary>
public void UpdateTexture()
{
    int width = 640;
    int height = 480;
    DataBox mapSubresource = context.DeviceManager.Context.MapSubresource(TextureResource, 0, MapMode.WriteDiscard, MapFlags.None);
    // NOTE(review): vidRef is never disposed here, unlike the using-wrapped
    // frames elsewhere in this file — likely a per-frame native buffer leak;
    // verify against the wrapper's ownership rules.
    VideoFrameRef vidRef = videoStream.ReadFrame();
    UserTrackerFrameRef usrRef = KinectDevice.CurrentUserTrackerFrameRef;
    IntPtr intPtr = usrRef.UserMap.Pixels;
    byte[] bits = new byte[width * height * 3];   // source RGB, 3 bytes/pixel
    byte[] ubits = new byte[width * height * 2];  // user map, int16 user-id/pixel
    List<byte> drawed = new List<byte>();
    Marshal.Copy(vidRef.Data, bits, 0, width * height * 3);
    Marshal.Copy(intPtr, ubits, 0, width * height * 2);
    mapSubresource.Data.Seek(0, SeekOrigin.Begin);
    UserData cursorUser = KinectDevice.CurrentUserTrackerFrameRef.GetUserById(KinectDevice.UserCursor);
    var targetSkel = cursorUser.Skeleton;

    // Per-pixel pass: tint cursor-user pixels by skeleton state, otherwise
    // copy the RGB sample through with full alpha.
    for (int i = 0; i < width * height; i++)
    {
        short uid = BitConverter.ToInt16(ubits, i * 2);
        if (cursorUser.IsValid && uid == KinectDevice.UserCursor)
        {
            if (targetSkel.State == Skeleton.SkeletonState.Calibrating)
            {
                // Yellow while calibrating.
                drawed.Add(255); drawed.Add(255); drawed.Add(0); drawed.Add(255);
                continue;
            }
            else if (targetSkel.State == Skeleton.SkeletonState.Tracked)
            {
                // Green once tracked.
                drawed.Add(0); drawed.Add(255); drawed.Add(0); drawed.Add(255);
                continue;
            }
            else if (targetSkel.State == Skeleton.SkeletonState.None)
            {
                // Cyan when no skeleton; other states fall through to raw color.
                drawed.Add(0); drawed.Add(255); drawed.Add(255); drawed.Add(255);
                continue;
            }
        }

        drawed.Add(bits[i * 3]);
        drawed.Add(bits[i * 3 + 1]);
        drawed.Add(bits[i * 3 + 2]);
        drawed.Add(255);
    }

    // Overlay pass: draw each configured bone as a 2px line; color fades from
    // red (low confidence) to blue (high confidence).
    foreach (var trackedUser in KinectDevice.TrackedUsers)
    {
        UserData TargetUser = trackedUser.Value;
        foreach (var drawJoint in drawJoints)
        {
            SkeletonJoint j1 = TargetUser.Skeleton.GetJoint(drawJoint.Item1);
            SkeletonJoint j2 = TargetUser.Skeleton.GetJoint(drawJoint.Item2);
            PointF p1 = KinectDevice.NiteUserTracker.ConvertJointCoordinatesToDepth(j1.Position);
            PointF p2 = KinectDevice.NiteUserTracker.ConvertJointCoordinatesToDepth(j2.Position);
            byte blue = (byte)(255f * (j1.PositionConfidence + j2.PositionConfidence) / 2f);
            DrawLine((int)p1.X, (int)p1.Y,
                (int)p2.X, (int)p2.Y, 2, new byte[] { (byte)(255 - blue), 0, blue, 255 }, drawed);
        }
    }

    mapSubresource.Data.WriteRange(drawed.ToArray());
    context.DeviceManager.Context.UnmapSubresource(TextureResource, 0);
}
/// <summary>
/// Stream callback: renders the newest frame into the shared bitmap with
/// options driven by the UI checkboxes (invert / histogram-equalize / shadow
/// fill side chosen by hardware mirroring), optionally soft-mirrors it, then
/// triggers a repaint when the PictureBox is hidden.
/// Runs on the sensor's callback thread.
/// </summary>
/// <param name="videoStream">Stream that raised the new-frame event.</param>
private void CurrentSensorOnNewFrame(VideoStream videoStream)
{
    if (videoStream.IsValid && videoStream.IsFrameAvailable())
    {
        using (VideoFrameRef frame = videoStream.ReadFrame())
        {
            if (frame.IsValid)
            {
                VideoFrameRef.CopyBitmapOptions options = VideoFrameRef.CopyBitmapOptions.Force24BitRgb | VideoFrameRef.CopyBitmapOptions.DepthFillShadow;
                if (this.cb_invert.Checked)
                {
                    options |= VideoFrameRef.CopyBitmapOptions.DepthInvert;
                }

                if (this.cb_equal.Checked)
                {
                    options |= VideoFrameRef.CopyBitmapOptions.DepthHistogramEqualize;
                }

                if (this.cb_fill.Checked)
                {
                    // The side to blacken depends on whether the stream is
                    // hardware-mirrored.
                    options |= videoStream.Mirroring ? VideoFrameRef.CopyBitmapOptions.DepthFillRigthBlack : VideoFrameRef.CopyBitmapOptions.DepthFillLeftBlack;
                }

                lock (this.bitmap)
                {
                    // Instead of creating a bitmap object for each frame, update
                    // the one we have — but be careful in multi-thread situations.
                    try
                    {
                        frame.UpdateBitmap(this.bitmap, options);
                    }
                    catch (Exception)
                    {
                        // Happens when our Bitmap object is not compatible with returned Frame
                        this.bitmap = frame.ToBitmap(options);
                    }
                    // Alternative: frame.ToBitmap(options) every time is a little
                    // slower but thread-safe without the in-place update dance.

                    if (this.cb_mirrorSoft.Checked)
                    {
                        this.bitmap.RotateFlip(RotateFlipType.RotateNoneFlipX);
                    }
                }

                // Sample alternative (kept from the original tutorial comments):
                // the bitmap could instead be pushed to a PictureBox here via
                // BeginInvoke, locking the bitmap on the UI thread and cloning it
                // (new Bitmap(bitmap, bitmap.Size)) before assigning pb_image.Image,
                // disposing the previous image first. Cloning is unnecessary when
                // using ToBitmap() exclusively, since each frame gets its own object.

                // When the PictureBox is hidden, this form paints the bitmap
                // itself, so request a repaint.
                if (!this.pb_image.Visible)
                {
                    this.Invalidate();
                }
            }
        }
    }
}
/// <summary>
/// NiTE hand-tracker callback: redraws the preview bitmap (black background,
/// tracked hands as white ellipses, or a prompt when no hand is tracked),
/// starts tracking on completed gestures, then updates FPS/title and pushes a
/// clone of the image to <c>pb_preview</c> on the UI thread, releasing the
/// frame afterwards.
/// </summary>
/// <param name="handTracker">Tracker that raised the new-data event.</param>
// ReSharper disable once ParameterHidesMember
private void HandTrackerOnNewData(HandTracker handTracker)
{
    if (!handTracker.IsValid)
    {
        return;
    }

    HandTrackerFrameRef frame = handTracker.ReadFrame();
    if (frame == null || !frame.IsValid)
    {
        return;
    }

    // NOTE(review): locking on this.image while reassigning it below is fragile;
    // a dedicated readonly gate object would be safer.
    lock (this.image)
    {
        // Recreate the backing bitmap only when the depth resolution changes.
        using (VideoFrameRef depthFrame = frame.DepthFrame)
        {
            if (this.image.Width != depthFrame.FrameSize.Width || this.image.Height != depthFrame.FrameSize.Height)
            {
                this.image = new Bitmap(
                    depthFrame.FrameSize.Width,
                    depthFrame.FrameSize.Height,
                    PixelFormat.Format24bppRgb);
            }
        }

        using (Graphics g = Graphics.FromImage(this.image))
        {
            g.FillRectangle(Brushes.Black, new Rectangle(new Point(0, 0), this.image.Size));

            // A completed gesture (e.g. wave/click) starts hand tracking there.
            foreach (GestureData gesture in frame.Gestures)
            {
                if (gesture.IsComplete)
                {
                    handTracker.StartHandTracking(gesture.CurrentPosition);
                }
            }

            if (frame.Hands.Length == 0)
            {
                g.DrawString("Raise your hand", SystemFonts.DefaultFont, Brushes.White, 10, 10);
            }
            else
            {
                // Fix: Pen is IDisposable; the original allocated a new Pen per
                // tracked hand on every frame and never disposed any of them.
                using (Pen handPen = new Pen(Brushes.White, 5))
                {
                    foreach (HandData hand in frame.Hands)
                    {
                        if (hand.IsTracking)
                        {
                            // Convert world-space hand position to depth-pixel
                            // coordinates and center a small ellipse on it.
                            Point handPosEllipse = new Point();
                            PointF handPos = handTracker.ConvertHandCoordinatesToDepth(hand.Position);
                            handPosEllipse.X = (int)handPos.X - 5;
                            handPosEllipse.Y = (int)handPos.Y - 5;
                            g.DrawEllipse(handPen, new Rectangle(handPosEllipse, new Size(5, 5)));
                        }
                    }
                }
            }

            g.Save();
        }
    }

    // Publish on the UI thread; the frame is released there after its
    // timestamp/index have been consumed.
    this.Invoke(
        new MethodInvoker(
            delegate
            {
                // Exponentially smoothed FPS from frame timestamps (microseconds).
                this.fps = ((1000000 / (frame.Timestamp - this.lastTime)) + (this.fps * 4)) / 5;
                this.lastTime = frame.Timestamp;
                this.Text = @"Frame #" + frame.FrameIndex + @" - Time: " + frame.Timestamp + @" - FPS: " + this.fps;
                // Clone so the PictureBox owns its copy of the shared bitmap.
                this.pb_preview.Image = this.image.Clone(
                    new Rectangle(new Point(0, 0), this.image.Size),
                    PixelFormat.Format24bppRgb);
                frame.Release();
            }));
}
/// <summary>
/// Stream callback: renders the newest frame into the shared bitmap, applies a
/// smoothed "SmartCam" crop that follows the active user, optionally soft-mirrors
/// the result, broadcasts it, and pushes a scaled clone to <c>pb_image</c> on the
/// UI thread. Runs on the sensor's callback thread; skipped entirely while idle.
/// </summary>
/// <param name="vStream">Stream that raised the new-frame event.</param>
private void CurrentSensorOnNewFrame(VideoStream vStream)
{
    if (vStream.IsValid && vStream.IsFrameAvailable() && !this.isIdle)
    {
        using (VideoFrameRef frame = vStream.ReadFrame())
        {
            if (frame.IsValid)
            {
                // NOTE(review): locking on a mutable field that the catch branch
                // reassigns; a dedicated gate object would be safer.
                lock (this.bitmap)
                {
                    try
                    {
                        // In-place update avoids a per-frame Bitmap allocation.
                        frame.UpdateBitmap(this.bitmap, this.renderOptions);
                    }
                    catch (Exception)
                    {
                        // Thrown when the existing bitmap is incompatible with
                        // the frame (size/format change); allocate a fresh one.
                        this.bitmap = frame.ToBitmap(this.renderOptions);
                    }

                    // Target crop defaults to the full frame.
                    Rectangle position = new Rectangle(new Point(0, 0), this.bitmap.Size);
                    if (this.currentCropping == Rectangle.Empty)
                    {
                        this.currentCropping = position;
                    }

                    if (Settings.Default.SmartCam)
                    {
                        if (this.activeUserId > 0)
                        {
                            // activePosition holds the active user's bounds as
                            // fractions of the frame; convert to pixels, then
                            // widen to the frame's aspect ratio around the user
                            // and clamp inside the frame.
                            position.X = (int)(this.activePosition.X * this.bitmap.Size.Width);
                            position.Width = (int)(this.activePosition.Width * this.bitmap.Size.Width);
                            position.Y = (int)(this.activePosition.Y * this.bitmap.Size.Height);
                            position.Height = (int)(this.activePosition.Height * this.bitmap.Size.Height);
                            position.Width = (int)(((Decimal)this.bitmap.Size.Width / this.bitmap.Size.Height) * position.Height);
                            position.X -= (position.Width / 2);
                            position.X = Math.Max(position.X, 0);
                            position.X = Math.Min(position.X, this.bitmap.Size.Width - position.Width);
                            position.Y = Math.Max(position.Y, 0);
                            position.Y = Math.Min(position.Y, this.bitmap.Size.Height - position.Height);
                        }
                    }

                    // Move the current crop toward the target in capped steps so
                    // the virtual camera glides instead of jumping; small
                    // differences (<=8px position, <=5px size) are ignored to
                    // avoid jitter.
                    if (this.currentCropping != position)
                    {
                        if (Math.Abs(position.X - this.currentCropping.X) > 8 || Math.Abs(position.Width - this.currentCropping.Width) > 5)
                        {
                            this.currentCropping.X += Math.Min(
                                position.X - this.currentCropping.X,
                                this.bitmap.Size.Width / 50);
                            this.currentCropping.Width += Math.Min(
                                position.Width - this.currentCropping.Width,
                                this.bitmap.Size.Width / 25);
                        }

                        if (Math.Abs(position.Y - this.currentCropping.Y) > 8 || Math.Abs(position.Height - this.currentCropping.Height) > 5)
                        {
                            this.currentCropping.Y += Math.Min(
                                position.Y - this.currentCropping.Y,
                                this.bitmap.Size.Height / 50);
                            this.currentCropping.Height += Math.Min(
                                position.Height - this.currentCropping.Height,
                                this.bitmap.Size.Height / 25);
                        }
                    }

                    // Apply the crop by stretching the cropped region back over
                    // the full bitmap (draws the bitmap onto itself).
                    if (this.currentCropping.Size != this.bitmap.Size)
                    {
                        using (Graphics g = Graphics.FromImage(this.bitmap))
                        {
                            if (this.currentCropping != Rectangle.Empty)
                            {
                                g.DrawImage(
                                    this.bitmap,
                                    new Rectangle(new Point(0, 0), this.bitmap.Size),
                                    this.currentCropping,
                                    GraphicsUnit.Pixel);
                            }

                            g.Save();
                        }
                    }

                    if (this.softMirror)
                    {
                        this.bitmap.RotateFlip(RotateFlipType.RotateNoneFlipX);
                    }
                }

                // Broadcast outside the lock; re-check idle since it may have
                // changed while rendering.
                if (!this.isIdle)
                {
                    this.broadcaster.SendBitmap(this.bitmap);
                }

                // UI update: clone scaled to the PictureBox size, disposing the
                // previous image to avoid GDI handle leaks.
                this.BeginInvoke(
                    (Action)delegate
                    {
                        if (!this.isIdle)
                        {
                            lock (this.bitmap)
                            {
                                if (this.pb_image.Image != null)
                                {
                                    this.pb_image.Image.Dispose();
                                }

                                this.pb_image.Image = new Bitmap(this.bitmap, this.pb_image.Size);
                                this.pb_image.Refresh();
                            }
                        }
                    });
            }
        }
    }
}