public static HandTracker Create(Device device = null)
{
    IntPtr deviceHandle = IntPtr.Zero;
    if (device != null && device.isValid)
        deviceHandle = device.Handle;
    IntPtr handle;
    NiTE.throwIfError(HandTracker_create(out handle, deviceHandle));
    HandTracker ut = new HandTracker(handle);
    ut.handler_events = HandTracker_RegisterListener(handle, ut.internal_listener);
    return ut;
}
void hTracker_onNewData(HandTracker hTracker)
{
    if (!hTracker.isValid)
        return;

    using (HandTrackerFrameRef frame = hTracker.readFrame())
    {
        if (!frame.isValid)
            return;

        lock (image)
        {
            // Recreate the preview bitmap if the depth frame size has changed.
            using (OpenNIWrapper.VideoFrameRef depthFrame = frame.DepthFrame)
            {
                if (image.Width != depthFrame.FrameSize.Width || image.Height != depthFrame.FrameSize.Height)
                    image = new Bitmap(
                        depthFrame.FrameSize.Width,
                        depthFrame.FrameSize.Height,
                        System.Drawing.Imaging.PixelFormat.Format24bppRgb);
            }

            using (Graphics g = Graphics.FromImage(image))
            {
                g.FillRectangle(Brushes.Black, new Rectangle(new Point(0, 0), image.Size));

                // Start tracking every hand that completed the raise gesture.
                foreach (GestureData gesture in frame.Gestures)
                    if (gesture.isComplete)
                        hTracker.startHandTracking(gesture.CurrentPosition);

                if (frame.Hands.Length == 0)
                    g.DrawString("Raise your hand", SystemFonts.DefaultFont, Brushes.White, 10, 10);
                else
                    foreach (HandData hand in frame.Hands)
                    {
                        if (hand.isTracking)
                        {
                            // Convert the hand's world position to depth-map coordinates and mark it.
                            Point HandPosEllipse = new Point();
                            PointF HandPos = hTracker.ConvertHandCoordinatesToDepth(hand.Position);
                            HandPosEllipse.X = (int)HandPos.X - 5;
                            HandPosEllipse.Y = (int)HandPos.Y - 5;
                            g.DrawEllipse(new Pen(Brushes.White, 5), new Rectangle(HandPosEllipse, new Size(5, 5)));
                        }
                    }

                g.Save();
            }
        }

        this.Invoke(new MethodInvoker(delegate()
        {
            // Timestamps are in microseconds; keep a smoothed FPS estimate.
            fps = ((1000000 / (frame.Timestamp - lastTime)) + (fps * 4)) / 5;
            lastTime = frame.Timestamp;
            this.Text = "Frame #" + frame.FrameIndex.ToString() + " - Time: " + frame.Timestamp.ToString()
                        + " - FPS: " + fps.ToString();
            pb_preview.Image = image.Clone(
                new Rectangle(new Point(0, 0), image.Size),
                System.Drawing.Imaging.PixelFormat.Format24bppRgb);
        }));
    }
}
private void button1_Click(object sender, EventArgs e)
{
    hTracker = HandTracker.Create();
    btn_start.Enabled = false;
    HandleError(hTracker.StartGestureDetection(GestureData.GestureType.HAND_RAISE));
    hTracker.onNewData += new HandTracker.HandTrackerListener(hTracker_onNewData); // FIXED Jun 2013
    ///* Because of an incompatibility between the current versions of OpenNI and NiTE,
    // * we can't use event-based reading, so we put our sample in a loop.
    // * You can copy OpenNI.dll from version 2.0 to solve this problem.
    // * Then you can uncomment the line of code above and comment out the lines below.
    // */
    //while (this.IsHandleCreated)
    //{
    //    hTracker_onNewData(hTracker);
    //    Application.DoEvents();
    //}
}
// ReSharper disable once ParameterHidesMember
private void HandTrackerOnNewData(HandTracker handTracker)
{
    if (!handTracker.IsValid)
    {
        return;
    }

    HandTrackerFrameRef frame = handTracker.ReadFrame();
    if (frame == null || !frame.IsValid)
    {
        return;
    }

    lock (this.image)
    {
        using (VideoFrameRef depthFrame = frame.DepthFrame)
        {
            if (this.image.Width != depthFrame.FrameSize.Width
                || this.image.Height != depthFrame.FrameSize.Height)
            {
                this.image = new Bitmap(
                    depthFrame.FrameSize.Width,
                    depthFrame.FrameSize.Height,
                    PixelFormat.Format24bppRgb);
            }
        }

        using (Graphics g = Graphics.FromImage(this.image))
        {
            g.FillRectangle(Brushes.Black, new Rectangle(new Point(0, 0), this.image.Size));
            foreach (GestureData gesture in frame.Gestures)
            {
                if (gesture.IsComplete)
                {
                    handTracker.StartHandTracking(gesture.CurrentPosition);
                }
            }

            if (frame.Hands.Length == 0)
            {
                g.DrawString("Raise your hand", SystemFonts.DefaultFont, Brushes.White, 10, 10);
            }
            else
            {
                foreach (HandData hand in frame.Hands)
                {
                    if (hand.IsTracking)
                    {
                        Point handPosEllipse = new Point();
                        PointF handPos = handTracker.ConvertHandCoordinatesToDepth(hand.Position);
                        handPosEllipse.X = (int)handPos.X - 5;
                        handPosEllipse.Y = (int)handPos.Y - 5;
                        g.DrawEllipse(new Pen(Brushes.White, 5), new Rectangle(handPosEllipse, new Size(5, 5)));
                    }
                }
            }

            g.Save();
        }
    }

    this.Invoke(
        new MethodInvoker(
            delegate
                {
                    this.fps = ((1000000 / (frame.Timestamp - this.lastTime)) + (this.fps * 4)) / 5;
                    this.lastTime = frame.Timestamp;
                    this.Text = @"Frame #" + frame.FrameIndex + @" - Time: " + frame.Timestamp + @" - FPS: "
                                + this.fps;
                    this.pb_preview.Image = this.image.Clone(
                        new Rectangle(new Point(0, 0), this.image.Size),
                        PixelFormat.Format24bppRgb);
                    frame.Release();
                }));
}
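The FPS readout in the handler above is a simple running average: frame timestamps are reported in microseconds, so 1000000 divided by the timestamp delta is the instantaneous frame rate, which is then blended 1:4 with the previous estimate (an exponential moving average with alpha = 0.2). A minimal sketch of the same smoothing, pulled out into its own method with illustrative field and method names:

// Minimal sketch of the FPS smoothing used above; names are illustrative, not part of the sample.
// 1,000,000 / delta gives instantaneous FPS from microsecond timestamps; blending it 1:4 with
// the previous value keeps the readout stable.
private long lastTimestamp;
private double smoothedFps;

private void UpdateFps(long frameTimestamp)
{
    long delta = frameTimestamp - this.lastTimestamp;
    if (delta > 0)
    {
        double instantFps = 1000000.0 / delta;
        this.smoothedFps = (instantFps + (this.smoothedFps * 4)) / 5;
    }

    this.lastTimestamp = frameTimestamp;
}

For example, two frames 33,333 microseconds apart give an instantaneous rate of about 30 FPS, and the displayed value moves only a fifth of the way toward it each frame.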
private bool Start()
{
    RegisterFilter();

    // Refuse to start if another instance is already broadcasting.
    if (this.isIdle && this.broadcaster.HasServer())
    {
        MessageBox.Show(
            @"Only one server is allowed.",
            @"Multi-Server",
            MessageBoxButtons.OK,
            MessageBoxIcon.Error);
        return false;
    }

    // Reuse the currently open device and sensor when they still match the saved settings.
    bool isSameDevice = this.currentDevice != null && this.currentDevice.IsValid
                        && this.currentDevice.DeviceInfo.Uri == Settings.Default.DeviceURI;
    bool isSameSensor = isSameDevice && this.currentSensor != null && this.currentSensor.IsValid
                        && this.currentSensor.SensorInfo.GetSensorType()
                        == (Device.SensorType)Settings.Default.CameraType;

    if (!isSameDevice)
    {
        if (Settings.Default.DeviceURI == string.Empty)
        {
            this.currentDevice = null;
            MessageBox.Show(
                @"Please select a device to open and then click Apply.",
                @"Device Open",
                MessageBoxButtons.OK,
                MessageBoxIcon.Warning);
            return false;
        }
    }

    if (!isSameSensor)
    {
        if (Settings.Default.CameraType == -1)
        {
            this.currentDevice = null;
            MessageBox.Show(
                @"Please select a sensor to open and then click Apply.",
                @"Sensor Create",
                MessageBoxButtons.OK,
                MessageBoxIcon.Warning);
            return false;
        }
    }

    if (!isSameDevice)
    {
        try
        {
            this.currentDevice = Device.Open(Settings.Default.DeviceURI);
        }
        catch (Exception ex)
        {
            this.currentDevice = null;
            MessageBox.Show(
                string.Format("Cannot open the selected device. {0}", ex.Message),
                @"Device Open",
                MessageBoxButtons.OK,
                MessageBoxIcon.Error);
            return false;
        }
    }

    if (!isSameSensor)
    {
        try
        {
            this.currentSensor = this.currentDevice.CreateVideoStream((Device.SensorType)Settings.Default.CameraType);
            this.currentSensor.OnNewFrame += this.CurrentSensorOnNewFrame;
        }
        catch (Exception ex)
        {
            this.currentSensor = null;
            MessageBox.Show(
                string.Format("Cannot open the selected sensor from the selected device. {0}", ex.Message),
                @"Sensor Create",
                MessageBoxButtons.OK,
                MessageBoxIcon.Error);
            return false;
        }
    }
    else
    {
        this.currentSensor.Stop();
    }

    // Select a suitable video mode for the sensor type.
    VideoMode[] vmodes = this.currentSensor.SensorInfo.GetSupportedVideoModes().ToArray();
    VideoMode selectedVideoMode = null;
    switch (this.currentSensor.SensorInfo.GetSensorType())
    {
        case Device.SensorType.Color:
            this.renderOptions = VideoFrameRef.CopyBitmapOptions.Force24BitRgb;

            // Prefer a ~1.3MP mode when HD color is requested.
            if (Settings.Default.Color_HD)
            {
                foreach (VideoMode vm in vmodes)
                {
                    if (vm.Resolution.Width == 1280
                        && (vm.Resolution.Height == 960 || vm.Resolution.Height == 1024))
                    {
                        if ((selectedVideoMode == null
                             || (selectedVideoMode.Fps < vm.Fps
                                 && vm.DataPixelFormat < selectedVideoMode.DataPixelFormat))
                            && vm.DataPixelFormat != VideoMode.PixelFormat.Jpeg
                            && vm.DataPixelFormat != VideoMode.PixelFormat.Yuv422)
                        {
                            selectedVideoMode = vm;
                        }
                    }
                }

                this.isHd = selectedVideoMode != null;
                if (!this.isHd)
                {
                    MessageBox.Show(
                        @"This device doesn't support ~1.3MP resolution.",
                        @"HD Resolution",
                        MessageBoxButtons.OK,
                        MessageBoxIcon.Warning);
                }
            }

            // Fall back to 640x480.
            if (selectedVideoMode == null)
            {
                foreach (VideoMode vm in vmodes)
                {
                    if (vm.Resolution == new Size(640, 480))
                    {
                        if ((selectedVideoMode == null
                             || (selectedVideoMode.Fps < vm.Fps
                                 && vm.DataPixelFormat < selectedVideoMode.DataPixelFormat))
                            && vm.DataPixelFormat != VideoMode.PixelFormat.Jpeg
                            && vm.DataPixelFormat != VideoMode.PixelFormat.Yuv422)
                        {
                            selectedVideoMode = vm;
                        }
                    }
                }
            }

            break;
        case Device.SensorType.Depth:
            // Build the depth rendering options from the user settings.
            this.renderOptions = VideoFrameRef.CopyBitmapOptions.Force24BitRgb
                                 | VideoFrameRef.CopyBitmapOptions.DepthFillShadow;
            if (Settings.Default.Depth_Fill)
            {
                if (this.cb_mirror.Enabled && this.cb_mirror.Checked)
                {
                    this.renderOptions |= VideoFrameRef.CopyBitmapOptions.DepthFillRigthBlack;
                }
                else
                {
                    this.renderOptions |= VideoFrameRef.CopyBitmapOptions.DepthFillLeftBlack;
                }
            }

            if (Settings.Default.Depth_Invert)
            {
                this.renderOptions |= VideoFrameRef.CopyBitmapOptions.DepthInvert;
            }

            if (Settings.Default.Depth_Histogram)
            {
                this.renderOptions |= VideoFrameRef.CopyBitmapOptions.DepthHistogramEqualize;
            }

            foreach (VideoMode vm in vmodes)
            {
                if (vm.Resolution == new Size(640, 480))
                {
                    if ((selectedVideoMode == null || selectedVideoMode.Fps < vm.Fps)
                        && (vm.DataPixelFormat == VideoMode.PixelFormat.Depth1Mm
                            || vm.DataPixelFormat == VideoMode.PixelFormat.Depth100Um))
                    {
                        selectedVideoMode = vm;
                    }
                }
            }

            break;
        case Device.SensorType.Ir:
            this.renderOptions = VideoFrameRef.CopyBitmapOptions.Force24BitRgb;
            foreach (VideoMode vm in vmodes)
            {
                if (vm.Resolution == new Size(640, 480))
                {
                    if ((selectedVideoMode == null
                         || (selectedVideoMode.Fps < vm.Fps
                             && vm.DataPixelFormat < selectedVideoMode.DataPixelFormat))
                        && vm.DataPixelFormat != VideoMode.PixelFormat.Jpeg
                        && vm.DataPixelFormat != VideoMode.PixelFormat.Yuv422)
                    {
                        selectedVideoMode = vm;
                    }
                }
            }

            break;
    }

    if (selectedVideoMode != null)
    {
        try
        {
            // Only touch the hardware if the active mode actually differs.
            if (this.currentSensor.VideoMode.Fps != selectedVideoMode.Fps
                || this.currentSensor.VideoMode.DataPixelFormat != selectedVideoMode.DataPixelFormat
                || this.currentSensor.VideoMode.Resolution != selectedVideoMode.Resolution)
            {
                this.currentSensor.VideoMode = selectedVideoMode;
            }
        }
        catch (Exception ex)
        {
            MessageBox.Show(
                string.Format("Cannot set the active video mode to {0}. {1}", selectedVideoMode, ex.Message),
                @"Sensor Config",
                MessageBoxButtons.OK,
                MessageBoxIcon.Error);
            return false;
        }
    }
    else
    {
        MessageBox.Show(
            @"No acceptable video mode found.",
            @"Sensor Config",
            MessageBoxButtons.OK,
            MessageBoxIcon.Error);
        return false;
    }

    this.softMirror = Settings.Default.Mirroring;

    // SmartCam: create the NiTE user and hand trackers so the active user can be followed.
    if (Settings.Default.SmartCam)
    {
        try
        {
            if (!isSameDevice
                || (this.uTracker == null || this.hTracker == null || !this.uTracker.IsValid || !this.hTracker.IsValid))
            {
                this.uTracker = UserTracker.Create(this.currentDevice);
                this.hTracker = HandTracker.Create(this.currentDevice);
                this.hTracker.StartGestureDetection(GestureData.GestureType.HandRaise);
                this.hTracker.OnNewData += this.NiTeOnNewData;
            }
        }
        catch (Exception)
        {
        }
    }

    if (!HandleError(this.currentSensor.Start()))
    {
        this.Stop(false);
        return false;
    }

    this.btn_stopstart.Text = @"Stop Streaming";
    this.isIdle = false;
    this.notify.Visible = true;
    return true;
}
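The video mode selection inside Start() follows the same pattern for each sensor type: restrict to a target resolution, skip compressed color formats (JPEG and YUV422) for color and IR, require Depth1Mm or Depth100Um for depth, and among the remaining modes prefer a higher frame rate. The sketch below factors that predicate into a standalone helper for reference; the method name and signature are illustrative and not part of the project, and it is simplified to maximize FPS only, omitting the pixel-format tie-break the original also applies for color and IR.

// Illustrative helper mirroring the selection rules used in Start(); not part of the project.
// Picks the highest-FPS mode at the given resolution among the formats allowed for the sensor type.
private static VideoMode SelectVideoMode(IEnumerable<VideoMode> modes, Size resolution, Device.SensorType sensorType)
{
    VideoMode selected = null;
    foreach (VideoMode vm in modes)
    {
        if (vm.Resolution != resolution)
        {
            continue;
        }

        bool formatOk = sensorType == Device.SensorType.Depth
                            ? vm.DataPixelFormat == VideoMode.PixelFormat.Depth1Mm
                              || vm.DataPixelFormat == VideoMode.PixelFormat.Depth100Um
                            : vm.DataPixelFormat != VideoMode.PixelFormat.Jpeg
                              && vm.DataPixelFormat != VideoMode.PixelFormat.Yuv422;

        if (formatOk && (selected == null || selected.Fps < vm.Fps))
        {
            selected = vm;
        }
    }

    return selected;
}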
private void NiTeOnNewData(HandTracker handTracker)
{
    try
    {
        if (Settings.Default.SmartCam && this.uTracker != null && this.uTracker.IsValid && this.hTracker != null
            && this.hTracker.IsValid)
        {
            using (UserTrackerFrameRef userframe = this.uTracker.ReadFrame())
            {
                using (HandTrackerFrameRef handframe = this.hTracker.ReadFrame())
                {
                    // A completed hand-raise gesture selects the user under that hand as the active user.
                    foreach (GestureData gesture in handframe.Gestures)
                    {
                        if (!gesture.IsComplete)
                        {
                            continue;
                        }

                        PointF handPos = this.hTracker.ConvertHandCoordinatesToDepth(gesture.CurrentPosition);

                        // Each user-map pixel is a 16-bit user id, hence the X * 2 byte offset;
                        // reading the low byte is enough for small ids.
                        short userId = Marshal.ReadByte(
                            userframe.UserMap.Pixels + (int)(handPos.Y * userframe.UserMap.DataStrideBytes)
                            + (int)(handPos.X * 2));
                        if (userId > 0)
                        {
                            this.activeUserId = userId;
                        }
                    }

                    handframe.Release();
                }

                if (this.activeUserId > 0)
                {
                    UserData user = userframe.GetUserById(this.activeUserId);
                    if (user.IsValid && user.IsVisible && user.CenterOfMass.Z > 0)
                    {
                        // Estimate a region extending above the user's center of mass; it shrinks as the
                        // user moves away (CenterOfMass.Z is in millimeters) and scales with the map height.
                        RectangleF position = new RectangleF(0, 0, 0, 0);
                        PointF botlocation = this.uTracker.ConvertJointCoordinatesToDepth(user.CenterOfMass);
                        int pSize =
                            (int)
                            (Math.Max((int)((4700 - user.CenterOfMass.Z) * 0.08), 50)
                             * ((float)userframe.UserMap.FrameSize.Height / 480));
                        position.Y = (int)botlocation.Y - pSize;
                        position.Height = pSize;
                        position.X = (int)botlocation.X;

                        // Store the result normalized (0..1) to the user-map size so consumers are
                        // resolution independent.
                        this.activePosition.X = position.X / userframe.UserMap.FrameSize.Width;
                        this.activePosition.Width = position.Width / userframe.UserMap.FrameSize.Width;
                        this.activePosition.Y = position.Y / userframe.UserMap.FrameSize.Height;
                        this.activePosition.Height = position.Height / userframe.UserMap.FrameSize.Height;
                        userframe.Release();
                        return;
                    }
                }

                userframe.Release();
            }
        }
    }
    catch (Exception)
    {
    }

    this.activeUserId = 0;
}
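NiTeOnNewData leaves the active user's region in activePosition normalized to the user-map size; note that in this snippet Width is never assigned, so it stays zero and activePosition.X effectively carries a horizontal anchor rather than a full box. A consumer that wants pixel coordinates in another stream has to scale the values back up. The sketch below is a hypothetical consumer, not part of the project, assuming the target frame size is known:

// Hypothetical consumer of the normalized activePosition rectangle; not part of the project.
// Scales the normalized (0..1) coordinates back to pixels for a frame of the given size.
private static Rectangle ToPixelRectangle(RectangleF normalized, Size frameSize)
{
    return new Rectangle(
        (int)(normalized.X * frameSize.Width),
        (int)(normalized.Y * frameSize.Height),
        (int)(normalized.Width * frameSize.Width),
        (int)(normalized.Height * frameSize.Height));
}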