/// <summary>
/// Send a request to Kinect service to update smoothing parameters
/// </summary>
/// <param name="transformSmooth">A value indicating whether to apply transform smooth</param>
/// <param name="smoothing">The amount of smoothing to be applied</param>
/// <param name="correction">The amount of correction to be applied</param>
/// <param name="prediction">The amount of prediction to be made</param>
/// <param name="jitterRadius">The radius for jitter processing</param>
/// <param name="maxDeviationRadius">Maximum deviation radius</param>
internal void UpdateSkeletalSmoothing(
    bool transformSmooth,
    float smoothing,
    float correction,
    float prediction,
    float jitterRadius,
    float maxDeviationRadius)
{
    // Package the smoothing values for the service request.
    mskinect.TransformSmoothParameters smoothParameters = new mskinect.TransformSmoothParameters
    {
        Smoothing = smoothing,
        Correction = correction,
        Prediction = prediction,
        JitterRadius = jitterRadius,
        MaxDeviationRadius = maxDeviationRadius
    };

    kinectProxy.UpdateSkeletalSmoothingRequest request = new kinectProxy.UpdateSkeletalSmoothingRequest
    {
        // NOTE(review): "TransfrormSmooth" (sic) is the property name declared by the proxy;
        // it must stay misspelled here to match the service contract.
        TransfrormSmooth = transformSmooth,
        SkeletalEngineTransformSmoothParameters = smoothParameters
    };

    Activate(
        Arbiter.Choice(
            this.kinectPort.UpdateSkeletalSmoothing(request),
            success =>
            {
                // nothing to do on success
            },
            fault =>
            {
                // The fault handler runs outside the WPF dispatcher; any UI-related
                // operation (such as showing this error) must be marshalled through
                // the WPF adapter port.
                this.wpfServicePort.Invoke(() => this.userInterface.ShowFault(fault));
            }));
}
/// <summary>
/// Initializes the Kinect sensor: configures skeleton smoothing, enables the
/// skeleton/depth/color streams, wires frame-ready handlers, allocates frame
/// buffers, starts the device, and kicks off elevation-angle tracking.
/// </summary>
void InitKinect()
{
    // Smoothing configuration applied to the skeleton stream below.
    kinect.TransformSmoothParameters smoothingParameters = new kinect.TransformSmoothParameters
    {
        Smoothing = 0.3f,
        Correction = 0.3f,
        Prediction = 0.4f,
        JitterRadius = 0.05f,
        MaxDeviationRadius = 0.05f
    };

    // =============================================================
    // Create Kinect device: pick the first sensor that reports Connected.
    // This requires a Kinect to be plugged in at app startup. To be robust
    // against plug/unplug, use KinectSensorChooser from Microsoft.Kinect.Toolkit.
    foreach (var candidate in kinect.KinectSensor.KinectSensors)
    {
        if (candidate.Status != kinect.KinectStatus.Connected)
        {
            continue;
        }

        this.sensor = candidate;
        break;
    }

    if (this.sensor != null)
    {
        // Drawing group we'll use for rendering.
        this.drawingGroup = new DrawingGroup();

        // Image source backed by the drawing group, usable from an image control.
        this.imageSource = new DrawingImage(this.drawingGroup);

        // Display the drawing using our image control
        //skeletonImage.Source = this.imageSource;

        // Enable the streams we consume; the skeleton stream gets smoothing applied.
        this.sensor.SkeletonStream.Enable(smoothingParameters);
        this.sensor.DepthStream.Enable();
        this.sensor.ColorStream.Enable();

        // Handlers invoked whenever new skeleton/color frame data arrives.
        this.sensor.SkeletonFrameReady += this.SensorSkeletonFrameReady;
        //this.sensor.DepthFrameReady += this.SensorDepthFrameReady;
        this.sensor.ColorFrameReady += this.SensorColorFrameReady;

        // Buffer for the color pixels we'll receive.
        this.colorFramePixels = new byte[this.sensor.ColorStream.FramePixelDataLength];

        // Buffer for the depth pixels we'll receive.
        //this.depthPixels = new short[this.sensor.DepthStream.FramePixelDataLength];

        // Buffer for color pixels produced by depth-pixel conversion:
        // one depth pixel becomes BGR - three color bytes plus one unused.
        //this.colorDepthPixels = new byte[this.sensor.DepthStream.FramePixelDataLength * 4];

        // On-screen bitmap. To work with bitmap extensions
        // (http://writeablebitmapex.codeplex.com/) it must be PixelFormats.Pbgra32.
        this.colorBitmapVideo = new WriteableBitmap(
            this.sensor.ColorStream.FrameWidth,
            this.sensor.ColorStream.FrameHeight,
            96.0,
            96.0,
            PixelFormats.Pbgra32,
            null);
        //this.colorBitmapDepth = new WriteableBitmap(this.sensor.DepthStream.FrameWidth, this.sensor.DepthStream.FrameHeight, 96.0, 96.0, PixelFormats.Bgr32, null);

        // Point the displayed image at the bitmap we'll write frame data into.
        videoImage.Source = this.colorBitmapVideo;
        //depthImage.Source = this.colorBitmapDepth;

        // Start the sensor; on failure fall through to the error message below.
        try
        {
            this.sensor.Start();
        }
        catch (IOException)
        {
            this.sensor = null;
        }
    }

    if (this.sensor == null)
    {
        System.Windows.MessageBox.Show("Runtime initialization failed. Please make sure Kinect device is plugged in.");
    }

    //nui.VideoStream.Open(ImageStreamType.Video, 2, ImageResolution.Resolution640x480, ImageType.Color);
    //nui.DepthStream.Open(ImageStreamType.Depth, 2, ImageResolution.Resolution320x240, ImageType.DepthAndPlayerIndex);

    lastTime = DateTime.Now;
    isElevationTaskOutstanding = false;
    ElevationAngle = 0;
    EnsureElevationAngle();
}
/// <summary>
/// Initializes the Kinect sensor: configures skeleton smoothing, enables the
/// skeleton/depth/color streams, wires frame-ready handlers, allocates frame
/// buffers, starts the device, and kicks off elevation-angle tracking.
/// </summary>
void InitKinect()
{
    // parameters used to smooth the skeleton data
    kinect.TransformSmoothParameters parameters = new kinect.TransformSmoothParameters();
    parameters.Smoothing = 0.3f;
    parameters.Correction = 0.3f;
    parameters.Prediction = 0.4f;
    parameters.JitterRadius = 0.05f;
    parameters.MaxDeviationRadius = 0.05f;

    // =============================================================
    // create Kinect device:
    // Look through all sensors and start the first connected one.
    // This requires that a Kinect is connected at the time of app startup.
    // To make your app robust against plug/unplug,
    // it is recommended to use KinectSensorChooser provided in Microsoft.Kinect.Toolkit
    foreach (var potentialSensor in kinect.KinectSensor.KinectSensors)
    {
        if (potentialSensor.Status == kinect.KinectStatus.Connected)
        {
            this.sensor = potentialSensor;
            break;
        }
    }

    if (null != this.sensor)
    {
        // Create the drawing group we'll use for drawing
        this.drawingGroup = new DrawingGroup();

        // Create an image source that we can use in our image control
        this.imageSource = new DrawingImage(this.drawingGroup);

        // Display the drawing using our image control
        //skeletonImage.Source = this.imageSource;

        // Turn on the skeleton stream to receive skeleton frames
        this.sensor.SkeletonStream.Enable(parameters);
        this.sensor.DepthStream.Enable();
        this.sensor.ColorStream.Enable();

        // Add an event handler to be called whenever there is new color frame data
        this.sensor.SkeletonFrameReady += this.SensorSkeletonFrameReady;
        //this.sensor.DepthFrameReady += this.SensorDepthFrameReady;
        this.sensor.ColorFrameReady += this.SensorColorFrameReady;

        // Allocate space to put the color pixels we'll receive
        this.colorFramePixels = new byte[this.sensor.ColorStream.FramePixelDataLength];

        // Allocate space to put the depth pixels we'll receive
        //this.depthPixels = new short[this.sensor.DepthStream.FramePixelDataLength];

        // Allocate space to put the color pixels we'll get as result of Depth pixels conversion.
        // One depth pixel will amount to BGR - three color pixels plus one unused
        //this.colorDepthPixels = new byte[this.sensor.DepthStream.FramePixelDataLength * 4];

        // This is the bitmap we'll display on-screen. To work with bitmap extensions
        // (http://writeablebitmapex.codeplex.com/) must be PixelFormats.Pbgra32
        this.colorBitmapVideo = new WriteableBitmap(this.sensor.ColorStream.FrameWidth, this.sensor.ColorStream.FrameHeight, 96.0, 96.0, PixelFormats.Pbgra32, null);
        //this.colorBitmapDepth = new WriteableBitmap(this.sensor.DepthStream.FrameWidth, this.sensor.DepthStream.FrameHeight, 96.0, 96.0, PixelFormats.Bgr32, null);

        // Set the image we display to point to the bitmap where we'll put the image data
        videoImage.Source = this.colorBitmapVideo;
        //depthImage.Source = this.colorBitmapDepth;

        // Start the sensor! On failure, null the field so the error path below runs.
        try
        {
            this.sensor.Start();
        }
        catch (IOException)
        {
            this.sensor = null;
        }
    }

    if (null == this.sensor)
    {
        System.Windows.MessageBox.Show("Runtime initialization failed. Please make sure Kinect device is plugged in.");
    }

    //nui.VideoStream.Open(ImageStreamType.Video, 2, ImageResolution.Resolution640x480, ImageType.Color);
    //nui.DepthStream.Open(ImageStreamType.Depth, 2, ImageResolution.Resolution320x240, ImageType.DepthAndPlayerIndex);

    lastTime = DateTime.Now;
    isElevationTaskOutstanding = false;
    ElevationAngle = 0;
    EnsureElevationAngle();
}
/// <summary>
/// Send a request to Kinect service to update smoothing parameters
/// </summary>
/// <param name="transformSmooth">A value indicating whether to apply transform smooth</param>
/// <param name="smoothing">The amount of smoothing to be applied</param>
/// <param name="correction">The amount of correction to be applied</param>
/// <param name="prediction">The amount of prediction to be made</param>
/// <param name="jitterRadius">The radius for jitter processing</param>
/// <param name="maxDeviationRadius">Maximum deviation radius</param>
internal void UpdateSkeletalSmoothing(
    bool transformSmooth,
    float smoothing,
    float correction,
    float prediction,
    float jitterRadius,
    float maxDeviationRadius)
{
    // Package the smoothing values for the service request.
    nui.TransformSmoothParameters newSmoothParams = new nui.TransformSmoothParameters();
    newSmoothParams.Correction = correction;
    newSmoothParams.JitterRadius = jitterRadius;
    newSmoothParams.MaxDeviationRadius = maxDeviationRadius;
    newSmoothParams.Prediction = prediction;
    newSmoothParams.Smoothing = smoothing;

    kinectProxy.UpdateSkeletalSmoothingRequest request = new kinectProxy.UpdateSkeletalSmoothingRequest();
    // NOTE(review): "TransfrormSmooth" (sic) is the property name declared by the
    // proxy; it must stay misspelled here to match the service contract.
    request.TransfrormSmooth = transformSmooth;
    request.SkeletalEngineTransformSmoothParameters = newSmoothParams;

    Activate(
        Arbiter.Choice(
            this.kinectPort.UpdateSkeletalSmoothing(request),
            success =>
            {
                // nothing to do
            },
            fault =>
            {
                // the fault handler is outside the WPF dispatcher
                // to perfom any UI related operation we need to go through the WPF adapter
                // show an error message
                this.wpfServicePort.Invoke(() => this.userInterface.ShowFault(fault));
            }));
}