/// <summary>
/// Creates a wheel controller driven by Kinect grab gestures: caches the graphics
/// device and delegate, enables the skeleton/depth/color streams, wires up the
/// interaction stream, and starts the sensor.
/// </summary>
/// <param name="wheelDelegate">Callback notified of wheel updates.</param>
/// <param name="screenManager">Supplies the graphics device used for rendering.</param>
/// <param name="kinect">The Kinect sensor to read from; started by this constructor.</param>
public KinectGrabWheelController(WheelDelegate wheelDelegate, ScreenManager screenManager, KinectSensor kinect)
{
    this.device = screenManager.GraphicsDevice;
    this.wheelDelegate = wheelDelegate;
    position = new Vector2();
    positionScreen = new Vector2();
    this.kinectSensor = kinect;

    // Light smoothing: small smoothing factor with a wide jitter radius.
    var parameters = new TransformSmoothParameters
    {
        Smoothing = 0.1f,
        Correction = 0.0f,
        Prediction = 0.0f,
        JitterRadius = 1.0f,
        MaxDeviationRadius = 0.5f
    };

    kinect.SkeletonStream.Enable(parameters);
    kinect.DepthStream.Enable();
    kinect.ColorStream.Enable();
    kinect.AllFramesReady += ks_AllFramesReady;

    // The interaction stream turns raw depth + skeleton data into grip/press events.
    interactionStream = new Microsoft.Kinect.Toolkit.Interaction.InteractionStream(kinect, new myIntClient());
    interactionStream.InteractionFrameReady += intStream_InteractionFrameReady;

    kinect.Start();
}
/// <summary>
/// Wires up Kinect interaction processing: creates the interaction client and
/// stream for the current sensor, pre-allocates the user-info storage, and
/// routes interaction frames to the kinectInteraction helper.
/// </summary>
protected void StartInteractionProcessing()
{
    this.InteractionClient = new InteractionClient();
    this.interactionStream = new InteractionStream(this.sensor, this.InteractionClient);

    // Prepare interaction: reserve the user-info buffer before frames arrive.
    this.kinectInteraction.AllocateUserInfos(InteractionFrame.UserInfoArrayLength);
    this.interactionStream.InteractionFrameReady += this.kinectInteraction.InteractionFrameReady;
}
/// <summary>
/// Swaps interaction processing from one Kinect sensor to another: tears down
/// event subscriptions and the interaction stream on the old sensor, then
/// rebuilds them (plus the skeleton/user-info buffers) for the new one.
/// </summary>
/// <param name="oldSensor">Sensor being detached; may be null.</param>
/// <param name="newSensor">Sensor being attached; may be null.</param>
private void OnSensorChanged(KinectSensor oldSensor, KinectSensor newSensor)
{
    this.CheckForNested();

    if (this.kinectAdapter != null)
    {
        // Clear hand pointer state since hand pointers corresponding to one
        // sensor can't be reused when processing data from another sensor.
        this.kinectAdapter.ClearHandPointers();
    }

    if (oldSensor != null)
    {
        // Unhook from the outgoing sensor and release the interaction stream.
        // NOTE(review): assumes interactionStream was created when oldSensor
        // was attached (no null check here) — verify against callers.
        oldSensor.DepthFrameReady -= this.SensorDepthFrameReady;
        oldSensor.SkeletonFrameReady -= this.SensorSkeletonFrameReady;
        this.skeletons = null;
        this.userInfos = null;
        this.interactionStream.InteractionFrameReady -= this.InteractionFrameReady;
        this.interactionStream.Dispose();
        this.interactionStream = null;
    }

    if (newSensor != null)
    {
        // The adapter doubles as the interaction client for the new stream.
        this.interactionStream = new InteractionStream(newSensor, this.kinectAdapter);
        this.interactionStream.InteractionFrameReady += this.InteractionFrameReady;

        // Allocate space to put the skeleton and interaction data we'll receive
        this.skeletons = new Skeleton[newSensor.SkeletonStream.FrameSkeletonArrayLength];
        this.userInfos = new UserInfo[InteractionFrame.UserInfoArrayLength];

        newSensor.DepthFrameReady += this.SensorDepthFrameReady;
        newSensor.SkeletonFrameReady += this.SensorSkeletonFrameReady;
    }
}
// Per-user interaction data received from the interaction stream.
private UserInfo[] _userInfos;

/// <summary>
/// Sets up gesture detection on top of a Kinect sensor: allocates skeleton and
/// user-info buffers, enables the depth stream, configures seated skeleton
/// tracking, wires the interaction stream, and registers the gesture recognizer.
/// </summary>
/// <param name="sensor">Sensor to stream depth and skeleton data from.</param>
public InteractiveGestureDetector(KinectSensor sensor) : base()
{
    _sensor = sensor;
    _skeletons = new Skeleton[_sensor.SkeletonStream.FrameSkeletonArrayLength];
    _userInfos = new UserInfo[InteractionFrame.UserInfoArrayLength];

    _sensor.DepthStream.Enable(DepthImageFormat.Resolution640x480Fps30);
    _sensor.SkeletonStream.TrackingMode = SkeletonTrackingMode.Seated;
    _sensor.DepthFrameReady += SensorOnDepthFrameReady;
    _sensor.SkeletonFrameReady += SensorOnSkeletonFrameReady;

    // Interaction stream: converts depth + skeleton frames into hand events.
    _interactionStream = new InteractionStream(_sensor, new DummyInteractionClient());
    _interactionStream.InteractionFrameReady += InteractionStreamOnInteractionFrameReady;

    // NOTE(review): the skeleton stream is enabled only after the handlers are
    // attached; order preserved from the original implementation.
    _sensor.SkeletonStream.Enable();

    // Initialize the gesture recognizer.
    gestureController = new AutoRegisteringGestureController();
    gestureController.GestureRecognized += gestureController_GestureRecognized;
}
/// <summary>
/// Finds the first connected Kinect, enables its depth/skeleton/color streams,
/// creates the interaction stream, and starts the sensor. Sets the splash logo
/// as the current bitmap either way.
/// </summary>
public void Initialize()
{
    foreach (var candidate in KinectSensor.KinectSensors)
    {
        if (candidate.Status == KinectStatus.Connected)
        {
            this.Sensor = candidate;
            break;
        }
    }

    if (this.Sensor != null)
    {
        // All-zero smoothing: skeleton data is passed through unfiltered.
        var parameters = new TransformSmoothParameters
        {
            Smoothing = 0.0f,
            Correction = 0.0f,
            Prediction = 0.0f,
            JitterRadius = 0.0f,
            MaxDeviationRadius = 0.0f
        };

        interactionStream = new InteractionStream(Sensor, new InteractionClient());

        this.Sensor.DepthStream.Enable(DepthImageFormat.Resolution640x480Fps30);
        this.Sensor.SkeletonStream.Enable(parameters);
        this.Sensor.ColorStream.Enable(ColorImageFormat.RgbResolution640x480Fps30);

        try
        {
            this.Sensor.Start();
        }
        catch (System.IO.IOException ex)
        {
            // Another application may already own the sensor.
            Debug.WriteLine(ex.Message);
            Debug.WriteLine(ex.StackTrace);
            this.Sensor = null;
        }
    }

    if (this.Sensor == null)
    {
        Debug.WriteLine("No kinect connected!");
    }

    this.CurrentBitmap = Assets.SplashLogo;
}
/// <summary>
/// Grabs the first available Kinect, enables its skeleton/depth streams, wires
/// the interaction stream, and starts the sensor. Shows a message and bails
/// out when no Kinect is attached.
/// </summary>
private void initKinect()
{
    if (KinectSensor.KinectSensors.Count > 0)
    {
        kinect = KinectSensor.KinectSensors[0];
    }
    else
    {
        info.Content = "Pas de kinect connectée";
        // BUGFIX: previously fell through with a null kinect, NRE'd inside the
        // try below, and silently swallowed the exception. Bail out instead.
        return;
    }

    try
    {
        kinect.SkeletonStream.Enable();
        kinect.DepthStream.Enable();
        interStream = new InteractionStream(kinect, new DummyInteraction());

        kinect.DepthFrameReady += KinectOnDepthFrameReady;
        kinect.SkeletonFrameReady += KinectOnSkeletonFrameReady;
        interStream.InteractionFrameReady += interStream_InteractionFrameReady;

        kinect.Start();
    }
    catch (Exception)
    {
        // Deliberate best-effort startup: the original swallowed all failures
        // here (e.g. sensor owned by another process). TODO: surface to the UI.
    }
}
/// <summary>
/// Called when the KinectSensorChooser gets a new sensor: tears down the old
/// sensor's streams, then enables depth/skeleton (near mode when supported),
/// allocates buffers, wires the interaction stream, and starts the new sensor.
/// </summary>
/// <param name="sender">sender of the event</param>
/// <param name="args">event arguments</param>
private static void SensorChooserOnKinectChanged(object sender, KinectChangedEventArgs args)
{
    if (args.OldSensor != null)
    {
        try
        {
            args.OldSensor.DepthStream.Range = DepthRange.Default;
            args.OldSensor.SkeletonStream.EnableTrackingInNearRange = false;
            args.OldSensor.DepthStream.Disable();
            args.OldSensor.SkeletonStream.Disable();
        }
        catch (InvalidOperationException)
        {
            // KinectSensor might enter an invalid state while enabling/disabling
            // streams or stream features. E.g.: sensor might be abruptly unplugged.
        }
    }

    if (args.NewSensor != null)
    {
        try
        {
            Console.WriteLine("encontrou novo sensor");
            kinectSensor = args.NewSensor;
            args.NewSensor.DepthStream.Enable(DepthImageFormat.Resolution640x480Fps30);
            args.NewSensor.SkeletonStream.Enable();

            try
            {
                args.NewSensor.DepthStream.Range = DepthRange.Near;
                args.NewSensor.SkeletonStream.EnableTrackingInNearRange = true;
            }
            catch (InvalidOperationException)
            {
                // Non Kinect for Windows devices do not support Near mode, so reset back to default mode.
                args.NewSensor.DepthStream.Range = DepthRange.Default;
                args.NewSensor.SkeletonStream.EnableTrackingInNearRange = false;
            }

            // BUGFIX: this setup used to run AFTER the outer catch, i.e. even when
            // the stream enables above threw, which could crash on a sensor in an
            // invalid state. It now runs only when the streams were enabled.
            skeletons = new Skeleton[kinectSensor.SkeletonStream.FrameSkeletonArrayLength]; // Allocate ST data
            userInfos = new UserInfo[InteractionFrame.UserInfoArrayLength];

            interactionStream = new InteractionStream(kinectSensor, new DummyInteractionClient());
            interactionStream.InteractionFrameReady += InteractionStreamOnInteractionFrameReady;

            kinectSensor.DepthFrameReady += SensorOnDepthFrameReady;
            kinectSensor.SkeletonFrameReady += kinect_SkeletonFrameReady; // Get ready for skeleton ready events

            kinectSensor.Start();
        }
        catch (InvalidOperationException)
        {
            // KinectSensor might enter an invalid state while enabling/disabling
            // streams or stream features. E.g.: sensor might be abruptly unplugged.
        }
    }
}
/// <summary>
/// Creates the interaction stream for the current Kinect (using the drawing
/// canvas as the interaction client) and subscribes to its frame-ready event.
/// </summary>
private void InicializarFluxoInteracao()
{
    fluxoInteracao = new InteractionStream(kinect, canvasDesenho);
    fluxoInteracao.InteractionFrameReady += new EventHandler<InteractionFrameReadyEventArgs>(fluxoInteracao_InteractionFrameReady);
}
/// <summary>
/// Handles KinectSensorChooser sensor changes: shuts down all streams on the old
/// sensor (including background removal and interaction streams) and enables
/// depth/color/skeleton plus background removal and interaction on the new one.
/// </summary>
/// <param name="sender">sender of the event</param>
/// <param name="args">old/new sensor pair</param>
private void SensorChooserOnKinectChanged(object sender, KinectChangedEventArgs args)
{
    if (args.OldSensor != null)
    {
        try
        {
            args.OldSensor.AllFramesReady -= this.SensorAllFramesReady;
            args.OldSensor.DepthStream.Range = DepthRange.Default;
            args.OldSensor.SkeletonStream.EnableTrackingInNearRange = false;
            args.OldSensor.DepthStream.Disable();
            args.OldSensor.ColorStream.Disable();
            args.OldSensor.SkeletonStream.Disable();

            if (colorStream != null)
            {
                this.colorStream.BackgroundRemovedFrameReady -= this.BackgroundRemovedFrameReadyHandler;
                this.colorStream.Dispose();
                this.colorStream = null;
            }

            if (interactionStream != null)
            {
                this.interactionStream.InteractionFrameReady -= this.InteractionFrameReadyHandler;
                this.interactionStream.Dispose();
                this.interactionStream = null;
            }

            this.controller.Sensor = null;
        }
        catch (InvalidOperationException)
        {
            // The sensor may enter an invalid state while disabling streams
            // (e.g. abrupt unplug); ignore and continue.
        }
    }

    if (args.NewSensor != null)
    {
        try
        {
            // Moderate smoothing tuned for hand tracking.
            TransformSmoothParameters smoothingParam = new TransformSmoothParameters
            {
                Smoothing = 0.5f,
                Correction = 0.5f,
                Prediction = 0.5f,
                JitterRadius = 0.05f,
                MaxDeviationRadius = 0.04f
            };

            args.NewSensor.DepthStream.Enable(DepthImageFormat.Resolution640x480Fps30);
            args.NewSensor.ColorStream.Enable(ColorImageFormat.RgbResolution640x480Fps30);

            // BUGFIX: the skeleton stream used to be enabled twice — once with the
            // smoothing parameters and immediately again without them, which threw
            // the smoothing configuration away. Enable it exactly once.
            args.NewSensor.SkeletonStream.Enable(smoothingParam);

            args.NewSensor.DepthStream.Range = DepthRange.Default;
            args.NewSensor.SkeletonStream.EnableTrackingInNearRange = false;

            this.skeletons = new Skeleton[args.NewSensor.SkeletonStream.FrameSkeletonArrayLength];

            this.colorStream = new BackgroundRemovedColorStream(args.NewSensor);
            this.colorStream.Enable(ColorImageFormat.RgbResolution640x480Fps30, DepthImageFormat.Resolution640x480Fps30);
            this.colorStream.BackgroundRemovedFrameReady += this.BackgroundRemovedFrameReadyHandler;

            this.interactionStream = new InteractionStream(args.NewSensor, new DummyInteractionClient());
            this.interactionStream.InteractionFrameReady += this.InteractionFrameReadyHandler;

            args.NewSensor.AllFramesReady += this.SensorAllFramesReady;
            this.controller.Sensor = args.NewSensor;
        }
        catch (InvalidOperationException)
        {
            // Sensor may have been unplugged mid-setup; leave it unconfigured.
        }
    }
}
// TODO: Ensure that all initial Kinect settings (like white balance, etc) get set
// on the actual Kinect for both GUI and console mode.
/// <summary>
/// Enables the color/depth/skeleton streams selected in masterKinectSettings,
/// detects Xbox 360 vs Kinect for Windows hardware, wires up the interaction
/// stream, then starts the sensor and the update timer.
/// </summary>
private void LaunchKinect()
{
    // Setup default properties
    if (masterKinectSettings.colorImageMode != KinectBase.ColorImageFormat.Undefined)
    {
        kinect.ColorStream.Enable(convertColorImageFormat(masterKinectSettings.colorImageMode));
        kinect.ColorFrameReady += kinect_ColorFrameReady;
        isColorStreamOn = true;

        // Check to see if the Kinect is a Kinect for Windows or a Xbox 360 Kinect
        // so options can be enabled accordingly: only Kinect for Windows exposes
        // ColorStream.CameraSettings; the Xbox 360 device throws here.
        try
        {
            ColorCameraSettings test = kinect.ColorStream.CameraSettings;
            test = null;
            isXbox360Kinect = false;
        }
        catch
        {
            isXbox360Kinect = true;
        }
    }

    if (masterKinectSettings.depthImageMode != KinectBase.DepthImageFormat.Undefined)
    {
        kinect.DepthStream.Enable(convertDepthImageFormat(masterKinectSettings.depthImageMode));
        isDepthStreamOn = true;

        // Note: the audio stream MUST be started AFTER this (known issue with SDK
        // v1.7). Currently not an issue as the audio isn't started until the
        // server is launched later in the code.
        kinect.SkeletonStream.Enable();

        // Explicitly enable depth tracking in near mode (this can be true when the
        // depth mode is near or default, but if it is false, there is no skeleton
        // data in near mode).
        // BUGFIX: this flag used to be assigned twice; the duplicate was removed.
        kinect.SkeletonStream.EnableTrackingInNearRange = true;

        // Create (or reset) the skeleton hand-grab data container.
        if (skeletonHandGrabData == null)
        {
            skeletonHandGrabData = new List<HandGrabInfo>();
        }
        else
        {
            skeletonHandGrabData.Clear();
        }

        interactStream = new InteractionStream(kinect, new DummyInteractionClient());
        kinect.DepthFrameReady += kinect_DepthFrameReady;
        kinect.SkeletonFrameReady += kinect_SkeletonFrameReady;
        interactStream.InteractionFrameReady += interactStream_InteractionFrameReady;
    }

    kinect.Start();
    StartUpdateTimer();
}
/// <summary>
/// Detaches all sensor event handlers, stops and disposes the update timer,
/// tears down the interaction and audio streams, and stops the Kinect.
/// </summary>
public void ShutdownSensor()
{
    if (kinect != null)
    {
        // "-=" with method groups really does remove the handlers added with "+=".
        kinect.ColorFrameReady -= kinect_ColorFrameReady;
        kinect.DepthFrameReady -= kinect_DepthFrameReady;
        kinect.SkeletonFrameReady -= kinect_SkeletonFrameReady;

        if (updateTimer != null)
        {
            updateTimer.Stop();
            updateTimer.Elapsed -= updateTimer_Elapsed;
            updateTimer.Dispose();
        }

        // BUGFIX: interactStream is only created when the depth/skeleton streams
        // were enabled, so guard against a null stream instead of throwing here.
        if (interactStream != null)
        {
            interactStream.InteractionFrameReady -= interactStream_InteractionFrameReady;
            interactStream.Dispose();
            interactStream = null;
        }

        if (kinect.AudioSource != null)
        {
            if (audioStream != null)
            {
                audioStream.Close();
                audioStream.Dispose();
            }
            kinect.AudioSource.Stop();
        }

        kinect.Stop();
    }
}
/// <summary>
/// Initializes the Kinect; runs whenever a Kinect connects or restarts.
/// Disables the previous sensor, then enables depth and smoothed skeleton
/// streams on the new one (near/seated mode when the hardware supports it)
/// and updates the UI status fields.
/// </summary>
/// <param name="sender"></param>
/// <param name="args">Carries the old/new sensor references.</param>
private void KinectSensorChooserKinectChanged(object sender, KinectChangedEventArgs args)
{
    if (args.OldSensor != null)
    {
        // A new Kinect connected (or the sensor restarted): shut the previous one down.
        try
        {
            args.OldSensor.DepthStream.Range = DepthRange.Default;
            args.OldSensor.SkeletonStream.EnableTrackingInNearRange = false;
            args.OldSensor.DepthStream.Disable();
            args.OldSensor.SkeletonStream.Disable();
        }
        catch (InvalidOperationException)
        {
            // Ignore teardown errors.
        }
    }

    if (args.NewSensor != null)
    {
        try
        {
            args.NewSensor.DepthStream.Enable(DepthImageFormat.Resolution640x480Fps30);

            // Joint-motion smoothing parameters.
            TransformSmoothParameters smoothingParam = new TransformSmoothParameters
            {
                Smoothing = 0.5f,
                Correction = 0.1f,
                Prediction = 0.5f,
                JitterRadius = 0.1f,
                MaxDeviationRadius = 0.1f
            };

            // BUGFIX: the skeleton stream used to be enabled twice — first without
            // smoothing and later with it. Enable it exactly once, with smoothing.
            args.NewSensor.SkeletonStream.Enable(smoothingParam);

            try
            {
                args.NewSensor.DepthStream.Range = DepthRange.Near;
                args.NewSensor.SkeletonStream.EnableTrackingInNearRange = true;
                args.NewSensor.SkeletonStream.TrackingMode = SkeletonTrackingMode.Seated;
                StatusValue.Text = "NearMode";
            }
            catch (InvalidOperationException)
            {
                // The Kinect doesn't support Near Mode; fall back to the default.
                args.NewSensor.DepthStream.Range = DepthRange.Default;
                args.NewSensor.SkeletonStream.EnableTrackingInNearRange = false;
                StatusValue.Text = "DefaultMode";
            }

            sensor = args.NewSensor;
            sensor.SkeletonFrameReady += KinectSkeletonFrameReady;

#if MOUSE_CONTROL
            _interactionStream = new InteractionStream(sensor, new InteractionClient());
            _interactionStream.InteractionFrameReady += KinectInteractionFrameReady;
            sensor.DepthFrameReady += KinectDepthFrameReady;
#endif
#if VIEW_CAMERA
            sensor.ColorStream.Enable();
            sensor.ColorFrameReady += KinectColorFrameReady;
#endif

            StatusValue.Text += " Connected";
            SliderMotorAngle.Value = sensor.ElevationAngle;
            KinectAngle.Text = string.Format("Ángulo: {0}", sensor.ElevationAngle.ToString());
        }
        catch (InvalidOperationException)
        {
            StatusValue.Text = "Error";
            // Ignore setup errors.
        }
    }
}
/// <summary>
/// Execute startup tasks: find the first connected Kinect, enable its streams,
/// allocate pixel/user buffers, hook frame events (including the interaction
/// stream used to detect hand open/close), and start the sensor.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void WindowLoaded(object sender, RoutedEventArgs e)
{
    // Drawing surface backing the on-screen image.
    this.drawingGroup = new DrawingGroup();
    this.imageSource = new DrawingImage(this.drawingGroup);

    // Pick the first connected sensor. For plug/unplug robustness the toolkit's
    // KinectSensorChooser is recommended instead.
    foreach (var candidate in KinectSensor.KinectSensors)
    {
        if (candidate.Status == KinectStatus.Connected)
        {
            this.sensor = candidate;
            break;
        }
    }

    if (this.sensor != null)
    {
        // Buffer for the incoming depth pixels.
        this.depthPixels = new DepthImagePixel[this.sensor.DepthStream.FramePixelDataLength];

        // Turn on the skeleton, color and depth streams.
        this.sensor.SkeletonStream.Enable();
        this.sensor.ColorStream.Enable(ColorImageFormat.RgbResolution640x480Fps30);
        this.sensor.DepthStream.Enable(DepthImageFormat.Resolution640x480Fps30);

        // Buffer for the per-user interaction data Kinect reports.
        this.userInfos = new UserInfo[InteractionFrame.UserInfoArrayLength];

        // Color pixel buffer and the bitmap we display on screen.
        this.colorPixels = new byte[this.sensor.ColorStream.FramePixelDataLength];
        this.colorBitmap = new WriteableBitmap(this.sensor.ColorStream.FrameWidth, this.sensor.ColorStream.FrameHeight, 96.0, 96.0, PixelFormats.Bgr32, null);

        // Point the image control at the drawing image source.
        this.Image.Source = this.imageSource;

        // The interaction stream detects hand open/close gestures.
        this.interactionStream = new InteractionStream(sensor, new DummyInteractionClient());
        this.interactionStream.InteractionFrameReady += InteractionStreamOnInteractionFrameReady;

        // Frame handlers for depth, color and skeleton data.
        this.sensor.DepthFrameReady += this.SensorDepthFrameReady;
        this.sensor.ColorFrameReady += this.SensorColorFrameReady;
        this.sensor.SkeletonFrameReady += this.SensorSkeletonFrameReady;

        // Hand the active sensor to the puzzle component.
        this.puzzle.asignarSensor(this.sensor);

        // Start the sensor!
        try
        {
            this.sensor.Start();
        }
        catch (IOException)
        {
            // Another process owns the sensor; treat it as unavailable.
            this.sensor = null;
        }
    }

    if (this.sensor == null)
    {
        this.statusBarText.Text = Properties.Resources.NoKinectReady;
    }
}
/// <summary>
/// Per-frame plugin evaluation: (re)wires the interaction stream when the
/// runtime connection changes, then publishes the currently tracked users and
/// their skeleton ids to the output pins.
/// </summary>
public void Evaluate(int SpreadMax)
{
    if (this.FInvalidateConnect)
    {
        if (this.FInRuntime.PluginIO.IsConnected)
        {
            // New runtime attached: build the interaction stream and subscribe.
            stream = new InteractionStream(this.FInRuntime[0].Runtime, new InteractionClientTest());
            stream.InteractionFrameReady += stream_InteractionFrameReady;

            this.runtime = this.FInRuntime[0];
            this.runtime.SkeletonFrameReady += runtime_SkeletonFrameReady;
            this.runtime.DepthFrameReady += this.runtime_DepthFrameReady;
        }
        else if (stream != null)
        {
            // Runtime detached: unhook everything and release the stream.
            this.runtime.SkeletonFrameReady -= runtime_SkeletonFrameReady;
            this.runtime.DepthFrameReady -= this.runtime_DepthFrameReady;
            stream.InteractionFrameReady -= stream_InteractionFrameReady;
            stream.Dispose();
            stream = null;
            this.runtime = null;
        }

        this.FInvalidateConnect = false;
    }

    // Collect the users that currently have a skeleton tracking id.
    List<UserInfo> trackedUsers = new List<UserInfo>();
    foreach (UserInfo candidate in this.infos)
    {
        if (candidate != null && candidate.SkeletonTrackingId != 0)
        {
            trackedUsers.Add(candidate);
        }
    }

    this.FOutSkelId.SliceCount = trackedUsers.Count;
    this.FOutUI.SliceCount = trackedUsers.Count;

    for (int slice = 0; slice < trackedUsers.Count; slice++)
    {
        this.FOutSkelId[slice] = trackedUsers[slice].SkeletonTrackingId;
        this.FOutUI[slice] = trackedUsers[slice];
    }
}
/// <summary>
/// Execute startup tasks — setup that needs to occur after a Kinect is
/// available: create the drawing surface, enable the skeleton/depth streams
/// plus the interaction stream, size the play field, and arm the quit timer.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void WindowLoaded(object sender, RoutedEventArgs e)
{
    // Drawing surface shown in the Canvas image control.
    this.drawingGroup = new DrawingGroup();
    this.imageSource = new DrawingImage(this.drawingGroup);
    Canvas.Source = this.imageSource;

    // Pick the first connected sensor. For plug/unplug robustness the toolkit's
    // KinectSensorChooser is recommended instead.
    foreach (var candidate in KinectSensor.KinectSensors)
    {
        if (candidate.Status == KinectStatus.Connected)
        {
            this.sensor = candidate;
            break;
        }
    }

    if (this.sensor != null)
    {
        // Enable only the streams this game consumes.
        this.sensor.SkeletonStream.Enable();
        this.sensor.DepthStream.Enable();
        this.interactionStream = new InteractionStream(this.sensor, new ArkinectInteractionClient());

        this.sensor.SkeletonFrameReady += this.SensorSkeletonFrameReady;
        this.sensor.DepthFrameReady += this.SensorDepthFrameReady;
        this.interactionStream.InteractionFrameReady += this.SensorInteractionFrameReady;

        // Start the sensor!
        try
        {
            this.sensor.Start();
        }
        catch (IOException)
        {
            // Sensor is in use elsewhere; treat it as missing.
            this.sensor = null;
        }
    }

    // Size the play field and place the game objects.
    screenWidth = this.layoutGrid.RenderSize.Width;
    screenHeight = this.layoutGrid.RenderSize.Height;
    ball = newBall(screenWidth, screenHeight);
    paddle = newPaddle(screenWidth, screenHeight);
    resetGame();

    // No sensor, complain.
    if (this.sensor == null)
    {
        this.ScoreText.Text = ACMX.Games.Arkinect.Properties.Resources.NoKinectReady;
    }

    // Shut the app (and sensor) down once no human has been seen.
    quitTimer.Elapsed += delegate(System.Object o, ElapsedEventArgs eea)
    {
        if (humanNumber == -1)
        {
            Application.Current.Dispatcher.BeginInvokeShutdown(System.Windows.Threading.DispatcherPriority.Normal);
            if (sensor != null)
            {
                sensor.Stop();
            }
        }
    };
}
/// <summary>
/// Synchronize the Interaction stream management with the current plugin
/// settings: creates the stream when pointing mode or gesture grip is enabled,
/// tears it down (under a lock) when neither feature needs it.
/// </summary>
public void SynchronizeInteractionStream()
{
    if (PropertiesPluginKinect.Instance.KinectPointingModeEnabled || PropertiesPluginKinect.Instance.EnableGestureGrip)
    {
        if (m_refKinectInteraction == null)
        {
            try
            {
                m_refKinectInteraction = new InteractionStream(m_refKinectsensor, new DummyInteractionClient());
                m_refKinectInteraction.InteractionFrameReady += OnKinectInteractionFrameReady;

                // Interaction processing always needs the depth stream.
                m_refKinectsensor.DepthStream.Enable(KINECT_DEFAULT_DEPTH_STREAM_FORMAT);
            }
            catch (InvalidOperationException ex)
            {
                // Surface stream-setup failures as a domain-specific exception.
                throw new KinectException(ex.Message);
            }
        }
    }
    else
    {
        if (m_refKinectInteraction != null)
        {
            // No need for the interaction stream any more; dispose it under a
            // lock, presumably so a concurrent frame handler doesn't race the
            // teardown — verify against the handler's locking.
            lock (m_refKinectInteraction)
            {
                m_refKinectInteraction.InteractionFrameReady -= OnKinectInteractionFrameReady;
                m_refKinectInteraction.Dispose();
                m_refKinectInteraction = null;
            }
        }
    }
}
/// <summary>
/// Converts the InteractionFrameReady event to an observable sequence.
/// </summary>
/// <param name="interactionStream">The interaction stream.</param>
/// <returns>The observable sequence of interaction frame event args.</returns>
public static IObservable<InteractionFrameReadyEventArgs> GetInteractionFrameReadyObservable(this InteractionStream interactionStream)
{
    if (interactionStream == null)
    {
        throw new ArgumentNullException("interactionStream");
    }

    // Bridge the CLR event into Rx, then unwrap the EventArgs payload.
    var eventPattern = Observable.FromEventPattern<InteractionFrameReadyEventArgs>(
        handler => interactionStream.InteractionFrameReady += handler,
        handler => interactionStream.InteractionFrameReady -= handler);

    return eventPattern.Select(pattern => pattern.EventArgs);
}
/// <summary>
/// Prepare to feed depth and skeleton frames to a new interaction stream and
/// receive interaction data back from it.
/// </summary>
/// <param name="sensor">Sensor from which we will stream depth and skeleton data.</param>
public void InitializeTracking(KinectSensor sensor)
{
    this.kinectSensor = sensor;

    // Buffer for the per-user interaction data we'll receive.
    this.userInfos = new UserInfo[InteractionFrame.UserInfoArrayLength];

    // This object doubles as the interaction client.
    this.interactionStream = new InteractionStream(sensor, this);
    this.interactionStream.InteractionFrameReady += this.InteractionFrameReady;
}
/// <summary>
/// Lets ISensorStreamHandler know that the Kinect sensor associated with this
/// stream handler has changed: tears down the interaction stream bound to the
/// old sensor and builds a new one for the replacement.
/// </summary>
/// <param name="newSensor">
/// New KinectSensor.
/// </param>
public override void OnSensorChanged(KinectSensor newSensor)
{
    if (this.sensor != null)
    {
        try
        {
            // Detach from the outgoing sensor and release the interaction stream.
            this.interactionStream.InteractionFrameReady -= this.InteractionFrameReadyAsync;
            this.interactionStream.Dispose();
            this.interactionStream = null;

            // Hand skeleton selection back to the runtime.
            this.sensor.SkeletonStream.AppChoosesSkeletons = false;
        }
        catch (InvalidOperationException)
        {
            // KinectSensor might enter an invalid state while enabling/disabling streams or stream features.
            // E.g.: sensor might be abruptly unplugged.
        }

        this.userInfos = null;
    }

    this.sensor = newSensor;

    if (newSensor != null)
    {
        try
        {
            // This object acts as its own interaction client.
            this.interactionStream = new InteractionStream(newSensor, this);
            this.interactionStream.InteractionFrameReady += this.InteractionFrameReadyAsync;
            this.sensor.SkeletonStream.AppChoosesSkeletons = true;
            this.userInfos = new UserInfo[InteractionFrame.UserInfoArrayLength];
        }
        catch (InvalidOperationException)
        {
            // KinectSensor might enter an invalid state while enabling/disabling streams or stream features.
            // E.g.: sensor might be abruptly unplugged.
        }
    }

    // Per-user state and colors are sensor-specific; reset them in all cases.
    this.userStateManager.Reset();
    this.userViewerColorizer.ResetColorLookupTable();
}
/// <summary>
/// Clean up the interaction stream and associated data structures after depth
/// and skeleton streaming stops.
/// </summary>
public void ResetTracking()
{
    this.kinectSensor = null;
    this.userInfos = null;

    // BUGFIX: guard against a second call (or a call before InitializeTracking)
    // dereferencing a null interaction stream.
    if (this.interactionStream != null)
    {
        this.interactionStream.InteractionFrameReady -= this.InteractionFrameReady;
        this.interactionStream.Dispose();
        this.interactionStream = null;
    }
}
/// <summary>
/// Prepares a Kinect to be started. Enables streams, among other things.
/// Call Start(); from outside after this.
/// </summary>
/// <param name="sensor">Kinect to set as active</param>
private void initializeKinectSensor( KinectSensor sensor )
{
    if ( sensor == null ) {
        return;
    }

    _kinectSensor = sensor;

    // Subscribe to AllFramesReady only when both streams are still disabled —
    // presumably to avoid double-subscribing when re-initializing an already
    // configured sensor; TODO confirm against callers.
    if( !_kinectSensor.SkeletonStream.IsEnabled && !_kinectSensor.DepthStream.IsEnabled ) {
        _kinectSensor.AllFramesReady += sensor_AllFramesReady;
    }

    // Disable before re-enabling so the new stream settings take effect.
    if ( _kinectSensor.SkeletonStream.IsEnabled ) {
        _kinectSensor.SkeletonStream.Disable();
    }

    // Fixed-size buffers; 6 matches the Kinect v1 maximum tracked users.
    _totalSkeleton = new Skeleton[6];
    _userInfo = new UserInfo[6];

    if ( this._enableSmoothing ) {
        // Smoothing requested: the parameters must have been supplied.
        System.Diagnostics.Debug.Assert( this._enableSmoothing == true && this._smoothingParam != null );
        _kinectSensor.SkeletonStream.Enable( _smoothingParam );
    }
    else {
        _kinectSensor.SkeletonStream.Enable();
    }

    // Same disable-then-enable dance for the depth stream.
    if ( _kinectSensor.DepthStream.IsEnabled ) {
        _kinectSensor.DepthStream.Disable();
    }

    _handsVisible = false;

    _kinectSensor.DepthStream.Enable( DEPTH_IMAGE_FORMAT );
    _kinectSensor.SkeletonStream.EnableTrackingInNearRange = true;

    // Interaction stream feeds grip/press events to _interactionClient.
    _interactionStream = new InteractionStream( _kinectSensor, _interactionClient );
    _interactionStream.InteractionFrameReady += new EventHandler<InteractionFrameReadyEventArgs>( InteractiontStream_InteractionFrameReady );

    initializeSecurityTimer();
    initializePositionTrackerController();

    // Call Start(); from outside.
}
/// <summary>
/// Window-loaded handler: initializes the first attached Kinect (color, depth
/// and skeleton streams), starts it, and wires up the interaction library.
/// (Original comments were Japanese: "Kinectの初期化" / "インタラクションライブラリの初期化".)
/// </summary>
void MainWindow_Loaded( object sender, RoutedEventArgs e )
{
    // BUGFIX: guard against no sensor being attached, which previously threw an
    // ArgumentOutOfRangeException on the collection indexer.
    if ( KinectSensor.KinectSensors.Count == 0 ) {
        return;
    }

    // Initialize the Kinect.
    kinect = KinectSensor.KinectSensors[0];
    kinect.AllFramesReady += kinect_AllFramesReady;
    kinect.ColorStream.Enable();
    kinect.DepthStream.Enable();
    kinect.SkeletonStream.Enable();
    kinect.Start();

    // Initialize the interaction library.
    stream = new InteractionStream( kinect, new KinectAdapter() );
    stream.InteractionFrameReady += stream_InteractionFrameReady;
}
/// <summary>
/// Called when a Kinect is connected and ready: disables streams on the old
/// sensor, then configures depth, skeleton and interaction streaming on the
/// new one and starts it.
/// </summary>
private void SensorChooserOnKinectChanged(object sender, KinectChangedEventArgs args)
{
    if (args.OldSensor != null)
    {
        try
        {
            args.OldSensor.DepthStream.Range = DepthRange.Default;
            args.OldSensor.SkeletonStream.EnableTrackingInNearRange = false;
            args.OldSensor.DepthStream.Disable();
            args.OldSensor.SkeletonStream.Disable();
        }
        catch (InvalidOperationException)
        {
            MessageBox.Show("InvalidOperationException on old sensor");
        }
    }

    if (args.NewSensor == null)
    {
        return;
    }

    try
    {
        this._sensor = args.NewSensor;
        _sensor.DepthStream.Enable(DepthImageFormat.Resolution640x480Fps30);
        _sensor.SkeletonStream.Enable();

        // Xbox 360 Kinect hardware: near range is unsupported, stay in default.
        _sensor.DepthStream.Range = DepthRange.Default;
        _sensor.SkeletonStream.EnableTrackingInNearRange = false;

        _interactionStream = new InteractionStream(_sensor, new DummyInteractionClient());
        _interactionStream.InteractionFrameReady += InteractionStreamOnInteractionFrameReady;

        _userInfos = new UserInfo[InteractionFrame.UserInfoArrayLength];

        _sensor.DepthFrameReady += SensorOnDepthFrameReady;
        _sensor.SkeletonFrameReady += SensorOnSkeletonFrameReady;

        _sensor.Start();
    }
    catch (InvalidOperationException)
    {
        MessageBox.Show("InvalidOperationException on new sensor");
    }
}
/// <summary>
/// Connects to the first Kinect that reports Connected status, allocates the
/// skeleton and user-info buffers, enables the skeleton/depth streams plus the
/// interaction stream, and starts the sensor.
/// </summary>
private void InitializeKinect()
{
    foreach (var candidate in KinectSensor.KinectSensors)
    {
        // Use the first sensor that actually reports itself as connected.
        if (candidate.Status != KinectStatus.Connected)
        {
            continue;
        }

        this.sensor = candidate;

        skeletons = new Skeleton[sensor.SkeletonStream.FrameSkeletonArrayLength];
        usersInfo = new UserInfo[InteractionFrame.UserInfoArrayLength];

        // Skeleton tracking plus depth measurement at 640x480 / 30fps.
        sensor.SkeletonStream.Enable();
        sensor.DepthStream.Enable(DepthImageFormat.Resolution640x480Fps30);

        // Default (standing) tracking mode — seated was only used for testing.
        sensor.SkeletonStream.TrackingMode = SkeletonTrackingMode.Default;

        // Fired whenever a skeleton is presented to the sensor.
        sensor.SkeletonFrameReady += SensorSkeletonFrameReady;
        // Fired for each new depth frame used by the interaction pipeline.
        sensor.DepthFrameReady += SensorOnDepthFrameReady;

        this.interactionStream = new InteractionStream(sensor, new DummyInteractionClient());
        this.interactionStream.InteractionFrameReady += this.InteractionFrameReady;

        sensor.Start();
        break;
    }
}
/// <summary>
/// Test-only initialization: binds to the single attached Kinect (bails out if
/// none is present), configures depth/skeleton streams plus the interaction
/// stream, starts the sensor, and records success in _initialized.
/// </summary>
private void InitializeKinect()
{
    // Toggle for seated / near-range interaction experiments.
    bool useNearMode = false;

    // This is just a test, so it only works with one Kinect and quits otherwise.
    _sensor = KinectSensor.KinectSensors.FirstOrDefault();
    if (_sensor == null)
    {
        _initialized = false;
        return;
    }

    _skeletons = new Skeleton[_sensor.SkeletonStream.FrameSkeletonArrayLength];
    _userInfos = new UserInfo[InteractionFrame.UserInfoArrayLength];

    _sensor.DepthStream.Range = useNearMode ? DepthRange.Near : DepthRange.Default;
    _sensor.DepthStream.Enable(DepthImageFormat.Resolution640x480Fps30);

    // For seated mode / near range interaction, flip useNearMode above.
    if (useNearMode)
    {
        _sensor.SkeletonStream.TrackingMode = SkeletonTrackingMode.Seated;
        _sensor.SkeletonStream.EnableTrackingInNearRange = true;
    }
    else
    {
        _sensor.SkeletonStream.TrackingMode = SkeletonTrackingMode.Default;
        _sensor.SkeletonStream.EnableTrackingInNearRange = false;
    }
    _sensor.SkeletonStream.Enable();

    _interactionStream = new InteractionStream(_sensor, new DummyInteractionClient());
    _interactionStream.InteractionFrameReady += InteractionStreamOnInteractionFrameReady;

    _sensor.DepthFrameReady += SensorOnDepthFrameReady;
    _sensor.SkeletonFrameReady += SensorOnSkeletonFrameReady;

    _sensor.Start();
    _initialized = true;
}
/// <summary>
/// Event handler for Kinect controller change. Handles sensor configuration on
/// start and stop; the sensor is handed to the KinectRegion only when every
/// step succeeded.
/// </summary>
private void SensorChooserOnKinectChanged(object sender, KinectChangedEventArgs args)
{
    bool error = false;

    if (args.OldSensor != null)
    {
        try
        {
            args.OldSensor.DepthStream.Range = DepthRange.Default;
            args.OldSensor.SkeletonStream.EnableTrackingInNearRange = false;
            args.OldSensor.DepthStream.Disable();
            args.OldSensor.SkeletonStream.Disable();
        }
        catch (InvalidOperationException)
        {
            error = true;
        }
    }

    if (args.NewSensor != null)
    {
        try
        {
            // Heavy smoothing/prediction tuned for cursor-style interaction.
            var smoothing = new TransformSmoothParameters
            {
                Smoothing = 0.7f,
                Correction = 0.3f,
                Prediction = 1.0f,
                JitterRadius = 1.0f,
                MaxDeviationRadius = 1.0f,
            };

            _sensor = args.NewSensor;
            _sensor.DepthStream.Enable(DepthImageFormat.Resolution640x480Fps30);
            _sensor.SkeletonStream.Enable(smoothing);

            _skeletons = new Skeleton[_sensor.SkeletonStream.FrameSkeletonArrayLength];
            _userInfos = new UserInfo[InteractionFrame.UserInfoArrayLength];

            try
            {
                _sensor.DepthStream.Range = DepthRange.Default;
                _sensor.SkeletonStream.EnableTrackingInNearRange = false;
                _sensor.SkeletonStream.TrackingMode = SkeletonTrackingMode.Default;
            }
            catch (InvalidOperationException)
            {
                // Range/tracking features unsupported on this sensor; fall back.
                _sensor.DepthStream.Range = DepthRange.Default;
                _sensor.SkeletonStream.EnableTrackingInNearRange = false;
                error = true;
            }

            _interactionStream = new InteractionStream(_sensor, new DummyInteractionClient());
            _interactionStream.InteractionFrameReady += InteractionStreamOnInteractionFrameReady;

            _sensor.DepthFrameReady += SensorOnDepthFrameReady;
            _sensor.SkeletonFrameReady += SensorOnSkeletonFrameReady;
        }
        catch (InvalidOperationException)
        {
            error = true;
        }
    }

    if (!error)
    {
        kinectRegion.KinectSensor = _sensor;
    }
}