// Window constructor: opens the sensor and infrared reader, builds a 16-bit
// grayscale bitmap at the infrared resolution, and sets up a borderless
// maximized (kiosk-style) window with lifecycle/key handlers.
// NOTE(review): KinectSensor.GetDefault() can return null when no sensor is
// attached; Sensor is dereferenced immediately with no guard — confirm a
// missing-sensor crash at startup is acceptable here.
public MainWindow()
{
    Sensor = KinectSensor.GetDefault();
    Sensor.Open();
    FrameReader = Sensor.InfraredFrameSource.OpenReader();

    // 16-bit grayscale bitmap sized to the infrared frame dimensions.
    BitmapToDisplay = new WriteableBitmap(
        FrameReader.InfraredFrameSource.FrameDescription.Width,
        FrameReader.InfraredFrameSource.FrameDescription.Height,
        96.0, 96.0, PixelFormats.Gray16, null);

    InitializeComponent();

    // Borderless full-screen presentation.
    this.WindowStyle = System.Windows.WindowStyle.None;
    this.WindowState = System.Windows.WindowState.Maximized;

    Loaded += OpenKinect;
    Closing += CloseKinect;
    KeyDown += CheckForExit;
}
/// <summary>
/// Builds the page, wires the depth/infrared/body readers and the hands
/// controller, then starts the Kinect sensor. Does nothing further when no
/// sensor is attached.
/// </summary>
public FingerTrackingPage()
{
    InitializeComponent();

    _sensor = KinectSensor.GetDefault();
    if (_sensor == null)
    {
        return; // No Kinect present; leave the page inert.
    }

    _depthReader = _sensor.DepthFrameSource.OpenReader();
    _depthReader.FrameArrived += DepthReader_FrameArrived;

    _infraredReader = _sensor.InfraredFrameSource.OpenReader();
    _infraredReader.FrameArrived += InfraredReader_FrameArrived;

    _bodyReader = _sensor.BodyFrameSource.OpenReader();
    _bodyReader.FrameArrived += BodyReader_FrameArrived;

    _bodies = new Body[_sensor.BodyFrameSource.BodyCount];

    // Hand/finger tracking is delegated to the HandsController.
    _handsController = new HandsController();
    _handsController.HandsDetected += HandsController_HandsDetected;

    _sensor.Open();
}
// Window startup: opens the Kinect (only when present and not already open),
// the outbound WebSocket connection, the vision pipeline, and one reader per
// frame source (color, infrared, depth, body).
private void Window_Loaded(object sender, RoutedEventArgs e)
{
    kinectManager = KinectManager.GetInstance;
    if (kinectManager.Sensor != null && kinectManager.Sensor.IsOpen == false)
    {
        kinectManager.Open();

        // Streaming connection for processed results.
        wsClient = new WebSocketClient();
        wsClient.InitializeConnection();

        bodyTracker = new CVision.Tracking.BodyTracker();
        frameProc = new FrameProcessor(kinectManager.Sensor);

        colorFrameReader = kinectManager.Sensor.ColorFrameSource.OpenReader();
        colorFrameReader.FrameArrived += ColorFrameReader_FrameArrived;
        infraredFrameReader = kinectManager.Sensor.InfraredFrameSource.OpenReader();
        infraredFrameReader.FrameArrived += InfraredFrameReader_FrameArrived;
        depthFrameReader = kinectManager.Sensor.DepthFrameSource.OpenReader();
        depthFrameReader.FrameArrived += DepthFrameReader_FrameArrived;
        bodyFrameReader = kinectManager.Sensor.BodyFrameSource.OpenReader();
        // NOTE(review): "FramedArrived" looks like a typo, but the name must
        // match the handler defined elsewhere in this class — confirm before
        // renaming both sites together.
        bodyFrameReader.FrameArrived += BodyFrameReader_FramedArrived;
    }
}
// Shutdown: dispose the body/depth/infrared readers, then close and release
// the sensor. (Event handlers are not explicitly unsubscribed; Dispose tears
// each reader down.)
private void Window_Closing(object sender, System.ComponentModel.CancelEventArgs e)
{
    if (_bodyReader != null)
    {
        _bodyReader.Dispose();
        _bodyReader = null;
    }
    if (_depthReader != null)
    {
        _depthReader.Dispose();
        _depthReader = null;
    }
    if (_infraredReader != null)
    {
        _infraredReader.Dispose();
        _infraredReader = null;
    }
    if (_sensor != null)
    {
        _sensor.Close();
        _sensor = null;
    }
}
/// <summary>
/// The KinectHelper constructor: acquires the default sensor, opens it,
/// wraps the first audio beam in a KinectAudioStream, and opens the
/// body/color/depth/infrared frame readers.
/// </summary>
/// <exception cref="InvalidOperationException">
/// Thrown when no sensor is found or the sensor cannot be opened.
/// </exception>
public KinectHelper()
{
    kinectSensor = KinectSensor.GetDefault();
    if (kinectSensor == null)
    {
        // Was "throw new Exception(...)": a specific standard exception type is
        // preferred; callers catching Exception still catch this (compatible).
        throw new InvalidOperationException("Kinect - Cannot find a Kinect Sensor to open!");
    }

    kinectSensor.Open();
    if (!kinectSensor.IsOpen)
    {
        // NOTE(review): the sensor opens asynchronously, so IsOpen may read
        // false immediately after Open() even on a healthy sensor — confirm
        // this check behaves as intended in practice.
        throw new InvalidOperationException("Kinect - Cannot open the Kinect Sensor to receive data!");
    }

    // grab the audio stream (first beam) and wrap it for speech consumption
    IReadOnlyList<AudioBeam> audioBeamList = this.kinectSensor.AudioSource.AudioBeams;
    Stream audioStream = audioBeamList[0].OpenInputStream();
    this.convertStream = new KinectAudioStream(audioStream);

    // Open one reader per stream.
    kinectBody = kinectSensor.BodyFrameSource.OpenReader();
    kinectColor = kinectSensor.ColorFrameSource.OpenReader();
    kinectDepth = kinectSensor.DepthFrameSource.OpenReader();
    kinectInfrared = kinectSensor.InfraredFrameSource.OpenReader();
}
// Page navigation hook: opens the Kinect and its infrared reader, and
// allocates the buffers/bitmap used to display the infrared stream.
protected override void OnNavigatedTo(NavigationEventArgs e)
{
    base.OnNavigatedTo(e);
    try
    {
        // Open the Kinect (original comment: "Kinectを開く").
        kinect = KinectSensor.GetDefault();
        if (kinect == null)
        {
            throw new Exception("Kinectを開けません");
        }
        kinect.Open();

        // Get the infrared frame metadata (original: "赤外線画像の情報を取得する").
        infraredFrameDesc = kinect.InfraredFrameSource.FrameDescription;

        // Buffers for imaging: 4 bytes per pixel (BGRA) plus the raw 16-bit samples.
        infraredBitmapBuffer = new byte[infraredFrameDesc.LengthInPixels * 4];
        infraredBitmap = new WriteableBitmap(infraredFrameDesc.Width, infraredFrameDesc.Height);
        ImageInfrared.Source = infraredBitmap;
        infraredBuffer = new ushort[infraredFrameDesc.LengthInPixels];

        // Open the infrared reader (original: "赤外線画像リーダーを開く").
        infraredFrameReader = kinect.InfraredFrameSource.OpenReader();
        infraredFrameReader.FrameArrived += infraredFrameReader_FrameArrived;
    }
    catch (Exception ex)
    {
        // NOTE(review): ShowAsync is not awaited — the dialog is fire-and-forget.
        MessageDialog dlg = new MessageDialog(ex.Message);
        dlg.ShowAsync();
    }
}
/// <summary>
/// The main window of the app: wires the depth/infrared/body readers plus the
/// HandsController, then starts the sensor. Inert when no Kinect is attached.
/// </summary>
public MainWindow()
{
    InitializeComponent();

    _sensor = KinectSensor.GetDefault();
    if (_sensor == null)
    {
        return; // Nothing to wire without a sensor.
    }

    _depthReader = _sensor.DepthFrameSource.OpenReader();
    _depthReader.FrameArrived += DepthReader_FrameArrived;

    _infraredReader = _sensor.InfraredFrameSource.OpenReader();
    _infraredReader.FrameArrived += InfraredReader_FrameArrived;

    _bodyReader = _sensor.BodyFrameSource.OpenReader();
    _bodyReader.FrameArrived += BodyReader_FrameArrived;

    _bodies = new Body[_sensor.BodyFrameSource.BodyCount];

    // Hand detection is delegated to the HandsController.
    _handsController = new HandsController();
    _handsController.HandsDetected += HandsController_HandsDetected;

    _sensor.Open();
}
/// <summary>
/// Initializes a new instance of the MainWindow class: wires the infrared
/// reader, allocates the display bitmap, and starts the Kinect sensor.
/// </summary>
public MainWindow()
{
    // get the kinectSensor object
    this.kinectSensor = KinectSensor.GetDefault();

    // open the reader for the infrared frames (previous comment said "depth")
    this.infraredFrameReader = this.kinectSensor.InfraredFrameSource.OpenReader();

    // wire handler for frame arrival
    this.infraredFrameReader.FrameArrived += this.Reader_InfraredFrameArrived;

    // get FrameDescription from InfraredFrameSource
    this.infraredFrameDescription = this.kinectSensor.InfraredFrameSource.FrameDescription;

    // create the bitmap to display (32-bit float grayscale at native IR resolution)
    this.infraredBitmap = new WriteableBitmap(this.infraredFrameDescription.Width, this.infraredFrameDescription.Height, 96.0, 96.0, PixelFormats.Gray32Float, null);

    // set IsAvailableChanged event notifier
    this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

    // open the sensor
    this.kinectSensor.Open();

    // set the status text
    this.StatusText = this.kinectSensor.IsAvailable ? Properties.Resources.RunningStatusText : Properties.Resources.NoSensorStatusText;

    // use the window object as the view model in this simple example
    this.DataContext = this;

    // initialize the components (controls) of the window
    this.InitializeComponent();
}
// Infrared frame handler: copies the latest frame into infraredFrameData when
// the frame dimensions match both the buffer and the display bitmap, then
// converts and renders AFTER the using block so the frame is released promptly.
// NOTE(review): "Infraraed" is a typo in the method name; it must match the
// subscription site elsewhere, so it is left unchanged here.
private void InfraraedFrameReader_FrameArrived(InfraredFrameReader sender, InfraredFrameArrivedEventArgs args)
{
    bool infraredFrameProcessed = false;

    // InfraredFrame is IDisposable
    using (InfraredFrame infraredFrame = args.FrameReference.AcquireFrame())
    {
        if (infraredFrame != null)
        {
            FrameDescription infraredFrameDescription = infraredFrame.FrameDescription;

            // verify data and write the new infrared frame to the display bitmap
            if (((infraredFrameDescription.Width * infraredFrameDescription.Height) == this.infraredFrameData.Length) &&
                (infraredFrameDescription.Width == this.bitmap.PixelWidth) &&
                (infraredFrameDescription.Height == this.bitmap.PixelHeight))
            {
                // Copy the pixel data from the image to a temporary array
                infraredFrame.CopyFrameDataToArray(this.infraredFrameData);
                infraredFrameProcessed = true;
            }
        }
    }

    // we got a frame, convert and render
    if (infraredFrameProcessed)
    {
        ConvertInfraredDataToPixels();
        RenderPixelArray(this.infraredPixels);
    }
}
// Page navigation hook: opens the Kinect and its infrared reader, and
// allocates the buffers/bitmap used to display the infrared stream.
protected override void OnNavigatedTo( NavigationEventArgs e )
{
    base.OnNavigatedTo( e );
    try
    {
        // Open the Kinect (original comment: "Kinectを開く").
        kinect = KinectSensor.GetDefault();
        if ( kinect == null )
        {
            throw new Exception( "Kinectを開けません" );
        }
        kinect.Open();

        // Get the infrared frame metadata (original: "赤外線画像の情報を取得する").
        infraredFrameDesc = kinect.InfraredFrameSource.FrameDescription;

        // Buffers for imaging: 4 bytes per pixel (BGRA) plus the raw 16-bit samples.
        infraredBitmapBuffer = new byte[infraredFrameDesc.LengthInPixels * 4];
        infraredBitmap = new WriteableBitmap( infraredFrameDesc.Width, infraredFrameDesc.Height );
        ImageInfrared.Source = infraredBitmap;
        infraredBuffer = new ushort[infraredFrameDesc.LengthInPixels];

        // Open the infrared reader (original: "赤外線画像リーダーを開く").
        infraredFrameReader = kinect.InfraredFrameSource.OpenReader();
        infraredFrameReader.FrameArrived += infraredFrameReader_FrameArrived;
    }
    catch ( Exception ex )
    {
        // NOTE(review): ShowAsync is not awaited — the dialog is fire-and-forget.
        MessageDialog dlg = new MessageDialog( ex.Message );
        dlg.ShowAsync();
    }
}
// Unity initialization: acquire the default Kinect sensor, open infrared and
// depth readers, allocate CPU-side buffers and the display texture, then open
// the sensor if it is not already open.
void Start()
{
    _Sensor = KinectSensor.GetDefault();
    if (_Sensor == null)
    {
        return;
    }

    _Reader_Infrared = _Sensor.InfraredFrameSource.OpenReader();
    _Reader_Depth = _Sensor.DepthFrameSource.OpenReader();
    _Data_Depth = new ushort[_Sensor.DepthFrameSource.FrameDescription.LengthInPixels];

    var desc = _Sensor.InfraredFrameSource.FrameDescription;
    _Data_Infrared = new ushort[desc.LengthInPixels];
    _RawData = new byte[desc.LengthInPixels * 4]; // BGRA: 4 bytes per pixel
    _Texture = new Texture2D(desc.Width, desc.Height, TextureFormat.BGRA32, false);

    Debug.Log("Width: " + desc.Width + ", Height: " + desc.Height);

    width = desc.Width;
    height = desc.Height;
    infrared_matrix = new int[height, width];
    depth_matrix = new int[height, width];

    if (!_Sensor.IsOpen)
    {
        _Sensor.Open();
    }
}
/// <summary>
/// Initialize Kinect Infrared: allocates the raw/display pixel buffers,
/// creates the display bitmap, binds it to the UI, and subscribes to frames.
/// </summary>
private void InitializeInfrared()
{
    if (_kinect == null)
    {
        return;
    }

    // Get frame description for the infrared output
    // (previous comment said "color" — this is the infrared source)
    FrameDescription desc = _kinect.InfraredFrameSource.FrameDescription;

    // Get the framereader for Infrared
    _infraReader = _kinect.InfraredFrameSource.OpenReader();

    // Allocate pixel arrays: raw 16-bit samples and converted display bytes.
    _infraData = new ushort[desc.Width * desc.Height];
    _infraPixels = new byte[desc.Width * desc.Height * _bytePerPixel];

    // Create new WriteableBitmap
    _infraBitmap = new WriteableBitmap(desc.Width, desc.Height, 96, 96, PixelFormats.Bgr32, null);

    // Link WBMP to UI
    InfraredImage.Source = _infraBitmap;

    // Hook-up event
    _infraReader.FrameArrived += OnInfraredFrameArrived;
}
/// <summary>
/// Window startup: opens the Kinect sensor, wires the infrared reader, and
/// builds the 16-bit grayscale bitmap and helpers used for display. Any
/// failure is reported via a message box and closes the window.
/// </summary>
private void Window_Loaded(object sender, RoutedEventArgs e)
{
    try
    {
        // Open the Kinect. BUG FIX: GetDefault() returns null when no sensor
        // is attached; guard explicitly (same pattern and message as the
        // sibling Window_Loaded) instead of surfacing a NullReferenceException.
        kinect = KinectSensor.GetDefault();
        if (kinect == null)
        {
            throw new Exception("Kinectを開けません");
        }
        kinect.Open();

        // Infrared frame metadata (width/height/bytes-per-pixel).
        infraredFrameDesc = kinect.InfraredFrameSource.FrameDescription;

        // Open the infrared reader.
        infraredFrameReader = kinect.InfraredFrameSource.OpenReader();
        infraredFrameReader.FrameArrived += infraredFrameReader_FrameArrived;

        // Display resources: raw sample buffer plus a Gray16 bitmap and the
        // rect/stride used when writing pixels into it.
        infraredBuffer = new ushort[infraredFrameDesc.LengthInPixels];
        infraredBitmap = new WriteableBitmap(infraredFrameDesc.Width, infraredFrameDesc.Height, 96, 96, PixelFormats.Gray16, null);
        infraredRect = new Int32Rect(0, 0, infraredFrameDesc.Width, infraredFrameDesc.Height);
        infraredStride = infraredFrameDesc.Width * (int)infraredFrameDesc.BytesPerPixel;
        ImageInfrared.Source = infraredBitmap;
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message);
        Close();
    }
}
/// <summary>
/// Execute shutdown tasks: dispose the infrared reader, close the sensor,
/// then release every per-body face reader/source pair.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void MainWindow_Closing(object sender, CancelEventArgs e)
{
    // Infrared reader first (InfraredFrameReader is IDisposable).
    if (this.infraredFrameReader != null)
    {
        this.infraredFrameReader.Dispose();
        this.infraredFrameReader = null;
    }

    // Then the sensor itself.
    if (this.kinectSensor != null)
    {
        this.kinectSensor.Close();
        this.kinectSensor = null;
    }

    // Finally the face readers/sources, one pair per tracked body slot.
    for (int index = 0; index < this.bodyCount; index++)
    {
        if (this.faceFrameReaders[index] != null)
        {
            this.faceFrameReaders[index].Dispose();
            this.faceFrameReaders[index] = null;
        }

        if (this.faceFrameSources[index] != null)
        {
            this.faceFrameSources[index].Dispose();
            this.faceFrameSources[index] = null;
        }
    }
}
// Page constructor: initializes the face/body state machines and sampling
// buffers, then wires infrared/body/HD-face readers and opens the sensor.
public FacePage()
{
    // NOTE(review): no sound location is assigned to this SoundPlayer before
    // Play(), so it likely plays nothing — confirm whether a file was intended.
    System.Media.SoundPlayer player = new System.Media.SoundPlayer();
    player.Play();

    InitializeComponent();

    // Until a sensor is found, both trackers wait in the KinectWait state.
    currFaceState = FaceState.KinectWait;
    currBodyState = BodyState.KinectWait;
    faceSamples = new double[NUM_SAMPLES];
    // One run counter per flag type.
    flagRuns = new int[Enum.GetNames(typeof(FlagType)).Length];

    _sensor = KinectSensor.GetDefault();
    if (_sensor != null)
    {
        currFaceState = FaceState.FaceWait;
        currBodyState = BodyState.BodyWait;

        _infraredSource = _sensor.InfraredFrameSource;
        _infraredReader = _infraredSource.OpenReader();
        _infraredReader.FrameArrived += InfraredReader_FrameArrived;

        _bodySource = _sensor.BodyFrameSource;
        _bodyReader = _bodySource.OpenReader();
        _bodyReader.FrameArrived += BodyReader_FrameArrived;

        // HD face tracking source/reader pair.
        _faceSource = new HighDefinitionFaceFrameSource(_sensor);
        _faceReader = _faceSource.OpenReader();
        _faceReader.FrameArrived += FaceReader_FrameArrived;

        _sensor.Open();
    }
}
// Unity shutdown hook: dispose every open frame reader, then close (if open)
// and release the sensor reference.
void OnApplicationQuit()
{
    if (colorReader != null)
    {
        colorReader.Dispose();
        colorReader = null;
    }
    if (infraredReader != null)
    {
        infraredReader.Dispose();
        infraredReader = null;
    }
    if (depthReader != null)
    {
        depthReader.Dispose();
        depthReader = null;
    }
    if (sensor != null)
    {
        if (sensor.IsOpen)
        {
            sensor.Close();
        }
        sensor = null;
    }
}
/// <summary>
/// Initialize Kinect Infrared: allocates buffers and the display bitmap,
/// and binds the bitmap to the given Image control. Idempotent — returns
/// immediately when already initialized.
/// </summary>
/// <param name="source">Image control that will display the infrared stream; may be null.</param>
public void InitializeInfrared(Image source)
{
    if (_kinect == null)
    {
        return;
    }

    // Already initialized — keep the existing reader.
    if (_infraReader != null)
    {
        return;
    }

    // Get frame description for the infrared output
    // (previous comment said "color" — this is the infrared source)
    FrameDescription desc = _kinect.InfraredFrameSource.FrameDescription;

    // Get the framereader for Infrared
    _infraReader = _kinect.InfraredFrameSource.OpenReader();

    // Allocate pixel arrays: raw 16-bit samples and converted display bytes.
    _infraData = new ushort[desc.Width * desc.Height];
    _infraPixels = new byte[desc.Width * desc.Height * _bytePerPixel];

    // Create new WriteableBitmap
    _infraBitmap = new WriteableBitmap(desc.Width, desc.Height, 96, 96, PixelFormats.Bgr32, null);

    if (source != null)
    {
        source.Source = _infraBitmap;
    }

    // NOTE(review): unlike the parameterless variant of this method, no
    // FrameArrived handler is hooked here — confirm subscription happens elsewhere.
}
// Infrared frame handler (async void is acceptable for a top-level event
// handler). Copies the frame data once, records it while the frame is still
// held, and updates the display bitmap after the frame has been released.
private async void _infraredReader_FrameArrived(InfraredFrameReader sender, InfraredFrameArrivedEventArgs e)
{
    // Record only when the recorder runs AND the checkbox opted in.
    bool shouldRecord = _recorder != null && _recorder.IsStarted && InfraredCheckBox.IsChecked.GetValueOrDefault();
    bool shouldDisplay = _displayType == FrameTypes.Infrared;

    if (shouldRecord || shouldDisplay)
    {
        using (var frame = e.FrameReference.AcquireFrame())
        {
            if (frame != null)
            {
                frame.CopyFrameDataToArray(_infraredData);
                if (shouldRecord)
                {
                    _recorder.RecordFrame(frame, _infraredData);
                }
            }
            else
            {
                // Frame already expired; skip display this tick.
                shouldDisplay = false;
                System.Diagnostics.Debug.WriteLine("!!! FRAME SKIPPED (Infrared in MainPage)");
            }
        }

        if (shouldDisplay)
        {
            await _infraredBitmap.UpdateAsync(_infraredData);
        }
    }
}
/// <summary>
/// Page constructor: wires the infrared reader, allocates the raw/converted
/// pixel buffers and the display bitmap, then starts the sensor.
/// </summary>
public MainPage()
{
    // One sensor is currently supported.
    this.kinectSensor = KinectSensor.GetDefault();

    FrameDescription irDesc = this.kinectSensor.InfraredFrameSource.FrameDescription;

    this.infraredFrameReader = this.kinectSensor.InfraredFrameSource.OpenReader();
    this.infraredFrameReader.FrameArrived += this.Reader_InfraredFrameArrived;

    // Raw 16-bit samples plus the converted byte buffer used for display.
    this.infraredFrameData = new ushort[irDesc.Width * irDesc.Height];
    this.infraredPixels = new byte[irDesc.Width * irDesc.Height * BytesPerPixel];

    this.bitmap = new WriteableBitmap(irDesc.Width, irDesc.Height);
    this.CurrentFrameDescription = irDesc;

    // Track sensor availability changes.
    this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

    // The page doubles as its own view model.
    this.DataContext = this;

    this.kinectSensor.Open();
    this.InitializeComponent();
}
/// <summary>
/// Initializes a new instance of the MainWindow class: wires the infrared
/// reader, allocates the display bitmap, and starts the Kinect sensor.
/// </summary>
public MainWindow()
{
    kinectSensor = KinectSensor.GetDefault();

    // Infrared reader + handler + frame metadata.
    infraredFrameReader = kinectSensor.InfraredFrameSource.OpenReader();
    infraredFrameReader.FrameArrived += Reader_InfraredFrameArrived;
    infraredFrameDescription = kinectSensor.InfraredFrameSource.FrameDescription;

    // 32-bit float grayscale bitmap matching the infrared resolution.
    infraredBitmap = new WriteableBitmap(infraredFrameDescription.Width, infraredFrameDescription.Height, 96.0, 96.0, PixelFormats.Gray32Float, null);

    // Track availability and start the sensor.
    kinectSensor.IsAvailableChanged += Sensor_IsAvailableChanged;
    kinectSensor.Open();

    StatusText = kinectSensor.IsAvailable
        ? Properties.Resources.RunningStatusText
        : Properties.Resources.NoSensorStatusText;

    // The window doubles as its own view model.
    DataContext = this;

    InitializeComponent();
}
/// <summary>
/// Page startup: acquires the sensor, wires the infrared/body/HD-face
/// readers and the display bitmap, then loads the Device Portal settings
/// (which happens whether or not a sensor is present).
/// </summary>
private void Page_Loaded(object sender, RoutedEventArgs e)
{
    _sensor = KinectSensor.GetDefault();
    _collectedMeasurements = new List<double>();

    if (_sensor != null)
    {
        // BUG FIX: _sensor.CoordinateMapper was previously read BEFORE the
        // null check, which made the guard useless (NullReferenceException
        // when no sensor is attached). Moved inside the guarded branch.
        _coordinateMapper = _sensor.CoordinateMapper;

        _infraredFrameDescription = _sensor.InfraredFrameSource.FrameDescription;
        _infraredBitmap = new WriteableBitmap(_infraredFrameDescription.Width, _infraredFrameDescription.Height, 96.0, 96.0, PixelFormats.Gray32Float, null);
        camera.Source = _infraredBitmap;

        _bodyReader = _sensor.BodyFrameSource.OpenReader();
        _bodyCount = _sensor.BodyFrameSource.BodyCount;
        _bodies = new Body[_bodyCount];
        _bodyReader.FrameArrived += BodyReader_FrameArrived;

        // HD face tracking resources.
        _faceModel = new FaceModel();
        _faceAlignment = new FaceAlignment();
        _faceFrameSource = new HighDefinitionFaceFrameSource(_sensor);
        _faceFrameReader = _faceFrameSource.OpenReader();
        _faceFrameReader.FrameArrived += FaceReader_FrameArrived;

        _irReader = _sensor.InfraredFrameSource.OpenReader();
        _irReader.FrameArrived += InfraredReader_FrameArrived;

        _sensor.Open();
    }

    // Device Portal settings load regardless of sensor presence.
    _settingsVM = DevPortalVM.LoadContext(SETTINGS_FILENAME);
    DevPortalGrid.DataContext = _settingsVM;
    _devicePortalClient = new DevPortalHelper(_settingsVM);
}
/// <summary>
/// Click handler: reads the infrared frame metadata, opens the infrared
/// reader, and starts the sensor.
/// </summary>
private void OnOpenReader(object sender, RoutedEventArgs e)
{
    // Note: the description is fetched but not otherwise used here.
    FrameDescription infraredFrameDescription = this.kinectSensor.InfraredFrameSource.FrameDescription;

    this.infraredFrameReader = this.kinectSensor.InfraredFrameSource.OpenReader();
    this.kinectSensor.Open();
}
/// <summary>
/// Opens the infrared reader and hands the frame description to the shared
/// InfraredHandler singleton; display starts disabled.
/// </summary>
public void InitializeInfraredStream()
{
    this.infraredFrameReader = this.kinectSensor.InfraredFrameSource.OpenReader();

    infraredHandler = InfraredHandler.Instance;
    infraredHandler.InfraredHandlerSet(this.kinectSensor.InfraredFrameSource.FrameDescription);
    infraredHandler.SetShowState(false);
}
/// <summary>
/// Tears down the infrared pipeline: disposes the reader (when open) and
/// drops the display bitmap reference.
/// </summary>
public void TerminateFrame()
{
    if (this.infraredFrameReader == null)
    {
        return; // Already terminated.
    }

    this.infraredFrameReader.Dispose();
    this.infraredFrameReader = null;
    this.infraredBitmap = null;
}
/// <summary>
/// Open one reader per Kinect frame source: color, depth, body, body-index,
/// and infrared.
/// </summary>
private static void OpenFrameReaders()
{
    // Open readers
    m_ColorFrameReader = kinectSensor.ColorFrameSource.OpenReader();
    m_DepthFrameReader = kinectSensor.DepthFrameSource.OpenReader();
    m_BodyFrameReader = kinectSensor.BodyFrameSource.OpenReader();
    m_BodyIndexFrameReader = kinectSensor.BodyIndexFrameSource.OpenReader();
    m_InfraredFrameReader = kinectSensor.InfraredFrameSource.OpenReader();
}
/// <summary>
/// Releases the infrared resources: disposes the reader if one is open and
/// clears the bitmap reference. Safe to call more than once.
/// </summary>
public void TerminateFrame()
{
    if (this.infraredFrameReader == null)
    {
        return;
    }

    this.infraredFrameReader.Dispose();
    this.infraredFrameReader = null;
    this.infraredBitmap = null;
}
// Infrared frame handler: acquires (and thereby releases) the frame to keep
// the pipeline drained, but is otherwise a stub — no processing yet.
private void InfraredFrameArrived(InfraredFrameReader sender, InfraredFrameArrivedEventArgs args)
{
    using (InfraredFrame frame = args.FrameReference.AcquireFrame())
    {
        if (frame != null)
        {
            // TODO: process the infrared frame.
        }
    }
}
/// <summary>
/// The main window of the app: collects the participant and round numbers,
/// loads that participant/round's gesture-order file, then wires the Kinect
/// depth/infrared/body readers and the HandsController.
/// </summary>
public MainWindow()
{
    // get participant number (valid range 0..49, per the prompt text)
    participant = Interaction.InputBox("Please enter participant number: ", "HANDGR Data Gatherer", "");
    while (!int.TryParse(participant, out participantNumber) || participantNumber < 0 || participantNumber > 49)
    {
        participant = Interaction.InputBox("Invalid participant number entered. Please enter a VALID participant number (between 0 and 49): ", "HANDGR Data Gatherer", "");
    }

    // get round number (valid range 0..25, per the prompt text)
    round = Interaction.InputBox("Participant " + participant + ": \n Please enter round number: ", "HANDGR Data Gatherer", "");
    // BUG FIX: the bound was "roundNumber > 26", which accepted 26 even though
    // both prompts state the valid range is 0..25.
    while (!int.TryParse(round, out roundNumber) || roundNumber < 0 || roundNumber > 25)
    {
        round = Interaction.InputBox("Participant " + participant + ": \n Invalid round number entered. Please enter a VALID round number (between 0 and 25): ", "HANDGR Data Gatherer", "");
    }

    InitializeComponent();

    _sensor = KinectSensor.GetDefault();

    this.details.Text = "Participant Number: " + participant + "\nRound Number: " + round;
    gestureImage.Source = new BitmapImage(new Uri(Directory.GetCurrentDirectory() + "\\images\\blank.png"));

    // read in order of gestures for this participant/round
    using (var reader = new StreamReader(@"./orders/" + participant + "-" + round + ".txt"))
    {
        while (!reader.EndOfStream)
        {
            var line = reader.ReadLine();
            var values = line.Split(',');
            // NOTE(review): assumes each line has at least 10 comma-separated
            // gesture names — confirm against the order-file format.
            for (int i = 0; i < 10; ++i)
            {
                gestures.Add(values[i]);
            }
        }
    }

    if (_sensor != null)
    {
        _depthReader = _sensor.DepthFrameSource.OpenReader();
        _depthReader.FrameArrived += DepthReader_FrameArrived;

        _infraredReader = _sensor.InfraredFrameSource.OpenReader();
        _infraredReader.FrameArrived += InfraredReader_FrameArrived;

        _bodyReader = _sensor.BodyFrameSource.OpenReader();
        _bodyReader.FrameArrived += BodyReader_FrameArrived;

        _bodies = new Body[_sensor.BodyFrameSource.BodyCount];

        // Initialize the HandsController and subscribe to the HandsDetected event.
        _handsController = new HandsController();
        _handsController.HandsDetected += HandsController_HandsDetected;

        this.KeyDown += new System.Windows.Input.KeyEventHandler(OnEnter);
        _sensor.Open();
    }
}
/// <summary>
/// Disposes the InfraredFrameReader, unhooking the frame handler first so no
/// callback can land on a disposed reader.
/// </summary>
public void Dispose()
{
    if (this.infraredFrameReader == null)
    {
        return;
    }

    this.infraredFrameReader.FrameArrived -= this.Reader_InfraredFrameArrived;
    this.infraredFrameReader.Dispose();
    this.infraredFrameReader = null;
}
/// <summary>
/// Constructor: validates the sensor, opens its infrared reader, subscribes
/// to incoming frames, and allocates the shared frame-data holder.
/// </summary>
/// <param name="sensor">Kinect sensor</param>
public KinectSensorInfraredFrameProvider(KinectSensor sensor)
{
    // Fail fast on a missing sensor.
    if (sensor == null)
    {
        throw new ArgumentNullException("sensor");
    }

    this.sensor = sensor;

    this.reader = this.sensor.InfraredFrameSource.OpenReader();
    this.reader.FrameArrived += FrameArrived;

    this.frameData = new InfraredFrameData();
}
// Window constructor: acquires both the Kinect v1 (via the wrapper library)
// and the Kinect v2 sensors, and wires the v2 infrared reader.
// NOTE(review): v2Sensor is dereferenced without a null check — GetDefault()
// returns null when no sensor is attached; confirm that is acceptable here.
public MainWindow()
{
    InitializeComponent();
    v1Sensor = KinectV1Lib.KV1.GetDefault();
    v2Sensor = KinectSensor.GetDefault();
    v2IRReader = v2Sensor.InfraredFrameSource.OpenReader();
    v2IRReader.FrameArrived += v2IRReader_FrameArrived;
}
/// <summary>
/// Stops the <c>KinectRecorder</c>, writes all frames remaining in the
/// record queue, and closes the associated stream.
/// </summary>
public async Task StopAsync()
{
    if (_isStopped)
    {
        return; // Idempotent: a second Stop is a no-op.
    }

    System.Diagnostics.Debug.WriteLine(">>> StopAsync (queue size {0})", _recordQueue.Count);

    // Flip the flags first so the frame handlers stop enqueueing.
    _isStarted = false;
    _isStopped = true;

    // Unhook and dispose every reader (body, color, depth, infrared) so no
    // further frames arrive while the queue drains.
    if (_bodyReader != null)
    {
        _bodyReader.FrameArrived -= _bodyReader_FrameArrived;
        _bodyReader.Dispose();
        _bodyReader = null;
    }
    if (_colorReader != null)
    {
        _colorReader.FrameArrived -= _colorReader_FrameArrived;
        _colorReader.Dispose();
        _colorReader = null;
    }
    if (_depthReader != null)
    {
        _depthReader.FrameArrived -= _depthReader_FrameArrived;
        _depthReader.Dispose();
        _depthReader = null;
    }
    if (_infraredReader != null)
    {
        _infraredReader.FrameArrived -= _infraredReader_FrameArrived;
        _infraredReader.Dispose();
        _infraredReader = null;
    }

    // Wait for the background writer to finish draining the queue.
    // A cancellation here is expected and deliberately swallowed (logged only).
    try
    {
        await _processFramesTask;
    }
    catch
    {
        System.Diagnostics.Debug.WriteLine("!!! Process Canceled (in StopAsync)");
    }

    _processFramesTask = null;

    await CloseWriterAsync();

    System.Diagnostics.Debug.WriteLine("<<< StopAsync (DONE!)");
}
/// <summary>
/// Start to retrieve the frame: fetches the infrared metadata, opens the
/// infrared reader with its handler, and builds the display bitmap.
/// </summary>
public override void Open()
{
    // Frame metadata for the infrared stream (previous comment said "depth").
    this.frameDescription = this.sensor.InfraredFrameSource.FrameDescription;

    this.infraredFrameReader = this.sensor.InfraredFrameSource.OpenReader();
    this.infraredFrameReader.FrameArrived += this.Reader_InfraredFrameArrived;

    // 32-bit float grayscale bitmap at the native infrared resolution.
    this.imageBitmap = new WriteableBitmap(this.frameDescription.Width, this.frameDescription.Height, 96.0, 96.0, PixelFormats.Gray32Float, null);
}
// One-stop Kinect setup: opens the sensor and the infrared reader, then
// constructs the video/audio/body/face handler objects around the sensor.
public void initKinectStuff()
{
    this.kinectSensor = KinectSensor.GetDefault();
    this.kinectSensor.Open();

    // NOTE(review): frameReader is opened but no FrameArrived handler is
    // wired here — confirm subscription happens elsewhere.
    this.frameReader = this.kinectSensor.InfraredFrameSource.OpenReader();

    videoHandler = new VideoHandler(this.kinectSensor);
    audioHandler = new AudioHandler(this.kinectSensor);
    bodyFrameHandler = new BodyFrameHandler(this.kinectSensor);
    faceFrameHandler = new FaceFrameHandler(this.kinectSensor);
}
// Sensor startup. Opens the connection and the audio stream; when the config
// flag "kinect_v2.speech.speech_only" is set, stops there (speech-only mode).
// Otherwise starts every stream helper, the motion task, and one single-frame
// reader per source, each delegating to its Handle*Frame method.
public void Start()
{
    if (Sensor == null)
    {
        return; // No sensor — nothing to start.
    }
    Log("Start sensor...");

    // Mapper
    coordinateMapper = Sensor.CoordinateMapper;

    // Open connection
    Sensor.Open();

    // Audio Stream
    StartAudioStream();

    // Speech-only mode: skip image/body pipelines entirely (default true).
    if (ConfigManager.GetInstance().Find("kinect_v2.speech.speech_only", true))
    {
        init = true;
        return;
    }

    // Init single frame
    StartColorStream();
    StartDepthStream();
    StartInfraredStream();
    StartBodyStream();
    StartBodyIndexStream();

    // Motion Task
    StartMotionTask();

    // Multi Frame Reader (kept for reference; single readers are used instead)
    // reader = Sensor.OpenMultiSourceFrameReader(FrameSourceTypes.BodyIndex | FrameSourceTypes.Body);
    // reader.MultiSourceFrameArrived += OnMultipleFramesArrivedHandler;

    // Single Frame Reader: each source forwards its frame reference to a handler.
    dfr = Sensor.DepthFrameSource.OpenReader();
    dfr.FrameArrived += (object sender, DepthFrameArrivedEventArgs e) => { HandleDepthFrame(e.FrameReference); };

    xfr = Sensor.InfraredFrameSource.OpenReader();
    xfr.FrameArrived += (object sender, InfraredFrameArrivedEventArgs e) => { HandleInfraredFrame(e.FrameReference); };

    cfr = Sensor.ColorFrameSource.OpenReader();
    cfr.FrameArrived += (object sender, ColorFrameArrivedEventArgs e) => { HandleColorFrame(e.FrameReference); };

    bfr = Sensor.BodyFrameSource.OpenReader();
    bfr.FrameArrived += (object sender, BodyFrameArrivedEventArgs e) => { HandleBodyFrame(e.FrameReference); };

    ifr = Sensor.BodyIndexFrameSource.OpenReader();
    ifr.FrameArrived += (object sender, BodyIndexFrameArrivedEventArgs e) => { HandleBodyIndexFrame(e.FrameReference); };

    init = true;
}
// Shutdown (original comment: "終了処理"): dispose the infrared reader,
// then close and release the Kinect sensor.
private void Window_Closing( object sender, System.ComponentModel.CancelEventArgs e )
{
    if ( infraredFrameReader != null )
    {
        infraredFrameReader.Dispose();
        infraredFrameReader = null;
    }
    if ( kinect != null )
    {
        kinect.Close();
        kinect = null;
    }
}
/// <summary>
/// Opens the infrared reader, wires its handler, allocates the display
/// bitmap and starts the stopwatch.
/// </summary>
/// <returns>true when the reader was created; false otherwise.</returns>
public bool InitializeFrame()
{
    this.infraredFrameReader = this.kinectSensor.InfraredFrameSource.OpenReader();
    if (this.infraredFrameReader == null)
    {
        return false;
    }

    this.infraredFrameReader.FrameArrived += this.Reader_InfraredFrameArrived;
    this.infraredFrameDescription = this.kinectSensor.InfraredFrameSource.FrameDescription;

    // 32-bit float grayscale bitmap at the native infrared resolution.
    this.infraredBitmap = new WriteableBitmap(this.infraredFrameDescription.Width, this.infraredFrameDescription.Height, 96.0, 96.0, PixelFormats.Gray32Float, null);

    this.stopwatch.Start();
    return true;
}
/// <summary>
/// Page startup: opens the infrared reader plus a multi-source reader for
/// body + infrared, allocates the conversion buffers and display bitmap,
/// then starts the sensor.
/// </summary>
void MainPage_Loaded(object sender, RoutedEventArgs e)
{
    sensor = KinectSensor.GetDefault();
    irReader = sensor.InfraredFrameSource.OpenReader();

    FrameDescription fd = sensor.InfraredFrameSource.FrameDescription;
    irData = new ushort[fd.LengthInPixels];
    irDataConverted = new byte[fd.LengthInPixels * 4]; // BGRA output buffer
    irBitmap = new WriteableBitmap(fd.Width, fd.Height);
    image.Source = irBitmap;

    // Use the sensor-reported body capacity instead of the magic number 6
    // (BodyCount is 6 on Kinect v2, so behavior is unchanged; this matches
    // how sibling code in this project sizes the array).
    bodies = new Body[sensor.BodyFrameSource.BodyCount];
    msfr = sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Body | FrameSourceTypes.Infrared);
    msfr.MultiSourceFrameArrived += msfr_MultiSourceFrameArrived;

    sensor.Open();
}
// Window constructor: populates the compression/smoothing/display combo
// boxes, wires all four Kinect readers, starts the sensor, and shows the
// color stream by default.
public MainWindow()
{
    InitializeComponent();

    RecordButton.Click += RecordButton_Click;

    // Color compression options: codec + capture resolution.
    ColorCompressionCombo.Items.Add("None (1920x1080)");
    ColorCompressionCombo.Items.Add("None (1280x720)");
    ColorCompressionCombo.Items.Add("None (640x360)");
    ColorCompressionCombo.Items.Add("JPEG (1920x1080)");
    ColorCompressionCombo.Items.Add("JPEG (1280x720)");
    ColorCompressionCombo.Items.Add("JPEG (640x360)");
    ColorCompressionCombo.SelectedIndex = 0;

    // Skeleton smoothing filters.
    SmoothingCombo.Items.Add("None");
    SmoothingCombo.Items.Add("Kalman Filter");
    SmoothingCombo.Items.Add("Double Exponential");
    SmoothingCombo.SelectionChanged += SmoothingCombo_SelectionChanged;
    SmoothingCombo.SelectedIndex = 0;

    // Which stream to display.
    DisplayCombo.Items.Add("Body");
    DisplayCombo.Items.Add("Color");
    DisplayCombo.Items.Add("Depth");
    DisplayCombo.Items.Add("Infrared");
    DisplayCombo.SelectionChanged += DisplayCombo_SelectionChanged;
    DisplayCombo.SelectedIndex = 0;

    _sensor = KinectSensor.GetDefault();

    _bodyReader = _sensor.BodyFrameSource.OpenReader();
    _bodyReader.FrameArrived += _bodyReader_FrameArrived;

    _colorReader = _sensor.ColorFrameSource.OpenReader();
    _colorReader.FrameArrived += _colorReader_FrameArrived;

    _depthReader = _sensor.DepthFrameSource.OpenReader();
    _depthReader.FrameArrived += _depthReader_FrameArrived;

    _infraredReader = _sensor.InfraredFrameSource.OpenReader();
    _infraredReader.FrameArrived += _infraredReader_FrameArrived;

    _sensor.Open();

    // NOTE(review): _colorBitmap must already be initialized (field
    // initializer elsewhere?) for this to be non-null here — confirm.
    OutputImage.Source = _colorBitmap.Bitmap;
}
/// <summary>
/// Initializes a new instance of the KinectIRView class: opens the infrared
/// reader, wires its handler, and builds the display bitmap.
/// </summary>
/// <param name="kinectSensor">Active instance of the Kinect sensor</param>
public KinectIRView(KinectSensor kinectSensor)
{
    if (kinectSensor == null)
    {
        throw new ArgumentNullException("kinectSensor");
    }

    // open the reader for the Infrared frames
    this.infraredFrameReader = kinectSensor.InfraredFrameSource.OpenReader();

    // wire handler for frame arrival
    this.infraredFrameReader.FrameArrived += this.Reader_InfraredFrameArrived;

    // get FrameDescription from InfraredFrameSource
    this.infraredFrameDescription = kinectSensor.InfraredFrameSource.FrameDescription;

    // create the bitmap to display (32-bit float grayscale at native IR resolution)
    this.infraredBitmap = new WriteableBitmap(this.infraredFrameDescription.Width, this.infraredFrameDescription.Height, 96.0, 96.0, PixelFormats.Gray32Float, null);
}
// Initialize Kinect Infrared: allocates the raw/display buffers, creates the
// display bitmap, and subscribes to infrared frames.
private void InitializeInfrared()
{
    if (_kinectSensor == null)
        return;

    // Get frame description for the infrared output
    // (previous comment said "color" — this is the infrared source)
    var desc = _kinectSensor.InfraredFrameSource.FrameDescription;

    // Get the framereader for Infrared
    _infraReader = _kinectSensor.InfraredFrameSource.OpenReader();

    // Allocate pixel arrays: raw 16-bit samples and converted display bytes.
    _infraData = new ushort[desc.Width*desc.Height];
    InfraPixels = new byte[desc.Width*desc.Height*_bytePerPixel];

    // Create new WriteableBitmap
    _infraBitmap = new WriteableBitmap(desc.Width, desc.Height, 96, 96, PixelFormats.Bgr32, null);

    // Hook-up event
    _infraReader.FrameArrived += OnInfraredFrameArrived;
}
// Window constructor: sets the initial menu state, opens the Kinect and an
// infrared reader, and builds the analyzers and stream handlers around the
// sensor.
// NOTE(review): kinectSensor is dereferenced without a null check —
// GetDefault() returns null when no sensor is attached; confirm acceptable.
public MainWindow()
{
    InitializeComponent();

    // Start in the menu state.
    myState = new States();
    myState = States.menu;
    loadMode();

    this.kinectSensor = KinectSensor.GetDefault();
    this.kinectSensor.Open();
    this.frameReader = this.kinectSensor.InfraredFrameSource.OpenReader();

    rulesAnalyzer = new RulesAnalyzer(this);
    //rulesAnalyzerFIFO = new RulesAnalyzerFIFO(this);
    rulesAnalyzerImproved = new RulesAnalyzerImproved(this);

    videoHandler = new VideoHandler(this.kinectSensor);
    audioHandler = new AudioHandler(this.kinectSensor);
    bodyFrameHandler = new BodyFrameHandler(this.kinectSensor);

    initializeHaptic();
}
// Converts each 16-bit infrared sample to an 8-bit intensity, writes it into
// the BGRA back buffer, and invalidates the bitmap for display.
void irReader_FrameArrived(InfraredFrameReader sender, InfraredFrameArrivedEventArgs args)
{
    using (InfraredFrame irFrame = args.FrameReference.AcquireFrame())
    {
        if (irFrame == null)
        {
            return; // Frame expired before we could acquire it.
        }

        irFrame.CopyFrameDataToArray(irData);

        for (int i = 0; i < irData.Length; i++)
        {
            // Keep the most significant byte of each 16-bit sample.
            byte intensity = (byte)(irData[i] >> 8);
            int offset = i * 4;
            irDataConverted[offset] = intensity;     // B
            irDataConverted[offset + 1] = intensity; // G
            irDataConverted[offset + 2] = intensity; // R
            irDataConverted[offset + 3] = 255;       // A (opaque)
        }

        irDataConverted.CopyTo(irBitmap.PixelBuffer);
        irBitmap.Invalidate();
    }
}
// Control constructor: outside the XAML designer, opens color/infrared/
// body-index readers from the shared KinectManager, allocates the body-index
// pixel buffer, and wires all frame handlers. Starts in color mode.
public LiveBackground() : base()
{
    if (!DesignerProperties.GetIsInDesignMode(this))
    {
        _kinectManager = KinectManager.Default;

        // open the readers for the color, infrared and body-index frames
        this._colorFrameReader = _kinectManager.KinectSensor.ColorFrameSource.OpenReader();
        this._infraredFrameReader = _kinectManager.KinectSensor.InfraredFrameSource.OpenReader();
        this._bodyIndexFrameReader = _kinectManager.KinectSensor.BodyIndexFrameSource.OpenReader();

        FrameDescription bodyDescription = this.GetFrameDescriptionForMode(BackgroundMode.BodyIndex);
        this.bodyIndexPixels = new uint[bodyDescription.Width * bodyDescription.Height];

        // wire handler for frame arrival
        this._colorFrameReader.FrameArrived += this.Reader_ColorFrameArrived;
        this._infraredFrameReader.FrameArrived += this.Reader_InfraredFrameArrived;
        this._bodyIndexFrameReader.FrameArrived += this.Reader_BodyIndexFrameArrived;

        // Render the color stream first.
        this._needColor = true;
    }
}
/// <summary>
/// Opens the default Kinect sensor on window load and starts the infrared
/// reader. Shows the error and closes the window when the sensor cannot be
/// opened. (Original comments were Japanese; translated below. Runtime message
/// strings are intentionally left in Japanese.)
/// </summary>
private void Window_Loaded( object sender, RoutedEventArgs e )
{
    try {
        // Open the Kinect sensor
        kinect = KinectSensor.GetDefault();
        if ( kinect == null ) {
            // User-facing message: "Cannot open Kinect"
            throw new Exception("Kinectを開けません");
        }
        kinect.Open();

        // Get the infrared image frame description
        infraredFrameDesc = kinect.InfraredFrameSource.FrameDescription;

        // Open the infrared reader and subscribe to frame arrival
        infraredFrameReader = kinect.InfraredFrameSource.OpenReader();
        infraredFrameReader.FrameArrived += infraredFrameReader_FrameArrived;
    }
    catch ( Exception ex ) {
        MessageBox.Show( ex.Message );
        Close();
    }
}
/// <summary>
/// Initializes a new instance of the Streamer class: opens color and infrared
/// readers on the default sensor, prepares the display bitmaps and finally
/// opens the sensor itself.
/// </summary>
public Streamer()
{
    this.kinectSensor = KinectSensor.GetDefault();

    // Open one reader per stream we forward.
    this.colorFrameReader = this.kinectSensor.ColorFrameSource.OpenReader();
    this.infraredFrameReader = this.kinectSensor.InfraredFrameSource.OpenReader();

    // Subscribe to frame arrival for both streams.
    this.colorFrameReader.FrameArrived += this.Reader_ColorFrameArrived;
    this.infraredFrameReader.FrameArrived += this.Reader_InfraredFrameArrived;

    // Color is requested as Bgra; infrared uses the source's native description.
    FrameDescription colorDesc = this.kinectSensor.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Bgra);
    this.infraredFrameDescription = this.kinectSensor.InfraredFrameSource.FrameDescription;

    // Bitmaps the UI binds to (96 DPI).
    this.colorBitmap = new WriteableBitmap(colorDesc.Width, colorDesc.Height, 96.0, 96.0, PixelFormats.Bgr32, null);
    this.infraredBitmap = new WriteableBitmap(this.infraredFrameDescription.Width, this.infraredFrameDescription.Height, 96.0, 96.0, PixelFormats.Gray32Float, null);

    // Start the sensor last, once everything is wired.
    this.kinectSensor.Open();
}
/// <summary>
/// Stops recording: detaches and disposes every frame reader, waits for the
/// three queue-draining tasks to finish, then closes the writer.
/// Idempotent — returns immediately if already stopped.
/// </summary>
public async Task StopAsync()
{
    if (_isStopped) return;

    System.Diagnostics.Debug.WriteLine(">>> Attempt to Stop Recording (Color queue size {0})", _recordColorQueue.Count);
    System.Diagnostics.Debug.WriteLine(">>> Attempt to Stop Recording (Depth queue size {0})", _recordDepthQueue.Count);
    System.Diagnostics.Debug.WriteLine(">>> Attempt to Stop Recording (Infrared queue size {0})", _recordInfraredQueue.Count);

    // Flip the flags first so frame handlers stop enqueuing new work.
    _isStarted = false;
    _isStopped = true;

    // Unsubscribe before disposing so no handler fires on a disposed reader.
    if (_colorReader != null)
    {
        _colorReader.FrameArrived -= _colorReader_FrameArrived;
        _colorReader.Dispose();
        _colorReader = null;
    }

    if (_depthReader != null)
    {
        _depthReader.FrameArrived -= _depthReader_FrameArrived;
        _depthReader.Dispose();
        _depthReader = null;
    }

    if (_infraredReader != null)
    {
        _infraredReader.FrameArrived -= _infraredReader_FrameArrived;
        _infraredReader.Dispose();
        _infraredReader = null;
    }

    // Let the background drain tasks complete; cancellation surfaces here.
    try
    {
        await _processColorFramesTask;
        await _processDepthFramesTask;
        await _processInfraredFramesTask;
    }
    catch
    {
        System.Diagnostics.Debug.WriteLine("!!! Process Canceled (in StopAsync)");
    }

    _processColorFramesTask = null;
    _processDepthFramesTask = null;
    _processInfraredFramesTask = null;

    await CloseWriterAsync();

    System.Diagnostics.Debug.WriteLine("<<< Stopping recording (DONE!) :D ");
    /* Console.WriteLine("oooo Color enqueue avg {0}",colorSum/colorCounter); Console.WriteLine("oooo Depth enqueue avg {0}", depthSum / depthCounter); Console.WriteLine("oooo Infrared enqueue avg {0}", infraredSum / infraredCounter); */
}
/// <summary>
/// Starts recording: opens a reader per enabled stream, opens the sensor if
/// needed, writes the JSON metadata header (then disposes the metadata writer)
/// and kicks off the per-stream queue-processing tasks.
/// </summary>
public void Start()
{
    // Timestamps used by the frame handlers for per-stream one-second
    // throttling windows.
    nowColor = DateTime.Now;
    nowDepth = DateTime.Now;
    nowInfrared = DateTime.Now;
    laterColor = nowColor.AddSeconds(1);
    laterDepth = nowDepth.AddSeconds(1);
    laterInfrared = nowInfrared.AddSeconds(1);

    if (_isStarted) return;

    if (_sensor != null)
    {
        // Open a dedicated reader per enabled stream and subscribe.
        if (EnableColorRecorder)
        {
            _colorReader = _sensor.ColorFrameSource.OpenReader();
            _colorReader.FrameArrived += _colorReader_FrameArrived;
        }

        if (EnableDepthRecorder)
        {
            _depthReader = _sensor.DepthFrameSource.OpenReader();
            _depthReader.FrameArrived += _depthReader_FrameArrived;
        }

        if (EnableInfraredRecorder)
        {
            _infraredReader = _sensor.InfraredFrameSource.OpenReader();
            _infraredReader.FrameArrived += _infraredReader_FrameArrived;
        }

        if (!_sensor.IsOpen) _sensor.Open();
    }

    _isStarted = true;

    try
    {
        _generalSemaphore.Wait();

        // Serialize the recording metadata as the file header.
        var metadata = new FileMetadata()
        {
            Version = this.GetType().GetTypeInfo().Assembly.GetName().Version.ToString(),
            HasColor = this.EnableColorRecorder,
            HasDepth = this.EnableDepthRecorder,
            HasInfrared = this.EnableInfraredRecorder,
            ColorLocation = this.ColorLocation,
            DepthLocation = this.DepthLocation,
            InfraredLocation = this.InfraredLocation,
            ColorCodecId = this.ColorRecorderCodec.ColorCodecId,
            DepthCodecId = this.DepthRecorderCodec.DepthCodecId,
            InfraredCodecId = this.InfraredRecorderCodec.InfraredCodecId,
            // NOTE(review): the stored fps is "100 - Framerate" — presumably
            // the Framerate properties are inverted 0-100 throttle values.
            // Confirm against the consumer of this metadata before changing.
            FpsColor = 100-this.ColorFramerate,
            FpsDepth = 100-this.DepthFramerate,
            FpsInfrared = 100-this.InfraredFramerate
        };
        _fileMetaDataWriter.Write(JsonConvert.SerializeObject(metadata));
    }
    catch (Exception ex)
    {
        System.Diagnostics.Debug.WriteLine("Error Saving MetaData: " + ex);
    }
    finally
    {
        _generalSemaphore.Release();

        // The metadata writer is only needed once — flush and dispose it now.
        if (_fileMetaDataWriter != null)
        {
            _fileMetaDataWriter.Flush();
            if (_fileMetaDataWriter.BaseStream != null)
            {
                _fileMetaDataWriter.BaseStream.Flush();
            }
            _fileMetaDataWriter.Dispose();
            _fileMetaDataWriter = null;
        }
    }

    // Start the background tasks that drain the per-stream frame queues.
    _processColorFramesTask = ProcessColorFramesAsync();
    _processDepthFramesTask = ProcessDepthFramesAsync();
    _processInfraredFramesTask = ProcessInfraredFramesAsync();
}
/// <summary>
/// Core dispose: detaches and disposes the frame readers, flushes and disposes
/// each stream writer under its semaphore, then releases the cancellation
/// token source.
/// </summary>
/// <param name="disposing">true when called from Dispose(); otherwise nothing is done.</param>
protected virtual void Dispose(bool disposing)
{
    if (disposing)
    {
        // Readers: unsubscribe first so no callback races the disposal below.
        if (_colorReader != null)
        {
            _colorReader.FrameArrived -= _colorReader_FrameArrived;
            _colorReader.Dispose();
            _colorReader = null;
        }

        if (_depthReader != null)
        {
            _depthReader.FrameArrived -= _depthReader_FrameArrived;
            _depthReader.Dispose();
            _depthReader = null;
        }

        if (_infraredReader != null)
        {
            _infraredReader.FrameArrived -= _infraredReader_FrameArrived;
            _infraredReader.Dispose();
            _infraredReader = null;
        }

        #region Color disposing
        try
        {
            // Hold the semaphore so we never flush mid-write.
            _colorSemaphore.Wait();
            if (_colorWriter != null)
            {
                _colorWriter.Flush();
                if (_colorWriter.BaseStream != null)
                {
                    _colorWriter.BaseStream.Flush();
                }
                _colorWriter.Dispose();
                _colorWriter = null;
            }
        }
        catch (Exception ex)
        {
            // TODO: Change to log the error
            System.Diagnostics.Debug.WriteLine("Error Disposing Color: " + ex);
        }
        finally
        {
            // NOTE: the semaphore is disposed (not released) — this object is
            // unusable after Dispose, so no further waiters are expected.
            _colorSemaphore.Dispose();
        }
        #endregion

        #region Depth disposing
        try
        {
            _depthSemaphore.Wait();
            if (_depthWriter != null)
            {
                _depthWriter.Flush();
                if (_depthWriter.BaseStream != null)
                {
                    _depthWriter.BaseStream.Flush();
                }
                _depthWriter.Dispose();
                _depthWriter = null;
            }
        }
        catch (Exception ex)
        {
            // TODO: Change to log the error
            System.Diagnostics.Debug.WriteLine("Error Disposing Depth: " + ex);
        }
        finally
        {
            _depthSemaphore.Dispose();
        }
        #endregion

        #region Infrared disposing
        try
        {
            _infraredSemaphore.Wait();
            if (_infraredWriter != null)
            {
                _infraredWriter.Flush();
                if (_infraredWriter.BaseStream != null)
                {
                    _infraredWriter.BaseStream.Flush();
                }
                _infraredWriter.Dispose();
                _infraredWriter = null;
            }
        }
        catch (Exception ex)
        {
            // TODO: Change to log the error
            System.Diagnostics.Debug.WriteLine("Error Disposing Infrared: " + ex);
        }
        finally
        {
            _infraredSemaphore.Dispose();
        }
        #endregion

        if (_processFramesCancellationTokenSource != null)
        {
            _processFramesCancellationTokenSource.Dispose();
            _processFramesCancellationTokenSource = null;
        }
    }
}
/// <summary>
/// Infrared frame callback: captures the latest frame data into the backing
/// buffer, then redraws it.
/// </summary>
void infraredFrameReader_FrameArrived( InfraredFrameReader sender, InfraredFrameArrivedEventArgs args )
{
    // Update first, then render the updated buffer.
    UpdateInfraredFrame( args );
    DrawInfraredFrame();
}
/// <summary>
/// Opens the infrared frame reader and registers the supplied callback.
/// Returns false when a reader is already open, true on success.
/// </summary>
public async Task<object> OpenInfraredReader(dynamic input)
{
    this.logCallback("OpenInfraredReader");

    // At most one infrared reader at a time.
    if (this.infraredFrameReader != null)
    {
        return false;
    }

    this.infraredFrameCallback = (Func<object, Task<object>>)input.infraredFrameCallback;

    // Infrared frame geometry, logged for diagnostics.
    var desc = this.kinectSensor.InfraredFrameSource.FrameDescription;
    this.infraredFrameDescription = desc;
    this.logCallback("infrared: " + desc.Width + "x" + desc.Height);

    // Open the reader and wire the frame-arrived handler.
    this.infraredFrameReader = this.kinectSensor.InfraredFrameSource.OpenReader();
    this.infraredFrameReader.FrameArrived += this.InfraredReader_FrameArrived;

    // One byte per infrared pixel for the processed output buffer.
    this.infraredPixels = new byte[desc.Width * desc.Height];

    return true;
}
/// <summary>
/// Wires up the Kinect sensor, frame readers, renderers, HUD, audio and the
/// pan/tilt + firing controllers, then starts the frame-rate counter.
/// Fixes vs. original: the infrared reader was opened twice (first instance
/// leaked) and panTilt.TryInitialize() was called a redundant second time.
/// </summary>
public MainWindow()
{
    // one sensor is currently supported
    this.kinectSensor = KinectSensor.GetDefault();

    // get the coordinate mapper
    this.coordinateMapper = this.kinectSensor.CoordinateMapper;

    // open the readers for body, color and audio frames
    this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();
    this.colorFrameReader = this.kinectSensor.ColorFrameSource.OpenReader();
    this.audioReader = this.kinectSensor.AudioSource.OpenReader();

    // get the depth (display) extents
    FrameDescription jointFrameDescription = this.kinectSensor.DepthFrameSource.FrameDescription;
    FrameDescription colorFrameDescription = this.kinectSensor.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Bgra);
    FrameDescription infraredFrameDescription = this.kinectSensor.InfraredFrameSource.FrameDescription;

    colorRenderer = new ColorFrameRenderer(colorFrameDescription.Width, colorFrameDescription.Height, jointFrameDescription.Width, jointFrameDescription.Height, infraredFrameDescription.Width, infraredFrameDescription.Height);
    var drawingGroup = new DrawingGroup();
    var drawingImage = new DrawingImage(drawingGroup);
    hudRenderer = new HudRenderer(drawingGroup, drawingImage, colorFrameDescription.Width, colorFrameDescription.Height);

    AudioSource audioSource = this.kinectSensor.AudioSource;

    // Allocate 1024 bytes to hold a single audio sub frame. Duration sub frame
    // is 16 msec, the sample rate is 16khz, which means 256 samples per sub
    // frame. With 4 bytes per sample, that gives us 1024 bytes.
    this.audioBuffer = new byte[audioSource.SubFrameLengthInBytes];

    this.colorFrameReader.FrameArrived += this.Reader_ColorFrameArrived;
    this.audioReader.FrameArrived += audioReader_FrameArrived;

    // on startup hide the audio meter
    AudioMeterVisibility = Visibility.Hidden;

    // open the reader for the infrared frames exactly once
    // (the duplicate OpenReader() call that leaked a reader was removed)
    this.infraredFrameReader = this.kinectSensor.InfraredFrameSource.OpenReader();

    // wire handler for frame arrival
    this.infraredFrameReader.FrameArrived += this.colorRenderer.Reader_InfraredFrameArrived;

    // set IsAvailableChanged event notifier
    this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

    // open the sensor
    this.kinectSensor.Open();

    // set the status text TODO: change namespace name in resources
    this.StatusText = this.kinectSensor.IsAvailable ? Microsoft.Samples.Kinect.BodyBasics.Properties.Resources.RunningStatusText
        : Microsoft.Samples.Kinect.BodyBasics.Properties.Resources.NoSensorStatusText;

    // use the window object as the view model in this simple example
    this.DataContext = this;

    // initialize the components (controls) of the window
    this.InitializeComponent();

    // register the code which will tell the system what to do when keys are pressed
    SetupKeyHandlers();

    // initialize the hardware controllers
    panTilt = PanTiltController.GetOrCreatePanTiltController();
    firingControl = FiringController.GetOrCreateFiringController();

    var panTiltErr = panTilt.TryInitialize();
    var firingErr = firingControl.TryInitialize();
    if (panTiltErr != null)
    {
        // crash the app. we can't do anything if it doesn't intialize
        throw panTiltErr;
    }

    if (firingErr != null)
    {
        // crash the app. we can't do anything if it doesn't intialize
        throw firingErr;
    }

    // NOTE(review): this looks inverted (VirtualSafetyOn selects the
    // "disengaged" resource) — confirm against the resource strings before
    // changing; left as-is to preserve behavior.
    string safetyText;
    if (this.firingControl.VirtualSafetyOn)
    {
        safetyText = Microsoft.Samples.Kinect.BodyBasics.Properties.Resources.SafetyDisengagedText;
    }
    else
    {
        safetyText = Microsoft.Samples.Kinect.BodyBasics.Properties.Resources.SafetyEngagedText;
    }

    // (A second, redundant panTilt.TryInitialize() call was removed here — the
    // controller was already initialized above and the result was ignored.)

    // draw the headsup display initially
    this.hudRenderer.RenderHud(new HudRenderingParameters()
    {
        CannonX = this.CannonX,
        CannonY = this.CannonY,
        //CannonTheta = this.CannonTheta,
        StatusText = this.statusText,
        SystemReady = (this.kinectSensor.IsAvailable && this.kinectSensor.IsOpen && this.panTilt.IsReady),
        FrameRate = this.FrameRate,
        TrackingMode = this.trackingMode,
        FiringSafety = this.firingControl.VirtualSafetyOn,
        FiringSafetyText = safetyText
    });

    // set voice synth to Hazel
    this.voiceSynth.SelectVoice("Microsoft Hazel Desktop");
    this.voiceSynth.SpeakAsync("Kinect Cannon Fully Initialized");

    // debug start frame rate counter
    FPSTimerStart();

    // Try to use the controller
}
/// <summary>
/// Opens the sensor and, unless configured as speech-only, starts every frame
/// stream and hooks one dedicated single-source reader per stream.
/// </summary>
public void Start()
{
    if (Sensor == null) { return; }

    Log("Start sensor...");

    // Mapper
    coordinateMapper = Sensor.CoordinateMapper;

    // Open connection
    Sensor.Open();

    // Audio Stream
    StartAudioStream();

    // Speech-only mode: skip all image/body streams.
    if (ConfigManager.GetInstance().Find("kinect_v2.speech.speech_only", true))
    {
        init = true;
        return;
    }

    // Init single frame streams
    StartColorStream();
    StartDepthStream();
    StartInfraredStream();
    StartBodyStream();
    StartBodyIndexStream();

    // Motion Task
    StartMotionTask();

    // Multi Frame Reader (kept for reference)
    // reader = Sensor.OpenMultiSourceFrameReader(FrameSourceTypes.BodyIndex | FrameSourceTypes.Body);
    // reader.MultiSourceFrameArrived += OnMultipleFramesArrivedHandler;

    // Single-source readers: each handler just forwards the frame reference.
    dfr = Sensor.DepthFrameSource.OpenReader();
    dfr.FrameArrived += (object s, DepthFrameArrivedEventArgs args) => { HandleDepthFrame(args.FrameReference); };

    xfr = Sensor.InfraredFrameSource.OpenReader();
    xfr.FrameArrived += (object s, InfraredFrameArrivedEventArgs args) => { HandleInfraredFrame(args.FrameReference); };

    cfr = Sensor.ColorFrameSource.OpenReader();
    cfr.FrameArrived += (object s, ColorFrameArrivedEventArgs args) => { HandleColorFrame(args.FrameReference); };

    bfr = Sensor.BodyFrameSource.OpenReader();
    bfr.FrameArrived += (object s, BodyFrameArrivedEventArgs args) => { HandleBodyFrame(args.FrameReference); };

    ifr = Sensor.BodyIndexFrameSource.OpenReader();
    ifr.FrameArrived += (object s, BodyIndexFrameArrivedEventArgs args) => { HandleBodyIndexFrame(args.FrameReference); };

    init = true;
}
/// <summary>
/// Tears down every open reader and the audio stream. The Kinect itself is
/// intentionally left open so it stays on the list of available sensors.
/// </summary>
public void ShutdownSensor()
{
    // For each reader: unsubscribe, dispose, then null the field so a later
    // shutdown (or a fresh LaunchKinect) sees a clean state.
    if (skeletonReader != null)
    {
        skeletonReader.FrameArrived -= skeletonReader_FrameArrived;
        skeletonReader.Dispose();
        skeletonReader = null;
    }

    if (depthReader != null)
    {
        depthReader.FrameArrived -= depthReader_FrameArrived;
        depthReader.Dispose();
        depthReader = null;
    }

    if (colorReader != null)
    {
        colorReader.FrameArrived -= colorReader_FrameArrived;
        colorReader.Dispose();
        colorReader = null;
    }

    if (irReader != null)
    {
        irReader.FrameArrived -= irReader_FrameArrived;
        irReader.Dispose();
        irReader = null;
    }

    // The audio stream is closed before its reader is disposed.
    if (audioStream != null)
    {
        audioStream.Close();
        audioStream.Dispose();
        audioStream = null;
    }

    if (audioReader != null)
    {
        audioReader.FrameArrived -= audioReader_FrameArrived;
        audioReader.Dispose();
        audioReader = null;
    }

    //Note: we don't close the Kinect here because it would remove it from the list of avaliable Kinects
}
/// <summary>
/// Opens one reader per Kinect stream (body, depth, color, infrared) and wires
/// the corresponding frame-arrived handlers.
/// </summary>
private void LaunchKinect()
{
    //TODO: Update this Kinect v2 launch method to support loaded options
    //Note: Kinect.Open is not called here; the sensor must already be open to
    //show up on the list of available Kinects.

    // Open every reader first...
    skeletonReader = kinect.BodyFrameSource.OpenReader();
    depthReader = kinect.DepthFrameSource.OpenReader();
    colorReader = kinect.ColorFrameSource.OpenReader();
    irReader = kinect.InfraredFrameSource.OpenReader();

    // ...then wire the frame-arrived handlers.
    skeletonReader.FrameArrived += skeletonReader_FrameArrived;
    depthReader.FrameArrived += depthReader_FrameArrived;
    colorReader.FrameArrived += colorReader_FrameArrived;
    irReader.FrameArrived += irReader_FrameArrived;
}
/// <summary>
/// Starts recording: opens a reader per enabled stream (body, color, depth,
/// face, infrared), opens the sensor if needed, writes the JSON metadata
/// header under the writer semaphore, then starts the frame-processing task.
/// </summary>
/// <exception cref="InvalidOperationException">Thrown when restarting after a stop.</exception>
public void Start()
{
    if (_isStarted) return;
    // A stopped recording cannot be resumed — the writer has been closed.
    if (_isStopped) throw new InvalidOperationException("Cannot restart a recording after it has been stopped");

    if (_sensor != null)
    {
        // Open a dedicated reader per enabled stream and subscribe.
        if (EnableBodyRecorder)
        {
            _bodyReader = _sensor.BodyFrameSource.OpenReader();
            _bodyReader.FrameArrived += _bodyReader_FrameArrived;
        }

        if (EnableColorRecorder)
        {
            _colorReader = _sensor.ColorFrameSource.OpenReader();
            _colorReader.FrameArrived += _colorReader_FrameArrived;
        }

        if (EnableDepthRecorder)
        {
            _depthReader = _sensor.DepthFrameSource.OpenReader();
            _depthReader.FrameArrived += _depthReader_FrameArrived;
        }

        // Face frames come from the separate face source, not the sensor.
        if (EnableFaceRecorder)
        {
            _faceReader = _faceSource.OpenReader();
            _faceReader.FrameArrived += _faceReader_FrameArrived;
        }

        if (EnableInfraredRecorder)
        {
            _infraredReader = _sensor.InfraredFrameSource.OpenReader();
            _infraredReader.FrameArrived += _infraredReader_FrameArrived;
        }

        if (!_sensor.IsOpen) _sensor.Open();
    }

    _isStarted = true;

    try
    {
        // Serialize the header while holding the writer semaphore so frame
        // writes cannot interleave with the metadata.
        _writerSemaphore.Wait();

        // initialize and write file metadata
        var metadata = new RMetadata()
        {
            Version = this.GetType().GetTypeInfo().Assembly.GetName().Version.ToString(),
            //ColorCodecId = this.ColorRecorderCodec.CodecId
        };

        // Camera-intrinsics capture is currently disabled in both branches;
        // the fallback sensor lookup is kept for when it is re-enabled.
        if (_sensor != null)
        {
            //metadata.DepthCameraIntrinsics = _sensor.CoordinateMapper.GetDepthCameraIntrinsics();
            //metadata.DepthFrameToCameraSpaceTable = _sensor.CoordinateMapper.GetDepthFrameToCameraSpaceTable();
        }
        else
        {
            var sensor = KinectSensor.GetDefault();
            if (sensor != null)
            {
                //metadata.DepthCameraIntrinsics = sensor.CoordinateMapper.GetDepthCameraIntrinsics();
                //metadata.DepthFrameToCameraSpaceTable = sensor.CoordinateMapper.GetDepthFrameToCameraSpaceTable();
            }
        }

        _writer.Write(JsonConvert.SerializeObject(metadata));
    }
    catch (Exception ex)
    {
        // TODO: Change to log the error
        System.Diagnostics.Debug.WriteLine(ex);
    }
    finally
    {
        _writerSemaphore.Release();
    }

    // Background task that drains the frame queue into the writer.
    _processFramesTask = ProcessFramesAsync();
}
/// <summary>
/// Stops recording: detaches and disposes every frame reader (body, color,
/// depth, face, infrared), awaits the frame-processing task, then closes the
/// writer. Idempotent — returns immediately if already stopped.
/// </summary>
public async Task StopAsync()
{
    if (_isStopped) return;

    System.Diagnostics.Debug.WriteLine(">>> StopAsync (queue size {0})", _recordQueue.Count);

    // Flip the flags first so frame handlers stop enqueuing new work.
    _isStarted = false;
    _isStopped = true;

    // Unsubscribe before disposing so no handler fires on a disposed reader.
    if (_bodyReader != null)
    {
        _bodyReader.FrameArrived -= _bodyReader_FrameArrived;
        _bodyReader.Dispose();
        _bodyReader = null;
    }

    if (_colorReader != null)
    {
        _colorReader.FrameArrived -= _colorReader_FrameArrived;
        _colorReader.Dispose();
        _colorReader = null;
    }

    if (_depthReader != null)
    {
        _depthReader.FrameArrived -= _depthReader_FrameArrived;
        _depthReader.Dispose();
        _depthReader = null;
    }

    if (_faceReader != null)
    {
        _faceReader.FrameArrived -= _faceReader_FrameArrived;
        _faceReader.Dispose();
        _faceReader = null;
    }

    if (_infraredReader != null)
    {
        _infraredReader.FrameArrived -= _infraredReader_FrameArrived;
        _infraredReader.Dispose();
        _infraredReader = null;
    }

    // Wait for the queue-draining task; cancellation surfaces here.
    try
    {
        await _processFramesTask;
    }
    catch
    {
        System.Diagnostics.Debug.WriteLine("!!! Process Canceled (in StopAsync)");
    }

    _processFramesTask = null;

    await CloseWriterAsync();

    System.Diagnostics.Debug.WriteLine("<<< StopAsync (DONE!)");
}
/// <summary>
/// Enables or disables the infrared stream.
/// </summary>
/// <param name="enable">true to open the infrared reader, false to tear it down.</param>
public void SetInfrared(bool enable)
{
    // The original implementation ignored 'enable' entirely and opened a new
    // reader on every call, leaking the previous one. Honor the flag and keep
    // at most one reader alive.
    if (enable)
    {
        if (irreader == null)
        {
            irreader = this.Runtime.InfraredFrameSource.OpenReader();
            irreader.FrameArrived += irreader_FrameArrived;
        }
    }
    else if (irreader != null)
    {
        // Unsubscribe before disposing so no handler fires on a disposed reader.
        irreader.FrameArrived -= irreader_FrameArrived;
        irreader.Dispose();
        irreader = null;
    }
}
/// <summary>
/// Disposes every frame reader that was opened, closes the sensor last and
/// releases the reference. Always resolves to true for the caller.
/// </summary>
public async Task<object> Close(object input)
{
    // '?.Dispose()' makes each step a no-op when that stream was never
    // started; the field is then cleared unconditionally (same end state).
    this.depthFrameReader?.Dispose();
    this.depthFrameReader = null;

    this.bodyIndexFrameReader?.Dispose();
    this.bodyIndexFrameReader = null;

    this.colorFrameReader?.Dispose();
    this.colorFrameReader = null;

    this.infraredFrameReader?.Dispose();
    this.infraredFrameReader = null;

    this.longExposureInfraredFrameReader?.Dispose();
    this.longExposureInfraredFrameReader = null;

    this.bodyFrameReader?.Dispose();
    this.bodyFrameReader = null;

    // Close the sensor last and drop our reference.
    this.kinectSensor?.Close();
    this.kinectSensor = null;

    return true;
}
/// <summary>
/// Initializes the gait-analysis view: the UI timer, the Kinect v2 infrared
/// stream and the angle-capture buffers.
/// </summary>
public FisioterapeutaTestAnalisisView()
{
    InitializeComponent();

    #region Common Initializations
    _iEstadosAnalisisMarcha = 0;

    // BUG FIX: "new TimeSpan(200)" is 200 *ticks* (20 microseconds), which made
    // the timer fire as fast as the dispatcher allowed. The intended interval
    // was 200 milliseconds.
    timer.Interval = TimeSpan.FromMilliseconds(200);
    timer.Tick += new EventHandler(timer_Tick);
    timer.Start();

    blnLimpiarGrafica = false;
    markerInitialContact = 0;
    #endregion

    #region Initialization of CLEYE demo - commented
    // (Dead commented-out PS3 Eye capture code removed: MainWindow_Loaded
    // hookup, manual-capture KeyDown handler, marker/angle list init.
    // See source history if the CLEYE demo is ever revived.)
    #endregion

    #region Initialization of Kinect v1.0
    // (Dead commented-out Kinect v1 code removed: sensor discovery over
    // KinectSensor.KinectSensors, ColorStream.Enable, colorPixels/colorBitmap
    // allocation, ColorFrameReady subscription and sensor.Start with
    // IOException handling. See source history if v1 support is revived.)
    #endregion

    #region Initialization of Kinect v2.0
    // get the kinectSensor object
    this.kinectSensor = KinectSensor.GetDefault();

    // open the reader for the infrared frames
    // (previous comment wrongly said "depth frames")
    this.infraredFrameReader = this.kinectSensor.InfraredFrameSource.OpenReader();

    // wire handler for frame arrival
    this.infraredFrameReader.FrameArrived += this.Reader_InfraredFrameArrived;

    // get FrameDescription from InfraredFrameSource
    this.infraredFrameDescription = this.kinectSensor.InfraredFrameSource.FrameDescription;

    // create the bitmap to display
    this.infraredBitmap = new WriteableBitmap(this.infraredFrameDescription.Width, this.infraredFrameDescription.Height, 96.0, 96.0, PixelFormats.Gray32Float, null);

    // set IsAvailableChanged event notifier
    //this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

    // open the sensor
    this.kinectSensor.Open();

    // set the status text
    //this.StatusText = this.kinectSensor.IsAvailable ? Properties.Resources.RunningStatusText
    //    : Properties.Resources.NoSensorStatusText;
    #endregion

    #region Initialization of Capture Angles demo
    original_Frame = null;
    umbral_frame = null;
    //markers = new List<System.Drawing.Point>();
    angles = new List<double>();
    countFrames = 0;
    markersHistory = new List<List<System.Drawing.PointF>>();
    #endregion
}