/// <summary>
/// Handles the Kinect DepthFrameReady event: forwards the raw depth data to the
/// interaction stream and, when this sensor is the one feeding the GUI, renders
/// the frame and updates the displayed frame rate.
/// </summary>
/// <param name="sender">Object sending the event.</param>
/// <param name="e">Event arguments used to open the depth frame.</param>
private void kinect_DepthFrameReady(object sender, DepthImageFrameReadyEventArgs e)
{
    using (DepthImageFrame frame = e.OpenDepthImageFrame())
    {
        if (frame != null)
        {
            // Pass the data to the interaction frame for processing
            if (interactStream != null)
            {
                interactStream.ProcessDepth(frame.GetRawPixelData(), frame.Timestamp);
            }

            if (isGUI && parent.DepthStreamConnectionID == kinect.DeviceConnectionId)
            {
                // Reuse the pixel buffer across frames; the original allocated a fresh
                // short[] on every frame (~30 Hz), creating needless GC pressure.
                // Only reallocate when the frame size actually changes.
                if (depthImagePixels == null || depthImagePixels.Length != frame.PixelDataLength)
                {
                    depthImagePixels = new short[frame.PixelDataLength];
                }
                frame.CopyPixelDataTo(depthImagePixels);
                depthImage.WritePixels(new System.Windows.Int32Rect(0, 0, frame.Width, frame.Height), depthImagePixels, frame.Width * frame.BytesPerPixel, 0);

                // Display the frame rate on the GUI
                double tempFPS = CalculateFrameRate(frame.Timestamp, ref lastDepthTime, ref depthTimeIntervals);
                parent.DepthFPSTextBlock.Text = tempFPS.ToString("F1");
            }
        }
    }
}
/// <summary>
/// Handles the AllFramesReady event: renders the color frame and feeds the depth and
/// skeleton data into the interaction stream.
/// </summary>
/// <param name="sender">Object sending the event.</param>
/// <param name="e">Event arguments used to open the color/depth/skeleton frames.</param>
void kinect_AllFramesReady(object sender, AllFramesReadyEventArgs e)
{
    using (var colorFrame = e.OpenColorImageFrame())
    {
        if (colorFrame != null)
        {
            var pixel = new byte[colorFrame.PixelDataLength];
            colorFrame.CopyPixelDataTo(pixel);
            ImageRgb.Source = BitmapSource.Create(colorFrame.Width, colorFrame.Height, 96, 96, PixelFormats.Bgr32, null, pixel, colorFrame.Width * 4);
        }
    }

    using (var depthFrame = e.OpenDepthImageFrame())
    {
        if (depthFrame != null)
        {
            try
            {
                // Feed the depth data to the interaction stream.
                // GetRawPixelData() is an extension method implemented in the interaction library.
                stream.ProcessDepth(depthFrame.GetRawPixelData(), depthFrame.Timestamp);
            }
            catch (InvalidOperationException)
            {
                // ProcessDepth may throw when the sensor gets into a bad state
                // (the original left this unguarded, crashing the event handler).
                // Skip the frame in that case.
            }
        }
    }

    using (var skeletonFrame = e.OpenSkeletonFrame())
    {
        if (skeletonFrame != null)
        {
            var skeletons = new Skeleton[skeletonFrame.SkeletonArrayLength];
            skeletonFrame.CopySkeletonDataTo(skeletons);
            try
            {
                // Feed the skeleton data to the interaction stream.
                stream.ProcessSkeleton(skeletons, kinect.AccelerometerGetCurrentReading(), skeletonFrame.Timestamp);
            }
            catch (InvalidOperationException)
            {
                // Same bad-sensor-state guard as for depth; drop the frame.
            }
        }
    }
}
/// <summary>
/// Forwards each incoming depth frame to the interaction stream.
/// </summary>
/// <param name="sender">Object sending the event.</param>
/// <param name="e">Event arguments used to open the depth frame.</param>
private void SensorOnDepthFrameReady(object sender, DepthImageFrameReadyEventArgs e)
{
    using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
    {
        if (depthFrame != null)
        {
            try
            {
                stream.ProcessDepth(depthFrame.GetRawPixelData(), depthFrame.Timestamp);
            }
            catch (InvalidOperationException)
            {
                // ProcessDepth may throw when the sensor gets into a bad state
                // (the original left this unguarded in an event handler).
                // Ignore the frame in that case.
            }
        }
    }
}
/// <summary>
/// Depth-frame event handler: hands the raw depth pixels of each frame to the
/// interaction stream for processing.
/// </summary>
/// <param name="sender">Object sending the event.</param>
/// <param name="e">Event arguments used to open the depth frame.</param>
private void sensor_DepthFrameReady(object sender, DepthImageFrameReadyEventArgs e)
{
    using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
    {
        // A null frame means it was already retrieved or dropped; nothing to do.
        if (depthFrame == null)
        {
            return;
        }

        inStream.ProcessDepth(depthFrame.GetRawPixelData(), depthFrame.Timestamp);
    }
}
/// <summary>
/// Depth-frame event handler: pushes the raw depth data of each frame into the
/// interaction stream.
/// </summary>
/// <param name="sender">Object sending the event.</param>
/// <param name="e">Event arguments used to open the depth frame.</param>
void kinect_DepthFrameReady(object sender, DepthImageFrameReadyEventArgs e)
{
    using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
    {
        // The frame can be null if it was already consumed or the sensor dropped it.
        if (depthFrame != null)
        {
            interStream.ProcessDepth(depthFrame.GetRawPixelData(), depthFrame.Timestamp);
        }
    }
}
/// <summary>
/// Feeds a depth frame into the given interaction stream.
/// </summary>
/// <param name="depthFrame">The depth frame to process; may be null.</param>
/// <param name="interactionStream">The interaction stream receiving the depth data.</param>
/// <returns>True if the frame was processed; false if it was skipped.</returns>
public static bool DepthFrameReady(DepthImageFrame depthFrame, ref InteractionStream interactionStream)
{
    // Guard against null arguments: the original only caught InvalidOperationException,
    // so a null frame/stream surfaced as an unhandled NullReferenceException.
    if (depthFrame == null || interactionStream == null)
    {
        return false;
    }

    try
    {
        interactionStream.ProcessDepth(depthFrame.GetRawPixelData(), depthFrame.Timestamp);
        return true;
    }
    catch (InvalidOperationException)
    {
        // DepthFrame functions may throw when the sensor gets
        // into a bad state. Ignore the frame in that case.
        return false;
    }
}
/// <summary>
/// Handles the AllFramesReady event: renders the color image and forwards depth and
/// skeleton data to the interaction stream.
/// </summary>
/// <param name="sender">Object sending the event.</param>
/// <param name="e">Event arguments used to open the color/depth/skeleton frames.</param>
/// <remarks>
/// BUG FIX: the original used "return" when the color (or depth) frame was null,
/// which aborted the WHOLE handler and silently skipped depth and skeleton
/// processing for that event. Each stream is now handled independently.
/// </remarks>
private void KinectSensor_AllFramesReady(object sender, AllFramesReadyEventArgs e)
{
    using (var colorFrame = e.OpenColorImageFrame())
    {
        if (colorFrame != null)
        {
            var pixels = new byte[colorFrame.PixelDataLength];
            colorFrame.CopyPixelDataTo(pixels);
            var stride = colorFrame.Width * 4;
            ImageSource = BitmapSource.Create(colorFrame.Width, colorFrame.Height, 96, 96, PixelFormats.Bgr32, null, pixels, stride);
        }
    }

    using (var depthFrame = e.OpenDepthImageFrame())
    {
        if (depthFrame != null)
        {
            try
            {
                interactionStream.ProcessDepth(depthFrame.GetRawPixelData(), depthFrame.Timestamp);
            }
            catch (InvalidOperationException ex)
            {
                // ProcessDepth can throw when the sensor is in a bad state; log and drop.
                Console.Error.WriteLine(ex);
            }
        }
    }

    using (var skeletonFrame = e.OpenSkeletonFrame())
    {
        if (skeletonFrame != null)
        {
            try
            {
                skeletonFrame.CopySkeletonDataTo(skeletons);
                var accelerometerReading = kinectSensor.AccelerometerGetCurrentReading();
                interactionStream.ProcessSkeleton(skeletons, accelerometerReading, skeletonFrame.Timestamp);
            }
            catch (InvalidOperationException ex)
            {
                // Same bad-sensor-state guard as for depth; log and drop.
                Console.Error.WriteLine(ex);
            }
        }
    }
}
/// <summary>
/// Event handler for the Kinect sensor's DepthFrameReady event (adapted from the
/// Developer Toolkit Browser 1.8 "Depth Basics-WPF" sample): feeds the interaction
/// stream and converts the depth map to a grayscale BGR32 pixel buffer.
/// </summary>
/// <param name="sender">Object sending the event.</param>
/// <param name="e">Event arguments used to open the depth frame.</param>
private void SensorDepthFrameReady(object sender, DepthImageFrameReadyEventArgs e)
{
    using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
    {
        if (depthFrame == null)
        {
            return;
        }

        // Copy the pixel data from the frame into the reusable buffer.
        depthFrame.CopyDepthImagePixelDataTo(this.depthPixels);

        // Hand the raw depth data to the InteractionStream.
        interactionStream.ProcessDepth(depthFrame.GetRawPixelData(), depthFrame.Timestamp);

        // Reliable depth window for the current frame.
        int minDepth = depthFrame.MinDepth;
        int maxDepth = depthFrame.MaxDepth;

        // Convert depth to grayscale BGR. Truncating to a byte discards the
        // most-significant bits, so intensity "wraps" but fine detail is kept;
        // values outside the reliable range map to 0 (black). A lookup table
        // (see the KinectExplorer sample's KinectDepthViewer) would be faster
        // than per-pixel conditionals in production code.
        int outIndex = 0;
        for (int i = 0; i < this.depthPixels.Length; ++i)
        {
            short depth = this.depthPixels[i].Depth;
            byte intensity = (depth < minDepth || depth > maxDepth) ? (byte)0 : (byte)depth;

            this.colorPixels[outIndex++] = intensity; // blue
            this.colorPixels[outIndex++] = intensity; // green
            this.colorPixels[outIndex++] = intensity; // red
            ++outIndex;                               // fourth byte unused in BGR32
        }
    }
}
/// <summary>
/// Event handler: on each new depth frame, forwards the depth information to the
/// interaction stream.
/// </summary>
/// <param name="sender">Object sending the event.</param>
/// <param name="e">Event arguments used to open the depth frame.</param>
private void EventDepthFrameReady(object sender, DepthImageFrameReadyEventArgs e)
{
    using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
    {
        // A null frame means it was already consumed or dropped by the sensor.
        if (depthFrame == null)
        {
            return;
        }
        try
        {
            interactionStream.ProcessDepth(depthFrame.GetRawPixelData(), depthFrame.Timestamp);
        }
        catch (InvalidOperationException)
        {
            // ProcessDepth may throw when the sensor gets into a bad state;
            // the frame is deliberately dropped in that case (best-effort).
        }
    }
}
/// <summary>
/// Routes each depth frame from the sensor into the interaction stream.
/// </summary>
/// <param name="sender">Object sending the event.</param>
/// <param name="depthImageFrameReadyEventArgs">Event arguments used to open the depth frame.</param>
private void SensorOnDepthFrameReady(object sender, DepthImageFrameReadyEventArgs depthImageFrameReadyEventArgs)
{
    using (DepthImageFrame depthFrame = depthImageFrameReadyEventArgs.OpenDepthImageFrame())
    {
        if (depthFrame != null)
        {
            try
            {
                _interactionStream.ProcessDepth(depthFrame.GetRawPixelData(), depthFrame.Timestamp);
            }
            catch (InvalidOperationException)
            {
                // Sensor can be in a bad state; skip this frame.
            }
        }
    }
}
/// <summary>
/// Depth-frame event handler: feeds the interaction stream (640x480@30fps frames only)
/// and repackages the frame's DepthImagePixel data into a pooled byte array that is
/// published via OnDepthFrameReceived.
/// </summary>
/// <param name="sender">Object sending the event.</param>
/// <param name="e">Event arguments used to open the depth frame.</param>
private void kinect_DepthFrameReady(object sender, DepthImageFrameReadyEventArgs e)
{
    using (DepthImageFrame frame = e.OpenDepthImageFrame())
    {
        if (frame != null)
        {
            // Pass the data to the interaction frame for processing.
            // The interaction library only supports the 640x480@30fps depth format.
            if (interactStream != null && frame.Format == DepthImageFormat.Resolution640x480Fps30)
            {
                interactStream.ProcessDepth(frame.GetRawPixelData(), frame.Timestamp);
            }

            // Build the outgoing event payload describing this frame.
            KinectBase.DepthFrameEventArgs depthE = new KinectBase.DepthFrameEventArgs();
            depthE.kinectID = this.kinectID;
            depthE.perPixelExtra = 2;  // extra bytes per pixel beyond the raw depth value
            depthE.width = frame.Width;
            depthE.height = frame.Height;
            depthE.bytesPerPixel = frame.BytesPerPixel;
            // Normalize the reliable depth range to [0,1] against the ushort range.
            depthE.reliableMin = (float)frame.MinDepth / (float)ushort.MaxValue;
            depthE.reliableMax = (float)frame.MaxDepth / (float)ushort.MaxValue;
            depthE.timeStamp = new TimeSpan(frame.Timestamp * 10000); //Convert from milliseconds to ticks and set the time span
            //The second 2 bytes of the DepthImagePixel structure hold the actual depth as a uint16, so lets get those, and put the data in the blue and green channel of the image
            //depthE.image = new byte[frame.PixelDataLength * (depthE.perPixelExtra + depthE.bytesPerPixel)];
            depthE.image = depthImagePool.GetObject(); //Get an image array from the object pool
            if (depthE.image.Length != frame.PixelDataLength * (depthE.perPixelExtra + depthE.bytesPerPixel)) //If the object is the wrong size, replace it with one that is the right size
            {
                depthE.image = new byte[frame.PixelDataLength * (depthE.perPixelExtra + depthE.bytesPerPixel)];
            }
            unsafe
            {
                //The sizeof() operation is unsafe in this instance, otherwise this would all be safe code
                // Copy the frame's DepthImagePixel structs through unmanaged memory into the
                // byte array, shifted by 2 bytes so the uint16 depth lands on the intended
                // channels; the trailing 2 source bytes are dropped to fit the buffer.
                IntPtr depthImagePtr = Marshal.AllocHGlobal(sizeof(DepthImagePixel) * frame.PixelDataLength);
                frame.CopyDepthImagePixelDataTo(depthImagePtr, frame.PixelDataLength);
                Marshal.Copy(depthImagePtr, depthE.image, 2, depthE.image.Length - 2);
                Marshal.FreeHGlobal(depthImagePtr);
            }

            // Publish the repackaged frame to subscribers.
            OnDepthFrameReceived(depthE);
        }
    }
}
/// <summary>
/// Instantiates a new InteractionStream, feeds this InteractionStream with skeleton and
/// depth data and subscribes to the InteractionFrameReady event.
/// </summary>
/// <param name="kinectSensor">The Kinect sensor passed to the interaction stream instance.</param>
/// <param name="interactionClient">The interaction client passed to the interaction stream instance.</param>
/// <returns>An UserInfo stream that disposes the interaction stream and its subscriptions when the observable subscription is disposed.</returns>
/// <exception cref="ArgumentNullException">Thrown when a required argument is null.</exception>
/// <exception cref="InvalidOperationException">Thrown when the depth or skeleton stream is not enabled.</exception>
public static IObservable<UserInfo[]> GetUserInfoObservable(this KinectSensor kinectSensor, IInteractionClient interactionClient)
{
    if (kinectSensor == null)
    {
        // FIX: the original reported a non-existent parameter name ("kinect").
        throw new ArgumentNullException("kinectSensor");
    }
    if (interactionClient == null)
    {
        throw new ArgumentNullException("interactionClient");
    }
    if (!kinectSensor.DepthStream.IsEnabled)
    {
        throw new InvalidOperationException("The depth stream is not enabled, but mandatory.");
    }
    if (!kinectSensor.SkeletonStream.IsEnabled)
    {
        throw new InvalidOperationException("The skeleton stream is not enabled, but mandatory.");
    }

    return Observable.Create<UserInfo[]>(observer =>
    {
        var stream = new InteractionStream(kinectSensor, interactionClient);

        // Feed every frame pair (skeleton + depth) into the interaction stream.
        var framesSubscription = kinectSensor.GetAllFramesReadyObservable()
            .SelectStreams((_, __) => Tuple.Create(_.Timestamp, __.Timestamp))
            .Subscribe(frames =>
            {
                stream.ProcessSkeleton(frames.Item3, kinectSensor.AccelerometerGetCurrentReading(), frames.Item4.Item1);
                stream.ProcessDepth(frames.Item2, frames.Item4.Item2);
            });

        // FIX: the original never kept (and thus never disposed) this inner
        // subscription, leaking it past the lifetime of the outer observable.
        var interactionSubscription = stream.GetInteractionFrameReadyObservable()
            .SelectUserInfo()
            .Subscribe(userInfo => observer.OnNext(userInfo));

        return new Action(() =>
        {
            framesSubscription.Dispose();
            interactionSubscription.Dispose();
            stream.Dispose();
        });
    });
}
/// <summary>
/// Depth-frame event handler: pushes each frame's raw depth data into the
/// interaction stream, tolerating transient sensor failures.
/// </summary>
/// <param name="sender">Object sending the event.</param>
/// <param name="depthImageFrameReadyEventArgs">Event arguments used to open the depth frame.</param>
private void SensorOnDepthFrameReady(object sender, DepthImageFrameReadyEventArgs depthImageFrameReadyEventArgs)
{
    using (DepthImageFrame depthFrame = depthImageFrameReadyEventArgs.OpenDepthImageFrame())
    {
        if (depthFrame != null)
        {
            try
            {
                interactionStream.ProcessDepth(depthFrame.GetRawPixelData(), depthFrame.Timestamp);
            }
            catch (InvalidOperationException)
            {
                // DepthFrame functions may throw when the sensor gets into a
                // bad state. Ignore the frame in that case.
            }
        }
    }
}
/// <summary>
/// Depth-frame event handler: hands the raw depth data of each frame to the
/// interaction stream, logging and skipping frames when the sensor misbehaves.
/// </summary>
/// <param name="o">Object sending the event.</param>
/// <param name="df">Event arguments used to open the depth frame.</param>
private void OnDepthFrameReady(object o, DepthImageFrameReadyEventArgs df)
{
    using (DepthImageFrame depthFrame = df.OpenDepthImageFrame())
    {
        if (depthFrame == null)
        {
            return;
        }
        try
        {
            inter_stream.ProcessDepth(depthFrame.GetRawPixelData(), depthFrame.Timestamp);
        }
        catch (InvalidOperationException)
        {
            // ProcessDepth can throw when the sensor gets into a bad state;
            // note it and drop the frame.
            Console.WriteLine("Depth Problems");
        }
    }
}
/// <summary>
/// Depth processing dispatcher: depending on which toolbar toggle is active, either
/// feeds the frame's pixels into the interaction stream (draw mode) or converts them
/// to a grayscale image (grayscale mode). Disposes the supplied frame when done.
/// </summary>
/// <param name="quadro">Depth frame to process; disposed by this method. May be null.</param>
/// <param name="bytesImagem">Destination image buffer. May be null.</param>
/// <param name="distanciaMaxima">Maximum distance used by the grayscale conversion.</param>
private void FuncoesProfundidade(DepthImageFrame quadro, byte[] bytesImagem, int distanciaMaxima)
{
    // Nothing to do without both a frame and an output buffer.
    if (quadro == null || bytesImagem == null)
    {
        return;
    }

    using (quadro)
    {
        var pixelsProfundidade = new DepthImagePixel[quadro.PixelDataLength];
        quadro.CopyDepthImagePixelDataTo(pixelsProfundidade);

        if (btnDesenhar.IsChecked)
        {
            // Draw mode: hand the depth pixels to the interaction stream.
            fluxoInteracao.ProcessDepth(pixelsProfundidade, quadro.Timestamp);
        }
        else if (btnEscalaCinza.IsChecked)
        {
            // Grayscale mode: render the depth map into the image buffer.
            ReconhecerProfundidade(bytesImagem, distanciaMaxima, pixelsProfundidade);
        }
    }
}
/// <summary>
/// Depth-frame event handler: forwards the frame's raw depth data to the interaction
/// stream, ignoring events that originate from a different sensor instance.
/// </summary>
/// <param name="sender">Object sending the event; must match the tracked sensor.</param>
/// <param name="depthImageFrameReadyEventArgs">Event arguments used to open the depth frame.</param>
private void SensorOnDepthFrameReady(object sender, DepthImageFrameReadyEventArgs depthImageFrameReadyEventArgs)
{
    // Only handle events raised by our own sensor.
    if (_sensor != sender)
    {
        return;
    }

    using (var depthFrame = depthImageFrameReadyEventArgs.OpenDepthImageFrame())
    {
        if (depthFrame == null)
        {
            return;
        }

        try
        {
            _interactionStream.ProcessDepth(depthFrame.GetRawPixelData(), depthFrame.Timestamp);
        }
        catch (Exception e)
        {
            // Deliberately broad best-effort guard (matches original behavior):
            // log and drop the frame on any failure.
            Debug.WriteLine(e.Message);
        }
    }
}