void sensor_DepthFrameReady(AllFramesReadyEventArgs e)
        {
            using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
            {
                if (depthFrame == null)
                {
                    return;
                }

                //turn raw data into an array of distances (Coding4Fun extension)
                var depthArray = depthFrame.ToDepthArray();

                //distance at the center pixel of the frame
                MidPointDistanceViaGetDistanceText.Text = depthFrame.GetDistance(depthFrame.Width / 2, depthFrame.Height / 2).ToString();

                //depth image with pixels nearer than _minDistance drawn in red
                DepthImageWithMinDistance.Source = depthArray.ToBitmapSource(depthFrame.Width, depthFrame.Height,
                                                                             _minDistance, Colors.Red);

                //unmodified depth image
                DepthImage.Source = depthFrame.ToBitmapSource();

                if (_saveDepthFrame)
                {
                    _saveDepthFrame = false;
                    depthFrame.ToBitmapSource().Save(DateTime.Now.ToString("yyyyMMddHHmmss") + "_depth.jpg", ImageFormat.Jpeg);
                }
            }
        }
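For context, a minimal sketch of how a handler like the one above might be wired up; the sensor field, the _minDistance value and the stream format are assumptions, and ToDepthArray/ToBitmapSource/GetDistance/Save come from the Coding4Fun Kinect extensions these samples rely on.

using System.Linq;
using System.Windows;
using Microsoft.Kinect;

public partial class MainWindow : Window
{
    private KinectSensor _sensor;
    private int _minDistance = 850;   // assumed cutoff in millimetres
    private bool _saveDepthFrame;     // set elsewhere (e.g. a button click) to save a single frame

    private void StartSensor()
    {
        // Take the first connected sensor, enable the depth stream and forward AllFramesReady.
        _sensor = KinectSensor.KinectSensors.FirstOrDefault(s => s.Status == KinectStatus.Connected);
        if (_sensor == null)
        {
            return;
        }

        _sensor.DepthStream.Enable(DepthImageFormat.Resolution640x480Fps30);
        _sensor.AllFramesReady += (sender, e) => sensor_DepthFrameReady(e);
        _sensor.Start();
    }
}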
        // Receives update notifications for all frame data
        void kinect_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            // The frame is IDisposable, so wrap it in a using block
            using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
            {
                if (colorFrame != null)
                {
                    imageRgbCamera.Source = colorFrame.ToBitmapSource();
                }
            }

            // The frame is IDisposable, so wrap it in a using block
            using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
            {
                if (depthFrame != null)
                {
                    imageDepthCamera.Source = depthFrame.ToBitmapSource();
                }
            }

            // The frame is IDisposable, so wrap it in a using block
            using (SkeletonFrame skeletonFrame = e.OpenSkeletonFrame())
            {
                if (skeletonFrame != null)
                {
                    // Display the skeleton positions
                    ShowSkeleton(skeletonFrame);
                }
            }
        }
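ShowSkeleton is not part of the snippet above; a possible shape for it, limited to pulling the tracked skeletons out of the frame (the drawing itself is omitted), could be:

        // Sketch of a ShowSkeleton helper; only the data extraction is shown.
        void ShowSkeleton(SkeletonFrame skeletonFrame)
        {
            var skeletons = new Skeleton[skeletonFrame.SkeletonArrayLength];
            skeletonFrame.CopySkeletonDataTo(skeletons);

            foreach (Skeleton skeleton in skeletons)
            {
                if (skeleton.TrackingState != SkeletonTrackingState.Tracked)
                {
                    continue;
                }

                foreach (Joint joint in skeleton.Joints)
                {
                    // joint.Position is in skeleton space (metres); map it with the sensor's
                    // CoordinateMapper before drawing it over the color or depth image.
                }
            }
        }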
        /**
         * Handler method for the events raised when
         * images are captured by the depth sensor.
         **/
        void DepthImageReady(object sender, DepthImageFrameReadyEventArgs e)
        {
            bool receiveData = false;

            using (DepthImageFrame depthImageFrame = e.OpenDepthImageFrame())
            {
                if (depthImageFrame != null)
                {
                    if (depthPixels == null)
                    {
                        depthPixels = new DepthImagePixel[depthImageFrame.PixelDataLength];
                    }
                    depthImageFrame.CopyDepthImagePixelDataTo(this.depthPixels);
                    receiveData = true;
                }
                else
                {
                    //no data was received
                }
                if (receiveData)
                {
                    image2.Source = depthImageFrame.ToBitmapSource();
                }
            }
        }
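Once the DepthImagePixel buffer has been filled, per-pixel distances can be read from it directly; a small lookup sketch (the -1 sentinel for unknown depth is an assumption):

        static int DistanceAt(DepthImagePixel[] depthPixels, int frameWidth, int x, int y)
        {
            DepthImagePixel pixel = depthPixels[y * frameWidth + x];

            // Depth is reported in millimetres; IsKnownDepth is false for the
            // too-near / too-far / unknown sentinel values.
            return pixel.IsKnownDepth ? pixel.Depth : -1;
        }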
 private void OnDepthFrameReady(object sender, DepthImageFrameReadyEventArgs e)
 {
     using (DepthImageFrame frame = e.OpenDepthImageFrame())
     {
         if (frame == null)
         {
             return;
         }
         //depthImage.Source = frame.ToBitmap(DepthImageMode.Colors);
         depthImage.Source = frame.ToBitmapSource();
     }
 }
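This handler only needs depth data, so it can be subscribed to DepthFrameReady directly instead of AllFramesReady; a wiring sketch (the sensor argument and stream format are assumptions):

 void InitDepthStream(KinectSensor sensor)
 {
     sensor.DepthStream.Enable(DepthImageFormat.Resolution320x240Fps30);
     sensor.DepthFrameReady += OnDepthFrameReady;
     sensor.Start();
 }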
        void SensorDepthFrameReady(AllFramesReadyEventArgs e)
        {
            // if the window is displayed, show the depth buffer image
            if (WindowState == WindowState.Normal)
            {
                using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
                {
                    if (depthFrame == null)
                    {
                        return;
                    }

                    video.Source = depthFrame.ToBitmapSource();
                }
            }
        }
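All of the handlers above assume the sensor is stopped cleanly when the window closes; a teardown sketch (the handler names are the ones used in the examples above):

        void StopSensor(KinectSensor sensor)
        {
            if (sensor == null || !sensor.IsRunning)
            {
                return;
            }

            // Unhook before stopping so a late frame event cannot touch the UI during shutdown.
            sensor.AllFramesReady -= kinect_AllFramesReady;
            sensor.DepthFrameReady -= OnDepthFrameReady;
            sensor.Stop();
        }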