Code Example #1
        private void Reader_FrameArrived(object sender, DepthFrameArrivedEventArgs e)
        {
            bool depthFrameProcessed = false;

            using (DepthFrame depthFrame = e.FrameReference.AcquireFrame())
            {
                if (depthFrame != null)
                {
                    using (Microsoft.Kinect.KinectBuffer depthBuffer = depthFrame.LockImageBuffer())
                    {
                        if (((this.depthframdescrioption.Width * this.depthframdescrioption.Height) == (depthBuffer.Size / this.depthframdescrioption.BytesPerPixel)) &&
                            (this.depthframdescrioption.Width == this.depthmap.PixelWidth) && (this.depthframdescrioption.Height == this.depthmap.PixelHeight))
                        {
                            ushort maxDepth = ushort.MaxValue;
                            this.ProcessDepthFrameData(depthBuffer.UnderlyingBuffer, depthBuffer.Size, depthFrame.DepthMinReliableDistance, maxDepth);
                            depthFrameProcessed = true;
                        }
                    }
                }
            }

            if (depthFrameProcessed)
            {
                this.RenderDepthPixels();
            }
        }
Code Example #2
        /// <summary>
        /// Handles the depth frame data arriving from the sensor
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        private void Reader_FrameArrived(object sender, DepthFrameArrivedEventArgs e)
        {
            bool depthFrameProcessed = false;

            using (DepthFrame depthFrame = e.FrameReference.AcquireFrame())
            {
                if (depthFrame != null)
                {
                    // the fastest way to process the depth frame data is to directly access
                    // the underlying buffer
                    using (Microsoft.Kinect.KinectBuffer depthBuffer = depthFrame.LockImageBuffer())
                    {
                        // verify data and write the new depth frame data to the display bitmap
                        if (((this.depthFrameDescription.Width * this.depthFrameDescription.Height) == (depthBuffer.Size / this.depthFrameDescription.BytesPerPixel)) &&
                            (this.depthFrameDescription.Width == this.depthBitmap.PixelWidth) && (this.depthFrameDescription.Height == this.depthBitmap.PixelHeight))
                        {
                            // Note: In order to see the full range of depth (including the less reliable far field depth)
                            // we are setting maxDepth to the extreme potential depth threshold
                            ushort maxDepth = ushort.MaxValue;

                            // If you wish to filter by reliable depth distance, uncomment the following line:
                            //// maxDepth = depthFrame.DepthMaxReliableDistance

                            this.ProcessDepthFrameData(depthBuffer.UnderlyingBuffer, depthBuffer.Size, depthFrame.DepthMinReliableDistance, maxDepth);
                            depthFrameProcessed = true;
                        }
                    }
                }
            }

            if (depthFrameProcessed)
            {
                this.RenderDepthPixels();
            }
        }
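The handlers in these examples delegate to helpers such as ProcessDepthFrameData and RenderDepthPixels that are defined elsewhere in their classes. For reference, here is a minimal sketch of those two helpers modeled on the Kinect SDK DepthBasics-WPF sample; the fields depthPixels (byte[]), depthBitmap (WriteableBitmap) and depthFrameDescription are assumed to be allocated during initialization, and the project must allow unsafe code.

        // Sketch only: maps each 16-bit depth sample (millimetres) to an 8-bit intensity
        // and pushes the result into the display bitmap. Field names are assumptions.
        private const int MapDepthToByte = 8000 / 256; // collapse the 0-8000 mm range into 0-255

        private unsafe void ProcessDepthFrameData(IntPtr depthFrameData, uint depthFrameDataSize, ushort minDepth, ushort maxDepth)
        {
            // depth frame data is a 16-bit value per pixel
            ushort* frameData = (ushort*)depthFrameData;
            int pixelCount = (int)(depthFrameDataSize / this.depthFrameDescription.BytesPerPixel);

            for (int i = 0; i < pixelCount; ++i)
            {
                ushort depth = frameData[i];

                // keep values inside [minDepth, maxDepth]; everything else renders as black
                this.depthPixels[i] = (byte)(depth >= minDepth && depth <= maxDepth ? (depth / MapDepthToByte) : 0);
            }
        }

        private void RenderDepthPixels()
        {
            // write the intensity buffer into the WriteableBitmap bound to the UI
            this.depthBitmap.WritePixels(
                new Int32Rect(0, 0, this.depthBitmap.PixelWidth, this.depthBitmap.PixelHeight),
                this.depthPixels,
                this.depthBitmap.PixelWidth,   // stride: one byte per pixel
                0);
        }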
Code Example #3
        private void LongExposureInfraredReader_FrameArrived(object sender, LongExposureInfraredFrameArrivedEventArgs e)
        {
            if (this.processingLongExposureInfraredFrame)
            {
                return;
            }
            this.processingLongExposureInfraredFrame = true;
            bool longExposureInfraredFrameProcessed = false;

            using (LongExposureInfraredFrame longExposureInfraredFrame = e.FrameReference.AcquireFrame())
            {
                if (longExposureInfraredFrame != null)
                {
                    using (Microsoft.Kinect.KinectBuffer longExposureInfraredBuffer = longExposureInfraredFrame.LockImageBuffer())
                    {
                        // verify the frame is the expected size before processing the long exposure infrared data
                        if (((this.longExposureInfraredFrameDescription.Width * this.longExposureInfraredFrameDescription.Height) == (longExposureInfraredBuffer.Size / this.longExposureInfraredFrameDescription.BytesPerPixel)))
                        {
                            this.ProcessLongExposureInfraredFrameData(longExposureInfraredBuffer.UnderlyingBuffer, longExposureInfraredBuffer.Size, this.longExposureInfraredFrameDescription.BytesPerPixel);
                            longExposureInfraredFrameProcessed = true;
                        }
                    }
                }
            }

            if (longExposureInfraredFrameProcessed)
            {
                this.Rescale(this.longExposureInfraredPixels, this.truncatedLongExposureInfraredPixels);
                this.longExposureInfraredFrameCallback(this.truncatedLongExposureInfraredPixels);
            }
            this.processingLongExposureInfraredFrame = false;
        }
Code Example #4
        /// <summary>
        /// creates a depth picture from the depthframe data package and broadcasts it
        /// </summary>
        /// <param name="e">the depthframe data package</param>
        void CalculateDepthPicture(DepthFrameArrivedEventArgs e)
        {
            using (DepthFrame df = e.FrameReference.AcquireFrame())
            {
                if (df != null)
                {
                    using (Microsoft.Kinect.KinectBuffer depthBuffer = df.LockImageBuffer())
                    {
                        WriteableBitmap depthBitmap = new WriteableBitmap(df.FrameDescription.Width, df.FrameDescription.Height, 96.0, 96.0, PixelFormats.Gray8, null);

                        if (((df.FrameDescription.Width * df.FrameDescription.Height) == (depthBuffer.Size / df.FrameDescription.BytesPerPixel)) &&
                            (df.FrameDescription.Width == depthBitmap.PixelWidth) && (df.FrameDescription.Height == depthBitmap.PixelHeight))
                        {
                            depthReturnStruc dd          = ProcessDepthFrameData(depthBuffer.UnderlyingBuffer, depthBuffer.Size, df);
                            byte[]           depthPixels = dd.pictureData;
                            depthBitmap.WritePixels(
                                new Int32Rect(0, 0, depthBitmap.PixelWidth, depthBitmap.PixelHeight),
                                depthPixels,
                                depthBitmap.PixelWidth,
                                0);

                            depthBitmap.Freeze();
                            OnDepthPictureEvent.BeginInvoke(depthBitmap, null, null);
                            OnDepthDataEvent.BeginInvoke(dd.depthData, null, null);
                        }
                    }
                }
            }
        }
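The depthReturnStruc type returned by ProcessDepthFrameData above is not shown in this snippet. A plausible definition, inferred only from how its fields are consumed (pictureData feeds the Gray8 bitmap, depthData is broadcast separately), might be:

        // Hypothetical definition inferred from the usage in CalculateDepthPicture above.
        struct depthReturnStruc
        {
            public byte[]   pictureData;   // one grey byte per pixel for the Gray8 display bitmap
            public ushort[] depthData;     // raw depth samples in millimetres
        }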
Code Example #5
        /// <summary>
        /// Handles the body index frame data arriving from the sensor
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        private void Reader_FrameArrived(object sender, BodyIndexFrameArrivedEventArgs e)
        {
            bool bodyIndexFrameProcessed = false;

            using (BodyIndexFrame bodyIndexFrame = e.FrameReference.AcquireFrame())
            {
                if (bodyIndexFrame != null)
                {
                    // the fastest way to process the body index data is to directly access
                    // the underlying buffer
                    using (Microsoft.Kinect.KinectBuffer bodyIndexBuffer = bodyIndexFrame.LockImageBuffer())
                    {
                        // verify data and write the new body index frame data to the display bitmap
                        if (((this._bodyIndexFrameDescription.Width * this._bodyIndexFrameDescription.Height) == bodyIndexBuffer.Size) &&
                            (this._bodyIndexFrameDescription.Width == this._bodyIndexBitmap.PixelWidth) && (this._bodyIndexFrameDescription.Height == this._bodyIndexBitmap.PixelHeight))
                        {
                            this.ProcessBodyIndexFrameData(bodyIndexBuffer.UnderlyingBuffer, bodyIndexBuffer.Size);
                            bodyIndexFrameProcessed = true;
                        }
                    }
                }
            }

            if (bodyIndexFrameProcessed)
            {
                this.RenderBodyIndexPixels();
            }
        }
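ProcessBodyIndexFrameData and RenderBodyIndexPixels are not shown in the snippet. A minimal sketch in the style of the SDK BodyIndexBasics sample follows; BodyColor, _bodyIndexPixels (uint[]) and _bodyIndexBitmap are assumed fields, and unsafe code must be enabled.

        // Sketch only: each byte of the body index frame is 0-5 for a tracked body or 255
        // for background; tracked bodies are mapped to a solid BGRA color.
        private static readonly uint[] BodyColor =
        {
            0x0000FF00, 0x00FF0000, 0xFFFF4000, 0x40FFFF00, 0xFF40FF00, 0xFF808000,
        };

        private unsafe void ProcessBodyIndexFrameData(IntPtr bodyIndexFrameData, uint bodyIndexFrameDataSize)
        {
            byte* frameData = (byte*)bodyIndexFrameData;

            for (int i = 0; i < (int)bodyIndexFrameDataSize; ++i)
            {
                // color the pixel if it belongs to a tracked body, otherwise leave it transparent
                this._bodyIndexPixels[i] = frameData[i] < BodyColor.Length ? BodyColor[frameData[i]] : 0x00000000;
            }
        }

        private void RenderBodyIndexPixels()
        {
            this._bodyIndexBitmap.WritePixels(
                new Int32Rect(0, 0, this._bodyIndexBitmap.PixelWidth, this._bodyIndexBitmap.PixelHeight),
                this._bodyIndexPixels,
                this._bodyIndexBitmap.PixelWidth * 4,   // stride: 4 bytes per BGRA pixel
                0);
        }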
Code Example #6
        public void InfraredFrameArrival(InfraredFrame df, double fps, ref bool processed, WriteableBitmap infraredBitmap)
        {
            using (Microsoft.Kinect.KinectBuffer infraredBuffer = df.LockImageBuffer())
            {
                // verify data and write the new infrared frame data to the display bitmap
                if (((this.infraredFrameDescription.Width * this.infraredFrameDescription.Height) == (infraredBuffer.Size / this.infraredFrameDescription.BytesPerPixel)) &&
                    (this.infraredFrameDescription.Width == infraredBitmap.PixelWidth) && (this.infraredFrameDescription.Height == infraredBitmap.PixelHeight))
                {
                    ProcessInfraredFrameData(infraredBuffer.UnderlyingBuffer, infraredBuffer.Size);

                    processed = true;
                    if (infraredRecording)
                    {
                        this.infraredBinaryBuffer.Enqueue((byte[])(infraredPixelBuffer.Clone()));
                        this.frameCount++;
                        if (fps < 16.0)
                        {
                            Console.WriteLine("fps drop yaşandı");
                            this.infraredBinaryBuffer.Enqueue((byte[])(infraredPixelBuffer.Clone()));
                            this.frameCount++;
                        }
                    }
                }
            }
        }
Code Example #7
        /// <summary>
        /// Handles the infrared frame data arriving from the sensor
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        public void Reader_InfraredFrameArrived(object sender, InfraredFrameArrivedEventArgs e)
        {
            bool infraredFrameProcessed = false;

            // InfraredFrame is IDisposable
            using (InfraredFrame infraredFrame = e.FrameReference.AcquireFrame())
            {
                if (infraredFrame != null)
                {
                    // the fastest way to process the infrared frame data is to directly access
                    // the underlying buffer
                    using (Microsoft.Kinect.KinectBuffer infraredBuffer = infraredFrame.LockImageBuffer())
                    {
                        // verify data and write the new infrared frame data to the display bitmap
                        if (((infraredFrame.FrameDescription.Width * infraredFrame.FrameDescription.Height) == (infraredBuffer.Size / infraredFrame.FrameDescription.BytesPerPixel)) &&
                            (infraredFrame.FrameDescription.Width == this.infraredSource.PixelWidth) && (infraredFrame.FrameDescription.Height == this.infraredSource.PixelHeight))
                        {
                            this.ProcessInfraredFrameData(infraredBuffer.UnderlyingBuffer, infraredBuffer.Size);
                            infraredFrameProcessed = true;
                        }
                    }
                }
            }

            if (infraredFrameProcessed)
            {
                this.RenderInfraredPixels();
            }
        }
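ProcessInfraredFrameData is likewise defined elsewhere. One common shape, taken from the SDK InfraredBasics sample, normalizes each 16-bit intensity into [0.01, 1.0] and writes it straight into a Gray32Float WriteableBitmap; variants that fill a separate pixel array and then call RenderInfraredPixels use the same normalization. The field name infraredSource (the WriteableBitmap) is an assumption, and unsafe code is required.

        // Sketch only: normalization constants as used in the SDK InfraredBasics sample.
        private const float InfraredSourceValueMaximum = (float)ushort.MaxValue;
        private const float InfraredOutputValueMinimum = 0.01f;
        private const float InfraredOutputValueMaximum = 1.0f;
        private const float InfraredSceneValueAverage = 0.08f;
        private const float InfraredSceneStandardDeviations = 3.0f;

        private unsafe void ProcessInfraredFrameData(IntPtr infraredFrameData, uint infraredFrameDataSize)
        {
            ushort* frameData = (ushort*)infraredFrameData;

            this.infraredSource.Lock();

            float* backBuffer = (float*)this.infraredSource.BackBuffer;
            int pixelCount = (int)(infraredFrameDataSize / sizeof(ushort));

            for (int i = 0; i < pixelCount; ++i)
            {
                // scale the raw intensity so a typical scene lands in a visible grey range
                backBuffer[i] = Math.Min(InfraredOutputValueMaximum,
                    (frameData[i] / InfraredSourceValueMaximum * InfraredSceneValueAverage * InfraredSceneStandardDeviations)
                    + InfraredOutputValueMinimum);
            }

            this.infraredSource.AddDirtyRect(new Int32Rect(0, 0, this.infraredSource.PixelWidth, this.infraredSource.PixelHeight));
            this.infraredSource.Unlock();
        }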
Code Example #8
File: KinectServiceDepth.cs  Project: haschdl/kinect
        protected override void MultiSourceFrameReaderOnMultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
        {
            var reference = e.FrameReference.AcquireFrame();

            using (var bodyIndexFrame = reference.BodyIndexFrameReference.AcquireFrame())
                using (var depthFrame = reference.DepthFrameReference.AcquireFrame())
                {
                    if (depthFrame != null && bodyIndexFrame != null)
                    {
                        depthFrame.CopyFrameDataToArray(_depthFrameData);
                        bodyIndexFrame.CopyFrameDataToArray(_bodyIndexFrameData);

                        // the fastest way to process the depth frame data is to directly access
                        // the underlying buffer
                        using (Microsoft.Kinect.KinectBuffer depthBuffer = depthFrame.LockImageBuffer())
                        {
                            // verify the frame is the expected size before processing the depth data
                            if (((this.depthFrameDescription.Width * this.depthFrameDescription.Height) == (depthBuffer.Size / this.depthFrameDescription.BytesPerPixel)))
                            {
                                this.ProcessDepthFrameData(depthBuffer.UnderlyingBuffer, depthBuffer.Size, 500, 3000);
                            }
                        }
                        EnqueueKinectMessage(NextKinectMessage);
                    }
                }
        }
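In this project the subscription presumably happens in a base class (the handler is a protected override). For reference, a typical way to open a multi-source reader and attach such a handler looks roughly like this; field names and the subscribed stream set are assumptions based on what the handler consumes.

        // Sketch only: open a reader for the depth and body index streams used above.
        private KinectSensor kinectSensor;
        private MultiSourceFrameReader multiSourceFrameReader;

        private void InitializeReader()
        {
            this.kinectSensor = KinectSensor.GetDefault();

            // subscribe only to the streams the handler actually consumes
            this.multiSourceFrameReader = this.kinectSensor.OpenMultiSourceFrameReader(
                FrameSourceTypes.Depth | FrameSourceTypes.BodyIndex);

            this.multiSourceFrameReader.MultiSourceFrameArrived += this.MultiSourceFrameReaderOnMultiSourceFrameArrived;

            this.kinectSensor.Open();
        }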
Code Example #9
        private void InfraredReader_FrameArrived(object sender, InfraredFrameArrivedEventArgs e)
        {
            if (this.processingInfraredFrame)
            {
                return;
            }
            this.processingInfraredFrame = true;
            bool infraredFrameProcessed = false;

            using (InfraredFrame infraredFrame = e.FrameReference.AcquireFrame())
            {
                if (infraredFrame != null)
                {
                    // the fastest way to process the infrared frame data is to directly access
                    // the underlying buffer
                    using (Microsoft.Kinect.KinectBuffer infraredBuffer = infraredFrame.LockImageBuffer())
                    {
                        // verify the frame is the expected size before processing the infrared data
                        if (((this.infraredFrameDescription.Width * this.infraredFrameDescription.Height) == (infraredBuffer.Size / this.infraredFrameDescription.BytesPerPixel)))
                        {
                            this.ProcessInfraredFrameData(infraredBuffer.UnderlyingBuffer, infraredBuffer.Size, this.infraredFrameDescription.BytesPerPixel);
                            infraredFrameProcessed = true;
                        }
                    }
                }
            }

            if (infraredFrameProcessed)
            {
                this.Rescale(this.infraredPixels, this.truncatedInfraredPixels);
                this.infraredFrameCallback(this.truncatedInfraredPixels);
            }
            this.processingInfraredFrame = false;
        }
Code Example #10
        private void Depth_Reader_FrameArrived(object sender, DepthFrameArrivedEventArgs e)
        {
            bool depthFrameProcessed = false;

            //chart1.Series[0].Points.Clear();


            using (DepthFrame depthFrame = e.FrameReference.AcquireFrame())
            {
                if (depthFrame != null)
                {
                    using (Microsoft.Kinect.KinectBuffer depthBuffer = depthFrame.LockImageBuffer())
                    {
                        // verify data and write the new depth frame data to the display bitmap
                        if (((this.depthFrameDescription.Width * this.depthFrameDescription.Height) == (depthBuffer.Size / this.depthFrameDescription.BytesPerPixel)) &&
                            (this.depthFrameDescription.Width == this.depthBitmap.PixelWidth) && (this.depthFrameDescription.Height == this.depthBitmap.PixelHeight))
                        {
                            // Clamp maxDepth to the sensor's reliable far-field limit; substitute
                            // ushort.MaxValue to visualize the full (less reliable) depth range
                            ushort maxDepth = depthFrame.DepthMaxReliableDistance;

                            this.ProcessDepthFrameData(depthBuffer.UnderlyingBuffer, depthBuffer.Size, depthFrame.DepthMinReliableDistance, maxDepth);
                            depthFrameProcessed = true;

                            if (ctr < 1)
                            {
                                writer.WriteLine("\nmax_depth: " + depthFrame.DepthMaxReliableDistance);
                                writer.WriteLine("min_depth: " + depthFrame.DepthMinReliableDistance);
                                writer.WriteLine("Cur_max" + cur_mux);
                            }
                        }
                    }
                }
            }

            if (depthFrameProcessed)
            {
                // if (ctr < 1)
                // {
                //     writer.WriteLine("Depth_length:"+this.depthPixels.Length);
                //     int sum=0;
                //     for (int i = 0; i < this.depthPixels.Length; i++)
                //     {
                //         //Console.Write("Depth["+i+"] =  " + depthPixels[i] +"\n");
                //         sum += hist[i];
                //         writer.Write(hist[i] + "\t");
                //     }
                //     writer.Write("\n\nsum= \t"+sum);
                // }
                //for(int i = 0; i < this.depthPixels.Length; i++)
                //  chart1.Series[0].Points.Add(hist[i]);
                //chart1.SaveImage(hist_display.Source);
                ctr++;
                this.RenderDepthPixels();
            }
        }
Code Example #11
        public WriteableBitmap ParseToWriteableBitmap(InfraredFrame infraredFrame)
        {
            WriteableBitmap infraredBitmap = new WriteableBitmap(infraredFrame.FrameDescription.Width, infraredFrame.FrameDescription.Height, 96.0, 96.0, PixelFormats.Gray32Float, null);

            using (Microsoft.Kinect.KinectBuffer infraredBuffer = infraredFrame.LockImageBuffer())
            {
                ConvertInfraredFrameData(infraredBitmap, infraredFrame.FrameDescription, infraredBuffer.UnderlyingBuffer, infraredBuffer.Size);
            }

            return(infraredBitmap);
        }
Code Example #12
 /// <summary>
 /// Store body index image
 /// </summary>
 /// <param name="bodyIndexFrame">body index frame to be stored</param>
 /// <param name="frameNumber">frame number</param>
 public static void Handle_BodyIndexFrame(BodyIndexFrame bodyIndexFrame, String frameNumber)
 {
     using (Microsoft.Kinect.KinectBuffer bodyIndexBuffer = bodyIndexFrame.LockImageBuffer())
     {
         BitmapSource bitmapSource = BitmapSource.Create(bodyIndexWidth, bodyIndexHeight, 96.0, 96.0,
                                                         PixelFormats.Gray8, null, bodyIndexBuffer.UnderlyingBuffer, (int)bodyIndexBuffer.Size, bodyIndexWidth * 1);
         String bodyIndexPath = FramesAndPaths.GetImageFilePath(FramesAndPaths.FileType.BodyIndexImage, frameNumber);
         bitmapSource.Save(bodyIndexPath + ".jpg", ImageFormat.Jpeg);
     }
     // Release bodyIndexFrame
     bodyIndexFrame.Dispose();
 }
Code Example #13
        /// <summary>
        /// Store depth image
        /// </summary>
        /// <param name="depthFrame">depth frame to be stored</param>
        /// <param name="frameNumber">frame number</param>
        public static void Handle_DepthFrame(DepthFrame depthFrame, String frameNumber)
        {
            using (Microsoft.Kinect.KinectBuffer depthBuffer = depthFrame.LockImageBuffer())
            {
                BitmapSource bitmapSource = BitmapSource.Create(depthWidth, depthHeight, 96.0, 96.0,
                                                                PixelFormats.Gray16, null, depthBuffer.UnderlyingBuffer, (int)depthBuffer.Size, depthWidth << 1);

                String depthPath = FramesAndPaths.GetImageFilePath(FramesAndPaths.FileType.DepthImage, frameNumber);
                bitmapSource.Save(depthPath + ".png", ImageFormat.Png);
            }
            // Release depthFrame
            depthFrame.Dispose();
        }
Code Example #14
        /// <summary>
        /// Store infrared image
        /// </summary>
        /// <param name="infraredFrame">infrared frame to be stored</param>
        /// <param name="frameNumber">frame number</param>
        public static void Handle_InfraredFrame(InfraredFrame infraredFrame, String frameNumber)
        {
            using (Microsoft.Kinect.KinectBuffer infraredBuffer = infraredFrame.LockImageBuffer())
            {
                BitmapSource bitmapSource = BitmapSource.Create(infraredWidth, infraredHeight, 96.0, 96.0,
                                                                PixelFormats.Gray16, null, infraredBuffer.UnderlyingBuffer, (int)infraredBuffer.Size, infraredWidth << 1);

                String infraredPath = FramesAndPaths.GetImageFilePath(FramesAndPaths.FileType.InfraredImage, frameNumber);
                bitmapSource.Save(infraredPath + ".jpg", ImageFormat.Jpeg);
            }
            // Release infraredFrame
            infraredFrame.Dispose();
        }
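The three Handle_* methods in Examples #12-#14 call bitmapSource.Save(path, format), which is not part of WPF's BitmapSource; the project presumably defines an extension method for it. A sketch of such a helper, using the standard WPF encoders, might look like this (the class and its behavior are assumptions, not the project's actual code):

// Hypothetical extension method assumed by the Save(...) calls above.
public static class BitmapSourceExtensions
{
    public static void Save(this BitmapSource source, string path, System.Drawing.Imaging.ImageFormat format)
    {
        // choose a WPF encoder that matches the requested System.Drawing format
        BitmapEncoder encoder = format.Equals(System.Drawing.Imaging.ImageFormat.Png)
            ? (BitmapEncoder)new PngBitmapEncoder()
            : new JpegBitmapEncoder();

        encoder.Frames.Add(BitmapFrame.Create(source));

        using (var stream = new System.IO.FileStream(path, System.IO.FileMode.Create))
        {
            encoder.Save(stream);
        }
    }
}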
Code Example #15
        private void Reader_FrameArrived(object sender, DepthFrameArrivedEventArgs e)
        {
            bool depthFrameProcessed = false;

            using (DepthFrame depthFrame = e.FrameReference.AcquireFrame())
            {
                if (depthFrame != null)
                {
                    if (recording && DateTime.Now.Ticks > (lastFrameRecorded + (1e7 / framesPerSecond)))
                    {
                        ushort[] depthPoints = new ushort[this.depthFrameDescription.Width * this.depthFrameDescription.Height];
                        depthFrame.CopyFrameDataToArray(depthPoints);

                        CameraSpacePoint[] cameraPtsArray = new CameraSpacePoint[this.depthFrameDescription.Width * this.depthFrameDescription.Height];
                        kinectSensor.CoordinateMapper.MapDepthFrameToCameraSpace(depthPoints, cameraPtsArray);

                        this.ProcessDepthFrameDataToFile(cameraPtsArray, (ushort)(minDepth), (ushort)(maxDepth), "S" + participantNumber.ToString("00") + "_" + globalFrameNumber.ToString("00000") + "_" + fileName + "Frame" + imageCount.ToString("0000") + ".pcd");

                        lastFrameRecorded = DateTime.Now.Ticks;
                        imageCount++;
                        globalFrameNumber++;
                    }

                    // the fastest way to process the depth frame data is to directly access
                    // the underlying buffer
                    using (Microsoft.Kinect.KinectBuffer depthBuffer = depthFrame.LockImageBuffer())
                    {
                        // verify data and write the new depth frame data to the display bitmap
                        if (((this.depthFrameDescription.Width * this.depthFrameDescription.Height) == (depthBuffer.Size / this.depthFrameDescription.BytesPerPixel)) &&
                            (this.depthFrameDescription.Width == this.depthBitmap.PixelWidth) && (this.depthFrameDescription.Height == this.depthBitmap.PixelHeight))
                        {
                            // Note: In order to see the full range of depth (including the less reliable far field depth)
                            // we are setting maxDepth to the extreme potential depth threshold
                            //ushort maxDepth = ushort.MaxValue;

                            // If you wish to filter by reliable depth distance, uncomment the following line:
                            //maxDepth = depthFrame.DepthMaxReliableDistance;

                            this.ProcessDepthFrameData(depthBuffer.UnderlyingBuffer, depthBuffer.Size, (ushort)(minDepth * 1000), (ushort)(maxDepth * 1000));
                            depthFrameProcessed = true;
                            //this.ProcessDepthFrameDataToFile(depthBuffer.UnderlyingBuffer, depthBuffer.Size, depthFrame.DepthMinReliableDistance, (ushort)(maxDepth * 1000));
                        }
                    }
                }
            }

            if (depthFrameProcessed)
            {
                this.RenderDepthPixels();
            }
        }
Code Example #16
 private void Reader_FrameArrived(object sender, DepthFrameArrivedEventArgs e)
 {
     using (DepthFrame depthFrame = e.FrameReference.AcquireFrame())
     {
         if (depthFrame != null)
         {
             using (Microsoft.Kinect.KinectBuffer depthBuffer = depthFrame.LockImageBuffer())
             {
                 depthFrame.CopyFrameDataToArray(uDepthPixels);
             }
         }
     }
     DoMapping();
 }
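DoMapping() and uDepthPixels are defined elsewhere in this class. A plausible sketch, assuming uDepthPixels is a preallocated ushort[] of width x height and kinectSensor is the active sensor, projects the copied depth frame into 3-D camera space with the CoordinateMapper:

 // Sketch only: field names are assumptions.
 private CameraSpacePoint[] cameraPoints;

 private void DoMapping()
 {
     if (this.cameraPoints == null)
     {
         this.cameraPoints = new CameraSpacePoint[this.uDepthPixels.Length];
     }

     // convert every depth sample (millimetres) into an (X, Y, Z) point in metres
     this.kinectSensor.CoordinateMapper.MapDepthFrameToCameraSpace(this.uDepthPixels, this.cameraPoints);
 }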
Code Example #17
 //subscribed event set during kinect initialization (called each time a depth frame is available)
 private void Reader_DepthFrameArrived(object sender, DepthFrameArrivedEventArgs e)
 {
     using (DepthFrame depthFrame = e.FrameReference.AcquireFrame())
     {
         if (depthFrame != null)
         {
             using (Microsoft.Kinect.KinectBuffer depthBuffer = depthFrame.LockImageBuffer())
             {
                 // verify data and begin processing the data
                 if (((this.depthFrameDescription.Width * this.depthFrameDescription.Height) == (depthBuffer.Size / this.depthFrameDescription.BytesPerPixel)))
                 {
                     this.ProcessDepthFrameData(depthBuffer.UnderlyingBuffer, depthBuffer.Size, depthFrame.DepthMinReliableDistance, depthFrame.DepthMaxReliableDistance);
                 }
             }
         }
     }
 }
Code Example #18
        public void DepthFrameArrival(DepthFrame df, ref bool frameProcessed, double fps, WriteableBitmap depthBitmap)
        {
            // the fastest way to process the depth frame data is to directly access
            // the underlying buffer
            using (Microsoft.Kinect.KinectBuffer depthBuffer = df.LockImageBuffer())
            {
                // verify data and write the new depth frame data to the display bitmap
                if (((df.FrameDescription.Width * df.FrameDescription.Height) == (depthBuffer.Size / getBPP())) &&
                    (df.FrameDescription.Width == depthBitmap.PixelWidth) && (df.FrameDescription.Height == depthBitmap.PixelHeight))
                {
                    // Clamp the range to the sensor's reliable distance limits; substitute
                    // ushort.MaxValue for maxDepth to visualize the full (less reliable) range
                    ushort maxDepth = df.DepthMaxReliableDistance;
                    ushort minDepth = df.DepthMinReliableDistance;

                    ProcessDepthFrameData(depthBuffer.UnderlyingBuffer, depthBuffer.Size, minDepth, maxDepth);

                    frameProcessed = true;

                    // depthFrame.CopyFrameDataToArray(this.depthPixelBuffer); done in processing function
                    if (depthRecording)
                    {
                        garbageCount++;
                        this.depthBinaryBuffer.Enqueue((byte[])(depthPixelBuffer.Clone()));
                        this.frameCount++;

                        if (fps < 16.0)
                        {
                            garbageCount++;
                            Console.WriteLine("fps drop yaşandı");
                            this.depthBinaryBuffer.Enqueue((byte[])(depthPixelBuffer.Clone()));
                            this.frameCount++;
                        }

                        /*if(garbageCount > 500)
                         * {
                         *  System.GC.Collect();
                         *  garbageCount = 0;
                         * }*/
                    }
                }
            }
        }
Code Example #19
 private void InfraredReader_FrameArrived(object sender, InfraredFrameArrivedEventArgs args)
 {
     using (var frame = args.FrameReference.AcquireFrame())
     {
         if (frame != null)
         {
             using (Microsoft.Kinect.KinectBuffer infraredBuffer = frame.LockImageBuffer())
             {
                 // verify data and write the new infrared frame data to the display bitmap
                 if (((_infraredFrameDescription.Width * _infraredFrameDescription.Height) == (infraredBuffer.Size / _infraredFrameDescription.BytesPerPixel)) &&
                     (_infraredFrameDescription.Width == _infraredBitmap.PixelWidth) && (_infraredFrameDescription.Height == _infraredBitmap.PixelHeight))
                 {
                     this.ProcessInfraredFrameData(infraredBuffer.UnderlyingBuffer, infraredBuffer.Size);
                 }
             }
         }
     }
 }
Code Example #20
File: Form1.cs  Project: kavehbc/KinRes
        void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
        {
            var reference = e.FrameReference.AcquireFrame();

            // Depth
            using (var frame = reference.DepthFrameReference.AcquireFrame())
            {
                bool depthFrameProcessed = false;
                if (frame != null)
                {
                    _bitmap     = frame.ToBitmap();
                    video.Image = _bitmap;
                    dip         = new ushort[depthFrameDescription.Width * depthFrameDescription.Height];
                    dip1        = new byte[depthFrameDescription.Width * depthFrameDescription.Height];

                    if (dip != null && frame != null)
                    //  frame.CopyDepthImagePixelDataTo(dip);
                    {
                        using (Microsoft.Kinect.KinectBuffer depthBuffer = frame.LockImageBuffer())
                        {
                            // verify data and write the new depth frame data to the display bitmap
                            if (((this.depthFrameDescription.Width * this.depthFrameDescription.Height) == (depthBuffer.Size / this.depthFrameDescription.BytesPerPixel)) &&
                                (this.depthFrameDescription.Width == this.depthBitmap.PixelWidth) && (this.depthFrameDescription.Height == this.depthBitmap.PixelHeight))
                            {
                                // Note: In order to see the full range of depth (including the less reliable far field depth)
                                // we are setting maxDepth to the extreme potential depth threshold
                                ushort maxDepth = ushort.MaxValue;

                                // If you wish to filter by reliable depth distance, uncomment the following line:
                                //maxDepth = frame.DepthMaxReliableDistance;

                                this.ProcessDepthFrameData(depthBuffer.UnderlyingBuffer, depthBuffer.Size, frame.DepthMinReliableDistance, maxDepth);
                                depthFrameProcessed = true;
                            }
                        }
                    }

                    if (depthFrameProcessed)
                    {
                        this.RenderDepthPixels();
                    }
                }
            }
        }
Code Example #21
        public void BodyIndexFrameArrival(BodyIndexFrame bif, ref bool frameProcessed, double fps, WriteableBitmap bodyIndexBitmap)
        {
            // the fastest way to process the body index data is to directly access
            // the underlying buffer
            using (Microsoft.Kinect.KinectBuffer bodyIndexBuffer = bif.LockImageBuffer())
            {
                int width  = bif.FrameDescription.Width;
                int height = bif.FrameDescription.Height;
                // verify data and write the new body index frame data to the display bitmap
                if (((width * height) == bodyIndexBuffer.Size) &&
                    (width == bodyIndexBitmap.PixelWidth) && (height == bodyIndexBitmap.PixelHeight))
                {
                    ProcessBodyIndexFrameData(bodyIndexBuffer.UnderlyingBuffer, bodyIndexBuffer.Size);
                    frameProcessed = true;
                }

                if (bodyRecording)
                {
                    Bitmap bitmapFrame;
                    try
                    {
                        bitmapFrame = new Bitmap(width, height, System.Drawing.Imaging.PixelFormat.Format8bppIndexed);
                    }
                    catch (Exception e)
                    {
                        Console.WriteLine("Body Exception");
                        Console.WriteLine(e);
                        System.GC.Collect();
                        bitmapFrame = new Bitmap(width, height, System.Drawing.Imaging.PixelFormat.Format8bppIndexed);
                    }
                    UtilityClass.ByteArrayToBitmap(ref bitmapFrame, bodyPixelBuffer, width, height);
                    bBitmap = bitmapFrame;
                    bodyBitmapBuffer.Enqueue(bBitmap);
                    //System.GC.Collect();
                    frameCount++;
                    if (fps < 16.0)
                    {
                        Console.WriteLine("fps drop yaşandı");
                        bodyBitmapBuffer.Enqueue(bBitmap);
                        frameCount++;
                    }
                }
            }
        }
Code Example #22
        private void updateDisplayedBitmap(BodyIndexFrame bif)
        {
            using (Microsoft.Kinect.KinectBuffer bodyIndexBuffer = bif.LockImageBuffer())
            {
                // Verify that the frame matches the expected size before writing to the bitmap
                if (((sensor.getBodyIndexFrameDescription().Width *sensor.getBodyIndexFrameDescription().Height) == bodyIndexBuffer.Size) &&
                    (sensor.getBodyIndexFrameDescription().Width == this.displayedBitmap.PixelWidth) &&
                    (sensor.getBodyIndexFrameDescription().Height == this.displayedBitmap.PixelHeight))
                {
                    uint[] pixelData = processBIF(bodyIndexBuffer.UnderlyingBuffer, bodyIndexBuffer.Size);

                    displayedBitmap.WritePixels(
                        new Int32Rect(0, 0, displayedBitmap.PixelWidth, displayedBitmap.PixelHeight), pixelData,
                        this.displayedBitmap.PixelWidth * BytesPerPixel, 0);

                    bitmap_feed.Source = displayedBitmap;
                }
            }
        }
Code Example #23
 /// <summary>
 /// Handles the body index frame arrived event
 /// </summary>
 /// <param name="sender"></param>
 /// <param name="e"></param>
 public void Reader_BodyIndexFrameArrived(Object sender, BodyIndexFrameArrivedEventArgs e)
 {
     using (BodyIndexFrame bodyIndexFrame = e.FrameReference.AcquireFrame())
     {
         if (bodyIndexFrame != null)
         {
             // the fastest way to process the body index data is to directly access
             // the underlying buffer
             using (Microsoft.Kinect.KinectBuffer bodyIndexBuffer = bodyIndexFrame.LockImageBuffer())
             {
                 // verify the frame is the expected size before processing the body index data
                 if (((this.bodyIndexFrameDescription.Width * this.bodyIndexFrameDescription.Height) == bodyIndexBuffer.Size))
                 {
                     //this.BodyIndexToDepth(bodyIndexBuffer.UnderlyingBuffer, bodyIndexBuffer.Size);
                     this.ProcessBodyIndexFrameData(bodyIndexBuffer.UnderlyingBuffer, bodyIndexBuffer.Size);
                 }
             }
         }
     }
 }
Code Example #24
 private void Reader_InfraredFrameArrived(object sender, InfraredFrameArrivedEventArgs e)
 {
     // InfraredFrame is IDisposable
     using (InfraredFrame infraredFrame = e.FrameReference.AcquireFrame())
     {
         if (infraredFrame != null)
         {
             // the fastest way to process the infrared frame data is to directly access
             // the underlying buffer
             using (Microsoft.Kinect.KinectBuffer infraredBuffer = infraredFrame.LockImageBuffer())
             {
                 // verify data and write the new infrared frame data to the display bitmap
                 if (((this.infraredFrameDescription.Width * this.infraredFrameDescription.Height) == (infraredBuffer.Size / this.infraredFrameDescription.BytesPerPixel)) &&
                     (this.infraredFrameDescription.Width == this.infraredBitmap.PixelWidth) && (this.infraredFrameDescription.Height == this.infraredBitmap.PixelHeight))
                 {
                     this.ProcessInfraredFrameData(infraredBuffer.UnderlyingBuffer, infraredBuffer.Size);
                 }
             }
         }
     }
 }
Code Example #25
        /// <summary>
        /// Handles the depth frame data arriving from the sensor
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        private void DepthFrameReader(DepthFrame depthFrame)
        {
            if (depthFrame != null)
            {
                // the fastest way to process the depth frame data is to directly access
                // the underlying buffer
                using (Microsoft.Kinect.KinectBuffer depthBuffer = depthFrame.LockImageBuffer())
                {
                    // verify data and write the new depth frame data to the display bitmap
                    if (((this.depthFrameDescription.Width * this.depthFrameDescription.Height) == (depthBuffer.Size / depthFrameDescription.BytesPerPixel)) &&
                        (this.depthFrameDescription.Width == this.depthBitmap.PixelWidth) && (this.depthFrameDescription.Height == this.depthBitmap.PixelHeight))
                    {
                        ushort maxDepth = 1000;
                        ushort minDepth = depthFrame.DepthMinReliableDistance;
                        this.ProcessDepthFrameData(depthBuffer.UnderlyingBuffer, depthBuffer.Size, minDepth, maxDepth);
                        this.RenderDepthPixels();
                    }
                }
            }
        }
Code Example #26
        void HandleDepthFrame(DepthFrame depthFrame)
        {
            if (depthFrame == null)
            {
                this.currentViewModel.DepthImageOpacity = 0;
                return;
            }
            this.currentViewModel.DepthImageOpacity = 0.8;

            bool depthFrameProcessed = false;

            // the fastest way to process the depth frame data is to directly access
            // the underlying buffer
            using (Microsoft.Kinect.KinectBuffer depthBuffer = depthFrame.LockImageBuffer())
            {
                // verify data and write the new depth frame data to the display bitmap
                if (((this.kinectSensor.DepthFrameSource.FrameDescription.Width * this.kinectSensor.DepthFrameSource.FrameDescription.Height) == (depthBuffer.Size / this.kinectSensor.DepthFrameSource.FrameDescription.BytesPerPixel)) &&
                    (this.kinectSensor.DepthFrameSource.FrameDescription.Width == this.currentViewModel.DepthImageSource.PixelWidth) && (this.kinectSensor.DepthFrameSource.FrameDescription.Height == this.currentViewModel.DepthImageSource.PixelHeight))
                {
                    // Note: In order to see the full range of depth (including the less reliable far field depth)
                    // we are setting maxDepth to the extreme potential depth threshold
                    ushort maxDepth = ushort.MaxValue;

                    // If you wish to filter by reliable depth distance, uncomment the following line:
                    //// maxDepth = depthFrame.DepthMaxReliableDistance

                    this.ProcessDepthFrameData(depthBuffer.UnderlyingBuffer, depthBuffer.Size, depthFrame.DepthMinReliableDistance, maxDepth);
                    depthFrameProcessed = true;
                }
            }

            if (depthFrameProcessed)
            {
                this.RenderDepthPixels();
            }
        }
Code Example #27
        private void Reader_FrameArrived(object sender, DepthFrameArrivedEventArgs e)
        {
            bool depthFrameProcessed = false;

            using (DepthFrame depthFrame = e.FrameReference.AcquireFrame())
            {
                if (depthFrame != null)
                {
                    _depthData = new ushort[depthFrame.FrameDescription.LengthInPixels];
                    depthFrame.CopyFrameDataToArray(_depthData);

                    using (Microsoft.Kinect.KinectBuffer depthBuffer = depthFrame.LockImageBuffer())
                    {
                        if (((this.depthFrameDescription.Width * this.depthFrameDescription.Height) == (depthBuffer.Size / this.depthFrameDescription.BytesPerPixel)) &&
                            (this.depthFrameDescription.Width == this.depthBitmap.PixelWidth) && (this.depthFrameDescription.Height == this.depthBitmap.PixelHeight))
                        {
                            // Note: In order to see the full range of depth (including the less reliable far field depth)
                            // we are setting maxDepth to the extreme potential depth threshold
                            ushort maxDepth = ushort.MaxValue;
                            //ushort maxDepth = 2700;

                            // If you wish to filter by reliable depth distance, uncomment the following line:
                            // maxDepth = depthFrame.DepthMaxReliableDistance;

                            this.ProcessDepthFrameData(depthBuffer.UnderlyingBuffer, depthBuffer.Size, depthFrame.DepthMinReliableDistance, maxDepth);
                            depthFrameProcessed = true;
                        }
                    }
                }
            }

            if (depthFrameProcessed)
            {
                this.RenderDepthPixels();
            }
        }
Code Example #28
        void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
        {
            var reference = e.FrameReference.AcquireFrame();

            // Infrared
            using (var infraFrame = reference.InfraredFrameReference.AcquireFrame())
            {
                if (infraFrame != null)
                {
                    RenderInfraredPixels(infraFrame);
                }
            }
            // Color
            using (var colorFrame = reference.ColorFrameReference.AcquireFrame())
            {
                // Depth
                using (var depthFrame = reference.DepthFrameReference.AcquireFrame())
                {
                    if (colorFrame != null && depthFrame != null)
                    {
                        var _colorWidth  = colorFrame.ColorFrameSource.FrameDescription.Width;
                        var _colorHeight = colorFrame.ColorFrameSource.FrameDescription.Height;
                        var _depthWidth  = depthFrame.DepthFrameSource.FrameDescription.Width;
                        var _depthHeight = depthFrame.DepthFrameSource.FrameDescription.Height;

                        using (Microsoft.Kinect.KinectBuffer depthBuffer = depthFrame.LockImageBuffer())
                        {
                            // verify data and write the new depth frame data to the display bitmap
                            if (((this.depthFrameDescription.Width * this.depthFrameDescription.Height) == (depthBuffer.Size / this.depthFrameDescription.BytesPerPixel)) &&
                                (this.depthFrameDescription.Width == this.depthBitmap.PixelWidth) && (this.depthFrameDescription.Height == this.depthBitmap.PixelHeight))
                            {
                                // Note: In order to see the full range of depth (including the less reliable far field depth)
                                // we are setting maxDepth to the extreme potential depth threshold
                                ushort maxDepth = ushort.MaxValue;

                                // If you wish to filter by reliable depth distance, uncomment the following line:
                                //// maxDepth = depthFrame.DepthMaxReliableDistance

                                this.ProcessDepthFrameData(depthBuffer.UnderlyingBuffer, depthBuffer.Size, depthFrame.DepthMinReliableDistance, maxDepth);
                                this.RenderDepthPixels();
                            }
                        }

                        using (KinectBuffer colorBuffer = colorFrame.LockRawImageBuffer())
                        {
                            this.colorBitmap.Lock();
                            // verify data and write the new color frame data to the display bitmap
                            if ((_colorWidth == this.colorBitmap.PixelWidth) && (_colorHeight == this.colorBitmap.PixelHeight))
                            {
                                colorFrame.CopyConvertedFrameDataToIntPtr(
                                    this.colorBitmap.BackBuffer,
                                    (uint)(_colorWidth * _colorHeight * 4),
                                    ColorImageFormat.Bgra);

                                this.colorBitmap.AddDirtyRect(new Int32Rect(0, 0, this.colorBitmap.PixelWidth, this.colorBitmap.PixelHeight));
                            }
                            this.colorBitmap.Unlock();
                        }

                        if ((takeScreenshot || dumpPpms) && !robot.IsMoving())
                        {
                            ushort[] depths = new ushort[_depthHeight * _depthWidth];

                            DepthSpacePoint[] mappedColor = new DepthSpacePoint[_colorHeight * _colorWidth];
                            depthFrame.CopyFrameDataToArray(depths);
                            cm.MapColorFrameToDepthSpace(depths, mappedColor);

                            byte[] colors = new byte[_colorHeight * _colorWidth * 4];
                            // this byte array is converted into a PPM at the end, so request RGBA form
                            colorFrame.CopyConvertedFrameDataToArray(colors, ColorImageFormat.Rgba);

                            this.mappedColor = mappedColor;
                            this.depths      = depths;
                            this.colors      = colors;

                            if (takeScreenshot)
                            {
                                ScreenshotSaveFile();
                                takeScreenshot = capturePanorama || false;
                            }
                            else if (dumpPpms)
                            {
                                ScreenshotSaveFile();
                                //DumpPpms();
                                dumpPpms = false;
                            }

                            // Kick off another rotation if capturing a panorama
                            if (capturePanorama)
                            {
                                if (numRotations < MAX_ROTATIONS)
                                {
                                    numRotations++;
                                    RotateCW();
                                    StopMoving(rotateTime);
                                    Thread.Sleep(STABILIZE_TIME);
                                }
                                else
                                {
                                    this.capturePanorama = false;
                                    this.takeScreenshot  = false;
                                    this.panoramaNum++;
                                    this.imageNum     = 0;
                                    this.numRotations = 0;
                                }
                            }
                        }

                        depthCamera.Source = this.depthBitmap;
                        colorCamera.Source = this.colorBitmap;
                        infraCamera.Source = this.infraBitmap;
                    }
                }
            }
        }
Code Example #29
        /// <summary>
        /// Handles the multisource frame data arriving from the sensor
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        private unsafe void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
        {
            // Create instance of EMGUargs which holds the output of data from the kinect
            EMGUargs emguArgs = new EMGUargs();
            MultiSourceFrameReference frameReference = e.FrameReference;
            // Variables initialized to null for easy check of camera failures
            MultiSourceFrame multiSourceFrame = null;
            InfraredFrame    infraredFrame    = null;
            ColorFrame       colorFrame       = null;
            DepthFrame       depthFrame       = null;

            // Acquire frame from the Kinect
            multiSourceFrame = frameReference.AcquireFrame();

            // If the Frame has expired by the time we process this event, return.
            if (multiSourceFrame == null)
            {
                return;
            }
            try
            {
                /*
                 * DepthSpacePoint dp = new DepthSpacePoint
                 * {
                 *  X = 50,
                 *  Y = 20
                 * };
                 * DepthSpacePoint[] dps = new DepthSpacePoint[] { dp };
                 * ushort[] depths = new ushort[] { 2000 };
                 * CameraSpacePoint[] ameraSpacePoints = new CameraSpacePoint[1];
                 *
                 * mapper.MapDepthPointsToCameraSpace(dps, depths, ameraSpacePoints);
                 */
                InfraredFrameReference infraredFrameReference = multiSourceFrame.InfraredFrameReference;
                infraredFrame = infraredFrameReference.AcquireFrame();

                DepthFrameReference depthFrameReference = multiSourceFrame.DepthFrameReference;
                depthFrame = depthFrameReference.AcquireFrame();

                // Check whether the needed frames are available
                if (infraredFrame == null || depthFrame == null)
                {
                    return;
                }

                // the fastest way to process the depth frame data is to directly access
                // the underlying buffer
                using (Microsoft.Kinect.KinectBuffer depthBuffer = depthFrame.LockImageBuffer())
                {
                    // verify data before converting the depth frame to an EMGU image
                    if (((this.depthFrameDescription.Width * this.depthFrameDescription.Height) ==
                         (depthBuffer.Size / this.depthFrameDescription.BytesPerPixel)))
                    {
                        // Conversion to needed EMGU image
                        Mat depthImage = this.ProcessDepthFrameData(depthFrame);

                        emguArgs.DepthImage          = depthImage;
                        emguArgs.DepthFrameDimension = new FrameDimension(depthFrameDescription.Width, depthFrameDescription.Height);
                    }

                    //BgrToDephtPixel(depthBuffer.UnderlyingBuffer, depthBuffer.Size);

                    depthFrame.Dispose();
                    depthFrame = null;
                }

                // IR image
                FrameDescription infraredFrameDescription = infraredFrame.FrameDescription;

                // the fastest way to process the infrared frame data is to directly access
                // the underlying buffer
                using (Microsoft.Kinect.KinectBuffer infraredBuffer = infraredFrame.LockImageBuffer())
                {
                    // verify data before converting the infrared frame to an EMGU image
                    if (((this.infraredFrameDescription.Width * this.infraredFrameDescription.Height) == (infraredBuffer.Size / this.infraredFrameDescription.BytesPerPixel)))
                    {
                        // Conversion to needed EMGU image
                        Mat infraredImage = this.ProcessInfaredFrameData(infraredFrame);
                        emguArgs.InfraredImage          = infraredImage;
                        emguArgs.InfraredFrameDimension = new FrameDimension(infraredFrameDescription.Width, infraredFrameDescription.Height);
                        //  infraredImage.Dispose();
                    }
                    infraredFrame.Dispose();
                    infraredFrame = null;

                    // Check whether the color image is needed for the main window view
                    if (generateColorImage)
                    {
                        ColorFrameReference colorFrameReference = multiSourceFrame.ColorFrameReference;
                        colorFrame = colorFrameReference.AcquireFrame();
                        if (colorFrame == null)
                        {
                            return;
                        }

                        // color image
                        FrameDescription colorFrameDescription = colorFrame.FrameDescription;

                        // the fastest way to process the color frame data is to directly access
                        // the underlying buffer
                        using (Microsoft.Kinect.KinectBuffer colorBuffer = colorFrame.LockRawImageBuffer())
                        {
                            // Conversion to needed EMGU image
                            Mat colorImage = this.ProcessColorFrameData(colorFrame);
                            emguArgs.Colorimage          = colorImage;
                            emguArgs.ColorFrameDimension = new FrameDimension(colorFrameDescription.Width, colorFrameDescription.Height);
                        }
                        // We're done with the colorFrame
                        colorFrame.Dispose();
                        colorFrame = null;
                    }
                }
                // Call the processing finished event for the conversion to EMGU images
                OnEmguArgsProcessed(emguArgs);
            }
            catch (Exception ex)
            {
                // ignore if the frame is no longer available
                Console.WriteLine("FRAME CHRASHED: " + ex.ToString());
            }
            finally
            {
                // generate the event and send writeable bitmaps for each frame, then clean up.
                // only generate the event if the main window is shown.

                // DepthFrame, ColorFrame are Disposable.
                if (colorFrame != null)
                {
                    colorFrame.Dispose();
                    colorFrame = null;
                }
                if (depthFrame != null)
                {
                    depthFrame.Dispose();
                    depthFrame = null;
                }
                // infraredFrame is Disposable
                if (infraredFrame != null)
                {
                    infraredFrame.Dispose();
                    infraredFrame = null;
                }
                if (multiSourceFrame != null)
                {
                    multiSourceFrame = null;
                }
            }
        }
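The ProcessDepthFrameData overload in this example returns an EMGU Mat rather than writing to a bitmap; its body is not shown. A minimal sketch, assuming the Emgu.CV and Emgu.CV.CvEnum namespaces are referenced, allocates a 16-bit single-channel Mat and lets the frame copy directly into it:

        // Sketch only: convert a Kinect DepthFrame into an EMGU Mat; this is an assumed
        // implementation, not the project's actual code.
        private Mat ProcessDepthFrameData(DepthFrame depthFrame)
        {
            FrameDescription description = depthFrame.FrameDescription;

            // 16-bit, single-channel image matching the depth frame dimensions
            Mat depthImage = new Mat(description.Height, description.Width, DepthType.Cv16U, 1);

            // copy the raw depth samples straight into the Mat's pixel buffer
            depthFrame.CopyFrameDataToIntPtr(
                depthImage.DataPointer,
                (uint)(description.Width * description.Height * description.BytesPerPixel));

            return depthImage;
        }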
Code Example #30
        /// <summary>
        /// Handles the depth frame data arriving from the sensor
        /// </summary>
        private void Depth_Reader_FrameArrived(object sender, DepthFrameArrivedEventArgs e)
        {
            if (this.StatusText == Properties.Resources.RunningStatusText)
            {
                //if(this.extractType == "Depth")
                {
                    using (DepthFrame depthFrame = e.FrameReference.AcquireFrame())
                    {
                        if (depthFrame != null)
                        {
                            // the fastest way to process the depth frame data is to directly access
                            // the underlying buffer
                            using (Microsoft.Kinect.KinectBuffer depthBuffer = depthFrame.LockImageBuffer())
                            {
                                {
                                    if ((checkbox_ == 2 && frameCount >= min_) || (checkbox_ == 1 && frameCount >= max_))
                                    {
                                        Application.Exit();
                                    }
                                    //Write timings to txt File
                                    //Change 1221 to total number of frames - 1 and output path of file
                                    if (checkbox_ == 2 && this.frameCount == min_)
                                    {
                                        this.timing[this.frameCount++] = (ushort)depthFrame.RelativeTime.Milliseconds;
                                        string s = "";
                                        for (int i = 0; i < frameCount; i++)
                                        {
                                            s = s + timing[i].ToString() + "\n";
                                        }
                                        System.IO.File.WriteAllText(path_ + @"\Depth_Timings.txt", s);
                                    }

                                    depthFrame.CopyFrameDataToArray(depthFrameData);
                                    //this.frameCount++;
                                    this.timing[this.frameCount++] = (ushort)depthFrame.RelativeTime.Milliseconds;
                                    int length_ = depthFrameData.Length;
//                                    for (int i = 0; i < depthFrameData.Length; i++)
//                                        depthFrameDataBuffer[0, i] = depthFrameData[i];
                                    if (checkbox_ == 1 && frameCount >= min_ && frameCount < max_)
                                    {
                                        Console.WriteLine("here " + frameCount.ToString());
                                        for (int i = 0; i < length_; i++)
                                        {
                                            depthFrameDataBuffer[idx, i] = depthFrameData[i];
                                        }
                                        idx++;
                                    }
                                    else if (checkbox_ == 1 && frameCount == max_)
                                    {
                                        Console.WriteLine("here " + frameCount.ToString());
                                        for (int j = 0; j < idx; j++)
                                        {
                                            for (int i = 0; i < length_; i++)
                                            {
                                                depthFrameData[i] = depthFrameDataBuffer[j, i];
                                            }
                                            filePath   = path_ + @"\Depthframe" + (j + min_).ToString() + ".MAT";
                                            this.matfw = new MATWriter("depthmat", filePath, depthFrameData, depthFrame.FrameDescription.Height, depthFrame.FrameDescription.Width);
                                        }
                                        Application.Exit();
                                    }
                                }
                            }
                        }
                    }
                }
            }
            else
            {
                SaveParamsToFile(path_ + @"\Intrinsic parameters.txt");
                this.StatusText = Properties.Resources.SensorIsAvailableStatusText;
            }
        }