private void multiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
        {
            // Ignore events that arrive after the sensor has been torn down.
            if (kinectSensor == null)
            {
                return;
            }

            // AcquireFrame returns null when the frame expired before we got to it.
            MultiSourceFrame msf = e.FrameReference.AcquireFrame();
            if (msf == null)
            {
                return;
            }

            // Acquire all three sub-frames; each is disposed when we leave the block.
            using (BodyFrame bf = msf.BodyFrameReference.AcquireFrame())
            using (BodyIndexFrame bif = msf.BodyIndexFrameReference.AcquireFrame())
            using (DepthFrame df = msf.DepthFrameReference.AcquireFrame())
            {
                // Only forward complete frame sets to subscribers of this event.
                if (bf != null && bif != null && df != null)
                {
                    AllFrameCallback(bf, bif, df); // Pass these on to anyone who is subscribed to this event
                }
            }
        }
Пример #2
0
    private void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
    {
        // Reads the current depth frame, maps the full 1920x1080 color image
        // into camera space, and samples the depth (Z) at one fixed coordinate.
        MultiSourceFrame multiSourceFrame = e.FrameReference.AcquireFrame();

        if (multiSourceFrame != null)
        {
            using (DepthFrame depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame())
            {
                if (depthFrame != null)
                {
                    // Specified X, Y coordinate
                    // In 1920 x 1080 color frame
                    double x = 1000;
                    double y = 900;

                    FrameDescription depthFrameDescription = depthFrame.FrameDescription;
                    depthWidth  = depthFrameDescription.Width;
                    depthHeight = depthFrameDescription.Height;

                    // Copy the raw depth values out of the frame: one ushort per depth pixel.
                    depthframeData = new ushort[depthWidth * depthHeight];
                    depthFrame.CopyFrameDataToArray(depthframeData);
                    // One CameraSpacePoint per color pixel. NOTE(review): this ~8 MB array is
                    // allocated on every frame event — consider hoisting it to a field.
                    CameraSpacePoint[] csp = new CameraSpacePoint[1920 * 1080];
                    this.coordinateMapper.MapColorFrameToCameraSpace(depthframeData, csp);

                    // Depth(Z Position) of specified coordinate
                    // Row-major index into the color-sized array; Convert.ToInt16 rounds
                    // the double coordinates to the nearest integer.
                    float DepthPosition = csp[(1920 * Convert.ToInt16(y)) + Convert.ToInt16(x)].Z;
                }
            }
        }
    }
Пример #3
0
        private static void Reader_FrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
        {
            // Copies the latest depth data into the static depth buffers.
            if (e.FrameReference == null)
            {
                return;
            }

            MultiSourceFrame multiFrame = e.FrameReference.AcquireFrame();

            // AcquireFrame returns null when the frame has already expired; the
            // original dereferenced it unconditionally (NullReferenceException).
            if (multiFrame == null || multiFrame.DepthFrameReference == null)
            {
                return;
            }

            try
            {
                using (DepthFrame depthFrame = multiFrame.DepthFrameReference.AcquireFrame())
                {
                    if (depthFrame != null)
                    {
                        // Hold the image buffer lock while copying the frame out.
                        using (KinectBuffer buffer = depthFrame.LockImageBuffer())
                        {
                            depthFrameDescription = depthFrame.FrameDescription;
                            depthWidth            = depthFrameDescription.Width;
                            depthHeight           = depthFrameDescription.Height;
                            depthFrameData        = new ushort[depthWidth * depthHeight];
                            depthFrame.CopyFrameDataToArray(depthFrameData);
                        }
                    }
                }
            }
            catch (Exception) { return; } // best-effort: frame access can be revoked mid-read
        }
Пример #4
0
        public void UpdateBody()
        {
            // No sensor attached — nothing to update.
            if (mKinect == null)
            {
                return;
            }

            MultiSourceFrame frame = mFrameReader.AcquireLatestFrame();
            if (frame != null)
            {
                using (var bodyFrame = frame.BodyFrameReference.AcquireFrame())
                {
                    if (bodyFrame != null)
                    {
                        // Lazily allocate per-body storage on the first usable frame.
                        if (mSkeletons == null)
                        {
                            int bodyCount = mKinect.BodyFrameSource.BodyCount;
                            mSkeletons       = new Microsoft.Kinect.Body[bodyCount];
                            mRightHandPoints = new DepthSpacePoint[bodyCount];
                            mLeftHandPoints  = new DepthSpacePoint[bodyCount];
                        }
                        bodyFrame.GetAndRefreshBodyData(mSkeletons);
                    }
                }
            }

            // Project each body's hand joints into depth space.
            if (mSkeletons == null || mRightHandPoints == null || mLeftHandPoints == null)
            {
                return;
            }

            for (int i = 0; i < mKinect.BodyFrameSource.BodyCount; i++)
            {
                Microsoft.Kinect.Body body = mSkeletons[i];
                mRightHandPoints[i] = mKinect.CoordinateMapper.MapCameraPointToDepthSpace(body.Joints[JointType.HandRight].Position);
                mLeftHandPoints[i]  = mKinect.CoordinateMapper.MapCameraPointToDepthSpace(body.Joints[JointType.HandLeft].Position);
            }
        }
Пример #5
0
        private void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
        {
            // Renders the combined color/depth treatment of the latest frame pair.
            MultiSourceFrame multiSourceFrame = e.FrameReference.AcquireFrame();
            if (multiSourceFrame == null)
            {
                return;
            }

            ColorFrame colorFrame = null;
            DepthFrame depthFrame = null;
            try
            {
                colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame();
                depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame();

                // Short-circuit || replaces the original's bitwise |.
                if (colorFrame == null || depthFrame == null)
                {
                    return;
                }

                this.camera.Source = this.msi.FrameTreatment(colorFrame, depthFrame, this.mode.ToString());
            }
            finally
            {
                // The original leaked colorFrame when only depthFrame was null, and
                // leaked both if FrameTreatment threw; dispose on every path.
                if (colorFrame != null)
                {
                    colorFrame.Dispose();
                }
                if (depthFrame != null)
                {
                    depthFrame.Dispose();
                }
            }
        }
        private void Reader_MultiSourceFrameArrived(MultiSourceFrameReader sender, MultiSourceFrameArrivedEventArgs e)
        {
            MultiSourceFrame multiSourceFrame = e.FrameReference.AcquireFrame();

            // A null frame means it expired before we could acquire it.
            if (multiSourceFrame == null)
            {
                return;
            }

            // Render only the stream the UI is currently displaying.
            if (currentDisplayFrameType == DisplayFrameType.Infrared)
            {
                using (InfraredFrame infraredFrame = multiSourceFrame.InfraredFrameReference.AcquireFrame())
                {
                    ShowInfraredFrame(infraredFrame);
                }
            }
            else if (currentDisplayFrameType == DisplayFrameType.Color)
            {
                using (ColorFrame colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame())
                {
                    ShowColorFrame(colorFrame);
                }
            }
        }
Пример #7
0
        private bool ProcessColorFrame(MultiSourceFrame multiSourceFrame)
        {
            // Captures the current color frame into ColorMetaData and, in the
            // color-enabled scanner modes, updates the 2D preview image.
            // Returns true when a color frame was available and processed.
            using (ColorFrame frame = multiSourceFrame.ColorFrameReference.AcquireFrame())
            {
                if (frame == null)
                {
                    return false;
                }

                // (Removed the redundant 'frameColor' alias of 'frame' and the
                // dead commented-out background-removal code.)
                this.ColorMetaData = new ColorMetaData(frame);

                if (PointCloudScannerSettings.ScannerMode == ScannerMode.Color || PointCloudScannerSettings.ScannerMode == ScannerMode.Color_Depth)
                {
                    // The 3D display mode renders elsewhere; only refresh the 2D preview here.
                    if (PointCloudScannerSettings.ScannerMode != ScannerMode.Color_Depth_3DDisplay)
                    {
                        this.imageColor.Source = ImageSourceUtils.CreateImageSource(ColorMetaData.Pixels, ColorMetaData.XColorMaxKinect, ColorMetaData.YColorMaxKinect);
                    }
                }

                return(true);
            }
        }
Пример #8
0
        private void ProcessColorDepthIR(MultiSourceFrame multiSourceFrame)
        {
            // Dispatch frame processing according to the configured scanner mode,
            // then bump the FPS counter if anything was actually processed.
            var mode = PointCloudScannerSettings.ScannerMode;
            bool processed = false;

            if (mode == PointCloudUtils.ScannerMode.Color)
            {
                processed = ProcessColorFrame(multiSourceFrame);
            }
            else if (mode == PointCloudUtils.ScannerMode.Color_Depth)
            {
                // Depth is only attempted when the color frame was handled (&& short-circuits).
                processed = ProcessColorFrame(multiSourceFrame) && ProcessDepthFrame(multiSourceFrame);
            }
            else if (mode == PointCloudUtils.ScannerMode.Depth)
            {
                processed = ProcessDepthFrame(multiSourceFrame);
            }

            if (processed)
            {
                UpdateFramesPerSecond();
            }
        }
        private void UpdateCameraView(MultiSourceFrame multiframe)
        {
            // Copies the latest color frame into the WriteableBitmap shown in the UI.
            using (ColorFrame colorFrame = multiframe.ColorFrameReference.AcquireFrame())
            {
                if (colorFrame == null)
                {
                    return;
                }

                FrameDescription colorFrameDescription = colorFrame.FrameDescription;

                using (KinectBuffer colorBuffer = colorFrame.LockRawImageBuffer())
                {
                    this._colorBitmap.Lock();
                    try
                    {
                        // verify data and write the new color frame data to the display bitmap
                        if ((colorFrameDescription.Width == this._colorBitmap.PixelWidth) && (colorFrameDescription.Height == this._colorBitmap.PixelHeight))
                        {
                            colorFrame.CopyConvertedFrameDataToIntPtr(
                                this._colorBitmap.BackBuffer,
                                (uint)(colorFrameDescription.Width * colorFrameDescription.Height * 4), // 4 bytes per BGRA pixel
                                ColorImageFormat.Bgra);

                            this._colorBitmap.AddDirtyRect(new Int32Rect(0, 0, this._colorBitmap.PixelWidth, this._colorBitmap.PixelHeight));
                        }
                    }
                    finally
                    {
                        // Always unlock — the original left the bitmap locked if the copy threw.
                        this._colorBitmap.Unlock();
                    }
                }
            }
        }
Пример #10
0
        private void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
        {
            // Shows either the body-tracking view or the color view, depending on
            // the currently selected display frame type.
            MultiSourceFrame multiSourceFrame = e.FrameReference.AcquireFrame();
            if (multiSourceFrame == null)
            {
                return;
            }

            switch (currentDisplayFrameType)
            {
                case DisplayFrameType.bodyTracking:
                    using (BodyFrame bodyFrame = multiSourceFrame.BodyFrameReference.AcquireFrame())
                    {
                        // Null check added for parity with the Color branch — the
                        // original passed a possibly-null frame to showBodyFrame.
                        if (bodyFrame != null)
                        {
                            bool dataReceived = false;
                            showBodyFrame(bodyFrame, dataReceived);
                        }
                    }

                    break;
                case DisplayFrameType.Color:
                    using (ColorFrame colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame())
                    {
                        if (colorFrame != null)
                        {
                            this.showColorFrame(colorFrame);
                        }
                    }

                    break;
                default:
                    break;
            }
        }
Пример #11
0
        /// <summary>
        /// Event handler invoked when the Kinect has multiple frame types available.
        /// Renders a combined depth / body-index image onto the canvas background.
        /// </summary>
        /// <param name="sender">
        /// The object that raised the event; here, the Kinect reader.
        /// </param>
        /// <param name="e">
        /// Data supplied with the event notification.
        /// </param>
        void MultiSourceFrameReader_MultiSourceFrameArrived
            (object sender, MultiSourceFrameArrivedEventArgs e)
        {
            MultiSourceFrame frames = this.multiSourceFrameReader.AcquireLatestFrame();

            if (frames == null)
            {
                return;
            }

            // using-blocks guarantee both frames are disposed on every path,
            // including when GetBitmapSource throws (the original leaked then).
            using (DepthFrame depthFrame = frames.DepthFrameReference.AcquireFrame())
            {
                if (depthFrame == null)
                {
                    return;
                }

                using (BodyIndexFrame bodyIndexFrame = frames.BodyIndexFrameReference.AcquireFrame())
                {
                    if (bodyIndexFrame == null)
                    {
                        return;
                    }

                    this.canvas.Background = new ImageBrush
                                                 (GetBitmapSource(depthFrame, bodyIndexFrame,
                                                                  this.depthFrameDescription, this.bodyIndexFrameDescription));
                }
            }
        }
Пример #12
0
        private void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
        {
            // Shows the infrared(+body) or depth view depending on the current display type.
            MultiSourceFrame multiSourceFrame = e.FrameReference.AcquireFrame();

            // If the Frame has expired by the time we process this event, return.
            if (multiSourceFrame == null)
            {
                return;
            }

            // (Removed the five pre-declared frame locals: colorFrame and
            // bodyIndexFrame were never used, and the rest are declared directly
            // in their using statements so they cannot escape the block.)
            switch (currentDisplayFrameType)
            {
            case DisplayFrameType.Infrared:
                using (InfraredFrame infraredFrame = multiSourceFrame.InfraredFrameReference.AcquireFrame())
                    using (BodyFrame bodyFrame = multiSourceFrame.BodyFrameReference.AcquireFrame()) {
                        ShowInfraredFrame(infraredFrame, bodyFrame);
                    }
                break;

            case DisplayFrameType.Depth:
                using (DepthFrame depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame()) {
                    ShowDepthFrame(depthFrame);
                }
                break;

            default:
                break;
            }
        }
Пример #13
0
        /// <summary>
        /// Captures the current depth frame and saves it to disk as a PNG file
        /// named after the frame's relative timestamp.
        /// </summary>
        /// <param name="multiFrame">MultiSourceFrame retrieved from Kinect.</param>
        private void WriteDepth(MultiSourceFrame multiFrame)
        {
            if (multiFrame == null)
            {
                return;
            }

            using (DepthFrame depthFrame = multiFrame.DepthFrameReference.AcquireFrame())
            {
                if (depthFrame == null)
                {
                    return;
                }

                // Render the raw depth values into the reusable bitmap.
                depthFrame.CopyFrameDataToArray(depthBuffer);
                depthBitmap.WritePixels(depthRect, depthBuffer, depthStride, 0);

                // Build a filesystem-safe name from the timestamp: strip ':' and turn '.' into '_'.
                string timestamp = depthFrame.RelativeTime.ToString();
                string filename  = Regex.Replace(Regex.Replace(timestamp, "[:]", ""), "[.]", "_") + ".png";

                // Encode and persist the bitmap as PNG.
                using (FileStream stream = new FileStream(Path.Combine(directory, filename), FileMode.Create, FileAccess.Write))
                {
                    PngBitmapEncoder encoder = new PngBitmapEncoder();
                    encoder.Frames.Add(BitmapFrame.Create(depthBitmap));
                    encoder.Save(stream);
                }
            }
        }
Пример #14
0
 public void FreeMultiSourceFrame(KinectInterop.SensorData sensorData)
 {
     // Drop our reference to the cached multi-source frame so it can be
     // reclaimed. Assigning null unconditionally is equivalent to the
     // original's null-guarded assignment (a no-op when already null).
     multiSourceFrame = null;
 }
        private void UpdateBodyFrame(MultiSourceFrame multiFrame)
        {
            // Refreshes body data and points the gesture reader at each tracked body,
            // counting the number of people currently on screen in BODY_CAPTURE.
            BODY_CAPTURE = 0;

            // using guarantees the frame is disposed even if GetAndRefreshBodyData
            // throws — the original's manual Dispose() was skipped on exception.
            using (BodyFrame bodyFrame = multiFrame.BodyFrameReference.AcquireFrame())
            {
                if (bodyFrame == null)
                {
                    return;
                }

                bodyFrame.GetAndRefreshBodyData(bodies);

                // Count the people currently visible on screen.
                for (int count = 0; count < BODY_COUNT; count++)
                {
                    Body body = bodies[count];
                    if (!body.IsTracked)
                    {
                        continue;
                    }

                    // Route this body's tracking id to the gesture recognizer.
                    gestureFrameSource            = gestureFrameReader.VisualGestureBuilderFrameSource;
                    gestureFrameSource.TrackingId = body.TrackingId;
                    BODY_CAPTURE++;
                }
            }
        }
Пример #16
0
        public void PollMostRecentInfraredFrame()
        {
            // Grab the newest multi-source frame; bail out when none is pending.
            MultiSourceFrame latest = _reader.AcquireLatestFrame();
            if (latest == null)
            {
                return;
            }

            using (InfraredFrame irFrame = latest.InfraredFrameReference.AcquireFrame())
            {
                // The infrared sub-frame can expire independently of the multi-frame.
                if (irFrame == null)
                {
                    return;
                }

                using (KinectBuffer irBuffer = irFrame.LockImageBuffer())
                {
                    // Process only when the buffer size matches the advertised dimensions.
                    uint expectedPixels = (uint)(InfraredFrameDescription.Width * InfraredFrameDescription.Height);
                    if (expectedPixels == irBuffer.Size / InfraredFrameDescription.BytesPerPixel)
                    {
                        ProcessInfraredFrameData(irBuffer.UnderlyingBuffer, irBuffer.Size);
                    }
                }
            }
        }
        private void UpdateBodyPartsPosition(MultiSourceFrame multiframe)
        {
            // Acquire the body frame; nothing to do when it has expired.
            using (var bodyFrame = multiframe.BodyFrameReference.AcquireFrame())
            {
                if (bodyFrame == null)
                {
                    return;
                }

                // Refresh body data into a scratch array and pick the first tracked body.
                var trackedBodies = new Body[bodyFrame.BodyCount];
                bodyFrame.GetAndRefreshBodyData(trackedBodies);

                var trackedBody = trackedBodies.FirstOrDefault(b => b.IsTracked);
                if (trackedBody == null)
                {
                    return;
                }

                // Project the head and both hands onto the XZ plane.
                var joints       = trackedBody.Joints;
                var headPos      = joints[JointType.Head].Position;
                var rightHandPos = joints[JointType.HandRight].Position;
                var leftHandPos  = joints[JointType.HandLeft].Position;

                HeadXZProextion      = GetXZProjection(headPos.X, headPos.Z);
                HandRigthXZProextion = GetXZProjection(rightHandPos.X, rightHandPos.Z);
                HandLeftXZProextion  = GetXZProjection(leftHandPos.X, leftHandPos.Z);

                // Also track the Sphero robot's position when one is connected.
                if (_sphero != null)
                {
                    SpheroXZProextion = _spheroPointTransform.Transform(_sphero.CurrentPoint);
                }
            }
        }
Пример #18
0
        public void PollMostRecentSilhouetteFrame()
        {
            // Grab the newest multi-source frame; bail out when none is pending.
            MultiSourceFrame latest = _reader.AcquireLatestFrame();
            if (latest == null)
            {
                return;
            }

            using (BodyIndexFrame silhouetteFrame = latest.BodyIndexFrameReference.AcquireFrame())
            {
                // The body-index sub-frame can expire independently of the multi-frame.
                if (silhouetteFrame == null)
                {
                    return;
                }

                using (KinectBuffer silhouetteBuffer = silhouetteFrame.LockImageBuffer())
                {
                    // One byte per pixel: process only when the buffer size matches.
                    uint expectedBytes = (uint)(SilhouetteFrameDescription.Width * SilhouetteFrameDescription.Height);
                    if (expectedBytes == silhouetteBuffer.Size)
                    {
                        ProcessSilhouetteData(silhouetteBuffer.UnderlyingBuffer, silhouetteBuffer.Size);
                    }
                }
            }
        }
Пример #19
0
        private void RenderFrame(MultiSourceFrame kinectFrame)
        {
            // Copies depth, color and infrared data out of the Kinect frame and
            // enqueues the bundle on the processing pipeline.
            calculateFPS();

            if (!pipeline.Timer.IsRunning)
            {
                return;
            }

            using (var depthFrame = kinectFrame.DepthFrameReference.AcquireFrame())
                using (var colorFrame = kinectFrame.ColorFrameReference.AcquireFrame())
                    using (var irFrame = kinectFrame.InfraredFrameReference.AcquireFrame())
                    {
                        // Any sub-frame can expire independently; the original
                        // dereferenced them unconditionally (NullReferenceException).
                        if (depthFrame == null || colorFrame == null || irFrame == null)
                        {
                            return;
                        }

                        var frame = new KinectFrame();

                        frame.Id           = Guid.NewGuid();
                        frame.RelativeTime = pipeline.Timer.ElapsedTime;

                        // NOTE(review): depth data is copied into frame.InfraredPixels and then
                        // overwritten by the IR copy below — this looks like a bug (presumably a
                        // DepthPixels buffer was intended). Preserved as-is; confirm against the
                        // KinectFrame definition before changing.
                        depthFrame.CopyFrameDataToIntPtr(frame.InfraredPixels, (uint)Kinect2Metrics.DepthBufferLength);
                        colorFrame.CopyConvertedFrameDataToIntPtr(frame.ColorPixels, (uint)Kinect2Metrics.ColorBufferLength, ColorImageFormat.Bgra);
                        irFrame.CopyFrameDataToIntPtr(frame.InfraredPixels, (uint)Kinect2Metrics.IRBufferLength);

                        pipeline.Enqueue(frame);
                    }

            FPS     = pipeline.Timer.FPS;
            BackLog = pipeline.Count;
        }
Пример #20
0
        /// <summary>
        /// Event handler invoked when the Kinect has multiple frame types available.
        /// Draws the latest color and depth images onto their respective canvases.
        /// </summary>
        /// <param name="sender">
        /// The object that raised the event; here, the Kinect reader.
        /// </param>
        /// <param name="e">
        /// Data supplied with the event notification.
        /// </param>
        void MultiSourceFrameReader_MultiSourceFrameArrived
            (object sender, MultiSourceFrameArrivedEventArgs e)
        {
            MultiSourceFrame frames = this.multiSourceFrameReader.AcquireLatestFrame();

            if (frames == null)
            {
                return;
            }

            // using-blocks guarantee both frames are disposed on every path,
            // including when GetBitmapSource throws (the original leaked then).
            using (ColorFrame colorFrame = frames.ColorFrameReference.AcquireFrame())
            {
                if (colorFrame == null)
                {
                    return;
                }

                using (DepthFrame depthFrame = frames.DepthFrameReference.AcquireFrame())
                {
                    if (depthFrame == null)
                    {
                        return;
                    }

                    this.colorCanvas.Background
                        = new ImageBrush(GetBitmapSource(colorFrame, colorFrameDescription));
                    this.depthCanvas.Background
                        = new ImageBrush(GetBitmapSource(depthFrame, depthFrameDescription));
                }
            }
        }
Пример #21
0
        public void PollMostRecentDepthFrame()
        {
            // Grab the newest multi-source frame; bail out when none is pending.
            MultiSourceFrame latest = _reader.AcquireLatestFrame();
            if (latest == null)
            {
                return;
            }

            using (DepthFrame depthFrame = latest.DepthFrameReference.AcquireFrame())
            {
                // The depth sub-frame can expire independently of the multi-frame.
                if (depthFrame == null)
                {
                    return;
                }

                using (KinectBuffer depthBuffer = depthFrame.LockImageBuffer())
                {
                    // Process only when the buffer size matches the advertised dimensions.
                    uint expectedPixels = (uint)(DepthFrameDescription.Width * DepthFrameDescription.Height);
                    if (expectedPixels == depthBuffer.Size / DepthFrameDescription.BytesPerPixel)
                    {
                        ProcessDepthFrameData(
                            depthBuffer.UnderlyingBuffer,
                            depthBuffer.Size,
                            depthFrame.DepthMinReliableDistance,
                            ushort.MaxValue);
                    }
                }
            }
        }
        /// <summary>
        /// Handles the depth and color frames data arriving from the sensor
        /// </summary>
        /// <param name="sender">Object that sent the event</param>
        /// <param name="e">Event arguments</param>
        private void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
        {
            // Process continuously, or only once the previous results were all delivered.
            if ((!this.continousStream && !this.msi.RepColorDelivered && !this.msi.RepMappingDelivered && !this.msi.RepMaskDelivered) || this.continousStream)
            {
                // Rough FPS counter, refreshed roughly once per second.
                this.frameCount++;
                if (DateTime.Now.Second - this.t0_seconds == 1)
                {
                    this.FPS.Content = "FPS:" + this.frameCount;
                    this.frameCount  = 0;
                    this.t0_seconds  = DateTime.Now.Second;
                }
                this.msi.RepColorDelivered   = true;
                this.msi.RepMappingDelivered = true;
                this.msi.RepMaskDelivered    = true;

                MultiSourceFrame multiSourceFrame = e.FrameReference.AcquireFrame();
                if (multiSourceFrame == null)
                {
                    return;
                }

                ColorFrame colorFrame = null;
                DepthFrame depthFrame = null;
                try
                {
                    colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame();
                    depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame();

                    // Short-circuit || replaces the original's bitwise |.
                    if (colorFrame == null || depthFrame == null)
                    {
                        return;
                    }

                    this.camera.Source = this.msi.ProcessFramesData(colorFrame, depthFrame, this.mode.ToString());
                }
                finally
                {
                    // The original never disposed either frame, which starves the
                    // sensor of frame buffers (the sibling handler above disposes
                    // after use — made consistent with it).
                    if (colorFrame != null)
                    {
                        colorFrame.Dispose();
                    }
                    if (depthFrame != null)
                    {
                        depthFrame.Dispose();
                    }
                }
            }
        }
        private void AcquireBodyFrames()
        {
            // Pulls the latest body frame and refreshes the cached bodies and floor plane.
            if (this.multiSourceFrameReader == null)
            {
                return;
            }

            MultiSourceFrame multiFrame = this.multiSourceFrameReader.AcquireLatestFrame();

            if (multiFrame == null)
            {
                return;
            }

            using (BodyFrame bodyFrame = multiFrame.BodyFrameReference.AcquireFrame())
            {
                if (bodyFrame == null)
                {
                    return;
                }

                bodyFrame.GetAndRefreshBodyData(this.Bodies);
                this.Floor = bodyFrame.FloorClipPlane;
            }

            // (Removed the dead 'multiFrame = null' store — assigning null to a
            // local that immediately goes out of scope has no effect.)
        }
Пример #24
0
        /// <summary>
        /// Captures the current color frame and saves it to disk as a JPEG file
        /// named after the frame's relative timestamp.
        /// </summary>
        /// <param name="multiFrame">MultiSourceFrame retrieved from Kinect.</param>
        private void WriteColor(MultiSourceFrame multiFrame)
        {
            if (multiFrame == null)
            {
                return;
            }

            using (ColorFrame colorFrame = multiFrame.ColorFrameReference.AcquireFrame())
            {
                if (colorFrame == null)
                {
                    return;
                }

                // Convert to BGRA and render into the reusable bitmap.
                colorFrame.CopyConvertedFrameDataToArray(colorBuffer, ColorImageFormat.Bgra);
                colorBitmap.WritePixels(colorRect, colorBuffer, colorStride, 0);

                // Build a filesystem-safe name from the timestamp: strip ':' and turn '.' into '_'.
                string timestamp = colorFrame.RelativeTime.ToString();
                string filename  = Regex.Replace(Regex.Replace(timestamp, "[:]", ""), "[.]", "_") + ".jpg";

                // Encode and persist the bitmap as JPEG.
                using (FileStream stream = new FileStream(Path.Combine(directory, filename), FileMode.Create, FileAccess.Write))
                {
                    JpegBitmapEncoder encoder = new JpegBitmapEncoder();
                    encoder.Frames.Add(BitmapFrame.Create(colorBitmap));
                    encoder.Save(stream);
                }
            }
        }
Пример #25
0
        private void OnMultipleFramesArrivedHandler(object sender, MultiSourceFrameArrivedEventArgs e)
        {
            // Dispatches each sub-frame of the multi-source frame to its handler
            // while timing the whole pass with AllFrameWatch.
            init = true;

            // Retrieve multisource frame reference
            MultiSourceFrameReference multiRef = e.FrameReference;

            try {
                AllFrameWatch.Again();

                MultiSourceFrame multiFrame = multiRef.AcquireFrame();
                if (multiFrame == null)
                {
                    AllFrameWatch.Stop();  return;
                }

                HandleDepthFrame(multiFrame.DepthFrameReference);

                // Motion check: skip the heavier streams while in stand-by.
                if (Task.StandBy)
                {
                    AllFrameWatch.Stop(); return;
                }

                HandleColorFrame(multiFrame.ColorFrameReference);
                HandleBodyFrame(multiFrame.BodyFrameReference);
                HandleBodyIndexFrame(multiFrame.BodyIndexFrameReference);

                AllFrameWatch.Stop();
            } catch (Exception) { /* deliberate best-effort: ignore if the frame is no longer available */ }
            // (Removed the original's empty finally block and the pre-declared
            // multiFrame local that was only ever used inside the try.)
        }
        private void getCameraImage(MultiSourceFrame reference)
        {
            // Renders the current color frame into the camera image control.
            using (var colorFrame = reference.ColorFrameReference.AcquireFrame())
            {
                if (colorFrame == null)
                {
                    return;
                }

                PixelFormat format = PixelFormats.Bgr32;
                int width  = colorFrame.FrameDescription.Width;
                int height = colorFrame.FrameDescription.Height;
                int bytesPerPixel = (format.BitsPerPixel + 7) / 8;

                // Copy the pixels, converting to BGRA when the raw format differs.
                byte[] pixels = new byte[width * height * bytesPerPixel];
                if (colorFrame.RawColorImageFormat == ColorImageFormat.Bgra)
                {
                    colorFrame.CopyRawFrameDataToArray(pixels);
                }
                else
                {
                    colorFrame.CopyConvertedFrameDataToArray(pixels, ColorImageFormat.Bgra);
                }

                int stride = width * format.BitsPerPixel / 8;
                camera.Source = BitmapSource.Create(width, height, 96, 96, format, null, pixels, stride);
            }
        }
Пример #27
0
        private static void Reader_FrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
        {
            // Copies the latest depth and color data into the static frame buffers.
            if (e.FrameReference == null)
            {
                return;
            }

            MultiSourceFrame multiFrame = e.FrameReference.AcquireFrame();

            // AcquireFrame returns null when the frame has already expired; the
            // original dereferenced it unconditionally (NullReferenceException).
            if (multiFrame == null)
            {
                return;
            }

            if (multiFrame.DepthFrameReference != null)
            {
                try
                {
                    using (DepthFrame depthFrame = multiFrame.DepthFrameReference.AcquireFrame())
                    {
                        if (depthFrame != null)
                        {
                            // Hold the image buffer lock while copying the frame out.
                            using (KinectBuffer buffer = depthFrame.LockImageBuffer())
                            {
                                depthFrameDescription = depthFrame.FrameDescription;
                                depthWidth            = depthFrameDescription.Width;
                                depthHeight           = depthFrameDescription.Height;
                                depthFrameData        = new ushort[depthWidth * depthHeight];
                                depthFrame.CopyFrameDataToArray(depthFrameData);
                            }
                        }
                    }
                }
                catch (Exception) { return; } // best-effort: frame access can be revoked mid-read
            }

            if (multiFrame.ColorFrameReference != null)
            {
                try
                {
                    using (ColorFrame colorFrame = multiFrame.ColorFrameReference.AcquireFrame())
                    {
                        if (colorFrame != null)
                        {
                            colorFrameDescription = colorFrame.FrameDescription;
                            colorWidth            = colorFrameDescription.Width;
                            colorHeight           = colorFrameDescription.Height;
                            colorFrameData        = new byte[colorWidth * colorHeight * bytesForPixelColor]; // 4 == bytes per color

                            using (KinectBuffer buffer = colorFrame.LockRawImageBuffer())
                            {
                                // Copy raw data when it is already BGRA; otherwise convert.
                                if (colorFrame.RawColorImageFormat == ColorImageFormat.Bgra)
                                {
                                    colorFrame.CopyRawFrameDataToArray(colorFrameData);
                                }
                                else
                                {
                                    colorFrame.CopyConvertedFrameDataToArray(colorFrameData, ColorImageFormat.Bgra);
                                }
                            }
                        }
                    }
                }
                catch (Exception) { return; } // best-effort: frame access can be revoked mid-read
            }
        }
Пример #28
0
        /// <summary>
        /// Dispatches frame processing according to the configured scanner mode
        /// and updates the FPS counter whenever at least one stream produced data.
        /// </summary>
        /// <param name="multiSourceFrame">The acquired multi-source frame to process.</param>
        private void ProcessDepthColorIR(MultiSourceFrame multiSourceFrame)
        {
            switch (PointCloudScannerSettings.ScannerMode)
            {
            case PointCloudUtils.ScannerMode.Color:
            {
                if (ProcessColorFrame(multiSourceFrame))
                {
                    UpdateScannerFramesPerSecond();
                }

                break;
            }

            // CLEANUP: Color_Depth and Color_Depth_3DDisplay had byte-identical
            // bodies; merged into shared case labels (behavior unchanged).
            case PointCloudUtils.ScannerMode.Color_Depth:
            case PointCloudUtils.ScannerMode.Color_Depth_3DDisplay:
            {
                // Evaluate both calls before combining so color processing is
                // not short-circuited away when depth already succeeded.
                bool bDepthProcessed = ProcessDepthFrame(multiSourceFrame);
                bool bColorProcessed = ProcessColorFrame(multiSourceFrame);
                if (bDepthProcessed || bColorProcessed)
                {
                    UpdateScannerFramesPerSecond();
                }

                break;
            }

            case PointCloudUtils.ScannerMode.Depth:
            {
                if (ProcessDepthFrame(multiSourceFrame))
                {
                    UpdateScannerFramesPerSecond();
                }
                break;
            }

            case PointCloudUtils.ScannerMode.IR:
            {
                if (ProcessIRFrame(multiSourceFrame))
                {
                    UpdateScannerFramesPerSecond();
                }
                break;
            }
            }
        }
Пример #29
0
        /// <summary>
        /// Processes a frame on a thread-pool thread via a <see cref="BackgroundWorker"/>.
        /// </summary>
        /// <param name="frame">Frame to hand to <c>ProcessFrame</c>.</param>
        public void ProcessFrameInBackground(MultiSourceFrame frame)
        {
            BackgroundWorker worker = new BackgroundWorker();

            worker.DoWork += (sender, e) =>
            {
                ProcessFrame((MultiSourceFrame)e.Argument);
            };

            // BUG FIX: the worker was created and wired up but never started,
            // so ProcessFrame never ran. Start it with the frame as argument.
            worker.RunWorkerAsync(frame);
        }
Пример #30
0
        /// <summary>
        /// Per-frame Kinect callback: refreshes the camera image, clears the
        /// overlay canvas, then reads and applies the latest body data.
        /// </summary>
        /// <param name="sender">Event sender (unused).</param>
        /// <param name="frameArgs">Event args carrying the frame reference.</param>
        private void OnKinectFrameArrived(object sender, MultiSourceFrameArrivedEventArgs frameArgs)
        {
            MultiSourceFrame acquiredFrame = frameArgs.FrameReference.AcquireFrame();

            // BUG FIX: AcquireFrame returns null when the frame expired before
            // this handler ran; bail out instead of passing null downstream.
            if (acquiredFrame == null)
            {
                return;
            }

            UpdateCameraImage(acquiredFrame);
            canvas.Children.Clear();
            GetBodiesData(acquiredFrame);
            ManageBodiesData();
        }
Пример #31
0
        /// <summary>
        /// Multi-source frame handler: acquires depth, color and body frames
        /// together and processes each stream the configuration asks for.
        /// Frames are stored in instance fields and disposed in the finally
        /// block, so the statement order here is load-bearing.
        /// </summary>
        /// <param name="sender">Event sender (unused).</param>
        /// <param name="e">Event args carrying the multi-source frame reference.</param>
        private void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
        {
            // Fast exit: nothing to do if no stream is requested at all.
            if (!(KinectStreamerConfig.ProvideBodyData || KinectStreamerConfig.ProvideColorData || KinectStreamerConfig.ProvideDepthData))
            {
                return;
            }

            // Reset the frame fields so the finally block below never disposes
            // a stale frame left over from a previous event.
            depthFrame = null;
            colorFrame = null;
            bodyFrame = null;

            multiSourceFrame = e.FrameReference.AcquireFrame();

            // If the Frame has expired by the time we process this event, return.
            if (multiSourceFrame == null)
            {
                return;
            }

            // We use a try/finally to ensure that we clean up before we exit the function.
            // This includes calling Dispose on any Frame objects that we may have and unlocking the bitmap back buffer.
            try
            {
                depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame();
                colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame();
                bodyFrame = multiSourceFrame.BodyFrameReference.AcquireFrame();

                // If any frame has expired by the time we process this event, return.
                // The "finally" statement will Dispose any that are not null.
                if ((depthFrame == null) || (colorFrame == null) || (bodyFrame == null))
                {
                    return;
                }

                // Process color stream if needed

                if (KinectStreamerConfig.ProvideColorData)
                {
                    ProcessColorData();
                }

                // Process depth frame if needed

                if (KinectStreamerConfig.ProvideDepthData)
                {
                    ProcessDepthData();
                }

                // Process body data if needed
                if (KinectStreamerConfig.ProvideBodyData)
                {
                    ProcessBodyData();
                }

            }
            finally
            {
                // Dispose whichever frames were successfully acquired; leaving
                // them undisposed would starve the sensor of frame buffers.
                if (depthFrame != null)
                {
                    depthFrame.Dispose();
                }
                if (colorFrame != null)
                {
                    colorFrame.Dispose();
                }
                if (bodyFrame != null)
                {
                    bodyFrame.Dispose();
                }
            }
        }
Пример #32
0
        /// <summary>
        /// Compresses the body-index frame from the multi-source frame, if one
        /// is available; silently does nothing when the frame has expired.
        /// </summary>
        /// <param name="msf">The acquired multi-source frame.</param>
        private void ProcessBodyIndex(MultiSourceFrame msf)
        {
            using (var bodyIndexFrame = msf.BodyIndexFrameReference.AcquireFrame())
            {
                if (bodyIndexFrame == null)
                {
                    return;
                }

                // BUG FIX: the original called Dispose manually, leaking both
                // the buffer and the frame if Compress threw. `using` releases
                // them on every path.
                using (var buffer = bodyIndexFrame.LockImageBuffer())
                {
                    this.bodyIndexCompressor.Compress(buffer.UnderlyingBuffer);
                }
            }
        }
Пример #33
0
        /// <summary>
        /// Processes one depth frame: updates the FPS label (roughly once per
        /// second), copies the depth data, maps it to color space, normalizes
        /// the depths into depthImageMatrix, blends a running average into
        /// depthImageMatrixAve, and redraws the viewports.
        /// </summary>
        /// <param name="frame">The acquired multi-source frame.</param>
        private void DepthFrameHandling(MultiSourceFrame frame)
        {
            try
            {
                DepthFrame depthFrame = frame.DepthFrameReference.AcquireFrame();
                DepthFrameReference frameReference = frame.DepthFrameReference;
                if (depthFrame != null)
                {
                    // DepthFrame is IDisposable
                    using (depthFrame)
                    {
                        FrameDescription frameDescription = depthFrame.FrameDescription;

                        #region FPS
                        this.framesSinceUpdate++;

                        // update status unless last message is sticky for a while
                        if (DateTime.Now >= nextStatusUpdate)
                        {
                            // calcuate fps based on last frame received
                            double fps = 0.0;

                            if (stopwatch.IsRunning)
                            {
                                stopwatch.Stop();
                                fps = framesSinceUpdate / stopwatch.Elapsed.TotalSeconds;
                                stopwatch.Reset();
                            }

                            // Refresh the label at most once per second.
                            nextStatusUpdate = DateTime.Now + TimeSpan.FromSeconds(1);
                            toolStripLabel_fps.Text = fps + " " + (frameReference.RelativeTime - this.startTime).ToString() + "mS";
                        }

                        if (!stopwatch.IsRunning)
                        {
                            framesSinceUpdate = 0;
                            stopwatch.Start();
                        }
                        #endregion

                        // verify data and write the new depth frame data to the display bitmap
                        if (((frameDescription.Width * frameDescription.Height) == frameData.Length))
                        {
                            // Copy the pixel data from the image to a temporary array
                            depthFrame.CopyFrameDataToArray(frameData);

                            coordinateMapper.MapDepthFrameToColorSpace(frameData, colorPoints);

                            // Get the min and max reliable depth for the current frame
                            ushort minDepth = depthFrame.DepthMinReliableDistance;
                            ushort maxDepth = depthFrame.DepthMaxReliableDistance;
                            // Precompute the reciprocal so the per-pixel loop
                            // multiplies instead of dividing.
                            float imaxDepth = 1.0f / maxDepth;
                            for (int i = 0; i < this.frameData.Length; ++i)
                            {
                                // Get the depth for this pixel
                                ushort depth = this.frameData[i];

                                // To convert to a byte, we're discarding the most-significant
                                // rather than least-significant bits.
                                // We're preserving detail, although the intensity will "wrap."
                                // Values outside the reliable depth range are mapped to 0 (black).
                                //pixels[i] = 1.0f - ((depth >= minDepth && depth <= maxDepth) ? depth : 0) * imaxDepth;
                                // NOTE(review): unlike the commented-out line above, this does
                                // NOT clamp to [minDepth, maxDepth]; near pixels map close to
                                // 1.0. minDepth is currently unused — confirm intentional.
                                depthImageMatrix[i] = 1.0f - depth * imaxDepth;
                            }

                            // Exponential moving average (alpha = 0.15) to smooth
                            // the displayed depth image over time.
                            depthImageMatrixAve.Multiply(0.85f);
                            depthImageMatrixAve += 0.15f*depthImageMatrix;

                            // Updating viewports...
                            depthImageElement.UpdateInternalImage(depthImageMatrix, depthColorMap, true);
                            colorImageElement.UpdateInternalImage(depthImageMatrixAve, depthColorMap, true);
                            canvas1.ReDraw();

                        }
                    }
                }
            }
            // NOTE(review): swallows ALL exceptions with no logging — presumably
            // to keep the UI loop alive on a dropped frame, but this hides real
            // bugs too. Consider logging or narrowing the catch.
            catch (Exception) { }
        }
Пример #34
0
        /// <summary>
        /// Compresses the depth frame from the multi-source frame, if one is
        /// available; silently does nothing when the frame has expired.
        /// </summary>
        /// <param name="msf">The acquired multi-source frame.</param>
        private void ProcessDepth(MultiSourceFrame msf)
        {
            using (var depthFrame = msf.DepthFrameReference.AcquireFrame())
            {
                if (depthFrame == null)
                {
                    return;
                }

                // BUG FIX: the original called Dispose manually, leaking both
                // the buffer and the frame if Compress threw. `using` releases
                // them on every path.
                using (var buffer = depthFrame.LockImageBuffer())
                {
                    this.depthCompressor.Compress(buffer.UnderlyingBuffer);
                }
            }
        }
Пример #35
0
        /// <summary>
        /// Copies the latest color frame, converted to the configured format,
        /// into <c>colorBuffer</c>. Does nothing if no frame is available.
        /// </summary>
        /// <param name="multiFrame">The acquired multi-source frame.</param>
        private void UpdateColorFrame( MultiSourceFrame multiFrame )
        {
            using ( var colorFrame = multiFrame.ColorFrameReference.AcquireFrame() )
            {
                if ( colorFrame != null )
                {
                    // Convert the pixel data to the configured format while copying.
                    colorFrame.CopyConvertedFrameDataToArray( colorBuffer, colorFormat );
                }
            }
        }
Пример #36
0
        /// <summary>
        /// Copies the latest depth frame data into <c>depthBuffer</c>.
        /// Does nothing if no frame is available.
        /// </summary>
        /// <param name="multiFrame">The acquired multi-source frame.</param>
        private void UpdateDepthFrame( MultiSourceFrame multiFrame )
        {
            using ( var depthFrame = multiFrame.DepthFrameReference.AcquireFrame() )
            {
                if ( depthFrame != null )
                {
                    // Grab the raw depth values.
                    depthFrame.CopyFrameDataToArray( depthBuffer );
                }
            }
        }
Пример #37
0
        /// <summary>
        /// Copies the latest body-index frame data into <c>bodyIndexBuffer</c>.
        /// Does nothing if no frame is available.
        /// </summary>
        /// <param name="multiFrame">The acquired multi-source frame.</param>
        private void UpdateBodyIndexFrame( MultiSourceFrame multiFrame )
        {
            using ( var bodyIndexFrame = multiFrame.BodyIndexFrameReference.AcquireFrame() ) {
                if ( bodyIndexFrame == null ) {
                    return;
                }

                // Grab the body-index data (one byte per depth pixel).
                bodyIndexFrame.CopyFrameDataToArray( bodyIndexBuffer );
            }
            // CLEANUP: removed a redundant trailing `return;` so this matches
            // the sibling UpdateColorFrame/UpdateDepthFrame methods.
        }