Example #1
        private void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
        {
            if (!(KinectStreamerConfig.StreamBodyData ||
                KinectStreamerConfig.StreamColorData ||
                KinectStreamerConfig.StreamDepthData ||
                KinectStreamerConfig.StreamPointCloudData ||
                KinectStreamerConfig.ProvideCalibrationData ||
                KinectStreamerConfig.StreamColoredPointCloudData
                ))
            {
                return;
            }

            depthFrame = null;
            colorFrame = null;
            bodyFrame = null;

            bodyStreamMessage = null;
            colorStreamMessage = null;
            pointCloudStreamMessage = null;
            depthStreamMessage = null;
            calibrationDataMessage = null;
            coloredPointCloudStreamMessage = null;

            multiSourceFrame = e.FrameReference.AcquireFrame();

            // If the Frame has expired by the time we process this event, return.
            if (multiSourceFrame == null)
            {
                return;
            }

            // Use try/finally to make sure we clean up before leaving the handler:
            // the finally block disposes any frame objects we managed to acquire.
            try
            {
                depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame();
                colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame();
                //Debug.Write(colorFrame.RelativeTime.ToString());
                bodyFrame = multiSourceFrame.BodyFrameReference.AcquireFrame();

                // If any frame has expired by the time we process this event, return.
                // The "finally" statement will Dispose any that are not null.
                if ((depthFrame == null) || (colorFrame == null) || (bodyFrame == null))
                {
                    return;
                }

                // Process color stream if needed
                if (KinectStreamerConfig.StreamColorData || KinectStreamerConfig.StreamColoredPointCloudData)
                {
                    ProcessColorData();
                }

                // Process depth frame if needed
                if (KinectStreamerConfig.StreamDepthData || KinectStreamerConfig.StreamPointCloudData || KinectStreamerConfig.StreamColoredPointCloudData)
                {
                    ProcessDepthData();

                    if (KinectStreamerConfig.StreamPointCloudData || KinectStreamerConfig.StreamColoredPointCloudData)
                    {
                        GenerateFullPointCloud();
                    }
                }

                if (KinectStreamerConfig.StreamColoredPointCloudData)
                {
                    ProcessPointCloudColors();
                }

                // Process body data if needed
                if (KinectStreamerConfig.StreamBodyData || KinectStreamerConfig.ProvideCalibrationData)
                {
                    ProcessBodyData();
                }

                SendData();
            }
            finally
            {
                if (depthFrame != null)
                {
                    depthFrame.Dispose();
                }
                if (colorFrame != null)
                {
                    colorFrame.Dispose();
                }
                if (bodyFrame != null)
                {
                    bodyFrame.Dispose();
                }
            }
        }
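
For context, this handler is wired to a MultiSourceFrameReader opened from the Kinect sensor. The sketch below shows one minimal way to do that with the standard Kinect for Windows SDK 2.0 API (requires `using Microsoft.Kinect;`). The field names follow the handler above, but StartKinect is a hypothetical helper; the actual setup in this project may differ.

        private KinectSensor kinectSensor;
        private MultiSourceFrameReader multiSourceFrameReader;

        // Hypothetical setup helper; field and method names are assumptions.
        private void StartKinect()
        {
            // Acquire the default sensor and open a reader for the frame types
            // the handler above consumes.
            kinectSensor = KinectSensor.GetDefault();
            multiSourceFrameReader = kinectSensor.OpenMultiSourceFrameReader(
                FrameSourceTypes.Depth | FrameSourceTypes.Color | FrameSourceTypes.Body);

            // Route arriving frames to the handler shown in Example #1.
            multiSourceFrameReader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;

            kinectSensor.Open();
        }
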
Example #2
        public NullablePoint3D[] GenerateFullPointCloud()
        {
            List<NullablePoint3D> validPointList = new List<NullablePoint3D>();

            // Map every depth pixel into camera space in a single call.
            kinectSensor.CoordinateMapper.MapDepthPointsToCameraSpace(allDepthSpacePoints, depthArray, pointCloudCandidates);

            foreach (CameraSpacePoint point in pointCloudCandidates)
            {
                if (GeometryHelper.IsValidPoint(point))
                {
                    validPointList.Add(new NullablePoint3D(point.X, point.Y, point.Z));
                }
                else
                {
                    // Keep invalid points as null placeholders so depth space and
                    // camera space stay index-aligned on the client side.
                    validPointList.Add(null);
                }
            }

            FullPointCloud = validPointList.ToArray();

            pointCloudStreamMessage = new PointCloudStreamMessage(FullPointCloud);

            return FullPointCloud;
        }
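
MapDepthPointsToCameraSpace expects three pre-allocated arrays of the same length as the depth frame: a DepthSpacePoint for every pixel, the raw ushort depth values, and an output CameraSpacePoint buffer. The sketch below shows one way those buffers might be prepared from a DepthFrame; PrepareDepthBuffers is a hypothetical helper, and the real project may allocate these arrays elsewhere (for example once, from the sensor's FrameDescription).

        private DepthSpacePoint[] allDepthSpacePoints;
        private ushort[] depthArray;
        private CameraSpacePoint[] pointCloudCandidates;

        // Hypothetical helper: allocate and fill the arrays used by GenerateFullPointCloud.
        private void PrepareDepthBuffers(DepthFrame frame)
        {
            FrameDescription desc = frame.FrameDescription;
            int size = desc.Width * desc.Height;

            if (allDepthSpacePoints == null || allDepthSpacePoints.Length != size)
            {
                allDepthSpacePoints = new DepthSpacePoint[size];
                depthArray = new ushort[size];
                pointCloudCandidates = new CameraSpacePoint[size];

                // One DepthSpacePoint per depth pixel, in row-major order, so the
                // mapped camera-space array keeps the same pixel ordering.
                for (int y = 0; y < desc.Height; y++)
                {
                    for (int x = 0; x < desc.Width; x++)
                    {
                        allDepthSpacePoints[y * desc.Width + x] = new DepthSpacePoint { X = x, Y = y };
                    }
                }
            }

            // Copy the raw depth values (in millimeters) for this frame.
            frame.CopyFrameDataToArray(depthArray);
        }

Depth pixels that cannot be mapped (for example, pixels with a depth value of 0) typically come back as CameraSpacePoints with infinite coordinates, which is presumably what GeometryHelper.IsValidPoint tests for before a point is kept; invalid pixels then become the null placeholders stored in FullPointCloud.
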