示例#1
0
        /// <summary>
        /// Handles the sensor's multi-source event: acquires all five sub-frames
        /// and dispatches the set to responders only when every sub-frame arrived.
        /// </summary>
        void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
        {
            // The composite frame can expire before we service the event.
            var multiFrame = e.FrameReference.AcquireFrame();

            if (multiFrame == null)
            {
                return;
            }

            // Create the LiveFrame only once we know there is data (the original
            // constructed it first and never disposed it on the early return),
            // and acquire the sub-frames inside the using block so they are
            // released even if one of the AcquireFrame calls throws.
            using (var result = new LiveFrame(_sensor))
            {
                result.NativeColorFrame     = multiFrame.ColorFrameReference.AcquireFrame();
                result.NativeDepthFrame     = multiFrame.DepthFrameReference.AcquireFrame();
                result.NativeInfraredFrame  = multiFrame.InfraredFrameReference.AcquireFrame();
                result.NativeBodyFrame      = multiFrame.BodyFrameReference.AcquireFrame();
                result.NativeBodyIndexFrame = multiFrame.BodyIndexFrameReference.AcquireFrame();

                // Dispatch only complete sets: a missing sub-frame means one
                // source lagged and the combined frame would be inconsistent.
                bool complete = result.NativeColorFrame != null
                             && result.NativeDepthFrame != null
                             && result.NativeInfraredFrame != null
                             && result.NativeBodyFrame != null
                             && result.NativeBodyIndexFrame != null;

                if (complete)
                {
                    this.DispatchFrame(result);
                }
            }
        }
示例#2
0
 /// <summary>
 /// Fans a completed frame out to every registered responder, in
 /// registration order.
 /// </summary>
 /// <param name="frame">The frame to deliver; ownership stays with the caller.</param>
 protected void DispatchFrame(LiveFrame frame)
 {
     foreach (FrameReaderCallbacks callbacks in _responders)
     {
         callbacks.FrameArrived(frame);
     }
 }
        /// <summary>
        /// Updates FPS bookkeeping for an arriving frame and raises FpsChanged
        /// with the instantaneous FPS and the color frame's relative timestamp.
        /// </summary>
        /// <param name="frame">The live frame whose color timestamp drives the FPS calculation.</param>
        public void FrameArrived(LiveFrame frame)
        {
            // Copy the delegate to a local: checking the field and then invoking
            // it is a race if the last subscriber detaches in between.
            var handler = FpsChanged;
            if (handler == null)
            {
                return;
            }

            TimeSpan colorFrameRelativeTime = frame.NativeColorFrame.RelativeTime;
            double totalMilliseconds = colorFrameRelativeTime.TotalMilliseconds;

            // The first frame we see establishes the session start time.
            if (!this.StartTimeInMilliseconds.HasValue)
            {
                this.StartTimeInMilliseconds = totalMilliseconds;
            }

            handler(new object(), new FpsChangedEventArgs(this.GetInstantFps(totalMilliseconds), totalMilliseconds));
        }
        /// <summary>
        /// Captures the body-index frame as a bitmap (1 byte per pixel) plus its
        /// relative timestamp. Stages the raw data through the shared
        /// _teenyBuffer before conversion.
        /// </summary>
        /// <param name="frame">Live frame supplying the native body-index frame.</param>
        /// <param name="buffer">Destination buffer handed to the bitmap helper.</param>
        public Tuple<BitmapSource, TimeSpan> CaptureBodyIndexFrameBitmap(LiveFrame frame, byte[] buffer)
        {
            BodyIndexFrame nativeFrame = frame.NativeBodyIndexFrame;
            FrameDescription description = nativeFrame.FrameDescription;

            nativeFrame.CopyFrameDataToArray(_teenyBuffer);

            BitmapSource bitmap = BufferCaptureBitmapHelper(
                _teenyBuffer, description.Width, description.Height, 1, buffer);

            return new Tuple<BitmapSource, TimeSpan>(bitmap, nativeFrame.RelativeTime);
        }
        /// <summary>
        /// Serializes the frame's skeleton data: the first body (if any) is
        /// serialized with the primary flag set, followed by the remaining
        /// tracked bodies, together with the floor clip plane.
        /// </summary>
        /// <param name="frame">Live frame supplying bodies and the native body frame.</param>
        /// <returns>The anonymous payload plus the body frame's relative timestamp.</returns>
        public Tuple<object, TimeSpan> SerializeSkeletonData(LiveFrame frame)
        {
            var bodies = new List<object>();

            Body primary = frame.FirstBody;
            if (primary != null)
            {
                bodies.Add(SerializeBody(primary, true));
            }

            foreach (Body tracked in frame.TrackedBodies)
            {
                // The primary body was already serialized above.
                if (tracked != primary)
                {
                    bodies.Add(SerializeBody(tracked));
                }
            }

            BodyFrame nativeBodyFrame = frame.NativeBodyFrame;
            object payload = new
            {
                FloorClipPlane = nativeBodyFrame.FloorClipPlane.ToArray(),
                Bodies = bodies
            };

            return new Tuple<object, TimeSpan>(payload, nativeBodyFrame.RelativeTime);
        }
        /// <summary>
        /// Densely store depth to color mapping as BLKD.
        ///
        /// Each depth pixel is packed as 16 bytes: the rounded color-space X/Y
        /// as two little-endian int16s (-1/-1 when the pixel does not map into
        /// the color image), followed by the camera-space X/Y/Z floats.
        /// </summary>
        /// <param name="frame">Live frame providing the depth frame and coordinate mapper.</param>
        /// <param name="buffer">Destination; must be exactly DEPTH_INFRARED_PIXELS * DEPTH_MAPPING_BYTES_PER_PIXEL bytes.</param>
        /// <returns>The packed Blkd plus the depth frame's relative timestamp.</returns>
        /// <exception cref="ArgumentException">Thrown when the buffer has the wrong length.</exception>
        public Tuple<Blkd, TimeSpan> CaptureMappedFrame(LiveFrame frame, byte[] buffer)
        {
            DepthFrame depthFrame = frame.NativeDepthFrame;
            CoordinateMapper mapper = frame.NativeCoordinateMapper;

            if (buffer.Length != Frame.DEPTH_INFRARED_PIXELS * DEPTH_MAPPING_BYTES_PER_PIXEL)
                throw new ArgumentException(string.Format("Buffer length is {0} but {1} is needed", buffer.Length, Frame.DEPTH_INFRARED_PIXELS * DEPTH_MAPPING_BYTES_PER_PIXEL));

            depthFrame.CopyFrameDataToArray(_depthData);
            mapper.MapDepthFrameToColorSpace(_depthData, _colorPoints);
            mapper.MapDepthFrameToCameraSpace(_depthData, _cameraSpacePoints);

            Array.Clear(buffer, 0, buffer.Length);

            // Hoisted out of the loop: one scratch array reused for every pixel
            // instead of one short-lived allocation per depth pixel per frame.
            float[] cameraPointValues = new float[3];

            int count = 0;
            for (int i = 0; i < Frame.DEPTH_INFRARED_PIXELS; ++i)
            {
                ColorSpacePoint colorPoint = _colorPoints[i];
                CameraSpacePoint cameraPoint = _cameraSpacePoints[i];

                // Round to the nearest color pixel; make sure the depth pixel
                // maps to a valid point in color space, else mark it (-1, -1).
                short colorX = (short)Math.Floor(colorPoint.X + 0.5);
                short colorY = (short)Math.Floor(colorPoint.Y + 0.5);

                if (colorX < 0 || colorX >= Frame.COLOR_WIDTH || colorY < 0 || colorY >= Frame.COLOR_HEIGHT)
                {
                    colorX = -1;
                    colorY = -1;
                }

                // Little endian === lowest order bytes at lower addresses
                buffer[count++] = (byte)(colorX >> 0);
                buffer[count++] = (byte)(colorX >> 8);

                buffer[count++] = (byte)(colorY >> 0);
                buffer[count++] = (byte)(colorY >> 8);

                cameraPointValues[0] = cameraPoint.X;
                cameraPointValues[1] = cameraPoint.Y;
                cameraPointValues[2] = cameraPoint.Z;
                System.Buffer.BlockCopy(cameraPointValues, 0, buffer, count, 12);
                count += 12;
            }

            Blkd result = new Blkd
            {
                Width = (UInt16)Frame.DEPTH_INFRARED_WIDTH,
                Height = (UInt16)Frame.DEPTH_INFRARED_HEIGHT,
                BytesPerPixel = DEPTH_MAPPING_BYTES_PER_PIXEL,
                Version = 2,
                Data = buffer
            };
            return new Tuple<Blkd, TimeSpan>(result, depthFrame.RelativeTime);
        }
        /// <summary>
        /// Captures the infrared frame as a bitmap (2 bytes per pixel) plus its
        /// relative timestamp. Stages the raw data through the shared
        /// _smallBuffer before conversion.
        /// </summary>
        /// <param name="frame">Live frame supplying the native infrared frame.</param>
        /// <param name="buffer">Destination buffer handed to the bitmap helper.</param>
        public Tuple<BitmapSource, TimeSpan> CaptureInfraredFrameBitmap(LiveFrame frame, byte[] buffer)
        {
            InfraredFrame nativeFrame = frame.NativeInfraredFrame;
            FrameDescription description = nativeFrame.FrameDescription;

            nativeFrame.CopyFrameDataToArray(_smallBuffer);

            BitmapSource bitmap = BufferCaptureBitmapHelper(
                _smallBuffer, description.Width, description.Height, 2, buffer);

            return new Tuple<BitmapSource, TimeSpan>(bitmap, nativeFrame.RelativeTime);
        }
        /// <summary>
        /// Captures the depth frame as a bitmap (2 bytes per pixel) plus its
        /// relative timestamp. Depth values are brightened before conversion so
        /// the preview image is actually visible.
        /// </summary>
        /// <param name="frame">Live frame supplying the native depth frame.</param>
        /// <param name="buffer">Destination buffer handed to the bitmap helper.</param>
        public Tuple<BitmapSource, TimeSpan> CaptureDepthFrameBitmap(LiveFrame frame, byte[] buffer)
        {
            DepthFrame nativeFrame = frame.NativeDepthFrame;
            FrameDescription description = nativeFrame.FrameDescription;

            nativeFrame.CopyFrameDataToArray(_smallBuffer);

            // Multiply all values by 8 (<< 3) to make the frames more previewable.
            for (int index = 0; index < _smallBuffer.Length; index++)
            {
                _smallBuffer[index] <<= 3;
            }

            BitmapSource bitmap = BufferCaptureBitmapHelper(
                _smallBuffer, description.Width, description.Height, 2, buffer);

            return new Tuple<BitmapSource, TimeSpan>(bitmap, nativeFrame.RelativeTime);
        }
        /// <summary>
        /// Captures the color frame as a BGRA bitmap plus its relative timestamp.
        ///
        /// This is deliberately separate from BitmapBuilder.buildColorBitmap:
        /// that method goes through LargeFrameBitmap, which wraps a
        /// WriteableBitmap, and a WriteableBitmap is unusable (and uncloneable)
        /// from any thread other than the one that created it.
        ///
        /// TODO: Examine this class and BitmapBuilder for overlaps, and determine
        /// if some consolidation is appropriate. Note that the methods here all
        /// provide raw data, whereas many of the methods in BitmapBuilder involve
        /// some processing.
        /// </summary>
        /// <param name="frame">Live frame supplying the native color frame.</param>
        /// <param name="buffer">Destination buffer; validated against the color frame dimensions.</param>
        public Tuple<BitmapSource, TimeSpan> CaptureColorFrameBitmap(LiveFrame frame, byte[] buffer)
        {
            ValidateBuffer(buffer, Frame.COLOR_WIDTH, Frame.COLOR_HEIGHT, COLOR_BYTES_PER_PIXEL);

            ColorFrame nativeFrame = frame.NativeColorFrame;
            nativeFrame.CopyConvertedFrameDataToArray(buffer, ColorImageFormat.Bgra);

            BitmapSource bitmap = CreateColorBitmap(buffer, Frame.COLOR_WIDTH, Frame.COLOR_HEIGHT);
            return new Tuple<BitmapSource, TimeSpan>(bitmap, nativeFrame.RelativeTime);
        }
        /// <summary>
        /// Renders the arriving frame into the preview bitmaps according to the
        /// current camera mode, then pushes the result into the image sources.
        /// Does nothing when the view is disabled.
        /// </summary>
        /// <param name="frame">The live frame to render.</param>
        /// <exception cref="NotImplementedException">For the combined camera modes.</exception>
        /// <exception cref="ArgumentException">For an unknown camera mode.</exception>
        public void FrameArrived(LiveFrame frame)
        {
            if (!this.Enabled) return;

            // Only the single-source modes are implemented, so there is never a
            // secondary layer; both layers render fully opaque.
            FrameBitmap primary;
            FrameBitmap secondary = null;
            double primaryOpacity = 1.0;
            double secondaryOpacity = 1.0;

            CameraMode mode = this.CameraMode;
            if (mode == CameraMode.Color)
            {
                _bitmapBuilder.BuildColorBitmap(frame.NativeColorFrame, _bitmapLarge1, true);
                primary = _bitmapLarge1;
            }
            else if (mode == CameraMode.Depth)
            {
                _bitmapBuilder.BuildDepthBitmap(frame.NativeDepthFrame, _bitmapSmall1, true);
                primary = _bitmapSmall1;
            }
            else if (mode == CameraMode.Infrared)
            {
                _bitmapBuilder.BuildInfraredBitmap(frame.NativeInfraredFrame, _bitmapSmall1, true);
                primary = _bitmapSmall1;
            }
            else if (mode == CameraMode.ColorDepth || mode == CameraMode.InfraredDepth)
            {
                throw new NotImplementedException("Camera mode not implemented");
            }
            else
            {
                throw new ArgumentException("Unrecognized camera mode");
            }

            this.EnsureImageSources(primary, primaryOpacity, secondary, secondaryOpacity);
        }
        /// <summary>
        /// Handles the sensor's multi-source event: acquires all five sub-frames
        /// and dispatches the set to responders only when every sub-frame arrived.
        /// </summary>
        void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
        {
            // The composite frame can expire before we service the event.
            var multiFrame = e.FrameReference.AcquireFrame();

            if (multiFrame == null) return;

            // Create the LiveFrame only once we know there is data (the original
            // constructed it first and never disposed it on the early return),
            // and acquire the sub-frames inside the using block so they are
            // released even if one of the AcquireFrame calls throws.
            using (var result = new LiveFrame(_sensor))
            {
                result.NativeColorFrame = multiFrame.ColorFrameReference.AcquireFrame();
                result.NativeDepthFrame = multiFrame.DepthFrameReference.AcquireFrame();
                result.NativeInfraredFrame = multiFrame.InfraredFrameReference.AcquireFrame();
                result.NativeBodyFrame = multiFrame.BodyFrameReference.AcquireFrame();
                result.NativeBodyIndexFrame = multiFrame.BodyIndexFrameReference.AcquireFrame();

                // Dispatch only complete sets: a missing sub-frame means one
                // source lagged and the combined frame would be inconsistent.
                bool complete = result.NativeColorFrame != null
                             && result.NativeDepthFrame != null
                             && result.NativeInfraredFrame != null
                             && result.NativeBodyFrame != null
                             && result.NativeBodyIndexFrame != null;

                if (complete)
                {
                    this.DispatchFrame(result);
                }
            }
        }
 /// <summary>
 /// Fans a completed frame out to every registered responder, in
 /// registration order.
 /// </summary>
 /// <param name="frame">The frame to deliver; ownership stays with the caller.</param>
 protected void DispatchFrame(LiveFrame frame)
 {
     foreach (FrameReaderCallbacks callbacks in _responders)
     {
         callbacks.FrameArrived(frame);
     }
 }
        /// <summary>
        /// Captures every per-frame artifact (depth mapping, depth, infrared,
        /// skeleton, color, body index) from the given frame into this object's
        /// fields, reusing the preallocated buffers.
        /// We call Freeze() so we can write these bitmaps to disk from other threads.
        /// </summary>
        /// <param name="frame">The live frame to capture from.</param>
        /// <param name="serializer">Serializer that performs each individual capture.</param>
        public void Update(LiveFrame frame, FrameSerializer serializer)
        {
            // (1) Depth mapping — raw BLKD data, not a bitmap, so no Freeze() here.
            _depthMapping = serializer.CaptureMappedFrame(frame, _bufferDepthMapping);

            // (2) Depth
            _depth = serializer.CaptureDepthFrameBitmap(frame, _bufferDepth);
            _depth.Item1.Freeze();

            // (3) Infrared
            _infrared = serializer.CaptureInfraredFrameBitmap(frame, _bufferInfrared);
            _infrared.Item1.Freeze();

            // (4) Skeleton — serialized object graph, nothing to freeze.
            _skeleton = serializer.SerializeSkeletonData(frame);

            // (5) Color
            _color = serializer.CaptureColorFrameBitmap(frame, _bufferColor);
            _color.Item1.Freeze();

            // (6) Body index
            _bodyIndex = serializer.CaptureBodyIndexFrameBitmap(frame, _bufferBodyIndex);
            _bodyIndex.Item1.Freeze();
        }
        /// <summary>
        /// Draws the skeleton and hand overlays for the frame's first body, or
        /// clears both overlays when no body is present.
        /// </summary>
        /// <param name="frame">The live frame to render overlays for.</param>
        public void FrameArrived(LiveFrame frame)
        {
            Body trackedBody = frame.FirstBody;

            if (trackedBody != null)
            {
                if (this.ShowBody) this.DrawBody(trackedBody);
                if (this.ShowHands) this.DrawHands(trackedBody);
                return;
            }

            // Nobody in view: wipe any stale overlays from the previous body.
            this.ClearSkeletons();
            this.ClearHands();
        }