/// <summary>
/// Serializes the skeleton data of a frame: the first (primary) body is listed
/// ahead of all other tracked bodies, together with the floor clip plane and the
/// body frame's relative timestamp.
/// </summary>
/// <param name="frame">Live frame providing the native body frame and tracked bodies.</param>
/// <returns>An anonymous payload object plus the body frame's relative time.</returns>
public Tuple<object, TimeSpan> SerializeSkeletonData(LiveFrame frame)
{
    var bodies = new List<object>();

    // The primary body is serialized first and flagged as such.
    Body primary = frame.FirstBody;
    if (primary != null)
    {
        bodies.Add(SerializeBody(primary, true));
    }

    // Remaining tracked bodies follow, skipping the primary one (reference equality).
    foreach (Body tracked in frame.TrackedBodies)
    {
        if (tracked != primary)
        {
            bodies.Add(SerializeBody(tracked));
        }
    }

    object payload = new
    {
        FloorClipPlane = frame.NativeBodyFrame.FloorClipPlane.ToArray(),
        Bodies = bodies
    };
    return new Tuple<object, TimeSpan>(payload, frame.NativeBodyFrame.RelativeTime);
}
/// <summary>
/// Initializes the window, connects the USB driver, wires the driver's events to
/// UI updates (all marshalled through the Dispatcher), then builds and renders the
/// initial frame from the current slider values.
/// </summary>
public MainWindow()
{
    InitializeComponent();

    _usbDriver = new UsbDriver();
    _usbDriver.Connect();

    _usbDriver.OnReportChange += (bytes) => Dispatcher.Invoke(() =>
    {
        // Build the hex dump in a local and assign the control text once:
        // the original appended to the Text dependency property per byte,
        // which is quadratic string building plus a property set per byte.
        string hex = "";
        foreach (var b in bytes)
        {
            hex += string.Format("{0:X2} ", b);
        }
        RawByteOutput.Text = hex;
    });
    _usbDriver.OnTempChange += OnTempChange;
    _usbDriver.OnModeButtonPressed += () => Dispatcher.Invoke(() =>
    {
        Button1.Style = Resources["ButtonOnStyle"] as Style;
    });
    _usbDriver.OnModeButtonReleased += () => Dispatcher.Invoke(() => Button1.Style = Resources["ButtonOffStyle"] as Style);
    _usbDriver.OnUser2ButtonPressed += () => Dispatcher.Invoke(() =>
    {
        Button3.Style = Resources["ButtonOnStyle"] as Style;
    });
    _usbDriver.OnUser2ButtonReleased += () => Dispatcher.Invoke(() => Button3.Style = Resources["ButtonOffStyle"] as Style);
    _usbDriver.OnProgramWriteComplete += (success) => MessageBox.Show("Write Complete. Successful: " + success);

    // TODO: add proper context binding here
    _frame = new LiveFrame()
    {
        Color = { Red = (byte)Red.Value, Green = (byte)Green.Value, Blue = (byte)Blue.Value },
        Leds = { LedRawBits = 0x0000 }
    };
    RenderFrame();
}
/// <summary>
/// Densely stores the depth-to-color and depth-to-camera mapping of a frame as a Blkd.
///
/// Each depth pixel is packed as DEPTH_MAPPING_BYTES_PER_PIXEL bytes: colorX and
/// colorY as little-endian Int16 ((-1, -1) when the pixel has no valid color-space
/// mapping), followed by the camera-space X, Y, Z as three 32-bit floats.
/// </summary>
/// <param name="frame">Live frame providing the native depth frame and coordinate mapper.</param>
/// <param name="buffer">Destination buffer; must be exactly
/// Frame.DEPTH_INFRARED_PIXELS * DEPTH_MAPPING_BYTES_PER_PIXEL bytes long.</param>
/// <returns>The packed Blkd plus the depth frame's relative timestamp.</returns>
/// <exception cref="ArgumentException">Thrown when <paramref name="buffer"/> has the wrong length.</exception>
public Tuple<Blkd, TimeSpan> CaptureMappedFrame(LiveFrame frame, byte[] buffer)
{
    DepthFrame depthFrame = frame.NativeDepthFrame;
    CoordinateMapper mapper = frame.NativeCoordinateMapper;

    if (buffer.Length != Frame.DEPTH_INFRARED_PIXELS * DEPTH_MAPPING_BYTES_PER_PIXEL)
    {
        throw new ArgumentException(string.Format(
            "Buffer length is {0} but {1} is needed",
            buffer.LongLength,
            Frame.DEPTH_INFRARED_PIXELS * DEPTH_MAPPING_BYTES_PER_PIXEL));
    }

    depthFrame.CopyFrameDataToArray(_depthData);
    mapper.MapDepthFrameToColorSpace(_depthData, _colorPoints);
    mapper.MapDepthFrameToCameraSpace(_depthData, _cameraSpacePoints);

    Array.Clear(buffer, 0, buffer.Length);

    // Reused scratch array: the original allocated a fresh float[3] per pixel,
    // i.e. hundreds of thousands of short-lived heap objects every frame.
    float[] cameraPointValues = new float[3];

    int count = 0;
    for (int i = 0; i < Frame.DEPTH_INFRARED_PIXELS; ++i)
    {
        ColorSpacePoint colorPoint = _colorPoints[i];
        CameraSpacePoint cameraPoint = _cameraSpacePoints[i];

        // Round to nearest pixel, then make sure the depth pixel maps to a valid
        // point in color space; otherwise write the (-1, -1) sentinel.
        short colorX = (short)Math.Floor(colorPoint.X + 0.5);
        short colorY = (short)Math.Floor(colorPoint.Y + 0.5);
        if (colorX < 0 || colorX >= Frame.COLOR_WIDTH || colorY < 0 || colorY >= Frame.COLOR_HEIGHT)
        {
            colorX = -1;
            colorY = -1;
        }

        // Little endian === lowest order bytes at lower addresses
        buffer[count++] = (byte)(colorX >> 0);
        buffer[count++] = (byte)(colorX >> 8);
        buffer[count++] = (byte)(colorY >> 0);
        buffer[count++] = (byte)(colorY >> 8);

        cameraPointValues[0] = cameraPoint.X;
        cameraPointValues[1] = cameraPoint.Y;
        cameraPointValues[2] = cameraPoint.Z;
        System.Buffer.BlockCopy(cameraPointValues, 0, buffer, count, 12);
        count += 12;
    }

    Blkd result = new Blkd
    {
        Width = (UInt16)Frame.DEPTH_INFRARED_WIDTH,
        Height = (UInt16)Frame.DEPTH_INFRARED_HEIGHT,
        BytesPerPixel = DEPTH_MAPPING_BYTES_PER_PIXEL,
        Version = 2,
        Data = buffer
    };
    return new Tuple<Blkd, TimeSpan>(result, depthFrame.RelativeTime);
}
/// <summary>
/// Captures the current color frame as a BGRA BitmapSource plus its relative timestamp.
///
/// This method is similar to BitmapBuilder.buildColorBitmap. However, that method uses
/// LargeFrameBitmap which encapsulates WriteableBitmap, and a WriteableBitmap can't be
/// used on a different thread from the one which created it. It can't even be cloned,
/// or used to create a new WriteableBitmap on a different thread. So we provide this
/// separate interface.
///
/// TODO: Examine this class and BitmapBuilder for overlaps, and determine if some
/// consolidation is appropriate. Note that the methods here all provide raw data,
/// whereas many of the methods in BitmapBuilder involve some processing.
/// </summary>
/// <param name="frame">Live frame providing the native color frame.</param>
/// <param name="buffer">Destination buffer, validated against the color frame dimensions.</param>
/// <returns>The color bitmap plus the color frame's relative time.</returns>
public Tuple<BitmapSource, TimeSpan> CaptureColorFrameBitmap(LiveFrame frame, byte[] buffer)
{
    // Validate before touching the native frame so a bad buffer fails fast.
    ValidateBuffer(buffer, Frame.COLOR_WIDTH, Frame.COLOR_HEIGHT, COLOR_BYTES_PER_PIXEL);

    ColorFrame nativeColor = frame.NativeColorFrame;
    nativeColor.CopyConvertedFrameDataToArray(buffer, ColorImageFormat.Bgra);

    return new Tuple<BitmapSource, TimeSpan>(
        CreateColorBitmap(buffer, Frame.COLOR_WIDTH, Frame.COLOR_HEIGHT),
        nativeColor.RelativeTime);
}
/// <summary>
/// Captures the current body-index frame (1 byte per pixel) as a BitmapSource
/// plus its relative timestamp.
/// </summary>
/// <param name="frame">Live frame providing the native body-index frame.</param>
/// <param name="buffer">Destination buffer handed to the bitmap helper.</param>
/// <returns>The body-index bitmap plus the frame's relative time.</returns>
public Tuple<BitmapSource, TimeSpan> CaptureBodyIndexFrameBitmap(LiveFrame frame, byte[] buffer)
{
    BodyIndexFrame nativeFrame = frame.NativeBodyIndexFrame;
    int frameWidth = nativeFrame.FrameDescription.Width;
    int frameHeight = nativeFrame.FrameDescription.Height;

    nativeFrame.CopyFrameDataToArray(_teenyBuffer);
    BitmapSource bitmap = BufferCaptureBitmapHelper(_teenyBuffer, frameWidth, frameHeight, 1, buffer);

    return new Tuple<BitmapSource, TimeSpan>(bitmap, nativeFrame.RelativeTime);
}
/// <summary>
/// Captures the current infrared frame (2 bytes per pixel) as a BitmapSource
/// plus its relative timestamp.
/// </summary>
/// <param name="frame">Live frame providing the native infrared frame.</param>
/// <param name="buffer">Destination buffer handed to the bitmap helper.</param>
/// <returns>The infrared bitmap plus the frame's relative time.</returns>
public Tuple<BitmapSource, TimeSpan> CaptureInfraredFrameBitmap(LiveFrame frame, byte[] buffer)
{
    InfraredFrame nativeFrame = frame.NativeInfraredFrame;
    int frameWidth = nativeFrame.FrameDescription.Width;
    int frameHeight = nativeFrame.FrameDescription.Height;

    nativeFrame.CopyFrameDataToArray(_smallBuffer);
    BitmapSource bitmap = BufferCaptureBitmapHelper(_smallBuffer, frameWidth, frameHeight, 2, buffer);

    return new Tuple<BitmapSource, TimeSpan>(bitmap, nativeFrame.RelativeTime);
}
/// <summary>
/// Records the first color-frame timestamp on first call, then raises FpsChanged
/// with the instantaneous FPS computed from the color frame's relative time.
/// </summary>
/// <param name="frame">Live frame providing the native color frame's timestamp.</param>
public void FrameArrived(LiveFrame frame)
{
    // Snapshot the delegate first: checking FpsChanged for null and invoking the
    // field later is a race if a subscriber detaches in between.
    var handler = FpsChanged;
    if (handler != null)
    {
        TimeSpan colorFrameRelativeTime = frame.NativeColorFrame.RelativeTime;
        if (!this.StartTimeInMilliseconds.HasValue)
        {
            this.StartTimeInMilliseconds = colorFrameRelativeTime.TotalMilliseconds;
        }
        double milliseconds = colorFrameRelativeTime.TotalMilliseconds;
        handler(new object(), new FpsChangedEventArgs(this.GetInstantFps(milliseconds), milliseconds));
    }
}
/// <summary>
/// Captures the current depth frame as a BitmapSource plus its relative timestamp.
/// Every depth sample is left-shifted by 3 (multiplied by 8) so the otherwise very
/// dark frame is more previewable.
/// </summary>
/// <param name="frame">Live frame providing the native depth frame.</param>
/// <param name="buffer">Destination buffer handed to the bitmap helper.</param>
/// <returns>The brightened depth bitmap plus the frame's relative time.</returns>
public Tuple<BitmapSource, TimeSpan> CaptureDepthFrameBitmap(LiveFrame frame, byte[] buffer)
{
    DepthFrame nativeDepth = frame.NativeDepthFrame;
    int frameWidth = nativeDepth.FrameDescription.Width;
    int frameHeight = nativeDepth.FrameDescription.Height;

    nativeDepth.CopyFrameDataToArray(_smallBuffer);

    // Brighten all samples (×8) for previewing.
    for (int index = 0; index < _smallBuffer.Length; index++)
    {
        _smallBuffer[index] <<= 3;
    }

    BitmapSource bitmap = BufferCaptureBitmapHelper(_smallBuffer, frameWidth, frameHeight, 2, buffer);
    return new Tuple<BitmapSource, TimeSpan>(bitmap, nativeDepth.RelativeTime);
}
/// <summary>
/// Renders the first tracked body's skeleton and/or hands according to the
/// ShowBody/ShowHands flags, or clears both overlays when no body is tracked.
/// </summary>
/// <param name="frame">Live frame whose first body (if any) is drawn.</param>
public void FrameArrived(LiveFrame frame)
{
    Body body = frame.FirstBody;

    // No tracked body: wipe both overlays and bail out early.
    if (body == null)
    {
        this.ClearSkeletons();
        this.ClearHands();
        return;
    }

    if (this.ShowBody)
    {
        this.DrawBody(body);
    }
    if (this.ShowHands)
    {
        this.DrawHands(body);
    }
}
/// <summary>
/// Captures every stream of the given live frame into this object's reusable
/// buffers: depth mapping, depth, infrared, skeleton, color, and body index.
///
/// We call Freeze() on each captured bitmap so we can write these bitmaps to
/// disk from other threads (an unfrozen WPF BitmapSource is thread-affine).
/// </summary>
/// <param name="frame">Live frame whose native sub-frames are captured; assumed valid for the duration of this call.</param>
/// <param name="serializer">Serializer that performs each individual capture.</param>
public void Update(LiveFrame frame, FrameSerializer serializer)
{
    // NOTE(review): the captures below run in a fixed sequence against the same
    // live frame — preserve this order unless confirmed safe to reorder.
    // (1) Depth mapping
    _depthMapping = serializer.CaptureMappedFrame(frame, _bufferDepthMapping);
    // (2) Depth
    _depth = serializer.CaptureDepthFrameBitmap(frame, _bufferDepth);
    _depth.Item1.Freeze();
    // (3) Infrared
    _infrared = serializer.CaptureInfraredFrameBitmap(frame, _bufferInfrared);
    _infrared.Item1.Freeze();
    // (4) Skeleton
    _skeleton = serializer.SerializeSkeletonData(frame);
    // (5) Color
    _color = serializer.CaptureColorFrameBitmap(frame, _bufferColor);
    _color.Item1.Freeze();
    // (6) Body index
    _bodyIndex = serializer.CaptureBodyIndexFrameBitmap(frame, _bufferBodyIndex);
    _bodyIndex.Item1.Freeze();
}