/// <summary>
/// Display a frame and the evaluation results on the UI
/// </summary>
/// <param name="frame"></param>
/// <returns></returns>
private async Task DisplayFrameAndResultAsync(VideoFrame frame)
{
    await Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, async () =>
    {
        try
        {
            // Enable results to be displayed
            m_bodyRenderer.IsVisible = true;

            // Display the input frame
            if (frame.SoftwareBitmap != null)
            {
                await m_processedBitmapSource.SetBitmapAsync(frame.SoftwareBitmap);
            }
            else
            {
                var bitmap = await SoftwareBitmap.CreateCopyFromSurfaceAsync(frame.Direct3DSurface, BitmapAlphaMode.Ignore);
                await m_processedBitmapSource.SetBitmapAsync(bitmap);
            }

            // If our canvas overlay is properly resized, update displayed results
            if (UICanvasOverlay.ActualWidth != 0)
            {
                m_bodyRenderer.Update(m_binding.Bodies, m_frameSource.FrameSourceType != FrameSourceType.Camera);
            }

            // Output result and perf text
            UISkillOutputDetails.Text = $"Found {m_binding.Bodies.Count} bodies (bind: {m_bindTime.ToString("F2")}ms, eval: {m_evalTime.ToString("F2")}ms)";
        }
        catch (TaskCanceledException)
        {
            // no-op: we expect this exception when we change media sources
            // and can safely ignore/continue
        }
        catch (Exception ex)
        {
            NotifyUser($"Exception while rendering results: {ex.Message}");
        }
    });
}
/// <summary>
/// Display a frame and the evaluation results on the UI
/// </summary>
/// <param name="frame"></param>
/// <returns></returns>
private async Task DisplayFrameAndResultAsync(VideoFrame frame)
{
    await Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, async () =>
    {
        try
        {
            // Enable results to be displayed
            m_bodyRenderer.IsVisible = true;

            // Display the input frame
            SoftwareBitmap targetSoftwareBitmap = frame.SoftwareBitmap;

            // If we receive a Direct3DSurface-backed VideoFrame, convert to a SoftwareBitmap in
            // a format that can be rendered via the UI element
            if (targetSoftwareBitmap == null)
            {
                if (m_renderTargetFrame == null)
                {
                    m_renderTargetFrame = new VideoFrame(BitmapPixelFormat.Bgra8, frame.Direct3DSurface.Description.Width, frame.Direct3DSurface.Description.Height, BitmapAlphaMode.Ignore);
                }

                // Leverage the VideoFrame.CopyToAsync() method that can convert the input
                // Direct3DSurface-backed VideoFrame to a SoftwareBitmap-backed VideoFrame
                await frame.CopyToAsync(m_renderTargetFrame);
                targetSoftwareBitmap = m_renderTargetFrame.SoftwareBitmap;
            }
            // Else, if we receive a SoftwareBitmap-backed VideoFrame whose format cannot
            // already be rendered via the UI element, convert it accordingly
            else
            {
                if (targetSoftwareBitmap.BitmapPixelFormat != BitmapPixelFormat.Bgra8 || targetSoftwareBitmap.BitmapAlphaMode != BitmapAlphaMode.Ignore)
                {
                    if (m_renderTargetFrame == null)
                    {
                        m_renderTargetFrame = new VideoFrame(BitmapPixelFormat.Bgra8, targetSoftwareBitmap.PixelWidth, targetSoftwareBitmap.PixelHeight, BitmapAlphaMode.Ignore);
                    }

                    // Leverage the VideoFrame.CopyToAsync() method that can convert the input
                    // SoftwareBitmap-backed VideoFrame to a different format
                    await frame.CopyToAsync(m_renderTargetFrame);
                    targetSoftwareBitmap = m_renderTargetFrame.SoftwareBitmap;
                }
            }
            await m_processedBitmapSource.SetBitmapAsync(targetSoftwareBitmap);

            // If our canvas overlay is properly resized, update displayed results
            if (UICanvasOverlay.ActualWidth != 0)
            {
                m_bodyRenderer.Update(m_binding.Bodies, m_frameSource.FrameSourceType != FrameSourceType.Camera);
            }

            // Output result and perf text
            UISkillOutputDetails.Text = $"Found {m_binding.Bodies.Count} bodies (bind: {m_bindTime.ToString("F2")}ms, eval: {m_evalTime.ToString("F2")}ms)";
        }
        catch (TaskCanceledException)
        {
            // no-op: we expect this exception when we change media sources
            // and can safely ignore/continue
        }
        catch (Exception ex)
        {
            NotifyUser($"Exception while rendering results: {ex.Message}");
        }
    });
}
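For context, here is a minimal sketch of how DisplayFrameAndResultAsync might be driven from a frame-arrival callback. The handler name, the Stopwatch-based timing, and the m_skill / m_binding calls (SetInputImageAsync, EvaluateAsync) are assumptions following the usual skill bind-then-evaluate pattern, not part of the code above.

// Hypothetical sketch (assumed names: FrameSource_FrameArrived, m_skill, m_binding.SetInputImageAsync)
private async void FrameSource_FrameArrived(VideoFrame frame)
{
    var stopwatch = System.Diagnostics.Stopwatch.StartNew();

    // Bind the incoming frame to the skill binding and record the bind time shown in the UI
    await m_binding.SetInputImageAsync(frame);
    m_bindTime = (float)stopwatch.Elapsed.TotalMilliseconds;

    // Evaluate the skill against the binding and record the evaluation time
    stopwatch.Restart();
    await m_skill.EvaluateAsync(m_binding);
    m_evalTime = (float)stopwatch.Elapsed.TotalMilliseconds;

    // Render the frame and the detected bodies; this dispatches to the UI thread internally
    await DisplayFrameAndResultAsync(frame);
}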