/// <summary>
        /// Render ObjectDetector skill results
        /// </summary>
        /// <param name="frame"></param>
        /// <param name="objectDetections"></param>
        /// <returns></returns>
        private async Task DisplayFrameAndResultAsync(VideoFrame frame, IReadOnlyList<ObjectDetectorResult> detectedObjects)
        {
            await Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, async() =>
            {
                try
                {
                    SoftwareBitmap targetSoftwareBitmap = frame.SoftwareBitmap;

                    // If we receive a Direct3DSurface-backed VideoFrame, convert to a SoftwareBitmap in a format that can be rendered via the UI element
                    if (targetSoftwareBitmap == null)
                    {
                        if (m_renderTargetFrame == null)
                        {
                            m_renderTargetFrame = new VideoFrame(BitmapPixelFormat.Bgra8, frame.Direct3DSurface.Description.Width, frame.Direct3DSurface.Description.Height, BitmapAlphaMode.Ignore);
                        }

                        // Leverage the VideoFrame.CopyToAsync() method that can convert the input Direct3DSurface-backed VideoFrame to a SoftwareBitmap-backed VideoFrame
                        await frame.CopyToAsync(m_renderTargetFrame);
                        targetSoftwareBitmap = m_renderTargetFrame.SoftwareBitmap;
                    }
                    // Else, if we receive a SoftwareBitmap-backed VideoFrame, if its format cannot already be rendered via the UI element, convert it accordingly
                    else
                    {
                        if (targetSoftwareBitmap.BitmapPixelFormat != BitmapPixelFormat.Bgra8 || targetSoftwareBitmap.BitmapAlphaMode != BitmapAlphaMode.Ignore)
                        {
                            if (m_renderTargetFrame == null)
                            {
                                m_renderTargetFrame = new VideoFrame(BitmapPixelFormat.Bgra8, targetSoftwareBitmap.PixelWidth, targetSoftwareBitmap.PixelHeight, BitmapAlphaMode.Ignore);
                            }

                            // Leverage the VideoFrame.CopyToAsync() method that can convert the input SoftwareBitmap-backed VideoFrame to a different format
                            await frame.CopyToAsync(m_renderTargetFrame);
                            targetSoftwareBitmap = m_renderTargetFrame.SoftwareBitmap;
                        }
                    }
                    await m_processedBitmapSource.SetBitmapAsync(targetSoftwareBitmap);

                    // Update displayed results
                    m_bboxRenderer.Render(detectedObjects);

                    // Update the displayed performance text
                    UIPerfTextBlock.Text = $"bind: {m_bindTime.ToString("F2")}ms, eval: {m_evalTime.ToString("F2")}ms";
                }
                catch (TaskCanceledException)
                {
                    // no-op: we expect this exception when we change media sources
                    // and can safely ignore/continue
                }
                catch (Exception ex)
                {
                    NotifyUser($"Exception while rendering results: {ex.Message}");
                }
            });
        }
        public async Task ScoreFrame(VideoFrame videoFrame)
        {
            if (!_isModelLoadedSuccessfully || videoFrame == null)
            {
                return;
            }

            try
            {
                using (SoftwareBitmap bitmapBuffer = new SoftwareBitmap(BitmapPixelFormat.Bgra8,
                                                                        _customVisionONNXModel.InputImageWidth, _customVisionONNXModel.InputImageHeight, BitmapAlphaMode.Ignore))
                {
                    using (VideoFrame buffer = VideoFrame.CreateWithSoftwareBitmap(bitmapBuffer))
                    {
                        await videoFrame.CopyToAsync(buffer);

                        var input = new CustomVisionModelInput()
                        {
                            data = buffer
                        };

                        // Perform prediction using ONNX model
                        DateTime start = DateTime.Now;
                        CustomVisionModelOutput output = await this._customVisionONNXModel.EvaluateAsync(input);

                        await ShowPredictionResults(output, Math.Round((DateTime.Now - start).TotalMilliseconds));
                    }
                }
            }
            catch (Exception ex)
            {
                this._isModelLoadedSuccessfully = false;
                await UpdateStatus("Error", $"Failure scoring camera frame: {ex.Message}");
            }
        }
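
The CustomVisionModelInput and CustomVisionModelOutput types used above are mlgen-style wrapper classes that are not shown here; a minimal sketch of the input side, assuming the model exposes a single image input named "data":

        public sealed class CustomVisionModelInput
        {
            public VideoFrame data { get; set; }
        }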
        public static async Task<TensorFloat> NormalizeImage(VideoFrame frame, Vector3 mean, Vector3 std, uint width, uint height)
        {
            // Copy the input frame into a BGRA8 buffer at the target size;
            // VideoFrame.CopyToAsync performs the format conversion and scaling
            var bitmapBuffer = new SoftwareBitmap(BitmapPixelFormat.Bgra8, (int)width, (int)height, BitmapAlphaMode.Ignore);
            var buffer       = VideoFrame.CreateWithSoftwareBitmap(bitmapBuffer);
            await frame.CopyToAsync(buffer);

            // Read back the raw BGRA8 pixels for normalization
            var pixels = new byte[4 * width * height];
            bitmapBuffer.CopyToBuffer(pixels.AsBuffer());

            return(Normalize(pixels, mean, std, width, height));
        }
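
The Normalize helper called above is not shown in this example. A minimal sketch, assuming the model expects a 1x3xHxW (NCHW) tensor with RGB channels scaled to [0, 1] before the per-channel (x - mean) / std step:

        // Hypothetical implementation of the Normalize helper assumed by NormalizeImage
        private static TensorFloat Normalize(byte[] pixels, Vector3 mean, Vector3 std, uint width, uint height)
        {
            int planeSize = (int)(width * height);
            var data = new float[3 * planeSize];
            for (int i = 0; i < planeSize; i++)
            {
                // Source is BGRA8; write the R, G, B planes in NCHW order
                byte b = pixels[i * 4], g = pixels[i * 4 + 1], r = pixels[i * 4 + 2];
                data[i]                 = (r / 255f - mean.X) / std.X;
                data[i + planeSize]     = (g / 255f - mean.Y) / std.Y;
                data[i + 2 * planeSize] = (b / 255f - mean.Z) / std.Z;
            }
            return TensorFloat.CreateFromArray(new long[] { 1, 3, height, width }, data);
        }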
Example no. 4
        private async Task CropAndDisplayInputImageAsync(VideoFrame inputVideoFrame)
        {
            bool useDX = inputVideoFrame.SoftwareBitmap == null;

            BitmapBounds cropBounds = new BitmapBounds();
            uint         h          = 28;
            uint         w          = 28;

            var frameHeight = useDX ? inputVideoFrame.Direct3DSurface.Description.Height : inputVideoFrame.SoftwareBitmap.PixelHeight;
            var frameWidth  = useDX ? inputVideoFrame.Direct3DSurface.Description.Width : inputVideoFrame.SoftwareBitmap.PixelWidth;

            var requiredAR = ((float)28 / 28);

            w                 = Math.Min((uint)(requiredAR * frameHeight), (uint)frameWidth);
            h                 = Math.Min((uint)(frameWidth / requiredAR), (uint)frameHeight);
            cropBounds.X      = (uint)((frameWidth - w) / 2);
            cropBounds.Y      = 0;
            cropBounds.Width  = w;
            cropBounds.Height = h;

            int nh = 224; //28
            int nw = 224; //28

            cropped_vf = new VideoFrame(BitmapPixelFormat.Bgra8, nw, nh, BitmapAlphaMode.Ignore);

            await inputVideoFrame.CopyToAsync(cropped_vf, cropBounds, null);
        }
Example no. 5
        public async Task<VideoFrame> GetCroppedImage(VideoFrame inputVideoFrame)
        {
            bool useDX = inputVideoFrame.SoftwareBitmap == null;

            BitmapBounds cropBounds  = new BitmapBounds();
            uint         h           = 227;
            uint         w           = 227;
            var          frameHeight = useDX ? inputVideoFrame.Direct3DSurface.Description.Height : inputVideoFrame.SoftwareBitmap.PixelHeight;
            var          frameWidth  = useDX ? inputVideoFrame.Direct3DSurface.Description.Width : inputVideoFrame.SoftwareBitmap.PixelWidth;

            var requiredAR = ((float)227 / 227);

            w                 = Math.Min((uint)(requiredAR * frameHeight), (uint)frameWidth);
            h                 = Math.Min((uint)(frameWidth / requiredAR), (uint)frameHeight);
            cropBounds.X      = (uint)((frameWidth - w) / 2);
            cropBounds.Y      = 0;
            cropBounds.Width  = w;
            cropBounds.Height = h;

            var cropped_vf = new VideoFrame(BitmapPixelFormat.Bgra8, 227, 227, BitmapAlphaMode.Premultiplied);

            await inputVideoFrame.CopyToAsync(cropped_vf, cropBounds, null);

            return(cropped_vf);
        }
Example no. 6
        public static async Task <VideoFrame> CropVideoFrameAsync(this VideoFrame inputVideoFrame, uint targetWidth, uint targetHeight)
        {
            bool useDX = inputVideoFrame.SoftwareBitmap == null;

            BitmapBounds cropBounds  = new BitmapBounds();
            uint         h           = targetHeight;
            uint         w           = targetWidth;
            var          frameHeight = useDX ? inputVideoFrame.Direct3DSurface.Description.Height : inputVideoFrame.SoftwareBitmap.PixelHeight;
            var          frameWidth  = useDX ? inputVideoFrame.Direct3DSurface.Description.Width : inputVideoFrame.SoftwareBitmap.PixelWidth;

            w                 = Math.Min(targetWidth, (uint)frameWidth);
            h                 = Math.Min(targetHeight, (uint)frameHeight);
            cropBounds.X      = (uint)((frameWidth - w) / 2);
            cropBounds.Y      = (uint)((frameHeight - h) / 2);
            cropBounds.Width  = w;
            cropBounds.Height = h;

            VideoFrame croppedVideoFrame = new VideoFrame(BitmapPixelFormat.Bgra8, (int)targetWidth, (int)targetHeight, BitmapAlphaMode.Ignore);

            await inputVideoFrame.CopyToAsync(croppedVideoFrame, cropBounds, null);

            return(croppedVideoFrame);
        }
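
Typical usage of the extension method above (the 224x224 size is illustrative, assuming a model with a square input):

        // Center-crop the camera frame down to the model input size
        VideoFrame modelInput = await frame.CropVideoFrameAsync(224, 224);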
        private static async Task <VideoFrame> CropAndDisplayInputImageAsync(VideoFrame inputVideoFrame, Size targetSize)
        {
            bool useDX = inputVideoFrame.SoftwareBitmap == null;

            var frameHeight = useDX ? inputVideoFrame.Direct3DSurface.Description.Height : inputVideoFrame.SoftwareBitmap.PixelHeight;
            var frameWidth  = useDX ? inputVideoFrame.Direct3DSurface.Description.Width : inputVideoFrame.SoftwareBitmap.PixelWidth;

            var  requiredAR = targetSize.Width / targetSize.Height;
            uint w          = Math.Min((uint)(requiredAR * frameHeight), (uint)frameWidth);
            uint h          = Math.Min((uint)(frameWidth / requiredAR), (uint)frameHeight);

            var cropBounds = new BitmapBounds
            {
                X      = (uint)((frameWidth - w) / 2),
                Y      = 0,
                Width  = w,
                Height = h
            };

            var croppedVf = new VideoFrame(BitmapPixelFormat.Bgra8, (int)targetSize.Width, (int)targetSize.Height, BitmapAlphaMode.Ignore);

            await inputVideoFrame.CopyToAsync(croppedVf, cropBounds, null);

            return(croppedVf);
        }
        public async Task <VideoFrame> CropAndDisplayInputImageAsync(VideoFrame inputVideoFrame)
        {
            bool useDX = inputVideoFrame.SoftwareBitmap == null;

            BitmapBounds cropBounds  = new BitmapBounds();
            uint         h           = IMAGE_HEIGHT;
            uint         w           = IMAGE_HEIGHT;
            var          frameHeight = useDX ? inputVideoFrame.Direct3DSurface.Description.Height : inputVideoFrame.SoftwareBitmap.PixelHeight;
            var          frameWidth  = useDX ? inputVideoFrame.Direct3DSurface.Description.Width : inputVideoFrame.SoftwareBitmap.PixelWidth;

            var requiredAR = ((float)IMAGE_HEIGHT / IMAGE_HEIGHT);

            w                 = Math.Min((uint)(requiredAR * frameHeight), (uint)frameWidth);
            h                 = Math.Min((uint)(frameWidth / requiredAR), (uint)frameHeight);
            cropBounds.X      = (uint)((frameWidth - w) / 2);
            cropBounds.Y      = 0;
            cropBounds.Width  = w;
            cropBounds.Height = h;

            cropped_vf = new VideoFrame(BitmapPixelFormat.Bgra8, IMAGE_HEIGHT, IMAGE_HEIGHT, BitmapAlphaMode.Ignore);

            await inputVideoFrame.CopyToAsync(cropped_vf, cropBounds, null);

            return(cropped_vf);
        }
Example no. 9
        /// <summary>
        /// FrameAvailable event handler
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="frame"></param>
        private async void frameSource_FrameAvailable(object sender, VideoFrame frame)
        {
            // Locking behavior, so only one skill execution happens at a time
            if (m_skillLock.Wait(0))
            {
                await frame.CopyToAsync(m_cachedFrameForProcessing);

#pragma warning disable CS4014
                // Purposely don't await this: want handler to exit ASAP
                // so that realtime capture doesn't wait for completion.
                // Instead, ProcessFrameAsync will internally lock such that
                // only one execution is active at a time, dropping frames/
                // aborting skill runs as necessary
                Task.Run(async() =>
                {
                    try
                    {
                        await ProcessFrameAsync(m_cachedFrameForProcessing);
                        await UpdateMetricsDisplayAsync();
                    }
                    catch (Exception ex)
                    {
                        Debug.WriteLine(ex.ToString());
                    }
                    finally
                    {
                        m_skillLock.Release();
                    }
                });
#pragma warning restore CS4014
            }
        }
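
The handler above depends on two members declared elsewhere in the class; a sketch of plausible declarations (the names come from the handler, the initialization details are assumed):

        // Allows exactly one in-flight skill execution; Wait(0) returns false when busy, so that frame is dropped
        private readonly SemaphoreSlim m_skillLock = new SemaphoreSlim(1);

        // Pre-allocated frame that the handler copies into before kicking off processing (size is illustrative)
        private VideoFrame m_cachedFrameForProcessing = new VideoFrame(BitmapPixelFormat.Bgra8, 640, 480, BitmapAlphaMode.Ignore);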
Example no. 10
        /// <summary>
        /// Crop image given a target width and height
        /// </summary>
        /// <param name="inputVideoFrame"></param>
        /// <param name="targetWidth"></param>
        /// <param name="targetHeight"></param>
        /// <returns></returns>
        public static IAsyncOperation<VideoFrame> CenterCropImageAsync(VideoFrame inputVideoFrame, uint targetWidth, uint targetHeight)
        {
            return(AsyncInfo.Run(async(token) =>
            {
                bool useDX = inputVideoFrame.SoftwareBitmap == null;
                VideoFrame result = null;
                // Center crop
                try
                {

                    // Since we will be center-cropping the image, figure which dimension has to be clipped
                    var frameHeight = useDX ? inputVideoFrame.Direct3DSurface.Description.Height : inputVideoFrame.SoftwareBitmap.PixelHeight;
                    var frameWidth = useDX ? inputVideoFrame.Direct3DSurface.Description.Width : inputVideoFrame.SoftwareBitmap.PixelWidth;

                    Rect cropRect = GetCropRect(frameWidth, frameHeight, targetWidth, targetHeight);
                    BitmapBounds cropBounds = new BitmapBounds()
                    {
                        Width = (uint)cropRect.Width,
                        Height = (uint)cropRect.Height,
                        X = (uint)cropRect.X,
                        Y = (uint)cropRect.Y
                    };

                    // Create the VideoFrame to be bound as input for evaluation
                    if (useDX && inputVideoFrame.Direct3DSurface == null)
                    {
                        throw new Exception("Invalid VideoFrame without SoftwareBitmap nor D3DSurface");
                    }

                    result = new VideoFrame(BitmapPixelFormat.Bgra8,
                                            (int)targetWidth,
                                            (int)targetHeight,
                                            BitmapAlphaMode.Premultiplied);

                    await inputVideoFrame.CopyToAsync(result, cropBounds, null);
                }
                catch (Exception ex)
                {
                    Debug.WriteLine(ex.ToString());
                }

                return result;
            }));
        }
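
GetCropRect is not shown in this example; a plausible implementation computes a centered crop rectangle that matches the target aspect ratio:

        // Hypothetical helper: clip whichever dimension overshoots the target aspect ratio, keeping the crop centered
        private static Rect GetCropRect(int frameWidth, int frameHeight, uint targetWidth, uint targetHeight)
        {
            double requiredAR = (double)targetWidth / targetHeight;
            double w = Math.Min(requiredAR * frameHeight, frameWidth);
            double h = Math.Min(frameWidth / requiredAR, frameHeight);
            return new Rect((frameWidth - w) / 2, (frameHeight - h) / 2, w, h);
        }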
Example no. 11
        /// <summary>
        /// Resize a video frame to the specified size, center-cropping the source to preserve the target aspect ratio
        /// </summary>
        /// <param name="frame"></param>
        /// <param name="encodingProperties"></param>
        /// <param name="targetWidth"></param>
        /// <param name="targetHeight"></param>
        /// <returns></returns>
        public async static Task<VideoFrame> ResizeVideoFrameAsync(VideoFrame frame, VideoEncodingProperties encodingProperties, int targetWidth, int targetHeight)
        public async static Task <VideoFrame> ResizeVideoFrameAsync(VideoFrame frame, VideoEncodingProperties encodingProperties, int targetWidth, int targetHeight)
        {
            if (frame != null)
            {
                var destFrame = new VideoFrame(BitmapPixelFormat.Bgra8, targetWidth, targetHeight);

                var sourceWidth  = 0u;
                var sourceHeight = 0u;
                if (encodingProperties != null)
                {
                    sourceHeight = encodingProperties.Height;
                    sourceWidth  = encodingProperties.Width;
                }
                else
                {
                    if (frame.SoftwareBitmap != null)
                    {
                        sourceHeight = (uint)frame.SoftwareBitmap.PixelHeight;
                        sourceWidth  = (uint)frame.SoftwareBitmap.PixelWidth;
                    }
                    else
                    {
                        sourceHeight = (uint)frame.Direct3DSurface.Description.Height;
                        sourceWidth  = (uint)frame.Direct3DSurface.Description.Width;
                    }
                }

                var scaleHeight  = targetHeight;
                var scaleWidth   = targetWidth;
                var heightOffset = 0;
                var widthOffset  = 0;
                if (sourceHeight > sourceWidth)
                {
                    scaleHeight  = (int)sourceWidth * targetHeight / targetWidth;
                    heightOffset = (int)(sourceHeight - scaleHeight) / 2;
                }
                else
                {
                    scaleWidth  = (int)sourceHeight * targetWidth / targetHeight;
                    widthOffset = (int)(sourceWidth - scaleWidth) / 2;
                }

                await frame.CopyToAsync(destFrame, new BitmapBounds
                {
                    X      = (uint)widthOffset,
                    Y      = (uint)heightOffset,
                    Height = (uint)scaleHeight,
                    Width  = (uint)scaleWidth
                }, null);

                return(destFrame);
            }
            return(null);
        }
Example no. 12
        public async Task ProcessFrame(VideoFrame videoFrame, Canvas visualizationCanvas)
        {
            if (!isModelLoadedSuccessfully || videoFrame == null)
            {
                return;
            }

            try
            {
                using (SoftwareBitmap bitmapBuffer = new SoftwareBitmap(BitmapPixelFormat.Bgra8,
                                                                        CustomVisionModelInputSize, CustomVisionModelInputSize, BitmapAlphaMode.Ignore))
                {
                    using (VideoFrame buffer = VideoFrame.CreateWithSoftwareBitmap(bitmapBuffer))
                    {
                        await videoFrame.CopyToAsync(buffer);

                        var input = new CustomVisionModelInput()
                        {
                            data = buffer
                        };

                        DateTime start = DateTime.Now;

                        // Run the prediction with the ONNX model
                        CustomVisionModelOutput output = await this.customVisionModel.EvaluateAsync(input);

                        DateTime end = DateTime.Now;

                        await this.Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () =>
                        {
                            ShowResults(output);
                            double predictionTimeInMilliseconds = (end - start).TotalMilliseconds;
                            this.fpsTextBlock.Text = predictionTimeInMilliseconds > 0 ? $"{Math.Round(1000 / predictionTimeInMilliseconds)} fps" : string.Empty;
                        });
                    }
                }
            }
            catch (Exception ex)
            {
                this.isModelLoadedSuccessfully = false;
                await this.Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, async() =>
                {
                    ResetState();
                    await Util.GenericApiCallExceptionHandler(ex, "Failure processing frame");
                });
            }
        }
Example no. 13
        // Center-crop the image to the target dimensions
        public static IAsyncOperation <VideoFrame> CenterCropImageAsync(VideoFrame inputVideoFrame, uint targetWidth,
                                                                        uint targetHeight)
        {
            return(AsyncInfo.Run(async token =>
            {
                var useDX = inputVideoFrame.SoftwareBitmap == null;
                VideoFrame result = null;
                // Center crop
                try
                {
                    // Since we will be center-cropping the image, figure which dimension has to be clipped
                    var frameHeight = useDX
                        ? inputVideoFrame.Direct3DSurface.Description.Height
                        : inputVideoFrame.SoftwareBitmap.PixelHeight;
                    var frameWidth = useDX
                        ? inputVideoFrame.Direct3DSurface.Description.Width
                        : inputVideoFrame.SoftwareBitmap.PixelWidth;

                    // Compute the centered crop bounds that match the target aspect ratio
                    var requiredAR = (float)targetWidth / targetHeight;
                    var w = Math.Min((uint)(requiredAR * frameHeight), (uint)frameWidth);
                    var h = Math.Min((uint)(frameWidth / requiredAR), (uint)frameHeight);
                    var cropBounds = new BitmapBounds
                    {
                        X = (uint)((frameWidth - w) / 2),
                        Y = (uint)((frameHeight - h) / 2),
                        Width = w,
                        Height = h
                    };

                    // Create the VideoFrame to be bound as input for evaluation
                    if (useDX && inputVideoFrame.Direct3DSurface == null)
                    {
                        throw new Exception("Invalid VideoFrame without SoftwareBitmap nor D3DSurface");
                    }

                    result = new VideoFrame(BitmapPixelFormat.Bgra8,
                                            (int)targetWidth,
                                            (int)targetHeight,
                                            BitmapAlphaMode.Premultiplied);
                    await inputVideoFrame.CopyToAsync(result, cropBounds, null);
                }
                catch (Exception ex)
                {
                    Debug.WriteLine(ex.ToString());
                }
                return result;
            }));
        }
        /// <summary>
        /// MediaPlayer.VideoFrameAvailable event handler. Copies the frame, stamps it with the playback position, and forwards the event
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="args"></param>
        private async void MediaPlayer_VideoFrameAvailable(MediaPlayer sender, object args)
        {
            m_mediaPlayer.CopyFrameToVideoSurface(m_videoFrame.Direct3DSurface);

            if (m_desiredImageDescriptor != null)
            {
                await m_videoFrame.CopyToAsync(m_stagingVideoFrame);

                m_stagingVideoFrame.SystemRelativeTime = m_mediaPlayer.PlaybackSession.Position;
                FrameArrived?.Invoke(this, m_stagingVideoFrame);
            }
            else
            {
                m_videoFrame.SystemRelativeTime = m_mediaPlayer.PlaybackSession.Position;
                FrameArrived?.Invoke(this, m_videoFrame);
            }
        }
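
MediaPlayer.CopyFrameToVideoSurface needs a Direct3D-surface-backed destination, so m_videoFrame is presumably created with VideoFrame.CreateAsDirect3D11SurfaceBacked; a sketch of the assumed setup (the width/height fields are hypothetical):

        m_videoFrame = VideoFrame.CreateAsDirect3D11SurfaceBacked(
            DirectXPixelFormat.B8G8R8A8UIntNormalized,
            (int)m_frameWidth,    // hypothetical field: source video width in pixels
            (int)m_frameHeight);  // hypothetical field: source video height in pixels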
Example no. 15
        public async Task ProcessFrame(VideoFrame videoFrame, Canvas visualizationCanvas)
        {
            if (customVisionONNXModel == null || videoFrame == null)
            {
                return;
            }

            try
            {
                using (SoftwareBitmap bitmapBuffer = new SoftwareBitmap(BitmapPixelFormat.Bgra8,
                                                                        customVisionONNXModel.InputImageWidth, customVisionONNXModel.InputImageHeight, BitmapAlphaMode.Ignore))
                {
                    using (VideoFrame buffer = VideoFrame.CreateWithSoftwareBitmap(bitmapBuffer))
                    {
                        await videoFrame.CopyToAsync(buffer);

                        var input = new CustomVisionModelInput()
                        {
                            data = buffer
                        };

                        DateTime start = DateTime.Now;

                        // Run the prediction with the ONNX model
                        CustomVisionModelOutput output = await this.customVisionONNXModel.EvaluateAsync(input);

                        await ShowPredictionResults(output, Math.Round((DateTime.Now - start).TotalMilliseconds));
                    }
                }
            }
            catch (Exception ex)
            {
                await this.Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, async() =>
                {
                    if (SettingsHelper.Instance.ShowDebugInfo)
                    {
                        await Util.GenericApiCallExceptionHandler(ex, "Failure processing frame");
                    }
                });
            }
        }
        /// <summary>
        /// Launch file picker for user to select a file and save a VideoFrame to it
        /// </summary>
        /// <param name="frame"></param>
        /// <returns></returns>
        public static IAsyncAction SaveVideoFrameToFilePickedAsync(VideoFrame frame)
        {
            return(AsyncInfo.Run(async(token) =>
            {
                // Trigger file picker to select an image file
                FileSavePicker fileSavePicker = new FileSavePicker();
                fileSavePicker.SuggestedStartLocation = PickerLocationId.PicturesLibrary;
                fileSavePicker.FileTypeChoices.Add("image file", new List <string>()
                {
                    ".jpg"
                });
                fileSavePicker.SuggestedFileName = "NewImage";

                StorageFile selectedStorageFile = await fileSavePicker.PickSaveFileAsync();

                if (selectedStorageFile == null)
                {
                    return;
                }

                using (IRandomAccessStream stream = await selectedStorageFile.OpenAsync(FileAccessMode.ReadWrite))
                {
                    VideoFrame frameToEncode = frame;
                    BitmapEncoder encoder = await BitmapEncoder.CreateAsync(BitmapEncoder.JpegEncoderId, stream);

                    if (frameToEncode.SoftwareBitmap == null)
                    {
                        Debug.Assert(frame.Direct3DSurface != null);
                        frameToEncode = new VideoFrame(BitmapPixelFormat.Bgra8, frame.Direct3DSurface.Description.Width, frame.Direct3DSurface.Description.Height);
                        await frame.CopyToAsync(frameToEncode);
                    }
                    encoder.SetSoftwareBitmap(
                        frameToEncode.SoftwareBitmap.BitmapPixelFormat.Equals(BitmapPixelFormat.Bgra8) ?
                        frameToEncode.SoftwareBitmap
                        : SoftwareBitmap.Convert(frameToEncode.SoftwareBitmap, BitmapPixelFormat.Bgra8));

                    await encoder.FlushAsync();
                }
            }));
        }
Example no. 17
        public async Task ProcessFrame(VideoFrame videoFrame, Canvas visualizationCanvas)
        {
            if (!isModelLoadedSuccessfully || videoFrame == null)
            {
                return;
            }

            try
            {
                using (SoftwareBitmap bitmapBuffer = new SoftwareBitmap(BitmapPixelFormat.Bgra8,
                                                                        ObjectDetectionModelInputSize, ObjectDetectionModelInputSize, BitmapAlphaMode.Ignore))
                {
                    using (VideoFrame buffer = VideoFrame.CreateWithSoftwareBitmap(bitmapBuffer))
                    {
                        await videoFrame.CopyToAsync(buffer);

                        DateTime start = DateTime.Now;

                        IList <PredictionModel> predictions = await this.objectDetectionModel.PredictImageAsync(buffer);

                        double predictionTimeInMilliseconds = (DateTime.Now - start).TotalMilliseconds;

                        await this.Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () =>
                        {
                            this.ShowVisualization(visualizationCanvas, predictions);
                            this.fpsTextBlock.Text = predictionTimeInMilliseconds > 0 ? $"{Math.Round(1000 / predictionTimeInMilliseconds)} fps" : string.Empty;
                        });
                    }
                }
            }
            catch (Exception ex)
            {
                this.isModelLoadedSuccessfully = false;
                await this.Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, async() =>
                {
                    await Util.GenericApiCallExceptionHandler(ex, "Failure processing frame");
                });
            }
        }
        private async void Current_SoftwareBitmapFrameCaptured(object sender, SoftwareBitmapEventArgs e)
        {
            Debug.WriteLine("FrameCaptured");
            Debug.WriteLine($"Frame evaluation started {DateTime.Now}");
            if (e.SoftwareBitmap != null)
            {
                BitmapPixelFormat bpf = e.SoftwareBitmap.BitmapPixelFormat;

                var uncroppedBitmap = SoftwareBitmap.Convert(e.SoftwareBitmap, BitmapPixelFormat.Nv12);
                var faces           = await _faceDetector.DetectFacesAsync(uncroppedBitmap);

                if (faces.Count > 0)
                {
                    //crop image to focus on face portion
                    var        faceBox    = faces[0].FaceBox;
                    VideoFrame inputFrame = VideoFrame.CreateWithSoftwareBitmap(e.SoftwareBitmap);
                    VideoFrame tmp        = new VideoFrame(e.SoftwareBitmap.BitmapPixelFormat, (int)(faceBox.Width + faceBox.Width % 2) - 2, (int)(faceBox.Height + faceBox.Height % 2) - 2);
                    await inputFrame.CopyToAsync(tmp, faceBox, null);

                    //crop image to fit model input requirements
                    VideoFrame croppedInputImage = new VideoFrame(BitmapPixelFormat.Gray8, (int)_inputImageDescriptor.Shape[3], (int)_inputImageDescriptor.Shape[2]);
                    var        srcBounds         = GetCropBounds(
                        tmp.SoftwareBitmap.PixelWidth,
                        tmp.SoftwareBitmap.PixelHeight,
                        croppedInputImage.SoftwareBitmap.PixelWidth,
                        croppedInputImage.SoftwareBitmap.PixelHeight);
                    await tmp.CopyToAsync(croppedInputImage, srcBounds, null);

                    ImageFeatureValue imageTensor = ImageFeatureValue.CreateFromVideoFrame(croppedInputImage);

                    _binding = new LearningModelBinding(_session);

                    TensorFloat outputTensor = TensorFloat.Create(_outputTensorDescriptor.Shape);

                    // Bind inputs + outputs
                    _binding.Bind(_inputImageDescriptor.Name, imageTensor);
                    _binding.Bind(_outputTensorDescriptor.Name, outputTensor);

                    // Evaluate results
                    var results = await _session.EvaluateAsync(_binding, Guid.NewGuid().ToString());

                    Debug.WriteLine("ResultsEvaluated: " + results.ToString());

                    var outputTensorList = outputTensor.GetAsVectorView();
                    var resultsList      = new List <float>(outputTensorList.Count);
                    for (int i = 0; i < outputTensorList.Count; i++)
                    {
                        resultsList.Add(outputTensorList[i]);
                    }

                    var softMaxOutputs = SoftMax(resultsList);

                    double maxProb  = 0;
                    int    maxIndex = 0;

                    // Comb through the evaluation results
                    for (int i = 0; i < Constants.POTENTIAL_EMOJI_NAME_LIST.Count(); i++)
                    {
                        // Record the dominant emotion probability & its location
                        if (softMaxOutputs[i] > maxProb)
                        {
                            maxIndex = i;
                            maxProb  = softMaxOutputs[i];
                        }

                        //for evaluations run on the EmotionPage, record info about single specific emotion of interest
                        if (CurrentEmojis._currentEmoji != null && Constants.POTENTIAL_EMOJI_NAME_LIST[i].Equals(CurrentEmojis._currentEmoji.Name))
                        {
                            SoftwareBitmap potentialBestPic;

                            try
                            {
                                potentialBestPic = SoftwareBitmap.Convert(uncroppedBitmap, BitmapPixelFormat.Bgra8);
                            }
                            catch (Exception ex)
                            {
                                Debug.WriteLine($"Error converting SoftwareBitmap. Details:{ex.Message}. Attempting to continue...");
                                return;
                            }

                            await Windows.ApplicationModel.Core.CoreApplication.MainView.CoreWindow.Dispatcher.RunAsync(CoreDispatcherPriority.Normal,
                                                                                                                        async() =>
                            {
                                // Give user immediate visual feedback by updating success gauge
                                ScoreUpdated?.Invoke(this, new EmotionPageGaugeScoreEventArgs()
                                {
                                    Score = softMaxOutputs[i]
                                });

                                // Save original pic for each emotion no matter how bad it is (and record its associated info)
                                double bestScore = CurrentEmojis._emojis.Emojis[CurrentEmojis._currentEmojiIndex].BestScore;
                                if (softMaxOutputs[i] > bestScore)
                                {
                                    CurrentEmojis._emojis.Emojis[CurrentEmojis._currentEmojiIndex].BestScore = softMaxOutputs[i];

                                    var source = new SoftwareBitmapSource();

                                    await source.SetBitmapAsync(potentialBestPic);

                                    // Create format of potentialBestPic to be displayed in a gif later
                                    SoftwareBitmap tmpBitmap = potentialBestPic;
                                    WriteableBitmap wb       = new WriteableBitmap(tmpBitmap.PixelWidth, tmpBitmap.PixelHeight);
                                    tmpBitmap.CopyToBuffer(wb.PixelBuffer);

                                    CurrentEmojis._emojis.Emojis[CurrentEmojis._currentEmojiIndex].BestPic      = source;
                                    CurrentEmojis._emojis.Emojis[CurrentEmojis._currentEmojiIndex].ShowOopsIcon = false;
                                    CurrentEmojis._emojis.Emojis[CurrentEmojis._currentEmojiIndex].BestPicWB    = wb;
                                }
                            }
                                                                                                                        );
                        }
                    }

                    Debug.WriteLine($"Probability = {maxProb}, Threshold set to = {Constants.CLASSIFICATION_CERTAINTY_THRESHOLD}, Emotion = {Constants.POTENTIAL_EMOJI_NAME_LIST[maxIndex]}");

                    // For evaluations run on the MainPage, update the emoji carousel
                    if (maxProb >= Constants.CLASSIFICATION_CERTAINTY_THRESHOLD)
                    {
                        Debug.WriteLine("first page emoji should start to update");
                        IntelligenceServiceEmotionClassified?.Invoke(this, new ClassifiedEmojiEventArgs(CurrentEmojis._emojis.Emojis[maxIndex]));
                    }

                    // Dispose of resources
                    if (e.SoftwareBitmap != null)
                    {
                        e.SoftwareBitmap.Dispose();
                        e.SoftwareBitmap = null;
                    }
                }
            }
            IntelligenceServiceProcessingCompleted?.Invoke(this, null);
            Debug.WriteLine($"Frame evaluation finished {DateTime.Now}");
        }
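
The SoftMax helper used by both handlers is not shown; a standard numerically stable implementation over the raw logits would look like this sketch (requires System.Linq):

        private static List<float> SoftMax(List<float> logits)
        {
            // Subtract the maximum before exponentiating for numerical stability
            float max = logits.Max();
            var exps = logits.Select(v => (float)Math.Exp(v - max)).ToList();
            float sum = exps.Sum();
            return exps.Select(v => v / sum).ToList();
        }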
        private async void Current_SoftwareBitmapFrameCaptured(object sender, SoftwareBitmapEventArgs e)
        {
            Debug.WriteLine("FrameCaptured");
            Debug.WriteLine($"Frame evaluation started {DateTime.Now}");
            if (e.SoftwareBitmap != null)
            {
                BitmapPixelFormat bpf = e.SoftwareBitmap.BitmapPixelFormat;

                var uncroppedBitmap = SoftwareBitmap.Convert(e.SoftwareBitmap, BitmapPixelFormat.Nv12);
                var faces           = await _faceDetector.DetectFacesAsync(uncroppedBitmap);

                if (faces.Count > 0)
                {
                    //crop image to focus on face portion
                    var        faceBox    = faces[0].FaceBox;
                    VideoFrame inputFrame = VideoFrame.CreateWithSoftwareBitmap(e.SoftwareBitmap);
                    VideoFrame tmp        = new VideoFrame(e.SoftwareBitmap.BitmapPixelFormat, (int)(faceBox.Width + faceBox.Width % 2) - 2, (int)(faceBox.Height + faceBox.Height % 2) - 2);
                    await inputFrame.CopyToAsync(tmp, faceBox, null);

                    //crop image to fit model input requirements
                    VideoFrame croppedInputImage = new VideoFrame(BitmapPixelFormat.Gray8, (int)_inputImageDescriptor.Shape[3], (int)_inputImageDescriptor.Shape[2]);
                    var        srcBounds         = GetCropBounds(
                        tmp.SoftwareBitmap.PixelWidth,
                        tmp.SoftwareBitmap.PixelHeight,
                        croppedInputImage.SoftwareBitmap.PixelWidth,
                        croppedInputImage.SoftwareBitmap.PixelHeight);
                    await tmp.CopyToAsync(croppedInputImage, srcBounds, null);

                    ImageFeatureValue imageTensor = ImageFeatureValue.CreateFromVideoFrame(croppedInputImage);

                    _binding = new LearningModelBinding(_session);

                    TensorFloat outputTensor = TensorFloat.Create(_outputTensorDescriptor.Shape);

                    // Bind inputs + outputs
                    _binding.Bind(_inputImageDescriptor.Name, imageTensor);
                    _binding.Bind(_outputTensorDescriptor.Name, outputTensor);

                    // Evaluate results
                    var results = await _session.EvaluateAsync(_binding, Guid.NewGuid().ToString());

                    Debug.WriteLine("ResultsEvaluated: " + results.ToString());

                    var outputTensorList = outputTensor.GetAsVectorView();
                    var resultsList      = new List <float>(outputTensorList.Count);
                    for (int i = 0; i < outputTensorList.Count; i++)
                    {
                        resultsList.Add(outputTensorList[i]);
                    }

                    var softMaxOutputs = SoftMax(resultsList);

                    double maxProb  = 0;
                    int    maxIndex = 0;

                    // Comb through the evaluation results
                    for (int i = 0; i < Constants.POTENTIAL_EMOJI_NAME_LIST.Count(); i++)
                    {
                        // Record the dominant emotion probability & its location
                        if (softMaxOutputs[i] > maxProb)
                        {
                            maxIndex = i;
                            maxProb  = softMaxOutputs[i];
                        }
                    }

                    Debug.WriteLine($"Probability = {maxProb}, Threshold set to = {Constants.CLASSIFICATION_CERTAINTY_THRESHOLD}, Emotion = {Constants.POTENTIAL_EMOJI_NAME_LIST[maxIndex]}");

                    // For evaluations run on the MainPage, update the emoji carousel
                    if (maxProb >= Constants.CLASSIFICATION_CERTAINTY_THRESHOLD)
                    {
                        Debug.WriteLine("first page emoji should start to update");
                        IntelligenceServiceEmotionClassified?.Invoke(this, new ClassifiedEmojiEventArgs(CurrentEmojis._emojis.Emojis[maxIndex]));
                    }

                    // Dispose of resources
                    if (e.SoftwareBitmap != null)
                    {
                        e.SoftwareBitmap.Dispose();
                        e.SoftwareBitmap = null;
                    }
                }
            }
            IntelligenceServiceProcessingCompleted?.Invoke(this, null);
            Debug.WriteLine($"Frame evaluation finished {DateTime.Now}");
        }
Example no. 20
    public async Task StartDetectAsync(int PreviewInterval, int ImageTargetWidth, int ImageTargetHeight, bool WideImage)
    {
#if WINDOWS_UWP
        if (mediaCapture != null)
        {
            var targetWidth  = ImageTargetWidth;
            var targetHeight = ImageTargetHeight;
            try
            {
                var isWideImage = WideImage;
                timer = ThreadPoolTimer.CreatePeriodicTimer(async(source) =>
                {
                    if (mediaCapture != null)
                    {
                        try
                        {
                            // Grab the current preview frame
                            VideoFrame previewFrame;
                            if (currentFrame != null)
                            {
                                if (isWideImage)
                                {
                                    var wideFrame = new VideoFrame(Windows.Graphics.Imaging.BitmapPixelFormat.Bgra8, targetWidth, targetHeight);
                                    await currentFrame.CopyToAsync(wideFrame);
                                    previewFrame = wideFrame;
                                }
                                else
                                {
                                    previewFrame = currentFrame;
                                }
                                if (previewFrame != null)
                                {
                                    VideoFrame resizedFrame;
                                    if (isWideImage)
                                    {
                                        resizedFrame = previewFrame;
                                    }
                                    else
                                    {
                                        resizedFrame = await ImageHelper.ResizeVideoFrameAsync(previewFrame, previewProperties, targetWidth, targetHeight);
                                    }
                                    var startTime = DateTime.Now;
                                    DetectResult  = await EvaluteImageAsync(resizedFrame);
                                    EvalutionTime = (DateTime.Now - startTime).TotalSeconds.ToString();
                                }
                            }
                        }
                        catch (Exception ex)
                        {
                            UnityEngine.Debug.LogException(ex);
                        }
                    }
                }, TimeSpan.FromSeconds(PreviewInterval));
            }
            catch (Exception ex)
            {
                System.Diagnostics.Debug.WriteLine(ex.ToString());
            }
        }
#endif
    }
Example no. 21
        /// <summary>
        /// Display a frame and the evaluation results on the UI
        /// </summary>
        /// <param name="frame"></param>
        /// <returns></returns>
        private async Task DisplayFrameAndResultAsync(VideoFrame frame)
        {
            await Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, async() =>
            {
                try
                {
                    // Enable results to be displayed
                    m_bodyRenderer.IsVisible = true;

                    // Display the input frame
                    SoftwareBitmap targetSoftwareBitmap = frame.SoftwareBitmap;

                    // If we receive a Direct3DSurface-backed VideoFrame, convert to a SoftwareBitmap in a format that can be rendered via the UI element
                    if (targetSoftwareBitmap == null)
                    {
                        if (m_renderTargetFrame == null)
                        {
                            m_renderTargetFrame = new VideoFrame(BitmapPixelFormat.Bgra8, frame.Direct3DSurface.Description.Width, frame.Direct3DSurface.Description.Height, BitmapAlphaMode.Ignore);
                        }

                        // Leverage the VideoFrame.CopyToAsync() method that can convert the input Direct3DSurface-backed VideoFrame to a SoftwareBitmap-backed VideoFrame
                        await frame.CopyToAsync(m_renderTargetFrame);
                        targetSoftwareBitmap = m_renderTargetFrame.SoftwareBitmap;
                    }
                    // Else, if we receive a SoftwareBitmap-backed VideoFrame, if its format cannot already be rendered via the UI element, convert it accordingly
                    else
                    {
                        if (targetSoftwareBitmap.BitmapPixelFormat != BitmapPixelFormat.Bgra8 || targetSoftwareBitmap.BitmapAlphaMode != BitmapAlphaMode.Ignore)
                        {
                            if (m_renderTargetFrame == null)
                            {
                                m_renderTargetFrame = new VideoFrame(BitmapPixelFormat.Bgra8, targetSoftwareBitmap.PixelWidth, targetSoftwareBitmap.PixelHeight, BitmapAlphaMode.Ignore);
                            }

                            // Leverage the VideoFrame.CopyToAsync() method that can convert the input SoftwareBitmap-backed VideoFrame to a different format
                            await frame.CopyToAsync(m_renderTargetFrame);
                            targetSoftwareBitmap = m_renderTargetFrame.SoftwareBitmap;
                        }
                    }
                    await m_processedBitmapSource.SetBitmapAsync(targetSoftwareBitmap);

                    // If our canvas overlay is properly resized, update displayed results
                    if (UICanvasOverlay.ActualWidth != 0)
                    {
                        m_bodyRenderer.Update(m_binding.Bodies, m_frameSource.FrameSourceType != FrameSourceType.Camera);
                    }

                    // Output result and perf text
                    UISkillOutputDetails.Text = $"Found {m_binding.Bodies.Count} bodies (bind: {m_bindTime.ToString("F2")}ms, eval: {m_evalTime.ToString("F2")}ms";
                }
                catch (TaskCanceledException)
                {
                    // no-op: we expect this exception when we change media sources
                    // and can safely ignore/continue
                }
                catch (Exception ex)
                {
                    NotifyUser($"Exception while rendering results: {ex.Message}");
                }
            });
        }
Example no. 22
        /// <summary>
        /// Timer event handler
        /// </summary>
        /// <param name="sender"></param>
        private async void TimerCapFlame(object sender)
        {
            //Prevent concurrent execution on multiple threads
            if (!semaphore.Wait(0))
            {
                return;
            }
            else if (this.ModelGen == null)
            {
                semaphore.Release();
                return;
            }

            try
            {
                //The AI model input must be 224x224, BGRA8
                BitmapPixelFormat InputPixelFormat = BitmapPixelFormat.Bgra8;
                using (VideoFrame previewFrame = new VideoFrame(InputPixelFormat, 640, 480, BitmapAlphaMode.Ignore))
                {
                    //Grab a preview frame
                    await this.mediaCapture.GetPreviewFrameAsync(previewFrame);

                    if (previewFrame != null)                           //Frame was captured successfully
                    {
                        //Create an instance of the model input class
                        var modelInput = new Input();

                        //Create a SoftwareBitmap at the model input resolution
                        SoftwareBitmap bitmapBuffer = new SoftwareBitmap(BitmapPixelFormat.Bgra8, 224, 224, BitmapAlphaMode.Ignore);

                        //Create a VideoFrame backed by the SoftwareBitmap
                        VideoFrame buffer = VideoFrame.CreateWithSoftwareBitmap(bitmapBuffer);

                        //Copy the captured frame into the new VideoFrame (this also resizes it)
                        await previewFrame.CopyToAsync(buffer);

                        //Get the SoftwareBitmap (now resized)
                        SoftwareBitmap resizedBitmap = buffer.SoftwareBitmap;

                        //Convert to a WriteableBitmap
                        WriteableBitmap innerBitmap = null;
                        byte[]          buf         = null;
                        await Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, async() => {
                            innerBitmap = new WriteableBitmap(resizedBitmap.PixelWidth, resizedBitmap.PixelHeight);

                            resizedBitmap.CopyToBuffer(innerBitmap.PixelBuffer);
                            buf = new byte[innerBitmap.PixelBuffer.Length];
                            innerBitmap.PixelBuffer.CopyTo(buf);
                        });

                        //Re-create a SoftwareBitmap from the copied pixel buffer
                        SoftwareBitmap sb = SoftwareBitmap.CreateCopyFromBuffer(buf.AsBuffer(), BitmapPixelFormat.Bgra8, 224, 224, BitmapAlphaMode.Ignore);

                        //Display the captured image in the UI control
                        await this.Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, async() =>
                        {
                            var src = new SoftwareBitmapSource();
                            await src.SetBitmapAsync(sb);
                            Image_CapImg.Source = src;
                        });

                        //Strip the alpha channel and reshape the array
                        byte[] buf2 = ConvertImageaArray(buf);

                        //Normalize and convert to a float array
                        float[] inData = NormalizeImage(buf2);

                        //Create the input tensor (Windows.AI.MachineLearning.TensorFloat)
                        TensorFloat tf =
                            TensorFloat.CreateFromArray(new long[] { 1, 3, 224, 224 }, inData);

                        //Set the data on the model input
                        modelInput.data = tf;

                        //Evaluating the model returns the predictions
                        var output = await ModelGen.EvaluateAsync(modelInput);

                        //Show the results on the UI thread
                        await this.Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () =>
                        {
                            //Read the prediction results
                            var result_vec = output.mobilenetv20_output_flatten0_reshape0.GetAsVectorView();
                            var list       = result_vec.ToArray<float>();
                            var max1st     = list.Max();
                            var index1st   = Array.IndexOf(list, max1st);                               //Index of the highest probability

                            string ans = classList.Classes[index1st].ToString();

                            //Display the result
                            this.Text_Result_1st.Text = ans + ":" + max1st.ToString("0.0");
                        });
                    }
                }
            }
            catch (Exception ex)
            {
                Debug.WriteLine("周期処理で例外発生");
                Debug.WriteLine(ex.ToString());
            }
            finally
            {
                semaphore.Release();
            }
        }