Example #1
        async Task<byte[]> GetFrameData(MediaFrameReference frame)
        {
            byte[] bytes = null;

            if (frame == null)
            {
                return bytes;
            }

            VideoMediaFrame videoMediaFrame = frame.VideoMediaFrame;

            if (videoMediaFrame == null)
            {
                return bytes;
            }

            VideoFrame     videoFrame     = videoMediaFrame.GetVideoFrame();
            SoftwareBitmap softwareBitmap = videoFrame.SoftwareBitmap;

            if (softwareBitmap == null)
            {
                return bytes;
            }

            // Convert to BGRA8, a pixel format the JPEG encoder accepts
            SoftwareBitmap bitmapBGRA8 = SoftwareBitmap.Convert(softwareBitmap, BitmapPixelFormat.Bgra8, BitmapAlphaMode.Ignore);

            using (InMemoryRandomAccessStream stream = new InMemoryRandomAccessStream())
            {
                BitmapEncoder encoder = await BitmapEncoder.CreateAsync(BitmapEncoder.JpegEncoderId, stream);

                // Feed the converted bitmap to the encoder and skip thumbnail generation
                encoder.SetSoftwareBitmap(bitmapBGRA8);
                encoder.IsThumbnailGenerated = false;

                try
                {
                    await encoder.FlushAsync();

                    // Rewind the stream: FlushAsync leaves the position at the end
                    stream.Seek(0);

                    bytes = new byte[stream.Size];
                    await stream.AsStream().ReadAsync(bytes, 0, bytes.Length);
                }
                catch (Exception e)
                {
                    Debug.WriteLine($"Error while trying to encode frame into a byte array, exception: {e.Message}");
                }
            }

            return bytes;
        }
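
For context, here is a sketch of how GetFrameData might be called. The MediaFrameReader setup is not part of the original example, and the method name SaveLatestFrameAsync and the output file name are hypothetical; TryAcquireLatestFrame, ApplicationData, and FileIO are the standard WinRT APIs for this pattern.

        // Hypothetical caller: grab the latest frame from an already-started
        // MediaFrameReader (setup not shown) and persist it as a JPEG file.
        async Task SaveLatestFrameAsync(MediaFrameReader frameReader)
        {
            using (MediaFrameReference frame = frameReader.TryAcquireLatestFrame())
            {
                byte[] jpegBytes = await GetFrameData(frame);

                if (jpegBytes != null)
                {
                    StorageFolder folder = ApplicationData.Current.LocalFolder;
                    StorageFile   file   = await folder.CreateFileAsync("frame.jpg", CreationCollisionOption.ReplaceExisting);
                    await FileIO.WriteBytesAsync(file, jpegBytes);
                }
            }
        }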
Example #2
        public void ProcessFrame()
        {
            MediaFrameReference frame           = videoFrameProcessor.GetLatestFrame();
            VideoMediaFrame     videoMediaFrame = frame?.VideoMediaFrame;

            if (videoMediaFrame == null)
            {
                return;
            }
            // Validate that the incoming frame format is compatible with the FaceTracker
            bool isBitmapPixelFormatSupported = videoMediaFrame.SoftwareBitmap != null && FaceTracker.IsBitmapPixelFormatSupported(videoMediaFrame.SoftwareBitmap.BitmapPixelFormat);

            if (!isBitmapPixelFormatSupported)
            {
                return;
            }
            // Ask the FaceTracker to process this frame asynchronously
            IAsyncOperation<IList<DetectedFace>> processFrameTask = faceTracker.ProcessNextFrameAsync(videoMediaFrame.GetVideoFrame());

            try
            {
                // Block until the operation completes; GetResults() throws if the
                // operation is still in flight.
                IList<DetectedFace> faces = processFrameTask.AsTask().GetAwaiter().GetResult();

                lock (@lock)
                {
                    if (faces.Count == 0)
                    {
                        ++numFramesWithoutFaces;

                        // The FaceTracker might lose track of faces for a few frames, for example,
                        // if the person momentarily turns their head away from the camera. To smooth out
                        // the tracking, we allow 30 video frames (~1 second) without faces before
                        // we say that we're no longer tracking any faces.
                        if (numFramesWithoutFaces > 30 && latestFaces.Any())
                        {
                            latestFaces.Clear();
                        }
                    }
                    else
                    {
                        numFramesWithoutFaces = 0;
                        latestFaces.Clear();
                        foreach (var face in faces)
                        {
                            latestFaces.Add(face.FaceBox);
                        }
                    }
                }
            }
            catch (Exception e)
            {
                // The operation might have been canceled, or face analysis might have failed.
                Debug.LogException(e);
            }
        }
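
ProcessFrame relies on several members the example does not show (faceTracker, @lock, latestFaces, numFramesWithoutFaces, videoFrameProcessor). Below is a minimal sketch of how that state might be declared and initialized; the field names mirror the snippet, latestFaces is assumed to hold BitmapBounds rectangles as implied by face.FaceBox, and FaceTracker.CreateAsync is the real factory method.

        // Assumed surrounding state; names mirror the snippet above.
        private FaceTracker faceTracker;
        private readonly object @lock = new object();
        private readonly List<BitmapBounds> latestFaces = new List<BitmapBounds>();
        private int numFramesWithoutFaces;

        async Task InitializeFaceTrackerAsync()
        {
            // FaceTracker has no public constructor; it must be created asynchronously.
            faceTracker = await FaceTracker.CreateAsync();
        }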