/// <summary>
/// Builds the combined <see cref="ExampleMediaFrameReader"/> that wraps one
/// MediaFrameReader per source kind, plus an optional dedicated face-detection
/// reader, and wires its FaceDetected event to this class's handler.
/// </summary>
/// <returns>The configured <see cref="ExampleMediaFrameReader"/>.</returns>
internal async Task<ExampleMediaFrameReader> CreateFrameReaderAsync_Impl()
{
    MediaFrameReader faceDetectionFrameReader = null;

    // A dedicated face-detection reader (over the exclusive RGB source) is only
    // created when the configured affinity is FrameReader; otherwise the
    // PreviewFaceDetectionEffect passed below is used instead.
    if (InitializationSettings.FaceDetectionAffinity == ExampleMediaCaptureFaceDetectionAffinity.FrameReader)
    {
        faceDetectionFrameReader = await PreviewMediaCapture.CreateFrameReaderAsync(PreviewMediaCapture.FrameSources[ExclusiveRgbSourceInfo.Id]);
    }

    var frameReader = new ExampleMediaFrameReader(
        new Dictionary<MediaFrameSourceKind, MediaFrameReader>
        {
#if USE_INFRARED
            // Infrared slot backed by the exclusive IR source.
            { MediaFrameSourceKind.Infrared, await ExclusiveMediaCapture.CreateFrameReaderAsync(FrameSources[ExclusiveIrSourceInfo.Id]) },
#else
            // Without USE_INFRARED the Infrared slot is backed by the RGB source.
            // NOTE(review): presumably a stand-in when no IR hardware/config is
            // available — confirm this is intentional.
            { MediaFrameSourceKind.Infrared, await PreviewMediaCapture.CreateFrameReaderAsync(PreviewMediaCapture.FrameSources[ExclusiveRgbSourceInfo.Id]) },
#endif
            { MediaFrameSourceKind.Color, await PreviewMediaCapture.CreateFrameReaderAsync(PreviewMediaCapture.FrameSources[ExclusiveRgbSourceInfo.Id]) },
        },
        faceDetectionFrameReader,
        PreviewFaceDetectionEffect,
        InitializationSettings.FrameCorrelationTimeSpan);

    // Forward face-detection events raised by the wrapped reader.
    frameReader.FaceDetected += FrameReader_FaceDetected;
    return (frameReader);
}
/// <summary>
/// Handles the frame-arrived event and caches the latest color and infrared frames.
/// </summary>
/// <param name="sender">The frame reader that raised the event.</param>
/// <param name="args">Event data identifying which source kind produced the frame.</param>
public void FrameReader_FrameArrived(ExampleMediaFrameReader sender, ExampleMediaFrameArrivedEventArgs args)
{
    try
    {
        using (var mediaFrameReference = sender.TryAcquireLatestFrameBySourceKind(args.SourceKind))
        {
            // TryAcquireLatestFrameBySourceKind returns null when no frame is available.
            if (mediaFrameReference == null)
            {
                return;
            }

            switch (mediaFrameReference.SourceKind)
            {
                case MediaFrameSourceKind.Color:
                    // NOTE(review): the SoftwareBitmap is obtained from a frame reference
                    // that is disposed at the end of this using block — confirm the cached
                    // bitmap remains valid afterwards, otherwise copy it with
                    // SoftwareBitmap.Copy() before assigning.
                    colorFrame = mediaFrameReference.VideoMediaFrame.SoftwareBitmap;
                    break;

                case MediaFrameSourceKind.Infrared:
                    // Only cache IR frames that the sensor flags as illuminated.
                    if (mediaFrameReference.VideoMediaFrame.InfraredMediaFrame.IsIlluminated)
                    {
                        irFrame = mediaFrameReference.VideoMediaFrame.SoftwareBitmap;
                    }
                    break;
            }
        }
    }
    catch (ObjectDisposedException)
    {
        // The reader can be disposed while a frame event is still in flight; ignore.
    }
}
/// <summary>
/// Initializes the ExampleMediaCapture pipeline and its frame reader:
/// turns the camera on, attaches the preview to the UI element, and starts
/// buffered frame acquisition.
/// </summary>
/// <returns>A task that completes once preview and frame acquisition are running.</returns>
public async Task InitializeAsync()
{
    // Bring up the capture device first so the preview element has a live source.
    var capture = new ExampleMediaCapture();
    await capture.InitializeAsync();
    MediaCapture = capture;

    // Attach the preview stream to the UI element, honoring the camera's flow direction.
    CaptureElement.Source = capture.PreviewMediaCapture;
    CaptureElement.FlowDirection = capture.PreviewFlowDirection;
    await capture.StartPreviewAsync();

    // Configure the reader for buffered acquisition before starting it.
    var reader = await capture.CreateFrameReaderAsync();
    reader.AcquisitionMode = MediaFrameReaderAcquisitionMode.Buffered;
    reader.FrameArrived += FrameArrivedEvent;
    FrameReader = reader;
    await reader.StartAsync();
}
/// <summary>
/// Handles the frame-arrived event and stores the acquired frame reference in
/// <c>FrameCollection</c>.
/// </summary>
/// <param name="sender">The frame reader that raised the event.</param>
/// <param name="args">Event data identifying which source kind produced the frame.</param>
private void FrameReader_FrameArrived(ExampleMediaFrameReader sender, ExampleMediaFrameArrivedEventArgs args)
{
    try
    {
        // Access the latest frame. More information:
        // https://docs.microsoft.com/en-us/windows/uwp/audio-video-camera/process-media-frames-with-mediaframereader#handle-the-frame-arrived-event
        // ExampleMediaFrameReference interface is based on Windows.Media.Core.FaceDetectionEffect.FaceDetected
        // https://docs.microsoft.com/en-us/uwp/api/windows.media.capture.frames.multisourcemediaframereference
        using (var mediaFrameReference = sender.TryAcquireLatestFrameBySourceKind(args.SourceKind))
        {
            // TryAcquireLatestFrameBySourceKind returns null when no frame is
            // available; the original code added null to the collection.
            if (mediaFrameReference != null)
            {
                // NOTE(review): the using block disposes the reference right after it is
                // added — confirm FrameCollection.Add consumes/copies the frame
                // synchronously, otherwise the stored reference will already be disposed.
                FrameCollection.Add(mediaFrameReference);
            }
        }
    }
    catch (ObjectDisposedException)
    {
        // The reader can be disposed while a frame event is still in flight; ignore.
    }
}
/// <summary>
/// Relays face-detected events from the wrapped frame reader to this class's
/// <c>FaceDetected</c> subscribers.
/// </summary>
/// <param name="sender">The frame reader that detected a face.</param>
/// <param name="args">Face-detection event data.</param>
private void FrameReader_FaceDetected(ExampleMediaFrameReader sender, FaceAnalysis.FaceDetectedEventArgs args)
{
    // Snapshot the delegate so a concurrent unsubscribe cannot cause a null dereference.
    var handler = FaceDetected;
    if (handler != null)
    {
        handler(this, args);
    }
}