/// <summary>
/// Records the arrival time (epoch) of each frame into the rolling queue that
/// matches its source kind, keeping only the most recent entries, then
/// disposes the frame.
/// </summary>
/// <param name="sender">The collection raising the event (unused).</param>
/// <param name="args">Carries the frame; this handler takes ownership and disposes it.</param>
private void FrameCollection_CollectionProgressed(ExampleMediaFrameCollection sender, ExampleMediaFrameCollectionProgressedEventArgs args)
{
    // Bound each rolling window so the queues never grow past the last few
    // frames (used for rate/latency estimation elsewhere — the exact consumer
    // is outside this view).
    const int MaxTrackedEpochs = 10;

    // Shared enqueue-and-trim logic that was previously duplicated per branch.
    void RecordEpoch(ConcurrentQueue<TimeSpan> epochs)
    {
        epochs.Enqueue(args.Frame.SystemRelativeTime.Value);
        if (epochs.Count > MaxTrackedEpochs)
        {
            epochs.TryDequeue(out _);
        }
    }

    if (args.Frame.SourceKind == MediaFrameSourceKind.Color)
    {
        RecordEpoch(ColorFrameEpochs);
    }
    else if (args.Frame.SourceKind == MediaFrameSourceKind.Infrared)
    {
        // IsIlluminated is nullable; only an explicit true counts as illuminated.
        RecordEpoch(args.Frame.IsIlluminated == true
            ? IlluminatedInfraredFrameEpochs
            : NonIlluminatedInfraredFrameEpochs);
    }

    // Must either dispose or take ownership of the frame
    args.Frame.Dispose();
}
/// <summary>
/// Resets per-run collection state, initializes the camera, wires up frame
/// collection and preview, starts buffered frame reading, and kicks off the
/// UI refresh timer and elapsed-time stopwatch.
/// </summary>
/// <remarks>
/// Statement order matters: the preview must be started before the frame
/// reader is created, and the reader is started before the timers.
/// </remarks>
private async Task StartOperationAsync()
{
    // Fresh per-run state; ConcurrentBag because frames arrive on non-UI threads.
    CollectedColorFrames = new ConcurrentBag <ExampleMediaFrame>();
    CollectedNonIlluminatedInfraredFrames = new ConcurrentBag <ExampleMediaFrame>();
    CollectedIlluminatedInfraredFrames = new ConcurrentBag <ExampleMediaFrame>();
    Stopwatch = new Stopwatch();
    WrittenToDiskFramesCount = 0;

    // Open the capture object to pump the frames to UI preview element
    // More information, including how to keep the display on:
    // https://docs.microsoft.com/en-us/windows/uwp/audio-video-camera/simple-camera-preview-access
    // ExampleMediaCapture interface is based on Windows.Media.Capture.MediaCapture
    // https://docs.microsoft.com/en-us/uwp/api/Windows.Media.Capture.MediaCapture
    MediaCapture = new ExampleMediaCapture();
    await MediaCapture.InitializeAsync();

    // Subscribe for frame-arrival progress before the preview starts so no
    // early frames are missed.
    FrameCollection = new ExampleMediaFrameCollection(MediaCapture);
    FrameCollection.CollectionProgressed += FrameCollection_CollectionProgressed;

    CaptureElement.Source = MediaCapture.PreviewMediaCapture;
    CaptureElement.FlowDirection = MediaCapture.PreviewFlowDirection;
    await MediaCapture.StartPreviewAsync();

    // Open the reader object to subscribe to arriving frames
    // More information:
    // https://docs.microsoft.com/en-us/windows/uwp/audio-video-camera/process-media-frames-with-mediaframereader#create-a-frame-reader-for-the-frame-source
    // ExampleMediaFrameReader interface is based on Windows.Media.Capture.Frames.MultiSourceMediaFrameReader
    // https://docs.microsoft.com/en-us/uwp/api/windows.media.capture.frames.multisourcemediaframereader
    FrameReader = await MediaCapture.CreateFrameReaderAsync();
    // Buffered mode queues frames instead of dropping them under load.
    FrameReader.AcquisitionMode = MediaFrameReaderAcquisitionMode.Buffered;
    FrameReader.FrameArrived += FrameReader_FrameArrived;

    // Start frame reader immediately
    await FrameReader.StartAsync();

    // 100 ms UI tick — presumably drives progress/preview updates in
    // DispatcherTimer_Tick (handler not visible here).
    DispatcherTimer = new DispatcherTimer { Interval = TimeSpan.FromMilliseconds(100) };
    DispatcherTimer.Tick += DispatcherTimer_Tick;
    DispatcherTimer.Start();
    Stopwatch.Start();
}
/// <summary>
/// Accumulates arriving frames into per-source-kind collections until
/// IsCollectionCompleted turns true, then unsubscribes, writes the collected
/// frames to disk, and stops the operation. This handler takes ownership of
/// args.Frame: it is either stored in a collection or disposed here.
/// </summary>
/// <remarks>
/// async void is acceptable only because this is an event handler — any
/// exception thrown after the first await is unobservable.
/// NOTE(review): if two frames can progress concurrently once
/// IsCollectionCompleted flips to true, both callbacks could pass the final
/// check and run WriteToDiskAsync/StopOperationAsync twice — confirm the
/// collection raises CollectionProgressed serially.
/// </remarks>
private async void FrameCollection_CollectionProgressed(ExampleMediaFrameCollection sender, ExampleMediaFrameCollectionProgressedEventArgs args)
{
    if (IsCollectionCompleted)
    {
        // Discard extraneous frames
        args.Frame.Dispose();
        return;
    }

    if (args.Frame.SourceKind == MediaFrameSourceKind.Color)
    {
        // Ownership transfers to the bag; frames are presumably disposed after
        // WriteToDiskAsync / StopOperationAsync — confirm in those methods.
        CollectedColorFrames.Add(args.Frame);
    }
    else if (args.Frame.SourceKind == MediaFrameSourceKind.Infrared)
    {
        // IsIlluminated is nullable; only an explicit true is treated as illuminated.
        if (args.Frame.IsIlluminated == true)
        {
            CollectedIlluminatedInfraredFrames.Add(args.Frame);
        }
        else
        {
            CollectedNonIlluminatedInfraredFrames.Add(args.Frame);
        }
    }
    else
    {
        // Don't know how to handle, discard
        args.Frame.Dispose();
    }

    // Re-check: the frame just added may have been the one that completed the
    // collection, in which case this invocation finalizes the run.
    if (IsCollectionCompleted)
    {
        FrameCollection.CollectionProgressed -= FrameCollection_CollectionProgressed;
        await WriteToDiskAsync();
        await StopOperationAsync();
    }
}
/// <summary>
/// Resets the per-source epoch queues, initializes the camera with face
/// detection bound to the preview stream, wires up frame collection, preview,
/// a buffered frame reader, optional face-detection events, the UI refresh
/// timer, and a one-shot reader stop trigger.
/// </summary>
/// <remarks>
/// Statement order matters: the preview must be started before the frame
/// reader is created.
/// NOTE(review): unlike the non-face-detection variant, neither
/// FrameReader.StartAsync() nor FrameReaderStopTrigger.Start() is called here —
/// presumably both are driven by MediaCapture_FaceDetected /
/// FrameReaderStopTrigger_Elapsed (not visible in this chunk); confirm.
/// </remarks>
private async Task StartOperationAsync()
{
    // Fresh rolling epoch queues; ConcurrentQueue because frames arrive on
    // non-UI threads.
    ColorFrameEpochs = new ConcurrentQueue <TimeSpan>();
    IlluminatedInfraredFrameEpochs = new ConcurrentQueue <TimeSpan>();
    NonIlluminatedInfraredFrameEpochs = new ConcurrentQueue <TimeSpan>();

    // Open the capture object to pump the frames to UI preview element
    // More information, including how to keep the display on:
    // https://docs.microsoft.com/en-us/windows/uwp/audio-video-camera/simple-camera-preview-access
    // ExampleMediaCapture interface is based on Windows.Media.Capture.MediaCapture
    // https://docs.microsoft.com/en-us/uwp/api/Windows.Media.Capture.MediaCapture
    MediaCapture = new ExampleMediaCapture();
    // Run the face detection effect on the preview stream.
    await MediaCapture.InitializeAsync(new ExampleMediaCaptureInitializationSettings
    {
        FaceDetectionAffinity = ExampleMediaCaptureFaceDetectionAffinity.MediaCapturePreview,
    });

    // Subscribe for frame-arrival progress before the preview starts so no
    // early frames are missed.
    FrameCollection = new ExampleMediaFrameCollection(MediaCapture);
    FrameCollection.CollectionProgressed += FrameCollection_CollectionProgressed;

    CaptureElement.Source = MediaCapture.PreviewMediaCapture;
    CaptureElement.FlowDirection = MediaCapture.PreviewFlowDirection;
    await MediaCapture.StartPreviewAsync();

    // Open the reader object to subscribe to arriving frames
    // More information:
    // https://docs.microsoft.com/en-us/windows/uwp/audio-video-camera/process-media-frames-with-mediaframereader#create-a-frame-reader-for-the-frame-source
    // ExampleMediaFrameReader interface is based on Windows.Media.Capture.Frames.MultiSourceMediaFrameReader
    // https://docs.microsoft.com/en-us/uwp/api/windows.media.capture.frames.multisourcemediaframereader
    FrameReader = await MediaCapture.CreateFrameReaderAsync();
    // Buffered mode queues frames instead of dropping them under load.
    FrameReader.AcquisitionMode = MediaFrameReaderAcquisitionMode.Buffered;
    FrameReader.FrameArrived += FrameReader_FrameArrived;

    // Subscribe to face detection events
    // More information:
    // https://docs.microsoft.com/en-us/windows/uwp/audio-video-camera/scene-analysis-for-media-capture#face-detection-effect
    // ExampleMediaCapture.FaceDetected interface is based on Windows.Media.Core.FaceDetectionEffect.FaceDetected
    // https://docs.microsoft.com/en-us/uwp/api/windows.media.core.facedetectioneffect.facedetected
    if (UseFaceDetection)
    {
        MediaCapture.FaceDetected += MediaCapture_FaceDetected;
    }

    // 100 ms UI tick — presumably drives progress/preview updates in
    // DispatcherTimer_Tick (handler not visible here).
    DispatcherTimer = new DispatcherTimer { Interval = TimeSpan.FromMilliseconds(100) };
    DispatcherTimer.Tick += DispatcherTimer_Tick;
    DispatcherTimer.Start();

    // One-shot 3 s timer that stops the frame reader when it elapses; created
    // but not started here — presumably armed when a face is detected (confirm).
    FrameReaderStopTrigger = new Timer
    {
        AutoReset = false,
        Interval = TimeSpan.FromSeconds(3).TotalMilliseconds,
    };
    FrameReaderStopTrigger.Elapsed += FrameReaderStopTrigger_Elapsed;

    // Without face detection there is nothing to reveal gradually, so show the
    // preview at full opacity immediately.
    if (!UseFaceDetection)
    {
        PreviewOpacity = 1.0;
    }
}