        private void ExampleMediaCapture_FaceDetected(ExampleMediaCapture sender, FaceAnalysis.FaceDetectedEventArgs args)
        {
            // Record the frame timestamp only when it is present, at least one face was
            // found, and the face count does not exceed the configured maximum
            if (args.ResultFrame.SystemRelativeTime.HasValue &&
                args.ResultFrame.DetectedFaces.Any() &&
                args.ResultFrame.DetectedFaces.Count <= MaxDetectedFaces)
            {
                DetectedFaceEpochs.Enqueue(args.ResultFrame.SystemRelativeTime.Value);
                Update(args.ResultFrame.SystemRelativeTime.Value);
            }
        }
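The Update method called above is not part of this snippet. A minimal sketch of what it might do, assuming DetectedFaceEpochs is a ConcurrentQueue<TimeSpan> and that the intent is to keep only detections from a recent window (EpochWindow is a hypothetical TimeSpan field):

        // Hypothetical sketch, not the sample's implementation: trim face-detection
        // timestamps that fall outside a sliding window ending at the current epoch.
        private void Update(TimeSpan currentEpoch)
        {
            while (DetectedFaceEpochs.TryPeek(out var epoch) &&
                   currentEpoch - epoch > EpochWindow)
            {
                DetectedFaceEpochs.TryDequeue(out _);
            }
        }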
        /// <summary>
        /// Initializes the ExampleMediaCapture and the frame reader,
        /// turns the camera on and starts the camera preview.
        /// </summary>
        /// <returns>A task that completes once the preview and the frame reader are running.</returns>
        public async Task InitializeAsync()
        {
            MediaCapture = new ExampleMediaCapture();
            await MediaCapture.InitializeAsync();

            CaptureElement.Source        = MediaCapture.PreviewMediaCapture;
            CaptureElement.FlowDirection = MediaCapture.PreviewFlowDirection;
            await MediaCapture.StartPreviewAsync();

            FrameReader = await MediaCapture.CreateFrameReaderAsync();

            FrameReader.AcquisitionMode = MediaFrameReaderAcquisitionMode.Buffered;
            FrameReader.FrameArrived   += FrameArrivedEvent;
            await FrameReader.StartAsync();
        }
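As a usage illustration (not from the sample), InitializeAsync could be invoked when the hosting page is navigated to, assuming the snippet lives in a XAML Page that owns CaptureElement:

        // Hypothetical usage: start capture when the hosting page is navigated to.
        // Assumes this code is part of a Windows.UI.Xaml.Controls.Page.
        protected override async void OnNavigatedTo(NavigationEventArgs e)
        {
            base.OnNavigatedTo(e);
            await InitializeAsync();
        }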
Example #3
        private async Task StartOperationAsync()
        {
            CollectedColorFrames = new ConcurrentBag <ExampleMediaFrame>();
            CollectedNonIlluminatedInfraredFrames = new ConcurrentBag <ExampleMediaFrame>();
            CollectedIlluminatedInfraredFrames    = new ConcurrentBag <ExampleMediaFrame>();
            Stopwatch = new Stopwatch();
            WrittenToDiskFramesCount = 0;

            // Open the capture object to pump the frames to UI preview element
            // More information, including how to keep the display on:
            // https://docs.microsoft.com/en-us/windows/uwp/audio-video-camera/simple-camera-preview-access
            // ExampleMediaCapture interface is based on Windows.Media.Capture.MediaCapture
            // https://docs.microsoft.com/en-us/uwp/api/Windows.Media.Capture.MediaCapture

            MediaCapture = new ExampleMediaCapture();
            await MediaCapture.InitializeAsync();

            FrameCollection = new ExampleMediaFrameCollection(MediaCapture);
            FrameCollection.CollectionProgressed += FrameCollection_CollectionProgressed;

            CaptureElement.Source        = MediaCapture.PreviewMediaCapture;
            CaptureElement.FlowDirection = MediaCapture.PreviewFlowDirection;
            await MediaCapture.StartPreviewAsync();

            // Open the reader object to subscribe to arriving frames
            // More information:
            // https://docs.microsoft.com/en-us/windows/uwp/audio-video-camera/process-media-frames-with-mediaframereader#create-a-frame-reader-for-the-frame-source
            // ExampleMediaFrameReader interface is based on Windows.Media.Capture.Frames.MultiSourceMediaFrameReader
            // https://docs.microsoft.com/en-us/uwp/api/windows.media.capture.frames.multisourcemediaframereader

            FrameReader = await MediaCapture.CreateFrameReaderAsync();

            FrameReader.AcquisitionMode = MediaFrameReaderAcquisitionMode.Buffered;
            FrameReader.FrameArrived   += FrameReader_FrameArrived;

            // Start the frame reader immediately

            await FrameReader.StartAsync();

            DispatcherTimer = new DispatcherTimer
            {
                Interval = TimeSpan.FromMilliseconds(100)
            };
            DispatcherTimer.Tick += DispatcherTimer_Tick;
            DispatcherTimer.Start();
            Stopwatch.Start();
        }
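The DispatcherTimer_Tick handler is not shown in this example. A minimal sketch of the 100 ms UI tick, assuming a StatusText TextBlock and using the collections and stopwatch created above:

        // Hypothetical sketch: report collection progress on every UI tick.
        // StatusText is an assumed TextBlock; the counts come from the bags created above.
        private void DispatcherTimer_Tick(object sender, object e)
        {
            StatusText.Text =
                $"Elapsed {Stopwatch.Elapsed.TotalSeconds:F1} s, " +
                $"color {CollectedColorFrames.Count}, " +
                $"infrared {CollectedIlluminatedInfraredFrames.Count + CollectedNonIlluminatedInfraredFrames.Count}, " +
                $"written {WrittenToDiskFramesCount}";
        }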
Example #4
        private async void MediaCapture_FaceDetected(ExampleMediaCapture sender, FaceDetectedEventArgs args)
        {
            // This event is raised even when no faces are present, so the event arguments must be inspected

            if (args.ResultFrame.DetectedFaces.Any() && FrameReader != null)
            {
                if (PreviewOpacity == 0.0)
                {
                    await FrameReader.StartAsync();

                    PreviewOpacity = 1.0;
                }

                // Reset the countdown to turning off camera
                FrameReaderStopTrigger.Stop();
                FrameReaderStopTrigger.Start();
            }

            UpdateSystemRelativeTime(args.ResultFrame.SystemRelativeTime);
        }
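The FrameReaderStopTrigger_Elapsed handler that the countdown above arms is not shown. A sketch of what it plausibly does, assuming the reader exposes a StopAsync counterpart to StartAsync (mirroring MediaFrameReader) and that PreviewOpacity is bound to the preview UI:

        // Hypothetical sketch: no face has been detected for the trigger interval, so stop
        // the frame reader and hide the preview. Timer.Elapsed fires on a thread-pool
        // thread, so the UI-affecting work is marshalled back to the dispatcher.
        // FrameReader.StopAsync is an assumption based on MediaFrameReader.StopAsync.
        private async void FrameReaderStopTrigger_Elapsed(object sender, ElapsedEventArgs e)
        {
            await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, async () =>
            {
                await FrameReader.StopAsync();
                PreviewOpacity = 0.0;
            });
        }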
Example #5
        private async Task StartOperationAsync()
        {
            ColorFrameEpochs = new ConcurrentQueue <TimeSpan>();
            IlluminatedInfraredFrameEpochs    = new ConcurrentQueue <TimeSpan>();
            NonIlluminatedInfraredFrameEpochs = new ConcurrentQueue <TimeSpan>();

            // Open the capture object to pump the frames to UI preview element
            // More information, including how to keep the display on:
            // https://docs.microsoft.com/en-us/windows/uwp/audio-video-camera/simple-camera-preview-access
            // ExampleMediaCapture interface is based on Windows.Media.Capture.MediaCapture
            // https://docs.microsoft.com/en-us/uwp/api/Windows.Media.Capture.MediaCapture

            MediaCapture = new ExampleMediaCapture();
            await MediaCapture.InitializeAsync(new ExampleMediaCaptureInitializationSettings
            {
                FaceDetectionAffinity = ExampleMediaCaptureFaceDetectionAffinity.MediaCapturePreview,
            });

            FrameCollection = new ExampleMediaFrameCollection(MediaCapture);
            FrameCollection.CollectionProgressed += FrameCollection_CollectionProgressed;

            CaptureElement.Source        = MediaCapture.PreviewMediaCapture;
            CaptureElement.FlowDirection = MediaCapture.PreviewFlowDirection;
            await MediaCapture.StartPreviewAsync();

            // Open the reader object to subscribe to arriving frames
            // More information:
            // https://docs.microsoft.com/en-us/windows/uwp/audio-video-camera/process-media-frames-with-mediaframereader#create-a-frame-reader-for-the-frame-source
            // ExampleMediaFrameReader interface is based on Windows.Media.Capture.Frames.MultiSourceMediaFrameReader
            // https://docs.microsoft.com/en-us/uwp/api/windows.media.capture.frames.multisourcemediaframereader

            FrameReader = await MediaCapture.CreateFrameReaderAsync();

            FrameReader.AcquisitionMode = MediaFrameReaderAcquisitionMode.Buffered;
            FrameReader.FrameArrived   += FrameReader_FrameArrived;

            // Subscribe to face detection events
            // More information:
            // https://docs.microsoft.com/en-us/windows/uwp/audio-video-camera/scene-analysis-for-media-capture#face-detection-effect
            // ExampleMediaCapture.FaceDetected interface is based on Windows.Media.Core.FaceDetectionEffect.FaceDetected
            // https://docs.microsoft.com/en-us/uwp/api/windows.media.core.facedetectioneffect.facedetected

            if (UseFaceDetection)
            {
                MediaCapture.FaceDetected += MediaCapture_FaceDetected;
            }

            DispatcherTimer = new DispatcherTimer
            {
                Interval = TimeSpan.FromMilliseconds(100)
            };
            DispatcherTimer.Tick += DispatcherTimer_Tick;
            DispatcherTimer.Start();

            FrameReaderStopTrigger = new Timer
            {
                AutoReset = false,
                Interval  = TimeSpan.FromSeconds(3).TotalMilliseconds,
            };
            FrameReaderStopTrigger.Elapsed += FrameReaderStopTrigger_Elapsed;

            if (!UseFaceDetection)
            {
                PreviewOpacity = 1.0;
            }
        }
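The epoch queues created at the top of this example only become useful once something reads them. As one illustration (not from the sample), an approximate frame rate for a single source can be derived from its queued SystemRelativeTime values:

        // Illustrative helper, not part of the sample: estimate frames per second from
        // the timestamps queued for one frame source.
        private static double EstimateFps(ConcurrentQueue<TimeSpan> epochs)
        {
            var snapshot = epochs.ToArray();
            if (snapshot.Length < 2)
            {
                return 0.0;
            }

            var span = snapshot[snapshot.Length - 1] - snapshot[0];
            return span > TimeSpan.Zero ? (snapshot.Length - 1) / span.TotalSeconds : 0.0;
        }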
        public ExampleMediaFrameCollection(ExampleMediaCapture exampleMediaCapture)
        {
            Frames = new ConcurrentQueue <ExampleMediaFrame>();
            exampleMediaCapture.FaceDetected += ExampleMediaCapture_FaceDetected;

            // Process collected frames on a one-second background cadence
            DelayedProcessingTrigger          = new Timer(TriggerProcessing, null, TimeSpan.Zero, TimeSpan.FromSeconds(1));
        }
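TriggerProcessing, the System.Threading.Timer callback wired up in the constructor, is not included here. A sketch of the shape it likely has, with ProcessFrame standing in for whatever the collection does with each dequeued frame:

        // Hypothetical sketch: drain the frames queued since the last tick and hand each
        // one to processing. ProcessFrame is an assumed placeholder, not the sample's API.
        private void TriggerProcessing(object state)
        {
            while (Frames.TryDequeue(out var frame))
            {
                ProcessFrame(frame);
            }
        }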