コード例 #1
0
        /// <summary>
        /// Creates a face detection effect on the preview stream and starts raising
        /// FaceDetected events.
        /// </summary>
        private async Task CreateFaceDetectionEffectAsync()
        {
            // Asynchronous detection keeps the preview smooth; HighPerformance
            // favors detection speed over accuracy.
            var definition = new FaceDetectionEffectDefinition
            {
                SynchronousDetectionEnabled = false,
                DetectionMode = FaceDetectionMode.HighPerformance
            };

            // Attach the effect to the preview stream.
            _faceDetectionEffect = (FaceDetectionEffect)await mediaCapture.AddVideoEffectAsync(definition, MediaStreamType.VideoPreview);

            // Subscribe to detection results.
            _faceDetectionEffect.FaceDetected += FaceDetectionEffect_FaceDetected;

            // 33 ms is the shortest supported interval between detection events.
            _faceDetectionEffect.DesiredDetectionInterval = TimeSpan.FromMilliseconds(33);

            // Begin detecting faces.
            _faceDetectionEffect.Enabled = true;

            //status.Text = "The CreateFaceDetectionEffectAsync has been done...";
            Debug.WriteLine("The CreateFaceDetectionEffectAsync has been done...");
        }
コード例 #2
0
        /// <summary>
        /// Asynchronously start face detection
        /// </summary>
        /// <param name="detectionInterval">Desired interval between detection events, in milliseconds.</param>
        /// <returns>True when detection was started; false when the device does not support it.</returns>
        public async Task <bool> StartFaceDetectionAsync(int detectionInterval)
        {
            // Bail out on hardware/OS combinations without face detection support.
            if (!FaceDetector.IsSupported)
            {
                Debug.WriteLine("Face detection is not supported on this device");
                return false;
            }

            // Lazily attach the effect to the preview stream on first use.
            if (FaceDetectionEffect == null)
            {
                // HighQuality trades detection speed for accuracy; asynchronous
                // detection avoids delaying preview samples.
                var definition = new FaceDetectionEffectDefinition
                {
                    DetectionMode = FaceDetectionMode.HighQuality,
                    SynchronousDetectionEnabled = false
                };
                FaceDetectionEffect = (FaceDetectionEffect)await MediaCapture.AddVideoEffectAsync(definition, MediaStreamType.VideoPreview);
            }

            FaceDetectionEffect.DesiredDetectionInterval = TimeSpan.FromMilliseconds(detectionInterval);
            FaceDetectionEffect.Enabled = true;
            return true;
        }
コード例 #3
0
        /// <summary>
        /// Initializes media capture, adds video stabilization to the record stream,
        /// prepares an MP4 encoding profile with rotation metadata, and attaches a
        /// face detection effect to the preview stream.
        /// </summary>
        /// <remarks>
        /// FIX: changed from "async void" to "async Task" — async void methods
        /// swallow exceptions and cannot be awaited; existing fire-and-forget call
        /// sites still compile.
        /// </remarks>
        private async Task InitMediaCapture()
        {
            mediaCapture = new Windows.Media.Capture.MediaCapture();
            await mediaCapture.InitializeAsync(captureInitSettings);

            // Stabilize the record stream.
            Windows.Media.Effects.VideoEffectDefinition def = new Windows.Media.Effects.VideoEffectDefinition(Windows.Media.VideoEffects.VideoStabilization);
            await mediaCapture.AddVideoEffectAsync(def, MediaStreamType.VideoRecord);

            // MP4/QVGA profile; the GUID is the MF_MT_VIDEO_ROTATION attribute key
            // that carries rotation metadata in the encoded stream.
            profile = Windows.Media.MediaProperties.MediaEncodingProfile.CreateMp4(Windows.Media.MediaProperties.VideoEncodingQuality.Qvga);
            System.Guid MFVideoRotationGuild = new System.Guid("C380465D-2271-428C-9B83-ECEA3B4A85C1");
            int MFVideoRotation = ConvertVideoRotationToMFRotation(VideoRotation.None);
            profile.Video.Properties.Add(MFVideoRotationGuild, PropertyValue.CreateInt32(MFVideoRotation));

            var transcoder = new Windows.Media.Transcoding.MediaTranscoder();
            transcoder.AddVideoEffect(Windows.Media.VideoEffects.VideoStabilization);

            capturePreview.Source = mediaCapture;
            DisplayInformation.AutoRotationPreferences = DisplayOrientations.None;

            // Face detection on the preview stream: asynchronous so the preview
            // stays smooth, HighPerformance favors speed over accuracy.
            var definition = new FaceDetectionEffectDefinition();
            definition.SynchronousDetectionEnabled = false;
            definition.DetectionMode = FaceDetectionMode.HighPerformance;
            faceDetectionEffect = (FaceDetectionEffect)await mediaCapture.AddVideoEffectAsync(definition, MediaStreamType.VideoPreview);

            // 33 ms is the shortest supported interval between detection events.
            faceDetectionEffect.DesiredDetectionInterval = TimeSpan.FromMilliseconds(33);
            faceDetectionEffect.Enabled = true;
            faceDetectionEffect.FaceDetected += FaceDetectionEffect_FaceDetected;
        }
コード例 #4
0
        /// <summary>
        /// Initializes face detection on the preview stream, from https://docs.microsoft.com/en-us/windows/uwp/audio-video-camera/scene-analysis-for-media-capture
        /// </summary>
        /// <returns></returns>
        public async Task InitializeFaceDetection()
        {
            // Lazily create the Cognitive Services face client used for recognition.
            if (FaceService is null)
            {
                FaceService = await FaceServiceHelper.CreateNewFaceServiceAsync();
            }

            // Asynchronous detection keeps the preview smooth; HighPerformance
            // favors detection speed over accuracy.
            var definition = new FaceDetectionEffectDefinition
            {
                SynchronousDetectionEnabled = false,
                DetectionMode = FaceDetectionMode.HighPerformance
            };

            // Attach the effect to the preview stream.
            _faceDetectionEffect = (FaceDetectionEffect)await _mediaCapture.AddVideoEffectAsync(definition, MediaStreamType.VideoPreview);

            // TODO: change to a good frequency to save Cognitive Services API calls.
            // Faces are currently detected every 3 seconds (the shortest supported
            // interval would be 33 ms).
            _faceDetectionEffect.DesiredDetectionInterval = TimeSpan.FromMilliseconds(3000);

            // Begin detecting faces and subscribe to results.
            _faceDetectionEffect.Enabled = true;
            _faceDetectionEffect.FaceDetected += FaceDetectionEffect_FaceDetected;

            _isDetecting = true;
        }
コード例 #5
0
        /// <summary>
        /// Initializes the camera and the local face detector.
        /// </summary>
        private async Task Init()
        {
            MC = new MediaCapture();

            // Use the first available video capture device.
            var cameras = await DeviceInformation.FindAllAsync(DeviceClass.VideoCapture);
            var settings = new MediaCaptureInitializationSettings()
            {
                VideoDeviceId = cameras.First().Id
            };
            await MC.InitializeAsync(settings);

            ViewFinder.Source = MC;

            // Attach asynchronous, speed-optimized face detection to the preview stream.
            var def = new FaceDetectionEffectDefinition
            {
                SynchronousDetectionEnabled = false,
                DetectionMode = FaceDetectionMode.HighPerformance
            };
            FaceDetector = (FaceDetectionEffect)(await MC.AddVideoEffectAsync(def, MediaStreamType.VideoPreview));
            FaceDetector.FaceDetected += FaceDetectedEvent;
            FaceDetector.DesiredDetectionInterval = TimeSpan.FromMilliseconds(100);
            FaceDetector.Enabled = true;

            await MC.StartPreviewAsync();

            // Remember the preview resolution for mapping face boxes to screen coordinates.
            VideoProps = MC.VideoDeviceController.GetMediaStreamProperties(MediaStreamType.VideoPreview) as VideoEncodingProperties;
        }
コード例 #6
0
        /// <summary>
        /// Adds face detection to the preview stream, registers for its events, enables it, and gets the FaceDetectionEffect instance
        /// </summary>
        private async Task CreateFaceDetectionEffectAsync()
        {
            // Make sure the camera pipeline exists before adding effects to it.
            await InitializeCameraAsync();

            // Asynchronous detection keeps the preview smooth; Balanced trades off
            // detection speed against accuracy.
            var definition = new FaceDetectionEffectDefinition
            {
                SynchronousDetectionEnabled = false,
                DetectionMode = FaceDetectionMode.Balanced
            };

            // Attach the effect to the preview stream and subscribe to results.
            _faceDetectionEffect = (FaceDetectionEffect)await _mediaCapture.AddVideoEffectAsync(definition, MediaStreamType.VideoPreview);
            _faceDetectionEffect.FaceDetected += FaceDetectionEffect_FaceDetected;

            // 33 ms is the shortest supported interval between detection events.
            _faceDetectionEffect.DesiredDetectionInterval = TimeSpan.FromMilliseconds(33);

            // Begin detecting faces.
            _faceDetectionEffect.Enabled = true;
        }
コード例 #7
0
        /// <summary>
        /// Initializes the camera.
        /// Will raise `CameraInit*` events.
        /// </summary>
        /// <param name="previewControlSize">Size of the preview control, stored for later use.</param>
        /// <returns>Task.</returns>
        public async Task InitializeCameraAsync(Size previewControlSize)
        {
            // Set ui-related values.
            this.previewControlSize = previewControlSize;

            // Ensure that the media capture hasn't been initialized yet.
            if (MediaCapture != null)
            {
                return;
            }

            // Get all camera devices.
            var devices = await DeviceInformation.FindAllAsync(DeviceClass.VideoCapture);

            // Ensure there has been exactly one camera found.
            if (devices.Count != 1)
            {
                IsFaceDetectionControlAvailable = false;
                CameraInitFailed(this, new MessageEventArgs("No or more than one camera found. No face detection available."));

                // FIX: bail out when no camera exists at all — the code below reads
                // devices[0] and previously threw when the list was empty. With
                // multiple cameras we keep the original behavior of continuing
                // with the first device.
                if (devices.Count == 0)
                {
                    return;
                }
            }

            // Create new media capture instance.
            MediaCapture = new MediaCapture();

            // Setup callbacks.
            MediaCapture.Failed += MediaCapture_Failed;

            // Init the actual capturing.
            var settings = new MediaCaptureInitializationSettings {
                VideoDeviceId = devices[0].Id
            };
            await MediaCapture.InitializeAsync(settings);

            // Updated preview properties from mediaCapture.
            previewProperties = MediaCapture
                                .VideoDeviceController
                                .GetMediaStreamProperties(MediaStreamType.VideoPreview)
                                as VideoEncodingProperties;

            // Setup face detection: asynchronous so the preview stays smooth,
            // HighPerformance favors detection speed over accuracy.
            var definition = new FaceDetectionEffectDefinition
            {
                SynchronousDetectionEnabled = false,
                DetectionMode = FaceDetectionMode.HighPerformance
            };

            faceDetectionEffect = (FaceDetectionEffect)await MediaCapture.AddVideoEffectAsync(definition, MediaStreamType.VideoPreview);

            // 33 ms is the shortest supported interval between detection events.
            faceDetectionEffect.DesiredDetectionInterval = TimeSpan.FromMilliseconds(33);
            faceDetectionEffect.FaceDetected            += FaceDetectionEffect_FaceDetected;

            // Operation was successful.
            IsFaceDetectionControlAvailable = true;
            CameraInitSucceeded(this, new MessageEventArgs("Face detection is now available."));
        }
コード例 #8
0
        /// <summary>
        /// Click handler: attaches an asynchronous, speed-optimized face detection
        /// effect to the preview stream and starts detection.
        /// </summary>
        private async void btnDetectFaces_Click(object sender, RoutedEventArgs e)
        {
            var definition = new FaceDetectionEffectDefinition
            {
                SynchronousDetectionEnabled = false,
                DetectionMode = FaceDetectionMode.HighPerformance
            };

            _faceDetectionEffect = (FaceDetectionEffect)await _mediaCapture.AddVideoEffectAsync(definition, MediaStreamType.VideoPreview);
            _faceDetectionEffect.FaceDetected += FaceDetectionEffect_FaceDetected;

            // 33 ms is the shortest supported interval between detection events.
            _faceDetectionEffect.DesiredDetectionInterval = TimeSpan.FromMilliseconds(33);
            _faceDetectionEffect.Enabled = true;
        }
コード例 #9
0
        /// <summary>
        /// Attaches an asynchronous, balanced-mode face detection effect to the
        /// preview stream and starts detection.
        /// </summary>
        /// <remarks>
        /// FIX: changed "async void" to "async Task" — async void methods swallow
        /// exceptions and cannot be awaited; fire-and-forget call sites still compile.
        /// </remarks>
        private async Task FaceDetect()
        {
            var faceDetectionDefinition = new FaceDetectionEffectDefinition();

            // Balanced trades off speed against accuracy; asynchronous detection
            // avoids delaying preview samples.
            faceDetectionDefinition.DetectionMode = FaceDetectionMode.Balanced;
            faceDetectionDefinition.SynchronousDetectionEnabled = false;
            _faceDetectionEffect = (FaceDetectionEffect)await _mediaCapture.AddVideoEffectAsync(faceDetectionDefinition, MediaStreamType.VideoPreview);

            _faceDetectionEffect.FaceDetected += FaceDetectionEffect_FaceDetected;
            // 33 ms is the shortest supported interval between detection events.
            _faceDetectionEffect.DesiredDetectionInterval = TimeSpan.FromMilliseconds(33);
            _faceDetectionEffect.Enabled = true;
        }
コード例 #10
0
        /// <summary>
        /// Attaches an asynchronous, balanced-mode face detection effect to the
        /// preview stream and starts raising FaceDetected events.
        /// </summary>
        private async Task CreateFaceDetectionEffectAsync()
        {
            var definition = new FaceDetectionEffectDefinition
            {
                SynchronousDetectionEnabled = false,
                DetectionMode = FaceDetectionMode.Balanced
            };

            faceDetectionEffect = (FaceDetectionEffect)await mediaCapture.AddVideoEffectAsync(definition, MediaStreamType.VideoPreview);
            faceDetectionEffect.FaceDetected += FaceDetectionEffect_FaceDetected;

            // 66 ms between detection events (the minimum supported is 33 ms).
            faceDetectionEffect.DesiredDetectionInterval = TimeSpan.FromMilliseconds(66);

            faceDetectionEffect.Enabled = true;
        }
コード例 #11
0
        /// <summary>
        /// Creates an asynchronous, speed-optimized face detection effect on the
        /// preview stream, enables it, and subscribes to FaceDetected.
        /// </summary>
        /// <remarks>
        /// FIX: changed "async void" to "async Task" so exceptions are observable
        /// and callers may await completion; fire-and-forget call sites still compile.
        /// </remarks>
        private async Task FaceDetectionInitialization()
        {
            var definition = new FaceDetectionEffectDefinition()
            {
                SynchronousDetectionEnabled = false,
                DetectionMode = FaceDetectionMode.HighPerformance
            };

            faceDetectionEffect = (FaceDetectionEffect)await capture.AddVideoEffectAsync(definition, MediaStreamType.VideoPreview);

            // Chooses the shortest supported interval between detection events (33 ms).
            faceDetectionEffect.DesiredDetectionInterval = TimeSpan.FromMilliseconds(33);
            faceDetectionEffect.Enabled = true;
            faceDetectionEffect.FaceDetected += FaceDetected;
        }
コード例 #12
0
        /// <summary>
        /// Creates an asynchronous, speed-optimized face detection effect on the
        /// preview stream, enables it, and subscribes to FaceDetected.
        /// </summary>
        /// <remarks>
        /// FIX: changed "async void" to "async Task" — async void hides exceptions
        /// and cannot be awaited; fire-and-forget call sites still compile.
        /// </remarks>
        private async Task InitializeFaceDetection()
        {
            var definition = new FaceDetectionEffectDefinition();

            definition.SynchronousDetectionEnabled = false;
            definition.DetectionMode = FaceDetectionMode.HighPerformance;
            _faceDetectionEffect = (FaceDetectionEffect)await _mediaCapture.AddVideoEffectAsync(definition, MediaStreamType.VideoPreview);

            // 33 ms is the shortest supported interval between detection events.
            _faceDetectionEffect.DesiredDetectionInterval = TimeSpan.FromMilliseconds(33);

            _faceDetectionEffect.Enabled = true;

            _faceDetectionEffect.FaceDetected += _faceDetectionEffect_FaceDetected;
        }
コード例 #13
0
        /// <summary>
        /// Attaches a new face detection effect to the preview stream, wires its
        /// event, disables any previously stored effect, then stores and returns the
        /// new one (still disabled — the caller decides when to enable it).
        /// </summary>
        private async Task <FaceDetectionEffect> SetupFaceDetection()
        {
            var definition = new FaceDetectionEffectDefinition
            {
                SynchronousDetectionEnabled = false,
                DetectionMode = FaceDetectionMode.HighPerformance,
            };

            var effect = await DefaultManager.AddVideoEffectAsync(definition, MediaStreamType.VideoPreview) as FaceDetectionEffect;
            effect.FaceDetected += FaceDetection_FaceDetected;
            // 33 ms is the shortest supported interval between detection events.
            effect.DesiredDetectionInterval = TimeSpan.FromMilliseconds(33);

            // Turn off the previously created effect, if any, before replacing it.
            if (FaceDetection != null)
            {
                FaceDetection.Enabled = false;
            }

            FaceDetection = effect;
            return FaceDetection;
        }
コード例 #14
0
        /// <summary>
        /// Detaches any previous face detection effect and attaches a fresh one to
        /// the preview stream. NOTE(review): the new effect is not enabled here —
        /// presumably a caller enables it; confirm.
        /// </summary>
        public async Task InitAsync()
        {
            // Unhook the previous effect so it stops firing before being replaced.
            if (currentEffect != null)
            {
                currentEffect.Enabled = false;
                currentEffect.FaceDetected -= FaceDetection_FaceDetected;
            }

            var definition = new FaceDetectionEffectDefinition
            {
                SynchronousDetectionEnabled = false,
                DetectionMode = FaceDetectionMode.HighPerformance,
            };

            currentEffect = await defaultManager.AddVideoEffectAsync(definition, MediaStreamType.VideoPreview) as FaceDetectionEffect;
            currentEffect.DesiredDetectionInterval = TimeSpan.FromMilliseconds(150);
            currentEffect.FaceDetected += FaceDetection_FaceDetected;
        }
コード例 #15
0
        /// <summary>
        /// Start Face detection
        /// </summary>
        /// <remarks>
        /// NOTE(review): the effect's Enabled flag is never set here despite the
        /// summary — presumably detection is switched on elsewhere; confirm.
        /// </remarks>
        /// <returns></returns>
        private async Task InitFaceTrackerAsync()
        {
            // Asynchronous detection keeps the preview smooth; HighPerformance
            // favors detection speed over accuracy.
            var definition = new FaceDetectionEffectDefinition
            {
                SynchronousDetectionEnabled = false,
                DetectionMode = FaceDetectionMode.HighPerformance
            };

            // Attach the effect to the preview stream.
            _faceDetectionEffect = (FaceDetectionEffect)await VideoCapture.AddVideoEffectAsync(definition, MediaStreamType.VideoPreview);

            // 33 ms is the shortest supported interval between detection events.
            _faceDetectionEffect.DesiredDetectionInterval = TimeSpan.FromMilliseconds(33);

            // Subscribe to detection results.
            _faceDetectionEffect.FaceDetected += FaceDetectionEffect_FaceDetected;

            Status = "Face tracker sucessfully initialized!";
        }
コード例 #16
0
ファイル: MainPage.xaml.cs プロジェクト: vaclavpetr/Blog
        /// <summary>
        /// Initializes the front camera with a speed-optimized face detection effect
        /// and starts the preview; any failure is surfaced through Messages.
        /// </summary>
        private async Task InitMediaDevice()
        {
            try
            {
                // Nothing to do on devices without face detection support.
                if (!FaceDetector.IsSupported)
                {
                    return;
                }

                // Open the front-panel camera.
                var cameraDevice = await CameraActions.FindCameraDeviceByPanelAsync(Panel.Front);
                var settings = new MediaCaptureInitializationSettings
                {
                    VideoDeviceId = cameraDevice.Id
                };
                _mediaCapture = new MediaCapture();
                await _mediaCapture.InitializeAsync(settings);

                // Asynchronous, speed-optimized detection on the preview stream.
                var definition = new FaceDetectionEffectDefinition
                {
                    SynchronousDetectionEnabled = false,
                    DetectionMode = FaceDetectionMode.HighPerformance
                };
                var faceDetectionEffect = (FaceDetectionEffect)await _mediaCapture.AddVideoEffectAsync(definition, MediaStreamType.VideoPreview);

                // 33 ms is the shortest supported interval between detection events.
                faceDetectionEffect.DesiredDetectionInterval = TimeSpan.FromMilliseconds(33);
                faceDetectionEffect.Enabled = true;
                faceDetectionEffect.FaceDetected += FaceDetectionEffect_FaceDetected;

                // Show the stream and start previewing.
                CameraViewer.Source = _mediaCapture;
                await _mediaCapture.StartPreviewAsync();

                _previewProperties = _mediaCapture.VideoDeviceController.GetMediaStreamProperties(MediaStreamType.VideoPreview);
            }
            catch (Exception exception)
            {
                // Surface the failure to the UI instead of crashing.
                Messages = exception.ToString();
            }
        }
コード例 #17
0
        /// <summary>
        /// Attaches an asynchronous, speed-optimized face detection effect to the
        /// preview stream, subscribes to its events, and starts detection.
        /// </summary>
        private async Task CreateFaceDetectionEffectAsync()
        {
            var definition = new FaceDetectionEffectDefinition
            {
                // To ensure preview smoothness, do not delay incoming samples.
                SynchronousDetectionEnabled = false,
                // Choose detection speed over accuracy.
                DetectionMode = FaceDetectionMode.HighPerformance
            };

            // Attach the effect to the preview stream and subscribe to results.
            _faceDetectionEffect = (FaceDetectionEffect)await _mediaCapture.AddVideoEffectAsync(definition, MediaStreamType.VideoPreview);
            _faceDetectionEffect.FaceDetected += HandleFaceDetectionEffectFaceDetected;

            // Raise detection events every 200 ms (minimum supported is 33 ms).
            _faceDetectionEffect.DesiredDetectionInterval = TimeSpan.FromMilliseconds(200);

            // Begin detecting faces.
            _faceDetectionEffect.Enabled = true;
        }
コード例 #18
0
ファイル: MainPage.xaml.cs プロジェクト: MechaniUm/RoboShell
        /// <summary>
        /// Long-running initialization: speaks the local IP address, loads the rule
        /// engine knowledge base (fresh copy from GitHub when reachable, otherwise
        /// the bundled local one), wires up timers/GPIO, and starts face detection
        /// on the camera preview stream.
        /// </summary>
        private async Task InitLongRunning()
        {
            var spk = new UWPLocalSpeaker(media, Windows.Media.SpeechSynthesis.VoiceGender.Female);

            string localIp = GetLocalIp();

            // Fall back to loopback when no usable local address was found.
            if (localIp == null)
            {
                localIp = "127.0.0.1";
            }
            if (localIp == "")
            {
                localIp = "127.0.0.1";
            }
            // Announce the device address via speech (text is Russian).
            spk.Speak($"мой адрес не дом и не улица, мой адрес {localIp} и точка");
            CoreWindow.GetForCurrentThread().KeyDown += KeyPressed;
            Log.Trace("BEFORE receive actual kb");

            // Try to fetch the current knowledge base from GitHub; any failure
            // (network error or non-success status) falls back to the local,
            // non-git copy and flags offline mode.
            try {
                HttpResponseMessage httpResponseMessage = await httpClient.GetAsync("https://github.com/");

                if (httpResponseMessage.IsSuccessStatusCode)
                {
                    byte[] git_kb = await httpClient.GetByteArrayAsync(Config.GitKBUrl);

                    // Cache the downloaded KB in local storage, replacing any old copy.
                    StorageFolder storageFolder = ApplicationData.Current.LocalFolder;
                    StorageFile   sampleFile    =
                        await storageFolder.CreateFileAsync(Config.GitKBFileName, CreationCollisionOption.ReplaceExisting);

                    await Windows.Storage.FileIO.WriteBytesAsync(sampleFile, git_kb);

                    RE = BracketedRuleEngine.LoadBracketedKb(sampleFile);
                    Log.Trace("Using actual git's config version");
                }
                else
                {
                    //try {
                    //    StorageFolder storageFolder = ApplicationData.Current.LocalFolder;
                    //    StorageFile sampleFile = await storageFolder.GetFileAsync(Config.GitKBFileName);
                    //    RE = BracketedRuleEngine.LoadBracketedKb(sampleFile);
                    //    Log.Trace("Using local git's config version");
                    //    offline = true;
                    //}
                    //catch (Exception) {
                    RE = BracketedRuleEngine.LoadBracketedKb(Config.KBFileName);
                    Log.Trace("Using local nongit config version");
                    offline = true;
                    //}
                }
            }
            catch (Exception)
            {
                //try
                //{
                //    StorageFolder storageFolder = ApplicationData.Current.LocalFolder;
                //    StorageFile sampleFile = await storageFolder.GetFileAsync(Config.GitKBFileName);
                //    RE = BracketedRuleEngine.LoadBracketedKb(sampleFile);
                //    Log.Trace("Using local git's config version");
                //    offline = true;
                //}
                //catch (Exception)
                //{
                RE = BracketedRuleEngine.LoadBracketedKb(Config.KBFileName);
                Log.Trace("Using local nongit config version");
                offline = true;
                //}
            }
            Log.Trace("AFTER receive actual kb");


            // Wire the rule engine to its collaborators and hook up the dialog timers.
            RE.SetSpeaker(spk);
            RE.Initialize();
            RE.SetExecutor(ExExecutor);
            FaceWaitTimer.Tick   += StartDialog;
            DropoutTimer.Tick    += FaceDropout;
            PreDropoutTimer.Tick += PreDropout;
            InferenceTimer.Tick  += InferenceStep;
            InitGpio();
            // Arduino polling is only started when GPIO initialization succeeded.
            if (gpio != null)
            {
                ArduinoInputTimer.Tick += ArduinoInput;
                ArduinoInputTimer.Start();
            }
            yesNoCancelGPIO.Execute(RE.State);
            media.MediaEnded += EndSpeech;

            // Create face detection: asynchronous, speed-optimized, on the preview
            // stream, firing every 100 ms.
            var def = new FaceDetectionEffectDefinition();

            def.SynchronousDetectionEnabled = false;
            def.DetectionMode                     = FaceDetectionMode.HighPerformance;
            FaceDetector                          = (FaceDetectionEffect)(await MC.AddVideoEffectAsync(def, MediaStreamType.VideoPreview));
            FaceDetector.FaceDetected            += FaceDetectedEvent;
            FaceDetector.DesiredDetectionInterval = TimeSpan.FromMilliseconds(100);
            LogLib.Log.Trace("Ready to start face recognition");
            await MC.StartPreviewAsync();

            LogLib.Log.Trace("Face Recognition Started");
            // The preview resolution is kept for mapping face boxes to screen coordinates.
            var props = MC.VideoDeviceController.GetMediaStreamProperties(MediaStreamType.VideoPreview);

            VideoProps           = props as VideoEncodingProperties;
            FaceDetector.Enabled = true;

            InferenceTimer.Start();
        }
コード例 #19
0
        /// <summary>
        /// Opens the configured camera with exclusive control plus three additional
        /// shared-read-only captures of the same device, selects the widest photo
        /// resolution, attaches face detection to the preview stream, and starts
        /// all four previews.
        /// </summary>
        private async Task StartPreviewAsync()
        {
            try
            {
                var devices = await DeviceInformation.FindAllAsync(DeviceClass.VideoCapture);

                // Pick the camera whose name matches the configured key; an empty
                // device id is passed through when no match is found.
                var    deviceList = devices.ToList();
                var    device     = devices.FirstOrDefault(x => x.Name.Contains(settings.CameraKey));
                string deviceId   = device == null ? "" : device.Id;


                // Primary capture takes exclusive control so it can change settings.
                mediaCapture = new MediaCapture();
                await mediaCapture.InitializeAsync(new MediaCaptureInitializationSettings()
                {
                    SharingMode = MediaCaptureSharingMode.ExclusiveControl, VideoDeviceId = deviceId
                });

                // Choose the widest "Video"-type entry among the photo-stream properties.
                var resolutions = mediaCapture.VideoDeviceController.GetAvailableMediaStreamProperties(MediaStreamType.Photo).ToList();

                Windows.Media.MediaProperties.VideoEncodingProperties reslution = (Windows.Media.MediaProperties.VideoEncodingProperties)resolutions.Where(x => x.Type == "Video").OrderByDescending(x => ((Windows.Media.MediaProperties.VideoEncodingProperties)x).Width).FirstOrDefault();


                // set used resolution
                await mediaCapture.VideoDeviceController.SetMediaStreamPropertiesAsync(MediaStreamType.Photo, reslution);

                // Secondary captures open the same device in shared read-only mode
                // to feed the other preview surfaces.
                mediaCapture2 = new MediaCapture();
                await mediaCapture2.InitializeAsync(new MediaCaptureInitializationSettings()
                {
                    SharingMode = MediaCaptureSharingMode.SharedReadOnly, VideoDeviceId = deviceId
                });

                mediaCapture3 = new MediaCapture();
                await mediaCapture3.InitializeAsync(new MediaCaptureInitializationSettings()
                {
                    SharingMode = MediaCaptureSharingMode.SharedReadOnly, VideoDeviceId = deviceId
                });

                mediaCapture4 = new MediaCapture();
                await mediaCapture4.InitializeAsync(new MediaCaptureInitializationSettings()
                {
                    SharingMode = MediaCaptureSharingMode.SharedReadOnly, VideoDeviceId = deviceId
                });

                // Keep the display active and lock orientation while previewing.
                displayRequest.RequestActive();
                DisplayInformation.AutoRotationPreferences = DisplayOrientations.Landscape;

                // Create the definition, which will contain some initialization settings
                var definition = new FaceDetectionEffectDefinition();

                // To ensure preview smoothness, do not delay incoming samples
                definition.SynchronousDetectionEnabled = false;

                // In this scenario, choose detection speed over accuracy
                definition.DetectionMode = FaceDetectionMode.HighPerformance;

                // Add the effect to the preview stream
                _faceDetectionEffect = (FaceDetectionEffect)await mediaCapture.AddVideoEffectAsync(definition, MediaStreamType.VideoPreview);

                // Raise detection events every 300 ms (the minimum supported is 33 ms)
                _faceDetectionEffect.DesiredDetectionInterval = TimeSpan.FromMilliseconds(300);

                // Start detecting faces
                _faceDetectionEffect.Enabled = true;

                // Register for face detection events
                // NOTE(review): the stray second semicolon below is a harmless empty statement.
                _faceDetectionEffect.FaceDetected += _faceDetectionEffect_FaceDetectedAsync;;
            }
            catch (Exception)
            {
                // This will be thrown if the user denied access to the camera in privacy settings
                Console.Write("The app was denided access to the camera");
                return;
            }

            try
            {
                // Route each capture to its preview surface and start all previews.
                captionsControl.MainCapture.Source = mediaCapture;
                speechControl.MainCapture.Source   = mediaCapture2;
                tagsControl.MainCapture.Source     = mediaCapture3;
                captureBottomRight.Source          = mediaCapture4;
                await mediaCapture.StartPreviewAsync();

                await mediaCapture2.StartPreviewAsync();

                await mediaCapture3.StartPreviewAsync();

                await mediaCapture4.StartPreviewAsync();
            }
            catch (Exception)
            {
                // Best-effort: preview failures are deliberately ignored here.
                //mediaCapture.CaptureDeviceExclusiveControlStatusChanged += MediaCapture_CaptureDeviceExclusiveControlStatusChanged; ;
            }
        }
コード例 #20
0
ファイル: MainPage.xaml.cs プロジェクト: bschapendonk/ddssl
        /// <summary>
        /// Initializes the camera, attaches a face detection effect whose handler
        /// draws red rectangles over detected faces, and starts the preview.
        /// </summary>
        private async Task StartPreviewAsync()
        {
            try
            {
                mediaCapture = new MediaCapture();
                await mediaCapture.InitializeAsync();

                // Asynchronous detection keeps the preview smooth; HighPerformance
                // favors detection speed over accuracy.
                var faceDetectionDefinition = new FaceDetectionEffectDefinition();
                faceDetectionDefinition.DetectionMode = FaceDetectionMode.HighPerformance;
                faceDetectionDefinition.SynchronousDetectionEnabled = false;
                _faceDetectionEffect = (FaceDetectionEffect)await mediaCapture.AddVideoEffectAsync(faceDetectionDefinition, MediaStreamType.VideoPreview);

                // Redraw the face-highlight rectangles on the UI thread each time
                // a detection result arrives.
                _faceDetectionEffect.FaceDetected += async(s, a) =>
                {
                    var detectedFaces = a.ResultFrame.DetectedFaces;
                    await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
                    {
                        FaceCanvas.Children.Clear();

                        for (int i = 0; i < detectedFaces.Count; i++)
                        {
                            var face       = detectedFaces[i];
                            var faceBounds = face.FaceBox;

                            // Map the face box from preview coordinates to canvas coordinates.
                            var faceHighlightRectangle    = MapRectangleToDetectedFace(detectedFaces[i].FaceBox);
                            faceHighlightRectangle.Stroke = new SolidColorBrush(Colors.Red);

                            faceHighlightRectangle.StrokeThickness = 2;

                            FaceCanvas.Children.Add(faceHighlightRectangle);
                        }
                    });
                };
                // 33 ms is the shortest supported interval between detection events.
                _faceDetectionEffect.DesiredDetectionInterval = TimeSpan.FromMilliseconds(33);
                _faceDetectionEffect.Enabled = true;

                // Keep the display active and lock orientation while previewing.
                displayRequest.RequestActive();
                DisplayInformation.AutoRotationPreferences = DisplayOrientations.Landscape;
            }
            catch (UnauthorizedAccessException)
            {
                // Thrown when the user denied camera access in privacy settings.
                await ShowMessageToUser("The app was denied access to the camera");

                return;
            }

            try
            {
                PreviewControl.Source = mediaCapture;

                await mediaCapture.StartPreviewAsync();

                timer.Start();
                _previewProperties = mediaCapture.VideoDeviceController.GetMediaStreamProperties(MediaStreamType.VideoPreview);
                isPreviewing       = true;
            }
            catch (FileLoadException)
            {
                // Another app holds exclusive control of the camera; watch for the
                // status to change so the preview can be retried.
                mediaCapture.CaptureDeviceExclusiveControlStatusChanged += _mediaCapture_CaptureDeviceExclusiveControlStatusChanged;
            }
        }
コード例 #21
0
ファイル: Camera.cs プロジェクト: Chaoyun1234/UWP-experiences
        /// <summary>
        /// Initializes media capture for the given camera: selects the highest photo
        /// resolution, mirrors the preview for front-facing cameras (except on Xbox),
        /// starts the preview, and attaches a face detection effect to the preview
        /// stream. Does nothing when capture has already been created.
        /// </summary>
        /// <param name="cameraDevice">The camera device to initialize with.</param>
        private async Task InitializeWithCameraDevice(DeviceInformation cameraDevice)
        {
            if (_mediaCapture == null)
            {
                _cameraDevice = cameraDevice;
                _mediaCapture = new MediaCapture();

                var settings = new MediaCaptureInitializationSettings {
                    VideoDeviceId = _cameraDevice.Id
                };

                try
                {
                    await _mediaCapture.InitializeAsync(settings);

                    // FIX: removed the unused "var b = MediaCapture.IsVideoProfileSupported(...)"
                    // probe — its result was never read.

                    // Pick the highest-resolution photo stream available.
                    var resolutions = _mediaCapture.VideoDeviceController.GetAvailableMediaStreamProperties(MediaStreamType.Photo).Select(x => x as VideoEncodingProperties);
                    var resolution  = resolutions.Where(r => r != null).OrderByDescending(r => r.Height * r.Width).FirstOrDefault();

                    if (resolution != null)
                    {
                        await _mediaCapture.VideoDeviceController.SetMediaStreamPropertiesAsync(MediaStreamType.Photo, resolution);
                    }
                }
                catch
                {
                    // Initialization failed (e.g. camera access denied): release and bail out.
                    _mediaCapture.Dispose();
                    _mediaCapture = null;
                    return;
                }

                // Mirror the preview for front-facing (or unknown-location) cameras,
                // except on Xbox.
                if (!App.IsXbox() && (_cameraDevice.EnclosureLocation == null || _cameraDevice.EnclosureLocation.Panel == Windows.Devices.Enumeration.Panel.Front))
                {
                    _captureElement.FlowDirection = FlowDirection.RightToLeft;
                }
                else
                {
                    _captureElement.FlowDirection = FlowDirection.LeftToRight;
                }

                _captureElement.Source = _mediaCapture;

                await _mediaCapture.StartPreviewAsync();

                await SetPreviewRotationAsync();

                // Face detection with the definition's default settings.
                // definition.SynchronousDetectionEnabled = false;
                // definition.DetectionMode = FaceDetectionMode.HighPerformance;
                var definition = new FaceDetectionEffectDefinition();

                _faceDetectionEffect = (await _mediaCapture.AddVideoEffectAsync(definition, MediaStreamType.VideoPreview)) as FaceDetectionEffect;
                _faceDetectionEffect.FaceDetected += FaceDetectionEffect_FaceDetected;

                // Raise detection events every 100 ms (minimum supported is 33 ms).
                _faceDetectionEffect.DesiredDetectionInterval = TimeSpan.FromMilliseconds(100);
                _faceDetectionEffect.Enabled = true;

                _initialized = true;
            }
        }