Example #1
        private void FaceDetection_FaceDetected(FaceDetectionEffect sender, FaceDetectedEventArgs args)
        {
            DispatcherWrapper.Current().Dispatch(() =>
            {
                var properties = DefaultManager.VideoDeviceController.GetMediaStreamProperties(MediaStreamType.VideoPreview) as VideoEncodingProperties;
                if (properties == null || properties.Width == 0 || properties.Height == 0)
                {
                    return;
                }

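                // Size the canvas to the preview resolution so the FaceBox pixel coordinates map 1:1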
                var canvas    = FacesCanvas.Child as Canvas;
                canvas.Height = properties.Height;
                canvas.Width  = properties.Width;
                FaceDetected?.Invoke(sender, args);
                canvas.Children.Clear();

                foreach (var face in args.ResultFrame.DetectedFaces.Where(x => x.FaceBox.Width != 0 && x.FaceBox.Height != 0))
                {
                    var box = new Rectangle
                    {
                        Height          = face.FaceBox.Height,
                        Width           = face.FaceBox.Width,
                        Stroke          = FacesBoxColor.ToSolidColorBrush(),
                        StrokeThickness = 2,
                    };
                    Canvas.SetLeft(box, face.FaceBox.X);
                    Canvas.SetTop(box, face.FaceBox.Y);
                    canvas.Children.Add(box);
                }
                FaceCountChanged?.Invoke(this, canvas.Children.Count());
            });
        }
        private async Task CloseCamera()
        {
            if (faceDetectionEffect != null && faceDetectionEffect.Enabled)
            {
                faceDetectionEffect.Enabled       = false;
                faceDetectionEffect.FaceDetected -= FaceDetection_FaceDetected;
                await capture.ClearEffectsAsync(MediaStreamType.VideoPreview);

                faceDetectionEffect = null;
                facesCanvas.Children.Clear();
            }

            if (capture != null)
            {
                if (isPreviewing)
                {
                    await capture.StopPreviewAsync();

                    isPreviewing = false;
                }
                capture.Dispose();
                capture = null;
            }

            //if (photoFile != null)
            //{
            //    await photoFile.DeleteAsync(StorageDeleteOption.PermanentDelete);
            //    photoFile = null;
            //}
        }
Example #3
        private async Task CleanUp()
        {
            if (_faceDetectionEffect != null)
            {
                _faceDetectionEffect.FaceDetected -= FaceDetectionEffect_FaceDetected;
                _faceDetectionEffect = null;
            }

            if (_orientationSensor != null)
            {
                _orientationSensor.OrientationChanged -= OrientationSensor_OrientationChanged;
            }

            if (_displayInformation != null)
            {
                _displayInformation.OrientationChanged -= DisplayInformation_OrientationChanged;
            }

            if (ApiInformation.IsTypePresent("Windows.Phone.UI.Input.HardwareButtons"))
            {
                HardwareButtons.CameraPressed -= HardwareButtons_CameraPressed;
            }

            if (_mediaCapture != null)
            {
                if (_mediaCapture.CameraStreamState == Windows.Media.Devices.CameraStreamState.Streaming)
                {
                    await _mediaCapture.StopPreviewAsync();
                }
                _mediaCapture.Dispose();
                _mediaCapture = null;
            }

            _initialized = false;
        }
        private async void InitMediaCapture()
        {
            mediaCapture = new Windows.Media.Capture.MediaCapture();
            await mediaCapture.InitializeAsync(captureInitSettings);

            Windows.Media.Effects.VideoEffectDefinition def = new Windows.Media.Effects.VideoEffectDefinition(Windows.Media.VideoEffects.VideoStabilization);
            await mediaCapture.AddVideoEffectAsync(def, MediaStreamType.VideoRecord);

            profile = Windows.Media.MediaProperties.MediaEncodingProfile.CreateMp4(Windows.Media.MediaProperties.VideoEncodingQuality.Qvga);
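            // MF_MT_VIDEO_ROTATION attribute GUID; tells Media Foundation how the encoded video should be rotated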
            System.Guid MFVideoRotationGuid = new System.Guid("C380465D-2271-428C-9B83-ECEA3B4A85C1");
            int         MFVideoRotation     = ConvertVideoRotationToMFRotation(VideoRotation.None);

            profile.Video.Properties.Add(MFVideoRotationGuid, PropertyValue.CreateInt32(MFVideoRotation));
            var transcoder = new Windows.Media.Transcoding.MediaTranscoder();

            transcoder.AddVideoEffect(Windows.Media.VideoEffects.VideoStabilization);
            capturePreview.Source = mediaCapture;
            DisplayInformation.AutoRotationPreferences = DisplayOrientations.None;

            var definition = new FaceDetectionEffectDefinition();

            definition.SynchronousDetectionEnabled = false;
            definition.DetectionMode = FaceDetectionMode.HighPerformance;
            faceDetectionEffect      = (FaceDetectionEffect)await mediaCapture.AddVideoEffectAsync(definition, MediaStreamType.VideoPreview);

            faceDetectionEffect.DesiredDetectionInterval = TimeSpan.FromMilliseconds(33);
            faceDetectionEffect.Enabled       = true;
            faceDetectionEffect.FaceDetected += FaceDetectionEffect_FaceDetected;
        }
Example #5
        /// <summary>
        /// Initializes face detection on the preview stream, from https://docs.microsoft.com/en-us/windows/uwp/audio-video-camera/scene-analysis-for-media-capture
        /// </summary>
        /// <returns></returns>
        public async Task InitializeFaceDetection()
        {
            // Load the face service client to do face recognition with Cognitive Services
            if (FaceService == null)
            {
                FaceService = await FaceServiceHelper.CreateNewFaceServiceAsync();
            }

            // Create the definition, which will contain some initialization settings
            var definition = new FaceDetectionEffectDefinition();

            // To ensure preview smoothness, do not delay incoming samples
            definition.SynchronousDetectionEnabled = false;

            // In this scenario, choose detection speed over accuracy
            definition.DetectionMode = FaceDetectionMode.HighPerformance;

            // Add the effect to the preview stream
            _faceDetectionEffect = (FaceDetectionEffect)await _mediaCapture.AddVideoEffectAsync(definition, MediaStreamType.VideoPreview);

            // TODO: Change to a good frequency to save Cognitive Services API calls
            // Choose the shortest interval between detection events
            //_faceDetectionEffect.DesiredDetectionInterval = TimeSpan.FromMilliseconds(33);
            // Currently we detect faces locally every 3 seconds to save API calls
            _faceDetectionEffect.DesiredDetectionInterval = TimeSpan.FromMilliseconds(3000);

            // Start detecting faces
            _faceDetectionEffect.Enabled = true;

            // Register for face detection events
            _faceDetectionEffect.FaceDetected += FaceDetectionEffect_FaceDetected;

            _isDetecting = true;
        }
Example #6
 private async void _faceDetectionEffect_FaceDetectedAsync(FaceDetectionEffect sender, FaceDetectedEventArgs args)
 {
     if (args.ResultFrame.DetectedFaces.Count > 0)
     {
         try
         {
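             // Start a capture only if no face is currently active, or the last sighting was more than 5 minutes ago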
             if (!isFaceFound || DateTime.Now.Subtract(faceLastDate).TotalMinutes > 5)
             {
                 Analytics.TrackEvent("Faces found, starting capture");
                 isFaceFound = true;
                 await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, async() =>
                 {
                     timerFace.Stop();
                     timerFace.Start();
                     await ActivateUI();
                 });
             }
             faceLastDate = DateTime.Now;
         }
         catch (Exception)
         {
             // eat error
         }
         //await ContCapture();
     }
 }
Example #7
        /// <summary>
        /// Initializes the camera and the local face detector
        /// </summary>
        private async Task Init()
        {
            MC = new MediaCapture();
            var cameras = await DeviceInformation.FindAllAsync(DeviceClass.VideoCapture);

            var camera   = cameras.First();
            var settings = new MediaCaptureInitializationSettings()
            {
                VideoDeviceId = camera.Id
            };
            await MC.InitializeAsync(settings);

            ViewFinder.Source = MC;

            // Create face detection
            var def = new FaceDetectionEffectDefinition();

            def.SynchronousDetectionEnabled = false;
            def.DetectionMode                     = FaceDetectionMode.HighPerformance;
            FaceDetector                          = (FaceDetectionEffect)(await MC.AddVideoEffectAsync(def, MediaStreamType.VideoPreview));
            FaceDetector.FaceDetected            += FaceDetectedEvent;
            FaceDetector.DesiredDetectionInterval = TimeSpan.FromMilliseconds(100);
            FaceDetector.Enabled                  = true;

            await MC.StartPreviewAsync();

            var props = MC.VideoDeviceController.GetMediaStreamProperties(MediaStreamType.VideoPreview);

            VideoProps = props as VideoEncodingProperties;
        }
Example #8
 private async void FaceDetectionEffect_FaceDetected(FaceDetectionEffect sender, FaceDetectedEventArgs args)
 {
     var detectedFaces = args.ResultFrame.DetectedFaces;
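     // FaceDetected can fire on a non-UI thread, so marshal the drawing work through the dispatcher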
     await Dispatcher
     .RunAsync(CoreDispatcherPriority.Normal,
               () => DrawFaceBoxes(detectedFaces));
 }
Example #9
 private async void FaceDetectionEffect_FaceDetected(FaceDetectionEffect sender, FaceDetectedEventArgs args)
 {
     await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
     {
         setFaceInfoText(args.ResultFrame.DetectedFaces.Count);
     });
 }
Example #10
        private async Task CreateFaceDetectionEffectAsync()
        {
            //Create the definition, which will contain some initialization settings
            var definition = new FaceDetectionEffectDefinition();

            //To ensure preview smoothness, do not delay incoming samples
            definition.SynchronousDetectionEnabled = false;

            //In this scenario, choose detection speed over accuracy
            definition.DetectionMode = FaceDetectionMode.HighPerformance;

            //Add the effect to the preview stream
            _faceDetectionEffect = (FaceDetectionEffect)await mediaCapture.AddVideoEffectAsync(definition, MediaStreamType.VideoPreview);

            //Register for face detection events
            _faceDetectionEffect.FaceDetected += FaceDetectionEffect_FaceDetected;

            // Choose the shortest interval between detection events
            _faceDetectionEffect.DesiredDetectionInterval = TimeSpan.FromMilliseconds(33);

            // Start detecting faces
            _faceDetectionEffect.Enabled = true;

            //status.Text = "The CreateFaceDetectionEffectAsync has been done...";
            Debug.WriteLine("The CreateFaceDetectionEffectAsync has been done...");
        }
Example #11
        private async void FaceDetectionEffect_FaceDetected(FaceDetectionEffect sender, FaceDetectedEventArgs args)
        {
            Debug.WriteLine("Face Number: {0}", args.ResultFrame.DetectedFaces.Count);

            //Ask the UI thread to render the face bounding boxes
            await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () => HighlightDetectedFaces(args.ResultFrame.DetectedFaces));

            try
            {
                //if(args.ResultFrame.DetectedFaces.Count > faceNumber)
                //{
                //    faceNumber = args.ResultFrame.DetectedFaces.Count;
                //    //await SendPhotoAsync();
                //}
                //else
                //{
                //    faceNumber = args.ResultFrame.DetectedFaces.Count;
                //}
                faceNumber = args.ResultFrame.DetectedFaces.Count;
            }
            catch (Exception ex)
            {
                Debug.WriteLine("Exception when sending a photo: {0}", ex.ToString());
            }
            //status.Text = "The number of faces is " + faceNumber;
            // Debug.WriteLine("The number of faces is" + faceNumber);
        }
        private async void FaceDetectionEffect_FaceDetected(FaceDetectionEffect sender, FaceDetectedEventArgs args)
        {
            Debug.WriteLine($"{args.ResultFrame.DetectedFaces.Count} faces detected");

            if (args.ResultFrame.DetectedFaces.Count == 0)
            {
                return;
            }

            await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, async() =>
            {
                try
                {
                    _faceDetectionEffect.FaceDetected -= FaceDetectionEffect_FaceDetected;

                    // Do stuff here
                    var bitmap = await GetWriteableBitmapFromPreviewFrame();
                    var file   = await SaveBitmapToStorage(bitmap);
                    await AddPerson(file);
                    var faces      = await FindFaces(file);
                    var identities = await Identify(faces);
                    var candidates = await ExtractTopCandidate(identities, faces);

                    string json = JsonConvert.SerializeObject(candidates, Formatting.Indented);

                    ResultText.Text = json;
                }
                finally
                {
                    _faceDetectionEffect.FaceDetected += FaceDetectionEffect_FaceDetected;
                }
            });
        }
Example #13
        private async void _faceDetectionEffect_FaceDetected(FaceDetectionEffect sender, FaceDetectedEventArgs args)
        {
            await Windows.ApplicationModel.Core.CoreApplication.MainView.CoreWindow.Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
            {
                this.VisualizationCanvas.Children.Clear();
            });


            foreach (Windows.Media.FaceAnalysis.DetectedFace face in args.ResultFrame.DetectedFaces)
            {
                BitmapBounds faceRect = face.FaceBox;


                await Windows.ApplicationModel.Core.CoreApplication.MainView.CoreWindow.Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
                {
                    Rectangle box       = new Rectangle();
                    box.Tag             = face.FaceBox;
                    box.Width           = (uint)(face.FaceBox.Width);
                    box.Height          = (uint)(face.FaceBox.Height);
                    box.Fill            = this.fillBrush;
                    box.Stroke          = this.lineBrush;
                    box.StrokeThickness = this.lineThickness;

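                    // The +70/+150 offsets below are presumably hard-coded for this page's layout; computing them from the preview's position and scale would be more robust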
                    box.Margin = new Thickness((uint)(face.FaceBox.X + 70), (uint)(face.FaceBox.Y + 150), 0, 0);

                    this.VisualizationCanvas.Children.Add(box);
                });
            }
        }
        private async void FaceDetected(FaceDetectionEffect sender, FaceDetectedEventArgs args)
        {
            var previewStream = previewProperties as VideoEncodingProperties;

            var dispatcher = Windows.ApplicationModel.Core.CoreApplication.MainView.CoreWindow.Dispatcher;

            await dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () => {
                a = camPreview.Width / previewStream.Width;

                facesCanvas.Children.Clear();

                //Detects faces in preview (without Project Oxford) and places a rectangle on them
                foreach (Windows.Media.FaceAnalysis.DetectedFace face in args.ResultFrame.DetectedFaces)
                {
                    Rectangle rect = new Rectangle()
                    {
                        Width           = face.FaceBox.Width * a,
                        Height          = face.FaceBox.Height * a,
                        Stroke          = new SolidColorBrush(Windows.UI.Colors.Red),
                        StrokeThickness = 2.0
                    };

                    facesCanvas.Children.Add(rect);
                    Canvas.SetLeft(rect, camPreview.Width - (face.FaceBox.X * a) - rect.Width);
                    Canvas.SetTop(rect, face.FaceBox.Y * a);
                }
            });
        }
        private async void FaceDetectionEffect_FaceDetected(FaceDetectionEffect sender, FaceDetectedEventArgs args)
        {
            if (args.ResultFrame.DetectedFaces.Any())
            {
                var biggestFace = args.ResultFrame.DetectedFaces.OrderByDescending(f => f.FaceBox.Height * f.FaceBox.Width).FirstOrDefault();
                if (biggestFace != null)
                {
                    var faceBounds = new BitmapBounds
                    {
                        X      = biggestFace.FaceBox.X,
                        Y      = biggestFace.FaceBox.Y,
                        Height = biggestFace.FaceBox.Height,
                        Width  = biggestFace.FaceBox.Width
                    };
                    // Check if face is not too big for face bounding box extrapolation
                    if (false == TryExtendFaceBounds(
                            (int)_previewProperties.Width, (int)_previewProperties.Height,
                            Constants.FaceBoxRatio, ref faceBounds))
                    {
                        return;
                    }
                }

                // Ask the UI thread to render the face bounding boxes
                await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () => HighlightDetectedFaces(args.ResultFrame.DetectedFaces));

                FaceDetected?.Invoke(sender, args);

                if (IsCheckSmileEnabled)
                {
                    await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, async() => await CheckSmileAsync());
                }
            }
        }
Example #16
        /// <summary>
        /// Asynchronously start face detection
        /// </summary>
        public async Task<bool> StartFaceDetectionAsync(int detectionInterval)
        {
            if (!FaceDetector.IsSupported)
            {
                Debug.WriteLine("Face detection is not supported on this device");
                return false;
            }

            if (FaceDetectionEffect == null)
            {
                var definition = new FaceDetectionEffectDefinition
                {
                    DetectionMode = FaceDetectionMode.HighQuality,
                    SynchronousDetectionEnabled = false
                };

                FaceDetectionEffect = (FaceDetectionEffect)await MediaCapture.AddVideoEffectAsync
                                          (definition, MediaStreamType.VideoPreview);
            }

            FaceDetectionEffect.DesiredDetectionInterval = TimeSpan.FromMilliseconds(detectionInterval);
            FaceDetectionEffect.Enabled = true;

            return true;
        }
        private async void FaceDetectionEffect_FaceDetected(FaceDetectionEffect sender, FaceDetectedEventArgs args)
        {
            // Ask the UI thread to render the face bounding boxes
            var detectedFaces = args.ResultFrame.DetectedFaces;

            Debug.WriteLine(detectedFaces.Count);
            await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () => HighlightDetectedFaces(detectedFaces));
        }
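
A possible call site for StartFaceDetectionAsync above, sketched as an assumption (the faceDetection object name and the 66 ms interval are illustrative, not from the original):

        // Hypothetical usage: request a detection event roughly every 66 ms (~15 fps)
        bool started = await faceDetection.StartFaceDetectionAsync(66);
        if (!started)
        {
            Debug.WriteLine("Face detection is not supported on this device");
        }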
Example #18
        private async void btnStopDetection_Click(object sender, RoutedEventArgs e)
        {
            this.cvsFaceOverlay.Children.Clear();
            _faceDetectionEffect.Enabled       = false;
            _faceDetectionEffect.FaceDetected -= FaceDetectionEffect_FaceDetected;
            await _mediaCapture.ClearEffectsAsync(MediaStreamType.VideoPreview);

            _faceDetectionEffect = null;
        }
Example #19
        private void FaceDetectionEffect_FaceDetected(FaceDetectionEffect sender, FaceDetectedEventArgs args)
        {
            //Debug.WriteLine("Faces: {0}", args.ResultFrame.DetectedFaces.Count);

            var nothing = Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () =>
            {
                (DataContext as MainPageViewModel).OneFacePresent = (args.ResultFrame.DetectedFaces.Count == 1);
            });
        }
Example #20
        private async void FaceStopDetect()
        {
            _faceDetectionEffect.Enabled       = false;
            _faceDetectionEffect.FaceDetected -= FaceDetectionEffect_FaceDetected;
            await _mediaCapture.ClearEffectsAsync(MediaStreamType.VideoPreview);

            _faceDetectionEffect = null;
            this.cvsFaceOverlay.Children.Clear();
        }
Example #21
        // stop face detection
        public async void StopDetect()
        {
            this._faceDetectionEffect.Enabled       = false;
            this._faceDetectionEffect.FaceDetected -= FaceDetectionEffect_FaceDetected;
            this._faceDetectionEffect = null;
            await this._mediaCapture.ClearEffectsAsync(MediaStreamType.VideoPreview);

            // uninitialize the camera
            await this.CleanupCameraAsync();
        }
Example #22
        /// <summary>
        /// Asynchronously stop face detection
        /// </summary>
        public async Task StopFaceDetectionAsync()
        {
            if (FaceDetectionEffect != null)
            {
                FaceDetectionEffect.Enabled = false;
                await MediaCapture.ClearEffectsAsync(MediaStreamType.VideoPreview);

                FaceDetectionEffect = null;
            }
        }
        /// <summary>
        /// Initializes the camera.
        /// Will raise `CameraInit*` events.
        /// </summary>
        /// <returns>Task.</returns>
        public async Task InitializeCameraAsync(Size previewControlSize)
        {
            // Set UI-related values.
            this.previewControlSize = previewControlSize;

            // Ensure that the media capture hasn't been initialized yet.
            if (MediaCapture != null)
            {
                return;
            }

            // Get all camera devices.
            var devices = await DeviceInformation.FindAllAsync(DeviceClass.VideoCapture);

            // Ensure exactly one camera has been found.
            if (devices.Count != 1)
            {
                IsFaceDetectionControlAvailable = false;
                CameraInitFailed(this, new MessageEventArgs("No camera, or more than one, was found. No face detection available."));
                return;
            }

            // Create new media capture instance.
            MediaCapture = new MediaCapture();

            // Setup callbacks.
            MediaCapture.Failed += MediaCapture_Failed;

            // Init the actual capturing.
            var settings = new MediaCaptureInitializationSettings {
                VideoDeviceId = devices[0].Id
            };
            await MediaCapture.InitializeAsync(settings);

            // Update the preview properties from the media capture.
            previewProperties = MediaCapture
                                .VideoDeviceController
                                .GetMediaStreamProperties(MediaStreamType.VideoPreview)
                                as VideoEncodingProperties;

            // Setup face detection
            var definition = new FaceDetectionEffectDefinition
            {
                SynchronousDetectionEnabled = false,
                DetectionMode = FaceDetectionMode.HighPerformance
            };

            faceDetectionEffect = (FaceDetectionEffect)await MediaCapture.AddVideoEffectAsync(definition, MediaStreamType.VideoPreview);

            faceDetectionEffect.DesiredDetectionInterval = TimeSpan.FromMilliseconds(33);
            faceDetectionEffect.FaceDetected            += FaceDetectionEffect_FaceDetected;

            // Operation was successful.
            IsFaceDetectionControlAvailable = true;
            CameraInitSucceeded(this, new MessageEventArgs("Face detection is now available."));
        }
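
A minimal sketch of how InitializeCameraAsync might be driven from a page, under assumptions (the PreviewControl CaptureElement and the StartPreviewAsync call are not in the original; the method itself does not start the preview):

        // Hypothetical caller; PreviewControl is an assumed CaptureElement in XAML
        await cameraService.InitializeCameraAsync(new Size(PreviewControl.ActualWidth, PreviewControl.ActualHeight));
        PreviewControl.Source = cameraService.MediaCapture;
        await cameraService.MediaCapture.StartPreviewAsync();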
Example #24
        /// <summary>
        /// Handle a face detected event
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="args"></param>
        private async void FaceDetectionEffect_FaceDetected(FaceDetectionEffect sender, FaceDetectedEventArgs args)
        {
            // Only run one face detection call to Cognitive Services at a time
            if (!_isRecognizing)
            {
                //If we need the box for the detected face we can get them here
                //foreach (Windows.Media.FaceAnalysis.DetectedFace face in args.ResultFrame.DetectedFaces)
                //{
                //    BitmapBounds faceRect = face.FaceBox;
                //}

                _isRecognizing = true;

                var lowLagCapture = await _mediaCapture.PrepareLowLagPhotoCaptureAsync(ImageEncodingProperties.CreateUncompressed(MediaPixelFormat.Bgra8));

                var capturedPhoto = await lowLagCapture.CaptureAsync();

                var softwareBitmap = capturedPhoto.Frame.SoftwareBitmap;

                await lowLagCapture.FinishAsync();

                using (IRandomAccessStream randomAccessStream = new InMemoryRandomAccessStream())
                {
                    BitmapEncoder encoder = await BitmapEncoder.CreateAsync(BitmapEncoder.JpegEncoderId, randomAccessStream);

                    encoder.SetSoftwareBitmap(softwareBitmap);

                    await encoder.FlushAsync();

                    var stream = randomAccessStream.AsStreamForRead();

                    try
                    {
                        // This calls the Cognitive Services Face API to detect faces
                        var faces = await FaceService.DetectAsync(stream, true, false);

                        List<Guid> faceList = new List<Guid>();

                        foreach (var face in faces)
                        {
                            faceList.Add(face.FaceId);
                        }

                        LastFaces = faceList.ToArray();
                    }
                    catch
                    {
                        //We could not detect faces using Cognitive Services
                    }
                }

                _isRecognizing = false;
            }
        }
Example #25
 private async void FaceDetectionEffect_FaceDetected(FaceDetectionEffect sender, FaceDetectedEventArgs args)
 {
     if (!_displayFaces)
     {
         return;
     }
     await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
     {
         RenderFaceRectangles(args.ResultFrame.DetectedFaces.ToList());
     });
 }
Example #26
        // Method to handle face position detection
        private void FaceDetectionEffect_FaceDetected(FaceDetectionEffect sender, FaceDetectedEventArgs args)
        {
            var detectedFaces            = args.ResultFrame.DetectedFaces;
            FaceDetectEventArgs argument = new FaceDetectEventArgs(detectedFaces);

            if (OnDetectFace == null)
            {
                return;
            }
            OnDetectFace(this, argument);
        }
        private async void FaceDetectionEffect_FaceDetected(FaceDetectionEffect sender, FaceDetectedEventArgs args)
        {
            // Billy Action: Add heart eyes
            if (args.ResultFrame.DetectedFaces.Count > 0)
            {
                ShowLoveEyes();
            }

            // Ask the UI thread to render the face bounding boxes
            // await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () => HighlightDetectedFaces(args.ResultFrame.DetectedFaces));
        }
Example #28
 private async void FaceDetectionEffect_FaceDetected(FaceDetectionEffect sender, FaceDetectedEventArgs args)
 {
     if (!_displayFaceFrames && !_displayFaceHat)
     {
         return;
     }
     // Use the dispatcher because this method is sometimes called from non-UI threads
     await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
     {
         RenderFaceRectangles(args.ResultFrame.DetectedFaces.ToList());
     });
 }
Example #29
        private async void FaceDetect()
        {
            var faceDetectionDefinition = new FaceDetectionEffectDefinition();

            faceDetectionDefinition.DetectionMode = FaceDetectionMode.Balanced;
            faceDetectionDefinition.SynchronousDetectionEnabled = false;
            _faceDetectionEffect = (FaceDetectionEffect)await _mediaCapture.AddVideoEffectAsync(faceDetectionDefinition, MediaStreamType.VideoPreview);

            _faceDetectionEffect.FaceDetected            += FaceDetectionEffect_FaceDetected;
            _faceDetectionEffect.DesiredDetectionInterval = TimeSpan.FromMilliseconds(33);
            _faceDetectionEffect.Enabled = true;
        }
 private void FaceDetectionEffect_FaceDetected(FaceDetectionEffect sender, FaceDetectedEventArgs args)
 {
     foreach (Windows.Media.FaceAnalysis.DetectedFace face in args.ResultFrame.DetectedFaces)
     {
         _ = Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () => {
             txtFaceDetected.Text       = "Face has been detected!";
             txtFaceDetected.FontWeight = Windows.UI.Text.FontWeights.Bold;
             txtFaceDetected.Foreground = new SolidColorBrush(Windows.UI.Colors.SteelBlue);
             ActuateHardware();
         });
     }
 }
        private async Task CreateFaceDetectionEffectAsync()
        {
            // Create the definition, which will contain some initialization settings
            var definition = new FaceDetectionEffectDefinition();

            // To ensure preview smoothness, do not delay incoming samples
            definition.SynchronousDetectionEnabled = false;

            // In this scenario, choose detection speed over accuracy
            definition.DetectionMode = FaceDetectionMode.HighPerformance;

            // Add the effect to the preview stream
            _faceDetectionEffect = (FaceDetectionEffect)await _mediaCapture.AddVideoEffectAsync(definition, MediaStreamType.VideoPreview);

            // Register for face detection events
            _faceDetectionEffect.FaceDetected += HandleFaceDetectionEffectFaceDetected;

            // Choose the shortest interval between detection events
            _faceDetectionEffect.DesiredDetectionInterval = TimeSpan.FromMilliseconds(200);

            // Start detecting faces
            _faceDetectionEffect.Enabled = true;
        }
        private async void HandleFaceDetectionEffectFaceDetected(FaceDetectionEffect sender, FaceDetectedEventArgs args)
        {
            if (_faceInView)
            {
                return;
            }

            _faceInView = true;
            await TakePhotoAsync();
            await AnalyzePhotoAsync();
        }
 public FaceDetectionEffectEvents(FaceDetectionEffect This)
 {
     this.This = This;
 }
        /// <summary>
        ///  Disables and removes the face detection effect, and unregisters the event handler for face detection
        /// </summary>
        /// <returns></returns>
        private async Task CleanUpFaceDetectionEffectAsync()
        {
            // Disable detection
            _faceDetectionEffect.Enabled = false;

            // Unregister the event handler
            _faceDetectionEffect.FaceDetected -= FaceDetectionEffect_FaceDetected;

            // Remove the effect (see ClearEffectsAsync method to remove all effects from a stream)
            await _mediaCapture.RemoveEffectAsync(_faceDetectionEffect);

            // Clear the member variable that held the effect instance
            _faceDetectionEffect = null;
        }
        private async void FaceDetectionEffect_FaceDetected(FaceDetectionEffect sender, FaceDetectedEventArgs args)
        {
            Debug.WriteLine("Face number: {0}", args.ResultFrame.DetectedFaces.Count);

            // Ask the UI thread to render the face bounding boxes
            await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () => HighlightDetectedFaces(args.ResultFrame.DetectedFaces));

            // A face was detected; upload the photo to the server.
            try
            {
                if (args.ResultFrame.DetectedFaces.Count > faceNumber)
                {
                    faceNumber = args.ResultFrame.DetectedFaces.Count;
                    await SendPhotoAsync();
                }
                else
                {
                    faceNumber = args.ResultFrame.DetectedFaces.Count;
                }
            }
            catch (Exception ex)
            {
                Debug.WriteLine("Exception when sending a photo: {0}", ex.ToString());
            }
        }
 private async void FaceDetectionEffect_FaceDetected(FaceDetectionEffect sender, FaceDetectedEventArgs args)
 {
     // Ask the UI thread to render the face count information
     await _dispatcher.RunAsync(CoreDispatcherPriority.Normal, () => CountDetectedFaces(args.ResultFrame.DetectedFaces));
 }
 private async void FaceDetectionEffect_FaceDetected(FaceDetectionEffect sender, FaceDetectedEventArgs args)
 {
     // Ask the UI thread to render the face bounding boxes
     await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () => HighlightDetectedFaces(args.ResultFrame.DetectedFaces));
 }
        /// <summary>
        ///  Disables and removes the face detection effect, and unregisters the event handler for face detection
        /// </summary>
        /// <returns></returns>
        private async Task CleanUpFaceDetectionEffectAsync()
        {
            // Disable detection
            _faceDetectionEffect.Enabled = false;

            // Unregister the event handler
            _faceDetectionEffect.FaceDetected -= FaceDetectionEffect_FaceDetected;

            // Remove the effect from the preview stream
            await _mediaCapture.ClearEffectsAsync(MediaStreamType.VideoPreview);

            // Clear the member variable that held the effect instance
            _faceDetectionEffect = null;
        }
Example #39
        private async void FaceDetectionEffect_FaceDetected(FaceDetectionEffect sender, FaceDetectedEventArgs args)
        {
            await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () => HighlightDetectedFaces(args.ResultFrame.DetectedFaces));

            if (launcher == null)
                return;

            await launcher.SetLightAsync(left | right | up | down);

            if (left)
                await launcher.MoveLeftAsync(5);

            if (right)
                await launcher.MoveRightAsync(5);

            if (down)
                await launcher.MoveDownAsync(5);

            if (up)
                await launcher.MoveUpAsync(5);

            if (visible && !up && !down && !left && !right)
                await launcher.FireAsync();
        }