Code Example #1
        private async void FaceDetected(FaceDetectionEffect sender, FaceDetectedEventArgs args)
        {
            var previewStream = previewProperties as VideoEncodingProperties;

            var dispatcher = Windows.ApplicationModel.Core.CoreApplication.MainView.CoreWindow.Dispatcher;

            await dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () => {
                a = camPreview.Width / previewStream.Width;

                facesCanvas.Children.Clear();

                //Detect faces in the preview (without Project Oxford) and place a rectangle on each
                foreach (Windows.Media.FaceAnalysis.DetectedFace face in args.ResultFrame.DetectedFaces)
                {
                    Rectangle rect = new Rectangle()
                    {
                        Width           = face.FaceBox.Width * a,
                        Height          = face.FaceBox.Height * a,
                        Stroke          = new SolidColorBrush(Windows.UI.Colors.Red),
                        StrokeThickness = 2.0
                    };

                    facesCanvas.Children.Add(rect);
                    Canvas.SetLeft(rect, camPreview.Width - (face.FaceBox.X * a) - rect.Width);
                    Canvas.SetTop(rect, face.FaceBox.Y * a);
                }
            });
        }
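Every handler in this collection assumes a FaceDetectionEffect has already been attached to the camera's preview stream. For context, here is a minimal registration sketch, assuming an already-initialized MediaCapture field named _mediaCapture; the field names and the 33 ms interval are illustrative, not taken from any example above:

        // Namespaces: Windows.Media.Core (FaceDetectionEffectDefinition, FaceDetectionEffect)
        // and Windows.Media.Capture (MediaCapture, MediaStreamType).
        private FaceDetectionEffect _faceDetectionEffect;

        private async Task CreateFaceDetectionEffectAsync()
        {
            var definition = new FaceDetectionEffectDefinition
            {
                // Favor frame rate over accuracy for a live preview
                DetectionMode = FaceDetectionMode.HighPerformance,
                // Do not hold up incoming frames while detection runs
                SynchronousDetectionEnabled = false
            };

            _faceDetectionEffect = (FaceDetectionEffect)await
                _mediaCapture.AddVideoEffectAsync(definition, MediaStreamType.VideoPreview);

            _faceDetectionEffect.DesiredDetectionInterval = TimeSpan.FromMilliseconds(33);
            _faceDetectionEffect.FaceDetected += FaceDetectionEffect_FaceDetected;
            _faceDetectionEffect.Enabled = true;
        }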
Code Example #2
        private void FaceDetection_FaceDetected(FaceDetectionEffect sender, FaceDetectedEventArgs args)
        {
            DispatcherWrapper.Current().Dispatch(() =>
            {
                var properties = DefaultManager.VideoDeviceController.GetMediaStreamProperties(MediaStreamType.VideoPreview) as VideoEncodingProperties;
                if (properties == null || properties.Width == 0 || properties.Height == 0)
                {
                    return;
                }

                var canvas    = FacesCanvas.Child as Canvas;
                canvas.Height = properties.Height;
                canvas.Width  = properties.Width;
                FaceDetected?.Invoke(sender, args);
                canvas.Children.Clear();

                foreach (var face in args.ResultFrame.DetectedFaces.Where(x => x.FaceBox.Width != 0 && x.FaceBox.Height != 0))
                {
                    var box = new Rectangle
                    {
                        Height          = face.FaceBox.Height,
                        Width           = face.FaceBox.Width,
                        Stroke          = FacesBoxColor.ToSolidColorBrush(),
                        StrokeThickness = 2,
                    };
                    Canvas.SetLeft(box, face.FaceBox.X);
                    Canvas.SetTop(box, face.FaceBox.Y);
                    canvas.Children.Add(box);
                }
                FaceCountChanged?.Invoke(this, canvas.Children.Count());
            });
        }
Code Example #3
        private void FaceDetectionEffect_FaceDetected(FaceDetectionEffect sender, FaceDetectedEventArgs args)
        {
            //Debug.WriteLine("Faces: {0}", args.ResultFrame.DetectedFaces.Count);

            var nothing = Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () =>
            {
                (DataContext as MainPageViewModel).OneFacePresent = (args.ResultFrame.DetectedFaces.Count == 1);
            });
        }
Code Example #4
        // Detecting faces using camera
        private async void FaceDetectionEffect_FaceDetected(
            FaceDetectionEffect sender, FaceDetectedEventArgs args)
        {
            var detectedFaces = args.ResultFrame.DetectedFaces;

            await Dispatcher
            .RunAsync(CoreDispatcherPriority.Normal,
                      () => FaceCommands(detectedFaces));
        }
Code Example #5
        // Method to handle face position detection
        private void FaceDetectionEffect_FaceDetected(FaceDetectionEffect sender, FaceDetectedEventArgs args)
        {
            var detectedFaces            = args.ResultFrame.DetectedFaces;
            FaceDetectEventArgs argument = new FaceDetectEventArgs(detectedFaces);

            if (OnDetectFace == null)
            {
                return;
            }
            OnDetectFace(this, argument);
        }
Code Example #6
 private void FaceDetectionEffect_FaceDetected(FaceDetectionEffect sender, FaceDetectedEventArgs args)
 {
     foreach (Windows.Media.FaceAnalysis.DetectedFace face in args.ResultFrame.DetectedFaces)
     {
         _ = Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () => {
             txtFaceDetected.Text       = "Face has been detected!";
             txtFaceDetected.FontWeight = Windows.UI.Text.FontWeights.Bold;
             txtFaceDetected.Foreground = new SolidColorBrush(Windows.UI.Colors.SteelBlue);
             ActuateHardware();
         });
     }
 }
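Example #6 queues one identical UI update per detected face. Since the update does not depend on the individual face, a single dispatch when any face is present is equivalent; a minimal reworking of the same handler:

 private void FaceDetectionEffect_FaceDetected(FaceDetectionEffect sender, FaceDetectedEventArgs args)
 {
     if (args.ResultFrame.DetectedFaces.Count == 0)
     {
         return;
     }

     // One dispatch is enough; the UI update is the same for every face
     _ = Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () => {
         txtFaceDetected.Text       = "Face has been detected!";
         txtFaceDetected.FontWeight = Windows.UI.Text.FontWeights.Bold;
         txtFaceDetected.Foreground = new SolidColorBrush(Windows.UI.Colors.SteelBlue);
         ActuateHardware();
     });
 }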
Code Example #7
 private async void OnFaceDetected(FaceDetectionEffect sender, FaceDetectedEventArgs args)
 {
     if (!IsLoading && args.ResultFrame.DetectedFaces.Any())
     {
         await DispatcherHelper.RunAsync(async () =>
         {
             await RunTaskAsync(async () =>
             {
                 var persons = await MakeRecognition();
                 await VoiceInterface.SayHelloAsync(persons);
             });
         });
     }
 }
Code Example #8
        private async void MediaCapture_FaceDetected(ExampleMediaCapture sender, FaceDetectedEventArgs args)
        {
            // This event is raised even when no faces are present, so the arguments must be inspected

            if (args.ResultFrame.DetectedFaces.Any() && FrameReader != null)
            {
                if (PreviewOpacity == 0.0)
                {
                    await FrameReader.StartAsync();

                    PreviewOpacity = 1.0;
                }

                // Reset the countdown to turning off camera
                FrameReaderStopTrigger.Stop();
                FrameReaderStopTrigger.Start();
            }

            UpdateSystemRelativeTime(args.ResultFrame.SystemRelativeTime);
        }
Code Example #9
        /// <summary>
        /// Raised when a face has been recognized.
        /// Triggers a UI and arm update if required.
        /// </summary>
        /// <param name="sender">Underlying instance.</param>
        /// <param name="args">Event args.</param>
        private void FaceDetectionEffect_FaceDetected(FaceDetectionEffect sender, FaceDetectedEventArgs args)
        {
            // Get faces from arguments.
            var faces = args.ResultFrame.DetectedFaces;

            // Ensure at least one face has been detected.
            if (faces.Count == 0 || !IsFaceDetectionControlAvailable)
            {
                FaceRectsDetected(this, new FaceRectsDetectedEventArgs(new List<Rect>()));
                return;
            }

            // Get the rectangle of the preview control.
            var previewRect = ScaleStreamToPreviewDimensions(previewProperties, previewControlSize.Width, previewControlSize.Height);

            // Get preview stream properties.
            double previewWidth  = previewProperties.Width;
            double previewHeight = previewProperties.Height;

            // Map FaceBox to a scaled rect.
            var faceRects = faces.Select(face =>
            {
                // Get scaled position information of the face.
                var faceBox         = face.FaceBox;
                var resultingWidth  = (faceBox.Width / previewWidth) * previewRect.Width;
                var resultingHeight = (faceBox.Height / previewHeight) * previewRect.Height;
                var resultingX      = (faceBox.X / previewWidth) * previewRect.Width;
                var resultingY      = (faceBox.Y / previewHeight) * previewRect.Height;

                // Init new rect.
                var rect = new Rect(resultingX, resultingY, resultingWidth, resultingHeight);
                return rect;
            }).ToList(); // Materialize so the projection runs only once for both consumers below.

            // Update arm
            UpdateArmPosition(faceRects);

            // Call event.
            FaceRectsDetected(this, new FaceRectsDetectedEventArgs(faceRects));
        }
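Example #9 depends on a ScaleStreamToPreviewDimensions helper that is not shown. One plausible implementation, following the letterbox/pillarbox math used in Microsoft's camera samples (a sketch; the project's actual helper may differ):

        private static Rect ScaleStreamToPreviewDimensions(VideoEncodingProperties previewResolution, double previewWidth, double previewHeight)
        {
            // Start with the full preview control area
            var result = new Rect(0, 0, previewWidth, previewHeight);

            double streamAspect  = previewResolution.Width / (double)previewResolution.Height;
            double controlAspect = previewWidth / previewHeight;

            if (streamAspect > controlAspect)
            {
                // Letterboxed: bars above and below, video fills the full width
                double scaledHeight = previewWidth / streamAspect;
                result.Y      = (previewHeight - scaledHeight) / 2.0;
                result.Height = scaledHeight;
            }
            else
            {
                // Pillarboxed: bars left and right, video fills the full height
                double scaledWidth = previewHeight * streamAspect;
                result.X     = (previewWidth - scaledWidth) / 2.0;
                result.Width = scaledWidth;
            }

            return result;
        }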
Code Example #10
 private async void FaceDetectionEffect_FaceDetected(FaceDetectionEffect sender, FaceDetectedEventArgs args)
 {
     // Ask the UI thread to render the face count information
     await _dispatcher.RunAsync(CoreDispatcherPriority.Normal, () => CountDetectedFaces(args.ResultFrame.DetectedFaces));
 }
Code Example #11
 private async void FaceDetectionEffect_FaceDetected(FaceDetectionEffect sender, FaceDetectedEventArgs args)
 {
     // Ask the UI thread to render the face bounding boxes
     await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () => HighlightDetectedFaces(args.ResultFrame.DetectedFaces));
 }
Code Example #12
File: MainPage.xaml.cs, Project: afnan-ms/I_Love_Lamp
 private async void FaceDetectionEffect_FaceDetected(FaceDetectionEffect sender, FaceDetectedEventArgs args)
 {
     await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
     {
         setFaceInfoText(args.ResultFrame.DetectedFaces.Count);
     });
 }
Code Example #13
 private void Face_FaceDetected(Object sender, FaceDetectedEventArgs e)
 {
     FaceDetected?.Invoke(sender, e);
 }
Code Example #14
 /// <summary>
 /// Fires when a face is detected locally in the photo.
 /// Draws a bounding box and sets IsFacePresent = true.
 /// </summary>
 private async void FaceDetectedEvent(FaceDetectionEffect sender, FaceDetectedEventArgs args)
 {
     await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () => HighlightDetectedFace(args.ResultFrame.DetectedFaces.FirstOrDefault()));
 }
Code Example #15
        /// <summary>
        /// Handle a face detected event
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="args"></param>
        private async void FaceDetectionEffect_FaceDetected(FaceDetectionEffect sender, FaceDetectedEventArgs args)
        {
            // Only run one face detection call to Cognitive Services at a time
            if (!_isRecognizing)
            {
                //If we need the boxes for the detected faces, we can get them here
                //foreach (Windows.Media.FaceAnalysis.DetectedFace face in args.ResultFrame.DetectedFaces)
                //{
                //    BitmapBounds faceRect = face.FaceBox;
                //}

                _isRecognizing = true;

                var lowLagCapture = await _mediaCapture.PrepareLowLagPhotoCaptureAsync(ImageEncodingProperties.CreateUncompressed(MediaPixelFormat.Bgra8));

                var capturedPhoto = await lowLagCapture.CaptureAsync();

                var softwareBitmap = capturedPhoto.Frame.SoftwareBitmap;

                await lowLagCapture.FinishAsync();

                using (IRandomAccessStream randomAccessStream = new InMemoryRandomAccessStream())
                {
                    BitmapEncoder encoder = await BitmapEncoder.CreateAsync(BitmapEncoder.JpegEncoderId, randomAccessStream);

                    encoder.SetSoftwareBitmap(softwareBitmap);

                    await encoder.FlushAsync();

                    var stream = randomAccessStream.AsStreamForRead();

                    try
                    {
                        //This calls the Cognitive Services Face API to detect the faces
                        var faces = await FaceService.DetectAsync(stream, true, false);

                        List<Guid> faceList = new List<Guid>();

                        foreach (var face in faces)
                        {
                            faceList.Add(face.FaceId);
                        }

                        LastFaces = faceList.ToArray();
                    }
                    catch
                    {
                        //We could not detect faces using Cognitive Services
                    }
                }

                _isRecognizing = false;
            }
        }
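Example #15 guards against reentrancy with a plain _isRecognizing bool, but FaceDetected fires on a background thread, so two events can race past the check, and an exception thrown before the reset would leave the flag stuck. A sketch of a sturdier guard using Interlocked and finally (RecognizeFacesAsync is a hypothetical helper wrapping the capture and API calls above):

        private int _recognitionInProgress; // 0 = idle, 1 = busy

        private async void FaceDetectionEffect_FaceDetected(FaceDetectionEffect sender, FaceDetectedEventArgs args)
        {
            // Atomically claim the slot; bail out if a recognition is already running
            if (Interlocked.CompareExchange(ref _recognitionInProgress, 1, 0) != 0)
            {
                return;
            }

            try
            {
                await RecognizeFacesAsync(); // hypothetical helper
            }
            finally
            {
                // Always release, even if the capture or the API call throws
                Interlocked.Exchange(ref _recognitionInProgress, 0);
            }
        }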
Code Example #16
        private async void FaceDetectionEffect_FaceDetected(FaceDetectionEffect sender, FaceDetectedEventArgs args)
        {
            // Billy Action: Add heart eyes
            if (args.ResultFrame.DetectedFaces.Count > 0)
            {
                ShowLoveEyes();
            }

            // Ask the UI thread to render the face bounding boxes
            // await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () => HighlightDetectedFaces(args.ResultFrame.DetectedFaces));
        }
Code Example #17
 private void PreviewFaceDetectionEffect_FaceDetected(FaceDetectionEffect sender, FaceDetectedEventArgs args)
 {
     FaceDetected?.Invoke(this, new FaceAnalysis.FaceDetectedEventArgs(new FaceAnalysis.FaceDetectionEffectFrame(args.ResultFrame)));
 }
Code Example #18
 private void FaceDetection_FaceDetected(FaceDetectionEffect sender, FaceDetectedEventArgs args)
 {
     FaceDetected?.Invoke(sender, args);
 }
Code Example #19
 private async void FaceDetectionEffect_FaceDetected(FaceDetectionEffect sender, FaceDetectedEventArgs args)
 {
     var detectedFaces = args.ResultFrame.DetectedFaces;
     await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
     {
         DrawFaceBoxes(detectedFaces);
         AnalyzeFaces();
     });
 }
Code Example #20
        private async void FaceDetectionEffect_FaceDetected(FaceDetectionEffect sender, FaceDetectedEventArgs args)
        {
            if (!_isPreviewing)
            {
                return;
            }


            await _dispatcher.RunAsync(CoreDispatcherPriority.Normal, async () =>
            {
                SetValue(() => Instructions, "Please move closer until the box turns red...");

                if (HighlightDetectedFaces(args.ResultFrame.DetectedFaces))
                {
                    SetValue(() => Instructions, _defaultInstructions);

                    _faceDetectionEffect.FaceDetected -= FaceDetectionEffect_FaceDetected;

                    try
                    {
                        StorageFile file = await GetPhotoFromPreviewFrame();

                        if (file != null)
                        {
                            List <Identification> matches = await AnalyzePhoto(file);

                            _faceCanvas.Children.Clear();

                            if (matches.Count > 0)
                            {
                                if (matches.First().Person.Name.ToLower() == "unknown")
                                {
                                    ClipboardHelper clipboardHelper = new ClipboardHelper();
                                    clipboardHelper.ImageToClipboard(file);
                                    NavigationHelper.Navigate(typeof(FaceNotFoundPage), false);
                                }
                                else
                                {
                                    bool alreadyAttending = await Repository.AlreadyAttending(App.ActiveMeeting.MeetingId, matches.First().Person.Name);
                                    if (!alreadyAttending)
                                    {
                                        NavigationHelper.Navigate(typeof(FaceFoundPage), matches.First());
                                        ClipboardHelper clipboardHelper = new ClipboardHelper();
                                        clipboardHelper.ImageToClipboard(file);
                                    }
                                    else
                                    {
                                        SetValue(() => Instructions, $"{matches.First().Person.Name} - You are already on the roll.  Please take a seat.");

                                        _timer.Start();
                                    }
                                }
                            }
                            else
                            {
                                _faceDetectionEffect.FaceDetected += FaceDetectionEffect_FaceDetected;
                            }
                        }
                    }
                    catch (Exception ex)
                    {
                        Debug.WriteLine(ex.Message);
                        _faceDetectionEffect.FaceDetected += FaceDetectionEffect_FaceDetected;
                    }
                }
            });
        }
Code Example #21
File: MainPage.xaml.cs, Project: vaclavpetr/Blog
 private async void FaceDetectionEffect_FaceDetected(FaceDetectionEffect sender, FaceDetectedEventArgs args)
 {
     if (!_displayFaceFrames && !_displayFaceHat)
     {
         return;
     }
     // Use the dispatcher because this method is sometimes called from non-UI threads
     await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
     {
         RenderFaceRectangles(args.ResultFrame.DetectedFaces.ToList());
     });
 }
Code Example #22
        private async void FaceDetectionEffect_FaceDetected(FaceDetectionEffect sender, FaceDetectedEventArgs args)
        {
            Debug.WriteLine($"{args.ResultFrame.DetectedFaces.Count} faces detected");

            if (args.ResultFrame.DetectedFaces.Count == 0)
            {
                return;
            }

            await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, async () =>
            {
                try
                {
                    _faceDetectionEffect.FaceDetected -= FaceDetectionEffect_FaceDetected;

                    // Do stuff here
                    var bitmap = await GetWriteableBitmapFromPreviewFrame();
                    var file   = await SaveBitmapToStorage(bitmap);
                    await AddPerson(file);
                    var faces      = await FindFaces(file);
                    var identities = await Identify(faces);
                    var candidates = await ExtractTopCandidate(identities, faces);

                    string json = JsonConvert.SerializeObject(candidates, Formatting.Indented);

                    ResultText.Text = json;
                }
                finally
                {
                    _faceDetectionEffect.FaceDetected += FaceDetectionEffect_FaceDetected;
                }
            });
        }
Code Example #23
        private async void FaceDetectionEffect_FaceDetected(FaceDetectionEffect sender, FaceDetectedEventArgs args)
        {
            await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () => HighlightDetectedFaces(args.ResultFrame.DetectedFaces));

            if (launcher == null)
            {
                return;
            }

            await launcher.SetLightAsync(left | right | up | down);

            if (left)
            {
                await launcher.MoveLeftAsync(5);
            }

            if (right)
            {
                await launcher.MoveRightAsync(5);
            }

            if (down)
            {
                await launcher.MoveDownAsync(5);
            }

            if (up)
            {
                await launcher.MoveUpAsync(5);
            }

            if (visible && !up && !down && !left && !right)
            {
                await launcher.FireAsync();
            }
        }
Code Example #24
        private async void FaceDetectionEffect_FaceDetected(FaceDetectionEffect sender, FaceDetectedEventArgs args)
        {
            await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () => HighlightDetectedFaces(args.ResultFrame.DetectedFaces));

            if (launcher == null)
                return;

            await launcher.SetLightAsync(left | right | up | down);

            if (left)
                await launcher.MoveLeftAsync(5);

            if (right)
                await launcher.MoveRightAsync(5);

            if (down)
                await launcher.MoveDownAsync(5);

            if (up)
                await launcher.MoveUpAsync(5);

            if (visible && !up && !down && !left && !right)
                await launcher.FireAsync();
        }
Code Example #25
File: MainPage.xaml.cs, Project: vaclavpetr/Blog
 private async void FaceDetectionEffect_FaceDetected(FaceDetectionEffect sender, FaceDetectedEventArgs args)
 {
     if (!_displayFaces)
     {
         return;
     }
     await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
     {
         RenderFaceRectangles(args.ResultFrame.DetectedFaces.ToList());
     });
 }
Code Example #26
        private async void HandleFaceDetectionEffectFaceDetected(FaceDetectionEffect sender, FaceDetectedEventArgs args)
        {
            if (_faceInView)
            {
                return;
            }

            _faceInView = true;
            await TakePhotoAsync();
            await AnalyzePhotoAsync();
        }
Code Example #27
File: Camera.cs, Project: Chaoyun1234/UWP-experiences
        private async void FaceDetectionEffect_FaceDetected(FaceDetectionEffect sender, FaceDetectedEventArgs args)
        {
            if (args.ResultFrame.DetectedFaces.Count > 0 &&
                _isSmileDetectionEnabled &&
                !_analyzingEmotion &&
                (DateTime.Now - _lastEmotionCheck).TotalSeconds > 1)
            {
                _analyzingEmotion = true;

                var    previewProperties = _mediaCapture.VideoDeviceController.GetMediaStreamProperties(MediaStreamType.VideoPreview) as VideoEncodingProperties;
                double scale             = 480d / (double)previewProperties.Height;

                VideoFrame videoFrame = new VideoFrame(BitmapPixelFormat.Bgra8, (int)(previewProperties.Width * scale), 480);

                using (var frame = await _mediaCapture.GetPreviewFrameAsync(videoFrame))
                {
                    if (frame.SoftwareBitmap != null)
                    {
                        var bitmap = frame.SoftwareBitmap;

                        InMemoryRandomAccessStream stream = new InMemoryRandomAccessStream();
                        BitmapEncoder encoder             = await BitmapEncoder.CreateAsync(BitmapEncoder.JpegEncoderId, stream);

                        encoder.SetSoftwareBitmap(bitmap);

                        await encoder.FlushAsync();

                        var smiling = await EmotionAPI.Instance.CheckIfEveryoneIsSmiling(stream, args.ResultFrame.DetectedFaces, scale);

                        _lastEmotionCheck = DateTime.Now;

                        if (smiling)
                        {
                            IsSmileDetectionEnabled = false;
                            await _captureElement.Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, async () => { await this.CapturePhotoAsync(); });
                        }
                    }
                }

                _analyzingEmotion = false;
            }
        }
Code Example #28
 private async void _faceDetectionEffect_FaceDetectedAsync(FaceDetectionEffect sender, FaceDetectedEventArgs args)
 {
     if (args.ResultFrame.DetectedFaces.Count > 0)
     {
         try
         {
             if (!isFaceFound || DateTime.Now.Subtract(faceLastDate).TotalMinutes > 5)
             {
                 Analytics.TrackEvent("Faces found, starting capture");
                 isFaceFound = true;
                  await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, async () =>
                 {
                     timerFace.Stop();
                     timerFace.Start();
                     await ActivateUI();
                 });
             }
             faceLastDate = DateTime.Now;
         }
         catch (Exception)
         {
             // eat error
         }
         //await ContCapture();
     }
 }
Code Example #29
        private async void FaceDetectionEffect_FaceDetected(FaceDetectionEffect sender, FaceDetectedEventArgs args)
        {
            Debug.WriteLine("Face Number: {0}", args.ResultFrame.DetectedFaces.Count);

            //Ask the UI thread to render the face bounding boxes
            await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () => HighlightDetectedFaces(args.ResultFrame.DetectedFaces));

            try
            {
                //if(args.ResultFrame.DetectedFaces.Count > faceNumber)
                //{
                //    faceNumber = args.ResultFrame.DetectedFaces.Count;
                //    //await SendPhotoAsync();
                //}
                //else
                //{
                //    faceNumber = args.ResultFrame.DetectedFaces.Count;
                //}
                faceNumber = args.ResultFrame.DetectedFaces.Count;
            }

            catch (Exception ex)
            {
                Debug.WriteLine("Exception when sending a photo: {0}", ex.ToString());
            }
            //status.Text = "The number of faces is " + faceNumber;
            // Debug.WriteLine("The number of faces is" + faceNumber);
        }
Code Example #30
 private async void FaceDetectionEffect_FaceDetected(FaceDetectionEffect sender, FaceDetectedEventArgs args)
 {
     // Ask the UI thread to render the face bounding boxes
     await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () => HighlightDetectedFaces(args.ResultFrame.DetectedFaces));
 }
Code Example #31
        private async void _faceDetectionEffect_FaceDetected(FaceDetectionEffect sender, FaceDetectedEventArgs args)
        {
            await Windows.ApplicationModel.Core.CoreApplication.MainView.CoreWindow.Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
            {
                this.VisualizationCanvas.Children.Clear();
            });


            foreach (Windows.Media.FaceAnalysis.DetectedFace face in args.ResultFrame.DetectedFaces)
            {
                BitmapBounds faceRect = face.FaceBox;


                await Windows.ApplicationModel.Core.CoreApplication.MainView.CoreWindow.Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
                {
                    Rectangle box       = new Rectangle();
                    box.Tag             = face.FaceBox;
                    box.Width           = (uint)(face.FaceBox.Width);
                    box.Height          = (uint)(face.FaceBox.Height);
                    box.Fill            = this.fillBrush;
                    box.Stroke          = this.lineBrush;
                    box.StrokeThickness = this.lineThickness;

                    box.Margin = new Thickness((uint)(face.FaceBox.X + 70), (uint)(face.FaceBox.Y + 150), 0, 0);

                    this.VisualizationCanvas.Children.Add(box);
                });
            }
        }
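Example #31 places each rectangle with a Margin plus hard-coded +70/+150 offsets, which ties the overlay to one specific layout. Since the parent is a Canvas, the attached position properties are the more robust choice; scaling from stream to display coordinates, as in examples #1 and #9, would still apply:

                    // Position via Canvas attached properties instead of Margin offsets
                    Canvas.SetLeft(box, face.FaceBox.X);
                    Canvas.SetTop(box, face.FaceBox.Y);
                    this.VisualizationCanvas.Children.Add(box);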
Code Example #32
        private async void FaceDetectionEffect_FaceDetected(FaceDetectionEffect sender, FaceDetectedEventArgs args)
        {
            Debug.WriteLine("Face number: {0}", args.ResultFrame.DetectedFaces.Count);

            // Ask the UI thread to render the face bounding boxes
            await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () => HighlightDetectedFaces(args.ResultFrame.DetectedFaces));

            // A face was detected; upload a photo to the server.
            try
            {
                if (args.ResultFrame.DetectedFaces.Count > faceNumber)
                {
                    faceNumber = args.ResultFrame.DetectedFaces.Count;
                    await SendPhotoAsync();
                }
                else
                {
                    faceNumber = args.ResultFrame.DetectedFaces.Count;
                }
            }
            catch (Exception ex)
            {
                Debug.WriteLine("Exception when sending a photo: {0}", ex.ToString());
            }
        }
Code Example #33
 private async void FaceDetectionEffect_FaceDetected(FaceDetectionEffect sender, FaceDetectedEventArgs args)
 {
     // Ask the UI thread to render the face count information
     await _dispatcher.RunAsync(CoreDispatcherPriority.Normal, () => CountDetectedFaces(args.ResultFrame.DetectedFaces));
 }
Code Example #34
 private void FaceDetectionEffect_FaceDetected(FaceDetectionEffect sender, FaceDetectedEventArgs args)
 {
     // Nothing is awaited here, so async is unnecessary; note this runs on a background thread
     isFaceDetected = args.ResultFrame.DetectedFaces.Count > 0;
 }
Code Example #35
        private async void FaceDetectionEffect_FaceDetected(FaceDetectionEffect sender, FaceDetectedEventArgs args)
        {
            if (args.ResultFrame.DetectedFaces.Any())
            {
                var biggestFace = args.ResultFrame.DetectedFaces.OrderByDescending(f => f.FaceBox.Height * f.FaceBox.Width).FirstOrDefault();
                if (biggestFace != null)
                {
                    var faceBounds = new BitmapBounds
                    {
                        X      = biggestFace.FaceBox.X,
                        Y      = biggestFace.FaceBox.Y,
                        Height = biggestFace.FaceBox.Height,
                        Width  = biggestFace.FaceBox.Width
                    };
                    // Check if face is not too big for face bounding box extrapolation
                    if (!TryExtendFaceBounds(
                            (int)_previewProperties.Width, (int)_previewProperties.Height,
                            Constants.FaceBoxRatio, ref faceBounds))
                    {
                        return;
                    }
                }

                // Ask the UI thread to render the face bounding boxes
                await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () => HighlightDetectedFaces(args.ResultFrame.DetectedFaces));

                FaceDetected?.Invoke(sender, args);

                if (IsCheckSmileEnabled)
                {
                    await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, async () => await CheckSmileAsync());
                }
            }
        }
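None of the examples show teardown, and leaving the effect attached keeps the detection pipeline running. A cleanup sketch, assuming the registration fields from the sketch after example #1:

        private async Task CleanUpFaceDetectionAsync()
        {
            if (_faceDetectionEffect != null)
            {
                // Stop raising events before tearing the effect down
                _faceDetectionEffect.Enabled = false;
                _faceDetectionEffect.FaceDetected -= FaceDetectionEffect_FaceDetected;

                // Remove all effects from the preview stream
                await _mediaCapture.ClearEffectsAsync(MediaStreamType.VideoPreview);
                _faceDetectionEffect = null;
            }
        }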