/// <summary>
/// Clears the outline canvas, draws a rectangle for each non-degenerate
/// detected face, and raises <see cref="FaceDetected"/> with the projected
/// face data.
/// </summary>
/// <param name="e">Event args carrying the face-detection result frame.</param>
private void DrawFaceOutlines(Windows.Media.Core.FaceDetectedEventArgs e)
        {
            FaceOutlineCanvas.Children.Clear();

            // Materialize the query once: the original deferred IEnumerable was
            // enumerated twice (foreach + Select), re-running the filter each time.
            // The null-conditional also guards against a null DetectedFaces list,
            // which previously caused a NullReferenceException in the foreach.
            var faces = e.ResultFrame.DetectedFaces
                ?.Where(x => x.FaceBox.Width != 0 && x.FaceBox.Height != 0)
                .ToList();
            if (faces == null)
            {
                return;
            }

            foreach (var face in faces)
            {
                var outline = new Rectangle
                {
                    Style  = FaceOutlineStyle,
                    Height = face.FaceBox.Height,
                    Width  = face.FaceBox.Width,
                };
                Canvas.SetLeft(outline, face.FaceBox.X);
                Canvas.SetTop(outline, face.FaceBox.Y);
                FaceOutlineCanvas.Children.Add(outline);
            }

            // Project into the library's own event-args type and notify subscribers.
            var projection = faces.Select(x => new DetectedFaceEx {
                FaceBox = x.FaceBox
            });
            var args = new FaceDetectedEventArgsEx {
                Faces = projection
            };

            FaceDetected?.Invoke(this, args);
        }
Example 2
0
        /// <summary>
        /// Face-detection handler: on the UI thread, sizes the overlay canvas to the
        /// preview stream, raises <see cref="FaceDetected"/>, redraws one box per
        /// detected face, then reports the face count via <see cref="FaceCountChanged"/>.
        /// </summary>
        private void FaceDetection_FaceDetected(FaceDetectionEffect sender, FaceDetectedEventArgs args)
        {
            DispatcherWrapper.Current().Dispatch(() =>
            {
                var properties = DefaultManager.VideoDeviceController.GetMediaStreamProperties(MediaStreamType.VideoPreview) as VideoEncodingProperties;
                if (properties == null || properties.Width == 0 || properties.Height == 0)
                {
                    return;
                }

                // Guard the cast: the original dereferenced the result of "as"
                // without a null check and would have thrown if FacesCanvas.Child
                // were not (yet) a Canvas.
                if (!(FacesCanvas.Child is Canvas canvas))
                {
                    return;
                }

                canvas.Height = properties.Height;
                canvas.Width  = properties.Width;
                FaceDetected?.Invoke(sender, args);
                canvas.Children.Clear();

                foreach (var face in args.ResultFrame.DetectedFaces.Where(x => x.FaceBox.Width != 0 && x.FaceBox.Height != 0))
                {
                    var box = new Rectangle
                    {
                        Height          = face.FaceBox.Height,
                        Width           = face.FaceBox.Width,
                        Stroke          = FacesBoxColor.ToSolidColorBrush(),
                        StrokeThickness = 2,
                    };
                    Canvas.SetLeft(box, face.FaceBox.X);
                    Canvas.SetTop(box, face.FaceBox.Y);
                    canvas.Children.Add(box);
                }

                // Count is a property on the children collection; the LINQ Count()
                // extension call was redundant.
                FaceCountChanged?.Invoke(this, canvas.Children.Count);
            });
        }
        /// <summary>
        /// Face-detection handler: checks that the largest detected face fits the
        /// extrapolated bounding box, renders highlights on the UI thread, raises
        /// <see cref="FaceDetected"/>, and optionally kicks off a smile check.
        /// async void is acceptable here because this is a top-level event handler.
        /// </summary>
        private async void FaceDetectionEffect_FaceDetected(FaceDetectionEffect sender, FaceDetectedEventArgs args)
        {
            if (!args.ResultFrame.DetectedFaces.Any())
            {
                return;
            }

            // The sequence is non-empty here, so First() cannot throw; the original
            // FirstOrDefault + null check was redundant.
            var biggestFace = args.ResultFrame.DetectedFaces
                .OrderByDescending(f => f.FaceBox.Height * f.FaceBox.Width)
                .First();

            var faceBounds = new BitmapBounds
            {
                X      = biggestFace.FaceBox.X,
                Y      = biggestFace.FaceBox.Y,
                Height = biggestFace.FaceBox.Height,
                Width  = biggestFace.FaceBox.Width
            };

            // Skip the frame when the face is too big for bounding-box extrapolation.
            // TryExtendFaceBounds mutates faceBounds by ref, but only its success
            // flag is used here.
            if (!TryExtendFaceBounds(
                    (int)_previewProperties.Width, (int)_previewProperties.Height,
                    Constants.FaceBoxRatio, ref faceBounds))
            {
                return;
            }

            // Ask the UI thread to render the face bounding boxes
            await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () => HighlightDetectedFaces(args.ResultFrame.DetectedFaces));

            FaceDetected?.Invoke(sender, args);

            if (IsCheckSmileEnabled)
            {
                await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, async () => await CheckSmileAsync());
            }
        }
Example 4
0
        /// <summary>
        /// Processes a grabbed camera frame: forwards the raw frame to
        /// <see cref="ImageGrabbed"/>, runs the face/eye detector, and raises the
        /// person/face events only when both faces and eyes were found.
        /// </summary>
        /// <param name="mat">The grabbed frame.</param>
        private void OnImageGrabbed(Mat mat)
        {
            var timer = Stopwatch.StartNew();

            ImageGrabbed?.Invoke(mat);
            //skip 2/3 of the frames, due to too much work on CPU
            //if (counter++ % 3 != 0) return;
            var (detectedFaces, detectedEyes) = faceEyeDetector.Detect(mat);
            if (detectedFaces.Any() && detectedEyes.Any())
            {
                PersonDetected?.Invoke(mat);
                FaceDetected?.Invoke(mat, detectedFaces, detectedEyes);
                // Timing is only reported for frames that produced a detection.
                Console.WriteLine($"Frame processing time : {timer.Elapsed}");
            }
        }
Example 5
0
        /// <summary>
        /// Starts face detection.
        /// The camera must be in the <see cref="CameraState.Preview"/> state.
        /// </summary>
        /// <since_tizen> 3 </since_tizen>
        /// <privilege>
        /// http://tizen.org/privilege/camera
        /// </privilege>
        /// <remarks>
        /// This should be called after <see cref="StartPreview"/> is started.
        /// The event handler set using <see cref="FaceDetected"/> is invoked when a face is detected in the preview frame.
        /// Internally, it starts continuous focus, focusing on the detected face.
        /// </remarks>
        /// <exception cref="InvalidOperationException">In case of any invalid operations.</exception>
        /// <exception cref="NotSupportedException">In case this feature is not supported.</exception>
        /// <exception cref="ObjectDisposedException">The camera has already been disposed of.</exception>
        /// <exception cref="UnauthorizedAccessException">In case access to the resources cannot be granted.</exception>
        public void StartFaceDetection()
        {
            ValidateState(CameraState.Preview);

            // The callback is kept in a field rather than a local; presumably this
            // prevents the delegate from being garbage-collected while native code
            // still holds the function pointer -- TODO confirm.
            _faceDetectedCallback = (IntPtr faces, int count, IntPtr userData) =>
            {
                var    result  = new List <FaceDetectionData>();
                IntPtr current = faces;

                // Walk the native array of DetectedFaceStruct entries, advancing the
                // pointer by the marshaled struct size and wrapping each entry in a
                // managed FaceDetectionData.
                for (int i = 0; i < count; i++)
                {
                    result.Add(new FaceDetectionData(current));
                    current = IntPtr.Add(current, Marshal.SizeOf <Native.DetectedFaceStruct>());
                }

                FaceDetected?.Invoke(this, new FaceDetectedEventArgs(result));
            };
            CameraErrorFactory.ThrowIfError(Native.StartFaceDetection(_handle, _faceDetectedCallback, IntPtr.Zero),
                                            "Failed to start face detection");
        }
Example 6
0
 /// <summary>
 /// Re-raises a face-detection notification to this type's own
 /// <see cref="FaceDetected"/> subscribers, preserving the original sender
 /// and arguments.
 /// </summary>
 private void Face_FaceDetected(Object sender, FaceDetectedEventArgs e)
 {
     // Snapshot the delegate so the null check and invocation see the same value.
     var handler = FaceDetected;
     handler?.Invoke(sender, e);
 }
Example 7
0
        /// <summary>
        /// Correlates the faces detected in the current frame with the set tracked
        /// from the previous frame. Raises <see cref="FaceDetectionStarted"/> when the
        /// first new face appears, <see cref="FaceDetected"/> for each face that does
        /// not match an already-active one, and <see cref="FacesNoLongerDetected"/>
        /// when no faces remain; finally replaces <c>ActiveFaces</c> with the current set.
        /// </summary>
        /// <param name="framePixelSize">Pixel size of the source frame (currently unused; kept for signature compatibility).</param>
        /// <param name="detectedFaces">Faces found in the current frame; may be null, in which case nothing happens.</param>
        /// <param name="currentFrame">Bitmap of the current frame; when null, faces are skipped entirely.</param>
        private void HandleFaces(Windows.Foundation.Size framePixelSize, IEnumerable <DetectedFace> detectedFaces, SoftwareBitmap currentFrame)
        {
            // Only track faces while the camera is actively streaming.
            if (captureManager.CameraStreamState != CameraStreamState.Streaming || detectedFaces == null)
            {
                return;
            }

            // The original also declared a "missingFaces" list that was never used;
            // it has been removed.
            List <BitmapBounds> currentFaces = new List <BitmapBounds>();

            foreach (DetectedFace detectedFace in detectedFaces)
            {
                if (currentFrame == null)
                {
                    continue;
                }

                // A face is "new" when it does not plausibly match any face we were
                // already tracking. Any() replaces the original Where(...).Count() == 0.
                bool isKnownFace = ActiveFaces.Any(existingFace =>
                    CoreUtil.AreFacesPotentiallyTheSame(existingFace, detectedFace.FaceBox));

                if (!isKnownFace)
                {
                    var eventArgs = new FaceDetectedEventArgs()
                    {
                        Face   = detectedFace,
                        Bitmap = currentFrame
                    };

                    // First face after a period with none: signal detection start.
                    if (!ActiveFaces.Any())
                    {
                        FaceDetectionStarted?.Invoke(this, new EventArgs());
                    }

                    FaceDetected?.Invoke(this, eventArgs);
                }

                currentFaces.Add(detectedFace.FaceBox);
            }

            if (currentFaces.Count == 0)
            {
                FacesNoLongerDetected?.Invoke(this, new EventArgs());
            }

            ActiveFaces = currentFaces;
        }
 /// <summary>
 /// Adapts the platform face-detection callback into this library's
 /// <see cref="FaceDetected"/> event by wrapping the result frame in the
 /// FaceAnalysis types.
 /// </summary>
 private void PreviewFaceDetectionEffect_FaceDetected(FaceDetectionEffect sender, FaceDetectedEventArgs args)
 {
     var wrappedFrame = new FaceAnalysis.FaceDetectionEffectFrame(args.ResultFrame);
     var wrappedArgs  = new FaceAnalysis.FaceDetectedEventArgs(wrappedFrame);
     FaceDetected?.Invoke(this, wrappedArgs);
 }
 /// <summary>
 /// Forwards face-detection notifications from the frame reader to this
 /// type's subscribers, replacing the sender with this instance.
 /// </summary>
 private void FrameReader_FaceDetected(ExampleMediaFrameReader sender, FaceAnalysis.FaceDetectedEventArgs args)
 {
     // Snapshot the delegate before invoking it.
     var handler = FaceDetected;
     handler?.Invoke(this, args);
 }
Example 10
0
 /// <summary>
 /// Relays the effect's face-detection event to this type's subscribers,
 /// keeping the original sender and arguments intact.
 /// </summary>
 private void FaceDetection_FaceDetected(FaceDetectionEffect sender, FaceDetectedEventArgs args)
 {
     var subscribers = FaceDetected;
     subscribers?.Invoke(sender, args);
 }
        /// <summary>
        /// Raises <see cref="FaceDetected"/> with the captured image and bitmap,
        /// then stores the bitmap as the most recent face on the controls.
        /// </summary>
        /// <param name="image">The captured image in BGR color space.</param>
        /// <param name="bitmap">The bitmap of the detected face.</param>
        private void RaiseFaceDetectedEvent(Image <Bgr, byte> image, Bitmap bitmap)
        {
            // Preserve original ordering: notify subscribers first, then update
            // the cached newest-face bitmap.
            var subscribers = FaceDetected;
            subscribers?.Invoke(this, image, bitmap);

            _controls.TheNewestFace = bitmap;
        }