/// <summary>
 /// Draws every facial landmark the Face API returned for a single face,
 /// one call to DrawLandmark per feature point.
 /// </summary>
 /// <param name="face">The detected face entity.</param>
 private void DrawLandmarks(Microsoft.ProjectOxford.Face.Contract.Face face)
 {
     var landmarks = face.FaceLandmarks;

     // All landmark points, in the same order the original drew them.
     var points = new[]
     {
         landmarks.EyebrowLeftInner,
         landmarks.EyebrowLeftOuter,
         landmarks.EyebrowRightInner,
         landmarks.EyebrowRightOuter,
         landmarks.EyeLeftBottom,
         landmarks.EyeLeftInner,
         landmarks.EyeLeftOuter,
         landmarks.EyeLeftTop,
         landmarks.EyeRightBottom,
         landmarks.EyeRightInner,
         landmarks.EyeRightOuter,
         landmarks.EyeRightTop,
         landmarks.MouthLeft,
         landmarks.MouthRight,
         landmarks.NoseLeftAlarOutTip,
         landmarks.NoseLeftAlarTop,
         landmarks.NoseRightAlarOutTip,
         landmarks.NoseRightAlarTop,
         landmarks.NoseRootLeft,
         landmarks.NoseRootRight,
         landmarks.NoseTip,
         landmarks.PupilLeft,
         landmarks.PupilRight,
         landmarks.UnderLipBottom,
         landmarks.UnderLipTop,
         landmarks.UpperLipBottom,
         landmarks.UpperLipTop,
     };

     foreach (var point in points)
     {
         DrawLandmark(point);
     }
 }
Example #2
0
 /// <summary>
 /// Wraps an existing detected face, copying its identifying data into this instance.
 /// </summary>
 /// <param name="face">The face whose data is copied; not retained.</param>
 public NamedFace(Face face)
 {
     this.FaceId          = face.FaceId;
     this.FaceRectangle   = face.FaceRectangle;
     this.FacialLandmarks = face.FacialLandmarks;
     this.Attributes      = face.Attributes;
 }
        /// <summary>
        /// Accepts an array of Face objects and returns an array of GUIDs associated with the Face objects.
        /// </summary>
        /// <param name="faces">The detected faces; must not be null.</param>
        /// <returns>The face IDs, in the same order as <paramref name="faces"/>.</returns>
        public static Guid[] FacesToFaceIds(Face[] faces)
        {
            // Array.ConvertAll replaces the hand-rolled index loop; order is preserved.
            return Array.ConvertAll(faces, f => f.FaceId);
        }
        /// <summary>
        /// Draws a face bounding box on the UI canvas and overlays a text block
        /// listing whichever face attributes (age, gender, smile, glasses,
        /// facial hair) the service returned.
        /// </summary>
        /// <param name="widthScale">Horizontal image-to-canvas scale divisor.</param>
        /// <param name="heightScale">Vertical image-to-canvas scale divisor.</param>
        /// <param name="face">The detected face entity to render.</param>
        private void DrawFaceOnUI(double widthScale, double heightScale, Microsoft.ProjectOxford.Face.Contract.Face face)
        {
            try
            {
                // Bounding box, scaled from image coordinates down to canvas coordinates.
                Rectangle box = new Rectangle();
                box.Width           = (uint)(face.FaceRectangle.Width / widthScale);
                box.Height          = (uint)(face.FaceRectangle.Height / heightScale);
                box.Fill            = new SolidColorBrush(Colors.Transparent);
                box.Stroke          = new SolidColorBrush(Colors.Lime);
                box.StrokeThickness = 2;
                // Margin positions the box within the canvas (top-left anchored).
                box.Margin          = new Thickness((uint)(face.FaceRectangle.Left / widthScale), (uint)(face.FaceRectangle.Top / heightScale), 0, 0);
                facesCanvas.Children.Add(box);

                // Add face attributes found; the label shares the box's top-left anchor.
                var tb = new TextBlock();
                tb.Foreground = new SolidColorBrush(Colors.Lime);
                tb.Padding    = new Thickness(4);
                tb.Margin     = new Thickness((uint)(face.FaceRectangle.Left / widthScale), (uint)(face.FaceRectangle.Top / heightScale), 0, 0);

                // Each attribute is appended only when present. FaceAttributes may be
                // null when attribute analysis was not requested, hence the ?. guards.
                if (face.FaceAttributes?.Age > 0)
                {
                    tb.Text += "Age: " + face.FaceAttributes.Age + Environment.NewLine;
                }

                if (!string.IsNullOrEmpty(face.FaceAttributes?.Gender))
                {
                    tb.Text += "Gender: " + face.FaceAttributes.Gender + Environment.NewLine;
                }

                if (face.FaceAttributes?.Smile > 0)
                {
                    tb.Text += "Smile: " + face.FaceAttributes.Smile + Environment.NewLine;
                }

                // Only report glasses when some kind of glasses was actually detected.
                if (face.FaceAttributes != null && face.FaceAttributes.Glasses != Microsoft.ProjectOxford.Face.Contract.Glasses.NoGlasses)
                {
                    tb.Text += "Glasses: " + face.FaceAttributes?.Glasses + Environment.NewLine;
                }

                if (face.FaceAttributes?.FacialHair != null)
                {
                    tb.Text += "Beard: " + face.FaceAttributes.FacialHair.Beard + Environment.NewLine;
                    tb.Text += "Moustache: " + face.FaceAttributes.FacialHair.Moustache + Environment.NewLine;
                    tb.Text += "Sideburns: " + face.FaceAttributes.FacialHair.Sideburns + Environment.NewLine;
                }

                facesCanvas.Children.Add(tb);
            }
            catch (Exception ex)
            {
                // Best effort: a drawing failure is logged, never rethrown, so one bad
                // face does not take down the whole render pass.
                this.Log("Failure during DrawFaceOnUI()", ex);
            }
        }
Example #5
0
 /// <summary>
 /// Builds a <see cref="FaceViewModel"/> from a detected face and the photo it came from.
 /// </summary>
 /// <param name="face">The detected face; must not be null.</param>
 /// <param name="photo">The source photo, cropped to the face rectangle for the thumbnail.</param>
 /// <returns>A view model carrying the cropped image and any available attributes.</returns>
 public static async Task<FaceViewModel> FromFace(Face face, StorageFile photo)
 {
     if (face == null)
     {
         throw new ArgumentNullException(nameof(face));
     }

     // Attribute objects may be null when attribute analysis was not requested,
     // so hoist them once and null-propagate below.
     var attributes = face.FaceAttributes;
     var facialHair = attributes?.FacialHair;
     var croppedPhoto = await CropPhoto(photo, face.FaceRectangle);

     return new FaceViewModel
     {
         Photo = croppedPhoto,
         Age = attributes?.Age,
         Gender = attributes?.Gender,
         Smile = attributes?.Smile,
         Beard = facialHair?.Beard,
         Moustache = facialHair?.Moustache,
         Sideburns = facialHair?.Sideburns
     };
 }
Example #6
0
        /// <summary> Function which submits a frame to the Emotion API. </summary>
        /// <param name="frame"> The video frame to submit. </param>
        /// <returns> A <see cref="Task{LiveCameraResult}"/> representing the asynchronous API call,
        ///     and containing the emotions returned by the API. </returns>
        private async Task<LiveCameraResult> EmotionAnalysisFunction(VideoFrame frame)
        {
            // Encode image.
            var jpg = frame.Image.ToMemoryStream(".jpg", s_jpegParams);

            // See if we have local face detections for this image.
            var localFaces = (OpenCvSharp.Rect[])frame.UserData;

            FaceAPI.Contract.Face[] faces;
            if (localFaces == null || localFaces.Length > 0)
            {
                // If localFaces is null, we're not performing local face detection.
                // Use Cognitive Services to do the face detection.
                Properties.Settings.Default.FaceAPICallCount++;
                faces = await _faceClient.DetectAsync(
                    jpg,
                    /* returnFaceId= */ false,
                    /* returnFaceLandmarks= */ false,
                    new[] { FaceAPI.FaceAttributeType.Emotion });
            }
            else
            {
                // Local face detection found no faces; don't call Cognitive Services.
                faces = new FaceAPI.Contract.Face[0];
            }

            // Extract emotion scores once; the original recomputed this Select for the result.
            var emotions = faces.Select(f => f.FaceAttributes.Emotion).ToArray();

            foreach (var emotion in emotions)
            {
                // Trigger a cheer-up action for any strongly sad face.
                if (emotion.Sadness >= 0.6)
                {
                    MessageBox.Show($"I've detected you are {emotion.Sadness.ToString()} sad, something to cheer you up is on it's way!");
                    await AddPanicEmergency(emotion.Sadness.ToString());
                }
            }

            // Output.
            return new LiveCameraResult
            {
                Faces = faces.Select(f => CreateFace(f.FaceRectangle)).ToArray(),
                EmotionScores = emotions
            };
        }
Example #7
0
        /// <summary>
        /// Append detected face to UI binding collection
        /// </summary>
        /// <param name="collections">UI binding collection</param>
        /// <param name="imagePath">Original image path, used for rendering face region</param>
        /// <param name="face">Face structure returned from service</param>
        public static void UpdateFace(ObservableCollection<Models.Face> collections, string imagePath, Microsoft.ProjectOxford.Face.Contract.Face face)
        {
            // Reload the image for rendering (presumably with orientation applied,
            // per the helper's name — confirm against its implementation).
            var renderingImage = LoadImageAppliedOrientation(imagePath);
            var rect = face.FaceRectangle;

            var model = new Models.Face()
            {
                ImageFile = renderingImage,
                Left      = rect.Left,
                Top       = rect.Top,
                Width     = rect.Width,
                Height    = rect.Height,
                FaceId    = face.FaceId.ToString(),
            };

            collections.Add(model);
        }
        /// <summary>
        /// Clears the previous frame's face markers from the canvas and draws one
        /// marker per detected face, scaled from image coordinates to canvas coordinates.
        /// </summary>
        /// <param name="faces">Detected faces; may be null (nothing is drawn).</param>
        /// <param name="widthRatio">Image-to-canvas horizontal scale divisor.</param>
        /// <param name="heightRatio">Image-to-canvas vertical scale divisor.</param>
        private void UpdateUIWithFaces(Face[] faces, double widthRatio, double heightRatio)
        {
            // Cleaning children from stack panels. Iterate backwards because we
            // mutate the collection while walking it.
            for (int i = canvas.Children.Count - 1; i >= 0; i--)
            {
                UIElement child = canvas.Children[i];
                if (child.GetType() == typeof(IntelligentMallKiosk.FaceMarker))
                {
                    canvas.Children.Remove(child);
                }
            }

            Ids.Children.Clear();

            if (faces == null)
            {
                return;
            }

            int index = 0;
            foreach (var face in faces)
            {
                // NOTE(review): if a face is not actually a NamedFace, this as-cast
                // passes null into FaceMarker — confirm the constructor tolerates it.
                FaceMarker fm = new FaceMarker(face as NamedFace);

                fm.Height = Math.Round(face.FaceRectangle.Height / heightRatio);
                fm.Width = Math.Round(face.FaceRectangle.Width / widthRatio);
                canvas.Children.Add(fm);

                // BUG FIX: index was never incremented, so every face got the same
                // brush. Cycle through the palette so many faces cannot run off the
                // end. (TODO confirm brushList is a List — use Length if an array.)
                fm.HairlineColor = brushList[index % brushList.Count];
                index++;

                Canvas.SetZIndex(fm, 1);
                Canvas.SetTop(fm, Math.Round(face.FaceRectangle.Top / heightRatio));
                Canvas.SetLeft(fm, Math.Round(face.FaceRectangle.Left / widthRatio));
            }
        }
 /// <summary>
 /// Placeholder for drawing a detected face's bounding rectangle.
 /// NOTE(review): the body contains only commented-out code, so calling this is
 /// currently a no-op — either implement it or remove it and its call sites.
 /// </summary>
 /// <param name="face">The detected face whose rectangle would be drawn.</param>
 private void DrawFaceRectangle(Microsoft.ProjectOxford.Face.Contract.Face face)
 {
     //_viewer.DrawVector
 }
Example #10
0
        /// <summary>
        /// Mark bitmap with given face information
        /// </summary>
        /// <param name="originalBitmap">Source bitmap; never modified.</param>
        /// <param name="faces">Detected faces to annotate; may be null.</param>
        /// <returns>A mutable copy of the bitmap with a rectangle and caption per face.</returns>
        private static Bitmap drawFaceRectanglesOnBitmap(Bitmap originalBitmap, Face[] faces)
        {
            // Work on a mutable ARGB copy so the caller's bitmap stays untouched.
            Bitmap annotated = originalBitmap.Copy(Bitmap.Config.Argb8888, true);

            if (faces == null)
            {
                return annotated;
            }

            Canvas canvas = new Canvas(annotated);
            Paint paint = new Paint
            {
                Color = StrokeColor,
                StrokeWidth = strokeWidth,
                TextSize = textSize,
            };

            foreach (Face face in faces)
            {
                FaceRectangle rect = face.FaceRectangle;

                // Outline the face.
                paint.SetStyle(Paint.Style.Stroke);
                canvas.DrawRect(
                        rect.Left,
                        rect.Top,
                        rect.Left + rect.Width,
                        rect.Top + rect.Height,
                        paint);

                // Caption (gender, age) rendered just above the rectangle.
                paint.SetStyle(Paint.Style.Fill);
                canvas.DrawText(face.Attributes.Gender + ", " + 
                    face.Attributes.Age + " y/o", 
                    rect.Left, rect.Top - textSize, paint);
            }

            return annotated;
        }
Example #11
0
 /// <summary>
 /// Shows a dialog listing every known person across all groups; if the user
 /// confirms a selection, registers this face against the chosen person.
 /// </summary>
 /// <param name="face">The detected face to assign.</param>
 private async Task AssignFaceToPersonAsync(Face face)
 {
     // Flatten all persons from every group into one pickable list.
     var allPersons = new List<Person>();
     foreach (var groupItem in PersonGroups)
     {
         allPersons.AddRange(groupItem.Persons);
     }

     var listBox = new ListBox
     {
         ItemsSource = allPersons,
         DisplayMemberPath = "Name",
     };

     var dialog = new ContentDialog
     {
         IsPrimaryButtonEnabled = true,
         IsSecondaryButtonEnabled = true,
         PrimaryButtonText = "Okay",
         SecondaryButtonText = "Cancel",
         Content = listBox,
     };

     var result = await dialog.ShowAsync();
     if (result != ContentDialogResult.Primary)
     {
         return;
     }

     // Only proceed when the user actually picked a person.
     if (listBox.SelectedItem is Person person)
     {
         // Find the group the selected person belongs to, then attach the face.
         var groupItem = PersonGroups.First(g => g.Persons.Contains(person));
         if (groupItem != null)
         {
             await AddPersonFaceAsync(groupItem, person.PersonId, face.FaceId);
         }
     }
 }
Example #12
0
        /// <summary>
        /// Attempts to identify the given face against every person group.
        /// Returns the matching <see cref="Person"/> whose candidate confidence
        /// exceeds 0.8, or null when no confident match is found. When multiple
        /// candidates qualify, the last one examined wins (the loops never break).
        /// </summary>
        /// <param name="face">The detected face to identify.</param>
        /// <returns>The identified person, or null.</returns>
        private async Task<Person> IdentifyPersonAsync(Face face)
        {
            Person result = null;
            foreach(var groupItem in PersonGroups)
            {
                // LastTrained.Year > 2000 serves as a "has ever been trained"
                // sentinel — presumably untrained groups carry a default date;
                // confirm against where LastTrained is set.
                if (groupItem.LastTrained.Year > 2000)
                {
                    try
                    {
                        var results =
                            await _client.IdentifyAsync(groupItem.Group.PersonGroupId, new Guid[] {face.FaceId});
                        if (results != null && results.Length > 0)
                        {
                            foreach (var res in results)
                            {
                                foreach (var candidate in res.Candidates)
                                {
                                    // Only accept high-confidence candidates.
                                    if (candidate.Confidence > 0.8)
                                    {
                                        // Resolve the candidate's PersonId back to a
                                        // cached Person across all groups.
                                        foreach (var pg in PersonGroups)
                                        {
                                            if (pg.Persons.Any(p => p.PersonId == candidate.PersonId))
                                            {
                                                result = pg.Persons.First(p => p.PersonId == candidate.PersonId);
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }
                    catch (ClientException ex)
                    {
                        // Best effort per group: an API failure for one group must not
                        // stop identification against the remaining groups.
                        Debug.WriteLine(ex.Message);
                    }

                }
                else
                {
                    // Untrained group: tell the user, unless training is already underway.
                    if (!groupItem.IsTraining)
                    {
                        MessageDialog dialog = new MessageDialog(string.Format("The person group, {0}, has not been trained", groupItem.Group.Name));
                        await dialog.ShowAsync();
                    }
                }
            }
            return result;
        }
Example #13
0
        /// <summary>
        /// Draws an interactive overlay for each detected face: a red outline
        /// rectangle with an attribute caption, plus a right-tap popup menu offering
        /// "Assign" and "Identify" actions for that face.
        /// </summary>
        /// <param name="faces">Detected faces to render; assumed non-null.</param>
        private void RenderFaceDetectionResults(Face[] faces)
        {
            ResetFaceDetectionOverlay();
            foreach (var face in faces)
            {
                // Scale factors from source-image pixels to on-screen coordinates.
                var heightRatio = SelectedImage.ActualHeight / _currentImage.PixelHeight;
                var widthRatio = SelectedImage.ActualWidth / _currentImage.PixelWidth;
                var localRectHeight = face.FaceRectangle.Height * heightRatio;
                var localRectWidth = face.FaceRectangle.Width * widthRatio;

                Rectangle rect = new Rectangle()
                {
                    Stroke = new SolidColorBrush(Colors.Red),
                    Height = localRectHeight,
                    Width = localRectWidth,
                    IsTapEnabled = true,
                    IsRightTapEnabled = true,
                    IsHitTestVisible = true
                };
                PopupMenu menu = new PopupMenu();


                // The image is centered on the canvas, so offset by half the slack.
                var horizontalOffset = (PhotoOverlayCanvas.ActualWidth - SelectedImage.ActualWidth) / 2;
                var verticalOffset = (PhotoOverlayCanvas.ActualHeight - SelectedImage.ActualHeight) / 2;

                var localRectLeft = face.FaceRectangle.Left * widthRatio + horizontalOffset;
                var localRectTop = face.FaceRectangle.Top * heightRatio + verticalOffset;

                // Transparent background keeps the panel hit-testable for right-tap.
                StackPanel outlinePanel = new StackPanel()
                {
                    IsRightTapEnabled = true,
                    IsHitTestVisible = true,
                    Background = new SolidColorBrush(Colors.Transparent)
                };
                // Show the per-face popup menu on right-tap.
                outlinePanel.RightTapped += async (o, e) =>
                {
                    var command = await menu.ShowForSelectionAsync(GetElementRect(o as FrameworkElement));
                };
                // Final positioning waits for Loaded, when ActualWidth/ActualHeight
                // are known: center horizontally over the face, caption above it.
                outlinePanel.Loaded += (s, e) =>
                {
                    Canvas.SetLeft(outlinePanel, localRectLeft - ((outlinePanel.ActualWidth - localRectWidth) / 2));
                    Canvas.SetTop(outlinePanel, localRectTop - (outlinePanel.ActualHeight - localRectHeight));
                };
                Grid grid = new Grid() { Background = new SolidColorBrush(Colors.White) };
                TextBlock description = new TextBlock()
                {
                    Text = string.Format("{0} year old {1}", face.Attributes.Age, face.Attributes.Gender),
                    Foreground = new SolidColorBrush(Colors.Black),
                    HorizontalAlignment = HorizontalAlignment.Center
                };
                grid.Children.Add(description);
                outlinePanel.Children.Add(grid);
                outlinePanel.Children.Add(rect);
                PhotoOverlayCanvas.Children.Add(outlinePanel);

                // Each command carries this iteration's face via UICommand.Id.
                menu.Commands.Add(new UICommand("Assign", async (o) =>
                {
                    var selectedFace = o.Id as Face;
                    await AssignFaceToPersonAsync(selectedFace);
                }, face));
                menu.Commands.Add(new UICommand("Identify", async (o) =>
                {
                    var selectedFace = o.Id as Face;
                    var person = await IdentifyPersonAsync(selectedFace);
                    if (person != null)
                    {
                        // Replace the age/gender caption with the identified name and
                        // re-center, since the caption width may have changed.
                        description.Text = person.Name;
                        Canvas.SetLeft(outlinePanel, localRectLeft - ((outlinePanel.ActualWidth - localRectWidth) / 2));
                    }
                }, face));
            }
        }
        /// <summary>
        /// Renders the base image with a colored rectangle and an attribute caption
        /// drawn over each detected face.
        /// </summary>
        /// <param name="baseImage">Base image</param>
        /// <param name="faces">The faces to draw rectangles for</param>
        /// <param name="emotionScores">The emotion scores (currently unused by this method)</param>
        /// <returns>Rendered bitmapsource</returns>
        public static BitmapSource DrawOverlay(BitmapSource baseImage, FaceAPI.Face[] faces, EmotionScores[] emotionScores)
        {
            // BUG FIX: was "PixelHeight / 320" — integer division truncated the scale
            // and yielded 0 (invisible annotations) for images shorter than 320px.
            double annotationScale = baseImage.PixelHeight / 320.0;

            DrawingVisual  visual         = new DrawingVisual();
            DrawingContext drawingContext = visual.RenderOpen();

            drawingContext.DrawImage(baseImage, new Rect(0, 0, baseImage.Width, baseImage.Height));

            for (int i = 0; i < faces.Length; i++)
            {
                FaceAPI.Face face = faces[i];

                if (face.FaceRectangle != null)
                {
                    double lineThickness = 4 * annotationScale;

                    Rect faceRect = new Rect(
                        face.FaceRectangle.Left, face.FaceRectangle.Top,
                        face.FaceRectangle.Width, face.FaceRectangle.Height);
                    string text = string.Empty;

                    text += Helper.GetFaceAttributesAsString(face.FaceAttributes);

                    faceRect.Inflate(6 * annotationScale, 6 * annotationScale);

                    // Gender-colored outline. Null-safe ordinal comparison replaces the
                    // original's ToLower().Equals, which crashed on a null Gender; any
                    // non-"male" value (including null) falls back to the female brush,
                    // matching the original's else-branch behavior.
                    SolidColorBrush genderBrush =
                        string.Equals(face.FaceAttributes?.Gender, "male", StringComparison.OrdinalIgnoreCase)
                            ? MaleBrush
                            : FemaleBrush;

                    drawingContext.DrawRectangle(
                        Brushes.Transparent,
                        new Pen(genderBrush, lineThickness),
                        faceRect);

                    // Generate rectangle background for text and place text in it.
                    if (text != string.Empty)
                    {
                        FormattedText ft = new FormattedText(text,
                                                             CultureInfo.CurrentCulture, FlowDirection.LeftToRight, Typeface,
                                                             16 * annotationScale, Brushes.Black);

                        double pad  = 3 * annotationScale;
                        double ypad = pad;
                        double xpad = pad + 4 * annotationScale;

                        // Anchor the caption just above the face rectangle.
                        Point origin = new Point(
                            faceRect.Left + xpad - lineThickness / 2,
                            faceRect.Top - ft.Height - ypad + lineThickness / 2);

                        Rect rect = ft.BuildHighlightGeometry(origin).GetRenderBounds(null);
                        rect.Inflate(xpad, ypad);

                        drawingContext.DrawRectangle(genderBrush, null, rect);
                        drawingContext.DrawText(ft, origin);
                    }
                }
            }

            drawingContext.Close();

            // Rasterize at the source image's pixel size and DPI.
            RenderTargetBitmap outputBitmap = new RenderTargetBitmap(
                baseImage.PixelWidth, baseImage.PixelHeight,
                baseImage.DpiX, baseImage.DpiY, PixelFormats.Pbgra32);

            outputBitmap.Render(visual);

            return outputBitmap;
        }