public static async Task <(Face[] faces, Person person, Emotion[] emotions)> DetectAndIdentifyFace(Bitmap image)
        {
            FaceServiceClient    fsc = new FaceServiceClient(Settings.Instance.FaceApiKey, FaceApiEndpoint);
            EmotionServiceClient esc = new EmotionServiceClient(Settings.Instance.EmotionApiKey);

            // Face detection
            // TODO: pass the detection interval in as a parameter instead of hard-coding 10 seconds
            Emotion[] emotions = null;
            Person    person   = null;

            Face[] faces = null;


            // Detect and identify at most once every 10 seconds
            if (lastFaceDetectTime.AddSeconds(10) < DateTime.Now)
            {
                lastFaceDetectTime = DateTime.Now;

                MemoryStream memoryStream = new MemoryStream();
                image.Save(memoryStream, System.Drawing.Imaging.ImageFormat.Jpeg);

                // Seek back to the beginning of the stream before sending it to the Face API
                memoryStream.Seek(0, SeekOrigin.Begin);
                faces = await fsc.DetectAsync(memoryStream, true, true, new List <FaceAttributeType>() { FaceAttributeType.Age, FaceAttributeType.Gender });

                if (faces.Any())
                {
                    var rec = new Microsoft.ProjectOxford.Common.Rectangle[] { faces.First().FaceRectangle.ToRectangle() };
                    //Emotions

                    // Create a fresh memory stream (reusing the previous one caused problems with parallel access) and seek back to the beginning
                    memoryStream = new MemoryStream();
                    image.Save(memoryStream, System.Drawing.Imaging.ImageFormat.Jpeg);
                    memoryStream.Seek(0, SeekOrigin.Begin);

                    // We call the Emotion API and include the face rectangle information;
                    // this makes the call cheaper, because the Emotion API does not have to run its own face detection
                    emotions = await esc.RecognizeAsync(memoryStream, rec);


                    //Person Identification
                    var groups = await fsc.ListPersonGroupsAsync();

                    var groupId = groups.First().PersonGroupId;

                    // We are interested only in the first candidate
                    var identifyResult = await fsc.IdentifyAsync(groupId, new Guid[] { faces.First().FaceId }, 1);

                    var candidate = identifyResult?.FirstOrDefault()?.Candidates?.FirstOrDefault();

                    if (candidate != null)
                    {
                        person = await fsc.GetPersonAsync(groupId, candidate.PersonId);
                    }
                }
            }
            return(faces, person, emotions);
        }
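A minimal caller sketch for the method above, assuming a System.Drawing.Bitmap named frame captured elsewhere (the variable name and the console output are illustrative, not part of the original sample). All three tuple members can be null when the call lands inside the 10-second throttling window, so each one is checked before use.

// Hypothetical caller: "frame" is assumed to be a Bitmap obtained elsewhere
// (for example, a grabbed camera frame).
var (faces, person, emotions) = await DetectAndIdentifyFace(frame);

if (faces != null && faces.Any())
{
    Console.WriteLine($"Faces detected: {faces.Length}, first face age: {faces.First().FaceAttributes.Age}");

    if (person != null)
    {
        Console.WriteLine($"Identified person: {person.Name}");
    }

    if (emotions != null && emotions.Any())
    {
        Console.WriteLine($"Happiness score: {emotions.First().Scores.Happiness}");
    }
}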
Example #2
        private async Task DetectAndShowEmotion()
        {
            this.progressIndicator.IsActive = true;
            this.DetectFaceAttributes       = true;

            foreach (var child in this.hostGrid.Children.Where(c => !(c is Image)).ToArray())
            {
                this.hostGrid.Children.Remove(child);
            }

            ImageAnalyzer imageWithFace = this.DataContext as ImageAnalyzer;

            if (imageWithFace != null)
            {
                double renderedImageXTransform = this.imageControl.RenderSize.Width / this.bitmapImage.PixelWidth;
                double renderedImageYTransform = this.imageControl.RenderSize.Height / this.bitmapImage.PixelHeight;

                if (imageWithFace.DetectedFaces == null)
                {
                    await imageWithFace.DetectFacesAsync(detectFaceAttributes : this.DetectFaceAttributes, detectFaceLandmarks : this.DetectFaceLandmarks);
                }

                foreach (Face face in imageWithFace.DetectedFaces)
                {
                    FaceIdentificationBorder faceUI = new FaceIdentificationBorder();

                    faceUI.Margin = new Thickness((face.FaceRectangle.Left * renderedImageXTransform) + ((this.ActualWidth - this.imageControl.RenderSize.Width) / 2),
                                                  (face.FaceRectangle.Top * renderedImageYTransform) + ((this.ActualHeight - this.imageControl.RenderSize.Height) / 2), 0, 0);

                    faceUI.BalloonBackground = this.BalloonBackground;
                    faceUI.BalloonForeground = this.BalloonForeground;

                    faceUI.ShowFaceRectangle(face.FaceRectangle.Width * renderedImageXTransform, face.FaceRectangle.Height * renderedImageYTransform);

                    Microsoft.ProjectOxford.Common.Rectangle rectangle = new Microsoft.ProjectOxford.Common.Rectangle();
                    rectangle.Height = face.FaceRectangle.Height;
                    rectangle.Left   = face.FaceRectangle.Left;
                    rectangle.Top    = face.FaceRectangle.Top;
                    rectangle.Width  = face.FaceRectangle.Width;

                    Emotion emotion = new Emotion();
                    emotion.FaceRectangle = rectangle;
                    emotion.Scores        = face.FaceAttributes.Emotion;

                    faceUI.ShowEmotionData(emotion);

                    this.hostGrid.Children.Add(faceUI);

                    if (!this.ShowMultipleFaces)
                    {
                        break;
                    }
                }
            }

            this.progressIndicator.IsActive = false;
        }
Example #3
        public async void DrawFaceRectangle(Emotion[] emotionResult, BitmapImage bitMapImage, String urlString)
        {
            if (emotionResult != null && emotionResult.Length > 0)
            {
                Windows.Storage.Streams.IRandomAccessStream stream = await Windows.Storage.Streams.RandomAccessStreamReference.CreateFromUri(new Uri(urlString)).OpenReadAsync();


                BitmapDecoder decoder = await BitmapDecoder.CreateAsync(stream);


                double resizeFactorH = ImageCanvas.Height / decoder.PixelHeight;
                double resizeFactorW = ImageCanvas.Width / decoder.PixelWidth;


                foreach (var emotion in emotionResult)
                {
                    Microsoft.ProjectOxford.Common.Rectangle faceRect = emotion.FaceRectangle;

                    Image       Img    = new Image();
                    BitmapImage BitImg = new BitmapImage();
                    // Open the rectangle image; this will be our face box
                    Windows.Storage.Streams.IRandomAccessStream box = await Windows.Storage.Streams.RandomAccessStreamReference.CreateFromUri(new Uri("ms-appx:///Assets/rectangle.png", UriKind.Absolute)).OpenReadAsync();

                    BitImg.SetSource(box);

                    // Rescale each face box based on the API's face rectangle
                    var maxWidth  = faceRect.Width * resizeFactorW;
                    var maxHeight = faceRect.Height * resizeFactorH;

                    var origHeight = BitImg.PixelHeight;
                    var origWidth  = BitImg.PixelWidth;


                    var ratioX    = maxWidth / (float)origWidth;
                    var ratioY    = maxHeight / (float)origHeight;
                    var ratio     = Math.Min(ratioX, ratioY);
                    var newHeight = (int)(origHeight * ratio);
                    var newWidth  = (int)(origWidth * ratio);

                    BitImg.DecodePixelWidth  = newWidth;
                    BitImg.DecodePixelHeight = newHeight;

                    // Set the starting x and y coordinates for each face box
                    Thickness margin = Img.Margin;

                    margin.Left = faceRect.Left * resizeFactorW;
                    margin.Top  = faceRect.Top * resizeFactorH;

                    Img.Margin = margin;

                    Img.Source = BitImg;
                    ImageCanvas.Children.Add(Img);
                }
            }
        }
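A hedged usage sketch for the method above: the subscription key and image URL are placeholders, and the call shown is the URL-based overload of EmotionServiceClient.RecognizeAsync, so the same URL can be reused for the overlay.

// Placeholders: replace the key and URL with real values before running.
var emotionClient = new EmotionServiceClient("<emotion-api-key>");
string imageUrl = "https://example.com/group-photo.jpg";

// Recognize emotions directly from the URL, then overlay the face boxes.
Emotion[] emotions = await emotionClient.RecognizeAsync(imageUrl);
var bitmap = new BitmapImage(new Uri(imageUrl));

DrawFaceRectangle(emotions, bitmap, imageUrl);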
Example #4
 private Face CreateFace(Microsoft.ProjectOxford.Common.Rectangle rect)
 {
     return(new Face
     {
         FaceRectangle = new FaceRectangle
         {
             Left = rect.Left,
             Top = rect.Top,
             Width = rect.Width,
             Height = rect.Height
         }
     });
 }
Example #5
 private Face CreateFace(Microsoft.ProjectOxford.Common.Rectangle faceRectangle)
 {
     return(new Face
     {
         FaceRectangle = new FaceRectangle
         {
             Left = faceRectangle.Left,
             Top = faceRectangle.Top,
             Width = faceRectangle.Width,
             Height = faceRectangle.Height
         }
     });
 }
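Both listings above show the same conversion helper. An illustrative one-line use, where emotion is assumed to be a result returned by the Emotion API:

// "emotion" is assumed to be an Emotion API result; its rectangle is wrapped
// in a Face so that code expecting Face objects can draw or track it.
Face face = CreateFace(emotion.FaceRectangle);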
Example #6
        private int RectDist(FaceRectangle rectID, Microsoft.ProjectOxford.Common.Rectangle rectE)
        {
            int xcID = Convert.ToInt32(Math.Round(Convert.ToDecimal(rectID.Left + rectID.Width / 2)));
            int ycID = Convert.ToInt32(Math.Round(Convert.ToDecimal(rectID.Top + rectID.Height / 2)));
            int xcE  = Convert.ToInt32(Math.Round(Convert.ToDecimal(rectE.Left + rectE.Width / 2)));
            int ycE  = Convert.ToInt32(Math.Round(Convert.ToDecimal(rectE.Top + rectE.Height / 2)));

            int dx = xcID - xcE;
            int dy = ycID - ycE;

            // Squared Euclidean distance between the two rectangle centres
            // (the ^ operator would be bitwise XOR in C#, not exponentiation)
            int dist = dx * dx + dy * dy;

            return(dist);
        }
        public void DrawFaceRectangle(Image image, BitmapImage bitmapSource, Emotion[] emotionResult)
        {
            if (emotionResult != null && emotionResult.Length > 0)
            {
                DrawingVisual  visual         = new DrawingVisual();
                DrawingContext drawingContext = visual.RenderOpen();

                drawingContext.DrawImage(bitmapSource,
                                         new Rect(0, 0, bitmapSource.Width, bitmapSource.Height));

                double dpi          = bitmapSource.DpiX;
                double resizeFactor = 96 / dpi;

                foreach (var emotion in emotionResult)
                {
                    Microsoft.ProjectOxford.Common.Rectangle faceRect = emotion.FaceRectangle;

                    drawingContext.DrawRectangle(
                        Brushes.Transparent,
                        new Pen(Brushes.Cyan, 4),
                        new Rect(
                            faceRect.Left * resizeFactor,
                            faceRect.Top * resizeFactor,
                            faceRect.Width * resizeFactor,
                            faceRect.Height * resizeFactor)
                        );
                }

                drawingContext.Close();

                RenderTargetBitmap faceWithRectBitmap = new RenderTargetBitmap(
                    (int)(bitmapSource.PixelWidth * resizeFactor),
                    (int)(bitmapSource.PixelHeight * resizeFactor),
                    96,
                    96,
                    PixelFormats.Pbgra32);

                faceWithRectBitmap.Render(visual);

                image.Source = faceWithRectBitmap;
            }
        }
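A hedged sketch of how RectDist above could be used to pair results from the two services; the faces and emotions arrays are assumed to come from FaceServiceClient.DetectAsync and EmotionServiceClient.RecognizeAsync on the same image.

// For each detected face, pick the emotion result whose rectangle centre is
// closest to the face rectangle ("faces" and "emotions" are assumed inputs).
foreach (var face in faces)
{
    Emotion closest = emotions
        .OrderBy(e => RectDist(face.FaceRectangle, e.FaceRectangle))
        .FirstOrDefault();

    if (closest != null)
    {
        Console.WriteLine($"Face {face.FaceId}: happiness {closest.Scores.Happiness}");
    }
}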
Example #8
        public async Task FaceDetection(string pathToFile)
        {
            string subscriptionKeyFace = "insert Face API Key here";
            string subscriptionKeyEmo  = "insert Emotions API Key here";


            FaceServiceClient fSC = new FaceServiceClient(subscriptionKeyFace);

            EmotionServiceClient eSC = new EmotionServiceClient(subscriptionKeyEmo);

            using (Stream s = File.OpenRead(pathToFile))
            {
                var requiredFaceAttributes = new FaceAttributeType[] {
                    FaceAttributeType.Age,
                    FaceAttributeType.Gender,
                    FaceAttributeType.Smile,
                    FaceAttributeType.FacialHair,
                    FaceAttributeType.HeadPose,
                    FaceAttributeType.Glasses
                };

                var faces = await fSC.DetectAsync(s,
                                                  returnFaceLandmarks : true,
                                                  returnFaceAttributes : requiredFaceAttributes);


                numOfFacesLabel.Text = "Number of Faces: " + faces.Length;

                var faceRectangles = new List <Microsoft.ProjectOxford.Common.Rectangle>();

                foreach (var face in faces)
                {
                    var rect = face.FaceRectangle;

                    var landmarks = face.FaceLandmarks;

                    var age     = face.FaceAttributes.Age;
                    var gender  = face.FaceAttributes.Gender;
                    var glasses = face.FaceAttributes.Glasses;


                    var rectangle = new Microsoft.ProjectOxford.Common.Rectangle
                    {
                        Height = face.FaceRectangle.Height,
                        Width  = face.FaceRectangle.Width,
                        Top    = face.FaceRectangle.Top,
                        Left   = face.FaceRectangle.Left,
                    };
                    faceRectangles.Add(rectangle);


                    try
                    {
                        ageLabel.Text = "Age: " + age;
                        Console.Write("Age: " + age);

                        genderLabel.Text  = "Gender: " + gender;
                        glassesLabel.Text = "Does the person wear glasses? " + glasses;
                    }
                    catch (Exception e)
                    {
                        Console.Write("Error: " + e);
                    }


                }

                // Call the Emotion API once for all detected faces; including the
                // collected face rectangles makes the call cheaper, because the
                // Emotion API does not have to run its own face detection.
                if (faces.Length > 0)
                {
                    using (Stream str = File.OpenRead(pathToFile))
                    {
                        var emotions = await eSC.RecognizeAsync(str, faceRectangles.ToArray());

                        string emotionsList = "";

                        foreach (var emotion in emotions)
                        {
                            emotionsList += $@"Anger: {emotion.Scores.Anger}
                                Contempt: {emotion.Scores.Contempt}
                                Disgust: {emotion.Scores.Disgust}
                                Fear: {emotion.Scores.Fear}
                                Happiness: {emotion.Scores.Happiness}
                                Neutral: {emotion.Scores.Neutral}
                                Sadness: {emotion.Scores.Sadness}
                                Surprise: {emotion.Scores.Surprise}";
                        }

                        emotionsLabel.Text = emotionsList;
                    }
                }
            }
        }
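A minimal way to exercise the method above (the file path is a placeholder); the two subscription key strings inside the method need to be filled in first.

// The path is a placeholder; the method updates the UI labels as a side effect,
// so the call only needs to be awaited.
await FaceDetection(@"C:\images\sample.jpg");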