private async Task DetectarRostro(BitmapImage bitmapSource, Image img, string path)
        {
            try
            {
                faceList = await UploadAndDetectFaces(path);

                if (faceList.Count > 0)
                {
                    // Prepare to draw rectangles around the faces.
                    DrawingVisual  visual         = new DrawingVisual();
                    DrawingContext drawingContext = visual.RenderOpen();
                    drawingContext.DrawImage(bitmapSource,
                                             new Rect(0, 0, bitmapSource.Width, bitmapSource.Height));
                    double dpi = bitmapSource.DpiX;
                    // Some images don't contain dpi info.
                    resizeFactor     = (dpi == 0) ? 1 : 96 / dpi;
                    faceDescriptions = new String[faceList.Count];

                    for (int i = 0; i < faceList.Count; ++i)
                    {
                        DetectedFace face = faceList[i];

                        // Draw a rectangle on the face.
                        drawingContext.DrawRectangle(
                            System.Windows.Media.Brushes.Transparent,
                            new System.Windows.Media.Pen(System.Windows.Media.Brushes.Red, 2),
                            new Rect(
                                face.FaceRectangle.Left * resizeFactor,
                                face.FaceRectangle.Top * resizeFactor,
                                face.FaceRectangle.Width * resizeFactor,
                                face.FaceRectangle.Height * resizeFactor
                                )
                            );

                        // Store the face description.
                        faceDescriptions[i]  = FaceDescription(face);
                        defaultStatusBarText = faceDescriptions[i];
                    }

                    drawingContext.Close();

                    // Display the image with the rectangle around the face.
                    RenderTargetBitmap faceWithRectBitmap = new RenderTargetBitmap(
                        (int)(bitmapSource.PixelWidth * resizeFactor),
                        (int)(bitmapSource.PixelHeight * resizeFactor),
                        96,
                        96,
                        PixelFormats.Pbgra32);

                    faceWithRectBitmap.Render(visual);
                    // Display the annotated bitmap in the Image control.
                    img.Source = faceWithRectBitmap;

                    // Show the description of the last detected face.
                    MessageBox.Show(defaultStatusBarText);
                }
            }
            catch (Exception ex)
            {
                // Don't swallow failures silently; surface the error.
                MessageBox.Show(ex.Message);
            }
        }
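The UploadAndDetectFaces helper referenced above is not shown here. A minimal sketch, assuming an IFaceClient field named faceClient (hypothetical) and the Microsoft.Azure.CognitiveServices.Vision.Face SDK, requesting the HeadPose attribute that FaceDescription (Example #28) reads:

        // Sketch only, not the original helper.
        private async Task<IList<DetectedFace>> UploadAndDetectFaces(string path)
        {
            using (FileStream imageStream = File.OpenRead(path))
            {
                return await faceClient.Face.DetectWithStreamAsync(
                    imageStream,
                    returnFaceId: true,
                    returnFaceAttributes: new List<FaceAttributeType> { FaceAttributeType.HeadPose });
            }
        }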
Example #2
        private List <Guid?> AddLargeFaceListFace(IFaceClient client, string largeFaceListId, string fileName)
        {
            var persistedFaceIds = new List <Guid?>();

            // Three images per list entry are expected: "<fileName>1.jpg" through "<fileName>3.jpg".
            for (int i = 1; i < 4; i++)
            {
                DetectedFace face = null;
                using (FileStream stream = new FileStream(Path.Combine("TestImages", fileName + i + ".jpg"), FileMode.Open))
                {
                    // Detect first so the face rectangle can be passed as the target face below.
                    // .Result blocks the calling thread; acceptable in test code.
                    face = client.Face.DetectWithStreamAsync(stream, true, recognitionModel: recognitionModel).Result[0];
                }

                using (FileStream stream = new FileStream(Path.Combine("TestImages", fileName + i + ".jpg"), FileMode.Open))
                {
                    var persistedFace = client.LargeFaceList.AddFaceFromStreamAsync(largeFaceListId, stream, null, new List <int> {
                        face.FaceRectangle.Left,
                        face.FaceRectangle.Top,
                        face.FaceRectangle.Width,
                        face.FaceRectangle.Height
                    }).Result;

                    persistedFaceIds.Add(persistedFace.PersistedFaceId);
                }
            }

            return(persistedFaceIds);
        }
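A hypothetical call sequence around this helper: the large face list must be created with the same recognitionModel before faces are added, and trained afterwards so it can serve FindSimilar queries.

        // Sketch only; "Family1-Dad" is a hypothetical file-name prefix under TestImages.
        await client.LargeFaceList.CreateAsync(largeFaceListId, "My list", recognitionModel: recognitionModel);
        List<Guid?> persistedIds = AddLargeFaceListFace(client, largeFaceListId, "Family1-Dad");
        await client.LargeFaceList.TrainAsync(largeFaceListId);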
Example #3
        public async Task <Person> CreatePersonWithFace(DetectedFace detectedFace, OrganizationUser organizationUser, Stream imageStream)
        {
            if (detectedFace?.FaceId == null)
            {
                return(null);
            }

            // Create person
            var userPerson = await _faceClient.PersonGroupPerson.CreateAsync(
                ConfigurationSettings.PersonGroupId,
                organizationUser.Name,
                JsonConvert.SerializeObject(organizationUser));

            imageStream.Seek(0L, SeekOrigin.Begin);

            await _faceClient.PersonGroupPerson.AddFaceFromStreamAsync(
                ConfigurationSettings.PersonGroupId,
                userPerson.PersonId,
                imageStream);

            var person = await _faceClient.PersonGroupPerson.GetAsync(ConfigurationSettings.PersonGroupId, userPerson.PersonId);

            var blobName = $"{organizationUser.Email}.json";

            await CreateBlobContent(blobName, ContainerSettings.PersonsContainer, person);

            return(person);
        }
Example #4
        /// <summary>
        /// Get the cell for a face
        /// </summary>
        /// <param name="previewFrameSize">Webcam resolution</param>
        /// <param name="face">Detected face</param>
        /// <returns>Cell</returns>
        private Cell CreateFaceCell(Size previewFrameSize, DetectedFace face)
        {
            var cellX = (face.FaceBox.X + face.FaceBox.Width / 2) / (uint)(previewFrameSize.Width / _columnsCount);
            var cellY = (face.FaceBox.Y + face.FaceBox.Height / 2) / (uint)(previewFrameSize.Height / _rowsCount);

            return(new Cell((int)cellX, (int)cellY));
        }
Example #5
        internal FaceImageDimensions(DetectedFace faceToInclude, double facePercentage, int imageHeight, int imageWidth)
        {
            FaceRectangle faceRectangle = faceToInclude.FaceRectangle;
            int           imageSize     = Calculate100Size(faceRectangle.Height > faceRectangle.Width ? faceRectangle.Height : faceRectangle.Width, facePercentage);

            Calculate(faceRectangle, imageSize, imageHeight, imageWidth);
        }
Example #6
        public static Member ToMember(this DetectedFace value)
        {
            var attribs = value.FaceAttributes;
            var member  = new Member
            {
                FaceId     = value.FaceId.ToString(),
                Gender     = attribs.Gender.ToGenderType(),
                Age        = (double)attribs.Age,
                Smile      = (double)attribs.Smile,
                FacialHair = attribs.FacialHair.Average(),
                Glasses    = attribs.Glasses.Average(),
                Makeup     = attribs.Makeup.Average(),
                Emotion    = new EmotionValues(
                    attribs.Emotion.Anger,
                    attribs.Emotion.Contempt,
                    attribs.Emotion.Disgust,
                    attribs.Emotion.Fear,
                    attribs.Emotion.Happiness,
                    attribs.Emotion.Neutral,
                    attribs.Emotion.Sadness,
                    attribs.Emotion.Surprise)
            };

            return(member);
        }
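ToGenderType() and the Average() overloads used above are project-specific extensions that are not shown. A minimal sketch of what FacialHair.Average() might do (an assumption, not the project's actual code):

        // Assumption: average the three facial-hair confidence scores (each in [0, 1]).
        public static double Average(this FacialHair facialHair)
        {
            return (facialHair.Beard + facialHair.Moustache + facialHair.Sideburns) / 3.0;
        }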
Example #7
            public async Task <WriteableBitmap> MarkFaces(StorageFile file, DetectedFace[] faces)
            {
                if (faces.Length == 0)
                {
                    return(null);
                }

                using (var stream = await file.OpenStreamForReadAsync())
                {
                    WriteableBitmap wb = await BitmapFactory.FromStream(stream);

                    using (wb.GetBitmapContext())
                    {
                        for (int i = 0; i < faces.Length; ++i)
                        {
                            DetectedFace face = faces[i];

                            wb.DrawRectangle(
                                face.FaceRectangle.Left,
                                face.FaceRectangle.Top,
                                face.FaceRectangle.Left + face.FaceRectangle.Width,
                                face.FaceRectangle.Top + face.FaceRectangle.Height,
                                Colors.Red
                                );
                        }
                    }

                    return(wb);
                }
            }
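Hypothetical usage in a UWP page, assuming an Image control named imageControl, an imagePath string, and a faces array from a prior detection pass; a WriteableBitmap is a valid ImageSource, so the annotated bitmap can be shown directly.

                // Sketch only.
                StorageFile file = await StorageFile.GetFileFromPathAsync(imagePath);
                WriteableBitmap annotated = await MarkFaces(file, faces);
                imageControl.Source = annotated;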
Example #8
        private async void OnAnalyzeFaces(object sender, EventArgs e)
        {
            if (_photo == null)
            {
                return;
            }
            EnableAllButtons(false);
            try
            {
                IList <DetectedFace> faces = await FaceDetection.MakeAnalysisRequest(_photo);

                DetectedFace face = faces.FirstOrDefault();
                ActivityIndicator.IsRunning = false;
                if (face == null)
                {
                    await DisplayAlert("Face Analysis", "No Faces Found", "OK");

                    return;
                }
                string smiling  = face.FaceAttributes.Smile >= 0.75 ? "smiling" : "not smiling";
                var    analysis = $"{face.FaceAttributes.Age} year old {face.FaceAttributes.Gender} who is {smiling}.";
                await DisplayAlert("Face Analysis", analysis, "OK");
            }
            catch (Exception ex)
            {
                await DisplayAlert("Analysis Error", ex.Message, "OK");
            }
            finally
            {
                EnableAllButtons(true);
            }
        }
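FaceDetection.MakeAnalysisRequest is not shown. A minimal sketch, assuming _photo is a byte array and a static faceClient field (both hypothetical), requesting the Age, Gender, and Smile attributes the handler reads:

        // Sketch only, not the original helper.
        public static async Task<IList<DetectedFace>> MakeAnalysisRequest(byte[] photo)
        {
            using (var stream = new MemoryStream(photo))
            {
                return await faceClient.Face.DetectWithStreamAsync(
                    stream,
                    returnFaceAttributes: new List<FaceAttributeType>
                    {
                        FaceAttributeType.Age,
                        FaceAttributeType.Gender,
                        FaceAttributeType.Smile
                    });
            }
        }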
Example #9
 public Identification(Person person, double confidence, IdentifyResult identifyResult, DetectedFace face)
 {
     Person         = person;
     IdentifyResult = identifyResult;
     Face           = face;
     Confidence     = confidence;
 }
Example #10
        public async Task DetectSingleFaceAsync_Success()
        {
            // Arrange
            var    unitUnderTest = this.CreateFaceMatcher();
            Stream photo         = new MemoryStream();

            DetectedFace expected = new DetectedFace
            {
                FaceId         = Guid.NewGuid(),
                FaceAttributes = new FaceAttributes(gender: Gender.Female)
            };
            var response = new Microsoft.Rest.HttpOperationResponse <IList <DetectedFace> >
            {
                Body = new[] { expected }
            };

            this.mockFaceOperations
            .Setup(mock => mock.DetectWithStreamWithHttpMessagesAsync(
                       photo,
                       true,  // returnFaceId
                       false, // returnFaceLandmarks
                       It.Is <IList <FaceAttributeType> >(list => list.Count == 1 && list.Contains(FaceAttributeType.Gender)),
                       null,  // customHeaders
                       It.IsAny <System.Threading.CancellationToken>()))
            .Returns(Task.FromResult(response));

            // Act
            DetectedFace result = await unitUnderTest.DetectSingleFaceAsync(photo);

            // Assert
            result.ShouldNotBeNull();
            result.FaceId.ShouldBe(expected.FaceId);
            result.FaceAttributes.Gender.ShouldBe(expected.FaceAttributes.Gender);
        }
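The FaceMatcher under test is not shown; a sketch of the method this test implies, assuming an IFaceOperations field named faceOperations (hypothetical) and returning the first detected face:

        // Sketch only, inferred from the mock setup above.
        public async Task<DetectedFace> DetectSingleFaceAsync(Stream photo)
        {
            var response = await this.faceOperations.DetectWithStreamWithHttpMessagesAsync(
                photo,
                returnFaceId: true,
                returnFaceLandmarks: false,
                returnFaceAttributes: new List<FaceAttributeType> { FaceAttributeType.Gender });
            return response.Body.FirstOrDefault();
        }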
Example #11
        private async Task HighlightDetectedFace(DetectedFace face)
        {
            var cx = ViewFinder.ActualWidth / VideoProps.Width;
            var cy = ViewFinder.ActualHeight / VideoProps.Height;

            DFace = face;
            if (face == null)
            {
                FaceRect.Visibility = Visibility.Collapsed;
                Counter.Visibility  = Visibility.Collapsed;
                dt.Stop();
                IsFacePresent = false;
            }
            else
            {
                // Canvas.SetLeft(FaceRect, face.FaceBox.X);
                // Canvas.SetTop(FaceRect, face.FaceBox.Y);
                FaceRect.Margin     = new Thickness(cx * face.FaceBox.X, cy * face.FaceBox.Y, 0, 0);
                FaceRect.Width      = cx * face.FaceBox.Width;
                FaceRect.Height     = cy * face.FaceBox.Height;
                FaceRect.Visibility = Visibility.Visible;
                Counter.Margin      = new Thickness(cx * face.FaceBox.X, cy * face.FaceBox.Y, 0, 0);
                Counter.Width       = face.FaceBox.Width;
                if (!IsFacePresent)
                {
                    Counter.Visibility = Visibility.Visible;
                    IsFacePresent      = true;
                    counter      = 3;
                    Counter.Text = counter.ToString();
                    dt.Start();
                }
            }
        }
Example #12
        private async Task HighlightDetectedFace(DetectedFace face)
        {
            var cx = ViewFinder.ActualWidth / VideoProps.Width;
            var cy = ViewFinder.ActualHeight / VideoProps.Height;

            if (face == null)
            {
                FaceRect.Visibility = Visibility.Collapsed;
                IsFacePresent       = false;
                last_emo            = "";
                EmoDesc.Visibility  = EmoControl.Visibility = Visibility.Collapsed;
                Desc.Visibility     = Visibility.Visible;
            }
            else
            {
                // Canvas.SetLeft(FaceRect, face.FaceBox.X);
                // Canvas.SetTop(FaceRect, face.FaceBox.Y);
                FaceRect.Margin     = new Thickness(cx * face.FaceBox.X, cy * face.FaceBox.Y, 0, 0);
                FaceRect.Width      = cx * face.FaceBox.Width;
                FaceRect.Height     = cy * face.FaceBox.Height;
                FaceRect.Visibility = Visibility.Visible;
                IsFacePresent       = true;
                EmoDesc.Visibility  = EmoControl.Visibility = Visibility.Visible;
                Desc.Visibility     = Visibility.Collapsed;
            }
        }
Example #13
        public JsonResult FaceRecognitionResult(string xcoordinate, string ycoordinate)
        {
            IList <DetectedFace> faceList = JsonConvert.DeserializeObject <IList <DetectedFace> >(TempData.Peek("FaceList").ToString());

            String[] faceDescriptions;
            double   xcoordinate_val = Convert.ToDouble(xcoordinate);
            double   ycoordinate_val = Convert.ToDouble(ycoordinate);
            string   face_desc       = "";

            if (faceList.Count > 0)
            {
                faceDescriptions = new String[faceList.Count];
                for (int i = 0; i < faceList.Count; ++i)
                {
                    DetectedFace           face     = faceList[i];
                    FaceRecognitionProcess oProcess = new FaceRecognitionProcess();
                    faceDescriptions[i] = oProcess.FaceDescription(face);

                    // Display the face description if the mouse is over this face rectangle.
                    if (xcoordinate_val >= face.FaceRectangle.Left && xcoordinate_val <= face.FaceRectangle.Left + face.FaceRectangle.Width &&
                        ycoordinate_val >= face.FaceRectangle.Top && ycoordinate_val <= face.FaceRectangle.Top + face.FaceRectangle.Height)
                    {
                        face_desc = faceDescriptions[i];
                        break;
                    }
                }
            }
            return(Json(face_desc));
        }
Example #14
        private void SaveFaceData(DetectedFace face)
        {
            var faceData = new
            {
                Source    = cameraRadioButton.Checked ? "Câmera" : openFileDialog.FileName.Split('\\').LastOrDefault(),
                FaceID    = face.FaceId.ToString(),
                Gender    = face.FaceAttributes.Gender.ToString(),
                Age       = (int)face.FaceAttributes.Age,
                Emotion   = face.FaceAttributes.Emotion.ToRankedList().FirstOrDefault().Key,
                Glasses   = face.FaceAttributes.Glasses.ToString(),
                EyeMakeup = face.FaceAttributes.Makeup.EyeMakeup,
                LipMakeup = face.FaceAttributes.Makeup.LipMakeup,
                Beard     = face.FaceAttributes.FacialHair.Beard,
                Bald      = face.FaceAttributes.Hair.Bald,
                HairColor = face.FaceAttributes.Hair.HairColor.GetTopHairColor()
            };

            var functionUrl = "http://localhost:7071/api/FaceReactionFunction";
            var client      = new RestClient(functionUrl);
            var request     = new RestRequest(Method.POST);

            request.AddHeader("Content-Type", "application/json");
            request.AddParameter("undefined", Newtonsoft.Json.JsonConvert.SerializeObject(faceData), ParameterType.RequestBody);
            // Fire-and-forget; a null callback would throw when RestSharp invokes it.
            client.ExecuteAsync(request, response => { });
        }
Example #15
        /// <summary>
        /// Execute the Face API
        /// </summary>
        public static async Task <DetectedFace> ExecuteFaceAPIAsync(string apiKey, string faceUriEndPoint, FaceAttributeType[] faceAttributes, ImageSource faceImageSource)
        {
            var ret = new DetectedFace();

            try
            {
                // Create the client
                var faceClient = new FaceClient(new ApiKeyServiceClientCredentials(apiKey), new System.Net.Http.DelegatingHandler[] { })
                {
                    Endpoint = faceUriEndPoint,
                };

                // Run detection on the stream (the tapped image)
                System.Diagnostics.Debug.WriteLine("Executing Face API");
                var faceList = await faceClient.Face.DetectWithStreamAsync(Common.GetStreamFromImageSource(faceImageSource), true, false, faceAttributes);

                // Check for null before reading Count.
                if (faceList == null || faceList.Count == 0)
                {
                    await UserDialogs.Instance.AlertAsync("No face could be detected.", "ExecuteFaceAPIAsync", "OK");

                    return(null);
                }
                ret = faceList[0];
            }
            catch (APIErrorException ex)
            {
                await UserDialogs.Instance.AlertAsync($"An APIErrorException was thrown.\n{ex}", "ExecuteFaceAPIAsync", "OK");
            }
            catch (Exception ex)
            {
                await UserDialogs.Instance.AlertAsync($"An exception occurred.\n{ex}", "ExecuteFaceAPIAsync", "OK");
            }

            return(ret);
        }
Example #16
 private EmotionType GetEmotion(DetectedFace detected)
 {
     // Face API emotion scores are confidences in the range [0, 1],
     // so the original "> 70" comparisons could never be true.
     const double threshold = 0.7;
     Emotion emotion = detected.FaceAttributes.Emotion;

     if (emotion.Anger > threshold)
     {
         return(EmotionType.Anger);
     }
     else if (emotion.Sadness > threshold)
     {
         return(EmotionType.Sad);
     }
     else if (emotion.Happiness > threshold)
     {
         return(EmotionType.Happy);
     }
     else if (emotion.Fear > threshold)
     {
         return(EmotionType.Fear);
     }
     else if (emotion.Disgust > threshold)
     {
         return(EmotionType.Disgust);
     }
     else if (emotion.Contempt > threshold)
     {
         return(EmotionType.Contempt);
     }
     else if (emotion.Neutral > threshold)
     {
         return(EmotionType.Neutral);
     }
     else
     {
         return(EmotionType.Unknown);
     }
 }
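Note that FaceAttributes.Emotion is only populated when the Emotion attribute is requested at detection time, e.g. (faceClient and stream assumed):

     var faces = await faceClient.Face.DetectWithStreamAsync(
         stream,
         returnFaceAttributes: new List<FaceAttributeType> { FaceAttributeType.Emotion });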
Example #17
        public FaceDetails(DetectedFace face, System.Windows.Size imageSize, int imgPixelWidth, int imgPixelHeight)
        {
            this.face = face;
            InitializeComponent();

            if (face.FaceOwner != null)
            {
                PersonNames.Text = $"{face.FaceOwner.Name} {face.FaceOwner.LastName}";
                if (face.FaceOwner.DateOfBirth.HasValue)
                {
                    PersonAge.Text = $"{(int)((DateTime.Now - face.FaceOwner.DateOfBirth.Value).TotalDays) / 365} years old";
                }
            }

            sta.MouseEnter += FaceRectangle_MouseEnter;
            sta.MouseLeave += FaceRectangle_MouseLeave;

            var width  = imageSize.Width;
            var height = imageSize.Height;

            var xRatio = width / imgPixelWidth;
            var yRatio = height / imgPixelHeight;

            var posX = face.FaceRectangle.Left;
            var posY = face.FaceRectangle.Top;

            var renderXpos = xRatio * posX;
            var renderYpos = yRatio * posY;

            Margin = new Thickness(renderXpos, renderYpos, 0, 0);
            FaceRectangle.Width  = face.FaceRectangle.Width * xRatio;
            FaceRectangle.Height = face.FaceRectangle.Height * yRatio;
        }
Example #18
 public void SetImageInfo(DetectedFace face)
 {
     id        = face.FaceId.ToString();
     age       = face.FaceAttributes.Age;
     gender    = face.FaceAttributes.Gender;
     imageDate = DateTime.Now;
     AddEmotions(face.FaceAttributes.Emotion);
 }
Example #19
 public void SetImageInfo(DetectedFace face)
 {
     UserId    = SignInViewModel.currentUser.Id;
     Age       = face.FaceAttributes.Age;
     Gender    = face.FaceAttributes.Gender;
     ImageDate = DateTime.Now;
     AddEmotions(face.FaceAttributes.Emotion);
 }
Example #20
        private static FaceCategory GetFaceCategory(DetectedFace face, string countryCode)
        {
            Debug.Assert(face.FaceAttributes.Gender.HasValue, "Face detection did not return a Gender");
            FaceCategory searchCategory
                = new FaceCategory(countryCode, face.FaceAttributes.Gender.Value.Convert());

            return(searchCategory);
        }
Example #21
        public FaceIcon(DetectedFace face)
        {
            Face = face;

            InitializeComponent();

            Loaded += FaceIcon_Loaded;
        }
Example #22
 public Identification(string name, double confidence, IdentifyResult identifyResult, DetectedFace face)
 {
     Person         = new Person();
     Person.Name    = name;
     IdentifyResult = identifyResult;
     Face           = face;
     Confidence     = confidence;
 }
Example #23
        // TODO: 7. DisplayEmotion(): get the face's dominant emotion and draw the face rectangle with it
        private void DisplayEmotion(BitmapImage softwareBitmap, DetectedFace face)
        {
            if (face != null)
            {
                var emotionName = EmotionHelper.GetTopEmotionName(face);

                DrawFaceBox(softwareBitmap, face.FaceRectangle, emotionName);
            }
        }
Example #24
        private static void DrawRectangle(WriteableBitmap bitmap, DetectedFace face, Color color, int thickness)
        {
            var left   = face.FaceRectangle.Left;
            var top    = face.FaceRectangle.Top;
            var width  = face.FaceRectangle.Width;
            var height = face.FaceRectangle.Height;

            DrawRectangle(bitmap, left, top, width, height, color, thickness);
        }
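The pixel-level overload it delegates to is not shown. A minimal sketch, assuming WriteableBitmapEx's DrawRectangle(x1, y1, x2, y2, color), which takes opposite corners; thickness is emulated with nested one-pixel rectangles:

        // Sketch only.
        private static void DrawRectangle(WriteableBitmap bitmap, int left, int top, int width, int height, Color color, int thickness)
        {
            for (int i = 0; i < thickness; i++)
            {
                bitmap.DrawRectangle(left + i, top + i, left + width - i, top + height - i, color);
            }
        }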
Example #25
        public async void BtnTake_Clicked(object sender, EventArgs e)
        {
            await CrossMedia.Current.Initialize();

            try
            {
                if (!CrossMedia.Current.IsCameraAvailable || !CrossMedia.Current.IsTakePhotoSupported)
                {
                    await DisplayAlert("No Camera", ":( No camera available.", "OK");

                    return;
                }
                var file = await CrossMedia.Current.TakePhotoAsync(new Plugin.Media.Abstractions.StoreCameraMediaOptions
                {
                    Directory = "Sample",
                    Name      = "xamarin.jpg"
                });

                if (file == null)
                {
                    return;
                }
                imgChoosed.Source = ImageSource.FromStream(() =>
                {
                    var stream = file.GetStream();
                    return(stream);
                });
                Esperar.IsVisible = true;
                Esperar.IsRunning = true;

                // Show progress before the (potentially slow) detection call runs.
                Title          = "Detecting...";
                lblResult.Text = null;
                var faceList   = await Task.Run(async() => await GetImageDescription(file.GetStream()));

                Esperar.IsVisible = false;
                Esperar.IsRunning = false;

                file.Dispose();

                Title = String.Format(
                    "Detection Finished. {0} face(s) detected", faceList.Count);
                faceDescriptions = new String[faceList.Count];
                for (int i = 0; i < faceList.Count; ++i)
                {
                    DetectedFace face = faceList[i];
                    faceDescriptions[i] = FaceDescription(face);
                    lblResult.Text     += faceDescriptions[i] + "\r\n"; // append so every face is listed
                }
            }
            catch (Exception ex)
            {
                // Surface the failure instead of discarding it.
                await DisplayAlert("Error", ex.Message, "OK");
            }
        }
Example #26
        private static void Translate(Rectangle rectangle, DetectedFace detectedFace)
        {
            var translateTransform = new TranslateTransform()
            {
                X = detectedFace.FaceBox.X,
                Y = detectedFace.FaceBox.Y
            };

            rectangle.RenderTransform = translateTransform;
        }
Example #27
        private void AddFacialLandmark(DetectedFace face, Coordinate coordinate, double renderedImageXTransform, double renderedImageYTransform, Color color)
        {
            double    dotSize = 3;
            Rectangle b       = new Rectangle
            {
                Fill                = new SolidColorBrush(color),
                Width               = dotSize,
                Height              = dotSize,
                HorizontalAlignment = HorizontalAlignment.Left,
                VerticalAlignment   = VerticalAlignment.Top
            };

            b.Margin = new Thickness(
                ((coordinate.X - face.FaceRectangle.Left) * renderedImageXTransform) - dotSize / 2,
                ((coordinate.Y - face.FaceRectangle.Top) * renderedImageYTransform) - dotSize / 2,
                0, 0);
            this.hostGrid.Children.Add(b);
        }
Example #28
        // </snippet_uploaddetect>

        // <snippet_facedesc>
        // Creates a string out of the attributes describing the face.
        private static string FaceDescription(DetectedFace face)
        {
            StringBuilder sb = new StringBuilder();

            sb.Append("Face: ");
            sb.Append("Pitch:" + face.FaceAttributes.HeadPose.Pitch + "Yaw:" + face.FaceAttributes.HeadPose.Yaw + "Roll:" + face.FaceAttributes.HeadPose.Roll);

            // Return the built string.
            return(sb.ToString());
        }