Example #1
        /// <summary>
        /// Pick image for detection, and using the detected face as the face to person verify.
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e"></param>
        private async void FaceImagePicker_Click(object sender, RoutedEventArgs e)
        {
            // Show image picker, show jpg type files only
            Microsoft.Win32.OpenFileDialog dlg = new Microsoft.Win32.OpenFileDialog();
            dlg.DefaultExt = ".jpg";
            dlg.Filter     = "Image files(*.jpg, *.png, *.bmp, *.gif) | *.jpg; *.png; *.bmp; *.gif";
            var result = dlg.ShowDialog();

            if (result.HasValue && result.Value)
            {
                PersonVerifyResult = string.Empty;

                // User already picked one image
                var pickedImagePath = dlg.FileName;
                var renderingImage  = UIHelper.LoadImageAppliedOrientation(pickedImagePath);
                var imageInfo       = UIHelper.GetImageInfoForRendering(renderingImage);
                RightImageDisplay2.Source = renderingImage;

                // Clear last time detection results
                RightFaceResultCollection.Clear();
                PersonVerifyButton.IsEnabled = (FacesCollection.Count != 0 && RightFaceResultCollection.Count != 0);

                MainWindow.Log("Request: Detecting in {0}", pickedImagePath);
                var sw = Stopwatch.StartNew();

                // Call detection REST API, detect faces inside the image
                using (var fileStream = File.OpenRead(pickedImagePath))
                {
                    try
                    {
                        var faceServiceClient = FaceServiceClientHelper.GetInstance(this);
                        var faces             = await faceServiceClient.Face.DetectWithStreamAsync(fileStream, recognitionModel : recognitionModel);

                        // Handle REST API calling error
                        if (faces == null)
                        {
                            return;
                        }

                        MainWindow.Log("Response: Success. Detected {0} face(s) in {1}", faces.Count, pickedImagePath);

                        // Convert detection results into UI binding object for rendering
                        foreach (var face in UIHelper.CalculateFaceRectangleForRendering(faces, MaxImageSize, imageInfo))
                        {
                            // Detected faces are hosted in result container, will be used in the verification later
                            RightFaceResultCollection.Add(face);
                        }
                        PersonVerifyButton.IsEnabled = (FacesCollection.Count != 0 && RightFaceResultCollection.Count != 0);
                    }
                    catch (APIErrorException ex)
                    {
                        MainWindow.Log("Response: {0}. {1}", ex.Body.Error.Code, ex.Body.Error.Message);

                        return;
                    }
                }
            }
            GC.Collect();
        }
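
The detected face on this page feeds a face-to-person verification call. A minimal sketch of that step with the same Azure Face SDK client helper, assuming a personId picked from the persons on the left side and that GroupName names the large person group:

        /// <summary>
        /// Sketch only: verify whether a detected face belongs to a given person.
        /// 'personId' and 'GroupName' are assumed to exist in the page's scope.
        /// </summary>
        private async Task VerifyAgainstPersonAsync(Guid faceId, Guid personId)
        {
            var faceServiceClient = FaceServiceClientHelper.GetInstance(this);
            var verifyResult = await faceServiceClient.Face.VerifyFaceToPersonAsync(
                faceId, personId, largePersonGroupId: GroupName);

            // VerifyResult carries a boolean decision plus a confidence score
            MainWindow.Log(
                "Response: {0}. Confidence: {1}",
                verifyResult.IsIdentical ? "The face belongs to the person" : "The face does not belong to the person",
                verifyResult.Confidence);
        }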
        private async void LoadImage_Click(object sender, RoutedEventArgs e)
        {
            // Show file picker dialog
            Microsoft.Win32.OpenFileDialog dlg = new Microsoft.Win32.OpenFileDialog();
            dlg.DefaultExt = ".jpg";
            dlg.Filter     = "Image files (*.jpg, *.png, *.bmp, *.gif)|*.jpg;*.png;*.bmp;*.gif";
            var result = dlg.ShowDialog();

            if (result.HasValue && result.Value)
            {
                // User picked one image
                var pickedImagePath = dlg.FileName;
                var renderingImage  = UIHelper.LoadImageAppliedOrientation(pickedImagePath);
                var imageInfo       = UIHelper.GetImageInfoForRendering(renderingImage);
                SelectedFile = renderingImage;

                // Clear last detection result
                ResultCollection.Clear();
                DetectedFaces.Clear();
                ImageGlassesDisplay.Source = null;
                DetectedResultsInText      = "Detecting...";

                // Log the request to the main window
                MainWindow.Log("Request: Detecting {0}", pickedImagePath);
                var sw = Stopwatch.StartNew();

                // Call detection REST API
                using (var fStream = File.OpenRead(pickedImagePath))
                {
                    try
                    {
                        MainWindow mainWindow      = Window.GetWindow(this) as MainWindow;
                        string     subscriptionKey = mainWindow._scenariosControl.SubscriptionKey;
                        string     endpoint        = mainWindow._scenariosControl.SubscriptionEndpoint;

                        var faceServiceClient = new FaceServiceClient(subscriptionKey, endpoint);
                        var faces = await faceServiceClient.DetectAsync(fStream, false, true, new FaceAttributeType[] { FaceAttributeType.Gender, FaceAttributeType.Age, FaceAttributeType.Smile, FaceAttributeType.Glasses, FaceAttributeType.HeadPose, FaceAttributeType.FacialHair, FaceAttributeType.Emotion, FaceAttributeType.Hair, FaceAttributeType.Makeup, FaceAttributeType.Occlusion, FaceAttributeType.Accessories, FaceAttributeType.Noise, FaceAttributeType.Exposure, FaceAttributeType.Blur });

                        MainWindow.Log("Response: Success. Detected {0} face(s) in {1}", faces.Length, pickedImagePath);

                        DetectedResultsInText = string.Format("{0} face(s) has been detected. You can see the result", faces.Length);

                        // Convert detection result into UI binding object for rendering
                        foreach (var face in UIHelper.CalculateFaceRectangleForRendering(faces, MaxImageSizes, imageInfo))
                        {
                            ResultCollection.Add(face);
                        }
                    }
                    catch (FaceAPIException ex)
                    {
                        MainWindow.Log("Response: {0}. {1}", ex.ErrorCode, ex.ErrorMessage);
                        GC.Collect();
                        return;
                    }
                    GC.Collect();
                }
            }
        }
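
SelectedFile and DetectedResultsInText are WPF data-binding properties; the pattern behind them is plain INotifyPropertyChanged. A sketch, assuming the page class implements the interface (requires System.ComponentModel):

        /// <summary>
        /// Sketch only: the binding-property pattern used by SelectedFile and
        /// DetectedResultsInText (assumes the page implements INotifyPropertyChanged)
        /// </summary>
        public event PropertyChangedEventHandler PropertyChanged;

        private string _detectedResultsInText;
        public string DetectedResultsInText
        {
            get { return _detectedResultsInText; }
            set
            {
                _detectedResultsInText = value;
                PropertyChanged?.Invoke(this, new PropertyChangedEventArgs(nameof(DetectedResultsInText)));
            }
        }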
        public BitmapSource getMergedPicture(string userPicturePath, string hisPicturePath)
        {
            var             hismanImage = UIHelper.LoadImageAppliedOrientation(hisPicturePath);
            WriteableBitmap target      = new WriteableBitmap(
                hismanImage.PixelWidth,
                hismanImage.PixelHeight,
                hismanImage.DpiX, hismanImage.DpiY,
                hismanImage.Format, null);

            WriteableBitmap[] targets = { target };
            getMergedPictureCore(userPicturePath, hisPicturePath, targets, hismanImage);
            return(targets[0]);
        }
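
Note that getMergedPictureCore (shown at the bottom of this page) is async void, so getMergedPicture can return targets[0] before the core method has finished drawing into it. A safer shape, sketched under the assumption that the core method is changed to return Task (the Async names are hypothetical):

        /// <summary>
        /// Sketch only: awaitable variant that guarantees the merged bitmap is
        /// complete before returning (assumes getMergedPictureCoreAsync returns Task)
        /// </summary>
        public async Task<BitmapSource> GetMergedPictureAsync(string userPicturePath, string hisPicturePath)
        {
            var hismanImage = UIHelper.LoadImageAppliedOrientation(hisPicturePath);
            var target = new WriteableBitmap(
                hismanImage.PixelWidth,
                hismanImage.PixelHeight,
                hismanImage.DpiX, hismanImage.DpiY,
                hismanImage.Format, null);

            await getMergedPictureCoreAsync(userPicturePath, hisPicturePath, new[] { target }, hismanImage);
            return target;
        }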
Example #4
        /// <summary>
        /// Pick image for face detection and set detection result to result container
        /// </summary>
        /// <param name="sender">Event sender</param>
        /// <param name="e">Event argument</param>
        private async void ImagePicker_Click(object sender, RoutedEventArgs e)
        {
            // Show file picker dialog
            Microsoft.Win32.OpenFileDialog dlg = new Microsoft.Win32.OpenFileDialog();
            dlg.DefaultExt = ".jpg";
            dlg.Filter     = "Image files (*.jpg, *.png, *.bmp, *.gif) | *.jpg; *.png; *.bmp; *.gif";
            var result = dlg.ShowDialog();

            if (result.HasValue && result.Value)
            {
                // User picked one image
                var pickedImagePath = dlg.FileName;
                var renderingImage  = UIHelper.LoadImageAppliedOrientation(pickedImagePath);
                var imageInfo       = UIHelper.GetImageInfoForRendering(renderingImage);
                SelectedFile = renderingImage;

                // Clear last detection result
                ResultCollection.Clear();
                DetectedFaces.Clear();
                DetectedResultsInText = "Detecting...";

                MainWindow.Log("Request: Detecting {0}", pickedImagePath);
                var sw = Stopwatch.StartNew();

                // Call detection REST API
                using (var fStream = File.OpenRead(pickedImagePath))
                {
                    try
                    {
                        MainWindow mainWindow      = Window.GetWindow(this) as MainWindow;
                        string     subscriptionKey = mainWindow._scenariosControl.SubscriptionKey;
                        string     endpoint        = mainWindow._scenariosControl.SubscriptionEndpoint;

                        var faceServiceClient = new FaceServiceClient(subscriptionKey, endpoint);
                        ProjectOxford.Face.Contract.Face[] faces = await faceServiceClient.DetectAsync(fStream, false, true, new FaceAttributeType[] { FaceAttributeType.Gender, FaceAttributeType.Age, FaceAttributeType.Smile, FaceAttributeType.Glasses, FaceAttributeType.HeadPose, FaceAttributeType.FacialHair, FaceAttributeType.Emotion, FaceAttributeType.Hair, FaceAttributeType.Makeup, FaceAttributeType.Occlusion, FaceAttributeType.Accessories, FaceAttributeType.Noise, FaceAttributeType.Exposure, FaceAttributeType.Blur });

                        MainWindow.Log("Response: Success. Detected {0} face(s) in {1}", faces.Length, pickedImagePath);

                        DetectedResultsInText = string.Format("{0} face(s) has been detected", faces.Length);

                        foreach (var face in faces)
                        {
                            DetectedFaces.Add(new Face()
                            {
                                ImageFile         = renderingImage,
                                Left              = face.FaceRectangle.Left,
                                Top               = face.FaceRectangle.Top,
                                Width             = face.FaceRectangle.Width,
                                Height            = face.FaceRectangle.Height,
                                FaceId            = face.FaceId.ToString(),
                                Age               = string.Format("{0:#} years old", face.FaceAttributes.Age),
                                Gender            = face.FaceAttributes.Gender,
                                HeadPose          = string.Format("Pitch: {0}, Roll: {1}, Yaw: {2}", Math.Round(face.FaceAttributes.HeadPose.Pitch, 2), Math.Round(face.FaceAttributes.HeadPose.Roll, 2), Math.Round(face.FaceAttributes.HeadPose.Yaw, 2)),
                                FacialHair        = string.Format("FacialHair: {0}", face.FaceAttributes.FacialHair.Moustache + face.FaceAttributes.FacialHair.Beard + face.FaceAttributes.FacialHair.Sideburns > 0 ? "Yes" : "No"),
                                Glasses           = string.Format("GlassesType: {0}", face.FaceAttributes.Glasses.ToString()),
                                Emotion           = $"{GetEmotion(face.FaceAttributes.Emotion)}",
                                Hair              = string.Format("Hair: {0}", GetHair(face.FaceAttributes.Hair)),
                                Makeup            = string.Format("Makeup: {0}", ((face.FaceAttributes.Makeup.EyeMakeup || face.FaceAttributes.Makeup.LipMakeup) ? "Yes" : "No")),
                                EyeOcclusion      = string.Format("EyeOccluded: {0}", ((face.FaceAttributes.Occlusion.EyeOccluded) ? "Yes" : "No")),
                                ForeheadOcclusion = string.Format("ForeheadOccluded: {0}", (face.FaceAttributes.Occlusion.ForeheadOccluded ? "Yes" : "No")),
                                MouthOcclusion    = string.Format("MouthOccluded: {0}", (face.FaceAttributes.Occlusion.MouthOccluded ? "Yes" : "No")),
                                Accessories       = $"{GetAccessories(face.FaceAttributes.Accessories)}",
                                Blur              = string.Format("Blur: {0}", face.FaceAttributes.Blur.BlurLevel.ToString()),
                                Exposure          = string.Format("{0}", face.FaceAttributes.Exposure.ExposureLevel.ToString()),
                                Noise             = string.Format("Noise: {0}", face.FaceAttributes.Noise.NoiseLevel.ToString()),
                            });
                        }

                        // Convert detection result into UI binding object for rendering
                        foreach (var face in UIHelper.CalculateFaceRectangleForRendering(faces, MaxImageSize, imageInfo))
                        {
                            ResultCollection.Add(face);
                        }
                    }
                    catch (FaceAPIException ex)
                    {
                        MainWindow.Log("Response: {0}. {1}", ex.ErrorCode, ex.ErrorMessage);
                        GC.Collect();
                        return;
                    }
                    GC.Collect();
                }
            }
        }
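
The object initializer above relies on helpers such as GetEmotion, GetHair, and GetAccessories that are not shown on this page. A plausible sketch of GetEmotion, assuming the ProjectOxford EmotionScores contract with one float per emotion:

        /// <summary>
        /// Sketch only: return the name of the strongest emotion score
        /// </summary>
        private static string GetEmotion(Microsoft.ProjectOxford.Common.Contract.EmotionScores emotion)
        {
            string emotionType  = string.Empty;
            float  emotionValue = 0.0f;
            if (emotion.Anger > emotionValue) { emotionValue = emotion.Anger; emotionType = "Anger"; }
            if (emotion.Contempt > emotionValue) { emotionValue = emotion.Contempt; emotionType = "Contempt"; }
            if (emotion.Disgust > emotionValue) { emotionValue = emotion.Disgust; emotionType = "Disgust"; }
            if (emotion.Fear > emotionValue) { emotionValue = emotion.Fear; emotionType = "Fear"; }
            if (emotion.Happiness > emotionValue) { emotionValue = emotion.Happiness; emotionType = "Happiness"; }
            if (emotion.Neutral > emotionValue) { emotionValue = emotion.Neutral; emotionType = "Neutral"; }
            if (emotion.Sadness > emotionValue) { emotionValue = emotion.Sadness; emotionType = "Sadness"; }
            if (emotion.Surprise > emotionValue) { emotionValue = emotion.Surprise; emotionType = "Surprise"; }
            return $"Emotion: {emotionType}";
        }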
        /// <summary>
        /// Pick image, detect and identify all faces detected
        /// </summary>
        /// <param name="sender">Event sender</param>
        /// <param name="e">Event arguments</param>
        private async void Identify_Click(object sender, RoutedEventArgs e)
        {
            // Show file picker
            Microsoft.Win32.OpenFileDialog dlg = new Microsoft.Win32.OpenFileDialog();
            dlg.DefaultExt = ".jpg";
            dlg.Filter     = "Image files(*.jpg, *.png, *.bmp, *.gif) | *.jpg; *.png; *.bmp; *.gif";
            var result = dlg.ShowDialog();

            if (result.HasValue && result.Value)
            {
                // User picked one image
                // Clear previous detection and identification results
                TargetFaces.Clear();
                var pickedImagePath = dlg.FileName;
                var renderingImage  = UIHelper.LoadImageAppliedOrientation(pickedImagePath);
                var imageInfo       = UIHelper.GetImageInfoForRendering(renderingImage);
                SelectedFile = renderingImage;

                var sw = Stopwatch.StartNew();

                var faceServiceClient = FaceServiceClientHelper.GetInstance(this);
                // Call detection REST API
                using (var fStream = File.OpenRead(pickedImagePath))
                {
                    try
                    {
                        var faces = await faceServiceClient.Face.DetectWithStreamAsync(fStream, recognitionModel : recognitionModel, detectionModel : detectionModel);

                        // Convert detection result into UI binding object for rendering
                        foreach (var face in UIHelper.CalculateFaceRectangleForRendering(faces, MaxImageSize, imageInfo))
                        {
                            TargetFaces.Add(face);
                        }

                        MainWindow.Log("Request: Identifying {0} face(s) in group \"{1}\"", faces.Count, GroupName);

                        // Identify each face
                        // Call identify REST API, the result contains identified person information
                        var identifyResult = await faceServiceClient.Face.IdentifyAsync((from face in faces where face.FaceId != null select face.FaceId.Value).ToList(), null, GroupName);

                        for (int idx = 0; idx < faces.Count; idx++)
                        {
                            // Update identification result for rendering
                            var face = TargetFaces[idx];
                            var res  = identifyResult[idx];
                            if (res.Candidates.Count > 0 && Persons.Any(p => p.PersonId == res.Candidates[0].PersonId.ToString()))
                            {
                                face.PersonName = Persons.Where(p => p.PersonId == res.Candidates[0].PersonId.ToString()).First().PersonName;
                            }
                            else
                            {
                                face.PersonName = "Unknown";
                            }
                        }

                        var outString = new StringBuilder();
                        foreach (var face in TargetFaces)
                        {
                            outString.AppendFormat("Face {0} is identified as {1}. ", face.FaceId, face.PersonName);
                        }

                        MainWindow.Log("Response: Success. {0}", outString);
                    }
                    catch (APIErrorException ex)
                    {
                        MainWindow.Log("Response: {0}. {1}", ex.Body.Error.Code, ex.Body.Error.Message);
                    }
                }
            }
            GC.Collect();
        }
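
IdentifyAsync only succeeds against a trained group. A minimal sketch of the training step this handler depends on, assuming GroupName is the large person group id and the same client helper:

        /// <summary>
        /// Sketch only: train the large person group and poll until training completes
        /// </summary>
        private async Task TrainLargePersonGroupAsync()
        {
            var faceServiceClient = FaceServiceClientHelper.GetInstance(this);
            await faceServiceClient.LargePersonGroup.TrainAsync(GroupName);

            while (true)
            {
                var trainingStatus = await faceServiceClient.LargePersonGroup.GetTrainingStatusAsync(GroupName);
                if (trainingStatus.Status != TrainingStatusType.Running)
                {
                    MainWindow.Log("Response: Training status is {0}", trainingStatus.Status);
                    break;
                }
                await Task.Delay(1000);
            }
        }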
Example #6
        /// <summary>
        /// Pick image and call find similar with both two modes for each faces detected
        /// </summary>
        /// <param name="sender">Event sender</param>
        /// <param name="e">Event arguments</param>
        private async void FindSimilar_Click(object sender, RoutedEventArgs e)
        {
            // Show file picker
            Microsoft.Win32.OpenFileDialog dlg = new Microsoft.Win32.OpenFileDialog();
            dlg.DefaultExt = ".jpg";
            dlg.Filter     = "Image files (*.jpg, *.png, *.bmp, *.gif) | *.jpg; *.png; *.bmp; *.gif";
            var filePicker = dlg.ShowDialog();

            if (filePicker.HasValue && filePicker.Value)
            {
                // User picked image
                // Clear previous detection and find similar results
                TargetFaces.Clear();
                FindSimilarMatchPersonCollection.Clear();
                FindSimilarMatchFaceCollection.Clear();
                var sw = Stopwatch.StartNew();

                var pickedImagePath = dlg.FileName;
                var renderingImage  = UIHelper.LoadImageAppliedOrientation(pickedImagePath);
                var imageInfo       = UIHelper.GetImageInfoForRendering(renderingImage);
                SelectedFile = renderingImage;

                // Detect all faces in the picked image
                using (var fStream = File.OpenRead(pickedImagePath))
                {
                    MainWindow.Log("Request: Detecting faces in {0}", SelectedFile);
                    var faceServiceClient      = FaceServiceClientHelper.GetInstance(this);
                    IList <DetectedFace> faces = await faceServiceClient.Face.DetectWithStreamAsync(fStream);

                    // Update detected faces on UI
                    foreach (var face in UIHelper.CalculateFaceRectangleForRendering(faces, MaxImageSize, imageInfo))
                    {
                        TargetFaces.Add(face);
                    }

                    MainWindow.Log("Response: Success. Detected {0} face(s) in {1}", faces.Count, SelectedFile);

                    // Find two modes similar faces for each face
                    foreach (var f in faces)
                    {
                        if (f.FaceId == null)
                        {
                            continue;
                        }

                        var faceId = f.FaceId.Value;

                        MainWindow.Log("Request: Finding similar faces in Personal Match Mode for face {0}", faceId);

                        try
                        {
                            // Default matchPerson mode: call the Find Similar REST API; the result contains the persisted face ids that likely belong to the same person as the query face
                            const int           requestCandidatesCount = 4;
                            IList <SimilarFace> result = await faceServiceClient.Face.FindSimilarAsync(
                                faceId,
                                null,
                                _largeFaceListName,
                                maxNumOfCandidatesReturned : requestCandidatesCount);

                            // Update find matchPerson similar results collection for rendering
                            var personSimilarResult = new FindSimilarResult();
                            personSimilarResult.Faces     = new ObservableCollection <Face>();
                            personSimilarResult.QueryFace = new Face()
                            {
                                ImageFile = SelectedFile,
                                Top       = f.FaceRectangle.Top,
                                Left      = f.FaceRectangle.Left,
                                Width     = f.FaceRectangle.Width,
                                Height    = f.FaceRectangle.Height,
                                FaceId    = faceId.ToString(),
                            };
                            foreach (var fr in result)
                            {
                                var  candidateFace = FacesCollection.First(ff => ff.FaceId == fr.PersistedFaceId.ToString());
                                Face newFace       = new Face();
                                newFace.ImageFile  = candidateFace.ImageFile;
                                newFace.Confidence = fr.Confidence;
                                newFace.FaceId     = candidateFace.FaceId;
                                personSimilarResult.Faces.Add(newFace);
                            }

                            MainWindow.Log("Response: Found {0} similar faces for face {1}", personSimilarResult.Faces.Count, faceId);

                            FindSimilarMatchPersonCollection.Add(personSimilarResult);
                        }
                        catch (APIErrorException ex)
                        {
                            MainWindow.Log("Response: {0}. {1}", ex.Body.Error.Code, ex.Body.Error.Message);
                        }

                        try
                        {
                            // matchFace mode: call the Find Similar REST API; the result contains the top N faces with the highest similarity confidence
                            const int requestCandidatesCount = 4;
                            var       result = await faceServiceClient.Face.FindSimilarAsync(
                                faceId,
                                null,
                                _largeFaceListName,
                                maxNumOfCandidatesReturned : requestCandidatesCount,
                                mode : FindSimilarMatchMode.MatchFace);

                            // Update "matchFace" similar results collection for rendering
                            var faceSimilarResults = new FindSimilarResult();
                            faceSimilarResults.Faces     = new ObservableCollection <Face>();
                            faceSimilarResults.QueryFace = new Face()
                            {
                                ImageFile = SelectedFile,
                                Top       = f.FaceRectangle.Top,
                                Left      = f.FaceRectangle.Left,
                                Width     = f.FaceRectangle.Width,
                                Height    = f.FaceRectangle.Height,
                                FaceId    = faceId.ToString(),
                            };
                            foreach (var fr in result)
                            {
                                var  candidateFace = FacesCollection.First(ff => ff.FaceId == fr.PersistedFaceId.ToString());
                                Face newFace       = new Face();
                                newFace.ImageFile  = candidateFace.ImageFile;
                                newFace.Confidence = fr.Confidence;
                                newFace.FaceId     = candidateFace.FaceId;
                                faceSimilarResults.Faces.Add(newFace);
                            }

                            MainWindow.Log("Response: Found {0} similar faces for face {1}", faceSimilarResults.Faces.Count, faceId);

                            FindSimilarMatchFaceCollection.Add(faceSimilarResults);
                        }
                        catch (APIErrorException ex)
                        {
                            MainWindow.Log("Response: {0}. {1}", ex.Body.Error.Code, ex.Body.Error.Message);
                        }
                    }
                }
            }
            GC.Collect();
        }
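
Find Similar queries the faces persisted in _largeFaceListName, so the list must exist, contain faces, and be trained before this handler runs. A sketch of that preparation, assuming one face per image and an SDK version that accepts a recognition model at creation time (PrepareLargeFaceListAsync is a hypothetical name):

        /// <summary>
        /// Sketch only: create a large face list, add one face per image, and train it
        /// </summary>
        private async Task PrepareLargeFaceListAsync(IEnumerable<string> imagePaths)
        {
            var faceServiceClient = FaceServiceClientHelper.GetInstance(this);
            await faceServiceClient.LargeFaceList.CreateAsync(_largeFaceListName, name: _largeFaceListName, recognitionModel: recognitionModel);

            foreach (var imagePath in imagePaths)
            {
                using (var stream = File.OpenRead(imagePath))
                {
                    // Each call persists one face and returns its persistedFaceId
                    var persistedFace = await faceServiceClient.LargeFaceList.AddFaceFromStreamAsync(_largeFaceListName, stream);
                    MainWindow.Log("Added face {0} from {1}", persistedFace.PersistedFaceId, imagePath);
                }
            }

            await faceServiceClient.LargeFaceList.TrainAsync(_largeFaceListName);
        }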
        /// <summary>
        /// Pick image, detect and identify all faces detected
        /// </summary>
        /// <param name="sender">Event sender</param>
        /// <param name="e">Event arguments</param>
        private async void Identify_Click(object sender, RoutedEventArgs e)
        {
            // Show file picker
            Microsoft.Win32.OpenFileDialog dlg = new Microsoft.Win32.OpenFileDialog();
            dlg.DefaultExt = ".jpg";
            dlg.Filter     = "Image files(*.jpg, *.png, *.bmp, *.gif) | *.jpg; *.png; *.bmp; *.gif";
            var result = dlg.ShowDialog();

            if (result.HasValue && result.Value)
            {
                // User picked one image
                // Clear previous detection and identification results
                TargetFaces.Clear();
                var pickedImagePath = dlg.FileName;
                var renderingImage  = UIHelper.LoadImageAppliedOrientation(pickedImagePath);
                var imageInfo       = UIHelper.GetImageInfoForRendering(renderingImage);
                SelectedFile = renderingImage;

                var sw = Stopwatch.StartNew();

                // Read the key and endpoint from the sample's settings control rather than hardcoding credentials
                MainWindow mainWindow           = Window.GetWindow(this) as MainWindow;
                string     subscriptionKey      = mainWindow._scenariosControl.SubscriptionKey;
                string     subscriptionEndpoint = mainWindow._scenariosControl.SubscriptionEndpoint;
                var        faceServiceClient    = new FaceServiceClient(subscriptionKey, subscriptionEndpoint);
                var        requiredFaceAttributes = new FaceAttributeType[] {
                    FaceAttributeType.Age,
                    FaceAttributeType.Gender,
                    FaceAttributeType.Smile,
                    FaceAttributeType.Hair,
                    FaceAttributeType.HeadPose,
                    FaceAttributeType.Glasses,
                    FaceAttributeType.Emotion
                };

                // Call detection REST API
                using (var fStream = File.OpenRead(pickedImagePath))
                {
                    try
                    {
                        var faces = await faceServiceClient.DetectAsync(fStream, true, true, requiredFaceAttributes);

                        // Convert detection result into UI binding object for rendering
                        foreach (var face in UIHelper.CalculateFaceRectangleForRendering(faces, MaxImageSize, imageInfo))
                        {
                            TargetFaces.Add(face);
                        }

                        MainWindow.Log("Request: Identifying {0} face(s) in group \"{1}\"", faces.Length, this.GroupId);

                        // Identify each face
                        // Call identify REST API, the result contains identified person information
                        var identifyResult = await faceServiceClient.IdentifyAsync(faces.Select(ff => ff.FaceId).ToArray(), largePersonGroupId : this.GroupId);

                        for (int idx = 0; idx < faces.Length; idx++)
                        {
                            // Update identification result for rendering
                            var face = TargetFaces[idx];
                            var res  = identifyResult[idx];
                            if (res.Candidates.Length > 0 && Persons.Any(p => p.PersonId == res.Candidates[0].PersonId.ToString()))
                            {
                                face.PersonName = Persons.Where(p => p.PersonId == res.Candidates[0].PersonId.ToString()).First().PersonName;
                            }
                            else
                            {
                                face.PersonName = "Unknown";
                            }
                        }

                        var outString = new StringBuilder();

                        foreach (var face in faces)
                        {
                            MainWindow.Log("\nFace ID : {0}", face.FaceId.ToString());
                            StringBuilder sb = new StringBuilder();

                            // Add the gender, age, and smile.
                            sb.Append("Gender: ");
                            sb.Append(face.FaceAttributes.Gender);
                            sb.Append("\n ");
                            sb.Append("Age: ");
                            sb.Append(face.FaceAttributes.Age);
                            sb.Append("\n ");
                            sb.Append(String.Format("smile {0:F1}%\n ", face.FaceAttributes.Smile * 100));

                            // Add the emotions. Display all emotions over 10%.
                            sb.Append("Emotion: ");
                            Microsoft.ProjectOxford.Common.Contract.EmotionScores emotionScores = face.FaceAttributes.Emotion;
                            if (emotionScores.Anger >= 0.1f)
                            {
                                sb.Append(
                                    String.Format("anger {0:F1}%, ", emotionScores.Anger * 100));
                            }
                            if (emotionScores.Contempt >= 0.1f)
                            {
                                sb.Append(
                                    String.Format("contempt {0:F1}%, ", emotionScores.Contempt * 100));
                            }
                            if (emotionScores.Disgust >= 0.1f)
                            {
                                sb.Append(
                                    String.Format("disgust {0:F1}%, ", emotionScores.Disgust * 100));
                            }
                            if (emotionScores.Fear >= 0.1f)
                            {
                                sb.Append(
                                    String.Format("fear {0:F1}%, ", emotionScores.Fear * 100));
                            }
                            if (emotionScores.Happiness >= 0.1f)
                            {
                                sb.Append(
                                    String.Format("happiness {0:F1}%, ", emotionScores.Happiness * 100));
                            }
                            if (emotionScores.Neutral >= 0.1f)
                            {
                                sb.Append(
                                    String.Format("neutral {0:F1}%, ", emotionScores.Neutral * 100));
                            }
                            if (emotionScores.Sadness >= 0.1f)
                            {
                                sb.Append(
                                    String.Format("sadness {0:F1}%, ", emotionScores.Sadness * 100));
                            }
                            if (emotionScores.Surprise >= 0.1f)
                            {
                                sb.Append(
                                    String.Format("surprise {0:F1}%, ", emotionScores.Surprise * 100));
                            }
                            sb.Append("\n ");

                            // Add glasses.
                            sb.Append("Glasses: ");
                            sb.Append(face.FaceAttributes.Glasses);
                            sb.Append("\n ");

                            // Add hair.
                            sb.Append("Hair: ");

                            var hair = face.FaceAttributes.Hair;
                            if (hair.Bald >= 0.01f)
                            {
                                sb.Append(String.Format("bald {0:F1}% ", hair.Bald * 100));
                            }

                            // Display all hair color attributes over 10%.
                            var hairColors = hair.HairColor;
                            foreach (var hairColor in hairColors)
                            {
                                if (hairColor.Confidence >= 0.1)
                                {
                                    sb.Append(hairColor.Color.ToString());
                                    sb.Append(String.Format(" {0:F1}% ", hairColor.Confidence * 100));
                                }
                            }

                            sb.Append("\n");
                            MainWindow.Log("Face Attributes : {0}", sb);

                            MainWindow.Log("Face MouthLeft(X) : {0}", face.FaceLandmarks.MouthLeft.X);
                        }

                        foreach (var face in TargetFaces)
                        {
                            outString.AppendFormat("Face {0} is identified as {1}. ", face.FaceId, face.PersonName);
                        }

                        MainWindow.Log("Response: Success. {0}", outString);
                    }
                    catch (FaceAPIException ex)
                    {
                        MainWindow.Log("Response: {0}. {1}", ex.ErrorCode, ex.ErrorMessage);
                    }
                }
            }
            GC.Collect();
        }
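
The credentials above come from the sample's settings control; another common option is reading them from the environment so they never appear in source. A sketch, with FACE_SUBSCRIPTION_KEY and FACE_ENDPOINT as assumed variable names:

        /// <summary>
        /// Sketch only: build the ProjectOxford client from environment variables
        /// instead of hardcoded secrets
        /// </summary>
        private static FaceServiceClient CreateClientFromEnvironment()
        {
            string subscriptionKey = Environment.GetEnvironmentVariable("FACE_SUBSCRIPTION_KEY");
            string endpoint        = Environment.GetEnvironmentVariable("FACE_ENDPOINT");
            if (string.IsNullOrEmpty(subscriptionKey) || string.IsNullOrEmpty(endpoint))
            {
                throw new InvalidOperationException("Set FACE_SUBSCRIPTION_KEY and FACE_ENDPOINT before running this sample.");
            }
            return new FaceServiceClient(subscriptionKey, endpoint);
        }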
        /// <summary>
        /// Pick image for face detection and set detection result to result container
        /// </summary>
        /// <param name="sender">Event sender</param>
        /// <param name="e">Event argument</param>
        private async void icon_Click(object sender, RoutedEventArgs e)
        {
            // Show file picker dialog
            Microsoft.Win32.OpenFileDialog dlg = new Microsoft.Win32.OpenFileDialog();
            dlg.DefaultExt = ".jpg";
            dlg.Filter     = "Image files (*.jpg, *.png, *.bmp, *.gif) | *.jpg; *.png; *.bmp; *.gif";
            var result = dlg.ShowDialog();

            if (result.HasValue && result.Value)
            {
                // User picked one image
                var pickedImagePath = dlg.FileName;
                var renderingImage  = UIHelper.LoadImageAppliedOrientation(pickedImagePath);
                var imageInfo       = UIHelper.GetImageInfoForRendering(renderingImage);
                SelectedFile = renderingImage;

                // Clear last detection result
                ResultCollection.Clear();
                DetectedFaces.Clear();
                DetectedResultsInText = "Detecting...";

                MainWindow.Log("Request: Detecting {0}", pickedImagePath);
                var sw = Stopwatch.StartNew();

                // Call detection REST API
                using (var fStream = File.OpenRead(pickedImagePath))
                {
                    try
                    {
                        MainWindow mainWindow      = Window.GetWindow(this) as MainWindow;
                        string     subscriptionKey = mainWindow._scenariosControl.SubscriptionKey;
                        string     endpoint        = mainWindow._scenariosControl.SubscriptionEndpoint;

                        var faceServiceClient = new FaceServiceClient(subscriptionKey, endpoint);
                        ProjectOxford.Face.Contract.Face[] faces = await faceServiceClient.DetectAsync(fStream, false, true, new FaceAttributeType[] { FaceAttributeType.Gender, FaceAttributeType.Age, FaceAttributeType.Smile, FaceAttributeType.Glasses, FaceAttributeType.HeadPose, FaceAttributeType.FacialHair, FaceAttributeType.Emotion, FaceAttributeType.Hair, FaceAttributeType.Makeup, FaceAttributeType.Occlusion, FaceAttributeType.Accessories, FaceAttributeType.Noise, FaceAttributeType.Exposure, FaceAttributeType.Blur });

                        MainWindow.Log("Response: Success. Detected {0} face(s) in {1}", faces.Length, pickedImagePath);

                        DetectedResultsInText = string.Format("{0} face(s) has been detected", faces.Length);


                        // Load the picked file as a System.Drawing.Image for GDI+ drawing
                        System.Drawing.Image renderingImage_img = System.Drawing.Image.FromFile(dlg.FileName);
                        foreach (var face in faces)
                        {
                            // Draw the glasses icon onto the face
                            renderingImage_img = Add_icon(renderingImage_img, face);
                        }
                        var savedImagePath = "F:\\study\\Project\\New pj\\Cognitive-Face-Windows\\Images\\icon\\" + saveImage_Count.ToString() + ".jpg";
                        renderingImage_img.Save(savedImagePath);
                        BitmapImage DisplayImage = new BitmapImage(new Uri(savedImagePath));
                        ImageiconDisplay.Source = DisplayImage;
                        saveImage_Count++;

                        // Convert detection result into UI binding object for rendering
                        foreach (var face in UIHelper.CalculateFaceRectangleForRendering(faces, MaxImageSizes, imageInfo))
                        {
                            ResultCollection.Add(face);
                        }
                    }
                    catch (FaceAPIException ex)
                    {
                        MainWindow.Log("Response: {0}. {1}", ex.ErrorCode, ex.ErrorMessage);
                        GC.Collect();
                        return;
                    }
                    GC.Collect();
                }
            }
        }
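
Add_icon is not shown on this page. A hypothetical sketch of what such a helper might look like, overlaying an icon across the detected face rectangle with System.Drawing (iconPath is an assumption; a real implementation would position the icon using eye landmarks):

        /// <summary>
        /// Hypothetical sketch: draw an overlay icon across a detected face rectangle
        /// </summary>
        private System.Drawing.Image Add_icon(System.Drawing.Image source, ProjectOxford.Face.Contract.Face face)
        {
            string iconPath = @"Images\icon\glasses.png";  // assumed icon location
            using (var icon = System.Drawing.Image.FromFile(iconPath))
            using (var g = System.Drawing.Graphics.FromImage(source))
            {
                var rect = face.FaceRectangle;
                // Stretch the icon over the face rectangle reported by the service
                g.DrawImage(icon, rect.Left, rect.Top, rect.Width, rect.Height);
            }
            return source;
        }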
        /// <summary>
        /// open camera dialog
        /// </summary>
        /// <param name="sender">Event sender</param>
        /// <param name="e">Event arguments</param>
        private async void OpenCamera_Click(object sender, RoutedEventArgs e)
        {
            CameraOpen camera = new CameraOpen();

            camera.ShowDialog();
            // Clear previous detection and find similar results
            TargetFaces.Clear();
            FindSimilarMatchPersonCollection.Clear();
            FindSimilarMatchFaceCollection.Clear();
            var sw = Stopwatch.StartNew();

            var pickedImagePath = @"D:\3.jpg";    //dlg.FileName;
            var renderingImage  = UIHelper.LoadImageAppliedOrientation(pickedImagePath);
            var imageInfo       = UIHelper.GetImageInfoForRendering(renderingImage);

            SelectedFile = renderingImage;

            // Detect all faces in the picked image
            using (var fStream = File.OpenRead(pickedImagePath))
            {
                MainWindow.Log("Request: Detecting faces in {0}", SelectedFile);

                MainWindow mainWindow        = Window.GetWindow(this) as MainWindow;
                string     subscriptionKey   = mainWindow._scenariosControl.SubscriptionKey;
                string     endpoint          = mainWindow._scenariosControl.SubscriptionEndpoint;
                var        faceServiceClient = new FaceServiceClient(subscriptionKey, endpoint);
                var        faces             = await faceServiceClient.DetectAsync(fStream);

                // Update detected faces on UI
                foreach (var face in UIHelper.CalculateFaceRectangleForRendering(faces, MaxImageSize, imageInfo))
                {
                    TargetFaces.Add(face);
                }

                MainWindow.Log("Response: Success. Detected {0} face(s) in {1}", faces.Length, SelectedFile);

                // Find two modes similar faces for each face
                foreach (var f in faces)
                {
                    var faceId = f.FaceId;
                    MainWindow.Log("Request: Finding similar faces in Personal Match Mode for face {0}", faceId);

                    try
                    {
                        // Default matchPerson mode: call the Find Similar REST API; the result contains the face ids that likely belong to the same person as the query face
                        const int requestCandidatesCount = 4;
                        var       result = await faceServiceClient.FindSimilarAsync(faceId, faceid_list, requestCandidatesCount);

                        // Update find matchPerson similar results collection for rendering
                        var personSimilarResult = new FindSimilarResult();
                        personSimilarResult.Faces     = new ObservableCollection <Face>();
                        personSimilarResult.QueryFace = new Face()
                        {
                            ImageFile = SelectedFile,
                            Top       = f.FaceRectangle.Top,
                            Left      = f.FaceRectangle.Left,
                            Width     = f.FaceRectangle.Width,
                            Height    = f.FaceRectangle.Height,
                            FaceId    = faceId.ToString(),
                        };
                        foreach (var fr in result)
                        {
                            var  candidateFace = FacesCollection.First(ff => ff.FaceId == fr.FaceId.ToString());
                            Face newFace       = new Face();
                            newFace.ImageFile  = candidateFace.ImageFile;
                            newFace.Confidence = fr.Confidence;
                            newFace.FaceId     = candidateFace.FaceId;
                            personSimilarResult.Faces.Add(newFace);
                        }

                        MainWindow.Log("Response: Found {0} similar faces for face {1}", personSimilarResult.Faces.Count, faceId);

                        FindSimilarMatchPersonCollection.Add(personSimilarResult);
                    }
                    catch (FaceAPIException ex)
                    {
                        MainWindow.Log("Response: {0}. {1}", ex.ErrorCode, ex.ErrorMessage);
                    }

                    try
                    {
                        // matchFace mode: call the Find Similar REST API; the result contains the top N faces with the highest similarity confidence
                        const int requestCandidatesCount = 4;
                        var       result = await faceServiceClient.FindSimilarAsync(faceId, faceid_list, FindSimilarMatchMode.matchFace, requestCandidatesCount);

                        // Update "matchFace" similar results collection for rendering
                        var faceSimilarResults = new FindSimilarResult();
                        faceSimilarResults.Faces     = new ObservableCollection <Face>();
                        faceSimilarResults.QueryFace = new Face()
                        {
                            ImageFile = SelectedFile,
                            Top       = f.FaceRectangle.Top,
                            Left      = f.FaceRectangle.Left,
                            Width     = f.FaceRectangle.Width,
                            Height    = f.FaceRectangle.Height,
                            FaceId    = faceId.ToString(),
                        };
                        foreach (var fr in result)
                        {
                            var  candidateFace = FacesCollection.First(ff => ff.FaceId == fr.FaceId.ToString());
                            Face newFace       = new Face();
                            newFace.ImageFile = candidateFace.ImageFile;
                            newFace.Confidence = fr.Confidence;
                            newFace.Top        = candidateFace.Top;
                            newFace.Left       = candidateFace.Left;
                            newFace.Width      = candidateFace.Width;
                            newFace.Height     = candidateFace.Height;
                            newFace.FaceId     = fr.FaceId.ToString();//candidateFace.FaceId;
                            faceSimilarResults.Faces.Add(newFace);
                        }
                        var      candidate1 = FacesCollection.First(ff => ff.FaceId == result[0].FaceId.ToString());
                        Bitmap   graph      = new Bitmap(UIHelper.ImageSourceToBitmap(candidate1.ImageFile).Width, UIHelper.ImageSourceToBitmap(candidate1.ImageFile).Height);
                        Graphics g2         = Graphics.FromImage(graph);

                        g2.DrawImage(UIHelper.ImageSourceToBitmap(candidate1.ImageFile), 0, 0);
                        // Target region on the candidate image ("zuibiao" is pinyin for "coordinates")
                        Rectangle zuibiao = new Rectangle(candidate1.Left, candidate1.Top, candidate1.Width, candidate1.Height);
                        g2.DrawImage(UIHelper.ImageSourceToBitmap(SelectedFile), zuibiao, f.FaceRectangle.Left, f.FaceRectangle.Top, f.FaceRectangle.Width, f.FaceRectangle.Height, GraphicsUnit.Pixel);
                        // Save the composed bitmap directly; GetHbitmap()/FromHbitmap() leaks a GDI handle
                        graph.Save(@"E:\hackathon\ls\cognitive-Face-Windows\data1\image1.jpg", ImageFormat.Jpeg);

                        Bitmap   graph1 = new Bitmap(UIHelper.ImageSourceToBitmap(candidate1.ImageFile).Width, UIHelper.ImageSourceToBitmap(candidate1.ImageFile).Height);
                        Graphics g3     = Graphics.FromImage(graph1);

                        g3.DrawImage(UIHelper.ImageSourceToBitmap(candidate1.ImageFile), 0, 0);
                        graph1.Save(@"E:\hackathon\ls\cognitive-Face-Windows\image1.jpg", ImageFormat.Jpeg);
                        MainWindow.Log("Response: Found {0} similar faces for face {1}", faceSimilarResults.Faces.Count, faceId);
                        MergeImage1 = getMergedPicture(@"D:\3.jpg", @"E:\hackathon\ls\cognitive-Face-Windows\image1.jpg");
                        //MergeImage1 = getMergedPicture("D:\\3.jpg", "D:\\1.jpg");
                        FindSimilarMatchFaceCollection.Add(faceSimilarResults);

                        // Pause briefly so the result stays visible; Task.Delay keeps the UI thread responsive
                        await Task.Delay(4000);
                    }
                    catch (FaceAPIException ex)
                    {
                        MainWindow.Log("Response: {0}. {1}", ex.ErrorCode, ex.ErrorMessage);
                    }
                }
            }
            GC.Collect();
        }
        /// <summary>
        /// Pick image folder and detect all faces in these images
        /// </summary>
        /// <param name="sender">Event sender</param>
        /// <param name="e">Event arguments</param>
        private async void FolderPicker_Click(object sender, RoutedEventArgs e)
        {
            MainWindow mainWindow        = Window.GetWindow(this) as MainWindow;
            string     subscriptionKey   = mainWindow._scenariosControl.SubscriptionKey;
            string     endpoint          = mainWindow._scenariosControl.SubscriptionEndpoint;
            var        faceServiceClient = new FaceServiceClient(subscriptionKey, endpoint);

            OpenCameraButton.IsEnabled = false;
            OpenFaceButton.IsEnabled   = false;
            // Use a fixed folder path instead of showing a folder picker
            string file_path     = @"E:\hackathon\ls\cognitive-Face-Windows\data1";
            bool   forceContinue = false;

            if (System.IO.Directory.Exists(file_path))
            {
                // Enumerate all ".jpg" files in the folder, call detect
                List <Task> tasks = new List <Task>();
                FacesCollection.Clear();
                TargetFaces.Clear();
                FindSimilarMatchPersonCollection.Clear();
                FindSimilarMatchFaceCollection.Clear();
                SelectedFile = null;

                // The suggestion count only keeps this data-preparation step small;
                // it does not correspond to any service-side constraint
                const int SuggestionCount = 10;
                int       processCount    = 0;

                MainWindow.Log("Request: Preparing, detecting faces in chosen folder.");

                var imageList =
                    new ConcurrentBag <string>(
                        Directory.EnumerateFiles(file_path, "*.*", SearchOption.AllDirectories)
                        .Where(s => s.ToLower().EndsWith(".jpg") || s.ToLower().EndsWith(".png") || s.ToLower().EndsWith(".bmp") || s.ToLower().EndsWith(".gif")));

                string img;
                int    invalidImageCount = 0;
                int    i = 0;
                while (imageList.TryTake(out img))
                {
                    tasks.Add(Task.Factory.StartNew(
                                  async(obj) =>
                    {
                        var imgPath = obj as string;
                        // Call detection
                        using (var fStream = File.OpenRead(imgPath))
                        {
                            try
                            {
                                var renderingImage = UIHelper.LoadImageAppliedOrientation(imgPath);
                                var imageInfo      = UIHelper.GetImageInfoForRendering(renderingImage);
                                var faces1 = await faceServiceClient.DetectAsync(fStream);

                                // Record each detected face id and the image it came from.
                                // NOTE: faceid_list, media_name, and the counter are shared by
                                // concurrently running tasks, so updates must be synchronized.
                                foreach (var face in faces1)
                                {
                                    int slot = Interlocked.Increment(ref i) - 1;
                                    faceid_list[slot] = face.FaceId;
                                    lock (media_name)
                                    {
                                        media_name.Add(face.FaceId.ToString(), imgPath);
                                    }
                                }
                                return(new Tuple <string, ClientContract.Face[]>(imgPath, faces1));

                            }
                            catch (FaceAPIException ex)
                            {
                                // Retry on transient errors: operation conflict or rate limit exceeded
                                if (ex.ErrorCode.Equals("ConcurrentOperationConflict") || ex.ErrorCode.Equals("RateLimitExceeded"))
                                {
                                    imageList.Add(imgPath);
                                    return(null);
                                }

                                // Other detection failures are simply ignored in this sample;
                                // inspect ex.ErrorCode and ex.ErrorMessage to handle them properly
                                return(new Tuple <string, ClientContract.Face[]>(imgPath, null));
                            }
                        }
                    },
                                  img).Unwrap().ContinueWith((detectTask) =>
                    {
                        var res = detectTask?.Result;
                        if (res?.Item2 == null)
                        {
                            return;
                        }

                        // Update detected faces on UI
                        this.Dispatcher.Invoke(
                            new Action
                            <ObservableCollection <Face>, string, ClientContract.Face[]>(
                                UIHelper.UpdateFace),
                            FacesCollection,
                            res.Item1,
                            res.Item2);
                    }));

                    processCount++;

                    if (processCount >= SuggestionCount && !forceContinue)
                    {
                        var continueProcess =
                            System.Windows.Forms.MessageBox.Show(
                                "The images loaded have reached the recommended count, may take long time if proceed. Would you like to continue to load images?",
                                "Warning", System.Windows.Forms.MessageBoxButtons.YesNo);
                        if (continueProcess == System.Windows.Forms.DialogResult.Yes)
                        {
                            forceContinue = true;
                        }
                        else
                        {
                            break;
                        }
                    }

                    if (tasks.Count >= _maxConcurrentProcesses || imageList.IsEmpty)
                    {
                        await Task.WhenAll(tasks);

                        tasks.Clear();
                    }
                }
                if (invalidImageCount > 0)
                {
                    MainWindow.Log("Warning: more or less than one face is detected in {0} images, can not add to face list.", invalidImageCount);
                }
                MainWindow.Log("Response: Success. Total {0} faces are detected.", FacesCollection.Count);
            }
            else
            {
                MainWindow.Log("cannot open file");
            }
            GC.Collect();
            OpenFaceButton.IsEnabled   = true;
            OpenCameraButton.IsEnabled = true;
        }
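The catch block above handles transient failures ("ConcurrentOperationConflict", "RateLimitExceeded") by re-queuing the image path for another pass. The same idea can be factored into a reusable helper; what follows is a minimal sketch, assuming the ProjectOxford FaceAPIException type used elsewhere in these examples. The attempt cap and fixed delay are illustrative assumptions, and a stream-based call would need to reopen its stream inside the lambda so each attempt reads from the start.

        // Minimal retry sketch (not part of the original sample). The 3-attempt cap and
        // the fixed 1-second delay are illustrative assumptions, not service guidance.
        private static async Task<T> CallWithRetryAsync<T>(Func<Task<T>> operation, int maxAttempts = 3)
        {
            for (int attempt = 1; ; attempt++)
            {
                try
                {
                    return await operation();
                }
                catch (FaceAPIException ex) when (
                    attempt < maxAttempts &&
                    (ex.ErrorCode == "RateLimitExceeded" || ex.ErrorCode == "ConcurrentOperationConflict"))
                {
                    // Transient failure: wait briefly, then retry
                    await Task.Delay(TimeSpan.FromSeconds(1));
                }
            }
        }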
        /// <summary>
        /// Detect faces in both pictures, then copy the user's face region into the target
        /// bitmap, aligned on the nose landmarks.
        /// </summary>
        public async void getMergedPictureCore(string userPicturePath, string hisPicturePath, WriteableBitmap[] targets, BitmapImage hismanImage)
        {
            // renderingImage is a bitmap (BitmapSource) of the user picture
            var renderingImage = UIHelper.LoadImageAppliedOrientation(userPicturePath);

            // Call detection REST API; both file streams are disposed when the block exits
            using (var fStream = File.OpenRead(userPicturePath))
            using (var fStream2 = File.OpenRead(hisPicturePath))
            {
                // Do not ship a real subscription key in source; load it from configuration
                string subscriptionKey = "<your-subscription-key>";
                string endpoint        = "https://westcentralus.api.cognitive.microsoft.com/face/v1.0";

                var faceServiceClient = new FaceServiceClient(subscriptionKey, endpoint);

                ProjectOxford.Face.Contract.Face[] faces = await faceServiceClient.DetectAsync(fStream, false, true, new FaceAttributeType[] { FaceAttributeType.Gender, FaceAttributeType.Age, FaceAttributeType.Smile, FaceAttributeType.Glasses, FaceAttributeType.HeadPose, FaceAttributeType.FacialHair, FaceAttributeType.Emotion, FaceAttributeType.Hair, FaceAttributeType.Makeup, FaceAttributeType.Occlusion, FaceAttributeType.Accessories, FaceAttributeType.Noise, FaceAttributeType.Exposure, FaceAttributeType.Blur });

                ProjectOxford.Face.Contract.Face[] faces2 = await faceServiceClient.DetectAsync(fStream2, false, true, new FaceAttributeType[] { FaceAttributeType.Gender, FaceAttributeType.Age, FaceAttributeType.Smile, FaceAttributeType.Glasses, FaceAttributeType.HeadPose, FaceAttributeType.FacialHair, FaceAttributeType.Emotion, FaceAttributeType.Hair, FaceAttributeType.Makeup, FaceAttributeType.Occlusion, FaceAttributeType.Accessories, FaceAttributeType.Noise, FaceAttributeType.Exposure, FaceAttributeType.Blur });

                // Bail out unless at least one face was detected in each image
                if (faces.Length <= 0 || faces2.Length <= 0)
                {
                    return;
                }
                var face  = faces[0];
                var face2 = faces2[0];

                // Landmark coordinates of the first face in the user picture
                int upLeftX    = (int)face.FaceLandmarks.EyebrowLeftOuter.X;
                int upLeftY    = (int)face.FaceLandmarks.EyebrowLeftOuter.Y;
                int upLeft2Y   = (int)face.FaceLandmarks.EyebrowLeftInner.Y;
                int upRight2Y  = (int)face.FaceLandmarks.EyebrowRightInner.Y;
                int upRightX   = (int)face.FaceLandmarks.EyebrowRightOuter.X;
                int upRightY   = (int)face.FaceLandmarks.EyebrowRightOuter.Y;
                int downLeftX  = (int)face.FaceLandmarks.MouthLeft.X;
                int downLeftY  = (int)face.FaceLandmarks.MouthLeft.Y;
                int downRightX = (int)face.FaceLandmarks.MouthRight.X;
                int downRightY = (int)face.FaceLandmarks.MouthRight.Y;
                int downMiddle = (int)face.FaceLandmarks.UnderLipBottom.Y;


                // Offset between the two faces: each nose center is the average of five nose landmarks
                int faceNoseX = (int)(face.FaceLandmarks.NoseRootLeft.X + face.FaceLandmarks.NoseRootRight.X +
                                      face.FaceLandmarks.NoseLeftAlarOutTip.X + face.FaceLandmarks.NoseRightAlarOutTip.X +
                                      face.FaceLandmarks.NoseTip.X) / 5;
                int faceNoseY = (int)(face.FaceLandmarks.NoseRootLeft.Y + face.FaceLandmarks.NoseRootRight.Y +
                                      face.FaceLandmarks.NoseLeftAlarOutTip.Y + face.FaceLandmarks.NoseRightAlarOutTip.Y +
                                      face.FaceLandmarks.NoseTip.Y) / 5;
                int face2NoseX = (int)(face2.FaceLandmarks.NoseRootLeft.X + face2.FaceLandmarks.NoseRootRight.X +
                                       face2.FaceLandmarks.NoseLeftAlarOutTip.X + face2.FaceLandmarks.NoseRightAlarOutTip.X +
                                       face2.FaceLandmarks.NoseTip.X) / 5;
                int face2NoseY = (int)(face2.FaceLandmarks.NoseRootLeft.Y + face2.FaceLandmarks.NoseRootRight.Y +
                                       face2.FaceLandmarks.NoseLeftAlarOutTip.Y + face2.FaceLandmarks.NoseRightAlarOutTip.Y +
                                       face2.FaceLandmarks.NoseTip.Y) / 5;
                int offsetX = face2NoseX - faceNoseX;
                int offsetY = face2NoseY - faceNoseY;



                int pixelHeight  = renderingImage.PixelHeight;
                int bitsPerPixel = renderingImage.Format.BitsPerPixel;
                int pixelWidth   = renderingImage.PixelWidth;
                int stride       = pixelWidth * bitsPerPixel / 8;

                int hismanStride = hismanImage.PixelWidth * hismanImage.Format.BitsPerPixel / 8;

                int[] allPixels = new int[hismanStride * hismanImage.PixelHeight];
                hismanImage.CopyPixels(allPixels, hismanStride, 0);
                targets[0].WritePixels(
                    new Int32Rect(0, 0, hismanImage.PixelWidth, hismanImage.PixelHeight),
                    allPixels, hismanStride, 0);

                // TODO: height/width correction
                int up = upRightY < upLeftY ? upRightY : upLeftY;
                up = up < upRight2Y ? up : upRight2Y;
                up = up < upLeft2Y ? up : upLeft2Y;
                int down = downRightY > downLeftY ? downRightY + 1 : downLeftY + 1;
                down = down > downMiddle ? down : downMiddle;
                int height        = down - up;
                int maxWidth      = upRightX - upLeftX;
                int minWidth      = downRightX - downLeftX;
                int halfWidthDiff = (maxWidth - minWidth) / 2;
                down  += height * 15 / 100;
                up    -= height * 10 / 100;
                height = down - up;

                // Approximate the face outline: the upper rows bulge outward (sqrt term),
                // while the bottom tenth tapers linearly toward the chin
                int[] leftEdge  = new int[down - up];
                int[] rightEdge = new int[down - up];
                for (int i = 0; i < down - up - height / 10; i++)
                {
                    leftEdge[i]  = upLeftX + halfWidthDiff * i / height - (int)System.Math.Sqrt(maxWidth - System.Math.Abs((down - up) / 2 - i));
                    rightEdge[i] = upRightX - halfWidthDiff * i / height + (int)System.Math.Sqrt(maxWidth - System.Math.Abs((down - up) / 2 - i));
                }
                for (int i = down - up - height / 10; i < down - up; i++)
                {
                    leftEdge[i]  = upLeftX + halfWidthDiff * i / height;
                    rightEdge[i] = upRightX - halfWidthDiff * i / height;
                }
                // First pass: copy each row of the face region out of the user image
                List <byte[]> list = new List <byte[]>();
                for (int i = up; i < down; i += 1)
                {
                    int    chosenWidth = rightEdge[i - up] - leftEdge[i - up];
                    int    lineStride  = chosenWidth * bitsPerPixel / 8;
                    byte[] pixels      = new byte[lineStride];
                    var    temp        = new Int32Rect(leftEdge[i - up], i, chosenWidth, 1);
                    renderingImage.CopyPixels(temp, pixels, lineStride, 0);
                    list.Add(pixels);
                }

                // Second pass: write the copied rows into the target bitmap,
                // shifted so the two faces' nose centers line up
                for (int i = up; i < down; i += 1)
                {
                    int chosenWidth = rightEdge[i - up] - leftEdge[i - up];
                    int lineStride  = chosenWidth * bitsPerPixel / 8;
                    targets[0].WritePixels(new Int32Rect(leftEdge[i - up] + offsetX, i + offsetY, chosenWidth, 1), list[i - up], lineStride, 0);
                }
            }
        }
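The two loops above move pixels one row at a time because every row of the face region has a different width. For a plain rectangular region, the same CopyPixels/WritePixels round trip collapses into a single call; a minimal sketch, assuming the source and target share a pixel format (the helper name is an illustration, not part of the sample):

        // Copies one rectangular region from a BitmapSource into a WriteableBitmap.
        // Assumes both bitmaps use the same pixel format; coordinates are in pixels.
        private static void CopyRegion(BitmapSource source, WriteableBitmap target,
                                       Int32Rect region, int destX, int destY)
        {
            // Stride is the number of bytes in one row of the copied region
            int stride = region.Width * source.Format.BitsPerPixel / 8;
            byte[] pixels = new byte[stride * region.Height];
            source.CopyPixels(region, pixels, stride, 0);
            target.WritePixels(new Int32Rect(destX, destY, region.Width, region.Height),
                               pixels, stride, 0);
        }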
Ejemplo n.º 12
0
        /// <summary>
        /// Pick image, detect and identify all faces detected
        /// </summary>
        /// <param name="sender">Event sender</param>
        /// <param name="e">Event arguments</param>
        private async void Identify_Click(object sender, RoutedEventArgs e)
        {
            // Show file picker
            Microsoft.Win32.OpenFileDialog dlg = new Microsoft.Win32.OpenFileDialog();
            dlg.DefaultExt = ".jpg";
            dlg.Filter     = "Image files(*.jpg, *.png, *.bmp, *.gif) | *.jpg; *.png; *.bmp; *.gif";
            var result = dlg.ShowDialog();

            if (result.HasValue && result.Value)
            {
                // User picked one image
                // Clear previous detection and identification results
                TargetFaces.Clear();
                var pickedImagePath = dlg.FileName;
                var renderingImage  = UIHelper.LoadImageAppliedOrientation(pickedImagePath);
                var imageInfo       = UIHelper.GetImageInfoForRendering(renderingImage);
                SelectedFile = renderingImage;

                var sw = Stopwatch.StartNew();

                MainWindow mainWindow           = Window.GetWindow(this) as MainWindow;
                string     subscriptionKey      = mainWindow._scenariosControl.SubscriptionKey;
                string     subscriptionEndpoint = mainWindow._scenariosControl.SubscriptionEndpoint;
                var        faceServiceClient    = new FaceServiceClient(subscriptionKey, subscriptionEndpoint);

                // Call detection REST API
                using (var fStream = File.OpenRead(pickedImagePath))
                {
                    try
                    {
                        var faces = await faceServiceClient.DetectAsync(fStream);

                        // Convert detection result into UI binding object for rendering
                        foreach (var face in UIHelper.CalculateFaceRectangleForRendering(faces, MaxImageSize, imageInfo))
                        {
                            TargetFaces.Add(face);
                        }

                        MainWindow.Log("Request: Identifying {0} face(s) in group \"{1}\"", faces.Length, GroupName);

                        // Identify each face
                        // Call identify REST API, the result contains identified person information
                        var identifyResult = await faceServiceClient.IdentifyAsync(GroupName, faces.Select(ff => ff.FaceId).ToArray());

                        for (int idx = 0; idx < faces.Length; idx++)
                        {
                            // Update identification result for rendering
                            var face = TargetFaces[idx];
                            var res  = identifyResult[idx];
                            if (res.Candidates.Length > 0 && Persons.Any(p => p.PersonId == res.Candidates[0].PersonId.ToString()))
                            {
                                face.PersonName = Persons.First(p => p.PersonId == res.Candidates[0].PersonId.ToString()).PersonName;
                            }
                            else
                            {
                                face.PersonName = "Unknown";
                            }
                        }

                        var outString = new StringBuilder();
                        foreach (var face in TargetFaces)
                        {
                            outString.AppendFormat("Face {0} is identified as {1}. ", face.FaceId, face.PersonName);
                        }

                        MainWindow.Log("Response: Success. {0}", outString);
                    }
                    catch (FaceAPIException ex)
                    {
                        MainWindow.Log("Response: {0}. {1}", ex.ErrorCode, ex.ErrorMessage);
                    }
                }
            }
            GC.Collect();
        }
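Identify_Click sends every detected face id in a single IdentifyAsync call. The service caps how many face ids one Identify request may carry (historically 10), so images with many faces need to be identified in batches; a hedged sketch under that assumption, with the helper name and batch size chosen for illustration:

        // Sketch only: splits the face ids into batches because the Identify API limits
        // the number of ids per call (historically 10; check the current service limits)
        private static async Task<IdentifyResult[]> IdentifyInBatchesAsync(
            FaceServiceClient client, string groupName, Guid[] faceIds, int batchSize = 10)
        {
            var results = new List<IdentifyResult>();
            for (int offset = 0; offset < faceIds.Length; offset += batchSize)
            {
                Guid[] batch = faceIds.Skip(offset).Take(batchSize).ToArray();
                results.AddRange(await client.IdentifyAsync(groupName, batch));
            }
            return results.ToArray();
        }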
        /// <summary>
        /// Pick image for detection, get detection result and put detection results into LeftResultCollection
        /// </summary>
        /// <param name="sender">Event sender</param>
        /// <param name="e">Event argument</param>
        private async void LeftImagePicker_Click(object sender, RoutedEventArgs e)
        {
            // Show image picker, show jpg type files only
            Microsoft.Win32.OpenFileDialog dlg = new Microsoft.Win32.OpenFileDialog();
            dlg.DefaultExt = ".jpg";
            dlg.Filter     = "Image files(*.jpg, *.png, *.bmp, *.gif) | *.jpg; *.png; *.bmp; *.gif";
            var result = dlg.ShowDialog();

            if (result.HasValue && result.Value)
            {
                FaceVerifyResult = string.Empty;

                // User already picked one image
                var pickedImagePath = dlg.FileName;
                var renderingImage  = UIHelper.LoadImageAppliedOrientation(pickedImagePath);
                var imageInfo       = UIHelper.GetImageInfoForRendering(renderingImage);
                LeftImageDisplay.Source = renderingImage;

                // Clear last time detection results
                LeftResultCollection.Clear();
                FaceVerifyButton.IsEnabled = (LeftResultCollection.Count != 0 && RightResultCollection.Count != 0);
                MainWindow.Log("Request: Detecting in {0}", pickedImagePath);
                var sw = Stopwatch.StartNew();

                // Call detection REST API, detect faces inside the image
                using (var fileStream = File.OpenRead(pickedImagePath))
                {
                    try
                    {
                        MainWindow mainWindow        = Window.GetWindow(this) as MainWindow;
                        string     subscriptionKey   = mainWindow._scenariosControl.SubscriptionKey;
                        string     endpoint          = mainWindow._scenariosControl.SubscriptionEndpoint;
                        var        faceServiceClient = new FaceServiceClient(subscriptionKey, endpoint);
                        var        faces             = await faceServiceClient.DetectAsync(fileStream);

                        // Handle REST API calling error
                        if (faces == null)
                        {
                            return;
                        }

                        MainWindow.Log("Response: Success. Detected {0} face(s) in {1}", faces.Length, pickedImagePath);

                        // Convert detection results into UI binding object for rendering
                        foreach (var face in UIHelper.CalculateFaceRectangleForRendering(faces, MaxImageSize, imageInfo))
                        {
                            // Detected faces are hosted in result container, will be used in the verification later
                            LeftResultCollection.Add(face);
                        }

                        FaceVerifyButton.IsEnabled = (LeftResultCollection.Count != 0 && RightResultCollection.Count != 0);
                    }
                    catch (FaceAPIException ex)
                    {
                        MainWindow.Log("Response: {0}. {1}", ex.ErrorCode, ex.ErrorMessage);
                        return;
                    }
                }
            }
            GC.Collect();
        }
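LeftImagePicker_Click only gathers the left-hand face; the comparison itself runs when FaceVerifyButton is clicked. A minimal sketch of that verification step, assuming one face id from each of the two detections (the helper name is illustrative):

        // Verifies whether two previously detected faces belong to the same person
        private static async Task VerifyDetectedFacesAsync(FaceServiceClient client, Guid leftFaceId, Guid rightFaceId)
        {
            var verifyResult = await client.VerifyAsync(leftFaceId, rightFaceId);
            MainWindow.Log("Response: the two faces {0} to the same person. Confidence: {1}",
                           verifyResult.IsIdentical ? "belong" : "do not belong",
                           verifyResult.Confidence);
        }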