Example #1
        /// <summary>
        /// Verify the face with the person, get whether these two faces belong to the same person
        /// </summary>
        /// <param name="sender">Event sender</param>
        /// <param name="e">Event argument</param>
        private async void Face2PersonVerification_Click(object sender, RoutedEventArgs e)
        {
            // Call face-to-person verification; the verify REST API supports one-face-to-one-person verification only
            // Here we handle images with a single face only
            if (Person != null && Person.Faces.Count != 0 && RightFaceResultCollection.Count == 1)
            {
                PersonVerifyResult = "Verifying...";
                var faceId = RightFaceResultCollection[0].FaceId;

                MainWindow.Log("Request: Verifying face {0} and person {1}", faceId, Person.PersonName);

                // Call the verify REST API with a face ID and a person ID
                try
                {
                    var faceServiceClient = FaceServiceClientHelper.GetInstance(this);
                    var res = await faceServiceClient.Face.VerifyFaceToPersonAsync(Guid.Parse(faceId), Guid.Parse(Person.PersonId), null, GroupName);

                    // The verification result contains IsIdentical (true or false) and Confidence (in the range 0.0 ~ 1.0);
                    // here we update the verification result on the UI via the PersonVerifyResult binding
                    PersonVerifyResult = string.Format("{0} ({1:0.0})", res.IsIdentical ? "the face belongs to the person" : "the face does not belong to the person", res.Confidence);
                    MainWindow.Log("Response: Success. Face {0} {1} person {2}", faceId, res.IsIdentical ? "belong" : "not belong", Person.PersonName);
                }
                catch (APIErrorException ex)
                {
                    MainWindow.Log("Response: {0}. {1}", ex.Body.Error.Code, ex.Body.Error.Message);

                    return;
                }
            }
            else
            {
                MessageBox.Show("Verification accepts one person containing face(s) and one face as input, please check.", "Warning", MessageBoxButton.OK);
            }
            GC.Collect();
        }
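        // A minimal non-UI sketch of the same face-to-person call, assuming the
        // Microsoft.Azure.CognitiveServices.Vision.Face SDK used above (IFaceClient).
        // VerifyFaceAgainstPersonAsync is a hypothetical helper name; the face ID, person ID
        // and large person group ID come from earlier detect/create calls.
        private static async Task<bool> VerifyFaceAgainstPersonAsync(IFaceClient client, Guid faceId, Guid personId, string largePersonGroupId)
        {
            // VerifyFaceToPersonAsync returns IsIdentical plus a Confidence in the range 0.0 ~ 1.0
            var result = await client.Face.VerifyFaceToPersonAsync(faceId, personId, largePersonGroupId : largePersonGroupId);
            Console.WriteLine("IsIdentical = {0}, Confidence = {1:0.00}", result.IsIdentical, result.Confidence);
            return result.IsIdentical;
        }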
Example #2
        /// <summary>
        /// Pick an image for detection, and use the detected face for face-to-person verification.
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e"></param>
        private async void FaceImagePicker_Click(object sender, RoutedEventArgs e)
        {
            // Show image picker; list supported image file types only
            Microsoft.Win32.OpenFileDialog dlg = new Microsoft.Win32.OpenFileDialog();
            dlg.DefaultExt = ".jpg";
            dlg.Filter     = "Image files(*.jpg, *.png, *.bmp, *.gif) | *.jpg; *.png; *.bmp; *.gif";
            var result = dlg.ShowDialog();

            if (result.HasValue && result.Value)
            {
                PersonVerifyResult = string.Empty;

                // User already picked one image
                var pickedImagePath = dlg.FileName;
                var renderingImage  = UIHelper.LoadImageAppliedOrientation(pickedImagePath);
                var imageInfo       = UIHelper.GetImageInfoForRendering(renderingImage);
                RightImageDisplay2.Source = renderingImage;

                // Clear last time detection results
                RightFaceResultCollection.Clear();
                PersonVerifyButton.IsEnabled = (FacesCollection.Count != 0 && RightFaceResultCollection.Count != 0);

                MainWindow.Log("Request: Detecting in {0}", pickedImagePath);
                var sw = Stopwatch.StartNew();

                // Call detection REST API, detect faces inside the image
                using (var fileStream = File.OpenRead(pickedImagePath))
                {
                    try
                    {
                        var faceServiceClient = FaceServiceClientHelper.GetInstance(this);
                        var faces             = await faceServiceClient.Face.DetectWithStreamAsync(fileStream, recognitionModel : recognitionModel);

                        // Handle REST API calling error
                        if (faces == null)
                        {
                            return;
                        }

                        MainWindow.Log("Response: Success. Detected {0} face(s) in {1}", faces.Count, pickedImagePath);

                        // Convert detection results into UI binding object for rendering
                        foreach (var face in UIHelper.CalculateFaceRectangleForRendering(faces, MaxImageSize, imageInfo))
                        {
                            // Detected faces are hosted in result container, will be used in the verification later
                            RightFaceResultCollection.Add(face);
                        }
                        PersonVerifyButton.IsEnabled = (FacesCollection.Count != 0 && RightFaceResultCollection.Count != 0);
                    }
                    catch (APIErrorException ex)
                    {
                        MainWindow.Log("Response: {0}. {1}", ex.Body.Error.Code, ex.Body.Error.Message);

                        return;
                    }
                }
            }
            GC.Collect();
        }
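        // A minimal sketch of the detection call above without the WPF plumbing, assuming the same SDK.
        // DetectFaceIdsAsync is a hypothetical helper name; recognitionModel is whatever model string the
        // page is configured with.
        private static async Task<IList<Guid>> DetectFaceIdsAsync(IFaceClient client, string imagePath, string recognitionModel)
        {
            using (var stream = File.OpenRead(imagePath))
            {
                IList<DetectedFace> faces = await client.Face.DetectWithStreamAsync(stream, recognitionModel : recognitionModel);

                // Keep only the faces the service assigned an ID to
                return faces.Where(f => f.FaceId.HasValue).Select(f => f.FaceId.Value).ToList();
            }
        }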
Example #3
        /// <summary>
        /// Verify two detected faces, get whether these two faces belong to the same person
        /// </summary>
        /// <param name="sender">Event sender</param>
        /// <param name="e">Event argument</param>
        private async void Face2FaceVerification_Click(object sender, RoutedEventArgs e)
        {
            // Call face-to-face verification; the verify REST API supports one-to-one face verification only
            // Here we handle images with a single face only
            if (LeftResultCollection.Count == 1 && RightResultCollection.Count == 1)
            {
                FaceVerifyResult = "Verifying...";
                var faceId1 = LeftResultCollection[0].FaceId;
                var faceId2 = RightResultCollection[0].FaceId;

                MainWindow.Log("Request: Verifying face {0} and {1}", faceId1, faceId2);

                // Call the verify REST API with two face IDs
                try
                {
                    var faceServiceClient = FaceServiceClientHelper.GetInstance(this);
                    var res = await faceServiceClient.Face.VerifyFaceToFaceAsync(Guid.Parse(faceId1), Guid.Parse(faceId2));

                    // The verification result contains IsIdentical (true or false) and Confidence (in the range 0.0 ~ 1.0);
                    // here we update the verification result on the UI via the FaceVerifyResult binding
                    FaceVerifyResult = string.Format("Confidence = {0:0.00}, {1}", res.Confidence, res.IsIdentical ? "the two faces belong to the same person" : "the two faces do not belong to the same person");
                    MainWindow.Log("Response: Success. Face {0} and {1} {2} to the same person", faceId1, faceId2, res.IsIdentical ? "belong" : "not belong");
                }
                catch (APIErrorException ex)
                {
                    MainWindow.Log("Response: {0}. {1}", ex.Body.Error.Code, ex.Body.Error.Message);

                    return;
                }
            }
            else
            {
                MessageBox.Show("Verification accepts two faces as input, please pick images with only one detectable face in it.", "Warning", MessageBoxButton.OK);
            }
            GC.Collect();
        }
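        // The face-to-face counterpart as a minimal sketch, assuming the same SDK;
        // VerifyTwoFacesAsync is a hypothetical helper name and the two face IDs come from prior detection calls.
        private static async Task VerifyTwoFacesAsync(IFaceClient client, Guid faceId1, Guid faceId2)
        {
            var result = await client.Face.VerifyFaceToFaceAsync(faceId1, faceId2);
            Console.WriteLine("Same person: {0} (confidence {1:0.00})", result.IsIdentical, result.Confidence);
        }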
Example #4
        /// <summary>
        /// Pick an image folder, detect the faces inside, and use them to create the person database
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e"></param>
        private async void PersonImageFolderPicker_Click(object sender, RoutedEventArgs e)
        {
            bool groupExists = false;

            var faceServiceClient = FaceServiceClientHelper.GetInstance(this);

            // Test whether the group already exists
            try
            {
                MainWindow.Log("Request: Group {0} will be used to build a person database. Checking whether the group exists.", GroupName);

                await faceServiceClient.LargePersonGroup.GetAsync(GroupName);

                groupExists = true;
                MainWindow.Log("Response: Group {0} exists.", GroupName);
            }
            catch (APIErrorException ex)
            {
                if (ex.Body.Error.Code != "LargePersonGroupNotFound")
                {
                    MainWindow.Log("Response: {0}. {1}", ex.Body.Error.Code, ex.Body.Error.Message);
                    return;
                }
                else
                {
                    MainWindow.Log("Response: Group {0} did not exist previously.", GroupName);
                }
            }

            if (groupExists)
            {
                var cleanGroup = System.Windows.MessageBox.Show(string.Format("Group \"{0}\" must be cleaned up before setting up a new person database. Click OK to proceed; group \"{0}\" will be cleared.", GroupName), "Warning", MessageBoxButton.OKCancel);
                if (cleanGroup == MessageBoxResult.OK)
                {
                    await faceServiceClient.LargePersonGroup.DeleteAsync(GroupName);

                    PersonVerifyResult = string.Empty;
                    Person.Faces.Clear();
                }
                else
                {
                    return;
                }
            }

            // Show folder picker
            System.Windows.Forms.FolderBrowserDialog dlg = new System.Windows.Forms.FolderBrowserDialog();
            var result = dlg.ShowDialog();

            // The suggestion count only keeps the data preparation step small;
            // it does not correspond to any service-side constraint
            const int SuggestionCount = 15;

            if (result == System.Windows.Forms.DialogResult.OK)
            {
                FacesCollection.Clear();
                PersonVerifyButton.IsEnabled = (FacesCollection.Count != 0 && RightFaceResultCollection.Count != 0);

                // Call the create large person group REST API
                // This call will fail if a group with the same name already exists
                MainWindow.Log("Request: Creating group \"{0}\"", GroupName);
                try
                {
                    await faceServiceClient.LargePersonGroup.CreateAsync(GroupName, GroupName, recognitionModel : recognitionModel);

                    MainWindow.Log("Response: Success. Group \"{0}\" created", GroupName);
                }
                catch (APIErrorException ex)
                {
                    MainWindow.Log("Response: {0}. {1}", ex.Body.Error.Code, ex.Body.Error.Message);
                    return;
                }

                int  processCount  = 0;
                bool forceContinue = false;

                MainWindow.Log("Request: Preparing person for verification, detecting faces in chosen folder.");

                // The selected folder represents a single person; the folder name is used as the person name

                var tasks = new List <Task>();
                var tag   = System.IO.Path.GetFileName(dlg.SelectedPath);
                Person            = new Person();
                Person.PersonName = tag;

                var faces = new ObservableCollection <Face>();
                Person.Faces = faces;

                // Call the create person REST API; the newly created person ID is returned
                MainWindow.Log("Request: Creating person \"{0}\"", Person.PersonName);
                Person.PersonId =
                    (await faceServiceClient.LargePersonGroupPerson.CreateAsync(GroupName, Person.PersonName)).PersonId.ToString();
                MainWindow.Log("Response: Success. Person \"{0}\" (PersonID:{1}) created", Person.PersonName, Person.PersonId);

                string img;
                var    imageList =
                    new ConcurrentBag <string>(
                        Directory.EnumerateFiles(dlg.SelectedPath, "*.*", SearchOption.AllDirectories)
                        .Where(s => s.ToLower().EndsWith(".jpg") || s.ToLower().EndsWith(".png") || s.ToLower().EndsWith(".bmp") || s.ToLower().EndsWith(".gif")));

                // Enumerate images under the person folder, call detection
                int invalidImageCount = 0;
                while (imageList.TryTake(out img))
                {
                    tasks.Add(Task.Factory.StartNew(
                                  async(obj) =>
                    {
                        var imgPath = obj as string;

                        using (var fStream = File.OpenRead(imgPath))
                        {
                            try
                            {
                                var persistFace =
                                    await
                                    faceServiceClient.LargePersonGroupPerson.AddFaceFromStreamAsync(GroupName, Guid.Parse(Person.PersonId), fStream, imgPath);
                                return(new Tuple <string, PersistedFace>(imgPath, persistFace));
                            }
                            catch (APIErrorException ex)
                            {
                                // If the operation conflicts with another, retry.
                                if (ex.Body.Error.Code.Equals("ConcurrentOperationConflict"))
                                {
                                    imageList.Add(imgPath);
                                    return(null);
                                }
                                // If the rate limit is exceeded, retry.
                                else if (ex.Body.Error.Code.Equals("RateLimitExceeded"))
                                {
                                    imageList.Add(imgPath);
                                    return(null);
                                }
                                else if (ex.Body.Error.Message.Contains("more than 1 face in the image."))
                                {
                                    Interlocked.Increment(ref invalidImageCount);
                                }
                                // Here we simply ignore all detection failures in this sample.
                                // You may handle these exceptions by checking the ex.Body.Error.Code and ex.Body.Error.Message properties of the APIErrorException object
                                return(new Tuple <string, PersistedFace>(imgPath, null));
                            }
                        }
                    },
                                  img).Unwrap().ContinueWith((detectTask) =>
                    {
                        // Update detected faces for rendering
                        var detectionResult = detectTask?.Result;
                        if (detectionResult?.Item2 == null)
                        {
                            return;
                        }

                        this.Dispatcher.Invoke(
                            new Action
                            <ObservableCollection <Face>, string, PersistedFace>(
                                UIHelper.UpdateFace),
                            FacesCollection,
                            detectionResult.Item1,
                            detectionResult.Item2);
                    }));
                    processCount++;

                    if (processCount >= SuggestionCount && !forceContinue)
                    {
                        var continueProcess =
                            System.Windows.Forms.MessageBox.Show(
                                "The images loaded have reached the recommended count, may take long time if proceed. Would you like to continue to load images?",
                                "Warning", System.Windows.Forms.MessageBoxButtons.YesNo);
                        if (continueProcess == System.Windows.Forms.DialogResult.Yes)
                        {
                            forceContinue = true;
                        }
                        else
                        {
                            break;
                        }
                    }
                    if (tasks.Count >= _maxConcurrentProcesses || imageList.IsEmpty)
                    {
                        await Task.WhenAll(tasks);

                        tasks.Clear();
                    }
                }

                Person.Faces = FacesCollection;

                PersonVerifyButton.IsEnabled = (FacesCollection.Count != 0 && RightFaceResultCollection.Count != 0);

                if (invalidImageCount > 0)
                {
                    MainWindow.Log("Warning: more or less than one face is detected in {0} images, can not add to face list.", invalidImageCount);
                }
                MainWindow.Log("Response: Success. Total {0} faces are detected.", Person.Faces.Count);
            }
            GC.Collect();
        }
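        // A condensed sketch of the person-database setup above, assuming the same SDK.
        // BuildPersonAsync is a hypothetical helper name; the retry and concurrency handling of the
        // sample are omitted, and each image is expected to contain exactly one face (the add-face
        // call fails otherwise). The large person group is assumed to exist already.
        private static async Task<Guid> BuildPersonAsync(IFaceClient client, string largePersonGroupId, string personName, IEnumerable<string> imagePaths)
        {
            // Create the person; the service returns its new ID
            var person = await client.LargePersonGroupPerson.CreateAsync(largePersonGroupId, personName);

            foreach (var path in imagePaths)
            {
                using (var stream = File.OpenRead(path))
                {
                    // Detect and persist the face; the image path is stored as user data
                    await client.LargePersonGroupPerson.AddFaceFromStreamAsync(largePersonGroupId, person.PersonId, stream, path);
                }
            }
            return person.PersonId;
        }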
Example #5
        /// <summary>
        /// Pick image, detect and identify all faces detected
        /// </summary>
        /// <param name="sender">Event sender</param>
        /// <param name="e">Event arguments</param>
        private async void Identify_Click(object sender, RoutedEventArgs e)
        {
            // Show file picker
            Microsoft.Win32.OpenFileDialog dlg = new Microsoft.Win32.OpenFileDialog();
            dlg.DefaultExt = ".jpg";
            dlg.Filter     = "Image files(*.jpg, *.png, *.bmp, *.gif) | *.jpg; *.png; *.bmp; *.gif";
            var result = dlg.ShowDialog();

            if (result.HasValue && result.Value)
            {
                // User picked one image
                // Clear previous detection and identification results
                TargetFaces.Clear();
                var pickedImagePath = dlg.FileName;
                var renderingImage  = UIHelper.LoadImageAppliedOrientation(pickedImagePath);
                var imageInfo       = UIHelper.GetImageInfoForRendering(renderingImage);
                SelectedFile = renderingImage;

                var sw = Stopwatch.StartNew();

                var faceServiceClient = FaceServiceClientHelper.GetInstance(this);
                // Call detection REST API
                using (var fStream = File.OpenRead(pickedImagePath))
                {
                    try
                    {
                        var faces = await faceServiceClient.Face.DetectWithStreamAsync(fStream, recognitionModel : recognitionModel, detectionModel : detectionModel);

                        // Convert detection result into UI binding object for rendering
                        foreach (var face in UIHelper.CalculateFaceRectangleForRendering(faces, MaxImageSize, imageInfo))
                        {
                            TargetFaces.Add(face);
                        }

                        MainWindow.Log("Request: Identifying {0} face(s) in group \"{1}\"", faces.Count, GroupName);

                        // Identify each face
                        // Call the identify REST API; the result contains the identified person information
                        var identifyResult = await faceServiceClient.Face.IdentifyAsync((from face in faces where face.FaceId != null select face.FaceId.Value).ToList(), null, GroupName);

                        for (int idx = 0; idx < faces.Count; idx++)
                        {
                            // Update identification result for rendering
                            var face = TargetFaces[idx];
                            var res  = identifyResult[idx];
                            if (res.Candidates.Count > 0 && Persons.Any(p => p.PersonId == res.Candidates[0].PersonId.ToString()))
                            {
                                face.PersonName = Persons.First(p => p.PersonId == res.Candidates[0].PersonId.ToString()).PersonName;
                            }
                            else
                            {
                                face.PersonName = "Unknown";
                            }
                        }

                        var outString = new StringBuilder();
                        foreach (var face in TargetFaces)
                        {
                            outString.AppendFormat("Face {0} is identified as {1}. ", face.FaceId, face.PersonName);
                        }

                        MainWindow.Log("Response: Success. {0}", outString);
                    }
                    catch (APIErrorException ex)
                    {
                        MainWindow.Log("Response: {0}. {1}", ex.Body.Error.Code, ex.Body.Error.Message);
                    }
                }
            }
            GC.Collect();
        }
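        // Identification requires the large person group to be trained first; the handler above assumes
        // training already happened elsewhere. A minimal train-then-identify sketch, assuming the same SDK;
        // IdentifyInGroupAsync is a hypothetical helper name.
        private static async Task IdentifyInGroupAsync(IFaceClient client, string largePersonGroupId, IList<Guid> faceIds)
        {
            // Kick off training and poll until the status leaves Running
            await client.LargePersonGroup.TrainAsync(largePersonGroupId);
            TrainingStatus trainingStatus;
            do
            {
                await Task.Delay(1000);
                trainingStatus = await client.LargePersonGroup.GetTrainingStatusAsync(largePersonGroupId);
            } while (trainingStatus.Status == TrainingStatusType.Running);

            // Each result carries a candidate list ordered by confidence
            var results = await client.Face.IdentifyAsync(faceIds, largePersonGroupId : largePersonGroupId);
            foreach (var res in results)
            {
                var top = res.Candidates.FirstOrDefault();
                Console.WriteLine(top == null
                    ? string.Format("Face {0}: unknown", res.FaceId)
                    : string.Format("Face {0}: person {1} ({2:0.00})", res.FaceId, top.PersonId, top.Confidence));
            }
        }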
Example #6
        /// <summary>
        /// Pick image folder and detect all faces in these images
        /// </summary>
        /// <param name="sender">Event sender</param>
        /// <param name="e">Event arguments</param>
        private async void FolderPicker_Click(object sender, RoutedEventArgs e)
        {
            bool groupExists = false;

            var faceServiceClient = FaceServiceClientHelper.GetInstance(this);

            try
            {
                MainWindow.Log("Request: Large Face List {0} will be used to build a person database. Checking whether the large face list exists.", _largeFaceListName);

                await faceServiceClient.LargeFaceList.GetAsync(_largeFaceListName);

                groupExists = true;
                MainWindow.Log("Response: Large Face List {0} exists.", _largeFaceListName);
            }
            catch (APIErrorException ex)
            {
                if (ex.Body.Error.Code != "LargeFaceListNotFound")
                {
                    MainWindow.Log("Response: {0}. {1}", ex.Body.Error.Code, ex.Body.Error.Message);
                    return;
                }
                else
                {
                    MainWindow.Log("Response: Large Face List {0} did not exist previously.", _largeFaceListName);
                }
            }

            if (groupExists)
            {
                var cleanFaceList = System.Windows.MessageBox.Show(string.Format("Large face list \"{0}\" must be cleaned up before setting up a new one. Click OK to proceed; large face list \"{0}\" will be cleared.", _largeFaceListName), "Warning", MessageBoxButton.OKCancel);
                if (cleanFaceList == MessageBoxResult.OK)
                {
                    await faceServiceClient.LargeFaceList.DeleteAsync(_largeFaceListName);
                }
                else
                {
                    return;
                }
            }

            OpenFaceButton.IsEnabled = false;
            // Show folder picker
            System.Windows.Forms.FolderBrowserDialog dlg = new System.Windows.Forms.FolderBrowserDialog();
            var result = dlg.ShowDialog();

            bool forceContinue = false;


            if (result == System.Windows.Forms.DialogResult.OK)
            {
                // Enumerate all image files in the folder and add each detected face to the large face list
                List <Task> tasks = new List <Task>();

                FacesCollection.Clear();
                TargetFaces.Clear();
                FindSimilarMatchPersonCollection.Clear();
                FindSimilarMatchFaceCollection.Clear();
                SelectedFile = null;

                // The suggestion count only keeps the data preparation step small;
                // it does not correspond to any service-side constraint
                const int SuggestionCount = 10;
                int       processCount    = 0;

                MainWindow.Log("Request: Preparing, detecting faces in chosen folder.");

                await faceServiceClient.LargeFaceList.CreateAsync(_largeFaceListName, _largeFaceListName, "large face list for sample");

                var imageList =
                    new ConcurrentBag <string>(
                        Directory.EnumerateFiles(dlg.SelectedPath, "*.*", SearchOption.AllDirectories)
                        .Where(s => s.ToLower().EndsWith(".jpg") || s.ToLower().EndsWith(".png") || s.ToLower().EndsWith(".bmp") || s.ToLower().EndsWith(".gif")));

                string img;
                int    invalidImageCount = 0;
                while (imageList.TryTake(out img))
                {
                    tasks.Add(Task.Factory.StartNew(
                                  async(obj) =>
                    {
                        var imgPath = obj as string;
                        // Add the face to the large face list; the service detects the face in the image
                        using (var fStream = File.OpenRead(imgPath))
                        {
                            try
                            {
                                var faces =
                                    await faceServiceClient.LargeFaceList.AddFaceFromStreamAsync(_largeFaceListName, fStream);
                                return(new Tuple <string, PersistedFace>(imgPath, faces));
                            }
                            catch (APIErrorException ex)
                            {
                                // If the operation conflicts with another, retry.
                                if (ex.Body.Error.Code.Equals("ConcurrentOperationConflict"))
                                {
                                    imageList.Add(imgPath);
                                    return(null);
                                }
                                // If the rate limit is exceeded, retry.
                                else if (ex.Body.Error.Code.Equals("RateLimitExceeded"))
                                {
                                    imageList.Add(imgPath);
                                    return(null);
                                }
                                else if (ex.Body.Error.Message.Contains("more than 1 face in the image."))
                                {
                                    Interlocked.Increment(ref invalidImageCount);
                                }
                                // Here we simply ignore all detection failures in this sample.
                                // You may handle these exceptions by checking the ex.Body.Error.Code and ex.Body.Error.Message properties of the APIErrorException object
                                return(new Tuple <string, PersistedFace>(imgPath, null));
                            }
                        }
                    },
                                  img).Unwrap().ContinueWith((detectTask) =>
                    {
                        var res = detectTask?.Result;
                        if (res?.Item2 == null)
                        {
                            return;
                        }

                        // Update detected faces on UI
                        this.Dispatcher.Invoke(
                            new Action
                            <ObservableCollection <Face>, string, PersistedFace>(
                                UIHelper.UpdateFace),
                            FacesCollection,
                            res.Item1,
                            res.Item2);
                    }));

                    processCount++;

                    if (processCount >= SuggestionCount && !forceContinue)
                    {
                        var continueProcess =
                            System.Windows.Forms.MessageBox.Show(
                                "The images loaded have reached the recommended count, may take long time if proceed. Would you like to continue to load images?",
                                "Warning", System.Windows.Forms.MessageBoxButtons.YesNo);
                        if (continueProcess == System.Windows.Forms.DialogResult.Yes)
                        {
                            forceContinue = true;
                        }
                        else
                        {
                            break;
                        }
                    }

                    if (tasks.Count >= _maxConcurrentProcesses || imageList.IsEmpty)
                    {
                        await Task.WhenAll(tasks);

                        tasks.Clear();
                    }
                }
                if (invalidImageCount > 0)
                {
                    MainWindow.Log("Warning: more or less than one face is detected in {0} images, can not add to large face list.", invalidImageCount);
                }
                MainWindow.Log("Response: Success. Total {0} faces are detected.", FacesCollection.Count);

                try
                {
                    // Start to train the large face list.
                    MainWindow.Log("Request: Training large face list \"{0}\"", _largeFaceListName);
                    await faceServiceClient.LargeFaceList.TrainAsync(_largeFaceListName);

                    // Wait until the training is completed.
                    while (true)
                    {
                        await Task.Delay(1000);

                        var trainingStatus = await faceServiceClient.LargeFaceList.GetTrainingStatusAsync(_largeFaceListName);

                        MainWindow.Log("Response: {0}. Large face list \"{1}\" training process is {2}", "Success", _largeFaceListName, trainingStatus.Status);
                        if (trainingStatus.Status != TrainingStatusType.Running)
                        {
                            if (trainingStatus.Status == TrainingStatusType.Failed)
                            {
                                MainWindow.Log("Response: Training failed with message {0}.", trainingStatus.Message);
                            }

                            break;
                        }
                    }
                    OpenFaceButton.IsEnabled = true;
                }
                catch (APIErrorException ex)
                {
                    MainWindow.Log("Response: {0}. {1}", ex.Body.Error.Code, ex.Body.Error.Message);
                }
            }

            GC.Collect();
        }
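        // A condensed sketch of the large-face-list setup and training above, assuming the same SDK.
        // BuildLargeFaceListAsync is a hypothetical helper name; the per-image retry logic of the sample is omitted.
        private static async Task BuildLargeFaceListAsync(IFaceClient client, string largeFaceListId, IEnumerable<string> imagePaths)
        {
            await client.LargeFaceList.CreateAsync(largeFaceListId, largeFaceListId, "large face list for sample");

            foreach (var path in imagePaths)
            {
                using (var stream = File.OpenRead(path))
                {
                    // Each image must contain exactly one detectable face; the path is stored as user data
                    await client.LargeFaceList.AddFaceFromStreamAsync(largeFaceListId, stream, path);
                }
            }

            // Training is required before the list can serve find similar queries
            await client.LargeFaceList.TrainAsync(largeFaceListId);
        }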
Example #7
        /// <summary>
        /// Pick an image and call find similar in both modes for each detected face
        /// </summary>
        /// <param name="sender">Event sender</param>
        /// <param name="e">Event arguments</param>
        private async void FindSimilar_Click(object sender, RoutedEventArgs e)
        {
            // Show file picker
            Microsoft.Win32.OpenFileDialog dlg = new Microsoft.Win32.OpenFileDialog();
            dlg.DefaultExt = ".jpg";
            dlg.Filter     = "Image files (*.jpg, *.png, *.bmp, *.gif) | *.jpg; *.png; *.bmp; *.gif";
            var filePicker = dlg.ShowDialog();

            if (filePicker.HasValue && filePicker.Value)
            {
                // User picked image
                // Clear previous detection and find similar results
                TargetFaces.Clear();
                FindSimilarMatchPersonCollection.Clear();
                FindSimilarMatchFaceCollection.Clear();
                var sw = Stopwatch.StartNew();

                var pickedImagePath = dlg.FileName;
                var renderingImage  = UIHelper.LoadImageAppliedOrientation(pickedImagePath);
                var imageInfo       = UIHelper.GetImageInfoForRendering(renderingImage);
                SelectedFile = renderingImage;

                // Detect all faces in the picked image
                using (var fStream = File.OpenRead(pickedImagePath))
                {
                    MainWindow.Log("Request: Detecting faces in {0}", SelectedFile);
                    var faceServiceClient      = FaceServiceClientHelper.GetInstance(this);
                    IList <DetectedFace> faces = await faceServiceClient.Face.DetectWithStreamAsync(fStream);

                    // Update detected faces on UI
                    foreach (var face in UIHelper.CalculateFaceRectangleForRendering(faces, MaxImageSize, imageInfo))
                    {
                        TargetFaces.Add(face);
                    }

                    MainWindow.Log("Response: Success. Detected {0} face(s) in {1}", faces.Count, SelectedFile);

                    // Find two modes similar faces for each face
                    foreach (var f in faces)
                    {
                        if (f.FaceId == null)
                        {
                            continue;
                        }

                        var faceId = f.FaceId.Value;

                        MainWindow.Log("Request: Finding similar faces in Personal Match Mode for face {0}", faceId);

                        try
                        {
                            // Default (matchPerson) mode: call the find similar REST API; the result contains the IDs of faces that likely belong to the same person as the query face
                            const int           requestCandidatesCount = 4;
                            IList <SimilarFace> result = await faceServiceClient.Face.FindSimilarAsync(
                                faceId,
                                null,
                                _largeFaceListName,
                                maxNumOfCandidatesReturned : requestCandidatesCount);

                            // Update the "matchPerson" similar results collection for rendering
                            var personSimilarResult = new FindSimilarResult();
                            personSimilarResult.Faces     = new ObservableCollection <Face>();
                            personSimilarResult.QueryFace = new Face()
                            {
                                ImageFile = SelectedFile,
                                Top       = f.FaceRectangle.Top,
                                Left      = f.FaceRectangle.Left,
                                Width     = f.FaceRectangle.Width,
                                Height    = f.FaceRectangle.Height,
                                FaceId    = faceId.ToString(),
                            };
                            foreach (var fr in result)
                            {
                                var  candidateFace = FacesCollection.First(ff => ff.FaceId == fr.PersistedFaceId.ToString());
                                Face newFace       = new Face();
                                newFace.ImageFile  = candidateFace.ImageFile;
                                newFace.Confidence = fr.Confidence;
                                newFace.FaceId     = candidateFace.FaceId;
                                personSimilarResult.Faces.Add(newFace);
                            }

                            MainWindow.Log("Response: Found {0} similar faces for face {1}", personSimilarResult.Faces.Count, faceId);

                            FindSimilarMatchPersonCollection.Add(personSimilarResult);
                        }
                        catch (APIErrorException ex)
                        {
                            MainWindow.Log("Response: {0}. {1}", ex.Body.Error.Code, ex.Body.Error.Message);
                        }

                        try
                        {
                            // matchFace mode: call the find similar REST API; the result contains the top N faces with the highest similarity confidence
                            const int requestCandidatesCount = 4;
                            var       result = await faceServiceClient.Face.FindSimilarAsync(
                                faceId,
                                null,
                                _largeFaceListName,
                                maxNumOfCandidatesReturned : requestCandidatesCount,
                                mode : FindSimilarMatchMode.MatchFace);

                            // Update "matchFace" similar results collection for rendering
                            var faceSimilarResults = new FindSimilarResult();
                            faceSimilarResults.Faces     = new ObservableCollection <Face>();
                            faceSimilarResults.QueryFace = new Face()
                            {
                                ImageFile = SelectedFile,
                                Top       = f.FaceRectangle.Top,
                                Left      = f.FaceRectangle.Left,
                                Width     = f.FaceRectangle.Width,
                                Height    = f.FaceRectangle.Height,
                                FaceId    = faceId.ToString(),
                            };
                            foreach (var fr in result)
                            {
                                var  candidateFace = FacesCollection.First(ff => ff.FaceId == fr.PersistedFaceId.ToString());
                                Face newFace       = new Face();
                                newFace.ImageFile  = candidateFace.ImageFile;
                                newFace.Confidence = fr.Confidence;
                                newFace.FaceId     = candidateFace.FaceId;
                                faceSimilarResults.Faces.Add(newFace);
                            }

                            MainWindow.Log("Response: Found {0} similar faces for face {1}", faceSimilarResults.Faces.Count, faceId);

                            FindSimilarMatchFaceCollection.Add(faceSimilarResults);
                        }
                        catch (APIErrorException ex)
                        {
                            MainWindow.Log("Response: {0}. {1}", ex.Body.Error.Code, ex.Body.Error.Message);
                        }
                    }
                }
            }
            GC.Collect();
        }
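        // A minimal sketch of the two find-similar modes used above, assuming the same SDK;
        // FindSimilarBothModesAsync is a hypothetical helper name.
        private static async Task FindSimilarBothModesAsync(IFaceClient client, Guid queryFaceId, string largeFaceListId)
        {
            // matchPerson (the default) filters candidates to faces that likely belong to the same person
            var samePerson = await client.Face.FindSimilarAsync(queryFaceId, largeFaceListId : largeFaceListId, maxNumOfCandidatesReturned : 4);

            // matchFace ranks by pure facial similarity, without the same-person filter
            var similarFaces = await client.Face.FindSimilarAsync(queryFaceId, largeFaceListId : largeFaceListId, maxNumOfCandidatesReturned : 4, mode : FindSimilarMatchMode.MatchFace);

            Console.WriteLine("matchPerson: {0} candidate(s), matchFace: {1} candidate(s)", samePerson.Count, similarFaces.Count);
        }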
Example #8
        /// <summary>
        /// Pick folder, then group detected faces by similarity
        /// </summary>
        /// <param name="sender">Event sender</param>
        /// <param name="e">Event arguments</param>
        private async void Grouping_Click(object sender, RoutedEventArgs e)
        {
            // Show folder picker
            System.Windows.Forms.FolderBrowserDialog dlg = new System.Windows.Forms.FolderBrowserDialog();
            var result = dlg.ShowDialog();

            // The suggestion count only keeps the data preparation step small;
            // it does not correspond to any service-side constraint
            const int SuggestionCount = 10;

            if (result == System.Windows.Forms.DialogResult.OK)
            {
                // User picked one folder
                List <Task> tasks         = new List <Task>();
                int         processCount  = 0;
                bool        forceContinue = false;

                // Clear previous grouping result
                GroupedFaces.Clear();
                Faces.Clear();

                var faceServiceClient = FaceServiceClientHelper.GetInstance(this);
                MainWindow.Log("Request: Preparing faces for grouping, detecting faces in chosen folder.");

                var imageList =
                    new ConcurrentBag <string>(
                        Directory.EnumerateFiles(dlg.SelectedPath, "*.*", SearchOption.AllDirectories)
                        .Where(s => s.ToLower().EndsWith(".jpg") || s.ToLower().EndsWith(".png") || s.ToLower().EndsWith(".bmp") || s.ToLower().EndsWith(".gif")));

                string img;
                while (imageList.TryTake(out img))
                {
                    tasks.Add(Task.Factory.StartNew(
                                  async(obj) =>
                    {
                        var imgPath = obj as string;
                        // Detect faces in image
                        using (var fStream = File.OpenRead(imgPath))
                        {
                            try
                            {
                                var faces = await faceServiceClient.Face.DetectWithStreamAsync(fStream);
                                return(new Tuple <string, IList <DetectedFace> >(imgPath, faces));
                            }
                            catch (APIErrorException ex)
                            {
                                // If the operation conflicts with another, retry.
                                if (ex.Body.Error.Code.Equals("ConcurrentOperationConflict"))
                                {
                                    imageList.Add(imgPath);
                                    return(null);
                                }
                                // Here we simply ignore all detection failures in this sample.
                                // You may handle these exceptions by checking the ex.Body.Error.Code and ex.Body.Error.Message properties of the APIErrorException object
                                return(new Tuple <string, IList <DetectedFace> >(imgPath, null));
                            }
                        }
                    },
                                  img).Unwrap().ContinueWith((detectTask) =>
                    {
                        // Update detected faces on UI
                        var res = detectTask?.Result;
                        if (res?.Item2 == null)
                        {
                            return;
                        }

                        foreach (var f in res.Item2)
                        {
                            this.Dispatcher.Invoke(
                                new Action <ObservableCollection <Face>, string, DetectedFace>(UIHelper.UpdateFace),
                                Faces,
                                res.Item1,
                                f);
                        }
                    }));
                    processCount++;

                    if (processCount >= SuggestionCount && !forceContinue)
                    {
                        var continueProcess = System.Windows.Forms.MessageBox.Show("Found many images under chosen folder, may take long time if proceed. Continue?", "Warning", System.Windows.Forms.MessageBoxButtons.YesNo);
                        if (continueProcess == System.Windows.Forms.DialogResult.Yes)
                        {
                            forceContinue = true;
                        }
                        else
                        {
                            break;
                        }
                    }
                    if (tasks.Count >= _maxConcurrentProcesses || imageList.IsEmpty)
                    {
                        await Task.WhenAll(tasks);

                        tasks.Clear();
                    }
                }

                MainWindow.Log("Response: Success. Total {0} faces are detected.", Faces.Count);

                try
                {
                    MainWindow.Log("Request: Grouping {0} faces.", Faces.Count);

                    // Call grouping; the result is a collection of groups, each containing similar faces
                    var groupRes = await faceServiceClient.Face.GroupAsync(Faces.Select(f => Guid.Parse(f.FaceId)).ToArray());

                    // Update grouping results for rendering
                    foreach (var g in groupRes.Groups)
                    {
                        var gg = new GroupingResult()
                        {
                            Faces        = new ObservableCollection <Face>(),
                            IsMessyGroup = false,
                        };

                        foreach (var fr in g)
                        {
                            gg.Faces.Add(Faces.First(f => f.FaceId == fr.ToString()));
                        }

                        GroupedFaces.Add(gg);
                    }

                    // MessyGroup contains all faces that are not similar to any other face.
                    // Take an extreme case for example:
                    // when grouping faces where none are similar to each other, the result will contain only one messy group
                    if (groupRes.MessyGroup.Count > 0)
                    {
                        var messyGroup = new GroupingResult()
                        {
                            Faces        = new ObservableCollection <Face>(),
                            IsMessyGroup = true
                        };
                        foreach (var messy in groupRes.MessyGroup)
                        {
                            messyGroup.Faces.Add(Faces.First(f => f.FaceId == messy.ToString()));
                        }

                        GroupedFaces.Add(messyGroup);
                    }

                    MainWindow.Log("Response: Success. {0} faces are grouped into {1} groups.", Faces.Count, GroupedFaces.Count);
                }
                catch (APIErrorException ex)
                {
                    MainWindow.Log("Response: {0}. {1}", ex.Body.Error.Code, ex.Body.Error.Message);
                }
            }
            GC.Collect();
        }
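        // A minimal sketch of the grouping call above, assuming the same SDK; PrintGroupsAsync is a
        // hypothetical helper name and faceIds come from prior detection calls.
        private static async Task PrintGroupsAsync(IFaceClient client, IList<Guid> faceIds)
        {
            var groupResult = await client.Face.GroupAsync(faceIds);

            int groupIndex = 0;
            foreach (var group in groupResult.Groups)
            {
                Console.WriteLine("Group {0}: {1} face(s)", groupIndex++, group.Count);
            }

            // Faces not similar to any other face fall into the messy group
            Console.WriteLine("Messy group: {0} face(s)", groupResult.MessyGroup.Count);
        }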
Example #9
        /// <summary>
        /// Pick an image for face detection and put the detection results into the result container
        /// </summary>
        /// <param name="sender">Event sender</param>
        /// <param name="e">Event argument</param>
        private async void ImagePicker_Click(object sender, RoutedEventArgs e)
        {
            // Show file picker dialog
            Microsoft.Win32.OpenFileDialog dlg = new Microsoft.Win32.OpenFileDialog();
            dlg.DefaultExt = ".jpg";
            dlg.Filter     = "Image files (*.jpg, *.png, *.bmp, *.gif) | *.jpg; *.png; *.bmp; *.gif";
            var result = dlg.ShowDialog();

            if (result.HasValue && result.Value)
            {
                // User picked one image
                var pickedImagePath = dlg.FileName;
                var renderingImage  = UIHelper.LoadImageAppliedOrientation(pickedImagePath);
                var imageInfo       = UIHelper.GetImageInfoForRendering(renderingImage);
                SelectedFile = renderingImage;

                // Clear last detection result
                ResultCollection.Clear();
                DetectedFaces.Clear();
                DetectedResultsInText = "Detecting...";

                MainWindow.Log("Request: Detecting {0}", pickedImagePath);
                var sw = Stopwatch.StartNew();

                // Call detection REST API
                using (var fStream = File.OpenRead(pickedImagePath))
                {
                    try
                    {
                        var faceServiceClient      = FaceServiceClientHelper.GetInstance(this);
                        IList <DetectedFace> faces = await faceServiceClient.Face.DetectWithStreamAsync(
                            fStream,
                            returnFaceId : false,
                            returnFaceLandmarks : true,
                            returnFaceAttributes : new List <FaceAttributeType>()
                        {
                            FaceAttributeType.Accessories,
                            FaceAttributeType.Age,
                            FaceAttributeType.Blur,
                            FaceAttributeType.Emotion,
                            FaceAttributeType.Exposure,
                            FaceAttributeType.FacialHair,
                            FaceAttributeType.Gender,
                            FaceAttributeType.Glasses,
                            FaceAttributeType.Hair,
                            FaceAttributeType.HeadPose,
                            FaceAttributeType.Makeup,
                            FaceAttributeType.Noise,
                            FaceAttributeType.Occlusion,
                            FaceAttributeType.Smile
                        },
                            recognitionModel : recognitionModel
                            );

                        MainWindow.Log("Response: Success. Detected {0} face(s) in {1}", faces.Count, pickedImagePath);

                        DetectedResultsInText = string.Format("{0} face(s) has been detected", faces.Count);

                        foreach (var face in faces)
                        {
                            DetectedFaces.Add(new Face()
                            {
                                ImageFile         = renderingImage,
                                Left              = face.FaceRectangle.Left,
                                Top               = face.FaceRectangle.Top,
                                Width             = face.FaceRectangle.Width,
                                Height            = face.FaceRectangle.Height,
                                FaceId            = face.FaceId?.ToString(),
                                Age               = string.Format("{0:#} years old", face.FaceAttributes.Age),
                                Gender            = face.FaceAttributes.Gender.ToString(),
                                HeadPose          = string.Format("Pitch: {0}, Roll: {1}, Yaw: {2}", Math.Round(face.FaceAttributes.HeadPose.Pitch, 2), Math.Round(face.FaceAttributes.HeadPose.Roll, 2), Math.Round(face.FaceAttributes.HeadPose.Yaw, 2)),
                                FacialHair        = string.Format("FacialHair: {0}", face.FaceAttributes.FacialHair.Moustache + face.FaceAttributes.FacialHair.Beard + face.FaceAttributes.FacialHair.Sideburns > 0 ? "Yes" : "No"),
                                Glasses           = string.Format("GlassesType: {0}", face.FaceAttributes.Glasses.ToString()),
                                Emotion           = $"{GetEmotion(face.FaceAttributes.Emotion)}",
                                Hair              = string.Format("Hair: {0}", GetHair(face.FaceAttributes.Hair)),
                                Makeup            = string.Format("Makeup: {0}", ((face.FaceAttributes.Makeup.EyeMakeup || face.FaceAttributes.Makeup.LipMakeup) ? "Yes" : "No")),
                                EyeOcclusion      = string.Format("EyeOccluded: {0}", ((face.FaceAttributes.Occlusion.EyeOccluded) ? "Yes" : "No")),
                                ForeheadOcclusion = string.Format("ForeheadOccluded: {0}", (face.FaceAttributes.Occlusion.ForeheadOccluded ? "Yes" : "No")),
                                MouthOcclusion    = string.Format("MouthOccluded: {0}", (face.FaceAttributes.Occlusion.MouthOccluded ? "Yes" : "No")),
                                Accessories       = $"{GetAccessories(face.FaceAttributes.Accessories)}",
                                Blur              = string.Format("Blur: {0}", face.FaceAttributes.Blur.BlurLevel.ToString()),
                                Exposure          = string.Format("{0}", face.FaceAttributes.Exposure.ExposureLevel.ToString()),
                                Noise             = string.Format("Noise: {0}", face.FaceAttributes.Noise.NoiseLevel.ToString()),
                            });
                        }

                        // Convert detection result into UI binding object for rendering
                        foreach (var face in UIHelper.CalculateFaceRectangleForRendering(faces, MaxImageSize, imageInfo))
                        {
                            ResultCollection.Add(face);
                        }
                    }
                    catch (APIErrorException ex)
                    {
                        MainWindow.Log("Response: {0}. {1}", ex.Body.Error.Code, ex.Body.Error.Message);
                        GC.Collect();
                        return;
                    }
                    GC.Collect();
                }
            }
        }
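        // A minimal sketch of requesting just a subset of face attributes, assuming the same SDK;
        // PrintAgeAndEmotionAsync is a hypothetical helper name.
        private static async Task PrintAgeAndEmotionAsync(IFaceClient client, string imagePath)
        {
            using (var stream = File.OpenRead(imagePath))
            {
                var faces = await client.Face.DetectWithStreamAsync(
                    stream,
                    returnFaceId : false,
                    returnFaceAttributes : new List <FaceAttributeType>() { FaceAttributeType.Age, FaceAttributeType.Emotion });

                foreach (var face in faces)
                {
                    Console.WriteLine("Age: {0}, Happiness: {1:0.00}", face.FaceAttributes.Age, face.FaceAttributes.Emotion.Happiness);
                }
            }
        }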