/// <summary>
        /// Starts training of the large person group and polls once a second until the
        /// service reports that training has left the Running state.
        /// </summary>
        /// <returns>A task that completes when training has finished (succeeded or failed).</returns>
        private async Task CheckGroupIsTrained()
        {
            // Start train large person group (retried on service rate limiting).
            MainWindow.Log("Request: Training group \"{0}\"", _scanGroup.Group.LargePersonGroupId);
            await RetryHelper.VoidOperationWithBasicRetryAsync(() =>
                                                               _faceServiceClient.TrainLargePersonGroupAsync(_scanGroup.Group.LargePersonGroupId),
                                                               new[] { "RateLimitExceeded" },
                                                               traceWriter : _mainWindowLogTraceWriter);

            // Wait until train completed. Transient status-query failures are retried, but
            // only up to a bounded number of CONSECUTIVE failures — the original caught and
            // swallowed every exception, so a persistent outage would spin forever.
            const int MaxConsecutiveStatusFailures = 10;
            int consecutiveFailures = 0;
            while (true)
            {
                await Task.Delay(1000);

                try
                {
                    var status = await _faceServiceClient.GetLargePersonGroupTrainingStatusAsync(_scanGroup.Group.LargePersonGroupId);
                    consecutiveFailures = 0;

                    MainWindow.Log("Response: {0}. Group \"{1}\" training process is {2}", "Success", _scanGroup.Group.LargePersonGroupId, status.Status);
                    if (status.Status != Microsoft.ProjectOxford.Face.Contract.Status.Running)
                    {
                        break;
                    }
                }
                catch (Exception ex)
                {
                    MainWindow.Log($"Error: {ex.Message}");
                    if (++consecutiveFailures >= MaxConsecutiveStatusFailures)
                    {
                        // Give up instead of looping forever on a persistent failure.
                        throw;
                    }
                    // Otherwise treat as transient and poll again.
                }
            }
        }
// ===== Beispiel #2 (scraped sample separator; vote count: 0) =====
        /// <summary>
        /// Button handler: starts training of the large person group identified by
        /// <c>localValue</c> and shows the current training status in the UI.
        /// </summary>
        /// <param name="sender">Event sender.</param>
        /// <param name="e">Event argument.</param>
        private async void ButtonEntrenar_Click(object sender, RoutedEventArgs e)
        {
            await faceServiceClient.TrainLargePersonGroupAsync(localValue);

            // One-shot status read (no polling), so the status shown may still be
            // "Running". The original also allocated a throwaway TrainingStatus
            // that was immediately overwritten; that dead allocation is removed.
            TrainingStatus trainingStatus = await faceServiceClient.GetLargePersonGroupTrainingStatusAsync(localValue);

            // Marshal the UI update onto the dispatcher thread; the returned task is
            // intentionally unawaited (fire-and-forget UI update).
            var ignored3 = this.Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () =>
            {
                statusItem.Text = trainingStatus.Status.ToString();
            });
        }
        /// <summary>
        /// Kicks off training for the first large person group returned by the service.
        /// </summary>
        public static async Task TrainPersonGroupAsync()
        {
            try
            {
                var groups = await _faceServiceClient.ListLargePersonGroupsAsync();
                var firstGroup = groups[0];

                await _faceServiceClient.TrainLargePersonGroupAsync(firstGroup.LargePersonGroupId);
            }
            catch (Exception e)
            {
                // Log and propagate — the caller decides how to handle the failure.
                Debug.WriteLine("" + e);
                throw;
            }
        }
// ===== Beispiel #4 (scraped sample separator; vote count: 0) =====
        /// <summary>
        /// Trains the configured large person group and waits (asynchronously) until
        /// the service reports that the training run is no longer running.
        /// </summary>
        private async Task TrainFaces()
        {
            Dispatcher.Invoke(() => { Results = "Training..."; });
            await FaceServiceClient.TrainLargePersonGroupAsync(GroupId);

            // Poll once a second until the service stops reporting Running.
            for (; ; )
            {
                await Task.Delay(1000);

                var trainingStatus = await FaceServiceClient.GetLargePersonGroupTrainingStatusAsync(GroupId);
                if (trainingStatus.Status != Status.Running)
                {
                    break;
                }
            }

            Dispatcher.Invoke(() => { Results = "Training Complete."; });
        }
// ===== Beispiel #5 (scraped sample separator; vote count: 0) =====
        /// <summary>
        /// Downloads each attached image, registers it as a face for the configured person
        /// in large person group "1", retrains the group and posts the training status
        /// back to the conversation.
        /// </summary>
        /// <param name="context">Dialog context used to post the reply.</param>
        /// <param name="argument">The incoming image attachments.</param>
        public async Task ValidarImagen(IDialogContext context, IAwaitable <IEnumerable <Attachment> > argument)
        {
            var        imagen  = await argument;
            HttpClient cliente = new HttpClient();

            // Hoisted out of the loop — the original created a new FaceServiceClient per
            // attachment. NOTE(review): ideally both clients would be reused across calls.
            FaceServiceClient faceServiceClient = new FaceServiceClient(subscriptionKey, subscriptionEndpoint);

            foreach (var pic in imagen)
            {
                var url  = pic.ContentUrl;
                var dato = await cliente.GetByteArrayAsync(url);

                Stream stream = new MemoryStream(dato);

                var faces = await faceServiceClient.AddPersonFaceInLargePersonGroupAsync("1", new Guid(guidPersona), stream, null, null);

                // BUG FIX: the original read .Status off the UN-AWAITED Task returned by
                // TrainLargePersonGroupAsync — i.e. a TaskStatus, not the group's training
                // status. Await the training call, then query the real training status.
                await faceServiceClient.TrainLargePersonGroupAsync("1");
                var entrenamiento = await faceServiceClient.GetLargePersonGroupTrainingStatusAsync("1");
                var status        = entrenamiento.Status.ToString();

                await context.PostAsync("Estado entrenamiento: " + status + " PersonGuid: " + faces.PersistedFaceId.ToString());
            }
        }
        /// <summary>
        /// Looks up the user's person GUID and large-person-group id in the database,
        /// uploads the given image as a new face for that person, retrains the group and
        /// waits for training to finish.
        /// </summary>
        /// <param name="UserID">Database id of the user to attach the face to.</param>
        /// <param name="file">Image file containing the face.</param>
        /// <returns>A task that completes when training has finished.</returns>
        public async Task AddFaceToPerson(int UserID, StorageFile file)
        {
            // FIX: was "async void" on a non-event method, making exceptions unobservable;
            // returning Task is source-compatible for existing callers.
            Guid   userGuid          = new Guid();
            string userPersonGroupID = null;

            // Fetch the person GUID and group id for this user.
            using (SqlCommand getUserPersonAndGroupID = new SqlCommand(getUserPersonAndGroupIDQuery))
            {
                getUserPersonAndGroupID.Connection = connection;
                getUserPersonAndGroupID.Parameters.Add("@userID", SqlDbType.Int).Value = UserID;
                connection.Open();
                try
                {
                    using (SqlDataReader reader = getUserPersonAndGroupID.ExecuteReader())
                    {
                        if (reader.Read())
                        {
                            userGuid = reader.GetGuid(0);
                            var result = reader.GetValue(1);
                            userPersonGroupID = result.ToString();
                        }
                    }
                }
                finally
                {
                    // FIX: ensure the shared connection is released even if the query throws.
                    connection.Close();
                }
            }

            // SECURITY NOTE(review): subscription key is hard-coded in source; move it to
            // configuration/secret storage.
            IFaceServiceClient faceServiceClient = new FaceServiceClient("ae3512e532c545ba9e821202a1bbd350", "https://eastus.api.cognitive.microsoft.com/face/v1.0");

            using (Stream s = await file.OpenStreamForReadAsync())
            {
                await faceServiceClient.AddPersonFaceInLargePersonGroupAsync(userPersonGroupID, userGuid, s, null, null);
            }
            await faceServiceClient.TrainLargePersonGroupAsync(userPersonGroupID);

            var trainingStatus = await faceServiceClient.GetLargePersonGroupTrainingStatusAsync(userPersonGroupID);

            while (trainingStatus.Status == Status.Running)
            {
                // BUG FIX: the original polled in a tight loop with no delay, hammering
                // the service; wait a second between status queries.
                await Task.Delay(1000);
                trainingStatus = await faceServiceClient.GetLargePersonGroupTrainingStatusAsync(userPersonGroupID);
            }
        }
// ===== Beispiel #7 (scraped sample separator; vote count: 0) =====
        /// <summary>
        /// Detects faces (with the full attribute set) in the image at
        /// <paramref name="filePath"/>, builds UI models for each face, trains the scan
        /// group, waits for training to complete, then runs matching via GoGetMatches.
        /// </summary>
        /// <param name="filePath">The file path.</param>
        private async void ProcessFile(string filePath)
        {
            _selectedFilePath = filePath;

            using (var fStream = File.OpenRead(filePath))
            {
                try
                {
                    // Detect faces and request every supported attribute; the call is
                    // retried on FaceAPIException via the retry helper.
                    var faces = await RetryHelper.OperationWithBasicRetryAsync(async() => await
                                                                               _faceServiceClient.DetectAsync(fStream, false, true, new FaceAttributeType[] { FaceAttributeType.Gender, FaceAttributeType.Age, FaceAttributeType.Smile, FaceAttributeType.Glasses, FaceAttributeType.HeadPose, FaceAttributeType.FacialHair, FaceAttributeType.Emotion, FaceAttributeType.Hair, FaceAttributeType.Makeup, FaceAttributeType.Occlusion, FaceAttributeType.Accessories, FaceAttributeType.Noise, FaceAttributeType.Exposure, FaceAttributeType.Blur }),
                                                                               new[] { typeof(FaceAPIException) },
                                                                               traceWriter : _mainWindowLogTraceWriter);

                    MainWindow.Log("Response: Success. Detected {0} face(s) in {1}", faces.Length, filePath);

                    // No faces detected: re-enable navigation and bail out early.
                    if (faces.Length == 0)
                    {
                        btnNext.IsEnabled = true;
                        return;
                    }

                    var renderingImage = UIHelper.LoadImageAppliedOrientation(filePath);
                    var imageInfo      = UIHelper.GetImageInfoForRendering(renderingImage);
                    SelectedFile = renderingImage;

                    // Build one UI model per detected face with formatted attribute strings.
                    foreach (var face in faces)
                    {
                        DetectedFaces.Add(new Models.Face()
                        {
                            ImageFile     = renderingImage,
                            Left          = face.FaceRectangle.Left,
                            Top           = face.FaceRectangle.Top,
                            Width         = face.FaceRectangle.Width,
                            Height        = face.FaceRectangle.Height,
                            FaceRectangle = new FaceRectangle {
                                Height = face.FaceRectangle.Height, Width = face.FaceRectangle.Width, Left = face.FaceRectangle.Left, Top = face.FaceRectangle.Top
                            },
                            FaceId            = face.FaceId.ToString(),
                            Age               = string.Format("{0:#} years old", face.FaceAttributes.Age),
                            Gender            = face.FaceAttributes.Gender,
                            HeadPose          = string.Format("Pitch: {0}, Roll: {1}, Yaw: {2}", Math.Round(face.FaceAttributes.HeadPose.Pitch, 2), Math.Round(face.FaceAttributes.HeadPose.Roll, 2), Math.Round(face.FaceAttributes.HeadPose.Yaw, 2)),
                            FacialHair        = string.Format("FacialHair: {0}", face.FaceAttributes.FacialHair.Moustache + face.FaceAttributes.FacialHair.Beard + face.FaceAttributes.FacialHair.Sideburns > 0 ? "Yes" : "No"),
                            Glasses           = string.Format("GlassesType: {0}", face.FaceAttributes.Glasses.ToString()),
                            Emotion           = $"{GetEmotion(face.FaceAttributes.Emotion)}",
                            Hair              = string.Format("Hair: {0}", GetHair(face.FaceAttributes.Hair)),
                            Makeup            = string.Format("Makeup: {0}", ((face.FaceAttributes.Makeup.EyeMakeup || face.FaceAttributes.Makeup.LipMakeup) ? "Yes" : "No")),
                            EyeOcclusion      = string.Format("EyeOccluded: {0}", ((face.FaceAttributes.Occlusion.EyeOccluded) ? "Yes" : "No")),
                            ForeheadOcclusion = string.Format("ForeheadOccluded: {0}", (face.FaceAttributes.Occlusion.ForeheadOccluded ? "Yes" : "No")),
                            MouthOcclusion    = string.Format("MouthOccluded: {0}", (face.FaceAttributes.Occlusion.MouthOccluded ? "Yes" : "No")),
                            Accessories       = $"{GetAccessories(face.FaceAttributes.Accessories)}",
                            Blur              = string.Format("Blur: {0}", face.FaceAttributes.Blur.BlurLevel.ToString()),
                            Exposure          = string.Format("{0}", face.FaceAttributes.Exposure.ExposureLevel.ToString()),
                            Noise             = string.Format("Noise: {0}", face.FaceAttributes.Noise.NoiseLevel.ToString()),
                        });
                    }

                    // Convert detection result into UI binding object for rendering
                    foreach (var face in UIHelper.CalculateFaceRectangleForRendering(faces, MaxImageSize, imageInfo))
                    {
                        ResultCollection.Add(face);
                    }

                    // Start train large person group (retried on FaceAPIException).
                    MainWindow.Log("Request: Training group \"{0}\"", _scanGroup.Group.LargePersonGroupId);
                    await RetryHelper.VoidOperationWithBasicRetryAsync(() =>
                                                                       _faceServiceClient.TrainLargePersonGroupAsync(_scanGroup.Group.LargePersonGroupId),
                                                                       new[] { typeof(FaceAPIException) },
                                                                       traceWriter : _mainWindowLogTraceWriter);

                    // Wait until train completed: poll once a second until the status is
                    // no longer Running. NOTE(review): the catch below swallows ALL
                    // exceptions and keeps polling — a persistent failure loops forever.
                    while (true)
                    {
                        await Task.Delay(1000);

                        try // Temporary
                        {
                            var status = await _faceServiceClient.GetLargePersonGroupTrainingStatusAsync(_scanGroup.Group.LargePersonGroupId);

                            MainWindow.Log("Response: {0}. Group \"{1}\" training process is {2}", "Success", _scanGroup.Group.LargePersonGroupId, status.Status);
                            if (status.Status != Microsoft.ProjectOxford.Face.Contract.Status.Running)
                            {
                                break;
                            }
                        }
                        catch (Exception ex)
                        {
                            MainWindow.Log($"Error: {ex.Message}");
                            // retry
                        }
                    }

                    await GoGetMatches();
                }
                catch (FaceAPIException ex)
                {
                    MainWindow.Log("Response: {0}. {1}", ex.ErrorCode, ex.ErrorMessage);
                    // NOTE(review): explicit GC.Collect() is generally discouraged in
                    // production code; kept as-is from the sample.
                    GC.Collect();
                    return;
                }
                GC.Collect();
            }

            btnNext.IsEnabled = true;
        }
// ===== Beispiel #8 (scraped sample separator; vote count: 0) =====
 /// <summary>
 /// Button handler: starts training of the large person group identified by
 /// <c>localValue</c> and awaits the service call's completion.
 /// </summary>
 /// <param name="sender">Event sender.</param>
 /// <param name="e">Event argument.</param>
 private async void ButtonEntrenar_Click(object sender, RoutedEventArgs e)
 {
     // Issue the training request and await the HTTP call itself; this does NOT
     // wait for the training run to finish on the service side.
     Task trainingRequest = faceServiceClient.TrainLargePersonGroupAsync(localValue);
     await trainingRequest;
 }
// ===== Beispiel #9 (scraped sample separator; vote count: 0) =====
        /// <summary>
        /// Pick the root person database folder, to minimum the data preparation logic, the folder should be under following construction
        /// Each person's image should be put into one folder named as the person's name
        /// All person's image folder should be put directly under the root person database folder
        /// </summary>
        /// <param name="sender">Event sender</param>
        /// <param name="e">Event argument</param>
        private async void FolderPicker_Click(object sender, RoutedEventArgs e)
        {
            bool groupExists = false;

            MainWindow mainWindow      = Window.GetWindow(this) as MainWindow;
            string     subscriptionKey = mainWindow._scenariosControl.SubscriptionKey;
            string     endpoint        = mainWindow._scenariosControl.SubscriptionEndpoint;

            var faceServiceClient = new FaceServiceClient(subscriptionKey, endpoint);

            // Test whether the group already exists
            try
            {
                MainWindow.Log("Request: Group {0} will be used to build a person database. Checking whether the group exists.", this.GroupId);

                await faceServiceClient.GetLargePersonGroupAsync(this.GroupId);

                groupExists = true;
                MainWindow.Log("Response: Group {0} exists.", this.GroupId);
            }
            catch (FaceAPIException ex)
            {
                // "Not found" is the expected outcome for a fresh group; anything else is fatal.
                if (ex.ErrorCode != "LargePersonGroupNotFound")
                {
                    MainWindow.Log("Response: {0}. {1}", ex.ErrorCode, ex.ErrorMessage);
                    return;
                }
                else
                {
                    MainWindow.Log("Response: Group {0} did not exist previously.", this.GroupId);
                }
            }

            if (groupExists)
            {
                // Ask before wiping an existing group; a fresh GroupId is generated after deletion.
                var cleanGroup = System.Windows.MessageBox.Show(string.Format("Requires a clean up for group \"{0}\" before setting up a new person database. Click OK to proceed, group \"{0}\" will be cleared.", this.GroupId), "Warning", MessageBoxButton.OKCancel);
                if (cleanGroup == MessageBoxResult.OK)
                {
                    await faceServiceClient.DeleteLargePersonGroupAsync(this.GroupId);

                    this.GroupId = Guid.NewGuid().ToString();
                }
                else
                {
                    return;
                }
            }

            // Show folder picker
            System.Windows.Forms.FolderBrowserDialog dlg = new System.Windows.Forms.FolderBrowserDialog();
            var result = dlg.ShowDialog();

            // Set the suggestion count is intent to minimum the data preparation step only,
            // it's not corresponding to service side constraint
            const int SuggestionCount = 15;

            if (result == System.Windows.Forms.DialogResult.OK)
            {
                // User picked a root person database folder
                // Clear person database
                Persons.Clear();
                TargetFaces.Clear();
                SelectedFile             = null;
                IdentifyButton.IsEnabled = false;

                // Call create large person group REST API
                // Create large person group API call will failed if group with the same name already exists
                MainWindow.Log("Request: Creating group \"{0}\"", this.GroupId);
                try
                {
                    await faceServiceClient.CreateLargePersonGroupAsync(this.GroupId, this.GroupId, dlg.SelectedPath);

                    MainWindow.Log("Response: Success. Group \"{0}\" created", this.GroupId);
                }
                catch (FaceAPIException ex)
                {
                    MainWindow.Log("Response: {0}. {1}", ex.ErrorCode, ex.ErrorMessage);
                    return;
                }

                int  processCount  = 0;
                bool forceContinue = false;

                MainWindow.Log("Request: Preparing faces for identification, detecting faces in chosen folder.");

                // Enumerate top level directories, each directory contains one person's images
                int invalidImageCount = 0;
                foreach (var dir in System.IO.Directory.EnumerateDirectories(dlg.SelectedPath))
                {
                    var    tasks = new List <Task>();
                    var    tag   = System.IO.Path.GetFileName(dir);
                    Person p     = new Person();
                    p.PersonName = tag;

                    var faces = new ObservableCollection <Models.Face>();
                    p.Faces = faces;

                    // Call create person REST API, the new create person id will be returned
                    MainWindow.Log("Request: Creating person \"{0}\"", p.PersonName);

                    p.PersonId = (await RetryHelper.OperationWithBasicRetryAsync(async() => await
                                                                                 faceServiceClient.CreatePersonInLargePersonGroupAsync(this.GroupId, p.PersonName, dir),
                                                                                 new[] { "RateLimitExceeded" },
                                                                                 traceWriter: _mainWindowLogTraceWriter
                                                                                 )).PersonId.ToString();

                    MainWindow.Log("Response: Success. Person \"{0}\" (PersonID:{1}) created", p.PersonName, p.PersonId);

                    string img;
                    // Enumerate images under the person folder, call detection
                    var imageList =
                        new ConcurrentBag <string>(
                            Directory.EnumerateFiles(dir, "*.*", SearchOption.AllDirectories)
                            .Where(s => s.ToLower().EndsWith(".jpg") || s.ToLower().EndsWith(".png") || s.ToLower().EndsWith(".bmp") || s.ToLower().EndsWith(".gif")));

                    while (imageList.TryTake(out img))
                    {
                        tasks.Add(Task.Factory.StartNew(
                                      async(obj) =>
                        {
                            var imgPath = obj as string;

                            using (var fStream = File.OpenRead(imgPath))
                            {
                                try
                                {
                                    // Update person faces on server side
                                    var persistFace = await faceServiceClient.AddPersonFaceInLargePersonGroupAsync(this.GroupId, Guid.Parse(p.PersonId), fStream, imgPath);
                                    return(new Tuple <string, ClientContract.AddPersistedFaceResult>(imgPath, persistFace));
                                }
                                catch (FaceAPIException ex)
                                {
                                    // if operation conflict, retry.
                                    if (ex.ErrorCode.Equals("ConcurrentOperationConflict"))
                                    {
                                        MainWindow.Log("Concurrent operation conflict. Retrying.");
                                        imageList.Add(imgPath);
                                        return(null);
                                    }
                                    // if operation cause rate limit exceed, retry.
                                    else if (ex.ErrorCode.Equals("RateLimitExceeded"))
                                    {
                                        MainWindow.Log("Rate limit exceeded. Retrying.");
                                        imageList.Add(imgPath);
                                        return(null);
                                    }
                                    else if (ex.ErrorMessage.Contains("more than 1 face in the image."))
                                    {
                                        Interlocked.Increment(ref invalidImageCount);
                                    }
                                    // Here we simply ignore all detection failure in this sample
                                    // You may handle these exceptions by check the Error.Error.Code and Error.Message property for ClientException object
                                    return(new Tuple <string, ClientContract.AddPersistedFaceResult>(imgPath, null));
                                }
                            }
                        },
                                      img).Unwrap().ContinueWith((detectTask) =>
                        {
                            // Update detected faces for rendering
                            var detectionResult = detectTask?.Result;
                            if (detectionResult == null || detectionResult.Item2 == null)
                            {
                                return;
                            }

                            this.Dispatcher.Invoke(
                                new Action <ObservableCollection <Models.Face>, string, ClientContract.AddPersistedFaceResult>(UIHelper.UpdateFace),
                                faces,
                                detectionResult.Item1,
                                detectionResult.Item2);
                        }));

                        // BUG FIX: processCount was never incremented in the original, so the
                        // "too many images" prompt below could never fire. Count each image
                        // queued for upload.
                        processCount++;

                        if (processCount >= SuggestionCount && !forceContinue)
                        {
                            var continueProcess = System.Windows.Forms.MessageBox.Show("The images loaded have reached the recommended count, may take long time if proceed. Would you like to continue to load images?", "Warning", System.Windows.Forms.MessageBoxButtons.YesNo);
                            if (continueProcess == System.Windows.Forms.DialogResult.Yes)
                            {
                                forceContinue = true;
                            }
                            else
                            {
                                break;
                            }
                        }

                        // Drain the in-flight batch when the concurrency cap is reached or
                        // the work queue is empty.
                        if (tasks.Count >= _maxConcurrentProcesses || imageList.IsEmpty)
                        {
                            await Task.WhenAll(tasks);

                            tasks.Clear();
                        }
                    }

                    Persons.Add(p);
                }
                if (invalidImageCount > 0)
                {
                    MainWindow.Log("Warning: more or less than one face is detected in {0} images, can not add to face list.", invalidImageCount);
                }
                MainWindow.Log("Response: Success. Total {0} faces are detected.", Persons.Sum(p => p.Faces.Count));

                try
                {
                    // Start train large person group
                    MainWindow.Log("Request: Training group \"{0}\"", this.GroupId);

                    await RetryHelper.VoidOperationWithBasicRetryAsync(() =>
                                                                       faceServiceClient.TrainLargePersonGroupAsync(this.GroupId),
                                                                       new[] { "RateLimitExceeded" },
                                                                       traceWriter : _mainWindowLogTraceWriter);

                    // Wait until train completed
                    while (true)
                    {
                        await Task.Delay(1000);

                        var status = await faceServiceClient.GetLargePersonGroupTrainingStatusAsync(this.GroupId);

                        MainWindow.Log("Response: {0}. Group \"{1}\" training process is {2}", "Success", this.GroupId, status.Status);
                        if (status.Status != Status.Running)
                        {
                            break;
                        }
                    }
                    IdentifyButton.IsEnabled = true;
                }
                catch (FaceAPIException ex)
                {
                    MainWindow.Log("Response: {0}. {1}", ex.ErrorCode, ex.ErrorMessage);
                }
            }
            GC.Collect();
        }
// ===== Beispiel #10 (scraped sample separator; vote count: 0) =====
        // GET api/values/5
        /// <summary>
        /// Rebuilds the large person group from a folder of per-person image directories,
        /// trains it, then runs identification over blobs in the "samplecontainer" storage
        /// container. NOTE(review): this scraped block appears TRUNCATED — the ContinueWith
        /// lambda near the end is never closed with ")); " and the batching/await logic is
        /// missing, so this method as captured does not compile.
        /// </summary>
        /// <param name="id">Route parameter (unused in the visible body).</param>
        /// <returns>"Success", an exception string, or "" on early failure.</returns>
        public async Task<string> Get(int id)
        {
            bool groupExists = false;

            string returnStatus = "Success";
            string subscriptionKey = "";
            string endpoint = "";

            // NOTE(review): key and endpoint are empty strings here — presumably stripped
            // from the sample; must be configured for the client to work.
            var faceServiceClient = new FaceServiceClient(subscriptionKey, endpoint);
            System.Diagnostics.Debug.WriteLine("---------Hiiiii--------");

            // Test whether the group already exists.
            try
            {
                System.Diagnostics.Debug.WriteLine("Request: Group {0} will be used to build a person database. Checking whether the group exists.", this.GroupId);

                await faceServiceClient.GetLargePersonGroupAsync(this.GroupId);
                // count is incremented after each Face API call in this method.
                count++;
                groupExists = true;
                System.Diagnostics.Debug.WriteLine("Response: Group {0} exists.", this.GroupId);
            }
            catch (FaceAPIException ex)
            {
                // "Not found" is expected for a fresh group; anything else aborts.
                if (ex.ErrorCode != "LargePersonGroupNotFound")
                {
                    System.Diagnostics.Debug.WriteLine("Response: {0}. {1}", ex.ErrorCode, ex.ErrorMessage);
                    return "";
                }
                else
                {
                    System.Diagnostics.Debug.WriteLine("Response: Group {0} did not exist previously.", this.GroupId);
                }
            }

            // Existing group is deleted without confirmation and a fresh id generated.
            if (groupExists)
            {
                await faceServiceClient.DeleteLargePersonGroupAsync(this.GroupId);
                count++;
                this.GroupId = Guid.NewGuid().ToString();
            }

            const int SuggestionCount = 15;

            Persons.Clear();
            TargetFaces.Clear();
            SelectedFile = null;
            // IdentifyButton.IsEnabled = false;

            System.Diagnostics.Debug.WriteLine("Request: Creating group \"{0}\"", this.GroupId);
            try
            {
                await faceServiceClient.CreateLargePersonGroupAsync(this.GroupId, this.GroupId);
                count++;
                System.Diagnostics.Debug.WriteLine("Response: Success. Group \"{0}\" created", this.GroupId);
            }
            catch (FaceAPIException ex)
            {
                System.Diagnostics.Debug.WriteLine("Response: {0}. {1}", ex.ErrorCode, ex.ErrorMessage);
                return "";
            }


            // NOTE(review): processCount is compared nowhere in the visible remainder
            // (SuggestionCount/forceContinue logic appears lost to the truncation below).
            int processCount = 0;
            bool forceContinue = false;

            System.Diagnostics.Debug.WriteLine("Request: Preparing faces for identification, detecting faces in chosen folder.");

            // Enumerate top level directories, each directory contains one person's images
            int invalidImageCount = 0;

            // NOTE(review): EnumerateDirectories("") — the root folder path is empty,
            // presumably stripped from the sample; must be filled in.
            foreach (var dir in System.IO.Directory.EnumerateDirectories(""))
            {
                var tasks = new List<Task>();
                var tag = System.IO.Path.GetFileName(dir);
                Person p = new Person();
                p.PersonName = tag;

                var faces = new ObservableCollection<Face>();
                p.Faces = faces;

                // Call create person REST API, the new create person id will be returned
                System.Diagnostics.Debug.WriteLine("Request: Creating person \"{0}\"", p.PersonName);
                p.PersonId = (await faceServiceClient.CreatePersonInLargePersonGroupAsync(this.GroupId, p.PersonName)).PersonId.ToString();
                System.Diagnostics.Debug.WriteLine("Response: Success. Person \"{0}\" (PersonID:{1}) created", p.PersonName, p.PersonId);

                string img;
                // Enumerate images under the person folder, call detection
                var imageList =
                new ConcurrentBag<string>(
                    Directory.EnumerateFiles(dir, "*.*", SearchOption.AllDirectories)
                        .Where(s => s.ToLower().EndsWith(".jpg") || s.ToLower().EndsWith(".png") || s.ToLower().EndsWith(".bmp") || s.ToLower().EndsWith(".gif")));
                while (imageList.TryTake(out img))
                {
                    tasks.Add(Task.Factory.StartNew(
                        async (obj) =>
                        {
                            var imgPath = obj as string;

                            using (var fStream = File.OpenRead(imgPath))
                            {
                                try
                                {
                                    // Update person faces on server side
                                    var persistFace = await faceServiceClient.AddPersonFaceInLargePersonGroupAsync(this.GroupId, Guid.Parse(p.PersonId), fStream, imgPath);
                                    return new Tuple<string, ClientContract.AddPersistedFaceResult>(imgPath, persistFace);
                                }
                                catch (FaceAPIException ex)
                                {
                                    // if operation conflict, retry.
                                    if (ex.ErrorCode.Equals("ConcurrentOperationConflict"))
                                    {
                                        imageList.Add(imgPath);
                                        return null;
                                    }
                                    // if operation cause rate limit exceed, retry.
                                    else if (ex.ErrorCode.Equals("RateLimitExceeded"))
                                    {
                                        imageList.Add(imgPath);
                                        return null;
                                    }
                                    else if (ex.ErrorMessage.Contains("more than 1 face in the image."))
                                    {
                                        Interlocked.Increment(ref invalidImageCount);
                                    }
                                    // Here we simply ignore all detection failure in this sample
                                    // You may handle these exceptions by check the Error.Error.Code and Error.Message property for ClientException object
                                    return new Tuple<string, ClientContract.AddPersistedFaceResult>(imgPath, null);
                                }
                            }
                        },
                        img).Unwrap().ContinueWith((detectTask) =>
                        {
                            // Update detected faces for rendering
                            var detectionResult = detectTask?.Result;
                            if (detectionResult == null || detectionResult.Item2 == null)
                            {
                                return;
                            }

                            // NOTE(review): TRUNCATED in the scraped source — the body of this
                            // ContinueWith ends here, and the closing "}));" for tasks.Add plus
                            // the SuggestionCount prompt and Task.WhenAll batching (cf. the
                            // FolderPicker_Click sample above) are missing. The braces below do
                            // not balance; this must be restored before the code can compile.




                }

                Persons.Add(p);
            }

            if (invalidImageCount > 0)
            {
                System.Diagnostics.Debug.WriteLine("Warning: more or less than one face is detected in {0} images, can not add to face list.", invalidImageCount);
            }
            System.Diagnostics.Debug.WriteLine("Response: Success. Total {0} faces are detected.", Persons.Sum(p => p.Faces.Count));

            try
            {
                // Start train large person group
                System.Diagnostics.Debug.WriteLine("Request: Training group \"{0}\"", this.GroupId);
                await faceServiceClient.TrainLargePersonGroupAsync(this.GroupId);
                count++;
                // Wait until train completed
                while (true)
                {
                    await Task.Delay(1000);
                    var status = await faceServiceClient.GetLargePersonGroupTrainingStatusAsync(this.GroupId);
                    count++;
                    System.Diagnostics.Debug.WriteLine("Response: {0}. Group \"{1}\" training process is {2}", "Success", this.GroupId, status.Status);
                    if (status.Status != Microsoft.ProjectOxford.Face.Contract.Status.Running)
                    {
                        break;
                    }
                }
                //IdentifyButton.IsEnabled = true;
            }
            catch (FaceAPIException ex)
            {
                System.Diagnostics.Debug.WriteLine("Response: {0}. {1}", ex.ErrorCode, ex.ErrorMessage);
            }
        
            GC.Collect();

            // NOTE(review): empty connection string — presumably stripped from the sample.
            CloudStorageAccount storageAccount = CreateStorageAccountFromConnectionString("");

            // Create a blob client for interacting with the blob service.
            CloudBlobClient blobClient = storageAccount.CreateCloudBlobClient();

            CloudBlobContainer container = blobClient.GetContainerReference("samplecontainer");
            foreach (IListBlobItem blob in container.ListBlobs())
            {
                // Blob type will be CloudBlockBlob, CloudPageBlob or CloudBlobDirectory
                // Use blob.GetType() and cast to appropriate type to gain access to properties specific to each type
                try
                {
                    await Identify_Click(blob);
                    
                }
                catch (Exception e)
                {
                    // First identification failure stops the loop and becomes the result.
                    returnStatus =  e.ToString();
                    break;
                    
                }
            }

            //Identify_Click();
           return returnStatus;
        }