Example #1
        /// <summary>
        /// Pick image folder and detect all faces in these images
        /// </summary>
        /// <param name="sender">Event sender</param>
        /// <param name="e">Event arguments</param>
        private async void FolderPicker_Click(object sender, RoutedEventArgs e)
        {
            bool groupExists = false;

            var faceServiceClient = FaceServiceClientHelper.GetInstance(this);

            try
            {
                MainWindow.Log("Request: Large Face List {0} will be used to build a person database. Checking whether the large face list exists.", _largeFaceListName);

                await faceServiceClient.LargeFaceList.GetAsync(_largeFaceListName);

                groupExists = true;
                MainWindow.Log("Response: Large Face List {0} exists.", _largeFaceListName);
            }
            catch (APIErrorException ex)
            {
                if (ex.Body.Error.Code != "LargeFaceListNotFound")
                {
                    MainWindow.Log("Response: {0}. {1}", ex.Body.Error.Code, ex.Body.Error.Message);
                    return;
                }
                else
                {
                    MainWindow.Log("Response: Large Face List {0} did not exist previously.", _largeFaceListName);
                }
            }

            if (groupExists)
            {
                var cleanFaceList = System.Windows.MessageBox.Show(string.Format("Large face list \"{0}\" must be cleared before a new one can be set up. Click OK to proceed; large face list \"{0}\" will be deleted.", _largeFaceListName), "Warning", MessageBoxButton.OKCancel);
                if (cleanFaceList == MessageBoxResult.OK)
                {
                    await faceServiceClient.LargeFaceList.DeleteAsync(_largeFaceListName);
                }
                else
                {
                    return;
                }
            }

            OpenFaceButton.IsEnabled = false;
            // Show folder picker
            System.Windows.Forms.FolderBrowserDialog dlg = new System.Windows.Forms.FolderBrowserDialog();
            var result = dlg.ShowDialog();

            bool forceContinue = false;


            if (result == System.Windows.Forms.DialogResult.OK)
            {
                // Enumerate all image files (*.jpg, *.png, *.bmp, *.gif) in the folder and add each detected face to the large face list
                List <Task> tasks = new List <Task>();

                FacesCollection.Clear();
                TargetFaces.Clear();
                FindSimilarMatchPersonCollection.Clear();
                FindSimilarMatchFaceCollection.Clear();
                SelectedFile = null;

                // The suggestion count is intended only to keep the data preparation step small;
                // it does not correspond to any service-side constraint.
                const int SuggestionCount = 10;
                int       processCount    = 0;

                MainWindow.Log("Request: Preparing, detecting faces in chosen folder.");

                await faceServiceClient.LargeFaceList.CreateAsync(_largeFaceListName, _largeFaceListName, "large face list for sample");

                var imageList =
                    new ConcurrentBag <string>(
                        Directory.EnumerateFiles(dlg.SelectedPath, "*.*", SearchOption.AllDirectories)
                        .Where(s => s.ToLower().EndsWith(".jpg") || s.ToLower().EndsWith(".png") || s.ToLower().EndsWith(".bmp") || s.ToLower().EndsWith(".gif")));

                string img;
                int    invalidImageCount = 0;
                while (imageList.TryTake(out img))
                {
                    tasks.Add(Task.Factory.StartNew(
                                  async(obj) =>
                    {
                        var imgPath = obj as string;
                        // Call detection
                        using (var fStream = File.OpenRead(imgPath))
                        {
                            try
                            {
                                var faces =
                                    await faceServiceClient.LargeFaceList.AddFaceFromStreamAsync(_largeFaceListName, fStream);
                                return(new Tuple <string, PersistedFace>(imgPath, faces));
                            }
                            catch (APIErrorException ex)
                            {
                                // If the operation conflicts with another in-flight operation, retry.
                                if (ex.Body.Error.Code.Equals("ConcurrentOperationConflict"))
                                {
                                    imageList.Add(imgPath);
                                    return(null);
                                }
                                // If the rate limit is exceeded, retry.
                                else if (ex.Body.Error.Code.Equals("RateLimitExceeded"))
                                {
                                    imageList.Add(imgPath);
                                    return(null);
                                }
                                else if (ex.Body.Error.Message.Contains("more than 1 face in the image."))
                                {
                                    Interlocked.Increment(ref invalidImageCount);
                                }
                                // All other detection failures are simply ignored in this sample.
                                // You may handle them by checking the Body.Error.Code and Body.Error.Message properties of the APIErrorException object.
                                return(new Tuple <string, PersistedFace>(imgPath, null));
                            }
                        }
                    },
                                  img).Unwrap().ContinueWith((detectTask) =>
                    {
                        var res = detectTask?.Result;
                        if (res?.Item2 == null)
                        {
                            return;
                        }

                        // Update detected faces on UI
                        this.Dispatcher.Invoke(
                            new Action
                            <ObservableCollection <Face>, string, PersistedFace>(
                                UIHelper.UpdateFace),
                            FacesCollection,
                            res.Item1,
                            res.Item2);
                    }));

                    processCount++;

                    if (processCount >= SuggestionCount && !forceContinue)
                    {
                        var continueProcess =
                            System.Windows.Forms.MessageBox.Show(
                                "The images loaded have reached the recommended count, may take long time if proceed. Would you like to continue to load images?",
                                "Warning", System.Windows.Forms.MessageBoxButtons.YesNo);
                        if (continueProcess == System.Windows.Forms.DialogResult.Yes)
                        {
                            forceContinue = true;
                        }
                        else
                        {
                            break;
                        }
                    }

                    if (tasks.Count >= _maxConcurrentProcesses || imageList.IsEmpty)
                    {
                        await Task.WhenAll(tasks);

                        tasks.Clear();
                    }
                }
                if (invalidImageCount > 0)
                {
                    MainWindow.Log("Warning: more or less than one face is detected in {0} images, can not add to large face list.", invalidImageCount);
                }
                MainWindow.Log("Response: Success. Total {0} faces are detected.", FacesCollection.Count);

                try
                {
                    // Start to train the large face list.
                    MainWindow.Log("Request: Training large face list \"{0}\"", _largeFaceListName);
                    await faceServiceClient.LargeFaceList.TrainAsync(_largeFaceListName);

                    // Wait until the training is completed.
                    while (true)
                    {
                        await Task.Delay(1000);

                        var trainingStatus = await faceServiceClient.LargeFaceList.GetTrainingStatusAsync(_largeFaceListName);

                        MainWindow.Log("Response: {0}. Large face list \"{1}\" training process is {2}", "Success", _largeFaceListName, trainingStatus.Status);
                        if (trainingStatus.Status != TrainingStatusType.Running)
                        {
                            if (trainingStatus.Status == TrainingStatusType.Failed)
                            {
                                MainWindow.Log("Response: Training failed with message {0}.", trainingStatus.Message);
                            }

                            break;
                        }
                    }
                    OpenFaceButton.IsEnabled = true;
                }
                catch (APIErrorException ex)
                {
                    MainWindow.Log("Response: {0}. {1}", ex.Body.Error.Code, ex.Body.Error.Message);
                }
            }

            GC.Collect();
        }
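        // The train-and-poll sequence above (TrainAsync followed by a GetTrainingStatusAsync loop)
        // can be factored into a small helper. This is a minimal sketch, not part of the original
        // sample; it assumes the IFaceClient type from the Microsoft.Azure.CognitiveServices.Vision.Face
        // SDK that FaceServiceClientHelper.GetInstance is presumed to return, and a caller-supplied
        // polling interval.
        private static async Task<bool> WaitForLargeFaceListTrainingAsync(
            IFaceClient faceServiceClient, string largeFaceListId, TimeSpan pollingInterval)
        {
            // Kick off training for the large face list.
            await faceServiceClient.LargeFaceList.TrainAsync(largeFaceListId);

            while (true)
            {
                await Task.Delay(pollingInterval);

                // Poll the current training status.
                var trainingStatus = await faceServiceClient.LargeFaceList.GetTrainingStatusAsync(largeFaceListId);
                if (trainingStatus.Status == TrainingStatusType.Succeeded)
                {
                    return true;
                }

                if (trainingStatus.Status == TrainingStatusType.Failed)
                {
                    MainWindow.Log("Response: Training failed with message {0}.", trainingStatus.Message);
                    return false;
                }

                // Still running (or not yet started); keep polling.
            }
        }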
        /// <summary>
        /// Open the camera dialog and run Find Similar on the captured image
        /// </summary>
        /// <param name="sender">Event sender</param>
        /// <param name="e">Event arguments</param>
        private async void OpenCamera_Click(object sender, RoutedEventArgs e)
        {
            //OpenFaceButton.IsEnabled = false;
            CameraOpen camera = new CameraOpen();

            camera.ShowDialog();
            //Microsoft.Win32.OpenFileDialog dlg = new Microsoft.Win32.OpenFileDialog();
            //dlg.DefaultExt = ".jpg";
            //dlg.Filter = "Image files (*.jpg, *.png, *.bmp, *.gif) | *.jpg; *.png; *.bmp; *.gif";
            //var filePicker = dlg.ShowDialog();

            //if (filePicker.HasValue && filePicker.Value)
            //{
            // User picked image
            // Clear previous detection and find similar results
            TargetFaces.Clear();
            FindSimilarMatchPersonCollection.Clear();
            FindSimilarMatchFaceCollection.Clear();
            var sw = Stopwatch.StartNew();

            var pickedImagePath = @"D:\3.jpg";    //dlg.FileName;
            var renderingImage  = UIHelper.LoadImageAppliedOrientation(pickedImagePath);
            var imageInfo       = UIHelper.GetImageInfoForRendering(renderingImage);

            SelectedFile = renderingImage;

            // Detect all faces in the picked image
            using (var fStream = File.OpenRead(pickedImagePath))
            {
                MainWindow.Log("Request: Detecting faces in {0}", SelectedFile);

                MainWindow mainWindow        = Window.GetWindow(this) as MainWindow;
                string     subscriptionKey   = mainWindow._scenariosControl.SubscriptionKey;
                string     endpoint          = mainWindow._scenariosControl.SubscriptionEndpoint;
                var        faceServiceClient = new FaceServiceClient(subscriptionKey, endpoint);
                var        faces             = await faceServiceClient.DetectAsync(fStream);

                // Update detected faces on UI
                foreach (var face in UIHelper.CalculateFaceRectangleForRendering(faces, MaxImageSize, imageInfo))
                {
                    TargetFaces.Add(face);
                }

                MainWindow.Log("Response: Success. Detected {0} face(s) in {1}", faces.Length, SelectedFile);

                // Find similar faces in both match modes for each detected face
                foreach (var f in faces)
                {
                    var faceId = f.FaceId;
                    MainWindow.Log("Request: Finding similar faces in Personal Match Mode for face {0}", faceId);

                    try
                    {
                        // Default (matchPerson) mode: call the Find Similar REST API; the result contains the face IDs that are similar to the query face at the person level
                        const int requestCandidatesCount = 4;
                        var       result = await faceServiceClient.FindSimilarAsync(faceId, faceid_list, requestCandidatesCount);

                        // Update find matchPerson similar results collection for rendering
                        var personSimilarResult = new FindSimilarResult();
                        personSimilarResult.Faces     = new ObservableCollection <Face>();
                        personSimilarResult.QueryFace = new Face()
                        {
                            ImageFile = SelectedFile,
                            Top       = f.FaceRectangle.Top,
                            Left      = f.FaceRectangle.Left,
                            Width     = f.FaceRectangle.Width,
                            Height    = f.FaceRectangle.Height,
                            FaceId    = faceId.ToString(),
                        };
                        foreach (var fr in result)
                        {
                            var  candidateFace = FacesCollection.First(ff => ff.FaceId == fr.FaceId.ToString());
                            Face newFace       = new Face();
                            newFace.ImageFile  = candidateFace.ImageFile;
                            newFace.Confidence = fr.Confidence;
                            newFace.FaceId     = candidateFace.FaceId;
                            personSimilarResult.Faces.Add(newFace);
                        }

                        MainWindow.Log("Response: Found {0} similar faces for face {1}", personSimilarResult.Faces.Count, faceId);

                        FindSimilarMatchPersonCollection.Add(personSimilarResult);
                    }
                    catch (FaceAPIException ex)
                    {
                        MainWindow.Log("Response: {0}. {1}", ex.ErrorCode, ex.ErrorMessage);
                    }

                    try
                    {
                        // matchFace mode: call the Find Similar REST API; the result contains the top N faces with the highest similarity confidence
                        const int requestCandidatesCount = 4;
                        var       result = await faceServiceClient.FindSimilarAsync(faceId, faceid_list, FindSimilarMatchMode.matchFace, requestCandidatesCount);

                        // Update "matchFace" similar results collection for rendering
                        var faceSimilarResults = new FindSimilarResult();
                        faceSimilarResults.Faces     = new ObservableCollection <Face>();
                        faceSimilarResults.QueryFace = new Face()
                        {
                            ImageFile = SelectedFile,
                            Top       = f.FaceRectangle.Top,
                            Left      = f.FaceRectangle.Left,
                            Width     = f.FaceRectangle.Width,
                            Height    = f.FaceRectangle.Height,
                            FaceId    = faceId.ToString(),
                        };
                        foreach (var fr in result)
                        {
                            var  candidateFace = FacesCollection.First(ff => ff.FaceId == fr.FaceId.ToString());
                            Face newFace       = new Face();
                            newFace.ImageFile = candidateFace.ImageFile;
                            newFace.Confidence = fr.Confidence;
                            newFace.Top        = candidateFace.Top;
                            newFace.Left       = candidateFace.Left;
                            newFace.Width      = candidateFace.Width;
                            newFace.Height     = candidateFace.Height;
                            newFace.FaceId     = fr.FaceId.ToString();//candidateFace.FaceId;
                            faceSimilarResults.Faces.Add(newFace);
                        }
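                        // Compose an image: copy the best-matched candidate picture, paste the query
                        // face into the candidate's face rectangle, save the result, then save an
                        // unmodified copy of the candidate and merge it with the source photo
                        // (all output paths are hardcoded to the original author's environment).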
                        var      candidate1 = FacesCollection.First(ff => ff.FaceId == result[0].FaceId.ToString());
                        Bitmap   graph      = new Bitmap(UIHelper.ImageSourceToBitmap(candidate1.ImageFile).Width, UIHelper.ImageSourceToBitmap(candidate1.ImageFile).Height);
                        Graphics g2         = Graphics.FromImage(graph);

                        g2.DrawImage(UIHelper.ImageSourceToBitmap(candidate1.ImageFile), 0, 0);
                        // Rectangle zuibiao = new Rectangle(f.FaceRectangle.Left, f.FaceRectangle.Top, f.FaceRectangle.Width, f.FaceRectangle.Height);
                        Rectangle zuibiao = new Rectangle(candidate1.Left, candidate1.Top, candidate1.Width, candidate1.Height);
                        //g2.DrawImageUnscaled(UIHelper.ImageSourceToBitmap(candidateFace.ImageFile),0,0);
                        g2.DrawImage(UIHelper.ImageSourceToBitmap(SelectedFile), zuibiao, f.FaceRectangle.Left, f.FaceRectangle.Top, f.FaceRectangle.Width, f.FaceRectangle.Height, GraphicsUnit.Pixel);
                        System.Drawing.Image saveImage = System.Drawing.Image.FromHbitmap(graph.GetHbitmap());
                        saveImage.Save(@"E:\hackathon\ls\cognitive-Face-Windows\data1\image1.jpg", ImageFormat.Jpeg);

                        Bitmap   graph1 = new Bitmap(UIHelper.ImageSourceToBitmap(candidate1.ImageFile).Width, UIHelper.ImageSourceToBitmap(candidate1.ImageFile).Height);
                        Graphics g3     = Graphics.FromImage(graph1);

                        g3.DrawImage(UIHelper.ImageSourceToBitmap(candidate1.ImageFile), 0, 0);
                        System.Drawing.Image saveImage1 = System.Drawing.Image.FromHbitmap(graph1.GetHbitmap());
                        saveImage1.Save(@"E:\hackathon\ls\cognitive-Face-Windows\image1.jpg", ImageFormat.Jpeg);
                        MainWindow.Log("Response: Found {0} similar faces for face {1}", faceSimilarResults.Faces.Count, faceId);
                        MergeImage1 = getMergedPicture(@"D:\3.jpg", @"E:\hackathon\ls\cognitive-Face-Windows\image1.jpg");
                        //MergeImage1 = getMergedPicture("D:\\3.jpg", "D:\\1.jpg");
                        FindSimilarMatchFaceCollection.Add(faceSimilarResults);

                        /* MediaPlayer player = new MediaPlayer();
                         * player.Open(new Uri(media_name[candidate1.FaceId].Substring(0, media_name[candidate1.FaceId].Length - 4) + ".WAV", UriKind.Relative));
                         * player.Play();*/
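                        // NOTE: Thread.Sleep blocks the UI thread for 4 seconds between faces;
                        // await Task.Delay(4000) would be the non-blocking equivalent here.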
                        Thread.Sleep(4000);
                    }
                    catch (FaceAPIException ex)
                    {
                        MainWindow.Log("Response: {0}. {1}", ex.ErrorCode, ex.ErrorMessage);
                    }
                }
            }
            //}
            //GC.Collect();
            // OpenFaceButton.IsEnabled = false;
            GC.Collect();
        }
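The Graphics block near the end of OpenCamera_Click copies the best-matched candidate image and pastes the query face into the candidate's face rectangle before saving the result to disk. The following is a minimal sketch of that overlay step using the same System.Drawing and System.Drawing.Imaging APIs already referenced by the sample; the method name, parameters, and output path are illustrative placeholders, not part of the original code.

        // Sketch only: overlay the query face region from the source image onto the
        // candidate image's face rectangle and save the composition as JPEG.
        private static void OverlayQueryFace(
            Bitmap candidateImage, Bitmap sourceImage,
            Rectangle candidateFaceRect, Rectangle queryFaceRect, string outputPath)
        {
            using (var composed = new Bitmap(candidateImage.Width, candidateImage.Height))
            using (var g = Graphics.FromImage(composed))
            {
                // Start from a copy of the candidate image...
                g.DrawImage(candidateImage, 0, 0);

                // ...then draw the query face region into the candidate's face rectangle,
                // scaling the source region to fit the destination rectangle.
                g.DrawImage(sourceImage, candidateFaceRect,
                            queryFaceRect.X, queryFaceRect.Y, queryFaceRect.Width, queryFaceRect.Height,
                            GraphicsUnit.Pixel);

                composed.Save(outputPath, ImageFormat.Jpeg);
            }
        }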
Example #3
        /// <summary>
        /// Pick an image and call Find Similar in both modes for each detected face
        /// </summary>
        /// <param name="sender">Event sender</param>
        /// <param name="e">Event arguments</param>
        private async void FindSimilar_Click(object sender, RoutedEventArgs e)
        {
            // Show file picker
            Microsoft.Win32.OpenFileDialog dlg = new Microsoft.Win32.OpenFileDialog();
            dlg.DefaultExt = ".jpg";
            dlg.Filter     = "Image files (*.jpg, *.png, *.bmp, *.gif) | *.jpg; *.png; *.bmp; *.gif";
            var filePicker = dlg.ShowDialog();

            if (filePicker.HasValue && filePicker.Value)
            {
                // User picked image
                // Clear previous detection and find similar results
                TargetFaces.Clear();
                FindSimilarMatchPersonCollection.Clear();
                FindSimilarMatchFaceCollection.Clear();
                var sw = Stopwatch.StartNew();

                var pickedImagePath = dlg.FileName;
                var renderingImage  = UIHelper.LoadImageAppliedOrientation(pickedImagePath);
                var imageInfo       = UIHelper.GetImageInfoForRendering(renderingImage);
                SelectedFile = renderingImage;

                // Detect all faces in the picked image
                using (var fStream = File.OpenRead(pickedImagePath))
                {
                    MainWindow.Log("Request: Detecting faces in {0}", SelectedFile);
                    var faceServiceClient      = FaceServiceClientHelper.GetInstance(this);
                    IList <DetectedFace> faces = await faceServiceClient.Face.DetectWithStreamAsync(fStream);

                    // Update detected faces on UI
                    foreach (var face in UIHelper.CalculateFaceRectangleForRendering(faces, MaxImageSize, imageInfo))
                    {
                        TargetFaces.Add(face);
                    }

                    MainWindow.Log("Response: Success. Detected {0} face(s) in {1}", faces.Count, SelectedFile);

                    // Find similar faces in both match modes for each detected face
                    foreach (var f in faces)
                    {
                        if (f.FaceId == null)
                        {
                            continue;
                        }

                        var faceId = f.FaceId.Value;

                        MainWindow.Log("Request: Finding similar faces in Personal Match Mode for face {0}", faceId);

                        try
                        {
                            // Default (matchPerson) mode: call the Find Similar REST API; the result contains the face IDs that are similar to the query face at the person level
                            const int           requestCandidatesCount = 4;
                            IList <SimilarFace> result = await faceServiceClient.Face.FindSimilarAsync(
                                faceId,
                                null,
                                _largeFaceListName,
                                maxNumOfCandidatesReturned : requestCandidatesCount);

                            // Update find matchPerson similar results collection for rendering
                            var personSimilarResult = new FindSimilarResult();
                            personSimilarResult.Faces     = new ObservableCollection <Face>();
                            personSimilarResult.QueryFace = new Face()
                            {
                                ImageFile = SelectedFile,
                                Top       = f.FaceRectangle.Top,
                                Left      = f.FaceRectangle.Left,
                                Width     = f.FaceRectangle.Width,
                                Height    = f.FaceRectangle.Height,
                                FaceId    = faceId.ToString(),
                            };
                            foreach (var fr in result)
                            {
                                var  candidateFace = FacesCollection.First(ff => ff.FaceId == fr.PersistedFaceId.ToString());
                                Face newFace       = new Face();
                                newFace.ImageFile  = candidateFace.ImageFile;
                                newFace.Confidence = fr.Confidence;
                                newFace.FaceId     = candidateFace.FaceId;
                                personSimilarResult.Faces.Add(newFace);
                            }

                            MainWindow.Log("Response: Found {0} similar faces for face {1}", personSimilarResult.Faces.Count, faceId);

                            FindSimilarMatchPersonCollection.Add(personSimilarResult);
                        }
                        catch (APIErrorException ex)
                        {
                            MainWindow.Log("Response: {0}. {1}", ex.Body.Error.Code, ex.Body.Error.Message);
                        }

                        try
                        {
                            // matchFace mode: call the Find Similar REST API; the result contains the top N faces with the highest similarity confidence
                            const int requestCandidatesCount = 4;
                            var       result = await faceServiceClient.Face.FindSimilarAsync(
                                faceId,
                                null,
                                _largeFaceListName,
                                maxNumOfCandidatesReturned : requestCandidatesCount,
                                mode : FindSimilarMatchMode.MatchFace);

                            // Update "matchFace" similar results collection for rendering
                            var faceSimilarResults = new FindSimilarResult();
                            faceSimilarResults.Faces     = new ObservableCollection <Face>();
                            faceSimilarResults.QueryFace = new Face()
                            {
                                ImageFile = SelectedFile,
                                Top       = f.FaceRectangle.Top,
                                Left      = f.FaceRectangle.Left,
                                Width     = f.FaceRectangle.Width,
                                Height    = f.FaceRectangle.Height,
                                FaceId    = faceId.ToString(),
                            };
                            foreach (var fr in result)
                            {
                                var  candidateFace = FacesCollection.First(ff => ff.FaceId == fr.PersistedFaceId.ToString());
                                Face newFace       = new Face();
                                newFace.ImageFile  = candidateFace.ImageFile;
                                newFace.Confidence = fr.Confidence;
                                newFace.FaceId     = candidateFace.FaceId;
                                faceSimilarResults.Faces.Add(newFace);
                            }

                            MainWindow.Log("Response: Found {0} similar faces for face {1}", faceSimilarResults.Faces.Count, faceId);

                            FindSimilarMatchFaceCollection.Add(faceSimilarResults);
                        }
                        catch (APIErrorException ex)
                        {
                            MainWindow.Log("Response: {0}. {1}", ex.Body.Error.Code, ex.Body.Error.Message);
                        }
                    }
                }
            }
            GC.Collect();
        }
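        // The two foreach loops in FindSimilar_Click differ only in which result collection
        // they fill; the per-candidate mapping is identical. A minimal refactoring sketch
        // (not part of the original sample), assuming the sample's own Face view model and
        // the FacesCollection populated by FolderPicker_Click.
        private ObservableCollection<Face> ToFaceCollection(IList<SimilarFace> similarFaces)
        {
            var faces = new ObservableCollection<Face>();
            foreach (var fr in similarFaces)
            {
                // Look up the locally cached face that was previously added to the large face list.
                var candidateFace = FacesCollection.First(ff => ff.FaceId == fr.PersistedFaceId.ToString());
                faces.Add(new Face()
                {
                    ImageFile  = candidateFace.ImageFile,
                    Confidence = fr.Confidence,
                    FaceId     = candidateFace.FaceId,
                });
            }

            return faces;
        }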
        /// <summary>
        /// Detect all faces in the images under a (hardcoded) folder and record their face IDs
        /// </summary>
        /// <param name="sender">Event sender</param>
        /// <param name="e">Event arguments</param>
        private async void FolderPicker_Click(object sender, RoutedEventArgs e)
        {
            MainWindow mainWindow        = Window.GetWindow(this) as MainWindow;
            string     subscriptionKey   = mainWindow._scenariosControl.SubscriptionKey;
            string     endpoint          = mainWindow._scenariosControl.SubscriptionEndpoint;
            var        faceServiceClient = new FaceServiceClient(subscriptionKey, endpoint);

            /*try
             * {
             *  MainWindow.Log("Request: Face List {0} will be used to build a person database. Checking whether the face list exists.", _faceListName);
             *
             *  await faceServiceClient.GetFaceListAsync(_faceListName);
             *  groupExists = true;
             *  MainWindow.Log("Response: Face List {0} exists.", _faceListName);
             * }
             * catch (FaceAPIException ex)
             * {
             *  if (ex.ErrorCode != "FaceListNotFound")
             *  {
             *      MainWindow.Log("Response: {0}. {1}", ex.ErrorCode, ex.ErrorMessage);
             *      return;
             *  }
             *  else
             *  {
             *      MainWindow.Log("Response: Face List {0} did not exist previously.", _faceListName);
             *  }
             * }
             *
             * if (groupExists)
             * {
             *  var cleanFaceList = System.Windows.MessageBox.Show(string.Format("Requires a clean up for face list \"{0}\" before setting up a new face list. Click OK to proceed, face list \"{0}\" will be cleared.", _faceListName), "Warning", MessageBoxButton.OKCancel);
             *  if (cleanFaceList == MessageBoxResult.OK)
             *  {
             *      await faceServiceClient.DeleteFaceListAsync(_faceListName);
             *  }
             *  else
             *  {
             *      return;
             *  }
             * }*/
            OpenCameraButton.IsEnabled = false;
            OpenFaceButton.IsEnabled   = false;
            // Show folder picker
            //System.Windows.Forms.FolderBrowserDialog dlg = new System.Windows.Forms.FolderBrowserDialog();
            //var result = dlg.ShowDialog();
            //string file_path = @"D:\microsoftAPI\cognitive-Face-Windows\Data\PersonGroup\Family1-Mom";
            string file_path     = @"E:\hackathon\ls\cognitive-Face-Windows\data1";
            bool   forceContinue = false;


            // if (result == System.Windows.Forms.DialogResult.OK)
            if (System.IO.Directory.Exists(file_path))
            {
                // Enumerate all image files (*.jpg, *.png, *.bmp, *.gif) in the folder and detect faces in each
                List <Task> tasks = new List <Task>();
                FacesCollection.Clear();
                //DetectionFacesCollection.Clear();
                TargetFaces.Clear();
                FindSimilarMatchPersonCollection.Clear();
                FindSimilarMatchFaceCollection.Clear();
                SelectedFile = null;


                // The suggestion count is intended only to keep the data preparation step small;
                // it does not correspond to any service-side constraint.
                const int SuggestionCount = 10;
                int       processCount    = 0;

                MainWindow.Log("Request: Preparing, detecting faces in chosen folder.");

                //await faceServiceClient.CreateFaceListAsync(_faceListName, _faceListName, "face list for sample");

                var imageList =
                    new ConcurrentBag <string>(
                        Directory.EnumerateFiles(file_path /*dlg.SelectedPath*/, "*.*", SearchOption.AllDirectories)
                        .Where(s => s.ToLower().EndsWith(".jpg") || s.ToLower().EndsWith(".png") || s.ToLower().EndsWith(".bmp") || s.ToLower().EndsWith(".gif")));

                string img;
                int    invalidImageCount = 0;
                int    i = 0;
                while (imageList.TryTake(out img))
                {
                    tasks.Add(Task.Factory.StartNew(
                                  async(obj) =>
                    {
                        var imgPath = obj as string;
                        // Call detection
                        using (var fStream = File.OpenRead(imgPath))
                        {
                            try
                            {
                                /*var faces =
                                 *  await faceServiceClient.AddFaceToFaceListAsync(_faceListName, fStream);*/
                                // ProjectOxford.Face.Contract.Face[] faces = await faceServiceClient.DetectAsync(fStream, false, true, new FaceAttributeType[] { FaceAttributeType.Gender, FaceAttributeType.Age, FaceAttributeType.Smile, FaceAttributeType.Glasses, FaceAttributeType.HeadPose, FaceAttributeType.FacialHair, FaceAttributeType.Emotion, FaceAttributeType.Hair, FaceAttributeType.Makeup, FaceAttributeType.Occlusion, FaceAttributeType.Accessories, FaceAttributeType.Noise, FaceAttributeType.Exposure, FaceAttributeType.Blur });
                                var renderingImage = UIHelper.LoadImageAppliedOrientation(imgPath);
                                var imageInfo      = UIHelper.GetImageInfoForRendering(renderingImage);
                                var faces1         = await faceServiceClient.DetectAsync(fStream);
                                // ObservableCollection<Face> detection_tmp = new ObservableCollection<Face>();

                                //faceServiceClient.
                                // Update detected faces on UI
                                //faces[0].FaceRectangle
                                foreach (var face in faces1)
                                {
                                    //      detection_tmp.Add(face);
                                    //DetectionFacesCollection.
                                    //_faceListName = _faceListName + "-" + face.FaceId;
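                                    // NOTE: faceid_list, media_name, and the counter i are shared
                                    // across detection tasks that may run concurrently; access here
                                    // is not synchronized (see the sketch at the end of this example).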
                                    faceid_list[i] = face.FaceId;
                                    media_name.Add(face.FaceId.ToString(), imgPath);
                                    i++;
                                    //var face_list = await faceServiceClient.AddFaceToFaceListAsync(_faceListName, File.OpenRead(face.ImageFile));
                                }
                                return(new Tuple <string, ClientContract.Face[]>(imgPath, faces1));

                            }
                            catch (FaceAPIException ex)
                            {
                                // If the operation conflicts with another in-flight operation, retry.
                                if (ex.ErrorCode.Equals("ConcurrentOperationConflict"))
                                {
                                    imageList.Add(imgPath);
                                    return(null);
                                }
                                // If the rate limit is exceeded, retry.
                                else if (ex.ErrorCode.Equals("RateLimitExceeded"))
                                {
                                    imageList.Add(imgPath);
                                    return(null);
                                }

                                /*else if (ex.ErrorMessage.Contains("more than 1 face in the image."))
                                 * {
                                 *  Interlocked.Increment(ref invalidImageCount);
                                 * }*/
                                // All other detection failures are simply ignored in this sample.
                                // You may handle them by checking the ErrorCode and ErrorMessage properties of the FaceAPIException object.
                                return(new Tuple <string, ClientContract.Face[]>(imgPath, null));
                            }
                        }
                    },
                                  img).Unwrap().ContinueWith((detectTask) =>
                    {
                        var res = detectTask?.Result;
                        if (res?.Item2 == null)
                        {
                            return;
                        }

                        // Update detected faces on UI
                        this.Dispatcher.Invoke(
                            new Action
                            <ObservableCollection <Face>, string, ClientContract.Face[]>(
                                UIHelper.UpdateFace),
                            FacesCollection,
                            res.Item1,
                            res.Item2);
                    }));

                    processCount++;

                    if (processCount >= SuggestionCount && !forceContinue)
                    {
                        var continueProcess =
                            System.Windows.Forms.MessageBox.Show(
                                "The images loaded have reached the recommended count, may take long time if proceed. Would you like to continue to load images?",
                                "Warning", System.Windows.Forms.MessageBoxButtons.YesNo);
                        if (continueProcess == System.Windows.Forms.DialogResult.Yes)
                        {
                            forceContinue = true;
                        }
                        else
                        {
                            break;
                        }
                    }

                    if (tasks.Count >= _maxConcurrentProcesses || imageList.IsEmpty)
                    {
                        await Task.WhenAll(tasks);

                        tasks.Clear();
                    }
                }
                if (invalidImageCount > 0)
                {
                    MainWindow.Log("Warning: more or less than one face is detected in {0} images, can not add to face list.", invalidImageCount);
                }
                MainWindow.Log("Response: Success. Total {0} faces are detected.", FacesCollection.Count);
            }
            else
            {
                MainWindow.Log("cannot open file");
            }
            GC.Collect();
            OpenFaceButton.IsEnabled   = true;
            OpenCameraButton.IsEnabled = true;
        }
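In the last FolderPicker_Click, the detection tasks write to the shared faceid_list array, the media_name dictionary, and the counter i even though those tasks may run concurrently, and that access is not synchronized. Below is a minimal sketch of one thread-safe alternative built on ConcurrentDictionary (System.Collections.Concurrent, which the sample already uses for ConcurrentBag); the field and method names are hypothetical, and the Detect call mirrors the DetectAsync usage above.

        // Sketch only: map each detected face ID to its source image path without shared
        // mutable counters. Assumes the older FaceServiceClient.DetectAsync(Stream) used above.
        private readonly ConcurrentDictionary<Guid, string> _faceIdToImagePath =
            new ConcurrentDictionary<Guid, string>();

        private async Task DetectAndRecordAsync(FaceServiceClient faceServiceClient, string imgPath)
        {
            using (var fStream = File.OpenRead(imgPath))
            {
                var faces = await faceServiceClient.DetectAsync(fStream);
                foreach (var face in faces)
                {
                    // ConcurrentDictionary tolerates concurrent writers, so no counter or lock is needed.
                    _faceIdToImagePath.TryAdd(face.FaceId, imgPath);
                }
            }
        }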