        // Detects faces in a local image and identifies them against an existing person group,
        // returning each matched person's name together with the match confidence.
        private static async Task<Dictionary<string, double>> IdentifyImage(string imagePath, string groupId)
        {
            Dictionary<string, double> nameList = new Dictionary<string, double>();

            using (FileStream stream = new FileStream(imagePath, FileMode.Open, FileAccess.Read))
            {
                // Detect faces in the image stream, requesting face IDs and the Recognition02 model.
                IList<DetectedFace> faces = await _client.Face.DetectWithStreamAsync(stream, true, false, null, RecognitionModel.Recognition02);

                if (faces.Any())
                {
                    List<Guid> faceIds = faces.Select(face => face.FaceId.Value).ToList();

                    // Identify the detected faces against the trained person group.
                    IList<IdentifyResult> results = await _client.Face.IdentifyAsync(faceIds, groupId);

                    // Look up the top candidate for every face that produced at least one match.
                    Person[] people = await Task.WhenAll(results
                        .Where(identifyResult => identifyResult.Candidates.Any())
                        .Select(async identifyResult =>
                        {
                            IdentifyCandidate candidate = identifyResult.Candidates.First();
                            Microsoft.Azure.CognitiveServices.Vision.Face.Models.Person person =
                                await _client.PersonGroupPerson.GetAsync(groupId, candidate.PersonId);

                            return new Person
                            {
                                Confidence = candidate.Confidence,
                                Name = person.Name
                            };
                        }));

                    nameList = people.ToDictionary(person => person.Name, person => person.Confidence);
                }
            }

            return nameList;
        }
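        // Example usage (hypothetical file path and person-group id):
        // Dictionary<string, double> matches = await IdentifyImage(@"C:\images\group-photo.jpg", "my-person-group");
        // foreach (var match in matches) { Console.WriteLine($"{match.Key}: {match.Value:P0}"); }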
 // Maps an SDK person (aliased here as azure.Person) onto the application's own Person view model.
 public static Person CreatePerson(this azure.Person person)
 {
     return new Person
     {
         Name = person.Name,
         PersonId = person.PersonId,
         UserData = person.UserData,
         PersistedFaceIds = person.PersistedFaceIds,
     };
 }
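 // The application-level Person view model used above is not shown in this section. The sketch
 // below is an assumption, limited to the properties these snippets actually read or write.
 public class Person
 {
     public string Name { get; set; }
     public Guid PersonId { get; set; }
     public string UserData { get; set; }
     public IList<Guid> PersistedFaceIds { get; set; }   // Match the element type your SDK version exposes here.
     public double Confidence { get; set; }
 }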
        // Builds a temporary person group from the supplied image dictionary, trains it,
        // identifies the faces found in the source image, and returns the best match.
        public static async Task<GetRecognisedUserViewModel> IdentifyInPersonGroup(IFaceClient client, List<InPutDictlist> personDictionary, string url, string url2, string sourceImageFileName, string recognitionModel)
        {
            GetRecognisedUserViewModel getRecognisedUserViewModel = new GetRecognisedUserViewModel();
            string output = "Initializing...\nCreating new person model\n";

            // personDictionary groups the training images, with similar faces of one person under the same key.
            // sourceImageFileName is a group photo that includes some of the persons you want to identify.

            // Create a person group.
            output += "Person group id: " + personGroupId + ".\n";
            await client.PersonGroup.CreateAsync(personGroupId, personGroupId, recognitionModel: recognitionModel);

            // The similar faces will be grouped into a single person group person.
            foreach (var groupedFace in personDictionary)
            {
                // Limit transactions per second against the Face API.
                await Task.Delay(250);

                // Create a person group person for this name.
                Microsoft.Azure.CognitiveServices.Vision.Face.Models.Person person = await client.PersonGroupPerson.CreateAsync(personGroupId, groupedFace.Name);

                // Add each of the person's training faces to the person group person.
                foreach (var similarImage in groupedFace.ImageName)
                {
                    output += "Add face to the person group person (" + groupedFace.Name + ") from image `" + similarImage + "`\n";
                    string faceImageUrl = url + similarImage;
                    PersistedFace face = await client.PersonGroupPerson.AddFaceFromUrlAsync(personGroupId, person.PersonId, faceImageUrl, similarImage);
                }
            }
            // Start to train the person group.
            output += "\n\nNow training person group: " + personGroupId + "\n";
            await client.PersonGroup.TrainAsync(personGroupId);

            // Poll until training has completed.
            while (true)
            {
                await Task.Delay(1000);

                var trainingStatus = await client.PersonGroup.GetTrainingStatusAsync(personGroupId);

                output += "Training status: " + trainingStatus.Status + ".\n";
                if (trainingStatus.Status == TrainingStatusType.Succeeded)
                {
                    break;
                }
                if (trainingStatus.Status == TrainingStatusType.Failed)
                {
                    // Stop polling instead of looping forever if training fails.
                    throw new ApplicationException("Person group training failed.");
                }
            }

            // Detect faces in the source image URL.
            List<DetectedFace> detectedFaces = await DetectFaceRecognize(client, $"{url2}{sourceImageFileName}", recognitionModel);

            // Collect the detected face IDs.
            List<Guid> sourceFaceIds = detectedFaces.Select(detectedFace => detectedFace.FaceId.Value).ToList();

            // Identify the detected faces against the person group.
            var identifyResults = await client.Face.IdentifyAsync(sourceFaceIds, personGroupId);

            // Track the highest-confidence match across all detected faces.
            double highestConfidence = 0.0;
            Microsoft.Azure.CognitiveServices.Vision.Face.Models.Person bestMatch = new Microsoft.Azure.CognitiveServices.Vision.Face.Models.Person();


            foreach (var identifyResult in identifyResults)
            {
                // Skip faces that did not match anyone in the person group.
                if (!identifyResult.Candidates.Any())
                {
                    continue;
                }

                IdentifyCandidate topCandidate = identifyResult.Candidates[0];
                Microsoft.Azure.CognitiveServices.Vision.Face.Models.Person person = await client.PersonGroupPerson.GetAsync(personGroupId, topCandidate.PersonId);

                output += "Person '" + person.Name + "' is identified for face in: " + sourceImageFileName + " - " + identifyResult.FaceId + "\n";

                // Keep the candidate with the highest confidence seen so far.
                if (topCandidate.Confidence > highestConfidence)
                {
                    highestConfidence = topCandidate.Confidence;
                    bestMatch.Name = person.Name;
                    bestMatch.PersonId = person.PersonId;
                    bestMatch.PersistedFaceIds = new List<Guid> { identifyResult.FaceId };
                    getRecognisedUserViewModel.ConfidenceLevel = topCandidate.Confidence;
                }
            }
            // The person group is only needed for this request, so delete it once identification is done.
            await DeletePersonGroup(client, personGroupId);

            output += "\n\nPerson group deleted.";
            getRecognisedUserViewModel.Answers       = output;
            getRecognisedUserViewModel.highestperson = bestMatch;

            return getRecognisedUserViewModel;
        }
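        // The two helpers called above are not shown in this section. The sketches below are one
        // possible implementation based on the standard Face SDK calls; adjust them to match the
        // actual helpers in your project.
        private static async Task<List<DetectedFace>> DetectFaceRecognize(IFaceClient client, string imageUrl, string recognitionModel)
        {
            // Detect faces in the remote image, requesting face IDs with the given recognition model.
            IList<DetectedFace> detectedFaces = await client.Face.DetectWithUrlAsync(imageUrl, recognitionModel: recognitionModel);
            return detectedFaces.ToList();
        }

        private static async Task DeletePersonGroup(IFaceClient client, string groupId)
        {
            // Remove the temporary person group created for this identification request.
            await client.PersonGroup.DeleteAsync(groupId);
        }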