async Task ExecuteFindSimilarFaceCommandAsync()
        {
            if (IsBusy)
            {
                return;
            }

            IsBusy = true;

            try
            {
                MediaFile photo;

                await CrossMedia.Current.Initialize();

                // Take photo
                if (CrossMedia.Current.IsCameraAvailable)
                {
                    photo = await CrossMedia.Current.TakePhotoAsync(new StoreCameraMediaOptions
                    {
                        Directory = "Employee Directory",
                        Name      = "photo.jpg"
                    });
                }
                else
                {
                    photo = await CrossMedia.Current.PickPhotoAsync();
                }

                // Upload to cognitive services
                using (var stream = photo.GetStream())
                {
                    var faceServiceClient = new FaceServiceClient("22e49721a20e457880a32138afc9e027");

                    // Step 4 - Upload our photo and see who it is!
                    var faces = await faceServiceClient.DetectAsync(stream);

                    var faceIds = faces.Select(face => face.FaceId).ToArray();

                    var results = await faceServiceClient.IdentifyAsync(personGroupId, faceIds);

                    var result = results[0].Candidates[0].PersonId;

                    var person = await faceServiceClient.GetPersonAsync(personGroupId, result);

                    UserDialogs.Instance.ShowSuccess($"Person identified is {person.Name}.");
                }
            }
            catch (Exception ex)
            {
                UserDialogs.Instance.ShowError(ex.Message);
            }
            finally
            {
                IsBusy = false;
            }
        }
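
The identification call above assumes a person group that already exists, contains persons with enrolled faces, and has been trained. A minimal enrollment sketch is shown below; the API key, group id, person name, and image path are placeholders, and the exact overloads may vary slightly between versions of the Microsoft.ProjectOxford.Face client.

        // Hypothetical one-time setup for the person group used by IdentifyAsync above.
        var client  = new FaceServiceClient("<your Face API key>");
        var groupId = "employee-directory";   // placeholder person group id

        await client.CreatePersonGroupAsync(groupId, "Employee Directory");

        // Enroll one person with a single face image (repeat per person and per photo)
        var personResult = await client.CreatePersonAsync(groupId, "Jane Example");
        using (var faceStream = File.OpenRead("photos/jane.jpg"))
        {
            await client.AddPersonFaceAsync(groupId, personResult.PersonId, faceStream);
        }

        // Train the group; IdentifyAsync fails until training has completed
        await client.TrainPersonGroupAsync(groupId);
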
Example #2
        public async Task <bool> Face()
        {
            var test = true;
            FaceServiceClient faceServiceClient = new FaceServiceClient("57dfa44fcc82469fb81ec031ba2f43c6", "https://westcentralus.api.cognitive.microsoft.com/face/v1.0");
            // Create an empty PersonGroup
            string personGroupId = "myfriends";
            //await faceServiceClient.CreatePersonGroupAsync(personGroupId, "MyFriends");

            // Define Anna
            CreatePersonResult friend1 = await faceServiceClient.CreatePersonAsync(
                // Id of the PersonGroup that the person belonged to
                personGroupId,
                // Name of the person
                "toto"
                );

            const string friend1ImageDir = @"C:/Users/Megaport/Pictures/face";

            foreach (string imagePath in Directory.GetFiles(friend1ImageDir, "*.jpg"))
            {
                using (Stream s = File.OpenRead(imagePath))
                {
                    // Detect faces in the image and add to Anna
                    await faceServiceClient.AddPersonFaceAsync(
                        personGroupId, friend1.PersonId, s);
                }
            }
            await faceServiceClient.TrainPersonGroupAsync(personGroupId);

            TrainingStatus trainingStatus = null;

            trainingStatus = await faceServiceClient.GetPersonGroupTrainingStatusAsync(personGroupId);

            string testImageFile = "ftp://35.190.168.129/images/test1.png";

            // File.OpenRead cannot open an FTP URL, so download the test image via WebRequest first
            var testImageRequest = WebRequest.Create(testImageFile);

            using (var testImageResponse = await testImageRequest.GetResponseAsync())
            using (Stream s = testImageResponse.GetResponseStream())
            {
                var faces = await faceServiceClient.DetectAsync(s);

                var faceIds = faces.Select(face => face.FaceId).ToArray();

                var results = await faceServiceClient.IdentifyAsync(personGroupId, faceIds);

                foreach (var identifyResult in results)
                {
                    if (identifyResult.Candidates.Length == 0)
                    {
                        test = false;
                    }
                    else
                    {
                        test = true;
                    }
                }
            }
            return(test);
        }
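
Example #2 reads the training status once but never waits for training to finish, and identification against an untrained group fails. A small polling sketch using the same faceServiceClient and personGroupId is shown below; the Status enum member names follow the Microsoft.ProjectOxford.Face contract and may differ in other SDK versions.

        // Hypothetical polling loop: wait until the person group has finished training.
        TrainingStatus trainingStatus;
        do
        {
            await Task.Delay(1000);
            trainingStatus = await faceServiceClient.GetPersonGroupTrainingStatusAsync(personGroupId);
        } while (trainingStatus.Status == Status.Running);   // enum member names assumed from the SDK contract

        if (trainingStatus.Status == Status.Failed)
        {
            throw new Exception("Person group training failed.");
        }
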
Example #3
        public async Task <Identification[]> DetectCustomerFaces(Stream imageStream, bool includeFaceAttributes)
        {
            // Optionally capture the emotion attribute (age and gender could be requested here as well)
            var attributes = includeFaceAttributes ? new FaceAttributeType[]
            {
                FaceAttributeType.Emotion
            } : null;

            var faces = await Service.DetectAsync(imageStream, returnFaceAttributes : attributes);

            var faceIds = faces.Select(f => f.FaceId).ToArray();

            IdentifyResult[] matchedCustomers = await Service.IdentifyAsync(LoyalCustomerGroup, faceIds);

            // Join each detected face to its identification result on FaceId
            var collated = (from face in faces
                            from match in matchedCustomers
                            where face.FaceId == match.FaceId
                            select new {
                FaceId = face.FaceId,
                Rectangle = face.FaceRectangle,
                Emotion = face.FaceAttributes?.Emotion,
                Candidate = match.Candidates.Where(c => c.Confidence > .6).FirstOrDefault()
            }).ToList();

            var response = new List <Identification>();

            foreach (var result in collated)
            {
                // Skip faces without a sufficiently confident candidate
                if (result.Candidate == null)
                {
                    continue;
                }

                var customer = CosmosClient.Instance.GetCustomerByPersonID(result.Candidate.PersonId);
                var orders   = CosmosClient.Instance.GetCustomerOrders(customer.id);
                var faceRect = result.Rectangle;
                var ident    = new Identification()
                {
                    Customer  = customer,
                    Emotion   = ParseEmotions(result.Emotion),
                    Orders    = orders,
                    Rectangle = new Rectangle(faceRect.Left, faceRect.Top, faceRect.Width, faceRect.Height)
                };

                response.Add(ident);
            }

            return(response.ToArray());
        }
Example #4
        public async Task <JsonResult> IdentifyLinkAsync(string imageUrl)
        {
            MemoryStream memstream = new MemoryStream();

            try
            {
                ImageUrlBodyEntity entity;
                Request.InputStream.Position = 0;
                Request.InputStream.CopyTo(memstream);
                memstream.Position = 0;
                using (StreamReader reader = new StreamReader(memstream))
                {
                    var text = reader.ReadToEnd();
                    entity = JsonConvert.DeserializeObject <ImageUrlBodyEntity>(text);
                }

                WebRequest  req      = WebRequest.Create(entity.ImageUrl);
                WebResponse response = req.GetResponse();
                Stream      stream   = response.GetResponseStream();

                //var result1 = await client.AddPersonFaceAsync("testgroup", Guid.NewGuid(), stream, "习近平");

                var faces1 = await client.DetectAsync(stream);

                var faceIds = new Guid[1];
                faceIds[0] = faces1[0].FaceId;
                var result = await client.IdentifyAsync("testgroup", faceIds, 1);

                if (result.Count() <= 0 || result[0].Candidates.Count() == 0)
                {
                    return(Json(new { error = "We could not identify anyone in your photo because no matching person was found.", result = result }));
                }
                return(Json(new { error = "", result = result }));
            }
            catch (Exception ex)
            {
                return(Json(new { error = "Hmmm... Something unexpected happened. Please come back later." }));
            }
            finally
            {
                memstream.Dispose();
            }
        }
        private static async Task <IEnumerable <Guid> > IdentifyAsync(Stream inputImageStream, Stream outputImage, string name)
        {
            var identifiedFaces   = new List <Guid>();
            var faceServiceClient = new FaceServiceClient(AppSettings.FaceApi.Key, AppSettings.FaceApi.Uri);
            var personGroupId     = AppSettings.FaceApi.PersonGroupId;
            var image             = Image.FromStream(inputImageStream);
            var pen = new Pen(Color.Red, 2);

            inputImageStream.Seek(0, SeekOrigin.Begin);

            var faces = await faceServiceClient.DetectAsync(inputImageStream);

            var faceIds = faces.Select(face => face.FaceId).ToArray();

            if (!faceIds.Any())
            {
                _log.Info($"[{_context.InvocationId.ToString()}] No faces detected");

                return(null);
            }

            var results = await faceServiceClient.IdentifyAsync(personGroupId, faceIds);

            foreach (var result in results)
            {
                if (result.Candidates.Length == 0)
                {
                    continue;
                }

                var candidateId = result.Candidates[0].PersonId;
                var person      = await faceServiceClient.GetPersonAsync(personGroupId, candidateId);

                var face = faces.Where(f => f.FaceId == result.FaceId).FirstOrDefault().FaceRectangle;

                using (Graphics graphics = Graphics.FromImage(image))
                {
                    graphics.DrawRectangle(pen, new Rectangle(face.Left, face.Top, face.Width, face.Height));
                }

                identifiedFaces.Add(result.FaceId);
                _log.Info($"[{_context.InvocationId.ToString()}] FaceId: {result.FaceId} => Identified as: {person.Name}");
            }

            using (var stream = new MemoryStream())
            {
                image.Save(stream, ImageFormat.Png);

                var byteArray = stream.ToArray();

                await outputImage.WriteAsync(byteArray, 0, byteArray.Length);
            }

            return(identifiedFaces);
        }
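
The helper above looks like part of an Azure Functions project (it logs through static _log and _context fields and works on separate input and output streams). A hypothetical blob-triggered entry point that would call it is sketched below; the container names, bindings, and field assignments are assumptions, not code from the original project.

        // Hypothetical Azure Functions (v1-style) entry point for the IdentifyAsync helper above.
        public static async Task Run(
            [BlobTrigger("input-images/{name}")] Stream inputImageStream,          // assumed container
            [Blob("output-images/{name}", FileAccess.Write)] Stream outputImage,   // assumed container
            string name,
            TraceWriter log,
            ExecutionContext context)
        {
            _log     = log;       // assumed static fields consumed by IdentifyAsync for logging
            _context = context;

            var identifiedFaces = await IdentifyAsync(inputImageStream, outputImage, name);
            log.Info($"[{context.InvocationId}] Identified {identifiedFaces?.Count() ?? 0} face(s) in {name}");
        }
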
Example #6
        public static async Task <HttpResponseMessage> Run([HttpTrigger(AuthorizationLevel.Anonymous, "get", "post", Route = null)] HttpRequestMessage req, TraceWriter log)
        {
            log.Info("C# HTTP trigger function processed a request.");
            bool isDebug = false;
            bool johnDoe = true;

            //if (req.RequestUri.Query.Contains("debug")) isDebug = true;
            if (req.RequestUri.Query.Contains("johnDoeDisabled"))
            {
                johnDoe = false;
            }
            var echoIds = req.GetQueryNameValuePairs().Where(kvp => kvp.Key.Equals("echoId", StringComparison.OrdinalIgnoreCase));

            if (echoIds.Any())
            {
                HttpResponseMessage resp = await GetResponse(req, isDebug, null, johnDoePersonId, "John Doe", echoIds.First().Value);

                return(resp);
            }

            var stream = await req.Content.ReadAsStreamAsync();

            if (stream.Length != 0)
            {
                using (var ms = new MemoryStream())
                    using (var faceService = new FaceServiceClient(faceApiKey, faceApiRoot))
                    {
                        var faces = await faceService.DetectAsync(stream);

                        if (faces.Any())
                        {
                            var identified = await faceService.IdentifyAsync(personGroupId, faces.Select(f => f.FaceId).ToArray(), 5);

                            var acquaintance = identified.Where(i => i.Candidates.Any()).OrderByDescending(i => i.Candidates.Max(c => c.Confidence)).FirstOrDefault();
                            var face         = acquaintance?.Candidates?.OrderByDescending(c => c.Confidence)?.FirstOrDefault();

                            if (face != null)
                            {
                                HttpResponseMessage resp = await GetResponse(req, isDebug, faceService, face.PersonId);

                                return(resp);
                            }
                            else if (johnDoe)
                            {
                                HttpResponseMessage resp = await GetResponse(req, isDebug, faceService, johnDoePersonId, "John Doe");

                                return(resp);
                            }
                        }
                    }
            }

            return(req.CreateResponse(HttpStatusCode.NoContent));
        }
Example #7
        static async Task TestImageData(string pictureToTest)
        {
            try
            {
                var idx         = pictureToTest.LastIndexOf('\\');
                var picFileName = pictureToTest.Substring(idx + 1);
                Console.WriteLine($"Identifling persons in provided picture <{picFileName}>....");
                using (Stream s = File.OpenRead(pictureToTest))
                {
                    //#5. API to detect a customized face image.
                    var faces = await faceServiceClient.DetectAsync(s);

                    if (!faces.Any())
                    {
                        Console.WriteLine($"Did not detect any faces in picture {picFileName}");
                    }
                    else
                    {
                        var faceIds = faces.Select(face => face.FaceId).ToArray();

                        //#6. Identify the result for the customized image.
                        var results = await faceServiceClient.IdentifyAsync(personGroupId, faceIds);

                        foreach (var identifyResult in results)
                        {
                            if (identifyResult.Candidates.Length == 0)
                            {
                                Console.WriteLine($"FaceId {identifyResult.FaceId} is identified as 'Unknown' in picture {picFileName}.");
                            }
                            else
                            {
                                // Get top 1 among all candidates returned
                                var candidateId = identifyResult.Candidates[0].PersonId;
                                var person      = await faceServiceClient.GetPersonAsync(personGroupId, candidateId);

                                if (person != null && !string.IsNullOrWhiteSpace(person.Name))
                                {
                                    Console.WriteLine($"FaceId {identifyResult.FaceId} is identified as {person.Name} in picture <{picFileName}>");
                                }
                            }
                        }
                    }
                }
            }
            catch (FaceAPIException)
            {
                // Rethrow without resetting the stack trace
                throw;
            }
            catch (Exception)
            {
                throw;
            }
        }
Example #8
        public async Task <IEnumerable <PhotoFace> > FindPeople(IRandomAccessStream stream)
        {
            Face[]           faces      = null;
            IdentifyResult[] results    = null;
            List <PhotoFace> photoFaces = new List <PhotoFace>();

            try
            {
                // find all faces
                faces = await _client.DetectAsync(stream.AsStream());

                // no faces found
                if (faces.Count() == 0)
                {
                    return(photoFaces);
                }

                if (await CheckIfGroupExistsAsync())
                {
                    results = await _client.IdentifyAsync(_groupId, faces.Select(f => f.FaceId).ToArray());
                }

                for (var i = 0; i < faces.Length; i++)
                {
                    var face = faces[i];

                    var photoFace = new PhotoFace()
                    {
                        Rect       = face.FaceRectangle,
                        Identified = false
                    };

                    if (results != null)
                    {
                        var result = results[i];
                        if (result.Candidates.Length > 0)
                        {
                            photoFace.PersonId   = result.Candidates[0].PersonId;
                            photoFace.Name       = _personList.Where(p => p.PersonId == result.Candidates[0].PersonId).FirstOrDefault()?.Name;
                            photoFace.Identified = true;
                        }
                    }

                    photoFaces.Add(photoFace);
                }
            }
            catch (FaceAPIException)
            {
                // Ignore Face API errors and return whatever faces were processed so far
            }

            return(photoFaces);
        }
Example #9
        /// <summary>
        /// Detects and identifies a face from a Stream
        /// </summary>
        /// <param name="imageStream">Stream containing the image</param>
        /// <returns>A <see cref="FaceInfoModel"/> describing the detected face, or null if no face was found</returns>
        public async Task <FaceInfoModel> DetectFace(Stream imageStream)
        {
            var faces = await _faceApiClient.DetectAsync(imageStream, true, true, new List <FaceAttributeType>
            {
                FaceAttributeType.Age,
                FaceAttributeType.Emotion,
                FaceAttributeType.FacialHair,
                FaceAttributeType.Gender,
                FaceAttributeType.Glasses,
                FaceAttributeType.HeadPose,
                FaceAttributeType.Smile
            });

            if (faces.Length < 1)
            {
                return(null);
            }

            var identification = await _faceApiClient.IdentifyAsync(MAGIC_MIRROR_GROUP, faces.Select(f => f.FaceId).ToArray());

            if (identification == null || identification.Length != 1 || !identification.First().Candidates.Any())
            {
                return(new FaceInfoModel
                {
                    PersonId = null,
                    FaceId = null,
                    Age = faces.FirstOrDefault()?.FaceAttributes?.Age,
                    Gender = faces.FirstOrDefault()?.FaceAttributes?.Gender
                });
            }

            var candidate = identification.First().Candidates.OrderByDescending(x => x.Confidence).FirstOrDefault();

            if (candidate == null)
            {
                return(new FaceInfoModel
                {
                    PersonId = null,
                    FaceId = null,
                    Age = faces.FirstOrDefault()?.FaceAttributes?.Age,
                    Gender = faces.FirstOrDefault()?.FaceAttributes?.Gender
                });
            }

            return(new FaceInfoModel
            {
                PersonId = candidate.PersonId,
                FaceId = identification.First().FaceId,
                Age = faces.FirstOrDefault()?.FaceAttributes?.Age,
                Gender = faces.FirstOrDefault()?.FaceAttributes?.Gender
            });
        }
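
The FaceInfoModel returned above is project-specific; its shape can be inferred from the properties the method assigns. A sketch of the assumed DTO is shown below.

        // Assumed shape of the FaceInfoModel DTO, inferred from the assignments above.
        public class FaceInfoModel
        {
            public Guid?   PersonId { get; set; }   // identified person, null when unknown
            public Guid?   FaceId   { get; set; }   // detected face id, null when not identified
            public double? Age      { get; set; }   // from FaceAttributes.Age
            public string  Gender   { get; set; }   // from FaceAttributes.Gender
        }
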
Example #10
        public async Task <List <IdentifyResults> > Identify(string imgUrl)
        {
            var result = new List <IdentifyResults>();

            var faces = await ServiceClient.DetectAsync(imgUrl);

            var facesId = faces.Select(face => face.FaceId).ToArray();

            int iterCount = faces.Count() / FacesPerOnce;

            iterCount += (faces.Count() % FacesPerOnce == 0) ? 0 : 1;

            var identifyResults = new List <IdentifyResult>();
            var buffer          = new Guid[FacesPerOnce];
            var residue         = faces.Count();

            for (int i = 0; i < iterCount; i++)
            {
                var count = (residue < FacesPerOnce) ? residue : FacesPerOnce;
                Array.Copy(facesId, i * FacesPerOnce, buffer, 0, count);

                var identifyResultsArr = await ServiceClient.IdentifyAsync(GroupId, buffer.Take(count).ToArray());

                identifyResults.AddRange(identifyResultsArr);

                residue -= FacesPerOnce;
                await Task.Delay(Timeout);
            }

            foreach (var identifyResult in identifyResults)
            {
                if (identifyResult.Candidates.Count() == 0)
                {
                    continue;
                }

                var resultItem = new IdentifyResults();
                foreach (var candidate in identifyResult.Candidates)
                {
                    var person = await ServiceClient.GetPersonAsync(GroupId,
                                                                    candidate.PersonId);

                    resultItem.AddCandidate(person.Name);

                    await Task.Delay(Timeout);
                }

                result.Add(resultItem);
            }

            return(result);
        }
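
Example #10 batches the face ids because the Identify API accepts only a limited number of faces per call (Example #17 notes a maximum of 10). An alternative batching sketch using LINQ paging over the same ServiceClient, GroupId, FacesPerOnce, and Timeout members is shown below.

        // Alternative batching sketch: page through the face ids with Skip/Take
        // instead of copying them into a fixed-size buffer.
        var identifyResults = new List<IdentifyResult>();
        for (int offset = 0; offset < facesId.Length; offset += FacesPerOnce)
        {
            var batch = facesId.Skip(offset).Take(FacesPerOnce).ToArray();
            identifyResults.AddRange(await ServiceClient.IdentifyAsync(GroupId, batch));

            await Task.Delay(Timeout);   // stay under the Face API rate limit
        }
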
Example #11
        private async void IdentifyFace_Click(object sender, RoutedEventArgs e)
        {
            using (Stream s = await photoFile.OpenStreamForReadAsync())
            {
                var faces = await faceServiceClient.DetectAsync(s);

                status.Text = "faces were detected...";
                var faceIds = faces.Select(face => face.FaceId).ToArray();
                status.Text = "the number of faceids found is: " + faceIds.Length;

                StringBuilder resultText = new StringBuilder();
                try
                {
                    var results = await faceServiceClient.IdentifyAsync(personGroupId, faceIds);

                    status.Text = " The results are reaedy";

                    if (results.Length > 0)
                    {
                        //resultText.Append($"{results.Length} face(s) detected: \t");
                        status.Text = $"{results.Length} face(s) detected: \t";
                    }

                    foreach (var identityResult in results)
                    {
                        if (identityResult.Candidates.Length != 0)
                        {
                            var candidateId = identityResult.Candidates[0].PersonId;
                            var person      = await faceServiceClient.GetPersonAsync(personGroupId, candidateId);

                            // resultText.Append($"Authorized User Detected: {person.Name}\t  Door will be unlocked. \t");
                            status.Text = $"Authorized User Detected: {person.Name}. \t Door will be unlocked. \t";
                            await Timer_Tick();

                            // Configure the pin as an output before writing to it
                            pin.SetDriveMode(GpioPinDriveMode.Output);
                            pinValue = GpioPinValue.Low;
                            pin.Write(pinValue);
                        }
                    }
                    // Note: resultText is only populated by the commented-out Append calls above,
                    // so this comparison only matches when those lines are restored.
                    if (resultText.ToString().Equals($"{results.Length} face(s) detected: \t"))
                    {
                        status.Text = "Cannot unlock door.";
                        // resultText.Append("No persons identified\t");
                    }
                    //status.Text = resultText.ToString();
                }
                catch (FaceAPIException ex)
                {
                    status.Text = "An error occurred of type:" + ex.ErrorCode; //ex.message
                }
            }
        }
        private async Task <LiveCameraResult> IdentifyAnalysisFunction(VideoFrame frame)
        {
            List <Person> persons = new List <Person>();
            //_timeStampQueue.Enqueue(DateTime.UtcNow);
            var jpg   = frame.Image.ToMemoryStream(".jpg", s_jpegParams);
            var attrs = new List <FaceAttributeType> {
                FaceAttributeType.Age
            };
            //await WaitCallLimitPerSecondAsync();
            var faces = await fsc.DetectAsync(jpg, returnFaceAttributes : attrs);

            // Count the Detect API call.
            Properties.Settings.Default.FaceAPICallCount++;

            var faceIDs = faces.Select(x => x.FaceId).ToArray();

            if (faceIDs.Length == 0)
            {
                return(new LiveCameraResult());
            }
            var personList = await fsc.IdentifyAsync(personGroupID, faceIDs);

            Properties.Settings.Default.FaceAPICallCount++;
            string[] celebNames = new string[0];
            if (personList != null && personList[0] != null)
            {
                celebNames = new string[personList.Length];
                for (int i = 0; i < personList.Length; i++)
                {
                    if (personList[i].Candidates.Length > 0)
                    {
                        var result = await fsc.GetPersonAsync(personGroupID, personList[i].Candidates[0].PersonId);

                        celebNames[i] = result.Name;
                        persons.Add(result);
                    }
                    else
                    {
                        celebNames[i] = "Unknown person";
                        persons.Add(new Person());
                    }
                }
            }

            return(new LiveCameraResult
            {
                Persons = persons.ToArray(),
                Faces = faces,
                CelebrityNames = celebNames
            });
        }
        async void ExecuteRecognizePictureCommand()
        {
            var requiredFaceAttributes = new FaceAttributeType[] {
                FaceAttributeType.Age,
                FaceAttributeType.Gender,
                FaceAttributeType.Smile,
                FaceAttributeType.FacialHair,
                FaceAttributeType.HeadPose,
                FaceAttributeType.Glasses
            };

            using (var stream = image.GetStream())
            {
                UserDialogs.Instance.ShowLoading();

                // Step 4a - Detect the faces in this photo.
                var faces = await faceServiceClient.DetectAsync(stream,
                                                                true,
                                                                returnFaceAttributes : requiredFaceAttributes);

                // Bail out early if no face was detected; this async void handler has no try/catch
                if (!faces.Any())
                {
                    UserDialogs.Instance.HideLoading();
                    UserDialogs.Instance.ShowError("No face was detected in the photo");
                    return;
                }

                var smileValue   = faces[0].FaceAttributes.Smile;
                var ageValue     = faces[0].FaceAttributes.Age;
                var glassesValue = faces[0].FaceAttributes.Glasses;

                var faceIds = faces.Select(face => face.FaceId).ToArray();

                // Step 4b - Identify the person in the photo, based on the face.
                var results = await faceServiceClient.IdentifyAsync(personGroupId, faceIds);

                if (results[0].Candidates.Count() < 1)
                {
                    UserDialogs.Instance.HideLoading();
                    UserDialogs.Instance.ShowError("Could not find the user");
                }
                else
                {
                    var result = results[0].Candidates[0].PersonId;

                    // Step 4c - Fetch the person from the PersonId and display their name.
                    var person = await faceServiceClient.GetPersonAsync(personGroupId, result);

                    Name    = "Name: " + person.Name;
                    Age     = "Age: " + ageValue;
                    Glasses = "Glasses: " + glassesValue;
                    Smile   = "Status: " + (smileValue > 0.7 ? "Happy" : "Neutral");

                    UserDialogs.Instance.HideLoading();

                    UserDialogs.Instance.ShowSuccess($"Person identified is {person.Name}.", 3000);
                }
            }
        }
Example #14
        private async Task <bool> IdentifyFaces()
        {
            try
            {
                // Identify each face
                using (var faceServiceClient = new FaceServiceClient(this.strSubscriptionKey, this.strEndpoint))
                {
                    // Call identify REST API, the result contains identified person information
                    var identifyResult = await faceServiceClient.IdentifyAsync(TargetFaces.Select(ff => new Guid(ff.FaceId)).ToArray(), personGroupId : this.PersonGroupId);

                    this.IdentifiedContacts = new List <Contact>();
                    var faces = TargetFaces.ToArray();
                    for (int idx = 0; idx < faces.Length; idx++)
                    {
                        // Update identification result for rendering
                        var face = TargetFaces[idx];
                        var res  = identifyResult[idx];

                        string contactName;
                        if (res.Candidates.Length > 0)
                        {
                            var person = await faceServiceClient.GetPersonAsync(this.PersonGroupId, res.Candidates[0].PersonId);

                            IdentifiedContacts.Add(new Contact()
                            {
                                FullName = person.Name,
                                Id       = person.UserData,
                                FaceId   = face.FaceId
                            });
                            face.PersonName = person.Name;
                        }
                        else
                        {
                            IdentifiedContacts.Add(new Contact()
                            {
                                FullName = "UnKnown"
                            });
                            face.PersonName = "UnKnown";
                        }
                    }
                }
                return(true);
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message);
                return(false);
            }
        }
 public IEnumerable <Candidate> Identify(string groupName, Guid[] faceIDs)
 {
     try
     {
         return(Task.Run(() => Api.IdentifyAsync(groupName.ToLowerInvariant(), faceIDs)).GetAwaiter().GetResult().SelectMany(x => x.Candidates).Select(x => new Candidate
         {
             PersonId = x.PersonId,
             Confidence = x.Confidence,
         }));
     }
     catch (FaceAPIException ex)
     {
         throw new FaceApiException(ex.ErrorMessage);
     }
 }
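
The wrapper above blocks on the asynchronous call with GetAwaiter().GetResult(); where the caller can itself be async, an awaited variant avoids blocking a thread. A sketch against the same Api client and Candidate mapping is shown below.

        // Possible async variant of the Identify wrapper above (same Api client and mapping).
        public async Task<IEnumerable<Candidate>> IdentifyAsync(string groupName, Guid[] faceIDs)
        {
            try
            {
                var results = await Api.IdentifyAsync(groupName.ToLowerInvariant(), faceIDs);

                return results
                    .SelectMany(x => x.Candidates)
                    .Select(x => new Candidate
                    {
                        PersonId   = x.PersonId,
                        Confidence = x.Confidence,
                    });
            }
            catch (FaceAPIException ex)
            {
                throw new FaceApiException(ex.ErrorMessage);
            }
        }
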
Example #16
        public async Task RecognitionFace(string personGroupId, string imgPath)
        {
            using (Stream s = File.OpenRead(imgPath))
            {
                // Await the calls directly instead of nesting ContinueWith continuations,
                // so the stream is not disposed while requests are still in flight
                try
                {
                    var faces   = await faceServiceClient.DetectAsync(s);
                    var faceids = faces.Select(f => f.FaceId).ToArray();

                    var results = await faceServiceClient.IdentifyAsync(personGroupId, faceids);

                    foreach (var item in results)
                    {
                        Console.WriteLine($"Result of face: { item.FaceId }");
                        if (item.Candidates.Length == 0)
                        {
                            Console.WriteLine("Not identified!!");
                        }
                        else
                        {
                            var candidateId = item.Candidates[0].PersonId;
                            var person      = await faceServiceClient.GetPersonAsync(personGroupId, candidateId);

                            Console.WriteLine($"Identified as {person.Name}");
                        }
                    }
                }
                catch (Exception ex)
                {
                    Console.WriteLine($"Error: {ex.Message}");
                }
            }
        }
Example #17
        /// <summary>
        /// Recognizes people from the detected faces.
        /// </summary>
        /// <param name="senderId">SenderId from the LINE Messaging Service</param>
        /// <param name="faces">The detected faces (one or more)</param>
        /// <returns>Returns an array of recognized <see cref="Person"/> objects; entries for faces that could not be recognized are left null.</returns>
        public async Task <Person[]> FindPersonByFacesAsync(string senderId, Face[] faces)
        {
            var personGroupId = SenderIdToPersonGroupId(senderId);

            IdentifyResult[] result = null;
            try
            {
                // Recognize the Person for each Face via the Cognitive Face Identify API
                Guid[] guids = faces.Select(face => face.FaceId).Take(10).ToArray(); // the Identify API accepts at most 10 faces
                result = await _faceServiceClient.IdentifyAsync(personGroupId, guids, 0.6f);
            }
            catch (FaceAPIException e)
            {
                if (e.ErrorCode == "PersonGroupNotTrained")
                {
                    // Train the group if training is still required
                    await _faceServiceClient.TrainPersonGroupAsync(personGroupId);
                }

                // On an exception, return a result indicating nothing was found
                return(new Person[faces.Length]);
            }

            if (result.Count() == 0)
            {
                // If the result is empty, return a result indicating nothing was found
                return(new Person[faces.Length]);
            }
            else
            {
                // Build the array of Persons corresponding to the Faces
                var persons = new Person[faces.Length];
                for (int i = 0; i < faces.Length && i < result.Length; i++)
                {
                    if (result[i].Candidates.Count() > 0)
                    {
                        var personId = result[i].Candidates[0].PersonId.ToString();
                        var person   = await _db.Person.Where(p => p.PersonGroupId == personGroupId && p.PersonId == personId).FirstOrDefaultAsync();

                        persons[i] = person;
                    }
                }

                // Return the array of Persons
                return(persons);
            }
        }
Example #18
        async Task ExecuteIdCommand()
        {
            if (!IsBusy)
            {
                Exception Erro = null;

                try
                {
                    IsBusy = true;
                    await CrossMedia.Current.Initialize();

                    MediaFile foto;

                    foto = await CrossMedia.Current.PickPhotoAsync();

                    using (var stream = foto.GetStream())
                    {
                        var faces = await client.DetectAsync(stream);

                        var faceIds = faces.Select(face => face.FaceId).ToArray();

                        var resultado = await client.IdentifyAsync(politicoGrupoId, faceIds);

                        var resposta = resultado[0].Candidates[0].PersonId;

                        var politico = await client.GetPersonAsync(politicoGrupoId, resposta);

                        await DisplayAlert("Resultado", $"O político indentificado é : {politico.Name}", "ok");
                    }
                }
                catch (Exception ex)
                {
                    Erro = ex;
                }

                finally
                {
                    IsBusy = false;
                }

                if (Erro != null)
                {
                    await DisplayAlert("Algo de errado não está certo", Erro.Message, "ok");
                }
            }
        }
Example #19
        public Person[] IdentifyPerson(string personGroupId, Face[] faceArray)
        {
            var guids = faceArray.Select(x => x.FaceId).ToArray();

            if (guids.Count() == 0)
            {
                return(null);
            }
            var identities = Task.Run(() => _fsClient.IdentifyAsync(personGroupId, guids)).Result;
            var persons    = new List <Person>();

            foreach (var identity in identities)
            {
                // Skip faces with no candidates to avoid indexing an empty array
                if (identity.Candidates.Length == 0)
                {
                    continue;
                }

                persons.Add(Task.Run(() => _fsClient.GetPersonAsync(personGroupId, identity.Candidates[0].PersonId)).Result);
            }
            return(persons.ToArray());
        }
Example #20
        /// <summary>
        /// Set a breakpoint in this method and pay close attention to what this does :) Don't forget to set your API key!!
        /// </summary>
        /// <param name="e"></param>
        protected override async void OnNavigatedTo(NavigationEventArgs e)
        {
            base.OnNavigatedTo(e);

            var file = await StorageFile.GetFileFromApplicationUriAsync(new Uri("ms-appx:///emma.jpg", UriKind.RelativeOrAbsolute));

            using (var stream = await file.OpenStreamForReadAsync())
            {
                var personGroupId = "default";

                Face[] faces = await faceApiClient.DetectAsync(stream);

                IdentifyResult[] identifyResults = await faceApiClient.IdentifyAsync(personGroupId, new[] { faces.First().FaceId });

                Guid personId = identifyResults.First().Candidates.First().PersonId;
            }
        }
Example #21
        public async Task <IEnumerable <FaceDetectionResult> > DetectFacesAsync(ImageRequest request)
        {
            try
            {
                // Create the client used to call the Face API from .NET
                var client = new FaceServiceClient(subscriptionKey: Secrets.CongnitiveServiceFaceApiKey, apiRoot: Consts.CognitiveServiceFaceApiEndPoint);

                // DetectAsync returns where each face is in the image and how old that face appears to be
                var results = await client.DetectAsync(imageStream : new MemoryStream(request.Image), returnFaceAttributes : new[]
                {
                    FaceAttributeType.Age,
                });

                var personListId = await this.PersonListIdRepository.GetIdAsync();

                // Identify whose face each detected face belongs to
                var identifyResults = (await client.IdentifyAsync(personListId, results.Select(x => x.FaceId).ToArray()))
                                      .ToDictionary(x => x.FaceId);

                var l = new List <FaceDetectionResult>();
                foreach (var r in results)
                {
                    IdentifyResult identifyResult = null;
                    identifyResults.TryGetValue(r.FaceId, out identifyResult);
                    var faceDetectionResult = new FaceDetectionResult
                    {
                        FaceId    = identifyResult?.Candidates.FirstOrDefault()?.PersonId.ToString() ?? new Guid().ToString(),
                        Age       = (int)r.FaceAttributes.Age,
                        Rectangle = new BusinessObjects.FaceRectangle
                        {
                            Top    = r.FaceRectangle.Top,
                            Left   = r.FaceRectangle.Left,
                            Width  = r.FaceRectangle.Width,
                            Height = r.FaceRectangle.Height,
                        }
                    };
                    l.Add(faceDetectionResult);
                }
                return(l);
            }
            catch (FaceAPIException)
            {
                return(Enumerable.Empty <FaceDetectionResult>());
            }
        }
Example #22
        /// <summary> Function which submits a frame to the Face API. </summary>
        /// <param name="frame"> The video frame to submit. </param>
        /// <returns> A <see cref="Task{LiveCameraResult}"/> representing the asynchronous API call,
        ///     and containing the faces returned by the API. </returns>
        private async Task <LiveCameraResult> FacesAnalysisFunction(VideoFrame frame)
        {
            // Encode image.
            var jpg = frame.Image.ToMemoryStream(".jpg", s_jpegParams);
            // Submit image to API.
            var attrs = new List <FaceAttributeType> {
                FaceAttributeType.Age,
                FaceAttributeType.Gender, FaceAttributeType.HeadPose
            };
            var faces = await _faceClient.DetectAsync(jpg, returnFaceAttributes : attrs);

            // Count the API call.
            Properties.Settings.Default.FaceAPICallCount++;

            var faceIds = faces.Select(face => face.FaceId).ToArray();

            var results = await _faceClient.IdentifyAsync("df_employees", faceIds);

            Properties.Settings.Default.FaceAPICallCount++;
            foreach (var identifyResult in results)
            {
                Console.WriteLine("Result of face: {0}", identifyResult.FaceId);
                if (identifyResult.Candidates.Length == 0)
                {
                    Console.WriteLine("No one identified");
                }
                else
                {
                    // Get top 1 among all candidates returned
                    var candidateId = identifyResult.Candidates[0].PersonId;
                    var person      = await _faceClient.GetPersonAsync("df_employees", candidateId);

                    Properties.Settings.Default.FaceAPICallCount++;
                    TextToSpeech.Speak($"Hallo {person.Name}");
                    return(new LiveCameraResult {
                        Faces = faces, Person = person
                    });
                }
            }

            // Output.
            return(new LiveCameraResult {
                Faces = faces
            });
        }
        internal async Task IdentifyUsersAsync(string url)
        {
            VisionServiceClient VisionServiceClient = new VisionServiceClient(VISIONAPI_KEY);

            //VisualFeature[] visualFeatures = new VisualFeature[] {
            //                                            VisualFeature.Adult, VisualFeature .Categories,
            //                                                VisualFeature.Color,VisualFeature.Description,
            //                                                    VisualFeature.Faces,VisualFeature.ImageType,
            //                                                        VisualFeature.Tags};
            VisualFeature[] visualFeatures = new VisualFeature[] {
                VisualFeature.Faces
            };

            //url = "https://michistorageea.blob.core.windows.net/data/DSC01498.JPG";

            var result = await VisionServiceClient.AnalyzeImageAsync(url, visualFeatures);

            if (result.Faces != null && result.Faces.Count() > 0)
            {
                var personGroupId = "demo";
                var fsc           = new FaceServiceClient(FACEAPI_KEY);
                var faces         = await fsc.DetectAsync(url);

                var faceIds             = faces.Select(f => f.FaceId).ToArray();
                var faceIdentifyResults = await fsc.IdentifyAsync(personGroupId, faceIds);

                List <string> users = new List <string>();
                foreach (var identifyResult in faceIdentifyResults)
                {
                    // Take the most confident candidate; skip faces with no candidates
                    var user = identifyResult.Candidates.OrderByDescending(c => c.Confidence).FirstOrDefault();
                    if (user == null)
                    {
                        continue;
                    }

                    var person = await fsc.GetPersonAsync(personGroupId, user.PersonId);

                    users.Add(person.Name);
                }

                if (_onOutputReceived != null)
                {
                    _onOutputReceived(new FinalOutputEvent()
                    {
                        IsCompleted = true,
                        EventData   = Encoding.UTF8.GetBytes(string.Join(",", users))
                    });
                }
            }
        }
Example #24
        async Task ExecuteFindSimilarFaceCommandAsync()
        {
            if (IsBusy)
            {
                return;
            }

            IsBusy = true;
            ActivateSpinner();

            try
            {
                if (file != null)
                {
                    using (var stream = file.GetStream())
                    {
                        var faceServiceClient = new FaceServiceClient("8f1c2baf6778445f94c3effc4ed53786");

                        // Step 4a - Detect the faces in this photo.
                        var faces = await faceServiceClient.DetectAsync(stream);

                        var faceIds = faces.Select(face => face.FaceId).ToArray();

                        // Step 4b - Identify the person in the photo, based on the face.
                        var results = await faceServiceClient.IdentifyAsync(personGroupId, faceIds);

                        var result = results[0].Candidates[0].PersonId;

                        // Step 4c - Fetch the person from the PersonId and display their name.
                        var person = await faceServiceClient.GetPersonAsync(personGroupId, result);

                        UserDialogs.Instance.ShowSuccess($"Person identified is {person.FirstName} {person.LastName}.");
                    }
                }
            }
            catch (Exception ex)
            {
                UserDialogs.Instance.ShowError(ex.Message);
            }
            finally
            {
                IsBusy = false;
                DeactivateSpinner();
            }
        }
Example #25
        public async Task <string> Recognize(HttpFileCollection files, string personGroupId)
        {
            string recognitionString = "We have recognized: ";
            int    guests            = 0;
            int    knownPeople       = 0;

            if (files.Count < 1)
            {
                recognitionString = "None of the files has come down to us :) Try again, please.";
                return(recognitionString);
            }
            try
            {
                Stream s     = files[0].InputStream;
                var    faces = await _faceServiceClient.DetectAsync(s);

                var faceIds = faces.Select(face => face.FaceId).ToArray();
                var results = await _faceServiceClient.IdentifyAsync(personGroupId, faceIds);

                foreach (var result in results)
                {
                    if (result.Candidates.Length == 0)
                    {
                        guests           += 1;
                        recognitionString = $"We have found {guests} guests on the photo and no employees";
                        return(recognitionString);
                    }
                    var candidateId = result.Candidates[0].PersonId;
                    var person      = await _faceServiceClient.GetPersonAsync(personGroupId, candidateId);

                    knownPeople++;
                    recognitionString += person.Name + " ";
                }
                if (guests != 0)
                {
                    recognitionString += $"and {guests} guests";
                }
            }
            catch (Exception ex)
            {
                return(ex.Message);
            }

            return(recognitionString);
        }
        /// <summary>
        /// Identify the person by using Cognitive Face API
        /// </summary>
        private async void Identify()
        {
            while (true)
            {
                Message = "Seeing you...";
                using (var stream = await DetectFaceAsync())
                {
                    try
                    {
                        var faces = await faceClient.DetectAsync(ImageConverter.ConvertImage(stream));

                        if (!faces.Any())
                        {
                            continue;
                        }

                        Person person;

                        if (newModel)
                        {
                            person = await RegisterAsync(stream);
                        }
                        else
                        {
                            var identifyResults = await faceClient.IdentifyAsync(Settings.PersonGroupId, faces.Select(x => x.FaceId).ToArray());

                            if (identifyResults.FirstOrDefault()?.Candidates?.Count() > 0)
                            {
                                person = await faceClient.GetPersonAsync(Settings.PersonGroupId, identifyResults.First().Candidates.First().PersonId);
                            }
                            else
                            {
                                person = await RegisterAsync(stream);
                            }
                        }
                        Message = $"Hi {person.Name}!";
                        await Task.Delay(2000);
                    }
                    catch (Exception)
                    {
                        // Ignore transient errors and keep looping
                    }
                }
            }
        }
Example #27
        public async void facedetect()
        {
            try
            {
                //string testImageFile = @"D:\Pictures\test_img1.jpg";

                string subscriptionKey = ConfigurationManager.AppSettings["subscriptionKey"].ToString();

                byte[] data = System.Convert.FromBase64String(Request.Form["formfield"]);

                using (Stream s = new MemoryStream(data))
                {
                    using (FaceServiceClient faceClient = new FaceServiceClient(faceapikey))
                    {
                        var faces = await faceClient.DetectAsync(s);

                        var    faceIds       = faces.Select(face => face.FaceId).ToArray();
                        string personGroupId = "myfriends";
                        var    results       = await faceClient.IdentifyAsync(personGroupId, faceIds);

                        foreach (var identifyResult in results)
                        {
                            if (identifyResult.Candidates.Length == 0)
                            {
                                TextBox1.Text = string.Format("No one identified");
                            }
                            else
                            {
                                // Get top 1 among all candidates returned
                                var candidateId = identifyResult.Candidates[0].PersonId;
                                var person      = await faceClient.GetPersonAsync(personGroupId, candidateId);

                                TextBox1.Text = string.Format("Identified as {0}", person.Name);
                            }
                        }
                    }
                }
            }
            catch (Exception ex)
            {
                showerror(ex);
                //throw ex;
            }
        }
        public async Task <List <IdentifiedPerson> > IdentifyAsync(StorageFile image)
        {
            var people = new List <IdentifiedPerson>();

            await Task.Run(async() =>
            {
                using (var fileStream = File.OpenRead(image.Path))
                {
                    // detect faces
                    var faces = await _faceClient.DetectAsync(fileStream);

                    // max 10 faces
                    faces = faces.Take(10).ToArray();
                    Log?.WriteLine($"Found {faces.Count()} number of faces.");

                    // identify each face
                    var identifyResult = await _faceClient.IdentifyAsync(_groupName, faces.Select(ff => ff.FaceId).ToArray());
                    foreach (var face in faces)
                    {
                        var identifiedPerson = new IdentifiedPerson {
                            Face = face
                        };

                        var identity = identifyResult.SingleOrDefault(i => i.FaceId == face.FaceId);

                        if (identity != null && identity.Candidates.Length > 0)
                        {
                            var candidate = identity.Candidates.OrderByDescending(c => c.Confidence).First();
                            var person    = await _faceClient.GetPersonAsync(_groupName, candidate.PersonId);

                            Log?.WriteLine($"Found {person.Name}.");

                            identifiedPerson.PersonName = person.Name;
                            identifiedPerson.Face       = faces.Single(f => f.FaceId == identity.FaceId);
                        }

                        people.Add(identifiedPerson);
                    }
                }
            });

            return(people);
        }
        /// <summary> Function which submits a frame to the Face API. </summary>
        /// <param name="frame"> The video frame to submit. </param>
        /// <returns> A <see cref="Task{LiveCameraResult}"/> representing the asynchronous API call,
        ///     and containing the faces returned by the API. </returns>
        private async Task <LiveCameraResult> IdentifyFaceFunction(VideoFrame frame)
        {
            var imageStream = frame.Image.ToMemoryStream();
            var result      = new LiveCameraResult();

            try
            {
                // First detect any faces
                var faces = await _faceClient.DetectAsync(imageStream);

                if (faces.Length <= 0)
                {
                    return(result);
                }

                // Identify each face
                // Call identify REST API, the result contains identified person information
                var identifyResult = await _faceClient.IdentifyAsync(_groupName, faces.Select(ff => ff.FaceId).ToArray());

                for (int idx = 0; idx < faces.Length; idx++)
                {
                    // Update identification result for rendering
                    var res = identifyResult[idx];
                    if (res.Candidates.Length > 0 && _persons.Any(p => p.PersonId == res.Candidates[0].PersonId))
                    {
                        var personName = _persons.Where(p => p.PersonId == res.Candidates[0].PersonId).First().Name;
                        await _grabber.StopProcessingAsync();

                        result.PeopleIdentified.Add(personName);
                    }
                    else
                    {
                        result.UnknownFaceCount++;
                    }
                }
            }
            catch (FaceAPIException ex)
            {
                Log($"Response: {ex.ToString()}");
            }

            return(result);
        }
Example #30
        private async Task <Face[]> UploadAndDetectFaces(string imageFilePath)
        {
            // The list of Face attributes to return.
            IEnumerable <FaceAttributeType> faceAttributes =
                new FaceAttributeType[] { FaceAttributeType.Gender, FaceAttributeType.Occlusion, FaceAttributeType.HeadPose, FaceAttributeType.Age, FaceAttributeType.Smile, FaceAttributeType.Emotion, FaceAttributeType.Glasses, FaceAttributeType.Hair, FaceAttributeType.FacialHair };

            // Call the Face API.
            try {
                using (Stream imageFileStream = File.OpenRead(imageFilePath)) {
                    var faces = await faceApi.DetectAsync(imageFileStream, returnFaceId : true, returnFaceLandmarks : true, returnFaceAttributes : faceAttributes);

                    var faceIds = faces.Select(face => face.FaceId).ToArray();

                    if (faces.Count() > 0)
                    {
                        var results = await faceApi.IdentifyAsync(personGroupId, faceIds);

                        if (results.Count() > 0)
                        {
                            lst.Items.Clear();

                            lst.Items.Insert(0, $"Person: {results.First().Candidates?.First()?.PersonId} ({results.First().Candidates?.First()?.Confidence})");
                            var person = await faceApi.GetPersonAsync("myfamily", results.First().Candidates.First().PersonId);

                            Console.WriteLine($"Identified as {person.Name}");
                            lst.Items.Insert(0, $"Identified as {person.Name}({results.First().Candidates?.First()?.Confidence})");
                        }
                    }

                    return(faces);
                }
            }
            // Catch and display Face API errors.
            catch (FaceAPIException f) {
                Console.WriteLine(f.ErrorMessage, f.ErrorCode);
                return(new Face[0]);
            }
            // Catch and display all other errors.
            catch (Exception e) {
                Console.WriteLine(e.Message, "Error");
                return(new Face[0]);
            }
        }
        public async Task<string[]> FaceUpload(string DeviceId)
        {
            Stream req = null;
            req = await Request.Content.ReadAsStreamAsync();
            // Buffer the request body in memory
            MemoryStream ms = new MemoryStream();
            await req.CopyToAsync(ms);
            byte[] bytes = ms.ToArray();
            Stream stream = new MemoryStream(bytes);
            FaceServiceClient faceclient = new FaceServiceClient(ConfigurationManager.AppSettings["OxfordSubscriptionKeyPrimary"]);
            Face[] faceresult = null;
            try
            {
                // Detect faces in the uploaded image; the returned face IDs feed the identification call below.
                faceresult = await faceclient.DetectAsync(stream, false, false, false, false);
            }
            catch (Exception ex)
            {
                Debug.WriteLine(ex.Message);
            }

            if (faceresult == null || faceresult.Length == 0)
            {
                return new string[]{"Invalid"};
            }
            Guid[] FaceIdSet = new Guid[faceresult.Length];
            for (int i = 0; i < faceresult.Length; i ++)
            {
                FaceIdSet[i] = faceresult[i].FaceId;
            }
            IdentifyResult[] identityresultnew = await faceclient.IdentifyAsync(ConfigurationManager.AppSettings["MemberGroupId"], FaceIdSet, 1);
            string IdentifyResultName = null;
            string[] IdentifyResultJson = new String[identityresultnew.Length];
            int StrangerNum = 0;
            for (int j = 0; j < identityresultnew.Length; j++)
            {
                if (identityresultnew[j].Candidates.Length == 0)
                {
                    IdentifyResultJson[j] = "Stranger";
                    StrangerNum ++;
                }
                else
                {
                    string candidateid = identityresultnew[j].Candidates[0].PersonId.ToString();
                    Person candidate = await faceclient.GetPersonAsync(ConfigurationManager.AppSettings["MemberGroupId"], new Guid(candidateid));
                    IdentifyResultName += candidate.Name + "_";
                    IdentifyResultJson[j] = candidate.Name;
                }
            }
            DateTime temp = DateTime.Now;
            string ImageNameDate = temp.Year.ToString() + "Y" + temp.Month.ToString() + "M" + temp.Day.ToString() + "D" + temp.Hour.ToString() + "h" + temp.Minute.ToString() + "m" + temp.Second.ToString() + "s";
            string ImagePath = await StorageUpload("visitorcapture", ImageNameDate + "_" + IdentifyResultName + StrangerNum.ToString() + "Strangers", bytes);
            
            return IdentifyResultJson;

        }
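        // A minimal alternative to the manual 1 KB read loop in FaceUpload, assuming the
        // controller's Request is an HttpRequestMessage; the helper name is illustrative.
        // Stream.CopyToAsync buffers the whole body so the same bytes can be wrapped in
        // fresh MemoryStreams for DetectAsync and the storage upload.
        private static async Task<byte[]> RequestBodyToBytesAsync(HttpRequestMessage request)
        {
            using (Stream body = await request.Content.ReadAsStreamAsync())
            using (var buffer = new MemoryStream())
            {
                await body.CopyToAsync(buffer);
                return buffer.ToArray();
            }
        }
        // Usage inside FaceUpload: byte[] bytes = await RequestBodyToBytesAsync(Request);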
        /// <summary>
        /// Pick image, detect and identify all faces detected
        /// </summary>
        /// <param name="sender">Event sender</param>
        /// <param name="e">Event arguments</param>
        private async void Identify_Click(object sender, RoutedEventArgs e)
        {
            // Show file picker
            Microsoft.Win32.OpenFileDialog dlg = new Microsoft.Win32.OpenFileDialog();
            dlg.DefaultExt = ".jpg";
            dlg.Filter = "Image files(*.jpg) | *.jpg";
            var result = dlg.ShowDialog();

            if (result.HasValue && result.Value)
            {
                // User picked one image
                // Clear previous detection and identification results
                TargetFaces.Clear();
                SelectedFile = dlg.FileName;

                var sw = Stopwatch.StartNew();

                var imageInfo = UIHelper.GetImageInfoForRendering(dlg.FileName);

                MainWindow mainWindow = Window.GetWindow(this) as MainWindow;
                string subscriptionKey = mainWindow._scenariosControl.SubscriptionKey;

                var faceServiceClient = new FaceServiceClient(subscriptionKey);

                // Call detection REST API
                using (var fileStream = File.OpenRead(dlg.FileName))
                {
                    try
                    {
                        var faces = await faceServiceClient.DetectAsync(fileStream);

                        // Convert detection result into UI binding object for rendering
                        foreach (var face in UIHelper.CalculateFaceRectangleForRendering(faces, MaxImageSize, imageInfo))
                        {
                            TargetFaces.Add(face);
                        }

                        MainWindow.Log("Request: Identifying {0} face(s) in group \"{1}\"", faces.Length, GroupName);

                        // Identify each face
                        // Call identify REST API, the result contains identified person information
                        var identifyResult = await faceServiceClient.IdentifyAsync(GroupName, faces.Select(ff => ff.FaceId).ToArray());
                        for (int idx = 0; idx < faces.Length; idx++)
                        {
                            // Update identification result for rendering
                            var face = TargetFaces[idx];
                            var res = identifyResult[idx];
                            if (res.Candidates.Length > 0 && Persons.Any(p => p.PersonId == res.Candidates[0].PersonId.ToString()))
                            {
                                face.PersonName = Persons.Where(p => p.PersonId == res.Candidates[0].PersonId.ToString()).First().PersonName;
                            }
                            else
                            {
                                face.PersonName = "Unknown";
                            }
                        }

                        var outString = new StringBuilder();
                        foreach (var face in TargetFaces)
                        {
                            outString.AppendFormat("Face {0} is identified as {1}. ", face.FaceId, face.PersonName);
                        }

                        MainWindow.Log("Response: Success. {0}", outString);
                    }
                    catch (FaceAPIException ex)
                    {
                        MainWindow.Log("Response: {0}. {1}", ex.ErrorCode, ex.ErrorMessage);
                    }
                }
            }
        }
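        // A minimal sketch of filtering identification results by confidence, assuming the
        // Contract Candidate type's Confidence property; the 0.6 threshold is an arbitrary
        // illustration value, not a recommended setting. In Identify_Click this could
        // replace the res.Candidates.Length > 0 check so low-confidence hits also fall
        // back to "Unknown". Requires System.Linq.
        private static Guid? BestMatchOrNull(IdentifyResult identifyResult, double minConfidence = 0.6)
        {
            var best = identifyResult.Candidates
                .OrderByDescending(c => c.Confidence)
                .FirstOrDefault();

            return (best != null && best.Confidence >= minConfidence)
                ? best.PersonId
                : (Guid?)null;
        }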
        /// <summary>
        /// Identify a list of photos based on an existing training group.  
        /// </summary>
        /// <param name="PersonGroupID">Name of the training group</param>
        /// <param name="Photos">List of photos to be tagged</param>
        /// <returns></returns>
        public async Task identifyPhotosInGroup(string PersonGroupID, List<Photo> Photos)
        {
            IFaceServiceClient faceClient = new FaceServiceClient(SubscriptionKey);
                        
            try
            {
                foreach (Photo photo in Photos)
                {
                    photo.NumberOfMatchedFaces = 0;
                    photo.NumberOfUnmatchedFaces = 0;
                    photo.PeopleInPhoto.Clear();

                    // convert image bytes into a stream
                    Stream stream = new MemoryStream(photo.Image);

                    // identify faces in the image (an image could have multiple faces in it)
                    var faces = await faceClient.DetectAsync(stream);

                    if (faces.Length > 0)
                    {
                        // match each face to the training group photos.  
                        var identifyResult = await faceClient.IdentifyAsync(PersonGroupID, faces.Select(ff => ff.FaceId).ToArray());
                        for (int idx = 0; idx < faces.Length; idx++)
                        {
                            var res = identifyResult[idx];
                            if (res.Candidates.Length > 0)
                            {
                                // found a match so add the original ID of the training person to the photo
                                if (TrainingPhotos.Keys.Contains(res.Candidates[0].PersonId))
                                {
                                    photo.PeopleInPhoto.Add(TrainingPhotos[res.Candidates[0].PersonId]);
                                    photo.NumberOfMatchedFaces += 1;
                                }
                                // the top candidate isn't in the local training dictionary, so count it as unmatched.
                                else
                                    photo.NumberOfUnmatchedFaces += 1;
                            }
                            // no candidates were returned for this face, so count it as unmatched.
                            else
                                photo.NumberOfUnmatchedFaces += 1;

                        }
                    }

                }
            }
            catch (ClientException)
            {
                // Rethrow so the caller can decide how to surface Face API errors.
                throw;
            }
        }
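        // A minimal batching sketch for IdentifyAsync. identifyPhotosInGroup sends every
        // detected face ID in a single call, but the service has historically capped each
        // Identify request at a small number of face IDs (commonly documented as 10); the
        // batch size here is an assumption and should be checked against the deployed API.
        // Requires System.Linq and the Contract types.
        private async Task<IdentifyResult[]> IdentifyInBatchesAsync(
            IFaceServiceClient faceClient, string personGroupId, Guid[] faceIds, int batchSize = 10)
        {
            var allResults = new List<IdentifyResult>();

            for (int offset = 0; offset < faceIds.Length; offset += batchSize)
            {
                // Preserve detection order so results still line up with faces[idx].
                Guid[] batch = faceIds.Skip(offset).Take(batchSize).ToArray();
                IdentifyResult[] results = await faceClient.IdentifyAsync(personGroupId, batch);
                allResults.AddRange(results);
            }

            return allResults.ToArray();
        }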