        public async Task DetectEmotionAsync()
        {
            try
            {
                if (this.ImageUrl != null)
                {
                    this.DetectedEmotion = await EmotionServiceHelper.RecognizeAsync(this.ImageUrl);
                }
                else if (this.GetImageStreamCallback != null)
                {
                    this.DetectedEmotion = await EmotionServiceHelper.RecognizeAsync(this.GetImageStreamCallback);
                }

                if (this.FilterOutSmallFaces)
                {
                    this.DetectedEmotion = this.DetectedEmotion.Where(f => CoreUtil.IsFaceBigEnoughForDetection(f.FaceRectangle.Height, this.DecodedImageHeight));
                }
            }
            catch (Exception e)
            {
                ErrorTrackingHelper.TrackException(e, "Emotion API RecognizeAsync error");

                this.DetectedEmotion = Enumerable.Empty<Emotion>();

                if (this.ShowDialogOnFaceApiErrors)
                {
                    await ErrorTrackingHelper.GenericApiCallExceptionHandler(e, "Emotion detection failed.");
                }
            }
            finally
            {
                this.OnEmotionRecognitionCompleted();
            }
        }
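CoreUtil.IsFaceBigEnoughForDetection is referenced throughout these examples but not shown. A minimal sketch of what such a check could look like, assuming it simply compares the face height against a fraction of the decoded image height (the threshold and signature here are assumptions, not the sample's actual code):

public static class CoreUtilSketch
{
    // Hypothetical sketch -- not the sample's actual implementation.
    public static bool IsFaceBigEnoughForDetection(int faceHeight, double decodedImageHeight)
    {
        // Assumption: faces smaller than ~2% of the image height are too small to analyze reliably.
        const double minimumRatio = 0.02;
        return decodedImageHeight > 0 && faceHeight / decodedImageHeight >= minimumRatio;
    }
}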
        public async Task DetectEmotionAsync()
        {
            try
            {
                // Implement #1: If ImageUrl is set, call the EmotionServiceHelper overload that takes a URL
                if (this.ImageUrl != null)
                {
                    this.DetectedEmotion = await EmotionServiceHelper.RecognizeAsync(this.ImageUrl);
                }
                // Implement #2: Otherwise, if GetImageStreamCallback is not null, use the stream-based overload
                else if (this.GetImageStreamCallback != null)
                {
                    this.DetectedEmotion = await EmotionServiceHelper.RecognizeAsync(this.GetImageStreamCallback);
                }

                // Implement #3: If FilterOutSmallFaces is enabled, filter the DetectedEmotion using the CoreUtil IsFaceBigEnoughForDetection method results
                if (this.FilterOutSmallFaces)
                {
                    this.DetectedEmotion = this.DetectedEmotion.Where(f => CoreUtil.IsFaceBigEnoughForDetection(f.FaceRectangle.Height, this.DecodedImageHeight)).ToList();
                }
            }
            catch (Exception e)
            {
                // Implement #4: If there is an error, call the ErrorTrackingHelper class to record the issue
                //               and fall back to an empty emotion list
                ErrorTrackingHelper.TrackException(e, "Emotion API RecognizeAsync error");

                this.DetectedEmotion = Enumerable.Empty<Emotion>();

                if (this.ShowDialogOnFaceApiErrors)
                {
                    await ErrorTrackingHelper.GenericApiCallExceptionHandler(e, "Emotion detection failed.");
                }

#if DEBUG
                throw; // rethrow, preserving the original stack trace ("throw e" would reset it)
#endif
            }
            finally
            {
                // Implement #5: Call the event OnEmotionRecognitionCompleted
                this.OnEmotionRecognitionCompleted();
            }
        }
        public async Task DetectFacesAsync(bool detectFaceAttributes = false, bool detectFaceLandmarks = false)
        {
            try
            {
                if (this.ImageUrl != null)
                {
                    this.DetectedFaces = await FaceServiceHelper.DetectAsync(
                        this.ImageUrl,
                        returnFaceId: true,
                        returnFaceLandmarks: detectFaceLandmarks,
                        returnFaceAttributes: detectFaceAttributes ? DefaultFaceAttributeTypes : null);
                }
                else if (this.GetImageStreamCallback != null)
                {
                    this.DetectedFaces = await FaceServiceHelper.DetectAsync(
                        this.GetImageStreamCallback,
                        returnFaceId: true,
                        returnFaceLandmarks: detectFaceLandmarks,
                        returnFaceAttributes: detectFaceAttributes ? DefaultFaceAttributeTypes : null);
                }

                if (this.FilterOutSmallFaces)
                {
                    this.DetectedFaces = this.DetectedFaces.Where(f => CoreUtil.IsFaceBigEnoughForDetection(f.FaceRectangle.Height, this.DecodedImageHeight));
                }
            }
            catch (Exception e)
            {
                ErrorTrackingHelper.TrackException(e, "Face API DetectAsync error");

                this.DetectedFaces = Enumerable.Empty<Face>();

                if (this.ShowDialogOnFaceApiErrors)
                {
                    // Surface the error inline instead of showing the generic error dialog.
                    ErrorMessage = e.Message + " - Face Detection Failed";
                }
            }
            finally
            {
                this.OnFaceDetectionCompleted();
            }
        }
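A usage sketch combining the two methods above (the ImageAnalyzer instantiation and the URL are hypothetical; the member names are the ones shown in these examples):

// Usage sketch under the assumptions stated above.
var analyzer = new ImageAnalyzer();                   // hypothetical instantiation
analyzer.ImageUrl = "https://example.com/photo.jpg";  // or set GetImageStreamCallback instead

await analyzer.DetectFacesAsync(detectFaceAttributes: true, detectFaceLandmarks: false);
await analyzer.DetectEmotionAsync();

foreach (var face in analyzer.DetectedFaces)
{
    // Pair each detected face with its emotion result, as the later examples do.
    var emotion = CoreUtil.FindEmotionForFace(face, analyzer.DetectedEmotion);
}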
Example #4
        /// <summary>
        /// Calls emotion detection for every recognized face.
        /// </summary>
        /// <returns></returns>
        public async Task DetectEmotionWithRectanglesAsync()
        {
            try
            {
                var rectangles = new List<Rectangle>();
                foreach (var f in this.DetectedFaces)
                {
                    Rectangle r = new Rectangle()
                    {
                        Top = f.FaceRectangle.Top, Height = f.FaceRectangle.Height, Left = f.FaceRectangle.Left, Width = f.FaceRectangle.Width
                    };
                    rectangles.Add(r);
                }
                if (this.ImageUrl != null)
                {
                    this.DetectedEmotion = await EmotionServiceHelper.RecognizeWithFaceRectanglesAsync(this.ImageUrl, rectangles.ToArray());
                }
                else if (this.GetImageStreamCallback != null)
                {
                    this.DetectedEmotion = await EmotionServiceHelper.RecognizeWithFaceRectanglesAsync(await this.GetImageStreamCallback(), rectangles.ToArray());
                }

                if (this.FilterOutSmallFaces)
                {
                    this.DetectedEmotion = this.DetectedEmotion.Where(f => CoreUtil.IsFaceBigEnoughForDetection(f.FaceRectangle.Height, this.DecodedImageHeight));
                }
            }
            catch (Exception e)
            {
                ErrorTrackingHelper.TrackException(e, "Emotion API RecognizeAsync error");

                this.DetectedEmotion = Enumerable.Empty<Emotion>();

                if (this.ShowDialogOnFaceApiErrors)
                {
                    await ErrorTrackingHelper.GenericApiCallExceptionHandler(e, "Emotion detection failed.");
                }
            }
            finally
            {
                this.OnEmotionRecognitionCompleted();
            }
        }
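The rectangle-building loop in the method above could equivalently be written as a single LINQ projection; a minimal sketch:

// Equivalent projection of the foreach loop above.
var rectangles = this.DetectedFaces
    .Select(f => new Rectangle
    {
        Top = f.FaceRectangle.Top,
        Height = f.FaceRectangle.Height,
        Left = f.FaceRectangle.Left,
        Width = f.FaceRectangle.Width
    })
    .ToArray();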
Example #5
        public async Task<List<FaceSendInfo>> FindSimilarPersonWithEmotion()
        {
            var facesInfo = new List<FaceSendInfo>();
            List<SimilarFaceMatch> result = new List<SimilarFaceMatch>();

            //Loop through all detected faces from the previous steps and fill the facesInfo array.
            //For ease of processing in Azure Stream Analytics we create a single-level object.
            foreach (var f in this.DetectedFaces)
            {
                var fsi = new FaceSendInfo();

                //Add emotions
                var e = CoreUtil.FindEmotionForFace(f, this.DetectedEmotion);
                FeedFaceInfo(f, fsi, e);
                //We also send the total number of faces recognized in the picture along with the current face
                fsi.facesNo = this.DetectedFaces.Count();

                Tuple<SimilarPersistedFace, string> similarPersistedFace = await FaceListManager.FindSimilarPersistedFaceAsync(await this.GetImageStreamCallback(), f.FaceId, f.FaceRectangle);

                if (similarPersistedFace != null)
                {
                    result.Add(new SimilarFaceMatch {
                        Face = f, SimilarPersistedFace = similarPersistedFace.Item1
                    });
                    fsi.canid   = similarPersistedFace.Item1.PersistedFaceId.ToString();
                    fsi.canconf = similarPersistedFace.Item1.Confidence;

                    //To also get the name we would need to obtain the Person:
                    //p = await FaceServiceHelper.GetPersonAsync(groupId, can.PersonId);
                    fsi.canname = similarPersistedFace.Item1.PersistedFaceId.ToString();
                }

                facesInfo.Add(fsi);
            }
            SimilarFaceMatches = result;
            return facesInfo;
        }
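FeedFaceInfo is not shown in this listing, but the final example inlines the same flattening, so it presumably copies the face id, attributes, rectangle, and emotion scores onto the FaceSendInfo object. A sketch inferred from that example (field names are taken from the code below; treat the signature as an assumption):

// Sketch inferred from the inline flattening in the final example below.
private static void FeedFaceInfo(Face f, FaceSendInfo fsi, Emotion e)
{
    fsi.faceId = f.FaceId.ToString();
    fsi.age = f.FaceAttributes.Age;
    fsi.gender = f.FaceAttributes.Gender;

    fsi.faceRecHeight = f.FaceRectangle.Height;
    fsi.faceRecLeft = f.FaceRectangle.Left;
    fsi.faceRecTop = f.FaceRectangle.Top;
    fsi.faceRecWidth = f.FaceRectangle.Width;

    fsi.happiness = e.Scores.Happiness;
    fsi.sadness = e.Scores.Sadness;
    // ...and the remaining attribute and emotion fields, as in the final example.
}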
Example #6
        public static Emotion FindFaceClosestToRegion(IEnumerable<Emotion> emotion, FaceRectangle region)
        {
            return emotion?.Where(e => CoreUtil.AreFacesPotentiallyTheSame(e.FaceRectangle, region))
                   .OrderBy(e => Math.Abs(region.Left - e.FaceRectangle.Left) + Math.Abs(region.Top - e.FaceRectangle.Top)).FirstOrDefault();
        }
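A small worked example of the tie-break above (hypothetical coordinates): for a region at Left = 100, Top = 50, a candidate whose rectangle starts at (90, 60) scores |100 - 90| + |50 - 60| = 20, while one at (130, 45) scores |100 - 130| + |50 - 45| = 35, so the first candidate is returned, provided both pass the AreFacesPotentiallyTheSame filter.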
Example #7
        public async Task<List<FaceSendInfo>> IdentifyOrAddPersonWithEmotionsAsync(string groupName, ObservableCollection<IdentifiedFaces> identifiedPersonsIdCollection)
        {
            var facesInfo = new List<FaceSendInfo>();

            //Loop through all detected faces from the previous steps and fill the facesInfo array.
            //For ease of processing in Azure Stream Analytics we create a single-level object.
            foreach (var f in this.DetectedFaces)
            {
                var fsi = new FaceSendInfo();

                //Add emotions
                var e = CoreUtil.FindEmotionForFace(f, this.DetectedEmotion);
                FeedFaceInfo(f, fsi, e);
                //We also send the total number of faces recognized in the picture along with the current face
                fsi.facesNo = this.DetectedFaces.Count();

                facesInfo.Add(fsi);
            }

            //Now we proceed to face recognition/identification.
            //First we create the group if it does not exist.
            try
            {
                var g = await FaceServiceHelper.CreatePersonGroupIfNoGroupExists(groupName);

                groupId = g.PersonGroupId;
            }
            catch (Exception e)
            {
                // Catch errors creating the group so the rest of the flow can continue.
                ErrorTrackingHelper.TrackException(e, "Problem creating group");

                if (this.ShowDialogOnFaceApiErrors)
                {
                    await ErrorTrackingHelper.GenericApiCallExceptionHandler(e, "Problem creating group");
                }
            }


            //We need to find a candidate for every face
            try
            {
                IdentifyResult[] groupResults = await this.IdentifyFacesAsync(groupId);

                //We loop through all faces again in order to find a candidate
                foreach (var f in this.DetectedFaces)
                {
                    bool needToRetrain = true;
                    var  fi            = facesInfo.Where(fin => fin.faceId == f.FaceId.ToString()).FirstOrDefault();
                    var  newPersonID   = Guid.NewGuid();

                    if (groupResults != null && groupResults.Where(gr => gr.FaceId == f.FaceId).Any() && groupResults.Where(gr => gr.FaceId == f.FaceId).FirstOrDefault().Candidates.Any())
                    {
                        var candidates = groupResults.Where(gr => gr.FaceId == f.FaceId).FirstOrDefault().Candidates.OrderByDescending(ca => ca.Confidence);

                        Person p   = new Person();
                        var    can = candidates.FirstOrDefault();

                        //If we have sufficient confidence, we add Face for person
                        if (can.Confidence >= SettingsHelper.Instance.Confidence)
                        {
                            fi.canid   = can.PersonId.ToString();
                            fi.canconf = can.Confidence;

                            //In order to get also name we need to obtain Person
                            p = await FaceServiceHelper.GetPersonAsync(groupId, can.PersonId);

                            fi.canname = p.Name;

                            var identifiedPersonFromList = identifiedPersonsIdCollection.Where(ip => ip.Id == can.PersonId.ToString()).FirstOrDefault();

                            //Check whether we have added too many photos lately; it is not necessary to add a photo for a face every time
                            if (identifiedPersonFromList == null)
                            {
                                await AddFaceToPerson(f, p, can.PersonId);
                            }
                            else if (identifiedPersonFromList.NumOfAddedPhotosInLastPeriod < SettingsHelper.Instance.NumberOfPhotoAddsInPeriod)
                            {
                                await AddFaceToPerson(f, p, can.PersonId);

                                identifiedPersonFromList.NumOfAddedPhotosInLastPeriod++;
                            }
                            else if ((DateTime.Now - identifiedPersonFromList.FirstPhotoAddedInLastPeriod).Hours > SettingsHelper.Instance.PhotoAddPeriodSize)
                            {
                                identifiedPersonFromList.NumOfAddedPhotosInLastPeriod = 1;
                                identifiedPersonFromList.FirstPhotoAddedInLastPeriod  = DateTime.Now;
                                await AddFaceToPerson(f, p, can.PersonId);
                            }
                            else
                            {
                                needToRetrain = false;
                            }
                        }
                        else
                        {
                            //If confidence is not sufficient we also need to check whether there is a similar face; if not, create a new person
                            await CreatePrsonIfNoSimilarFaceExistsAsync(facesInfo, newPersonID, f);
                        }
                    }
                    else
                    {
                        //If there is no candidate we also need to check whether there is a similar face; if not, create a new person
                        await CreatePrsonIfNoSimilarFaceExistsAsync(facesInfo, newPersonID, f);
                    }
                    try
                    {
                        //We need to train after operations on the group (addition of a photo, person, etc.)
                        if (needToRetrain)
                        {
                            await FaceServiceHelper.TrainPersonGroupAsync(groupId);
                        }
                    }
                    catch (Exception e)
                    {
                        // Catch error with training of group
                        ErrorTrackingHelper.TrackException(e, "Problem training group");

                        if (this.ShowDialogOnFaceApiErrors)
                        {
                            await ErrorTrackingHelper.GenericApiCallExceptionHandler(e, "Problem training group");
                        }
                    }


                    //Handle the identified persons collection to which we locally save every identified person
                    if (!identifiedPersonsIdCollection.Where(ip => ip.Id == fi.canid).Any())
                    {
                        identifiedPersonsIdCollection.Add(new IdentifiedFaces()
                        {
                            Id = fi.canid
                        });
                    }

                    //Otherwise increase the counter of identifications
                    else
                    {
                        identifiedPersonsIdCollection.Where(ip => ip.Id == fi.canid).FirstOrDefault().NumberOfIdentifications++;
                    }

                    //Find faces which were wrongly learned (small number of identifications)
                    var tbd = new List<IdentifiedFaces>();
                    foreach (var ip in identifiedPersonsIdCollection)
                    {
                        if (ip.NumberOfIdentifications <= SettingsHelper.Instance.NeededFaceIdentNum && (ip.CreatedAt.AddSeconds(SettingsHelper.Instance.DeleteWindow) < DateTime.Now))
                        {
                            var    g    = (await FaceServiceHelper.GetPersonGroupsAsync()).Where(gr => gr.Name == groupName).FirstOrDefault();
                            Person pers = await FaceServiceHelper.GetPersonAsync(g.PersonGroupId, new Guid(ip.Id));

                            //If we saved an insufficient number of faces, then delete
                            if (pers.PersistedFaceIds.Length <= SettingsHelper.Instance.NeededFaceIdentNum)
                            {
                                await FaceServiceHelper.DeletePersonAsync(g.PersonGroupId, pers.PersonId);

                                string similarFaceId = "";
                                using (var db = new KioskDBContext())
                                {
                                    var sfToDelete = db.SimilarFaces.Where(sf => sf.PersonId == pers.PersonId.ToString()).FirstOrDefault();
                                    similarFaceId = sfToDelete.FaceId.ToString();
                                    db.SimilarFaces.Remove(sfToDelete);
                                    db.SaveChanges(); // persist the removal before the context is disposed
                                }

                                await FaceListManager.DeleteFaceFromFaceList(similarFaceId);

                                await FaceServiceHelper.TrainPersonGroupAsync(g.PersonGroupId);

                                tbd.Add(ip);
                            }
                        }
                    }


                    foreach (var iptodelete in tbd)
                    {
                        identifiedPersonsIdCollection.Remove(iptodelete);
                    }
                }
            }
            catch (Exception e)
            {
                // Catch general Cognitive Services errors
                ErrorTrackingHelper.TrackException(e, "Problem with cognitive services");

                if (this.ShowDialogOnFaceApiErrors)
                {
                    await ErrorTrackingHelper.GenericApiCallExceptionHandler(e, "Problem with cognitive services");
                }
            }
            return facesInfo;
        }
Example #8
        public PhotoEvent(ImageAnalyzer capture)
        {
            LocalTime = DateTime.Now;

            List<FaceInfo> faceInfoList = new List<FaceInfo>();

            if (capture.DetectedFaces != null)
            {
                foreach (var detectedFace in capture.DetectedFaces)
                {
                    FaceInfo faceInfo = new FaceInfo();

                    // Check if we have age/gender for this face.
                    if (detectedFace.FaceAttributes != null)
                    {
                        faceInfo.AgeGenderInfo = new AgeGenderInfo {
                            Age = detectedFace.FaceAttributes.Age, Gender = detectedFace.FaceAttributes.Gender
                        };
                    }

                    // Check if we identified this face. If so send the name along.
                    if (capture.IdentifiedPersons != null)
                    {
                        var matchingPerson = capture.IdentifiedPersons.FirstOrDefault(p => p.FaceId == detectedFace.FaceId);
                        if (matchingPerson != null)
                        {
                            faceInfo.Name = matchingPerson.Person.Name;
                        }
                    }

                    // Check if we have emotion for this face. If so send it along.
                    if (capture.DetectedEmotion != null)
                    {
                        Emotion matchingEmotion = CoreUtil.FindFaceClosestToRegion(capture.DetectedEmotion, detectedFace.FaceRectangle);
                        if (matchingEmotion != null)
                        {
                            faceInfo.Emotion = matchingEmotion.Scores;
                        }
                    }

                    // Check if we have a unique Id for this face. If so send it along.
                    if (capture.SimilarFaceMatches != null)
                    {
                        var matchingPerson = capture.SimilarFaceMatches.FirstOrDefault(p => p.Face.FaceId == detectedFace.FaceId);
                        if (matchingPerson != null)
                        {
                            faceInfo.UniqueId = matchingPerson.SimilarPersistedFace.PersistedFaceId.ToString("N").Substring(0, 4);
                        }
                    }

                    faceInfoList.Add(faceInfo);
                }
            }
            else if (capture.DetectedEmotion != null)
            {
                // If we are here we only have emotion. No age/gender or id.
                faceInfoList.AddRange(capture.DetectedEmotion.Select(emotion => new FaceInfo {
                    Emotion = emotion.Scores
                }));
            }

            this.FaceInfo = faceInfoList.ToArray();
        }
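A usage sketch for the constructor above, assuming the analyzer has already run the detection calls shown earlier in this listing (the instantiation is hypothetical):

// Usage sketch: snapshot the current analysis results into a PhotoEvent.
var analyzer = new ImageAnalyzer();                 // hypothetical instantiation
await analyzer.DetectFacesAsync(detectFaceAttributes: true);
await analyzer.DetectEmotionAsync();
var photoEvent = new PhotoEvent(analyzer);          // captures faces, names, emotions and unique ids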
        public async Task<List<FaceSendInfo>> IdentifyOrAddPersonWithEmotionsAsync(string groupName, double confidence)
        {
            var time = DateTime.Now;
            //We also add emotions
            var facesInfo = new List<FaceSendInfo>();

            foreach (var f in this.DetectedFaces)
            {
                var fsi = new FaceSendInfo();
                var e   = CoreUtil.FindEmotionForFace(f, this.DetectedEmotion);

                fsi.faceId = f.FaceId.ToString();
                fsi.age    = f.FaceAttributes.Age;

                fsi.faceRecHeight = f.FaceRectangle.Height;
                fsi.faceRecLeft   = f.FaceRectangle.Left;
                fsi.faceRecTop    = f.FaceRectangle.Top;
                fsi.faceRecWidth  = f.FaceRectangle.Width;

                fsi.gender = f.FaceAttributes.Gender;

                fsi.smile = f.FaceAttributes.Smile;

                fsi.beard     = f.FaceAttributes.FacialHair.Beard;
                fsi.moustache = f.FaceAttributes.FacialHair.Moustache;
                fsi.sideburns = f.FaceAttributes.FacialHair.Sideburns;

                fsi.glasses = f.FaceAttributes.Glasses.ToString();

                fsi.headYaw   = f.FaceAttributes.HeadPose.Yaw;
                fsi.headRoll  = f.FaceAttributes.HeadPose.Roll;
                fsi.headPitch = f.FaceAttributes.HeadPose.Pitch;

                fsi.anger     = e.Scores.Anger;
                fsi.contempt  = e.Scores.Contempt;
                fsi.disgust   = e.Scores.Disgust;
                fsi.fear      = e.Scores.Fear;
                fsi.happiness = e.Scores.Happiness;
                fsi.neutral   = e.Scores.Neutral;
                fsi.sadness   = e.Scores.Sadness;
                fsi.surprise  = e.Scores.Surprise;

                fsi.timeStamp = time;
                fsi.facesNo   = this.DetectedFaces.Count();

                facesInfo.Add(fsi);
            }
            var newPersonID = Guid.NewGuid();

            //Move to initialization
            try
            {
                var g = await FaceServiceHelper.CreatePersonGroupIfNoGroupExists(groupName);

                groupId = g.PersonGroupId;
            }
            catch (Exception e)
            {
                // Catch errors creating the group so the rest of the flow can continue.
                ErrorTrackingHelper.TrackException(e, "Problem creating group");

                if (this.ShowDialogOnFaceApiErrors)
                {
                    await ErrorTrackingHelper.GenericApiCallExceptionHandler(e, "Problem creating group");
                }
            }


            List<IdentifiedPerson> ipresult = new List<IdentifiedPerson>();

            // Compute Face Identification and Unique Face Ids
            //We need to map detected faceID with actual personID (Identified face)
            try
            {
                IdentifyResult[] groupResults = await this.IdentifyFacesAsync(groupId);

                foreach (var f in this.DetectedFaces)
                {
                    if (groupResults != null && groupResults.Where(gr => gr.FaceId == f.FaceId).Any() && groupResults.Where(gr => gr.FaceId == f.FaceId).FirstOrDefault().Candidates.Any())
                    {
                        var candidates = groupResults.Where(gr => gr.FaceId == f.FaceId).FirstOrDefault().Candidates.OrderByDescending(ca => ca.Confidence);

                        var    fi = facesInfo.Where(fin => fin.faceId == f.FaceId.ToString()).FirstOrDefault();
                        int    i  = 0;
                        Person p  = new Person();
                        foreach (var can in candidates)
                        {
                            switch (i)
                            {
                            case 0:
                                fi.can1id   = can.PersonId.ToString();
                                fi.can1conf = can.Confidence;
                                p           = await FaceServiceHelper.GetPersonAsync(groupId, can.PersonId);

                                fi.can1name = p.Name;
                                if (can.Confidence >= confidence)
                                {
                                    ipresult.Add(new IdentifiedPerson()
                                    {
                                        Person = p, Confidence = can.Confidence, FaceId = f.FaceId
                                    });
                                    // The Face API allows at most 248 persisted faces per person,
                                    // so remove a random one before adding another at the limit.
                                    if (p.PersistedFaceIds.Length == 248)
                                    {
                                        Guid persistedFaceId = p.PersistedFaceIds.OrderBy(x => Guid.NewGuid()).FirstOrDefault();
                                        await FaceServiceHelper.DeletePersonFaceAsync(groupId, can.PersonId, persistedFaceId);
                                    }
                                    try
                                    {
                                        await FaceServiceHelper.AddPersonFaceAsync(groupId, can.PersonId, await this.GetImageStreamCallback(), "", f.FaceRectangle);
                                    }
                                    catch (Exception e)
                                    {
                                        // Catch errors adding the face so we can continue with the remaining faces.
                                        ErrorTrackingHelper.TrackException(e, "Problem adding face to group");

                                        if (this.ShowDialogOnFaceApiErrors)
                                        {
                                            await ErrorTrackingHelper.GenericApiCallExceptionHandler(e, "Problem adding face to group");
                                        }
                                    }
                                }
                                else
                                {
                                    //Create a new person if confidence is not sufficient
                                    SimilarFaceMatch result = await GetSimilarFace(f);

                                    await CreatePerson(facesInfo, newPersonID, f);
                                }
                                break;

                            case 1:
                                fi.can2id   = can.PersonId.ToString();
                                fi.can2conf = can.Confidence;
                                p           = await FaceServiceHelper.GetPersonAsync(groupId, can.PersonId);

                                fi.can2name = p.Name;
                                break;

                            case 2:
                                fi.can3id   = can.PersonId.ToString();
                                fi.can3conf = can.Confidence;
                                p           = await FaceServiceHelper.GetPersonAsync(groupId, can.PersonId);

                                fi.can3name = p.Name;
                                break;

                            case 3:
                                fi.can4id   = can.PersonId.ToString();
                                fi.can4conf = can.Confidence;
                                p           = await FaceServiceHelper.GetPersonAsync(groupId, can.PersonId);

                                fi.can4name = p.Name;
                                break;
                            }
                            i++;
                        }
                    }

                    else
                    {
                        //If there is no candidate we need to create a new person
                        await CreatePerson(facesInfo, newPersonID, f);
                    }
                    try
                    {
                        await FaceServiceHelper.TrainPersonGroupAsync(groupId);
                    }
                    catch (Exception e)
                    {
                        // Catch error with training of group
                        ErrorTrackingHelper.TrackException(e, "Problem training group");

                        if (this.ShowDialogOnFaceApiErrors)
                        {
                            await ErrorTrackingHelper.GenericApiCallExceptionHandler(e, "Problem training group");
                        }
                    }
                }
            }
            catch (Exception e)
            {
                // Catch general Cognitive Services errors
                ErrorTrackingHelper.TrackException(e, "Problem with cognitive services");

                if (this.ShowDialogOnFaceApiErrors)
                {
                    await ErrorTrackingHelper.GenericApiCallExceptionHandler(e, "Problem with cognitive services");
                }
            }
            return facesInfo;
        }
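A usage sketch for the identification flow above (the analyzer instantiation, group name, and confidence value are hypothetical):

// Usage sketch under the assumptions stated above.
var analyzer = new ImageAnalyzer(); // hypothetical instantiation
await analyzer.DetectFacesAsync(detectFaceAttributes: true);
await analyzer.DetectEmotionAsync();

var facesInfo = await analyzer.IdentifyOrAddPersonWithEmotionsAsync("kiosk-visitors", confidence: 0.5);
foreach (var fi in facesInfo)
{
    // can1name/can1conf are the top-candidate fields filled in above.
    Console.WriteLine($"{fi.faceId}: {fi.can1name} ({fi.can1conf})");
}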