Example #1
0
        /// <summary>
        /// Looks up each detected face against the persisted face list and records any matches
        /// in SimilarFaceMatches. Per-face failures are logged and skipped so one bad face
        /// does not abort the whole pass.
        /// </summary>
        public async Task FindSimilarPersistedFacesAsync()
        {
            this.SimilarFaceMatches = Enumerable.Empty <SimilarFaceMatch>();

            // Nothing to do if face detection never ran or found nothing.
            if (this.DetectedFaces == null || !this.DetectedFaces.Any())
            {
                return;
            }

            var matches = new List <SimilarFaceMatch>();

            foreach (DetectedFace face in this.DetectedFaces)
            {
                try
                {
                    // Prefer the URL-based lookup when available; otherwise use the stream callback.
                    SimilarFace persistedMatch = this.ImageUrl != null
                        ? await FaceListManager.FindSimilarPersistedFaceAsync(ImageUrl, face.FaceId.GetValueOrDefault(), face)
                        : await FaceListManager.FindSimilarPersistedFaceAsync(this.GetImageStreamCallback, face.FaceId.GetValueOrDefault(), face);

                    if (persistedMatch != null)
                    {
                        matches.Add(new SimilarFaceMatch
                        {
                            Face = face,
                            SimilarPersistedFace = persistedMatch
                        });
                    }
                }
                catch (Exception e)
                {
                    ErrorTrackingHelper.TrackException(e, "FaceListManager.FindSimilarPersistedFaceAsync error");

                    if (this.ShowDialogOnFaceApiErrors)
                    {
                        await ErrorTrackingHelper.GenericApiCallExceptionHandler(e, "Failure finding similar faces");
                    }
                }
            }

            this.SimilarFaceMatches = matches;
        }
Example #2
0
        /// <summary>
        /// Runs Computer Vision analysis on the current image (URL or stream callback),
        /// optionally requesting celebrity details, and stores the result in AnalysisResult.
        /// Always raises ComputerVisionAnalysisCompleted when finished.
        /// </summary>
        public async Task AnalyzeImageAsync(bool detectCelebrities = false, IList <VisualFeatureTypes> visualFeatures = null)
        {
            try
            {
                // Fall back to the standard feature set when the caller did not specify one.
                visualFeatures = visualFeatures ?? DefaultVisualFeatures;

                // Celebrity recognition is an optional "details" add-on to the analysis call.
                List <Details> details = detectCelebrities ? new List <Details> { Details.Celebrities } : null;

                if (this.ImageUrl != null)
                {
                    this.AnalysisResult = await VisionServiceHelper.AnalyzeImageAsync(this.ImageUrl, visualFeatures, details);
                }
                else if (this.GetImageStreamCallback != null)
                {
                    this.AnalysisResult = await VisionServiceHelper.AnalyzeImageAsync(this.GetImageStreamCallback, visualFeatures, details);
                }
            }
            catch (Exception e)
            {
                ErrorTrackingHelper.TrackException(e, "Vision API AnalyzeImageAsync error");

                // Surface an empty (non-null) result so downstream bindings don't break.
                this.AnalysisResult = new ImageAnalysis();

                if (this.ShowDialogOnFaceApiErrors)
                {
                    await ErrorTrackingHelper.GenericApiCallExceptionHandler(e, "Vision API failed.");
                }
            }
            finally
            {
                this.ComputerVisionAnalysisCompleted?.Invoke(this, EventArgs.Empty);
            }
        }
        /// <summary>
        /// Identifies the given faces against a single person group, returning an empty
        /// list instead of throwing when the service call fails.
        /// </summary>
        /// <param name="personGroupId">The person group to identify against.</param>
        /// <param name="detectedFaceIds">Face IDs from a previous detection call.</param>
        /// <returns>The identification results, or an empty list on failure.</returns>
        private async Task <IList <IdentifyResult> > GetFaceIdentificationResultsOrDefaultAsync(string personGroupId, Guid[] detectedFaceIds)
        {
            try
            {
                return await FaceServiceHelper.IdentifyAsync(personGroupId, detectedFaceIds);
            }
            catch (Exception e)
            {
                // Catch errors with individual groups so we can continue looping through all
                // groups. Maybe an answer will come from another one.
                ErrorTrackingHelper.TrackException(e, "Face API IdentifyAsync error");

                if (this.ShowDialogOnFaceApiErrors)
                {
                    await ErrorTrackingHelper.GenericApiCallExceptionHandler(e, "Failure identifying faces");
                }

                return new List <IdentifyResult>();
            }
        }
Example #4
0
        /// <summary>
        /// Calling emotion detect for every recognized face
        /// </summary>
        /// <returns></returns>
        public async Task DetectEmotionWithRectanglesAsync()
        {
            try
            {
                var rectangles = new List <Rectangle>();
                foreach (var f in this.DetectedFaces)
                {
                    Rectangle r = new Rectangle()
                    {
                        Top = f.FaceRectangle.Top, Height = f.FaceRectangle.Height, Left = f.FaceRectangle.Left, Width = f.FaceRectangle.Width
                    };
                    rectangles.Add(r);
                }
                if (this.ImageUrl != null)
                {
                    this.DetectedEmotion = await EmotionServiceHelper.RecognizeWithFaceRectanglesAsync(this.ImageUrl, rectangles.ToArray());
                }
                else if (this.GetImageStreamCallback != null)
                {
                    this.DetectedEmotion = await EmotionServiceHelper.RecognizeWithFaceRectanglesAsync(await this.GetImageStreamCallback(), rectangles.ToArray());
                }

                if (this.FilterOutSmallFaces)
                {
                    this.DetectedEmotion = this.DetectedEmotion.Where(f => CoreUtil.IsFaceBigEnoughForDetection(f.FaceRectangle.Height, this.DecodedImageHeight));
                }
            }
            catch (Exception e)
            {
                ErrorTrackingHelper.TrackException(e, "Emotion API RecognizeAsync error");

                this.DetectedEmotion = Enumerable.Empty <Emotion>();

                if (this.ShowDialogOnFaceApiErrors)
                {
                    await ErrorTrackingHelper.GenericApiCallExceptionHandler(e, "Emotion detection failed.");
                }
            }
            finally
            {
                this.OnEmotionRecognitionCompleted();
            }
        }
Example #5
0
        /// <summary>
        /// Detects faces in the current image (URL or stream callback) and stores them in
        /// DetectedFaces, optionally filtering out faces deemed too small. Always raises
        /// OnFaceDetectionCompleted when done.
        /// </summary>
        /// <param name="detectFaceAttributes">
        /// NOTE(review): currently ignored — both branches always request
        /// DefaultFaceAttributeTypes (the commented-out line in the original shows the
        /// intended conditional). Confirm whether this hard-coding is deliberate.
        /// </param>
        public async Task DetectFacesAsync(bool detectFaceAttributes = false)
        {
            try
            {
                if (this.ImageUrl != null)
                {
                    this.DetectedFaces = await FaceServiceHelper.DetectAsync(
                        this.ImageUrl,
                        returnFaceId : true,
                        returnFaceLandmarks : false,
                        returnFaceAttributes : DefaultFaceAttributeTypes);
                }
                else if (this.GetImageStreamCallback != null)
                {
                    this.DetectedFaces = await FaceServiceHelper.DetectAsync(
                        await this.GetImageStreamCallback(),
                        returnFaceId : true,
                        returnFaceLandmarks : false,
                        returnFaceAttributes : DefaultFaceAttributeTypes);
                }

                if (this.FilterOutSmallFaces)
                {
                    // Keep only faces large enough, relative to the decoded image height, to be useful.
                    this.DetectedFaces = this.DetectedFaces.Where(f => CoreUtil.IsFaceBigEnoughForDetection(f.FaceRectangle.Height, this.DecodedImageHeight));
                }
            }
            catch (Exception e)
            {
                ErrorTrackingHelper.TrackException(e, "Face API DetectAsync error");

                // Surface an empty (non-null) sequence so consumers don't have to null-check.
                this.DetectedFaces = Enumerable.Empty <Face>();

                if (this.ShowDialogOnFaceApiErrors)
                {
                    await ErrorTrackingHelper.GenericApiCallExceptionHandler(e, "Face detection failed.");
                }
            }
            finally
            {
                this.OnFaceDetectionCompleted();
            }
        }
        /// <summary>
        /// Fetches the person groups matching this app's user-data filter, keeping only those
        /// created with the latest recognition model. Returns an empty sequence on failure.
        /// </summary>
        private async Task <IEnumerable <PersonGroup> > GetPersonGroupsAsync()
        {
            try
            {
                var allGroups = await FaceServiceHelper.ListPersonGroupsAsync(PeopleGroupsUserDataFilter);

                // Groups created with older recognition models are not comparable; skip them.
                return allGroups.Where(x => x.RecognitionModel.Equals(FaceServiceHelper.LatestRecognitionModelName, StringComparison.OrdinalIgnoreCase));
            }
            catch (Exception e)
            {
                ErrorTrackingHelper.TrackException(e, "Face API GetPersonGroupsAsync error");

                if (this.ShowDialogOnFaceApiErrors)
                {
                    await ErrorTrackingHelper.GenericApiCallExceptionHandler(e, "Failure getting PersonGroups");
                }

                return Enumerable.Empty <PersonGroup>();
            }
        }
Example #7
0
        /// <summary>
        /// Scans the current image data for QR codes via QRCodeProcessHelper.
        /// </summary>
        /// <param name="detectFaceAttributes">Unused; retained for signature compatibility.</param>
        /// <param name="detectFaceLandmarks">Unused; retained for signature compatibility.</param>
        public async Task DetectQRCodesAsync(bool detectFaceAttributes = false, bool detectFaceLandmarks = false)
        {
            try
            {
                // Fix: the original passed DecodedImageWidth for BOTH dimensions; the third
                // argument is the image height.
                await QRCodeProcessHelper.IdentifyQRCode(this.Data, this.DecodedImageWidth, this.DecodedImageHeight);
            }
            catch (Exception e)
            {
                // Fix: track under a QR-specific message instead of the copy-pasted
                // "Face API DetectAsync error" string.
                ErrorTrackingHelper.TrackException(e, "QRCodeProcessHelper.IdentifyQRCode error");

                // NOTE(review): clearing DetectedFaces on a QR failure looks like copy-paste
                // residue from DetectFacesAsync — confirm whether this reset is intended.
                this.DetectedFaces = Enumerable.Empty <Face>();

                if (this.ShowDialogOnFaceApiErrors)
                {
                    await ErrorTrackingHelper.GenericApiCallExceptionHandler(e, "QR Code detection failed.");
                }
            }
            // The original empty finally block was removed; it had no effect.
        }
Example #8
0
        /// <summary>
        /// Runs emotion recognition over the current image (URL or stream callback) and
        /// stores the results in DetectedEmotion, optionally filtering out faces that are
        /// too small. Always raises OnEmotionRecognitionCompleted when done.
        /// </summary>
        public async Task DetectEmotionAsync()
        {
            try
            {
                if (this.ImageUrl != null)
                {
                    // URL-based image: let the service fetch it directly.
                    this.DetectedEmotion = await EmotionServiceHelper.RecognizeAsync(this.ImageUrl);
                }
                else if (this.GetImageStreamCallback != null)
                {
                    // Stream-based image: hand the callback to the helper.
                    this.DetectedEmotion = await EmotionServiceHelper.RecognizeAsync(this.GetImageStreamCallback);
                }

                if (this.FilterOutSmallFaces)
                {
                    // Keep only faces large enough, relative to the decoded image height, to be reliable.
                    this.DetectedEmotion = this.DetectedEmotion.Where(
                        f => CoreUtil.IsFaceBigEnoughForDetection(f.FaceRectangle.Height, this.DecodedImageHeight));
                }
            }
            catch (Exception e)
            {
                // Record the failure and surface an empty result so consumers never see null.
                ErrorTrackingHelper.TrackException(e, "Emotion API RecognizeAsync error");

                this.DetectedEmotion = Enumerable.Empty <Emotion>();

                if (this.ShowDialogOnFaceApiErrors)
                {
                    await ErrorTrackingHelper.GenericApiCallExceptionHandler(e, "Emotion detection failed.");
                }
            }
            finally
            {
                this.OnEmotionRecognitionCompleted();
            }
        }
Example #9
0
        /// <summary>
        /// Runs Vision API analysis (ProjectOxford contract) on the current image, optionally
        /// requesting celebrity details, and stores the result in AnalysisResult. Always
        /// raises ComputerVisionAnalysisCompleted when finished.
        /// </summary>
        public async Task AnalyzeImageAsync(bool detectCelebrities = false, IEnumerable <VisualFeature> visualFeatures = null)
        {
            try
            {
                if (visualFeatures == null)
                {
                    visualFeatures = DefaultVisualFeatures;
                }

                // Celebrity recognition is an optional "details" add-on to the analysis call.
                string[] details = detectCelebrities ? new string[] { "Celebrities" } : null;

                if (this.ImageUrl != null)
                {
                    this.AnalysisResult = await VisionServiceHelper.AnalyzeImageAsync(
                        this.ImageUrl,
                        visualFeatures,
                        details);
                }
                else if (this.GetImageStreamCallback != null)
                {
                    this.AnalysisResult = await VisionServiceHelper.AnalyzeImageAsync(
                        this.GetImageStreamCallback,
                        visualFeatures,
                        details);
                }
            }
            catch (Exception e)
            {
                // Fix: this catch previously swallowed the exception without any telemetry,
                // unlike every other API wrapper in this file.
                ErrorTrackingHelper.TrackException(e, "Vision API AnalyzeImageAsync error");

                this.AnalysisResult = new Microsoft.ProjectOxford.Vision.Contract.AnalysisResult();

                if (this.ShowDialogOnFaceApiErrors)
                {
                    await ErrorTrackingHelper.GenericApiCallExceptionHandler(e, "Vision API failed.");
                }
            }
            finally
            {
                this.ComputerVisionAnalysisCompleted?.Invoke(this, EventArgs.Empty);
            }
        }
Example #10
0
        /// <summary>
        /// Requests a natural-language description of the current image (URL or stream
        /// callback) from the Vision API and stores it in AnalysisResult.
        /// </summary>
        public async Task DescribeAsync()
        {
            try
            {
                if (this.ImageUrl != null)
                {
                    this.AnalysisResult = await VisionServiceHelper.DescribeAsync(this.ImageUrl);
                }
                else if (this.GetImageStreamCallback != null)
                {
                    this.AnalysisResult = await VisionServiceHelper.DescribeAsync(this.GetImageStreamCallback);
                }
            }
            catch (Exception e)
            {
                // Fix: record telemetry for the failure — this catch previously swallowed the
                // exception silently, unlike the sibling API wrappers in this file.
                ErrorTrackingHelper.TrackException(e, "Vision API DescribeAsync error");

                this.AnalysisResult = new Microsoft.ProjectOxford.Vision.Contract.AnalysisResult();

                if (this.ShowDialogOnFaceApiErrors)
                {
                    await ErrorTrackingHelper.GenericApiCallExceptionHandler(e, "Vision API failed.");
                }
            }
        }
        /// <summary>
        /// Runs Computer Vision analysis on the current image with caller-supplied details
        /// and visual features, storing the outcome in AnalysisResult.
        /// </summary>
        /// <param name="details">Optional analysis details to request (e.g. celebrities).</param>
        /// <param name="visualFeatures">Visual features to extract; defaults to DefaultVisualFeatures.</param>
        public async Task AnalyzeImageAsync(IList <Details> details = null, IList <VisualFeatureTypes> visualFeatures = null)
        {
            try
            {
                // Fall back to the standard feature set when the caller did not specify one.
                visualFeatures = visualFeatures ?? DefaultVisualFeatures;

                if (this.ImageUrl != null)
                {
                    this.AnalysisResult = await VisionServiceHelper.AnalyzeImageAsync(this.ImageUrl, visualFeatures, details);
                }
                else if (this.GetImageStreamCallback != null)
                {
                    this.AnalysisResult = await VisionServiceHelper.AnalyzeImageAsync(this.GetImageStreamCallback, visualFeatures, details);
                }
            }
            catch (Exception e)
            {
                ErrorTrackingHelper.TrackException(e, "Vision API AnalyzeImageAsync error");

                // Surface an empty (non-null) result so downstream bindings don't break.
                this.AnalysisResult = new ImageAnalysis();

                if (this.ShowDialogOnFaceApiErrors)
                {
                    await ErrorTrackingHelper.GenericApiCallExceptionHandler(e, "Vision API failed.");
                }
            }
        }
Example #12
0
        /// <summary>
        /// Adds the given face to the person, first evicting a randomly chosen persisted face
        /// when the person is already at the service's persisted-face capacity.
        /// </summary>
        /// <param name="f">The detected face whose rectangle is used for the add.</param>
        /// <param name="p">The person record (used to inspect persisted face count).</param>
        /// <param name="personId">The person's ID within the current group.</param>
        private async Task AddFaceToPerson(Face f, Person p, Guid personId)
        {
            // Maximum faces we are able to persist per person before adds start failing.
            const int MaxPersistedFaces = 248;

            // Fix: use >= instead of == so an over-capacity person (e.g. from a concurrent
            // writer) still gets trimmed instead of failing the add below.
            if (p.PersistedFaceIds.Length >= MaxPersistedFaces)
            {
                // Evict one persisted face chosen at random to make room.
                Guid persistedFaceId = p.PersistedFaceIds.OrderBy(x => Guid.NewGuid()).FirstOrDefault();
                await FaceServiceHelper.DeletePersonFaceAsync(groupId, personId, persistedFaceId);
            }
            try
            {
                await FaceServiceHelper.AddPersonFaceAsync(groupId, personId, await this.GetImageStreamCallback(), "", f.FaceRectangle);
            }
            catch (Exception e)
            {
                // Catch errors with individual groups so we can continue looping through all
                // groups. Maybe an answer will come from another one.
                ErrorTrackingHelper.TrackException(e, "Problem adding face to group");

                if (this.ShowDialogOnFaceApiErrors)
                {
                    await ErrorTrackingHelper.GenericApiCallExceptionHandler(e, "Problem adding face to group");
                }
            }
        }
Example #13
0
        /// <summary>
        /// Identifies the people in DetectedFaces by querying every available person group,
        /// keeping only the highest-confidence match per person across groups. Results are
        /// stored in IdentifiedPersons; OnFaceRecognitionCompleted is always raised at the end.
        /// </summary>
        public async Task IdentifyFacesAsync()
        {
            this.IdentifiedPersons = Enumerable.Empty <IdentifiedPerson>();

            // Face IDs produced by a prior detection pass; null if detection never ran.
            Guid[] detectedFaceIds = this.DetectedFaces?.Select(f => f.FaceId).ToArray();
            if (detectedFaceIds != null && detectedFaceIds.Any())
            {
                List <IdentifiedPerson> result = new List <IdentifiedPerson>();

                IEnumerable <PersonGroup> personGroups = Enumerable.Empty <PersonGroup>();
                try
                {
                    personGroups = await FaceServiceHelper.GetPersonGroupsAsync(PeopleGroupsUserDataFilter);
                }
                catch (Exception e)
                {
                    // A failure here leaves personGroups empty, so the loop below is a no-op.
                    ErrorTrackingHelper.TrackException(e, "Face API GetPersonGroupsAsync error");

                    if (this.ShowDialogOnFaceApiErrors)
                    {
                        await ErrorTrackingHelper.GenericApiCallExceptionHandler(e, "Failure getting PersonGroups");
                    }
                }

                foreach (var group in personGroups)
                {
                    try
                    {
                        IdentifyResult[] groupResults = await FaceServiceHelper.IdentifyAsync(group.PersonGroupId, detectedFaceIds);

                        foreach (var match in groupResults)
                        {
                            if (!match.Candidates.Any())
                            {
                                continue;
                            }

                            // Only the top candidate (index 0) of each match is considered.
                            Person person = await FaceServiceHelper.GetPersonAsync(group.PersonGroupId, match.Candidates[0].PersonId);

                            IdentifiedPerson alreadyIdentifiedPerson = result.FirstOrDefault(p => p.Person.PersonId == match.Candidates[0].PersonId);
                            if (alreadyIdentifiedPerson != null)
                            {
                                // We already tagged this person in another group. Replace the existing entry if this new match's confidence is higher.
                                if (alreadyIdentifiedPerson.Confidence < match.Candidates[0].Confidence)
                                {
                                    alreadyIdentifiedPerson.Person     = person;
                                    alreadyIdentifiedPerson.Confidence = match.Candidates[0].Confidence;
                                    alreadyIdentifiedPerson.FaceId     = match.FaceId;
                                }
                            }
                            else
                            {
                                // First time seeing this person — record the match as-is.
                                result.Add(new IdentifiedPerson {
                                    Person = person, Confidence = match.Candidates[0].Confidence, FaceId = match.FaceId
                                });
                            }
                        }
                    }
                    catch (Exception e)
                    {
                        // Catch errors with individual groups so we can continue looping through all groups. Maybe an answer will come from
                        // another one.
                        ErrorTrackingHelper.TrackException(e, "Face API IdentifyAsync error");

                        if (this.ShowDialogOnFaceApiErrors)
                        {
                            await ErrorTrackingHelper.GenericApiCallExceptionHandler(e, "Failure identifying faces");
                        }
                    }
                }

                this.IdentifiedPersons = result;
            }

            this.OnFaceRecognitionCompleted();
        }
Example #14
0
        /// <summary>
        /// Builds a flat FaceSendInfo record (face + emotion) for every detected face, then
        /// identifies each face against the named person group — adding new persons/faces,
        /// retraining the group as needed, and pruning persons that were learned from too few
        /// identifications. Returns the list of FaceSendInfo records.
        /// </summary>
        /// <param name="groupName">Person group to identify against (created if missing).</param>
        /// <param name="identifiedPersonsIdCollection">Local cache of identified person IDs, mutated in place.</param>
        /// <returns>One FaceSendInfo per detected face.</returns>
        public async Task <List <FaceSendInfo> > IdentifyOrAddPersonWithEmotionsAsync(string groupName, ObservableCollection <IdentifiedFaces> identifiedPersonsIdCollection)
        {
            var facesInfo = new List <FaceSendInfo>();

            //Loop thru all detected faces from previous steps and fill the facesInfo array
            //For ease of processing in Azure Stream Analytics we create single level object
            foreach (var f in this.DetectedFaces)
            {
                var fsi = new FaceSendInfo();

                //Add emotions
                var e = CoreUtil.FindEmotionForFace(f, this.DetectedEmotion);
                FeedFaceInfo(f, fsi, e);
                //We send also info on how many faces in total were recognized on the picture with the current face
                fsi.facesNo = this.DetectedFaces.Count();

                facesInfo.Add(fsi);
            }

            //Now we proceed to face recognition/identification
            //First we create group if it does not exist
            try
            {
                var g = await FaceServiceHelper.CreatePersonGroupIfNoGroupExists(groupName);

                groupId = g.PersonGroupId;
            }
            catch (Exception e)
            {
                // Catch errors with individual groups so we can continue looping through all groups. Maybe an answer will come from
                // another one.
                ErrorTrackingHelper.TrackException(e, "Problem creating group");

                if (this.ShowDialogOnFaceApiErrors)
                {
                    await ErrorTrackingHelper.GenericApiCallExceptionHandler(e, "Problem creating group");
                }
            }


            //We need to find candidate for every face
            try
            {
                IdentifyResult[] groupResults = await this.IdentifyFacesAsync(groupId);

                //We loop thru all faces again in order to find a candidate for each
                foreach (var f in this.DetectedFaces)
                {
                    bool needToRetrain = true;
                    // NOTE(review): fi can be null if FaceId formatting ever diverges between
                    // this lookup and the fill loop above — confirm before dereferencing below.
                    var  fi            = facesInfo.Where(fin => fin.faceId == f.FaceId.ToString()).FirstOrDefault();
                    var  newPersonID   = Guid.NewGuid();

                    // Does any identification result for this face carry at least one candidate?
                    if (groupResults != null && groupResults.Where(gr => gr.FaceId == f.FaceId).Any() && groupResults.Where(gr => gr.FaceId == f.FaceId).FirstOrDefault().Candidates.Any())
                    {
                        var candidates = groupResults.Where(gr => gr.FaceId == f.FaceId).FirstOrDefault().Candidates.OrderByDescending(ca => ca.Confidence);

                        Person p   = new Person();
                        var    can = candidates.FirstOrDefault();

                        //If we have sufficient confidence, we add Face for person
                        if (can.Confidence >= SettingsHelper.Instance.Confidence)
                        {
                            fi.canid   = can.PersonId.ToString();
                            fi.canconf = can.Confidence;

                            //In order to get also name we need to obtain Person
                            p = await FaceServiceHelper.GetPersonAsync(groupId, can.PersonId);

                            fi.canname = p.Name;

                            var identifiedPersonFromList = identifiedPersonsIdCollection.Where(ip => ip.Id == can.PersonId.ToString()).FirstOrDefault();

                            //Check whether we did not add too many photos lately; it is not necessary to add a photo for the face every time
                            if (identifiedPersonFromList == null)
                            {
                                // Person not yet in the local cache: always contribute this face.
                                await AddFaceToPerson(f, p, can.PersonId);
                            }
                            else if (identifiedPersonFromList.NumOfAddedPhotosInLastPeriod < SettingsHelper.Instance.NumberOfPhotoAddsInPeriod)
                            {
                                // Still under the per-period photo quota: add and count it.
                                await AddFaceToPerson(f, p, can.PersonId);

                                identifiedPersonFromList.NumOfAddedPhotosInLastPeriod++;
                            }
                            else if ((DateTime.Now - identifiedPersonFromList.FirstPhotoAddedInLastPeriod).Hours > SettingsHelper.Instance.PhotoAddPeriodSize)
                            {
                                // Quota period elapsed: reset the window and add this face.
                                identifiedPersonFromList.NumOfAddedPhotosInLastPeriod = 1;
                                identifiedPersonFromList.FirstPhotoAddedInLastPeriod  = DateTime.Now;
                                await AddFaceToPerson(f, p, can.PersonId);
                            }
                            else
                            {
                                // Nothing was added, so the group does not need retraining for this face.
                                needToRetrain = false;
                            }
                        }
                        else
                        {
                            //if not sufficient confidence we also need to check whether there is a similar face; if not create a new person
                            // NOTE(review): helper name has a typo ("Prson") — defined elsewhere; rename in a coordinated change.
                            await CreatePrsonIfNoSimilarFaceExistsAsync(facesInfo, newPersonID, f);
                        }
                    }
                    else
                    {
                        //if no candidate we also need to check whether there is a similar face; if not create a new person
                        await CreatePrsonIfNoSimilarFaceExistsAsync(facesInfo, newPersonID, f);
                    }
                    try
                    {
                        //We need to train after operation on top of group (addition of photo, person etc.)
                        if (needToRetrain)
                        {
                            await FaceServiceHelper.TrainPersonGroupAsync(groupId);
                        }
                    }
                    catch (Exception e)
                    {
                        // Catch error with training of group
                        ErrorTrackingHelper.TrackException(e, "Problem training group");

                        if (this.ShowDialogOnFaceApiErrors)
                        {
                            await ErrorTrackingHelper.GenericApiCallExceptionHandler(e, "Problem training group");
                        }
                    }


                    //Handle the identified persons collection to which we locally save every identified person
                    if (!identifiedPersonsIdCollection.Where(ip => ip.Id == fi.canid).Any())
                    {
                        identifiedPersonsIdCollection.Add(new IdentifiedFaces()
                        {
                            Id = fi.canid
                        });
                    }

                    //Increase counter of identifications
                    else if (identifiedPersonsIdCollection.Where(ip => ip.Id == fi.canid).Any())
                    {
                        identifiedPersonsIdCollection.Where(ip => ip.Id == fi.canid).FirstOrDefault().NumberOfIdentifications++;
                    }

                    //Find faces which were wrongly learned (small number of identifications)
                    var tbd = new List <IdentifiedFaces>();
                    foreach (var ip in identifiedPersonsIdCollection)
                    {
                        // A person identified too rarely within the delete window is considered a bad registration.
                        if (ip.NumberOfIdentifications <= SettingsHelper.Instance.NeededFaceIdentNum && (ip.CreatedAt.AddSeconds(SettingsHelper.Instance.DeleteWindow) < DateTime.Now))
                        {
                            var    g    = (await FaceServiceHelper.GetPersonGroupsAsync()).Where(gr => gr.Name == groupName).FirstOrDefault();
                            Person pers = await FaceServiceHelper.GetPersonAsync(g.PersonGroupId, new Guid(ip.Id));

                            //if we saved insufficient number of faces then delete
                            if (pers.PersistedFaceIds.Length <= SettingsHelper.Instance.NeededFaceIdentNum)
                            {
                                await FaceServiceHelper.DeletePersonAsync(g.PersonGroupId, pers.PersonId);

                                string similarFaceId = "";
                                using (var db = new KioskDBContext())
                                {
                                    // NOTE(review): db.SaveChanges() is never called, so this Remove is
                                    // not persisted to the database — confirm and fix in a follow-up.
                                    var sfToDelete = db.SimilarFaces.Where(sf => sf.PersonId == pers.PersonId.ToString()).FirstOrDefault();
                                    similarFaceId = sfToDelete.FaceId.ToString();
                                    db.SimilarFaces.Remove(sfToDelete);
                                }

                                await FaceListManager.DeleteFaceFromFaceList(similarFaceId);

                                await FaceServiceHelper.TrainPersonGroupAsync(g.PersonGroupId);

                                tbd.Add(ip);
                            }
                        }
                    }


                    // Remove pruned persons from the local cache (cannot remove while enumerating above).
                    foreach (var iptodelete in tbd)
                    {
                        identifiedPersonsIdCollection.Remove(iptodelete);
                    }
                }
            }
            catch (Exception e)
            {
                // Catch-all for the identification pipeline above.
                ErrorTrackingHelper.TrackException(e, "Problem with cognitive services");

                if (this.ShowDialogOnFaceApiErrors)
                {
                    await ErrorTrackingHelper.GenericApiCallExceptionHandler(e, "Problem with cognitive services");
                }
            }
            return(facesInfo);
        }
        public async Task <List <FaceSendInfo> > IdentifyOrAddPersonWithEmotionsAsync(string groupName, double confidence)
        {
            var time = DateTime.Now;
            //We also add emotions
            var facesInfo = new List <FaceSendInfo>();

            foreach (var f in this.DetectedFaces)
            {
                // --- Continuation of a per-face loop; the method signature, the foreach
                // header (loop variable `f` is a detected face), and locals such as `time`,
                // `facesInfo`, `groupName`, `groupId` and `confidence` are declared above
                // this chunk. ---

                // Flatten one detected face plus its matched emotion scores into a
                // FaceSendInfo DTO (presumably for downstream upload/telemetry — TODO confirm).
                var fsi = new FaceSendInfo();
                var e   = CoreUtil.FindEmotionForFace(f, this.DetectedEmotion);

                // Face identity and basic attributes from the Face API detection result.
                fsi.faceId = f.FaceId.ToString();
                fsi.age    = f.FaceAttributes.Age;

                // Bounding box of the face within the source image.
                fsi.faceRecHeight = f.FaceRectangle.Height;
                fsi.faceRecLeft   = f.FaceRectangle.Left;
                fsi.faceRecTop    = f.FaceRectangle.Top;
                fsi.faceRecWidth  = f.FaceRectangle.Width;

                fsi.gender = f.FaceAttributes.Gender;

                fsi.smile = f.FaceAttributes.Smile;

                // Facial-hair confidence scores.
                fsi.beard     = f.FaceAttributes.FacialHair.Beard;
                fsi.moustache = f.FaceAttributes.FacialHair.Moustache;
                fsi.sideburns = f.FaceAttributes.FacialHair.Sideburns;

                fsi.glasses = f.FaceAttributes.Glasses.ToString();

                // 3-D head pose angles (degrees).
                fsi.headYaw   = f.FaceAttributes.HeadPose.Yaw;
                fsi.headRoll  = f.FaceAttributes.HeadPose.Roll;
                fsi.headPitch = f.FaceAttributes.HeadPose.Pitch;

                // Emotion scores for the emotion result paired with this face above.
                fsi.anger     = e.Scores.Anger;
                fsi.contempt  = e.Scores.Contempt;
                fsi.disgust   = e.Scores.Disgust;
                fsi.fear      = e.Scores.Fear;
                fsi.happiness = e.Scores.Happiness;
                fsi.neutral   = e.Scores.Neutral;
                fsi.sadness   = e.Scores.Sadness;
                fsi.surprise  = e.Scores.Surprise;

                // `time` is captured earlier in the method (outside this chunk).
                fsi.timeStamp = time;
                fsi.facesNo   = this.DetectedFaces.Count();

                facesInfo.Add(fsi);
            }
            // NOTE(review): a single GUID is generated once here and then passed to
            // CreatePerson for EVERY face that needs a new person below. If CreatePerson
            // uses it as the person's identity, two unknown faces in the same frame would
            // share an id — verify whether a fresh GUID per face is intended.
            var newPersonID = Guid.NewGuid();

            //Move to initialization
            try
            {
                // Ensure the person group exists; `groupId` is reused for all
                // identify/add/train calls below.
                var g = await FaceServiceHelper.CreatePersonGroupIfNoGroupExists(groupName);

                groupId = g.PersonGroupId;
            }
            catch (Exception e)
            {
                // Catch errors with individual groups so we can continue looping through all groups. Maybe an answer will come from
                // another one.
                ErrorTrackingHelper.TrackException(e, "Problem creating group");

                if (this.ShowDialogOnFaceApiErrors)
                {
                    await ErrorTrackingHelper.GenericApiCallExceptionHandler(e, "Problem creating group");
                }
            }


            // NOTE(review): `ipresult` is populated below but never read or returned in
            // this chunk (the method returns `facesInfo`) — confirm it is still needed.
            List <IdentifiedPerson> ipresult = new List <IdentifiedPerson>();

            // Compute Face Identification and Unique Face Ids
            //We need to map detected faceID with actual personID (Identified face)
            try
            {
                // One Identify call for all detected faces against the person group.
                IdentifyResult[] groupResults = await this.IdentifyFacesAsync(groupId);

                foreach (var f in this.DetectedFaces)
                {
                    // NOTE(review): the same Where(gr => gr.FaceId == f.FaceId) query is
                    // evaluated three times (here twice and once below); a single
                    // FirstOrDefault into a local would avoid the repeated scans.
                    if (groupResults != null && groupResults.Where(gr => gr.FaceId == f.FaceId).Any() && groupResults.Where(gr => gr.FaceId == f.FaceId).FirstOrDefault().Candidates.Any())
                    {
                        // Candidates for this face, best confidence first.
                        var candidates = groupResults.Where(gr => gr.FaceId == f.FaceId).FirstOrDefault().Candidates.OrderByDescending(ca => ca.Confidence);

                        // DTO built in the loop above, matched back by face id string.
                        var    fi = facesInfo.Where(fin => fin.faceId == f.FaceId.ToString()).FirstOrDefault();
                        int    i  = 0;
                        Person p  = new Person();
                        // Record up to four candidates on the DTO; only the top candidate
                        // (case 0) drives enrollment / new-person creation.
                        foreach (var can in candidates)
                        {
                            switch (i)
                            {
                            case 0:
                                fi.can1id   = can.PersonId.ToString();
                                fi.can1conf = can.Confidence;
                                p           = await FaceServiceHelper.GetPersonAsync(groupId, can.PersonId);

                                fi.can1name = p.Name;
                                // `confidence` is the acceptance threshold declared above this chunk.
                                if (can.Confidence >= confidence)
                                {
                                    ipresult.Add(new IdentifiedPerson()
                                    {
                                        Person = p, Confidence = can.Confidence, FaceId = f.FaceId
                                    });
                                    // 248 appears to be the Face API per-person persisted-face
                                    // limit — evict one random face to make room. TODO confirm
                                    // the limit and consider `>=` instead of `==` to be safe.
                                    if (p.PersistedFaceIds.Length == 248)
                                    {
                                        Guid persistedFaceId = p.PersistedFaceIds.OrderBy(x => Guid.NewGuid()).FirstOrDefault();
                                        await FaceServiceHelper.DeletePersonFaceAsync(groupId, can.PersonId, persistedFaceId);
                                    }
                                    try
                                    {
                                        // Enroll this new image of the recognized person.
                                        await FaceServiceHelper.AddPersonFaceAsync(groupId, can.PersonId, await this.GetImageStreamCallback(), "", f.FaceRectangle);
                                    }
                                    catch (Exception e)
                                    {
                                        // Catch errors with individual groups so we can continue looping through all groups. Maybe an answer will come from
                                        // another one.
                                        ErrorTrackingHelper.TrackException(e, "Problem adding face to group");

                                        if (this.ShowDialogOnFaceApiErrors)
                                        {
                                            await ErrorTrackingHelper.GenericApiCallExceptionHandler(e, "Problem adding face to group");
                                        }
                                    }
                                }
                                else
                                {
                                    //create new Guy if confidence not sufficient
                                    // NOTE(review): `result` is never used — either the
                                    // similar-face lookup should influence CreatePerson or
                                    // the call can be removed.
                                    SimilarFaceMatch result = await GetSimilarFace(f);

                                    //using (var db = new KioskDbContext())
                                    //{
                                    //    Blogs.ItemsSource = db.Blogs.ToList();
                                    //}

                                    await CreatePerson(facesInfo, newPersonID, f);
                                }
                                break;

                            case 1:
                                fi.can2id   = can.PersonId.ToString();
                                fi.can2conf = can.Confidence;
                                p           = await FaceServiceHelper.GetPersonAsync(groupId, can.PersonId);

                                fi.can2name = p.Name;
                                break;

                            case 2:
                                fi.can3id   = can.PersonId.ToString();
                                fi.can3conf = can.Confidence;
                                p           = await FaceServiceHelper.GetPersonAsync(groupId, can.PersonId);

                                fi.can3name = p.Name;
                                break;

                            case 3:
                                fi.can4id   = can.PersonId.ToString();
                                fi.can4conf = can.Confidence;
                                p           = await FaceServiceHelper.GetPersonAsync(groupId, can.PersonId);

                                fi.can4name = p.Name;
                                break;
                            }
                            i++;
                        }
                    }

                    else
                    {
                        //if no candidate we also need to create new person
                        await CreatePerson(facesInfo, newPersonID, f);
                    }
                    // NOTE(review): training runs once PER FACE inside this loop; training
                    // is an expensive group-wide operation and would normally be kicked off
                    // once after the loop — confirm before changing behavior.
                    try
                    {
                        await FaceServiceHelper.TrainPersonGroupAsync(groupId);
                    }
                    catch (Exception e)
                    {
                        // Catch error with training of group
                        ErrorTrackingHelper.TrackException(e, "Problem training group");

                        if (this.ShowDialogOnFaceApiErrors)
                        {
                            await ErrorTrackingHelper.GenericApiCallExceptionHandler(e, "Problem training group");
                        }
                    }
                }
            }
            catch (Exception e)
            {
                // Catch error with training of group
                ErrorTrackingHelper.TrackException(e, "Problem with cognitive services");

                if (this.ShowDialogOnFaceApiErrors)
                {
                    await ErrorTrackingHelper.GenericApiCallExceptionHandler(e, "Problem with cognitive services");
                }
            }
            // The per-face DTO list is the method's result; identification results in
            // `ipresult` are not returned from this visible portion of the method.
            return(facesInfo);
        }