Example #1
        public async Task DetectObjectsAsync()
        {
            try
            {
                if (this.ImageUrl != null)
                {
                    var response = await VisionServiceHelper.DetectObjectsAsync(this.ImageUrl);

                    this.DetectedObjects = response?.Objects?.ToList();
                }
                else if (this.GetImageStreamCallback != null)
                {
                    var response = await VisionServiceHelper.DetectObjectsInStreamAsync(this.GetImageStreamCallback);

                    this.DetectedObjects = response?.Objects?.ToList();
                }
            }
            catch (Exception e)
            {
                ErrorTrackingHelper.TrackException(e, "Vision API DetectObjectsAsync error");

                this.DetectedObjects = new List <DetectedObject>();

                if (this.ShowDialogOnFaceApiErrors)
                {
                    await ErrorTrackingHelper.GenericApiCallExceptionHandler(e, "Vision API failed.");
                }
            }
            finally
            {
                this.ObjectDetectionCompleted?.Invoke(this, EventArgs.Empty);
            }
        }
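A caller-side usage sketch for the pattern above. Only ImageUrl, DetectedObjects, DetectObjectsAsync and the ObjectDetectionCompleted event come from the example; the ImageAnalyzer class name, its parameterless constructor and the sample URL are assumptions made for illustration.
        // Hypothetical usage sketch; "ImageAnalyzer" is an assumed host class name.
        public async Task ShowDetectedObjectsAsync()
        {
            var analyzer = new ImageAnalyzer { ImageUrl = "https://example.com/photo.jpg" };

            analyzer.ObjectDetectionCompleted += (s, e) =>
            {
                // DetectedObjects is reset to an empty list on API errors, but can still be null
                // when the response contained no objects, so guard before enumerating.
                foreach (var detectedObject in analyzer.DetectedObjects ?? Enumerable.Empty<DetectedObject>())
                {
                    Debug.WriteLine($"{detectedObject.ObjectProperty} ({detectedObject.Confidence:P0})");
                }
            };

            await analyzer.DetectObjectsAsync();
        }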
Example #2
        public async Task RecognizeTextAsync()
        {
            try
            {
                if (this.ImageUrl != null)
                {
                    this.OcrResults = await VisionServiceHelper.RecognizeTextAsync(this.ImageUrl);
                }
                else if (this.GetImageStreamCallback != null)
                {
                    this.OcrResults = await VisionServiceHelper.RecognizeTextAsync(this.GetImageStreamCallback);
                }
            }
            catch (Exception e)
            {
                this.OcrResults = new Microsoft.ProjectOxford.Vision.Contract.OcrResults();

                if (this.ShowDialogOnFaceApiErrors)
                {
                    await ErrorTrackingHelper.GenericApiCallExceptionHandler(e, "Vision API failed.");
                }
            }
            finally
            {
                this.OcrAnalysisCompleted?.Invoke(this, EventArgs.Empty);
            }
        }
Example #3
        public async Task IdentifyCelebrityAsync()
        {
            try
            {
                if (this.ImageUrl != null)
                {
                    this.AnalysisResult = await VisionServiceHelper.AnalyzeImageAsync(this.ImageUrl);
                }
                else if (this.GetImageStreamCallback != null)
                {
                    this.AnalysisResult = await VisionServiceHelper.AnalyzeImageAsync(
                        this.GetImageStreamCallback,
                        new List <VisualFeatureTypes>() { VisualFeatureTypes.Categories },
                        new List <Details>() { Details.Celebrities });
                }
            }
            catch (Exception e)
            {
                ErrorTrackingHelper.TrackException(e, "Vision API AnalyzeImageAsync error");

                this.AnalysisResult = new ImageAnalysis();

                if (this.ShowDialogOnFaceApiErrors)
                {
                    await ErrorTrackingHelper.GenericApiCallExceptionHandler(e, "Vision API failed.");
                }
            }
        }
Example #4
        public async Task RecognizeTextAsync(TextRecognitionMode textRecognitionMode)
        {
            try
            {
                this.TextRecognitionMode = textRecognitionMode;
                if (this.ImageUrl != null)
                {
                    this.TextOperationResult = await VisionServiceHelper.RecognizeTextAsync(this.ImageUrl, textRecognitionMode);
                }
                else if (this.GetImageStreamCallback != null)
                {
                    this.TextOperationResult = await VisionServiceHelper.RecognizeTextAsync(this.GetImageStreamCallback, textRecognitionMode);
                }
            }
            catch (Exception ex)
            {
                ErrorTrackingHelper.TrackException(ex, "Vision API RecognizeTextAsync error");

                this.TextOperationResult = new TextOperationResult();

                if (this.ShowDialogOnFaceApiErrors)
                {
                    await ErrorTrackingHelper.GenericApiCallExceptionHandler(ex, "Vision API failed.");
                }
            }
            finally
            {
                this.TextRecognitionCompleted?.Invoke(this, EventArgs.Empty);
            }
        }
Example #5
        private async Task <Guid> CreatePerson(List <FaceSendInfo> facesInfo, Guid newPersonID, Face f)
        {
            AddPersistedFaceResult result = null;

            try
            {
                // No candidate found: create a new person and set the candidate to the newly created person with a confidence of 100%
                var name = f.FaceAttributes.Gender + "-" + f.FaceAttributes.Age + "-" + newPersonID.ToString();

                newPersonID = (await FaceServiceHelper.CreatePersonWithResultAsync(groupId, name)).PersonId;
                var fi = facesInfo.Where(fin => fin.faceId == f.FaceId.ToString()).FirstOrDefault();
                fi.canid   = newPersonID.ToString();
                fi.canname = name;
                fi.canconf = 1;

                await FaceServiceHelper.AddPersonFaceAsync(groupId, newPersonID, await this.GetImageStreamCallback(), "", f.FaceRectangle);
            }
            catch (Exception e)
            {
                // Catch errors with individual groups so we can continue looping through all groups. Maybe an answer will come from
                // another one.
                ErrorTrackingHelper.TrackException(e, "Problem adding face to group");

                if (this.ShowDialogOnFaceApiErrors)
                {
                    await ErrorTrackingHelper.GenericApiCallExceptionHandler(e, "Problem adding face to group");
                }
            }
            return(newPersonID);
        }
Example #6
        public async Task DescribeAsync()
        {
            try
            {
                if (this.ImageUrl != null)
                {
                    this.ImageDescription = await VisionServiceHelper.DescribeAsync(this.ImageUrl);

                    this.AnalysisResult = GetAnalysisResult(this.ImageDescription);
                }
                else if (this.GetImageStreamCallback != null)
                {
                    this.ImageDescription = await VisionServiceHelper.DescribeAsync(this.GetImageStreamCallback);

                    this.AnalysisResult = GetAnalysisResult(this.ImageDescription);
                }
            }
            catch (Exception e)
            {
                ErrorTrackingHelper.TrackException(e, "Vision API DescribeAsync error");

                this.AnalysisResult = new ImageAnalysis();

                if (this.ShowDialogOnFaceApiErrors)
                {
                    await ErrorTrackingHelper.GenericApiCallExceptionHandler(e, "Vision API failed.");
                }
            }
        }
Example #7
        public async Task IdentifyCelebrityAsync()
        {
            try
            {
                if (this.ImageUrl != null)
                {
                    this.AnalysisResult = await VisionServiceHelper.AnalyzeImageAsync(this.ImageUrl);
                }
                else if (this.GetImageStreamCallback != null)
                {
                    this.AnalysisResult = await VisionServiceHelper.AnalyzeImageAsync(this.GetImageStreamCallback, new VisualFeature[] { VisualFeature.Categories }, new string[] { "Celebrities" });
                }
            }
            catch (Exception e)
            {
                ErrorTrackingHelper.TrackException(e, "Vision API AnalyzeImageAsync error");

                this.AnalysisResult = new Microsoft.ProjectOxford.Vision.Contract.AnalysisResult();

                if (this.ShowDialogOnFaceApiErrors)
                {
                    await ErrorTrackingHelper.GenericApiCallExceptionHandler(e, "Vision API failed.");
                }
            }
        }
Example #8
        private async Task <SimilarFaceMatch> GetSimilarFace(Face detectedFace)
        {
            SimilarFaceMatch result = new SimilarFaceMatch();

            try
            {
                Tuple <SimilarPersistedFace, string> similarPersistedFace = await FaceListManager.FindSimilarPersistedFaceAsync(await this.GetImageStreamCallback(), detectedFace.FaceId, detectedFace.FaceRectangle);

                if (similarPersistedFace != null)
                {
                    bool isNew = (similarPersistedFace.Item2 == null);
                    result = new SimilarFaceMatch()
                    {
                        Face = detectedFace, SimilarPersistedFace = similarPersistedFace.Item1, isNew = isNew
                    };
                }
            }
            catch (Exception e)
            {
                ErrorTrackingHelper.TrackException(e, "FaceListManager.FindSimilarPersistedFaceAsync error");

                if (this.ShowDialogOnFaceApiErrors)
                {
                    await ErrorTrackingHelper.GenericApiCallExceptionHandler(e, "Failure finding similar faces");
                }
            }

            return(result);
        }
Example #9
        public async Task RecognizeTextAsync()
        {
            try
            {
                if (this.ImageUrl != null)
                {
                    this.TextOperationResult = (await VisionServiceHelper.ReadFileAsync(this.ImageUrl))?.ReadResults?.FirstOrDefault();
                }
                else if (this.GetImageStreamCallback != null)
                {
                    this.TextOperationResult = (await VisionServiceHelper.ReadFileAsync(this.GetImageStreamCallback))?.ReadResults?.FirstOrDefault();
                }
            }
            catch (Exception ex)
            {
                ErrorTrackingHelper.TrackException(ex, "Vision API RecognizeTextAsync error");

                this.TextOperationResult = new ReadResult();

                if (this.ShowDialogOnFaceApiErrors)
                {
                    await ErrorTrackingHelper.GenericApiCallExceptionHandler(ex, "Vision API failed.");
                }
            }
        }
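A usage sketch for consuming the Read API result above. TextOperationResult, RecognizeTextAsync and the empty-ReadResult fallback come from the example; the ImageAnalyzer class name and the sample URL are assumptions, while Line and Text are the Computer Vision SDK's Read result types.
        // Hypothetical usage sketch; "ImageAnalyzer" is an assumed host class name.
        public async Task PrintRecognizedTextAsync()
        {
            var analyzer = new ImageAnalyzer { ImageUrl = "https://example.com/receipt.png" };

            await analyzer.RecognizeTextAsync();

            // TextOperationResult holds the first page of Read results, or an empty ReadResult after a failure.
            foreach (Line line in analyzer.TextOperationResult?.Lines ?? Enumerable.Empty<Line>())
            {
                Debug.WriteLine(line.Text);
            }
        }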
Example #10
        public async Task DetectEmotionAsync()
        {
            try
            {
                if (this.ImageUrl != null)
                {
                    this.DetectedEmotion = await EmotionServiceHelper.RecognizeAsync(this.ImageUrl);
                }
                else if (this.GetImageStreamCallback != null)
                {
                    this.DetectedEmotion = await EmotionServiceHelper.RecognizeAsync(this.GetImageStreamCallback);
                }

                if (this.FilterOutSmallFaces)
                {
                    this.DetectedEmotion = this.DetectedEmotion.Where(f => CoreUtil.IsFaceBigEnoughForDetection(f.FaceRectangle.Height, this.DecodedImageHeight));
                }
            }
            catch (Exception e)
            {
                ErrorTrackingHelper.TrackException(e, "Emotion API RecognizeAsync error");

                this.DetectedEmotion = Enumerable.Empty <Emotion>();

                if (this.ShowDialogOnFaceApiErrors)
                {
                    await ErrorTrackingHelper.GenericApiCallExceptionHandler(e, "Emotion detection failed.");
                }
            }
            finally
            {
                this.OnEmotionRecognitionCompleted();
            }
        }
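The FilterOutSmallFaces branch above relies on CoreUtil.IsFaceBigEnoughForDetection, which is not shown in these examples. The following is only a plausible sketch of that kind of check; the ratio threshold and the exact signature are assumptions, not the real helper.
        // Illustrative only: assumes the helper compares face height against the decoded image height.
        public static bool IsFaceBigEnoughForDetection(int faceHeight, double decodedImageHeight)
        {
            const double minFaceToImageHeightRatio = 0.1; // assumed threshold

            if (decodedImageHeight <= 0)
            {
                return false;
            }

            return faceHeight / decodedImageHeight >= minFaceToImageHeightRatio;
        }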
        private static async Task <TResponse> RunTaskWithAutoRetryOnQuotaLimitExceededError <TResponse>(Func <Task <TResponse> > action)
        {
            int retriesLeft = FaceServiceHelper.RetryCountOnQuotaLimitError;
            int delay       = FaceServiceHelper.RetryDelayOnQuotaLimitError;

            TResponse response = default(TResponse);

            while (true)
            {
                try
                {
                    response = await action();

                    break;
                }
                catch (ClientException exception) when(exception.HttpStatus == (System.Net.HttpStatusCode) 429 && retriesLeft > 0)
                {
                    ErrorTrackingHelper.TrackException(exception, "Emotion API throttling error");
                    if (retriesLeft == 1 && Throttled != null)
                    {
                        Throttled();
                    }

                    await Task.Delay(delay);

                    retriesLeft--;
                    delay *= 2;
                    continue;
                }
            }

            return(response);
        }
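A sketch of how the retry wrapper above might be used. Only RunTaskWithAutoRetryOnQuotaLimitExceededError comes from the example; the emotionClient field (a ProjectOxford EmotionServiceClient) and the surrounding method are assumptions.
        // Hypothetical usage: retry HTTP 429 (quota exceeded) responses with exponential backoff
        // instead of failing immediately. "emotionClient" is an assumed EmotionServiceClient field.
        public static async Task<Emotion[]> RecognizeWithRetryAsync(Stream imageStream)
        {
            return await RunTaskWithAutoRetryOnQuotaLimitExceededError(
                () => emotionClient.RecognizeAsync(imageStream));
        }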
        public async Task DetectEmotionAsync()
        {
            try
            {
                // Implement #1: If there is ImageUrl you should call the proper EmotionServiceHelper method to detect emotions
                //if (this.ImageUrl != null)
                //{
                //    //this.DetectedEmotion = await EmotionServiceHelper.RecognizeAsync(this.ImageUrl);
                //    throw new NotImplementedException();
                //}
                // Implement #2: If GetImageStreamCallback is not null, you should call the proper EmotionServiceHelper method to detect emotions
                //else
                if (this.GetImageStreamCallback != null)
                {
                    this.DetectedEmotion = await EmotionServiceHelper.RecognizeAsync(this.GetImageStreamCallback);
                }

                // Implement #3: If FilterOutSmallFaces is enabled, filter the DetectedEmotion using the CoreUtil IsFaceBigEnoughForDetection method results
                if (this.FilterOutSmallFaces)
                {
                    this.DetectedEmotion = this.DetectedEmotion.Where(f => CoreUtil.IsFaceBigEnoughForDetection(f.FaceRectangle.Height, this.DecodedImageHeight)).ToList();
                }
            }
            catch (Exception e)
            {
                // Implement #4: If there is an error, call the ErrorTrackingHelper helper class to record the issue
                //               and return an empty emotion list
                ErrorTrackingHelper.TrackException(e, "Emotion API RecognizeAsync error");

                //this.DetectedEmotion = Enumerable.Empty<Emotion>();
                this.DetectedEmotion = null;

                if (this.ShowDialogOnFaceApiErrors)
                {
                    await ErrorTrackingHelper.GenericApiCallExceptionHandler(e, "Emotion detection failed.");
                }

#if DEBUG
                throw;
#endif
            }
            finally
            {
                // Implement #5: Call the event OnEmotionRecognitionCompleted
                this.OnEmotionRecognitionCompleted();
            }
        }
Example #13
        public static async Task ResetFaceLists()
        {
            faceLists = new Dictionary <string, FaceListInfo>();

            try
            {
                IEnumerable <FaceList> metadata = await FaceServiceHelper.GetFaceListsAsync(FaceListsUserDataFilter);

                foreach (var item in metadata)
                {
                    await FaceServiceHelper.DeleteFaceListAsync(item.FaceListId);
                }
            }
            catch (Exception e)
            {
                ErrorTrackingHelper.TrackException(e, "Error resetting face lists");
            }
        }
Example #14
        public async Task AnalyzeImageAsync(bool detectCelebrities = false, IList <VisualFeatureTypes> visualFeatures = null)
        {
            try
            {
                if (visualFeatures == null)
                {
                    visualFeatures = DefaultVisualFeatures;
                }

                if (this.ImageUrl != null)
                {
                    this.AnalysisResult = await VisionServiceHelper.AnalyzeImageAsync(
                        this.ImageUrl,
                        visualFeatures,
                        detectCelebrities ? new List<Details> { Details.Celebrities } : null);
                }
                else if (this.GetImageStreamCallback != null)
                {
                    this.AnalysisResult = await VisionServiceHelper.AnalyzeImageAsync(
                        this.GetImageStreamCallback,
                        visualFeatures,
                        detectCelebrities ? new List<Details> { Details.Celebrities } : null);
                }
            }
            catch (Exception e)
            {
                ErrorTrackingHelper.TrackException(e, "Vision API AnalyzeImageAsync error");

                this.AnalysisResult = new ImageAnalysis();

                if (this.ShowDialogOnFaceApiErrors)
                {
                    await ErrorTrackingHelper.GenericApiCallExceptionHandler(e, "Vision API failed.");
                }
            }
            finally
            {
                this.ComputerVisionAnalysisCompleted?.Invoke(this, EventArgs.Empty);
            }
        }
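A caller-side sketch for the overload above, reading celebrity matches out of the ImageAnalysis result. AnalyzeImageAsync, AnalysisResult and ComputerVisionAnalysisCompleted come from the example; the ImageAnalyzer class name and the sample URL are assumptions, and Categories/Detail/Celebrities are the Computer Vision SDK result properties.
        // Hypothetical usage sketch; "ImageAnalyzer" is an assumed host class name.
        public async Task ShowCelebritiesAsync()
        {
            var analyzer = new ImageAnalyzer { ImageUrl = "https://example.com/group-photo.jpg" };

            analyzer.ComputerVisionAnalysisCompleted += (s, e) =>
            {
                var categories = analyzer.AnalysisResult?.Categories;
                if (categories == null)
                {
                    return;
                }

                // Celebrity details are nested under the Categories returned by the Celebrities domain model.
                foreach (var celebrity in categories.Where(c => c.Detail?.Celebrities != null)
                                                    .SelectMany(c => c.Detail.Celebrities))
                {
                    Debug.WriteLine($"{celebrity.Name}: {celebrity.Confidence:P0}");
                }
            };

            await analyzer.AnalyzeImageAsync(
                detectCelebrities: true,
                visualFeatures: new List<VisualFeatureTypes> { VisualFeatureTypes.Categories });
        }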
        public async Task FindSimilarPersistedFacesAsync()
        {
            this.SimilarFaceMatches = Enumerable.Empty <SimilarFaceMatch>();

            if (this.DetectedFaces == null || !this.DetectedFaces.Any())
            {
                return;
            }

            List <SimilarFaceMatch> result = new List <SimilarFaceMatch>();

            foreach (DetectedFace detectedFace in this.DetectedFaces)
            {
                try
                {
                    SimilarFace similarPersistedFace = null;
                    if (this.ImageUrl != null)
                    {
                        similarPersistedFace = await FaceListManager.FindSimilarPersistedFaceAsync(ImageUrl, detectedFace.FaceId.GetValueOrDefault(), detectedFace);
                    }
                    else
                    {
                        similarPersistedFace = await FaceListManager.FindSimilarPersistedFaceAsync(this.GetImageStreamCallback, detectedFace.FaceId.GetValueOrDefault(), detectedFace);
                    }
                    if (similarPersistedFace != null)
                    {
                        result.Add(new SimilarFaceMatch {
                            Face = detectedFace, SimilarPersistedFace = similarPersistedFace
                        });
                    }
                }
                catch (Exception e)
                {
                    ErrorTrackingHelper.TrackException(e, "FaceListManager.FindSimilarPersistedFaceAsync error");

                    if (this.ShowDialogOnFaceApiErrors)
                    {
                        await ErrorTrackingHelper.GenericApiCallExceptionHandler(e, "Failure finding similar faces");
                    }
                }
            }

            this.SimilarFaceMatches = result;
        }
        public async Task DetectFacesAsync(bool detectFaceAttributes = false, bool detectFaceLandmarks = false)
        {
            try
            {
                if (this.ImageUrl != null)
                {
                    this.DetectedFaces = await FaceServiceHelper.DetectAsync(
                        this.ImageUrl,
                        returnFaceId : true,
                        returnFaceLandmarks : detectFaceLandmarks,
                        returnFaceAttributes : detectFaceAttributes?DefaultFaceAttributeTypes : null);
                }
                else if (this.GetImageStreamCallback != null)
                {
                    this.DetectedFaces = await FaceServiceHelper.DetectAsync(
                        this.GetImageStreamCallback,
                        returnFaceId : true,
                        returnFaceLandmarks : detectFaceLandmarks,
                        returnFaceAttributes : detectFaceAttributes?DefaultFaceAttributeTypes : null);
                }

                if (this.FilterOutSmallFaces)
                {
                    this.DetectedFaces = this.DetectedFaces.Where(f => CoreUtil.IsFaceBigEnoughForDetection(f.FaceRectangle.Height, this.DecodedImageHeight));
                }
            }
            catch (Exception e)
            {
                ErrorTrackingHelper.TrackException(e, "Face API DetectAsync error");

                this.DetectedFaces = Enumerable.Empty <Face>();

                if (this.ShowDialogOnFaceApiErrors)
                {
                    //await ErrorTrackingHelper.GenericApiCallExceptionHandler(e, "Face detection failed.");
                    ErrorMessage = e.Message + " - Face Detection Failed";
                }
            }
            finally
            {
                this.OnFaceDetectionCompleted();
            }
        }
Example #17
        /// <summary>
        /// Calls emotion detection for every recognized face.
        /// </summary>
        /// <returns></returns>
        public async Task DetectEmotionWithRectanglesAsync()
        {
            try
            {
                var rectangles = new List <Rectangle>();
                foreach (var f in this.DetectedFaces)
                {
                    Rectangle r = new Rectangle()
                    {
                        Top = f.FaceRectangle.Top, Height = f.FaceRectangle.Height, Left = f.FaceRectangle.Left, Width = f.FaceRectangle.Width
                    };
                    rectangles.Add(r);
                }
                if (this.ImageUrl != null)
                {
                    this.DetectedEmotion = await EmotionServiceHelper.RecognizeWithFaceRectanglesAsync(this.ImageUrl, rectangles.ToArray());
                }
                else if (this.GetImageStreamCallback != null)
                {
                    this.DetectedEmotion = await EmotionServiceHelper.RecognizeWithFaceRectanglesAsync(await this.GetImageStreamCallback(), rectangles.ToArray());
                }

                if (this.FilterOutSmallFaces)
                {
                    this.DetectedEmotion = this.DetectedEmotion.Where(f => CoreUtil.IsFaceBigEnoughForDetection(f.FaceRectangle.Height, this.DecodedImageHeight));
                }
            }
            catch (Exception e)
            {
                ErrorTrackingHelper.TrackException(e, "Emotion API RecognizeAsync error");

                this.DetectedEmotion = Enumerable.Empty <Emotion>();

                if (this.ShowDialogOnFaceApiErrors)
                {
                    await ErrorTrackingHelper.GenericApiCallExceptionHandler(e, "Emotion detection failed.");
                }
            }
            finally
            {
                this.OnEmotionRecognitionCompleted();
            }
        }
        private async Task <IList <IdentifyResult> > GetFaceIdentificationResultsOrDefaultAsync(string personGroupId, Guid[] detectedFaceIds)
        {
            try
            {
                return(await FaceServiceHelper.IdentifyAsync(personGroupId, detectedFaceIds));
            }
            catch (Exception e)
            {
                // Catch errors with individual groups so we can continue looping through all groups. Maybe an answer will come from
                // another one.
                ErrorTrackingHelper.TrackException(e, "Face API IdentifyAsync error");

                if (this.ShowDialogOnFaceApiErrors)
                {
                    await ErrorTrackingHelper.GenericApiCallExceptionHandler(e, "Failure identifying faces");
                }
            }
            return(new List <IdentifyResult>());
        }
Example #19
        public static async Task Initialize()
        {
            faceLists = new Dictionary <string, FaceListInfo>();

            try
            {
                IEnumerable <FaceList> metadata = await FaceServiceHelper.GetFaceListsAsync(FaceListsUserDataFilter);

                foreach (var item in metadata)
                {
                    faceLists.Add(item.FaceListId, new FaceListInfo {
                        FaceListId = item.FaceListId, LastMatchTimestamp = DateTime.Now
                    });
                }
            }
            catch (Exception e)
            {
                ErrorTrackingHelper.TrackException(e, "Face API GetFaceListsAsync error");
            }
        }
        private async Task <IEnumerable <PersonGroup> > GetPersonGroupsAsync()
        {
            IEnumerable <PersonGroup> personGroups = Enumerable.Empty <PersonGroup>();

            try
            {
                personGroups = (await FaceServiceHelper.ListPersonGroupsAsync(PeopleGroupsUserDataFilter))
                               .Where(x => x.RecognitionModel.Equals(FaceServiceHelper.LatestRecognitionModelName, StringComparison.OrdinalIgnoreCase));
            }
            catch (Exception e)
            {
                ErrorTrackingHelper.TrackException(e, "Face API GetPersonGroupsAsync error");

                if (this.ShowDialogOnFaceApiErrors)
                {
                    await ErrorTrackingHelper.GenericApiCallExceptionHandler(e, "Failure getting PersonGroups");
                }
            }
            return(personGroups);
        }
Example #21
        public async Task <IdentifyResult[]> IdentifyFacesAsync(string groupId)
        {
            this.IdentifiedPersons = Enumerable.Empty <IdentifiedPerson>();
            IdentifyResult[] groupResults    = null;
            Guid[]           detectedFaceIds = this.DetectedFaces?.Select(f => f.FaceId).ToArray();
            if (detectedFaceIds != null && detectedFaceIds.Any())
            {
                List <IdentifiedPerson> result = new List <IdentifiedPerson>();

                try {
                    groupResults = await FaceServiceHelper.IdentifyAsync(groupId, detectedFaceIds);

                    foreach (var match in groupResults)
                    {
                        if (!match.Candidates.Any())
                        {
                            continue;
                        }
                        Person person = await FaceServiceHelper.GetPersonAsync(groupId, match.Candidates[0].PersonId);

                        result.Add(new IdentifiedPerson {
                            Person = person, Confidence = match.Candidates[0].Confidence, FaceId = match.FaceId
                        });
                    }
                }
                catch (Exception e)
                {
                    // Catch errors with individual groups so we can continue looping through all groups. Maybe an answer will come from
                    // another one.
                    ErrorTrackingHelper.TrackException(e, "Face API IdentifyAsync error");

                    //if (this.ShowDialogOnFaceApiErrors)
                    //{
                    //    await ErrorTrackingHelper.GenericApiCallExceptionHandler(e, "Failure identifying faces");
                    //}
                }
                this.IdentifiedPersons = result;
            }

            return(groupResults);
        }
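A sketch of consuming the example above from within the same class. IdentifyFacesAsync, IdentifiedPersons, Person and Confidence come from the snippet; the wrapper method itself is only illustrative and assumes faces were already detected so DetectedFaces is populated.
        // Hypothetical usage sketch for a single person group.
        public async Task ShowIdentifiedPeopleAsync(string groupId)
        {
            IdentifyResult[] groupResults = await this.IdentifyFacesAsync(groupId);
            Debug.WriteLine($"{groupResults?.Length ?? 0} faces returned identification results");

            // IdentifiedPersons is never null; it stays empty when nothing was matched.
            foreach (IdentifiedPerson identifiedPerson in this.IdentifiedPersons)
            {
                Debug.WriteLine($"{identifiedPerson.Person.Name} ({identifiedPerson.Confidence:P0})");
            }
        }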
        public async Task DetectQRCodesAsync(bool detectFaceAttributes = false, bool detectFaceLandmarks = false)
        {
            try
            {
                await QRCodeProcessHelper.IdentifyQRCode(this.Data, this.DecodedImageWidth, this.DecodedImageHeight);
            }
            catch (Exception e)
            {
                ErrorTrackingHelper.TrackException(e, "Face API DetectAsync error");

                this.DetectedFaces = Enumerable.Empty <Face>();

                if (this.ShowDialogOnFaceApiErrors)
                {
                    await ErrorTrackingHelper.GenericApiCallExceptionHandler(e, "QR Code detection failed.");
                }
            }
            finally
            {
            }
        }
        public async Task AnalyzeImageAsync(bool detectCelebrities = false, IEnumerable <VisualFeature> visualFeatures = null)
        {
            try
            {
                if (visualFeatures == null)
                {
                    visualFeatures = DefaultVisualFeatures;
                }

                if (this.ImageUrl != null)
                {
                    this.AnalysisResult = await VisionServiceHelper.AnalyzeImageAsync(
                        this.ImageUrl,
                        visualFeatures,
                        detectCelebrities?new string[] { "Celebrities" } : null);
                }
                else if (this.GetImageStreamCallback != null)
                {
                    this.AnalysisResult = await VisionServiceHelper.AnalyzeImageAsync(
                        this.GetImageStreamCallback,
                        visualFeatures,
                        detectCelebrities?new string[] { "Celebrities" } : null);
                }
            }
            catch (Exception e)
            {
                this.AnalysisResult = new Microsoft.ProjectOxford.Vision.Contract.AnalysisResult();

                if (this.ShowDialogOnFaceApiErrors)
                {
                    await ErrorTrackingHelper.GenericApiCallExceptionHandler(e, "Vision API failed.");
                }
            }
            finally
            {
                this.ComputerVisionAnalysisCompleted?.Invoke(this, EventArgs.Empty);
            }
        }
        public async Task DescribeAsync()
        {
            try
            {
                if (this.ImageUrl != null)
                {
                    this.AnalysisResult = await VisionServiceHelper.DescribeAsync(this.ImageUrl);
                }
                else if (this.GetImageStreamCallback != null)
                {
                    this.AnalysisResult = await VisionServiceHelper.DescribeAsync(this.GetImageStreamCallback);
                }
            }
            catch (Exception e)
            {
                this.AnalysisResult = new Microsoft.ProjectOxford.Vision.Contract.AnalysisResult();

                if (this.ShowDialogOnFaceApiErrors)
                {
                    await ErrorTrackingHelper.GenericApiCallExceptionHandler(e, "Vision API failed.");
                }
            }
        }
        public async Task AnalyzeImageAsync(IList <Details> details = null, IList <VisualFeatureTypes> visualFeatures = null)
        {
            try
            {
                if (visualFeatures == null)
                {
                    visualFeatures = DefaultVisualFeatures;
                }

                if (this.ImageUrl != null)
                {
                    this.AnalysisResult = await VisionServiceHelper.AnalyzeImageAsync(
                        this.ImageUrl,
                        visualFeatures,
                        details);
                }
                else if (this.GetImageStreamCallback != null)
                {
                    this.AnalysisResult = await VisionServiceHelper.AnalyzeImageAsync(
                        this.GetImageStreamCallback,
                        visualFeatures,
                        details);
                }
            }
            catch (Exception e)
            {
                ErrorTrackingHelper.TrackException(e, "Vision API AnalyzeImageAsync error");

                this.AnalysisResult = new ImageAnalysis();

                if (this.ShowDialogOnFaceApiErrors)
                {
                    await ErrorTrackingHelper.GenericApiCallExceptionHandler(e, "Vision API failed.");
                }
            }
        }
Example #26
        private async Task AddFaceToPerson(Face f, Person p, Guid personId)
        {
            // 248 is the maximum number of faces we are able to persist per person; remove a random one to make room
            if (p.PersistedFaceIds.Length == 248)
            {
                Guid persistedFaceId = p.PersistedFaceIds.OrderBy(x => Guid.NewGuid()).FirstOrDefault();
                await FaceServiceHelper.DeletePersonFaceAsync(groupId, personId, persistedFaceId);
            }
            try
            {
                await FaceServiceHelper.AddPersonFaceAsync(groupId, personId, await this.GetImageStreamCallback(), "", f.FaceRectangle);
            }
            catch (Exception e)
            {
                // Catch errors with individual groups so we can continue looping through all groups. Maybe an answer will come from
                // another one.
                ErrorTrackingHelper.TrackException(e, "Problem adding face to group");

                if (this.ShowDialogOnFaceApiErrors)
                {
                    await ErrorTrackingHelper.GenericApiCallExceptionHandler(e, "Problem adding face to group");
                }
            }
        }
        public async Task IdentifyFacesAsync()
        {
            this.IdentifiedPersons = Enumerable.Empty <IdentifiedPerson>();

            Guid[] detectedFaceIds = this.DetectedFaces?.Select(f => f.FaceId).ToArray();
            if (detectedFaceIds != null && detectedFaceIds.Any())
            {
                List <IdentifiedPerson> result = new List <IdentifiedPerson>();

                IEnumerable <PersonGroup> personGroups = Enumerable.Empty <PersonGroup>();
                try
                {
                    personGroups = await FaceServiceHelper.GetPersonGroupsAsync(PeopleGroupsUserDataFilter);
                }
                catch (Exception e)
                {
                    ErrorTrackingHelper.TrackException(e, "Face API GetPersonGroupsAsync error");

                    if (this.ShowDialogOnFaceApiErrors)
                    {
                        await ErrorTrackingHelper.GenericApiCallExceptionHandler(e, "Failure getting PersonGroups");
                    }
                }

                foreach (var group in personGroups)
                {
                    try
                    {
                        IdentifyResult[] groupResults = await FaceServiceHelper.IdentifyAsync(group.PersonGroupId, detectedFaceIds);

                        foreach (var match in groupResults)
                        {
                            if (!match.Candidates.Any())
                            {
                                continue;
                            }

                            Person person = await FaceServiceHelper.GetPersonAsync(group.PersonGroupId, match.Candidates[0].PersonId);

                            IdentifiedPerson alreadyIdentifiedPerson = result.FirstOrDefault(p => p.Person.PersonId == match.Candidates[0].PersonId);
                            if (alreadyIdentifiedPerson != null)
                            {
                                // We already tagged this person in another group. Replace the existing match with this new one if its confidence is higher.
                                if (alreadyIdentifiedPerson.Confidence < match.Candidates[0].Confidence)
                                {
                                    alreadyIdentifiedPerson.Person     = person;
                                    alreadyIdentifiedPerson.Confidence = match.Candidates[0].Confidence;
                                    alreadyIdentifiedPerson.FaceId     = match.FaceId;
                                }
                            }
                            else
                            {
                                result.Add(new IdentifiedPerson {
                                    Person = person, Confidence = match.Candidates[0].Confidence, FaceId = match.FaceId
                                });
                            }
                        }
                    }
                    catch (Exception e)
                    {
                        // Catch errors with individual groups so we can continue looping through all groups. Maybe an answer will come from
                        // another one.
                        ErrorTrackingHelper.TrackException(e, "Face API IdentifyAsync error");

                        if (this.ShowDialogOnFaceApiErrors)
                        {
                            await ErrorTrackingHelper.GenericApiCallExceptionHandler(e, "Failure identifying faces");
                        }
                    }
                }

                this.IdentifiedPersons = result;
            }

            this.OnFaceRecognitionCompleted();
        }
Example #28
        public static async Task UpdatePersonGroupsWithNewRecModelAsync(PersonGroup oldPersonGroup, string userDataFilder, IProgress <FaceIdentificationModelUpdateStatus> progress = null)
        {
            try
            {
                bool allPeopleHaveAtLeastOneFaceMigrated = true;

                // just make sure the person group use previous recognition model
                bool isOldPersonGroup = oldPersonGroup?.RecognitionModel != null && !oldPersonGroup.RecognitionModel.Equals(LatestRecognitionModelName, StringComparison.OrdinalIgnoreCase);

                // get persons
                IList <Person> personsInGroup = isOldPersonGroup ? await GetPersonsAsync(oldPersonGroup.PersonGroupId) : new List <Person>();

                if (personsInGroup.Any())
                {
                    // create new person group
                    string newPersonGroupId = Guid.NewGuid().ToString();
                    await CreatePersonGroupAsync(newPersonGroupId, oldPersonGroup.Name, userDataFilder);

                    // create new persons
                    var newPersonList = new List <Tuple <Person, Person> >();
                    foreach (Person oldPerson in personsInGroup)
                    {
                        Person newPerson = await CreatePersonAsync(newPersonGroupId, oldPerson.Name);

                        newPersonList.Add(new Tuple <Person, Person>(oldPerson, newPerson));
                    }

                    // add face images
                    foreach (var(personItem, index) in newPersonList.Select((v, i) => (v, i)))
                    {
                        Person oldPerson = personItem.Item1;
                        Person newPerson = personItem.Item2;

                        // get face images from the old model
                        var personFaces = new List <PersistedFace>();
                        foreach (Guid face in oldPerson.PersistedFaceIds)
                        {
                            PersistedFace personFace = await GetPersonFaceAsync(oldPersonGroup.PersonGroupId, oldPerson.PersonId, face);

                            personFaces.Add(personFace);
                        }

                        bool addedAtLeastOneFaceImageForPerson = false;
                        // add face images to the new model
                        foreach (PersistedFace persistedFace in personFaces)
                        {
                            try
                            {
                                bool isUri = !string.IsNullOrEmpty(persistedFace.UserData) ? Uri.IsWellFormedUriString(persistedFace.UserData, UriKind.Absolute) : false;
                                if (isUri)
                                {
                                    await AddPersonFaceFromUrlAsync(newPersonGroupId, newPerson.PersonId, imageUrl : persistedFace.UserData, userData : persistedFace.UserData, targetFaceRect : null);
                                }
                                else
                                {
                                    StorageFile localImage = await StorageFile.GetFileFromPathAsync(persistedFace.UserData);
                                    await AddPersonFaceFromStreamAsync(newPersonGroupId, newPerson.PersonId, imageStreamCallback : localImage.OpenStreamForReadAsync, userData : localImage.Path, targetFaceRect : null);
                                }

                                addedAtLeastOneFaceImageForPerson = true;
                            }
                            catch { /* Ignore the error and continue. Other images might work */ }
                        }

                        if (!addedAtLeastOneFaceImageForPerson)
                        {
                            allPeopleHaveAtLeastOneFaceMigrated = false;
                        }

                        progress?.Report(new FaceIdentificationModelUpdateStatus {
                            State = FaceIdentificationModelUpdateState.Running, Count = index + 1, Total = personsInGroup.Count
                        });
                    }

                    // delete old person group
                    await DeletePersonGroupAsync(oldPersonGroup.PersonGroupId);

                    // train new person group
                    await TrainPersonGroupAsync(newPersonGroupId);
                }

                progress?.Report(new FaceIdentificationModelUpdateStatus {
                    State = allPeopleHaveAtLeastOneFaceMigrated ? FaceIdentificationModelUpdateState.Complete : FaceIdentificationModelUpdateState.CompletedWithSomeEmptyPeople
                });
            }
            catch (Exception ex)
            {
                ErrorTrackingHelper.TrackException(ex, "Face API: Update PersonGroup using new recognition model error");
                progress?.Report(new FaceIdentificationModelUpdateStatus {
                    State = FaceIdentificationModelUpdateState.Error
                });
            }
        }
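A sketch of driving the migration above with a progress callback. ListPersonGroupsAsync, PeopleGroupsUserDataFilter and FaceIdentificationModelUpdateStatus come from these examples; the wrapper method and the assumption that UpdatePersonGroupsWithNewRecModelAsync lives on FaceServiceHelper are illustrative.
        // Hypothetical usage: migrate every person group, reporting progress as each person is copied.
        public static async Task MigrateAllPersonGroupsAsync()
        {
            var progress = new Progress<FaceIdentificationModelUpdateStatus>(
                status => Debug.WriteLine($"State: {status.State}, person {status.Count} of {status.Total}"));

            var personGroups = await FaceServiceHelper.ListPersonGroupsAsync(PeopleGroupsUserDataFilter);

            foreach (PersonGroup group in personGroups)
            {
                // The method itself checks whether a group still uses an older recognition model.
                await FaceServiceHelper.UpdatePersonGroupsWithNewRecModelAsync(group, PeopleGroupsUserDataFilter, progress);
            }
        }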
        /// <summary>
        /// Finds a similar persisted face for the given face; if no match is found, the face is added to a managed face list.
        /// </summary>
        /// <param name="imageStream"></param>
        /// <param name="faceId"></param>
        /// <param name="faceRectangle"></param>
        /// <param name="complexScenario"> - Whether we are identifying face hard way - means whether we need to use db or not</param>
        /// <returns></returns>
        public static async Task <Tuple <SimilarPersistedFace, string> > FindSimilarPersistedFaceAsync(Stream imageStream, Guid faceId, FaceRectangle faceRectangle)
        {
            if (faceLists == null)
            {
                await Initialize();
            }

            Tuple <SimilarPersistedFace, string> bestMatch = null;

            var faceListId = faceLists.FirstOrDefault().Key;

            //Delete for testing purposes
            //await FaceServiceHelper.DeleteFaceListAsync(faceListId);
            try
            {
                SimilarPersistedFace similarFace = null;
                if (faceListId != null)
                {
                    try
                    {
                        similarFace = (await FaceServiceHelper.FindSimilarAsync(faceId, faceListId))?.FirstOrDefault();
                    }
                    catch (Exception e)
                    {
                        if ((e as FaceAPIException)?.ErrorCode == "FaceListNotReady")
                        {
                            // do nothing, list is empty but we continue
                        }
                    }
                }
                if (similarFace != null)
                {
                    if (bestMatch != null)
                    {
                        // We already found a match for this face in another list. Replace the previous one if the new confidence is higher.
                        if (bestMatch.Item1.Confidence < similarFace.Confidence)
                        {
                            bestMatch = new Tuple <SimilarPersistedFace, string>(similarFace, faceListId);
                        }
                    }
                    else
                    {
                        bestMatch = new Tuple <SimilarPersistedFace, string>(similarFace, faceListId);
                    }
                }
                else
                {
                    // If we are here we didn't find a match, so let's add the face to the first FaceList that we can add it to. We
                    // might create a new list if none exist, and if all lists are full we will delete the oldest face list (based on when we
                    // last matched anything on it) so that we can add the new one.

                    if (!faceLists.Any())
                    {
                        // We don't have any FaceLists yet. Create one
                        string newFaceListId = Guid.NewGuid().ToString();
                        // await FaceServiceHelper.CreateFaceListAsync(newFaceListId, "ManagedFaceList", FaceListsUserDataFilter);
                        //We are not using filters
                        await FaceServiceHelper.CreateFaceListAsync(newFaceListId, "ManagedFaceList");

                        faceLists.Add(newFaceListId, new FaceListInfo {
                            FaceListId = newFaceListId, LastMatchTimestamp = DateTime.Now
                        });
                    }

                    AddPersistedFaceResult addResult = null;

                    var faceList = faceLists.First();
                    if (faceList.Value.IsFull)
                    {
                        //It is here only for complex scenario where we use groups and lists mappings
                        try
                        {
                            DBSimilarFace faceToDelete = null;
                            using (var db = new KioskDBContext())
                            {
                                faceToDelete = db.SimilarFaces.OrderByDescending(sf => sf.PersonId).First();
                                db.SimilarFaces.Remove(faceToDelete);
                                await db.SaveChangesAsync();
                            }

                            await FaceServiceHelper.DeleteFaceFromFaceListAsync(faceList.Key, new Guid(faceToDelete.FaceId));
                        }
                        catch (Exception e)
                        {
                            Debug.WriteLine("No face to be deleted" + e.Message);
                        }
                    }

                    addResult = await FaceServiceHelper.AddFaceToFaceListAsync(faceList.Key, imageStream, faceRectangle);

                    if (addResult != null)
                    {
                        bestMatch = new Tuple <SimilarPersistedFace, string>(new SimilarPersistedFace {
                            Confidence = 1, PersistedFaceId = addResult.PersistedFaceId
                        }, null);
                    }
                }
            }
            catch (Exception e)
            {
                // Catch errors with individual face lists so we can continue looping through all lists. Maybe an answer will come from
                // another one.
                ErrorTrackingHelper.TrackException(e, "Face API FindSimilarAsync error");
            }

            return(bestMatch);
        }
Example #30
        private static async Task <SimilarFace> FindSimilarOrInsertAsync(string imageUrl, Func <Task <Stream> > imageStreamCallback, Guid faceId, DetectedFace face)
        {
            if (faceLists == null)
            {
                await Initialize();
            }

            Tuple <SimilarFace, string> bestMatch = null;

            bool foundMatch = false;

            foreach (var faceListId in faceLists.Keys)
            {
                try
                {
                    SimilarFace similarFace = (await FaceServiceHelper.FindSimilarAsync(faceId, faceListId))?.FirstOrDefault();
                    if (similarFace == null)
                    {
                        continue;
                    }

                    foundMatch = true;

                    if (bestMatch != null)
                    {
                        // We already found a match for this face in another list. Replace the previous one if the new confidence is higher.
                        if (bestMatch.Item1.Confidence < similarFace.Confidence)
                        {
                            bestMatch = new Tuple <SimilarFace, string>(similarFace, faceListId);
                        }
                    }
                    else
                    {
                        bestMatch = new Tuple <SimilarFace, string>(similarFace, faceListId);
                    }
                }
                catch (Exception e)
                {
                    // Catch errors with individual face lists so we can continue looping through all lists. Maybe an answer will come from
                    // another one.
                    ErrorTrackingHelper.TrackException(e, "Face API FindSimilarAsync error");
                }
            }

            if (!foundMatch)
            {
                // If we are here we didn't find a match, so let's add the face to the first FaceList that we can add it to. We
                // might create a new list if none exist, and if all lists are full we will delete the oldest face list (based on when we
                // last matched anything on it) so that we can add the new one.

                double maxAngle = 30;
                if (face.FaceAttributes.HeadPose != null &&
                    (Math.Abs(face.FaceAttributes.HeadPose.Yaw) > maxAngle ||
                     Math.Abs(face.FaceAttributes.HeadPose.Pitch) > maxAngle ||
                     Math.Abs(face.FaceAttributes.HeadPose.Roll) > maxAngle))
                {
                    // This isn't a good frontal shot, so let's not use it as the primary example face for this person
                    return(null);
                }

                if (!faceLists.Any())
                {
                    // We don't have any FaceLists yet. Create one
                    string newFaceListId = Guid.NewGuid().ToString();
                    await FaceServiceHelper.CreateFaceListAsync(newFaceListId, "ManagedFaceList", FaceListsUserDataFilter);

                    faceLists.Add(newFaceListId, new FaceListInfo {
                        FaceListId = newFaceListId, LastMatchTimestamp = DateTime.Now
                    });
                }

                PersistedFace addResult = null;
                bool          failedToAddToNonFullList = false;
                foreach (var faceList in faceLists)
                {
                    if (faceList.Value.IsFull)
                    {
                        continue;
                    }

                    try
                    {
                        if (imageUrl != null)
                        {
                            addResult = await FaceServiceHelper.AddFaceToFaceListFromUrlAsync(faceList.Key, imageUrl, face.FaceRectangle);
                        }
                        else
                        {
                            addResult = await FaceServiceHelper.AddFaceToFaceListFromStreamAsync(faceList.Key, imageStreamCallback, face.FaceRectangle);
                        }
                        break;
                    }
                    catch (Exception ex)
                    {
                        if (ex is APIErrorException && ((APIErrorException)ex).Response.StatusCode == (System.Net.HttpStatusCode) 403)
                        {
                            // FaceList is full. Continue so we can try again with the next FaceList
                            faceList.Value.IsFull = true;
                            continue;
                        }
                        else
                        {
                            failedToAddToNonFullList = true;
                            break;
                        }
                    }
                }

                if (addResult == null && !failedToAddToNonFullList)
                {
                    // We were not able to add the face to an existing list because they were all full.

                    // If possible, let's create a new list now and add the new face to it. If we can't (e.g. we already maxed out on list count),
                    // let's delete an old list, create a new one and add the new face to it.

                    if (faceLists.Count == MaxFaceListCount)
                    {
                        // delete oldest face list
                        var oldestFaceList = faceLists.OrderBy(fl => fl.Value.LastMatchTimestamp).FirstOrDefault();
                        faceLists.Remove(oldestFaceList.Key);
                        await FaceServiceHelper.DeleteFaceListAsync(oldestFaceList.Key);
                    }

                    // create new list
                    string newFaceListId = Guid.NewGuid().ToString();
                    await FaceServiceHelper.CreateFaceListAsync(newFaceListId, "ManagedFaceList", FaceListsUserDataFilter);

                    faceLists.Add(newFaceListId, new FaceListInfo {
                        FaceListId = newFaceListId, LastMatchTimestamp = DateTime.Now
                    });

                    // Add face to new list
                    if (imageUrl != null)
                    {
                        addResult = await FaceServiceHelper.AddFaceToFaceListFromUrlAsync(newFaceListId, imageUrl, face.FaceRectangle);
                    }
                    else
                    {
                        addResult = await FaceServiceHelper.AddFaceToFaceListFromStreamAsync(newFaceListId, imageStreamCallback, face.FaceRectangle);
                    }
                }

                if (addResult != null)
                {
                    bestMatch = new Tuple <SimilarFace, string>(new SimilarFace {
                        Confidence = 1, PersistedFaceId = addResult.PersistedFaceId
                    }, null);
                }
            }

            return(bestMatch?.Item1);
        }