// Example no. 1 (score: 0)
        /// <summary>
        /// Detects emotions in the current image, preferring <c>ImageUrl</c> over the
        /// stream callback. Stores the result in <c>DetectedEmotion</c>; on any failure
        /// the result is an empty sequence (never null) and, optionally, a dialog is shown.
        /// Always raises <c>OnEmotionRecognitionCompleted</c> when finished.
        /// </summary>
        public async Task DetectEmotionAsync()
        {
            try
            {
                if (this.ImageUrl != null)
                {
                    this.DetectedEmotion = await EmotionServiceHelper.RecognizeAsync(this.ImageUrl);
                }
                else if (this.GetImageStreamCallback != null)
                {
                    this.DetectedEmotion = await EmotionServiceHelper.RecognizeAsync(this.GetImageStreamCallback);
                }

                // If neither image source was provided, keep the non-null contract
                // instead of letting the filter below throw on a null sequence.
                if (this.DetectedEmotion == null)
                {
                    this.DetectedEmotion = Enumerable.Empty<Emotion>();
                }

                if (this.FilterOutSmallFaces)
                {
                    // ToList() materializes the filtered query once; a deferred Where()
                    // would re-run the size predicate on every enumeration by callers.
                    this.DetectedEmotion = this.DetectedEmotion
                        .Where(f => CoreUtil.IsFaceBigEnoughForDetection(f.FaceRectangle.Height, this.DecodedImageHeight))
                        .ToList();
                }
            }
            catch (Exception e)
            {
                ErrorTrackingHelper.TrackException(e, "Emotion API RecognizeAsync error");

                // Expose an empty (never null) result so callers can enumerate safely.
                this.DetectedEmotion = Enumerable.Empty<Emotion>();

                if (this.ShowDialogOnFaceApiErrors)
                {
                    await ErrorTrackingHelper.GenericApiCallExceptionHandler(e, "Emotion detection failed.");
                }
            }
            finally
            {
                // Notify listeners regardless of success or failure.
                this.OnEmotionRecognitionCompleted();
            }
        }
        /// <summary>
        /// Detects emotions in the current image (lab-exercise variant), preferring
        /// <c>ImageUrl</c> over the stream callback. Stores the result in
        /// <c>DetectedEmotion</c>; on failure the result is an empty sequence (never null).
        /// Always raises <c>OnEmotionRecognitionCompleted</c> when finished.
        /// </summary>
        public async Task DetectEmotionAsync()
        {
            try
            {
                // #1: if an image URL is available, recognize emotions from it.
                if (this.ImageUrl != null)
                {
                    this.DetectedEmotion = await EmotionServiceHelper.RecognizeAsync(this.ImageUrl);
                }
                // #2: otherwise fall back to the image-stream callback.
                else if (this.GetImageStreamCallback != null)
                {
                    this.DetectedEmotion = await EmotionServiceHelper.RecognizeAsync(this.GetImageStreamCallback);
                }

                // #3: optionally drop faces too small for reliable detection.
                // ToList() materializes the query once instead of re-evaluating
                // the predicate on every enumeration.
                if (this.FilterOutSmallFaces)
                {
                    this.DetectedEmotion = this.DetectedEmotion.Where(f => CoreUtil.IsFaceBigEnoughForDetection(f.FaceRectangle.Height, this.DecodedImageHeight)).ToList();
                }
            }
            catch (Exception e)
            {
                // #4: record the failure and expose an empty emotion list.
                ErrorTrackingHelper.TrackException(e, "Emotion API RecognizeAsync error");

                // Assigning null here would force every caller to null-check;
                // an empty sequence keeps the contract of the success path.
                this.DetectedEmotion = Enumerable.Empty<Emotion>();

                if (this.ShowDialogOnFaceApiErrors)
                {
                    await ErrorTrackingHelper.GenericApiCallExceptionHandler(e, "Emotion detection failed.");
                }

#if DEBUG
                // `throw;` rethrows preserving the original stack trace;
                // `throw e;` would reset it and hide the real failure site.
                throw;
#endif
            }
            finally
            {
                // #5: always raise the completion event, success or failure.
                this.OnEmotionRecognitionCompleted();
            }
        }
        /// <summary>
        /// Detects faces in the current image, preferring <c>ImageUrl</c> over the
        /// stream callback. Stores the result in <c>DetectedFaces</c>; on failure the
        /// result is an empty sequence and <c>ErrorMessage</c> is set when dialogs are
        /// enabled. Always raises <c>OnFaceDetectionCompleted</c> when finished.
        /// </summary>
        /// <param name="detectFaceAttributes">When true, requests the default face attribute set.</param>
        /// <param name="detectFaceLandmarks">When true, requests face landmarks.</param>
        public async Task DetectFacesAsync(bool detectFaceAttributes = false, bool detectFaceLandmarks = false)
        {
            try
            {
                if (this.ImageUrl != null)
                {
                    this.DetectedFaces = await FaceServiceHelper.DetectAsync(
                        this.ImageUrl,
                        returnFaceId: true,
                        returnFaceLandmarks: detectFaceLandmarks,
                        returnFaceAttributes: detectFaceAttributes ? DefaultFaceAttributeTypes : null);
                }
                else if (this.GetImageStreamCallback != null)
                {
                    this.DetectedFaces = await FaceServiceHelper.DetectAsync(
                        this.GetImageStreamCallback,
                        returnFaceId: true,
                        returnFaceLandmarks: detectFaceLandmarks,
                        returnFaceAttributes: detectFaceAttributes ? DefaultFaceAttributeTypes : null);
                }

                if (this.FilterOutSmallFaces)
                {
                    // ToList() materializes the filtered query once; a deferred Where()
                    // would re-run the size predicate on every enumeration by callers.
                    this.DetectedFaces = this.DetectedFaces
                        .Where(f => CoreUtil.IsFaceBigEnoughForDetection(f.FaceRectangle.Height, this.DecodedImageHeight))
                        .ToList();
                }
            }
            catch (Exception e)
            {
                ErrorTrackingHelper.TrackException(e, "Face API DetectAsync error");

                // Expose an empty (never null) result so callers can enumerate safely.
                this.DetectedFaces = Enumerable.Empty<Face>();

                if (this.ShowDialogOnFaceApiErrors)
                {
                    // This variant surfaces the error via ErrorMessage instead of a dialog.
                    ErrorMessage = e.Message + " - Face Detection Failed";
                }
            }
            finally
            {
                // Notify listeners regardless of success or failure.
                this.OnFaceDetectionCompleted();
            }
        }
// Example no. 4 (score: 0)
        /// <summary>
        /// Detects emotions for every previously recognized face by passing the faces'
        /// bounding rectangles to the Emotion API. Stores the result in
        /// <c>DetectedEmotion</c>; on failure the result is an empty sequence.
        /// Always raises <c>OnEmotionRecognitionCompleted</c> when finished.
        /// </summary>
        /// <returns>A task that completes when recognition has finished.</returns>
        public async Task DetectEmotionWithRectanglesAsync()
        {
            try
            {
                // Project each detected face's bounding box into the Emotion API's
                // Rectangle type (idiomatic Select instead of a manual build loop).
                Rectangle[] rectangles = this.DetectedFaces
                    .Select(f => new Rectangle()
                    {
                        Top = f.FaceRectangle.Top,
                        Height = f.FaceRectangle.Height,
                        Left = f.FaceRectangle.Left,
                        Width = f.FaceRectangle.Width
                    })
                    .ToArray();

                if (this.ImageUrl != null)
                {
                    this.DetectedEmotion = await EmotionServiceHelper.RecognizeWithFaceRectanglesAsync(this.ImageUrl, rectangles);
                }
                else if (this.GetImageStreamCallback != null)
                {
                    this.DetectedEmotion = await EmotionServiceHelper.RecognizeWithFaceRectanglesAsync(await this.GetImageStreamCallback(), rectangles);
                }

                if (this.FilterOutSmallFaces)
                {
                    // ToList() materializes the filtered query once; a deferred Where()
                    // would re-run the size predicate on every enumeration by callers.
                    this.DetectedEmotion = this.DetectedEmotion
                        .Where(f => CoreUtil.IsFaceBigEnoughForDetection(f.FaceRectangle.Height, this.DecodedImageHeight))
                        .ToList();
                }
            }
            catch (Exception e)
            {
                ErrorTrackingHelper.TrackException(e, "Emotion API RecognizeAsync error");

                // Expose an empty (never null) result so callers can enumerate safely.
                this.DetectedEmotion = Enumerable.Empty<Emotion>();

                if (this.ShowDialogOnFaceApiErrors)
                {
                    await ErrorTrackingHelper.GenericApiCallExceptionHandler(e, "Emotion detection failed.");
                }
            }
            finally
            {
                // Notify listeners regardless of success or failure.
                this.OnEmotionRecognitionCompleted();
            }
        }