Example #1
        private static FaceDetails MapToDomain(this Microsoft.ProjectOxford.Face.Contract.Face face)
        {
            var emotion = GetEmotionValueFromScores(face.FaceAttributes.Emotion);

            var domainEntity = new FaceDetails()
            {
                FaceId         = face.FaceId.ToString(),
                Age            = face.FaceAttributes.Age,
                Emotion        = emotion.emotionValue,
                EmotionScore   = emotion.emotionScore,
                HasBeard       = face.FaceAttributes.FacialHair.Beard > FacialHairThreshold,
                BeardScore     = face.FaceAttributes.FacialHair.Beard,
                HasMoustache   = face.FaceAttributes.FacialHair.Moustache > FacialHairThreshold,
                MoustacheScore = face.FaceAttributes.FacialHair.Moustache,
                HasSideburns   = face.FaceAttributes.FacialHair.Sideburns > FacialHairThreshold,
                SideburnsScore = face.FaceAttributes.FacialHair.Sideburns,
                Gender         = face.FaceAttributes.Gender == "male" ? Gender.Male : Gender.Female,
                HasGlasses     = face.FaceAttributes.Glasses != Microsoft.ProjectOxford.Face.Contract.Glasses.NoGlasses,
                GlassesType    = (GlassesType)face.FaceAttributes.Glasses,
                HeadPose       = new HeadPose()
                {
                    Pitch = face.FaceAttributes.HeadPose.Pitch, Roll = face.FaceAttributes.HeadPose.Roll, Yaw = face.FaceAttributes.HeadPose.Yaw
                },
                SmileScore  = face.FaceAttributes.Smile,
                BoundingBox = new Rectangle {
                    X = face.FaceRectangle.Left, Y = face.FaceRectangle.Top, Width = face.FaceRectangle.Width, Height = face.FaceRectangle.Height
                },
            };

            return domainEntity;
        }
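The mapper above relies on a GetEmotionValueFromScores helper that is not shown in this example. A minimal sketch of what such a helper could look like, assuming face.FaceAttributes.Emotion is the EmotionScores type from Microsoft.ProjectOxford.Common.Contract and the helper simply returns the name and value of the strongest score as a tuple; the tuple element names mirror the usage above, everything else is illustrative rather than part of the original code, and it needs System.Linq and System.Collections.Generic:

        private static (string emotionValue, float emotionScore) GetEmotionValueFromScores(
            Microsoft.ProjectOxford.Common.Contract.EmotionScores scores)
        {
            // Collect the named scores and keep the strongest one.
            var ranked = new Dictionary<string, float>
            {
                ["Anger"]     = scores.Anger,
                ["Contempt"]  = scores.Contempt,
                ["Disgust"]   = scores.Disgust,
                ["Fear"]      = scores.Fear,
                ["Happiness"] = scores.Happiness,
                ["Neutral"]   = scores.Neutral,
                ["Sadness"]   = scores.Sadness,
                ["Surprise"]  = scores.Surprise,
            };
            var top = ranked.OrderByDescending(kv => kv.Value).First();
            return (top.Key, top.Value);
        }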
Example #2
 public CognitiveImageAnalysis()
 {
     VisionAnalysis  = new AnalysisResult();
     TextAnalysis    = new OcrResults();
     EmotionAnalysis = new Emotion[0];
     FacialAnalysis  = new Microsoft.ProjectOxford.Face.Contract.Face[0];
 }
Example #3
        /// <summary> Function which submits a frame to the Emotion API. </summary>
        /// <param name="frame"> The video frame to submit. </param>
        /// <returns> A <see cref="Task{LiveCameraResult}"/> representing the asynchronous API call,
        ///     and containing the emotions returned by the API. </returns>
        private async Task<LiveCameraResult> EmotionAnalysisFunction(VideoFrame frame)
        {
            // Encode image. 
            var jpg = frame.Image.ToMemoryStream(".jpg", s_jpegParams);
            // Submit image to API. 
            FaceAPI.Contract.Face[] faces = null;

            // See if we have local face detections for this image.
            var localFaces = (OpenCvSharp.Rect[])frame.UserData;
            if (localFaces == null || localFaces.Count() > 0)
            {
                // If localFaces is null, we're not performing local face detection.
                // Use Cognitive Services to do the face detection.
                Properties.Settings.Default.FaceAPICallCount++;
                faces = await _faceClient.DetectAsync(
                    jpg,
                    /* returnFaceId= */ false,
                    /* returnFaceLandmarks= */ false,
                    new FaceAPI.FaceAttributeType[1] { FaceAPI.FaceAttributeType.Emotion });
            }
            else
            {
                // Local face detection found no faces; don't call Cognitive Services.
                faces = new FaceAPI.Contract.Face[0];
            }

            // Output. 
            return new LiveCameraResult
            {
                Faces = faces.Select(e => CreateFace(e.FaceRectangle)).ToArray(),
                // Extract emotion scores from results. 
                EmotionScores = faces.Select(e => e.FaceAttributes.Emotion).ToArray()
            };
        }
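A minimal wiring sketch for a per-frame analysis function like the one above, following the FrameGrabber pattern from Microsoft's VideoFrameAnalyzer sample that this code appears to be based on; the _grabber field, the camera index, and the method names are assumptions for illustration, not part of the example:

            // Plug the per-frame delegate into the grabber, then start pulling frames from camera 0.
            _grabber.AnalysisFunction = EmotionAnalysisFunction;
            await _grabber.StartProcessingCameraAsync(0);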
Example #4
        private void ProcessMouth(Microsoft.ProjectOxford.Face.Contract.Face f)
        {
            double mouthWidth  = Java.Lang.Math.Abs(f.FaceLandmarks.MouthRight.X - f.FaceLandmarks.MouthLeft.X);
            double mouthHeight = Java.Lang.Math.Abs(f.FaceLandmarks.UpperLipBottom.Y - f.FaceLandmarks.UnderLipTop.Y);

            mouthAperture = mouthHeight / mouthWidth;
        }
Example #5
        private void ProcessEyes(Microsoft.ProjectOxford.Face.Contract.Face f)
        {
            double leftEyeWidth  = Java.Lang.Math.Abs(f.FaceLandmarks.EyeLeftInner.X - f.FaceLandmarks.EyeLeftOuter.X);
            double leftEyeHeight = Java.Lang.Math.Abs(f.FaceLandmarks.EyeLeftBottom.Y - f.FaceLandmarks.EyeLeftTop.Y);

            double rightEyeWidth  = Java.Lang.Math.Abs(f.FaceLandmarks.EyeRightInner.X - f.FaceLandmarks.EyeRightOuter.X);
            double rightEyeHeight = Java.Lang.Math.Abs(f.FaceLandmarks.EyeRightBottom.Y - f.FaceLandmarks.EyeRightTop.Y);

            eyeAperture = Java.Lang.Math.Max(leftEyeHeight / leftEyeWidth, rightEyeHeight / rightEyeWidth);
        }
Example #6
 public MSFace(Microsoft.ProjectOxford.Face.Contract.Face f, Emotion e)
 {
     face    = f;
     emotion = e;
     marks   = new float[] {
         e.Scores.Anger * 100,
         e.Scores.Contempt * 100,
         e.Scores.Disgust * 100,
         e.Scores.Fear * 100,
         e.Scores.Happiness * 100,
         e.Scores.Sadness * 100,
         e.Scores.Surprise * 100,
     };
 }
Example #7
        public static async Task<StorageFile> SaveFaceImageFile(StorageFile file, Microsoft.ProjectOxford.Face.Contract.Face face)
        {
            // get face file
            var faceStartPoint = new Point(face.FaceRectangle.Left, face.FaceRectangle.Top);
            var faceSize       = new Size(face.FaceRectangle.Width, face.FaceRectangle.Height);

            // save face file
            var fileName      = string.Format(@"{0}.jpg", face.FaceId);
            var fileFaceImage =
                await ApplicationData.Current.TemporaryFolder.CreateFileAsync(fileName, CreationCollisionOption.ReplaceExisting);

            await CropBitmap.SaveCroppedBitmapAsync(file, fileFaceImage, faceStartPoint, faceSize);

            return fileFaceImage;
        }
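A hypothetical usage sketch for SaveFaceImageFile: detect the faces in a picked image and save a cropped file for each one. The picker and faceServiceClient names are assumptions for illustration and are not part of the example above:

            // Detect faces in the selected file and write each face region to its own JPEG in the temp folder.
            StorageFile file = await picker.PickSingleFileAsync();
            using (var stream = await file.OpenStreamForReadAsync())
            {
                var faces = await faceServiceClient.DetectAsync(stream);
                foreach (var face in faces)
                {
                    StorageFile faceFile = await SaveFaceImageFile(file, face);
                }
            }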
Example #8
        /// <summary> Function which submits a frame to the Face API. </summary>
        /// <param name="frame"> The video frame to submit. </param>
        /// <returns> A <see cref="Task{LiveCameraResult}"/> representing the asynchronous API call,
        ///     and containing the faces returned by the API. </returns>
        private async Task<LiveCameraResult> FacesAnalysisFunction(VideoFrame frame)
        {
            // Encode image.
            var jpg = frame.Image.ToMemoryStream(".jpg", s_jpegParams);
            // Submit image to API.
            var attrs = new List<FaceAPI.FaceAttributeType> {
                FaceAPI.FaceAttributeType.Age,
                FaceAPI.FaceAttributeType.Gender,
                FaceAPI.FaceAttributeType.HeadPose,
                FaceAPI.FaceAttributeType.Glasses,
                FaceAPI.FaceAttributeType.FacialHair,
            };

            var faces = await _faceClient.DetectAsync(jpg, returnFaceAttributes: attrs);

            // Count the API call.
            Properties.Settings.Default.FaceAPICallCount++;

            // Log the attributes of the first detected face, if any, for debugging.
            if (faces.Length > 0)
            {
                var attributes = faces[0].FaceAttributes;
                Trace.WriteLine($"{attributes.Age} {attributes.Gender} {attributes.Glasses}");
            }

            // Output.
            return new LiveCameraResult
            {
                Faces = faces
            };
        }
Example #9
        private static string GetFaceText(Face face)
        {
            string gender;

            switch (face.FaceAttributes.Gender)
            {
            case "male":
                gender = "man";
                break;

            case "female":
                gender = "woman";
                break;

            default:
                gender = "person";
                break;
            }
            return $"`A {face.FaceAttributes.Age}-year-old {gender}`";
        }
Example #10
        public void SetTransDriverlog(Tb_Trans_Driver_Logs t, Microsoft.ProjectOxford.Face.Contract.Face f)
        {
            t.EyeAperture            = eyeAperture;
            t.EyeLeftBottom_X        = f.FaceLandmarks.EyeLeftBottom.X;
            t.EyeLeftBottom_Y        = f.FaceLandmarks.EyeLeftBottom.Y;
            t.EyeLeftInner_X         = f.FaceLandmarks.EyeLeftInner.X;
            t.EyeLeftInner_Y         = f.FaceLandmarks.EyeLeftInner.Y;
            t.EyeLeftOuter_X         = f.FaceLandmarks.EyeLeftOuter.X;
            t.EyeLeftOuter_Y         = f.FaceLandmarks.EyeLeftOuter.Y;
            t.EyeLeftTop_X           = f.FaceLandmarks.EyeLeftTop.X;
            t.EyeLeftTop_Y           = f.FaceLandmarks.EyeLeftTop.Y;
            t.EyeRightBottom_X       = f.FaceLandmarks.EyeRightBottom.X;
            t.EyeRightBottom_Y       = f.FaceLandmarks.EyeRightBottom.Y;
            t.EyeRightInner_X        = f.FaceLandmarks.EyeRightInner.X;
            t.EyeRightInner_Y        = f.FaceLandmarks.EyeRightInner.Y;
            t.EyeRightOuter_X        = f.FaceLandmarks.EyeRightOuter.X;
            t.EyeRightOuter_Y        = f.FaceLandmarks.EyeRightOuter.Y;
            t.EyeRightTop_X          = f.FaceLandmarks.EyeRightTop.X;
            t.EyeRightTop_Y          = f.FaceLandmarks.EyeRightTop.Y;
            t.HeadPost_Pitch         = f.FaceAttributes.HeadPose.Pitch;
            t.HeadPost_PoseDeviation = headPoseDeviation;
            t.HeadPost_Roll          = f.FaceAttributes.HeadPose.Roll;
            t.HeadPost_Yaw           = f.FaceAttributes.HeadPose.Yaw;
            t.MouthAperture          = mouthAperture;
            t.MouthLeft_X            = f.FaceLandmarks.MouthLeft.X;
            t.MouthLeft_Y            = f.FaceLandmarks.MouthLeft.Y;
            t.MouthRight_X           = f.FaceLandmarks.MouthRight.X;
            t.MouthRight_Y           = f.FaceLandmarks.MouthRight.Y;
            t.RefRegisterID          = 1;
            t.TransID          = 1;
            t.TimeStamp        = DateTime.Now;
            t.UnderLipBottom_X = f.FaceLandmarks.UnderLipBottom.X;
            t.UnderLipBottom_Y = f.FaceLandmarks.UnderLipBottom.Y;
            t.UnderLipTop_X    = f.FaceLandmarks.UnderLipTop.X;
            t.UnderLipTop_Y    = f.FaceLandmarks.UnderLipTop.Y;
            t.UpperLipBottom_X = f.FaceLandmarks.UpperLipBottom.X;
            t.UpperLipBottom_Y = f.FaceLandmarks.UpperLipBottom.Y;
            t.UpperLipTop_X    = f.FaceLandmarks.UpperLipTop.X;
            t.UpperLipTop_Y    = f.FaceLandmarks.UpperLipTop.Y;
        }
Example #11
        private void Logging(Face face)
        {
            InvokeGuiThread(() =>
            {
                recognizedLog.AppendText(string.Format("\n Average age: {0}", face.FaceAttributes.Age));
                recognizedLog.AppendText(string.Format("\n Gender: {0}", face.FaceAttributes.Gender));
                recognizedLog.AppendText(string.Format("\n Glasses: {0}", face.FaceAttributes.Glasses));

                recognizedLog.AppendText(string.Format("\n Blur: {0}", face.FaceAttributes.Blur.Value));
                recognizedLog.AppendText(string.Format("\n Noise: {0}", face.FaceAttributes.Noise.Value));

                recognizedLog.AppendText(string.Format("\n Smile: {0}", face.FaceAttributes.Smile));
                recognizedLog.AppendText(string.Format("\n Happiness: {0}", face.FaceAttributes.Emotion.Happiness));
                recognizedLog.AppendText(string.Format("\n Anger: {0}", face.FaceAttributes.Emotion.Anger));
                recognizedLog.AppendText(string.Format("\n Contempt: {0}", face.FaceAttributes.Emotion.Contempt));
                recognizedLog.AppendText(string.Format("\n Disgust: {0}", face.FaceAttributes.Emotion.Disgust));
                recognizedLog.AppendText(string.Format("\n Fear: {0}", face.FaceAttributes.Emotion.Fear));
                recognizedLog.AppendText(string.Format("\n Neutral: {0}", face.FaceAttributes.Emotion.Neutral));
                recognizedLog.AppendText(string.Format("\n Sadness: {0}", face.FaceAttributes.Emotion.Sadness));
                recognizedLog.AppendText(string.Format("\n Surprise: {0}", face.FaceAttributes.Emotion.Surprise));
            });
        }
Example #12
        private async Task<LiveAnalyzeResult> EmotionAnalysisFunction(VideoFrame frame)
        {
            var jpg = frame.Image.ToMemoryStream(".jpg", s_jpegParams);

            FaceAPI.Contract.Face[] faces = null;

            var localFaces = (OpenCvSharp.Rect[])frame.UserData;

            if (localFaces == null || localFaces.Count() > 0)
            {
                // If localFaces is null, we're not performing local face detection.
                // Use Cognitive Services to do the face detection.
                //Properties.Settings.Default.FaceAPICallCount++;
                faces = await _faceClient.DetectAsync(
                    jpg,
                    /* returnFaceId= */ false,
                    /* returnFaceLandmarks= */ false,
                    new FaceAPI.FaceAttributeType[1] { FaceAPI.FaceAttributeType.Emotion });
            }
            else
            {
                faces = new FaceAPI.Contract.Face[0];
            }

            if (faces.Any())
            {
                return new LiveAnalyzeResult()
                {
                    EmotionFaces = faces.ToArray()
                };
            }

            return default(LiveAnalyzeResult);
        }
Example #13
        /// <summary>
        /// Append detected face to UI binding collection
        /// </summary>
        /// <param name="collections">UI binding collection</param>
        /// <param name="imagePath">Original image path, used for rendering face region</param>
        /// <param name="face">Face structure returned from service</param>
        public static void UpdateFace(ObservableCollection<FaceViewModel> collections, string imagePath, Microsoft.ProjectOxford.Face.Contract.Face face)
        {
            var renderingImage = LoadImageAppliedOrientation(imagePath);

            collections.Add(new FaceViewModel()
            {
                ImageFile = renderingImage,
                Left      = face.FaceRectangle.Left,
                Top       = face.FaceRectangle.Top,
                Width     = face.FaceRectangle.Width,
                Height    = face.FaceRectangle.Height,
                FaceId    = face.FaceId.ToString(),
            });
        }
Example #14
 /// <summary>
 /// Append detected face to UI binding collection
 /// </summary>
 /// <param name="collections">UI binding collection</param>
 /// <param name="path">Original image path, used for rendering face region</param>
 /// <param name="face">Face structure returned from service</param>
 public static void UpdateFace(ObservableCollection<Face> collections, string path, Microsoft.ProjectOxford.Face.Contract.Face face)
 {
     collections.Add(new Face()
     {
         ImagePath = path,
         Left      = face.FaceRectangle.Left,
         Top       = face.FaceRectangle.Top,
         Width     = face.FaceRectangle.Width,
         Height    = face.FaceRectangle.Height,
         FaceId    = face.FaceId.ToString(),
     });
 }
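A hypothetical usage sketch for UpdateFace: run detection on an image file and append every returned face to the bound collection. faceServiceClient and ResultCollection are assumed names, not part of the original example:

     // Detect faces in the image and push each one into the UI collection for rendering.
     using (var stream = File.OpenRead(imagePath))
     {
         var faces = await faceServiceClient.DetectAsync(stream);
         foreach (var face in faces)
         {
             UpdateFace(ResultCollection, imagePath, face);
         }
     }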
Example #15
        private void ProcessHeadPose(Microsoft.ProjectOxford.Face.Contract.Face f)
        {
            headPoseDeviation = Java.Lang.Math.Abs(f.FaceAttributes.HeadPose.Yaw);

            //  this.headPoseIndicator.Margin = new Thickness((-f.FaceAttributes.HeadPose.Yaw / 90) * headPoseIndicatorHost.ActualWidth / 2, 0, 0, 0);
        }