Example #1
        private async Task <LiveAnalyzeResult> FacesAnalysisFunction(VideoFrame frame)
        {
            var jpg   = frame.Image.ToMemoryStream(".jpg", s_jpegParams);
            var attrs = new List <FaceAPI.FaceAttributeType> {
                FaceAPI.FaceAttributeType.Age,
                FaceAPI.FaceAttributeType.Gender,
                FaceAPI.FaceAttributeType.HeadPose
            };

            var faces = await _faceClient.DetectAsync(jpg, returnFaceAttributes: attrs);

            var resultList = new List <string>();

            var faceIds = faces.Select(face => face.FaceId).ToArray();

            var identifyRes = await _faceClient.IdentifyAsync(SolutionConstant.personGroupId, faceIds);

            foreach (var identifyResult in identifyRes)
            {
                if (identifyResult.Candidates.Length > 0)
                {
                    // Take the top candidate returned by Identify (the highest-confidence match).
                    var candidateId = identifyResult.Candidates[0].PersonId;
                    var person      = await _faceClient.GetPersonAsync(SolutionConstant.personGroupId, candidateId);

                    var result = $"{identifyResult.FaceId} is identified as '{person.Name}' in {SolutionConstant.personGroupId} person group!";

                    resultList.Add(result);
                }
            }

            return(new LiveAnalyzeResult()
            {
                FaceIdentifyResult = resultList.ToArray()
            });
        }
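Example #1 assumes that the person group referenced by SolutionConstant.personGroupId already exists and has been trained; IdentifyAsync fails otherwise. The sketch below shows one way that one-time setup could look with the same ProjectOxford FaceServiceClient; the group name, person name, and image path are placeholder assumptions, and method names differ in newer Face SDKs.
        // Hypothetical one-time setup for the person group used by IdentifyAsync above.
        // Assumes System.IO for File and a placeholder enrollment image path.
        private async Task CreateAndTrainPersonGroupAsync(string personGroupId)
        {
            // Create the group (the display name is arbitrary).
            await _faceClient.CreatePersonGroupAsync(personGroupId, "Sample person group");

            // Add a person and enroll one face from a local image.
            var person = await _faceClient.CreatePersonAsync(personGroupId, "Sample Person");
            using (var image = File.OpenRead("enrollment.jpg"))
            {
                await _faceClient.AddPersonFaceAsync(personGroupId, person.PersonId, image);
            }

            // Train the group so subsequent Identify calls can match against it.
            await _faceClient.TrainPersonGroupAsync(personGroupId);
        }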
Example #2
        /// <summary> Function which submits a frame to the Face API. </summary>
        /// <param name="frame"> The video frame to submit. </param>
        /// <returns> A <see cref="Task{LiveCameraResult}"/> representing the asynchronous API call,
        ///     and containing the faces returned by the API. </returns>
        private async Task <LiveCameraResult> FacesAnalysisFunction(VideoFrame frame)
        {
            // Encode image.
            var jpg = frame.Image.ToMemoryStream(".jpg", s_jpegParams);
            // Submit image to API.
            var attrs = new List <FaceAPI.FaceAttributeType> {
                FaceAPI.FaceAttributeType.Age,
                FaceAPI.FaceAttributeType.Gender,
                FaceAPI.FaceAttributeType.HeadPose,
                FaceAPI.FaceAttributeType.Glasses,
                FaceAPI.FaceAttributeType.Hair,
                FaceAPI.FaceAttributeType.FacialHair,
                FaceAPI.FaceAttributeType.Makeup,
                FaceAPI.FaceAttributeType.Smile
            };
            var faces = await _faceClient.DetectAsync(jpg, returnFaceAttributes : attrs);

            // Count the API call.
            Properties.Settings.Default.FaceAPICallCount++;
            // Output.
            return(new LiveCameraResult {
                Faces = faces
            });
        }
Example #3
        public static async Task <Microsoft.ProjectOxford.Face.Contract.Face[]> DetectFace(MemoryStream msImage)
        {
            IEnumerable <FaceAttributeType> fat = new List <FaceAttributeType>()
            {
                FaceAttributeType.Age
                , FaceAttributeType.Gender
                , FaceAttributeType.HeadPose
                , FaceAttributeType.Smile
                , FaceAttributeType.FacialHair
                , FaceAttributeType.Glasses
                , FaceAttributeType.Emotion
                , FaceAttributeType.Hair
                , FaceAttributeType.Makeup
                , FaceAttributeType.Occlusion
                , FaceAttributeType.Accessories
                , FaceAttributeType.Blur
                , FaceAttributeType.Exposure
                , FaceAttributeType.Noise
            };

            try
            {
                var faces = await fsClient.DetectAsync(msImage, true, true, fat);

                return(faces);
            }
            catch (Exception)
            {
                // Detection failed; return null so the caller can decide how to handle it.
                return(null);
            }
        }
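A possible caller for DetectFace, reading an image from disk; the file path and helper name are placeholders, and the attribute properties are the ones the ProjectOxford contract exposes.
        // Hypothetical usage of DetectFace: load an image file and print a few attributes.
        public static async Task PrintFaceAttributesAsync(string imagePath)
        {
            using (var ms = new MemoryStream(File.ReadAllBytes(imagePath)))
            {
                var faces = await DetectFace(ms);
                if (faces == null)
                {
                    Console.WriteLine("Detection failed.");
                    return;
                }

                foreach (var face in faces)
                {
                    Console.WriteLine($"{face.FaceId}: age {face.FaceAttributes.Age}, gender {face.FaceAttributes.Gender}");
                }
            }
        }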
Example #4
        /// <summary> Function which submits a frame to the Face API. </summary>
        /// <param name="frame"> The video frame to submit. </param>
        /// <returns> A <see cref="Task{LiveCameraResult}"/> representing the asynchronous API call,
        ///     and containing the faces returned by the API. </returns>
        private async Task <LiveCameraResult> FacesIdentifyAnalysisFunctionWithClient(VideoFrame frame)
        {
            // Encode the image.
            var jpg = frame.Image.ToMemoryStream(".jpg", s_jpegParams);
            // Submit the image to the API.
            var faces = await _faceClient.DetectAsync(jpg);

            // Identify the detected faces.
            // The result contains information about each identified person.
            var identifyResult = await _faceClient.IdentifyAsync
                                     (faces.Select(ff => ff.FaceId).ToArray(), largePersonGroupId : this.GroupId);

            Properties.Settings.Default.FaceAPICallCount++;

            // Return the result.
            return(new LiveCameraResult {
                Faces = faces, IdentifyResults = identifyResult
            });
        }
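If a readable summary is wanted instead of the raw result objects, the identify results can be flattened as sketched below; this relies only on the FaceId, PersonId, and Confidence members already used in these examples, and the helper itself is an assumption.
        // Hypothetical helper: turn identification results into display strings.
        // Assumes System.Linq and the Microsoft.ProjectOxford.Face.Contract types.
        private static string[] SummarizeIdentifyResults(Microsoft.ProjectOxford.Face.Contract.IdentifyResult[] results)
        {
            return results
                   .Select(r => r.Candidates.Length == 0
                       ? $"{r.FaceId}: no match"
                       : $"{r.FaceId}: person {r.Candidates[0].PersonId} (confidence {r.Candidates[0].Confidence:0.00})")
                   .ToArray();
        }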
Example #5
        /// <summary>
        /// Builds the list of face attributes to request and submits the frame to the Face API.
        /// </summary>
        /// <param name="frame">Current picture</param>
        /// <returns>Analyzed LiveCameraResult</returns>
        private async Task <LiveCameraResult> FacesAnalysisFunction(VideoFrame frame)
        {
            MemoryStream jpg = frame.Image.ToMemoryStream(".jpg", JpegParams);

            List <FaceAPI.FaceAttributeType> attrs = new List <FaceAPI.FaceAttributeType>
            {
                FaceAPI.FaceAttributeType.Age,
                FaceAPI.FaceAttributeType.Gender,
                FaceAPI.FaceAttributeType.Emotion,
                FaceAPI.FaceAttributeType.Hair,
                FaceAPI.FaceAttributeType.Exposure
            };

            Face[] faces = await FaceClient.DetectAsync(jpg, returnFaceAttributes : attrs, returnFaceLandmarks : true);

            EmotionScores[] scores = faces.Select(e => e.FaceAttributes.Emotion).ToArray();

            foreach (Face face in faces)
            {
                Helper.ConsoleLog($"-\nDetected face: {face.FaceId} with Attributes:\n\tAge: {face.FaceAttributes.Age}\n\tGender: {face.FaceAttributes.Gender}\n\tDominant Emotion: {Helper.GetDominantEmotionAsString(face.FaceAttributes.Emotion)}\n\tExposure: {face.FaceAttributes.Exposure.ExposureLevel}, with value of {face.FaceAttributes.Exposure.Value}\n");

                if (!await CheckIfFaceWasSeenBefore(face.FaceId, FaceClient, _facesGuids))
                {
                    _facesGuids.Add(face.FaceId);
                    StatisticsData.UpdateStatistics(face.FaceAttributes);

                    if (_statisticsWindow.IsLoaded)
                    {
                        _statisticsWindow.SetStatistics(StatisticsData);
                    }

                    Helper.ConsoleLog(face.FaceId + " is new! [" + _facesGuids.Count + "]");
                }

                StatisticsData.UpdateHappiness(face.FaceAttributes.Emotion.Happiness);
                _statisticsWindow.SetHappinessGauge(StatisticsData.Happiness);
            }

            return(new LiveCameraResult {
                Faces = faces, EmotionScores = scores
            });
        }
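The CheckIfFaceWasSeenBefore helper is not part of this snippet. One plausible implementation, sketched below, verifies the new face id against the ids collected so far with the Face API's verify call; the signature and the use of IsIdentical are assumptions.
        // Hypothetical sketch of the helper used above: returns true when any stored
        // face id verifies as the same person as the newly detected face.
        private static async Task<bool> CheckIfFaceWasSeenBefore(Guid newFaceId, FaceAPI.FaceServiceClient client, IEnumerable<Guid> knownFaceIds)
        {
            foreach (var knownId in knownFaceIds)
            {
                // VerifyAsync compares two detected face ids and reports whether they match.
                var result = await client.VerifyAsync(newFaceId, knownId);
                if (result.IsIdentical)
                {
                    return true;
                }
            }

            return false;
        }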
Example #6
        /// <summary> Function which submits a frame to the Emotion API. </summary>
        /// <param name="frame"> The video frame to submit. </param>
        /// <returns> A <see cref="Task{LiveCameraResult}"/> representing the asynchronous API call,
        ///     and containing the emotions returned by the API. </returns>
        private async Task <LiveCameraResult> EmotionAnalysisFunction(VideoFrame frame)
        {
            // Encode image.
            var jpg = frame.Image.ToMemoryStream(".jpg", s_jpegParams);

            // Submit image to API.
            FaceAPI.Contract.Face[] faces = null;

            // See if we have local face detections for this image.
            var localFaces = (OpenCvSharp.Rect[])frame.UserData;

            if (localFaces == null || localFaces.Count() > 0)
            {
                // If localFaces is null, we're not performing local face detection.
                // Use Cognitive Services to do the face detection.
                Properties.Settings.Default.FaceAPICallCount++;
                faces = await _faceClient.DetectAsync(
                    jpg,
                    /* returnFaceId= */ false,
                    /* returnFaceLandmarks= */ false,
                    new FaceAPI.FaceAttributeType[1] {
                    FaceAPI.FaceAttributeType.Emotion
                });
            }
            else
            {
                // Local face detection found no faces; don't call Cognitive Services.
                faces = new FaceAPI.Contract.Face[0];
            }

            // Output.
            return(new LiveCameraResult
            {
                Faces = faces.Select(e => CreateFace(e.FaceRectangle)).ToArray(),
                // Extract emotion scores from results.
                EmotionScores = faces.Select(e => e.FaceAttributes.Emotion).ToArray()
            });
        }
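The CreateFace helper is not shown in this excerpt; a minimal sketch, under the assumption that it only needs to carry the detected rectangle forward into a fresh Face object:
        // Hypothetical sketch of the CreateFace helper referenced above: copy the
        // detected rectangle into a new Face so only the geometry is kept.
        private FaceAPI.Contract.Face CreateFace(FaceAPI.Contract.FaceRectangle rect)
        {
            return new FaceAPI.Contract.Face
            {
                FaceRectangle = new FaceAPI.Contract.FaceRectangle
                {
                    Left = rect.Left,
                    Top = rect.Top,
                    Width = rect.Width,
                    Height = rect.Height
                }
            };
        }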
        private async Task <LiveCameraResult> AnalysisFunction(VideoFrame frame)
        {
            // Reset data (the delegate below is a no-op placeholder in this sample).
            await Dispatcher.BeginInvoke((Action)(() =>
            {
            }));

            // Encode image.
            var jpg = frame.Image.ToMemoryStream(".jpg", s_jpegParams);

            var faces = await _faceClient.DetectAsync(jpg);

            var faceIds = faces.Select(face => face.FaceId).ToArray();

            // Submit image to API.
            var results = await _faceClient.IdentifyAsync("residents", faceIds);

            Color? colorToUse = null;

            foreach (var identifyResult in results)
            {
                Console.WriteLine("Result of face: {0}", identifyResult.FaceId);
                if (identifyResult.Candidates.Length == 0)
                {
                    Console.WriteLine("No one identified");
                    await Dispatcher.BeginInvoke((Action)(() =>
                    {
                        VisitorImage.Visibility = Visibility.Visible;
                    }));

                    try
                    {
                        await notificationClient.SendAppleNativeNotificationAsync("{ \"elevator\": true, \"aircon\": false }");
                    }
                    catch (Exception)
                    {
                        // Ignore notification failures.
                    }
                }
                else
                {
                    // Get top 1 among all candidates returned
                    var candidateId = identifyResult.Candidates[0].PersonId;
                    var person      = await _faceClient.GetPersonAsync("residents", candidateId);

                    Console.WriteLine("Identified as {0}", person.Name);
                    if (person.PersonId == saschaPersonId)
                    {
                        colorToUse = new Color {
                            R = 0, G = 255, B = 0, A = 255
                        };
                        await Dispatcher.BeginInvoke((Action)(() =>
                        {
                            ResidentImage.Visibility = Visibility.Visible;
                            PackageImage.Visibility = Visibility.Visible;
                        }));

                        try
                        {
                            await notificationClient.SendAppleNativeNotificationAsync("{\"aps\": { \"content-available\": 1, \"elevator\": true, \"aircon\": true }}");
                        }
                        catch (Exception)
                        {
                            // Ignore notification failures.
                        }
                    }
                }
            }

            return(new LiveCameraResult {
                Faces = faces, Color = colorToUse
            });
        }
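The notificationClient above is an Azure Notification Hubs client; it could be constructed roughly as below. The connection string and hub name are placeholders, and SendAppleNativeNotificationAsync takes the raw APNs JSON payload shown in the snippet.
        // Hypothetical initialization of the Notification Hubs client used above
        // (Microsoft.Azure.NotificationHubs). Connection string and hub name are placeholders.
        private static readonly NotificationHubClient notificationClient =
            NotificationHubClient.CreateClientFromConnectionString(
                "Endpoint=sb://<your-namespace>.servicebus.windows.net/;SharedAccessKeyName=<key-name>;SharedAccessKey=<key>",
                "<hub-name>");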
		private async void DetectAsync()
		{
			Shell.SetBusyVisibility( Visibility.Visible, "Taking photo.." );

			this.operationMode = OperationMode.Detect;

			this.viewModel.PhotoFile = await this.camera.CapturePhotoToFileAsync();
			await this.camera.CaptureManager.StopPreviewAsync();

			if( this.led != null )
			{
				this.led.TurnOff();
			}

			Shell.SetBusyVisibility( Visibility.Visible, "Detecting your face.." );
			
			Face.FaceServiceClient faceClient = new Face.FaceServiceClient( FACE_API_KEY );
			Stream stream = await this.viewModel.PhotoFile.OpenStreamForReadAsync();
			Face.Contract.Face[] faces = await faceClient.DetectAsync( stream, analyzesAge: true, analyzesGender: true );

			VoiceGender voiceGender = VoiceGender.Male;
			if( faces.Length == 1 )
			{
				Face.Contract.FaceAttribute face = faces[ 0 ].Attributes;
				string greet;

				if( face.Gender == "male" )
				{
					greet = "Hello Handsome!";
					voiceGender = VoiceGender.Female;
				}
				else
				{
					greet = "Hey, Sexy!";
					voiceGender = VoiceGender.Male;
				}
				this.viewModel.Greet = $"{greet} You look {face.Age} today.";

				await this.SpeakAsync( this.viewModel.Greet, voiceGender, true );
			}
			else
			{
				this.viewModel.Greet = "I cannot see your face :(";
			}

			Shell.SetBusyVisibility( Visibility.Visible, "Detecting your emotions.." );
			
			Emotion.EmotionServiceClient emotionClient = new Emotion.EmotionServiceClient( EMOTION_API_KEY );

			stream = await this.viewModel.PhotoFile.OpenStreamForReadAsync();
			Emotion.Contract.Emotion[] emotions = await emotionClient.RecognizeAsync( stream );
			if( emotions.Length == 1 )
			{
				Emotion.Contract.Scores scores = emotions[ 0 ].Scores;
				this.viewModel.Scores = scores;

				bool like = scores.Happiness > scores.Anger + scores.Sadness + scores.Disgust;

				this.viewModel.EvaluationResult = like
					? "So you liked it! I'm so happy to hear that! :)"
					: "Oh, really? I'm terribly sorry! :(";
				await this.SpeakAsync( this.viewModel.EvaluationResult, voiceGender, false );
			}
			else
			{
				this.viewModel.EvaluationResult = "I cannot see your emotions :(";
			}

			this.operationMode = OperationMode.Done;

			Shell.SetBusyVisibility( Visibility.Collapsed );
		}
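SpeakAsync and the camera/LED helpers are defined elsewhere in this project. A minimal sketch of what SpeakAsync could look like on UWP, assuming VoiceGender is Windows.Media.SpeechSynthesis.VoiceGender, a MediaElement named "media" exists in the page XAML, and the wait flag is ignored:
		// Hypothetical sketch of the SpeakAsync helper used above. Assumes System.Linq,
		// Windows.Media.SpeechSynthesis, and a MediaElement named "media" in the XAML.
		private async Task SpeakAsync( string text, VoiceGender gender, bool wait )
		{
			SpeechSynthesizer synthesizer = new SpeechSynthesizer();

			// Prefer an installed voice of the requested gender, if one is available.
			VoiceInformation voice = SpeechSynthesizer.AllVoices.FirstOrDefault( v => v.Gender == gender );
			if( voice != null )
			{
				synthesizer.Voice = voice;
			}

			// Synthesize the text and play it through the page's MediaElement.
			SpeechSynthesisStream stream = await synthesizer.SynthesizeTextToStreamAsync( text );
			this.media.SetSource( stream, stream.ContentType );
			this.media.Play();
		}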