Example #1 (0 votes)
        /// <summary>
        /// Creates (or reuses) a person group from the UI fields, registers the
        /// person from the currently captured photo, trains the group, polls the
        /// training status and finally verifies the captured face against the
        /// freshly trained group, writing the verdict to <c>InfoTextBlock</c>.
        /// Shows a dialog when the required text boxes are empty or the Face API
        /// reports an error.
        /// </summary>
        private async Task SavePhotoAndTrain()
        {
            if (string.IsNullOrEmpty(GroupIdTextBox.Text) || string.IsNullOrEmpty(GroupNameTextBox.Text) || string.IsNullOrEmpty(PersonNameTextBox.Text))
            {
                await ShowDialog("Group name, group ID and person name cannot be empty");

                return;
            }

            try
            {
                Stream stream = await GetImageStream();

                var groupID    = GroupIdTextBox.Text;
                var groupName  = GroupNameTextBox.Text;
                var personName = PersonNameTextBox.Text;

                await _faceRecognitionService.CreatePersonGroup(groupID, groupName);

                // BUG FIX: the person was previously added to the group TWICE (a
                // fire-and-forget call followed by this one), creating a duplicate
                // person entry. Add exactly once and keep the result.
                Tuple<string, CreatePersonResult> definePersonGroupResult = await _faceRecognitionService.AddNewPersonToGroup(groupID, personName);

                var registerPersonResult = await _faceRecognitionService.RegisterPerson(definePersonGroupResult.Item1, definePersonGroupResult.Item2, stream);

                await _faceRecognitionService.TrainPersonGroup(definePersonGroupResult.Item1);

                var trainingStatus = await _faceRecognitionService.VerifyTrainingStatus(definePersonGroupResult.Item1);

                // Re-acquire the image stream: the first one was consumed by
                // RegisterPerson above.
                stream = await GetImageStream();

                InfoTextBlock.Text = await _faceRecognitionService.VerifyFaceAgainstTraindedGroup(definePersonGroupResult.Item1, stream);
            }
            catch (FaceAPIException ex)
            {
                await ShowDialog("Unfortunately error occured: " + ex.Message);
            }
        }
Example #2 (0 votes)
        /// <summary>
        /// Captures a JPEG photo from the active camera, shows it in the preview
        /// image, detects faces (gender + accessories attributes), draws a red
        /// rectangle around every detected face on the overlay canvas and verifies
        /// the face against the trained person group "myfamilytest".
        /// </summary>
        /// <returns>
        /// The accumulated analysis text (attribute summary plus recognition
        /// verdict), or whatever was accumulated before a FaceAPIException.
        /// </returns>
        private async Task <string> CaptureAndAnalyzePhoto()
        {
            string analysisResult = string.Empty;

            using (var captureStream = new InMemoryRandomAccessStream())
            {
                await _mediaCapture.CapturePhotoToStreamAsync(ImageEncodingProperties.CreateJpeg(), captureStream);

                await captureStream.FlushAsync();

                captureStream.Seek(0);

                // Keep a raw copy of the captured JPEG so GetImageStream() can hand
                // out fresh streams later.
                var readStream = captureStream.AsStreamForRead();
                _takenImage = new byte[readStream.Length];
                await readStream.ReadAsync(_takenImage, 0, _takenImage.Length);

                var image = new BitmapImage();
                captureStream.Seek(0);
                await image.SetSourceAsync(captureStream);

                await Dispatcher.RunAsync(CoreDispatcherPriority.High, () =>
                {
                    VideoCaptureImage.Source = image;
                });

                try
                {
                    Stream stream = await GetImageStream();

                    var faces = await _faceRecognitionService.FaceServiceClient.DetectAsync(stream, false, false, new[] {
                        FaceAttributeType.Accessories, FaceAttributeType.Gender
                    });

                    if (faces.Length > 0)
                    {
                        textPlaceHolder.Text = "Detected gender: " + faces[0].FaceAttributes?.Gender;

                        // BUG FIX: was "> 1", which hid the accessory text when
                        // exactly one accessory was detected.
                        if (faces[0].FaceAttributes?.Accessories.Length > 0)
                        {
                            textPlaceHolder.Text = textPlaceHolder.Text + " with accessories: " + faces[0].FaceAttributes?.Accessories.FirstOrDefault();
                        }

                        analysisResult = analysisResult + textPlaceHolder.Text + "\n";

                        foreach (var faceRect in faces.Select(face => face.FaceRectangle))
                        {
                            // BUG FIX: a single Rectangle instance was reused for
                            // every face. A UIElement may only be added to a panel
                            // once, so re-adding the same instance throws and each
                            // iteration clobbered the previous position. Create one
                            // rectangle per detected face instead.
                            var rectangle = new Rectangle
                            {
                                StrokeThickness = 2,
                                Stroke          = new SolidColorBrush(Colors.Red),
                                Width           = faceRect.Width,
                                Height          = faceRect.Height
                            };
                            rectangle.SetValue(Canvas.LeftProperty, faceRect.Left);
                            rectangle.SetValue(Canvas.TopProperty, faceRect.Top);

                            await Dispatcher.RunAsync(CoreDispatcherPriority.High, () =>
                            {
                                VideoCaptureImageCanvas.Children.Add(rectangle);
                            });
                        }
                    }

                    // Re-acquire the image stream: the previous one was consumed by
                    // DetectAsync.
                    stream = await GetImageStream();

                    var recognitionResult = await _faceRecognitionService.VerifyFaceAgainstTraindedGroup("myfamilytest", stream);

                    textPlaceHolder.Text = recognitionResult;
                    analysisResult       = analysisResult + recognitionResult;
                    return(analysisResult);
                }
                catch (FaceAPIException ex)
                {
                    textPlaceHolder.Text = "Unfortunately error occured: " + ex.Message;
                }

                return(analysisResult);
            }
        }