public async Task<CustomFaceModel[]> GetIdentifyResultAsync(Stream picture)
        {
            CustomFaceModel[] customFaceModels = null;
            try
            {
                // await WaitIfOverCallLimitAsync();
                Face[] detectResults = await _serviceClient.DetectAsync(picture);

                Guid[]           guids           = detectResults.Select(x => x.FaceId).ToArray();
                IdentifyResult[] identifyResults = await _serviceClient.IdentifyAsync(_groupId, guids);

                customFaceModels = new CustomFaceModel[detectResults.Length];
                for (int i = 0; i < identifyResults.Length; i++)
                {
                    FaceRectangle rectangle = detectResults[i].FaceRectangle;

                    // Set initial name to Unknown.
                    string name = "Unknown";
                    try
                    {
                        name = (await _serviceClient.GetPersonAsync(_groupId, identifyResults[i].Candidates[0].PersonId)).Name;
                    }
                    catch (Exception)
                    {
                        // Swallow the lookup failure (no candidate, or person not found)
                        // and keep the default name "Unknown" for this face.
                    }

                    CustomFaceModel model = new CustomFaceModel()
                    {
                        Name   = name,
                        Top    = rectangle.Top,
                        Left   = rectangle.Left,
                        Width  = rectangle.Width,
                        Height = rectangle.Height
                    };

                    customFaceModels[i] = model;
                }
            }
            catch (Exception)
            {
                // Swallow detection/identification failures; the caller gets null.
            }

            return customFaceModels;
        }
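        // Hedged usage sketch -- NOT part of the original sample. It shows one way a
        // caller might feed a picked image into GetIdentifyResultAsync; the picker,
        // file filter, and logging below are illustrative assumptions.
        public async Task LogIdentifiedFacesAsync()
        {
            var picker = new Windows.Storage.Pickers.FileOpenPicker();
            picker.FileTypeFilter.Add(".jpg");
            Windows.Storage.StorageFile file = await picker.PickSingleFileAsync();
            if (file == null)
            {
                return; // user cancelled the picker
            }

            using (var stream = await file.OpenAsync(Windows.Storage.FileAccessMode.Read))
            {
                CustomFaceModel[] faces = await GetIdentifyResultAsync(stream.AsStream());
                foreach (CustomFaceModel face in faces ?? new CustomFaceModel[0])
                {
                    System.Diagnostics.Debug.WriteLine($"{face.Name} at ({face.Left},{face.Top}), {face.Width}x{face.Height}");
                }
            }
        }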
        public async Task<CustomFaceModel> GetIdentifySingleResultAsync(Stream picture)
        {
            CustomFaceModel customFaceModel = null;

            // await WaitIfOverCallLimitAsync();
            Face[] detectResults = await _serviceClient.DetectAsync(picture);

            if (detectResults.Length != 1)
            {
                return null;
            }

            Guid[]           guids           = detectResults.Select(x => x.FaceId).ToArray();
            IdentifyResult[] identifyResults = await _serviceClient.IdentifyAsync(_groupId, guids);

            string name = string.Empty;

            try
            {
                name = (await _serviceClient.GetPersonAsync(_groupId, identifyResults[0].Candidates[0].PersonId)).Name;
            }
            catch (Exception)
            {
                // No candidate or matching person: nothing to report.
                return null;
            }

            FaceRectangle rectangle = detectResults[0].FaceRectangle;

            customFaceModel = new CustomFaceModel()
            {
                Name   = name,
                Top    = rectangle.Top,
                Left   = rectangle.Left,
                Width  = rectangle.Width,
                Height = rectangle.Height
            };

            return customFaceModel;
        }
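        // Hedged aside -- not in the original sample. The try/catch above doubles as a
        // "no match" test: Candidates[0] or GetPersonAsync throws when Identify found
        // no candidate. An explicit guard would avoid exceptions as control flow:
        //
        //     if (identifyResults.Length == 0 || identifyResults[0].Candidates.Length == 0)
        //     {
        //         return null; // no known person matched the detected face
        //     }
        //     Person person = await _serviceClient.GetPersonAsync(_groupId, identifyResults[0].Candidates[0].PersonId);
        //     string name = person.Name;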
        private void ShowLoginSuccess(CustomFaceModel customFace)
        {
            TextBlock tb = new TextBlock()
            {
                Text     = "Hi, " + customFace.Name + "\nWelcome! 😊",
                FontSize = 32
            };

            LoginStack.Children.Add(tb);

            Button bt = new Button()
            {
                FontFamily = new FontFamily("Segoe MDL2 Assets"),
                FontSize   = 30,
                Width      = 50,
                Height     = 50,
                Content    = "\uEB51" // Segoe MDL2 Assets glyph; the XAML escape "&#xEB51;" would render literally in C#
            };

            bt.Click += Bt_Click;
            LoginStack.Children.Add(bt);
        }
Example #4
        public async Task<CustomFaceModel[]> GetDetectEmojiAsync(Stream picture)
        {
            CustomFaceModel[] customFaceModels = null;
            try
            {
                // await WaitIfOverCallLimitAsync();
                var    requiredFaceAttributes = new FaceAttributeType[] { FaceAttributeType.Emotion };
                Face[] detectResults          = await _serviceClient.DetectAsync(picture, returnFaceId: false, returnFaceLandmarks: true, returnFaceAttributes: requiredFaceAttributes);

                customFaceModels = new CustomFaceModel[detectResults.Length];
                for (int i = 0; i < detectResults.Length; i++)
                {
                    FaceRectangle   rectangle = detectResults[i].FaceRectangle;
                    CustomFaceModel model     = new CustomFaceModel()
                    {
                        Anger     = detectResults[i].FaceAttributes.Emotion.Anger,
                        Contempt  = detectResults[i].FaceAttributes.Emotion.Contempt,
                        Disgust   = detectResults[i].FaceAttributes.Emotion.Disgust,
                        Fear      = detectResults[i].FaceAttributes.Emotion.Fear,
                        Happiness = detectResults[i].FaceAttributes.Emotion.Happiness,
                        Neutral   = detectResults[i].FaceAttributes.Emotion.Neutral,
                        Sadness   = detectResults[i].FaceAttributes.Emotion.Sadness,
                        Suprise   = detectResults[i].FaceAttributes.Emotion.Surprise, // "Suprise" [sic] is the property name on CustomFaceModel
                        Top       = rectangle.Top,
                        Left      = rectangle.Left,
                        Width     = rectangle.Width,
                        Height    = rectangle.Height
                    };
                    customFaceModels[i] = model;
                }
            }
            catch (Exception)
            {
                // Swallow detection failures; the caller gets null.
            }

            return customFaceModels;
        }
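        // Hedged sketch -- not part of the original sample. One way to collapse the
        // eight emotion scores on a CustomFaceModel into a single dominant label; the
        // score properties are assumed to be float, mirroring the SDK's EmotionScores.
        private static string GetDominantEmotion(CustomFaceModel face)
        {
            // Requires: using System.Collections.Generic; using System.Linq;
            var scores = new Dictionary<string, float>
            {
                ["Anger"]     = face.Anger,
                ["Contempt"]  = face.Contempt,
                ["Disgust"]   = face.Disgust,
                ["Fear"]      = face.Fear,
                ["Happiness"] = face.Happiness,
                ["Neutral"]   = face.Neutral,
                ["Sadness"]   = face.Sadness,
                ["Surprise"]  = face.Suprise // spelled "Suprise" on the model
            };
            return scores.OrderByDescending(kv => kv.Value).First().Key;
        }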
        private async void ProcessCurrentVideoFrame(ThreadPoolTimer timer)
        {
            // If state is not Streaming, return.
            if (_state != StreamingState.Streaming)
            {
                return;
            }

            // If a previous frame is still being processed, skip this tick (non-blocking semaphore check).
            if (!_semaphoreSlim.Wait(0))
            {
                return;
            }

            const BitmapPixelFormat PixelFormat = BitmapPixelFormat.Nv12;

            try
            {
                using (VideoFrame currentFrame = new VideoFrame(PixelFormat, (int)_videoProperties.Width, (int)_videoProperties.Height))
                {
                    // Get the current preview frame from _mediaCapture and copy it into currentFrame.
                    await _mediaCapture.GetPreviewFrameAsync(currentFrame);

                    // Detect faces locally with _faceTracker.
                    IList<DetectedFace> builtinFaces = await _faceTracker.ProcessNextFrameAsync(currentFrame);

                    SoftwareBitmap tempBitmap = SoftwareBitmap.Convert(currentFrame.SoftwareBitmap, BitmapPixelFormat.Bgra8);

                    if (builtinFaces.Count != 0)
                    {
                        var frameSize = new Size(currentFrame.SoftwareBitmap.PixelWidth, currentFrame.SoftwareBitmap.PixelHeight);
                        await Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () =>
                        {
                            ShowFromBuiltIn(frameSize, builtinFaces);
                        });

                        // Encode the current frame as a JPEG into an in-memory stream.
                        IRandomAccessStream stream  = new InMemoryRandomAccessStream();
                        BitmapEncoder       encoder = await BitmapEncoder.CreateAsync(BitmapEncoder.JpegEncoderId, stream);

                        encoder.SetSoftwareBitmap(tempBitmap);
                        await encoder.FlushAsync();

                        CustomFaceModel customFaces = await _faceApiHelper.GetIdentifySingleResultAsync(stream.AsStream());

                        if (customFaces != null)
                        {
                            await _dataHelper.ChangeAttendStatusAsync(customFaces.Name, true);

                            await Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () =>
                                                      ShowLoginSuccess(customFaces));
                        }
                        //await Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () =>
                        //    ShowFromFaceApi(frameSize, customFaces));
                    }
                    else
                    {
                        await Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () =>
                                                  PaintingCanvas.Children.Clear());
                    }
                }
            }
            catch (Microsoft.ProjectOxford.Face.FaceAPIException faceEx)
            {
                await Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () =>
                                          ShowAlertHelper.ShowDialog(faceEx.ErrorMessage, faceEx.ErrorCode));
            }
            catch (Exception ex)
            {
                await Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () =>
                                          ShowAlertHelper.ShowDialog(ex.Message));
            }
            finally
            {
                _semaphoreSlim.Release();
            }
        }
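        // Hedged wiring sketch -- not shown in this excerpt. ProcessCurrentVideoFrame
        // matches Windows.System.Threading.TimerElapsedHandler, so it is presumably
        // driven by a periodic timer started when streaming begins; the field name and
        // ~15 fps interval below are assumptions.
        //
        //     _frameProcessingTimer = Windows.System.Threading.ThreadPoolTimer.CreatePeriodicTimer(
        //         ProcessCurrentVideoFrame, TimeSpan.FromMilliseconds(66));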