/// <summary>
/// Page lifecycle entry point: resets the streaming state and lazily creates
/// the on-device face tracker and the cloud helpers (Face API, Event Hubs)
/// the first time the page is navigated to.
/// </summary>
/// <param name="e">Navigation arguments supplied by the frame.</param>
/// <remarks>
/// async void is acceptable here because OnNavigatedTo is a framework
/// override (top-level event-style entry point).
/// NOTE(review): base.OnNavigatedTo(e) is not called — presumably intentional,
/// but confirm against the base Page implementation.
/// </remarks>
protected override async void OnNavigatedTo(NavigationEventArgs e) { _state = StreamingState.Idle; if (_faceTracker == null) { _faceTracker = await FaceTracker.CreateAsync(); } if (_faceApiHelper == null) { try { _faceApiHelper = new FaceApiHelper(); _eventHubHelper = new EventHubHelper(); // Not needed (original comment: 用不到)
//await _faceApiHelper.CheckGroupExistAsync(); } catch (Microsoft.ProjectOxford.Face.FaceAPIException faceEx) { /* Face API service error, e.g. bad key/endpoint: show message + error code. */ ShowErrorHelper.ShowDialog(faceEx.ErrorMessage, faceEx.ErrorCode); } catch (Microsoft.Azure.EventHubs.EventHubsException eventhubEx) { ShowErrorHelper.ShowDialog(eventhubEx.Message); } catch (Exception ex) { ShowErrorHelper.ShowDialog(ex.Message); } } }
/// <summary>
/// Stores the Face API subscription key and endpoint entered by the user
/// into local application settings.
/// </summary>
/// <param name="sender">The clicked button.</param>
/// <param name="e">Routed event arguments.</param>
private void ButtonGetKey_Click(object sender, RoutedEventArgs e)
{
    // FIX: TextBox.Text is never null, so the original "!= null" checks always
    // passed and empty values were silently persisted. Test for content instead.
    if (!string.IsNullOrWhiteSpace(SubscriptionKey.Text) && !string.IsNullOrWhiteSpace(EndPoint.Text))
    {
        _localSettings.Values["FaceAPIKey"] = SubscriptionKey.Text;
        _localSettings.Values["EndPoint"] = EndPoint.Text;
        // FIX: message typo ("have been store").
        ShowErrorHelper.ShowDialog("Your settings have been stored.", "Done");
    }
    else
    {
        // FIX: the original "// Alert." TODO did nothing — tell the user
        // why the settings were not saved instead of failing silently.
        ShowErrorHelper.ShowDialog("Please enter both a subscription key and an endpoint.");
    }
}
/// <summary>
/// Periodic timer callback (~15 fps): grabs the current preview frame, runs
/// the cheap on-device FaceTracker as a gate, and only when a face is visible
/// encodes the frame to JPEG, sends it to the cloud Face API for emotion
/// scores, aggregates per-quadrant "emoji" scores, updates the overlay UI and
/// forwards the aggregate to Event Hubs.
/// </summary>
/// <param name="timer">The ThreadPoolTimer that fired (unused).</param>
/// <remarks>
/// async void is required by the TimerElapsedHandler delegate signature.
/// Overlapping ticks are suppressed with a non-blocking semaphore try-wait.
/// </remarks>
private async void ProcessCurrentVideoFrame(ThreadPoolTimer timer)
{
    // If state is not Streaming, return.
    if (_state != StreamingState.Streaming)
    {
        return;
    }
    // If the previous tick is still being processed, skip this one.
    if (!_semaphoreSlim.Wait(0))
    {
        return;
    }

    const BitmapPixelFormat PixelFormat = BitmapPixelFormat.Nv12;
    try
    {
        using (VideoFrame currentFrame = new VideoFrame(PixelFormat, (int)_videoProperties.Width, (int)_videoProperties.Height))
        {
            // Get current preview frame from _mediaCapture and copy into currentFrame.
            await _mediaCapture.GetPreviewFrameAsync(currentFrame);

            // On-device detection: free and fast, used as a gate before the paid cloud call.
            IList<DetectedFace> builtinFaces = await _faceTracker.ProcessNextFrameAsync(currentFrame);
            SoftwareBitmap tempBitmap = SoftwareBitmap.Convert(currentFrame.SoftwareBitmap, BitmapPixelFormat.Bgra8);

            if (builtinFaces.Count != 0)
            {
                var frameSize = new Size(currentFrame.SoftwareBitmap.PixelWidth, currentFrame.SoftwareBitmap.PixelHeight);

                // Encode the frame as a JPEG for the Face API.
                // FIX: the stream was never disposed before — leaked one native
                // stream per processed frame.
                using (InMemoryRandomAccessStream stream = new InMemoryRandomAccessStream())
                {
                    BitmapEncoder encoder = await BitmapEncoder.CreateAsync(BitmapEncoder.JpegEncoderId, stream);
                    encoder.SetSoftwareBitmap(tempBitmap);
                    await encoder.FlushAsync();

                    CustomFaceModel[] customFaces = await _faceApiHelper.GetDetectEmojiAsync(stream.AsStream());

                    // FIX: the cloud service may return no faces even when the local
                    // tracker found some; the original then divided by zero, turning
                    // averageX/averageY into NaN. Bail out like the no-face branch.
                    if (customFaces == null || customFaces.Length == 0)
                    {
                        await Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () => PaintingCanvas.Children.Clear());
                        return; // finally still releases the semaphore.
                    }

                    CustomFaceEmojiModel customFaceEmojiModel = new CustomFaceEmojiModel();
                    EmojiNum emojiNum = new EmojiNum();
                    float upperLeft = 0, upperRight = 0, bottomLeft = 0, bottomRight = 0, averageX = 0, averageY = 0;

                    // Centroid of the detected face rectangles; it splits the frame
                    // into four quadrants for the per-region aggregation below.
                    foreach (var face in customFaces)
                    {
                        averageX += face.Left;
                        averageY += face.Top;
                    }
                    averageX /= customFaces.Length;
                    averageY /= customFaces.Length;

                    for (int i = 0; i < customFaces.Length; i++)
                    {
                        // Per-face score: negative emotions subtract, positive ones add.
                        // FIX: the original computed this identical expression twice
                        // (into emojiNum and a throwaway "model"); compute it once.
                        emojiNum.Emoji = -1 * (customFaces[i].Anger + customFaces[i].Contempt + customFaces[i].Disgust + customFaces[i].Fear + customFaces[i].Sadness)
                                         + customFaces[i].Happiness + customFaces[i].Neutral + customFaces[i].Suprise;
                        customFaceEmojiModel.EmojiSum += emojiNum.Emoji;

                        // Accumulate the score into the face's quadrant relative to the centroid.
                        if (customFaces[i].Left < averageX && customFaces[i].Top > averageY) { upperLeft += emojiNum.Emoji; }
                        else if (customFaces[i].Left < averageX && customFaces[i].Top < averageY) { bottomLeft += emojiNum.Emoji; }
                        else if (customFaces[i].Left > averageX && customFaces[i].Top > averageY) { upperRight += emojiNum.Emoji; }
                        else if (customFaces[i].Left > averageX && customFaces[i].Top < averageY) { bottomRight += emojiNum.Emoji; }
                    }

                    // NOTE(review): preserved from the original — this divides the
                    // model's (zero-initialized) quadrant fields by the accumulated
                    // sums, yielding 0 or NaN. The assignment looks inverted
                    // (perhaps "UpperLeft = upperLeft / count" was intended);
                    // confirm intent before changing.
                    customFaceEmojiModel.UpperLeft /= upperLeft;
                    customFaceEmojiModel.ButtomLeft /= bottomLeft;
                    customFaceEmojiModel.UpperRight /= upperRight;
                    customFaceEmojiModel.ButtoRight /= bottomRight;

                    // Draw results on the UI thread, then publish the aggregate.
                    await Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () => ShowFromFaceApi(frameSize, customFaces, emojiNum));
                    await _eventHubHelper.SendMessagesToEventHub(customFaceEmojiModel);
                }
            }
            else
            {
                // No face on screen: clear any previous overlay.
                await Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () => PaintingCanvas.Children.Clear());
            }
        }
    }
    catch (Microsoft.ProjectOxford.Face.FaceAPIException faceEx)
    {
        await Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () => ShowErrorHelper.ShowDialog(faceEx.ErrorMessage, faceEx.ErrorCode));
    }
    catch (Exception ex)
    {
        await Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () => ShowErrorHelper.ShowDialog(ex.Message));
    }
    finally
    {
        // Always release so the next timer tick can run.
        _semaphoreSlim.Release();
    }
}
/// <summary>
/// Initializes the camera selected in local settings, starts the preview and
/// kicks off the periodic face-processing timer.
/// </summary>
/// <returns>true when preview started successfully; false otherwise.</returns>
private async Task<bool> StartStreamingAsync()
{
    bool result = true;
    try
    {
        MediaCaptureInitializationSettings initializationSettings = new MediaCaptureInitializationSettings();

        if (_localSettings.Values["CameraId"] == null)
        {
            // FIX: user-facing message typos ("CamreaId", "plase").
            ShowErrorHelper.ShowDialog("Cannot get your CameraId, please check your settings.");
            return false;
        }
        initializationSettings.VideoDeviceId = _localSettings.Values["CameraId"].ToString();
        initializationSettings.StreamingCaptureMode = StreamingCaptureMode.Video;

        // Select preview flow direction (mirror the preview for a front camera).
        // FIX: the original cast the setting unconditionally; on first run the
        // value is null and the cast threw NullReferenceException, which surfaced
        // as a confusing generic error. Keep the default direction instead.
        object cameraPositionValue = _localSettings.Values["CameraPosition"];
        if (cameraPositionValue != null)
        {
            switch ((CameraPosition)cameraPositionValue)
            {
                case CameraPosition.Front:
                    CameraPreview.FlowDirection = FlowDirection.RightToLeft;
                    PaintingCanvas.FlowDirection = FlowDirection.RightToLeft;
                    break;
                case CameraPosition.Back:
                    CameraPreview.FlowDirection = FlowDirection.LeftToRight;
                    PaintingCanvas.FlowDirection = FlowDirection.LeftToRight;
                    break;
                default:
                    break;
            }
        }

        // Prepare MediaCapture.
        _mediaCapture = new MediaCapture();
        await _mediaCapture.InitializeAsync(initializationSettings);
        _mediaCapture.Failed += MediaCapture_Failed;

        // Get preview video properties (height/width) for the FaceTracker.
        var deviceController = _mediaCapture.VideoDeviceController;
        _videoProperties = deviceController.GetMediaStreamProperties(MediaStreamType.VideoPreview) as VideoEncodingProperties;
        CameraPreview.Source = _mediaCapture;
        await _mediaCapture.StartPreviewAsync();

        // ~15 fps processing cadence for ProcessCurrentVideoFrame.
        TimeSpan period = TimeSpan.FromMilliseconds(66);
        _threadPoolTimer = ThreadPoolTimer.CreatePeriodicTimer(new TimerElapsedHandler(ProcessCurrentVideoFrame), period);
    }
    catch (UnauthorizedAccessException)
    {
        // Camera access denied by the user / privacy settings; fail quietly
        // (matches original behavior — no dialog in this branch).
        result = false;
    }
    catch (Exception ex)
    {
        ShowErrorHelper.ShowDialog("Cannot start preview because " + ex.Message);
        result = false;
    }
    return result;
}