/// <summary>
/// Initializes the default camera, starts the preview, and runs a background loop
/// that grabs preview frames, detects a single face, identifies (or registers) the
/// person through the face service, and populates the user view model. The loop
/// runs until <see cref="_requestStopCancellationToken"/> is cancelled, after which
/// the preview is stopped and resources are released.
/// </summary>
private async Task InitializeCamera()
{
    _requestStopCancellationToken = new CancellationTokenSource();
    _captureElement = new CaptureElement();

    var videoCaptureDevices = await DeviceInformation.FindAllAsync(DeviceClass.VideoCapture);
    var camera = videoCaptureDevices.FirstOrDefault();
    if (camera == null)
    {
        // FIX: FirstOrDefault() returns null when no camera is attached; the original
        // code dereferenced camera.Id and crashed with a NullReferenceException.
        Debug.WriteLine("No video capture device found.");
        return;
    }

    var initialisationSettings = new MediaCaptureInitializationSettings()
    {
        StreamingCaptureMode = StreamingCaptureMode.Video,
        VideoDeviceId = camera.Id
    };

    _mediaCapture = new MediaCapture();
    await _mediaCapture.InitializeAsync(initialisationSettings);
    _captureElement.Source = _mediaCapture;
    await _mediaCapture.StartPreviewAsync();

    var videoProperties = _mediaCapture.VideoDeviceController
        .GetMediaStreamProperties(MediaStreamType.VideoPreview) as VideoEncodingProperties;
    var videoSize = new Rect(0, 0, videoProperties.Width, videoProperties.Height);

    var detector = await FaceDetector.CreateAsync();
    // Pixel format the FaceDetector can consume directly (was misleadingly named "bitmap").
    var detectorPixelFormat = FaceDetector.GetSupportedBitmapPixelFormats().First();

    try
    {
        await Task.Run(async () =>
        {
            // FIX: dispose the reusable frame buffer when the loop ends (was leaked).
            using (var frame = new VideoFrame(detectorPixelFormat, (int)videoSize.Width, (int)videoSize.Height))
            {
                TimeSpan? lastFrameTime = null;

                // FIX: the original loop was `while (true)` with all work guarded by
                // `if (!IsCancellationRequested)`, so once cancellation was requested
                // (outside Task.Delay) it busy-spun forever and never reached cleanup.
                // Exit the loop as soon as a stop is requested.
                while (!_requestStopCancellationToken.Token.IsCancellationRequested)
                {
                    await _mediaCapture.GetPreviewFrameAsync(frame);

                    // Only do work on a genuinely new frame (RelativeTime changed).
                    if (!lastFrameTime.HasValue || lastFrameTime != frame.RelativeTime)
                    {
                        var detectedFaces = await detector.DetectFacesAsync(frame.SoftwareBitmap);

                        // Only act when exactly one face is in view.
                        if (detectedFaces.Count == 1)
                        {
                            await ProcessSingleFaceAsync(frame.SoftwareBitmap);

                            // Throttle identification attempts; a requested stop cancels
                            // the delay and surfaces as OperationCanceledException below.
                            await Task.Delay(CHECK_INTERVAL * 1000, _requestStopCancellationToken.Token);
                        }
                    }

                    lastFrameTime = frame.RelativeTime;
                }
            }
        }, _requestStopCancellationToken.Token);
    }
    catch (OperationCanceledException)
    {
        // FIX: expected when a stop is requested during Task.Delay — not an error,
        // so don't log it as one (the original blanket catch logged it).
    }
    catch (Microsoft.ProjectOxford.Face.FaceAPIException fex)
    {
        Debug.WriteLine(fex.ErrorMessage);
    }
    catch (Exception ex)
    {
        Debug.WriteLine(ex.Message);
    }

    if (_requestStopCancellationToken.IsCancellationRequested)
    {
        await _mediaCapture.StopPreviewAsync();
        _captureElement.Source = null;
        _requestStopCancellationToken.Dispose();
    }
}

/// <summary>
/// Sends the face bitmap to the face service and either loads the known user's
/// profile or registers a brand-new person/user, then pushes the result into the
/// user view model.
/// </summary>
/// <param name="faceBitmap">The preview frame bitmap containing exactly one face.</param>
private async Task ProcessSingleFaceAsync(SoftwareBitmap faceBitmap)
{
    // The detector-friendly pixel format is not JPEG-encodable; convert first.
    // FIX: the converted bitmap and both streams were leaked in the original —
    // now disposed via using blocks.
    using (var convertedRgba16Bitmap = SoftwareBitmap.Convert(faceBitmap, BitmapPixelFormat.Rgba16))
    {
        using (var detectStream = await EncodeToJpegStreamAsync(convertedRgba16Bitmap))
        {
            var detectedPerson = await _faceService.DetectFace(detectStream.AsStream());

            // FIX: the original "unknown person" branch dereferenced detectedPerson.Gender
            // even when detectedPerson was null — skip entirely when nothing came back.
            if (detectedPerson == null)
            {
                return;
            }

            if (detectedPerson.PersonId.HasValue)
            {
                // Known person: load the stored profile, creating one on first sight.
                _userService.PersonId = detectedPerson.PersonId.Value;
                var user = await _userService.GetModelAsync();
                if (user == null)
                {
                    user = new UserProfileModel().RandomData(detectedPerson.Gender);
                    user.PersonId = detectedPerson.PersonId.Value;
                    // FIX: guard FaceId — the original read FaceId.Value unconditionally.
                    if (detectedPerson.FaceId.HasValue)
                    {
                        user.FaceIds.Add(detectedPerson.FaceId.Value);
                    }
                    user = await _userService.AddUserAsync(user);
                }
                await UserViewModel.SetValuesAsync(User, user);
            }
            else
            {
                // Unknown person: enroll them with the face service.
                // NOTE: the face service disposes the detection stream (known service bug),
                // so encode a fresh copy of the bitmap for enrollment.
                using (var enrollStream = await EncodeToJpegStreamAsync(convertedRgba16Bitmap))
                {
                    // TODO: ask new user for initial profile data
                    var user = new UserProfileModel().RandomData(detectedPerson.Gender);
                    user.PersonId = await _faceService.CreatePersonAsync(user.FullName);
                    user.FaceIds.Add(await _faceService.AddFaceAsync(user.PersonId, enrollStream.AsStream()));
                    user = await _userService.AddUserAsync(user);
                    await UserViewModel.SetValuesAsync(User, user);
                }
            }
        }
    }
}

/// <summary>
/// Encodes <paramref name="sourceBitmap"/> as JPEG into a new in-memory stream.
/// The caller owns (and must dispose) the returned stream.
/// </summary>
private static async Task<InMemoryRandomAccessStream> EncodeToJpegStreamAsync(SoftwareBitmap sourceBitmap)
{
    var stream = new InMemoryRandomAccessStream();
    var encoder = await BitmapEncoder.CreateAsync(BitmapEncoder.JpegEncoderId, stream);
    encoder.SetSoftwareBitmap(sourceBitmap);
    await encoder.FlushAsync();
    return stream;
}