/// <summary>
/// MVC action: registers a face from a base64-encoded image posted by the client.
/// Accepts either a bare base64 payload or a data URL ("data:image/jpeg;base64,...").
/// </summary>
/// <param name="base64String">Base64 image data, optionally with a data-URL prefix.</param>
/// <returns>
/// JSON "success" on success, JSON false for empty input, or the error message on failure.
/// </returns>
public async Task<JsonResult> RegisterFace(string base64String)
{
    try
    {
        if (string.IsNullOrEmpty(base64String))
        {
            return Json(false, JsonRequestBehavior.AllowGet);
        }

        // Strip an optional data-URL prefix. The previous Split(',')[1] threw
        // IndexOutOfRangeException when the client sent a bare base64 string.
        int commaIndex = base64String.IndexOf(',');
        string payload = commaIndex >= 0 ? base64String.Substring(commaIndex + 1) : base64String;
        byte[] imgdata = Convert.FromBase64String(payload);

        // NOTE(review): person name is hard-coded — presumably demo code; confirm
        // whether the name should come from the request instead.
        await FaceService.CreatePersonAsync("Tharaka", string.Empty, imgdata);
        return Json("success", JsonRequestBehavior.AllowGet);
    }
    catch (Exception ex)
    {
        // Return only the message: serializing the full Exception object leaked
        // stack traces and internal type names to the client.
        return Json(ex.Message, JsonRequestBehavior.AllowGet);
    }
}
/// <summary>
/// Validates the person-name and group inputs, creates the person in the selected
/// person group via the Face service, confirms with a dialog, and resets the form.
/// Validation failures and service errors surface through the catch-all dialog.
/// </summary>
private async Task ExecuteAddPersonCommandAsync()
{
    try
    {
        if (string.IsNullOrEmpty(PersonToAdd))
        {
            // Bug fix: this guard checks PersonToAdd but previously reported
            // nameof(GroupToAdd) as the offending parameter.
            throw new ArgumentNullException(nameof(PersonToAdd), "Please enter a person name.");
        }
        if (SelectedGroupToAddPerson == null)
        {
            throw new ArgumentNullException(nameof(SelectedGroupToAddPerson), "Please select a group.");
        }

        await FaceService.CreatePersonAsync(SelectedGroupToAddPerson.PersonGroupId, PersonToAdd);
        await MessageDialogHelper.MessageDialogAsync($"'{PersonToAdd}' successfully added.");

        // Cleanup UI
        PersonToAdd = string.Empty;
        if (SelectedPersonGroup != null) // Reload persons if a group is selected
        {
            await LoadPersonsOfGroupAsync(SelectedPersonGroup.PersonGroupId);
        }
    }
    catch (Exception ex)
    {
        var dialog = new MessageDialog(ex.Message, "Fehler");
        await dialog.ShowAsync();
    }
}
/// <summary>
/// Creates a person in the selected person group and uploads every captured picture
/// as a persisted face. Shows a completion dialog and clears the form on success,
/// or the service error message on failure.
/// </summary>
private async Task ExecuteAddPersonCommand()
{
    // Guard clause: previously `NewFaceName != string.Empty` let null slip through.
    if (string.IsNullOrEmpty(NewFaceName) || Pictures.Count == 0 || SelectedPersonGroup == null)
    {
        await new MessageDialog(loader.GetString("AddFace_CompleteInformation")).ShowAsync();
        return;
    }

    IsLoading = true;
    try
    {
        var faces = new List<AddPersistedFaceResult>();
        var result = await FaceService.CreatePersonAsync(SelectedPersonGroup.PersonGroupId, NewFaceName);

        foreach (var picture in Pictures)
        {
            // Encode the SoftwareBitmap to an in-memory JPEG. The stream was
            // previously never disposed — one leaked stream per picture.
            using (var randomAccessStream = new InMemoryRandomAccessStream())
            {
                BitmapEncoder encoder = await BitmapEncoder.CreateAsync(BitmapEncoder.JpegEncoderId, randomAccessStream);
                encoder.SetSoftwareBitmap(picture.Bitmap);
                await encoder.FlushAsync();

                var stream = randomAccessStream.AsStreamForRead();
                faces.Add(await FaceService.AddPersonFaceAsync(SelectedPersonGroup.PersonGroupId, result.PersonId, stream));
            }
        }

        await new MessageDialog($"Successfully added {faces.Count} faces for person {NewFaceName} ({result.PersonId}).").ShowAsync();

        // Reset the form
        Pictures.Clear();
        NewFaceName = "";
    }
    catch (FaceAPIException e)
    {
        await new MessageDialog(e.ErrorMessage).ShowAsync();
    }
    finally
    {
        IsLoading = false;
    }
}
/// <summary>
/// Initializes the default video capture device, starts the preview, and runs a
/// background loop that grabs preview frames, detects faces locally, and — when
/// exactly one face is visible — identifies or enrolls the person via the Face
/// service and populates the user view model. The loop runs until the stop
/// cancellation token is signalled.
/// </summary>
private async Task InitializeCamera()
{
    _requestStopCancellationToken = new CancellationTokenSource();
    _captureElement = new CaptureElement();

    // Pick the first available camera.
    // NOTE(review): camera is null when no video device exists — camera.Id would
    // throw NullReferenceException; consider guarding.
    var videoCaptureDevices = await DeviceInformation.FindAllAsync(DeviceClass.VideoCapture);
    var camera = videoCaptureDevices.FirstOrDefault();
    MediaCaptureInitializationSettings initialisationSettings = new MediaCaptureInitializationSettings()
    {
        StreamingCaptureMode = StreamingCaptureMode.Video,
        VideoDeviceId = camera.Id
    };
    _mediaCapture = new MediaCapture();
    await _mediaCapture.InitializeAsync(initialisationSettings);
    _captureElement.Source = _mediaCapture;
    await _mediaCapture.StartPreviewAsync();

    // Preview resolution drives the size of the reusable VideoFrame below.
    var videoProperties = (_mediaCapture.VideoDeviceController.GetMediaStreamProperties(MediaStreamType.VideoPreview) as VideoEncodingProperties);
    var videoSize = new Rect(0, 0, videoProperties.Width, videoProperties.Height);

    // Local (on-device) face detector; `bitmap` is the first pixel format it supports.
    var detector = await FaceDetector.CreateAsync();
    var bitmap = FaceDetector.GetSupportedBitmapPixelFormats().First();

    try
    {
        await Task.Run(async () =>
        {
            // One VideoFrame is allocated up front and refilled each iteration.
            VideoFrame frame = new VideoFrame(bitmap, (int)videoSize.Width, (int)videoSize.Height);
            TimeSpan? lastFrameTime = null;

            // NOTE(review): this loop never breaks — when cancellation is
            // requested it merely skips the body and spins; cancellation only
            // stops the Task.Run before it starts. Consider `while (!token.IsCancellationRequested)`.
            while (true)
            {
                if (!_requestStopCancellationToken.Token.IsCancellationRequested)
                {
                    await _mediaCapture.GetPreviewFrameAsync(frame);

                    // Only process genuinely new frames (RelativeTime changed).
                    if ((!lastFrameTime.HasValue) || (lastFrameTime != frame.RelativeTime))
                    {
                        var detectedFaces = await detector.DetectFacesAsync(frame.SoftwareBitmap);

                        // Only act when exactly one face is in frame.
                        if (detectedFaces.Count == 1)
                        {
                            // Encode the frame as JPEG for the cloud Face service.
                            var convertedRgba16Bitmap = SoftwareBitmap.Convert(frame.SoftwareBitmap, BitmapPixelFormat.Rgba16);
                            InMemoryRandomAccessStream stream = new InMemoryRandomAccessStream();
                            BitmapEncoder encoder = await BitmapEncoder.CreateAsync(BitmapEncoder.JpegEncoderId, stream);
                            encoder.SetSoftwareBitmap(convertedRgba16Bitmap);
                            await encoder.FlushAsync();

                            var detectedPerson = await _faceService.DetectFace(stream.AsStream());

                            if (detectedPerson != null && detectedPerson.PersonId.HasValue)
                            {
                                // Known person: load (or lazily create) their profile.
                                _userService.PersonId = detectedPerson.PersonId.Value;
                                var user = await _userService.GetModelAsync();
                                if (user == null)
                                {
                                    user = new UserProfileModel().RandomData(detectedPerson.Gender);
                                    user.PersonId = detectedPerson.PersonId.Value;
                                    user.FaceIds.Add(detectedPerson.FaceId.Value);
                                    user = await _userService.AddUserAsync(user);
                                }
                                await UserViewModel.SetValuesAsync(User, user);
                            }
                            else
                            {
                                // bug: when a person was not detected, the stream gets disposed
                                //stream.Seek(0);
                                // Re-encode the same bitmap into a fresh stream because the
                                // first stream was consumed/disposed by DetectFace above.
                                stream = new InMemoryRandomAccessStream();
                                encoder = await BitmapEncoder.CreateAsync(BitmapEncoder.JpegEncoderId, stream);
                                encoder.SetSoftwareBitmap(convertedRgba16Bitmap);
                                await encoder.FlushAsync();

                                // NOTE(review): this branch also runs when detectedPerson is
                                // null, so detectedPerson.Gender below throws
                                // NullReferenceException in that case — needs a guard.
                                // TODO: ask new user for initial profile data
                                var user = new UserProfileModel().RandomData(detectedPerson.Gender);
                                user.PersonId = await _faceService.CreatePersonAsync(user.FullName);
                                var faceIds = new List<Guid>();
                                faceIds.Add(await _faceService.AddFaceAsync(user.PersonId, stream.AsStream()));
                                user.FaceIds.AddRange(faceIds);
                                user = await _userService.AddUserAsync(user);
                                await UserViewModel.SetValuesAsync(User, user);
                            }

                            // Throttle: wait CHECK_INTERVAL seconds before the next check.
                            await Task.Delay(CHECK_INTERVAL * 1000, _requestStopCancellationToken.Token);
                        }
                    }
                    lastFrameTime = frame.RelativeTime;
                }
            }
        }, _requestStopCancellationToken.Token);
    }
    catch (Microsoft.ProjectOxford.Face.FaceAPIException fex)
    {
        Debug.WriteLine(fex.ErrorMessage);
    }
    catch (Exception ex)
    {
        // Task.Delay's OperationCanceledException on shutdown also lands here.
        Debug.WriteLine(ex.Message);
    }

    // Tear down the preview once a stop was requested.
    if (_requestStopCancellationToken.IsCancellationRequested)
    {
        await _mediaCapture.StopPreviewAsync();
        _captureElement.Source = null;
        _requestStopCancellationToken.Dispose();
    }
}