private FaceService CreateFaceService() { var userId = Guid.Parse(User.Identity.GetUserId()); var faceService = new FaceService(userId); return(faceService); }
public IHttpActionResult Get(int id) { FaceService faceService = CreateFaceService(); var face = faceService.GetFaceById(id); return(Ok(face)); }
public IHttpActionResult Get() { FaceService faceService = CreateFaceService(); var faces = faceService.GetFaces(); return(Ok(faces)); }
public ReadyMainPage() { InitializeComponent(); _faceService = new FaceService(); _faceService.Init(""); }
public async Task<JsonResult> RegisterFace(string base64String)
{
    try
    {
        if (!string.IsNullOrEmpty(base64String))
        {
            // The incoming value is a data URI ("data:image/jpeg;base64,..."), so keep only the payload after the comma.
            var imageParts = base64String.Split(',').ToList();
            byte[] imgdata = Convert.FromBase64String(imageParts[1]);
            //DateTime nm = DateTime.Now;
            //Guid newGuide = Guid.NewGuid();
            //string date = nm.ToString("yyyymmddMMss");
            //string fileName = $"{date}-{newGuide}-NewCapture.jpg";
            //var path = Server.MapPath("~/FaceUploads/" + fileName);
            //System.IO.File.WriteAllBytes(path, imageBytes);
            //var imageUrl = HostingEnvironment.MapPath("~/FaceUploads/");
            //var imgdata = new WebClient().DownloadData(imageUrl + fileName);
            //var response = await MakeAnalysisRequest(imageBytes);
            await FaceService.CreatePersonAsync("Tharaka", string.Empty, imgdata);
            return Json("success", JsonRequestBehavior.AllowGet);
        }
        else
        {
            return Json(false, JsonRequestBehavior.AllowGet);
        }
    }
    catch (Exception ex)
    {
        return Json(ex, JsonRequestBehavior.AllowGet);
    }
}
public async Task InitializeAsync()
{
    try
    {
        IsLoading = true;
        if (FaceService == null)
        {
            FaceService = await FaceServiceHelper.CreateNewFaceServiceAsync();
        }
        var personGroupResult = await FaceService.ListPersonGroupsAsync();
        // OrderBy returns a new sequence, so chain it; otherwise the sorted result is discarded.
        personGroupResult.OrderBy(pg => pg.Name).ForEach(pg => PersonGroups.Add(pg));
        IsLoading = false;
    }
    catch (FaceAPIException ex) // Handle API exception
    {
        await MessageDialogHelper.MessageDialogAsync(ex.ErrorMessage);
    }
    catch (Exception ex)
    {
        await MessageDialogHelper.MessageDialogAsync(ex.Message);
    }
}
public MainPageViewModel() { service = new FaceService(); PickPhotoCommand = new Command(async() => await PickPhoto()); TakePhotoCommand = new Command(async() => await TakePhoto()); AnalyzeCommand = new Command(async() => await Analyze()); }
/// <summary>
/// Loads groups and cleans up the ObservableCollection.
/// </summary>
/// <returns></returns>
private async Task LoadGroupsAsync()
{
    PersonGroups.Clear();
    var fscPersonGroups = await FaceService.ListPersonGroupsAsync();
    fscPersonGroups.OrderBy(pg => pg.Name).ForEach(pg => PersonGroups.Add(pg));
}
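The chained OrderBy(...).ForEach(...) pattern used here (and in the related snippets above and below) relies on a ForEach extension for IEnumerable<T>, which LINQ itself does not provide. A minimal sketch of such a helper, assuming the project defines its own; the actual extension in the project may differ:

using System;
using System.Collections.Generic;

public static class EnumerableExtensions
{
    // Applies an action to each element of the sequence, in order.
    public static void ForEach<T>(this IEnumerable<T> source, Action<T> action)
    {
        foreach (var item in source)
        {
            action(item);
        }
    }
}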
private async Task ExecuteAddPersonCommandAsync()
{
    try
    {
        if (string.IsNullOrEmpty(PersonToAdd))
        {
            throw new ArgumentNullException(nameof(PersonToAdd), "Please enter a person name.");
        }
        if (SelectedGroupToAddPerson == null)
        {
            throw new ArgumentNullException(nameof(SelectedGroupToAddPerson), "Please select a group.");
        }
        await FaceService.CreatePersonAsync(SelectedGroupToAddPerson.PersonGroupId, PersonToAdd);
        await MessageDialogHelper.MessageDialogAsync($"'{PersonToAdd}' successfully added.");
        // Clean up the UI
        PersonToAdd = string.Empty;
        if (SelectedPersonGroup != null) // Reload persons if a group is selected
        {
            await LoadPersonsOfGroupAsync(SelectedPersonGroup.PersonGroupId);
        }
    }
    catch (Exception ex)
    {
        var dialog = new MessageDialog(ex.Message, "Error");
        await dialog.ShowAsync();
    }
}
public GraphicFaceTracker(GraphicOverlay overlay, CameraSource cameraSource, FaceService faceService) { this.overlay = overlay; this.faceGraphic = new FaceGraphic(overlay); this.cameraSource = cameraSource; this.faceService = faceService; }
static void Main(string[] args)
{
    var faceImage = "https://pbs.twimg.com/profile_images/747601253266395136/2HeCGdiG_400x400.jpg";
    var ocrImage = "https://pbs.twimg.com/media/DtdfaSeVsAAeRis.jpg";
    var corruptUrl = "xxxxxxxx";

    Console.WriteLine("Cognitive Services - Face - DetectFace\n");
    var faceClient = new FaceService();
    var faces = faceClient.GetRemoteEmotionsAsync(corruptUrl).Result;
    Console.WriteLine($"Detected: {faces.Count} person(s).");
    foreach (var face in faces)
    {
        Console.WriteLine($"Emotion Result: \nAge:{face.Age} Gender:{face.Gender} Happiness:{face.Happiness}%\n\n");
    }

    Console.WriteLine("Cognitive Services - ComputerVision - OCR\n");
    var computerVisionClient = new ComputerVisionService();
    var regions = computerVisionClient.ExtractRemoteTextAsync(ocrImage).Result;
    Console.WriteLine($"Detected: {regions.Count} regions");
    foreach (var region in regions)
    {
        Console.WriteLine($"OCR Result:\n{region}\n\n");
    }
    Console.ReadLine();
}
public CameraFeedViewModel() { User.FirstName = "John"; User.LastName = "Doe"; var sqlContext = new SqliteContext(); _userService = new UserService(sqlContext); _faceService = new FaceService(); Windows.ApplicationModel.Core.CoreApplication.MainView.CoreWindow.Dispatcher.RunAsync(CoreDispatcherPriority.High, async() => { try { await InitializeCamera(); ApiOfflineText = ""; } catch (Exception ex) { if (ex is System.Net.Http.HttpRequestException) { ApiOfflineText = "Offline"; } Debug.WriteLine(ex.Message); } }); }
protected BaseController() { this.FaceServiceClient = new FaceServiceClient(Config.FaceServiceClientKey, Config.FaceServiceApiRoot); this.EmotionService = new EmotionService(); this.FaceService = new FaceService(this.FaceServiceClient); this.SlackService = new SlackService(); }
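The Config.FaceServiceClientKey and Config.FaceServiceApiRoot values consumed by the constructor above are not part of these snippets; one plausible shape for them, assuming they are read from appSettings (the class name, key names, and endpoint are assumptions, not the project's actual configuration code):

using System.Configuration;

// Hypothetical configuration holder; the real project may load these values differently.
public static class Config
{
    // Azure Face API subscription key, e.g. stored in Web.config appSettings.
    public static string FaceServiceClientKey =>
        ConfigurationManager.AppSettings["FaceServiceClientKey"];

    // Regional Face API endpoint, e.g. "https://westus.api.cognitive.microsoft.com/face/v1.0".
    public static string FaceServiceApiRoot =>
        ConfigurationManager.AppSettings["FaceServiceApiRoot"];
}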
public async Task<string> IdentifyTag(Guid tagId, Guid personId)
{
    IFaceService faceService = new FaceService();
    // Await the identification so failures surface instead of being silently dropped.
    await faceService.IdentifyTagAsync(tagId, personId);
    return "OK";
}
/// <summary>
/// Loads persons and cleans up the ObservableCollection.
/// </summary>
/// <returns></returns>
private async Task LoadPersonsOfGroupAsync(string groupID)
{
    Persons.Clear();
    var persons = await FaceService.ListPersonsAsync(groupID);
    persons.OrderBy(p => p.Name).ForEach(p => Persons.Add(p));
}
public FaceMorph() { apiKey = "847e6315f892e21449da5f4077c5104f"; apiSecret = "BmskojfFyrZVQhkLfNSnRzX-lK8musO6"; faceService = new FaceService(apiKey, apiSecret); srcLandmark = new List <System.Drawing.Point>(); dstLandmark = new List <System.Drawing.Point>(); itermediateLandmark = new List <System.Drawing.Point>(); }
public void GetImageAsByteArray_ShouldReturnNullWithInvalidFilePath(string filePath)
{
    // Arrange
    FaceService svc = new FaceService();
    // Act
    byte[] result = svc.GetImageAsByteArray(filePath);
    // Assert
    Assert.Null(result);
}

public void GetImageUriContent_ShouldReturnNullWithEmptyOrNullUri(string uri)
{
    // Arrange
    FaceService svc = new FaceService();
    // Act
    var result = svc.GetImageUriContent(uri);
    // Assert
    Assert.Null(result);
}

public void ValidateFileSize_ShouldReturnFalseWithInvalidFilePath(string filePath)
{
    // Arrange
    FaceService svc = new FaceService();
    // Act
    bool result = svc.ValidateFileSize(filePath);
    // Assert
    Assert.False(result);
}
/// <summary>
/// Handle a face detected event
/// </summary>
/// <param name="sender"></param>
/// <param name="args"></param>
private async void FaceDetectionEffect_FaceDetected(FaceDetectionEffect sender, FaceDetectedEventArgs args)
{
    // Only run one face detection call to Cognitive Services at a time
    if (!_isRecognizing)
    {
        //If we need the box for the detected face we can get them here
        //foreach (Windows.Media.FaceAnalysis.DetectedFace face in args.ResultFrame.DetectedFaces)
        //{
        //    BitmapBounds faceRect = face.FaceBox;
        //}
        _isRecognizing = true;

        var lowLagCapture = await _mediaCapture.PrepareLowLagPhotoCaptureAsync(ImageEncodingProperties.CreateUncompressed(MediaPixelFormat.Bgra8));
        var capturedPhoto = await lowLagCapture.CaptureAsync();
        var softwareBitmap = capturedPhoto.Frame.SoftwareBitmap;
        await lowLagCapture.FinishAsync();

        using (IRandomAccessStream randomAccessStream = new InMemoryRandomAccessStream())
        {
            BitmapEncoder encoder = await BitmapEncoder.CreateAsync(BitmapEncoder.JpegEncoderId, randomAccessStream);
            encoder.SetSoftwareBitmap(softwareBitmap);
            await encoder.FlushAsync();
            var stream = randomAccessStream.AsStreamForRead();
            try
            {
                // This calls the Cognitive Services Face API to detect the faces
                var faces = await FaceService.DetectAsync(stream, true, false);
                List<Guid> faceList = new List<Guid>();
                foreach (var face in faces)
                {
                    faceList.Add(face.FaceId);
                }
                LastFaces = faceList.ToArray();
            }
            catch
            {
                // We could not detect faces using Cognitive Services
            }
        }
        _isRecognizing = false;
    }
}
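The handler above only fires once a FaceDetectionEffect has been attached to the camera preview; that setup code is not shown in these snippets. A sketch of the standard UWP wiring, assuming it lives in the same class as the handler (so it can use _mediaCapture) and that the method name and detection interval are illustrative choices:

using System;
using System.Threading.Tasks;
using Windows.Media.Capture;
using Windows.Media.Core;

// Attach a FaceDetectionEffect to the preview stream so FaceDetected fires while previewing.
private async Task StartLocalFaceDetectionAsync()
{
    var definition = new FaceDetectionEffectDefinition
    {
        SynchronousDetectionEnabled = false,
        DetectionMode = FaceDetectionMode.HighPerformance
    };

    var faceDetectionEffect = (FaceDetectionEffect)await _mediaCapture.AddVideoEffectAsync(
        definition, MediaStreamType.VideoPreview);

    faceDetectionEffect.FaceDetected += FaceDetectionEffect_FaceDetected;
    faceDetectionEffect.DesiredDetectionInterval = TimeSpan.FromMilliseconds(100);
    faceDetectionEffect.Enabled = true;
}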
public async Task GetFaceResult_ShouldReturnResultSuccessFalseWithNullHttpResponse()
{
    // Arrange
    FaceService svc = new FaceService();
    // Act
    FaceServiceResult result = await svc.GetFaceResult(null);
    // Assert
    Assert.NotNull(result);
    Assert.False(result.Succeeded);
}

public void GetImagePayload_ShouldReturnNullWithEmptyByteArray()
{
    // Arrange
    byte[] bytes = { };
    FaceService svc = new FaceService();
    // Act
    ByteArrayContent result = svc.GetImagePayload(bytes);
    // Assert
    Assert.Null(result);
}

public async Task ProcessImageAtLocal_ShouldReturnResultSuccessFalseWithInvalidFilePath(string filePath)
{
    // Arrange
    FaceService svc = new FaceService();
    // Act
    FaceServiceResult result = await svc.ProcessImageAtLocal(filePath);
    // Assert
    Assert.NotNull(result);
    Assert.False(result.Succeeded);
}

public void ApiKey_ShouldSet()
{
    // Arrange
    string expected = Guid.NewGuid().ToString();
    FaceService svc = new FaceService();
    // Act
    svc.ApiKey = expected;
    // Assert
    Assert.Equal(expected, svc.ApiKey);
}
private void InitService(AccessService accessService, String projectId) { this.compareService = new CompareService(accessService, projectId); this.detectService = new DetectService(accessService, projectId); this.faceService = new FaceService(accessService, projectId); this.faceSetService = new FaceSetService(accessService, projectId); this.liveDetectService = new LiveDetectService(accessService, projectId); this.qualityService = new QualityService(accessService, projectId); this.searchService = new SearchService(accessService, projectId); this.apiCollectionV2 = new ApiCollectionV2(accessService, projectId); }
public MainWindow() { InitializeComponent(); faceService = new FaceService("2affcadaeddd18f422375adc869f3991", "EsU9hmgweuz8U-nwv6s4JP-9AJt64vhz"); savePath = settings.SaveLocation; detectPath = settings.DetectLocation; Loaded += MainWindow_Loaded; this.saveImagesElement.ItemsSource = saveImages; this.detectResultElement.ItemsSource = DetectFaceInfos; DocumentStore = new DocumentStore {Url = "http://localhost:8080/"}; }
public async Task ProcessImageAtUrl_ShouldReturnResultSuccessFalseWithInvalidUri(string uri)
{
    // Arrange
    FaceService svc = new FaceService();
    // Act
    FaceServiceResult result = await svc.ProcessImageAtUrl(uri);
    // Assert
    Assert.NotNull(result);
    Assert.False(result.Succeeded);
}
public BaiduFace() { InitializeComponent(); _faceService = new FaceService(); btnSelect.Click += BtnSelect_Click; btnRegister.Click += BtnRegister_Click; btnVerify.Click += BtnVerify_Click; btnDelete.Click += BtnDelete_Click; btnIdentify.Click += BtnIdentify_Click; btnUpdate.Click += BtnUpdate_Click; btnFaceMatch.Click += BtnFaceMatch_Click; }
public async Task ProcessImageAsByteArray_ShouldReturnResultSuccessFalseWithNullByteArray()
{
    // Arrange
    byte[] bytes = null;
    FaceService svc = new FaceService();
    // Act
    FaceServiceResult result = await svc.ProcessImageAsByteArray(bytes);
    // Assert
    Assert.NotNull(result);
    Assert.False(result.Succeeded);
}
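The tests above describe a defensive FaceService that returns null or a failed result rather than throwing on bad input. A minimal sketch of two of those members, consistent with the tests but not taken from the actual project (the guard clauses and content-type choice are assumptions):

using System;
using System.IO;
using System.Net.Http;
using System.Net.Http.Headers;

public class FaceService
{
    public string ApiKey { get; set; }

    // Returns null rather than throwing when the file path is missing or invalid.
    public byte[] GetImageAsByteArray(string filePath)
    {
        if (string.IsNullOrWhiteSpace(filePath) || !File.Exists(filePath))
        {
            return null;
        }
        return File.ReadAllBytes(filePath);
    }

    // Wraps raw image bytes for an HTTP POST; an empty or null array yields null.
    public ByteArrayContent GetImagePayload(byte[] bytes)
    {
        if (bytes == null || bytes.Length == 0)
        {
            return null;
        }
        var content = new ByteArrayContent(bytes);
        content.Headers.ContentType = new MediaTypeHeaderValue("application/octet-stream");
        return content;
    }
}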
public async Task <IActionResult> DeleteAsync() { try { await FaceService.Delete(Client); } catch (Exception ex) { _logger.LogError(ex.ToString()); return(BadRequest(new { Error = ex.Message })); } return(Ok(new { Info = "Face List Deleted" })); }
public async Task <IActionResult> Get() { var detected = await FaceService.Detect(Client, @"https://localhost:5001/assets/family.jpg", FaceService.RECOGNITION_MODEL1); var files = new List <string>(); var fileEntries = Directory.GetFiles(ASSETS); foreach (var file in fileEntries) { files.Add($"/assets/{Path.GetFileName(file)}"); } return(Ok(new { Images = files, Detected = detected })); }
static void Main(string[] args)
{
    String apiKey = "847e6315f892e21449da5f4077c5104f";
    String apiSecret = "BmskojfFyrZVQhkLfNSnRzX-lK8musO6";
    FaceService faceService = new FaceService(apiKey, apiSecret);
    string filePath = "D:\\Codes\\datasets\\face_morph\\bbt.jpg";
    DetectResult detectResult = faceService.Detection_DetectImg(filePath);
    Image<Bgr, Byte> srcImg = new Image<Bgr, Byte>(filePath);

    for (int cnt = 0; cnt < detectResult.face.Count; cnt++)
    {
        string pointFileName = String.Format("D:\\Codes\\datasets\\face_morph\\result_bbt_face_{0}.txt", cnt);
        FileStream fileStream = new FileStream(pointFileName, FileMode.Create);
        StreamWriter streamWriter = new StreamWriter(fileStream);

        // Face++ reports positions as percentages of the image size, so scale by width/height and divide by 100.
        // The casts wrap the whole expression so the percentage is not truncated before scaling.
        Rectangle faceRect = new Rectangle(
            (int)(detectResult.face[cnt].position.center.x * srcImg.Width / 100 - detectResult.face[cnt].position.width * srcImg.Width * 0.5 / 100),
            (int)(detectResult.face[cnt].position.center.y * srcImg.Height / 100 - detectResult.face[cnt].position.height * srcImg.Height * 0.5 / 100),
            (int)(detectResult.face[cnt].position.width * srcImg.Width / 100),
            (int)(detectResult.face[cnt].position.height * srcImg.Height / 100));
        Image<Bgr, byte> faceImg = srcImg.GetSubRect(faceRect);
        string fileName = String.Format("D:\\Codes\\datasets\\face_morph\\result_bbt_face_{0}.jpg", cnt);
        faceImg.Save(fileName);

        // Collect the five landmarks (eyes, mouth corners, nose) as offsets from the face center,
        // normalized by the face width/height.
        IList<FaceppSDK.Point> featurePoints = new List<FaceppSDK.Point>();
        //featurePoints.Add(detectResult.face[cnt].position.center);
        FaceppSDK.Point tempPoint1 = new FaceppSDK.Point();
        tempPoint1.x = (detectResult.face[cnt].position.eye_left.x - detectResult.face[cnt].position.center.x) / detectResult.face[cnt].position.width;
        tempPoint1.y = (detectResult.face[cnt].position.eye_left.y - detectResult.face[cnt].position.center.y) / detectResult.face[cnt].position.height;
        featurePoints.Add(tempPoint1);
        FaceppSDK.Point tempPoint2 = new FaceppSDK.Point();
        tempPoint2.x = (detectResult.face[cnt].position.eye_right.x - detectResult.face[cnt].position.center.x) / detectResult.face[cnt].position.width;
        tempPoint2.y = (detectResult.face[cnt].position.eye_right.y - detectResult.face[cnt].position.center.y) / detectResult.face[cnt].position.height;
        featurePoints.Add(tempPoint2);
        FaceppSDK.Point tempPoint3 = new FaceppSDK.Point();
        tempPoint3.x = (detectResult.face[cnt].position.mouth_left.x - detectResult.face[cnt].position.center.x) / detectResult.face[cnt].position.width;
        tempPoint3.y = (detectResult.face[cnt].position.mouth_left.y - detectResult.face[cnt].position.center.y) / detectResult.face[cnt].position.height;
        featurePoints.Add(tempPoint3);
        FaceppSDK.Point tempPoint4 = new FaceppSDK.Point();
        tempPoint4.x = (detectResult.face[cnt].position.mouth_right.x - detectResult.face[cnt].position.center.x) / detectResult.face[cnt].position.width;
        tempPoint4.y = (detectResult.face[cnt].position.mouth_right.y - detectResult.face[cnt].position.center.y) / detectResult.face[cnt].position.height;
        featurePoints.Add(tempPoint4);
        FaceppSDK.Point tempPoint5 = new FaceppSDK.Point();
        tempPoint5.x = (detectResult.face[cnt].position.nose.x - detectResult.face[cnt].position.center.x) / detectResult.face[cnt].position.width;
        tempPoint5.y = (detectResult.face[cnt].position.nose.y - detectResult.face[cnt].position.center.y) / detectResult.face[cnt].position.height;
        featurePoints.Add(tempPoint5);

        // Write the normalized landmark coordinates to the text file and mark each point on the source image.
        foreach (FaceppSDK.Point featurePoint in featurePoints)
        {
            streamWriter.WriteLine(featurePoint.x.ToString());
            streamWriter.WriteLine(featurePoint.y.ToString());
            System.Drawing.PointF point = new System.Drawing.PointF(
                (float)featurePoint.x * srcImg.Width / 100,
                (float)featurePoint.y * srcImg.Height / 100);
            Cross2DF cross = new Cross2DF(point, (float)3.0, (float)3.0);
            srcImg.Draw(cross, new Bgr(0, 255, 0), 3);
        }
        streamWriter.Flush();
        streamWriter.Close();
        fileStream.Close();
        //srcImg.Save("D:\\Codes\\datasets\\face_morph\\result_bbt.jpg");
    }
}