/// <summary>
/// Captures the current camera frame to disk, uploads it to Azure Blob Storage,
/// runs Face API detection and persists the first face's id/gender/age.
/// 'async void' is acceptable here only because this is a UI event handler.
/// </summary>
private async void button3_Click(object sender, EventArgs e)
{
    // SECURITY NOTE(review): keys are hard-coded below; move them to protected
    // configuration. (Fix: removed the unused Twilio AccountSid/AuthToken locals.)
    const string picturePath = @"C:\Users\ma_eg_000\Desktop\PrincetonHack\pic.jpg";
    FaceServiceClient faceClient = new FaceServiceClient("0e58dbc56e5445ac8fcdfa9ffbf5ef60");

    // Give the camera a moment to start if it is not yet running.
    // Fix: Thread.Sleep froze the UI thread; Task.Delay keeps it responsive.
    if (!cam.IsRunning)
    {
        await Task.Delay(1000);
    }

    bit.Save(picturePath);
    // Allow the file write to settle before re-opening the file below.
    await Task.Delay(1000);

    StorageCredentials storageCredentials = new StorageCredentials(
        "faceimage",
        "DYrgou0cTTp6J7KDdMVVxR3BDtM31zh393oyf0CfWdTuihRUgDwyryQuIqj203SnPHMJVK7VvLGm/KtfIpUncw==");
    CloudStorageAccount storageAccount = new CloudStorageAccount(storageCredentials, true);
    CloudBlobClient blobClient = storageAccount.CreateCloudBlobClient();
    CloudBlobContainer container = blobClient.GetContainerReference("facecontainer");
    // BUG FIX: this call was fire-and-forget; await it so the container is
    // guaranteed to exist before the upload below.
    await container.CreateIfNotExistsAsync();

    CloudBlockBlob blockBlob = container.GetBlockBlobReference("pic.jpg");
    // BUG FIX: the same file was uploaded twice; one upload is sufficient.
    using (var fileStream = System.IO.File.OpenRead(picturePath))
    {
        blockBlob.UploadFromStream(fileStream);
    }

    double[] ages = await UploadAndDetectFaceAges(picturePath, faceClient);
    string[] genders = await UploadAndDetectFaceGender(picturePath, faceClient);
    Guid[] ids = await UploadAndDetectFaceId(picturePath, faceClient);
    InsertData(ids[0].ToString(), genders[0], ages[0].ToString(), textBox1.Text);
}
/// <summary>
/// Detects the faces in a photo and returns the estimated age and gender.
/// If several faces are detected, only the last one's attributes survive.
/// </summary>
/// <param name="url">Url of the image to analyze.</param>
/// <returns>A ResultFace with age/gender; an empty ResultFace on failure.</returns>
public static async Task<ResultFace> DetecFacesAndDisplayResult(string url)
{
    var subscriptionKey = "";
    try
    {
        ResultFace result = new ResultFace();
        var client = new FaceServiceClient(subscriptionKey);
        var faces = await client.DetectAsync(url, false, true, true);
        Console.WriteLine(" > " + faces.Length + " detected.");
        foreach (var face in faces)
        {
            Console.WriteLine(" >> age: " + face.Attributes.Age + " gender:" + face.Attributes.Gender);
            result.Age = face.Attributes.Age;
            result.Gender = face.Attributes.Gender;
        }
        return result;
    }
    catch (Exception exception)
    {
        // BUG FIX: the log statement was unreachable (it followed the return);
        // log first, then return the empty result.
        Console.WriteLine(exception.ToString());
        return new ResultFace();
    }
}
/// <summary>
/// Detects faces in a local image file, enriches the results with emotion
/// scores, and returns both the detected faces and rectangle objects prepared
/// for UI rendering (with emotion/age/gender copied onto each rectangle).
/// </summary>
/// <param name="selectedFile">Path of the local image to analyze.</param>
/// <param name="subscriptionKeyFace">Face API subscription key.</param>
/// <param name="subscriptionKeyEmotions">Emotion API subscription key.</param>
/// <returns>Tuple of (detected faces, faces converted for rendering); both empty when detection fails (errors are logged).</returns>
public async Task<Tuple<ObservableCollection<Face>, ObservableCollection<Face>>> StartFaceDetection(string selectedFile, string subscriptionKeyFace, string subscriptionKeyEmotions)
{
    var detectedFaces = new ObservableCollection<Face>();
    var facesRect = new ObservableCollection<Face>();
    Debug.WriteLine("Request: Detecting {0}", selectedFile);
    using (var fileStreamFace = File.OpenRead(selectedFile))
    {
        try
        {
            var client = new FaceServiceClient(subscriptionKeyFace);
            var faces = await client.DetectAsync(fileStreamFace, false, true, true);
            Debug.WriteLine("Response: Success. Detected {0} face(s) in {1}", faces.Length, selectedFile);
            var imageInfo = GetImageInfoForRendering(selectedFile);
            Debug.WriteLine("{0} face(s) has been detected", faces.Length);
            foreach (var face in faces)
            {
                // Build the local view-model for each API face.
                var detectedFace = new Face()
                {
                    ImagePath = selectedFile,
                    Left = face.FaceRectangle.Left,
                    Top = face.FaceRectangle.Top,
                    Width = face.FaceRectangle.Width,
                    Height = face.FaceRectangle.Height,
                    FaceId = face.FaceId,
                    Gender = face.Attributes.Gender,
                    Age = face.Attributes.Age.ToString(),
                };
                detectedFaces.Add(detectedFace);
            }
            // Convert detection result into UI binding object for rendering
            foreach (var face in CalculateFaceRectangleForRendering(faces, MaxImageSize, imageInfo))
            {
                facesRect.Add(face);
            }
            // update emotions
            detectedFaces = await UpdateEmotions(selectedFile, detectedFaces, subscriptionKeyEmotions);
            // Copy the emotion/age/gender data onto the matching render rectangles.
            foreach (var faceRect in facesRect)
            {
                foreach (var detectedFace in detectedFaces.Where(detectedFace => faceRect.FaceId == detectedFace.FaceId))
                {
                    faceRect.Scores = detectedFace.Scores;
                    faceRect.Age = detectedFace.Age;
                    faceRect.Gender = detectedFace.Gender;
                }
            }
        }
        catch (Exception ex)
        {
            // Best effort: log and return whatever was collected so far.
            Debug.WriteLine(ex.ToString());
        }
        var returnData = new Tuple<ObservableCollection<Face>, ObservableCollection<Face>>(detectedFaces, facesRect);
        return returnData;
    }
}
/// <summary>
/// Enumerates all blobs in the configured container, detects faces in each
/// image by URL, and adds every detected face to a Face API face list (a new
/// list is created for the first face and after the counter wraps at 1000).
/// 'async void' is acceptable here only because this is a UI event handler.
/// </summary>
private async void AddFaces(object sender, RoutedEventArgs e)
{
    var storageAccount = CloudStorageAccount.Parse(_storageConnectionString);
    var blobClient = storageAccount.CreateCloudBlobClient();
    var container = blobClient.GetContainerReference(_containerName);
    // Blobs must be publicly readable so the Face API can fetch them by URL.
    await container.SetPermissionsAsync(new BlobContainerPermissions { PublicAccess = BlobContainerPublicAccessType.Blob });
    var detectedFaces = 0;
    var currentFaceListId = "";
    var faceServiceClient = new FaceServiceClient(_subscriptionKey);
    foreach (var blob in await ListBlobsAsync(container))
    {
        Debug.WriteLine(blob.Uri);
        try
        {
            var faces = await faceServiceClient.DetectAsync(blob.Uri.ToString(), true, true, null);
            foreach (var face in faces)
            {
                // First face overall (or after the counter wraps): create a fresh face list.
                if (detectedFaces++ == 0)
                {
                    currentFaceListId = await CreateFaceListAsync(faceServiceClient);
                    Debug.WriteLine(currentFaceListId);
                }
                try
                {
                    // Store the source blob URL + rectangle as user data alongside the face.
                    var faceData = new FaceData { BlobUrl = blob.Uri.ToString(), FaceRectangle = face.FaceRectangle };
                    var faceDataJS = JsonConvert.SerializeObject(faceData);
                    var faceResult = await faceServiceClient.AddFaceToFaceListAsync(currentFaceListId, blob.Uri.ToString(), faceDataJS, face.FaceRectangle);
                    Debug.WriteLine(faceResult.PersistedFaceId);
                }
                catch (Exception ex)
                {
                    // Best effort per face: log and continue with the next one.
                    Debug.WriteLine(ex.Message);
                }
                // NOTE(review): resetting at 1000 makes the next face create a new
                // list — presumably to respect a per-list capacity limit; confirm.
                if (detectedFaces >= 1000)
                {
                    detectedFaces = 0;
                }
            }
        }
        catch (Exception ex)
        {
            // Detection failed for this blob: log and move on to the next blob.
            Debug.WriteLine(ex.Message);
        }
    }
}
/// <summary>
/// Detects faces in the image at <paramref name="url"/> and pushes each face's
/// age and gender to SharePoint; when no faces are found or detection fails,
/// records "Sin identificar" with age "0".
/// </summary>
/// <param name="url">Url of the image to analyze.</param>
/// <param name="id">SharePoint item id to update.</param>
/// <remarks>
/// BUG FIX: was 'async void', so exceptions were unobservable and completion
/// could not be awaited; returning Task is source-compatible for direct callers.
/// </remarks>
public static async Task DetecFacesAndDisplayResult(string url, string id)
{
    var subscriptionKey = "";
    try
    {
        var client = new FaceServiceClient(subscriptionKey);
        var faces = await client.DetectAsync(url, false, true, true);
        Console.WriteLine(" > " + faces.Length + " detected.");
        if (faces.Length == 0)
        {
            UpdateSharePoint(id, "0", "Sin identificar");
        }
        foreach (var face in faces)
        {
            Console.WriteLine(" >> age: " + face.Attributes.Age + " gender:" + face.Attributes.Gender);
            UpdateSharePoint(id, face.Attributes.Age.ToString(), face.Attributes.Gender);
        }
    }
    catch (Exception exception)
    {
        UpdateSharePoint(id, "0", "Sin identificar");
        Console.WriteLine(exception.ToString());
    }
}
/// <summary>
/// Page navigation entry: creates the Face API client, then initializes the
/// camera and the face-detection effect.
/// </summary>
protected override async void OnNavigatedTo(NavigationEventArgs e)
{
    // NOTE(review): hard-coded subscription key; consider moving to configuration.
    const string faceApiKey = "5fd4bdd5a80c4ab086ab1267b75d0e4c";
    _instance = new FaceServiceClient(faceApiKey);

    base.OnNavigatedTo(e);

    await InitializeCameraAsync();
    await CreateFaceDetectionEffectAsync();
}
/// <summary>
/// Reads the Oxford API keys from configuration and creates the Emotion and
/// Face service clients.
/// </summary>
public async Task InitializeAsync()
{
    var oxfordEmotionClientKey = _configurationReader["OxfordEmotionClientKey"];
    var oxfordFaceClientKey = _configurationReader["OxfordFaceClientKey"];
    _emotionServiceClient = new EmotionServiceClient(_httpClient, oxfordEmotionClientKey);
    _faceServiceClient = new FaceServiceClient(oxfordFaceClientKey);
    // Idiom fix: Task.FromResult<object>(null) allocated a throw-away task;
    // Task.CompletedTask keeps the method awaitable without the allocation.
    await Task.CompletedTask;
}
/// <summary>
/// Detects faces in a local image and, when any are found, runs emotion
/// analysis on the same image and prints per-face emotion scores.
/// </summary>
/// <param name="fileLocation">Path of the local image file.</param>
/// <param name="subscriptionKeyFace">Face API subscription key.</param>
/// <param name="subscriptionKeyEmotion">Emotion API subscription key.</param>
/// <remarks>
/// BUG FIX: was 'async void', so exceptions were unobservable and completion
/// could not be awaited; returning Task is source-compatible for direct callers.
/// </remarks>
private static async Task DetecFacesAndDisplayResult(string fileLocation, string subscriptionKeyFace, string subscriptionKeyEmotion)
{
    // Two independent streams: each API call consumes its stream from the start.
    using (var fileStreamFace = File.OpenRead(fileLocation))
    {
        using (var fileStreamEmotions = File.OpenRead(fileLocation))
        {
            try
            {
                var faceServiceClient = new FaceServiceClient(subscriptionKeyFace);
                var emotionServiceClient = new EmotionServiceClient(subscriptionKeyEmotion);
                var faces = await faceServiceClient.DetectAsync(fileStreamFace, false, true, true);
                Console.WriteLine(" > " + faces.Length + " detected.");
                if (faces.Length > 0)
                {
                    // Pass the face rectangles so the Emotion API scores the same faces.
                    var faceRectangles = new List<Rectangle>();
                    foreach (var face in faces)
                    {
                        Console.WriteLine(" >> age: " + face.Attributes.Age + " gender:" + face.Attributes.Gender);
                        var rectangle = new Rectangle
                        {
                            Height = face.FaceRectangle.Height,
                            Left = face.FaceRectangle.Left,
                            Top = face.FaceRectangle.Top,
                            Width = face.FaceRectangle.Width
                        };
                        faceRectangles.Add(rectangle);
                    }
                    // on face detected we start emotion analysis
                    var emotions = await emotionServiceClient.RecognizeAsync(fileStreamEmotions, faceRectangles.ToArray());
                    var emotionsDetails = "";
                    foreach (var emotion in emotions)
                    {
                        emotionsDetails += $@" Anger: {emotion.Scores.Anger} Contempt: {emotion.Scores.Contempt} Disgust: {emotion.Scores.Disgust} Fear: {emotion.Scores.Fear} Happiness: {emotion.Scores.Happiness} Neutral: {emotion.Scores.Neutral} Sadness: {emotion.Scores.Sadness} Surprise: {emotion.Scores.Surprise} ";
                    }
                    Console.WriteLine(" >> emotions: " + emotionsDetails);
                }
            }
            catch (Exception exception)
            {
                Console.WriteLine(exception.ToString());
            }
        }
    }
}
/// <summary>
/// Pex-generated exploration test: calls DetectAsync on a client built with a
/// null subscription key and null/false arguments, tracking the returned task
/// for disposal. Kept byte-identical — regenerating would lose the Pex seed.
/// </summary>
public void DetectAsync945()
{
    using (PexDisposableContext disposables = PexDisposableContext.Create())
    {
        Task<global::Microsoft.ProjectOxford.Face.Contract.Face[]> task;
        FaceServiceClient s0 = new FaceServiceClient((string)null);
        task = this.DetectAsync(s0, (string)null, false, false, false, false);
        disposables.Add((IDisposable)task);
        disposables.Dispose();
    }
}
/// <summary>
/// Detects faces in a hard-coded sample image and prints each face's age and gender.
/// </summary>
/// <param name="client">Initialized Face API client.</param>
/// <remarks>
/// BUG FIX: was 'async void'; returning Task makes exceptions observable and
/// the call awaitable, and is source-compatible for direct callers.
/// </remarks>
private static async Task GetFaces(FaceServiceClient client)
{
    string imagePath = @"C:\SD\OneDrive\Event Materials\2015 05 30 MalagaDotNet Coding4Fun\Face Samples\princesas.jpg";
    using (var img = File.OpenRead(imagePath))
    {
        var faces = await client.DetectAsync(img, false, true, true);
        foreach (var face in faces)
        {
            Console.WriteLine("age:" + face.Attributes.Age);
            Console.WriteLine("gender:" + face.Attributes.Gender);
        }
    }
}
/// <summary>
/// Detects faces in the currently selected item's image; any error is shown
/// in the output text block. (UI event handler, hence 'async void'.)
/// </summary>
private async void ButtonGetFacesImage1_Click(object sender, RoutedEventArgs e)
{
    const string subscriptionKey = "4c138b4d82b947beb2e2926c92d1e514";
    try
    {
        var imageUrl = GetSelectedItemUrl();
        var faceClient = new FaceServiceClient(subscriptionKey);
        // The detection result is intentionally unused here; the call is made
        // for its side effects / error reporting only.
        await faceClient.DetectAsync(imageUrl, false, true, true);
    }
    catch (Exception exception)
    {
        TextBlockOutput.Text = exception.ToString();
    }
}
/// <summary>
/// Detects faces in a local image, saves a cropped image per face, and returns
/// Face view-models with their rendering rectangles already calculated.
/// </summary>
/// <param name="selectedFile">Path of the image to analyze.</param>
/// <param name="file">StorageFile used to extract per-face crops.</param>
/// <param name="imageInfo">Original image (width, height) used for rectangle scaling.</param>
/// <param name="subscriptionKey">Face API subscription key.</param>
/// <returns>The detected faces; empty when detection fails (errors are logged).</returns>
public async Task<ObservableCollection<Face>> StartFaceDetection(string selectedFile, StorageFile file, Tuple<int, int> imageInfo, string subscriptionKey)
{
    var detectedFaces = new ObservableCollection<Face>();
    Debug.WriteLine("Request: Detecting {0}", selectedFile);
    var sampleFile = await StorageFile.GetFileFromPathAsync(selectedFile);
    var fs = await FileIO.ReadBufferAsync(sampleFile);
    using (var stream = fs.AsStream())
    {
        try
        {
            var client = new FaceServiceClient(subscriptionKey);
            var faces = await client.DetectAsync(stream, true, true, true);
            Debug.WriteLine("Response: Success. Detected {0} face(s) in {1}", faces.Length, selectedFile);
            Debug.WriteLine("{0} face(s) has been detected", faces.Length);
            foreach (var face in faces)
            {
                // Save a cropped image of this face and keep its path on the view-model.
                var fileFaceImage = await FileHelper.SaveFaceImageFile(file, face);
                var newFace = new Face
                {
                    ImagePath = selectedFile,
                    Left = face.FaceRectangle.Left,
                    Top = face.FaceRectangle.Top,
                    Width = face.FaceRectangle.Width,
                    Height = face.FaceRectangle.Height,
                    FaceId = face.FaceId.ToString(),
                    Gender = face.Attributes.Gender,
                    Age = face.Attributes.Age,
                    AgeComplete = string.Format("{0:#} years old", face.Attributes.Age),
                    ImageFacePath = fileFaceImage.Path
                };
                // calculate rect image
                newFace = CalculateFaceRectangleForRendering(newFace, MaxImageSize, imageInfo);
                detectedFaces.Add(newFace);
            }
        }
        catch (Exception ex)
        {
            // Best effort: log and return whatever was collected so far.
            Debug.WriteLine(ex.ToString());
        }
    }
    return detectedFaces;
}
/// <summary>
/// Creates the employees person group in the Face API.
/// </summary>
/// <returns>true when the group was created; false when the call failed
/// (e.g. the group already exists or the service is unreachable).</returns>
public static async Task<bool> CrearGrupoEmpleados()
{
    try
    {
        var clienteFace = new FaceServiceClient(Constantes.FaceApiKey, Constantes.FaceApiURL);
        await clienteFace.CreatePersonGroupAsync(Constantes.FaceGroupID, Constantes.FaceGroupName, Constantes.FaceGroupDescription);
        return (true);
    }
    catch (Exception)
    {
        // Fix: the caught exception was bound to an unused variable; failure is
        // deliberately swallowed and reported via the return value.
        return (false);
    }
}
/// <summary>
/// Sends the image stream to the Face API and returns the rectangle of every
/// detected face.
/// </summary>
/// <param name="stream">Readable stream containing the image.</param>
/// <returns>The detected face rectangles; never null or empty.</returns>
/// <exception cref="Exception">Thrown when no faces are detected
/// (generic type kept for caller compatibility).</exception>
public static async Task<FaceRectangle[]> UploadAndDetectFaces(Stream stream)
{
    var faceClient = new FaceServiceClient(CognitiveServicesKeys.FaceKey, "https://westcentralus.api.cognitive.microsoft.com/face/v1.0");
    var faces = await faceClient.DetectAsync(stream);
    // Fix: materialize once instead of enumerating the LINQ query twice, and
    // drop the impossible null check (Select never returns null).
    var faceRects = faces.Select(face => face.FaceRectangle).ToArray();
    if (faceRects.Length == 0)
    {
        throw new Exception("Can't detect the faces");
    }
    return (faceRects);
}
/// <summary>
/// Creates the Face, Emotion and Vision clients from app configuration, wires
/// the frame grabber to the face analysis function and starts the camera.
/// </summary>
private void FaceCamera()
{
    var settings = ConfigurationManager.AppSettings;
    _faceClient = new FaceServiceClient(settings["FaceAPIKey"], settings["FaceAPIHost"]);
    _emotionClient = new EmotionServiceClient(settings["EmotionAPIKey"], settings["EmotionAPIHost"]);
    _visionClient = new VisionServiceClient(settings["VisionAPIKey"], settings["VisionAPIHost"]);

    // Emotion analysis runs on grabbed frames; remote results are fused locally.
    _grabber.AnalysisFunction = FacesAnalysisFunction;
    _fuseClientRemoteResults = true;

    imageDrawing.Visible = true;
    imageBase.Visible = false;
    StartCamera();
}
/// <summary>
/// Detects faces in a local image file and returns both the detected face
/// view-models and rectangle objects converted for UI rendering.
/// </summary>
/// <param name="selectedFile">Path of the image to analyze.</param>
/// <param name="subscriptionKey">Face API subscription key.</param>
/// <returns>Tuple of (detected faces, faces converted for rendering); both empty when detection fails (errors are logged).</returns>
public async Task <Tuple <ObservableCollection <Face>, ObservableCollection <Face> > > StartFaceDetection(string selectedFile, string subscriptionKey)
{
    var detectedFaces = new ObservableCollection <Face>();
    var facesRect = new ObservableCollection <Face>();
    Debug.WriteLine("Request: Detecting {0}", selectedFile);
    // Call detection REST API
    using (var fileStream = File.OpenRead(selectedFile))
    {
        try
        {
            var client = new FaceServiceClient(subscriptionKey);
            var faces = await client.DetectAsync(fileStream, false, true, true);
            Debug.WriteLine("Response: Success. Detected {0} face(s) in {1}", faces.Length, selectedFile);
            var imageInfo = GetImageInfoForRendering(selectedFile);
            Debug.WriteLine("{0} face(s) has been detected", faces.Length);
            foreach (var face in faces)
            {
                // Build the local view-model for each API face.
                detectedFaces.Add(item: new Face()
                {
                    ImagePath = selectedFile,
                    Left = face.FaceRectangle.Left,
                    Top = face.FaceRectangle.Top,
                    Width = face.FaceRectangle.Width,
                    Height = face.FaceRectangle.Height,
                    FaceId = face.FaceId.ToString(),
                    Gender = face.Attributes.Gender,
                    Age = string.Format("{0:#} years old", face.Attributes.Age),
                });
            }
            // Convert detection result into UI binding object for rendering
            foreach (var face in CalculateFaceRectangleForRendering(faces, MaxImageSize, imageInfo))
            {
                facesRect.Add(face);
            }
        }
        catch (Exception ex)
        {
            // Best effort: log and fall through with whatever was collected.
            Debug.WriteLine(ex.ToString());
        }
        var returnData = new Tuple <ObservableCollection <Face>, ObservableCollection <Face> >(detectedFaces, facesRect);
        return (returnData);
    }
}
/// <summary>
/// Azure Function (v2 model): POST cognitiveservices/age. Accepts a JSON body
/// with either an image url or raw image bytes plus a Face API key and domain,
/// runs age detection and returns each face's id, rectangle and estimated age.
/// </summary>
/// <param name="req">Request whose body deserializes to CognitiveServicesRequestItem.</param>
/// <param name="log">Function logger (currently unused).</param>
/// <returns>200 with the per-face age results; 400 on missing inputs or any failure.</returns>
public static async Task <IActionResult> Run([HttpTrigger(AuthorizationLevel.Anonymous, "post", Route = "cognitiveservices/age")] HttpRequest req, ILogger log)
{
    try
    {
        var body = await req.ReadAsStringAsync();
        var cognitiveServicesRequestItem = JsonConvert.DeserializeObject <CognitiveServicesRequestItem>(body);
        var url = cognitiveServicesRequestItem.Url;
        var image = cognitiveServicesRequestItem.ImageBytes;
        var apiKey = cognitiveServicesRequestItem.ApiKey;
        var domainEndpoint = cognitiveServicesRequestItem.DomainEndpoint;
        if (string.IsNullOrEmpty(apiKey) || string.IsNullOrEmpty(domainEndpoint))
        {
            return (new BadRequestObjectResult("Please provide an api key and a domain endpoint"));
        }
        if (string.IsNullOrEmpty(url) && image == null)
        {
            return (new BadRequestObjectResult("Please provide an image or an url"));
        }
        // analyze image from url with the provided apikey
        var service = new FaceServiceClient(apiKey, $"https://{domainEndpoint}.api.cognitive.microsoft.com/face/v1.0");
        var faceAttributes = new[] { FaceAttributeType.Age };
        Face[] result = null;
        // The url takes precedence; bytes are used only when no url was supplied.
        if (string.IsNullOrEmpty(url))
        {
            result = await service.DetectAsync(new MemoryStream(image), true, false, faceAttributes);
        }
        else
        {
            result = await service.DetectAsync(url, true, false, faceAttributes);
        }
        var ageResult = result?.Select(r => new { r.FaceId, r.FaceRectangle, r.FaceAttributes.Age });
        // send the result back
        return (new OkObjectResult(ageResult));
    }
    catch (Exception e)
    {
        return (new BadRequestObjectResult(e.Message));
    }
}
/// <summary>
/// Detects faces in a local image file and returns both the detected face
/// view-models and rectangle objects converted for UI rendering.
/// </summary>
/// <param name="selectedFile">Path of the image to analyze.</param>
/// <param name="subscriptionKey">Face API subscription key.</param>
/// <returns>Tuple of (detected faces, faces converted for rendering); both empty when detection fails (errors are logged).</returns>
public async Task<Tuple<ObservableCollection<Face>, ObservableCollection<Face>>> StartFaceDetection(string selectedFile, string subscriptionKey)
{
    var detectedFaces = new ObservableCollection<Face>();
    var facesRect = new ObservableCollection<Face>();
    Debug.WriteLine("Request: Detecting {0}", selectedFile);
    // Call detection REST API
    using (var fileStream = File.OpenRead(selectedFile))
    {
        try
        {
            var client = new FaceServiceClient(subscriptionKey);
            var faces = await client.DetectAsync(fileStream, false, true, true);
            Debug.WriteLine("Response: Success. Detected {0} face(s) in {1}", faces.Length, selectedFile);
            var imageInfo = GetImageInfoForRendering(selectedFile);
            Debug.WriteLine("{0} face(s) has been detected", faces.Length);
            foreach (var face in faces)
            {
                // Build the local view-model for each API face.
                detectedFaces.Add(item: new Face()
                {
                    ImagePath = selectedFile,
                    Left = face.FaceRectangle.Left,
                    Top = face.FaceRectangle.Top,
                    Width = face.FaceRectangle.Width,
                    Height = face.FaceRectangle.Height,
                    FaceId = face.FaceId.ToString(),
                    Gender = face.Attributes.Gender,
                    Age = string.Format("{0:#} years old", face.Attributes.Age),
                });
            }
            // Convert detection result into UI binding object for rendering
            foreach (var face in CalculateFaceRectangleForRendering(faces, MaxImageSize, imageInfo))
            {
                facesRect.Add(face);
            }
        }
        catch (Exception ex)
        {
            // Best effort: log and fall through with whatever was collected.
            Debug.WriteLine(ex.ToString());
        }
        var returnData = new Tuple<ObservableCollection<Face>, ObservableCollection<Face>>(detectedFaces, facesRect);
        return returnData;
    }
}
private bool disposedValue = false; // To detect redundant calls

/// <summary>
/// Releases the Api client. Safe to call multiple times; only the first call
/// has any effect.
/// </summary>
/// <param name="disposing">true when called from Dispose(); false when called from a finalizer.</param>
protected virtual void Dispose(bool disposing)
{
    if (disposedValue)
    {
        return;
    }
    if (disposing && Api != null)
    {
        Api.Dispose();
        Api = null;
    }
    disposedValue = true;
}
/// <summary>
/// Adds the uploaded image to the face list identified by <paramref name="faceId"/>,
/// creating the list on demand when it does not yet exist.
/// </summary>
/// <param name="faceId">Id (and name) of the target face list.</param>
/// <param name="request">Request carrying the raw image bytes.</param>
public async Task RegisterFaceAsync(string faceId, ImageRequest request)
{
    var faceClient = new FaceServiceClient(subscriptionKey: Secrets.CongnitiveServiceFaceApiKey, apiRoot: Consts.CognitiveServiceFaceApiEndPoint);
    try
    {
        await faceClient.GetFaceListAsync(faceId);
    }
    catch (FaceAPIException)
    {
        // The list does not exist yet — create it before adding the face.
        await faceClient.CreateFaceListAsync(faceListId: faceId, name: faceId);
    }
    await faceClient.AddFaceToFaceListAsync(faceListId: faceId, imageStream: new MemoryStream(request.Image));
}
/// <summary>
/// Registers a new (unnamed) person in the person group, attaches the uploaded
/// image as one of that person's faces and retrains the group.
/// </summary>
/// <param name="request">Request carrying the raw face image bytes.</param>
/// <returns>The id of the newly created person.</returns>
public async Task <Guid> CreateFaceAsync(ImageRequest request)
{
    var groupId = await this.PersonListIdRepository.GetIdAsync();
    var faceClient = new FaceServiceClient(subscriptionKey: Secrets.CongnitiveServiceFaceApiKey, apiRoot: Consts.CognitiveServiceFaceApiEndPoint);

    await this.CreatePersonGroupIsNotExist(faceClient, groupId);

    var person = await faceClient.CreatePersonAsync(personGroupId: groupId, name: "noname");
    await faceClient.AddPersonFaceAsync(personGroupId: groupId, personId: person.PersonId, imageStream: new MemoryStream(request.Image));

    await this.TrainPersonGroupAsync(faceClient, groupId);
    return (person.PersonId);
}
/// <summary>
/// Detect all faces in a given image.
/// </summary>
/// <param name="image">The image to check for faces.</param>
/// <returns>An array of Face instances describing each face in the image.</returns>
private async Task <Face[]> DetectFaces(BitmapImage image)
{
    // Serialize the bitmap to an in-memory JPEG so it can be streamed to the API.
    var imageStream = new MemoryStream();
    var jpegEncoder = new JpegBitmapEncoder();
    jpegEncoder.Frames.Add(BitmapFrame.Create(image));
    jpegEncoder.Save(imageStream);
    imageStream.Seek(0, SeekOrigin.Begin);

    // Request every attribute consumed downstream.
    var faceClient = new FaceServiceClient(FACE_KEY, FACE_API);
    var attributes = new FaceAttributeType[]
    {
        FaceAttributeType.Age,
        FaceAttributeType.Accessories,
        FaceAttributeType.Emotion,
        FaceAttributeType.FacialHair,
        FaceAttributeType.Gender,
        FaceAttributeType.Glasses,
        FaceAttributeType.Hair,
        FaceAttributeType.Makeup
    };
    return await faceClient.DetectAsync(imageStream, true, false, attributes);
}
/// <summary>
/// Looks for a person group named/tagged "See4Me" or "_default" (falling back
/// to the first available group) and remembers its id for later identification.
/// Failures are swallowed by design; the service is marked initialized either way.
/// </summary>
/// <param name="faceService">Initialized Face API client.</param>
private async Task InitializeFaceServiceAsync(FaceServiceClient faceService)
{
    try
    {
        var personGroups = await faceService.ListPersonGroupsAsync();
        identifyPersonGroupId = (personGroups.FirstOrDefault(p => p.Name.ContainsIgnoreCase("See4Me") || p.UserData.ContainsIgnoreCase("See4Me") || p.Name.ContainsIgnoreCase("_default") || p.UserData.ContainsIgnoreCase("_default")) ?? personGroups.FirstOrDefault())?.PersonGroupId;
    }
    catch
    {
        // Deliberate best-effort: without a group id, identification is simply unavailable.
    }
    finally
    {
        faceServiceInitialized = true;
    }
}
/// <summary>
/// Detects faces (with a fixed set of attributes) in a hard-coded sample image URL.
/// </summary>
/// <param name="faceApiKey">Face API subscription key.</param>
/// <returns>The detected faces with their requested attributes.</returns>
private static async Task <Microsoft.ProjectOxford.Face.Contract.Face[]> FaceDetectAsync(string faceApiKey)
{
    const string sampleImageUrl = "http://yukainanakamatati.com/wp-content/uploads/2014/07/a1-e1406013277329.jpg";
    var faceClient = new FaceServiceClient(faceApiKey);
    var attributes = new List <FaceAttributeType>()
    {
        FaceAttributeType.Age,
        FaceAttributeType.Gender,
        FaceAttributeType.Smile,
        FaceAttributeType.FacialHair,
        FaceAttributeType.HeadPose,
        FaceAttributeType.Glasses
    };
    return await faceClient.DetectAsync(sampleImageUrl, true, false, attributes);
}
/// <summary>
/// Uploads a face image for the given user to the large person group, reading
/// the API key from local settings.
/// </summary>
/// <param name="imagenStream">Stream with the face image.</param>
/// <param name="userGuid">Person id within the large person group.</param>
/// <returns>Always true; failures surface as exceptions to the caller.</returns>
public static async Task <bool> SubirArchivo(Stream imagenStream, Guid userGuid)
{
    // Fix: removed the unused 'localFolder' local and the unused result binding.
    Windows.Storage.ApplicationDataContainer localSettings = Windows.Storage.ApplicationData.Current.LocalSettings;
    string subscriptionKey = localSettings.Values["apiKey"] as string;
    string subscriptionEndpoint = "https://southcentralus.api.cognitive.microsoft.com/face/v1.0";
    var faceServiceClient = new FaceServiceClient(subscriptionKey, subscriptionEndpoint);
    await faceServiceClient.AddPersonFaceInLargePersonGroupAsync(GroupId, userGuid, imagenStream, null, null);
    return (true);
}
/// <summary>
/// Polls the person-group training status once a second until training leaves
/// the Running state (i.e. it succeeded or failed).
/// </summary>
/// <param name="faceServiceClient">Initialized Face API client.</param>
private static async Task WaitUntilTrainingEnds(FaceServiceClient faceServiceClient)
{
    while (true)
    {
        var status = await faceServiceClient.GetPersonGroupTrainingStatusAsync(Constants.PERSON_GROUP_ID);
        if (status.Status != Status.Running)
        {
            return;
        }
        // Still training — wait a second before polling again.
        await Task.Delay(1000);
    }
}
/// <summary>
/// Starts camera processing: creates the Face API client, schedules periodic
/// frame analysis and begins grabbing from the default camera.
/// 'async void' is acceptable here only because this is a UI event handler.
/// (Cleanup: removed the dead commented-out camera-list selection code.)
/// </summary>
/// <param name="sender"> Source of the event. </param>
/// <param name="e"> Routed event information. </param>
private async void StartButton_Click(object sender, RoutedEventArgs e)
{
    ProgramStatusMessage.Text = "Currently Running";

    // NOTE(review): hard-coded subscription key; move to configuration.
    _faceClient = new FaceServiceClient("ff1f62c7ab4c4295838ec290e9a85300", "https://westcentralus.api.cognitive.microsoft.com/face/v1.0");

    // Analyze one frame every 'secondsBetweenAnalysis' seconds.
    _grabber.TriggerAnalysisOnInterval(new TimeSpan(secondsBetweenAnalysis * TimeSpan.TicksPerSecond));
    MessageArea.Text = "Starting Genesys Hackathon Project - No errors yet!";
    _startTime = DateTime.Now;

    await _grabber.StartProcessingCameraAsync();
}
/// <summary>
/// Detects faces in the given image stream, requesting emotion, demographic and
/// accessory attributes; logs and rethrows on failure.
/// </summary>
/// <param name="imageStream">Readable stream containing the image.</param>
/// <returns>The detected faces with their requested attributes.</returns>
private async Task <Microsoft.ProjectOxford.Face.Contract.Face[]> DetectFaces(Stream imageStream)
{
    var client = new FaceServiceClient(_faceKey);
    var attributes = new FaceAttributeType[]
    {
        FaceAttributeType.Emotion,
        FaceAttributeType.Gender,
        FaceAttributeType.HeadPose,
        FaceAttributeType.Age,
        FaceAttributeType.Smile,
        FaceAttributeType.FacialHair,
        FaceAttributeType.Glasses
    };
    try
    {
        return await client.DetectAsync(imageStream, false, true, attributes);
    }
    catch (Exception ex)
    {
        Debug.WriteLine(ex.ToString());
        throw; // preserve the original stack trace for the caller
    }
}
/// <summary>
/// MainViewModel constructor. Creates the required API clients and ViewModels.
/// </summary>
public MainViewModel()
{
    // One shared client per API; each view-model receives the relevant one.
    _visionClient = new VisionServiceClient("API_KEY_HERE");
    _faceServiceClient = new FaceServiceClient("API_KEY_HERE");

    // Vision-backed view-models.
    CelebrityVm = new CelebrityViewModel(_visionClient);
    DescriptionVm = new DescriptionViewModel(_visionClient);
    ImageAnalysisVm = new ImageAnalysisViewModel(_visionClient);
    OcrVm = new OcrViewModel(_visionClient);
    ThumbnailVm = new ThumbnailViewModel(_visionClient);

    // Face-backed view-models.
    SimilarFaceVm = new SimilarFaceViewModel(_faceServiceClient);
    FaceGroupingVm = new FaceGroupingViewModel(_faceServiceClient);
    FaceVerificationVm = new FaceVerificationViewModel(_faceServiceClient);
}
/// <summary>
/// Identifies the face in <paramref name="stream"/> against a person group and,
/// when a candidate matches a known person, loads that person's stored photo
/// from the local group-images folder.
/// </summary>
/// <param name="faceClient">Initialized Face API client.</param>
/// <param name="stream">Stream containing the image to identify.</param>
/// <param name="personGroupId">Id of the person group to search.</param>
/// <param name="groupImagesFolder">Local folder holding one photo per person.</param>
/// <returns>The best (highest-confidence) match with its photo stream, or null when nothing matches or on any error.</returns>
public static async Task <IdentifiedFace> CheckGroupAsync(FaceServiceClient faceClient, Stream stream, string personGroupId, string groupImagesFolder)
{
    try
    {
        var response = await FaceApiHelper.IdentifyPersonAsync(faceClient, stream, personGroupId);
        if (response?.Candidates == null || response.Candidates.Length == 0)
        {
            return (null);
        }
        // Due to legal limitations, Face API does not support images retrieval in any circumstance currently. You need to store the images and maintain the relationship between face ids and images by yourself.
        var personsFolder = await PicturesHelper.GetPersonFolderAsync(groupImagesFolder);
        var dataSet = await faceClient.ListPersonsAsync(personGroupId);
        // Left join candidates to known persons so unmatched ids still surface.
        var matches = from c in response.Candidates join p in dataSet on c.PersonId equals p.PersonId into ps from p in ps.DefaultIfEmpty() select new IdentifiedFace { Confidence = c.Confidence, PersonName = p == null ? "(No matching face)" : p.Name, FaceId = c.PersonId };
        var match = matches.OrderByDescending(m => m.Confidence).FirstOrDefault();
        if (match == null)
        {
            return (null);
        }
        // Load the person's local photo; the file is named "<PersonName>.<ext>".
        var matchFile = await personsFolder.GetFileAsync($"{match.PersonName}.{Constants.LocalPersonFileExtension}");
        IRandomAccessStream photoStream = await matchFile.OpenReadAsync();
        match.FaceStream = photoStream.CloneStream().AsStream();
        return (match);
    }
    catch (Exception)
    {
        // Best effort: any failure (API error, missing photo file) is reported as "no match".
        return (null);
    }
}
/// <summary>
/// Applies the entered Face API key: builds a client against the West Europe
/// endpoint, initializes the current FacePage with it, offers to persist the
/// key, and tells the user what to do next based on the existing groups.
/// 'async void' is acceptable here only because this is a UI event handler.
/// </summary>
private async void OnApplyFaceApiKey(object sender, RoutedEventArgs e)
{
    // Nothing to do without a key or without the Face page being active.
    if (String.IsNullOrEmpty(MicrosoftCognitiveServicesFaceKey))
    {
        return;
    }
    var page = RootFrame.Content as FacePage;
    if (page == null)
    {
        return;
    }
    faceClient = new FaceServiceClient(MicrosoftCognitiveServicesFaceKey, "https://westeurope.api.cognitive.microsoft.com/face/v1.0");
    var isInitialized = await page.InitializeServiceAsync(faceClient);
    if (!isInitialized)
    {
        return;
    }
    // Offer to persist the key for future runs.
    var dialog = new MessageDialog("Excellent, do you want to save this key for future use?", "Face API Key applied");
    dialog.Commands.Add(new UICommand("Yes") { Id = 0 });
    dialog.Commands.Add(new UICommand("No") { Id = 1 });
    var result = await dialog.ShowAsync();
    if ((int)result.Id == 0)
    {
        await SaveFaceKeyAsync(MicrosoftCognitiveServicesFaceKey);
    }
    // Guide the user depending on whether person groups already exist.
    if (page.PersonGroups.Length > 0)
    {
        await new MessageDialog("Great, now check your existing groups on the left.", "Face API Key applied").ShowAsync();
    }
    else
    {
        await new MessageDialog("Great, you can now start adding person groups.", "Face API Key applied").ShowAsync();
    }
}
/// <summary>
/// Azure Function (v1 model): POST CognitiveServices/Age. Accepts a body with
/// either an image url or raw image bytes plus a Face API key and domain, runs
/// age detection and returns each face's id, rectangle and estimated age.
/// </summary>
/// <param name="req">Request whose content deserializes to CognitiveServicesRequestItem.</param>
/// <param name="log">Function trace writer (currently unused).</param>
/// <returns>200 with the per-face age results; 400 on missing inputs or any failure.</returns>
public static async Task <HttpResponseMessage> Run([HttpTrigger(AuthorizationLevel.Anonymous, "post", Route = "CognitiveServices/Age")] HttpRequestMessage req, TraceWriter log)
{
    try
    {
        var data = await req.Content.ReadAsAsync <CognitiveServicesRequestItem>();
        var url = data.Url;
        var image = data.ImageBytes;
        var apiKey = data.ApiKey;
        var domainEndpoint = data.DomainEndpoint;
        if (string.IsNullOrEmpty(apiKey) || string.IsNullOrEmpty(domainEndpoint))
        {
            return (req.CreateResponse(HttpStatusCode.BadRequest, "Please provide an api key and a domain endpoint"));
        }
        if (string.IsNullOrEmpty(url) && image == null)
        {
            return (req.CreateResponse(HttpStatusCode.BadRequest, "Please provide an image or an url"));
        }
        // analyze image from url with the provided apikey
        var service = new FaceServiceClient(apiKey, $"https://{domainEndpoint}.api.cognitive.microsoft.com/face/v1.0");
        var faceAttributes = new[] { FaceAttributeType.Age };
        Face[] result = null;
        // The url takes precedence; bytes are used only when no url was supplied.
        if (string.IsNullOrEmpty(url))
        {
            result = await service.DetectAsync(new MemoryStream(image), true, false, faceAttributes);
        }
        else
        {
            result = await service.DetectAsync(url, true, false, faceAttributes);
        }
        var ageResult = result?.Select(r => new { r.FaceId, r.FaceRectangle, r.FaceAttributes.Age });
        // send the result back
        return (req.CreateResponse(HttpStatusCode.OK, ageResult));
    }
    catch (Exception e)
    {
        return (req.CreateResponse(HttpStatusCode.BadRequest, e.Message));
    }
}
/// <summary>
/// Runs Face API detection on an image supplied either as a local file path or
/// as raw bytes, recording the detected face ids and attributes.
/// </summary>
/// <param name="imgBytes">Raw image bytes; used only when <paramref name="path"/> is empty.</param>
/// <param name="path">Local file path of the image; takes precedence over <paramref name="imgBytes"/>.</param>
public async Task DetectFaces(byte[] imgBytes = null, string path = null)
{
    CurrentFaceIds = new List <Guid>();
    Stream imageFileStream = null;
    try
    {
        if (!string.IsNullOrEmpty(path))
        {
            imageFileStream = File.OpenRead(path);
        }
        else if (imgBytes != null)
        {
            imageFileStream = new MemoryStream(imgBytes);
        }
        var requiredFaceAttributes = new FaceAttributeType[]
        {
            FaceAttributeType.Age,
            FaceAttributeType.Gender,
            FaceAttributeType.Smile,
            FaceAttributeType.FacialHair,
            FaceAttributeType.HeadPose,
            FaceAttributeType.Glasses
        };
        var faceServiceClient = new FaceServiceClient(FaceApiKey);
        var faces = await faceServiceClient.DetectAsync(
            imageFileStream,
            returnFaceId: true,
            returnFaceLandmarks: false,
            returnFaceAttributes: requiredFaceAttributes);
        // Fix: removed the unused 'zxc' local.
        foreach (var face in faces.ToArray())
        {
            DetectedFaceIds.Add(face.FaceId);
            CurrentFaceIds.Add(face.FaceId);
        }
        this.DetectedFaces = faces.ToArray();
    }
    catch (Exception)
    {
        // Detection failed (bad input, network, quota): report "no faces" rather
        // than throw. (Fix: the caught exception was bound to an unused variable.)
        this.DetectedFaces = Enumerable.Empty <Face>();
    }
    finally
    {
        // Fix: the stream was never disposed; File.OpenRead leaked a file handle.
        imageFileStream?.Dispose();
    }
}
/// <summary>
/// Add face to a person in FaceAPI from a stream.
/// Retries while the service reports "RateLimitExceeded"; "InvalidURL" and
/// "InvalidImage" errors are silently skipped; other Face API errors propagate.
/// </summary>
/// <param name="faceClient">Initialized Face API client.</param>
/// <param name="facesListId">Id of the large face list to add to.</param>
/// <param name="personName">Person name stored as the face's user data.</param>
/// <param name="faceStream">Stream containing the face image.</param>
/// <param name="filePath">Local file path</param>
/// <returns>List with the uploaded face (empty when the image was skipped), or null on FileNotFoundException.</returns>
public static async Task <List <TrainedFace> > UploadFaceAsync(FaceServiceClient faceClient, string facesListId, string personName, Stream faceStream, string filePath)
{
    var persistedFaces = new List <TrainedFace>();
    try
    {
        bool rateLimitExceeded;
        do
        {
            rateLimitExceeded = false;
            try
            {
                AddPersistedFaceResult uploadedFace = await faceClient.AddFaceToLargeFaceListAsync(facesListId, faceStream, personName);
                persistedFaces.Add(new TrainedFace(
                    new Face
                    {
                        FaceId = uploadedFace.PersistedFaceId
                    },
                    personName,
                    filePath));
            }
            catch (FaceAPIException e)
            {
                if (e.ErrorCode == "RateLimitExceeded")
                {
                    rateLimitExceeded = true;
                    // NOTE(review): 1 ms looks far too short a back-off for a
                    // rate-limit retry — confirm the intended delay.
                    await Task.Delay(1);
                }
                else if (e.ErrorCode != "InvalidURL" && e.ErrorCode != "InvalidImage")
                {
                    throw;
                }
                // otherwise, just ignore this image
            }
        } while (rateLimitExceeded);
        return (persistedFaces);
    }
    catch (FileNotFoundException e)
    {
        Console.WriteLine(e);
        // just ignore this face
        return (null);
    }
}
/// <summary>
/// Console entry point: detects faces (with age and emotion attributes) in a
/// sample image URL and prints each face's id, age and happiness score.
/// Go through the following article to enable async Main if you work on a
/// .NET Framework lesser than 4.7:
/// https://www.c-sharpcorner.com/article/enabling-c-sharp-7-compilation-with-visual-studio-2017/
/// </summary>
/// <param name="args"></param>
/// <returns></returns>
static async Task Main(string[] args)
{
    IFaceServiceClient client = new FaceServiceClient("<Key>", "https://centralindia.api.cognitive.microsoft.com/face/v1.0");
    var attributes = new[] { FaceAttributeType.Emotion, FaceAttributeType.Age };
    var faces = await client.DetectAsync("https://www.codeproject.com/script/Membership/Uploads/7869570/Faces.png", returnFaceAttributes: attributes);
    foreach (var face in faces)
    {
        Console.WriteLine($"{face.FaceId}");
        Console.WriteLine($"Age = {face.FaceAttributes.Age}, Happiness = {face.FaceAttributes.Emotion.Happiness}");
    }
    Console.ReadLine();
}
/// <summary>
/// Initializes media capture on the first available camera, starts the live
/// preview and creates the Face API client.
/// </summary>
private async Task Init()
{
    MC = new MediaCapture();

    var videoDevices = await DeviceInformation.FindAllAsync(DeviceClass.VideoCapture);
    var firstCamera = videoDevices.First();
    await MC.InitializeAsync(new MediaCaptureInitializationSettings() { VideoDeviceId = firstCamera.Id });

    ViewFinder.Source = MC;
    await MC.StartPreviewAsync();

    OxFaceRecognizer = new FaceServiceClient(OxfordFaceAPIKey);
}
/// <summary>
/// Detects faces in the given image bytes and returns their face ids.
/// </summary>
/// <param name="image">Raw image bytes to analyze.</param>
/// <param name="connectionApiData">Subscription key and endpoint location for the Face API.</param>
/// <returns>The detected face ids; null when no face was found (logged as an error, preserving the existing caller contract).</returns>
private async Task <List <Guid> > GetFaceIds(byte[] image, ConnectionApiData connectionApiData)
{
    using (var msImage = new MemoryStream(image))
    using (var faceClient = new FaceServiceClient(connectionApiData.SubscriptionKey, connectionApiData.Location))
    {
        Face[] faces = await faceClient.DetectAsync(msImage);
        // Idiom fix: use Length on the array instead of LINQ Count(); also
        // dropped the pointless single-argument string.Concat in the log call.
        var faceIds = faces.Length > 0 ? faces.Select(f => f.FaceId).ToList() : null;
        if (faceIds == null)
        {
            _logger.LogError("AzureFaceAdapter.GetFaceIds::Cannot get faces from image");
        }
        return (faceIds);
    }
}
/// <summary>
/// Detects faces (with landmarks) in the supplied image stream and reads out
/// the rectangle and landmark coordinates of each detected face.
/// </summary>
/// <param name="stream">Readable stream containing the image to analyze.</param>
public static async Task DetectAsync(Stream stream)
{
    // NOTE(review): hard-coded subscription key; move to configuration.
    string faceAPIKey = "275f7ae3c0ca42fda3eca8bee0956fad";
    var client = new FaceServiceClient(faceAPIKey);
    // BUG FIX: the method ignored its 'stream' parameter and analyzed a
    // hard-coded URL instead; it now analyzes the stream it was given.
    var faces = await client.DetectAsync(stream, true, true);
    foreach (var face in faces)
    {
        // Face rectangle coordinates.
        var top = face.FaceRectangle.Top;
        var left = face.FaceRectangle.Left;
        // Facial landmark coordinates (inner corner of the left eye).
        var eyeleftinner = face.FaceLandmarks.EyeLeftInner;
    }
}
/// <summary>
/// Handles the Loaded event of the ManageGroupsControl control: creates the Face API
/// client from the main window's subscription settings and loads the person groups.
/// Any failure is logged and surfaced to the user.
/// </summary>
/// <param name="sender">The source of the event.</param>
/// <param name="e">The <see cref="RoutedEventArgs"/> instance containing the event data.</param>
private async void ManageGroupsControl_Loaded(object sender, RoutedEventArgs e)
{
    try
    {
        _mainWindow = Window.GetWindow(this) as MainWindow;
        var key = _mainWindow._scenariosControl.SubscriptionKey;
        var endpointUri = _mainWindow._scenariosControl.SubscriptionEndpoint;
        _faceServiceClient = new FaceServiceClient(key, endpointUri);
        await LoadGroups();
    }
    catch (Exception exc)
    {
        MainWindow.Log($"ManageGroupsControl_Loaded: {exc}");
        MessageBox.Show($"Error loading Group Manager: {exc.Message}");
    }
}
/// <summary>
/// Uploads the photo at <paramref name="fullPath"/> to the Face API and returns the
/// detected faces. Missing files yield an empty list.
/// </summary>
/// <param name="fullPath">Absolute path of the image file to upload.</param>
/// <returns>The detected faces (empty when the file does not exist or no faces were found).</returns>
public static async Task <IList <Face> > UploadToFaceApiAsync(string fullPath)
{
    IList <Face> detectedFaces = new List <Face>();
    Console.WriteLine($"Uploading photo {Path.GetFileNameWithoutExtension(fullPath)} to faceApi");
    if (File.Exists(fullPath))
    {
        // BUG FIX: the FileStream was never disposed, leaking the file handle.
        using (FileStream fileStream = new FileStream(fullPath, FileMode.Open))
        {
            var faceClient = new FaceServiceClient(Constants.APIkey, Constants.APIUri);
            detectedFaces = await faceClient.DetectAsync(fileStream);
        }
    }
    Console.WriteLine($"Detected {detectedFaces.Count} faces on that image");
    return(detectedFaces);
}
/// <summary>
/// Detects faces in the given image file and returns the first one,
/// or null when detection fails for any reason (best-effort, exceptions swallowed).
/// </summary>
/// <param name="imageFilePath">Path of the image file to analyze.</param>
/// <returns>The first detected face, or null.</returns>
private static async Task <Face> DetectFaceAsync(string imageFilePath)
{
    try
    {
        using (Stream imageFileStream = File.OpenRead(imageFilePath))
        {
            faceServiceClient = new FaceServiceClient(Configuration["FaceAPIKey"], "https://eastus.api.cognitive.microsoft.com/face/v1.0");
            var detected = await faceServiceClient.DetectAsync(imageFileStream);
            return detected.FirstOrDefault();
        }
    }
    catch (Exception)
    {
        // Deliberate best-effort: any failure is reported as "no face".
        return null;
    }
}
/// <summary>
/// Adds a face (sent as a base64-encoded image) to an existing person in a person group.
/// </summary>
/// <param name="imageFile">Base64-encoded image payload.</param>
/// <param name="groupId">Target person group id.</param>
/// <param name="personId">Target person id (GUID string).</param>
/// <returns>201 Created with the persisted face id, or 400 with the Face API error message.</returns>
public async Task <ActionResult> AddPersonFace([FromBody] string imageFile, string groupId, string personId)
{
    var client = new FaceServiceClient(_faceApiKey, _faceApiEndpoint);

    // BUG FIX: the decoded stream was never disposed; wrap it in a using block.
    // NOTE(review): Guid.Parse throws FormatException on a malformed personId, which is
    // not caught below — confirm whether that should also map to a 400 response.
    using (Stream image = Images.DecodeBase64Image(imageFile))
    {
        try
        {
            var addFaceResult = await client.AddPersonFaceAsync(groupId, Guid.Parse(personId), image);
            return(Created(addFaceResult.PersistedFaceId.ToString(), addFaceResult.PersistedFaceId));
        }
        catch (FaceAPIException ex)
        {
            return(BadRequest(ex.ErrorMessage));
        }
    }
}
/// <summary>
/// Blob-triggered Azure Function: for each image dropped into the "faces" container,
/// ensures the configured person group exists, registers the blob as a new person with
/// one face, and retrains the group.
/// </summary>
/// <param name="myBlob">The triggering blob's content stream.</param>
/// <param name="name">The blob name; the part before the first '.' becomes the person name.</param>
/// <param name="log">Function log sink.</param>
public static async Task Run([BlobTrigger("faces/{name}", Connection = "BlobStorageConnection")] Stream myBlob, string name, TraceWriter log)
{
    log.Info("------------");
    log.Info($"FaceService Function - Blob trigger function Processed blob\n Name:{name} \n Size: {myBlob.Length} Bytes");

    /* STEP 3 - Create facegroup */
    FaceServiceClient faceClient = new FaceServiceClient(ConfigurationManager.AppSettings["faceApiKey"], "https://westeurope.api.cognitive.microsoft.com/face/v1.0");
    string groupName = ConfigurationManager.AppSettings["personGroupName"];
    log.Info($"-- Person group --");
    try
    {
        await faceClient.CreatePersonGroupAsync(groupName, groupName);
        log.Info($"Created");
    }
    catch (FaceAPIException ex)
    {
        // Group probably exists already; log the service error and carry on.
        log.Info(ex.ErrorMessage);
        log.Info(ex.ErrorCode);
    }

    /* STEP 4 - Add a person to a facegroup */
    log.Info($"\n\n-- Face Add person -- ");
    string personName = name.Split('.').First();

    // Create a person, attach the uploaded image as its face, then retrain the group.
    var person = await faceClient.CreatePersonAsync(groupName, personName);
    var persistedFace = await faceClient.AddPersonFaceAsync(groupName, person.PersonId, myBlob);
    await faceClient.TrainPersonGroupAsync(groupName);

    log.Info($"-- Person added: {personName} / {person.PersonId} / {persistedFace.PersistedFaceId} -- \n\n");
    log.Info($"\n\n-- DONE-- \n\n");
}
/// <summary>
/// Detects one face in each of two images, verifies whether they belong to the same
/// person, and writes the resulting score (0-100) back to SharePoint.
/// A score of 0 is recorded when either image does not contain exactly one detectable face.
/// </summary>
/// <param name="url">Url of the first image.</param>
/// <param name="urlComparation">Url of the image to compare against.</param>
/// <param name="id">SharePoint item id to update.</param>
/// <param name="campo">SharePoint field name.</param>
/// <param name="value">SharePoint field value.</param>
public static async void DetecFacesAndDisplayResult(string url, string urlComparation, string id ,string campo, int value )
{
    var subscriptionKey = "idSupscription";
    try
    {
        var client = new FaceServiceClient(subscriptionKey);
        var faces1 = await client.DetectAsync(url, false, true, true);
        var faces2 = await client.DetectAsync(urlComparation, false, true, true);

        // BUG FIX: the original guards called UpdateSharePoint but did not return, so
        // execution fell through to faces1[0]/faces2[0], threw, and the catch block
        // recorded a second score-0 update. Each invalid case now records once and exits.
        // Exactly one face is required in each image for verification.
        if (faces1 == null || faces2 == null || faces1.Count() != 1 || faces2.Count() != 1)
        {
            UpdateSharePoint(id, 0,campo,value);
            return;
        }

        var res = await client.VerifyAsync(faces1[0].FaceId, faces2[0].FaceId);

        // Identical faces score 100; otherwise scale confidence (0..0.5 band) to 0..100.
        double score = res.IsIdentical ? 100 : Math.Round((res.Confidence / 0.5) * 100);
        UpdateSharePoint(id, score,campo,value);
    }
    catch (Exception exception)
    {
        UpdateSharePoint(id, 0,campo,value);
        Console.WriteLine(exception.ToString());
    }
}
/// <summary>
/// Detects faces in a local image file and prints the count plus each face's
/// estimated age and gender to the console. Failures are printed, not rethrown.
/// </summary>
/// <param name="fileLocation">Path of the image file to analyze.</param>
/// <param name="subscriptionKey">Face API subscription key.</param>
private static async void DetecFacesAndDisplayResult(string fileLocation, string subscriptionKey)
{
    using (var imageStream = File.OpenRead(fileLocation))
    {
        try
        {
            var faceClient = new FaceServiceClient(subscriptionKey);
            var detected = await faceClient.DetectAsync(imageStream, false, true, true);
            Console.WriteLine(" > " + detected.Length + " detected.");
            foreach (var detectedFace in detected)
            {
                Console.WriteLine(" >> age: " + detectedFace.Attributes.Age + " gender:" + detectedFace.Attributes.Gender);
            }
        }
        catch (Exception ex)
        {
            Console.WriteLine(ex.ToString());
        }
    }
}
/// <summary>
/// Creates a training group, deleting any existing group with the same id first.
/// </summary>
/// <param name="PersonGroupID">Name/id of the person group.</param>
/// <returns>A task that completes when the group has been (re)created.</returns>
public async Task createFaceGroup(string PersonGroupID)
{
    bool groupExists = false;

    IFaceServiceClient faceClient = new FaceServiceClient(SubscriptionKey);

    // Test whether the group already exists
    try
    {
        await faceClient.GetPersonGroupAsync(PersonGroupID);
        groupExists = true;
    }
    catch (ClientException ex)
    {
        if (ex.Error.Code != "PersonGroupNotFound")
        {
            throw;
        }
        // "PersonGroupNotFound" simply means the group does not exist yet — proceed.
        // (Removed the original empty else block.)
    }

    // check to see if group exists and if so delete the group.
    if (groupExists)
    {
        await faceClient.DeletePersonGroupAsync(PersonGroupID);
    }

    // Removed the original catch (ClientException ex) { throw; } wrapper: rethrowing
    // without handling is behavior-identical to not catching, and `ex` was unused.
    await faceClient.CreatePersonGroupAsync(PersonGroupID, PersonGroupID);
}
/// <summary>
/// Detects faces in the current image, builds an ImageView (age, name, url, gender in
/// Spanish), compares the first face against a gender-matched celebrity reference photo,
/// stores the similarity percentage, and returns the detected faces.
/// </summary>
/// <returns>The faces detected in <c>ImageResult.Url</c>.</returns>
public async Task<Face[]> GetAge()
{
    var client = new FaceServiceClient("keey");
    var detectedFaces = await client.DetectAsync(this.ImageResult.Url, true, true, true, true);
    this.FacesCollection = detectedFaces;

    var primaryFace = detectedFaces[0];
    var image = new ImageView
    {
        Edad = primaryFace.Attributes.Age.ToString(),
        Nombre = ImageResult.Nombre,
        Url = ImageResult.Url,
        Sexo = primaryFace.Attributes.Gender.Equals("male") ? "Hombre" : "Mujer"
    };

    // Pick the reference photo by detected gender.
    var urlComparation = image.Sexo.Equals("Hombre")
        ? "http://aimworkout.com/wp-content/uploads/2014/11/Chuck_Norris.jpg"
        : "http://www.beevoz.com/wp-content/uploads/2015/08/angelinajolie.jpg";

    var referenceFaces = await client.DetectAsync(urlComparation);
    var verification = await client.VerifyAsync(primaryFace.FaceId, referenceFaces[0].FaceId);
    image.Similar = (Convert.ToInt32(verification.Confidence * 100)).ToString();

    ImageCollection.Add(image);
    return detectedFaces;
}
/// <summary>
/// Pick folder, then group detected faces by similarity.
/// Detects faces in every .jpg under the chosen folder (in parallel), then calls the
/// grouping API and renders the groups plus a "messy" group of unmatched faces.
/// </summary>
/// <param name="sender">Event sender</param>
/// <param name="e">Event arguments</param>
private async void Grouping_Click(object sender, RoutedEventArgs e)
{
    // Show folder picker
    FolderBrowserDialog dlg = new FolderBrowserDialog();
    var result = dlg.ShowDialog();

    // The suggestion count only bounds the data-preparation step;
    // it is not a service-side constraint.
    const int SuggestionCount = 10;

    if (result == DialogResult.OK)
    {
        // User picked one folder
        List<Task> tasks = new List<Task>();
        int processCount = 0;
        bool forceContinue = false;

        // Clear previous grouping result
        GroupedFaces.Clear();
        Faces.Clear();

        MainWindow mainWindow = Window.GetWindow(this) as MainWindow;
        string subscriptionKey = mainWindow.SubscriptionKey;
        var faceServiceClient = new FaceServiceClient(subscriptionKey);

        Output = Output.AppendLine("Request: Preparing faces for grouping, detecting faces in choosen folder.");
        foreach (var img in Directory.EnumerateFiles(dlg.SelectedPath, "*.jpg", SearchOption.AllDirectories))
        {
            tasks.Add(Task.Factory.StartNew(
                async (obj) =>
                {
                    var imgPath = obj as string;
                    // Detect faces in image
                    using (var fStream = File.OpenRead(imgPath))
                    {
                        try
                        {
                            var faces = await faceServiceClient.DetectAsync(fStream);
                            return new Tuple<string, ClientContract.Face[]>(imgPath, faces);
                        }
                        catch (ClientException)
                        {
                            // Here we simply ignore all detection failure in this sample
                            // You may handle these exceptions by check the Error.Code and Error.Message property for ClientException object
                            return new Tuple<string, ClientContract.Face[]>(imgPath, null);
                        }
                    }
                },
                img).Unwrap().ContinueWith((detectTask) =>
                {
                    // Update detected faces on UI
                    var res = detectTask.Result;
                    if (res.Item2 == null)
                    {
                        return;
                    }
                    foreach (var f in res.Item2)
                    {
                        this.Dispatcher.Invoke(
                            new Action<ObservableCollection<Face>, string, ClientContract.Face>(UIHelper.UpdateFace),
                            Faces,
                            res.Item1,
                            f);
                    }
                }));

            // BUG FIX: processCount was declared and compared but never incremented, so the
            // "too many images" confirmation below could never trigger.
            processCount++;

            if (processCount >= SuggestionCount && !forceContinue)
            {
                var continueProcess = System.Windows.Forms.MessageBox.Show("Found many images under choosen folder, may take long time if proceed. Continue?", "Warning", MessageBoxButtons.YesNo);
                if (continueProcess == DialogResult.Yes)
                {
                    forceContinue = true;
                }
                else
                {
                    break;
                }
            }
        }
        await Task.WhenAll(tasks);
        Output = Output.AppendLine(string.Format("Response: Success. Total {0} faces are detected.", Faces.Count));

        try
        {
            Output = Output.AppendLine(string.Format("Request: Grouping {0} faces.", Faces.Count));

            // Call grouping, the grouping result is a group collection, each group contains similar faces
            var groupRes = await faceServiceClient.GroupAsync(Faces.Select(f => Guid.Parse(f.FaceId)).ToArray());

            // Update grouping results for rendering
            foreach (var g in groupRes.Groups)
            {
                var gg = new GroupingResult()
                {
                    Faces = new ObservableCollection<Face>(),
                    IsMessyGroup = false,
                };
                foreach (var fr in g)
                {
                    gg.Faces.Add(Faces.First(f => f.FaceId == fr.ToString()));
                }
                GroupedFaces.Add(gg);
            }

            // MessyGroup contains all faces which are not similar to any other faces.
            // Take an extreme case for exampe:
            // On grouping faces which are not similar to any other faces, the grouping result will contains only one messy group
            if (groupRes.MessyGroup.Length > 0)
            {
                var messyGroup = new GroupingResult()
                {
                    Faces = new ObservableCollection<Face>(),
                    IsMessyGroup = true
                };
                foreach (var messy in groupRes.MessyGroup)
                {
                    messyGroup.Faces.Add(Faces.First(f => f.FaceId == messy.ToString()));
                }
                GroupedFaces.Add(messyGroup);
            }

            Output = Output.AppendLine(string.Format("Response: Success. {0} faces are grouped into {1} groups.", Faces.Count, GroupedFaces.Count));
        }
        catch (ClientException ex)
        {
            Output = Output.AppendLine(string.Format("Response: {0}. {1}", ex.Error.Code, ex.Error.Message));
        }
    }
}
/// <summary>
/// Pick image folder and detect all faces in these images.
/// Each .jpg is added to a freshly created, GUID-named face list; detection results are
/// marshalled back to the UI thread via the Dispatcher.
/// </summary>
/// <param name="sender">Event sender</param>
/// <param name="e">Event arguments</param>
private async void FolderPicker_Click(object sender, RoutedEventArgs e)
{
    // Show folder picker
    System.Windows.Forms.FolderBrowserDialog dlg = new System.Windows.Forms.FolderBrowserDialog();
    var result = dlg.ShowDialog();
    bool forceContinue = false;
    if (result == System.Windows.Forms.DialogResult.OK)
    {
        // Enumerate all ".jpg" files in the folder, call detect
        List<Task> tasks = new List<Task>();

        // Reset all state from a previous run.
        FacesCollection.Clear();
        TargetFaces.Clear();
        FindSimilarCollection.Clear();
        SelectedFile = null;

        // Set the suggestion count is intent to minimum the data preparetion step only,
        // it's not corresponding to service side constraint
        const int SuggestionCount = 10;
        int processCount = 0;

        MainWindow.Log("Request: Preparing, detecting faces in chosen folder.");
        MainWindow mainWindow = Window.GetWindow(this) as MainWindow;
        string subscriptionKey = mainWindow._scenariosControl.SubscriptionKey;
        var faceServiceClient = new FaceServiceClient(subscriptionKey);

        // A unique face list per run avoids name clashes on the service side.
        _faceListName = Guid.NewGuid().ToString();
        await faceServiceClient.CreateFaceListAsync(_faceListName, _faceListName, "face list for sample");

        foreach (var img in Directory.EnumerateFiles(dlg.SelectedPath, "*.jpg", SearchOption.AllDirectories))
        {
            tasks.Add(Task.Factory.StartNew(
                async (obj) =>
                {
                    var imgPath = obj as string;
                    // Call detection
                    using (var fStream = File.OpenRead(imgPath))
                    {
                        try
                        {
                            var faces = await faceServiceClient.AddFaceToFaceListAsync(_faceListName, fStream);
                            return new Tuple<string, ClientContract.AddPersistedFaceResult>(imgPath, faces);
                        }
                        catch (FaceAPIException)
                        {
                            // Here we simply ignore all detection failure in this sample
                            // You may handle these exceptions by check the Error.Error.Code and Error.Message property for ClientException object
                            return new Tuple<string, ClientContract.AddPersistedFaceResult>(imgPath, null);
                        }
                    }
                },
                img).Unwrap().ContinueWith((detectTask) =>
                {
                    var res = detectTask.Result;
                    if (res.Item2 == null)
                    {
                        return;
                    }
                    // Update detected faces on UI (ContinueWith runs off the UI thread,
                    // hence the Dispatcher marshal).
                    this.Dispatcher.Invoke(
                        new Action<ObservableCollection<Face>, string, ClientContract.AddPersistedFaceResult>(UIHelper.UpdateFace),
                        FacesCollection,
                        res.Item1,
                        res.Item2);
                }));

            processCount++;
            // After the suggested count, ask once whether to keep loading; "Yes" suppresses
            // further prompts, "No" stops enumeration.
            if (processCount >= SuggestionCount && !forceContinue)
            {
                var continueProcess = System.Windows.Forms.MessageBox.Show("The images loaded have reached the recommended count, may take long time if proceed. Would you like to continue to load images?", "Warning", System.Windows.Forms.MessageBoxButtons.YesNo);
                if (continueProcess == System.Windows.Forms.DialogResult.Yes)
                {
                    forceContinue = true;
                }
                else
                {
                    break;
                }
            }
        }
        await Task.WhenAll(tasks);
        MainWindow.Log("Response: Success. Total {0} faces are detected.", FacesCollection.Count);
    }
}
/// <summary>
/// Pick image and call find similar for each face detected.
/// Detects all faces in the picked image, then queries the previously built face list
/// (<c>_faceListName</c>) for up to 3 similar candidates per face and renders the results.
/// </summary>
/// <param name="sender">Event sender</param>
/// <param name="e">Event arguments</param>
private async void FindSimilar_Click(object sender, RoutedEventArgs e)
{
    // Show file picker, jpg type files only
    Microsoft.Win32.OpenFileDialog dlg = new Microsoft.Win32.OpenFileDialog();
    dlg.DefaultExt = ".jpg";
    dlg.Filter = "Image files(*.jpg) | *.jpg";
    var filePicker = dlg.ShowDialog();
    if (filePicker.HasValue && filePicker.Value)
    {
        // User picked image
        // Clear previous detection and find similar results
        TargetFaces.Clear();
        FindSimilarCollection.Clear();
        // (Removed an unused Stopwatch local that was started but never read.)

        SelectedFile = dlg.FileName;
        var imageInfo = UIHelper.GetImageInfoForRendering(SelectedFile);

        // Detect all faces in the picked image
        using (var fileStream = File.OpenRead(SelectedFile))
        {
            MainWindow.Log("Request: Detecting faces in {0}", SelectedFile);
            MainWindow mainWindow = Window.GetWindow(this) as MainWindow;
            string subscriptionKey = mainWindow._scenariosControl.SubscriptionKey;
            var faceServiceClient = new FaceServiceClient(subscriptionKey);
            var faces = await faceServiceClient.DetectAsync(fileStream);

            // Update detected faces on UI
            foreach (var face in UIHelper.CalculateFaceRectangleForRendering(faces, MaxImageSize, imageInfo))
            {
                TargetFaces.Add(face);
            }

            // BUG FIX: the second placeholder was "{0}", which printed the face count twice
            // and never showed the file name.
            MainWindow.Log("Response: Success. Detected {0} face(s) in {1}", faces.Length, SelectedFile);

            // Find similar faces for each face
            foreach (var f in faces)
            {
                var faceId = f.FaceId;
                MainWindow.Log("Request: Finding similar faces for face {0}", faceId);
                try
                {
                    // Call find similar REST API, the result contains all the face ids which similar to the query face
                    const int requestCandidatesCount = 3;
                    var result = await faceServiceClient.FindSimilarAsync(faceId, _faceListName, requestCandidatesCount);

                    // Update find similar results collection for rendering
                    var gg = new FindSimilarResult();
                    gg.Faces = new ObservableCollection<Face>();
                    gg.QueryFace = new Face()
                    {
                        ImagePath = SelectedFile,
                        Top = f.FaceRectangle.Top,
                        Left = f.FaceRectangle.Left,
                        Width = f.FaceRectangle.Width,
                        Height = f.FaceRectangle.Height,
                        FaceId = faceId.ToString(),
                    };
                    foreach (var fr in result)
                    {
                        gg.Faces.Add(FacesCollection.First(ff => ff.FaceId == fr.PersistedFaceId.ToString()));
                    }

                    MainWindow.Log("Response: Found {0} similar faces for face {1}", gg.Faces.Count, faceId);
                    FindSimilarCollection.Add(gg);
                }
                catch (FaceAPIException ex)
                {
                    MainWindow.Log("Response: {0}. {1}", ex.ErrorCode, ex.ErrorMessage);
                }
            }
        }
    }
}
/// <summary>
/// Detects faces in the given image file and returns each face's gender attribute.
/// On any failure, returns a single-element array containing null (best-effort contract
/// preserved from the original).
/// </summary>
/// <param name="imageFilePath">Path of the image to analyze.</param>
/// <param name="faceServiceClient">Initialized Face API client.</param>
/// <returns>Gender strings for each detected face, or a one-element null array on error.</returns>
public async Task<string[]> UploadAndDetectFaceGender(string imageFilePath, FaceServiceClient faceServiceClient)
{
    try
    {
        using (Stream imageStream = File.OpenRead(imageFilePath))
        {
            var detected = await faceServiceClient.DetectAsync(imageStream, false, true, true, false);
            return detected.Select(f => f.Attributes.Gender).ToArray();
        }
    }
    catch (Exception)
    {
        return new string[1];
    }
}
/// <summary>
/// Detects faces in the given image file and returns each face's id.
/// On any failure, returns a single-element array containing Guid.Empty (best-effort
/// contract preserved from the original).
/// </summary>
/// <param name="imageFilePath">Path of the image to analyze.</param>
/// <param name="faceServiceClient">Initialized Face API client.</param>
/// <returns>Face ids for each detected face, or a one-element default array on error.</returns>
public async Task<Guid[]> UploadAndDetectFaceId(string imageFilePath, FaceServiceClient faceServiceClient)
{
    try
    {
        using (Stream imageStream = File.OpenRead(imageFilePath))
        {
            var detected = await faceServiceClient.DetectAsync(imageStream, false, true, true, false);
            return detected.Select(f => f.FaceId).ToArray();
        }
    }
    catch (Exception)
    {
        return new Guid[1];
    }
}
/// <summary>
/// Initialize the Face service client singleton with the given subscription key.
/// </summary>
/// <param name="subscriptionKey">Face API subscription key.</param>
public static void Initialize(string subscriptionKey) => _instance = new FaceServiceClient(subscriptionKey);
/// <summary>
/// Runs a sample face detection when the button is clicked.
/// BUG FIX: the original blocked the UI thread with .Wait() on the async call, which is a
/// classic WPF deadlock (the continuation needs the blocked dispatcher thread). The handler
/// is now async void — still signature-compatible with the RoutedEventHandler delegate —
/// and awaits the call instead.
/// NOTE(review): the subscription key is hard-coded in source — move it to configuration.
/// </summary>
/// <param name="sender">Event sender</param>
/// <param name="e">Event arguments</param>
private async void Button_Click(object sender, RoutedEventArgs e)
{
    var client = new FaceServiceClient("5fe605566efd44d6aacbdb51b7719cb0");
    await client.DetectAsync("https://oxfordportal.blob.core.windows.net/face/demo/detection%201%20thumbnail.jpg");
}
/// <summary>
/// Continuously captures webcam frames: saves the current frame, uploads it to blob
/// storage, detects the face, records it in the database, then compares it against all
/// known faces and texts the best-matching returning customer via Twilio, together with
/// showing aggregate customer stats.
/// NOTE(review): API keys, storage credentials and phone numbers are hard-coded in
/// source — move them to secure configuration.
/// NOTE(review): this loop never terminates (while(true)) and uses Thread.Sleep inside an
/// async handler; consider a cancellation token and Task.Delay.
/// </summary>
/// <param name="sender">Event sender</param>
/// <param name="e">Event arguments</param>
private async void button2_Click(object sender, EventArgs e)
{
    string AccountSid = "ACd489d0930dc658a3384b1b52a28cbced";
    string AuthToken = "b4f632beb8bbf85f696693d0df69dba3";
    FaceServiceClient faceClient = new FaceServiceClient("0e58dbc56e5445ac8fcdfa9ffbf5ef60");

    // Initial detection of the last saved frame.
    // NOTE(review): these results are never used afterwards — confirm whether this
    // warm-up call is intentional before removing it.
    double[] ages = await UploadAndDetectFaceAges(@"C:\Users\ma_eg_000\Desktop\PrincetonHack\pic.jpg", faceClient);
    string[] genders = await UploadAndDetectFaceGender(@"C:\Users\ma_eg_000\Desktop\PrincetonHack\pic.jpg", faceClient);

    while (true)
    {
        VerifyResult verification = new VerifyResult();
        verification.Confidence = 0;
        int numBoys = 0;
        int numgirls = 0;
        int ppl = 0;
        int avgAge = 0;
        int totAge = 0;

        // Give the camera time to start before grabbing a frame.
        if (!cam.IsRunning)
        {
            System.Threading.Thread.Sleep(1000);
        }
        bit.Save(@"C:\Users\ma_eg_000\Desktop\PrincetonHack\pic.jpg");
        Thread.Sleep(1000);

        double[] ages1 = await UploadAndDetectFaceAges(@"C:\Users\ma_eg_000\Desktop\PrincetonHack\pic.jpg", faceClient);
        string[] genders2 = await UploadAndDetectFaceGender(@"C:\Users\ma_eg_000\Desktop\PrincetonHack\pic.jpg", faceClient);

        StorageCredentials storageCredentials = new StorageCredentials("faceimage", "DYrgou0cTTp6J7KDdMVVxR3BDtM31zh393oyf0CfWdTuihRUgDwyryQuIqj203SnPHMJVK7VvLGm/KtfIpUncw==");
        CloudStorageAccount storageAccount = new CloudStorageAccount(storageCredentials, true);
        CloudBlobClient blobClient = storageAccount.CreateCloudBlobClient();
        CloudBlobContainer container = blobClient.GetContainerReference("facecontainer");

        // BUG FIX: CreateIfNotExistsAsync was fired-and-forgotten, racing the upload below
        // when the container did not yet exist. Await it.
        await container.CreateIfNotExistsAsync();

        CloudBlockBlob blockBlob = container.GetBlockBlobReference("pic.jpg");
        using (var fileStream = System.IO.File.OpenRead(@"C:\Users\ma_eg_000\Desktop\PrincetonHack\pic.jpg"))
        {
            blockBlob.UploadFromStream(fileStream);
        }

        Guid[] ids = await UploadAndDetectFaceId(@"C:\Users\ma_eg_000\Desktop\PrincetonHack\pic.jpg", faceClient);
        InsertData(ids[0].ToString(), genders2[0], ages1[0].ToString(), "5149941737");

        List<facetable> ftable = await FetchAllFaces();
        string toCall = "null";
        foreach (facetable fTable in ftable)
        {
            ppl++;
            if (fTable.Gender == "male")
            {
                numBoys++;
            }
            else
            {
                numgirls++;
            }
            totAge = totAge + Int32.Parse(fTable.Age);

            // Verify the freshly captured face against each stored face and keep the best match.
            Guid id2 = new Guid(fTable.IdFace);
            VerifyResult temp = await faceClient.VerifyAsync(ids[0], id2);
            if (temp.Confidence >= verification.Confidence)
            {
                verification = temp;
                toCall = fTable.PhoneNumber;
            }
        }

        // BUG FIX: guard against division by zero when no records were fetched.
        avgAge = ppl > 0 ? totAge / ppl : 0;

        if (verification.Confidence >= 0.40)
        {
            richTextBox1.Text = "Number of Males Customers : " + numBoys + " Number of Female Customers :" + numgirls + " Average age of Customers :" + avgAge;
            var twilio = new TwilioRestClient(AccountSid, AuthToken);
            var message = twilio.SendMessage("16263449948", toCall, "WE HAVE THE BEST DEALS FOR YOU TODAY!!! Free Selfie Sticks and T-Shirts Gallore", "");
        }
        Thread.Sleep(1000);
    }
}
/// <summary>
/// Pick image for detection, get detection result and put detection results into RightResultCollection
/// </summary>
/// <param name="sender">Event sender</param>
/// <param name="e">Event argument</param>
private async void RightImagePicker_Click(object sender, RoutedEventArgs e)
{
    // Show image picker, show jpg type files only
    Microsoft.Win32.OpenFileDialog dlg = new Microsoft.Win32.OpenFileDialog();
    dlg.DefaultExt = ".jpg";
    dlg.Filter = "Image files(*.jpg) | *.jpg";
    var result = dlg.ShowDialog();
    if (result.HasValue && result.Value)
    {
        // Reset any previous verification output.
        VerifyResult = string.Empty;

        // User already picked one image
        var pickedImagePath = dlg.FileName;
        var imageInfo = UIHelper.GetImageInfoForRendering(pickedImagePath);
        RightImageDisplay.Source = new BitmapImage(new Uri(pickedImagePath));

        // Clear last time detection results
        RightResultCollection.Clear();
        MainWindow.Log("Request: Detecting in {0}", pickedImagePath);

        // NOTE(review): this stopwatch is started but never read — remove it or log the elapsed time.
        var sw = Stopwatch.StartNew();

        // Call detection REST API, detect faces inside the image
        using (var fileStream = File.OpenRead(pickedImagePath))
        {
            try
            {
                MainWindow mainWindow = Window.GetWindow(this) as MainWindow;
                string subscriptionKey = mainWindow._scenariosControl.SubscriptionKey;
                var faceServiceClient = new FaceServiceClient(subscriptionKey);
                var faces = await faceServiceClient.DetectAsync(fileStream);

                // Handle REST API calling error
                if (faces == null)
                {
                    return;
                }

                MainWindow.Log("Response: Success. Detected {0} face(s) in {1}", faces.Length, pickedImagePath);

                // Convert detection results into UI binding object for rendering
                foreach (var face in UIHelper.CalculateFaceRectangleForRendering(faces, MaxImageSize, imageInfo))
                {
                    // Detected faces are hosted in result container, will be used in the verification later
                    RightResultCollection.Add(face);
                }
            }
            catch (FaceAPIException ex)
            {
                MainWindow.Log("Response: {0}. {1}", ex.ErrorCode, ex.ErrorMessage);
                return;
            }
        }
    }
}
/// <summary>
/// Pick image, detect and identify all faces detected.
/// Detection results are rendered, then the Identify API maps each face to the
/// best-matching person in the trained group (or "Unknown").
/// </summary>
/// <param name="sender">Event sender</param>
/// <param name="e">Event arguments</param>
private async void Identify_Click(object sender, RoutedEventArgs e)
{
    // Show file picker
    Microsoft.Win32.OpenFileDialog dlg = new Microsoft.Win32.OpenFileDialog();
    dlg.DefaultExt = ".jpg";
    dlg.Filter = "Image files(*.jpg) | *.jpg";
    var result = dlg.ShowDialog();
    if (result.HasValue && result.Value)
    {
        // User picked one image
        // Clear previous detection and identification results
        TargetFaces.Clear();
        SelectedFile = dlg.FileName;

        // NOTE(review): this stopwatch is started but never read — remove it or log the elapsed time.
        var sw = Stopwatch.StartNew();

        var imageInfo = UIHelper.GetImageInfoForRendering(dlg.FileName);
        MainWindow mainWindow = Window.GetWindow(this) as MainWindow;
        string subscriptionKey = mainWindow._scenariosControl.SubscriptionKey;
        var faceServiceClient = new FaceServiceClient(subscriptionKey);

        // Call detection REST API
        using (var fileStream = File.OpenRead(dlg.FileName))
        {
            try
            {
                var faces = await faceServiceClient.DetectAsync(fileStream);

                // Convert detection result into UI binding object for rendering
                foreach (var face in UIHelper.CalculateFaceRectangleForRendering(faces, MaxImageSize, imageInfo))
                {
                    TargetFaces.Add(face);
                }

                MainWindow.Log("Request: Identifying {0} face(s) in group \"{1}\"", faces.Length, GroupName);

                // Identify each face
                // Call identify REST API, the result contains identified person information
                var identifyResult = await faceServiceClient.IdentifyAsync(GroupName, faces.Select(ff => ff.FaceId).ToArray());
                for (int idx = 0; idx < faces.Length; idx++)
                {
                    // Update identification result for rendering; identifyResult is
                    // index-aligned with the submitted face ids.
                    var face = TargetFaces[idx];
                    var res = identifyResult[idx];
                    // Map the top candidate back to the locally cached person list, if present.
                    if (res.Candidates.Length > 0 && Persons.Any(p => p.PersonId == res.Candidates[0].PersonId.ToString()))
                    {
                        face.PersonName = Persons.Where(p => p.PersonId == res.Candidates[0].PersonId.ToString()).First().PersonName;
                    }
                    else
                    {
                        face.PersonName = "Unknown";
                    }
                }

                var outString = new StringBuilder();
                foreach (var face in TargetFaces)
                {
                    outString.AppendFormat("Face {0} is identified as {1}. ", face.FaceId, face.PersonName);
                }
                MainWindow.Log("Response: Success. {0}", outString);
            }
            catch (FaceAPIException ex)
            {
                MainWindow.Log("Response: {0}. {1}", ex.ErrorCode, ex.ErrorMessage);
            }
        }
    }
}
/// <summary>
/// Initialize the facial recognition client with the given subscription key.
/// </summary>
/// <param name="key">Face API subscription key.</param>
public static void InitFaceClient(string key) => faceClient = new FaceServiceClient(key);
/// <summary>
/// Verify two detected faces, get whether these two faces belong to the same person
/// </summary>
/// <param name="sender">Event sender</param>
/// <param name="e">Event argument</param>
private async void Verification_Click(object sender, RoutedEventArgs e)
{
    // Call face to face verification, verify REST API supports one face to one face verification only
    // Here, we handle single face image only
    if (LeftResultCollection.Count == 1 && RightResultCollection.Count == 1)
    {
        VerifyResult = "Verifying...";
        var faceId1 = LeftResultCollection[0].FaceId;
        var faceId2 = RightResultCollection[0].FaceId;

        MainWindow.Log("Request: Verifying face {0} and {1}", faceId1, faceId2);

        // Call verify REST API with two face id
        try
        {
            MainWindow mainWindow = Window.GetWindow(this) as MainWindow;
            string subscriptionKey = mainWindow._scenariosControl.SubscriptionKey;
            var faceServiceClient = new FaceServiceClient(subscriptionKey);
            var res = await faceServiceClient.VerifyAsync(Guid.Parse(faceId1), Guid.Parse(faceId2));

            // Verification result contains IsIdentical (true or false) and Confidence (in range 0.0 ~ 1.0),
            // here we update verify result on UI by VerifyResult binding
            VerifyResult = string.Format("{0} ({1:0.0})", res.IsIdentical ? "Equals" : "Does not equal", res.Confidence);
            MainWindow.Log("Response: Success. Face {0} and {1} {2} to the same person", faceId1, faceId2, res.IsIdentical ? "belong" : "not belong");
        }
        catch (FaceAPIException ex)
        {
            MainWindow.Log("Response: {0}. {1}", ex.ErrorCode, ex.ErrorMessage);
            return;
        }
    }
    else
    {
        // Verification requires exactly one detected face on each side.
        MessageBox.Show("Verification accepts two faces as input, please pick images with only one detectable face in it.", "Warning", MessageBoxButton.OK);
    }
}
/// <summary>
/// Pick the root person database folder, to minimum the data preparation logic, the folder should be under following construction:
/// each person's image should be put into one folder named as the person's name, and
/// all person's image folder should be put directly under the root person database folder.
/// Builds the person group (clearing any existing one after user confirmation), registers
/// each sub-folder as a person with its images as faces, then trains the group and polls
/// until training finishes.
/// </summary>
/// <param name="sender">Event sender</param>
/// <param name="e">Event argument</param>
private async void FolderPicker_Click(object sender, RoutedEventArgs e)
{
    bool groupExists = false;
    MainWindow mainWindow = Window.GetWindow(this) as MainWindow;
    string subscriptionKey = mainWindow._scenariosControl.SubscriptionKey;
    var faceServiceClient = new FaceServiceClient(subscriptionKey);

    // Test whether the group already exists
    try
    {
        MainWindow.Log("Request: Group {0} will be used for build person database. Checking whether group exists.", GroupName);
        await faceServiceClient.GetPersonGroupAsync(GroupName);
        groupExists = true;
        MainWindow.Log("Response: Group {0} exists.", GroupName);
    }
    catch (FaceAPIException ex)
    {
        // "PersonGroupNotFound" is the expected (non-error) case; anything else aborts.
        if (ex.ErrorCode != "PersonGroupNotFound")
        {
            MainWindow.Log("Response: {0}. {1}", ex.ErrorCode, ex.ErrorMessage);
            return;
        }
        else
        {
            MainWindow.Log("Response: Group {0} does not exist before.", GroupName);
        }
    }

    if (groupExists)
    {
        // The group must be recreated from scratch; ask the user before deleting it.
        var cleanGroup = System.Windows.MessageBox.Show(string.Format("Requires a clean up for group \"{0}\" before setup new person database. Click OK to proceed, group \"{0}\" will be fully cleaned up.", GroupName), "Warning", MessageBoxButton.OKCancel);
        if (cleanGroup == MessageBoxResult.OK)
        {
            await faceServiceClient.DeletePersonGroupAsync(GroupName);
        }
        else
        {
            return;
        }
    }

    // Show folder picker
    System.Windows.Forms.FolderBrowserDialog dlg = new System.Windows.Forms.FolderBrowserDialog();
    var result = dlg.ShowDialog();

    // Set the suggestion count is intent to minimum the data preparation step only,
    // it's not corresponding to service side constraint
    const int SuggestionCount = 15;

    if (result == System.Windows.Forms.DialogResult.OK)
    {
        // User picked a root person database folder
        // Clear person database
        Persons.Clear();
        TargetFaces.Clear();
        SelectedFile = null;

        // Call create person group REST API
        // Create person group API call will failed if group with the same name already exists
        MainWindow.Log("Request: Creating group \"{0}\"", GroupName);
        try
        {
            await faceServiceClient.CreatePersonGroupAsync(GroupName, GroupName);
            MainWindow.Log("Response: Success. Group \"{0}\" created", GroupName);
        }
        catch (FaceAPIException ex)
        {
            MainWindow.Log("Response: {0}. {1}", ex.ErrorCode, ex.ErrorMessage);
            return;
        }

        int processCount = 0;
        bool forceContinue = false;
        MainWindow.Log("Request: Preparing faces for identification, detecting faces in chosen folder.");

        // Enumerate top level directories, each directory contains one person's images
        foreach (var dir in System.IO.Directory.EnumerateDirectories(dlg.SelectedPath))
        {
            var tasks = new List<Task>();
            var tag = System.IO.Path.GetFileName(dir);
            Person p = new Person();
            p.PersonName = tag;
            var faces = new ObservableCollection<Face>();
            p.Faces = faces;

            // Call create person REST API, the new create person id will be returned
            MainWindow.Log("Request: Creating person \"{0}\"", p.PersonName);
            p.PersonId = (await faceServiceClient.CreatePersonAsync(GroupName, p.PersonName)).PersonId.ToString();
            MainWindow.Log("Response: Success. Person \"{0}\" (PersonID:{1}) created", p.PersonName, p.PersonId);

            // Enumerate images under the person folder, call detection
            foreach (var img in System.IO.Directory.EnumerateFiles(dir, "*.jpg", System.IO.SearchOption.AllDirectories))
            {
                tasks.Add(Task.Factory.StartNew(
                    async (obj) =>
                    {
                        var imgPath = obj as string;
                        using (var fStream = File.OpenRead(imgPath))
                        {
                            try
                            {
                                // Update person faces on server side
                                var persistFace = await faceServiceClient.AddPersonFaceAsync(GroupName, Guid.Parse(p.PersonId), fStream, imgPath);
                                return new Tuple<string, ClientContract.AddPersistedFaceResult>(imgPath, persistFace);
                            }
                            catch (FaceAPIException)
                            {
                                // Here we simply ignore all detection failure in this sample
                                // You may handle these exceptions by check the Error.Error.Code and Error.Message property for ClientException object
                                return new Tuple<string, ClientContract.AddPersistedFaceResult>(imgPath, null);
                            }
                        }
                    },
                    img).Unwrap().ContinueWith((detectTask) =>
                    {
                        // Update detected faces for rendering (marshalled to the UI thread)
                        var detectionResult = detectTask.Result;
                        if (detectionResult == null || detectionResult.Item2 == null)
                        {
                            return;
                        }
                        this.Dispatcher.Invoke(
                            new Action<ObservableCollection<Face>, string, ClientContract.AddPersistedFaceResult>(UIHelper.UpdateFace),
                            faces,
                            detectionResult.Item1,
                            detectionResult.Item2);
                    }));

                // NOTE(review): processCount is never incremented anywhere in this handler,
                // so this confirmation prompt can never fire — the sibling handler on the
                // FindSimilar page does increment it; confirm a `processCount++` is missing here.
                if (processCount >= SuggestionCount && !forceContinue)
                {
                    var continueProcess = System.Windows.Forms.MessageBox.Show("The images loaded have reached the recommended count, may take long time if proceed. Would you like to continue to load images?", "Warning", System.Windows.Forms.MessageBoxButtons.YesNo);
                    if (continueProcess == System.Windows.Forms.DialogResult.Yes)
                    {
                        forceContinue = true;
                    }
                    else
                    {
                        break;
                    }
                }
            }
            Persons.Add(p);
            await Task.WhenAll(tasks);
        }
        MainWindow.Log("Response: Success. Total {0} faces are detected.", Persons.Sum(p => p.Faces.Count));

        try
        {
            // Start train person group
            MainWindow.Log("Request: Training group \"{0}\"", GroupName);
            await faceServiceClient.TrainPersonGroupAsync(GroupName);

            // Wait until train completed
            while (true)
            {
                await Task.Delay(1000);
                var status = await faceServiceClient.GetPersonGroupTrainingStatusAsync(GroupName);
                MainWindow.Log("Response: {0}. Group \"{1}\" training process is {2}", "Success", GroupName, status.Status);
                if (status.Status != Contract.Status.Running)
                {
                    break;
                }
            }
        }
        catch (FaceAPIException ex)
        {
            MainWindow.Log("Response: {0}. {1}", ex.ErrorCode, ex.ErrorMessage);
        }
    }
}