/// <summary>
/// Refreshes the face-count / training-status line for the given person in
/// <c>txtPersonFaceInfo</c>. Best-effort: on any service failure the previous
/// text is left unchanged and the error is written to debug output.
/// </summary>
/// <param name="person">Display name of the person within the configured person group.</param>
private async System.Threading.Tasks.Task UpdatePerson(string person)
{
    try
    {
        // FaceClient is IDisposable; scope it so the underlying handlers are released.
        using (var faceClient = new Microsoft.Azure.CognitiveServices.Vision.Face.FaceClient(
            new Microsoft.Azure.CognitiveServices.Vision.ComputerVision.ApiKeyServiceClientCredentials(settings.FaceKey),
            new System.Net.Http.DelegatingHandler[] { }))
        {
            faceClient.Endpoint = settings.FaceEndpoint;

            var groups = await faceClient.PersonGroup.ListWithHttpMessagesAsync();
            var group = groups.Body.FirstOrDefault(x => x.Name == settings.GroupName);
            if (group == null)
            {
                return; // Group has not been created yet (BindFaces creates it); nothing to show.
            }

            var people = await faceClient.PersonGroupPerson.ListWithHttpMessagesAsync(group.PersonGroupId);
            var personObject = people.Body.FirstOrDefault(x => x.Name == person);
            if (personObject == null)
            {
                return; // Person is not registered in the group yet.
            }

            // NOTE: this call throws if the group has never been trained; the catch below
            // preserves the original best-effort behavior in that case.
            var trainingStatus = await faceClient.PersonGroup.GetTrainingStatusWithHttpMessagesAsync(group.PersonGroupId);
            txtPersonFaceInfo.Text = $"{personObject.Name} has {personObject.PersistedFaceIds.Count()} faces : training status is {trainingStatus.Body.Status}";
        }
    }
    catch (Exception ex)
    {
        // Best-effort UI refresh: don't crash the app, but surface the failure to diagnostics
        // instead of swallowing it silently.
        System.Diagnostics.Debug.WriteLine($"UpdatePerson failed: {ex.Message}");
    }
}
/// <summary>
/// Creates a person with the given name in the configured person group if one
/// does not already exist, then rebinds the person dropdown and selects the name.
/// Best-effort: on failure the error is written to debug output.
/// </summary>
/// <param name="person">Display name for the person to create/select.</param>
private async System.Threading.Tasks.Task AddPerson(string person)
{
    try
    {
        // FaceClient is IDisposable; scope it so the underlying handlers are released.
        using (var faceClient = new Microsoft.Azure.CognitiveServices.Vision.Face.FaceClient(
            new Microsoft.Azure.CognitiveServices.Vision.ComputerVision.ApiKeyServiceClientCredentials(settings.FaceKey),
            new System.Net.Http.DelegatingHandler[] { }))
        {
            faceClient.Endpoint = settings.FaceEndpoint;

            var groups = await faceClient.PersonGroup.ListWithHttpMessagesAsync();
            var group = groups.Body.FirstOrDefault(x => x.Name == settings.GroupName);
            if (group == null)
            {
                return; // Group has not been created yet (BindFaces creates it).
            }

            var people = await faceClient.PersonGroupPerson.ListWithHttpMessagesAsync(group.PersonGroupId);
            if (!people.Body.Any(x => x.Name == person))
            {
                await faceClient.PersonGroupPerson.CreateWithHttpMessagesAsync(group.PersonGroupId, person);
            }
        }

        await BindFaces();
        dropdownPerson.SelectedValue = person;
    }
    catch (Exception ex)
    {
        // Best-effort: don't crash the UI, but log instead of swallowing silently.
        System.Diagnostics.Debug.WriteLine($"AddPerson failed: {ex.Message}");
    }
}
/// <summary>
/// Lets the user pick one or more images and registers each one as a face for
/// the person currently selected in the dropdown, then starts group training.
/// Progress and errors are reported through <c>txtPersonFaceInfo</c>.
/// </summary>
private async System.Threading.Tasks.Task UploadPics()
{
    try
    {
        txtPersonFaceInfo.Text = "Uploading";

        var picker = new Windows.Storage.Pickers.FileOpenPicker();
        picker.ViewMode = Windows.Storage.Pickers.PickerViewMode.Thumbnail;
        picker.SuggestedStartLocation = Windows.Storage.Pickers.PickerLocationId.PicturesLibrary;
        picker.FileTypeFilter.Add(".jpg");
        picker.FileTypeFilter.Add(".jpeg");
        picker.FileTypeFilter.Add(".png");

        // PickMultipleFilesAsync returns an EMPTY list (not null) when the user
        // cancels, so the count check is required for the "No Files" branch to
        // ever be taken.
        var files = await picker.PickMultipleFilesAsync();
        if (files != null && files.Count > 0)
        {
            using (var faceClient = new Microsoft.Azure.CognitiveServices.Vision.Face.FaceClient(
                new Microsoft.Azure.CognitiveServices.Vision.ComputerVision.ApiKeyServiceClientCredentials(settings.FaceKey),
                new System.Net.Http.DelegatingHandler[] { }))
            {
                faceClient.Endpoint = settings.FaceEndpoint;

                var groups = await faceClient.PersonGroup.ListWithHttpMessagesAsync();
                var group = groups.Body.FirstOrDefault(x => x.Name == settings.GroupName);
                var people = await faceClient.PersonGroupPerson.ListWithHttpMessagesAsync(group.PersonGroupId);
                var personObject = people.Body.First(x => x.Name == dropdownPerson.SelectedItem.ToString());

                foreach (var file in files)
                {
                    // Dispose each picked file's stream once its faces are registered.
                    using (var s = await file.OpenReadAsync())
                    {
                        // Detect faces in the image and add them to the selected person.
                        await faceClient.PersonGroupPerson.AddFaceFromStreamWithHttpMessagesAsync(group.PersonGroupId, personObject.PersonId, s.AsStream());
                    }
                }

                await UpdatePerson(dropdownPerson.SelectedItem.ToString());
                await faceClient.PersonGroup.TrainWithHttpMessagesAsync(group.PersonGroupId);
            }
        }
        else
        {
            txtPersonFaceInfo.Text = "No Files";
        }
    }
    catch (Exception ex)
    {
        txtPersonFaceInfo.Text = "Upload Error: " + ex.Message;
    }
}
/// <summary>
/// Populates the person dropdown from the configured person group, creating the
/// group on first use. Re-entrant calls are ignored while a load is in flight
/// (guarded by <c>groupsLoading</c>), and the guard is always released via finally.
/// </summary>
private async System.Threading.Tasks.Task BindFaces()
{
    if (groupsLoading)
    {
        return; // A load is already in progress; skip this call.
    }

    groupsLoading = true;
    try
    {
        // FaceClient is IDisposable; scope it so the underlying handlers are released.
        using (var faceClient = new Microsoft.Azure.CognitiveServices.Vision.Face.FaceClient(
            new Microsoft.Azure.CognitiveServices.Vision.ComputerVision.ApiKeyServiceClientCredentials(settings.FaceKey),
            new System.Net.Http.DelegatingHandler[] { }))
        {
            faceClient.Endpoint = settings.FaceEndpoint;

            var groups = await faceClient.PersonGroup.ListWithHttpMessagesAsync();
            var group = groups.Body.FirstOrDefault(x => x.Name == settings.GroupName);
            if (group == null)
            {
                // First run: create the group, then re-query so we obtain its id.
                await faceClient.PersonGroup.CreateWithHttpMessagesAsync(Guid.NewGuid().ToString(), settings.GroupName);
                groups = await faceClient.PersonGroup.ListWithHttpMessagesAsync();
                group = groups.Body.FirstOrDefault(x => x.Name == settings.GroupName);
            }

            if (group != null)
            {
                var people = await faceClient.PersonGroupPerson.ListWithHttpMessagesAsync(group.PersonGroupId);
                dropdownPerson.ItemsSource = people.Body.Select(x => x.Name).ToArray();
            }
        }
    }
    catch (Exception ex)
    {
        // Best-effort: don't crash the UI, but log instead of swallowing silently.
        System.Diagnostics.Debug.WriteLine($"BindFaces failed: {ex.Message}");
    }
    finally
    {
        // Always release the re-entrancy guard, even if a service call throws.
        groupsLoading = false;
    }
}
/// <summary>
/// Deletes the named person from the configured person group and refreshes the
/// dropdown. Best-effort: on failure the error is written to debug output.
/// </summary>
/// <param name="person">Display name of the person to delete.</param>
private async System.Threading.Tasks.Task DeletePerson(string person)
{
    try
    {
        // FaceClient is IDisposable; scope it so the underlying handlers are released.
        using (var faceClient = new Microsoft.Azure.CognitiveServices.Vision.Face.FaceClient(
            new ApiKeyServiceClientCredentials(settings.FaceKey),
            new System.Net.Http.DelegatingHandler[] { }))
        {
            faceClient.Endpoint = settings.FaceEndpoint;

            var groups = await faceClient.PersonGroup.ListWithHttpMessagesAsync();
            var group = groups.Body.FirstOrDefault(x => x.Name == settings.GroupName);
            if (group == null)
            {
                return; // No group, so nothing to delete from.
            }

            var people = await faceClient.PersonGroupPerson.ListWithHttpMessagesAsync(group.PersonGroupId);
            var personObject = people.Body.FirstOrDefault(x => x.Name == person);
            if (personObject == null)
            {
                return; // Person already gone.
            }

            await faceClient.PersonGroupPerson.DeleteWithHttpMessagesAsync(group.PersonGroupId, personObject.PersonId);
        }

        await BindFaces();
    }
    catch (Exception ex)
    {
        // Best-effort: don't crash the UI, but log instead of swallowing silently.
        System.Diagnostics.Debug.WriteLine($"DeletePerson failed: {ex.Message}");
    }
}
/// <summary>
/// Runs Computer Vision analysis (throttled to at most once per second via
/// <c>imageAnalysisLastDate</c>) and per-frame face detection on the supplied
/// frame, then pushes the results to the UI. Best-effort: service failures are
/// logged and the frame is skipped.
/// </summary>
/// <param name="image">The camera frame to analyze.</param>
private async Task ProcessImage(SoftwareBitmap image)
{
    try
    {
        Microsoft.Azure.CognitiveServices.Vision.ComputerVision.ComputerVisionClient visionClient = new Microsoft.Azure.CognitiveServices.Vision.ComputerVision.ComputerVisionClient(
            new ApiKeyServiceClientCredentials(settings.VisionKey),
            new System.Net.Http.DelegatingHandler[] { });
        Microsoft.Azure.CognitiveServices.Vision.Face.FaceClient faceClient = new Microsoft.Azure.CognitiveServices.Vision.Face.FaceClient(
            new ApiKeyServiceClientCredentials(settings.FaceKey),
            new System.Net.Http.DelegatingHandler[] { });
        visionClient.Endpoint = $"https://{settings.VisionRegion}.api.cognitive.microsoft.com";
        faceClient.Endpoint = $"https://{settings.FaceRegion}.api.cognitive.microsoft.com";

        List<VisualFeatureTypes> features = new List<VisualFeatureTypes>()
        {
            VisualFeatureTypes.Categories,
            VisualFeatureTypes.Description,
            VisualFeatureTypes.Tags,
            VisualFeatureTypes.Faces
        };

        // The list of Face attributes to return.
        IList<FaceAttributeType> faceAttributes = new FaceAttributeType[]
        {
            FaceAttributeType.Gender,
            FaceAttributeType.Age,
            FaceAttributeType.Smile,
            FaceAttributeType.Emotion,
            FaceAttributeType.Glasses,
            FaceAttributeType.Hair
        };

        try
        {
            if (DateTime.Now.Subtract(imageAnalysisLastDate).TotalSeconds > 1)
            {
                using (var ms = new InMemoryRandomAccessStream())
                {
                    BitmapEncoder encoder = await BitmapEncoder.CreateAsync(BitmapEncoder.JpegEncoderId, ms);
                    encoder.SetSoftwareBitmap(image);
                    await encoder.FlushAsync();
                    // Rewind: after FlushAsync the stream position sits at the end of the
                    // encoded data, so AsStream() would otherwise hand the service an
                    // empty read.
                    ms.Seek(0);
                    var analysis = await visionClient.AnalyzeImageInStreamAsync(ms.AsStream(), features);
                    UpdateWithAnalysis(analysis);
                }
                imageAnalysisLastDate = DateTime.Now;
            }

            using (var ms = new InMemoryRandomAccessStream())
            {
                BitmapEncoder encoder = await BitmapEncoder.CreateAsync(BitmapEncoder.JpegEncoderId, ms);
                encoder.SetSoftwareBitmap(image);
                await encoder.FlushAsync();
                // Same rewind as above before handing the stream to the Face service.
                ms.Seek(0);
                var analysisFace = await faceClient.Face.DetectWithStreamWithHttpMessagesAsync(ms.AsStream(), returnFaceAttributes: faceAttributes);
                UpdateFaces(analysisFace, image.PixelHeight, image.PixelWidth);
            }
        }
        catch (Exception ex)
        {
            // Per-frame service failures are expected occasionally; skip the frame
            // but record why instead of swallowing silently.
            System.Diagnostics.Debug.WriteLine($"ProcessImage analysis failed: {ex.Message}");
        }
    }
    catch (Exception ex)
    {
        // Guard against setup failures (client construction, encoding) without
        // crashing the capture loop.
        System.Diagnostics.Debug.WriteLine($"ProcessImage failed: {ex.Message}");
    }
}
/// <summary>
/// Analyzes a camera frame: snapshots it to a reusable byte buffer, kicks off a
/// throttled background Computer Vision + Custom Vision analysis, runs per-frame
/// face detection, and (when enabled) identifies detected faces against the
/// configured person group. Best-effort: service failures are logged and the
/// frame is skipped.
/// </summary>
/// <param name="image">The camera frame to analyze.</param>
private async Task ProcessImage(SoftwareBitmap image)
{
    try
    {
        // Snapshot the frame into a byte array so multiple service calls can each
        // get a fresh, independently positioned stream over the same JPEG data.
        Func<Task<Stream>> imageStreamCallback;
        using (InMemoryRandomAccessStream stream = new InMemoryRandomAccessStream())
        {
            BitmapEncoder encoder = await BitmapEncoder.CreateAsync(BitmapEncoder.JpegEncoderId, stream);
            encoder.SetSoftwareBitmap(image);
            await encoder.FlushAsync();

            // Read the pixel bytes from the memory stream
            using (var reader = new DataReader(stream.GetInputStreamAt(0)))
            {
                var bytes = new byte[stream.Size];
                await reader.LoadAsync((uint)stream.Size);
                reader.ReadBytes(bytes);
                imageStreamCallback = () => Task.FromResult<Stream>(new MemoryStream(bytes));
            }
        }

        Microsoft.Azure.CognitiveServices.Vision.ComputerVision.ComputerVisionClient visionClient = new Microsoft.Azure.CognitiveServices.Vision.ComputerVision.ComputerVisionClient(
            new ApiKeyServiceClientCredentials(settings.ComputerVisionKey),
            new System.Net.Http.DelegatingHandler[] { });

        // Create a prediction endpoint, passing in the obtained prediction key
        CustomVisionPredictionClient customVisionClient = new CustomVisionPredictionClient()
        {
            ApiKey = settings.CustomVisionKey,
            Endpoint = $"https://{settings.CustomVisionRegion}.api.cognitive.microsoft.com"
        };

        Microsoft.Azure.CognitiveServices.Vision.Face.FaceClient faceClient = new Microsoft.Azure.CognitiveServices.Vision.Face.FaceClient(
            new ApiKeyServiceClientCredentials(settings.FaceKey),
            new System.Net.Http.DelegatingHandler[] { });

        visionClient.Endpoint = settings.ComputerVisionEndpoint;
        faceClient.Endpoint = settings.FaceEndpoint;

        List<VisualFeatureTypes> features = new List<VisualFeatureTypes>()
        {
            VisualFeatureTypes.Categories,
            VisualFeatureTypes.Description,
            VisualFeatureTypes.Tags,
            VisualFeatureTypes.Faces,
            VisualFeatureTypes.Brands
        };

        // The list of Face attributes to return.
        IList<FaceAttributeType> faceAttributes = new FaceAttributeType[]
        {
            FaceAttributeType.Gender,
            FaceAttributeType.Age,
            FaceAttributeType.Smile,
            FaceAttributeType.Emotion,
            FaceAttributeType.Glasses,
            FaceAttributeType.Hair
        };

        try
        {
            // Throttle the (slow) image analysis to once per second, fire-and-forget
            // so face detection below stays responsive.
            if (!imageAnalysisRunning && DateTime.Now.Subtract(imageAnalysisLastDate).TotalMilliseconds > 1000)
            {
                imageAnalysisRunning = true;
                _ = Task.Run(async () =>
                {
                    try
                    {
                        ImageAnalysis analysis = await visionClient.AnalyzeImageInStreamAsync(await imageStreamCallback(), features);
                        ImagePrediction analysisCV = null;
                        try
                        {
                            analysisCV = await customVisionClient.DetectImageWithNoStoreAsync(new Guid(settings.CustomVisionProjectId), settings.CustomVisionIterationName, await imageStreamCallback());
                        }
                        catch (Exception)
                        {
                            // Custom Vision is optional; report vision-only results on failure.
                        }
                        UpdateWithAnalysis(analysis, analysisCV);
                        imageAnalysisLastDate = DateTime.Now;
                    }
                    catch (Exception ex)
                    {
                        // Fire-and-forget task: observe the exception so it isn't lost.
                        System.Diagnostics.Debug.WriteLine($"Image analysis failed: {ex.Message}");
                    }
                    finally
                    {
                        // Without this, a thrown AnalyzeImageInStreamAsync would leave the
                        // flag set and permanently disable analysis for the session.
                        imageAnalysisRunning = false;
                    }
                });
            }

            var analysisFace = await faceClient.Face.DetectWithStreamWithHttpMessagesAsync(await imageStreamCallback(), returnFaceId: true, returnFaceAttributes: faceAttributes);
            imageWidth = image.PixelWidth;
            imageHeight = image.PixelHeight;
            facesControl.UpdateEvent(new CognitiveEvent() { Faces = analysisFace.Body, ImageWidth = image.PixelWidth, ImageHeight = image.PixelHeight });

            if (analysisFace.Body.Any() && settings.DoFaceDetection)
            {
                var groups = await faceClient.PersonGroup.ListWithHttpMessagesAsync();
                var group = groups.Body.FirstOrDefault(x => x.Name == settings.GroupName);
                if (group != null)
                {
                    var results = await faceClient.Face.IdentifyWithHttpMessagesAsync(analysisFace.Body.Select(x => x.FaceId.Value).ToArray(), group.PersonGroupId);
                    foreach (var identifyResult in results.Body)
                    {
                        // FaceThreshold is stored as a percentage; candidates report 0..1 confidence.
                        var cand = identifyResult.Candidates.FirstOrDefault(x => x.Confidence > settings.FaceThreshold / 100d);
                        if (cand == null)
                        {
                            Console.WriteLine("No one identified");
                        }
                        else
                        {
                            // Get top 1 among all candidates returned
                            var candidateId = cand.PersonId;
                            var person = await faceClient.PersonGroupPerson.GetWithHttpMessagesAsync(group.PersonGroupId, candidateId);
                            tagsControl.UpdateEvent(new CognitiveEvent() { IdentifiedPerson = person.Body, IdentifiedPersonPrediction = cand.Confidence });
                            Console.WriteLine("Identified as {0}", person.Body.Name);
                        }
                    }
                }
            }
        }
        catch (Exception ex)
        {
            // Per-frame service failures are expected occasionally; skip the frame
            // but record why instead of swallowing silently.
            System.Diagnostics.Debug.WriteLine($"ProcessImage analysis failed: {ex.Message}");
        }
    }
    catch (Exception ex)
    {
        // Guard against setup failures (encoding, client construction) without
        // crashing the capture loop.
        System.Diagnostics.Debug.WriteLine($"ProcessImage failed: {ex.Message}");
    }
}