// Authenticates a user by an uploaded face image.
// Detects and identifies the face, then either returns the matched user with a
// fresh auth token, or creates a new user for a first-seen face.
// Returns null when no face could be detected/identified.
public async Task<FaceAuthModel> FaceAuth([FromForm]DetectModel file)
{
    var faceClient = new FaceClient(
        new ApiKeyServiceClientCredentials(SubscriptionKey),
        new System.Net.Http.DelegatingHandler[] { });
    faceClient.Endpoint = FaceEndpoint;

    // DetectStream detects + identifies; it returns null on any failure.
    var faceData = await DetectStream(faceClient, file.Face.OpenReadStream());
    if (faceData == null)
    {
        return null;
    }

    // Look up an existing user by the identified face id.
    var user = await _appDb.Users.FirstOrDefaultAsync(f => f.FaceId == faceData.FaceId);
    if (user != null)
    {
        // Known face: issue a token for the existing user.
        return new FaceAuthModel(user.Id.ToString(), AuthHelper.GetToken(user.Id.ToString()))
        {
            FaceData = faceData,
            Name = user.Name
        };
    }
    else
    {
        // First time this face is seen: persist a new user carrying the
        // serialized face data, then issue a token for it.
        var newUser = AppUser.WithFace(faceData.FaceId, JsonConvert.SerializeObject(faceData));
        await _appDb.Users.AddAsync(newUser);
        await _appDb.SaveChangesAsync();
        return new FaceAuthModel(newUser.Id.ToString(), AuthHelper.GetToken(newUser.Id.ToString()))
        {
            FaceData = faceData
        };
    }
}
// Builds the Face API client from the stored credentials (key and endpoint).
public void Authenticate()
{
    var credentials = new ApiKeyServiceClientCredentials(_creds.SubscriptionKey);
    _client = new FaceClient(credentials);
    _client.Endpoint = _creds.Endpoint;
}
// Handles clicks on a background-image panel: shows the clicked image, runs
// face detection on it, and draws a rectangle around every detected face.
private void pnlbg_Click(object sender, EventArgs e)
{
    // Pattern match replaces the original `as` cast + null check.
    if (sender is Panel pnl)
    {
        picSource.Image = pnl.BackgroundImage;

        // SECURITY: API key is hard-coded in source; move it to configuration
        // and rotate the key — it is exposed to anyone who can read this file.
        FaceClient client = new FaceClient("8c1be718fa976083a6940009f36c056e");
        var face = client.FaceDetect(picSource.Image); // 获取特征值 (get face feature points)
        if (face != null && face.facemodels.Count > 0)
        {
            byte[] bts = Common.GetFilebyte(pnl.BackgroundImage);
            Image img = Image.FromStream(new MemoryStream(bts));
            foreach (var model in face.facemodels)
            {
                // Draw the detected face rectangle onto the image.
                // (An unused eye-coordinate info string was removed here.)
                img = Common.DrawImage(img, model.facerectanglex, model.facerectangley,
                    model.facerectanglewidth, model.facerectangleheight);
            }
            picCheck.Image = img;
        }
    }
}
// Creates or updates a person inside a person group.
// A Guid.Empty PersonId means "create new"; otherwise the existing person is
// updated. Redirects to the group's index on success, re-renders the edit view
// with an error message on Face API failure or invalid model state.
public async Task <ActionResult> Edit(Person person)
{
    // PersonGroupId travels as a hidden form field, not on the model.
    var personGroupId = Request.Form["PersonGroupId"];
    if (string.IsNullOrEmpty(personGroupId))
    {
        return(HttpNotFound("PersonGroupId is missing"));
    }
    if (!ModelState.IsValid)
    {
        ViewBag.PersonGroupId = personGroupId;
        return(View(person));
    }
    try
    {
        if (person.PersonId == Guid.Empty)
        {
            // No id yet: create a brand-new person in the group.
            await FaceClient.CreatePersonAsync(personGroupId, person.Name, person.UserData);
        }
        else
        {
            await FaceClient.UpdatePersonAsync(personGroupId, person.PersonId, person.Name, person.UserData);
        }
        return(RedirectToAction("Index", new { id = personGroupId }));
    }
    catch (FaceAPIException fex)
    {
        // Surface the service error on the form instead of crashing the request.
        ModelState.AddModelError(string.Empty, fex.ErrorMessage);
    }
    return(View(person));
}
// Console entry point: wires up DI, reads Face API settings from
// appsettings.json, creates the client, and hands off to the detection library.
static void Main(string[] args)
{
    // Create service collection
    ServiceCollection serviceCollection = new ServiceCollection();
    ConfigureServices(serviceCollection);
    // Create service provider
    IServiceProvider serviceProvider = serviceCollection.BuildServiceProvider();
    // Get data from appsettings.json
    string inputFilePath = configuration.GetSection("InputFilePath").Get <string>();
    string subscriptionKey = configuration.GetSection("SubscriptionKey").Get <string>();
    string uriBase = configuration.GetSection("UriBase").Get <string>();
    string vocabularyPath = configuration.GetSection("VocabularyPath").Get <string>();
    // Create FaceClient (Azure face API)
    // NOTE(review): the endpoint is hard-coded here even though `uriBase` is
    // read from configuration above — confirm whether Endpoint should be uriBase.
    IFaceClient client = new FaceClient(new ApiKeyServiceClientCredentials(subscriptionKey))
    {
        Endpoint = "https://midiseu.cognitiveservices.azure.com"
    };
    // Library using
    FaceDetectionLibrary.DetectFacesAsync(inputFilePath, subscriptionKey, uriBase, client, vocabularyPath);
}
// It uses the Face service client to detect faces in a remote image, referenced by a URL.
// Note that it uses the faceAttributes field — the DetectedFace objects added to faceList will have the specified attributes (in this case, age and gender)
// Logs per-gender totals after detection; API failures are logged, not thrown.
private static async Task DetectedRemoteAsync(ILogger log, FaceClient faceClient, string ImageUrl)
{
    log.LogInformation("Image URL: " + ImageUrl);
    // Reject malformed URLs up front instead of letting the service call fail.
    if (!Uri.IsWellFormedUriString(ImageUrl, UriKind.Absolute))
    {
        log.LogError(string.Format("\n Invalid remote Imageurl : \n {0} \n", ImageUrl));
        return;
    }
    try
    {
        // returnFaceId = true, returnFaceLandmarks = false.
        IList <DetectedFace> faceList = await faceClient.Face.DetectWithUrlAsync(
            ImageUrl, true, false, faceAttributes);
        DisplayAttributes(log, GetFaceAttributes(faceList, ImageUrl), ImageUrl);
        // Counters are class-level and updated by GetFaceAttributes (presumably) —
        // TODO confirm where totalFaceDetected/femaleDetected/maleDetected are incremented.
        log.LogInformation(string.Format("Total Face detected :: {0}", totalFaceDetected));
        log.LogInformation(string.Format("Total Female detected :: {0}", femaleDetected));
        log.LogInformation(string.Format("Total Male detected :: {0}", maleDetected));
    }
    catch (APIErrorException e)
    {
        log.LogError("Error Processing the Image File");
        log.LogError(ImageUrl + ":" + e.Message);
    }
}
// Fires once the photo has loaded in the UI: records the image size, detects
// faces (with landmarks and a fixed attribute set) at the URL in txtLocation,
// then renders face and emotion data and stops the loading ring.
// async void is acceptable here because this is a top-level event handler.
private async void imgPhoto_ImageOpened(object sender, RoutedEventArgs e)
{
    size_image = new Size((imgPhoto.Source as BitmapImage).PixelWidth, (imgPhoto.Source as BitmapImage).PixelHeight);
    var f_client = new FaceClient(
        new ApiKeyServiceClientCredentials(key_face),
        new System.Net.Http.DelegatingHandler[] { });
    f_client.Endpoint = face_apiroot;
    // Attributes requested for each detected face.
    var requiedFaceAttributes = new FaceAttributeType[]
    {
        FaceAttributeType.Age,
        FaceAttributeType.Gender,
        FaceAttributeType.Smile,
        FaceAttributeType.FacialHair,
        FaceAttributeType.HeadPose,
        FaceAttributeType.Emotion,
        FaceAttributeType.Glasses
    };
    // returnFaceId = true, returnFaceLandmarks = true.
    var faces_task = await f_client.Face.DetectWithUrlAsync(txtLocation.Text, true, true, requiedFaceAttributes);
    faces = faces_task.ToArray();
    // NOTE(review): ToArray never returns null, so this check is always true.
    if (faces != null)
    {
        DisplayFacesData(faces);
        DisplayEmotionsData(faces);
    }
    ringLoading.IsActive = false;
}
// Verifies whether face mFaceId belongs to person mPersonId in group
// mPersonGroupId, logging progress and pushing the result to the UI thread.
private async void ExecuteVerification()
{
    VerifyResult verify_result = null;
    mProgressDialog.Show();
    AddLog("Request: Verifying face " + mFaceId + " and person " + mPersonId);
    try
    {
        var faceClient = new FaceClient();
        mProgressDialog.SetMessage("Verifying...");
        SetInfo("Verifying...");
        verify_result = await faceClient.Verify(mFaceId, mPersonGroupId, mPersonId);
    }
    catch (Java.Lang.Exception e)
    {
        // Failure leaves verify_result null; only the message is logged.
        AddLog(e.Message);
    }
    RunOnUiThread(() =>
    {
        if (verify_result != null)
        {
            // NOTE(review): this message prints mPersonId twice — the second
            // occurrence was probably meant to be something else; confirm intent.
            AddLog("Response: Success. Face " + mFaceId + " " + mPersonId + (verify_result.IsIdentical ? " " : " don't ") + "belong to person " + mPersonId);
        }
        // Show the result on screen when verification is done.
        // NOTE(review): mProgressDialog is never dismissed in this method —
        // presumably SetUiAfterVerification does it; verify (siblings call Dismiss here).
        SetUiAfterVerification(verify_result);
    });
}
// GET: renders the emotions form with the available person groups.
// POST: detects faces (with emotion attributes) in the uploaded image,
// identifies them against the selected group, and renders candidates plus an
// annotated image.
public async Task <ActionResult> Emotions()
{
    var personGroupId = Request["PersonGroupId"];
    var model = new IdentifyFacesModel();
    var groups = await FaceClient.ListPersonGroupsAsync();
    model.PersonGroups = groups.Select(g => new SelectListItem { Value = g.PersonGroupId, Text = g.Name }).ToList();
    if (Request.HttpMethod == "GET")
    {
        return(View(model));
    }
    Face[] faces = new Face[] { };
    Guid[] faceIds = new Guid[] { };
    IdentifyResult[] results = new IdentifyResult[] { };
    // Detect + identify inside the shared image-handling helper.
    await RunOperationOnImage(async stream =>
    {
        var emotionsType = new[] { FaceAttributeType.Emotion };
        faces = await FaceClient.DetectAsync(stream, returnFaceAttributes: emotionsType);
        faceIds = faces.Select(f => f.FaceId).ToArray();
        if (faceIds.Length > 0)
        {
            results = await FaceClient.IdentifyAsync(personGroupId, faceIds);
        }
    });
    if (faceIds.Length == 0)
    {
        model.Error = "No faces detected";
        return(View(model));
    }
    foreach (var result in results)
    {
        var identifiedFace = new IdentifiedFace();
        identifiedFace.Face = faces.FirstOrDefault(f => f.FaceId == result.FaceId);
        // Resolve every candidate person id to a display name.
        foreach (var candidate in result.Candidates)
        {
            var person = await FaceClient.GetPersonAsync(personGroupId, candidate.PersonId);
            identifiedFace.PersonCandidates.Add(person.PersonId, person.Name);
        }
        model.IdentifiedFaces.Add(identifiedFace);
        // NOTE(review): Count is read AFTER Add, so index starts at 1 (color 0
        // never used) and can overflow ImageSquareColors for many faces — confirm.
        identifiedFace.Color = Settings.ImageSquareColors[model.IdentifiedFaces.Count];
    }
    model.ImageDump = GetInlineImageWithIdentifiedFaces(model.IdentifiedFaces);
    return(View(model));
}
// Detects the dominant emotion of the largest face in the image.
// Returns the name of the highest-scoring Emotion property (e.g. "Happiness"),
// or null when no face is detected.
private static async Task <string> DetectEmotionAsync(byte[] image, IConfigurationSection faceAPISettings)
{
    // FIX: the original leaked both the FaceClient (wraps an HttpClient) and
    // the MemoryStream; both are now disposed deterministically.
    using (var client = new FaceClient(new ApiKeyServiceClientCredentials(faceAPISettings["Key"])))
    using (var imageStream = new MemoryStream(image))
    {
        client.BaseUri = new Uri(faceAPISettings["Endpoint"]);
        var result = await client.Face.DetectWithStreamAsync(imageStream,
            returnFaceAttributes : new List <FaceAttributeType> { FaceAttributeType.Emotion, });
        if (!result.Any())
        {
            return(null);
        }
        // Pick the largest face by bounding-box area.
        var emotion = result.OrderByDescending(x => x.FaceRectangle.Width * x.FaceRectangle.Height)
            .First()
            .FaceAttributes
            .Emotion;
        // Reflect over the Emotion properties and return the top-scoring one.
        return(typeof(Emotion)
            .GetProperties()
            .Select(x => (name: x.Name, value: x.GetValue(emotion)))
            .OrderByDescending(x => x.value)
            .First()
            .name);
    }
}
// HTTP-triggered Azure Function: enrolls a face into a person group.
// Body: { imageUrl, personName, personId? }. Creates a new person when
// personId is absent, otherwise adds the face to the existing person, then
// retrains the group. Returns { faceId, personId } as JSON.
public static async Task <HttpResponseMessage> Run(
    [HttpTrigger(AuthorizationLevel.Function, "get", "post", Route = null)] HttpRequest req,
    ILogger log, ExecutionContext context)
{
    #region Config
    var config = new ConfigurationBuilder()
        .SetBasePath(context.FunctionAppDirectory)
        .AddJsonFile("local.settings.json", optional: true, reloadOnChange: true)
        .AddEnvironmentVariables()
        .Build();
    var faceEndpoint = config["faceEndpoint"];
    var subscriptionKey = config["faceSubscriptionKey"];
    #endregion
    #region Read Body
    string requestBody = await new StreamReader(req.Body).ReadToEndAsync();
    dynamic data = JsonConvert.DeserializeObject(requestBody);
    string imageUrl = data?.imageUrl;
    string personName = data?.personName;
    string personId = data?.personId;
    #endregion
    #region Cognitive Services Calls
    FaceClient faceClient = new FaceClient(new ApiKeyServiceClientCredentials(subscriptionKey), new System.Net.Http.DelegatingHandler[] { })
    {
        Endpoint = faceEndpoint
    };
    //Sample Person Group is created at first run for demo purposes.
    //await faceClient.PersonGroup.CreateAsync(PersonGroupId, PersonGroupId);
    PersonGroup humanGroup = await faceClient.PersonGroup.GetAsync(PersonGroupId);
    Person human = null;
    if (string.IsNullOrEmpty(personId))
    {
        // No id supplied: create a new person with the given name.
        human = await faceClient.PersonGroupPerson.CreateAsync(humanGroup.PersonGroupId, personName);
    }
    else
    {
        human = await faceClient.PersonGroupPerson.GetAsync(humanGroup.PersonGroupId, new System.Guid(personId));
    }
    // Register the face image and retrain so identification picks it up.
    PersistedFace face = await faceClient.PersonGroupPerson.AddFaceFromUrlAsync(humanGroup.PersonGroupId, human.PersonId, imageUrl);
    await faceClient.PersonGroup.TrainAsync(PersonGroupId);
    #endregion
    #region Return JSON
    var myObj = new { faceId = face.PersistedFaceId, personId = human.PersonId };
    var jsonToReturn = JsonConvert.SerializeObject(myObj);
    return(new HttpResponseMessage(HttpStatusCode.OK)
    {
        Content = new StringContent(jsonToReturn, Encoding.UTF8, "application/json")
    });
    #endregion
}
// One-off debug helper: synchronously trains the configured person group and
// writes any failure message to the console.
public static void JustDebug()
{
    try
    {
        var credentials = new ApiKeyServiceClientCredentials(Helper.MyConstants.FACESUBSCRIPTIONKEY);
        var handlers = new System.Net.Http.DelegatingHandler[] { };
        FaceClient faceClient = new FaceClient(credentials, handlers)
        {
            Endpoint = Helper.MyConstants.FACEENDPOINT
        };
        // Blocking wait is acceptable in this debug-only entry point.
        faceClient.PersonGroup.TrainAsync(Helper.MyConstants.PERSONGROUPID).GetAwaiter().GetResult();
    }
    catch (Exception ex)
    {
        Console.WriteLine(ex.Message);
    }
}
// Console entry point: builds Face and Computer Vision clients, then kicks
// off remote face detection with a 5-second wait cap.
static void Main(string[] args)
{
    Console.WriteLine("Hello World!");
    FaceClient faceClient = new FaceClient(new Microsoft.Azure.CognitiveServices.Vision.Face.ApiKeyServiceClientCredentials(faceSubscriptionKey), new System.Net.Http.DelegatingHandler[] { });
    faceClient.Endpoint = faceEndpoint;
    var computerVision = new ComputerVisionClient(
        new Microsoft.Azure.CognitiveServices.Vision.ComputerVision.ApiKeyServiceClientCredentials(visionSubscriptionKey),
        new System.Net.Http.DelegatingHandler[] { });
    // NOTE(review): the Computer Vision client is pointed at the FACE endpoint —
    // confirm the two services share a resource, otherwise this is a bug.
    computerVision.Endpoint = faceEndpoint;
    Console.WriteLine("Faces being detected ...");
    var t1 = DetectRemoteAsync(faceClient, remoteImageUrl);
    //var t2 = DetectLocalAsync(faceClient, localImagePath);
    // Wait at most 5 seconds for detection to finish.
    Task.WhenAll(t1).Wait(5000);
    Console.WriteLine("Press any key to exit");
    Console.ReadLine();
}
// Detects faces in the stream and identifies them against the configured
// Group, returning a FaceData (age/gender of the last face plus the top
// candidate's person id), or null when detection/identification fails.
private static async Task<FaceData> DetectStream(FaceClient faceClient, Stream stream)
{
    try
    {
        var faceList = await faceClient.Face.DetectWithStreamAsync(
            stream, true, false, FaceAttributes);
        var faces = faceList.Where(w => w.FaceId != null)
            .Select(s => s.FaceId.Value).ToList();
        var identify = await faceClient.Face.IdentifyWithHttpMessagesAsync(faces, Group);
        var result = identify.Body.FirstOrDefault();
        if (result == null)
        {
            return null;
        }
        // FIX: the original called result.Candidates.First(), which throws an
        // InvalidOperationException (NOT caught by the APIErrorException handler
        // below) whenever identification produced no candidates.
        var candidate = result.Candidates.FirstOrDefault();
        if (candidate == null)
        {
            return null;
        }
        var data = new FaceData();
        // NOTE: only the last face's attributes survive this loop, matching
        // the original behavior.
        foreach (var face in faceList)
        {
            data.Age = face.FaceAttributes.Age;
            data.Gender = face.FaceAttributes.Gender.ToString();
            data.FaceId = candidate.PersonId.ToString();
        }
        return data;
    }
    catch (APIErrorException)
    {
        // Service-side failure: signal "no face" to the caller.
        return null;
    }
}
/// <summary>
/// Loads stored settings, instantiates FaceServiceClient, and generates a GUID for FaceList if one didn't previously exist.
/// Reads/creates FaceSettings.xml in local app data; _listKey holds the FaceList GUID.
/// </summary>
private static async Task LoadSettingsAsync()
{
    _faceClient = new FaceClient(new ApiKeyServiceClientCredentials(((App)Application.Current).AppSettings.FaceApiKey))
    {
        Endpoint = ((App)Application.Current).AppSettings.FaceApiEndpoint
    };
    if (await ApplicationData.Current.LocalFolder.TryGetItemAsync("FaceSettings.xml") != null)
    {
        // Settings file exists: read the stored list key.
        StorageFile SettingsInfo = await StorageFile.GetFileFromApplicationUriAsync(new Uri("ms-appdata:///local/FaceSettings.xml"));
        var SettingsDocument = await XmlDocument.LoadFromFileAsync(SettingsInfo);
        _listKey = SettingsDocument.GetElementsByTagName("ListKey").FirstOrDefault <IXmlNode>().Attributes.GetNamedItem("Key").InnerText;
        if (_listKey == "")
        {
            // File existed but never held a key: mint one and persist it.
            _listKey = Guid.NewGuid().ToString();
            await SaveSettingsAsync();
        }
    }
    else
    {
        // First run: create the settings file with a freshly generated key.
        XmlDocument SettingsDocument = new XmlDocument();
        var DefaultXml = "<FacialVerificationSettings>\n" +
            "\t<ListKey Key =\"\"/>\n" +
            "</FacialVerificationSettings>\n";
        SettingsDocument.LoadXml(DefaultXml);
        _listKey = Guid.NewGuid().ToString();
        SettingsDocument.GetElementsByTagName("ListKey").FirstOrDefault <IXmlNode>().Attributes.GetNamedItem("Key").InnerText = _listKey;
        StorageFile SettingsInfo = await ApplicationData.Current.LocalFolder.CreateFileAsync("FaceSettings.xml");
        await SettingsDocument.SaveToFileAsync(SettingsInfo);
    }
}
// Trains the given person group, showing progress in a dialog and logging
// the request/response; closes the activity on success.
private async void ExecuteTrainPersonGroup(string mPersonGroupId)
{
    string trainedGroupId = string.Empty;
    mProgressDialog.Show();
    AddLog("Request: Training group " + mPersonGroupId);
    try
    {
        var client = new FaceClient();
        mProgressDialog.SetMessage("Training person group...");
        SetInfo("Training person group...");
        await client.TrainPersonGroup(mPersonGroupId);
        trainedGroupId = mPersonGroupId;
    }
    catch (Java.Lang.Exception ex)
    {
        // Null marks failure for the UI callback below.
        trainedGroupId = null;
        AddLog(ex.Message);
    }
    RunOnUiThread(() =>
    {
        mProgressDialog.Dismiss();
        if (trainedGroupId != null)
        {
            AddLog("Response: Success. Group " + trainedGroupId + " training completed");
            Finish();
        }
    });
}
// Runs face detection against a remote image and returns the counters
// (male, female, total) accumulated by the detection pipeline.
// Waits at most 5 seconds for the async detection to complete.
public static Tuple <int, int, int> Process(ILogger log, string remoteImageUrl, string cognitiveServicesKey, string endPoint)
{
    log.LogInformation("Welcome to Face API...");
    MyKeys mykey = new MyKeys(cognitiveServicesKey, endPoint);
    string subscriptionkey = mykey.Subscriptionkey;
    string faceEndpoint = mykey.FaceEndpoint;
    // NOTE(review): logging the subscription key leaks a secret into logs.
    log.LogInformation(string.Format("subsKey {0}", subscriptionkey));
    log.LogInformation(string.Format("EndKey {0}", faceEndpoint));
    FaceClient faceClient = new FaceClient(
        new ApiKeyServiceClientCredentials(subscriptionkey),
        new System.Net.Http.DelegatingHandler[] { });
    faceClient.Endpoint = faceEndpoint;
    log.LogInformation("Face being detected ...");
    var t1 = DetectedRemoteAsync(log, faceClient, remoteImageUrl);
    //var t2 = DetectLocalAsync(faceClient, localImagePath);
    // Bounded wait; counters may be partial if detection exceeds 5s.
    Task.WhenAll(t1).Wait(5000);
    return(new Tuple <int, int, int>(maleDetected, femaleDetected, totalFaceDetected));
}
// Deletes the given person group, showing progress in a dialog and logging
// the request/response.
private async void ExecuteDeletePersonGroup(string mPersonGroupId)
{
    string deletedGroupId = string.Empty;
    mProgressDialog.Show();
    AddLog("Request: Delete Group " + mPersonGroupId);
    try
    {
        var client = new FaceClient();
        mProgressDialog.SetMessage("Deleting selected person groups...");
        SetInfo("Deleting selected person groups...");
        await client.DeletePersonGroup(mPersonGroupId);
        deletedGroupId = mPersonGroupId;
    }
    catch (Java.Lang.Exception ex)
    {
        // Null marks failure for the UI callback below.
        deletedGroupId = null;
        AddLog(ex.Message);
    }
    RunOnUiThread(() =>
    {
        mProgressDialog.Dismiss();
        if (deletedGroupId != null)
        {
            SetInfo("Person group " + deletedGroupId + " successfully deleted");
            AddLog("Response: Success. Deleting Group " + deletedGroupId + " succeed");
        }
    });
}
// Deletes a person from a person group, then redirects back to the group's
// index page.
// FIX: the original ignored its model-bound personId parameter and re-read
// the raw value from Request["personId"]; the parameter carries the same
// request value and is the idiomatic MVC source.
public async Task <ActionResult> Delete(string id, string personId)
{
    var personGuid = Guid.Parse(personId);
    await FaceClient.DeletePersonAsync(id, personGuid);
    return(RedirectToAction("Index", new { id = id }));
}
// Detects faces in the image at the given URL using the shared attribute set.
// Face ids are returned; landmarks are not.
public async Task <IList <DetectedFace> > GetDetectFaceWitUrl(string imageUrl)
{
    client = GetClient();
    var detected = await client.Face.DetectWithUrlAsync(imageUrl, true, false, Settings.FaceAttributes);
    return detected;
}
// Detects faces in the image stream and builds a human-readable report with
// each face's rectangle, gender, age, smile, and glasses attributes.
private async Task <String> ExtractFace(Stream image)
{
    String responseString = string.Empty;
    using (var client = new FaceClient(new ApiKeyServiceClientCredentials(ApiKey)) { Endpoint = Endpoint })
    {
        var attributes = new FaceAttributeType[]
        {
            FaceAttributeType.Gender,
            FaceAttributeType.Age,
            FaceAttributeType.Smile,
            FaceAttributeType.Glasses
        };
        var detectedFaces = await client.Face.DetectWithStreamAsync(image, returnFaceAttributes : attributes);
        // FIX: also handle a null result — the original's `?.Count == 0` is
        // false for null, which then crashed the foreach with an NRE.
        if (detectedFaces == null || detectedFaces.Count == 0)
        {
            responseString = ">No faces detected from image.";
        }
        else
        {
            foreach (var face in detectedFaces)
            {
                var rect = face.FaceRectangle;
                // FIX: was '=' — every face after the first overwrote the
                // previous face's text instead of appending to it.
                responseString += $">Rectangle: {rect.Left} {rect.Top} {rect.Width} {rect.Height}\n";
                responseString += $">Gender: {face.FaceAttributes.Gender}\n";
                responseString += $">Age: {face.FaceAttributes.Age}\n";
                responseString += $">Smile: {face.FaceAttributes.Smile}\n";
                responseString += $">Glasses: {face.FaceAttributes.Glasses}\n";
            }
        }
    }
    return(responseString);
}
// Detects faces in a local image stream using the shared attribute set.
// Face ids are returned; landmarks are not.
public async Task <IList <DetectedFace> > GetDetectFaceWitLocalImage(Stream image)
{
    client = GetClient();
    var detected = await client.Face.DetectWithStreamAsync(image, true, false, Settings.FaceAttributes);
    return detected;
}
// Detect faces in a local image file and print/log the first face's
// attributes; API errors are written to the console, not rethrown.
private async Task DetectLocalAsync(FaceClient faceClient, string imagePath, List <FaceAttributes> attribs)
{
    if (!File.Exists(imagePath))
    {
        Console.WriteLine(
            "\nUnable to open or read localImagePath:\n{0} \n", imagePath);
        return;
    }
    try
    {
        using (Stream imageStream = new FileStream(imagePath, FileMode.Open, FileAccess.Read, FileShare.Read))
        {
            // NOTE(review): the API call uses the class-level `faceAttributes`;
            // the `attribs` parameter is only used for display formatting below —
            // confirm this split is intentional.
            IList <DetectedFace> faceList = await faceClient.Face.DetectWithStreamAsync(
                image : imageStream,
                returnFaceId : true,
                returnFaceLandmarks : false,
                returnFaceAttributes : faceAttributes);
            // Report only the first detected face (null when none found).
            string a = string.Format("{0}:\n{1}\n",
                imagePath.Split(new char[] { '\\' }, StringSplitOptions.RemoveEmptyEntries).Last(),
                GetFaceAttributes(faceList.Count > 0 ? faceList[0] : null, attribs));
            Console.WriteLine(a);
            UpdateLog(a);
        }
    }
    catch (APIErrorException e)
    {
        Console.WriteLine(imagePath + ": " + e.Message);
    }
}
/// <summary>
/// Add faces to person group.
/// </summary>
/// <param name="subscriptionKey">The subscription key.</param>
/// <param name="endpoint">The Azure service endpoint.</param>
/// <param name="groupId">The group id.</param>
/// <param name="directory">The directory in which to look for images (organize by per-person subdirectories).</param>
/// <param name="loggingCallback">An optional callback to which human-friendly information will be logged.</param>
/// <param name="throttleMs">The time to wait between calls.</param>
/// <returns>Async task.</returns>
public static async Task AddFaces(string subscriptionKey, string endpoint, string groupId, string directory, Action <string> loggingCallback = null, int throttleMs = 5000)
{
    using (var client = new FaceClient(new ApiKeyServiceClientCredentials(subscriptionKey)) { Endpoint = endpoint })
    {
        // Each subdirectory name is treated as a person's name.
        foreach (var sub in Directory.GetDirectories(directory))
        {
            var name = Path.GetFileName(sub);
            loggingCallback?.Invoke($"Adding person '{name}'");
            var person = await client.PersonGroupPerson.CreateAsync(groupId, name);
            await Task.Delay(throttleMs); // rate-limit between service calls
            foreach (var file in Directory.GetFiles(sub))
            {
                try
                {
                    // FIX: the original never disposed the FileStream returned
                    // by File.OpenRead, leaking a file handle per image.
                    using (var imageStream = File.OpenRead(file))
                    {
                        var face = await client.PersonGroupPerson.AddFaceFromStreamAsync(groupId, person.PersonId, imageStream);
                        loggingCallback?.Invoke($"  Face {Path.GetFileName(file)} ({face.PersistedFaceId})");
                    }
                    await Task.Delay(throttleMs);
                }
                catch (APIErrorException ex)
                {
                    loggingCallback?.Invoke($"  ERROR: {Path.GetFileName(file)} ERROR: {ex.Body.Error.Message}"); // e.g. more than single face
                }
            }
        }
    }
}
// Detect faces in a local image file and display their attributes.
// API errors are written to the console rather than rethrown.
private static async Task DetectLocalAsync(FaceClient faceClient, string imagePath)
{
    if (!File.Exists(imagePath))
    {
        Console.WriteLine(
            "\nUnable to open or read localImagePath:\n{0} \n", imagePath);
        return;
    }
    try
    {
        using (Stream imageStream = File.OpenRead(imagePath))
        {
            // returnFaceId = true, returnFaceLandmarks = false.
            var detected = await faceClient.Face.DetectWithStreamAsync(
                imageStream, true, false, faceAttributes);
            var attributes = GetFaceAttributes(detected, imagePath);
            DisplayAttributes(attributes, imagePath);
        }
    }
    catch (APIErrorException ex)
    {
        Console.WriteLine(imagePath + ": " + ex.Message);
    }
}
/// <summary>
/// Creates a person group.
/// </summary>
/// <param name="subscriptionKey">The subscription key.</param>
/// <param name="endpoint">The Azure service endpoint.</param>
/// <param name="groupId">The group id.</param>
/// <param name="groupName">The group name.</param>
/// <param name="deleteExisting">Whether to delete existing group (if any).</param>
/// <param name="loggingCallback">An optional callback to which human-friendly information will be logged.</param>
/// <param name="throttleMs">The time to wait between calls.</param>
/// <returns>Async task.</returns>
public static async Task Create(string subscriptionKey, string endpoint, string groupId, string groupName, bool deleteExisting, Action <string> loggingCallback = null, int throttleMs = 5000)
{
    using (var client = new FaceClient(new ApiKeyServiceClientCredentials(subscriptionKey)) { Endpoint = endpoint })
    {
        loggingCallback?.Invoke($"Checking whether person group exists ({groupId}, {groupName})");
        bool groupExists = (await client.PersonGroup.ListAsync()).Where(g => g.PersonGroupId == groupId).Count() != 0;
        await Task.Delay(throttleMs); // rate-limit between service calls
        if (groupExists && deleteExisting)
        {
            loggingCallback?.Invoke($"Deleting existing person group ({groupId}, {groupName})");
            await client.PersonGroup.DeleteAsync(groupId);
            await Task.Delay(throttleMs);
        }
        // Create when the group is absent, or was just deleted above.
        if (!groupExists || deleteExisting)
        {
            loggingCallback?.Invoke($"Creating person group ({groupId}, {groupName})");
            await client.PersonGroup.CreateAsync(groupId, groupName, recognitionModel : RecognitionModel);
            await Task.Delay(throttleMs);
        }
    }
}
// Sample: detects one face per target image, then finds which of those faces
// are similar to the first face of the source image and prints confidences.
public static async Task Run(string endpoint, string key)
{
    Console.WriteLine("Sample of finding similar faces in face ids.");
    IFaceClient client = new FaceClient(new ApiKeyServiceClientCredentials(key)) { Endpoint = endpoint };
    const string ImageUrlPrefix = "https://csdx.blob.core.windows.net/resources/Face/Images/";
    List <string> targetImageFileNames = new List <string>
    {
        "Family1-Dad1.jpg", "Family1-Daughter1.jpg", "Family1-Mom1.jpg", "Family1-Son1.jpg",
        "Family2-Lady1.jpg", "Family2-Man1.jpg", "Family3-Lady1.jpg", "Family3-Man1.jpg"
    };
    string sourceImageFileName = "findsimilar.jpg";
    IList <Guid?> targetFaceIds = new List <Guid?>();
    foreach (var targetImageFileName in targetImageFileNames)
    {
        // Detect faces from target image url.
        var faces = await Common.DetectFaces(client, $"{ImageUrlPrefix}{targetImageFileName}");
        // Add detected faceId to targetFaceIds.
        // NOTE(review): faces[0] assumes every target image contains at least
        // one face — true for these sample images, but would throw otherwise.
        targetFaceIds.Add(faces[0].FaceId.Value);
    }
    // Detect faces from source image url.
    IList <DetectedFace> detectedFaces = await Common.DetectFaces(
        client, $"{ImageUrlPrefix}{sourceImageFileName}");
    // Find similar example of faceId to faceIds.
    IList <SimilarFace> similarResults = await client.Face.FindSimilarAsync(
        detectedFaces[0].FaceId.Value, null, null, targetFaceIds);
    if (similarResults.Count == 0)
    {
        Console.WriteLine($"No similar faces to {sourceImageFileName}.");
    }
    foreach (var similarResult in similarResults)
    {
        Console.WriteLine(
            $"Faces from {sourceImageFileName} & {similarResult.FaceId} are similar with confidence: {similarResult.Confidence}.");
    }
    Console.WriteLine();
}
/// <summary>
/// Deletes a person group.
/// (NOTE(review): the original summary said "Creates" — copy/paste slip.)
/// </summary>
/// <param name="subscriptionKey">The subscription key.</param>
/// <param name="endpoint">The Azure service endpoint.</param>
/// <param name="groupId">The group id.</param>
/// <param name="loggingCallback">An optional callback to which human-friendly information will be logged.</param>
/// <param name="throttleMs">The time to wait between calls.</param>
/// <returns>Async task.</returns>
public static async Task Delete(string subscriptionKey, string endpoint, string groupId, Action <string> loggingCallback = null, int throttleMs = 5000)
{
    using (var client = new FaceClient(new ApiKeyServiceClientCredentials(subscriptionKey)) { Endpoint = endpoint })
    {
        try
        {
            loggingCallback?.Invoke($"Deleting person group ('{groupId}')." );
            await client.PersonGroup.DeleteAsync(groupId);
            loggingCallback?.Invoke($"Done.");
        }
        catch (APIErrorException ex)
        {
            if (ex.Response.ReasonPhrase == "Not Found")
            {
                loggingCallback?.Invoke($"Person group does not exist ('{groupId}').");
            }
            // NOTE(review): the exception is rethrown even in the Not Found
            // case above — confirm whether "already gone" should be fatal.
            throw;
        }
        await Task.Delay(throttleMs); // rate-limit between service calls
    }
}
// HTTP-triggered Azure Function: removes every person from the configured
// person group, retrains the (now empty) group, and reports success.
public static async Task <IActionResult> Run(
    [HttpTrigger(AuthorizationLevel.Function, "get", Route = null)] HttpRequest req,
    ILogger log)
{
    log.LogInformation("C# HTTP trigger function processed a request.");

    // Configuration comes from app settings / environment.
    var subscriptionKey = Environment.GetEnvironmentVariable("subscriptionkey");
    var faceEndpoint = Environment.GetEnvironmentVariable("faceEndpoint");
    var personGroupId = Environment.GetEnvironmentVariable("personGroupId");

    var faceClient = new FaceClient(
        new ApiKeyServiceClientCredentials(subscriptionKey),
        new System.Net.Http.DelegatingHandler[] { })
    {
        Endpoint = faceEndpoint
    };

    // Delete every person currently enrolled in the group.
    var persons = await faceClient.PersonGroupPerson.ListAsync(personGroupId);
    foreach (var person in persons)
    {
        await faceClient.PersonGroupPerson.DeleteAsync(personGroupId, person.PersonId);
    }

    // Retrain so the service state matches the emptied group.
    await faceClient.PersonGroup.TrainAsync(personGroupId);
    return((ActionResult) new OkObjectResult($"All data deleted"));
}
// Enrolls a new user's face: creates a person in the group, registers three
// face images, retrains the group, and persists the user (with the main image
// as avatar) to the database.
public async Task FaceAuth([FromForm] AddFaceModel model)
{
    var faceClient = new FaceClient(
        new ApiKeyServiceClientCredentials(SubscriptionKey),
        new System.Net.Http.DelegatingHandler[] { });
    faceClient.Endpoint = FaceEndpoint;
    // await faceClient.PersonGroup.CreateAsync(Group, Group);
    using (var main = new MemoryStream())
    using (var modelMain = model.Main.OpenReadStream())
    using (var modelOne = model.One.OpenReadStream())
    using (var modelTwo = model.Two.OpenReadStream())
    {
        // Buffer the main image so it can be reused as the avatar below.
        modelMain.CopyTo(main);
        main.Seek(0, 0);
        var person = await faceClient.PersonGroupPerson.CreateAsync(Group, Guid.NewGuid().ToString());
        // A fresh stream is opened for the upload because modelMain was
        // consumed by the CopyTo above.
        await faceClient.PersonGroupPerson.AddFaceFromStreamAsync(Group, person.PersonId, model.Main.OpenReadStream());
        await faceClient.PersonGroupPerson.AddFaceFromStreamAsync(Group, person.PersonId, modelOne);
        await faceClient.PersonGroupPerson.AddFaceFromStreamAsync(Group, person.PersonId, modelTwo);
        await faceClient.PersonGroup.TrainAsync(Group);
        var user = AppUser.WithFace(person.PersonId.ToString(), "");
        user.Name = model.Name;
        user.Passport = model.Passport;
        main.Seek(0, 0);
        // FIX: GetBuffer() returns the raw backing array, which is usually
        // larger than the written data — the avatar would carry trailing
        // garbage bytes. ToArray() copies exactly the bytes written.
        user.Avatar = main.ToArray();
        await _appDb.AddAsync(user);
        await _appDb.SaveChangesAsync();
    }
}