private async void BtnRecognize_Click(object sender, RoutedEventArgs e)
{
    // Take a photo of the viewer, identify them with the Face API, greet them
    // in the status line, then delete the temporary photo.
    // async void is acceptable here: top-level UI event handler.
    try
    {
        // BUG FIX: the original showed "Recognizing..." both before and after
        // the capture; the first status should describe the capture step
        // (consistent with BtnTrainPerson_Click).
        txtStatus.Text = "Taking picture...";

        // take their photo
        StorageFile photoFile = await Webcam.TakePhoto();

        txtStatus.Text = "Recognizing...";

        // use Cognitive Services to identify them
        string name = await FaceAPI.GetViewerName(photoFile, txtGroupId.Text);
        txtStatus.Text = "Greetings " + name;

        // delete photo taken of viewer
        await photoFile.DeleteAsync();
    }
    catch (Exception ex)
    {
        // surface the failure in place of the waiting text
        txtStatus.Text = ex.Message;
    }
}
private async Task RecognizeAndGreetViewer()
{
    // Take a photo of the viewer, identify them via the Face API, and greet
    // them on screen. IsRecognizing guards against re-entry while a
    // recognition pass is in flight.
    this.IsRecognizing = true;
    StorageFile photoFile = null;
    try
    {
        // let viewer know we're taking picture
        await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () => InterruptText.Text = "Taking picture...");

        // take their photo
        photoFile = await Webcam.TakePhoto();

        // let viewer know we're recognizing them
        await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () => InterruptText.Text = "Recognizing...");

        // use Cognitive Services to identify them
        string name = await FaceAPI.GetViewerName(photoFile);

        await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
        {
            // clear waiting text
            InterruptText.Text = "";

            // greet viewer on screen
            GpioStatus.Text = "Greetings, " + name;
        });
    }
    finally
    {
        // FIX: originally this cleanup ran only on success — any thrown await
        // left IsRecognizing stuck true, the interrupt never re-armed, and the
        // photo undeleted. finally guarantees cleanup while still letting the
        // exception propagate to the caller.

        // clear the interrupt flag so that interrupt can occur again
        vncl4010Sensor.ClearInterruptFlag();

        // delete photo taken of viewer (may be null if TakePhoto threw)
        if (photoFile != null)
        {
            await photoFile.DeleteAsync();
        }

        this.IsRecognizing = false;
    }
}
private async void BtnTrainPerson_Click(object sender, RoutedEventArgs e)
{
    // Capture a photo and register it as a face for the person entered in the
    // group/person text boxes, then refresh the people list.
    try
    {
        txtStatus.Text = "Taking picture...";
        var photo = await Webcam.TakePhoto();

        txtStatus.Text = "Adding face...";
        var personId = new Guid(txtPersonId.Text);
        var persistedFace = await FaceAPI.AddFace(photo, txtGroupId.Text, personId);

        // An empty status means success; otherwise report that no face was found.
        txtStatus.Text = persistedFace == null ? "No face detected" : "";

        // Remove the temporary photo of the viewer.
        await photo.DeleteAsync();

        this.lstPeople.ItemsSource = await FaceAPI.GetPeopleByGroup(txtGroupId.Text);
    }
    catch (Exception ex)
    {
        // Show the failure in place of the waiting text.
        txtStatus.Text = ex.Message;
    }
}
private async void BtnTrainUrl_Click(object sender, RoutedEventArgs e)
{
    // Register the image at the entered URL as a face for the person entered
    // in the group/person text boxes, then refresh the people list.
    try
    {
        txtStatus.Text = "Adding face...";
        var personId = new Guid(txtPersonId.Text);
        var persistedFace = await FaceAPI.AddFace(txtUrl.Text, txtGroupId.Text, personId);

        // An empty status means success; otherwise report that no face was found.
        txtStatus.Text = persistedFace == null ? "No face detected" : "";

        this.lstPeople.ItemsSource = await FaceAPI.GetPeopleByGroup(txtGroupId.Text);
    }
    catch (Exception ex)
    {
        // Show the failure in place of the waiting text.
        txtStatus.Text = ex.Message;
    }
}
// Use this for initialization
void Start()
{
    // Wire up the FaceAPI helper, copying this component's inspector-set
    // configuration (endpoint, key, person group, rate limit) into it.
    faceAPI = new FaceAPI
    {
        parent = this,
        faceAPIEndPoint = faceAPIEndPoint,
        faceAPIKey = faceAPIKey,
        personGroupId = personGroupId,
        rateLimit = Limit
    };
}
public FaceFinderViewModel()
{
    // Command bound to the UI that captures and analyzes a photo.
    TakePhotoCommand = new Command(async () => await TakePhoto());

    // Face API client authenticated with the configured key and region.
    var credentials = new ApiKeyServiceClientCredentials(ApiKeys.FaceApiKey);
    _FaceFinder = new FaceAPI(credentials);
    _FaceFinder.AzureRegion = ApiKeys.FaceApiRegion;
}
protected AzureServiceBase()
{
    // Client for the Azure Functions backend.
    Client = new MobileServiceClient(FunctionAppUrl);

    // Deliberate compile stop: the build fails until a real Face API key is
    // substituted for the placeholder string below. Do not remove without
    // replacing the key.
    #error REPLACE [YOUR API KEY HERE]
    var creds = new ApiKeyServiceClientCredentials("[YOUR API KEY HERE]");

    // Face API client pinned to the West Europe region.
    faceApi = new FaceAPI(creds) { AzureRegion = AzureRegions.Westeurope };
}
private async void BtnGetPeople_Click(object sender, RoutedEventArgs e)
{
    // Refresh the list of people registered in the current person group.
    try
    {
        var people = await FaceAPI.GetPeopleByGroup(txtGroupId.Text);
        this.lstPeople.ItemsSource = people;
    }
    catch (Exception ex)
    {
        // Show the failure in the status line.
        txtStatus.Text = ex.Message;
    }
}
private async void BtnCreatePerson_Click(object sender, RoutedEventArgs e)
{
    // Create a new person in the current group, refresh the people list, and
    // show the new person's id so faces can be added to it.
    try
    {
        var newPerson = await FaceAPI.CreatePerson(txtGroupId.Text, txtPersonName.Text);
        this.lstPeople.ItemsSource = await FaceAPI.GetPeopleByGroup(txtGroupId.Text);
        txtPersonId.Text = newPerson.PersonId.ToString();

        // Clear the waiting text.
        txtStatus.Text = "";
    }
    catch (Exception ex)
    {
        // Show the failure in place of the waiting text.
        txtStatus.Text = ex.Message;
    }
}
void Start()
{
    // Reset state and kick off asynchronous camera creation.
    mahalo = false;
    cameraReady = false;
    Debug.Log("Camera is OFF");
    PhotoCapture.CreateAsync(true, OnPhotoCaptureCreated);

    // Pick the richest FaceAPI constructor the configuration allows:
    // key + person group, key only, or parameterless.
    bool hasKey = !string.IsNullOrEmpty(this.subscription_key);
    bool hasGroup = !string.IsNullOrEmpty(this.personGroupId);
    if (hasKey && hasGroup)
    {
        this.faceAPIClient = new FaceAPI(subscription_key, personGroupId);
    }
    else if (hasKey)
    {
        this.faceAPIClient = new FaceAPI(subscription_key);
    }
    else
    {
        this.faceAPIClient = new FaceAPI();
    }
}
private void InitIfRequired()
{
    // Lazily create each Cognitive Services client on first use.
    if (_client == null)
    {
        var credentials = new ApiKeyServiceClientCredentials(ApiKeys.ContentModeratorKey);
        _client = new ContentModeratorClient(credentials);
    }

    if (_faceApi == null)
    {
        var credentials = new ApiKeyServiceClientCredentials(ApiKeys.FaceApiKey);
        _faceApi = new FaceAPI(credentials);
        _faceApi.AzureRegion = ApiKeys.FaceApiRegion;
    }

    if (customVisionPredictionClient == null)
    {
        // FIX: the original ended in `} } ;` — the assignment lacked its
        // terminating semicolon and a stray empty statement followed the if.
        customVisionPredictionClient = new CustomVisionPredictionClient
        {
            ApiKey = ApiKeys.PredictionKey,
            Endpoint = "https://jfversluis-customvisionsample.cognitiveservices.azure.com/"
        };
    }
}
private void InitIfRequired()
{
    // Lazily create each Cognitive Services client on first use.
    if (_client == null)
    {
        var credentials = new ApiKeyServiceClientCredentials(ApiKeys.ContentModeratorKey);
        _client = new ContentModeratorClient(credentials);
        _client.BaseUrl = ApiKeys.ContentModeratorBaseUrl;
    }

    if (_faceApi == null)
    {
        var credentials = new ApiKeyServiceClientCredentials(ApiKeys.FaceApiKey);
        _faceApi = new FaceAPI(credentials);
        _faceApi.AzureRegion = ApiKeys.FaceApiRegion;
    }

    if (_endpoint == null)
    {
        // FIX: the original ended in `} } ;` — the assignment lacked its
        // terminating semicolon and a stray empty statement followed the if.
        _endpoint = new PredictionEndpoint
        {
            ApiKey = ApiKeys.PredictionKey
        };
    }
}
private async Task RecognizeAndGreetViewerUsingBlogStorage()
{
    // Blob-storage variant of the greet flow: the photo is uploaded rather
    // than kept (and deleted) locally.

    // Tell the viewer a picture is being taken.
    await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () => InterruptText.Text = "Taking picture...");

    // Capture and upload the photo.
    var photo = await Webcam.TakePhotoAndUploadToTeBlobStorage();

    // Tell the viewer recognition is in progress.
    await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () => InterruptText.Text = "Recognizing...");

    // Identify the viewer via Cognitive Services.
    var viewerName = await FaceAPI.GetViewerName(photo);

    await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
    {
        // Clear the waiting text and greet the viewer on screen.
        InterruptText.Text = "";
        GpioStatus.Text = "Greetings, " + viewerName;
    });

    // Re-arm the proximity sensor so the next viewer triggers recognition.
    vncl4010Sensor.ClearInterruptFlag();
}
private async Task MessageReceivedAsync(IDialogContext context, IAwaitable<IMessageActivity> argument)
{
    // Expect a face photo attachment: analyze it with the Face API, fill in
    // this.user's attributes and derived Thai description strings, then prompt
    // the gender-confirmation quiz. Any other input decrements `attempts` and
    // re-prompts; when attempts reaches 0 the dialog fails.
    var message = await argument;
    if (message.Attachments != null && message.Attachments.Count > 0)
    {
        var attachment = message.Attachments[0];
        if (attachment.ContentType == "image/png" || attachment.ContentType == "image/jpeg")
        {
            dynamic json = await FaceAPI.GetFaceAPIJson(attachment.ContentUrl);
            this.user.photoUrl = await AzureBlob.UploadPhoto(attachment.ContentUrl, attachment.ContentType);
            if (json.Count > 0)
            {
                // Copy the first detected face's attributes onto the user.
                var face = json[0]["faceAttributes"];
                this.user.gender = face["gender"].ToString();
                this.user.age = decimal.Parse(face["age"].ToString());
                this.user.smile = decimal.Parse(face["smile"].ToString());
                this.user.glasses = face["glasses"].ToString();
                this.user.anger = decimal.Parse(face["emotion"]["anger"].ToString());
                this.user.eyeMakeup = Convert.ToBoolean(face["makeup"]["eyeMakeup"].ToString());
                this.user.lipMakeup = Convert.ToBoolean(face["makeup"]["lipMakeup"].ToString());
                this.user.hair = face["hair"].ToString();
                // BUG FIX: `bald` was parsed from the "smile" score; read the
                // Face API's hair.bald confidence instead.
                this.user.bald = decimal.Parse(face["hair"]["bald"].ToString());
                var hairColor = face["hair"]["hairColor"];
                if (hairColor.Count > 0)
                {
                    // hairColor entries are ordered by confidence; take the top one.
                    this.user.hairColor = hairColor[0]["color"].ToString();
                }
                else
                {
                    this.user.hairColor = "";
                }
                this.user.moustache = decimal.Parse(face["facialHair"]["moustache"].ToString());
                this.user.beard = decimal.Parse(face["facialHair"]["beard"].ToString());
                this.user.emotion = face["emotion"].ToString();

                // Derived, user-facing description strings (Thai).
                if (this.user.gender == "male")
                {
                    this.user.genderThai = "ท่านหมื่น";
                }
                else
                {
                    this.user.genderThai = "แม่หญิง";
                }
                if (this.user.eyeMakeup || this.user.lipMakeup)
                {
                    this.user.makeupStr = "ชอบการแต่งตัว";
                }
                else
                {
                    this.user.makeupStr = "เป็นคนง่ายๆ สบายๆ";
                }
                if (this.user.smile > 0.0M)
                {
                    this.user.smileStr = "รักความสนุกสนาน";
                }
                else
                {
                    if (this.user.eyeMakeup || this.user.lipMakeup)
                    {
                        this.user.smileStr = "ชอบความเรียบง่าย";
                    }
                    else
                    {
                        this.user.smileStr = "ชอบความโก้หรู";
                    }
                }
                if (this.user.anger > 0.7M)
                {
                    this.user.angerStr = "ชอบความปราดเปรียว";
                }
                else
                {
                    this.user.angerStr = "";
                }

                // Ask the user to confirm the detected gender (max 3 retries).
                var quiz = $"ข้าเห็นหน้าออเจ้าแล้ว ออเจ้าเป็น{this.user.genderThai} ใช่หรือไม่";
                PromptDialog.Choice(context, this.OnGenderSelected, yesNoOptions, quiz, "ออเจ้าเลือกไม่ถูกต้อง", 3);
            }
            else
            {
                // No face was found in the image — ask again.
                --attempts;
                await context.PostAsync($"ออเจ้าไม่ได้ส่งรูปใบหน้าของออเจ้ามา ส่งรูปหน้าออเจ้ามาให้ข้าด้วยเถิด");
                context.Wait(MessageReceivedAsync);
            }
        }
        else
        {
            // Attachment is not a PNG/JPEG image — ask again.
            --attempts;
            await context.PostAsync($"ออเจ้าไม่ได้ส่งรูปใบหน้าของออเจ้ามา ส่งรูปหน้าออเจ้ามาให้ข้าด้วยเถิด");
            context.Wait(MessageReceivedAsync);
        }
    }
    else
    {
        // No attachment at all — ask again.
        --attempts;
        await context.PostAsync($"ส่งรูปหน้าออเจ้ามาให้ข้าด้วยเถิด");
        context.Wait(MessageReceivedAsync);
    }
    if (attempts <= 0)
    {
        context.Fail(new TooManyAttemptsException("รูปที่ออเจ้าส่งมาไม่ถูกต้อง"));
    }
}
protected override async void OnNavigatedTo(NavigationEventArgs e)
{
    // On navigation, populate the people list for the current person group.
    base.OnNavigatedTo(e);
    var people = await FaceAPI.GetPeopleByGroup(txtGroupId.Text);
    this.lstPeople.ItemsSource = people;
}
// Per-frame pipeline: Gaussian blur -> background subtraction -> blob
// detection/tracking -> Haar face/eye detection -> per-face identification
// via FaceAPI with on-screen labeling, then display of frame and mask.
// NOTE(review): the Graphics/Pen/Font/SolidBrush created below are never
// disposed, and `scale` averages Width with Width (not Height) — confirm
// whether either is intentional before changing.
void ProcessFrame(object sender, EventArgs e)
{
    Mat frame = _cameraCapture.QueryFrame();
    Mat smoothedFrame = new Mat();
    CvInvoke.GaussianBlur(frame, smoothedFrame, new Size(3, 3), 1); //filter out noises
    //frame._SmoothGaussian(3);

    #region use the BG/FG detector to find the forground mask
    Mat forgroundMask = new Mat();
    _fgDetector.Apply(smoothedFrame, forgroundMask);
    #endregion

    // Extract connected blobs from the foreground mask and track them.
    CvBlobs blobs = new CvBlobs();
    _blobDetector.Detect(forgroundMask.ToImage<Gray, byte>(), blobs);
    blobs.FilterByArea(100, int.MaxValue);
    float scale = (frame.Width + frame.Width) / 2.0f;
    _tracker.Update(blobs, 0.01 * scale, 5, 5);

    // Haar-cascade face and eye detection on the full frame.
    long detectionTime;
    List<Rectangle> faces = new List<Rectangle>();
    List<Rectangle> eyes = new List<Rectangle>();
    IImage image = (IImage)frame; // this cast is the key step
    faceImage = frame.Bitmap;
    DetectFace.Detect(image, "haarcascade_frontalface_default.xml", "haarcascade_eye.xml", faces, eyes, out detectionTime);

    #region Multi-person recognition (disabled) — higher error rate; works well only with a high-resolution gallery
    //Graphics g1 = Graphics.FromImage(frame.Bitmap);
    //List<FaceIdentifyModel> tempList = new List<FaceIdentifyModel>();
    //foreach (Rectangle face in faces)
    //{
    //    Image rectImage1 = ImageHelper.CaptureImage(frame.Bitmap, face);
    //    FaceIdentifyModel MoreIdentifyInfo = FaceAPI.FaceIdentify(rectImage1, tb_Group.Text.Trim(), 1, 1); // face identification — works best for a single person
    //    MoreIdentifyInfo.rect = face;
    //    tempList.Add(MoreIdentifyInfo);
    //}
    //Color color_of_pen1 = Color.Gray;
    //color_of_pen1 = Color.Yellow;
    //Pen pen1 = new Pen(color_of_pen1, 2.0f);
    //Font font1 = new Font("微软雅黑", 16, GraphicsUnit.Pixel);
    //SolidBrush drawBrush1 = new SolidBrush(Color.Yellow);
    //tb_Identify.Text = tempList.ToJson();
    //foreach (var t in tempList)
    //{
    //    g1.DrawRectangle(pen1, t.rect);
    //    if (t.result != null)
    //    {
    //        g1.DrawString(t.result[0].user_info.Replace(",", "\r\n"), font1, drawBrush1, new Point(t.rect.X + 20, t.rect.Y - 20));
    //    }
    //}
    #endregion

    #region Single-person recognition
    // Single-person face identification; accuracy degrades with multiple people.
    foreach (Rectangle face in faces)
    {
        #region Draw our own label box onto the bitmap
        Graphics g = Graphics.FromImage(frame.Bitmap);
        ImageModel tempImage = new ImageModel();
        tempImage.Rect = face;
        tempImage.Image = frame.Bitmap;

        // The detect endpoint is slow — disabled.
        //string faceInfo = FaceAPI.FaceDetect(ImageHelper.CaptureImage(frame.Bitmap, face)); // face detection

        // Crop the face rectangle and identify it against the configured group.
        Image rectImage = ImageHelper.CaptureImage(frame.Bitmap, face);
        FaceIdentifyModel IdentifyInfo = FaceAPI.FaceIdentify(rectImage, tb_Group.Text.Trim(), 1, 1); // face identification — works best for a single person
        // tb_Result.Text = faceInfo;
        tb_Identify.Text = IdentifyInfo.ToJson().ToString();

        // Drawing surface: yellow rectangle + label next to the face.
        Color color_of_pen = Color.Gray;
        color_of_pen = Color.Yellow;
        Pen pen = new Pen(color_of_pen, 2.0f);
        Rectangle rect = face;
        g.DrawRectangle(pen, rect);
        Font font = new Font("微软雅黑", 16, GraphicsUnit.Pixel);
        SolidBrush drawBrush = new SolidBrush(Color.Yellow);
        if (IdentifyInfo != null)
        {
            if (IdentifyInfo.result != null)
            {
                for (int i = 0; i < IdentifyInfo.result.Count; i++)
                {
                    string faceInfo = "";
                    faceInfo = IdentifyInfo.result[i].user_info.Replace(",", "\r\n");
                    // Show the matched user's info above the face rectangle.
                    g.DrawString(faceInfo, font, drawBrush, new Point(face.X + 20, face.Y - 20));
                }
            }
        }
        //CvInvoke.Rectangle(frame, face, new MCvScalar(255.0, 255.0, 255.0), 2);
        //CvInvoke.PutText(frame, faceInfo, new Point(face.X + 20, face.Y - 20), FontFace.HersheyPlain, 1.0, new MCvScalar(255.0, 255.0, 255.0));

        // Save the raw screenshot (disabled).
        //System.Drawing.Image ResourceImage = frame.Bitmap;
        //ResourceImage.Save(saveDir + saveFileName);

        // Queue the face crop so a worker thread saves it off the UI thread.
        QueueHelper.WriteImage(tempImage);

        //t1 = new Thread(new ThreadStart(() =>
        //{
        //    faceInfo = FaceAPI.FaceDetect(ImageHelper.CaptureImage(frame.Bitmap, face));
        //    this.Invoke(new Action(() =>
        //    {
        //        g.DrawString(faceInfo, font, drawBrush, new Point(face.X + 20, face.Y - 20));
        //    }));
        //}));
        //t1.IsBackground = true;
        //t1.Start();
        #endregion
    }
    #endregion

    #region Overlay via the original OpenCV calls — does not support Chinese text (disabled)
    //foreach (var pair in _tracker)
    //{
    //    CvTrack b = pair.Value;
    //    #region Draw the label box directly with OpenCV
    //    CvInvoke.Rectangle(frame, b.BoundingBox, new MCvScalar(255.0, 255.0, 255.0), 2);
    //    CvInvoke.PutText(frame, "man,show", new Point((int)Math.Round(b.Centroid.X), (int)Math.Round(b.Centroid.Y)), FontFace.HersheyPlain, 1.0, new MCvScalar(255.0, 255.0, 255.0));
    //    if (b.BoundingBox.Width < 100 || b.BoundingBox.Height < 50)
    //    {
    //        continue;
    //    }
    //    #endregion
    //}
    #endregion

    imageBox1.Image = frame;
    imageBox2.Image = forgroundMask;
}
public EmotionService()
{
    // Authenticate the Face API client with the app's secret key.
    var credentials = new ApiKeyServiceClientCredentials(App.Secrets.FaceApiKey);
    _faceAPI = new FaceAPI(credentials);
}
// Per-frame pipeline: Gaussian blur -> background subtraction -> blob
// detection/tracking -> Haar face/eye detection -> multi-person
// identification via FaceAPI with on-screen labeling.
void ProcessFrame(object sender, EventArgs e)
{
    Mat frame = _cameraCapture.QueryFrame();
    Mat smoothedFrame = new Mat();
    CvInvoke.GaussianBlur(frame, smoothedFrame, new Size(3, 3), 1); //filter out noises
    //frame._SmoothGaussian(3);

    #region use the BG/FG detector to find the forground mask
    Mat forgroundMask = new Mat();
    _fgDetector.Apply(smoothedFrame, forgroundMask);
    #endregion

    // Extract connected blobs from the foreground mask and track them.
    CvBlobs blobs = new CvBlobs();
    _blobDetector.Detect(forgroundMask.ToImage<Gray, byte>(), blobs);
    blobs.FilterByArea(100, int.MaxValue);
    // NOTE(review): averages Width with Width (not Height) — kept as-is so
    // tracker behavior is unchanged, but this looks like a typo; confirm.
    float scale = (frame.Width + frame.Width) / 2.0f;
    _tracker.Update(blobs, 0.01 * scale, 5, 5);

    // Haar-cascade face and eye detection on the full frame.
    long detectionTime;
    List<Rectangle> faces = new List<Rectangle>();
    List<Rectangle> eyes = new List<Rectangle>();
    IImage image = (IImage)frame; // this cast is the key step
    faceImage = frame.Bitmap;
    DetectFace.Detect(image, "haarcascade_frontalface_default.xml", "haarcascade_eye.xml", faces, eyes, out detectionTime);

    #region Multi-person recognition
    // FIX: removed a stray "125" token (a pasted line number) that made the
    // original fail to compile, and dispose the GDI objects we create.
    using (Graphics g1 = Graphics.FromImage(frame.Bitmap))
    using (Pen pen1 = new Pen(Color.Yellow, 2.0f))
    using (Font font1 = new Font("微软雅黑", 16, GraphicsUnit.Pixel))
    using (SolidBrush drawBrush1 = new SolidBrush(Color.Yellow))
    {
        List<FaceIdentifyModel> tempList = new List<FaceIdentifyModel>();
        foreach (Rectangle face in faces)
        {
            // Crop the face rectangle out of the frame and identify it.
            Image rectImage1 = ImageHelper.CaptureImage(frame.Bitmap, face);
            FaceIdentifyModel MoreIdentifyInfo = FaceAPI.FaceIdentify(rectImage1, tb_Group.Text.Trim(), 1, 1);
            MoreIdentifyInfo.rect = face;
            tempList.Add(MoreIdentifyInfo);
        }

        tb_Identify.Text = tempList.ToJson();
        foreach (var t in tempList)
        {
            g1.DrawRectangle(pen1, t.rect);
            if (t.result != null)
            {
                // Label the face with the top match's user info.
                g1.DrawString(t.result[0].user_info.Replace(",", "\r\n"), font1, drawBrush1, new Point(t.rect.X + 20, t.rect.Y - 20));
            }
        }
    }
    #endregion

    imageBox1.Image = frame;
    imageBox2.Image = forgroundMask;
}