/// <summary>
/// Updates the visitor demographics from the most recent face-similarity sample and,
/// for each matched face, sends a telemetry record to IoT Hub.
/// </summary>
/// <param name="img">The analyzed camera frame; used to compute frame-wide average emotion scores.</param>
/// <remarks>
/// NOTE(review): async void — exceptions thrown here cannot be observed by callers.
/// Consider returning Task (compare the overload below that does).
/// </remarks>
private async void UpdateDemographics(ImageAnalyzer img)
{
    if (this.lastSimilarPersistedFaceSample != null)
    {
        bool demographicsChanged = false;
        // Update the Visitor collection (either add new entry or update existing)
        foreach (var item in this.lastSimilarPersistedFaceSample)
        {
            Visitor visitor;
            // "1" = first sighting of this persisted face, "0" = repeat visitor (sent as "isunique" below).
            String unique = "1";
            if (this.visitors.TryGetValue(item.SimilarPersistedFace.PersistedFaceId, out visitor))
            {
                visitor.Count++;
                unique = "0";
            }
            else
            {
                demographicsChanged = true;
                visitor = new Visitor { UniqueId = item.SimilarPersistedFace.PersistedFaceId, Count = 1 };
                this.visitors.Add(visitor.UniqueId, visitor);
                this.demographics.Visitors.Add(visitor);

                // Update the demographics stats. We only do it for new visitors to avoid double counting.
                AgeDistribution genderBasedAgeDistribution = null;
                if (string.Compare(item.Face.FaceAttributes.Gender, "male", StringComparison.OrdinalIgnoreCase) == 0)
                {
                    this.demographics.OverallMaleCount++;
                    genderBasedAgeDistribution = this.demographics.AgeGenderDistribution.MaleDistribution;
                }
                else
                {
                    this.demographics.OverallFemaleCount++;
                    genderBasedAgeDistribution = this.demographics.AgeGenderDistribution.FemaleDistribution;
                }

                // Bucket the visitor's age into the histogram.
                if (item.Face.FaceAttributes.Age < 16)
                {
                    genderBasedAgeDistribution.Age0To15++;
                }
                else if (item.Face.FaceAttributes.Age < 20)
                {
                    genderBasedAgeDistribution.Age16To19++;
                }
                else if (item.Face.FaceAttributes.Age < 30)
                {
                    genderBasedAgeDistribution.Age20s++;
                }
                else if (item.Face.FaceAttributes.Age < 40)
                {
                    genderBasedAgeDistribution.Age30s++;
                }
                else if (item.Face.FaceAttributes.Age < 50)
                {
                    genderBasedAgeDistribution.Age40s++;
                }
                else
                {
                    genderBasedAgeDistribution.Age50sAndOlder++;
                }
            }

            // Build and send one telemetry record for this matched face.
            if (lastDetectedFaceSample != null)
            {
                // NOTE(review): a new Random per iteration is clock-seeded and can repeat values
                // in tight loops; "avgs"/"avgrank" look like placeholder data — confirm intent.
                Random rand = new Random();
                Dictionary<String, String> dictionary = new Dictionary<String, String>();
                dictionary["id"] = item.SimilarPersistedFace.PersistedFaceId.ToString();
                dictionary["gender"] = item.Face.FaceAttributes.Gender.ToString();
                dictionary["age"] = item.Face.FaceAttributes.Age.ToString();
                // NOTE(review): local wall-clock time; use UTC if the backend aggregates across devices.
                dictionary["date"] = DateTime.Now.ToString("yyyy-MM-dd HH:mm");
                dictionary["smile"] = item.Face.FaceAttributes.Smile.ToString();
                dictionary["glasses"] = item.Face.FaceAttributes.Glasses.ToString();
                dictionary["avgs"] = rand.Next(5, 8).ToString();
                dictionary["avgrank"] = (3 + rand.NextDouble() * 1.5).ToString();

                // Average emotion scores across every face detected in the frame (not just this visitor).
                EmotionScores averageScores = new EmotionScores
                {
                    Happiness = img.DetectedFaces.Average(f => f.FaceAttributes.Emotion.Happiness),
                    Anger = img.DetectedFaces.Average(f => f.FaceAttributes.Emotion.Anger),
                    Sadness = img.DetectedFaces.Average(f => f.FaceAttributes.Emotion.Sadness),
                    Contempt = img.DetectedFaces.Average(f => f.FaceAttributes.Emotion.Contempt),
                    Disgust = img.DetectedFaces.Average(f => f.FaceAttributes.Emotion.Disgust),
                    Neutral = img.DetectedFaces.Average(f => f.FaceAttributes.Emotion.Neutral),
                    Fear = img.DetectedFaces.Average(f => f.FaceAttributes.Emotion.Fear),
                    Surprise = img.DetectedFaces.Average(f => f.FaceAttributes.Emotion.Surprise)
                };
                dictionary["isunique"] = unique;
                dictionary["anger"] = averageScores.Anger.ToString();
                dictionary["contempt"] = averageScores.Contempt.ToString();
                dictionary["disgust"] = averageScores.Disgust.ToString();
                dictionary["fear"] = averageScores.Fear.ToString();
                dictionary["happiness"] = averageScores.Happiness.ToString();
                dictionary["neutral"] = averageScores.Neutral.ToString();
                dictionary["sadness"] = averageScores.Sadness.ToString();
                dictionary["surprise"] = averageScores.Surprise.ToString();
                //#pragma warning restore 4014
                System.Diagnostics.Debug.WriteLine("here!!!!!!!!");

                var name = "null";
                var person = "";
                // NOTE(review): '+' binds tighter than '==', so this concatenates first and the
                // comparison (string == null) always prints "False". Parenthesize
                // (lastIdentifiedPersonSample == null) if the null-ness was meant to be logged.
                System.Diagnostics.Debug.WriteLine("Identify? : " + lastIdentifiedPersonSample == null);
                if (null != lastIdentifiedPersonSample && null != lastIdentifiedPersonSample.First().Item2)
                {
                    name = lastIdentifiedPersonSample.First().Item2.Person.Name.ToString();
                    person = lastIdentifiedPersonSample.First().Item2.Person.PersonId.ToString();
                }
                System.Diagnostics.Debug.WriteLine("Name: " + name);
                System.Diagnostics.Debug.WriteLine("ID: " + person);
                // Dump the outgoing payload for debugging.
                foreach (KeyValuePair<string, string> entry in dictionary)
                {
                    System.Diagnostics.Debug.WriteLine(entry.Key.ToString() + ": " + entry.Value.ToString());
                    // do something with entry.Value or entry.Key
                }
                dictionary["personid"] = person;
                dictionary["personname"] = name;
                ////#pragma warning disable 4014
                // NOTE(review): 'str' is never used; the connect string is re-read in the call below.
                String str = SettingsHelper.Instance.IoTHubConnectString;
                await IoTClient.Start(dictionary, SettingsHelper.Instance.IoTHubConnectString);
            }
        }

        if (demographicsChanged)
        {
            this.ageGenderDistributionControl.UpdateData(this.demographics);
        }

        this.overallStatsControl.UpdateData(this.demographics);
    }
}
/// <summary>
/// Builds the greeting to show for the current frame based on which faces were
/// identified and the state of the phone-notification authentication flow.
/// Side effects: toggles the Details panel, may send a notification, poll auth
/// status, or delete records via the helper methods.
/// </summary>
/// <param name="img">The analyzed frame with detected faces and identified persons.</param>
/// <returns>The greeting text to display.</returns>
/// <remarks>
/// NOTE(review): relies on the fields sentNotification / authenticated /
/// notificationCount / deletedRecords being mutated elsewhere (presumably by
/// NotificationMethod/GetAuthStatus) — verify against those helpers.
/// Method name has a typo ("Greetting") but renaming would break callers.
/// </remarks>
private string GetGreettingFromFaces(ImageAnalyzer img)
{
    if (img.IdentifiedPersons.Any())
    {
        // Comma-separated list of all recognized names (single name when only one match).
        string names = img.IdentifiedPersons.Count() > 1 ? string.Join(", ", img.IdentifiedPersons.Select(p => p.Person.Name)) : img.IdentifiedPersons.First().Person.Name;
        if (img.DetectedFaces.Count() > img.IdentifiedPersons.Count())
        {
            // Known person(s) plus unknown companions in the frame.
            this.Details.Visibility = Visibility.Collapsed;
            //this.Buttons.Visibility = Visibility.Collapsed;
            return (string.Format("Welcome back, {0} and company! Please step one at a time to start!", names));
        }
        else
        {
            if (img.IdentifiedPersons.Count() > 1)
            {
                // Multiple identified people at once — ask them to come one at a time.
                this.Details.Visibility = Visibility.Collapsed;
                //this.Buttons.Visibility = Visibility.Collapsed;
                return (string.Format("Welcome back, {0}! Please step one at a time in front of the camera to start!", names));
            }
            else
            {
                // Exactly one identified person: drive the phone-authentication flow.
                //Send a notification to the customer's phone
                if (!sentNotification && notificationCount == 0)
                {
                    this.Details.Visibility = Visibility.Collapsed;
                    //this.Buttons.Visibility = Visibility.Collapsed;
                    NotificationMethod();
                    notificationCount++;
                }
                if (sentNotification && authenticated == 2)
                {
                    //Keep checking if the user is authenticated or not and assign authenticated value to the variable
                    this.Details.Visibility = Visibility.Collapsed;
                    //this.Buttons.Visibility = Visibility.Collapsed;
                    GetAuthStatus();
                    //authenticated=2 if not authenticated till now
                    //authenticated=1 if authentication was successful
                    //authenticated=0 if authentication was not successful
                    //If user is not authenticated, send this:
                    return (string.Format("Welcome back {0}! I've sent a notification on your phone. Please confirm to proceed!", names));
                }
                else if (authenticated == 1)
                {
                    // Authentication succeeded.
                    this.Details.Text = "Divergence Customer Collaboration Center.";
                    this.Details.Visibility = Visibility.Visible;
                    //this.Buttons.Visibility = Visibility.Visible;
                    return (string.Format("Welcome back {0}!", names));
                }
                else if (authenticated == 0)
                {
                    // Authentication failed: clean up once, re-poll, and ask to retry.
                    this.Details.Visibility = Visibility.Collapsed;
                    //this.Buttons.Visibility = Visibility.Collapsed;
                    if (!deletedRecords)
                    {
                        DeleteRecordsMethod();
                        deletedRecords = true;
                        // NOTE(review): Thread.Sleep blocks the calling (likely UI) thread — confirm
                        // this method isn't invoked on the dispatcher; prefer async delay if it is.
                        Thread.Sleep(1000);
                    }
                    if (deletedRecords)
                    {
                        GetAuthStatus();
                    }
                    Thread.Sleep(1000);
                    return (string.Format("We could not process the authentication. Please try again."));
                }
                else
                {
                    // Notification already sent; still waiting for confirmation.
                    this.Details.Visibility = Visibility.Collapsed;
                    //this.Buttons.Visibility = Visibility.Collapsed;
                    return (string.Format("Welcome back {0}! I've sent a notification on your phone. Please confirm to proceed!", names));
                }
            }
        }
    }
    else
    {
        // Nobody identified: reset the auth flow state and greet generically.
        if (img.DetectedFaces.Count() > 1)
        {
            sentNotification = false;
            authenticated = 2;
            notificationCount = 0;
            this.Details.Visibility = Visibility.Collapsed;
            //this.Buttons.Visibility = Visibility.Collapsed;
            return ("Hi everyone! Welcome to Divergence Customer Collaboration Center..");
        }
        else
        {
            sentNotification = false;
            authenticated = 2;
            notificationCount = 0;
            this.Details.Visibility = Visibility.Collapsed;
            //this.Buttons.Visibility = Visibility.Collapsed;
            return ("Hi there! Welcome to Divergence Customer Collaboration Center.");
        }
    }
}
/// <summary>
/// Updates the visitor demographics from the most recent face-similarity sample,
/// persisting a snapshot of each newly-seen face via SaveIdentifiedFace.
/// </summary>
/// <param name="img">The analyzed camera frame, passed through to SaveIdentifiedFace for new visitors.</param>
private async Task UpdateDemographics(ImageAnalyzer img)
{
    if (this.lastSimilarPersistedFaceSample != null)
    {
        bool demographicsChanged = false;
        // Update the Visitor collection (either add new entry or update existing)
        foreach (var item in this.lastSimilarPersistedFaceSample)
        {
            Visitor visitor;
            if (this.visitors.TryGetValue(item.SimilarPersistedFace.PersistedFaceId, out visitor))
            {
                // Repeat visitor: just bump the sighting count.
                visitor.Count++;
            }
            else
            {
                demographicsChanged = true;
                visitor = new Visitor { UniqueId = item.SimilarPersistedFace.PersistedFaceId, Count = 1 };
                this.visitors.Add(visitor.UniqueId, visitor);
                // Persist the face image before updating the in-memory stats.
                await this.SaveIdentifiedFace(img);
                this.demographics.Visitors.Add(visitor);

                // Update the demographics stats. We only do it for new visitors to avoid double counting.
                AgeDistribution genderBasedAgeDistribution = null;
                if (string.Compare(item.Face.FaceAttributes.Gender, "male", StringComparison.OrdinalIgnoreCase) == 0)
                {
                    this.demographics.OverallMaleCount++;
                    genderBasedAgeDistribution = this.demographics.AgeGenderDistribution.MaleDistribution;
                }
                else
                {
                    this.demographics.OverallFemaleCount++;
                    genderBasedAgeDistribution = this.demographics.AgeGenderDistribution.FemaleDistribution;
                }

                // Bucket the visitor's age into the histogram.
                if (item.Face.FaceAttributes.Age < 16)
                {
                    genderBasedAgeDistribution.Age0To15++;
                }
                else if (item.Face.FaceAttributes.Age < 20)
                {
                    genderBasedAgeDistribution.Age16To19++;
                }
                else if (item.Face.FaceAttributes.Age < 30)
                {
                    genderBasedAgeDistribution.Age20s++;
                }
                else if (item.Face.FaceAttributes.Age < 40)
                {
                    genderBasedAgeDistribution.Age30s++;
                }
                else if (item.Face.FaceAttributes.Age < 50)
                {
                    genderBasedAgeDistribution.Age40s++;
                }
                else
                {
                    genderBasedAgeDistribution.Age50sAndOlder++;
                }
            }
        }

        // Only redraw the age/gender chart when a new visitor changed the distribution.
        if (demographicsChanged)
        {
            this.ageGenderDistributionControl.UpdateData(this.demographics);
        }

        this.overallStatsControl.UpdateData(this.demographics);
    }
}
/// <summary>
/// Processes one captured frame: detects emotions, updates the aggregate sentiment
/// gauge, and (when the emotion-faces grid is visible) crops each face and shows it
/// with its strongest emotion. Always clears isProcessingPhoto before returning.
/// </summary>
/// <param name="e">The captured frame to analyze; null means "nothing to process".</param>
private async Task ProcessCameraCapture(ImageAnalyzer e)
{
    if (e == null)
    {
        this.isProcessingPhoto = false;
        return;
    }

    // detect emotions
    await e.DetectEmotionAsync();

    if (e.DetectedEmotion.Any())
    {
        // Update the average emotion response
        Scores averageScores = new Scores
        {
            Happiness = e.DetectedEmotion.Average(em => em.Scores.Happiness),
            Anger = e.DetectedEmotion.Average(em => em.Scores.Anger),
            Sadness = e.DetectedEmotion.Average(em => em.Scores.Sadness),
            Contempt = e.DetectedEmotion.Average(em => em.Scores.Contempt),
            Disgust = e.DetectedEmotion.Average(em => em.Scores.Disgust),
            Neutral = e.DetectedEmotion.Average(em => em.Scores.Neutral),
            Fear = e.DetectedEmotion.Average(em => em.Scores.Fear),
            Surprise = e.DetectedEmotion.Average(em => em.Scores.Surprise)
        };

        // Collapse the 8 emotions into a single [0, 1] sentiment value:
        // 0.5 is neutral, >0.5 net positive, <0.5 net negative. Each side is
        // clamped to 1 before differencing so one emotion can't dominate.
        double positiveEmotionResponse = Math.Min(averageScores.Happiness + averageScores.Surprise, 1);
        double negativeEmotionResponse = Math.Min(averageScores.Sadness + averageScores.Fear + averageScores.Disgust + averageScores.Contempt, 1);
        double netResponse = ((positiveEmotionResponse - negativeEmotionResponse) * 0.5) + 0.5;
        this.sentimentControl.Sentiment = netResponse;

        // show captured faces and their emotion
        if (this.emotionFacesGrid.Visibility == Visibility.Visible)
        {
            foreach (var face in e.DetectedEmotion)
            {
                // Get top emotion on this face
                EmotionData topEmotion = EmotionServiceHelper.ScoresToEmotionData(face.Scores).OrderByDescending(em => em.EmotionScore).First();

                // Crop this face, enlarging the rectangle (1.8x, biased upward) to frame it
                // better, while clamping to the decoded image bounds.
                Rectangle rect = face.FaceRectangle;
                double heightScaleFactor = 1.8;
                double widthScaleFactor = 1.8;
                Rectangle biggerRectangle = new Rectangle
                {
                    Height = Math.Min((int)(rect.Height * heightScaleFactor), e.DecodedImageHeight),
                    Width = Math.Min((int)(rect.Width * widthScaleFactor), e.DecodedImageWidth)
                };
                biggerRectangle.Left = Math.Max(0, rect.Left - (int)(rect.Width * ((widthScaleFactor - 1) / 2)));
                biggerRectangle.Top = Math.Max(0, rect.Top - (int)(rect.Height * ((heightScaleFactor - 1) / 1.4)));

                ImageSource croppedImage = await Util.GetCroppedBitmapAsync(e.GetImageStreamCallback, biggerRectangle);

                // Add the face and emotion to the collection of faces
                if (croppedImage != null && biggerRectangle.Height > 0 && biggerRectangle.Width > 0)
                {
                    // Keep the grid bounded: reset once it fills up (3x3).
                    if (this.EmotionFaces.Count >= 9)
                    {
                        this.EmotionFaces.Clear();
                    }

                    this.EmotionFaces.Add(new EmotionExpressionCapture { CroppedFace = croppedImage, TopEmotion = topEmotion.EmotionName });
                }
            }
        }
    }

    this.isProcessingPhoto = false;
}
/// <summary>
/// Updates the per-person video timeline for one analyzed frame: bumps counts and
/// retries identification for known faces, catalogs new faces (crop, demographics,
/// new VideoTrack), and records this frame's emotion on the person's track.
/// </summary>
/// <param name="analyzer">The analyzed frame with similar-face matches.</param>
/// <param name="frameNumber">The frame index used to place the emotion sample on the timeline.</param>
private async Task ProcessPeopleInsightsAsync(ImageAnalyzer analyzer, int frameNumber)
{
    foreach (var item in analyzer.SimilarFaceMatches)
    {
        bool demographicsChanged = false;
        Visitor personInVideo;
        if (this.peopleInVideo.TryGetValue(item.SimilarPersistedFace.PersistedFaceId, out personInVideo))
        {
            personInVideo.Count++;

            if (this.pendingIdentificationAttemptCount.ContainsKey(item.SimilarPersistedFace.PersistedFaceId))
            {
                // This is a face we haven't identified yet. See how many times we have tried it, if we need to do it again or stop trying
                if (this.pendingIdentificationAttemptCount[item.SimilarPersistedFace.PersistedFaceId] <= 5)
                {
                    // NOTE(review): despite the Async suffix, this helper is called synchronously
                    // and returns a string — confirm it is not a Task-returning method.
                    string personName = GetDisplayTextForPersonAsync(analyzer, item);
                    if (string.IsNullOrEmpty(personName))
                    {
                        // Increment the times we have tried and failed to identify this person
                        this.pendingIdentificationAttemptCount[item.SimilarPersistedFace.PersistedFaceId]++;
                    }
                    else
                    {
                        // Bingo! Let's remove it from the list of pending identifications
                        this.pendingIdentificationAttemptCount.Remove(item.SimilarPersistedFace.PersistedFaceId);

                        // Update the existing timeline entry with the now-known name.
                        VideoTrack existingTrack = (VideoTrack)this.peopleListView.Children.FirstOrDefault(f => (Guid)((FrameworkElement)f).Tag == item.SimilarPersistedFace.PersistedFaceId);
                        if (existingTrack != null)
                        {
                            existingTrack.DisplayText = string.Format("{0}, {1}", personName, Math.Floor(item.Face.FaceAttributes.Age));
                        }
                    }
                }
                else
                {
                    // Give up
                    this.pendingIdentificationAttemptCount.Remove(item.SimilarPersistedFace.PersistedFaceId);
                }
            }
        }
        else
        {
            // New person... let's catalog it.
            // Crop the face, enlarging the rectangle so we frame it better
            double heightScaleFactor = 1.8;
            double widthScaleFactor = 1.8;
            FaceRectangle biggerRectangle = new FaceRectangle
            {
                Height = Math.Min((int)(item.Face.FaceRectangle.Height * heightScaleFactor), FrameRelayVideoEffect.LatestSoftwareBitmap.PixelHeight),
                Width = Math.Min((int)(item.Face.FaceRectangle.Width * widthScaleFactor), FrameRelayVideoEffect.LatestSoftwareBitmap.PixelWidth)
            };
            biggerRectangle.Left = Math.Max(0, item.Face.FaceRectangle.Left - (int)(item.Face.FaceRectangle.Width * ((widthScaleFactor - 1) / 2)));
            biggerRectangle.Top = Math.Max(0, item.Face.FaceRectangle.Top - (int)(item.Face.FaceRectangle.Height * ((heightScaleFactor - 1) / 1.4)));

            var croppedImage = await Util.GetCroppedBitmapAsync(analyzer.GetImageStreamCallback, biggerRectangle);

            // NOTE(review): && binds tighter than ||, so this skips only when BOTH dimensions
            // are zero; a sibling check elsewhere requires both to be > 0 — confirm which is intended.
            if (croppedImage == null || biggerRectangle.Height == 0 && biggerRectangle.Width == 0)
            {
                // Couldn't get a shot of this person
                continue;
            }

            demographicsChanged = true;

            string personName = GetDisplayTextForPersonAsync(analyzer, item);
            if (string.IsNullOrEmpty(personName))
            {
                // Fall back to showing the gender until identification succeeds.
                personName = item.Face.FaceAttributes.Gender;

                // Add the person to the list of pending identifications so we can try again on some future frames
                this.pendingIdentificationAttemptCount.Add(item.SimilarPersistedFace.PersistedFaceId, 1);
            }

            personInVideo = new Visitor { UniqueId = item.SimilarPersistedFace.PersistedFaceId };
            this.peopleInVideo.Add(item.SimilarPersistedFace.PersistedFaceId, personInVideo);
            this.demographics.Visitors.Add(personInVideo);

            // Update the demographics stats.
            this.UpdateDemographics(item);

            // Insert the new person at the top of the timeline list.
            VideoTrack videoTrack = new VideoTrack
            {
                Tag = item.SimilarPersistedFace.PersistedFaceId,
                CroppedFace = croppedImage,
                DisplayText = string.Format("{0}, {1}", personName, Math.Floor(item.Face.FaceAttributes.Age)),
                Duration = (int)this.videoPlayer.NaturalDuration.TimeSpan.TotalSeconds,
            };
            videoTrack.Tapped += this.TimelineTapped;
            this.peopleListView.Children.Insert(0, videoTrack);
        }

        // Update the timeline for this person
        VideoTrack track = (VideoTrack)this.peopleListView.Children.FirstOrDefault(f => (Guid)((FrameworkElement)f).Tag == item.SimilarPersistedFace.PersistedFaceId);
        if (track != null)
        {
            track.SetVideoFrameState(frameNumber, item.Face.FaceAttributes.Emotion);

            uint childIndex = (uint)this.peopleListView.Children.IndexOf(track);
            if (childIndex > 5)
            {
                // Bring to towards the top so it becomes visible
                this.peopleListView.Children.Move(childIndex, 5);
            }
        }

        // NOTE(review): these UI updates run once per matched face rather than once per frame —
        // confirm that is intentional (the sibling UpdateDemographics overloads update after the loop).
        if (demographicsChanged)
        {
            this.ageGenderDistributionControl.UpdateData(this.demographics);
        }

        this.overallStatsControl.UpdateData(this.demographics);
    }
}
/// <summary>
/// AVFoundation capture callback (iOS/Xamarin): for each camera sample, draws face
/// overlays via the iOS face detector on the main queue, then kicks off cloud
/// analysis of a downscaled copy on a background task. Frames are dropped while a
/// previous frame is still being processed (isProcessing gate).
/// </summary>
/// <param name="captureOutput">The capture output that produced the sample (unused).</param>
/// <param name="sampleBuffer">The raw camera sample; always disposed before returning.</param>
/// <param name="connection">The capture connection; forced to portrait, no crop.</param>
public override void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
{
    try
    {
        // Drop the frame if a face hasn't been registered yet or we're still busy.
        if (!ViewController.isFaceRegistered || isProcessing)
        {
            // Console.WriteLine("OutputDelegate - Exit (isProcessing: " + DateTime.Now);
            // NOTE(review): redundant — the finally block disposes again (Dispose is idempotent).
            sampleBuffer.Dispose();
            //Console.WriteLine("processing..");
            return;
        }

        //Console.WriteLine("IsProcessing: ");
        isProcessing = true;

        connection.VideoOrientation = AVCaptureVideoOrientation.Portrait;
        connection.VideoScaleAndCropFactor = 1.0f;

        var image = GetImageFromSampleBuffer(sampleBuffer);
        var ciImage = CIImage.FromCGImage(image.CGImage);
        var cleanAperture = sampleBuffer.GetVideoFormatDescription().GetCleanAperture(false);

        /*For Face Detection using iOS APIs*/
        DispatchQueue.MainQueue.DispatchAsync(() => drawFacesCallback(ciImage, cleanAperture));
        //Console.WriteLine(ciImage);

        // Cloud analysis runs fire-and-forget; the finally below guarantees the
        // isProcessing gate is reopened even if analysis throws.
        Task.Run(async () =>
        {
            try
            {
                //if (ViewController.IsFaceDetected)
                //{
                Console.WriteLine("face detected: ");
                // Downscale to 300x400 before upload to keep the request small.
                imageAnalyzer = new ImageAnalyzer(() => Task.FromResult<Stream>(image.ResizeImageWithAspectRatio(300, 400).AsPNG().AsStream()));
                await ProcessCameraCapture(imageAnalyzer);
                //}
            }
            finally
            {
                imageAnalyzer = null;
                isProcessing = false;
                Console.WriteLine("OUT ");
            }
        });
    }
    catch (Exception ex)
    {
        // Best-effort: log and keep the capture session alive.
        Console.Write(ex);
    }
    finally
    {
        sampleBuffer.Dispose();
    }
}
/// <summary>
/// Runs the selected Custom Vision project's latest trained iteration against the
/// image and shows the predictions: a tag grid for classification projects, or
/// bounding boxes for object detection. Also refreshes the active-learning data.
/// </summary>
/// <param name="img">The image to score (by URL when available, otherwise by stream).</param>
private async void UpdateResults(ImageAnalyzer img)
{
    this.searchErrorTextBlock.Visibility = Visibility.Collapsed;

    Microsoft.Azure.CognitiveServices.Vision.CustomVision.Prediction.Models.ImagePrediction result = null;
    var currentProjectViewModel = (ProjectViewModel)this.projectsComboBox.SelectedValue;
    var currentProject = ((ProjectViewModel)this.projectsComboBox.SelectedValue).Model;
    var trainingApi = this.userProvidedTrainingApi;
    var predictionApi = this.userProvidedPredictionApi;

    try
    {
        // Pick the most recently trained completed iteration to score against.
        var iteractions = await trainingApi.GetIterationsAsync(currentProject.Id);
        var latestTrainedIteraction = iteractions.Where(i => i.Status == "Completed").OrderByDescending(i => i.TrainedAt.Value).FirstOrDefault();

        if (latestTrainedIteraction == null)
        {
            throw new Exception("This project doesn't have any trained models yet. Please train it, or wait until training completes if one is in progress.");
        }

        if (img.ImageUrl != null)
        {
            result = await CustomVisionServiceHelper.PredictImageUrlWithRetryAsync(predictionApi, currentProject.Id, new Microsoft.Azure.CognitiveServices.Vision.CustomVision.Prediction.Models.ImageUrl(img.ImageUrl), latestTrainedIteraction.Id);
        }
        else
        {
            result = await CustomVisionServiceHelper.PredictImageWithRetryAsync(predictionApi, currentProject.Id, img.GetImageStreamCallback, latestTrainedIteraction.Id);
        }
    }
    catch (Exception ex)
    {
        // The handler shows the error; 'result' stays null and the null-conditional
        // checks below degrade gracefully to the "no results" UI.
        await Util.GenericApiCallExceptionHandler(ex, "Error");
    }

    this.progressRing.IsActive = false;
    this.resultsDetails.Visibility = Visibility.Visible;

    // Keep only predictions that round to at least 1%.
    var matches = result?.Predictions?.Where(r => Math.Round(r.Probability * 100) > 0);
    if (matches == null || !matches.Any())
    {
        this.searchErrorTextBlock.Visibility = Visibility.Visible;
    }
    else
    {
        if (!currentProjectViewModel.IsObjectDetection)
        {
            this.resultsGridView.ItemsSource = matches.Select(t => new { Tag = t.TagName, Probability = string.Format("{0}%", Math.Round(t.Probability * 100)) });
        }
        else
        {
            // Object detection: hide the tag grid and draw boxes for confident hits only.
            this.resultsDetails.Visibility = Visibility.Collapsed;
            this.currentDetectedObjects = matches.Where(m => m.Probability >= 0.6);
            ShowObjectDetectionBoxes(this.currentDetectedObjects);
        }
    }

    // Active learning applies to classification projects only.
    if (result?.Predictions != null && !currentProjectViewModel.IsObjectDetection)
    {
        this.activeLearningButton.Opacity = 1;
        this.PredictionDataForRetraining.Clear();
        this.PredictionDataForRetraining.AddRange(result.Predictions.Select(
            t => new ActiveLearningTagViewModel
            {
                PredictionResultId = result.Id,
                TagId = t.TagId,
                TagName = t.TagName,
                HasTag = Math.Round(t.Probability * 100) > 0
            }));
    }
    else
    {
        this.activeLearningButton.Opacity = 0;
    }
}
/// <summary>
/// Renders the first detected face's emotion scores as an 8-column bar chart on the
/// Sense HAT 8x8 LED matrix (one column per emotion, bar height proportional to score).
/// No-op when no Sense HAT is attached.
/// </summary>
/// <param name="img">The analyzed frame; must contain at least one detected face with emotion attributes.</param>
private void UpdateEmotionMeter(ImageAnalyzer img)
{
    if (senseHat != null)
    {
        var scores = img.DetectedFaces.First().FaceAttributes.Emotion;
        senseHat.Display.Clear();

        // Draws one emotion bar: the base pixel on the bottom row is always lit,
        // plus extra pixels growing upward in proportion to the score
        // (score 1.0 => the full 8-pixel column).
        void DrawColumn(int col, double score, Color color)
        {
            senseHat.Display.Screen[col, 7] = color;
            for (int row = 1; row < Math.Round(8 * score); row++)
            {
                senseHat.Display.Screen[col, 7 - row] = color;
            }
        }

        DrawColumn(0, scores.Anger, Color.FromArgb(0xff, 0xff, 0x54, 0x2c));
        DrawColumn(1, scores.Contempt, Color.FromArgb(0xff, 0xce, 0x2d, 0x90));
        DrawColumn(2, scores.Disgust, Color.FromArgb(0xff, 0x8c, 0x43, 0xbd));
        // BUG FIX: the fear column previously used scores.Disgust for its height.
        DrawColumn(3, scores.Fear, Color.FromArgb(0xff, 0xfe, 0xb5, 0x52));
        DrawColumn(4, scores.Happiness, Color.FromArgb(0xff, 0x4f, 0xc7, 0x45));
        DrawColumn(5, scores.Neutral, Color.FromArgb(0xff, 0x1e, 0x1e, 0x1e));
        DrawColumn(6, scores.Sadness, Color.FromArgb(0xff, 0x47, 0x8b, 0xcb));
        DrawColumn(7, scores.Surprise, Color.FromArgb(0xff, 0xff, 0xf6, 0xd6));

        senseHat.Display.Update();
    }
}
/// <summary>
/// Click handler: analyzes the captured photo for driver-attention signals
/// (head pose, yawning, closed eyes) via the Face service, then checks the
/// Vision caption for phone usage. Updates the result labels with red/green
/// coloring and Spanish user-facing messages.
/// </summary>
/// <param name="sender">The clicked button (unused).</param>
/// <param name="e">Event args (unused).</param>
async void btnAnalizar_Clicked(object sender, EventArgs e)
{
    if (foto != null)
    {
        try
        {
            Loading(true);

            // Phase 1 - Face: detect the face once, then derive pose/mouth/eye metrics locally.
            var rostro = await ServicioFace.DetectarRostro(foto);

            // Head pose: above the threshold means the driver is looking away.
            var frente = ImageAnalyzer.AnalizarPostura(rostro);
            txtFrente.Text = frente.ToString("N2");
            if (frente > Constantes.LookingAwayAngleThreshold)
            {
                txtFrente.TextColor = Color.Red;
                txtAnalisisFrente.TextColor = Color.Red;
                txtAnalisisFrente.Text = "No estás mirando al frente";
            }
            else
            {
                txtFrente.TextColor = Color.Green;
                txtAnalisisFrente.TextColor = Color.Green;
                txtAnalisisFrente.Text = "OK";
            }

            // Mouth aperture: above the threshold suggests yawning.
            var boca = ImageAnalyzer.AnalizarBoca(rostro);
            txtBoca.Text = boca.ToString("N2");
            if (boca > Constantes.YawningApertureThreshold)
            {
                txtBoca.TextColor = Color.Red;
                txtAnalisisBoca.TextColor = Color.Red;
                txtAnalisisBoca.Text = "Posiblemente está bostezando";
            }
            else
            {
                txtBoca.TextColor = Color.Green;
                txtAnalisisBoca.TextColor = Color.Green;
                txtAnalisisBoca.Text = "OK";
            }

            // Eye aperture: below the threshold suggests the driver is asleep.
            var ojos = ImageAnalyzer.AnalizarOjos(rostro);
            txtOjos.Text = ojos.ToString("N2");
            if (ojos < Constantes.SleepingApertureThreshold)
            {
                txtOjos.TextColor = Color.Red;
                txtAnalisisOjos.TextColor = Color.Red;
                txtAnalisisOjos.Text = "¡Está dormido!";
            }
            else
            {
                txtOjos.TextColor = Color.Green;
                txtAnalisisOjos.TextColor = Color.Green;
                txtAnalisisOjos.Text = "OK";
            }

            // Phase 2 - Vision: use the image caption to detect phone usage.
            var descripcion = await ServicioVision.DescribirImagen(foto);
            // NOTE(review): 'analisis' is never used — confirm whether the extra call is needed.
            var analisis = await ServicioVision.AnalizarImagen(foto);
            if (descripcion.Description.Captions.Length > 0)
            {
                var distraccion = descripcion.Description.Captions[0].Text;
                if (distraccion.Contains("phone"))
                {
                    txtCelular.Text = "SI";
                    txtCelular.TextColor = Color.Red;
                    txtAnalisisCelular.TextColor = Color.Red;
                    txtAnalisisCelular.Text = "¡Está usando el teléfono móvil!";
                }
                else
                {
                    txtCelular.Text = "NO";
                    txtCelular.TextColor = Color.Green;
                    txtAnalisisCelular.TextColor = Color.Green;
                    txtAnalisisCelular.Text = "OK";
                }
            }
        }
        catch (Exception ex)
        {
            await DisplayAlert("Error", "Excepción: " + ex.Message, "OK");
        }
        finally
        {
            Loading(false);
        }
    }
    else
    {
        await DisplayAlert("Error", "Debes tomar la fotografía", "OK");
    }
}
/// <summary>
/// Handles a photo captured by the camera control: feeds the frame into the
/// image-selection pipeline as a single-element batch, then stops the live stream.
/// </summary>
/// <param name="sender">The camera control that raised the event (unused).</param>
/// <param name="e">The captured, analyzable image.</param>
private async void OnCameraPhotoCaptured(object sender, ImageAnalyzer e)
{
    var capturedBatch = new ImageAnalyzer[] { e };
    ProcessImageSelection(capturedBatch);

    await this.cameraControl.StopStreamAsync();
}
/// <summary>
/// Processes one captured frame for the monitoring dashboard: detects faces and
/// attributes, uploads the picture, raises fire/helmet/unauthorized-person chat
/// alerts, runs identification + similar-face matching, and updates the cached
/// samples and latency debug text. Always clears isProcessingPhoto.
/// </summary>
/// <param name="e">The captured frame; null resets all cached samples.</param>
private async Task ProcessCameraCapture(ImageAnalyzer e)
{
    try
    {
        if (e == null)
        {
            this.lastDetectedFaceSample = null;
            this.lastIdentifiedPersonSample = null;
            this.lastSimilarPersistedFaceSample = null;
            this.lastEmotionSample = null;
            this.debugText.Text = "";
            this.isProcessingPhoto = false;
            return;
        }

        await Task.WhenAll(e.DetectFacesAsync(detectFaceAttributes: true));
        //await Task.WhenAll(e.DetectFacesAsync(detectFaceAttributes: true), e.AnalyzeImageAsync(detectCelebrities: false));
        DateTime start = DateTime.Now;

        // Upload the raw capture in the background while we keep analyzing.
        await Task.Run(() => SavePicturesToServer(e));

        // Compute Emotion, Age and Gender
        //await Task.WhenAll(e.DetectEmotionAsync(), e.DetectFacesAsync(detectFaceAttributes: true));
        //check tags
        //foreach (var item in e.AnalysisResult.Description.Tags)
        //{
        //    if (item.Contains("fire"))
        //    {
        //        isfiredetected = true;
        //    }
        //}

        // Fire alert: gated by a global feature flag plus the per-frame detection flag
        // (set elsewhere — see the commented tag scan above).
        if (Constants.isFireDetected)
        {
            if (isfiredetected)
            {
                var stream = new InMemoryRandomAccessStream();
                await stream.WriteAsync(e.Data.AsBuffer());
                stream.Seek(0);
                await fireimage.SetSourceAsync(stream);
                await Task.Delay(300);
                ChatUserControl control = new ChatUserControl();
                control.setControlProperties(500, "HAZARD", "Fire detected in sector 7", "peach", fireimage, true);
                ChatStack.Children.Add(control);
                // createsystemWarning("SYSTEM : ALERT", 400, 400, "Fire Detected in the meeting room", false, false, "sdf", true, "dark");
                await Task.Delay(200);
                myScroll.ChangeView(0, myScroll.ExtentHeight, 1);
                isfiredetected = false;
            }
        }

        if (e.DetectedFaces == null || !e.DetectedFaces.Any())
        {
            this.lastDetectedFaceSample = null;
            LastPersistedCount = 0;
        }
        else
        {
            this.lastDetectedFaceSample = e.DetectedFaces;
            //analysis image can be taken here and done
            // Helmet alert: raised once per window (ishelmet is reset elsewhere).
            if (!ishelmet)
            {
                ishelmet = true;
                ChatUserControl control = new ChatUserControl();
                control.setControlProperties(280, "SAFETY VIOLATION", "Personnel with no Helmet", "blue", null, false);
                ChatStack.Children.Add(control);
                await Task.Delay(200);
                myScroll.ChangeView(0, myScroll.ExtentHeight, 1);
                var data = await getData("http://gitex2017backend.azurewebsites.net/api/ServiceRequest/AnswerQuestion?Answer=Personnel with no Helmet&Session=" + SessionID);
            }
            if (LastUpdatedDateTime.AddMinutes(4) < DateTime.Now)
            {
                ishelmet = true;
            }
        }

        // Compute Face Identification and Unique Face Ids
        await Task.WhenAll(e.IdentifyFacesAsync(), e.FindSimilarPersistedFacesAsync());

        if (!e.IdentifiedPersons.Any())
        {
            this.lastIdentifiedPersonSample = null;
        }
        else
        {
            // Pair each detected face with its identified person (null when unidentified).
            this.lastIdentifiedPersonSample = e.DetectedFaces.Select(f => new Tuple<Face, IdentifiedPerson>(f, e.IdentifiedPersons.FirstOrDefault(p => p.FaceId == f.FaceId)));
        }

        if (!e.SimilarFaceMatches.Any())
        {
            this.lastSimilarPersistedFaceSample = null;
        }
        else
        {
            this.lastSimilarPersistedFaceSample = e.SimilarFaceMatches;
        }

        // Count unidentified faces and kick off image search for each of them.
        int count = 0;
        // NOTE(review): unauth is set true here and only tested with !unauth below, so the
        // security-violation branch can fire only if OnImageSearchCompleted (or another path)
        // flips it to false synchronously — confirm.
        unauth = true;
        if (lastIdentifiedPersonSample == null && e.DetectedFaces.Count() > 0)
        {
            var croppedimage = await GetPrimaryFaceFromCameraCaptureAsync(e, e.DetectedFaces.FirstOrDefault().FaceRectangle);
            if (croppedimage != null)
            {
                this.OnImageSearchCompleted(croppedimage);
            }
        }
        else
        {
            foreach (var item in lastIdentifiedPersonSample)
            {
                try
                {
                    if (item.Item2 == null)
                    {
                        count++;
                        var croppedImage = e;
                        croppedImage = await GetPrimaryFaceFromCameraCaptureAsync(e, item.Item1.FaceRectangle);
                        if (croppedImage != null)
                        {
                            this.OnImageSearchCompleted(croppedImage);
                        }
                    }
                }
                catch (Exception ex)
                {
                    // NOTE(review): silently swallows per-face failures — at minimum log ex.
                }
            }
        }

        if (!unauth)
        {
            unauth = true;
            ChatUserControl control = new ChatUserControl();
            control.setControlProperties(200, "SECURITY VIOLATION", "Unauthorized personnel on Sector 7", "peach", null, false);
            ChatStack.Children.Add(control);
            myScroll.ChangeView(0, myScroll.ExtentHeight, 1);
        }

        if (LastPersistedCount < count)
        {
            // PopToast(string.Format("{0} Unauthorized faces found", count));
        }
        LastPersistedCount = count;

        //this.UpdateDemographics(e);

        this.debugText.Text = string.Format("Latency: {0}ms", (int)(DateTime.Now - start).TotalMilliseconds);
        this.ErrorText.Text = e.ErrorMessage;
        this.isProcessingPhoto = false;
    }
    catch (Exception ex)
    {
        // NOTE(review): all exceptions are swallowed with no logging; only the
        // processing gate is released. Consider logging ex before releasing.
        this.isProcessingPhoto = false;
    }
}
/// <summary>
/// Handles a photo captured for item search: notifies subscribers that the search
/// image is available, then stops the camera stream.
/// </summary>
/// <param name="sender">The camera control that raised the event (unused).</param>
/// <param name="img">The captured, analyzable image.</param>
private async void OnCameraPhotoCaptured(object sender, ImageAnalyzer img)
{
    // Snapshot the delegate so an unsubscribe between the null check and the call is safe.
    var searchCompletedHandlers = this.OnItemSearchCompleted;
    if (searchCompletedHandlers != null)
    {
        searchCompletedHandlers(this, img);
    }

    await this.cameraControl.StopStreamAsync();
}
private async Task ComputeFaceIdentificationAsync(ImageAnalyzer e) {
/// <summary>
/// Adds a thin colored bar to the timeline chart representing this frame's emotion,
/// positioned horizontally according to the frame's timestamp within the video.
/// </summary>
/// <param name="videoFrameTimestampInSeconds">Timestamp of the frame within the video, in seconds.</param>
/// <param name="emotion">The emotion scores to visualize for this frame.</param>
/// <param name="analysisResult">Optional full analysis; when provided, a details flyout is attached to the bar.</param>
public void SetVideoFrameState(int videoFrameTimestampInSeconds, Emotion emotion, ImageAnalyzer analysisResult = null)
{
    var bar = new EmotionToColoredBar();
    bar.UpdateEmotion(emotion);

    // One bar per sampled second; never narrower than half a pixel so it stays visible.
    bar.Tag = videoFrameTimestampInSeconds;
    bar.Width = Math.Max(this.chart.ActualWidth / this.duration, 0.5);
    bar.HorizontalAlignment = HorizontalAlignment.Left;

    // Offset proportionally to the timestamp's position within the total duration.
    double fractionOfDuration = (double)videoFrameTimestampInSeconds / this.duration;
    bar.Margin = new Thickness { Left = fractionOfDuration * this.chart.ActualWidth };

    this.chart.Children.Add(bar);

    if (analysisResult != null)
    {
        this.AddFlyoutToElement(bar, analysisResult);
    }
}
/// <summary>
/// Processes one captured frame: detects faces and their attributes, or resets the
/// cached samples when no frame is available. Always releases the processing gate.
/// </summary>
/// <param name="e">The captured frame to analyze; null means "no frame" and resets state.</param>
/// <remarks>
/// A large block of commented-out attendance-tracking code (per-employee dedup and
/// list maintenance) was removed here; recover it from source control if needed.
/// </remarks>
private async Task ProcessCameraCapture(ImageAnalyzer e)
{
    // No frame: clear all cached samples and the debug text, then bail out.
    if (e == null)
    {
        this.lastDetectedFaceSample = null;
        this.lastIdentifiedPersonSample = null;
        this.lastSimilarPersistedFaceSample = null;
        this.debugText.Text = "";
        this.isProcessingPhoto = false;
        return;
    }

    // Detect faces and their attributes for this frame.
    await this.DetectFaceAttributesAsync(e);

    // Allow the next frame to be processed.
    this.isProcessingPhoto = false;
}
/// <summary>
/// Raises the ImageCaptured event with the captured image, if anyone is subscribed.
/// </summary>
/// <param name="imageWithFace">The captured image containing a face.</param>
private void OnImageCaptured(ImageAnalyzer imageWithFace)
{
    // Snapshot the delegate so an unsubscribe between the null check and the call is safe.
    var subscribers = this.ImageCaptured;
    if (subscribers != null)
    {
        subscribers(this, imageWithFace);
    }
}
/// <summary>
/// Handles an image captured for training: dismisses the capture flyout first so
/// the UI responds immediately, then runs the training-image workflow.
/// </summary>
/// <param name="sender">The camera control that raised the event (unused).</param>
/// <param name="e">The captured, analyzable image.</param>
private async void OnCameraImageCaptured(object sender, ImageAnalyzer e)
{
    cameraCaptureFlyout.Hide();

    await HandleTrainingImageCapture(e);
}
/// <summary>
/// Click handler: analyzes the captured photo for driver attention (head pose via
/// the Face service) and phone usage (via the Vision caption), updating the labels
/// with red/green coloring and Spanish user-facing messages.
/// </summary>
/// <param name="sender">The clicked button (unused).</param>
/// <param name="e">Event args (unused).</param>
async void btnAnalizar_Clicked(object sender, EventArgs e)
{
    if (foto != null)
    {
        try
        {
            Loading(true);

            // Phase 1 - Face: head pose above the threshold means the driver is looking away.
            var rostro = await ServicioFace.DetectarRostro(foto);
            var frente = ImageAnalyzer.AnalizarPostura(rostro);
            txtFrente.Text = frente.ToString("N2");
            if (frente > Constantes.LookingAwayAngleThreshold)
            {
                txtFrente.TextColor = Color.Red;
                txtAnalisisFrente.TextColor = Color.Red;
                txtAnalisisFrente.Text = "Mire al frente se va a matar";
            }
            else
            {
                txtFrente.TextColor = Color.Green;
                txtAnalisisFrente.TextColor = Color.Green;
                txtAnalisisFrente.Text = "OK";
            }

            // Phase 2 - Vision: use the image caption to detect phone usage.
            var descripcion = await ServicioVision.DescribirImagen(foto);
            // NOTE(review): 'analisis' is never used — confirm whether the extra call is needed.
            var analisis = await ServicioVision.AnalizarImagen(foto);
            if (descripcion.Description.Captions.Length > 0)
            {
                var distraccion = descripcion.Description.Captions[0].Text;
                if (distraccion.Contains("phone"))
                {
                    txtCelular.Text = "SI";
                    txtCelular.TextColor = Color.Red;
                    txtAnalisisCelular.TextColor = Color.Red;
                    txtAnalisisCelular.Text = "¡El celular al bolante MATAAAAAAAAAAA......!";
                }
                else
                {
                    txtCelular.Text = "NO";
                    txtCelular.TextColor = Color.Green;
                    txtAnalisisCelular.TextColor = Color.Green;
                    txtAnalisisCelular.Text = "OK";
                }
            }
        }
        catch (Exception ex)
        {
            await DisplayAlert("Error", "Excepción: " + ex.Message, "OK");
        }
        finally
        {
            Loading(false);
        }
    }
    else
    {
        await DisplayAlert("Error", "Debes tomar la fotografía", "OK");
    }
}
/// <summary>
/// Renders the Computer Vision analysis results (tags, description,
/// celebrities, colors) and uploads the image plus a short text report to the
/// crime-reporting endpoint.
/// Fixes over the original: it created two HttpClient instances per call,
/// never disposed the first one, and blocked with
/// <c>ReadAsStringAsync().Result</c> inside an async method (deadlock risk on
/// the UI thread). A single client is now reused and the read is awaited.
/// NOTE: async void is tolerated only because this runs as a fire-and-forget
/// UI update handler.
/// </summary>
private async void UpdateResults(ImageAnalyzer img)
{
    ProcessedData pd = new ProcessedData();
    pd.Location = "Area 1";

    // Tags grid.
    if (img.AnalysisResult.Tags == null || !img.AnalysisResult.Tags.Any())
    {
        this.tagsGridView.ItemsSource = new[] { new { Name = "No tags" } };
    }
    else
    {
        this.tagsGridView.ItemsSource = img.AnalysisResult.Tags.Select(t => new { Confidence = string.Format("({0}%)", Math.Round(t.Confidence * 100)), Name = t.Name });
    }

    // Description grid; the top caption doubles as the report text.
    if (img.AnalysisResult.Description == null || !img.AnalysisResult.Description.Captions.Any(d => d.Confidence >= 0.2))
    {
        this.descriptionGridView.ItemsSource = new[] { new { Description = "Not sure what that is" } };
        pd.Report = "Not sure what that is";
    }
    else
    {
        this.descriptionGridView.ItemsSource = img.AnalysisResult.Description.Captions.Select(d => new { Confidence = string.Format("({0}%)", Math.Round(d.Confidence * 100)), Description = d.Text });
        pd.Report = img.AnalysisResult.Description.Captions[0].Text;
    }

    // Celebrities.
    var celebNames = this.GetCelebrityNames(img);
    if (celebNames == null || !celebNames.Any())
    {
        this.celebritiesTextBlock.Text = "None";
    }
    else
    {
        this.celebritiesTextBlock.Text = string.Join(", ", celebNames.OrderBy(name => name));
    }

    // Color info.
    if (img.AnalysisResult.Color == null)
    {
        this.colorInfoListView.ItemsSource = new[] { new { Description = "Not available" } };
    }
    else
    {
        this.colorInfoListView.ItemsSource = new[]
        {
            new { Description = "Dominant background color:", Colors = new string[] { img.AnalysisResult.Color.DominantColorBackground } },
            new { Description = "Dominant foreground color:", Colors = new string[] { img.AnalysisResult.Color.DominantColorForeground } },
            new { Description = "Dominant colors:", Colors = img.AnalysisResult.Color.DominantColors },
            new { Description = "Accent color:", Colors = new string[] { "#" + img.AnalysisResult.Color.AccentColor } }
        };
    }
    this.ocrToggle.IsEnabled = true;

    // One client handles both the optional download and the upload.
    using (HttpClient httpClient = new HttpClient())
    {
        if (img.ImageUrl != null)
        {
            // Re-download the source image so it can be uploaded with the report.
            pd.Img = await httpClient.GetByteArrayAsync(img.ImageUrl);
        }
        else if (img.Data != null)
        {
            pd.Img = img.Data;
        }

        using (MultipartFormDataContent form = new MultipartFormDataContent())
        {
            form.Add(new ByteArrayContent(pd.Img, 0, pd.Img.Length), "img", "image.jpg");
            form.Add(new StringContent(pd.Location), "location");
            form.Add(new StringContent(pd.Report), "report");
            HttpResponseMessage response = await httpClient.PostAsync("http://192.168.43.46:8000/api/crime/uploadCrime", form);
            //HttpResponseMessage response = await httpClient.PostAsync("https://requestb.in/100adnv1", form);
            response.EnsureSuccessStatusCode();
            string sd = await response.Content.ReadAsStringAsync();
        }
    }
}
/// <summary>
/// Refreshes the analysis panes (tags, description, celebrities, landmarks,
/// colors) from the image's Computer Vision result, filtering out gender
/// related entries when ShowAgeAndGender is disabled, then enables the OCR
/// and object-detection toggles.
/// </summary>
private void UpdateResults(ImageAnalyzer img)
{
    var analysis = img.AnalysisResult;

    // Tags section (anonymous property names drive the XAML bindings).
    bool hasTags = analysis.Tags != null && analysis.Tags.Any();
    if (!hasTags)
    {
        this.tagsGridView.ItemsSource = new[] { new { Name = "No tags" } };
    }
    else
    {
        var tagItems = analysis.Tags.Select(tag => new { Confidence = string.Format("({0}%)", Math.Round(tag.Confidence * 100)), Name = tag.Name });
        if (!ShowAgeAndGender)
        {
            tagItems = tagItems.Where(item => !Util.ContainsGenderRelatedKeyword(item.Name));
        }
        this.tagsGridView.ItemsSource = tagItems;
    }

    // Description section: only captions with at least 20% confidence count.
    bool hasConfidentCaption = analysis.Description != null && analysis.Description.Captions.Any(caption => caption.Confidence >= 0.2);
    if (!hasConfidentCaption)
    {
        this.descriptionGridView.ItemsSource = new[] { new { Description = "Not sure what that is" } };
    }
    else
    {
        var captionItems = analysis.Description.Captions.Select(caption => new { Confidence = string.Format("({0}%)", Math.Round(caption.Confidence * 100)), Description = caption.Text });
        if (!ShowAgeAndGender)
        {
            captionItems = captionItems.Where(item => !Util.ContainsGenderRelatedKeyword(item.Description));
        }
        if (captionItems.Any())
        {
            this.descriptionGridView.ItemsSource = captionItems;
        }
        else
        {
            this.descriptionGridView.ItemsSource = new[] { new { Description = "Please enable Age/Gender prediction in the Settings Page to see the results" } };
        }
    }

    // Celebrities, alphabetically.
    var celebNames = this.GetCelebrityNames(img);
    this.celebritiesTextBlock.Text = (celebNames == null || !celebNames.Any())
        ? "None"
        : string.Join(", ", celebNames.OrderBy(name => name));

    // Landmarks, alphabetical and de-duplicated.
    var landmarkNames = this.GetLandmarkNames(img);
    this.landmarksTextBlock.Text = (landmarkNames == null || !landmarkNames.Any())
        ? "None"
        : string.Join(", ", landmarkNames.OrderBy(name => name).Distinct());

    // Color section.
    if (analysis.Color == null)
    {
        this.colorInfoListView.ItemsSource = new[] { new { Description = "Not available" } };
    }
    else
    {
        this.colorInfoListView.ItemsSource = new[]
        {
            new { Description = "Dominant background color:", Colors = new string[] { analysis.Color.DominantColorBackground } },
            new { Description = "Dominant foreground color:", Colors = new string[] { analysis.Color.DominantColorForeground } },
            new { Description = "Dominant colors:", Colors = analysis.Color.DominantColors?.ToArray() },
            new { Description = "Accent color:", Colors = new string[] { "#" + analysis.Color.AccentColor } }
        };
    }

    this.ocrToggle.IsEnabled = true;
    this.objectDetectionToggle.IsEnabled = true;
}
private void OnCameraPhotoCaptured(object sender, ImageAnalyzer img) { this.visualAlertBuilderWizardControl.AddNewImage(img); }
async void ProcessingImage(NSData uIImage) { lock (processingobjlocker) { if (isAPIprocessing) { return; } isAPIprocessing = true; } cancellationTokenSource?.Cancel(); cancellationTokenSource = new CancellationTokenSource(); cancellationTokenSource.Token.ThrowIfCancellationRequested(); //UIImage uIImage = null; //DispatchQueue.MainQueue.DispatchSync(() => { // uIImage = ivPictureTaken.Image; //}); try { if (task != null && (task.Status == TaskStatus.Running || task.Status == TaskStatus.WaitingToRun || task.Status == TaskStatus.WaitingForActivation)) { Console.WriteLine("Task has attempted to start while already running"); } else { Console.WriteLine("running api face recognition: "); task = await Task.Factory.StartNew(async() => { //await Task.Delay(10000); //await Task.Delay(10000); //await Task.Delay(10000); //UIImage uIImage = UIImage.FromFile(ruta); //using (uIImage) { ImageAnalyzer imageAnalyzer = new ImageAnalyzer(() => Task.FromResult <Stream>(uIImage.AsStream()), null); await LiveCamHelper.ProcessCameraCapture(imageAnalyzer).ConfigureAwait(false); //} return(true); }, cancellationTokenSource.Token, TaskCreationOptions.None, TaskScheduler.Default).ConfigureAwait(false);; await task; } } catch (Exception e) { Console.WriteLine("error api face recogniion "); } finally { //processingFaceDetection = false; //lock (lockerobj) { await Task.Delay(2000); processingFaceDetection = false; isAPIprocessing = false; setupAVFoundationFaceDetection(); } Console.WriteLine("finished processing "); }
private async void OnConfirmImportButtonClicked(object sender, RoutedEventArgs e) { this.addPeopleInBatchesFlyout.Hide(); this.commandBar.IsOpen = false; this.progressControl.IsActive = true; try { // UWP TextBox: new line is a '\r' symbol instead '\r\n' string[] names = new string[] { }; if (!string.IsNullOrEmpty(this.importNamesTextBox?.Text)) { string newLineSymbol = this.importNamesTextBox.Text.Contains(Environment.NewLine) ? Environment.NewLine : "\r"; names = this.importNamesTextBox.Text.Split(newLineSymbol); } foreach (var name in names) { string personName = Util.CapitalizeString(name.Trim()); if (string.IsNullOrEmpty(personName) || this.PersonsInCurrentGroup.Any(p => p.Name == personName)) { continue; } Person newPerson = await FaceServiceHelper.CreatePersonAsync(this.CurrentPersonGroup.PersonGroupId, personName); IEnumerable <string> faceUrls = await BingSearchHelper.GetImageSearchResults(string.Format("{0} {1} {2}", this.importImageSearchKeywordPrefix.Text, name, this.importImageSearchKeywordSufix.Text), count : 2); foreach (var url in faceUrls) { try { ImageAnalyzer imageWithFace = new ImageAnalyzer(url); await imageWithFace.DetectFacesAsync(); if (imageWithFace.DetectedFaces.Count() == 1) { await FaceServiceHelper.AddPersonFaceFromUrlAsync(this.CurrentPersonGroup.PersonGroupId, newPerson.PersonId, imageWithFace.ImageUrl, imageWithFace.ImageUrl, imageWithFace.DetectedFaces.First().FaceRectangle); } } catch (Exception) { // Ignore errors with any particular image and continue } // Force a delay to reduce the chance of hitting API call rate limits await Task.Delay(250); } this.needsTraining = true; this.PersonsInCurrentGroup.Add(newPerson); } } catch (Exception ex) { await Util.GenericApiCallExceptionHandler(ex, "Failure during batch processing"); } this.progressControl.IsActive = false; }
/// <summary>
/// Android camera callback: saves the captured JPEG, rotates it according to
/// its EXIF orientation, re-saves it, then runs the Face / Vision analyses
/// and updates the driver-monitoring UI (head pose, mouth, eyes, phone use).
/// </summary>
async void Camera.IPictureCallback.OnPictureTaken(byte[] data, Android.Hardware.Camera camera)
{
    File pictureFile = getOutputMediaFile();
    if (pictureFile == null)
    {
        return;
    }
    try
    {
        // Persist the raw capture first so the EXIF orientation can be read back.
        FileOutputStream fos = new FileOutputStream(pictureFile);
        fos.Write(data);
        fos.Close();
        ExifInterface ei = new ExifInterface(pictureFile.AbsolutePath);
        var orientation = (Android.Media.Orientation)ei.GetAttributeInt(ExifInterface.TagOrientation, (int)Android.Media.Orientation.Undefined);
        Android.Graphics.Bitmap rotatedBitmap = null;
        Android.Graphics.Bitmap bitmap = LoadFromFile(pictureFile.AbsolutePath);
        // NOTE(review): each case rotates 90° less than the EXIF tag suggests
        // (Rotate90 -> 0°, Normal/default -> 270°). This may compensate for the
        // camera's physical mounting — TODO confirm on device.
        switch (orientation)
        {
            case Android.Media.Orientation.Rotate90:
                rotatedBitmap = rotateImage(bitmap, 0);
                break;
            case Android.Media.Orientation.Rotate180:
                rotatedBitmap = rotateImage(bitmap, 90);
                break;
            case Android.Media.Orientation.Rotate270:
                rotatedBitmap = rotateImage(bitmap, 180);
                break;
            case Android.Media.Orientation.Normal:
            default:
                rotatedBitmap = rotateImage(bitmap, 270);
                break;
        }
        // Overwrite the file with the rotated image and analyze those bytes.
        var data2 = bitmaptoByte(rotatedBitmap);
        FileOutputStream fos2 = new FileOutputStream(pictureFile);
        fos2.Write(data2);
        fos2.Close();
        var rostro = await ServicioFace.DetectarRostro(data2);
        var descripcion = await ServicioVision.DescribirImagen(data2);
        if (rostro != null)
        {
            RunOnUiThread(() =>
            {
                // Head pose: warn when the driver is looking away.
                var frente = ImageAnalyzer.AnalizarPostura(rostro);
                TextView txtFrente = FindViewById <TextView>(Resource.Id.txtFrente);
                TextView txtAnalisisFrente = FindViewById <TextView>(Resource.Id.txtAnalisisFrente);
                txtFrente.Text = frente.ToString("N2");
                if (frente > Constantes.LookingAwayAngleThreshold)
                {
                    txtFrente.SetTextColor(Android.Graphics.Color.Red);
                    txtAnalisisFrente.SetTextColor(Android.Graphics.Color.Red);
                    txtAnalisisFrente.Text = "No estás mirando al frente";
                }
                else
                {
                    txtFrente.SetTextColor(Android.Graphics.Color.Green);
                    txtAnalisisFrente.SetTextColor(Android.Graphics.Color.Green);
                    txtAnalisisFrente.Text = "OK";
                }
                // Mouth aperture: possible yawning.
                var boca = ImageAnalyzer.AnalizarBoca(rostro);
                TextView txtBoca = FindViewById <TextView>(Resource.Id.txtBoca);
                TextView txtAnalisisBoca = FindViewById <TextView>(Resource.Id.txtAnalisisBoca);
                txtBoca.Text = boca.ToString("N2");
                if (boca > Constantes.YawningApertureThreshold)
                {
                    txtBoca.SetTextColor(Android.Graphics.Color.Red);
                    txtAnalisisBoca.SetTextColor(Android.Graphics.Color.Red);
                    txtAnalisisBoca.Text = "Posiblemente está bostezando";
                }
                else
                {
                    txtBoca.SetTextColor(Android.Graphics.Color.Green);
                    txtAnalisisBoca.SetTextColor(Android.Graphics.Color.Green);
                    txtAnalisisBoca.Text = "OK";
                }
                // Eye aperture: possible sleeping (note: low value triggers alarm).
                var ojos = ImageAnalyzer.AnalizarOjos(rostro);
                TextView txtOjos = FindViewById <TextView>(Resource.Id.txtOjos);
                TextView txtAnalisisOjos = FindViewById <TextView>(Resource.Id.txtAnalisisOjos);
                txtOjos.Text = ojos.ToString("N2");
                if (ojos < Constantes.SleepingApertureThreshold)
                {
                    txtOjos.SetTextColor(Android.Graphics.Color.Red);
                    txtAnalisisOjos.SetTextColor(Android.Graphics.Color.Red);
                    txtAnalisisOjos.Text = "¡Está dormido!";
                }
                else
                {
                    txtOjos.SetTextColor(Android.Graphics.Color.Green);
                    txtAnalisisOjos.SetTextColor(Android.Graphics.Color.Green);
                    txtAnalisisOjos.Text = "OK";
                }
                // Caption heuristic: "phone" in the top caption => phone usage.
                if (descripcion.Description.Captions.Length > 0)
                {
                    var distraccion = descripcion.Description.Captions[0].Text;
                    TextView txtCelular = FindViewById <TextView>(Resource.Id.txtCelular);
                    TextView txtAnalisisCelular = FindViewById <TextView>(Resource.Id.txtAnalisisCelular);
                    if (distraccion.Contains("phone"))
                    {
                        txtCelular.Text = "SI";
                        txtCelular.SetTextColor(Android.Graphics.Color.Red);
                        txtAnalisisCelular.SetTextColor(Android.Graphics.Color.Red);
                        txtAnalisisCelular.Text = "¡Está usando el teléfono móvil!";
                    }
                    else
                    {
                        txtCelular.Text = "NO";
                        txtCelular.SetTextColor(Android.Graphics.Color.Green);
                        txtAnalisisCelular.SetTextColor(Android.Graphics.Color.Green);
                        txtAnalisisCelular.Text = "OK";
                    }
                }
            });
        }
    }
    catch (FileNotFoundException e)
    {
        // NOTE(review): swallowed — the UI is never told the capture failed.
    }
    catch (IOException e)
    {
        // NOTE(review): swallowed as above.
    }
    finally
    {
        isAnalyzing = false;
    }
}
/// <summary>
/// Detects faces in the image bound to DataContext and draws a border over
/// each (scaled from image pixels to the rendered control size). When
/// PerformRecognition is on, identifies the faces and annotates each border
/// with name, gender, age and confidence.
/// </summary>
private async Task DetectAndShowFaceBorders()
{
    this.progressIndicator.IsActive = true;
    // Remove previous overlays but keep the Image control itself.
    foreach (var child in this.hostGrid.Children.Where(c => !(c is Image)).ToArray())
    {
        this.hostGrid.Children.Remove(child);
    }
    ImageAnalyzer imageWithFace = this.DataContext as ImageAnalyzer;
    if (imageWithFace != null)
    {
        // Detection is cached on the analyzer; only run it once.
        if (imageWithFace.DetectedFaces == null)
        {
            await imageWithFace.DetectFacesAsync(detectFaceAttributes : this.DetectFaceAttributes);
        }
        // Scale factors from source-image pixels to the rendered control.
        double renderedImageXTransform = this.imageControl.RenderSize.Width / this.bitmapImage.PixelWidth;
        double renderedImageYTransform = this.imageControl.RenderSize.Height / this.bitmapImage.PixelHeight;
        foreach (Face face in imageWithFace.DetectedFaces)
        {
            // Tag carries the face id so the border can be found again below.
            FaceIdentificationBorder faceUI = new FaceIdentificationBorder()
            {
                Tag = face.FaceId,
            };
            // The extra offset accounts for the image being centered in the host.
            faceUI.Margin = new Thickness((face.FaceRectangle.Left * renderedImageXTransform) + ((this.ActualWidth - this.imageControl.RenderSize.Width) / 2), (face.FaceRectangle.Top * renderedImageYTransform) + ((this.ActualHeight - this.imageControl.RenderSize.Height) / 2), 0, 0);
            faceUI.BalloonBackground = this.BalloonBackground;
            faceUI.BalloonForeground = this.BalloonForeground;
            faceUI.ShowFaceRectangle(face.FaceRectangle.Width * renderedImageXTransform, face.FaceRectangle.Height * renderedImageYTransform);
            this.hostGrid.Children.Add(faceUI);
            if (!this.ShowMultipleFaces)
            {
                break;
            }
        }
        if (this.PerformRecognition)
        {
            // Identification is also cached; only run it once.
            if (imageWithFace.IdentifiedPersons == null)
            {
                await imageWithFace.IdentifyFacesAsync();
            }
            if (this.ShowRecognitionResults)
            {
                foreach (Face face in imageWithFace.DetectedFaces)
                {
                    // Get the border for the associated face id
                    FaceIdentificationBorder faceUI = (FaceIdentificationBorder)this.hostGrid.Children.FirstOrDefault(e => e is FaceIdentificationBorder && (Guid)(e as FaceIdentificationBorder).Tag == face.FaceId);
                    if (faceUI != null)
                    {
                        IdentifiedPerson faceIdIdentification = imageWithFace.IdentifiedPersons.FirstOrDefault(p => p.FaceId == face.FaceId);
                        // Fall back to null/0 when attributes or identification are unavailable.
                        string name = this.DetectFaceAttributes && faceIdIdentification != null ? faceIdIdentification.Person.Name : null;
                        string gender = this.DetectFaceAttributes ? face.FaceAttributes.Gender : null;
                        double age = this.DetectFaceAttributes ? face.FaceAttributes.Age : 0;
                        double confidence = this.DetectFaceAttributes && faceIdIdentification != null ? faceIdIdentification.Confidence : 0;
                        faceUI.ShowIdentificationData(age, gender, (uint)Math.Round(confidence * 100), name);
                    }
                }
            }
        }
    }
    this.progressIndicator.IsActive = false;
}
private async Task DetectFacesAsync(ImageAnalyzer e) { await e.DetectFacesAsync(detectFaceAttributes : true); }
/// <summary>
/// Full per-frame pipeline: runs emotion and face-attribute detection in
/// parallel, draws frame-averaged emotion scores on the timeline, then runs
/// identification plus similar-face matching in parallel, caching each result
/// set for the demographics update. Reports end-to-end latency in debug text.
/// </summary>
private async Task ProcessCameraCapture(ImageAnalyzer e)
{
    if (e == null)
    {
        // No frame: drop all cached samples so stale data is not displayed.
        this.lastDetectedFaceSample = null;
        this.lastIdentifiedPersonSample = null;
        this.lastSimilarPersistedFaceSample = null;
        this.lastEmotionSample = null;
        this.debugText.Text = "";
        this.isProcessingPhoto = false;
        return;
    }
    DateTime start = DateTime.Now;
    // Compute Emotion, Age and Gender
    await Task.WhenAll(e.DetectEmotionAsync(), e.DetectFacesAsync(detectFaceAttributes: true));
    if (!e.DetectedEmotion.Any())
    {
        this.lastEmotionSample = null;
        this.ShowTimelineFeedbackForNoFaces();
    }
    else
    {
        this.lastEmotionSample = e.DetectedEmotion;
        // Average each emotion score across every face found in the frame.
        EmotionScores averageScores = new EmotionScores
        {
            Happiness = e.DetectedEmotion.Average(em => em.Scores.Happiness),
            Anger = e.DetectedEmotion.Average(em => em.Scores.Anger),
            Sadness = e.DetectedEmotion.Average(em => em.Scores.Sadness),
            Contempt = e.DetectedEmotion.Average(em => em.Scores.Contempt),
            Disgust = e.DetectedEmotion.Average(em => em.Scores.Disgust),
            Neutral = e.DetectedEmotion.Average(em => em.Scores.Neutral),
            Fear = e.DetectedEmotion.Average(em => em.Scores.Fear),
            Surprise = e.DetectedEmotion.Average(em => em.Scores.Surprise)
        };
        this.emotionDataTimelineControl.DrawEmotionData(averageScores);
    }
    if (e.DetectedFaces == null || !e.DetectedFaces.Any())
    {
        this.lastDetectedFaceSample = null;
    }
    else
    {
        this.lastDetectedFaceSample = e.DetectedFaces;
    }
    // Compute Face Identification and Unique Face Ids
    await Task.WhenAll(e.IdentifyFacesAsync(), e.FindSimilarPersistedFacesAsync());
    if (!e.IdentifiedPersons.Any())
    {
        this.lastIdentifiedPersonSample = null;
    }
    else
    {
        // Pair every detected face with its identification (null when unknown).
        this.lastIdentifiedPersonSample = e.DetectedFaces.Select(f => new Tuple <Face, IdentifiedPerson>(f, e.IdentifiedPersons.FirstOrDefault(p => p.FaceId == f.FaceId)));
    }
    if (!e.SimilarFaceMatches.Any())
    {
        this.lastSimilarPersistedFaceSample = null;
    }
    else
    {
        this.lastSimilarPersistedFaceSample = e.SimilarFaceMatches;
    }
    this.UpdateDemographics(e);
    this.debugText.Text = string.Format("Latency: {0}ms", (int)(DateTime.Now - start).TotalMilliseconds);
    this.isProcessingPhoto = false;
}
public async Task <ImageAnalyzer> CaptureFrameAsync(CurrentFrameModel currentFrame) { try { using (Stream stream = currentFrame.DataCurrent.AsBuffer().AsStream()) { stream.Position = 0; var decoder = await BitmapDecoder.CreateAsync(stream.AsRandomAccessStream()); var softwareBitmap = await decoder.GetSoftwareBitmapAsync(); var detector = await FaceDetector.CreateAsync(); using (SoftwareBitmap convertedBitmap = SoftwareBitmap.Convert(softwareBitmap, BitmapPixelFormat.Gray8)) { faces = await detector.DetectFacesAsync(convertedBitmap, SearchArea); convertedBitmap.Dispose(); } this.NumFacesOnLastFrame = faces.Count(); var previewFrameSize = new Windows.Foundation.Size(softwareBitmap.PixelWidth, softwareBitmap.PixelHeight); this.ShowFaceTrackingVisualization(previewFrameSize, faces); softwareBitmap.Dispose(); stream.Dispose(); } //Không có face thì không phân tích if (this.NumFacesOnLastFrame == 0) { faces = null; CoreUtil.FreeMemory(); return(null); } //Hai khung hình có số lượng khung mật giống nhau quá nửa thì không phân tích nữa if (this.AreFacesStill(this.detectedFacesFromPreviousFrame, faces)) { faces = null; CoreUtil.FreeMemory(); return(null); } this.detectedFacesFromPreviousFrame = faces; imageWithFace = new ImageAnalyzer(currentFrame.DataCurrent); imageWithFace.CameraIPAdres = CameraIPAdres; imageWithFace.imageWidth = InfoSettingFix.FixImageWidth; imageWithFace.imageHeight = InfoSettingFix.FixImageHeight; imageWithFace.CaptureTime = currentFrame.CaptureTime; imageWithFace.ListDetectedFaceJson = JsonConvert.SerializeObject(faces.Select(r => r.FaceBox).ToList()); faces = null; CoreUtil.FreeMemory(); return(imageWithFace); } catch (Exception ex) { CoreUtil.FreeMemory(); return(null); } }
/// <summary>
/// Per-frame pipeline for the event-hub scenario: detects faces then emotions
/// (with posted rectangles), identifies or registers each person, posts one
/// summarized message per face to the event hub, and refreshes the cached UI
/// samples. Any failure is surfaced via the debug text.
/// </summary>
private async Task ProcessCameraCapture(ImageAnalyzer e)
{
    if (e == null)
    {
        // No frame: drop all cached samples so stale data is not displayed.
        this.lastDetectedFaceSample = null;
        this.lastIdentifiedPersonSample = null;
        this.lastSimilarPersistedFaceSample = null;
        this.lastEmotionSample = null;
        this.debugText.Text = "";
        this.isProcessingPhoto = false;
        return;
    }
    // NOTE(review): start is captured but never read — latency reporting
    // appears to have been removed.
    DateTime start = DateTime.Now;
    // Compute Emotion, Age, Gender and Idetify person
    try
    {
        await e.DetectFacesAsync(detectFaceAttributes : true);
        //It is cheaper to detect emotions with rectangles posted, so we do it this way
        await e.DetectEmotionWithRectanglesAsync();
        //Creating list of faces with summarized info for event hub message
        List <FaceSendInfo> facesToSend;
        if (SettingsHelper.Instance.ComplexIdentification)
        {
            facesToSend = await e.IdentifyOrAddPersonWithEmotionsAsync(SettingsHelper.Instance.GroupName, identifiedPersonsIdCollection);
        }
        else
        {
            //Simple face identification
            facesToSend = await e.FindSimilarPersonWithEmotion();
        }
        //Util.SendAMQPMessage(JsonConvert.SerializeObject(dfwes));
        //Util.SendMessageToEventHub(JsonConvert.SerializeObject(dfwes));
        //sending onemessage per face
        foreach (var fts in facesToSend)
        {
            await Util.CallEventHubHttp(JsonConvert.SerializeObject(fts));
        }
        //await Util.CallEventHubHttp(JsonConvert.SerializeObject(facesToSend));
        //Updating Emotions UI, No need it final version
        if (!e.DetectedEmotion.Any())
        {
            this.lastEmotionSample = null;
            this.ShowTimelineFeedbackForNoFaces();
        }
        else
        {
            this.lastEmotionSample = e.DetectedEmotion;
            //Scores averageScores = new Scores
            //{
            //    Happiness = e.DetectedEmotion.Average(em => em.Scores.Happiness),
            //    Anger = e.DetectedEmotion.Average(em => em.Scores.Anger),
            //    Sadness = e.DetectedEmotion.Average(em => em.Scores.Sadness),
            //    Contempt = e.DetectedEmotion.Average(em => em.Scores.Contempt),
            //    Disgust = e.DetectedEmotion.Average(em => em.Scores.Disgust),
            //    Neutral = e.DetectedEmotion.Average(em => em.Scores.Neutral),
            //    Fear = e.DetectedEmotion.Average(em => em.Scores.Fear),
            //    Surprise = e.DetectedEmotion.Average(em => em.Scores.Surprise)
            //};
            //We do not want this info in this version
            //this.emotionDataTimelineControl.DrawEmotionData(averageScores);
        }
        if (e.DetectedFaces == null || !e.DetectedFaces.Any())
        {
            this.lastDetectedFaceSample = null;
        }
        else
        {
            this.lastDetectedFaceSample = e.DetectedFaces;
        }
        if (SettingsHelper.Instance.ComplexIdentification)
        {
            // Pair each identified person with its detected face for the UI.
            var list = new List <Tuple <Face, IdentifiedPerson> >();
            this.lastIdentifiedPersonSample = null;
            foreach (var ip in e.IdentifiedPersons)
            {
                list.Add(new Tuple <Face, IdentifiedPerson>(e.DetectedFaces.Where(fa => fa.FaceId == ip.FaceId).FirstOrDefault(), ip));
            }
            if (list.Any())
            {
                this.lastIdentifiedPersonSample = list;
            }
        }
        else
        {
            // NOTE(review): a null SimilarFaceMatches falls into the else-branch
            // and is assigned straight through — confirm that is intended.
            if (e.SimilarFaceMatches != null && !e.SimilarFaceMatches.Any())
            {
                this.lastSimilarPersistedFaceSample = null;
            }
            else
            {
                this.lastSimilarPersistedFaceSample = e.SimilarFaceMatches;
            }
        }
    }
    catch (Exception ex)
    {
        // Surface the failure in the debug text rather than crashing the loop.
        debugText.Text = ex.Message;
    }
    finally
    {
        this.isProcessingPhoto = false;
    }
    //this.UpdateDemographics(e);
    // NOTE(review): redundant — the flag was already cleared in finally above.
    this.isProcessingPhoto = false;
}
/// <summary>
/// Runs Computer Vision analysis (plus OCR when enabled) on the image bound
/// to DataContext and overlays the results: face rectangles with age/gender,
/// celebrity balloons beneath recognized faces, and OCR word boxes, rotating
/// the image to compensate for any detected text angle.
/// </summary>
private async Task DetectAndShowComputerVisionAnalysis()
{
    this.progressIndicator.IsActive = true;
    // Reset any OCR rotation from a previous pass.
    this.imageControl.RenderTransform = null;
    // Clear previous overlays, preserving the Image control itself.
    foreach (var child in this.hostGrid.Children.Where(c => !(c is Image)).ToArray())
    {
        this.hostGrid.Children.Remove(child);
    }
    ImageAnalyzer img = this.DataContext as ImageAnalyzer;
    if (img != null)
    {
        // Results are cached on the analyzer; only call the services when missing.
        if (this.PerformOCRAnalysis && img.OcrResults == null)
        {
            await Task.WhenAll(img.AnalyzeImageAsync(detectCelebrities: true), img.RecognizeTextAsync());
        }
        else if (img.AnalysisResult == null)
        {
            await img.AnalyzeImageAsync(detectCelebrities : true);
        }
        // Scale factors from source-image pixels to the rendered control.
        double renderedImageXTransform = this.imageControl.RenderSize.Width / this.bitmapImage.PixelWidth;
        double renderedImageYTransform = this.imageControl.RenderSize.Height / this.bitmapImage.PixelHeight;
        if (img.AnalysisResult.Faces != null)
        {
            foreach (Microsoft.ProjectOxford.Vision.Contract.Face face in img.AnalysisResult.Faces)
            {
                FaceIdentificationBorder faceUI = new FaceIdentificationBorder();
                // The extra offset accounts for the image being centered in the host.
                faceUI.Margin = new Thickness((face.FaceRectangle.Left * renderedImageXTransform) + ((this.ActualWidth - this.imageControl.RenderSize.Width) / 2), (face.FaceRectangle.Top * renderedImageYTransform) + ((this.ActualHeight - this.imageControl.RenderSize.Height) / 2), 0, 0);
                faceUI.BalloonBackground = this.BalloonBackground;
                faceUI.BalloonForeground = this.BalloonForeground;
                faceUI.ShowFaceRectangle(face.FaceRectangle.Width * renderedImageXTransform, face.FaceRectangle.Height * renderedImageYTransform);
                faceUI.ShowIdentificationData(face.Age, face.Gender, 0, null);
                this.hostGrid.Children.Add(faceUI);
                double celebRecoConfidence = 0;
                string celebRecoName;
                this.GetCelebrityInfoIfAvailable(img, face.FaceRectangle, out celebRecoName, out celebRecoConfidence);
                if (!string.IsNullOrEmpty(celebRecoName))
                {
                    Border celebUI = new Border
                    {
                        Child = new TextBlock
                        {
                            Text = string.Format("{0} ({1}%)", celebRecoName, (uint)Math.Round(celebRecoConfidence * 100)),
                            Foreground = this.BalloonForeground,
                            FontSize = 14
                        },
                        Background = this.BalloonBackground,
                        VerticalAlignment = VerticalAlignment.Top,
                        HorizontalAlignment = HorizontalAlignment.Left
                    };
                    // Center the balloon under the face once its rendered size is known.
                    celebUI.SizeChanged += (ev, ar) =>
                    {
                        celebUI.Margin = new Thickness(faceUI.Margin.Left - (celebUI.ActualWidth - face.FaceRectangle.Width * renderedImageXTransform) / 2, faceUI.Margin.Top + 2 + face.FaceRectangle.Height * renderedImageYTransform, 0, 0);
                    };
                    this.hostGrid.Children.Add(celebUI);
                }
            }
        }
        if (this.PerformOCRAnalysis && img.OcrResults.Regions != null)
        {
            // Counter-rotate the image so the OCR boxes line up with the text.
            if (img.OcrResults.TextAngle.HasValue)
            {
                this.imageControl.RenderTransform = new RotateTransform
                {
                    Angle = -img.OcrResults.TextAngle.Value,
                    CenterX = this.imageControl.RenderSize.Width / 2,
                    CenterY = this.imageControl.RenderSize.Height / 2
                };
            }
            foreach (Microsoft.ProjectOxford.Vision.Contract.Region ocrRegion in img.OcrResults.Regions)
            {
                foreach (var line in ocrRegion.Lines)
                {
                    foreach (var word in line.Words)
                    {
                        OCRBorder ocrUI = new OCRBorder();
                        ocrUI.Margin = new Thickness((word.Rectangle.Left * renderedImageXTransform) + ((this.ActualWidth - this.imageControl.RenderSize.Width) / 2), (word.Rectangle.Top * renderedImageYTransform) + ((this.ActualHeight - this.imageControl.RenderSize.Height) / 2), 0, 0);
                        ocrUI.SetData(word.Rectangle.Width * renderedImageXTransform, word.Rectangle.Height * renderedImageYTransform, word.Text);
                        this.hostGrid.Children.Add(ocrUI);
                    }
                }
            }
        }
    }
    this.progressIndicator.IsActive = false;
}