private static PointF[] convertLandmarkFormation(
    ref FaceLandmarks _landmarks, ref FaceRectangle _rectangle)
{
    PointF[] retLandmarks = new PointF[27]
    {
        convertPointFormation(_landmarks.EyebrowLeftOuter, _rectangle),
        convertPointFormation(_landmarks.EyebrowLeftInner, _rectangle),
        convertPointFormation(_landmarks.EyebrowRightOuter, _rectangle),
        convertPointFormation(_landmarks.EyebrowRightInner, _rectangle),
        convertPointFormation(_landmarks.EyeLeftOuter, _rectangle),
        convertPointFormation(_landmarks.EyeLeftTop, _rectangle),
        convertPointFormation(_landmarks.EyeLeftInner, _rectangle),
        convertPointFormation(_landmarks.EyeLeftBottom, _rectangle),
        convertPointFormation(_landmarks.PupilLeft, _rectangle),
        convertPointFormation(_landmarks.EyeRightOuter, _rectangle),
        convertPointFormation(_landmarks.EyeRightTop, _rectangle),
        convertPointFormation(_landmarks.EyeRightInner, _rectangle),
        convertPointFormation(_landmarks.EyeRightBottom, _rectangle),
        convertPointFormation(_landmarks.PupilRight, _rectangle),
        convertPointFormation(_landmarks.NoseRootLeft, _rectangle),
        convertPointFormation(_landmarks.NoseLeftAlarTop, _rectangle),
        convertPointFormation(_landmarks.NoseLeftAlarOutTip, _rectangle),
        convertPointFormation(_landmarks.NoseTip, _rectangle),
        convertPointFormation(_landmarks.NoseRightAlarOutTip, _rectangle),
        convertPointFormation(_landmarks.NoseRightAlarTop, _rectangle),
        convertPointFormation(_landmarks.NoseRootRight, _rectangle),
        convertPointFormation(_landmarks.MouthLeft, _rectangle),
        convertPointFormation(_landmarks.UpperLipTop, _rectangle),
        convertPointFormation(_landmarks.MouthRight, _rectangle),
        convertPointFormation(_landmarks.UnderLipBottom, _rectangle),
        convertPointFormation(_landmarks.UpperLipBottom, _rectangle),
        convertPointFormation(_landmarks.UnderLipTop, _rectangle),
    };

    return retLandmarks;
}
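// NOTE: convertLandmarkFormation relies on a convertPointFormation helper that is not
// part of this listing. A minimal sketch, assuming the landmark points are ProjectOxford
// FeatureCoordinate values and that the helper translates them into coordinates relative
// to the face rectangle's top-left corner:
private static PointF convertPointFormation(
    FeatureCoordinate _point, FaceRectangle _rectangle)
{
    // Hypothetical: shift the absolute landmark coordinates so they are
    // relative to the face rectangle's origin.
    return new PointF(
        (float)(_point.X - _rectangle.Left),
        (float)(_point.Y - _rectangle.Top));
}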
private static Stream zoom(FaceRectangle faceArea, Stream inputStream)
{
    var center = new Point(
        faceArea.Left + faceArea.Width / 2,
        faceArea.Top + faceArea.Height / 2);

    using (var sourceImage = Image.FromStream(inputStream))
    {
        if (SHOULD_DRAW_GREEN_RECT)
        {
            using (var graphics = Graphics.FromImage(sourceImage))
            {
                graphics.DrawRectangle(
                    Pens.Lime,
                    new Rectangle(faceArea.Left, faceArea.Top, faceArea.Width, faceArea.Height));
            }
        }

        var zoomArea = getZoomArea(center, sourceImage.Size);

        using (var targetImage = new Bitmap(zoomArea.Width, zoomArea.Height))
        {
            using (var graphics = Graphics.FromImage(targetImage))
            {
                graphics.DrawImage(
                    sourceImage,
                    new Rectangle(new Point(), zoomArea.Size),
                    zoomArea,
                    GraphicsUnit.Pixel);
            }

            using (var encoders = new EncoderParameters())
            {
                encoders.Param[0] = new EncoderParameter(System.Drawing.Imaging.Encoder.Quality, 85L);

                var outputStream = new MemoryStream();
                targetImage.Save(outputStream, _codecInfo, encoders);
                outputStream.Seek(0, SeekOrigin.Begin);
                return outputStream;
            }
        }
    }
}
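// NOTE: zoom calls a getZoomArea helper that is not shown here. A minimal sketch,
// assuming a fixed output size (the ZOOM_WIDTH and ZOOM_HEIGHT constants are
// hypothetical) centered on the face and clamped to the source image bounds:
private static Rectangle getZoomArea(Point center, Size imageSize)
{
    // Hypothetical constants; the real values are not part of this listing.
    const int ZOOM_WIDTH = 640;
    const int ZOOM_HEIGHT = 480;

    // Clamp the top-left corner so the zoom area stays inside the image.
    int left = Math.Max(0, Math.Min(center.X - ZOOM_WIDTH / 2, imageSize.Width - ZOOM_WIDTH));
    int top = Math.Max(0, Math.Min(center.Y - ZOOM_HEIGHT / 2, imageSize.Height - ZOOM_HEIGHT));

    return new Rectangle(
        left, top,
        Math.Min(ZOOM_WIDTH, imageSize.Width),
        Math.Min(ZOOM_HEIGHT, imageSize.Height));
}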
internal FaceImageDimensions(DetectedFace faceToInclude, double facePercentage, int imageHeight, int imageWidth)
{
    FaceRectangle faceRectangle = faceToInclude.FaceRectangle;
    int imageSize = Calculate100Size(
        faceRectangle.Height > faceRectangle.Width ? faceRectangle.Height : faceRectangle.Width,
        facePercentage);
    Calculate(faceRectangle, imageSize, imageHeight, imageWidth);
}
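// NOTE: the constructor above delegates to Calculate100Size and Calculate, neither of
// which is shown. A minimal sketch of Calculate100Size, under the assumption that it
// scales the larger face dimension up so the face occupies facePercentage percent of
// the output image:
private static int Calculate100Size(int faceSize, double facePercentage)
{
    // If the face should fill facePercentage% of the image, the full image
    // dimension is the face dimension scaled by 100 / facePercentage.
    return (int)Math.Round(faceSize * 100.0 / facePercentage);
}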
private void FillFaceEmotions(Recognition recognizedFace, FaceRectangle rectangle,
    IEnumerable<Microsoft.ProjectOxford.Common.Contract.Emotion> detectedEmotion)
{
    if (detectedEmotion == null)
    {
        return;
    }

    Microsoft.ProjectOxford.Common.Contract.Emotion emotion = detectedEmotion.FirstOrDefault(
        em => em.FaceRectangle.Left == rectangle.Left &&
              em.FaceRectangle.Top == rectangle.Top &&
              em.FaceRectangle.Width == rectangle.Width &&
              em.FaceRectangle.Height == rectangle.Height);

    if (emotion != null)
    {
        recognizedFace.Anger = emotion.Scores.Anger;
        recognizedFace.Contempt = emotion.Scores.Contempt;
        recognizedFace.Disgust = emotion.Scores.Disgust;
        recognizedFace.Fear = emotion.Scores.Fear;
        recognizedFace.Happiness = emotion.Scores.Happiness;
        recognizedFace.Neutral = emotion.Scores.Neutral;
        recognizedFace.Sadness = emotion.Scores.Sadness;
        recognizedFace.Surprise = emotion.Scores.Surprise;
    }
}
/// <summary>
/// Handles the face frame data arriving from the sensor
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void Reader_FaceFrameArrived(object sender, FaceFrameArrivedEventArgs e)
{
    using (FaceFrame faceFrame = e.FrameReference.AcquireFrame())
    {
        if (faceFrame != null)
        {
            // get the index of the face source from the face source array
            int index = this.GetFaceSourceIndex(faceFrame.FaceFrameSource);

            // check if this face frame has valid face frame results
            if (this.ValidateFaceBoxAndPoints(faceFrame.FaceFrameResult))
            {
                // store this face frame result to draw later
                this.faceFrameResults[index] = faceFrame.FaceFrameResult;

                // RectI stores edge coordinates, so width and height come from
                // Right - Left and Bottom - Top.
                RectI oldFace = faceFrame.FaceFrameResult.FaceBoundingBoxInColorSpace;
                FaceRectangle newFace = new FaceRectangle();
                newFace.Left = oldFace.Left;
                newFace.Top = oldFace.Top;
                newFace.Height = oldFace.Bottom - oldFace.Top;
                newFace.Width = oldFace.Right - oldFace.Left;
                DrawRect(newFace);
            }
            else
            {
                // indicates that the latest face frame result from this reader is invalid
                this.faceFrameResults[index] = null;
            }
        }
    }
}
/// <summary>
/// Adds face to the user.
/// </summary>
/// <returns>User face ID.</returns>
/// <param name="person">Person.</param>
/// <param name="imageBytes">Image bytes.</param>
/// <param name="faceRect">Face rectangle.</param>
public string AddFaceToUser(Person person, byte[] imageBytes, FaceRectangle faceRect)
{
    // create the user-group if needed
    if (userGroupId != initedGroupId)
    {
        GetOrGreateUserGroup();
    }

    if (userGroupId != initedGroupId)
    {
        return string.Empty;
    }

    if (faceManager != null && person != null && imageBytes != null)
    {
        PersonFace personFace = faceManager.AddFaceToPerson(
            userGroupId, person.personId, string.Empty, faceRect, imageBytes);

        if (personFace != null)
        {
            faceManager.TrainPersonGroup(userGroupId);
            return personFace.persistedFaceId;
        }
    }

    return string.Empty;
}
private void PositionRectangle(FaceRectangle faceRect)
{
    Width = faceRect.Width;
    Height = faceRect.Height;
    Left = faceRect.Left;
    Top = faceRect.Top;
}
public static Bitmap DrawRectOnBitmap(Bitmap mBitmap, FaceRectangle faceRectangle, string status)
{
    Bitmap bitmap = mBitmap.Copy(Bitmap.Config.Argb8888, true);
    Canvas canvas = new Canvas(bitmap);

    Paint paint = new Paint();
    paint.AntiAlias = true;
    paint.SetStyle(Paint.Style.Stroke);
    paint.Color = Color.White;
    paint.StrokeWidth = 12;

    canvas.DrawRect(
        faceRectangle.left,
        faceRectangle.top,
        faceRectangle.left + faceRectangle.width,
        faceRectangle.top + faceRectangle.height,
        paint);

    int cX = faceRectangle.left + faceRectangle.width;
    int cY = faceRectangle.top + faceRectangle.height;
    DrawTextBelowRect(canvas, 100, cX / 2 + cX / 5, cY + 100, Color.White, status);

    return bitmap;
}
/// <summary>
/// Function to add example faces to a given face list. Will loop through a folder and add all image files in that folder
/// </summary>
/// <param name="obj"></param>
private async void AddExampleFacesToList(object obj)
{
    string personGroupDirectory = Path.Combine(Environment.CurrentDirectory, "PersonGroup");
    string[] images = GetImageFiles(personGroupDirectory);

    try
    {
        foreach (string image in images)
        {
            using (Stream fileStream = File.OpenRead(image))
            {
                Face[] faces = await _faceServiceClient.DetectAsync(fileStream);
                FaceRectangle faceRectangle = faces[0].FaceRectangle;

                // DetectAsync consumes the stream, so rewind it before uploading again.
                fileStream.Seek(0, SeekOrigin.Begin);

                AddPersistedFaceResult addFacesResult = await _faceServiceClient.AddFaceToFaceListAsync(
                    FaceListName.ToLower(), fileStream, null, faceRectangle);
                UpdateFaceGuidsAsync();
            }
        }
    }
    catch (FaceAPIException ex)
    {
        Debug.WriteLine($"Failed to add faces to face list: {ex.ErrorMessage}");
    }
    catch (Exception ex)
    {
        Debug.WriteLine(ex.Message);
    }
}
public Task<AddPersistedFaceResult> AddFaceToFaceListAsync(string faceListId, Stream imageStream,
    string userData = null, FaceRectangle targetFace = null)
{
    return innerClient.AddFaceToFaceListAsync(faceListId, imageStream, userData, targetFace);
}
public async void DrawFaceRectangleStream(DetectedFace[] faceResult, InMemoryRandomAccessStream imageStream)
{
    ImageCanvas.Children.Clear();

    if (faceResult != null && faceResult.Length > 0)
    {
        BitmapDecoder decoder = await BitmapDecoder.CreateAsync(imageStream);
        double resizeFactorH = ImageCanvas.Height / decoder.PixelHeight;
        double resizeFactorW = ImageCanvas.Width / decoder.PixelWidth;

        foreach (var face in faceResult)
        {
            FaceRectangle faceRect = face.FaceRectangle;

            var rectangle1 = new Rectangle();
            Windows.UI.Color faceColor = Windows.UI.Color.FromArgb(50, 255, 255, 255);
            Windows.UI.Color borderColor = Windows.UI.Colors.Blue;
            rectangle1.Fill = new SolidColorBrush(faceColor);
            rectangle1.Width = faceRect.Width;
            rectangle1.Height = faceRect.Height;
            rectangle1.Stroke = new SolidColorBrush(borderColor);
            rectangle1.StrokeThickness = 1;
            rectangle1.RadiusX = 10;
            rectangle1.RadiusY = 10;

            ImageCanvas.Children.Add(rectangle1);
            Canvas.SetLeft(rectangle1, faceRect.Left);
            Canvas.SetTop(rectangle1, faceRect.Top);
        }
    }
}
public static Bitmap Denoise(Bitmap b)
{
    // First, get the text.
    var text = Steganographer.extractText(b);
    var textArray = text.Split();
    Console.WriteLine("Extracted text: " + text);

    // Parse the text for rectangles.
    var idx = 0;
    var n = Int32.Parse(textArray[idx++]);
    var faceRectangles = new FaceRectangle[n];

    for (int i = 0; i < n; i++)
    {
        var faceRectangle = new FaceRectangle();
        faceRectangle.Top = Int32.Parse(textArray[idx++]);
        faceRectangle.Height = Int32.Parse(textArray[idx++]);
        faceRectangle.Left = Int32.Parse(textArray[idx++]);
        faceRectangle.Width = Int32.Parse(textArray[idx++]);
        faceRectangles[i] = faceRectangle;
    }

    // Then, parse the text for the seed.
    int seed = Int32.Parse(textArray[idx++]);
    double intensity = Double.Parse(textArray[idx]);

    var denoiser = new FaceDenoiser(seed, b, intensity, faceRectangles);
    var decryptedB = denoiser.Denoise();
    return decryptedB;
}
private PictureEmotionData Parser(List<object> list)
{
    var pem = new PictureEmotionData();
    var faceSubject = FaceRectangle.Subjects();
    var emoSubject = EmotionScore.Subjects();

    foreach (object obj in list)
    {
        var dic = obj as Dictionary<string, object>;
        Assert.IsNotNull(dic);

        var set = pem.AddSet();
        var face = set.Key;
        var emo = set.Value;

        var fd = JsonParser<Dictionary<string, object>>.Parse(dic, "faceRectangle");
        var ed = JsonParser<Dictionary<string, object>>.Parse(dic, "scores");

        foreach (var fs in faceSubject)
        {
            face.Set(fs.Key, (int)JsonParser<System.Int64>.ParseObj(fd, fs.Value));
        }

        foreach (var es in emoSubject)
        {
            emo.Set(es.Key, JsonParser<double>.ParseObj(ed, es.Value));
        }

        Debug.Log("face.Get(FaceRectangle.VALUE.height): " + face.Get(FaceRectangle.VALUE.height));
        Debug.Log("emo.Get(EmotionScore.VALUE.anger): " + emo.Get(EmotionScore.VALUE.anger));
    }

    return pem;
}
private async Task<ImageAnalyzer> GetPrimaryFaceFromCameraCaptureAsync(ImageAnalyzer img)
{
    if (img == null)
    {
        return null;
    }

    await img.DetectFacesAsync();

    if (img.DetectedFaces == null || !img.DetectedFaces.Any())
    {
        return null;
    }

    FaceRectangle rect = img.DetectedFaces.First().FaceRectangle;
    double heightScaleFactor = 1.8;
    double widthScaleFactor = 1.8;
    Rectangle biggerRectangle = new Rectangle
    {
        Height = Math.Min((int)(rect.Height * heightScaleFactor), img.DecodedImageHeight),
        Width = Math.Min((int)(rect.Width * widthScaleFactor), img.DecodedImageWidth)
    };
    biggerRectangle.Left = Math.Max(0, rect.Left - (int)(rect.Width * ((widthScaleFactor - 1) / 2)));
    biggerRectangle.Top = Math.Max(0, rect.Top - (int)(rect.Height * ((heightScaleFactor - 1) / 1.4)));

    StorageFile tempFile = await ApplicationData.Current.TemporaryFolder.CreateFileAsync(
        "FaceRecoCameraCapture.jpg", CreationCollisionOption.GenerateUniqueName);

    await Util.CropBitmapAsync(img.GetImageStreamCallback, biggerRectangle, tempFile);

    return new ImageAnalyzer(tempFile.OpenStreamForReadAsync, tempFile.Path);
}
private void PopulateUI(Face[] faces)
{
    if (faces == null || faces.Length <= 0)
    {
        return;
    }

    var face = faces[0];
    var emotion = face.FaceAttributes.Emotion;
    var headPose = face.FaceAttributes.HeadPose;

    AgeLabel.Text = $"Age: {face.FaceAttributes.Age}";
    SmileLabel.Text = $"Smile: {face.FaceAttributes.Smile}";
    GenderLabel.Text = $"Gender: {face.FaceAttributes.Gender}";
    GlassesLabel.Text = $"Am I Wearing Glasses? {face.FaceAttributes.Glasses}";
    HeadPoseLabel.Text = $"HeadPose: Yaw: {headPose.Yaw}, Pitch: {headPose.Pitch}, Roll: {headPose.Roll}";

    var emotionsText = string.Empty;
    emotionsText += $"Anger: {emotion.Anger * 100}%\n";
    emotionsText += $"Fear: {emotion.Fear * 100}%\n";
    emotionsText += $"Happiness: {emotion.Happiness * 100}%\n";
    emotionsText += $"Sadness: {emotion.Sadness * 100}%\n";
    emotionsText += $"Surprise: {emotion.Surprise * 100}%";
    EmotionsLabel.Text = emotionsText;

    _faceRectangle = face.FaceRectangle;
    ImageCanvas.InvalidateSurface();
}
public override int GetHashCode()
{
    int r = (FaceRectangle == null) ? 0x33333333 : FaceRectangle.GetHashCode();
    int s = (Scores == null) ? 0xccccccc : Scores.GetHashCode();
    return r ^ s;
}
private async Task ProcessPictureStream(Stream photoStream)
{
    // TODO: Rotate face before sending to Cognitive Svcs
    FaceRectangle[] faces = await UploadAndDetectFaces(photoStream);

    if (faces.Length <= 0)
    {
        await DisplayAlert("Warning", "No faces detected in photo", "OK");
        return;
    }

    FaceRectangle firstFace = faces[0];
    int top = firstFace.Top;
    int left = firstFace.Left;
    int height = firstFace.Height;
    int width = firstFace.Width;

    MyFaceImage.Source = ImageSource.FromStream(() => photoStream);

    var msg = $"top: {top}; left: {left}; height: {height}; width: {width}";
    await DisplayAlert("title", msg, "OK");

    // TODO: Crop face and display on top of ArtImage
}
private async Task<Stream> TakePhotoAsync()
{
    _imageBuffer = null;
    _faceRectangle = null;

    var cameraOptions = new StoreCameraMediaOptions
    {
        CompressionQuality = 92,
        PhotoSize = PhotoSize.Medium,
        DefaultCamera = CameraDevice.Front
    };

    var mediaFile = await CrossMedia.Current.TakePhotoAsync(cameraOptions);
    var imageStream = mediaFile?.GetStream();

    if (imageStream == null)
    {
        return null;
    }

    _imageBuffer = new byte[imageStream.Length];
    await imageStream.ReadAsync(_imageBuffer, 0, (int)imageStream.Length);

    return new MemoryStream(_imageBuffer);
}
async void SelectPhoto(Object o, EventArgs e)
{
    NSOpenPanel openDialog = NSOpenPanel.OpenPanel;
    openDialog.CanChooseFiles = true;
    openDialog.CanChooseDirectories = false;
    openDialog.AllowsMultipleSelection = false;

    if (openDialog.RunModal(new string[] { "jpg", "png" }) == 1)
    {
        string fileName = openDialog.Filename;
        NSImage image = new NSImage(fileName);
        ThePhoto.Image = image;
        ClearExistingBorders();
        DetailView.TextStorage.SetString(new NSAttributedString("Processing..."));

        using (FileStream file = new FileStream(fileName, FileMode.Open))
        {
            var faces = await client.DetectAsync(file, true, true, new List<FaceAttributeType>()
            {
                FaceAttributeType.Gender,
                FaceAttributeType.Age,
                FaceAttributeType.Smile,
                FaceAttributeType.Glasses
            });

            DetailView.TextStorage.SetString(new NSAttributedString(""));

            foreach (var face in faces)
            {
                FaceRectangle faceRect = face.FaceRectangle;
                DetailView.TextStorage.Append(FormatRect(faceRect));
                DetailView.TextStorage.Append(FormatDetails(face.FaceAttributes));
                AddFrameAroundFace(faceRect);
            }
        }
    }
}
private async void OnBlurFacesCommandAsync(object obj)
{
    string filePath = (string)obj;
    Uri fileUri = new Uri(filePath);

    BitmapImage bitmapSource = new BitmapImage();
    bitmapSource.BeginInit();
    bitmapSource.CacheOption = BitmapCacheOption.None;
    bitmapSource.UriSource = fileUri;
    bitmapSource.EndInit();

    PhotoSource = bitmapSource;

    // Detect any faces in the image.
    Message = "Detecting...";
    Faces = await UploadAndDetectFaces(filePath);
    Message = String.Format("Detection Finished. {0} face(s) detected", Faces.Length);

    FaceRectangle[] faceRectangles = new FaceRectangle[Faces.Length];
    FaceDescriptions = new string[Faces.Length];

    for (int i = 0; i < Faces.Length; i++)
    {
        faceRectangles[i] = Faces[i].FaceRectangle;
        FaceDescriptions[i] = FaceDescription(Faces[i]);
    }

    // Blur faces and also draw a rectangle around each face.
    BlurFaces(faceRectangles, filePath);
}
void AddFrameAroundFace(FaceRectangle faceRect)
{
    NSImage image = ThePhoto.Image;

    // The API returns pixel coordinates, but NSImage scales, so we need
    // scaled versions based on the actual size of the image.
    double imagePixelWidth = (double)image.Representations()[0].PixelsWide;
    double imagePixelHeight = (double)image.Representations()[0].PixelsHigh;

    // The photo frame itself is always square, but not every image is square.
    CGRect photoFrame = ThePhoto.Subviews[0].Frame;
    photoFrame.Offset(ThePhoto.Frame.Location);

    // The scaling factor needed in each axis.
    double percentageX = photoFrame.Width / imagePixelWidth;
    double percentageY = photoFrame.Height / imagePixelHeight;

    // Scaled position - the API gives top-left, but Cocoa wants bottom-left.
    double faceRectTopConverted = imagePixelHeight - faceRect.Top;
    double picX = (int)Math.Round(faceRect.Left * percentageX);
    double picY = (int)Math.Round(faceRectTopConverted * percentageY);

    // Scaled size.
    double picWidth = (photoFrame.Width / imagePixelWidth) * faceRect.Width;
    double picHeight = (photoFrame.Height / imagePixelHeight) * faceRect.Height;

    BorderView borderView = new BorderView(new CGRect(
        photoFrame.X + picX, photoFrame.Y + picY - picHeight, picWidth, picHeight));
    Borders.Add(borderView);
    View.AddSubview(borderView);
}
/// <summary>
/// Matches the recognized emotions to faces.
/// </summary>
/// <returns>The number of matched emotions.</returns>
/// <param name="faces">Array of detected Faces.</param>
/// <param name="emotions">Array of recognized Emotions.</param>
public int MatchEmotionsToFaces(ref Face[] faces, ref Emotion[] emotions)
{
    int matched = 0;

    if (faces == null || emotions == null)
    {
        return matched;
    }

    foreach (Emotion emot in emotions)
    {
        FaceRectangle emotRect = emot.faceRectangle;

        for (int i = 0; i < faces.Length; i++)
        {
            if (Mathf.Abs(emotRect.left - faces[i].faceRectangle.left) <= 2 &&
                Mathf.Abs(emotRect.top - faces[i].faceRectangle.top) <= 2)
            {
                faces[i].emotion = emot;
                matched++;
                break;
            }
        }
    }

    return matched;
}
private async Task<Image> GetFaceCropAsync(ImageAnalyzer img)
{
    ImageSource croppedImage;

    if (img.DetectedFaces == null || !img.DetectedFaces.Any())
    {
        croppedImage = new BitmapImage();
        await ((BitmapImage)croppedImage).SetSourceAsync(
            (await img.GetImageStreamCallback()).AsRandomAccessStream());
    }
    else
    {
        // Crop the primary face
        FaceRectangle rect = img.DetectedFaces.First().FaceRectangle;
        double heightScaleFactor = 1.8;
        double widthScaleFactor = 1.8;
        FaceRectangle biggerRectangle = new FaceRectangle
        {
            Height = Math.Min((int)(rect.Height * heightScaleFactor), img.DecodedImageHeight),
            Width = Math.Min((int)(rect.Width * widthScaleFactor), img.DecodedImageWidth)
        };
        biggerRectangle.Left = Math.Max(0, rect.Left - (int)(rect.Width * ((widthScaleFactor - 1) / 2)));
        biggerRectangle.Top = Math.Max(0, rect.Top - (int)(rect.Height * ((heightScaleFactor - 1) / 1.4)));

        croppedImage = await Util.GetCroppedBitmapAsync(img.GetImageStreamCallback, biggerRectangle);
    }

    return new Image { Source = croppedImage, Height = 200 };
}
public static Rectangle ToRectangle(this FaceRectangle faceRectangle)
{
    return new Rectangle()
    {
        Top = faceRectangle.Top,
        Height = faceRectangle.Height,
        Left = faceRectangle.Left,
        Width = faceRectangle.Width
    };
}
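// A short usage sketch for the extension above; GetCropArea is illustrative and
// 'face' is a hypothetical DetectedFace returned by the Face API:
private static Rectangle GetCropArea(DetectedFace face)
{
    // Convert the service's FaceRectangle into the app's own Rectangle type.
    return face.FaceRectangle.ToRectangle();
}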
public override int GetHashCode()
{
    unchecked
    {
        return ((FaceRectangle != null ? FaceRectangle.GetHashCode() : 0) * 397) ^
               (Scores != null ? Scores.GetHashCode() : 0);
    }
}
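// A GetHashCode override like the one above normally pairs with a matching Equals
// override. A minimal sketch consistent with the hash, assuming an Emotion-like type
// whose identity is its FaceRectangle and Scores members:
public override bool Equals(object obj)
{
    // Two results are equal when both their rectangle and their scores match.
    var other = obj as Emotion;
    return other != null &&
           Equals(FaceRectangle, other.FaceRectangle) &&
           Equals(Scores, other.Scores);
}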
public Player(Guid id, FaceRectangle position, FaceAttributes attributes)
{
    this.Id = id;
    this.Position = position;
    this.DominantEmotion = this.GetDominantEmotion(attributes.Emotion);
    this.Attributes = attributes;
    //this.AccumulateScore();
}
public DetectedFace(FaceRectangle faceRectangle, System.Guid? faceId = default(System.Guid?),
    string recognitionModel = default(string), FaceLandmarks faceLandmarks = default(FaceLandmarks),
    FaceAttributes faceAttributes = default(FaceAttributes))
{
    FaceId = faceId;
    RecognitionModel = recognitionModel;
    FaceRectangle = faceRectangle;
    FaceLandmarks = faceLandmarks;
    FaceAttributes = faceAttributes;
}
public Task<AddPersistedFaceResult> AddPersonFaceAsync(string personGroupId, Guid personId,
    Stream imageStream, string userData = null, FaceRectangle targetFace = null)
{
    return innerClient.AddPersonFaceAsync(personGroupId, personId, imageStream, userData, targetFace);
}
private int RectIntersectDifference(FaceRectangle face, Rectangle emotion)
{
    Rect faceRect = new Rect(new Point(face.Left, face.Top), new Size(face.Width, face.Height));
    faceRect.Intersect(new Rect(new Point(emotion.Left, emotion.Top), new Size(emotion.Width, emotion.Height)));

    return (int)(face.Width - faceRect.Width) + (int)(face.Height - faceRect.Height);
}
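// RectIntersectDifference returns 0 when the emotion rectangle fully covers the face
// and grows as the overlap shrinks, so a caller can pick the best match by minimizing
// it. A short usage sketch (FindClosestEmotionRect is illustrative):
private Rectangle FindClosestEmotionRect(FaceRectangle face, IEnumerable<Rectangle> emotionRects)
{
    // The rectangle with the smallest intersection difference overlaps the face most.
    return emotionRects.OrderBy(r => RectIntersectDifference(face, r)).First();
}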
private static Rectangle convertRectangleFormation(FaceRectangle _rectangle)
{
    return new Rectangle(
        _rectangle.Left,
        _rectangle.Top,
        _rectangle.Width,
        _rectangle.Height);
}