/// <summary>
/// Converts a <see cref="Common.Rectangle"/> into its equivalent <see cref="Rectangle"/>.
/// </summary>
/// <param name="rectangle">The source rectangle to convert.</param>
/// <returns>A new <see cref="Rectangle"/> with the same position and size.</returns>
public static Rectangle ToRectangle(this Common.Rectangle rectangle)
{
    return new Rectangle
    {
        Left = rectangle.Left,
        Top = rectangle.Top,
        Width = rectangle.Width,
        Height = rectangle.Height
    };
}
/// <summary>
/// Wraps a <see cref="Common.Rectangle"/> in a new <see cref="FaceAPI.Contract.Face"/>
/// whose face rectangle carries the same position and size.
/// </summary>
/// <param name="rect">Source rectangle describing the face location.</param>
/// <returns>A face whose <c>FaceRectangle</c> mirrors <paramref name="rect"/>.</returns>
private FaceAPI.Contract.Face CreateFace(Common.Rectangle rect)
{
    var faceRectangle = new FaceAPI.Contract.FaceRectangle
    {
        Left = rect.Left,
        Top = rect.Top,
        Width = rect.Width,
        Height = rect.Height
    };
    return new FaceAPI.Contract.Face { FaceRectangle = faceRectangle };
}
/// <summary>
/// Wraps a <see cref="Microsoft.ProjectOxford.Common.Rectangle"/> in a new <see cref="Face"/>
/// whose face rectangle carries the same position and size.
/// </summary>
/// <param name="rect">Source rectangle describing the face location.</param>
/// <returns>A face whose <c>FaceRectangle</c> mirrors <paramref name="rect"/>.</returns>
private Face CreateFace(Microsoft.ProjectOxford.Common.Rectangle rect)
{
    var faceRectangle = new FaceRectangle
    {
        Left = rect.Left,
        Top = rect.Top,
        Width = rect.Width,
        Height = rect.Height
    };
    return new Face { FaceRectangle = faceRectangle };
}
/// <summary>
/// Crops the image in <paramref name="localFileStream"/> to <paramref name="rectangle"/>
/// and writes the result to <paramref name="resultFile"/> as a JPEG.
/// </summary>
/// <param name="localFileStream">Stream containing the source image.</param>
/// <param name="rectangle">Region of the image to keep.</param>
/// <param name="resultFile">Destination file for the cropped JPEG.</param>
private static async Task CropBitmapAsync(Stream localFileStream, Microsoft.ProjectOxford.Common.Rectangle rectangle, StorageFile resultFile)
{
    // Extract the requested region as raw BGRA8 pixel data.
    var pixels = await GetCroppedPixelsAsync(localFileStream.AsRandomAccessStream(), rectangle);

    using (Stream resultStream = await resultFile.OpenStreamForWriteAsync())
    {
        IRandomAccessStream outputStream = resultStream.AsRandomAccessStream();
        BitmapEncoder encoder = await BitmapEncoder.CreateAsync(BitmapEncoder.JpegEncoderId, outputStream);

        // Encode at the current view's logical DPI (same value for both axes).
        var logicalDpi = DisplayInformation.GetForCurrentView().LogicalDpi;
        encoder.SetPixelData(
            BitmapPixelFormat.Bgra8,
            BitmapAlphaMode.Ignore,
            (uint)rectangle.Width,
            (uint)rectangle.Height,
            logicalDpi,
            logicalDpi,
            pixels);

        await encoder.FlushAsync();
    }
}
/// <summary>
/// Maps a face rectangle given in preview-stream coordinates to a XAML rectangle
/// sized and positioned in window (UI) coordinates.
/// </summary>
/// <param name="faceRectangle">Face bounds in preview-stream coordinates.</param>
/// <returns>
/// A positioned <see cref="Windows.UI.Xaml.Shapes.Rectangle"/>, or an empty rectangle
/// when no preview information is available to re-scale with.
/// </returns>
private Windows.UI.Xaml.Shapes.Rectangle ConvertPreviewToUiRectangle(Microsoft.ProjectOxford.Common.Rectangle faceRectangle)
{
    var uiRectangle = new Windows.UI.Xaml.Shapes.Rectangle();
    var previewStream = _previewProperties as VideoEncodingProperties;

    // If there is no available information about the preview, return an empty
    // rectangle, as re-scaling to the screen coordinates will be impossible.
    if (previewStream == null)
    {
        return uiRectangle;
    }

    // Similarly, a zero dimension (which would only happen in an error case)
    // yields an empty rectangle.
    if (previewStream.Width == 0 || previewStream.Height == 0)
    {
        return uiRectangle;
    }

    double streamWidth = previewStream.Width;
    double streamHeight = previewStream.Height;

    // For portrait orientations, the width and height need to be swapped.
    if (_displayOrientation == DisplayOrientations.Portrait || _displayOrientation == DisplayOrientations.PortraitFlipped)
    {
        streamWidth = previewStream.Height;
        streamHeight = previewStream.Width;
    }

    // The portion of the control actually occupied by the video feed.
    var previewInUI = GetPreviewStreamRectInControl(previewStream, PreviewControl);

    // Scale the size from preview-stream coordinates to window coordinates.
    uiRectangle.Width = (faceRectangle.Width / streamWidth) * previewInUI.Width;
    uiRectangle.Height = (faceRectangle.Height / streamHeight) * previewInUI.Height;

    // Scale the position likewise and place the rectangle on the canvas.
    double left = (faceRectangle.Left / streamWidth) * previewInUI.Width;
    double top = (faceRectangle.Top / streamHeight) * previewInUI.Height;
    Canvas.SetLeft(uiRectangle, left);
    Canvas.SetTop(uiRectangle, top);

    return uiRectangle;
}
/// <summary>
/// Opens the image produced by <paramref name="originalImgFile"/> and returns a bitmap
/// cropped to <paramref name="rectangle"/>.
/// </summary>
/// <param name="originalImgFile">Factory producing a readable stream of the original image.</param>
/// <param name="rectangle">Region of the image to keep.</param>
/// <returns>The cropped bitmap, or <c>null</c> when cropping fails.</returns>
public static async Task<ImageSource> GetCroppedBitmapAsync(Func<Task<Stream>> originalImgFile, Microsoft.ProjectOxford.Common.Rectangle rectangle)
{
    try
    {
        Stream original = await originalImgFile();
        using (IRandomAccessStream stream = original.AsRandomAccessStream())
        {
            return await GetCroppedBitmapAsync(stream, rectangle);
        }
    }
    catch
    {
        // Deliberate best-effort: default to no image if we fail to crop the bitmap.
        return null;
    }
}
/// <summary>
/// Crops the image in <paramref name="stream"/> to <paramref name="rectangle"/> and
/// returns the result as a <see cref="WriteableBitmap"/>.
/// </summary>
/// <param name="stream">Random-access stream containing the source image.</param>
/// <param name="rectangle">Region of the image to keep.</param>
/// <returns>The cropped bitmap.</returns>
public static async Task<ImageSource> GetCroppedBitmapAsync(IRandomAccessStream stream, Microsoft.ProjectOxford.Common.Rectangle rectangle)
{
    var pixels = await GetCroppedPixelsAsync(stream, rectangle);

    // Stream the bytes into a bitmap sized exactly to the crop region.
    var croppedBitmap = new WriteableBitmap(rectangle.Width, rectangle.Height);
    croppedBitmap.FromByteArray(pixels);
    return croppedBitmap;
}
/// <summary>
/// Recognize emotions on faces in an image.
/// </summary>
/// <param name="imageStream">Stream of the image</param>
/// <param name="faceRectangles">Optional face rectangles to score; when null or empty,
/// no faceRectangles query parameter is sent to the service.</param>
/// <returns>Async task, which, upon completion, will return rectangle and emotion scores for each face.</returns>
public async Task<Contract.Emotion[]> RecognizeAsync(Stream imageStream, Rectangle[] faceRectangles)
{
    return await PostAsync<Stream, Contract.Emotion[]>(GetRecognizeUrl(faceRectangles), imageStream);
}
/// <summary>
/// Recognize emotions on faces in an image.
/// </summary>
/// <param name="imageUrl">URL of the image.</param>
/// <param name="faceRectangles">Array of face rectangles.</param>
/// <returns>Async task, which, upon completion, will return rectangle and emotion scores for each recognized face.</returns>
public async Task<Contract.Emotion[]> RecognizeAsync(string imageUrl, Rectangle[] faceRectangles)
{
    // NOTE(review): "UrlReqeust" (sic) is the request type's actual, misspelled name
    // declared elsewhere in this project; it cannot be renamed from here.
    var request = new UrlReqeust { url = imageUrl };
    return await PostAsync<UrlReqeust, Contract.Emotion[]>(GetRecognizeUrl(faceRectangles), request);
}
/// <summary>
/// Builds the relative "/recognize" request URL, appending a faceRectangles query
/// parameter when rectangles are supplied.
/// </summary>
/// <param name="faceRectangles">Rectangles to encode; may be null or empty, in which
/// case the query parameter is omitted.</param>
/// <returns>The relative URL, e.g. "/recognize?faceRectangles=l,t,w,h;l,t,w,h".</returns>
private string GetRecognizeUrl(Rectangle[] faceRectangles)
{
    var builder = new StringBuilder("/recognize");
    if (faceRectangles != null && faceRectangles.Length > 0)
    {
        // Each rectangle is serialized as "left,top,width,height"; rectangles are
        // separated by semicolons.
        var encoded = faceRectangles.Select(r => String.Format("{0},{1},{2},{3}", r.Left, r.Top, r.Width, r.Height));
        builder.Append("?faceRectangles=");
        builder.Append(string.Join(";", encoded));
    }
    return builder.ToString();
}
/// <summary>
/// Crops the image produced by <paramref name="localFile"/> to <paramref name="rectangle"/>
/// and writes the result to <paramref name="resultFile"/>.
/// </summary>
/// <param name="localFile">Factory producing a readable stream of the source image.</param>
/// <param name="rectangle">Region of the image to keep.</param>
/// <param name="resultFile">Destination file for the cropped image.</param>
public static async Task CropBitmapAsync(Func<Task<Stream>> localFile, Microsoft.ProjectOxford.Common.Rectangle rectangle, StorageFile resultFile)
{
    // Fix: dispose the source stream once cropping completes — the original leaked it.
    // The Stream-based overload finishes all reads before returning, so disposing here
    // is safe; this also matches how the Func-based GetCroppedBitmapAsync disposes its stream.
    using (Stream localFileStream = await localFile())
    {
        await CropBitmapAsync(localFileStream, rectangle, resultFile);
    }
}
/// <summary>
/// Compares a <see cref="Microsoft.ProjectOxford.Common.Rectangle"/> against a Face API
/// <see cref="FaceRectangle"/> by delegating to the coordinate-based overload.
/// </summary>
/// <param name="face1">Rectangle from the Common library.</param>
/// <param name="face2">Rectangle from the Face API.</param>
/// <returns>Whatever the coordinate-based overload decides for the two sets of bounds.</returns>
public static bool AreFacesPotentiallyTheSame(Microsoft.ProjectOxford.Common.Rectangle face1, FaceRectangle face2)
{
    return AreFacesPotentiallyTheSame(
        (int)face1.Left, (int)face1.Top, (int)face1.Width, (int)face1.Height,
        face2.Left, face2.Top, face2.Width, face2.Height);
}
/// <summary>
/// Draws a yellow outline over a detected face on <paramref name="RectangleCanvas"/>,
/// with a label strip directly beneath it showing the emotion name and score.
/// Previous children of the canvas are cleared first.
/// </summary>
/// <param name="RectangleCanvas">Canvas to draw on.</param>
/// <param name="FaceRectangle">Face bounds in capture-frame coordinates.</param>
/// <param name="score">Emotion score, rendered with one decimal place and a percent sign.</param>
/// <param name="emotion">Name of the emotion to display.</param>
private void drawEmotionRectangle(Canvas RectangleCanvas, Rectangle FaceRectangle, float score, string emotion)
{
    // Scale factor and letterbox margins that map capture coordinates onto the canvas.
    double scale = 1;
    double offsetX = 0;
    double offsetY = 0;

    if (captureWidth > 0)
    {
        double heightRatio = RectangleCanvas.ActualHeight / captureHeight;
        double widthRatio = RectangleCanvas.ActualWidth / captureWidth;
        if (heightRatio < widthRatio)
        {
            // Height-constrained: video is centred horizontally on the canvas.
            scale = heightRatio;
            offsetX = (RectangleCanvas.ActualWidth - (captureWidth * scale)) / 2;
        }
        else
        {
            // Width-constrained: video is centred vertically on the canvas.
            scale = widthRatio;
            offsetY = (RectangleCanvas.ActualHeight - (captureHeight * scale)) / 2;
        }
    }

    RectangleCanvas.Children.Clear();

    // Face outline.
    var outline = new Windows.UI.Xaml.Shapes.Rectangle
    {
        Stroke = new SolidColorBrush(Windows.UI.Colors.Yellow),
        StrokeThickness = 5,
        Width = FaceRectangle.Width * scale,
        Height = FaceRectangle.Height * scale
    };
    RectangleCanvas.Children.Add(outline);
    Canvas.SetLeft(outline, (FaceRectangle.Left * scale) + offsetX);
    Canvas.SetTop(outline, (FaceRectangle.Top * scale) + offsetY);

    // Label: a yellow border hosting a black text block, placed just below the outline.
    var labelText = new TextBlock
    {
        Width = outline.Width,
        FontSize = 16,
        Foreground = new SolidColorBrush(Windows.UI.Colors.Black)
    };
    var labelHost = new Border
    {
        Background = new SolidColorBrush(Windows.UI.Colors.Yellow),
        Width = outline.Width,
        Padding = new Thickness(2),
        Child = labelText
    };
    RectangleCanvas.Children.Add(labelHost);
    Canvas.SetLeft(labelHost, (FaceRectangle.Left * scale) + offsetX);
    Canvas.SetTop(labelHost, (FaceRectangle.Top * scale) + offsetY + outline.Height - 1);

    labelText.Text = $"{emotion}\r\n{score:N1}%";
}