/// <summary>
/// Scales the image in <paramref name="stream"/> to fill <paramref name="requiredSize"/>
/// while preserving aspect ratio, then crops the overflow so the output is exactly the
/// required size, centered along the overflowing axis.
/// </summary>
/// <param name="stream">Source image stream, consumed by a BitmapDecoder.</param>
/// <param name="requiredSize">Target pixel size of the output image.</param>
/// <returns>A new in-memory stream containing the scaled-and-cropped image.</returns>
private async Task <InMemoryRandomAccessStream> ResizeImage(InMemoryRandomAccessStream stream, Size requiredSize)
        {
            // Make a decoder for the current stream
            var decoder = await BitmapDecoder.CreateAsync(stream);

            var imageHeight = decoder.PixelHeight;
            var imageWidth  = decoder.PixelWidth;

            // Source-to-target size ratio per axis; the smaller ratio identifies the axis
            // that constrains the scale. NOTE(review): assumes requiredSize has non-zero
            // dimensions — a zero width/height would divide by zero here; confirm callers.
            var widthRatio  = imageWidth / requiredSize.Width;
            var heightRatio = imageHeight / requiredSize.Height;

            uint outputHeight;
            uint outputWidth;
            var  centerOnX = false;

            if (widthRatio < heightRatio)
            {
                // Width is the tighter fit: match the required width and let the height
                // overflow, so the crop below trims (and centers) vertically.
                outputHeight = (uint)(imageHeight / widthRatio);
                outputWidth  = (uint)requiredSize.Width;
            }
            else
            {
                // Height is the tighter fit: match the required height and let the width
                // overflow, so the crop below trims (and centers) horizontally.
                outputWidth  = (uint)(imageWidth / heightRatio);
                outputHeight = (uint)requiredSize.Height;
                centerOnX    = true;
            }

            // Make an output stream and an encoder
            var outputStream = new InMemoryRandomAccessStream();
            var enc          = await BitmapEncoder.CreateForTranscodingAsync(outputStream, decoder);

            // Scale the bitmap to the aspect-preserving size computed above, then bound
            // it back down to exactly the required size.
            enc.BitmapTransform.ScaledHeight = outputHeight;
            enc.BitmapTransform.ScaledWidth  = outputWidth;
            var bound = new BitmapBounds();

            bound.Height = (uint)requiredSize.Height;
            bound.Width  = (uint)requiredSize.Width;

            // Choose Fant for quality over perf.
            enc.BitmapTransform.InterpolationMode = BitmapInterpolationMode.Fant;

            if (centerOnX)
            {
                // Horizontal overflow: offset the crop so it is centered on X.
                var width = ((int)outputWidth / 2) - ((int)bound.Width / 2);
                bound.X = (uint)(width > 0 ? width : 0);
            }
            else
            {
                // Vertical overflow: offset the crop so it is centered on Y.
                var height = ((int)outputHeight / 2) - ((int)bound.Height / 2);
                bound.Y = (uint)(height > 0 ? height : 0);
            }
            enc.BitmapTransform.Bounds = bound;

            // Do it
            await enc.FlushAsync();

            // Return the new stream
            return(outputStream);
        }
示例#2
0
        /// <summary>
        /// Center-crops <paramref name="inputVideoFrame"/> to a square region and copies
        /// it into the class-level <c>cropped_vf</c> frame (224x224, Bgra8).
        /// </summary>
        /// <param name="inputVideoFrame">Frame to crop; may be GPU (Direct3DSurface) or CPU (SoftwareBitmap) backed.</param>
        private async Task CropAndDisplayInputImageAsync(VideoFrame inputVideoFrame)
        {
            // A frame with no SoftwareBitmap is backed by a Direct3D surface instead.
            bool useDX = inputVideoFrame.SoftwareBitmap == null;

            BitmapBounds cropBounds = new BitmapBounds();
            uint         h          = 28;
            uint         w          = 28;

            var frameHeight = useDX ? inputVideoFrame.Direct3DSurface.Description.Height : inputVideoFrame.SoftwareBitmap.PixelHeight;
            var frameWidth  = useDX ? inputVideoFrame.Direct3DSurface.Description.Width : inputVideoFrame.SoftwareBitmap.PixelWidth;

            // Target aspect ratio is 28/28 == 1.0, i.e. a square crop.
            var requiredAR = ((float)28 / 28);

            // Largest region matching the target aspect ratio, centered on X only;
            // the crop is anchored to the top of the frame (Y = 0).
            w                 = Math.Min((uint)(requiredAR * frameHeight), (uint)frameWidth);
            h                 = Math.Min((uint)(frameWidth / requiredAR), (uint)frameHeight);
            cropBounds.X      = (uint)((frameWidth - w) / 2);
            cropBounds.Y      = 0;
            cropBounds.Width  = w;
            cropBounds.Height = h;

            int nh = 224; //28
            int nw = 224; //28

            // VideoFrame's ctor takes (format, width, height); nh/nw are equal here so
            // the argument order does not matter, but it would for a non-square target.
            cropped_vf = new VideoFrame(BitmapPixelFormat.Bgra8, nh, nw, BitmapAlphaMode.Ignore);

            // CopyToAsync scales the cropped region into the destination frame.
            await inputVideoFrame.CopyToAsync(cropped_vf, cropBounds, null);
        }
示例#3
0
        /// <summary>
        /// Re-encodes the image behind <paramref name="decoder"/> scaled to
        /// <paramref name="newWidth"/> x <paramref name="newHeight"/> pixels.
        /// </summary>
        /// <param name="decoder">Decoder over the source image.</param>
        /// <param name="newHeight">Target height in pixels.</param>
        /// <param name="newWidth">Target width in pixels.</param>
        /// <returns>An in-memory stream with the scaled image; may be empty if encoding failed.</returns>
        public static async Task <IRandomAccessStream> BitmapScale(BitmapDecoder decoder, uint newHeight, uint newWidth)
        {
            // create a new stream and encoder for the new image
            var ras = new InMemoryRandomAccessStream();
            var enc = await BitmapEncoder.CreateForTranscodingAsync(ras, decoder);

            // Scale the whole bitmap to the requested size, then bound it to exactly
            // that size (X/Y of 0: no offset beyond the scaled extent).
            enc.BitmapTransform.ScaledHeight = newHeight;
            enc.BitmapTransform.ScaledWidth  = newWidth;

            var bounds = new BitmapBounds
            {
                Height = newHeight,
                Width  = newWidth,
                X      = 0,
                Y      = 0
            };

            enc.BitmapTransform.Bounds = bounds;

            // write out to the stream
            try
            {
                await enc.FlushAsync();
            }
            catch (Exception ex)
            {
                // Best-effort: still return the (possibly empty) stream, but surface the
                // failure instead of discarding the exception into an unused local.
                Debug.WriteLine(ex);
            }

            return(ras);
        }
示例#4
0
        /// <summary>
        /// Decodes <paramref name="imageStream"/>, applies the crop/rotation/flip from
        /// <paramref name="editState"/> (preview capped at 1280px on the long side) and
        /// returns an ImageSource — with ink strokes drawn on top when present.
        /// </summary>
        /// <param name="imageStream">Stream holding the source image.</param>
        /// <param name="editState">Crop rectangle (relative 0..1 coordinates), rotation, flip and optional strokes.</param>
        /// <returns>A BitmapImage (strokes drawn) or a SoftwareBitmapSource (no strokes).</returns>
        public static async Task <ImageSource> CropAndPreviewAsync(IRandomAccessStream imageStream, BitmapEditState editState)
        {
            var decoder = await BitmapDecoder.CreateAsync(imageStream);

            var cropWidth  = (double)decoder.PixelWidth;
            var cropHeight = (double)decoder.PixelHeight;

            // Cap the preview so its longest side is at most 1280px, preserving aspect.
            if (decoder.PixelWidth > 1280 || decoder.PixelHeight > 1280)
            {
                double ratioX = (double)1280 / cropWidth;
                double ratioY = (double)1280 / cropHeight;
                double ratio  = Math.Min(ratioX, ratioY);

                cropWidth  = cropWidth * ratio;
                cropHeight = cropHeight * ratio;
            }

            // editState.Rectangle is in relative (0..1) coordinates; convert to pixels.
            var cropRectangle = new Rect(
                editState.Rectangle.X * decoder.PixelWidth,
                editState.Rectangle.Y * decoder.PixelHeight,
                editState.Rectangle.Width * decoder.PixelWidth,
                editState.Rectangle.Height * decoder.PixelHeight);

            // Scale the pixel-space crop into the capped preview space.
            var(scaledCrop, scaledSize) = Scale(cropRectangle, new Size(decoder.PixelWidth, decoder.PixelHeight), new Size(cropWidth, cropHeight), 1280, 0);

            var bounds = new BitmapBounds();

            bounds.X      = (uint)scaledCrop.X;
            bounds.Y      = (uint)scaledCrop.Y;
            bounds.Width  = (uint)scaledCrop.Width;
            bounds.Height = (uint)scaledCrop.Height;

            var transform = new BitmapTransform();

            transform.ScaledWidth       = (uint)scaledSize.Width;
            transform.ScaledHeight      = (uint)scaledSize.Height;
            transform.Bounds            = bounds;
            transform.InterpolationMode = BitmapInterpolationMode.Linear;
            transform.Rotation          = editState.Rotation;
            transform.Flip = editState.Flip;

            var pixelData = await decoder.GetSoftwareBitmapAsync(decoder.BitmapPixelFormat, BitmapAlphaMode.Premultiplied, transform, ExifOrientationMode.RespectExifOrientation, ColorManagementMode.DoNotColorManage);

            if (editState.Strokes != null)
            {
                // Render the user's ink strokes onto the cropped bitmap and show the
                // resulting encoded stream.
                var stream = await DrawStrokesAsync(pixelData, editState.Strokes, editState.Rectangle, editState.Rotation, editState.Flip);

                var bitmapImage = new BitmapImage();
                await bitmapImage.SetSourceAsync(stream);

                return(bitmapImage);
            }
            else
            {
                var bitmapImage = new SoftwareBitmapSource();
                await bitmapImage.SetBitmapAsync(pixelData);

                return(bitmapImage);
            }
        }
示例#5
0
        /// <summary>
        ///     Helper to get the correct Bounds for 15:9 screens and to set finalPhotoAreaBorder values
        /// </summary>
        /// <returns>Crop bounds anchored at the top-left with a 15:9 (height:width) shape.</returns>
        private BitmapBounds GetFifteenByNineBounds()
        {
            var bounds = new BitmapBounds();

            // calculate cropping area for 15:9: the preview is rotated, so the sensor's
            // Height is used as the bounds' width and the 15:9 ratio is applied to it.
            // NOTE: the previous raw-pixel / visual-width computation (Window bounds,
            // RawPixelsPerViewPixel, scale factor) was dead code — its results were never
            // used — and has been removed.
            var scaledBoundsWidth  = this.maxResolution().Height;
            var scaledBoundsHeight = (scaledBoundsWidth / 9) * 15;

            // we are starting at the top of the image, cropping the image width
            bounds.Y      = 0;
            bounds.X      = 0;
            bounds.Height = scaledBoundsHeight;
            bounds.Width  = scaledBoundsWidth;

            return(bounds);
        }
        /// <summary>
        /// Produces a new Bgra8 frame of <paramref name="targetSize"/> containing a
        /// horizontally centered, top-anchored crop of <paramref name="inputVideoFrame"/>
        /// whose aspect ratio matches the target.
        /// </summary>
        /// <param name="inputVideoFrame">Frame to crop; GPU or CPU backed.</param>
        /// <param name="targetSize">Pixel size of the returned frame.</param>
        /// <returns>The cropped (and scaled by CopyToAsync) frame.</returns>
        private static async Task <VideoFrame> CropAndDisplayInputImageAsync(VideoFrame inputVideoFrame, Size targetSize)
        {
            // GPU-backed frames expose their size via the Direct3D surface description.
            bool isGpuFrame = inputVideoFrame.SoftwareBitmap == null;

            int sourceHeight;
            int sourceWidth;

            if (isGpuFrame)
            {
                sourceHeight = inputVideoFrame.Direct3DSurface.Description.Height;
                sourceWidth  = inputVideoFrame.Direct3DSurface.Description.Width;
            }
            else
            {
                sourceHeight = inputVideoFrame.SoftwareBitmap.PixelHeight;
                sourceWidth  = inputVideoFrame.SoftwareBitmap.PixelWidth;
            }

            // Largest region with the target aspect ratio that fits inside the source.
            var  aspect     = targetSize.Width / targetSize.Height;
            uint cropWidth  = Math.Min((uint)(aspect * sourceHeight), (uint)sourceWidth);
            uint cropHeight = Math.Min((uint)(sourceWidth / aspect), (uint)sourceHeight);

            var cropRegion = new BitmapBounds
            {
                X      = (uint)((sourceWidth - cropWidth) / 2), // centered horizontally
                Y      = 0,                                     // anchored to the top
                Width  = cropWidth,
                Height = cropHeight
            };

            var destination = new VideoFrame(BitmapPixelFormat.Bgra8, (int)targetSize.Width, (int)targetSize.Height, BitmapAlphaMode.Ignore);

            await inputVideoFrame.CopyToAsync(destination, cropRegion, null);

            return(destination);
        }
        /// <summary>
        /// Face-detection event handler: validates the biggest detected face, renders the
        /// face boxes on the UI thread, raises FaceDetected and optionally triggers a
        /// smile check. (async void is acceptable here: it is a top-level event handler.)
        /// </summary>
        /// <param name="sender">The effect that raised the event.</param>
        /// <param name="args">Detection results for the current preview frame.</param>
        private async void FaceDetectionEffect_FaceDetected(FaceDetectionEffect sender, FaceDetectedEventArgs args)
        {
            if (args.ResultFrame.DetectedFaces.Any())
            {
                // The face with the largest box area is treated as the primary face.
                var biggestFace = args.ResultFrame.DetectedFaces.OrderByDescending(f => f.FaceBox.Height * f.FaceBox.Width).FirstOrDefault();
                if (biggestFace != null)
                {
                    var faceBounds = new BitmapBounds
                    {
                        X      = biggestFace.FaceBox.X,
                        Y      = biggestFace.FaceBox.Y,
                        Height = biggestFace.FaceBox.Height,
                        Width  = biggestFace.FaceBox.Width
                    };
                    // Check if face is not too big for face bounding box extrapolation;
                    // if extension fails, skip rendering/raising for this frame entirely.
                    if (false == TryExtendFaceBounds(
                            (int)_previewProperties.Width, (int)_previewProperties.Height,
                            Constants.FaceBoxRatio, ref faceBounds))
                    {
                        return;
                    }
                }

                // Ask the UI thread to render the face bounding boxes
                await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () => HighlightDetectedFaces(args.ResultFrame.DetectedFaces));

                FaceDetected?.Invoke(sender, args);

                if (IsCheckSmileEnabled)
                {
                    await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, async() => await CheckSmileAsync());
                }
            }
        }
示例#8
0
File: FaceUi.cs Project: vaclavpetr/Blog
        /// <summary>
        /// Maps a face box from preview-stream pixel coordinates into the UI coordinates
        /// of <paramref name="cameraViewer"/> and returns a Rectangle (stroke thickness 4)
        /// positioned over the face via Canvas.SetLeft/SetTop.
        /// </summary>
        /// <param name="faceBoxInPreviewCoordinates">Face box in preview-stream pixels.</param>
        /// <param name="cameraViewer">Control displaying the preview feed.</param>
        /// <param name="previewProperties">Encoding properties of the preview stream.</param>
        /// <returns>The positioned rectangle; an unpositioned one if the stream properties are unusable.</returns>
        public static Rectangle ConvertPreviewToUiRectangle(BitmapBounds faceBoxInPreviewCoordinates, CaptureElement cameraViewer, IMediaEncodingProperties previewProperties)
        {
            var result = new Rectangle {
                StrokeThickness = 4
            };
            var previewStream = previewProperties as VideoEncodingProperties;

            // Without video properties, or with a zero-sized stream, the mapping is
            // undefined; return the default rectangle.
            if (previewStream == null)
            {
                return(result);
            }
            if (previewStream.Width == 0 || previewStream.Height == 0)
            {
                return(result);
            }

            double streamWidth  = previewStream.Width;
            double streamHeight = previewStream.Height;

            // Rectangle actually occupied by the video feed inside the control.
            var previewInUi = GetPreviewStreamRectInControl(previewStream, cameraViewer);

            // Scale the box size from stream coordinates to UI coordinates.
            result.Width  = (faceBoxInPreviewCoordinates.Width / streamWidth) * previewInUi.Width;
            result.Height = (faceBoxInPreviewCoordinates.Height / streamHeight) * previewInUi.Height;

            var x = (faceBoxInPreviewCoordinates.X / streamWidth) * previewInUi.Width;
            var y = (faceBoxInPreviewCoordinates.Y / streamHeight) * previewInUi.Height;

            Canvas.SetLeft(result, x);
            Canvas.SetTop(result, y + 20); // NOTE(review): fixed 20px downward offset — presumably a layout correction; confirm.

            return(result);
        }
示例#9
0
File: FaceUi.cs Project: vaclavpetr/Blog
        /// <summary>
        /// Maps a face bounding box from preview-stream pixel coordinates into the UI
        /// coordinates of <paramref name="cameraViewer"/> and returns a Rectangle
        /// positioned (via Canvas.SetLeft/SetTop) over the face.
        /// </summary>
        /// <param name="faceBoxInPreviewCoordinates">Face box in preview-stream pixels.</param>
        /// <param name="cameraViewer">Control displaying the preview feed.</param>
        /// <param name="previewProperties">Encoding properties of the preview stream.</param>
        /// <returns>The positioned rectangle; an unpositioned one if the stream properties are unusable.</returns>
        public static Rectangle ConvertPreviewToUiRectangle(BitmapBounds faceBoxInPreviewCoordinates, CaptureElement cameraViewer, IMediaEncodingProperties previewProperties)
        {
            var uiRectangle   = new Rectangle();
            var previewStream = previewProperties as VideoEncodingProperties;

            // The mapping is only defined for a video stream with a non-zero size.
            if (previewStream == null || previewStream.Width == 0 || previewStream.Height == 0)
            {
                return(uiRectangle);
            }

            double streamW = previewStream.Width;
            double streamH = previewStream.Height;

            // Rectangle occupied by the actual video feed inside the control.
            var feedRect = GetPreviewStreamRectInControl(previewStream, cameraViewer);

            // Scale the box size from stream coordinates to window coordinates.
            uiRectangle.Width  = faceBoxInPreviewCoordinates.Width / streamW * feedRect.Width;
            uiRectangle.Height = faceBoxInPreviewCoordinates.Height / streamH * feedRect.Height;

            // Scale the box position the same way and place the rectangle on the canvas.
            Canvas.SetLeft(uiRectangle, faceBoxInPreviewCoordinates.X / streamW * feedRect.Width);
            Canvas.SetTop(uiRectangle, faceBoxInPreviewCoordinates.Y / streamH * feedRect.Height);

            return(uiRectangle);
        }
示例#10
0
File: FaceUi.cs Project: vaclavpetr/Blog
        /// <summary>
        /// Sizes and positions <paramref name="imageHat"/> over the detected face by
        /// mapping the face box from preview-stream coordinates to UI coordinates,
        /// shifting it 20px up so the hat sits above the face box, and making it visible.
        /// </summary>
        /// <returns>The same image instance, repositioned (unchanged if the stream properties are unusable).</returns>
        public static Image ConvertPreviewToUiHatImage(BitmapBounds faceBoxInPreviewCoordinates, CaptureElement cameraViewer, IMediaEncodingProperties previewProperties, Image imageHat)
        {
            var previewStream = previewProperties as VideoEncodingProperties;

            // Without usable video properties the mapping is undefined; return unchanged.
            if (previewStream == null)
            {
                return(imageHat);
            }
            if (previewStream.Width == 0 || previewStream.Height == 0)
            {
                return(imageHat);
            }

            double streamWidth  = previewStream.Width;
            double streamHeight = previewStream.Height;

            // Rectangle actually occupied by the video feed inside the control.
            var previewInUi = GetPreviewStreamRectInControl(previewStream, cameraViewer);

            // Scale size and position from stream coordinates to UI coordinates.
            imageHat.Width  = (faceBoxInPreviewCoordinates.Width / streamWidth) * previewInUi.Width;
            imageHat.Height = (faceBoxInPreviewCoordinates.Height / streamHeight) * previewInUi.Height;

            var x = (faceBoxInPreviewCoordinates.X / streamWidth) * previewInUi.Width;
            var y = (faceBoxInPreviewCoordinates.Y / streamHeight) * previewInUi.Height;

            Canvas.SetLeft(imageHat, x);
            Canvas.SetTop(imageHat, y - 20); // lift the hat above the face box

            imageHat.Visibility = Visibility.Visible;

            return(imageHat);
        }
示例#11
0
        /// <summary>
        /// Draws the block column at <paramref name="update"/>'s position onto this
        /// tile's bitmap, runs the layer's post-processors, and records the position
        /// as updated.
        /// </summary>
        /// <param name="update">Block column metadata to render.</param>
        public void Update(BlockColumnMeta update)
        {
            var rect = GetBlockRectangle(update.Position);

            // Ignore blocks that fall outside this tile; drawing them would spill onto
            // pixels this tile does not own.
            if (!BitmapBounds.Contains(rect))
            {
                // Fixed typo in the log message ("Attemptedto" -> "Attempted to").
                Log.WarnFormat("Attempted to draw block outside the tile bounds, Block: {2}, Rect: {0}, Bitmap Bounds : {1}", rect, BitmapBounds, update.Position);
                return;
            }

            // Serialize all drawing on the shared bitmap.
            lock (_bitmapSync)
            {
                Layer.Renderer.DrawBlock(Graphics, rect, update);

                foreach (var postProcessor in Layer.PostProcessors)
                {
                    postProcessor.PostProcess(this, Graphics, update);
                }
            }

            _updatedBlocks.Add(update.Position);
        }
示例#12
0
        /// <summary>
        /// Crops the source image (m_imageSource) to CropRectangle and saves the result
        /// as "crop.jpg" (JPEG quality 0.77) in the app's temporary folder.
        /// </summary>
        /// <returns>The written temporary file.</returns>
        public async Task <StorageFile> CropAsync()
        {
            // Replace any previous crop result.
            var file = await ApplicationData.Current.TemporaryFolder.CreateFileAsync("crop.jpg", CreationCollisionOption.ReplaceExisting);

            using (var fileStream = await m_imageSource.OpenAsync(FileAccessMode.Read))
                using (var outputStream = await file.OpenAsync(FileAccessMode.ReadWrite))
                {
                    var decoder = await BitmapDecoder.CreateAsync(fileStream);

                    // Crop region in source pixel coordinates.
                    var bounds = new BitmapBounds();
                    bounds.X      = (uint)CropRectangle.X;
                    bounds.Y      = (uint)CropRectangle.Y;
                    bounds.Width  = (uint)CropRectangle.Width;
                    bounds.Height = (uint)CropRectangle.Height;

                    var transform = ComputeScalingTransformForSourceImage(decoder);
                    transform.Bounds = bounds;

                    var pixelData = await decoder.GetSoftwareBitmapAsync(decoder.BitmapPixelFormat, decoder.BitmapAlphaMode, transform, ExifOrientationMode.RespectExifOrientation, ColorManagementMode.DoNotColorManage);

                    // JPEG quality 0.77; ImageQuality must be a single-precision value.
                    var propertySet  = new BitmapPropertySet();
                    var qualityValue = new BitmapTypedValue(0.77, PropertyType.Single);
                    propertySet.Add("ImageQuality", qualityValue);

                    // BUG FIX: propertySet was built but never handed to the encoder, so
                    // the quality setting had no effect; pass it to CreateAsync.
                    var encoder = await BitmapEncoder.CreateAsync(BitmapEncoder.JpegEncoderId, outputStream, propertySet);

                    encoder.SetSoftwareBitmap(pixelData);
                    await encoder.FlushAsync();
                }

            return(file);
        }
        /// <summary>
        /// Computes the largest center-crop of a srcWidth x srcHeight image that matches
        /// the aspect ratio of targetWidth x targetHeight.
        /// </summary>
        /// <param name="srcWidth">Source image width in pixels.</param>
        /// <param name="srcHeight">Source image height in pixels.</param>
        /// <param name="targetWidth">Width defining the desired aspect ratio.</param>
        /// <param name="targetHeight">Height defining the desired aspect ratio.</param>
        /// <returns>Bounds of the centered crop region, clamped to the source image.</returns>
        public static BitmapBounds GetCropBounds(int srcWidth, int srcHeight, int targetWidth, int targetHeight)
        {
            // we need to recalculate the crop bounds in order to correctly center-crop the input image
            float aspect = (float)targetWidth / targetHeight;
            var   bounds = new BitmapBounds();

            if (aspect * srcHeight > (float)srcWidth)
            {
                // Source is too tall for the target aspect: keep the full width and clip
                // on the y axis (the +0.5f rounds to the nearest pixel).
                bounds.Width  = (uint)srcWidth;
                bounds.Height = (uint)Math.Min((srcWidth / aspect + 0.5f), srcHeight);
                bounds.X      = 0;
                bounds.Y      = (uint)(srcHeight - bounds.Height) / 2;
            }
            else
            {
                // Source is too wide: keep the full height and clip on the x axis.
                bounds.Height = (uint)srcHeight;
                bounds.Width  = (uint)Math.Min((aspect * srcHeight + 0.5f), srcWidth);
                bounds.Y      = 0;
                bounds.X      = (uint)(srcWidth - bounds.Width) / 2;
            }

            return(bounds);
        }
示例#14
0
        /// <summary>
        /// Use BitmapTransform to define the region to crop, and then get the pixel data in the region.
        /// If you want to get the pixel data of a scaled image, set the scaledWidth and scaledHeight
        /// of the scaled image.
        /// </summary>
        /// <param name="decoder">Decoder over the source image.</param>
        /// <param name="startPointX">Left edge of the crop region, in pixels.</param>
        /// <param name="startPointY">Top edge of the crop region, in pixels.</param>
        /// <param name="width">Width of the crop region, in pixels.</param>
        /// <param name="height">Height of the crop region, in pixels.</param>
        /// <param name="scaledWidth">Width the image is scaled to before cropping.</param>
        /// <param name="scaledHeight">Height the image is scaled to before cropping.</param>
        /// <returns>Bgra8 (straight alpha) pixel bytes of the cropped region.</returns>
        async static private Task <byte[]> GetPixelData(BitmapDecoder decoder, uint startPointX, uint startPointY,
                                                        uint width, uint height, uint scaledWidth, uint scaledHeight)
        {
            BitmapTransform transform = new BitmapTransform();
            BitmapBounds    bounds    = new BitmapBounds();

            // NOTE(review): the bounds apply after scaling, so the crop coordinates are
            // presumably in scaled-image space — confirm against callers.
            bounds.X         = startPointX;
            bounds.Y         = startPointY;
            bounds.Height    = height;
            bounds.Width     = width;
            transform.Bounds = bounds;

            transform.ScaledWidth  = scaledWidth;
            transform.ScaledHeight = scaledHeight;

            // Get the cropped pixels within the bounds of transform.
            PixelDataProvider pix = await decoder.GetPixelDataAsync(
                BitmapPixelFormat.Bgra8,
                BitmapAlphaMode.Straight,
                transform,
                ExifOrientationMode.IgnoreExifOrientation,
                ColorManagementMode.ColorManageToSRgb);

            byte[] pixels = pix.DetachPixelData();
            return(pixels);
        }
示例#15
0
        /// <summary>
        /// Updates any existing face bounding boxes in response to changes in the size of the Canvas.
        /// </summary>
        /// <param name="sender">Canvas whose size has changed</param>
        /// <param name="e">Event data</param>
        private void SnapshotCanvas_SizeChanged(object sender, SizeChangedEventArgs e)
        {
            try
            {
                // If the Canvas is resized we must recompute a new scaling factor and
                // apply it to each face box.
                if (this.currentState == ScenarioState.Snapshot && this.SnapshotCanvas.Background != null)
                {
                    WriteableBitmap displaySource = (this.SnapshotCanvas.Background as ImageBrush).ImageSource as WriteableBitmap;

                    // Pixel-to-display scale factors for the new canvas size.
                    double widthScale  = displaySource.PixelWidth / this.SnapshotCanvas.ActualWidth;
                    double heightScale = displaySource.PixelHeight / this.SnapshotCanvas.ActualHeight;

                    foreach (var item in this.SnapshotCanvas.Children)
                    {
                        // Only the face-box rectangles are rescaled; skip other children.
                        Rectangle box = item as Rectangle;
                        if (box == null)
                        {
                            continue;
                        }

                        // We saved the original size of the face box in the rectangles Tag field.
                        BitmapBounds faceBounds = (BitmapBounds)box.Tag;
                        box.Width  = (uint)(faceBounds.Width / widthScale);
                        box.Height = (uint)(faceBounds.Height / heightScale);

                        box.Margin = new Thickness((uint)(faceBounds.X / widthScale), (uint)(faceBounds.Y / heightScale), 0, 0);
                    }
                }
            }
            catch (Exception ex)
            {
                // Intentionally swallowed so a transient layout state cannot crash the
                // app; the user notification below was disabled in the original sample.
//                this.rootPage.NotifyUser(ex.ToString(), NotifyType.ErrorMessage);
            }
        }
示例#16
0
        /// <summary>
        /// Encodes the region of the source image currently inside the crop border into a
        /// new in-memory stream, using the class-level decoder and the layout of
        /// imgView / containerGrid / border to map UI coordinates to pixel coordinates.
        /// </summary>
        /// <returns>Stream containing the cropped image.</returns>
        private async Task <IRandomAccessStream> Capture()
        {
            var memStream = new InMemoryRandomAccessStream();
            var encoder   = await BitmapEncoder.CreateForTranscodingAsync(memStream, decoder);

            var imgViewW = imgView.RenderSize.Width;
            var imgViewH = imgView.RenderSize.Height;

            // The image is centered in the container; "deviation" is the blank margin
            // between the container edge and the image on each side.
            var deviationW = (containerGrid.RenderSize.Width - imgViewW) / 2;
            var deviationH = (containerGrid.RenderSize.Height - imgViewH) / 2;

            BitmapBounds bounds = new BitmapBounds();

            // Convert the border's position/size from view coordinates to source pixels.
            bounds.X      = (uint)((border.Margin.Left - deviationW) / imgViewW * decoder.PixelWidth);
            bounds.Y      = (uint)((border.Margin.Top - deviationH) / imgViewH * decoder.PixelHeight);
            bounds.Width  = (uint)(border.Width / imgViewW * decoder.PixelWidth);
            bounds.Height = (uint)(border.Height / imgViewH * decoder.PixelHeight);

            Debug.WriteLine("left:{0}, top:{1}, width:{2}, height:{3}", border.Margin.Left, border.Margin.Top, border.Width, border.Height);
            Debug.WriteLine("cwidth:{0}, cheight:{1}", imgViewW, imgViewH);
            Debug.WriteLine("pixelWidth:{0}, pixelHeight:{1}", decoder.PixelWidth, decoder.PixelHeight);
            Debug.WriteLine("x:{0}, y:{1}, w:{2}, h:{3}", bounds.X, bounds.Y, bounds.Width, bounds.Height);

            encoder.BitmapTransform.Bounds = bounds;

            // flush content in bounds into memStream
            await encoder.FlushAsync();

            return(memStream);
        }
示例#17
0
        /// <summary>
        /// Decodes <paramref name="imageStream"/>, crops it to
        /// <paramref name="cropRectangle"/> (in source pixel coordinates) using the
        /// scaling transform computed for the source image, and returns a preview source.
        /// </summary>
        /// <param name="imageStream">Stream holding the source image.</param>
        /// <param name="cropRectangle">Crop region in source pixel coordinates.</param>
        /// <returns>A SoftwareBitmapSource over the cropped, premultiplied bitmap.</returns>
        public static async Task <ImageSource> CropAndPreviewAsync(IRandomAccessStream imageStream, Rect cropRectangle)
        {
            var decoder = await BitmapDecoder.CreateAsync(imageStream);

            var bounds = new BitmapBounds();

            bounds.X      = (uint)cropRectangle.X;
            bounds.Y      = (uint)cropRectangle.Y;
            bounds.Width  = (uint)cropRectangle.Width;
            bounds.Height = (uint)cropRectangle.Height;

            var transform = ComputeScalingTransformForSourceImage(decoder);

            transform.Bounds = bounds;

            // Decode once, premultiplied, as required by SoftwareBitmapSource.
            // (The previous straight-alpha decode and the ImageQuality property set were
            // removed: their results were never used, so the image was decoded twice.)
            var bitmap = await decoder.GetSoftwareBitmapAsync(decoder.BitmapPixelFormat, BitmapAlphaMode.Premultiplied, transform, ExifOrientationMode.RespectExifOrientation, ColorManagementMode.DoNotColorManage);

            var bitmapImage = new SoftwareBitmapSource();
            await bitmapImage.SetBitmapAsync(bitmap);

            return(bitmapImage);
        }
        /// <summary>
        /// Center-crops <paramref name="inputVideoFrame"/> to a square region and copies
        /// it into the class-level <c>cropped_vf</c> frame (IMAGE_HEIGHT x IMAGE_HEIGHT, Bgra8).
        /// </summary>
        /// <param name="inputVideoFrame">Frame to crop; GPU or CPU backed.</param>
        /// <returns>The shared cropped frame (also stored in <c>cropped_vf</c>).</returns>
        public async Task <VideoFrame> CropAndDisplayInputImageAsync(VideoFrame inputVideoFrame)
        {
            // GPU-backed frames have no SoftwareBitmap; read the size from the surface.
            bool useDX = inputVideoFrame.SoftwareBitmap == null;

            BitmapBounds cropBounds  = new BitmapBounds();
            uint         h           = IMAGE_HEIGHT;
            uint         w           = IMAGE_HEIGHT;
            var          frameHeight = useDX ? inputVideoFrame.Direct3DSurface.Description.Height : inputVideoFrame.SoftwareBitmap.PixelHeight;
            var          frameWidth  = useDX ? inputVideoFrame.Direct3DSurface.Description.Width : inputVideoFrame.SoftwareBitmap.PixelWidth;

            // IMAGE_HEIGHT / IMAGE_HEIGHT == 1: the crop is square.
            var requiredAR = ((float)IMAGE_HEIGHT / IMAGE_HEIGHT);

            // Largest square region, centered on X and anchored to the top of the frame.
            w                 = Math.Min((uint)(requiredAR * frameHeight), (uint)frameWidth);
            h                 = Math.Min((uint)(frameWidth / requiredAR), (uint)frameHeight);
            cropBounds.X      = (uint)((frameWidth - w) / 2);
            cropBounds.Y      = 0;
            cropBounds.Width  = w;
            cropBounds.Height = h;

            cropped_vf = new VideoFrame(BitmapPixelFormat.Bgra8, IMAGE_HEIGHT, IMAGE_HEIGHT, BitmapAlphaMode.Ignore);

            // CopyToAsync scales the cropped region into the destination frame.
            await inputVideoFrame.CopyToAsync(cropped_vf, cropBounds, null);

            return(cropped_vf);
        }
示例#19
0
        /// <summary>
        /// Face-detection event handler: clears the visualization canvas, then draws a
        /// rectangle over each detected face on the UI thread.
        /// </summary>
        private async void _faceDetectionEffect_FaceDetected(FaceDetectionEffect sender, FaceDetectedEventArgs args)
        {
            // Remove the previous frame's boxes before drawing the new ones.
            await Windows.ApplicationModel.Core.CoreApplication.MainView.CoreWindow.Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
            {
                this.VisualizationCanvas.Children.Clear();
            });


            foreach (Windows.Media.FaceAnalysis.DetectedFace face in args.ResultFrame.DetectedFaces)
            {
                BitmapBounds faceRect = face.FaceBox;


                await Windows.ApplicationModel.Core.CoreApplication.MainView.CoreWindow.Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
                {
                    Rectangle box       = new Rectangle();
                    box.Tag             = face.FaceBox;
                    box.Width           = (uint)(face.FaceBox.Width);
                    box.Height          = (uint)(face.FaceBox.Height);
                    box.Fill            = this.fillBrush;
                    box.Stroke          = this.lineBrush;
                    box.StrokeThickness = this.lineThickness;

                    // NOTE(review): the +70/+150 offsets look like hard-coded corrections
                    // for this page's layout — confirm against the canvas position.
                    box.Margin = new Thickness((uint)(face.FaceBox.X + 70), (uint)(face.FaceBox.Y + 150), 0, 0);

                    this.VisualizationCanvas.Children.Add(box);
                });
            }
        }
示例#20
0
        /// <summary>
        /// Runs the face aligner on <paramref name="image"/> for the given face box and
        /// returns the localized landmark coordinates.
        /// </summary>
        /// <param name="image">Bitmap containing the face.</param>
        /// <param name="boundingBox">Face bounding box in image coordinates.</param>
        /// <returns>Flat array of landmark values produced by the aligner.</returns>
        private float[] LocalizeLandmarks(SoftwareBitmap image, BitmapBounds boundingBox)
        {
            // NOTE(review): Height is passed before Width here — verify this matches
            // faceAligner.LocalizeLandmarks' expected parameter order (x, y, h, w).
            float[] landmarks = faceAligner.LocalizeLandmarks(image,
                                                              (int)boundingBox.X, (int)boundingBox.Y, (int)boundingBox.Height, (int)boundingBox.Width, nIters);

            return(landmarks);
        }
示例#21
0
        /// <summary>
        /// Decodes the image in <paramref name="stream"/> and returns the raw Bgra8
        /// (straight alpha, sRGB-managed) pixel bytes of the face region described by
        /// <paramref name="rectangle"/>.
        /// </summary>
        /// <param name="stream">Stream holding the source image.</param>
        /// <param name="rectangle">Face region in image pixel coordinates.</param>
        /// <returns>The cropped pixel bytes.</returns>
        async private static Task <byte[]> GetCroppedPixelsAsync(IRandomAccessStream stream, FaceRectangle rectangle)
        {
            // Decode the stream so the image can be addressed in pixel coordinates.
            BitmapDecoder decoder = await BitmapDecoder.CreateAsync(stream);

            // Restrict the transform to the requested face region.
            var cropTransform = new BitmapTransform
            {
                Bounds = new BitmapBounds
                {
                    X      = (uint)rectangle.Left,
                    Y      = (uint)rectangle.Top,
                    Width  = (uint)rectangle.Width,
                    Height = (uint)rectangle.Height
                }
            };

            // Get the cropped pixels within the bounds of the transform.
            PixelDataProvider pixelProvider = await decoder.GetPixelDataAsync(
                BitmapPixelFormat.Bgra8,
                BitmapAlphaMode.Straight,
                cropTransform,
                ExifOrientationMode.IgnoreExifOrientation,
                ColorManagementMode.ColorManageToSRgb);

            return(pixelProvider.DetachPixelData());
        }
示例#22
0
        /// <summary>
        /// Returns a new 227x227 premultiplied-Bgra8 frame containing a horizontally
        /// centered, top-anchored square crop of <paramref name="inputVideoFrame"/>.
        /// </summary>
        /// <param name="inputVideoFrame">Frame to crop; GPU or CPU backed.</param>
        /// <returns>The cropped (and scaled by CopyToAsync) frame.</returns>
        public async Task <VideoFrame> GetCropedImage(VideoFrame inputVideoFrame)
        {
            // Frames without a SoftwareBitmap are backed by a Direct3D surface.
            bool isGpuFrame = inputVideoFrame.SoftwareBitmap == null;

            var sourceHeight = isGpuFrame ? inputVideoFrame.Direct3DSurface.Description.Height : inputVideoFrame.SoftwareBitmap.PixelHeight;
            var sourceWidth  = isGpuFrame ? inputVideoFrame.Direct3DSurface.Description.Width : inputVideoFrame.SoftwareBitmap.PixelWidth;

            // Target is 227x227, an aspect ratio of exactly 1 (square crop).
            var targetAspect = ((float)227 / 227);

            // Largest square region that fits inside the source.
            uint cropW = Math.Min((uint)(targetAspect * sourceHeight), (uint)sourceWidth);
            uint cropH = Math.Min((uint)(sourceWidth / targetAspect), (uint)sourceHeight);

            var cropBounds = new BitmapBounds
            {
                X      = (uint)((sourceWidth - cropW) / 2), // centered horizontally
                Y      = 0,                                 // anchored to the top
                Width  = cropW,
                Height = cropH
            };

            var croppedFrame = new VideoFrame(BitmapPixelFormat.Bgra8, 227, 227, BitmapAlphaMode.Premultiplied);

            await inputVideoFrame.CopyToAsync(croppedFrame, cropBounds, null);

            return(croppedFrame);
        }
示例#23
0
        /// <summary>
        /// Extension that copies a centered targetWidth x targetHeight region of
        /// <paramref name="inputVideoFrame"/> into a new Bgra8 frame of that size.
        /// Regions larger than the source are clamped to the source dimensions.
        /// </summary>
        /// <param name="inputVideoFrame">Frame to crop; GPU or CPU backed.</param>
        /// <param name="targetWidth">Width of the returned frame in pixels.</param>
        /// <param name="targetHeight">Height of the returned frame in pixels.</param>
        /// <returns>The cropped frame.</returns>
        public static async Task <VideoFrame> CropVideoFrameAsync(this VideoFrame inputVideoFrame, uint targetWidth, uint targetHeight)
        {
            // Frames without a SoftwareBitmap are backed by a Direct3D surface.
            bool isGpuFrame = inputVideoFrame.SoftwareBitmap == null;

            var sourceHeight = isGpuFrame ? inputVideoFrame.Direct3DSurface.Description.Height : inputVideoFrame.SoftwareBitmap.PixelHeight;
            var sourceWidth  = isGpuFrame ? inputVideoFrame.Direct3DSurface.Description.Width : inputVideoFrame.SoftwareBitmap.PixelWidth;

            // Crop size: the requested size, clamped to the source frame.
            uint cropW = Math.Min((targetWidth), (uint)sourceWidth);
            uint cropH = Math.Min((targetHeight), (uint)sourceHeight);

            // Center the crop on both axes.
            var cropBounds = new BitmapBounds
            {
                X      = (uint)((sourceWidth - cropW) / 2),
                Y      = (uint)((sourceHeight - cropH) / 2),
                Width  = cropW,
                Height = cropH
            };

            VideoFrame croppedVideoFrame = new VideoFrame(BitmapPixelFormat.Bgra8, (int)targetWidth, (int)targetHeight, BitmapAlphaMode.Ignore);

            await inputVideoFrame.CopyToAsync(croppedVideoFrame, cropBounds, null);

            return(croppedVideoFrame);
        }
        /// <summary>
        /// Opens <c>_file</c>, center-crops it to a square sized to its shorter dimension,
        /// and returns the re-encoded image as a stream positioned at 0.
        /// Returns null when encoding fails (best-effort by design).
        /// </summary>
        private async Task <IRandomAccessStream> GetCropedImageStream()
        {
            var stream = await _file.OpenReadAsync();

            BitmapDecoder decoder = await BitmapDecoder.CreateAsync(stream);

            var           result  = new InMemoryRandomAccessStream();
            BitmapEncoder encoder = await BitmapEncoder.CreateForTranscodingAsync(result, decoder);

            // Square crop centered on the image, sized to the shorter side.
            var min = Math.Min(decoder.PixelHeight, decoder.PixelWidth);

            var bounds = new BitmapBounds
            {
                X      = (decoder.PixelWidth - min) / 2,
                Y      = (decoder.PixelHeight - min) / 2,
                Height = min,
                Width  = min
            };

            encoder.BitmapTransform.Bounds = bounds;

            try
            {
                await encoder.FlushAsync();

                // Rewind so callers can read the encoded image from the start.
                result.Seek(0);

                return(result);
            }
            catch
            {
                // FIX: dispose the output stream on failure instead of leaking it;
                // the catch-all (return null) behavior callers rely on is preserved.
                result.Dispose();
                return(null);
            }
        }
// ===== Example #25 =====
        /// <summary>
        /// Runs face recognition on the faces detected in a cached camera frame and
        /// writes the resulting name onto each JavaScript face object before invoking
        /// the supplied JavaScript callback on the runtime thread.
        /// </summary>
        /// <param name="faces">JS array of face objects; each is read for "bounds" ({x,y,width,height}) and the first also for "frame".Get("id").</param>
        /// <param name="callback">JS function invoked as callback(faces) once every face has a "name" property.</param>
        public void RecognizeFaces(JavaScriptValue faces, JavaScriptValue callback)
        {
            // Convert each JS face's bounds object into a BitmapBounds for the server call.
            var boundList = new List <BitmapBounds>();

            for (int i = 0; i < faces.Length().Value; ++i)
            {
                var jsBounds = faces.Get(i).Get("bounds");
                var bounds   = new BitmapBounds()
                {
                    X      = (uint)jsBounds.Get("x").ToInt32(),
                    Y      = (uint)jsBounds.Get("y").ToInt32(),
                    Width  = (uint)jsBounds.Get("width").ToInt32(),
                    Height = (uint)jsBounds.Get("height").ToInt32()
                };
                boundList.Add(bounds);
            }

            // All faces are taken from the same frame: only the first face's frame id is read.
            int frameID = faces.Get(0).Get("frame").Get("id").ToInt32();
            var frame   = SceneCameraManager.Inst.GetFrameFromCache(frameID);

            // Keep the JS values alive across the asynchronous recognition callback;
            // a matching Release happens on every exit path inside the lambda below.
            callback.AddRef();
            faces.AddRef();

            server.RecognizeFaces(frame.bitmap, boundList, (s) => {
                JsonObject json;
                if (!JsonObject.TryParse(s, out json))
                {
                    // Unparseable response: label every face "Unknown" and still fire the callback.
                    ProjectRuntime.Inst.DispatchRuntimeCode(() => {
                        for (int i = 0; i < faces.Length().Value; ++i)
                        {
                            faces.Get(i).SetProperty(JavaScriptPropertyId.FromString("name"), JavaScriptValue.FromString("Unknown"), true);
                        }
                        callback.CallFunction(callback, faces);
                        callback.Release();
                        faces.Release();
                    });
                    return;
                }

                // Collect one recognized name per face from the server's JSON response.
                var responses = json.GetNamedArray("ResponsePerFace");
                var names     = new List <string>();
                for (int i = 0; i < responses.Count; ++i)
                {
                    var faceResponse = responses.GetObjectAt((uint)i);
                    names.Add(faceResponse.GetNamedString("FaceRecognition"));
                }

                // Marshal back onto the runtime thread to mutate JS objects and invoke the callback.
                ProjectRuntime.Inst.DispatchRuntimeCode(() => {
                    for (int i = 0; i < faces.Length().Value; ++i)
                    {
                        // NOTE(review): assumes names.Count matches the face count — confirm the server contract.
                        faces.Get(i).SetProperty(JavaScriptPropertyId.FromString("name"), JavaScriptValue.FromString(names[i]), true);
                    }
                    callback.CallFunction(callback, faces);
                    callback.Release();
                    faces.Release();
                });
            });
        }
// ===== Example #26 =====
        /// <summary>
        /// Copies the currently selected stamp region of the painting area into the
        /// stamp control: writes the canvas to a temporary PNG in the Pictures library,
        /// re-decodes it, crops it to the stamp selection rectangle, and hands the
        /// result to the stamp control as a WriteableBitmap.
        /// </summary>
        /// <remarks>
        /// NOTE(review): async void is kept for interface compatibility (fire-and-forget
        /// from UI code), but exceptions thrown here are unobservable by callers.
        /// </remarks>
        public async void StampCopy()
        {
            double heightStampControl = PocketPaintApplication.GetInstance().StampControl.GetHeightOfRectangleStampSelection();
            double widthStampControl  = PocketPaintApplication.GetInstance().StampControl.GetWidthOfRectangleStampSelection();

            PocketPaintApplication.GetInstance().StampControl.SetOriginalSizeOfStampedImage(heightStampControl, widthStampControl);

            Point  leftTopPointStampSelection = PocketPaintApplication.GetInstance().StampControl.GetLeftTopPointOfStampedSelection();
            double xOffsetStampControl        = leftTopPointStampSelection.X;
            double yOffsetStampControl        = leftTopPointStampSelection.Y;

            string filename = "stamp" + ".png";
            await PocketPaintApplication.GetInstance().StorageIo.WriteBitmapToPngMediaLibrary(filename);

            StorageFile storageFile = await KnownFolders.PicturesLibrary.GetFileAsync(filename);

            InMemoryRandomAccessStream mrAccessStream = new InMemoryRandomAccessStream();

            using (Stream stream = await storageFile.OpenStreamForReadAsync())
            {
                using (var memStream = new MemoryStream())
                {
                    await stream.CopyToAsync(memStream);

                    memStream.Position = 0;

                    BitmapDecoder decoder = await BitmapDecoder.CreateAsync(memStream.AsRandomAccessStream());

                    BitmapEncoder encoder = await BitmapEncoder.CreateForTranscodingAsync(mrAccessStream, decoder);

                    // Scale to the painting canvas size so the crop offsets below line up
                    // with the selection's screen coordinates.
                    encoder.BitmapTransform.ScaledHeight = (uint)PocketPaintApplication.GetInstance().PaintingAreaCanvas.RenderSize.Height;
                    encoder.BitmapTransform.ScaledWidth  = (uint)PocketPaintApplication.GetInstance().PaintingAreaCanvas.RenderSize.Width;

                    // NOTE(review): the -1 presumably keeps the bounds inside the scaled
                    // bitmap when the selection touches the right/bottom edge — confirm.
                    BitmapBounds bounds = new BitmapBounds
                    {
                        Height = (uint)heightStampControl - 1,
                        Width  = (uint)widthStampControl - 1,
                        X      = (uint)(xOffsetStampControl),
                        Y      = (uint)(yOffsetStampControl)
                    };
                    encoder.BitmapTransform.Bounds = bounds;

                    // write out to the stream
                    try
                    {
                        await encoder.FlushAsync();
                    }
                    catch (Exception)
                    {
                        // ignored
                    }
                }

                // FIX: rewind before handing the stream to SetSource. After FlushAsync the
                // position is at the end of the encoded data, and WriteableBitmap.SetSource
                // reads from the current position (compare the Seek(0) in GetCropedImageStream).
                mrAccessStream.Seek(0);

                //render the stream to the screen
                WriteableBitmap wbCroppedBitmap = new WriteableBitmap((int)widthStampControl, (int)heightStampControl);
                wbCroppedBitmap.SetSource(mrAccessStream);
                PocketPaintApplication.GetInstance().StampControl.SetSourceImageStamp(wbCroppedBitmap);
            }
        }
        /// <summary>
        /// Returns the emotion scores of the face in the most recent sample whose
        /// rectangle's top-left corner lies closest (Manhattan distance) to
        /// <paramref name="faceBox"/>, or null when no sample is available.
        /// </summary>
        public EmotionScores GetLastEmotionForFace(BitmapBounds faceBox)
        {
            var sample = this.lastEmotionSample;

            if (sample == null || !sample.Any())
            {
                return null;
            }

            // Rank candidates by |dx| + |dy| of the top-left corners and take the nearest.
            var nearest = sample
                          .OrderBy(candidate => Math.Abs(faceBox.X - candidate.FaceRectangle.Left)
                                                + Math.Abs(faceBox.Y - candidate.FaceRectangle.Top))
                          .First();

            return nearest.Scores;
        }
// ===== Example #28 =====
        /// <summary>
        /// Center-crops <paramref name="inputVideoFrame"/> to the rectangle returned by
        /// GetCropRect and copies it into a new targetWidth x targetHeight BGRA8 frame.
        /// </summary>
        /// <param name="inputVideoFrame">Source frame backed by a SoftwareBitmap or a Direct3D surface.</param>
        /// <param name="targetWidth">Width of the output frame in pixels.</param>
        /// <param name="targetHeight">Height of the output frame in pixels.</param>
        /// <returns>The cropped frame, or null when the copy fails (errors are logged, not thrown).</returns>
        public static IAsyncOperation <VideoFrame> CenterCropImageAsync(VideoFrame inputVideoFrame, uint targetWidth, uint targetHeight)
        {
            return(AsyncInfo.Run(async(token) =>
            {
                bool useDX = inputVideoFrame.SoftwareBitmap == null;
                VideoFrame result = null;
                // Center crop
                try
                {
                    // Since we will be center-cropping the image, figure which dimension has to be clipped
                    var frameHeight = useDX ? inputVideoFrame.Direct3DSurface.Description.Height : inputVideoFrame.SoftwareBitmap.PixelHeight;
                    var frameWidth = useDX ? inputVideoFrame.Direct3DSurface.Description.Width : inputVideoFrame.SoftwareBitmap.PixelWidth;

                    Rect cropRect = GetCropRect(frameWidth, frameHeight, targetWidth, targetHeight);
                    BitmapBounds cropBounds = new BitmapBounds()
                    {
                        Width = (uint)cropRect.Width,
                        Height = (uint)cropRect.Height,
                        X = (uint)cropRect.X,
                        Y = (uint)cropRect.Y
                    };

                    // A DX-backed frame must actually expose its surface.
                    if (useDX && inputVideoFrame.Direct3DSurface == null)
                    {
                        throw (new Exception("Invalid VideoFrame without SoftwareBitmap nor D3DSurface"));
                    }

                    // FIX: the former if/else branches built byte-identical destination
                    // frames, so the duplication is collapsed into one allocation. The
                    // unused width/height locals (640/440) were also removed.
                    result = new VideoFrame(BitmapPixelFormat.Bgra8,
                                            (int)targetWidth,
                                            (int)targetHeight,
                                            BitmapAlphaMode.Premultiplied);

                    // CopyToAsync scales the cropped source region into the destination frame.
                    await inputVideoFrame.CopyToAsync(result, cropBounds, null);
                }
                catch (Exception ex)
                {
                    // Best-effort: log and fall through to return null rather than faulting
                    // the IAsyncOperation — preserves existing caller expectations.
                    Debug.WriteLine(ex.ToString());
                }

                return result;
            }));
        }
        /// <summary>
        /// Looks up, in the most recent detection sample, the face closest to
        /// <paramref name="faceBox"/> (via Util.FindFaceClosestToRegion), or returns
        /// null when no sample has been captured yet.
        /// </summary>
        public DetectedFace GetLastFaceAttributesForFace(BitmapBounds faceBox)
        {
            var sample = this.lastDetectedFaceSample;

            return sample == null || !sample.Any()
                ? null
                : Util.FindFaceClosestToRegion(sample, faceBox);
        }
// ===== Example #30 =====
        /// <summary>
        /// Rescale the size and position of the face highlight box to account for the
        /// difference between the size of the image and the canvas.
        /// </summary>
        /// <param name="box">The element to rescale; its Tag holds the face's original BitmapBounds.</param>
        /// <param name="widthScale">Horizontal adjustment factor</param>
        /// <param name="heightScale">Vertical adjustment factor</param>
        static void ApplyScale(FrameworkElement box, double widthScale, double heightScale)
        {
            // The original face box was stashed in the element's Tag field when it was created.
            var originalBounds = (BitmapBounds)box.Tag;

            double scaledWidth  = originalBounds.Width * widthScale;
            double scaledHeight = originalBounds.Height * heightScale;
            double scaledLeft   = originalBounds.X * widthScale;
            double scaledTop    = originalBounds.Y * heightScale;

            box.Width  = scaledWidth;
            box.Height = scaledHeight;
            box.Margin = new Thickness(scaledLeft, scaledTop, 0, 0);
        }