/// <summary>
/// PNG-encodes a copy of <paramref name="sourceImage"/> and returns the
/// encoded bytes as a Base64 string.
/// </summary>
/// <param name="sourceImage">
/// The bitmap to encode. It is copied first, so the (possibly shared)
/// source is never handed to the encoder or modified.
/// </param>
/// <returns>Base64 representation of the PNG-encoded image.</returns>
private static async Task<string> GetBase64OfBitmap(SoftwareBitmap sourceImage)
{
    byte[] bytes;
    using (var randomAccessStream = new InMemoryRandomAccessStream())
    {
        var encoder = await BitmapEncoder.CreateAsync(
            BitmapEncoder.PngEncoderId,
            randomAccessStream);

        // Encode a private copy; dispose it once it has been flushed so the
        // native pixel buffer is released promptly.
        using (var bitmap = new SoftwareBitmap(
            sourceImage.BitmapPixelFormat,
            sourceImage.PixelWidth,
            sourceImage.PixelHeight,
            sourceImage.BitmapAlphaMode))
        {
            sourceImage.CopyTo(bitmap);
            encoder.SetSoftwareBitmap(bitmap);
            await encoder.FlushAsync();
        }

        // FlushAsync leaves the stream positioned at the end; ReadAsync reads
        // from the current position, so without rewinding the buffer would
        // come back zero-filled.
        randomAccessStream.Seek(0);

        bytes = new byte[randomAccessStream.Size];
        await randomAccessStream.ReadAsync(
            bytes.AsBuffer(),
            (uint)bytes.Length,
            InputStreamOptions.None);
    }

    return Convert.ToBase64String(bytes);
}
/// <summary>
/// Periodic tick: rotates the figure, grabs the current camera preview frame
/// on the UI thread, copies it into the shared <c>background</c> bitmap, runs
/// color detection on it, and invalidates both canvases. At most 4 ticks may
/// be in flight at once (tracked by <c>methodCount</c> under <c>lockObj</c>).
/// async void is acceptable here because this is a top-level event handler.
/// </summary>
private async void Timer_Tick(object sender, object e)
{
    lock (lockObj)
    {
        // Skip the tick when the camera is not streaming or when too many
        // ticks are already in flight.
        if (mediaCapture == null ||
            mediaCapture.CameraStreamState != Windows.Media.Devices.CameraStreamState.Streaming)
        {
            return;
        }

        if (methodCount >= 4)
        {
            return;
        }

        methodCount++;
    }

    try
    {
        figure.RotateX(0.1);

        // RunAsync only awaits *dispatch* of the handler, not the async work
        // inside it (the lambda compiles to async void). The completion
        // source lets us await the actual frame grab and observe exceptions
        // before releasing the in-flight slot.
        var completion = new TaskCompletionSource<bool>();
        await Dispatcher.RunAsync(CoreDispatcherPriority.High, async () =>
        {
            try
            {
                var previewProperties = mediaCapture.VideoDeviceController
                    .GetMediaStreamProperties(MediaStreamType.VideoPreview) as VideoEncodingProperties;

                // Dispose the frame (and the SoftwareBitmap it owns) once its
                // pixels have been copied into the shared background bitmap.
                using (var videoFrame = new VideoFrame(
                    BitmapPixelFormat.Bgra8,
                    (int)previewProperties.Width,
                    (int)previewProperties.Height))
                {
                    VideoFrame previewFrame = await mediaCapture.GetPreviewFrameAsync(videoFrame);
                    SoftwareBitmap previewBitmap = previewFrame.SoftwareBitmap;

                    lock (lockObj)
                    {
                        if (background == null)
                        {
                            background = new SoftwareBitmap(
                                BitmapPixelFormat.Bgra8,
                                previewBitmap.PixelWidth,
                                previewBitmap.PixelHeight,
                                previewBitmap.BitmapAlphaMode);
                        }

                        previewBitmap.CopyTo(background);
                        ColorsDetector detector = new ColorsDetector();
                        detector.Detect(background);
                    }
                }

                completion.SetResult(true);
            }
            catch (Exception ex)
            {
                completion.SetException(ex);
            }
        });

        await completion.Task;
    }
    finally
    {
        // Always release the in-flight slot, even when the preview grab
        // throws — otherwise a failure permanently consumes one of the
        // 4 slots.
        lock (lockObj)
        {
            methodCount--;
        }
    }

    canvLeft.Invalidate();
    canvRight.Invalidate();
}
/// <summary>
/// Clients should override this method if the post-processing cannot be
/// done in place. If the post-processing can be done in place, clients
/// should override the
/// Process(byte[], int, int, BitmapPixelFormat, BitmapAlphaMode) method.
///
/// <para />The provided destination bitmap is of the same size as the
/// source bitmap. There are no guarantees on the initial content of the
/// destination bitmap, so the implementation has to make sure that it
/// properly populates it.
///
/// <para />The source bitmap must not be modified as it may be shared
/// by the other clients. The implementation must use the provided
/// destination bitmap as its output.
/// </summary>
/// <param name="destBitmap">
/// The destination bitmap to be used as output.
/// </param>
/// <param name="sourceBitmap">
/// The source bitmap to be used as input.
/// </param>
/// <param name="flexByteArrayPool">
/// The memory pool used for post process.
/// </param>
public unsafe virtual void Process(
    SoftwareBitmap destBitmap,
    SoftwareBitmap sourceBitmap,
    FlexByteArrayPool flexByteArrayPool)
{
    // Source and destination must agree on format/size, and the
    // destination must be writable, before copying pixels across.
    Preconditions.CheckArgument(sourceBitmap.BitmapPixelFormat == destBitmap.BitmapPixelFormat);
    Preconditions.CheckArgument(!destBitmap.IsReadOnly);
    Preconditions.CheckArgument(destBitmap.PixelWidth == sourceBitmap.PixelWidth);
    Preconditions.CheckArgument(destBitmap.PixelHeight == sourceBitmap.PixelHeight);
    sourceBitmap.CopyTo(destBitmap);

    using (var buffer = destBitmap.LockBuffer(BitmapBufferAccessMode.Write))
    using (var reference = buffer.CreateReference())
    {
        // Get a raw pointer to the destination bitmap's pixel buffer.
        byte* srcData;
        uint capacity;
        ((IMemoryBufferByteAccess)reference).GetBuffer(out srcData, out capacity);

        // Rent a temp buffer for processing from the pool when possible.
        byte[] desData = default(byte[]);
        CloseableReference<byte[]> bytesArrayRef = default(CloseableReference<byte[]>);
        try
        {
            bytesArrayRef = flexByteArrayPool.Get((int)capacity);
            desData = bytesArrayRef.Get();
        }
        catch (Exception)
        {
            // Deliberate fallback: allocate the byte array directly since
            // the pool couldn't provide one (bytesArrayRef stays null and
            // CloseSafely below is a no-op for it).
            desData = new byte[capacity];
        }

        try
        {
            // Copy pixels out, run the managed byte-array overload, then
            // copy the processed bytes back into the locked native buffer.
            // Statement order matters: the copy-back is what makes the
            // result visible in destBitmap.
            Marshal.Copy((IntPtr)srcData, desData, 0, (int)capacity);
            Process(desData, destBitmap.PixelWidth, destBitmap.PixelHeight, destBitmap.BitmapPixelFormat, destBitmap.BitmapAlphaMode);
            Marshal.Copy(desData, 0, (IntPtr)srcData, (int)capacity);
        }
        finally
        {
            // Return the rented buffer to the pool (safe on null).
            CloseableReference<byte[]>.CloseSafely(bytesArrayRef);
        }
    }
}
/// <summary>
/// Counts the pixels of <paramref name="image"/> whose color lies within
/// <c>Delta</c> (squared Euclidean distance over B/G/R) of the target
/// <c>Color</c>, and paints each match opaque red in a copy of the image.
/// </summary>
/// <param name="image">
/// Input bitmap; it is copied, not modified. The loop indexes the buffer
/// at 4 bytes per pixel in B,G,R,A order — assumes a Bgra8-style layout,
/// TODO confirm for other pixel formats.
/// </param>
/// <returns>
/// A <c>CounterResult</c> holding the match count and the marked-up copy.
/// </returns>
public unsafe IResult Detect(SoftwareBitmap image)
{
    CounterResult result = new CounterResult()
    {
        Count = 0,
        Image = new SoftwareBitmap(image.BitmapPixelFormat, image.PixelWidth, image.PixelHeight)
    };
    image.CopyTo(result.Image);

    using (BitmapBuffer buffer = result.Image.LockBuffer(BitmapBufferAccessMode.Write))
    {
        using (var reference = buffer.CreateReference())
        {
            ((IMemoryBufferByteAccess)reference).GetBuffer(out byte* dataInBytes, out uint capacity);

            // Walk the first (BGRA) plane row by row.
            BitmapPlaneDescription bufferLayout = buffer.GetPlaneDescription(0);
            for (int i = 0; i < bufferLayout.Height; i++)
            {
                int rowStart = bufferLayout.StartIndex + bufferLayout.Stride * i;
                for (int j = 0; j < bufferLayout.Width; j++)
                {
                    // Compute the pixel offset once instead of re-deriving
                    // StartIndex + Stride*i + 4*j for every channel access.
                    int pixel = rowStart + 4 * j;
                    int b = dataInBytes[pixel + 0] - Color.B;
                    int g = dataInBytes[pixel + 1] - Color.G;
                    int r = dataInBytes[pixel + 2] - Color.R;

                    // Squared distance avoids the sqrt: compare to Delta².
                    if (b * b + g * g + r * r < Delta * Delta)
                    {
                        result.Count++;
                        // Mark the match opaque red (B=0, G=0, R=255, A=255).
                        dataInBytes[pixel + 0] = 0;
                        dataInBytes[pixel + 1] = 0;
                        dataInBytes[pixel + 2] = 255;
                        dataInBytes[pixel + 3] = (byte)255;
                    }
                }
            }
        }
    }

    return result;
}
/// <summary>
/// Detects the first face in <paramref name="bitmap"/> and aligns ASM
/// 87-point facial landmarks on it.
/// </summary>
/// <param name="bitmap">The input image.</param>
/// <returns>
/// A tuple of the converted image and the aligned landmark points, or
/// <c>null</c> when no face or no landmark points are found.
/// </returns>
private async Task<Tuple<Image<byte>, IList<PointF>>> PrepBitmapAsync(SoftwareBitmap bitmap)
{
    // Pad odd-height inputs to an even height before detection.
    // NOTE(review): SoftwareBitmap.CopyTo requires the destination to have
    // the same dimensions as the source, so copying into a (height + 1)
    // bitmap looks like it would throw — confirm against the CopyTo docs.
    if (bitmap.PixelHeight % 2 != 0)
    {
        var resized = new SoftwareBitmap(bitmap.BitmapPixelFormat, bitmap.PixelWidth, bitmap.PixelHeight + 1);
        bitmap.CopyTo(resized);
        bitmap = resized;
    }

    Rectangle firstFace;
    try
    {
        var detector = await FaceDetector.CreateAsync();
        var formats = FaceDetector.GetSupportedBitmapPixelFormats();
        var convertedBitmap = SoftwareBitmap.Convert(bitmap, formats.First());
        var detected = await detector.DetectFacesAsync(convertedBitmap);

        // Materialize once: the deferred query was previously enumerated
        // twice (Any() then First()), re-running the projection each time.
        // NOTE(review): the (x, x + w, y, y + h) argument order assumes a
        // (left, right, top, bottom) Rectangle constructor — confirm.
        var faces = detected
            .Select(x => x.FaceBox)
            .Select(x => new Rectangle((int)x.X, (int)x.X + (int)x.Width, (int)x.Y, (int)x.Y + (int)x.Height))
            .ToList();

        if (faces.Count == 0)
        {
            return null;
        }

        firstFace = faces[0];
    }
    catch (Exception)
    {
        Debugger.Break();
        throw;
    }

    IList<PointF> points;
    var image = ConvertTo.Image.FromSoftwareBitmap(bitmap);
    try
    {
        // Lazily create the alignmentor from the bundled ASM model stream.
        if (alignmentor == null)
        {
            using (var stream = ResourceManager.GetStream(ResourceKey.AsmAlignment))
            {
                alignmentor = FaceAlignmentorFactory.Create(FaceAlignmentType.Asm87Points, stream);
            }
        }

        var grayImage = new ImageGray(image);
        points = alignmentor.Align(grayImage, firstFace).ToList();
        if (!points.Any())
        {
            return null;
        }
    }
    catch (Exception)
    {
        Debugger.Break();
        throw;
    }

    return new Tuple<Image<byte>, IList<PointF>>(image, points);
}