Code Example #1
        public async Task <DecodeResult> DecodeAsync(SoftwareBitmap bitmap)
        {
            // The only thing we decode is OCR.
            if (!ActiveSymbologies.Contains(BarcodeSymbologies.OcrA))
            {
                return(null);
            }

            OcrResult ocrResult = await ocrEngine.RecognizeAsync(bitmap);

            // Get the text of the first non-empty line as the result
            foreach (OcrLine line in ocrResult.Lines)
            {
                if (!string.IsNullOrEmpty(line.Text))
                {
                    return(new DecodeResult
                    {
                        Symbology = BarcodeSymbologies.OcrA,
                        Text = line.Text
                    });
                }
            }
            return(null);
        }
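The snippet above returns a DecodeResult, and Code Example #9 below compares results with Equals. The sample's actual definition is not shown; a minimal sketch consistent with that usage might look like this (hypothetical):

        // Hypothetical sketch of the DecodeResult container assumed by the decoder snippets.
        public class DecodeResult
        {
            public uint   Symbology { get; set; }  // a BarcodeSymbologies value, e.g. BarcodeSymbologies.OcrA
            public string Text { get; set; }

            public override bool Equals(object obj) =>
                obj is DecodeResult other && other.Symbology == Symbology && other.Text == Text;

            public override int GetHashCode() => (Symbology, Text).GetHashCode();
        }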
Code Example #2
        /// <summary>
        /// Transform an image into a Bgra8 image using the given transform method.
        /// </summary>
        /// <param name="inputBitmap">Input image to transform.</param>
        /// <param name="outputBitmap">Output image that receives the transformed Bgra8 pixels.</param>
        /// <param name="transformScanline">Method to map pixels in a scanline.</param>
        private static unsafe void TransformBitmap(
            SoftwareBitmap inputBitmap,
            SoftwareBitmap outputBitmap,
            TransformScanline transformScanline)
        {
            using (var input = inputBitmap.LockBuffer(BitmapBufferAccessMode.Read))
                using (var output = outputBitmap.LockBuffer(BitmapBufferAccessMode.Write))
                {
                    // Get stride values to calculate buffer position for a given pixel x and y position.
                    int inputStride  = input.GetPlaneDescription(0).Stride;
                    int outputStride = output.GetPlaneDescription(0).Stride;
                    int pixelWidth   = inputBitmap.PixelWidth;
                    int pixelHeight  = inputBitmap.PixelHeight;

                    using (var outputReference = output.CreateReference())
                        using (var inputReference = input.CreateReference())
                        {
                            // Get input and output byte access buffers.
                            byte *inputBytes;
                            uint  inputCapacity;
                            ((IMemoryBufferByteAccess)inputReference).GetBuffer(out inputBytes, out inputCapacity);
                            byte *outputBytes;
                            uint  outputCapacity;
                            ((IMemoryBufferByteAccess)outputReference).GetBuffer(out outputBytes, out outputCapacity);

                            // Iterate over all pixels and store converted value.
                            for (int y = 0; y < pixelHeight; y++)
                            {
                                byte *inputRowBytes  = inputBytes + y * inputStride;
                                byte *outputRowBytes = outputBytes + y * outputStride;

                                transformScanline(pixelWidth, inputRowBytes, outputRowBytes);
                            }
                        }
                }
        }
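Code Example #2 relies on the IMemoryBufferByteAccess COM interface and a TransformScanline delegate declared elsewhere in the sample. The usual interop declaration (requires System.Runtime.InteropServices) and a delegate signature inferred from the call above are:

        // COM interface that exposes the raw bytes behind an IMemoryBufferReference.
        [ComImport]
        [Guid("5B0D3235-4DBA-4D44-865E-8F1D0E4FD04D")]
        [InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
        unsafe interface IMemoryBufferByteAccess
        {
            void GetBuffer(out byte *buffer, out uint capacity);
        }

        // Maps one scanline of input pixels to one scanline of Bgra8 output pixels.
        unsafe delegate void TransformScanline(int pixelWidth, byte *inputRowBytes, byte *outputRowBytes);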
Code Example #3
        public static SoftwareBitmap RotateEffect(SoftwareBitmap Input, int Angle)
        {
            using (Mat inputMat = Input.SoftwareBitmapToMat())
                using (Mat outputMat = new Mat(inputMat.Rows, inputMat.Cols, MatType.CV_8UC4))
                {
                    switch (Angle)
                    {
                    case 90:
                    {
                        Cv2.Transpose(inputMat, outputMat);
                        Cv2.Flip(outputMat, outputMat, FlipMode.Y);
                        break;
                    }

                    case 180:
                    {
                        // Flip the input both horizontally and vertically; no transpose is needed for 180°.
                        Cv2.Flip(inputMat, outputMat, FlipMode.XY);
                        break;
                    }

                    case -90:
                    {
                        Cv2.Transpose(inputMat, outputMat);
                        Cv2.Flip(outputMat, outputMat, FlipMode.X);
                        break;
                    }

                    default:
                    {
                        throw new Exception("Angle only supports rotations of 90, 180, or -90 degrees");
                    }
                    }

                    return(outputMat.MatToSoftwareBitmap());
                }
        }
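The SoftwareBitmapToMat / MatToSoftwareBitmap extension methods used above are not shown. A rough sketch of how such helpers could be written with OpenCvSharp, assuming a Bgra8 bitmap and copying through a managed byte array (hypothetical, not the project's actual code):

        public static Mat SoftwareBitmapToMat(this SoftwareBitmap bitmap)
        {
            // Copy the Bgra8 pixels into a managed array, then into a CV_8UC4 Mat.
            var bytes = new byte[4 * bitmap.PixelWidth * bitmap.PixelHeight];
            bitmap.CopyToBuffer(bytes.AsBuffer());
            var mat = new Mat(bitmap.PixelHeight, bitmap.PixelWidth, MatType.CV_8UC4);
            Marshal.Copy(bytes, 0, mat.Data, bytes.Length);
            return mat;
        }

        public static SoftwareBitmap MatToSoftwareBitmap(this Mat mat)
        {
            // Copy the CV_8UC4 Mat data back into a Bgra8 SoftwareBitmap.
            var bytes = new byte[4 * mat.Cols * mat.Rows];
            Marshal.Copy(mat.Data, bytes, 0, bytes.Length);
            return SoftwareBitmap.CreateCopyFromBuffer(bytes.AsBuffer(), BitmapPixelFormat.Bgra8, mat.Cols, mat.Rows, BitmapAlphaMode.Premultiplied);
        }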
Code Example #4
        /// <summary>
        /// Decode an image file into a VideoFrame
        /// </summary>
        /// <param name="file"></param>
        /// <returns></returns>
        private async Task GetFrameFromFileAsync(StorageFile file)
        {
            // Decode the image file content into a SoftwareBitmap and wrap it in a VideoFrame
            SoftwareBitmap softwareBitmap = null;

            using (IRandomAccessStream stream = await file.OpenAsync(FileAccessMode.Read))
            {
                // Create the decoder from the stream
                BitmapDecoder decoder = await BitmapDecoder.CreateAsync(stream);

                // Get the SoftwareBitmap representation of the file in BGRA8 format
                softwareBitmap = await decoder.GetSoftwareBitmapAsync();

                // Convert to preferred format if specified and encapsulate the image in a VideoFrame instance
                var convertedSoftwareBitmap = m_desiredImageDescriptor == null
                                                  ? SoftwareBitmap.Convert(softwareBitmap, BitmapPixelFormat.Bgra8, BitmapAlphaMode.Ignore)
                                                  : SoftwareBitmap.Convert(softwareBitmap, m_desiredImageDescriptor.SupportedBitmapPixelFormat, m_desiredImageDescriptor.SupportedBitmapAlphaMode);

                m_videoFrame = VideoFrame.CreateWithSoftwareBitmap(convertedSoftwareBitmap);
            }

            // Extract frame dimensions
            FrameWidth  = (uint)softwareBitmap.PixelWidth;
            FrameHeight = (uint)softwareBitmap.PixelHeight;
        }
Code Example #5
        public async void SaveImage(SoftwareBitmap softwareBitmap)
        {
            //var picker=new Windows.Storage.Pickers.FileSavePicker();
            FileSavePicker fileSavePicker = new FileSavePicker();

            fileSavePicker.SuggestedStartLocation = PickerLocationId.PicturesLibrary;
            //fileSavePicker.FileTypeChoices.Add("JPEG files", new List<string>() { ".jpg" });
            fileSavePicker.FileTypeChoices.Add("PNG files", new List <string>()
            {
                ".png"
            });
            fileSavePicker.SuggestedFileName = "Karte_" + DateTime.Now.ToString("dd_MM_yyyy");
            var outputFile = await fileSavePicker.PickSaveFileAsync();

            if (outputFile == null)
            {
                // The user cancelled the picking operation
                return;
            }
            else
            {
                SaveSoftwareBitmapToFile(softwareBitmap, outputFile);
            }
        }
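SaveSoftwareBitmapToFile is defined elsewhere in this project. A minimal sketch using BitmapEncoder, assuming a PNG target and an encoder-compatible (Bgra8) bitmap:

        private async Task SaveSoftwareBitmapToFile(SoftwareBitmap softwareBitmap, StorageFile outputFile)
        {
            using (IRandomAccessStream stream = await outputFile.OpenAsync(FileAccessMode.ReadWrite))
            {
                // Encode the bitmap into the picked file as PNG.
                BitmapEncoder encoder = await BitmapEncoder.CreateAsync(BitmapEncoder.PngEncoderId, stream);
                encoder.SetSoftwareBitmap(softwareBitmap);
                await encoder.FlushAsync();
            }
        }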
Code Example #6
        private static async Task <byte[]> SoftwareBitmapToByteArray(SoftwareBitmap input_bitmap)
        {
            byte[] bitmap_data = null;

            using (var memory_stream = new InMemoryRandomAccessStream())
            {
                BitmapEncoder encoder = await BitmapEncoder.CreateAsync(BitmapEncoder.BmpEncoderId, memory_stream);

                encoder.SetSoftwareBitmap(input_bitmap);

                try
                {
                    await encoder.FlushAsync();
                }
                catch
                {
                    return(new byte[0]);
                }

                bitmap_data = new byte[memory_stream.Size];
                await memory_stream.ReadAsync(bitmap_data.AsBuffer(), (uint)memory_stream.Size, InputStreamOptions.None);
            }
            return(bitmap_data);
        }
Code Example #7
        /*
         * Method responsible for capturing an image and sending it to MainPage.
         * More information: https://docs.microsoft.com/pt-br/windows/uwp/audio-video-camera/capture-photos-and-video-with-cameracaptureui
         */
        private async void take_photo(object sender, RoutedEventArgs e)
        {
            try
            {
                //Asynchronous capture
                photo = await this.captureUI.CaptureFileAsync(CameraCaptureUIMode.Photo);

                // If the user cancelled the photo capture
                if (photo == null)
                {
                    return;
                }
                else
                {
                    //Load the photo
                    this.imageStream = await photo.OpenAsync(FileAccessMode.Read);

                    BitmapDecoder decoder = await BitmapDecoder.CreateAsync(imageStream);

                    SoftwareBitmap softBitmap = await decoder.GetSoftwareBitmapAsync();

                    //Convert to the format required for display on the XAML page
                    SoftwareBitmap       softBitmapBGR8 = SoftwareBitmap.Convert(softBitmap, BitmapPixelFormat.Bgra8, BitmapAlphaMode.Premultiplied);
                    SoftwareBitmapSource bitmapSource   = new SoftwareBitmapSource();
                    await bitmapSource.SetBitmapAsync(softBitmapBGR8);

                    //Attach the stored photo to the "image" control
                    image.Source = bitmapSource;
                }
            }
            catch
            {
                //Report the error through the text field created on the page
                output.Text = "Error: taking photo";
            }
        }
Code Example #8
        private async Task <byte[]> EncodeJpeg(WriteableBitmap bmp)
        {
            SoftwareBitmap soft = SoftwareBitmap.CreateCopyFromBuffer(bmp.PixelBuffer, BitmapPixelFormat.Bgra8, bmp.PixelWidth, bmp.PixelHeight);

            byte[] array = null;

            using (var ms = new InMemoryRandomAccessStream())
            {
                BitmapEncoder encoder = await BitmapEncoder.CreateAsync(BitmapEncoder.JpegEncoderId, ms);

                encoder.SetSoftwareBitmap(soft);

                try
                {
                    await encoder.FlushAsync();
                }
                catch { }

                array = new byte[ms.Size];
                await ms.ReadAsync(array.AsBuffer(), (uint)ms.Size, InputStreamOptions.None);
            }

            return(array);
        }
Code Example #9
        private async void FrameReader_FrameArrived(BarcodeScannerFrameReader sender, BarcodeScannerFrameReaderFrameArrivedEventArgs args)
        {
            if (!scannerEnabled)
            {
                // Return immediately if decoder is not enabled
                return;
            }

            using (args.GetDeferral())
            {
                SoftwareBitmap bitmap = null;
                using (BarcodeScannerVideoFrame frame = await sender.TryAcquireLatestFrameAsync())
                {
                    if (frame != null)
                    {
                        // Build software bitmap from pixel data
                        bitmap = SoftwareBitmap.CreateCopyFromBuffer(frame.PixelData, frame.Format, (int)frame.Width, (int)frame.Height);
                    }
                }

                if (bitmap != null)
                {
                    DecodeResult decodeResult = await decodeEngine.DecodeAsync(bitmap);

                    bitmap.Dispose();
                    bitmap = null;

                    // Report only if we have a result that is different from previous one.
                    if (decodeResult != null && !decodeResult.Equals(previousDecodeResult))
                    {
                        previousDecodeResult = decodeResult;
                        await connection.ReportScannedDataAsync(CreateBarcodeScannerReportFromString(decodeResult.Text, decodeResult.Symbology));
                    }
                }
            }
        }
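CreateBarcodeScannerReportFromString is declared elsewhere in the sample. A plausible sketch that packs the decoded text into a BarcodeScannerReport (an assumption; the real sample may encode the data or label differently):

        private BarcodeScannerReport CreateBarcodeScannerReportFromString(string data, uint symbology)
        {
            // Encode the text as UTF-8 and reuse the same buffer for scan data and label.
            IBuffer buffer = CryptographicBuffer.ConvertStringToBinary(data, BinaryStringEncoding.Utf8);
            return new BarcodeScannerReport(symbology, buffer, buffer);
        }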
Code Example #10
        public async Task <byte[]> EncodedBytes(SoftwareBitmap soft, Guid encoderId)
        {
            byte[] array = null;

            // First: Use an encoder to copy from SoftwareBitmap to an in-mem stream (FlushAsync)
            // Next:  Use ReadAsync on the in-mem stream to get byte[] array

            using (var ms = new InMemoryRandomAccessStream())
            {
                BitmapEncoder encoder = await BitmapEncoder.CreateAsync(encoderId, ms);

                encoder.SetSoftwareBitmap(soft);

                try
                {
                    await encoder.FlushAsync();
                }
                catch (Exception ex) { return(new byte[0]); }

                array = new byte[ms.Size];
                await ms.ReadAsync(array.AsBuffer(), (uint)ms.Size, InputStreamOptions.None);
            }
            return(array);
        }
Code Example #11
        public static async Task <TensorFloat> NormalizeImage(VideoFrame frame, Vector3 mean, Vector3 std, uint width, uint height)
        {
            var bitmapBuffer = new SoftwareBitmap(frame.SoftwareBitmap.BitmapPixelFormat, frame.SoftwareBitmap.PixelWidth, frame.SoftwareBitmap.PixelHeight, BitmapAlphaMode.Ignore);
            var buffer       = VideoFrame.CreateWithSoftwareBitmap(bitmapBuffer);
            await frame.CopyToAsync(buffer);


            var innerBitmap = new WriteableBitmap(bitmapBuffer.PixelWidth, bitmapBuffer.PixelHeight);

            bitmapBuffer.CopyToBuffer(innerBitmap.PixelBuffer);
            var pixelsStream = innerBitmap.PixelBuffer.AsStream();

            var transform = new BitmapTransform()
            {
                ScaledWidth = width, ScaledHeight = height, InterpolationMode = BitmapInterpolationMode.Cubic
            };
            var decoder = await BitmapDecoder.CreateAsync(pixelsStream.AsRandomAccessStream());

            var pixelData = await decoder.GetPixelDataAsync(BitmapPixelFormat.Bgra8, BitmapAlphaMode.Ignore, transform, ExifOrientationMode.RespectExifOrientation, ColorManagementMode.ColorManageToSRgb);

            var pixels = pixelData.DetachPixelData();

            return(Normalize(pixels, mean, std, width, height));
        }
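The final Normalize call is defined elsewhere. A sketch of a typical per-channel normalization that builds an NCHW TensorFloat from the BGRA8 pixel data, assuming RGB channel order and mean/std expressed in the 0–1 range (assumptions, not the project's exact layout):

        private static TensorFloat Normalize(byte[] pixels, Vector3 mean, Vector3 std, uint width, uint height)
        {
            int size = (int)(width * height);
            var data = new float[3 * size];

            for (int i = 0; i < size; i++)
            {
                // Source layout is BGRA8: B, G, R, A.
                float b = pixels[i * 4 + 0] / 255f;
                float g = pixels[i * 4 + 1] / 255f;
                float r = pixels[i * 4 + 2] / 255f;

                data[0 * size + i] = (r - mean.X) / std.X;
                data[1 * size + i] = (g - mean.Y) / std.Y;
                data[2 * size + i] = (b - mean.Z) / std.Z;
            }

            return TensorFloat.CreateFromArray(new long[] { 1, 3, height, width }, data);
        }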
Code Example #12
        private async void VideoFrameAvailable(MediaPlayer sender, object args)
        {
            CanvasDevice canvasDevice = CanvasDevice.GetSharedDevice();
            int          width        = (int)sender.PlaybackSession.NaturalVideoWidth;
            int          height       = (int)sender.PlaybackSession.NaturalVideoHeight;

            if (frameBuffer == null)
            {
                frameBuffer = new SoftwareBitmap(BitmapPixelFormat.Rgba8, width, height, BitmapAlphaMode.Premultiplied);
            }

            await window.Dispatcher.RunAsync(CoreDispatcherPriority.Normal, async() => {
                SoftwareBitmap frame;

                using (var inputBitmap = CanvasBitmap.CreateFromSoftwareBitmap(canvasDevice, frameBuffer)) {
                    sender.CopyFrameToVideoSurface(inputBitmap);
                    frame = await SoftwareBitmap.CreateCopyFromSurfaceAsync(inputBitmap);
                }

                Width  = frame.PixelWidth;
                Height = frame.PixelHeight;

                var cam           = camera.View.Inverse();
                var webcamToWorld = new Matrix4(cam.m00, cam.m01, cam.m02, cam.m03,
                                                cam.m10, cam.m11, cam.m12, cam.m13,
                                                cam.m20, cam.m21, cam.m22, cam.m23,
                                                0, 0, 0, 1);

                FrameReady?.Invoke(new FrameData()
                {
                    bitmap = frame,
                    webcamToWorldMatrix = webcamToWorld,
                    projectionMatrix    = camera.Projection
                });
            });
        }
Code Example #13
File: MainPage.xaml.cs  Project: reiserwang/BearID
        private async static Task <SoftwareBitmap> ResizeBitmap(SoftwareBitmap softwareBitmap, int width, int height)
        {
            SoftwareBitmap result;

            using (SoftwareBitmap encoderBitmap = SoftwareBitmap.Convert(softwareBitmap, BitmapPixelFormat.Bgra8, BitmapAlphaMode.Premultiplied))
            {
                using (MemoryStream memoryStream = new MemoryStream())
                {
                    BitmapEncoder encoder = await BitmapEncoder.CreateAsync(BitmapEncoder.JpegEncoderId, memoryStream.AsRandomAccessStream());

                    encoder.SetSoftwareBitmap(encoderBitmap);
                    encoder.BitmapTransform.ScaledWidth  = (uint)width;
                    encoder.BitmapTransform.ScaledHeight = (uint)height;

                    await encoder.FlushAsync();

                    var decoder = await BitmapDecoder.CreateAsync(memoryStream.AsRandomAccessStream());

                    result = await decoder.GetSoftwareBitmapAsync(BitmapPixelFormat.Bgra8, BitmapAlphaMode.Premultiplied);
                }
            }

            return(result);
        }
Code Example #14
File: MainWindow.xaml.cs  Project: kame-chan/WPF
        private async void OcrStringAsync(Mat img)
        {
            copyimg.Source = System.Windows.Media.Imaging.BitmapFrame.Create(OpenCvSharp.WpfExtensions.BitmapSourceConverter.ToBitmapSource(img));
            SoftwareBitmap sbitmap = await ConvertSoftwareBitmap(copyimg);

            OcrResult result = await RunOcr(sbitmap);

            Console.WriteLine(result.Text);
            string output = "";

            foreach (var line in result.Lines)
            {
                // Buffer for storing the text of one line
                var sb = new System.Text.StringBuilder();
                // The position where each character appears is recorded
                RectangleF cloneRect = new RectangleF(
                    (float)line.Words[0].BoundingRect.Left,
                    (float)line.Words[0].BoundingRect.Top,
                    (float)(line.Words[line.Words.Count - 1].BoundingRect.Right - line.Words[0].BoundingRect.Left),
                    (float)(line.Words[0].BoundingRect.Bottom - line.Words[0].BoundingRect.Top)
                    );

                foreach (var word in line.Words)
                {
                    // Each word holds a single character, so concatenate them
                    sb.Append(word.Text);
                }

                output += string.Format("[{0}]{1}{2}",
                                        sb.ToString().TrimEnd(),
                                        cloneRect,
                                        Environment.NewLine // newline
                                        );
            }
            Console.WriteLine(output);
        }
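ConvertSoftwareBitmap and RunOcr are helpers from the same WPF project that are not included here. A rough sketch of what they could look like, assuming the Image control holds a BitmapSource and that the user-profile language supports OCR (hypothetical):

        private async Task<SoftwareBitmap> ConvertSoftwareBitmap(System.Windows.Controls.Image image)
        {
            // Encode the WPF BitmapSource to PNG in memory, then decode it as a SoftwareBitmap.
            var encoder = new System.Windows.Media.Imaging.PngBitmapEncoder();
            encoder.Frames.Add(System.Windows.Media.Imaging.BitmapFrame.Create((System.Windows.Media.Imaging.BitmapSource)image.Source));

            using (var ms = new System.IO.MemoryStream())
            {
                encoder.Save(ms);
                ms.Position = 0;
                BitmapDecoder decoder = await BitmapDecoder.CreateAsync(ms.AsRandomAccessStream());
                return await decoder.GetSoftwareBitmapAsync(BitmapPixelFormat.Bgra8, BitmapAlphaMode.Premultiplied);
            }
        }

        private async Task<OcrResult> RunOcr(SoftwareBitmap bitmap)
        {
            // Assumes at least one installed language pack supports OCR.
            OcrEngine engine = OcrEngine.TryCreateFromUserProfileLanguages();
            return await engine.RecognizeAsync(bitmap);
        }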
Code Example #15
File: RAWThumbnail.cs  Project: xvanneste/Raw2Jpeg
        public SoftwareBitmap GetBitmap()
        {
            SoftwareBitmap bitmap = null;

            //Needs to run in UI thread
            CoreApplication.MainView.CoreWindow.Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
            {
                bitmap = new SoftwareBitmap(BitmapPixelFormat.Bgra8, (int)dim.width, (int)dim.height);
            }).AsTask().Wait();

            using (BitmapBuffer buffer = bitmap.LockBuffer(BitmapBufferAccessMode.Write))
                using (var reference = buffer.CreateReference())
                {
                    unsafe
                    {
                        ((IMemoryBufferByteAccess)reference).GetBuffer(out var tempByteArray, out uint capacity);
                        // Fill-in the BGRA plane
                        BitmapPlaneDescription bufferLayout = buffer.GetPlaneDescription(0);
                        for (int i = 0; i < bufferLayout.Width * bufferLayout.Height; i++)
                        {
                            tempByteArray[bufferLayout.StartIndex + (i * 4)]     = data[(i * cpp) + 2];
                            tempByteArray[bufferLayout.StartIndex + (i * 4) + 1] = data[(i * cpp) + 1];
                            tempByteArray[bufferLayout.StartIndex + (i * 4) + 2] = data[(i * cpp)];
                            if (cpp == 4)
                            {
                                tempByteArray[bufferLayout.StartIndex + (i * 4) + 3] = data[(i * 4) + 3];
                            }
                            else
                            {
                                tempByteArray[bufferLayout.StartIndex + (i * 4) + 3] = 255;
                            }
                        }
                    }
                }
            return(bitmap);
        }
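The method reads several instance members (dim, data, cpp) defined elsewhere in RAWThumbnail.cs. Their shapes can only be inferred from usage; a hypothetical sketch:

        // Hypothetical field declarations inferred from how GetBitmap() uses them.
        private (uint width, uint height) dim;  // thumbnail dimensions in pixels
        private byte[] data;                    // interleaved RGB or RGBA source pixels
        private int cpp;                        // components per pixel: 3 (RGB) or 4 (RGBA)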
Code Example #16
        public async Task <SoftwareBitmap> RenderAsync(IEnumerable <InkStroke> inkStrokes, double width, double height)
        {
            var dpi = DisplayInformation.GetForCurrentView().LogicalDpi;

            try
            {
                var renderTarget = new CanvasRenderTarget(_canvasDevice, (float)width, (float)height, dpi);
                using (renderTarget)
                {
                    using (var drawingSession = renderTarget.CreateDrawingSession())
                    {
                        drawingSession.DrawInk(inkStrokes);
                    }

                    return(await SoftwareBitmap.CreateCopyFromSurfaceAsync(renderTarget));
                }
            }
            catch (Exception e) when(_canvasDevice.IsDeviceLost(e.HResult))
            {
                _canvasDevice.RaiseDeviceLost();
            }

            return(null);
        }
Code Example #17
        private static async Task <string> OCRInternal(Bitmap bmp, string languageTag)
        {
            Language language = new Language(languageTag);

            if (!OcrEngine.IsLanguageSupported(language))
            {
                throw new Exception($"{language.LanguageTag} is not supported in this system.");
            }

            OcrEngine engine = OcrEngine.TryCreateFromLanguage(language);

            using (InMemoryRandomAccessStream stream = new InMemoryRandomAccessStream())
            {
                bmp.Save(stream.AsStream(), ImageFormat.Bmp);
                BitmapDecoder decoder = await BitmapDecoder.CreateAsync(stream);

                using (SoftwareBitmap softwareBitmap = await decoder.GetSoftwareBitmapAsync())
                {
                    OcrResult ocrResult = await engine.RecognizeAsync(softwareBitmap);

                    return(string.Join("\r\n", ocrResult.Lines.Select(x => x.Text)));
                }
            }
        }
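A minimal usage sketch for the helper above, assuming a System.Drawing.Bitmap loaded from disk and an installed en-US OCR language pack (hypothetical caller):

        private static async Task<string> OcrFileAsync(string path)
        {
            using (var bmp = new Bitmap(path))
            {
                return await OCRInternal(bmp, "en-US");
            }
        }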
Code Example #18
    private unsafe void OnFrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
    {
        // TryAcquireLatestFrame will return the latest frame that has not yet been acquired.
        // This can return null if there is no such frame, or if the reader is not in the
        // "Started" state. The latter can occur if a FrameArrived event was in flight
        // when the reader was stopped.
        if (onFrameArrivedProcessing)
        {
            Debug.Log(" OnFrameArrived() is still processing");
            return;
        }
        onFrameArrivedProcessing = true;
        using (var frame = sender.TryAcquireLatestFrame()) {
            if (frame != null)
            {
                Debug.Log("frame received");
                var softwareBitmap = SoftwareBitmap.Convert(frame.VideoMediaFrame.SoftwareBitmap,
                                                            BitmapPixelFormat.Rgba8, BitmapAlphaMode.Ignore);

                Interlocked.Exchange(ref upBitmap, softwareBitmap);
            }
        }
        onFrameArrivedProcessing = false;
    }
Code Example #19
        private async void CaptureButton_Click(object sender, RoutedEventArgs e)
        {
            //Capturing the photo
            var capture = await mediaCapture.PrepareLowLagPhotoCaptureAsync(ImageEncodingProperties.CreateUncompressed(MediaPixelFormat.Bgra8));

            var photo = await capture.CaptureAsync();


            //Creating the bitmap
            var softwareBitmap = photo.Frame.SoftwareBitmap;

            await capture.FinishAsync();

            //API call
            CognitiveServiceHelper.FetchApi(softwareBitmap);

            //Converting the bitmap
            var convertedSoftwareBitmap = SoftwareBitmap.Convert(softwareBitmap, BitmapPixelFormat.Bgra8, BitmapAlphaMode.Premultiplied);

            //Creating a bitmap source to set to the image source
            var bitmapSource = new SoftwareBitmapSource();
            await bitmapSource.SetBitmapAsync(convertedSoftwareBitmap);

            //Drawing the taken picture (without the face rectangle, because the API call failed)
            Photo.Source = bitmapSource;

            //Setting the size of the wrapper grid element, so the popups size is the full view
            WrapperGrid.Width  = Window.Current.Bounds.Width;
            WrapperGrid.Height = Window.Current.Bounds.Height;

            //Disabling capture button
            CaptureButton.IsEnabled = false;

            //Showing the popup
            ImagePopup.IsOpen = true;
        }
Code Example #20
        public static SoftwareBitmap Crop(this SoftwareBitmap softwareBitmap, Rect bounds)
        {
            var resourceCreator = CanvasDevice.GetSharedDevice();

            using (var canvasBitmap = CanvasBitmap.CreateFromSoftwareBitmap(resourceCreator, softwareBitmap))
                using (var canvasRenderTarget = new CanvasRenderTarget(resourceCreator, (float)bounds.Width, (float)bounds.Height, canvasBitmap.Dpi))
                    using (var drawingSession = canvasRenderTarget.CreateDrawingSession())
                        using (var cropEffect = new CropEffect())
                            using (var atlasEffect = new AtlasEffect())
                            {
                                drawingSession.Clear(Colors.White);

                                cropEffect.SourceRectangle = bounds;
                                cropEffect.Source          = canvasBitmap;

                                atlasEffect.SourceRectangle = bounds;
                                atlasEffect.Source          = cropEffect;

                                drawingSession.DrawImage(atlasEffect);
                                drawingSession.Flush();

                                return(SoftwareBitmap.CreateCopyFromBuffer(canvasRenderTarget.GetPixelBytes().AsBuffer(), BitmapPixelFormat.Bgra8, (int)bounds.Width, (int)bounds.Height, BitmapAlphaMode.Premultiplied));
                            }
        }
Code Example #21
        /// <summary>
        /// Decode an image file into a VideoFrame
        /// </summary>
        /// <param name="file"></param>
        /// <returns></returns>
        private async Task GetFrameFromFileAsync(StorageFile file)
        {
            // Decode the image file content into a SoftwareBitmap and wrap it in a VideoFrame
            SoftwareBitmap softwareBitmap = null;

            using (IRandomAccessStream stream = await file.OpenAsync(FileAccessMode.Read))
            {
                // Create the decoder from the stream
                BitmapDecoder decoder = await BitmapDecoder.CreateAsync(stream);

                // Get the SoftwareBitmap representation of the file in BGRA8 format
                softwareBitmap = await decoder.GetSoftwareBitmapAsync();

                // Convert to a friendly format for UI display purposes
                softwareBitmap = SoftwareBitmap.Convert(softwareBitmap, BitmapPixelFormat.Bgra8, BitmapAlphaMode.Premultiplied);

                // Extract frame dimensions
                FrameWidth  = (uint)softwareBitmap.PixelWidth;
                FrameHeight = (uint)softwareBitmap.PixelHeight;
            }

            // Encapsulate the image in a VideoFrame instance
            m_videoFrame = VideoFrame.CreateWithSoftwareBitmap(softwareBitmap);
        }
Code Example #22
        public async Task <string> WritableBitmapToBase64Async(WriteableBitmap bitmap, CancellationToken cancellationToken)
        {
            Arguments.NotNull(bitmap, nameof(bitmap));
            TaskHelper.ThrowIfNotOnUIThread();

            using (var memoryStream = new InMemoryRandomAccessStream())
            {
                BitmapEncoder encoder = await BitmapEncoder.CreateAsync(BitmapEncoder.PngEncoderId, memoryStream);

                using (SoftwareBitmap softwareBitmap = SoftwareBitmap.CreateCopyFromBuffer(bitmap.PixelBuffer, BitmapPixelFormat.Rgba8, bitmap.PixelWidth, bitmap.PixelHeight))
                {
                    encoder.SetSoftwareBitmap(softwareBitmap);
                    await encoder.FlushAsync();
                }

                var bytes = new byte[memoryStream.Size];
                using (Stream stream = memoryStream.AsStream())
                {
                    await stream.ReadAsync(bytes, 0, bytes.Length).ConfigureAwait(false);
                }

                return(Convert.ToBase64String(bytes));
            }
        }
Code Example #23
        public void ProcessFrame(ProcessVideoFrameContext context)
        {
            var inputFrameBitmap = context.InputFrame.SoftwareBitmap;

            Snap = inputFrameBitmap;
        }
Code Example #24
 //--------------------------------------------------------Constructor:----------------------------------------------------------------\\
 #region --Constructors--
 /// <summary>
 /// Called when editing an image was successful.
 /// </summary>
 /// <param name="image">The resulting image.</param>
 public ImageEditDoneEventArgs(SoftwareBitmap image)
 {
     IMAGE   = image;
     SUCCESS = true;
 }
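IMAGE and SUCCESS are members of the same class that this fragment does not show. Read-only properties along these lines would be consistent with the constructor (an assumption):

 // Hypothetical member declarations consistent with the constructor above.
 public SoftwareBitmap IMAGE { get; private set; }
 public bool SUCCESS { get; private set; }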
Code Example #25
        /// <summary>
        /// Converts a frame to a SoftwareBitmap of a valid format to display in an Image control.
        /// </summary>
        /// <param name="inputFrame">Frame to convert.</param>
        public static unsafe SoftwareBitmap ConvertToDisplayableImage(VideoMediaFrame inputFrame)
        {
            SoftwareBitmap result = null;

            using (var inputBitmap = inputFrame?.SoftwareBitmap)
            {
                if (inputBitmap != null)
                {
                    switch (inputFrame.FrameReference.SourceKind)
                    {
                    case MediaFrameSourceKind.Color:
                        // XAML requires Bgra8 with premultiplied alpha.
                        // We requested Bgra8 from the MediaFrameReader, so all that's
                        // left is fixing the alpha channel if necessary.
                        if (inputBitmap.BitmapPixelFormat != BitmapPixelFormat.Bgra8)
                        {
                            Debug.WriteLine("Color frame in unexpected format.");
                        }
                        else if (inputBitmap.BitmapAlphaMode == BitmapAlphaMode.Premultiplied)
                        {
                            // Already in the correct format.
                            result = SoftwareBitmap.Copy(inputBitmap);
                        }
                        else
                        {
                            // Convert to premultiplied alpha.
                            result = SoftwareBitmap.Convert(inputBitmap, BitmapPixelFormat.Bgra8, BitmapAlphaMode.Premultiplied);
                        }
                        break;

                    case MediaFrameSourceKind.Depth:
                        // We requested D16 from the MediaFrameReader, so the frame should
                        // be in Gray16 format.
                        if (inputBitmap.BitmapPixelFormat == BitmapPixelFormat.Gray16)
                        {
                            // Use a special pseudo color to render 16 bits depth frame.
                            var depthScale       = (float)inputFrame.DepthMediaFrame.DepthFormat.DepthScaleInMeters;
                            var minReliableDepth = inputFrame.DepthMediaFrame.MinReliableDepth;
                            var maxReliableDepth = inputFrame.DepthMediaFrame.MaxReliableDepth;
                            result = TransformBitmap(inputBitmap, (w, i, o) => PseudoColorHelper.PseudoColorForDepth(w, i, o, depthScale, minReliableDepth, maxReliableDepth));
                        }
                        else
                        {
                            Debug.WriteLine("Depth frame in unexpected format.");
                        }
                        break;

                    case MediaFrameSourceKind.Infrared:
                        // We requested L8 or L16 from the MediaFrameReader, so the frame should
                        // be in Gray8 or Gray16 format.
                        switch (inputBitmap.BitmapPixelFormat)
                        {
                        case BitmapPixelFormat.Gray16:
                            // Use pseudo color to render 16 bits frames.
                            result = TransformBitmap(inputBitmap, PseudoColorHelper.PseudoColorFor16BitInfrared);
                            break;

                        case BitmapPixelFormat.Gray8:

                            // Use pseudo color to render 8 bits frames.
                            result = TransformBitmap(inputBitmap, PseudoColorHelper.PseudoColorFor8BitInfrared);
                            break;

                        default:
                            Debug.WriteLine("Infrared frame in unexpected format.");
                            break;
                        }
                        break;
                    }
                }
            }
            return(result);
        }
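This method calls a TransformBitmap(inputBitmap, transformScanline) overload that returns a new bitmap, while Code Example #2 shows the void two-bitmap version. A thin wrapper along these lines would bridge the two (a sketch, assuming a Bgra8 output of the same size):

        private static unsafe SoftwareBitmap TransformBitmap(SoftwareBitmap inputBitmap, TransformScanline transformScanline)
        {
            // Allocate a Bgra8 output bitmap of the same size and run the scanline transform into it.
            var outputBitmap = new SoftwareBitmap(BitmapPixelFormat.Bgra8,
                                                  inputBitmap.PixelWidth, inputBitmap.PixelHeight,
                                                  BitmapAlphaMode.Premultiplied);
            TransformBitmap(inputBitmap, outputBitmap, transformScanline);
            return outputBitmap;
        }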
Code Example #26
        public async void ReadXml(XmlReader reader)
        {
            reader.Read();
            Title       = reader.ReadElementContentAsString("Title", "");
            Description = reader.ReadElementContentAsString("Description", "");
            string s = reader.ReadElementContentAsString("Status", "");

            Status = (JobStatus)Enum.Parse(typeof(JobStatus), s);

            if (!reader.IsEmptyElement)
            {
                InMemoryRandomAccessStream ms = new InMemoryRandomAccessStream();
                DataWriter dw = new DataWriter(ms.GetOutputStreamAt(0));
                byte[]     tempBytes;
                int        bytesRead  = 0;
                int        totalBytes = 0;
                do
                {
                    tempBytes = new byte[1024];
                    bytesRead = reader.ReadElementContentAsBinHex(tempBytes, 0, 1024);
                    if (bytesRead > 0)
                    {
                        dw.WriteBuffer(tempBytes.AsBuffer(0, bytesRead)); // write only the bytes actually read
                        totalBytes += bytesRead;
                    }
                } while (bytesRead == 1024);

                await dw.StoreAsync();

                if (totalBytes > 1)
                {
                    InkStrokeContainer inkCont = new InkStrokeContainer();
                    await inkCont.LoadAsync(ms);

                    Strokes = inkCont.GetStrokes().ToList();
                }
                reader.ReadEndElement();
            }
            else
            {
                reader.Read();
            }

            if (!reader.IsEmptyElement)
            {
                InMemoryRandomAccessStream ms = new InMemoryRandomAccessStream();
                DataWriter dw = new DataWriter(ms.GetOutputStreamAt(0));
                byte[]     tempBytes;
                int        bytesRead      = 0;
                int        totalBytesRead = 0;
                do
                {
                    tempBytes = new byte[1024];
                    bytesRead = reader.ReadContentAsBinHex(tempBytes, 0, 1024);
                    if (bytesRead > 0)
                    {
                        dw.WriteBuffer(tempBytes.AsBuffer(0, bytesRead)); // write only the bytes actually read
                        totalBytesRead += bytesRead;
                    }
                } while (bytesRead == 1024);

                await dw.StoreAsync();

                if (totalBytesRead > 1)
                {
                    //load bytes as image
                    byte[] bytes = new byte[ms.Size];
                    //var dataWriter = new DataWriter(ms);
                    var dataReader = new DataReader(ms.GetInputStreamAt(0));

                    await dataReader.LoadAsync((uint)ms.Size);

                    dataReader.ReadBytes(bytes);
                    //TODO: this should change based on the resolution you store the photos at
                    Photo = new SoftwareBitmap(BitmapPixelFormat.Bgra8, 640, 360);
                    Photo.CopyFromBuffer(bytes.AsBuffer());
                }
                reader.ReadEndElement();
            }
            else
            {
                reader.Read();
            }


            reader.Skip();
        }
Code Example #27
File: ImageOperate.cs  Project: ryoha000/capture
        unsafe public static byte[] GetCroppedBitmap(SoftwareBitmap inputBitmap, uint startPointX, uint startPointY, uint width, uint height)
        {
            int bigger;

            if (width >= height)
            {
                bigger = (int)width;
            }
            else
            {
                bigger = (int)height;
            }
            // For some reason a 0 is sometimes passed in? But ignoring it still works, so it's fine!
            if (bigger == 0)
            {
                return(new byte[0]);
            }
            SoftwareBitmap softwareBitmap = new SoftwareBitmap(BitmapPixelFormat.Bgra8, bigger, bigger, BitmapAlphaMode.Premultiplied);

            using (BitmapBuffer buffer = softwareBitmap.LockBuffer(BitmapBufferAccessMode.Write))
                using (BitmapBuffer inputBuffer = inputBitmap.LockBuffer(BitmapBufferAccessMode.Write))
                {
                    using (var reference = buffer.CreateReference())
                        using (var inputReference = inputBuffer.CreateReference())
                        {
                            byte *dataInBytes;
                            uint  capacity;
                            ((IMemoryBufferByteAccess)reference).GetBuffer(out dataInBytes, out capacity);


                            byte *inputDataInBytes;
                            uint  inputCapacity;
                            ((IMemoryBufferByteAccess)inputReference).GetBuffer(out inputDataInBytes, out inputCapacity);

                            BitmapPlaneDescription bufferLayout      = buffer.GetPlaneDescription(0);
                            BitmapPlaneDescription inputBufferLayout = inputBuffer.GetPlaneDescription(0);
                            int h = bufferLayout.Height;
                            for (int i = 0; i < bufferLayout.Height; i++)
                            {
                                for (int j = 0; j < bufferLayout.Width; j++)
                                {
                                    byte valueB;
                                    byte valueG;
                                    byte valueR;
                                    byte valueA;
                                    if ((h - height) / 2 < i && (h + height) / 2 > i)
                                    {
                                        valueB = inputDataInBytes[inputBufferLayout.StartIndex + inputBufferLayout.Stride * (startPointY + i - (h - height) / 2) + 4 * (startPointX + j) + 0];
                                        valueG = inputDataInBytes[inputBufferLayout.StartIndex + inputBufferLayout.Stride * (startPointY + i - (h - height) / 2) + 4 * (startPointX + j) + 1];
                                        valueR = inputDataInBytes[inputBufferLayout.StartIndex + inputBufferLayout.Stride * (startPointY + i - (h - height) / 2) + 4 * (startPointX + j) + 2];
                                        valueA = inputDataInBytes[inputBufferLayout.StartIndex + inputBufferLayout.Stride * (startPointY + i - (h - height) / 2) + 4 * (startPointX + j) + 3];
                                        if (((double)valueR * 0.2126 + (double)valueG * 0.7152 + (double)valueB * 0.0772) / 255 > 0.5)
                                        {
                                            valueB = (byte)255;
                                            valueG = (byte)255;
                                            valueR = (byte)255;
                                            valueA = 0;
                                        }
                                        else
                                        {
                                            valueB = 0;
                                            valueG = 0;
                                            valueR = 0;
                                            valueA = 0;
                                        }
                                    }
                                    else
                                    {
                                        valueB = 0;
                                        valueG = 0;
                                        valueR = 0;
                                        valueA = 0;
                                    }
                                    dataInBytes[bufferLayout.StartIndex + bufferLayout.Stride * i + 4 * j + 0] = valueB;
                                    dataInBytes[bufferLayout.StartIndex + bufferLayout.Stride * i + 4 * j + 1] = valueG;
                                    dataInBytes[bufferLayout.StartIndex + bufferLayout.Stride * i + 4 * j + 2] = valueR;
                                    dataInBytes[bufferLayout.StartIndex + bufferLayout.Stride * i + 4 * j + 3] = valueA;
                                }
                            }
                            byte[] data = new byte[capacity];
                            Marshal.Copy((IntPtr)dataInBytes, data, 0, (int)capacity);
                            return(data);
                        }
                }
        }
Code Example #28
File: MainPage.xaml.cs  Project: mareklinka/holoyolo
        private async void ProcessPreview(MediaFrameReader reader)
        {
            var count      = 0;
            var full       = 0D;
            var conversion = 0D;
            var prediction = 0D;
            var drawing    = 0D;

            while (true)
            {
                using (var frame = reader.TryAcquireLatestFrame())
                {
                    if (frame?.VideoMediaFrame == null)
                    {
                        continue;
                    }

                    count++;

                    var sw     = Stopwatch.StartNew();
                    var convSw = Stopwatch.StartNew();
                    var bitmap =
                        SoftwareBitmap.Convert(frame.VideoMediaFrame.SoftwareBitmap, BitmapPixelFormat.Rgba8, BitmapAlphaMode.Ignore);

                    using (bitmap)
                    {
                        byte[] jpegData;
                        using (var ms = new MemoryStream())
                        {
                            var encoder = await BitmapEncoder.CreateAsync(BitmapEncoder.JpegEncoderId, ms.AsRandomAccessStream());

                            encoder.SetSoftwareBitmap(bitmap);

                            encoder.BitmapTransform.ScaledWidth  = 416;
                            encoder.BitmapTransform.ScaledHeight = 416;
                            encoder.IsThumbnailGenerated         = false;

                            await encoder.FlushAsync();

                            jpegData = ms.ToArray();
                        }

                        //var hex = ByteArrayToHexViaLookup32(jpegData);
                        var hex     = Convert.ToBase64String(jpegData);
                        var payload = JsonConvert.SerializeObject(new { data = hex, width = 416, height = 416 });

                        convSw.Stop();
                        conversion += convSw.Elapsed.TotalMilliseconds;

                        var predSw   = Stopwatch.StartNew();
                        var response = await http.PostAsync(new Uri("http://mar3ek.ddns.net:55665/image"), new HttpStringContent(payload, UnicodeEncoding.Utf8, "application/json"));

                        try
                        {
                            response.EnsureSuccessStatusCode();
                            var responseStream = await response.Content.ReadAsInputStreamAsync();

                            predSw.Stop();
                            prediction += predSw.Elapsed.TotalMilliseconds;

                            var drawSw = Stopwatch.StartNew();
                            ParseRespone(responseStream);

                            drawSw.Stop();
                            drawing += drawSw.Elapsed.TotalMilliseconds;
                        }
                        catch (Exception)
                        {
                            // todo
                        }
                    }

                    sw.Stop();
                    full += sw.Elapsed.TotalMilliseconds;
                }
            }
        }
Code Example #29
        private static unsafe void bWorker_DoWork(object sender, DoWorkEventArgs e)
        {
            byte *data     = null;
            uint  capacity = 0;

            SoftwareBitmap bitmap = e.Argument as SoftwareBitmap; //could check for null
            int            width  = 0;
            int            height = 0;

            byte[] returnArray = null;

            // The grayscale extraction below supports Bgra8, Nv12, Yuy2 and Gray8 input formats
            if (bitmap.BitmapPixelFormat == BitmapPixelFormat.Bgra8 || bitmap.BitmapPixelFormat == BitmapPixelFormat.Nv12 ||
                bitmap.BitmapPixelFormat == BitmapPixelFormat.Yuy2 || bitmap.BitmapPixelFormat == BitmapPixelFormat.Gray8)
            {
                // In BGRA8 format, each pixel is defined by 4 bytes
                int BYTES_PER_PIXEL = 4;

                using (var buffer = bitmap.LockBuffer(BitmapBufferAccessMode.ReadWrite))
                    using (IMemoryBufferReference reference = buffer.CreateReference())
                    {
                        if (reference is IMemoryBufferByteAccess)
                        {
                            // Get a pointer to the pixel buffer
                            ((IMemoryBufferByteAccess)reference).GetBuffer(out data, out capacity);
                            var desc = buffer.GetPlaneDescription(0);
                            width       = desc.Width;
                            height      = desc.Height;
                            returnArray = new byte[desc.Width * desc.Height];
                            if (bitmap.BitmapPixelFormat == BitmapPixelFormat.Yuy2)
                            {
                                int length = desc.Width * desc.Height;
                                for (int i = 0; i < length; i++)
                                {
                                    returnArray[i] = data[i << 1];
                                }
                            }
                            else
                            if (bitmap.BitmapPixelFormat == BitmapPixelFormat.Nv12 || bitmap.BitmapPixelFormat == BitmapPixelFormat.Gray8)
                            {
                                Marshal.Copy((IntPtr)data, returnArray, 0, desc.Width * desc.Height);
                            }
                            else

                            if (bitmap.BitmapPixelFormat == BitmapPixelFormat.Bgra8)
                            {
                                BYTES_PER_PIXEL = 4;

                                // Get information about the BitmapBuffer

                                // Iterate over all pixels
                                width  = desc.Width;
                                height = desc.Height;
                                for (uint row = 0; row < desc.Height; row++)
                                {
                                    for (uint col = 0; col < desc.Width; col++)
                                    {
                                        // Index of the current pixel in the buffer (defined by the next 4 bytes, BGRA8)
                                        var currPixel = desc.StartIndex + desc.Stride * row + BYTES_PER_PIXEL * col;

                                        // Read the current pixel information into b,g,r channels (leave out alpha channel)
                                        var b = data[currPixel + 0]; // Blue
                                        var g = data[currPixel + 1]; // Green
                                        var r = data[currPixel + 2]; // Red

                                        int y = (r * 77) + (g * 151) + (b * 28) >> 8;

                                        /*
                                         *                              data[currPixel + 0] = (byte)y;
                                         *                              data[currPixel + 1] = (byte)y;
                                         *                              data[currPixel + 2] = (byte)y;
                                         */
                                        returnArray[row * desc.Width + col] = (byte)y;
                                    }
                                }
                            }
                        }
                    }
            }

            conversionResult cResult = new conversionResult();

            cResult.width       = width;
            cResult.height      = height;
            cResult.returnArray = returnArray;

            law_n_order.WaitOne();
            convertedQueue.Enqueue(cResult);
            law_n_order.ReleaseMutex();
            //throw new NotImplementedException();
        }
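The conversionResult type, the law_n_order mutex, and convertedQueue are declared elsewhere in the class. Sketches of their likely shapes, inferred from usage (hypothetical):

        // Hypothetical declarations inferred from how bWorker_DoWork uses them.
        private class conversionResult
        {
            public int    width;
            public int    height;
            public byte[] returnArray;
        }

        private static readonly Mutex law_n_order = new Mutex();
        private static readonly Queue<conversionResult> convertedQueue = new Queue<conversionResult>();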
Code Example #30
        /// <summary>
        /// This is event handler for 'Extract' button.
        /// Captures image from camera ,recognizes text and displays it.
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e"></param>
        private async void ExtractButton_Click(object sender, Windows.UI.Xaml.RoutedEventArgs e)
        {
            //Get information about the preview.
            var previewProperties = mediaCapture.VideoDeviceController.GetMediaStreamProperties(MediaStreamType.VideoPreview) as VideoEncodingProperties;
            int videoFrameWidth   = (int)previewProperties.Width;
            int videoFrameHeight  = (int)previewProperties.Height;

            // In portrait modes, the width and height must be swapped for the VideoFrame to have the correct aspect ratio and avoid letterboxing / black bars.
            if (!externalCamera && (displayInformation.CurrentOrientation == DisplayOrientations.Portrait || displayInformation.CurrentOrientation == DisplayOrientations.PortraitFlipped))
            {
                videoFrameWidth  = (int)previewProperties.Height;
                videoFrameHeight = (int)previewProperties.Width;
            }

            // Create the video frame to request a SoftwareBitmap preview frame.
            var videoFrame = new VideoFrame(BitmapPixelFormat.Bgra8, videoFrameWidth, videoFrameHeight);

            // Capture the preview frame.
            using (var currentFrame = await mediaCapture.GetPreviewFrameAsync(videoFrame))
            {
                // Collect the resulting frame.
                SoftwareBitmap bitmap = currentFrame.SoftwareBitmap;

                OcrEngine ocrEngine = OcrEngine.TryCreateFromLanguage(ocrLanguage);

                if (ocrEngine == null)
                {
                    rootPage.NotifyUser(ocrLanguage.DisplayName + " is not supported.", NotifyType.ErrorMessage);

                    return;
                }

                var imgSource = new WriteableBitmap(bitmap.PixelWidth, bitmap.PixelHeight);
                bitmap.CopyToBuffer(imgSource.PixelBuffer);
                PreviewImage.Source = imgSource;

                var ocrResult = await ocrEngine.RecognizeAsync(bitmap);

                // Used for text overlay.
                // Prepare scale transform for words since image is not displayed in original format.
                var scaleTransform = new ScaleTransform
                {
                    CenterX = 0,
                    CenterY = 0,
                    ScaleX  = PreviewControl.ActualWidth / bitmap.PixelWidth,
                    ScaleY  = PreviewControl.ActualHeight / bitmap.PixelHeight
                };

                if (ocrResult.TextAngle != null)
                {
                    // If text is detected under some angle in this sample scenario we want to
                    // overlay word boxes over original image, so we rotate overlay boxes.
                    TextOverlay.RenderTransform = new RotateTransform
                    {
                        Angle   = (double)ocrResult.TextAngle,
                        CenterX = PreviewImage.ActualWidth / 2,
                        CenterY = PreviewImage.ActualHeight / 2
                    };
                }

                // Iterate over recognized lines of text.
                foreach (var line in ocrResult.Lines)
                {
                    // Iterate over words in line.
                    foreach (var word in line.Words)
                    {
                        WordOverlay wordBoxOverlay = new WordOverlay(word);

                        // Keep references to word boxes.
                        wordBoxes.Add(wordBoxOverlay);

                        // Create a box with the word inside it.
                        var textBlock = new TextBlock()
                        {
                            Text  = word.Text,
                            Style = ExtractedWordTextStyle
                        };
                        TextOverlay.Children.Add(wordBoxOverlay.CreateBorder(HighlightedWordBoxHorizontalLineStyle, textBlock));
                    }
                }

                rootPage.NotifyUser("Image processed using " + ocrEngine.RecognizerLanguage.DisplayName + " language.", NotifyType.StatusMessage);
            }

            UpdateWordBoxTransform();

            PreviewControl.Visibility = Visibility.Collapsed;
            Image.Visibility          = Visibility.Visible;
            ExtractButton.Visibility  = Visibility.Collapsed;
            CameraButton.Visibility   = Visibility.Visible;
        }