Example #1
0
        /// <summary>
        /// Encodes <paramref name="content"/> with the writer and renders the resulting
        /// bit matrix to a <see cref="Bitmap"/> via a <see cref="BitmapRenderer"/>.
        /// </summary>
        /// <param name="writer">The generic barcode writer used for encoding.</param>
        /// <param name="content">The text to encode into the barcode.</param>
        /// <returns>The rendered barcode bitmap.</returns>
        public static Bitmap WriteAsBitmap(this IBarcodeWriterGeneric writer, string content)
        {
            var matrix = writer.Encode(content);

            return new BitmapRenderer().Render(matrix, writer.Format, content, writer.Options);
        }
Example #2
0
        /// <summary>
        /// Lazily creates and uploads a barcode texture per output slice the first
        /// time the given DX11 render context is seen.
        /// </summary>
        /// <param name="context">The DX11 render context to populate.</param>
        public void Update(FeralTic.DX11.DX11RenderContext context)
        {
            for (int i = 0; i < FTextureOut.SliceCount; i++)
            {
                if (!FTextureOut[i].Contains(context))
                {
                    renderer          = new BitmapRenderer();
                    renderer.TextFont = new System.Drawing.Font(FFontIn[i].Name, FFontSizeIn[i]);

                    // FIX: the original leaked the GDI+ bitmap and never called
                    // UnlockBits; both are now released even if the upload throws.
                    using (Bitmap bmp = new Bitmap(GenerateBarcodeImage(i)))
                    {
                        DX11DynamicTexture2D tex = new DX11DynamicTexture2D(context, bmp.Width, bmp.Height, SlimDX.DXGI.Format.R8G8B8A8_UNorm);

                        int pitch = tex.GetRowPitch();

                        // NOTE(review): the size math assumes 4 bytes per pixel —
                        // confirm GenerateBarcodeImage always yields a 32bpp bitmap.
                        var data = bmp.LockBits(new Rectangle(0, 0, bmp.Width, bmp.Height), ImageLockMode.ReadOnly, bmp.PixelFormat);
                        try
                        {
                            if (pitch != bmp.Width * 4)
                            {
                                // Texture rows are padded: let the texture re-pitch the tightly packed data.
                                tex.WriteDataPitch(data.Scan0, bmp.Width * bmp.Height * 4);
                            }
                            else
                            {
                                tex.WriteData(data.Scan0, bmp.Width * bmp.Height * 4);
                            }
                        }
                        finally
                        {
                            bmp.UnlockBits(data);
                        }

                        FTextureOut[i][context] = tex;
                    }
                }
            }
        }
Example #3
0
        /// <summary>
        /// Extracts the images and palette from a .gp file, renders them to a bitmap
        /// and writes the result as a PNG next to the requested directory.
        /// </summary>
        /// <param name="gpFile">Path of the .gp input file.</param>
        /// <param name="pngDirectory">Directory receiving the .png output.</param>
        private static void SaveAsPng(string gpFile, string pngDirectory)
        {
            IRenderer renderer = new BitmapRenderer();

            var imagePaletteBytes = new Extractor().GetPaletteBytes(gpFile);
            var extractResult     = new Extractor().GetImagesFromOutput(gpFile).ToList();

            var rawParser = new RawParser();

            // Base palette shipped with the repository.
            var colorCollection = rawParser.GetColorCollectionFromPalleteFile(
                File.ReadAllBytes(@"..\..\..\..\palette\0\agew_1.pal"));

            var imagePaletteColors = ImageGenerator.OffsetsToColors(imagePaletteBytes, colorCollection);

            using (var bitMap = new Runner().Run(extractResult, NationColorOffset.Red, 0, rawParser, renderer, imagePaletteColors, colorCollection))
            using (var stream = new MemoryStream())
            {
                bitMap.Save(stream, ImageFormat.Png);

                var outputPath = Path.Combine(pngDirectory, Path.GetFileNameWithoutExtension(gpFile) + ".png");
                File.WriteAllBytes(outputPath, stream.ToArray());
            }
        }
Example #4
0
        // Starts loading the named package image in the background and prepares both a
        // landscape and a (90°-rotated) portrait image source from it.
        // NOTE(review): exceptions inside Task.Run surface only when m_loadingTask is
        // awaited/observed — confirm callers do so.
        public LookupImage(string name)
        {
            m_loadingTask = Task.Run(async() =>
            {
                var file = await Package.Current.InstalledLocation.GetFileAsync(name).AsTask().ConfigureAwait(false);

                using (var imageSource = new StorageFileImageSource(file))
                {
                    var info            = await imageSource.GetInfoAsync().AsTask().ConfigureAwait(false);
                    // The portrait bitmap swaps width and height so the rotated image fits exactly.
                    var landscapeBitmap = new Bitmap(info.ImageSize, ColorMode.Bgra8888);
                    var portraitBitmap  = new Bitmap(new Size(info.ImageSize.Height, info.ImageSize.Width), ColorMode.Bgra8888);

                    // Render the file into the landscape bitmap using CPU rendering.
                    using (var renderer = new BitmapRenderer(imageSource, landscapeBitmap, OutputOption.Stretch))
                    {
                        renderer.RenderOptions = RenderOptions.Cpu;
                        m_landscapeSource      = new BitmapImageSource(await renderer.RenderAsync().AsTask().ConfigureAwait(false));
                    }

                    // Derive the portrait source by rotating the landscape source by 90 degrees.
                    using (var renderer = new BitmapRenderer(m_landscapeSource.Rotate(90), portraitBitmap, OutputOption.Stretch))
                    {
                        renderer.RenderOptions = RenderOptions.Cpu;
                        m_portraitSource       = new BitmapImageSource(await renderer.RenderAsync().AsTask().ConfigureAwait(false));
                    }
                }
            });
        }
        /// <summary>
        /// Lets the user pick a photo, renders it into the input bitmap and rebuilds
        /// the processing pipeline for the chosen picture.
        /// </summary>
        private void Button_Click(object sender, RoutedEventArgs e)
        {
            try
            {
                PhotoChooserTask task = new PhotoChooserTask();
                task.Completed += async (s, res) =>
                {
                    if (res.TaskResult == TaskResult.OK)
                    {
                        // Render the chosen photo into the shared input bitmap.
                        using (var source   = new StreamImageSource(res.ChosenPhoto))
                        using (var renderer = new BitmapRenderer(source, input))
                        {
                            await renderer.RenderAsync();
                        }

                        // Replace the pipeline so it operates on the new picture.
                        if (manager != null)
                        {
                            manager.Dispose();
                            manager = null;
                        }
                        manager = new PipelineManager.Manager.PipelineManager(picture);
                        GeneratePicture();
                    }
                };
                task.Show();
            }
            catch (Exception ex)
            {
                // FIX: the original swallowed every exception silently. Behavior
                // (no rethrow) is preserved, but failures are now at least logged.
                System.Diagnostics.Debug.WriteLine("Button_Click failed: " + ex);
            }
        }
        /// <summary>
        /// Applies the blend effect (or passes the camera preview through) to the
        /// given frame buffer in place. Drops the frame if a previous frame is
        /// still being processed after 500 ms.
        /// </summary>
        /// <param name="frameBuffer">Destination pixel buffer (BGRA8888).</param>
        /// <param name="frameSize">Frame dimensions in pixels.</param>
        public async Task GetNewFrameAndApplyEffect(IBuffer frameBuffer, Size frameSize)
        {
            if (_semaphore.WaitOne(500))
            {
                try
                {
                    var scanlineByteSize = (uint)frameSize.Width * 4; // 4 bytes per pixel in BGRA888 mode
                    var bitmap           = new Bitmap(frameSize, ColorMode.Bgra8888, scanlineByteSize, frameBuffer);

                    try
                    {
                        if (_blendEffect != null)
                        {
                            _blendEffect.GlobalAlpha = GlobalAlpha;

                            var renderer = new BitmapRenderer(_blendEffect, bitmap);
                            await renderer.RenderAsync();
                        }
                        else
                        {
                            var renderer = new BitmapRenderer(_cameraPreviewImageSource, bitmap);
                            await renderer.RenderAsync();
                        }
                    }
                    catch (Exception ex)
                    {
                        System.Diagnostics.Debug.WriteLine("RealtimeBlendDemo.GetNewFrameAndApplyEffect(): "
                                                           + ex.ToString());
                    }
                }
                finally
                {
                    // FIX: release in finally — the original leaked the semaphore if
                    // e.g. the Bitmap constructor threw, permanently blocking frames.
                    _semaphore.Release();
                }
            }
        }
Example #7
0
 /// <summary>
 /// Renders this element by delegating to each child, in insertion order.
 /// </summary>
 /// <param name="target">Renderer receiving the drawing commands.</param>
 public virtual void Render(BitmapRenderer target)
 {
     foreach (var child in _childs)
     {
         child.Render(target);
     }
 }
Example #8
0
        // Main UI loop: repaints the desktop, all application windows and the mouse
        // cursor into the 800x600 framebuffer until the machine is powered off.
        public void Run()
        {
            // 800*4 stride => 4 bytes per pixel framebuffer wrapped in a drawable bitmap.
            var screenBitmap = new Bitmap(_screen.Screen, 800, 600, 800 * 4);
            var desktop      = new BitmapRenderer(screenBitmap);
            var screen       = new BitmapRenderer(screenBitmap);
            var root         = new Element();

            // Opaque dark-gray background covering the whole screen.
            root.Add(new Box(screenBitmap.Area, 0xff404040));

            while (_power.On)
            {
                // Draw the cursor at the current mouse position.
                screen.Draw(Cursor, new Point(_mouse.X, _mouse.Y));
                //_screen.Screen[_mouse.X + _mouse.Y * 320] = 255;

                // Wait for vertical retrace before repainting to avoid tearing.
                _screen.VRetrace();
                root.Render(desktop);

                // Paint every window of every running app on top of the desktop.
                foreach (Elements.App app in _apps)
                {
                    foreach (Window window in app.Windows)
                    {
                        window.Render(desktop);
                    }
                }
            }
        }
        /// <summary>
        /// Wraps the raw frame buffer in a bitmap, applies a contrast filter and
        /// returns the enhanced pixels as a new frame (format is preserved).
        /// </summary>
        /// <param name="frame">Source frame; its buffer is read, not modified.</param>
        /// <returns>Result holding a newly allocated, enhanced frame.</returns>
        public async Task<EnhanceResult> EnhanceAsync(Frame frame)
        {
            using (var bitmap = new Bitmap(new Windows.Foundation.Size(frame.Dimensions.Width, frame.Dimensions.Height), Internal.Utilities.FrameFormatToColorMode(frame.Format), frame.Pitch, frame.Buffer.AsBuffer()))
            using (var source = new BitmapImageSource(bitmap))
            using (var effect = new FilterEffect(source))
            {
                effect.Filters = new List<IFilter>()
                {
                    new ContrastFilter(0.5)
                };

                using (var newBitmap = new Bitmap(new Windows.Foundation.Size(frame.Dimensions.Width, frame.Dimensions.Height), Internal.Utilities.FrameFormatToColorMode(frame.Format)))
                {
                    // FIX: removed a BitmapRenderer(effect) that the original constructed
                    // and disposed without ever using — output comes from GetBitmapAsync.
                    await effect.GetBitmapAsync(newBitmap, OutputOption.PreserveAspectRatio);

                    return new EnhanceResult()
                    {
                        Frame = new Frame()
                        {
                            Buffer = newBitmap.Buffers[0].Buffer.ToArray(),
                            Pitch = newBitmap.Buffers[0].Pitch,
                            Format = frame.Format,
                            Dimensions = newBitmap.Dimensions
                        }
                    };
                }
            }
        }
        /// <summary>
        /// Renders the active effect (filter effect, then custom effect, else the raw
        /// camera preview) into the given frame buffer. Drops the frame if a previous
        /// one is still being processed after 500 ms.
        /// </summary>
        /// <param name="frameBuffer">Destination pixel buffer (BGRA8888).</param>
        /// <param name="frameSize">Frame dimensions in pixels.</param>
        public async Task GetNewFrameAndApplyEffect(IBuffer frameBuffer, Size frameSize)
        {
            if (_semaphore.WaitOne(500))
            {
                try
                {
                    _cameraPreviewImageSource.InvalidateLoad();

                    var scanlineByteSize = (uint)frameSize.Width * 4; // 4 bytes per pixel in BGRA888 mode
                    var bitmap           = new Bitmap(frameSize, ColorMode.Bgra8888, scanlineByteSize, frameBuffer);

                    if (_filterEffect != null)
                    {
                        var renderer = new BitmapRenderer(_filterEffect, bitmap);
                        await renderer.RenderAsync();
                    }
                    else if (_customEffect != null)
                    {
                        var renderer = new BitmapRenderer(_customEffect, bitmap);
                        await renderer.RenderAsync();
                    }
                    else
                    {
                        var renderer = new BitmapRenderer(_cameraPreviewImageSource, bitmap);
                        await renderer.RenderAsync();
                    }
                }
                finally
                {
                    // FIX: release in finally — the original leaked the semaphore when
                    // RenderAsync threw, permanently blocking all subsequent frames.
                    _semaphore.Release();
                }
            }
        }
Example #11
0
        /// <summary>
        /// Wraps the raw frame buffer in a bitmap, applies a contrast filter and
        /// returns the enhanced pixels as a new frame (format is preserved).
        /// </summary>
        /// <param name="frame">Source frame; its buffer is read, not modified.</param>
        /// <returns>Result holding a newly allocated, enhanced frame.</returns>
        public async Task <EnhanceResult> EnhanceAsync(Frame frame)
        {
            using (var bitmap = new Bitmap(new Windows.Foundation.Size(frame.Dimensions.Width, frame.Dimensions.Height), Internal.Utilities.FrameFormatToColorMode(frame.Format), frame.Pitch, frame.Buffer.AsBuffer()))
            using (var source = new BitmapImageSource(bitmap))
            using (var effect = new FilterEffect(source))
            {
                effect.Filters = new List <IFilter>()
                {
                    new ContrastFilter(0.5)
                };

                using (var newBitmap = new Bitmap(new Windows.Foundation.Size(frame.Dimensions.Width, frame.Dimensions.Height), Internal.Utilities.FrameFormatToColorMode(frame.Format)))
                {
                    // FIX: removed a BitmapRenderer(effect) that the original constructed
                    // and disposed without ever using — output comes from GetBitmapAsync.
                    await effect.GetBitmapAsync(newBitmap, OutputOption.PreserveAspectRatio);

                    return new EnhanceResult()
                    {
                        Frame = new Frame()
                        {
                            Buffer = newBitmap.Buffers[0].Buffer.ToArray(),
                            Pitch = newBitmap.Buffers[0].Pitch,
                            Format = frame.Format,
                            Dimensions = newBitmap.Dimensions
                        }
                    };
                }
            }
        }
        /// <summary>
        /// Loads an RGB lookup-table image from the package and converts each channel's
        /// 256 samples into a curve, applying the given gain/offset clamped to [0, 255].
        /// </summary>
        /// <param name="path">Package-relative path of the lookup image.</param>
        /// <param name="outputGain">Multiplier applied to each sampled channel value.</param>
        /// <param name="outputOffset">Offset added after the gain.</param>
        /// <returns>Red, green and blue curves, in that order.</returns>
        private static async Task <Curve[]> LoadRgbLookupCurves(string path, double outputGain, int outputOffset)
        {
            var storageFile = await Package.Current.InstalledLocation.GetFileAsync(path).AsTask().ConfigureAwait(false);

            using (var source = new StorageFileImageSource(storageFile))
            using (var renderer = new BitmapRenderer(source) { OutputOption = OutputOption.PreserveAspectRatio })
            using (var lookupBitmap = await renderer.RenderAsync().AsTask().ConfigureAwait(false))
            {
                var pixels = lookupBitmap.Buffers[0].Buffer.ToArray();

                var redPoints   = new Point[256];
                var greenPoints = new Point[256];
                var bluePoints  = new Point[256];

                for (int i = 0; i < 256; ++i)
                {
                    // Byte offsets 2/1/0 select the red/green/blue channel of pixel i.
                    redPoints[i]   = new Point(i, Math.Min(255, Math.Max(0, (int)(pixels[i * 4 + 2] * outputGain + outputOffset))));
                    greenPoints[i] = new Point(i, Math.Min(255, Math.Max(0, (int)(pixels[i * 4 + 1] * outputGain + outputOffset))));
                    bluePoints[i]  = new Point(i, Math.Min(255, Math.Max(0, (int)(pixels[i * 4 + 0] * outputGain + outputOffset))));
                }

                return new[]
                {
                    new Curve { Points = redPoints },
                    new Curve { Points = greenPoints },
                    new Curve { Points = bluePoints }
                };
            }
        }
        /// <summary>
        /// Creates a <see cref="Body3D"/> object from the given parameters. If the given model
        /// has already been processed, the computationally expensive search for its
        /// two-dimensional shape is not repeated.
        /// </summary>
        /// <param name="model">The three-dimensional model.</param>
        /// <param name="position">Initial position of the object in the two-dimensional world.</param>
        /// <param name="bodyType">Type of the simulated body (static, kinematic or dynamic).</param>
        /// <param name="basicEffectParams">Parameters for the <see cref="BasicEffect"/> class.</param>
        /// <param name="positionZ">Initial position on the Z axis of the three-dimensional world.</param>
        /// <returns>The constructed <see cref="Body3D"/>.</returns>
        public Body3D CreateBody3D(
            Model model,
            Vector2 position  = new Vector2(),
            BodyType bodyType = BodyCreator.DEFAULT_BODY_TYPE,
            BasicEffectParams basicEffectParams = null,
            float positionZ = 0f)
        {
            // This model has not yet been processed by this instance.
            if (!_modelVerticesPairs.ContainsKey(model))
            {
                // Render the model orthographically to recover its 2D outline.
                using (Texture2D orthoRender = BitmapRenderer.RenderOrthographic(_graphicsDevice, model))
                {
                    // Finding the shape in the bitmap is computationally expensive, so it is
                    // done only once per model processed by this class (cached below).
                    List <Vertices> verticesList = BodyCreator.CreateVerticesForBody(
                        orthoRender,
                        ReduceVerticesDistance,
                        TriangulationAlgorithm,
                        GraphicsToSimulationRatio);

                    _modelVerticesPairs.Add(model, verticesList);
                }
            }
            return(ConstructBody3D(model, _world2D, position, bodyType, positionZ, basicEffectParams));
        }
Example #14
0
        /// <summary>
        /// Renders the camera preview into the given buffer, applying the currently
        /// active filter when one is selected.
        /// </summary>
        /// <param name="buffer">Destination pixel buffer (BGRA8888).</param>
        private async Task GetNewFrameAndApplyChosenEffectAsync(IBuffer buffer)
        {
            var lineSize = (uint)frameSize.Width * 4;
            var bitmap   = new Bitmap(frameSize, ColorMode.Bgra8888, lineSize, buffer);

            // No active filter means an empty filter chain (plain pass-through).
            var filters = activeFilter == null
                ? new IFilter[0]
                : new IFilter[] { activeFilter };

            using (var effect = new FilterEffect(cameraPreviewImageSource) { Filters = filters })
            using (var renderer = new BitmapRenderer(effect, bitmap))
            {
                await renderer.RenderAsync();
            }
        }
        /// <summary>
        /// Renders the active effect (filter effect, then custom effect, else the raw
        /// camera preview) into the given frame buffer. Drops the frame if a previous
        /// one is still being processed after 500 ms.
        /// </summary>
        /// <param name="frameBuffer">Destination pixel buffer (BGRA8888).</param>
        /// <param name="frameSize">Frame dimensions in pixels.</param>
        public async Task GetNewFrameAndApplyEffect(IBuffer frameBuffer, Size frameSize)
        {
            if (_semaphore.WaitOne(500))
            {
                try
                {
                    var scanlineByteSize = (uint)frameSize.Width * 4; // 4 bytes per pixel in BGRA888 mode
                    var bitmap = new Bitmap(frameSize, ColorMode.Bgra8888, scanlineByteSize, frameBuffer);

                    if (_filterEffect != null)
                    {
                        var renderer = new BitmapRenderer(_filterEffect, bitmap);
                        await renderer.RenderAsync();
                    }
                    else if (_customEffect != null)
                    {
                        var renderer = new BitmapRenderer(_customEffect, bitmap);
                        await renderer.RenderAsync();
                    }
                    else
                    {
                        var renderer = new BitmapRenderer(_cameraPreviewImageSource, bitmap);
                        await renderer.RenderAsync();
                    }
                }
                finally
                {
                    // FIX: release in finally — the original leaked the semaphore when
                    // RenderAsync threw, permanently blocking all subsequent frames.
                    _semaphore.Release();
                }
            }
        }
Example #16
0
        /// <summary>
        /// Creates an image source of the overlay, specifying the size of the background image it will be used on. The image source will be sized and cropped correctly.
        /// </summary>
        /// <param name="backgroundSize">The size of the background image.</param>
        /// <returns>The constructed overlay image source.</returns>
        public async Task <IImageProvider> CreateAsync(Size backgroundSize)
        {
            var uriAndRotation = GetUriAndRotation(backgroundSize);
            var file           = await Windows.Storage.StorageFile.GetFileFromApplicationUriAsync(uriAndRotation.Item1).AsTask().ConfigureAwait(false);

            // FIX: the file image source and the renderer are IDisposable and were
            // leaked; both are only needed until the overlay bitmap has been rendered.
            using (var overlayImageSource = new StorageFileImageSource(file))
            {
                var overlayImageInfo = await overlayImageSource.GetInfoAsync().AsTask().ConfigureAwait(false);

                var overlayImageAspectRatio = overlayImageInfo.ImageSize.Width / overlayImageInfo.ImageSize.Height;

                // Default: keep the overlay's native size (used when background is square).
                int overlayImageScaledWidth  = (int)overlayImageInfo.ImageSize.Width;
                int overlayImageScaledHeight = (int)overlayImageInfo.ImageSize.Height;

                // Scale the overlay so its longer dimension matches the background's.
                if ((int)backgroundSize.Width > (int)backgroundSize.Height)
                {
                    overlayImageScaledHeight = (int)(backgroundSize.Width / overlayImageAspectRatio);
                    overlayImageScaledWidth  = (int)(backgroundSize.Width);
                }
                else if ((int)backgroundSize.Width < (int)backgroundSize.Height)
                {
                    overlayImageScaledWidth  = (int)(backgroundSize.Height * overlayImageAspectRatio);
                    overlayImageScaledHeight = (int)(backgroundSize.Height);
                }

                using (var renderer = new BitmapRenderer(overlayImageSource)
                {
                    Size = new Size(overlayImageScaledWidth, overlayImageScaledHeight)
                })
                {
                    var overlayBitmap = await renderer.RenderAsync().AsTask().ConfigureAwait(false);

                    var overlayBitmapImageSource = new BitmapImageSource(overlayBitmap);

                    return Crop(overlayBitmapImageSource, overlayImageInfo, (int)overlayImageInfo.ImageSize.Width, (int)overlayImageInfo.ImageSize.Height);
                }
            }
        }
Example #17
0
        /// <summary>
        /// Renders a 3x4 grid of blue panels, each with a different combination of
        /// rounded corners, and compares the result against the reference image.
        /// </summary>
        public void It_should_render_rounded_panels()
        {
            var renderer  = new BitmapRenderer();
            var container = new HBox();

            var borderRows = new[]
            {
                new [] { "10 0 0 0", "0 10 0 0", "0 0 10 0", "0 0 0 10" },
                new [] { "10 10 0 0", "10 0 10 0", "10 0 0 10", "0 0 10 10" },
                new [] { "10 10 10 0", "10 0 10 10", "10 10 0 10", "10 10 10 10" }
            };

            foreach (var row in borderRows)
            {
                var rowBox = new HBox();
                foreach (var borderSpec in row)
                {
                    var panel = new Panel
                    {
                        Margin          = new Spacer(1),
                        BackgroundColor = Color.Blue,
                        Width           = 22,
                        Height          = 22,
                        BorderRadius    = BorderRadius.Parse(borderSpec)
                    };
                    rowBox.AddComponent(panel);
                }
                container.AddComponent(rowBox);
            }

            var bmp = new Bitmap(300, 40);

            renderer.Render(new Form(container), bmp);
            BitmapComparer.CompareBitmaps("panels_rounded", bmp);
        }
        /// <summary>
        /// Loads an RGB lookup-table image and builds one curve per color channel,
        /// applying the given gain/offset and clamping values to [0, 255].
        /// </summary>
        /// <param name="path">Package-relative path of the lookup image.</param>
        /// <param name="outputGain">Multiplier applied to each sampled channel value.</param>
        /// <param name="outputOffset">Offset added after the gain.</param>
        /// <returns>Red, green and blue curves, in that order.</returns>
        private static async Task<Curve[]> LoadRgbLookupCurves(string path, double outputGain, int outputOffset)
        {
            var storageFile = await Package.Current.InstalledLocation.GetFileAsync(path).AsTask().ConfigureAwait(false);
            using (var storageFileImageSource = new StorageFileImageSource(storageFile))
            using (var renderer = new BitmapRenderer(storageFileImageSource) { OutputOption = OutputOption.PreserveAspectRatio })
            using (var tableBitmap = await renderer.RenderAsync().AsTask().ConfigureAwait(false))
            {
                var table = tableBitmap.Buffers[0].Buffer.ToArray();

                // Channel byte offsets within each 4-byte pixel: red = 2, green = 1, blue = 0.
                var channelOffsets = new[] { 2, 1, 0 };
                var curves         = new Curve[channelOffsets.Length];

                for (int c = 0; c < channelOffsets.Length; ++c)
                {
                    var points = new Point[256];

                    for (int i = 0; i < 256; ++i)
                    {
                        var mapped = (int)(table[i * 4 + channelOffsets[c]] * outputGain + outputOffset);
                        points[i]  = new Point(i, Math.Min(255, Math.Max(0, mapped)));
                    }

                    curves[c] = new Curve { Points = points };
                }

                return curves;
            }
        }
Example #19
0
        /// <summary>
        /// Renders a thumbnail of requested size from the center of the current image with
        /// filters applied.
        /// </summary>
        /// <param name="side">Side length of square thumbnail to render</param>
        /// <returns>Rendered thumbnail bitmap</returns>
        public async Task <Bitmap> RenderThumbnailBitmapAsync(int side)
        {
            Windows.Foundation.Size dimensions = await GetImageSizeAsync();

            // Largest centered square that fits inside the image.
            int minSide = (int)Math.Min(dimensions.Width, dimensions.Height);

            Windows.Foundation.Rect rect = new Windows.Foundation.Rect()
            {
                Width  = minSide,
                Height = minSide,
                X      = (dimensions.Width - minSide) / 2,
                Y      = (dimensions.Height - minSide) / 2,
            };

            // Temporarily append a center-crop filter to the existing chain.
            _components.Add(new CropFilter(rect));
            try
            {
                Bitmap bitmap = new Bitmap(new Windows.Foundation.Size(side, side), ColorMode.Ayuv4444);

                using (BufferImageSource source = new BufferImageSource(_buffer))
                using (FilterEffect effect = new FilterEffect(source)
                {
                    Filters = _components
                })
                using (BitmapRenderer renderer = new BitmapRenderer(effect, bitmap, OutputOption.Stretch))
                {
                    await renderer.RenderAsync();
                }

                return bitmap;
            }
            finally
            {
                // FIX: always remove the temporary crop filter — the original left it
                // in _components when rendering threw, corrupting later renders.
                _components.RemoveAt(_components.Count - 1);
            }
        }
Example #20
0
        /// <summary>
        /// Determines the rectangle inside the canvas where the image should be drawn,
        /// preserving the image's aspect ratio. Falls back to rendering the provider
        /// when its size cannot be queried directly.
        /// </summary>
        /// <param name="source">Image provider whose size is needed.</param>
        /// <param name="canvas">Canvas the image will be drawn on.</param>
        /// <returns>The fitted draw rectangle.</returns>
        public static async Task <Rect> GetDrawArea(IImageProvider source, InkCanvas canvas)
        {
            Size imageSize;

            var syncResource  = source as IImageResource;
            var asyncResource = source as IAsyncImageResource;

            if (syncResource != null)
            {
                // Size is available synchronously.
                imageSize = syncResource.ImageSize;
            }
            else if (asyncResource != null)
            {
                // Size becomes available once the resource is loaded.
                imageSize = (await asyncResource.LoadAsync()).ImageSize;
            }
            else
            {
                // Last resort: render the provider and measure the resulting bitmap.
                using (var renderer = new BitmapRenderer(source))
                {
                    var rendered = await renderer.RenderAsync();

                    imageSize = rendered.Dimensions;
                }
            }

            return FitWithinWhilePreservingAspectRatio(
                new Rect(0, 0, imageSize.Width, imageSize.Height),
                new Rect(0, 0, canvas.ActualWidth, canvas.ActualHeight),
                true);
        }
Example #21
0
        /// <summary>
        /// Renders the same multi-line text three times side by side with different
        /// line heights and compares against the reference image.
        /// </summary>
        public void It_should_render_text_with_different_line_heights()
        {
            var renderer = new BitmapRenderer();
            var content  = new HBox {
                Width = SizeUnit.Unlimited
            };

            // Identical panel/text/font three times — only the line height varies.
            foreach (var lineHeight in new[] { 1.2f, 2f, 0.8f })
            {
                content.AddComponent(CreateLineHeightPanel(lineHeight));
            }

            var form = new Form(content);

            var bmp = new Bitmap(300, 400);

            renderer.Render(form, bmp);
            BitmapComparer.CompareBitmaps("text_box_line_height", bmp);
        }

        // Builds one yellow 100px-wide panel whose red serif label uses the given line height.
        private static Panel CreateLineHeightPanel(float lineHeight)
        {
            return new Panel
            {
                Width           = 100,
                Height          = SizeUnit.Unlimited,
                BackgroundColor = Color.Yellow,
                Margin          = new Spacer(1),
                Inner           = new Label
                {
                    TextColor  = Color.Red,
                    Font       = new FontInfo(TestFontFamily.Serif, 10, FontInfoStyle.Underline | FontInfoStyle.Italic),
                    Text       = "Hello my friend!\nIt's nice to see you!\n\nWhat is a nice and sunny day, is not it?",
                    LineHeight = lineHeight
                }
            };
        }
Example #22
0
 // Subscribes to client bitmap-change notifications and creates a renderer
 // bound to the client, seeded with its background color.
 public override void AddEvents()
 {
     base.AddEvents();
     Client.BitmapDataChange   += eBitmapChanged;
     Client.BitmapSourceChange += eSourceChanged;
     renderer           = new BitmapRenderer(Client);
     renderer.InitColor = Client.BackColor;
     // NOTE(review): handlers are not unsubscribed here — confirm a matching
     // teardown exists so the subscriptions do not leak.
 }
Example #23
0
        /// <summary>
        /// Renders this element's content tree with the renderer's target temporarily
        /// shifted to this element's position, then restores the previous target.
        /// </summary>
        /// <param name="target">Renderer whose target is temporarily re-positioned.</param>
        public void Render(BitmapRenderer target)
        {
            var previous = target.Target;

            target.Target = previous.MoveTo(_position.TopLeft);
            try
            {
                _root.Render(target);
            }
            finally
            {
                // FIX: restore even when a child render throws, so the renderer is
                // not left pointing at this element's shifted sub-region.
                target.Target = previous;
            }
        }
Example #24
0
 /// <summary>
 /// Applies a watercolor filter to the input frame and writes the result to the
 /// output bitmap.
 /// </summary>
 /// <param name="input">Source frame.</param>
 /// <param name="output">Destination bitmap receiving the filtered pixels.</param>
 /// <param name="time">Frame timestamp (unused here).</param>
 public void Process(Bitmap input, Bitmap output, TimeSpan time)
 {
     var effect = new FilterEffect
     {
         Filters = new IFilter[] { new WatercolorFilter() },
         Source  = new BitmapImageSource(input)
     };

     var renderer = new BitmapRenderer(effect, output);
     renderer.RenderAsync().AsTask().Wait(); // Async calls must run sync inside Process()
 }
Example #25
0
        // End-to-end test: reads one NV12 frame from car.mp4, applies a Lumia
        // watercolor effect to it, and saves the result to the Pictures library as JPEG.
        public async Task CS_W_MediaReader_LumiaEffect()
        {
            using (var mediaReader = await MediaReader.CreateFromPathAsync("ms-appx:///car.mp4", AudioInitialization.Deselected, VideoInitialization.Nv12))
                using (var mediaResult = await mediaReader.VideoStream.ReadAsync())
                {
                    // The test clip is expected to be 320x240.
                    var streamProperties = mediaReader.VideoStream.GetCurrentStreamProperties();
                    int width            = (int)streamProperties.Width;
                    int height           = (int)streamProperties.Height;
                    Assert.AreEqual(320, width);
                    Assert.AreEqual(240, height);

                    var inputSample = (MediaSample2D)mediaResult.Sample;
                    Assert.AreEqual(MediaSample2DFormat.Nv12, inputSample.Format);
                    Assert.AreEqual(320, inputSample.Width);
                    Assert.AreEqual(240, inputSample.Height);

                    using (var outputSample = new MediaSample2D(MediaSample2DFormat.Nv12, width, height))
                    {
                        Assert.AreEqual(MediaSample2DFormat.Nv12, outputSample.Format);
                        Assert.AreEqual(320, outputSample.Width);
                        Assert.AreEqual(240, outputSample.Height);

                        using (var inputBuffer = inputSample.LockBuffer(BufferAccessMode.Read))
                            using (var outputBuffer = outputSample.LockBuffer(BufferAccessMode.Write))
                            {
                                // Wrap MediaBuffer2D in Bitmap
                                // NV12 maps to Yuv420Sp: plane 0 is Y, plane 1 is interleaved UV.
                                var inputBitmap = new Bitmap(
                                    new Size(width, height),
                                    ColorMode.Yuv420Sp,
                                    new uint[] { inputBuffer.Planes[0].Pitch, inputBuffer.Planes[1].Pitch },
                                    new IBuffer[] { inputBuffer.Planes[0].Buffer, inputBuffer.Planes[1].Buffer }
                                    );
                                var outputBitmap = new Bitmap(
                                    new Size(width, height),
                                    ColorMode.Yuv420Sp,
                                    new uint[] { outputBuffer.Planes[0].Pitch, outputBuffer.Planes[1].Pitch },
                                    new IBuffer[] { outputBuffer.Planes[0].Buffer, outputBuffer.Planes[1].Buffer }
                                    );

                                // Apply effect
                                var effect = new FilterEffect();
                                effect.Filters = new IFilter[] { new WatercolorFilter() };
                                effect.Source  = new BitmapImageSource(inputBitmap);
                                var renderer = new BitmapRenderer(effect, outputBitmap);
                                await renderer.RenderAsync();
                            }

                        // Save the file
                        var folder = await KnownFolders.PicturesLibrary.CreateFolderAsync("MediaCaptureReaderTests", CreationCollisionOption.OpenIfExists);

                        var file = await folder.CreateFileAsync("CS_W_MediaReader_TestLumiaEffect.jpg", CreationCollisionOption.ReplaceExisting);

                        await outputSample.SaveToFileAsync(file, ImageCompression.Jpeg);

                        Logger.LogMessage("Saved {0}", file.Path);
                    }
                }
        }
        /// <summary>
        /// Builds a <see cref="BitmapRenderer"/> configured from the given barcode parameters.
        /// </summary>
        /// <param name="param">Barcode parameters (colors and caption font).</param>
        /// <returns>A renderer with foreground/background colors and caption font applied.</returns>
        private BitmapRenderer GetRenderer(BarcodeParam param)
        {
            var captionStyle = param.Bold ? FontStyle.Bold : FontStyle.Regular;

            return new BitmapRenderer
            {
                // Colors are resolved by their known names, as provided by the parameters.
                Foreground = Color.FromName(param.Foreground.Name),
                Background = Color.FromName(param.Background.Name),
                TextFont   = new Font(param.FontName, param.FontSize, captionStyle)
            };
        }
Example #27
0
        public void It_should_render_images()
        {
            // Two gradient fixtures: a large red/blue one and a small blue/yellow one.
            var largeBitmap = CreateBitmap(Brushes.Red, Brushes.Blue, 400, 400);
            var smallBitmap = CreateBitmap(Brushes.Blue, Brushes.Yellow, 30, 30);

            var renderer = new BitmapRenderer();
            var rows     = new VBox();

            // Row 1: uniform scaling with horizontal alignment variations.
            var row1 = new HBox();
            AddComponent(row1, new Image {
                Src = largeBitmap, Width = 40, Height = 40, Scaling = ImageScaling.Uniform
            });
            AddComponent(row1, new Image {
                Src = largeBitmap, Width = 40, Height = 30, Scaling = ImageScaling.Uniform, Alignment = Alignment.Center
            });
            AddComponent(row1, new Image {
                Src = largeBitmap, Width = 40, Height = 30, Scaling = ImageScaling.Uniform, Alignment = Alignment.Parse("center left")
            });
            AddComponent(row1, new Image {
                Src = largeBitmap, Width = 40, Height = 30, Scaling = ImageScaling.Uniform, Alignment = Alignment.Parse("center right")
            });
            rows.AddComponent(row1);

            // Row 2: uniform scaling with vertical alignment variations.
            var row2 = new HBox();
            AddComponent(row2, new Image {
                Src = largeBitmap, Width = 30, Height = 40, Scaling = ImageScaling.Uniform, Alignment = Alignment.Center
            });
            AddComponent(row2, new Image {
                Src = largeBitmap, Width = 30, Height = 40, Scaling = ImageScaling.Uniform, Alignment = Alignment.Parse("top center")
            });
            AddComponent(row2, new Image {
                Src = largeBitmap, Width = 30, Height = 40, Scaling = ImageScaling.Uniform, Alignment = Alignment.Parse("bottom center")
            });
            rows.AddComponent(row2);

            // Row 3: fill scaling plus unscaled images with various alignments.
            var row3 = new HBox();
            AddComponent(row3, new Image {
                Src = largeBitmap, Width = 10, Height = 20, Scaling = ImageScaling.Fill
            });
            AddComponent(row3, new Image {
                Src = smallBitmap, Scaling = ImageScaling.None
            });
            AddComponent(row3, new Image {
                Src = smallBitmap, Width = 20, Height = 20, Alignment = Alignment.Center, Scaling = ImageScaling.None
            });
            AddComponent(row3, new Image {
                Src = smallBitmap, Width = 20, Height = 20, Alignment = Alignment.Parse("top left"), Scaling = ImageScaling.None
            });
            AddComponent(row3, new Image {
                Src = smallBitmap, Width = 20, Height = 20, Alignment = Alignment.Parse("bottom right"), Scaling = ImageScaling.None
            });
            rows.AddComponent(row3);

            // Render the assembled form and compare against the stored reference bitmaps.
            var rendered = renderer.Render(new Form(rows));

            BitmapComparer.CompareBitmaps("bitmaps", rendered);
        }
Example #28
0
        /// <summary>
        /// Called when data for any output pin is requested. Regenerates barcode
        /// bitmaps and marks texture resources for re-upload when any input changed.
        /// </summary>
        /// <param name="spreadMax">Number of slices to evaluate.</param>
        public void Evaluate(int spreadMax)
        {
            if (bitmaps == null || spreadMax != this.spreadMax)
            {
                // Dispose the bitmaps of the previous spread before dropping the list;
                // GDI+ Bitmap handles are unmanaged and leak otherwise.
                if (bitmaps != null)
                {
                    foreach (var oldBitmap in bitmaps)
                    {
                        if (oldBitmap != null)
                        {
                            oldBitmap.Dispose();
                        }
                    }
                }
                bitmaps = new List <Bitmap>(spreadMax);
            }
            this.spreadMax        = spreadMax;
            FStatusOut.SliceCount = spreadMax;
            FTextureOut.ResizeAndDispose(spreadMax, CreateTextureResource);
            for (int i = 0; i < spreadMax; i++)
            {
                var textureResource = FTextureOut[i];

                int w = (int)FSizeIn[i].x;
                int h = (int)FSizeIn[i].y;

                if (w > 0 && h > 0 && FFontSizeIn[i] > 0)
                {
                    renderer          = new BitmapRenderer();
                    renderer.TextFont = new System.Drawing.Font(FFontIn[i].Name, FFontSizeIn[i]);
                    var info = textureResource.Metadata;
                    // Recreate the texture if the requested resolution changed.
                    if (info.Width != w || info.Height != h)
                    {
                        textureResource.Dispose();
                        textureResource = CreateTextureResource(i);
                        info            = textureResource.Metadata;
                    }
                    // Re-render only when any barcode input actually changed (or on the first frame).
                    if (info.Data != FDataIn[i] || info.Format != FFormatIn[i] ||
                        info.Width != w || info.Height != h ||
                        info.ShowText != FShowTextIn[i] || info.FontSize != FFontSizeIn[i] || firstFrame)
                    {
                        info.Data     = FDataIn[i];
                        info.Format   = FFormatIn[i];
                        info.ShowText = FShowTextIn[i];
                        info.FontSize = FFontSizeIn[i];
                        Bitmap bmp = new Bitmap(GenerateBarcodeImage(w, h, FDataIn[i], FFormatIn[i], !FShowTextIn[i], i));
                        if (bitmaps.Count <= i)
                        {
                            bitmaps.Add(bmp);
                        }
                        else
                        {
                            // Dispose the bitmap being replaced to avoid leaking its GDI+ handle.
                            if (bitmaps[i] != null)
                            {
                                bitmaps[i].Dispose();
                            }
                            bitmaps[i] = bmp;
                        }
                        textureResource.NeedsUpdate = true;
                    }
                    else
                    {
                        textureResource.NeedsUpdate = false;
                    }
                }
                FTextureOut[i] = textureResource;
            }
            firstFrame = false;
        }
        /// <summary>
        /// Regenerates and displays the QR code for the newly selected event.
        /// </summary>
        private void eventCB_SelectedIndexChanged(object sender, EventArgs e)
        {
            var selectedContent = eventList[eventCB.SelectedIndex].qrCodeString;

            var writer   = new QRCodeWriter();
            var matrix   = writer.encode(selectedContent, ZXing.BarcodeFormat.QR_CODE, 600, 600);
            var renderer = new BitmapRenderer();

            qrCodePhoto.Image = renderer.Render(matrix, ZXing.BarcodeFormat.QR_CODE, selectedContent);
        }
Example #30
0
        /// <summary>
        /// Applies a watercolor filter to <paramref name="input"/> and renders the
        /// result into <paramref name="output"/>.
        /// </summary>
        /// <param name="input">Source bitmap to filter.</param>
        /// <param name="output">Destination bitmap the filtered image is rendered into.</param>
        /// <param name="time">Frame timestamp (not used by this effect).</param>
        public void Process(Bitmap input, Bitmap output, TimeSpan time)
        {
            // Dispose the effect graph deterministically; BitmapImageSource, FilterEffect
            // and BitmapRenderer are IDisposable (see the using-based call sites elsewhere
            // in this codebase) and were previously leaked on every frame.
            using (var source = new BitmapImageSource(input))
            using (var effect = new FilterEffect(source) { Filters = new IFilter[] { new WatercolorFilter() } })
            using (var renderer = new BitmapRenderer(effect, output))
            {
                renderer.RenderAsync().AsTask().Wait(); // Async calls must run sync inside Process()
            }
        }
Example #31
0
        /// <summary>
        /// Crops and rotates the given frame to the requested area, returning the
        /// normalized frame plus a function that maps points in the normalized frame
        /// back to coordinates in the original, uncropped frame.
        /// </summary>
        /// <param name="frame">Source frame to normalize.</param>
        /// <param name="area">Crop area within the source frame.</param>
        /// <param name="rotation">Rotation in degrees; the frame is reframed by its negation.</param>
        /// <returns>The normalized frame and the reverse point-translation function.</returns>
        public async Task <NormalizeResult> NormalizeAsync(Frame frame, Windows.Foundation.Rect area, double rotation)
        {
            // NOTE: the original code also created an unused BitmapRenderer here;
            // rendering goes through effect.GetBitmapAsync below, so it was removed.
            using (var bitmap = new Bitmap(frame.Dimensions, Internal.Utilities.FrameFormatToColorMode(frame.Format), frame.Pitch, frame.Buffer.AsBuffer()))
                using (var source = new BitmapImageSource(bitmap))
                    using (var effect = new FilterEffect(source))
                    {
                        effect.Filters = new List <IFilter>()
                        {
                            new ReframingFilter(area, -rotation)
                        };

                        using (var newBitmap = new Bitmap(new Windows.Foundation.Size(area.Width, area.Height), Internal.Utilities.FrameFormatToColorMode(frame.Format)))
                        {
                            // Render the reframed image into a bitmap sized to the crop area.
                            await effect.GetBitmapAsync(newBitmap, OutputOption.PreserveAspectRatio);

                            return(new NormalizeResult()
                            {
                                Frame = new Frame()
                                {
                                    Buffer = newBitmap.Buffers[0].Buffer.ToArray(),
                                    Pitch = newBitmap.Buffers[0].Pitch,
                                    Format = frame.Format,
                                    Dimensions = newBitmap.Dimensions
                                },
                                // Maps a point in the normalized frame back into original-frame
                                // coordinates by applying the inverse of the reframing transform.
                                Translate = new Func <Windows.Foundation.Point, Windows.Foundation.Point>((normalizedPoint) =>
                                {
                                    var rotationRadians = -rotation / 360.0 * 2.0 * Math.PI;
                                    var sin = Math.Sin(rotationRadians);
                                    var cos = Math.Cos(rotationRadians);
                                    var origoX = area.Width / 2.0;
                                    var origoY = area.Height / 2.0;

                                    // Translate point to origo before rotation
                                    var ox = normalizedPoint.X - origoX;
                                    var oy = normalizedPoint.Y - origoY;

                                    // Rotate the point about the area's center
                                    var x = ox * cos - oy * sin;
                                    var y = ox * sin + oy * cos;

                                    // Translate point back to area after rotation
                                    x = x + origoX;
                                    y = y + origoY;

                                    // Add margins from original uncropped frame
                                    x = x + area.X;
                                    y = y + area.Y;

                                    return new Windows.Foundation.Point(x, y);
                                })
                            });
                        }
                    }
        }
        /// <summary>
        /// Crops and rotates the given frame to the requested area, returning the
        /// normalized frame plus a function that maps points in the normalized frame
        /// back to coordinates in the original, uncropped frame.
        /// </summary>
        /// <param name="frame">Source frame to normalize.</param>
        /// <param name="area">Crop area within the source frame.</param>
        /// <param name="rotation">Rotation in degrees; the frame is reframed by its negation.</param>
        /// <returns>The normalized frame and the reverse point-translation function.</returns>
        public async Task<NormalizeResult> NormalizeAsync(Frame frame, Windows.Foundation.Rect area, double rotation)
        {
            using (var bitmap = new Bitmap(frame.Dimensions, Internal.Utilities.FrameFormatToColorMode(frame.Format), frame.Pitch, frame.Buffer.AsBuffer()))
            using (var source = new BitmapImageSource(bitmap))
            using (var effect = new FilterEffect(source))
            // NOTE(review): this renderer is never used — rendering goes through
            // effect.GetBitmapAsync below. Confirm whether it can be removed.
            using (var renderer = new BitmapRenderer(effect))
            {
                effect.Filters = new List<IFilter>()
                {
                    new ReframingFilter(area, -rotation)
                };

                using (var newBitmap = new Bitmap(new Windows.Foundation.Size(area.Width, area.Height), Internal.Utilities.FrameFormatToColorMode(frame.Format)))
                {
                    // Render the reframed image into a bitmap sized to the crop area.
                    await effect.GetBitmapAsync(newBitmap, OutputOption.PreserveAspectRatio);

                    return new NormalizeResult()
                    {
                        Frame = new Frame()
                        {
                            Buffer = newBitmap.Buffers[0].Buffer.ToArray(),
                            Pitch = newBitmap.Buffers[0].Pitch,
                            Format = frame.Format,
                            Dimensions = newBitmap.Dimensions
                        },
                        // Maps a point in the normalized frame back into original-frame
                        // coordinates by inverting the reframing transform.
                        Translate = new Func<Windows.Foundation.Point, Windows.Foundation.Point>((normalizedPoint) =>
                        {
                            var rotationRadians = -rotation / 360.0 * 2.0 * Math.PI;
                            var sin = Math.Sin(rotationRadians);
                            var cos = Math.Cos(rotationRadians);
                            var origoX = area.Width / 2.0;
                            var origoY = area.Height / 2.0;

                            // Translate point to origo before rotation
                            var ox = normalizedPoint.X - origoX;
                            var oy = normalizedPoint.Y - origoY;

                            // Move area to origo, calculate new point positions, restore area location and add crop margins
                            var x = ox * cos - oy * sin;
                            var y = ox * sin + oy * cos;

                            // Translate point back to area after rotation
                            x = x + origoX;
                            y = y + origoY;

                            // Add margins from original uncropped frame
                            x = x + area.X;
                            y = y + area.Y;

                            return new Windows.Foundation.Point(x, y);
                        })
                    };
                }
            }
        }
Example #33
0
        /// <summary>
        /// Builds a 120x120 QR code bitmap.
        /// NOTE(review): the encoded content is an empty string and the rendered
        /// bitmap is never displayed or returned — this looks unfinished; confirm intent.
        /// </summary>
        private void PrintWalletInfo()
        {
            const int size = 120;

            var writer = new QRCodeWriter();
            BitMatrix matrix = writer.encode("", ZXing.BarcodeFormat.QR_CODE, size, size, null);

            var renderer = new BitmapRenderer();
            Android.Graphics.Bitmap bitmap = renderer.Render(matrix, BarcodeFormat.QR_CODE, "");
        }
        /// <summary>
        /// Renders the IImageProvider into a Bitmap. When the passed Bitmap is non-null and
        /// already matches the requested size and color mode it is reused; otherwise a new
        /// Bitmap is created.
        /// </summary>
        /// <param name="imageProvider">The image provider to render the image content of.</param>
        /// <param name="bitmap">The Bitmap to reuse, if possible. If null is passed, a new Bitmap is created.</param>
        /// <param name="size">The desired size of the rendered image.</param>
        /// <param name="colorMode">The desired color mode of the rendered image.</param>
        /// <param name="outputOption">Specifies how to fit the image into the rendered rectangle, if the aspect ratio differs.</param>
        /// <returns>A task resulting in either the reused Bitmap, or a new one if necessary.</returns>
        public static async Task<Bitmap> GetBitmapAsync(this IImageProvider imageProvider, Bitmap bitmap, Size size, ColorMode colorMode, OutputOption outputOption)
        {
            // Reuse the caller's bitmap only when it exists and matches exactly.
            var reusable = bitmap != null && bitmap.Dimensions == size && bitmap.ColorMode == colorMode;
            var target   = reusable ? bitmap : new Bitmap(size, colorMode);

            using (var renderer = new BitmapRenderer(imageProvider, target, outputOption))
            {
                return await renderer.RenderAsync().AsTask().ConfigureAwait(false);
            }
        }
        /// <summary>
        /// Reads the UUID passed via the intent, shows it as text and renders it
        /// as a 600x600 QR code into the image view.
        /// </summary>
        private void GenerateQRCode()
        {
            var uuid = Intent.GetStringExtra("uuid");
            codeTextView.Text = uuid;

            var writer   = new QRCodeWriter();
            var matrix   = writer.encode(uuid, ZXing.BarcodeFormat.QR_CODE, 600, 600);
            var renderer = new BitmapRenderer();

            qrCodeImage.SetImageBitmap(renderer.Render(matrix, ZXing.BarcodeFormat.QR_CODE, uuid));
        }
Example #36
0
        /// <summary>
        /// Encodes the given address as a 120x120 QR code bitmap.
        /// </summary>
        /// <param name="address">Content to encode into the QR code.</param>
        /// <returns>The rendered QR code bitmap.</returns>
        public static Bitmap CreateQRCode(string address)
        {
            const int size = 120;

            var writer   = new QRCodeWriter();
            var matrix   = writer.encode(address, ZXing.BarcodeFormat.QR_CODE, size, size, null);
            var renderer = new BitmapRenderer();

            return renderer.Render(matrix, BarcodeFormat.QR_CODE, address);
        }
        /// <summary>
        /// Renders the IImageProvider into a Bitmap. If the passed Bitmap is non-null, and it matches the passed size and color mode, it will be reused. Otherwise this method creates a new Bitmap.
        /// </summary>
        /// <param name="imageProvider">The image provider to render the image content of.</param>
        /// <param name="bitmap">The Bitmap to reuse, if possible. If null is passed, a new Bitmap is created.</param>
        /// <param name="size">The desired size of the rendered image.</param>
        /// <param name="colorMode">The desired color mode of the rendered image.</param>
        /// <param name="outputOption">Specifies how to fit the image into the rendered rectangle, if the aspect ratio differs.</param>
        /// <returns>A task resulting in either the reused Bitmap, or a new one if necessary.</returns>
        public static async Task <Bitmap> GetBitmapAsync(this IImageProvider imageProvider, Bitmap bitmap, Size size, ColorMode colorMode, OutputOption outputOption)
        {
            // Allocate a fresh target only when the caller's bitmap is missing or mismatched.
            if (bitmap == null || bitmap.Dimensions != size || bitmap.ColorMode != colorMode)
            {
                bitmap = new Bitmap(size, colorMode);
            }

            // The renderer is disposed here; ownership of the rendered bitmap stays with the caller.
            using (var renderer = new BitmapRenderer(imageProvider, bitmap, outputOption))
            {
                return(await renderer.RenderAsync().AsTask().ConfigureAwait(false));
            }
        }
        /// <summary>
        /// Creates an image source of the overlay, specifying the size of the background image it will be used on. The image source will be sized and cropped correctly.
        /// </summary>
        /// <param name="backgroundSize">The size of the background image.</param>
        /// <returns>The constructed overlay image source.</returns>
        public async Task<IImageProvider> CreateAsync(Size backgroundSize)
        {
            // Pick the overlay asset (and its rotation) that matches the background.
            var uriAndRotation = GetUriAndRotation(backgroundSize);
            var file = await Windows.Storage.StorageFile.GetFileFromApplicationUriAsync(uriAndRotation.Item1).AsTask().ConfigureAwait(false);
            var overlayImageSource = new StorageFileImageSource(file);

            var overlayImageInfo = await overlayImageSource.GetInfoAsync().AsTask().ConfigureAwait(false);
            var overlayImageAspectRatio = overlayImageInfo.ImageSize.Width / overlayImageInfo.ImageSize.Height;

            int overlayImageScaledWidth = (int)overlayImageInfo.ImageSize.Width;
            int overlayImageScaledHeight = (int)overlayImageInfo.ImageSize.Height;

            // Scale the overlay so it spans the background's longer side while
            // preserving the overlay's own aspect ratio; square backgrounds keep
            // the overlay's natural size.
            if ((int)backgroundSize.Width > (int)backgroundSize.Height)
            {
                overlayImageScaledHeight = (int)(backgroundSize.Width / overlayImageAspectRatio);
                overlayImageScaledWidth = (int)(backgroundSize.Width);
            }
            else if ((int)backgroundSize.Width < (int)backgroundSize.Height)
            {
                overlayImageScaledWidth = (int)(backgroundSize.Height * overlayImageAspectRatio);
                overlayImageScaledHeight = (int)(backgroundSize.Height);
            }

            // NOTE(review): renderer and overlayImageSource are IDisposable elsewhere in
            // this codebase but are not disposed here — confirm ownership/lifetime.
            var renderer = new BitmapRenderer(overlayImageSource) { Size = new Size(overlayImageScaledWidth, overlayImageScaledHeight) };
            var overlayBitmap = await renderer.RenderAsync().AsTask().ConfigureAwait(false);
            var overlayBitmapImageSource = new BitmapImageSource(overlayBitmap);

            // NOTE(review): Crop receives the ORIGINAL overlay dimensions, not the scaled
            // ones computed above — verify against Crop's implementation that this is intended.
            return Crop(overlayBitmapImageSource, overlayImageInfo, (int)overlayImageInfo.ImageSize.Width, (int)overlayImageInfo.ImageSize.Height);
        }
Example #39
0
        /// <summary>
        /// Renders the current blend effect (or the raw camera preview when no effect
        /// is set) into the supplied frame buffer. Skips the frame if the semaphore
        /// cannot be acquired within 500 ms.
        /// </summary>
        /// <param name="frameBuffer">Destination buffer for the rendered BGRA frame.</param>
        /// <param name="frameSize">Dimensions of the frame.</param>
        public async Task GetNewFrameAndApplyEffect(IBuffer frameBuffer, Size frameSize)
        {
            if (_semaphore.WaitOne(500))
            {
                try
                {
                    var scanlineByteSize = (uint)frameSize.Width * 4; // 4 bytes per pixel in BGRA888 mode
                    var bitmap = new Bitmap(frameSize, ColorMode.Bgra8888, scanlineByteSize, frameBuffer);

                    if (_blendEffect != null)
                    {
                        _blendEffect.GlobalAlpha = GlobalAlpha;

                        var renderer = new BitmapRenderer(_blendEffect, bitmap);
                        await renderer.RenderAsync();
                    }
                    else
                    {
                        var renderer = new BitmapRenderer(_cameraPreviewImageSource, bitmap);
                        await renderer.RenderAsync();
                    }
                }
                catch (Exception ex)
                {
                    System.Diagnostics.Debug.WriteLine("RealtimeBlendDemo.GetNewFrameAndApplyEffect(): "
                        + ex.ToString());
                }
                finally
                {
                    // Release in finally: the Bitmap constructor previously ran outside the
                    // try block, so a throw there would have leaked the semaphore forever.
                    _semaphore.Release();
                }
            }
        }
        public async Task GetImageProviderReturnsExpectedResultWhenCreatedWithLambdaReturningAsynchronousImageProvider()
        {
            // Arrange: a plain background layer plus an adjustment layer whose
            // image provider is produced asynchronously by a lambda.
            var background      = new ColorImageSource(BackgroundLayerSize, Colors.AliceBlue);
            var backgroundLayer = new Layer(LayerStyle.Normal(), background, BackgroundLayerSize);
            var adjustment      = new AdjustmentLayer(LayerStyle.Normal(), context => CreateTestEffectAsync(context));

            // Act: resolve the provider and wire the background as its source.
            var provider = adjustment.GetImageProvider(CreateFakeLayerContext(backgroundLayer, background, adjustment));
            ((IImageConsumer)await provider.AsTask()).Source = background;

            // Assert: resolution was asynchronous and produced a usable provider.
            Assert.IsFalse(provider.WasSynchronous);
            var resolved = await provider.AsTask();
            Assert.IsNotNull(resolved);

            // Rendering the result must yield a bitmap matching the background size.
            var renderer = new BitmapRenderer(provider.Result);
            var bitmap   = await renderer.RenderAsync();
            Assert.AreEqual(BackgroundLayerSize, bitmap.Dimensions);
        }
        /// <summary>
        /// Initialises the data structures to pass data to the media pipeline via the MediaStreamSource.
        /// Allocates the RGBA output frame, the YUV camera frame buffers, wires up the
        /// renderer, and reports the stream description to the pipeline.
        /// </summary>
        protected override void OpenMediaAsync()
        {
            // General properties

            // Output frames are 32-bit RGBA, rendered into _frameBuffer.
            _frameBufferSize = (int)_frameSize.Width * (int)_frameSize.Height * 4; // RGBA
            _frameBuffer = new byte[_frameBufferSize];

            _frameStream = new MemoryStream(_frameBuffer);

            // Camera frames arrive as YUV 4:2:0 semi-planar (Yuv420Sp): a full-size Y
            // plane followed by a half-size UV plane, both in one backing array.
            int layersize = (int)(_frameSize.Width * _frameSize.Height);
            int layersizeuv = layersize / 2;
            _cameraFrameBuffer = new byte[layersize + layersizeuv];
            _cameraBitmap = new Bitmap(
                    _frameSize,
                    ColorMode.Yuv420Sp,
                    new uint[] { (uint)_frameSize.Width, (uint)_frameSize.Width },
                    new IBuffer[] { _cameraFrameBuffer.AsBuffer(0, layersize), _cameraFrameBuffer.AsBuffer(layersize, layersizeuv) });
            _source = new BitmapImageSource(_cameraBitmap);
            _frameBitmap = new Bitmap(
                    _frameSize,
                    ColorMode.Bgra8888,
                   4 * (uint)_frameSize.Width,
                     _frameBuffer.AsBuffer());



            // The renderer writes effect output directly into _frameBitmap, which
            // wraps the _frameBuffer that is handed to the media pipeline.
            _renderer = new BitmapRenderer();
            _renderer.Bitmap = _frameBitmap;


            // Media stream attributes

            var mediaStreamAttributes = new Dictionary<MediaStreamAttributeKeys, string>();

            mediaStreamAttributes[MediaStreamAttributeKeys.VideoFourCC] = "RGBA";
            mediaStreamAttributes[MediaStreamAttributeKeys.Width] = ((int)_frameSize.Width).ToString();
            mediaStreamAttributes[MediaStreamAttributeKeys.Height] = ((int)_frameSize.Height).ToString();

            _videoStreamDescription = new MediaStreamDescription(MediaStreamType.Video, mediaStreamAttributes);

            // Media stream descriptions

            var mediaStreamDescriptions = new List<MediaStreamDescription>();
            mediaStreamDescriptions.Add(_videoStreamDescription);

            // Media source attributes: zero duration, not seekable (live source).

            var mediaSourceAttributes = new Dictionary<MediaSourceAttributesKeys, string>();
            mediaSourceAttributes[MediaSourceAttributesKeys.Duration] = TimeSpan.FromSeconds(0).Ticks.ToString(CultureInfo.InvariantCulture);
            mediaSourceAttributes[MediaSourceAttributesKeys.CanSeek] = false.ToString();

            // NOTE(review): initialised to 0 ticks here — presumably updated per frame
            // elsewhere; confirm against the sampling code.
            _frameTime = (int)TimeSpan.FromSeconds((double)0).Ticks;

            // Start frame rate timer



            // Report that we finished initializing its internal state and can now pass in frame samples

            ReportOpenMediaCompleted(mediaSourceAttributes, mediaStreamDescriptions);
        }
        /// <summary>
        /// Tears down all media-pipeline state created in OpenMediaAsync: camera
        /// reference, frame stream, renderer, effect, image sources, bitmaps and buffers.
        /// </summary>
        protected override void CloseMedia()
        {

            {
                _camera = null;
                if (_frameStream != null)
                {
                    _frameStream.Close();
                    _frameStream = null;
                }




                if (_renderer != null)
                {
                    // Detach the bitmap before disposing the renderer; disposing it while
                    // it still references the bitmap crashes (see original note below).
                    _renderer.Bitmap = null; // bug  : crash on bitmap dispose

                    _renderer.Dispose();
                    _renderer = null;

                }

                if (_effect != null && _effect is IDisposable)
                {
                    // Intentionally NOT disposed — disposing crashed (original note below);
                    // the reference is just dropped instead.
                    // (_effect as IDisposable).Dispose(); // bug : crash on CustomEffectBase dispose
                    _effect = null;
                }

                if (_source != null)
                {
                    _source.Dispose();
                    _source = null;
                }
                if (_frameBitmap != null)
                {
                    _frameBitmap.Dispose();
                    _frameBitmap = null;
                }
                if (_cameraBitmap != null)
                {
                    _cameraBitmap.Dispose();
                    _cameraBitmap = null;
                }

                // Reset bookkeeping so the source could be reopened cleanly.
                _frameStreamOffset = 0;
                _frameTime = 0;

                _frameBufferSize = 0;
                _frameBuffer = null;
                _cameraFrameBuffer = null;
                _videoStreamDescription = null;
                _currentTime = 0;
            }
        }
Example #43
0
        /// <summary>
        /// Renders a thumbnail of requested size from the center of the current image with
        /// filters applied.
        /// </summary>
        /// <param name="side">Side length of square thumbnail to render</param>
        /// <returns>Rendered thumbnail bitmap</returns>
        public async Task<Bitmap> RenderThumbnailBitmapAsync(int side)
        {
            Windows.Foundation.Size dimensions = await GetImageSizeAsync();

            // Centered square crop over the largest region that fits the image.
            int minSide = (int)Math.Min(dimensions.Width, dimensions.Height);

            Windows.Foundation.Rect rect = new Windows.Foundation.Rect()
            {
                Width = minSide,
                Height = minSide,
                X = (dimensions.Width - minSide) / 2,
                Y = (dimensions.Height - minSide) / 2,
            };

            // The crop filter is appended only for this render; remove it in finally so a
            // failed RenderAsync can no longer leave _components permanently altered.
            CropFilter cropFilter = new CropFilter(rect);
            _components.Add(cropFilter);

            try
            {
                Bitmap bitmap = new Bitmap(new Windows.Foundation.Size(side, side), ColorMode.Ayuv4444);

                using (BufferImageSource source = new BufferImageSource(_buffer))
                using (FilterEffect effect = new FilterEffect(source) { Filters = _components })
                using (BitmapRenderer renderer = new BitmapRenderer(effect, bitmap, OutputOption.Stretch))
                {
                    await renderer.RenderAsync();
                }

                return bitmap;
            }
            finally
            {
                _components.Remove(cropFilter);
            }
        }
        public async Task RenderWithBackgroundAndDifferentRenderSizePassedInToImageProvider()
        {
            // One additive layer contributing a constant (32, 16, 8) color.
            var layers = new LayerList(new Layer[]
            {
                new Layer(LayerStyle.Add(), context => new ColorImageSource(context.BackgroundLayer.ImageSize, Color.FromArgb(255,32,16,8)))
            });

            // Mid-gray 100x100 background, rendered at half the source size.
            var background = new ColorImageSource(new Size(100, 100), Color.FromArgb(255, 128, 128, 128));
            var provider   = await layers.ToImageProvider(background, background.ImageSize, new Size(50, 50)).AsTask();

            var target   = new Bitmap(new Size(50, 50), ColorMode.Bgra8888);
            var renderer = new BitmapRenderer(provider, target);
            await renderer.RenderAsync();

            // Additive blend in BGRA order: 128+8, 128+16, 128+32, alpha stays opaque.
            var pixels = target.Buffers[0].Buffer.ToArray();
            Assert.AreEqual(136, pixels[0]);
            Assert.AreEqual(144, pixels[1]);
            Assert.AreEqual(160, pixels[2]);
            Assert.AreEqual(255, pixels[3]);
        }
        /// <summary>
        /// Renders a low-resolution preview of the source image into InputLR by
        /// rendering up to 4x4 tiles of at most 1024px each.
        /// </summary>
        /// <param name="source">Image provider for the full-resolution source.</param>
        /// <param name="info">Source image info, used for its dimensions.</param>
        async Task createLRPicture(IImageProvider source, ImageProviderInfo info)
        {
            // Scale factor so the longest side fits into 4096px; never upscale.
            HR_LR_Factor = 4096 / Math.Max(info.ImageSize.Width, info.ImageSize.Height);
            if (HR_LR_Factor > 1) HR_LR_Factor = 1;

            InputLR = new WriteableBitmap(
                (int)(HR_LR_Factor * info.ImageSize.Width),
                (int)(HR_LR_Factor * info.ImageSize.Height)
                );

            // Tile sizes: LRblocksize in low-res target pixels, blocksize in source coordinates.
            int LRblocksize = 4096 / 4;
            double blocksize = LRblocksize / HR_LR_Factor;

            // Wrap the WriteableBitmap's pixel buffer so tiles render straight into it.
            // (The original code also allocated an unused temporary WriteableBitmap here;
            // it has been removed.)
            var bitmapLR = new Bitmap(
                new Size(InputLR.PixelWidth, InputLR.PixelHeight),
                ColorMode.Bgra8888,
                (uint)(4 * InputLR.PixelWidth),
                InputLR.Pixels.AsBuffer());

            for (int y = 0; y < 4; ++y)
            {
                var tileHeight = (int)Math.Min((double)LRblocksize, InputLR.PixelHeight - y * LRblocksize);
                if (tileHeight <= 0) break; // no rows left

                for (int x = 0; x < 4; ++x)
                {
                    var tileWidth = (int)Math.Min((double)LRblocksize, InputLR.PixelWidth - x * LRblocksize);
                    if (tileWidth <= 0) break; // no columns left in this row

                    // View onto the target region of the low-res bitmap.
                    var tile = new Bitmap(bitmapLR,
                        new Rect((double)x * LRblocksize, (double)y * LRblocksize,
                            tileWidth,
                            tileHeight));

                    using (var filter = new FilterEffect(source))
                    using (var render = new BitmapRenderer(filter, tile))
                    {
                        // Reframe selects the matching region in source coordinates.
                        filter.Filters = new IFilter[] { new ReframingFilter(new Rect((double)x * blocksize, (double)y * blocksize,
                            (double)blocksize,
                            (double)blocksize),
                            0) };
                        await render.RenderAsync();
                    }
                }
            }
        }
 /// <summary>
 /// Renders the real-time effect into the supplied preview bitmap. Exceptions are
 /// logged and swallowed so preview rendering never crashes the caller.
 /// </summary>
 /// <param name="bitmap">Target bitmap the effect output is rendered into.</param>
 public virtual async Task RenderPreview(Bitmap bitmap)
 {
     try
     {
         Debug.WriteLine(DebugTag + Name + ": Rendering Preview()");

         using (var previewRenderer = new BitmapRenderer(_realTimeEffect, bitmap))
         {
             await previewRenderer.RenderAsync();
         }

         Debug.WriteLine(DebugTag + Name + ": RenderPreview Done()");
     }
     catch (Exception e)
     {
         Debug.WriteLine(DebugTag + Name + ": RenderPreview(): " + e.Message);
     }
 }
Example #47
0
        /// <summary>
        /// Renders the selected effect (or the raw camera preview when no effect is
        /// selected) into the supplied frame buffer. Skips the frame if the semaphore
        /// cannot be acquired within 500 ms.
        /// </summary>
        /// <param name="frameBuffer">Destination buffer for the rendered BGRA frame.</param>
        /// <param name="frameSize">Dimensions of the frame.</param>
        public async Task GetNewFrameAndApplyEffect(IBuffer frameBuffer, Size frameSize)
        {
            if (_semaphore.WaitOne(500))
            {
                // Release in finally: previously any exception thrown while rendering
                // left the semaphore acquired forever, deadlocking every later frame.
                try
                {
                    _cameraPreviewImageSource.InvalidateLoad();

                    var scanlineByteSize = (uint)frameSize.Width * 4; // 4 bytes per pixel in BGRA888 mode
                    var bitmap = new Bitmap(frameSize, ColorMode.Bgra8888, scanlineByteSize, frameBuffer);

                    if (_effectIndex >= 0 && _effectIndex < EffectGroup.Count)
                    {
                        AbstractFilter filter = EffectGroup[_effectIndex];
                        await filter.RenderPreview(bitmap);
                    }
                    else
                    {
                        var renderer = new BitmapRenderer(_cameraPreviewImageSource, bitmap);
                        await renderer.RenderAsync();
                    }
                }
                finally
                {
                    _semaphore.Release();
                }
            }
        }