public static async Task<Rect> GetDrawArea(IImageProvider source, InkCanvas canvas)
{
    var image = source;
    Size imageSize;

    if (image is IImageResource)
    {
        imageSize = ((IImageResource)image).ImageSize;
    }
    else if (image is IAsyncImageResource)
    {
        imageSize = (await ((IAsyncImageResource)image).LoadAsync()).ImageSize;
    }
    else
    {
        using (var renderer = new BitmapRenderer(image))
        {
            var tempBmp = await renderer.RenderAsync();
            imageSize = tempBmp.Dimensions;
        }
    }

    return FitWithinWhilePreservingAspectRatio(
        new Rect(0, 0, imageSize.Width, imageSize.Height),
        new Rect(0, 0, canvas.ActualWidth, canvas.ActualHeight),
        true);
}

public async Task GetNewFrameAndApplyEffect(IBuffer frameBuffer, Size frameSize)
{
    if (_semaphore.WaitOne(500))
    {
        var scanlineByteSize = (uint)frameSize.Width * 4; // 4 bytes per pixel in Bgra8888 mode
        var bitmap = new Bitmap(frameSize, ColorMode.Bgra8888, scanlineByteSize, frameBuffer);

        if (_filterEffect != null)
        {
            var renderer = new BitmapRenderer(_filterEffect, bitmap);
            await renderer.RenderAsync();
        }
        else if (_customEffect != null)
        {
            var renderer = new BitmapRenderer(_customEffect, bitmap);
            await renderer.RenderAsync();
        }
        else
        {
            var renderer = new BitmapRenderer(_cameraPreviewImageSource, bitmap);
            await renderer.RenderAsync();
        }

        _semaphore.Release();
    }
}

public async Task GetNewFrameAndApplyEffect(IBuffer frameBuffer, Size frameSize)
{
    if (_semaphore.WaitOne(500))
    {
        _cameraPreviewImageSource.InvalidateLoad();

        var scanlineByteSize = (uint)frameSize.Width * 4; // 4 bytes per pixel in Bgra8888 mode
        var bitmap = new Bitmap(frameSize, ColorMode.Bgra8888, scanlineByteSize, frameBuffer);

        if (_filterEffect != null)
        {
            var renderer = new BitmapRenderer(_filterEffect, bitmap);
            await renderer.RenderAsync();
        }
        else if (_customEffect != null)
        {
            var renderer = new BitmapRenderer(_customEffect, bitmap);
            await renderer.RenderAsync();
        }
        else
        {
            var renderer = new BitmapRenderer(_cameraPreviewImageSource, bitmap);
            await renderer.RenderAsync();
        }

        _semaphore.Release();
    }
}

public async Task GetNewFrameAndApplyEffect(IBuffer frameBuffer, Size frameSize)
{
    if (_semaphore.WaitOne(500))
    {
        var scanlineByteSize = (uint)frameSize.Width * 4; // 4 bytes per pixel in Bgra8888 mode
        var bitmap = new Bitmap(frameSize, ColorMode.Bgra8888, scanlineByteSize, frameBuffer);

        try
        {
            if (_blendEffect != null)
            {
                _blendEffect.GlobalAlpha = GlobalAlpha;

                var renderer = new BitmapRenderer(_blendEffect, bitmap);
                await renderer.RenderAsync();
            }
            else
            {
                var renderer = new BitmapRenderer(_cameraPreviewImageSource, bitmap);
                await renderer.RenderAsync();
            }
        }
        catch (Exception ex)
        {
            System.Diagnostics.Debug.WriteLine("RealtimeBlendDemo.GetNewFrameAndApplyEffect(): " + ex.ToString());
        }

        _semaphore.Release();
    }
}

private static async Task<Curve[]> LoadRgbLookupCurves(string path, double outputGain, int outputOffset)
{
    var storageFile = await Package.Current.InstalledLocation.GetFileAsync(path).AsTask().ConfigureAwait(false);

    using (var storageFileImageSource = new StorageFileImageSource(storageFile))
    using (var renderer = new BitmapRenderer(storageFileImageSource) { OutputOption = OutputOption.PreserveAspectRatio })
    using (var tableBitmap = await renderer.RenderAsync().AsTask().ConfigureAwait(false))
    {
        var redCurve = new Curve();
        var greenCurve = new Curve();
        var blueCurve = new Curve();

        var redValues = new Point[256];
        var greenValues = new Point[256];
        var blueValues = new Point[256];

        var table = tableBitmap.Buffers[0].Buffer.ToArray();

        for (int i = 0; i < 256; ++i)
        {
            redValues[i] = new Point(i, Math.Min(255, Math.Max(0, (int)(table[i * 4 + 2] * outputGain + outputOffset))));
            greenValues[i] = new Point(i, Math.Min(255, Math.Max(0, (int)(table[i * 4 + 1] * outputGain + outputOffset))));
            blueValues[i] = new Point(i, Math.Min(255, Math.Max(0, (int)(table[i * 4 + 0] * outputGain + outputOffset))));
        }

        redCurve.Points = redValues;
        greenCurve.Points = greenValues;
        blueCurve.Points = blueValues;

        return new[] { redCurve, greenCurve, blueCurve };
    }
}

public LookupImage(string name)
{
    m_loadingTask = Task.Run(async () =>
    {
        var file = await Package.Current.InstalledLocation.GetFileAsync(name).AsTask().ConfigureAwait(false);

        using (var imageSource = new StorageFileImageSource(file))
        {
            var info = await imageSource.GetInfoAsync().AsTask().ConfigureAwait(false);

            var landscapeBitmap = new Bitmap(info.ImageSize, ColorMode.Bgra8888);
            var portraitBitmap = new Bitmap(new Size(info.ImageSize.Height, info.ImageSize.Width), ColorMode.Bgra8888);

            using (var renderer = new BitmapRenderer(imageSource, landscapeBitmap, OutputOption.Stretch))
            {
                renderer.RenderOptions = RenderOptions.Cpu;
                m_landscapeSource = new BitmapImageSource(await renderer.RenderAsync().AsTask().ConfigureAwait(false));
            }

            using (var renderer = new BitmapRenderer(m_landscapeSource.Rotate(90), portraitBitmap, OutputOption.Stretch))
            {
                renderer.RenderOptions = RenderOptions.Cpu;
                m_portraitSource = new BitmapImageSource(await renderer.RenderAsync().AsTask().ConfigureAwait(false));
            }
        }
    });
}

private void Button_Click(object sender, RoutedEventArgs e)
{
    try
    {
        PhotoChooserTask task = new PhotoChooserTask();
        task.Completed += async (s, res) =>
        {
            if (res.TaskResult == TaskResult.OK)
            {
                using (var source = new StreamImageSource(res.ChosenPhoto))
                using (var renderer = new BitmapRenderer(source, input))
                {
                    await renderer.RenderAsync();
                }

                if (manager != null)
                {
                    manager.Dispose();
                    manager = null;
                }

                manager = new PipelineManager.Manager.PipelineManager(picture);
                GeneratePicture();
            }
        };
        task.Show();
    }
    catch (Exception)
    {
    }
}

/// <summary>
/// Creates an image source of the overlay, specifying the size of the background image it will be used on. The image source will be sized and cropped correctly.
/// </summary>
/// <param name="backgroundSize">The size of the background image.</param>
/// <returns>The constructed overlay image source.</returns>
public async Task<IImageProvider> CreateAsync(Size backgroundSize)
{
    var uriAndRotation = GetUriAndRotation(backgroundSize);
    var file = await Windows.Storage.StorageFile.GetFileFromApplicationUriAsync(uriAndRotation.Item1).AsTask().ConfigureAwait(false);
    var overlayImageSource = new StorageFileImageSource(file);
    var overlayImageInfo = await overlayImageSource.GetInfoAsync().AsTask().ConfigureAwait(false);
    var overlayImageAspectRatio = overlayImageInfo.ImageSize.Width / overlayImageInfo.ImageSize.Height;

    int overlayImageScaledWidth = (int)overlayImageInfo.ImageSize.Width;
    int overlayImageScaledHeight = (int)overlayImageInfo.ImageSize.Height;

    if ((int)backgroundSize.Width > (int)backgroundSize.Height)
    {
        overlayImageScaledHeight = (int)(backgroundSize.Width / overlayImageAspectRatio);
        overlayImageScaledWidth = (int)backgroundSize.Width;
    }
    else if ((int)backgroundSize.Width < (int)backgroundSize.Height)
    {
        overlayImageScaledWidth = (int)(backgroundSize.Height * overlayImageAspectRatio);
        overlayImageScaledHeight = (int)backgroundSize.Height;
    }

    var renderer = new BitmapRenderer(overlayImageSource) { Size = new Size(overlayImageScaledWidth, overlayImageScaledHeight) };
    var overlayBitmap = await renderer.RenderAsync().AsTask().ConfigureAwait(false);
    var overlayBitmapImageSource = new BitmapImageSource(overlayBitmap);

    return Crop(overlayBitmapImageSource, overlayImageInfo, (int)overlayImageInfo.ImageSize.Width, (int)overlayImageInfo.ImageSize.Height);
}

private async Task GetNewFrameAndApplyChosenEffectAsync(IBuffer buffer)
{
    var lineSize = (uint)frameSize.Width * 4;
    var bitmap = new Bitmap(frameSize, ColorMode.Bgra8888, lineSize, buffer);

    IFilter[] filters;

    if (activeFilter == null)
    {
        filters = new IFilter[0];
    }
    else
    {
        filters = new IFilter[] { activeFilter };
    }

    using (FilterEffect fe = new FilterEffect(cameraPreviewImageSource) { Filters = filters })
    using (BitmapRenderer renderer = new BitmapRenderer(fe, bitmap))
    {
        await renderer.RenderAsync();
    }
}

/// <summary>
/// Renders a thumbnail of requested size from the center of the current image with
/// filters applied.
/// </summary>
/// <param name="side">Side length of square thumbnail to render</param>
/// <returns>Rendered thumbnail bitmap</returns>
public async Task<Bitmap> RenderThumbnailBitmapAsync(int side)
{
    Windows.Foundation.Size dimensions = await GetImageSizeAsync();
    int minSide = (int)Math.Min(dimensions.Width, dimensions.Height);

    Windows.Foundation.Rect rect = new Windows.Foundation.Rect()
    {
        Width = minSide,
        Height = minSide,
        X = (dimensions.Width - minSide) / 2,
        Y = (dimensions.Height - minSide) / 2,
    };

    _components.Add(new CropFilter(rect));

    Bitmap bitmap = new Bitmap(new Windows.Foundation.Size(side, side), ColorMode.Ayuv4444);

    using (BufferImageSource source = new BufferImageSource(_buffer))
    using (FilterEffect effect = new FilterEffect(source) { Filters = _components })
    using (BitmapRenderer renderer = new BitmapRenderer(effect, bitmap, OutputOption.Stretch))
    {
        await renderer.RenderAsync();
    }

    _components.RemoveAt(_components.Count - 1);

    return bitmap;
}

public void Process(Bitmap input, Bitmap output, TimeSpan time)
{
    var effect = new FilterEffect();
    effect.Filters = new IFilter[] { new WatercolorFilter() };
    effect.Source = new BitmapImageSource(input);

    var renderer = new BitmapRenderer(effect, output);
    renderer.RenderAsync().AsTask().Wait(); // Async calls must run sync inside Process()
}

public async Task CS_W_MediaReader_LumiaEffect()
{
    using (var mediaReader = await MediaReader.CreateFromPathAsync("ms-appx:///car.mp4", AudioInitialization.Deselected, VideoInitialization.Nv12))
    using (var mediaResult = await mediaReader.VideoStream.ReadAsync())
    {
        var streamProperties = mediaReader.VideoStream.GetCurrentStreamProperties();
        int width = (int)streamProperties.Width;
        int height = (int)streamProperties.Height;
        Assert.AreEqual(320, width);
        Assert.AreEqual(240, height);

        var inputSample = (MediaSample2D)mediaResult.Sample;
        Assert.AreEqual(MediaSample2DFormat.Nv12, inputSample.Format);
        Assert.AreEqual(320, inputSample.Width);
        Assert.AreEqual(240, inputSample.Height);

        using (var outputSample = new MediaSample2D(MediaSample2DFormat.Nv12, width, height))
        {
            Assert.AreEqual(MediaSample2DFormat.Nv12, outputSample.Format);
            Assert.AreEqual(320, outputSample.Width);
            Assert.AreEqual(240, outputSample.Height);

            using (var inputBuffer = inputSample.LockBuffer(BufferAccessMode.Read))
            using (var outputBuffer = outputSample.LockBuffer(BufferAccessMode.Write))
            {
                // Wrap MediaBuffer2D in Bitmap
                var inputBitmap = new Bitmap(
                    new Size(width, height),
                    ColorMode.Yuv420Sp,
                    new uint[] { inputBuffer.Planes[0].Pitch, inputBuffer.Planes[1].Pitch },
                    new IBuffer[] { inputBuffer.Planes[0].Buffer, inputBuffer.Planes[1].Buffer }
                    );

                var outputBitmap = new Bitmap(
                    new Size(width, height),
                    ColorMode.Yuv420Sp,
                    new uint[] { outputBuffer.Planes[0].Pitch, outputBuffer.Planes[1].Pitch },
                    new IBuffer[] { outputBuffer.Planes[0].Buffer, outputBuffer.Planes[1].Buffer }
                    );

                // Apply effect
                var effect = new FilterEffect();
                effect.Filters = new IFilter[] { new WatercolorFilter() };
                effect.Source = new BitmapImageSource(inputBitmap);

                var renderer = new BitmapRenderer(effect, outputBitmap);
                await renderer.RenderAsync();
            }

            // Save the file
            var folder = await KnownFolders.PicturesLibrary.CreateFolderAsync("MediaCaptureReaderTests", CreationCollisionOption.OpenIfExists);
            var file = await folder.CreateFileAsync("CS_W_MediaReader_TestLumiaEffect.jpg", CreationCollisionOption.ReplaceExisting);
            await outputSample.SaveToFileAsync(file, ImageCompression.Jpeg);
            Logger.LogMessage("Saved {0}", file.Path);
        }
    }
}

/// <summary>
/// Renders the IImageProvider into a Bitmap. If the passed Bitmap is non-null, and it matches the passed size and color mode, it will be reused. Otherwise this method creates a new Bitmap.
/// </summary>
/// <param name="imageProvider">The image provider to render the image content of.</param>
/// <param name="bitmap">The Bitmap to reuse, if possible. If null is passed, a new Bitmap is created.</param>
/// <param name="size">The desired size of the rendered image.</param>
/// <param name="colorMode">The desired color mode of the rendered Bitmap.</param>
/// <param name="outputOption">Specifies how to fit the image into the rendered rectangle, if the aspect ratio differs.</param>
/// <returns>A task resulting in either the reused Bitmap, or a new one if necessary.</returns>
public static async Task<Bitmap> GetBitmapAsync(this IImageProvider imageProvider, Bitmap bitmap, Size size, ColorMode colorMode, OutputOption outputOption)
{
    if (bitmap == null || bitmap.Dimensions != size || bitmap.ColorMode != colorMode)
    {
        bitmap = new Bitmap(size, colorMode);
    }

    using (var renderer = new BitmapRenderer(imageProvider, bitmap, outputOption))
    {
        return await renderer.RenderAsync().AsTask().ConfigureAwait(false);
    }
}

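A minimal calling sketch for the extension above, assuming a hypothetical caller that keeps a cached bitmap between renders; the _previewBitmap field, the UpdatePreviewAsync method, and the chosen size and color mode are illustrative and not part of the original sample:

// Hypothetical usage: the cached Bitmap is reused as long as the requested
// size and color mode stay the same; otherwise GetBitmapAsync allocates a new one.
private Bitmap _previewBitmap; // assumed cache field, not from the original sample

private async Task UpdatePreviewAsync(IImageProvider imageProvider)
{
    _previewBitmap = await imageProvider.GetBitmapAsync(
        _previewBitmap,
        new Size(640, 480),
        ColorMode.Bgra8888,
        OutputOption.PreserveAspectRatio);
}
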
public static async Task<IImageProvider> CreateImageSourceFromFile(StorageFile file)
{
    // Method needed, workaround for EXIF orientation bug
    using (var source = new StorageFileImageSource(file))
    using (var renderer = new BitmapRenderer(source) { RenderOptions = RenderOptions.Cpu })
    {
        var bitmap = await renderer.RenderAsync();
        return new BitmapImageSource(bitmap);
    }
}

public async Task GetImageProviderReturnsExpectedResultWhenCreatedWithLambda()
{
    var backgroundImage = new ColorImageSource(BackgroundLayerSize, Colors.AliceBlue);
    var backgroundLayer = new Layer(LayerStyle.Normal(), backgroundImage, BackgroundLayerSize);
    var layer = new AdjustmentLayer(LayerStyle.Normal(), context => CreateTestEffect(context));

    var imageProvider = layer.GetImageProvider(CreateFakeLayerContext(backgroundLayer, backgroundImage, layer));
    ((IImageConsumer)await imageProvider.AsTask()).Source = backgroundImage;

    Assert.IsTrue(imageProvider.IsSynchronous);
    Assert.IsNotNull(imageProvider.Result);

    var bitmapRenderer = new BitmapRenderer(imageProvider.Result);
    var bitmap = await bitmapRenderer.RenderAsync();

    Assert.AreEqual(BackgroundLayerSize, bitmap.Dimensions);
}

public async Task RenderWithBackgroundPassedInConstructor()
{
    var backgroundImage = new ColorImageSource(new Size(100, 100), Color.FromArgb(255, 128, 128, 128));

    var layerList = new LayerList(backgroundImage, backgroundImage.ImageSize, new Layer[]
    {
        new Layer(LayerStyle.Add(), context => new ColorImageSource(context.BackgroundLayer.ImageSize, Color.FromArgb(255, 32, 16, 8)))
    });

    var layersEffect = await layerList.ToImageProvider().AsTask();

    var bitmapRenderer = new BitmapRenderer(layersEffect);
    var renderedBitmap = await bitmapRenderer.RenderAsync();
    var pixels = renderedBitmap.Buffers[0].Buffer.ToArray();

    Assert.AreEqual(136, pixels[0]);
    Assert.AreEqual(144, pixels[1]);
    Assert.AreEqual(160, pixels[2]);
    Assert.AreEqual(255, pixels[3]);
}

public async Task RenderWithBackgroundAndDifferentRenderSizePassedInToImageProvider()
{
    var layerList = new LayerList(new Layer[]
    {
        new Layer(LayerStyle.Add(), context => new ColorImageSource(context.BackgroundLayer.ImageSize, Color.FromArgb(255, 32, 16, 8)))
    });

    var backgroundImage = new ColorImageSource(new Size(100, 100), Color.FromArgb(255, 128, 128, 128));
    var layersEffect = await layerList.ToImageProvider(backgroundImage, backgroundImage.ImageSize, new Size(50, 50)).AsTask();

    var bitmap = new Bitmap(new Size(50, 50), ColorMode.Bgra8888);
    var bitmapRenderer = new BitmapRenderer(layersEffect, bitmap);
    var renderedBitmap = await bitmapRenderer.RenderAsync();
    var pixels = bitmap.Buffers[0].Buffer.ToArray();

    Assert.AreEqual(136, pixels[0]);
    Assert.AreEqual(144, pixels[1]);
    Assert.AreEqual(160, pixels[2]);
    Assert.AreEqual(255, pixels[3]);
}

public async Task GetNewFrameAndApplyEffect(IBuffer frameBuffer, Size frameSize)
{
    if (_semaphore.WaitOne(500))
    {
        _cameraPreviewImageSource.InvalidateLoad();

        var scanlineByteSize = (uint)frameSize.Width * 4; // 4 bytes per pixel in Bgra8888 mode
        var bitmap = new Bitmap(frameSize, ColorMode.Bgra8888, scanlineByteSize, frameBuffer);

        if (_effectIndex >= 0 && _effectIndex < EffectGroup.Count)
        {
            AbstractFilter filter = EffectGroup[_effectIndex];
            await filter.RenderPreview(bitmap);
        }
        else
        {
            var renderer = new BitmapRenderer(_cameraPreviewImageSource, bitmap);
            await renderer.RenderAsync();
        }

        _semaphore.Release();
    }
}

public async Task GetImageProviderReturnsExpectedResultWhenCreatedWithLambdaReturningAsynchronousImageProvider()
{
    var backgroundImage = new ColorImageSource(BackgroundLayerSize, Colors.AliceBlue);
    var backgroundLayer = new Layer(LayerStyle.Normal(), backgroundImage, BackgroundLayerSize);
    var layer = new AdjustmentLayer(LayerStyle.Normal(), context => CreateTestEffectAsync(context));

    var imageProvider = layer.GetImageProvider(CreateFakeLayerContext(backgroundLayer, backgroundImage, layer));
    ((IImageConsumer)await imageProvider.AsTask()).Source = backgroundImage;

    Assert.IsFalse(imageProvider.WasSynchronous);

    var imageProviderResult = await imageProvider.AsTask();
    Assert.IsNotNull(imageProviderResult);

    var bitmapRenderer = new BitmapRenderer(imageProvider.Result);
    var bitmap = await bitmapRenderer.RenderAsync();

    Assert.AreEqual(BackgroundLayerSize, bitmap.Dimensions);
}

async Task createLRPicture(IImageProvider source, ImageProviderInfo info)
{
    HR_LR_Factor = 4096 / Math.Max(info.ImageSize.Width, info.ImageSize.Height);
    if (HR_LR_Factor > 1)
        HR_LR_Factor = 1;

    InputLR = new WriteableBitmap(
        (int)(HR_LR_Factor * info.ImageSize.Width),
        (int)(HR_LR_Factor * info.ImageSize.Height));

    int LRblocksize = 4096 / 4;
    double blocksize = LRblocksize / HR_LR_Factor;

    var bitmapLR = new Bitmap(
        new Size(InputLR.PixelWidth, InputLR.PixelHeight),
        ColorMode.Bgra8888,
        (uint)(4 * InputLR.PixelWidth),
        InputLR.Pixels.AsBuffer());

    var tmpBitmp = new WriteableBitmap(LRblocksize, LRblocksize);

    for (int y = 0; y < 4; ++y)
    {
        var tmpheight = (int)Math.Min((double)LRblocksize, InputLR.PixelHeight - y * LRblocksize);
        if (tmpheight <= 0)
            break;

        for (int x = 0; x < 4; ++x)
        {
            var tmpWidth = (int)Math.Min((double)LRblocksize, InputLR.PixelWidth - x * LRblocksize);
            if (tmpWidth <= 0)
                break;

            var tmp = new Bitmap(bitmapLR, new Rect((double)x * LRblocksize, (double)y * LRblocksize, tmpWidth, tmpheight));

            using (var filter = new FilterEffect(source))
            using (var render = new BitmapRenderer(filter, tmp))
            {
                filter.Filters = new IFilter[]
                {
                    new ReframingFilter(new Rect((double)x * blocksize, (double)y * blocksize, blocksize, blocksize), 0)
                };
                await render.RenderAsync();
            }
        }
    }
}

public virtual async Task RenderPreview(Bitmap bitmap)
{
    try
    {
        Debug.WriteLine(DebugTag + Name + ": Rendering Preview()");

        using (var renderer = new BitmapRenderer(_realTimeEffect, bitmap))
        {
            await renderer.RenderAsync();
        }

        Debug.WriteLine(DebugTag + Name + ": RenderPreview Done()");
    }
    catch (Exception e)
    {
        Debug.WriteLine(DebugTag + Name + ": RenderPreview(): " + e.Message);
    }
}