/// <summary>
/// Renders <paramref name="file"/> to a JPEG (optionally resized toward an
/// Instagram-accepted aspect ratio) in the output folder, then hands the result
/// to SaveToImageX2. On any failure the original file is returned unchanged.
/// </summary>
/// <param name="file">Source image file.</param>
/// <param name="calculateMax">When true, scale the render to the computed desired size; when false, render at the source's native size.</param>
/// <param name="isCached">Selects the output folder (cached vs. permanent) via GetOutputFolder.</param>
public async Task<StorageFile> SaveToImageX(StorageFile file, bool calculateMax = true, bool isCached = true)
{
    try
    {
        Helper.CreateCachedFolder();
        // Server rejection this works around:
        // {"message": "Uploaded image isn't in an allowed aspect ratio", "status": "fail"}
        using (var source = new StorageFileImageSource(file))
        using (var renderer = new JpegRenderer(source, JpegOutputColorMode.Yuv422, OutputOption.Stretch))
        {
            var info = await source.GetInfoAsync();
            var size = AspectRatioHelper.GetDesireSize(info.ImageSize.Width, info.ImageSize.Height, info.ImageSize.Height < info.ImageSize.Width);
            // Ratio of the long side to the short side (always >= 1); both target
            // dimensions are divided by it below.
            var ratio = info.ImageSize.Height > info.ImageSize.Width
                ? info.ImageSize.Height / info.ImageSize.Width
                : info.ImageSize.Width / info.ImageSize.Height;
            var h = (size.Height / (float)ratio);
            var w = (size.Width / (float)ratio);
            if (calculateMax)
                renderer.Size = new Size(Math.Round(w), Math.Round(h));
            else
                renderer.Size = new Size(info.ImageSize.Width, info.ImageSize.Height);
            var folder = await GetOutputFolder(isCached);
            var saveAsTarget = await folder.CreateFileAsync(Helper.GenerateString("IMG") + ".jpg", CreationCollisionOption.GenerateUniqueName);
            var render = await renderer.RenderAsync();
            using (var fs = await saveAsTarget.OpenAsync(FileAccessMode.ReadWrite))
            {
                await fs.WriteAsync(render);
                await fs.FlushAsync();
                return await SaveToImageX2(saveAsTarget);
            }
        }
    }
    catch (Exception ex)
    {
        // BUG FIX: previously an empty catch swallowed every failure with no
        // trace at all. The best-effort fallback (return the original file)
        // is kept, but the failure is now at least logged.
        System.Diagnostics.Debug.WriteLine(ex.Message);
    }
    return file;
}
// Loads a 256-entry RGB lookup table from an image file and converts each
// colour channel into a Curve, applying the given gain and offset and
// clamping results to the 0..255 byte range.
private static async Task <Curve[]> LoadRgbLookupCurves(string path, double outputGain, int outputOffset)
{
    var lookupFile = await Package.Current.InstalledLocation.GetFileAsync(path).AsTask().ConfigureAwait(false);
    using (var lookupSource = new StorageFileImageSource(lookupFile))
    using (var lookupRenderer = new BitmapRenderer(lookupSource) { OutputOption = OutputOption.PreserveAspectRatio })
    using (var lookupBitmap = await lookupRenderer.RenderAsync().AsTask().ConfigureAwait(false))
    {
        // Pixel layout is 4 bytes per entry; offset 2 = red, 1 = green, 0 = blue.
        var table = lookupBitmap.Buffers[0].Buffer.ToArray();

        Point[] BuildChannel(int byteOffset)
        {
            var points = new Point[256];
            for (int i = 0; i < 256; ++i)
            {
                var level = (int)(table[i * 4 + byteOffset] * outputGain + outputOffset);
                points[i] = new Point(i, Math.Min(255, Math.Max(0, level)));
            }
            return points;
        }

        return new[]
        {
            new Curve { Points = BuildChannel(2) },
            new Curve { Points = BuildChannel(1) },
            new Curve { Points = BuildChannel(0) },
        };
    }
}
// Loads the named lookup image and pre-renders it in both landscape and
// portrait orientations, so either variant can be used later without another
// render pass. The work runs on a background thread; m_loadingTask lets
// callers await completion before touching m_landscapeSource/m_portraitSource.
public LookupImage(string name)
{
    m_loadingTask = Task.Run(async() =>
    {
        var file = await Package.Current.InstalledLocation.GetFileAsync(name).AsTask().ConfigureAwait(false);
        using (var imageSource = new StorageFileImageSource(file))
        {
            var info = await imageSource.GetInfoAsync().AsTask().ConfigureAwait(false);
            // Landscape keeps the native size; portrait swaps width and height.
            var landscapeBitmap = new Bitmap(info.ImageSize, ColorMode.Bgra8888);
            var portraitBitmap = new Bitmap(new Size(info.ImageSize.Height, info.ImageSize.Width), ColorMode.Bgra8888);
            using (var renderer = new BitmapRenderer(imageSource, landscapeBitmap, OutputOption.Stretch))
            {
                // CPU rendering is forced for both passes.
                renderer.RenderOptions = RenderOptions.Cpu;
                m_landscapeSource = new BitmapImageSource(await renderer.RenderAsync().AsTask().ConfigureAwait(false));
            }
            // Portrait variant is produced by rotating the already-rendered
            // landscape source 90 degrees, not by re-reading the file.
            using (var renderer = new BitmapRenderer(m_landscapeSource.Rotate(90), portraitBitmap, OutputOption.Stretch))
            {
                renderer.RenderOptions = RenderOptions.Cpu;
                m_portraitSource = new BitmapImageSource(await renderer.RenderAsync().AsTask().ConfigureAwait(false));
            }
        }
    });
}
/// <summary>
/// Creates an image source of the overlay, specifying the size of the background
/// image it will be used on. The image source will be sized and cropped correctly.
/// </summary>
/// <param name="backgroundSize">The size of the background image.</param>
/// <returns>The constructed overlay image source.</returns>
public async Task <IImageProvider> CreateAsync(Size backgroundSize)
{
    var uriAndRotation = GetUriAndRotation(backgroundSize);
    var file = await Windows.Storage.StorageFile.GetFileFromApplicationUriAsync(uriAndRotation.Item1).AsTask().ConfigureAwait(false);
    var overlayImageSource = new StorageFileImageSource(file);
    var overlayImageInfo = await overlayImageSource.GetInfoAsync().AsTask().ConfigureAwait(false);
    var overlayImageAspectRatio = overlayImageInfo.ImageSize.Width / overlayImageInfo.ImageSize.Height;
    // Start from the overlay's native size, then scale it so its longer edge
    // matches the background's longer edge, preserving the overlay's aspect ratio.
    // A square background (width == height) falls through at native size.
    int overlayImageScaledWidth = (int)overlayImageInfo.ImageSize.Width;
    int overlayImageScaledHeight = (int)overlayImageInfo.ImageSize.Height;
    if ((int)backgroundSize.Width > (int)backgroundSize.Height)
    {
        overlayImageScaledHeight = (int)(backgroundSize.Width / overlayImageAspectRatio);
        overlayImageScaledWidth = (int)(backgroundSize.Width);
    }
    else if ((int)backgroundSize.Width < (int)backgroundSize.Height)
    {
        overlayImageScaledWidth = (int)(backgroundSize.Height * overlayImageAspectRatio);
        overlayImageScaledHeight = (int)(backgroundSize.Height);
    }
    var renderer = new BitmapRenderer(overlayImageSource) { Size = new Size(overlayImageScaledWidth, overlayImageScaledHeight) };
    var overlayBitmap = await renderer.RenderAsync().AsTask().ConfigureAwait(false);
    var overlayBitmapImageSource = new BitmapImageSource(overlayBitmap);
    // NOTE(review): Crop receives the overlay's ORIGINAL (unscaled) dimensions,
    // not the scaled ones computed above — confirm Crop() accounts for that.
    return (Crop(overlayBitmapImageSource, overlayImageInfo, (int)overlayImageInfo.ImageSize.Width, (int)overlayImageInfo.ImageSize.Height));
}
// Applies a despeckle (noise-removal) effect on top of the standard 0.6
// contrast boost and saves the result via the common save path.
// The 0-100 slider value maps onto the four despeckle levels:
// 0-25 -> Minimum, 26-50 -> Low, 51-75 -> High, 76+ -> Maximum.
async Task <Uri> Despeckle(int EffectPercentage)
{
    var level = EffectPercentage <= 25 ? DespeckleLevel.Minimum
              : EffectPercentage <= 50 ? DespeckleLevel.Low
              : EffectPercentage <= 75 ? DespeckleLevel.High
              : DespeckleLevel.Maximum;
    using (var imageSource = new StorageFileImageSource(imageStorageFile))
    using (var baseContrast = new ContrastEffect(imageSource) { Level = 0.6 })
    using (var despeckleEffect = new DespeckleEffect(baseContrast) { DespeckleLevel = level })
    {
        LastEffect = despeckleEffect;
        return await SaveToImage();
    }
}
// Applies a noise (grain) effect on top of the standard 0.6 contrast boost
// and saves the result via the common save path.
// Slider mapping: <=35 -> Minimum, 36-70 -> Medium, >70 -> Maximum.
async Task <Uri> Noise(int EffectPercentage)
{
    var noiseLevel = EffectPercentage <= 35 ? NoiseLevel.Minimum
                   : EffectPercentage > 70 ? NoiseLevel.Maximum
                   : NoiseLevel.Medium;
    using (var imageSource = new StorageFileImageSource(imageStorageFile))
    using (var baseContrast = new ContrastEffect(imageSource) { Level = 0.6 })
    using (var noiseEffect = new NoiseEffect(baseContrast) { Level = noiseLevel })
    {
        LastEffect = noiseEffect;
        return await SaveToImage();
    }
}
// Reads a 256-entry lookup image and builds one Curve per RGB channel.
// Each entry is scaled by outputGain, shifted by outputOffset and clamped
// to 0..255. NOTE(review): this duplicates LoadRgbLookupCurves defined
// earlier in this file — consider consolidating.
private static async Task<Curve[]> LoadRgbLookupCurves(string path, double outputGain, int outputOffset)
{
    var lookupFile = await Package.Current.InstalledLocation.GetFileAsync(path).AsTask().ConfigureAwait(false);
    using (var lookupSource = new StorageFileImageSource(lookupFile))
    using (var bitmapRenderer = new BitmapRenderer(lookupSource) { OutputOption = OutputOption.PreserveAspectRatio })
    using (var lookupBitmap = await bitmapRenderer.RenderAsync().AsTask().ConfigureAwait(false))
    {
        // Cast first, then clamp into the valid byte range.
        int Clamp(double value) => Math.Min(255, Math.Max(0, (int)value));

        // BGRA layout: byte offset 0 = blue, 1 = green, 2 = red.
        var pixels = lookupBitmap.Buffers[0].Buffer.ToArray();
        var redPoints = new Point[256];
        var greenPoints = new Point[256];
        var bluePoints = new Point[256];
        for (var i = 0; i < 256; ++i)
        {
            var baseIndex = i * 4;
            redPoints[i] = new Point(i, Clamp(pixels[baseIndex + 2] * outputGain + outputOffset));
            greenPoints[i] = new Point(i, Clamp(pixels[baseIndex + 1] * outputGain + outputOffset));
            bluePoints[i] = new Point(i, Clamp(pixels[baseIndex + 0] * outputGain + outputOffset));
        }
        var redCurve = new Curve { Points = redPoints };
        var greenCurve = new Curve { Points = greenPoints };
        var blueCurve = new Curve { Points = bluePoints };
        return new[] { redCurve, greenCurve, blueCurve };
    }
}
// ---- Commented-out UI handlers kept from a previous iteration ----
// private void SliderContrast_ValueChanged(object sender, RangeBaseValueChangedEventArgs e)
// {
//     if (EffectIndex is -1) { return; }
//     int value = Convert.ToInt32(SliderContrast.Value);
//     if (value is 0) { NoFilter(); return; }
//     switch (EffectIndex)
//     {
//         case 0: /*add*/ break;
//         case 1: Brightness(value); break;
//         case 2: Contrast(value); break;
//         case 3: Temperature(value); break;
//         case 4: ColorBoost(value); break;
//         case 5: /*Add*/ break;
//         case 6: /*add*/ break;
//     }
// }
// private void EditsList_ItemClick(object sender, ItemClickEventArgs e)
// {
//     var data = e.ClickedItem as GridViewEditItem;
//     EffectIndex = data.Target;
//     _NameEffect.Text = data.Text;
//     EditRoot.Visibility = Visibility.Visible;
// }
// private void Cancel_Click(object sender, RoutedEventArgs e)
// {
//     EditRoot.Visibility = Visibility.Collapsed;
// }
// private void Done_Click(object sender, RoutedEventArgs e)
// {
//     EditRoot.Visibility = Visibility.Collapsed;
// }

// Renders the current LastEffect chain to a JPEG in local app storage and
// returns its ms-appdata URI. The image info (and therefore the aspect-ratio
// check) is read from the ORIGINAL source file, not from LastEffect.
async Task <Uri> SaveToImage()
{
    using (var source = new StorageFileImageSource(imageStorageFile))
    using (var renderer = new JpegRenderer(LastEffect, JpegOutputColorMode.Yuv420))
    {
        var info = await source.GetInfoAsync();
        var R = AspectRatioHelper.Aspect(Convert.ToInt32(info.ImageSize.Width), Convert.ToInt32(info.ImageSize.Height));
        if (!SupportedAspectRatio(R))
        {
            // Unsupported ratio: render at max(width, height) on BOTH axes,
            // i.e. force a square output.
            var max = Math.Max(info.ImageSize.Height, info.ImageSize.Width);
            renderer.Size = new Size(max, max);
        }
        // NOTE(review): extension is ".Jpg" (capital J) — confirm downstream
        // consumers treat extensions case-insensitively.
        var saveAsTarget = await ApplicationData.Current.LocalFolder.CreateFileAsync("file.Jpg", CreationCollisionOption.GenerateUniqueName);
        var render = await renderer.RenderAsync();
        using (var fs = await saveAsTarget.OpenAsync(FileAccessMode.ReadWrite))
        {
            await fs.WriteAsync(render);
            await fs.FlushAsync();
            return (new Uri($"ms-appdata:///local/{saveAsTarget.Name}", UriKind.RelativeOrAbsolute));
        }
    }
}
// Uploads the selected filtered photo to Instagram, then lets the user save a
// copy of it through a file picker. async void is used here because this is a
// top-level event handler.
async void Next_Click(object sender, RoutedEventArgs e)
{
    // Width of Photo should be 1080 at last
    // photo that has a width between 320 and 1080 pixels,
    // photo's aspect ratio is between 1.91:1 and 4:5 (a height between 566 and 1350 pixels with a width of 1080 pixels)
    using (var source = new StorageFileImageSource(imageStorageFile))
    {
        var size = (await source.GetInfoAsync()).ImageSize;
        // NOTE(review): dimensions come from the ORIGINAL file while the
        // uploaded path is the filtered bitmap — confirm they always match.
        // The upload result `res` is never checked; failures are silent.
        var res = await AppCore.InstaApi.UploadPhotoAsync(
            new InstaSharper.Classes.Models.InstaImage((FiltersList.SelectedItem as FilterListItem).bitmapSource.LocalPath, (int)size.Width, (int)size.Height),
            "#تست #موقت");
    }
    // Pick the file to save: the untouched original when no filter is
    // selected, otherwise the filtered bitmap.
    StorageFile F2S = null;
    if (FiltersList.SelectedItem == null) { F2S = imageStorageFile; }
    else { F2S = await StorageFile.GetFileFromApplicationUriAsync((FiltersList.SelectedItem as FilterListItem).bitmapSource); }
    var fsp = new FileSavePicker();
    fsp.FileTypeChoices.Add(".jpg", new List <string> { ".jpg" });
    fsp.SuggestedFileName = "WinGoTag";
    fsp.SuggestedStartLocation = PickerLocationId.PicturesLibrary;
    var fs = await fsp.PickSaveFileAsync();
    // User cancelled the picker — nothing more to do.
    if (fs == null) { return; }
    await F2S.CopyAndReplaceAsync(fs);
    // ---- earlier blur/upload experiment kept for reference ----
    // using (var source = new StorageFileImageSource(imageStorageFile))
    // using (var contrastEffect = new BlurEffect(source) { KernelSize = 40 })
    // using (var renderer = new JpegRenderer(contrastEffect, JpegOutputColorMode.Yuv420))
    // {
    //     var info = await source.GetInfoAsync();
    //     var saveAsTarget = await ApplicationData.Current.LocalFolder.CreateFileAsync("TempImage1.Jpg", CreationCollisionOption.OpenIfExists);
    //     var render = await renderer.RenderAsync();
    //     using (var fs = await saveAsTarget.OpenAsync(FileAccessMode.ReadWrite))
    //     {
    //         await fs.WriteAsync(render);
    //         await fs.FlushAsync();
    //     }
    // }
    // var res = await AppCore.InstaApi.UploadPhotoAsync(new InstaSharper.Classes.Models.InstaImage()
    // {
    //     URI = new Uri("ms-appdata:///TempImage1.Jpg", UriKind.Absolute).LocalPath,
    //     Width = 391,
    //     Height = 428
    // }, "از بیرون تحریم؛ از داخل فیلتر :|");
}
// "No filter": route the untouched source image through the common save path.
async Task <Uri> NoFilter()
{
    using (var untouched = new StorageFileImageSource(imageStorageFile))
    {
        LastEffect = untouched;
        var savedUri = await SaveToImage();
        return savedUri;
    }
}
// Converts the current image to grayscale and saves the result.
async Task <Uri> Grayscale()
{
    using (var imageSource = new StorageFileImageSource(imageStorageFile))
    using (var grayscaleEffect = new GrayscaleEffect(imageSource))
    {
        LastEffect = grayscaleEffect;
        var savedUri = await SaveToImage();
        return savedUri;
    }
}
// Swaps the blend effect's foreground layer for the named image under /Images.
// Does nothing when the current effect is not a BlendEffect.
private async void LoadForgroundSource(string forgroundImageName)
{
    if (Effect is BlendEffect blendEffect)
    {
        var imageUri = new System.Uri(string.Format("ms-appx:///Images/{0}", forgroundImageName));
        var file = await StorageFile.GetFileFromApplicationUriAsync(imageUri);
        blendEffect.ForegroundSource = new StorageFileImageSource(file);
    }
}
// Builds a lens-blur effect whose blur map comes from the bundled
// LensBlurMask.jpg, with a heart-shaped bokeh kernel of radius 15.
// NOTE: sourceSize and renderSize are unused; kept for interface compatibility.
private async Task<IImageProvider> GetEffectTaskAsync(IImageProvider source, Size sourceSize, Size renderSize)
{
    var maskFile = await StorageFile.GetFileFromApplicationUriAsync(new Uri("ms-appx:///images/LensBlurMask.jpg"));
    var maskSource = new StorageFileImageSource(maskFile);
    return new LensBlurEffect(source, maskSource)
    {
        FocusAreaEdgeMirroring = LensBlurFocusAreaEdgeMirroring.Off,
        PointLightStrength = 10,
        Kernels = new LensBlurPredefinedKernel[] { new LensBlurPredefinedKernel(LensBlurPredefinedKernelShape.Heart, 15) },
    };
}
// Builds a lens-blur effect using the bundled LensBlurMask.jpg as the blur
// map. NOTE(review): duplicate of GetEffectTaskAsync elsewhere in this file;
// sourceSize and renderSize are unused, kept for interface compatibility.
private async Task <IImageProvider> GetEffectTaskAsync(IImageProvider source, Size sourceSize, Size renderSize)
{
    var blurMapFile = await StorageFile.GetFileFromApplicationUriAsync(new Uri("ms-appx:///images/LensBlurMask.jpg"));
    var blurMapSource = new StorageFileImageSource(blurMapFile);
    var effect = new LensBlurEffect(source, blurMapSource);
    effect.FocusAreaEdgeMirroring = LensBlurFocusAreaEdgeMirroring.Off;
    effect.PointLightStrength = 10;
    // Heart-shaped bokeh kernel, radius 15.
    effect.Kernels = new LensBlurPredefinedKernel[] { new LensBlurPredefinedKernel(LensBlurPredefinedKernelShape.Heart, 15) };
    return (effect);
}
// Applies a contrast adjustment and saves the result.
// EffectPercentage is a 0-100 slider value mapped onto the effect's Level.
async Task <Uri> Contrast(int EffectPercentage)
{
    using (var source = new StorageFileImageSource(imageStorageFile))
    // BUG FIX: `EffectPercentage / 100` was INTEGER division, truncating every
    // value below 100 to a level of 0 (no visible change). Divide by 100.0.
    using (var contrastEffect = new ContrastEffect(source) { Level = EffectPercentage / 100.0 })
    {
        LastEffect = contrastEffect;
        return await SaveToImage();
    }
}
// Applies a fog effect and saves the result.
// The original also awaited GetInfoAsync() here, but its result was never
// used, so that redundant call has been removed.
async Task <Uri> Fog()
{
    using (var source = new StorageFileImageSource(imageStorageFile))
    using (var fogEffect = new FogEffect(source))
    {
        LastEffect = fogEffect;
        return await SaveToImage();
    }
}
// Synchronously loads the bundled image sources used by the app.
// NOTE(review): this blocks the calling thread on async file I/O; if called
// from a UI thread with a synchronization context it risks deadlock —
// consider converting to an async initializer.
private static void LoadResources()
{
    // IMPROVED: GetAwaiter().GetResult() instead of .Result, so a failure
    // surfaces as the original exception rather than an AggregateException.
    var defaultImageFile = ResourceFolder.GetFileAsync(@"Assets\DefaultImage.jpg").AsTask().GetAwaiter().GetResult();
    DefaultImage = new StorageFileImageSource(defaultImageFile);
    var ntkLogoFile = ResourceFolder.GetFileAsync(@"Assets\NTKLogo.png").AsTask().GetAwaiter().GetResult();
    NTKLogo = new StorageFileImageSource(ntkLogoFile);
    var bozjakHeadshotFile = ResourceFolder.GetFileAsync(@"Assets\Bozjak_HeadShot_2015.png").AsTask().GetAwaiter().GetResult();
    BozjakHeadshot = new StorageFileImageSource(bozjakHeadshotFile);
}
// Applies a sepia tone and saves the result.
// The original also awaited GetInfoAsync() here, but its result was never
// used, so that redundant call has been removed.
async Task <Uri> Sepia()
{
    using (var source = new StorageFileImageSource(imageStorageFile))
    using (var sepiaEffect = new Lumia.Imaging.Artistic.SepiaEffect(source))
    {
        LastEffect = sepiaEffect;
        return await SaveToImage();
    }
}
// Forces a CPU render of the file into a bitmap-backed image source.
// Method needed as a workaround for an EXIF orientation bug when using
// StorageFileImageSource directly.
public static async Task <IImageProvider> CreateImageSourceFromFile(StorageFile file)
{
    using (var fileSource = new StorageFileImageSource(file))
    using (var cpuRenderer = new BitmapRenderer(fileSource) { RenderOptions = RenderOptions.Cpu })
    {
        var renderedBitmap = await cpuRenderer.RenderAsync();
        return new BitmapImageSource(renderedBitmap);
    }
}
/// <summary>
/// Apply the chosen filter(s) to App.ChosenPhoto and return the rendered bitmap.
/// </summary>
/// <param name="sampleEffect">Filters to apply, in order.</param>
/// <returns>The rendered bitmap, or null when no photo is chosen or a render is already in progress.</returns>
public async Task <WriteableBitmap> ApplyBasicFilter(List <IFilter> sampleEffect)
{
    if (App.ChosenPhoto == null || _rendering)
    {
        return (null);
    }
    _rendering = true;
    if (IsRenderingChanged != null)
    {
        IsRenderingChanged(this, true);
    }
    WriteableBitmap target = null;
    try
    {
        var props = await App.ChosenPhoto.Properties.GetImagePropertiesAsync();
        target = new WriteableBitmap((int)props.Width, (int)props.Height);
        try
        {
            // Create a source to read the image from PhotoResult stream
            using (var source = new StorageFileImageSource(App.ChosenPhoto))
            using (var filters = new FilterEffect(source))
            {
                filters.Filters = sampleEffect.ToArray();
                // Create a new renderer which outputs WriteableBitmaps
                using (var renderer = new WriteableBitmapRenderer(filters, target))
                {
                    // Render the image with the filter
                    await renderer.RenderAsync();
                }
            }
        }
        catch (Exception ex)
        {
            System.Diagnostics.Debug.WriteLine(ex.Message);
        }
    }
    finally
    {
        // BUG FIX: GetImagePropertiesAsync ran after _rendering was set but
        // OUTSIDE any try; an exception there left _rendering stuck at true,
        // permanently blocking further renders. finally guarantees the reset
        // and the IsRenderingChanged(false) notification.
        _rendering = false;
        if (IsRenderingChanged != null)
        {
            IsRenderingChanged(this, false);
        }
    }
    return (target);
}
// Applies the "big nose" warp distortion and saves the result.
// The original also awaited GetInfoAsync() here, but its result was never
// used, so that redundant call has been removed.
async Task <Uri> BigNose()
{
    using (var source = new StorageFileImageSource(imageStorageFile))
    using (var warpEffect = new WarpingEffect(source) { WarpMode = WarpMode.BigNose })
    {
        LastEffect = warpEffect;
        return await SaveToImage();
    }
}
// Applies a gray pencil-sketch effect and saves the result.
// The original also awaited GetInfoAsync() here, but its result was never
// used, so that redundant call has been removed.
async Task <Uri> GraySketch()
{
    using (var source = new StorageFileImageSource(imageStorageFile))
    using (var sketchEffect = new SketchEffect(source) { SketchMode = SketchMode.Gray })
    {
        LastEffect = sketchEffect;
        return await SaveToImage();
    }
}
// Applies an oil-painting effect with a medium brush size and saves the result.
// The original also awaited GetInfoAsync() here, but its result was never
// used, so that redundant call has been removed.
async Task <Uri> Oil()
{
    using (var source = new StorageFileImageSource(imageStorageFile))
    using (var oilyEffect = new OilyEffect(source) { OilBrushSize = OilBrushSize.Medium })
    {
        LastEffect = oilyEffect;
        return await SaveToImage();
    }
}
// Applies a yellow-styled lomography effect and saves the result.
// The original also awaited GetInfoAsync() here, but its result was never
// used, so that redundant call has been removed.
async Task <Uri> LomoYellow()
{
    using (var source = new StorageFileImageSource(imageStorageFile))
    using (var lomoEffect = new LomoEffect(source) { LomoStyle = LomoStyle.Yellow })
    {
        LastEffect = lomoEffect;
        return await SaveToImage();
    }
}
// Applies a green-styled lomography effect and saves the result.
// The original also awaited GetInfoAsync() here, but its result was never
// used, so that redundant call has been removed.
async Task <Uri> LomoGreen()
{
    using (var source = new StorageFileImageSource(imageStorageFile))
    using (var lomoEffect = new Lumia.Imaging.Artistic.LomoEffect(source) { LomoStyle = LomoStyle.Green })
    {
        LastEffect = lomoEffect;
        return await SaveToImage();
    }
}
// Adjusts the red/green/blue channels independently (each a 0-100 slider
// percentage) on top of the standard 0.6 contrast boost, and saves the result.
async Task <Uri> ColorAdjust(int RedPercentage, int GreenPercentage, int BluePercentage)
{
    using (var source = new StorageFileImageSource(imageStorageFile))
    using (var contrastEffect = new ContrastEffect(source) { Level = 0.6 })
    // BUG FIX: all three channel values used INTEGER division (x / 100),
    // which truncated every percentage below 100 to 0. Divide by 100.0.
    using (var colorAdjustEffect = new ColorAdjustEffect(contrastEffect)
    {
        Blue = BluePercentage / 100.0,
        Green = GreenPercentage / 100.0,
        Red = RedPercentage / 100.0
    })
    {
        LastEffect = colorAdjustEffect;
        return await SaveToImage();
    }
}
// Shines a spotlight centred on the image and saves the result.
// The radius tracks the image width (half width minus a 100px margin).
async Task <Uri> SpotlightEffect()
{
    using (var imageSource = new StorageFileImageSource(imageStorageFile))
    {
        var info = await imageSource.GetInfoAsync();
        var centre = new Point((info.ImageSize.Width / 2), (info.ImageSize.Height / 2));
        var radius = (int)((info.ImageSize.Width / 2) - 100);
        using (var spotlight = new SpotlightEffect(imageSource)
        {
            Position = centre,
            Radius = radius,
            TransitionSize = 0.8
        })
        {
            LastEffect = spotlight;
            return await SaveToImage();
        }
    }
}
// Posterizes the image to 10 values per colour component and saves the result.
// The original also awaited GetInfoAsync() here, but its result was never
// used, so that redundant call has been removed.
async Task <Uri> Posterize()
{
    using (var source = new StorageFileImageSource(imageStorageFile))
    using (var posterizeEffect = new PosterizeEffect(source) { ColorComponentValueCount = 10 })
    {
        LastEffect = posterizeEffect;
        return await SaveToImage();
    }
}
// Applies a box blur whose kernel size tracks the slider value directly,
// layered on top of the standard 0.6 contrast boost, then saves the result.
async Task <Uri> SqureBlur(int EffectPercentage)
{
    using (var imageSource = new StorageFileImageSource(imageStorageFile))
    using (var baseContrast = new ContrastEffect(imageSource) { Level = 0.6 })
    using (var blurEffect = new BlurEffect(baseContrast) { KernelSize = EffectPercentage })
    {
        LastEffect = blurEffect;
        var savedUri = await SaveToImage();
        return savedUri;
    }
}
// Applies automatic local boost (0-100 slider mapped onto Level) on top of
// the standard 0.6 contrast boost, and saves the result.
async Task <Uri> LocalBoost(int EffectPercentage)
{
    using (var source = new StorageFileImageSource(imageStorageFile))
    using (var contrastEffect = new ContrastEffect(source) { Level = 0.6 })
    // BUG FIX: `EffectPercentage / 100` was INTEGER division, truncating every
    // value below 100 to a level of 0. Divide by 100.0.
    using (var boostEffect = new LocalBoostAutomaticEffect(contrastEffect) { Level = EffectPercentage / 100.0 })
    {
        LastEffect = boostEffect;
        return await SaveToImage();
    }
}
// Applies a colour-temperature shift (0-100 slider mapped onto Temperature)
// on top of the standard 0.6 contrast boost, and saves the result.
async Task <Uri> Temperature(int EffectPercentage)
{
    using (var source = new StorageFileImageSource(imageStorageFile))
    using (var contrastEffect = new ContrastEffect(source) { Level = 0.6 })
    // BUG FIX: `EffectPercentage / 100` was INTEGER division, truncating every
    // value below 100 to a temperature of 0 (no change). Divide by 100.0.
    using (var temperatureEffect = new TemperatureAndTintEffect(contrastEffect) { Temperature = EffectPercentage / 100.0 })
    {
        LastEffect = temperatureEffect;
        return await SaveToImage();
    }
}
// Applies a natural-mode exposure adjustment (slider value scaled by 1/67
// into Gain) on top of the standard 0.6 contrast boost, and saves the result.
async Task <Uri> Exposure(int EffectPercentage)
{
    using (var source = new StorageFileImageSource(imageStorageFile))
    using (var contrastEffect = new ContrastEffect(source) { Level = 0.6 })
    // BUG FIX: `EffectPercentage / 67` was INTEGER division, so gain only
    // changed in whole-number steps at 67 and 134. Divide by 67.0 for a
    // smooth fractional gain.
    using (var exposureEffect = new ExposureEffect(contrastEffect) { Gain = EffectPercentage / 67.0, ExposureMode = ExposureMode.Natural })
    {
        LastEffect = exposureEffect;
        return await SaveToImage();
    }
}
// Page-load handler: shows the back button, loads the bundled default image,
// applies a sepia effect and caches the rendered SoftwareBitmap for the
// Win2D canvas to draw. async void is used because this is an event handler.
private async void OnPageLoaded(object sender, RoutedEventArgs e)
{
    SystemNavigationManager.GetForCurrentView().AppViewBackButtonVisibility = AppViewBackButtonVisibility.Visible;
    var file = await StorageFile.GetFileFromApplicationUriAsync(new System.Uri("ms-appx:///Assets/defaultImage.jpg"));
    var storageFileImageSource = new StorageFileImageSource(file);
    // Load the resource up front to learn the image size before rendering.
    var asyncImageResource = storageFileImageSource as IAsyncImageResource;
    var imageResource = await asyncImageResource.LoadAsync();
    m_imageSize = imageResource.ImageSize;
    var sepiaEffect = new Lumia.Imaging.Artistic.SepiaEffect(storageFileImageSource);
    SoftwareBitmapRenderer softwareBitmapRenderer = new SoftwareBitmapRenderer(sepiaEffect);
    m_sepiaEffectSoftwareBitmap = await softwareBitmapRenderer.RenderAsync();
    // Trigger a redraw so the canvas picks up the freshly rendered bitmap.
    m_canvasControl.Invalidate();
}
// Verifies that ZXing decodes the expected barcode value from the input image
// both in its native Bgra8 colorspace and after conversion to Nv12 (Yuv420Sp),
// the colorspace typically produced by cameras.
public async Task CS_W_ZX_DetectCode(string filename, BarcodeFormat barcodeFormat, string barcodeValue)
{
    // Load a bitmap in Bgra8 colorspace
    var file = await StorageFile.GetFileFromApplicationUriAsync(new Uri("ms-appx:///Input/" + filename));
    var source = new StorageFileImageSource(file);
    Bitmap bitmapBgra8 = await source.GetBitmapAsync(null, OutputOption.PreserveAspectRatio);
    Assert.AreEqual(ColorMode.Bgra8888, bitmapBgra8.ColorMode);
    // Convert the bitmap to Nv12 colorspace (colorspace used when decoding barcode from cameras)
    var bitmapYuv = new Bitmap(bitmapBgra8.Dimensions, ColorMode.Yuv420Sp);
    bitmapYuv.ConvertFrom(bitmapBgra8);
    // Decode the barcode, restricting the search to the expected format.
    var reader = new BarcodeReader { Options = new DecodingOptions { PossibleFormats = new BarcodeFormat[] { barcodeFormat }, TryHarder = true } };
    Result resultBgra8 = reader.Decode(
        bitmapBgra8.Buffers[0].Buffer.ToArray(),
        (int)bitmapBgra8.Dimensions.Width,
        (int)bitmapBgra8.Dimensions.Height,
        BitmapFormat.BGRA32
        );
    // For Nv12, only the first (luma) plane is passed, decoded as Gray8.
    Result resultYuv = reader.Decode(
        bitmapYuv.Buffers[0].Buffer.ToArray(),
        (int)bitmapYuv.Buffers[0].Pitch, // Should be width here but I haven't found a way to pass both width and stride to ZXing yet
        (int)bitmapYuv.Dimensions.Height,
        BitmapFormat.Gray8
        );
    Assert.IsNotNull(resultBgra8, "Decoding barcode in Bgra8 colorspace failed");
    Assert.AreEqual(barcodeValue, resultBgra8.Text);
    Assert.IsNotNull(resultYuv, "Decoding barcode in Nv12 colorspace failed");
    Assert.AreEqual(barcodeValue, resultYuv.Text);
}
// Builds the video effect definition matching the current selection in the
// EffectType combo box. Each case demonstrates a different effect mechanism
// (Lumia filter chains, pixel shaders, custom definitions, Win2D canvas).
private async Task<IVideoEffectDefinition> CreateEffectDefinitionAsync(VideoEncodingProperties props)
{
    switch (EffectType.SelectedIndex)
    {
        case 0:
            // Lumia filter chain: antique look plus a horizontal flip.
            return new LumiaEffectDefinition(() =>
            {
                return new IFilter[] { new AntiqueFilter(), new FlipFilter(FlipMode.Horizontal) };
            });
        case 1:
            // Compiled pixel shaders applied to the NV12 luma and chroma planes.
            IBuffer shaderY = await PathIO.ReadBufferAsync("ms-appx:///Invert_093_NV12_Y.cso");
            IBuffer shaderUV = await PathIO.ReadBufferAsync("ms-appx:///Invert_093_NV12_UV.cso");
            return new ShaderEffectDefinitionNv12(shaderY, shaderUV);
        case 2:
            // Single compiled pixel shader applied to RGB32 frames.
            IBuffer shader = await PathIO.ReadBufferAsync("ms-appx:///Invert_093_RGB32.cso");
            return new ShaderEffectDefinitionBgrx8(shader);
        case 3:
            // Select the largest centered square area in the input video
            uint inputWidth = props.Width;
            uint inputHeight = props.Height;
            uint outputLength = Math.Min(inputWidth, inputHeight);
            Rect cropArea = new Rect(
                (float)((inputWidth - outputLength) / 2),
                (float)((inputHeight - outputLength) / 2),
                (float)outputLength,
                (float)outputLength
                );
            var definition = new LumiaEffectDefinition(new FilterChainFactory(() =>
            {
                var filters = new List<IFilter>();
                filters.Add(new CropFilter(cropArea));
                return filters;
            }));
            // Explicit input/output dimensions so the pipeline crops to a square.
            definition.InputWidth = inputWidth;
            definition.InputHeight = inputHeight;
            definition.OutputWidth = outputLength;
            definition.OutputHeight = outputLength;
            return definition;
        case 4:
            return new SquareEffectDefinition();
        case 5:
            // Blend a bundled image over a 40%-by-40% region anchored at (0, 0).
            var file = await StorageFile.GetFileFromApplicationUriAsync(new Uri("ms-appx:///Assets/traffic.png"));
            var foreground = new StorageFileImageSource(file);
            return new LumiaEffectDefinition(() =>
            {
                // BlendFilter is marked obsolete; warning suppressed deliberately.
#pragma warning disable 618
                var filter = new BlendFilter(foreground);
                filter.TargetOutputOption = OutputOption.PreserveAspectRatio;
                filter.TargetArea = new Rect(0, 0, .4, .4);
                return new IFilter[] { filter };
            });
        case 6:
            return new LumiaEffectDefinition(() => { return new AnimatedWarp(); });
        case 7:
            return new LumiaEffectDefinition(() => { return new BitmapEffect(); });
        case 8:
            return new LumiaEffectDefinition(() => { return new BitmapEffect2(); });
        case 9:
            return new CanvasEffectDefinition(() => { return new CanvasEffect(); });
        default:
            throw new ArgumentException("Invalid effect type");
    }
}
/// <summary>
/// Creates an image source of the overlay, specifying the size of the background
/// image it will be used on. The image source will be sized and cropped correctly.
/// </summary>
/// <param name="backgroundSize">The size of the background image.</param>
/// <returns>The constructed overlay image source.</returns>
public async Task<IImageProvider> CreateAsync(Size backgroundSize)
{
    var uriAndRotation = GetUriAndRotation(backgroundSize);
    var file = await Windows.Storage.StorageFile.GetFileFromApplicationUriAsync(uriAndRotation.Item1).AsTask().ConfigureAwait(false);
    var overlayImageSource = new StorageFileImageSource(file);
    var overlayImageInfo = await overlayImageSource.GetInfoAsync().AsTask().ConfigureAwait(false);
    var overlayImageAspectRatio = overlayImageInfo.ImageSize.Width / overlayImageInfo.ImageSize.Height;
    // Start from the overlay's native size, then scale so its longer edge
    // matches the background's longer edge, preserving the overlay's aspect
    // ratio. A square background (width == height) falls through unscaled.
    int overlayImageScaledWidth = (int)overlayImageInfo.ImageSize.Width;
    int overlayImageScaledHeight = (int)overlayImageInfo.ImageSize.Height;
    if ((int)backgroundSize.Width > (int)backgroundSize.Height)
    {
        overlayImageScaledHeight = (int)(backgroundSize.Width / overlayImageAspectRatio);
        overlayImageScaledWidth = (int)(backgroundSize.Width);
    }
    else if ((int)backgroundSize.Width < (int)backgroundSize.Height)
    {
        overlayImageScaledWidth = (int)(backgroundSize.Height * overlayImageAspectRatio);
        overlayImageScaledHeight = (int)(backgroundSize.Height);
    }
    var renderer = new BitmapRenderer(overlayImageSource) { Size = new Size(overlayImageScaledWidth, overlayImageScaledHeight) };
    var overlayBitmap = await renderer.RenderAsync().AsTask().ConfigureAwait(false);
    var overlayBitmapImageSource = new BitmapImageSource(overlayBitmap);
    // NOTE(review): Crop receives the overlay's ORIGINAL (unscaled) dimensions,
    // not the scaled ones computed above — confirm Crop() accounts for that.
    // This method also duplicates CreateAsync defined earlier in this file.
    return Crop(overlayBitmapImageSource, overlayImageInfo, (int)overlayImageInfo.ImageSize.Width, (int)overlayImageInfo.ImageSize.Height);
}
// Loads an image (possibly multi-frame, e.g. an animated GIF) from the Saved
// Pictures library, renders every frame into _frames, then starts playback.
private async Task LoadImage(String filename)
{
    var storageFile = await KnownFolders.SavedPictures.GetFileAsync(filename);
    var storageFileImageSource = new StorageFileImageSource(storageFile);
    // The image source doubles as an async resource exposing FrameCount,
    // ImageSize and a settable FrameIndex.
    var asyncImageResource = storageFileImageSource as IAsyncImageResource;
    var imageResource = await asyncImageResource.LoadAsync();
    for (uint frameIndex = 0; frameIndex < imageResource.FrameCount; frameIndex++)
    {
        // Select which frame the next render call will produce.
        imageResource.FrameIndex = frameIndex;
        var writeableBitmap = new WriteableBitmap((int)imageResource.ImageSize.Width, (int)imageResource.ImageSize.Height);
        using (var writeableBitmapRenderer = new WriteableBitmapRenderer(storageFileImageSource, writeableBitmap))
        {
            writeableBitmapRenderer.RenderOptions = RenderOptions.Cpu;
            await writeableBitmapRenderer.RenderAsync();
            _frames.Add(writeableBitmap);
        }
    }
    StartAnimation();
}