/// <summary>
/// Applies a curves adjustment (single-point natural cubic spline, identical on
/// all three channels) to the given bitmap and returns the adjusted copy.
/// </summary>
/// <param name="imgSource">Source image; not modified.</param>
/// <returns>A new WriteableBitmap of the same size with the curve applied.</returns>
public static async Task <WriteableBitmap> AdjustCurvesEffect(WriteableBitmap imgSource)
{
    // Dispose the intermediate source and effect deterministically (they were
    // previously leaked until finalization).
    using (var source = new BitmapImageSource(imgSource.AsBitmap()))
    using (var curvesEffect = new CurvesEffect { Source = source })
    {
        // TODO: allow for curve values to be set via settings pane.
        var globalCurve = new Curve(CurveInterpolation.NaturalCubicSpline, new[] { new Point(200, 62) });

        curvesEffect.Red = globalCurve;
        curvesEffect.Green = globalCurve;
        curvesEffect.Blue = globalCurve;

        var adjustedImg = new WriteableBitmap(imgSource.PixelWidth, imgSource.PixelHeight);

        using (var renderer = new WriteableBitmapRenderer(curvesEffect, adjustedImg))
        {
            await renderer.RenderAsync();
        }

        return adjustedImg;
    }
}
/// <summary>
/// Applies a contrast enhancement to the given frame and returns the result
/// as a new frame of the same format and (aspect-preserved) size.
/// </summary>
/// <param name="frame">Input frame; its buffer is wrapped, not copied.</param>
/// <returns>An EnhanceResult carrying the enhanced frame.</returns>
public async Task <EnhanceResult> EnhanceAsync(Frame frame)
{
    using (var bitmap = new Bitmap(new Windows.Foundation.Size(frame.Dimensions.Width, frame.Dimensions.Height), Internal.Utilities.FrameFormatToColorMode(frame.Format), frame.Pitch, frame.Buffer.AsBuffer()))
    using (var source = new BitmapImageSource(bitmap))
    using (var effect = new FilterEffect(source))
    {
        // NOTE: the previous version also constructed a BitmapRenderer that was
        // never used; rendering goes through effect.GetBitmapAsync instead.
        effect.Filters = new List <IFilter>() { new ContrastFilter(0.5) };

        using (var newBitmap = new Bitmap(new Windows.Foundation.Size(frame.Dimensions.Width, frame.Dimensions.Height), Internal.Utilities.FrameFormatToColorMode(frame.Format)))
        {
            await effect.GetBitmapAsync(newBitmap, OutputOption.PreserveAspectRatio);

            return new EnhanceResult()
            {
                Frame = new Frame()
                {
                    Buffer = newBitmap.Buffers[0].Buffer.ToArray(),
                    Pitch = newBitmap.Buffers[0].Pitch,
                    Format = frame.Format,
                    Dimensions = newBitmap.Dimensions
                }
            };
        }
    }
}
/// <summary>
/// Applies a contrast enhancement to the given frame and returns the result
/// as a new frame of the same format and (aspect-preserved) size.
/// </summary>
/// <param name="frame">Input frame; its buffer is wrapped, not copied.</param>
/// <returns>An EnhanceResult carrying the enhanced frame.</returns>
public async Task<EnhanceResult> EnhanceAsync(Frame frame)
{
    using (var bitmap = new Bitmap(new Windows.Foundation.Size(frame.Dimensions.Width, frame.Dimensions.Height), Internal.Utilities.FrameFormatToColorMode(frame.Format), frame.Pitch, frame.Buffer.AsBuffer()))
    using (var source = new BitmapImageSource(bitmap))
    using (var effect = new FilterEffect(source))
    {
        // NOTE: the previous version also constructed a BitmapRenderer that was
        // never used; rendering goes through effect.GetBitmapAsync instead.
        effect.Filters = new List<IFilter>() { new ContrastFilter(0.5) };

        using (var newBitmap = new Bitmap(new Windows.Foundation.Size(frame.Dimensions.Width, frame.Dimensions.Height), Internal.Utilities.FrameFormatToColorMode(frame.Format)))
        {
            await effect.GetBitmapAsync(newBitmap, OutputOption.PreserveAspectRatio);

            return new EnhanceResult()
            {
                Frame = new Frame()
                {
                    Buffer = newBitmap.Buffers[0].Buffer.ToArray(),
                    Pitch = newBitmap.Buffers[0].Pitch,
                    Format = frame.Format,
                    Dimensions = newBitmap.Dimensions
                }
            };
        }
    }
}
/// <summary>
/// Creates an image source of the overlay, specifying the size of the background image it will be used on. The image source will be sized and cropped correctly.
/// </summary>
/// <param name="backgroundSize">The size of the background image.</param>
/// <returns>The constructed overlay image source.</returns>
public async Task <IImageProvider> CreateAsync(Size backgroundSize)
{
    var uriAndRotation = GetUriAndRotation(backgroundSize);
    var file = await Windows.Storage.StorageFile.GetFileFromApplicationUriAsync(uriAndRotation.Item1).AsTask().ConfigureAwait(false);

    var overlayImageSource = new StorageFileImageSource(file);
    var overlayImageInfo = await overlayImageSource.GetInfoAsync().AsTask().ConfigureAwait(false);
    var overlayImageAspectRatio = overlayImageInfo.ImageSize.Width / overlayImageInfo.ImageSize.Height;

    // Start from the overlay's natural size, then scale it to match the
    // background's longer edge while keeping the overlay's aspect ratio.
    int overlayImageScaledWidth = (int)overlayImageInfo.ImageSize.Width;
    int overlayImageScaledHeight = (int)overlayImageInfo.ImageSize.Height;

    if ((int)backgroundSize.Width > (int)backgroundSize.Height)
    {
        overlayImageScaledHeight = (int)(backgroundSize.Width / overlayImageAspectRatio);
        overlayImageScaledWidth = (int)(backgroundSize.Width);
    }
    else if ((int)backgroundSize.Width < (int)backgroundSize.Height)
    {
        overlayImageScaledWidth = (int)(backgroundSize.Height * overlayImageAspectRatio);
        overlayImageScaledHeight = (int)(backgroundSize.Height);
    }

    // Render the scaled overlay into a bitmap and hand it to Crop for alignment.
    var renderer = new BitmapRenderer(overlayImageSource) { Size = new Size(overlayImageScaledWidth, overlayImageScaledHeight) };
    var overlayBitmap = await renderer.RenderAsync().AsTask().ConfigureAwait(false);
    var overlayBitmapImageSource = new BitmapImageSource(overlayBitmap);

    // NOTE(review): renderer and overlayImageSource are IDisposable but are not
    // disposed here — confirm whether ownership is intentionally transferred.
    return(Crop(overlayBitmapImageSource, overlayImageInfo, (int)overlayImageInfo.ImageSize.Width, (int)overlayImageInfo.ImageSize.Height));
}
// Converts a preview Bitmap into a WriteableBitmap for display. Reuses the
// supplied target when it already matches the renderer's preview size,
// otherwise allocates a new one; re-renders only when the source bitmap's
// dimensions differ from the preview size.
private async Task <WriteableBitmap> ConvertPreviewToWriteableBitmap(Bitmap bitmap, WriteableBitmap writeableBitmap)
{
    int previewWidth = (int)m_renderer.PreviewSize.Width;
    int previewHeight = (int)m_renderer.PreviewSize.Height;

    // Allocate a new target only when the existing one is missing or mis-sized.
    if (writeableBitmap == null || writeableBitmap.PixelWidth != previewWidth || writeableBitmap.PixelHeight != previewHeight)
    {
        writeableBitmap = new WriteableBitmap(previewWidth, previewHeight);
    }

    if (bitmap.Dimensions != m_renderer.PreviewSize)
    {
        // Re-render Bitmap to WriteableBitmap at the correct size.
        using (var bitmapImageSource = new BitmapImageSource(bitmap))
        using (var renderer = new WriteableBitmapRenderer(bitmapImageSource, writeableBitmap))
        {
            renderer.RenderOptions = RenderOptions.Cpu;
            await renderer.RenderAsync().AsTask();
            writeableBitmap.Invalidate();
        }
    }
    else
    {
        // Already at the display size, so just copy.
        bitmap.CopyTo(writeableBitmap);
        writeableBitmap.Invalidate();
    }

    return(writeableBitmap);
}
// Renders the lens-blurred composition at full quality and saves it to the
// media library. Fire-and-forget UI handler (async void); the Processing flag
// guards against re-entry while a save is running.
private async void AttemptSave()
{
    if (Processing)
    {
        return;
    }

    Processing = true;

    GC.Collect();

    // Drop segmentation quality on devices whose working-set limit is below 300 MB.
    var lowMemory = false;
    try
    {
        var result = (long)DeviceExtendedProperties.GetValue("ApplicationWorkingSetLimit");
        lowMemory = result / 1024 / 1024 < 300;
    }
    catch (ArgumentOutOfRangeException)
    {
        // Property not available on this device; assume memory is not constrained.
    }

    IBuffer buffer;

    // Rewind before re-reading the original image stream.
    Model.OriginalImage.Position = 0;

    using (var source = new StreamImageSource(Model.OriginalImage))
    using (var segmenter = new InteractiveForegroundSegmenter(source))
    using (var annotationsSource = new BitmapImageSource(Model.AnnotationsBitmap))
    {
        segmenter.Quality = lowMemory ? 0.5 : 1;
        segmenter.AnnotationsSource = annotationsSource;

        var foregroundColor = Model.ForegroundBrush.Color;
        var backgroundColor = Model.BackgroundBrush.Color;

        segmenter.ForegroundColor = Windows.UI.Color.FromArgb(foregroundColor.A, foregroundColor.R, foregroundColor.G, foregroundColor.B);
        segmenter.BackgroundColor = Windows.UI.Color.FromArgb(backgroundColor.A, backgroundColor.R, backgroundColor.G, backgroundColor.B);

        // Use the segmenter output as the lens-blur kernel map, then encode to JPEG.
        using (var effect = new LensBlurEffect(source, new LensBlurPredefinedKernel(Shape, (uint)SizeSlider.Value)))
        using (var renderer = new JpegRenderer(effect))
        {
            effect.KernelMap = segmenter;
            buffer = await renderer.RenderAsync();
        }
    }

    using (var library = new MediaLibrary())
    using (var stream = buffer.AsStream())
    {
        library.SavePicture("lensblur_" + DateTime.Now.Ticks, stream);
        Model.Saved = true;
        AdaptButtonsToState();
    }

    // NOTE(review): if rendering or saving throws, Processing is never reset
    // to false — consider a try/finally.
    Processing = false;
}
// Renders a half-quality lens-blur preview. Fire-and-forget (async void).
// If called while a render is in flight, sets _processingPending so the
// running do/while loop re-renders once more with the latest parameters.
private async void AttemptUpdatePreviewAsync()
{
    if (!Processing)
    {
        Processing = true;

        AdaptButtonsToState();

        // Rewind before re-reading the original image stream.
        Model.OriginalImage.Position = 0;

        using (var source = new StreamImageSource(Model.OriginalImage))
        using (var segmenter = new InteractiveForegroundSegmenter(source))
        using (var annotationsSource = new BitmapImageSource(Model.AnnotationsBitmap))
        {
            // Half quality keeps the preview responsive.
            segmenter.Quality = 0.5;
            segmenter.AnnotationsSource = annotationsSource;

            var foregroundColor = Model.ForegroundBrush.Color;
            var backgroundColor = Model.BackgroundBrush.Color;

            segmenter.ForegroundColor = Windows.UI.Color.FromArgb(foregroundColor.A, foregroundColor.R, foregroundColor.G, foregroundColor.B);
            segmenter.BackgroundColor = Windows.UI.Color.FromArgb(backgroundColor.A, backgroundColor.R, backgroundColor.G, backgroundColor.B);

            do
            {
                _processingPending = false;

                var previewBitmap = new WriteableBitmap((int)Model.AnnotationsBitmap.Dimensions.Width, (int)Model.AnnotationsBitmap.Dimensions.Height);

                using (var effect = new LensBlurEffect(source, new LensBlurPredefinedKernel(Model.KernelShape, (uint)Model.KernelSize)))
                using (var renderer = new WriteableBitmapRenderer(effect, previewBitmap))
                {
                    effect.KernelMap = segmenter;

                    try
                    {
                        await renderer.RenderAsync();

                        PreviewImage.Source = previewBitmap;
                        previewBitmap.Invalidate();
                    }
                    catch (Exception ex)
                    {
                        // Best effort: keep the previous preview if rendering fails.
                        System.Diagnostics.Debug.WriteLine("AttemptUpdatePreviewAsync rendering failed: " + ex.Message);
                    }
                }
            }
            while (_processingPending);
        }

        Processing = false;

        AdaptButtonsToState();
    }
    else
    {
        // A render is already running; ask it to go one more round.
        _processingPending = true;
    }
}
// Constructor
public MainPage()
{
    InitializeComponent();

    // Pre-allocate the working bitmap (480x800, BGRA) and wrap it as the
    // image source used by the rendering pipeline.
    input = new Bitmap(new Windows.Foundation.Size(480, 800), ColorMode.Bgra8888);
    picture = new BitmapImageSource(input);

    // Sample code to localize the ApplicationBar
    //BuildLocalizedApplicationBar();
}
// Decodes a barcode from a Gray8 preview bitmap; on a valid EAN hit, renders
// the frame into bitmapWithBarcode and publishes results to the UI thread.
private void AnalyzeBitmap(Bitmap bitmap, TimeSpan time)
{
    // Skip analysis while the details panel is showing.
    if (ProductDetailsPanel.IsOpen)
    {
        return;
    }

    Result result = barcodeReader.Decode(
        bitmap.Buffers[0].Buffer.ToArray(),
        (int)bitmap.Buffers[0].Pitch, // Should be width here but I haven't found a way to pass both width and stride to ZXing yet
        (int)bitmap.Dimensions.Height,
        BitmapFormat.Gray8);

    if (result != null && IsValidEan(result.Text))
    {
        if (autoFocus != null)
        {
            autoFocus.BarcodeFound = true;
        }

        string barcode = result.Text;

        // Fire-and-forget dispatch to the UI thread.
        var ignore = Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () => { ShowFilteringBarcode(barcode); });

        // NOTE(review): blocking on .Result risks deadlock if this ever runs on
        // the UI thread, and neither bmpImgSrc nor renderer is disposed.
        BitmapImageSource bmpImgSrc = new BitmapImageSource(bitmap);
        WriteableBitmapRenderer renderer = new WriteableBitmapRenderer(bmpImgSrc, bitmapWithBarcode);
        bitmapWithBarcode = renderer.RenderAsync().AsTask().Result;

        if (barcodeFilter.Update(barcode))
        {
            Debug.WriteLine(barcode);

            ignore = Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
            {
                ProductsListBox.AddProduct(barcode, bitmapWithBarcode);

                // Start a fresh target so the published bitmap is not overwritten.
                bitmapWithBarcode = new WriteableBitmap(bitmapWithBarcode.PixelWidth, bitmapWithBarcode.PixelHeight);

                if (HintTextBlock.Visibility == Visibility.Visible)
                {
                    HintTextBlock.Visibility = Visibility.Collapsed;
                }

                ShowActiveBarcode(barcode);
            });
        }
    }
    else
    {
        if (autoFocus != null)
        {
            autoFocus.BarcodeFound = false;
        }
    }
}
/// <summary>
/// Renders a writeable bitmap preview of the given frame.
/// </summary>
/// <param name="frame">Frame to render.</param>
/// <param name="size">Preview size in pixels.</param>
/// <returns>Rendered frame preview.</returns>
public static async Task <WriteableBitmap> RenderPreviewAsync(Frame frame, Windows.Foundation.Size size)
{
    // Target bitmap at the requested preview size; the renderer stretches into it.
    var target = new WriteableBitmap((int)size.Width, (int)size.Height);
    var colorMode = Internal.Utilities.FrameFormatToColorMode(frame.Format);

    using (var bitmap = new Bitmap(frame.Dimensions, colorMode, frame.Pitch, frame.Buffer.AsBuffer()))
    using (var source = new BitmapImageSource(bitmap))
    using (var renderer = new WriteableBitmapRenderer(source, target, OutputOption.Stretch))
    {
        return await renderer.RenderAsync();
    }
}
/// <summary>
/// Crops and rotates the frame to the given area, returning the normalized
/// frame plus a Translate function that maps points in the normalized frame
/// back to coordinates in the original frame.
/// </summary>
/// <param name="frame">Input frame; its buffer is wrapped, not copied.</param>
/// <param name="area">Crop rectangle in original-frame coordinates.</param>
/// <param name="rotation">Rotation in degrees; applied negated via ReframingFilter.</param>
public async Task <NormalizeResult> NormalizeAsync(Frame frame, Windows.Foundation.Rect area, double rotation)
{
    using (var bitmap = new Bitmap(frame.Dimensions, Internal.Utilities.FrameFormatToColorMode(frame.Format), frame.Pitch, frame.Buffer.AsBuffer()))
    using (var source = new BitmapImageSource(bitmap))
    using (var effect = new FilterEffect(source))
    {
        // NOTE: the previous version also constructed a BitmapRenderer that was
        // never used; rendering goes through effect.GetBitmapAsync instead.
        effect.Filters = new List <IFilter>() { new ReframingFilter(area, -rotation) };

        using (var newBitmap = new Bitmap(new Windows.Foundation.Size(area.Width, area.Height), Internal.Utilities.FrameFormatToColorMode(frame.Format)))
        {
            await effect.GetBitmapAsync(newBitmap, OutputOption.PreserveAspectRatio);

            return new NormalizeResult()
            {
                Frame = new Frame()
                {
                    Buffer = newBitmap.Buffers[0].Buffer.ToArray(),
                    Pitch = newBitmap.Buffers[0].Pitch,
                    Format = frame.Format,
                    Dimensions = newBitmap.Dimensions
                },

                Translate = new Func <Windows.Foundation.Point, Windows.Foundation.Point>((normalizedPoint) =>
                {
                    var rotationRadians = -rotation / 360.0 * 2.0 * Math.PI;
                    var sin = Math.Sin(rotationRadians);
                    var cos = Math.Cos(rotationRadians);
                    var origoX = area.Width / 2.0;
                    var origoY = area.Height / 2.0;

                    // Translate point to origo before rotation
                    var ox = normalizedPoint.X - origoX;
                    var oy = normalizedPoint.Y - origoY;

                    // Move area to origo, calculate new point positions, restore area location and add crop margins
                    var x = ox * cos - oy * sin;
                    var y = ox * sin + oy * cos;

                    // Translate point back to area after rotation
                    x = x + origoX;
                    y = y + origoY;

                    // Add margins from original uncropped frame
                    x = x + area.X;
                    y = y + area.Y;

                    return new Windows.Foundation.Point(x, y);
                })
            };
        }
    }
}
/// <summary>
/// Crops and rotates the frame to the given area, returning the normalized
/// frame plus a Translate function that maps points in the normalized frame
/// back to coordinates in the original frame.
/// </summary>
/// <param name="frame">Input frame; its buffer is wrapped, not copied.</param>
/// <param name="area">Crop rectangle in original-frame coordinates.</param>
/// <param name="rotation">Rotation in degrees; applied negated via ReframingFilter.</param>
public async Task<NormalizeResult> NormalizeAsync(Frame frame, Windows.Foundation.Rect area, double rotation)
{
    using (var bitmap = new Bitmap(frame.Dimensions, Internal.Utilities.FrameFormatToColorMode(frame.Format), frame.Pitch, frame.Buffer.AsBuffer()))
    using (var source = new BitmapImageSource(bitmap))
    using (var effect = new FilterEffect(source))
    {
        // NOTE: the previous version also constructed a BitmapRenderer that was
        // never used; rendering goes through effect.GetBitmapAsync instead.
        effect.Filters = new List<IFilter>() { new ReframingFilter(area, -rotation) };

        using (var newBitmap = new Bitmap(new Windows.Foundation.Size(area.Width, area.Height), Internal.Utilities.FrameFormatToColorMode(frame.Format)))
        {
            await effect.GetBitmapAsync(newBitmap, OutputOption.PreserveAspectRatio);

            return new NormalizeResult()
            {
                Frame = new Frame()
                {
                    Buffer = newBitmap.Buffers[0].Buffer.ToArray(),
                    Pitch = newBitmap.Buffers[0].Pitch,
                    Format = frame.Format,
                    Dimensions = newBitmap.Dimensions
                },

                Translate = new Func<Windows.Foundation.Point, Windows.Foundation.Point>((normalizedPoint) =>
                {
                    var rotationRadians = -rotation / 360.0 * 2.0 * Math.PI;
                    var sin = Math.Sin(rotationRadians);
                    var cos = Math.Cos(rotationRadians);
                    var origoX = area.Width / 2.0;
                    var origoY = area.Height / 2.0;

                    // Translate point to origo before rotation
                    var ox = normalizedPoint.X - origoX;
                    var oy = normalizedPoint.Y - origoY;

                    // Move area to origo, calculate new point positions, restore area location and add crop margins
                    var x = ox * cos - oy * sin;
                    var y = ox * sin + oy * cos;

                    // Translate point back to area after rotation
                    x = x + origoX;
                    y = y + origoY;

                    // Add margins from original uncropped frame
                    x = x + area.X;
                    y = y + area.Y;

                    return new Windows.Foundation.Point(x, y);
                })
            };
        }
    }
}
/// <summary>
/// Applies the given filters to actualImage in place and returns it.
/// </summary>
/// <param name="actualImage">Image to filter; also used as the render target.</param>
/// <param name="filters">Filters to apply, in order.</param>
/// <returns>The filtered WriteableBitmap (same instance as actualImage).</returns>
public async static Task <WriteableBitmap> Render(WriteableBitmap actualImage, List <IFilter> filters)
{
    // Dispose the intermediate source, effect and renderer deterministically
    // (they were previously leaked until finalization).
    using (var bitmapSource = new BitmapImageSource(actualImage.AsBitmap()))
    using (var effects = new FilterEffect(bitmapSource) { Filters = filters })
    using (var renderer = new WriteableBitmapRenderer(effects, actualImage))
    {
        return await renderer.RenderAsync();
    }
}
// Verifies the DOT graph of a BlendEffect that blends a source with itself:
// the shared source node and the effect node each appear three times.
public void CreatesBlendEffectBlendingWithSelfGraph()
{
    var bitmap = new Bitmap(new Size(10, 10), ColorMode.Argb8888);

    using (var source = new BitmapImageSource(bitmap))
    using (var blendEffect = new BlendEffect(source, source))
    {
        string graph = CreateGraph(blendEffect);

        // Count literal occurrences of a node id within the rendered graph.
        Func<string, int> occurrences = id => Regex.Matches(graph, Regex.Escape(id)).Count;

        Assert.AreEqual(3, occurrences(NodeId(source)));
        Assert.AreEqual(3, occurrences(NodeId(blendEffect)));
    }
}
/// <summary>
/// Loads an ImageSource and allows control of thumbnail use.
/// Morpheus_xx, 2011-12-13: For fallback sources no thumbnails should be used, because ALL thumbs are created as JPG. This currenly causes an issue:
/// Source -> no thumbnail created -> FallbackSource (PNG) -> creates a JPG thumbnail, so Alpha-Channel of FallbackSource is lost.
/// TODO: Image class and thumbnail handling should be refactored to allow more control about image formats and thumbs usage.
/// </summary>
/// <param name="source">Source</param>
/// <param name="allowThumbs">True to allow building a thumbnail of given source</param>
/// <returns>ImageSource or null</returns>
protected ImageSource LoadImageSource(object source, bool allowThumbs)
{
    ImageSource result;
    bool thumbnail = allowThumbs && Thumbnail;

    _invalidateImageSourceOnResize = false;

    // Try the strongly-typed sources first: MediaItem, IResourceLocator,
    // then a pass-through for an already-built ImageSource.
    if (source is MediaItem)
    {
        result = MediaItemsHelper.CreateThumbnailImageSource((MediaItem)source, (int)Math.Max(Width, Height));
        // Thumbnail sources depend on the control size, so rebuild on resize.
        _invalidateImageSourceOnResize = true;
    }
    else if (source is IResourceLocator)
    {
        IResourceLocator resourceLocator = (IResourceLocator)source;
        result = new ResourceAccessorTextureImageSource(resourceLocator.CreateAccessor(), RightAngledRotation.Zero);
    }
    else
    {
        result = source as ImageSource;
    }

    // Fall back to interpreting the source as a URI string.
    if (result == null)
    {
        string uriSource = source as string;
        if (!string.IsNullOrEmpty(uriSource))
        {
            // Remember to adapt list of supported extensions for image player plugin...
            if (IsValidSource(uriSource))
            {
                BitmapImageSource bmi = new BitmapImageSource { UriSource = uriSource, Thumbnail = thumbnail };
                if (thumbnail)
                {
                    // Set the requested thumbnail dimension, to use the best matching format.
                    bmi.ThumbnailDimension = (int)Math.Max(Width, Height);
                }
                result = bmi;
            }
            // TODO: More image types
            else
            {
                if (_formerWarnURI != uriSource)
                {
                    ServiceRegistration.Get <ILogger>().Warn("Image: Image source '{0}' is not supported", uriSource);

                    // Remember if we already wrote a warning to the log to avoid log flooding
                    _formerWarnURI = uriSource;
                }
            }
        }
    }

    return(result);
}
// Verifies the DOT graph of a FilterEffect with three chained filters:
// the source node appears twice and the effect node three times.
public void CreatesFilterEffectGraph()
{
    var bitmap = new Bitmap(new Size(10, 10), ColorMode.Argb8888);

    using (var source = new BitmapImageSource(bitmap))
    using (var effect = new FilterEffect(source))
    {
        effect.Filters = new IFilter[] { new BlurFilter(), new MoonlightFilter(), new HueSaturationFilter() };

        string graph = CreateGraph(effect);

        // Count literal occurrences of a node id within the rendered graph.
        Func<string, int> occurrences = id => Regex.Matches(graph, Regex.Escape(id)).Count;

        Assert.AreEqual(2, occurrences(NodeId(source)));
        Assert.AreEqual(3, occurrences(NodeId(effect)));
    }
}
/// <summary>
/// Applies the given filters to actualImage in place and returns it.
/// </summary>
/// <param name="actualImage">Image to filter; also used as the render target.</param>
/// <param name="filters">Filters to apply, in order.</param>
/// <returns>The filtered WriteableBitmap (same instance as actualImage).</returns>
public async static Task<WriteableBitmap> Render(WriteableBitmap actualImage, List<IFilter> filters)
{
    // Dispose the intermediate source, effect and renderer deterministically
    // (they were previously leaked until finalization).
    using (var bitmapSource = new BitmapImageSource(actualImage.AsBitmap()))
    using (var effects = new FilterEffect(bitmapSource) { Filters = filters })
    using (var renderer = new WriteableBitmapRenderer(effects, actualImage))
    {
        return await renderer.RenderAsync();
    }
}
// Sets up the lens-blur effect, its six predefined Gray8 kernel maps, and the
// editor property ranges. The repeated map construction is factored into
// CreateKernelMap to remove six copies of identical code.
public LensBlurProcessor()
{
    CanRenderAtPreviewSize = false;
    Name = "LensBlurEffect";

    // Each kernel map is a tiny Gray8 bitmap; the byte array is row-major
    // pixel data and the pitch equals the map width.
    List<IImageProvider> list = new List<IImageProvider>();
    list.Add(CreateKernelMap(new byte[] { 255, 255, 255, 255, 0, 255, 255, 255, 255 }, new Size(3, 3), 3));
    list.Add(CreateKernelMap(new byte[] { 255, 0 }, new Size(2, 1), 2));
    list.Add(CreateKernelMap(new byte[] { 0, 255 }, new Size(2, 1), 2));
    list.Add(CreateKernelMap(new byte[] { 255, 0 }, new Size(1, 2), 1));
    list.Add(CreateKernelMap(new byte[] { 0, 255 }, new Size(1, 2), 1));
    list.Add(CreateKernelMap(new byte[] { 0 }, new Size(1, 1), 1));
    m_KernelMaps = list.ToArray();

    m_lensBlurEffect = new LensBlurEffect();
    SetupEffectCategory(m_lensBlurEffect);
    m_lensBlurEffect.KernelMap = m_KernelMaps[0];

    // Editor property ranges: (min, max, default).
    m_propertyDescriptions = new Dictionary<string, PropertyDescription>();
    m_propertyDescriptions.Add("BlendKernelWidth", new PropertyDescription(0, 255, 5));
    m_propertyDescriptions.Add("PointLightStrength", new PropertyDescription(1, 10, 7));
    m_propertyDescriptions.Add("Quality", new PropertyDescription(0, 1.0, 1.0));

    AddEditors();
}

// Wraps raw Gray8 pixel data in a Bitmap and exposes it as an image source.
private static BitmapImageSource CreateKernelMap(byte[] map, Size size, uint pitch)
{
    return new BitmapImageSource(new Bitmap(size, ColorMode.Gray8, pitch, map.AsBuffer()));
}
// Sets up the lens-blur effect, its six predefined Gray8 kernel maps, and the
// editor property ranges. The repeated map construction is factored into
// CreateKernelMapSource to remove six copies of identical code.
public LensBlurProcessor()
{
    CanRenderAtPreviewSize = false;
    Name = "LensBlurEffect";

    // Each kernel map is a tiny Gray8 bitmap; the byte array is row-major
    // pixel data and the pitch equals the map width.
    List <IImageProvider> list = new List <IImageProvider>();
    list.Add(CreateKernelMapSource(new byte[] { 255, 255, 255, 255, 0, 255, 255, 255, 255 }, new Size(3, 3), 3));
    list.Add(CreateKernelMapSource(new byte[] { 255, 0 }, new Size(2, 1), 2));
    list.Add(CreateKernelMapSource(new byte[] { 0, 255 }, new Size(2, 1), 2));
    list.Add(CreateKernelMapSource(new byte[] { 255, 0 }, new Size(1, 2), 1));
    list.Add(CreateKernelMapSource(new byte[] { 0, 255 }, new Size(1, 2), 1));
    list.Add(CreateKernelMapSource(new byte[] { 0 }, new Size(1, 1), 1));
    m_KernelMaps = list.ToArray();

    m_lensBlurEffect = new LensBlurEffect();
    SetupEffectCategory(m_lensBlurEffect);
    m_lensBlurEffect.KernelMap = m_KernelMaps[0];

    // Editor property ranges: (min, max, default).
    m_propertyDescriptions = new Dictionary <string, PropertyDescription>();
    m_propertyDescriptions.Add("BlendKernelWidth", new PropertyDescription(0, 255, 5));
    m_propertyDescriptions.Add("PointLightStrength", new PropertyDescription(1, 10, 7));
    m_propertyDescriptions.Add("Quality", new PropertyDescription(0, 1.0, 1.0));

    AddEditors();
}

// Wraps raw Gray8 pixel data in a Bitmap and exposes it as an image source.
private static BitmapImageSource CreateKernelMapSource(byte[] map, Size size, uint pitch)
{
    return new BitmapImageSource(new Bitmap(size, ColorMode.Gray8, pitch, map.AsBuffer()));
}
// Verifies the DOT graph of an ObjectExtractor fed by a source and a mask:
// each input appears twice, the extractor node three times.
public void CreatesExtractorGraph()
{
    var bitmap = new Bitmap(new Size(10, 10), ColorMode.Argb8888);

    using (var source = new BitmapImageSource(bitmap))
    using (var mask = new BitmapImageSource(bitmap))
    using (var extractor = new ObjectExtractor(source, mask))
    {
        string graph = CreateGraph(extractor);

        // Count literal occurrences of a node id within the rendered graph.
        Func<string, int> occurrences = id => Regex.Matches(graph, Regex.Escape(id)).Count;

        Assert.AreEqual(2, occurrences(NodeId(source)));
        Assert.AreEqual(2, occurrences(NodeId(mask)));
        Assert.AreEqual(3, occurrences(NodeId(extractor)));
    }
}
// Pushes the current Image byte array into the BitmapImageSource.
// No-op when no image data is available.
private async void Update()
{
    var imageBytes = Image;

    if (imageBytes == null)
    {
        return;
    }

    using (var stream = new MemoryStream(imageBytes))
    {
        await BitmapImageSource.SetSourceAsync(stream.AsRandomAccessStream());
    }
}
/// <summary>
/// Loads an ImageSource and allows control of thumbnail use.
/// Morpheus_xx, 2011-12-13: For fallback sources no thumbnails should be used, because ALL thumbs are created as JPG. This currenly causes an issue:
/// Source -> no thumbnail created -> FallbackSource (PNG) -> creates a JPG thumbnail, so Alpha-Channel of FallbackSource is lost.
/// TODO: Image class and thumbnail handling should be refactored to allow more control about image formats and thumbs usage.
/// </summary>
/// <param name="source">Source</param>
/// <param name="allowThumbs">True to allow building a thumbnail of given source</param>
/// <returns>ImageSource or null</returns>
protected ImageSource LoadImageSource(object source, bool allowThumbs)
{
    if (source == null)
    {
        return(null);
    }

    bool thumbnail = allowThumbs && Thumbnail;

    // First chance: let the factory resolve the source directly.
    ImageSource imageSource;
    if (ImageSourceFactory.TryCreateImageSource(source, (int)Width, (int)Height, out imageSource))
    {
        return(imageSource);
    }

    // Fall back to interpreting the source as a URI string.
    string uriSource = source as string;
    if (!string.IsNullOrEmpty(uriSource))
    {
        // Remember to adapt list of supported extensions for image player plugin...
        if (IsValidSource(uriSource))
        {
            BitmapImageSource bmi = new BitmapImageSource { UriSource = uriSource, Thumbnail = thumbnail };
            if (thumbnail)
            {
                // Set the requested thumbnail dimension, to use the best matching format.
                // Note: Math.Max returns NaN if one argument is NaN (which casts to int.MinValue), so the additional Max with 0 catches this
                bmi.ThumbnailDimension = Math.Max((int)Math.Max(Width, Height), 0);
            }
            return(bmi);
        }
        // TODO: More image types
    }

    // Unsupported source: warn once per distinct source string.
    string warnSource = source.ToString();
    if (_formerWarnURI != warnSource)
    {
        if (!string.IsNullOrEmpty(warnSource))
        {
            ServiceRegistration.Get <ILogger>().Warn("Image: Image source '{0}' is not supported", warnSource);
        }

        // Remember if we already wrote a warning to the log to avoid log flooding
        _formerWarnURI = warnSource;
    }

    return(null);
}
/// <summary>
/// Applies a blur with the given kernel size to the image and returns a new
/// blurred copy.
/// </summary>
/// <param name="imgSource">Source image; not modified.</param>
/// <param name="kernelS">Blur kernel size.</param>
/// <returns>A new WriteableBitmap of the same size containing the blurred image.</returns>
public static async Task <WriteableBitmap> ApplyGaussianBlur(WriteableBitmap imgSource, int kernelS)
{
    // Dispose the intermediate source and effect deterministically (they were
    // previously leaked until finalization).
    using (var source = new BitmapImageSource(imgSource.AsBitmap()))
    using (var effect = new BlurEffect(source, kernelS))
    {
        var blurredImage = new WriteableBitmap(imgSource.PixelWidth, imgSource.PixelHeight);

        using (var renderer = new WriteableBitmapRenderer(effect, blurredImage))
        {
            await renderer.RenderAsync();
        }

        return blurredImage;
    }
}
/// <summary>
/// Applies a stamp (posterize/threshold) effect to the image and returns a
/// new copy.
/// </summary>
/// <param name="imgSource">Source image; not modified.</param>
/// <param name="threshold">Stamp effect threshold.</param>
/// <returns>A new WriteableBitmap of the same size containing the stamped image.</returns>
public static async Task <WriteableBitmap> ApplyStampThreshold(WriteableBitmap imgSource, double threshold)
{
    // Dispose the intermediate source and effect deterministically (they were
    // previously leaked until finalization).
    using (var source = new BitmapImageSource(imgSource.AsBitmap()))
    using (var effect = new Lumia.Imaging.Artistic.StampEffect(source, 0, threshold))
    {
        var stampImage = new WriteableBitmap(imgSource.PixelWidth, imgSource.PixelHeight);

        using (var renderer = new WriteableBitmapRenderer(effect, stampImage))
        {
            // Render the stamped image into the target bitmap.
            await renderer.RenderAsync();
        }

        return stampImage;
    }
}
// Verifies the DOT graph of an InteractiveForegroundSegmenter: the source and
// annotations source each appear twice, the unused kernel2 not at all, and
// the segmenter node three times.
public void CreatesSegmenterGraph()
{
    var bitmap = new Bitmap(new Size(10, 10), ColorMode.Argb8888);

    using (var source = new BitmapImageSource(bitmap))
    using (var source2 = new BitmapImageSource(bitmap))
    using (var kernel2 = new BitmapImageSource(bitmap))
    using (var segmenter = new InteractiveForegroundSegmenter(source))
    {
        segmenter.AnnotationsSource = source2;

        string graph = CreateGraph(segmenter);

        // Count literal occurrences of a node id within the rendered graph.
        Func<string, int> occurrences = id => Regex.Matches(graph, Regex.Escape(id)).Count;

        Assert.AreEqual(2, occurrences(NodeId(source)));
        Assert.AreEqual(2, occurrences(NodeId(source2)));
        Assert.AreEqual(0, occurrences(NodeId(kernel2)));
        Assert.AreEqual(3, occurrences(NodeId(segmenter)));
    }
}
// Verifies the DOT graph of an ImageAligner with three sources, rendered via
// ToDotString; source1/source2 appear twice each and the aligner four times.
public void CreatesAlignerGraph()
{
    var bitmap = new Bitmap(new Size(10, 10), ColorMode.Argb8888);

    using (var source1 = new BitmapImageSource(bitmap))
    using (var source2 = new BitmapImageSource(bitmap))
    using (var source3 = new BitmapImageSource(bitmap))
    using (var aligner = new ImageAligner())
    {
        aligner.Sources = new[] { source1, source2, source3 };

        string graph = aligner.ToDotString("CreatesAlignerGraph");

        // Count literal occurrences of a node id within the rendered graph.
        Func<string, int> occurrences = id => Regex.Matches(graph, Regex.Escape(id)).Count;

        Assert.AreEqual(2, occurrences(NodeId(source1)));
        Assert.AreEqual(2, occurrences(NodeId(source2)));
        Assert.AreEqual(4, occurrences(NodeId(aligner)));
    }
}
// Verifies the DOT graph of a LensBlurEffect with a kernel map and two custom
// kernels: every input node appears twice and the effect node four times.
public void CreatesLensBlurGraph()
{
    var bitmap = new Bitmap(new Size(10, 10), ColorMode.Argb8888);

    using (var source = new BitmapImageSource(bitmap))
    using (var kernel1 = new BitmapImageSource(bitmap))
    using (var kernel2 = new BitmapImageSource(bitmap))
    using (var kernelMap = new BitmapImageSource(bitmap))
    using (var lensBlur = new LensBlurEffect(source, kernelMap))
    {
        lensBlur.Kernels = new ILensBlurKernel[]
        {
            new LensBlurCustomKernel(kernel1, 10),
            new LensBlurCustomKernel(kernel2, 20),
            new LensBlurPredefinedKernel(LensBlurPredefinedKernelShape.Circle, 10)
        };

        string graph = CreateGraph(lensBlur);

        // Count literal occurrences of a node id within the rendered graph.
        Func<string, int> occurrences = id => Regex.Matches(graph, Regex.Escape(id)).Count;

        Assert.AreEqual(2, occurrences(NodeId(source)));
        Assert.AreEqual(2, occurrences(NodeId(kernel1)));
        Assert.AreEqual(2, occurrences(NodeId(kernel2)));
        Assert.AreEqual(2, occurrences(NodeId(kernelMap)));
        Assert.AreEqual(4, occurrences(NodeId(lensBlur)));
    }
}
/// <summary>
/// For the given bitmap renders filtered thumbnails for each filter in given list and populates
/// the given wrap panel with the them.
///
/// For quick rendering, renders 10 thumbnails synchronously and then releases the calling thread.
/// </summary>
/// <param name="bitmap">Source bitmap to be filtered</param>
/// <param name="side">Side length of square thumbnails to be generated</param>
/// <param name="list">List of filters to be used, one per each thumbnail to be generated</param>
/// <param name="panel">Wrap panel to be populated with the generated thumbnails</param>
private async Task RenderThumbnailsAsync(Bitmap bitmap, int side, List <FilterModel> list, WrapPanel panel)
{
    // One shared source/effect pair; only the filter chain changes per thumbnail.
    using (var source = new BitmapImageSource(bitmap))
    using (var effect = new FilterEffect(source))
    {
        foreach (FilterModel filter in list)
        {
            effect.Filters = filter.Components;

            WriteableBitmap writeableBitmap = new WriteableBitmap(side, side);

            using (var renderer = new WriteableBitmapRenderer(effect, writeableBitmap))
            {
                await renderer.RenderAsync();
                writeableBitmap.Invalidate();

                var photoThumbnail = new PhotoThumbnail()
                {
                    Bitmap = writeableBitmap,
                    Text = filter.Name,
                    Width = side,
                    Margin = new Thickness(6)
                };

                // Copy the loop variable so the tap handler captures this
                // iteration's filter rather than the shared loop variable.
                FilterModel tempFilter = filter;

                photoThumbnail.Tap += (object sender, System.Windows.Input.GestureEventArgs e) =>
                {
                    App.PhotoModel.ApplyFilter(tempFilter);
                    App.PhotoModel.Dirty = true;
                    NavigationService.GoBack();
                };

                panel.Children.Add(photoThumbnail);
            }
        }
    }
}
// Verifies the DOT graph of a segmenter -> lens blur -> blend pipeline:
// checks the occurrence count of every node, including the unused source4.
public void CreatesQuiteComplexGraph()
{
    var bitmap = new Bitmap(new Size(10, 10), ColorMode.Argb8888);

    using (var source1 = new BitmapImageSource(bitmap))
    using (var source2 = new BitmapImageSource(bitmap))
    using (var source3 = new BitmapImageSource(bitmap))
    using (var source4 = new BitmapImageSource(bitmap))
    using (var segmenter = new InteractiveForegroundSegmenter(source1, Color.FromArgb(255, 255, 0, 0), Color.FromArgb(255, 0, 255, 0), source2))
    using (var bokeh = new LensBlurEffect(source1, segmenter))
    using (var blendEffect = new BlendEffect(bokeh, source3))
    {
        string graph = CreateGraph(blendEffect);

        // Count literal occurrences of a node id within the rendered graph.
        Func<string, int> occurrences = id => Regex.Matches(graph, Regex.Escape(id)).Count;

        Assert.AreEqual(3, occurrences(NodeId(source1)));
        Assert.AreEqual(2, occurrences(NodeId(source2)));
        Assert.AreEqual(2, occurrences(NodeId(source3)));
        Assert.AreEqual(0, occurrences(NodeId(source4)));
        Assert.AreEqual(4, occurrences(NodeId(segmenter)));
        Assert.AreEqual(5, occurrences(NodeId(bokeh)));
        Assert.AreEqual(3, occurrences(NodeId(blendEffect)));
    }
}
/// <summary>
/// Crops the rendered overlay so that, for center alignment, it is trimmed
/// symmetrically to fit the background; returns the overlay unchanged when no
/// alignment is requested.
/// </summary>
/// <param name="overlayBitmapImageSource">The rendered (scaled) overlay.</param>
/// <param name="originalOverlayImageProviderInfo">Info of the original, unscaled overlay.</param>
/// <param name="backgroundWidth">Background width in pixels.</param>
/// <param name="backgroundHeight">Background height in pixels.</param>
private IImageProvider Crop(BitmapImageSource overlayBitmapImageSource, ImageProviderInfo originalOverlayImageProviderInfo, int backgroundWidth, int backgroundHeight)
{
    IImageProvider imageProvider;

    int overlayWidth = (int)overlayBitmapImageSource.Bitmap.Dimensions.Width;
    int overlayHeight = (int)overlayBitmapImageSource.Bitmap.Dimensions.Height;

    if (HorizontalAlignment != HorizontalAlignment.None)
    {
        int cropLeft = 0;
        int cropTop = 0;
        int cropWidth = Math.Min(overlayWidth, (int)originalOverlayImageProviderInfo.ImageSize.Width);
        int cropHeight = Math.Min(overlayHeight, (int)originalOverlayImageProviderInfo.ImageSize.Height);

        if ((HorizontalAlignment == HorizontalAlignment.Center) && (overlayWidth < (int)originalOverlayImageProviderInfo.ImageSize.Width))
        {
            cropLeft = Math.Abs(overlayWidth / 2 - backgroundWidth / 2);
            cropWidth -= cropLeft * 2;
        }

        // NOTE(review): VerticalAlignment is compared against HorizontalAlignment.Center —
        // presumably both share one alignment enum; confirm.
        if ((VerticalAlignment == HorizontalAlignment.Center) && (overlayHeight < (int)originalOverlayImageProviderInfo.ImageSize.Height))
        {
            cropTop = Math.Abs(overlayHeight / 2 - backgroundHeight / 2);
            cropHeight -= cropTop * 2;
        }

        // FIX: the CropEffect was previously constructed with only the Rect, so
        // the overlay was never attached as its source and the crop rendered
        // nothing from the overlay. Attach the overlay source explicitly.
        imageProvider = new CropEffect(overlayBitmapImageSource, new Rect(cropLeft, cropTop, cropWidth, cropHeight));
    }
    else
    {
        imageProvider = overlayBitmapImageSource;
    }

    return(imageProvider);
}
}//hide progress bar

// Encodes the given frames as an animated GIF and saves it under
// Pictures\GIF Editor with a unique numeric filename.
private async Task SaveImagesAsGifFileAsync(WriteableBitmap[] imageArray)
{
    try
    {
        //convert images from imageArray to IBuffer (to save it as GIF-file then)
        GC.Collect();

        List <IImageProvider> imageProviders = new List <IImageProvider>();
        for (int i = 0; i < imageArray.Length; i++)
        {
            var buffFrame = imageArray[i];
            var buffBitmap = buffFrame.AsBitmap();
            var bufferSource = new BitmapImageSource(buffBitmap);
            imageProviders.Add(bufferSource);
        }

        GC.Collect();

        // Configure the GIF encoder: per-frame duration from `rate`,
        // long looping, per-frame palettes.
        GifRenderer gifRenderer = new GifRenderer();
        gifRenderer.Duration = rate;
        gifRenderer.NumberOfAnimationLoops = 10000;
        gifRenderer.UseGlobalPalette = false;
        gifRenderer.Sources = imageProviders;

        await Task.Delay(TimeSpan.FromSeconds(1));

        Windows.Storage.Streams.IBuffer buff = await gifRenderer.RenderAsync();

        //crete/open folder
        StorageFolder folder;
        try
        {
            folder = await KnownFolders.PicturesLibrary.GetFolderAsync("GIF Editor");
        }
        catch (Exception)
        {
            folder = await KnownFolders.PicturesLibrary.CreateFolderAsync("GIF Editor");
        }

        //generate next unique name
        ulong num = 0;
        do
        {
            num++;
            filename = num + ".gif";
        }
        while (await folder.FileExists(filename));

        //save file
        var storageFile = await folder.CreateFileAsync(filename, CreationCollisionOption.GenerateUniqueName);
        using (var memoryStream = await storageFile.OpenAsync(FileAccessMode.ReadWrite))
        {
            await Task.Delay(TimeSpan.FromSeconds(1));
            await memoryStream.WriteAsync(buff);
        }

        gifRenderer.Dispose();
        imageProviders = null;
        GC.Collect();
    }
    catch (OutOfMemoryException)
    {
        // Too many / too large frames: tell the user instead of crashing.
        var dlg = new MessageDialog(Localize("memoryError"));
        await dlg.ShowAsync();
    }
    catch (Exception)
    {
        // NOTE(review): any other failure silently navigates back to MainPage;
        // consider at least logging the exception.
        Frame.Navigate(typeof(MainPage));
    }
}//convert images to GIF-file and save it
///////////////////////////////////////////////////////////////////////////
// Use the Nokia Imaging SDK to apply a filter to a selected image

/// <summary>
/// Applies a grayscale filter to the selected image, compares the 128x128 result
/// against the bundled reference images byte-by-byte, and displays the name of
/// the closest match (or "Try again" when none is within the threshold).
/// </summary>
private async void AppBarBtnEdit_Click(object sender, RoutedEventArgs e)
{
    progressRing.IsEnabled = true;
    progressRing.IsActive = true;
    progressRing.Visibility = Visibility.Visible;

    // Create the Imaging SDK effects pipeline and run it.
    // Fix: BitmapImageSource and WriteableBitmapRenderer are IDisposable and were
    // previously leaked.
    using (var imageStream = new BitmapImageSource(originalBitmap.AsBitmap()))
    using (var effect = new FilterEffect(imageStream))
    {
        var filter = new Lumia.Imaging.Adjustments.GrayscaleFilter();
        effect.Filters = new[] { filter };

        // Render the image to a WriteableBitmap.
        using (var renderer = new WriteableBitmapRenderer(effect, originalBitmap))
        {
            editedBitmap = await renderer.RenderAsync();
            editedBitmap.Invalidate();
        }
    }

    Image.Source = originalBitmap;
    Image.Visibility = Visibility.Collapsed;

    // Resize the edited bitmap to the 128x128 comparison size and flatten to bytes.
    var resized1 = editedBitmap.Resize(128, 128, Windows.UI.Xaml.Media.Imaging.WriteableBitmapExtensions.Interpolation.Bilinear);
    byte[] edit_arr = resized1.ToByteArray();

    // Obtain the bundled reference images.
    StorageFolder folder = Windows.ApplicationModel.Package.Current.InstalledLocation;
    StorageFolder subfolder = await folder.GetFolderAsync("Images");
    var pictures = await subfolder.GetFilesAsync();

    double ldiff = 50;             // least percentage difference for an image to be a match
    string dispText = "Try again"; // default message to be displayed
    byte threshold = 124;          // per-byte difference counted as "different"

    // Process all reference images.
    foreach (var file in pictures)
    {
        if (file != null)
        {
            // Use WriteableBitmapEx to easily load the image from a stream.
            using (var stream = await file.OpenReadAsync())
            {
                listBitmap = await new WriteableBitmap(1, 1).FromStream(stream);
            }

            byte[] list_arr = listBitmap.ToByteArray();

            // Fix: the original loop iterated up to list_arr.Length while indexing
            // into arrays sized by edit_arr.Length, throwing IndexOutOfRangeException
            // whenever a reference image is larger than the 128x128 edited bitmap.
            // Compare only the overlapping range.
            int compareLength = Math.Min(edit_arr.Length, list_arr.Length);
            byte[] difference = new byte[compareLength];
            for (int i = 0; i < compareLength; i++)
            {
                difference[i] = (byte)Math.Abs(edit_arr[i] - list_arr[i]);
            }

            // Calculate the percentage of bytes that differ noticeably.
            int differentPixels = 0;
            foreach (byte b in difference)
            {
                if (b > threshold)
                {
                    differentPixels++;
                }
            }

            double percentage = (double)differentPixels / (double)list_arr.Length * 100;
            if (percentage <= ldiff)
            {
                ldiff = percentage;
                dispText = file.DisplayName;
            }
        }
    }

    tb.Text = dispText;
    progressRing.IsEnabled = false;
    progressRing.IsActive = false;
    progressRing.Visibility = Visibility.Collapsed;
    tb.Visibility = Visibility.Visible;
    Image.Visibility = Visibility.Visible;

    // Re-capture the rendered visual into editedBitmap for later use (speech/save).
    var tmp = new RenderTargetBitmap();
    await tmp.RenderAsync(source);
    var buffer = await tmp.GetPixelsAsync();
    var width = tmp.PixelWidth;
    var height = tmp.PixelHeight;
    editedBitmap = await new WriteableBitmap(1, 1).FromPixelBuffer(buffer, width, height);

    AppBarBtnSpeech.IsEnabled = true;
    AppBarBtnSpeech.Visibility = Visibility.Visible;
    AppBarBtnSave.IsEnabled = true;
}
/// <summary>
/// Captures a preview-resolution frame, rotates it to match the current page
/// orientation (mirroring for the front camera) and returns it encoded as JPEG.
/// Returns null when no camera is available or the camera is busy.
/// </summary>
public async Task<IBuffer> TakePictureFast()
{
    // Fix: the old guard (`_photoCaptureDevice == null && _cameraSemaphore.WaitOne(100)`)
    // acquired the semaphore when the device was null and returned without releasing it,
    // permanently leaking one semaphore slot per call. Check the device first, without
    // touching the semaphore.
    if (_photoCaptureDevice == null)
    {
        return null;
    }

    if (_cameraSemaphore.WaitOne(100))
    {
        try
        {
            // Compensate for the sensor mounting angle depending on page orientation.
            int angle = 0;
            if (Orientation.HasFlag(PageOrientation.LandscapeLeft))
            {
                angle = (int)_photoCaptureDevice.SensorRotationInDegrees - 90;
            }
            else if (Orientation.HasFlag(PageOrientation.LandscapeRight))
            {
                angle = (int)_photoCaptureDevice.SensorRotationInDegrees + 90;
            }
            else // PageOrientation.PortraitUp
            {
                angle = (int)_photoCaptureDevice.SensorRotationInDegrees;
            }

            // YUV 4:2:0 semi-planar: one full-size Y layer plus a half-size CbCr layer.
            int layersize = (int)(_photoCaptureDevice.PreviewResolution.Width * _photoCaptureDevice.PreviewResolution.Height);
            int layersizeuv = layersize / 2;

            var buffer = new byte[layersize + layersizeuv];

            _photoCaptureDevice.GetPreviewBufferYCbCr(buffer);

            IBuffer capturedPicture;

            using (var cameraBitmap = new Bitmap(
                _photoCaptureDevice.PreviewResolution,
                ColorMode.Yuv420Sp,
                new uint[] { (uint)_photoCaptureDevice.PreviewResolution.Width, (uint)_photoCaptureDevice.PreviewResolution.Width },
                new IBuffer[] { buffer.AsBuffer(0, layersize), buffer.AsBuffer(layersize, layersizeuv) }))
            using (var source = new BitmapImageSource(cameraBitmap))
            using (var orientationffect = new FilterEffect(source))
            {
                if (_cameraLocation == CameraSensorLocation.Back)
                {
                    orientationffect.Filters = new IFilter[] { new RotationFilter(angle) };
                }
                else
                {
                    // Front camera: rotate the other way and mirror horizontally.
                    orientationffect.Filters = new IFilter[] { new RotationFilter(-angle), new FlipFilter(FlipMode.Horizontal) };
                }

                var recipe = RecipeFactory.Current.CreatePipeline(orientationffect);
                using (var renderer = new JpegRenderer(recipe))
                {
                    capturedPicture = await renderer.RenderAsync();
                }

                if (recipe is IDisposable)
                {
                    (recipe as IDisposable).Dispose();
                }
            }

            return capturedPicture;
        }
        finally
        {
            _cameraSemaphore.Release();
        }
    }

    return null;
}
/// <summary>
/// Renders the full-resolution lens-blurred image (foreground kept sharp via the
/// interactive segmenter) into _bitmap and shows it. Re-entrancy is guarded by
/// the Processing flag; render size/quality are reduced on low-memory devices.
/// </summary>
private async void AttemptUpdateImageAsync()
{
    if (!Processing)
    {
        Processing = true;

        GC.Collect();

        // Detect low-memory devices to limit the maximum render side and quality.
        var lowMemory = false;
        try
        {
            long result = (long)DeviceExtendedProperties.GetValue("ApplicationWorkingSetLimit");
            lowMemory = result / 1024 / 1024 < 300;
        }
        catch (ArgumentOutOfRangeException)
        {
        }

        var maxSide = lowMemory ? 2048.0 : 4096.0;

        Model.OriginalImage.Position = 0;

        using (var source = new StreamImageSource(Model.OriginalImage))
        using (var segmenter = new InteractiveForegroundSegmenter(source))
        using (var annotationsSource = new BitmapImageSource(Model.AnnotationsBitmap))
        {
            segmenter.Quality = lowMemory ? 0.5 : 1;
            segmenter.AnnotationsSource = annotationsSource;

            var foregroundColor = Model.ForegroundBrush.Color;
            var backgroundColor = Model.BackgroundBrush.Color;
            segmenter.ForegroundColor = Windows.UI.Color.FromArgb(foregroundColor.A, foregroundColor.R, foregroundColor.G, foregroundColor.B);
            segmenter.BackgroundColor = Windows.UI.Color.FromArgb(backgroundColor.A, backgroundColor.R, backgroundColor.G, backgroundColor.B);

            var info = await source.GetInfoAsync();

            double scaler, rotation;
            var width = info.ImageSize.Width;
            var height = info.ImageSize.Height;

            if (width > height)
            {
                scaler = maxSide / width;
                rotation = 90;

                var t = width; // We're rotating the image, so swap width and height
                width = height;
                height = t;
            }
            else
            {
                scaler = maxSide / height;
                rotation = 0;
            }

            // NOTE(review): Math.Max keeps scaler >= 1, so images larger than maxSide
            // are NOT downscaled — verify Math.Min was not intended here.
            scaler = Math.Max(1, scaler);

            _bitmap = new WriteableBitmap((int)(width * scaler), (int)(height * scaler));

            using (var blurEffect = new LensBlurEffect(source, new LensBlurPredefinedKernel(Model.KernelShape, (uint)Model.KernelSize)))
            using (var filterEffect = new FilterEffect(blurEffect) { Filters = new[] { new RotationFilter(rotation) } })
            using (var renderer = new WriteableBitmapRenderer(filterEffect, _bitmap))
            {
                // The segmenter mask decides where the blur is applied.
                blurEffect.KernelMap = segmenter;
                try
                {
                    await renderer.RenderAsync();

                    Image.Source = _bitmap;
                    _bitmap.Invalidate();

                    ConfigureViewport();
                }
                catch (Exception ex)
                {
                    System.Diagnostics.Debug.WriteLine("AttemptUpdateImageAsync rendering failed: " + ex.Message);
                }
            }
        }

        Processing = false;
    }
}
/// <summary>
/// Renders a preview-size lens-blurred image into PreviewImage, looping while
/// further update requests arrive (_processingPending). Re-entrancy is guarded
/// by the Processing flag; concurrent calls just set the pending flag.
/// </summary>
private async void AttemptUpdatePreviewAsync()
{
    if (!Processing)
    {
        Processing = true;

        AdaptButtonsToState();

        Model.OriginalImage.Position = 0;

        using (var source = new StreamImageSource(Model.OriginalImage))
        using (var segmenter = new InteractiveForegroundSegmenter(source))
        using (var annotationsSource = new BitmapImageSource(Model.AnnotationsBitmap))
        {
            // Half quality is enough for a preview and renders faster.
            segmenter.Quality = 0.5;
            segmenter.AnnotationsSource = annotationsSource;

            var foregroundColor = Model.ForegroundBrush.Color;
            var backgroundColor = Model.BackgroundBrush.Color;
            segmenter.ForegroundColor = Windows.UI.Color.FromArgb(foregroundColor.A, foregroundColor.R, foregroundColor.G, foregroundColor.B);
            segmenter.BackgroundColor = Windows.UI.Color.FromArgb(backgroundColor.A, backgroundColor.R, backgroundColor.G, backgroundColor.B);

            do
            {
                _processingPending = false;

                var previewBitmap = new WriteableBitmap((int)Model.AnnotationsBitmap.Dimensions.Width, (int)Model.AnnotationsBitmap.Dimensions.Height);

                using (var effect = new LensBlurEffect(source, new LensBlurPredefinedKernel(Model.KernelShape, (uint)Model.KernelSize)))
                using (var renderer = new WriteableBitmapRenderer(effect, previewBitmap))
                {
                    // The segmenter mask decides where the blur is applied.
                    effect.KernelMap = segmenter;
                    try
                    {
                        await renderer.RenderAsync();

                        PreviewImage.Source = previewBitmap;
                        previewBitmap.Invalidate();
                    }
                    catch (Exception ex)
                    {
                        System.Diagnostics.Debug.WriteLine("AttemptUpdatePreviewAsync rendering failed: " + ex.Message);
                    }
                }
            } while (_processingPending);
        }

        Processing = false;

        AdaptButtonsToState();
    }
    else
    {
        _processingPending = true;
    }
}
/// <summary>
/// Renders the low-resolution source through the current pipeline into the back
/// buffer, records the render time, then swaps back/front bitmaps and refreshes
/// the display brush. Failures are swallowed (best-effort live preview); the
/// finally block always signals completion via processFinished().
/// </summary>
async void processRendering()
{
    try
    {
        if (LRImageSource == null)
        {
            return;
        }

        using (var source = new BitmapImageSource(LRImageSource.AsBitmap()))
        {
            var t = DateTime.Now;
            var effect = RecipeFactory.Current.CreatePipeline(source);
            using (var renderer = new WriteableBitmapRenderer(effect, bitmapTmp))
            {
                var result = await renderer.RenderAsync();
                var ms = DateTime.Now.Subtract(t).TotalMilliseconds;
                // Fix: the elapsed-time placeholder previously referenced argument {1}
                // (the pixel height) instead of {2} (the measured milliseconds).
                previewResult = string.Format(
                    @"ImageSize = {0}x{1} t = {2:F} ", bitmapTmp.PixelWidth, bitmapTmp.PixelHeight, ms);
            }
            if (effect is IDisposable)
            {
                (effect as IDisposable).Dispose();
            }
        }

        bitmapTmp.Invalidate();

        // Double-buffering: swap the freshly rendered bitmap with the displayed one.
        var tmp = bitmapTmp;
        bitmapTmp = bitmapDisplayed;
        bitmapDisplayed = tmp;
        bitmapDisplayed.Invalidate();
        displayBackgroundBrush.ImageSource = bitmapDisplayed;
    }
    catch (Exception)
    {
    }
    finally
    {
        processFinished();
    }
}
/// <summary>
/// Tears down the camera media pipeline: closes the frame stream, detaches and
/// disposes the renderer, and releases bitmaps and buffers. Some disposals are
/// deliberately skipped to work around known crashes (see inline notes).
/// </summary>
protected override void CloseMedia()
{
    {
        _camera = null;

        if (_frameStream != null)
        {
            _frameStream.Close();
            _frameStream = null;
        }

        if (_renderer != null)
        {
            // Detach the bitmap before disposing the renderer.
            _renderer.Bitmap = null; // bug : crash on bitmap dispose
            _renderer.Dispose();
            _renderer = null;
        }

        if (_effect != null && _effect is IDisposable)
        {
            // Intentionally NOT disposed — see the workaround note below.
            // (_effect as IDisposable).Dispose(); // bug : crash on CustomEffectBase dispose
            _effect = null;
        }

        if (_source != null)
        {
            _source.Dispose();
            _source = null;
        }

        if (_frameBitmap != null)
        {
            _frameBitmap.Dispose();
            _frameBitmap = null;
        }

        if (_cameraBitmap != null)
        {
            _cameraBitmap.Dispose();
            _cameraBitmap = null;
        }

        // Reset all bookkeeping so OpenMediaAsync can start from a clean state.
        _frameStreamOffset = 0;
        _frameTime = 0;
        _frameBufferSize = 0;
        _frameBuffer = null;
        _cameraFrameBuffer = null;
        _videoStreamDescription = null;
        _currentTime = 0;
    }
}
/// <summary>
/// Initialises the data structures to pass data to the media pipeline via the MediaStreamSource:
/// an RGBA frame buffer/stream for output, a YUV 4:2:0 camera bitmap for input,
/// and the media stream/source descriptions reported to the pipeline.
/// </summary>
protected override void OpenMediaAsync()
{
    // General properties
    _frameBufferSize = (int)_frameSize.Width * (int)_frameSize.Height * 4; // RGBA
    _frameBuffer = new byte[_frameBufferSize];
    _frameStream = new MemoryStream(_frameBuffer);

    // Camera input: YUV 4:2:0 semi-planar — full-size Y layer plus half-size CbCr layer,
    // both windows into one contiguous byte array.
    int layersize = (int)(_frameSize.Width * _frameSize.Height);
    int layersizeuv = layersize / 2;

    _cameraFrameBuffer = new byte[layersize + layersizeuv];

    _cameraBitmap = new Bitmap(
        _frameSize,
        ColorMode.Yuv420Sp,
        new uint[] { (uint)_frameSize.Width, (uint)_frameSize.Width },
        new IBuffer[] { _cameraFrameBuffer.AsBuffer(0, layersize), _cameraFrameBuffer.AsBuffer(layersize, layersizeuv) });

    _source = new BitmapImageSource(_cameraBitmap);

    // Output bitmap shares _frameBuffer, so rendered pixels land directly in the frame stream.
    _frameBitmap = new Bitmap(
        _frameSize,
        ColorMode.Bgra8888,
        4 * (uint)_frameSize.Width,
        _frameBuffer.AsBuffer());

    _renderer = new BitmapRenderer();
    _renderer.Bitmap = _frameBitmap;

    // Media stream attributes
    var mediaStreamAttributes = new Dictionary<MediaStreamAttributeKeys, string>();
    mediaStreamAttributes[MediaStreamAttributeKeys.VideoFourCC] = "RGBA";
    mediaStreamAttributes[MediaStreamAttributeKeys.Width] = ((int)_frameSize.Width).ToString();
    mediaStreamAttributes[MediaStreamAttributeKeys.Height] = ((int)_frameSize.Height).ToString();

    _videoStreamDescription = new MediaStreamDescription(MediaStreamType.Video, mediaStreamAttributes);

    // Media stream descriptions
    var mediaStreamDescriptions = new List<MediaStreamDescription>();
    mediaStreamDescriptions.Add(_videoStreamDescription);

    // Media source attributes: zero duration, not seekable (live camera source).
    var mediaSourceAttributes = new Dictionary<MediaSourceAttributesKeys, string>();
    mediaSourceAttributes[MediaSourceAttributesKeys.Duration] = TimeSpan.FromSeconds(0).Ticks.ToString(CultureInfo.InvariantCulture);
    mediaSourceAttributes[MediaSourceAttributesKeys.CanSeek] = false.ToString();

    _frameTime = (int)TimeSpan.FromSeconds((double)0).Ticks; // Start frame rate timer

    // Report that we finished initializing the internal state and can now accept frame samples.
    ReportOpenMediaCompleted(mediaSourceAttributes, mediaStreamDescriptions);
}
///////////////////////////////////////////////////////////////////////////
// Use the Nokia Imaging SDK to apply a filter to a selected image

/// <summary>
/// Applies a grayscale filter to the selected image, compares the 128x128 result
/// against the bundled reference images byte-by-byte, and displays the name of
/// the closest match (or "Try again" when none is within the threshold).
/// </summary>
private async void AppBarBtnEdit_Click(object sender, RoutedEventArgs e)
{
    progressRing.IsEnabled = true;
    progressRing.IsActive = true;
    progressRing.Visibility = Visibility.Visible;

    // Create the Imaging SDK effects pipeline and run it.
    // Fix: BitmapImageSource and WriteableBitmapRenderer are IDisposable and were
    // previously leaked.
    using (var imageStream = new BitmapImageSource(originalBitmap.AsBitmap()))
    using (var effect = new FilterEffect(imageStream))
    {
        var filter = new Lumia.Imaging.Adjustments.GrayscaleFilter();
        effect.Filters = new[] { filter };

        // Render the image to a WriteableBitmap.
        using (var renderer = new WriteableBitmapRenderer(effect, originalBitmap))
        {
            editedBitmap = await renderer.RenderAsync();
            editedBitmap.Invalidate();
        }
    }

    Image.Source = originalBitmap;
    Image.Visibility = Visibility.Collapsed;

    // Resize the edited bitmap to the 128x128 comparison size and flatten to bytes.
    var resized1 = editedBitmap.Resize(128, 128, Windows.UI.Xaml.Media.Imaging.WriteableBitmapExtensions.Interpolation.Bilinear);
    byte[] edit_arr = resized1.ToByteArray();

    // Obtain the bundled reference images.
    StorageFolder folder = Windows.ApplicationModel.Package.Current.InstalledLocation;
    StorageFolder subfolder = await folder.GetFolderAsync("Images");
    var pictures = await subfolder.GetFilesAsync();

    double ldiff = 50;             // least percentage difference for an image to be a match
    string dispText = "Try again"; // default message to be displayed
    byte threshold = 124;          // per-byte difference counted as "different"

    // Process all reference images.
    foreach (var file in pictures)
    {
        if (file != null)
        {
            // Use WriteableBitmapEx to easily load the image from a stream.
            using (var stream = await file.OpenReadAsync())
            {
                listBitmap = await new WriteableBitmap(1, 1).FromStream(stream);
            }

            byte[] list_arr = listBitmap.ToByteArray();

            // Fix: the original loop iterated up to list_arr.Length while indexing
            // into arrays sized by edit_arr.Length, throwing IndexOutOfRangeException
            // whenever a reference image is larger than the 128x128 edited bitmap.
            // Compare only the overlapping range.
            int compareLength = Math.Min(edit_arr.Length, list_arr.Length);
            byte[] difference = new byte[compareLength];
            for (int i = 0; i < compareLength; i++)
            {
                difference[i] = (byte)Math.Abs(edit_arr[i] - list_arr[i]);
            }

            // Calculate the percentage of bytes that differ noticeably.
            int differentPixels = 0;
            foreach (byte b in difference)
            {
                if (b > threshold)
                {
                    differentPixels++;
                }
            }

            double percentage = (double)differentPixels / (double)list_arr.Length * 100;
            if (percentage <= ldiff)
            {
                ldiff = percentage;
                dispText = file.DisplayName;
            }
        }
    }

    tb.Text = dispText;
    progressRing.IsEnabled = false;
    progressRing.IsActive = false;
    progressRing.Visibility = Visibility.Collapsed;
    tb.Visibility = Visibility.Visible;
    Image.Visibility = Visibility.Visible;

    // Re-capture the rendered visual into editedBitmap for later use (speech/save).
    var tmp = new RenderTargetBitmap();
    await tmp.RenderAsync(source);
    var buffer = await tmp.GetPixelsAsync();
    var width = tmp.PixelWidth;
    var height = tmp.PixelHeight;
    editedBitmap = await new WriteableBitmap(1, 1).FromPixelBuffer(buffer, width, height);

    AppBarBtnSpeech.IsEnabled = true;
    AppBarBtnSpeech.Visibility = Visibility.Visible;
    AppBarBtnSave.IsEnabled = true;
}
/// <summary>
/// Crops the pre-scaled overlay so it lines up with the background image.
/// When no horizontal alignment is requested, the overlay is returned uncropped.
/// </summary>
/// <param name="overlayBitmapImageSource">The scaled overlay bitmap source.</param>
/// <param name="originalOverlayImageProviderInfo">Info (original size) of the unscaled overlay.</param>
/// <param name="backgroundWidth">Background image width in pixels.</param>
/// <param name="backgroundHeight">Background image height in pixels.</param>
/// <returns>An image provider for the (possibly cropped) overlay.</returns>
private IImageProvider Crop(BitmapImageSource overlayBitmapImageSource, ImageProviderInfo originalOverlayImageProviderInfo, int backgroundWidth, int backgroundHeight)
{
    IImageProvider imageProvider;

    int overlayWidth = (int)overlayBitmapImageSource.Bitmap.Dimensions.Width;
    int overlayHeight = (int)overlayBitmapImageSource.Bitmap.Dimensions.Height;

    if (HorizontalAlignment != HorizontalAlignment.None)
    {
        int cropLeft = 0;
        int cropTop = 0;
        int cropWidth = Math.Min(overlayWidth, (int)originalOverlayImageProviderInfo.ImageSize.Width);
        int cropHeight = Math.Min(overlayHeight, (int)originalOverlayImageProviderInfo.ImageSize.Height);

        if ((HorizontalAlignment == HorizontalAlignment.Center) && (overlayWidth < (int)originalOverlayImageProviderInfo.ImageSize.Width))
        {
            cropLeft = Math.Abs(overlayWidth / 2 - backgroundWidth / 2);
            cropWidth -= cropLeft * 2;
        }

        // NOTE(review): VerticalAlignment is compared against HorizontalAlignment.Center —
        // presumably both properties share the same enum type; confirm this is intended.
        if ((VerticalAlignment == HorizontalAlignment.Center) && (overlayHeight < (int)originalOverlayImageProviderInfo.ImageSize.Height))
        {
            cropTop = Math.Abs(overlayHeight / 2 - backgroundHeight / 2);
            cropHeight -= cropTop * 2;
        }

        // Fix: the CropEffect was previously created without a source, so rendering the
        // returned provider would fail. Attach the overlay bitmap as the effect's source.
        imageProvider = new CropEffect(overlayBitmapImageSource, new Rect(cropLeft, cropTop, cropWidth, cropHeight));
    }
    else
    {
        imageProvider = overlayBitmapImageSource;
    }

    return imageProvider;
}
/// <summary>
/// For the given bitmap, renders one filtered square thumbnail per filter model in
/// the list and adds the resulting PhotoThumbnail controls to the given wrap panel.
/// Tapping a thumbnail applies its filter to the photo model and navigates back.
/// </summary>
/// <param name="bitmap">Source bitmap to be filtered</param>
/// <param name="side">Side length of the square thumbnails to generate</param>
/// <param name="list">Filters to apply, one per generated thumbnail</param>
/// <param name="panel">Wrap panel to populate with the generated thumbnails</param>
private async Task RenderThumbnailsAsync(Bitmap bitmap, int side, List<FilterModel> list, WrapPanel panel)
{
    using (var bitmapSource = new BitmapImageSource(bitmap))
    using (var filterEffect = new FilterEffect(bitmapSource))
    {
        foreach (var model in list)
        {
            // Reuse one effect instance, swapping in each model's filter chain.
            filterEffect.Filters = model.Components;

            var thumbnailBitmap = new WriteableBitmap(side, side);

            using (var thumbnailRenderer = new WriteableBitmapRenderer(filterEffect, thumbnailBitmap))
            {
                await thumbnailRenderer.RenderAsync();
                thumbnailBitmap.Invalidate();

                var thumbnail = new PhotoThumbnail()
                {
                    Bitmap = thumbnailBitmap,
                    Text = model.Name,
                    Width = side,
                    Margin = new Thickness(6)
                };

                // Tapping applies the filter, marks the model dirty and leaves the page.
                thumbnail.Tap += (object sender, System.Windows.Input.GestureEventArgs e) =>
                {
                    App.PhotoModel.ApplyFilter(model);
                    App.PhotoModel.Dirty = true;
                    NavigationService.GoBack();
                };

                panel.Children.Add(thumbnail);
            }
        }
    }
}
/// <summary>
/// Renders the full-resolution lens-blurred image (foreground kept sharp via the
/// interactive segmenter) into _bitmap and shows it. Re-entrancy is guarded by
/// the Processing flag; render size/quality are reduced on low-memory devices.
/// </summary>
private async void AttemptUpdateImageAsync()
{
    if (!Processing)
    {
        Processing = true;

        GC.Collect();

        // Detect low-memory devices to limit the maximum render side and quality.
        var lowMemory = false;
        try
        {
            long result = (long)DeviceExtendedProperties.GetValue("ApplicationWorkingSetLimit");
            lowMemory = result / 1024 / 1024 < 300;
        }
        catch (ArgumentOutOfRangeException)
        {
        }

        var maxSide = lowMemory ? 2048.0 : 4096.0;

        Model.OriginalImage.Position = 0;

        using (var source = new StreamImageSource(Model.OriginalImage))
        using (var segmenter = new InteractiveForegroundSegmenter(source))
        using (var annotationsSource = new BitmapImageSource(Model.AnnotationsBitmap))
        {
            segmenter.Quality = lowMemory ? 0.5 : 1;
            segmenter.AnnotationsSource = annotationsSource;

            var foregroundColor = Model.ForegroundBrush.Color;
            var backgroundColor = Model.BackgroundBrush.Color;
            segmenter.ForegroundColor = Windows.UI.Color.FromArgb(foregroundColor.A, foregroundColor.R, foregroundColor.G, foregroundColor.B);
            segmenter.BackgroundColor = Windows.UI.Color.FromArgb(backgroundColor.A, backgroundColor.R, backgroundColor.G, backgroundColor.B);

            var info = await source.GetInfoAsync();

            double scaler, rotation;
            var width = info.ImageSize.Width;
            var height = info.ImageSize.Height;

            if (width > height)
            {
                scaler = maxSide / width;
                rotation = 90;

                var t = width; // We're rotating the image, so swap width and height
                width = height;
                height = t;
            }
            else
            {
                scaler = maxSide / height;
                rotation = 0;
            }

            // NOTE(review): Math.Max keeps scaler >= 1, so images larger than maxSide
            // are NOT downscaled — verify Math.Min was not intended here.
            scaler = Math.Max(1, scaler);

            _bitmap = new WriteableBitmap((int)(width * scaler), (int)(height * scaler));

            using (var blurEffect = new LensBlurEffect(source, new LensBlurPredefinedKernel(Model.KernelShape, (uint)Model.KernelSize)))
            using (var filterEffect = new FilterEffect(blurEffect) { Filters = new[] { new RotationFilter(rotation) } })
            using (var renderer = new WriteableBitmapRenderer(filterEffect, _bitmap))
            {
                // The segmenter mask decides where the blur is applied.
                blurEffect.KernelMap = segmenter;
                try
                {
                    await renderer.RenderAsync();

                    Image.Source = _bitmap;
                    _bitmap.Invalidate();

                    ConfigureViewport();
                }
                catch (Exception ex)
                {
                    System.Diagnostics.Debug.WriteLine("AttemptUpdateImageAsync rendering failed: " + ex.Message);
                }
            }
        }

        Processing = false;
    }
}
/// <summary>
/// Re-renders the foreground/background segmentation mask from the annotation
/// canvas into MaskImage, looping while further requests arrive and the user is
/// not mid-manipulation. Requires both foreground and background annotations;
/// otherwise the mask is cleared. Also stores the annotations bitmap on the model.
/// </summary>
private async void AttemptUpdatePreviewAsync()
{
    if (!Processing)
    {
        Processing = true;

        do
        {
            _processingPending = false;

            if (Model.OriginalImage != null && ForegroundAnnotationsDrawn && BackgroundAnnotationsDrawn)
            {
                Model.OriginalImage.Position = 0;

                var maskBitmap = new WriteableBitmap((int)AnnotationsCanvas.ActualWidth, (int)AnnotationsCanvas.ActualHeight);

                // Snapshot the annotation strokes from the canvas.
                var annotationsBitmap = new WriteableBitmap((int)AnnotationsCanvas.ActualWidth, (int)AnnotationsCanvas.ActualHeight);
                annotationsBitmap.Render(AnnotationsCanvas, new ScaleTransform { ScaleX = 1, ScaleY = 1 });
                annotationsBitmap.Invalidate();

                Model.OriginalImage.Position = 0;

                using (var source = new StreamImageSource(Model.OriginalImage))
                using (var segmenter = new InteractiveForegroundSegmenter(source))
                using (var renderer = new WriteableBitmapRenderer(segmenter, maskBitmap))
                using (var annotationsSource = new BitmapImageSource(annotationsBitmap.AsBitmap()))
                {
                    var foregroundColor = Model.ForegroundBrush.Color;
                    var backgroundColor = Model.BackgroundBrush.Color;
                    segmenter.ForegroundColor = Windows.UI.Color.FromArgb(foregroundColor.A, foregroundColor.R, foregroundColor.G, foregroundColor.B);
                    segmenter.BackgroundColor = Windows.UI.Color.FromArgb(backgroundColor.A, backgroundColor.R, backgroundColor.G, backgroundColor.B);
                    // Half quality is enough for the interactive mask preview.
                    segmenter.Quality = 0.5;
                    segmenter.AnnotationsSource = annotationsSource;

                    await renderer.RenderAsync();

                    MaskImage.Source = maskBitmap;
                    maskBitmap.Invalidate();

                    Model.AnnotationsBitmap = (Bitmap)annotationsBitmap.AsBitmap();
                }
            }
            else
            {
                MaskImage.Source = null;
            }
        } while (_processingPending && !_manipulating);

        Processing = false;
    }
    else
    {
        _processingPending = true;
    }
}
/// <summary>
/// Composites the segmented foreground of the original image over a second
/// background image, then lens-blurs the result (segmenter as kernel map) and
/// shows it in PreviewImage. The JPEG-encoded result is stored in Model.MixedStream.
/// Loops while further update requests arrive (_processingPending).
/// </summary>
private async void AttemptUpdatePreviewAsync2()
{
    if (!Processing)
    {
        Processing = true;

        Model.OriginalImage.Position = 0;

        using (var source = new StreamImageSource(Model.OriginalImage))
        using (var segmenter = new InteractiveForegroundSegmenter(source))
        using (var annotationsSource = new BitmapImageSource(Model.AnnotationsBitmap))
        {
            segmenter.Quality = 0.5;
            segmenter.Source = source;
            segmenter.AnnotationsSource = annotationsSource;

            var foregroundColor = Model.ForegroundBrush.Color;
            var backgroundColor = Model.BackgroundBrush.Color;
            segmenter.ForegroundColor = Windows.UI.Color.FromArgb(foregroundColor.A, foregroundColor.R, foregroundColor.G, foregroundColor.B);
            segmenter.BackgroundColor = Windows.UI.Color.FromArgb(backgroundColor.A, backgroundColor.R, backgroundColor.G, backgroundColor.B);

            do
            {
                _processingPending = false;

                var previewBitmap = new WriteableBitmap((int)Model.AnnotationsBitmap.Dimensions.Width, (int)Model.AnnotationsBitmap.Dimensions.Height);

                // Pass 1: blend the original image over the background, masked by the segmenter.
                using (var backgroundSource = new StreamImageSource(Model.OriginalImage2))
                using (var filterEffect = new FilterEffect(backgroundSource))
                using (var blendFilter = new BlendFilter(source))
                using (var renderer = new WriteableBitmapRenderer(filterEffect, previewBitmap))
                {
                    blendFilter.BlendFunction = BlendFunction.Normal;
                    blendFilter.MaskSource = segmenter;
                    filterEffect.Filters = new IFilter[] { blendFilter };

                    try
                    {
                        await renderer.RenderAsync();
                    }
                    catch
                    {
                    }

                    // Round-trip the blended result through a JPEG stream so it can feed pass 2.
                    var wb = previewBitmap;
                    var fileStream = new MemoryStream();
                    wb.SaveJpeg(fileStream, wb.PixelWidth, wb.PixelHeight, 100, 100);
                    fileStream.Seek(0, SeekOrigin.Begin);

                    // Pass 2: lens-blur the composited image, keeping the segmented area sharp.
                    // NOTE(review): effect, renderer2 and the StreamImageSource created here are
                    // IDisposable but never disposed — likely leaks; confirm and add using blocks.
                    var effect = new LensBlurEffect(new StreamImageSource(fileStream), new LensBlurPredefinedKernel(LensBlurPredefinedKernelShape.Circle, 10));
                    var renderer2 = new WriteableBitmapRenderer(effect, previewBitmap);
                    effect.KernelMap = segmenter;
                    try
                    {
                        await renderer2.RenderAsync();
                    }
                    catch
                    {
                    }

                    PreviewImage.Source = previewBitmap;

                    // Store the final JPEG for later sharing/saving; the stream is
                    // intentionally kept open because Model.MixedStream holds it.
                    wb = previewBitmap;
                    fileStream = new MemoryStream();
                    wb.SaveJpeg(fileStream, wb.PixelWidth, wb.PixelHeight, 100, 100);
                    fileStream.Seek(0, SeekOrigin.Begin);
                    // var m = new MediaLibrary();
                    // m.SavePictureToCameraRoll("test", fileStream);

                    Model.MixedStream = fileStream;
                    // Model.MixedStream = ConvertToStream(previewBitmap);
                    previewBitmap.Invalidate();
                }
            } while (_processingPending);
        }

        Processing = false;
    }
    else
    {
        _processingPending = true;
    }
}
/// <summary>
/// Creates an image source of the overlay, specifying the size of the background image it will be used on. The image source will be sized and cropped correctly.
/// </summary>
/// <param name="backgroundSize">The size of the background image.</param>
/// <returns>The constructed overlay image source.</returns>
public async Task<IImageProvider> CreateAsync(Size backgroundSize)
{
    var uriAndRotation = GetUriAndRotation(backgroundSize);
    var file = await Windows.Storage.StorageFile.GetFileFromApplicationUriAsync(uriAndRotation.Item1).AsTask().ConfigureAwait(false);
    var overlayImageSource = new StorageFileImageSource(file);
    var overlayImageInfo = await overlayImageSource.GetInfoAsync().AsTask().ConfigureAwait(false);
    var overlayImageAspectRatio = overlayImageInfo.ImageSize.Width / overlayImageInfo.ImageSize.Height;

    // Scale the overlay so its longer dimension matches the background's longer dimension,
    // preserving the overlay's aspect ratio.
    // NOTE(review): when the background is exactly square neither branch runs and the
    // overlay keeps its original size — confirm that is intended.
    int overlayImageScaledWidth = (int)overlayImageInfo.ImageSize.Width;
    int overlayImageScaledHeight = (int)overlayImageInfo.ImageSize.Height;

    if ((int)backgroundSize.Width > (int)backgroundSize.Height)
    {
        overlayImageScaledHeight = (int)(backgroundSize.Width / overlayImageAspectRatio);
        overlayImageScaledWidth = (int)(backgroundSize.Width);
    }
    else if ((int)backgroundSize.Width < (int)backgroundSize.Height)
    {
        overlayImageScaledWidth = (int)(backgroundSize.Height * overlayImageAspectRatio);
        overlayImageScaledHeight = (int)(backgroundSize.Height);
    }

    // Render the scaled overlay into a bitmap, then crop it to fit the background.
    var renderer = new BitmapRenderer(overlayImageSource) { Size = new Size(overlayImageScaledWidth, overlayImageScaledHeight) };
    var overlayBitmap = await renderer.RenderAsync().AsTask().ConfigureAwait(false);
    var overlayBitmapImageSource = new BitmapImageSource(overlayBitmap);

    return Crop(overlayBitmapImageSource, overlayImageInfo, (int)overlayImageInfo.ImageSize.Width, (int)overlayImageInfo.ImageSize.Height);
}
/// <summary>
/// Renders the final lens-blurred image to JPEG and saves it to the media library
/// as "lensblur_&lt;ticks&gt;". Re-entrancy is guarded by the Processing flag;
/// segmenter quality is reduced on low-memory devices.
/// </summary>
private async void AttemptSaveAsync()
{
    if (!Processing)
    {
        Processing = true;

        AdaptButtonsToState();

        GC.Collect();

        // Detect low-memory devices to limit segmentation quality.
        var lowMemory = false;
        try
        {
            long result = (long)DeviceExtendedProperties.GetValue("ApplicationWorkingSetLimit");
            lowMemory = result / 1024 / 1024 < 300;
        }
        catch (ArgumentOutOfRangeException)
        {
        }

        IBuffer buffer = null;

        Model.OriginalImage.Position = 0;

        using (var source = new StreamImageSource(Model.OriginalImage))
        using (var segmenter = new InteractiveForegroundSegmenter(source))
        using (var annotationsSource = new BitmapImageSource(Model.AnnotationsBitmap))
        {
            segmenter.Quality = lowMemory ? 0.5 : 1;
            segmenter.AnnotationsSource = annotationsSource;

            var foregroundColor = Model.ForegroundBrush.Color;
            var backgroundColor = Model.BackgroundBrush.Color;
            segmenter.ForegroundColor = Windows.UI.Color.FromArgb(foregroundColor.A, foregroundColor.R, foregroundColor.G, foregroundColor.B);
            segmenter.BackgroundColor = Windows.UI.Color.FromArgb(backgroundColor.A, backgroundColor.R, backgroundColor.G, backgroundColor.B);

            using (var effect = new LensBlurEffect(source, new LensBlurPredefinedKernel(Model.KernelShape, (uint)Model.KernelSize)))
            using (var renderer = new JpegRenderer(effect))
            {
                // The segmenter mask decides where the blur is applied.
                effect.KernelMap = segmenter;
                try
                {
                    buffer = await renderer.RenderAsync();
                }
                catch (Exception ex)
                {
                    System.Diagnostics.Debug.WriteLine("AttemptSave rendering failed: " + ex.Message);
                }
            }
        }

        // Only save when rendering succeeded.
        if (buffer != null)
        {
            using (var library = new MediaLibrary())
            using (var stream = buffer.AsStream())
            {
                library.SavePicture("lensblur_" + DateTime.Now.Ticks, stream);

                Model.Saved = true;

                AdaptButtonsToState();
            }
        }

        Processing = false;

        AdaptButtonsToState();
    }
}
/// <summary>
/// Re-renders the foreground/background segmentation mask from the annotation
/// canvas into MaskImage, looping while further requests arrive and the user is
/// not mid-manipulation. Requires both foreground and background annotations;
/// otherwise the mask is cleared. Also stores the annotations bitmap on the model.
/// </summary>
private async void AttemptUpdatePreviewAsync()
{
    if (!Processing)
    {
        Processing = true;

        do
        {
            _processingPending = false;

            if (Model.OriginalImage != null && ForegroundAnnotationsDrawn && BackgroundAnnotationsDrawn)
            {
                Model.OriginalImage.Position = 0;

                var maskBitmap = new WriteableBitmap((int)AnnotationsCanvas.ActualWidth, (int)AnnotationsCanvas.ActualHeight);

                // Snapshot the annotation strokes from the canvas.
                var annotationsBitmap = new WriteableBitmap((int)AnnotationsCanvas.ActualWidth, (int)AnnotationsCanvas.ActualHeight);
                annotationsBitmap.Render(AnnotationsCanvas, new ScaleTransform { ScaleX = 1, ScaleY = 1 });
                annotationsBitmap.Invalidate();

                Model.OriginalImage.Position = 0;

                using (var source = new StreamImageSource(Model.OriginalImage))
                using (var segmenter = new InteractiveForegroundSegmenter(source))
                using (var renderer = new WriteableBitmapRenderer(segmenter, maskBitmap))
                using (var annotationsSource = new BitmapImageSource(annotationsBitmap.AsBitmap()))
                {
                    var foregroundColor = Model.ForegroundBrush.Color;
                    var backgroundColor = Model.BackgroundBrush.Color;
                    segmenter.ForegroundColor = Windows.UI.Color.FromArgb(foregroundColor.A, foregroundColor.R, foregroundColor.G, foregroundColor.B);
                    segmenter.BackgroundColor = Windows.UI.Color.FromArgb(backgroundColor.A, backgroundColor.R, backgroundColor.G, backgroundColor.B);
                    // Half quality is enough for the interactive mask preview.
                    segmenter.Quality = 0.5;
                    segmenter.AnnotationsSource = annotationsSource;

                    await renderer.RenderAsync();

                    MaskImage.Source = maskBitmap;
                    maskBitmap.Invalidate();

                    Model.AnnotationsBitmap = (Bitmap)annotationsBitmap.AsBitmap();
                }
            }
            else
            {
                MaskImage.Source = null;
            }
        } while (_processingPending && !_manipulating);

        Processing = false;
    }
    else
    {
        _processingPending = true;
    }
}
/// <summary>
/// Converts a preview Bitmap into a WriteableBitmap of the renderer's preview size,
/// reusing the supplied WriteableBitmap when its dimensions already match and
/// allocating a new one otherwise.
/// </summary>
/// <param name="bitmap">The preview frame to convert.</param>
/// <param name="writeableBitmap">A candidate target bitmap to reuse, or null.</param>
/// <returns>The WriteableBitmap holding the converted preview.</returns>
private async Task<WriteableBitmap> ConvertPreviewToWriteableBitmap(Bitmap bitmap, WriteableBitmap writeableBitmap)
{
    var targetWidth = (int)m_renderer.PreviewSize.Width;
    var targetHeight = (int)m_renderer.PreviewSize.Height;

    bool needsNewTarget = writeableBitmap == null
        || writeableBitmap.PixelWidth != targetWidth
        || writeableBitmap.PixelHeight != targetHeight;
    if (needsNewTarget)
    {
        writeableBitmap = new WriteableBitmap(targetWidth, targetHeight);
    }

    if (bitmap.Dimensions == m_renderer.PreviewSize)
    {
        // Already at the display size, so a plain copy is enough.
        bitmap.CopyTo(writeableBitmap);
        writeableBitmap.Invalidate();
    }
    else
    {
        // Re-render the Bitmap into the WriteableBitmap at the target size.
        using (var previewSource = new BitmapImageSource(bitmap))
        using (var previewRenderer = new WriteableBitmapRenderer(previewSource, writeableBitmap))
        {
            previewRenderer.RenderOptions = RenderOptions.Cpu;
            await previewRenderer.RenderAsync().AsTask();
            writeableBitmap.Invalidate();
        }
    }

    return writeableBitmap;
}
/// <summary>
/// Opens the back camera and wires four renderers that share one YUV preview
/// bitmap, each applying a different filter chain into its own 300x300 display
/// bitmap. Frames are pushed via the PreviewFrameAvailable event.
/// </summary>
async void startDisplay()
{
    camera = await PhotoCaptureDevice.OpenAsync(CameraSensorLocation.Back, PhotoCaptureDevice.GetAvailableCaptureResolutions(CameraSensorLocation.Back).First());

    // YUV 4:2:0 semi-planar: full-size Y layer plus half-size CbCr layer in one array.
    int layersize = (int)(camera.PreviewResolution.Width * camera.PreviewResolution.Height);
    int layersizeuv = layersize / 2;

    buffer = new byte[layersize + layersizeuv];

    b1 = new WriteableBitmap(300, 300);
    b2 = new WriteableBitmap(300, 300);
    b3 = new WriteableBitmap(300, 300);
    b4 = new WriteableBitmap(300, 300);

    d1.Source = b1;
    d2.Source = b2;
    d3.Source = b3;
    d4.Source = b4;

    /* var previewBitmap = new Bitmap(
        camera.PreviewResolution,
        ColorMode.Bgra8888,
        (uint)(4*camera.PreviewResolution.Width),
        buffer.AsBuffer());*/

    var bbbb = buffer.AsBuffer(layersize, layersizeuv);

    // One shared preview bitmap wrapping the camera frame buffer.
    var previewBitmap = new Bitmap(
        camera.PreviewResolution,
        ColorMode.Yuv420Sp,
        new uint[] { (uint)camera.PreviewResolution.Width, (uint)camera.PreviewResolution.Width },
        new IBuffer[] { buffer.AsBuffer(0, layersize), bbbb });

    // Renderer 1: rotation only (plain preview).
    {
        var input = new BitmapImageSource(previewBitmap);
        var effet = new FilterEffect(input);
        effet.Filters = new IFilter[] { new RotationFilter(90) };
        renderer1 = new WriteableBitmapRenderer(effet, b1);
    }
    // Renderer 2: cartoon + hue/saturation.
    {
        var input = new BitmapImageSource(previewBitmap);
        var effet = new FilterEffect(input);
        effet.Filters = new IFilter[] { new RotationFilter(90), new CartoonFilter(), new HueSaturationFilter() };
        renderer2 = new WriteableBitmapRenderer(effet, b2);
    }
    // Renderer 3: magic pen + negative.
    {
        var input = new BitmapImageSource(previewBitmap);
        var effet = new FilterEffect(input);
        effet.Filters = new IFilter[] { new RotationFilter(90), new MagicPenFilter(), new NegativeFilter() };
        renderer3 = new WriteableBitmapRenderer(effet, b3);
    }
    // Renderer 4: sketch.
    {
        var input = new BitmapImageSource(previewBitmap);
        var effet = new FilterEffect(input);
        effet.Filters = new IFilter[] { new RotationFilter(90), new SketchFilter() };
        renderer4 = new WriteableBitmapRenderer(effet, b4);
    }

    camera.PreviewFrameAvailable += camera_PreviewFrameAvailable;
}
/// <summary>
/// Decodes a barcode from a gray-scale camera frame. On a valid EAN hit it shows
/// the barcode, renders the frame into bitmapWithBarcode and, when the barcode
/// filter accepts it, adds the product to the list on the UI thread.
/// </summary>
/// <param name="bitmap">Gray8 camera frame to analyze.</param>
/// <param name="time">Frame timestamp (currently unused).</param>
private void AnalyzeBitmap(Bitmap bitmap, TimeSpan time)
{
    if (ProductDetailsPanel.IsOpen)
        return;

    Result result = barcodeReader.Decode(
        bitmap.Buffers[0].Buffer.ToArray(),
        (int)bitmap.Buffers[0].Pitch, // Should be width here but I haven't found a way to pass both width and stride to ZXing yet
        (int)bitmap.Dimensions.Height,
        BitmapFormat.Gray8);

    if (result != null && IsValidEan(result.Text))
    {
        if (autoFocus != null)
            autoFocus.BarcodeFound = true;

        string barcode = result.Text;
        var ignore = Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () => { ShowFilteringBarcode(barcode); });

        // Fix: BitmapImageSource and WriteableBitmapRenderer are IDisposable and were
        // previously leaked on every recognized frame.
        using (BitmapImageSource bmpImgSrc = new BitmapImageSource(bitmap))
        using (WriteableBitmapRenderer renderer = new WriteableBitmapRenderer(bmpImgSrc, bitmapWithBarcode))
        {
            // NOTE(review): blocking on .Result — assumed to run off the UI thread
            // (camera frame callback); confirm to rule out deadlocks.
            bitmapWithBarcode = renderer.RenderAsync().AsTask().Result;
        }

        if (barcodeFilter.Update(barcode))
        {
            Debug.WriteLine(barcode);
            ignore = Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
            {
                ProductsListBox.AddProduct(barcode, bitmapWithBarcode);
                // Hand the rendered bitmap to the list and start a fresh one.
                bitmapWithBarcode = new WriteableBitmap(bitmapWithBarcode.PixelWidth, bitmapWithBarcode.PixelHeight);
                if (HintTextBlock.Visibility == Visibility.Visible)
                    HintTextBlock.Visibility = Visibility.Collapsed;
                ShowActiveBarcode(barcode);
            });
        }
    }
    else
    {
        if (autoFocus != null)
            autoFocus.BarcodeFound = false;
    }
}