// private void SliderContrast_ValueChanged(object sender, RangeBaseValueChangedEventArgs e) // { // if (EffectIndex is -1) { return; } // int value = Convert.ToInt32(SliderContrast.Value); // if (value is 0) { NoFilter(); return; } // switch (EffectIndex) // { // case 0: /*add*/ break; // case 1: Brightness(value); break; // case 2: Contrast(value); break; // case 3: Temperature(value); break; // case 4: ColorBoost(value); break; // case 5: /*Add*/ break; // case 6: /*add*/ break; // } // } // private void EditsList_ItemClick(object sender, ItemClickEventArgs e) // { // var data = e.ClickedItem as GridViewEditItem; // EffectIndex = data.Target; // _NameEffect.Text = data.Text; // EditRoot.Visibility = Visibility.Visible; // } // private void Cancel_Click(object sender, RoutedEventArgs e) // { // EditRoot.Visibility = Visibility.Collapsed; // } // private void Done_Click(object sender, RoutedEventArgs e) // { // EditRoot.Visibility = Visibility.Collapsed; // } async Task <Uri> SaveToImage() { using (var source = new StorageFileImageSource(imageStorageFile)) using (var renderer = new JpegRenderer(LastEffect, JpegOutputColorMode.Yuv420)) { var info = await source.GetInfoAsync(); var R = AspectRatioHelper.Aspect(Convert.ToInt32(info.ImageSize.Width), Convert.ToInt32(info.ImageSize.Height)); if (!SupportedAspectRatio(R)) { var max = Math.Max(info.ImageSize.Height, info.ImageSize.Width); renderer.Size = new Size(max, max); } var saveAsTarget = await ApplicationData.Current.LocalFolder.CreateFileAsync("file.Jpg", CreationCollisionOption.GenerateUniqueName); var render = await renderer.RenderAsync(); using (var fs = await saveAsTarget.OpenAsync(FileAccessMode.ReadWrite)) { await fs.WriteAsync(render); await fs.FlushAsync(); return(new Uri($"ms-appdata:///local/{saveAsTarget.Name}", UriKind.RelativeOrAbsolute)); } } }
public async Task SaveToPicturesLibraryAsync(InMemoryRandomAccessStream stream) { try { var rotationAngle = ConvertDeviceOrientationToDegrees(GetCameraOrientation()); IsCaptureEnabled = false; var file = await KnownFolders.PicturesLibrary.CreateFileAsync("LumiaImagingCapturedImage.jpg", CreationCollisionOption.GenerateUniqueName); stream.Seek(0); var imageSource = new RandomAccessStreamImageSource(stream); using (var jpegRenderer = new JpegRenderer(imageSource)) using (var fileStream = await file.OpenAsync(FileAccessMode.ReadWrite)) { if (rotationAngle != 0) { jpegRenderer.Source = new RotationEffect(imageSource, rotationAngle - 180); // Adjust to the Lumia.Imaging rotation convention } // Jpeg renderer gives the raw buffer containing the filtered image. IBuffer jpegBuffer = await jpegRenderer.RenderAsync(); await fileStream.WriteAsync(jpegBuffer); await fileStream.FlushAsync(); } } catch (Exception ex) { Debug.WriteLine(ex.Message); } IsCaptureEnabled = true; }
private async void AttemptSave() { if (Processing) { return; } Processing = true; GC.Collect(); var lowMemory = false; try { var result = (long)DeviceExtendedProperties.GetValue("ApplicationWorkingSetLimit"); lowMemory = result / 1024 / 1024 < 300; } catch (ArgumentOutOfRangeException) { } IBuffer buffer; Model.OriginalImage.Position = 0; using (var source = new StreamImageSource(Model.OriginalImage)) using (var segmenter = new InteractiveForegroundSegmenter(source)) using (var annotationsSource = new BitmapImageSource(Model.AnnotationsBitmap)) { segmenter.Quality = lowMemory ? 0.5 : 1; segmenter.AnnotationsSource = annotationsSource; var foregroundColor = Model.ForegroundBrush.Color; var backgroundColor = Model.BackgroundBrush.Color; segmenter.ForegroundColor = Windows.UI.Color.FromArgb(foregroundColor.A, foregroundColor.R, foregroundColor.G, foregroundColor.B); segmenter.BackgroundColor = Windows.UI.Color.FromArgb(backgroundColor.A, backgroundColor.R, backgroundColor.G, backgroundColor.B); using (var effect = new LensBlurEffect(source, new LensBlurPredefinedKernel(Shape, (uint)SizeSlider.Value))) using (var renderer = new JpegRenderer(effect)) { effect.KernelMap = segmenter; buffer = await renderer.RenderAsync(); } } using (var library = new MediaLibrary()) using (var stream = buffer.AsStream()) { library.SavePicture("lensblur_" + DateTime.Now.Ticks, stream); Model.Saved = true; AdaptButtonsToState(); } Processing = false; }
public async Task<StorageFile> SaveToImageX(StorageFile file, bool calculateMax = true, bool isCached = true) { try { Helper.CreateCachedFolder(); //{"message": "Uploaded image isn't in an allowed aspect ratio", "status": "fail"} using (var source = new StorageFileImageSource(file)) using (var renderer = new JpegRenderer(source, JpegOutputColorMode.Yuv422, OutputOption.Stretch)) { var info = await source.GetInfoAsync(); var size = AspectRatioHelper.GetDesireSize(info.ImageSize.Width, info.ImageSize.Height, info.ImageSize.Height < info.ImageSize.Width); //var max = Math.Max(size.Height, size.Width); var ratio = info.ImageSize.Height > info.ImageSize.Width ? info.ImageSize.Height / info.ImageSize.Width : info.ImageSize.Width / info.ImageSize.Height; var h = (size.Height / (float)ratio); var w = (size.Width / (float)ratio); if (calculateMax) renderer.Size = new Size(Math.Round(w), Math.Round(h)); else renderer.Size = new Size(info.ImageSize.Width, info.ImageSize.Height); var folder = await GetOutputFolder(isCached); var saveAsTarget = await folder.CreateFileAsync(Helper.GenerateString("IMG") + ".jpg", CreationCollisionOption.GenerateUniqueName); var render = await renderer.RenderAsync(); using (var fs = await saveAsTarget.OpenAsync(FileAccessMode.ReadWrite)) { await fs.WriteAsync(render); await fs.FlushAsync(); return await SaveToImageX2(saveAsTarget); } } } catch { } return file; }
/// <summary> /// Renders the current image with the applied filters to a buffer and returns it. /// Meant to be used where the filtered image is, for example, going to be /// saved to a file. /// </summary> /// <param name="buffer">The buffer containing the original image data.</param> /// <returns>Buffer containing the filtered image data.</returns> public async Task<IBuffer> RenderJpegAsync(IBuffer buffer) { using (BufferImageSource source = new BufferImageSource(buffer)) using (JpegRenderer renderer = new JpegRenderer(_effect)) { return await renderer.RenderAsync(); } }
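A minimal caller sketch for the helper above, assuming it sits in the same class; the buffer variable, file name, and helper method name are hypothetical, and the WinRT FileIO API is used to write the result out:

private async Task SaveFilteredJpegAsync(IBuffer originalJpegBuffer)
{
    // Hypothetical helper: render the filtered JPEG and write it to a new local file.
    StorageFile target = await ApplicationData.Current.LocalFolder.CreateFileAsync(
        "filtered.jpg", CreationCollisionOption.GenerateUniqueName);
    IBuffer filteredJpeg = await RenderJpegAsync(originalJpegBuffer);
    // FileIO.WriteBufferAsync writes the rendered buffer straight to the file.
    await FileIO.WriteBufferAsync(target, filteredJpeg);
}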
public async Task<IBuffer> ToBufferAsync() { using (var jpegRenderer = new JpegRenderer(_source)) { var pixels = await jpegRenderer.RenderAsync(); return pixels; } }
public async Task RenderHighpassEffectGroupWithGrayscaleEnabled() { using (var source = KnownImages.MikikoLynn.ImageSource) using (var highpassEffect = new HighpassEffectGroup(8, true, 1) { Source = source }) using (var renderer = new JpegRenderer(highpassEffect)) { var buffer = await renderer.RenderAsync(); ImageResults.Instance.SaveToPicturesLibrary(buffer); } }
public async Task<Stream> ToStreamAsync() { using (var jpegRenderer = new JpegRenderer(_source)) { var pixels = await jpegRenderer.RenderAsync(); return pixels.AsStream(); } }
public async Task RenderEmptyEffectGroup() { using (var source = KnownImages.MikikoLynn.ImageSource) using (var effectGroup = new EmptyEffectGroup(source)) using (var renderer = new JpegRenderer(effectGroup)) { var buffer = await renderer.RenderAsync(); ImageResults.Instance.SaveToPicturesLibrary(buffer); } }
public async Task RenderWithZeroWidthFocusAreaSuccedes() { using (var source = KnownImages.Nurse.ImageSource) using (var effect = new EllipticFocusDepthOfFieldEffect(source, new FocusEllipse(new Point(0.5, 0.3), new EllipseRadius(0.0, 0.0)), 1.0, DepthOfFieldQuality.Full)) using (var renderer = new JpegRenderer(effect)) { var buffer = await renderer.RenderAsync(); ImageResults.Instance.SaveToPicturesLibrary(buffer, "EllipticFocusDepthOfFieldEffectTest_ZeroWidthFocusArea.jpg"); } }
public async Task RenderWithFullFocusAreaSuccedes() { using (var source = await KnownImages.Nurse.GetImageSourceAsync()) using (var effect = new EllipticFocusDepthOfFieldEffect(source, new FocusEllipse(new Point(0.5, 0.3), new EllipseRadius(1.0, 1.0)), 1.0, DepthOfFieldQuality.Full)) using (var renderer = new JpegRenderer(effect)) { var buffer = await renderer.RenderAsync(); await FileUtilities.SaveToPicturesLibraryAsync(buffer, "EllipticFocusDepthOfFieldEffectTest_FullFocusArea.jpg"); } }
private static async Task RenderEffect(FocusBand focusBand, DepthOfFieldQuality quality, [CallerMemberName] string testName = "") { using (var source = await KnownImages.Nurse.GetImageSourceAsync()) using (var effect = new LensTiltDepthOfFieldEffect(source, focusBand, 1.0, 1.0, quality)) using (var renderer = new JpegRenderer(effect)) { var buffer = await renderer.RenderAsync(); await FileUtilities.SaveToPicturesLibraryAsync(buffer, "LensTiltDepthOfFieldEffectTest_" + testName + ".jpg"); } }
public async Task RenderPreviewImage() { using (var source = KnownImages.Nurse.ImageSource) using (var effect = new EllipticFocusDepthOfFieldEffect(source, new FocusEllipse(new Point(0.5, 0.3), new EllipseRadius(0.2, 0.2)), 1.0, DepthOfFieldQuality.Preview)) using (var renderer = new JpegRenderer(effect)) { var buffer = await renderer.RenderAsync(); ImageResults.Instance.SaveToPicturesLibrary(buffer, "EllipticFocusDepthOfFieldEffectTest_Preview.jpg"); } }
public async Task RenderEmptyCustomEffect() { using (var source = KnownImages.MikikoLynn.ImageSource) using (var customEffect = new EmptyCustomEffect(source)) using (var renderer = new JpegRenderer(customEffect)) { var buffer = await renderer.RenderAsync(); ImageResults.Instance.SaveToPicturesLibrary(buffer); } }
public async Task<BitmapImage> ToImageAsync() { using (var jpegRenderer = new JpegRenderer(_source)) { var biSource = new BitmapImage(); var pixels = await jpegRenderer.RenderAsync(); await biSource.SetSourceAsync(pixels.AsStream().AsRandomAccessStream()); return biSource; } }
private async void Save_Tapped(object sender, TappedRoutedEventArgs e) { var file = await KnownFolders.SavedPictures.CreateFileAsync("OutputEffect.jpg", CreationCollisionOption.GenerateUniqueName); using (var renderer = new JpegRenderer(ImageElement.Source)) using (var outputStream = (await file.OpenStreamForWriteAsync())) { var buffer = await renderer.RenderAsync(); await buffer.AsStream().CopyToAsync(outputStream); } }
public async Task RenderEmptyCustomFilter() { var customFilter = new EmptyCustomFilter(); using (var source = KnownImages.MikikoLynn.ImageSource) using (var filterEffect = new FilterEffect(source) { Filters = new[] { customFilter } }) using (var renderer = new JpegRenderer(filterEffect)) { var buffer = await renderer.RenderAsync(); ImageResults.Instance.SaveToPicturesLibrary(buffer); } }
/// <summary> /// Renders the current image with the applied filters to a buffer and returns it. /// Meant to be used where the filtered image is, for example, going to be /// saved to a file. /// </summary> /// <returns>Buffer containing the filtered image data.</returns> public async Task<IBuffer> RenderFullBufferAsync() { using (var source = new BufferImageSource(_buffer)) using (var effect = new FilterEffect(source) { Filters = _components }) using (var renderer = new JpegRenderer(effect)) { return await renderer.RenderAsync(); } }
private static async Task CreateImageEffectAsync(AppServiceRequestReceivedEventArgs message, BackgroundTaskDeferral wholeTaskDeferral) { try { var messageDef = message.GetDeferral(); try { var targetFileToken = message.Request.Message["targetFileToken"].ToString(); var targetFile = await SharedStorageAccessManager.RedeemTokenForFileAsync(targetFileToken); var outputFile = await ApplicationData.Current.TemporaryFolder.CreateFileAsync(Guid.NewGuid().ToString("N") + ".jpg"); var foregroundImage = await StorageFile.GetFileFromApplicationUriAsync(new Uri("ms-appx:///Assets/Paillete.png")); // use the blend effect using (var inputStream = await targetFile.OpenReadAsync()) using (var foregroundImageStream = await foregroundImage.OpenReadAsync()) { var effect = new BlendEffect(new RandomAccessStreamImageSource(inputStream), new RandomAccessStreamImageSource(foregroundImageStream)); inputStream.Seek(0); foregroundImageStream.Seek(0); //effect.Source = new RandomAccessStreamImageSource(inputStream); //effect.BlendFunction = BlendFunction.Add; //effect.ForegroundSource = new RandomAccessStreamImageSource(foregroundImageStream); using (var jpegRenderer = new JpegRenderer(effect)) using (var stream = await outputFile.OpenAsync(FileAccessMode.ReadWrite)) { // Jpeg renderer gives the raw buffer that contains the filtered image. IBuffer jpegBuffer = await jpegRenderer.RenderAsync(); await stream.WriteAsync(jpegBuffer); await stream.FlushAsync(); } } var outputFileToken = SharedStorageAccessManager.AddFile(outputFile); await message.Request.SendResponseAsync(new ValueSet { { "outputFileToken", outputFileToken } }); } finally { // complete the message deferral messageDef.Complete(); } } finally { // no further communication will take place wholeTaskDeferral.Complete(); } }
public async Task RenderFullQualityImage() { using (var source = KnownImages.Nurse.ImageSource) using (var annotations = KnownImages.NurseScribbles.ImageSource) using (var segmenter = new InteractiveForegroundSegmenter(source, Color.FromArgb(255, 255, 255, 255), Color.FromArgb(255, 255, 255, 0), annotations)) using (var effect = new FocusObjectDepthOfFieldEffect(source, segmenter, new Point(0, 0.8), new Point(1, 0.8), 1.0, 1.0, DepthOfFieldQuality.Full)) using (var renderer = new JpegRenderer(effect)) { var buffer = await renderer.RenderAsync(); ImageResults.Instance.SaveToPicturesLibrary(buffer); } }
public async Task RenderProvider() { var tile = KnownImages.MikikoLynn; using (var tileSource = tile.ImageSource) using (var source = new RepeatedTileImageSource(new Size(2048, 2048), tileSource, tile.Size)) using (var renderer = new JpegRenderer(source)) { var buffer = await renderer.RenderAsync(); ImageResults.Instance.SaveToPicturesLibrary(buffer, "RepeatedTileBasicRender.jpg"); } }
public async Task<WriteableBitmap> ToWritableBitmapAsync() { using (var jpegRenderer = new JpegRenderer(_source)) { var size = await GetSize(_source); var biSource = new WriteableBitmap((int)Math.Ceiling(size.Width), (int)Math.Ceiling(size.Height)); var pixels = await jpegRenderer.RenderAsync(); await biSource.SetSourceAsync(pixels.AsStream().AsRandomAccessStream()); return biSource; } }
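If only a WriteableBitmap is needed, the JPEG encode/decode round trip above can be avoided by rendering directly with the SDK's WriteableBitmapRenderer. A sketch under that assumption, reusing the `_source` field and `GetSize` helper from the method above; the method name is hypothetical:

public async Task<WriteableBitmap> ToWritableBitmapDirectAsync()
{
    var size = await GetSize(_source);
    var bitmap = new WriteableBitmap((int)Math.Ceiling(size.Width), (int)Math.Ceiling(size.Height));
    // Render the image provider straight into the bitmap, with no intermediate JPEG buffer.
    using (var renderer = new WriteableBitmapRenderer(_source, bitmap))
    {
        await renderer.RenderAsync();
    }
    bitmap.Invalidate();
    return bitmap;
}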
public async Task RenderProviderAndEffect() { var tile = KnownImages.MikikoLynn; using (var tileSource = tile.ImageSource) using (var source = new RepeatedTileImageSource(new Size(3048, 3048), tileSource, tile.Size)) using (var filterEffect = new FilterEffect(source)) using (var renderer = new JpegRenderer(filterEffect)) { filterEffect.Filters = new[] { new GrayscaleFilter() }; var buffer = await renderer.RenderAsync(); ImageResults.Instance.SaveToPicturesLibrary(buffer, "RepeatedTileGrayscaleRender.jpg"); } }
public async Task RenderProviderCreatedFromTileSource() { var tile = KnownImages.MikikoLynn; using (var tileSource = tile.ImageSource) using (var source = await RepeatedTileImageSource.CreateFromTileSource(new Size(4048, 4048), tileSource)) using (var filterEffect = new FilterEffect(source)) using (var renderer = new JpegRenderer(filterEffect)) { filterEffect.Filters = new[] { new CartoonFilter() }; var buffer = await renderer.RenderAsync(); ImageResults.Instance.SaveToPicturesLibrary(buffer, "RepeatedTileCartoonRender.jpg"); } }
public async Task RenderRgbGainCustomFilter() { var customFilter = new RgbGainCustomFilter() { RedLevel = 1.5f, GreenLevel = 1.0f, BlueLevel = 0.75f }; using (var source = KnownImages.MikikoLynn.ImageSource) using (var filterEffect = new FilterEffect(source) { Filters = new[] { customFilter } }) using (var renderer = new JpegRenderer(filterEffect)) { var buffer = await renderer.RenderAsync(); ImageResults.Instance.SaveToPicturesLibrary(buffer); } }
private void CropButton_Click(object sender, EventArgs e) { GeneralTransform transform = Crop.TransformToVisual(Image); var topLeftWindowsPoint = transform.Transform(new Point(0, 0)); topLeftWindowsPoint.X /= _scale; topLeftWindowsPoint.Y /= _scale; var bottomRightWindowsPoint = transform.Transform(new Point(Crop.Width, Crop.Height)); bottomRightWindowsPoint.X /= _scale; bottomRightWindowsPoint.Y /= _scale; var topLeftFoundationPoint = new Windows.Foundation.Point(Math.Round(topLeftWindowsPoint.X), Math.Round(topLeftWindowsPoint.Y)); var bottomRightFoundationPoint = new Windows.Foundation.Point(Math.Round(bottomRightWindowsPoint.X), Math.Round(bottomRightWindowsPoint.Y)); var reframingFilter = new ReframingFilter() { ReframingArea = new Windows.Foundation.Rect(topLeftFoundationPoint, bottomRightFoundationPoint) }; var filterEffect = new FilterEffect(_source) { Filters = new List <IFilter>() { reframingFilter } }; var renderer = new JpegRenderer(filterEffect) { OutputOption = OutputOption.PreserveAspectRatio, Quality = 1.0, Size = new Windows.Foundation.Size(bottomRightFoundationPoint.X - topLeftFoundationPoint.X, bottomRightFoundationPoint.Y - topLeftFoundationPoint.Y) }; IBuffer buffer = null; Task.Run(async() => { buffer = await renderer.RenderAsync(); }).Wait(); PhotoModel.Singleton.FromNewImage(buffer.AsStream(), PhotoOrigin.Reframe); NavigationService.GoBack(); }
private async void save_Click(object sender, EventArgs e) { if (_fotoEffect == null) { return; } var jpegRenderer = new JpegRenderer(_fotoEffect); // Render the JPEG. IBuffer jpegOutput = await jpegRenderer.RenderAsync(); // Save the jpg image to the media library. MediaLibrary library = new MediaLibrary(); string fileName = string.Format("CartoonImage_{0:G}", DateTime.Now); var picture = library.SavePicture(fileName, jpegOutput.AsStream()); MessageBox.Show("Image saved!"); }
private async Task Render() { if (!_rendering && !_stop) { _rendering = true; // Render camera preview frame to screen _writeableBitmapRenderer.Source = _cameraPreviewImageSource; _writeableBitmapRenderer.WriteableBitmap = _writeableBitmap; await _writeableBitmapRenderer.RenderAsync(); await Windows.ApplicationModel.Core.CoreApplication.MainView.CoreWindow.Dispatcher.RunAsync( CoreDispatcherPriority.High, () => { _writeableBitmap.Invalidate(); }); // Write camera preview frame to file if capturing if (_capturing) { if (_sequenceIndex < 20) { _jpegRenderer.Source = _cameraPreviewImageSource; IBuffer jpg = await _jpegRenderer.RenderAsync(); await Save(jpg, _sequenceIndex ++); } else { StartStopCapture(); } } _rendering = false; } if (_stop) { _capturing = false; _cameraPreviewImageSource.Dispose(); _writeableBitmapRenderer.Dispose(); _jpegRenderer.Dispose(); } }
private static async Task <RenderResult> RenderJpegAsync(IImageProvider effect, Size renderSize, long setupTimeMillis) { var stopwatch = new Stopwatch(); stopwatch.Start(); IBuffer jpegBuffer; using (var jpegRenderer = new JpegRenderer(effect) { Size = renderSize, Quality = 1.0 }) { jpegBuffer = await jpegRenderer.RenderAsync().AsTask().ConfigureAwait(false); } stopwatch.Stop(); long renderTimeMillis = stopwatch.ElapsedMilliseconds; return(new RenderResult(jpegBuffer, renderSize, setupTimeMillis, renderTimeMillis)); }
public async Task RenderHighpassEffectGroupAndAmplifyEdges() { using (var source = KnownImages.MikikoLynn.ImageSource) using (var highpassEffect = new HighpassEffectGroup(14, true, 2) { Source = source }) using (var sourceWithAmplifiedEdges = new BlendEffect(source, highpassEffect, BlendFunction.Hardlight, 0.4)) using (var renderer = new JpegRenderer(sourceWithAmplifiedEdges)) { DiagnosticsReport.BeginProbe(sourceWithAmplifiedEdges); var buffer = await renderer.RenderAsync(); // This should hit the inline blending (fast) path, make sure it did. var blendEffectReport = await DiagnosticsReport.EndProbeAsync(sourceWithAmplifiedEdges); Assert.AreEqual(1, (int)blendEffectReport.Properties["inlineblend_count"]); ImageResults.Instance.SaveToPicturesLibrary(buffer); } }
private async void SaveImage_Click(object sender, RoutedEventArgs e) { SaveButton.IsEnabled = false; if (_cartoonEffect == null) { return; } var jpegRenderer = new JpegRenderer(_cartoonEffect); // Jpeg renderer gives the raw buffer for the filtered image. IBuffer jpegOutput = await jpegRenderer.RenderAsync(); // Save the image as a jpeg to the saved pictures album. MediaLibrary library = new MediaLibrary(); string fileName = string.Format("CartoonImage_{0:G}", DateTime.Now); var picture = library.SavePicture(fileName, jpegOutput.AsStream()); MessageBox.Show("Image saved!"); SaveButton.IsEnabled = true; }
public async Task SaveFile() { try { savedFile = await ApplicationData.Current.LocalFolder.CreateFileAsync("tempImage.jpg", CreationCollisionOption.ReplaceExisting); using (FilterEffect effect = App.photo.GetEffect()) using (var jpegRenderer = new JpegRenderer(effect)) using (var stream = await savedFile.OpenAsync(FileAccessMode.ReadWrite)) { IBuffer jpegBuffer = await jpegRenderer.RenderAsync(); await stream.WriteAsync(jpegBuffer); await stream.FlushAsync(); } } catch (Exception ex) { MessageDialog mess = new MessageDialog(ex.Message); mess.ShowAsync(); } }
public async override Task<IBuffer> RenderJpegAsync(IBuffer buffer) { if (buffer == null || buffer.Length == 0) { Debug.WriteLine(DebugTag + Name + ": RenderJpegAsync(): The given buffer is null or empty!"); return null; } IBuffer outputBuffer; using (var source = new BufferImageSource(buffer)) { _hdrEffect.Source = source; using (var renderer = new JpegRenderer(_hdrEffect)) { outputBuffer = await renderer.RenderAsync(); } _hdrEffect.Dispose(); } return outputBuffer; }
public static async Task<IBuffer> Reframe(IBuffer image, Rect area) { using (var source = new BufferImageSource(image)) using (var effect = new FilterEffect(source)) { effect.Filters = new List<IFilter>() { new ReframingFilter() { ReframingArea = area, } }; using (var renderer = new JpegRenderer(effect)) { return await renderer.RenderAsync(); } } }
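A usage sketch for the Reframe helper above; the buffer variable and crop rectangle are placeholder assumptions:

// Crop a 640x480 region whose top-left corner is at (100, 50).
// originalJpegBuffer and the coordinates are illustrative only and assume
// the source image is at least that large.
IBuffer croppedJpeg = await Reframe(originalJpegBuffer, new Rect(100, 50, 640, 480));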
private async void SaveImage_Click(object sender, RoutedEventArgs e) { SaveButton.IsEnabled = false; if (_cartoonEffect == null) return; var jpegRenderer = new JpegRenderer(_cartoonEffect); // Jpeg renderer gives the raw buffer for the filtered image. IBuffer jpegOutput = await jpegRenderer.RenderAsync(); // Save the image as a jpeg to the saved pictures album. MediaLibrary library = new MediaLibrary(); string fileName = string.Format("CartoonImage_{0:G}", DateTime.Now); var picture = library.SavePicture(fileName, jpegOutput.AsStream()); MessageBox.Show("Image saved!"); SaveButton.IsEnabled = true; }
private void CropButton_Click(object sender, EventArgs e) { GeneralTransform transform = Crop.TransformToVisual(Image); var topLeftWindowsPoint = transform.Transform(new Point(0, 0)); topLeftWindowsPoint.X /= _scale; topLeftWindowsPoint.Y /= _scale; var bottomRightWindowsPoint = transform.Transform(new Point(Crop.Width, Crop.Height)); bottomRightWindowsPoint.X /= _scale; bottomRightWindowsPoint.Y /= _scale; var topLeftFoundationPoint = new Windows.Foundation.Point(Math.Round(topLeftWindowsPoint.X), Math.Round(topLeftWindowsPoint.Y)); var bottomRightFoundationPoint = new Windows.Foundation.Point(Math.Round(bottomRightWindowsPoint.X), Math.Round(bottomRightWindowsPoint.Y)); var reframingFilter = new ReframingFilter() { ReframingArea = new Windows.Foundation.Rect(topLeftFoundationPoint, bottomRightFoundationPoint) }; var filterEffect = new FilterEffect(_source) { Filters = new List<IFilter>() { reframingFilter } }; var renderer = new JpegRenderer(filterEffect) { OutputOption = OutputOption.PreserveAspectRatio, Quality = 1.0, Size = new Windows.Foundation.Size(bottomRightFoundationPoint.X - topLeftFoundationPoint.X, bottomRightFoundationPoint.Y - topLeftFoundationPoint.Y) }; IBuffer buffer = null; Task.Run(async () => { buffer = await renderer.RenderAsync(); }).Wait(); PhotoModel.Singleton.FromNewCrop(buffer.AsStream()); NavigationService.GoBack(); }
private async Task<IRandomAccessStream> GetFilteredThumbnailStreamAsync() { #if DEBUG System.Diagnostics.Debug.WriteLine("GetFilteredThumbnailStreamAsync invoked " + this.GetHashCode()); #endif IRandomAccessStream filteredStream = null; using (var ticket = await TicketManager.AcquireTicket()) using (var stream = await _photo.GetThumbnailAsync()) { if (Filters.Count > 0) { var list = new List<IFilter>(); foreach (var filter in Filters) { list.Add(filter.GetFilter()); } filteredStream = new InMemoryRandomAccessStream(); using (var source = new RandomAccessStreamImageSource(stream)) using (var effect = new FilterEffect(source) { Filters = list }) using (var renderer = new JpegRenderer(effect)) { var buffer = await renderer.RenderAsync(); await filteredStream.WriteAsync(buffer); } } else { filteredStream = stream.CloneStream(); } } return filteredStream; }
private async void ApplicationBarIconButton_Save(object sender, EventArgs e) { if (currentState != STATE.WAIT || HRImagesource == null) return; IBuffer imageInMemory; var effect = RecipeFactory.Current.CreatePipeline(HRImagesource); using (var renderer = new JpegRenderer(effect)) { imageInMemory = await renderer.RenderAsync(); } if (effect is IDisposable) (effect as IDisposable).Dispose(); using (MediaLibrary mediaLibrary = new MediaLibrary()) mediaLibrary.SavePicture(String.Format("image {0:yyyyMMdd-HHmmss}", DateTime.Now), imageInMemory.AsStream()); MessageBox.Show("Image saved"); }
public async Task<IBuffer> TakePicture() { if (_photoCaptureDevice == null) return null; if (_cameraSemaphore.WaitOne(100)) { try { int angle = 0; if (Orientation.HasFlag(PageOrientation.LandscapeLeft)) { angle = (int)_photoCaptureDevice.SensorRotationInDegrees - 90; } else if (Orientation.HasFlag(PageOrientation.LandscapeRight)) { angle = (int)_photoCaptureDevice.SensorRotationInDegrees + 90; } else // PageOrientation.PortraitUp { angle = (int)_photoCaptureDevice.SensorRotationInDegrees; } if (angle < 0) angle += 360; if (_cameraLocation == CameraSensorLocation.Back) { _photoCaptureDevice.SetProperty(KnownCameraGeneralProperties.EncodeWithOrientation, angle); } else { _photoCaptureDevice.SetProperty(KnownCameraGeneralProperties.EncodeWithOrientation, -angle); } _photoCaptureDevice.SetProperty(KnownCameraGeneralProperties.SpecifiedCaptureOrientation, 0); var cameraCaptureSequence = _photoCaptureDevice.CreateCaptureSequence(1); var stream = new MemoryStream(); cameraCaptureSequence.Frames[0].CaptureStream = stream.AsOutputStream(); await _photoCaptureDevice.PrepareCaptureSequenceAsync(cameraCaptureSequence); await cameraCaptureSequence.StartCaptureAsync(); IBuffer capturedPicture; if (_cameraLocation == CameraSensorLocation.Back) { capturedPicture = stream.GetWindowsRuntimeBuffer(); } else { capturedPicture = await JpegTools.FlipAndRotateAsync(stream.GetWindowsRuntimeBuffer(), FlipMode.Horizontal, Rotation.Rotate0, JpegOperation.AllowLossy); } using (var source = new StreamImageSource(capturedPicture.AsStream())) { var recipe = RecipeFactory.Current.CreatePipeline(source); using (var renderer = new JpegRenderer(recipe)) { capturedPicture = await renderer.RenderAsync(); } if (recipe is IDisposable) (recipe as IDisposable).Dispose(); } return capturedPicture; } finally { _cameraSemaphore.Release(); } } return null; }
private async void AttemptSaveAsync() { if (!Processing) { Processing = true; AdaptButtonsToState(); GC.Collect(); var lowMemory = false; try { long result = (long)DeviceExtendedProperties.GetValue("ApplicationWorkingSetLimit"); lowMemory = result / 1024 / 1024 < 300; } catch (ArgumentOutOfRangeException) { } IBuffer buffer = null; Model.OriginalImage.Position = 0; using (var source = new StreamImageSource(Model.OriginalImage)) using (var segmenter = new InteractiveForegroundSegmenter(source)) using (var annotationsSource = new BitmapImageSource(Model.AnnotationsBitmap)) { segmenter.Quality = lowMemory ? 0.5 : 1; segmenter.AnnotationsSource = annotationsSource; var foregroundColor = Model.ForegroundBrush.Color; var backgroundColor = Model.BackgroundBrush.Color; segmenter.ForegroundColor = Windows.UI.Color.FromArgb(foregroundColor.A, foregroundColor.R, foregroundColor.G, foregroundColor.B); segmenter.BackgroundColor = Windows.UI.Color.FromArgb(backgroundColor.A, backgroundColor.R, backgroundColor.G, backgroundColor.B); using (var effect = new LensBlurEffect(source, new LensBlurPredefinedKernel(Model.KernelShape, (uint)Model.KernelSize))) using (var renderer = new JpegRenderer(effect)) { effect.KernelMap = segmenter; try { buffer = await renderer.RenderAsync(); } catch (Exception ex) { System.Diagnostics.Debug.WriteLine("AttemptSave rendering failed: " + ex.Message); } } } if (buffer != null) { using (var library = new MediaLibrary()) using (var stream = buffer.AsStream()) { library.SavePicture("lensblur_" + DateTime.Now.Ticks, stream); Model.Saved = true; AdaptButtonsToState(); } } Processing = false; AdaptButtonsToState(); } }
private async Task<bool> SaveImageAsync(StorageFile file) { if (_grayscaleEffect == null) { return false; } string errorMessage = null; try { using (var jpegRenderer = new JpegRenderer(_grayscaleEffect)) using (var stream = await file.OpenAsync(FileAccessMode.ReadWrite)) { // Jpeg renderer gives the raw buffer containing the filtered image. IBuffer jpegBuffer = await jpegRenderer.RenderAsync(); await stream.WriteAsync(jpegBuffer); await stream.FlushAsync(); } } catch (Exception exception) { errorMessage = exception.Message; } if (!string.IsNullOrEmpty(errorMessage)) { var dialog = new MessageDialog(errorMessage); await dialog.ShowAsync(); return false; } return true; }
/// <summary> /// Renders the given buffer with the applied filters to an output /// buffer and returns it. Meant to be used where the filtered image /// is, for example, going to be saved to a file. /// </summary> /// <param name="buffer">The buffer containing the original image data.</param> /// <returns>Buffer containing the filtered image data.</returns> public virtual async Task<IBuffer> RenderJpegAsync(IBuffer buffer) { if (buffer == null || buffer.Length == 0) { Debug.WriteLine(DebugTag + Name + ": RenderJpegAsync(): The given buffer is null or empty!"); return null; } if (_effect != null) { _effect.Dispose(); _effect = null; } // Construct the FilterEffect instance and set the filters. _effect = new FilterEffect(_source); SetFilters(_effect); IBuffer outputBuffer; using (var source = new BufferImageSource(buffer)) { var effect = new FilterEffect(source); SetFilters(effect); using (var renderer = new JpegRenderer(effect)) { outputBuffer = await renderer.RenderAsync(); } effect.Dispose(); } return outputBuffer; }
public async Task<bool> SaveImageAsync(StorageFile file) { if (CurrentSelectedEffect == null || CurrentSelectedEffect.Effect == null) { return false; } string errorMessage = null; try { using (var jpegRenderer = new JpegRenderer(CurrentSelectedEffect.Effect)) using (var stream = await file.OpenAsync(FileAccessMode.ReadWrite)) { jpegRenderer.Size = new Windows.Foundation.Size(SourceImageObservedWidth, SourceImageObservedHeight); // Jpeg renderer gives the raw buffer containing the filtered image. IBuffer jpegBuffer = await jpegRenderer.RenderAsync(); await stream.WriteAsync(jpegBuffer); await stream.FlushAsync(); } } catch (Exception exception) { errorMessage = exception.Message; } if (!string.IsNullOrEmpty(errorMessage)) { var dialog = new MessageDialog(errorMessage); await dialog.ShowAsync(); return false; } return true; }
public async Task<IBuffer> TakePictureFast() { if (_photoCaptureDevice == null) return null; if (_cameraSemaphore.WaitOne(100)) { try { int angle = 0; if (Orientation.HasFlag(PageOrientation.LandscapeLeft)) { angle = (int)_photoCaptureDevice.SensorRotationInDegrees - 90; } else if (Orientation.HasFlag(PageOrientation.LandscapeRight)) { angle = (int)_photoCaptureDevice.SensorRotationInDegrees + 90; } else // PageOrientation.PortraitUp { angle = (int)_photoCaptureDevice.SensorRotationInDegrees; } int layersize = (int)(_photoCaptureDevice.PreviewResolution.Width * _photoCaptureDevice.PreviewResolution.Height); int layersizeuv = layersize / 2; var buffer = new byte[layersize + layersizeuv]; _photoCaptureDevice.GetPreviewBufferYCbCr(buffer); IBuffer capturedPicture; using (var cameraBitmap = new Bitmap( _photoCaptureDevice.PreviewResolution, ColorMode.Yuv420Sp, new uint[] { (uint)_photoCaptureDevice.PreviewResolution.Width, (uint)_photoCaptureDevice.PreviewResolution.Width }, new IBuffer[] { buffer.AsBuffer(0, layersize), buffer.AsBuffer(layersize, layersizeuv) })) using (var source = new BitmapImageSource(cameraBitmap)) using (var orientationEffect = new FilterEffect(source)) { if (_cameraLocation == CameraSensorLocation.Back) { orientationEffect.Filters = new IFilter[] { new RotationFilter(angle) }; } else { orientationEffect.Filters = new IFilter[] { new RotationFilter(-angle), new FlipFilter(FlipMode.Horizontal) }; } var recipe = RecipeFactory.Current.CreatePipeline(orientationEffect); using (var renderer = new JpegRenderer(recipe)) { capturedPicture = await renderer.RenderAsync(); } if (recipe is IDisposable) (recipe as IDisposable).Dispose(); } return capturedPicture; } finally { _cameraSemaphore.Release(); } } return null; }
protected override async void Render() { try { if (Source != null) { Debug.WriteLine(DebugTag + Name + ": Rendering..."); foreach (var change in Changes) { change(); } Changes.Clear(); //blend source layer with mode overlay var blendEffect = new BlendEffect(); blendEffect.Source = Source;// new StreamImageSource(new MemoryStream()); blendEffect.ForegroundSource = Source; blendEffect.BlendFunction = BlendFunction.Overlay; blendEffect.GlobalAlpha = 0.0; var renderer = new JpegRenderer(blendEffect); var outBuffer = await renderer.RenderAsync(); blendEffect.Dispose(); var highPassSource = new BufferImageSource(outBuffer); var highPassFilter = new HighpassEffect(6, false, 1); highPassFilter.Source = highPassSource; var highPassRenderer = new JpegRenderer(highPassFilter); var highPassOutBuffer = await highPassRenderer.RenderAsync(); highPassFilter.Dispose(); var negativeFilter = new NegativeFilter(); var filterEffect = new FilterEffect(); filterEffect.Filters = new List<IFilter>() { negativeFilter }; filterEffect.Source = new BufferImageSource(highPassOutBuffer); var invertRenderer = new JpegRenderer(filterEffect); var invertOutBuffer = await invertRenderer.RenderAsync(); blendEffect = new BlendEffect(Source, new BufferImageSource(invertOutBuffer), BlendFunction.Overlay, 0.5); using (var bmpRender = new WriteableBitmapRenderer(blendEffect, TmpBitmap)) { await bmpRender.RenderAsync(); } TmpBitmap.Pixels.CopyTo(PreviewBitmap.Pixels, 0); PreviewBitmap.Invalidate(); // Force a redraw } else { Debug.WriteLine(DebugTag + Name + ": Render(): No buffer set!"); } } catch (Exception e) { Debug.WriteLine(DebugTag + Name + ": Render(): " + e.Message); } finally { switch (State) { case States.Apply: State = States.Wait; break; case States.Schedule: State = States.Apply; Render(); break; default: break; } } }
private static async Task<RenderResult> RenderJpegAsync(IImageProvider effect, Size renderSize, long setupTimeMillis) { var stopwatch = new Stopwatch(); stopwatch.Start(); IBuffer jpegBuffer; using (var jpegRenderer = new JpegRenderer(effect) { Size = renderSize, Quality = 1.0 }) { jpegBuffer = await jpegRenderer.RenderAsync().AsTask().ConfigureAwait(false); } stopwatch.Stop(); long renderTimeMillis = stopwatch.ElapsedMilliseconds; return new RenderResult(jpegBuffer, renderSize, setupTimeMillis, renderTimeMillis); }
private static async Task RenderEffect(FocusBand focusBand, DepthOfFieldQuality quality, [CallerMemberName] string testName = "") { using (var source = KnownImages.Nurse.ImageSource) using (var effect = new LensTiltDepthOfFieldEffect(source, focusBand, 1.0, 1.0, quality)) using (var renderer = new JpegRenderer(effect)) { var buffer = await renderer.RenderAsync(); ImageResults.Instance.SaveToPicturesLibrary(buffer, "LensTiltDepthOfFieldEffectTest_" + testName + ".jpg"); } }
/// <summary> /// Renders the current image with the applied filters to a buffer and returns it. /// Meant to be used where the filtered image is, for example, going to be /// saved to a file. /// </summary> /// <param name="buffer">The buffer containing the original image data.</param> /// <returns>Buffer containing the filtered image data.</returns> public async Task<IBuffer> RenderJpegAsync(IBuffer buffer) { using (BufferImageSource source = new BufferImageSource(buffer)) using (JpegRenderer renderer = new JpegRenderer(_effect)) { return await renderer.RenderAsync(); } }
//extract Oriented ROI async public Task<IBuffer> GenerateReframingPicture() { var currentSize = new Size( outputSize.Width / currentScale, outputSize.Height / currentScale); var corner = new Point(currentPos.X - currentSize.Width / 2, currentPos.Y - currentSize.Height / 2); reframing.ReframingArea = new Rect(corner, currentSize); reframing.Angle = -currentAngle; using (var render = new JpegRenderer(filter)) { return await render.RenderAsync(); } }
private async void save_Click(object sender, EventArgs e) { if (_fotoEffect == null) return; var jpegRenderer = new JpegRenderer(_fotoEffect); // Render the JPEG. IBuffer jpegOutput = await jpegRenderer.RenderAsync(); // Save the jpg image to the media library. MediaLibrary library = new MediaLibrary(); string fileName = string.Format("CartoonImage_{0:G}", DateTime.Now); var picture = library.SavePicture(fileName, jpegOutput.AsStream()); MessageBox.Show("Image saved!"); }