// Handles selection changes in the filter list: index 0 shows the unfiltered
// source image; any other index maps (index - 1) onto FilterEnum, applies the
// corresponding filter on first use, and caches the result in a temp file so
// re-selecting the same filter loads instantly.
private void FiltersList_SelectedIndexChanged(object sender, EventArgs e)
{
    int selectedIndex = FiltersList.SelectedIndex;
    if (selectedIndex == 0)
    {
        // First list entry is the original, unfiltered image.
        if (File.Exists(_srcPath))
        {
            InputImage.Load(_srcPath);
            _currentlySelectedTmpFilePath = _srcPath;
            _currentlySelectedFilterEnum = null; // null == "no filter selected"
        }
    }
    else
    {
        // Entries after the first map 1:1 onto FilterEnum values.
        selectedIndex = selectedIndex - 1;
        FilterEnum filterEnum = (FilterEnum)selectedIndex;
        // Cached output path: <pattern>_<FilterName><original extension>
        string tmpFilteredImgPath = _tmpImgFilePathPattern + "_" + filterEnum.ToString() + Path.GetExtension(_srcPath);
        if (File.Exists(tmpFilteredImgPath))
        {
            // Filter was applied earlier — load the cached result.
            InputImage.Load(tmpFilteredImgPath);
            _currentlySelectedFilterEnum = filterEnum;
            _currentlySelectedTmpFilePath = tmpFilteredImgPath;
        }
        else
        {
            if (File.Exists(_srcPath))
            {
                // Center the progress panel over the form and disable input
                // while the filter runs (synchronously, on the UI thread).
                ProgressPanel.Location = new Point(Width / 2 - (ProgressPanel.Width / 2), (Height / 2) - (ProgressPanel.Height / 2));
                ProgressPanel.Visible = true;
                ProgressPanel.BringToFront();
                Enabled = false;
                try
                {
                    // Each Apply* writes its output to tmpFilteredImgPath and
                    // returns null/empty on success or an error message to show.
                    string result;
                    switch (filterEnum)
                    {
                        case FilterEnum.Nashville:
                            result = ApplyNashville(_srcPath, tmpFilteredImgPath);
                            break;
                        case FilterEnum.Clarendon:
                            result = ApplyClarendon(_srcPath, tmpFilteredImgPath);
                            break;
                        case FilterEnum.Moon:
                            result = ApplyMoon(_srcPath, tmpFilteredImgPath);
                            break;
                        case FilterEnum.Toaster:
                            result = ApplyToaster(_srcPath, tmpFilteredImgPath);
                            break;
                        case FilterEnum.XPro2:
                            result = ApplyXPro2(_srcPath, tmpFilteredImgPath);
                            break;
                        default:
                            // List contents and FilterEnum got out of sync.
                            throw new ArgumentOutOfRangeException();
                    }
                    if (string.IsNullOrEmpty(result))
                    {
                        if (File.Exists(tmpFilteredImgPath))
                        {
                            // Remember the temp file so it can be cleaned up later.
                            _tmpFilesToClear.Add(tmpFilteredImgPath);
                            InputImage.Load(tmpFilteredImgPath);
                            _currentlySelectedTmpFilePath = tmpFilteredImgPath;
                            _currentlySelectedFilterEnum = filterEnum;
                        }
                    }
                    else
                    {
                        // The filter reported an error message — surface it.
                        MessageBox.Show(result, "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
                    }
                }
                finally
                {
                    // Always restore the UI, even if a filter throws.
                    ProgressPanel.Visible = false;
                    Enabled = true;
                }
            }
            else
            {
                MessageBox.Show("Can't find input file.", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
            }
        }
    }
}
// NOTE(review): removed a large span of commented-out experimental code
// (SharpDX swap-chain/Direct2D setup and a Tesseract bounding-box helper).
// Dead code belongs in version-control history, not in the source file.

/// <summary>
/// Pre-processes the given image before analysis. Not implemented yet.
/// </summary>
/// <param name="inputImage">The image to pre-process.</param>
/// <returns>A task that faults with <see cref="NotImplementedException"/>.</returns>
public Task<InputImage> PreProcess(InputImage inputImage)
{
    // Task.FromException preserves the original semantics (the exception
    // surfaces when the returned task is awaited, not at the call site)
    // while eliminating compiler warning CS1998 (async method lacks await).
    return Task.FromException<InputImage>(new NotImplementedException());
}
/// <summary>
/// Executes the activity: reads the inputs, enforces a timeout, then either
/// Gaussian-blurs or blacks out the requested rectangle of the input image,
/// and returns an action that publishes the result to <c>OutputImage</c>.
/// </summary>
/// <param name="context">The activity context the inputs are read from.</param>
/// <param name="cancellationToken">Token used to cancel the timeout delay.</param>
/// <returns>An action that writes the processed image to the output argument.</returns>
/// <exception cref="TimeoutException">Thrown when execution exceeds <c>TimeoutMS</c>.</exception>
protected override async Task<Action<AsyncCodeActivityContext>> ExecuteAsync(AsyncCodeActivityContext context, CancellationToken cancellationToken)
{
    // Inputs
    var timeout = TimeoutMS.Get(context);
    var inputImage = InputImage.Get(context);
    var positionX = PositionX.Get(context);
    var positionY = PositionY.Get(context);
    var width = Width.Get(context);
    var height = Height.Get(context);
    var blur = Blur.Get(context);
    var blurAmount = BlurAmount.Get(context);

    // Set a timeout on the execution.
    // NOTE(review): if the delay wins, the underlying task keeps running in
    // the background — consider a linked CancellationTokenSource. Confirm.
    var task = ExecuteWithTimeout(context, cancellationToken);
    if (await Task.WhenAny(task, Task.Delay(timeout, cancellationToken)) != task)
    {
        throw new TimeoutException(Resources.Timeout_Error);
    }

    Image returnImage;
    // Check if activity should blur or hide part of the image
    if (blur)
    {
        // Convert image to a byte stream for MagickImage consumption.
        ImageConverter imageConverter = new ImageConverter();
        byte[] imageByteStream = (byte[])imageConverter.ConvertTo(inputImage, typeof(byte[]));

        // FIX: MagickImage is IDisposable and was previously leaked.
        byte[] imageBytesOut;
        using (var magickImage = new MagickImage(imageByteStream))
        {
            // Blur only the requested region, then drop the mask.
            magickImage.RegionMask(new MagickGeometry(positionX, positionY, width, height));
            magickImage.GaussianBlur(blurAmount, 25);
            magickImage.RemoveRegionMask();
            imageBytesOut = magickImage.ToByteArray();
        }

        // NOTE: the stream must stay open for the lifetime of the Image
        // created from it (documented requirement of Image.FromStream), so
        // it is intentionally not wrapped in a using block.
        MemoryStream ms = new MemoryStream(imageBytesOut);
        returnImage = Image.FromStream(ms);
    }
    else
    {
        // Draw a black rectangle directly onto the input image.
        returnImage = inputImage;
        // FIX: SolidBrush is IDisposable and was previously leaked; the
        // redundant explicit g.Dispose() inside the using block is removed
        // (the using statement already disposes the Graphics object).
        using (Graphics g = Graphics.FromImage(returnImage))
        using (SolidBrush blackBrush = new SolidBrush(Color.Black))
        {
            Rectangle rect = new Rectangle(positionX, positionY, width, height);
            g.FillRectangle(blackBrush, rect);
        }
    }

    // Outputs
    return (ctx) => { OutputImage.Set(ctx, returnImage); };
}
/// <summary>
/// Builds the results screen: navigation buttons, nav-bar styling, the
/// analyzed/original image views, and the file-name/date metadata labels.
/// </summary>
public override void ViewDidLoad()
{
    base.ViewDidLoad();
    AnalyticsService.TrackEvent(AnalyticsService.Event.ResultsPageViewed);

    // Programmatically add a back button with an arrow image.
    UIImage backArrowImage = UIImage.FromBundle("back-arrow");
    UIButton backButton = new UIButton(UIButtonType.Custom);
    backButton.SetImage(backArrowImage, UIControlState.Normal);
    backButton.SetTitle("Back", UIControlState.Normal);
    backButton.ImageEdgeInsets = new UIEdgeInsets(0.0f, -12.5f, 0.0f, 0.0f);
    backButton.AddTarget((sender, e) =>
    {
        AnalyticsService.TrackEvent(AnalyticsService.Event.ReturnBackToCroppingPage);
        this.NavigationController?.PopViewController(true);
    }, UIControlEvent.TouchUpInside);
    this.NavigationItem.LeftBarButtonItem = new UIBarButtonItem(backButton);

    // "New Session" pops all the way back to the image-input page.
    UIButton newSession = new UIButton(UIButtonType.Custom);
    newSession.SetTitle("New Session", UIControlState.Normal);
    newSession.AddTarget((sender, e) =>
    {
        AnalyticsService.TrackEvent(AnalyticsService.Event.ReturnBackToImageInputPage);
        this.NavigationController?.PopToRootViewController(true);
    }, UIControlEvent.TouchUpInside);
    this.NavigationItem.RightBarButtonItem = new UIBarButtonItem(newSession);

    // Set nav bar attributes
    NavigationControllerUtil.SetNavigationBarAttributes(this.NavigationController.NavigationBar);
    NavigationControllerUtil.SetNavigationTitle(this.NavigationItem, NavBarTitle);

    // Downscale once so both image views share the same scaled bitmap.
    InputImage = InputImage.Scale(new CoreGraphics.CGSize(LoadImageWidth, LoadImageHeight));

    // Initialize the analyzed-image view.
    imageView = new UIImageView();
    imageView.Frame = new CGRect(0, 0, InputImage.Size.Width, InputImage.Size.Height);
    imageView.Image = InputImage;
    imageView.UserInteractionEnabled = true;

    // Initialize the original-image view.
    originalImageView = new UIImageView();
    originalImageView.Frame = new CGRect(0, 0, InputImage.Size.Width, InputImage.Size.Height);
    originalImageView.Image = InputImage;
    originalImageView.UserInteractionEnabled = true;

    // Initialize the meta-data display; fall back to defaults when the
    // image came from the camera (no URL-derived name/date available).
    string filename;
    DateTime? time;
    try
    {
        filename = GetFileString(ImageUrl, DefaultFileStringForCamera);
        time = GetDate(ImageUrl);
    }
    // FIX: the caught exception was bound to an unused variable (compiler
    // warning CS0168); catch by type only. NOTE(review): catching
    // NullReferenceException is a smell — a null check on ImageUrl would be
    // cleaner; confirm which member can be null here.
    catch (NullReferenceException)
    {
        filename = DefaultFileStringForCamera;
        time = DateTime.Now;
    }
    AnalyzedImageFileNameLabel.Text = filename;
    AnalyzedImageDateLabel.Text = time.HasValue ? time.Value.ToShortDateString() : DefaultNoDateDisplay;
    OriginalImageFileNameLabel.Text = filename;
    OriginalImageDateLabel.Text = time.HasValue ? time.Value.ToShortDateString() : DefaultNoDateDisplay;

    // Collect the original-image labels so accessibility can be toggled together.
    List<UILabel> originalLabels = new List<UILabel>();
    originalLabels.Add(OriginalImageDateLabel);
    originalLabels.Add(OriginalImageDateHeaderLabel);
    originalLabels.Add(OriginalImageFileNameLabel);
    originalLabels.Add(OriginalImageFileNameHeaderLabel);

    // Toggle accessibility off for all original labels (they are initially hidden).
    EnableVoiceOverForViews(originalLabels.ToArray(), false);

    SetDisplayBorders();

    // Retrieve the shared AI Client that was loaded by the AppDelegate.
    aiClient = ((AppDelegate)UIApplication.SharedApplication.Delegate).AIClient;
    System.Diagnostics.Debug.WriteLine("AI Client retrieved from AppDelegate");
}
// Computes a signed distance field from the red channel of the input texture.
// Pipeline: copy GPU texture -> CPU staging texture -> normalize to floats ->
// run EDT (Edtaa3) on background and inverted foreground -> combine into a
// bipolar distance field -> write into a dynamic ShaderResource texture that
// is published on Output.
private void Update(EvaluationContext context)
{
    var image = InputImage.GetValue(context);
    // NOTE(review): imageSrv is read but never used below — confirm it is
    // only evaluated for its side effect, otherwise remove it.
    var imageSrv = InputImageSrv.GetValue(context);
    if (image == null)
    {
        // NOTE(review): typo in log message ("completet") — left untouched
        // here since this is a documentation-only pass.
        Log.Debug("input not completet");
        return;
    }
    var d3DDevice = ResourceManager.Instance().Device;
    var immediateContext = d3DDevice.ImmediateContext;

    // (Re)create the CPU-readable staging copy whenever the input's
    // format/size/mip count changes.
    if (_imageWithCPUAccess == null ||
        _imageWithCPUAccess.Description.Format != image.Description.Format ||
        _imageWithCPUAccess.Description.Width != image.Description.Width ||
        _imageWithCPUAccess.Description.Height != image.Description.Height ||
        _imageWithCPUAccess.Description.MipLevels != image.Description.MipLevels)
    {
        var desc = new Texture2DDescription()
        {
            BindFlags = BindFlags.None,
            Format = image.Description.Format,
            Width = image.Description.Width,
            Height = image.Description.Height,
            MipLevels = image.Description.MipLevels,
            SampleDescription = new SampleDescription(1, 0),
            Usage = ResourceUsage.Staging,          // CPU-readable copy target
            OptionFlags = ResourceOptionFlags.None,
            CpuAccessFlags = CpuAccessFlags.Read,
            ArraySize = 1
        };
        Utilities.Dispose(ref _imageWithCPUAccess);
        _imageWithCPUAccess = new Texture2D(d3DDevice, desc);
    }

    // (Re)create the output texture on change.
    // NOTE(review): this texture is created with MipLevels = 1 but the guard
    // compares against image.Description.MipLevels, so when the input has
    // more than one mip the texture is recreated on every update — confirm.
    if (_distanceFieldImage == null ||
        _distanceFieldImage.Description.Format != image.Description.Format ||
        _distanceFieldImage.Description.Width != image.Description.Width ||
        _distanceFieldImage.Description.Height != image.Description.Height ||
        _distanceFieldImage.Description.MipLevels != image.Description.MipLevels)
    {
        var desc = new Texture2DDescription()
        {
            BindFlags = BindFlags.ShaderResource,
            Format = image.Description.Format,
            Width = image.Description.Width,
            Height = image.Description.Height,
            MipLevels = 1,
            SampleDescription = new SampleDescription(1, 0),
            Usage = ResourceUsage.Dynamic,          // CPU-writable each frame
            OptionFlags = ResourceOptionFlags.None,
            CpuAccessFlags = CpuAccessFlags.Write,
            ArraySize = 1
        };
        Utilities.Dispose(ref _distanceFieldImage);
        _distanceFieldImage = new Texture2D(d3DDevice, desc);
    }

    // if (Changed)
    {
        immediateContext.CopyResource(image, _imageWithCPUAccess);
        int width = image.Description.Width;
        int height = image.Description.Height;
        // Lazily (re)allocate the scratch buffers used by the EDT pass.
        if (_data == null || _data.Length != width * height)
        {
            _data = new float[width * height];
            _xDist = new short[width * height];
            _yDist = new short[width * height];
            _gradients = new SharpDX.Vector2[width * height];
        }
        DataStream sourceStream;
        var sourceDataBox = immediateContext.MapSubresource(_imageWithCPUAccess, 0, 0, MapMode.Read, SharpDX.Direct3D11.MapFlags.None, out sourceStream);
        // Convert img into float (data)
        using (sourceStream)
        {
            sourceStream.Position = 0;
            float minValue = 255, maxValue = -255;
            for (int y = 0; y < height; ++y)
            {
                for (int x = 0; x < width; ++x)
                {
                    // Only the red channel is used as the input level.
                    var color = new Color4(sourceStream.Read<Int32>());
                    float v = color.Red;
                    _data[y * width + x] = v;
                    if (v > maxValue)
                    {
                        maxValue = v;
                    }
                    if (v < minValue)
                    {
                        minValue = v;
                    }
                }
                // Skip the row-pitch padding past the pixel data.
                sourceStream.Position += sourceDataBox.RowPitch - width * 4;
            }

            // Rescale image levels between 0 and 1
            // NOTE(review): dividing by maxValue (rather than by
            // (maxValue - minValue)) only maps to [0,1] when minValue == 0 —
            // looks like an off-by-range bug; confirm intended behavior
            // before changing, as output visuals depend on it.
            for (int i = 0; i < width * height; ++i)
            {
                _data[i] = (_data[i] - minValue) / maxValue;
            }

            // transform background (black pixels)
            ComputeGradient(_data, width, height);
            var outside = Edtaa3(_data, height, width);

            // transform forground (white pixels)
            for (int i = 0; i < width * height; ++i)
            {
                _data[i] = 1 - _data[i]; // invert input
            }
            ComputeGradient(_data, width, height);
            var inside = Edtaa3(_data, height, width);

            // write resulting distance field to target texture
            DataStream destinationStream;
            var destinationDataBox = immediateContext.MapSubresource(_distanceFieldImage, 0, 0, MapMode.WriteDiscard, SharpDX.Direct3D11.MapFlags.None, out destinationStream);
            using (destinationStream)
            {
                sourceStream.Position = 0;
                destinationStream.Position = 0;
                for (int y = 0; y < height; y++)
                {
                    for (int x = 0; x < width; x++)
                    {
                        int i = y * width + x;
                        // distmap = outside - inside; % Bipolar distance field
                        var color = new Color4(sourceStream.Read<Int32>());
                        // Combine the two distance transforms, centered at 128.
                        outside[i] = MathUtils.Clamp(128.0f + (outside[i] - inside[i]) * 16.0f, 0.0f, 255.0f);
                        //color.Alpha = (255 - (byte)outside[i])/255.0f;
                        float f = (255 - (byte)outside[i]) / 255.0f;
                        color.Red = f;
                        color.Blue = f;
                        color.Green = f;
                        float alpha = 1 - _data[i];
                        {
                            // do alpha dilatation: take the max alpha within a
                            // 1-pixel neighborhood (clamped at the borders).
                            const int range = 1;
                            int xs = Math.Max(x - range, 0);
                            int xe = Math.Min(x + range, width - 1);
                            int ys = Math.Max(y - range, 0);
                            int ye = Math.Min(y + range, height - 1);
                            for (int yy = ys; yy <= ye; yy++)
                            {
                                for (int xx = xs; xx <= xe; xx++)
                                {
                                    alpha = Math.Max(alpha, 1 - _data[yy * width + xx]);
                                }
                            }
                        }
                        color.Alpha = alpha;// * 0.8f; // > 0.0f ? 0.5f : 0;
                        destinationStream.Write(color.ToRgba());
                    }
                    // Skip the row-pitch padding of the destination texture.
                    destinationStream.Position += destinationDataBox.RowPitch - width * 4;
                }
                immediateContext.UnmapSubresource(_distanceFieldImage, 0);
            }
            immediateContext.UnmapSubresource(_imageWithCPUAccess, 0);
        }
        // Changed = false;
    }
    Output.Value = _distanceFieldImage;
}
/// <summary>
/// Converts <c>InputImage</c> into an HSL pixel array (<c>HSLPixels</c>) and a
/// Bgra32 <c>WriteableBitmap</c> (<c>Image</c>). A deep copy of the HSL data is
/// kept in <c>originalHSLPixels</c> for later brightness editing.
/// </summary>
/// <param name="token">Optional cancellation token; checked once per pixel.</param>
/// <returns><c>true</c> on success, <c>false</c> if the conversion was cancelled.</returns>
public bool Convert(CancellationToken? token)
{
    width = InputImage.Width;
    height = InputImage.Height;
    HSLColor[,] tempHSLPixels = new HSLColor[width, height];

    // Snapshot the source pixels under the lock so concurrent readers of
    // RGBPixels always see a fully populated array.
    lock (lockRGB)
    {
        RGBPixels = new Color[width, height];
        for (int x = 0; x < width; ++x)
        {
            for (int y = 0; y < height; ++y)
            {
                RGBPixels[x, y] = InputImage.GetPixel(x, y);
            }
        }
    }

    var tempImage = new WriteableBitmap(width, height, InputImage.HorizontalResolution, InputImage.VerticalResolution, System.Windows.Media.PixelFormats.Bgra32, null);
    int bytesPerPixel = tempImage.Format.BitsPerPixel / 8;
    int bytesAmount = width * height * bytesPerPixel;
    byte[] pixels = new byte[bytesAmount]; // pixel buffer for the output image
    int bytesPerRow = width * bytesPerPixel;

    for (int x = 0; x < width; ++x)
    {
        for (int y = 0; y < height; ++y)
        {
            if (token != null && token.Value.IsCancellationRequested)
            {
                return false;
            }
            // FIX: reuse the snapshot captured above instead of a second,
            // slow InputImage.GetPixel call per pixel.
            var rgbColor = RGBPixels[x, y];
            var hslColor = new HSLColor();

            // Normalize the color components into [0;1].
            float r = rgbColor.R / 255.0f;
            float g = rgbColor.G / 255.0f;
            float b = rgbColor.B / 255.0f;

            // Largest and smallest components, and their spread.
            float cMax = Math.Max(Math.Max(r, g), b);
            float cMin = Math.Min(Math.Min(r, g), b);
            float delta = cMax - cMin;

            if (delta == 0.0f) // all RGB components are equal (grayscale)
            {
                hslColor.H = 0; // hue is undefined — use 0
                // FIX: grayscale saturation is 0 by definition. Falling
                // through to the general formula produced 0/0 = NaN for pure
                // black and pure white, and a NaN-to-byte cast yields an
                // unspecified value in C#.
                hslColor.S = 0;
            }
            else
            {
                // Hue in degrees, chosen by the dominant component.
                if (cMax == r) // R is the largest component
                {
                    if (g >= b)
                    {
                        hslColor.H = (short)(60 * (g - b) / delta);
                    }
                    else
                    {
                        hslColor.H = (short)(60 * (g - b) / delta + 360);
                    }
                }
                else if (cMax == g) // G is the largest component
                {
                    hslColor.H = (short)(60 * (b - r) / delta + 120);
                }
                else // B is the largest component
                {
                    hslColor.H = (short)(60 * (r - g) / delta + 240);
                }
                // Saturation in percent: delta / (1 - |2L - 1|).
                hslColor.S = (byte)(delta / (1 - Math.Abs(1 - (cMax + cMin))) * 100);
            }

            // Lightness in percent; same as ((cMax + cMin) / 2 * 100).
            hslColor.L = (byte)((cMax + cMin) * 50);

            tempHSLPixels[x, y] = hslColor; // store the HSL color

            // Convert back to RGB and write into the Bgra32 buffer.
            int pixelOffset = (y * width + x) * 4;
            Color newRgbColor = HSLtoRGB(hslColor);
            pixels[pixelOffset] = newRgbColor.B;
            pixels[pixelOffset + 1] = newRgbColor.G;
            pixels[pixelOffset + 2] = newRgbColor.R;
            pixels[pixelOffset + 3] = 255;
        }
    }

    if (token != null && token.Value.IsCancellationRequested)
    {
        return false;
    }

    lock (lockHSL)
    {
        HSLPixels = tempHSLPixels;
    }

    tempImage.WritePixels(new Int32Rect(0, 0, width, height), pixels, bytesPerRow, 0);

    if (token != null && token.Value.IsCancellationRequested)
    {
        return false;
    }

    Image = tempImage;

    // Keep a deep copy of the HSL colors (used later for brightness editing).
    originalHSLPixels = new HSLColor[width, height];
    for (int x = 0; x < width; ++x)
    {
        for (int y = 0; y < height; ++y)
        {
            originalHSLPixels[x, y] = HSLPixels[x, y].DeepCopy();
        }
    }
    return true;
}
/// <summary>
/// Loads the image at <paramref name="imageUri"/> and forwards it to the
/// <c>InputImage</c>-based overload for analysis.
/// </summary>
/// <param name="imageUri">File URI of the image to analyze.</param>
public void Analyze(AUri imageUri)
{
    // Build the InputImage from the file path and delegate to the overload.
    Analyze(InputImage.FromFilePath(XEPlatform.AppContext, imageUri));
}
/// <summary>
/// Creates a provider that exposes the given image together with the pixel
/// representation it should be interpreted as.
/// </summary>
/// <param name="image">The image to provide.</param>
/// <param name="pixelType">The pixel type of the image.</param>
public InputImageProvider(InputImage image, PixelTypes pixelType)
{
    PixelType = pixelType;
    Image = image;
}