// Completion handler for the video-background-box worker: stores the rendered
// image on the LayerBox layer, then refreshes the preview picture with a
// composite resized to the preview control's dimensions.
private void bwVideoBackgroundBox_RunWorkerCompleted(object sender, RunWorkerCompletedEventArgs e)
{
    ResizeFilter resizer = new ResizeFilter();

    // The worker's result is the freshly rendered layer image.
    p_theme.BackgroundVideo.LayerBox.Image = (Image)e.Result;

    previewPic.Image = resizer.ExecuteFilter(p_theme.BackgroundVideo, previewPic.Width, previewPic.Height);
}
// Runs the layered edge-detection pipeline: each layer processes a (possibly
// downscaled) copy of the input, with the previous layer's grayscale output
// injected as an extra "Layer" channel.
//
// Fix: the original constructed BOTH resize filters on every iteration even
// when they were not used (resizeColor is unused when layerResizeFactor == 1,
// resizeGrayscale is unused on the first layer). Filters are now built lazily.
public List <ImageDescription> computeImageForLayers(ImageDescription inputImage, int numberOfLayersToCompute)
{
    List <ImageDescription> computedImages = new List <ImageDescription>(numberOfLayersToCompute);
    for (int i = 0; i < numberOfLayersToCompute; i++)
    {
        EdgeDetectionAlgorithm algorithm = layers[i].algorithm;
        int layerResizeFactor = layers[i].resizeFactor;
        int targetSizeX = inputImage.sizeX / layerResizeFactor;
        int targetSizeY = inputImage.sizeY / layerResizeFactor;

        ImageDescription newInputImage;
        if (layerResizeFactor == 1)
        {
            // No scaling needed; note this aliases the caller's image, so the
            // Layer-channel write below mutates it (same as the original code).
            newInputImage = inputImage;
        }
        else
        {
            ResizeFilter resizeColor = new ResizeFilter(targetSizeX, targetSizeY, ImageDescriptionUtil.colorChannels);
            newInputImage = resizeColor.filter(inputImage);
        }

        if (i > 0)
        {
            // Feed the previous layer's output in as an additional channel,
            // resized to this layer's resolution.
            ResizeFilter resizeGrayscale = new ResizeFilter(targetSizeX, targetSizeY, ImageDescriptionUtil.grayscaleChannel);
            ImageDescription resizedComputed = resizeGrayscale.filter(computedImages[i - 1]);
            newInputImage.setColorChannel(ColorChannelEnum.Layer, resizedComputed.gray);
        }

        ImageDescription layerOutputImage = algorithm.test(newInputImage);
        computedImages.Add(layerOutputImage);
    }
    return computedImages;
}
// Normalizes a batch of images to a common size: the maximum width and height
// found in the batch. Images already at that size are passed through
// unchanged; others are resized (gray channel only).
public static void makeAllImagesSameSize(List <ImageDescription> inputImages, out int newSizeX, out int newSizeY, out List <ImageDescription> outputImages)
{
    // First pass: find the largest width and height across the batch.
    newSizeX = 0;
    newSizeY = 0;
    foreach (ImageDescription candidate in inputImages)
    {
        if (candidate.sizeX > newSizeX)
        {
            newSizeX = candidate.sizeX;
        }
        if (candidate.sizeY > newSizeY)
        {
            newSizeY = candidate.sizeY;
        }
    }

    // Second pass: upscale anything smaller than the target size.
    ResizeFilter upscaler = new ResizeFilter(newSizeX, newSizeY, new HashSet <ColorChannelEnum> { ColorChannelEnum.Gray });
    outputImages = new List <ImageDescription>(inputImages.Count);
    foreach (ImageDescription candidate in inputImages)
    {
        bool alreadySized = candidate.sizeX == newSizeX && candidate.sizeY == newSizeY;
        outputImages.Add(alreadySized ? candidate : upscaler.filter(candidate));
    }
}
// Composes the final item image: the background resized into its mask
// rectangle, then the foreground and text layers drawn over it.
//
// Fixes: the original declared a local `_resizeFilter` that was never used
// while referencing `resizeFilter` (typo) — the local is now used
// consistently; the unused `_square` rectangle was removed; the Graphics
// object is now disposed via `using` so it is not leaked when a draw throws.
public void build()
{
    ResizeFilter _resizeFilter = new ResizeFilter();
    this._image = global::EzTvix.Theme.Default.Empty;
    try
    {
        using (Graphics _graphicContainer = System.Drawing.Graphics.FromImage(this._image))
        {
            // Stretch the background exactly into its mask rectangle.
            _resizeFilter.Width = this.Background.Mask.Width;
            _resizeFilter.Height = this.Background.Mask.Height;
            _resizeFilter.KeepAspectRatio = false;
            _graphicContainer.DrawImageUnscaledAndClipped(_resizeFilter.ExecuteFilter(_background.Image), this.Background.Mask);
            _graphicContainer.DrawImageUnscaledAndClipped(this.Foreground, this.Foreground.Mask);
            _graphicContainer.DrawImageUnscaledAndClipped(this.Text, this.Text.Mask);
        }
    }
    catch (Exception)
    {
        // Best-effort rendering: on failure the item keeps the empty image.
        // TODO(review): consider logging instead of silently swallowing.
    }
}
/// <summary>
/// Create a filter with default settings.
/// </summary>
/// <param name="filter">The filter type.</param>
/// <returns>The <see cref="Filter"/> instance.</returns>
/// <exception cref="ArgumentException">
/// <paramref name="filter"/> is invalid.
/// </exception>
private static Filter GetFilter(ResizeFilter filter)
{
    // Map each enum value onto its default-configured implementation;
    // anything unrecognized is rejected.
    if (filter == ResizeFilter.Box)
        return new BoxFilter();
    if (filter == ResizeFilter.Triangle)
        return new TriangleFilter();
    if (filter == ResizeFilter.Cubic)
        return new CubicFilter();
    if (filter == ResizeFilter.Quadric)
        return new QuadricFilter();
    if (filter == ResizeFilter.BSpline)
        return new BSplineFilter();
    if (filter == ResizeFilter.Mitchell)
        return new MitchellFilter();
    if (filter == ResizeFilter.Lanczos)
        return new LanczosFilter();
    if (filter == ResizeFilter.Sinc)
        return new SincFilter();
    if (filter == ResizeFilter.Kaiser)
        return new KaiserFilter();

    throw new ArgumentException("Invalid resize filter.", "filter");
}
// Imports an uploaded image ("FileUpload1") and saves two derived copies
// under ~/Content/Images with GUID file names: a 75px-wide thumbnail and a
// 350px-wide medium image (Height = 0 lets the filter preserve aspect ratio).
//
// Fix: the MemoryStream and Bitmap were never disposed (GDI+ handle leak);
// both are now wrapped in `using`. The stream is kept open for the Bitmap's
// whole lifetime, as GDI+ requires.
public ActionResult ImportImage()
{
    var postedFile = Request.Files["FileUpload1"];
    if (postedFile != null && postedFile.ContentLength > 0)
    {
        using (MemoryStream ms = new MemoryStream())
        {
            postedFile.InputStream.CopyTo(ms);
            using (Image image = new Bitmap(ms))
            {
                string extension = Path.GetExtension(postedFile.FileName);
                string imagesFolder = Server.MapPath("~/Content/Images");

                ResizeFilter resizeFilter = new ResizeFilter { Height = 0, Width = 75 };
                string thumbnailName = Guid.NewGuid().ToString() + extension;
                using (Image thumbnailImage = resizeFilter.Process(image))
                {
                    thumbnailImage.Save(Path.Combine(imagesFolder, thumbnailName));
                }

                resizeFilter.Width = 350;
                resizeFilter.Height = 0;
                string middleName = Guid.NewGuid().ToString() + extension;
                using (Image middleImage = resizeFilter.Process(image))
                {
                    middleImage.Save(Path.Combine(imagesFolder, middleName));
                }
            }
        }
    }
    return RedirectToAction("Index");
}
// Builds the NNEDI3 (OpenCL) doubling chain: two neural passes double width
// then height on luma, while chroma is upscaled conventionally; the halves
// are combined and shifted back into place with a final resize.
public override IFilter CreateFilter(IFilter input)
{
    // Release weight buffers from any previous configuration.
    DisposeHelper.Dispose(ref m_Buffer1);
    DisposeHelper.Dispose(ref m_Buffer2);

    if (!Renderer.IsOpenClAvail || Renderer.RenderQuality.PerformanceMode())
    {
        Renderer.FallbackOccurred = true; // Warn user via player stats OSD
        return(input); // OpenCL is not available, or UNORM8 textures used (not supported); fallback
    }

    // Pass 1 doubles the width, pass 2 doubles the height.
    Func <TextureSize, TextureSize> transformWidth = s => new TextureSize(2 * s.Width, s.Height);
    Func <TextureSize, TextureSize> transformHeight = s => new TextureSize(s.Width, 2 * s.Height);

    var kernel = CompileKernel();
    var shaderH = kernel.Configure(transform: transformWidth);
    var shaderV = kernel.Configure(transform: transformHeight);
    var combine = CompileShader("Combine.hlsl");

    var neuronCount1 = s_NeuronCount[(int)Neurons1];
    var neuronCount2 = s_NeuronCount[(int)Neurons2];
    var weights1 = s_Weights[(int)Neurons1];
    m_Buffer1 = Renderer.CreateClBuffer(weights1);

    // A second weight buffer is only needed when the two passes use
    // different neuron counts; otherwise pass 2 reuses m_Buffer1's weights.
    var differentWeights = neuronCount1 != neuronCount2;
    if (differentWeights)
    {
        var weights2 = s_Weights[(int)Neurons2];
        m_Buffer2 = Renderer.CreateClBuffer(weights2);
    }

    var sourceSize = input.OutputSize;
    if (!IsUpscalingFrom(sourceSize))
    {
        return(input);
    }

    var yuv = input.ConvertToYuv();

    // Chroma is doubled with the regular chroma scalers; the -0.25 offset
    // compensates for the half-pixel shift introduced by doubling.
    var chroma = new ResizeFilter(yuv, new TextureSize(sourceSize.Width * 2, sourceSize.Height * 2),
                                  TextureChannels.ChromaOnly, new Vector2(-0.25f, -0.25f), Renderer.ChromaUpscaler, Renderer.ChromaDownscaler);

    var localWorkSizes = new[] { 8, 8 };
    var nnedi3H = new NNedi3HKernelFilter(shaderH, m_Buffer1, neuronCount1,
                                          new TextureSize(yuv.OutputSize.Width, yuv.OutputSize.Height),
                                          localWorkSizes, yuv);
    var nnedi3V = new NNedi3VKernelFilter(shaderV, m_Buffer2, neuronCount2, differentWeights,
                                          new TextureSize(nnedi3H.OutputSize.Width, nnedi3H.OutputSize.Height),
                                          localWorkSizes, nnedi3H);

    var result = new ShaderFilter(combine, nnedi3V, chroma);

    // Final resize corrects the 0.5-pixel offset and converts back to RGB.
    return(new ResizeFilter(result.ConvertToRgb(), result.OutputSize, new Vector2(0.5f, 0.5f), Renderer.LumaUpscaler,
                            Renderer.LumaDownscaler));
}
// Completion handler for the video-background worker: applies the rendered
// frame to the foreground layer, refreshes the preview, and resets the
// progress bar.
private void bwVideoBackground_RunWorkerCompleted(object sender, RunWorkerCompletedEventArgs e)
{
    var resizer = new ResizeFilter();
    var rendered = (Image)e.Result;

    p_theme.BackgroundVideo.Foreground.Image = resizer.ExecuteFilter(rendered, p_theme.BackgroundVideo.Foreground.Width, p_theme.BackgroundVideo.Foreground.Height);
    previewPic.Image = resizer.ExecuteFilter(p_theme.BackgroundVideo, previewPic.Width, previewPic.Height);

    this.toolStripProgressBar.Value = 0;
}
// Trains `algorithm` for numberOfTrainingSetPasses passes over the shuffled
// training set, feeding each input image (downscaled by resizeFactor) plus
// the base algorithm's output as an extra "Layer" channel; prints per-file
// loss/timing and per-pass / overall summaries.
//
// Fixes: the per-file progress string was corrupted in the source
// ('"Pass: "******"/"' is not valid C#) and has been reconstructed as a
// 1-based pass counter — TODO confirm against the original; the misspelled
// local `tariningPassTimeElapsed` was renamed.
public void trainWithBaseAlgorithm(EdgeDetectionAlgorithm algorithm, EdgeDetectionAlgorithm baseAlgorithm, int resizeFactor)
{
    DateTime trainingStart = DateTime.Now;
    float totalLoss = 0;
    List <String> fileList = new List <string>(benchmark.getTrainingFilesPathList());
    int totalNumberOfFiles = numberOfTrainingSetPasses * fileList.Count;
    int totalIndex = 0;
    for (int pass = 0; pass < numberOfTrainingSetPasses; pass++)
    {
        // Reshuffle each pass so the algorithm never sees a fixed order.
        ListUtils.Shuffle(fileList);
        int index = 1;
        float totalPassLoss = 0;
        DateTime trainingPassStart = DateTime.Now;
        foreach (string trainingFileName in fileList)
        {
            DateTime start = DateTime.Now;
            Console.WriteLine("Pass: " + (pass + 1) + "/" + numberOfTrainingSetPasses + ", " + index + "/" + fileList.Count + " Training file: " + Path.GetFileName(trainingFileName));

            ImageDescription inputImage = ImageFileHandler.loadFromPath(trainingFileName);
            ImageDescription computedImage = baseAlgorithm.test(inputImage);

            // Downscale the color input and inject the base algorithm's
            // (grayscale) output as the extra Layer channel.
            ResizeFilter resizeColor = new ResizeFilter(inputImage.sizeX / resizeFactor, inputImage.sizeY / resizeFactor, ImageDescriptionUtil.colorChannels);
            ImageDescription newInputImage = resizeColor.filter(inputImage);
            ImageDescription inputImageGroundTruth = ImageFileHandler.loadFromPath(benchmark.getTrainingFileGroundTruth(trainingFileName));
            inputImageGroundTruth.computeGrayscale();
            ResizeFilter resizeGrayscale = new ResizeFilter(inputImage.sizeX / resizeFactor, inputImage.sizeY / resizeFactor, ImageDescriptionUtil.grayscaleChannel);
            ImageDescription newInputImageGroundTruth = resizeGrayscale.filter(inputImageGroundTruth);
            ImageDescription resizedComputed = resizeGrayscale.filter(computedImage);
            newInputImage.setColorChannel(ColorChannelEnum.Layer, resizedComputed.gray);

            float loss = algorithm.train(newInputImage, newInputImageGroundTruth);
            totalLoss += loss;
            totalPassLoss += loss;
            index++;
            totalIndex++;

            double timeElapsed = (DateTime.Now - start).TotalSeconds;
            double timeElapsedSoFar = (DateTime.Now - trainingStart).TotalSeconds;
            // Linear ETA extrapolated from average per-file time so far.
            double estimatedTime = (timeElapsedSoFar / totalIndex) * (totalNumberOfFiles - totalIndex);
            Console.WriteLine("Loss: " + loss.ToString("0.00") + " Time: " + timeElapsed.ToString("0.00") + "s Time elapsed: " + timeElapsedSoFar.ToString("0.00") + "s ETA: " + estimatedTime.ToString("0.00") + "s");
        }
        double trainingPassTimeElapsed = (DateTime.Now - trainingPassStart).TotalSeconds;
        Console.WriteLine("Pass took " + trainingPassTimeElapsed.ToString("0.00") + " sec. Pass loss: " + totalPassLoss.ToString("0.00") + " Avg loss: " + (totalPassLoss / (fileList.Count)).ToString("0.00"));
    }
    double totalTimeElapsed = (DateTime.Now - trainingStart).TotalSeconds;
    Console.WriteLine("Training took " + totalTimeElapsed.ToString("0.00") + " sec. Total loss: " + totalLoss.ToString("0.00") + " Avg loss: " + (totalLoss / (totalNumberOfFiles)).ToString("0.00"));
}
// Copies the theme's video-box image into the background-video LayerBox
// layer, shows a resized preview of it, and refreshes the composite preview.
private void btnLoadThemeVideoBox_Click(object sender, EventArgs e)
{
    var resizer = new ResizeFilter();

    p_theme.BackgroundVideo.LayerBox.Image = p_theme.VideoBox.Image;
    pictureBoxVideoBox.Image = resizer.ExecuteFilter(p_theme.BackgroundVideo.LayerBox.Image, pictureBoxVideoBox.Width, pictureBoxVideoBox.Height);

    LoadBackgroundVideoPreview();
}
// When the demo-cover checkbox is ticked, loads the bundled demo cover into
// the cover layer and refreshes both previews; unticking does nothing here.
private void cbDemoCover_CheckedChanged(object sender, EventArgs e)
{
    var resizer = new ResizeFilter();
    if (!cbDemoCover.Checked)
    {
        return;
    }

    p_theme.BackgroundVideo.LayerCover.Image = global::EzTvix.Theme.Default.DemoCover;
    pictureBoxCover.Image = resizer.ExecuteFilter(p_theme.BackgroundVideo.LayerCover.Image, pictureBoxCover.Width, pictureBoxCover.Height);
    LoadBackgroundVideoPreview();
}
/// <summary>
/// Load existing theme image
/// </summary>
/// <param name="itemType">The theme item to load into the editor.</param>
private void LoadItem(ItemType itemType)
{
    // NOTE(review): these three locals are created but never used below —
    // candidates for removal; left untouched here.
    ImageManager _imgMan = new ImageManager();
    ResizeFilter _resizeFilter = new ResizeFilter();
    Image _imgFinal = p_theme.ItemBack;

    p_BackgroundImage = p_theme.Empty;
    p_currentItem = itemType;

    // Video/audio boxes always allow background editing; other items only
    // when their save-layer set includes the background layer.
    switch (p_currentItem)
    {
        case ItemType.VideoBox:
        case ItemType.AudioBox:
            tabPanelBackgroundItem.Enabled = true;
            break;
        default:
            tabPanelBackgroundItem.Enabled = p_theme[p_currentItem].layerToSave.contains(Layer.Background);
            break;
    }

    // NOTE(review): both picture boxes are fed the Foreground image —
    // pbBack may have been intended to show the Background; confirm.
    pbFront.Image = p_theme[p_currentItem].Foreground;
    pbBack.Image = p_theme[p_currentItem].Foreground;

    try
    {
        itemPic.BackgroundImage = p_theme.ItemBack;
        itemPic.BackgroundImageLayout = ImageLayout.Stretch;
        itemPic.Image = p_theme[p_currentItem].Image;
    }
    catch (FileNotFoundException e)
    {
        // empty image by default: the item's image file is missing, so fall
        // back to the standard item background and store it on the theme.
        itemPic.Image = p_theme.ItemBack;
        p_theme[p_currentItem] = p_theme.ItemBack;
    }

    #region *** Init fields ***
    // Flip the guard flag so value-changed handlers treat the assignments
    // below as programmatic (flipped back at the end of the region).
    p_valueSetByCode = !p_valueSetByCode;
    lblFontSample.Font = p_theme[p_currentItem].Text.TextFont;
    lblFontSample.ForeColor = p_theme[p_currentItem].Text.TextColor;
    nudTextX.Value = decimal.Parse(p_theme[p_currentItem].Text.TextZone.X.ToString());
    nudTextY.Value = decimal.Parse(p_theme[p_currentItem].Text.TextZone.Y.ToString());
    nudTextWidth.Value = decimal.Parse(p_theme[p_currentItem].Text.TextZone.Width.ToString());
    nudTextHeight.Value = decimal.Parse(p_theme[p_currentItem].Text.TextZone.Height.ToString());
    nudBackX.Value = decimal.Parse(p_theme[p_currentItem].Background.Mask.X.ToString());
    nudBackY.Value = decimal.Parse(p_theme[p_currentItem].Background.Mask.Y.ToString());
    nudBackWidth.Value = decimal.Parse(p_theme[p_currentItem].Background.Mask.Width.ToString());
    nudBackHeight.Value = decimal.Parse(p_theme[p_currentItem].Background.Mask.Height.ToString());
    p_valueSetByCode = !p_valueSetByCode;
    #endregion
}
// Resizes the source image to the control's two dimension values and writes
// the result to the destination path, with JPEG quality fixed at 90.
public override bool Run()
{
    var imageProcessor = new Simplicode.Imaging.ImageProcessor();
    imageProcessor.JpegCompression = 90L;

    var targetWidth = (int)mControl.Dimension1.Value;
    var targetHeight = (int)mControl.Dimension2.Value;
    imageProcessor.AddFilter(new ResizeFilter(targetWidth, targetHeight));

    imageProcessor.ProcessImage(mControl.SourceFilePathResolvedValue, mControl.DestinationFilePathResolvedValue);
    return true;
}
// Builds a resize modification: records the resizing/scaling modes and wires
// up an enabled ResizeFilter effect configured with the given pixel size.
public mModifyResize(ResizingMode Mode, ScalingMode Type, int Width, int Height)
{
    ResizingType = Mode;
    ScalingType = Type;

    Effect = new ResizeFilter
    {
        Mode = (ResizeMode)(int)ResizingType,
        BitmapScalingMode = (BitmapScalingMode)(int)ScalingType,
        Width = Unit.Pixel(Width),
        Height = Unit.Pixel(Height),
        Enabled = true
    };
    filter = Effect;
}
// Prompts for a JPEG, loads it as the foreground image, and re-renders.
//
// Fixes: the unused ResizeFilter local was removed; the stream is now closed
// via `using` even if Image.FromStream throws (the original only closed it on
// the success path).
private void btnLoadBackgroundVideo_Click(object sender, EventArgs e)
{
    if (openDialogJpg.ShowDialog() == DialogResult.OK)
    {
        using (Stream myStream = openDialogJpg.OpenFile())
        {
            if (myStream != null)
            {
                foreImage = Image.FromStream(myStream);
                Render();
            }
        }
    }
}
// UseHeight mode: the height is pinned to 200px and the width must follow
// from the source image's aspect ratio.
public void ResizeFilterInUseHeightModeCalculatesDimensionsCorrectly()
{
    // Arrange.
    var sourceBitmap = new FastBitmap(@"Resources\Tulips.png", UriKind.Relative);
    var filterUnderTest = new ResizeFilter();
    filterUnderTest.Mode = ResizeMode.UseHeight;
    filterUnderTest.Height = Unit.Pixel(200);

    // Act.
    filterUnderTest.ApplyFilter(sourceBitmap);

    // Assert.
    Assert.AreEqual(300, sourceBitmap.Width);
    Assert.AreEqual(200, sourceBitmap.Height);
}
// Prompts for a JPEG, loads it as the background-video foreground image,
// shows a resized preview, then refreshes the composite preview.
//
// Fix: the stream is now closed via `using` even if Image.FromStream or the
// resize throws (the original only closed it on the success path).
private void btnLoadBackgroundVideo_Click(object sender, EventArgs e)
{
    ResizeFilter _res = new ResizeFilter();
    if (openDialogJpg.ShowDialog() == DialogResult.OK)
    {
        using (Stream myStream = openDialogJpg.OpenFile())
        {
            if (myStream != null)
            {
                p_theme.BackgroundVideo.Foreground.Image = Image.FromStream(myStream);
                pictureBoxVideoBackground.Image = _res.ExecuteFilter(p_theme.BackgroundVideo.Foreground.Image, pictureBoxVideoBackground.Width, pictureBoxVideoBackground.Height);
            }
        }
    }
    LoadBackgroundVideoPreview();
}
// Demonstrates the Yael ResizeFilter on "1.jpg" and returns an HTML table
// containing the sample invocation text.
//
// Fix: the loaded image and the filter's output were never disposed (GDI+
// handle leak, and the source file stayed locked); both are now in `using`.
public static string GetTable()
{
    Table myTable = new Table();

    using (Image myImg = Bitmap.FromFile("1.jpg"))
    {
        //Resize Filter
        ZRLabs.Yael.BasicFilters.ResizeFilter resize = new ResizeFilter();
        using (Image transformed = resize.ExecuteFilter(myImg))
        {
            Row resizeFilter = new Row();
            Cell resizeSimpleCell = new Cell();
            resizeSimpleCell.InnerText = "transformed = resize.ExecuteFilter(myImg);";
            resizeFilter.Cells.Add(resizeSimpleCell);
            myTable.Rows.Add(resizeFilter);
        }
    }

    return myTable.GetInnerHtml();
}
/// <summary>
/// Resizes the image.
/// </summary>
/// <param name="inputFilename">The input filename.</param>
/// <param name="outputFilename">The output filename.</param>
/// <param name="width">The desired width of the output image.</param>
/// <param name="height">The desired height of the output image.</param>
/// <param name="method">The resize calculation method</param>
/// <param name="anchorLocation">The anchor location.</param>
/// <param name="jpegQuality">The JPEG quality.</param>
/// <param name="outputFormat">The output format.</param>
public static void ResizeImage(string inputFilename, string outputFilename, int width, int height,
                               ResizeMethod method = ResizeMethod.KeepAspectRatio,
                               AnchorLocation anchorLocation = AnchorLocation.Middle,
                               long jpegQuality = 80L,
                               ImageFormatType outputFormat = ImageFormatType.Default)
{
    // Fix: the original built a second, identical ResizeFilter that was never
    // used; only the one passed to the processor is created now.
    ImageProcessor processor = new ImageProcessor(new ResizeFilter(width, height, method, anchorLocation));
    processor.JpegCompression = jpegQuality;
    processor.OutputFormat = outputFormat;
    processor.ProcessImage(inputFilename, outputFilename);
}
// Completion handler: resizes the worker's rendered image (keeping aspect
// ratio) into the foreground image, pulses the progress bar, and re-renders.
private void bwVideoBackground_RunWorkerCompleted(object sender, RunWorkerCompletedEventArgs e)
{
    var resizer = new ResizeFilter();
    resizer.KeepAspectRatio = true;
    // NOTE(review): the filter is pre-sized to coverPic, but ExecuteFilter is
    // then called with iconeMask dimensions — confirm which size is intended.
    resizer.Width = coverPic.Width;
    resizer.Height = coverPic.Height;

    if (e.Result == null)
    {
        return;
    }

    foreImage = resizer.ExecuteFilter((Image)e.Result, iconeMask.Width, iconeMask.Height);
    this.toolStripProgressBar.Value = 99;
    Render();
    this.toolStripProgressBar.Value = 0;
}
// Uniform mode with both bounds set (200x100): the smaller (height) bound
// dominates, so the width is scaled to 150 to preserve the aspect ratio.
// NOTE(review): the test name says "InUseWidthMode" but the filter is
// configured with ResizeMode.Uniform — the name looks stale; confirm.
public void ResizeFilterInUseWidthModeWithDominantHeightCalculatesDimensionsCorrectly()
{
    // Arrange.
    var bitmap = new FastBitmap(@"Resources\Tulips.png", UriKind.Relative);
    var resizeFilter = new ResizeFilter { Mode = ResizeMode.Uniform, Width = Unit.Pixel(200), Height = Unit.Pixel(100) };

    // Act.
    resizeFilter.ApplyFilter(null, bitmap);

    // Assert.
    Assert.AreEqual(150, bitmap.Width);
    Assert.AreEqual(100, bitmap.Height);
}
// Prompts for a cover image, loads it into the cover layer, previews it,
// clears the demo-cover checkbox, then refreshes the composite preview.
//
// Fix: the stream is now closed via `using` even if Image.FromStream or the
// resize throws (the original only closed it on the success path).
private void btnLoadCover_Click(object sender, EventArgs e)
{
    ResizeFilter _res = new ResizeFilter();
    if (openDialog.ShowDialog() == DialogResult.OK)
    {
        using (Stream myStream = openDialog.OpenFile())
        {
            if (myStream != null)
            {
                p_theme.BackgroundVideo.LayerCover.Image = Image.FromStream(myStream);
                pictureBoxCover.Image = _res.ExecuteFilter(p_theme.BackgroundVideo.LayerCover.Image, pictureBoxCover.Width, pictureBoxCover.Height);
                cbDemoCover.Checked = false;
            }
        }
    }
    LoadBackgroundVideoPreview();
}
/// <summary>
/// Resizes the texture. (If original texture has mipmaps, all mipmap levels are automatically
/// recreated.)
/// </summary>
/// <param name="width">The new width.</param>
/// <param name="height">The new height.</param>
/// <param name="depth">The new depth. Must be 1 for 2D textures and cube map textures.</param>
/// <param name="filter">The filter to use for resizing.</param>
/// <param name="alphaTransparency">
/// <see langword="true"/> if the image contains uses non-premultiplied alpha; otherwise,
/// <see langword="false"/> if the image uses premultiplied alpha or has no alpha.
/// </param>
/// <param name="wrapMode">
/// The texture address mode that will be used for sampling the at runtime.
/// </param>
/// <returns>The resized texture.</returns>
public Texture Resize(int width, int height, int depth, ResizeFilter filter, bool alphaTransparency, TextureAddressMode wrapMode)
{
    // Clone the description with the new dimensions.
    var description = Description;
    description.Width = width;
    description.Height = height;
    description.Depth = depth;
    var result = new Texture(description);

    // Resize the base level (mip 0) of every array slice / cube face.
    for (int arrayIndex = 0; arrayIndex < description.ArraySize; arrayIndex++)
        TextureHelper.Resize(this, 0, arrayIndex, result, 0, arrayIndex, filter, alphaTransparency, wrapMode);

    // Rebuild the rest of the mip chain from the resized base level.
    if (description.MipLevels > 1)
        result.GenerateMipmaps(filter, alphaTransparency, wrapMode);

    return result;
}
// Percentage-based resize: scales both dimensions of the source image by
// Dimension1 percent and writes the result to the destination path with
// JPEG quality fixed at 90.
public override bool Run()
{
    int newImgWidth;
    int newImgHeight;
    using (var img = Image.FromFile(mControl.SourceFilePathResolvedValue))
    {
        // Integer arithmetic: multiply before dividing to keep precision.
        var scalePercent = (int)mControl.Dimension1.Value;
        newImgWidth = img.Width * scalePercent / 100;
        newImgHeight = img.Height * scalePercent / 100;
    }

    var imageProcessor = new Simplicode.Imaging.ImageProcessor();
    imageProcessor.JpegCompression = 90L;
    imageProcessor.AddFilter(new ResizeFilter(newImgWidth, newImgHeight));
    imageProcessor.ProcessImage(mControl.SourceFilePathResolvedValue, mControl.DestinationFilePathResolvedValue);
    return true;
}
// Renders the background-video composite stretched to the preview control's
// size; falls back to the raw foreground image when a theme file is missing.
//
// Fix: removed the unused ImageManager local and the unused catch variable.
private Image bw_renderBackgroundVideo()
{
    try
    {
        // Stretch (no aspect preservation) to exactly fill the preview.
        ResizeFilter _resizeFilter = new ResizeFilter();
        _resizeFilter.Width = previewPic.Width;
        _resizeFilter.Height = previewPic.Height;
        _resizeFilter.KeepAspectRatio = false;
        return _resizeFilter.ExecuteFilter(p_theme.BackgroundVideo);
    }
    catch (FileNotFoundException)
    {
        // Default to the plain foreground image when a file is missing.
        return p_theme[ItemType.BackgroundVideo].Foreground;
    }
}
// Loads the background's foreground image into the preview picture box,
// stretched to fill; falls back to the default background image when the
// theme file is missing.
//
// Fix: removed the unused ImageManager local and the unused catch variable.
private void LoadPreview()
{
    try
    {
        // Stretch (no aspect preservation) to exactly fill the preview.
        ResizeFilter _resizeFilter = new ResizeFilter();
        _resizeFilter.Width = previewPic.Width;
        _resizeFilter.Height = previewPic.Height;
        _resizeFilter.KeepAspectRatio = false;
        previewPic.Image = _resizeFilter.ExecuteFilter(p_theme[ItemType.Background].Foreground);
    }
    catch (FileNotFoundException)
    {
        // Default image when the theme file is missing.
        previewPic.Image = p_theme.Background;
    }
}
// Loads the selected background-video layer into the editor fields.
// (A fully commented-out renderBackgroundVideo helper that preceded this
// method — superseded by bw_renderBackgroundVideo — has been removed, along
// with three unused locals; the trailing commented-out nudVBack* lines are
// restored as proper comments after being broken by line wrapping.)
private void LoadBackgroundItem(BackgroundTemplateItem itemType)
{
    p_backgroundCurrentLayer = itemType;

    #region *** Init fields ***
    // Flip the guard flag so value-changed handlers treat the assignments
    // below as programmatic (flipped back at the end of the region).
    p_valueSetByCode = !p_valueSetByCode;
    lblVBackSample.Font = p_theme.BackgroundVideo[p_backgroundCurrentLayer].TextFont;
    lblVBackSample.ForeColor = p_theme.BackgroundVideo[p_backgroundCurrentLayer].TextColor;
    textVBackLabel.Text = p_theme.BackgroundVideo[p_backgroundCurrentLayer].TextData;
    nudVBackTextX.Value = decimal.Parse(p_theme.BackgroundVideo[p_backgroundCurrentLayer].TextZone.X.ToString());
    nudVBackTextY.Value = decimal.Parse(p_theme.BackgroundVideo[p_backgroundCurrentLayer].TextZone.Y.ToString());
    nudVBackTextWidth.Value = decimal.Parse(p_theme.BackgroundVideo[p_backgroundCurrentLayer].TextZone.Width.ToString());
    nudVBackTextHeight.Value = decimal.Parse(p_theme.BackgroundVideo[p_backgroundCurrentLayer].TextZone.Height.ToString());

    // Map the layer's text alignment onto the combo's display value.
    switch (p_theme.BackgroundVideo[p_backgroundCurrentLayer].TextFormat.Alignment)
    {
        case StringAlignment.Center:
            BackgroundAlignCombo.Text = "Center";
            break;
        case StringAlignment.Far:
            BackgroundAlignCombo.Text = "Right";
            break;
        default:
            BackgroundAlignCombo.Text = "Left";
            break;
    }

    //nudVBackX.Value = decimal.Parse(p_theme.BackgroundVideo[p_backgroundCurrentLayer].Mask.X.ToString());
    //nudVBackY.Value = decimal.Parse(p_theme.BackgroundVideo[p_backgroundCurrentLayer].Mask.Y.ToString());
    //nudVBackWidth.Value = decimal.Parse(p_theme.BackgroundVideo[p_backgroundCurrentLayer].Width.ToString());
    //nudVBackHeight.Value = decimal.Parse(p_theme.BackgroundVideo[p_backgroundCurrentLayer].Mask.Height.ToString());
    p_valueSetByCode = !p_valueSetByCode;
    #endregion
}
// Builds the NNEDI3 (DX11, two-pass) doubling chain: pass 1 doubles one
// dimension, an interleave/rotate step prepares pass 2 for the other, and
// chroma is upscaled conventionally before the final combine and shift.
public override IFilter CreateFilter(IFilter input)
{
    // Release filters from any previous configuration.
    Cleanup();

    if (!Renderer.IsDx11Avail)
    {
        Renderer.FallbackOccurred = true; // Warn user via player stats OSD
        return(input); // DX11 is not available; fallback
    }

    // Swap-and-double transform used by both interleave and combine stages.
    Func <TextureSize, TextureSize> transform = s => new TextureSize(2 * s.Height, s.Width);

    var shaderPass1 = LoadShader11(GetShaderFileName(Neurons1));
    var shaderPass2 = LoadShader11(GetShaderFileName(Neurons2));
    var interleave = CompileShader("Interleave.hlsl").Configure(transform: transform);
    var combine = CompileShader("Combine.hlsl").Configure(transform: transform);

    var sourceSize = input.OutputSize;
    if (!IsUpscalingFrom(sourceSize))
    {
        return(input);
    }

    var yuv = input.ConvertToYuv();

    // Chroma is doubled with the regular chroma scalers; the -0.25 offset
    // compensates for the half-pixel shift introduced by doubling.
    var chroma = new ResizeFilter(yuv, new TextureSize(sourceSize.Width * 2, sourceSize.Height * 2),
                                  TextureChannels.ChromaOnly, new Vector2(-0.25f, -0.25f), Renderer.ChromaUpscaler, Renderer.ChromaDownscaler);

    m_Filter1 = NNedi3Helpers.CreateFilter(shaderPass1, yuv, Neurons1, Structured);
    var resultY = new ShaderFilter(interleave, yuv, m_Filter1);
    m_Filter2 = NNedi3Helpers.CreateFilter(shaderPass2, resultY, Neurons2, Structured);
    var result = new ShaderFilter(combine, resultY, m_Filter2, chroma);

    // Final resize corrects the 0.5-pixel offset and converts back to RGB.
    return(new ResizeFilter(result.ConvertToRgb(), result.OutputSize, new Vector2(0.5f, 0.5f), Renderer.LumaUpscaler, Renderer.LumaDownscaler));
}
// Builds the NNEDI3 (DX11, single precompiled shader reused for both passes)
// doubling chain; see the two-pass variant for the overall structure.
public override IFilter CreateFilter(IFilter input)
{
    if (!Renderer.IsDx11Avail)
    {
        Renderer.FallbackOccurred = true; // Warn user via player stats OSD
        return(input); // DX11 is not available; fallback
    }

    // Swap-and-double transform used by both interleave and combine stages.
    Func <TextureSize, TextureSize> Transformation = s => new TextureSize(2 * s.Height, s.Width);

    // One compiled shader, selected by neuron count and code path.
    var NNEDI3 = LoadShader11(string.Format("NNEDI3_{0}_{1}.cso", s_NeuronCount[(int)Neurons], s_CodePath[(int)CodePath]));
    var Interleave = CompileShader("Interleave.hlsl").Configure(transform: Transformation);
    var Combine = CompileShader("Combine.hlsl").Configure(transform: Transformation);

    var sourceSize = input.OutputSize;
    if (!IsUpscalingFrom(sourceSize))
    {
        return(input);
    }

    var yuv = input.ConvertToYuv();

    // Chroma is doubled with the regular chroma scalers; the -0.25 offset
    // compensates for the half-pixel shift introduced by doubling.
    var chroma = new ResizeFilter(yuv, new TextureSize(sourceSize.Width * 2, sourceSize.Height * 2),
                                  TextureChannels.ChromaOnly, new Vector2(-0.25f, -0.25f), Renderer.ChromaUpscaler, Renderer.ChromaDownscaler);

    IFilter resultY, result;
    // Both passes run the same shader; the interleave between them rotates
    // the data so each pass doubles a different dimension.
    var pass1 = NNedi3Helpers.CreateFilter(NNEDI3, yuv, Neurons);
    resultY = new ShaderFilter(Interleave, yuv, pass1);
    var pass2 = NNedi3Helpers.CreateFilter(NNEDI3, resultY, Neurons);
    result = new ShaderFilter(Combine, resultY, pass2, chroma);

    // Final resize corrects the 0.5-pixel offset and converts back to RGB.
    return(new ResizeFilter(result.ConvertToRgb(), result.OutputSize, new Vector2(0.5f, 0.5f), Renderer.LumaUpscaler, Renderer.LumaDownscaler));
}
/// <summary>
/// (Re-)Generates all mipmap levels.
/// </summary>
/// <param name="filter">The filter to use for resizing.</param>
/// <param name="alphaTransparency">
/// <see langword="true"/> if the image contains uses non-premultiplied alpha; otherwise,
/// <see langword="false"/> if the image uses premultiplied alpha or has no alpha.
/// </param>
/// <param name="wrapMode">
/// The texture address mode that will be used for sampling the at runtime.
/// </param>
public void GenerateMipmaps(ResizeFilter filter, bool alphaTransparency, TextureAddressMode wrapMode)
{
    var oldDescription = Description;
    var newDescription = Description;

    // Determine number of mipmap levels.
    if (oldDescription.Dimension == TextureDimension.Texture3D)
        newDescription.MipLevels = TextureHelper.CalculateMipLevels(oldDescription.Width, oldDescription.Height, oldDescription.Depth);
    else
        newDescription.MipLevels = TextureHelper.CalculateMipLevels(oldDescription.Width, oldDescription.Height);

    if (oldDescription.MipLevels != newDescription.MipLevels)
    {
        // The mip count changed: rebuild the image collection, carrying the
        // existing level-0 images over so only the lower mips are recreated.
        // Update Description and Images.
        var oldImages = Images;
        Description = newDescription;
        ValidateTexture(newDescription);

        // Recreate image collection. (Mipmap level 0 is copied from existing image collection.)
        Images = CreateImageCollection(newDescription, true);
        for (int arrayIndex = 0; arrayIndex < newDescription.ArraySize; arrayIndex++)
        {
            for (int zIndex = 0; zIndex < newDescription.Depth; zIndex++)
            {
                int oldIndex = oldDescription.GetImageIndex(0, arrayIndex, zIndex);
                int newIndex = newDescription.GetImageIndex(0, arrayIndex, zIndex);
                Images[newIndex] = oldImages[oldIndex];
            }
        }
    }

    // Downsample mipmap levels: each level is produced from the level above.
    for (int arrayIndex = 0; arrayIndex < newDescription.ArraySize; arrayIndex++)
        for (int mipIndex = 0; mipIndex < newDescription.MipLevels - 1; mipIndex++)
            TextureHelper.Resize(this, mipIndex, arrayIndex, this, mipIndex + 1, arrayIndex, filter, alphaTransparency, wrapMode);
}
/// <summary>
/// Resizes a 2D image.
/// </summary>
/// <param name="srcImage">The input image.</param>
/// <param name="dstImage">The output image.</param>
/// <param name="filter">The filter to use for resizing.</param>
/// <param name="alphaTransparency">
/// <see langword="true"/> if the image contains uses non-premultiplied alpha; otherwise,
/// <see langword="false"/> if the image uses premultiplied alpha or has no alpha.
/// </param>
/// <param name="wrapMode">
/// The texture address mode that will be used for sampling the at runtime.
/// </param>
public static void Resize(Image srcImage, Image dstImage, ResizeFilter filter, bool alphaTransparency, TextureAddressMode wrapMode)
{
    // Translate the enum value into a concrete filter implementation, then
    // forward to the core resize routine.
    Filter filterImplementation = GetFilter(filter);
    Resize(srcImage, dstImage, filterImplementation, alphaTransparency, wrapMode);
}
/// <summary>
/// (Re-)Generates all mipmap levels.
/// </summary>
/// <param name="filter">The filter to use for resizing.</param>
/// <param name="alphaTransparency">
/// <see langword="true"/> if the image contains uses non-premultiplied alpha; otherwise,
/// <see langword="false"/> if the image uses premultiplied alpha or has no alpha.
/// </param>
/// <param name="wrapMode">
/// The texture address mode that will be used for sampling the at runtime.
/// </param>
public void GenerateMipmaps(ResizeFilter filter, bool alphaTransparency, TextureAddressMode wrapMode)
{
    var oldDescription = Description;
    var newDescription = Description;

    // Determine number of mipmap levels.
    if (oldDescription.Dimension == TextureDimension.Texture3D)
        newDescription.MipLevels = TextureHelper.CalculateMipLevels(oldDescription.Width, oldDescription.Height, oldDescription.Depth);
    else
        newDescription.MipLevels = TextureHelper.CalculateMipLevels(oldDescription.Width, oldDescription.Height);

    if (oldDescription.MipLevels != newDescription.MipLevels)
    {
        // The mip count changed: rebuild the image collection, carrying the
        // existing level-0 images over so only the lower mips are recreated.
        // Update Description and Images.
        var oldImages = Images;
        Description = newDescription;
#if DEBUG
        // Validation is debug-only in this variant.
        ValidateTexture(newDescription);
#endif

        // Recreate image collection. (Mipmap level 0 is copied from existing image collection.)
        Images = CreateImageCollection(newDescription, true);
        for (int arrayIndex = 0; arrayIndex < newDescription.ArraySize; arrayIndex++)
        {
            for (int zIndex = 0; zIndex < newDescription.Depth; zIndex++)
            {
                int oldIndex = oldDescription.GetImageIndex(0, arrayIndex, zIndex);
                int newIndex = newDescription.GetImageIndex(0, arrayIndex, zIndex);
                Images[newIndex] = oldImages[oldIndex];
            }
        }
    }

    // Downsample mipmap levels: each level is produced from the level above.
    for (int arrayIndex = 0; arrayIndex < newDescription.ArraySize; arrayIndex++)
        for (int mipIndex = 0; mipIndex < newDescription.MipLevels - 1; mipIndex++)
            TextureHelper.Resize(this, mipIndex, arrayIndex, this, mipIndex + 1, arrayIndex, filter, alphaTransparency, wrapMode);
}
/// <overloads>
/// <summary>
/// Resizes a texture/image.
/// </summary>
/// </overloads>
///
/// <summary>
/// Resizes a 2D texture or 3D (volume) texture.
/// </summary>
/// <param name="srcTexture">The input texture.</param>
/// <param name="srcMipIndex">The mipmap level of the input image.</param>
/// <param name="srcArrayOrFaceIndex">
/// The array index (or the face index for cube maps) of the input image. Must be 0 for volume
/// textures.
/// </param>
/// <param name="dstTexture">The output texture.</param>
/// <param name="dstMipIndex">The mipmap level of the output image.</param>
/// <param name="dstArrayOrFaceIndex">
/// The array index (or the face index for cube maps) of the output image. Must be 0 for volume
/// textures.
/// </param>
/// <param name="filter">The filter to use for resizing.</param>
/// <param name="alphaTransparency">
/// <see langword="true"/> if the image contains uses non-premultiplied alpha; otherwise,
/// <see langword="false"/> if the image uses premultiplied alpha or has no alpha.
/// </param>
/// <param name="wrapMode">
/// The texture address mode that will be used for sampling the at runtime.
/// </param>
public static void Resize(Texture srcTexture, int srcMipIndex, int srcArrayOrFaceIndex,
                          Texture dstTexture, int dstMipIndex, int dstArrayOrFaceIndex,
                          ResizeFilter filter, bool alphaTransparency, TextureAddressMode wrapMode)
{
    // Translate the enum value into a concrete filter implementation, then
    // forward to the core resize routine.
    Filter filterImplementation = GetFilter(filter);
    Resize(srcTexture, srcMipIndex, srcArrayOrFaceIndex,
           dstTexture, dstMipIndex, dstArrayOrFaceIndex,
           filterImplementation, alphaTransparency, wrapMode);
}