public void Size3_SerializesToJson_WhenNullable()
{
    // A nullable-wrapped Size3 must serialize identically to the plain struct.
    var size = new Size3(1, 2, 3);

    // Pass the shared serializer settings, consistent with Size3_SerializesToJson:
    // they are what produce the camelCase property names asserted below.
    var json = JsonConvert.SerializeObject((Size3?)size, UltravioletJsonSerializerSettings.Instance);

    TheResultingString(json).ShouldBe(@"{""width"":1,""height"":2,""depth"":3}");
}
static void Main(string[] args)
{
    // Demo: with plain ints for both dimensions, swapped arguments compile silently.
    Size size1 = new Size(640, 480);
    Size size2 = new Size(480, 640); // compiles, but wrong
    // Named arguments make the intent explicit even out of order.
    Size size3 = new Size(height: 480, width: 640);
    var height1 = size1.Height;
    var width1 = size1.Width;

    // Size2 wraps each dimension in its own type, so a swap becomes a type error.
    Width2 aWidth2 = new Width2(640);
    Height2 aHeight2 = new Height2(480);
    Size2 size21 = new Size2(aWidth2, aHeight2);
    Size2 size22 = new Size2(aHeight2, aWidth2); // compiler reports error
    Size2 size23 = new Size2(height: aHeight2, width: aWidth2);
    // The wrapped value must be unpacked via .Value.
    var height2 = size21.Height.Value;
    var width2 = size21.Width.Value;

    // Size3's wrappers support explicit casts from int for terser construction,
    // while still rejecting swapped arguments at compile time.
    Size3 size31 = new Size3(new Width3(640), new Height3(480));
    Size3 size32 = new Size3((Width3)640, (Height3)480);
    Size3 size33 = new Size3((Height3)480, (Width3)640); // compiler reports error
    Size3 size34 = new Size3(height: (Height3)480, width: (Width3)640);
    int height3 = size32.Height.Value;
    int width3 = size32.Width.Value;

    // Size4 exposes Width/Height directly as int — no .Value unpacking needed.
    Size4 size41 = new Size4(new Width4(640), new Height4(640));
    int width4 = size41.Width;
    int height4 = size41.Height;

    // Caveat: if the wrappers convert implicitly back to int, passing them to a
    // plain Size(int, int) compiles in either order — the protection is lost.
    var width = new Width4(640);
    var height = new Height4(480);
    Size size4 = new Size(width, height);
    Size size5 = new Size(height, width); // compiles, but wrong
    Size size6 = new Size(height: height, width: width);
}
public void Size3_Parse_CanRoundTrip()
{
    // Parsing the string form of a parsed value must reproduce the value exactly.
    var original = Size3.Parse("123 456 789");
    var roundTripped = Size3.Parse(original.ToString());

    TheResultingValue(original == roundTripped).ShouldBe(true);
}
public void GetData(Resource res, Format format, int subresource, Size3 dim, IntPtr dst, uint size)
{
    Debug.Assert(IO.SupportedFormats.Contains(format) || format == Format.R8_UInt);

    // Bytes per pixel for the accepted formats (default: 4-byte formats).
    int bytesPerPixel;
    if (format == Format.R32G32B32A32_Float)
        bytesPerPixel = 16;
    else if (format == Format.R8_UInt)
        bytesPerPixel = 1;
    else
        bytesPerPixel = 4;

    // verify expected size: destination must hold the tightly packed result
    Debug.Assert((uint)(dim.Product * bytesPerPixel) == size);

    var mapped = context.MapSubresource(res, subresource, MapMode.Read, MapFlags.None);

    int packedRowSize = dim.Width * bytesPerPixel;
    // Padding between the last row of one slice and the start of the next.
    int slicePadding = mapped.SlicePitch - mapped.RowPitch * dim.Height;

    for (int z = 0; z < dim.Depth; ++z)
    {
        for (int y = 0; y < dim.Height; ++y)
        {
            // Copy one row, dropping the driver's row padding.
            Dll.CopyMemory(dst, mapped.DataPointer, (uint)packedRowSize);
            dst += packedRowSize;
            mapped.DataPointer += mapped.RowPitch;
        }
        mapped.DataPointer += slicePadding;
    }

    context.UnmapSubresource(res, subresource);
}
public void Size3_IsConstructedProperly()
{
    // The constructor must assign width, height, and depth in that order.
    var size = new Size3(123, 456, 789);

    TheResultingValue(size).ShouldBe(123, 456, 789);
}
public byte[] GetData(Resource res, int subresource, Size3 size, int pixelByteSize)
{
    // Tightly packed output buffer for the whole subresource.
    var result = new byte[size.Product * pixelByteSize];

    var mapped = context.MapSubresource(res, subresource, MapMode.Read, MapFlags.None);

    int rowBytes = size.Width * pixelByteSize;
    // Padding between the final row of a slice and the start of the next slice.
    int slicePadding = mapped.SlicePitch - mapped.RowPitch * size.Height;
    Debug.Assert(rowBytes <= mapped.RowPitch);

    int readOffset = 0;
    int writeOffset = 0;
    for (int z = 0; z < size.Depth; ++z)
    {
        for (int y = 0; y < size.Height; ++y)
        {
            // Copy one row, dropping the driver's row padding.
            Marshal.Copy(mapped.DataPointer + readOffset, result, writeOffset, rowBytes);
            readOffset += mapped.RowPitch;
            writeOffset += rowBytes;
        }
        readOffset += slicePadding;
    }

    context.UnmapSubresource(res, subresource);
    return result;
}
protected CommonBufferData GetCommonData(Matrix transform, float screenAspect)
{
    // Transform the local-space origin point into the target space.
    var localOrigin = new Vector4(0.0f, 0.0f, 0.0f, 1.0f);
    Vector4.Transform(ref localOrigin, ref transform, out var origin);

    var displayExt = (RayCastingDisplayModel)models.Display.ExtendedViewData;
    var mipDim = models.Images.Size.GetMip(models.Display.ActiveMipmap);

    // By default the cube spans the whole mip level; cropping shrinks it.
    Size3 cubeStart = Size3.Zero;
    Size3 cubeEnd = mipDim;
    if (displayExt.UseCropping)
    {
        cubeStart = models.ExportConfig.CropStart.ToPixels(mipDim);
        cubeEnd = models.ExportConfig.CropEnd.ToPixels(mipDim) + Size3.One;
    }

    return new CommonBufferData
    {
        Common = GetCommonData(null),
        Transform = transform,
        Aspect = screenAspect,
        Origin = new Vector3(origin.X, origin.Y, origin.Z),
        CubeStart = cubeStart,
        CubeEnd = cubeEnd,
        SelfShadowing = displayExt.Shading,
        AlphaIsCoverage = displayExt.AlphaIsCoverage,
        HideInternals = displayExt.HideInternals
    };
}
public void Size3_TryParse_FailsForInvalidStrings()
{
    // A string that is not three integers must be rejected.
    var succeeded = Size3.TryParse("foo", out var result);

    TheResultingValue(succeeded).ShouldBe(false);
}
public void Scale(Size3 size, MitchellNetravaliScaleShader shader)
{
    // Produce the rescaled texture first, then release the old one.
    var scaled = shader.Run((TextureArray2D)Image, size);
    Image.Dispose();
    Image = scaled;
}
/// <summary>
/// scales all images to the given dimensions
/// </summary>
public void ScaleImages(Size3 size)
{
    // Nothing to do for an empty list or when the size already matches.
    if (NumImages == 0) return;
    if (Size == size) return;

    if (ImageType != typeof(TextureArray2D))
        throw new Exception("scaling is only supported for 2D images");

    var prevMipmaps = NumMipmaps;
    foreach (var imageData in Images)
        imageData.Scale(size, scaleShader);

    // Re-derive the cached dimension info from the first rescaled image.
    InitDimensions(images[0].Image);

    OnPropertyChanged(nameof(Size));
    // Rescaling can change the maximum mip count; only notify when it did.
    if (prevMipmaps != NumMipmaps)
        OnPropertyChanged(nameof(NumMipmaps));
}
public Resource(uint format, Size3 size, LayerMipmapCount lm)
{
    // Allocate the native image; an id of zero signals failure.
    Id = Dll.image_allocate(format, size.Width, size.Height, size.Depth, lm.Layers, lm.Mipmaps);
    if (Id == 0)
        throw new Exception("error allocating image: " + Dll.GetError());
}
public void Size3_EqualsObject()
{
    // Equals(object) must compare by value and reject unrelated types.
    var left = new Size3(123, 456, 789);
    var right = new Size3(123, 456, 789);

    TheResultingValue(left.Equals((Object)right)).ShouldBe(true);
    TheResultingValue(left.Equals("This is a test")).ShouldBe(false);
}
public void Scale(Size3 size, MitchellNetravaliScaleShader shader, ScalingModel scaling)
{
    // Produce the rescaled texture first, then release the old one.
    var scaled = shader.Run((TextureArray2D)Image, size, scaling);
    LastModified = null;
    Image.Dispose();
    Image = scaled;
}
public void Size3_Parse_SucceedsForValidStrings()
{
    // Three whitespace-separated integers map to width/height/depth.
    var parsed = Size3.Parse("123 456 789");

    TheResultingValue(parsed).ShouldBe(123, 456, 789);
}
public void Size3_SerializesToJson()
{
    // Serializing with the shared settings emits camelCase member names.
    var size = new Size3(1, 2, 3);

    var json = JsonConvert.SerializeObject(size, UltravioletJsonSerializerSettings.Instance);

    TheResultingString(json).ShouldBe(@"{""width"":1,""height"":2,""depth"":3}");
}
public void Resize(GraphicsDevice device, ref Size3 size)
{
    // Keep the existing buffer when it is present and already the right size.
    if (TemporalBuffer != null && TemporalBuffer.Size == size)
        return;

    TemporalBuffer?.Dispose();
    TemporalBuffer = Texture.New2D(device, size.Width, size.Height, 1, ReflectionsFormat,
        TextureFlags.ShaderResource | TextureFlags.RenderTarget);
}
// Returns the stored pixel value, or 255 for coordinates outside the texture.
private int GetPixelValue(Size3 coord, Size3 texSize, int[] pong)
{
    var inBounds = (coord >= new Size3(0)).AllTrue() && (coord < texSize).AllTrue();
    return inBounds ? pong[GetIndex(coord, texSize)] : 255;
}
/// <summary>
/// adds pixels to the image edges
/// </summary>
/// <param name="src">source image</param>
/// <param name="leftPad">padding on the left/top/front side</param>
/// <param name="rightPad">padding on the right/bot/back side</param>
/// <param name="fill">padding fill mode</param>
/// <param name="scaling">used for regenerating mipmaps (may be null if no mipmaps need to be generated)</param>
/// <param name="shared">shared shader and upload-buffer resources</param>
/// <param name="keepMipmaps">if set to false, no mipmaps will be generated</param>
/// <returns>same as source with added padding (amount of mipmaps might change, format remains)</returns>
public ITexture Run(ITexture src, Size3 leftPad, Size3 rightPad, FillMode fill, ScalingModel scaling, SharedModel shared, bool keepMipmaps = true)
{
    // destination size = source size grown by the padding on both sides
    Size3 dstSize = leftPad + rightPad + src.Size;

    // keep a full mip chain only if the source had one (and the caller wants it)
    int nMipmaps = src.NumMipmaps > 1 ? dstSize.MaxMipLevels : 1;
    if (!keepMipmaps) { nMipmaps = 1; }

    var dst = src.Create(new LayerMipmapCount(src.NumLayers, nMipmaps), dstSize, src.Format, src.HasUaViews, true);

    var dev = DirectX.Device.Get();

    // shader constants: where the source lands inside the destination (normalized coordinates)
    shared.Upload.SetData(new BufferData
    {
        Depth = dstSize.Depth,
        Offset = new Float3(leftPad) / new Float3(dstSize),
        Scale = new Float3(dstSize) / new Float3(src.Size),
    });

    shared.QuadShader.Bind(src.Is3D);
    // pick the pixel shader variant matching the texture dimensionality
    if (src.Is3D) { dev.Pixel.Set(shader3D.Pixel); }
    else { dev.Pixel.Set(shader.Pixel); }

    // the sampler index encodes the fill mode for texels outside the source
    dev.Pixel.SetSampler(0, sampler[(int)fill]);
    dev.Pixel.SetConstantBuffer(0, shared.Upload.Handle);

    // render the most detailed mip of every layer; lower mips are regenerated below
    foreach (var lm in src.LayerMipmap.RangeOf(LayerMipmapRange.MostDetailed))
    {
        dev.OutputMerger.SetRenderTargets(dst.GetRtView(lm));
        dev.SetViewScissors(dstSize.Width, dstSize.Height);
        dev.Pixel.SetShaderResource(0, src.GetSrView(lm));
        dev.DrawFullscreenTriangle(dstSize.Depth);
    }

    // remove bindings
    shared.QuadShader.Unbind();
    dev.Pixel.Set(null);
    dev.OutputMerger.SetRenderTargets((RenderTargetView)null);
    dev.Pixel.SetShaderResource(0, null);

    // rebuild the remaining mip levels from the padded top level
    if (dst.NumMipmaps > 1)
    {
        Debug.Assert(scaling != null);
        scaling.WriteMipmaps(dst);
    }

    return(dst);
}
public void Pad(Size3 leftPad, Size3 rightPad, PaddingShader.FillMode fill, Models models)
{
    // Build the padded texture before discarding the current one.
    var padded = models.SharedModel.Padding.Run(Image, leftPad, rightPad, fill, models.Scaling, models.SharedModel);
    LastModified = null;
    Image.Dispose();
    Image = padded;
}
public void Constructor_PositiveVector_CreatesSizeWithSameValues()
{
    // Each vector component must be copied into the matching size component.
    var source = new Vector3(1, 2, 3);

    var size = new Size3(source);

    Assert.AreEqual(source.x, size.X);
    Assert.AreEqual(source.y, size.Y);
    Assert.AreEqual(source.z, size.Z);
}
public void Size3_TotalSize3_IsCalculatedCorrectly()
{
    // Volume must be the product of all three dimensions.
    var first = new Size3(123, 456, 789);
    TheResultingValue(first.Volume).ShouldBe(123 * 456 * 789);

    var second = new Size3(222, 555, 999);
    TheResultingValue(second.Volume).ShouldBe(222 * 555 * 999);
}
public override void Render(LayerMipmapSlice lm, Size3 size)
{
    Debug.Assert(HasWork);

    // Refresh the constant data for the mip level being drawn.
    UpdateData(lm.Mipmap);

    // Vertex stride: two floats per position.
    Shader.Bind(new VertexBufferBinding(positionBuffer.Handle, 2 * sizeof(float), 0));
    Shader.Draw(Boxes, cbuffer, lm.Mipmap, size.XY);
    Shader.Unbind();
}
public Texture3D(int numMipmaps, Size3 size, Format format, bool createUav, bool createRt = true)
{
    Format = format;
    Size = size;
    // A 3D texture always consists of exactly one layer.
    LayerMipmap = new LayerMipmapCount(1, numMipmaps);

    handle = new SharpDX.Direct3D11.Texture3D(Device.Get().Handle, CreateTextureDescription(createUav, createRt));
    CreateTextureViews(createUav, createRt);
}
public TextureArray2D(LayerMipmapCount lm, Size3 size, Format format, bool createUav)
{
    // 2D texture arrays never have a depth dimension.
    Debug.Assert(size.Depth == 1);

    Format = format;
    Size = size;
    LayerMipmap = lm;

    handle = new SharpDX.Direct3D11.Texture2D(Device.Get().Handle, CreateTextureDescription(createUav));
    CreateTextureViews(createUav);
}
public Texture3D(int numMipmaps, Size3 size, Format format, bool createUav)
{
    Format = format;
    Size = size;
    // A 3D texture is single-layer by definition.
    NumLayers = 1;
    NumMipmaps = numMipmaps;

    handle = new SharpDX.Direct3D11.Texture3D(Device.Get().Handle, CreateTextureDescription(createUav));
    CreateTextureViews(createUav);
}
// returns value of the pixel or 127 if out of range
private int getPixelValue(Size3 coord, Size3 texSize, int[] pong)
{
    var inside = (coord >= new Size3(0)).AllTrue() && (coord < texSize).AllTrue();
    return inside ? pong[getIndex(coord, texSize)] : 127;
}
public TextureArray2D(int numLayer, int numMipmaps, Size3 size, Format format, bool createUav)
{
    // The depth component is unused for 2D texture arrays.
    Debug.Assert(size.Depth == 1);

    Format = format;
    Size = size;
    NumLayers = numLayer;
    NumMipmaps = numMipmaps;

    handle = new SharpDX.Direct3D11.Texture2D(Device.Get().Handle, CreateTextureDescription(createUav));
    CreateTextureViews(createUav);
}
public void Constructor_PositiveValues_CreatesSizeWithSameValues()
{
    // Constructor arguments must map directly onto X, Y, and Z.
    var x = 1f;
    var y = 2f;
    var z = 3f;

    var size = new Size3(x, y, z);

    Assert.AreEqual(x, size.X);
    Assert.AreEqual(y, size.Y);
    Assert.AreEqual(z, size.Z);
}
public void Size3_TryParse_SucceedsForValidStrings()
{
    // A well-formed string must parse and yield the expected components.
    var succeeded = Size3.TryParse("123 456 789", out var result);
    if (!succeeded)
        throw new InvalidOperationException("Unable to parse string to Size3.");

    TheResultingValue(result).ShouldBe(123, 456, 789);
}
public void Size3_EqualsSize3()
{
    // Equality must require all three components to match.
    var reference = new Size3(123, 456, 789);

    TheResultingValue(reference.Equals(new Size3(123, 456, 789))).ShouldBe(true);
    TheResultingValue(reference.Equals(new Size3(123, 555, 789))).ShouldBe(false);
    TheResultingValue(reference.Equals(new Size3(222, 456, 789))).ShouldBe(false);
    TheResultingValue(reference.Equals(new Size3(123, 456, 999))).ShouldBe(false);
}
public void Size3_OpEquality()
{
    // operator== must require all three components to match.
    var reference = new Size3(123, 456, 789);

    TheResultingValue(reference == new Size3(123, 456, 789)).ShouldBe(true);
    TheResultingValue(reference == new Size3(123, 555, 789)).ShouldBe(false);
    TheResultingValue(reference == new Size3(222, 456, 789)).ShouldBe(false);
    TheResultingValue(reference == new Size3(123, 456, 999)).ShouldBe(false);
}
/// <summary>
/// Initializes a new instance of the <see cref="Size3Result"/> class.
/// </summary>
/// <param name="value">The value being examined.</param>
internal Size3Result(Size3 value) => this.value = value;
private Texture FindOutputMatchingSize(Size3 targetSize, int scalingDirection)
{
    // Scan the pool for a texture compatible with the requested scaling step.
    for (int index = 0; index < outputTextures.Count; index++)
    {
        var candidate = outputTextures[index];
        if (candidate == null)
            continue;

        // Downscaling needs an output at least as large; upscaling at most as large.
        bool matches = (scalingDirection < 0 && candidate.Size >= targetSize)
                    || (scalingDirection > 0 && candidate.Size <= targetSize);
        if (!matches)
            continue;

        // Remove the texture from the pool
        outputTextures[index] = null;
        return candidate;
    }

    return null;
}
/// <summary>
/// Prepares the scaling.
/// </summary>
/// <param name="inputTexture">The input texture.</param>
/// <param name="scalingDirection">The scaling direction.</param>
/// <param name="outputPixelFormat">The output pixel format.</param>
/// <returns><c>true</c> if we have some output to process; otherwise <c>false</c>.</returns>
private bool PrepareScaling(Texture inputTexture, out int scalingDirection, out PixelFormat outputPixelFormat)
{
    // TODO: support for intermediate output with non-matching size

    // Query all outputs
    outputTextures.Clear();
    outputPixelFormat = PixelFormat.None;
    scalingDirection = 0;

    // Supported size range: 1 .. 2^15 in every dimension.
    var maxSize = new Size3(1 << 15, 1 << 15, 1 << 15);
    var minSize = Size3.One;
    var inputSize = inputTexture.Size;

    for (int i = 0; i < OutputCount; i++)
    {
        var outputTexture = GetOutput(i);
        if (outputTexture != null)
        {
            // Verify pixel format: all outputs must share one view format.
            if (outputPixelFormat != PixelFormat.None && outputPixelFormat != outputTexture.ViewFormat)
            {
                throw new InvalidOperationException("Output texture format [{0}] is not matching other output texture format [{1}]".ToFormat(outputTexture.ViewFormat, outputPixelFormat));
            }
            outputPixelFormat = outputTexture.ViewFormat;

            var outputSize = outputTexture.Size;

            // Verify size is within the supported limits.
            if (outputSize < minSize || outputSize > maxSize)
            {
                throw new InvalidOperationException("Unsupported texture size [{0}] out of limit [{1} - {2}]".ToFormat(outputTexture.Size, minSize, maxSize));
            }

            if (inputSize == outputSize)
            {
                throw new InvalidOperationException("Input and output texture cannot have same size [{0}]".ToFormat(inputSize));
            }

            // Every output must scale in the same direction (all up or all down).
            int newScalingDirection = outputSize.CompareTo(inputSize);
            if (scalingDirection != 0 && Math.Sign(scalingDirection) != Math.Sign(newScalingDirection))
            {
                throw new InvalidOperationException("Support only output scaling to the same direction");
            }
            scalingDirection = newScalingDirection;

            // Check that we are scaling to different texture sizes
            foreach (var existingOutput in outputTextures)
            {
                if (existingOutput.Size == outputTexture.Size)
                {
                    throw new InvalidOperationException("A texture with size [{0}] already exist with the same output size".ToFormat(existingOutput.Size));
                }
            }

            // If the texture is valid, use it
            outputTextures.Add(outputTexture);
        }
    }

    return outputTextures.Count > 0;
}
protected override void DrawCore(RenderContext context)
{
    var input = GetSafeInput(0);
    var output = GetSafeOutput(0);

    // Render the luminance to a power-of-two target, so we preserve energy on downscaling
    var startWidth = Math.Max(1, Math.Min(MathUtil.NextPowerOfTwo(input.Size.Width), MathUtil.NextPowerOfTwo(input.Size.Height)) / 2);
    var startSize = new Size3(startWidth, startWidth, 1);

    // Intermediate blur target: the start size halved UpscaleCount times.
    var blurTextureSize = startSize.Down2(UpscaleCount);

    Texture outputTextureDown = null;
    if (blurTextureSize.Width != 1 && blurTextureSize.Height != 1)
    {
        outputTextureDown = NewScopedRenderTarget2D(blurTextureSize.Width, blurTextureSize.Height, luminanceFormat, 1);
    }

    var luminanceMap = NewScopedRenderTarget2D(startSize.Width, startSize.Height, luminanceFormat, 1);

    // Calculate the first luminance map
    luminanceLogEffect.SetInput(input);
    luminanceLogEffect.SetOutput(luminanceMap);
    luminanceLogEffect.Draw(context);

    // Downscales luminance up to BlurTexture (optional) and 1x1
    multiScaler.SetInput(luminanceMap);
    if (outputTextureDown == null)
    {
        multiScaler.SetOutput(luminance1x1);
    }
    else
    {
        multiScaler.SetOutput(outputTextureDown, luminance1x1);
    }
    multiScaler.Draw();

    // If we have an output texture
    if (outputTextureDown != null)
    {
        // Blur x2 the intermediate output texture
        blur.SetInput(outputTextureDown);
        blur.SetOutput(outputTextureDown);
        blur.Draw(context);
        blur.Draw(context);

        // Upscale from intermediate to output
        multiScaler.SetInput(outputTextureDown);
        multiScaler.SetOutput(output);
        multiScaler.Draw(context);
    }
    else
    {
        // TODO: Workaround so that the output is filled with the 1x1 result
        Scaler.SetInput(luminance1x1);
        Scaler.SetOutput(output);
        Scaler.Draw(context);
    }

    // Calculate average luminance only if needed
    if (EnableAverageLuminanceReadback)
    {
        readback.SetInput(luminance1x1);
        readback.Draw();
        // The readback value is a log2 average; undo the log to get the luminance.
        var rawLogValue = readback.Result[0];
        AverageLuminance = (float)Math.Pow(2.0, rawLogValue);

        // In case AverageLuminance goes crazy because of half float/infinity precision, some code to save the values here:
        //if (float.IsInfinity(AverageLuminance))
        //{
        //    using (var stream = new FileStream("luminance_input.dds", FileMode.Create, FileAccess.Write))
        //    {
        //        input.Save(stream, ImageFileType.Dds);
        //    }
        //    using (var stream = new FileStream("luminance.dds", FileMode.Create, FileAccess.Write))
        //    {
        //        luminanceMap.Save(stream, ImageFileType.Dds);
        //    }
        //}
    }
}
/// <summary>
/// Wraps the specified unit test result for evaluation.
/// </summary>
/// <param name="value">The value to wrap.</param>
/// <returns>The wrapped value.</returns>
protected static Size3Result TheResultingValue(Size3 value) => new Size3Result(value);
protected override void DrawCore(RenderContext context)
{
    var input = GetInput(0);
    var output = GetOutput(0);
    if (input == null || output == null)
    {
        return;
    }

    // Gets the current camera state
    var camera = context.GetCameraFromSlot(Camera);
    if (camera != null)
    {
        // Update the parameters for this post effect
        CameraComponentRenderer.UpdateParameters(context, camera);
    }

    // When the whole chain is disabled, just pass the input through to the output.
    if (!Enabled)
    {
        if (input != output)
        {
            Scaler.SetInput(input);
            Scaler.SetOutput(output);
            Scaler.Draw(context);
        }
        return;
    }

    // If input == output, then copy the input to a temporary texture
    if (input == output)
    {
        var newInput = NewScopedRenderTarget2D(input.Width, input.Height, input.Format);
        GraphicsDevice.Copy(input, newInput);
        input = newInput;
    }

    var currentInput = input;

    if (depthOfField.Enabled && InputCount > 1 && GetInput(1) != null && GetInput(1).IsDepthStencil)
    {
        // DoF
        var dofOutput = NewScopedRenderTarget2D(input.Width, input.Height, input.Format);
        var inputDepthTexture = GetInput(1); // Depth
        depthOfField.SetColorDepthInput(input, inputDepthTexture);
        depthOfField.SetOutput(dofOutput);
        depthOfField.Draw(context);
        currentInput = dofOutput;
    }

    // Luminance pass (only if tone mapping is enabled)
    // TODO: This is not super pluggable to have this kind of dependencies. Check how to improve this
    var toneMap = colorTransformsGroup.Transforms.Get<ToneMap>();
    if (colorTransformsGroup.Enabled && toneMap != null && toneMap.Enabled)
    {
        const int LocalLuminanceDownScale = 3;

        // The luminance chain uses power-of-two intermediate targets, so it expects to output to one as well
        var lumWidth = Math.Min(MathUtil.NextPowerOfTwo(currentInput.Size.Width), MathUtil.NextPowerOfTwo(currentInput.Size.Height));
        lumWidth = Math.Max(1, lumWidth / 2);
        var lumSize = new Size3(lumWidth, lumWidth, 1).Down2(LocalLuminanceDownScale);
        var luminanceTexture = NewScopedRenderTarget2D(lumSize.Width, lumSize.Height, PixelFormat.R16_Float, 1);

        luminanceEffect.SetInput(currentInput);
        luminanceEffect.SetOutput(luminanceTexture);
        luminanceEffect.Draw(context);

        // Set this parameter that will be used by the tone mapping
        colorTransformsGroup.Parameters.Set(LuminanceEffect.LuminanceResult, new LuminanceResult(luminanceEffect.AverageLuminance, luminanceTexture));
    }

    // Bright filter pass (shared input for bloom, light streaks, and lens flares)
    Texture brightTexture = null;
    if (bloom.Enabled || lightStreak.Enabled || lensFlare.Enabled)
    {
        brightTexture = NewScopedRenderTarget2D(currentInput.Width, currentInput.Height, currentInput.Format, 1);
        brightFilter.SetInput(currentInput);
        brightFilter.SetOutput(brightTexture);
        brightFilter.Draw(context);
    }

    // Bloom pass
    if (bloom.Enabled)
    {
        bloom.SetInput(brightTexture);
        bloom.SetOutput(currentInput);
        bloom.Draw(context);
    }

    // Light streak pass
    if (lightStreak.Enabled)
    {
        lightStreak.SetInput(brightTexture);
        lightStreak.SetOutput(currentInput);
        lightStreak.Draw(context);
    }

    // Lens flare pass
    if (lensFlare.Enabled)
    {
        lensFlare.SetInput(brightTexture);
        lensFlare.SetOutput(currentInput);
        lensFlare.Draw(context);
    }

    // When anti-aliasing runs afterwards, the last effect renders to an intermediate target instead of the final output.
    var outputForLastEffectBeforeAntiAliasing = output;
    if (ssaa != null && ssaa.Enabled)
    {
        outputForLastEffectBeforeAntiAliasing = NewScopedRenderTarget2D(output.Width, output.Height, output.Format);
    }

    // When FXAA is enabled we need to detect whether the ColorTransformGroup should output the Luminance into the alpha or not
    var fxaa = ssaa as FXAAEffect;
    var luminanceToChannelTransform = colorTransformsGroup.PostTransforms.Get<LuminanceToChannelTransform>();
    if (fxaa != null)
    {
        if (luminanceToChannelTransform == null)
        {
            luminanceToChannelTransform = new LuminanceToChannelTransform { ColorChannel = ColorChannel.A };
            colorTransformsGroup.PostTransforms.Add(luminanceToChannelTransform);
        }

        // Only enabled when FXAA is enabled and InputLuminanceInAlpha is true
        luminanceToChannelTransform.Enabled = fxaa.Enabled && fxaa.InputLuminanceInAlpha;
    }
    else if (luminanceToChannelTransform != null)
    {
        luminanceToChannelTransform.Enabled = false;
    }

    // Color transform group pass (tonemap, color grading)
    var lastEffect = colorTransformsGroup.Enabled ? (ImageEffect)colorTransformsGroup : Scaler;
    lastEffect.SetInput(currentInput);
    lastEffect.SetOutput(outputForLastEffectBeforeAntiAliasing);
    lastEffect.Draw(context);

    // Anti-aliasing pass (optional), from the intermediate target to the real output.
    if (ssaa != null && ssaa.Enabled)
    {
        ssaa.SetInput(outputForLastEffectBeforeAntiAliasing);
        ssaa.SetOutput(output);
        ssaa.Draw(context);
    }
}