/// <summary>
/// Accumulates an additional tolerance value into the tolerance buffer at [x,y].
/// Coordinates that fall outside the buffer are silently ignored.
/// </summary>
/// <param name="x">x index</param>
/// <param name="y">y index</param>
/// <param name="tolerance">Tolerance value to add to the existing entry</param>
public void AddToTolerance(int x, int y, Color tolerance)
{
    // Guard clause: do nothing for out-of-bounds writes.
    bool inBounds = (x >= 0) && (x < width) && (y >= 0) && (y < height);
    if (!inBounds)
    {
        return;
    }

    toleranceBuffer[x, y] = ColorOperations.Add(tolerance, toleranceBuffer[x, y]);
}
/// <summary>
/// Computes the comparison tolerance for a sample point by examining the 2x2
/// pixel neighborhood around it. Known pixels contribute the max of their
/// difference from <paramref name="expected"/> and any stored buffer tolerance;
/// each unknown pixel widens the tolerance by 25% instead.
/// </summary>
private Color GetTolerance(Point point, Color expected)
{
    // pixelCenter is factored into this point
    int left = (int)Math.Floor(point.X - Const.pixelCenterX);
    int top = (int)Math.Floor(point.Y - Const.pixelCenterY);

    Color unknownPixelTolerance = ColorOperations.ColorFromArgb(0, 0, 0, 0);
    Color knownPixelTolerance = ColorOperations.ColorFromArgb(0, 0, 0, 0);

    // Walk the 2x2 neighborhood whose upper-left corner is (left, top).
    for (int row = top; row <= top + 1; row++)
    {
        for (int column = left; column <= left + 1; column++)
        {
            Color? sample = SafeGetPixel(column, row);
            if (!sample.HasValue)
            {
                // This pixel's value is unknown; widen tolerance by 25% per missing sample.
                unknownPixelTolerance = ColorOperations.Add(
                    unknownPixelTolerance,
                    ColorOperations.ColorFromArgb(.25, .25, .25, .25));
                continue;
            }

            // Keep the max of the observed difference and any pre-existing tolerance.
            Color difference = ColorOperations.AbsoluteDifference(sample.Value, expected);
            knownPixelTolerance = ColorOperations.Max(knownPixelTolerance, difference);
            knownPixelTolerance = ColorOperations.Max(knownPixelTolerance, toleranceBuffer[column, row]);
        }
    }

    return ColorOperations.Add(knownPixelTolerance, unknownPixelTolerance);
}
/// <summary>
/// Produce a Difference image from a screen capture and a RenderBuffer. For every pixel, if it is an exact match or
/// if the difference is within the provided tolerance, the pixel is marked as black. Otherwise the diff value is used.
/// If the captured image is smaller than the expected image, throw an exception and refuse to compare them.
/// Otherwise, compare the expected image with the matching region (the upper left corner) of the rendered image.
/// We'll do the comparison by x,y coordinates and not pointer math (ie: y*width +x) to ensure correct matching.
/// </summary>
/// <returns>A new Render buffer with the Diff image on the framebuffer and a color coded image on the tbuffer.</returns>
public static RenderBuffer ComputeDifference(Color[,] captured, RenderBuffer expected)
{
    if (expected.Width > captured.GetLength(0) || expected.Height > captured.GetLength(1))
    {
        throw new ApplicationException(exceptionCapturedRenderedEqual);
    }

    RenderBuffer result = new RenderBuffer(expected.Width, expected.Height);

    // We want to write to this directly, set z-test to always write ...
    result.DepthTestFunction = DepthTestFunction.Always;

    // We want to ignore any potential z-tolerance as well ...
    for (int y = 0; y < result.Height; y++)
    {
        for (int x = 0; x < result.Width; x++)
        {
            Color actual = captured[x, y];
            Color wanted = expected.FrameBuffer[x, y];

            // Ignore alpha differences.
            Color diff = ColorOperations.AbsoluteDifference(wanted, actual);
            diff.A = 0xff;
            result.FrameBuffer[x, y] = diff;

            if (ColorOperations.AreWithinTolerance(actual, wanted, Colors.Black))
            {
                // Perfect match: black on both buffers.
                result.ToleranceBuffer[x, y] = Colors.Black;
                result.FrameBuffer[x, y] = Colors.Black;
            }
            else if (ColorOperations.AreWithinTolerance(actual, wanted, expected.ToleranceBuffer[x, y]))
            {
                // Within the per-pixel tolerance: black diff, first coded color on the tbuffer.
                result.ToleranceBuffer[x, y] = codedColor[0];
                result.FrameBuffer[x, y] = Colors.Black;
            }
            else
            {
                // Out of tolerance: color-code by the first (smallest) threshold that contains the diff.
                for (int i = 1; i < codedColor.Length; i++)
                {
                    Color widened = ColorOperations.Add(toleranceThreshold[i], expected.ToleranceBuffer[x, y]);
                    if (ColorOperations.AreWithinTolerance(actual, wanted, widened))
                    {
                        result.ToleranceBuffer[x, y] = codedColor[i];
                        break;
                    }
                }
            }
        }
    }

    return result;
}
/// <summary>
/// Spotlight override: applies the cone-angle factor on top of the base class's
/// distance/falloff attenuation, and accumulates tolerance error for points near
/// the outer cone boundary where the real renderer may or may not light them.
/// </summary>
/// <param name="modelPosition">Surface point being lit, in model space.</param>
/// <param name="nonAttenuatedValue">Raw (unattenuated) light contribution.</param>
/// <returns>Contribution after distance attenuation and cone-angle scaling.</returns>
protected override double GetAttenuatedContribution(Point3D modelPosition, double nonAttenuatedValue)
{
    // distance+falloff Attenuated raw contribution
    double attenuatedContribution = base.GetAttenuatedContribution(modelPosition, nonAttenuatedValue);

    // Angle between the spotlight's axis and the direction to the surface point.
    Vector3D surfaceDirection = MathEx.Normalize(modelPosition - this.position);
    double cosSurfaceAngle = MathEx.DotProduct(surfaceDirection, direction);
    double surfaceAngle = MathEx.ToDegrees(Math.Acos(cosSurfaceAngle));
    double angleFactor;

    if (surfaceAngle <= innerConeAngle)
    {
        // Fully inside the inner cone: no angular falloff.
        angleFactor = 1.0;
    }
    // Make sure we render in the larger bound, since it will affect our final error
    else if (surfaceAngle > innerConeAngle && surfaceAngle < (outerConeAngle + RenderTolerance.SpotLightAngleTolerance))
    {
        // Do DX light equation fade
        angleFactor = cosSurfaceAngle - cosInnerConeAngle;
        angleFactor /= cosOuterConeAngle - cosInnerConeAngle;
        angleFactor = Math.Max(0, angleFactor);
        // NOTE(review): as written, this factor ramps 0 -> 1 from the inner cone to the
        // outer cone, which is the inverse of the usual DX spotlight falloff (1 at inner,
        // 0 at outer). The commented line below would flip it to the conventional form —
        // confirm whether leaving it disabled is intentional for this tolerance renderer.
        // angleFactor = 1.0 - angleFactor;
    }
    else // Greater than outer cone angle
    {
        angleFactor = 0.0;
    }

    double totalContribution = attenuatedContribution * angleFactor;

    // If we're within SpotLightAngleTolerance degrees of being unlit, then set
    // error based on how much light we expect
    if (Math.Abs(surfaceAngle - outerConeAngle) < RenderTolerance.SpotLightAngleTolerance)
    {
        // This is so close, we may or may not light this point
        // So we compute the maximum possible error by scaling (255,255,255) by the
        // light contribution we would have if we DID light it.
        Color expected = ColorOperations.ScaleOpaque(lightColor, totalContribution);
        Color angleError = ColorOperations.Modulate(expected, lightToleranceColor);
        lastIlluminationError = ColorOperations.Add(lastIlluminationError, angleError);
    }

    return(totalContribution);
}
/// <summary>
/// Quantizes every framebuffer pixel from 32-bit to 16-bit color and widens each
/// pixel's tolerance by the rounding error that quantization introduces.
/// </summary>
private void ConvertTo16BitColor()
{
    // BUGFIX: this declaration was hidden behind a stray "//", leaving
    // roundingTolerance undefined in the loop below. Restored so the method compiles.
    Color roundingTolerance = ColorOperations.ConvertToleranceFrom32BitTo16Bit(RenderTolerance.DefaultColorTolerance);

    for (int y = 0; y < height; y++)
    {
        for (int x = 0; x < width; x++)
        {
            frameBuffer[x, y] = ColorOperations.ConvertFrom32BitTo16Bit(frameBuffer[x, y]);
            // Every converted pixel picks up the same quantization tolerance.
            toleranceBuffer[x, y] = ColorOperations.Add(toleranceBuffer[x, y], roundingTolerance);
        }
    }
}
/// <summary>
/// Applies distance attenuation (constant + linear + quadratic) and the light's
/// range cutoff to a raw light contribution, accumulating tolerance error for
/// points sitting on the edge of the range.
/// </summary>
/// <param name="modelPosition">Surface point being lit, in model space.</param>
/// <param name="nonAttenuatedValue">Raw (unattenuated) light contribution.</param>
/// <returns>Attenuated, non-negative contribution (deliberately not clamped to 1).</returns>
protected virtual double GetAttenuatedContribution(Point3D modelPosition, double nonAttenuatedValue)
{
    Vector3D toLight = this.position - modelPosition;
    double distance = MathEx.Length(toLight);

    double attenuation = constantAttenuation
                       + (linearAttenuation * distance)
                       + (quadraticAttenuation * distance * distance);

    // Clamp so attenuation can never augment the light or go negative.
    attenuation = Math.Max(1.0, attenuation);

    // Points beyond the light's range receive nothing; otherwise divide out the attenuation.
    double finalContribution = (distance > range)
        ? 0.0
        : nonAttenuatedValue / attenuation;

    // Prevent negative contributions
    finalContribution = Math.Max(finalContribution, 0);

    if (Math.Abs(distance - range) < RenderTolerance.LightingRangeTolerance)
    {
        // This point sits right on the range boundary, so the real renderer may or
        // may not light it. Accumulate the maximum possible error: the contribution
        // we would get if the point WERE lit, modulated by the tolerance color.
        Color expected = ColorOperations.ScaleOpaque(lightColor, finalContribution);
        Color rangeError = ColorOperations.Modulate(expected, lightToleranceColor);
        lastIlluminationError = ColorOperations.Add(lastIlluminationError, rangeError);
    }

    // We don't clamp to 1, since this would break attenuating the falloff of a spotlight.
    return finalContribution;
}
/// <summary>
/// Creates a new color record from the request body.
/// Returns 201 when the insert affected at least one row, 401 otherwise.
/// </summary>
public async Task <IActionResult> Post([FromBody] Color newColorOperations)
{
    bool created = colorOperations.Add(newColorOperations) > 0;

    if (!created)
    {
        return StatusCode(StatusCodes.Status401Unauthorized, new
        {
            status = false,
            message = "Error al crear color",
        });
    }

    return StatusCode(StatusCodes.Status201Created, new
    {
        status = true,
        message = "Color creado"
    });
}
/// <summary>
/// Saves the dialog: inserts a new Color when none is being edited, otherwise
/// updates the existing one. Always closes the form after showing the result.
/// </summary>
private void btnGuardar_Click(object sender, EventArgs e)
{
    if (!Validar())
    {
        return;
    }

    if (color == null)
    {
        // Insert a brand-new color record.
        bool inserted = colorOperations.Add(new Color { descripcion = txtDescripcion.Text }) > 0;
        if (inserted)
        {
            XtraMessageBox.Show("Color insertado correctamente", "Charls Shoes", MessageBoxButtons.OK, MessageBoxIcon.Information);
        }
        else
        {
            XtraMessageBox.Show("Ocurrió un error en la inserción", "Charls Shoes", MessageBoxButtons.OK, MessageBoxIcon.Error);
        }
    }
    else
    {
        // Update the existing record with the edited description.
        color.descripcion = txtDescripcion.Text;
        bool updated = colorOperations.Update(color) > 0;
        if (updated)
        {
            XtraMessageBox.Show("Color modificado correctamente", "Charls Shoes", MessageBoxButtons.OK, MessageBoxIcon.Information);
        }
        else
        {
            // NOTE(review): the failure message uses the Information icon (not Error)
            // in the original; preserved as-is.
            XtraMessageBox.Show("Ocurrió un error en la modificación", "Charls Shoes", MessageBoxButtons.OK, MessageBoxIcon.Information);
        }
    }

    this.Close();
}
/// <summary>
/// Accepts the dialog: inserts a new Color when none is being edited, otherwise
/// updates the existing one. Always closes the form after showing the result.
/// </summary>
private void btnAceptar_Click(object sender, EventArgs e)
{
    if (!Validar())
    {
        return;
    }

    if (color == null)
    {
        // Insert a brand-new color record.
        var nuevo = new Color
        {
            descripcion = txtDescripcion.Text,
        };

        string mensaje = (colorOperations.Add(nuevo) > 0)
            ? "Color Insertado Correctamente"
            : "Ocurrió un error inesperado";
        XtraMessageBox.Show(mensaje, Application.ProductName, MessageBoxButtons.OK, MessageBoxIcon.Information);
    }
    else
    {
        // Update the existing record with the edited description.
        color.descripcion = txtDescripcion.Text;

        string mensaje = (colorOperations.Update(color) > 0)
            ? "Color Modificado Correctamente"
            : "Ocurrió un error inesperado";
        XtraMessageBox.Show(mensaje, Application.ProductName, MessageBoxButtons.OK, MessageBoxIcon.Information);
    }

    this.Close();
}
/// <summary>
/// Saves a new Color record and flags the parent view for refresh via
/// Misc.actualiza. The form is closed whenever validation succeeds.
/// </summary>
private void btnGuardar_Click(object sender, EventArgs e)
{
    if (!Validar())
    {
        return;
    }

    if (color == null)
    {
        var nuevo = new Entities.Color { descripcion = txtNombre.Text };
        if (colorOperations.Add(nuevo) > 0)
        {
            XtraMessageBox.Show("Color registrado correctamente", "System Shoes", MessageBoxButtons.OK, MessageBoxIcon.Information);
            // Signal the caller that its list needs refreshing.
            Misc.actualiza = true;
        }
        else
        {
            XtraMessageBox.Show("Ocurrió un error al guardar el Color", "System Shoes", MessageBoxButtons.OK, MessageBoxIcon.Warning);
            Misc.actualiza = false;
        }
    }

    this.Close();
}
/// <summary>
/// Computes the half-vector specular term for a light, accumulating tolerance
/// error (and suppressing the highlight) when N·L sits within the dot-product
/// tolerance of zero, where the real renderer may or may not light the point.
/// </summary>
/// <param name="view">Direction toward the viewer (normalized internally).</param>
/// <param name="normal">Surface normal (normalized internally).</param>
/// <param name="light">Direction toward the light (normalized internally).</param>
/// <param name="exponent">Specular power.</param>
/// <returns>Specular contribution in [0,1].</returns>
protected double GetSpecularContribution(Vector3D view, Vector3D normal, Vector3D light, double exponent)
{
    System.Diagnostics.Debug.Assert(RenderTolerance.SpecularLightDotProductTolerance >= 0);

    // Work with unit-length copies throughout.
    Vector3D n = MathEx.Normalize(normal);
    Vector3D l = MathEx.Normalize(light);
    Vector3D eye = MathEx.Normalize(view);

    double spec = 0.0;
    double nDotL = MathEx.DotProduct(l, n);

    if (nDotL >= -RenderTolerance.SpecularLightDotProductTolerance)
    {
        // Half-vector highlight: (N·H)^exponent, clamped at zero.
        Vector3D half = MathEx.Normalize(l + eye);
        spec = Math.Pow(Math.Max(MathEx.DotProduct(n, half), 0), exponent);

        // We want a +/- tolerance bound on the dot product computation
        if (Math.Abs(nDotL) <= RenderTolerance.SpecularLightDotProductTolerance)
        {
            // Borderline case: the real renderer may or may not light this point.
            // Accumulate the maximum possible error — the specular contribution we
            // would add if we DID light it — then skip it in our own rendering.
            Color expected = ColorOperations.ScaleOpaque(lightColor, spec);
            Color specError = ColorOperations.Modulate(expected, lightToleranceColor);
            lastIlluminationError = ColorOperations.Add(lastIlluminationError, specError);
            spec = 0.0;
        }
    }

    return spec;
}
/// <summary>
/// Clip the regions specified by the Geometry out of the frame buffer.
/// Because clipping is anti-aliased, tolerance is also widened along clip edges.
/// </summary>
/// <param name="clip">Clip geometry; a null clip is a no-op.</param>
public void ApplyClip(Geometry clip)
{
    if (clip == null)
    {
        return;
    }

    Color[,] clipColors = TextureGenerator.RenderBrushToColorArray(GetClipBrush(clip), width, height);

    // Clipping is anti-aliased. Need tolerance around clip edges.
    Color[,] tolColors = TextureGenerator.RenderBrushToColorArray(GetClipTolerance(clip), width, height);

    for (int y = 0; y < height; y++)
    {
        for (int x = 0; x < width; x++)
        {
            // Scale the premultiplied pixel by the clip's alpha coverage.
            double opacity = ColorOperations.ByteToDouble(clipColors[x, y].A);
            frameBuffer[x, y] = ColorOperations.PreMultipliedOpacityScale(frameBuffer[x, y], opacity);

            // Spread the clip-edge alpha into all four tolerance channels.
            byte edge = tolColors[x, y].A;
            Color tolerance = new Color();
            tolerance.A = tolerance.R = tolerance.G = tolerance.B = edge;
            toleranceBuffer[x, y] = ColorOperations.Add(toleranceBuffer[x, y], tolerance);
        }
    }
}
/// <summary>
/// Lights this pixel using precomputed lighting information, blending one pass
/// per material/texture and accumulating per-pass color tolerance.
/// </summary>
/// <param name="v">Interpolated vertex for this pixel position.</param>
protected override void ComputePixelProgram(Vertex v)
{
    bool rendered = false;
    Color totalTexturingTolerance = emptyColor;

    for (int pass = 0; pass < textures.Length; pass++)
    {
        // A Filter can be null if the Material or the Brush are null.
        // For those cases, we skip the material entirely.
        if (textures[pass] == null)
        {
            continue;
        }

        TextureFilter currentTexture = textures[pass];
        rendered = true;

        // We need extra information for trilinear
        if (currentTexture is TrilinearTextureFilter)
        {
            ((TrilinearTextureFilter)currentTexture).MipMapFactor = v.MipMapFactor;
        }

        // Textures are not stored in premultiplied color space.
        // This means that we have to wait until we find the lookup tolerance before we can premultiply
        // (otherwise Alpha will be way off)
        Color texel = currentTexture.FilteredTextureLookup(v.TextureCoordinates);
        Color texelTolerance = emptyColor;
        if (currentTexture.HasErrorEstimation)
        {
            texelTolerance = currentTexture.FilteredErrorLookup(v.UVToleranceMin, v.UVToleranceMax, texel);
        }

        // Now we can premultiply.
        Color premultTexel = ColorOperations.PreMultiplyColor(texel);
        Color premultTexelTolerance = ColorOperations.PreMultiplyTolerance(texelTolerance, texel.A);

        // Modulate precomputed lighting (which is also premultiplied) by the Brush value
        Color premultColor = ColorOperations.Modulate(v.PrecomputedLight[pass], premultTexel);
        Color premultTolerance = ColorOperations.Modulate(v.PrecomputedLight[pass], premultTexelTolerance);

        // PrecomputedLightTolerance is NOT premultiplied yet (see ComputeVertexProgram above)
        // because we needed to know the final alpha value of lighting * texture.
        // BUGFIX: this declaration was hidden behind a stray "//", leaving
        // premultLightTolerance undefined on the next statement. Restored.
        Color premultLightTolerance = ColorOperations.PreMultiplyTolerance(v.PrecomputedLightTolerance[pass], premultColor.A);
        premultTolerance = ColorOperations.Add(premultTolerance, premultLightTolerance);

        // For additive materials, we need to force the alpha channel to zero.
        // See notes on premultiplied blending in ColorOperations.cs
        if (currentTexture.MaterialType != MaterialType.Diffuse)
        {
            premultColor.A = 0x00;
            // Nothing needs to be done to tolerance's alpha
            // because the framebuffer's alpha value will be used in the blend
        }

        // Write to frame buffer according to alpha value of pixel
        v.Color = ColorOperations.PreMultipliedAlphaBlend(premultColor, v.Color);

        // Accumulate tolerance for each material pass
        totalTexturingTolerance = ColorOperations.PreMultipliedToleranceBlend(
            premultTolerance, totalTexturingTolerance, premultColor.A);
    }

    // Only set a pixel if we actually rendered at least one material for it ...
    if (rendered)
    {
        // Add texturing tolerance to our existing lighting tolerance.
        v.ColorTolerance = ColorOperations.Add(v.ColorTolerance, totalTexturingTolerance);

        // Send the pixel to be rendered
        buffer.SetPixel(
            (int)Math.Floor(v.ProjectedPosition.X),
            (int)Math.Floor(v.ProjectedPosition.Y),
            (float)v.ProjectedPosition.Z,
            v.Color,
            v.ColorTolerance
            );
    }
}
/// <summary>
/// Does a perspective-correct, 3-way interpolation of vertex data based on projected point.
/// Subclasses provide their own implementation.
/// </summary>
/// <param name="x">2D Screen-Space pixel X coordinate</param>
/// <param name="y">2D Screen-Space pixel Y coordinate</param>
/// <returns>Perspective-correct interpolated Vertex value for this point.</returns>
public virtual Vertex GetVertex(double x, double y)
{
    Vertex interpolation = new Vertex();
    Point3D currRasterPoint = new Point3D(x, y, 0);
    Weights screenWeights = ComputeScreenWeights(currRasterPoint);

    // Use screen weights to find current Z
    Point3D currProjectedPoint = new Point3D(x, y, WeightedSum(
        vertex1.ProjectedPosition.Z,
        vertex2.ProjectedPosition.Z,
        vertex3.ProjectedPosition.Z,
        screenWeights));
    interpolation.ProjectedPosition = currProjectedPoint;

    // Now that we have currProjectedPoint, we can go ahead and compute other weights
    Weights homogeneousWeights = ComputeHomogeneousWeights(currProjectedPoint);

    // Now we can use the perspectiveCorrectionFactor and the homogeneousWeights to
    // find the perspective-correct weights.
    Weights pcWeights = ComputePerspectiveCorrectWeights(homogeneousWeights);

    interpolation.W = 1.0 / WeightedSum(
        vertex1.OneOverW, vertex2.OneOverW, vertex3.OneOverW, homogeneousWeights);

    // Position ( Eye Space )
    interpolation.Position = WeightedSum(vertex1.Position, vertex2.Position, vertex3.Position, pcWeights);

    // Normal ( Eye Space )
    interpolation.Normal = MathEx.Normalize(
        pcWeights.W1 * vertex1.Normal +
        pcWeights.W2 * vertex2.Normal +
        pcWeights.W3 * vertex3.Normal);

    // Color
    interpolation.Color = WeightedSum(vertex1.Color, vertex2.Color, vertex3.Color, pcWeights);

    // Color Error
    interpolation.ColorTolerance = WeightedSum(
        vertex1.ColorTolerance, vertex2.ColorTolerance, vertex3.ColorTolerance, pcWeights);

    if (pixelOnEdge)
    {
        // Since numerical precision makes these borderline cases inaccurate,
        // we set the tolerance of this pixel to ignore it.
        interpolation.ColorTolerance = ColorOperations.Add(
            interpolation.ColorTolerance, Color.FromArgb(0x00, 0xFF, 0xFF, 0xFF));
    }

    // NOTE:
    // x and y may not actually be contained by this triangle
    // This can get us texture coordinates that are outside the [0,1] range

    // Texture Coordinates
    interpolation.TextureCoordinates = WeightedSum(
        vertex1.TextureCoordinates,
        vertex2.TextureCoordinates,
        vertex3.TextureCoordinates,
        pcWeights);

    // Precomputed Lighting for each material on this triangle
    if (vertex1.PrecomputedLight != null)
    {
        int materialCount = vertex1.PrecomputedLight.Length;
        interpolation.PrecomputedLight = new Color[materialCount];
        for (int i = 0; i < materialCount; i++)
        {
            interpolation.PrecomputedLight[i] = WeightedSum(
                vertex1.PrecomputedLight[i],
                vertex2.PrecomputedLight[i],
                vertex3.PrecomputedLight[i],
                pcWeights);
        }
    }

    // Precomputed Lighting tolerance for each material on this triangle
    if (vertex1.PrecomputedLightTolerance != null)
    {
        // BUGFIX: this branch previously sized the array from PrecomputedLight.Length,
        // which throws if PrecomputedLight is null while the tolerance array is not,
        // and miscounts if the arrays ever differ in length. Use the tolerance array.
        int materialCount = vertex1.PrecomputedLightTolerance.Length;
        interpolation.PrecomputedLightTolerance = new Color[materialCount];
        for (int i = 0; i < materialCount; i++)
        {
            interpolation.PrecomputedLightTolerance[i] = WeightedSum(
                vertex1.PrecomputedLightTolerance[i],
                vertex2.PrecomputedLightTolerance[i],
                vertex3.PrecomputedLightTolerance[i],
                pcWeights);
        }
    }

    return interpolation;
}