} // RenderModel

/// <summary>
/// Render objects in light space.
/// </summary>
internal void RenderModelCubeShadows(ref Matrix worldMatrix, Model model, Matrix[] boneTransform)
{
    // Skinned models need the bone palette uploaded and the skinned depth technique.
    if (model.IsSkinned)
    {
        spBones.Value = boneTransform;
        Resource.CurrentTechnique = generateCubeLightDepthBufferSkinnedTechnique;
    }
    else
    {
        Resource.CurrentTechnique = generateCubeLightDepthBufferTechnique;
    }
    if (boneTransform == null || model.IsSkinned)
    {
        // One draw call: the whole model shares a single world transform
        // (skinning, if any, is resolved in the vertex shader from the bone palette).
        Matrix worldLightProjectionMatrix;
        Matrix.Multiply(ref worldMatrix, ref lightViewProjectionMatrix, out worldLightProjectionMatrix);
        spWorldViewProjMatrix.Value = worldLightProjectionMatrix;
        spWorldMatrix.Value = worldMatrix;
        Resource.CurrentTechnique.Passes[0].Apply();
        model.Render();
    }
    else
    {
        // Rigid animation: every mesh is pre-multiplied by its own bone transform.
        for (int meshIndex = 0; meshIndex < model.MeshesCount; meshIndex++)
        {
            // NOTE(review): bone index is meshIndex + 1 — presumably slot 0 holds the root transform; confirm against the Model class.
            Matrix meshWorldMatrix;
            Matrix.Multiply(ref boneTransform[meshIndex + 1], ref worldMatrix, out meshWorldMatrix);
            spWorldMatrix.Value = meshWorldMatrix;
            Matrix meshWorldLightProjectionMatrix;
            Matrix.Multiply(ref meshWorldMatrix, ref lightViewProjectionMatrix, out meshWorldLightProjectionMatrix);
            spWorldViewProjMatrix.Value = meshWorldLightProjectionMatrix;
            Resource.CurrentTechnique.Passes[0].Apply();
            // Render every part of this mesh.
            int meshPartCount = model.MeshPartsCountPerMesh[meshIndex];
            for (int meshPartIndex = 0; meshPartIndex < meshPartCount; meshPartIndex++)
            {
                model.RenderMeshPart(meshIndex, meshPartIndex);
            }
        }
    }
} // RenderModelCubeShadows
} // GetParametersHandles

#endregion

#region Render

/// <summary>
/// Render the sky.
/// </summary>
internal void Render(Matrix viewMatrix, Matrix projectionMatrix, float farPlane, Vector3 sunLightDirection, Skydome skydome)
{
    try
    {
        Matrix worldMatrix = Matrix.CreateScale(1f);
        // Inverse-transpose of the view matrix keeps only the camera rotation,
        // so the dome follows the camera (translation and scale are removed).
        Matrix rotationOnlyViewMatrix = Matrix.Transpose(Matrix.Invert(viewMatrix));
        spViewProjectionMatrix.Value = worldMatrix * rotationOnlyViewMatrix * projectionMatrix;
        spTexture.Value = skydome.Texture;
        spWorldMatrix.Value = worldMatrix;
        spViewInverseMatrix.Value = Matrix.Invert(rotationOnlyViewMatrix);
        spLightDirection.Value = sunLightDirection;
        spSkyDayTexture.Value = skyTextureDay;
        spSkyNightTexture.Value = skyTextureNight;
        spSkySunsetTexture.Value = skyTextureSunset;
        Resource.CurrentTechnique.Passes[0].Apply();
        skydomeModel.Render();
    }
    catch (Exception e)
    {
        throw new InvalidOperationException("Skybox shader: Unable to render the sky.", e);
    }
} // Render
} // Begin

#endregion

#region Render

/// <summary>
/// Render the spot light.
/// Sets the light parameters in view space, picks the technique matching the
/// shadow/mask combination, and draws the light's bounding volume (or a custom
/// clip volume) with the appropriate cull/depth states depending on whether the
/// camera is inside the volume.
/// </summary>
/// <exception cref="InvalidOperationException">Wraps any failure during rendering.</exception>
public void Render(Color diffuseColor, Vector3 position, Vector3 direction, float intensity, float range, float innerConeAngle, float outerConeAngle, Texture shadowTexture, Texture lightMaskTexture, Matrix worldMatrix, bool renderClipVolumeInLocalSpace, Model clipVolume = null)
{
    try
    {
        #region Set Parameters

        // Light values are handed to the shader in view space.
        spLightColor.Value = diffuseColor;
        spLightPosition.Value = Vector3.Transform(position, viewMatrix);
        spLightIntensity.Value = intensity;
        spInvLightRadius.Value = 1 / range;
        Vector3 directionVS = Vector3.TransformNormal(direction, viewMatrix);
        directionVS.Normalize();
        spLightDirection.Value = directionVS;
        // Single degrees-to-radians factor. The original mixed 3.141592f and Math.PI
        // in the same method; Math.PI is used consistently here.
        const float degreesToRadians = (float)Math.PI / 180.0f;
        spLightInnerConeAngle.Value = innerConeAngle * degreesToRadians;
        spLightOuterConeAngle.Value = outerConeAngle * degreesToRadians;
        if (lightMaskTexture != null)
        {
            // Projector matrix: takes view-space positions into the mask's clip space.
            Matrix lightViewMatrix = Matrix.CreateLookAt(position, position + direction, Vector3.Up);
            Matrix lightProjectionMatrix = Matrix.CreatePerspectiveFieldOfView(outerConeAngle * degreesToRadians, // Field of view
                                                                               1.0f,   // Aspect ratio
                                                                               1f,     // Near plane
                                                                               range); // Far plane
            spViewToLightViewProjMatrix.Value = Matrix.Invert(viewMatrix) * lightViewMatrix * lightProjectionMatrix;
            spLightMaskTexture.Value = lightMaskTexture;
        }
        else
        {
            spLightMaskTexture.Value = Texture.BlackTexture; // To avoid a potential exception.
        }
        // Compute the light world matrix.
        Matrix boundingLightObjectWorldMatrix;
        if (clipVolume != null)
        {
            boundingLightObjectWorldMatrix = renderClipVolumeInLocalSpace ? Matrix.Identity : worldMatrix;
        }
        else
        {
            // Scale according to light radius, and translate it to light position.
            boundingLightObjectWorldMatrix = Matrix.CreateScale(range) * Matrix.CreateTranslation(position);
            // TODO: when the cone model is exported I have to include the rotation into consideration.
        }
        spWorldViewProjMatrix.Value = boundingLightObjectWorldMatrix * viewMatrix * projectionMatrix;
        spWorldViewMatrix.Value = boundingLightObjectWorldMatrix * viewMatrix;
        if (shadowTexture != null)
        {
            spShadowTexture.Value = shadowTexture;
            Resource.CurrentTechnique = lightMaskTexture != null ? spotLightWithShadowsWithMaskTechnique : spotLightWithShadowsTechnique;
        }
        else
        {
            spShadowTexture.Value = Texture.BlackTexture; // To avoid a potential exception.
            Resource.CurrentTechnique = lightMaskTexture != null ? spotLightWithMaskTechnique : spotLightTechnique;
        }

        #endregion

        // TODO: Implement the stencil optimization.

        // Calculate the distance between the camera and light center.
        float cameraToCenter = Vector3.Distance(Matrix.Invert(viewMatrix).Translation, position) - nearPlane;
        // If we are inside the light volume, draw the sphere's inside face.
        if (cameraToCenter <= range)
        {
            EngineManager.Device.DepthStencilState = interiorOfBoundingVolumeDepthStencilState;
            EngineManager.Device.RasterizerState = RasterizerState.CullClockwise;
        }
        else
        {
            EngineManager.Device.DepthStencilState = DepthStencilState.DepthRead;
            EngineManager.Device.RasterizerState = RasterizerState.CullCounterClockwise;
        }
        Resource.CurrentTechnique.Passes[0].Apply();
        if (clipVolume != null)
        {
            clipVolume.Render();
        }
        else
        {
            boundingLightObject.Render();
        }
    }
    catch (Exception e)
    {
        throw new InvalidOperationException("Light Pre Pass Spot Light: Unable to render.", e);
    }
} // Render
} // GetParameters

#endregion

#region Render

/// <summary>
/// Generate the anamorphic lens flare texture: renders the sun against the
/// half-resolution depth buffer (occlusion test), blurs the result vertically
/// and horizontally, and composites the flare/halo/dirt over it.
/// Returns a fetched render target that the caller is responsible for releasing.
/// </summary>
/// <remarks>
/// NOTE(review): the <paramref name="bloomTexture"/> parameter is currently unused
/// (see the commented-out assignment in the fourth pass) — confirm whether it should
/// feed the blur input instead of the lens flare texture.
/// </remarks>
internal RenderTarget Render(Texture halfDepthTexture, Texture bloomTexture, PostProcess postProcess, Matrix viewMatrix, Matrix projectionMatrix, float farPlane, Vector3 cameraPosition)
{
    // Argument validation.
    if (postProcess == null)
    {
        throw new ArgumentNullException("postProcess");
    }
    if (halfDepthTexture == null || halfDepthTexture.Resource == null)
    {
        throw new ArgumentNullException("halfDepthTexture");
    }
    if (postProcess.AnamorphicLensFlare == null)
    {
        throw new ArgumentException("Anamorphic Lens Flare Shader: Anamorphic lens flare properties can not be null.");
    }
    try
    {
        // Fetch auxiliary render target.
        RenderTarget lensFlareTexture = RenderTarget.Fetch(halfDepthTexture.Size, SurfaceFormat.Color, DepthFormat.None, RenderTarget.AntialiasingType.NoAntialiasing);

        // Set Render States.
        EngineManager.Device.BlendState = BlendState.Opaque;
        EngineManager.Device.DepthStencilState = DepthStencilState.None;
        EngineManager.Device.RasterizerState = RasterizerState.CullCounterClockwise;

        #region First pass: sun rendering and occlusion test.

        lensFlareTexture.EnableRenderTarget();
        lensFlareTexture.Clear(Color.Black);

        // Set Parameters
        spHalfPixel.Value = new Vector2(0.5f / lensFlareTexture.Width, 0.5f / lensFlareTexture.Height);
        spDepthTexture.Value = halfDepthTexture;
        spFarPlane.Value = farPlane;
        // Push the sun position far away along Z so it sits near the far plane
        // (the mutation only affects the local copy of the struct parameter).
        cameraPosition.Z = cameraPosition.Z - (farPlane - 10);
        Matrix worldMatrix = Matrix.CreateScale((farPlane * 2.25f) / 100) * Matrix.CreateTranslation(cameraPosition);
        spWorldViewProj.Value = worldMatrix * viewMatrix * projectionMatrix;
        spWorldView.Value = worldMatrix * viewMatrix;
        spSunColor.Value = new Color(1.0f, 0.9f, 0.70f);

        // Render
        Resource.CurrentTechnique.Passes[0].Apply();
        sunObject.Render();

        lensFlareTexture.DisableRenderTarget();

        #endregion

        // The second and third pass were removed to improve performance.

        #region Fourth pass: high blur vertical

        RenderTarget highBlurredSunTextureVertical = RenderTarget.Fetch(halfDepthTexture.Size, SurfaceFormat.Color, DepthFormat.None, RenderTarget.AntialiasingType.NoAntialiasing);
        highBlurredSunTextureVertical.EnableRenderTarget();
        highBlurredSunTextureVertical.Clear(Color.Black);
        // NOTE(review): the negative X half-pixel offset differs from the first pass — presumably intentional for the blur sampling; confirm against the shader.
        spHalfPixel.Value = new Vector2(-0.5f / (lensFlareTexture.Width / 2), 0.5f / (lensFlareTexture.Height / 2));
        spSceneTexture.Value = lensFlareTexture; // bloomTexture;
        Resource.CurrentTechnique.Passes[3].Apply();
        RenderScreenPlane();
        highBlurredSunTextureVertical.DisableRenderTarget();

        #endregion

        #region Fifth pass: high blur horizontal

        RenderTarget highBlurredSunTexture = RenderTarget.Fetch(halfDepthTexture.Size, SurfaceFormat.Color, DepthFormat.None, RenderTarget.AntialiasingType.NoAntialiasing);
        highBlurredSunTexture.EnableRenderTarget();
        highBlurredSunTexture.Clear(Color.Black);
        spSceneTexture.Value = highBlurredSunTextureVertical;
        Resource.CurrentTechnique.Passes[4].Apply();
        RenderScreenPlane();
        highBlurredSunTexture.DisableRenderTarget();
        // The intermediate vertical-blur target is no longer needed.
        RenderTarget.Release(highBlurredSunTextureVertical);

        #endregion

        #region Last pass: composite images

        lensFlareTexture.EnableRenderTarget();
        lensFlareTexture.Clear(Color.Black);

        // Set Parameters
        spHighBlurredSunTexture.Value = highBlurredSunTexture;
        spDispersal.Value = postProcess.AnamorphicLensFlare.Dispersal;
        spHaloWidth.Value = postProcess.AnamorphicLensFlare.HaloWidth;
        spIntensity.Value = postProcess.AnamorphicLensFlare.Intensity;
        spDistortion.Value = postProcess.AnamorphicLensFlare.ChromaticDistortion;
        spDirtTexture.Value = postProcess.AnamorphicLensFlare.DirtTexture ?? Texture.BlackTexture;
        // uv coordinates of the sun position on the screen.
        Vector4 sunPosProjected = Vector4.Transform(new Vector4(cameraPosition.X, cameraPosition.Y, cameraPosition.Z, 1), viewMatrix * projectionMatrix * BiasMatrix());
        // Perspective divide, then flip Y to go from clip space to texture space.
        sunPosProjected = sunPosProjected / sunPosProjected.W;
        spSunPosProj.Value = new Vector2(sunPosProjected.X, 1 - sunPosProjected.Y);

        // Render
        Resource.CurrentTechnique.Passes[5].Apply();
        RenderScreenPlane();

        lensFlareTexture.DisableRenderTarget();

        #endregion

        // Release the rest of the render targets.
        RenderTarget.Release(highBlurredSunTexture);
        return(lensFlareTexture);
    }
    catch (Exception e)
    {
        throw new InvalidOperationException("Anamorphic Lens Flare Shader: Unable to render.", e);
    }
} // Render
} // Begin

#endregion

#region Render

/// <summary>
/// Render the point light.
/// Near lights use a two-pass stencil technique (mark affected pixels, then shade
/// the clip volume's back faces); far lights skip the stencil pass and shade the
/// front faces directly.
/// </summary>
/// <exception cref="InvalidOperationException">Wraps any failure during rendering.</exception>
public void Render(Color diffuseColor, Vector3 position, float intensity, float radius, TextureCube shadowTexture, Matrix worldMatrix, bool renderClipVolumeInLocalSpace, Model clipVolume = null)
{
    try
    {
        // It is possible to use the depth information and the stencil buffer to mark, in a two pass rendering,
        // exactly what pixels are affected by the light.
        // This helps to reduce pixel shader load but at the same time allows implementing clip volumes.
        // With clip volumes you can put, for example, a box and the light won't bleed outside this box even if the radius is bigger.
        // I.e. you can place lights in a wall and the opposite side of that wall won't be illuminated.
        //
        // The problem is I don't have the Z-Buffer available because XNA 4 does not allow sharing depth buffers between render targets.
        // However I can reconstruct the Z-Buffer with a shader and my G-Buffer.
        //
        // If you don't use custom clip volumes (i.e. we use the default sphere) and the light is too far then we could have more vertex
        // processing than pixel processing. Some games use glow planes (a colored mask) to see the light's bright when they are far away,
        // this is good for open environment games but not for interior games.
        // Instead games like Killzone 2 ignore the first pass on these lights and only compute the second (and this second pass still
        // does one part of the filter). Also the far plane "problem" is addressed in this optimization.
        //
        // Another optimization that I made is the use of a Softimage sphere instead of my procedural spheres.
        // Models exported in this kind of tools are optimized for accessing. For example my stress test changes from 20/21 frames
        // to 22 frames. Not a big change, but still a change nevertheless.
        //
        // I also researched the possibility of using instancing with some lights,
        // but no article talks about this technique, so I tried to think why it is not useful and it was easy to find that:
        // 1) It can be only used with spheres (not custom clip volumes).
        // 2) The dynamic buffers used for the instancing information could be too dynamic or difficult to maintain.
        // 3) The stencil optimization could be very important on interior games and could not be mixed with instancing
        //    and custom clip volumes. Extra complexity added (including the use of vfetch for Xbox 360).

        // Fill the stencil buffer with 0s.
        EngineManager.Device.Clear(ClearOptions.Stencil, Color.White, 1.0f, 0);

        #region Set Parameters

        // Light values are handed to the shader in view space.
        spLightColor.Value = diffuseColor;
        spLightPosition.Value = Vector3.Transform(position, viewMatrix);
        spLightIntensity.Value = intensity;
        spInvLightRadius.Value = 1 / radius;
        if (shadowTexture != null)
        {
            spShadowTexture.Value = shadowTexture;
            // Inverse of the rotation-only view matrix (translation removed via inverse-transpose).
            spViewInverse.Value = Matrix.Invert(Matrix.Transpose(Matrix.Invert(viewMatrix)));
            spTextureSize.Value = new Vector3(shadowTexture.Size, shadowTexture.Size, shadowTexture.Size);
            spTextureSizeInv.Value = new Vector3(1.0f / shadowTexture.Size, 1.0f / shadowTexture.Size, 1.0f / shadowTexture.Size);
        }
        else
        {
            spShadowTexture.Value = TextureCube.BlackTexture;
        }
        // Compute the light world matrix.
        Matrix boundingLightObjectWorldMatrix;
        if (clipVolume != null)
        {
            boundingLightObjectWorldMatrix = renderClipVolumeInLocalSpace ? Matrix.Identity : worldMatrix;
        }
        else
        {
            // Scale according to light radius, and translate it to light position.
            boundingLightObjectWorldMatrix = Matrix.CreateScale(radius) * Matrix.CreateTranslation(position);
        }
        spWorldViewProj.Value = boundingLightObjectWorldMatrix * viewMatrix * projectionMatrix;
        spWorldView.Value = boundingLightObjectWorldMatrix * viewMatrix;

        #endregion

        // http://en.wikipedia.org/wiki/Angular_diameter
        // The formula was inspired from Guerilla's GDC 09 presentation.
        float distanceToCamera = Vector3.Distance(Matrix.Invert(viewMatrix).Translation, position);
        float angularDiameter = (float)(2 * Math.Atan(radius / distanceToCamera));
        // NOTE(review): 3.1416f approximation here, while other methods use Math.PI — confirm the inconsistency is intentional.
        if (angularDiameter > 0.2f * (3.1416f * fieldOfView / 180.0f)) // 0.2f is the original value.
        {
            // This only works when the clip volume does not intercept the camera's far plane.

            // First pass.
            // The stencil buffer was already filled with 0 and if the back of the clip volume
            // is in front of the geometry then it marks the pixel as useful.
            // I prefer to do it in that way because when the clip volume intercepts the camera's near plane
            // we don't need to perform a special case and we still have custom volume support.
            Resource.CurrentTechnique = pointLightStencilTechnique;
            EngineManager.Device.RasterizerState = RasterizerState.CullCounterClockwise;
            EngineManager.Device.BlendState = stencilBlendState;
            EngineManager.Device.DepthStencilState = stencilDepthStencilState;
            Resource.CurrentTechnique.Passes[0].Apply();
            if (clipVolume != null)
            {
                clipVolume.Render();
            }
            else
            {
                boundingLightObject.Render();
            }

            // Second pass.
            // Render the clip volume back faces with the light shader.
            // The pixel with stencil value of 1 that are in front of the geometry will be discarded.
            Resource.CurrentTechnique = shadowTexture != null ? pointLightWithShadowsTechnique : pointLightTechnique;
            EngineManager.Device.RasterizerState = RasterizerState.CullClockwise;
            EngineManager.Device.BlendState = lightBlendState;
            EngineManager.Device.DepthStencilState = lightDepthStencilState;
            Resource.CurrentTechnique.Passes[0].Apply();
            if (clipVolume != null)
            {
                clipVolume.Render();
            }
            else
            {
                boundingLightObject.Render();
            }
        }
        else // Far lights
        {
            // Render the clip volume front faces with the light shader (no stencil pass).
            Resource.CurrentTechnique = shadowTexture != null ? pointLightWithShadowsTechnique : pointLightTechnique;
            EngineManager.Device.RasterizerState = RasterizerState.CullCounterClockwise;
            //EngineManager.Device.BlendState = lightBlendState; // Not need to set it.
            EngineManager.Device.DepthStencilState = DepthStencilState.DepthRead;
            Resource.CurrentTechnique.Passes[0].Apply();
            if (clipVolume != null)
            {
                clipVolume.Render();
            }
            else
            {
                boundingLightObject.Render();
            }
        }
    }
    catch (Exception e)
    {
        throw new InvalidOperationException("Light Pre Pass Point Light: Unable to render.", e);
    }
} // Render