/// <summary>
/// Adds the given object to the render list. Null values are silently ignored.
/// </summary>
public void AddToRenderList(RenderOrder renderObject)
{
    if (renderObject == null)
        return;

    renderObjects.Add(renderObject);
}
// Render() draws a list of scene nodes.
public override void Render(IList<SceneNode> nodes, RenderContext context, RenderOrder order)
{
    // Note: the 'order' parameter is intentionally ignored; the TextNodes are
    // not sorted by camera distance.
    var graphicsDevice = context.GraphicsService.GraphicsDevice;
    var cameraNode = context.CameraNode;
    if (cameraNode == null)
        return;   // Nothing to draw without a camera.

    Matrix view = (Matrix)cameraNode.View;
    Matrix projection = cameraNode.Camera.Projection;
    var viewport = graphicsDevice.Viewport;

    // Text is drawn with the SpriteBatch.
    _spriteBatch.Begin();

    foreach (var sceneNode in nodes)
    {
        var textNode = sceneNode as TextNode;
        if (textNode == null)
            continue;

        // Project the node's world-space position into screen space and
        // center the measured string on that point.
        Vector3 worldPosition = (Vector3)textNode.PoseWorld.Position;
        Vector3 screenPosition = viewport.Project(worldPosition, projection, view, Matrix.Identity);
        Vector2 center = new Vector2(screenPosition.X, screenPosition.Y);
        Vector2 textSize = _spriteFont.MeasureString(textNode.Text);

        _spriteBatch.DrawString(_spriteFont, textNode.Text, center - textSize / 2, textNode.Color);
    }

    _spriteBatch.End();
}
/// <summary>
/// Removes the given object from the render list. Null values are silently ignored.
/// </summary>
public void RemoveInRenderList(RenderOrder renderObject)
{
    if (renderObject == null)
        return;

    renderObjects.Remove(renderObject);
}
// Render() draws a list of scene nodes.
public override void Render(IList<SceneNode> nodes, RenderContext context, RenderOrder order)
{
    // For simplicity we ignore the 'order' parameter and do not sort the
    // TextNodes by distance.
    var graphicsDevice = context.GraphicsService.GraphicsDevice;
    var cameraNode = context.CameraNode;
    if (cameraNode == null)
        return; // No camera set.

    Matrix view = (Matrix)cameraNode.View;
    Matrix projection = cameraNode.Camera.Projection;
    var viewport = graphicsDevice.Viewport;

    // Use the SpriteBatch for rendering text.
    _spriteBatch.Begin();

    int count = nodes.Count;
    for (int index = 0; index < count; index++)
    {
        var textNode = nodes[index] as TextNode;
        if (textNode == null)
            continue;

        // Draw text centered at the projected position of the TextNode.
        Vector3 worldPos = (Vector3)textNode.PoseWorld.Position;
        Vector3 screenPos = viewport.Project(worldPos, projection, view, Matrix.Identity);
        Vector2 anchor = new Vector2(screenPos.X, screenPos.Y);
        Vector2 extent = _spriteFont.MeasureString(textNode.Text);

        _spriteBatch.DrawString(_spriteFont, textNode.Text, anchor - extent / 2, textNode.Color);
    }

    _spriteBatch.End();
}
/// <inheritdoc/>
public override void Render(IList<SceneNode> nodes, RenderContext context, RenderOrder order)
{
    ThrowIfDisposed();
    if (nodes == null)
        throw new ArgumentNullException("nodes");
    if (context == null)
        throw new ArgumentNullException("context");

    context.ThrowIfCameraMissing();

    Debug.Assert(Jobs.Count == 0, "Job list was not properly reset.");

    // Collect draw jobs for the given nodes, then submit them to the renderers
    // and reset the job list for the next call.
    BatchJobs(nodes, context, order);
    if (Jobs.Count > 0)
    {
        ProcessJobs(context, order);
        Jobs.Clear();
    }

    //PostProcess(context);
}
/// <inheritdoc/>
public override void Render(IList<SceneNode> nodes, RenderContext context, RenderOrder order)
{
    // Start with a clean state. Remove any references from/to light nodes.
    RecycleShadowMasks();

    // Delegate the actual rendering to the base scene node renderer.
    base.Render(nodes, context, order);
}
/// <inheritdoc/>
public override void Render(IList<SceneNode> nodes, RenderContext context, RenderOrder order)
{
    ThrowIfDisposed();

    if (nodes == null)
        throw new ArgumentNullException("nodes");
    if (context == null)
        throw new ArgumentNullException("context");

    context.Validate(_vertexBuffer);
    context.ThrowIfCameraMissing();
    context.ThrowIfRenderPassMissing();

    Debug.Assert(_jobs.Count == 0, "Job list was not properly reset.");

    // Reset counters.
    _effectCount = 0;

    // Batch the nodes into draw jobs, process them, then clear for the next call.
    BatchJobs(nodes, context, order);
    if (_jobs.Count > 0)
    {
        ProcessJobs(context, order);
        _jobs.Clear();
    }

    // Optional developer-level validation of the submitted nodes.
    if ((GlobalSettings.ValidationLevelInternal & GlobalSettings.ValidationLevelDevBasic) != 0)
        ValidateNodes(nodes, context);
}
/// <summary>
/// Initializes a new TiledMap from the values parsed out of a TMX file.
/// </summary>
/// <remarks>
/// The parameter name "isInfinate" (sic) is kept as-is for caller compatibility.
/// </remarks>
public TiledMap(string version, string tiledVersion, string sourceTMXFile,
                Orientation orientation, RenderOrder renderOrder,
                int mapWidth, int mapHeight, int tileWidth, int tileHeight,
                bool isInfinate, int nextObjectID,
                Dictionary<uint, string> tiledSets, List<Layer> layers)
{
    // File/format metadata.
    Version = version;
    TiledVersion = tiledVersion;
    SourceTMXFile = sourceTMXFile;

    // Map layout.
    Orientation = orientation;
    RenderOrder = renderOrder;
    MapWidth = mapWidth;
    MapHeight = mapHeight;
    TileWidth = tileWidth;
    TileHeight = tileHeight;
    IsInfinate = isInfinate;
    NextObjectID = nextObjectID;

    // Content.
    TiledSets = tiledSets;
    Layers = layers;
}
/// <inheritdoc/>
public override void Render(IList<SceneNode> nodes, RenderContext context, RenderOrder order)
{
    ThrowIfDisposed();
    if (nodes == null)
        throw new ArgumentNullException("nodes");
    if (context == null)
        throw new ArgumentNullException("context");

    int numberOfNodes = nodes.Count;
    if (nodes.Count == 0)
        return;

    context.Validate(_effect);
    context.ThrowIfCameraMissing();

    // Update SceneNode.LastFrame for all visible nodes.
    int frame = context.Frame;
    var cameraNode = context.CameraNode;
    cameraNode.LastFrame = frame;

    bool reach = (context.GraphicsService.GraphicsDevice.GraphicsProfile == GraphicsProfile.Reach);

    for (int i = 0; i < numberOfNodes; i++)
    {
        var skyboxNode = nodes[i] as SkyboxNode;
        if (skyboxNode == null)
            continue;

        // SkyboxNode is visible in current frame.
        skyboxNode.LastFrame = frame;

        if (skyboxNode.Texture == null)
            continue;

        // Reach profile uses the fallback path; HiDef uses the full shader path.
        if (reach)
            RenderReach(skyboxNode, context);
        else
            RenderHiDef(skyboxNode, context);
    }
}
// Creates one draw job per FigureNode and sorts the job list according to the
// requested render order.
private void BatchJobs(IList<SceneNode> nodes, RenderContext context, RenderOrder order)
{
    // Camera data is only needed when jobs are sorted by distance.
    var cameraNode = context.CameraNode;
    bool sortByDistance = (order == RenderOrder.BackToFront || order == RenderOrder.FrontToBack);
    bool backToFront = (order == RenderOrder.BackToFront);
    Vector3 cameraPosition = new Vector3();
    Vector3 lookDirection = new Vector3();
    if (sortByDistance)
    {
        Pose cameraPose = cameraNode.PoseWorld;
        cameraPosition = cameraPose.Position;
        lookDirection = -cameraPose.Orientation.GetColumn(2);   // Forward = -local z.
    }

    // Update SceneNode.LastFrame for all visible nodes.
    int frame = context.Frame;
    cameraNode.LastFrame = frame;

    int count = nodes.Count;
    for (int i = 0; i < count; i++)
    {
        var figureNode = nodes[i] as FigureNode;
        if (figureNode == null)
            continue;

        // FigureNode is visible in the current frame.
        figureNode.LastFrame = frame;

        // Signed distance along the view direction; negated for back-to-front order.
        float distance = 0;
        if (sortByDistance)
        {
            distance = Vector3.Dot(figureNode.PoseWorld.Position - cameraPosition, lookDirection);
            if (backToFront)
                distance = -distance;
        }

        var job = new Job
        {
            SortKey = GetSortKey(distance, figureNode.DrawOrder),
            Node = figureNode,
        };
        _jobs.Add(ref job);
    }

    if (_jobs.Count > 0 && order != RenderOrder.UserDefined)
        _jobs.Sort(Comparer.Instance);   // Sort draw jobs.
}
/// <summary>
/// Maps the TMX "renderorder" attribute string to the RenderOrder enum and
/// stores the result in MapRenderOrder.
/// </summary>
/// <param name="renderOrder">Attribute value, e.g. "left-up" or "right-down".</param>
/// <returns>
/// The updated MapRenderOrder. Unrecognized strings leave MapRenderOrder
/// unchanged (matches the previous behavior).
/// </returns>
/// <exception cref="ArgumentNullException">renderOrder is null.</exception>
public RenderOrder SetRenderOrder(string renderOrder)
{
    // FIX: a null argument previously crashed with an unchecked
    // NullReferenceException inside string.Equals; fail explicitly instead.
    if (renderOrder == null)
        throw new ArgumentNullException("renderOrder");

    switch (renderOrder)
    {
        case "left-up":
            MapRenderOrder = RenderOrder.LEFT_UP;
            break;
        case "right-down":
            MapRenderOrder = RenderOrder.RIGHT_DOWN;
            break;
        // Any other value intentionally keeps the current MapRenderOrder.
    }

    return MapRenderOrder;
}
// Creates draw jobs for all BillboardNodes (and, when compiled with the
// PARTICLES symbol, ParticleSystemNodes) and sorts them according to the
// requested render order.
private void BatchJobs(IList<SceneNode> nodes, RenderContext context, RenderOrder order)
{
    // Camera parameters.
    var cameraNode = context.CameraNode;
    _cameraPose = cameraNode.PoseWorld;
    _defaultNormal = _cameraPose.Orientation.GetColumn(2); // Local z-axis.
    _cameraForward = -_defaultNormal;

    // RenderOrder.Default is treated as back-to-front here; only UserDefined
    // disables distance sorting.
    bool sortByDistance = (order != RenderOrder.UserDefined);
    bool backToFront = (order == RenderOrder.Default || order == RenderOrder.BackToFront);

    // Update SceneNode.LastFrame for all visible nodes.
    int frame = context.Frame;
    cameraNode.LastFrame = frame;

    // Add draw jobs to list.
    int numberOfNodes = nodes.Count;
    for (int i = 0; i < numberOfNodes; i++)
    {
        var node = nodes[i];
        var billboardNode = node as BillboardNode;
        if (billboardNode != null)
        {
            // BillboardNode is visible in current frame.
            billboardNode.LastFrame = frame;
            AddJob(billboardNode, sortByDistance, backToFront);
            continue;
        }

#if PARTICLES
        var particleSystemNode = node as ParticleSystemNode;
        if (particleSystemNode != null)
        {
            // ParticleSystemNode is visible in current frame.
            particleSystemNode.LastFrame = frame;
            AddJob(particleSystemNode, sortByDistance, backToFront);
            continue;
        }
#endif
    }

    if (_jobs.Count > 0 && order != RenderOrder.UserDefined)
    {
        // Sort draw jobs.
        _jobs.Sort(Comparer.Instance);
    }
}
// Renders composite shadows by dispatching each child shadow to the first
// shadow-map renderer that can handle it.
public override void Render(IList<SceneNode> nodes, RenderContext context, RenderOrder order)
{
    if (nodes == null)
        throw new ArgumentNullException("nodes");
    if (context == null)
        throw new ArgumentNullException("context");

    int numberOfNodes = nodes.Count;
    if (numberOfNodes == 0)
        return;

    for (int i = 0; i < numberOfNodes; i++)
    {
        var lightNode = nodes[i] as LightNode;
        if (lightNode == null)
            continue;

        var compositeShadow = lightNode.Shadow as CompositeShadow;
        if (compositeShadow == null)
            continue;

        // Temporarily swap each child shadow into LightNode.Shadow and let the
        // first capable renderer draw it.
        for (int j = 0; j < compositeShadow.Shadows.Count; j++)
        {
            lightNode.Shadow = compositeShadow.Shadows[j];
            foreach (var renderer in _shadowMapRenderers)
            {
                if (renderer.CanRender(lightNode, context))
                {
                    renderer.Render(lightNode, context);
                    break;
                }
            }
        }

        // Restore the original composite shadow.
        lightNode.Shadow = compositeShadow;
    }
}
/// <summary>
/// Writes the given RenderOrder as an XML attribute. Unknown writes nothing;
/// any unsupported value raises ArgumentOutOfRangeException.
/// </summary>
public static void WriteAttribute(this XmlWriter writer, string localName, RenderOrder value)
{
    if (value == RenderOrder.Unknown)
        return;   // Nothing to write.

    if (value == RenderOrder.rightdown)
    {
        writer.WriteAttributeString(localName, "right-down");
        return;
    }

    throw new ArgumentOutOfRangeException(nameof(value));
}
// Parses the assigned JSON asset into tilemapJsonInfo and derives the render
// order from it; logs an error when no JSON asset is assigned.
private void LoadFromJson()
{
    this.tilemapJsonInfo = new TiledTilemapJsonInfo();

    if (json == null)
    {
        Debug.LogError("Something went wrong. JSON file may be invalid");
        return;
    }

    JsonUtility.FromJsonOverwrite(json.text, tilemapJsonInfo);
    this.renderOrder = GetRenderOrder(tilemapJsonInfo.renderorder);
    Debug.Log(tilemapJsonInfo);
}
/// <summary>
/// Creates a graphics engine component and configures its default
/// sprite-batch render states.
/// </summary>
public GraphicEngine(AnglerGame game, RenderOrder order = RenderOrder.Background)
    : base(game)
{
    DrawOrder = (int)order;
    Components = new List<GraphicsEngineComponent>();

    // Defaults: deferred drawing with alpha blending, clamped linear sampling,
    // no depth/stencil, scissor testing enabled.
    SortMode = SpriteSortMode.Deferred;
    BlendState = BlendState.AlphaBlend;
    SamplerState = SamplerState.LinearClamp;
    StencilState = DepthStencilState.None;
    RasterizerState = new RasterizerState { ScissorTestEnable = true };
}
// Submits the sorted job list to the renderers in batches, where a batch is a
// maximal run of consecutive jobs sharing the same renderer.
internal virtual void ProcessJobs(RenderContext context, RenderOrder order)
{
    // The job list is already distance-sorted for these orders, so downstream
    // renderers must not sort again.
    if (order == RenderOrder.BackToFront || order == RenderOrder.FrontToBack)
        order = RenderOrder.UserDefined;

    var savedRenderState = new RenderStateSnapshot(context.GraphicsService.GraphicsDevice);

    var jobs = Jobs.Array;
    int jobCount = Jobs.Count;
    int batchStart = 0;
    while (batchStart < jobCount)
    {
        var renderer = jobs[batchStart].Renderer;

        // Find end of current batch (exclusive).
        int batchEnd = batchStart + 1;
        while (batchEnd < jobCount && jobs[batchEnd].Renderer == renderer)
            batchEnd++;

        // Restore the render state. (The integrated scene node renderers properly
        // restore the render state, but third-party renderers might mess it up.)
        if (batchStart > 0)
            savedRenderState.Restore();

        // Expose the current batch as IList<SceneNode> and submit it.
        JobsAccessor.Set(Jobs, batchStart, batchEnd);
        renderer.Render(JobsAccessor, context, order);
        JobsAccessor.Reset();

        batchStart = batchEnd;
    }

    savedRenderState.Restore();
}
// Renders composite shadows: each child shadow is substituted into the light
// node and handed to the first renderer that accepts it.
public override void Render(IList<SceneNode> nodes, RenderContext context, RenderOrder order)
{
    if (nodes == null)
        throw new ArgumentNullException("nodes");
    if (context == null)
        throw new ArgumentNullException("context");

    int nodeCount = nodes.Count;
    if (nodeCount == 0)
        return;

    for (int nodeIndex = 0; nodeIndex < nodeCount; nodeIndex++)
    {
        var lightNode = nodes[nodeIndex] as LightNode;
        if (lightNode == null)
            continue;

        var composite = lightNode.Shadow as CompositeShadow;
        if (composite == null)
            continue;

        // Set LightNode.Shadow to each child shadow temporarily and dispatch
        // it to a suitable renderer.
        for (int childIndex = 0; childIndex < composite.Shadows.Count; childIndex++)
        {
            lightNode.Shadow = composite.Shadows[childIndex];

            int rendererCount = _shadowMapRenderers.Count;
            for (int r = 0; r < rendererCount; r++)
            {
                var renderer = _shadowMapRenderers[r];
                if (renderer.CanRender(lightNode, context))
                {
                    renderer.Render(lightNode, context);
                    break;
                }
            }
        }

        // Put the composite shadow back.
        lightNode.Shadow = composite;
    }
}
/// <inheritdoc/>
public override void Render(IList<SceneNode> nodes, RenderContext context, RenderOrder order)
{
    ThrowIfDisposed();
    if (nodes == null)
        throw new ArgumentNullException("nodes");
    if (context == null)
        throw new ArgumentNullException("context");

    int numberOfNodes = nodes.Count;
    if (nodes.Count == 0)
        return;

    context.Validate(_effect);
    context.ThrowIfCameraMissing();

    // Mark the camera and every rendered node as visible in this frame.
    int frame = context.Frame;
    var cameraNode = context.CameraNode;
    cameraNode.LastFrame = frame;

    bool isReachProfile =
        (context.GraphicsService.GraphicsDevice.GraphicsProfile == GraphicsProfile.Reach);

    for (int i = 0; i < numberOfNodes; i++)
    {
        var skybox = nodes[i] as SkyboxNode;
        if (skybox == null)
            continue;

        skybox.LastFrame = frame;   // Visible in current frame.

        if (skybox.Texture != null)
        {
            // Reach profile uses the fallback path; HiDef uses the shader path.
            if (isReachProfile)
                RenderReach(skybox, context);
            else
                RenderHiDef(skybox, context);
        }
    }
}
/// <summary>
/// Orders sprite keys by sampler, then render order, then flags, and finally
/// by scissor region, so sprites with equal state sort next to each other.
/// </summary>
public int CompareTo(SpriteKey other)
{
    int result = Sampler.CompareTo(other.Sampler);
    if (result != 0)
        return result;

    result = RenderOrder.CompareTo(other.RenderOrder);
    if (result != 0)
        return result;

    result = Flags.CompareTo(other.Flags);
    if (result != 0)
        return result;

    return Nullable.Compare(ScissorRegion, other.ScissorRegion);
}
// Same as SceneRenderer.BatchJobs() except we sort by SkyNode.DrawOrder instead of distance.
internal override void BatchJobs(IList<SceneNode> nodes, RenderContext context, RenderOrder order)
{
    // Assign temporary IDs to scene node renderers. ID = index clamped to [0, 255].
    for (int i = 0; i < Renderers.Count; i++)
        Renderers[i].Id = (uint)(i & 0xff);

    // Create one job per SkyNode that some renderer accepts.
    int numberOfNodes = nodes.Count;
    for (int i = 0; i < numberOfNodes; i++)
    {
        var skyNode = nodes[i] as SkyNode;
        if (skyNode == null)
            continue;

        var job = new Job { Node = skyNode };

        // Pick the first renderer that can handle this node.
        for (int r = 0; r < Renderers.Count; r++)
        {
            if (Renderers[r].CanRender(skyNode, context))
            {
                job.Renderer = Renderers[r];
                break;
            }
        }

        if (job.Renderer == null)
            continue;   // No renderer found; skip this node.

        job.SortKey = GetSortKey(skyNode.DrawOrder, job.Renderer.Order, job.Renderer.Id);
        Jobs.Add(ref job);
    }

    if (order != RenderOrder.UserDefined)
        Jobs.Sort(Comparer.Instance);   // Sort draw jobs.
}
// Renders a fullscreen quad for each EnvironmentLight. All lights are accumulated in the
// light buffer using additive alpha blending.
public override void Render(IList<SceneNode> nodes, RenderContext context, RenderOrder order)
{
    if (nodes == null)
    {
        throw new ArgumentNullException("nodes");
    }
    if (context == null)
    {
        throw new ArgumentNullException("context");
    }

    int numberOfNodes = nodes.Count;
    if (numberOfNodes == 0)
    {
        return;
    }

    if (context.CameraNode == null)
    {
        throw new GraphicsException("Camera node needs to be set in render context.");
    }

    // Additive blending without depth test or culling: each light's fullscreen
    // quad adds its contribution to the light buffer.
    var graphicsDevice = _effect.GraphicsDevice;
    graphicsDevice.DepthStencilState = DepthStencilState.None;
    graphicsDevice.RasterizerState = RasterizerState.CullNone;
    graphicsDevice.BlendState = GraphicsHelper.BlendStateAdd;

    var viewport = graphicsDevice.Viewport;
    _parameterViewportSize.SetValue(new Vector2(viewport.Width, viewport.Height));
    _parameterGBuffer0.SetValue(context.GBuffer0);
    _parameterGBuffer1.SetValue(context.GBuffer1);

    var cameraNode = context.CameraNode;
    int frame = context.GraphicsService.Frame;
    cameraNode.LastFrame = frame;

    // Frustum corners are vectors which point from the camera to the far plane corners.
    GraphicsHelper.GetFrustumFarCorners(cameraNode.Camera.Projection, _cameraFrustumFarCorners);

    // Convert frustum far corners from view space to world space.
    for (int j = 0; j < _cameraFrustumFarCorners.Length; j++)
    {
        _cameraFrustumFarCorners[j] = (Vector3)cameraNode.PoseWorld.ToWorldDirection((Vector3F)_cameraFrustumFarCorners[j]);
    }

    _parameterFrustumCorners.SetValue(_cameraFrustumFarCorners);

    // The current render pipeline is a HDR pipeline if the light buffer is HdrBlendable.
    // (This will practically always be the case.)
    var isHdrEnabled = context.RenderTarget != null && context.RenderTarget.Format == SurfaceFormat.HdrBlendable;

    for (int i = 0; i < numberOfNodes; i++)
    {
        var lightNode = nodes[i] as LightNode;
        if (lightNode == null)
        {
            continue;
        }

        // Skip lights that cannot contribute: no environment map, black color,
        // or zero diffuse and specular intensity.
        var light = lightNode.Light as EnvironmentLight;
        if (light == null || light.EnvironmentMap == null || light.Color == new Vector3F(0) || (light.DiffuseIntensity == 0 && light.SpecularIntensity == 0))
        {
            continue;
        }

        // Light node is visible in the current frame.
        lightNode.LastFrame = frame;

        // In an HDR pipeline the light color is scaled by its HdrScale.
        float hdrScale = isHdrEnabled ? light.HdrScale : 1;
        _parameterDiffuseColor.SetValue((Vector3)light.Color * light.DiffuseIntensity * hdrScale);
        _parameterSpecularColor.SetValue((Vector3)light.Color * light.SpecularIntensity * hdrScale);

        _parameterTextureSize.SetValue(light.EnvironmentMap.Size);
        _parameterMaxMipLevel.SetValue(Math.Max(0, light.EnvironmentMap.LevelCount - 1));
        _parameterTexture.SetValue(light.EnvironmentMap);

        _effect.CurrentTechnique.Passes[0].Apply();
        graphicsDevice.DrawFullScreenQuad();
    }
}
/// <inheritdoc/>
/// <remarks>
/// Updates the ocean wave simulation for all visible WaterNodes: runs the CPU
/// update/FFT, generates the displacement/normal spectra on the GPU, and runs
/// the inverse GPU FFT to produce the final displacement and normal maps.
/// Each OceanWaves instance is updated at most once per frame.
/// </remarks>
public override void Render(IList<SceneNode> nodes, RenderContext context, RenderOrder order)
{
    ThrowIfDisposed();
    if (nodes == null)
        throw new ArgumentNullException("nodes");
    if (context == null)
        throw new ArgumentNullException("context");

    int numberOfNodes = nodes.Count;
    if (nodes.Count == 0)
        return;

    context.Validate(_effect);

    // Remember the original render target/viewport; both are restored at the end.
    var originalRenderTarget = context.RenderTarget;
    var originalViewport = context.Viewport;
    var graphicsService = context.GraphicsService;
    var graphicsDevice = graphicsService.GraphicsDevice;
    //var renderTargetPool = graphicsService.RenderTargetPool;

    var savedRenderState = new RenderStateSnapshot(graphicsDevice);
    graphicsDevice.BlendState = BlendState.Opaque;
    graphicsDevice.RasterizerState = RasterizerState.CullNone;
    graphicsDevice.DepthStencilState = DepthStencilState.None;

    int frame = context.Frame;
    for (int nodeIndex = 0; nodeIndex < numberOfNodes; nodeIndex++)
    {
        var node = nodes[nodeIndex] as WaterNode;
        if (node == null)
            continue;

        var waves = node.Waves as OceanWaves;
        if (waves == null)
            continue;

        // We update the waves only once per frame.
        if (waves.LastFrame == frame)
            continue;

        waves.LastFrame = frame;

        float time = (float)context.Time.TotalSeconds;

        // Initialize h0 spectrum. Perform CPU FFT.
        waves.Update(graphicsDevice, time);

        int n = waves.TextureSize;

        // Allocate textures in the first frame and when the TextureSize was changed.
        // NOTE(review): SafeDispose is called while DisplacementSpectrum may be
        // null — presumably a null-safe dispose extension; confirm.
        if (waves.DisplacementSpectrum == null || waves.DisplacementSpectrum.Width != n)
        {
            waves.DisplacementSpectrum.SafeDispose();
            waves.NormalSpectrum.SafeDispose();
            waves.DisplacementMap.SafeDispose();
            waves.NormalMap.SafeDispose();
            waves.DisplacementSpectrum = new RenderTarget2D(_graphicsService.GraphicsDevice, n, n, false, SurfaceFormat.Vector4, DepthFormat.None);
            waves.NormalSpectrum = new RenderTarget2D(_graphicsService.GraphicsDevice, n, n, false, SurfaceFormat.Vector4, DepthFormat.None);
            waves.DisplacementMap = new RenderTarget2D(_graphicsService.GraphicsDevice, n, n, false, SurfaceFormat.Vector4, DepthFormat.None);
            // Only the final normal map is created with mipmaps and Color format.
            waves.NormalMap = new RenderTarget2D(
                _graphicsService.GraphicsDevice, n, n, true, SurfaceFormat.Color, DepthFormat.None);
        }

        // Create spectrum (h, D, N) for current time from h0.
        _renderTargetBindings[0] = new RenderTargetBinding(waves.DisplacementSpectrum);
        _renderTargetBindings[1] = new RenderTargetBinding(waves.NormalSpectrum);
        graphicsDevice.SetRenderTargets(_renderTargetBindings);
        _parameterSize.SetValue((float)n);
        _parameterSpectrumParameters.SetValue(new Vector4(
            waves.TileSize,
            waves.Gravity,
            time,
            waves.HeightScale));
        _parameterSourceTexture.SetValue(waves.H0Spectrum);
        _passSpectrum.Apply();
        graphicsDevice.DrawFullScreenQuad();

        // Do inverse FFT.
        _fft.Process(
            context,
            false,
            waves.DisplacementSpectrum,
            waves.NormalSpectrum,
            (RenderTarget2D)waves.DisplacementMap,
            (RenderTarget2D)waves.NormalMap,
            waves.Choppiness);

        #region ----- Old Debugging Code -----

        // Create textures from CPU FFT data for debug visualization.
        //n = waves.CpuSize;
        //var s0Data = new Vector4[n * n];
        //var s1Data = new Vector4[n * n];
        //var s0 = new RenderTarget2D(_graphicsService.GraphicsDevice, n, n, false, SurfaceFormat.Vector4, DepthFormat.None);
        //var s1 = new RenderTarget2D(_graphicsService.GraphicsDevice, n, n, false, SurfaceFormat.Vector4, DepthFormat.None);
        //for (int y = 0; y < n; y++)
        //{
        //  for (int x = 0; x < n; x++)
        //  {
        //s0Data[y * n + x] = new Vector4(
        //  -waves._D[x, y].X * waves.Choppiness,
        //  waves._h[x, y].X * 1,
        //  -waves._D[x, y].Y * waves.Choppiness,
        //  1);
        //s1Data[y * n + x] = new Vector4(
        //  waves._N[x, y].X,
        //  waves._N[x, y].Y,
        //  0,
        //  0);
        //  }
        //}
        //s0.SetData(s0Data);
        //s1.SetData(s1Data);
        //WaterSample._t0 = s0;
        //WaterSample._t1 = waves.DisplacementMap;
        #endregion
    }

    savedRenderState.Restore();
    graphicsDevice.SetRenderTarget(null);
    context.RenderTarget = originalRenderTarget;
    context.Viewport = originalViewport;

    _renderTargetBindings[0] = default(RenderTargetBinding);
    _renderTargetBindings[1] = default(RenderTargetBinding);

    // Reset the texture stages. If a floating point texture is set, we get exceptions
    // when a sampler with bilinear filtering is set.
#if !MONOGAME
    graphicsDevice.ResetTextures();
#endif
}
/// <summary>
/// Renders all visible LensFlareNodes using additive blending.
/// </summary>
/// <param name="nodes">The scene nodes; entries that are not LensFlareNodes are ignored.</param>
/// <param name="context">The render context. Must contain a camera node.</param>
/// <param name="order">Not used; flares are drawn sorted by texture.</param>
public override void Render(IList<SceneNode> nodes, RenderContext context, RenderOrder order)
{
    ThrowIfDisposed();
    if (nodes == null)
        throw new ArgumentNullException("nodes");
    if (context == null)
        throw new ArgumentNullException("context");

    // Lens flares are used sparsely in most games. --> Early out, if possible.
    int numberOfNodes = nodes.Count;
    if (nodes.Count == 0)
        return;

    context.Validate(_spriteBatch);
    context.ThrowIfCameraMissing();

    var graphicsDevice = context.GraphicsService.GraphicsDevice;
    var savedRenderState = new RenderStateSnapshot(graphicsDevice);
    bool hiDef = (graphicsDevice.GraphicsProfile == GraphicsProfile.HiDef);

    // Camera properties
    var cameraNode = context.CameraNode;
    var cameraPose = cameraNode.PoseWorld;
    Vector3 cameraForward = -cameraPose.Orientation.GetColumn(2);  // 3rd column vector (negated)
    Matrix view = cameraNode.View;
    Matrix projection = cameraNode.Camera.Projection;

    // The flares are positioned on a line from the origin through the center of
    // the screen.
    var viewport = graphicsDevice.Viewport;
    Vector2F screenCenter = new Vector2F(viewport.Width / 2.0f, viewport.Height / 2.0f);

    if (_transformParameter != null)
    {
        // ----- Original:
        // Matrix matrix = (Matrix)(Matrix.CreateOrthographicOffCenter(0, viewport.Width, viewport.Height, 0, 0, 1)
        //                 * Matrix.CreateTranslation(-0.5f, -0.5f, 0));  // Half-pixel offset (only for Direct3D 9).
        // ----- Inlined:
        Matrix matrix = new Matrix();
        float oneOverW = 1.0f / viewport.Width;
        float oneOverH = 1.0f / viewport.Height;
        matrix.M11 = oneOverW * 2f;
        matrix.M22 = -oneOverH * 2f;
        matrix.M33 = -1f;
        matrix.M44 = 1f;
        // FIX: the #if/#endif around the platform-specific translation was missing
        // (a stray #else made this block fail to compile). MonoGame (DX11+) needs
        // no half-pixel offset; Direct3D 9 (XNA) does. The file uses the MONOGAME
        // symbol elsewhere — confirm this is the intended conditional symbol.
#if MONOGAME
        matrix.M41 = -1f;
        matrix.M42 = 1f;
#else
        // Direct3D 9: half-pixel offset
        matrix.M41 = -oneOverW - 1f;
        matrix.M42 = oneOverH + 1f;
#endif
        _transformParameter.SetValue(matrix);
    }

    // Update SceneNode.LastFrame for all visible nodes.
    int frame = context.Frame;
    cameraNode.LastFrame = frame;

    // Choose current effect technique: Linear vs. Gamma-corrected Writes.
    if (_effect != null)
        _effect.CurrentTechnique = context.IsHdrEnabled() ? _techniqueLinear : _techniqueGamma;

    _spriteBatch.Begin(SpriteSortMode.Texture, BlendState.Additive, null, null, null, _effect);

    for (int i = 0; i < numberOfNodes; i++)
    {
        var node = nodes[i] as LensFlareNode;
        if (node == null)
            continue;

        var lensFlare = node.LensFlare;
        float size, intensity;
        if (hiDef)
        {
            // HiDef profile: size/intensity are derived from occlusion query results.
            object dummy;
            cameraNode.ViewDependentData.TryGetValue(node, out dummy);
            var renderData = dummy as OcclusionData;
            if (renderData == null || renderData.VisiblePixels == 0)
                continue;

            lensFlare.OnGetSizeAndIntensity(node, context, renderData.VisiblePixels, renderData.TotalPixels, out size, out intensity);
        }
        else
        {
            // Reach profile: no occlusion data available.
            lensFlare.OnGetSizeAndIntensity(node, context, 0, 0, out size, out intensity);
        }

        if (size <= 0 || intensity < MinIntensity)
            continue;

        // LensFlareNode is visible in current frame.
        node.LastFrame = frame;

        // Project position to screen space.
        Vector2F screenPosition;
        if (lensFlare.IsDirectional)
        {
            // ----- Directional lights
            Vector3 lightDirectionWorld = -node.PoseWorld.Orientation.GetColumn(2);  // 3rd column vector (negated)
            Vector3 lightDirectionView = cameraPose.ToLocalDirection(lightDirectionWorld);

            // In Reach profile check light direction for visibility.
            // (In HiDef profile this check is done UpdateOcclusion().)
            if (!hiDef && lightDirectionView.Z < 0)
            {
                // Light comes from behind camera.
                continue;
            }

            Vector3 position = viewport.ProjectToViewport(-lightDirectionView, projection);
            screenPosition = new Vector2F(position.X, position.Y);
        }
        else
        {
            // ----- Local lights
            Vector3 position = node.PoseWorld.Position;

            // In Reach profile check light direction for visibility.
            // (In HiDef profile this check is done UpdateOcclusion().)
            if (!hiDef)
            {
                Vector3 cameraToNode = position - cameraPose.Position;
                float distance = Vector3.Dot(cameraToNode, cameraForward);
                if (distance < cameraNode.Camera.Projection.Near)
                {
                    // Light is behind near plane.
                    continue;
                }
            }

            position = viewport.ProjectToViewport(position, projection * view);
            screenPosition = new Vector2F(position.X, position.Y);
        }

        Vector2F flareVector = screenCenter - screenPosition;
        foreach (var flare in lensFlare.Elements)
        {
            if (flare == null)
                continue;

            var packedTexture = flare.Texture;
            if (packedTexture == null)
                continue;

            // Position the flare on a line from the lens flare origin through the
            // screen center.
            Vector2F position = screenPosition + flareVector * flare.Distance;

            // The intensity controls the alpha value.
            Vector4 color = flare.Color.ToVector4();
            color.W *= intensity;

            // Get texture.
            Texture2D textureAtlas = packedTexture.TextureAtlas;
            Vector2F textureAtlasSize = new Vector2F(textureAtlas.Width, textureAtlas.Height);
            Vector2F textureOffset = packedTexture.Offset * textureAtlasSize;
            Vector2F textureSize = packedTexture.Scale * textureAtlasSize;
            Rectangle sourceRectangle = new Rectangle((int)textureOffset.X, (int)textureOffset.Y, (int)textureSize.X, (int)textureSize.Y);

            // The image rotates around its origin (= reference point) - usually the
            // center of the image.
            Vector2F origin = textureSize * flare.Origin;
            float rotation = flare.Rotation;
            Vector2F direction = flareVector;
            if (Numeric.IsNaN(rotation) && direction.TryNormalize())
            {
                // NaN = automatic rotation:
                // Determine angle between direction and reference vector (0, 1):
                // rotation = atan2(v2.y,v2.x) - atan2(1,0) = atan2(v2.y,v2.x) - π/2
                rotation = (float)Math.Atan2(direction.Y, direction.X) - ConstantsF.PiOver2;
            }

            Vector2F scale = size * viewport.Height * flare.Scale / textureSize.Y;

            // Render flare using additive blending.
            _spriteBatch.Draw(textureAtlas, (Vector2)position, sourceRectangle, new Color(color), rotation, (Vector2)origin, (Vector2)scale, SpriteEffects.None, 0);
        }
    }

    _spriteBatch.End();
    savedRenderState.Restore();
}
// Draws all SpriteNodes (image sprites and text sprites) with a SpriteBatch,
// projecting their 3D world positions into the viewport. Also stores the
// resulting screen bounds and depth per node for later hit tests.
public override void Render(IList<SceneNode> nodes, RenderContext context, RenderOrder order)
{
    ThrowIfDisposed();
    if (nodes == null)
        throw new ArgumentNullException("nodes");
    if (context == null)
        throw new ArgumentNullException("context");

    int numberOfNodes = nodes.Count;
    if (nodes.Count == 0)
        return;

    context.Validate(_spriteBatch);
    context.ThrowIfCameraMissing();

    var graphicsDevice = context.GraphicsService.GraphicsDevice;
    var savedRenderState = new RenderStateSnapshot(graphicsDevice);

    // Camera properties
    var cameraNode = context.CameraNode;
    Matrix44F viewProjection = cameraNode.Camera.Projection * cameraNode.View;
    var viewport = graphicsDevice.Viewport;

    // Update SceneNode.LastFrame for all visible nodes.
    int frame = context.Frame;
    cameraNode.LastFrame = frame;

    // Map the requested render order to the matching SpriteBatch sort mode.
    SpriteSortMode sortMode;
    switch (order)
    {
        case RenderOrder.Default:
            sortMode = SpriteSortMode.Texture;
            break;
        case RenderOrder.FrontToBack:
            sortMode = SpriteSortMode.FrontToBack;
            break;
        case RenderOrder.BackToFront:
            sortMode = SpriteSortMode.BackToFront;
            break;
        case RenderOrder.UserDefined:
        default:
            sortMode = SpriteSortMode.Deferred;
            break;
    }

    _spriteBatch.Begin(sortMode, graphicsDevice.BlendState, null, graphicsDevice.DepthStencilState, null);

    for (int i = 0; i < numberOfNodes; i++)
    {
        var node = nodes[i] as SpriteNode;
        if (node == null)
            continue;

        // SpriteNode is visible in current frame.
        node.LastFrame = frame;

        // Position, size, and origin in pixels. These stay zero when the node's
        // sprite produces no output below.
        Vector3F position = new Vector3F();
        Vector2 size = new Vector2();
        Vector2 origin = new Vector2();

        var bitmapSprite = node.Sprite as ImageSprite;
        if (bitmapSprite != null)
        {
            var packedTexture = bitmapSprite.Texture;
            if (packedTexture != null)
            {
                // Project into viewport and snap to pixels.
                position = viewport.ProjectToViewport(node.PoseWorld.Position, viewProjection);
                position.X = (int)(position.X + 0.5f);
                position.Y = (int)(position.Y + 0.5f);

                // Get source rectangle (pixel bounds).
                var sourceRectangle = packedTexture.GetBounds(node.AnimationTime);
                size = new Vector2(sourceRectangle.Width, sourceRectangle.Height);

                // Premultiply color.
                Vector3F color3F = node.Color;
                float alpha = node.Alpha;
                Color color = new Color(color3F.X * alpha, color3F.Y * alpha, color3F.Z * alpha, alpha);

                // Get absolute origin (relative to pixel bounds).
                origin = (Vector2)node.Origin * size;

                // Draw using SpriteBatch.
                _spriteBatch.Draw(
                    packedTexture.TextureAtlas, new Vector2(position.X, position.Y), sourceRectangle,
                    color, node.Rotation, origin, (Vector2)node.Scale, SpriteEffects.None, position.Z);
            }
        }
        else
        {
            var textSprite = node.Sprite as TextSprite;
            if (textSprite != null)
            {
                // Fall back to the renderer's default font when the sprite has none.
                var font = textSprite.Font ?? _spriteFont;
                if (font != null)
                {
                    // Text can be a string or StringBuilder.
                    var text = textSprite.Text as string;
                    if (text != null)
                    {
                        if (text.Length > 0)
                        {
                            // Project into viewport and snap to pixels.
                            position = viewport.ProjectToViewport(node.PoseWorld.Position, viewProjection);
                            position.X = (int)(position.X + 0.5f);
                            position.Y = (int)(position.Y + 0.5f);

                            // Premultiply color.
                            Vector3F color3F = node.Color;
                            float alpha = node.Alpha;
                            Color color = new Color(color3F.X * alpha, color3F.Y * alpha, color3F.Z * alpha, alpha);

                            // Get absolute origin (relative to pixel bounds).
                            size = font.MeasureString(text);
                            origin = (Vector2)node.Origin * size;

                            // Draw using SpriteBatch.
                            _spriteBatch.DrawString(
                                font, text, new Vector2(position.X, position.Y), color,
                                node.Rotation, origin, (Vector2)node.Scale, SpriteEffects.None, position.Z);
                        }
                    }
                    else
                    {
                        var stringBuilder = textSprite.Text as StringBuilder;
                        if (stringBuilder != null && stringBuilder.Length > 0)
                        {
                            // Project into viewport and snap to pixels.
                            position = viewport.ProjectToViewport(node.PoseWorld.Position, viewProjection);
                            position.X = (int)(position.X + 0.5f);
                            position.Y = (int)(position.Y + 0.5f);

                            // Premultiply color.
                            Vector3F color3F = node.Color;
                            float alpha = node.Alpha;
                            Color color = new Color(color3F.X * alpha, color3F.Y * alpha, color3F.Z * alpha, alpha);

                            // Get absolute origin (relative to pixel bounds).
                            size = font.MeasureString(stringBuilder);
                            origin = (Vector2)node.Origin * size;

                            // Draw using SpriteBatch.
                            _spriteBatch.DrawString(
                                font, stringBuilder, new Vector2(position.X, position.Y), color,
                                node.Rotation, origin, (Vector2)node.Scale, SpriteEffects.None, position.Z);
                        }
                    }
                }
            }
        }

        // Store bounds and depth for hit tests.
        node.LastBounds = new Rectangle(
            (int)(position.X - origin.X),
            (int)(position.Y - origin.Y),
            (int)(size.X * node.Scale.X),
            (int)(size.Y * node.Scale.Y));
        node.LastDepth = position.Z;
    }

    _spriteBatch.End();
    savedRenderState.Restore();
}
/// <summary>
/// Renders the shadow mask for every visible <c>LightNode</c> that carries a
/// <c>StandardShadow</c>. The shadow map of each node is sampled with a jittered
/// Poisson-disk filter and the result is written to a single color channel of the
/// currently bound shadow-mask render target.
/// </summary>
/// <param name="nodes">Scene nodes; entries that are not LightNodes with a StandardShadow are skipped.</param>
/// <param name="context">Render context; must provide a camera, GBuffer0 and viewport.</param>
/// <param name="order">Not used by this renderer.</param>
public override void Render(IList <SceneNode> nodes, RenderContext context, RenderOrder order)
{
    if (nodes == null)
    {
        throw new ArgumentNullException("nodes");
    }
    if (context == null)
    {
        throw new ArgumentNullException("context");
    }

    int numberOfNodes = nodes.Count;
    if (numberOfNodes == 0)
    {
        return;
    }

    context.Validate(_effect);
    context.ThrowIfCameraMissing();

    var graphicsDevice = _effect.GraphicsDevice;
    // Snapshot device state; restored at the end of the method.
    var savedRenderState = new RenderStateSnapshot(graphicsDevice);
    graphicsDevice.DepthStencilState = DepthStencilState.None;
    graphicsDevice.RasterizerState = RasterizerState.CullNone;

    // Global effect parameters shared by all nodes in this call.
    var cameraNode = context.CameraNode;
    _parameterViewInverse.SetValue(cameraNode.PoseWorld);
    _parameterGBuffer0.SetValue(context.GBuffer0);

    Viewport viewport = context.Viewport;
    _parameterParameters0.SetValue(new Vector2(viewport.Width, viewport.Height));

    // Lazily create the jitter (grain) texture used to randomize filter taps.
    if (_jitterMap == null)
    {
        _jitterMap = NoiseHelper.GetGrainTexture(context.GraphicsService, NoiseHelper.DefaultJitterMapWidth);
    }
    _parameterJitterMap.SetValue(_jitterMap);

    for (int i = 0; i < numberOfNodes; i++)
    {
        var lightNode = nodes[i] as LightNode;
        if (lightNode == null)
        {
            continue;
        }

        var shadow = lightNode.Shadow as StandardShadow;
        if (shadow == null)
        {
            continue;
        }

        if (shadow.ShadowMap == null || shadow.ShadowMask == null)
        {
            continue;
        }

        // The effect must only render in a specific channel.
        // Do not change blend state if the correct write channels is already set, e.g. if this
        // shadow is part of a CompositeShadow, the correct blend state is already set.
        if ((int)graphicsDevice.BlendState.ColorWriteChannels != (1 << shadow.ShadowMaskChannel))
        {
            graphicsDevice.BlendState = GraphicsHelper.BlendStateWriteSingleChannel[shadow.ShadowMaskChannel];
        }

        _parameterParameters1.SetValue(new Vector4(
            shadow.Near,
            shadow.Far,
            shadow.EffectiveDepthBias,
            shadow.EffectiveNormalOffset));

        // If we use a subset of the Poisson kernel, we have to normalize the scale.
        int numberOfSamples = Math.Min(shadow.NumberOfSamples, PoissonKernel.Length);
        float filterRadius = shadow.FilterRadius;
        if (numberOfSamples > 0)
        {
            filterRadius /= PoissonKernel[numberOfSamples - 1].Length();
        }

        _parameterParameters2.SetValue(new Vector3(
            shadow.ShadowMap.Width,
            filterRadius,
            // The StandardShadow.JitterResolution is the number of texels per world unit.
            // In the shader the parameter JitterResolution contains the division by the jitter map size.
            shadow.JitterResolution / _jitterMap.Width));

        // Light position expressed in camera (view) space.
        _parameterLightPosition.SetValue((Vector3)cameraNode.PoseWorld.ToLocalPosition(lightNode.PoseWorld.Position));

        Matrix cameraViewToShadowView = cameraNode.PoseWorld * shadow.View;
        _parameterShadowView.SetValue(cameraViewToShadowView);
        _parameterShadowMatrix.SetValue(cameraViewToShadowView * shadow.Projection);
        _parameterShadowMap.SetValue(shadow.ShadowMap);

        // Only the screen rectangle covered by the light volume needs to be processed.
        var rectangle = GraphicsHelper.GetViewportRectangle(cameraNode, viewport, lightNode);
        Vector2F texCoordTopLeft = new Vector2F(rectangle.Left / (float)viewport.Width, rectangle.Top / (float)viewport.Height);
        Vector2F texCoordBottomRight = new Vector2F(rectangle.Right / (float)viewport.Width, rectangle.Bottom / (float)viewport.Height);
        GraphicsHelper.GetFrustumFarCorners(cameraNode.Camera.Projection, texCoordTopLeft, texCoordBottomRight, _frustumFarCorners);
        _parameterFrustumCorners.SetValue(_frustumFarCorners);

        var pass = GetPass(numberOfSamples);

        if (numberOfSamples > 0)
        {
            // Upload the Poisson samples only when the sample count changed
            // (or once per Render call, because another renderer may have
            // overwritten the effect parameter).
            if (_lastNumberOfSamples != numberOfSamples)
            {
                // Create an array with the first n samples and the rest set to 0.
                _lastNumberOfSamples = numberOfSamples;
                for (int j = 0; j < numberOfSamples; j++)
                {
                    _samples[j].X = PoissonKernel[j].X;
                    _samples[j].Y = PoissonKernel[j].Y;
                    _samples[j].Z = 1.0f / numberOfSamples;
                    // Note [HelmutG]: I have tried weights decreasing with distance but that did not
                    // look better.
                }

                // Set the rest to zero.
                for (int j = numberOfSamples; j < _samples.Length; j++)
                {
                    _samples[j] = Vector3.Zero;
                }

                _parameterSamples.SetValue(_samples);
            }
            else if (i == 0)
            {
                // Apply offsets in the first loop.
                _parameterSamples.SetValue(_samples);
            }
        }

        pass.Apply();
        graphicsDevice.DrawQuad(rectangle);
    }

    // Unbind textures so the render targets can be used again elsewhere.
    _parameterGBuffer0.SetValue((Texture2D)null);
    _parameterJitterMap.SetValue((Texture2D)null);
    _parameterShadowMap.SetValue((Texture2D)null);
    savedRenderState.Restore();
}
/// <summary>
/// Initializes the engine and registers it with the given render order
/// (the effects layer when none is specified).
/// </summary>
/// <param name="game">The owning game instance, forwarded to the base engine.</param>
/// <param name="order">Layer at which this engine is rendered; defaults to <c>RenderOrder.EffectsLayer</c>.</param>
public Spoofed2DGraphicEngine(AnglerGame game, RenderOrder order = RenderOrder.EffectsLayer)
    : base(game, order)
{
    // Depth buffer is read but not written while this engine draws.
    this.StencilState = DepthStencilState.DepthRead;
}
/// <summary>
/// Renders all visible <c>FogSphereNode</c>s as alpha-blended sphere volumes.
/// Back faces are drawn without depth test; the shader soft-clips against the
/// depth buffer via GBuffer0.
/// </summary>
/// <inheritdoc/>
public override void Render(IList<SceneNode> nodes, RenderContext context, RenderOrder order)
{
    if (nodes == null)
        throw new ArgumentNullException("nodes");
    if (context == null)
        throw new ArgumentNullException("context");

    int numberOfNodes = nodes.Count;
    if (numberOfNodes == 0)
        return;

    var cameraNode = context.CameraNode;
    var projection = cameraNode.Camera.Projection;
    Pose view = cameraNode.PoseWorld.Inverse;

    int frame = context.Frame;
    cameraNode.LastFrame = frame;

    var graphicsDevice = context.GraphicsService.GraphicsDevice;

    // Save render state.
    var originalRasterizerState = graphicsDevice.RasterizerState;
    var originalDepthStencilState = graphicsDevice.DepthStencilState;
    var originalBlendState = graphicsDevice.BlendState;

    // We render only backsides with no depth test.
    // OPTIMIZE: When camera is outside a sphere, we can render front sides with depth-read.
    graphicsDevice.RasterizerState = RasterizerState.CullClockwise;
    graphicsDevice.DepthStencilState = DepthStencilState.None;
    graphicsDevice.BlendState = BlendState.AlphaBlend;

    // Set global effect parameters.
    var viewport = graphicsDevice.Viewport;
    _parameterViewportSize.SetValue(new Vector2(viewport.Width, viewport.Height));
    _parameterView.SetValue(view);
    _parameterProjection.SetValue(projection);
    _parameterCameraPosition.SetValue((Vector3)cameraNode.PoseWorld.Position);
    _parameterCameraFar.SetValue(projection.Far);
    _parameterGBuffer0.SetValue(context.GBuffer0);

    for (int i = 0; i < numberOfNodes; i++)
    {
        var node = nodes[i] as FogSphereNode;
        if (node == null)
            continue;

        // FogSphereNode is visible in current frame.
        node.LastFrame = frame;

        // Per-node parameters: world transform (scaled) and fog appearance.
        Matrix world = Matrix.CreateScale((Vector3)node.ScaleWorld) * node.PoseWorld;
        _parameterWorld.SetValue(world);
        _parameterWorldInverse.SetValue(Matrix.Invert(world));
        _parameterColor.SetValue((Vector3)node.Color);
        _parameterBlendMode.SetValue(node.BlendMode);
        _parameterDensity.SetValue(node.Density);
        _parameterFalloff.SetValue(node.Falloff);
        _parameterIntersectionSoftness.SetValue(node.IntersectionSoftness);

        _effect.CurrentTechnique.Passes[0].Apply();
        _submesh.Draw();
    }

    // Restore render states.
    graphicsDevice.RasterizerState = originalRasterizerState;
    graphicsDevice.DepthStencilState = originalDepthStencilState;
    graphicsDevice.BlendState = originalBlendState;
}
/// <summary>
/// Renders all visible <c>ScatteringSkyNode</c>s: an atmospheric-scattering sky
/// (Rayleigh + Mie) drawn on a submesh with depth-read and alpha blending.
/// The view matrix uses only the camera orientation, so the sky follows the
/// camera position (infinite distance).
/// </summary>
/// <inheritdoc/>
public override void Render(IList<SceneNode> nodes, RenderContext context, RenderOrder order)
{
    ThrowIfDisposed();
    if (nodes == null)
        throw new ArgumentNullException("nodes");
    if (context == null)
        throw new ArgumentNullException("context");

    int numberOfNodes = nodes.Count;
    if (nodes.Count == 0)
        return;

    context.Validate(_effect);
    context.ThrowIfCameraMissing();

    var graphicsDevice = context.GraphicsService.GraphicsDevice;
    var savedRenderState = new RenderStateSnapshot(graphicsDevice);
    graphicsDevice.BlendState = BlendState.AlphaBlend;
    graphicsDevice.RasterizerState = RasterizerState.CullNone;
    graphicsDevice.DepthStencilState = DepthStencilState.DepthRead;

    // Camera properties
    // Orientation only (translation zero): the sky is rendered around the camera.
    var cameraNode = context.CameraNode;
    Matrix view = (Matrix)new Matrix44F(cameraNode.PoseWorld.Orientation.Transposed, new Vector3F());
    _parameterView.SetValue(view);
    Matrix projection = cameraNode.Camera.Projection;
    _parameterProjection.SetValue(projection);

    // Update SceneNode.LastFrame for all visible nodes.
    int frame = context.Frame;
    cameraNode.LastFrame = frame;

    for (int i = 0; i < numberOfNodes; i++)
    {
        var node = nodes[i] as ScatteringSkyNode;
        if (node == null)
            continue;

        // ScatteringSkyNode is visible in current frame.
        node.LastFrame = frame;

        _parameterSunDirection.SetValue((Vector3)node.SunDirection);
        _parameterSunIntensity.SetValue((Vector3)(node.SunIntensity * node.SunColor));
        _parameterRadii.SetValue(new Vector4(
            node.AtmosphereHeight + node.PlanetRadius,  // Atmosphere radius
            node.PlanetRadius,                          // Ground radius
            node.ObserverAltitude + node.PlanetRadius,  // Observer radius
            node.ScaleHeight));                         // Absolute Scale height
        _parameterNumberOfSamples.SetValue(node.NumberOfSamples);
        _parameterBetaRayleigh.SetValue((Vector3)node.BetaRayleigh);
        _parameterBetaMie.SetValue((Vector3)node.BetaMie);
        _parameterGMie.SetValue(node.GMie);
        _parameterTransmittance.SetValue(node.Transmittance);

        if (node.BaseHorizonColor.IsNumericallyZero && node.BaseZenithColor.IsNumericallyZero)
        {
            // No base color.
            if (context.IsHdrEnabled())
                _passLinear.Apply();
            else
                _passGamma.Apply();
        }
        else
        {
            // Add base color.
            _parameterBaseHorizonColor.SetValue((Vector4)new Vector4F(node.BaseHorizonColor, node.BaseColorShift));
            _parameterBaseZenithColor.SetValue((Vector3)node.BaseZenithColor);
            if (context.IsHdrEnabled())
                _passLinearWithBaseColor.Apply();
            else
                _passGammaWithBaseColor.Apply();
        }

        _submesh.Draw();
    }

    savedRenderState.Restore();
}
/// <summary>
/// Renders all visible <c>StarfieldNode</c>s. Star meshes are blended additively
/// over whatever sky/cosmos content was drawn before, with depth-read only.
/// Only the node's orientation enters the world matrix, so the stars appear at
/// infinite distance.
/// </summary>
/// <param name="nodes">Scene nodes; non-StarfieldNodes are ignored.</param>
/// <param name="context">Render context; must contain a camera.</param>
/// <param name="order">Not used by this renderer.</param>
public override void Render(IList<SceneNode> nodes, RenderContext context, RenderOrder order)
{
    ThrowIfDisposed();
    if (nodes == null)
        throw new ArgumentNullException("nodes");
    if (context == null)
        throw new ArgumentNullException("context");

    int numberOfNodes = nodes.Count;
    if (nodes.Count == 0)
        return;

    context.Validate(_effect);
    context.ThrowIfCameraMissing();

    var graphicsDevice = context.GraphicsService.GraphicsDevice;
    var savedRenderState = new RenderStateSnapshot(graphicsDevice);

    // Camera properties
    var cameraNode = context.CameraNode;
    Matrix view = (Matrix)cameraNode.View;
    Matrix projection = cameraNode.Camera.Projection;
    Matrix viewProjection = view * projection;

    // Update SceneNode.LastFrame for all visible nodes.
    int frame = context.Frame;
    cameraNode.LastFrame = frame;

    // Blend additively over any cosmos textures.
    graphicsDevice.RasterizerState = RasterizerState.CullNone;
    graphicsDevice.DepthStencilState = DepthStencilState.DepthRead;
    graphicsDevice.BlendState = BlendState.Additive;

    _effectParameterViewportSize.SetValue(new Vector2(context.Viewport.Width, context.Viewport.Height));

    for (int i = 0; i < numberOfNodes; i++)
    {
        var node = nodes[i] as StarfieldNode;
        if (node == null)
            continue;

        // SkyboxNode is visible in current frame.
        node.LastFrame = frame;

        if (node.Stars != null && node.Stars.Count > 0)
        {
            // Orientation only; the starfield is centered on the camera.
            Matrix world = (Matrix)new Matrix44F(node.PoseWorld.Orientation, Vector3F.Zero);
            _effectParameterWorldViewProjection.SetValue(world * viewProjection);

            // In [ZFX] the star luminance of the precomputed star data is scaled with
            //   float const viewFactor = tan(fov);
            //   float const resolutionFactor = resolution / 1920.0f;
            //   float const luminanceScale = 1.0f / (viewFactor * viewFactor) * (resolutionFactor * resolutionFactor);
            // We ignore this here, but we could add this factor to the Intensity parameter.
            _effectParameterIntensity.SetValue((Vector3)node.Color);

            if (context.IsHdrEnabled())
                _effectPassLinear.Apply();
            else
                _effectPassGamma.Apply();

            var mesh = GetStarfieldMesh(node, context);
            mesh.Draw();
        }
    }

    savedRenderState.Restore();
}
/// <summary>
/// Renders variance shadow maps (VSM) for all visible <c>LightNode</c>s that use a
/// <c>VarianceShadow</c>. For each light an orthographic light camera is fitted around
/// either the player-camera frustum part (up to <c>MaxDistance</c>) or a static
/// <c>TargetArea</c>, the scene is rendered into a two-channel shadow map via
/// <c>RenderCallback</c>, and the map is optionally blurred by <c>shadow.Filter</c>.
/// </summary>
/// <param name="nodes">Scene nodes; entries that are not LightNodes with a VarianceShadow are skipped.</param>
/// <param name="context">Render context; must contain a camera node and a scene.</param>
/// <param name="order">Not used by this renderer.</param>
/// <exception cref="NotImplementedException">
/// The player camera does not use a perspective projection.
/// </exception>
public override void Render(IList<SceneNode> nodes, RenderContext context, RenderOrder order)
{
    if (nodes == null)
        throw new ArgumentNullException("nodes");
    if (context == null)
        throw new ArgumentNullException("context");

    int numberOfNodes = nodes.Count;
    if (numberOfNodes == 0)
        return;

    Debug.Assert(context.CameraNode != null, "A camera node has to be set in the render context.");
    Debug.Assert(context.Scene != null, "A scene has to be set in the render context.");

    // Remember context state that is modified below and restored at the end.
    var originalRenderTarget = context.RenderTarget;
    var originalViewport = context.Viewport;
    var originalReferenceNode = context.ReferenceNode;

    // Camera properties
    var cameraNode = context.CameraNode;
    var cameraPose = cameraNode.PoseWorld;
    var projection = cameraNode.Camera.Projection;
    if (!(projection is PerspectiveProjection))
    {
        // FIX: The message previously said "with perspective camera", but this branch is
        // taken exactly when the camera is NOT perspective (e.g. orthographic).
        throw new NotImplementedException("VSM shadow maps not yet implemented for scenes without perspective camera.");
    }

    float fieldOfViewY = projection.FieldOfViewY;
    float aspectRatio = projection.AspectRatio;

    // Update SceneNode.LastFrame for all rendered nodes.
    int frame = context.Frame;
    cameraNode.LastFrame = frame;

    // The scene node renderer should use the light camera instead of the player camera.
    context.CameraNode = _orthographicCameraNode;

    // The shadow map is rendered using the technique "DirectionalVsm".
    // See ShadowMap.fx in the DigitalRune source code folder.
    context.Technique = "DirectionalVsm";

    var graphicsService = context.GraphicsService;
    var graphicsDevice = graphicsService.GraphicsDevice;
    var originalBlendState = graphicsDevice.BlendState;
    var originalDepthStencilState = graphicsDevice.DepthStencilState;
    var originalRasterizerState = graphicsDevice.RasterizerState;

    for (int i = 0; i < numberOfNodes; i++)
    {
        var lightNode = nodes[i] as LightNode;
        if (lightNode == null)
            continue;

        var shadow = lightNode.Shadow as VarianceShadow;
        if (shadow == null)
            continue;

        // LightNode is visible in current frame.
        lightNode.LastFrame = frame;

        // The format of the shadow map:
        var format = new RenderTargetFormat(
            shadow.PreferredSize,
            shadow.PreferredSize,
            false,
            shadow.Prefer16Bit ? SurfaceFormat.HalfVector2 : SurfaceFormat.Vector2, // VSM needs two channels!
            DepthFormat.Depth24);

        // A locked shadow map keeps its previous content.
        if (shadow.ShadowMap != null && shadow.IsLocked)
            continue;

        if (shadow.ShadowMap == null)
            shadow.ShadowMap = graphicsService.RenderTargetPool.Obtain2D(format);

        graphicsDevice.DepthStencilState = DepthStencilState.Default;
        graphicsDevice.BlendState = BlendState.Opaque;

        // Render front and back faces for VSM due to low shadow map texel density.
        // (VSM is usually used for distant geometry.)
        graphicsDevice.RasterizerState = RasterizerState.CullNone;

        graphicsDevice.SetRenderTarget(shadow.ShadowMap);
        context.RenderTarget = shadow.ShadowMap;
        context.Viewport = graphicsDevice.Viewport;
        graphicsDevice.Clear(Color.White);

        // Compute an orthographic camera for the light.
        // If Shadow.TargetArea is null, the shadow map should cover the area in front of the player camera.
        // If Shadow.TargetArea is set, the shadow map should cover this static area.
        if (shadow.TargetArea == null)
        {
            // near/far of this shadowed area.
            float near = projection.Near;
            float far = shadow.MaxDistance;

            // Abort if near-far distances are invalid.
            // NOTE(review): this leaves the cleared shadow map bound as the device render
            // target until the next iteration/end of loop — same as the original behavior.
            if (Numeric.IsGreaterOrEqual(near, far))
                continue;

            // Create a view volume for frustum part that is covered by the shadow map.
            _cameraVolume.SetFieldOfView(fieldOfViewY, aspectRatio, near, far);

            // Find the bounding sphere of the frustum part.
            Vector3F center;
            float radius;
            GetBoundingSphere(_cameraVolume, out center, out radius);

            // Convert center to light space.
            Pose lightPose = lightNode.PoseWorld;
            center = cameraPose.ToWorldPosition(center);
            center = lightPose.ToLocalPosition(center);

            // Snap center to texel positions to avoid shadow swimming.
            SnapPositionToTexels(ref center, 2 * radius, shadow.ShadowMap.Height);

            // Convert center back to world space.
            center = lightPose.ToWorldPosition(center);

            Matrix33F orientation = lightPose.Orientation;
            Vector3F backward = orientation.GetColumn(2);
            var orthographicProjection = (OrthographicProjection)_orthographicCameraNode.Camera.Projection;

            // Create a tight orthographic frustum around the target bounding sphere.
            orthographicProjection.SetOffCenter(-radius, radius, -radius, radius, 0, 2 * radius);
            Vector3F cameraPosition = center + radius * backward;
            Pose frustumPose = new Pose(cameraPosition, orientation);
            Pose view = frustumPose.Inverse;
            shadow.ViewProjection = (Matrix)view * orthographicProjection;

            // For rendering the shadow map, move near plane back by MinLightDistance
            // to catch occluders in front of the camera frustum.
            orthographicProjection.Near = -shadow.MinLightDistance;
            _orthographicCameraNode.PoseWorld = frustumPose;
        }
        else
        {
            // Get bounding sphere of static target area.
            Aabb targetAabb = shadow.TargetArea.Value;
            Vector3F center = targetAabb.Center;
            float radius = (targetAabb.Maximum - center).Length;

            // Convert center to light space.
            Matrix33F orientation = lightNode.PoseWorld.Orientation;
            Vector3F backward = orientation.GetColumn(2);
            var orthographicProjection = (OrthographicProjection)_orthographicCameraNode.Camera.Projection;

            // Create a tight orthographic frustum around the target bounding sphere.
            orthographicProjection.SetOffCenter(-radius, radius, -radius, radius, 0, 2 * radius);
            Vector3F cameraPosition = center + radius * backward;
            Pose frustumPose = new Pose(cameraPosition, orientation);
            Pose view = frustumPose.Inverse;
            shadow.ViewProjection = (Matrix)view * orthographicProjection;

            // For rendering the shadow map, move near plane back by MinLightDistance
            // to catch occluders in front of the camera frustum.
            orthographicProjection.Near = -shadow.MinLightDistance;
            _orthographicCameraNode.PoseWorld = frustumPose;
        }

        context.ReferenceNode = lightNode;
        context.Object = shadow;

        // Render objects into shadow map.
        bool shadowMapContainsSomething = RenderCallback(context);
        if (shadowMapContainsSomething)
        {
            // Blur shadow map.
            if (shadow.Filter != null && shadow.Filter.Scale > 0)
            {
                context.SourceTexture = shadow.ShadowMap;
                shadow.Filter.Process(context);
                context.SourceTexture = null;
            }
        }
        else
        {
            // Shadow map is empty. Recycle it.
            graphicsService.RenderTargetPool.Recycle(shadow.ShadowMap);
            shadow.ShadowMap = null;
        }
    }

    graphicsDevice.SetRenderTarget(null);
    graphicsDevice.BlendState = originalBlendState;
    graphicsDevice.DepthStencilState = originalDepthStencilState;
    graphicsDevice.RasterizerState = originalRasterizerState;

    // Restore the context.
    context.CameraNode = cameraNode;
    context.Technique = null;
    context.RenderTarget = originalRenderTarget;
    context.Viewport = originalViewport;
    context.ReferenceNode = originalReferenceNode;
    context.Object = null;
}
/// <summary>
/// Renders all visible <c>GradientTextureSkyNode</c>s: a sky dome colored by
/// front/back gradient textures (indexed by time of day), optionally blended
/// with a CIE sky luminance model when <c>CieSkyStrength</c> is non-negligible.
/// </summary>
/// <param name="nodes">Scene nodes; non-GradientTextureSkyNodes are ignored.</param>
/// <param name="context">Render context; must contain a camera.</param>
/// <param name="order">Not used by this renderer.</param>
public override void Render(IList<SceneNode> nodes, RenderContext context, RenderOrder order)
{
    ThrowIfDisposed();
    if (nodes == null)
        throw new ArgumentNullException("nodes");
    if (context == null)
        throw new ArgumentNullException("context");

    int numberOfNodes = nodes.Count;
    if (nodes.Count == 0)
        return;

    context.Validate(_effect);
    context.ThrowIfCameraMissing();

    var graphicsDevice = context.GraphicsService.GraphicsDevice;
    var savedRenderState = new RenderStateSnapshot(graphicsDevice);
    graphicsDevice.BlendState = BlendState.AlphaBlend;
    graphicsDevice.RasterizerState = RasterizerState.CullNone;
    graphicsDevice.DepthStencilState = DepthStencilState.DepthRead;

    // Camera properties
    // Orientation only (translation zero): the sky is rendered around the camera.
    var cameraNode = context.CameraNode;
    Matrix view = (Matrix)new Matrix44F(cameraNode.PoseWorld.Orientation.Transposed, new Vector3F());
    _parameterView.SetValue(view);
    Matrix projection = cameraNode.Camera.Projection;
    _parameterProjection.SetValue(projection);

    // Update SceneNode.LastFrame for all visible nodes.
    int frame = context.Frame;
    cameraNode.LastFrame = frame;

    for (int i = 0; i < numberOfNodes; i++)
    {
        var node = nodes[i] as GradientTextureSkyNode;
        if (node == null)
            continue;

        // GradientTextureSkyNode is visible in current frame.
        node.LastFrame = frame;

        _parameterSunDirection.SetValue((Vector3)node.SunDirection);
        // Time of day normalized to [0, 1] for gradient texture lookup.
        _parameterTime.SetValue((float)node.TimeOfDay.TotalHours / 24);
        _parameterColor.SetValue((Vector4)node.Color);
        _parameterFrontTexture.SetValue(node.FrontTexture);
        _parameterBackTexture.SetValue(node.BackTexture);

        if (node.CieSkyStrength < Numeric.EpsilonF)
        {
            // CIE sky contribution negligible: plain gradient passes.
            if (context.IsHdrEnabled())
                _passLinear.Apply();
            else
                _passGamma.Apply();
        }
        else
        {
            // Mix in the CIE sky luminance distribution.
            var p = node.CieSkyParameters;
            _parameterAbcd.SetValue(new Vector4(p.A, p.B, p.C, p.D));
            _parameterEAndStrength.SetValue(new Vector2(p.E, node.CieSkyStrength));
            if (context.IsHdrEnabled())
                _passCieLinear.Apply();
            else
                _passCieGamma.Apply();
        }

        _submesh.Draw();
    }

    savedRenderState.Restore();
}
/// <summary>
/// Renders shadow maps for all visible <c>LightNode</c>s that use a
/// <c>StandardShadow</c> (Spotlight or ProjectorLight). For each light a matching
/// perspective/orthographic shadow camera is configured, the scene is rendered
/// into the shadow map via <c>RenderCallback</c>, and the shadow parameters
/// needed later by the shadow-mask renderer are stored on the shadow object.
/// </summary>
/// <param name="nodes">Scene nodes; entries that are not LightNodes with a StandardShadow are skipped.</param>
/// <param name="context">Render context; must contain a camera node and a scene.</param>
/// <param name="order">Not used by this renderer.</param>
/// <exception cref="GraphicsException">
/// A StandardShadow is attached to a light that is neither a Spotlight nor a ProjectorLight.
/// </exception>
public override void Render(IList<SceneNode> nodes, RenderContext context, RenderOrder order)
{
    if (nodes == null)
        throw new ArgumentNullException("nodes");
    if (context == null)
        throw new ArgumentNullException("context");

    int numberOfNodes = nodes.Count;
    if (numberOfNodes == 0)
        return;

    // Note: The camera node is not used by the StandardShadowMapRenderer.
    // Still throw an exception if null for consistency. (All other shadow map
    // renderers need a camera node.)
    context.ThrowIfCameraMissing();
    context.ThrowIfSceneMissing();

    // Remember context state that is modified below and restored at the end.
    var originalRenderTarget = context.RenderTarget;
    var originalViewport = context.Viewport;
    var originalReferenceNode = context.ReferenceNode;

    var cameraNode = context.CameraNode;

    // Update SceneNode.LastFrame for all visible nodes.
    int frame = context.Frame;
    cameraNode.LastFrame = frame;

    context.Technique = "Default";

    var graphicsService = context.GraphicsService;
    var graphicsDevice = graphicsService.GraphicsDevice;
    var savedRenderState = new RenderStateSnapshot(graphicsDevice);

    for (int i = 0; i < numberOfNodes; i++)
    {
        var lightNode = nodes[i] as LightNode;
        if (lightNode == null)
            continue;

        var shadow = lightNode.Shadow as StandardShadow;
        if (shadow == null)
            continue;

        // LightNode is visible in current frame.
        lightNode.LastFrame = frame;

        // Get a new shadow map if necessary.
        if (shadow.ShadowMap == null)
        {
            shadow.ShadowMap = graphicsService.RenderTargetPool.Obtain2D(
                new RenderTargetFormat(
                    shadow.PreferredSize,
                    shadow.PreferredSize,
                    false,
                    shadow.Prefer16Bit ? SurfaceFormat.HalfSingle : SurfaceFormat.Single,
                    DepthFormat.Depth24));
        }

        // Create a suitable shadow camera.
        CameraNode lightCameraNode;
        if (lightNode.Light is ProjectorLight)
        {
            // Copy the projector's projection into the reusable shadow camera.
            var light = (ProjectorLight)lightNode.Light;
            if (light.Projection is PerspectiveProjection)
            {
                var lp = (PerspectiveProjection)light.Projection;
                var cp = (PerspectiveProjection)_perspectiveCameraNode.Camera.Projection;
                cp.SetOffCenter(lp.Left, lp.Right, lp.Bottom, lp.Top, lp.Near, lp.Far);
                lightCameraNode = _perspectiveCameraNode;
            }
            else //if (light.Projection is OrthographicProjection)
            {
                var lp = (OrthographicProjection)light.Projection;
                var cp = (OrthographicProjection)_orthographicCameraNode.Camera.Projection;
                cp.SetOffCenter(lp.Left, lp.Right, lp.Bottom, lp.Top, lp.Near, lp.Far);
                lightCameraNode = _orthographicCameraNode;
            }
        }
        else if (lightNode.Light is Spotlight)
        {
            // Spotlight: symmetric perspective frustum covering the light cone.
            var light = (Spotlight)lightNode.Light;
            var cp = (PerspectiveProjection)_perspectiveCameraNode.Camera.Projection;
            cp.SetFieldOfView(2 * light.CutoffAngle, 1, shadow.DefaultNear, light.Range);
            lightCameraNode = _perspectiveCameraNode;
        }
        else
        {
            throw new GraphicsException("StandardShadow can only be used with a Spotlight or a ProjectorLight.");
        }

        lightCameraNode.PoseWorld = lightNode.PoseWorld;

        // Store data for use in StandardShadowMaskRenderer.
        shadow.Near = lightCameraNode.Camera.Projection.Near;
        shadow.Far = lightCameraNode.Camera.Projection.Far;
        shadow.View = lightCameraNode.PoseWorld.Inverse;
        shadow.Projection = lightCameraNode.Camera.Projection;

        // World units per texel at a planar distance of 1 world unit.
        float unitsPerTexel = lightCameraNode.Camera.Projection.Width / (shadow.ShadowMap.Height * shadow.Near);

        // Convert depth bias from "texel" to world space.
        // Minus to move receiver depth closer to light.
        shadow.EffectiveDepthBias = -shadow.DepthBias * unitsPerTexel;

        // Convert normal offset from "texel" to world space.
        shadow.EffectiveNormalOffset = shadow.NormalOffset * unitsPerTexel;

        graphicsDevice.SetRenderTarget(shadow.ShadowMap);
        context.RenderTarget = shadow.ShadowMap;
        context.Viewport = graphicsDevice.Viewport;
        graphicsDevice.Clear(Color.White);

        // The scene node renderer should use the light camera instead of the player camera.
        context.CameraNode = lightCameraNode;
        context.ReferenceNode = lightNode;
        context.Object = shadow;

        graphicsDevice.DepthStencilState = DepthStencilState.Default;
        graphicsDevice.RasterizerState = RasterizerState.CullCounterClockwise;
        graphicsDevice.BlendState = BlendState.Opaque;

        bool shadowMapContainsSomething = RenderCallback(context);
        if (!shadowMapContainsSomething)
        {
            // Shadow map is empty. Recycle it.
            graphicsService.RenderTargetPool.Recycle(shadow.ShadowMap);
            shadow.ShadowMap = null;
        }
    }

    graphicsDevice.SetRenderTarget(null);
    savedRenderState.Restore();

    // Restore the context.
    context.CameraNode = cameraNode;
    context.Technique = null;
    context.RenderTarget = originalRenderTarget;
    context.Viewport = originalViewport;
    context.ReferenceNode = originalReferenceNode;
    context.Object = null;
}
/// <summary>
/// Executes the sorted shadow-mask jobs. Jobs are batched by (shadow mask index,
/// renderer); each batch is submitted with a single renderer call. When a new
/// shadow mask starts, the previous one is post-processed (filter/upsample) and
/// the device is switched to the next mask target. Supports optional half-resolution
/// rendering with depth-aware upsampling.
/// </summary>
/// <param name="context">The render context.</param>
/// <param name="order">Render order, forwarded to the per-batch renderers.</param>
internal override void ProcessJobs(RenderContext context, RenderOrder order)
{
    var graphicsDevice = _graphicsService.GraphicsDevice;
    var savedRenderState = new RenderStateSnapshot(graphicsDevice);
    var target = context.RenderTarget;
    var viewport = context.Viewport;

    Debug.Assert(_shadowMasks.Length > 0);
    Debug.Assert(_shadowMasks[0] != null);

    RenderTarget2D lowResTarget = null;
    if (UseHalfResolution && Numeric.IsGreater(UpsampleDepthSensitivity, 0))
    {
        // Half-res rendering with upsampling.
        var format = new RenderTargetFormat(_shadowMasks[0]);
        format.Width /= 2;
        format.Height /= 2;
        lowResTarget = _graphicsService.RenderTargetPool.Obtain2D(format);
    }

    int index = 0;
    var jobs = Jobs.Array;
    int jobCount = Jobs.Count;
    int lastShadowMaskIndex = -1;
    while (index < jobCount)
    {
        // The shadow mask index is encoded in the upper bits of the sort key.
        int shadowMaskIndex = (int)(jobs[index].SortKey >> 16);
        var renderer = jobs[index].Renderer;

        // Find end of current batch.
        // FIX: Batch membership must be determined by the CURRENT job's mask index
        // (shadowMaskIndex), not by lastShadowMaskIndex (the previous batch's mask,
        // -1 on the first iteration). The old comparison broke batching: the first
        // batch was always a single job, and later batches could absorb jobs that
        // belong to a different shadow mask.
        int endIndexExclusive = index + 1;
        while (endIndexExclusive < jobCount)
        {
            if ((int)(jobs[endIndexExclusive].SortKey >> 16) != shadowMaskIndex
                || jobs[endIndexExclusive].Renderer != renderer)
            {
                break;
            }

            endIndexExclusive++;
        }

        // Restore the render state. (The integrated scene node renderers properly
        // restore the render state, but third-party renderers might mess it up.)
        if (index > 0)
        {
            savedRenderState.Restore();
        }

        if (shadowMaskIndex != lastShadowMaskIndex)
        {
            // Done with current shadow mask. Apply filter.
            if (lastShadowMaskIndex >= 0)
            {
                PostProcess(context, context.RenderTarget, _shadowMasks[lastShadowMaskIndex]);
            }

            // Switch to next shadow mask.
            lastShadowMaskIndex = shadowMaskIndex;
            var shadowMask = lowResTarget ?? _shadowMasks[shadowMaskIndex];

            // Set device render target and clear it to white (= no shadow).
            graphicsDevice.SetRenderTarget(shadowMask);
            context.RenderTarget = shadowMask;
            context.Viewport = graphicsDevice.Viewport;
            graphicsDevice.Clear(Color.White);
        }

        // Submit batch to renderer.
        // (Use Accessor to expose current batch as IList<SceneNode>.)
        JobsAccessor.Set(Jobs, index, endIndexExclusive);
        renderer.Render(JobsAccessor, context, order);
        JobsAccessor.Reset();

        index = endIndexExclusive;
    }

    // Done with last shadow mask. Apply filter.
    // (Guard: lastShadowMaskIndex is -1 when there were no jobs at all.)
    if (lastShadowMaskIndex >= 0)
    {
        PostProcess(context, context.RenderTarget, _shadowMasks[lastShadowMaskIndex]);
    }

    savedRenderState.Restore();
    graphicsDevice.ResetTextures();
    graphicsDevice.SetRenderTarget(null);
    context.RenderTarget = target;
    context.Viewport = viewport;

    _graphicsService.RenderTargetPool.Recycle(lowResTarget);
}
/// <summary>
/// Draws the sorted decal jobs. Jobs sharing the same material are batched into a
/// single Draw call; effect-level and material-level parameter bindings are only
/// re-applied when the effect/material changes between batches. In the "GBuffer"
/// render pass the specular power is encoded into the device BlendFactor.
/// </summary>
/// <param name="context">The render context.</param>
/// <param name="order">Render order, forwarded to <c>Draw</c>.</param>
private void ProcessJobs(RenderContext context, RenderOrder order)
{
    Effect currentEffect = null;
    EffectEx currentEffectEx = null;
    EffectBinding currentMaterialBinding = null;

    // Set render states for drawing decals.
    var graphicsDevice = context.GraphicsService.GraphicsDevice;
    var savedRenderState = new RenderStateSnapshot(graphicsDevice);
    graphicsDevice.DepthStencilState = DepthStencilState.DepthRead;
    graphicsDevice.RasterizerState = RasterizerState.CullCounterClockwise;

    if (!ClipAtNearPlane)
    {
        // Cache some info for near plane intersection tests.
        var cameraNode = context.CameraNode;
        var cameraPose = cameraNode.PoseWorld;
        var projection = cameraNode.Camera.Projection;

        // Get min and max of near plane AABB in view space.
        var min = new Vector3(projection.Left, projection.Bottom, -projection.Near);
        var max = new Vector3(projection.Right, projection.Top, -projection.Near);

        // Convert min and max to world space.
        min = cameraPose.ToWorldPosition(min);
        max = cameraPose.ToWorldPosition(max);

        // Get world space aabb
        _cameraNearPlaneAabbWorld = new Aabb(Vector3.Min(min, max), Vector3.Max(min, max));
    }

    // The BlendState is set below.
    bool isGBufferPass = string.Equals(context.RenderPass, "GBuffer", StringComparison.OrdinalIgnoreCase); // InvariantCultureIgnoreCase would be better but is not available in WindowsStore.
    var blendState = isGBufferPass ? GBufferBlendState : BlendState.AlphaBlend;

    int index = 0;
    var jobs = _jobs.Array;
    int jobCount = _jobs.Count;
    while (index < jobCount)
    {
        // Update BlendState. (Needs to be done for each batch because decals can
        // change the blend mode in the material. For example, alpha-tested decals
        // can disable alpha blending.)
        graphicsDevice.BlendState = blendState;

        uint materialKey = jobs[index].MaterialKey;
        var materialInstanceBinding = jobs[index].MaterialInstanceBinding;
        var materialBinding = materialInstanceBinding.MaterialBinding;
        var effectEx = materialBinding.EffectEx;

        Debug.Assert(effectEx != null, "EffectEx must not be null.");

        context.MaterialBinding = materialBinding;
        context.MaterialInstanceBinding = materialInstanceBinding;

        if (currentEffectEx != effectEx)
        {
            // ----- Next effect.
            currentEffectEx = effectEx;
            currentEffect = effectEx.Resource;

            // Reset ID. (Only used during state sorting.)
            ResetEffectId(effectEx);

            // Update and apply global bindings.
            foreach (var binding in currentEffectEx.ParameterBindings)
            {
                if (binding.Description.Hint == EffectParameterHint.Global)
                {
                    binding.Update(context);
                    binding.Apply(context);
                }
            }
        }

        if (currentMaterialBinding != materialBinding)
        {
            // ----- Next material.
            currentMaterialBinding = materialBinding;

            // Reset ID. (Only used during state sorting.)
            ResetMaterialId(materialBinding);

            // Update and apply material bindings.
            foreach (var binding in currentMaterialBinding.ParameterBindings)
            {
                binding.Update(context);
                binding.Apply(context);

                // In "GBuffer" pass the specular power is written to the alpha channel.
                // The specular power needs to be set as the BlendFactor. (See GBufferBlendState.)
                if (isGBufferPass && binding.Description.Semantic == DefaultEffectParameterSemantics.SpecularPower)
                {
                    var specularPowerBinding = binding as EffectParameterBinding<float>;
                    if (specularPowerBinding != null)
                    {
                        // Note: Specular power is currently encoded using log2 - see Deferred.fxh.
                        // (Blending encoded values is mathematically not correct, but there are no
                        // rules for blending specular powers anyway.)
                        float specularPower = specularPowerBinding.Value;
                        int encodedSpecularPower = (byte)((float)Math.Log(specularPower + 0.0001f, 2) / 17.6f * 255.0f);
                        graphicsDevice.BlendFactor = new Color(255, 255, 255, encodedSpecularPower);
                    }
                }
            }
        }

        // Note: EffectTechniqueBinding only returns the EffectTechnique, but does
        // not set it as the current technique.
        var techniqueBinding = materialInstanceBinding.TechniqueBinding;
        var technique = techniqueBinding.GetTechnique(currentEffect, context);

        // See if there is an associated technique that supports hardware instancing.
        //var instancingTechnique = (EffectTechnique)null;
        //var techniqueDescription = currentEffectEx.TechniqueDescriptions[technique];
        //if (techniqueDescription != null)
        //  instancingTechnique = techniqueDescription.InstancingTechnique;

        //if (EnableInstancing && instancingTechnique != null)
        //{
        //  // ----- Instancing
        //  // Render all decals that share the same effect/material and batch instances
        //  // into a single draw call.
        //  int count = 1;
        //  while (index + count < jobCount && jobs[index + count].MaterialKey == materialKey)
        //    count++;

        //  if (count >= InstancingThreshold)
        //  {
        //    // Draw decals using instancing.
        //    currentEffect.CurrentTechnique = instancingTechnique;
        //    var passBinding = techniqueBinding.GetPassBinding(instancingTechnique, context);
        //    DrawInstanced(ref passBinding, context, index, count);
        //    index += count;
        //  }
        //  else
        //  {
        //    // Draw decals without instancing.
        //    currentEffect.CurrentTechnique = technique;
        //    var passBinding = techniqueBinding.GetPassBinding(technique, context);
        //    Draw(ref passBinding, context, index, count, order);
        //    index += count;
        //  }
        //}
        //else
        {
            // ----- No instancing
            // Render all decals that share the same effect/material.
            int count = 1;
            while (index + count < jobCount && jobs[index + count].MaterialKey == materialKey)
                count++;

            currentEffect.CurrentTechnique = technique;
            var passBinding = techniqueBinding.GetPassBinding(technique, context);
            Draw(ref passBinding, context, index, count, order);
            index += count;
        }
    }

    context.MaterialBinding = null;
    context.MaterialInstanceBinding = null;

    savedRenderState.Restore();
}
/// <summary>
/// Renders the shadow mask for all visible <c>LightNode</c>s that carry a
/// <c>PointLight</c> with a <c>CubeMapShadow</c>.
/// </summary>
/// <param name="nodes">The scene nodes to process. Non-matching nodes are skipped.</param>
/// <param name="context">The render context. Must provide a camera and G-buffer 0.</param>
/// <param name="order">Ignored; nodes are processed in list order.</param>
/// <exception cref="ArgumentNullException">
/// <paramref name="nodes"/> or <paramref name="context"/> is <see langword="null"/>.
/// </exception>
public override void Render(IList<SceneNode> nodes, RenderContext context, RenderOrder order)
{
    if (nodes == null)
        throw new ArgumentNullException("nodes");
    if (context == null)
        throw new ArgumentNullException("context");

    int numberOfNodes = nodes.Count;
    if (numberOfNodes == 0)
        return;

    context.Validate(_effect);
    context.ThrowIfCameraMissing();

    var graphicsDevice = _effect.GraphicsDevice;
    var savedRenderState = new RenderStateSnapshot(graphicsDevice);
    graphicsDevice.DepthStencilState = DepthStencilState.None;
    graphicsDevice.RasterizerState = RasterizerState.CullNone;

    var cameraNode = context.CameraNode;
    _parameterViewInverse.SetValue(cameraNode.PoseWorld);
    _parameterGBuffer0.SetValue(context.GBuffer0);

    Viewport viewport = context.Viewport;
    _parameterParameters0.SetValue(new Vector2(viewport.Width, viewport.Height));

    // Lazily create the jitter (noise) texture used for shadow filtering.
    if (_jitterMap == null)
        _jitterMap = NoiseHelper.GetGrainTexture(context.GraphicsService, NoiseHelper.DefaultJitterMapWidth);

    _parameterJitterMap.SetValue(_jitterMap);

    for (int i = 0; i < numberOfNodes; i++)
    {
        var lightNode = nodes[i] as LightNode;
        if (lightNode == null)
            continue;

        var light = lightNode.Light as PointLight;
        if (light == null)
            continue;   // FIX: was 'return', which aborted the whole pass (skipping the
                        // remaining nodes and the render state restore) for one bad node.

        var shadow = lightNode.Shadow as CubeMapShadow;
        if (shadow == null)
            continue;

        if (shadow.ShadowMap == null || shadow.ShadowMask == null)
            continue;

        // The effect must only render in a specific channel.
        // Do not change blend state if the correct write channels is already set, e.g. if this
        // shadow is part of a CompositeShadow, the correct blend state is already set.
        if ((int)graphicsDevice.BlendState.ColorWriteChannels != (1 << shadow.ShadowMaskChannel))
            graphicsDevice.BlendState = GraphicsHelper.BlendStateWriteSingleChannel[shadow.ShadowMaskChannel];

        _parameterParameters1.SetValue(new Vector4(
            shadow.Near,
            light.Range,
            shadow.EffectiveDepthBias,
            shadow.EffectiveNormalOffset));

        // If we use a subset of the Poisson kernel, we have to normalize the scale.
        int numberOfSamples = Math.Min(shadow.NumberOfSamples, StandardShadowMaskRenderer.PoissonKernel.Length);
        float filterRadius = shadow.FilterRadius;
        if (numberOfSamples > 0)
            filterRadius /= StandardShadowMaskRenderer.PoissonKernel[numberOfSamples - 1].Length();

        _parameterParameters2.SetValue(new Vector3(
            shadow.ShadowMap.Size,
            filterRadius,
            // The StandardShadow.JitterResolution is the number of texels per world unit.
            // In the shader the parameter JitterResolution contains the division by the jitter map size.
            shadow.JitterResolution / _jitterMap.Width));

        _parameterLightPosition.SetValue((Vector3)cameraNode.PoseWorld.ToLocalPosition(lightNode.PoseWorld.Position));
        _parameterShadowView.SetValue(lightNode.PoseWorld.Inverse * cameraNode.PoseWorld);
        _parameterShadowMap.SetValue(shadow.ShadowMap);

        var rectangle = GraphicsHelper.GetViewportRectangle(cameraNode, viewport, lightNode);
        Vector2F texCoordTopLeft = new Vector2F(rectangle.Left / (float)viewport.Width, rectangle.Top / (float)viewport.Height);
        Vector2F texCoordBottomRight = new Vector2F(rectangle.Right / (float)viewport.Width, rectangle.Bottom / (float)viewport.Height);
        GraphicsHelper.GetFrustumFarCorners(cameraNode.Camera.Projection, texCoordTopLeft, texCoordBottomRight, _frustumFarCorners);
        _parameterFrustumCorners.SetValue(_frustumFarCorners);

        var pass = GetPass(numberOfSamples);

        if (numberOfSamples > 0)
        {
            if (_lastNumberOfSamples != numberOfSamples)
            {
                // Create an array with the first n samples and the rest set to 0.
                _lastNumberOfSamples = numberOfSamples;
                for (int j = 0; j < numberOfSamples; j++)
                {
                    _samples[j].Y = StandardShadowMaskRenderer.PoissonKernel[j].Y;
                    _samples[j].X = StandardShadowMaskRenderer.PoissonKernel[j].X;
                    _samples[j].Z = 1.0f / numberOfSamples;
                }

                // Set the rest to zero.
                for (int j = numberOfSamples; j < _samples.Length; j++)
                    _samples[j] = Vector3.Zero;

                _parameterSamples.SetValue(_samples);
            }
            else if (i == 0)
            {
                // Apply offsets in the first loop.
                _parameterSamples.SetValue(_samples);
            }
        }

        pass.Apply();
        graphicsDevice.DrawQuad(rectangle);
    }

    // Unbind textures so other renderers can write into them.
    _parameterGBuffer0.SetValue((Texture2D)null);
    _parameterJitterMap.SetValue((Texture2D)null);
    _parameterShadowMap.SetValue((Texture2D)null);
    savedRenderState.Restore();
}
/// <summary>
/// Renders the content of each visible <c>SceneCaptureNode</c> into its target texture
/// (a <c>RenderTarget2D</c> or a <c>RenderTargetCube</c>) using the registered
/// <c>RenderCallback</c>.
/// </summary>
/// <param name="nodes">The scene nodes to process. Non-matching nodes are skipped.</param>
/// <param name="context">The render context; its camera/viewport/target are temporarily
/// replaced and restored before this method returns.</param>
/// <param name="order">Ignored; nodes are processed in list order.</param>
/// <exception cref="ArgumentNullException">
/// <paramref name="nodes"/> or <paramref name="context"/> is <see langword="null"/>.
/// </exception>
public override void Render(IList<SceneNode> nodes, RenderContext context, RenderOrder order)
{
    if (nodes == null)
        throw new ArgumentNullException("nodes");
    if (context == null)
        throw new ArgumentNullException("context");

    int numberOfNodes = nodes.Count;
    if (numberOfNodes == 0)
        return;

    var graphicsService = context.GraphicsService;
    var graphicsDevice = graphicsService.GraphicsDevice;
    var renderTargetPool = graphicsService.RenderTargetPool;
    int frame = context.Frame;
    var savedRenderState = new RenderStateSnapshot(graphicsDevice);

    // Remember the context state so it can be restored at the end (the callback below
    // renders with a different camera, target and viewport).
    var originalRenderTarget = context.RenderTarget;
    var originalViewport = context.Viewport;
    var originalCameraNode = context.CameraNode;
    var originalLodCameraNode = context.LodCameraNode;
    var originalReferenceNode = context.ReferenceNode;
    try
    {
        // The try-block exists to translate InvalidOperationExceptions (e.g. a SceneQuery
        // modified inside the RenderCallback while it is being enumerated here) into a
        // GraphicsException with a helpful message — see the catch-block below.
        for (int i = 0; i < numberOfNodes; i++)
        {
            var node = nodes[i] as SceneCaptureNode;
            if (node == null)
                continue;

            // Update each node only once per frame.
            if (node.LastFrame == frame)
                continue;

            node.LastFrame = frame;

            var cameraNode = node.CameraNode;
            if (cameraNode == null)
                continue;

            var texture = node.RenderToTexture.Texture;
            if (texture == null)
                continue;

            // RenderToTexture instances can be shared. --> Update them only once per frame.
            if (node.RenderToTexture.LastFrame == frame)
                continue;

            context.CameraNode = cameraNode;
            context.LodCameraNode = cameraNode;
            context.ReferenceNode = node;

            var renderTarget2D = texture as RenderTarget2D;
            var projection = cameraNode.Camera.Projection;
            if (renderTarget2D != null)
            {
                // --- 2D target: render the scene once, directly into the target. ---
                context.RenderTarget = renderTarget2D;
                context.Viewport = new Viewport(0, 0, renderTarget2D.Width, renderTarget2D.Height);

                RenderCallback(context);

                // Update other properties of RenderToTexture.
                node.RenderToTexture.LastFrame = frame;
                node.RenderToTexture.TextureMatrix = GraphicsHelper.ProjectorBiasMatrix
                                                    * projection
                                                    * cameraNode.PoseWorld.Inverse;

                continue;
            }

            var renderTargetCube = texture as RenderTargetCube;
            if (renderTargetCube != null)
            {
                // --- Cube target: render 6 times (once per face) via a pooled 2D target. ---
                var format = new RenderTargetFormat(renderTargetCube) { Mipmap = false };
                renderTarget2D = renderTargetPool.Obtain2D(format);
                context.RenderTarget = renderTarget2D;
                context.Viewport = new Viewport(0, 0, renderTarget2D.Width, renderTarget2D.Height);

                if (_spriteBatch == null)
                    _spriteBatch = graphicsService.GetSpriteBatch();

                // A cube face covers 90°; only a perspective camera with FOV 90°x90° works.
                var perspectiveProjection = projection as PerspectiveProjection;
                if (perspectiveProjection == null)
                    throw new GraphicsException("The camera of the SceneCaptureNode must use a perspective projection.");

                // ReSharper disable CompareOfFloatsByEqualityOperator
                if (perspectiveProjection.FieldOfViewX != ConstantsF.PiOver2
                    || perspectiveProjection.FieldOfViewY != ConstantsF.PiOver2)
                    perspectiveProjection.SetFieldOfView(ConstantsF.PiOver2, 1, projection.Near, projection.Far);
                // ReSharper restore CompareOfFloatsByEqualityOperator

                var originalCameraPose = cameraNode.PoseWorld;
                for (int side = 0; side < 6; side++)
                {
                    // Rotate camera to face the current cube map face.
                    //var cubeMapFace = (CubeMapFace)side;
                    // AMD problem: If we generate in normal order, the last cube map face contains
                    // garbage when mipmaps are created.
                    var cubeMapFace = (CubeMapFace)(5 - side);

                    var position = cameraNode.PoseWorld.Position;
                    cameraNode.View = Matrix44F.CreateLookAt(
                        position,
                        position + originalCameraPose.ToWorldDirection(GraphicsHelper.GetCubeMapForwardDirection(cubeMapFace)),
                        originalCameraPose.ToWorldDirection(GraphicsHelper.GetCubeMapUpDirection(cubeMapFace)));

                    RenderCallback(context);

                    // Copy RGBM texture into cube map face.
                    graphicsDevice.SetRenderTarget(renderTargetCube, cubeMapFace);
                    _spriteBatch.Begin(SpriteSortMode.Immediate, BlendState.Opaque, null, null, null);
                    _spriteBatch.Draw(renderTarget2D, new Vector2(0, 0), Color.White);
                    _spriteBatch.End();
                }

                cameraNode.PoseWorld = originalCameraPose;

                renderTargetPool.Recycle(renderTarget2D);

                // Update other properties of RenderToTexture.
                node.RenderToTexture.LastFrame = frame;
                node.RenderToTexture.TextureMatrix = GraphicsHelper.ProjectorBiasMatrix
                                                    * projection
                                                    * cameraNode.PoseWorld.Inverse;

                continue;
            }

            throw new GraphicsException(
                "SceneCaptureNode.RenderToTexture.Texture is invalid. The texture must be a RenderTarget2D or RenderTargetCube.");
        }
    }
    catch (InvalidOperationException exception)
    {
        throw new GraphicsException(
            "InvalidOperationException was raised in SceneCaptureRenderer.Render(). " +
            "This can happen if a SceneQuery instance that is currently in use is modified in the " +
            "RenderCallback. --> Use different SceneQuery types in the method which calls " +
            "SceneCaptureRenderer.Render() and in the RenderCallback method.",
            exception);
    }

    // Restore device and context state.
    graphicsDevice.SetRenderTarget(null);
    savedRenderState.Restore();

    context.RenderTarget = originalRenderTarget;
    context.Viewport = originalViewport;
    context.CameraNode = originalCameraNode;
    context.LodCameraNode = originalLodCameraNode;
    context.ReferenceNode = originalReferenceNode;
}
/// <summary>
/// Creates a new set of map render parameters, capturing every value needed to
/// lay out and draw a tile map.
/// </summary>
/// <param name="orientation">Orientation of the map (e.g. orthogonal, hexagonal).</param>
/// <param name="renderOrder">Order in which the map's tiles are rendered.</param>
/// <param name="staggerAxis">Stagger axis for staggered/hexagonal maps.</param>
/// <param name="staggerIndex">Stagger index for staggered/hexagonal maps.</param>
/// <param name="width">Width of the map in tiles.</param>
/// <param name="height">Height of the map in tiles.</param>
/// <param name="tileWidth">Width of a single tile in pixels.</param>
/// <param name="tileHeight">Height of a single tile in pixels.</param>
/// <param name="hexSideLength">Side length used by hexagonal maps.</param>
/// <param name="backgroundColor">Color used to clear the map background.</param>
public MapRenderParameters(
    Orientation orientation,
    RenderOrder renderOrder,
    StaggerAxis staggerAxis,
    StaggerIndex staggerIndex,
    int width,
    int height,
    int tileWidth,
    int tileHeight,
    int hexSideLength,
    Color backgroundColor)
{
    // Plain value capture — each assignment is independent of the others.
    BackgroundColor = backgroundColor;
    HexSideLength = hexSideLength;
    TileHeight = tileHeight;
    TileWidth = tileWidth;
    Height = height;
    Width = width;
    MapStaggerIndex = staggerIndex;
    MapStaggerAxis = staggerAxis;
    MapRenderOrder = renderOrder;
    Orientation = orientation;
}
/// <summary>
/// Renders the shadow mask for all visible <c>LightNode</c>s that use a
/// <c>VarianceShadow</c>.
/// </summary>
/// <param name="nodes">The scene nodes to process. Non-matching nodes are skipped.</param>
/// <param name="context">The render context. Must provide a camera, a scene and G-buffer 0.</param>
/// <param name="order">Ignored; nodes are processed in list order.</param>
/// <exception cref="ArgumentNullException">
/// <paramref name="nodes"/> or <paramref name="context"/> is <see langword="null"/>.
/// </exception>
public override void Render(IList<SceneNode> nodes, RenderContext context, RenderOrder order)
{
    if (nodes == null)
        throw new ArgumentNullException("nodes");
    if (context == null)
        throw new ArgumentNullException("context");

    int numberOfNodes = nodes.Count;
    if (numberOfNodes == 0)
        return;

    Debug.Assert(context.CameraNode != null, "A camera node has to be set in the render context.");
    Debug.Assert(context.Scene != null, "A scene has to be set in the render context.");

    var graphicsDevice = _effect.GraphicsDevice;
    var cameraNode = context.CameraNode;
    var cameraPose = cameraNode.PoseWorld;
    var viewInverse = (Matrix)cameraPose;

    // Save the render states that are modified below so they can be restored at the end.
    var originalBlendState = graphicsDevice.BlendState;
    var originalDepthStencilState = graphicsDevice.DepthStencilState;
    var originalRasterizerState = graphicsDevice.RasterizerState;

    graphicsDevice.DepthStencilState = DepthStencilState.None;
    graphicsDevice.RasterizerState = RasterizerState.CullNone;

    Viewport viewport = context.Viewport;
    _parameterViewportSize.SetValue(new Vector2(viewport.Width, viewport.Height));
    GraphicsHelper.GetFrustumFarCorners(cameraNode.Camera.Projection, _frustumFarCorners);
    _parameterFrustumCorners.SetValue(_frustumFarCorners);
    _parameterGBuffer0.SetValue(context.GBuffer0);

    for (int i = 0; i < numberOfNodes; i++)
    {
        var lightNode = nodes[i] as LightNode;
        if (lightNode == null)
            continue;

        var shadow = lightNode.Shadow as VarianceShadow;
        if (shadow == null)
            continue;

        if (shadow.ShadowMap == null || shadow.ShadowMask == null)
            continue;

        // The effect must only render into a specific channel.
        // Do not change blend state if the correct write channels is already set, e.g. if this
        // shadow is part of a CompositeShadow, the correct blend state is already set.
        // CHANGED: use the channel-indexed blend state lookup (same as the other shadow
        // mask renderers in this file) instead of a hand-rolled 4-case switch.
        if ((int)graphicsDevice.BlendState.ColorWriteChannels != (1 << shadow.ShadowMaskChannel))
            graphicsDevice.BlendState = GraphicsHelper.BlendStateWriteSingleChannel[shadow.ShadowMaskChannel];

        _parameterShadowMatrix.SetValue(viewInverse * shadow.ViewProjection);
        _parameterShadowMap.SetValue(shadow.ShadowMap);
        _parameterParameters0.SetValue(new Vector4(
            shadow.ShadowMap.Width,
            shadow.ShadowMap.Height,
            shadow.TargetArea.HasValue ? 0 : shadow.MaxDistance,
            0));
        _parameterParameters1.SetValue(new Vector4(
            shadow.FadeOutRange,
            shadow.MinVariance,
            shadow.LightBleedingReduction,
            shadow.ShadowFog));

        _effect.CurrentTechnique.Passes[0].Apply();
        graphicsDevice.DrawFullScreenQuad();
    }

    // Restore the saved render states.
    graphicsDevice.BlendState = originalBlendState;
    graphicsDevice.DepthStencilState = originalDepthStencilState;
    graphicsDevice.RasterizerState = originalRasterizerState;
}
/// <summary>
/// Renders all visible <c>LightNode</c>s with an <c>AmbientLight</c> additively into the
/// current light buffer, optionally restricted by the node's clip geometry via the
/// stencil buffer.
/// </summary>
/// <param name="nodes">The scene nodes to process. Non-matching nodes are skipped.</param>
/// <param name="context">The render context. Must provide a camera and G-buffers 0/1.</param>
/// <param name="order">Ignored; nodes are processed in list order.</param>
/// <exception cref="ArgumentNullException">
/// <paramref name="nodes"/> or <paramref name="context"/> is <see langword="null"/>.
/// </exception>
public override void Render(IList<SceneNode> nodes, RenderContext context, RenderOrder order)
{
    if (nodes == null)
        throw new ArgumentNullException("nodes");
    if (context == null)
        throw new ArgumentNullException("context");

    int numberOfNodes = nodes.Count;
    if (numberOfNodes == 0)
        return;

    context.Validate(_effect);
    context.ThrowIfCameraMissing();

    var graphicsDevice = _effect.GraphicsDevice;
    var savedRenderState = new RenderStateSnapshot(graphicsDevice);
    graphicsDevice.DepthStencilState = DepthStencilState.None;
    graphicsDevice.RasterizerState = RasterizerState.CullNone;
    // Light contributions are accumulated additively.
    graphicsDevice.BlendState = GraphicsHelper.BlendStateAdd;

    var viewport = graphicsDevice.Viewport;
    _parameterViewportSize.SetValue(new Vector2(viewport.Width, viewport.Height));
    _parameterGBuffer0.SetValue(context.GBuffer0);
    _parameterGBuffer1.SetValue(context.GBuffer1);

    var cameraNode = context.CameraNode;
    Matrix viewProjection = (Matrix)cameraNode.View * cameraNode.Camera.Projection;

    // Update SceneNode.LastFrame for all visible nodes.
    int frame = context.Frame;
    cameraNode.LastFrame = frame;

    var isHdrEnabled = context.IsHdrEnabled();
    for (int i = 0; i < numberOfNodes; i++)
    {
        var lightNode = nodes[i] as LightNode;
        if (lightNode == null)
            continue;

        var light = lightNode.Light as AmbientLight;
        if (light == null)
            continue;

        // LightNode is visible in current frame.
        lightNode.LastFrame = frame;

        // HdrScale is only applied when the pipeline renders in HDR.
        float hdrScale = isHdrEnabled ? light.HdrScale : 1;
        _parameterLightColor.SetValue((Vector3)light.Color * light.Intensity * hdrScale);
        _parameterHemisphericAttenuation.SetValue(light.HemisphericAttenuation);

        // The light's "up" direction in world space (used for hemispheric attenuation).
        Vector3F upWorld = lightNode.PoseWorld.ToWorldDirection(Vector3F.Up);
        _parameterUp.SetValue((Vector3)upWorld);

        if (lightNode.Clip != null)
        {
            // Restrict the light to its clip geometry:
            // 1st pass renders the clip mesh into the stencil buffer only (no color writes),
            // 2nd pass (below) draws the light only where the stencil test passes.
            var data = lightNode.RenderData as LightRenderData;
            if (data == null)
            {
                data = new LightRenderData();
                lightNode.RenderData = data;
            }

            data.UpdateClipSubmesh(context.GraphicsService, lightNode);

            graphicsDevice.DepthStencilState = GraphicsHelper.DepthStencilStateOnePassStencilFail;
            graphicsDevice.BlendState = GraphicsHelper.BlendStateNoColorWrite;

            _parameterWorldViewProjection.SetValue((Matrix)data.ClipMatrix * viewProjection);
            _passClip.Apply();
            data.ClipSubmesh.Draw();

            // InvertClip swaps inside/outside of the clip volume.
            graphicsDevice.DepthStencilState = lightNode.InvertClip
              ? GraphicsHelper.DepthStencilStateStencilEqual0
              : GraphicsHelper.DepthStencilStateStencilNotEqual0;
            graphicsDevice.BlendState = GraphicsHelper.BlendStateAdd;
        }
        else
        {
            graphicsDevice.DepthStencilState = DepthStencilState.None;
        }

        _passLight.Apply();
        graphicsDevice.DrawFullScreenQuad();
    }

    savedRenderState.Restore();
}
/// <summary>
/// Renders all visible <c>CloudLayerNode</c>s: issues an occlusion query to measure how
/// much of the sun is covered by clouds, then draws the cloud layer itself.
/// </summary>
/// <param name="nodes">The scene nodes to process. Non-matching nodes are skipped.</param>
/// <param name="context">The render context. Must provide a camera.</param>
/// <param name="order">Ignored; nodes are processed in list order.</param>
/// <exception cref="ArgumentNullException">
/// <paramref name="nodes"/> or <paramref name="context"/> is <see langword="null"/>.
/// </exception>
public override void Render(IList<SceneNode> nodes, RenderContext context, RenderOrder order)
{
    ThrowIfDisposed();

    if (nodes == null)
        throw new ArgumentNullException("nodes");
    if (context == null)
        throw new ArgumentNullException("context");

    int numberOfNodes = nodes.Count;
    if (numberOfNodes == 0)
        return;

    context.Validate(_effect);
    context.ThrowIfCameraMissing();

    var graphicsDevice = context.GraphicsService.GraphicsDevice;
    var savedRenderState = new RenderStateSnapshot(graphicsDevice);

    // Camera properties
    int viewportHeight = graphicsDevice.Viewport.Height;
    var cameraNode = context.CameraNode;
    var projection = cameraNode.Camera.Projection;
    _parameterProjection.SetValue(projection);

    // Update SceneNode.LastFrame for all visible nodes.
    int frame = context.Frame;
    cameraNode.LastFrame = frame;

    for (int i = 0; i < numberOfNodes; i++)
    {
        var node = nodes[i] as CloudLayerNode;
        if (node == null)
            continue;

        // CloudLayerNode is visible in current frame.
        node.LastFrame = frame;

        if (node.CloudMap.Texture == null)
            continue;

        var sunDirection = node.SunDirection;
        _parameterSunDirection.SetValue((Vector3)sunDirection);
        _parameterSkyCurvature.SetValue(node.SkyCurvature);
        _parameterTextureMatrix.SetValue((Matrix)new Matrix44F(node.TextureMatrix, Vector3F.Zero));

        // The sample at the pixel counts as one, the rest are for the blur.
        // Note: We must not set -1 because a for loop like
        //   for (int i = 0; i < -1, i++)
        // crashes the AMD DX9 WP8.1 graphics driver.
        // FIX: removed a stray "LOL" token that preceded this statement and broke compilation.
        _parameterNumberOfSamples.SetValue(Math.Max(0, node.NumberOfSamples - 1));
        _parameterSampleDistance.SetValue(node.SampleDistance);
        _parameterScatterParameters.SetValue(new Vector3(node.ForwardScatterExponent, node.ForwardScatterScale, node.ForwardScatterOffset));
        _parameterHorizonFade.SetValue(new Vector2(node.HorizonFade, node.HorizonBias));
        _parameterSunLight.SetValue((Vector3)node.SunLight);
        _parameterAmbientLight.SetValue(new Vector4((Vector3)node.AmbientLight, node.Alpha));
        _parameterTexture.SetValue(node.CloudMap.Texture);

        // Occlusion query. (Not available in the Reach profile.)
        if (graphicsDevice.GraphicsProfile != GraphicsProfile.Reach && node.SunQuerySize >= Numeric.EpsilonF)
        {
            bool skipQuery = false;
            if (node.OcclusionQuery != null)
            {
                if (node.OcclusionQuery.IsComplete)
                {
                    node.TryUpdateSunOcclusion();
                }
                else
                {
                    // The previous query is still not finished. Do not start a new query, this would
                    // create a SharpDX warning.
                    skipQuery = true;
                }
            }
            else
            {
                node.OcclusionQuery = new OcclusionQuery(graphicsDevice);
            }

            if (!skipQuery)
            {
                node.IsQueryPending = true;

                float totalPixels = viewportHeight * node.SunQuerySize;
                totalPixels *= totalPixels;
                node.QuerySize = totalPixels;

                // Use a camera which looks at the sun.
                // Get an relative up vector which is not parallel to the forward direction.
                var lookAtUp = Vector3F.UnitY;
                if (Vector3F.AreNumericallyEqual(sunDirection, lookAtUp))
                    lookAtUp = Vector3F.UnitZ;

                Vector3F zAxis = -sunDirection;
                Vector3F xAxis = Vector3F.Cross(lookAtUp, zAxis).Normalized;
                Vector3F yAxis = Vector3F.Cross(zAxis, xAxis);

                var lookAtSunView = new Matrix(xAxis.X, yAxis.X, zAxis.X, 0,
                                               xAxis.Y, yAxis.Y, zAxis.Y, 0,
                                               xAxis.Z, yAxis.Z, zAxis.Z, 0,
                                               0, 0, 0, 1);
                _parameterView.SetValue(lookAtSunView);

                graphicsDevice.BlendState = GraphicsHelper.BlendStateNoColorWrite;
                graphicsDevice.DepthStencilState = DepthStencilState.None;
                graphicsDevice.RasterizerState = RasterizerState.CullNone;

                // Create small quad shortly behind the near plane.
                // Note: We use an "untranslated" view matrix, so we can ignore the camera position.
                float width = (projection.Top - projection.Bottom) * node.SunQuerySize;
                Vector3F right = sunDirection.Orthonormal1 * (width / 2);
                Vector3F up = sunDirection.Orthonormal2 * (width / 2);
                Vector3F center = sunDirection * (projection.Near * 1.0001f);
                _queryGeometry[0] = center - up - right;
                _queryGeometry[1] = center + up - right;
                _queryGeometry[2] = center - up + right;
                _queryGeometry[3] = center + up + right;

                if (node.CloudMap.Texture.Format == SurfaceFormat.Alpha8)
                    _passOcclusionAlpha.Apply();
                else
                    _passOcclusionRgb.Apply();

                node.OcclusionQuery.Begin();
                graphicsDevice.DrawUserPrimitives(PrimitiveType.TriangleStrip, _queryGeometry, 0, 2,
                                                  VertexPosition.VertexDeclaration);
                node.OcclusionQuery.End();
            }
        }
        else
        {
            node.IsQueryPending = false;
            node.SunOcclusion = 0;
        }

        // Render clouds with an untranslated view matrix (sky is at infinite distance).
        Matrix viewUntranslated = (Matrix)new Matrix44F(cameraNode.PoseWorld.Orientation.Transposed, new Vector3F(0));
        _parameterView.SetValue(viewUntranslated);

        graphicsDevice.BlendState = BlendState.AlphaBlend;
        graphicsDevice.RasterizerState = RasterizerState.CullNone;
        graphicsDevice.DepthStencilState = DepthStencilState.DepthRead;

        // Select pass by texture format and linear (HDR) vs. gamma output.
        if (context.IsHdrEnabled())
        {
            if (node.CloudMap.Texture.Format == SurfaceFormat.Alpha8)
                _passCloudAlphaLinear.Apply();
            else
                _passCloudRgbLinear.Apply();
        }
        else
        {
            if (node.CloudMap.Texture.Format == SurfaceFormat.Alpha8)
                _passCloudAlphaGamma.Apply();
            else
                _passCloudRgbGamma.Apply();
        }

        _submesh.Draw();
    }

    savedRenderState.Restore();
}
/// <summary>
/// Renders cube map shadow maps for all visible <c>LightNode</c>s that carry a
/// <c>PointLight</c> with a <c>CubeMapShadow</c>: one render pass per cube map face,
/// using the registered <c>RenderCallback</c>.
/// </summary>
/// <param name="nodes">The scene nodes to process. Non-matching nodes are skipped.</param>
/// <param name="context">The render context; camera/target/viewport are temporarily
/// replaced and restored before this method returns.</param>
/// <param name="order">Ignored; nodes are processed in list order.</param>
/// <exception cref="ArgumentNullException">
/// <paramref name="nodes"/> or <paramref name="context"/> is <see langword="null"/>.
/// </exception>
/// <exception cref="GraphicsException">
/// A <c>CubeMapShadow</c> is attached to a light that is not a <c>PointLight</c>.
/// </exception>
public override void Render(IList<SceneNode> nodes, RenderContext context, RenderOrder order)
{
    if (nodes == null)
        throw new ArgumentNullException("nodes");
    if (context == null)
        throw new ArgumentNullException("context");

    int numberOfNodes = nodes.Count;
    if (numberOfNodes == 0)
        return;

    context.ThrowIfCameraMissing();
    context.ThrowIfSceneMissing();

    // Remember context state that is modified below so it can be restored at the end.
    var originalRenderTarget = context.RenderTarget;
    var originalViewport = context.Viewport;
    var originalReferenceNode = context.ReferenceNode;

    var cameraNode = context.CameraNode;

    // Update SceneNode.LastFrame for all visible nodes.
    int frame = context.Frame;
    cameraNode.LastFrame = frame;

    // The scene node renderer should use the light camera instead of the player camera.
    context.CameraNode = _perspectiveCameraNode;
    context.Technique = "Omnidirectional";

    var graphicsService = context.GraphicsService;
    var graphicsDevice = graphicsService.GraphicsDevice;
    var renderTargetPool = graphicsService.RenderTargetPool;
    var savedRenderState = new RenderStateSnapshot(graphicsDevice);

    for (int i = 0; i < numberOfNodes; i++)
    {
        var lightNode = nodes[i] as LightNode;
        if (lightNode == null)
            continue;

        var shadow = lightNode.Shadow as CubeMapShadow;
        if (shadow == null)
            continue;

        var light = lightNode.Light as PointLight;
        if (light == null)
            throw new GraphicsException("CubeMapShadow can only be used with a PointLight.");

        // LightNode is visible in current frame.
        lightNode.LastFrame = frame;

        // Obtain a cube shadow map from the pool on first use.
        if (shadow.ShadowMap == null)
        {
            shadow.ShadowMap = renderTargetPool.ObtainCube(
              new RenderTargetFormat(
                shadow.PreferredSize,
                null,
                false,
                shadow.Prefer16Bit ? SurfaceFormat.HalfSingle : SurfaceFormat.Single,
                DepthFormat.Depth24));
        }

        // Each cube face covers exactly 90° up to the light's range.
        ((PerspectiveProjection)_perspectiveCameraNode.Camera.Projection).SetFieldOfView(
          ConstantsF.PiOver2, 1, shadow.Near, light.Range);

        // World units per texel at a planar distance of 1 world unit.
        float unitsPerTexel = _perspectiveCameraNode.Camera.Projection.Width / (shadow.ShadowMap.Size * shadow.Near);

        // Convert depth bias from "texel" to world space.
        // Minus to move receiver closer to light.
        shadow.EffectiveDepthBias = -shadow.DepthBias * unitsPerTexel;

        // Convert normal offset from "texel" to world space.
        shadow.EffectiveNormalOffset = shadow.NormalOffset * unitsPerTexel;

        var pose = lightNode.PoseWorld;

        context.ReferenceNode = lightNode;
        context.Object = shadow;

        bool shadowMapContainsSomething = false;
        for (int side = 0; side < 6; side++)
        {
            context.Data[RenderContextKeys.ShadowTileIndex] = BoxedIntegers[side];
            graphicsDevice.SetRenderTarget(shadow.ShadowMap, CubeMapFaces[side]);
            // context.RenderTarget = shadow.ShadowMap;   // TODO: Support cube maps targets in the render context.
            context.Viewport = graphicsDevice.Viewport;

            graphicsDevice.Clear(Color.White);

            // Point the light camera at the current cube map face.
            _perspectiveCameraNode.View = Matrix44F.CreateLookAt(
              pose.Position,
              pose.ToWorldPosition(CubeMapForwardVectors[side]),
              pose.ToWorldDirection(CubeMapUpVectors[side]));

            // Abort if this cube map frustum does not touch the camera frustum.
            if (!context.Scene.HaveContact(cameraNode, _perspectiveCameraNode))
                continue;

            graphicsDevice.DepthStencilState = DepthStencilState.Default;
            graphicsDevice.RasterizerState = RasterizerState.CullCounterClockwise;
            graphicsDevice.BlendState = BlendState.Opaque;

            shadowMapContainsSomething |= RenderCallback(context);
        }

        // Recycle shadow map if empty.
        if (!shadowMapContainsSomething)
        {
            renderTargetPool.Recycle(shadow.ShadowMap);
            shadow.ShadowMap = null;
        }
    }

    graphicsDevice.SetRenderTarget(null);
    savedRenderState.Restore();

    context.CameraNode = cameraNode;
    context.Technique = null;
    context.RenderTarget = originalRenderTarget;
    context.Viewport = originalViewport;
    context.ReferenceNode = originalReferenceNode;
    context.Object = null;
    context.Data[RenderContextKeys.ShadowTileIndex] = null;
}
/// <summary>
/// Renders all visible <c>WaterNode</c>s: sets up lighting, fog, reflection, refraction,
/// wave and flow effect parameters, then draws the water surface (and underwater geometry
/// when the camera is below the surface).
/// </summary>
/// <param name="nodes">The scene nodes to process. Non-matching nodes are skipped.</param>
/// <param name="context">The render context. Must provide a camera and a scene; the
/// source texture may be temporarily swapped and is restored before returning.</param>
/// <param name="order">Ignored; nodes are processed in list order.</param>
/// <exception cref="ArgumentNullException">
/// <paramref name="nodes"/> or <paramref name="context"/> is <see langword="null"/>.
/// </exception>
public override void Render(IList<SceneNode> nodes, RenderContext context, RenderOrder order)
{
    ThrowIfDisposed();

    if (nodes == null)
        throw new ArgumentNullException("nodes");
    if (context == null)
        throw new ArgumentNullException("context");

    int numberOfNodes = nodes.Count;
    if (numberOfNodes == 0)
        return;

    context.Validate(_effect);
    context.ThrowIfCameraMissing();

    float deltaTime = (float)context.DeltaTime.TotalSeconds;
    var graphicsService = context.GraphicsService;
    var graphicsDevice = graphicsService.GraphicsDevice;
    var renderTargetPool = graphicsService.RenderTargetPool;
    var cameraNode = context.CameraNode;
    Projection projection = cameraNode.Camera.Projection;
    Pose view = cameraNode.PoseWorld.Inverse;

    // Around the camera we push the waves down to avoid that the camera cuts the near plane.
    // Get largest vector from camera to near plane corners.
    float nearPlaneRadius =
      new Vector3F(Math.Max(Math.Abs(projection.Right), Math.Abs(projection.Left)),
                   Math.Max(Math.Abs(projection.Top), Math.Abs(projection.Bottom)),
                   projection.Near
                  ).Length;

    var originalSourceTexture = context.SourceTexture;

    // Update SceneNode.LastFrame for all visible nodes.
    int frame = context.Frame;
    cameraNode.LastFrame = frame;

    var savedRenderState = new RenderStateSnapshot(graphicsDevice);

    // Water surface is opaque.
    graphicsDevice.BlendState = BlendState.Opaque;

    #region ----- Common Effect Parameters -----

    _parameterView.SetValue(view);
    _parameterProjection.SetValue(projection);
    _parameterCameraParameters.SetValue(new Vector4(
      (Vector3)cameraNode.PoseWorld.Position,
      cameraNode.Camera.Projection.Far));

    var viewport = graphicsDevice.Viewport;
    _parameterViewportSize.SetValue(new Vector2(viewport.Width, viewport.Height));

    _parameterTime.SetValue((float)context.Time.TotalSeconds);

    // Query ambient and directional lights.
    var lightQuery = context.Scene.Query<GlobalLightQuery>(cameraNode, context);
    Vector3F ambientLight = Vector3F.Zero;
    if (lightQuery.AmbientLights.Count > 0)
    {
        var light = (AmbientLight)lightQuery.AmbientLights[0].Light;
        ambientLight = light.Color * light.Intensity * light.HdrScale;
    }

    _parameterAmbientLight.SetValue((Vector3)ambientLight);

    // Fall back to a straight-down light direction if no directional light is found.
    Vector3F directionalLightDirection = new Vector3F(0, -1, 0);
    Vector3F directionalLightIntensity = Vector3F.Zero;
    if (lightQuery.DirectionalLights.Count > 0)
    {
        var lightNode = lightQuery.DirectionalLights[0];
        var light = (DirectionalLight)lightNode.Light;
        directionalLightDirection = -lightNode.PoseWorld.Orientation.GetColumn(2);
        directionalLightIntensity = light.Color * light.SpecularIntensity * light.HdrScale;
    }

    _parameterDirectionalLightDirection.SetValue((Vector3)directionalLightDirection);
    _parameterDirectionalLightIntensity.SetValue((Vector3)directionalLightIntensity);

    _parameterGBuffer0.SetValue(context.GBuffer0);

    if (_parameterNoiseMap != null)
        _parameterNoiseMap.SetValue(_noiseMap);
    #endregion

    #region ----- Fog Parameters -----

    var fogNodes = context.Scene.Query<FogQuery>(cameraNode, context).FogNodes;
    SetFogParameters(fogNodes, cameraNode, directionalLightDirection);
    #endregion

    _parameterProjectedGridParameters.SetValue(new Vector3(
        ProjectedGridParameters.EdgeAttenuation,
        ProjectedGridParameters.DistanceAttenuationStart,
        ProjectedGridParameters.DistanceAttenuationEnd));

    for (int i = 0; i < numberOfNodes; i++)
    {
        var node = nodes[i] as WaterNode;
        if (node == null)
            continue;

        // Node is visible in current frame.
        node.LastFrame = frame;

        var data = node.RenderData as WaterRenderData;
        if (data == null)
        {
            data = new WaterRenderData();
            node.RenderData = data;
        }

        var water = node.Water;
        bool isCameraUnderwater = node.EnableUnderwaterEffect && node.IsUnderwater(cameraNode.PoseWorld.Position);

        #region ----- Wave bending -----

        // Waves should not cut the near plane. --> Bend waves up or down if necessary.

        // Limits
        float upperLimit;   // Waves must not move above this value.
        float lowerLimit;   // Waves must not move below this value.

        // Bending fades in over interval [bendStart, bendEnd]:
        //   distance ≤ bendStart ............. Wave is bent up or down.
        //   bendStart < distance < bendEnd ... Lerp between normal wave and bent wave.
        //   distance ≥ bendEnd ............... Normal wave.
        float bendStart = 1 * nearPlaneRadius;
        float bendEnd = 10 * nearPlaneRadius;

        if (!isCameraUnderwater)
        {
            // Bend waves down below the camera.
            upperLimit = cameraNode.PoseWorld.Position.Y - nearPlaneRadius;
            lowerLimit = -1e20f;

            if (node.EnableUnderwaterEffect)
            {
                if (node.Waves == null || node.Waves.DisplacementMap == null)
                {
                    // No displacement. The wave bending stuff does not work because the surface
                    // is usually not tessellated. We have to render the underwater geometry when
                    // camera near plane might cut the water surface.
                    if (node.Volume == null)
                    {
                        // Test water plane.
                        isCameraUnderwater = (cameraNode.PoseWorld.Position.Y - nearPlaneRadius) < node.PoseWorld.Position.Y;
                    }
                    else
                    {
                        // Test water AABB.
                        var aabb = node.Aabb;
                        aabb.Minimum -= new Vector3F(nearPlaneRadius);
                        aabb.Maximum += new Vector3F(nearPlaneRadius);
                        isCameraUnderwater = GeometryHelper.HaveContact(aabb, cameraNode.PoseWorld.Position);
                    }
                }
            }
        }
        else
        {
            // Camera is underwater, bend triangles up above camera.
            upperLimit = 1e20f;
            lowerLimit = cameraNode.PoseWorld.Position.Y + nearPlaneRadius;
        }

        _parameterCameraMisc.SetValue(new Vector4(upperLimit, lowerLimit, bendStart, bendEnd));
        #endregion

        // Update the submesh for the given water volume.
        data.UpdateSubmesh(graphicsService, node);

        #region ----- Scroll Normal Maps -----

        // We update the normal map offsets once(!) per frame.
        // Note: We could skip the offsets and compute all in the shader using absolute
        // time instead of deltaTime, but then the user cannot change the NormalMapVelocity
        // smoothly.
        if (data.LastNormalUpdateFrame != frame)
        {
            data.LastNormalUpdateFrame = frame;

            var baseVelocity = (node.Flow != null) ? node.Flow.BaseVelocity : Vector3F.Zero;

            // Increase offset.
            // (Note: We have to subtract value and divide by scale because if the normal
            // should scroll to the right, we have to move the texcoords in the other direction.)
            data.NormalMapOffset0.X -= (water.NormalMap0Velocity.X + baseVelocity.X) * deltaTime / water.NormalMap0Scale;
            data.NormalMapOffset0.Y -= (water.NormalMap0Velocity.Z + baseVelocity.Y) * deltaTime / water.NormalMap0Scale;
            data.NormalMapOffset1.X -= (water.NormalMap1Velocity.X + baseVelocity.X) * deltaTime / water.NormalMap1Scale;
            data.NormalMapOffset1.Y -= (water.NormalMap1Velocity.Z + baseVelocity.Y) * deltaTime / water.NormalMap1Scale;

            // Keep only the fractional part to avoid overflow.
            data.NormalMapOffset0.X = MathHelper.Frac(data.NormalMapOffset0.X);
            data.NormalMapOffset0.Y = MathHelper.Frac(data.NormalMapOffset0.Y);
            data.NormalMapOffset1.X = MathHelper.Frac(data.NormalMapOffset1.X);
            data.NormalMapOffset1.Y = MathHelper.Frac(data.NormalMapOffset1.Y);
        }
        #endregion

        _parameterSurfaceLevel.SetValue(node.PoseWorld.Position.Y);

        #region ----- Reflection Parameters -----

        if (node.PlanarReflection != null
            && node.PlanarReflection.ActualIsEnabled
            && node.PlanarReflection.RenderToTexture.Texture is Texture2D)
        {
            // Planar reflection.
            var renderToTexture = node.PlanarReflection.RenderToTexture;
            var texture = (Texture2D)renderToTexture.Texture;
            _parameterReflectionTypeParameters.SetValue(new Vector2(0, 1));
            _parameterReflectionMatrix.SetValue((Matrix)renderToTexture.TextureMatrix);
            _parameterReflectionTextureSize.SetValue(new Vector2(texture.Width, texture.Height));
            if (_parameterPlanarReflectionMap != null)
                _parameterPlanarReflectionMap.SetValue(texture);

            _parameterReflectionParameters.SetValue(new Vector4(
              (Vector3)water.ReflectionColor,
              water.ReflectionDistortion));
        }
        else if (node.SkyboxReflection != null)
        {
            // Cube map reflection.
            var rgbmEncoding = node.SkyboxReflection.Encoding as RgbmEncoding;
            float rgbmMax = 1;
            if (rgbmEncoding != null)
                rgbmMax = GraphicsHelper.ToGamma(rgbmEncoding.Max);
            else if (!(node.SkyboxReflection.Encoding is SRgbEncoding))
                throw new NotImplementedException("The reflected skybox must be encoded using sRGB or RGBM.");

            _parameterReflectionTypeParameters.SetValue(new Vector2(1, rgbmMax));

            // Cube maps are left handed --> Sample with inverted z. (Otherwise, the
            // cube map and objects or texts in it are mirrored.)
            var mirrorZ = Matrix44F.CreateScale(1, 1, -1);
            Matrix33F orientation = node.SkyboxReflection.PoseWorld.Orientation;
            _parameterReflectionMatrix.SetValue((Matrix)(new Matrix44F(orientation, Vector3F.Zero) * mirrorZ));

            if (_parameterCubeReflectionMap != null)
                _parameterCubeReflectionMap.SetValue(node.SkyboxReflection.Texture);

            _parameterReflectionParameters.SetValue(new Vector4(
              (Vector3)(water.ReflectionColor * node.SkyboxReflection.Color),
              water.ReflectionDistortion));
        }
        else
        {
            // No reflection texture. The reflection shows only the ReflectionColor.
            _parameterReflectionTypeParameters.SetValue(new Vector2(-1, 1));
            _parameterReflectionParameters.SetValue(new Vector4(
              (Vector3)water.ReflectionColor,
              water.ReflectionDistortion));
        }
        #endregion

        #region ----- Refraction Parameters -----

        // If we do not have a source texture, resolve the current render target
        // and immediately rebuilt it.
        if (context.SourceTexture == null && context.RenderTarget != null)
        {
            // Get RebuildZBufferRenderer from RenderContext.
            RebuildZBufferRenderer rebuildZBufferRenderer = null;
            object obj;
            if (context.Data.TryGetValue(RenderContextKeys.RebuildZBufferRenderer, out obj))
                rebuildZBufferRenderer = obj as RebuildZBufferRenderer;

            // If we didn't find the renderer in the context, use a default instance.
            if (rebuildZBufferRenderer == null)
            {
                if (_defaultRebuildZBufferRenderer == null)
                    _defaultRebuildZBufferRenderer = new RebuildZBufferRenderer(graphicsService);

                rebuildZBufferRenderer = _defaultRebuildZBufferRenderer;
            }

            // Swap: current target becomes the source texture; render into a fresh pooled target.
            context.SourceTexture = context.RenderTarget;
            context.RenderTarget = renderTargetPool.Obtain2D(new RenderTargetFormat(context.RenderTarget));
            graphicsDevice.SetRenderTarget(context.RenderTarget);
            graphicsDevice.Viewport = context.Viewport;
            rebuildZBufferRenderer.Render(context, context.SourceTexture);
        }

        _parameterRefractionTexture.SetValue(context.SourceTexture);
        _parameterRefractionParameters.SetValue(new Vector4(
          ((Vector3)water.RefractionColor),
          water.RefractionDistortion));
        #endregion

        #region ----- Other Water Effect Parameters -----

        if (water.NormalMap0 != null)
        {
            if (_parameterNormalMap0 != null)
                _parameterNormalMap0.SetValue(water.NormalMap0);

            _parameterNormalMap0Parameters.SetValue(new Vector4(
              1 / water.NormalMap0Scale,
              data.NormalMapOffset0.X,
              data.NormalMapOffset0.Y,
              water.NormalMap0Strength));
        }
        else
        {
            if (_parameterNormalMap0 != null)
                _parameterNormalMap0.SetValue(_graphicsService.GetDefaultNormalTexture());

            _parameterNormalMap0Parameters.SetValue(new Vector4(1, 0, 0, 0));
        }

        if (water.NormalMap1 != null)
        {
            if (_parameterNormalMap1 != null)
                _parameterNormalMap1.SetValue(water.NormalMap1);

            _parameterNormalMap1Parameters.SetValue(new Vector4(
              1 / water.NormalMap1Scale,
              data.NormalMapOffset1.X,
              data.NormalMapOffset1.Y,
              water.NormalMap1Strength));
        }
        else
        {
            if (_parameterNormalMap1 != null)
                _parameterNormalMap1.SetValue(_graphicsService.GetDefaultNormalTexture());

            _parameterNormalMap1Parameters.SetValue(new Vector4(1, 0, 0, 0));
        }

        _parameterSpecularParameters.SetValue(new Vector4((Vector3)water.SpecularColor, water.SpecularPower));
        _parameterUnderwaterFogParameters.SetValue((Vector3)water.UnderwaterFogDensity);
        _parameterFresnelParameters.SetValue(new Vector3(water.FresnelBias, water.FresnelScale, water.FresnelPower));
        _parameterIntersectionSoftness.SetValue(water.IntersectionSoftness);

        // We apply some arbitrary scale factors to the water and scatter colors to
        // move the values into a similar range from the user's perspective.
        _parameterWaterColor.SetValue((Vector3)water.WaterColor / 10);
        _parameterScatterColor.SetValue((Vector3)water.ScatterColor);

        if (_parameterFoamMap != null)
        {
            _parameterFoamMap.SetValue(water.FoamMap);
            _parameterFoamParameters0.SetValue(new Vector4(
              (Vector3)water.FoamColor,
              1 / water.FoamMapScale));

            _parameterFoamParameters1.SetValue(new Vector4(
              water.FoamDistortion,
              water.FoamShoreIntersection,
              // Enable crest foam only if we have waves.
              node.Waves != null ? water.FoamCrestMin : float.MaxValue,
              water.FoamCrestMax));
        }

        _parameterCausticsSampleCount.SetValue(water.CausticsSampleCount);
        _parameterCausticsParameters.SetValue(new Vector4(
          water.CausticsSampleOffset,
          water.CausticsDistortion,
          water.CausticsPower,
          water.CausticsIntensity));
        #endregion

        #region ----- Wave Map -----

        var waves = node.Waves;
        // The displacement map can be null but the normal map must not be null.
        if (waves != null && waves.NormalMap != null)
        {
            // Type: 0 = Tiling, 1 = Clamp.
            float waveType;
            if (waves.IsTiling)
                waveType = 0;
            else
                waveType = 1;

            _parameterWaveMapParameters.SetValue(new Vector4(
              1.0f / waves.TileSize,                       // Scale
              0.5f - waves.TileCenter.X / waves.TileSize,  // Offset X
              0.5f - waves.TileCenter.Z / waves.TileSize,  // Offset Y
              waveType));

            if (_parameterDisplacementTexture != null)
            {
                if (waves.DisplacementMap != null)
                    _parameterDisplacementTexture.SetValue(waves.DisplacementMap);
                else
                    _parameterDisplacementTexture.SetValue(graphicsService.GetDefaultTexture2DBlack4F());
            }

            _parameterWaveMapSize.SetValue(new Vector2(
              waves.NormalMap.Width,
              waves.NormalMap.Height));

            if (_parameterWaveNormalMap != null)
                _parameterWaveNormalMap.SetValue(waves.NormalMap);
        }
        else
        {
            _parameterWaveMapParameters.SetValue(new Vector4(0, 0, 0, 0));
        }
        #endregion

        #region ----- Flow -----

        if (node.Flow != null)
        {
            var flow = node.Flow;
            float flowMapSpeed = (flow.FlowMap != null) ? flow.FlowMapSpeed : 0;
            _parameterFlowParameters0.SetValue(new Vector4(flow.SurfaceSlopeSpeed, flowMapSpeed, flow.CycleDuration, flow.MaxSpeed));
            _parameterFlowParameters1.SetValue(new Vector3(flow.MinStrength, 1 / flow.NoiseMapScale, flow.NoiseMapStrength));

            if (_parameterFlowMap != null)
                _parameterFlowMap.SetValue(flow.FlowMap);

            // Get world space (x, z) to texture space matrix.
            Aabb aabb = node.Shape.GetAabb();
            Vector3F extent = aabb.Extent;
            Matrix44F m = Matrix44F.CreateScale(1 / extent.X, 1, 1 / extent.Z)
                          * Matrix44F.CreateTranslation(-aabb.Minimum.X, 0, -aabb.Minimum.Z)
                          * Matrix44F.CreateScale(1 / node.ScaleLocal.X, 1, 1 / node.ScaleLocal.Z)
                          * node.PoseWorld.Inverse;

            // We use a 3x3 2d scale/rotation/translation matrix, ignoring the y component.
            _parameterFlowMapTextureMatrix.SetValue(new Matrix(m.M00, m.M20, 0, 0,
                                                               m.M02, m.M22, 0, 0,
                                                               m.M03, m.M23, 1, 0,
                                                               0, 0, 0, 0));

            // Get local flow direction to world flow direction matrix.
            // We use a 2x2 2d rotation matrix, ignoring the y component.
            var r = node.PoseWorld.Orientation;
            _parameterFlowMapWorldMatrix.SetValue(new Matrix(r.M00, r.M20, 0, 0,
                                                             r.M02, r.M22, 0, 0,
                                                             0, 0, 0, 0,
                                                             0, 0, 0, 0));
        }
        else
        {
            _parameterFlowParameters0.SetValue(new Vector4(0, 0, 0, 0));
            _parameterFlowParameters1.SetValue(new Vector3(0, 0, 0));
        }
        #endregion

        if (isCameraUnderwater)
            RenderUnderwaterGeometry(node, cameraNode);

        RenderSurface(node, cameraNode, isCameraUnderwater);
    }

    // Reset texture effect parameters.
    _parameterGBuffer0.SetValue((Texture2D)null);
    _parameterRefractionTexture.SetValue((Texture2D)null);

    if (_parameterPlanarReflectionMap != null)
        _parameterPlanarReflectionMap.SetValue((Texture2D)null);

    if (_parameterCubeReflectionMap != null)
        _parameterCubeReflectionMap.SetValue((TextureCube)null);

    if (_parameterNormalMap0 != null)
        _parameterNormalMap0.SetValue((Texture2D)null);

    if (_parameterNormalMap1 != null)
        _parameterNormalMap1.SetValue((Texture2D)null);

    if (_parameterDisplacementTexture != null)
        _parameterDisplacementTexture.SetValue((Texture2D)null);

    if (_parameterNoiseMap != null)
        _parameterNoiseMap.SetValue((Texture2D)null);

    if (_parameterWaveNormalMap != null)
        _parameterWaveNormalMap.SetValue((Texture2D)null);

    if (_parameterFlowMap != null)
        _parameterFlowMap.SetValue((Texture2D)null);

    // This seems to be necessary because the Displacement Texture (vertex texture!)
    // is not automatically removed from the texture stage, and the WaterWavesRenderer
    // cannot write into it. XNA Bug!?
    _passProjectedGrid.Apply();

    savedRenderState.Restore();

    // Restore original render context.
    if (originalSourceTexture == null)
    {
        // Current render target has been resolved and used as source texture.
        // A new render target (from pool) has been set. (See region "Refraction Parameters".)
        // --> Previous render target needs to be recycled.
        renderTargetPool.Recycle(context.SourceTexture);
    }
    context.SourceTexture = originalSourceTexture;
}
/// <inheritdoc/>
public override void Render(IList<SceneNode> nodes, RenderContext context, RenderOrder order)
{
  ThrowIfDisposed();
  if (nodes == null)
    throw new ArgumentNullException("nodes");
  if (context == null)
    throw new ArgumentNullException("context");

  int count = nodes.Count;
  if (count == 0)
    return;

  context.ThrowIfCameraMissing();

  // Remember all context state that is modified below so it can be restored at the end.
  var originalRenderTarget = context.RenderTarget;
  var originalViewport = context.Viewport;
  var originalSceneNode = context.SceneNode;
  var originalTechnique = context.Technique;

  var graphicsDevice = context.GraphicsService.GraphicsDevice;
  var savedRenderState = new RenderStateSnapshot(graphicsDevice);

  //int frame = context.Frame;
  //float deltaTime = (float)context.DeltaTime.TotalSeconds;

  // Process the base and detail clipmaps of every terrain node.
  for (int i = 0; i < count; i++)
  {
    var terrainNode = nodes[i] as TerrainNode;
    if (terrainNode == null)
      continue;

    context.SceneNode = terrainNode;

    context.RenderPass = RenderPassBase;
    ProcessClipmap(terrainNode, terrainNode.BaseClipmap, context);

    context.RenderPass = RenderPassDetail;
    ProcessClipmap(terrainNode, terrainNode.DetailClipmap, context);
  }

  context.RenderPass = null;

  // Clear invalid regions stored in terrain. (Note: Terrains can be shared,
  // so this is done in a second pass after all clipmaps were processed.)
  for (int i = 0; i < count; i++)
  {
    var terrainNode = nodes[i] as TerrainNode;
    if (terrainNode == null)
      continue;

    terrainNode.Terrain.InvalidBaseRegions.Clear();
    terrainNode.Terrain.InvalidDetailRegions.Clear();
  }

  // The clipmap layers use a MipMapLodBias which must be reset.
  graphicsDevice.ResetSamplerStates();

  savedRenderState.Restore();
  graphicsDevice.SetRenderTarget(null);
  context.RenderTarget = originalRenderTarget;
  context.Viewport = originalViewport;
  context.SceneNode = originalSceneNode;
  context.MaterialBinding = null;
  context.MaterialInstanceBinding = null;
  context.Technique = originalTechnique;
}
// Animates the procedural layers of every visible LayeredCloudMap and composites
// them into the cloud map texture. Each cloud map is updated at most once per frame.
public override void Render(IList<SceneNode> nodes, RenderContext context, RenderOrder order)
{
  ThrowIfDisposed();
  if (nodes == null)
    throw new ArgumentNullException("nodes");
  if (context == null)
    throw new ArgumentNullException("context");

  int numberOfNodes = nodes.Count;
  if (nodes.Count == 0)
    return;

  context.Validate(_effect);

  // Render targets/viewport are changed below; save them for restoration at the end.
  var originalRenderTarget = context.RenderTarget;
  var originalViewport = context.Viewport;

  var graphicsDevice = context.GraphicsService.GraphicsDevice;
  var savedRenderState = new RenderStateSnapshot(graphicsDevice);
  graphicsDevice.BlendState = BlendState.Opaque;
  graphicsDevice.RasterizerState = RasterizerState.CullNone;
  graphicsDevice.DepthStencilState = DepthStencilState.None;

  int frame = context.Frame;
  float deltaTime = (float)context.DeltaTime.TotalSeconds;

  for (int nodeIndex = 0; nodeIndex < numberOfNodes; nodeIndex++)
  {
    // Only CloudLayerNodes with a procedural LayeredCloudMap are handled here.
    var cloudNode = nodes[nodeIndex] as CloudLayerNode;
    if (cloudNode == null)
      continue;

    var cloudMap = cloudNode.CloudMap as LayeredCloudMap;
    if (cloudMap == null)
      continue;

    // We update the cloud map only once per frame.
    if (cloudMap.LastFrame == frame)
      continue;

    cloudMap.LastFrame = frame;

    var layers = cloudMap.Layers;
    var animationTimes = cloudMap.AnimationTimes;
    var sources = cloudMap.SourceLayers;
    var targets = cloudMap.TargetLayers;
    var renderTargets = cloudMap.LayerTextures;

    // Animate the cloud map layers. Each animated layer cross-fades between a
    // "source" and a "target" region of the shared noise texture; when the fade
    // completes, target becomes the new source and a fresh random target is chosen.
    for (int i = 0; i < LayeredCloudMap.NumberOfTextures; i++)
    {
      // Skip missing layers and layers with a user-defined (static) texture.
      if (layers[i] == null || layers[i].Texture != null)
        continue;

      // Lazily create the RNG so that the same Seed reproduces the same animation.
      if (cloudMap.Random == null)
        cloudMap.Random = new Random(cloudMap.Seed);

      // Make sure there is a user-defined texture or data for procedural textures.
      if (sources[i] == null)
      {
        // Each octave is 128 x 128 (= 1 / 4 of the 512 * 512 noise texture).
        sources[i] = new PackedTexture(null, _noiseTexture, cloudMap.Random.NextVector2F(0, 1), new Vector2F(0.25f));
        targets[i] = new PackedTexture(null, _noiseTexture, cloudMap.Random.NextVector2F(0, 1), new Vector2F(0.25f));
        renderTargets[i] = new RenderTarget2D(graphicsDevice, 128, 128, false, SurfaceFormat.Alpha8, DepthFormat.None);
      }

      // Update animation time.
      animationTimes[i] += deltaTime * layers[i].AnimationSpeed;

      // Update source and target if animation time is beyond 1.
      if (animationTimes[i] > 1)
      {
        // Wrap animation time.
        animationTimes[i] = animationTimes[i] % 1;

        // Swap source and target.
        MathHelper.Swap(ref sources[i], ref targets[i]);

        // Set target to a new random part of the noise texture.
        targets[i].Offset = cloudMap.Random.NextVector2F(0, 1);
      }

      // Lerp source and target together to get the final noise texture.
      graphicsDevice.SetRenderTarget(renderTargets[i]);
      _parameterViewportSize.SetValue(new Vector2(graphicsDevice.Viewport.Width, graphicsDevice.Viewport.Height));
      _parameterTextures[0].SetValue(sources[i].TextureAtlas);
      _parameterTextures[1].SetValue(targets[i].TextureAtlas);
      _parameterTexture0Parameters.SetValue(new Vector4(sources[i].Scale.X, sources[i].Scale.Y, sources[i].Offset.X, sources[i].Offset.Y));
      _parameterTexture1Parameters.SetValue(new Vector4(targets[i].Scale.X, targets[i].Scale.Y, targets[i].Offset.X, targets[i].Offset.Y));
      _parameterLerp.SetValue(animationTimes[i]);
      _passLerp.Apply();
      graphicsDevice.DrawFullScreenQuad();
    }

    // (Re-)Initialize the cloud map texture when missing or when its size changed.
    // NOTE(review): when Texture is null, SafeDispose is called on a null reference —
    // presumably a null-safe extension method; confirm.
    if (cloudMap.Texture == null || cloudMap.Size != cloudMap.Texture.Width)
    {
      cloudMap.Texture.SafeDispose();

      var cloudTexture = new RenderTarget2D(
        graphicsDevice,
        cloudMap.Size,
        cloudMap.Size,
        false,
        SurfaceFormat.Alpha8,
        DepthFormat.None);

      cloudMap.SetTexture(cloudTexture);
    }

    // Combine the layers: blend all 8 layer textures into the final cloud map.
    graphicsDevice.SetRenderTarget((RenderTarget2D)cloudMap.Texture);
    _parameterViewportSize.SetValue(new Vector2(cloudMap.Texture.Width, cloudMap.Texture.Height));
    for (int i = 0; i < LayeredCloudMap.NumberOfTextures; i++)
    {
      // Missing layers are replaced by a neutral placeholder layer.
      var layer = layers[i] ?? EmptyLayer;
      _parameterTextures[i].SetValue(layer.Texture ?? renderTargets[i]);
      _parameterMatrices[i].SetValue((Matrix)new Matrix44F(layer.TextureMatrix, Vector3F.Zero));
      _parameterDensities[i].SetValue(new Vector2(layer.DensityScale, layer.DensityOffset));
    }
    _parameterCoverage.SetValue(cloudMap.Coverage);
    _parameterDensity.SetValue(cloudMap.Density);
    _passDensity.Apply();
    graphicsDevice.DrawFullScreenQuad();
  }

  savedRenderState.Restore();
  graphicsDevice.SetRenderTarget(null);
  context.RenderTarget = originalRenderTarget;
  context.Viewport = originalViewport;
}
/// <inheritdoc/>
public override void Render(IList<SceneNode> nodes, RenderContext context, RenderOrder order)
{
  ThrowIfDisposed();
  if (nodes == null)
    throw new ArgumentNullException("nodes");
  if (context == null)
    throw new ArgumentNullException("context");

  int count = nodes.Count;
  if (count == 0)
    return;

  context.Validate(_effect);
  context.ThrowIfCameraMissing();

  var graphicsDevice = context.GraphicsService.GraphicsDevice;
  var savedRenderState = new RenderStateSnapshot(graphicsDevice);
  graphicsDevice.BlendState = BlendState.AlphaBlend;
  graphicsDevice.RasterizerState = RasterizerState.CullNone;
  graphicsDevice.DepthStencilState = DepthStencilState.DepthRead;

  // Camera properties. The view matrix contains only the camera orientation
  // (zero translation) — the sky is rendered relative to the camera position.
  var cameraNode = context.CameraNode;
  Matrix view = (Matrix)new Matrix(cameraNode.PoseWorld.Orientation.Transposed, new Vector3());
  _parameterView.SetValue(view);
  Matrix projection = cameraNode.Camera.Projection;
  _parameterProjection.SetValue(projection);

  // Update SceneNode.LastFrame for all visible nodes.
  int frame = context.Frame;
  cameraNode.LastFrame = frame;

  for (int index = 0; index < count; index++)
  {
    var skyNode = nodes[index] as ScatteringSkyNode;
    if (skyNode == null)
      continue;

    // ScatteringSkyNode is visible in current frame.
    skyNode.LastFrame = frame;

    // Upload the atmospheric scattering parameters of this node.
    _parameterSunDirection.SetValue((Vector3)skyNode.SunDirection);
    _parameterSunIntensity.SetValue((Vector3)(skyNode.SunIntensity * skyNode.SunColor));
    _parameterRadii.SetValue(new Vector4(
      skyNode.AtmosphereHeight + skyNode.PlanetRadius,    // Atmosphere radius
      skyNode.PlanetRadius,                               // Ground radius
      skyNode.ObserverAltitude + skyNode.PlanetRadius,    // Observer radius
      skyNode.ScaleHeight));                              // Absolute Scale height
    _parameterNumberOfSamples.SetValue(skyNode.NumberOfSamples);
    _parameterBetaRayleigh.SetValue((Vector3)skyNode.BetaRayleigh);
    _parameterBetaMie.SetValue((Vector3)skyNode.BetaMie);
    _parameterGMie.SetValue(skyNode.GMie);
    _parameterTransmittance.SetValue(skyNode.Transmittance);

    // Extra base color parameters are only uploaded when a base color is present.
    bool hasBaseColor = !(skyNode.BaseHorizonColor.IsNumericallyZero && skyNode.BaseZenithColor.IsNumericallyZero);
    if (hasBaseColor)
    {
      _parameterBaseHorizonColor.SetValue((Vector4)new Vector4(skyNode.BaseHorizonColor, skyNode.BaseColorShift));
      _parameterBaseZenithColor.SetValue((Vector3)skyNode.BaseZenithColor);
    }

    // Select the shader pass: linear output for HDR pipelines, gamma for LDR,
    // each with or without the additional base color.
    var pass = hasBaseColor
      ? (context.IsHdrEnabled() ? _passLinearWithBaseColor : _passGammaWithBaseColor)
      : (context.IsHdrEnabled() ? _passLinear : _passGamma);

    pass.Apply();
    _submesh.Draw();
  }

  savedRenderState.Restore();
}
// Renders a fullscreen quad for each EnvironmentLight. All lights are accumulated in the
// light buffer using additive alpha blending.
public override void Render(IList<SceneNode> nodes, RenderContext context, RenderOrder order)
{
  if (nodes == null)
    throw new ArgumentNullException("nodes");
  if (context == null)
    throw new ArgumentNullException("context");

  int count = nodes.Count;
  if (count == 0)
    return;

  if (context.CameraNode == null)
    throw new GraphicsException("Camera node needs to be set in render context.");

  var graphicsDevice = _effect.GraphicsDevice;
  graphicsDevice.DepthStencilState = DepthStencilState.None;
  graphicsDevice.RasterizerState = RasterizerState.CullNone;
  graphicsDevice.BlendState = GraphicsHelper.BlendStateAdd;

  var viewport = graphicsDevice.Viewport;
  _parameterViewportSize.SetValue(new Vector2(viewport.Width, viewport.Height));
  _parameterGBuffer0.SetValue(context.GBuffer0);
  _parameterGBuffer1.SetValue(context.GBuffer1);

  var cameraNode = context.CameraNode;
  int frame = context.GraphicsService.Frame;
  cameraNode.LastFrame = frame;

  // Frustum corners are vectors which point from the camera to the far plane corners.
  // They are converted from view space to world space before uploading.
  GraphicsHelper.GetFrustumFarCorners(cameraNode.Camera.Projection, _cameraFrustumFarCorners);
  for (int cornerIndex = 0; cornerIndex < _cameraFrustumFarCorners.Length; cornerIndex++)
  {
    _cameraFrustumFarCorners[cornerIndex] =
      (Vector3)cameraNode.PoseWorld.ToWorldDirection((Vector3F)_cameraFrustumFarCorners[cornerIndex]);
  }

  _parameterFrustumCorners.SetValue(_cameraFrustumFarCorners);

  // The current render pipeline is a HDR pipeline if the light buffer is HdrBlendable.
  // (This will practically always be the case.)
  bool isHdrEnabled = context.RenderTarget != null
                      && context.RenderTarget.Format == SurfaceFormat.HdrBlendable;

  for (int index = 0; index < count; index++)
  {
    var lightNode = nodes[index] as LightNode;
    if (lightNode == null)
      continue;

    var envLight = lightNode.Light as EnvironmentLight;
    if (envLight == null || envLight.EnvironmentMap == null)
      continue;

    // Skip lights which cannot contribute anything.
    bool isBlack = envLight.Color == new Vector3F(0)
                   || (envLight.DiffuseIntensity == 0 && envLight.SpecularIntensity == 0);
    if (isBlack)
      continue;

    lightNode.LastFrame = frame;

    float hdrScale = isHdrEnabled ? envLight.HdrScale : 1;
    _parameterDiffuseColor.SetValue((Vector3)envLight.Color * envLight.DiffuseIntensity * hdrScale);
    _parameterSpecularColor.SetValue((Vector3)envLight.Color * envLight.SpecularIntensity * hdrScale);
    _parameterTextureSize.SetValue(envLight.EnvironmentMap.Size);
    _parameterMaxMipLevel.SetValue(Math.Max(0, envLight.EnvironmentMap.LevelCount - 1));
    _parameterTexture.SetValue(envLight.EnvironmentMap);

    _effect.CurrentTechnique.Passes[0].Apply();
    graphicsDevice.DrawFullScreenQuad();
  }
}
/// <summary>
/// Renders the mirrored scene of each visible <see cref="PlanarReflectionNode"/> into the
/// node's render target texture and updates the node's texture matrix.
/// </summary>
/// <exception cref="ArgumentNullException"><paramref name="nodes"/> or <paramref name="context"/> is null.</exception>
/// <exception cref="ArgumentException">Scene/camera missing, or camera is not perspective.</exception>
public override void Render(IList<SceneNode> nodes, RenderContext context, RenderOrder order)
{
  if (nodes == null)
    throw new ArgumentNullException("nodes");
  if (context == null)
    throw new ArgumentNullException("context");
  if (context.Scene == null)
    throw new ArgumentException("Scene needs to be set in render context.", "context");
  if (context.CameraNode == null)
    throw new ArgumentException("Camera needs to be set in render context.", "context");
  if (!(context.CameraNode.Camera.Projection is PerspectiveProjection))
    throw new ArgumentException("The camera in the render context must use a perspective projection.", "context");

  int numberOfNodes = nodes.Count;
  if (numberOfNodes == 0)
    return;

  var graphicsDevice = context.GraphicsService.GraphicsDevice;
  int frame = context.Frame;
  var savedRenderState = new RenderStateSnapshot(graphicsDevice);

  // Save all render context state which is modified per node.
  var originalRenderTarget = context.RenderTarget;
  var originalViewport = context.Viewport;
  var originalCameraNode = context.CameraNode;
  var originalLodCameraNode = context.LodCameraNode;
  float originalLodBias = context.LodBias;
  var originalReferenceNode = context.ReferenceNode;

  // Basis vectors of the player camera, used to mirror the camera per node.
  Pose originalCameraPose = originalCameraNode.PoseWorld;
  Vector3F originalCameraPosition = originalCameraPose.Position;
  Matrix33F originalCameraOrientation = originalCameraPose.Orientation;

  Vector3F right = originalCameraOrientation.GetColumn(0);
  Vector3F up = originalCameraOrientation.GetColumn(1);
  Vector3F back = originalCameraOrientation.GetColumn(2);

  try
  {
    // If the RenderCallback modifies a SceneQuery collection that is currently
    // being enumerated, an InvalidOperationException is raised; it is caught
    // below and wrapped in a GraphicsException with a helpful message.
    for (int i = 0; i < numberOfNodes; i++)
    {
      var node = nodes[i] as PlanarReflectionNode;
      if (node == null)
        continue;

      // Update each node only once per frame.
      if (node.LastFrame == frame)
        continue;

      node.LastFrame = frame;

      var texture = node.RenderToTexture.Texture;
      if (texture == null)
        continue;

      var renderTarget = texture as RenderTarget2D;
      if (renderTarget == null)
        throw new GraphicsException(
          "PlanarReflectionNode.RenderToTexture.Texture is invalid. The texture must be a RenderTarget2D.");

      // RenderToTexture instances can be shared. --> Update them only once per frame.
      if (node.RenderToTexture.LastFrame == frame)
        continue;

      // Do not render if we look at the back of the reflection plane.
      Vector3F planeNormal = node.NormalWorld;
      Vector3F planePosition = node.PoseWorld.Position;
      Vector3F planeToCamera = originalCameraPosition - planePosition;
      if (Vector3F.Dot(planeNormal, planeToCamera) < 0)
        continue;

      var cameraNode = node.CameraNode;

      // Mirror the camera pose at the reflection plane.
      Pose cameraPose;
      cameraPose.Position = planePosition + Reflect(planeToCamera, planeNormal);
      cameraPose.Orientation = new Matrix33F();
      cameraPose.Orientation.SetColumn(0, Reflect(right, planeNormal));
      // NOTE(review): the up axis is negated — presumably to restore a right-handed
      // basis after mirroring; confirm against the texture matrix computed below.
      cameraPose.Orientation.SetColumn(1, -Reflect(up, planeNormal));
      cameraPose.Orientation.SetColumn(2, Reflect(back, planeNormal));
      cameraNode.PoseWorld = cameraPose;

      // The projection of the player camera.
      var originalProjection = originalCameraNode.Camera.Projection;

      // The projection of the reflected camera.
      var projection = (PerspectiveProjection)cameraNode.Camera.Projection;

      // Choose optimal projection. We get the screen-space bounds of the reflection node.
      // Then we make the FOV so small that it exactly contains the node.
      projection.Set(originalProjection);

      var bounds = GraphicsHelper.GetBounds(cameraNode, node);

      // Abort if the bounds are empty.
      if (Numeric.AreEqual(bounds.X, bounds.Z) || Numeric.AreEqual(bounds.Y, bounds.W))
        continue;

      // Apply FOV scale to bounds.
      float fovScale = node.FieldOfViewScale;
      float deltaX = (bounds.Z - bounds.X) * (fovScale - 1) / 2;
      bounds.X -= deltaX;
      bounds.Z += deltaX;

      float deltaY = (bounds.W - bounds.Y) * (fovScale - 1) / 2;
      bounds.Y -= deltaY;
      bounds.W += deltaY;

      // Update projection to contain only the node bounds.
      // BUGFIX: Capture the original frustum rectangle before mutating it.
      // Assigning Left/Top changes Width/Height (and the Left/Top values read by
      // the following lines), so computing Right/Bottom from the already-mutated
      // projection produced a skewed frustum.
      float projLeft = projection.Left;
      float projTop = projection.Top;
      float projWidth = projection.Width;
      float projHeight = projection.Height;
      projection.Left = projLeft + bounds.X * projWidth;
      projection.Right = projLeft + bounds.Z * projWidth;
      projection.Top = projTop - bounds.Y * projHeight;
      projection.Bottom = projTop - bounds.W * projHeight;

      // Set far clip plane.
      if (node.Far.HasValue)
        projection.Far = node.Far.Value;

      // Set near clip plane to the reflection plane (in camera space) so that
      // geometry behind the mirror is clipped.
      Vector3F planeNormalCamera = cameraPose.ToLocalDirection(-node.NormalWorld);
      Vector3F planePointCamera = cameraPose.ToLocalPosition(node.PoseWorld.Position);
      projection.NearClipPlane = new Plane(planeNormalCamera, planePointCamera);

      context.CameraNode = cameraNode;
      context.LodCameraNode = cameraNode;
      context.LodBias = node.LodBias ?? originalLodBias;
      context.ReferenceNode = node;
      context.RenderTarget = renderTarget;
      context.Viewport = new Viewport(0, 0, renderTarget.Width, renderTarget.Height);

      RenderCallback(context);

      // Update other properties of RenderToTexture.
      node.RenderToTexture.LastFrame = frame;
      node.RenderToTexture.TextureMatrix = GraphicsHelper.ProjectorBiasMatrix
                                           * cameraNode.Camera.Projection
                                           * cameraNode.PoseWorld.Inverse;
    }
  }
  catch (InvalidOperationException exception)
  {
    throw new GraphicsException(
      "InvalidOperationException was raised in PlanarReflectionRenderer.Render(). " +
      "This can happen if a SceneQuery instance that is currently in use is modified in the " +
      "RenderCallback. --> Use different SceneQuery types in the method which calls " +
      "SceneCaptureRenderer.Render() and in the RenderCallback method.",
      exception);
  }

  graphicsDevice.SetRenderTarget(null);
  savedRenderState.Restore();

  // Restore original render context.
  context.RenderTarget = originalRenderTarget;
  context.Viewport = originalViewport;
  context.CameraNode = originalCameraNode;
  context.LodCameraNode = originalLodCameraNode;
  context.LodBias = originalLodBias;
  context.ReferenceNode = originalReferenceNode;
}
// Renders the cascaded shadow map (a horizontal texture atlas of all cascades)
// for every visible LightNode with a CascadedShadow. Locked cascades are copied
// from the previous shadow map instead of being re-rendered.
public override void Render(IList<SceneNode> nodes, RenderContext context, RenderOrder order)
{
  if (nodes == null)
    throw new ArgumentNullException("nodes");
  if (context == null)
    throw new ArgumentNullException("context");

  int numberOfNodes = nodes.Count;
  if (numberOfNodes == 0)
    return;

  context.ThrowIfCameraMissing();
  context.ThrowIfSceneMissing();

  // Context state modified below; restored at the end of the method.
  var originalRenderTarget = context.RenderTarget;
  var originalViewport = context.Viewport;
  var originalReferenceNode = context.ReferenceNode;

  // Camera properties
  var cameraNode = context.CameraNode;
  var cameraPose = cameraNode.PoseWorld;
  var projection = cameraNode.Camera.Projection;
  if (!(projection is PerspectiveProjection))
    throw new NotImplementedException(
      "Cascaded shadow maps not yet implemented for scenes with orthographic camera.");

  float fieldOfViewY = projection.FieldOfViewY;
  float aspectRatio = projection.AspectRatio;

  // Update SceneNode.LastFrame for all visible nodes.
  int frame = context.Frame;
  cameraNode.LastFrame = frame;

  // The scene node renderer should use the light camera instead of the player camera.
  context.CameraNode = _orthographicCameraNode;
  context.Technique = "Directional";

  var graphicsService = context.GraphicsService;
  var graphicsDevice = graphicsService.GraphicsDevice;
  var savedRenderState = new RenderStateSnapshot(graphicsDevice);

  for (int i = 0; i < numberOfNodes; i++)
  {
    var lightNode = nodes[i] as LightNode;
    if (lightNode == null)
      continue;

    var shadow = lightNode.Shadow as CascadedShadow;
    if (shadow == null)
      continue;

    // LightNode is visible in current frame.
    lightNode.LastFrame = frame;

    // All cascades are packed side by side into one atlas texture.
    var format = new RenderTargetFormat(
      shadow.PreferredSize * shadow.NumberOfCascades,
      shadow.PreferredSize,
      false,
      shadow.Prefer16Bit ? SurfaceFormat.HalfSingle : SurfaceFormat.Single,
      DepthFormat.Depth24);

    bool allLocked = shadow.IsCascadeLocked[0] && shadow.IsCascadeLocked[1] && shadow.IsCascadeLocked[2] && shadow.IsCascadeLocked[3];

    if (shadow.ShadowMap == null)
    {
      shadow.ShadowMap = graphicsService.RenderTargetPool.Obtain2D(format);
      allLocked = false;   // Need to render shadow map.
    }

    // If we can reuse the whole shadow map texture, abort early.
    if (allLocked)
      continue;

    // Split distances along the view direction: [projection.Near, d0, d1, d2, d3].
    _csmSplitDistances[0] = projection.Near;
    _csmSplitDistances[1] = shadow.Distances.X;
    _csmSplitDistances[2] = shadow.Distances.Y;
    _csmSplitDistances[3] = shadow.Distances.Z;
    _csmSplitDistances[4] = shadow.Distances.W;

    // (Re-)Initialize the array for cached matrices in the CascadedShadow.
    if (shadow.ViewProjections == null || shadow.ViewProjections.Length < shadow.NumberOfCascades)
      shadow.ViewProjections = new Matrix[shadow.NumberOfCascades];

    // Initialize the projection matrices to an empty matrix.
    // The unused matrices should not contain valid projections because
    // CsmComputeSplitOptimized in CascadedShadowMask.fxh should not choose
    // the wrong cascade.
    for (int j = 0; j < shadow.ViewProjections.Length; j++)
    {
      if (!shadow.IsCascadeLocked[j])   // Do not delete cached info for cached cascade.
        shadow.ViewProjections[j] = new Matrix();
    }

    // If some cascades are cached, we have to create a new shadow map and copy
    // the old cascades into the new shadow map.
    if (shadow.IsCascadeLocked[0] || shadow.IsCascadeLocked[1] || shadow.IsCascadeLocked[2] || shadow.IsCascadeLocked[3])
    {
      var oldShadowMap = shadow.ShadowMap;
      shadow.ShadowMap = graphicsService.RenderTargetPool.Obtain2D(new RenderTargetFormat(oldShadowMap));

      graphicsDevice.SetRenderTarget(shadow.ShadowMap);
      graphicsDevice.Clear(Color.White);

      // Blit each locked cascade tile from the old atlas into the new one.
      var spriteBatch = graphicsService.GetSpriteBatch();
      spriteBatch.Begin(SpriteSortMode.Deferred, BlendState.Opaque, SamplerState.PointClamp, DepthStencilState.None, RasterizerState.CullNone);
      for (int cascade = 0; cascade < shadow.NumberOfCascades; cascade++)
      {
        if (shadow.IsCascadeLocked[cascade])
        {
          var viewport = GetViewport(shadow, cascade);
          var rectangle = new Rectangle(viewport.X, viewport.Y, viewport.Width, viewport.Height);
          spriteBatch.Draw(oldShadowMap, rectangle, rectangle, Color.White);
        }
      }
      spriteBatch.End();

      graphicsService.RenderTargetPool.Recycle(oldShadowMap);
    }
    else
    {
      graphicsDevice.SetRenderTarget(shadow.ShadowMap);
      graphicsDevice.Clear(Color.White);
    }

    context.RenderTarget = shadow.ShadowMap;
    graphicsDevice.DepthStencilState = DepthStencilState.Default;
    graphicsDevice.RasterizerState = RasterizerState.CullCounterClockwise;
    graphicsDevice.BlendState = BlendState.Opaque;

    context.ReferenceNode = lightNode;
    context.Object = shadow;
    context.ShadowNear = 0;   // Obsolete: Only kept for backward compatibility.

    bool shadowMapContainsSomething = false;
    for (int split = 0; split < shadow.NumberOfCascades; split++)
    {
      // Locked cascades keep their previous content (copied above).
      if (shadow.IsCascadeLocked[split])
        continue;

      context.Data[RenderContextKeys.ShadowTileIndex] = CubeMapShadowMapRenderer.BoxedIntegers[split];

      // near/far of this split.
      float near = _csmSplitDistances[split];
      float far = Math.Max(_csmSplitDistances[split + 1], near + Numeric.EpsilonF);

      // Create a view volume for this split.
      _splitVolume.SetFieldOfView(fieldOfViewY, aspectRatio, near, far);

      // Find the bounding sphere of the split camera frustum.
      Vector3F center;
      float radius;
      GetBoundingSphere(_splitVolume, out center, out radius);

      // Extend radius to get enough border for filtering.
      int shadowMapSize = shadow.ShadowMap.Height;

      // We could extend by (ShadowMapSize + BorderTexels) / ShadowMapSize;
      // Add at least 1 texel. (This way, shadow mask shader can clamp uv to
      // texture rect in without considering half texel border to avoid sampling outside..)
      radius *= (float)(shadowMapSize + 1) / shadowMapSize;

      // Convert center to light space.
      Pose lightPose = lightNode.PoseWorld;
      center = cameraPose.ToWorldPosition(center);
      center = lightPose.ToLocalPosition(center);

      // Snap center to texel positions to avoid shadow swimming.
      SnapPositionToTexels(ref center, 2 * radius, shadowMapSize);

      // Convert center back to world space.
      center = lightPose.ToWorldPosition(center);

      Matrix33F orientation = lightPose.Orientation;
      Vector3F backward = orientation.GetColumn(2);
      var orthographicProjection = (OrthographicProjection)_orthographicCameraNode.Camera.Projection;

      // Create a tight orthographic frustum around the cascade's bounding sphere.
      orthographicProjection.SetOffCenter(-radius, radius, -radius, radius, 0, 2 * radius);
      Vector3F cameraPosition = center + radius * backward;
      Pose frustumPose = new Pose(cameraPosition, orientation);
      Pose view = frustumPose.Inverse;
      shadow.ViewProjections[split] = (Matrix)view * orthographicProjection;

      // Convert depth bias from "texel" to light space [0, 1] depth.
      // Minus sign to move receiver depth closer to light. Divide by depth to normalize.
      float unitsPerTexel = orthographicProjection.Width / shadow.ShadowMap.Height;
      shadow.EffectiveDepthBias[split] = -shadow.DepthBias[split] * unitsPerTexel / orthographicProjection.Depth;

      // Convert normal offset from "texel" to world space.
      shadow.EffectiveNormalOffset[split] = shadow.NormalOffset[split] * unitsPerTexel;

      // For rendering the shadow map, move near plane back by MinLightDistance
      // to catch occluders in front of the cascade.
      orthographicProjection.Near = -shadow.MinLightDistance;
      _orthographicCameraNode.PoseWorld = frustumPose;

      // Set a viewport to render a tile in the texture atlas.
      graphicsDevice.Viewport = GetViewport(shadow, split);
      context.Viewport = graphicsDevice.Viewport;

      shadowMapContainsSomething |= RenderCallback(context);
    }

    // Recycle shadow map if empty.
    if (!shadowMapContainsSomething)
    {
      graphicsService.RenderTargetPool.Recycle(shadow.ShadowMap);
      shadow.ShadowMap = null;
    }
  }

  graphicsDevice.SetRenderTarget(null);
  savedRenderState.Restore();

  // Restore the original render context state.
  context.CameraNode = cameraNode;
  context.ShadowNear = float.NaN;
  context.Technique = null;
  context.RenderTarget = originalRenderTarget;
  context.Viewport = originalViewport;
  context.ReferenceNode = originalReferenceNode;
  context.Object = null;
  context.Data[RenderContextKeys.ShadowTileIndex] = null;
}
internal override void BatchJobs(IList<SceneNode> nodes, RenderContext context, RenderOrder order)
{
  // Give each scene node renderer a temporary ID (its index masked to the low
  // byte), which is later encoded into the job sort key.
  for (int i = 0; i < Renderers.Count; i++)
    Renderers[i].Id = (uint)(i & 0xff);

  Debug.Assert(_lightNodes.Count == 0, "Internal list of light nodes has not been cleared.");
  _lightNodes.Clear();

  // Collect all shadow-casting lights.
  for (int i = 0; i < nodes.Count; i++)
  {
    var lightNode = nodes[i] as LightNode;
    if (lightNode == null || lightNode.Shadow == null)
      continue;

    if (lightNode.Shadow.ShadowMap != null || lightNode.Shadow is CompositeShadow)
      _lightNodes.Add(lightNode);
  }

  // If there are too many shadow-casting lights, sort them by importance so the
  // most significant lights get a shadow mask channel.
  int lightCount = _lightNodes.Count;
  if (lightCount > _shadowMaskBins.Length)
  {
    var cameraPosition = context.CameraNode.PoseWorld.Position;
    for (int i = 0; i < lightCount; i++)
    {
      var lightNode = _lightNodes[i];
      lightNode.SortTag = lightNode.GetLightContribution(cameraPosition, 0.7f);
    }

    _lightNodes.Sort(DescendingLightNodeComparer.Instance);
  }

  // Add jobs.
  for (int i = 0; i < lightCount; i++)
  {
    var lightNode = _lightNodes[i];

    var job = new Job();
    job.Node = lightNode;

    // The first renderer which can handle this light node is used.
    foreach (var renderer in Renderers)
    {
      if (renderer.CanRender(lightNode, context))
      {
        job.Renderer = renderer;
        break;
      }
    }

    if (job.Renderer == null)
      continue;

    // Assign shadow mask index; lights which get no mask channel are dropped.
    int shadowMaskIndex = AssignShadowMask(lightNode, context);
    if (shadowMaskIndex < 0)
      continue;

    job.SortKey = GetSortKey(shadowMaskIndex, job.Renderer.Order, job.Renderer.Id);
    Jobs.Add(ref job);
  }

  foreach (var bin in _shadowMaskBins)
    bin.Clear();

  if (order != RenderOrder.UserDefined)
  {
    // Sort draw jobs.
    Jobs.Sort(Comparer.Instance);
  }
}
public override void Render(IList<SceneNode> nodes, RenderContext context, RenderOrder order)
{
  if (nodes == null)
    throw new ArgumentNullException("nodes");
  if (context == null)
    throw new ArgumentNullException("context");

  int count = nodes.Count;
  if (count == 0)
    return;

  var graphicsDevice = context.GraphicsService.GraphicsDevice;
  var savedRenderState = new RenderStateSnapshot(graphicsDevice);

  for (int i = 0; i < count; i++)
  {
    var lightNode = nodes[i] as LightNode;
    if (lightNode == null)
      continue;

    var compositeShadow = lightNode.Shadow as CompositeShadow;
    if (compositeShadow == null || compositeShadow.ShadowMask == null)
      continue;

    // Write into a single channel and use min() blending.
    graphicsDevice.BlendState = BlendStates[compositeShadow.ShadowMaskChannel];

    for (int childIndex = 0; childIndex < compositeShadow.Shadows.Count; childIndex++)
    {
      // Temporarily set shadow mask and shadow mask channel of the child shadow.
      var childShadow = compositeShadow.Shadows[childIndex];
      childShadow.ShadowMask = compositeShadow.ShadowMask;
      childShadow.ShadowMaskChannel = compositeShadow.ShadowMaskChannel;

      // Temporarily exchange LightNode.Shadow and let the first matching
      // renderer render the child shadow.
      lightNode.Shadow = childShadow;
      foreach (var renderer in _shadowMaskRenderers)
      {
        if (renderer.CanRender(lightNode, context))
        {
          renderer.Render(lightNode, context);
          break;
        }
      }

      // Remove shadow mask references. Strictly speaking, the mask is correct
      // for the composite shadow but not for the child shadow — the child only
      // contributes to the mask — therefore the references must not stay set.
      childShadow.ShadowMask = null;
      childShadow.ShadowMaskChannel = 0;
    }

    lightNode.Shadow = compositeShadow;
  }

  savedRenderState.Restore();
}
// Draws one sprite with immediate-mode OpenGL (glBegin/glEnd quads).
// The modelview matrix is pushed, two rotations are applied (sprite+owner
// rotation about the rotation point, then texture rotation about the sprite
// center), and the matrix is popped at the end. Vertex positions are computed
// on the CPU from the unit-quad corner points.
private static void RenderSprite(RenderOrder renderOrder)
{
  // Unit-quad corners; each entry is used first as a texture coordinate and is
  // then mutated in place into the final world-space vertex position.
  var points = new Vector2[4]
  {
    new Vector2(0.0f, 1.0f),
    new Vector2(1.0f, 1.0f),
    new Vector2(1.0f, 0.0f),
    new Vector2(0.0f, 0.0f)
  };

  GL.MatrixMode(MatrixMode.Modelview);
  GL.PushMatrix();

  // Rotate sprite+owner rotation around the sprite's rotation point
  // (translate to pivot, rotate about Z, translate back).
  var X = renderOrder.owner.position.X + (renderOrder.sprite.rotationPoint.X + renderOrder.sprite.offset.X) * renderOrder.sprite.scale.X;
  var Y = renderOrder.owner.position.Y + (renderOrder.sprite.rotationPoint.Y + renderOrder.sprite.offset.Y) * renderOrder.sprite.scale.Y;
  GL.Translate(X, Y, 0.0);
  GL.Rotate(renderOrder.sprite.Rotation + renderOrder.owner.Rotation, 0.0f, 0.0f, 1.0f);
  GL.Translate(-X, -Y, 0.0);

  // Apply texture rotation around the (offset) sprite center.
  X = renderOrder.owner.position.X + renderOrder.sprite.offset.X * renderOrder.sprite.scale.X;
  Y = renderOrder.owner.position.Y + renderOrder.sprite.offset.Y * renderOrder.sprite.scale.Y;
  GL.Translate(X, Y, 0.0);
  GL.Rotate(renderOrder.sprite.TextureRotation, 0.0f, 0.0f, 1.0f);
  GL.Translate(-X, -Y, 0.0);

  GL.BindTexture(TextureTarget.Texture2D, renderOrder.sprite.Texture2D.Id);
  GL.Begin(PrimitiveType.Quads);
  GL.Color3(renderOrder.sprite.Color);
  for (int i = 0; i < 4; i++)
  {
    // Texture coordinate: either the plain unit corner or the corner mapped
    // into the sprite's sub-rectangle of the texture atlas.
    if (renderOrder.sprite.DrawingRectangle == null)
    {
      GL.TexCoord2(points[i]);
    }
    else
    {
      GL.TexCoord2(
        (renderOrder.sprite.DrawingRectangle.Value.X + points[i].X * renderOrder.sprite.DrawingRectangle.Value.Width) / renderOrder.sprite.Texture2D.Width,
        (renderOrder.sprite.DrawingRectangle.Value.Y + points[i].Y * renderOrder.sprite.DrawingRectangle.Value.Height) / renderOrder.sprite.Texture2D.Height);
    }

    // Flip geometry (not the texcoord, which was already issued above) to
    // mirror the sprite horizontally/vertically.
    if (renderOrder.sprite.FlipX)
    {
      points[i].X = points[i].X == 1.0f ? 0.0f : 1.0f;
    }
    if (renderOrder.sprite.FlipY)
    {
      points[i].Y = points[i].Y == 1.0f ? 0.0f : 1.0f;
    }

    // Turn the unit corner into a world-space vertex:
    // center at origin, size, offset, scale, then move to the owner position.
    points[i].X = renderOrder.sprite.Width * (points[i].X - 0.5f);
    points[i].Y = renderOrder.sprite.Height * (points[i].Y - 0.5f);
    points[i] += renderOrder.sprite.offset;
    points[i] *= renderOrder.sprite.scale;
    points[i] += renderOrder.owner.position;
    GL.Vertex2(points[i]);
  }
  GL.End();
  GL.PopMatrix();
}
// Renders the shadow masks of lights with a CascadedShadow: for each qualifying
// light node, sets up the cascaded-shadow shader parameters (cascade distances,
// shadow matrices, filter kernel, jitter map) and draws a screen-space quad into
// the light's shadow-mask channel.
public override void Render(IList<SceneNode> nodes, RenderContext context, RenderOrder order)
{
  if (nodes == null)
    throw new ArgumentNullException("nodes");
  if (context == null)
    throw new ArgumentNullException("context");

  int numberOfNodes = nodes.Count;
  if (numberOfNodes == 0)
    return;

  context.Validate(_effect);
  context.ThrowIfCameraMissing();

  var graphicsDevice = _effect.GraphicsDevice;
  var savedRenderState = new RenderStateSnapshot(graphicsDevice);
  graphicsDevice.DepthStencilState = DepthStencilState.None;
  graphicsDevice.RasterizerState = RasterizerState.CullNone;

  // Set camera properties.
  var cameraNode = context.CameraNode;
  var cameraPose = cameraNode.PoseWorld;
  Matrix viewInverse = cameraPose;
  _parameterViewInverse.SetValue(viewInverse);
  _parameterGBuffer0.SetValue(context.GBuffer0);

  Viewport viewport = context.Viewport;
  _parameterParameters0.SetValue(new Vector2(viewport.Width, viewport.Height));

  // Set jitter map (lazily created grain texture used for sample jittering).
  if (_jitterMap == null)
    _jitterMap = NoiseHelper.GetGrainTexture(context.GraphicsService, NoiseHelper.DefaultJitterMapWidth);
  _parameterJitterMap.SetValue(_jitterMap);

  float cameraFar = context.CameraNode.Camera.Projection.Far;

  for (int i = 0; i < numberOfNodes; i++)
  {
    var lightNode = nodes[i] as LightNode;
    if (lightNode == null)
      continue;
    var shadow = lightNode.Shadow as CascadedShadow;
    if (shadow == null)
      continue;
    if (shadow.ShadowMap == null || shadow.ShadowMask == null)
      continue;

    // The effect must only render in a specific channel.
    // Do not change blend state if the correct write channels is already set, e.g. if this
    // shadow is part of a CompositeShadow, the correct blend state is already set.
    if ((int)graphicsDevice.BlendState.ColorWriteChannels != (1 << shadow.ShadowMaskChannel))
      graphicsDevice.BlendState = GraphicsHelper.BlendStateWriteSingleChannel[shadow.ShadowMaskChannel];

    _parameterParameters1.SetValue(new Vector4(
      shadow.FadeOutRange,
      shadow.Distances[shadow.NumberOfCascades - 1],
      shadow.VisualizeCascades ? 1 : 0,
      shadow.ShadowFog));

    float filterRadius = shadow.FilterRadius;

    // If we use a subset of the Poisson kernel, we have to normalize the scale.
    int numberOfSamples = Math.Min(shadow.NumberOfSamples, StandardShadowMaskRenderer.PoissonKernel.Length);

    // Not all shader passes support cascade visualization. Use a similar pass instead.
    if (shadow.VisualizeCascades)
    {
      if (numberOfSamples < 0)
      {
        numberOfSamples = 4;
      }
      else if (numberOfSamples == 0)
      {
        numberOfSamples = 1;
        filterRadius = 0;
      }
    }

    // The best dithered CSM supports max 22 samples.
    if (shadow.CascadeSelection == ShadowCascadeSelection.BestDithered && numberOfSamples > 22)
      numberOfSamples = 22;

    if (numberOfSamples > 0)
      filterRadius /= StandardShadowMaskRenderer.PoissonKernel[numberOfSamples - 1].Length();

    _parameterParameters2.SetValue(new Vector4(
      shadow.ShadowMap.Width,
      shadow.ShadowMap.Height,
      filterRadius,
      // The StandardShadow.JitterResolution is the number of texels per world unit.
      // In the shader the parameter JitterResolution contains the division by the jitter map size.
      shadow.JitterResolution / _jitterMap.Width));

    // Split distances.
    if (_parameterDistances != null)
    {
      // Set not used entries to large values (beyond the far plane) so unused
      // cascades are never selected.
      Vector4F distances = shadow.Distances;
      for (int j = shadow.NumberOfCascades; j < 4; j++)
        distances[j] = 10 * cameraFar;
      _parameterDistances.SetValue((Vector4)distances);
    }

    Debug.Assert(shadow.ViewProjections.Length == 4);
    // Shadow matrices transform from view space to each cascade's light clip space.
    for (int j = 0; j < _matrices.Length; j++)
      _matrices[j] = viewInverse * shadow.ViewProjections[j];
    _parameterShadowMatrices.SetValue(_matrices);

    _parameterDepthBias.SetValue((Vector4)shadow.EffectiveDepthBias);
    _parameterNormalOffset.SetValue((Vector4)shadow.EffectiveNormalOffset);

    // Light direction in view space (third orientation column = light backward axis).
    Vector3F lightBackwardWorld = lightNode.PoseWorld.Orientation.GetColumn(2);
    _parameterLightDirection.SetValue((Vector3)cameraPose.ToLocalDirection(lightBackwardWorld));
    _parameterNumberOfCascades.SetValue(shadow.NumberOfCascades);
    _parameterShadowMap.SetValue(shadow.ShadowMap);

    // Compute the screen rectangle covered by the light and the matching
    // frustum far corners for depth reconstruction in the shader.
    var rectangle = GraphicsHelper.GetViewportRectangle(cameraNode, viewport, lightNode);
    Vector2F texCoordTopLeft = new Vector2F(rectangle.Left / (float)viewport.Width, rectangle.Top / (float)viewport.Height);
    Vector2F texCoordBottomRight = new Vector2F(rectangle.Right / (float)viewport.Width, rectangle.Bottom / (float)viewport.Height);
    GraphicsHelper.GetFrustumFarCorners(cameraNode.Camera.Projection, texCoordTopLeft, texCoordBottomRight, _frustumFarCorners);
    _parameterFrustumCorners.SetValue(_frustumFarCorners);

    var pass = GetPass(numberOfSamples, shadow.CascadeSelection, shadow.VisualizeCascades);

    if (numberOfSamples > 0)
    {
      if (_lastNumberOfSamples != numberOfSamples)
      {
        // Create an array with the first n samples and the rest set to 0.
        // (Cached in _samples; only rebuilt when the sample count changes.)
        _lastNumberOfSamples = numberOfSamples;
        for (int j = 0; j < numberOfSamples; j++)
        {
          _samples[j].Y = StandardShadowMaskRenderer.PoissonKernel[j].Y;
          _samples[j].X = StandardShadowMaskRenderer.PoissonKernel[j].X;
          _samples[j].Z = 1.0f / numberOfSamples;
        }

        // Set the rest to zero.
        for (int j = numberOfSamples; j < _samples.Length; j++)
          _samples[j] = Vector3.Zero;

        _parameterSamples.SetValue(_samples);
      }
      else if (i == 0)
      {
        // Apply offsets in the first loop. (The cached array may not yet be
        // bound to this effect instance in the current frame.)
        _parameterSamples.SetValue(_samples);
      }
    }

    pass.Apply();
    graphicsDevice.DrawQuad(rectangle);
  }

  // Unbind textures so the render targets can be reused elsewhere.
  _parameterGBuffer0.SetValue((Texture2D)null);
  _parameterJitterMap.SetValue((Texture2D)null);
  _parameterShadowMap.SetValue((Texture2D)null);
  savedRenderState.Restore();
}
// Renders the shadow maps of lights with a StandardShadow (spotlights and
// projector lights): obtains a shadow-map render target from the pool, builds a
// matching light camera, renders the scene via RenderCallback, and recycles the
// target if nothing was drawn. The render context is restored afterwards.
public override void Render(IList <SceneNode> nodes, RenderContext context, RenderOrder order)
{
  if (nodes == null)
  {
    throw new ArgumentNullException("nodes");
  }
  if (context == null)
  {
    throw new ArgumentNullException("context");
  }
  int numberOfNodes = nodes.Count;
  if (numberOfNodes == 0)
  {
    return;
  }

  // Note: The camera node is not used by the StandardShadowMapRenderer.
  // Still throw an exception if null for consistency. (All other shadow map
  // renderers need a camera node.)
  context.ThrowIfCameraMissing();
  context.ThrowIfSceneMissing();

  // Remember context state that is temporarily overwritten below.
  var originalRenderTarget = context.RenderTarget;
  var originalViewport = context.Viewport;
  var originalReferenceNode = context.ReferenceNode;
  var cameraNode = context.CameraNode;

  // Update SceneNode.LastFrame for all visible nodes.
  int frame = context.Frame;
  cameraNode.LastFrame = frame;
  context.Technique = "Default";

  var graphicsService = context.GraphicsService;
  var graphicsDevice = graphicsService.GraphicsDevice;
  var savedRenderState = new RenderStateSnapshot(graphicsDevice);

  for (int i = 0; i < numberOfNodes; i++)
  {
    var lightNode = nodes[i] as LightNode;
    if (lightNode == null)
    {
      continue;
    }
    var shadow = lightNode.Shadow as StandardShadow;
    if (shadow == null)
    {
      continue;
    }

    // LightNode is visible in current frame.
    lightNode.LastFrame = frame;

    // Get a new shadow map if necessary.
    if (shadow.ShadowMap == null)
    {
      shadow.ShadowMap = graphicsService.RenderTargetPool.Obtain2D(
        new RenderTargetFormat(
          shadow.PreferredSize,
          shadow.PreferredSize,
          false,
          shadow.Prefer16Bit ? SurfaceFormat.HalfSingle : SurfaceFormat.Single,
          DepthFormat.Depth24));
    }

    // Create a suitable shadow camera: copy the light's projection parameters
    // into one of the two reusable camera nodes (perspective/orthographic).
    CameraNode lightCameraNode;
    if (lightNode.Light is ProjectorLight)
    {
      var light = (ProjectorLight)lightNode.Light;
      if (light.Projection is PerspectiveProjection)
      {
        var lp = (PerspectiveProjection)light.Projection;
        var cp = (PerspectiveProjection)_perspectiveCameraNode.Camera.Projection;
        cp.SetOffCenter(lp.Left, lp.Right, lp.Bottom, lp.Top, lp.Near, lp.Far);
        lightCameraNode = _perspectiveCameraNode;
      }
      else //if (light.Projection is OrthographicProjection)
      {
        var lp = (OrthographicProjection)light.Projection;
        var cp = (OrthographicProjection)_orthographicCameraNode.Camera.Projection;
        cp.SetOffCenter(lp.Left, lp.Right, lp.Bottom, lp.Top, lp.Near, lp.Far);
        lightCameraNode = _orthographicCameraNode;
      }
    }
    else if (lightNode.Light is Spotlight)
    {
      var light = (Spotlight)lightNode.Light;
      var cp = (PerspectiveProjection)_perspectiveCameraNode.Camera.Projection;
      // Shadow camera frustum covers the full spotlight cone (2 * cutoff angle).
      cp.SetFieldOfView(2 * light.CutoffAngle, 1, shadow.DefaultNear, light.Range);
      lightCameraNode = _perspectiveCameraNode;
    }
    else
    {
      throw new GraphicsException("StandardShadow can only be used with a Spotlight or a ProjectorLight.");
    }

    lightCameraNode.PoseWorld = lightNode.PoseWorld;

    // Store data for use in StandardShadowMaskRenderer.
    shadow.Near = lightCameraNode.Camera.Projection.Near;
    shadow.Far = lightCameraNode.Camera.Projection.Far;
    shadow.View = lightCameraNode.PoseWorld.Inverse;
    shadow.Projection = lightCameraNode.Camera.Projection;

    // World units per texel at a planar distance of 1 world unit.
    float unitsPerTexel = lightCameraNode.Camera.Projection.Width / (shadow.ShadowMap.Height * shadow.Near);

    // Convert depth bias from "texel" to world space.
    // Minus to move receiver depth closer to light.
    shadow.EffectiveDepthBias = -shadow.DepthBias * unitsPerTexel;

    // Convert normal offset from "texel" to world space.
    shadow.EffectiveNormalOffset = shadow.NormalOffset * unitsPerTexel;

    graphicsDevice.SetRenderTarget(shadow.ShadowMap);
    context.RenderTarget = shadow.ShadowMap;
    context.Viewport = graphicsDevice.Viewport;
    // White = max depth, i.e. "no occluder".
    graphicsDevice.Clear(Color.White);

    // The scene node renderer should use the light camera instead of the player camera.
    context.CameraNode = lightCameraNode;
    context.ReferenceNode = lightNode;
    context.Object = shadow;

    graphicsDevice.DepthStencilState = DepthStencilState.Default;
    graphicsDevice.RasterizerState = RasterizerState.CullCounterClockwise;
    graphicsDevice.BlendState = BlendState.Opaque;

    bool shadowMapContainsSomething = RenderCallback(context);
    if (!shadowMapContainsSomething)
    {
      // Shadow map is empty. Recycle it.
      graphicsService.RenderTargetPool.Recycle(shadow.ShadowMap);
      shadow.ShadowMap = null;
    }
  }

  // Restore device and context state.
  graphicsDevice.SetRenderTarget(null);
  savedRenderState.Restore();
  context.CameraNode = cameraNode;
  context.Technique = null;
  context.RenderTarget = originalRenderTarget;
  context.Viewport = originalViewport;
  context.ReferenceNode = originalReferenceNode;
  context.Object = null;
}
// Renders mesh nodes whose effects may sample the current back buffer
// ("SourceTexture"). Nodes are processed in runs: whenever the next node needs
// the back buffer, the current render target is turned into the source texture,
// a fresh target is obtained, and the Z-buffer is rebuilt before the run is drawn.
public override void Render(IList<SceneNode> nodes, RenderContext context, RenderOrder order)
{
  var graphicsService = context.GraphicsService;
  var renderTargetPool = graphicsService.RenderTargetPool;
  var graphicsDevice = graphicsService.GraphicsDevice;

  // Get a shared RebuildZBufferRenderer which was added by the graphics screen.
  var rebuildZBufferRenderer = (RebuildZBufferRenderer)context.Data[RenderContextKeys.RebuildZBufferRenderer];

  // We only support a render order of "user defined". This is always the case
  // if this renderer is added to a SceneRenderer. The SceneRenderer does the sorting.
  Debug.Assert(order == RenderOrder.UserDefined);

  // This renderer assumes that the current render target is an off-screen render target.
  Debug.Assert(context.RenderTarget != null);

  graphicsDevice.ResetTextures();

  // Remember the format of the current render target.
  var backBufferFormat = new RenderTargetFormat(context.RenderTarget);

  // In the loop below we will use the context.SourceTexture property.
  // Remember the original source texture.
  var originalSourceTexture = context.SourceTexture;
  context.SourceTexture = null;
  for (int i = 0; i < nodes.Count; i++)
  {
    var node = (MeshNode)nodes[i];

    // Check if the next node wants to sample from the back buffer.
    if (RequiresSourceTexture(node, context))
    {
      // The effect of the node wants to sample from the "SourceTexture".
      // Per default, DigitalRune Graphics uses a delegate effect parameter
      // binding to set the "SourceTexture" parameters to the
      // RenderContext.SourceTexture value. However, this property is usually
      // null. We need to manually set RenderContext.SourceTexture to the
      // current back buffer render target. Since, we cannot read from this
      // render target and write to this render target at the same time,
      // we have to copy it.
      context.SourceTexture = context.RenderTarget;

      // Set a new render target and copy the content of the lastBackBuffer
      // and the depth buffer.
      context.RenderTarget = renderTargetPool.Obtain2D(backBufferFormat);
      graphicsDevice.SetRenderTarget(context.RenderTarget);
      graphicsDevice.Viewport = context.Viewport;
      rebuildZBufferRenderer.Render(context, context.SourceTexture);
    }

    // Add current node to a temporary list.
    _tempList.Add(node);

    // Add all following nodes until another node wants to sample from the
    // back buffer. (Note: the inner loop advances the outer index i so the
    // consumed nodes are not visited again.)
    for (int j = i + 1; j < nodes.Count; j++)
    {
      node = (MeshNode)nodes[j];
      if (RequiresSourceTexture(node, context))
        break;
      _tempList.Add(node);
      i++;
    }

    // Render nodes.
    _meshRenderer.Render(_tempList, context);

    // The old back buffer is no longer needed; return it to the pool.
    renderTargetPool.Recycle(context.SourceTexture);
    context.SourceTexture = null;
    _tempList.Clear();
  }

  // Restore original render context.
  context.SourceTexture = originalSourceTexture;
}
// Collects visible DecalNodes into the internal job list: updates each node's
// material-instance bindings, builds a sortable Job per node, and finally sorts
// the jobs unless a user-defined order was requested.
private void BatchJobs(IList<SceneNode> nodes, RenderContext context, RenderOrder order)
{
  // Update SceneNode.LastFrame for all visible nodes.
  int frame = context.Frame;
  context.CameraNode.LastFrame = frame;

  int numberOfNodes = nodes.Count;
  for (int i = 0; i < numberOfNodes; i++)
  {
    var decalNode = nodes[i] as DecalNode;
    if (decalNode == null)
      continue;

    // DecalNode is visible in current frame.
    decalNode.LastFrame = frame;

    // Expose the current node to the bindings via the context.
    context.SceneNode = decalNode;

    // Update material instance. Skip nodes without a binding for this render pass.
    EffectBinding materialInstanceBinding;
    if (!decalNode.MaterialInstance.TryGet(context.RenderPass, out materialInstanceBinding))
      continue;
    context.MaterialBinding = materialInstanceBinding.MaterialBinding;
    context.MaterialInstanceBinding = materialInstanceBinding;

    // Update all parameter bindings stored in the material instance.
    foreach (var binding in materialInstanceBinding.ParameterBindings)
      binding.Update(context);

    // Select technique for rendering.
    // (The technique binding must be called for each submesh because it can do
    // "preshader" stuff, see the SkinnedEffectTechniqueBinding for example.)
    materialInstanceBinding.TechniqueBinding.Update(context);

    // Clear the temporary context state again.
    context.SceneNode = null;
    context.MaterialBinding = null;
    context.MaterialInstanceBinding = null;

    // Add draw job to list.
    var job = new Job
    {
      DrawOrder = decalNode.DrawOrder,
      MaterialKey = GetMaterialKey(materialInstanceBinding),
      MaterialInstanceBinding = materialInstanceBinding,
      DecalNode = decalNode,
    };
    _jobs.Add(ref job);
  }

  // Sort draw jobs. Front-to-back/back-to-front are treated like the default
  // order; only UserDefined skips sorting.
  switch (order)
  {
    case RenderOrder.Default:
    case RenderOrder.FrontToBack: // Ignore
    case RenderOrder.BackToFront: // Ignore
      _jobs.Sort(Comparer.Instance);
      break;
    case RenderOrder.UserDefined:
      // Do nothing.
      break;
  }
}
/// <summary>
/// Event handler: applies the render order chosen in the combo box by parsing
/// the selected item's text into the <c>RenderOrder</c> enum.
/// </summary>
/// <param name="sender">The combo box raising the event (unused).</param>
/// <param name="e">Event data (unused).</param>
private void cmbRenderOrder_SelectedIndexChanged(object sender, EventArgs e)
{
    // Parse the displayed name back into the enum value and store it.
    string selectedName = cmbRenderOrder.SelectedItem.ToString();
    this.renderOrder = (RenderOrder)Enum.Parse(typeof(RenderOrder), selectedName);
}
/// <summary>
/// Draws a contiguous range of decal jobs for each pass of the given effect pass
/// binding. Decals are normally drawn as box primitives; if the camera near plane
/// intersects a decal box, a full-screen-ish quad at the near plane is drawn instead.
/// </summary>
/// <param name="passBinding">The effect pass binding to iterate.</param>
/// <param name="context">The render context (camera, graphics service).</param>
/// <param name="index">Index of the first job to draw.</param>
/// <param name="count">Number of jobs to draw.</param>
/// <param name="order">The render order (unused here; jobs are already sorted).</param>
private void Draw(ref EffectPassBinding passBinding, RenderContext context, int index, int count, RenderOrder order)
{
  var jobs = _jobs.Array;
  var cameraNode = context.CameraNode;
  var cameraPose = cameraNode.PoseWorld;
  var graphicsDevice = context.GraphicsService.GraphicsDevice;

  // Flag: true if the box vertex/index buffers are not set in the graphics device.
  bool setBoxBuffers = true;

  foreach (var pass in passBinding)
  {
    for (int i = index; i < index + count; i++)
    {
      var materialInstanceBinding = jobs[i].MaterialInstanceBinding;
      var decalNode = jobs[i].DecalNode;
      var decalPose = decalNode.PoseWorld;

      // Update and apply local, per-instance and per-pass bindings.
      foreach (var binding in materialInstanceBinding.ParameterBindings)
      {
        if (binding.Description.Hint == EffectParameterHint.PerPass)
          binding.Update(context);
        binding.Apply(context);
      }

      pass.Apply();

      bool drawWithQuad = false;
      if (!ClipAtNearPlane)
      {
        // ----- Check if near plane intersects the decal box.
        // First make a simple AABB check in world space.
        if (GeometryHelper.HaveContact(_cameraNearPlaneAabbWorld, decalNode.Aabb))
        {
          // Make exact check of decal box against camera near plane AABB in camera space.
          var decalBoxExtent = new Vector3(1, 1, 1);
          decalBoxExtent *= decalNode.ScaleLocal;
          var decalBoxCenter = new Vector3(0, 0, -decalNode.ScaleLocal.Z / 2);

          // Get pose of decal box in view space.
          var decalBoxPose = new Pose(
            cameraPose.ToLocalPosition(decalPose.Position + decalPose.Orientation * decalBoxCenter),
            cameraPose.Orientation.Transposed * decalPose.Orientation);

          // Aabb of camera near plane in view space.
          var projection = cameraNode.Camera.Projection;
          var cameraNearPlaneAabb = new Aabb(
            new Vector3(projection.Left, projection.Bottom, -projection.Near),
            new Vector3(projection.Right, projection.Top, -projection.Near));

          drawWithQuad = GeometryHelper.HaveContact(cameraNearPlaneAabb, decalBoxExtent, decalBoxPose, true);
        }
      }

      if (!drawWithQuad)
      {
        // Draw a box primitive. The shared vertex/index buffers only need to be
        // set when they are not already bound.
        if (setBoxBuffers)
        {
          graphicsDevice.SetVertexBuffer(_vertexBuffer);
          graphicsDevice.Indices = _indexBuffer;
          setBoxBuffers = false;
        }

        // FIX: The original code had an orphaned '#else' with no matching '#if'
        // and '#endif', which cannot compile. MonoGame provides a 4-parameter
        // DrawIndexedPrimitives overload; XNA requires the 6-parameter overload.
        // (Symbol name MONOGAME assumed from the project's platform defines —
        // TODO confirm against the build configuration.)
#if MONOGAME
        graphicsDevice.DrawIndexedPrimitives(PrimitiveType.TriangleList, 0, 0, NumberOfPrimitives);
#else
        graphicsDevice.DrawIndexedPrimitives(PrimitiveType.TriangleList, 0, 0, NumberOfVertices, 0, NumberOfPrimitives);
#endif
      }
      else
      {
        // Draw a quad at the near plane because the camera is inside the box.
        // The quad vertices must be given decal space!
        var projection = cameraNode.Camera.Projection;
        Vector3 scale = decalNode.ScaleWorld;
        Pose cameraToDecalPose = decalPose.Inverse * cameraPose;

        Vector4 scissor = GraphicsHelper.GetBounds(cameraNode, decalNode);
        // Use a bias to avoid that this quad is clipped by the near plane.
        const float bias = 1.0001f;
        float left = InterpolationHelper.Lerp(projection.Left, projection.Right, scissor.X) * bias;
        float top = InterpolationHelper.Lerp(projection.Top, projection.Bottom, scissor.Y) * bias;
        float right = InterpolationHelper.Lerp(projection.Left, projection.Right, scissor.Z) * bias;
        float bottom = InterpolationHelper.Lerp(projection.Top, projection.Bottom, scissor.W) * bias;
        float z = -projection.Near * bias;

        // Transform the four near-plane corners into decal space and undo the
        // decal's world scale (the vertex shader re-applies it).
        _quadVertices[0] = cameraToDecalPose.ToWorldPosition(new Vector3(left, top, z));
        _quadVertices[0].X /= scale.X;
        _quadVertices[0].Y /= scale.Y;
        _quadVertices[0].Z /= scale.Z;
        _quadVertices[1] = cameraToDecalPose.ToWorldPosition(new Vector3(right, top, z));
        _quadVertices[1].X /= scale.X;
        _quadVertices[1].Y /= scale.Y;
        _quadVertices[1].Z /= scale.Z;
        _quadVertices[2] = cameraToDecalPose.ToWorldPosition(new Vector3(left, bottom, z));
        _quadVertices[2].X /= scale.X;
        _quadVertices[2].Y /= scale.Y;
        _quadVertices[2].Z /= scale.Z;
        _quadVertices[3] = cameraToDecalPose.ToWorldPosition(new Vector3(right, bottom, z));
        _quadVertices[3].X /= scale.X;
        _quadVertices[3].Y /= scale.Y;
        _quadVertices[3].Z /= scale.Z;

        graphicsDevice.DrawUserPrimitives(PrimitiveType.TriangleStrip, _quadVertices, 0, 2, VertexPosition.VertexDeclaration);

        // Remember that the device vertex/index buffers are not set anymore.
        setBoxBuffers = true;
      }
    }
  }
}
// Deferred-lighting pass for ProjectorLights: for each light node with a
// ProjectorLight, sets the shader parameters (colors, texture matrix, frustum
// corners, optional shadow mask and clip geometry) and additively draws a
// screen-space quad covering the light's viewport rectangle.
public override void Render(IList<SceneNode> nodes, RenderContext context, RenderOrder order)
{
  if (nodes == null)
    throw new ArgumentNullException("nodes");
  if (context == null)
    throw new ArgumentNullException("context");

  int numberOfNodes = nodes.Count;
  if (numberOfNodes == 0)
    return;

  context.Validate(_effect);
  context.ThrowIfCameraMissing();

  var graphicsDevice = _effect.GraphicsDevice;
  var savedRenderState = new RenderStateSnapshot(graphicsDevice);
  graphicsDevice.DepthStencilState = DepthStencilState.None;
  graphicsDevice.RasterizerState = RasterizerState.CullNone;
  // Lights accumulate additively into the light buffer.
  graphicsDevice.BlendState = GraphicsHelper.BlendStateAdd;

  var viewport = graphicsDevice.Viewport;
  _parameterViewportSize.SetValue(new Vector2(viewport.Width, viewport.Height));
  _parameterGBuffer0.SetValue(context.GBuffer0);
  _parameterGBuffer1.SetValue(context.GBuffer1);

  var cameraNode = context.CameraNode;
  var cameraPose = cameraNode.PoseWorld;
  Matrix viewProjection = (Matrix)cameraNode.View * cameraNode.Camera.Projection;

  // Update SceneNode.LastFrame for all visible nodes.
  int frame = context.Frame;
  cameraNode.LastFrame = frame;

  var isHdrEnabled = context.IsHdrEnabled();

  for (int i = 0; i < numberOfNodes; i++)
  {
    var lightNode = nodes[i] as LightNode;
    if (lightNode == null)
      continue;
    var light = lightNode.Light as ProjectorLight;
    if (light == null)
      continue;

    // LightNode is visible in current frame.
    lightNode.LastFrame = frame;

    // HDR scale is only applied when the pipeline renders in HDR.
    float hdrScale = isHdrEnabled ? light.HdrScale : 1;
    _parameterDiffuseColor.SetValue((Vector3)light.Color * light.DiffuseIntensity * hdrScale);
    _parameterSpecularColor.SetValue((Vector3)light.Color * light.SpecularIntensity * hdrScale);
    _parameterTexture.SetValue(light.Texture);

    // Light position is passed relative to the camera position.
    var lightPose = lightNode.PoseWorld;
    _parameterPosition.SetValue((Vector3)(lightPose.Position - cameraPose.Position));
    _parameterRange.SetValue(light.Projection.Far);
    _parameterAttenuation.SetValue(light.Attenuation);
    // Texture matrix: camera-relative world space -> light space -> projective
    // texture coordinates (with projector bias).
    _parameterTextureMatrix.SetValue((Matrix)(GraphicsHelper.ProjectorBiasMatrix * light.Projection * (lightPose.Inverse * new Pose(cameraPose.Position))));

    // Screen rectangle covered by the light and matching far-plane corners for
    // position reconstruction in the shader.
    var rectangle = GraphicsHelper.GetViewportRectangle(cameraNode, viewport, lightNode);
    var texCoordTopLeft = new Vector2F(rectangle.Left / (float)viewport.Width, rectangle.Top / (float)viewport.Height);
    var texCoordBottomRight = new Vector2F(rectangle.Right / (float)viewport.Width, rectangle.Bottom / (float)viewport.Height);
    GraphicsHelper.GetFrustumFarCorners(cameraNode.Camera.Projection, texCoordTopLeft, texCoordBottomRight, _frustumFarCorners);

    // Convert frustum far corners from view space to world space.
    for (int j = 0; j < _frustumFarCorners.Length; j++)
      _frustumFarCorners[j] = (Vector3)cameraPose.ToWorldDirection((Vector3F)_frustumFarCorners[j]);
    _parameterFrustumCorners.SetValue(_frustumFarCorners);

    bool hasShadow = (lightNode.Shadow != null && lightNode.Shadow.ShadowMask != null);
    if (hasShadow)
    {
      // Select the mask channel (R/G/B, everything else falls back to A).
      switch (lightNode.Shadow.ShadowMaskChannel)
      {
        case 0:
          _parameterShadowMaskChannel.SetValue(new Vector4(1, 0, 0, 0));
          break;
        case 1:
          _parameterShadowMaskChannel.SetValue(new Vector4(0, 1, 0, 0));
          break;
        case 2:
          _parameterShadowMaskChannel.SetValue(new Vector4(0, 0, 1, 0));
          break;
        default:
          _parameterShadowMaskChannel.SetValue(new Vector4(0, 0, 0, 1));
          break;
      }
      _parameterShadowMask.SetValue(lightNode.Shadow.ShadowMask);
    }

    if (lightNode.Clip != null)
    {
      // Clip geometry: first draw the clip submesh into the stencil buffer
      // (no color writes), then restrict the light pass to the stenciled area.
      var data = lightNode.RenderData as LightRenderData;
      if (data == null)
      {
        data = new LightRenderData();
        lightNode.RenderData = data;
      }
      data.UpdateClipSubmesh(context.GraphicsService, lightNode);
      graphicsDevice.DepthStencilState = GraphicsHelper.DepthStencilStateOnePassStencilFail;
      graphicsDevice.BlendState = GraphicsHelper.BlendStateNoColorWrite;
      _parameterWorldViewProjection.SetValue((Matrix)data.ClipMatrix * viewProjection);
      _passClip.Apply();
      data.ClipSubmesh.Draw();
      graphicsDevice.DepthStencilState = lightNode.InvertClip ? GraphicsHelper.DepthStencilStateStencilEqual0 : GraphicsHelper.DepthStencilStateStencilNotEqual0;
      graphicsDevice.BlendState = GraphicsHelper.BlendStateAdd;
    }
    else
    {
      graphicsDevice.DepthStencilState = DepthStencilState.None;
    }

    // Choose the shader pass: shadowed vs. default, Alpha8 vs. RGB texture.
    if (hasShadow)
    {
      if (light.Texture.Format == SurfaceFormat.Alpha8)
        _passShadowedAlpha.Apply();
      else
        _passShadowedRgb.Apply();
    }
    else
    {
      if (light.Texture.Format == SurfaceFormat.Alpha8)
        _passDefaultAlpha.Apply();
      else
        _passDefaultRgb.Apply();
    }

    graphicsDevice.DrawQuad(rectangle);
  }

  savedRenderState.Restore();
}