Code Example #1
        private void RenderHiDef(TextureCube texture, Matrix orientation, float exposure, RenderContext context)
        {
            var graphicsDevice = context.GraphicsService.GraphicsDevice;

            var savedRenderState = new RenderStateSnapshot(graphicsDevice);

            graphicsDevice.RasterizerState   = RasterizerState.CullNone;
            graphicsDevice.DepthStencilState = DepthStencilState.DepthRead;
            graphicsDevice.BlendState        = BlendState.Opaque;

            var    cameraNode = context.CameraNode;
            Matrix view       = cameraNode.View;
            Matrix projection = cameraNode.Camera.Projection;

            // Cube maps are left handed --> Sample with inverted z. (Otherwise, the
            // cube map and objects or texts in it are mirrored.)
            var mirrorZ = Matrix.CreateScale(1, 1, -1);

            _parameterWorldViewProjection.SetValue(
                (Matrix)(projection * view * new Matrix(orientation, Vector3.Zero) * mirrorZ));
            _parameterExposure.SetValue(new Vector4(exposure, exposure, exposure, 1));
            _textureParameter.SetValue(texture);

            if (context.IsHdrEnabled())
            {
                _passLinear.Apply();
            }
            else
            {
                _passGamma.Apply();
            }

            _submesh.Draw();
            savedRenderState.Restore();
        }
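
Note: Every example on this page brackets its draw calls with a RenderStateSnapshot / Restore() pair so that the graphics device is left exactly as it was found. The type itself is not listed here; the following is only a minimal sketch of the idea, assuming the snapshot needs nothing beyond the blend, depth-stencil, rasterizer and first sampler state (the real DigitalRune helper may capture more).

using Microsoft.Xna.Framework.Graphics;

// Minimal sketch, not the DigitalRune implementation.
internal struct RenderStateSnapshotSketch
{
    private readonly GraphicsDevice _graphicsDevice;
    private readonly BlendState _blendState;
    private readonly DepthStencilState _depthStencilState;
    private readonly RasterizerState _rasterizerState;
    private readonly SamplerState _samplerState;

    public RenderStateSnapshotSketch(GraphicsDevice graphicsDevice)
    {
        _graphicsDevice = graphicsDevice;
        _blendState = graphicsDevice.BlendState;
        _depthStencilState = graphicsDevice.DepthStencilState;
        _rasterizerState = graphicsDevice.RasterizerState;
        _samplerState = graphicsDevice.SamplerStates[0];
    }

    public void Restore()
    {
        _graphicsDevice.BlendState = _blendState;
        _graphicsDevice.DepthStencilState = _depthStencilState;
        _graphicsDevice.RasterizerState = _rasterizerState;
        _graphicsDevice.SamplerStates[0] = _samplerState;
    }
}
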
Code Example #2
        internal virtual void ProcessJobs(RenderContext context, RenderOrder order)
        {
            if (order == RenderOrder.BackToFront || order == RenderOrder.FrontToBack)
            {
                // The scene nodes are already sorted by distance.
                order = RenderOrder.UserDefined;
            }

            var savedRenderState = new RenderStateSnapshot(context.GraphicsService.GraphicsDevice);

            int index    = 0;
            var jobs     = Jobs.Array;
            int jobCount = Jobs.Count;

            while (index < jobCount)
            {
                var renderer = jobs[index].Renderer;

                // Find end of current batch.
                int endIndexExclusive = index + 1;
                while (endIndexExclusive < jobCount && jobs[endIndexExclusive].Renderer == renderer)
                {
                    endIndexExclusive++;
                }

                // Restore the render state. (The integrated scene node renderers properly
                // restore the render state, but third-party renderers might mess it up.)
                if (index > 0)
                {
                    savedRenderState.Restore();
                }

                // Submit batch to renderer.
                // (Use Accessor to expose current batch as IList<SceneNode>.)
                JobsAccessor.Set(Jobs, index, endIndexExclusive);
                renderer.Render(JobsAccessor, context, order);
                JobsAccessor.Reset();

                index = endIndexExclusive;
            }

            savedRenderState.Restore();
        }
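
The loop above batches consecutive jobs that share the same renderer and submits each run with a single call. Stripped of the renderer-specific parts, the pattern reduces to the generic helper sketched below (BatchHelper and ForEachBatch are illustrative names, not DigitalRune API).

using System;
using System.Collections.Generic;

internal static class BatchHelper
{
    // Invokes processBatch(start, endExclusive) once for every run of
    // consecutive items that share the same key.
    public static void ForEachBatch<T, TKey>(
        T[] items, int count, Func<T, TKey> getKey, Action<int, int> processBatch)
    {
        var comparer = EqualityComparer<TKey>.Default;
        int index = 0;
        while (index < count)
        {
            TKey key = getKey(items[index]);

            // Find the end of the current batch.
            int endIndexExclusive = index + 1;
            while (endIndexExclusive < count && comparer.Equals(getKey(items[endIndexExclusive]), key))
                endIndexExclusive++;

            processBatch(index, endIndexExclusive);
            index = endIndexExclusive;
        }
    }
}
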
Code Example #3
        /// <summary>
        /// Draws the textures.
        /// </summary>
        /// <param name="context">The render context.</param>
        /// <remarks>
        /// If <see cref="SpriteBatch"/> is <see langword="null"/>, then <see cref="Render"/> does
        /// nothing.
        /// </remarks>
        /// <exception cref="ArgumentNullException">
        /// <paramref name="context"/> is <see langword="null"/>.
        /// </exception>
        public void Render(RenderContext context)
        {
            if (context == null)
            {
                throw new ArgumentNullException("context");
            }

            if (SpriteBatch == null)
            {
                return;
            }

            var count = _textures.Count;

            if (count == 0)
            {
                return;
            }

            context.Validate(SpriteBatch);

            var savedRenderState = new RenderStateSnapshot(SpriteBatch.GraphicsDevice);

            SpriteBatch.Begin(SpriteSortMode.Immediate, BlendState.AlphaBlend, SamplerState.LinearClamp, DepthStencilState.None, RasterizerState.CullNone);

            for (int i = 0; i < count; i++)
            {
                var textureInfo = _textures[i];

                if (textureInfo.Texture.IsDisposed)
                {
                    continue;
                }

                if (TextureHelper.IsFloatingPointFormat(textureInfo.Texture.Format))
                {
                    // Floating-point textures must not use linear hardware filtering!
                    SpriteBatch.GraphicsDevice.SamplerStates[0] = SamplerState.PointClamp;
                    SpriteBatch.Draw(textureInfo.Texture, textureInfo.Rectangle, Color.White);
                    SpriteBatch.GraphicsDevice.SamplerStates[0] = SamplerState.LinearClamp;
                }
                else
                {
                    SpriteBatch.Draw(textureInfo.Texture, textureInfo.Rectangle, Color.White);
                }
            }

            SpriteBatch.End();

            savedRenderState.Restore();
        }
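
The special case above exists because XNA rejects linear filtering on floating-point surface formats. When the same rule is needed in more than one place it can be factored into a helper like the sketch below; the format list here is an assumption, the example relies on DigitalRune's TextureHelper for the actual check.

using Microsoft.Xna.Framework.Graphics;

internal static class SamplerSelection
{
    // Sketch: surface formats that must be sampled with point filtering in XNA.
    public static bool IsFloatingPointFormat(SurfaceFormat format)
    {
        return format == SurfaceFormat.Single
               || format == SurfaceFormat.Vector2
               || format == SurfaceFormat.Vector4
               || format == SurfaceFormat.HalfSingle
               || format == SurfaceFormat.HalfVector2
               || format == SurfaceFormat.HalfVector4
               || format == SurfaceFormat.HdrBlendable;
    }

    // Returns a clamped sampler that is safe for the given texture.
    public static SamplerState GetClampSampler(Texture2D texture)
    {
        return IsFloatingPointFormat(texture.Format)
            ? SamplerState.PointClamp
            : SamplerState.LinearClamp;
    }
}
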
Code Example #4
        private void ProcessJobs(RenderContext context)
        {
            var graphicsDevice   = GraphicsService.GraphicsDevice;
            var savedRenderState = new RenderStateSnapshot(graphicsDevice);

            PrepareBillboards(context);

            // Batch billboards using the same texture.
            int index    = 0;
            var jobs     = _jobs.Array;
            int jobCount = _jobs.Count;

            while (index < jobCount)
            {
                uint textureId = jobs[index].TextureId;

                int endIndex = index + 1;
                while (endIndex < jobCount && jobs[endIndex].TextureId == textureId)
                {
                    endIndex++;
                }

                // Submit batch.
                if (textureId == FontTextureId)
                {
                    // Text
                    EndBillboards(context);
                    DrawText(index, endIndex, context);
                }
                else
                {
                    // Billboards, particles
                    BeginBillboards(context);
                    DrawBillboards(index, endIndex, context);
                }

                index = endIndex;
            }

            EndBillboards(context);
            savedRenderState.Restore();
        }
Code Example #5
        internal void ProcessInternal(RenderContext context)
        {
            Debug.Assert(Enabled, "PostProcessor.ProcessInternal should only be called when the post-processor is enabled.");

            var graphicsDevice = GraphicsService.GraphicsDevice;

            var savedRenderState = new RenderStateSnapshot(graphicsDevice);

            // Set render states. The blend state must be set by the user!
            graphicsDevice.RasterizerState   = RasterizerState.CullNone;
            graphicsDevice.DepthStencilState = DepthStencilState.None;

            // Perform post-processing.
            OnProcess(context);

            savedRenderState.Restore();

            // Reset the texture stages. If a floating point texture is set, we get exceptions
            // when a sampler with bilinear filtering is set.

            graphicsDevice.ResetTextures();
        }
Code Example #6
        /// <summary>
        /// Clears the current render target (which must be the G-buffer).
        /// </summary>
        /// <param name="context">The render context.</param>
        public void Render(RenderContext context)
        {
            if (context == null)
            {
                throw new ArgumentNullException("context");
            }

            context.Validate(_effect);

            var graphicsDevice   = _effect.GraphicsDevice;
            var savedRenderState = new RenderStateSnapshot(graphicsDevice);

            graphicsDevice.DepthStencilState = DepthStencilState.None;
            graphicsDevice.RasterizerState   = RasterizerState.CullNone;
            graphicsDevice.BlendState        = BlendState.Opaque;

            // Clear to maximum depth.
            _parameterDepth.SetValue(1.0f);

            // The environment is facing the camera.
            // --> Set normal = cameraBackward.
            var cameraNode = context.CameraNode;

            _parameterNormal.SetValue((cameraNode != null) ? (Vector3)cameraNode.ViewInverse.GetColumn(2).XYZ : Vector3.Backward);

            // Clear specular to arbitrary value.
            _parameterSpecularPower.SetValue(1.0f);

            _effect.CurrentTechnique.Passes[0].Apply();

            // Draw full-screen quad using clip space coordinates.
            graphicsDevice.DrawQuad(
                new VertexPositionTexture(new Vector3(-1, 1, 0), new Vector2(0, 0)),
                new VertexPositionTexture(new Vector3(1, -1, 0), new Vector2(1, 1)));

            savedRenderState.Restore();
        }
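
DrawQuad above is an extension method that expands two opposite clip-space corners into a full-screen quad. A minimal stand-in using only stock XNA calls could look like the following sketch (not the DigitalRune implementation); the currently applied effect pass is used for drawing.

using Microsoft.Xna.Framework;
using Microsoft.Xna.Framework.Graphics;

internal static class QuadSketch
{
    public static void DrawQuad(GraphicsDevice graphicsDevice,
                                VertexPositionTexture topLeft,
                                VertexPositionTexture bottomRight)
    {
        // Derive the two missing corners from the given ones.
        var topRight = new VertexPositionTexture(
            new Vector3(bottomRight.Position.X, topLeft.Position.Y, topLeft.Position.Z),
            new Vector2(bottomRight.TextureCoordinate.X, topLeft.TextureCoordinate.Y));
        var bottomLeft = new VertexPositionTexture(
            new Vector3(topLeft.Position.X, bottomRight.Position.Y, topLeft.Position.Z),
            new Vector2(topLeft.TextureCoordinate.X, bottomRight.TextureCoordinate.Y));

        // Two triangles as a strip: top-left, top-right, bottom-left, bottom-right.
        var vertices = new[] { topLeft, topRight, bottomLeft, bottomRight };
        graphicsDevice.DrawUserPrimitives(PrimitiveType.TriangleStrip, vertices, 0, 2);
    }
}
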
Code Example #7
        private void DrawText(int index, int endIndex, RenderContext context)
        {
            var graphicsDevice    = context.GraphicsService.GraphicsDevice;
            var savedRenderStates = new RenderStateSnapshot(graphicsDevice);

            // The sprite batch and text effect are only created when needed.
            if (_spriteBatch == null)
            {
                _spriteBatch = context.GraphicsService.GetSpriteBatch();
                _textEffect  = new BasicEffect(graphicsDevice)
                {
                    TextureEnabled     = true,
                    VertexColorEnabled = true,
                };
            }

            _textEffect.View       = (Matrix)context.CameraNode.View;
            _textEffect.Projection = context.CameraNode.Camera.Projection;

            var jobs = _jobs.Array;

            while (index < endIndex)
            {
                var node = jobs[index++].Node as BillboardNode;
                if (node == null)
                {
                    continue;
                }

                var billboard = node.Billboard as TextBillboard;
                if (billboard == null)
                {
                    continue;
                }

                var font = billboard.Font ?? _defaultFont;
                if (font == null)
                {
                    continue;
                }

                var text          = billboard.Text as string;
                var stringBuilder = billboard.Text as StringBuilder;
                if (string.IsNullOrEmpty(text) && (stringBuilder == null || stringBuilder.Length == 0))
                {
                    continue;
                }

                Vector3F position    = node.PoseWorld.Position;
                var      orientation = billboard.Orientation;

                #region ----- Billboarding -----

                // (Code copied from BillboardBatchReach.)

                // Normal
                Vector3F normal;
                if (orientation.Normal == BillboardNormal.ViewPlaneAligned)
                {
                    normal = _defaultNormal;
                }
                else if (orientation.Normal == BillboardNormal.ViewpointOriented)
                {
                    Vector3F n = _cameraPose.Position - position;
                    normal = n.TryNormalize() ? n : _defaultNormal;
                }
                else
                {
                    normal = node.Normal;
                }

                // Axis = up vector
                Vector3F axis = node.Axis;
                if (orientation.IsAxisInViewSpace)
                {
                    axis = _cameraPose.ToWorldDirection(axis);
                }

                if (1 - Vector3F.Dot(normal, axis) < Numeric.EpsilonF)
                {
                    // Normal and axis are parallel.
                    // --> Bend normal by adding a fraction of the camera down vector.
                    Vector3F cameraDown = -_cameraPose.Orientation.GetColumn(1);
                    normal += cameraDown * 0.001f;
                    normal.Normalize();
                }

                // Compute right.
                //Vector3F right = Vector3F.Cross(axis, normal);
                // Inlined:
                Vector3F right;
                right.X = axis.Y * normal.Z - axis.Z * normal.Y;
                right.Y = axis.Z * normal.X - axis.X * normal.Z;
                right.Z = axis.X * normal.Y - axis.Y * normal.X;
                if (!right.TryNormalize())
                {
                    right = normal.Orthonormal1; // Normal and axis are parallel --> Choose random perpendicular vector.
                }
                if (orientation.IsAxisFixed)
                {
                    // Make sure normal is perpendicular to right and up.
                    //normal = Vector3F.Cross(right, axis);
                    // Inlined:
                    normal.X = right.Y * axis.Z - right.Z * axis.Y;
                    normal.Y = right.Z * axis.X - right.X * axis.Z;
                    normal.Z = right.X * axis.Y - right.Y * axis.X;

                    // No need to normalize because right and up are normalized and perpendicular.
                }
                else
                {
                    // Make sure axis is perpendicular to normal and right.
                    //axis = Vector3F.Cross(normal, right);
                    // Inlined:
                    axis.X = normal.Y * right.Z - normal.Z * right.Y;
                    axis.Y = normal.Z * right.X - normal.X * right.Z;
                    axis.Z = normal.X * right.Y - normal.Y * right.X;

                    // No need to normalize because normal and right are normalized and perpendicular.
                }
                #endregion

                _textEffect.World = new Matrix(right.X, right.Y, right.Z, 0,
                                               -axis.X, -axis.Y, -axis.Z, 0,
                                               normal.X, normal.Y, normal.Z, 0,
                                               position.X, position.Y, position.Z, 1);

                Vector3F color3F = node.Color * billboard.Color;
                float    alpha   = node.Alpha * billboard.Alpha;
                Color    color   = new Color(color3F.X * alpha,
                                             color3F.Y * alpha,
                                             color3F.Z * alpha,
                                             alpha);

                Vector2 size   = (text != null) ? font.MeasureString(text) : font.MeasureString(stringBuilder);
                Vector2 origin = size / 2;
                float   scale  = node.ScaleWorld.Y; // Assume uniform scale.

                _spriteBatch.Begin(SpriteSortMode.Immediate, null, null, graphicsDevice.DepthStencilState, RasterizerState.CullNone, _textEffect);
                if (text != null)
                {
                    _spriteBatch.DrawString(font, text, Vector2.Zero, color, 0, origin, scale, SpriteEffects.None, 0);
                }
                else
                {
                    _spriteBatch.DrawString(font, stringBuilder, Vector2.Zero, color, 0, origin, scale, SpriteEffects.None, 0);
                }

                _spriteBatch.End();
            }

            savedRenderStates.Restore();
        }
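
The billboarding region above builds an orthonormal basis (right, axis, normal) with hand-inlined cross products. The same construction, written with XNA's Vector3 for readability, is sketched below; the DigitalRune version works on Vector3F and covers additional orientation modes.

using Microsoft.Xna.Framework;

internal static class BillboardBasis
{
    // Builds an orthonormal basis from a facing direction (normal) and an up
    // axis. If axisFixed is true, the axis is kept and the normal is
    // re-orthogonalized; otherwise the normal is kept and the axis is adjusted.
    public static void Compute(ref Vector3 normal, ref Vector3 axis, bool axisFixed, out Vector3 right)
    {
        right = Vector3.Cross(axis, normal);
        if (right.LengthSquared() < 1e-12f)
        {
            // Axis and normal are (nearly) parallel --> choose any vector perpendicular to normal.
            right = Vector3.Cross(normal, Vector3.UnitX);
            if (right.LengthSquared() < 1e-12f)
                right = Vector3.Cross(normal, Vector3.UnitY);
        }
        right.Normalize();

        if (axisFixed)
            normal = Vector3.Cross(right, axis);   // right and axis are unit length and perpendicular.
        else
            axis = Vector3.Cross(normal, right);   // normal and right are unit length and perpendicular.
    }
}
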
Code Example #8
        public override void Render(IList<SceneNode> nodes, RenderContext context, RenderOrder order)
        {
            ThrowIfDisposed();

            if (nodes == null)
            {
                throw new ArgumentNullException("nodes");
            }
            if (context == null)
            {
                throw new ArgumentNullException("context");
            }

            int numberOfNodes = nodes.Count;

            if (numberOfNodes == 0)
            {
                return;
            }

            context.Validate(_effect);
            context.ThrowIfCameraMissing();

            var graphicsDevice   = context.GraphicsService.GraphicsDevice;
            var savedRenderState = new RenderStateSnapshot(graphicsDevice);

            // Camera properties
            int viewportHeight = graphicsDevice.Viewport.Height;
            var cameraNode     = context.CameraNode;
            var projection     = cameraNode.Camera.Projection;

            _parameterProjection.SetValue(projection);

            // Update SceneNode.LastFrame for all visible nodes.
            int frame = context.Frame;

            cameraNode.LastFrame = frame;

            for (int i = 0; i < numberOfNodes; i++)
            {
                var node = nodes[i] as CloudLayerNode;
                if (node == null)
                {
                    continue;
                }

                // CloudLayerNode is visible in current frame.
                node.LastFrame = frame;

                if (node.CloudMap.Texture == null)
                {
                    continue;
                }

                var sunDirection = node.SunDirection;
                _parameterSunDirection.SetValue((Vector3)sunDirection);
                _parameterSkyCurvature.SetValue(node.SkyCurvature);
                _parameterTextureMatrix.SetValue((Matrix) new Matrix44F(node.TextureMatrix, Vector3F.Zero));

                // The sample at the pixel counts as one, the rest are for the blur.
                // Note: We must not set -1 because a for loop like
                //   for (int i = 0; i < -1, i++)
                // crashes the AMD DX9 WP8.1 graphics driver. LOL
                _parameterNumberOfSamples.SetValue(Math.Max(0, node.NumberOfSamples - 1));

                _parameterSampleDistance.SetValue(node.SampleDistance);
                _parameterScatterParameters.SetValue(new Vector3(node.ForwardScatterExponent, node.ForwardScatterScale, node.ForwardScatterOffset));
                _parameterHorizonFade.SetValue(new Vector2(node.HorizonFade, node.HorizonBias));
                _parameterSunLight.SetValue((Vector3)node.SunLight);
                _parameterAmbientLight.SetValue(new Vector4((Vector3)node.AmbientLight, node.Alpha));
                _parameterTexture.SetValue(node.CloudMap.Texture);

                // Occlusion query.
                if (graphicsDevice.GraphicsProfile != GraphicsProfile.Reach && node.SunQuerySize >= Numeric.EpsilonF)
                {
                    bool skipQuery = false;
                    if (node.OcclusionQuery != null)
                    {
                        if (node.OcclusionQuery.IsComplete)
                        {
                            node.TryUpdateSunOcclusion();
                        }
                        else
                        {
                            // The previous query is still not finished. Do not start a new query, this would
                            // create a SharpDX warning.
                            skipQuery = true;
                        }
                    }
                    else
                    {
                        node.OcclusionQuery = new OcclusionQuery(graphicsDevice);
                    }

                    if (!skipQuery)
                    {
                        node.IsQueryPending = true;

                        float totalPixels = viewportHeight * node.SunQuerySize;
                        totalPixels   *= totalPixels;
                        node.QuerySize = totalPixels;

                        // Use a camera which looks at the sun.
                        // Get a relative up vector which is not parallel to the forward direction.
                        var lookAtUp = Vector3F.UnitY;
                        if (Vector3F.AreNumericallyEqual(sunDirection, lookAtUp))
                        {
                            lookAtUp = Vector3F.UnitZ;
                        }

                        Vector3F zAxis = -sunDirection;
                        Vector3F xAxis = Vector3F.Cross(lookAtUp, zAxis).Normalized;
                        Vector3F yAxis = Vector3F.Cross(zAxis, xAxis);

                        var lookAtSunView = new Matrix(xAxis.X, yAxis.X, zAxis.X, 0,
                                                       xAxis.Y, yAxis.Y, zAxis.Y, 0,
                                                       xAxis.Z, yAxis.Z, zAxis.Z, 0,
                                                       0, 0, 0, 1);
                        _parameterView.SetValue(lookAtSunView);

                        graphicsDevice.BlendState        = GraphicsHelper.BlendStateNoColorWrite;
                        graphicsDevice.DepthStencilState = DepthStencilState.None;
                        graphicsDevice.RasterizerState   = RasterizerState.CullNone;

                        // Create small quad shortly behind the near plane.
                        // Note: We use an "untranslated" view matrix, so we can ignore the camera position.
                        float    width  = (projection.Top - projection.Bottom) * node.SunQuerySize;
                        Vector3F right  = sunDirection.Orthonormal1 * (width / 2);
                        Vector3F up     = sunDirection.Orthonormal2 * (width / 2);
                        Vector3F center = sunDirection * (projection.Near * 1.0001f);
                        _queryGeometry[0] = center - up - right;
                        _queryGeometry[1] = center + up - right;
                        _queryGeometry[2] = center - up + right;
                        _queryGeometry[3] = center + up + right;

                        if (node.CloudMap.Texture.Format == SurfaceFormat.Alpha8)
                        {
                            _passOcclusionAlpha.Apply();
                        }
                        else
                        {
                            _passOcclusionRgb.Apply();
                        }

                        node.OcclusionQuery.Begin();
                        graphicsDevice.DrawUserPrimitives(PrimitiveType.TriangleStrip, _queryGeometry, 0, 2,
                                                          VertexPosition.VertexDeclaration);
                        node.OcclusionQuery.End();
                    }
                }
                else
                {
                    node.IsQueryPending = false;
                    node.SunOcclusion   = 0;
                }

                Matrix viewUntranslated = (Matrix) new Matrix44F(cameraNode.PoseWorld.Orientation.Transposed, new Vector3F(0));
                _parameterView.SetValue(viewUntranslated);

                // Render clouds.
                graphicsDevice.BlendState        = BlendState.AlphaBlend;
                graphicsDevice.RasterizerState   = RasterizerState.CullNone;
                graphicsDevice.DepthStencilState = DepthStencilState.DepthRead;

                if (context.IsHdrEnabled())
                {
                    if (node.CloudMap.Texture.Format == SurfaceFormat.Alpha8)
                    {
                        _passCloudAlphaLinear.Apply();
                    }
                    else
                    {
                        _passCloudRgbLinear.Apply();
                    }
                }
                else
                {
                    if (node.CloudMap.Texture.Format == SurfaceFormat.Alpha8)
                    {
                        _passCloudAlphaGamma.Apply();
                    }
                    else
                    {
                        _passCloudRgbGamma.Apply();
                    }
                }

                _submesh.Draw();
            }

            savedRenderState.Restore();
        }
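
TryUpdateSunOcclusion is not part of this listing. Conceptually it compares the number of pixels that survived the occlusion query with the expected pixel count stored in node.QuerySize and turns the ratio into an occlusion factor. The helper below is only a sketch of that idea, not the actual DigitalRune method.

using Microsoft.Xna.Framework;
using Microsoft.Xna.Framework.Graphics;

internal static class SunOcclusionSketch
{
    // query is a completed occlusion query; expectedPixels is the pixel count
    // the sun quad would produce if it were fully visible (node.QuerySize above).
    public static float GetOcclusion(OcclusionQuery query, float expectedPixels)
    {
        if (expectedPixels <= 0)
            return 0;

        float visible = query.PixelCount / expectedPixels;
        return MathHelper.Clamp(1 - visible, 0, 1);   // 0 = sun fully visible, 1 = fully occluded.
    }
}
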
Code Example #9
        public override void Render(IList<SceneNode> nodes, RenderContext context, RenderOrder order)
        {
            ThrowIfDisposed();

            if (nodes == null)
                throw new ArgumentNullException("nodes");
            if (context == null)
                throw new ArgumentNullException("context");

            int numberOfNodes = nodes.Count;
            if (numberOfNodes == 0)
                return;

            context.Validate(_effect);

            var originalRenderTarget = context.RenderTarget;
            var originalViewport = context.Viewport;

            var graphicsDevice = context.GraphicsService.GraphicsDevice;
            var savedRenderState = new RenderStateSnapshot(graphicsDevice);
            graphicsDevice.BlendState = BlendState.Opaque;
            graphicsDevice.RasterizerState = RasterizerState.CullNone;
            graphicsDevice.DepthStencilState = DepthStencilState.None;

            int frame = context.Frame;
            float deltaTime = (float)context.DeltaTime.TotalSeconds;

            for (int nodeIndex = 0; nodeIndex < numberOfNodes; nodeIndex++)
            {
                var cloudNode = nodes[nodeIndex] as CloudLayerNode;
                if (cloudNode == null)
                    continue;

                var cloudMap = cloudNode.CloudMap as LayeredCloudMap;
                if (cloudMap == null)
                    continue;

                // We update the cloud map only once per frame.
                if (cloudMap.LastFrame == frame)
                    continue;

                cloudMap.LastFrame = frame;

                var layers = cloudMap.Layers;
                var animationTimes = cloudMap.AnimationTimes;
                var sources = cloudMap.SourceLayers;
                var targets = cloudMap.TargetLayers;
                var renderTargets = cloudMap.LayerTextures;

                // Animate the cloud map layers.
                for (int i = 0; i < LayeredCloudMap.NumberOfTextures; i++)
                {
                    if (layers[i] == null || layers[i].Texture != null)
                        continue;

                    if (cloudMap.Random == null)
                        cloudMap.Random = new Random(cloudMap.Seed);

                    // Make sure there is a user-defined texture or data for procedural textures.
                    if (sources[i] == null)
                    {
                        // Each octave is 128 x 128 (= 1 / 4 of the 512 * 512 noise texture).
                        sources[i] = new PackedTexture(null, _noiseTexture, cloudMap.Random.NextVector2F(0, 1), new Vector2F(0.25f));
                        targets[i] = new PackedTexture(null, _noiseTexture, cloudMap.Random.NextVector2F(0, 1), new Vector2F(0.25f));
                        renderTargets[i] = new RenderTarget2D(graphicsDevice, 128, 128, false, SurfaceFormat.Alpha8, DepthFormat.None);
                    }

                    // Update animation time.
                    animationTimes[i] += deltaTime * layers[i].AnimationSpeed;

                    // Update source and target if animation time is beyond 1.
                    if (animationTimes[i] > 1)
                    {
                        // Wrap animation time.
                        animationTimes[i] = animationTimes[i] % 1;

                        // Swap source and target.
                        MathHelper.Swap(ref sources[i], ref targets[i]);

                        // Set target to a new random part of the noise texture.
                        targets[i].Offset = cloudMap.Random.NextVector2F(0, 1);
                    }

                    // Lerp source and target together to get the final noise texture.
                    graphicsDevice.SetRenderTarget(renderTargets[i]);
                    _parameterViewportSize.SetValue(new Vector2(graphicsDevice.Viewport.Width, graphicsDevice.Viewport.Height));
                    _parameterTextures[0].SetValue(sources[i].TextureAtlas);
                    _parameterTextures[1].SetValue(targets[i].TextureAtlas);
                    _parameterTexture0Parameters.SetValue(new Vector4(sources[i].Scale.X, sources[i].Scale.Y, sources[i].Offset.X, sources[i].Offset.Y));
                    _parameterTexture1Parameters.SetValue(new Vector4(targets[i].Scale.X, targets[i].Scale.Y, targets[i].Offset.X, targets[i].Offset.Y));
                    _parameterLerp.SetValue(animationTimes[i]);
                    _passLerp.Apply();
                    graphicsDevice.DrawFullScreenQuad();
                }

                // Initialize the cloud map.
                if (cloudMap.Texture == null || cloudMap.Size != cloudMap.Texture.Width)
                {
                    cloudMap.Texture.SafeDispose();

                    var cloudTexture = new RenderTarget2D(
                        graphicsDevice,
                        cloudMap.Size,
                        cloudMap.Size,
                        false,
                        SurfaceFormat.Alpha8,
                        DepthFormat.None);

                    cloudMap.SetTexture(cloudTexture);
                }

                // Combine the layers.
                graphicsDevice.SetRenderTarget((RenderTarget2D)cloudMap.Texture);
                _parameterViewportSize.SetValue(new Vector2(cloudMap.Texture.Width, cloudMap.Texture.Height));
                for (int i = 0; i < LayeredCloudMap.NumberOfTextures; i++)
                {
                    var layer = layers[i] ?? EmptyLayer;
                    _parameterTextures[i].SetValue(layer.Texture ?? renderTargets[i]);
                    _parameterMatrices[i].SetValue((Matrix)new Matrix44F(layer.TextureMatrix, Vector3F.Zero));
                    _parameterDensities[i].SetValue(new Vector2(layer.DensityScale, layer.DensityOffset));
                }
                _parameterCoverage.SetValue(cloudMap.Coverage);
                _parameterDensity.SetValue(cloudMap.Density);
                _passDensity.Apply();
                graphicsDevice.DrawFullScreenQuad();
            }

            savedRenderState.Restore();
            graphicsDevice.SetRenderTarget(null);
            context.RenderTarget = originalRenderTarget;
            context.Viewport = originalViewport;
        }
Code Example #11
        /// <inheritdoc/>
        public override void Render(IList<SceneNode> nodes, RenderContext context, RenderOrder order)
        {
            ThrowIfDisposed();

            if (nodes == null)
            {
                throw new ArgumentNullException("nodes");
            }
            if (context == null)
            {
                throw new ArgumentNullException("context");
            }

            int numberOfNodes = nodes.Count;

            if (nodes.Count == 0)
            {
                return;
            }

            context.ThrowIfCameraMissing();

            var originalRenderTarget = context.RenderTarget;
            var originalViewport     = context.Viewport;
            var originalSceneNode    = context.SceneNode;
            var originalTechnique    = context.Technique;

            var graphicsDevice   = context.GraphicsService.GraphicsDevice;
            var savedRenderState = new RenderStateSnapshot(graphicsDevice);

            //int frame = context.Frame;
            //float deltaTime = (float)context.DeltaTime.TotalSeconds;

            for (int nodeIndex = 0; nodeIndex < numberOfNodes; nodeIndex++)
            {
                var node = nodes[nodeIndex] as TerrainNode;
                if (node == null)
                {
                    continue;
                }

                context.SceneNode = node;

                context.RenderPass = RenderPassBase;
                ProcessClipmap(node, node.BaseClipmap, context);

                context.RenderPass = RenderPassDetail;
                ProcessClipmap(node, node.DetailClipmap, context);
            }

            context.RenderPass = null;

            // Clear invalid regions stored in terrain. (Note: Terrains can be shared.)
            for (int nodeIndex = 0; nodeIndex < numberOfNodes; nodeIndex++)
            {
                var node = nodes[nodeIndex] as TerrainNode;
                if (node == null)
                {
                    continue;
                }

                node.Terrain.InvalidBaseRegions.Clear();
                node.Terrain.InvalidDetailRegions.Clear();
            }

            // The clipmap layers use a MipMapLodBias which must be reset.
            graphicsDevice.ResetSamplerStates();

            savedRenderState.Restore();
            graphicsDevice.SetRenderTarget(null);
            context.RenderTarget            = originalRenderTarget;
            context.Viewport                = originalViewport;
            context.SceneNode               = originalSceneNode;
            context.MaterialBinding         = null;
            context.MaterialInstanceBinding = null;
            context.Technique               = originalTechnique;
        }
Code Example #12
        public override void Render(IList<SceneNode> nodes, RenderContext context, RenderOrder order)
        {
            ThrowIfDisposed();

            if (nodes == null)
            {
                throw new ArgumentNullException("nodes");
            }
            if (context == null)
            {
                throw new ArgumentNullException("context");
            }

            int numberOfNodes = nodes.Count;

            if (nodes.Count == 0)
            {
                return;
            }

            context.Validate(_effect);
            context.ThrowIfCameraMissing();

            var graphicsDevice   = context.GraphicsService.GraphicsDevice;
            var savedRenderState = new RenderStateSnapshot(graphicsDevice);

            graphicsDevice.BlendState        = BlendState.AlphaBlend;
            graphicsDevice.RasterizerState   = RasterizerState.CullNone;
            graphicsDevice.DepthStencilState = DepthStencilState.DepthRead;

            // Camera properties
            var    cameraNode = context.CameraNode;
            Matrix view       = (Matrix)new Matrix44F(cameraNode.PoseWorld.Orientation.Transposed, new Vector3F());

            _parameterView.SetValue(view);
            Matrix projection = cameraNode.Camera.Projection;

            _parameterProjection.SetValue(projection);

            // Update SceneNode.LastFrame for all visible nodes.
            int frame = context.Frame;

            cameraNode.LastFrame = frame;

            for (int i = 0; i < numberOfNodes; i++)
            {
                var node = nodes[i] as GradientSkyNode;
                if (node == null)
                {
                    continue;
                }

                // GradientSkyNode is visible in current frame.
                node.LastFrame = frame;

                _parameterSunDirection.SetValue((Vector3)node.SunDirection);
                _parameterFrontColor.SetValue((Vector4)node.FrontColor);
                _parameterZenithColor.SetValue((Vector4)node.ZenithColor);
                _parameterBackColor.SetValue((Vector4)node.BackColor);
                _parameterGroundColor.SetValue((Vector4)node.GroundColor);
                _parameterShift.SetValue(new Vector4(node.FrontZenithShift, node.BackZenithShift, node.FrontGroundShift, node.BackGroundShift));

                if (node.CieSkyStrength < Numeric.EpsilonF)
                {
                    if (context.IsHdrEnabled())
                    {
                        _passLinear.Apply();
                    }
                    else
                    {
                        _passGamma.Apply();
                    }
                }
                else
                {
                    var p = node.CieSkyParameters;
                    _parameterAbcd.SetValue(new Vector4(p.A, p.B, p.C, p.D));
                    _parameterEAndStrength.SetValue(new Vector2(p.E, node.CieSkyStrength));

                    if (context.IsHdrEnabled())
                    {
                        _passCieLinear.Apply();
                    }
                    else
                    {
                        _passCieGamma.Apply();
                    }
                }

                _submesh.Draw();
            }

            savedRenderState.Restore();
        }
Code Example #13
        public override void Render(IList<SceneNode> nodes, RenderContext context, RenderOrder order)
        {
            if (nodes == null)
            {
                throw new ArgumentNullException("nodes");
            }
            if (context == null)
            {
                throw new ArgumentNullException("context");
            }

            int numberOfNodes = nodes.Count;

            if (numberOfNodes == 0)
            {
                return;
            }

            context.ThrowIfCameraMissing();
            context.ThrowIfSceneMissing();

            var originalRenderTarget  = context.RenderTarget;
            var originalViewport      = context.Viewport;
            var originalReferenceNode = context.ReferenceNode;

            var cameraNode = context.CameraNode;

            // Update SceneNode.LastFrame for all visible nodes.
            int frame = context.Frame;

            cameraNode.LastFrame = frame;

            // The scene node renderer should use the light camera instead of the player camera.
            context.CameraNode = _perspectiveCameraNode;
            context.Technique  = "Omnidirectional";

            var graphicsService  = context.GraphicsService;
            var graphicsDevice   = graphicsService.GraphicsDevice;
            var renderTargetPool = graphicsService.RenderTargetPool;
            var savedRenderState = new RenderStateSnapshot(graphicsDevice);

            for (int i = 0; i < numberOfNodes; i++)
            {
                var lightNode = nodes[i] as LightNode;
                if (lightNode == null)
                {
                    continue;
                }

                var shadow = lightNode.Shadow as CubeMapShadow;
                if (shadow == null)
                {
                    continue;
                }

                var light = lightNode.Light as PointLight;
                if (light == null)
                {
                    throw new GraphicsException("CubeMapShadow can only be used with a PointLight.");
                }

                // LightNode is visible in current frame.
                lightNode.LastFrame = frame;

                if (shadow.ShadowMap == null)
                {
                    shadow.ShadowMap = renderTargetPool.ObtainCube(
                        new RenderTargetFormat(
                            shadow.PreferredSize,
                            null,
                            false,
                            shadow.Prefer16Bit ? SurfaceFormat.HalfSingle : SurfaceFormat.Single,
                            DepthFormat.Depth24));
                }

                ((PerspectiveProjection)_perspectiveCameraNode.Camera.Projection).SetFieldOfView(
                    ConstantsF.PiOver2, 1, shadow.Near, light.Range);

                // World units per texel at a planar distance of 1 world unit.
                float unitsPerTexel = _perspectiveCameraNode.Camera.Projection.Width / (shadow.ShadowMap.Size * shadow.Near);

                // Convert depth bias from "texel" to  world space.
                // Minus to move receiver closer to light.
                shadow.EffectiveDepthBias = -shadow.DepthBias * unitsPerTexel;

                // Convert normal offset from "texel" to world space.
                shadow.EffectiveNormalOffset = shadow.NormalOffset * unitsPerTexel;

                var pose = lightNode.PoseWorld;

                context.ReferenceNode = lightNode;
                context.Object        = shadow;

                bool shadowMapContainsSomething = false;
                for (int side = 0; side < 6; side++)
                {
                    context.Data[RenderContextKeys.ShadowTileIndex] = BoxedIntegers[side];

                    graphicsDevice.SetRenderTarget(shadow.ShadowMap, CubeMapFaces[side]);
                    // context.RenderTarget = shadow.ShadowMap;   // TODO: Support cube maps targets in the render context.
                    context.Viewport = graphicsDevice.Viewport;

                    graphicsDevice.Clear(Color.White);

                    _perspectiveCameraNode.View = Matrix.CreateLookAt(
                        pose.Position,
                        pose.ToWorldPosition(CubeMapForwardVectors[side]),
                        pose.ToWorldDirection(CubeMapUpVectors[side]));

                    // Abort if this cube map frustum does not touch the camera frustum.
                    if (!context.Scene.HaveContact(cameraNode, _perspectiveCameraNode))
                    {
                        continue;
                    }

                    graphicsDevice.DepthStencilState = DepthStencilState.Default;
                    graphicsDevice.RasterizerState   = RasterizerState.CullCounterClockwise;
                    graphicsDevice.BlendState        = BlendState.Opaque;

                    shadowMapContainsSomething |= RenderCallback(context);
                }

                // Recycle shadow map if empty.
                if (!shadowMapContainsSomething)
                {
                    renderTargetPool.Recycle(shadow.ShadowMap);
                    shadow.ShadowMap = null;
                }
            }

            graphicsDevice.SetRenderTarget(null);
            savedRenderState.Restore();

            context.CameraNode    = cameraNode;
            context.Technique     = null;
            context.RenderTarget  = originalRenderTarget;
            context.Viewport      = originalViewport;
            context.ReferenceNode = originalReferenceNode;
            context.Object        = null;
            context.Data[RenderContextKeys.ShadowTileIndex] = null;
        }
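
The unitsPerTexel conversion above makes DepthBias and NormalOffset independent of the shadow-map resolution: with a 90° field of view the frustum is projection.Width = 2 · near wide at the near plane, so one texel covers 2 / ShadowMap.Size world units per unit of distance from the light. A small worked example (the numbers are purely illustrative):

internal static class ShadowBiasExample
{
    public static float ComputeEffectiveDepthBias()
    {
        float near = 0.1f;
        int shadowMapSize = 512;
        float depthBiasInTexels = 3;

        // Width of the 90° frustum at the near plane: 2 * near * tan(45°) = 2 * near.
        float projectionWidth = 2 * near;

        // World units covered by one shadow-map texel at a planar distance of 1 world unit.
        float unitsPerTexel = projectionWidth / (shadowMapSize * near);   // = 2 / 512 ≈ 0.0039

        // Minus moves the receiver towards the light, as in the renderer above.
        return -depthBiasInTexels * unitsPerTexel;                        // ≈ -0.0117
    }
}
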
Code Example #14
File: SkyboxRenderer.cs, Project: Zolniu/DigitalRune
        private void RenderReach(SkyboxNode node, RenderContext context)
        {
            var graphicsDevice = context.GraphicsService.GraphicsDevice;

            var savedRenderState = new RenderStateSnapshot(graphicsDevice);
            graphicsDevice.RasterizerState = RasterizerState.CullCounterClockwise;
            graphicsDevice.DepthStencilState = DepthStencilState.DepthRead;
            graphicsDevice.BlendState = node.EnableAlphaBlending ? BlendState.AlphaBlend : BlendState.Opaque;
            graphicsDevice.SamplerStates[0] = SamplerState.LinearClamp;

            // Change viewport to render all pixels at max z.
            var originalViewport = graphicsDevice.Viewport;
            var viewport = originalViewport;
            viewport.MinDepth = viewport.MaxDepth;
            graphicsDevice.Viewport = viewport;

            var cameraNode = context.CameraNode;
            var view = cameraNode.View;
            view.Translation = Vector3F.Zero;
            var projection = cameraNode.Camera.Projection;

            var basicEffect = (BasicEffect)_effect;
            basicEffect.View = (Matrix)view;
            basicEffect.Projection = projection;
            basicEffect.DiffuseColor = (Vector3)node.Color;
            basicEffect.Alpha = node.EnableAlphaBlending ? node.Alpha : 1;

            // Scale skybox such that it lies within view frustum:
            //   distance of a skybox corner = √3
            //   √3 * scale = far
            //   => scale = far / √3
            // (Note: If  near > far / √3  then the skybox will be clipped.)
            float scale = projection.Far * 0.577f;

            var orientation = node.PoseWorld.Orientation;

            // Positive X
            basicEffect.Texture = GetTexture2D(graphicsDevice, node.Texture, CubeMapFace.PositiveX);
            basicEffect.World = (Matrix)new Matrix44F(orientation * scale, Vector3F.Zero);
            basicEffect.CurrentTechnique.Passes[0].Apply();
            graphicsDevice.DrawUserPrimitives(PrimitiveType.TriangleStrip, _faceVertices, 0, 2);

            // Negative X
            // transform = scale * rotY(180°)
            var transform = new Matrix33F(-scale, 0, 0, 0, scale, 0, 0, 0, -scale);
            basicEffect.Texture = GetTexture2D(graphicsDevice, node.Texture, CubeMapFace.NegativeX);
            basicEffect.World = (Matrix)new Matrix44F(orientation * transform, Vector3F.Zero);
            basicEffect.CurrentTechnique.Passes[0].Apply();
            graphicsDevice.DrawUserPrimitives(PrimitiveType.TriangleStrip, _faceVertices, 0, 2);

            // Positive Y
            // transform = scale * rotX(90°) * rotY(90°)
            transform = new Matrix33F(0, 0, scale, scale, 0, 0, 0, scale, 0);
            basicEffect.Texture = GetTexture2D(graphicsDevice, node.Texture, CubeMapFace.PositiveY);
            basicEffect.World = (Matrix)new Matrix44F(orientation * transform, Vector3F.Zero);
            basicEffect.CurrentTechnique.Passes[0].Apply();
            graphicsDevice.DrawUserPrimitives(PrimitiveType.TriangleStrip, _faceVertices, 0, 2);

            // Negative Y
            // transform = scale * rotX(-90°) * rotY(90°)
            transform = new Matrix33F(0, 0, scale, -scale, 0, 0, 0, -scale, 0);
            basicEffect.Texture = GetTexture2D(graphicsDevice, node.Texture, CubeMapFace.NegativeY);
            basicEffect.World = (Matrix)new Matrix44F(orientation * transform, Vector3F.Zero);
            basicEffect.CurrentTechnique.Passes[0].Apply();
            graphicsDevice.DrawUserPrimitives(PrimitiveType.TriangleStrip, _faceVertices, 0, 2);

            // Cube maps are left-handed, whereas the world is right-handed!

            // Positive Z (= negative Z in world space)
            // transform = scale * rotY(90°)
            transform = new Matrix33F(0, 0, scale, 0, scale, 0, -scale, 0, 0);
            basicEffect.Texture = GetTexture2D(graphicsDevice, node.Texture, CubeMapFace.PositiveZ);
            basicEffect.World = (Matrix)new Matrix44F(orientation * transform, Vector3F.Zero);
            basicEffect.CurrentTechnique.Passes[0].Apply();
            graphicsDevice.DrawUserPrimitives(PrimitiveType.TriangleStrip, _faceVertices, 0, 2);

            // Negative Z (= positive Z in world space)
            // transform = scale * rotY(-90°)
            transform = new Matrix33F(0, 0, -scale, 0, scale, 0, scale, 0, 0);
            basicEffect.Texture = GetTexture2D(graphicsDevice, node.Texture, CubeMapFace.NegativeZ);
            basicEffect.World = (Matrix)new Matrix44F(orientation * transform, Vector3F.Zero);
            basicEffect.CurrentTechnique.Passes[0].Apply();
            graphicsDevice.DrawUserPrimitives(PrimitiveType.TriangleStrip, _faceVertices, 0, 2);

            graphicsDevice.Viewport = originalViewport;
            savedRenderState.Restore();
        }
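
The constant 0.577 in RenderReach is a rounded 1/√3: a corner of the unit skybox cube lies √3 from the center, so scaling by far/√3 keeps the whole box inside the far plane. Computed explicitly instead of hard-coded:

using System;

internal static class SkyboxScaleExample
{
    // Largest scale at which no corner of the skybox cube exceeds the far plane.
    public static float GetSkyboxScale(float far)
    {
        return far / (float)Math.Sqrt(3);   // ≈ far * 0.57735, i.e. the 0.577f used above.
    }
}
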
Code Example #15
File: SkyboxRenderer.cs, Project: Zolniu/DigitalRune
        private void RenderHiDef(SkyboxNode node, RenderContext context)
        {
            var graphicsDevice = context.GraphicsService.GraphicsDevice;

            var savedRenderState = new RenderStateSnapshot(graphicsDevice);
            graphicsDevice.RasterizerState = RasterizerState.CullNone;
            graphicsDevice.DepthStencilState = DepthStencilState.DepthRead;
            graphicsDevice.BlendState = node.EnableAlphaBlending ? BlendState.AlphaBlend : BlendState.Opaque;

            bool sourceIsFloatingPoint = TextureHelper.IsFloatingPointFormat(node.Texture.Format);

            // Set sampler state. (Floating-point textures cannot use linear filtering. (XNA would throw an exception.))
            if (sourceIsFloatingPoint)
                graphicsDevice.SamplerStates[0] = SamplerState.PointClamp;
            else
                graphicsDevice.SamplerStates[0] = SamplerState.LinearClamp;

            var cameraNode = context.CameraNode;
            Matrix44F view = cameraNode.View;
            Matrix44F projection = cameraNode.Camera.Projection;

            // Cube maps are left handed --> Sample with inverted z. (Otherwise, the
            // cube map and objects or texts in it are mirrored.)
            var mirrorZ = Matrix44F.CreateScale(1, 1, -1);
            Matrix33F orientation = node.PoseWorld.Orientation;
            _parameterWorldViewProjection.SetValue((Matrix)(projection * view * new Matrix44F(orientation, Vector3F.Zero) * mirrorZ));

            Vector4 color = node.EnableAlphaBlending
                            ? new Vector4((Vector3)node.Color * node.Alpha, node.Alpha) // Premultiplied
                            : new Vector4((Vector3)node.Color, 1);                      // Opaque
            _parameterColor.SetValue(color);
            _textureParameter.SetValue(node.Texture);

            if (node.Encoding is RgbEncoding)
            {
                _parameterTextureSize.SetValue(node.Texture.Size);
                if (context.IsHdrEnabled())
                    _passRgbToRgb.Apply();
                else
                    _passRgbToSRgb.Apply();
            }
            else if (node.Encoding is SRgbEncoding)
            {
                if (!sourceIsFloatingPoint)
                {
                    if (context.IsHdrEnabled())
                        _passSRgbToRgb.Apply();
                    else
                        _passSRgbToSRgb.Apply();
                }
                else
                {
                    throw new GraphicsException("sRGB encoded skybox cube maps must not use a floating point format.");
                }
            }
            else if (node.Encoding is RgbmEncoding)
            {
                float max = GraphicsHelper.ToGamma(((RgbmEncoding)node.Encoding).Max);
                _parameterRgbmMaxValue.SetValue(max);

                if (context.IsHdrEnabled())
                    _passRgbmToRgb.Apply();
                else
                    _passRgbmToSRgb.Apply();
            }
            else
            {
                throw new NotSupportedException("The SkyBoxRenderer supports only RgbEncoding, SRgbEncoding and RgbmEncoding.");
            }

            _submesh.Draw();
            savedRenderState.Restore();
        }
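
For the RGBM branch the pixel shader has to undo the RGBM packing before the color can be used as an HDR value. The shader is not shown on this page; assuming the common RGBM convention (a shared multiplier stored in the alpha channel), the decode corresponds to the CPU-side sketch below, where maxValue plays the role of the value passed to _parameterRgbmMaxValue.

using Microsoft.Xna.Framework;

internal static class RgbmSketch
{
    // Generic RGBM decode (an assumption, not DigitalRune's exact shader code):
    // the texel stores rgb plus a shared multiplier in alpha, and the HDR color
    // is rgb * alpha * maxValue.
    public static Vector3 DecodeRgbm(Vector4 encoded, float maxValue)
    {
        float m = encoded.W * maxValue;
        return new Vector3(encoded.X * m, encoded.Y * m, encoded.Z * m);
    }
}
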
Code Example #16
        public override void Render(IList<SceneNode> nodes, RenderContext context, RenderOrder order)
        {
            if (nodes == null)
            throw new ArgumentNullException("nodes");
              if (context == null)
            throw new ArgumentNullException("context");

              int numberOfNodes = nodes.Count;
              if (numberOfNodes == 0)
            return;

              context.ThrowIfCameraMissing();
              context.ThrowIfSceneMissing();

              var originalRenderTarget = context.RenderTarget;
              var originalViewport = context.Viewport;
              var originalReferenceNode = context.ReferenceNode;

              var cameraNode = context.CameraNode;

              // Update SceneNode.LastFrame for all visible nodes.
              int frame = context.Frame;
              cameraNode.LastFrame = frame;

              // The scene node renderer should use the light camera instead of the player camera.
              context.CameraNode = _perspectiveCameraNode;
              context.Technique = "Omnidirectional";

              var graphicsService = context.GraphicsService;
              var graphicsDevice = graphicsService.GraphicsDevice;
              var renderTargetPool = graphicsService.RenderTargetPool;
              var savedRenderState = new RenderStateSnapshot(graphicsDevice);

              for (int i = 0; i < numberOfNodes; i++)
              {
            var lightNode = nodes[i] as LightNode;
            if (lightNode == null)
              continue;

            var shadow = lightNode.Shadow as CubeMapShadow;
            if (shadow == null)
              continue;

            var light = lightNode.Light as PointLight;
            if (light == null)
              throw new GraphicsException("CubeMapShadow can only be used with a PointLight.");

            // LightNode is visible in current frame.
            lightNode.LastFrame = frame;

            if (shadow.ShadowMap == null)
            {
              shadow.ShadowMap = renderTargetPool.ObtainCube(
            new RenderTargetFormat(
              shadow.PreferredSize,
              null,
              false,
              shadow.Prefer16Bit ? SurfaceFormat.HalfSingle : SurfaceFormat.Single,
              DepthFormat.Depth24));
            }

            ((PerspectiveProjection)_perspectiveCameraNode.Camera.Projection).SetFieldOfView(
              ConstantsF.PiOver2, 1, shadow.Near, light.Range);

            // World units per texel at a planar distance of 1 world unit.
            float unitsPerTexel = _perspectiveCameraNode.Camera.Projection.Width / (shadow.ShadowMap.Size * shadow.Near);

            // Convert depth bias from "texel" to world space.
            // Minus to move receiver closer to light.
            shadow.EffectiveDepthBias = -shadow.DepthBias * unitsPerTexel;

            // Convert normal offset from "texel" to world space.
            shadow.EffectiveNormalOffset = shadow.NormalOffset * unitsPerTexel;

            var pose = lightNode.PoseWorld;

            context.ReferenceNode = lightNode;
            context.Object = shadow;

            bool shadowMapContainsSomething = false;
            for (int side = 0; side < 6; side++)
            {
              context.Data[RenderContextKeys.ShadowTileIndex] = BoxedIntegers[side];

              graphicsDevice.SetRenderTarget(shadow.ShadowMap, CubeMapFaces[side]);
              // context.RenderTarget = shadow.ShadowMap;   // TODO: Support cube map targets in the render context.
              context.Viewport = graphicsDevice.Viewport;

              graphicsDevice.Clear(Color.White);

              _perspectiveCameraNode.View = Matrix44F.CreateLookAt(
            pose.Position,
            pose.ToWorldPosition(CubeMapForwardVectors[side]),
            pose.ToWorldDirection(CubeMapUpVectors[side]));

              // Skip this cube map face if its frustum does not touch the camera frustum.
              if (!context.Scene.HaveContact(cameraNode, _perspectiveCameraNode))
            continue;

              graphicsDevice.DepthStencilState = DepthStencilState.Default;
              graphicsDevice.RasterizerState = RasterizerState.CullCounterClockwise;
              graphicsDevice.BlendState = BlendState.Opaque;

              shadowMapContainsSomething |= RenderCallback(context);
            }

            // Recycle shadow map if empty.
            if (!shadowMapContainsSomething)
            {
              renderTargetPool.Recycle(shadow.ShadowMap);
              shadow.ShadowMap = null;
            }
              }

              graphicsDevice.SetRenderTarget(null);
              savedRenderState.Restore();

              context.CameraNode = cameraNode;
              context.Technique = null;
              context.RenderTarget = originalRenderTarget;
              context.Viewport = originalViewport;
              context.ReferenceNode = originalReferenceNode;
              context.Object = null;
              context.Data[RenderContextKeys.ShadowTileIndex] = null;
        }
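The loop above derives EffectiveDepthBias and EffectiveNormalOffset from user-defined values given in texels. The conversion uses only quantities visible in the code (projection width, shadow map size, near distance); a hedged sketch with a hypothetical helper name:

        // Minimal sketch of the texel-to-world conversion used for the cube map shadow above.
        static void ComputeEffectiveBias(
            float projectionWidth, int shadowMapSize, float near,
            float depthBiasInTexels, float normalOffsetInTexels,
            out float effectiveDepthBias, out float effectiveNormalOffset)
        {
            // World units per texel at a planar distance of 1 world unit.
            float unitsPerTexel = projectionWidth / (shadowMapSize * near);

            // The minus sign moves the receiver closer to the light.
            effectiveDepthBias = -depthBiasInTexels * unitsPerTexel;
            effectiveNormalOffset = normalOffsetInTexels * unitsPerTexel;
        }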
Code example #17
File: TextBatch.cs Project: Zolniu/DigitalRune
        /// <summary>
        /// Draws the texts.
        /// </summary>
        /// <param name="context">The render context.</param>
        /// <remarks>
        /// If <see cref="SpriteBatch"/> or <see cref="SpriteFont"/> are <see langword="null"/>, then 
        /// <see cref="Render"/> does nothing.
        /// </remarks>
        /// <exception cref="ArgumentNullException">
        /// <paramref name="context"/> is <see langword="null"/>.
        /// </exception>
        public void Render(RenderContext context)
        {
            if (context == null)
            throw new ArgumentNullException("context");

              if (SpriteBatch == null || SpriteFont == null)
            return;

              context.Validate(SpriteBatch);

              if (_texts2D.Count == 0 && _texts3D.Count == 0)
            return;

              if (_texts3D.Count > 0)
            context.ThrowIfCameraMissing();

              var savedRenderState = new RenderStateSnapshot(SpriteBatch.GraphicsDevice);

              if (EnableDepthTest)
              {
            SpriteBatch.Begin(SpriteSortMode.Immediate, BlendState.AlphaBlend, SamplerState.LinearClamp, DepthStencilState.Default, RasterizerState.CullNone);
              }
              else
              {
            SpriteBatch.Begin();
              }

              // ----- Draw world space text.
              if (_texts3D.Count > 0)
              {
            CameraNode cameraNode = context.CameraNode;
            Matrix44F viewProjection = cameraNode.Camera.Projection * cameraNode.View;
            Viewport viewport = SpriteBatch.GraphicsDevice.Viewport;

            foreach (var textInfo in _texts3D)
            {
              // Transform position from world space to the viewport.
              Vector3F pos = viewport.ProjectToViewport(textInfo.Position, viewProjection);
              if (pos.Z < 0 || pos.Z > 1)
            continue;

              // Snap to pixels. A small bias is added in one direction because when text is
              // drawn at certain positions (e.g. the view space origin) and the presentation
              // target width is an odd number, pos lies exactly on pixel centers; due to
              // numerical errors it would then jitter between pixels when the camera moves slightly.
              pos.X = (float)Math.Round(pos.X + 0.01f);
              pos.Y = (float)Math.Round(pos.Y + 0.01f);

              var textAsString = textInfo.Text as string;
              if (!string.IsNullOrEmpty(textAsString))
              {
            var textOrigin = GetOrigin(textAsString, textInfo.RelativeOrigin);
            SpriteBatch.DrawString(SpriteFont, textAsString, new Vector2(pos.X, pos.Y), textInfo.Color, 0, textOrigin, 1.0f, SpriteEffects.None, pos.Z);
              }
              else
              {
            var textAsStringBuilder = textInfo.Text as StringBuilder;
            if (textAsStringBuilder != null && textAsStringBuilder.Length > 0)
            {
              var textOrigin = GetOrigin(textAsStringBuilder, textInfo.RelativeOrigin);
              SpriteBatch.DrawString(SpriteFont, textAsStringBuilder, new Vector2(pos.X, pos.Y), textInfo.Color, 0, textOrigin, 1, SpriteEffects.None, pos.Z);
            }
              }
            }
              }

              // ----- Draw screen space text.
              foreach (var textInfo in _texts2D)
              {
            var textAsString = textInfo.Text as string;
            if (!string.IsNullOrEmpty(textAsString))
            {
              var textOrigin = GetOrigin(textAsString, textInfo.RelativeOrigin);
              SpriteBatch.DrawString(SpriteFont, textAsString, (Vector2)textInfo.Position, textInfo.Color, 0, textOrigin, 1, SpriteEffects.None, 0);
            }
            else
            {
              var textAsStringBuilder = textInfo.Text as StringBuilder;
              if (textAsStringBuilder != null && textAsStringBuilder.Length > 0)
              {
            var textOrigin = GetOrigin(textAsStringBuilder, textInfo.RelativeOrigin);
            SpriteBatch.DrawString(SpriteFont, textAsStringBuilder, (Vector2)textInfo.Position, textInfo.Color, 0, textOrigin, 1, SpriteEffects.None, 0);
              }
            }
              }

              SpriteBatch.End();

              savedRenderState.Restore();
        }
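The world space branch above projects each text position into the viewport, rejects positions outside the [0, 1] depth range, and snaps the result to pixels with a small bias. A condensed, hedged sketch of that per-text transform (TryGetScreenPosition is an illustrative helper, not part of TextBatch):

        // Minimal sketch of the world-to-screen step used for the 3D texts above.
        static bool TryGetScreenPosition(
            Viewport viewport, Matrix44F viewProjection, Vector3F worldPosition, out Vector3F screen)
        {
            screen = viewport.ProjectToViewport(worldPosition, viewProjection);
            if (screen.Z < 0 || screen.Z > 1)
                return false;   // Outside the [0, 1] depth range.

            // Snap to pixels; the small bias avoids jitter when the position falls
            // exactly on a pixel center (e.g. odd presentation target width).
            screen.X = (float)Math.Round(screen.X + 0.01f);
            screen.Y = (float)Math.Round(screen.Y + 0.01f);
            return true;
        }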
Code example #18
File: SpriteRenderer.cs Project: Zolniu/DigitalRune
        public override void Render(IList<SceneNode> nodes, RenderContext context, RenderOrder order)
        {
            ThrowIfDisposed();

              if (nodes == null)
            throw new ArgumentNullException("nodes");
              if (context == null)
            throw new ArgumentNullException("context");

              int numberOfNodes = nodes.Count;
              if (numberOfNodes == 0)
            return;

              context.Validate(_spriteBatch);
              context.ThrowIfCameraMissing();

              var graphicsDevice = context.GraphicsService.GraphicsDevice;
              var savedRenderState = new RenderStateSnapshot(graphicsDevice);

              // Camera properties
              var cameraNode = context.CameraNode;
              Matrix44F viewProjection = cameraNode.Camera.Projection * cameraNode.View;
              var viewport = graphicsDevice.Viewport;

              // Update SceneNode.LastFrame for all visible nodes.
              int frame = context.Frame;
              cameraNode.LastFrame = frame;

              SpriteSortMode sortMode;
              switch (order)
              {
            case RenderOrder.Default:
              sortMode = SpriteSortMode.Texture;
              break;
            case RenderOrder.FrontToBack:
              sortMode = SpriteSortMode.FrontToBack;
              break;
            case RenderOrder.BackToFront:
              sortMode = SpriteSortMode.BackToFront;
              break;
            case RenderOrder.UserDefined:
            default:
              sortMode = SpriteSortMode.Deferred;
              break;
              }

              _spriteBatch.Begin(sortMode, graphicsDevice.BlendState, null, graphicsDevice.DepthStencilState, null);

              for (int i = 0; i < numberOfNodes; i++)
              {
            var node = nodes[i] as SpriteNode;
            if (node == null)
              continue;

            // SpriteNode is visible in current frame.
            node.LastFrame = frame;

            // Position, size, and origin in pixels.
            Vector3F position = new Vector3F();
            Vector2 size = new Vector2();
            Vector2 origin = new Vector2();

            var bitmapSprite = node.Sprite as ImageSprite;
            if (bitmapSprite != null)
            {
              var packedTexture = bitmapSprite.Texture;
              if (packedTexture != null)
              {
            // Project into viewport and snap to pixels.
            position = viewport.ProjectToViewport(node.PoseWorld.Position, viewProjection);
            position.X = (int)(position.X + 0.5f);
            position.Y = (int)(position.Y + 0.5f);

            // Get source rectangle (pixel bounds).
            var sourceRectangle = packedTexture.GetBounds(node.AnimationTime);
            size = new Vector2(sourceRectangle.Width, sourceRectangle.Height);

            // Premultiply color.
            Vector3F color3F = node.Color;
            float alpha = node.Alpha;
            Color color = new Color(color3F.X * alpha, color3F.Y * alpha, color3F.Z * alpha, alpha);

            // Get absolute origin (relative to pixel bounds).
            origin = (Vector2)node.Origin * size;

            // Draw using SpriteBatch.
            _spriteBatch.Draw(
              packedTexture.TextureAtlas, new Vector2(position.X, position.Y), sourceRectangle,
              color, node.Rotation, origin, (Vector2)node.Scale, SpriteEffects.None, position.Z);
              }
            }
            else
            {
              var textSprite = node.Sprite as TextSprite;
              if (textSprite != null)
              {
            var font = textSprite.Font ?? _spriteFont;
            if (font != null)
            {
              // Text can be a string or StringBuilder.
              var text = textSprite.Text as string;
              if (text != null)
              {
                if (text.Length > 0)
                {
                  // Project into viewport and snap to pixels.
                  position = viewport.ProjectToViewport(node.PoseWorld.Position, viewProjection);
                  position.X = (int)(position.X + 0.5f);
                  position.Y = (int)(position.Y + 0.5f);

                  // Premultiply color.
                  Vector3F color3F = node.Color;
                  float alpha = node.Alpha;
                  Color color = new Color(color3F.X * alpha, color3F.Y * alpha, color3F.Z * alpha, alpha);

                  // Get absolute origin (relative to pixel bounds).
                  size = font.MeasureString(text);
                  origin = (Vector2)node.Origin * size;

                  // Draw using SpriteBatch.
                  _spriteBatch.DrawString(
                    font, text, new Vector2(position.X, position.Y),
                    color, node.Rotation, origin, (Vector2)node.Scale,
                    SpriteEffects.None, position.Z);
                }
              }
              else
              {
                var stringBuilder = textSprite.Text as StringBuilder;
                if (stringBuilder != null && stringBuilder.Length > 0)
                {
                  // Project into viewport and snap to pixels.
                  position = viewport.ProjectToViewport(node.PoseWorld.Position, viewProjection);
                  position.X = (int)(position.X + 0.5f);
                  position.Y = (int)(position.Y + 0.5f);

                  // Premultiply color.
                  Vector3F color3F = node.Color;
                  float alpha = node.Alpha;
                  Color color = new Color(color3F.X * alpha, color3F.Y * alpha, color3F.Z * alpha, alpha);

                  // Get absolute origin (relative to pixel bounds).
                  size = font.MeasureString(stringBuilder);
                  origin = (Vector2)node.Origin * size;

                  // Draw using SpriteBatch.
                  _spriteBatch.DrawString(
                    font, stringBuilder, new Vector2(position.X, position.Y),
                    color, node.Rotation, origin, (Vector2)node.Scale,
                    SpriteEffects.None, position.Z);
                }
              }
            }
              }
            }

            // Store bounds and depth for hit tests.
            node.LastBounds = new Rectangle(
              (int)(position.X - origin.X),
              (int)(position.Y - origin.Y),
              (int)(size.X * node.Scale.X),
              (int)(size.Y * node.Scale.Y));

            node.LastDepth = position.Z;
              }

              _spriteBatch.End();
              savedRenderState.Restore();
        }
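The sprite and text branches above repeat the same premultiplied-alpha color computation three times before calling SpriteBatch. A hedged sketch of that step as a small helper (the helper itself is illustrative; the renderer inlines it):

        // Minimal sketch: premultiply a node color by its alpha for SpriteBatch,
        // matching the inline computation in the renderer above.
        static Color GetPremultipliedColor(Vector3F color, float alpha)
        {
            return new Color(color.X * alpha, color.Y * alpha, color.Z * alpha, alpha);
        }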
Code example #19
File: WaterRenderer.cs Project: Zolniu/DigitalRune
        public override void Render(IList<SceneNode> nodes, RenderContext context, RenderOrder order)
        {
            ThrowIfDisposed();

              if (nodes == null)
            throw new ArgumentNullException("nodes");
              if (context == null)
            throw new ArgumentNullException("context");

              int numberOfNodes = nodes.Count;
              if (numberOfNodes == 0)
            return;

              context.Validate(_effect);
              context.ThrowIfCameraMissing();

              float deltaTime = (float)context.DeltaTime.TotalSeconds;

              var graphicsService = context.GraphicsService;
              var graphicsDevice = graphicsService.GraphicsDevice;
              var renderTargetPool = graphicsService.RenderTargetPool;

              var cameraNode = context.CameraNode;
              Projection projection = cameraNode.Camera.Projection;
              Pose view = cameraNode.PoseWorld.Inverse;

              // Near the camera we push the waves down so that the water surface does not cut the camera's near plane.
              // Get largest vector from camera to near plane corners.
              float nearPlaneRadius =
            new Vector3F(Math.Max(Math.Abs(projection.Right), Math.Abs(projection.Left)),
                     Math.Max(Math.Abs(projection.Top), Math.Abs(projection.Bottom)),
                     projection.Near
                    ).Length;

              var originalSourceTexture = context.SourceTexture;

              // Update SceneNode.LastFrame for all visible nodes.
              int frame = context.Frame;
              cameraNode.LastFrame = frame;

              var savedRenderState = new RenderStateSnapshot(graphicsDevice);

              // Water surface is opaque.
              graphicsDevice.BlendState = BlendState.Opaque;

              #region ----- Common Effect Parameters -----

              _parameterView.SetValue(view);
              _parameterProjection.SetValue(projection);
              _parameterCameraParameters.SetValue(new Vector4(
            (Vector3)cameraNode.PoseWorld.Position,
            cameraNode.Camera.Projection.Far));

              var viewport = graphicsDevice.Viewport;
              _parameterViewportSize.SetValue(new Vector2(viewport.Width, viewport.Height));

              _parameterTime.SetValue((float)context.Time.TotalSeconds);

              // Query ambient and directional lights.
              var lightQuery = context.Scene.Query<GlobalLightQuery>(cameraNode, context);
              Vector3F ambientLight = Vector3F.Zero;
              if (lightQuery.AmbientLights.Count > 0)
              {
            var light = (AmbientLight)lightQuery.AmbientLights[0].Light;
            ambientLight = light.Color * light.Intensity * light.HdrScale;
              }

              _parameterAmbientLight.SetValue((Vector3)ambientLight);

              Vector3F directionalLightDirection = new Vector3F(0, -1, 0);
              Vector3F directionalLightIntensity = Vector3F.Zero;
              if (lightQuery.DirectionalLights.Count > 0)
              {
            var lightNode = lightQuery.DirectionalLights[0];
            var light = (DirectionalLight)lightNode.Light;
            directionalLightDirection = -lightNode.PoseWorld.Orientation.GetColumn(2);
            directionalLightIntensity = light.Color * light.SpecularIntensity * light.HdrScale;
              }

              _parameterDirectionalLightDirection.SetValue((Vector3)directionalLightDirection);
              _parameterDirectionalLightIntensity.SetValue((Vector3)directionalLightIntensity);

              _parameterGBuffer0.SetValue(context.GBuffer0);

              if (_parameterNoiseMap != null)
            _parameterNoiseMap.SetValue(_noiseMap);
              #endregion

              #region ----- Fog Parameters -----

              var fogNodes = context.Scene.Query<FogQuery>(cameraNode, context).FogNodes;
              SetFogParameters(fogNodes, cameraNode, directionalLightDirection);
              #endregion

              _parameterProjectedGridParameters.SetValue(new Vector3(
              ProjectedGridParameters.EdgeAttenuation,
              ProjectedGridParameters.DistanceAttenuationStart,
              ProjectedGridParameters.DistanceAttenuationEnd));

              for (int i = 0; i < numberOfNodes; i++)
              {
            var node = nodes[i] as WaterNode;
            if (node == null)
              continue;

            // Node is visible in current frame.
            node.LastFrame = frame;

            var data = node.RenderData as WaterRenderData;
            if (data == null)
            {
              data = new WaterRenderData();
              node.RenderData = data;
            }

            var water = node.Water;
            bool isCameraUnderwater = node.EnableUnderwaterEffect && node.IsUnderwater(cameraNode.PoseWorld.Position);

            #region ----- Wave bending -----

            // Waves should not cut the near plane. --> Bend waves up or down if necessary.

            // Limits
            float upperLimit; // Waves must not move above this value.
            float lowerLimit; // Waves must not move below this value.

            // Bending fades in over interval [bendStart, bendEnd]:
            //   distance ≤ bendStart ............. Wave is bent up or down.
            //   bendStart < distance < bendEnd ... Lerp between normal wave and bent wave.
            //   distance ≥ bendEnd ............... Normal wave.
            float bendStart = 1 * nearPlaneRadius;
            float bendEnd = 10 * nearPlaneRadius;

            if (!isCameraUnderwater)
            {
              // Bend waves down below the camera.
              upperLimit = cameraNode.PoseWorld.Position.Y - nearPlaneRadius;
              lowerLimit = -1e20f;

              if (node.EnableUnderwaterEffect)
              {
            if (node.Waves == null || node.Waves.DisplacementMap == null)
            {
              // No displacement map: wave bending does not work because the surface
              // is usually not tessellated. We have to render the underwater geometry
              // whenever the camera near plane might cut the water surface.
              if (node.Volume == null)
              {
                // Test water plane.
                isCameraUnderwater = (cameraNode.PoseWorld.Position.Y - nearPlaneRadius) < node.PoseWorld.Position.Y;
              }
              else
              {
                // Test water AABB.
                var aabb = node.Aabb;
                aabb.Minimum -= new Vector3F(nearPlaneRadius);
                aabb.Maximum += new Vector3F(nearPlaneRadius);
                isCameraUnderwater = GeometryHelper.HaveContact(aabb, cameraNode.PoseWorld.Position);
              }
            }
              }
            }
            else
            {
              // Camera is underwater, bend triangles up above camera.
              upperLimit = 1e20f;
              lowerLimit = cameraNode.PoseWorld.Position.Y + nearPlaneRadius;
            }

            _parameterCameraMisc.SetValue(new Vector4(upperLimit, lowerLimit, bendStart, bendEnd));
            #endregion

            // Update the submesh for the given water volume.
            data.UpdateSubmesh(graphicsService, node);

            #region ----- Scroll Normal Maps -----

            // We update the normal map offsets once(!) per frame.
            // Note: We could skip the offsets and compute all in the shader using absolute
            // time instead of deltaTime, but then the user cannot change the NormalMapVelocity
            // smoothly.
            if (data.LastNormalUpdateFrame != frame)
            {
              data.LastNormalUpdateFrame = frame;

              var baseVelocity = (node.Flow != null) ? node.Flow.BaseVelocity : Vector3F.Zero;

              // Increase offset.
              // (Note: We subtract the value and divide by the scale because if the normal map
              // should scroll to the right, the texture coordinates have to move in the opposite direction.)
              data.NormalMapOffset0.X -= (water.NormalMap0Velocity.X + baseVelocity.X) * deltaTime / water.NormalMap0Scale;
              data.NormalMapOffset0.Y -= (water.NormalMap0Velocity.Z + baseVelocity.Y) * deltaTime / water.NormalMap0Scale;
              data.NormalMapOffset1.X -= (water.NormalMap1Velocity.X + baseVelocity.X) * deltaTime / water.NormalMap1Scale;
              data.NormalMapOffset1.Y -= (water.NormalMap1Velocity.Z + baseVelocity.Y) * deltaTime / water.NormalMap1Scale;

              // Keep only the fractional part to avoid overflow.
              data.NormalMapOffset0.X = MathHelper.Frac(data.NormalMapOffset0.X);
              data.NormalMapOffset0.Y = MathHelper.Frac(data.NormalMapOffset0.Y);
              data.NormalMapOffset1.X = MathHelper.Frac(data.NormalMapOffset1.X);
              data.NormalMapOffset1.Y = MathHelper.Frac(data.NormalMapOffset1.Y);
            }
            #endregion

            _parameterSurfaceLevel.SetValue(node.PoseWorld.Position.Y);

            #region ----- Reflection Parameters -----

            if (node.PlanarReflection != null
            && node.PlanarReflection.ActualIsEnabled
            && node.PlanarReflection.RenderToTexture.Texture is Texture2D)
            {
              // Planar reflection.
              var renderToTexture = node.PlanarReflection.RenderToTexture;
              var texture = (Texture2D)renderToTexture.Texture;

              _parameterReflectionTypeParameters.SetValue(new Vector2(0, 1));
              _parameterReflectionMatrix.SetValue((Matrix)renderToTexture.TextureMatrix);
              _parameterReflectionTextureSize.SetValue(new Vector2(texture.Width, texture.Height));
              if (_parameterPlanarReflectionMap != null)
            _parameterPlanarReflectionMap.SetValue(texture);

              _parameterReflectionParameters.SetValue(new Vector4(
            (Vector3)water.ReflectionColor,
            water.ReflectionDistortion));
            }
            else if (node.SkyboxReflection != null)
            {
              // Cube map reflection.
              var rgbmEncoding = node.SkyboxReflection.Encoding as RgbmEncoding;
              float rgbmMax = 1;
              if (rgbmEncoding != null)
            rgbmMax = GraphicsHelper.ToGamma(rgbmEncoding.Max);
              else if (!(node.SkyboxReflection.Encoding is SRgbEncoding))
            throw new NotImplementedException("The reflected skybox must be encoded using sRGB or RGBM.");

              _parameterReflectionTypeParameters.SetValue(new Vector2(1, rgbmMax));

              // Cube maps are left handed --> Sample with inverted z. (Otherwise, the
              // cube map and objects or texts in it are mirrored.)
              var mirrorZ = Matrix44F.CreateScale(1, 1, -1);
              Matrix33F orientation = node.SkyboxReflection.PoseWorld.Orientation;
              _parameterReflectionMatrix.SetValue((Matrix)(new Matrix44F(orientation, Vector3F.Zero) * mirrorZ));

              if (_parameterCubeReflectionMap != null)
            _parameterCubeReflectionMap.SetValue(node.SkyboxReflection.Texture);

              _parameterReflectionParameters.SetValue(new Vector4(
            (Vector3)(water.ReflectionColor * node.SkyboxReflection.Color),
            water.ReflectionDistortion));
            }
            else
            {
              // No reflection texture. The reflection shows only the ReflectionColor.
              _parameterReflectionTypeParameters.SetValue(new Vector2(-1, 1));
              _parameterReflectionParameters.SetValue(new Vector4(
            (Vector3)water.ReflectionColor,
            water.ReflectionDistortion));
            }
            #endregion

            #region ----- Refraction Parameters -----

            // If we do not have a source texture, resolve the current render target
            // and immediately rebuild it.
            if (context.SourceTexture == null && context.RenderTarget != null)
            {
              // Get RebuildZBufferRenderer from RenderContext.
              RebuildZBufferRenderer rebuildZBufferRenderer = null;
              object obj;
              if (context.Data.TryGetValue(RenderContextKeys.RebuildZBufferRenderer, out obj))
            rebuildZBufferRenderer = obj as RebuildZBufferRenderer;

              // If we didn't find the renderer in the context, use a default instance.
              if (rebuildZBufferRenderer == null)
              {
            if (_defaultRebuildZBufferRenderer == null)
              _defaultRebuildZBufferRenderer = new RebuildZBufferRenderer(graphicsService);

            rebuildZBufferRenderer = _defaultRebuildZBufferRenderer;
              }

              context.SourceTexture = context.RenderTarget;
              context.RenderTarget = renderTargetPool.Obtain2D(new RenderTargetFormat(context.RenderTarget));
              graphicsDevice.SetRenderTarget(context.RenderTarget);
              graphicsDevice.Viewport = context.Viewport;
              rebuildZBufferRenderer.Render(context, context.SourceTexture);
            }

            _parameterRefractionTexture.SetValue(context.SourceTexture);
            _parameterRefractionParameters.SetValue(new Vector4(
              ((Vector3)water.RefractionColor),
              water.RefractionDistortion));
            #endregion

            #region ----- Other Water Effect Parameters -----

            if (water.NormalMap0 != null)
            {
              if (_parameterNormalMap0 != null)
            _parameterNormalMap0.SetValue(water.NormalMap0);

              _parameterNormalMap0Parameters.SetValue(new Vector4(
            1 / water.NormalMap0Scale,
            data.NormalMapOffset0.X,
            data.NormalMapOffset0.Y,
            water.NormalMap0Strength));
            }
            else
            {
              if (_parameterNormalMap0 != null)
            _parameterNormalMap0.SetValue(_graphicsService.GetDefaultNormalTexture());
              _parameterNormalMap0Parameters.SetValue(new Vector4(1, 0, 0, 0));
            }

            if (water.NormalMap1 != null)
            {
              if (_parameterNormalMap1 != null)
            _parameterNormalMap1.SetValue(water.NormalMap1);

              _parameterNormalMap1Parameters.SetValue(new Vector4(
            1 / water.NormalMap1Scale,
            data.NormalMapOffset1.X,
            data.NormalMapOffset1.Y,
            water.NormalMap1Strength));
            }
            else
            {
              if (_parameterNormalMap1 != null)
            _parameterNormalMap1.SetValue(_graphicsService.GetDefaultNormalTexture());
              _parameterNormalMap1Parameters.SetValue(new Vector4(1, 0, 0, 0));
            }

            _parameterSpecularParameters.SetValue(new Vector4((Vector3)water.SpecularColor, water.SpecularPower));
            _parameterUnderwaterFogParameters.SetValue((Vector3)water.UnderwaterFogDensity);
            _parameterFresnelParameters.SetValue(new Vector3(water.FresnelBias, water.FresnelScale, water.FresnelPower));
            _parameterIntersectionSoftness.SetValue(water.IntersectionSoftness);

            // We apply some arbitrary scale factors to the water and scatter colors to
            // move the values into a similar range from the user's perspective.
            _parameterWaterColor.SetValue((Vector3)water.WaterColor / 10);
            _parameterScatterColor.SetValue((Vector3)water.ScatterColor);

            if (_parameterFoamMap != null)
            {
              _parameterFoamMap.SetValue(water.FoamMap);
              _parameterFoamParameters0.SetValue(new Vector4(
            (Vector3)water.FoamColor,
            1 / water.FoamMapScale));

              _parameterFoamParameters1.SetValue(new Vector4(
            water.FoamDistortion,
            water.FoamShoreIntersection,
            // Enable crest foam only if we have waves.
            node.Waves != null ? water.FoamCrestMin : float.MaxValue,
            water.FoamCrestMax));
            }

            _parameterCausticsSampleCount.SetValue(water.CausticsSampleCount);
            _parameterCausticsParameters.SetValue(new Vector4(
              water.CausticsSampleOffset,
              water.CausticsDistortion,
              water.CausticsPower,
              water.CausticsIntensity));
            #endregion

            #region ----- Wave Map -----

            var waves = node.Waves;

            // The displacement map can be null but the normal map must not be null.
            if (waves != null && waves.NormalMap != null)
            {
              // Type: 0 = Tiling, 1 = Clamp.
              float waveType;
              if (waves.IsTiling)
            waveType = 0;
              else
            waveType = 1;

              _parameterWaveMapParameters.SetValue(new Vector4(
            1.0f / waves.TileSize,                          // Scale
            0.5f - waves.TileCenter.X / waves.TileSize,     // Offset X
            0.5f - waves.TileCenter.Z / waves.TileSize,     // Offset Y
            waveType));

              if (_parameterDisplacementTexture != null)
              {
            if (waves.DisplacementMap != null)
              _parameterDisplacementTexture.SetValue(waves.DisplacementMap);
            else
              _parameterDisplacementTexture.SetValue(graphicsService.GetDefaultTexture2DBlack4F());
              }

              _parameterWaveMapSize.SetValue(new Vector2(
            waves.NormalMap.Width,
            waves.NormalMap.Height));
              if (_parameterWaveNormalMap != null)
            _parameterWaveNormalMap.SetValue(waves.NormalMap);
            }
            else
            {
              _parameterWaveMapParameters.SetValue(new Vector4(0, 0, 0, 0));
            }
            #endregion

            #region ----- Flow -----

            if (node.Flow != null)
            {
              var flow = node.Flow;
              float flowMapSpeed = (flow.FlowMap != null) ? flow.FlowMapSpeed : 0;
              _parameterFlowParameters0.SetValue(new Vector4(flow.SurfaceSlopeSpeed, flowMapSpeed, flow.CycleDuration, flow.MaxSpeed));
              _parameterFlowParameters1.SetValue(new Vector3(flow.MinStrength, 1 / flow.NoiseMapScale, flow.NoiseMapStrength));

              if (_parameterFlowMap != null)
            _parameterFlowMap.SetValue(flow.FlowMap);

              // Get world space (x, z) to texture space matrix.
              Aabb aabb = node.Shape.GetAabb();
              Vector3F extent = aabb.Extent;
              Matrix44F m = Matrix44F.CreateScale(1 / extent.X, 1, 1 / extent.Z)
                        * Matrix44F.CreateTranslation(-aabb.Minimum.X, 0, -aabb.Minimum.Z)
                        * Matrix44F.CreateScale(1 / node.ScaleLocal.X, 1, 1 / node.ScaleLocal.Z)
                        * node.PoseWorld.Inverse;

              // We use a 3x3 2d scale/rotation/translation matrix, ignoring the y component.
              _parameterFlowMapTextureMatrix.SetValue(new Matrix(m.M00, m.M20, 0, 0,
                                                             m.M02, m.M22, 0, 0,
                                                             m.M03, m.M23, 1, 0,
                                                             0, 0, 0, 0));

              // Get local flow direction to world flow direction matrix.
              // We use a 2x2 2d rotation matrix, ignoring the y component.
              var r = node.PoseWorld.Orientation;
              _parameterFlowMapWorldMatrix.SetValue(new Matrix(r.M00, r.M20, 0, 0,
                                                           r.M02, r.M22, 0, 0,
                                                           0, 0, 0, 0,
                                                           0, 0, 0, 0));
            }
            else
            {
              _parameterFlowParameters0.SetValue(new Vector4(0, 0, 0, 0));
              _parameterFlowParameters1.SetValue(new Vector3(0, 0, 0));
            }
            #endregion

            if (isCameraUnderwater)
              RenderUnderwaterGeometry(node, cameraNode);

            RenderSurface(node, cameraNode, isCameraUnderwater);
              }

              // Reset texture effect parameters.
              _parameterGBuffer0.SetValue((Texture2D)null);
              _parameterRefractionTexture.SetValue((Texture2D)null);

              if (_parameterPlanarReflectionMap != null)
            _parameterPlanarReflectionMap.SetValue((Texture2D)null);

              if (_parameterCubeReflectionMap != null)
            _parameterCubeReflectionMap.SetValue((TextureCube)null);

              if (_parameterNormalMap0 != null)
            _parameterNormalMap0.SetValue((Texture2D)null);

              if (_parameterNormalMap1 != null)
            _parameterNormalMap1.SetValue((Texture2D)null);

              if (_parameterDisplacementTexture != null)
            _parameterDisplacementTexture.SetValue((Texture2D)null);

              if (_parameterNoiseMap != null)
            _parameterNoiseMap.SetValue((Texture2D)null);

              if (_parameterWaveNormalMap != null)
            _parameterWaveNormalMap.SetValue((Texture2D)null);

              if (_parameterFlowMap != null)
            _parameterFlowMap.SetValue((Texture2D)null);

              // This seems to be necessary because the Displacement Texture (vertex texture!)
              // is not automatically removed from the texture stage, and the WaterWavesRenderer
              // cannot write into it. XNA Bug!?
              _passProjectedGrid.Apply();

              savedRenderState.Restore();

              // Restore original render context.
              if (originalSourceTexture == null)
              {
            // Current render target has been resolved and used as source texture.
            // A new render target (from pool) has been set. (See region "Refraction Parameters".)
            // --> Previous render target needs to be recycled.
            renderTargetPool.Recycle(context.SourceTexture);
              }

              context.SourceTexture = originalSourceTexture;
        }
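Among many other parameters, the water renderer above scrolls two normal maps by accumulating per-frame texture coordinate offsets and keeping only the fractional part to avoid overflow. A hedged sketch of that update for a single normal map (ScrollOffset is an illustrative helper; the real code updates the offsets stored in WaterRenderData directly):

        // Minimal sketch of the normal map scrolling used above: move the texture
        // coordinates against the scroll velocity and wrap them to [0, 1).
        static Vector2 ScrollOffset(Vector2 offset, Vector3F velocity, float deltaTime, float mapScale)
        {
            offset.X -= velocity.X * deltaTime / mapScale;
            offset.Y -= velocity.Z * deltaTime / mapScale;   // Z of the world velocity maps to V.
            offset.X = MathHelper.Frac(offset.X);
            offset.Y = MathHelper.Frac(offset.Y);
            return offset;
        }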
Code example #20
        public override void Render(IList<SceneNode> nodes, RenderContext context, RenderOrder order)
        {
            if (nodes == null)
            {
                throw new ArgumentNullException("nodes");
            }
            if (context == null)
            {
                throw new ArgumentNullException("context");
            }

            int numberOfNodes = nodes.Count;

            if (numberOfNodes == 0)
            {
                return;
            }

            context.ThrowIfCameraMissing();
            context.ThrowIfSceneMissing();

            var originalRenderTarget  = context.RenderTarget;
            var originalViewport      = context.Viewport;
            var originalReferenceNode = context.ReferenceNode;

            // Camera properties
            var cameraNode = context.CameraNode;
            var cameraPose = cameraNode.PoseWorld;
            var projection = cameraNode.Camera.Projection;

            if (!(projection is PerspectiveProjection))
            {
                throw new NotImplementedException(
                          "Cascaded shadow maps not yet implemented for scenes with orthographic camera.");
            }

            float fieldOfViewY = projection.FieldOfViewY;
            float aspectRatio  = projection.AspectRatio;

            // Update SceneNode.LastFrame for all visible nodes.
            int frame = context.Frame;

            cameraNode.LastFrame = frame;

            // The scene node renderer should use the light camera instead of the player camera.
            context.CameraNode = _orthographicCameraNode;
            context.Technique  = "Directional";

            var graphicsService  = context.GraphicsService;
            var graphicsDevice   = graphicsService.GraphicsDevice;
            var savedRenderState = new RenderStateSnapshot(graphicsDevice);

            for (int i = 0; i < numberOfNodes; i++)
            {
                var lightNode = nodes[i] as LightNode;
                if (lightNode == null)
                {
                    continue;
                }

                var shadow = lightNode.Shadow as CascadedShadow;
                if (shadow == null)
                {
                    continue;
                }

                // LightNode is visible in current frame.
                lightNode.LastFrame = frame;

                var format = new RenderTargetFormat(
                    shadow.PreferredSize * shadow.NumberOfCascades,
                    shadow.PreferredSize,
                    false,
                    shadow.Prefer16Bit ? SurfaceFormat.HalfSingle : SurfaceFormat.Single,
                    DepthFormat.Depth24);

                bool allLocked = shadow.IsCascadeLocked[0] && shadow.IsCascadeLocked[1] && shadow.IsCascadeLocked[2] && shadow.IsCascadeLocked[3];

                if (shadow.ShadowMap == null)
                {
                    shadow.ShadowMap = graphicsService.RenderTargetPool.Obtain2D(format);
                    allLocked        = false; // Need to render shadow map.
                }

                // If we can reuse the whole shadow map texture, abort early.
                if (allLocked)
                {
                    continue;
                }

                _csmSplitDistances[0] = projection.Near;
                _csmSplitDistances[1] = shadow.Distances.X;
                _csmSplitDistances[2] = shadow.Distances.Y;
                _csmSplitDistances[3] = shadow.Distances.Z;
                _csmSplitDistances[4] = shadow.Distances.W;

                // (Re-)Initialize the array for cached matrices in the CascadedShadow.
                if (shadow.ViewProjections == null || shadow.ViewProjections.Length < shadow.NumberOfCascades)
                {
                    shadow.ViewProjections = new Matrix[shadow.NumberOfCascades];
                }

                // Initialize the projection matrices to an empty matrix.
                // The unused matrices must not contain valid projections, so that
                // CsmComputeSplitOptimized in CascadedShadowMask.fxh does not choose
                // the wrong cascade.
                for (int j = 0; j < shadow.ViewProjections.Length; j++)
                {
                    if (!shadow.IsCascadeLocked[j]) // Do not delete cached info for cached cascade.
                    {
                        shadow.ViewProjections[j] = new Matrix();
                    }
                }

                // If some cascades are cached, we have to create a new shadow map and copy
                // the old cascades into the new shadow map.
                if (shadow.IsCascadeLocked[0] || shadow.IsCascadeLocked[1] || shadow.IsCascadeLocked[2] || shadow.IsCascadeLocked[3])
                {
                    var oldShadowMap = shadow.ShadowMap;
                    shadow.ShadowMap = graphicsService.RenderTargetPool.Obtain2D(new RenderTargetFormat(oldShadowMap));

                    graphicsDevice.SetRenderTarget(shadow.ShadowMap);
                    graphicsDevice.Clear(Color.White);

                    var spriteBatch = graphicsService.GetSpriteBatch();
                    spriteBatch.Begin(SpriteSortMode.Deferred, BlendState.Opaque, SamplerState.PointClamp, DepthStencilState.None, RasterizerState.CullNone);
                    for (int cascade = 0; cascade < shadow.NumberOfCascades; cascade++)
                    {
                        if (shadow.IsCascadeLocked[cascade])
                        {
                            var viewport  = GetViewport(shadow, cascade);
                            var rectangle = new Rectangle(viewport.X, viewport.Y, viewport.Width, viewport.Height);
                            spriteBatch.Draw(oldShadowMap, rectangle, rectangle, Color.White);
                        }
                    }
                    spriteBatch.End();

                    graphicsService.RenderTargetPool.Recycle(oldShadowMap);
                }
                else
                {
                    graphicsDevice.SetRenderTarget(shadow.ShadowMap);
                    graphicsDevice.Clear(Color.White);
                }

                context.RenderTarget             = shadow.ShadowMap;
                graphicsDevice.DepthStencilState = DepthStencilState.Default;
                graphicsDevice.RasterizerState   = RasterizerState.CullCounterClockwise;
                graphicsDevice.BlendState        = BlendState.Opaque;

                context.ReferenceNode = lightNode;
                context.Object        = shadow;
                context.ShadowNear    = 0; // Obsolete: Only kept for backward compatibility.

                bool shadowMapContainsSomething = false;
                for (int split = 0; split < shadow.NumberOfCascades; split++)
                {
                    if (shadow.IsCascadeLocked[split])
                    {
                        continue;
                    }

                    context.Data[RenderContextKeys.ShadowTileIndex] = CubeMapShadowMapRenderer.BoxedIntegers[split];

                    // near/far of this split.
                    float near = _csmSplitDistances[split];
                    float far  = Math.Max(_csmSplitDistances[split + 1], near + Numeric.EpsilonF);

                    // Create a view volume for this split.
                    _splitVolume.SetFieldOfView(fieldOfViewY, aspectRatio, near, far);

                    // Find the bounding sphere of the split camera frustum.
                    Vector3 center;
                    float   radius;
                    GetBoundingSphere(_splitVolume, out center, out radius);

                    // Extend radius to get enough border for filtering.
                    int shadowMapSize = shadow.ShadowMap.Height;

                    // We could extend by (ShadowMapSize + BorderTexels) / ShadowMapSize.
                    // Add at least 1 texel. (This way, the shadow mask shader can clamp the UVs to the
                    // texture rectangle without considering a half-texel border to avoid sampling outside.)
                    radius *= (float)(shadowMapSize + 1) / shadowMapSize;

                    // Convert center to light space.
                    Pose lightPose = lightNode.PoseWorld;
                    center = cameraPose.ToWorldPosition(center);
                    center = lightPose.ToLocalPosition(center);

                    // Snap center to texel positions to avoid shadow swimming.
                    SnapPositionToTexels(ref center, 2 * radius, shadowMapSize);

                    // Convert center back to world space.
                    center = lightPose.ToWorldPosition(center);

                    Matrix  orientation            = lightPose.Orientation;
                    Vector3 backward               = orientation.GetColumn(2);
                    var     orthographicProjection = (OrthographicProjection)_orthographicCameraNode.Camera.Projection;

                    // Create a tight orthographic frustum around the cascade's bounding sphere.
                    orthographicProjection.SetOffCenter(-radius, radius, -radius, radius, 0, 2 * radius);
                    Vector3 cameraPosition = center + radius * backward;
                    Pose    frustumPose    = new Pose(cameraPosition, orientation);
                    Pose    view           = frustumPose.Inverse;
                    shadow.ViewProjections[split] = (Matrix)view * orthographicProjection;

                    // Convert depth bias from "texel" to light space [0, 1] depth.
                    // Minus sign to move receiver depth closer to light. Divide by depth to normalize.
                    float unitsPerTexel = orthographicProjection.Width / shadow.ShadowMap.Height;
                    shadow.EffectiveDepthBias[split] = -shadow.DepthBias[split] * unitsPerTexel / orthographicProjection.Depth;

                    // Convert normal offset from "texel" to world space.
                    shadow.EffectiveNormalOffset[split] = shadow.NormalOffset[split] * unitsPerTexel;

                    // For rendering the shadow map, move the near plane back by MinLightDistance
                    // to catch occluders in front of the cascade.
                    orthographicProjection.Near       = -shadow.MinLightDistance;
                    _orthographicCameraNode.PoseWorld = frustumPose;

                    // Set a viewport to render a tile in the texture atlas.
                    graphicsDevice.Viewport = GetViewport(shadow, split);
                    context.Viewport        = graphicsDevice.Viewport;

                    shadowMapContainsSomething |= RenderCallback(context);
                }

                // Recycle shadow map if empty.
                if (!shadowMapContainsSomething)
                {
                    graphicsService.RenderTargetPool.Recycle(shadow.ShadowMap);
                    shadow.ShadowMap = null;
                }
            }

            graphicsDevice.SetRenderTarget(null);
            savedRenderState.Restore();

            context.CameraNode    = cameraNode;
            context.ShadowNear    = float.NaN;
            context.Technique     = null;
            context.RenderTarget  = originalRenderTarget;
            context.Viewport      = originalViewport;
            context.ReferenceNode = originalReferenceNode;
            context.Object        = null;
            context.Data[RenderContextKeys.ShadowTileIndex] = null;
        }
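The cascade loop above snaps the bounding sphere center to shadow map texels (SnapPositionToTexels) before building the orthographic frustum so that the shadow does not shimmer when the camera moves. That method's body is not shown; a common way to implement such a snap, given the frustum size and the shadow map resolution, is sketched below (an assumption about the general technique, not the library's actual implementation):

        // Hedged sketch of texel snapping for a shadow cascade: quantize the
        // light-space position to whole texels of the cascade's shadow map tile.
        static void SnapPositionToTexels(ref Vector3 position, float frustumSize, int shadowMapSize)
        {
            float texelSize = frustumSize / shadowMapSize;
            position.X = (float)Math.Floor(position.X / texelSize) * texelSize;
            position.Y = (float)Math.Floor(position.Y / texelSize) * texelSize;
            // The depth component does not need to be snapped.
        }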
Code example #21
        /// <inheritdoc/>
        public override void Render(IList<SceneNode> nodes, RenderContext context, RenderOrder order)
        {
            ThrowIfDisposed();

              if (nodes == null)
            throw new ArgumentNullException("nodes");
              if (context == null)
            throw new ArgumentNullException("context");

              int numberOfNodes = nodes.Count;
              if (numberOfNodes == 0)
            return;

              context.Validate(_effect);

              var originalRenderTarget = context.RenderTarget;
              var originalViewport = context.Viewport;

              var graphicsService = context.GraphicsService;
              var graphicsDevice = graphicsService.GraphicsDevice;
              //var renderTargetPool = graphicsService.RenderTargetPool;

              var savedRenderState = new RenderStateSnapshot(graphicsDevice);
              graphicsDevice.BlendState = BlendState.Opaque;
              graphicsDevice.RasterizerState = RasterizerState.CullNone;
              graphicsDevice.DepthStencilState = DepthStencilState.None;

              int frame = context.Frame;

              for (int nodeIndex = 0; nodeIndex < numberOfNodes; nodeIndex++)
              {
            var node = nodes[nodeIndex] as WaterNode;
            if (node == null)
              continue;

            var waves = node.Waves as OceanWaves;
            if (waves == null)
              continue;

            // We update the waves only once per frame.
            if (waves.LastFrame == frame)
              continue;

            waves.LastFrame = frame;

            float time = (float)context.Time.TotalSeconds;

            // Initialize h0 spectrum. Perform CPU FFT.
            waves.Update(graphicsDevice, time);

            int n = waves.TextureSize;

            // Allocate the textures in the first frame and whenever the TextureSize changes.
            if (waves.DisplacementSpectrum == null || waves.DisplacementSpectrum.Width != n)
            {
              waves.DisplacementSpectrum.SafeDispose();
              waves.NormalSpectrum.SafeDispose();
              waves.DisplacementMap.SafeDispose();
              waves.NormalMap.SafeDispose();

              waves.DisplacementSpectrum = new RenderTarget2D(_graphicsService.GraphicsDevice, n, n, false, SurfaceFormat.Vector4, DepthFormat.None);
              waves.NormalSpectrum = new RenderTarget2D(_graphicsService.GraphicsDevice, n, n, false, SurfaceFormat.Vector4, DepthFormat.None);
              waves.DisplacementMap = new RenderTarget2D(_graphicsService.GraphicsDevice, n, n, false, SurfaceFormat.Vector4, DepthFormat.None);
              waves.NormalMap = new RenderTarget2D(
            _graphicsService.GraphicsDevice,
            n,
            n,
            true,
            SurfaceFormat.Color,
            DepthFormat.None);
            }

            // Create spectrum (h, D, N) for current time from h0.
            _renderTargetBindings[0] = new RenderTargetBinding(waves.DisplacementSpectrum);
            _renderTargetBindings[1] = new RenderTargetBinding(waves.NormalSpectrum);
            graphicsDevice.SetRenderTargets(_renderTargetBindings);
            _parameterSize.SetValue((float)n);
            _parameterSpectrumParameters.SetValue(new Vector4(
              waves.TileSize,
              waves.Gravity,
              time,
              waves.HeightScale));
            _parameterSourceTexture.SetValue(waves.H0Spectrum);
            _passSpectrum.Apply();
            graphicsDevice.DrawFullScreenQuad();

            // Do inverse FFT.
            _fft.Process(
              context,
              false,
              waves.DisplacementSpectrum,
              waves.NormalSpectrum,
              (RenderTarget2D)waves.DisplacementMap,
              (RenderTarget2D)waves.NormalMap,
              waves.Choppiness);

            #region ----- Old Debugging Code -----

            // Create textures from CPU FFT data for debug visualization.
            //n = waves.CpuSize;
            //var s0Data = new Vector4[n * n];
            //var s1Data = new Vector4[n * n];
            //var s0 = new RenderTarget2D(_graphicsService.GraphicsDevice, n, n, false, SurfaceFormat.Vector4, DepthFormat.None);
            //var s1 = new RenderTarget2D(_graphicsService.GraphicsDevice, n, n, false, SurfaceFormat.Vector4, DepthFormat.None);
            //for (int y = 0; y < n; y++)
            //{
            //  for (int x = 0; x < n; x++)
            //  {
            //s0Data[y * n + x] = new Vector4(
            //  -waves._D[x, y].X * waves.Choppiness,
            //  waves._h[x, y].X * 1,
            //  -waves._D[x, y].Y * waves.Choppiness,
            //  1);

            //s1Data[y * n + x] = new Vector4(
            //  waves._N[x, y].X,
            //  waves._N[x, y].Y,
            //  0,
            //  0);
            //  }
            //}
            //s0.SetData(s0Data);
            //s1.SetData(s1Data);
            //WaterSample._t0 = s0;
            //WaterSample._t1 = waves.DisplacementMap;
            #endregion
              }

              savedRenderState.Restore();
              graphicsDevice.SetRenderTarget(null);
              context.RenderTarget = originalRenderTarget;
              context.Viewport = originalViewport;

              _renderTargetBindings[0] = default(RenderTargetBinding);
              _renderTargetBindings[1] = default(RenderTargetBinding);

              // Reset the texture stages. If a floating point texture is set, we get exceptions
              // when a sampler with bilinear filtering is set.
            #if !MONOGAME
              graphicsDevice.ResetTextures();
            #endif
        }
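The ocean wave renderer above lazily (re)creates its spectrum and wave textures whenever the TextureSize changes. A hedged sketch of that allocate-or-recreate pattern for a single render target (EnsureRenderTarget is an illustrative helper; the code above spells the four targets out explicitly):

        // Minimal sketch: (re)create a square render target when it is missing
        // or its size no longer matches the requested wave texture size.
        static RenderTarget2D EnsureRenderTarget(
            GraphicsDevice graphicsDevice, RenderTarget2D current, int size, SurfaceFormat format)
        {
            if (current != null && current.Width == size)
                return current;

            if (current != null)
                current.Dispose();

            return new RenderTarget2D(graphicsDevice, size, size, false, format, DepthFormat.None);
        }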
Code example #22
        public override void Render(IList<SceneNode> nodes, RenderContext context, RenderOrder order)
        {
            if (nodes == null)
            throw new ArgumentNullException("nodes");
              if (context == null)
            throw new ArgumentNullException("context");

              int numberOfNodes = nodes.Count;
              if (numberOfNodes == 0)
            return;

              context.Validate(_effect);
              context.ThrowIfCameraMissing();

              var graphicsDevice = _effect.GraphicsDevice;
              var savedRenderState = new RenderStateSnapshot(graphicsDevice);
              graphicsDevice.DepthStencilState = DepthStencilState.None;
              graphicsDevice.RasterizerState = RasterizerState.CullNone;
              graphicsDevice.BlendState = GraphicsHelper.BlendStateAdd;

              var viewport = graphicsDevice.Viewport;
              _parameterViewportSize.SetValue(new Vector2(viewport.Width, viewport.Height));
              _parameterGBuffer0.SetValue(context.GBuffer0);
              _parameterGBuffer1.SetValue(context.GBuffer1);

              var cameraNode = context.CameraNode;
              Matrix viewProjection = (Matrix)cameraNode.View * cameraNode.Camera.Projection;

              // Update SceneNode.LastFrame for all visible nodes.
              int frame = context.Frame;
              cameraNode.LastFrame = frame;

              var isHdrEnabled = context.IsHdrEnabled();

              for (int i = 0; i < numberOfNodes; i++)
              {
            var lightNode = nodes[i] as LightNode;
            if (lightNode == null)
              continue;

            var light = lightNode.Light as AmbientLight;
            if (light == null)
              continue;

            // LightNode is visible in current frame.
            lightNode.LastFrame = frame;

            float hdrScale = isHdrEnabled ? light.HdrScale : 1;
            _parameterLightColor.SetValue((Vector3)light.Color * light.Intensity * hdrScale);
            _parameterHemisphericAttenuation.SetValue(light.HemisphericAttenuation);

            Vector3F upWorld = lightNode.PoseWorld.ToWorldDirection(Vector3F.Up);
            _parameterUp.SetValue((Vector3)upWorld);

            if (lightNode.Clip != null)
            {
              var data = lightNode.RenderData as LightRenderData;
              if (data == null)
              {
            data = new LightRenderData();
            lightNode.RenderData = data;
              }

              data.UpdateClipSubmesh(context.GraphicsService, lightNode);

              graphicsDevice.DepthStencilState = GraphicsHelper.DepthStencilStateOnePassStencilFail;
              graphicsDevice.BlendState = GraphicsHelper.BlendStateNoColorWrite;

              _parameterWorldViewProjection.SetValue((Matrix)data.ClipMatrix * viewProjection);
              _passClip.Apply();
              data.ClipSubmesh.Draw();

              graphicsDevice.DepthStencilState = lightNode.InvertClip
                ? GraphicsHelper.DepthStencilStateStencilEqual0
                : GraphicsHelper.DepthStencilStateStencilNotEqual0;
              graphicsDevice.BlendState = GraphicsHelper.BlendStateAdd;
            }
            else
            {
              graphicsDevice.DepthStencilState = DepthStencilState.None;
            }

            _passLight.Apply();
            graphicsDevice.DrawFullScreenQuad();
              }

              savedRenderState.Restore();
        }
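The clip handling above lazily creates a LightRenderData instance and caches it in LightNode.RenderData, so the clip submesh only has to be (re)built when it is actually needed. A minimal sketch of that caching pattern, using simplified stand-in types rather than the DigitalRune classes:

        // Hypothetical, simplified stand-ins; not the DigitalRune types.
        class MyRenderData
        {
            public bool IsValid;

            public void RebuildClipSubmesh()   // Placeholder for UpdateClipSubmesh(graphicsService, lightNode).
            {
                IsValid = true;
            }
        }

        class MyLightNode
        {
            public object RenderData;          // Untyped cache slot, like SceneNode.RenderData.
        }

        static class RenderDataCache
        {
            public static MyRenderData GetOrCreate(MyLightNode node)
            {
                // Reuse the instance cached on the node, creating it on first use.
                var data = node.RenderData as MyRenderData;
                if (data == null)
                {
                    data = new MyRenderData();
                    node.RenderData = data;
                }

                if (!data.IsValid)
                    data.RebuildClipSubmesh();

                return data;
            }
        }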
Code example #23
0
File: PointBatch.cs Project: DireAussie/MinimalRune
        /// <summary>
        /// Draws the points.
        /// </summary>
        /// <param name="context">The render context.</param>
        /// <remarks>
        /// If <see cref="Effect"/> is <see langword="null"/>, then <see cref="Render"/> does nothing.
        /// </remarks>
        /// <exception cref="ArgumentNullException">
        /// <paramref name="context"/> is <see langword="null"/>.
        /// </exception>
        public void Render(RenderContext context)
        {
            if (context == null)
            {
                throw new ArgumentNullException("context");
            }

            if (Effect == null)
            {
                return;
            }

            int numberOfPoints = _points.Count;

            if (numberOfPoints <= 0)
            {
                return;
            }

            context.Validate(Effect);
            context.ThrowIfCameraMissing();

            // Render state.
            var graphicsDevice   = context.GraphicsService.GraphicsDevice;
            var savedRenderState = new RenderStateSnapshot(graphicsDevice);

            graphicsDevice.RasterizerState = RasterizerState.CullNone;

            // Reset the texture stages. If a floating point texture is set, we get exceptions
            // when a sampler with bilinear filtering is set.
            graphicsDevice.ResetTextures();

            // Effect parameters.
            Effect.Alpha              = 1;
            Effect.DiffuseColor       = new Vector3(1, 1, 1);
            Effect.LightingEnabled    = false;
            Effect.TextureEnabled     = false;
            Effect.VertexColorEnabled = true;
            Effect.World              = Matrix.Identity;
            Effect.View       = Matrix.Identity;
            Effect.Projection = Matrix.Identity;
            Effect.CurrentTechnique.Passes[0].Apply();

            // Get WorldViewProjection matrix.
            Matrix view       = (Matrix)context.CameraNode.View;
            Matrix projection = context.CameraNode.Camera.Projection;
            Matrix wvp        = Matrix.Multiply(view, projection);

            // The x and y point size relative to the viewport.
            Viewport viewport = graphicsDevice.Viewport;
            float    sizeX    = PointSize / viewport.Width;
            float    sizeY    = sizeX * viewport.Width / viewport.Height;

            // Resize buffer if necessary.
            ResizeBuffer(graphicsDevice, numberOfPoints);

            // Submit points. The loop is only needed if we have more points than can
            // be submitted with one draw call.
            var startPointIndex = 0;

            while (startPointIndex < numberOfPoints)
            {
                // Number of points in this batch.
                int pointsPerBatch = Math.Min(numberOfPoints - startPointIndex, _buffer.Length / 6);

                // Create vertices for points. All positions are directly in clip space!
                for (int i = 0; i < pointsPerBatch; i++)
                {
                    var point = _points[startPointIndex + i];

                    // Transform point position to clip space.
                    Vector3 positionWorld = (Vector3)point.Position;
                    Vector3 positionClip;
                    Vector3.Transform(ref positionWorld, ref wvp, out positionClip);
                    float w = (float)((double)positionWorld.X * wvp.M14 + (double)positionWorld.Y * wvp.M24 + (double)positionWorld.Z * wvp.M34 + wvp.M44);

                    // Homogeneous divide.
                    positionClip /= w;

                    // 2 triangles create a point quad. Clip space goes from -1 to 1, therefore
                    // we do not need to divide sizeX and sizeY by 2.
                    Vector3 bottomLeft  = positionClip + new Vector3(-sizeX, +sizeY, 0);
                    Vector3 bottomRight = positionClip + new Vector3(+sizeX, +sizeY, 0);
                    Vector3 topLeft     = positionClip + new Vector3(-sizeX, -sizeY, 0);
                    Vector3 topRight    = positionClip + new Vector3(+sizeX, -sizeY, 0);
                    _buffer[i * 6 + 0].Position = bottomLeft;
                    _buffer[i * 6 + 0].Color    = point.Color;
                    _buffer[i * 6 + 1].Position = bottomRight;
                    _buffer[i * 6 + 1].Color    = point.Color;
                    _buffer[i * 6 + 2].Position = topLeft;
                    _buffer[i * 6 + 2].Color    = point.Color;
                    _buffer[i * 6 + 3].Position = bottomRight;
                    _buffer[i * 6 + 3].Color    = point.Color;
                    _buffer[i * 6 + 4].Position = topRight;
                    _buffer[i * 6 + 4].Color    = point.Color;
                    _buffer[i * 6 + 5].Position = topLeft;
                    _buffer[i * 6 + 5].Color    = point.Color;
                }

                // Draw triangles.
                graphicsDevice.DrawUserPrimitives(PrimitiveType.TriangleList, _buffer, 0, pointsPerBatch * 2);

                startPointIndex += pointsPerBatch;
            }

            savedRenderState.Restore();
        }
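The vertex loop above transforms every point into clip space on the CPU, performs the homogeneous divide by w manually and expands the result into a screen-aligned quad. A standalone sketch of the same math using System.Numerics (which, like XNA, uses the row-vector convention, so a position is transformed as p * M):

        using System.Numerics;

        static class PointQuad
        {
            // Returns the four corners of a screen-aligned quad around a world-space point.
            // sizeX/sizeY are the half-extents of the quad in clip space
            // (e.g. pointSize / viewport width, as in the Render method above).
            public static Vector3[] GetCorners(Vector3 positionWorld, Matrix4x4 worldViewProjection,
                                               float sizeX, float sizeY)
            {
                // Transform to homogeneous clip space. (Vector3.Transform assumes w = 1 and
                // does not perform the perspective divide.)
                Vector3 clip = Vector3.Transform(positionWorld, worldViewProjection);
                float w = positionWorld.X * worldViewProjection.M14
                          + positionWorld.Y * worldViewProjection.M24
                          + positionWorld.Z * worldViewProjection.M34
                          + worldViewProjection.M44;

                // Homogeneous divide to get normalized device coordinates.
                clip /= w;

                // Clip space spans 2 units per axis, so pointSize / viewportSize is already the
                // half-extent of a quad that is pointSize pixels wide.
                // Two triangles can be built as (0, 1, 2) and (1, 3, 2), matching the winding above.
                return new[]
                {
                    clip + new Vector3(-sizeX, +sizeY, 0),   // corner 0
                    clip + new Vector3(+sizeX, +sizeY, 0),   // corner 1
                    clip + new Vector3(-sizeX, -sizeY, 0),   // corner 2
                    clip + new Vector3(+sizeX, -sizeY, 0),   // corner 3
                };
            }
        }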
Code example #24
0
File: OcclusionBuffer.cs Project: Zolniu/DigitalRune
        private void VisualizeBuffer(int level, RenderContext context, int passIndex)
        {
            if (context == null)
                throw new ArgumentNullException("context");

            var graphicsService = context.GraphicsService;
            var graphicsDevice = graphicsService.GraphicsDevice;
            var originalRenderState = new RenderStateSnapshot(graphicsDevice);

            var viewport = graphicsDevice.Viewport;
            _parameterTargetSize.SetValue(new Vector2(viewport.Width, viewport.Height));
            _parameterDebugLevel.SetValue((float)level);
            _effect.CurrentTechnique = _techniqueVisualize;
            _techniqueVisualize.Passes[passIndex].Apply();

            graphicsDevice.DepthStencilState = DepthStencilState.None;
            graphicsDevice.RasterizerState = RasterizerState.CullNone;
            // Do not override blend state: Use current blend state.

            graphicsDevice.DrawFullScreenQuad();

            originalRenderState.Restore();
        }
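_parameterDebugLevel selects which level of the hierarchical Z buffer (HZB) is displayed. The construction of _hzbLevels is not shown here; for a full mip chain the number of levels follows the usual rule, sketched below as a small helper (not part of the renderer above):

        using System;

        static class HzbHelper
        {
            // A full mip chain has floor(log2(max(width, height))) + 1 levels.
            public static int GetLevelCount(int width, int height)
            {
                int size = Math.Max(width, height);
                int levels = 1;
                while (size > 1)
                {
                    size /= 2;
                    levels++;
                }
                return levels;
            }
        }

For example, a 1024 × 512 occlusion buffer yields 11 levels, so the last selectable debug level would be 10.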
Code example #25
0
        public override void Render(IList<SceneNode> nodes, RenderContext context, RenderOrder order)
        {
              if (nodes == null)
                throw new ArgumentNullException("nodes");
              if (context == null)
                throw new ArgumentNullException("context");

              int numberOfNodes = nodes.Count;
              if (numberOfNodes == 0)
                return;

              context.ThrowIfCameraMissing();
              context.ThrowIfSceneMissing();

              var originalRenderTarget = context.RenderTarget;
              var originalViewport = context.Viewport;
              var originalReferenceNode = context.ReferenceNode;

              // Camera properties
              var cameraNode = context.CameraNode;
              var cameraPose = cameraNode.PoseWorld;
              var projection = cameraNode.Camera.Projection;
              if (!(projection is PerspectiveProjection))
                throw new NotImplementedException(
                  "Cascaded shadow maps not yet implemented for scenes with orthographic camera.");

              float fieldOfViewY = projection.FieldOfViewY;
              float aspectRatio = projection.AspectRatio;

              // Update SceneNode.LastFrame for all visible nodes.
              int frame = context.Frame;
              cameraNode.LastFrame = frame;

              // The scene node renderer should use the light camera instead of the player camera.
              context.CameraNode = _orthographicCameraNode;
              context.Technique = "Directional";

              var graphicsService = context.GraphicsService;
              var graphicsDevice = graphicsService.GraphicsDevice;
              var savedRenderState = new RenderStateSnapshot(graphicsDevice);

              for (int i = 0; i < numberOfNodes; i++)
              {
            var lightNode = nodes[i] as LightNode;
            if (lightNode == null)
              continue;

            var shadow = lightNode.Shadow as CascadedShadow;
            if (shadow == null)
              continue;

            // LightNode is visible in current frame.
            lightNode.LastFrame = frame;

            var format = new RenderTargetFormat(
              shadow.PreferredSize * shadow.NumberOfCascades,
              shadow.PreferredSize,
              false,
              shadow.Prefer16Bit ? SurfaceFormat.HalfSingle : SurfaceFormat.Single,
              DepthFormat.Depth24);

            bool allLocked = shadow.IsCascadeLocked[0] && shadow.IsCascadeLocked[1] && shadow.IsCascadeLocked[2] && shadow.IsCascadeLocked[3];

            if (shadow.ShadowMap == null)
            {
              shadow.ShadowMap = graphicsService.RenderTargetPool.Obtain2D(format);
              allLocked = false;   // Need to render shadow map.
            }

            // If we can reuse the whole shadow map texture, abort early.
            if (allLocked)
              continue;

            _csmSplitDistances[0] = projection.Near;
            _csmSplitDistances[1] = shadow.Distances.X;
            _csmSplitDistances[2] = shadow.Distances.Y;
            _csmSplitDistances[3] = shadow.Distances.Z;
            _csmSplitDistances[4] = shadow.Distances.W;

            // (Re-)Initialize the array for cached matrices in the CascadedShadow.
            if (shadow.ViewProjections == null || shadow.ViewProjections.Length < shadow.NumberOfCascades)
              shadow.ViewProjections = new Matrix[shadow.NumberOfCascades];

            // Initialize the projection matrices to an empty matrix.
            // The unused matrices must not contain valid projections, so that
            // CsmComputeSplitOptimized in CascadedShadowMask.fxh does not choose
            // a wrong cascade.
            for (int j = 0; j < shadow.ViewProjections.Length; j++)
            {
              if (!shadow.IsCascadeLocked[j])    // Do not delete cached info of locked cascades.
                shadow.ViewProjections[j] = new Matrix();
            }

            // If some cascades are cached, we have to create a new shadow map and copy
            // the old cascades into the new shadow map.
            if (shadow.IsCascadeLocked[0] || shadow.IsCascadeLocked[1] || shadow.IsCascadeLocked[2] || shadow.IsCascadeLocked[3])
            {
              var oldShadowMap = shadow.ShadowMap;
              shadow.ShadowMap = graphicsService.RenderTargetPool.Obtain2D(new RenderTargetFormat(oldShadowMap));

              graphicsDevice.SetRenderTarget(shadow.ShadowMap);
              graphicsDevice.Clear(Color.White);

              var spriteBatch = graphicsService.GetSpriteBatch();
              spriteBatch.Begin(SpriteSortMode.Deferred, BlendState.Opaque, SamplerState.PointClamp, DepthStencilState.None, RasterizerState.CullNone);
              for (int cascade = 0; cascade < shadow.NumberOfCascades; cascade++)
              {
            if (shadow.IsCascadeLocked[cascade])
            {
              var viewport = GetViewport(shadow, cascade);
              var rectangle = new Rectangle(viewport.X, viewport.Y, viewport.Width, viewport.Height);
              spriteBatch.Draw(oldShadowMap, rectangle, rectangle, Color.White);
            }
              }
              spriteBatch.End();

              graphicsService.RenderTargetPool.Recycle(oldShadowMap);
            }
            else
            {
              graphicsDevice.SetRenderTarget(shadow.ShadowMap);
              graphicsDevice.Clear(Color.White);
            }

            context.RenderTarget = shadow.ShadowMap;
            graphicsDevice.DepthStencilState = DepthStencilState.Default;
            graphicsDevice.RasterizerState = RasterizerState.CullCounterClockwise;
            graphicsDevice.BlendState = BlendState.Opaque;

            context.ReferenceNode = lightNode;
            context.Object = shadow;
            context.ShadowNear = 0;           // Obsolete: Only kept for backward compatibility.

            bool shadowMapContainsSomething = false;
            for (int split = 0; split < shadow.NumberOfCascades; split++)
            {
              if (shadow.IsCascadeLocked[split])
            continue;

              context.Data[RenderContextKeys.ShadowTileIndex] = CubeMapShadowMapRenderer.BoxedIntegers[split];

              // near/far of this split.
              float near = _csmSplitDistances[split];
              float far = Math.Max(_csmSplitDistances[split + 1], near + Numeric.EpsilonF);

              // Create a view volume for this split.
              _splitVolume.SetFieldOfView(fieldOfViewY, aspectRatio, near, far);

              // Find the bounding sphere of the split camera frustum.
              Vector3F center;
              float radius;
              GetBoundingSphere(_splitVolume, out center, out radius);

              // Extend radius to get enough border for filtering.
              int shadowMapSize = shadow.ShadowMap.Height;

              // We could extend the radius by (ShadowMapSize + BorderTexels) / ShadowMapSize.
              // Here we add at least 1 texel. (This way, the shadow mask shader can clamp the uv to
              // the texture rect without considering the half texel border, to avoid sampling outside.)
              radius *= (float)(shadowMapSize + 1) / shadowMapSize;

              // Convert center to light space.
              Pose lightPose = lightNode.PoseWorld;
              center = cameraPose.ToWorldPosition(center);
              center = lightPose.ToLocalPosition(center);

              // Snap center to texel positions to avoid shadow swimming.
              SnapPositionToTexels(ref center, 2 * radius, shadowMapSize);

              // Convert center back to world space.
              center = lightPose.ToWorldPosition(center);

              Matrix33F orientation = lightPose.Orientation;
              Vector3F backward = orientation.GetColumn(2);
              var orthographicProjection = (OrthographicProjection)_orthographicCameraNode.Camera.Projection;

              // Create a tight orthographic frustum around the cascade's bounding sphere.
              orthographicProjection.SetOffCenter(-radius, radius, -radius, radius, 0, 2 * radius);
              Vector3F cameraPosition = center + radius * backward;
              Pose frustumPose = new Pose(cameraPosition, orientation);
              Pose view = frustumPose.Inverse;
              shadow.ViewProjections[split] = (Matrix)view * orthographicProjection;

              // Convert depth bias from "texel" to light space [0, 1] depth.
              // Minus sign to move receiver depth closer to light. Divide by depth to normalize.
              float unitsPerTexel = orthographicProjection.Width / shadow.ShadowMap.Height;
              shadow.EffectiveDepthBias[split] = -shadow.DepthBias[split] * unitsPerTexel / orthographicProjection.Depth;

              // Convert normal offset from "texel" to world space.
              shadow.EffectiveNormalOffset[split] = shadow.NormalOffset[split] * unitsPerTexel;

              // For rendering the shadow map, move near plane back by MinLightDistance
              // to catch occluders in front of the cascade.
              orthographicProjection.Near = -shadow.MinLightDistance;
              _orthographicCameraNode.PoseWorld = frustumPose;

              // Set a viewport to render a tile in the texture atlas.
              graphicsDevice.Viewport = GetViewport(shadow, split);
              context.Viewport = graphicsDevice.Viewport;

              shadowMapContainsSomething |= RenderCallback(context);
            }

            // Recycle shadow map if empty.
            if (!shadowMapContainsSomething)
            {
              graphicsService.RenderTargetPool.Recycle(shadow.ShadowMap);
              shadow.ShadowMap = null;
            }
              }

              graphicsDevice.SetRenderTarget(null);
              savedRenderState.Restore();

              context.CameraNode = cameraNode;
              context.ShadowNear = float.NaN;
              context.Technique = null;
              context.RenderTarget = originalRenderTarget;
              context.Viewport = originalViewport;
              context.ReferenceNode = originalReferenceNode;
              context.Object = null;
              context.Data[RenderContextKeys.ShadowTileIndex] = null;
        }
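GetBoundingSphere, which wraps each cascade's view volume in a sphere, is not shown above. A simple stand-in (not necessarily the minimal sphere) builds the eight view-space corners of the perspective slice from the vertical field of view, aspect ratio and the split's near/far distances, and fits a sphere around their centroid:

        using System;
        using System.Numerics;

        static class FrustumHelper
        {
            // View-space bounding sphere of a perspective frustum slice (camera looks along -z).
            // Not the minimal sphere, but it contains all eight corners by construction.
            public static void GetBoundingSphere(float fieldOfViewY, float aspectRatio,
                                                 float near, float far,
                                                 out Vector3 center, out float radius)
            {
                float tanY = (float)Math.Tan(fieldOfViewY / 2);
                float tanX = tanY * aspectRatio;

                var corners = new Vector3[8];
                int i = 0;
                foreach (float z in new[] { near, far })
                    foreach (float sy in new[] { -1f, 1f })
                        foreach (float sx in new[] { -1f, 1f })
                            corners[i++] = new Vector3(sx * tanX * z, sy * tanY * z, -z);

                center = Vector3.Zero;
                foreach (var c in corners)
                    center += c;
                center /= 8;

                radius = 0;
                foreach (var c in corners)
                    radius = Math.Max(radius, Vector3.Distance(center, c));
            }
        }

As in the loop above, the radius would then be enlarged slightly for filtering and the center snapped to shadow-map texels to avoid shadow swimming.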
Code example #26
0
File: OcclusionBuffer.cs Project: Zolniu/DigitalRune
        private void VisualizeQuery(SceneNode node, RenderContext context, int passIndex)
        {
            if (node == null)
                throw new ArgumentNullException("node");
            if (context == null)
                throw new ArgumentNullException("context");

            context.ThrowIfCameraMissing();

            var graphicsService = context.GraphicsService;
            var graphicsDevice = graphicsService.GraphicsDevice;
            var originalRenderState = new RenderStateSnapshot(graphicsDevice);

            var viewport = graphicsDevice.Viewport;
            _parameterTargetSize.SetValue(new Vector2(viewport.Width, viewport.Height));

            var aabb = node.Aabb;
            _parameterDebugMinimum.SetValue((Vector3)aabb.Minimum);
            _parameterDebugMaximum.SetValue((Vector3)aabb.Maximum);

            _effect.CurrentTechnique = _techniqueVisualize;
            _techniqueVisualize.Passes[passIndex].Apply();

            graphicsDevice.DepthStencilState = DepthStencilState.None;
            graphicsDevice.RasterizerState = RasterizerState.CullNone;
            // Do not override blend state: Use current blend state.

            graphicsDevice.DrawFullScreenQuad();

            originalRenderState.Restore();
        }
Code example #27
0
        public override void Render(IList<SceneNode> nodes, RenderContext context, RenderOrder order)
        {
              if (nodes == null)
                throw new ArgumentNullException("nodes");
              if (context == null)
                throw new ArgumentNullException("context");
              if (context.Scene == null)
                throw new ArgumentException("Scene needs to be set in render context.", "context");
              if (context.CameraNode == null)
                throw new ArgumentException("Camera needs to be set in render context.", "context");
              if (!(context.CameraNode.Camera.Projection is PerspectiveProjection))
                throw new ArgumentException("The camera in the render context must use a perspective projection.", "context");

              int numberOfNodes = nodes.Count;
              if (numberOfNodes == 0)
                return;

              var graphicsDevice = context.GraphicsService.GraphicsDevice;
              int frame = context.Frame;

              var savedRenderState = new RenderStateSnapshot(graphicsDevice);

              var originalRenderTarget = context.RenderTarget;
              var originalViewport = context.Viewport;
              var originalCameraNode = context.CameraNode;
              var originalLodCameraNode = context.LodCameraNode;
              float originalLodBias = context.LodBias;
              var originalReferenceNode = context.ReferenceNode;

              Pose originalCameraPose = originalCameraNode.PoseWorld;
              Vector3F originalCameraPosition = originalCameraPose.Position;
              Matrix33F originalCameraOrientation = originalCameraPose.Orientation;

              Vector3F right = originalCameraOrientation.GetColumn(0);
              Vector3F up = originalCameraOrientation.GetColumn(1);
              Vector3F back = originalCameraOrientation.GetColumn(2);

              try
              {
            // The loop is wrapped in a try/catch block to handle InvalidOperationExceptions in
            // case the node collection is modified during the render callback.
            for (int i = 0; i < numberOfNodes; i++)
            {
              var node = nodes[i] as PlanarReflectionNode;
              if (node == null)
            continue;

              // Update each node only once per frame.
              if (node.LastFrame == frame)
            continue;

              node.LastFrame = frame;

              var texture = node.RenderToTexture.Texture;
              if (texture == null)
            continue;

              var renderTarget = texture as RenderTarget2D;
              if (renderTarget == null)
            throw new GraphicsException(
              "PlanarReflectionNode.RenderToTexture.Texture is invalid. The texture must be a RenderTarget2D.");

              // RenderToTexture instances can be shared. --> Update them only once per frame.
              if (node.RenderToTexture.LastFrame == frame)
            continue;

              // Do not render if we look at the back of the reflection plane.
              Vector3F planeNormal = node.NormalWorld;
              Vector3F planePosition = node.PoseWorld.Position;
              Vector3F planeToCamera = originalCameraPosition - planePosition;
              if (Vector3F.Dot(planeNormal, planeToCamera) < 0)
            continue;

              var cameraNode = node.CameraNode;

              // Reflect camera pose.
              Pose cameraPose;
              cameraPose.Position = planePosition + Reflect(planeToCamera, planeNormal);
              cameraPose.Orientation = new Matrix33F();
              cameraPose.Orientation.SetColumn(0, Reflect(right, planeNormal));
              cameraPose.Orientation.SetColumn(1, -Reflect(up, planeNormal));
              cameraPose.Orientation.SetColumn(2, Reflect(back, planeNormal));
              cameraNode.PoseWorld = cameraPose;

              // The projection of the player camera.
              var originalProjection = originalCameraNode.Camera.Projection;
              // The projection of the reflected camera.
              var projection = (PerspectiveProjection)cameraNode.Camera.Projection;

              // Choose optimal projection. We get the screen-space bounds of the reflection node.
              // Then we make the FOV so small that it exactly contains the node.
              projection.Set(originalProjection);

              var bounds = GraphicsHelper.GetBounds(cameraNode, node);

              // Abort if the bounds are empty.
              if (Numeric.AreEqual(bounds.X, bounds.Z) || Numeric.AreEqual(bounds.Y, bounds.W))
            continue;

              // Apply FOV scale to bounds.
              float fovScale = node.FieldOfViewScale;
              float deltaX = (bounds.Z - bounds.X) * (fovScale - 1) / 2;
              bounds.X -= deltaX;
              bounds.Z += deltaX;
              float deltaY = (bounds.W - bounds.Y) * (fovScale - 1) / 2;
              bounds.Y -= deltaY;
              bounds.W += deltaY;

              // Update projection to contain only the node bounds.
              projection.Left = projection.Left + bounds.X * projection.Width;
              projection.Right = projection.Left + bounds.Z * projection.Width;
              projection.Top = projection.Top - bounds.Y * projection.Height;
              projection.Bottom = projection.Top - bounds.W * projection.Height;

              // Set far clip plane.
              if (node.Far.HasValue)
            projection.Far = node.Far.Value;

              // Set near clip plane.
              Vector3F planeNormalCamera = cameraPose.ToLocalDirection(-node.NormalWorld);
              Vector3F planePointCamera = cameraPose.ToLocalPosition(node.PoseWorld.Position);
              projection.NearClipPlane = new Plane(planeNormalCamera, planePointCamera);

              context.CameraNode = cameraNode;
              context.LodCameraNode = cameraNode;
              context.LodBias = node.LodBias ?? originalLodBias;
              context.ReferenceNode = node;

              context.RenderTarget = renderTarget;
              context.Viewport = new Viewport(0, 0, renderTarget.Width, renderTarget.Height);

              RenderCallback(context);

              // Update other properties of RenderToTexture.
              node.RenderToTexture.LastFrame = frame;
              node.RenderToTexture.TextureMatrix = GraphicsHelper.ProjectorBiasMatrix
                                               * cameraNode.Camera.Projection
                                               * cameraNode.PoseWorld.Inverse;
            }
              }
              catch (InvalidOperationException exception)
              {
            throw new GraphicsException(
              "InvalidOperationException was raised in PlanarReflectionRenderer.Render(). "
              + "This can happen if a SceneQuery instance that is currently in use is modified in the "
              + "RenderCallback. --> Use different SceneQuery types in the method which calls "
              + "SceneCaptureRenderer.Render() and in the RenderCallback method.",
              exception);
              }

              graphicsDevice.SetRenderTarget(null);
              savedRenderState.Restore();

              context.RenderTarget = originalRenderTarget;
              context.Viewport = originalViewport;
              context.CameraNode = originalCameraNode;
              context.LodCameraNode = originalLodCameraNode;
              context.LodBias = originalLodBias;
              context.ReferenceNode = originalReferenceNode;
        }
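Reflect, used above to mirror the camera across the reflection plane, is not shown. Assuming it implements the standard reflection formula v - 2(v·n)n for a unit-length normal n, a minimal version together with the reflected camera position looks like this:

        using System.Numerics;

        static class ReflectionMath
        {
            // Mirror a vector across a plane with unit normal n: v - 2 (v · n) n.
            public static Vector3 Reflect(Vector3 v, Vector3 n)
            {
                return v - 2 * Vector3.Dot(v, n) * n;
            }

            // Mirror a camera position across a plane given by a point on the plane and its unit normal.
            public static Vector3 ReflectPosition(Vector3 cameraPosition, Vector3 planePosition, Vector3 planeNormal)
            {
                Vector3 planeToCamera = cameraPosition - planePosition;
                return planePosition + Reflect(planeToCamera, planeNormal);
            }
        }

This matches the usage above, where cameraPose.Position is set to planePosition + Reflect(planeToCamera, planeNormal) and the camera basis vectors are reflected with the same helper.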
Code example #28
0
File: OcclusionBuffer.cs Project: Zolniu/DigitalRune
        public void Query(IList<SceneNode> nodes, RenderContext context)
        {
              if (nodes == null)
                throw new ArgumentNullException("nodes");
              if (context == null)
                throw new ArgumentNullException("context");

              context.ThrowIfCameraMissing();

              int numberOfNodes = nodes.Count;  // Note: nodes may contain null entries!
              if (numberOfNodes == 0)
              {
                Statistics.ObjectsTotal = 0;
                Statistics.ObjectsCulled = 0;
                Statistics.ShadowCastersTotal = 0;
                Statistics.ShadowCastersCulled = 0;
                return;
              }

              var originalRenderTarget = context.RenderTarget;
              var originalViewport = context.Viewport;

              var graphicsService = context.GraphicsService;
              var graphicsDevice = graphicsService.GraphicsDevice;
              var originalRenderState = new RenderStateSnapshot(graphicsDevice);

              graphicsDevice.DepthStencilState = DepthStencilState.None;
              graphicsDevice.RasterizerState = RasterizerState.CullNone;
              graphicsDevice.BlendState = BlendState.Opaque;

              // Note: The camera parameters are set in Render().

              // ----- Query data.
              // The vertices store the AABB and the pixel address to which the result is
              // written. In MonoGame, we can simply submit the data as a point list. However,
              // XNA does not support point lists. As a workaround we can submit the data as a
              // line strip:
              // - The line strip needs to be continuous: Line strips go left-to-right at even
              //   lines and right-to-left at odd lines.
              // - According to DirectX 9 line rasterization rules, the last pixel of a line
              //   is excluded. An additional vertex needs to be appended to ensure that the
              //   last pixel is rendered.
              if (_queryData == null || _queryData.Length < numberOfNodes + 1)
              {
            // We need at least numberOfNodes + 1 vertices. We use NextPowerOf2() which
            // returns a value > numberOfNodes.
            _queryData = new OcclusionVertex[MathHelper.NextPowerOf2((uint)numberOfNodes)];
              }

              Debug.Assert(_shadowCasters.Count == 0, "List of shadow casters has not been cleared.");

              int index = 0;
              for (int i = 0; i < numberOfNodes; i++)
              {
            var node = nodes[i];
            if (node == null)
              continue;

            // Ignore empty shapes.
            if (node.Shape is EmptyShape)
            {
              nodes[i] = null;  // Cull scene node!
              continue;
            }

            if (node.CastsShadows)
              _shadowCasters.Add(node);

            // Pixel address
            _queryData[index].Pixel = ToPixelAddress(index);

            // AABB
            var aabb = node.Aabb;
            _queryData[index].Minimum = aabb.Minimum;
            _queryData[index].Maximum = aabb.Maximum;

            // Position, Scale and MaxDistance are used for distance culling.
            _queryData[index].Position = node.PoseWorld.Position;
            _queryData[index].Scale = node.ScaleWorld;
            _queryData[index].MaxDistance = node.MaxDistance;

            index++;
              }

              // Append additional vertex. (For XNA line strips.)
              {
            // Copy last vertex and increment pixel address.
            _queryData[index] = _queryData[index - 1];
            _queryData[index].Pixel = ToPixelAddress(index);
              }

              int actualNumberOfNodes = index;

              // Allocate render target storing the results.
              int numberOfQueries = _lightHzbAvailable ? actualNumberOfNodes + _shadowCasters.Count : actualNumberOfNodes;
              int desiredBufferHeight = (numberOfQueries - 1) / ResultsBufferWidth + 1;
              Debug.Assert(ResultsBufferWidth * desiredBufferHeight >= actualNumberOfNodes, "Sanity check.");
              if (_resultsBuffer == null || _resultsBuffer.Height < desiredBufferHeight)
              {
            if (_resultsBuffer != null)
              _resultsBuffer.Dispose();

            _resultsBuffer = new RenderTarget2D(graphicsDevice, ResultsBufferWidth, desiredBufferHeight, false, SurfaceFormat.Single, DepthFormat.None);
            _results = new float[ResultsBufferWidth * desiredBufferHeight];
              }

              // Set new render target before binding the _cameraHzb.
              graphicsDevice.SetRenderTarget(_resultsBuffer);

              float width = _hzbLevels[0].Width;
              float height = _hzbLevels[0].Height;
              Vector2 sourceSize = new Vector2(width, height);
              Vector2 targetSize = new Vector2(_resultsBuffer.Width, _resultsBuffer.Height);
              Vector2 texelSize = new Vector2(1.0f / _cameraHzb.Width, 1.0f / _cameraHzb.Height);
              Vector2 halfTexelSize = 0.5f * texelSize;

              _parameterClampAabbMinimum.SetValue((Vector3)_cameraAabb.Minimum);
              _parameterClampAabbMaximum.SetValue((Vector3)_cameraAabb.Maximum);
              _parameterHzbSize.SetValue(sourceSize);
              _parameterTargetSize.SetValue(targetSize);
              _parameterAtlasSize.SetValue(new Vector2(_cameraHzb.Width, _cameraHzb.Height));
              _parameterTexelOffset.SetValue(texelSize);
              _parameterHalfTexelOffset.SetValue(halfTexelSize);
              _parameterMaxLevel.SetValue((float)_hzbLevels.Length - 1);
              _parameterHzbTexture.SetValue(_cameraHzb);
              _parameterLightHzbTexture.SetValue(_lightHzb);

              _effect.CurrentTechnique = _techniqueQuery;
              _techniqueQuery.Passes[0].Apply();

            #if MONOGAME
              var primitiveType = PrimitiveType.PointList;
            #else
              var primitiveType = PrimitiveType.LineStrip;
            #endif

              graphicsDevice.DrawUserPrimitives(primitiveType, _queryData, 0, actualNumberOfNodes);

              // Query shadow casters.
              int numberOfShadowCasters = _shadowCasters.Count;
              if (_lightHzbAvailable)
              {
            int offset = actualNumberOfNodes;
            for (int i = 0; i < numberOfShadowCasters; i++)
            {
              var node = _shadowCasters[i];

              // Pixel address
              _queryData[i].Pixel = ToPixelAddress(offset + i);

              // AABB
              var aabb = node.Aabb;
              _queryData[i].Minimum = aabb.Minimum;
              _queryData[i].Maximum = aabb.Maximum;

              // Position, Scale and MaxDistance are used for distance culling.
              _queryData[i].Position = node.PoseWorld.Position;
              _queryData[i].Scale = node.ScaleWorld;
              _queryData[i].MaxDistance = node.MaxDistance;
            }

            // Append additional vertex. (For XNA line strips.)
            {
              // Copy last vertex and increment pixel address.
              _queryData[numberOfShadowCasters] = _queryData[numberOfShadowCasters - 1];
              _queryData[numberOfShadowCasters].Pixel = ToPixelAddress(offset + numberOfShadowCasters);
            }

            _parameterClampAabbMinimum.SetValue((Vector3)_lightAabb.Minimum);
            _parameterClampAabbMaximum.SetValue((Vector3)_lightAabb.Maximum);

            int passIndex = ProgressiveShadowCasterCulling ? 2 : 1;
            _techniqueQuery.Passes[passIndex].Apply();

            graphicsDevice.DrawUserPrimitives(primitiveType, _queryData, 0, numberOfShadowCasters);
              }

              // Read back results.
              graphicsDevice.SetRenderTarget(null);
              _resultsBuffer.GetData(_results);

              index = 0;
              int objectsCulled = 0;
              for (int i = 0; i < numberOfNodes; i++)
              {
            var node = nodes[i];
            if (node == null)
              continue;

            int resultIndex = ToResultsBufferIndex(index);

            float viewNormalizedDistance = _results[resultIndex];
            if (viewNormalizedDistance >= 0)
            {
              // Store view-normalized distance in SortTag.
              node.SortTag = viewNormalizedDistance;
            }
            else
            {
              // Scene node culled.
              nodes[i] = null;
              objectsCulled++;
            }

            index++;
              }

              int shadowCastersCulled = 0;
              if (_lightHzbAvailable)
              {
            int offset = actualNumberOfNodes;
            for (int i = 0; i < numberOfShadowCasters; i++)
            {
              var node = _shadowCasters[i];
              int resultIndex = ToResultsBufferIndex(offset + i);

              // ReSharper disable once CompareOfFloatsByEqualityOperator
              float viewNormalizedDistance = _results[resultIndex];
              if (_results[resultIndex] >= 0)
              {
            // Shadow caster is visible.
            node.ClearFlag(SceneNodeFlags.IsShadowCasterCulled);
            node.SortTag = viewNormalizedDistance;
              }
              else
              {
            // Shadow caster is culled.
            node.SetFlag(SceneNodeFlags.IsShadowCasterCulled);
            shadowCastersCulled++;
              }
            }
              }

              Statistics.ObjectsTotal = actualNumberOfNodes;
              Statistics.ObjectsCulled = objectsCulled;
              Statistics.ShadowCastersTotal = _shadowCasters.Count;
              Statistics.ShadowCastersCulled = shadowCastersCulled;

              _shadowCasters.Clear();

              originalRenderState.Restore();

              context.RenderTarget = originalRenderTarget;
              context.Viewport = originalViewport;
        }
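ToPixelAddress and ToResultsBufferIndex are not shown above; they map a query index to a pixel in the results render target and to the corresponding element of the read-back float array. A plausible, simplified sketch (the actual return types and the value of ResultsBufferWidth are assumptions):

        static class QueryAddressing
        {
            // Assumed width of the results render target; the actual constant is not shown above.
            public const int ResultsBufferWidth = 256;

            // Query index -> pixel coordinates in the results render target (filled row by row).
            public static void ToPixelAddress(int index, out int x, out int y)
            {
                x = index % ResultsBufferWidth;
                y = index / ResultsBufferWidth;
            }

            // Query index -> index into the float[] read back from the render target.
            public static int ToResultsBufferIndex(int index)
            {
                int x, y;
                ToPixelAddress(index, out x, out y);
                return y * ResultsBufferWidth + x;   // Equal to index; written out to show the mapping.
            }
        }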
Code example #29
0
        public override void Render(IList <SceneNode> nodes, RenderContext context, RenderOrder order)
        {
            if (nodes == null)
            {
                throw new ArgumentNullException("nodes");
            }
            if (context == null)
            {
                throw new ArgumentNullException("context");
            }

            int numberOfNodes = nodes.Count;

            if (numberOfNodes == 0)
            {
                return;
            }

            // Note: The camera node is not used by the StandardShadowMapRenderer.
            // Still throw an exception if null for consistency. (All other shadow map
            // renderers need a camera node.)
            context.ThrowIfCameraMissing();
            context.ThrowIfSceneMissing();

            var originalRenderTarget  = context.RenderTarget;
            var originalViewport      = context.Viewport;
            var originalReferenceNode = context.ReferenceNode;

            var cameraNode = context.CameraNode;

            // Update SceneNode.LastFrame for all visible nodes.
            int frame = context.Frame;

            cameraNode.LastFrame = frame;

            context.Technique = "Default";

            var graphicsService  = context.GraphicsService;
            var graphicsDevice   = graphicsService.GraphicsDevice;
            var savedRenderState = new RenderStateSnapshot(graphicsDevice);

            for (int i = 0; i < numberOfNodes; i++)
            {
                var lightNode = nodes[i] as LightNode;
                if (lightNode == null)
                {
                    continue;
                }

                var shadow = lightNode.Shadow as StandardShadow;
                if (shadow == null)
                {
                    continue;
                }

                // LightNode is visible in current frame.
                lightNode.LastFrame = frame;

                // Get a new shadow map if necessary.
                if (shadow.ShadowMap == null)
                {
                    shadow.ShadowMap = graphicsService.RenderTargetPool.Obtain2D(
                        new RenderTargetFormat(
                            shadow.PreferredSize,
                            shadow.PreferredSize,
                            false,
                            shadow.Prefer16Bit ? SurfaceFormat.HalfSingle : SurfaceFormat.Single,
                            DepthFormat.Depth24));
                }

                // Create a suitable shadow camera.
                CameraNode lightCameraNode;
                if (lightNode.Light is ProjectorLight)
                {
                    var light = (ProjectorLight)lightNode.Light;
                    if (light.Projection is PerspectiveProjection)
                    {
                        var lp = (PerspectiveProjection)light.Projection;
                        var cp = (PerspectiveProjection)_perspectiveCameraNode.Camera.Projection;
                        cp.SetOffCenter(lp.Left, lp.Right, lp.Bottom, lp.Top, lp.Near, lp.Far);

                        lightCameraNode = _perspectiveCameraNode;
                    }
                    else //if (light.Projection is OrthographicProjection)
                    {
                        var lp = (OrthographicProjection)light.Projection;
                        var cp = (OrthographicProjection)_orthographicCameraNode.Camera.Projection;
                        cp.SetOffCenter(lp.Left, lp.Right, lp.Bottom, lp.Top, lp.Near, lp.Far);

                        lightCameraNode = _orthographicCameraNode;
                    }
                }
                else if (lightNode.Light is Spotlight)
                {
                    var light = (Spotlight)lightNode.Light;
                    var cp    = (PerspectiveProjection)_perspectiveCameraNode.Camera.Projection;
                    cp.SetFieldOfView(2 * light.CutoffAngle, 1, shadow.DefaultNear, light.Range);

                    lightCameraNode = _perspectiveCameraNode;
                }
                else
                {
                    throw new GraphicsException("StandardShadow can only be used with a Spotlight or a ProjectorLight.");
                }

                lightCameraNode.PoseWorld = lightNode.PoseWorld;

                // Store data for use in StandardShadowMaskRenderer.
                shadow.Near       = lightCameraNode.Camera.Projection.Near;
                shadow.Far        = lightCameraNode.Camera.Projection.Far;
                shadow.View       = lightCameraNode.PoseWorld.Inverse;
                shadow.Projection = lightCameraNode.Camera.Projection;

                // World units per texel at a planar distance of 1 world unit.
                float unitsPerTexel = lightCameraNode.Camera.Projection.Width / (shadow.ShadowMap.Height * shadow.Near);

                // Convert depth bias from "texel" to world space.
                // Minus to move receiver depth closer to light.
                shadow.EffectiveDepthBias = -shadow.DepthBias * unitsPerTexel;

                // Convert normal offset from "texel" to world space.
                shadow.EffectiveNormalOffset = shadow.NormalOffset * unitsPerTexel;

                graphicsDevice.SetRenderTarget(shadow.ShadowMap);
                context.RenderTarget = shadow.ShadowMap;
                context.Viewport     = graphicsDevice.Viewport;

                graphicsDevice.Clear(Color.White);

                // The scene node renderer should use the light camera instead of the player camera.
                context.CameraNode    = lightCameraNode;
                context.ReferenceNode = lightNode;
                context.Object        = shadow;

                graphicsDevice.DepthStencilState = DepthStencilState.Default;
                graphicsDevice.RasterizerState   = RasterizerState.CullCounterClockwise;
                graphicsDevice.BlendState        = BlendState.Opaque;

                bool shadowMapContainsSomething = RenderCallback(context);
                if (!shadowMapContainsSomething)
                {
                    // Shadow map is empty. Recycle it.
                    graphicsService.RenderTargetPool.Recycle(shadow.ShadowMap);
                    shadow.ShadowMap = null;
                }
            }

            graphicsDevice.SetRenderTarget(null);
            savedRenderState.Restore();

            context.CameraNode    = cameraNode;
            context.Technique     = null;
            context.RenderTarget  = originalRenderTarget;
            context.Viewport      = originalViewport;
            context.ReferenceNode = originalReferenceNode;
            context.Object        = null;
        }
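The bias conversion above turns the texel-based DepthBias and NormalOffset into world-space values via unitsPerTexel. A small worked example with assumed numbers (90° field of view, near plane at 0.1, a 1024² shadow map, DepthBias = 2 texels, NormalOffset = 1 texel):

        using System;

        static class ShadowBiasExample
        {
            static void Main()
            {
                // Assumed values, chosen for illustration only.
                float fieldOfViewY = (float)(Math.PI / 2);   // 90°
                float near = 0.1f;
                int shadowMapSize = 1024;
                float depthBiasInTexels = 2;
                float normalOffsetInTexels = 1;

                // Frustum width at the near plane (aspect ratio 1, as for the spotlight case above).
                float projectionWidth = 2 * near * (float)Math.Tan(fieldOfViewY / 2);   // 0.2

                // World units per shadow-map texel at a planar distance of 1 world unit.
                float unitsPerTexel = projectionWidth / (shadowMapSize * near);         // ≈ 0.00195

                float effectiveDepthBias = -depthBiasInTexels * unitsPerTexel;          // ≈ -0.0039
                float effectiveNormalOffset = normalOffsetInTexels * unitsPerTexel;     // ≈  0.0020

                Console.WriteLine(unitsPerTexel + " " + effectiveDepthBias + " " + effectiveNormalOffset);
            }
        }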
Code example #30
0
File: OcclusionBuffer.cs Project: Zolniu/DigitalRune
        public void Render(IList<SceneNode> occluders, LightNode lightNode, SceneNodeRenderer renderer, RenderContext context)
        {
              if (context == null)
                throw new ArgumentNullException("context");

              context.ThrowIfCameraMissing();

              // ----- Sort occluders by type: IOcclusionProxy vs. SceneNode
              SortOccluders(occluders, renderer, context);
              Statistics.Occluders = _occlusionProxies.Count + _sceneNodes.Count;

              // ----- Update all IOcclusionProxy in background.
              if (_occlusionProxies.Count > 0)
              {
            if (EnableMultithreading)
              _updateTask = Parallel.Start(_updateOcclusionProxies);
            else
              UpdateOcclusionProxies();
              }

              // ----- Backup render state.
              var originalRenderTarget = context.RenderTarget;
              var originalViewport = context.Viewport;

              var graphicsService = context.GraphicsService;
              var graphicsDevice = graphicsService.GraphicsDevice;
              var originalRenderState = new RenderStateSnapshot(graphicsDevice);

              // ----- Camera properties
              var cameraNode = context.CameraNode;
              Matrix cameraView = (Matrix)cameraNode.View;
              var cameraProjection = cameraNode.Camera.Projection;
              Matrix cameraViewProjection = cameraView * cameraProjection;

              if (lightNode == null)
              {
            _lightHzbAvailable = false;
              }
              else
              {
            // ----- Render light HZB.
            _lightHzbAvailable = true;

            var shadow = lightNode.Shadow as CascadedShadow;
            if (shadow == null)
              throw new ArgumentException("LightNode expected to have a CascadedShadow.", "lightNode");

            // Set up orthographic camera similar to CascadedShadowMapRenderer.
            context.CameraNode = _orthographicCameraNode;

            // Part of camera frustum covered by shadow map.
            var maxShadowDistance = shadow.Distances[shadow.NumberOfCascades - 1];
            _splitVolume.SetFieldOfView(cameraProjection.FieldOfViewY, cameraProjection.AspectRatio, cameraProjection.Near, Math.Min(cameraProjection.Far, maxShadowDistance));

            // Find the bounding sphere of the camera frustum.
            Vector3F center;
            float radius;
            GetBoundingSphere(_splitVolume, out center, out radius);

            Matrix33F orientation = lightNode.PoseWorld.Orientation;
            Vector3F lightBackward = orientation.GetColumn(2);
            var orthographicProjection = (OrthographicProjection)_orthographicCameraNode.Camera.Projection;

            // Create a tight orthographic frustum around the cascade's bounding sphere.
            orthographicProjection.SetOffCenter(-radius, radius, -radius, radius, 0, 2 * radius);
            center = cameraNode.PoseWorld.ToWorldPosition(center);
            Vector3F cameraPosition = center + radius * lightBackward;
            Pose frustumPose = new Pose(cameraPosition, orientation);

            // For rendering the shadow map, move near plane back by MinLightDistance
            // to catch occluders in front of the cascade.
            orthographicProjection.Near = -shadow.MinLightDistance;
            _orthographicCameraNode.PoseWorld = frustumPose;

            Pose lightView = frustumPose.Inverse;
            Matrix lightViewProjection = (Matrix)lightView * orthographicProjection;

            _parameterCameraViewProj.SetValue(lightViewProjection);
            _parameterCameraNear.SetValue(orthographicProjection.Near);
            _parameterCameraFar.SetValue(orthographicProjection.Far);

            RenderOccluders(renderer, context);
            CreateDepthHierarchy(_lightHzb, context);

            // Set effect parameters for use in Query().
            _lightAabb = _orthographicCameraNode.Aabb;
            _parameterLightViewProj.SetValue(lightViewProjection);
            _parameterLightToCamera.SetValue(Matrix.Invert(lightViewProjection) * cameraViewProjection);

            context.CameraNode = cameraNode;
              }

              // ----- Render camera HZB.
              // Set camera parameters. (These effect parameters are also needed in Query()!)
              _cameraAabb = cameraNode.Aabb;
              _parameterCameraViewProj.SetValue(cameraViewProjection);
              _parameterCameraNear.SetValue(cameraProjection.Near);
              _parameterCameraFar.SetValue(cameraProjection.Far);

              var lodCameraNode = context.LodCameraNode;
              if (lodCameraNode != null)
              {
            // Enable distance culling.
            _parameterCameraPosition.SetValue((Vector3)lodCameraNode.PoseWorld.Position);
            float yScale = Math.Abs(lodCameraNode.Camera.Projection.ToMatrix44F().M11);
            _parameterNormalizationFactor.SetValue(1.0f / yScale * cameraNode.LodBias * context.LodBias);
              }
              else
              {
            // Disable distance culling.
            _parameterCameraPosition.SetValue(new Vector3());
            _parameterNormalizationFactor.SetValue(0);
              }

              RenderOccluders(renderer, context);
              CreateDepthHierarchy(_cameraHzb, context);

              _sceneNodes.Clear();
              _occlusionProxies.Clear();

              // Restore render state.
              graphicsDevice.SetRenderTarget(null);
              originalRenderState.Restore();

              context.RenderTarget = originalRenderTarget;
              context.Viewport = originalViewport;
        }
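Here, as in the cascaded shadow map renderer above, an orthographic light frustum is fitted tightly around a bounding sphere: it extends ±radius to the sides, covers a depth range of 2·radius, and the light camera sits one radius behind the sphere center along the light's backward direction. A standalone sketch of that construction using System.Numerics (right-handed, camera looks along -z, as in XNA):

        using System.Numerics;

        static class LightFrustumHelper
        {
            // Builds a light view-projection matrix whose orthographic frustum encloses the
            // sphere (center, radius). lightBackward must be unit length, and lightUp must
            // not be parallel to it.
            public static Matrix4x4 CreateLightViewProjection(Vector3 center, float radius,
                                                              Vector3 lightBackward, Vector3 lightUp)
            {
                // Place the light camera one radius "behind" the sphere center.
                Vector3 cameraPosition = center + radius * lightBackward;

                Matrix4x4 view = Matrix4x4.CreateLookAt(cameraPosition, center, lightUp);

                // Frustum: ±radius to the sides, depth range [0, 2 * radius].
                Matrix4x4 projection = Matrix4x4.CreateOrthographicOffCenter(
                    -radius, radius, -radius, radius, 0, 2 * radius);

                return view * projection;   // Row-vector convention: p' = p * view * projection.
            }
        }

In the renderers above the near plane is additionally pulled back by MinLightDistance so that occluders in front of the sphere still end up in the shadow or occlusion map.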
Code example #31
0
    private void ProcessJobs(RenderContext context)
    {
      // Set render states.
      var graphicsDevice = context.GraphicsService.GraphicsDevice;
      var savedRenderState = new RenderStateSnapshot(graphicsDevice);
      graphicsDevice.RasterizerState = RasterizerState.CullNone;

      bool renderFill = (Options & FigureRenderOptions.RenderFill) != 0;
      if (renderFill)
        SetupFill(context);

      bool renderStroke = (Options & FigureRenderOptions.RenderStroke) != 0;
      if (renderStroke)
        SetupStroke(context);

      var jobs = _jobs.Array;
      int jobCount = _jobs.Count;
      _mode = RenderMode.Undefined;
      for (int i = 0; i < jobCount; i++)
      {
        var node = jobs[i].Node;
        var renderData = node.Figure.RenderData;
        if (renderData == null)
          continue;

        var vertices = renderData.Vertices;
        if (vertices == null || vertices.Count == 0)
          continue;

        // We can cache a static vertex buffer for static figures with camera-independent dash patterns.
        if (node.IsStatic && node.DashInWorldSpace)
        {
          var nodeRenderData = node.RenderData as FigureNodeRenderData;
          if (nodeRenderData == null)
          {
            nodeRenderData = new FigureNodeRenderData();
            node.RenderData = nodeRenderData;
          }

          if (!nodeRenderData.IsValid)
            CacheVertexBuffer(node, _graphicsService.GraphicsDevice);
        }

        // Render filled polygons.
        var fillIndices = renderData.FillIndices;
        if (renderFill
            && fillIndices != null
            && fillIndices.Count > 0
            && !Numeric.IsZero(node.FillAlpha))
        {
          Fill(node, vertices, fillIndices);
        }

        // Render stroked lines.
        var strokeIndices = renderData.StrokeIndices;
        if (renderStroke
            && strokeIndices != null
            && strokeIndices.Count > 0
            && !Numeric.IsZero(node.StrokeThickness)
            && !Numeric.IsZero(node.StrokeAlpha))
        {
          Stroke(node, vertices, strokeIndices);
        }
      }

      Flush();

      savedRenderState.Restore();
    }
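Options is tested with bitwise AND, so FigureRenderOptions is evidently a flags enum. A minimal sketch of such an options type and the test used above (the member values are assumptions, only the names follow the code):

        using System;

        [Flags]
        enum FigureRenderOptions
        {
            None = 0,
            RenderFill = 1,
            RenderStroke = 2,
            RenderFillAndStroke = RenderFill | RenderStroke
        }

        static class OptionsExample
        {
            static void Main()
            {
                var options = FigureRenderOptions.RenderFillAndStroke;

                // Same bit tests as in ProcessJobs above.
                bool renderFill = (options & FigureRenderOptions.RenderFill) != 0;
                bool renderStroke = (options & FigureRenderOptions.RenderStroke) != 0;

                Console.WriteLine(renderFill + " " + renderStroke);   // True True
            }
        }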
Code example #32
0
File: PointBatch.cs Project: Zolniu/DigitalRune
        /// <summary>
        /// Draws the points.
        /// </summary>
        /// <param name="context">The render context.</param>
        /// <remarks>
        /// If <see cref="Effect"/> is <see langword="null"/>, then <see cref="Render"/> does nothing.
        /// </remarks>
        /// <exception cref="ArgumentNullException">
        /// <paramref name="context"/> is <see langword="null"/>.
        /// </exception>
        public void Render(RenderContext context)
        {
              if (context == null)
                throw new ArgumentNullException("context");

              if (Effect == null)
                return;

              int numberOfPoints = _points.Count;
              if (numberOfPoints <= 0)
                return;

              context.Validate(Effect);
              context.ThrowIfCameraMissing();

              // Render state.
              var graphicsDevice = context.GraphicsService.GraphicsDevice;
              var savedRenderState = new RenderStateSnapshot(graphicsDevice);
              graphicsDevice.RasterizerState = RasterizerState.CullNone;

              // Reset the texture stages. If a floating point texture is set, we get exceptions
              // when a sampler with bilinear filtering is set.
              graphicsDevice.ResetTextures();

              // Effect parameters.
              Effect.Alpha = 1;
              Effect.DiffuseColor = new Vector3(1, 1, 1);
              Effect.LightingEnabled = false;
              Effect.TextureEnabled = false;
              Effect.VertexColorEnabled = true;
              Effect.World = Matrix.Identity;
              Effect.View = Matrix.Identity;
              Effect.Projection = Matrix.Identity;
              Effect.CurrentTechnique.Passes[0].Apply();

              // Get WorldViewProjection matrix.
              Matrix view = (Matrix)context.CameraNode.View;
              Matrix projection = context.CameraNode.Camera.Projection;
              Matrix wvp = Matrix.Multiply(view, projection);

              // The x and y point size relative to the viewport.
              Viewport viewport = graphicsDevice.Viewport;
              float sizeX = PointSize / viewport.Width;
              float sizeY = sizeX * viewport.Width / viewport.Height;

              // Resize buffer if necessary.
              ResizeBuffer(graphicsDevice, numberOfPoints);

              // Submit points. The loop is only needed if we have more points than can
              // be submitted with one draw call.
              var startPointIndex = 0;
              while (startPointIndex < numberOfPoints)
              {
            // Number of points in this batch.
            int pointsPerBatch = Math.Min(numberOfPoints - startPointIndex, _buffer.Length / 6);

            // Create vertices for points. All positions are directly in clip space!
            for (int i = 0; i < pointsPerBatch; i++)
            {
              var point = _points[startPointIndex + i];

              // Transform point position to clip space.
              Vector3 positionWorld = (Vector3)point.Position;
              Vector3 positionClip;
              Vector3.Transform(ref positionWorld, ref wvp, out positionClip);
              float w = (float)((double)positionWorld.X * wvp.M14 + (double)positionWorld.Y * wvp.M24 + (double)positionWorld.Z * wvp.M34 + wvp.M44);

              // Homogeneous divide.
              positionClip /= w;

              // 2 triangles create a point quad. Clip space goes from -1 to 1, therefore
              // we do not need to divide sizeX and sizeY by 2.
              Vector3 bottomLeft = positionClip + new Vector3(-sizeX, +sizeY, 0);
              Vector3 bottomRight = positionClip + new Vector3(+sizeX, +sizeY, 0);
              Vector3 topLeft = positionClip + new Vector3(-sizeX, -sizeY, 0);
              Vector3 topRight = positionClip + new Vector3(+sizeX, -sizeY, 0);
              _buffer[i * 6 + 0].Position = bottomLeft;
              _buffer[i * 6 + 0].Color = point.Color;
              _buffer[i * 6 + 1].Position = bottomRight;
              _buffer[i * 6 + 1].Color = point.Color;
              _buffer[i * 6 + 2].Position = topLeft;
              _buffer[i * 6 + 2].Color = point.Color;
              _buffer[i * 6 + 3].Position = bottomRight;
              _buffer[i * 6 + 3].Color = point.Color;
              _buffer[i * 6 + 4].Position = topRight;
              _buffer[i * 6 + 4].Color = point.Color;
              _buffer[i * 6 + 5].Position = topLeft;
              _buffer[i * 6 + 5].Color = point.Color;
            }

            // Draw triangles.
            graphicsDevice.DrawUserPrimitives(PrimitiveType.TriangleList, _buffer, 0, pointsPerBatch * 2);

            startPointIndex += pointsPerBatch;
              }

              savedRenderState.Restore();
        }
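ResizeBuffer is not shown in either PointBatch listing; it must guarantee that _buffer can hold six vertices per point, up to whatever batch size the renderer allows per draw call. A plausible sketch, assuming an XNA/MonoGame VertexPositionColor buffer and a hypothetical MaxPointsPerBatch limit:

        // Hypothetical sketch; requires Microsoft.Xna.Framework.Graphics and System.
        private const int MaxPointsPerBatch = 4096;   // Assumed per-draw-call limit.
        private VertexPositionColor[] _buffer;

        private void ResizeBuffer(GraphicsDevice graphicsDevice, int numberOfPoints)
        {
            // 6 vertices (2 triangles) per point, capped at the batch limit.
            // The graphics device is unused here because DrawUserPrimitives uploads the
            // array directly; it could be used to create a DynamicVertexBuffer instead.
            int requiredVertices = Math.Min(numberOfPoints, MaxPointsPerBatch) * 6;

            if (_buffer == null || _buffer.Length < requiredVertices)
                _buffer = new VertexPositionColor[requiredVertices];
        }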
Code example #33
0
        public override void Render(IList<SceneNode> nodes, RenderContext context, RenderOrder order)
        {
              if (nodes == null)
                throw new ArgumentNullException("nodes");
              if (context == null)
                throw new ArgumentNullException("context");

              int numberOfNodes = nodes.Count;
              if (numberOfNodes == 0)
                return;

              context.Validate(_effect);
              context.ThrowIfCameraMissing();

              var graphicsDevice = _effect.GraphicsDevice;
              var savedRenderState = new RenderStateSnapshot(graphicsDevice);
              graphicsDevice.DepthStencilState = DepthStencilState.None;
              graphicsDevice.RasterizerState = RasterizerState.CullNone;
              graphicsDevice.BlendState = GraphicsHelper.BlendStateAdd;

              var viewport = graphicsDevice.Viewport;
              _parameterViewportSize.SetValue(new Vector2(viewport.Width, viewport.Height));
              _parameterGBuffer0.SetValue(context.GBuffer0);
              _parameterGBuffer1.SetValue(context.GBuffer1);

              var cameraNode = context.CameraNode;
              var cameraPose = cameraNode.PoseWorld;
              Matrix viewProjection = (Matrix)cameraNode.View * cameraNode.Camera.Projection;

              // Update SceneNode.LastFrame for all visible nodes.
              int frame = context.Frame;
              cameraNode.LastFrame = frame;

              var isHdrEnabled = context.IsHdrEnabled();
              for (int i = 0; i < numberOfNodes; i++)
              {
            var lightNode = nodes[i] as LightNode;
            if (lightNode == null)
              continue;

            var light = lightNode.Light as ProjectorLight;
            if (light == null)
              continue;

            // LightNode is visible in current frame.
            lightNode.LastFrame = frame;

            float hdrScale = isHdrEnabled ? light.HdrScale : 1;
            _parameterDiffuseColor.SetValue((Vector3)light.Color * light.DiffuseIntensity * hdrScale);
            _parameterSpecularColor.SetValue((Vector3)light.Color * light.SpecularIntensity * hdrScale);
            _parameterTexture.SetValue(light.Texture);

            var lightPose = lightNode.PoseWorld;
            _parameterPosition.SetValue((Vector3)(lightPose.Position - cameraPose.Position));

            _parameterRange.SetValue(light.Projection.Far);
            _parameterAttenuation.SetValue(light.Attenuation);
            _parameterTextureMatrix.SetValue((Matrix)(GraphicsHelper.ProjectorBiasMatrix * light.Projection * (lightPose.Inverse * new Pose(cameraPose.Position))));

            var rectangle = GraphicsHelper.GetViewportRectangle(cameraNode, viewport, lightNode);
            var texCoordTopLeft = new Vector2F(rectangle.Left / (float)viewport.Width, rectangle.Top / (float)viewport.Height);
            var texCoordBottomRight = new Vector2F(rectangle.Right / (float)viewport.Width, rectangle.Bottom / (float)viewport.Height);
            GraphicsHelper.GetFrustumFarCorners(cameraNode.Camera.Projection, texCoordTopLeft, texCoordBottomRight, _frustumFarCorners);

            // Convert frustum far corners from view space to world space.
            for (int j = 0; j < _frustumFarCorners.Length; j++)
              _frustumFarCorners[j] = (Vector3)cameraPose.ToWorldDirection((Vector3F)_frustumFarCorners[j]);

            _parameterFrustumCorners.SetValue(_frustumFarCorners);

            bool hasShadow = (lightNode.Shadow != null && lightNode.Shadow.ShadowMask != null);
            if (hasShadow)
            {
              switch (lightNode.Shadow.ShadowMaskChannel)
              {
                case 0:  _parameterShadowMaskChannel.SetValue(new Vector4(1, 0, 0, 0)); break;
                case 1:  _parameterShadowMaskChannel.SetValue(new Vector4(0, 1, 0, 0)); break;
                case 2:  _parameterShadowMaskChannel.SetValue(new Vector4(0, 0, 1, 0)); break;
                default: _parameterShadowMaskChannel.SetValue(new Vector4(0, 0, 0, 1)); break;
              }

              _parameterShadowMask.SetValue(lightNode.Shadow.ShadowMask);
            }

            if (lightNode.Clip != null)
            {
              var data = lightNode.RenderData as LightRenderData;
              if (data == null)
              {
            data = new LightRenderData();
            lightNode.RenderData = data;
              }

              data.UpdateClipSubmesh(context.GraphicsService, lightNode);

              graphicsDevice.DepthStencilState = GraphicsHelper.DepthStencilStateOnePassStencilFail;
              graphicsDevice.BlendState = GraphicsHelper.BlendStateNoColorWrite;

              _parameterWorldViewProjection.SetValue((Matrix)data.ClipMatrix * viewProjection);
              _passClip.Apply();
              data.ClipSubmesh.Draw();

              graphicsDevice.DepthStencilState = lightNode.InvertClip
            ? GraphicsHelper.DepthStencilStateStencilEqual0
            : GraphicsHelper.DepthStencilStateStencilNotEqual0;
              graphicsDevice.BlendState = GraphicsHelper.BlendStateAdd;
            }
            else
            {
              graphicsDevice.DepthStencilState = DepthStencilState.None;
            }

            if (hasShadow)
            {
              if (light.Texture.Format == SurfaceFormat.Alpha8)
            _passShadowedAlpha.Apply();
              else
            _passShadowedRgb.Apply();
            }
            else
            {
              if (light.Texture.Format == SurfaceFormat.Alpha8)
            _passDefaultAlpha.Apply();
              else
            _passDefaultRgb.Apply();
            }

            graphicsDevice.DrawQuad(rectangle);
              }

              savedRenderState.Restore();
        }
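
The switch above encodes the shadow mask channel as a one-hot Vector4. A short sketch of that encoding and of the dot-product lookup the shader is assumed to perform with it (the lookup itself is not shown in the example):

    using System.Numerics;

    static class ShadowMaskChannelHelper
    {
        // Channel index (0..3) -> one-hot selector vector.
        public static Vector4 GetChannelMask(int channel)
        {
            switch (channel)
            {
                case 0:  return new Vector4(1, 0, 0, 0);
                case 1:  return new Vector4(0, 1, 0, 0);
                case 2:  return new Vector4(0, 0, 1, 0);
                default: return new Vector4(0, 0, 0, 1);
            }
        }

        // Assumed shader-side usage: extract one channel of the RGBA shadow mask sample.
        public static float GetShadowTerm(Vector4 shadowMaskSample, int channel)
        {
            return Vector4.Dot(shadowMaskSample, GetChannelMask(channel));
        }
    }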
Code example #34
        /// <inheritdoc/>
        public override void Render(IList<SceneNode> nodes, RenderContext context, RenderOrder order)
        {
            ThrowIfDisposed();

            if (nodes == null)
                throw new ArgumentNullException("nodes");
            if (context == null)
                throw new ArgumentNullException("context");

            int numberOfNodes = nodes.Count;
            if (nodes.Count == 0)
                return;

              context.Validate(_effect);
              context.ThrowIfCameraMissing();

              var graphicsDevice = context.GraphicsService.GraphicsDevice;
              var savedRenderState = new RenderStateSnapshot(graphicsDevice);
              graphicsDevice.BlendState = BlendState.AlphaBlend;
              graphicsDevice.RasterizerState = RasterizerState.CullNone;
              graphicsDevice.DepthStencilState = DepthStencilState.DepthRead;

              // Camera properties
              var cameraNode = context.CameraNode;
              Matrix view = (Matrix)new Matrix44F(cameraNode.PoseWorld.Orientation.Transposed, new Vector3F());
              _parameterView.SetValue(view);
              Matrix projection = cameraNode.Camera.Projection;
              _parameterProjection.SetValue(projection);

              // Update SceneNode.LastFrame for all visible nodes.
              int frame = context.Frame;
              cameraNode.LastFrame = frame;

              for (int i = 0; i < numberOfNodes; i++)
              {
            var node = nodes[i] as ScatteringSkyNode;
            if (node == null)
              continue;

            // ScatteringSkyNode is visible in current frame.
            node.LastFrame = frame;

            _parameterSunDirection.SetValue((Vector3)node.SunDirection);
            _parameterSunIntensity.SetValue((Vector3)(node.SunIntensity * node.SunColor));
            _parameterRadii.SetValue(new Vector4(
              node.AtmosphereHeight + node.PlanetRadius,    // Atmosphere radius
              node.PlanetRadius,                            // Ground radius
              node.ObserverAltitude + node.PlanetRadius,    // Observer radius
              node.ScaleHeight));                           // Absolute Scale height
            _parameterNumberOfSamples.SetValue(node.NumberOfSamples);
            _parameterBetaRayleigh.SetValue((Vector3)node.BetaRayleigh);
            _parameterBetaMie.SetValue((Vector3)node.BetaMie);
            _parameterGMie.SetValue(node.GMie);
            _parameterTransmittance.SetValue(node.Transmittance);

            if (node.BaseHorizonColor.IsNumericallyZero && node.BaseZenithColor.IsNumericallyZero)
            {
              // No base color.
              if (context.IsHdrEnabled())
            _passLinear.Apply();
              else
            _passGamma.Apply();
            }
            else
            {
              // Add base color.
              _parameterBaseHorizonColor.SetValue((Vector4)new Vector4F(node.BaseHorizonColor, node.BaseColorShift));
              _parameterBaseZenithColor.SetValue((Vector3)node.BaseZenithColor);

              if (context.IsHdrEnabled())
            _passLinearWithBaseColor.Apply();
              else
            _passGammaWithBaseColor.Apply();
            }

            _submesh.Draw();
              }

              savedRenderState.Restore();
        }
Code example #35
        public override void Render(IList <SceneNode> nodes, RenderContext context, RenderOrder order)
        {
            ThrowIfDisposed();

            if (nodes == null)
            {
                throw new ArgumentNullException("nodes");
            }
            if (context == null)
            {
                throw new ArgumentNullException("context");
            }

            int numberOfNodes = nodes.Count;

            if (nodes.Count == 0)
            {
                return;
            }

            context.Validate(_effect);

            var originalRenderTarget = context.RenderTarget;
            var originalViewport     = context.Viewport;

            var graphicsDevice   = context.GraphicsService.GraphicsDevice;
            var savedRenderState = new RenderStateSnapshot(graphicsDevice);

            graphicsDevice.BlendState        = BlendState.Opaque;
            graphicsDevice.RasterizerState   = RasterizerState.CullNone;
            graphicsDevice.DepthStencilState = DepthStencilState.None;

            int   frame     = context.Frame;
            float deltaTime = (float)context.DeltaTime.TotalSeconds;

            for (int nodeIndex = 0; nodeIndex < numberOfNodes; nodeIndex++)
            {
                var cloudNode = nodes[nodeIndex] as CloudLayerNode;
                if (cloudNode == null)
                {
                    continue;
                }

                var cloudMap = cloudNode.CloudMap as LayeredCloudMap;
                if (cloudMap == null)
                {
                    continue;
                }

                // We update the cloud map only once per frame.
                if (cloudMap.LastFrame == frame)
                {
                    continue;
                }

                cloudMap.LastFrame = frame;

                var layers         = cloudMap.Layers;
                var animationTimes = cloudMap.AnimationTimes;
                var sources        = cloudMap.SourceLayers;
                var targets        = cloudMap.TargetLayers;
                var renderTargets  = cloudMap.LayerTextures;

                // Animate the cloud map layers.
                for (int i = 0; i < LayeredCloudMap.NumberOfTextures; i++)
                {
                    if (layers[i] == null || layers[i].Texture != null)
                    {
                        continue;
                    }

                    if (cloudMap.Random == null)
                    {
                        cloudMap.Random = new Random(cloudMap.Seed);
                    }

                    // Make sure there is a user-defined texture or data for procedural textures.
                    if (sources[i] == null)
                    {
                        // Each octave is 128 x 128 (= 1 / 4 of the 512 * 512 noise texture).
                        sources[i]       = new PackedTexture(null, _noiseTexture, cloudMap.Random.NextVector2F(0, 1), new Vector2F(0.25f));
                        targets[i]       = new PackedTexture(null, _noiseTexture, cloudMap.Random.NextVector2F(0, 1), new Vector2F(0.25f));
                        renderTargets[i] = new RenderTarget2D(graphicsDevice, 128, 128, false, SurfaceFormat.Alpha8, DepthFormat.None);
                    }

                    // Update animation time.
                    animationTimes[i] += deltaTime * layers[i].AnimationSpeed;

                    // Update source and target if animation time is beyond 1.
                    if (animationTimes[i] > 1)
                    {
                        // Wrap animation time.
                        animationTimes[i] = animationTimes[i] % 1;

                        // Swap source and target.
                        MathHelper.Swap(ref sources[i], ref targets[i]);

                        // Set target to a new random part of the noise texture.
                        targets[i].Offset = cloudMap.Random.NextVector2F(0, 1);
                    }

                    // Lerp source and target together to get the final noise texture.
                    graphicsDevice.SetRenderTarget(renderTargets[i]);
                    _parameterViewportSize.SetValue(new Vector2(graphicsDevice.Viewport.Width, graphicsDevice.Viewport.Height));
                    _parameterTextures[0].SetValue(sources[i].TextureAtlas);
                    _parameterTextures[1].SetValue(targets[i].TextureAtlas);
                    _parameterTexture0Parameters.SetValue(new Vector4(sources[i].Scale.X, sources[i].Scale.Y, sources[i].Offset.X, sources[i].Offset.Y));
                    _parameterTexture1Parameters.SetValue(new Vector4(targets[i].Scale.X, targets[i].Scale.Y, targets[i].Offset.X, targets[i].Offset.Y));
                    _parameterLerp.SetValue(animationTimes[i]);
                    _passLerp.Apply();
                    graphicsDevice.DrawFullScreenQuad();
                }

                // Initialize the cloud map.
                if (cloudMap.Texture == null || cloudMap.Size != cloudMap.Texture.Width)
                {
                    cloudMap.Texture.SafeDispose();

                    var cloudTexture = new RenderTarget2D(
                        graphicsDevice,
                        cloudMap.Size,
                        cloudMap.Size,
                        false,
                        SurfaceFormat.Alpha8,
                        DepthFormat.None);

                    cloudMap.SetTexture(cloudTexture);
                }

                // Combine the layers.
                graphicsDevice.SetRenderTarget((RenderTarget2D)cloudMap.Texture);
                _parameterViewportSize.SetValue(new Vector2(cloudMap.Texture.Width, cloudMap.Texture.Height));
                for (int i = 0; i < LayeredCloudMap.NumberOfTextures; i++)
                {
                    var layer = layers[i] ?? EmptyLayer;
                    _parameterTextures[i].SetValue(layer.Texture ?? renderTargets[i]);
                    _parameterMatrices[i].SetValue((Matrix) new Matrix44F(layer.TextureMatrix, Vector3F.Zero));
                    _parameterDensities[i].SetValue(new Vector2(layer.DensityScale, layer.DensityOffset));
                }
                _parameterCoverage.SetValue(cloudMap.Coverage);
                _parameterDensity.SetValue(cloudMap.Density);
                _passDensity.Apply();
                graphicsDevice.DrawFullScreenQuad();
            }

            savedRenderState.Restore();
            graphicsDevice.SetRenderTarget(null);
            context.RenderTarget = originalRenderTarget;
            context.Viewport     = originalViewport;
        }
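
The animation above cross-fades each cloud layer between a source and a target region of the noise texture and swaps them whenever the animation time wraps past 1. A reduced sketch of just that bookkeeping (class and member names are illustrative, not the LayeredCloudMap API):

    using System;

    sealed class NoiseLayerAnimation
    {
        public float AnimationTime;                 // 0..1, lerp factor between source and target.
        public float AnimationSpeed = 0.05f;
        public (float X, float Y) SourceOffset;     // Offset of the source region in the noise texture.
        public (float X, float Y) TargetOffset;     // Offset of the target region in the noise texture.

        readonly Random _random = new Random();

        public void Update(float deltaTime)
        {
            AnimationTime += deltaTime * AnimationSpeed;
            if (AnimationTime > 1)
            {
                // Wrap the lerp factor, reuse the old target as the new source, and pick a
                // new random region of the noise texture as the next target.
                AnimationTime %= 1;
                SourceOffset = TargetOffset;
                TargetOffset = ((float)_random.NextDouble(), (float)_random.NextDouble());
            }
            // The effect then computes: layer = lerp(Sample(SourceOffset), Sample(TargetOffset), AnimationTime).
        }
    }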
Code example #36
        public override void Render(IList<SceneNode> nodes, RenderContext context, RenderOrder order)
        {
            if (nodes == null)
                throw new ArgumentNullException("nodes");
            if (context == null)
                throw new ArgumentNullException("context");

            int numberOfNodes = nodes.Count;
            if (numberOfNodes == 0)
                return;

              // Note: The camera node is not used by the StandardShadowMapRenderer.
              // Still throw an exception if null for consistency. (All other shadow map
              // renderers need a camera node.)
              context.ThrowIfCameraMissing();
              context.ThrowIfSceneMissing();

              var originalRenderTarget = context.RenderTarget;
              var originalViewport = context.Viewport;
              var originalReferenceNode = context.ReferenceNode;

              var cameraNode = context.CameraNode;

              // Update SceneNode.LastFrame for all visible nodes.
              int frame = context.Frame;
              cameraNode.LastFrame = frame;

              context.Technique = "Default";

              var graphicsService = context.GraphicsService;
              var graphicsDevice = graphicsService.GraphicsDevice;
              var savedRenderState = new RenderStateSnapshot(graphicsDevice);

              for (int i = 0; i < numberOfNodes; i++)
              {
            var lightNode = nodes[i] as LightNode;
            if (lightNode == null)
              continue;

            var shadow = lightNode.Shadow as StandardShadow;
            if (shadow == null)
              continue;

            // LightNode is visible in current frame.
            lightNode.LastFrame = frame;

            // Get a new shadow map if necessary.
            if (shadow.ShadowMap == null)
            {
              shadow.ShadowMap = graphicsService.RenderTargetPool.Obtain2D(
            new RenderTargetFormat(
              shadow.PreferredSize,
              shadow.PreferredSize,
              false,
              shadow.Prefer16Bit ? SurfaceFormat.HalfSingle : SurfaceFormat.Single,
              DepthFormat.Depth24));
            }

            // Create a suitable shadow camera.
            CameraNode lightCameraNode;
            if (lightNode.Light is ProjectorLight)
            {
              var light = (ProjectorLight)lightNode.Light;
              if (light.Projection is PerspectiveProjection)
              {
            var lp = (PerspectiveProjection)light.Projection;
            var cp = (PerspectiveProjection)_perspectiveCameraNode.Camera.Projection;
            cp.SetOffCenter(lp.Left, lp.Right, lp.Bottom, lp.Top, lp.Near, lp.Far);

            lightCameraNode = _perspectiveCameraNode;
              }
              else //if (light.Projection is OrthographicProjection)
              {
            var lp = (OrthographicProjection)light.Projection;
            var cp = (OrthographicProjection)_orthographicCameraNode.Camera.Projection;
            cp.SetOffCenter(lp.Left, lp.Right, lp.Bottom, lp.Top, lp.Near, lp.Far);

            lightCameraNode = _orthographicCameraNode;
              }
            }
            else if (lightNode.Light is Spotlight)
            {
              var light = (Spotlight)lightNode.Light;
              var cp = (PerspectiveProjection)_perspectiveCameraNode.Camera.Projection;
              cp.SetFieldOfView(2 * light.CutoffAngle, 1, shadow.DefaultNear, light.Range);

              lightCameraNode = _perspectiveCameraNode;
            }
            else
            {
              throw new GraphicsException("StandardShadow can only be used with a Spotlight or a ProjectorLight.");
            }

            lightCameraNode.PoseWorld = lightNode.PoseWorld;

            // Store data for use in StandardShadowMaskRenderer.
            shadow.Near = lightCameraNode.Camera.Projection.Near;
            shadow.Far = lightCameraNode.Camera.Projection.Far;
            shadow.View = lightCameraNode.PoseWorld.Inverse;
            shadow.Projection = lightCameraNode.Camera.Projection;

            // World units per texel at a planar distance of 1 world unit.
            float unitsPerTexel = lightCameraNode.Camera.Projection.Width / (shadow.ShadowMap.Height * shadow.Near);

            // Convert depth bias from "texel" to world space.
            // Minus to move receiver depth closer to light.
            shadow.EffectiveDepthBias = -shadow.DepthBias * unitsPerTexel;

            // Convert normal offset from "texel" to world space.
            shadow.EffectiveNormalOffset = shadow.NormalOffset * unitsPerTexel;

            graphicsDevice.SetRenderTarget(shadow.ShadowMap);
            context.RenderTarget = shadow.ShadowMap;
            context.Viewport = graphicsDevice.Viewport;

            graphicsDevice.Clear(Color.White);

            // The scene node renderer should use the light camera instead of the player camera.
            context.CameraNode = lightCameraNode;
            context.ReferenceNode = lightNode;
            context.Object = shadow;

            graphicsDevice.DepthStencilState = DepthStencilState.Default;
            graphicsDevice.RasterizerState = RasterizerState.CullCounterClockwise;
            graphicsDevice.BlendState = BlendState.Opaque;

            bool shadowMapContainsSomething = RenderCallback(context);
            if (!shadowMapContainsSomething)
            {
              // Shadow map is empty. Recycle it.
              graphicsService.RenderTargetPool.Recycle(shadow.ShadowMap);
              shadow.ShadowMap = null;
            }
              }

              graphicsDevice.SetRenderTarget(null);
              savedRenderState.Restore();

              context.CameraNode = cameraNode;
              context.Technique = null;
              context.RenderTarget = originalRenderTarget;
              context.Viewport = originalViewport;
              context.ReferenceNode = originalReferenceNode;
              context.Object = null;
        }
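
The bias computation above converts DepthBias and NormalOffset from shadow-map texels into world units via the projection width. A small worked example with illustrative numbers (not taken from the original):

    using System;

    static class ShadowBiasExample
    {
        static void Main()
        {
            // Assumed example values: 512x512 shadow map, shadow camera near plane 2 world
            // units wide at near = 1, biases given in "texels".
            float projectionWidth = 2.0f, shadowMapHeight = 512.0f, near = 1.0f;
            float depthBiasInTexels = 4.0f, normalOffsetInTexels = 2.0f;

            // World units covered by one shadow-map texel at a planar distance of 1.
            float unitsPerTexel = projectionWidth / (shadowMapHeight * near);   // = 0.0039

            // Negative sign moves the receiver depth closer to the light.
            float effectiveDepthBias = -depthBiasInTexels * unitsPerTexel;      // ≈ -0.0156
            float effectiveNormalOffset = normalOffsetInTexels * unitsPerTexel; // ≈ 0.0078

            Console.WriteLine($"{unitsPerTexel} {effectiveDepthBias} {effectiveNormalOffset}");
        }
    }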
Code example #37
        public override void Render(IList<SceneNode> nodes, RenderContext context, RenderOrder order)
        {
            ThrowIfDisposed();

            if (nodes == null)
                throw new ArgumentNullException("nodes");
            if (context == null)
                throw new ArgumentNullException("context");

            int numberOfNodes = nodes.Count;
            if (numberOfNodes == 0)
                return;

              context.Validate(_effect);
              context.ThrowIfCameraMissing();

              var graphicsDevice = context.GraphicsService.GraphicsDevice;
              var savedRenderState = new RenderStateSnapshot(graphicsDevice);

              // Camera properties
              int viewportHeight = graphicsDevice.Viewport.Height;
              var cameraNode = context.CameraNode;
              var projection = cameraNode.Camera.Projection;
              _parameterProjection.SetValue(projection);

              // Update SceneNode.LastFrame for all visible nodes.
              int frame = context.Frame;
              cameraNode.LastFrame = frame;

              for (int i = 0; i < numberOfNodes; i++)
              {
            var node = nodes[i] as CloudLayerNode;
            if (node == null)
              continue;

            // CloudLayerNode is visible in current frame.
            node.LastFrame = frame;

            if (node.CloudMap.Texture == null)
              continue;

            var sunDirection = node.SunDirection;
            _parameterSunDirection.SetValue((Vector3)sunDirection);
            _parameterSkyCurvature.SetValue(node.SkyCurvature);
            _parameterTextureMatrix.SetValue((Matrix)new Matrix44F(node.TextureMatrix, Vector3F.Zero));

            // The sample at the pixel counts as one, the rest are for the blur.
            // Note: We must not set -1 because a for loop like
            //   for (int i = 0; i < -1; i++)
            // crashes the AMD DX9 WP8.1 graphics driver. LOL
            _parameterNumberOfSamples.SetValue(Math.Max(0, node.NumberOfSamples - 1));

            _parameterSampleDistance.SetValue(node.SampleDistance);
            _parameterScatterParameters.SetValue(new Vector3(node.ForwardScatterExponent, node.ForwardScatterScale, node.ForwardScatterOffset));
            _parameterHorizonFade.SetValue(new Vector2(node.HorizonFade, node.HorizonBias));
            _parameterSunLight.SetValue((Vector3)node.SunLight);
            _parameterAmbientLight.SetValue(new Vector4((Vector3)node.AmbientLight, node.Alpha));
            _parameterTexture.SetValue(node.CloudMap.Texture);

            // Occlusion query.
            if (graphicsDevice.GraphicsProfile != GraphicsProfile.Reach && node.SunQuerySize >= Numeric.EpsilonF)
            {
              bool skipQuery = false;
              if (node.OcclusionQuery != null)
              {
            if (node.OcclusionQuery.IsComplete)
            {
              node.TryUpdateSunOcclusion();
            }
            else
            {
              // The previous query is still not finished. Do not start a new query, this would
              // create a SharpDX warning.
              skipQuery = true;
            }
              }
              else
              {
            node.OcclusionQuery = new OcclusionQuery(graphicsDevice);
              }

              if (!skipQuery)
              {
            node.IsQueryPending = true;

            float totalPixels = viewportHeight * node.SunQuerySize;
            totalPixels *= totalPixels;
            node.QuerySize = totalPixels;

            // Use a camera which looks at the sun.
            // Get a relative up vector that is not parallel to the forward direction.
            var lookAtUp = Vector3F.UnitY;
            if (Vector3F.AreNumericallyEqual(sunDirection, lookAtUp))
              lookAtUp = Vector3F.UnitZ;

            Vector3F zAxis = -sunDirection;
            Vector3F xAxis = Vector3F.Cross(lookAtUp, zAxis).Normalized;
            Vector3F yAxis = Vector3F.Cross(zAxis, xAxis);

            var lookAtSunView = new Matrix(xAxis.X, yAxis.X, zAxis.X, 0,
                                           xAxis.Y, yAxis.Y, zAxis.Y, 0,
                                           xAxis.Z, yAxis.Z, zAxis.Z, 0,
                                           0, 0, 0, 1);
            _parameterView.SetValue(lookAtSunView);

            graphicsDevice.BlendState = GraphicsHelper.BlendStateNoColorWrite;
            graphicsDevice.DepthStencilState = DepthStencilState.None;
            graphicsDevice.RasterizerState = RasterizerState.CullNone;

            // Create small quad shortly behind the near plane.
            // Note: We use an "untranslated" view matrix, so we can ignore the camera position.
            float width = (projection.Top - projection.Bottom) * node.SunQuerySize;
            Vector3F right = sunDirection.Orthonormal1 * (width / 2);
            Vector3F up = sunDirection.Orthonormal2 * (width / 2);
            Vector3F center = sunDirection * (projection.Near * 1.0001f);
            _queryGeometry[0] = center - up - right;
            _queryGeometry[1] = center + up - right;
            _queryGeometry[2] = center - up + right;
            _queryGeometry[3] = center + up + right;

            if (node.CloudMap.Texture.Format == SurfaceFormat.Alpha8)
              _passOcclusionAlpha.Apply();
            else
              _passOcclusionRgb.Apply();

            node.OcclusionQuery.Begin();
            graphicsDevice.DrawUserPrimitives(PrimitiveType.TriangleStrip, _queryGeometry, 0, 2,
              VertexPosition.VertexDeclaration);
            node.OcclusionQuery.End();
              }
            }
            else
            {
              node.IsQueryPending = false;
              node.SunOcclusion = 0;
            }

            Matrix viewUntranslated = (Matrix)new Matrix44F(cameraNode.PoseWorld.Orientation.Transposed, new Vector3F(0));
            _parameterView.SetValue(viewUntranslated);

            // Render clouds.
            graphicsDevice.BlendState = BlendState.AlphaBlend;
            graphicsDevice.RasterizerState = RasterizerState.CullNone;
            graphicsDevice.DepthStencilState = DepthStencilState.DepthRead;

            if (context.IsHdrEnabled())
            {
              if (node.CloudMap.Texture.Format == SurfaceFormat.Alpha8)
            _passCloudAlphaLinear.Apply();
              else
            _passCloudRgbLinear.Apply();
            }
            else
            {
              if (node.CloudMap.Texture.Format == SurfaceFormat.Alpha8)
            _passCloudAlphaGamma.Apply();
              else
            _passCloudRgbGamma.Apply();
            }

            _submesh.Draw();
              }

              savedRenderState.Restore();
        }
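
The occlusion query above renders a small quad towards the sun using a view matrix built from an ad-hoc orthonormal basis. A standalone sketch of that basis construction (System.Numerics types instead of the original Vector3F/Matrix):

    using System;
    using System.Numerics;

    static class LookAtBasis
    {
        // Builds a rotation-only view matrix that looks along 'direction' (e.g. towards the sun).
        public static Matrix4x4 CreateLookAlong(Vector3 direction)
        {
            // Pick an up vector that is not parallel to the view direction.
            Vector3 up = Vector3.UnitY;
            if (Math.Abs(Vector3.Dot(Vector3.Normalize(direction), up)) > 0.999f)
                up = Vector3.UnitZ;

            Vector3 zAxis = Vector3.Normalize(-direction);                 // Camera looks down -z.
            Vector3 xAxis = Vector3.Normalize(Vector3.Cross(up, zAxis));
            Vector3 yAxis = Vector3.Cross(zAxis, xAxis);

            // Basis vectors go into the columns, as in the lookAtSunView matrix above.
            return new Matrix4x4(
                xAxis.X, yAxis.X, zAxis.X, 0,
                xAxis.Y, yAxis.Y, zAxis.Y, 0,
                xAxis.Z, yAxis.Z, zAxis.Z, 0,
                0,       0,       0,       1);
        }
    }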
Code example #38
        public override void Render(IList<SceneNode> nodes, RenderContext context, RenderOrder order)
        {
            ThrowIfDisposed();

            if (nodes == null)
                throw new ArgumentNullException("nodes");
            if (context == null)
                throw new ArgumentNullException("context");

            int numberOfNodes = nodes.Count;
            if (nodes.Count == 0)
                return;

              context.Validate(_effect);
              context.ThrowIfCameraMissing();

              var graphicsDevice = context.GraphicsService.GraphicsDevice;
              var savedRenderState = new RenderStateSnapshot(graphicsDevice);
              graphicsDevice.BlendState = BlendState.AlphaBlend;
              graphicsDevice.RasterizerState = RasterizerState.CullNone;
              graphicsDevice.DepthStencilState = DepthStencilState.DepthRead;

              // Camera properties
              var cameraNode = context.CameraNode;
              Matrix view = (Matrix)new Matrix44F(cameraNode.PoseWorld.Orientation.Transposed, new Vector3F());
              _parameterView.SetValue(view);
              Matrix projection = cameraNode.Camera.Projection;
              _parameterProjection.SetValue(projection);

              // Update SceneNode.LastFrame for all visible nodes.
              int frame = context.Frame;
              cameraNode.LastFrame = frame;

              for (int i = 0; i < numberOfNodes; i++)
              {
            var node = nodes[i] as GradientTextureSkyNode;
            if (node == null)
              continue;

            // GradientTextureSkyNode is visible in current frame.
            node.LastFrame = frame;

            _parameterSunDirection.SetValue((Vector3)node.SunDirection);
            _parameterTime.SetValue((float)node.TimeOfDay.TotalHours / 24);
            _parameterColor.SetValue((Vector4)node.Color);
            _parameterFrontTexture.SetValue(node.FrontTexture);
            _parameterBackTexture.SetValue(node.BackTexture);

            if (node.CieSkyStrength < Numeric.EpsilonF)
            {
              if (context.IsHdrEnabled())
            _passLinear.Apply();
              else
            _passGamma.Apply();
            }
            else
            {
              var p = node.CieSkyParameters;
              _parameterAbcd.SetValue(new Vector4(p.A, p.B, p.C, p.D));
              _parameterEAndStrength.SetValue(new Vector2(p.E, node.CieSkyStrength));

              if (context.IsHdrEnabled())
            _passCieLinear.Apply();
              else
            _passCieGamma.Apply();
            }
            _submesh.Draw();
              }

              savedRenderState.Restore();
        }
Code example #39
        public override void Render(IList <SceneNode> nodes, RenderContext context, RenderOrder order)
        {
            if (nodes == null)
            {
                throw new ArgumentNullException("nodes");
            }
            if (context == null)
            {
                throw new ArgumentNullException("context");
            }

            int numberOfNodes = nodes.Count;

            if (numberOfNodes == 0)
            {
                return;
            }

            context.Validate(_effect);
            context.ThrowIfCameraMissing();

            var graphicsDevice   = _effect.GraphicsDevice;
            var savedRenderState = new RenderStateSnapshot(graphicsDevice);

            graphicsDevice.DepthStencilState = DepthStencilState.None;
            graphicsDevice.RasterizerState   = RasterizerState.CullNone;
            graphicsDevice.BlendState        = GraphicsHelper.BlendStateAdd;

            var viewport = graphicsDevice.Viewport;

            _parameterViewportSize.SetValue(new Vector2(viewport.Width, viewport.Height));
            _parameterGBuffer0.SetValue(context.GBuffer0);
            _parameterGBuffer1.SetValue(context.GBuffer1);

            var    cameraNode     = context.CameraNode;
            Pose   cameraPose     = cameraNode.PoseWorld;
            Matrix viewProjection = (Matrix)cameraNode.View * cameraNode.Camera.Projection;

            // Update SceneNode.LastFrame for all visible nodes.
            int frame = context.Frame;

            context.CameraNode.LastFrame = frame;

            var isHdrEnabled = context.IsHdrEnabled();

            for (int i = 0; i < numberOfNodes; i++)
            {
                var lightNode = nodes[i] as LightNode;
                if (lightNode == null)
                {
                    continue;
                }

                var light = lightNode.Light as PointLight;
                if (light == null)
                {
                    continue;
                }

                // LightNode is visible in current frame.
                lightNode.LastFrame = frame;

                float hdrScale = isHdrEnabled ? light.HdrScale : 1;
                _parameterDiffuseColor.SetValue((Vector3)light.Color * light.DiffuseIntensity * hdrScale);
                _parameterSpecularColor.SetValue((Vector3)light.Color * light.SpecularIntensity * hdrScale);

                Pose lightPose = lightNode.PoseWorld;

                bool hasShadow = (lightNode.Shadow != null && lightNode.Shadow.ShadowMask != null);
                if (hasShadow)
                {
                    switch (lightNode.Shadow.ShadowMaskChannel)
                    {
                    case 0: _parameterShadowMaskChannel.SetValue(new Vector4(1, 0, 0, 0)); break;

                    case 1: _parameterShadowMaskChannel.SetValue(new Vector4(0, 1, 0, 0)); break;

                    case 2: _parameterShadowMaskChannel.SetValue(new Vector4(0, 0, 1, 0)); break;

                    default: _parameterShadowMaskChannel.SetValue(new Vector4(0, 0, 0, 1)); break;
                    }

                    _parameterShadowMask.SetValue(lightNode.Shadow.ShadowMask);
                }

                _parameterPosition.SetValue((Vector3)(lightPose.Position - cameraPose.Position));
                _parameterRange.SetValue(light.Range);
                _parameterAttenuation.SetValue(light.Attenuation);

                bool hasTexture = (light.Texture != null);
                if (hasTexture)
                {
                    _parameterTexture.SetValue(light.Texture);

                    // Cube maps are left handed --> Sample with inverted z. (Otherwise, the
                    // cube map and objects or texts in it are mirrored.)
                    var mirrorZ = Matrix44F.CreateScale(1, 1, -1);
                    _parameterTextureMatrix.SetValue((Matrix)(mirrorZ * lightPose.Inverse));
                }

                var rectangle           = GraphicsHelper.GetViewportRectangle(cameraNode, viewport, lightPose.Position, light.Range);
                var texCoordTopLeft     = new Vector2F(rectangle.Left / (float)viewport.Width, rectangle.Top / (float)viewport.Height);
                var texCoordBottomRight = new Vector2F(rectangle.Right / (float)viewport.Width, rectangle.Bottom / (float)viewport.Height);
                GraphicsHelper.GetFrustumFarCorners(cameraNode.Camera.Projection, texCoordTopLeft, texCoordBottomRight, _frustumFarCorners);

                // Convert frustum far corners from view space to world space.
                for (int j = 0; j < _frustumFarCorners.Length; j++)
                {
                    _frustumFarCorners[j] = (Vector3)cameraPose.ToWorldDirection((Vector3F)_frustumFarCorners[j]);
                }

                _parameterFrustumCorners.SetValue(_frustumFarCorners);

                if (lightNode.Clip != null)
                {
                    var data = lightNode.RenderData as LightRenderData;
                    if (data == null)
                    {
                        data = new LightRenderData();
                        lightNode.RenderData = data;
                    }

                    data.UpdateClipSubmesh(context.GraphicsService, lightNode);

                    graphicsDevice.DepthStencilState = GraphicsHelper.DepthStencilStateOnePassStencilFail;
                    graphicsDevice.BlendState        = GraphicsHelper.BlendStateNoColorWrite;

                    _parameterWorldViewProjection.SetValue((Matrix)data.ClipMatrix * viewProjection);
                    _passClip.Apply();
                    data.ClipSubmesh.Draw();

                    graphicsDevice.DepthStencilState = lightNode.InvertClip
            ? GraphicsHelper.DepthStencilStateStencilEqual0
            : GraphicsHelper.DepthStencilStateStencilNotEqual0;
                    graphicsDevice.BlendState = GraphicsHelper.BlendStateAdd;
                }
                else
                {
                    graphicsDevice.DepthStencilState = DepthStencilState.None;
                }

                if (hasShadow)
                {
                    if (hasTexture)
                    {
                        if (light.Texture.Format == SurfaceFormat.Alpha8)
                        {
                            _passShadowedTexturedAlpha.Apply();
                        }
                        else
                        {
                            _passShadowedTexturedRgb.Apply();
                        }
                    }
                    else
                    {
                        _passShadowed.Apply();
                    }
                }
                else
                {
                    if (hasTexture)
                    {
                        if (light.Texture.Format == SurfaceFormat.Alpha8)
                        {
                            _passTexturedAlpha.Apply();
                        }
                        else
                        {
                            _passTexturedRgb.Apply();
                        }
                    }
                    else
                    {
                        _passDefault.Apply();
                    }
                }

                graphicsDevice.DrawQuad(rectangle);
            }

            savedRenderState.Restore();
        }
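
For textured point lights, the texture matrix above is built as mirrorZ * lightPose.Inverse so that world-space directions are rotated into light space and sampled with inverted z (cube maps are left-handed). A sketch of the equivalent per-direction computation (names and types are illustrative, not the shader code):

    using System.Numerics;

    static class CubeMapSampling
    {
        // Rotates a world-space direction (light -> surface point) into light space and
        // mirrors z before the cube-map lookup.
        public static Vector3 GetCubeMapDirection(Vector3 directionWorld, Matrix4x4 worldToLightRotation)
        {
            Vector3 directionLight = Vector3.TransformNormal(directionWorld, worldToLightRotation);
            return new Vector3(directionLight.X, directionLight.Y, -directionLight.Z);
        }
    }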
Code example #40
        public override void Render(IList <SceneNode> nodes, RenderContext context, RenderOrder order)
        {
            if (nodes == null)
            {
                throw new ArgumentNullException("nodes");
            }
            if (context == null)
            {
                throw new ArgumentNullException("context");
            }

            int numberOfNodes = nodes.Count;

            if (numberOfNodes == 0)
            {
                return;
            }

            context.Validate(_effect);
            context.ThrowIfCameraMissing();

            var graphicsDevice   = _effect.GraphicsDevice;
            var savedRenderState = new RenderStateSnapshot(graphicsDevice);

            graphicsDevice.DepthStencilState = DepthStencilState.None;
            graphicsDevice.RasterizerState   = RasterizerState.CullNone;

            var cameraNode = context.CameraNode;

            _parameterViewInverse.SetValue(cameraNode.PoseWorld);
            _parameterGBuffer0.SetValue(context.GBuffer0);

            Viewport viewport = context.Viewport;

            _parameterParameters0.SetValue(new Vector2(viewport.Width, viewport.Height));

            if (_jitterMap == null)
            {
                _jitterMap = NoiseHelper.GetGrainTexture(context.GraphicsService, NoiseHelper.DefaultJitterMapWidth);
            }

            _parameterJitterMap.SetValue(_jitterMap);

            for (int i = 0; i < numberOfNodes; i++)
            {
                var lightNode = nodes[i] as LightNode;
                if (lightNode == null)
                {
                    continue;
                }

                var shadow = lightNode.Shadow as StandardShadow;
                if (shadow == null)
                {
                    continue;
                }

                if (shadow.ShadowMap == null || shadow.ShadowMask == null)
                {
                    continue;
                }

                // The effect must only render in a specific channel.
                // Do not change the blend state if the correct write channel is already set, e.g. if this
                // shadow is part of a CompositeShadow, the correct blend state is already set.
                if ((int)graphicsDevice.BlendState.ColorWriteChannels != (1 << shadow.ShadowMaskChannel))
                {
                    graphicsDevice.BlendState = GraphicsHelper.BlendStateWriteSingleChannel[shadow.ShadowMaskChannel];
                }

                _parameterParameters1.SetValue(new Vector4(
                                                   shadow.Near,
                                                   shadow.Far,
                                                   shadow.EffectiveDepthBias,
                                                   shadow.EffectiveNormalOffset));

                // If we use a subset of the Poisson kernel, we have to normalize the scale.
                int   numberOfSamples = Math.Min(shadow.NumberOfSamples, PoissonKernel.Length);
                float filterRadius    = shadow.FilterRadius;
                if (numberOfSamples > 0)
                {
                    filterRadius /= PoissonKernel[numberOfSamples - 1].Length();
                }

                _parameterParameters2.SetValue(new Vector3(
                                                   shadow.ShadowMap.Width,
                                                   filterRadius,
                                                   // The StandardShadow.JitterResolution is the number of texels per world unit.
                                                   // In the shader the parameter JitterResolution contains the division by the jitter map size.
                                                   shadow.JitterResolution / _jitterMap.Width));

                _parameterLightPosition.SetValue((Vector3)cameraNode.PoseWorld.ToLocalPosition(lightNode.PoseWorld.Position));

                Matrix cameraViewToShadowView = cameraNode.PoseWorld * shadow.View;
                _parameterShadowView.SetValue(cameraViewToShadowView);
                _parameterShadowMatrix.SetValue(cameraViewToShadowView * shadow.Projection);
                _parameterShadowMap.SetValue(shadow.ShadowMap);

                var      rectangle           = GraphicsHelper.GetViewportRectangle(cameraNode, viewport, lightNode);
                Vector2F texCoordTopLeft     = new Vector2F(rectangle.Left / (float)viewport.Width, rectangle.Top / (float)viewport.Height);
                Vector2F texCoordBottomRight = new Vector2F(rectangle.Right / (float)viewport.Width, rectangle.Bottom / (float)viewport.Height);
                GraphicsHelper.GetFrustumFarCorners(cameraNode.Camera.Projection, texCoordTopLeft, texCoordBottomRight, _frustumFarCorners);
                _parameterFrustumCorners.SetValue(_frustumFarCorners);

                var pass = GetPass(numberOfSamples);

                if (numberOfSamples > 0)
                {
                    if (_lastNumberOfSamples != numberOfSamples)
                    {
                        // Create an array with the first n samples and the rest set to 0.
                        _lastNumberOfSamples = numberOfSamples;
                        for (int j = 0; j < numberOfSamples; j++)
                        {
                            _samples[j].X = PoissonKernel[j].X;
                            _samples[j].Y = PoissonKernel[j].Y;
                            _samples[j].Z = 1.0f / numberOfSamples;

                            // Note [HelmutG]: I have tried weights decreasing with distance but that did not
                            // look better.
                        }

                        // Set the rest to zero.
                        for (int j = numberOfSamples; j < _samples.Length; j++)
                        {
                            _samples[j] = Vector3.Zero;
                        }

                        _parameterSamples.SetValue(_samples);
                    }
                    else if (i == 0)
                    {
                        // Apply offsets in the first loop.
                        _parameterSamples.SetValue(_samples);
                    }
                }

                pass.Apply();

                graphicsDevice.DrawQuad(rectangle);
            }

            _parameterGBuffer0.SetValue((Texture2D)null);
            _parameterJitterMap.SetValue((Texture2D)null);
            _parameterShadowMap.SetValue((Texture2D)null);
            savedRenderState.Restore();
        }
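
The sample setup above normalizes the filter radius by the length of the last used Poisson sample and gives every active sample the weight 1/n in its z component. A compact sketch of that setup (assuming the kernel is sorted by distance from the origin, as in the renderer):

    using System;
    using System.Numerics;

    static class PoissonFilterSetup
    {
        public static Vector3[] BuildSamples(Vector2[] poissonKernel, int numberOfSamples, ref float filterRadius)
        {
            numberOfSamples = Math.Min(numberOfSamples, poissonKernel.Length);

            // If only a subset of the kernel is used, normalize the radius by the last sample's length.
            if (numberOfSamples > 0)
                filterRadius /= poissonKernel[numberOfSamples - 1].Length();

            var samples = new Vector3[poissonKernel.Length];
            for (int i = 0; i < numberOfSamples; i++)
                samples[i] = new Vector3(poissonKernel[i], 1.0f / numberOfSamples);  // xy = offset, z = weight

            // Remaining entries stay Vector3.Zero ("set the rest to zero" in the original).
            return samples;
        }
    }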
Code example #41
        public override void Render(IList<SceneNode> nodes, RenderContext context, RenderOrder order)
        {
            if (nodes == null)
                throw new ArgumentNullException("nodes");
            if (context == null)
                throw new ArgumentNullException("context");

            int numberOfNodes = nodes.Count;
            if (numberOfNodes == 0)
                return;

              var graphicsDevice = context.GraphicsService.GraphicsDevice;
              var savedRenderState = new RenderStateSnapshot(graphicsDevice);

              for (int i = 0; i < numberOfNodes; i++)
              {
            var lightNode = nodes[i] as LightNode;
            if (lightNode == null)
              continue;

            var shadow = lightNode.Shadow as CompositeShadow;
            if (shadow == null)
              continue;

            if (shadow.ShadowMask == null)
              continue;

            // Write into a single channel and use min() blending.
            graphicsDevice.BlendState = BlendStates[shadow.ShadowMaskChannel];

            for (int j = 0; j < shadow.Shadows.Count; j++)
            {
              // Temporarily set shadow mask and shadow mask channel of child shadows.
              var childShadow = shadow.Shadows[j];
              childShadow.ShadowMask = shadow.ShadowMask;
              childShadow.ShadowMaskChannel = shadow.ShadowMaskChannel;

              // Temporarily exchange LightNode.Shadow and render the child shadow.
              lightNode.Shadow = childShadow;

              for (int k = 0; k < _shadowMaskRenderers.Count; k++)
              {
            var renderer = _shadowMaskRenderers[k];
            if (renderer.CanRender(lightNode, context))
            {
              renderer.Render(lightNode, context);
              break;
            }
              }

              // Remove shadow mask references. Strictly speaking, the mask is correct
              // for the composite shadow. It is not correct for the child shadow. The child
              // shadow only contributes to the mask. Therefore, childShadowMask should not be
              // set.
              childShadow.ShadowMask = null;
              childShadow.ShadowMaskChannel = 0;
            }

            lightNode.Shadow = shadow;
              }

              savedRenderState.Restore();
        }
Code example #42
        private void RenderHiDef(SkyboxNode node, RenderContext context)
        {
            var graphicsDevice = context.GraphicsService.GraphicsDevice;

            var savedRenderState = new RenderStateSnapshot(graphicsDevice);

            graphicsDevice.RasterizerState   = RasterizerState.CullNone;
            graphicsDevice.DepthStencilState = DepthStencilState.DepthRead;
            graphicsDevice.BlendState        = node.EnableAlphaBlending ? BlendState.AlphaBlend : BlendState.Opaque;

            bool sourceIsFloatingPoint = TextureHelper.IsFloatingPointFormat(node.Texture.Format);

            // Set sampler state. (Floating-point textures cannot use linear filtering. (XNA would throw an exception.))
            if (sourceIsFloatingPoint)
            {
                graphicsDevice.SamplerStates[0] = SamplerState.PointClamp;
            }
            else
            {
                graphicsDevice.SamplerStates[0] = SamplerState.LinearClamp;
            }

            var    cameraNode = context.CameraNode;
            Matrix view       = cameraNode.View;
            Matrix projection = cameraNode.Camera.Projection;

            // Cube maps are left handed --> Sample with inverted z. (Otherwise, the
            // cube map and objects or texts in it are mirrored.)
            var    mirrorZ     = Matrix.CreateScale(1, 1, -1);
            Matrix orientation = node.PoseWorld.Orientation;

            _parameterWorldViewProjection.SetValue((Matrix)(projection * view * new Matrix(orientation, Vector3.Zero) * mirrorZ));

            Vector4 color = node.EnableAlphaBlending
                      ? new Vector4((Vector3)node.Color * node.Alpha, node.Alpha) // Premultiplied
                      : new Vector4((Vector3)node.Color, 1);                      // Opaque

            _parameterColor.SetValue(color);
            _textureParameter.SetValue(node.Texture);

            if (node.Encoding is RgbEncoding)
            {
                _parameterTextureSize.SetValue(node.Texture.Size);
                if (context.IsHdrEnabled())
                {
                    _passRgbToRgb.Apply();
                }
                else
                {
                    _passRgbToSRgb.Apply();
                }
            }
            else if (node.Encoding is SRgbEncoding)
            {
                if (!sourceIsFloatingPoint)
                {
                    if (context.IsHdrEnabled())
                    {
                        _passSRgbToRgb.Apply();
                    }
                    else
                    {
                        _passSRgbToSRgb.Apply();
                    }
                }
                else
                {
                    throw new GraphicsException("sRGB encoded skybox cube maps must not use a floating point format.");
                }
            }
            else if (node.Encoding is RgbmEncoding)
            {
                float max = GraphicsHelper.ToGamma(((RgbmEncoding)node.Encoding).Max);
                _parameterRgbmMaxValue.SetValue(max);

                if (context.IsHdrEnabled())
                {
                    _passRgbmToRgb.Apply();
                }
                else
                {
                    _passRgbmToSRgb.Apply();
                }
            }
            else
            {
                throw new NotSupportedException("The SkyBoxRenderer supports only RgbEncoding, SRgbEncoding and RgbmEncoding.");
            }

            _submesh.Draw();
            savedRenderState.Restore();
        }
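
For the RGBM branch above, the effect receives ToGamma(Max) as the decode range. A minimal sketch of a typical RGBM decode (assumed; the actual shader code is not shown in the example): the alpha channel acts as a per-texel multiplier that is scaled by that maximum value.

    using System.Numerics;

    static class RgbmDecode
    {
        // rgbm: texel value in [0,1]^4; maxValue: maximum range passed to the effect.
        public static Vector3 Decode(Vector4 rgbm, float maxValue)
        {
            return new Vector3(rgbm.X, rgbm.Y, rgbm.Z) * (rgbm.W * maxValue);
        }
    }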
Code example #43
File: TextureBatch.cs Project: Zolniu/DigitalRune
        /// <summary>
        /// Draws the textures.
        /// </summary>
        /// <param name="context">The render context.</param>
        /// <remarks>
        /// If <see cref="SpriteBatch"/> is <see langword="null"/>, then <see cref="Render"/> does 
        /// nothing.
        /// </remarks>
        /// <exception cref="ArgumentNullException">
        /// <paramref name="context"/> is <see langword="null"/>.
        /// </exception>
        public void Render(RenderContext context)
        {
            if (context == null)
                throw new ArgumentNullException("context");

            if (SpriteBatch == null)
                return;

            var count = _textures.Count;
            if (count == 0)
                return;

              context.Validate(SpriteBatch);

              var savedRenderState = new RenderStateSnapshot(SpriteBatch.GraphicsDevice);

              SpriteBatch.Begin(SpriteSortMode.Immediate, BlendState.AlphaBlend, SamplerState.LinearClamp, DepthStencilState.None, RasterizerState.CullNone);

              for (int i = 0; i < count; i++)
              {
            var textureInfo = _textures[i];

            if (textureInfo.Texture.IsDisposed)
              continue;

            if (TextureHelper.IsFloatingPointFormat(textureInfo.Texture.Format))
            {
              // Floating-point textures must not use linear hardware filtering!
              SpriteBatch.GraphicsDevice.SamplerStates[0] = SamplerState.PointClamp;
              SpriteBatch.Draw(textureInfo.Texture, textureInfo.Rectangle, Color.White);
              SpriteBatch.GraphicsDevice.SamplerStates[0] = SamplerState.LinearClamp;
            }
            else
            {
              SpriteBatch.Draw(textureInfo.Texture, textureInfo.Rectangle, Color.White);
            }
              }

              SpriteBatch.End();

              savedRenderState.Restore();
        }
Code example #44
    public override void Render(IList<SceneNode> nodes, RenderContext context, RenderOrder order)
    {
      ThrowIfDisposed();

      if (nodes == null)
        throw new ArgumentNullException("nodes");
      if (context == null)
        throw new ArgumentNullException("context");

      // Lens flares are used sparsely in most games. --> Early out, if possible.
      int numberOfNodes = nodes.Count;
      if (nodes.Count == 0)
        return;

      context.Validate(_spriteBatch);
      context.ThrowIfCameraMissing();

      var graphicsDevice = context.GraphicsService.GraphicsDevice;
      var savedRenderState = new RenderStateSnapshot(graphicsDevice);
      bool hiDef = (graphicsDevice.GraphicsProfile == GraphicsProfile.HiDef);

      // Camera properties
      var cameraNode = context.CameraNode;
      var cameraPose = cameraNode.PoseWorld;
      Vector3 cameraForward = -cameraPose.Orientation.GetColumn(2); // 3rd column vector (negated)
      Matrix view = cameraNode.View;
      Matrix projection = cameraNode.Camera.Projection;

      // The flares are positioned on a line from the origin through the center of 
      // the screen.
      var viewport = graphicsDevice.Viewport;
      Vector2F screenCenter = new Vector2F(viewport.Width / 2.0f, viewport.Height / 2.0f);

      if (_transformParameter != null)
      {
        // ----- Original:
        // Matrix matrix = (Matrix)(Matrix.CreateOrthographicOffCenter(0, viewport.Width, viewport.Height, 0, 0, 1)
        //                 * Matrix.CreateTranslation(-0.5f, -0.5f, 0)); // Half-pixel offset (only for Direct3D 9).
        // ----- Inlined:
        Matrix matrix = new Matrix();
        float oneOverW = 1.0f / viewport.Width;
        float oneOverH = 1.0f / viewport.Height;
        matrix.M11 = oneOverW * 2f;
        matrix.M22 = -oneOverH * 2f;
        matrix.M33 = -1f;
        matrix.M44 = 1f;

#if MONOGAME  // Reconstructed: the original #if/#endif directives were lost in extraction.
        matrix.M41 = -1f;
        matrix.M42 = 1f;
#else
        // Direct3D 9: half-pixel offset
        matrix.M41 = -oneOverW - 1f;
        matrix.M42 = oneOverH + 1f;
#endif

        _transformParameter.SetValue(matrix);
      }

      // Update SceneNode.LastFrame for all visible nodes.
      int frame = context.Frame;
      cameraNode.LastFrame = frame;

      // Choose current effect technique: Linear vs. Gamma-corrected Writes.
      if (_effect != null)
        _effect.CurrentTechnique = context.IsHdrEnabled() ? _techniqueLinear : _techniqueGamma;

      _spriteBatch.Begin(SpriteSortMode.Texture, BlendState.Additive, null, null, null, _effect);
      for (int i = 0; i < numberOfNodes; i++)
      {
        var node = nodes[i] as LensFlareNode;
        if (node == null)
          continue;

        var lensFlare = node.LensFlare;
        float size, intensity;
        if (hiDef)
        {
          // HiDef profile
          object dummy;
          cameraNode.ViewDependentData.TryGetValue(node, out dummy);
          var renderData = dummy as OcclusionData;
          if (renderData == null || renderData.VisiblePixels == 0)
            continue;

          lensFlare.OnGetSizeAndIntensity(node, context, renderData.VisiblePixels, renderData.TotalPixels, out size, out intensity);
        }
        else
        {
          // Reach profile
          lensFlare.OnGetSizeAndIntensity(node, context, 0, 0, out size, out intensity);
        }

        if (size <= 0 || intensity < MinIntensity)
          continue;

        // LensFlareNode is visible in current frame.
        node.LastFrame = frame;

        // Project position to screen space.
        Vector2F screenPosition;
        if (lensFlare.IsDirectional)
        {
          // ----- Directional lights
          Vector3 lightDirectionWorld = -node.PoseWorld.Orientation.GetColumn(2);  // 3rd column vector (negated)
          Vector3 lightDirectionView = cameraPose.ToLocalDirection(lightDirectionWorld);

          // In Reach profile check light direction for visibility.
          // (In HiDef profile this check is done in UpdateOcclusion().)
          if (!hiDef && lightDirectionView.Z < 0)
          {
            // Light comes from behind camera.
            continue;
          }

          Vector3 position = viewport.ProjectToViewport(-lightDirectionView, projection);
          screenPosition = new Vector2F(position.X, position.Y);
        }
        else
        {
          // ----- Local lights
          Vector3 position = node.PoseWorld.Position;

          // In Reach profile check light direction for visibility.
          // (In HiDef profile this check is done in UpdateOcclusion().)
          if (!hiDef)
          {
            Vector3 cameraToNode = position - cameraPose.Position;
            float distance = Vector3.Dot(cameraToNode, cameraForward);
            if (distance < cameraNode.Camera.Projection.Near)
            {
              // Light is behind near plane.
              continue;
            }
          }

          position = viewport.ProjectToViewport(position, projection * view);
          screenPosition = new Vector2F(position.X, position.Y);
        }

        Vector2F flareVector = screenCenter - screenPosition;
        foreach (var flare in lensFlare.Elements)
        {
          if (flare == null)
            continue;

          var packedTexture = flare.Texture;
          if (packedTexture == null)
            continue;

          // Position the flare on a line from the lens flare origin through the 
          // screen center.
          Vector2F position = screenPosition + flareVector * flare.Distance;
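          // (e.g. flare.Distance = 0 places the element at the light source, 1 at the
          // screen center, and 2 mirrored on the opposite side of the center.)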

          // The intensity controls the alpha value.
          Vector4 color = flare.Color.ToVector4();
          color.W *= intensity;

          // Get texture.
          Texture2D textureAtlas = packedTexture.TextureAtlas;
          Vector2F textureAtlasSize = new Vector2F(textureAtlas.Width, textureAtlas.Height);
          Vector2F textureOffset = packedTexture.Offset * textureAtlasSize;
          Vector2F textureSize = packedTexture.Scale * textureAtlasSize;
          Rectangle sourceRectangle = new Rectangle((int)textureOffset.X, (int)textureOffset.Y, (int)textureSize.X, (int)textureSize.Y);
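          // (Offset and Scale are normalized atlas coordinates; e.g. in a 1024 x 512 atlas,
          // Offset = (0.5, 0) and Scale = (0.25, 0.5) select the 256 x 256 pixel region
          // starting at (512, 0).)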

          // The image rotates around its origin (= reference point) - usually the
          // center of the image.
          Vector2F origin = textureSize * flare.Origin;
          float rotation = flare.Rotation;
          Vector2F direction = flareVector;
          if (Numeric.IsNaN(rotation) && direction.TryNormalize())
          {
            // NaN = automatic rotation:
            // Determine angle between direction and reference vector (0, 1):
            // From http://www.euclideanspace.com/maths/algebra/vectors/angleBetween/issues/index.htm:
            // rotation = atan2(v2.y,v2.x) - atan2(v1.y,v1.x)
            //          = atan2(v2.y,v2.x) - atan2(1,0)
            //          = atan2(v2.y,v2.x) - π/2
            rotation = (float)Math.Atan2(direction.Y, direction.X) - ConstantsF.PiOver2;
          }

          Vector2F scale = size * viewport.Height * flare.Scale / textureSize.Y;
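          // (e.g. size = 0.1 with flare.Scale = 1, a 720-pixel-high viewport and a
          // 128-pixel-high flare texture gives scale = 72 / 128 = 0.5625, so the sprite
          // is drawn 72 pixels high.)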

          // Render flare using additive blending.
          _spriteBatch.Draw(textureAtlas, (Vector2)position, sourceRectangle, new Color(color),
                            rotation, (Vector2)origin, (Vector2)scale, SpriteEffects.None, 0);
        }
      }

      _spriteBatch.End();
      savedRenderState.Restore();
    }
コード例 #45
0
        public override void Render(IList<SceneNode> nodes, RenderContext context, RenderOrder order)
        {
            if (nodes == null)
            throw new ArgumentNullException("nodes");
              if (context == null)
            throw new ArgumentNullException("context");

              int numberOfNodes = nodes.Count;
              if (numberOfNodes == 0)
            return;

              context.Validate(_effect);
              context.ThrowIfCameraMissing();

              var graphicsDevice = _effect.GraphicsDevice;
              var savedRenderState = new RenderStateSnapshot(graphicsDevice);
              graphicsDevice.DepthStencilState = DepthStencilState.None;
              graphicsDevice.RasterizerState = RasterizerState.CullNone;

              // Set camera properties.
              var cameraNode = context.CameraNode;
              var cameraPose = cameraNode.PoseWorld;
              Matrix viewInverse = cameraPose;
              _parameterViewInverse.SetValue(viewInverse);
              _parameterGBuffer0.SetValue(context.GBuffer0);

              Viewport viewport = context.Viewport;
              _parameterParameters0.SetValue(new Vector2(viewport.Width, viewport.Height));

              // Set jitter map.
              if (_jitterMap == null)
            _jitterMap = NoiseHelper.GetGrainTexture(context.GraphicsService, NoiseHelper.DefaultJitterMapWidth);

              _parameterJitterMap.SetValue(_jitterMap);

              float cameraFar = context.CameraNode.Camera.Projection.Far;

              for (int i = 0; i < numberOfNodes; i++)
              {
            var lightNode = nodes[i] as LightNode;
            if (lightNode == null)
              continue;

            var shadow = lightNode.Shadow as CascadedShadow;
            if (shadow == null)
              continue;

            if (shadow.ShadowMap == null || shadow.ShadowMask == null)
              continue;

            // The effect must only render in a specific channel.
            // Do not change the blend state if the correct write channel is already set, e.g. if this
            // shadow is part of a CompositeShadow, the correct blend state is already set.
            if ((int)graphicsDevice.BlendState.ColorWriteChannels != (1 << shadow.ShadowMaskChannel))
              graphicsDevice.BlendState = GraphicsHelper.BlendStateWriteSingleChannel[shadow.ShadowMaskChannel];

            _parameterParameters1.SetValue(new Vector4(
              shadow.FadeOutRange,
              shadow.Distances[shadow.NumberOfCascades - 1],
              shadow.VisualizeCascades ? 1 : 0,
              shadow.ShadowFog));

            float filterRadius = shadow.FilterRadius;

            // If we use a subset of the Poisson kernel, we have to normalize the scale.
            int numberOfSamples = Math.Min(shadow.NumberOfSamples, StandardShadowMaskRenderer.PoissonKernel.Length);

            // Not all shader passes support cascade visualization. Use a similar pass instead.
            if (shadow.VisualizeCascades)
            {
              if (numberOfSamples < 0)
              {
            numberOfSamples = 4;
              }
              else if (numberOfSamples == 0)
              {
            numberOfSamples = 1;
            filterRadius = 0;
              }
            }

            // The best dithered CSM supports max 22 samples.
            if (shadow.CascadeSelection == ShadowCascadeSelection.BestDithered && numberOfSamples > 22)
              numberOfSamples = 22;

            if (numberOfSamples > 0)
              filterRadius /= StandardShadowMaskRenderer.PoissonKernel[numberOfSamples - 1].Length();

            _parameterParameters2.SetValue(new Vector4(
              shadow.ShadowMap.Width,
              shadow.ShadowMap.Height,
              filterRadius,
              // The StandardShadow.JitterResolution is the number of texels per world unit.
              // In the shader the parameter JitterResolution contains the division by the jitter map size.
              shadow.JitterResolution / _jitterMap.Width));
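            // (e.g. JitterResolution = 32 texels per world unit and a 64-pixel-wide jitter
            // map yield a shader parameter value of 0.5.)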

            // Split distances.
            if (_parameterDistances != null)
            {
              // Set unused entries to large values.
              Vector4F distances = shadow.Distances;
              for (int j = shadow.NumberOfCascades; j < 4; j++)
            distances[j] = 10 * cameraFar;

              _parameterDistances.SetValue((Vector4)distances);
            }

            Debug.Assert(shadow.ViewProjections.Length == 4);
            for (int j = 0; j < _matrices.Length; j++)
              _matrices[j] = viewInverse * shadow.ViewProjections[j];

            _parameterShadowMatrices.SetValue(_matrices);

            _parameterDepthBias.SetValue((Vector4)shadow.EffectiveDepthBias);
            _parameterNormalOffset.SetValue((Vector4)shadow.EffectiveNormalOffset);

            Vector3F lightBackwardWorld = lightNode.PoseWorld.Orientation.GetColumn(2);
            _parameterLightDirection.SetValue((Vector3)cameraPose.ToLocalDirection(lightBackwardWorld));
            _parameterNumberOfCascades.SetValue(shadow.NumberOfCascades);
            _parameterShadowMap.SetValue(shadow.ShadowMap);

            var rectangle = GraphicsHelper.GetViewportRectangle(cameraNode, viewport, lightNode);
            Vector2F texCoordTopLeft = new Vector2F(rectangle.Left / (float)viewport.Width, rectangle.Top / (float)viewport.Height);
            Vector2F texCoordBottomRight = new Vector2F(rectangle.Right / (float)viewport.Width, rectangle.Bottom / (float)viewport.Height);
            GraphicsHelper.GetFrustumFarCorners(cameraNode.Camera.Projection, texCoordTopLeft, texCoordBottomRight, _frustumFarCorners);
            _parameterFrustumCorners.SetValue(_frustumFarCorners);

            var pass = GetPass(numberOfSamples, shadow.CascadeSelection, shadow.VisualizeCascades);

            if (numberOfSamples > 0)
            {
              if (_lastNumberOfSamples != numberOfSamples)
              {
            // Create an array with the first n samples and the rest set to 0.
            _lastNumberOfSamples = numberOfSamples;
            for (int j = 0; j < numberOfSamples; j++)
            {
              _samples[j].Y = StandardShadowMaskRenderer.PoissonKernel[j].Y;
              _samples[j].X = StandardShadowMaskRenderer.PoissonKernel[j].X;
              _samples[j].Z = 1.0f / numberOfSamples;
            }

            // Set the rest to zero.
            for (int j = numberOfSamples; j < _samples.Length; j++)
              _samples[j] = Vector3.Zero;

            _parameterSamples.SetValue(_samples);
              }
              else if (i == 0)
              {
            // Apply offsets in the first loop.
            _parameterSamples.SetValue(_samples);
              }
            }

            pass.Apply();

            graphicsDevice.DrawQuad(rectangle);
              }

              _parameterGBuffer0.SetValue((Texture2D)null);
              _parameterJitterMap.SetValue((Texture2D)null);
              _parameterShadowMap.SetValue((Texture2D)null);
              savedRenderState.Restore();
        }
コード例 #46
0
ファイル: OceanFft.cs プロジェクト: Zolniu/DigitalRune
        // Perform FFTs.
        // 4 complex input images: source0.xy, source0.zw, source1.xy, source1.zw
        // 2 targets: target0 = displacement map, target1 = normal map using Color format.
        public void Process(RenderContext context, bool forward, Texture2D source0, Texture2D source1, RenderTarget2D target0, RenderTarget2D target1, float choppiness)
        {
            if (context == null)
            throw new ArgumentNullException("context");
              if (source0 == null)
            throw new ArgumentNullException("source0");
              if (source1 == null)
            throw new ArgumentNullException("source1");

              if (forward)
              {
            // For forward FFT, uncomment the LastPassScale stuff!
            throw new NotImplementedException("Forward FFT not implemented.");
              }

              var graphicsService = context.GraphicsService;
              var graphicsDevice = graphicsService.GraphicsDevice;
              var renderTargetPool = graphicsService.RenderTargetPool;

              var savedRenderState = new RenderStateSnapshot(graphicsDevice);

              graphicsDevice.BlendState = BlendState.Opaque;
              graphicsDevice.RasterizerState = RasterizerState.CullNone;
              graphicsDevice.DepthStencilState = DepthStencilState.None;

              int size = source0.Width;
              _parameterSize.SetValue((float)size);

              _parameterChoppiness.SetValue(choppiness);

              int numberOfButterflyPasses = (int)MathHelper.Log2GreaterOrEqual((uint)source0.Width);
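              // (e.g. a 256 x 256 source texture requires log2(256) = 8 butterfly passes
              // per direction.)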
              // ReSharper disable once ConditionIsAlwaysTrueOrFalse
              _parameterButterflyTexture.SetValue(GetButterflyTexture(forward, numberOfButterflyPasses));

              var format = new RenderTargetFormat(size, size, false, source0.Format, DepthFormat.None);
              var tempPing0 = renderTargetPool.Obtain2D(format);
              var tempPing1 = renderTargetPool.Obtain2D(format);
              var tempPong0 = renderTargetPool.Obtain2D(format);
              var tempPong1 = renderTargetPool.Obtain2D(format);

              //_parameterIsLastPass.SetValue(false);

              // Perform horizontal and vertical FFT pass.
              for (int i = 0; i < 2; i++)
              {
            //_parameterLastPassScale.SetValue(1);

            // Perform butterfly passes. We ping-pong between two temp targets.
            for (int pass = 0; pass < numberOfButterflyPasses; pass++)
            {
              _parameterButterflyIndex.SetValue(0.5f / numberOfButterflyPasses + (float)pass / numberOfButterflyPasses);
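              // (With 8 butterfly passes the indices are 0.0625, 0.1875, ..., 0.9375,
              // i.e. the centers of 8 equally spaced rows in the butterfly texture.)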

              if (i == 0 && pass == 0)
              {
            // First pass.
            _renderTargetBindings[0] = new RenderTargetBinding(tempPing0);
            _renderTargetBindings[1] = new RenderTargetBinding(tempPing1);
            graphicsDevice.SetRenderTargets(_renderTargetBindings);
            _parameterSourceTexture0.SetValue(source0);
            _parameterSourceTexture1.SetValue(source1);
              }
              else if (i == 1 && pass == numberOfButterflyPasses - 1)
              {
            // Last pass.
            // We have explicit shader passes for the last FFT pass.
            break;

            //_parameterIsLastPass.SetValue(true);
            //if (forward)
            //  _parameterLastPassScale.SetValue(1.0f / size / size);

            //if (_renderTargetBindings[0].RenderTarget == tempPing0)
            //{
            //  _renderTargetBindings[0] = new RenderTargetBinding(target0);
            //  _renderTargetBindings[1] = new RenderTargetBinding(target1);
            //  graphicsDevice.SetRenderTargets(_renderTargetBindings);
            //  _parameterSourceTexture0.SetValue(tempPing0);
            //  _parameterSourceTexture1.SetValue(tempPing1);
            //}
            //else
            //{
            //  _renderTargetBindings[0] = new RenderTargetBinding(target0);
            //  _renderTargetBindings[1] = new RenderTargetBinding(target1);
            //  graphicsDevice.SetRenderTargets(_renderTargetBindings);
            //  _parameterSourceTexture0.SetValue(tempPong0);
            //  _parameterSourceTexture1.SetValue(tempPong1);
            //}
              }
              else
              {
            // Intermediate pass.
            if (_renderTargetBindings[0].RenderTarget == tempPing0)
            {
              _renderTargetBindings[0] = new RenderTargetBinding(tempPong0);
              _renderTargetBindings[1] = new RenderTargetBinding(tempPong1);
              graphicsDevice.SetRenderTargets(_renderTargetBindings);
              _parameterSourceTexture0.SetValue(tempPing0);
              _parameterSourceTexture1.SetValue(tempPing1);
            }
            else
            {
              _renderTargetBindings[0] = new RenderTargetBinding(tempPing0);
              _renderTargetBindings[1] = new RenderTargetBinding(tempPing1);
              graphicsDevice.SetRenderTargets(_renderTargetBindings);
              _parameterSourceTexture0.SetValue(tempPong0);
              _parameterSourceTexture1.SetValue(tempPong1);
            }
              }

              if (i == 0)
            _passFftHorizontal.Apply();
              else
            _passFftVertical.Apply();

              graphicsDevice.DrawFullScreenQuad();
            }
              }

              // Perform final vertical FFT passes. We have to perform them separately
              // because displacement map and normal map usually have different bit depth.
              // Final pass for displacement.
              graphicsDevice.SetRenderTarget(target0);
              if (_renderTargetBindings[1].RenderTarget == tempPing1)
            _parameterSourceTexture0.SetValue(tempPing0);
              else
            _parameterSourceTexture0.SetValue(tempPong0);

              _passFftDisplacement.Apply();
              graphicsDevice.DrawFullScreenQuad();

              // Final pass for normals.
              graphicsDevice.SetRenderTarget(target1);
              if (_renderTargetBindings[1].RenderTarget == tempPing1)
            _parameterSourceTexture0.SetValue(tempPing1);
              else
            _parameterSourceTexture0.SetValue(tempPong1);

              _passFftNormal.Apply();
              graphicsDevice.DrawFullScreenQuad();

              // Clean up.
              _renderTargetBindings[0] = default(RenderTargetBinding);
              _renderTargetBindings[1] = default(RenderTargetBinding);
              _parameterButterflyTexture.SetValue((Texture2D)null);
              _parameterSourceTexture0.SetValue((Texture2D)null);
              _parameterSourceTexture1.SetValue((Texture2D)null);

              renderTargetPool.Recycle(tempPing0);
              renderTargetPool.Recycle(tempPing1);
              renderTargetPool.Recycle(tempPong0);
              renderTargetPool.Recycle(tempPong1);

              savedRenderState.Restore();

              // Reset the texture stages. If a floating point texture is set, we get exceptions
              // when a sampler with bilinear filtering is set.
            #if !MONOGAME
              graphicsDevice.ResetTextures();
            #endif
        }
コード例 #47
0
        private void RenderReach(SkyboxNode node, RenderContext context)
        {
            var graphicsDevice = context.GraphicsService.GraphicsDevice;

            var savedRenderState = new RenderStateSnapshot(graphicsDevice);

            graphicsDevice.RasterizerState   = RasterizerState.CullCounterClockwise;
            graphicsDevice.DepthStencilState = DepthStencilState.DepthRead;
            graphicsDevice.BlendState        = node.EnableAlphaBlending ? BlendState.AlphaBlend : BlendState.Opaque;
            graphicsDevice.SamplerStates[0]  = SamplerState.LinearClamp;

            // Change viewport to render all pixels at max z.
            var originalViewport = graphicsDevice.Viewport;
            var viewport         = originalViewport;

            viewport.MinDepth       = viewport.MaxDepth;
            graphicsDevice.Viewport = viewport;

            var cameraNode = context.CameraNode;
            var view       = cameraNode.View;

            view.Translation = Vector3.Zero;
            var projection = cameraNode.Camera.Projection;

            var basicEffect = (BasicEffect)_effect;

            basicEffect.View         = (Matrix)view;
            basicEffect.Projection   = projection;
            basicEffect.DiffuseColor = (Vector3)node.Color;
            basicEffect.Alpha        = node.EnableAlphaBlending ? node.Alpha : 1;

            // Scale skybox such that it lies within view frustum:
            //   distance of a skybox corner = √3
            //   √3 * scale = far
            //   => scale = far / √3
            // (Note: If  near > far / √3  then the skybox will be clipped.)
            float scale = projection.Far * 0.577f;
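            // (e.g. far = 1000 gives scale ≈ 577; a cube corner then lies at
            // sqrt(3) * 577 ≈ 999, just inside the far plane.)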

            var orientation = node.PoseWorld.Orientation;

            // Positive X
            basicEffect.Texture = GetTexture2D(graphicsDevice, node.Texture, CubeMapFace.PositiveX);
            basicEffect.World   = (Matrix) new Matrix(orientation * scale, Vector3.Zero);
            basicEffect.CurrentTechnique.Passes[0].Apply();
            graphicsDevice.DrawUserPrimitives(PrimitiveType.TriangleStrip, _faceVertices, 0, 2);

            // Negative X
            // transform = scale * rotY(180°)
            var transform = new Matrix(-scale, 0, 0, 0, scale, 0, 0, 0, -scale);

            basicEffect.Texture = GetTexture2D(graphicsDevice, node.Texture, CubeMapFace.NegativeX);
            basicEffect.World   = (Matrix) new Matrix(orientation * transform, Vector3.Zero);
            basicEffect.CurrentTechnique.Passes[0].Apply();
            graphicsDevice.DrawUserPrimitives(PrimitiveType.TriangleStrip, _faceVertices, 0, 2);

            // Positive Y
            // transform = scale * rotX(90°) * rotY(90°)
            transform           = new Matrix(0, 0, scale, scale, 0, 0, 0, scale, 0);
            basicEffect.Texture = GetTexture2D(graphicsDevice, node.Texture, CubeMapFace.PositiveY);
            basicEffect.World   = (Matrix) new Matrix(orientation * transform, Vector3.Zero);
            basicEffect.CurrentTechnique.Passes[0].Apply();
            graphicsDevice.DrawUserPrimitives(PrimitiveType.TriangleStrip, _faceVertices, 0, 2);

            // Negative Y
            // transform = scale * rotX(-90°) * rotY(90°)
            transform           = new Matrix(0, 0, scale, -scale, 0, 0, 0, -scale, 0);
            basicEffect.Texture = GetTexture2D(graphicsDevice, node.Texture, CubeMapFace.NegativeY);
            basicEffect.World   = (Matrix) new Matrix(orientation * transform, Vector3.Zero);
            basicEffect.CurrentTechnique.Passes[0].Apply();
            graphicsDevice.DrawUserPrimitives(PrimitiveType.TriangleStrip, _faceVertices, 0, 2);

            // Cube maps are left-handed, whereas the world is right-handed!

            // Positive Z (= negative Z in world space)
            // transform = scale * rotY(90°)
            transform           = new Matrix(0, 0, scale, 0, scale, 0, -scale, 0, 0);
            basicEffect.Texture = GetTexture2D(graphicsDevice, node.Texture, CubeMapFace.PositiveZ);
            basicEffect.World   = (Matrix) new Matrix(orientation * transform, Vector3.Zero);
            basicEffect.CurrentTechnique.Passes[0].Apply();
            graphicsDevice.DrawUserPrimitives(PrimitiveType.TriangleStrip, _faceVertices, 0, 2);

            // Negative Z (= positive Z in world space)
            // transform = scale * rotY(-90°)
            transform           = new Matrix(0, 0, -scale, 0, scale, 0, scale, 0, 0);
            basicEffect.Texture = GetTexture2D(graphicsDevice, node.Texture, CubeMapFace.NegativeZ);
            basicEffect.World   = (Matrix) new Matrix(orientation * transform, Vector3.Zero);
            basicEffect.CurrentTechnique.Passes[0].Apply();
            graphicsDevice.DrawUserPrimitives(PrimitiveType.TriangleStrip, _faceVertices, 0, 2);

            graphicsDevice.Viewport = originalViewport;
            savedRenderState.Restore();
        }
コード例 #48
0
ファイル: PostProcessor.cs プロジェクト: Zolniu/DigitalRune
        internal void ProcessInternal(RenderContext context)
        {
            Debug.Assert(Enabled, "PostProcessor.ProcessInternal should only be called when the post-processor is enabled.");

            var graphicsDevice = GraphicsService.GraphicsDevice;

            var savedRenderState = new RenderStateSnapshot(graphicsDevice);

            // Set render states. The blend state must be set by the user!
            graphicsDevice.RasterizerState = RasterizerState.CullNone;
            graphicsDevice.DepthStencilState = DepthStencilState.None;

            // Perform post-processing.
            OnProcess(context);

            savedRenderState.Restore();

            // Reset the texture stages. If a floating-point texture is set, we get exceptions
            // when a sampler with bilinear filtering is set.
#if !MONOGAME
            graphicsDevice.ResetTextures();
#endif
        }
コード例 #49
0
    public void UpdateOcclusion(IList<SceneNode> nodes, RenderContext context)
    {
      // Measures the visibility of the light source by drawing a screen-aligned quad
      // using an occlusion query. The query result is used in the next frame.

      ThrowIfDisposed();

      if (nodes == null)
        throw new ArgumentNullException("nodes");
      if (context == null)
        throw new ArgumentNullException("context");

      context.Validate(_basicEffect);
      context.ThrowIfCameraMissing();

      // Occlusion queries require HiDef profile.
      var graphicsDevice = context.GraphicsService.GraphicsDevice;
      if (graphicsDevice.GraphicsProfile == GraphicsProfile.Reach)
        return;

      int numberOfNodes = nodes.Count;
      if (nodes.Count == 0)
        return;

      // Camera properties
      var cameraNode = context.CameraNode;
      var cameraPose = cameraNode.PoseWorld;
      Vector3 cameraRight = cameraPose.Orientation.GetColumn(0);    // 1st column vector
      Vector3 cameraUp = cameraPose.Orientation.GetColumn(1);       // 2nd column vector
      Vector3 cameraForward = -cameraPose.Orientation.GetColumn(2); // 3rd column vector (negated)
      Matrix view = cameraNode.View;
      Matrix projection = cameraNode.Camera.Projection;
      bool isOrthographic = (projection.M33 != 0);

      // The following factors are used to estimate the size of a quad in screen space.
      // (The equation is described in GraphicsHelper.GetScreenSize().)
      float xScale = Math.Abs(projection.M00 / 2);
      float yScale = Math.Abs(projection.M11 / 2);
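      // (e.g. with projection.M00 = 1, i.e. a 90° horizontal field of view, a 1 m wide
      // quad at a distance of 10 m in a 1280-pixel-wide viewport covers roughly
      // 1280 * 1 * 0.5 / 10 = 64 pixels.)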

      var viewport = graphicsDevice.Viewport;

      // Lens flares of directional lights are rendered directly on the screen.
      // --> Set up projection transformation for rendering in screen space.
      var orthographicProjection = Matrix.CreateOrthographicOffCenter(0, viewport.Width, viewport.Height, 0, 0, 1);

      // Set render states for rendering occlusion query geometry (quad).
      var savedRenderState = new RenderStateSnapshot(graphicsDevice);
      graphicsDevice.BlendState = GraphicsHelper.BlendStateNoColorWrite;
      graphicsDevice.DepthStencilState = DepthStencilState.DepthRead;
      graphicsDevice.RasterizerState = RasterizerState.CullCounterClockwise;

      for (int i = 0; i < numberOfNodes; i++)
      {
        var node = nodes[i] as LensFlareNode;
        if (node == null)
          continue;

        object dummy;
        cameraNode.ViewDependentData.TryGetValue(node, out dummy);
        var renderData = dummy as OcclusionData;
        if (renderData == null)
        {
          renderData = new OcclusionData();
          cameraNode.ViewDependentData[node] = renderData;
        }

        if (renderData.OcclusionQuery != null)
        {
          // Wait until previous occlusion query has completed.
          if (!renderData.OcclusionQuery.IsComplete)
            continue;

          // ----- Read result of previous occlusion query.
          int visiblePixels = renderData.OcclusionQuery.PixelCount;

          // OcclusionData.TotalPixels is only an approximation.
          // --> Clamp pixel count to [0, TotalPixels].
          if (visiblePixels > renderData.TotalPixels)
            visiblePixels = renderData.TotalPixels;

          renderData.VisiblePixels = visiblePixels;
        }

        // ----- Run new occlusion query.
        var lensFlare = node.LensFlare;

        // The user can disable the lens flare by setting LensFlare.Intensity to 0.
        float intensity = node.Intensity * lensFlare.Intensity;
        if (intensity < MinIntensity)
        {
          renderData.VisiblePixels = 0;
          continue;
        }

        float querySize;
        if (lensFlare.IsDirectional)
        {
          // ----- Directional lights

          // Ignore directional lights if the camera has an orthographic projection.
          // (The light source is infinitely far away and the camera frustum has only
          // limited width and height. It is very unlikely that the camera catches the
          // directional light.)
          if (isOrthographic)
          {
            renderData.VisiblePixels = 0;
            continue;
          }

          // Directional lights are positioned at infinite distance and are not affected
          // by the position of the camera.
          Vector3 lightDirectionWorld = -node.PoseWorld.Orientation.GetColumn(2);  // 3rd column vector (negated)
          Vector3 lightDirectionView = cameraPose.ToLocalDirection(lightDirectionWorld);
          if (lightDirectionView.Z < 0)
          {
            // Light comes from behind camera.
            renderData.VisiblePixels = 0;
            continue;
          }

          // Project position to viewport.
          Vector3 screenPosition = viewport.ProjectToViewport(-lightDirectionView, projection);

          // LensFlare.QuerySize is the size relative to viewport.
          querySize = lensFlare.QuerySize * viewport.Height;
          renderData.TotalPixels = (int)(querySize * querySize);
          if (renderData.TotalPixels < MinPixelSize)
          {
            // Cull small light sources.
            renderData.VisiblePixels = 0;
            continue;
          }

          // Draw quad in screen space.
          querySize /= 2;
          _queryGeometry[0].Position = new Vector3(screenPosition.X - querySize, screenPosition.Y - querySize, -1);
          _queryGeometry[1].Position = new Vector3(screenPosition.X + querySize, screenPosition.Y - querySize, -1);
          _queryGeometry[2].Position = new Vector3(screenPosition.X - querySize, screenPosition.Y + querySize, -1);
          _queryGeometry[3].Position = new Vector3(screenPosition.X + querySize, screenPosition.Y + querySize, -1);

          _basicEffect.World = Matrix.Identity;
          _basicEffect.View = Matrix.Identity;
          _basicEffect.Projection = orthographicProjection;
          _basicEffect.CurrentTechnique.Passes[0].Apply();

          if (renderData.OcclusionQuery == null)
            renderData.OcclusionQuery = new OcclusionQuery(graphicsDevice);

          renderData.OcclusionQuery.Begin();
          graphicsDevice.DrawUserPrimitives(PrimitiveType.TriangleStrip, _queryGeometry, 0, 2);
          renderData.OcclusionQuery.End();
        }
        else
        {
          // ----- Local lights

          // Determine planar distance to camera.
          Vector3 position = node.PoseWorld.Position;
          Vector3 cameraToNode = position - cameraPose.Position;
          float distance = Vector3.Dot(cameraToNode, cameraForward);
          if (distance < cameraNode.Camera.Projection.Near)
          {
            // Light is behind near plane.
            renderData.VisiblePixels = 0;
            continue;
          }

          Debug.Assert(
            node.ScaleWorld.X > 0 && node.ScaleWorld.Y > 0 && node.ScaleWorld.Z > 0,
            "Assuming that all scale factors are positive.");

          // LensFlare.QuerySize is the size in world space.
          querySize = node.ScaleWorld.LargestComponent * node.LensFlare.QuerySize;

          // Estimate screen space size of query geometry.
          float screenSizeX = viewport.Width * querySize * xScale;
          float screenSizeY = viewport.Height * querySize * yScale;
          if (!isOrthographic)
          {
            float oneOverDistance = 1 / distance;
            screenSizeX *= oneOverDistance;
            screenSizeY *= oneOverDistance;
          }

          renderData.TotalPixels = (int)(screenSizeX * screenSizeY);
          if (renderData.TotalPixels < MinPixelSize)
          {
            // Cull small light sources.
            renderData.VisiblePixels = 0;
            continue;
          }

          // Draw screen-aligned quad in world space.
          querySize /= 2;
          Vector3 upVector = querySize * cameraUp;
          Vector3 rightVector = querySize * cameraRight;

          // Offset quad by half its size towards the camera. Otherwise, the geometry 
          // of the light source could obstruct the query geometry.
          position -= querySize * cameraToNode.Normalized;
          _queryGeometry[0].Position = (Vector3)(position - rightVector - upVector);
          _queryGeometry[1].Position = (Vector3)(position - rightVector + upVector);
          _queryGeometry[2].Position = (Vector3)(position + rightVector - upVector);
          _queryGeometry[3].Position = (Vector3)(position + rightVector + upVector);

          _basicEffect.World = Matrix.Identity;
          _basicEffect.View = (Matrix)view;
          _basicEffect.Projection = (Matrix)projection;
          _basicEffect.CurrentTechnique.Passes[0].Apply();

          if (renderData.OcclusionQuery == null)
            renderData.OcclusionQuery = new OcclusionQuery(graphicsDevice);

          renderData.OcclusionQuery.Begin();
          graphicsDevice.DrawUserPrimitives(PrimitiveType.TriangleStrip, _queryGeometry, 0, 2);
          renderData.OcclusionQuery.End();
        }
      }

      savedRenderState.Restore();
    }
コード例 #50
0
        public override void Render(IList<SceneNode> nodes, RenderContext context, RenderOrder order)
        {
            ThrowIfDisposed();

              if (nodes == null)
            throw new ArgumentNullException("nodes");
              if (context == null)
            throw new ArgumentNullException("context");

              int numberOfNodes = nodes.Count;
              if (nodes.Count == 0)
            return;

              context.Validate(_effect);
              context.ThrowIfCameraMissing();

              var graphicsDevice = context.GraphicsService.GraphicsDevice;
              var savedRenderState = new RenderStateSnapshot(graphicsDevice);

              // Camera properties
              var cameraNode = context.CameraNode;
              Matrix view = (Matrix)cameraNode.View;
              Matrix projection = cameraNode.Camera.Projection;
              Matrix viewProjection = view * projection;

              // Update SceneNode.LastFrame for all visible nodes.
              int frame = context.Frame;
              cameraNode.LastFrame = frame;

              // Blend additively over any cosmos textures.
              graphicsDevice.RasterizerState = RasterizerState.CullNone;
              graphicsDevice.DepthStencilState = DepthStencilState.DepthRead;
              graphicsDevice.BlendState = BlendState.Additive;

              _effectParameterViewportSize.SetValue(new Vector2(context.Viewport.Width, context.Viewport.Height));

              for (int i = 0; i < numberOfNodes; i++)
              {
            var node = nodes[i] as StarfieldNode;
            if (node == null)
              continue;

            // StarfieldNode is visible in current frame.
            node.LastFrame = frame;

            if (node.Stars != null && node.Stars.Count > 0)
            {
              Matrix world = (Matrix)new Matrix44F(node.PoseWorld.Orientation, Vector3F.Zero);
              _effectParameterWorldViewProjection.SetValue(world * viewProjection);

              // In [ZFX] the star luminance of the precomputed star data is scaled with
              // float const viewFactor = tan(fov);
              // float const resolutionFactor = resolution / 1920.0f;
              // float const luminanceScale = 1.0f / (viewFactor * viewFactor) * (resolutionFactor * resolutionFactor);
              // We ignore this here, but we could add this factor to the Intensity parameter.
              _effectParameterIntensity.SetValue((Vector3)node.Color);

              if (context.IsHdrEnabled())
            _effectPassLinear.Apply();
              else
            _effectPassGamma.Apply();

              var mesh = GetStarfieldMesh(node, context);
              mesh.Draw();
            }
              }

              savedRenderState.Restore();
        }
コード例 #51
0
        private void Render(RenderContext context, Vector4F color, Texture2D colorTexture, bool preserveColor)
        {
            if (context == null)
            throw new ArgumentNullException("context");

              context.Validate(_effect);
              context.ThrowIfCameraMissing();
              context.ThrowIfGBuffer0Missing();

              var graphicsDevice = _effect.GraphicsDevice;
              var savedRenderState = new RenderStateSnapshot(graphicsDevice);
              graphicsDevice.DepthStencilState = GraphicsHelper.DepthStencilStateAlways;
              graphicsDevice.RasterizerState = RasterizerState.CullNone;

              if (preserveColor)
            graphicsDevice.BlendState = GraphicsHelper.BlendStateNoColorWrite;
              else
            graphicsDevice.BlendState = BlendState.Opaque;

              if (colorTexture != null)
              {
            if (TextureHelper.IsFloatingPointFormat(colorTexture.Format))
              graphicsDevice.SamplerStates[1] = SamplerState.PointClamp;
            else
              graphicsDevice.SamplerStates[1] = SamplerState.LinearClamp;
              }

              var projection = context.CameraNode.Camera.Projection;
              bool isPerspective = projection is PerspectiveProjection;
              float near = projection.Near * NearBias;
              float far = projection.Far * FarBias;
              var biasedProjection = isPerspective
                               ? Matrix44F.CreatePerspectiveOffCenter(
                                 projection.Left, projection.Right,
                                 projection.Bottom, projection.Top,
                                 near, far)
                               : Matrix44F.CreateOrthographicOffCenter(
                                 projection.Left, projection.Right,
                                 projection.Bottom, projection.Top,
                                 near, far);

              var viewport = graphicsDevice.Viewport;
              _parameterViewportSize.SetValue(new Vector2(viewport.Width, viewport.Height));
              _parameterProjection.SetValue((Matrix)biasedProjection);
              _parameterCameraFar.SetValue(projection.Far);
              _parameterGBuffer0.SetValue(context.GBuffer0);
              _parameterColor.SetValue((Vector4)color);
              _parameterSourceTexture.SetValue(colorTexture);

              _effect.CurrentTechnique = isPerspective ? _techniquePerspective : _techniqueOrthographic;
              _effect.CurrentTechnique.Passes[(colorTexture == null) ? 0 : 1].Apply();

              graphicsDevice.DrawFullScreenQuad();

              graphicsDevice.ResetTextures();

              savedRenderState.Restore();
        }
コード例 #52
0
    /// <summary>
    /// Draws the batched primitives.
    /// </summary>
    /// <param name="context">The render context.</param>
    /// <exception cref="ArgumentNullException">
    /// <paramref name="context"/> is <see langword="null"/>.
    /// </exception>
    public void Render(RenderContext context)
    {
      if (context == null)
        throw new ArgumentNullException("context");

      if (Effect == null || _primitives.Count == 0)
        return;

      context.Validate(Effect);
      context.ThrowIfCameraMissing();

      var graphicsService = context.GraphicsService;
      var graphicsDevice = graphicsService.GraphicsDevice;
      var savedRenderState = new RenderStateSnapshot(graphicsDevice);
      if (AutoRasterizerState)
        graphicsDevice.RasterizerState = DrawWireFrame ? GraphicsHelper.RasterizerStateWireFrame : GraphicsHelper.RasterizerStateCullCounterClockwise;

      // Sort primitives if necessary.
      Matrix view = context.CameraNode.View;
      if (SortBackToFront && _usesTransparency)
      {
        // Update depth (distance from camera).
        foreach (var job in _primitives)
        {
          Vector3 position = job.Pose.Position;
          job.Depth = view.TransformPosition(position).Z;
        }

        _primitives.Sort(PrimitiveJobDepthComparer.Instance);
      }

      // Reset the texture stages. If a floating point texture is set, we get exceptions
      // when a sampler with bilinear filtering is set.
      graphicsDevice.ResetTextures();

      Effect.LightingEnabled = !DrawWireFrame;
      Effect.TextureEnabled = false;
      Effect.View = (Matrix)view;
      Effect.Projection = context.CameraNode.Camera.Projection;

      foreach (var job in _primitives)
      {
        Effect.Alpha = job.Color.A / 255.0f;
        Effect.DiffuseColor = job.Color.ToVector3();
        Effect.VertexColorEnabled = false;

        switch (job.Type)
        {
          case PrimitiveJobType.Box:
            RenderBox(graphicsService, job);
            break;
          case PrimitiveJobType.Capsule:
            RenderCapsule(graphicsService, job);
            break;
          case PrimitiveJobType.Cone:
            RenderCone(graphicsService, job);
            break;
          case PrimitiveJobType.Cylinder:
            RenderCylinder(graphicsService, job);
            break;
          case PrimitiveJobType.ViewVolume:
            RenderViewVolume(job, context);
            break;
          case PrimitiveJobType.Sphere:
            RenderSphere(graphicsService, job, context);
            break;
          case PrimitiveJobType.Shape:
            RenderShape(graphicsDevice, job);
            break;
          case PrimitiveJobType.Model:
            RenderModel(graphicsDevice, job);
            break;
          case PrimitiveJobType.Submesh:
            RenderSubmesh(job);
            break;
        }
      }

      if (DrawWireFrame)
      {
        _lineBatch.Render(context);
        _lineBatch.Clear();
      }

      savedRenderState.Restore();
    }
コード例 #53
0
        /// <inheritdoc/>
        public override void Render(IList <SceneNode> nodes, RenderContext context, RenderOrder order)
        {
            ThrowIfDisposed();

            if (nodes == null)
            {
                throw new ArgumentNullException("nodes");
            }
            if (context == null)
            {
                throw new ArgumentNullException("context");
            }

            int numberOfNodes = nodes.Count;

            if (nodes.Count == 0)
            {
                return;
            }

            context.Validate(_effect);
            context.ThrowIfCameraMissing();

            var graphicsDevice   = context.GraphicsService.GraphicsDevice;
            var savedRenderState = new RenderStateSnapshot(graphicsDevice);

            graphicsDevice.BlendState        = BlendState.AlphaBlend;
            graphicsDevice.RasterizerState   = RasterizerState.CullNone;
            graphicsDevice.DepthStencilState = DepthStencilState.DepthRead;

            // Camera properties
            var    cameraNode = context.CameraNode;
            Matrix view       = (Matrix) new Matrix(cameraNode.PoseWorld.Orientation.Transposed, new Vector3());

            _parameterView.SetValue(view);
            Matrix projection = cameraNode.Camera.Projection;

            _parameterProjection.SetValue(projection);

            // Update SceneNode.LastFrame for all visible nodes.
            int frame = context.Frame;

            cameraNode.LastFrame = frame;

            for (int i = 0; i < numberOfNodes; i++)
            {
                var node = nodes[i] as ScatteringSkyNode;
                if (node == null)
                {
                    continue;
                }

                // ScatteringSkyNode is visible in current frame.
                node.LastFrame = frame;

                _parameterSunDirection.SetValue((Vector3)node.SunDirection);
                _parameterSunIntensity.SetValue((Vector3)(node.SunIntensity * node.SunColor));
                _parameterRadii.SetValue(new Vector4(
                                             node.AtmosphereHeight + node.PlanetRadius, // Atmosphere radius
                                             node.PlanetRadius,                         // Ground radius
                                             node.ObserverAltitude + node.PlanetRadius, // Observer radius
                                             node.ScaleHeight));                        // Absolute Scale height
                _parameterNumberOfSamples.SetValue(node.NumberOfSamples);
                _parameterBetaRayleigh.SetValue((Vector3)node.BetaRayleigh);
                _parameterBetaMie.SetValue((Vector3)node.BetaMie);
                _parameterGMie.SetValue(node.GMie);
                _parameterTransmittance.SetValue(node.Transmittance);

                if (node.BaseHorizonColor.IsNumericallyZero && node.BaseZenithColor.IsNumericallyZero)
                {
                    // No base color.
                    if (context.IsHdrEnabled())
                    {
                        _passLinear.Apply();
                    }
                    else
                    {
                        _passGamma.Apply();
                    }
                }
                else
                {
                    // Add base color.
                    _parameterBaseHorizonColor.SetValue((Vector4) new Vector4(node.BaseHorizonColor, node.BaseColorShift));
                    _parameterBaseZenithColor.SetValue((Vector3)node.BaseZenithColor);

                    if (context.IsHdrEnabled())
                    {
                        _passLinearWithBaseColor.Apply();
                    }
                    else
                    {
                        _passGammaWithBaseColor.Apply();
                    }
                }

                _submesh.Draw();
            }

            savedRenderState.Restore();
        }
コード例 #54
0
        public override void Render(IList <SceneNode> nodes, RenderContext context, RenderOrder order)
        {
            ThrowIfDisposed();

            if (nodes == null)
            {
                throw new ArgumentNullException("nodes");
            }
            if (context == null)
            {
                throw new ArgumentNullException("context");
            }

            int numberOfNodes = nodes.Count;

            if (nodes.Count == 0)
            {
                return;
            }

            context.Validate(_spriteBatch);
            context.ThrowIfCameraMissing();

            var graphicsDevice   = context.GraphicsService.GraphicsDevice;
            var savedRenderState = new RenderStateSnapshot(graphicsDevice);

            // Camera properties
            var       cameraNode     = context.CameraNode;
            Matrix44F viewProjection = cameraNode.Camera.Projection * cameraNode.View;
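            // (Matrix44F uses the column-vector convention, so the combined transform is
            // written as projection * view; with XNA's row-vector Matrix type it would be
            // view * projection.)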
            var       viewport       = graphicsDevice.Viewport;

            // Update SceneNode.LastFrame for all visible nodes.
            int frame = context.Frame;

            cameraNode.LastFrame = frame;

            SpriteSortMode sortMode;

            switch (order)
            {
            case RenderOrder.Default:
                sortMode = SpriteSortMode.Texture;
                break;

            case RenderOrder.FrontToBack:
                sortMode = SpriteSortMode.FrontToBack;
                break;

            case RenderOrder.BackToFront:
                sortMode = SpriteSortMode.BackToFront;
                break;

            case RenderOrder.UserDefined:
            default:
                sortMode = SpriteSortMode.Deferred;
                break;
            }

            _spriteBatch.Begin(sortMode, graphicsDevice.BlendState, null, graphicsDevice.DepthStencilState, null);

            for (int i = 0; i < numberOfNodes; i++)
            {
                var node = nodes[i] as SpriteNode;
                if (node == null)
                {
                    continue;
                }

                // SpriteNode is visible in current frame.
                node.LastFrame = frame;

                // Position, size, and origin in pixels.
                Vector3F position = new Vector3F();
                Vector2  size     = new Vector2();
                Vector2  origin   = new Vector2();

                var bitmapSprite = node.Sprite as ImageSprite;
                if (bitmapSprite != null)
                {
                    var packedTexture = bitmapSprite.Texture;
                    if (packedTexture != null)
                    {
                        // Project into viewport and snap to pixels.
                        position   = viewport.ProjectToViewport(node.PoseWorld.Position, viewProjection);
                        position.X = (int)(position.X + 0.5f);
                        position.Y = (int)(position.Y + 0.5f);

                        // Get source rectangle (pixel bounds).
                        var sourceRectangle = packedTexture.GetBounds(node.AnimationTime);
                        size = new Vector2(sourceRectangle.Width, sourceRectangle.Height);

                        // Premultiply color.
                        Vector3F color3F = node.Color;
                        float    alpha   = node.Alpha;
                        Color    color   = new Color(color3F.X * alpha, color3F.Y * alpha, color3F.Z * alpha, alpha);
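                        // (e.g. a white sprite with Alpha = 0.5 becomes the premultiplied
                        // color (0.5, 0.5, 0.5, 0.5).)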

                        // Get absolute origin (relative to pixel bounds).
                        origin = (Vector2)node.Origin * size;

                        // Draw using SpriteBatch.
                        _spriteBatch.Draw(
                            packedTexture.TextureAtlas, new Vector2(position.X, position.Y), sourceRectangle,
                            color, node.Rotation, origin, (Vector2)node.Scale, SpriteEffects.None, position.Z);
                    }
                }
                else
                {
                    var textSprite = node.Sprite as TextSprite;
                    if (textSprite != null)
                    {
                        var font = textSprite.Font ?? _spriteFont;
                        if (font != null)
                        {
                            // Text can be a string or StringBuilder.
                            var text = textSprite.Text as string;
                            if (text != null)
                            {
                                if (text.Length > 0)
                                {
                                    // Project into viewport and snap to pixels.
                                    position   = viewport.ProjectToViewport(node.PoseWorld.Position, viewProjection);
                                    position.X = (int)(position.X + 0.5f);
                                    position.Y = (int)(position.Y + 0.5f);

                                    // Premultiply color.
                                    Vector3F color3F = node.Color;
                                    float    alpha   = node.Alpha;
                                    Color    color   = new Color(color3F.X * alpha, color3F.Y * alpha, color3F.Z * alpha, alpha);

                                    // Get absolute origin (relative to pixel bounds).
                                    size   = font.MeasureString(text);
                                    origin = (Vector2)node.Origin * size;

                                    // Draw using SpriteBatch.
                                    _spriteBatch.DrawString(
                                        font, text, new Vector2(position.X, position.Y),
                                        color, node.Rotation, origin, (Vector2)node.Scale,
                                        SpriteEffects.None, position.Z);
                                }
                            }
                            else
                            {
                                var stringBuilder = textSprite.Text as StringBuilder;
                                if (stringBuilder != null && stringBuilder.Length > 0)
                                {
                                    // Project into viewport and snap to pixels.
                                    position   = viewport.ProjectToViewport(node.PoseWorld.Position, viewProjection);
                                    position.X = (int)(position.X + 0.5f);
                                    position.Y = (int)(position.Y + 0.5f);

                                    // Premultiply color.
                                    Vector3F color3F = node.Color;
                                    float    alpha   = node.Alpha;
                                    Color    color   = new Color(color3F.X * alpha, color3F.Y * alpha, color3F.Z * alpha, alpha);

                                    // Get absolute origin (relative to pixel bounds).
                                    size   = font.MeasureString(stringBuilder);
                                    origin = (Vector2)node.Origin * size;

                                    // Draw using SpriteBatch.
                                    _spriteBatch.DrawString(
                                        font, stringBuilder, new Vector2(position.X, position.Y),
                                        color, node.Rotation, origin, (Vector2)node.Scale,
                                        SpriteEffects.None, position.Z);
                                }
                            }
                        }
                    }
                }

                // Store bounds and depth for hit tests.
                node.LastBounds = new Rectangle(
                    (int)(position.X - origin.X),
                    (int)(position.Y - origin.Y),
                    (int)(size.X * node.Scale.X),
                    (int)(size.Y * node.Scale.Y));

                node.LastDepth = position.Z;
            }

            _spriteBatch.End();
            savedRenderState.Restore();
        }
コード例 #55
0
        internal override void ProcessJobs(RenderContext context, RenderOrder order)
        {
            var graphicsDevice   = _graphicsService.GraphicsDevice;
            var savedRenderState = new RenderStateSnapshot(graphicsDevice);
            var target           = context.RenderTarget;
            var viewport         = context.Viewport;

            Debug.Assert(_shadowMasks.Length > 0);
            Debug.Assert(_shadowMasks[0] != null);

            RenderTarget2D lowResTarget = null;

            if (UseHalfResolution && Numeric.IsGreater(UpsampleDepthSensitivity, 0))
            {
                // Half-res rendering with upsampling.
                var format = new RenderTargetFormat(_shadowMasks[0]);
                format.Width  /= 2;
                format.Height /= 2;
                lowResTarget   = _graphicsService.RenderTargetPool.Obtain2D(format);
            }

            int index               = 0;
            var jobs                = Jobs.Array;
            int jobCount            = Jobs.Count;
            int lastShadowMaskIndex = -1;

            while (index < jobCount)
            {
                int shadowMaskIndex = (int)(jobs[index].SortKey >> 16);
                var renderer        = jobs[index].Renderer;

                // Find end of current batch.
                int endIndexExclusive = index + 1;
                while (endIndexExclusive < jobCount)
                {
                    if ((int)(jobs[endIndexExclusive].SortKey >> 16) != shadowMaskIndex ||
                        jobs[endIndexExclusive].Renderer != renderer)
                    {
                        break;
                    }

                    endIndexExclusive++;
                }

                // Restore the render state. (The integrated scene node renderers properly
                // restore the render state, but third-party renderers might mess it up.)
                if (index > 0)
                {
                    savedRenderState.Restore();
                }

                if (shadowMaskIndex != lastShadowMaskIndex)
                {
                    // Done with current shadow mask. Apply filter.
                    if (lastShadowMaskIndex >= 0)
                    {
                        PostProcess(context, context.RenderTarget, _shadowMasks[lastShadowMaskIndex]);
                    }

                    // Switch to next shadow mask.
                    lastShadowMaskIndex = shadowMaskIndex;

                    var shadowMask = lowResTarget ?? _shadowMasks[shadowMaskIndex];

                    // Set device render target and clear it to white (= no shadow).
                    graphicsDevice.SetRenderTarget(shadowMask);
                    context.RenderTarget = shadowMask;
                    context.Viewport     = graphicsDevice.Viewport;
                    graphicsDevice.Clear(Color.White);
                }

                // Submit batch to renderer.
                // (Use Accessor to expose current batch as IList<SceneNode>.)
                JobsAccessor.Set(Jobs, index, endIndexExclusive);
                renderer.Render(JobsAccessor, context, order);
                JobsAccessor.Reset();

                index = endIndexExclusive;
            }

            // Done with last shadow mask. Apply filter.
            PostProcess(context, context.RenderTarget, _shadowMasks[lastShadowMaskIndex]);

            savedRenderState.Restore();
            graphicsDevice.ResetTextures();
            graphicsDevice.SetRenderTarget(null);
            context.RenderTarget = target;
            context.Viewport     = viewport;

            _graphicsService.RenderTargetPool.Recycle(lowResTarget);
        }
コード例 #56
0
        public override void Render(IList <SceneNode> nodes, RenderContext context, RenderOrder order)
        {
            if (nodes == null)
            {
                throw new ArgumentNullException("nodes");
            }
            if (context == null)
            {
                throw new ArgumentNullException("context");
            }
            if (context.Scene == null)
            {
                throw new ArgumentException("Scene needs to be set in render context.", "context");
            }
            if (context.CameraNode == null)
            {
                throw new ArgumentException("Camera needs to be set in render context.", "context");
            }
            if (!(context.CameraNode.Camera.Projection is PerspectiveProjection))
            {
                throw new ArgumentException("The camera in the render context must use a perspective projection.", "context");
            }

            int numberOfNodes = nodes.Count;

            if (numberOfNodes == 0)
            {
                return;
            }

            var graphicsDevice = context.GraphicsService.GraphicsDevice;
            int frame          = context.Frame;

            var savedRenderState = new RenderStateSnapshot(graphicsDevice);

            var   originalRenderTarget  = context.RenderTarget;
            var   originalViewport      = context.Viewport;
            var   originalCameraNode    = context.CameraNode;
            var   originalLodCameraNode = context.LodCameraNode;
            float originalLodBias       = context.LodBias;
            var   originalReferenceNode = context.ReferenceNode;

            Pose      originalCameraPose        = originalCameraNode.PoseWorld;
            Vector3F  originalCameraPosition    = originalCameraPose.Position;
            Matrix33F originalCameraOrientation = originalCameraPose.Orientation;

            Vector3F right = originalCameraOrientation.GetColumn(0);
            Vector3F up    = originalCameraOrientation.GetColumn(1);
            Vector3F back  = originalCameraOrientation.GetColumn(2);

            try
            {
                // The loop is wrapped in a try/catch block to report InvalidOperationExceptions
                // in case the collection is modified during rendering.
                for (int i = 0; i < numberOfNodes; i++)
                {
                    var node = nodes[i] as PlanarReflectionNode;
                    if (node == null)
                    {
                        continue;
                    }

                    // Update each node only once per frame.
                    if (node.LastFrame == frame)
                    {
                        continue;
                    }

                    node.LastFrame = frame;

                    var texture = node.RenderToTexture.Texture;
                    if (texture == null)
                    {
                        continue;
                    }

                    var renderTarget = texture as RenderTarget2D;
                    if (renderTarget == null)
                    {
                        throw new GraphicsException(
                                  "PlanarReflectionNode.RenderToTexture.Texture is invalid. The texture must be a RenderTarget2D.");
                    }

                    // RenderToTexture instances can be shared. --> Update them only once per frame.
                    if (node.RenderToTexture.LastFrame == frame)
                    {
                        continue;
                    }

                    // Do not render if we look at the back of the reflection plane.
                    Vector3F planeNormal   = node.NormalWorld;
                    Vector3F planePosition = node.PoseWorld.Position;
                    Vector3F planeToCamera = originalCameraPosition - planePosition;
                    if (Vector3F.Dot(planeNormal, planeToCamera) < 0)
                    {
                        continue;
                    }

                    var cameraNode = node.CameraNode;

                    // Reflect camera pose.
                    Pose cameraPose;
                    cameraPose.Position    = planePosition + Reflect(planeToCamera, planeNormal);
                    cameraPose.Orientation = new Matrix33F();
                    cameraPose.Orientation.SetColumn(0, Reflect(right, planeNormal));
                    cameraPose.Orientation.SetColumn(1, -Reflect(up, planeNormal));
                    cameraPose.Orientation.SetColumn(2, Reflect(back, planeNormal));
                    cameraNode.PoseWorld = cameraPose;

                    // The projection of the player camera.
                    var originalProjection = originalCameraNode.Camera.Projection;
                    // The projection of the reflected camera.
                    var projection = (PerspectiveProjection)cameraNode.Camera.Projection;

                    // Choose optimal projection. We get the screen-space bounds of the reflection node.
                    // Then we make the FOV so small that it exactly contains the node.
                    projection.Set(originalProjection);

                    var bounds = GraphicsHelper.GetBounds(cameraNode, node);

                    // Skip this node if the bounds are empty.
                    if (Numeric.AreEqual(bounds.X, bounds.Z) || Numeric.AreEqual(bounds.Y, bounds.W))
                    {
                        continue;
                    }

                    // Apply FOV scale to bounds.
                    float fovScale = node.FieldOfViewScale;
                    float deltaX   = (bounds.Z - bounds.X) * (fovScale - 1) / 2;
                    bounds.X -= deltaX;
                    bounds.Z += deltaX;
                    float deltaY = (bounds.W - bounds.Y) * (fovScale - 1) / 2;
                    bounds.Y -= deltaY;
                    bounds.W += deltaY;

                    // Update the projection to contain only the node bounds.
                    // (Cache the original extents first, because setting Left/Top changes Width/Height.)
                    float projectionLeft   = projection.Left;
                    float projectionTop    = projection.Top;
                    float projectionWidth  = projection.Width;
                    float projectionHeight = projection.Height;
                    projection.Left   = projectionLeft + bounds.X * projectionWidth;
                    projection.Right  = projectionLeft + bounds.Z * projectionWidth;
                    projection.Top    = projectionTop - bounds.Y * projectionHeight;
                    projection.Bottom = projectionTop - bounds.W * projectionHeight;

                    // Set far clip plane.
                    if (node.Far.HasValue)
                    {
                        projection.Far = node.Far.Value;
                    }

                    // Set near clip plane.
                    Vector3F planeNormalCamera = cameraPose.ToLocalDirection(-node.NormalWorld);
                    Vector3F planePointCamera  = cameraPose.ToLocalPosition(node.PoseWorld.Position);
                    projection.NearClipPlane = new Plane(planeNormalCamera, planePointCamera);

                    context.CameraNode    = cameraNode;
                    context.LodCameraNode = cameraNode;
                    context.LodBias       = node.LodBias ?? originalLodBias;
                    context.ReferenceNode = node;

                    context.RenderTarget = renderTarget;
                    context.Viewport     = new Viewport(0, 0, renderTarget.Width, renderTarget.Height);

                    RenderCallback(context);

                    // Update other properties of RenderToTexture.
                    node.RenderToTexture.LastFrame     = frame;
                    node.RenderToTexture.TextureMatrix = GraphicsHelper.ProjectorBiasMatrix
                                                         * cameraNode.Camera.Projection
                                                         * cameraNode.PoseWorld.Inverse;
                }
            }
            catch (InvalidOperationException exception)
            {
                throw new GraphicsException(
                          "InvalidOperationException was raised in PlanarReflectionRenderer.Render(). "
                          + "This can happen if a SceneQuery instance that is currently in use is modified in the "
                          + "RenderCallback. --> Use different SceneQuery types in the method which calls "
                          + "SceneCaptureRenderer.Render() and in the RenderCallback method.",
                          exception);
            }

            graphicsDevice.SetRenderTarget(null);
            savedRenderState.Restore();

            context.RenderTarget  = originalRenderTarget;
            context.Viewport      = originalViewport;
            context.CameraNode    = originalCameraNode;
            context.LodCameraNode = originalLodCameraNode;
            context.LodBias       = originalLodBias;
            context.ReferenceNode = originalReferenceNode;
        }
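
The renderer above mirrors the camera position and the basis vectors of its orientation at the reflection plane through a Reflect helper. A minimal sketch of such a helper, assuming a unit-length plane normal and using System.Numerics instead of the engine's Vector3F type:

        using System.Numerics;

        static class ReflectionMath
        {
            // Reflects a vector at a plane through the origin with unit normal n:
            // r = v - 2 * dot(v, n) * n
            public static Vector3 Reflect(Vector3 v, Vector3 normal)
            {
                return v - 2f * Vector3.Dot(v, normal) * normal;
            }
        }

System.Numerics.Vector3.Reflect implements the same formula, so the helper mainly documents the math used when reflecting the camera basis.
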
Code example #57
        public override void Render(IList<SceneNode> nodes, RenderContext context, RenderOrder order)
        {
            if (nodes == null)
            {
                throw new ArgumentNullException("nodes");
            }
            if (context == null)
            {
                throw new ArgumentNullException("context");
            }

            int numberOfNodes = nodes.Count;
            if (numberOfNodes == 0)
            {
                return;
            }

            context.Validate(_effect);
            context.ThrowIfCameraMissing();

            var graphicsDevice   = _effect.GraphicsDevice;
            var savedRenderState = new RenderStateSnapshot(graphicsDevice);
            graphicsDevice.DepthStencilState = DepthStencilState.None;
            graphicsDevice.RasterizerState   = RasterizerState.CullNone;

            var cameraNode = context.CameraNode;
            _parameterViewInverse.SetValue(cameraNode.PoseWorld);
            _parameterGBuffer0.SetValue(context.GBuffer0);

            Viewport viewport = context.Viewport;
            _parameterParameters0.SetValue(new Vector2(viewport.Width, viewport.Height));

            if (_jitterMap == null)
            {
                _jitterMap = NoiseHelper.GetGrainTexture(context.GraphicsService, NoiseHelper.DefaultJitterMapWidth);
            }

            _parameterJitterMap.SetValue(_jitterMap);

            for (int i = 0; i < numberOfNodes; i++)
            {
                var lightNode = nodes[i] as LightNode;
                if (lightNode == null)
                {
                    continue;
                }

                var light = lightNode.Light as PointLight;
                if (light == null)
                {
                    continue;
                }

                var shadow = lightNode.Shadow as CubeMapShadow;
                if (shadow == null)
                {
                    continue;
                }

                if (shadow.ShadowMap == null || shadow.ShadowMask == null)
                {
                    continue;
                }

                // The effect must only render into a specific channel.
                // Do not change the blend state if the correct write channel is already set, e.g. if this
                // shadow is part of a CompositeShadow, the correct blend state is already set.
                if ((int)graphicsDevice.BlendState.ColorWriteChannels != (1 << shadow.ShadowMaskChannel))
                {
                    graphicsDevice.BlendState = GraphicsHelper.BlendStateWriteSingleChannel[shadow.ShadowMaskChannel];
                }

                _parameterParameters1.SetValue(new Vector4(
                    shadow.Near,
                    light.Range,
                    shadow.EffectiveDepthBias,
                    shadow.EffectiveNormalOffset));

                // If we use a subset of the Poisson kernel, we have to normalize the scale.
                int numberOfSamples = Math.Min(shadow.NumberOfSamples, StandardShadowMaskRenderer.PoissonKernel.Length);
                float filterRadius  = shadow.FilterRadius;
                if (numberOfSamples > 0)
                {
                    filterRadius /= StandardShadowMaskRenderer.PoissonKernel[numberOfSamples - 1].Length();
                }

                _parameterParameters2.SetValue(new Vector3(
                    shadow.ShadowMap.Size,
                    filterRadius,
                    // The StandardShadow.JitterResolution is the number of texels per world unit.
                    // In the shader the parameter JitterResolution contains the division by the jitter map size.
                    shadow.JitterResolution / _jitterMap.Width));

                _parameterLightPosition.SetValue((Vector3)cameraNode.PoseWorld.ToLocalPosition(lightNode.PoseWorld.Position));

                _parameterShadowView.SetValue(lightNode.PoseWorld.Inverse * cameraNode.PoseWorld);
                _parameterShadowMap.SetValue(shadow.ShadowMap);

                var rectangle = GraphicsHelper.GetViewportRectangle(cameraNode, viewport, lightNode);
                Vector2F texCoordTopLeft     = new Vector2F(rectangle.Left / (float)viewport.Width, rectangle.Top / (float)viewport.Height);
                Vector2F texCoordBottomRight = new Vector2F(rectangle.Right / (float)viewport.Width, rectangle.Bottom / (float)viewport.Height);
                GraphicsHelper.GetFrustumFarCorners(cameraNode.Camera.Projection, texCoordTopLeft, texCoordBottomRight, _frustumFarCorners);
                _parameterFrustumCorners.SetValue(_frustumFarCorners);

                var pass = GetPass(numberOfSamples);

                if (numberOfSamples > 0)
                {
                    if (_lastNumberOfSamples != numberOfSamples)
                    {
                        // Create an array with the first n samples and the rest set to 0.
                        _lastNumberOfSamples = numberOfSamples;
                        for (int j = 0; j < numberOfSamples; j++)
                        {
                            _samples[j].Y = StandardShadowMaskRenderer.PoissonKernel[j].Y;
                            _samples[j].X = StandardShadowMaskRenderer.PoissonKernel[j].X;
                            _samples[j].Z = 1.0f / numberOfSamples;
                        }

                        // Set the rest to zero.
                        for (int j = numberOfSamples; j < _samples.Length; j++)
                        {
                            _samples[j] = Vector3.Zero;
                        }

                        _parameterSamples.SetValue(_samples);
                    }
                    else if (i == 0)
                    {
                        // Apply the offsets in the first loop iteration.
                        _parameterSamples.SetValue(_samples);
                    }
                }

                pass.Apply();

                graphicsDevice.DrawQuad(rectangle);
            }

            _parameterGBuffer0.SetValue((Texture2D)null);
            _parameterJitterMap.SetValue((Texture2D)null);
            _parameterShadowMap.SetValue((Texture2D)null);
            savedRenderState.Restore();
        }
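
The shadow mask packs up to four masks into the R, G, B, and A channels of a single render target, which is why the blend state above restricts writes to one channel. In XNA/MonoGame, ColorWriteChannels is a flags enum with Red = 1, Green = 2, Blue = 4 and Alpha = 8, so a channel index i in [0, 3] corresponds to the flag value (1 << i). A small stand-alone sketch of the same check (the helper name is illustrative):

        using Microsoft.Xna.Framework.Graphics;

        static class ShadowMaskChannels
        {
            // Returns true if the blend state writes exactly the given channel
            // (0 = R, 1 = G, 2 = B, 3 = A), mirroring the comparison in the loop above.
            public static bool WritesOnlyChannel(BlendState blendState, int channelIndex)
            {
                return (int)blendState.ColorWriteChannels == (1 << channelIndex);
            }
        }
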
Code example #58
        private void Render(RenderContext context, Vector4F color, Texture2D colorTexture, bool preserveColor)
        {
            if (context == null)
            {
                throw new ArgumentNullException("context");
            }

            context.Validate(_effect);
            context.ThrowIfCameraMissing();
            context.ThrowIfGBuffer0Missing();

            var graphicsDevice   = _effect.GraphicsDevice;
            var savedRenderState = new RenderStateSnapshot(graphicsDevice);

            graphicsDevice.DepthStencilState = GraphicsHelper.DepthStencilStateAlways;
            graphicsDevice.RasterizerState   = RasterizerState.CullNone;

            if (preserveColor)
            {
                graphicsDevice.BlendState = GraphicsHelper.BlendStateNoColorWrite;
            }
            else
            {
                graphicsDevice.BlendState = BlendState.Opaque;
            }

            if (colorTexture != null)
            {
                if (TextureHelper.IsFloatingPointFormat(colorTexture.Format))
                {
                    graphicsDevice.SamplerStates[1] = SamplerState.PointClamp;
                }
                else
                {
                    graphicsDevice.SamplerStates[1] = SamplerState.LinearClamp;
                }
            }

            var   projection    = context.CameraNode.Camera.Projection;
            bool  isPerspective = projection is PerspectiveProjection;
            float near          = projection.Near * NearBias;
            float far           = projection.Far * FarBias;
            var biasedProjection = isPerspective
                ? Matrix44F.CreatePerspectiveOffCenter(
                      projection.Left, projection.Right,
                      projection.Bottom, projection.Top,
                      near, far)
                : Matrix44F.CreateOrthographicOffCenter(
                      projection.Left, projection.Right,
                      projection.Bottom, projection.Top,
                      near, far);

            var viewport = graphicsDevice.Viewport;

            _parameterViewportSize.SetValue(new Vector2(viewport.Width, viewport.Height));
            _parameterProjection.SetValue((Matrix)biasedProjection);
            _parameterCameraFar.SetValue(projection.Far);
            _parameterGBuffer0.SetValue(context.GBuffer0);
            _parameterColor.SetValue((Vector4)color);
            _parameterSourceTexture.SetValue(colorTexture);

            _effect.CurrentTechnique = isPerspective ? _techniquePerspective : _techniqueOrthographic;
            _effect.CurrentTechnique.Passes[(colorTexture == null) ? 0 : 1].Apply();

            graphicsDevice.DrawFullScreenQuad();

            graphicsDevice.ResetTextures();

            savedRenderState.Restore();
        }
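
The sampler choice above reflects the fact that floating-point surface formats cannot be linearly filtered on all graphics hardware, so the color texture falls back to point sampling in that case. A stand-alone sketch of the same decision under that assumption (the helper names are illustrative; TextureHelper.IsFloatingPointFormat is the engine's own check):

        using Microsoft.Xna.Framework.Graphics;

        static class SamplerChoice
        {
            // Point filtering for floating-point formats, linear filtering otherwise.
            public static SamplerState ChooseClampSampler(Texture2D texture)
            {
                return IsFloatingPointFormat(texture.Format)
                    ? SamplerState.PointClamp
                    : SamplerState.LinearClamp;
            }

            static bool IsFloatingPointFormat(SurfaceFormat format)
            {
                return format == SurfaceFormat.Single
                    || format == SurfaceFormat.Vector2
                    || format == SurfaceFormat.Vector4
                    || format == SurfaceFormat.HalfSingle
                    || format == SurfaceFormat.HalfVector2
                    || format == SurfaceFormat.HalfVector4
                    || format == SurfaceFormat.HdrBlendable;
            }
        }
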
Code example #59
    private void ProcessJobs(RenderContext context, RenderOrder order)
    {
      Effect currentEffect = null;
      EffectEx currentEffectEx = null;
      EffectBinding currentMaterialBinding = null;

      // Set render states for drawing decals.
      var graphicsDevice = context.GraphicsService.GraphicsDevice;
      var savedRenderState = new RenderStateSnapshot(graphicsDevice);

      graphicsDevice.DepthStencilState = DepthStencilState.DepthRead;
      graphicsDevice.RasterizerState = RasterizerState.CullCounterClockwise;

      if (!ClipAtNearPlane)
      {
        // Cache some info for near plane intersection tests.

        var cameraNode = context.CameraNode;
        var cameraPose = cameraNode.PoseWorld;
        var projection = cameraNode.Camera.Projection;

        // Get min and max of near plane AABB in view space.
        var min = new Vector3(projection.Left, projection.Bottom, -projection.Near);
        var max = new Vector3(projection.Right, projection.Top, -projection.Near);

        // Convert min and max to world space.
        min = cameraPose.ToWorldPosition(min);
        max = cameraPose.ToWorldPosition(max);

        // Get world space aabb
        _cameraNearPlaneAabbWorld = new Aabb(Vector3.Min(min, max), Vector3.Max(min, max));
      }

      // The BlendState is set below.
      bool isGBufferPass = string.Equals(context.RenderPass, "GBuffer", StringComparison.OrdinalIgnoreCase);  // InvariantCultureIgnoreCase would be better but is not available in WindowsStore.
      var blendState = isGBufferPass ? GBufferBlendState : BlendState.AlphaBlend;

      int index = 0;
      var jobs = _jobs.Array;
      int jobCount = _jobs.Count;
      while (index < jobCount)
      {
        // Update BlendState. (Needs to be done for each batch because decals can
        // change the blend mode in the material. For example, alpha-tested decals 
        // can disable alpha blending.)
        graphicsDevice.BlendState = blendState;

        uint materialKey = jobs[index].MaterialKey;
        var materialInstanceBinding = jobs[index].MaterialInstanceBinding;
        var materialBinding = materialInstanceBinding.MaterialBinding;
        var effectEx = materialBinding.EffectEx;

        Debug.Assert(effectEx != null, "EffectEx must not be null.");

        context.MaterialBinding = materialBinding;
        context.MaterialInstanceBinding = materialInstanceBinding;

        if (currentEffectEx != effectEx)
        {
          // ----- Next effect.
          currentEffectEx = effectEx;
          currentEffect = effectEx.Resource;

          // Reset ID. (Only used during state sorting.)
          ResetEffectId(effectEx);

          // Update and apply global bindings.
          foreach (var binding in currentEffectEx.ParameterBindings)
          {
            if (binding.Description.Hint == EffectParameterHint.Global)
            {
              binding.Update(context);
              binding.Apply(context);
            }
          }
        }

        if (currentMaterialBinding != materialBinding)
        {
          // ----- Next material.
          currentMaterialBinding = materialBinding;

          // Reset ID. (Only used during state sorting.)
          ResetMaterialId(materialBinding);

          // Update and apply material bindings.
          foreach (var binding in currentMaterialBinding.ParameterBindings)
          {
            binding.Update(context);
            binding.Apply(context);

            // In "GBuffer" pass the specular power is written to the alpha channel.
            // The specular power needs to be set as the BlendFactor. (See GBufferBlendState.)
            if (isGBufferPass && binding.Description.Semantic == DefaultEffectParameterSemantics.SpecularPower)
            {
              var specularPowerBinding = binding as EffectParameterBinding<float>;
              if (specularPowerBinding != null)
              {
                // Note: Specular power is currently encoded using log2 - see Deferred.fxh.
                // (Blending encoded values is mathematically not correct, but there are no
                // rules for blending specular powers anyway.)
                float specularPower = specularPowerBinding.Value;
                int encodedSpecularPower = (byte)((float)Math.Log(specularPower + 0.0001f, 2) / 17.6f * 255.0f);
                graphicsDevice.BlendFactor = new Color(255, 255, 255, encodedSpecularPower);
              }
            }
          }
        }

        // Note: EffectTechniqueBinding only returns the EffectTechnique, but does 
        // not set it as the current technique.
        var techniqueBinding = materialInstanceBinding.TechniqueBinding;
        var technique = techniqueBinding.GetTechnique(currentEffect, context);

        // See if there is an associated technique that supports hardware instancing.
        //var instancingTechnique = (EffectTechnique)null;
        //var techniqueDescription = currentEffectEx.TechniqueDescriptions[technique];
        //if (techniqueDescription != null)
        //  instancingTechnique = techniqueDescription.InstancingTechnique;

        //if (EnableInstancing && instancingTechnique != null)
        //{
        //  // ----- Instancing
        //  // Render all decals that share the same effect/material and batch instances 
        //  // into a single draw call.
        //  int count = 1;
        //  while (index + count < jobCount && jobs[index + count].MaterialKey == materialKey)
        //    count++;

        //  if (count >= InstancingThreshold)
        //  {
        //    // Draw decals using instancing.
        //    currentEffect.CurrentTechnique = instancingTechnique;
        //    var passBinding = techniqueBinding.GetPassBinding(instancingTechnique, context);
        //    DrawInstanced(ref passBinding, context, index, count);
        //    index += count;
        //  }
        //  else
        //  {
        //    // Draw decals without instancing.
        //    currentEffect.CurrentTechnique = technique;
        //    var passBinding = techniqueBinding.GetPassBinding(technique, context);
        //    Draw(ref passBinding, context, index, count, order);
        //    index += count;
        //  }
        //}
        //else
        {
          // ----- No instancing

          // Render all decals that share the same effect/material.
          int count = 1;
          while (index + count < jobCount && jobs[index + count].MaterialKey == materialKey)
            count++;

          currentEffect.CurrentTechnique = technique;
          var passBinding = techniqueBinding.GetPassBinding(technique, context);
          Draw(ref passBinding, context, index, count, order);
          index += count;
        }
      }

      context.MaterialBinding = null;
      context.MaterialInstanceBinding = null;

      savedRenderState.Restore();
    }
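
In the "GBuffer" pass the specular power is stored logarithmically in the alpha channel, which is why the loop above pushes it into the BlendFactor: the exponent log2(power) is divided by 17.6 and mapped to a byte. A minimal sketch of the matching encode/decode pair, assuming the same constants as in the snippet:

    using System;

    static class SpecularPowerCodec
    {
        // Encode as in the decal renderer above; decode is the inverse mapping.
        public static byte EncodeSpecularPower(float specularPower)
        {
            return (byte)((float)Math.Log(specularPower + 0.0001f, 2) / 17.6f * 255.0f);
        }

        public static float DecodeSpecularPower(byte encoded)
        {
            return (float)Math.Pow(2, encoded / 255.0f * 17.6f);
        }
    }
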
Code example #60
File: FogRenderer.cs    Project: terrynoya/DigitalRune
        public override void Render(IList<SceneNode> nodes, RenderContext context, RenderOrder order)
        {
            ThrowIfDisposed();

            if (nodes == null)
            {
                throw new ArgumentNullException("nodes");
            }
            if (context == null)
            {
                throw new ArgumentNullException("context");
            }

            context.Validate(_effect);
            context.ThrowIfCameraMissing();
            context.ThrowIfGBuffer0Missing();

            // Fog is not used in all games. --> Early out, if possible.
            int numberOfNodes = nodes.Count;

            if (nodes.Count == 0)
            {
                return;
            }

            if (nodes.Count > 1)
            {
                // Get a sorted list of all fog nodes.
                if (_fogNodes == null)
                {
                    _fogNodes = new List<SceneNode>();
                }

                _fogNodes.Clear();
                for (int i = 0; i < numberOfNodes; i++)
                {
                    var node = nodes[i] as FogNode;
                    if (node != null)
                    {
                        _fogNodes.Add(node);
                        node.SortTag = node.Priority;
                    }
                }

                // Sort ascending. (Fog with lower priority is rendered first.)
                // Note: Since this list is a list of SceneNodes, we use the AscendingNodeComparer
                // instead of the AscendingFogNodeComparer. The Priority was written to the SortTag,
                // so this will work.
                _fogNodes.Sort(AscendingNodeComparer.Instance);
                nodes         = _fogNodes;
                numberOfNodes = _fogNodes.Count;
            }

            var graphicsDevice   = _effect.GraphicsDevice;
            var savedRenderState = new RenderStateSnapshot(graphicsDevice);

            graphicsDevice.RasterizerState   = RasterizerState.CullNone;
            graphicsDevice.DepthStencilState = DepthStencilState.None;
            graphicsDevice.BlendState        = BlendState.AlphaBlend;

            var viewport = graphicsDevice.Viewport;

            _parameterViewportSize.SetValue(new Vector2(viewport.Width, viewport.Height));

            var cameraNode = context.CameraNode;
            var cameraPose = cameraNode.PoseWorld;

            GraphicsHelper.GetFrustumFarCorners(cameraNode.Camera.Projection, _cameraFrustumFarCorners);

            // Convert frustum far corners from view space to world space.
            for (int i = 0; i < _cameraFrustumFarCorners.Length; i++)
            {
                _cameraFrustumFarCorners[i] = (Vector3)cameraPose.ToWorldDirection((Vector3F)_cameraFrustumFarCorners[i]);
            }

            _parameterFrustumCorners.SetValue(_cameraFrustumFarCorners);
            _parameterGBuffer0.SetValue(context.GBuffer0);

            // Update SceneNode.LastFrame for all visible nodes.
            int frame = context.Frame;

            cameraNode.LastFrame = frame;

            bool  directionalLightIsSet      = false;
            float scatteringSymmetryStrength = 1;

            for (int i = 0; i < numberOfNodes; i++)
            {
                var node = nodes[i] as FogNode;
                if (node == null)
                {
                    continue;
                }

                // FogNode is visible in current frame.
                node.LastFrame = frame;

                var fog = node.Fog;

                if (fog.Density <= Numeric.EpsilonF)
                {
                    continue;
                }

                // Compute actual density and falloff.
                float fogDensity    = fog.Density;
                float heightFalloff = fog.HeightFalloff;
                // In previous versions, we passed FogDensity * 2^(-h*y) to the effect. The following
                // (now disabled) code avoided numerical problems where this value becomes numerically 0.
                // This is now handled in the shader.
                //if (!Numeric.IsZero(heightFalloff))
                //{
                //  float cameraDensity = (float)Math.Pow(2, -heightFalloff * cameraPose.Position.Y);
                //  // Trick: If the heightFalloff is very large, the e^x function can quickly reach
                //  // the float limit! If this happens, the shader will not compute any fog and this
                //  // looks like the fog disappears. To avoid this problem we reduce the heightFalloff
                //  // to keep the result of e^x always within floating point range.
                //  const float Limit = 1e-37f;
                //  if (cameraDensity < Limit)
                //  {
                //    heightFalloff = (float)Math.Log(Limit) / -cameraPose.Position.Y / ConstantsF.Ln2;
                //    cameraDensity = Limit;
                //  }

                //  // Compute actual fog density.
                //  // fogDensity is at world space height 0. If the fog node is on another height,
                //  // we change the fogDensity.
                //  fogDensity *= (float)Math.Pow(2, -heightFalloff * (-node.PoseWorld.Position.Y));
                //  // Combine camera and fog density.
                //  fogDensity *= cameraDensity;
                //}

                _parameterFogParameters.SetValue(new Vector4(fog.Start, fog.End, fogDensity, heightFalloff));
                _parameterColor0.SetValue((Vector4)fog.Color0);
                _parameterColor1.SetValue((Vector4)fog.Color1);

                // Compute world space reference heights.
                var fogBaseHeight = node.PoseWorld.Position.Y;
                var height0       = fogBaseHeight + fog.Height0;
                var height1       = fogBaseHeight + fog.Height1;
                // Avoid division by zero in the shader.
                if (Numeric.AreEqual(height0, height1))
                {
                    height1 = height0 + 0.0001f;
                }
                _parameterHeights.SetValue(new Vector4(
                                               cameraNode.PoseWorld.Position.Y,
                                               fogBaseHeight,
                                               height0,
                                               height1));

                var  scatteringSymmetry    = fog.ScatteringSymmetry;
                bool useScatteringSymmetry = !scatteringSymmetry.IsNumericallyZero;

                if (useScatteringSymmetry)
                {
                    if (!directionalLightIsSet)
                    {
                        scatteringSymmetryStrength = SetDirectionalLightParameter(context, cameraNode);
                        directionalLightIsSet      = true;
                    }
                }

                if (!useScatteringSymmetry || Numeric.IsZero(scatteringSymmetryStrength))
                {
                    // No phase function.
                    if (Numeric.IsZero(heightFalloff))
                    {
                        _passFog.Apply();
                    }
                    else
                    {
                        _passFogWithHeightFalloff.Apply();
                    }
                }
                else
                {
                    // Use phase function.
                    // Set parameters for phase function.
                    _parameterScatteringSymmetry.SetValue((Vector3)scatteringSymmetry * scatteringSymmetryStrength);

                    if (Numeric.IsZero(heightFalloff))
                    {
                        _passFogWithPhase.Apply();
                    }
                    else
                    {
                        _passFogWithHeightFalloffWithPhase.Apply();
                    }
                }

                graphicsDevice.DrawFullScreenQuad();
            }

            if (_fogNodes != null)
            {
                _fogNodes.Clear();
            }

            savedRenderState.Restore();
        }
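
The disabled block above documents how the fog density used to be combined with an exponential height falloff on the CPU (FogDensity * 2^(-h*y)); that computation now lives in the shader. A minimal sketch of the relationship, assuming the density is defined at world height 0:

        using System;

        static class HeightFogMath
        {
            // The density halves every 1 / heightFalloff world units of height.
            public static float GetFogDensityAtHeight(float baseDensity, float heightFalloff, float worldY)
            {
                return baseDensity * (float)Math.Pow(2, -heightFalloff * worldY);
            }
        }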